From f915ff99582ea6fa2816680a02ae86f40ae4c966 Mon Sep 17 00:00:00 2001
From: Yigal
Date: Mon, 23 Dec 2024 14:49:21 +0200
Subject: [PATCH] stuff

---
 ai-agent.js | 31 +- index.js | 10 +- node_modules/.bin/openai | 1 + node_modules/.package-lock.json | 405 + node_modules/.yarn-integrity | 74 + node_modules/@actions/core/LICENSE.md | 9 + node_modules/@actions/core/README.md | 486 + node_modules/@actions/core/lib/command.d.ts | 15 + node_modules/@actions/core/lib/command.js | 96 + node_modules/@actions/core/lib/command.js.map | 1 + node_modules/@actions/core/lib/core.d.ts | 202 + node_modules/@actions/core/lib/core.js | 344 + node_modules/@actions/core/lib/core.js.map | 1 + .../@actions/core/lib/file-command.d.ts | 2 + .../@actions/core/lib/file-command.js | 62 + .../@actions/core/lib/file-command.js.map | 1 + .../@actions/core/lib/oidc-utils.d.ts | 7 + node_modules/@actions/core/lib/oidc-utils.js | 77 + .../@actions/core/lib/oidc-utils.js.map | 1 + .../@actions/core/lib/path-utils.d.ts | 25 + node_modules/@actions/core/lib/path-utils.js | 62 + .../@actions/core/lib/path-utils.js.map | 1 + node_modules/@actions/core/lib/platform.d.ts | 15 + node_modules/@actions/core/lib/platform.js | 94 + .../@actions/core/lib/platform.js.map | 1 + node_modules/@actions/core/lib/summary.d.ts | 202 + node_modules/@actions/core/lib/summary.js | 283 + node_modules/@actions/core/lib/summary.js.map | 1 + node_modules/@actions/core/lib/utils.d.ts | 14 + node_modules/@actions/core/lib/utils.js | 40 + node_modules/@actions/core/lib/utils.js.map | 1 + node_modules/@actions/core/package.json | 45 + node_modules/@actions/exec/LICENSE.md | 9 + node_modules/@actions/exec/README.md | 57 + node_modules/@actions/exec/lib/exec.d.ts | 24 + node_modules/@actions/exec/lib/exec.js | 103 + node_modules/@actions/exec/lib/exec.js.map | 1 + .../@actions/exec/lib/interfaces.d.ts | 57 + node_modules/@actions/exec/lib/interfaces.js | 3 + .../@actions/exec/lib/interfaces.js.map | 1 + .../@actions/exec/lib/toolrunner.d.ts | 37 + node_modules/@actions/exec/lib/toolrunner.js | 618 + .../@actions/exec/lib/toolrunner.js.map | 1 + node_modules/@actions/exec/package.json | 41 + node_modules/@actions/github/LICENSE.md | 9 + node_modules/@actions/github/README.md | 98 + node_modules/@actions/github/lib/context.d.ts | 32 + node_modules/@actions/github/lib/context.js | 54 + .../@actions/github/lib/context.js.map | 1 + node_modules/@actions/github/lib/github.d.ts | 11 + node_modules/@actions/github/lib/github.js | 37 + .../@actions/github/lib/github.js.map | 1 + .../@actions/github/lib/interfaces.d.ts | 40 + .../@actions/github/lib/interfaces.js | 4 + .../@actions/github/lib/interfaces.js.map | 1 + .../@actions/github/lib/internal/utils.d.ts | 6 + .../@actions/github/lib/internal/utils.js | 43 + .../@actions/github/lib/internal/utils.js.map | 1 + node_modules/@actions/github/lib/utils.d.ts | 15 + node_modules/@actions/github/lib/utils.js | 54 + node_modules/@actions/github/lib/utils.js.map | 1 + node_modules/@actions/github/package.json | 49 + node_modules/@actions/http-client/LICENSE | 21 + node_modules/@actions/http-client/README.md | 73 + .../@actions/http-client/lib/auth.d.ts | 26 + node_modules/@actions/http-client/lib/auth.js | 81 + .../@actions/http-client/lib/auth.js.map | 1 + .../@actions/http-client/lib/index.d.ts | 130 + .../@actions/http-client/lib/index.js | 652 + .../@actions/http-client/lib/index.js.map | 1 + .../@actions/http-client/lib/interfaces.d.ts | 46 + .../@actions/http-client/lib/interfaces.js | 3 + 
.../http-client/lib/interfaces.js.map | 1 + .../@actions/http-client/lib/proxy.d.ts | 2 + .../@actions/http-client/lib/proxy.js | 95 + .../@actions/http-client/lib/proxy.js.map | 1 + .../@actions/http-client/package.json | 51 + node_modules/@actions/io/LICENSE.md | 9 + node_modules/@actions/io/README.md | 53 + node_modules/@actions/io/lib/io-util.d.ts | 21 + node_modules/@actions/io/lib/io-util.js | 183 + node_modules/@actions/io/lib/io-util.js.map | 1 + node_modules/@actions/io/lib/io.d.ts | 64 + node_modules/@actions/io/lib/io.js | 299 + node_modules/@actions/io/lib/io.js.map | 1 + node_modules/@actions/io/package.json | 37 + node_modules/@fastify/busboy/LICENSE | 19 + node_modules/@fastify/busboy/README.md | 271 + .../@fastify/busboy/deps/dicer/LICENSE | 19 + .../@fastify/busboy/deps/dicer/lib/Dicer.js | 213 + .../busboy/deps/dicer/lib/HeaderParser.js | 100 + .../busboy/deps/dicer/lib/PartStream.js | 13 + .../@fastify/busboy/deps/dicer/lib/dicer.d.ts | 164 + .../@fastify/busboy/deps/streamsearch/sbmh.js | 228 + node_modules/@fastify/busboy/lib/main.d.ts | 196 + node_modules/@fastify/busboy/lib/main.js | 85 + .../@fastify/busboy/lib/types/multipart.js | 306 + .../@fastify/busboy/lib/types/urlencoded.js | 190 + .../@fastify/busboy/lib/utils/Decoder.js | 54 + .../@fastify/busboy/lib/utils/basename.js | 14 + .../@fastify/busboy/lib/utils/decodeText.js | 114 + .../@fastify/busboy/lib/utils/getLimit.js | 16 + .../@fastify/busboy/lib/utils/parseParams.js | 196 + node_modules/@fastify/busboy/package.json | 86 + node_modules/@octokit/auth-token/LICENSE | 21 + node_modules/@octokit/auth-token/README.md | 290 + .../@octokit/auth-token/dist-node/index.js | 55 + .../auth-token/dist-node/index.js.map | 1 + .../@octokit/auth-token/dist-src/auth.js | 21 + .../@octokit/auth-token/dist-src/hook.js | 6 + .../@octokit/auth-token/dist-src/index.js | 14 + .../@octokit/auth-token/dist-src/types.js | 1 + .../dist-src/with-authorization-prefix.js | 11 + .../@octokit/auth-token/dist-types/auth.d.ts | 2 + .../@octokit/auth-token/dist-types/hook.d.ts | 2 + .../@octokit/auth-token/dist-types/index.d.ts | 7 + .../@octokit/auth-token/dist-types/types.d.ts | 33 + .../dist-types/with-authorization-prefix.d.ts | 6 + .../@octokit/auth-token/dist-web/index.js | 55 + .../@octokit/auth-token/dist-web/index.js.map | 1 + node_modules/@octokit/auth-token/package.json | 45 + node_modules/@octokit/core/LICENSE | 21 + node_modules/@octokit/core/README.md | 448 + node_modules/@octokit/core/dist-node/index.js | 176 + .../@octokit/core/dist-node/index.js.map | 1 + node_modules/@octokit/core/dist-src/index.js | 125 + node_modules/@octokit/core/dist-src/types.js | 1 + .../@octokit/core/dist-src/version.js | 1 + .../@octokit/core/dist-types/index.d.ts | 30 + .../@octokit/core/dist-types/types.d.ts | 44 + .../@octokit/core/dist-types/version.d.ts | 1 + node_modules/@octokit/core/dist-web/index.js | 130 + .../@octokit/core/dist-web/index.js.map | 1 + node_modules/@octokit/core/package.json | 57 + node_modules/@octokit/endpoint/LICENSE | 21 + node_modules/@octokit/endpoint/README.md | 421 + .../@octokit/endpoint/dist-node/index.js | 390 + .../@octokit/endpoint/dist-node/index.js.map | 1 + .../@octokit/endpoint/dist-src/defaults.js | 17 + .../dist-src/endpoint-with-defaults.js | 5 + .../@octokit/endpoint/dist-src/index.js | 3 + .../@octokit/endpoint/dist-src/merge.js | 26 + .../@octokit/endpoint/dist-src/parse.js | 81 + .../dist-src/util/add-query-parameters.js | 17 + .../util/extract-url-variable-names.js | 11 + 
.../endpoint/dist-src/util/lowercase-keys.js | 9 + .../endpoint/dist-src/util/merge-deep.js | 16 + .../@octokit/endpoint/dist-src/util/omit.js | 8 + .../util/remove-undefined-properties.js | 8 + .../endpoint/dist-src/util/url-template.js | 164 + .../@octokit/endpoint/dist-src/version.js | 1 + .../endpoint/dist-src/with-defaults.js | 13 + .../endpoint/dist-types/defaults.d.ts | 2 + .../dist-types/endpoint-with-defaults.d.ts | 3 + .../@octokit/endpoint/dist-types/index.d.ts | 1 + .../@octokit/endpoint/dist-types/merge.d.ts | 2 + .../@octokit/endpoint/dist-types/parse.d.ts | 2 + .../dist-types/util/add-query-parameters.d.ts | 4 + .../util/extract-url-variable-names.d.ts | 1 + .../dist-types/util/lowercase-keys.d.ts | 5 + .../endpoint/dist-types/util/merge-deep.d.ts | 1 + .../endpoint/dist-types/util/omit.d.ts | 5 + .../util/remove-undefined-properties.d.ts | 1 + .../dist-types/util/url-template.d.ts | 3 + .../@octokit/endpoint/dist-types/version.d.ts | 1 + .../endpoint/dist-types/with-defaults.d.ts | 2 + .../@octokit/endpoint/dist-web/index.js | 381 + .../@octokit/endpoint/dist-web/index.js.map | 1 + node_modules/@octokit/endpoint/package.json | 44 + node_modules/@octokit/graphql/LICENSE | 21 + node_modules/@octokit/graphql/README.md | 409 + .../@octokit/graphql/dist-node/index.js | 118 + .../@octokit/graphql/dist-node/index.js.map | 1 + .../@octokit/graphql/dist-src/error.js | 21 + .../@octokit/graphql/dist-src/graphql.js | 52 + .../@octokit/graphql/dist-src/index.js | 18 + .../@octokit/graphql/dist-src/types.js | 1 + .../@octokit/graphql/dist-src/version.js | 1 + .../graphql/dist-src/with-defaults.js | 12 + .../@octokit/graphql/dist-types/error.d.ts | 13 + .../@octokit/graphql/dist-types/graphql.d.ts | 3 + .../@octokit/graphql/dist-types/index.d.ts | 5 + .../@octokit/graphql/dist-types/types.d.ts | 55 + .../@octokit/graphql/dist-types/version.d.ts | 1 + .../graphql/dist-types/with-defaults.d.ts | 3 + .../@octokit/graphql/dist-web/index.js | 106 + .../@octokit/graphql/dist-web/index.js.map | 1 + node_modules/@octokit/graphql/package.json | 47 + node_modules/@octokit/openapi-types/LICENSE | 7 + node_modules/@octokit/openapi-types/README.md | 17 + .../@octokit/openapi-types/package.json | 20 + .../@octokit/openapi-types/types.d.ts | 47095 ++++++++++++++++ .../@octokit/plugin-paginate-rest/LICENSE | 7 + .../@octokit/plugin-paginate-rest/README.md | 269 + .../plugin-paginate-rest/dist-node/index.js | 205 + .../dist-node/index.js.map | 1 + .../dist-src/compose-paginate.js | 5 + .../generated/paginating-endpoints.js | 216 + .../plugin-paginate-rest/dist-src/index.js | 17 + .../plugin-paginate-rest/dist-src/iterator.js | 39 + .../normalize-paginated-list-response.js | 47 + .../plugin-paginate-rest/dist-src/paginate.js | 24 + .../dist-src/paginating-endpoints.js | 10 + .../plugin-paginate-rest/dist-src/types.js | 1 + .../plugin-paginate-rest/dist-src/version.js | 1 + .../dist-types/compose-paginate.d.ts | 2 + .../generated/paginating-endpoints.d.ts | 1612 + .../dist-types/index.d.ts | 16 + .../dist-types/iterator.d.ts | 20 + .../normalize-paginated-list-response.d.ts | 18 + .../dist-types/paginate.d.ts | 3 + .../dist-types/paginating-endpoints.d.ts | 3 + .../dist-types/types.d.ts | 242 + .../dist-types/version.d.ts | 1 + .../plugin-paginate-rest/dist-web/index.js | 358 + .../dist-web/index.js.map | 1 + .../plugin-paginate-rest/package.json | 51 + .../plugin-rest-endpoint-methods/LICENSE | 7 + .../plugin-rest-endpoint-methods/README.md | 76 + .../dist-node/index.js | 1107 + .../dist-node/index.js.map 
| 1 + .../dist-src/endpoints-to-methods.js | 60 + .../dist-src/generated/endpoints.js | 1664 + .../dist-src/generated/method-types.js | 1 + .../parameters-and-response-types.js | 1 + .../dist-src/index.js | 18 + .../dist-src/types.js | 1 + .../dist-src/version.js | 1 + .../dist-types/endpoints-to-methods.d.ts | 4 + .../dist-types/generated/endpoints.d.ts | 3 + .../dist-types/generated/method-types.d.ts | 9945 ++++ .../parameters-and-response-types.d.ts | 3211 ++ .../dist-types/index.d.ts | 11 + .../dist-types/types.d.ts | 18 + .../dist-types/version.d.ts | 1 + .../dist-web/index.js | 1745 + .../dist-web/index.js.map | 1 + .../plugin-rest-endpoint-methods/package.json | 59 + node_modules/@octokit/request-error/LICENSE | 21 + node_modules/@octokit/request-error/README.md | 67 + .../@octokit/request-error/dist-node/index.js | 74 + .../request-error/dist-node/index.js.map | 1 + .../@octokit/request-error/dist-src/index.js | 55 + .../@octokit/request-error/dist-src/types.js | 1 + .../request-error/dist-types/index.d.ts | 33 + .../request-error/dist-types/types.d.ts | 9 + .../@octokit/request-error/dist-web/index.js | 59 + .../request-error/dist-web/index.js.map | 1 + .../@octokit/request-error/package.json | 47 + node_modules/@octokit/request/LICENSE | 21 + node_modules/@octokit/request/README.md | 551 + .../@octokit/request/dist-node/index.js | 177 + .../@octokit/request/dist-node/index.js.map | 1 + .../request/dist-src/fetch-wrapper.js | 119 + .../request/dist-src/get-buffer-response.js | 3 + .../@octokit/request/dist-src/index.js | 9 + .../@octokit/request/dist-src/version.js | 1 + .../request/dist-src/with-defaults.js | 22 + .../request/dist-types/fetch-wrapper.d.ts | 11 + .../dist-types/get-buffer-response.d.ts | 2 + .../@octokit/request/dist-types/index.d.ts | 1 + .../@octokit/request/dist-types/version.d.ts | 1 + .../request/dist-types/with-defaults.d.ts | 2 + .../@octokit/request/dist-web/index.js | 158 + .../@octokit/request/dist-web/index.js.map | 1 + node_modules/@octokit/request/package.json | 56 + node_modules/@octokit/types/LICENSE | 7 + node_modules/@octokit/types/README.md | 65 + .../@octokit/types/dist-node/index.js | 8 + .../@octokit/types/dist-node/index.js.map | 1 + .../@octokit/types/dist-src/AuthInterface.js | 1 + .../types/dist-src/EndpointDefaults.js | 1 + .../types/dist-src/EndpointInterface.js | 1 + .../types/dist-src/EndpointOptions.js | 1 + node_modules/@octokit/types/dist-src/Fetch.js | 1 + .../GetResponseTypeFromEndpointMethod.js | 1 + .../types/dist-src/OctokitResponse.js | 1 + .../@octokit/types/dist-src/RequestError.js | 1 + .../@octokit/types/dist-src/RequestHeaders.js | 1 + .../types/dist-src/RequestInterface.js | 1 + .../@octokit/types/dist-src/RequestMethod.js | 1 + .../@octokit/types/dist-src/RequestOptions.js | 1 + .../types/dist-src/RequestParameters.js | 1 + .../types/dist-src/RequestRequestOptions.js | 1 + .../types/dist-src/ResponseHeaders.js | 1 + node_modules/@octokit/types/dist-src/Route.js | 1 + .../@octokit/types/dist-src/Signal.js | 1 + .../types/dist-src/StrategyInterface.js | 1 + node_modules/@octokit/types/dist-src/Url.js | 1 + .../@octokit/types/dist-src/VERSION.js | 1 + .../types/dist-src/generated/Endpoints.js | 1 + node_modules/@octokit/types/dist-src/index.js | 21 + .../types/dist-types/AuthInterface.d.ts | 31 + .../types/dist-types/EndpointDefaults.d.ts | 21 + .../types/dist-types/EndpointInterface.d.ts | 65 + .../types/dist-types/EndpointOptions.d.ts | 7 + .../@octokit/types/dist-types/Fetch.d.ts | 4 + 
.../GetResponseTypeFromEndpointMethod.d.ts | 5 + .../types/dist-types/OctokitResponse.d.ts | 17 + .../types/dist-types/RequestError.d.ts | 11 + .../types/dist-types/RequestHeaders.d.ts | 15 + .../types/dist-types/RequestInterface.d.ts | 34 + .../types/dist-types/RequestMethod.d.ts | 4 + .../types/dist-types/RequestOptions.d.ts | 14 + .../types/dist-types/RequestParameters.d.ts | 45 + .../dist-types/RequestRequestOptions.d.ts | 26 + .../types/dist-types/ResponseHeaders.d.ts | 20 + .../@octokit/types/dist-types/Route.d.ts | 4 + .../@octokit/types/dist-types/Signal.d.ts | 6 + .../types/dist-types/StrategyInterface.d.ts | 4 + .../@octokit/types/dist-types/Url.d.ts | 4 + .../@octokit/types/dist-types/VERSION.d.ts | 1 + .../types/dist-types/generated/Endpoints.d.ts | 3571 ++ .../@octokit/types/dist-types/index.d.ts | 21 + node_modules/@octokit/types/dist-web/index.js | 4 + .../@octokit/types/dist-web/index.js.map | 1 + node_modules/@octokit/types/package.json | 54 + node_modules/@types/node-fetch/LICENSE | 21 + node_modules/@types/node-fetch/README.md | 15 + node_modules/@types/node-fetch/externals.d.ts | 32 + node_modules/@types/node-fetch/index.d.ts | 239 + .../node_modules/@types/node/LICENSE | 21 + .../node_modules/@types/node/README.md | 15 + .../node_modules/@types/node/assert.d.ts | 1040 + .../@types/node/assert/strict.d.ts | 8 + .../node_modules/@types/node/async_hooks.d.ts | 541 + .../@types/node/buffer.buffer.d.ts | 385 + .../node_modules/@types/node/buffer.d.ts | 1933 + .../@types/node/child_process.d.ts | 1549 + .../node_modules/@types/node/cluster.d.ts | 579 + .../@types/node/compatibility/disposable.d.ts | 16 + .../@types/node/compatibility/index.d.ts | 9 + .../@types/node/compatibility/indexable.d.ts | 23 + .../@types/node/compatibility/iterators.d.ts | 21 + .../node_modules/@types/node/console.d.ts | 452 + .../node_modules/@types/node/constants.d.ts | 19 + .../node_modules/@types/node/crypto.d.ts | 4475 ++ .../node_modules/@types/node/dgram.d.ts | 596 + .../@types/node/diagnostics_channel.d.ts | 554 + .../node_modules/@types/node/dns.d.ts | 865 + .../@types/node/dns/promises.d.ts | 476 + .../node_modules/@types/node/dom-events.d.ts | 124 + .../node_modules/@types/node/domain.d.ts | 170 + .../node_modules/@types/node/events.d.ts | 931 + .../node_modules/@types/node/fs.d.ts | 4396 ++ .../node_modules/@types/node/fs/promises.d.ts | 1275 + .../node_modules/@types/node/globals.d.ts | 566 + .../@types/node/globals.typedarray.d.ts | 21 + .../node_modules/@types/node/http.d.ts | 1958 + .../node_modules/@types/node/http2.d.ts | 2558 + .../node_modules/@types/node/https.d.ts | 543 + .../node_modules/@types/node/index.d.ts | 92 + .../node_modules/@types/node/inspector.d.ts | 3966 ++ .../node_modules/@types/node/module.d.ts | 402 + .../node_modules/@types/node/net.d.ts | 1001 + .../node_modules/@types/node/os.d.ts | 495 + .../node_modules/@types/node/package.json | 220 + .../node_modules/@types/node/path.d.ts | 200 + .../node_modules/@types/node/perf_hooks.d.ts | 965 + .../node_modules/@types/node/process.d.ts | 1963 + .../node_modules/@types/node/punycode.d.ts | 117 + .../node_modules/@types/node/querystring.d.ts | 153 + .../node_modules/@types/node/readline.d.ts | 589 + .../@types/node/readline/promises.d.ts | 162 + .../node_modules/@types/node/repl.d.ts | 430 + .../node_modules/@types/node/sea.d.ts | 153 + .../node_modules/@types/node/sqlite.d.ts | 213 + .../node_modules/@types/node/stream.d.ts | 1726 + .../@types/node/stream/consumers.d.ts | 12 + .../@types/node/stream/promises.d.ts | 90 
+ .../node_modules/@types/node/stream/web.d.ts | 609 + .../@types/node/string_decoder.d.ts | 67 + .../node_modules/@types/node/test.d.ts | 2248 + .../node_modules/@types/node/timers.d.ts | 240 + .../@types/node/timers/promises.d.ts | 97 + .../node_modules/@types/node/tls.d.ts | 1226 + .../@types/node/trace_events.d.ts | 197 + .../@types/node/ts5.6/buffer.buffer.d.ts | 385 + .../@types/node/ts5.6/globals.typedarray.d.ts | 19 + .../node_modules/@types/node/ts5.6/index.d.ts | 92 + .../node_modules/@types/node/tty.d.ts | 208 + .../node_modules/@types/node/url.d.ts | 972 + .../node_modules/@types/node/util.d.ts | 2371 + .../node_modules/@types/node/v8.d.ts | 808 + .../node_modules/@types/node/vm.d.ts | 976 + .../node_modules/@types/node/wasi.d.ts | 181 + .../@types/node/worker_threads.d.ts | 745 + .../node_modules/@types/node/zlib.d.ts | 539 + .../node_modules/undici-types/LICENSE | 21 + .../node_modules/undici-types/README.md | 6 + .../node_modules/undici-types/agent.d.ts | 31 + .../node_modules/undici-types/api.d.ts | 43 + .../undici-types/balanced-pool.d.ts | 29 + .../node_modules/undici-types/cache.d.ts | 36 + .../node_modules/undici-types/client.d.ts | 108 + .../node_modules/undici-types/connector.d.ts | 34 + .../undici-types/content-type.d.ts | 21 + .../node_modules/undici-types/cookies.d.ts | 28 + .../undici-types/diagnostics-channel.d.ts | 66 + .../node_modules/undici-types/dispatcher.d.ts | 255 + .../undici-types/env-http-proxy-agent.d.ts | 21 + .../node_modules/undici-types/errors.d.ts | 149 + .../undici-types/eventsource.d.ts | 61 + .../node_modules/undici-types/fetch.d.ts | 209 + .../node_modules/undici-types/file.d.ts | 39 + .../node_modules/undici-types/filereader.d.ts | 54 + .../node_modules/undici-types/formdata.d.ts | 108 + .../undici-types/global-dispatcher.d.ts | 9 + .../undici-types/global-origin.d.ts | 7 + .../node_modules/undici-types/handlers.d.ts | 15 + .../node_modules/undici-types/header.d.ts | 4 + .../node_modules/undici-types/index.d.ts | 71 + .../undici-types/interceptors.d.ts | 17 + .../node_modules/undici-types/mock-agent.d.ts | 50 + .../undici-types/mock-client.d.ts | 25 + .../undici-types/mock-errors.d.ts | 12 + .../undici-types/mock-interceptor.d.ts | 93 + .../node_modules/undici-types/mock-pool.d.ts | 25 + .../node_modules/undici-types/package.json | 55 + .../node_modules/undici-types/patch.d.ts | 33 + .../node_modules/undici-types/pool-stats.d.ts | 19 + .../node_modules/undici-types/pool.d.ts | 39 + .../undici-types/proxy-agent.d.ts | 28 + .../node_modules/undici-types/readable.d.ts | 60 + .../undici-types/retry-agent.d.ts | 8 + .../undici-types/retry-handler.d.ts | 116 + .../node_modules/undici-types/util.d.ts | 18 + .../node_modules/undici-types/webidl.d.ts | 222 + .../node_modules/undici-types/websocket.d.ts | 150 + node_modules/@types/node-fetch/package.json | 84 + node_modules/@types/node/LICENSE | 21 + node_modules/@types/node/README.md | 15 + node_modules/@types/node/assert.d.ts | 985 + node_modules/@types/node/assert/strict.d.ts | 8 + node_modules/@types/node/async_hooks.d.ts | 522 + node_modules/@types/node/buffer.buffer.d.ts | 385 + node_modules/@types/node/buffer.d.ts | 1903 + node_modules/@types/node/child_process.d.ts | 1549 + node_modules/@types/node/cluster.d.ts | 578 + .../@types/node/compatibility/disposable.d.ts | 14 + .../@types/node/compatibility/index.d.ts | 9 + .../@types/node/compatibility/indexable.d.ts | 20 + .../@types/node/compatibility/iterators.d.ts | 20 + node_modules/@types/node/console.d.ts | 452 + 
node_modules/@types/node/constants.d.ts | 19 + node_modules/@types/node/crypto.d.ts | 4453 ++ node_modules/@types/node/dgram.d.ts | 596 + .../@types/node/diagnostics_channel.d.ts | 546 + node_modules/@types/node/dns.d.ts | 853 + node_modules/@types/node/dns/promises.d.ts | 475 + node_modules/@types/node/dom-events.d.ts | 124 + node_modules/@types/node/domain.d.ts | 170 + node_modules/@types/node/events.d.ts | 819 + node_modules/@types/node/fs.d.ts | 4237 ++ node_modules/@types/node/fs/promises.d.ts | 1205 + node_modules/@types/node/globals.d.ts | 442 + .../@types/node/globals.typedarray.d.ts | 21 + node_modules/@types/node/http.d.ts | 1839 + node_modules/@types/node/http2.d.ts | 2517 + node_modules/@types/node/https.d.ts | 544 + node_modules/@types/node/index.d.ts | 90 + node_modules/@types/node/inspector.d.ts | 2775 + node_modules/@types/node/module.d.ts | 291 + node_modules/@types/node/net.d.ts | 924 + node_modules/@types/node/os.d.ts | 473 + node_modules/@types/node/package.json | 225 + node_modules/@types/node/path.d.ts | 191 + node_modules/@types/node/perf_hooks.d.ts | 753 + node_modules/@types/node/process.d.ts | 1553 + node_modules/@types/node/punycode.d.ts | 117 + node_modules/@types/node/querystring.d.ts | 141 + node_modules/@types/node/readline.d.ts | 715 + .../@types/node/readline/promises.d.ts | 154 + node_modules/@types/node/repl.d.ts | 430 + node_modules/@types/node/stream.d.ts | 1731 + .../@types/node/stream/consumers.d.ts | 12 + node_modules/@types/node/stream/promises.d.ts | 90 + node_modules/@types/node/stream/web.d.ts | 523 + node_modules/@types/node/string_decoder.d.ts | 67 + node_modules/@types/node/test.d.ts | 1245 + node_modules/@types/node/timers.d.ts | 126 + node_modules/@types/node/timers/promises.d.ts | 93 + node_modules/@types/node/tls.d.ts | 1203 + node_modules/@types/node/trace_events.d.ts | 171 + .../@types/node/ts5.6/buffer.buffer.d.ts | 385 + .../@types/node/ts5.6/globals.typedarray.d.ts | 19 + node_modules/@types/node/ts5.6/index.d.ts | 90 + node_modules/@types/node/tty.d.ts | 206 + node_modules/@types/node/url.d.ts | 957 + node_modules/@types/node/util.d.ts | 2083 + node_modules/@types/node/v8.d.ts | 753 + node_modules/@types/node/vm.d.ts | 666 + node_modules/@types/node/wasi.d.ts | 160 + node_modules/@types/node/worker_threads.d.ts | 695 + node_modules/@types/node/zlib.d.ts | 517 + node_modules/abort-controller/LICENSE | 21 + node_modules/abort-controller/README.md | 98 + node_modules/abort-controller/browser.js | 13 + node_modules/abort-controller/browser.mjs | 11 + .../dist/abort-controller.d.ts | 43 + .../abort-controller/dist/abort-controller.js | 127 + .../dist/abort-controller.js.map | 1 + .../dist/abort-controller.mjs | 118 + .../dist/abort-controller.mjs.map | 1 + .../dist/abort-controller.umd.js | 5 + .../dist/abort-controller.umd.js.map | 1 + node_modules/abort-controller/package.json | 97 + node_modules/abort-controller/polyfill.js | 21 + node_modules/abort-controller/polyfill.mjs | 19 + node_modules/agentkeepalive/History.md | 268 + node_modules/agentkeepalive/LICENSE | 23 + node_modules/agentkeepalive/README.md | 256 + node_modules/agentkeepalive/browser.js | 5 + node_modules/agentkeepalive/index.d.ts | 65 + node_modules/agentkeepalive/index.js | 5 + node_modules/agentkeepalive/lib/agent.js | 402 + node_modules/agentkeepalive/lib/constants.js | 14 + .../agentkeepalive/lib/https_agent.js | 51 + node_modules/agentkeepalive/package.json | 56 + node_modules/asynckit/LICENSE | 21 + node_modules/asynckit/README.md | 233 + 
node_modules/asynckit/bench.js | 76 + node_modules/asynckit/index.js | 6 + node_modules/asynckit/lib/abort.js | 29 + node_modules/asynckit/lib/async.js | 34 + node_modules/asynckit/lib/defer.js | 26 + node_modules/asynckit/lib/iterate.js | 75 + .../asynckit/lib/readable_asynckit.js | 91 + .../asynckit/lib/readable_parallel.js | 25 + node_modules/asynckit/lib/readable_serial.js | 25 + .../asynckit/lib/readable_serial_ordered.js | 29 + node_modules/asynckit/lib/state.js | 37 + node_modules/asynckit/lib/streamify.js | 141 + node_modules/asynckit/lib/terminator.js | 29 + node_modules/asynckit/package.json | 63 + node_modules/asynckit/parallel.js | 43 + node_modules/asynckit/serial.js | 17 + node_modules/asynckit/serialOrdered.js | 75 + node_modules/asynckit/stream.js | 21 + node_modules/before-after-hook/LICENSE | 201 + node_modules/before-after-hook/README.md | 653 + node_modules/before-after-hook/index.d.ts | 186 + node_modules/before-after-hook/index.js | 61 + node_modules/before-after-hook/lib/add.js | 46 + .../before-after-hook/lib/register.js | 27 + node_modules/before-after-hook/lib/remove.js | 19 + node_modules/before-after-hook/package.json | 71 + node_modules/combined-stream/License | 19 + node_modules/combined-stream/Readme.md | 138 + .../combined-stream/lib/combined_stream.js | 208 + node_modules/combined-stream/package.json | 25 + node_modules/combined-stream/yarn.lock | 17 + node_modules/delayed-stream/.npmignore | 1 + node_modules/delayed-stream/License | 19 + node_modules/delayed-stream/Makefile | 7 + node_modules/delayed-stream/Readme.md | 141 + .../delayed-stream/lib/delayed_stream.js | 107 + node_modules/delayed-stream/package.json | 27 + node_modules/deprecation/LICENSE | 15 + node_modules/deprecation/README.md | 77 + node_modules/deprecation/dist-node/index.js | 20 + node_modules/deprecation/dist-src/index.js | 14 + .../deprecation/dist-types/index.d.ts | 3 + node_modules/deprecation/dist-web/index.js | 16 + node_modules/deprecation/package.json | 34 + node_modules/event-target-shim/LICENSE | 22 + node_modules/event-target-shim/README.md | 293 + .../dist/event-target-shim.js | 871 + .../dist/event-target-shim.js.map | 1 + .../dist/event-target-shim.mjs | 862 + .../dist/event-target-shim.mjs.map | 1 + .../dist/event-target-shim.umd.js | 6 + .../dist/event-target-shim.umd.js.map | 1 + node_modules/event-target-shim/index.d.ts | 399 + node_modules/event-target-shim/package.json | 82 + .../form-data-encoder/@type/FileLike.d.ts | 23 + .../@type/FormDataEncoder.d.ts | 160 + .../form-data-encoder/@type/FormDataLike.d.ts | 40 + .../form-data-encoder/@type/index.d.ts | 5 + .../@type/util/createBoundary.d.ts | 13 + .../@type/util/escapeName.d.ts | 11 + .../@type/util/isFileLike.d.ts | 28 + .../@type/util/isFormData.d.ts | 15 + .../@type/util/isFunction.d.ts | 7 + .../@type/util/isPlainObject.d.ts | 2 + .../@type/util/normalizeValue.d.ts | 11 + .../form-data-encoder/lib/cjs/FileLike.js | 2 + .../lib/cjs/FormDataEncoder.js | 126 + .../form-data-encoder/lib/cjs/FormDataLike.js | 2 + .../form-data-encoder/lib/cjs/index.js | 17 + .../form-data-encoder/lib/cjs/package.json | 3 + .../lib/cjs/util/createBoundary.js | 12 + .../lib/cjs/util/escapeName.js | 7 + .../lib/cjs/util/isFileLike.js | 16 + .../lib/cjs/util/isFormData.js | 16 + .../lib/cjs/util/isFunction.js | 4 + .../lib/cjs/util/isPlainObject.js | 15 + .../lib/cjs/util/normalizeValue.js | 11 + .../form-data-encoder/lib/esm/FileLike.js | 1 + .../lib/esm/FormDataEncoder.js | 119 + .../form-data-encoder/lib/esm/FormDataLike.js | 1 + 
.../form-data-encoder/lib/esm/index.js | 5 + .../form-data-encoder/lib/esm/package.json | 3 + .../lib/esm/util/createBoundary.js | 10 + .../lib/esm/util/escapeName.js | 5 + .../lib/esm/util/isFileLike.js | 9 + .../lib/esm/util/isFormData.js | 9 + .../lib/esm/util/isFunction.js | 2 + .../lib/esm/util/isPlainObject.js | 13 + .../lib/esm/util/normalizeValue.js | 9 + node_modules/form-data-encoder/license | 21 + node_modules/form-data-encoder/package.json | 63 + node_modules/form-data-encoder/readme.md | 366 + node_modules/form-data/License | 19 + node_modules/form-data/Readme.md | 358 + node_modules/form-data/index.d.ts | 62 + node_modules/form-data/lib/browser.js | 2 + node_modules/form-data/lib/form_data.js | 501 + node_modules/form-data/lib/populate.js | 10 + node_modules/form-data/package.json | 71 + node_modules/formdata-node/@type/Blob.d.ts | 66 + .../formdata-node/@type/BlobPart.d.ts | 2 + node_modules/formdata-node/@type/File.d.ts | 53 + .../formdata-node/@type/FormData.d.ts | 105 + .../formdata-node/@type/blobHelpers.d.ts | 9 + node_modules/formdata-node/@type/browser.d.ts | 10 + .../@type/deprecateConstructorEntries.d.ts | 1 + .../formdata-node/@type/fileFromPath.d.ts | 55 + node_modules/formdata-node/@type/index.d.ts | 3 + node_modules/formdata-node/@type/isBlob.d.ts | 2 + node_modules/formdata-node/@type/isFile.d.ts | 7 + .../formdata-node/@type/isFunction.d.ts | 1 + .../formdata-node/@type/isPlainObject.d.ts | 2 + node_modules/formdata-node/lib/cjs/Blob.js | 122 + .../formdata-node/lib/cjs/BlobPart.js | 2 + node_modules/formdata-node/lib/cjs/File.js | 52 + .../formdata-node/lib/cjs/FormData.js | 148 + .../formdata-node/lib/cjs/blobHelpers.js | 80 + node_modules/formdata-node/lib/cjs/browser.js | 13 + .../lib/cjs/deprecateConstructorEntries.js | 6 + .../formdata-node/lib/cjs/fileFromPath.js | 97 + node_modules/formdata-node/lib/cjs/index.js | 15 + node_modules/formdata-node/lib/cjs/isBlob.js | 6 + node_modules/formdata-node/lib/cjs/isFile.js | 6 + .../formdata-node/lib/cjs/isFunction.js | 5 + .../formdata-node/lib/cjs/isPlainObject.js | 15 + .../formdata-node/lib/cjs/package.json | 3 + node_modules/formdata-node/lib/esm/Blob.js | 118 + .../formdata-node/lib/esm/BlobPart.js | 1 + node_modules/formdata-node/lib/esm/File.js | 48 + .../formdata-node/lib/esm/FormData.js | 144 + .../formdata-node/lib/esm/blobHelpers.js | 75 + node_modules/formdata-node/lib/esm/browser.js | 10 + .../lib/esm/deprecateConstructorEntries.js | 3 + .../formdata-node/lib/esm/fileFromPath.js | 79 + node_modules/formdata-node/lib/esm/index.js | 3 + node_modules/formdata-node/lib/esm/isBlob.js | 2 + node_modules/formdata-node/lib/esm/isFile.js | 2 + .../formdata-node/lib/esm/isFunction.js | 1 + .../formdata-node/lib/esm/isPlainObject.js | 13 + .../formdata-node/lib/esm/package.json | 3 + .../formdata-node/lib/node-domexception.d.ts | 5 + node_modules/formdata-node/license | 21 + node_modules/formdata-node/package.json | 94 + node_modules/formdata-node/readme.md | 444 + node_modules/humanize-ms/History.md | 25 + node_modules/humanize-ms/LICENSE | 17 + node_modules/humanize-ms/README.md | 40 + node_modules/humanize-ms/index.js | 24 + node_modules/humanize-ms/package.json | 37 + node_modules/is-plain-object/LICENSE | 21 + node_modules/is-plain-object/README.md | 125 + .../is-plain-object/dist/is-plain-object.js | 38 + .../is-plain-object/dist/is-plain-object.mjs | 34 + .../is-plain-object/is-plain-object.d.ts | 1 + node_modules/is-plain-object/package.json | 85 + node_modules/mime-db/HISTORY.md | 507 + 
node_modules/mime-db/LICENSE | 23 + node_modules/mime-db/README.md | 100 + node_modules/mime-db/db.json | 8519 +++ node_modules/mime-db/index.js | 12 + node_modules/mime-db/package.json | 60 + node_modules/mime-types/HISTORY.md | 397 + node_modules/mime-types/LICENSE | 23 + node_modules/mime-types/README.md | 113 + node_modules/mime-types/index.js | 188 + node_modules/mime-types/package.json | 44 + node_modules/ms/index.js | 162 + node_modules/ms/license.md | 21 + node_modules/ms/package.json | 38 + node_modules/ms/readme.md | 59 + .../.history/README_20210527203617.md | 2 + .../.history/README_20210527212714.md | 41 + .../.history/README_20210527213345.md | 36 + .../.history/README_20210527213411.md | 36 + .../.history/README_20210527213803.md | 36 + .../.history/README_20210527214323.md | 38 + .../.history/README_20210527214408.md | 38 + .../.history/index_20210527203842.js | 0 .../.history/index_20210527203947.js | 8 + .../.history/index_20210527204259.js | 9 + .../.history/index_20210527204418.js | 9 + .../.history/index_20210527204756.js | 11 + .../.history/index_20210527204833.js | 11 + .../.history/index_20210527211208.js | 15 + .../.history/index_20210527211248.js | 15 + .../.history/index_20210527212722.js | 23 + .../.history/index_20210527212731.js | 23 + .../.history/index_20210527212746.js | 15 + .../.history/index_20210527212900.js | 16 + .../.history/index_20210527213022.js | 16 + .../.history/index_20210527213822.js | 16 + .../.history/index_20210527213843.js | 17 + .../.history/index_20210527213852.js | 17 + .../.history/index_20210527213910.js | 16 + .../.history/index_20210527214034.js | 16 + .../.history/index_20210527214643.js | 41 + .../.history/index_20210527214654.js | 41 + .../.history/index_20210527214700.js | 16 + .../.history/package_20210527203733.json | 19 + .../.history/package_20210527203825.json | 16 + .../.history/package_20210527204621.json | 19 + .../.history/package_20210527204913.json | 25 + .../.history/package_20210527204925.json | 25 + .../.history/package_20210527205145.json | 29 + .../.history/package_20210527205156.json | 29 + .../.history/test_20210527205603.js | 0 .../.history/test_20210527205957.js | 3 + .../.history/test_20210527210021.js | 3 + node_modules/node-domexception/LICENSE | 21 + node_modules/node-domexception/README.md | 46 + node_modules/node-domexception/index.js | 16 + node_modules/node-domexception/package.json | 29 + node_modules/node-fetch/LICENSE.md | 22 + node_modules/node-fetch/README.md | 634 + node_modules/node-fetch/browser.js | 25 + node_modules/node-fetch/lib/index.es.js | 1777 + node_modules/node-fetch/lib/index.js | 1787 + node_modules/node-fetch/lib/index.mjs | 1775 + node_modules/node-fetch/package.json | 89 + node_modules/once/LICENSE | 15 + node_modules/once/README.md | 79 + node_modules/once/once.js | 42 + node_modules/once/package.json | 33 + node_modules/openai/CHANGELOG.md | 2020 + node_modules/openai/LICENSE | 201 + node_modules/openai/README.md | 673 + node_modules/openai/_shims/MultipartBody.d.ts | 9 + .../openai/_shims/MultipartBody.d.ts.map | 1 + node_modules/openai/_shims/MultipartBody.js | 16 + .../openai/_shims/MultipartBody.js.map | 1 + node_modules/openai/_shims/MultipartBody.mjs | 12 + .../openai/_shims/MultipartBody.mjs.map | 1 + node_modules/openai/_shims/README.md | 46 + .../openai/_shims/auto/runtime-bun.d.ts | 5 + .../openai/_shims/auto/runtime-bun.d.ts.map | 1 + .../openai/_shims/auto/runtime-bun.js | 21 + .../openai/_shims/auto/runtime-bun.js.map | 1 + .../openai/_shims/auto/runtime-bun.mjs 
| 2 + .../openai/_shims/auto/runtime-bun.mjs.map | 1 + .../openai/_shims/auto/runtime-node.d.ts | 5 + .../openai/_shims/auto/runtime-node.d.ts.map | 1 + .../openai/_shims/auto/runtime-node.js | 21 + .../openai/_shims/auto/runtime-node.js.map | 1 + .../openai/_shims/auto/runtime-node.mjs | 2 + .../openai/_shims/auto/runtime-node.mjs.map | 1 + node_modules/openai/_shims/auto/runtime.d.ts | 5 + .../openai/_shims/auto/runtime.d.ts.map | 1 + node_modules/openai/_shims/auto/runtime.js | 21 + .../openai/_shims/auto/runtime.js.map | 1 + node_modules/openai/_shims/auto/runtime.mjs | 2 + .../openai/_shims/auto/runtime.mjs.map | 1 + .../openai/_shims/auto/types-node.d.ts | 5 + .../openai/_shims/auto/types-node.d.ts.map | 1 + node_modules/openai/_shims/auto/types-node.js | 21 + .../openai/_shims/auto/types-node.js.map | 1 + .../openai/_shims/auto/types-node.mjs | 2 + .../openai/_shims/auto/types-node.mjs.map | 1 + node_modules/openai/_shims/auto/types.d.ts | 101 + node_modules/openai/_shims/auto/types.js | 3 + node_modules/openai/_shims/auto/types.mjs | 3 + node_modules/openai/_shims/bun-runtime.d.ts | 6 + .../openai/_shims/bun-runtime.d.ts.map | 1 + node_modules/openai/_shims/bun-runtime.js | 14 + node_modules/openai/_shims/bun-runtime.js.map | 1 + node_modules/openai/_shims/bun-runtime.mjs | 10 + .../openai/_shims/bun-runtime.mjs.map | 1 + node_modules/openai/_shims/index.d.ts | 81 + node_modules/openai/_shims/index.js | 13 + node_modules/openai/_shims/index.mjs | 7 + node_modules/openai/_shims/manual-types.d.ts | 12 + node_modules/openai/_shims/manual-types.js | 3 + node_modules/openai/_shims/manual-types.mjs | 3 + node_modules/openai/_shims/node-runtime.d.ts | 3 + .../openai/_shims/node-runtime.d.ts.map | 1 + node_modules/openai/_shims/node-runtime.js | 89 + .../openai/_shims/node-runtime.js.map | 1 + node_modules/openai/_shims/node-runtime.mjs | 56 + .../openai/_shims/node-runtime.mjs.map | 1 + node_modules/openai/_shims/node-types.d.ts | 42 + node_modules/openai/_shims/node-types.js | 3 + node_modules/openai/_shims/node-types.mjs | 3 + node_modules/openai/_shims/registry.d.ts | 37 + node_modules/openai/_shims/registry.d.ts.map | 1 + node_modules/openai/_shims/registry.js | 41 + node_modules/openai/_shims/registry.js.map | 1 + node_modules/openai/_shims/registry.mjs | 37 + node_modules/openai/_shims/registry.mjs.map | 1 + node_modules/openai/_shims/web-runtime.d.ts | 5 + .../openai/_shims/web-runtime.d.ts.map | 1 + node_modules/openai/_shims/web-runtime.js | 78 + node_modules/openai/_shims/web-runtime.js.map | 1 + node_modules/openai/_shims/web-runtime.mjs | 71 + .../openai/_shims/web-runtime.mjs.map | 1 + node_modules/openai/_shims/web-types.d.ts | 83 + node_modules/openai/_shims/web-types.js | 3 + node_modules/openai/_shims/web-types.mjs | 3 + .../_vendor/partial-json-parser/parser.d.ts | 7 + .../partial-json-parser/parser.d.ts.map | 1 + .../_vendor/partial-json-parser/parser.js | 246 + .../_vendor/partial-json-parser/parser.js.map | 1 + .../_vendor/partial-json-parser/parser.mjs | 241 + .../partial-json-parser/parser.mjs.map | 1 + .../_vendor/zod-to-json-schema/Options.d.ts | 32 + .../zod-to-json-schema/Options.d.ts.map | 1 + .../_vendor/zod-to-json-schema/Options.js | 42 + .../_vendor/zod-to-json-schema/Options.js.map | 1 + .../_vendor/zod-to-json-schema/Options.mjs | 38 + .../zod-to-json-schema/Options.mjs.map | 1 + .../_vendor/zod-to-json-schema/Refs.d.ts | 21 + .../_vendor/zod-to-json-schema/Refs.d.ts.map | 1 + .../openai/_vendor/zod-to-json-schema/Refs.js | 28 + 
.../_vendor/zod-to-json-schema/Refs.js.map | 1 + .../_vendor/zod-to-json-schema/Refs.mjs | 24 + .../_vendor/zod-to-json-schema/Refs.mjs.map | 1 + .../zod-to-json-schema/errorMessages.d.ts | 12 + .../zod-to-json-schema/errorMessages.d.ts.map | 1 + .../zod-to-json-schema/errorMessages.js | 20 + .../zod-to-json-schema/errorMessages.js.map | 1 + .../zod-to-json-schema/errorMessages.mjs | 15 + .../zod-to-json-schema/errorMessages.mjs.map | 1 + .../_vendor/zod-to-json-schema/index.d.ts | 38 + .../_vendor/zod-to-json-schema/index.d.ts.map | 1 + .../_vendor/zod-to-json-schema/index.js | 54 + .../_vendor/zod-to-json-schema/index.js.map | 1 + .../_vendor/zod-to-json-schema/index.mjs | 38 + .../_vendor/zod-to-json-schema/index.mjs.map | 1 + .../_vendor/zod-to-json-schema/parseDef.d.ts | 38 + .../zod-to-json-schema/parseDef.d.ts.map | 1 + .../_vendor/zod-to-json-schema/parseDef.js | 187 + .../zod-to-json-schema/parseDef.js.map | 1 + .../_vendor/zod-to-json-schema/parseDef.mjs | 183 + .../zod-to-json-schema/parseDef.mjs.map | 1 + .../zod-to-json-schema/parsers/any.d.ts | 3 + .../zod-to-json-schema/parsers/any.d.ts.map | 1 + .../_vendor/zod-to-json-schema/parsers/any.js | 8 + .../zod-to-json-schema/parsers/any.js.map | 1 + .../zod-to-json-schema/parsers/any.mjs | 4 + .../zod-to-json-schema/parsers/any.mjs.map | 1 + .../zod-to-json-schema/parsers/array.d.ts | 13 + .../zod-to-json-schema/parsers/array.d.ts.map | 1 + .../zod-to-json-schema/parsers/array.js | 30 + .../zod-to-json-schema/parsers/array.js.map | 1 + .../zod-to-json-schema/parsers/array.mjs | 26 + .../zod-to-json-schema/parsers/array.mjs.map | 1 + .../zod-to-json-schema/parsers/bigint.d.ts | 15 + .../parsers/bigint.d.ts.map | 1 + .../zod-to-json-schema/parsers/bigint.js | 54 + .../zod-to-json-schema/parsers/bigint.js.map | 1 + .../zod-to-json-schema/parsers/bigint.mjs | 50 + .../zod-to-json-schema/parsers/bigint.mjs.map | 1 + .../zod-to-json-schema/parsers/boolean.d.ts | 5 + .../parsers/boolean.d.ts.map | 1 + .../zod-to-json-schema/parsers/boolean.js | 10 + .../zod-to-json-schema/parsers/boolean.js.map | 1 + .../zod-to-json-schema/parsers/boolean.mjs | 6 + .../parsers/boolean.mjs.map | 1 + .../zod-to-json-schema/parsers/branded.d.ts | 4 + .../parsers/branded.d.ts.map | 1 + .../zod-to-json-schema/parsers/branded.js | 9 + .../zod-to-json-schema/parsers/branded.js.map | 1 + .../zod-to-json-schema/parsers/branded.mjs | 5 + .../parsers/branded.mjs.map | 1 + .../zod-to-json-schema/parsers/catch.d.ts | 4 + .../zod-to-json-schema/parsers/catch.d.ts.map | 1 + .../zod-to-json-schema/parsers/catch.js | 9 + .../zod-to-json-schema/parsers/catch.js.map | 1 + .../zod-to-json-schema/parsers/catch.mjs | 5 + .../zod-to-json-schema/parsers/catch.mjs.map | 1 + .../zod-to-json-schema/parsers/date.d.ts | 16 + .../zod-to-json-schema/parsers/date.d.ts.map | 1 + .../zod-to-json-schema/parsers/date.js | 51 + .../zod-to-json-schema/parsers/date.js.map | 1 + .../zod-to-json-schema/parsers/date.mjs | 47 + .../zod-to-json-schema/parsers/date.mjs.map | 1 + .../zod-to-json-schema/parsers/default.d.ts | 7 + .../parsers/default.d.ts.map | 1 + .../zod-to-json-schema/parsers/default.js | 12 + .../zod-to-json-schema/parsers/default.js.map | 1 + .../zod-to-json-schema/parsers/default.mjs | 8 + .../parsers/default.mjs.map | 1 + .../zod-to-json-schema/parsers/effects.d.ts | 5 + .../parsers/effects.d.ts.map | 1 + .../zod-to-json-schema/parsers/effects.js | 9 + .../zod-to-json-schema/parsers/effects.js.map | 1 + .../zod-to-json-schema/parsers/effects.mjs | 5 + 
.../parsers/effects.mjs.map | 1 + .../zod-to-json-schema/parsers/enum.d.ts | 7 + .../zod-to-json-schema/parsers/enum.d.ts.map | 1 + .../zod-to-json-schema/parsers/enum.js | 11 + .../zod-to-json-schema/parsers/enum.js.map | 1 + .../zod-to-json-schema/parsers/enum.mjs | 7 + .../zod-to-json-schema/parsers/enum.mjs.map | 1 + .../parsers/intersection.d.ts | 9 + .../parsers/intersection.d.ts.map | 1 + .../parsers/intersection.js | 54 + .../parsers/intersection.js.map | 1 + .../parsers/intersection.mjs | 50 + .../parsers/intersection.mjs.map | 1 + .../zod-to-json-schema/parsers/literal.d.ts | 10 + .../parsers/literal.d.ts.map | 1 + .../zod-to-json-schema/parsers/literal.js | 26 + .../zod-to-json-schema/parsers/literal.js.map | 1 + .../zod-to-json-schema/parsers/literal.mjs | 22 + .../parsers/literal.mjs.map | 1 + .../zod-to-json-schema/parsers/map.d.ts | 16 + .../zod-to-json-schema/parsers/map.d.ts.map | 1 + .../_vendor/zod-to-json-schema/parsers/map.js | 30 + .../zod-to-json-schema/parsers/map.js.map | 1 + .../zod-to-json-schema/parsers/map.mjs | 26 + .../zod-to-json-schema/parsers/map.mjs.map | 1 + .../parsers/nativeEnum.d.ts | 7 + .../parsers/nativeEnum.d.ts.map | 1 + .../zod-to-json-schema/parsers/nativeEnum.js | 21 + .../parsers/nativeEnum.js.map | 1 + .../zod-to-json-schema/parsers/nativeEnum.mjs | 17 + .../parsers/nativeEnum.mjs.map | 1 + .../zod-to-json-schema/parsers/never.d.ts | 5 + .../zod-to-json-schema/parsers/never.d.ts.map | 1 + .../zod-to-json-schema/parsers/never.js | 10 + .../zod-to-json-schema/parsers/never.js.map | 1 + .../zod-to-json-schema/parsers/never.mjs | 6 + .../zod-to-json-schema/parsers/never.mjs.map | 1 + .../zod-to-json-schema/parsers/null.d.ts | 6 + .../zod-to-json-schema/parsers/null.d.ts.map | 1 + .../zod-to-json-schema/parsers/null.js | 15 + .../zod-to-json-schema/parsers/null.js.map | 1 + .../zod-to-json-schema/parsers/null.mjs | 11 + .../zod-to-json-schema/parsers/null.mjs.map | 1 + .../zod-to-json-schema/parsers/nullable.d.ts | 11 + .../parsers/nullable.d.ts.map | 1 + .../zod-to-json-schema/parsers/nullable.js | 35 + .../parsers/nullable.js.map | 1 + .../zod-to-json-schema/parsers/nullable.mjs | 31 + .../parsers/nullable.mjs.map | 1 + .../zod-to-json-schema/parsers/number.d.ts | 14 + .../parsers/number.d.ts.map | 1 + .../zod-to-json-schema/parsers/number.js | 57 + .../zod-to-json-schema/parsers/number.js.map | 1 + .../zod-to-json-schema/parsers/number.mjs | 53 + .../zod-to-json-schema/parsers/number.mjs.map | 1 + .../zod-to-json-schema/parsers/object.d.ts | 11 + .../parsers/object.d.ts.map | 1 + .../zod-to-json-schema/parsers/object.js | 51 + .../zod-to-json-schema/parsers/object.js.map | 1 + .../zod-to-json-schema/parsers/object.mjs | 47 + .../zod-to-json-schema/parsers/object.mjs.map | 1 + .../zod-to-json-schema/parsers/optional.d.ts | 5 + .../parsers/optional.d.ts.map | 1 + .../zod-to-json-schema/parsers/optional.js | 25 + .../parsers/optional.js.map | 1 + .../zod-to-json-schema/parsers/optional.mjs | 21 + .../parsers/optional.mjs.map | 1 + .../zod-to-json-schema/parsers/pipeline.d.ts | 6 + .../parsers/pipeline.d.ts.map | 1 + .../zod-to-json-schema/parsers/pipeline.js | 25 + .../parsers/pipeline.js.map | 1 + .../zod-to-json-schema/parsers/pipeline.mjs | 21 + .../parsers/pipeline.mjs.map | 1 + .../zod-to-json-schema/parsers/promise.d.ts | 5 + .../parsers/promise.d.ts.map | 1 + .../zod-to-json-schema/parsers/promise.js | 9 + .../zod-to-json-schema/parsers/promise.js.map | 1 + .../zod-to-json-schema/parsers/promise.mjs | 5 + .../parsers/promise.mjs.map | 1 + 
.../zod-to-json-schema/parsers/readonly.d.ts | 4 + .../parsers/readonly.d.ts.map | 1 + .../zod-to-json-schema/parsers/readonly.js | 9 + .../parsers/readonly.js.map | 1 + .../zod-to-json-schema/parsers/readonly.mjs | 5 + .../parsers/readonly.mjs.map | 1 + .../zod-to-json-schema/parsers/record.d.ts | 14 + .../parsers/record.d.ts.map | 1 + .../zod-to-json-schema/parsers/record.js | 50 + .../zod-to-json-schema/parsers/record.js.map | 1 + .../zod-to-json-schema/parsers/record.mjs | 46 + .../zod-to-json-schema/parsers/record.mjs.map | 1 + .../zod-to-json-schema/parsers/set.d.ts | 14 + .../zod-to-json-schema/parsers/set.d.ts.map | 1 + .../_vendor/zod-to-json-schema/parsers/set.js | 25 + .../zod-to-json-schema/parsers/set.js.map | 1 + .../zod-to-json-schema/parsers/set.mjs | 21 + .../zod-to-json-schema/parsers/set.mjs.map | 1 + .../zod-to-json-schema/parsers/string.d.ts | 70 + .../parsers/string.d.ts.map | 1 + .../zod-to-json-schema/parsers/string.js | 316 + .../zod-to-json-schema/parsers/string.js.map | 1 + .../zod-to-json-schema/parsers/string.mjs | 312 + .../zod-to-json-schema/parsers/string.mjs.map | 1 + .../zod-to-json-schema/parsers/tuple.d.ts | 14 + .../zod-to-json-schema/parsers/tuple.d.ts.map | 1 + .../zod-to-json-schema/parsers/tuple.js | 37 + .../zod-to-json-schema/parsers/tuple.js.map | 1 + .../zod-to-json-schema/parsers/tuple.mjs | 33 + .../zod-to-json-schema/parsers/tuple.mjs.map | 1 + .../zod-to-json-schema/parsers/undefined.d.ts | 5 + .../parsers/undefined.d.ts.map | 1 + .../zod-to-json-schema/parsers/undefined.js | 10 + .../parsers/undefined.js.map | 1 + .../zod-to-json-schema/parsers/undefined.mjs | 6 + .../parsers/undefined.mjs.map | 1 + .../zod-to-json-schema/parsers/union.d.ts | 24 + .../zod-to-json-schema/parsers/union.d.ts.map | 1 + .../zod-to-json-schema/parsers/union.js | 77 + .../zod-to-json-schema/parsers/union.js.map | 1 + .../zod-to-json-schema/parsers/union.mjs | 73 + .../zod-to-json-schema/parsers/union.mjs.map | 1 + .../zod-to-json-schema/parsers/unknown.d.ts | 3 + .../parsers/unknown.d.ts.map | 1 + .../zod-to-json-schema/parsers/unknown.js | 8 + .../zod-to-json-schema/parsers/unknown.js.map | 1 + .../zod-to-json-schema/parsers/unknown.mjs | 4 + .../parsers/unknown.mjs.map | 1 + .../_vendor/zod-to-json-schema/util.d.ts | 4 + .../_vendor/zod-to-json-schema/util.d.ts.map | 1 + .../openai/_vendor/zod-to-json-schema/util.js | 16 + .../_vendor/zod-to-json-schema/util.js.map | 1 + .../_vendor/zod-to-json-schema/util.mjs | 11 + .../_vendor/zod-to-json-schema/util.mjs.map | 1 + .../zod-to-json-schema/zodToJsonSchema.d.ts | 11 + .../zodToJsonSchema.d.ts.map | 1 + .../zod-to-json-schema/zodToJsonSchema.js | 82 + .../zod-to-json-schema/zodToJsonSchema.js.map | 1 + .../zod-to-json-schema/zodToJsonSchema.mjs | 79 + .../zodToJsonSchema.mjs.map | 1 + node_modules/openai/bin/cli | 49 + node_modules/openai/core.d.ts | 249 + node_modules/openai/core.d.ts.map | 1 + node_modules/openai/core.js | 921 + node_modules/openai/core.js.map | 1 + node_modules/openai/core.mjs | 889 + node_modules/openai/core.mjs.map | 1 + node_modules/openai/error.d.ts | 57 + node_modules/openai/error.d.ts.map | 1 + node_modules/openai/error.js | 130 + node_modules/openai/error.js.map | 1 + node_modules/openai/error.mjs | 112 + node_modules/openai/error.mjs.map | 1 + node_modules/openai/helpers/zod.d.ts | 57 + node_modules/openai/helpers/zod.d.ts.map | 1 + node_modules/openai/helpers/zod.js | 85 + node_modules/openai/helpers/zod.js.map | 1 + node_modules/openai/helpers/zod.mjs | 80 + 
node_modules/openai/helpers/zod.mjs.map | 1 + node_modules/openai/index.d.mts | 232 + node_modules/openai/index.d.ts | 232 + node_modules/openai/index.d.ts.map | 1 + node_modules/openai/index.js | 297 + node_modules/openai/index.js.map | 1 + node_modules/openai/index.mjs | 251 + node_modules/openai/index.mjs.map | 1 + .../openai/internal/decoders/line.d.ts | 21 + .../openai/internal/decoders/line.d.ts.map | 1 + node_modules/openai/internal/decoders/line.js | 88 + .../openai/internal/decoders/line.js.map | 1 + .../openai/internal/decoders/line.mjs | 84 + .../openai/internal/decoders/line.mjs.map | 1 + node_modules/openai/internal/qs/formats.d.ts | 6 + .../openai/internal/qs/formats.d.ts.map | 1 + node_modules/openai/internal/qs/formats.js | 11 + .../openai/internal/qs/formats.js.map | 1 + node_modules/openai/internal/qs/formats.mjs | 8 + .../openai/internal/qs/formats.mjs.map | 1 + node_modules/openai/internal/qs/index.d.ts | 10 + .../openai/internal/qs/index.d.ts.map | 1 + node_modules/openai/internal/qs/index.js | 14 + node_modules/openai/internal/qs/index.js.map | 1 + node_modules/openai/internal/qs/index.mjs | 10 + node_modules/openai/internal/qs/index.mjs.map | 1 + .../openai/internal/qs/stringify.d.ts | 3 + .../openai/internal/qs/stringify.d.ts.map | 1 + node_modules/openai/internal/qs/stringify.js | 280 + .../openai/internal/qs/stringify.js.map | 1 + node_modules/openai/internal/qs/stringify.mjs | 276 + .../openai/internal/qs/stringify.mjs.map | 1 + node_modules/openai/internal/qs/types.d.ts | 57 + .../openai/internal/qs/types.d.ts.map | 1 + node_modules/openai/internal/qs/types.js | 3 + node_modules/openai/internal/qs/types.js.map | 1 + node_modules/openai/internal/qs/types.mjs | 2 + node_modules/openai/internal/qs/types.mjs.map | 1 + node_modules/openai/internal/qs/utils.d.ts | 14 + .../openai/internal/qs/utils.d.ts.map | 1 + node_modules/openai/internal/qs/utils.js | 229 + node_modules/openai/internal/qs/utils.js.map | 1 + node_modules/openai/internal/qs/utils.mjs | 217 + node_modules/openai/internal/qs/utils.mjs.map | 1 + .../lib/AbstractChatCompletionRunner.d.ts | 61 + .../lib/AbstractChatCompletionRunner.d.ts.map | 1 + .../lib/AbstractChatCompletionRunner.js | 372 + .../lib/AbstractChatCompletionRunner.js.map | 1 + .../lib/AbstractChatCompletionRunner.mjs | 368 + .../lib/AbstractChatCompletionRunner.mjs.map | 1 + node_modules/openai/lib/AssistantStream.d.ts | 61 + .../openai/lib/AssistantStream.d.ts.map | 1 + node_modules/openai/lib/AssistantStream.js | 581 + .../openai/lib/AssistantStream.js.map | 1 + node_modules/openai/lib/AssistantStream.mjs | 554 + .../openai/lib/AssistantStream.mjs.map | 1 + .../openai/lib/ChatCompletionRunner.d.ts | 21 + .../openai/lib/ChatCompletionRunner.d.ts.map | 1 + .../openai/lib/ChatCompletionRunner.js | 34 + .../openai/lib/ChatCompletionRunner.js.map | 1 + .../openai/lib/ChatCompletionRunner.mjs | 30 + .../openai/lib/ChatCompletionRunner.mjs.map | 1 + .../openai/lib/ChatCompletionStream.d.ts | 208 + .../openai/lib/ChatCompletionStream.d.ts.map | 1 + .../openai/lib/ChatCompletionStream.js | 503 + .../openai/lib/ChatCompletionStream.js.map | 1 + .../openai/lib/ChatCompletionStream.mjs | 499 + .../openai/lib/ChatCompletionStream.mjs.map | 1 + .../lib/ChatCompletionStreamingRunner.d.ts | 24 + .../ChatCompletionStreamingRunner.d.ts.map | 1 + .../lib/ChatCompletionStreamingRunner.js | 34 + .../lib/ChatCompletionStreamingRunner.js.map | 1 + .../lib/ChatCompletionStreamingRunner.mjs | 30 + .../lib/ChatCompletionStreamingRunner.mjs.map | 1 + 
node_modules/openai/lib/EventStream.d.ts | 63 + node_modules/openai/lib/EventStream.d.ts.map | 1 + node_modules/openai/lib/EventStream.js | 200 + node_modules/openai/lib/EventStream.js.map | 1 + node_modules/openai/lib/EventStream.mjs | 196 + node_modules/openai/lib/EventStream.mjs.map | 1 + node_modules/openai/lib/RunnableFunction.d.ts | 97 + .../openai/lib/RunnableFunction.d.ts.map | 1 + node_modules/openai/lib/RunnableFunction.js | 35 + .../openai/lib/RunnableFunction.js.map | 1 + node_modules/openai/lib/RunnableFunction.mjs | 29 + .../openai/lib/RunnableFunction.mjs.map | 1 + node_modules/openai/lib/Util.d.ts | 5 + node_modules/openai/lib/Util.d.ts.map | 1 + node_modules/openai/lib/Util.js | 26 + node_modules/openai/lib/Util.js.map | 1 + node_modules/openai/lib/Util.mjs | 22 + node_modules/openai/lib/Util.mjs.map | 1 + .../openai/lib/chatCompletionUtils.d.ts | 6 + .../openai/lib/chatCompletionUtils.d.ts.map | 1 + .../openai/lib/chatCompletionUtils.js | 20 + .../openai/lib/chatCompletionUtils.js.map | 1 + .../openai/lib/chatCompletionUtils.mjs | 13 + .../openai/lib/chatCompletionUtils.mjs.map | 1 + node_modules/openai/lib/jsonschema.d.ts | 106 + node_modules/openai/lib/jsonschema.d.ts.map | 1 + node_modules/openai/lib/jsonschema.js | 11 + node_modules/openai/lib/jsonschema.js.map | 1 + node_modules/openai/lib/jsonschema.mjs | 10 + node_modules/openai/lib/jsonschema.mjs.map | 1 + node_modules/openai/lib/parser.d.ts | 37 + node_modules/openai/lib/parser.d.ts.map | 1 + node_modules/openai/lib/parser.js | 133 + node_modules/openai/lib/parser.js.map | 1 + node_modules/openai/lib/parser.mjs | 121 + node_modules/openai/lib/parser.mjs.map | 1 + node_modules/openai/package.json | 112 + node_modules/openai/pagination.d.ts | 37 + node_modules/openai/pagination.d.ts.map | 1 + node_modules/openai/pagination.js | 64 + node_modules/openai/pagination.js.map | 1 + node_modules/openai/pagination.mjs | 59 + node_modules/openai/pagination.mjs.map | 1 + node_modules/openai/resource.d.ts | 6 + node_modules/openai/resource.d.ts.map | 1 + node_modules/openai/resource.js | 11 + node_modules/openai/resource.js.map | 1 + node_modules/openai/resource.mjs | 7 + node_modules/openai/resource.mjs.map | 1 + .../openai/resources/audio/audio.d.ts | 25 + .../openai/resources/audio/audio.d.ts.map | 1 + node_modules/openai/resources/audio/audio.js | 47 + .../openai/resources/audio/audio.js.map | 1 + node_modules/openai/resources/audio/audio.mjs | 20 + .../openai/resources/audio/audio.mjs.map | 1 + .../openai/resources/audio/index.d.ts | 5 + .../openai/resources/audio/index.d.ts.map | 1 + node_modules/openai/resources/audio/index.js | 13 + .../openai/resources/audio/index.js.map | 1 + node_modules/openai/resources/audio/index.mjs | 6 + .../openai/resources/audio/index.mjs.map | 1 + .../openai/resources/audio/speech.d.ts | 42 + .../openai/resources/audio/speech.d.ts.map | 1 + node_modules/openai/resources/audio/speech.js | 15 + .../openai/resources/audio/speech.js.map | 1 + .../openai/resources/audio/speech.mjs | 11 + .../openai/resources/audio/speech.mjs.map | 1 + .../resources/audio/transcriptions.d.ts | 162 + .../resources/audio/transcriptions.d.ts.map | 1 + .../openai/resources/audio/transcriptions.js | 36 + .../resources/audio/transcriptions.js.map | 1 + .../openai/resources/audio/transcriptions.mjs | 9 + .../resources/audio/transcriptions.mjs.map | 1 + .../openai/resources/audio/translations.d.ts | 71 + .../resources/audio/translations.d.ts.map | 1 + .../openai/resources/audio/translations.js | 36 + 
.../resources/audio/translations.js.map | 1 + .../openai/resources/audio/translations.mjs | 9 + .../resources/audio/translations.mjs.map | 1 + node_modules/openai/resources/batches.d.ts | 186 + .../openai/resources/batches.d.ts.map | 1 + node_modules/openai/resources/batches.js | 41 + node_modules/openai/resources/batches.js.map | 1 + node_modules/openai/resources/batches.mjs | 36 + node_modules/openai/resources/batches.mjs.map | 1 + .../openai/resources/beta/assistants.d.ts | 1140 + .../openai/resources/beta/assistants.d.ts.map | 1 + .../openai/resources/beta/assistants.js | 63 + .../openai/resources/beta/assistants.js.map | 1 + .../openai/resources/beta/assistants.mjs | 58 + .../openai/resources/beta/assistants.mjs.map | 1 + node_modules/openai/resources/beta/beta.d.ts | 22 + .../openai/resources/beta/beta.d.ts.map | 1 + node_modules/openai/resources/beta/beta.js | 52 + .../openai/resources/beta/beta.js.map | 1 + node_modules/openai/resources/beta/beta.mjs | 25 + .../openai/resources/beta/beta.mjs.map | 1 + .../openai/resources/beta/chat/chat.d.ts | 9 + .../openai/resources/beta/chat/chat.d.ts.map | 1 + .../openai/resources/beta/chat/chat.js | 40 + .../openai/resources/beta/chat/chat.js.map | 1 + .../openai/resources/beta/chat/chat.mjs | 13 + .../openai/resources/beta/chat/chat.mjs.map | 1 + .../resources/beta/chat/completions.d.ts | 58 + .../resources/beta/chat/completions.d.ts.map | 1 + .../openai/resources/beta/chat/completions.js | 52 + .../resources/beta/chat/completions.js.map | 1 + .../resources/beta/chat/completions.mjs | 43 + .../resources/beta/chat/completions.mjs.map | 1 + .../openai/resources/beta/chat/index.d.ts | 3 + .../openai/resources/beta/chat/index.d.ts.map | 1 + .../openai/resources/beta/chat/index.js | 9 + .../openai/resources/beta/chat/index.js.map | 1 + .../openai/resources/beta/chat/index.mjs | 4 + .../openai/resources/beta/chat/index.mjs.map | 1 + node_modules/openai/resources/beta/index.d.ts | 6 + .../openai/resources/beta/index.d.ts.map | 1 + node_modules/openai/resources/beta/index.js | 17 + .../openai/resources/beta/index.js.map | 1 + node_modules/openai/resources/beta/index.mjs | 7 + .../openai/resources/beta/index.mjs.map | 1 + .../openai/resources/beta/threads/index.d.ts | 4 + .../resources/beta/threads/index.d.ts.map | 1 + .../openai/resources/beta/threads/index.js | 13 + .../resources/beta/threads/index.js.map | 1 + .../openai/resources/beta/threads/index.mjs | 5 + .../resources/beta/threads/index.mjs.map | 1 + .../resources/beta/threads/messages.d.ts | 558 + .../resources/beta/threads/messages.d.ts.map | 1 + .../openai/resources/beta/threads/messages.js | 63 + .../resources/beta/threads/messages.js.map | 1 + .../resources/beta/threads/messages.mjs | 58 + .../resources/beta/threads/messages.mjs.map | 1 + .../resources/beta/threads/runs/index.d.ts | 3 + .../beta/threads/runs/index.d.ts.map | 1 + .../resources/beta/threads/runs/index.js | 11 + .../resources/beta/threads/runs/index.js.map | 1 + .../resources/beta/threads/runs/index.mjs | 4 + .../resources/beta/threads/runs/index.mjs.map | 1 + .../resources/beta/threads/runs/runs.d.ts | 1205 + .../resources/beta/threads/runs/runs.d.ts.map | 1 + .../resources/beta/threads/runs/runs.js | 190 + .../resources/beta/threads/runs/runs.js.map | 1 + .../resources/beta/threads/runs/runs.mjs | 162 + .../resources/beta/threads/runs/runs.mjs.map | 1 + .../resources/beta/threads/runs/steps.d.ts | 592 + .../beta/threads/runs/steps.d.ts.map | 1 + .../resources/beta/threads/runs/steps.js | 35 + 
.../resources/beta/threads/runs/steps.js.map | 1 + .../resources/beta/threads/runs/steps.mjs | 30 + .../resources/beta/threads/runs/steps.mjs.map | 1 + .../resources/beta/threads/threads.d.ts | 1216 + .../resources/beta/threads/threads.d.ts.map | 1 + .../openai/resources/beta/threads/threads.js | 108 + .../resources/beta/threads/threads.js.map | 1 + .../openai/resources/beta/threads/threads.mjs | 81 + .../resources/beta/threads/threads.mjs.map | 1 + .../beta/vector-stores/file-batches.d.ts | 145 + .../beta/vector-stores/file-batches.d.ts.map | 1 + .../beta/vector-stores/file-batches.js | 127 + .../beta/vector-stores/file-batches.js.map | 1 + .../beta/vector-stores/file-batches.mjs | 123 + .../beta/vector-stores/file-batches.mjs.map | 1 + .../resources/beta/vector-stores/files.d.ts | 159 + .../beta/vector-stores/files.d.ts.map | 1 + .../resources/beta/vector-stores/files.js | 122 + .../resources/beta/vector-stores/files.js.map | 1 + .../resources/beta/vector-stores/files.mjs | 117 + .../beta/vector-stores/files.mjs.map | 1 + .../resources/beta/vector-stores/index.d.ts | 4 + .../beta/vector-stores/index.d.ts.map | 1 + .../resources/beta/vector-stores/index.js | 13 + .../resources/beta/vector-stores/index.js.map | 1 + .../resources/beta/vector-stores/index.mjs | 5 + .../beta/vector-stores/index.mjs.map | 1 + .../beta/vector-stores/vector-stores.d.ts | 283 + .../beta/vector-stores/vector-stores.d.ts.map | 1 + .../beta/vector-stores/vector-stores.js | 98 + .../beta/vector-stores/vector-stores.js.map | 1 + .../beta/vector-stores/vector-stores.mjs | 70 + .../beta/vector-stores/vector-stores.mjs.map | 1 + node_modules/openai/resources/chat/chat.d.ts | 12 + .../openai/resources/chat/chat.d.ts.map | 1 + node_modules/openai/resources/chat/chat.js | 39 + .../openai/resources/chat/chat.js.map | 1 + node_modules/openai/resources/chat/chat.mjs | 12 + .../openai/resources/chat/chat.mjs.map | 1 + .../openai/resources/chat/completions.d.ts | 1083 + .../resources/chat/completions.d.ts.map | 1 + .../openai/resources/chat/completions.js | 12 + .../openai/resources/chat/completions.js.map | 1 + .../openai/resources/chat/completions.mjs | 8 + .../openai/resources/chat/completions.mjs.map | 1 + node_modules/openai/resources/chat/index.d.ts | 3 + .../openai/resources/chat/index.d.ts.map | 1 + node_modules/openai/resources/chat/index.js | 9 + .../openai/resources/chat/index.js.map | 1 + node_modules/openai/resources/chat/index.mjs | 4 + .../openai/resources/chat/index.mjs.map | 1 + .../openai/resources/completions.d.ts | 315 + .../openai/resources/completions.d.ts.map | 1 + node_modules/openai/resources/completions.js | 12 + .../openai/resources/completions.js.map | 1 + node_modules/openai/resources/completions.mjs | 8 + .../openai/resources/completions.mjs.map | 1 + node_modules/openai/resources/embeddings.d.ts | 101 + .../openai/resources/embeddings.d.ts.map | 1 + node_modules/openai/resources/embeddings.js | 15 + .../openai/resources/embeddings.js.map | 1 + node_modules/openai/resources/embeddings.mjs | 11 + .../openai/resources/embeddings.mjs.map | 1 + node_modules/openai/resources/files.d.ts | 152 + node_modules/openai/resources/files.d.ts.map | 1 + node_modules/openai/resources/files.js | 120 + node_modules/openai/resources/files.js.map | 1 + node_modules/openai/resources/files.mjs | 92 + node_modules/openai/resources/files.mjs.map | 1 + .../resources/fine-tuning/fine-tuning.d.ts | 10 + .../fine-tuning/fine-tuning.d.ts.map | 1 + .../resources/fine-tuning/fine-tuning.js | 41 + 
.../resources/fine-tuning/fine-tuning.js.map | 1 + .../resources/fine-tuning/fine-tuning.mjs | 14 + .../resources/fine-tuning/fine-tuning.mjs.map | 1 + .../openai/resources/fine-tuning/index.d.ts | 3 + .../resources/fine-tuning/index.d.ts.map | 1 + .../openai/resources/fine-tuning/index.js | 11 + .../openai/resources/fine-tuning/index.js.map | 1 + .../openai/resources/fine-tuning/index.mjs | 4 + .../resources/fine-tuning/index.mjs.map | 1 + .../fine-tuning/jobs/checkpoints.d.ts | 66 + .../fine-tuning/jobs/checkpoints.d.ts.map | 1 + .../resources/fine-tuning/jobs/checkpoints.js | 21 + .../fine-tuning/jobs/checkpoints.js.map | 1 + .../fine-tuning/jobs/checkpoints.mjs | 16 + .../fine-tuning/jobs/checkpoints.mjs.map | 1 + .../resources/fine-tuning/jobs/index.d.ts | 3 + .../resources/fine-tuning/jobs/index.d.ts.map | 1 + .../resources/fine-tuning/jobs/index.js | 12 + .../resources/fine-tuning/jobs/index.js.map | 1 + .../resources/fine-tuning/jobs/index.mjs | 4 + .../resources/fine-tuning/jobs/index.mjs.map | 1 + .../resources/fine-tuning/jobs/jobs.d.ts | 569 + .../resources/fine-tuning/jobs/jobs.d.ts.map | 1 + .../openai/resources/fine-tuning/jobs/jobs.js | 91 + .../resources/fine-tuning/jobs/jobs.js.map | 1 + .../resources/fine-tuning/jobs/jobs.mjs | 62 + .../resources/fine-tuning/jobs/jobs.mjs.map | 1 + node_modules/openai/resources/images.d.ts | 169 + node_modules/openai/resources/images.d.ts.map | 1 + node_modules/openai/resources/images.js | 51 + node_modules/openai/resources/images.js.map | 1 + node_modules/openai/resources/images.mjs | 24 + node_modules/openai/resources/images.mjs.map | 1 + node_modules/openai/resources/index.d.ts | 14 + node_modules/openai/resources/index.d.ts.map | 1 + node_modules/openai/resources/index.js | 46 + node_modules/openai/resources/index.js.map | 1 + node_modules/openai/resources/index.mjs | 15 + node_modules/openai/resources/index.mjs.map | 1 + node_modules/openai/resources/models.d.ts | 55 + node_modules/openai/resources/models.d.ts.map | 1 + node_modules/openai/resources/models.js | 38 + node_modules/openai/resources/models.js.map | 1 + node_modules/openai/resources/models.mjs | 33 + node_modules/openai/resources/models.mjs.map | 1 + .../openai/resources/moderations.d.ts | 294 + .../openai/resources/moderations.d.ts.map | 1 + node_modules/openai/resources/moderations.js | 16 + .../openai/resources/moderations.js.map | 1 + node_modules/openai/resources/moderations.mjs | 12 + .../openai/resources/moderations.mjs.map | 1 + node_modules/openai/resources/shared.d.ts | 92 + node_modules/openai/resources/shared.d.ts.map | 1 + node_modules/openai/resources/shared.js | 4 + node_modules/openai/resources/shared.js.map | 1 + node_modules/openai/resources/shared.mjs | 3 + node_modules/openai/resources/shared.mjs.map | 1 + .../openai/resources/uploads/index.d.ts | 3 + .../openai/resources/uploads/index.d.ts.map | 1 + .../openai/resources/uploads/index.js | 9 + .../openai/resources/uploads/index.js.map | 1 + .../openai/resources/uploads/index.mjs | 4 + .../openai/resources/uploads/index.mjs.map | 1 + .../openai/resources/uploads/parts.d.ts | 49 + .../openai/resources/uploads/parts.d.ts.map | 1 + .../openai/resources/uploads/parts.js | 49 + .../openai/resources/uploads/parts.js.map | 1 + .../openai/resources/uploads/parts.mjs | 22 + .../openai/resources/uploads/parts.mjs.map | 1 + .../openai/resources/uploads/uploads.d.ts | 134 + .../openai/resources/uploads/uploads.d.ts.map | 1 + .../openai/resources/uploads/uploads.js | 88 + .../openai/resources/uploads/uploads.js.map 
| 1 + .../openai/resources/uploads/uploads.mjs | 61 + .../openai/resources/uploads/uploads.mjs.map | 1 + node_modules/openai/shims/node.d.ts | 30 + node_modules/openai/shims/node.d.ts.map | 1 + node_modules/openai/shims/node.js | 31 + node_modules/openai/shims/node.js.map | 1 + node_modules/openai/shims/node.mjs | 5 + node_modules/openai/shims/node.mjs.map | 1 + node_modules/openai/shims/web.d.ts | 26 + node_modules/openai/shims/web.d.ts.map | 1 + node_modules/openai/shims/web.js | 31 + node_modules/openai/shims/web.js.map | 1 + node_modules/openai/shims/web.mjs | 5 + node_modules/openai/shims/web.mjs.map | 1 + .../openai/src/_shims/MultipartBody.ts | 9 + node_modules/openai/src/_shims/README.md | 46 + .../openai/src/_shims/auto/runtime-bun.ts | 4 + .../openai/src/_shims/auto/runtime-node.ts | 4 + .../openai/src/_shims/auto/runtime.ts | 4 + .../openai/src/_shims/auto/types-node.ts | 4 + .../openai/src/_shims/auto/types.d.ts | 101 + node_modules/openai/src/_shims/auto/types.js | 3 + node_modules/openai/src/_shims/auto/types.mjs | 3 + node_modules/openai/src/_shims/bun-runtime.ts | 14 + node_modules/openai/src/_shims/index.d.ts | 81 + node_modules/openai/src/_shims/index.js | 13 + node_modules/openai/src/_shims/index.mjs | 7 + .../openai/src/_shims/manual-types.d.ts | 12 + .../openai/src/_shims/manual-types.js | 3 + .../openai/src/_shims/manual-types.mjs | 3 + .../openai/src/_shims/node-runtime.ts | 81 + .../openai/src/_shims/node-types.d.ts | 42 + node_modules/openai/src/_shims/node-types.js | 3 + node_modules/openai/src/_shims/node-types.mjs | 3 + node_modules/openai/src/_shims/registry.ts | 65 + node_modules/openai/src/_shims/web-runtime.ts | 103 + node_modules/openai/src/_shims/web-types.d.ts | 83 + node_modules/openai/src/_shims/web-types.js | 3 + node_modules/openai/src/_shims/web-types.mjs | 3 + .../src/_vendor/partial-json-parser/README.md | 3 + .../src/_vendor/partial-json-parser/parser.ts | 247 + .../src/_vendor/zod-to-json-schema/LICENSE | 15 + .../src/_vendor/zod-to-json-schema/Options.ts | 80 + .../src/_vendor/zod-to-json-schema/README.md | 3 + .../src/_vendor/zod-to-json-schema/Refs.ts | 47 + .../zod-to-json-schema/errorMessages.ts | 31 + .../src/_vendor/zod-to-json-schema/index.ts | 37 + .../_vendor/zod-to-json-schema/parseDef.ts | 258 + .../_vendor/zod-to-json-schema/parsers/any.ts | 5 + .../zod-to-json-schema/parsers/array.ts | 36 + .../zod-to-json-schema/parsers/bigint.ts | 60 + .../zod-to-json-schema/parsers/boolean.ts | 9 + .../zod-to-json-schema/parsers/branded.ts | 7 + .../zod-to-json-schema/parsers/catch.ts | 7 + .../zod-to-json-schema/parsers/date.ts | 83 + .../zod-to-json-schema/parsers/default.ts | 10 + .../zod-to-json-schema/parsers/effects.ts | 11 + .../zod-to-json-schema/parsers/enum.ts | 13 + .../parsers/intersection.ts | 64 + .../zod-to-json-schema/parsers/literal.ts | 37 + .../_vendor/zod-to-json-schema/parsers/map.ts | 42 + .../zod-to-json-schema/parsers/nativeEnum.ts | 27 + .../zod-to-json-schema/parsers/never.ts | 9 + .../zod-to-json-schema/parsers/null.ts | 16 + .../zod-to-json-schema/parsers/nullable.ts | 49 + .../zod-to-json-schema/parsers/number.ts | 62 + .../zod-to-json-schema/parsers/object.ts | 63 + .../zod-to-json-schema/parsers/optional.ts | 25 + .../zod-to-json-schema/parsers/pipeline.ts | 28 + .../zod-to-json-schema/parsers/promise.ts | 7 + .../zod-to-json-schema/parsers/readonly.ts | 7 + .../zod-to-json-schema/parsers/record.ts | 73 + .../_vendor/zod-to-json-schema/parsers/set.ts | 36 + .../zod-to-json-schema/parsers/string.ts | 400 + 
.../zod-to-json-schema/parsers/tuple.ts | 54 + .../zod-to-json-schema/parsers/undefined.ts | 9 + .../zod-to-json-schema/parsers/union.ts | 119 + .../zod-to-json-schema/parsers/unknown.ts | 5 + .../src/_vendor/zod-to-json-schema/util.ts | 11 + .../zod-to-json-schema/zodToJsonSchema.ts | 120 + node_modules/openai/src/core.ts | 1233 + node_modules/openai/src/error.ts | 154 + node_modules/openai/src/helpers/zod.ts | 108 + node_modules/openai/src/index.ts | 668 + .../openai/src/internal/decoders/line.ts | 114 + .../openai/src/internal/qs/LICENSE.md | 13 + node_modules/openai/src/internal/qs/README.md | 3 + .../openai/src/internal/qs/formats.ts | 9 + node_modules/openai/src/internal/qs/index.ts | 13 + .../openai/src/internal/qs/stringify.ts | 388 + node_modules/openai/src/internal/qs/types.ts | 71 + node_modules/openai/src/internal/qs/utils.ts | 265 + node_modules/openai/src/lib/.keep | 4 + .../src/lib/AbstractChatCompletionRunner.ts | 503 + .../openai/src/lib/AssistantStream.ts | 774 + .../openai/src/lib/ChatCompletionRunner.ts | 76 + .../openai/src/lib/ChatCompletionStream.ts | 870 + .../src/lib/ChatCompletionStreamingRunner.ts | 72 + node_modules/openai/src/lib/EventStream.ts | 239 + .../openai/src/lib/RunnableFunction.ts | 136 + node_modules/openai/src/lib/Util.ts | 23 + .../openai/src/lib/chatCompletionUtils.ts | 28 + node_modules/openai/src/lib/jsonschema.ts | 148 + node_modules/openai/src/lib/parser.ts | 235 + node_modules/openai/src/pagination.ts | 98 + node_modules/openai/src/resource.ts | 11 + .../openai/src/resources/audio/audio.ts | 65 + .../openai/src/resources/audio/index.ts | 20 + .../openai/src/resources/audio/speech.ts | 53 + .../src/resources/audio/transcriptions.ts | 216 + .../src/resources/audio/translations.ts | 107 + node_modules/openai/src/resources/batches.ts | 258 + .../openai/src/resources/beta/assistants.ts | 1418 + .../openai/src/resources/beta/beta.ts | 128 + .../openai/src/resources/beta/chat/chat.ts | 12 + .../src/resources/beta/chat/completions.ts | 161 + .../openai/src/resources/beta/chat/index.ts | 4 + .../openai/src/resources/beta/index.ts | 54 + .../src/resources/beta/threads/index.ts | 73 + .../src/resources/beta/threads/messages.ts | 761 + .../src/resources/beta/threads/runs/index.ts | 46 + .../src/resources/beta/threads/runs/runs.ts | 1695 + .../src/resources/beta/threads/runs/steps.ts | 767 + .../src/resources/beta/threads/threads.ts | 1625 + .../beta/vector-stores/file-batches.ts | 304 + .../src/resources/beta/vector-stores/files.ts | 298 + .../src/resources/beta/vector-stores/index.ts | 32 + .../beta/vector-stores/vector-stores.ts | 424 + .../openai/src/resources/chat/chat.ts | 130 + .../openai/src/resources/chat/completions.ts | 1314 + .../openai/src/resources/chat/index.ts | 41 + .../openai/src/resources/completions.ts | 387 + .../openai/src/resources/embeddings.ts | 129 + node_modules/openai/src/resources/files.ts | 238 + .../src/resources/fine-tuning/fine-tuning.ts | 41 + .../openai/src/resources/fine-tuning/index.ts | 16 + .../resources/fine-tuning/jobs/checkpoints.ts | 111 + .../src/resources/fine-tuning/jobs/index.ts | 21 + .../src/resources/fine-tuning/jobs/jobs.ts | 722 + node_modules/openai/src/resources/images.ts | 218 + node_modules/openai/src/resources/index.ts | 63 + node_modules/openai/src/resources/models.ts | 75 + .../openai/src/resources/moderations.ts | 369 + node_modules/openai/src/resources/shared.ts | 109 + .../openai/src/resources/uploads/index.ts | 4 + .../openai/src/resources/uploads/parts.ts | 66 + 
.../openai/src/resources/uploads/uploads.ts | 172 + node_modules/openai/src/shims/node.ts | 50 + node_modules/openai/src/shims/web.ts | 50 + node_modules/openai/src/streaming.ts | 398 + node_modules/openai/src/tsconfig.json | 11 + node_modules/openai/src/uploads.ts | 255 + node_modules/openai/src/version.ts | 1 + node_modules/openai/streaming.d.ts | 41 + node_modules/openai/streaming.d.ts.map | 1 + node_modules/openai/streaming.js | 352 + node_modules/openai/streaming.js.map | 1 + node_modules/openai/streaming.mjs | 345 + node_modules/openai/streaming.mjs.map | 1 + node_modules/openai/uploads.d.ts | 75 + node_modules/openai/uploads.d.ts.map | 1 + node_modules/openai/uploads.js | 171 + node_modules/openai/uploads.js.map | 1 + node_modules/openai/uploads.mjs | 158 + node_modules/openai/uploads.mjs.map | 1 + node_modules/openai/version.d.ts | 2 + node_modules/openai/version.d.ts.map | 1 + node_modules/openai/version.js | 5 + node_modules/openai/version.js.map | 1 + node_modules/openai/version.mjs | 2 + node_modules/openai/version.mjs.map | 1 + node_modules/tr46/.npmignore | 4 + node_modules/tr46/index.js | 193 + node_modules/tr46/lib/.gitkeep | 0 node_modules/tr46/lib/mappingTable.json | 1 + node_modules/tr46/package.json | 31 + node_modules/tunnel/.idea/encodings.xml | 6 + node_modules/tunnel/.idea/modules.xml | 8 + node_modules/tunnel/.idea/node-tunnel.iml | 12 + node_modules/tunnel/.idea/vcs.xml | 6 + node_modules/tunnel/.idea/workspace.xml | 797 + node_modules/tunnel/.travis.yml | 6 + node_modules/tunnel/CHANGELOG.md | 22 + node_modules/tunnel/LICENSE | 21 + node_modules/tunnel/README.md | 185 + node_modules/tunnel/index.js | 1 + node_modules/tunnel/lib/tunnel.js | 264 + node_modules/tunnel/package.json | 34 + node_modules/undici-types/README.md | 6 + node_modules/undici-types/agent.d.ts | 31 + node_modules/undici-types/api.d.ts | 43 + node_modules/undici-types/balanced-pool.d.ts | 18 + node_modules/undici-types/cache.d.ts | 36 + node_modules/undici-types/client.d.ts | 97 + node_modules/undici-types/connector.d.ts | 34 + node_modules/undici-types/content-type.d.ts | 21 + node_modules/undici-types/cookies.d.ts | 28 + .../undici-types/diagnostics-channel.d.ts | 67 + node_modules/undici-types/dispatcher.d.ts | 241 + node_modules/undici-types/errors.d.ts | 128 + node_modules/undici-types/fetch.d.ts | 209 + node_modules/undici-types/file.d.ts | 39 + node_modules/undici-types/filereader.d.ts | 54 + node_modules/undici-types/formdata.d.ts | 108 + .../undici-types/global-dispatcher.d.ts | 9 + node_modules/undici-types/global-origin.d.ts | 7 + node_modules/undici-types/handlers.d.ts | 9 + node_modules/undici-types/header.d.ts | 4 + node_modules/undici-types/index.d.ts | 63 + node_modules/undici-types/interceptors.d.ts | 5 + node_modules/undici-types/mock-agent.d.ts | 50 + node_modules/undici-types/mock-client.d.ts | 25 + node_modules/undici-types/mock-errors.d.ts | 12 + .../undici-types/mock-interceptor.d.ts | 93 + node_modules/undici-types/mock-pool.d.ts | 25 + node_modules/undici-types/package.json | 55 + node_modules/undici-types/patch.d.ts | 71 + node_modules/undici-types/pool-stats.d.ts | 19 + node_modules/undici-types/pool.d.ts | 28 + node_modules/undici-types/proxy-agent.d.ts | 30 + node_modules/undici-types/readable.d.ts | 61 + node_modules/undici-types/webidl.d.ts | 220 + node_modules/undici-types/websocket.d.ts | 131 + node_modules/undici/LICENSE | 21 + node_modules/undici/README.md | 443 + node_modules/undici/docs/api/Agent.md | 80 + node_modules/undici/docs/api/BalancedPool.md | 99 + 
node_modules/undici/docs/api/CacheStorage.md | 30 + node_modules/undici/docs/api/Client.md | 273 + node_modules/undici/docs/api/Connector.md | 115 + node_modules/undici/docs/api/ContentType.md | 57 + node_modules/undici/docs/api/Cookies.md | 101 + .../undici/docs/api/DiagnosticsChannel.md | 204 + .../undici/docs/api/DispatchInterceptor.md | 60 + node_modules/undici/docs/api/Dispatcher.md | 887 + node_modules/undici/docs/api/Errors.md | 47 + node_modules/undici/docs/api/Fetch.md | 27 + node_modules/undici/docs/api/MockAgent.md | 540 + node_modules/undici/docs/api/MockClient.md | 77 + node_modules/undici/docs/api/MockErrors.md | 12 + node_modules/undici/docs/api/MockPool.md | 547 + node_modules/undici/docs/api/Pool.md | 84 + node_modules/undici/docs/api/PoolStats.md | 35 + node_modules/undici/docs/api/ProxyAgent.md | 126 + node_modules/undici/docs/api/RetryHandler.md | 108 + node_modules/undici/docs/api/WebSocket.md | 43 + node_modules/undici/docs/api/api-lifecycle.md | 62 + .../undici/docs/assets/lifecycle-diagram.png | Bin 0 -> 47090 bytes .../docs/best-practices/client-certificate.md | 64 + .../docs/best-practices/mocking-request.md | 136 + .../undici/docs/best-practices/proxy.md | 127 + .../docs/best-practices/writing-tests.md | 20 + node_modules/undici/index-fetch.js | 15 + node_modules/undici/index.d.ts | 3 + node_modules/undici/index.js | 167 + node_modules/undici/lib/agent.js | 148 + node_modules/undici/lib/api/abort-signal.js | 54 + node_modules/undici/lib/api/api-connect.js | 104 + node_modules/undici/lib/api/api-pipeline.js | 249 + node_modules/undici/lib/api/api-request.js | 180 + node_modules/undici/lib/api/api-stream.js | 220 + node_modules/undici/lib/api/api-upgrade.js | 105 + node_modules/undici/lib/api/index.js | 7 + node_modules/undici/lib/api/readable.js | 322 + node_modules/undici/lib/api/util.js | 46 + node_modules/undici/lib/balanced-pool.js | 190 + node_modules/undici/lib/cache/cache.js | 838 + node_modules/undici/lib/cache/cachestorage.js | 144 + node_modules/undici/lib/cache/symbols.js | 5 + node_modules/undici/lib/cache/util.js | 49 + node_modules/undici/lib/client.js | 2283 + .../undici/lib/compat/dispatcher-weakref.js | 48 + node_modules/undici/lib/cookies/constants.js | 12 + node_modules/undici/lib/cookies/index.js | 184 + node_modules/undici/lib/cookies/parse.js | 317 + node_modules/undici/lib/cookies/util.js | 291 + node_modules/undici/lib/core/connect.js | 189 + node_modules/undici/lib/core/constants.js | 118 + node_modules/undici/lib/core/errors.js | 230 + node_modules/undici/lib/core/request.js | 499 + node_modules/undici/lib/core/symbols.js | 63 + node_modules/undici/lib/core/util.js | 522 + node_modules/undici/lib/dispatcher-base.js | 192 + node_modules/undici/lib/dispatcher.js | 19 + node_modules/undici/lib/fetch/LICENSE | 21 + node_modules/undici/lib/fetch/body.js | 605 + node_modules/undici/lib/fetch/constants.js | 151 + node_modules/undici/lib/fetch/dataURL.js | 627 + node_modules/undici/lib/fetch/file.js | 344 + node_modules/undici/lib/fetch/formdata.js | 265 + node_modules/undici/lib/fetch/global.js | 40 + node_modules/undici/lib/fetch/headers.js | 589 + node_modules/undici/lib/fetch/index.js | 2148 + node_modules/undici/lib/fetch/request.js | 946 + node_modules/undici/lib/fetch/response.js | 571 + node_modules/undici/lib/fetch/symbols.js | 10 + node_modules/undici/lib/fetch/util.js | 1144 + node_modules/undici/lib/fetch/webidl.js | 646 + node_modules/undici/lib/fileapi/encoding.js | 290 + node_modules/undici/lib/fileapi/filereader.js | 344 + 
.../undici/lib/fileapi/progressevent.js | 78 + node_modules/undici/lib/fileapi/symbols.js | 10 + node_modules/undici/lib/fileapi/util.js | 392 + node_modules/undici/lib/global.js | 32 + .../undici/lib/handler/DecoratorHandler.js | 35 + .../undici/lib/handler/RedirectHandler.js | 221 + .../undici/lib/handler/RetryHandler.js | 336 + .../lib/interceptor/redirectInterceptor.js | 21 + node_modules/undici/lib/llhttp/constants.d.ts | 199 + node_modules/undici/lib/llhttp/constants.js | 278 + .../undici/lib/llhttp/constants.js.map | 1 + node_modules/undici/lib/llhttp/llhttp-wasm.js | 1 + node_modules/undici/lib/llhttp/llhttp.wasm | Bin 0 -> 55466 bytes .../undici/lib/llhttp/llhttp_simd-wasm.js | 1 + .../undici/lib/llhttp/llhttp_simd.wasm | Bin 0 -> 55450 bytes node_modules/undici/lib/llhttp/utils.d.ts | 4 + node_modules/undici/lib/llhttp/utils.js | 15 + node_modules/undici/lib/llhttp/utils.js.map | 1 + .../undici/lib/llhttp/wasm_build_env.txt | 32 + node_modules/undici/lib/mock/mock-agent.js | 171 + node_modules/undici/lib/mock/mock-client.js | 59 + node_modules/undici/lib/mock/mock-errors.js | 17 + .../undici/lib/mock/mock-interceptor.js | 206 + node_modules/undici/lib/mock/mock-pool.js | 59 + node_modules/undici/lib/mock/mock-symbols.js | 23 + node_modules/undici/lib/mock/mock-utils.js | 351 + .../mock/pending-interceptors-formatter.js | 40 + node_modules/undici/lib/mock/pluralizer.js | 29 + node_modules/undici/lib/node/fixed-queue.js | 117 + node_modules/undici/lib/pool-base.js | 194 + node_modules/undici/lib/pool-stats.js | 34 + node_modules/undici/lib/pool.js | 94 + node_modules/undici/lib/proxy-agent.js | 189 + node_modules/undici/lib/timers.js | 97 + .../undici/lib/websocket/connection.js | 291 + .../undici/lib/websocket/constants.js | 51 + node_modules/undici/lib/websocket/events.js | 303 + node_modules/undici/lib/websocket/frame.js | 73 + node_modules/undici/lib/websocket/receiver.js | 344 + node_modules/undici/lib/websocket/symbols.js | 12 + node_modules/undici/lib/websocket/util.js | 200 + .../undici/lib/websocket/websocket.js | 641 + node_modules/undici/package.json | 167 + node_modules/undici/types/README.md | 6 + node_modules/undici/types/agent.d.ts | 31 + node_modules/undici/types/api.d.ts | 43 + node_modules/undici/types/balanced-pool.d.ts | 18 + node_modules/undici/types/cache.d.ts | 36 + node_modules/undici/types/client.d.ts | 97 + node_modules/undici/types/connector.d.ts | 34 + node_modules/undici/types/content-type.d.ts | 21 + node_modules/undici/types/cookies.d.ts | 28 + .../undici/types/diagnostics-channel.d.ts | 67 + node_modules/undici/types/dispatcher.d.ts | 241 + node_modules/undici/types/errors.d.ts | 128 + node_modules/undici/types/fetch.d.ts | 209 + node_modules/undici/types/file.d.ts | 39 + node_modules/undici/types/filereader.d.ts | 54 + node_modules/undici/types/formdata.d.ts | 108 + .../undici/types/global-dispatcher.d.ts | 9 + node_modules/undici/types/global-origin.d.ts | 7 + node_modules/undici/types/handlers.d.ts | 9 + node_modules/undici/types/header.d.ts | 4 + node_modules/undici/types/index.d.ts | 65 + node_modules/undici/types/interceptors.d.ts | 5 + node_modules/undici/types/mock-agent.d.ts | 50 + node_modules/undici/types/mock-client.d.ts | 25 + node_modules/undici/types/mock-errors.d.ts | 12 + .../undici/types/mock-interceptor.d.ts | 93 + node_modules/undici/types/mock-pool.d.ts | 25 + node_modules/undici/types/patch.d.ts | 71 + node_modules/undici/types/pool-stats.d.ts | 19 + node_modules/undici/types/pool.d.ts | 28 + 
node_modules/undici/types/proxy-agent.d.ts | 30 + node_modules/undici/types/readable.d.ts | 61 + node_modules/undici/types/retry-handler.d.ts | 116 + node_modules/undici/types/webidl.d.ts | 220 + node_modules/undici/types/websocket.d.ts | 131 + node_modules/universal-user-agent/LICENSE.md | 7 + node_modules/universal-user-agent/README.md | 25 + .../universal-user-agent/dist-node/index.js | 18 + .../dist-node/index.js.map | 1 + .../universal-user-agent/dist-src/index.js | 9 + .../dist-types/index.d.ts | 1 + .../universal-user-agent/dist-web/index.js | 12 + .../dist-web/index.js.map | 1 + .../universal-user-agent/package.json | 31 + node_modules/web-streams-polyfill/LICENSE | 22 + node_modules/web-streams-polyfill/README.md | 135 + .../web-streams-polyfill/dist/polyfill.es5.js | 8 + .../web-streams-polyfill/dist/polyfill.js | 8 + .../web-streams-polyfill/dist/ponyfill.es5.js | 8 + .../dist/ponyfill.es5.mjs | 8 + .../web-streams-polyfill/dist/ponyfill.js | 8 + .../web-streams-polyfill/dist/ponyfill.mjs | 8 + .../web-streams-polyfill/es5/package.json | 6 + .../web-streams-polyfill/package.json | 101 + .../polyfill/es5/package.json | 5 + .../polyfill/package.json | 5 + .../web-streams-polyfill/types/polyfill.d.ts | 62 + .../web-streams-polyfill/types/ponyfill.d.ts | 775 + .../types/tsdoc-metadata.json | 11 + node_modules/webidl-conversions/LICENSE.md | 12 + node_modules/webidl-conversions/README.md | 53 + node_modules/webidl-conversions/lib/index.js | 189 + node_modules/webidl-conversions/package.json | 23 + node_modules/whatwg-url/LICENSE.txt | 21 + node_modules/whatwg-url/README.md | 67 + node_modules/whatwg-url/lib/URL-impl.js | 200 + node_modules/whatwg-url/lib/URL.js | 196 + node_modules/whatwg-url/lib/public-api.js | 11 + .../whatwg-url/lib/url-state-machine.js | 1297 + node_modules/whatwg-url/lib/utils.js | 20 + node_modules/whatwg-url/package.json | 32 + node_modules/wrappy/LICENSE | 15 + node_modules/wrappy/README.md | 36 + node_modules/wrappy/package.json | 29 + node_modules/wrappy/wrappy.js | 33 + package-lock.json | 685 + package.json | 2 +- test.js | 20 + 1851 files changed, 305970 insertions(+), 19 deletions(-) create mode 120000 node_modules/.bin/openai create mode 100644 node_modules/.package-lock.json create mode 100644 node_modules/.yarn-integrity create mode 100644 node_modules/@actions/core/LICENSE.md create mode 100644 node_modules/@actions/core/README.md create mode 100644 node_modules/@actions/core/lib/command.d.ts create mode 100644 node_modules/@actions/core/lib/command.js create mode 100644 node_modules/@actions/core/lib/command.js.map create mode 100644 node_modules/@actions/core/lib/core.d.ts create mode 100644 node_modules/@actions/core/lib/core.js create mode 100644 node_modules/@actions/core/lib/core.js.map create mode 100644 node_modules/@actions/core/lib/file-command.d.ts create mode 100644 node_modules/@actions/core/lib/file-command.js create mode 100644 node_modules/@actions/core/lib/file-command.js.map create mode 100644 node_modules/@actions/core/lib/oidc-utils.d.ts create mode 100644 node_modules/@actions/core/lib/oidc-utils.js create mode 100644 node_modules/@actions/core/lib/oidc-utils.js.map create mode 100644 node_modules/@actions/core/lib/path-utils.d.ts create mode 100644 node_modules/@actions/core/lib/path-utils.js create mode 100644 node_modules/@actions/core/lib/path-utils.js.map create mode 100644 node_modules/@actions/core/lib/platform.d.ts create mode 100644 node_modules/@actions/core/lib/platform.js create mode 100644 
node_modules/@actions/core/lib/platform.js.map create mode 100644 node_modules/@actions/core/lib/summary.d.ts create mode 100644 node_modules/@actions/core/lib/summary.js create mode 100644 node_modules/@actions/core/lib/summary.js.map create mode 100644 node_modules/@actions/core/lib/utils.d.ts create mode 100644 node_modules/@actions/core/lib/utils.js create mode 100644 node_modules/@actions/core/lib/utils.js.map create mode 100644 node_modules/@actions/core/package.json create mode 100644 node_modules/@actions/exec/LICENSE.md create mode 100644 node_modules/@actions/exec/README.md create mode 100644 node_modules/@actions/exec/lib/exec.d.ts create mode 100644 node_modules/@actions/exec/lib/exec.js create mode 100644 node_modules/@actions/exec/lib/exec.js.map create mode 100644 node_modules/@actions/exec/lib/interfaces.d.ts create mode 100644 node_modules/@actions/exec/lib/interfaces.js create mode 100644 node_modules/@actions/exec/lib/interfaces.js.map create mode 100644 node_modules/@actions/exec/lib/toolrunner.d.ts create mode 100644 node_modules/@actions/exec/lib/toolrunner.js create mode 100644 node_modules/@actions/exec/lib/toolrunner.js.map create mode 100644 node_modules/@actions/exec/package.json create mode 100644 node_modules/@actions/github/LICENSE.md create mode 100644 node_modules/@actions/github/README.md create mode 100644 node_modules/@actions/github/lib/context.d.ts create mode 100644 node_modules/@actions/github/lib/context.js create mode 100644 node_modules/@actions/github/lib/context.js.map create mode 100644 node_modules/@actions/github/lib/github.d.ts create mode 100644 node_modules/@actions/github/lib/github.js create mode 100644 node_modules/@actions/github/lib/github.js.map create mode 100644 node_modules/@actions/github/lib/interfaces.d.ts create mode 100644 node_modules/@actions/github/lib/interfaces.js create mode 100644 node_modules/@actions/github/lib/interfaces.js.map create mode 100644 node_modules/@actions/github/lib/internal/utils.d.ts create mode 100644 node_modules/@actions/github/lib/internal/utils.js create mode 100644 node_modules/@actions/github/lib/internal/utils.js.map create mode 100644 node_modules/@actions/github/lib/utils.d.ts create mode 100644 node_modules/@actions/github/lib/utils.js create mode 100644 node_modules/@actions/github/lib/utils.js.map create mode 100644 node_modules/@actions/github/package.json create mode 100644 node_modules/@actions/http-client/LICENSE create mode 100644 node_modules/@actions/http-client/README.md create mode 100644 node_modules/@actions/http-client/lib/auth.d.ts create mode 100644 node_modules/@actions/http-client/lib/auth.js create mode 100644 node_modules/@actions/http-client/lib/auth.js.map create mode 100644 node_modules/@actions/http-client/lib/index.d.ts create mode 100644 node_modules/@actions/http-client/lib/index.js create mode 100644 node_modules/@actions/http-client/lib/index.js.map create mode 100644 node_modules/@actions/http-client/lib/interfaces.d.ts create mode 100644 node_modules/@actions/http-client/lib/interfaces.js create mode 100644 node_modules/@actions/http-client/lib/interfaces.js.map create mode 100644 node_modules/@actions/http-client/lib/proxy.d.ts create mode 100644 node_modules/@actions/http-client/lib/proxy.js create mode 100644 node_modules/@actions/http-client/lib/proxy.js.map create mode 100644 node_modules/@actions/http-client/package.json create mode 100644 node_modules/@actions/io/LICENSE.md create mode 100644 node_modules/@actions/io/README.md create mode 100644 
node_modules/@actions/io/lib/io-util.d.ts create mode 100644 node_modules/@actions/io/lib/io-util.js create mode 100644 node_modules/@actions/io/lib/io-util.js.map create mode 100644 node_modules/@actions/io/lib/io.d.ts create mode 100644 node_modules/@actions/io/lib/io.js create mode 100644 node_modules/@actions/io/lib/io.js.map create mode 100644 node_modules/@actions/io/package.json create mode 100644 node_modules/@fastify/busboy/LICENSE create mode 100644 node_modules/@fastify/busboy/README.md create mode 100644 node_modules/@fastify/busboy/deps/dicer/LICENSE create mode 100644 node_modules/@fastify/busboy/deps/dicer/lib/Dicer.js create mode 100644 node_modules/@fastify/busboy/deps/dicer/lib/HeaderParser.js create mode 100644 node_modules/@fastify/busboy/deps/dicer/lib/PartStream.js create mode 100644 node_modules/@fastify/busboy/deps/dicer/lib/dicer.d.ts create mode 100644 node_modules/@fastify/busboy/deps/streamsearch/sbmh.js create mode 100644 node_modules/@fastify/busboy/lib/main.d.ts create mode 100644 node_modules/@fastify/busboy/lib/main.js create mode 100644 node_modules/@fastify/busboy/lib/types/multipart.js create mode 100644 node_modules/@fastify/busboy/lib/types/urlencoded.js create mode 100644 node_modules/@fastify/busboy/lib/utils/Decoder.js create mode 100644 node_modules/@fastify/busboy/lib/utils/basename.js create mode 100644 node_modules/@fastify/busboy/lib/utils/decodeText.js create mode 100644 node_modules/@fastify/busboy/lib/utils/getLimit.js create mode 100644 node_modules/@fastify/busboy/lib/utils/parseParams.js create mode 100644 node_modules/@fastify/busboy/package.json create mode 100644 node_modules/@octokit/auth-token/LICENSE create mode 100644 node_modules/@octokit/auth-token/README.md create mode 100644 node_modules/@octokit/auth-token/dist-node/index.js create mode 100644 node_modules/@octokit/auth-token/dist-node/index.js.map create mode 100644 node_modules/@octokit/auth-token/dist-src/auth.js create mode 100644 node_modules/@octokit/auth-token/dist-src/hook.js create mode 100644 node_modules/@octokit/auth-token/dist-src/index.js create mode 100644 node_modules/@octokit/auth-token/dist-src/types.js create mode 100644 node_modules/@octokit/auth-token/dist-src/with-authorization-prefix.js create mode 100644 node_modules/@octokit/auth-token/dist-types/auth.d.ts create mode 100644 node_modules/@octokit/auth-token/dist-types/hook.d.ts create mode 100644 node_modules/@octokit/auth-token/dist-types/index.d.ts create mode 100644 node_modules/@octokit/auth-token/dist-types/types.d.ts create mode 100644 node_modules/@octokit/auth-token/dist-types/with-authorization-prefix.d.ts create mode 100644 node_modules/@octokit/auth-token/dist-web/index.js create mode 100644 node_modules/@octokit/auth-token/dist-web/index.js.map create mode 100644 node_modules/@octokit/auth-token/package.json create mode 100644 node_modules/@octokit/core/LICENSE create mode 100644 node_modules/@octokit/core/README.md create mode 100644 node_modules/@octokit/core/dist-node/index.js create mode 100644 node_modules/@octokit/core/dist-node/index.js.map create mode 100644 node_modules/@octokit/core/dist-src/index.js create mode 100644 node_modules/@octokit/core/dist-src/types.js create mode 100644 node_modules/@octokit/core/dist-src/version.js create mode 100644 node_modules/@octokit/core/dist-types/index.d.ts create mode 100644 node_modules/@octokit/core/dist-types/types.d.ts create mode 100644 node_modules/@octokit/core/dist-types/version.d.ts create mode 100644 
node_modules/@octokit/core/dist-web/index.js create mode 100644 node_modules/@octokit/core/dist-web/index.js.map create mode 100644 node_modules/@octokit/core/package.json create mode 100644 node_modules/@octokit/endpoint/LICENSE create mode 100644 node_modules/@octokit/endpoint/README.md create mode 100644 node_modules/@octokit/endpoint/dist-node/index.js create mode 100644 node_modules/@octokit/endpoint/dist-node/index.js.map create mode 100644 node_modules/@octokit/endpoint/dist-src/defaults.js create mode 100644 node_modules/@octokit/endpoint/dist-src/endpoint-with-defaults.js create mode 100644 node_modules/@octokit/endpoint/dist-src/index.js create mode 100644 node_modules/@octokit/endpoint/dist-src/merge.js create mode 100644 node_modules/@octokit/endpoint/dist-src/parse.js create mode 100644 node_modules/@octokit/endpoint/dist-src/util/add-query-parameters.js create mode 100644 node_modules/@octokit/endpoint/dist-src/util/extract-url-variable-names.js create mode 100644 node_modules/@octokit/endpoint/dist-src/util/lowercase-keys.js create mode 100644 node_modules/@octokit/endpoint/dist-src/util/merge-deep.js create mode 100644 node_modules/@octokit/endpoint/dist-src/util/omit.js create mode 100644 node_modules/@octokit/endpoint/dist-src/util/remove-undefined-properties.js create mode 100644 node_modules/@octokit/endpoint/dist-src/util/url-template.js create mode 100644 node_modules/@octokit/endpoint/dist-src/version.js create mode 100644 node_modules/@octokit/endpoint/dist-src/with-defaults.js create mode 100644 node_modules/@octokit/endpoint/dist-types/defaults.d.ts create mode 100644 node_modules/@octokit/endpoint/dist-types/endpoint-with-defaults.d.ts create mode 100644 node_modules/@octokit/endpoint/dist-types/index.d.ts create mode 100644 node_modules/@octokit/endpoint/dist-types/merge.d.ts create mode 100644 node_modules/@octokit/endpoint/dist-types/parse.d.ts create mode 100644 node_modules/@octokit/endpoint/dist-types/util/add-query-parameters.d.ts create mode 100644 node_modules/@octokit/endpoint/dist-types/util/extract-url-variable-names.d.ts create mode 100644 node_modules/@octokit/endpoint/dist-types/util/lowercase-keys.d.ts create mode 100644 node_modules/@octokit/endpoint/dist-types/util/merge-deep.d.ts create mode 100644 node_modules/@octokit/endpoint/dist-types/util/omit.d.ts create mode 100644 node_modules/@octokit/endpoint/dist-types/util/remove-undefined-properties.d.ts create mode 100644 node_modules/@octokit/endpoint/dist-types/util/url-template.d.ts create mode 100644 node_modules/@octokit/endpoint/dist-types/version.d.ts create mode 100644 node_modules/@octokit/endpoint/dist-types/with-defaults.d.ts create mode 100644 node_modules/@octokit/endpoint/dist-web/index.js create mode 100644 node_modules/@octokit/endpoint/dist-web/index.js.map create mode 100644 node_modules/@octokit/endpoint/package.json create mode 100644 node_modules/@octokit/graphql/LICENSE create mode 100644 node_modules/@octokit/graphql/README.md create mode 100644 node_modules/@octokit/graphql/dist-node/index.js create mode 100644 node_modules/@octokit/graphql/dist-node/index.js.map create mode 100644 node_modules/@octokit/graphql/dist-src/error.js create mode 100644 node_modules/@octokit/graphql/dist-src/graphql.js create mode 100644 node_modules/@octokit/graphql/dist-src/index.js create mode 100644 node_modules/@octokit/graphql/dist-src/types.js create mode 100644 node_modules/@octokit/graphql/dist-src/version.js create mode 100644 node_modules/@octokit/graphql/dist-src/with-defaults.js 
create mode 100644 node_modules/@octokit/graphql/dist-types/error.d.ts create mode 100644 node_modules/@octokit/graphql/dist-types/graphql.d.ts create mode 100644 node_modules/@octokit/graphql/dist-types/index.d.ts create mode 100644 node_modules/@octokit/graphql/dist-types/types.d.ts create mode 100644 node_modules/@octokit/graphql/dist-types/version.d.ts create mode 100644 node_modules/@octokit/graphql/dist-types/with-defaults.d.ts create mode 100644 node_modules/@octokit/graphql/dist-web/index.js create mode 100644 node_modules/@octokit/graphql/dist-web/index.js.map create mode 100644 node_modules/@octokit/graphql/package.json create mode 100644 node_modules/@octokit/openapi-types/LICENSE create mode 100644 node_modules/@octokit/openapi-types/README.md create mode 100644 node_modules/@octokit/openapi-types/package.json create mode 100644 node_modules/@octokit/openapi-types/types.d.ts create mode 100644 node_modules/@octokit/plugin-paginate-rest/LICENSE create mode 100644 node_modules/@octokit/plugin-paginate-rest/README.md create mode 100644 node_modules/@octokit/plugin-paginate-rest/dist-node/index.js create mode 100644 node_modules/@octokit/plugin-paginate-rest/dist-node/index.js.map create mode 100644 node_modules/@octokit/plugin-paginate-rest/dist-src/compose-paginate.js create mode 100644 node_modules/@octokit/plugin-paginate-rest/dist-src/generated/paginating-endpoints.js create mode 100644 node_modules/@octokit/plugin-paginate-rest/dist-src/index.js create mode 100644 node_modules/@octokit/plugin-paginate-rest/dist-src/iterator.js create mode 100644 node_modules/@octokit/plugin-paginate-rest/dist-src/normalize-paginated-list-response.js create mode 100644 node_modules/@octokit/plugin-paginate-rest/dist-src/paginate.js create mode 100644 node_modules/@octokit/plugin-paginate-rest/dist-src/paginating-endpoints.js create mode 100644 node_modules/@octokit/plugin-paginate-rest/dist-src/types.js create mode 100644 node_modules/@octokit/plugin-paginate-rest/dist-src/version.js create mode 100644 node_modules/@octokit/plugin-paginate-rest/dist-types/compose-paginate.d.ts create mode 100644 node_modules/@octokit/plugin-paginate-rest/dist-types/generated/paginating-endpoints.d.ts create mode 100644 node_modules/@octokit/plugin-paginate-rest/dist-types/index.d.ts create mode 100644 node_modules/@octokit/plugin-paginate-rest/dist-types/iterator.d.ts create mode 100644 node_modules/@octokit/plugin-paginate-rest/dist-types/normalize-paginated-list-response.d.ts create mode 100644 node_modules/@octokit/plugin-paginate-rest/dist-types/paginate.d.ts create mode 100644 node_modules/@octokit/plugin-paginate-rest/dist-types/paginating-endpoints.d.ts create mode 100644 node_modules/@octokit/plugin-paginate-rest/dist-types/types.d.ts create mode 100644 node_modules/@octokit/plugin-paginate-rest/dist-types/version.d.ts create mode 100644 node_modules/@octokit/plugin-paginate-rest/dist-web/index.js create mode 100644 node_modules/@octokit/plugin-paginate-rest/dist-web/index.js.map create mode 100644 node_modules/@octokit/plugin-paginate-rest/package.json create mode 100644 node_modules/@octokit/plugin-rest-endpoint-methods/LICENSE create mode 100644 node_modules/@octokit/plugin-rest-endpoint-methods/README.md create mode 100644 node_modules/@octokit/plugin-rest-endpoint-methods/dist-node/index.js create mode 100644 node_modules/@octokit/plugin-rest-endpoint-methods/dist-node/index.js.map create mode 100644 node_modules/@octokit/plugin-rest-endpoint-methods/dist-src/endpoints-to-methods.js create mode 
100644 node_modules/@octokit/plugin-rest-endpoint-methods/dist-src/generated/endpoints.js create mode 100644 node_modules/@octokit/plugin-rest-endpoint-methods/dist-src/generated/method-types.js create mode 100644 node_modules/@octokit/plugin-rest-endpoint-methods/dist-src/generated/parameters-and-response-types.js create mode 100644 node_modules/@octokit/plugin-rest-endpoint-methods/dist-src/index.js create mode 100644 node_modules/@octokit/plugin-rest-endpoint-methods/dist-src/types.js create mode 100644 node_modules/@octokit/plugin-rest-endpoint-methods/dist-src/version.js create mode 100644 node_modules/@octokit/plugin-rest-endpoint-methods/dist-types/endpoints-to-methods.d.ts create mode 100644 node_modules/@octokit/plugin-rest-endpoint-methods/dist-types/generated/endpoints.d.ts create mode 100644 node_modules/@octokit/plugin-rest-endpoint-methods/dist-types/generated/method-types.d.ts create mode 100644 node_modules/@octokit/plugin-rest-endpoint-methods/dist-types/generated/parameters-and-response-types.d.ts create mode 100644 node_modules/@octokit/plugin-rest-endpoint-methods/dist-types/index.d.ts create mode 100644 node_modules/@octokit/plugin-rest-endpoint-methods/dist-types/types.d.ts create mode 100644 node_modules/@octokit/plugin-rest-endpoint-methods/dist-types/version.d.ts create mode 100644 node_modules/@octokit/plugin-rest-endpoint-methods/dist-web/index.js create mode 100644 node_modules/@octokit/plugin-rest-endpoint-methods/dist-web/index.js.map create mode 100644 node_modules/@octokit/plugin-rest-endpoint-methods/package.json create mode 100644 node_modules/@octokit/request-error/LICENSE create mode 100644 node_modules/@octokit/request-error/README.md create mode 100644 node_modules/@octokit/request-error/dist-node/index.js create mode 100644 node_modules/@octokit/request-error/dist-node/index.js.map create mode 100644 node_modules/@octokit/request-error/dist-src/index.js create mode 100644 node_modules/@octokit/request-error/dist-src/types.js create mode 100644 node_modules/@octokit/request-error/dist-types/index.d.ts create mode 100644 node_modules/@octokit/request-error/dist-types/types.d.ts create mode 100644 node_modules/@octokit/request-error/dist-web/index.js create mode 100644 node_modules/@octokit/request-error/dist-web/index.js.map create mode 100644 node_modules/@octokit/request-error/package.json create mode 100644 node_modules/@octokit/request/LICENSE create mode 100644 node_modules/@octokit/request/README.md create mode 100644 node_modules/@octokit/request/dist-node/index.js create mode 100644 node_modules/@octokit/request/dist-node/index.js.map create mode 100644 node_modules/@octokit/request/dist-src/fetch-wrapper.js create mode 100644 node_modules/@octokit/request/dist-src/get-buffer-response.js create mode 100644 node_modules/@octokit/request/dist-src/index.js create mode 100644 node_modules/@octokit/request/dist-src/version.js create mode 100644 node_modules/@octokit/request/dist-src/with-defaults.js create mode 100644 node_modules/@octokit/request/dist-types/fetch-wrapper.d.ts create mode 100644 node_modules/@octokit/request/dist-types/get-buffer-response.d.ts create mode 100644 node_modules/@octokit/request/dist-types/index.d.ts create mode 100644 node_modules/@octokit/request/dist-types/version.d.ts create mode 100644 node_modules/@octokit/request/dist-types/with-defaults.d.ts create mode 100644 node_modules/@octokit/request/dist-web/index.js create mode 100644 node_modules/@octokit/request/dist-web/index.js.map create mode 100644 
node_modules/@octokit/request/package.json create mode 100644 node_modules/@octokit/types/LICENSE create mode 100644 node_modules/@octokit/types/README.md create mode 100644 node_modules/@octokit/types/dist-node/index.js create mode 100644 node_modules/@octokit/types/dist-node/index.js.map create mode 100644 node_modules/@octokit/types/dist-src/AuthInterface.js create mode 100644 node_modules/@octokit/types/dist-src/EndpointDefaults.js create mode 100644 node_modules/@octokit/types/dist-src/EndpointInterface.js create mode 100644 node_modules/@octokit/types/dist-src/EndpointOptions.js create mode 100644 node_modules/@octokit/types/dist-src/Fetch.js create mode 100644 node_modules/@octokit/types/dist-src/GetResponseTypeFromEndpointMethod.js create mode 100644 node_modules/@octokit/types/dist-src/OctokitResponse.js create mode 100644 node_modules/@octokit/types/dist-src/RequestError.js create mode 100644 node_modules/@octokit/types/dist-src/RequestHeaders.js create mode 100644 node_modules/@octokit/types/dist-src/RequestInterface.js create mode 100644 node_modules/@octokit/types/dist-src/RequestMethod.js create mode 100644 node_modules/@octokit/types/dist-src/RequestOptions.js create mode 100644 node_modules/@octokit/types/dist-src/RequestParameters.js create mode 100644 node_modules/@octokit/types/dist-src/RequestRequestOptions.js create mode 100644 node_modules/@octokit/types/dist-src/ResponseHeaders.js create mode 100644 node_modules/@octokit/types/dist-src/Route.js create mode 100644 node_modules/@octokit/types/dist-src/Signal.js create mode 100644 node_modules/@octokit/types/dist-src/StrategyInterface.js create mode 100644 node_modules/@octokit/types/dist-src/Url.js create mode 100644 node_modules/@octokit/types/dist-src/VERSION.js create mode 100644 node_modules/@octokit/types/dist-src/generated/Endpoints.js create mode 100644 node_modules/@octokit/types/dist-src/index.js create mode 100644 node_modules/@octokit/types/dist-types/AuthInterface.d.ts create mode 100644 node_modules/@octokit/types/dist-types/EndpointDefaults.d.ts create mode 100644 node_modules/@octokit/types/dist-types/EndpointInterface.d.ts create mode 100644 node_modules/@octokit/types/dist-types/EndpointOptions.d.ts create mode 100644 node_modules/@octokit/types/dist-types/Fetch.d.ts create mode 100644 node_modules/@octokit/types/dist-types/GetResponseTypeFromEndpointMethod.d.ts create mode 100644 node_modules/@octokit/types/dist-types/OctokitResponse.d.ts create mode 100644 node_modules/@octokit/types/dist-types/RequestError.d.ts create mode 100644 node_modules/@octokit/types/dist-types/RequestHeaders.d.ts create mode 100644 node_modules/@octokit/types/dist-types/RequestInterface.d.ts create mode 100644 node_modules/@octokit/types/dist-types/RequestMethod.d.ts create mode 100644 node_modules/@octokit/types/dist-types/RequestOptions.d.ts create mode 100644 node_modules/@octokit/types/dist-types/RequestParameters.d.ts create mode 100644 node_modules/@octokit/types/dist-types/RequestRequestOptions.d.ts create mode 100644 node_modules/@octokit/types/dist-types/ResponseHeaders.d.ts create mode 100644 node_modules/@octokit/types/dist-types/Route.d.ts create mode 100644 node_modules/@octokit/types/dist-types/Signal.d.ts create mode 100644 node_modules/@octokit/types/dist-types/StrategyInterface.d.ts create mode 100644 node_modules/@octokit/types/dist-types/Url.d.ts create mode 100644 node_modules/@octokit/types/dist-types/VERSION.d.ts create mode 100644 node_modules/@octokit/types/dist-types/generated/Endpoints.d.ts create 
mode 100644 node_modules/@octokit/types/dist-types/index.d.ts create mode 100644 node_modules/@octokit/types/dist-web/index.js create mode 100644 node_modules/@octokit/types/dist-web/index.js.map create mode 100644 node_modules/@octokit/types/package.json create mode 100644 node_modules/@types/node-fetch/LICENSE create mode 100644 node_modules/@types/node-fetch/README.md create mode 100644 node_modules/@types/node-fetch/externals.d.ts create mode 100644 node_modules/@types/node-fetch/index.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/@types/node/LICENSE create mode 100644 node_modules/@types/node-fetch/node_modules/@types/node/README.md create mode 100644 node_modules/@types/node-fetch/node_modules/@types/node/assert.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/@types/node/assert/strict.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/@types/node/async_hooks.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/@types/node/buffer.buffer.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/@types/node/buffer.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/@types/node/child_process.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/@types/node/cluster.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/@types/node/compatibility/disposable.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/@types/node/compatibility/index.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/@types/node/compatibility/indexable.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/@types/node/compatibility/iterators.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/@types/node/console.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/@types/node/constants.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/@types/node/crypto.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/@types/node/dgram.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/@types/node/diagnostics_channel.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/@types/node/dns.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/@types/node/dns/promises.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/@types/node/dom-events.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/@types/node/domain.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/@types/node/events.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/@types/node/fs.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/@types/node/fs/promises.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/@types/node/globals.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/@types/node/globals.typedarray.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/@types/node/http.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/@types/node/http2.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/@types/node/https.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/@types/node/index.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/@types/node/inspector.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/@types/node/module.d.ts create mode 100644 
node_modules/@types/node-fetch/node_modules/@types/node/net.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/@types/node/os.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/@types/node/package.json create mode 100644 node_modules/@types/node-fetch/node_modules/@types/node/path.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/@types/node/perf_hooks.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/@types/node/process.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/@types/node/punycode.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/@types/node/querystring.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/@types/node/readline.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/@types/node/readline/promises.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/@types/node/repl.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/@types/node/sea.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/@types/node/sqlite.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/@types/node/stream.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/@types/node/stream/consumers.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/@types/node/stream/promises.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/@types/node/stream/web.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/@types/node/string_decoder.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/@types/node/test.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/@types/node/timers.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/@types/node/timers/promises.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/@types/node/tls.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/@types/node/trace_events.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/@types/node/ts5.6/buffer.buffer.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/@types/node/ts5.6/globals.typedarray.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/@types/node/ts5.6/index.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/@types/node/tty.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/@types/node/url.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/@types/node/util.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/@types/node/v8.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/@types/node/vm.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/@types/node/wasi.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/@types/node/worker_threads.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/@types/node/zlib.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/undici-types/LICENSE create mode 100644 node_modules/@types/node-fetch/node_modules/undici-types/README.md create mode 100644 node_modules/@types/node-fetch/node_modules/undici-types/agent.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/undici-types/api.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/undici-types/balanced-pool.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/undici-types/cache.d.ts create mode 
100644 node_modules/@types/node-fetch/node_modules/undici-types/client.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/undici-types/connector.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/undici-types/content-type.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/undici-types/cookies.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/undici-types/diagnostics-channel.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/undici-types/dispatcher.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/undici-types/env-http-proxy-agent.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/undici-types/errors.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/undici-types/eventsource.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/undici-types/fetch.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/undici-types/file.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/undici-types/filereader.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/undici-types/formdata.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/undici-types/global-dispatcher.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/undici-types/global-origin.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/undici-types/handlers.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/undici-types/header.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/undici-types/index.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/undici-types/interceptors.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/undici-types/mock-agent.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/undici-types/mock-client.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/undici-types/mock-errors.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/undici-types/mock-interceptor.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/undici-types/mock-pool.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/undici-types/package.json create mode 100644 node_modules/@types/node-fetch/node_modules/undici-types/patch.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/undici-types/pool-stats.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/undici-types/pool.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/undici-types/proxy-agent.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/undici-types/readable.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/undici-types/retry-agent.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/undici-types/retry-handler.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/undici-types/util.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/undici-types/webidl.d.ts create mode 100644 node_modules/@types/node-fetch/node_modules/undici-types/websocket.d.ts create mode 100644 node_modules/@types/node-fetch/package.json create mode 100644 node_modules/@types/node/LICENSE create mode 100644 node_modules/@types/node/README.md create mode 100644 node_modules/@types/node/assert.d.ts create mode 100644 node_modules/@types/node/assert/strict.d.ts create mode 100644 node_modules/@types/node/async_hooks.d.ts create mode 
100644 node_modules/@types/node/buffer.buffer.d.ts create mode 100644 node_modules/@types/node/buffer.d.ts create mode 100644 node_modules/@types/node/child_process.d.ts create mode 100644 node_modules/@types/node/cluster.d.ts create mode 100644 node_modules/@types/node/compatibility/disposable.d.ts create mode 100644 node_modules/@types/node/compatibility/index.d.ts create mode 100644 node_modules/@types/node/compatibility/indexable.d.ts create mode 100644 node_modules/@types/node/compatibility/iterators.d.ts create mode 100644 node_modules/@types/node/console.d.ts create mode 100644 node_modules/@types/node/constants.d.ts create mode 100644 node_modules/@types/node/crypto.d.ts create mode 100644 node_modules/@types/node/dgram.d.ts create mode 100644 node_modules/@types/node/diagnostics_channel.d.ts create mode 100644 node_modules/@types/node/dns.d.ts create mode 100644 node_modules/@types/node/dns/promises.d.ts create mode 100644 node_modules/@types/node/dom-events.d.ts create mode 100644 node_modules/@types/node/domain.d.ts create mode 100644 node_modules/@types/node/events.d.ts create mode 100644 node_modules/@types/node/fs.d.ts create mode 100644 node_modules/@types/node/fs/promises.d.ts create mode 100644 node_modules/@types/node/globals.d.ts create mode 100644 node_modules/@types/node/globals.typedarray.d.ts create mode 100644 node_modules/@types/node/http.d.ts create mode 100644 node_modules/@types/node/http2.d.ts create mode 100644 node_modules/@types/node/https.d.ts create mode 100644 node_modules/@types/node/index.d.ts create mode 100644 node_modules/@types/node/inspector.d.ts create mode 100644 node_modules/@types/node/module.d.ts create mode 100644 node_modules/@types/node/net.d.ts create mode 100644 node_modules/@types/node/os.d.ts create mode 100644 node_modules/@types/node/package.json create mode 100644 node_modules/@types/node/path.d.ts create mode 100644 node_modules/@types/node/perf_hooks.d.ts create mode 100644 node_modules/@types/node/process.d.ts create mode 100644 node_modules/@types/node/punycode.d.ts create mode 100644 node_modules/@types/node/querystring.d.ts create mode 100644 node_modules/@types/node/readline.d.ts create mode 100644 node_modules/@types/node/readline/promises.d.ts create mode 100644 node_modules/@types/node/repl.d.ts create mode 100644 node_modules/@types/node/stream.d.ts create mode 100644 node_modules/@types/node/stream/consumers.d.ts create mode 100644 node_modules/@types/node/stream/promises.d.ts create mode 100644 node_modules/@types/node/stream/web.d.ts create mode 100644 node_modules/@types/node/string_decoder.d.ts create mode 100644 node_modules/@types/node/test.d.ts create mode 100644 node_modules/@types/node/timers.d.ts create mode 100644 node_modules/@types/node/timers/promises.d.ts create mode 100644 node_modules/@types/node/tls.d.ts create mode 100644 node_modules/@types/node/trace_events.d.ts create mode 100644 node_modules/@types/node/ts5.6/buffer.buffer.d.ts create mode 100644 node_modules/@types/node/ts5.6/globals.typedarray.d.ts create mode 100644 node_modules/@types/node/ts5.6/index.d.ts create mode 100644 node_modules/@types/node/tty.d.ts create mode 100644 node_modules/@types/node/url.d.ts create mode 100644 node_modules/@types/node/util.d.ts create mode 100644 node_modules/@types/node/v8.d.ts create mode 100644 node_modules/@types/node/vm.d.ts create mode 100644 node_modules/@types/node/wasi.d.ts create mode 100644 node_modules/@types/node/worker_threads.d.ts create mode 100644 node_modules/@types/node/zlib.d.ts create mode 
100644 node_modules/abort-controller/LICENSE create mode 100644 node_modules/abort-controller/README.md create mode 100644 node_modules/abort-controller/browser.js create mode 100644 node_modules/abort-controller/browser.mjs create mode 100644 node_modules/abort-controller/dist/abort-controller.d.ts create mode 100644 node_modules/abort-controller/dist/abort-controller.js create mode 100644 node_modules/abort-controller/dist/abort-controller.js.map create mode 100644 node_modules/abort-controller/dist/abort-controller.mjs create mode 100644 node_modules/abort-controller/dist/abort-controller.mjs.map create mode 100644 node_modules/abort-controller/dist/abort-controller.umd.js create mode 100644 node_modules/abort-controller/dist/abort-controller.umd.js.map create mode 100644 node_modules/abort-controller/package.json create mode 100644 node_modules/abort-controller/polyfill.js create mode 100644 node_modules/abort-controller/polyfill.mjs create mode 100644 node_modules/agentkeepalive/History.md create mode 100644 node_modules/agentkeepalive/LICENSE create mode 100644 node_modules/agentkeepalive/README.md create mode 100644 node_modules/agentkeepalive/browser.js create mode 100644 node_modules/agentkeepalive/index.d.ts create mode 100644 node_modules/agentkeepalive/index.js create mode 100644 node_modules/agentkeepalive/lib/agent.js create mode 100644 node_modules/agentkeepalive/lib/constants.js create mode 100644 node_modules/agentkeepalive/lib/https_agent.js create mode 100644 node_modules/agentkeepalive/package.json create mode 100644 node_modules/asynckit/LICENSE create mode 100644 node_modules/asynckit/README.md create mode 100644 node_modules/asynckit/bench.js create mode 100644 node_modules/asynckit/index.js create mode 100644 node_modules/asynckit/lib/abort.js create mode 100644 node_modules/asynckit/lib/async.js create mode 100644 node_modules/asynckit/lib/defer.js create mode 100644 node_modules/asynckit/lib/iterate.js create mode 100644 node_modules/asynckit/lib/readable_asynckit.js create mode 100644 node_modules/asynckit/lib/readable_parallel.js create mode 100644 node_modules/asynckit/lib/readable_serial.js create mode 100644 node_modules/asynckit/lib/readable_serial_ordered.js create mode 100644 node_modules/asynckit/lib/state.js create mode 100644 node_modules/asynckit/lib/streamify.js create mode 100644 node_modules/asynckit/lib/terminator.js create mode 100644 node_modules/asynckit/package.json create mode 100644 node_modules/asynckit/parallel.js create mode 100644 node_modules/asynckit/serial.js create mode 100644 node_modules/asynckit/serialOrdered.js create mode 100644 node_modules/asynckit/stream.js create mode 100644 node_modules/before-after-hook/LICENSE create mode 100644 node_modules/before-after-hook/README.md create mode 100644 node_modules/before-after-hook/index.d.ts create mode 100644 node_modules/before-after-hook/index.js create mode 100644 node_modules/before-after-hook/lib/add.js create mode 100644 node_modules/before-after-hook/lib/register.js create mode 100644 node_modules/before-after-hook/lib/remove.js create mode 100644 node_modules/before-after-hook/package.json create mode 100644 node_modules/combined-stream/License create mode 100644 node_modules/combined-stream/Readme.md create mode 100644 node_modules/combined-stream/lib/combined_stream.js create mode 100644 node_modules/combined-stream/package.json create mode 100644 node_modules/combined-stream/yarn.lock create mode 100644 node_modules/delayed-stream/.npmignore create mode 100644 
node_modules/delayed-stream/License create mode 100644 node_modules/delayed-stream/Makefile create mode 100644 node_modules/delayed-stream/Readme.md create mode 100644 node_modules/delayed-stream/lib/delayed_stream.js create mode 100644 node_modules/delayed-stream/package.json create mode 100644 node_modules/deprecation/LICENSE create mode 100644 node_modules/deprecation/README.md create mode 100644 node_modules/deprecation/dist-node/index.js create mode 100644 node_modules/deprecation/dist-src/index.js create mode 100644 node_modules/deprecation/dist-types/index.d.ts create mode 100644 node_modules/deprecation/dist-web/index.js create mode 100644 node_modules/deprecation/package.json create mode 100644 node_modules/event-target-shim/LICENSE create mode 100644 node_modules/event-target-shim/README.md create mode 100644 node_modules/event-target-shim/dist/event-target-shim.js create mode 100644 node_modules/event-target-shim/dist/event-target-shim.js.map create mode 100644 node_modules/event-target-shim/dist/event-target-shim.mjs create mode 100644 node_modules/event-target-shim/dist/event-target-shim.mjs.map create mode 100644 node_modules/event-target-shim/dist/event-target-shim.umd.js create mode 100644 node_modules/event-target-shim/dist/event-target-shim.umd.js.map create mode 100644 node_modules/event-target-shim/index.d.ts create mode 100644 node_modules/event-target-shim/package.json create mode 100644 node_modules/form-data-encoder/@type/FileLike.d.ts create mode 100644 node_modules/form-data-encoder/@type/FormDataEncoder.d.ts create mode 100644 node_modules/form-data-encoder/@type/FormDataLike.d.ts create mode 100644 node_modules/form-data-encoder/@type/index.d.ts create mode 100644 node_modules/form-data-encoder/@type/util/createBoundary.d.ts create mode 100644 node_modules/form-data-encoder/@type/util/escapeName.d.ts create mode 100644 node_modules/form-data-encoder/@type/util/isFileLike.d.ts create mode 100644 node_modules/form-data-encoder/@type/util/isFormData.d.ts create mode 100644 node_modules/form-data-encoder/@type/util/isFunction.d.ts create mode 100644 node_modules/form-data-encoder/@type/util/isPlainObject.d.ts create mode 100644 node_modules/form-data-encoder/@type/util/normalizeValue.d.ts create mode 100644 node_modules/form-data-encoder/lib/cjs/FileLike.js create mode 100644 node_modules/form-data-encoder/lib/cjs/FormDataEncoder.js create mode 100644 node_modules/form-data-encoder/lib/cjs/FormDataLike.js create mode 100644 node_modules/form-data-encoder/lib/cjs/index.js create mode 100644 node_modules/form-data-encoder/lib/cjs/package.json create mode 100644 node_modules/form-data-encoder/lib/cjs/util/createBoundary.js create mode 100644 node_modules/form-data-encoder/lib/cjs/util/escapeName.js create mode 100644 node_modules/form-data-encoder/lib/cjs/util/isFileLike.js create mode 100644 node_modules/form-data-encoder/lib/cjs/util/isFormData.js create mode 100644 node_modules/form-data-encoder/lib/cjs/util/isFunction.js create mode 100644 node_modules/form-data-encoder/lib/cjs/util/isPlainObject.js create mode 100644 node_modules/form-data-encoder/lib/cjs/util/normalizeValue.js create mode 100644 node_modules/form-data-encoder/lib/esm/FileLike.js create mode 100644 node_modules/form-data-encoder/lib/esm/FormDataEncoder.js create mode 100644 node_modules/form-data-encoder/lib/esm/FormDataLike.js create mode 100644 node_modules/form-data-encoder/lib/esm/index.js create mode 100644 node_modules/form-data-encoder/lib/esm/package.json create mode 100644 
node_modules/form-data-encoder/lib/esm/util/createBoundary.js create mode 100644 node_modules/form-data-encoder/lib/esm/util/escapeName.js create mode 100644 node_modules/form-data-encoder/lib/esm/util/isFileLike.js create mode 100644 node_modules/form-data-encoder/lib/esm/util/isFormData.js create mode 100644 node_modules/form-data-encoder/lib/esm/util/isFunction.js create mode 100644 node_modules/form-data-encoder/lib/esm/util/isPlainObject.js create mode 100644 node_modules/form-data-encoder/lib/esm/util/normalizeValue.js create mode 100644 node_modules/form-data-encoder/license create mode 100644 node_modules/form-data-encoder/package.json create mode 100644 node_modules/form-data-encoder/readme.md create mode 100644 node_modules/form-data/License create mode 100644 node_modules/form-data/Readme.md create mode 100644 node_modules/form-data/index.d.ts create mode 100644 node_modules/form-data/lib/browser.js create mode 100644 node_modules/form-data/lib/form_data.js create mode 100644 node_modules/form-data/lib/populate.js create mode 100644 node_modules/form-data/package.json create mode 100644 node_modules/formdata-node/@type/Blob.d.ts create mode 100644 node_modules/formdata-node/@type/BlobPart.d.ts create mode 100644 node_modules/formdata-node/@type/File.d.ts create mode 100644 node_modules/formdata-node/@type/FormData.d.ts create mode 100644 node_modules/formdata-node/@type/blobHelpers.d.ts create mode 100644 node_modules/formdata-node/@type/browser.d.ts create mode 100644 node_modules/formdata-node/@type/deprecateConstructorEntries.d.ts create mode 100644 node_modules/formdata-node/@type/fileFromPath.d.ts create mode 100644 node_modules/formdata-node/@type/index.d.ts create mode 100644 node_modules/formdata-node/@type/isBlob.d.ts create mode 100644 node_modules/formdata-node/@type/isFile.d.ts create mode 100644 node_modules/formdata-node/@type/isFunction.d.ts create mode 100644 node_modules/formdata-node/@type/isPlainObject.d.ts create mode 100644 node_modules/formdata-node/lib/cjs/Blob.js create mode 100644 node_modules/formdata-node/lib/cjs/BlobPart.js create mode 100644 node_modules/formdata-node/lib/cjs/File.js create mode 100644 node_modules/formdata-node/lib/cjs/FormData.js create mode 100644 node_modules/formdata-node/lib/cjs/blobHelpers.js create mode 100644 node_modules/formdata-node/lib/cjs/browser.js create mode 100644 node_modules/formdata-node/lib/cjs/deprecateConstructorEntries.js create mode 100644 node_modules/formdata-node/lib/cjs/fileFromPath.js create mode 100644 node_modules/formdata-node/lib/cjs/index.js create mode 100644 node_modules/formdata-node/lib/cjs/isBlob.js create mode 100644 node_modules/formdata-node/lib/cjs/isFile.js create mode 100644 node_modules/formdata-node/lib/cjs/isFunction.js create mode 100644 node_modules/formdata-node/lib/cjs/isPlainObject.js create mode 100644 node_modules/formdata-node/lib/cjs/package.json create mode 100644 node_modules/formdata-node/lib/esm/Blob.js create mode 100644 node_modules/formdata-node/lib/esm/BlobPart.js create mode 100644 node_modules/formdata-node/lib/esm/File.js create mode 100644 node_modules/formdata-node/lib/esm/FormData.js create mode 100644 node_modules/formdata-node/lib/esm/blobHelpers.js create mode 100644 node_modules/formdata-node/lib/esm/browser.js create mode 100644 node_modules/formdata-node/lib/esm/deprecateConstructorEntries.js create mode 100644 node_modules/formdata-node/lib/esm/fileFromPath.js create mode 100644 node_modules/formdata-node/lib/esm/index.js create mode 100644 
node_modules/formdata-node/lib/esm/isBlob.js create mode 100644 node_modules/formdata-node/lib/esm/isFile.js create mode 100644 node_modules/formdata-node/lib/esm/isFunction.js create mode 100644 node_modules/formdata-node/lib/esm/isPlainObject.js create mode 100644 node_modules/formdata-node/lib/esm/package.json create mode 100644 node_modules/formdata-node/lib/node-domexception.d.ts create mode 100644 node_modules/formdata-node/license create mode 100644 node_modules/formdata-node/package.json create mode 100644 node_modules/formdata-node/readme.md create mode 100644 node_modules/humanize-ms/History.md create mode 100644 node_modules/humanize-ms/LICENSE create mode 100644 node_modules/humanize-ms/README.md create mode 100644 node_modules/humanize-ms/index.js create mode 100644 node_modules/humanize-ms/package.json create mode 100644 node_modules/is-plain-object/LICENSE create mode 100644 node_modules/is-plain-object/README.md create mode 100644 node_modules/is-plain-object/dist/is-plain-object.js create mode 100644 node_modules/is-plain-object/dist/is-plain-object.mjs create mode 100644 node_modules/is-plain-object/is-plain-object.d.ts create mode 100644 node_modules/is-plain-object/package.json create mode 100644 node_modules/mime-db/HISTORY.md create mode 100644 node_modules/mime-db/LICENSE create mode 100644 node_modules/mime-db/README.md create mode 100644 node_modules/mime-db/db.json create mode 100644 node_modules/mime-db/index.js create mode 100644 node_modules/mime-db/package.json create mode 100644 node_modules/mime-types/HISTORY.md create mode 100644 node_modules/mime-types/LICENSE create mode 100644 node_modules/mime-types/README.md create mode 100644 node_modules/mime-types/index.js create mode 100644 node_modules/mime-types/package.json create mode 100644 node_modules/ms/index.js create mode 100644 node_modules/ms/license.md create mode 100644 node_modules/ms/package.json create mode 100644 node_modules/ms/readme.md create mode 100644 node_modules/node-domexception/.history/README_20210527203617.md create mode 100644 node_modules/node-domexception/.history/README_20210527212714.md create mode 100644 node_modules/node-domexception/.history/README_20210527213345.md create mode 100644 node_modules/node-domexception/.history/README_20210527213411.md create mode 100644 node_modules/node-domexception/.history/README_20210527213803.md create mode 100644 node_modules/node-domexception/.history/README_20210527214323.md create mode 100644 node_modules/node-domexception/.history/README_20210527214408.md create mode 100644 node_modules/node-domexception/.history/index_20210527203842.js create mode 100644 node_modules/node-domexception/.history/index_20210527203947.js create mode 100644 node_modules/node-domexception/.history/index_20210527204259.js create mode 100644 node_modules/node-domexception/.history/index_20210527204418.js create mode 100644 node_modules/node-domexception/.history/index_20210527204756.js create mode 100644 node_modules/node-domexception/.history/index_20210527204833.js create mode 100644 node_modules/node-domexception/.history/index_20210527211208.js create mode 100644 node_modules/node-domexception/.history/index_20210527211248.js create mode 100644 node_modules/node-domexception/.history/index_20210527212722.js create mode 100644 node_modules/node-domexception/.history/index_20210527212731.js create mode 100644 node_modules/node-domexception/.history/index_20210527212746.js create mode 100644 node_modules/node-domexception/.history/index_20210527212900.js 
create mode 100644 node_modules/node-domexception/.history/index_20210527213022.js create mode 100644 node_modules/node-domexception/.history/index_20210527213822.js create mode 100644 node_modules/node-domexception/.history/index_20210527213843.js create mode 100644 node_modules/node-domexception/.history/index_20210527213852.js create mode 100644 node_modules/node-domexception/.history/index_20210527213910.js create mode 100644 node_modules/node-domexception/.history/index_20210527214034.js create mode 100644 node_modules/node-domexception/.history/index_20210527214643.js create mode 100644 node_modules/node-domexception/.history/index_20210527214654.js create mode 100644 node_modules/node-domexception/.history/index_20210527214700.js create mode 100644 node_modules/node-domexception/.history/package_20210527203733.json create mode 100644 node_modules/node-domexception/.history/package_20210527203825.json create mode 100644 node_modules/node-domexception/.history/package_20210527204621.json create mode 100644 node_modules/node-domexception/.history/package_20210527204913.json create mode 100644 node_modules/node-domexception/.history/package_20210527204925.json create mode 100644 node_modules/node-domexception/.history/package_20210527205145.json create mode 100644 node_modules/node-domexception/.history/package_20210527205156.json create mode 100644 node_modules/node-domexception/.history/test_20210527205603.js create mode 100644 node_modules/node-domexception/.history/test_20210527205957.js create mode 100644 node_modules/node-domexception/.history/test_20210527210021.js create mode 100644 node_modules/node-domexception/LICENSE create mode 100644 node_modules/node-domexception/README.md create mode 100644 node_modules/node-domexception/index.js create mode 100644 node_modules/node-domexception/package.json create mode 100644 node_modules/node-fetch/LICENSE.md create mode 100644 node_modules/node-fetch/README.md create mode 100644 node_modules/node-fetch/browser.js create mode 100644 node_modules/node-fetch/lib/index.es.js create mode 100644 node_modules/node-fetch/lib/index.js create mode 100644 node_modules/node-fetch/lib/index.mjs create mode 100644 node_modules/node-fetch/package.json create mode 100644 node_modules/once/LICENSE create mode 100644 node_modules/once/README.md create mode 100644 node_modules/once/once.js create mode 100644 node_modules/once/package.json create mode 100644 node_modules/openai/CHANGELOG.md create mode 100644 node_modules/openai/LICENSE create mode 100644 node_modules/openai/README.md create mode 100644 node_modules/openai/_shims/MultipartBody.d.ts create mode 100644 node_modules/openai/_shims/MultipartBody.d.ts.map create mode 100644 node_modules/openai/_shims/MultipartBody.js create mode 100644 node_modules/openai/_shims/MultipartBody.js.map create mode 100644 node_modules/openai/_shims/MultipartBody.mjs create mode 100644 node_modules/openai/_shims/MultipartBody.mjs.map create mode 100644 node_modules/openai/_shims/README.md create mode 100644 node_modules/openai/_shims/auto/runtime-bun.d.ts create mode 100644 node_modules/openai/_shims/auto/runtime-bun.d.ts.map create mode 100644 node_modules/openai/_shims/auto/runtime-bun.js create mode 100644 node_modules/openai/_shims/auto/runtime-bun.js.map create mode 100644 node_modules/openai/_shims/auto/runtime-bun.mjs create mode 100644 node_modules/openai/_shims/auto/runtime-bun.mjs.map create mode 100644 node_modules/openai/_shims/auto/runtime-node.d.ts create mode 100644 
node_modules/openai/_shims/auto/runtime-node.d.ts.map create mode 100644 node_modules/openai/_shims/auto/runtime-node.js create mode 100644 node_modules/openai/_shims/auto/runtime-node.js.map create mode 100644 node_modules/openai/_shims/auto/runtime-node.mjs create mode 100644 node_modules/openai/_shims/auto/runtime-node.mjs.map create mode 100644 node_modules/openai/_shims/auto/runtime.d.ts create mode 100644 node_modules/openai/_shims/auto/runtime.d.ts.map create mode 100644 node_modules/openai/_shims/auto/runtime.js create mode 100644 node_modules/openai/_shims/auto/runtime.js.map create mode 100644 node_modules/openai/_shims/auto/runtime.mjs create mode 100644 node_modules/openai/_shims/auto/runtime.mjs.map create mode 100644 node_modules/openai/_shims/auto/types-node.d.ts create mode 100644 node_modules/openai/_shims/auto/types-node.d.ts.map create mode 100644 node_modules/openai/_shims/auto/types-node.js create mode 100644 node_modules/openai/_shims/auto/types-node.js.map create mode 100644 node_modules/openai/_shims/auto/types-node.mjs create mode 100644 node_modules/openai/_shims/auto/types-node.mjs.map create mode 100644 node_modules/openai/_shims/auto/types.d.ts create mode 100644 node_modules/openai/_shims/auto/types.js create mode 100644 node_modules/openai/_shims/auto/types.mjs create mode 100644 node_modules/openai/_shims/bun-runtime.d.ts create mode 100644 node_modules/openai/_shims/bun-runtime.d.ts.map create mode 100644 node_modules/openai/_shims/bun-runtime.js create mode 100644 node_modules/openai/_shims/bun-runtime.js.map create mode 100644 node_modules/openai/_shims/bun-runtime.mjs create mode 100644 node_modules/openai/_shims/bun-runtime.mjs.map create mode 100644 node_modules/openai/_shims/index.d.ts create mode 100644 node_modules/openai/_shims/index.js create mode 100644 node_modules/openai/_shims/index.mjs create mode 100644 node_modules/openai/_shims/manual-types.d.ts create mode 100644 node_modules/openai/_shims/manual-types.js create mode 100644 node_modules/openai/_shims/manual-types.mjs create mode 100644 node_modules/openai/_shims/node-runtime.d.ts create mode 100644 node_modules/openai/_shims/node-runtime.d.ts.map create mode 100644 node_modules/openai/_shims/node-runtime.js create mode 100644 node_modules/openai/_shims/node-runtime.js.map create mode 100644 node_modules/openai/_shims/node-runtime.mjs create mode 100644 node_modules/openai/_shims/node-runtime.mjs.map create mode 100644 node_modules/openai/_shims/node-types.d.ts create mode 100644 node_modules/openai/_shims/node-types.js create mode 100644 node_modules/openai/_shims/node-types.mjs create mode 100644 node_modules/openai/_shims/registry.d.ts create mode 100644 node_modules/openai/_shims/registry.d.ts.map create mode 100644 node_modules/openai/_shims/registry.js create mode 100644 node_modules/openai/_shims/registry.js.map create mode 100644 node_modules/openai/_shims/registry.mjs create mode 100644 node_modules/openai/_shims/registry.mjs.map create mode 100644 node_modules/openai/_shims/web-runtime.d.ts create mode 100644 node_modules/openai/_shims/web-runtime.d.ts.map create mode 100644 node_modules/openai/_shims/web-runtime.js create mode 100644 node_modules/openai/_shims/web-runtime.js.map create mode 100644 node_modules/openai/_shims/web-runtime.mjs create mode 100644 node_modules/openai/_shims/web-runtime.mjs.map create mode 100644 node_modules/openai/_shims/web-types.d.ts create mode 100644 node_modules/openai/_shims/web-types.js create mode 100644 
node_modules/openai/_shims/web-types.mjs create mode 100644 node_modules/openai/_vendor/partial-json-parser/parser.d.ts create mode 100644 node_modules/openai/_vendor/partial-json-parser/parser.d.ts.map create mode 100644 node_modules/openai/_vendor/partial-json-parser/parser.js create mode 100644 node_modules/openai/_vendor/partial-json-parser/parser.js.map create mode 100644 node_modules/openai/_vendor/partial-json-parser/parser.mjs create mode 100644 node_modules/openai/_vendor/partial-json-parser/parser.mjs.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/Options.d.ts create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/Options.d.ts.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/Options.js create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/Options.js.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/Options.mjs create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/Options.mjs.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/Refs.d.ts create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/Refs.d.ts.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/Refs.js create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/Refs.js.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/Refs.mjs create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/Refs.mjs.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/errorMessages.d.ts create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/errorMessages.d.ts.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/errorMessages.js create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/errorMessages.js.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/errorMessages.mjs create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/errorMessages.mjs.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/index.d.ts create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/index.d.ts.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/index.js create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/index.js.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/index.mjs create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/index.mjs.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parseDef.d.ts create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parseDef.d.ts.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parseDef.js create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parseDef.js.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parseDef.mjs create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parseDef.mjs.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/any.d.ts create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/any.d.ts.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/any.js create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/any.js.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/any.mjs create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/any.mjs.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/array.d.ts create mode 100644 
node_modules/openai/_vendor/zod-to-json-schema/parsers/array.d.ts.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/array.js create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/array.js.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/array.mjs create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/array.mjs.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/bigint.d.ts create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/bigint.d.ts.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/bigint.js create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/bigint.js.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/bigint.mjs create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/bigint.mjs.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/boolean.d.ts create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/boolean.d.ts.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/boolean.js create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/boolean.js.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/boolean.mjs create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/boolean.mjs.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/branded.d.ts create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/branded.d.ts.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/branded.js create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/branded.js.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/branded.mjs create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/branded.mjs.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/catch.d.ts create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/catch.d.ts.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/catch.js create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/catch.js.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/catch.mjs create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/catch.mjs.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/date.d.ts create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/date.d.ts.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/date.js create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/date.js.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/date.mjs create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/date.mjs.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/default.d.ts create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/default.d.ts.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/default.js create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/default.js.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/default.mjs create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/default.mjs.map create 
mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/effects.d.ts create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/effects.d.ts.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/effects.js create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/effects.js.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/effects.mjs create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/effects.mjs.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/enum.d.ts create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/enum.d.ts.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/enum.js create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/enum.js.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/enum.mjs create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/enum.mjs.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/intersection.d.ts create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/intersection.d.ts.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/intersection.js create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/intersection.js.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/intersection.mjs create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/intersection.mjs.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/literal.d.ts create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/literal.d.ts.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/literal.js create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/literal.js.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/literal.mjs create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/literal.mjs.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/map.d.ts create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/map.d.ts.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/map.js create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/map.js.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/map.mjs create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/map.mjs.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/nativeEnum.d.ts create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/nativeEnum.d.ts.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/nativeEnum.js create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/nativeEnum.js.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/nativeEnum.mjs create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/nativeEnum.mjs.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/never.d.ts create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/never.d.ts.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/never.js create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/never.js.map create mode 100644 
node_modules/openai/_vendor/zod-to-json-schema/parsers/never.mjs create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/never.mjs.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/null.d.ts create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/null.d.ts.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/null.js create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/null.js.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/null.mjs create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/null.mjs.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/nullable.d.ts create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/nullable.d.ts.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/nullable.js create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/nullable.js.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/nullable.mjs create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/nullable.mjs.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/number.d.ts create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/number.d.ts.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/number.js create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/number.js.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/number.mjs create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/number.mjs.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/object.d.ts create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/object.d.ts.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/object.js create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/object.js.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/object.mjs create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/object.mjs.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/optional.d.ts create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/optional.d.ts.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/optional.js create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/optional.js.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/optional.mjs create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/optional.mjs.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/pipeline.d.ts create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/pipeline.d.ts.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/pipeline.js create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/pipeline.js.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/pipeline.mjs create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/pipeline.mjs.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/promise.d.ts create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/promise.d.ts.map create mode 100644 
node_modules/openai/_vendor/zod-to-json-schema/parsers/promise.js create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/promise.js.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/promise.mjs create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/promise.mjs.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/readonly.d.ts create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/readonly.d.ts.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/readonly.js create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/readonly.js.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/readonly.mjs create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/readonly.mjs.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/record.d.ts create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/record.d.ts.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/record.js create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/record.js.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/record.mjs create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/record.mjs.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/set.d.ts create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/set.d.ts.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/set.js create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/set.js.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/set.mjs create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/set.mjs.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/string.d.ts create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/string.d.ts.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/string.js create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/string.js.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/string.mjs create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/string.mjs.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/tuple.d.ts create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/tuple.d.ts.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/tuple.js create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/tuple.js.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/tuple.mjs create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/tuple.mjs.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/undefined.d.ts create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/undefined.d.ts.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/undefined.js create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/undefined.js.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/undefined.mjs create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/undefined.mjs.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/union.d.ts 
create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/union.d.ts.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/union.js create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/union.js.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/union.mjs create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/union.mjs.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/unknown.d.ts create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/unknown.d.ts.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/unknown.js create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/unknown.js.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/unknown.mjs create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/parsers/unknown.mjs.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/util.d.ts create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/util.d.ts.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/util.js create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/util.js.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/util.mjs create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/util.mjs.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/zodToJsonSchema.d.ts create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/zodToJsonSchema.d.ts.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/zodToJsonSchema.js create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/zodToJsonSchema.js.map create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/zodToJsonSchema.mjs create mode 100644 node_modules/openai/_vendor/zod-to-json-schema/zodToJsonSchema.mjs.map create mode 100755 node_modules/openai/bin/cli create mode 100644 node_modules/openai/core.d.ts create mode 100644 node_modules/openai/core.d.ts.map create mode 100644 node_modules/openai/core.js create mode 100644 node_modules/openai/core.js.map create mode 100644 node_modules/openai/core.mjs create mode 100644 node_modules/openai/core.mjs.map create mode 100644 node_modules/openai/error.d.ts create mode 100644 node_modules/openai/error.d.ts.map create mode 100644 node_modules/openai/error.js create mode 100644 node_modules/openai/error.js.map create mode 100644 node_modules/openai/error.mjs create mode 100644 node_modules/openai/error.mjs.map create mode 100644 node_modules/openai/helpers/zod.d.ts create mode 100644 node_modules/openai/helpers/zod.d.ts.map create mode 100644 node_modules/openai/helpers/zod.js create mode 100644 node_modules/openai/helpers/zod.js.map create mode 100644 node_modules/openai/helpers/zod.mjs create mode 100644 node_modules/openai/helpers/zod.mjs.map create mode 100644 node_modules/openai/index.d.mts create mode 100644 node_modules/openai/index.d.ts create mode 100644 node_modules/openai/index.d.ts.map create mode 100644 node_modules/openai/index.js create mode 100644 node_modules/openai/index.js.map create mode 100644 node_modules/openai/index.mjs create mode 100644 node_modules/openai/index.mjs.map create mode 100644 node_modules/openai/internal/decoders/line.d.ts create mode 100644 node_modules/openai/internal/decoders/line.d.ts.map create mode 100644 node_modules/openai/internal/decoders/line.js create mode 100644 
node_modules/openai/internal/decoders/line.js.map create mode 100644 node_modules/openai/internal/decoders/line.mjs create mode 100644 node_modules/openai/internal/decoders/line.mjs.map create mode 100644 node_modules/openai/internal/qs/formats.d.ts create mode 100644 node_modules/openai/internal/qs/formats.d.ts.map create mode 100644 node_modules/openai/internal/qs/formats.js create mode 100644 node_modules/openai/internal/qs/formats.js.map create mode 100644 node_modules/openai/internal/qs/formats.mjs create mode 100644 node_modules/openai/internal/qs/formats.mjs.map create mode 100644 node_modules/openai/internal/qs/index.d.ts create mode 100644 node_modules/openai/internal/qs/index.d.ts.map create mode 100644 node_modules/openai/internal/qs/index.js create mode 100644 node_modules/openai/internal/qs/index.js.map create mode 100644 node_modules/openai/internal/qs/index.mjs create mode 100644 node_modules/openai/internal/qs/index.mjs.map create mode 100644 node_modules/openai/internal/qs/stringify.d.ts create mode 100644 node_modules/openai/internal/qs/stringify.d.ts.map create mode 100644 node_modules/openai/internal/qs/stringify.js create mode 100644 node_modules/openai/internal/qs/stringify.js.map create mode 100644 node_modules/openai/internal/qs/stringify.mjs create mode 100644 node_modules/openai/internal/qs/stringify.mjs.map create mode 100644 node_modules/openai/internal/qs/types.d.ts create mode 100644 node_modules/openai/internal/qs/types.d.ts.map create mode 100644 node_modules/openai/internal/qs/types.js create mode 100644 node_modules/openai/internal/qs/types.js.map create mode 100644 node_modules/openai/internal/qs/types.mjs create mode 100644 node_modules/openai/internal/qs/types.mjs.map create mode 100644 node_modules/openai/internal/qs/utils.d.ts create mode 100644 node_modules/openai/internal/qs/utils.d.ts.map create mode 100644 node_modules/openai/internal/qs/utils.js create mode 100644 node_modules/openai/internal/qs/utils.js.map create mode 100644 node_modules/openai/internal/qs/utils.mjs create mode 100644 node_modules/openai/internal/qs/utils.mjs.map create mode 100644 node_modules/openai/lib/AbstractChatCompletionRunner.d.ts create mode 100644 node_modules/openai/lib/AbstractChatCompletionRunner.d.ts.map create mode 100644 node_modules/openai/lib/AbstractChatCompletionRunner.js create mode 100644 node_modules/openai/lib/AbstractChatCompletionRunner.js.map create mode 100644 node_modules/openai/lib/AbstractChatCompletionRunner.mjs create mode 100644 node_modules/openai/lib/AbstractChatCompletionRunner.mjs.map create mode 100644 node_modules/openai/lib/AssistantStream.d.ts create mode 100644 node_modules/openai/lib/AssistantStream.d.ts.map create mode 100644 node_modules/openai/lib/AssistantStream.js create mode 100644 node_modules/openai/lib/AssistantStream.js.map create mode 100644 node_modules/openai/lib/AssistantStream.mjs create mode 100644 node_modules/openai/lib/AssistantStream.mjs.map create mode 100644 node_modules/openai/lib/ChatCompletionRunner.d.ts create mode 100644 node_modules/openai/lib/ChatCompletionRunner.d.ts.map create mode 100644 node_modules/openai/lib/ChatCompletionRunner.js create mode 100644 node_modules/openai/lib/ChatCompletionRunner.js.map create mode 100644 node_modules/openai/lib/ChatCompletionRunner.mjs create mode 100644 node_modules/openai/lib/ChatCompletionRunner.mjs.map create mode 100644 node_modules/openai/lib/ChatCompletionStream.d.ts create mode 100644 node_modules/openai/lib/ChatCompletionStream.d.ts.map create mode 100644 
node_modules/openai/lib/ChatCompletionStream.js create mode 100644 node_modules/openai/lib/ChatCompletionStream.js.map create mode 100644 node_modules/openai/lib/ChatCompletionStream.mjs create mode 100644 node_modules/openai/lib/ChatCompletionStream.mjs.map create mode 100644 node_modules/openai/lib/ChatCompletionStreamingRunner.d.ts create mode 100644 node_modules/openai/lib/ChatCompletionStreamingRunner.d.ts.map create mode 100644 node_modules/openai/lib/ChatCompletionStreamingRunner.js create mode 100644 node_modules/openai/lib/ChatCompletionStreamingRunner.js.map create mode 100644 node_modules/openai/lib/ChatCompletionStreamingRunner.mjs create mode 100644 node_modules/openai/lib/ChatCompletionStreamingRunner.mjs.map create mode 100644 node_modules/openai/lib/EventStream.d.ts create mode 100644 node_modules/openai/lib/EventStream.d.ts.map create mode 100644 node_modules/openai/lib/EventStream.js create mode 100644 node_modules/openai/lib/EventStream.js.map create mode 100644 node_modules/openai/lib/EventStream.mjs create mode 100644 node_modules/openai/lib/EventStream.mjs.map create mode 100644 node_modules/openai/lib/RunnableFunction.d.ts create mode 100644 node_modules/openai/lib/RunnableFunction.d.ts.map create mode 100644 node_modules/openai/lib/RunnableFunction.js create mode 100644 node_modules/openai/lib/RunnableFunction.js.map create mode 100644 node_modules/openai/lib/RunnableFunction.mjs create mode 100644 node_modules/openai/lib/RunnableFunction.mjs.map create mode 100644 node_modules/openai/lib/Util.d.ts create mode 100644 node_modules/openai/lib/Util.d.ts.map create mode 100644 node_modules/openai/lib/Util.js create mode 100644 node_modules/openai/lib/Util.js.map create mode 100644 node_modules/openai/lib/Util.mjs create mode 100644 node_modules/openai/lib/Util.mjs.map create mode 100644 node_modules/openai/lib/chatCompletionUtils.d.ts create mode 100644 node_modules/openai/lib/chatCompletionUtils.d.ts.map create mode 100644 node_modules/openai/lib/chatCompletionUtils.js create mode 100644 node_modules/openai/lib/chatCompletionUtils.js.map create mode 100644 node_modules/openai/lib/chatCompletionUtils.mjs create mode 100644 node_modules/openai/lib/chatCompletionUtils.mjs.map create mode 100644 node_modules/openai/lib/jsonschema.d.ts create mode 100644 node_modules/openai/lib/jsonschema.d.ts.map create mode 100644 node_modules/openai/lib/jsonschema.js create mode 100644 node_modules/openai/lib/jsonschema.js.map create mode 100644 node_modules/openai/lib/jsonschema.mjs create mode 100644 node_modules/openai/lib/jsonschema.mjs.map create mode 100644 node_modules/openai/lib/parser.d.ts create mode 100644 node_modules/openai/lib/parser.d.ts.map create mode 100644 node_modules/openai/lib/parser.js create mode 100644 node_modules/openai/lib/parser.js.map create mode 100644 node_modules/openai/lib/parser.mjs create mode 100644 node_modules/openai/lib/parser.mjs.map create mode 100644 node_modules/openai/package.json create mode 100644 node_modules/openai/pagination.d.ts create mode 100644 node_modules/openai/pagination.d.ts.map create mode 100644 node_modules/openai/pagination.js create mode 100644 node_modules/openai/pagination.js.map create mode 100644 node_modules/openai/pagination.mjs create mode 100644 node_modules/openai/pagination.mjs.map create mode 100644 node_modules/openai/resource.d.ts create mode 100644 node_modules/openai/resource.d.ts.map create mode 100644 node_modules/openai/resource.js create mode 100644 node_modules/openai/resource.js.map create mode 100644 
node_modules/openai/resource.mjs create mode 100644 node_modules/openai/resource.mjs.map create mode 100644 node_modules/openai/resources/audio/audio.d.ts create mode 100644 node_modules/openai/resources/audio/audio.d.ts.map create mode 100644 node_modules/openai/resources/audio/audio.js create mode 100644 node_modules/openai/resources/audio/audio.js.map create mode 100644 node_modules/openai/resources/audio/audio.mjs create mode 100644 node_modules/openai/resources/audio/audio.mjs.map create mode 100644 node_modules/openai/resources/audio/index.d.ts create mode 100644 node_modules/openai/resources/audio/index.d.ts.map create mode 100644 node_modules/openai/resources/audio/index.js create mode 100644 node_modules/openai/resources/audio/index.js.map create mode 100644 node_modules/openai/resources/audio/index.mjs create mode 100644 node_modules/openai/resources/audio/index.mjs.map create mode 100644 node_modules/openai/resources/audio/speech.d.ts create mode 100644 node_modules/openai/resources/audio/speech.d.ts.map create mode 100644 node_modules/openai/resources/audio/speech.js create mode 100644 node_modules/openai/resources/audio/speech.js.map create mode 100644 node_modules/openai/resources/audio/speech.mjs create mode 100644 node_modules/openai/resources/audio/speech.mjs.map create mode 100644 node_modules/openai/resources/audio/transcriptions.d.ts create mode 100644 node_modules/openai/resources/audio/transcriptions.d.ts.map create mode 100644 node_modules/openai/resources/audio/transcriptions.js create mode 100644 node_modules/openai/resources/audio/transcriptions.js.map create mode 100644 node_modules/openai/resources/audio/transcriptions.mjs create mode 100644 node_modules/openai/resources/audio/transcriptions.mjs.map create mode 100644 node_modules/openai/resources/audio/translations.d.ts create mode 100644 node_modules/openai/resources/audio/translations.d.ts.map create mode 100644 node_modules/openai/resources/audio/translations.js create mode 100644 node_modules/openai/resources/audio/translations.js.map create mode 100644 node_modules/openai/resources/audio/translations.mjs create mode 100644 node_modules/openai/resources/audio/translations.mjs.map create mode 100644 node_modules/openai/resources/batches.d.ts create mode 100644 node_modules/openai/resources/batches.d.ts.map create mode 100644 node_modules/openai/resources/batches.js create mode 100644 node_modules/openai/resources/batches.js.map create mode 100644 node_modules/openai/resources/batches.mjs create mode 100644 node_modules/openai/resources/batches.mjs.map create mode 100644 node_modules/openai/resources/beta/assistants.d.ts create mode 100644 node_modules/openai/resources/beta/assistants.d.ts.map create mode 100644 node_modules/openai/resources/beta/assistants.js create mode 100644 node_modules/openai/resources/beta/assistants.js.map create mode 100644 node_modules/openai/resources/beta/assistants.mjs create mode 100644 node_modules/openai/resources/beta/assistants.mjs.map create mode 100644 node_modules/openai/resources/beta/beta.d.ts create mode 100644 node_modules/openai/resources/beta/beta.d.ts.map create mode 100644 node_modules/openai/resources/beta/beta.js create mode 100644 node_modules/openai/resources/beta/beta.js.map create mode 100644 node_modules/openai/resources/beta/beta.mjs create mode 100644 node_modules/openai/resources/beta/beta.mjs.map create mode 100644 node_modules/openai/resources/beta/chat/chat.d.ts create mode 100644 node_modules/openai/resources/beta/chat/chat.d.ts.map create mode 
100644 node_modules/openai/resources/beta/chat/chat.js create mode 100644 node_modules/openai/resources/beta/chat/chat.js.map create mode 100644 node_modules/openai/resources/beta/chat/chat.mjs create mode 100644 node_modules/openai/resources/beta/chat/chat.mjs.map create mode 100644 node_modules/openai/resources/beta/chat/completions.d.ts create mode 100644 node_modules/openai/resources/beta/chat/completions.d.ts.map create mode 100644 node_modules/openai/resources/beta/chat/completions.js create mode 100644 node_modules/openai/resources/beta/chat/completions.js.map create mode 100644 node_modules/openai/resources/beta/chat/completions.mjs create mode 100644 node_modules/openai/resources/beta/chat/completions.mjs.map create mode 100644 node_modules/openai/resources/beta/chat/index.d.ts create mode 100644 node_modules/openai/resources/beta/chat/index.d.ts.map create mode 100644 node_modules/openai/resources/beta/chat/index.js create mode 100644 node_modules/openai/resources/beta/chat/index.js.map create mode 100644 node_modules/openai/resources/beta/chat/index.mjs create mode 100644 node_modules/openai/resources/beta/chat/index.mjs.map create mode 100644 node_modules/openai/resources/beta/index.d.ts create mode 100644 node_modules/openai/resources/beta/index.d.ts.map create mode 100644 node_modules/openai/resources/beta/index.js create mode 100644 node_modules/openai/resources/beta/index.js.map create mode 100644 node_modules/openai/resources/beta/index.mjs create mode 100644 node_modules/openai/resources/beta/index.mjs.map create mode 100644 node_modules/openai/resources/beta/threads/index.d.ts create mode 100644 node_modules/openai/resources/beta/threads/index.d.ts.map create mode 100644 node_modules/openai/resources/beta/threads/index.js create mode 100644 node_modules/openai/resources/beta/threads/index.js.map create mode 100644 node_modules/openai/resources/beta/threads/index.mjs create mode 100644 node_modules/openai/resources/beta/threads/index.mjs.map create mode 100644 node_modules/openai/resources/beta/threads/messages.d.ts create mode 100644 node_modules/openai/resources/beta/threads/messages.d.ts.map create mode 100644 node_modules/openai/resources/beta/threads/messages.js create mode 100644 node_modules/openai/resources/beta/threads/messages.js.map create mode 100644 node_modules/openai/resources/beta/threads/messages.mjs create mode 100644 node_modules/openai/resources/beta/threads/messages.mjs.map create mode 100644 node_modules/openai/resources/beta/threads/runs/index.d.ts create mode 100644 node_modules/openai/resources/beta/threads/runs/index.d.ts.map create mode 100644 node_modules/openai/resources/beta/threads/runs/index.js create mode 100644 node_modules/openai/resources/beta/threads/runs/index.js.map create mode 100644 node_modules/openai/resources/beta/threads/runs/index.mjs create mode 100644 node_modules/openai/resources/beta/threads/runs/index.mjs.map create mode 100644 node_modules/openai/resources/beta/threads/runs/runs.d.ts create mode 100644 node_modules/openai/resources/beta/threads/runs/runs.d.ts.map create mode 100644 node_modules/openai/resources/beta/threads/runs/runs.js create mode 100644 node_modules/openai/resources/beta/threads/runs/runs.js.map create mode 100644 node_modules/openai/resources/beta/threads/runs/runs.mjs create mode 100644 node_modules/openai/resources/beta/threads/runs/runs.mjs.map create mode 100644 node_modules/openai/resources/beta/threads/runs/steps.d.ts create mode 100644 
node_modules/openai/resources/beta/threads/runs/steps.d.ts.map create mode 100644 node_modules/openai/resources/beta/threads/runs/steps.js create mode 100644 node_modules/openai/resources/beta/threads/runs/steps.js.map create mode 100644 node_modules/openai/resources/beta/threads/runs/steps.mjs create mode 100644 node_modules/openai/resources/beta/threads/runs/steps.mjs.map create mode 100644 node_modules/openai/resources/beta/threads/threads.d.ts create mode 100644 node_modules/openai/resources/beta/threads/threads.d.ts.map create mode 100644 node_modules/openai/resources/beta/threads/threads.js create mode 100644 node_modules/openai/resources/beta/threads/threads.js.map create mode 100644 node_modules/openai/resources/beta/threads/threads.mjs create mode 100644 node_modules/openai/resources/beta/threads/threads.mjs.map create mode 100644 node_modules/openai/resources/beta/vector-stores/file-batches.d.ts create mode 100644 node_modules/openai/resources/beta/vector-stores/file-batches.d.ts.map create mode 100644 node_modules/openai/resources/beta/vector-stores/file-batches.js create mode 100644 node_modules/openai/resources/beta/vector-stores/file-batches.js.map create mode 100644 node_modules/openai/resources/beta/vector-stores/file-batches.mjs create mode 100644 node_modules/openai/resources/beta/vector-stores/file-batches.mjs.map create mode 100644 node_modules/openai/resources/beta/vector-stores/files.d.ts create mode 100644 node_modules/openai/resources/beta/vector-stores/files.d.ts.map create mode 100644 node_modules/openai/resources/beta/vector-stores/files.js create mode 100644 node_modules/openai/resources/beta/vector-stores/files.js.map create mode 100644 node_modules/openai/resources/beta/vector-stores/files.mjs create mode 100644 node_modules/openai/resources/beta/vector-stores/files.mjs.map create mode 100644 node_modules/openai/resources/beta/vector-stores/index.d.ts create mode 100644 node_modules/openai/resources/beta/vector-stores/index.d.ts.map create mode 100644 node_modules/openai/resources/beta/vector-stores/index.js create mode 100644 node_modules/openai/resources/beta/vector-stores/index.js.map create mode 100644 node_modules/openai/resources/beta/vector-stores/index.mjs create mode 100644 node_modules/openai/resources/beta/vector-stores/index.mjs.map create mode 100644 node_modules/openai/resources/beta/vector-stores/vector-stores.d.ts create mode 100644 node_modules/openai/resources/beta/vector-stores/vector-stores.d.ts.map create mode 100644 node_modules/openai/resources/beta/vector-stores/vector-stores.js create mode 100644 node_modules/openai/resources/beta/vector-stores/vector-stores.js.map create mode 100644 node_modules/openai/resources/beta/vector-stores/vector-stores.mjs create mode 100644 node_modules/openai/resources/beta/vector-stores/vector-stores.mjs.map create mode 100644 node_modules/openai/resources/chat/chat.d.ts create mode 100644 node_modules/openai/resources/chat/chat.d.ts.map create mode 100644 node_modules/openai/resources/chat/chat.js create mode 100644 node_modules/openai/resources/chat/chat.js.map create mode 100644 node_modules/openai/resources/chat/chat.mjs create mode 100644 node_modules/openai/resources/chat/chat.mjs.map create mode 100644 node_modules/openai/resources/chat/completions.d.ts create mode 100644 node_modules/openai/resources/chat/completions.d.ts.map create mode 100644 node_modules/openai/resources/chat/completions.js create mode 100644 node_modules/openai/resources/chat/completions.js.map create mode 100644 
node_modules/openai/resources/chat/completions.mjs create mode 100644 node_modules/openai/resources/chat/completions.mjs.map create mode 100644 node_modules/openai/resources/chat/index.d.ts create mode 100644 node_modules/openai/resources/chat/index.d.ts.map create mode 100644 node_modules/openai/resources/chat/index.js create mode 100644 node_modules/openai/resources/chat/index.js.map create mode 100644 node_modules/openai/resources/chat/index.mjs create mode 100644 node_modules/openai/resources/chat/index.mjs.map create mode 100644 node_modules/openai/resources/completions.d.ts create mode 100644 node_modules/openai/resources/completions.d.ts.map create mode 100644 node_modules/openai/resources/completions.js create mode 100644 node_modules/openai/resources/completions.js.map create mode 100644 node_modules/openai/resources/completions.mjs create mode 100644 node_modules/openai/resources/completions.mjs.map create mode 100644 node_modules/openai/resources/embeddings.d.ts create mode 100644 node_modules/openai/resources/embeddings.d.ts.map create mode 100644 node_modules/openai/resources/embeddings.js create mode 100644 node_modules/openai/resources/embeddings.js.map create mode 100644 node_modules/openai/resources/embeddings.mjs create mode 100644 node_modules/openai/resources/embeddings.mjs.map create mode 100644 node_modules/openai/resources/files.d.ts create mode 100644 node_modules/openai/resources/files.d.ts.map create mode 100644 node_modules/openai/resources/files.js create mode 100644 node_modules/openai/resources/files.js.map create mode 100644 node_modules/openai/resources/files.mjs create mode 100644 node_modules/openai/resources/files.mjs.map create mode 100644 node_modules/openai/resources/fine-tuning/fine-tuning.d.ts create mode 100644 node_modules/openai/resources/fine-tuning/fine-tuning.d.ts.map create mode 100644 node_modules/openai/resources/fine-tuning/fine-tuning.js create mode 100644 node_modules/openai/resources/fine-tuning/fine-tuning.js.map create mode 100644 node_modules/openai/resources/fine-tuning/fine-tuning.mjs create mode 100644 node_modules/openai/resources/fine-tuning/fine-tuning.mjs.map create mode 100644 node_modules/openai/resources/fine-tuning/index.d.ts create mode 100644 node_modules/openai/resources/fine-tuning/index.d.ts.map create mode 100644 node_modules/openai/resources/fine-tuning/index.js create mode 100644 node_modules/openai/resources/fine-tuning/index.js.map create mode 100644 node_modules/openai/resources/fine-tuning/index.mjs create mode 100644 node_modules/openai/resources/fine-tuning/index.mjs.map create mode 100644 node_modules/openai/resources/fine-tuning/jobs/checkpoints.d.ts create mode 100644 node_modules/openai/resources/fine-tuning/jobs/checkpoints.d.ts.map create mode 100644 node_modules/openai/resources/fine-tuning/jobs/checkpoints.js create mode 100644 node_modules/openai/resources/fine-tuning/jobs/checkpoints.js.map create mode 100644 node_modules/openai/resources/fine-tuning/jobs/checkpoints.mjs create mode 100644 node_modules/openai/resources/fine-tuning/jobs/checkpoints.mjs.map create mode 100644 node_modules/openai/resources/fine-tuning/jobs/index.d.ts create mode 100644 node_modules/openai/resources/fine-tuning/jobs/index.d.ts.map create mode 100644 node_modules/openai/resources/fine-tuning/jobs/index.js create mode 100644 node_modules/openai/resources/fine-tuning/jobs/index.js.map create mode 100644 node_modules/openai/resources/fine-tuning/jobs/index.mjs create mode 100644 
node_modules/openai/resources/fine-tuning/jobs/index.mjs.map create mode 100644 node_modules/openai/resources/fine-tuning/jobs/jobs.d.ts create mode 100644 node_modules/openai/resources/fine-tuning/jobs/jobs.d.ts.map create mode 100644 node_modules/openai/resources/fine-tuning/jobs/jobs.js create mode 100644 node_modules/openai/resources/fine-tuning/jobs/jobs.js.map create mode 100644 node_modules/openai/resources/fine-tuning/jobs/jobs.mjs create mode 100644 node_modules/openai/resources/fine-tuning/jobs/jobs.mjs.map create mode 100644 node_modules/openai/resources/images.d.ts create mode 100644 node_modules/openai/resources/images.d.ts.map create mode 100644 node_modules/openai/resources/images.js create mode 100644 node_modules/openai/resources/images.js.map create mode 100644 node_modules/openai/resources/images.mjs create mode 100644 node_modules/openai/resources/images.mjs.map create mode 100644 node_modules/openai/resources/index.d.ts create mode 100644 node_modules/openai/resources/index.d.ts.map create mode 100644 node_modules/openai/resources/index.js create mode 100644 node_modules/openai/resources/index.js.map create mode 100644 node_modules/openai/resources/index.mjs create mode 100644 node_modules/openai/resources/index.mjs.map create mode 100644 node_modules/openai/resources/models.d.ts create mode 100644 node_modules/openai/resources/models.d.ts.map create mode 100644 node_modules/openai/resources/models.js create mode 100644 node_modules/openai/resources/models.js.map create mode 100644 node_modules/openai/resources/models.mjs create mode 100644 node_modules/openai/resources/models.mjs.map create mode 100644 node_modules/openai/resources/moderations.d.ts create mode 100644 node_modules/openai/resources/moderations.d.ts.map create mode 100644 node_modules/openai/resources/moderations.js create mode 100644 node_modules/openai/resources/moderations.js.map create mode 100644 node_modules/openai/resources/moderations.mjs create mode 100644 node_modules/openai/resources/moderations.mjs.map create mode 100644 node_modules/openai/resources/shared.d.ts create mode 100644 node_modules/openai/resources/shared.d.ts.map create mode 100644 node_modules/openai/resources/shared.js create mode 100644 node_modules/openai/resources/shared.js.map create mode 100644 node_modules/openai/resources/shared.mjs create mode 100644 node_modules/openai/resources/shared.mjs.map create mode 100644 node_modules/openai/resources/uploads/index.d.ts create mode 100644 node_modules/openai/resources/uploads/index.d.ts.map create mode 100644 node_modules/openai/resources/uploads/index.js create mode 100644 node_modules/openai/resources/uploads/index.js.map create mode 100644 node_modules/openai/resources/uploads/index.mjs create mode 100644 node_modules/openai/resources/uploads/index.mjs.map create mode 100644 node_modules/openai/resources/uploads/parts.d.ts create mode 100644 node_modules/openai/resources/uploads/parts.d.ts.map create mode 100644 node_modules/openai/resources/uploads/parts.js create mode 100644 node_modules/openai/resources/uploads/parts.js.map create mode 100644 node_modules/openai/resources/uploads/parts.mjs create mode 100644 node_modules/openai/resources/uploads/parts.mjs.map create mode 100644 node_modules/openai/resources/uploads/uploads.d.ts create mode 100644 node_modules/openai/resources/uploads/uploads.d.ts.map create mode 100644 node_modules/openai/resources/uploads/uploads.js create mode 100644 node_modules/openai/resources/uploads/uploads.js.map create mode 100644 
node_modules/openai/resources/uploads/uploads.mjs create mode 100644 node_modules/openai/resources/uploads/uploads.mjs.map create mode 100644 node_modules/openai/shims/node.d.ts create mode 100644 node_modules/openai/shims/node.d.ts.map create mode 100644 node_modules/openai/shims/node.js create mode 100644 node_modules/openai/shims/node.js.map create mode 100644 node_modules/openai/shims/node.mjs create mode 100644 node_modules/openai/shims/node.mjs.map create mode 100644 node_modules/openai/shims/web.d.ts create mode 100644 node_modules/openai/shims/web.d.ts.map create mode 100644 node_modules/openai/shims/web.js create mode 100644 node_modules/openai/shims/web.js.map create mode 100644 node_modules/openai/shims/web.mjs create mode 100644 node_modules/openai/shims/web.mjs.map create mode 100644 node_modules/openai/src/_shims/MultipartBody.ts create mode 100644 node_modules/openai/src/_shims/README.md create mode 100644 node_modules/openai/src/_shims/auto/runtime-bun.ts create mode 100644 node_modules/openai/src/_shims/auto/runtime-node.ts create mode 100644 node_modules/openai/src/_shims/auto/runtime.ts create mode 100644 node_modules/openai/src/_shims/auto/types-node.ts create mode 100644 node_modules/openai/src/_shims/auto/types.d.ts create mode 100644 node_modules/openai/src/_shims/auto/types.js create mode 100644 node_modules/openai/src/_shims/auto/types.mjs create mode 100644 node_modules/openai/src/_shims/bun-runtime.ts create mode 100644 node_modules/openai/src/_shims/index.d.ts create mode 100644 node_modules/openai/src/_shims/index.js create mode 100644 node_modules/openai/src/_shims/index.mjs create mode 100644 node_modules/openai/src/_shims/manual-types.d.ts create mode 100644 node_modules/openai/src/_shims/manual-types.js create mode 100644 node_modules/openai/src/_shims/manual-types.mjs create mode 100644 node_modules/openai/src/_shims/node-runtime.ts create mode 100644 node_modules/openai/src/_shims/node-types.d.ts create mode 100644 node_modules/openai/src/_shims/node-types.js create mode 100644 node_modules/openai/src/_shims/node-types.mjs create mode 100644 node_modules/openai/src/_shims/registry.ts create mode 100644 node_modules/openai/src/_shims/web-runtime.ts create mode 100644 node_modules/openai/src/_shims/web-types.d.ts create mode 100644 node_modules/openai/src/_shims/web-types.js create mode 100644 node_modules/openai/src/_shims/web-types.mjs create mode 100644 node_modules/openai/src/_vendor/partial-json-parser/README.md create mode 100644 node_modules/openai/src/_vendor/partial-json-parser/parser.ts create mode 100644 node_modules/openai/src/_vendor/zod-to-json-schema/LICENSE create mode 100644 node_modules/openai/src/_vendor/zod-to-json-schema/Options.ts create mode 100644 node_modules/openai/src/_vendor/zod-to-json-schema/README.md create mode 100644 node_modules/openai/src/_vendor/zod-to-json-schema/Refs.ts create mode 100644 node_modules/openai/src/_vendor/zod-to-json-schema/errorMessages.ts create mode 100644 node_modules/openai/src/_vendor/zod-to-json-schema/index.ts create mode 100644 node_modules/openai/src/_vendor/zod-to-json-schema/parseDef.ts create mode 100644 node_modules/openai/src/_vendor/zod-to-json-schema/parsers/any.ts create mode 100644 node_modules/openai/src/_vendor/zod-to-json-schema/parsers/array.ts create mode 100644 node_modules/openai/src/_vendor/zod-to-json-schema/parsers/bigint.ts create mode 100644 node_modules/openai/src/_vendor/zod-to-json-schema/parsers/boolean.ts create mode 100644 
node_modules/openai/src/_vendor/zod-to-json-schema/parsers/branded.ts create mode 100644 node_modules/openai/src/_vendor/zod-to-json-schema/parsers/catch.ts create mode 100644 node_modules/openai/src/_vendor/zod-to-json-schema/parsers/date.ts create mode 100644 node_modules/openai/src/_vendor/zod-to-json-schema/parsers/default.ts create mode 100644 node_modules/openai/src/_vendor/zod-to-json-schema/parsers/effects.ts create mode 100644 node_modules/openai/src/_vendor/zod-to-json-schema/parsers/enum.ts create mode 100644 node_modules/openai/src/_vendor/zod-to-json-schema/parsers/intersection.ts create mode 100644 node_modules/openai/src/_vendor/zod-to-json-schema/parsers/literal.ts create mode 100644 node_modules/openai/src/_vendor/zod-to-json-schema/parsers/map.ts create mode 100644 node_modules/openai/src/_vendor/zod-to-json-schema/parsers/nativeEnum.ts create mode 100644 node_modules/openai/src/_vendor/zod-to-json-schema/parsers/never.ts create mode 100644 node_modules/openai/src/_vendor/zod-to-json-schema/parsers/null.ts create mode 100644 node_modules/openai/src/_vendor/zod-to-json-schema/parsers/nullable.ts create mode 100644 node_modules/openai/src/_vendor/zod-to-json-schema/parsers/number.ts create mode 100644 node_modules/openai/src/_vendor/zod-to-json-schema/parsers/object.ts create mode 100644 node_modules/openai/src/_vendor/zod-to-json-schema/parsers/optional.ts create mode 100644 node_modules/openai/src/_vendor/zod-to-json-schema/parsers/pipeline.ts create mode 100644 node_modules/openai/src/_vendor/zod-to-json-schema/parsers/promise.ts create mode 100644 node_modules/openai/src/_vendor/zod-to-json-schema/parsers/readonly.ts create mode 100644 node_modules/openai/src/_vendor/zod-to-json-schema/parsers/record.ts create mode 100644 node_modules/openai/src/_vendor/zod-to-json-schema/parsers/set.ts create mode 100644 node_modules/openai/src/_vendor/zod-to-json-schema/parsers/string.ts create mode 100644 node_modules/openai/src/_vendor/zod-to-json-schema/parsers/tuple.ts create mode 100644 node_modules/openai/src/_vendor/zod-to-json-schema/parsers/undefined.ts create mode 100644 node_modules/openai/src/_vendor/zod-to-json-schema/parsers/union.ts create mode 100644 node_modules/openai/src/_vendor/zod-to-json-schema/parsers/unknown.ts create mode 100644 node_modules/openai/src/_vendor/zod-to-json-schema/util.ts create mode 100644 node_modules/openai/src/_vendor/zod-to-json-schema/zodToJsonSchema.ts create mode 100644 node_modules/openai/src/core.ts create mode 100644 node_modules/openai/src/error.ts create mode 100644 node_modules/openai/src/helpers/zod.ts create mode 100644 node_modules/openai/src/index.ts create mode 100644 node_modules/openai/src/internal/decoders/line.ts create mode 100644 node_modules/openai/src/internal/qs/LICENSE.md create mode 100644 node_modules/openai/src/internal/qs/README.md create mode 100644 node_modules/openai/src/internal/qs/formats.ts create mode 100644 node_modules/openai/src/internal/qs/index.ts create mode 100644 node_modules/openai/src/internal/qs/stringify.ts create mode 100644 node_modules/openai/src/internal/qs/types.ts create mode 100644 node_modules/openai/src/internal/qs/utils.ts create mode 100644 node_modules/openai/src/lib/.keep create mode 100644 node_modules/openai/src/lib/AbstractChatCompletionRunner.ts create mode 100644 node_modules/openai/src/lib/AssistantStream.ts create mode 100644 node_modules/openai/src/lib/ChatCompletionRunner.ts create mode 100644 node_modules/openai/src/lib/ChatCompletionStream.ts create mode 100644 
node_modules/openai/src/lib/ChatCompletionStreamingRunner.ts create mode 100644 node_modules/openai/src/lib/EventStream.ts create mode 100644 node_modules/openai/src/lib/RunnableFunction.ts create mode 100644 node_modules/openai/src/lib/Util.ts create mode 100644 node_modules/openai/src/lib/chatCompletionUtils.ts create mode 100644 node_modules/openai/src/lib/jsonschema.ts create mode 100644 node_modules/openai/src/lib/parser.ts create mode 100644 node_modules/openai/src/pagination.ts create mode 100644 node_modules/openai/src/resource.ts create mode 100644 node_modules/openai/src/resources/audio/audio.ts create mode 100644 node_modules/openai/src/resources/audio/index.ts create mode 100644 node_modules/openai/src/resources/audio/speech.ts create mode 100644 node_modules/openai/src/resources/audio/transcriptions.ts create mode 100644 node_modules/openai/src/resources/audio/translations.ts create mode 100644 node_modules/openai/src/resources/batches.ts create mode 100644 node_modules/openai/src/resources/beta/assistants.ts create mode 100644 node_modules/openai/src/resources/beta/beta.ts create mode 100644 node_modules/openai/src/resources/beta/chat/chat.ts create mode 100644 node_modules/openai/src/resources/beta/chat/completions.ts create mode 100644 node_modules/openai/src/resources/beta/chat/index.ts create mode 100644 node_modules/openai/src/resources/beta/index.ts create mode 100644 node_modules/openai/src/resources/beta/threads/index.ts create mode 100644 node_modules/openai/src/resources/beta/threads/messages.ts create mode 100644 node_modules/openai/src/resources/beta/threads/runs/index.ts create mode 100644 node_modules/openai/src/resources/beta/threads/runs/runs.ts create mode 100644 node_modules/openai/src/resources/beta/threads/runs/steps.ts create mode 100644 node_modules/openai/src/resources/beta/threads/threads.ts create mode 100644 node_modules/openai/src/resources/beta/vector-stores/file-batches.ts create mode 100644 node_modules/openai/src/resources/beta/vector-stores/files.ts create mode 100644 node_modules/openai/src/resources/beta/vector-stores/index.ts create mode 100644 node_modules/openai/src/resources/beta/vector-stores/vector-stores.ts create mode 100644 node_modules/openai/src/resources/chat/chat.ts create mode 100644 node_modules/openai/src/resources/chat/completions.ts create mode 100644 node_modules/openai/src/resources/chat/index.ts create mode 100644 node_modules/openai/src/resources/completions.ts create mode 100644 node_modules/openai/src/resources/embeddings.ts create mode 100644 node_modules/openai/src/resources/files.ts create mode 100644 node_modules/openai/src/resources/fine-tuning/fine-tuning.ts create mode 100644 node_modules/openai/src/resources/fine-tuning/index.ts create mode 100644 node_modules/openai/src/resources/fine-tuning/jobs/checkpoints.ts create mode 100644 node_modules/openai/src/resources/fine-tuning/jobs/index.ts create mode 100644 node_modules/openai/src/resources/fine-tuning/jobs/jobs.ts create mode 100644 node_modules/openai/src/resources/images.ts create mode 100644 node_modules/openai/src/resources/index.ts create mode 100644 node_modules/openai/src/resources/models.ts create mode 100644 node_modules/openai/src/resources/moderations.ts create mode 100644 node_modules/openai/src/resources/shared.ts create mode 100644 node_modules/openai/src/resources/uploads/index.ts create mode 100644 node_modules/openai/src/resources/uploads/parts.ts create mode 100644 node_modules/openai/src/resources/uploads/uploads.ts create mode 100644 
node_modules/openai/src/shims/node.ts create mode 100644 node_modules/openai/src/shims/web.ts create mode 100644 node_modules/openai/src/streaming.ts create mode 100644 node_modules/openai/src/tsconfig.json create mode 100644 node_modules/openai/src/uploads.ts create mode 100644 node_modules/openai/src/version.ts create mode 100644 node_modules/openai/streaming.d.ts create mode 100644 node_modules/openai/streaming.d.ts.map create mode 100644 node_modules/openai/streaming.js create mode 100644 node_modules/openai/streaming.js.map create mode 100644 node_modules/openai/streaming.mjs create mode 100644 node_modules/openai/streaming.mjs.map create mode 100644 node_modules/openai/uploads.d.ts create mode 100644 node_modules/openai/uploads.d.ts.map create mode 100644 node_modules/openai/uploads.js create mode 100644 node_modules/openai/uploads.js.map create mode 100644 node_modules/openai/uploads.mjs create mode 100644 node_modules/openai/uploads.mjs.map create mode 100644 node_modules/openai/version.d.ts create mode 100644 node_modules/openai/version.d.ts.map create mode 100644 node_modules/openai/version.js create mode 100644 node_modules/openai/version.js.map create mode 100644 node_modules/openai/version.mjs create mode 100644 node_modules/openai/version.mjs.map create mode 100644 node_modules/tr46/.npmignore create mode 100644 node_modules/tr46/index.js create mode 100644 node_modules/tr46/lib/.gitkeep create mode 100644 node_modules/tr46/lib/mappingTable.json create mode 100644 node_modules/tr46/package.json create mode 100644 node_modules/tunnel/.idea/encodings.xml create mode 100644 node_modules/tunnel/.idea/modules.xml create mode 100644 node_modules/tunnel/.idea/node-tunnel.iml create mode 100644 node_modules/tunnel/.idea/vcs.xml create mode 100644 node_modules/tunnel/.idea/workspace.xml create mode 100644 node_modules/tunnel/.travis.yml create mode 100644 node_modules/tunnel/CHANGELOG.md create mode 100644 node_modules/tunnel/LICENSE create mode 100644 node_modules/tunnel/README.md create mode 100644 node_modules/tunnel/index.js create mode 100644 node_modules/tunnel/lib/tunnel.js create mode 100644 node_modules/tunnel/package.json create mode 100644 node_modules/undici-types/README.md create mode 100644 node_modules/undici-types/agent.d.ts create mode 100644 node_modules/undici-types/api.d.ts create mode 100644 node_modules/undici-types/balanced-pool.d.ts create mode 100644 node_modules/undici-types/cache.d.ts create mode 100644 node_modules/undici-types/client.d.ts create mode 100644 node_modules/undici-types/connector.d.ts create mode 100644 node_modules/undici-types/content-type.d.ts create mode 100644 node_modules/undici-types/cookies.d.ts create mode 100644 node_modules/undici-types/diagnostics-channel.d.ts create mode 100644 node_modules/undici-types/dispatcher.d.ts create mode 100644 node_modules/undici-types/errors.d.ts create mode 100644 node_modules/undici-types/fetch.d.ts create mode 100644 node_modules/undici-types/file.d.ts create mode 100644 node_modules/undici-types/filereader.d.ts create mode 100644 node_modules/undici-types/formdata.d.ts create mode 100644 node_modules/undici-types/global-dispatcher.d.ts create mode 100644 node_modules/undici-types/global-origin.d.ts create mode 100644 node_modules/undici-types/handlers.d.ts create mode 100644 node_modules/undici-types/header.d.ts create mode 100644 node_modules/undici-types/index.d.ts create mode 100644 node_modules/undici-types/interceptors.d.ts create mode 100644 node_modules/undici-types/mock-agent.d.ts create 
mode 100644 node_modules/undici-types/mock-client.d.ts create mode 100644 node_modules/undici-types/mock-errors.d.ts create mode 100644 node_modules/undici-types/mock-interceptor.d.ts create mode 100644 node_modules/undici-types/mock-pool.d.ts create mode 100644 node_modules/undici-types/package.json create mode 100644 node_modules/undici-types/patch.d.ts create mode 100644 node_modules/undici-types/pool-stats.d.ts create mode 100644 node_modules/undici-types/pool.d.ts create mode 100644 node_modules/undici-types/proxy-agent.d.ts create mode 100644 node_modules/undici-types/readable.d.ts create mode 100644 node_modules/undici-types/webidl.d.ts create mode 100644 node_modules/undici-types/websocket.d.ts create mode 100644 node_modules/undici/LICENSE create mode 100644 node_modules/undici/README.md create mode 100644 node_modules/undici/docs/api/Agent.md create mode 100644 node_modules/undici/docs/api/BalancedPool.md create mode 100644 node_modules/undici/docs/api/CacheStorage.md create mode 100644 node_modules/undici/docs/api/Client.md create mode 100644 node_modules/undici/docs/api/Connector.md create mode 100644 node_modules/undici/docs/api/ContentType.md create mode 100644 node_modules/undici/docs/api/Cookies.md create mode 100644 node_modules/undici/docs/api/DiagnosticsChannel.md create mode 100644 node_modules/undici/docs/api/DispatchInterceptor.md create mode 100644 node_modules/undici/docs/api/Dispatcher.md create mode 100644 node_modules/undici/docs/api/Errors.md create mode 100644 node_modules/undici/docs/api/Fetch.md create mode 100644 node_modules/undici/docs/api/MockAgent.md create mode 100644 node_modules/undici/docs/api/MockClient.md create mode 100644 node_modules/undici/docs/api/MockErrors.md create mode 100644 node_modules/undici/docs/api/MockPool.md create mode 100644 node_modules/undici/docs/api/Pool.md create mode 100644 node_modules/undici/docs/api/PoolStats.md create mode 100644 node_modules/undici/docs/api/ProxyAgent.md create mode 100644 node_modules/undici/docs/api/RetryHandler.md create mode 100644 node_modules/undici/docs/api/WebSocket.md create mode 100644 node_modules/undici/docs/api/api-lifecycle.md create mode 100644 node_modules/undici/docs/assets/lifecycle-diagram.png create mode 100644 node_modules/undici/docs/best-practices/client-certificate.md create mode 100644 node_modules/undici/docs/best-practices/mocking-request.md create mode 100644 node_modules/undici/docs/best-practices/proxy.md create mode 100644 node_modules/undici/docs/best-practices/writing-tests.md create mode 100644 node_modules/undici/index-fetch.js create mode 100644 node_modules/undici/index.d.ts create mode 100644 node_modules/undici/index.js create mode 100644 node_modules/undici/lib/agent.js create mode 100644 node_modules/undici/lib/api/abort-signal.js create mode 100644 node_modules/undici/lib/api/api-connect.js create mode 100644 node_modules/undici/lib/api/api-pipeline.js create mode 100644 node_modules/undici/lib/api/api-request.js create mode 100644 node_modules/undici/lib/api/api-stream.js create mode 100644 node_modules/undici/lib/api/api-upgrade.js create mode 100644 node_modules/undici/lib/api/index.js create mode 100644 node_modules/undici/lib/api/readable.js create mode 100644 node_modules/undici/lib/api/util.js create mode 100644 node_modules/undici/lib/balanced-pool.js create mode 100644 node_modules/undici/lib/cache/cache.js create mode 100644 node_modules/undici/lib/cache/cachestorage.js create mode 100644 node_modules/undici/lib/cache/symbols.js create mode 100644 
node_modules/undici/lib/cache/util.js create mode 100644 node_modules/undici/lib/client.js create mode 100644 node_modules/undici/lib/compat/dispatcher-weakref.js create mode 100644 node_modules/undici/lib/cookies/constants.js create mode 100644 node_modules/undici/lib/cookies/index.js create mode 100644 node_modules/undici/lib/cookies/parse.js create mode 100644 node_modules/undici/lib/cookies/util.js create mode 100644 node_modules/undici/lib/core/connect.js create mode 100644 node_modules/undici/lib/core/constants.js create mode 100644 node_modules/undici/lib/core/errors.js create mode 100644 node_modules/undici/lib/core/request.js create mode 100644 node_modules/undici/lib/core/symbols.js create mode 100644 node_modules/undici/lib/core/util.js create mode 100644 node_modules/undici/lib/dispatcher-base.js create mode 100644 node_modules/undici/lib/dispatcher.js create mode 100644 node_modules/undici/lib/fetch/LICENSE create mode 100644 node_modules/undici/lib/fetch/body.js create mode 100644 node_modules/undici/lib/fetch/constants.js create mode 100644 node_modules/undici/lib/fetch/dataURL.js create mode 100644 node_modules/undici/lib/fetch/file.js create mode 100644 node_modules/undici/lib/fetch/formdata.js create mode 100644 node_modules/undici/lib/fetch/global.js create mode 100644 node_modules/undici/lib/fetch/headers.js create mode 100644 node_modules/undici/lib/fetch/index.js create mode 100644 node_modules/undici/lib/fetch/request.js create mode 100644 node_modules/undici/lib/fetch/response.js create mode 100644 node_modules/undici/lib/fetch/symbols.js create mode 100644 node_modules/undici/lib/fetch/util.js create mode 100644 node_modules/undici/lib/fetch/webidl.js create mode 100644 node_modules/undici/lib/fileapi/encoding.js create mode 100644 node_modules/undici/lib/fileapi/filereader.js create mode 100644 node_modules/undici/lib/fileapi/progressevent.js create mode 100644 node_modules/undici/lib/fileapi/symbols.js create mode 100644 node_modules/undici/lib/fileapi/util.js create mode 100644 node_modules/undici/lib/global.js create mode 100644 node_modules/undici/lib/handler/DecoratorHandler.js create mode 100644 node_modules/undici/lib/handler/RedirectHandler.js create mode 100644 node_modules/undici/lib/handler/RetryHandler.js create mode 100644 node_modules/undici/lib/interceptor/redirectInterceptor.js create mode 100644 node_modules/undici/lib/llhttp/constants.d.ts create mode 100644 node_modules/undici/lib/llhttp/constants.js create mode 100644 node_modules/undici/lib/llhttp/constants.js.map create mode 100644 node_modules/undici/lib/llhttp/llhttp-wasm.js create mode 100755 node_modules/undici/lib/llhttp/llhttp.wasm create mode 100644 node_modules/undici/lib/llhttp/llhttp_simd-wasm.js create mode 100755 node_modules/undici/lib/llhttp/llhttp_simd.wasm create mode 100644 node_modules/undici/lib/llhttp/utils.d.ts create mode 100644 node_modules/undici/lib/llhttp/utils.js create mode 100644 node_modules/undici/lib/llhttp/utils.js.map create mode 100644 node_modules/undici/lib/llhttp/wasm_build_env.txt create mode 100644 node_modules/undici/lib/mock/mock-agent.js create mode 100644 node_modules/undici/lib/mock/mock-client.js create mode 100644 node_modules/undici/lib/mock/mock-errors.js create mode 100644 node_modules/undici/lib/mock/mock-interceptor.js create mode 100644 node_modules/undici/lib/mock/mock-pool.js create mode 100644 node_modules/undici/lib/mock/mock-symbols.js create mode 100644 node_modules/undici/lib/mock/mock-utils.js create mode 100644 
node_modules/undici/lib/mock/pending-interceptors-formatter.js create mode 100644 node_modules/undici/lib/mock/pluralizer.js create mode 100644 node_modules/undici/lib/node/fixed-queue.js create mode 100644 node_modules/undici/lib/pool-base.js create mode 100644 node_modules/undici/lib/pool-stats.js create mode 100644 node_modules/undici/lib/pool.js create mode 100644 node_modules/undici/lib/proxy-agent.js create mode 100644 node_modules/undici/lib/timers.js create mode 100644 node_modules/undici/lib/websocket/connection.js create mode 100644 node_modules/undici/lib/websocket/constants.js create mode 100644 node_modules/undici/lib/websocket/events.js create mode 100644 node_modules/undici/lib/websocket/frame.js create mode 100644 node_modules/undici/lib/websocket/receiver.js create mode 100644 node_modules/undici/lib/websocket/symbols.js create mode 100644 node_modules/undici/lib/websocket/util.js create mode 100644 node_modules/undici/lib/websocket/websocket.js create mode 100644 node_modules/undici/package.json create mode 100644 node_modules/undici/types/README.md create mode 100644 node_modules/undici/types/agent.d.ts create mode 100644 node_modules/undici/types/api.d.ts create mode 100644 node_modules/undici/types/balanced-pool.d.ts create mode 100644 node_modules/undici/types/cache.d.ts create mode 100644 node_modules/undici/types/client.d.ts create mode 100644 node_modules/undici/types/connector.d.ts create mode 100644 node_modules/undici/types/content-type.d.ts create mode 100644 node_modules/undici/types/cookies.d.ts create mode 100644 node_modules/undici/types/diagnostics-channel.d.ts create mode 100644 node_modules/undici/types/dispatcher.d.ts create mode 100644 node_modules/undici/types/errors.d.ts create mode 100644 node_modules/undici/types/fetch.d.ts create mode 100644 node_modules/undici/types/file.d.ts create mode 100644 node_modules/undici/types/filereader.d.ts create mode 100644 node_modules/undici/types/formdata.d.ts create mode 100644 node_modules/undici/types/global-dispatcher.d.ts create mode 100644 node_modules/undici/types/global-origin.d.ts create mode 100644 node_modules/undici/types/handlers.d.ts create mode 100644 node_modules/undici/types/header.d.ts create mode 100644 node_modules/undici/types/index.d.ts create mode 100644 node_modules/undici/types/interceptors.d.ts create mode 100644 node_modules/undici/types/mock-agent.d.ts create mode 100644 node_modules/undici/types/mock-client.d.ts create mode 100644 node_modules/undici/types/mock-errors.d.ts create mode 100644 node_modules/undici/types/mock-interceptor.d.ts create mode 100644 node_modules/undici/types/mock-pool.d.ts create mode 100644 node_modules/undici/types/patch.d.ts create mode 100644 node_modules/undici/types/pool-stats.d.ts create mode 100644 node_modules/undici/types/pool.d.ts create mode 100644 node_modules/undici/types/proxy-agent.d.ts create mode 100644 node_modules/undici/types/readable.d.ts create mode 100644 node_modules/undici/types/retry-handler.d.ts create mode 100644 node_modules/undici/types/webidl.d.ts create mode 100644 node_modules/undici/types/websocket.d.ts create mode 100644 node_modules/universal-user-agent/LICENSE.md create mode 100644 node_modules/universal-user-agent/README.md create mode 100644 node_modules/universal-user-agent/dist-node/index.js create mode 100644 node_modules/universal-user-agent/dist-node/index.js.map create mode 100644 node_modules/universal-user-agent/dist-src/index.js create mode 100644 node_modules/universal-user-agent/dist-types/index.d.ts create 
mode 100644 node_modules/universal-user-agent/dist-web/index.js create mode 100644 node_modules/universal-user-agent/dist-web/index.js.map create mode 100644 node_modules/universal-user-agent/package.json create mode 100644 node_modules/web-streams-polyfill/LICENSE create mode 100644 node_modules/web-streams-polyfill/README.md create mode 100644 node_modules/web-streams-polyfill/dist/polyfill.es5.js create mode 100644 node_modules/web-streams-polyfill/dist/polyfill.js create mode 100644 node_modules/web-streams-polyfill/dist/ponyfill.es5.js create mode 100644 node_modules/web-streams-polyfill/dist/ponyfill.es5.mjs create mode 100644 node_modules/web-streams-polyfill/dist/ponyfill.js create mode 100644 node_modules/web-streams-polyfill/dist/ponyfill.mjs create mode 100644 node_modules/web-streams-polyfill/es5/package.json create mode 100644 node_modules/web-streams-polyfill/package.json create mode 100644 node_modules/web-streams-polyfill/polyfill/es5/package.json create mode 100644 node_modules/web-streams-polyfill/polyfill/package.json create mode 100644 node_modules/web-streams-polyfill/types/polyfill.d.ts create mode 100644 node_modules/web-streams-polyfill/types/ponyfill.d.ts create mode 100644 node_modules/web-streams-polyfill/types/tsdoc-metadata.json create mode 100644 node_modules/webidl-conversions/LICENSE.md create mode 100644 node_modules/webidl-conversions/README.md create mode 100644 node_modules/webidl-conversions/lib/index.js create mode 100644 node_modules/webidl-conversions/package.json create mode 100644 node_modules/whatwg-url/LICENSE.txt create mode 100644 node_modules/whatwg-url/README.md create mode 100644 node_modules/whatwg-url/lib/URL-impl.js create mode 100644 node_modules/whatwg-url/lib/URL.js create mode 100644 node_modules/whatwg-url/lib/public-api.js create mode 100644 node_modules/whatwg-url/lib/url-state-machine.js create mode 100644 node_modules/whatwg-url/lib/utils.js create mode 100644 node_modules/whatwg-url/package.json create mode 100644 node_modules/wrappy/LICENSE create mode 100644 node_modules/wrappy/README.md create mode 100644 node_modules/wrappy/package.json create mode 100644 node_modules/wrappy/wrappy.js create mode 100644 package-lock.json create mode 100644 test.js diff --git a/ai-agent.js b/ai-agent.js index b974d8c..3b9ce68 100644 --- a/ai-agent.js +++ b/ai-agent.js @@ -1,16 +1,13 @@ -import { - Configuration, - OpenAIApi, -} from 'openai'; +const OpenAI = require('openai'); async function createFailureRhyme(apiKey, failureDescription, user) { try { - const openai = new OpenAIApi(new Configuration({ - apiKey, - })); + const openai = new OpenAI({ + apiKey + }); - const response = await openai.createChatCompletion(getChatCompletionRequest(failureDescription, user)); - const rhyme = response.data.choices?.[0]?.message?.content + const response = await openai.chat.completions.create(getChatCompletionRequest(failureDescription, user)); + const rhyme = response.choices?.[0]?.message?.content return rhyme; } catch (error) { @@ -20,15 +17,17 @@ async function createFailureRhyme(apiKey, failureDescription, user) { } const getChatCompletionRequest = (failureDescription, user) => { - - const prompt = ` - You are a witted engineer and you need to create a rhyme about a failure that happened when running a GitHub Action on the current commit. - It has to be short and witted - no more then 4 lines. Dont insolt the engineer that caused it. Be funny. 
- The failtre is - ${failureDescription} and the user who casued it is - ${user}.`; - return { model : "gpt-4o", - messages, + messages: [{ + role: "system", + content: "You are a witty engineer who creates short, funny rhymes about GitHub Action failures." + }, { + role: "user", + content: `Create a witty rhyme (max 4 lines) about this GitHub Action failure. Be funny but don't insult the engineer. + Failure: ${failureDescription} + User: ${user}` + }], max_tokens: 100, n: 1, stream: false, diff --git a/index.js b/index.js index 1793a5f..96ed4a7 100644 --- a/index.js +++ b/index.js @@ -1,6 +1,6 @@ const core = require('@actions/core'); const github = require('@actions/github'); -const aiAgent = require('./aiAgent'); +const aiAgent = require('./ai-agent'); const failFailed = (err = `It's sad, so sad. It's a sad, sad situation 🎶`) => { core.setFailed(err); @@ -14,6 +14,12 @@ try { const user = core.getInput('user'); const ghToken = core.getInput('ghToken'); + + if (!openAIToken) { + return ` + Without a token, you're out of luck, + The gates to OpenAI are firmly stuck!`; + } const rhyme = await aiAgent.createFailureRhyme(openAIToken, failureDescription, user); await postRhymeToGitHub(rhyme, ghToken); @@ -22,7 +28,7 @@ try { } })() .then(() => { - console.log(`Finished runnning Loadmill Test Plan`); + console.log(`Finished runnning Failure Rhyme Action`); }); async function postRhymeToGitHub(rhyme, token) { diff --git a/node_modules/.bin/openai b/node_modules/.bin/openai new file mode 120000 index 0000000..d3dcb0b --- /dev/null +++ b/node_modules/.bin/openai @@ -0,0 +1 @@ +../openai/bin/cli \ No newline at end of file diff --git a/node_modules/.package-lock.json b/node_modules/.package-lock.json new file mode 100644 index 0000000..c54c3b9 --- /dev/null +++ b/node_modules/.package-lock.json @@ -0,0 +1,405 @@ +{ + "name": "failing-rhymes-action", + "version": "1.0.0", + "lockfileVersion": 2, + "requires": true, + "packages": { + "node_modules/@actions/core": { + "version": "1.11.1", + "license": "MIT", + "dependencies": { + "@actions/exec": "^1.1.1", + "@actions/http-client": "^2.0.1" + } + }, + "node_modules/@actions/exec": { + "version": "1.1.1", + "license": "MIT", + "dependencies": { + "@actions/io": "^1.0.1" + } + }, + "node_modules/@actions/github": { + "version": "5.1.1", + "license": "MIT", + "dependencies": { + "@actions/http-client": "^2.0.1", + "@octokit/core": "^3.6.0", + "@octokit/plugin-paginate-rest": "^2.17.0", + "@octokit/plugin-rest-endpoint-methods": "^5.13.0" + } + }, + "node_modules/@actions/http-client": { + "version": "2.2.3", + "license": "MIT", + "dependencies": { + "tunnel": "^0.0.6", + "undici": "^5.25.4" + } + }, + "node_modules/@actions/io": { + "version": "1.1.3", + "license": "MIT" + }, + "node_modules/@fastify/busboy": { + "version": "2.1.1", + "license": "MIT", + "engines": { + "node": ">=14" + } + }, + "node_modules/@octokit/auth-token": { + "version": "2.5.0", + "license": "MIT", + "dependencies": { + "@octokit/types": "^6.0.3" + } + }, + "node_modules/@octokit/core": { + "version": "3.6.0", + "license": "MIT", + "dependencies": { + "@octokit/auth-token": "^2.4.4", + "@octokit/graphql": "^4.5.8", + "@octokit/request": "^5.6.3", + "@octokit/request-error": "^2.0.5", + "@octokit/types": "^6.0.3", + "before-after-hook": "^2.2.0", + "universal-user-agent": "^6.0.0" + } + }, + "node_modules/@octokit/endpoint": { + "version": "6.0.12", + "license": "MIT", + "dependencies": { + "@octokit/types": "^6.0.3", + "is-plain-object": "^5.0.0", + "universal-user-agent": 
"^6.0.0" + } + }, + "node_modules/@octokit/graphql": { + "version": "4.8.0", + "license": "MIT", + "dependencies": { + "@octokit/request": "^5.6.0", + "@octokit/types": "^6.0.3", + "universal-user-agent": "^6.0.0" + } + }, + "node_modules/@octokit/openapi-types": { + "version": "12.11.0", + "license": "MIT" + }, + "node_modules/@octokit/plugin-paginate-rest": { + "version": "2.21.3", + "license": "MIT", + "dependencies": { + "@octokit/types": "^6.40.0" + }, + "peerDependencies": { + "@octokit/core": ">=2" + } + }, + "node_modules/@octokit/plugin-rest-endpoint-methods": { + "version": "5.16.2", + "license": "MIT", + "dependencies": { + "@octokit/types": "^6.39.0", + "deprecation": "^2.3.1" + }, + "peerDependencies": { + "@octokit/core": ">=3" + } + }, + "node_modules/@octokit/request": { + "version": "5.6.3", + "license": "MIT", + "dependencies": { + "@octokit/endpoint": "^6.0.1", + "@octokit/request-error": "^2.1.0", + "@octokit/types": "^6.16.1", + "is-plain-object": "^5.0.0", + "node-fetch": "^2.6.7", + "universal-user-agent": "^6.0.0" + } + }, + "node_modules/@octokit/request-error": { + "version": "2.1.0", + "license": "MIT", + "dependencies": { + "@octokit/types": "^6.0.3", + "deprecation": "^2.0.0", + "once": "^1.4.0" + } + }, + "node_modules/@octokit/types": { + "version": "6.41.0", + "license": "MIT", + "dependencies": { + "@octokit/openapi-types": "^12.11.0" + } + }, + "node_modules/@types/node": { + "version": "18.19.68", + "license": "MIT", + "dependencies": { + "undici-types": "~5.26.4" + } + }, + "node_modules/@types/node-fetch": { + "version": "2.6.12", + "license": "MIT", + "dependencies": { + "@types/node": "*", + "form-data": "^4.0.0" + } + }, + "node_modules/@types/node-fetch/node_modules/@types/node": { + "version": "22.10.2", + "license": "MIT", + "dependencies": { + "undici-types": "~6.20.0" + } + }, + "node_modules/@types/node-fetch/node_modules/undici-types": { + "version": "6.20.0", + "license": "MIT" + }, + "node_modules/abort-controller": { + "version": "3.0.0", + "license": "MIT", + "dependencies": { + "event-target-shim": "^5.0.0" + }, + "engines": { + "node": ">=6.5" + } + }, + "node_modules/agentkeepalive": { + "version": "4.5.0", + "license": "MIT", + "dependencies": { + "humanize-ms": "^1.2.1" + }, + "engines": { + "node": ">= 8.0.0" + } + }, + "node_modules/asynckit": { + "version": "0.4.0", + "license": "MIT" + }, + "node_modules/before-after-hook": { + "version": "2.2.3", + "license": "Apache-2.0" + }, + "node_modules/combined-stream": { + "version": "1.0.8", + "license": "MIT", + "dependencies": { + "delayed-stream": "~1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/delayed-stream": { + "version": "1.0.0", + "license": "MIT", + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/deprecation": { + "version": "2.3.1", + "license": "ISC" + }, + "node_modules/event-target-shim": { + "version": "5.0.1", + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/form-data": { + "version": "4.0.1", + "license": "MIT", + "dependencies": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.8", + "mime-types": "^2.1.12" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/form-data-encoder": { + "version": "1.7.2", + "license": "MIT" + }, + "node_modules/formdata-node": { + "version": "4.4.1", + "license": "MIT", + "dependencies": { + "node-domexception": "1.0.0", + "web-streams-polyfill": "4.0.0-beta.3" + }, + "engines": { + "node": ">= 12.20" + } + }, + "node_modules/humanize-ms": { + "version": "1.2.1", + 
"license": "MIT", + "dependencies": { + "ms": "^2.0.0" + } + }, + "node_modules/is-plain-object": { + "version": "5.0.0", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/mime-db": { + "version": "1.52.0", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mime-types": { + "version": "2.1.35", + "license": "MIT", + "dependencies": { + "mime-db": "1.52.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/ms": { + "version": "2.1.3", + "license": "MIT" + }, + "node_modules/node-domexception": { + "version": "1.0.0", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/jimmywarting" + }, + { + "type": "github", + "url": "https://paypal.me/jimmywarting" + } + ], + "license": "MIT", + "engines": { + "node": ">=10.5.0" + } + }, + "node_modules/node-fetch": { + "version": "2.7.0", + "license": "MIT", + "dependencies": { + "whatwg-url": "^5.0.0" + }, + "engines": { + "node": "4.x || >=6.0.0" + }, + "peerDependencies": { + "encoding": "^0.1.0" + }, + "peerDependenciesMeta": { + "encoding": { + "optional": true + } + } + }, + "node_modules/once": { + "version": "1.4.0", + "license": "ISC", + "dependencies": { + "wrappy": "1" + } + }, + "node_modules/openai": { + "version": "4.77.0", + "license": "Apache-2.0", + "dependencies": { + "@types/node": "^18.11.18", + "@types/node-fetch": "^2.6.4", + "abort-controller": "^3.0.0", + "agentkeepalive": "^4.2.1", + "form-data-encoder": "1.7.2", + "formdata-node": "^4.3.2", + "node-fetch": "^2.6.7" + }, + "bin": { + "openai": "bin/cli" + }, + "peerDependencies": { + "zod": "^3.23.8" + }, + "peerDependenciesMeta": { + "zod": { + "optional": true + } + } + }, + "node_modules/tr46": { + "version": "0.0.3", + "license": "MIT" + }, + "node_modules/tunnel": { + "version": "0.0.6", + "license": "MIT", + "engines": { + "node": ">=0.6.11 <=0.7.0 || >=0.7.3" + } + }, + "node_modules/undici": { + "version": "5.28.4", + "license": "MIT", + "dependencies": { + "@fastify/busboy": "^2.0.0" + }, + "engines": { + "node": ">=14.0" + } + }, + "node_modules/undici-types": { + "version": "5.26.5", + "license": "MIT" + }, + "node_modules/universal-user-agent": { + "version": "6.0.1", + "license": "ISC" + }, + "node_modules/web-streams-polyfill": { + "version": "4.0.0-beta.3", + "license": "MIT", + "engines": { + "node": ">= 14" + } + }, + "node_modules/webidl-conversions": { + "version": "3.0.1", + "license": "BSD-2-Clause" + }, + "node_modules/whatwg-url": { + "version": "5.0.0", + "license": "MIT", + "dependencies": { + "tr46": "~0.0.3", + "webidl-conversions": "^3.0.0" + } + }, + "node_modules/wrappy": { + "version": "1.0.2", + "license": "ISC" + } + } +} diff --git a/node_modules/.yarn-integrity b/node_modules/.yarn-integrity new file mode 100644 index 0000000..9ba9eb4 --- /dev/null +++ b/node_modules/.yarn-integrity @@ -0,0 +1,74 @@ +{ + "systemParams": "darwin-arm64-93", + "modulesFolders": [ + "node_modules" + ], + "flags": [], + "linkedModules": [], + "topLevelPatterns": [ + "@actions/core@^1.11.1", + "@actions/github@^5.1.1", + "@actions/http-client@^2.2.3", + "openai@^4.77.0" + ], + "lockfileEntries": { + "@actions/core@^1.11.1": "https://registry.yarnpkg.com/@actions/core/-/core-1.11.1.tgz#ae683aac5112438021588030efb53b1adb86f172", + "@actions/exec@^1.1.1": "https://registry.yarnpkg.com/@actions/exec/-/exec-1.1.1.tgz#2e43f28c54022537172819a7cf886c844221a611", + "@actions/github@^5.1.1": 
"https://registry.yarnpkg.com/@actions/github/-/github-5.1.1.tgz#40b9b9e1323a5efcf4ff7dadd33d8ea51651bbcb", + "@actions/http-client@^2.0.1": "https://registry.yarnpkg.com/@actions/http-client/-/http-client-2.2.3.tgz#31fc0b25c0e665754ed39a9f19a8611fc6dab674", + "@actions/http-client@^2.2.3": "https://registry.yarnpkg.com/@actions/http-client/-/http-client-2.2.3.tgz#31fc0b25c0e665754ed39a9f19a8611fc6dab674", + "@actions/io@^1.0.1": "https://registry.yarnpkg.com/@actions/io/-/io-1.1.3.tgz#4cdb6254da7962b07473ff5c335f3da485d94d71", + "@fastify/busboy@^2.0.0": "https://registry.yarnpkg.com/@fastify/busboy/-/busboy-2.1.1.tgz#b9da6a878a371829a0502c9b6c1c143ef6663f4d", + "@octokit/auth-token@^2.4.4": "https://registry.yarnpkg.com/@octokit/auth-token/-/auth-token-2.5.0.tgz#27c37ea26c205f28443402477ffd261311f21e36", + "@octokit/core@^3.6.0": "https://registry.yarnpkg.com/@octokit/core/-/core-3.6.0.tgz#3376cb9f3008d9b3d110370d90e0a1fcd5fe6085", + "@octokit/endpoint@^6.0.1": "https://registry.yarnpkg.com/@octokit/endpoint/-/endpoint-6.0.12.tgz#3b4d47a4b0e79b1027fb8d75d4221928b2d05658", + "@octokit/graphql@^4.5.8": "https://registry.yarnpkg.com/@octokit/graphql/-/graphql-4.8.0.tgz#664d9b11c0e12112cbf78e10f49a05959aa22cc3", + "@octokit/openapi-types@^12.11.0": "https://registry.yarnpkg.com/@octokit/openapi-types/-/openapi-types-12.11.0.tgz#da5638d64f2b919bca89ce6602d059f1b52d3ef0", + "@octokit/plugin-paginate-rest@^2.17.0": "https://registry.yarnpkg.com/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-2.21.3.tgz#7f12532797775640dbb8224da577da7dc210c87e", + "@octokit/plugin-rest-endpoint-methods@^5.13.0": "https://registry.yarnpkg.com/@octokit/plugin-rest-endpoint-methods/-/plugin-rest-endpoint-methods-5.16.2.tgz#7ee8bf586df97dd6868cf68f641354e908c25342", + "@octokit/request-error@^2.0.5": "https://registry.yarnpkg.com/@octokit/request-error/-/request-error-2.1.0.tgz#9e150357831bfc788d13a4fd4b1913d60c74d677", + "@octokit/request-error@^2.1.0": "https://registry.yarnpkg.com/@octokit/request-error/-/request-error-2.1.0.tgz#9e150357831bfc788d13a4fd4b1913d60c74d677", + "@octokit/request@^5.6.0": "https://registry.yarnpkg.com/@octokit/request/-/request-5.6.3.tgz#19a022515a5bba965ac06c9d1334514eb50c48b0", + "@octokit/request@^5.6.3": "https://registry.yarnpkg.com/@octokit/request/-/request-5.6.3.tgz#19a022515a5bba965ac06c9d1334514eb50c48b0", + "@octokit/types@^6.0.3": "https://registry.yarnpkg.com/@octokit/types/-/types-6.41.0.tgz#e58ef78d78596d2fb7df9c6259802464b5f84a04", + "@octokit/types@^6.16.1": "https://registry.yarnpkg.com/@octokit/types/-/types-6.41.0.tgz#e58ef78d78596d2fb7df9c6259802464b5f84a04", + "@octokit/types@^6.39.0": "https://registry.yarnpkg.com/@octokit/types/-/types-6.41.0.tgz#e58ef78d78596d2fb7df9c6259802464b5f84a04", + "@octokit/types@^6.40.0": "https://registry.yarnpkg.com/@octokit/types/-/types-6.41.0.tgz#e58ef78d78596d2fb7df9c6259802464b5f84a04", + "@types/node-fetch@^2.6.4": "https://registry.yarnpkg.com/@types/node-fetch/-/node-fetch-2.6.12.tgz#8ab5c3ef8330f13100a7479e2cd56d3386830a03", + "@types/node@*": "https://registry.yarnpkg.com/@types/node/-/node-22.10.2.tgz#a485426e6d1fdafc7b0d4c7b24e2c78182ddabb9", + "@types/node@^18.11.18": "https://registry.yarnpkg.com/@types/node/-/node-18.19.68.tgz#f4f10d9927a7eaf3568c46a6d739cc0967ccb701", + "abort-controller@^3.0.0": "https://registry.yarnpkg.com/abort-controller/-/abort-controller-3.0.0.tgz#eaf54d53b62bae4138e809ca225c8439a6efb392", + "agentkeepalive@^4.2.1": 
"https://registry.yarnpkg.com/agentkeepalive/-/agentkeepalive-4.5.0.tgz#2673ad1389b3c418c5a20c5d7364f93ca04be923", + "asynckit@^0.4.0": "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79", + "before-after-hook@^2.2.0": "https://registry.yarnpkg.com/before-after-hook/-/before-after-hook-2.2.3.tgz#c51e809c81a4e354084422b9b26bad88249c517c", + "combined-stream@^1.0.8": "https://registry.yarnpkg.com/combined-stream/-/combined-stream-1.0.8.tgz#c3d45a8b34fd730631a110a8a2520682b31d5a7f", + "delayed-stream@~1.0.0": "https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619", + "deprecation@^2.0.0": "https://registry.yarnpkg.com/deprecation/-/deprecation-2.3.1.tgz#6368cbdb40abf3373b525ac87e4a260c3a700919", + "deprecation@^2.3.1": "https://registry.yarnpkg.com/deprecation/-/deprecation-2.3.1.tgz#6368cbdb40abf3373b525ac87e4a260c3a700919", + "event-target-shim@^5.0.0": "https://registry.yarnpkg.com/event-target-shim/-/event-target-shim-5.0.1.tgz#5d4d3ebdf9583d63a5333ce2deb7480ab2b05789", + "form-data-encoder@1.7.2": "https://registry.yarnpkg.com/form-data-encoder/-/form-data-encoder-1.7.2.tgz#1f1ae3dccf58ed4690b86d87e4f57c654fbab040", + "form-data@^4.0.0": "https://registry.yarnpkg.com/form-data/-/form-data-4.0.1.tgz#ba1076daaaa5bfd7e99c1a6cb02aa0a5cff90d48", + "formdata-node@^4.3.2": "https://registry.yarnpkg.com/formdata-node/-/formdata-node-4.4.1.tgz#23f6a5cb9cb55315912cbec4ff7b0f59bbd191e2", + "humanize-ms@^1.2.1": "https://registry.yarnpkg.com/humanize-ms/-/humanize-ms-1.2.1.tgz#c46e3159a293f6b896da29316d8b6fe8bb79bbed", + "is-plain-object@^5.0.0": "https://registry.yarnpkg.com/is-plain-object/-/is-plain-object-5.0.0.tgz#4427f50ab3429e9025ea7d52e9043a9ef4159344", + "mime-db@1.52.0": "https://registry.yarnpkg.com/mime-db/-/mime-db-1.52.0.tgz#bbabcdc02859f4987301c856e3387ce5ec43bf70", + "mime-types@^2.1.12": "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.35.tgz#381a871b62a734450660ae3deee44813f70d959a", + "ms@^2.0.0": "https://registry.yarnpkg.com/ms/-/ms-2.1.3.tgz#574c8138ce1d2b5861f0b44579dbadd60c6615b2", + "node-domexception@1.0.0": "https://registry.yarnpkg.com/node-domexception/-/node-domexception-1.0.0.tgz#6888db46a1f71c0b76b3f7555016b63fe64766e5", + "node-fetch@^2.6.7": "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.7.0.tgz#d0f0fa6e3e2dc1d27efcd8ad99d550bda94d187d", + "once@^1.4.0": "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1", + "openai@^4.77.0": "https://registry.yarnpkg.com/openai/-/openai-4.77.0.tgz#228f2d43ffa79ae9d8b5d4155e965da82e5ac330", + "tr46@~0.0.3": "https://registry.yarnpkg.com/tr46/-/tr46-0.0.3.tgz#8184fd347dac9cdc185992f3a6622e14b9d9ab6a", + "tunnel@^0.0.6": "https://registry.yarnpkg.com/tunnel/-/tunnel-0.0.6.tgz#72f1314b34a5b192db012324df2cc587ca47f92c", + "undici-types@~5.26.4": "https://registry.yarnpkg.com/undici-types/-/undici-types-5.26.5.tgz#bcd539893d00b56e964fd2657a4866b221a65617", + "undici-types@~6.20.0": "https://registry.yarnpkg.com/undici-types/-/undici-types-6.20.0.tgz#8171bf22c1f588d1554d55bf204bc624af388433", + "undici@^5.25.4": "https://registry.yarnpkg.com/undici/-/undici-5.28.4.tgz#6b280408edb6a1a604a9b20340f45b422e373068", + "universal-user-agent@^6.0.0": "https://registry.yarnpkg.com/universal-user-agent/-/universal-user-agent-6.0.1.tgz#15f20f55da3c930c57bddbf1734c6654d5fd35aa", + "web-streams-polyfill@4.0.0-beta.3": 
"https://registry.yarnpkg.com/web-streams-polyfill/-/web-streams-polyfill-4.0.0-beta.3.tgz#2898486b74f5156095e473efe989dcf185047a38", + "webidl-conversions@^3.0.0": "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-3.0.1.tgz#24534275e2a7bc6be7bc86611cc16ae0a5654871", + "whatwg-url@^5.0.0": "https://registry.yarnpkg.com/whatwg-url/-/whatwg-url-5.0.0.tgz#966454e8765462e37644d3626f6742ce8b70965d", + "wrappy@1": "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f" + }, + "files": [], + "artifacts": {} +} \ No newline at end of file diff --git a/node_modules/@actions/core/LICENSE.md b/node_modules/@actions/core/LICENSE.md new file mode 100644 index 0000000..dbae2ed --- /dev/null +++ b/node_modules/@actions/core/LICENSE.md @@ -0,0 +1,9 @@ +The MIT License (MIT) + +Copyright 2019 GitHub + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. \ No newline at end of file diff --git a/node_modules/@actions/core/README.md b/node_modules/@actions/core/README.md new file mode 100644 index 0000000..ac8ced9 --- /dev/null +++ b/node_modules/@actions/core/README.md @@ -0,0 +1,486 @@ +# `@actions/core` + +> Core functions for setting results, logging, registering secrets and exporting variables across actions + +## Usage + +### Import the package + +```js +// javascript +const core = require('@actions/core'); + +// typescript +import * as core from '@actions/core'; +``` + +#### Inputs/Outputs + +Action inputs can be read with `getInput` which returns a `string` or `getBooleanInput` which parses a boolean based on the [yaml 1.2 specification](https://yaml.org/spec/1.2/spec.html#id2804923). If `required` set to be false, the input should have a default value in `action.yml`. + +Outputs can be set with `setOutput` which makes them available to be mapped into inputs of other actions to ensure they are decoupled. + +```js +const myInput = core.getInput('inputName', { required: true }); +const myBooleanInput = core.getBooleanInput('booleanInputName', { required: true }); +const myMultilineInput = core.getMultilineInput('multilineInputName', { required: true }); +core.setOutput('outputKey', 'outputVal'); +``` + +#### Exporting variables + +Since each step runs in a separate process, you can use `exportVariable` to add it to this step and future steps environment blocks. + +```js +core.exportVariable('envVar', 'Val'); +``` + +#### Setting a secret + +Setting a secret registers the secret with the runner to ensure it is masked in logs. 
+ +```js +core.setSecret('myPassword'); +``` + +#### PATH Manipulation + +To make a tool's path available in the path for the remainder of the job (without altering the machine or containers state), use `addPath`. The runner will prepend the path given to the jobs PATH. + +```js +core.addPath('/path/to/mytool'); +``` + +#### Exit codes + +You should use this library to set the failing exit code for your action. If status is not set and the script runs to completion, that will lead to a success. + +```js +const core = require('@actions/core'); + +try { + // Do stuff +} +catch (err) { + // setFailed logs the message and sets a failing exit code + core.setFailed(`Action failed with error ${err}`); +} +``` + +Note that `setNeutral` is not yet implemented in actions V2 but equivalent functionality is being planned. + +#### Logging + +Finally, this library provides some utilities for logging. Note that debug logging is hidden from the logs by default. This behavior can be toggled by enabling the [Step Debug Logs](../../docs/action-debugging.md#step-debug-logs). + +```js +const core = require('@actions/core'); + +const myInput = core.getInput('input'); +try { + core.debug('Inside try block'); + + if (!myInput) { + core.warning('myInput was not set'); + } + + if (core.isDebug()) { + // curl -v https://github.com + } else { + // curl https://github.com + } + + // Do stuff + core.info('Output to the actions build log') + + core.notice('This is a message that will also emit an annotation') +} +catch (err) { + core.error(`Error ${err}, action may still succeed though`); +} +``` + +This library can also wrap chunks of output in foldable groups. + +```js +const core = require('@actions/core') + +// Manually wrap output +core.startGroup('Do some function') +doSomeFunction() +core.endGroup() + +// Wrap an asynchronous function call +const result = await core.group('Do something async', async () => { + const response = await doSomeHTTPRequest() + return response +}) +``` + +#### Annotations + +This library has 3 methods that will produce [annotations](https://docs.github.com/en/rest/reference/checks#create-a-check-run). +```js +core.error('This is a bad error, action may still succeed though.') + +core.warning('Something went wrong, but it\'s not bad enough to fail the build.') + +core.notice('Something happened that you might want to know about.') +``` + +These will surface to the UI in the Actions page and on Pull Requests. They look something like this: + +![Annotations Image](../../docs/assets/annotations.png) + +These annotations can also be attached to particular lines and columns of your source files to show exactly where a problem is occuring. + +These options are: +```typescript +export interface AnnotationProperties { + /** + * A title for the annotation. + */ + title?: string + + /** + * The name of the file for which the annotation should be created. + */ + file?: string + + /** + * The start line for the annotation. + */ + startLine?: number + + /** + * The end line for the annotation. Defaults to `startLine` when `startLine` is provided. + */ + endLine?: number + + /** + * The start column for the annotation. Cannot be sent when `startLine` and `endLine` are different values. + */ + startColumn?: number + + /** + * The end column for the annotation. Cannot be sent when `startLine` and `endLine` are different values. + * Defaults to `startColumn` when `startColumn` is provided. 
+ */ + endColumn?: number +} +``` + +#### Styling output + +Colored output is supported in the Action logs via standard [ANSI escape codes](https://en.wikipedia.org/wiki/ANSI_escape_code). 3/4 bit, 8 bit and 24 bit colors are all supported. + +Foreground colors: + +```js +// 3/4 bit +core.info('\u001b[35mThis foreground will be magenta') + +// 8 bit +core.info('\u001b[38;5;6mThis foreground will be cyan') + +// 24 bit +core.info('\u001b[38;2;255;0;0mThis foreground will be bright red') +``` + +Background colors: + +```js +// 3/4 bit +core.info('\u001b[43mThis background will be yellow'); + +// 8 bit +core.info('\u001b[48;5;6mThis background will be cyan') + +// 24 bit +core.info('\u001b[48;2;255;0;0mThis background will be bright red') +``` + +Special styles: + +```js +core.info('\u001b[1mBold text') +core.info('\u001b[3mItalic text') +core.info('\u001b[4mUnderlined text') +``` + +ANSI escape codes can be combined with one another: + +```js +core.info('\u001b[31;46mRed foreground with a cyan background and \u001b[1mbold text at the end'); +``` + +> Note: Escape codes reset at the start of each line + +```js +core.info('\u001b[35mThis foreground will be magenta') +core.info('This foreground will reset to the default') +``` + +Manually typing escape codes can be a little difficult, but you can use third party modules such as [ansi-styles](https://github.com/chalk/ansi-styles). + +```js +const style = require('ansi-styles'); +core.info(style.color.ansi16m.hex('#abcdef') + 'Hello world!') +``` + +#### Action state + +You can use this library to save state and get state for sharing information between a given wrapper action: + +**action.yml**: + +```yaml +name: 'Wrapper action sample' +inputs: + name: + default: 'GitHub' +runs: + using: 'node12' + main: 'main.js' + post: 'cleanup.js' +``` + +In action's `main.js`: + +```js +const core = require('@actions/core'); + +core.saveState("pidToKill", 12345); +``` + +In action's `cleanup.js`: + +```js +const core = require('@actions/core'); + +var pid = core.getState("pidToKill"); + +process.kill(pid); +``` + +#### OIDC Token + +You can use these methods to interact with the GitHub OIDC provider and get a JWT ID token which would help to get access token from third party cloud providers. + +**Method Name**: getIDToken() + +**Inputs** + +audience : optional + +**Outputs** + +A [JWT](https://jwt.io/) ID Token + +In action's `main.ts`: +```js +const core = require('@actions/core'); +async function getIDTokenAction(): Promise { + + const audience = core.getInput('audience', {required: false}) + + const id_token1 = await core.getIDToken() // ID Token with default audience + const id_token2 = await core.getIDToken(audience) // ID token with custom audience + + // this id_token can be used to get access token from third party cloud providers +} +getIDTokenAction() +``` + +In action's `actions.yml`: + +```yaml +name: 'GetIDToken' +description: 'Get ID token from Github OIDC provider' +inputs: + audience: + description: 'Audience for which the ID token is intended for' + required: false +outputs: + id_token1: + description: 'ID token obtained from OIDC provider' + id_token2: + description: 'ID token obtained from OIDC provider' +runs: + using: 'node12' + main: 'dist/index.js' +``` + +#### Filesystem path helpers + +You can use these methods to manipulate file paths across operating systems. + +The `toPosixPath` function converts input paths to Posix-style (Linux) paths. +The `toWin32Path` function converts input paths to Windows-style paths. 
These +functions work independently of the underlying runner operating system. + +```js +toPosixPath('\\foo\\bar') // => /foo/bar +toWin32Path('/foo/bar') // => \foo\bar +``` + +The `toPlatformPath` function converts input paths to the expected value on the runner's operating system. + +```js +// On a Windows runner. +toPlatformPath('/foo/bar') // => \foo\bar + +// On a Linux runner. +toPlatformPath('\\foo\\bar') // => /foo/bar +``` + +#### Platform helper + +Provides shorthands for getting information about platform action is running on. + +```js +import { platform } from '@actions/core' + +/* equals to a call of os.platform() */ +platform.platform // 'win32' | 'darwin' | 'linux' | 'freebsd' | 'openbsd' | 'android' | 'cygwin' | 'sunos' + +/* equals to a call of os.arch() */ +platform.arch // 'x64' | 'arm' | 'arm64' | 'ia32' | 'mips' | 'mipsel' | 'ppc' | 'ppc64' | 'riscv64' | 's390' | 's390x' + +/* common shorthands for platform-specific logic */ +platform.isWindows // true +platform.isMacOS // false +platform.isLinux // false + +/* run platform-specific script to get more details about the exact platform, works on Windows, MacOS and Linux */ +const { + name, // Microsoft Windows 11 Enterprise + version, // 10.0.22621 +} = await platform.getDetails() +``` + +#### Populating job summary + +These methods can be used to populate a [job summary](https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary). A job summary is a buffer that can be added to throughout your job via `core.summary` methods. + +Job summaries when complete must be written to the summary buffer file via the `core.summary.write()` method. + +All methods except `addRaw()` utilize the `addRaw()` method to append to the buffer, followed by an EOL using the `addEOL()` method. + +```typescript + +// Write raw text, optionally add an EOL after the content, defaults to false +core.summary.addRaw('Some content here :speech_balloon:', true) +// Output: Some content here :speech_balloon:\n + +// Add an operating system-specific end-of-line marker +core.summary.addEOL() +// Output (POSIX): \n +// Output (Windows): \r\n + +// Add a codeblock with an optional language for syntax highlighting +core.summary.addCodeBlock('console.log(\'hello world\')', 'javascript') +// Output:
<pre lang="javascript"><code>console.log('hello world')</code></pre>
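+
+// A minimal companion sketch (assuming the language argument is optional):
+// omitting it should emit a plain code block without a lang attribute.
+core.summary.addCodeBlock('echo "hello"')
+// Output: <pre><code>echo "hello"</code></pre>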
+ +// Add a list, second parameter indicates if list is ordered, defaults to false +core.summary.addList(['item1','item2','item3'], true) +// Output:
<ol><li>item1</li><li>item2</li><li>item3</li></ol>
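+
+// A minimal companion sketch: with the ordered flag omitted (it defaults to
+// false, per the comment above), the same items should render as an unordered list.
+core.summary.addList(['item1','item2','item3'])
+// Output: <ul><li>item1</li><li>item2</li><li>item3</li></ul>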
+ +// Add a collapsible HTML details element +core.summary.addDetails('Label', 'Some detail that will be collapsed') +// Output:
<details><summary>Label</summary>Some detail that will be collapsed</details>
+ +// Add an image, image options parameter is optional, you can supply one of or both width and height in pixels +core.summary.addImage('example.png', 'alt description of img', {width: '100', height: '100'}) +// Output: alt description of img + +// Add an HTML section heading element, optionally pass a level that translates to 'hX' ie. h2. Defaults to h1 +core.summary.addHeading('My Heading', '2') +// Output:

<h2>My Heading</h2>

+ +// Add an HTML thematic break
+core.summary.addSeparator() +// Output: <hr>
+ +// Add an HTML line break
+core.summary.addBreak() +// Output: <br>
+ +// Add an HTML blockquote with an optional citation +core.summary.addQuote('To be or not to be', 'Shakespeare') +// Output:
<blockquote cite="Shakespeare">To be or not to be</blockquote>
+ +// Add an HTML anchor tag +core.summary.addLink('click here', 'https://github.com') +// Output: click here + +``` + +Tables are added using the `addTable()` method, and an array of `SummaryTableRow`. + +```typescript + +export type SummaryTableRow = (SummaryTableCell | string)[] + +export interface SummaryTableCell { + /** + * Cell content + */ + data: string + /** + * Render cell as header + * (optional) default: false + */ + header?: boolean + /** + * Number of columns the cell extends + * (optional) default: '1' + */ + colspan?: string + /** + * Number of rows the cell extends + * (optional) default: '1' + */ + rowspan?: string +} + +``` + +For example + +```typescript + +const tableData = [ + {data: 'Header1', header: true}, + {data: 'Header2', header: true}, + {data: 'Header3', header: true}, + {data: 'MyData1'}, + {data: 'MyData2'}, + {data: 'MyData3'} +] + +// Add an HTML table +core.summary.addTable([tableData]) +// Output:
<table><tr><th>Header1</th><th>Header2</th><th>Header3</th></tr><tr><td>MyData1</td><td>MyData2</td><td>MyData3</td></tr></table>
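+
+// A minimal companion sketch: because SummaryTableRow accepts plain strings as
+// well as SummaryTableCell objects, a header row plus string-only data rows can
+// be passed directly. Summary methods chain, and write() flushes the buffer to
+// the summary file (it returns a Promise, so await it in an async context).
+core.summary
+  .addTable([
+    [{data: 'File', header: true}, {data: 'Result', header: true}],
+    ['foo.test.js', 'Pass'],
+    ['bar.test.js', 'Fail']
+  ])
+  .write()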
+ +``` + +In addition to job summary content, there are utility functions for interfacing with the buffer. + +```typescript + +// Empties the summary buffer AND wipes the summary file on disk +core.summary.clear() + +// Returns the current summary buffer as a string +core.summary.stringify() + +// If the summary buffer is empty +core.summary.isEmptyBuffer() + +// Resets the summary buffer without writing to the summary file on disk +core.summary.emptyBuffer() + +// Writes text in the buffer to the summary buffer file and empties the buffer, optionally overwriting all existing content in the summary file with buffer contents. Defaults to false. +core.summary.write({overwrite: true}) +``` \ No newline at end of file diff --git a/node_modules/@actions/core/lib/command.d.ts b/node_modules/@actions/core/lib/command.d.ts new file mode 100644 index 0000000..53f8f4b --- /dev/null +++ b/node_modules/@actions/core/lib/command.d.ts @@ -0,0 +1,15 @@ +export interface CommandProperties { + [key: string]: any; +} +/** + * Commands + * + * Command Format: + * ::name key=value,key=value::message + * + * Examples: + * ::warning::This is the message + * ::set-env name=MY_VAR::some value + */ +export declare function issueCommand(command: string, properties: CommandProperties, message: any): void; +export declare function issue(name: string, message?: string): void; diff --git a/node_modules/@actions/core/lib/command.js b/node_modules/@actions/core/lib/command.js new file mode 100644 index 0000000..728a014 --- /dev/null +++ b/node_modules/@actions/core/lib/command.js @@ -0,0 +1,96 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.issue = exports.issueCommand = void 0; +const os = __importStar(require("os")); +const utils_1 = require("./utils"); +/** + * Commands + * + * Command Format: + * ::name key=value,key=value::message + * + * Examples: + * ::warning::This is the message + * ::set-env name=MY_VAR::some value + */ +function issueCommand(command, properties, message) { + const cmd = new Command(command, properties, message); + process.stdout.write(cmd.toString() + os.EOL); +} +exports.issueCommand = issueCommand; +function issue(name, message = '') { + issueCommand(name, {}, message); +} +exports.issue = issue; +const CMD_STRING = '::'; +class Command { + constructor(command, properties, message) { + if (!command) { + command = 'missing.command'; + } + this.command = command; + this.properties = properties; + this.message = message; + } + toString() { + let cmdStr = CMD_STRING + this.command; + if (this.properties && Object.keys(this.properties).length > 0) { + cmdStr += ' '; + let first = true; + for (const key in this.properties) { + if (this.properties.hasOwnProperty(key)) { + const val = this.properties[key]; + if (val) { + if (first) { + first = false; + } + else { + cmdStr += ','; + } + cmdStr += `${key}=${escapeProperty(val)}`; + } + } + } + } + cmdStr += `${CMD_STRING}${escapeData(this.message)}`; + return cmdStr; + } +} +function escapeData(s) { + return (0, utils_1.toCommandValue)(s) + .replace(/%/g, '%25') + .replace(/\r/g, '%0D') + .replace(/\n/g, '%0A'); +} +function escapeProperty(s) { + return (0, utils_1.toCommandValue)(s) + .replace(/%/g, '%25') + .replace(/\r/g, '%0D') + .replace(/\n/g, '%0A') + .replace(/:/g, '%3A') + .replace(/,/g, '%2C'); +} +//# sourceMappingURL=command.js.map \ No newline at end of file diff --git a/node_modules/@actions/core/lib/command.js.map b/node_modules/@actions/core/lib/command.js.map new file mode 100644 index 0000000..476bcf6 --- /dev/null +++ b/node_modules/@actions/core/lib/command.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"command.js","sourceRoot":"","sources":["../src/command.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,uCAAwB;AACxB,mCAAsC;AAWtC;;;;;;;;;GASG;AACH,SAAgB,YAAY,CAC1B,OAAe,EACf,UAA6B,EAC7B,OAAY;IAEZ,MAAM,GAAG,GAAG,IAAI,OAAO,CAAC,OAAO,EAAE,UAAU,EAAE,OAAO,CAAC,CAAA;IACrD,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,QAAQ,EAAE,GAAG,EAAE,CAAC,GAAG,CAAC,CAAA;AAC/C,CAAC;AAPD,oCAOC;AAED,SAAgB,KAAK,CAAC,IAAY,EAAE,OAAO,GAAG,EAAE;IAC9C,YAAY,CAAC,IAAI,EAAE,EAAE,EAAE,OAAO,CAAC,CAAA;AACjC,CAAC;AAFD,sBAEC;AAED,MAAM,UAAU,GAAG,IAAI,CAAA;AAEvB,MAAM,OAAO;IAKX,YAAY,OAAe,EAAE,UAA6B,EAAE,OAAe;QACzE,IAAI,CAAC,OAAO,EAAE;YACZ,OAAO,GAAG,iBAAiB,CAAA;SAC5B;QAED,IAAI,CAAC,OAAO,GAAG,OAAO,CAAA;QACtB,IAAI,CAAC,UAAU,GAAG,UAAU,CAAA;QAC5B,IAAI,CAAC,OAAO,GAAG,OAAO,CAAA;IACxB,CAAC;IAED,QAAQ;QACN,IAAI,MAAM,GAAG,UAAU,GAAG,IAAI,CAAC,OAAO,CAAA;QAEtC,IAAI,IAAI,CAAC,UAAU,IAAI,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC,MAAM,GAAG,CAAC,EAAE;YAC9D,MAAM,IAAI,GAAG,CAAA;YACb,IAAI,KAAK,GAAG,IAAI,CAAA;YAChB,KAAK,MAAM,GAAG,IAAI,IAAI,CAAC,UAAU,EAAE;gBACjC,IAAI,IAAI,CAAC,UAAU,CAAC,cAAc,CAAC,GAAG,CAAC,EAAE;oBACvC,MAAM,GAAG,GAAG,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,CAAA;oBAChC,IAAI,GAAG,EAAE;wBACP,IAAI,KAAK,EAAE;4BACT,KAAK,GAAG,KAAK,CAAA;yBACd;6BAAM;4BACL,MAAM,IAAI,GAAG,CAAA;yBACd;wBAED,MAAM,IAAI,GAAG,GAAG,IAAI,cAAc,CAAC,GAAG,CAAC,EAAE,CAAA;qBAC1C;iBACF;aACF;SACF;QAED,MAAM,IAAI,GAAG,UAAU,GAAG,UAAU,CAAC,IAAI,CAAC,OAAO,CAAC,EAAE,CAAA;QACpD,OAAO,MAAM,CAAA;IACf,CAAC;CACF;AAED,SAAS,UAAU,CAAC,CAAM;IACxB,OAAO,IAAA,sBAAc,EAAC,CAAC,CAAC;SACrB,OAAO,CAAC,IAAI,EAAE,KAAK,CAAC;SACpB,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC;SACrB,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC,CAAA;AAC1B,CAAC;AAED,SAAS,cAAc,CAAC,CAAM;IAC5B,OAAO,IAAA,sBAAc,EAAC,CAAC,CAAC;SACrB,OAAO,CAAC,IAAI,EAAE,KAAK,CAAC;SACpB,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC;SACrB,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC;SACrB,OAAO,CAAC,IAAI,EAAE,KAAK,CAAC;SACpB,OAAO,CAAC,IAAI,EAAE,KAAK,CAAC,CAAA;AACzB,CAAC"} \ No newline at end of file diff --git a/node_modules/@actions/core/lib/core.d.ts b/node_modules/@actions/core/lib/core.d.ts new file mode 100644 index 0000000..a0803d7 --- /dev/null +++ b/node_modules/@actions/core/lib/core.d.ts @@ -0,0 +1,202 @@ +/** + * Interface for getInput options + */ +export interface InputOptions { + /** Optional. Whether the input is required. If required and not present, will throw. Defaults to false */ + required?: boolean; + /** Optional. Whether leading/trailing whitespace will be trimmed for the input. Defaults to true */ + trimWhitespace?: boolean; +} +/** + * The code to exit an action + */ +export declare enum ExitCode { + /** + * A code indicating that the action was successful + */ + Success = 0, + /** + * A code indicating that the action was a failure + */ + Failure = 1 +} +/** + * Optional properties that can be sent with annotation commands (notice, error, and warning) + * See: https://docs.github.com/en/rest/reference/checks#create-a-check-run for more information about annotations. + */ +export interface AnnotationProperties { + /** + * A title for the annotation. + */ + title?: string; + /** + * The path of the file for which the annotation should be created. + */ + file?: string; + /** + * The start line for the annotation. + */ + startLine?: number; + /** + * The end line for the annotation. Defaults to `startLine` when `startLine` is provided. + */ + endLine?: number; + /** + * The start column for the annotation. Cannot be sent when `startLine` and `endLine` are different values. + */ + startColumn?: number; + /** + * The end column for the annotation. 
Cannot be sent when `startLine` and `endLine` are different values. + * Defaults to `startColumn` when `startColumn` is provided. + */ + endColumn?: number; +} +/** + * Sets env variable for this action and future actions in the job + * @param name the name of the variable to set + * @param val the value of the variable. Non-string values will be converted to a string via JSON.stringify + */ +export declare function exportVariable(name: string, val: any): void; +/** + * Registers a secret which will get masked from logs + * @param secret value of the secret + */ +export declare function setSecret(secret: string): void; +/** + * Prepends inputPath to the PATH (for this action and future actions) + * @param inputPath + */ +export declare function addPath(inputPath: string): void; +/** + * Gets the value of an input. + * Unless trimWhitespace is set to false in InputOptions, the value is also trimmed. + * Returns an empty string if the value is not defined. + * + * @param name name of the input to get + * @param options optional. See InputOptions. + * @returns string + */ +export declare function getInput(name: string, options?: InputOptions): string; +/** + * Gets the values of an multiline input. Each value is also trimmed. + * + * @param name name of the input to get + * @param options optional. See InputOptions. + * @returns string[] + * + */ +export declare function getMultilineInput(name: string, options?: InputOptions): string[]; +/** + * Gets the input value of the boolean type in the YAML 1.2 "core schema" specification. + * Support boolean input list: `true | True | TRUE | false | False | FALSE` . + * The return value is also in boolean type. + * ref: https://yaml.org/spec/1.2/spec.html#id2804923 + * + * @param name name of the input to get + * @param options optional. See InputOptions. + * @returns boolean + */ +export declare function getBooleanInput(name: string, options?: InputOptions): boolean; +/** + * Sets the value of an output. + * + * @param name name of the output to set + * @param value value to store. Non-string values will be converted to a string via JSON.stringify + */ +export declare function setOutput(name: string, value: any): void; +/** + * Enables or disables the echoing of commands into stdout for the rest of the step. + * Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set. + * + */ +export declare function setCommandEcho(enabled: boolean): void; +/** + * Sets the action status to failed. + * When the action exits it will be with an exit code of 1 + * @param message add error issue message + */ +export declare function setFailed(message: string | Error): void; +/** + * Gets whether Actions Step Debug is on or not + */ +export declare function isDebug(): boolean; +/** + * Writes debug message to user log + * @param message debug message + */ +export declare function debug(message: string): void; +/** + * Adds an error issue + * @param message error issue message. Errors will be converted to string via toString() + * @param properties optional properties to add to the annotation. + */ +export declare function error(message: string | Error, properties?: AnnotationProperties): void; +/** + * Adds a warning issue + * @param message warning issue message. Errors will be converted to string via toString() + * @param properties optional properties to add to the annotation. + */ +export declare function warning(message: string | Error, properties?: AnnotationProperties): void; +/** + * Adds a notice issue + * @param message notice issue message. 
Errors will be converted to string via toString() + * @param properties optional properties to add to the annotation. + */ +export declare function notice(message: string | Error, properties?: AnnotationProperties): void; +/** + * Writes info to log with console.log. + * @param message info message + */ +export declare function info(message: string): void; +/** + * Begin an output group. + * + * Output until the next `groupEnd` will be foldable in this group + * + * @param name The name of the output group + */ +export declare function startGroup(name: string): void; +/** + * End an output group. + */ +export declare function endGroup(): void; +/** + * Wrap an asynchronous function call in a group. + * + * Returns the same type as the function itself. + * + * @param name The name of the group + * @param fn The function to wrap in the group + */ +export declare function group(name: string, fn: () => Promise): Promise; +/** + * Saves state for current action, the state can only be retrieved by this action's post job execution. + * + * @param name name of the state to store + * @param value value to store. Non-string values will be converted to a string via JSON.stringify + */ +export declare function saveState(name: string, value: any): void; +/** + * Gets the value of an state set by this action's main execution. + * + * @param name name of the state to get + * @returns string + */ +export declare function getState(name: string): string; +export declare function getIDToken(aud?: string): Promise; +/** + * Summary exports + */ +export { summary } from './summary'; +/** + * @deprecated use core.summary + */ +export { markdownSummary } from './summary'; +/** + * Path exports + */ +export { toPosixPath, toWin32Path, toPlatformPath } from './path-utils'; +/** + * Platform utilities exports + */ +export * as platform from './platform'; diff --git a/node_modules/@actions/core/lib/core.js b/node_modules/@actions/core/lib/core.js new file mode 100644 index 0000000..8f85466 --- /dev/null +++ b/node_modules/@actions/core/lib/core.js @@ -0,0 +1,344 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.platform = exports.toPlatformPath = exports.toWin32Path = exports.toPosixPath = exports.markdownSummary = exports.summary = exports.getIDToken = exports.getState = exports.saveState = exports.group = exports.endGroup = exports.startGroup = exports.info = exports.notice = exports.warning = exports.error = exports.debug = exports.isDebug = exports.setFailed = exports.setCommandEcho = exports.setOutput = exports.getBooleanInput = exports.getMultilineInput = exports.getInput = exports.addPath = exports.setSecret = exports.exportVariable = exports.ExitCode = void 0; +const command_1 = require("./command"); +const file_command_1 = require("./file-command"); +const utils_1 = require("./utils"); +const os = __importStar(require("os")); +const path = __importStar(require("path")); +const oidc_utils_1 = require("./oidc-utils"); +/** + * The code to exit an action + */ +var ExitCode; +(function (ExitCode) { + /** + * A code indicating that the action was successful + */ + ExitCode[ExitCode["Success"] = 0] = "Success"; + /** + * A code indicating that the action was a failure + */ + ExitCode[ExitCode["Failure"] = 1] = "Failure"; +})(ExitCode || (exports.ExitCode = ExitCode = {})); +//----------------------------------------------------------------------- +// Variables +//----------------------------------------------------------------------- +/** + * Sets env variable for this action and future actions in the job + * @param name the name of the variable to set + * @param val the value of the variable. Non-string values will be converted to a string via JSON.stringify + */ +// eslint-disable-next-line @typescript-eslint/no-explicit-any +function exportVariable(name, val) { + const convertedVal = (0, utils_1.toCommandValue)(val); + process.env[name] = convertedVal; + const filePath = process.env['GITHUB_ENV'] || ''; + if (filePath) { + return (0, file_command_1.issueFileCommand)('ENV', (0, file_command_1.prepareKeyValueMessage)(name, val)); + } + (0, command_1.issueCommand)('set-env', { name }, convertedVal); +} +exports.exportVariable = exportVariable; +/** + * Registers a secret which will get masked from logs + * @param secret value of the secret + */ +function setSecret(secret) { + (0, command_1.issueCommand)('add-mask', {}, secret); +} +exports.setSecret = setSecret; +/** + * Prepends inputPath to the PATH (for this action and future actions) + * @param inputPath + */ +function addPath(inputPath) { + const filePath = process.env['GITHUB_PATH'] || ''; + if (filePath) { + (0, file_command_1.issueFileCommand)('PATH', inputPath); + } + else { + (0, command_1.issueCommand)('add-path', {}, inputPath); + } + process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`; +} +exports.addPath = addPath; +/** + * Gets the value of an input. + * Unless trimWhitespace is set to false in InputOptions, the value is also trimmed. + * Returns an empty string if the value is not defined. 
+ * + * @param name name of the input to get + * @param options optional. See InputOptions. + * @returns string + */ +function getInput(name, options) { + const val = process.env[`INPUT_${name.replace(/ /g, '_').toUpperCase()}`] || ''; + if (options && options.required && !val) { + throw new Error(`Input required and not supplied: ${name}`); + } + if (options && options.trimWhitespace === false) { + return val; + } + return val.trim(); +} +exports.getInput = getInput; +/** + * Gets the values of an multiline input. Each value is also trimmed. + * + * @param name name of the input to get + * @param options optional. See InputOptions. + * @returns string[] + * + */ +function getMultilineInput(name, options) { + const inputs = getInput(name, options) + .split('\n') + .filter(x => x !== ''); + if (options && options.trimWhitespace === false) { + return inputs; + } + return inputs.map(input => input.trim()); +} +exports.getMultilineInput = getMultilineInput; +/** + * Gets the input value of the boolean type in the YAML 1.2 "core schema" specification. + * Support boolean input list: `true | True | TRUE | false | False | FALSE` . + * The return value is also in boolean type. + * ref: https://yaml.org/spec/1.2/spec.html#id2804923 + * + * @param name name of the input to get + * @param options optional. See InputOptions. + * @returns boolean + */ +function getBooleanInput(name, options) { + const trueValue = ['true', 'True', 'TRUE']; + const falseValue = ['false', 'False', 'FALSE']; + const val = getInput(name, options); + if (trueValue.includes(val)) + return true; + if (falseValue.includes(val)) + return false; + throw new TypeError(`Input does not meet YAML 1.2 "Core Schema" specification: ${name}\n` + + `Support boolean input list: \`true | True | TRUE | false | False | FALSE\``); +} +exports.getBooleanInput = getBooleanInput; +/** + * Sets the value of an output. + * + * @param name name of the output to set + * @param value value to store. Non-string values will be converted to a string via JSON.stringify + */ +// eslint-disable-next-line @typescript-eslint/no-explicit-any +function setOutput(name, value) { + const filePath = process.env['GITHUB_OUTPUT'] || ''; + if (filePath) { + return (0, file_command_1.issueFileCommand)('OUTPUT', (0, file_command_1.prepareKeyValueMessage)(name, value)); + } + process.stdout.write(os.EOL); + (0, command_1.issueCommand)('set-output', { name }, (0, utils_1.toCommandValue)(value)); +} +exports.setOutput = setOutput; +/** + * Enables or disables the echoing of commands into stdout for the rest of the step. + * Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set. + * + */ +function setCommandEcho(enabled) { + (0, command_1.issue)('echo', enabled ? 'on' : 'off'); +} +exports.setCommandEcho = setCommandEcho; +//----------------------------------------------------------------------- +// Results +//----------------------------------------------------------------------- +/** + * Sets the action status to failed. 
+ * When the action exits it will be with an exit code of 1 + * @param message add error issue message + */ +function setFailed(message) { + process.exitCode = ExitCode.Failure; + error(message); +} +exports.setFailed = setFailed; +//----------------------------------------------------------------------- +// Logging Commands +//----------------------------------------------------------------------- +/** + * Gets whether Actions Step Debug is on or not + */ +function isDebug() { + return process.env['RUNNER_DEBUG'] === '1'; +} +exports.isDebug = isDebug; +/** + * Writes debug message to user log + * @param message debug message + */ +function debug(message) { + (0, command_1.issueCommand)('debug', {}, message); +} +exports.debug = debug; +/** + * Adds an error issue + * @param message error issue message. Errors will be converted to string via toString() + * @param properties optional properties to add to the annotation. + */ +function error(message, properties = {}) { + (0, command_1.issueCommand)('error', (0, utils_1.toCommandProperties)(properties), message instanceof Error ? message.toString() : message); +} +exports.error = error; +/** + * Adds a warning issue + * @param message warning issue message. Errors will be converted to string via toString() + * @param properties optional properties to add to the annotation. + */ +function warning(message, properties = {}) { + (0, command_1.issueCommand)('warning', (0, utils_1.toCommandProperties)(properties), message instanceof Error ? message.toString() : message); +} +exports.warning = warning; +/** + * Adds a notice issue + * @param message notice issue message. Errors will be converted to string via toString() + * @param properties optional properties to add to the annotation. + */ +function notice(message, properties = {}) { + (0, command_1.issueCommand)('notice', (0, utils_1.toCommandProperties)(properties), message instanceof Error ? message.toString() : message); +} +exports.notice = notice; +/** + * Writes info to log with console.log. + * @param message info message + */ +function info(message) { + process.stdout.write(message + os.EOL); +} +exports.info = info; +/** + * Begin an output group. + * + * Output until the next `groupEnd` will be foldable in this group + * + * @param name The name of the output group + */ +function startGroup(name) { + (0, command_1.issue)('group', name); +} +exports.startGroup = startGroup; +/** + * End an output group. + */ +function endGroup() { + (0, command_1.issue)('endgroup'); +} +exports.endGroup = endGroup; +/** + * Wrap an asynchronous function call in a group. + * + * Returns the same type as the function itself. + * + * @param name The name of the group + * @param fn The function to wrap in the group + */ +function group(name, fn) { + return __awaiter(this, void 0, void 0, function* () { + startGroup(name); + let result; + try { + result = yield fn(); + } + finally { + endGroup(); + } + return result; + }); +} +exports.group = group; +//----------------------------------------------------------------------- +// Wrapper action state +//----------------------------------------------------------------------- +/** + * Saves state for current action, the state can only be retrieved by this action's post job execution. + * + * @param name name of the state to store + * @param value value to store. 
Non-string values will be converted to a string via JSON.stringify + */ +// eslint-disable-next-line @typescript-eslint/no-explicit-any +function saveState(name, value) { + const filePath = process.env['GITHUB_STATE'] || ''; + if (filePath) { + return (0, file_command_1.issueFileCommand)('STATE', (0, file_command_1.prepareKeyValueMessage)(name, value)); + } + (0, command_1.issueCommand)('save-state', { name }, (0, utils_1.toCommandValue)(value)); +} +exports.saveState = saveState; +/** + * Gets the value of an state set by this action's main execution. + * + * @param name name of the state to get + * @returns string + */ +function getState(name) { + return process.env[`STATE_${name}`] || ''; +} +exports.getState = getState; +function getIDToken(aud) { + return __awaiter(this, void 0, void 0, function* () { + return yield oidc_utils_1.OidcClient.getIDToken(aud); + }); +} +exports.getIDToken = getIDToken; +/** + * Summary exports + */ +var summary_1 = require("./summary"); +Object.defineProperty(exports, "summary", { enumerable: true, get: function () { return summary_1.summary; } }); +/** + * @deprecated use core.summary + */ +var summary_2 = require("./summary"); +Object.defineProperty(exports, "markdownSummary", { enumerable: true, get: function () { return summary_2.markdownSummary; } }); +/** + * Path exports + */ +var path_utils_1 = require("./path-utils"); +Object.defineProperty(exports, "toPosixPath", { enumerable: true, get: function () { return path_utils_1.toPosixPath; } }); +Object.defineProperty(exports, "toWin32Path", { enumerable: true, get: function () { return path_utils_1.toWin32Path; } }); +Object.defineProperty(exports, "toPlatformPath", { enumerable: true, get: function () { return path_utils_1.toPlatformPath; } }); +/** + * Platform utilities exports + */ +exports.platform = __importStar(require("./platform")); +//# sourceMappingURL=core.js.map \ No newline at end of file diff --git a/node_modules/@actions/core/lib/core.js.map b/node_modules/@actions/core/lib/core.js.map new file mode 100644 index 0000000..1261718 --- /dev/null +++ b/node_modules/@actions/core/lib/core.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"core.js","sourceRoot":"","sources":["../src/core.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,uCAA6C;AAC7C,iDAAuE;AACvE,mCAA2D;AAE3D,uCAAwB;AACxB,2CAA4B;AAE5B,6CAAuC;AAavC;;GAEG;AACH,IAAY,QAUX;AAVD,WAAY,QAAQ;IAClB;;OAEG;IACH,6CAAW,CAAA;IAEX;;OAEG;IACH,6CAAW,CAAA;AACb,CAAC,EAVW,QAAQ,wBAAR,QAAQ,QAUnB;AAuCD,yEAAyE;AACzE,YAAY;AACZ,yEAAyE;AAEzE;;;;GAIG;AACH,8DAA8D;AAC9D,SAAgB,cAAc,CAAC,IAAY,EAAE,GAAQ;IACnD,MAAM,YAAY,GAAG,IAAA,sBAAc,EAAC,GAAG,CAAC,CAAA;IACxC,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,GAAG,YAAY,CAAA;IAEhC,MAAM,QAAQ,GAAG,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,IAAI,EAAE,CAAA;IAChD,IAAI,QAAQ,EAAE;QACZ,OAAO,IAAA,+BAAgB,EAAC,KAAK,EAAE,IAAA,qCAAsB,EAAC,IAAI,EAAE,GAAG,CAAC,CAAC,CAAA;KAClE;IAED,IAAA,sBAAY,EAAC,SAAS,EAAE,EAAC,IAAI,EAAC,EAAE,YAAY,CAAC,CAAA;AAC/C,CAAC;AAVD,wCAUC;AAED;;;GAGG;AACH,SAAgB,SAAS,CAAC,MAAc;IACtC,IAAA,sBAAY,EAAC,UAAU,EAAE,EAAE,EAAE,MAAM,CAAC,CAAA;AACtC,CAAC;AAFD,8BAEC;AAED;;;GAGG;AACH,SAAgB,OAAO,CAAC,SAAiB;IACvC,MAAM,QAAQ,GAAG,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,IAAI,EAAE,CAAA;IACjD,IAAI,QAAQ,EAAE;QACZ,IAAA,+BAAgB,EAAC,MAAM,EAAE,SAAS,CAAC,CAAA;KACpC;SAAM;QACL,IAAA,sBAAY,EAAC,UAAU,EAAE,EAAE,EAAE,SAAS,CAAC,CAAA;KACxC;IACD,OAAO,CAAC,GAAG,CAAC,MAAM,CAAC,GAAG,GAAG,SAAS,GAAG,IAAI,CAAC,SAAS,GAAG,OAAO,CAAC,GAAG,CAAC,MAAM,CAAC,EAAE,CAAA;AAC7E,CAAC;AARD,0BAQC;AAED;;;;;;;;GAQG;AACH,SAAgB,QAAQ,CAAC,IAAY,EAAE,OAAsB;IAC3D,MAAM,GAAG,GACP,OAAO,CAAC,GAAG,CAAC,SAAS,IAAI,CAAC,OAAO,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,WAAW,EAAE,EAAE,CAAC,IAAI,EAAE,CAAA;IACrE,IAAI,OAAO,IAAI,OAAO,CAAC,QAAQ,IAAI,CAAC,GAAG,EAAE;QACvC,MAAM,IAAI,KAAK,CAAC,oCAAoC,IAAI,EAAE,CAAC,CAAA;KAC5D;IAED,IAAI,OAAO,IAAI,OAAO,CAAC,cAAc,KAAK,KAAK,EAAE;QAC/C,OAAO,GAAG,CAAA;KACX;IAED,OAAO,GAAG,CAAC,IAAI,EAAE,CAAA;AACnB,CAAC;AAZD,4BAYC;AAED;;;;;;;GAOG;AACH,SAAgB,iBAAiB,CAC/B,IAAY,EACZ,OAAsB;IAEtB,MAAM,MAAM,GAAa,QAAQ,CAAC,IAAI,EAAE,OAAO,CAAC;SAC7C,KAAK,CAAC,IAAI,CAAC;SACX,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,KAAK,EAAE,CAAC,CAAA;IAExB,IAAI,OAAO,IAAI,OAAO,CAAC,cAAc,KAAK,KAAK,EAAE;QAC/C,OAAO,MAAM,CAAA;KACd;IAED,OAAO,MAAM,CAAC,GAAG,CAAC,KAAK,CAAC,EAAE,CAAC,KAAK,CAAC,IAAI,EAAE,CAAC,CAAA;AAC1C,CAAC;AAbD,8CAaC;AAED;;;;;;;;;GASG;AACH,SAAgB,eAAe,CAAC,IAAY,EAAE,OAAsB;IAClE,MAAM,SAAS,GAAG,CAAC,MAAM,EAAE,MAAM,EAAE,MAAM,CAAC,CAAA;IAC1C,MAAM,UAAU,GAAG,CAAC,OAAO,EAAE,OAAO,EAAE,OAAO,CAAC,CAAA;IAC9C,MAAM,GAAG,GAAG,QAAQ,CAAC,IAAI,EAAE,OAAO,CAAC,CAAA;IACnC,IAAI,SAAS,CAAC,QAAQ,CAAC,GAAG,CAAC;QAAE,OAAO,IAAI,CAAA;IACxC,IAAI,UAAU,CAAC,QAAQ,CAAC,GAAG,CAAC;QAAE,OAAO,KAAK,CAAA;IAC1C,MAAM,IAAI,SAAS,CACjB,6DAA6D,IAAI,IAAI;QACnE,4EAA4E,CAC/E,CAAA;AACH,CAAC;AAVD,0CAUC;AAED;;;;;GAKG;AACH,8DAA8D;AAC9D,SAAgB,SAAS,CAAC,IAAY,EAAE,KAAU;IAChD,MAAM,QAAQ,GAAG,OAAO,CAAC,GAAG,CAAC,eAAe,CAAC,IAAI,EAAE,CAAA;IACnD,IAAI,QAAQ,EAAE;QACZ,OAAO,IAAA,+BAAgB,EAAC,QAAQ,EAAE,IAAA,qCAAsB,EAAC,IAAI,EAAE,KAAK,CAAC,CAAC,CAAA;KACvE;IAED,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,EAAE,CAAC,GAAG,CAAC,CAAA;IAC5B,IAAA,sBAAY,EAAC,YAAY,EAAE,EAAC,IAAI,EAAC,EAAE,IAAA,sBAAc,EAAC,KAAK,CAAC,CAAC,CAAA;AAC3D,CAAC;AARD,8BAQC;AAED;;;;GAIG;AACH,SAAgB,cAAc,CAAC,OAAgB;IAC7C,IAAA,eAAK,EAAC,MAAM,EAAE,OAAO,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,KAAK,CAAC,CAAA;AACvC,CAAC;AAFD,wCAEC;AAED,yEAAyE;AACzE,UAAU;AACV,yEAAyE;AAEzE;;;;GAIG;AACH,SAAgB,SAAS,CAAC,OAAuB;IAC/C,OAAO,CAAC,QAAQ,GAAG,QAAQ,CAAC,OAAO,CAAA;IAEnC,KAAK,CAAC,OAAO,CAAC,CAAA;AAChB,CAAC;AAJD,8BAIC;AAED,yEAAyE;AACzE,mBAAmB;AACnB,yEAAyE;AAEzE;;GAEG;AACH,SAAgB,OAAO;IACrB,OAAO,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,KAAK,GAAG,CAAA;AAC5C,CAAC;AAFD,0BAEC;AAED;;;GAGG;AACH,SAAgB,KAAK,CAAC,OAAe;IACnC,IAAA,sBAAY,EAAC,OAAO,EAAE,EAAE,EAAE,OAAO,CAAC,CAAA;AACpC,CAAC;AAFD,sBAEC;AAED;
;;;GAIG;AACH,SAAgB,KAAK,CACnB,OAAuB,EACvB,aAAmC,EAAE;IAErC,IAAA,sBAAY,EACV,OAAO,EACP,IAAA,2BAAmB,EAAC,UAAU,CAAC,EAC/B,OAAO,YAAY,KAAK,CAAC,CAAC,CAAC,OAAO,CAAC,QAAQ,EAAE,CAAC,CAAC,CAAC,OAAO,CACxD,CAAA;AACH,CAAC;AATD,sBASC;AAED;;;;GAIG;AACH,SAAgB,OAAO,CACrB,OAAuB,EACvB,aAAmC,EAAE;IAErC,IAAA,sBAAY,EACV,SAAS,EACT,IAAA,2BAAmB,EAAC,UAAU,CAAC,EAC/B,OAAO,YAAY,KAAK,CAAC,CAAC,CAAC,OAAO,CAAC,QAAQ,EAAE,CAAC,CAAC,CAAC,OAAO,CACxD,CAAA;AACH,CAAC;AATD,0BASC;AAED;;;;GAIG;AACH,SAAgB,MAAM,CACpB,OAAuB,EACvB,aAAmC,EAAE;IAErC,IAAA,sBAAY,EACV,QAAQ,EACR,IAAA,2BAAmB,EAAC,UAAU,CAAC,EAC/B,OAAO,YAAY,KAAK,CAAC,CAAC,CAAC,OAAO,CAAC,QAAQ,EAAE,CAAC,CAAC,CAAC,OAAO,CACxD,CAAA;AACH,CAAC;AATD,wBASC;AAED;;;GAGG;AACH,SAAgB,IAAI,CAAC,OAAe;IAClC,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,OAAO,GAAG,EAAE,CAAC,GAAG,CAAC,CAAA;AACxC,CAAC;AAFD,oBAEC;AAED;;;;;;GAMG;AACH,SAAgB,UAAU,CAAC,IAAY;IACrC,IAAA,eAAK,EAAC,OAAO,EAAE,IAAI,CAAC,CAAA;AACtB,CAAC;AAFD,gCAEC;AAED;;GAEG;AACH,SAAgB,QAAQ;IACtB,IAAA,eAAK,EAAC,UAAU,CAAC,CAAA;AACnB,CAAC;AAFD,4BAEC;AAED;;;;;;;GAOG;AACH,SAAsB,KAAK,CAAI,IAAY,EAAE,EAAoB;;QAC/D,UAAU,CAAC,IAAI,CAAC,CAAA;QAEhB,IAAI,MAAS,CAAA;QAEb,IAAI;YACF,MAAM,GAAG,MAAM,EAAE,EAAE,CAAA;SACpB;gBAAS;YACR,QAAQ,EAAE,CAAA;SACX;QAED,OAAO,MAAM,CAAA;IACf,CAAC;CAAA;AAZD,sBAYC;AAED,yEAAyE;AACzE,uBAAuB;AACvB,yEAAyE;AAEzE;;;;;GAKG;AACH,8DAA8D;AAC9D,SAAgB,SAAS,CAAC,IAAY,EAAE,KAAU;IAChD,MAAM,QAAQ,GAAG,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,IAAI,EAAE,CAAA;IAClD,IAAI,QAAQ,EAAE;QACZ,OAAO,IAAA,+BAAgB,EAAC,OAAO,EAAE,IAAA,qCAAsB,EAAC,IAAI,EAAE,KAAK,CAAC,CAAC,CAAA;KACtE;IAED,IAAA,sBAAY,EAAC,YAAY,EAAE,EAAC,IAAI,EAAC,EAAE,IAAA,sBAAc,EAAC,KAAK,CAAC,CAAC,CAAA;AAC3D,CAAC;AAPD,8BAOC;AAED;;;;;GAKG;AACH,SAAgB,QAAQ,CAAC,IAAY;IACnC,OAAO,OAAO,CAAC,GAAG,CAAC,SAAS,IAAI,EAAE,CAAC,IAAI,EAAE,CAAA;AAC3C,CAAC;AAFD,4BAEC;AAED,SAAsB,UAAU,CAAC,GAAY;;QAC3C,OAAO,MAAM,uBAAU,CAAC,UAAU,CAAC,GAAG,CAAC,CAAA;IACzC,CAAC;CAAA;AAFD,gCAEC;AAED;;GAEG;AACH,qCAAiC;AAAzB,kGAAA,OAAO,OAAA;AAEf;;GAEG;AACH,qCAAyC;AAAjC,0GAAA,eAAe,OAAA;AAEvB;;GAEG;AACH,2CAAqE;AAA7D,yGAAA,WAAW,OAAA;AAAE,yGAAA,WAAW,OAAA;AAAE,4GAAA,cAAc,OAAA;AAEhD;;GAEG;AACH,uDAAsC"} \ No newline at end of file diff --git a/node_modules/@actions/core/lib/file-command.d.ts b/node_modules/@actions/core/lib/file-command.d.ts new file mode 100644 index 0000000..2d1f2f4 --- /dev/null +++ b/node_modules/@actions/core/lib/file-command.d.ts @@ -0,0 +1,2 @@ +export declare function issueFileCommand(command: string, message: any): void; +export declare function prepareKeyValueMessage(key: string, value: any): string; diff --git a/node_modules/@actions/core/lib/file-command.js b/node_modules/@actions/core/lib/file-command.js new file mode 100644 index 0000000..e4cae44 --- /dev/null +++ b/node_modules/@actions/core/lib/file-command.js @@ -0,0 +1,62 @@ +"use strict"; +// For internal use, subject to change. +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.prepareKeyValueMessage = exports.issueFileCommand = void 0; +// We use any as a valid input type +/* eslint-disable @typescript-eslint/no-explicit-any */ +const crypto = __importStar(require("crypto")); +const fs = __importStar(require("fs")); +const os = __importStar(require("os")); +const utils_1 = require("./utils"); +function issueFileCommand(command, message) { + const filePath = process.env[`GITHUB_${command}`]; + if (!filePath) { + throw new Error(`Unable to find environment variable for file command ${command}`); + } + if (!fs.existsSync(filePath)) { + throw new Error(`Missing file at path: ${filePath}`); + } + fs.appendFileSync(filePath, `${(0, utils_1.toCommandValue)(message)}${os.EOL}`, { + encoding: 'utf8' + }); +} +exports.issueFileCommand = issueFileCommand; +function prepareKeyValueMessage(key, value) { + const delimiter = `ghadelimiter_${crypto.randomUUID()}`; + const convertedValue = (0, utils_1.toCommandValue)(value); + // These should realistically never happen, but just in case someone finds a + // way to exploit uuid generation let's not allow keys or values that contain + // the delimiter. + if (key.includes(delimiter)) { + throw new Error(`Unexpected input: name should not contain the delimiter "${delimiter}"`); + } + if (convertedValue.includes(delimiter)) { + throw new Error(`Unexpected input: value should not contain the delimiter "${delimiter}"`); + } + return `${key}<<${delimiter}${os.EOL}${convertedValue}${os.EOL}${delimiter}`; +} +exports.prepareKeyValueMessage = prepareKeyValueMessage; +//# sourceMappingURL=file-command.js.map \ No newline at end of file diff --git a/node_modules/@actions/core/lib/file-command.js.map b/node_modules/@actions/core/lib/file-command.js.map new file mode 100644 index 0000000..8510a57 --- /dev/null +++ b/node_modules/@actions/core/lib/file-command.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"file-command.js","sourceRoot":"","sources":["../src/file-command.ts"],"names":[],"mappings":";AAAA,uCAAuC;;;;;;;;;;;;;;;;;;;;;;;;;;AAEvC,mCAAmC;AACnC,uDAAuD;AAEvD,+CAAgC;AAChC,uCAAwB;AACxB,uCAAwB;AACxB,mCAAsC;AAEtC,SAAgB,gBAAgB,CAAC,OAAe,EAAE,OAAY;IAC5D,MAAM,QAAQ,GAAG,OAAO,CAAC,GAAG,CAAC,UAAU,OAAO,EAAE,CAAC,CAAA;IACjD,IAAI,CAAC,QAAQ,EAAE;QACb,MAAM,IAAI,KAAK,CACb,wDAAwD,OAAO,EAAE,CAClE,CAAA;KACF;IACD,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,QAAQ,CAAC,EAAE;QAC5B,MAAM,IAAI,KAAK,CAAC,yBAAyB,QAAQ,EAAE,CAAC,CAAA;KACrD;IAED,EAAE,CAAC,cAAc,CAAC,QAAQ,EAAE,GAAG,IAAA,sBAAc,EAAC,OAAO,CAAC,GAAG,EAAE,CAAC,GAAG,EAAE,EAAE;QACjE,QAAQ,EAAE,MAAM;KACjB,CAAC,CAAA;AACJ,CAAC;AAdD,4CAcC;AAED,SAAgB,sBAAsB,CAAC,GAAW,EAAE,KAAU;IAC5D,MAAM,SAAS,GAAG,gBAAgB,MAAM,CAAC,UAAU,EAAE,EAAE,CAAA;IACvD,MAAM,cAAc,GAAG,IAAA,sBAAc,EAAC,KAAK,CAAC,CAAA;IAE5C,4EAA4E;IAC5E,6EAA6E;IAC7E,iBAAiB;IACjB,IAAI,GAAG,CAAC,QAAQ,CAAC,SAAS,CAAC,EAAE;QAC3B,MAAM,IAAI,KAAK,CACb,4DAA4D,SAAS,GAAG,CACzE,CAAA;KACF;IAED,IAAI,cAAc,CAAC,QAAQ,CAAC,SAAS,CAAC,EAAE;QACtC,MAAM,IAAI,KAAK,CACb,6DAA6D,SAAS,GAAG,CAC1E,CAAA;KACF;IAED,OAAO,GAAG,GAAG,KAAK,SAAS,GAAG,EAAE,CAAC,GAAG,GAAG,cAAc,GAAG,EAAE,CAAC,GAAG,GAAG,SAAS,EAAE,CAAA;AAC9E,CAAC;AApBD,wDAoBC"} \ No newline at end of file diff --git a/node_modules/@actions/core/lib/oidc-utils.d.ts b/node_modules/@actions/core/lib/oidc-utils.d.ts new file mode 100644 index 0000000..657c7f4 --- /dev/null +++ b/node_modules/@actions/core/lib/oidc-utils.d.ts @@ -0,0 +1,7 @@ +export declare class OidcClient { + private static createHttpClient; + private static getRequestToken; + private static getIDTokenUrl; + private static getCall; + static getIDToken(audience?: string): Promise; +} diff --git a/node_modules/@actions/core/lib/oidc-utils.js b/node_modules/@actions/core/lib/oidc-utils.js new file mode 100644 index 0000000..f8895d0 --- /dev/null +++ b/node_modules/@actions/core/lib/oidc-utils.js @@ -0,0 +1,77 @@ +"use strict"; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.OidcClient = void 0; +const http_client_1 = require("@actions/http-client"); +const auth_1 = require("@actions/http-client/lib/auth"); +const core_1 = require("./core"); +class OidcClient { + static createHttpClient(allowRetry = true, maxRetry = 10) { + const requestOptions = { + allowRetries: allowRetry, + maxRetries: maxRetry + }; + return new http_client_1.HttpClient('actions/oidc-client', [new auth_1.BearerCredentialHandler(OidcClient.getRequestToken())], requestOptions); + } + static getRequestToken() { + const token = process.env['ACTIONS_ID_TOKEN_REQUEST_TOKEN']; + if (!token) { + throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_TOKEN env variable'); + } + return token; + } + static getIDTokenUrl() { + const runtimeUrl = process.env['ACTIONS_ID_TOKEN_REQUEST_URL']; + if (!runtimeUrl) { + throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_URL env variable'); + } + return runtimeUrl; + } + static getCall(id_token_url) { + var _a; + return __awaiter(this, void 0, void 0, function* () { + const httpclient = OidcClient.createHttpClient(); + const res = yield httpclient + .getJson(id_token_url) + .catch(error => { + throw new Error(`Failed to get ID Token. \n + Error Code : ${error.statusCode}\n + Error Message: ${error.message}`); + }); + const id_token = (_a = res.result) === null || _a === void 0 ? void 0 : _a.value; + if (!id_token) { + throw new Error('Response json body do not have ID Token field'); + } + return id_token; + }); + } + static getIDToken(audience) { + return __awaiter(this, void 0, void 0, function* () { + try { + // New ID Token is requested from action service + let id_token_url = OidcClient.getIDTokenUrl(); + if (audience) { + const encodedAudience = encodeURIComponent(audience); + id_token_url = `${id_token_url}&audience=${encodedAudience}`; + } + (0, core_1.debug)(`ID token url is ${id_token_url}`); + const id_token = yield OidcClient.getCall(id_token_url); + (0, core_1.setSecret)(id_token); + return id_token; + } + catch (error) { + throw new Error(`Error message: ${error.message}`); + } + }); + } +} +exports.OidcClient = OidcClient; +//# sourceMappingURL=oidc-utils.js.map \ No newline at end of file diff --git a/node_modules/@actions/core/lib/oidc-utils.js.map b/node_modules/@actions/core/lib/oidc-utils.js.map new file mode 100644 index 0000000..f144099 --- /dev/null +++ b/node_modules/@actions/core/lib/oidc-utils.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"oidc-utils.js","sourceRoot":"","sources":["../src/oidc-utils.ts"],"names":[],"mappings":";;;;;;;;;;;;AAGA,sDAA+C;AAC/C,wDAAqE;AACrE,iCAAuC;AAKvC,MAAa,UAAU;IACb,MAAM,CAAC,gBAAgB,CAC7B,UAAU,GAAG,IAAI,EACjB,QAAQ,GAAG,EAAE;QAEb,MAAM,cAAc,GAAmB;YACrC,YAAY,EAAE,UAAU;YACxB,UAAU,EAAE,QAAQ;SACrB,CAAA;QAED,OAAO,IAAI,wBAAU,CACnB,qBAAqB,EACrB,CAAC,IAAI,8BAAuB,CAAC,UAAU,CAAC,eAAe,EAAE,CAAC,CAAC,EAC3D,cAAc,CACf,CAAA;IACH,CAAC;IAEO,MAAM,CAAC,eAAe;QAC5B,MAAM,KAAK,GAAG,OAAO,CAAC,GAAG,CAAC,gCAAgC,CAAC,CAAA;QAC3D,IAAI,CAAC,KAAK,EAAE;YACV,MAAM,IAAI,KAAK,CACb,2DAA2D,CAC5D,CAAA;SACF;QACD,OAAO,KAAK,CAAA;IACd,CAAC;IAEO,MAAM,CAAC,aAAa;QAC1B,MAAM,UAAU,GAAG,OAAO,CAAC,GAAG,CAAC,8BAA8B,CAAC,CAAA;QAC9D,IAAI,CAAC,UAAU,EAAE;YACf,MAAM,IAAI,KAAK,CAAC,yDAAyD,CAAC,CAAA;SAC3E;QACD,OAAO,UAAU,CAAA;IACnB,CAAC;IAEO,MAAM,CAAO,OAAO,CAAC,YAAoB;;;YAC/C,MAAM,UAAU,GAAG,UAAU,CAAC,gBAAgB,EAAE,CAAA;YAEhD,MAAM,GAAG,GAAG,MAAM,UAAU;iBACzB,OAAO,CAAgB,YAAY,CAAC;iBACpC,KAAK,CAAC,KAAK,CAAC,EAAE;gBACb,MAAM,IAAI,KAAK,CACb;uBACa,KAAK,CAAC,UAAU;yBACd,KAAK,CAAC,OAAO,EAAE,CAC/B,CAAA;YACH,CAAC,CAAC,CAAA;YAEJ,MAAM,QAAQ,GAAG,MAAA,GAAG,CAAC,MAAM,0CAAE,KAAK,CAAA;YAClC,IAAI,CAAC,QAAQ,EAAE;gBACb,MAAM,IAAI,KAAK,CAAC,+CAA+C,CAAC,CAAA;aACjE;YACD,OAAO,QAAQ,CAAA;;KAChB;IAED,MAAM,CAAO,UAAU,CAAC,QAAiB;;YACvC,IAAI;gBACF,gDAAgD;gBAChD,IAAI,YAAY,GAAW,UAAU,CAAC,aAAa,EAAE,CAAA;gBACrD,IAAI,QAAQ,EAAE;oBACZ,MAAM,eAAe,GAAG,kBAAkB,CAAC,QAAQ,CAAC,CAAA;oBACpD,YAAY,GAAG,GAAG,YAAY,aAAa,eAAe,EAAE,CAAA;iBAC7D;gBAED,IAAA,YAAK,EAAC,mBAAmB,YAAY,EAAE,CAAC,CAAA;gBAExC,MAAM,QAAQ,GAAG,MAAM,UAAU,CAAC,OAAO,CAAC,YAAY,CAAC,CAAA;gBACvD,IAAA,gBAAS,EAAC,QAAQ,CAAC,CAAA;gBACnB,OAAO,QAAQ,CAAA;aAChB;YAAC,OAAO,KAAK,EAAE;gBACd,MAAM,IAAI,KAAK,CAAC,kBAAkB,KAAK,CAAC,OAAO,EAAE,CAAC,CAAA;aACnD;QACH,CAAC;KAAA;CACF;AAzED,gCAyEC"} \ No newline at end of file diff --git a/node_modules/@actions/core/lib/path-utils.d.ts b/node_modules/@actions/core/lib/path-utils.d.ts new file mode 100644 index 0000000..1fee9f3 --- /dev/null +++ b/node_modules/@actions/core/lib/path-utils.d.ts @@ -0,0 +1,25 @@ +/** + * toPosixPath converts the given path to the posix form. On Windows, \\ will be + * replaced with /. + * + * @param pth. Path to transform. + * @return string Posix path. + */ +export declare function toPosixPath(pth: string): string; +/** + * toWin32Path converts the given path to the win32 form. On Linux, / will be + * replaced with \\. + * + * @param pth. Path to transform. + * @return string Win32 path. + */ +export declare function toWin32Path(pth: string): string; +/** + * toPlatformPath converts the given path to a platform-specific path. It does + * this by replacing instances of / and \ with the platform-specific path + * separator. + * + * @param pth The path to platformize. + * @return string The platform-specific path. + */ +export declare function toPlatformPath(pth: string): string; diff --git a/node_modules/@actions/core/lib/path-utils.js b/node_modules/@actions/core/lib/path-utils.js new file mode 100644 index 0000000..d13f4ec --- /dev/null +++ b/node_modules/@actions/core/lib/path-utils.js @@ -0,0 +1,62 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.toPlatformPath = exports.toWin32Path = exports.toPosixPath = void 0; +const path = __importStar(require("path")); +/** + * toPosixPath converts the given path to the posix form. On Windows, \\ will be + * replaced with /. + * + * @param pth. Path to transform. + * @return string Posix path. + */ +function toPosixPath(pth) { + return pth.replace(/[\\]/g, '/'); +} +exports.toPosixPath = toPosixPath; +/** + * toWin32Path converts the given path to the win32 form. On Linux, / will be + * replaced with \\. + * + * @param pth. Path to transform. + * @return string Win32 path. + */ +function toWin32Path(pth) { + return pth.replace(/[/]/g, '\\'); +} +exports.toWin32Path = toWin32Path; +/** + * toPlatformPath converts the given path to a platform-specific path. It does + * this by replacing instances of / and \ with the platform-specific path + * separator. + * + * @param pth The path to platformize. + * @return string The platform-specific path. 
+ */ +function toPlatformPath(pth) { + return pth.replace(/[/\\]/g, path.sep); +} +exports.toPlatformPath = toPlatformPath; +//# sourceMappingURL=path-utils.js.map \ No newline at end of file diff --git a/node_modules/@actions/core/lib/path-utils.js.map b/node_modules/@actions/core/lib/path-utils.js.map new file mode 100644 index 0000000..b296008 --- /dev/null +++ b/node_modules/@actions/core/lib/path-utils.js.map @@ -0,0 +1 @@ +{"version":3,"file":"path-utils.js","sourceRoot":"","sources":["../src/path-utils.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,2CAA4B;AAE5B;;;;;;GAMG;AACH,SAAgB,WAAW,CAAC,GAAW;IACrC,OAAO,GAAG,CAAC,OAAO,CAAC,OAAO,EAAE,GAAG,CAAC,CAAA;AAClC,CAAC;AAFD,kCAEC;AAED;;;;;;GAMG;AACH,SAAgB,WAAW,CAAC,GAAW;IACrC,OAAO,GAAG,CAAC,OAAO,CAAC,MAAM,EAAE,IAAI,CAAC,CAAA;AAClC,CAAC;AAFD,kCAEC;AAED;;;;;;;GAOG;AACH,SAAgB,cAAc,CAAC,GAAW;IACxC,OAAO,GAAG,CAAC,OAAO,CAAC,QAAQ,EAAE,IAAI,CAAC,GAAG,CAAC,CAAA;AACxC,CAAC;AAFD,wCAEC"} \ No newline at end of file diff --git a/node_modules/@actions/core/lib/platform.d.ts b/node_modules/@actions/core/lib/platform.d.ts new file mode 100644 index 0000000..34dbd93 --- /dev/null +++ b/node_modules/@actions/core/lib/platform.d.ts @@ -0,0 +1,15 @@ +/// +export declare const platform: NodeJS.Platform; +export declare const arch: string; +export declare const isWindows: boolean; +export declare const isMacOS: boolean; +export declare const isLinux: boolean; +export declare function getDetails(): Promise<{ + name: string; + platform: string; + arch: string; + version: string; + isWindows: boolean; + isMacOS: boolean; + isLinux: boolean; +}>; diff --git a/node_modules/@actions/core/lib/platform.js b/node_modules/@actions/core/lib/platform.js new file mode 100644 index 0000000..c6b58c5 --- /dev/null +++ b/node_modules/@actions/core/lib/platform.js @@ -0,0 +1,94 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getDetails = exports.isLinux = exports.isMacOS = exports.isWindows = exports.arch = exports.platform = void 0; +const os_1 = __importDefault(require("os")); +const exec = __importStar(require("@actions/exec")); +const getWindowsInfo = () => __awaiter(void 0, void 0, void 0, function* () { + const { stdout: version } = yield exec.getExecOutput('powershell -command "(Get-CimInstance -ClassName Win32_OperatingSystem).Version"', undefined, { + silent: true + }); + const { stdout: name } = yield exec.getExecOutput('powershell -command "(Get-CimInstance -ClassName Win32_OperatingSystem).Caption"', undefined, { + silent: true + }); + return { + name: name.trim(), + version: version.trim() + }; +}); +const getMacOsInfo = () => __awaiter(void 0, void 0, void 0, function* () { + var _a, _b, _c, _d; + const { stdout } = yield exec.getExecOutput('sw_vers', undefined, { + silent: true + }); + const version = (_b = (_a = stdout.match(/ProductVersion:\s*(.+)/)) === null || _a === void 0 ? void 0 : _a[1]) !== null && _b !== void 0 ? _b : ''; + const name = (_d = (_c = stdout.match(/ProductName:\s*(.+)/)) === null || _c === void 0 ? void 0 : _c[1]) !== null && _d !== void 0 ? _d : ''; + return { + name, + version + }; +}); +const getLinuxInfo = () => __awaiter(void 0, void 0, void 0, function* () { + const { stdout } = yield exec.getExecOutput('lsb_release', ['-i', '-r', '-s'], { + silent: true + }); + const [name, version] = stdout.trim().split('\n'); + return { + name, + version + }; +}); +exports.platform = os_1.default.platform(); +exports.arch = os_1.default.arch(); +exports.isWindows = exports.platform === 'win32'; +exports.isMacOS = exports.platform === 'darwin'; +exports.isLinux = exports.platform === 'linux'; +function getDetails() { + return __awaiter(this, void 0, void 0, function* () { + return Object.assign(Object.assign({}, (yield (exports.isWindows + ? getWindowsInfo() + : exports.isMacOS + ? 
getMacOsInfo() + : getLinuxInfo()))), { platform: exports.platform, + arch: exports.arch, + isWindows: exports.isWindows, + isMacOS: exports.isMacOS, + isLinux: exports.isLinux }); + }); +} +exports.getDetails = getDetails; +//# sourceMappingURL=platform.js.map \ No newline at end of file diff --git a/node_modules/@actions/core/lib/platform.js.map b/node_modules/@actions/core/lib/platform.js.map new file mode 100644 index 0000000..976102b --- /dev/null +++ b/node_modules/@actions/core/lib/platform.js.map @@ -0,0 +1 @@ +{"version":3,"file":"platform.js","sourceRoot":"","sources":["../src/platform.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,4CAAmB;AACnB,oDAAqC;AAErC,MAAM,cAAc,GAAG,GAAmD,EAAE;IAC1E,MAAM,EAAC,MAAM,EAAE,OAAO,EAAC,GAAG,MAAM,IAAI,CAAC,aAAa,CAChD,kFAAkF,EAClF,SAAS,EACT;QACE,MAAM,EAAE,IAAI;KACb,CACF,CAAA;IAED,MAAM,EAAC,MAAM,EAAE,IAAI,EAAC,GAAG,MAAM,IAAI,CAAC,aAAa,CAC7C,kFAAkF,EAClF,SAAS,EACT;QACE,MAAM,EAAE,IAAI;KACb,CACF,CAAA;IAED,OAAO;QACL,IAAI,EAAE,IAAI,CAAC,IAAI,EAAE;QACjB,OAAO,EAAE,OAAO,CAAC,IAAI,EAAE;KACxB,CAAA;AACH,CAAC,CAAA,CAAA;AAED,MAAM,YAAY,GAAG,GAGlB,EAAE;;IACH,MAAM,EAAC,MAAM,EAAC,GAAG,MAAM,IAAI,CAAC,aAAa,CAAC,SAAS,EAAE,SAAS,EAAE;QAC9D,MAAM,EAAE,IAAI;KACb,CAAC,CAAA;IAEF,MAAM,OAAO,GAAG,MAAA,MAAA,MAAM,CAAC,KAAK,CAAC,wBAAwB,CAAC,0CAAG,CAAC,CAAC,mCAAI,EAAE,CAAA;IACjE,MAAM,IAAI,GAAG,MAAA,MAAA,MAAM,CAAC,KAAK,CAAC,qBAAqB,CAAC,0CAAG,CAAC,CAAC,mCAAI,EAAE,CAAA;IAE3D,OAAO;QACL,IAAI;QACJ,OAAO;KACR,CAAA;AACH,CAAC,CAAA,CAAA;AAED,MAAM,YAAY,GAAG,GAGlB,EAAE;IACH,MAAM,EAAC,MAAM,EAAC,GAAG,MAAM,IAAI,CAAC,aAAa,CAAC,aAAa,EAAE,CAAC,IAAI,EAAE,IAAI,EAAE,IAAI,CAAC,EAAE;QAC3E,MAAM,EAAE,IAAI;KACb,CAAC,CAAA;IAEF,MAAM,CAAC,IAAI,EAAE,OAAO,CAAC,GAAG,MAAM,CAAC,IAAI,EAAE,CAAC,KAAK,CAAC,IAAI,CAAC,CAAA;IAEjD,OAAO;QACL,IAAI;QACJ,OAAO;KACR,CAAA;AACH,CAAC,CAAA,CAAA;AAEY,QAAA,QAAQ,GAAG,YAAE,CAAC,QAAQ,EAAE,CAAA;AACxB,QAAA,IAAI,GAAG,YAAE,CAAC,IAAI,EAAE,CAAA;AAChB,QAAA,SAAS,GAAG,gBAAQ,KAAK,OAAO,CAAA;AAChC,QAAA,OAAO,GAAG,gBAAQ,KAAK,QAAQ,CAAA;AAC/B,QAAA,OAAO,GAAG,gBAAQ,KAAK,OAAO,CAAA;AAE3C,SAAsB,UAAU;;QAS9B,uCACK,CAAC,MAAM,CAAC,iBAAS;YAClB,CAAC,CAAC,cAAc,EAAE;YAClB,CAAC,CAAC,eAAO;gBACT,CAAC,CAAC,YAAY,EAAE;gBAChB,CAAC,CAAC,YAAY,EAAE,CAAC,CAAC,KACpB,QAAQ,EAAR,gBAAQ;YACR,IAAI,EAAJ,YAAI;YACJ,SAAS,EAAT,iBAAS;YACT,OAAO,EAAP,eAAO;YACP,OAAO,EAAP,eAAO,IACR;IACH,CAAC;CAAA;AArBD,gCAqBC"} \ No newline at end of file diff --git a/node_modules/@actions/core/lib/summary.d.ts b/node_modules/@actions/core/lib/summary.d.ts new file mode 100644 index 0000000..0ea4384 --- /dev/null +++ b/node_modules/@actions/core/lib/summary.d.ts @@ -0,0 +1,202 @@ +export declare const SUMMARY_ENV_VAR = "GITHUB_STEP_SUMMARY"; +export declare const SUMMARY_DOCS_URL = "https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary"; +export type SummaryTableRow = (SummaryTableCell | string)[]; +export interface SummaryTableCell { + /** + * Cell content + */ + data: string; + /** + * Render cell as header + * (optional) default: false + */ + header?: boolean; + /** + * Number of columns the cell extends + * (optional) default: '1' + */ + colspan?: string; + /** + * Number of rows the cell extends + * (optional) default: '1' + */ + rowspan?: string; +} +export interface SummaryImageOptions { + /** + * The width of the image in pixels. Must be an integer without a unit. + * (optional) + */ + width?: string; + /** + * The height of the image in pixels. Must be an integer without a unit. 
+ * (optional) + */ + height?: string; +} +export interface SummaryWriteOptions { + /** + * Replace all existing content in summary file with buffer contents + * (optional) default: false + */ + overwrite?: boolean; +} +declare class Summary { + private _buffer; + private _filePath?; + constructor(); + /** + * Finds the summary file path from the environment, rejects if env var is not found or file does not exist + * Also checks r/w permissions. + * + * @returns step summary file path + */ + private filePath; + /** + * Wraps content in an HTML tag, adding any HTML attributes + * + * @param {string} tag HTML tag to wrap + * @param {string | null} content content within the tag + * @param {[attribute: string]: string} attrs key-value list of HTML attributes to add + * + * @returns {string} content wrapped in HTML element + */ + private wrap; + /** + * Writes text in the buffer to the summary buffer file and empties buffer. Will append by default. + * + * @param {SummaryWriteOptions} [options] (optional) options for write operation + * + * @returns {Promise} summary instance + */ + write(options?: SummaryWriteOptions): Promise; + /** + * Clears the summary buffer and wipes the summary file + * + * @returns {Summary} summary instance + */ + clear(): Promise; + /** + * Returns the current summary buffer as a string + * + * @returns {string} string of summary buffer + */ + stringify(): string; + /** + * If the summary buffer is empty + * + * @returns {boolen} true if the buffer is empty + */ + isEmptyBuffer(): boolean; + /** + * Resets the summary buffer without writing to summary file + * + * @returns {Summary} summary instance + */ + emptyBuffer(): Summary; + /** + * Adds raw text to the summary buffer + * + * @param {string} text content to add + * @param {boolean} [addEOL=false] (optional) append an EOL to the raw text (default: false) + * + * @returns {Summary} summary instance + */ + addRaw(text: string, addEOL?: boolean): Summary; + /** + * Adds the operating system-specific end-of-line marker to the buffer + * + * @returns {Summary} summary instance + */ + addEOL(): Summary; + /** + * Adds an HTML codeblock to the summary buffer + * + * @param {string} code content to render within fenced code block + * @param {string} lang (optional) language to syntax highlight code + * + * @returns {Summary} summary instance + */ + addCodeBlock(code: string, lang?: string): Summary; + /** + * Adds an HTML list to the summary buffer + * + * @param {string[]} items list of items to render + * @param {boolean} [ordered=false] (optional) if the rendered list should be ordered or not (default: false) + * + * @returns {Summary} summary instance + */ + addList(items: string[], ordered?: boolean): Summary; + /** + * Adds an HTML table to the summary buffer + * + * @param {SummaryTableCell[]} rows table rows + * + * @returns {Summary} summary instance + */ + addTable(rows: SummaryTableRow[]): Summary; + /** + * Adds a collapsable HTML details element to the summary buffer + * + * @param {string} label text for the closed state + * @param {string} content collapsable content + * + * @returns {Summary} summary instance + */ + addDetails(label: string, content: string): Summary; + /** + * Adds an HTML image tag to the summary buffer + * + * @param {string} src path to the image you to embed + * @param {string} alt text description of the image + * @param {SummaryImageOptions} options (optional) addition image attributes + * + * @returns {Summary} summary instance + */ + addImage(src: string, alt: string, 
options?: SummaryImageOptions): Summary; + /** + * Adds an HTML section heading element + * + * @param {string} text heading text + * @param {number | string} [level=1] (optional) the heading level, default: 1 + * + * @returns {Summary} summary instance + */ + addHeading(text: string, level?: number | string): Summary; + /** + * Adds an HTML thematic break (
<hr>) to the summary buffer + * + * @returns {Summary} summary instance + */ + addSeparator(): Summary; + /** + * Adds an HTML line break (<br>
) to the summary buffer + * + * @returns {Summary} summary instance + */ + addBreak(): Summary; + /** + * Adds an HTML blockquote to the summary buffer + * + * @param {string} text quote text + * @param {string} cite (optional) citation url + * + * @returns {Summary} summary instance + */ + addQuote(text: string, cite?: string): Summary; + /** + * Adds an HTML anchor tag to the summary buffer + * + * @param {string} text link text/content + * @param {string} href hyperlink + * + * @returns {Summary} summary instance + */ + addLink(text: string, href: string): Summary; +} +/** + * @deprecated use `core.summary` + */ +export declare const markdownSummary: Summary; +export declare const summary: Summary; +export {}; diff --git a/node_modules/@actions/core/lib/summary.js b/node_modules/@actions/core/lib/summary.js new file mode 100644 index 0000000..04a335b --- /dev/null +++ b/node_modules/@actions/core/lib/summary.js @@ -0,0 +1,283 @@ +"use strict"; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.summary = exports.markdownSummary = exports.SUMMARY_DOCS_URL = exports.SUMMARY_ENV_VAR = void 0; +const os_1 = require("os"); +const fs_1 = require("fs"); +const { access, appendFile, writeFile } = fs_1.promises; +exports.SUMMARY_ENV_VAR = 'GITHUB_STEP_SUMMARY'; +exports.SUMMARY_DOCS_URL = 'https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary'; +class Summary { + constructor() { + this._buffer = ''; + } + /** + * Finds the summary file path from the environment, rejects if env var is not found or file does not exist + * Also checks r/w permissions. + * + * @returns step summary file path + */ + filePath() { + return __awaiter(this, void 0, void 0, function* () { + if (this._filePath) { + return this._filePath; + } + const pathFromEnv = process.env[exports.SUMMARY_ENV_VAR]; + if (!pathFromEnv) { + throw new Error(`Unable to find environment variable for $${exports.SUMMARY_ENV_VAR}. Check if your runtime environment supports job summaries.`); + } + try { + yield access(pathFromEnv, fs_1.constants.R_OK | fs_1.constants.W_OK); + } + catch (_a) { + throw new Error(`Unable to access summary file: '${pathFromEnv}'. 
Check if the file has correct read/write permissions.`); + } + this._filePath = pathFromEnv; + return this._filePath; + }); + } + /** + * Wraps content in an HTML tag, adding any HTML attributes + * + * @param {string} tag HTML tag to wrap + * @param {string | null} content content within the tag + * @param {[attribute: string]: string} attrs key-value list of HTML attributes to add + * + * @returns {string} content wrapped in HTML element + */ + wrap(tag, content, attrs = {}) { + const htmlAttrs = Object.entries(attrs) + .map(([key, value]) => ` ${key}="${value}"`) + .join(''); + if (!content) { + return `<${tag}${htmlAttrs}>`; + } + return `<${tag}${htmlAttrs}>${content}`; + } + /** + * Writes text in the buffer to the summary buffer file and empties buffer. Will append by default. + * + * @param {SummaryWriteOptions} [options] (optional) options for write operation + * + * @returns {Promise} summary instance + */ + write(options) { + return __awaiter(this, void 0, void 0, function* () { + const overwrite = !!(options === null || options === void 0 ? void 0 : options.overwrite); + const filePath = yield this.filePath(); + const writeFunc = overwrite ? writeFile : appendFile; + yield writeFunc(filePath, this._buffer, { encoding: 'utf8' }); + return this.emptyBuffer(); + }); + } + /** + * Clears the summary buffer and wipes the summary file + * + * @returns {Summary} summary instance + */ + clear() { + return __awaiter(this, void 0, void 0, function* () { + return this.emptyBuffer().write({ overwrite: true }); + }); + } + /** + * Returns the current summary buffer as a string + * + * @returns {string} string of summary buffer + */ + stringify() { + return this._buffer; + } + /** + * If the summary buffer is empty + * + * @returns {boolen} true if the buffer is empty + */ + isEmptyBuffer() { + return this._buffer.length === 0; + } + /** + * Resets the summary buffer without writing to summary file + * + * @returns {Summary} summary instance + */ + emptyBuffer() { + this._buffer = ''; + return this; + } + /** + * Adds raw text to the summary buffer + * + * @param {string} text content to add + * @param {boolean} [addEOL=false] (optional) append an EOL to the raw text (default: false) + * + * @returns {Summary} summary instance + */ + addRaw(text, addEOL = false) { + this._buffer += text; + return addEOL ? this.addEOL() : this; + } + /** + * Adds the operating system-specific end-of-line marker to the buffer + * + * @returns {Summary} summary instance + */ + addEOL() { + return this.addRaw(os_1.EOL); + } + /** + * Adds an HTML codeblock to the summary buffer + * + * @param {string} code content to render within fenced code block + * @param {string} lang (optional) language to syntax highlight code + * + * @returns {Summary} summary instance + */ + addCodeBlock(code, lang) { + const attrs = Object.assign({}, (lang && { lang })); + const element = this.wrap('pre', this.wrap('code', code), attrs); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML list to the summary buffer + * + * @param {string[]} items list of items to render + * @param {boolean} [ordered=false] (optional) if the rendered list should be ordered or not (default: false) + * + * @returns {Summary} summary instance + */ + addList(items, ordered = false) { + const tag = ordered ? 
'ol' : 'ul'; + const listItems = items.map(item => this.wrap('li', item)).join(''); + const element = this.wrap(tag, listItems); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML table to the summary buffer + * + * @param {SummaryTableCell[]} rows table rows + * + * @returns {Summary} summary instance + */ + addTable(rows) { + const tableBody = rows + .map(row => { + const cells = row + .map(cell => { + if (typeof cell === 'string') { + return this.wrap('td', cell); + } + const { header, data, colspan, rowspan } = cell; + const tag = header ? 'th' : 'td'; + const attrs = Object.assign(Object.assign({}, (colspan && { colspan })), (rowspan && { rowspan })); + return this.wrap(tag, data, attrs); + }) + .join(''); + return this.wrap('tr', cells); + }) + .join(''); + const element = this.wrap('table', tableBody); + return this.addRaw(element).addEOL(); + } + /** + * Adds a collapsable HTML details element to the summary buffer + * + * @param {string} label text for the closed state + * @param {string} content collapsable content + * + * @returns {Summary} summary instance + */ + addDetails(label, content) { + const element = this.wrap('details', this.wrap('summary', label) + content); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML image tag to the summary buffer + * + * @param {string} src path to the image you to embed + * @param {string} alt text description of the image + * @param {SummaryImageOptions} options (optional) addition image attributes + * + * @returns {Summary} summary instance + */ + addImage(src, alt, options) { + const { width, height } = options || {}; + const attrs = Object.assign(Object.assign({}, (width && { width })), (height && { height })); + const element = this.wrap('img', null, Object.assign({ src, alt }, attrs)); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML section heading element + * + * @param {string} text heading text + * @param {number | string} [level=1] (optional) the heading level, default: 1 + * + * @returns {Summary} summary instance + */ + addHeading(text, level) { + const tag = `h${level}`; + const allowedTag = ['h1', 'h2', 'h3', 'h4', 'h5', 'h6'].includes(tag) + ? tag + : 'h1'; + const element = this.wrap(allowedTag, text); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML thematic break (
<hr>) to the summary buffer + * + * @returns {Summary} summary instance + */ + addSeparator() { + const element = this.wrap('hr', null); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML line break (<br>
) to the summary buffer + * + * @returns {Summary} summary instance + */ + addBreak() { + const element = this.wrap('br', null); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML blockquote to the summary buffer + * + * @param {string} text quote text + * @param {string} cite (optional) citation url + * + * @returns {Summary} summary instance + */ + addQuote(text, cite) { + const attrs = Object.assign({}, (cite && { cite })); + const element = this.wrap('blockquote', text, attrs); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML anchor tag to the summary buffer + * + * @param {string} text link text/content + * @param {string} href hyperlink + * + * @returns {Summary} summary instance + */ + addLink(text, href) { + const element = this.wrap('a', text, { href }); + return this.addRaw(element).addEOL(); + } +} +const _summary = new Summary(); +/** + * @deprecated use `core.summary` + */ +exports.markdownSummary = _summary; +exports.summary = _summary; +//# sourceMappingURL=summary.js.map \ No newline at end of file diff --git a/node_modules/@actions/core/lib/summary.js.map b/node_modules/@actions/core/lib/summary.js.map new file mode 100644 index 0000000..9637351 --- /dev/null +++ b/node_modules/@actions/core/lib/summary.js.map @@ -0,0 +1 @@ +{"version":3,"file":"summary.js","sourceRoot":"","sources":["../src/summary.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,2BAAsB;AACtB,2BAAsC;AACtC,MAAM,EAAC,MAAM,EAAE,UAAU,EAAE,SAAS,EAAC,GAAG,aAAQ,CAAA;AAEnC,QAAA,eAAe,GAAG,qBAAqB,CAAA;AACvC,QAAA,gBAAgB,GAC3B,2GAA2G,CAAA;AA+C7G,MAAM,OAAO;IAIX;QACE,IAAI,CAAC,OAAO,GAAG,EAAE,CAAA;IACnB,CAAC;IAED;;;;;OAKG;IACW,QAAQ;;YACpB,IAAI,IAAI,CAAC,SAAS,EAAE;gBAClB,OAAO,IAAI,CAAC,SAAS,CAAA;aACtB;YAED,MAAM,WAAW,GAAG,OAAO,CAAC,GAAG,CAAC,uBAAe,CAAC,CAAA;YAChD,IAAI,CAAC,WAAW,EAAE;gBAChB,MAAM,IAAI,KAAK,CACb,4CAA4C,uBAAe,6DAA6D,CACzH,CAAA;aACF;YAED,IAAI;gBACF,MAAM,MAAM,CAAC,WAAW,EAAE,cAAS,CAAC,IAAI,GAAG,cAAS,CAAC,IAAI,CAAC,CAAA;aAC3D;YAAC,WAAM;gBACN,MAAM,IAAI,KAAK,CACb,mCAAmC,WAAW,0DAA0D,CACzG,CAAA;aACF;YAED,IAAI,CAAC,SAAS,GAAG,WAAW,CAAA;YAC5B,OAAO,IAAI,CAAC,SAAS,CAAA;QACvB,CAAC;KAAA;IAED;;;;;;;;OAQG;IACK,IAAI,CACV,GAAW,EACX,OAAsB,EACtB,QAAuC,EAAE;QAEzC,MAAM,SAAS,GAAG,MAAM,CAAC,OAAO,CAAC,KAAK,CAAC;aACpC,GAAG,CAAC,CAAC,CAAC,GAAG,EAAE,KAAK,CAAC,EAAE,EAAE,CAAC,IAAI,GAAG,KAAK,KAAK,GAAG,CAAC;aAC3C,IAAI,CAAC,EAAE,CAAC,CAAA;QAEX,IAAI,CAAC,OAAO,EAAE;YACZ,OAAO,IAAI,GAAG,GAAG,SAAS,GAAG,CAAA;SAC9B;QAED,OAAO,IAAI,GAAG,GAAG,SAAS,IAAI,OAAO,KAAK,GAAG,GAAG,CAAA;IAClD,CAAC;IAED;;;;;;OAMG;IACG,KAAK,CAAC,OAA6B;;YACvC,MAAM,SAAS,GAAG,CAAC,CAAC,CAAA,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,SAAS,CAAA,CAAA;YACtC,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,QAAQ,EAAE,CAAA;YACtC,MAAM,SAAS,GAAG,SAAS,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,UAAU,CAAA;YACpD,MAAM,SAAS,CAAC,QAAQ,EAAE,IAAI,CAAC,OAAO,EAAE,EAAC,QAAQ,EAAE,MAAM,EAAC,CAAC,CAAA;YAC3D,OAAO,IAAI,CAAC,WAAW,EAAE,CAAA;QAC3B,CAAC;KAAA;IAED;;;;OAIG;IACG,KAAK;;YACT,OAAO,IAAI,CAAC,WAAW,EAAE,CAAC,KAAK,CAAC,EAAC,SAAS,EAAE,IAAI,EAAC,CAAC,CAAA;QACpD,CAAC;KAAA;IAED;;;;OAIG;IACH,SAAS;QACP,OAAO,IAAI,CAAC,OAAO,CAAA;IACrB,CAAC;IAED;;;;OAIG;IACH,aAAa;QACX,OAAO,IAAI,CAAC,OAAO,CAAC,MAAM,KAAK,CAAC,CAAA;IAClC,CAAC;IAED;;;;OAIG;IACH,WAAW;QACT,IAAI,CAAC,OAAO,GAAG,EAAE,CAAA;QACjB,OAAO,IAAI,CAAA;IACb,CAAC;IAED;;;;;;;OAOG;IACH,MAAM,CAAC,IAAY,EAAE,MAAM,GAAG,KAAK;QACjC,IAAI,CAAC,OAAO,IAAI,IAAI,CAAA;QACpB,OAAO,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,IAAI,CAAA;IACtC,CAAC;IAED;;;;OAIG;IACH,MAAM;QACJ,OAAO,IAAI,CAAC,MAAM,CAAC,QAAG,CAAC,CAAA;IACzB,CAAC;IAED;;;;;;;OAOG;IACH,YAAY,CAAC,IAAY,EAAE,IAAa;QACtC,MAAM,KAAK,
qBACN,CAAC,IAAI,IAAI,EAAC,IAAI,EAAC,CAAC,CACpB,CAAA;QACD,MAAM,OAAO,GAAG,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,IAAI,CAAC,EAAE,KAAK,CAAC,CAAA;QAChE,OAAO,IAAI,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,MAAM,EAAE,CAAA;IACtC,CAAC;IAED;;;;;;;OAOG;IACH,OAAO,CAAC,KAAe,EAAE,OAAO,GAAG,KAAK;QACtC,MAAM,GAAG,GAAG,OAAO,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,CAAA;QACjC,MAAM,SAAS,GAAG,KAAK,CAAC,GAAG,CAAC,IAAI,CAAC,EAAE,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,EAAE,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,CAAA;QACnE,MAAM,OAAO,GAAG,IAAI,CAAC,IAAI,CAAC,GAAG,EAAE,SAAS,CAAC,CAAA;QACzC,OAAO,IAAI,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,MAAM,EAAE,CAAA;IACtC,CAAC;IAED;;;;;;OAMG;IACH,QAAQ,CAAC,IAAuB;QAC9B,MAAM,SAAS,GAAG,IAAI;aACnB,GAAG,CAAC,GAAG,CAAC,EAAE;YACT,MAAM,KAAK,GAAG,GAAG;iBACd,GAAG,CAAC,IAAI,CAAC,EAAE;gBACV,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE;oBAC5B,OAAO,IAAI,CAAC,IAAI,CAAC,IAAI,EAAE,IAAI,CAAC,CAAA;iBAC7B;gBAED,MAAM,EAAC,MAAM,EAAE,IAAI,EAAE,OAAO,EAAE,OAAO,EAAC,GAAG,IAAI,CAAA;gBAC7C,MAAM,GAAG,GAAG,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,CAAA;gBAChC,MAAM,KAAK,mCACN,CAAC,OAAO,IAAI,EAAC,OAAO,EAAC,CAAC,GACtB,CAAC,OAAO,IAAI,EAAC,OAAO,EAAC,CAAC,CAC1B,CAAA;gBAED,OAAO,IAAI,CAAC,IAAI,CAAC,GAAG,EAAE,IAAI,EAAE,KAAK,CAAC,CAAA;YACpC,CAAC,CAAC;iBACD,IAAI,CAAC,EAAE,CAAC,CAAA;YAEX,OAAO,IAAI,CAAC,IAAI,CAAC,IAAI,EAAE,KAAK,CAAC,CAAA;QAC/B,CAAC,CAAC;aACD,IAAI,CAAC,EAAE,CAAC,CAAA;QAEX,MAAM,OAAO,GAAG,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,SAAS,CAAC,CAAA;QAC7C,OAAO,IAAI,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,MAAM,EAAE,CAAA;IACtC,CAAC;IAED;;;;;;;OAOG;IACH,UAAU,CAAC,KAAa,EAAE,OAAe;QACvC,MAAM,OAAO,GAAG,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,KAAK,CAAC,GAAG,OAAO,CAAC,CAAA;QAC3E,OAAO,IAAI,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,MAAM,EAAE,CAAA;IACtC,CAAC;IAED;;;;;;;;OAQG;IACH,QAAQ,CAAC,GAAW,EAAE,GAAW,EAAE,OAA6B;QAC9D,MAAM,EAAC,KAAK,EAAE,MAAM,EAAC,GAAG,OAAO,IAAI,EAAE,CAAA;QACrC,MAAM,KAAK,mCACN,CAAC,KAAK,IAAI,EAAC,KAAK,EAAC,CAAC,GAClB,CAAC,MAAM,IAAI,EAAC,MAAM,EAAC,CAAC,CACxB,CAAA;QAED,MAAM,OAAO,GAAG,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE,IAAI,kBAAG,GAAG,EAAE,GAAG,IAAK,KAAK,EAAE,CAAA;QAC5D,OAAO,IAAI,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,MAAM,EAAE,CAAA;IACtC,CAAC;IAED;;;;;;;OAOG;IACH,UAAU,CAAC,IAAY,EAAE,KAAuB;QAC9C,MAAM,GAAG,GAAG,IAAI,KAAK,EAAE,CAAA;QACvB,MAAM,UAAU,GAAG,CAAC,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,CAAC,CAAC,QAAQ,CAAC,GAAG,CAAC;YACnE,CAAC,CAAC,GAAG;YACL,CAAC,CAAC,IAAI,CAAA;QACR,MAAM,OAAO,GAAG,IAAI,CAAC,IAAI,CAAC,UAAU,EAAE,IAAI,CAAC,CAAA;QAC3C,OAAO,IAAI,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,MAAM,EAAE,CAAA;IACtC,CAAC;IAED;;;;OAIG;IACH,YAAY;QACV,MAAM,OAAO,GAAG,IAAI,CAAC,IAAI,CAAC,IAAI,EAAE,IAAI,CAAC,CAAA;QACrC,OAAO,IAAI,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,MAAM,EAAE,CAAA;IACtC,CAAC;IAED;;;;OAIG;IACH,QAAQ;QACN,MAAM,OAAO,GAAG,IAAI,CAAC,IAAI,CAAC,IAAI,EAAE,IAAI,CAAC,CAAA;QACrC,OAAO,IAAI,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,MAAM,EAAE,CAAA;IACtC,CAAC;IAED;;;;;;;OAOG;IACH,QAAQ,CAAC,IAAY,EAAE,IAAa;QAClC,MAAM,KAAK,qBACN,CAAC,IAAI,IAAI,EAAC,IAAI,EAAC,CAAC,CACpB,CAAA;QACD,MAAM,OAAO,GAAG,IAAI,CAAC,IAAI,CAAC,YAAY,EAAE,IAAI,EAAE,KAAK,CAAC,CAAA;QACpD,OAAO,IAAI,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,MAAM,EAAE,CAAA;IACtC,CAAC;IAED;;;;;;;OAOG;IACH,OAAO,CAAC,IAAY,EAAE,IAAY;QAChC,MAAM,OAAO,GAAG,IAAI,CAAC,IAAI,CAAC,GAAG,EAAE,IAAI,EAAE,EAAC,IAAI,EAAC,CAAC,CAAA;QAC5C,OAAO,IAAI,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,MAAM,EAAE,CAAA;IACtC,CAAC;CACF;AAED,MAAM,QAAQ,GAAG,IAAI,OAAO,EAAE,CAAA;AAE9B;;GAEG;AACU,QAAA,eAAe,GAAG,QAAQ,CAAA;AAC1B,QAAA,OAAO,GAAG,QAAQ,CAAA"} \ No newline at end of file diff --git a/node_modules/@actions/core/lib/utils.d.ts b/node_modules/@actions/core/lib/utils.d.ts new file mode 100644 index 
0000000..3b9e28d --- /dev/null +++ b/node_modules/@actions/core/lib/utils.d.ts @@ -0,0 +1,14 @@ +import { AnnotationProperties } from './core'; +import { CommandProperties } from './command'; +/** + * Sanitizes an input into a string so it can be passed into issueCommand safely + * @param input input to sanitize into a string + */ +export declare function toCommandValue(input: any): string; +/** + * + * @param annotationProperties + * @returns The command properties to send with the actual annotation command + * See IssueCommandProperties: https://github.com/actions/runner/blob/main/src/Runner.Worker/ActionCommandManager.cs#L646 + */ +export declare function toCommandProperties(annotationProperties: AnnotationProperties): CommandProperties; diff --git a/node_modules/@actions/core/lib/utils.js b/node_modules/@actions/core/lib/utils.js new file mode 100644 index 0000000..9b5ca44 --- /dev/null +++ b/node_modules/@actions/core/lib/utils.js @@ -0,0 +1,40 @@ +"use strict"; +// We use any as a valid input type +/* eslint-disable @typescript-eslint/no-explicit-any */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.toCommandProperties = exports.toCommandValue = void 0; +/** + * Sanitizes an input into a string so it can be passed into issueCommand safely + * @param input input to sanitize into a string + */ +function toCommandValue(input) { + if (input === null || input === undefined) { + return ''; + } + else if (typeof input === 'string' || input instanceof String) { + return input; + } + return JSON.stringify(input); +} +exports.toCommandValue = toCommandValue; +/** + * + * @param annotationProperties + * @returns The command properties to send with the actual annotation command + * See IssueCommandProperties: https://github.com/actions/runner/blob/main/src/Runner.Worker/ActionCommandManager.cs#L646 + */ +function toCommandProperties(annotationProperties) { + if (!Object.keys(annotationProperties).length) { + return {}; + } + return { + title: annotationProperties.title, + file: annotationProperties.file, + line: annotationProperties.startLine, + endLine: annotationProperties.endLine, + col: annotationProperties.startColumn, + endColumn: annotationProperties.endColumn + }; +} +exports.toCommandProperties = toCommandProperties; +//# sourceMappingURL=utils.js.map \ No newline at end of file diff --git a/node_modules/@actions/core/lib/utils.js.map b/node_modules/@actions/core/lib/utils.js.map new file mode 100644 index 0000000..8211bb7 --- /dev/null +++ b/node_modules/@actions/core/lib/utils.js.map @@ -0,0 +1 @@ +{"version":3,"file":"utils.js","sourceRoot":"","sources":["../src/utils.ts"],"names":[],"mappings":";AAAA,mCAAmC;AACnC,uDAAuD;;;AAKvD;;;GAGG;AACH,SAAgB,cAAc,CAAC,KAAU;IACvC,IAAI,KAAK,KAAK,IAAI,IAAI,KAAK,KAAK,SAAS,EAAE;QACzC,OAAO,EAAE,CAAA;KACV;SAAM,IAAI,OAAO,KAAK,KAAK,QAAQ,IAAI,KAAK,YAAY,MAAM,EAAE;QAC/D,OAAO,KAAe,CAAA;KACvB;IACD,OAAO,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,CAAA;AAC9B,CAAC;AAPD,wCAOC;AAED;;;;;GAKG;AACH,SAAgB,mBAAmB,CACjC,oBAA0C;IAE1C,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,oBAAoB,CAAC,CAAC,MAAM,EAAE;QAC7C,OAAO,EAAE,CAAA;KACV;IAED,OAAO;QACL,KAAK,EAAE,oBAAoB,CAAC,KAAK;QACjC,IAAI,EAAE,oBAAoB,CAAC,IAAI;QAC/B,IAAI,EAAE,oBAAoB,CAAC,SAAS;QACpC,OAAO,EAAE,oBAAoB,CAAC,OAAO;QACrC,GAAG,EAAE,oBAAoB,CAAC,WAAW;QACrC,SAAS,EAAE,oBAAoB,CAAC,SAAS;KAC1C,CAAA;AACH,CAAC;AAfD,kDAeC"} \ No newline at end of file diff --git a/node_modules/@actions/core/package.json b/node_modules/@actions/core/package.json new file mode 100644 index 0000000..6d60010 --- /dev/null +++ 
b/node_modules/@actions/core/package.json @@ -0,0 +1,45 @@ +{ + "name": "@actions/core", + "version": "1.11.1", + "description": "Actions core lib", + "keywords": [ + "github", + "actions", + "core" + ], + "homepage": "https://github.com/actions/toolkit/tree/main/packages/core", + "license": "MIT", + "main": "lib/core.js", + "types": "lib/core.d.ts", + "directories": { + "lib": "lib", + "test": "__tests__" + }, + "files": [ + "lib", + "!.DS_Store" + ], + "publishConfig": { + "access": "public" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/actions/toolkit.git", + "directory": "packages/core" + }, + "scripts": { + "audit-moderate": "npm install && npm audit --json --audit-level=moderate > audit.json", + "test": "echo \"Error: run tests from root\" && exit 1", + "tsc": "tsc -p tsconfig.json" + }, + "bugs": { + "url": "https://github.com/actions/toolkit/issues" + }, + "dependencies": { + "@actions/exec": "^1.1.1", + "@actions/http-client": "^2.0.1" + }, + "devDependencies": { + "@types/node": "^16.18.112" + } +} \ No newline at end of file diff --git a/node_modules/@actions/exec/LICENSE.md b/node_modules/@actions/exec/LICENSE.md new file mode 100644 index 0000000..dbae2ed --- /dev/null +++ b/node_modules/@actions/exec/LICENSE.md @@ -0,0 +1,9 @@ +The MIT License (MIT) + +Copyright 2019 GitHub + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
\ No newline at end of file diff --git a/node_modules/@actions/exec/README.md b/node_modules/@actions/exec/README.md new file mode 100644 index 0000000..53a6bf5 --- /dev/null +++ b/node_modules/@actions/exec/README.md @@ -0,0 +1,57 @@ +# `@actions/exec` + +## Usage + +#### Basic + +You can use this package to execute tools in a cross platform way: + +```js +const exec = require('@actions/exec'); + +await exec.exec('node index.js'); +``` + +#### Args + +You can also pass in arg arrays: + +```js +const exec = require('@actions/exec'); + +await exec.exec('node', ['index.js', 'foo=bar']); +``` + +#### Output/options + +Capture output or specify [other options](https://github.com/actions/toolkit/blob/d9347d4ab99fd507c0b9104b2cf79fb44fcc827d/packages/exec/src/interfaces.ts#L5): + +```js +const exec = require('@actions/exec'); + +let myOutput = ''; +let myError = ''; + +const options = {}; +options.listeners = { + stdout: (data: Buffer) => { + myOutput += data.toString(); + }, + stderr: (data: Buffer) => { + myError += data.toString(); + } +}; +options.cwd = './lib'; + +await exec.exec('node', ['index.js', 'foo=bar'], options); +``` + +#### Exec tools not in the PATH + +You can specify the full path for tools not in the PATH: + +```js +const exec = require('@actions/exec'); + +await exec.exec('"/path/to/my-tool"', ['arg1']); +``` diff --git a/node_modules/@actions/exec/lib/exec.d.ts b/node_modules/@actions/exec/lib/exec.d.ts new file mode 100644 index 0000000..baedcdb --- /dev/null +++ b/node_modules/@actions/exec/lib/exec.d.ts @@ -0,0 +1,24 @@ +import { ExecOptions, ExecOutput, ExecListeners } from './interfaces'; +export { ExecOptions, ExecOutput, ExecListeners }; +/** + * Exec a command. + * Output will be streamed to the live console. + * Returns promise with return code + * + * @param commandLine command to execute (can include additional args). Must be correctly escaped. + * @param args optional arguments for tool. Escaping is handled by the lib. + * @param options optional exec options. See ExecOptions + * @returns Promise exit code + */ +export declare function exec(commandLine: string, args?: string[], options?: ExecOptions): Promise; +/** + * Exec a command and get the output. + * Output will be streamed to the live console. + * Returns promise with the exit code and collected stdout and stderr + * + * @param commandLine command to execute (can include additional args). Must be correctly escaped. + * @param args optional arguments for tool. Escaping is handled by the lib. + * @param options optional exec options. See ExecOptions + * @returns Promise exit code, stdout, and stderr + */ +export declare function getExecOutput(commandLine: string, args?: string[], options?: ExecOptions): Promise; diff --git a/node_modules/@actions/exec/lib/exec.js b/node_modules/@actions/exec/lib/exec.js new file mode 100644 index 0000000..72c7a9c --- /dev/null +++ b/node_modules/@actions/exec/lib/exec.js @@ -0,0 +1,103 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getExecOutput = exports.exec = void 0; +const string_decoder_1 = require("string_decoder"); +const tr = __importStar(require("./toolrunner")); +/** + * Exec a command. + * Output will be streamed to the live console. + * Returns promise with return code + * + * @param commandLine command to execute (can include additional args). Must be correctly escaped. + * @param args optional arguments for tool. Escaping is handled by the lib. + * @param options optional exec options. See ExecOptions + * @returns Promise exit code + */ +function exec(commandLine, args, options) { + return __awaiter(this, void 0, void 0, function* () { + const commandArgs = tr.argStringToArray(commandLine); + if (commandArgs.length === 0) { + throw new Error(`Parameter 'commandLine' cannot be null or empty.`); + } + // Path to tool to execute should be first arg + const toolPath = commandArgs[0]; + args = commandArgs.slice(1).concat(args || []); + const runner = new tr.ToolRunner(toolPath, args, options); + return runner.exec(); + }); +} +exports.exec = exec; +/** + * Exec a command and get the output. + * Output will be streamed to the live console. + * Returns promise with the exit code and collected stdout and stderr + * + * @param commandLine command to execute (can include additional args). Must be correctly escaped. + * @param args optional arguments for tool. Escaping is handled by the lib. + * @param options optional exec options. See ExecOptions + * @returns Promise exit code, stdout, and stderr + */ +function getExecOutput(commandLine, args, options) { + var _a, _b; + return __awaiter(this, void 0, void 0, function* () { + let stdout = ''; + let stderr = ''; + //Using string decoder covers the case where a mult-byte character is split + const stdoutDecoder = new string_decoder_1.StringDecoder('utf8'); + const stderrDecoder = new string_decoder_1.StringDecoder('utf8'); + const originalStdoutListener = (_a = options === null || options === void 0 ? void 0 : options.listeners) === null || _a === void 0 ? void 0 : _a.stdout; + const originalStdErrListener = (_b = options === null || options === void 0 ? void 0 : options.listeners) === null || _b === void 0 ? 
void 0 : _b.stderr; + const stdErrListener = (data) => { + stderr += stderrDecoder.write(data); + if (originalStdErrListener) { + originalStdErrListener(data); + } + }; + const stdOutListener = (data) => { + stdout += stdoutDecoder.write(data); + if (originalStdoutListener) { + originalStdoutListener(data); + } + }; + const listeners = Object.assign(Object.assign({}, options === null || options === void 0 ? void 0 : options.listeners), { stdout: stdOutListener, stderr: stdErrListener }); + const exitCode = yield exec(commandLine, args, Object.assign(Object.assign({}, options), { listeners })); + //flush any remaining characters + stdout += stdoutDecoder.end(); + stderr += stderrDecoder.end(); + return { + exitCode, + stdout, + stderr + }; + }); +} +exports.getExecOutput = getExecOutput; +//# sourceMappingURL=exec.js.map \ No newline at end of file diff --git a/node_modules/@actions/exec/lib/exec.js.map b/node_modules/@actions/exec/lib/exec.js.map new file mode 100644 index 0000000..0762636 --- /dev/null +++ b/node_modules/@actions/exec/lib/exec.js.map @@ -0,0 +1 @@ +{"version":3,"file":"exec.js","sourceRoot":"","sources":["../src/exec.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,mDAA4C;AAE5C,iDAAkC;AAIlC;;;;;;;;;GASG;AACH,SAAsB,IAAI,CACxB,WAAmB,EACnB,IAAe,EACf,OAAqB;;QAErB,MAAM,WAAW,GAAG,EAAE,CAAC,gBAAgB,CAAC,WAAW,CAAC,CAAA;QACpD,IAAI,WAAW,CAAC,MAAM,KAAK,CAAC,EAAE;YAC5B,MAAM,IAAI,KAAK,CAAC,kDAAkD,CAAC,CAAA;SACpE;QACD,8CAA8C;QAC9C,MAAM,QAAQ,GAAG,WAAW,CAAC,CAAC,CAAC,CAAA;QAC/B,IAAI,GAAG,WAAW,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,IAAI,EAAE,CAAC,CAAA;QAC9C,MAAM,MAAM,GAAkB,IAAI,EAAE,CAAC,UAAU,CAAC,QAAQ,EAAE,IAAI,EAAE,OAAO,CAAC,CAAA;QACxE,OAAO,MAAM,CAAC,IAAI,EAAE,CAAA;IACtB,CAAC;CAAA;AAdD,oBAcC;AAED;;;;;;;;;GASG;AAEH,SAAsB,aAAa,CACjC,WAAmB,EACnB,IAAe,EACf,OAAqB;;;QAErB,IAAI,MAAM,GAAG,EAAE,CAAA;QACf,IAAI,MAAM,GAAG,EAAE,CAAA;QAEf,2EAA2E;QAC3E,MAAM,aAAa,GAAG,IAAI,8BAAa,CAAC,MAAM,CAAC,CAAA;QAC/C,MAAM,aAAa,GAAG,IAAI,8BAAa,CAAC,MAAM,CAAC,CAAA;QAE/C,MAAM,sBAAsB,SAAG,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,SAAS,0CAAE,MAAM,CAAA;QACzD,MAAM,sBAAsB,SAAG,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,SAAS,0CAAE,MAAM,CAAA;QAEzD,MAAM,cAAc,GAAG,CAAC,IAAY,EAAQ,EAAE;YAC5C,MAAM,IAAI,aAAa,CAAC,KAAK,CAAC,IAAI,CAAC,CAAA;YACnC,IAAI,sBAAsB,EAAE;gBAC1B,sBAAsB,CAAC,IAAI,CAAC,CAAA;aAC7B;QACH,CAAC,CAAA;QAED,MAAM,cAAc,GAAG,CAAC,IAAY,EAAQ,EAAE;YAC5C,MAAM,IAAI,aAAa,CAAC,KAAK,CAAC,IAAI,CAAC,CAAA;YACnC,IAAI,sBAAsB,EAAE;gBAC1B,sBAAsB,CAAC,IAAI,CAAC,CAAA;aAC7B;QACH,CAAC,CAAA;QAED,MAAM,SAAS,mCACV,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,SAAS,KACrB,MAAM,EAAE,cAAc,EACtB,MAAM,EAAE,cAAc,GACvB,CAAA;QAED,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,WAAW,EAAE,IAAI,kCAAM,OAAO,KAAE,SAAS,IAAE,CAAA;QAEvE,gCAAgC;QAChC,MAAM,IAAI,aAAa,CAAC,GAAG,EAAE,CAAA;QAC7B,MAAM,IAAI,aAAa,CAAC,GAAG,EAAE,CAAA;QAE7B,OAAO;YACL,QAAQ;YACR,MAAM;YACN,MAAM;SACP,CAAA;;CACF;AA9CD,sCA8CC"} \ No newline at end of file diff --git a/node_modules/@actions/exec/lib/interfaces.d.ts b/node_modules/@actions/exec/lib/interfaces.d.ts new file mode 100644 index 0000000..8ae20e4 --- /dev/null +++ b/node_modules/@actions/exec/lib/interfaces.d.ts @@ -0,0 +1,57 @@ +/// +import * as stream from 'stream'; +/** + * Interface for exec options + */ +export interface ExecOptions { + /** optional working directory. defaults to current */ + cwd?: string; + /** optional envvar dictionary. defaults to current process's env */ + env?: { + [key: string]: string; + }; + /** optional. defaults to false */ + silent?: boolean; + /** optional out stream to use. 
Defaults to process.stdout */ + outStream?: stream.Writable; + /** optional err stream to use. Defaults to process.stderr */ + errStream?: stream.Writable; + /** optional. whether to skip quoting/escaping arguments if needed. defaults to false. */ + windowsVerbatimArguments?: boolean; + /** optional. whether to fail if output to stderr. defaults to false */ + failOnStdErr?: boolean; + /** optional. defaults to failing on non zero. ignore will not fail leaving it up to the caller */ + ignoreReturnCode?: boolean; + /** optional. How long in ms to wait for STDIO streams to close after the exit event of the process before terminating. defaults to 10000 */ + delay?: number; + /** optional. input to write to the process on STDIN. */ + input?: Buffer; + /** optional. Listeners for output. Callback functions that will be called on these events */ + listeners?: ExecListeners; +} +/** + * Interface for the output of getExecOutput() + */ +export interface ExecOutput { + /**The exit code of the process */ + exitCode: number; + /**The entire stdout of the process as a string */ + stdout: string; + /**The entire stderr of the process as a string */ + stderr: string; +} +/** + * The user defined listeners for an exec call + */ +export interface ExecListeners { + /** A call back for each buffer of stdout */ + stdout?: (data: Buffer) => void; + /** A call back for each buffer of stderr */ + stderr?: (data: Buffer) => void; + /** A call back for each line of stdout */ + stdline?: (data: string) => void; + /** A call back for each line of stderr */ + errline?: (data: string) => void; + /** A call back for each debug log */ + debug?: (data: string) => void; +} diff --git a/node_modules/@actions/exec/lib/interfaces.js b/node_modules/@actions/exec/lib/interfaces.js new file mode 100644 index 0000000..db91911 --- /dev/null +++ b/node_modules/@actions/exec/lib/interfaces.js @@ -0,0 +1,3 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=interfaces.js.map \ No newline at end of file diff --git a/node_modules/@actions/exec/lib/interfaces.js.map b/node_modules/@actions/exec/lib/interfaces.js.map new file mode 100644 index 0000000..8fb5f7d --- /dev/null +++ b/node_modules/@actions/exec/lib/interfaces.js.map @@ -0,0 +1 @@ +{"version":3,"file":"interfaces.js","sourceRoot":"","sources":["../src/interfaces.ts"],"names":[],"mappings":""} \ No newline at end of file diff --git a/node_modules/@actions/exec/lib/toolrunner.d.ts b/node_modules/@actions/exec/lib/toolrunner.d.ts new file mode 100644 index 0000000..9bbbb1e --- /dev/null +++ b/node_modules/@actions/exec/lib/toolrunner.d.ts @@ -0,0 +1,37 @@ +/// +import * as events from 'events'; +import * as im from './interfaces'; +export declare class ToolRunner extends events.EventEmitter { + constructor(toolPath: string, args?: string[], options?: im.ExecOptions); + private toolPath; + private args; + private options; + private _debug; + private _getCommandString; + private _processLineBuffer; + private _getSpawnFileName; + private _getSpawnArgs; + private _endsWith; + private _isCmdFile; + private _windowsQuoteCmdArg; + private _uvQuoteCmdArg; + private _cloneExecOptions; + private _getSpawnOptions; + /** + * Exec a tool. + * Output will be streamed to the live console. + * Returns promise with return code + * + * @param tool path to tool to exec + * @param options optional exec options. See ExecOptions + * @returns number + */ + exec(): Promise; +} +/** + * Convert an arg string to an array of args. 
Handles escaping + * + * @param argString string of arguments + * @returns string[] array of arguments + */ +export declare function argStringToArray(argString: string): string[]; diff --git a/node_modules/@actions/exec/lib/toolrunner.js b/node_modules/@actions/exec/lib/toolrunner.js new file mode 100644 index 0000000..e456a72 --- /dev/null +++ b/node_modules/@actions/exec/lib/toolrunner.js @@ -0,0 +1,618 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.argStringToArray = exports.ToolRunner = void 0; +const os = __importStar(require("os")); +const events = __importStar(require("events")); +const child = __importStar(require("child_process")); +const path = __importStar(require("path")); +const io = __importStar(require("@actions/io")); +const ioUtil = __importStar(require("@actions/io/lib/io-util")); +const timers_1 = require("timers"); +/* eslint-disable @typescript-eslint/unbound-method */ +const IS_WINDOWS = process.platform === 'win32'; +/* + * Class for running command line tools. Handles quoting and arg parsing in a platform agnostic way. + */ +class ToolRunner extends events.EventEmitter { + constructor(toolPath, args, options) { + super(); + if (!toolPath) { + throw new Error("Parameter 'toolPath' cannot be null or empty."); + } + this.toolPath = toolPath; + this.args = args || []; + this.options = options || {}; + } + _debug(message) { + if (this.options.listeners && this.options.listeners.debug) { + this.options.listeners.debug(message); + } + } + _getCommandString(options, noPrefix) { + const toolPath = this._getSpawnFileName(); + const args = this._getSpawnArgs(options); + let cmd = noPrefix ? 
'' : '[command]'; // omit prefix when piped to a second tool + if (IS_WINDOWS) { + // Windows + cmd file + if (this._isCmdFile()) { + cmd += toolPath; + for (const a of args) { + cmd += ` ${a}`; + } + } + // Windows + verbatim + else if (options.windowsVerbatimArguments) { + cmd += `"${toolPath}"`; + for (const a of args) { + cmd += ` ${a}`; + } + } + // Windows (regular) + else { + cmd += this._windowsQuoteCmdArg(toolPath); + for (const a of args) { + cmd += ` ${this._windowsQuoteCmdArg(a)}`; + } + } + } + else { + // OSX/Linux - this can likely be improved with some form of quoting. + // creating processes on Unix is fundamentally different than Windows. + // on Unix, execvp() takes an arg array. + cmd += toolPath; + for (const a of args) { + cmd += ` ${a}`; + } + } + return cmd; + } + _processLineBuffer(data, strBuffer, onLine) { + try { + let s = strBuffer + data.toString(); + let n = s.indexOf(os.EOL); + while (n > -1) { + const line = s.substring(0, n); + onLine(line); + // the rest of the string ... + s = s.substring(n + os.EOL.length); + n = s.indexOf(os.EOL); + } + return s; + } + catch (err) { + // streaming lines to console is best effort. Don't fail a build. + this._debug(`error processing line. Failed with error ${err}`); + return ''; + } + } + _getSpawnFileName() { + if (IS_WINDOWS) { + if (this._isCmdFile()) { + return process.env['COMSPEC'] || 'cmd.exe'; + } + } + return this.toolPath; + } + _getSpawnArgs(options) { + if (IS_WINDOWS) { + if (this._isCmdFile()) { + let argline = `/D /S /C "${this._windowsQuoteCmdArg(this.toolPath)}`; + for (const a of this.args) { + argline += ' '; + argline += options.windowsVerbatimArguments + ? a + : this._windowsQuoteCmdArg(a); + } + argline += '"'; + return [argline]; + } + } + return this.args; + } + _endsWith(str, end) { + return str.endsWith(end); + } + _isCmdFile() { + const upperToolPath = this.toolPath.toUpperCase(); + return (this._endsWith(upperToolPath, '.CMD') || + this._endsWith(upperToolPath, '.BAT')); + } + _windowsQuoteCmdArg(arg) { + // for .exe, apply the normal quoting rules that libuv applies + if (!this._isCmdFile()) { + return this._uvQuoteCmdArg(arg); + } + // otherwise apply quoting rules specific to the cmd.exe command line parser. + // the libuv rules are generic and are not designed specifically for cmd.exe + // command line parser. + // + // for a detailed description of the cmd.exe command line parser, refer to + // http://stackoverflow.com/questions/4094699/how-does-the-windows-command-interpreter-cmd-exe-parse-scripts/7970912#7970912 + // need quotes for empty arg + if (!arg) { + return '""'; + } + // determine whether the arg needs to be quoted + const cmdSpecialChars = [ + ' ', + '\t', + '&', + '(', + ')', + '[', + ']', + '{', + '}', + '^', + '=', + ';', + '!', + "'", + '+', + ',', + '`', + '~', + '|', + '<', + '>', + '"' + ]; + let needsQuotes = false; + for (const char of arg) { + if (cmdSpecialChars.some(x => x === char)) { + needsQuotes = true; + break; + } + } + // short-circuit if quotes not needed + if (!needsQuotes) { + return arg; + } + // the following quoting rules are very similar to the rules that by libuv applies. + // + // 1) wrap the string in quotes + // + // 2) double-up quotes - i.e. " => "" + // + // this is different from the libuv quoting rules. libuv replaces " with \", which unfortunately + // doesn't work well with a cmd.exe command line. + // + // note, replacing " with "" also works well if the arg is passed to a downstream .NET console app. 
+ // for example, the command line: + // foo.exe "myarg:""my val""" + // is parsed by a .NET console app into an arg array: + // [ "myarg:\"my val\"" ] + // which is the same end result when applying libuv quoting rules. although the actual + // command line from libuv quoting rules would look like: + // foo.exe "myarg:\"my val\"" + // + // 3) double-up slashes that precede a quote, + // e.g. hello \world => "hello \world" + // hello\"world => "hello\\""world" + // hello\\"world => "hello\\\\""world" + // hello world\ => "hello world\\" + // + // technically this is not required for a cmd.exe command line, or the batch argument parser. + // the reasons for including this as a .cmd quoting rule are: + // + // a) this is optimized for the scenario where the argument is passed from the .cmd file to an + // external program. many programs (e.g. .NET console apps) rely on the slash-doubling rule. + // + // b) it's what we've been doing previously (by deferring to node default behavior) and we + // haven't heard any complaints about that aspect. + // + // note, a weakness of the quoting rules chosen here, is that % is not escaped. in fact, % cannot be + // escaped when used on the command line directly - even though within a .cmd file % can be escaped + // by using %%. + // + // the saving grace is, on the command line, %var% is left as-is if var is not defined. this contrasts + // the line parsing rules within a .cmd file, where if var is not defined it is replaced with nothing. + // + // one option that was explored was replacing % with ^% - i.e. %var% => ^%var^%. this hack would + // often work, since it is unlikely that var^ would exist, and the ^ character is removed when the + // variable is used. the problem, however, is that ^ is not removed when %* is used to pass the args + // to an external program. + // + // an unexplored potential solution for the % escaping problem, is to create a wrapper .cmd file. + // % can be escaped within a .cmd file. + let reverse = '"'; + let quoteHit = true; + for (let i = arg.length; i > 0; i--) { + // walk the string in reverse + reverse += arg[i - 1]; + if (quoteHit && arg[i - 1] === '\\') { + reverse += '\\'; // double the slash + } + else if (arg[i - 1] === '"') { + quoteHit = true; + reverse += '"'; // double the quote + } + else { + quoteHit = false; + } + } + reverse += '"'; + return reverse + .split('') + .reverse() + .join(''); + } + _uvQuoteCmdArg(arg) { + // Tool runner wraps child_process.spawn() and needs to apply the same quoting as + // Node in certain cases where the undocumented spawn option windowsVerbatimArguments + // is used. + // + // Since this function is a port of quote_cmd_arg from Node 4.x (technically, lib UV, + // see https://github.com/nodejs/node/blob/v4.x/deps/uv/src/win/process.c for details), + // pasting copyright notice from Node within this function: + // + // Copyright Joyent, Inc. and other Node contributors. All rights reserved. 
+ // + // Permission is hereby granted, free of charge, to any person obtaining a copy + // of this software and associated documentation files (the "Software"), to + // deal in the Software without restriction, including without limitation the + // rights to use, copy, modify, merge, publish, distribute, sublicense, and/or + // sell copies of the Software, and to permit persons to whom the Software is + // furnished to do so, subject to the following conditions: + // + // The above copyright notice and this permission notice shall be included in + // all copies or substantial portions of the Software. + // + // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + // FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS + // IN THE SOFTWARE. + if (!arg) { + // Need double quotation for empty argument + return '""'; + } + if (!arg.includes(' ') && !arg.includes('\t') && !arg.includes('"')) { + // No quotation needed + return arg; + } + if (!arg.includes('"') && !arg.includes('\\')) { + // No embedded double quotes or backslashes, so I can just wrap + // quote marks around the whole thing. + return `"${arg}"`; + } + // Expected input/output: + // input : hello"world + // output: "hello\"world" + // input : hello""world + // output: "hello\"\"world" + // input : hello\world + // output: hello\world + // input : hello\\world + // output: hello\\world + // input : hello\"world + // output: "hello\\\"world" + // input : hello\\"world + // output: "hello\\\\\"world" + // input : hello world\ + // output: "hello world\\" - note the comment in libuv actually reads "hello world\" + // but it appears the comment is wrong, it should be "hello world\\" + let reverse = '"'; + let quoteHit = true; + for (let i = arg.length; i > 0; i--) { + // walk the string in reverse + reverse += arg[i - 1]; + if (quoteHit && arg[i - 1] === '\\') { + reverse += '\\'; + } + else if (arg[i - 1] === '"') { + quoteHit = true; + reverse += '\\'; + } + else { + quoteHit = false; + } + } + reverse += '"'; + return reverse + .split('') + .reverse() + .join(''); + } + _cloneExecOptions(options) { + options = options || {}; + const result = { + cwd: options.cwd || process.cwd(), + env: options.env || process.env, + silent: options.silent || false, + windowsVerbatimArguments: options.windowsVerbatimArguments || false, + failOnStdErr: options.failOnStdErr || false, + ignoreReturnCode: options.ignoreReturnCode || false, + delay: options.delay || 10000 + }; + result.outStream = options.outStream || process.stdout; + result.errStream = options.errStream || process.stderr; + return result; + } + _getSpawnOptions(options, toolPath) { + options = options || {}; + const result = {}; + result.cwd = options.cwd; + result.env = options.env; + result['windowsVerbatimArguments'] = + options.windowsVerbatimArguments || this._isCmdFile(); + if (options.windowsVerbatimArguments) { + result.argv0 = `"${toolPath}"`; + } + return result; + } + /** + * Exec a tool. + * Output will be streamed to the live console. + * Returns promise with return code + * + * @param tool path to tool to exec + * @param options optional exec options. 
See ExecOptions + * @returns number + */ + exec() { + return __awaiter(this, void 0, void 0, function* () { + // root the tool path if it is unrooted and contains relative pathing + if (!ioUtil.isRooted(this.toolPath) && + (this.toolPath.includes('/') || + (IS_WINDOWS && this.toolPath.includes('\\')))) { + // prefer options.cwd if it is specified, however options.cwd may also need to be rooted + this.toolPath = path.resolve(process.cwd(), this.options.cwd || process.cwd(), this.toolPath); + } + // if the tool is only a file name, then resolve it from the PATH + // otherwise verify it exists (add extension on Windows if necessary) + this.toolPath = yield io.which(this.toolPath, true); + return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () { + this._debug(`exec tool: ${this.toolPath}`); + this._debug('arguments:'); + for (const arg of this.args) { + this._debug(` ${arg}`); + } + const optionsNonNull = this._cloneExecOptions(this.options); + if (!optionsNonNull.silent && optionsNonNull.outStream) { + optionsNonNull.outStream.write(this._getCommandString(optionsNonNull) + os.EOL); + } + const state = new ExecState(optionsNonNull, this.toolPath); + state.on('debug', (message) => { + this._debug(message); + }); + if (this.options.cwd && !(yield ioUtil.exists(this.options.cwd))) { + return reject(new Error(`The cwd: ${this.options.cwd} does not exist!`)); + } + const fileName = this._getSpawnFileName(); + const cp = child.spawn(fileName, this._getSpawnArgs(optionsNonNull), this._getSpawnOptions(this.options, fileName)); + let stdbuffer = ''; + if (cp.stdout) { + cp.stdout.on('data', (data) => { + if (this.options.listeners && this.options.listeners.stdout) { + this.options.listeners.stdout(data); + } + if (!optionsNonNull.silent && optionsNonNull.outStream) { + optionsNonNull.outStream.write(data); + } + stdbuffer = this._processLineBuffer(data, stdbuffer, (line) => { + if (this.options.listeners && this.options.listeners.stdline) { + this.options.listeners.stdline(line); + } + }); + }); + } + let errbuffer = ''; + if (cp.stderr) { + cp.stderr.on('data', (data) => { + state.processStderr = true; + if (this.options.listeners && this.options.listeners.stderr) { + this.options.listeners.stderr(data); + } + if (!optionsNonNull.silent && + optionsNonNull.errStream && + optionsNonNull.outStream) { + const s = optionsNonNull.failOnStdErr + ? 
optionsNonNull.errStream + : optionsNonNull.outStream; + s.write(data); + } + errbuffer = this._processLineBuffer(data, errbuffer, (line) => { + if (this.options.listeners && this.options.listeners.errline) { + this.options.listeners.errline(line); + } + }); + }); + } + cp.on('error', (err) => { + state.processError = err.message; + state.processExited = true; + state.processClosed = true; + state.CheckComplete(); + }); + cp.on('exit', (code) => { + state.processExitCode = code; + state.processExited = true; + this._debug(`Exit code ${code} received from tool '${this.toolPath}'`); + state.CheckComplete(); + }); + cp.on('close', (code) => { + state.processExitCode = code; + state.processExited = true; + state.processClosed = true; + this._debug(`STDIO streams have closed for tool '${this.toolPath}'`); + state.CheckComplete(); + }); + state.on('done', (error, exitCode) => { + if (stdbuffer.length > 0) { + this.emit('stdline', stdbuffer); + } + if (errbuffer.length > 0) { + this.emit('errline', errbuffer); + } + cp.removeAllListeners(); + if (error) { + reject(error); + } + else { + resolve(exitCode); + } + }); + if (this.options.input) { + if (!cp.stdin) { + throw new Error('child process missing stdin'); + } + cp.stdin.end(this.options.input); + } + })); + }); + } +} +exports.ToolRunner = ToolRunner; +/** + * Convert an arg string to an array of args. Handles escaping + * + * @param argString string of arguments + * @returns string[] array of arguments + */ +function argStringToArray(argString) { + const args = []; + let inQuotes = false; + let escaped = false; + let arg = ''; + function append(c) { + // we only escape double quotes. + if (escaped && c !== '"') { + arg += '\\'; + } + arg += c; + escaped = false; + } + for (let i = 0; i < argString.length; i++) { + const c = argString.charAt(i); + if (c === '"') { + if (!escaped) { + inQuotes = !inQuotes; + } + else { + append(c); + } + continue; + } + if (c === '\\' && escaped) { + append(c); + continue; + } + if (c === '\\' && inQuotes) { + escaped = true; + continue; + } + if (c === ' ' && !inQuotes) { + if (arg.length > 0) { + args.push(arg); + arg = ''; + } + continue; + } + append(c); + } + if (arg.length > 0) { + args.push(arg.trim()); + } + return args; +} +exports.argStringToArray = argStringToArray; +class ExecState extends events.EventEmitter { + constructor(options, toolPath) { + super(); + this.processClosed = false; // tracks whether the process has exited and stdio is closed + this.processError = ''; + this.processExitCode = 0; + this.processExited = false; // tracks whether the process has exited + this.processStderr = false; // tracks whether stderr was written to + this.delay = 10000; // 10 seconds + this.done = false; + this.timeout = null; + if (!toolPath) { + throw new Error('toolPath must not be empty'); + } + this.options = options; + this.toolPath = toolPath; + if (options.delay) { + this.delay = options.delay; + } + } + CheckComplete() { + if (this.done) { + return; + } + if (this.processClosed) { + this._setResult(); + } + else if (this.processExited) { + this.timeout = timers_1.setTimeout(ExecState.HandleTimeout, this.delay, this); + } + } + _debug(message) { + this.emit('debug', message); + } + _setResult() { + // determine whether there is an error + let error; + if (this.processExited) { + if (this.processError) { + error = new Error(`There was an error when attempting to execute the process '${this.toolPath}'. This may indicate the process failed to start. 
Error: ${this.processError}`); + } + else if (this.processExitCode !== 0 && !this.options.ignoreReturnCode) { + error = new Error(`The process '${this.toolPath}' failed with exit code ${this.processExitCode}`); + } + else if (this.processStderr && this.options.failOnStdErr) { + error = new Error(`The process '${this.toolPath}' failed because one or more lines were written to the STDERR stream`); + } + } + // clear the timeout + if (this.timeout) { + clearTimeout(this.timeout); + this.timeout = null; + } + this.done = true; + this.emit('done', error, this.processExitCode); + } + static HandleTimeout(state) { + if (state.done) { + return; + } + if (!state.processClosed && state.processExited) { + const message = `The STDIO streams did not close within ${state.delay / + 1000} seconds of the exit event from process '${state.toolPath}'. This may indicate a child process inherited the STDIO streams and has not yet exited.`; + state._debug(message); + } + state._setResult(); + } +} +//# sourceMappingURL=toolrunner.js.map \ No newline at end of file diff --git a/node_modules/@actions/exec/lib/toolrunner.js.map b/node_modules/@actions/exec/lib/toolrunner.js.map new file mode 100644 index 0000000..6eaf183 --- /dev/null +++ b/node_modules/@actions/exec/lib/toolrunner.js.map @@ -0,0 +1 @@ +{"version":3,"file":"toolrunner.js","sourceRoot":"","sources":["../src/toolrunner.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,uCAAwB;AACxB,+CAAgC;AAChC,qDAAsC;AACtC,2CAA4B;AAG5B,gDAAiC;AACjC,gEAAiD;AACjD,mCAAiC;AAEjC,sDAAsD;AAEtD,MAAM,UAAU,GAAG,OAAO,CAAC,QAAQ,KAAK,OAAO,CAAA;AAE/C;;GAEG;AACH,MAAa,UAAW,SAAQ,MAAM,CAAC,YAAY;IACjD,YAAY,QAAgB,EAAE,IAAe,EAAE,OAAwB;QACrE,KAAK,EAAE,CAAA;QAEP,IAAI,CAAC,QAAQ,EAAE;YACb,MAAM,IAAI,KAAK,CAAC,+CAA+C,CAAC,CAAA;SACjE;QAED,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAA;QACxB,IAAI,CAAC,IAAI,GAAG,IAAI,IAAI,EAAE,CAAA;QACtB,IAAI,CAAC,OAAO,GAAG,OAAO,IAAI,EAAE,CAAA;IAC9B,CAAC;IAMO,MAAM,CAAC,OAAe;QAC5B,IAAI,IAAI,CAAC,OAAO,CAAC,SAAS,IAAI,IAAI,CAAC,OAAO,CAAC,SAAS,CAAC,KAAK,EAAE;YAC1D,IAAI,CAAC,OAAO,CAAC,SAAS,CAAC,KAAK,CAAC,OAAO,CAAC,CAAA;SACtC;IACH,CAAC;IAEO,iBAAiB,CACvB,OAAuB,EACvB,QAAkB;QAElB,MAAM,QAAQ,GAAG,IAAI,CAAC,iBAAiB,EAAE,CAAA;QACzC,MAAM,IAAI,GAAG,IAAI,CAAC,aAAa,CAAC,OAAO,CAAC,CAAA;QACxC,IAAI,GAAG,GAAG,QAAQ,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,WAAW,CAAA,CAAC,0CAA0C;QAChF,IAAI,UAAU,EAAE;YACd,qBAAqB;YACrB,IAAI,IAAI,CAAC,UAAU,EAAE,EAAE;gBACrB,GAAG,IAAI,QAAQ,CAAA;gBACf,KAAK,MAAM,CAAC,IAAI,IAAI,EAAE;oBACpB,GAAG,IAAI,IAAI,CAAC,EAAE,CAAA;iBACf;aACF;YACD,qBAAqB;iBAChB,IAAI,OAAO,CAAC,wBAAwB,EAAE;gBACzC,GAAG,IAAI,IAAI,QAAQ,GAAG,CAAA;gBACtB,KAAK,MAAM,CAAC,IAAI,IAAI,EAAE;oBACpB,GAAG,IAAI,IAAI,CAAC,EAAE,CAAA;iBACf;aACF;YACD,oBAAoB;iBACf;gBACH,GAAG,IAAI,IAAI,CAAC,mBAAmB,CAAC,QAAQ,CAAC,CAAA;gBACzC,KAAK,MAAM,CAAC,IAAI,IAAI,EAAE;oBACpB,GAAG,IAAI,IAAI,IAAI,CAAC,mBAAmB,CAAC,CAAC,CAAC,EAAE,CAAA;iBACzC;aACF;SACF;aAAM;YACL,qEAAqE;YACrE,sEAAsE;YACtE,wCAAwC;YACxC,GAAG,IAAI,QAAQ,CAAA;YACf,KAAK,MAAM,CAAC,IAAI,IAAI,EAAE;gBACpB,GAAG,IAAI,IAAI,CAAC,EAAE,CAAA;aACf;SACF;QAED,OAAO,GAAG,CAAA;IACZ,CAAC;IAEO,kBAAkB,CACxB,IAAY,EACZ,SAAiB,EACjB,MAA8B;QAE9B,IAAI;YACF,IAAI,CAAC,GAAG,SAAS,GAAG,IAAI,CAAC,QAAQ,EAAE,CAAA;YACnC,IAAI,CAAC,GAAG,CAAC,CAAC,OAAO,CAAC,EAAE,CAAC,GAAG,CAAC,CAAA;YAEzB,OAAO,CAAC,GAAG,CAAC,CAAC,EAAE;gBACb,MAAM,IAAI,GAAG,CAAC,CAAC,SAAS,CAAC,CAAC,EAAE,CAAC,CAAC,CAAA;gBAC9B,MAAM,CAAC,IAAI,CAAC,CAAA;gBAEZ,6BAA6B;gBAC7B,CAAC,GAAG,CAAC,CAAC,SAAS,CAAC,CAAC,GAAG,EAAE,CAAC,GAAG,CAAC,MAAM,CAAC,CAAA;gBAClC,CAAC,GAAG,CAAC,CAAC,OAAO,CAAC,EAAE,CAAC,GAAG,CAAC,CAAA;aACtB;YAED,OAAO,CAAC,CAAA;SACT;QAAC,OAAO,GAAG,EAAE;YACZ,
kEAAkE;YAClE,IAAI,CAAC,MAAM,CAAC,4CAA4C,GAAG,EAAE,CAAC,CAAA;YAE9D,OAAO,EAAE,CAAA;SACV;IACH,CAAC;IAEO,iBAAiB;QACvB,IAAI,UAAU,EAAE;YACd,IAAI,IAAI,CAAC,UAAU,EAAE,EAAE;gBACrB,OAAO,OAAO,CAAC,GAAG,CAAC,SAAS,CAAC,IAAI,SAAS,CAAA;aAC3C;SACF;QAED,OAAO,IAAI,CAAC,QAAQ,CAAA;IACtB,CAAC;IAEO,aAAa,CAAC,OAAuB;QAC3C,IAAI,UAAU,EAAE;YACd,IAAI,IAAI,CAAC,UAAU,EAAE,EAAE;gBACrB,IAAI,OAAO,GAAG,aAAa,IAAI,CAAC,mBAAmB,CAAC,IAAI,CAAC,QAAQ,CAAC,EAAE,CAAA;gBACpE,KAAK,MAAM,CAAC,IAAI,IAAI,CAAC,IAAI,EAAE;oBACzB,OAAO,IAAI,GAAG,CAAA;oBACd,OAAO,IAAI,OAAO,CAAC,wBAAwB;wBACzC,CAAC,CAAC,CAAC;wBACH,CAAC,CAAC,IAAI,CAAC,mBAAmB,CAAC,CAAC,CAAC,CAAA;iBAChC;gBAED,OAAO,IAAI,GAAG,CAAA;gBACd,OAAO,CAAC,OAAO,CAAC,CAAA;aACjB;SACF;QAED,OAAO,IAAI,CAAC,IAAI,CAAA;IAClB,CAAC;IAEO,SAAS,CAAC,GAAW,EAAE,GAAW;QACxC,OAAO,GAAG,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAA;IAC1B,CAAC;IAEO,UAAU;QAChB,MAAM,aAAa,GAAW,IAAI,CAAC,QAAQ,CAAC,WAAW,EAAE,CAAA;QACzD,OAAO,CACL,IAAI,CAAC,SAAS,CAAC,aAAa,EAAE,MAAM,CAAC;YACrC,IAAI,CAAC,SAAS,CAAC,aAAa,EAAE,MAAM,CAAC,CACtC,CAAA;IACH,CAAC;IAEO,mBAAmB,CAAC,GAAW;QACrC,8DAA8D;QAC9D,IAAI,CAAC,IAAI,CAAC,UAAU,EAAE,EAAE;YACtB,OAAO,IAAI,CAAC,cAAc,CAAC,GAAG,CAAC,CAAA;SAChC;QAED,6EAA6E;QAC7E,4EAA4E;QAC5E,uBAAuB;QACvB,EAAE;QACF,0EAA0E;QAC1E,4HAA4H;QAE5H,4BAA4B;QAC5B,IAAI,CAAC,GAAG,EAAE;YACR,OAAO,IAAI,CAAA;SACZ;QAED,+CAA+C;QAC/C,MAAM,eAAe,GAAG;YACtB,GAAG;YACH,IAAI;YACJ,GAAG;YACH,GAAG;YACH,GAAG;YACH,GAAG;YACH,GAAG;YACH,GAAG;YACH,GAAG;YACH,GAAG;YACH,GAAG;YACH,GAAG;YACH,GAAG;YACH,GAAG;YACH,GAAG;YACH,GAAG;YACH,GAAG;YACH,GAAG;YACH,GAAG;YACH,GAAG;YACH,GAAG;YACH,GAAG;SACJ,CAAA;QACD,IAAI,WAAW,GAAG,KAAK,CAAA;QACvB,KAAK,MAAM,IAAI,IAAI,GAAG,EAAE;YACtB,IAAI,eAAe,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,KAAK,IAAI,CAAC,EAAE;gBACzC,WAAW,GAAG,IAAI,CAAA;gBAClB,MAAK;aACN;SACF;QAED,qCAAqC;QACrC,IAAI,CAAC,WAAW,EAAE;YAChB,OAAO,GAAG,CAAA;SACX;QAED,mFAAmF;QACnF,EAAE;QACF,+BAA+B;QAC/B,EAAE;QACF,qCAAqC;QACrC,EAAE;QACF,mGAAmG;QACnG,oDAAoD;QACpD,EAAE;QACF,sGAAsG;QACtG,oCAAoC;QACpC,sCAAsC;QACtC,wDAAwD;QACxD,kCAAkC;QAClC,yFAAyF;QACzF,4DAA4D;QAC5D,sCAAsC;QACtC,EAAE;QACF,6CAA6C;QAC7C,6CAA6C;QAC7C,+CAA+C;QAC/C,iDAAiD;QACjD,8CAA8C;QAC9C,EAAE;QACF,gGAAgG;QAChG,gEAAgE;QAChE,EAAE;QACF,iGAAiG;QACjG,kGAAkG;QAClG,EAAE;QACF,6FAA6F;QAC7F,wDAAwD;QACxD,EAAE;QACF,oGAAoG;QACpG,mGAAmG;QACnG,eAAe;QACf,EAAE;QACF,sGAAsG;QACtG,sGAAsG;QACtG,EAAE;QACF,gGAAgG;QAChG,kGAAkG;QAClG,oGAAoG;QACpG,0BAA0B;QAC1B,EAAE;QACF,iGAAiG;QACjG,uCAAuC;QACvC,IAAI,OAAO,GAAG,GAAG,CAAA;QACjB,IAAI,QAAQ,GAAG,IAAI,CAAA;QACnB,KAAK,IAAI,CAAC,GAAG,GAAG,CAAC,MAAM,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE,EAAE;YACnC,6BAA6B;YAC7B,OAAO,IAAI,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,CAAA;YACrB,IAAI,QAAQ,IAAI,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,KAAK,IAAI,EAAE;gBACnC,OAAO,IAAI,IAAI,CAAA,CAAC,mBAAmB;aACpC;iBAAM,IAAI,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,KAAK,GAAG,EAAE;gBAC7B,QAAQ,GAAG,IAAI,CAAA;gBACf,OAAO,IAAI,GAAG,CAAA,CAAC,mBAAmB;aACnC;iBAAM;gBACL,QAAQ,GAAG,KAAK,CAAA;aACjB;SACF;QAED,OAAO,IAAI,GAAG,CAAA;QACd,OAAO,OAAO;aACX,KAAK,CAAC,EAAE,CAAC;aACT,OAAO,EAAE;aACT,IAAI,CAAC,EAAE,CAAC,CAAA;IACb,CAAC;IAEO,cAAc,CAAC,GAAW;QAChC,iFAAiF;QACjF,qFAAqF;QACrF,WAAW;QACX,EAAE;QACF,qFAAqF;QACrF,uFAAuF;QACvF,2DAA2D;QAC3D,EAAE;QACF,gFAAgF;QAChF,EAAE;QACF,oFAAoF;QACpF,gFAAgF;QAChF,kFAAkF;QAClF,mFAAmF;QACnF,kFAAkF;QAClF,gEAAgE;QAChE,EAAE;QACF,kFAAkF;QAClF,2DAA2D;QAC3D,EAAE;QACF,kFAAkF;QAClF,gFAAgF;QAChF,mFAAmF;QACnF,8EAA8E;QAC9E,+EAA+E;QAC/E,oFAAoF;QACpF,wBAAwB;QAExB,IAAI,CAAC,GAAG,EAAE;YACR,2CAA2C;YAC3C,OAAO,IAAI,CAAA;SACZ;QAED,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,GAAG,CAAC,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,GAAG,CAAC,EAAE;YACnE,sBAAsB;YACtB,OAAO,GAAG,CAAA;SACX;QA
ED,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,GAAG,CAAC,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,IAAI,CAAC,EAAE;YAC7C,+DAA+D;YAC/D,sCAAsC;YACtC,OAAO,IAAI,GAAG,GAAG,CAAA;SAClB;QAED,yBAAyB;QACzB,wBAAwB;QACxB,2BAA2B;QAC3B,yBAAyB;QACzB,6BAA6B;QAC7B,wBAAwB;QACxB,wBAAwB;QACxB,yBAAyB;QACzB,yBAAyB;QACzB,yBAAyB;QACzB,6BAA6B;QAC7B,0BAA0B;QAC1B,+BAA+B;QAC/B,yBAAyB;QACzB,sFAAsF;QACtF,gGAAgG;QAChG,IAAI,OAAO,GAAG,GAAG,CAAA;QACjB,IAAI,QAAQ,GAAG,IAAI,CAAA;QACnB,KAAK,IAAI,CAAC,GAAG,GAAG,CAAC,MAAM,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE,EAAE;YACnC,6BAA6B;YAC7B,OAAO,IAAI,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,CAAA;YACrB,IAAI,QAAQ,IAAI,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,KAAK,IAAI,EAAE;gBACnC,OAAO,IAAI,IAAI,CAAA;aAChB;iBAAM,IAAI,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,KAAK,GAAG,EAAE;gBAC7B,QAAQ,GAAG,IAAI,CAAA;gBACf,OAAO,IAAI,IAAI,CAAA;aAChB;iBAAM;gBACL,QAAQ,GAAG,KAAK,CAAA;aACjB;SACF;QAED,OAAO,IAAI,GAAG,CAAA;QACd,OAAO,OAAO;aACX,KAAK,CAAC,EAAE,CAAC;aACT,OAAO,EAAE;aACT,IAAI,CAAC,EAAE,CAAC,CAAA;IACb,CAAC;IAEO,iBAAiB,CAAC,OAAwB;QAChD,OAAO,GAAG,OAAO,IAAoB,EAAE,CAAA;QACvC,MAAM,MAAM,GAAmC;YAC7C,GAAG,EAAE,OAAO,CAAC,GAAG,IAAI,OAAO,CAAC,GAAG,EAAE;YACjC,GAAG,EAAE,OAAO,CAAC,GAAG,IAAI,OAAO,CAAC,GAAG;YAC/B,MAAM,EAAE,OAAO,CAAC,MAAM,IAAI,KAAK;YAC/B,wBAAwB,EAAE,OAAO,CAAC,wBAAwB,IAAI,KAAK;YACnE,YAAY,EAAE,OAAO,CAAC,YAAY,IAAI,KAAK;YAC3C,gBAAgB,EAAE,OAAO,CAAC,gBAAgB,IAAI,KAAK;YACnD,KAAK,EAAE,OAAO,CAAC,KAAK,IAAI,KAAK;SAC9B,CAAA;QACD,MAAM,CAAC,SAAS,GAAG,OAAO,CAAC,SAAS,IAAqB,OAAO,CAAC,MAAM,CAAA;QACvE,MAAM,CAAC,SAAS,GAAG,OAAO,CAAC,SAAS,IAAqB,OAAO,CAAC,MAAM,CAAA;QACvE,OAAO,MAAM,CAAA;IACf,CAAC;IAEO,gBAAgB,CACtB,OAAuB,EACvB,QAAgB;QAEhB,OAAO,GAAG,OAAO,IAAoB,EAAE,CAAA;QACvC,MAAM,MAAM,GAAuB,EAAE,CAAA;QACrC,MAAM,CAAC,GAAG,GAAG,OAAO,CAAC,GAAG,CAAA;QACxB,MAAM,CAAC,GAAG,GAAG,OAAO,CAAC,GAAG,CAAA;QACxB,MAAM,CAAC,0BAA0B,CAAC;YAChC,OAAO,CAAC,wBAAwB,IAAI,IAAI,CAAC,UAAU,EAAE,CAAA;QACvD,IAAI,OAAO,CAAC,wBAAwB,EAAE;YACpC,MAAM,CAAC,KAAK,GAAG,IAAI,QAAQ,GAAG,CAAA;SAC/B;QACD,OAAO,MAAM,CAAA;IACf,CAAC;IAED;;;;;;;;OAQG;IACG,IAAI;;YACR,qEAAqE;YACrE,IACE,CAAC,MAAM,CAAC,QAAQ,CAAC,IAAI,CAAC,QAAQ,CAAC;gBAC/B,CAAC,IAAI,CAAC,QAAQ,CAAC,QAAQ,CAAC,GAAG,CAAC;oBAC1B,CAAC,UAAU,IAAI,IAAI,CAAC,QAAQ,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC,EAC/C;gBACA,wFAAwF;gBACxF,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,OAAO,CAC1B,OAAO,CAAC,GAAG,EAAE,EACb,IAAI,CAAC,OAAO,CAAC,GAAG,IAAI,OAAO,CAAC,GAAG,EAAE,EACjC,IAAI,CAAC,QAAQ,CACd,CAAA;aACF;YAED,iEAAiE;YACjE,qEAAqE;YACrE,IAAI,CAAC,QAAQ,GAAG,MAAM,EAAE,CAAC,KAAK,CAAC,IAAI,CAAC,QAAQ,EAAE,IAAI,CAAC,CAAA;YAEnD,OAAO,IAAI,OAAO,CAAS,CAAO,OAAO,EAAE,MAAM,EAAE,EAAE;gBACnD,IAAI,CAAC,MAAM,CAAC,cAAc,IAAI,CAAC,QAAQ,EAAE,CAAC,CAAA;gBAC1C,IAAI,CAAC,MAAM,CAAC,YAAY,CAAC,CAAA;gBACzB,KAAK,MAAM,GAAG,IAAI,IAAI,CAAC,IAAI,EAAE;oBAC3B,IAAI,CAAC,MAAM,CAAC,MAAM,GAAG,EAAE,CAAC,CAAA;iBACzB;gBAED,MAAM,cAAc,GAAG,IAAI,CAAC,iBAAiB,CAAC,IAAI,CAAC,OAAO,CAAC,CAAA;gBAC3D,IAAI,CAAC,cAAc,CAAC,MAAM,IAAI,cAAc,CAAC,SAAS,EAAE;oBACtD,cAAc,CAAC,SAAS,CAAC,KAAK,CAC5B,IAAI,CAAC,iBAAiB,CAAC,cAAc,CAAC,GAAG,EAAE,CAAC,GAAG,CAChD,CAAA;iBACF;gBAED,MAAM,KAAK,GAAG,IAAI,SAAS,CAAC,cAAc,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAA;gBAC1D,KAAK,CAAC,EAAE,CAAC,OAAO,EAAE,CAAC,OAAe,EAAE,EAAE;oBACpC,IAAI,CAAC,MAAM,CAAC,OAAO,CAAC,CAAA;gBACtB,CAAC,CAAC,CAAA;gBAEF,IAAI,IAAI,CAAC,OAAO,CAAC,GAAG,IAAI,CAAC,CAAC,MAAM,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,EAAE;oBAChE,OAAO,MAAM,CAAC,IAAI,KAAK,CAAC,YAAY,IAAI,CAAC,OAAO,CAAC,GAAG,kBAAkB,CAAC,CAAC,CAAA;iBACzE;gBAED,MAAM,QAAQ,GAAG,IAAI,CAAC,iBAAiB,EAAE,CAAA;gBACzC,MAAM,EAAE,GAAG,KAAK,CAAC,KAAK,CACpB,QAAQ,EACR,IAAI,CAAC,aAAa,CAAC,cAAc,CAAC,EAClC,IAAI,CAAC,gBAAgB,CAAC,IAAI,CAAC,OAAO,EAAE,QAAQ,CAAC,CAC9C,CAAA;gBAED,IAAI,SAAS,GAAG,EAAE,CAAA;g
BAClB,IAAI,EAAE,CAAC,MAAM,EAAE;oBACb,EAAE,CAAC,MAAM,CAAC,EAAE,CAAC,MAAM,EAAE,CAAC,IAAY,EAAE,EAAE;wBACpC,IAAI,IAAI,CAAC,OAAO,CAAC,SAAS,IAAI,IAAI,CAAC,OAAO,CAAC,SAAS,CAAC,MAAM,EAAE;4BAC3D,IAAI,CAAC,OAAO,CAAC,SAAS,CAAC,MAAM,CAAC,IAAI,CAAC,CAAA;yBACpC;wBAED,IAAI,CAAC,cAAc,CAAC,MAAM,IAAI,cAAc,CAAC,SAAS,EAAE;4BACtD,cAAc,CAAC,SAAS,CAAC,KAAK,CAAC,IAAI,CAAC,CAAA;yBACrC;wBAED,SAAS,GAAG,IAAI,CAAC,kBAAkB,CACjC,IAAI,EACJ,SAAS,EACT,CAAC,IAAY,EAAE,EAAE;4BACf,IAAI,IAAI,CAAC,OAAO,CAAC,SAAS,IAAI,IAAI,CAAC,OAAO,CAAC,SAAS,CAAC,OAAO,EAAE;gCAC5D,IAAI,CAAC,OAAO,CAAC,SAAS,CAAC,OAAO,CAAC,IAAI,CAAC,CAAA;6BACrC;wBACH,CAAC,CACF,CAAA;oBACH,CAAC,CAAC,CAAA;iBACH;gBAED,IAAI,SAAS,GAAG,EAAE,CAAA;gBAClB,IAAI,EAAE,CAAC,MAAM,EAAE;oBACb,EAAE,CAAC,MAAM,CAAC,EAAE,CAAC,MAAM,EAAE,CAAC,IAAY,EAAE,EAAE;wBACpC,KAAK,CAAC,aAAa,GAAG,IAAI,CAAA;wBAC1B,IAAI,IAAI,CAAC,OAAO,CAAC,SAAS,IAAI,IAAI,CAAC,OAAO,CAAC,SAAS,CAAC,MAAM,EAAE;4BAC3D,IAAI,CAAC,OAAO,CAAC,SAAS,CAAC,MAAM,CAAC,IAAI,CAAC,CAAA;yBACpC;wBAED,IACE,CAAC,cAAc,CAAC,MAAM;4BACtB,cAAc,CAAC,SAAS;4BACxB,cAAc,CAAC,SAAS,EACxB;4BACA,MAAM,CAAC,GAAG,cAAc,CAAC,YAAY;gCACnC,CAAC,CAAC,cAAc,CAAC,SAAS;gCAC1B,CAAC,CAAC,cAAc,CAAC,SAAS,CAAA;4BAC5B,CAAC,CAAC,KAAK,CAAC,IAAI,CAAC,CAAA;yBACd;wBAED,SAAS,GAAG,IAAI,CAAC,kBAAkB,CACjC,IAAI,EACJ,SAAS,EACT,CAAC,IAAY,EAAE,EAAE;4BACf,IAAI,IAAI,CAAC,OAAO,CAAC,SAAS,IAAI,IAAI,CAAC,OAAO,CAAC,SAAS,CAAC,OAAO,EAAE;gCAC5D,IAAI,CAAC,OAAO,CAAC,SAAS,CAAC,OAAO,CAAC,IAAI,CAAC,CAAA;6BACrC;wBACH,CAAC,CACF,CAAA;oBACH,CAAC,CAAC,CAAA;iBACH;gBAED,EAAE,CAAC,EAAE,CAAC,OAAO,EAAE,CAAC,GAAU,EAAE,EAAE;oBAC5B,KAAK,CAAC,YAAY,GAAG,GAAG,CAAC,OAAO,CAAA;oBAChC,KAAK,CAAC,aAAa,GAAG,IAAI,CAAA;oBAC1B,KAAK,CAAC,aAAa,GAAG,IAAI,CAAA;oBAC1B,KAAK,CAAC,aAAa,EAAE,CAAA;gBACvB,CAAC,CAAC,CAAA;gBAEF,EAAE,CAAC,EAAE,CAAC,MAAM,EAAE,CAAC,IAAY,EAAE,EAAE;oBAC7B,KAAK,CAAC,eAAe,GAAG,IAAI,CAAA;oBAC5B,KAAK,CAAC,aAAa,GAAG,IAAI,CAAA;oBAC1B,IAAI,CAAC,MAAM,CAAC,aAAa,IAAI,wBAAwB,IAAI,CAAC,QAAQ,GAAG,CAAC,CAAA;oBACtE,KAAK,CAAC,aAAa,EAAE,CAAA;gBACvB,CAAC,CAAC,CAAA;gBAEF,EAAE,CAAC,EAAE,CAAC,OAAO,EAAE,CAAC,IAAY,EAAE,EAAE;oBAC9B,KAAK,CAAC,eAAe,GAAG,IAAI,CAAA;oBAC5B,KAAK,CAAC,aAAa,GAAG,IAAI,CAAA;oBAC1B,KAAK,CAAC,aAAa,GAAG,IAAI,CAAA;oBAC1B,IAAI,CAAC,MAAM,CAAC,uCAAuC,IAAI,CAAC,QAAQ,GAAG,CAAC,CAAA;oBACpE,KAAK,CAAC,aAAa,EAAE,CAAA;gBACvB,CAAC,CAAC,CAAA;gBAEF,KAAK,CAAC,EAAE,CAAC,MAAM,EAAE,CAAC,KAAY,EAAE,QAAgB,EAAE,EAAE;oBAClD,IAAI,SAAS,CAAC,MAAM,GAAG,CAAC,EAAE;wBACxB,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,SAAS,CAAC,CAAA;qBAChC;oBAED,IAAI,SAAS,CAAC,MAAM,GAAG,CAAC,EAAE;wBACxB,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,SAAS,CAAC,CAAA;qBAChC;oBAED,EAAE,CAAC,kBAAkB,EAAE,CAAA;oBAEvB,IAAI,KAAK,EAAE;wBACT,MAAM,CAAC,KAAK,CAAC,CAAA;qBACd;yBAAM;wBACL,OAAO,CAAC,QAAQ,CAAC,CAAA;qBAClB;gBACH,CAAC,CAAC,CAAA;gBAEF,IAAI,IAAI,CAAC,OAAO,CAAC,KAAK,EAAE;oBACtB,IAAI,CAAC,EAAE,CAAC,KAAK,EAAE;wBACb,MAAM,IAAI,KAAK,CAAC,6BAA6B,CAAC,CAAA;qBAC/C;oBAED,EAAE,CAAC,KAAK,CAAC,GAAG,CAAC,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,CAAA;iBACjC;YACH,CAAC,CAAA,CAAC,CAAA;QACJ,CAAC;KAAA;CACF;AAthBD,gCAshBC;AAED;;;;;GAKG;AACH,SAAgB,gBAAgB,CAAC,SAAiB;IAChD,MAAM,IAAI,GAAa,EAAE,CAAA;IAEzB,IAAI,QAAQ,GAAG,KAAK,CAAA;IACpB,IAAI,OAAO,GAAG,KAAK,CAAA;IACnB,IAAI,GAAG,GAAG,EAAE,CAAA;IAEZ,SAAS,MAAM,CAAC,CAAS;QACvB,gCAAgC;QAChC,IAAI,OAAO,IAAI,CAAC,KAAK,GAAG,EAAE;YACxB,GAAG,IAAI,IAAI,CAAA;SACZ;QAED,GAAG,IAAI,CAAC,CAAA;QACR,OAAO,GAAG,KAAK,CAAA;IACjB,CAAC;IAED,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,SAAS,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;QACzC,MAAM,CAAC,GAAG,SAAS,CAAC,MAAM,CAAC,CAAC,CAAC,CAAA;QAE7B,IAAI,CAAC,KAAK,GAAG,EAAE;YACb,IAAI,CAAC,OAAO,EAAE;gBACZ,QAAQ,GAAG,CAAC,QAAQ,CAAA;aACrB;iBAAM;gBACL,MAAM,CAAC,CAAC,CAAC,CAAA;aACV;YACD,SAAQ;SAC
T;QAED,IAAI,CAAC,KAAK,IAAI,IAAI,OAAO,EAAE;YACzB,MAAM,CAAC,CAAC,CAAC,CAAA;YACT,SAAQ;SACT;QAED,IAAI,CAAC,KAAK,IAAI,IAAI,QAAQ,EAAE;YAC1B,OAAO,GAAG,IAAI,CAAA;YACd,SAAQ;SACT;QAED,IAAI,CAAC,KAAK,GAAG,IAAI,CAAC,QAAQ,EAAE;YAC1B,IAAI,GAAG,CAAC,MAAM,GAAG,CAAC,EAAE;gBAClB,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,CAAA;gBACd,GAAG,GAAG,EAAE,CAAA;aACT;YACD,SAAQ;SACT;QAED,MAAM,CAAC,CAAC,CAAC,CAAA;KACV;IAED,IAAI,GAAG,CAAC,MAAM,GAAG,CAAC,EAAE;QAClB,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,IAAI,EAAE,CAAC,CAAA;KACtB;IAED,OAAO,IAAI,CAAA;AACb,CAAC;AAvDD,4CAuDC;AAED,MAAM,SAAU,SAAQ,MAAM,CAAC,YAAY;IACzC,YAAY,OAAuB,EAAE,QAAgB;QACnD,KAAK,EAAE,CAAA;QAaT,kBAAa,GAAG,KAAK,CAAA,CAAC,4DAA4D;QAClF,iBAAY,GAAG,EAAE,CAAA;QACjB,oBAAe,GAAG,CAAC,CAAA;QACnB,kBAAa,GAAG,KAAK,CAAA,CAAC,wCAAwC;QAC9D,kBAAa,GAAG,KAAK,CAAA,CAAC,uCAAuC;QACrD,UAAK,GAAG,KAAK,CAAA,CAAC,aAAa;QAC3B,SAAI,GAAG,KAAK,CAAA;QAEZ,YAAO,GAAwB,IAAI,CAAA;QAnBzC,IAAI,CAAC,QAAQ,EAAE;YACb,MAAM,IAAI,KAAK,CAAC,4BAA4B,CAAC,CAAA;SAC9C;QAED,IAAI,CAAC,OAAO,GAAG,OAAO,CAAA;QACtB,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAA;QACxB,IAAI,OAAO,CAAC,KAAK,EAAE;YACjB,IAAI,CAAC,KAAK,GAAG,OAAO,CAAC,KAAK,CAAA;SAC3B;IACH,CAAC;IAaD,aAAa;QACX,IAAI,IAAI,CAAC,IAAI,EAAE;YACb,OAAM;SACP;QAED,IAAI,IAAI,CAAC,aAAa,EAAE;YACtB,IAAI,CAAC,UAAU,EAAE,CAAA;SAClB;aAAM,IAAI,IAAI,CAAC,aAAa,EAAE;YAC7B,IAAI,CAAC,OAAO,GAAG,mBAAU,CAAC,SAAS,CAAC,aAAa,EAAE,IAAI,CAAC,KAAK,EAAE,IAAI,CAAC,CAAA;SACrE;IACH,CAAC;IAEO,MAAM,CAAC,OAAe;QAC5B,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAA;IAC7B,CAAC;IAEO,UAAU;QAChB,sCAAsC;QACtC,IAAI,KAAwB,CAAA;QAC5B,IAAI,IAAI,CAAC,aAAa,EAAE;YACtB,IAAI,IAAI,CAAC,YAAY,EAAE;gBACrB,KAAK,GAAG,IAAI,KAAK,CACf,8DAA8D,IAAI,CAAC,QAAQ,4DAA4D,IAAI,CAAC,YAAY,EAAE,CAC3J,CAAA;aACF;iBAAM,IAAI,IAAI,CAAC,eAAe,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,gBAAgB,EAAE;gBACvE,KAAK,GAAG,IAAI,KAAK,CACf,gBAAgB,IAAI,CAAC,QAAQ,2BAA2B,IAAI,CAAC,eAAe,EAAE,CAC/E,CAAA;aACF;iBAAM,IAAI,IAAI,CAAC,aAAa,IAAI,IAAI,CAAC,OAAO,CAAC,YAAY,EAAE;gBAC1D,KAAK,GAAG,IAAI,KAAK,CACf,gBAAgB,IAAI,CAAC,QAAQ,sEAAsE,CACpG,CAAA;aACF;SACF;QAED,oBAAoB;QACpB,IAAI,IAAI,CAAC,OAAO,EAAE;YAChB,YAAY,CAAC,IAAI,CAAC,OAAO,CAAC,CAAA;YAC1B,IAAI,CAAC,OAAO,GAAG,IAAI,CAAA;SACpB;QAED,IAAI,CAAC,IAAI,GAAG,IAAI,CAAA;QAChB,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,KAAK,EAAE,IAAI,CAAC,eAAe,CAAC,CAAA;IAChD,CAAC;IAEO,MAAM,CAAC,aAAa,CAAC,KAAgB;QAC3C,IAAI,KAAK,CAAC,IAAI,EAAE;YACd,OAAM;SACP;QAED,IAAI,CAAC,KAAK,CAAC,aAAa,IAAI,KAAK,CAAC,aAAa,EAAE;YAC/C,MAAM,OAAO,GAAG,0CAA0C,KAAK,CAAC,KAAK;gBACnE,IAAI,4CACJ,KAAK,CAAC,QACR,0FAA0F,CAAA;YAC1F,KAAK,CAAC,MAAM,CAAC,OAAO,CAAC,CAAA;SACtB;QAED,KAAK,CAAC,UAAU,EAAE,CAAA;IACpB,CAAC;CACF"} \ No newline at end of file diff --git a/node_modules/@actions/exec/package.json b/node_modules/@actions/exec/package.json new file mode 100644 index 0000000..bc4d77a --- /dev/null +++ b/node_modules/@actions/exec/package.json @@ -0,0 +1,41 @@ +{ + "name": "@actions/exec", + "version": "1.1.1", + "description": "Actions exec lib", + "keywords": [ + "github", + "actions", + "exec" + ], + "homepage": "https://github.com/actions/toolkit/tree/main/packages/exec", + "license": "MIT", + "main": "lib/exec.js", + "types": "lib/exec.d.ts", + "directories": { + "lib": "lib", + "test": "__tests__" + }, + "files": [ + "lib", + "!.DS_Store" + ], + "publishConfig": { + "access": "public" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/actions/toolkit.git", + "directory": "packages/exec" + }, + "scripts": { + "audit-moderate": "npm install && npm audit --json --audit-level=moderate > audit.json", + "test": "echo \"Error: run tests from root\" && exit 1", + "tsc": "tsc" + }, + "bugs": { + "url": 
"https://github.com/actions/toolkit/issues" + }, + "dependencies": { + "@actions/io": "^1.0.1" + } +} diff --git a/node_modules/@actions/github/LICENSE.md b/node_modules/@actions/github/LICENSE.md new file mode 100644 index 0000000..dbae2ed --- /dev/null +++ b/node_modules/@actions/github/LICENSE.md @@ -0,0 +1,9 @@ +The MIT License (MIT) + +Copyright 2019 GitHub + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. \ No newline at end of file diff --git a/node_modules/@actions/github/README.md b/node_modules/@actions/github/README.md new file mode 100644 index 0000000..30e6a68 --- /dev/null +++ b/node_modules/@actions/github/README.md @@ -0,0 +1,98 @@ +# `@actions/github` + +> A hydrated Octokit client. + +## Usage + +Returns an authenticated Octokit client that follows the machine [proxy settings](https://help.github.com/en/actions/hosting-your-own-runners/using-a-proxy-server-with-self-hosted-runners) and correctly sets GHES base urls. See https://octokit.github.io/rest.js for the API. + +```js +const github = require('@actions/github'); +const core = require('@actions/core'); + +async function run() { + // This should be a token with access to your repository scoped in as a secret. + // The YML workflow will need to set myToken with the GitHub Secret Token + // myToken: ${{ secrets.GITHUB_TOKEN }} + // https://help.github.com/en/actions/automating-your-workflow-with-github-actions/authenticating-with-the-github_token#about-the-github_token-secret + const myToken = core.getInput('myToken'); + + const octokit = github.getOctokit(myToken) + + // You can also pass in additional options as a second parameter to getOctokit + // const octokit = github.getOctokit(myToken, {userAgent: "MyActionVersion1"}); + + const { data: pullRequest } = await octokit.rest.pulls.get({ + owner: 'octokit', + repo: 'rest.js', + pull_number: 123, + mediaType: { + format: 'diff' + } + }); + + console.log(pullRequest); +} + +run(); +``` + +You can also make GraphQL requests. See https://github.com/octokit/graphql.js for the API. + +```js +const result = await octokit.graphql(query, variables); +``` + +Finally, you can get the context of the current action: + +```js +const github = require('@actions/github'); + +const context = github.context; + +const newIssue = await octokit.rest.issues.create({ + ...context.repo, + title: 'New issue!', + body: 'Hello Universe!' +}); +``` + +## Webhook payload typescript definitions + +The npm module `@octokit/webhooks-definitions` provides type definitions for the response payloads. 
You can cast the payload to these types for better type information. + +First, install the npm module: `npm install @octokit/webhooks-definitions` + +Then, assert the type based on the eventName: +```ts +import * as core from '@actions/core' +import * as github from '@actions/github' +import {PushEvent} from '@octokit/webhooks-definitions/schema' + +if (github.context.eventName === 'push') { + const pushPayload = github.context.payload as PushEvent + core.info(`The head commit is: ${pushPayload.head_commit}`) +} +``` + +## Extending the Octokit instance +`@octokit/core` now supports the [plugin architecture](https://github.com/octokit/core.js#plugins). You can extend the GitHub instance using plugins. + +For example, using the `@octokit/plugin-enterprise-server` you can now access enterprise admin APIs on GHES instances. + +```ts +import { GitHub, getOctokitOptions } from '@actions/github/lib/utils' +import { enterpriseServer220Admin } from '@octokit/plugin-enterprise-server' + +const octokit = GitHub.plugin(enterpriseServer220Admin) +// or override some of the default values as well +// const octokit = GitHub.plugin(enterpriseServer220Admin).defaults({userAgent: "MyNewUserAgent"}) + +const myToken = core.getInput('myToken'); +const myOctokit = new octokit(getOctokitOptions(myToken)) +// Create a new user +myOctokit.rest.enterpriseAdmin.createUser({ + login: "testuser", + email: "testuser@test.com", +}); +``` diff --git a/node_modules/@actions/github/lib/context.d.ts b/node_modules/@actions/github/lib/context.d.ts new file mode 100644 index 0000000..7d3a7de --- /dev/null +++ b/node_modules/@actions/github/lib/context.d.ts @@ -0,0 +1,32 @@ +import { WebhookPayload } from './interfaces'; +export declare class Context { + /** + * Webhook payload object that triggered the workflow + */ + payload: WebhookPayload; + eventName: string; + sha: string; + ref: string; + workflow: string; + action: string; + actor: string; + job: string; + runNumber: number; + runId: number; + apiUrl: string; + serverUrl: string; + graphqlUrl: string; + /** + * Hydrate the context from the environment + */ + constructor(); + get issue(): { + owner: string; + repo: string; + number: number; + }; + get repo(): { + owner: string; + repo: string; + }; +} diff --git a/node_modules/@actions/github/lib/context.js b/node_modules/@actions/github/lib/context.js new file mode 100644 index 0000000..767933c --- /dev/null +++ b/node_modules/@actions/github/lib/context.js @@ -0,0 +1,54 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Context = void 0; +const fs_1 = require("fs"); +const os_1 = require("os"); +class Context { + /** + * Hydrate the context from the environment + */ + constructor() { + var _a, _b, _c; + this.payload = {}; + if (process.env.GITHUB_EVENT_PATH) { + if (fs_1.existsSync(process.env.GITHUB_EVENT_PATH)) { + this.payload = JSON.parse(fs_1.readFileSync(process.env.GITHUB_EVENT_PATH, { encoding: 'utf8' })); + } + else { + const path = process.env.GITHUB_EVENT_PATH; + process.stdout.write(`GITHUB_EVENT_PATH ${path} does not exist${os_1.EOL}`); + } + } + this.eventName = process.env.GITHUB_EVENT_NAME; + this.sha = process.env.GITHUB_SHA; + this.ref = process.env.GITHUB_REF; + this.workflow = process.env.GITHUB_WORKFLOW; + this.action = process.env.GITHUB_ACTION; + this.actor = process.env.GITHUB_ACTOR; + this.job = process.env.GITHUB_JOB; + this.runNumber = parseInt(process.env.GITHUB_RUN_NUMBER, 10); + this.runId = parseInt(process.env.GITHUB_RUN_ID, 10); + this.apiUrl = (_a
= process.env.GITHUB_API_URL) !== null && _a !== void 0 ? _a : `https://api.github.com`; + this.serverUrl = (_b = process.env.GITHUB_SERVER_URL) !== null && _b !== void 0 ? _b : `https://github.com`; + this.graphqlUrl = (_c = process.env.GITHUB_GRAPHQL_URL) !== null && _c !== void 0 ? _c : `https://api.github.com/graphql`; + } + get issue() { + const payload = this.payload; + return Object.assign(Object.assign({}, this.repo), { number: (payload.issue || payload.pull_request || payload).number }); + } + get repo() { + if (process.env.GITHUB_REPOSITORY) { + const [owner, repo] = process.env.GITHUB_REPOSITORY.split('/'); + return { owner, repo }; + } + if (this.payload.repository) { + return { + owner: this.payload.repository.owner.login, + repo: this.payload.repository.name + }; + } + throw new Error("context.repo requires a GITHUB_REPOSITORY environment variable like 'owner/repo'"); + } +} +exports.Context = Context; +//# sourceMappingURL=context.js.map \ No newline at end of file diff --git a/node_modules/@actions/github/lib/context.js.map b/node_modules/@actions/github/lib/context.js.map new file mode 100644 index 0000000..91fb9a9 --- /dev/null +++ b/node_modules/@actions/github/lib/context.js.map @@ -0,0 +1 @@ +{"version":3,"file":"context.js","sourceRoot":"","sources":["../src/context.ts"],"names":[],"mappings":";;;AAEA,2BAA2C;AAC3C,2BAAsB;AAEtB,MAAa,OAAO;IAmBlB;;OAEG;IACH;;QACE,IAAI,CAAC,OAAO,GAAG,EAAE,CAAA;QACjB,IAAI,OAAO,CAAC,GAAG,CAAC,iBAAiB,EAAE;YACjC,IAAI,eAAU,CAAC,OAAO,CAAC,GAAG,CAAC,iBAAiB,CAAC,EAAE;gBAC7C,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC,KAAK,CACvB,iBAAY,CAAC,OAAO,CAAC,GAAG,CAAC,iBAAiB,EAAE,EAAC,QAAQ,EAAE,MAAM,EAAC,CAAC,CAChE,CAAA;aACF;iBAAM;gBACL,MAAM,IAAI,GAAG,OAAO,CAAC,GAAG,CAAC,iBAAiB,CAAA;gBAC1C,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,qBAAqB,IAAI,kBAAkB,QAAG,EAAE,CAAC,CAAA;aACvE;SACF;QACD,IAAI,CAAC,SAAS,GAAG,OAAO,CAAC,GAAG,CAAC,iBAA2B,CAAA;QACxD,IAAI,CAAC,GAAG,GAAG,OAAO,CAAC,GAAG,CAAC,UAAoB,CAAA;QAC3C,IAAI,CAAC,GAAG,GAAG,OAAO,CAAC,GAAG,CAAC,UAAoB,CAAA;QAC3C,IAAI,CAAC,QAAQ,GAAG,OAAO,CAAC,GAAG,CAAC,eAAyB,CAAA;QACrD,IAAI,CAAC,MAAM,GAAG,OAAO,CAAC,GAAG,CAAC,aAAuB,CAAA;QACjD,IAAI,CAAC,KAAK,GAAG,OAAO,CAAC,GAAG,CAAC,YAAsB,CAAA;QAC/C,IAAI,CAAC,GAAG,GAAG,OAAO,CAAC,GAAG,CAAC,UAAoB,CAAA;QAC3C,IAAI,CAAC,SAAS,GAAG,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,iBAA2B,EAAE,EAAE,CAAC,CAAA;QACtE,IAAI,CAAC,KAAK,GAAG,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,aAAuB,EAAE,EAAE,CAAC,CAAA;QAC9D,IAAI,CAAC,MAAM,SAAG,OAAO,CAAC,GAAG,CAAC,cAAc,mCAAI,wBAAwB,CAAA;QACpE,IAAI,CAAC,SAAS,SAAG,OAAO,CAAC,GAAG,CAAC,iBAAiB,mCAAI,oBAAoB,CAAA;QACtE,IAAI,CAAC,UAAU,SACb,OAAO,CAAC,GAAG,CAAC,kBAAkB,mCAAI,gCAAgC,CAAA;IACtE,CAAC;IAED,IAAI,KAAK;QACP,MAAM,OAAO,GAAG,IAAI,CAAC,OAAO,CAAA;QAE5B,uCACK,IAAI,CAAC,IAAI,KACZ,MAAM,EAAE,CAAC,OAAO,CAAC,KAAK,IAAI,OAAO,CAAC,YAAY,IAAI,OAAO,CAAC,CAAC,MAAM,IAClE;IACH,CAAC;IAED,IAAI,IAAI;QACN,IAAI,OAAO,CAAC,GAAG,CAAC,iBAAiB,EAAE;YACjC,MAAM,CAAC,KAAK,EAAE,IAAI,CAAC,GAAG,OAAO,CAAC,GAAG,CAAC,iBAAiB,CAAC,KAAK,CAAC,GAAG,CAAC,CAAA;YAC9D,OAAO,EAAC,KAAK,EAAE,IAAI,EAAC,CAAA;SACrB;QAED,IAAI,IAAI,CAAC,OAAO,CAAC,UAAU,EAAE;YAC3B,OAAO;gBACL,KAAK,EAAE,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,KAAK,CAAC,KAAK;gBAC1C,IAAI,EAAE,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,IAAI;aACnC,CAAA;SACF;QAED,MAAM,IAAI,KAAK,CACb,kFAAkF,CACnF,CAAA;IACH,CAAC;CACF;AA3ED,0BA2EC"} \ No newline at end of file diff --git a/node_modules/@actions/github/lib/github.d.ts b/node_modules/@actions/github/lib/github.d.ts new file mode 100644 index 0000000..8c2121d --- /dev/null +++ b/node_modules/@actions/github/lib/github.d.ts @@ -0,0 +1,11 @@ +import * as Context from './context'; +import { GitHub } 
from './utils'; +import { OctokitOptions, OctokitPlugin } from '@octokit/core/dist-types/types'; +export declare const context: Context.Context; +/** + * Returns a hydrated octokit ready to use for GitHub Actions + * + * @param token the repo PAT or GITHUB_TOKEN + * @param options other options to set + */ +export declare function getOctokit(token: string, options?: OctokitOptions, ...additionalPlugins: OctokitPlugin[]): InstanceType; diff --git a/node_modules/@actions/github/lib/github.js b/node_modules/@actions/github/lib/github.js new file mode 100644 index 0000000..b717a6b --- /dev/null +++ b/node_modules/@actions/github/lib/github.js @@ -0,0 +1,37 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getOctokit = exports.context = void 0; +const Context = __importStar(require("./context")); +const utils_1 = require("./utils"); +exports.context = new Context.Context(); +/** + * Returns a hydrated octokit ready to use for GitHub Actions + * + * @param token the repo PAT or GITHUB_TOKEN + * @param options other options to set + */ +function getOctokit(token, options, ...additionalPlugins) { + const GitHubWithPlugins = utils_1.GitHub.plugin(...additionalPlugins); + return new GitHubWithPlugins(utils_1.getOctokitOptions(token, options)); +} +exports.getOctokit = getOctokit; +//# sourceMappingURL=github.js.map \ No newline at end of file diff --git a/node_modules/@actions/github/lib/github.js.map b/node_modules/@actions/github/lib/github.js.map new file mode 100644 index 0000000..60be74b --- /dev/null +++ b/node_modules/@actions/github/lib/github.js.map @@ -0,0 +1 @@ +{"version":3,"file":"github.js","sourceRoot":"","sources":["../src/github.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,mDAAoC;AACpC,mCAAiD;AAKpC,QAAA,OAAO,GAAG,IAAI,OAAO,CAAC,OAAO,EAAE,CAAA;AAE5C;;;;;GAKG;AACH,SAAgB,UAAU,CACxB,KAAa,EACb,OAAwB,EACxB,GAAG,iBAAkC;IAErC,MAAM,iBAAiB,GAAG,cAAM,CAAC,MAAM,CAAC,GAAG,iBAAiB,CAAC,CAAA;IAC7D,OAAO,IAAI,iBAAiB,CAAC,yBAAiB,CAAC,KAAK,EAAE,OAAO,CAAC,CAAC,CAAA;AACjE,CAAC;AAPD,gCAOC"} \ No newline at end of file diff --git a/node_modules/@actions/github/lib/interfaces.d.ts b/node_modules/@actions/github/lib/interfaces.d.ts new file mode 100644 index 0000000..f810333 --- /dev/null +++ b/node_modules/@actions/github/lib/interfaces.d.ts @@ -0,0 +1,40 @@ +export interface PayloadRepository { + [key: string]: any; + full_name?: string; + name: string; + owner: { + [key: string]: any; + login: string; + name?: string; + }; + html_url?: string; +} +export interface WebhookPayload { + [key: string]: any; + repository?: PayloadRepository; + issue?: { + [key: string]: any; + number: number; + html_url?: string; + body?: string; + }; + 
pull_request?: { + [key: string]: any; + number: number; + html_url?: string; + body?: string; + }; + sender?: { + [key: string]: any; + type: string; + }; + action?: string; + installation?: { + id: number; + [key: string]: any; + }; + comment?: { + id: number; + [key: string]: any; + }; +} diff --git a/node_modules/@actions/github/lib/interfaces.js b/node_modules/@actions/github/lib/interfaces.js new file mode 100644 index 0000000..a660b5e --- /dev/null +++ b/node_modules/@actions/github/lib/interfaces.js @@ -0,0 +1,4 @@ +"use strict"; +/* eslint-disable @typescript-eslint/no-explicit-any */ +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=interfaces.js.map \ No newline at end of file diff --git a/node_modules/@actions/github/lib/interfaces.js.map b/node_modules/@actions/github/lib/interfaces.js.map new file mode 100644 index 0000000..dc2c960 --- /dev/null +++ b/node_modules/@actions/github/lib/interfaces.js.map @@ -0,0 +1 @@ +{"version":3,"file":"interfaces.js","sourceRoot":"","sources":["../src/interfaces.ts"],"names":[],"mappings":";AAAA,uDAAuD"} \ No newline at end of file diff --git a/node_modules/@actions/github/lib/internal/utils.d.ts b/node_modules/@actions/github/lib/internal/utils.d.ts new file mode 100644 index 0000000..5790d91 --- /dev/null +++ b/node_modules/@actions/github/lib/internal/utils.d.ts @@ -0,0 +1,6 @@ +/// +import * as http from 'http'; +import { OctokitOptions } from '@octokit/core/dist-types/types'; +export declare function getAuthString(token: string, options: OctokitOptions): string | undefined; +export declare function getProxyAgent(destinationUrl: string): http.Agent; +export declare function getApiBaseUrl(): string; diff --git a/node_modules/@actions/github/lib/internal/utils.js b/node_modules/@actions/github/lib/internal/utils.js new file mode 100644 index 0000000..175a4da --- /dev/null +++ b/node_modules/@actions/github/lib/internal/utils.js @@ -0,0 +1,43 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getApiBaseUrl = exports.getProxyAgent = exports.getAuthString = void 0; +const httpClient = __importStar(require("@actions/http-client")); +function getAuthString(token, options) { + if (!token && !options.auth) { + throw new Error('Parameter token or opts.auth is required'); + } + else if (token && options.auth) { + throw new Error('Parameters token and opts.auth may not both be specified'); + } + return typeof options.auth === 'string' ? 
options.auth : `token ${token}`; +} +exports.getAuthString = getAuthString; +function getProxyAgent(destinationUrl) { + const hc = new httpClient.HttpClient(); + return hc.getAgent(destinationUrl); +} +exports.getProxyAgent = getProxyAgent; +function getApiBaseUrl() { + return process.env['GITHUB_API_URL'] || 'https://api.github.com'; +} +exports.getApiBaseUrl = getApiBaseUrl; +//# sourceMappingURL=utils.js.map \ No newline at end of file diff --git a/node_modules/@actions/github/lib/internal/utils.js.map b/node_modules/@actions/github/lib/internal/utils.js.map new file mode 100644 index 0000000..f1f519d --- /dev/null +++ b/node_modules/@actions/github/lib/internal/utils.js.map @@ -0,0 +1 @@ +{"version":3,"file":"utils.js","sourceRoot":"","sources":["../../src/internal/utils.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AACA,iEAAkD;AAGlD,SAAgB,aAAa,CAC3B,KAAa,EACb,OAAuB;IAEvB,IAAI,CAAC,KAAK,IAAI,CAAC,OAAO,CAAC,IAAI,EAAE;QAC3B,MAAM,IAAI,KAAK,CAAC,0CAA0C,CAAC,CAAA;KAC5D;SAAM,IAAI,KAAK,IAAI,OAAO,CAAC,IAAI,EAAE;QAChC,MAAM,IAAI,KAAK,CAAC,0DAA0D,CAAC,CAAA;KAC5E;IAED,OAAO,OAAO,OAAO,CAAC,IAAI,KAAK,QAAQ,CAAC,CAAC,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,SAAS,KAAK,EAAE,CAAA;AAC3E,CAAC;AAXD,sCAWC;AAED,SAAgB,aAAa,CAAC,cAAsB;IAClD,MAAM,EAAE,GAAG,IAAI,UAAU,CAAC,UAAU,EAAE,CAAA;IACtC,OAAO,EAAE,CAAC,QAAQ,CAAC,cAAc,CAAC,CAAA;AACpC,CAAC;AAHD,sCAGC;AAED,SAAgB,aAAa;IAC3B,OAAO,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,IAAI,wBAAwB,CAAA;AAClE,CAAC;AAFD,sCAEC"} \ No newline at end of file diff --git a/node_modules/@actions/github/lib/utils.d.ts b/node_modules/@actions/github/lib/utils.d.ts new file mode 100644 index 0000000..e889ee0 --- /dev/null +++ b/node_modules/@actions/github/lib/utils.d.ts @@ -0,0 +1,15 @@ +import * as Context from './context'; +import { Octokit } from '@octokit/core'; +import { OctokitOptions } from '@octokit/core/dist-types/types'; +export declare const context: Context.Context; +export declare const defaults: OctokitOptions; +export declare const GitHub: typeof Octokit & import("@octokit/core/dist-types/types").Constructor; +/** + * Convience function to correctly format Octokit Options to pass into the constructor. + * + * @param token the repo PAT or GITHUB_TOKEN + * @param options other options to set + */ +export declare function getOctokitOptions(token: string, options?: OctokitOptions): OctokitOptions; diff --git a/node_modules/@actions/github/lib/utils.js b/node_modules/@actions/github/lib/utils.js new file mode 100644 index 0000000..d803f01 --- /dev/null +++ b/node_modules/@actions/github/lib/utils.js @@ -0,0 +1,54 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getOctokitOptions = exports.GitHub = exports.defaults = exports.context = void 0; +const Context = __importStar(require("./context")); +const Utils = __importStar(require("./internal/utils")); +// octokit + plugins +const core_1 = require("@octokit/core"); +const plugin_rest_endpoint_methods_1 = require("@octokit/plugin-rest-endpoint-methods"); +const plugin_paginate_rest_1 = require("@octokit/plugin-paginate-rest"); +exports.context = new Context.Context(); +const baseUrl = Utils.getApiBaseUrl(); +exports.defaults = { + baseUrl, + request: { + agent: Utils.getProxyAgent(baseUrl) + } +}; +exports.GitHub = core_1.Octokit.plugin(plugin_rest_endpoint_methods_1.restEndpointMethods, plugin_paginate_rest_1.paginateRest).defaults(exports.defaults); +/** + * Convience function to correctly format Octokit Options to pass into the constructor. + * + * @param token the repo PAT or GITHUB_TOKEN + * @param options other options to set + */ +function getOctokitOptions(token, options) { + const opts = Object.assign({}, options || {}); // Shallow clone - don't mutate the object provided by the caller + // Auth + const auth = Utils.getAuthString(token, opts); + if (auth) { + opts.auth = auth; + } + return opts; +} +exports.getOctokitOptions = getOctokitOptions; +//# sourceMappingURL=utils.js.map \ No newline at end of file diff --git a/node_modules/@actions/github/lib/utils.js.map b/node_modules/@actions/github/lib/utils.js.map new file mode 100644 index 0000000..7221d06 --- /dev/null +++ b/node_modules/@actions/github/lib/utils.js.map @@ -0,0 +1 @@ +{"version":3,"file":"utils.js","sourceRoot":"","sources":["../src/utils.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,mDAAoC;AACpC,wDAAyC;AAEzC,oBAAoB;AACpB,wCAAqC;AAErC,wFAAyE;AACzE,wEAA0D;AAE7C,QAAA,OAAO,GAAG,IAAI,OAAO,CAAC,OAAO,EAAE,CAAA;AAE5C,MAAM,OAAO,GAAG,KAAK,CAAC,aAAa,EAAE,CAAA;AACxB,QAAA,QAAQ,GAAmB;IACtC,OAAO;IACP,OAAO,EAAE;QACP,KAAK,EAAE,KAAK,CAAC,aAAa,CAAC,OAAO,CAAC;KACpC;CACF,CAAA;AAEY,QAAA,MAAM,GAAG,cAAO,CAAC,MAAM,CAClC,kDAAmB,EACnB,mCAAY,CACb,CAAC,QAAQ,CAAC,gBAAQ,CAAC,CAAA;AAEpB;;;;;GAKG;AACH,SAAgB,iBAAiB,CAC/B,KAAa,EACb,OAAwB;IAExB,MAAM,IAAI,GAAG,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,OAAO,IAAI,EAAE,CAAC,CAAA,CAAC,iEAAiE;IAE/G,OAAO;IACP,MAAM,IAAI,GAAG,KAAK,CAAC,aAAa,CAAC,KAAK,EAAE,IAAI,CAAC,CAAA;IAC7C,IAAI,IAAI,EAAE;QACR,IAAI,CAAC,IAAI,GAAG,IAAI,CAAA;KACjB;IAED,OAAO,IAAI,CAAA;AACb,CAAC;AAbD,8CAaC"} \ No newline at end of file diff --git a/node_modules/@actions/github/package.json b/node_modules/@actions/github/package.json new file mode 100644 index 0000000..3bcbe30 --- /dev/null +++ b/node_modules/@actions/github/package.json @@ -0,0 +1,49 @@ +{ + "name": "@actions/github", + "version": "5.1.1", + "description": "Actions github lib", + "keywords": [ + "github", + "actions" + ], + "homepage": "https://github.com/actions/toolkit/tree/main/packages/github", + "license": "MIT", + "main": "lib/github.js", + "types": "lib/github.d.ts", + "directories": { + "lib": "lib", + "test": "__tests__" + }, + "files": [ + "lib", + "!.DS_Store" + 
], + "publishConfig": { + "access": "public" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/actions/toolkit.git", + "directory": "packages/github" + }, + "scripts": { + "audit-moderate": "npm install && npm audit --json --audit-level=moderate > audit.json", + "test": "jest", + "build": "tsc", + "format": "prettier --write **/*.ts", + "format-check": "prettier --check **/*.ts", + "tsc": "tsc" + }, + "bugs": { + "url": "https://github.com/actions/toolkit/issues" + }, + "dependencies": { + "@actions/http-client": "^2.0.1", + "@octokit/core": "^3.6.0", + "@octokit/plugin-paginate-rest": "^2.17.0", + "@octokit/plugin-rest-endpoint-methods": "^5.13.0" + }, + "devDependencies": { + "proxy": "^1.0.2" + } +} diff --git a/node_modules/@actions/http-client/LICENSE b/node_modules/@actions/http-client/LICENSE new file mode 100644 index 0000000..5823a51 --- /dev/null +++ b/node_modules/@actions/http-client/LICENSE @@ -0,0 +1,21 @@ +Actions Http Client for Node.js + +Copyright (c) GitHub, Inc. + +All rights reserved. + +MIT License + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and +associated documentation files (the "Software"), to deal in the Software without restriction, +including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT +LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/@actions/http-client/README.md b/node_modules/@actions/http-client/README.md new file mode 100644 index 0000000..7e06ade --- /dev/null +++ b/node_modules/@actions/http-client/README.md @@ -0,0 +1,73 @@ +# `@actions/http-client` + +A lightweight HTTP client optimized for building actions. + +## Features + + - HTTP client with TypeScript generics and async/await/Promises + - Typings included! + - [Proxy support](https://help.github.com/en/actions/automating-your-workflow-with-github-actions/about-self-hosted-runners#using-a-proxy-server-with-self-hosted-runners) just works with actions and the runner + - Targets ES2019 (runner runs actions with node 12+). Only supported on node 12+. + - Basic, Bearer and PAT Support out of the box. Extensible handlers for others. + - Redirects supported + +Features and releases [here](./RELEASES.md) + +## Install + +``` +npm install @actions/http-client --save +``` + +## Samples + +See the [tests](./__tests__) for detailed examples. + +## Errors + +### HTTP + +The HTTP client does not throw unless truly exceptional. + +* A request that successfully executes resulting in a 404, 500 etc... will return a response object with a status code and a body. +* Redirects (3xx) will be followed by default. + +See the [tests](./__tests__) for detailed examples. 
+ +## Debugging + +To enable detailed console logging of all HTTP requests and responses, set the NODE_DEBUG environment variable: + +```shell +export NODE_DEBUG=http +``` + +## Node support + +The http-client is built using the latest LTS version of Node 12. It may work on previous node LTS versions but it's tested and officially supported on Node 12+. + +## Support and Versioning + +We follow semver and will hold compatibility between major versions and increment the minor version with new features and capabilities (while holding compat). + +## Contributing + +We welcome PRs. Please create an issue and if applicable, a design before proceeding with code. + +To install dependencies (once): + +``` +npm install +``` + +To build: + +``` +npm run build +``` + +To run all tests: + +``` +npm test +``` diff --git a/node_modules/@actions/http-client/lib/auth.d.ts b/node_modules/@actions/http-client/lib/auth.d.ts new file mode 100644 index 0000000..8cc9fc3 --- /dev/null +++ b/node_modules/@actions/http-client/lib/auth.d.ts @@ -0,0 +1,26 @@ +/// <reference types="node" /> +import * as http from 'http'; +import * as ifm from './interfaces'; +import { HttpClientResponse } from './index'; +export declare class BasicCredentialHandler implements ifm.RequestHandler { + username: string; + password: string; + constructor(username: string, password: string); + prepareRequest(options: http.RequestOptions): void; + canHandleAuthentication(): boolean; + handleAuthentication(): Promise<HttpClientResponse>; +} +export declare class BearerCredentialHandler implements ifm.RequestHandler { + token: string; + constructor(token: string); + prepareRequest(options: http.RequestOptions): void; + canHandleAuthentication(): boolean; + handleAuthentication(): Promise<HttpClientResponse>; +} +export declare class PersonalAccessTokenCredentialHandler implements ifm.RequestHandler { + token: string; + constructor(token: string); + prepareRequest(options: http.RequestOptions): void; + canHandleAuthentication(): boolean; + handleAuthentication(): Promise<HttpClientResponse>; +} diff --git a/node_modules/@actions/http-client/lib/auth.js b/node_modules/@actions/http-client/lib/auth.js new file mode 100644 index 0000000..2c150a3 --- /dev/null +++ b/node_modules/@actions/http-client/lib/auth.js @@ -0,0 +1,81 @@ +"use strict"; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ?
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.PersonalAccessTokenCredentialHandler = exports.BearerCredentialHandler = exports.BasicCredentialHandler = void 0; +class BasicCredentialHandler { + constructor(username, password) { + this.username = username; + this.password = password; + } + prepareRequest(options) { + if (!options.headers) { + throw Error('The request has no headers'); + } + options.headers['Authorization'] = `Basic ${Buffer.from(`${this.username}:${this.password}`).toString('base64')}`; + } + // This handler cannot handle 401 + canHandleAuthentication() { + return false; + } + handleAuthentication() { + return __awaiter(this, void 0, void 0, function* () { + throw new Error('not implemented'); + }); + } +} +exports.BasicCredentialHandler = BasicCredentialHandler; +class BearerCredentialHandler { + constructor(token) { + this.token = token; + } + // currently implements pre-authorization + // TODO: support preAuth = false where it hooks on 401 + prepareRequest(options) { + if (!options.headers) { + throw Error('The request has no headers'); + } + options.headers['Authorization'] = `Bearer ${this.token}`; + } + // This handler cannot handle 401 + canHandleAuthentication() { + return false; + } + handleAuthentication() { + return __awaiter(this, void 0, void 0, function* () { + throw new Error('not implemented'); + }); + } +} +exports.BearerCredentialHandler = BearerCredentialHandler; +class PersonalAccessTokenCredentialHandler { + constructor(token) { + this.token = token; + } + // currently implements pre-authorization + // TODO: support preAuth = false where it hooks on 401 + prepareRequest(options) { + if (!options.headers) { + throw Error('The request has no headers'); + } + options.headers['Authorization'] = `Basic ${Buffer.from(`PAT:${this.token}`).toString('base64')}`; + } + // This handler cannot handle 401 + canHandleAuthentication() { + return false; + } + handleAuthentication() { + return __awaiter(this, void 0, void 0, function* () { + throw new Error('not implemented'); + }); + } +} +exports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHandler; +//# sourceMappingURL=auth.js.map \ No newline at end of file diff --git a/node_modules/@actions/http-client/lib/auth.js.map b/node_modules/@actions/http-client/lib/auth.js.map new file mode 100644 index 0000000..62cc16b --- /dev/null +++ b/node_modules/@actions/http-client/lib/auth.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"auth.js","sourceRoot":"","sources":["../src/auth.ts"],"names":[],"mappings":";;;;;;;;;;;;AAIA,MAAa,sBAAsB;IAIjC,YAAY,QAAgB,EAAE,QAAgB;QAC5C,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAA;QACxB,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAA;IAC1B,CAAC;IAED,cAAc,CAAC,OAA4B;QACzC,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE;YACpB,MAAM,KAAK,CAAC,4BAA4B,CAAC,CAAA;SAC1C;QACD,OAAO,CAAC,OAAO,CAAC,eAAe,CAAC,GAAG,SAAS,MAAM,CAAC,IAAI,CACrD,GAAG,IAAI,CAAC,QAAQ,IAAI,IAAI,CAAC,QAAQ,EAAE,CACpC,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE,CAAA;IACxB,CAAC;IAED,iCAAiC;IACjC,uBAAuB;QACrB,OAAO,KAAK,CAAA;IACd,CAAC;IAEK,oBAAoB;;YACxB,MAAM,IAAI,KAAK,CAAC,iBAAiB,CAAC,CAAA;QACpC,CAAC;KAAA;CACF;AA1BD,wDA0BC;AAED,MAAa,uBAAuB;IAGlC,YAAY,KAAa;QACvB,IAAI,CAAC,KAAK,GAAG,KAAK,CAAA;IACpB,CAAC;IAED,yCAAyC;IACzC,sDAAsD;IACtD,cAAc,CAAC,OAA4B;QACzC,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE;YACpB,MAAM,KAAK,CAAC,4BAA4B,CAAC,CAAA;SAC1C;QACD,OAAO,CAAC,OAAO,CAAC,eAAe,CAAC,GAAG,UAAU,IAAI,CAAC,KAAK,EAAE,CAAA;IAC3D,CAAC;IAED,iCAAiC;IACjC,uBAAuB;QACrB,OAAO,KAAK,CAAA;IACd,CAAC;IAEK,oBAAoB;;YACxB,MAAM,IAAI,KAAK,CAAC,iBAAiB,CAAC,CAAA;QACpC,CAAC;KAAA;CACF;AAxBD,0DAwBC;AAED,MAAa,oCAAoC;IAK/C,YAAY,KAAa;QACvB,IAAI,CAAC,KAAK,GAAG,KAAK,CAAA;IACpB,CAAC;IAED,yCAAyC;IACzC,sDAAsD;IACtD,cAAc,CAAC,OAA4B;QACzC,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE;YACpB,MAAM,KAAK,CAAC,4BAA4B,CAAC,CAAA;SAC1C;QACD,OAAO,CAAC,OAAO,CAAC,eAAe,CAAC,GAAG,SAAS,MAAM,CAAC,IAAI,CACrD,OAAO,IAAI,CAAC,KAAK,EAAE,CACpB,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE,CAAA;IACxB,CAAC;IAED,iCAAiC;IACjC,uBAAuB;QACrB,OAAO,KAAK,CAAA;IACd,CAAC;IAEK,oBAAoB;;YACxB,MAAM,IAAI,KAAK,CAAC,iBAAiB,CAAC,CAAA;QACpC,CAAC;KAAA;CACF;AA5BD,oFA4BC"} \ No newline at end of file diff --git a/node_modules/@actions/http-client/lib/index.d.ts b/node_modules/@actions/http-client/lib/index.d.ts new file mode 100644 index 0000000..38db700 --- /dev/null +++ b/node_modules/@actions/http-client/lib/index.d.ts @@ -0,0 +1,130 @@ +/// +/// +/// +import * as http from 'http'; +import * as ifm from './interfaces'; +import { ProxyAgent } from 'undici'; +export declare enum HttpCodes { + OK = 200, + MultipleChoices = 300, + MovedPermanently = 301, + ResourceMoved = 302, + SeeOther = 303, + NotModified = 304, + UseProxy = 305, + SwitchProxy = 306, + TemporaryRedirect = 307, + PermanentRedirect = 308, + BadRequest = 400, + Unauthorized = 401, + PaymentRequired = 402, + Forbidden = 403, + NotFound = 404, + MethodNotAllowed = 405, + NotAcceptable = 406, + ProxyAuthenticationRequired = 407, + RequestTimeout = 408, + Conflict = 409, + Gone = 410, + TooManyRequests = 429, + InternalServerError = 500, + NotImplemented = 501, + BadGateway = 502, + ServiceUnavailable = 503, + GatewayTimeout = 504 +} +export declare enum Headers { + Accept = "accept", + ContentType = "content-type" +} +export declare enum MediaTypes { + ApplicationJson = "application/json" +} +/** + * Returns the proxy URL, depending upon the supplied url and proxy environment variables. + * @param serverUrl The server URL where the request will be sent. 
For example, https://api.github.com */ +export declare function getProxyUrl(serverUrl: string): string; +export declare class HttpClientError extends Error { + constructor(message: string, statusCode: number); + statusCode: number; + result?: any; +} +export declare class HttpClientResponse { + constructor(message: http.IncomingMessage); + message: http.IncomingMessage; + readBody(): Promise<string>; + readBodyBuffer?(): Promise<Buffer>; +} +export declare function isHttps(requestUrl: string): boolean; +export declare class HttpClient { + userAgent: string | undefined; + handlers: ifm.RequestHandler[]; + requestOptions: ifm.RequestOptions | undefined; + private _ignoreSslError; + private _socketTimeout; + private _allowRedirects; + private _allowRedirectDowngrade; + private _maxRedirects; + private _allowRetries; + private _maxRetries; + private _agent; + private _proxyAgent; + private _proxyAgentDispatcher; + private _keepAlive; + private _disposed; + constructor(userAgent?: string, handlers?: ifm.RequestHandler[], requestOptions?: ifm.RequestOptions); + options(requestUrl: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise<HttpClientResponse>; + get(requestUrl: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise<HttpClientResponse>; + del(requestUrl: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise<HttpClientResponse>; + post(requestUrl: string, data: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise<HttpClientResponse>; + patch(requestUrl: string, data: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise<HttpClientResponse>; + put(requestUrl: string, data: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise<HttpClientResponse>; + head(requestUrl: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise<HttpClientResponse>; + sendStream(verb: string, requestUrl: string, stream: NodeJS.ReadableStream, additionalHeaders?: http.OutgoingHttpHeaders): Promise<HttpClientResponse>; + /** + * Gets a typed object from an endpoint + * Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise + */ + getJson<T>(requestUrl: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise<TypedResponse<T>>; + postJson<T>(requestUrl: string, obj: any, additionalHeaders?: http.OutgoingHttpHeaders): Promise<TypedResponse<T>>; + putJson<T>(requestUrl: string, obj: any, additionalHeaders?: http.OutgoingHttpHeaders): Promise<TypedResponse<T>>; + patchJson<T>(requestUrl: string, obj: any, additionalHeaders?: http.OutgoingHttpHeaders): Promise<TypedResponse<T>>; + /** + * Makes a raw http request. + * All other methods such as get, post, patch, and request ultimately call this. + * Prefer get, del, post and patch + */ + request(verb: string, requestUrl: string, data: string | NodeJS.ReadableStream | null, headers?: http.OutgoingHttpHeaders): Promise<HttpClientResponse>; + /** + * Needs to be called if keepAlive is set to true in request options. + */ + dispose(): void; + /** + * Raw request. + * @param info + * @param data + */ + requestRaw(info: ifm.RequestInfo, data: string | NodeJS.ReadableStream | null): Promise<HttpClientResponse>; + /** + * Raw request with callback. + * @param info + * @param data + * @param onResult + */ + requestRawWithCallback(info: ifm.RequestInfo, data: string | NodeJS.ReadableStream | null, onResult: (err?: Error, res?: HttpClientResponse) => void): void; + /** + * Gets an http agent. This function is useful when you need an http agent that handles + * routing through a proxy server - depending upon the url and proxy environment variables. + * @param serverUrl The server URL where the request will be sent.
For example, https://api.github.com + */ + getAgent(serverUrl: string): http.Agent; + getAgentDispatcher(serverUrl: string): ProxyAgent | undefined; + private _prepareRequest; + private _mergeHeaders; + private _getExistingOrDefaultHeader; + private _getAgent; + private _getProxyAgentDispatcher; + private _performExponentialBackoff; + private _processResponse; +} diff --git a/node_modules/@actions/http-client/lib/index.js b/node_modules/@actions/http-client/lib/index.js new file mode 100644 index 0000000..c337ca6 --- /dev/null +++ b/node_modules/@actions/http-client/lib/index.js @@ -0,0 +1,652 @@ +"use strict"; +/* eslint-disable @typescript-eslint/no-explicit-any */ +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.HttpClient = exports.isHttps = exports.HttpClientResponse = exports.HttpClientError = exports.getProxyUrl = exports.MediaTypes = exports.Headers = exports.HttpCodes = void 0; +const http = __importStar(require("http")); +const https = __importStar(require("https")); +const pm = __importStar(require("./proxy")); +const tunnel = __importStar(require("tunnel")); +const undici_1 = require("undici"); +var HttpCodes; +(function (HttpCodes) { + HttpCodes[HttpCodes["OK"] = 200] = "OK"; + HttpCodes[HttpCodes["MultipleChoices"] = 300] = "MultipleChoices"; + HttpCodes[HttpCodes["MovedPermanently"] = 301] = "MovedPermanently"; + HttpCodes[HttpCodes["ResourceMoved"] = 302] = "ResourceMoved"; + HttpCodes[HttpCodes["SeeOther"] = 303] = "SeeOther"; + HttpCodes[HttpCodes["NotModified"] = 304] = "NotModified"; + HttpCodes[HttpCodes["UseProxy"] = 305] = "UseProxy"; + HttpCodes[HttpCodes["SwitchProxy"] = 306] = "SwitchProxy"; + HttpCodes[HttpCodes["TemporaryRedirect"] = 307] = "TemporaryRedirect"; + HttpCodes[HttpCodes["PermanentRedirect"] = 308] = "PermanentRedirect"; + HttpCodes[HttpCodes["BadRequest"] = 400] = "BadRequest"; + HttpCodes[HttpCodes["Unauthorized"] = 401] = "Unauthorized"; + HttpCodes[HttpCodes["PaymentRequired"] = 402] = "PaymentRequired"; + HttpCodes[HttpCodes["Forbidden"] = 403] = "Forbidden"; + HttpCodes[HttpCodes["NotFound"] = 404] = "NotFound"; + HttpCodes[HttpCodes["MethodNotAllowed"] = 405] = "MethodNotAllowed"; + HttpCodes[HttpCodes["NotAcceptable"] = 406] = "NotAcceptable"; + HttpCodes[HttpCodes["ProxyAuthenticationRequired"] = 407] = "ProxyAuthenticationRequired"; + HttpCodes[HttpCodes["RequestTimeout"] = 408] = "RequestTimeout"; + HttpCodes[HttpCodes["Conflict"] = 409] = "Conflict"; + HttpCodes[HttpCodes["Gone"] = 410] = "Gone"; + HttpCodes[HttpCodes["TooManyRequests"] = 429] = "TooManyRequests"; + HttpCodes[HttpCodes["InternalServerError"] = 500] = "InternalServerError"; + HttpCodes[HttpCodes["NotImplemented"] = 501] = "NotImplemented"; + HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway"; + HttpCodes[HttpCodes["ServiceUnavailable"] = 503] = "ServiceUnavailable"; + HttpCodes[HttpCodes["GatewayTimeout"] = 504] = "GatewayTimeout"; +})(HttpCodes || (exports.HttpCodes = HttpCodes = {})); +var Headers; +(function (Headers) { + Headers["Accept"] = "accept"; + Headers["ContentType"] = "content-type"; +})(Headers || (exports.Headers = Headers = {})); +var MediaTypes; +(function (MediaTypes) { + MediaTypes["ApplicationJson"] = "application/json"; +})(MediaTypes || (exports.MediaTypes = MediaTypes = {})); +/** + * Returns the proxy URL, depending upon the supplied url and proxy environment variables. + * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com + */ +function getProxyUrl(serverUrl) { + const proxyUrl = pm.getProxyUrl(new URL(serverUrl)); + return proxyUrl ? 
proxyUrl.href : ''; +} +exports.getProxyUrl = getProxyUrl; +const HttpRedirectCodes = [ + HttpCodes.MovedPermanently, + HttpCodes.ResourceMoved, + HttpCodes.SeeOther, + HttpCodes.TemporaryRedirect, + HttpCodes.PermanentRedirect +]; +const HttpResponseRetryCodes = [ + HttpCodes.BadGateway, + HttpCodes.ServiceUnavailable, + HttpCodes.GatewayTimeout +]; +const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD']; +const ExponentialBackoffCeiling = 10; +const ExponentialBackoffTimeSlice = 5; +class HttpClientError extends Error { + constructor(message, statusCode) { + super(message); + this.name = 'HttpClientError'; + this.statusCode = statusCode; + Object.setPrototypeOf(this, HttpClientError.prototype); + } +} +exports.HttpClientError = HttpClientError; +class HttpClientResponse { + constructor(message) { + this.message = message; + } + readBody() { + return __awaiter(this, void 0, void 0, function* () { + return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () { + let output = Buffer.alloc(0); + this.message.on('data', (chunk) => { + output = Buffer.concat([output, chunk]); + }); + this.message.on('end', () => { + resolve(output.toString()); + }); + })); + }); + } + readBodyBuffer() { + return __awaiter(this, void 0, void 0, function* () { + return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () { + const chunks = []; + this.message.on('data', (chunk) => { + chunks.push(chunk); + }); + this.message.on('end', () => { + resolve(Buffer.concat(chunks)); + }); + })); + }); + } +} +exports.HttpClientResponse = HttpClientResponse; +function isHttps(requestUrl) { + const parsedUrl = new URL(requestUrl); + return parsedUrl.protocol === 'https:'; +} +exports.isHttps = isHttps; +class HttpClient { + constructor(userAgent, handlers, requestOptions) { + this._ignoreSslError = false; + this._allowRedirects = true; + this._allowRedirectDowngrade = false; + this._maxRedirects = 50; + this._allowRetries = false; + this._maxRetries = 1; + this._keepAlive = false; + this._disposed = false; + this.userAgent = userAgent; + this.handlers = handlers || []; + this.requestOptions = requestOptions; + if (requestOptions) { + if (requestOptions.ignoreSslError != null) { + this._ignoreSslError = requestOptions.ignoreSslError; + } + this._socketTimeout = requestOptions.socketTimeout; + if (requestOptions.allowRedirects != null) { + this._allowRedirects = requestOptions.allowRedirects; + } + if (requestOptions.allowRedirectDowngrade != null) { + this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade; + } + if (requestOptions.maxRedirects != null) { + this._maxRedirects = Math.max(requestOptions.maxRedirects, 0); + } + if (requestOptions.keepAlive != null) { + this._keepAlive = requestOptions.keepAlive; + } + if (requestOptions.allowRetries != null) { + this._allowRetries = requestOptions.allowRetries; + } + if (requestOptions.maxRetries != null) { + this._maxRetries = requestOptions.maxRetries; + } + } + } + options(requestUrl, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('OPTIONS', requestUrl, null, additionalHeaders || {}); + }); + } + get(requestUrl, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('GET', requestUrl, null, additionalHeaders || {}); + }); + } + del(requestUrl, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('DELETE', requestUrl, null, additionalHeaders || {}); + }); + } + post(requestUrl, 
data, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('POST', requestUrl, data, additionalHeaders || {}); + }); + } + patch(requestUrl, data, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('PATCH', requestUrl, data, additionalHeaders || {}); + }); + } + put(requestUrl, data, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('PUT', requestUrl, data, additionalHeaders || {}); + }); + } + head(requestUrl, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('HEAD', requestUrl, null, additionalHeaders || {}); + }); + } + sendStream(verb, requestUrl, stream, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request(verb, requestUrl, stream, additionalHeaders); + }); + } + /** + * Gets a typed object from an endpoint + * Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise + */ + getJson(requestUrl, additionalHeaders = {}) { + return __awaiter(this, void 0, void 0, function* () { + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + const res = yield this.get(requestUrl, additionalHeaders); + return this._processResponse(res, this.requestOptions); + }); + } + postJson(requestUrl, obj, additionalHeaders = {}) { + return __awaiter(this, void 0, void 0, function* () { + const data = JSON.stringify(obj, null, 2); + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); + const res = yield this.post(requestUrl, data, additionalHeaders); + return this._processResponse(res, this.requestOptions); + }); + } + putJson(requestUrl, obj, additionalHeaders = {}) { + return __awaiter(this, void 0, void 0, function* () { + const data = JSON.stringify(obj, null, 2); + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); + const res = yield this.put(requestUrl, data, additionalHeaders); + return this._processResponse(res, this.requestOptions); + }); + } + patchJson(requestUrl, obj, additionalHeaders = {}) { + return __awaiter(this, void 0, void 0, function* () { + const data = JSON.stringify(obj, null, 2); + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); + const res = yield this.patch(requestUrl, data, additionalHeaders); + return this._processResponse(res, this.requestOptions); + }); + } + /** + * Makes a raw http request. + * All other methods such as get, post, patch, and request ultimately call this. 
+ * Prefer get, del, post and patch + */ + request(verb, requestUrl, data, headers) { + return __awaiter(this, void 0, void 0, function* () { + if (this._disposed) { + throw new Error('Client has already been disposed.'); + } + const parsedUrl = new URL(requestUrl); + let info = this._prepareRequest(verb, parsedUrl, headers); + // Only perform retries on reads since writes may not be idempotent. + const maxTries = this._allowRetries && RetryableHttpVerbs.includes(verb) + ? this._maxRetries + 1 + : 1; + let numTries = 0; + let response; + do { + response = yield this.requestRaw(info, data); + // Check if it's an authentication challenge + if (response && + response.message && + response.message.statusCode === HttpCodes.Unauthorized) { + let authenticationHandler; + for (const handler of this.handlers) { + if (handler.canHandleAuthentication(response)) { + authenticationHandler = handler; + break; + } + } + if (authenticationHandler) { + return authenticationHandler.handleAuthentication(this, info, data); + } + else { + // We have received an unauthorized response but have no handlers to handle it. + // Let the response return to the caller. + return response; + } + } + let redirectsRemaining = this._maxRedirects; + while (response.message.statusCode && + HttpRedirectCodes.includes(response.message.statusCode) && + this._allowRedirects && + redirectsRemaining > 0) { + const redirectUrl = response.message.headers['location']; + if (!redirectUrl) { + // if there's no location to redirect to, we won't + break; + } + const parsedRedirectUrl = new URL(redirectUrl); + if (parsedUrl.protocol === 'https:' && + parsedUrl.protocol !== parsedRedirectUrl.protocol && + !this._allowRedirectDowngrade) { + throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.'); + } + // we need to finish reading the response before reassigning response + // which will leak the open socket. + yield response.readBody(); + // strip authorization header if redirected to a different hostname + if (parsedRedirectUrl.hostname !== parsedUrl.hostname) { + for (const header in headers) { + // header names are case insensitive + if (header.toLowerCase() === 'authorization') { + delete headers[header]; + } + } + } + // let's make the request with the new redirectUrl + info = this._prepareRequest(verb, parsedRedirectUrl, headers); + response = yield this.requestRaw(info, data); + redirectsRemaining--; + } + if (!response.message.statusCode || + !HttpResponseRetryCodes.includes(response.message.statusCode)) { + // If not a retry code, return immediately instead of retrying + return response; + } + numTries += 1; + if (numTries < maxTries) { + yield response.readBody(); + yield this._performExponentialBackoff(numTries); + } + } while (numTries < maxTries); + return response; + }); + } + /** + * Needs to be called if keepAlive is set to true in request options. + */ + dispose() { + if (this._agent) { + this._agent.destroy(); + } + this._disposed = true; + } + /** + * Raw request. + * @param info + * @param data + */ + requestRaw(info, data) { + return __awaiter(this, void 0, void 0, function* () { + return new Promise((resolve, reject) => { + function callbackForResult(err, res) { + if (err) { + reject(err); + } + else if (!res) { + // If `err` is not passed, then `res` must be passed. 
+ reject(new Error('Unknown error')); + } + else { + resolve(res); + } + } + this.requestRawWithCallback(info, data, callbackForResult); + }); + }); + } + /** + * Raw request with callback. + * @param info + * @param data + * @param onResult + */ + requestRawWithCallback(info, data, onResult) { + if (typeof data === 'string') { + if (!info.options.headers) { + info.options.headers = {}; + } + info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8'); + } + let callbackCalled = false; + function handleResult(err, res) { + if (!callbackCalled) { + callbackCalled = true; + onResult(err, res); + } + } + const req = info.httpModule.request(info.options, (msg) => { + const res = new HttpClientResponse(msg); + handleResult(undefined, res); + }); + let socket; + req.on('socket', sock => { + socket = sock; + }); + // If we ever get disconnected, we want the socket to timeout eventually + req.setTimeout(this._socketTimeout || 3 * 60000, () => { + if (socket) { + socket.end(); + } + handleResult(new Error(`Request timeout: ${info.options.path}`)); + }); + req.on('error', function (err) { + // err has statusCode property + // res should have headers + handleResult(err); + }); + if (data && typeof data === 'string') { + req.write(data, 'utf8'); + } + if (data && typeof data !== 'string') { + data.on('close', function () { + req.end(); + }); + data.pipe(req); + } + else { + req.end(); + } + } + /** + * Gets an http agent. This function is useful when you need an http agent that handles + * routing through a proxy server - depending upon the url and proxy environment variables. + * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com + */ + getAgent(serverUrl) { + const parsedUrl = new URL(serverUrl); + return this._getAgent(parsedUrl); + } + getAgentDispatcher(serverUrl) { + const parsedUrl = new URL(serverUrl); + const proxyUrl = pm.getProxyUrl(parsedUrl); + const useProxy = proxyUrl && proxyUrl.hostname; + if (!useProxy) { + return; + } + return this._getProxyAgentDispatcher(parsedUrl, proxyUrl); + } + _prepareRequest(method, requestUrl, headers) { + const info = {}; + info.parsedUrl = requestUrl; + const usingSsl = info.parsedUrl.protocol === 'https:'; + info.httpModule = usingSsl ? https : http; + const defaultPort = usingSsl ? 443 : 80; + info.options = {}; + info.options.host = info.parsedUrl.hostname; + info.options.port = info.parsedUrl.port + ? 
parseInt(info.parsedUrl.port) + : defaultPort; + info.options.path = + (info.parsedUrl.pathname || '') + (info.parsedUrl.search || ''); + info.options.method = method; + info.options.headers = this._mergeHeaders(headers); + if (this.userAgent != null) { + info.options.headers['user-agent'] = this.userAgent; + } + info.options.agent = this._getAgent(info.parsedUrl); + // gives handlers an opportunity to participate + if (this.handlers) { + for (const handler of this.handlers) { + handler.prepareRequest(info.options); + } + } + return info; + } + _mergeHeaders(headers) { + if (this.requestOptions && this.requestOptions.headers) { + return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers || {})); + } + return lowercaseKeys(headers || {}); + } + _getExistingOrDefaultHeader(additionalHeaders, header, _default) { + let clientHeader; + if (this.requestOptions && this.requestOptions.headers) { + clientHeader = lowercaseKeys(this.requestOptions.headers)[header]; + } + return additionalHeaders[header] || clientHeader || _default; + } + _getAgent(parsedUrl) { + let agent; + const proxyUrl = pm.getProxyUrl(parsedUrl); + const useProxy = proxyUrl && proxyUrl.hostname; + if (this._keepAlive && useProxy) { + agent = this._proxyAgent; + } + if (!useProxy) { + agent = this._agent; + } + // if agent is already assigned use that agent. + if (agent) { + return agent; + } + const usingSsl = parsedUrl.protocol === 'https:'; + let maxSockets = 100; + if (this.requestOptions) { + maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets; + } + // This is `useProxy` again, but we need to check `proxyURl` directly for TypeScripts's flow analysis. + if (proxyUrl && proxyUrl.hostname) { + const agentOptions = { + maxSockets, + keepAlive: this._keepAlive, + proxy: Object.assign(Object.assign({}, ((proxyUrl.username || proxyUrl.password) && { + proxyAuth: `${proxyUrl.username}:${proxyUrl.password}` + })), { host: proxyUrl.hostname, port: proxyUrl.port }) + }; + let tunnelAgent; + const overHttps = proxyUrl.protocol === 'https:'; + if (usingSsl) { + tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp; + } + else { + tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp; + } + agent = tunnelAgent(agentOptions); + this._proxyAgent = agent; + } + // if tunneling agent isn't assigned create a new agent + if (!agent) { + const options = { keepAlive: this._keepAlive, maxSockets }; + agent = usingSsl ? new https.Agent(options) : new http.Agent(options); + this._agent = agent; + } + if (usingSsl && this._ignoreSslError) { + // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process + // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options + // we have to cast it to any and change it directly + agent.options = Object.assign(agent.options || {}, { + rejectUnauthorized: false + }); + } + return agent; + } + _getProxyAgentDispatcher(parsedUrl, proxyUrl) { + let proxyAgent; + if (this._keepAlive) { + proxyAgent = this._proxyAgentDispatcher; + } + // if agent is already assigned use that agent. + if (proxyAgent) { + return proxyAgent; + } + const usingSsl = parsedUrl.protocol === 'https:'; + proxyAgent = new undici_1.ProxyAgent(Object.assign({ uri: proxyUrl.href, pipelining: !this._keepAlive ? 
0 : 1 }, ((proxyUrl.username || proxyUrl.password) && { + token: `Basic ${Buffer.from(`${proxyUrl.username}:${proxyUrl.password}`).toString('base64')}` + }))); + this._proxyAgentDispatcher = proxyAgent; + if (usingSsl && this._ignoreSslError) { + // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process + // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options + // we have to cast it to any and change it directly + proxyAgent.options = Object.assign(proxyAgent.options.requestTls || {}, { + rejectUnauthorized: false + }); + } + return proxyAgent; + } + _performExponentialBackoff(retryNumber) { + return __awaiter(this, void 0, void 0, function* () { + retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber); + const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber); + return new Promise(resolve => setTimeout(() => resolve(), ms)); + }); + } + _processResponse(res, options) { + return __awaiter(this, void 0, void 0, function* () { + return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () { + const statusCode = res.message.statusCode || 0; + const response = { + statusCode, + result: null, + headers: {} + }; + // not found leads to null obj returned + if (statusCode === HttpCodes.NotFound) { + resolve(response); + } + // get the result from the body + function dateTimeDeserializer(key, value) { + if (typeof value === 'string') { + const a = new Date(value); + if (!isNaN(a.valueOf())) { + return a; + } + } + return value; + } + let obj; + let contents; + try { + contents = yield res.readBody(); + if (contents && contents.length > 0) { + if (options && options.deserializeDates) { + obj = JSON.parse(contents, dateTimeDeserializer); + } + else { + obj = JSON.parse(contents); + } + response.result = obj; + } + response.headers = res.message.headers; + } + catch (err) { + // Invalid resource (contents not json); leaving result obj null + } + // note that 3xx redirects are handled by the http layer. 
+ if (statusCode > 299) { + let msg; + // if exception/error in body, attempt to get better error + if (obj && obj.message) { + msg = obj.message; + } + else if (contents && contents.length > 0) { + // it may be the case that the exception is in the body message as string + msg = contents; + } + else { + msg = `Failed request: (${statusCode})`; + } + const err = new HttpClientError(msg, statusCode); + err.result = response.result; + reject(err); + } + else { + resolve(response); + } + })); + }); + } +} +exports.HttpClient = HttpClient; +const lowercaseKeys = (obj) => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {}); +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@actions/http-client/lib/index.js.map b/node_modules/@actions/http-client/lib/index.js.map new file mode 100644 index 0000000..2a0d1a8 --- /dev/null +++ b/node_modules/@actions/http-client/lib/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":";AAAA,uDAAuD;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAEvD,2CAA4B;AAC5B,6CAA8B;AAG9B,4CAA6B;AAC7B,+CAAgC;AAChC,mCAAiC;AAEjC,IAAY,SA4BX;AA5BD,WAAY,SAAS;IACnB,uCAAQ,CAAA;IACR,iEAAqB,CAAA;IACrB,mEAAsB,CAAA;IACtB,6DAAmB,CAAA;IACnB,mDAAc,CAAA;IACd,yDAAiB,CAAA;IACjB,mDAAc,CAAA;IACd,yDAAiB,CAAA;IACjB,qEAAuB,CAAA;IACvB,qEAAuB,CAAA;IACvB,uDAAgB,CAAA;IAChB,2DAAkB,CAAA;IAClB,iEAAqB,CAAA;IACrB,qDAAe,CAAA;IACf,mDAAc,CAAA;IACd,mEAAsB,CAAA;IACtB,6DAAmB,CAAA;IACnB,yFAAiC,CAAA;IACjC,+DAAoB,CAAA;IACpB,mDAAc,CAAA;IACd,2CAAU,CAAA;IACV,iEAAqB,CAAA;IACrB,yEAAyB,CAAA;IACzB,+DAAoB,CAAA;IACpB,uDAAgB,CAAA;IAChB,uEAAwB,CAAA;IACxB,+DAAoB,CAAA;AACtB,CAAC,EA5BW,SAAS,yBAAT,SAAS,QA4BpB;AAED,IAAY,OAGX;AAHD,WAAY,OAAO;IACjB,4BAAiB,CAAA;IACjB,uCAA4B,CAAA;AAC9B,CAAC,EAHW,OAAO,uBAAP,OAAO,QAGlB;AAED,IAAY,UAEX;AAFD,WAAY,UAAU;IACpB,kDAAoC,CAAA;AACtC,CAAC,EAFW,UAAU,0BAAV,UAAU,QAErB;AAED;;;GAGG;AACH,SAAgB,WAAW,CAAC,SAAiB;IAC3C,MAAM,QAAQ,GAAG,EAAE,CAAC,WAAW,CAAC,IAAI,GAAG,CAAC,SAAS,CAAC,CAAC,CAAA;IACnD,OAAO,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAA;AACtC,CAAC;AAHD,kCAGC;AAED,MAAM,iBAAiB,GAAa;IAClC,SAAS,CAAC,gBAAgB;IAC1B,SAAS,CAAC,aAAa;IACvB,SAAS,CAAC,QAAQ;IAClB,SAAS,CAAC,iBAAiB;IAC3B,SAAS,CAAC,iBAAiB;CAC5B,CAAA;AACD,MAAM,sBAAsB,GAAa;IACvC,SAAS,CAAC,UAAU;IACpB,SAAS,CAAC,kBAAkB;IAC5B,SAAS,CAAC,cAAc;CACzB,CAAA;AACD,MAAM,kBAAkB,GAAa,CAAC,SAAS,EAAE,KAAK,EAAE,QAAQ,EAAE,MAAM,CAAC,CAAA;AACzE,MAAM,yBAAyB,GAAG,EAAE,CAAA;AACpC,MAAM,2BAA2B,GAAG,CAAC,CAAA;AAErC,MAAa,eAAgB,SAAQ,KAAK;IACxC,YAAY,OAAe,EAAE,UAAkB;QAC7C,KAAK,CAAC,OAAO,CAAC,CAAA;QACd,IAAI,CAAC,IAAI,GAAG,iBAAiB,CAAA;QAC7B,IAAI,CAAC,UAAU,GAAG,UAAU,CAAA;QAC5B,MAAM,CAAC,cAAc,CAAC,IAAI,EAAE,eAAe,CAAC,SAAS,CAAC,CAAA;IACxD,CAAC;CAIF;AAVD,0CAUC;AAED,MAAa,kBAAkB;IAC7B,YAAY,OAA6B;QACvC,IAAI,CAAC,OAAO,GAAG,OAAO,CAAA;IACxB,CAAC;IAGK,QAAQ;;YACZ,OAAO,IAAI,OAAO,CAAS,CAAM,OAAO,EAAC,EAAE;gBACzC,IAAI,MAAM,GAAG,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,CAAA;gBAE5B,IAAI,CAAC,OAAO,CAAC,EAAE,CAAC,MAAM,EAAE,CAAC,KAAa,EAAE,EAAE;oBACxC,MAAM,GAAG,MAAM,CAAC,MAAM,CAAC,CAAC,MAAM,EAAE,KAAK,CAAC,CAAC,CAAA;gBACzC,CAAC,CAAC,CAAA;gBAEF,IAAI,CAAC,OAAO,CAAC,EAAE,CAAC,KAAK,EAAE,GAAG,EAAE;oBAC1B,OAAO,CAAC,MAAM,CAAC,QAAQ,EAAE,CAAC,CAAA;gBAC5B,CAAC,CAAC,CAAA;YACJ,CAAC,CAAA,CAAC,CAAA;QACJ,CAAC;KAAA;IAEK,cAAc;;YAClB,OAAO,IAAI,OAAO,CAAS,CAAM,OAAO,EAAC,EAAE;gBACzC,MAAM,MAAM,GAAa,EAAE,CAAA;gBAE3B,IAAI,CAAC,OAAO,CAAC,EAAE,CAAC,MAAM,EAAE,CAAC,KAAa,EAAE,EAAE;oBACxC,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,CAAA;gBACpB,CAAC,CAAC,CAAA;gBAEF,IAAI,CAAC,OAAO,CAAC,EAAE,CAAC,KAAK,EAAE,GAAG,EAAE;oBAC1B,OAAO,CAAC,MA
AM,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,CAAA;gBAChC,CAAC,CAAC,CAAA;YACJ,CAAC,CAAA,CAAC,CAAA;QACJ,CAAC;KAAA;CACF;AAjCD,gDAiCC;AAED,SAAgB,OAAO,CAAC,UAAkB;IACxC,MAAM,SAAS,GAAQ,IAAI,GAAG,CAAC,UAAU,CAAC,CAAA;IAC1C,OAAO,SAAS,CAAC,QAAQ,KAAK,QAAQ,CAAA;AACxC,CAAC;AAHD,0BAGC;AAED,MAAa,UAAU;IAkBrB,YACE,SAAkB,EAClB,QAA+B,EAC/B,cAAmC;QAhB7B,oBAAe,GAAG,KAAK,CAAA;QAEvB,oBAAe,GAAG,IAAI,CAAA;QACtB,4BAAuB,GAAG,KAAK,CAAA;QAC/B,kBAAa,GAAG,EAAE,CAAA;QAClB,kBAAa,GAAG,KAAK,CAAA;QACrB,gBAAW,GAAG,CAAC,CAAA;QAIf,eAAU,GAAG,KAAK,CAAA;QAClB,cAAS,GAAG,KAAK,CAAA;QAOvB,IAAI,CAAC,SAAS,GAAG,SAAS,CAAA;QAC1B,IAAI,CAAC,QAAQ,GAAG,QAAQ,IAAI,EAAE,CAAA;QAC9B,IAAI,CAAC,cAAc,GAAG,cAAc,CAAA;QACpC,IAAI,cAAc,EAAE;YAClB,IAAI,cAAc,CAAC,cAAc,IAAI,IAAI,EAAE;gBACzC,IAAI,CAAC,eAAe,GAAG,cAAc,CAAC,cAAc,CAAA;aACrD;YAED,IAAI,CAAC,cAAc,GAAG,cAAc,CAAC,aAAa,CAAA;YAElD,IAAI,cAAc,CAAC,cAAc,IAAI,IAAI,EAAE;gBACzC,IAAI,CAAC,eAAe,GAAG,cAAc,CAAC,cAAc,CAAA;aACrD;YAED,IAAI,cAAc,CAAC,sBAAsB,IAAI,IAAI,EAAE;gBACjD,IAAI,CAAC,uBAAuB,GAAG,cAAc,CAAC,sBAAsB,CAAA;aACrE;YAED,IAAI,cAAc,CAAC,YAAY,IAAI,IAAI,EAAE;gBACvC,IAAI,CAAC,aAAa,GAAG,IAAI,CAAC,GAAG,CAAC,cAAc,CAAC,YAAY,EAAE,CAAC,CAAC,CAAA;aAC9D;YAED,IAAI,cAAc,CAAC,SAAS,IAAI,IAAI,EAAE;gBACpC,IAAI,CAAC,UAAU,GAAG,cAAc,CAAC,SAAS,CAAA;aAC3C;YAED,IAAI,cAAc,CAAC,YAAY,IAAI,IAAI,EAAE;gBACvC,IAAI,CAAC,aAAa,GAAG,cAAc,CAAC,YAAY,CAAA;aACjD;YAED,IAAI,cAAc,CAAC,UAAU,IAAI,IAAI,EAAE;gBACrC,IAAI,CAAC,WAAW,GAAG,cAAc,CAAC,UAAU,CAAA;aAC7C;SACF;IACH,CAAC;IAEK,OAAO,CACX,UAAkB,EAClB,iBAA4C;;YAE5C,OAAO,IAAI,CAAC,OAAO,CAAC,SAAS,EAAE,UAAU,EAAE,IAAI,EAAE,iBAAiB,IAAI,EAAE,CAAC,CAAA;QAC3E,CAAC;KAAA;IAEK,GAAG,CACP,UAAkB,EAClB,iBAA4C;;YAE5C,OAAO,IAAI,CAAC,OAAO,CAAC,KAAK,EAAE,UAAU,EAAE,IAAI,EAAE,iBAAiB,IAAI,EAAE,CAAC,CAAA;QACvE,CAAC;KAAA;IAEK,GAAG,CACP,UAAkB,EAClB,iBAA4C;;YAE5C,OAAO,IAAI,CAAC,OAAO,CAAC,QAAQ,EAAE,UAAU,EAAE,IAAI,EAAE,iBAAiB,IAAI,EAAE,CAAC,CAAA;QAC1E,CAAC;KAAA;IAEK,IAAI,CACR,UAAkB,EAClB,IAAY,EACZ,iBAA4C;;YAE5C,OAAO,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,UAAU,EAAE,IAAI,EAAE,iBAAiB,IAAI,EAAE,CAAC,CAAA;QACxE,CAAC;KAAA;IAEK,KAAK,CACT,UAAkB,EAClB,IAAY,EACZ,iBAA4C;;YAE5C,OAAO,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE,UAAU,EAAE,IAAI,EAAE,iBAAiB,IAAI,EAAE,CAAC,CAAA;QACzE,CAAC;KAAA;IAEK,GAAG,CACP,UAAkB,EAClB,IAAY,EACZ,iBAA4C;;YAE5C,OAAO,IAAI,CAAC,OAAO,CAAC,KAAK,EAAE,UAAU,EAAE,IAAI,EAAE,iBAAiB,IAAI,EAAE,CAAC,CAAA;QACvE,CAAC;KAAA;IAEK,IAAI,CACR,UAAkB,EAClB,iBAA4C;;YAE5C,OAAO,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,UAAU,EAAE,IAAI,EAAE,iBAAiB,IAAI,EAAE,CAAC,CAAA;QACxE,CAAC;KAAA;IAEK,UAAU,CACd,IAAY,EACZ,UAAkB,EAClB,MAA6B,EAC7B,iBAA4C;;YAE5C,OAAO,IAAI,CAAC,OAAO,CAAC,IAAI,EAAE,UAAU,EAAE,MAAM,EAAE,iBAAiB,CAAC,CAAA;QAClE,CAAC;KAAA;IAED;;;OAGG;IACG,OAAO,CACX,UAAkB,EAClB,oBAA8C,EAAE;;YAEhD,iBAAiB,CAAC,OAAO,CAAC,MAAM,CAAC,GAAG,IAAI,CAAC,2BAA2B,CAClE,iBAAiB,EACjB,OAAO,CAAC,MAAM,EACd,UAAU,CAAC,eAAe,CAC3B,CAAA;YACD,MAAM,GAAG,GAAuB,MAAM,IAAI,CAAC,GAAG,CAC5C,UAAU,EACV,iBAAiB,CAClB,CAAA;YACD,OAAO,IAAI,CAAC,gBAAgB,CAAI,GAAG,EAAE,IAAI,CAAC,cAAc,CAAC,CAAA;QAC3D,CAAC;KAAA;IAEK,QAAQ,CACZ,UAAkB,EAClB,GAAQ,EACR,oBAA8C,EAAE;;YAEhD,MAAM,IAAI,GAAW,IAAI,CAAC,SAAS,CAAC,GAAG,EAAE,IAAI,EAAE,CAAC,CAAC,CAAA;YACjD,iBAAiB,CAAC,OAAO,CAAC,MAAM,CAAC,GAAG,IAAI,CAAC,2BAA2B,CAClE,iBAAiB,EACjB,OAAO,CAAC,MAAM,EACd,UAAU,CAAC,eAAe,CAC3B,CAAA;YACD,iBAAiB,CAAC,OAAO,CAAC,WAAW,CAAC,GAAG,IAAI,CAAC,2BAA2B,CACvE,iBAAiB,EACjB,OAAO,CAAC,WAAW,EACnB,UAAU,CAAC,eAAe,CAC3B,CAAA;YACD,MAAM,GAAG,GAAuB,MAAM,IAAI,CAAC,IAAI,CAC7C,UAAU,EACV,IAAI,EACJ,iBAAiB,CAClB,CAAA;YACD,OAAO,IAAI,CAAC,gBAAgB,CAAI,GAAG,EAAE,IAAI,CAAC,cAAc,CAAC,CAAA;QAC3D,CAAC;KAAA;IAEK,OAAO,CACX,UAAkB,EAClB,GAAQ,EACR,oBAA8C,EAAE;;YAEhD,MAAM,IAAI,GAAW,IAAI,CAAC,SAAS,CAAC,GAAG,EA
AE,IAAI,EAAE,CAAC,CAAC,CAAA;YACjD,iBAAiB,CAAC,OAAO,CAAC,MAAM,CAAC,GAAG,IAAI,CAAC,2BAA2B,CAClE,iBAAiB,EACjB,OAAO,CAAC,MAAM,EACd,UAAU,CAAC,eAAe,CAC3B,CAAA;YACD,iBAAiB,CAAC,OAAO,CAAC,WAAW,CAAC,GAAG,IAAI,CAAC,2BAA2B,CACvE,iBAAiB,EACjB,OAAO,CAAC,WAAW,EACnB,UAAU,CAAC,eAAe,CAC3B,CAAA;YACD,MAAM,GAAG,GAAuB,MAAM,IAAI,CAAC,GAAG,CAC5C,UAAU,EACV,IAAI,EACJ,iBAAiB,CAClB,CAAA;YACD,OAAO,IAAI,CAAC,gBAAgB,CAAI,GAAG,EAAE,IAAI,CAAC,cAAc,CAAC,CAAA;QAC3D,CAAC;KAAA;IAEK,SAAS,CACb,UAAkB,EAClB,GAAQ,EACR,oBAA8C,EAAE;;YAEhD,MAAM,IAAI,GAAW,IAAI,CAAC,SAAS,CAAC,GAAG,EAAE,IAAI,EAAE,CAAC,CAAC,CAAA;YACjD,iBAAiB,CAAC,OAAO,CAAC,MAAM,CAAC,GAAG,IAAI,CAAC,2BAA2B,CAClE,iBAAiB,EACjB,OAAO,CAAC,MAAM,EACd,UAAU,CAAC,eAAe,CAC3B,CAAA;YACD,iBAAiB,CAAC,OAAO,CAAC,WAAW,CAAC,GAAG,IAAI,CAAC,2BAA2B,CACvE,iBAAiB,EACjB,OAAO,CAAC,WAAW,EACnB,UAAU,CAAC,eAAe,CAC3B,CAAA;YACD,MAAM,GAAG,GAAuB,MAAM,IAAI,CAAC,KAAK,CAC9C,UAAU,EACV,IAAI,EACJ,iBAAiB,CAClB,CAAA;YACD,OAAO,IAAI,CAAC,gBAAgB,CAAI,GAAG,EAAE,IAAI,CAAC,cAAc,CAAC,CAAA;QAC3D,CAAC;KAAA;IAED;;;;OAIG;IACG,OAAO,CACX,IAAY,EACZ,UAAkB,EAClB,IAA2C,EAC3C,OAAkC;;YAElC,IAAI,IAAI,CAAC,SAAS,EAAE;gBAClB,MAAM,IAAI,KAAK,CAAC,mCAAmC,CAAC,CAAA;aACrD;YAED,MAAM,SAAS,GAAG,IAAI,GAAG,CAAC,UAAU,CAAC,CAAA;YACrC,IAAI,IAAI,GAAoB,IAAI,CAAC,eAAe,CAAC,IAAI,EAAE,SAAS,EAAE,OAAO,CAAC,CAAA;YAE1E,oEAAoE;YACpE,MAAM,QAAQ,GACZ,IAAI,CAAC,aAAa,IAAI,kBAAkB,CAAC,QAAQ,CAAC,IAAI,CAAC;gBACrD,CAAC,CAAC,IAAI,CAAC,WAAW,GAAG,CAAC;gBACtB,CAAC,CAAC,CAAC,CAAA;YACP,IAAI,QAAQ,GAAG,CAAC,CAAA;YAEhB,IAAI,QAAwC,CAAA;YAC5C,GAAG;gBACD,QAAQ,GAAG,MAAM,IAAI,CAAC,UAAU,CAAC,IAAI,EAAE,IAAI,CAAC,CAAA;gBAE5C,4CAA4C;gBAC5C,IACE,QAAQ;oBACR,QAAQ,CAAC,OAAO;oBAChB,QAAQ,CAAC,OAAO,CAAC,UAAU,KAAK,SAAS,CAAC,YAAY,EACtD;oBACA,IAAI,qBAAqD,CAAA;oBAEzD,KAAK,MAAM,OAAO,IAAI,IAAI,CAAC,QAAQ,EAAE;wBACnC,IAAI,OAAO,CAAC,uBAAuB,CAAC,QAAQ,CAAC,EAAE;4BAC7C,qBAAqB,GAAG,OAAO,CAAA;4BAC/B,MAAK;yBACN;qBACF;oBAED,IAAI,qBAAqB,EAAE;wBACzB,OAAO,qBAAqB,CAAC,oBAAoB,CAAC,IAAI,EAAE,IAAI,EAAE,IAAI,CAAC,CAAA;qBACpE;yBAAM;wBACL,+EAA+E;wBAC/E,yCAAyC;wBACzC,OAAO,QAAQ,CAAA;qBAChB;iBACF;gBAED,IAAI,kBAAkB,GAAW,IAAI,CAAC,aAAa,CAAA;gBACnD,OACE,QAAQ,CAAC,OAAO,CAAC,UAAU;oBAC3B,iBAAiB,CAAC,QAAQ,CAAC,QAAQ,CAAC,OAAO,CAAC,UAAU,CAAC;oBACvD,IAAI,CAAC,eAAe;oBACpB,kBAAkB,GAAG,CAAC,EACtB;oBACA,MAAM,WAAW,GACf,QAAQ,CAAC,OAAO,CAAC,OAAO,CAAC,UAAU,CAAC,CAAA;oBACtC,IAAI,CAAC,WAAW,EAAE;wBAChB,kDAAkD;wBAClD,MAAK;qBACN;oBACD,MAAM,iBAAiB,GAAG,IAAI,GAAG,CAAC,WAAW,CAAC,CAAA;oBAC9C,IACE,SAAS,CAAC,QAAQ,KAAK,QAAQ;wBAC/B,SAAS,CAAC,QAAQ,KAAK,iBAAiB,CAAC,QAAQ;wBACjD,CAAC,IAAI,CAAC,uBAAuB,EAC7B;wBACA,MAAM,IAAI,KAAK,CACb,8KAA8K,CAC/K,CAAA;qBACF;oBAED,qEAAqE;oBACrE,mCAAmC;oBACnC,MAAM,QAAQ,CAAC,QAAQ,EAAE,CAAA;oBAEzB,mEAAmE;oBACnE,IAAI,iBAAiB,CAAC,QAAQ,KAAK,SAAS,CAAC,QAAQ,EAAE;wBACrD,KAAK,MAAM,MAAM,IAAI,OAAO,EAAE;4BAC5B,oCAAoC;4BACpC,IAAI,MAAM,CAAC,WAAW,EAAE,KAAK,eAAe,EAAE;gCAC5C,OAAO,OAAO,CAAC,MAAM,CAAC,CAAA;6BACvB;yBACF;qBACF;oBAED,kDAAkD;oBAClD,IAAI,GAAG,IAAI,CAAC,eAAe,CAAC,IAAI,EAAE,iBAAiB,EAAE,OAAO,CAAC,CAAA;oBAC7D,QAAQ,GAAG,MAAM,IAAI,CAAC,UAAU,CAAC,IAAI,EAAE,IAAI,CAAC,CAAA;oBAC5C,kBAAkB,EAAE,CAAA;iBACrB;gBAED,IACE,CAAC,QAAQ,CAAC,OAAO,CAAC,UAAU;oBAC5B,CAAC,sBAAsB,CAAC,QAAQ,CAAC,QAAQ,CAAC,OAAO,CAAC,UAAU,CAAC,EAC7D;oBACA,8DAA8D;oBAC9D,OAAO,QAAQ,CAAA;iBAChB;gBAED,QAAQ,IAAI,CAAC,CAAA;gBAEb,IAAI,QAAQ,GAAG,QAAQ,EAAE;oBACvB,MAAM,QAAQ,CAAC,QAAQ,EAAE,CAAA;oBACzB,MAAM,IAAI,CAAC,0BAA0B,CAAC,QAAQ,CAAC,CAAA;iBAChD;aACF,QAAQ,QAAQ,GAAG,QAAQ,EAAC;YAE7B,OAAO,QAAQ,CAAA;QACjB,CAAC;KAAA;IAED;;OAEG;IACH,OAAO;QACL,IAAI,IAAI,CAAC,MAAM,EAAE;YACf,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE,CAAA;SACtB;QAED,IAAI,CAAC,SAAS,GAAG,IAAI,CAAA;IACvB,CAAC;IAED;;;;OAIG;IACG,UAAU,CACd,IAAqB,
EACrB,IAA2C;;YAE3C,OAAO,IAAI,OAAO,CAAqB,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;gBACzD,SAAS,iBAAiB,CAAC,GAAW,EAAE,GAAwB;oBAC9D,IAAI,GAAG,EAAE;wBACP,MAAM,CAAC,GAAG,CAAC,CAAA;qBACZ;yBAAM,IAAI,CAAC,GAAG,EAAE;wBACf,qDAAqD;wBACrD,MAAM,CAAC,IAAI,KAAK,CAAC,eAAe,CAAC,CAAC,CAAA;qBACnC;yBAAM;wBACL,OAAO,CAAC,GAAG,CAAC,CAAA;qBACb;gBACH,CAAC;gBAED,IAAI,CAAC,sBAAsB,CAAC,IAAI,EAAE,IAAI,EAAE,iBAAiB,CAAC,CAAA;YAC5D,CAAC,CAAC,CAAA;QACJ,CAAC;KAAA;IAED;;;;;OAKG;IACH,sBAAsB,CACpB,IAAqB,EACrB,IAA2C,EAC3C,QAAyD;QAEzD,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE;YAC5B,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE;gBACzB,IAAI,CAAC,OAAO,CAAC,OAAO,GAAG,EAAE,CAAA;aAC1B;YACD,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,gBAAgB,CAAC,GAAG,MAAM,CAAC,UAAU,CAAC,IAAI,EAAE,MAAM,CAAC,CAAA;SACzE;QAED,IAAI,cAAc,GAAG,KAAK,CAAA;QAC1B,SAAS,YAAY,CAAC,GAAW,EAAE,GAAwB;YACzD,IAAI,CAAC,cAAc,EAAE;gBACnB,cAAc,GAAG,IAAI,CAAA;gBACrB,QAAQ,CAAC,GAAG,EAAE,GAAG,CAAC,CAAA;aACnB;QACH,CAAC;QAED,MAAM,GAAG,GAAuB,IAAI,CAAC,UAAU,CAAC,OAAO,CACrD,IAAI,CAAC,OAAO,EACZ,CAAC,GAAyB,EAAE,EAAE;YAC5B,MAAM,GAAG,GAAuB,IAAI,kBAAkB,CAAC,GAAG,CAAC,CAAA;YAC3D,YAAY,CAAC,SAAS,EAAE,GAAG,CAAC,CAAA;QAC9B,CAAC,CACF,CAAA;QAED,IAAI,MAAkB,CAAA;QACtB,GAAG,CAAC,EAAE,CAAC,QAAQ,EAAE,IAAI,CAAC,EAAE;YACtB,MAAM,GAAG,IAAI,CAAA;QACf,CAAC,CAAC,CAAA;QAEF,wEAAwE;QACxE,GAAG,CAAC,UAAU,CAAC,IAAI,CAAC,cAAc,IAAI,CAAC,GAAG,KAAK,EAAE,GAAG,EAAE;YACpD,IAAI,MAAM,EAAE;gBACV,MAAM,CAAC,GAAG,EAAE,CAAA;aACb;YACD,YAAY,CAAC,IAAI,KAAK,CAAC,oBAAoB,IAAI,CAAC,OAAO,CAAC,IAAI,EAAE,CAAC,CAAC,CAAA;QAClE,CAAC,CAAC,CAAA;QAEF,GAAG,CAAC,EAAE,CAAC,OAAO,EAAE,UAAU,GAAG;YAC3B,8BAA8B;YAC9B,0BAA0B;YAC1B,YAAY,CAAC,GAAG,CAAC,CAAA;QACnB,CAAC,CAAC,CAAA;QAEF,IAAI,IAAI,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE;YACpC,GAAG,CAAC,KAAK,CAAC,IAAI,EAAE,MAAM,CAAC,CAAA;SACxB;QAED,IAAI,IAAI,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE;YACpC,IAAI,CAAC,EAAE,CAAC,OAAO,EAAE;gBACf,GAAG,CAAC,GAAG,EAAE,CAAA;YACX,CAAC,CAAC,CAAA;YAEF,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,CAAA;SACf;aAAM;YACL,GAAG,CAAC,GAAG,EAAE,CAAA;SACV;IACH,CAAC;IAED;;;;OAIG;IACH,QAAQ,CAAC,SAAiB;QACxB,MAAM,SAAS,GAAG,IAAI,GAAG,CAAC,SAAS,CAAC,CAAA;QACpC,OAAO,IAAI,CAAC,SAAS,CAAC,SAAS,CAAC,CAAA;IAClC,CAAC;IAED,kBAAkB,CAAC,SAAiB;QAClC,MAAM,SAAS,GAAG,IAAI,GAAG,CAAC,SAAS,CAAC,CAAA;QACpC,MAAM,QAAQ,GAAG,EAAE,CAAC,WAAW,CAAC,SAAS,CAAC,CAAA;QAC1C,MAAM,QAAQ,GAAG,QAAQ,IAAI,QAAQ,CAAC,QAAQ,CAAA;QAC9C,IAAI,CAAC,QAAQ,EAAE;YACb,OAAM;SACP;QAED,OAAO,IAAI,CAAC,wBAAwB,CAAC,SAAS,EAAE,QAAQ,CAAC,CAAA;IAC3D,CAAC;IAEO,eAAe,CACrB,MAAc,EACd,UAAe,EACf,OAAkC;QAElC,MAAM,IAAI,GAAqC,EAAE,CAAA;QAEjD,IAAI,CAAC,SAAS,GAAG,UAAU,CAAA;QAC3B,MAAM,QAAQ,GAAY,IAAI,CAAC,SAAS,CAAC,QAAQ,KAAK,QAAQ,CAAA;QAC9D,IAAI,CAAC,UAAU,GAAG,QAAQ,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,IAAI,CAAA;QACzC,MAAM,WAAW,GAAW,QAAQ,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAA;QAE/C,IAAI,CAAC,OAAO,GAAwB,EAAE,CAAA;QACtC,IAAI,CAAC,OAAO,CAAC,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAA;QAC3C,IAAI,CAAC,OAAO,CAAC,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,IAAI;YACrC,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC;YAC/B,CAAC,CAAC,WAAW,CAAA;QACf,IAAI,CAAC,OAAO,CAAC,IAAI;YACf,CAAC,IAAI,CAAC,SAAS,CAAC,QAAQ,IAAI,EAAE,CAAC,GAAG,CAAC,IAAI,CAAC,SAAS,CAAC,MAAM,IAAI,EAAE,CAAC,CAAA;QACjE,IAAI,CAAC,OAAO,CAAC,MAAM,GAAG,MAAM,CAAA;QAC5B,IAAI,CAAC,OAAO,CAAC,OAAO,GAAG,IAAI,CAAC,aAAa,CAAC,OAAO,CAAC,CAAA;QAClD,IAAI,IAAI,CAAC,SAAS,IAAI,IAAI,EAAE;YAC1B,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,YAAY,CAAC,GAAG,IAAI,CAAC,SAAS,CAAA;SACpD;QAED,IAAI,CAAC,OAAO,CAAC,KAAK,GAAG,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,SAAS,CAAC,CAAA;QAEnD,+CAA+C;QAC/C,IAAI,IAAI,CAAC,QAAQ,EAAE;YACjB,KAAK,MAAM,OAAO,IAAI,IAAI,CAAC,QAAQ,EAAE;gBACnC,OAAO,CAAC,cAAc,CAAC,IAAI,CAAC,OAAO,CAAC,CAAA;aACrC;SACF;QAED,OAAO,IAAI,CAAA;IACb,CAAC
;IAEO,aAAa,CACnB,OAAkC;QAElC,IAAI,IAAI,CAAC,cAAc,IAAI,IAAI,CAAC,cAAc,CAAC,OAAO,EAAE;YACtD,OAAO,MAAM,CAAC,MAAM,CAClB,EAAE,EACF,aAAa,CAAC,IAAI,CAAC,cAAc,CAAC,OAAO,CAAC,EAC1C,aAAa,CAAC,OAAO,IAAI,EAAE,CAAC,CAC7B,CAAA;SACF;QAED,OAAO,aAAa,CAAC,OAAO,IAAI,EAAE,CAAC,CAAA;IACrC,CAAC;IAEO,2BAA2B,CACjC,iBAA2C,EAC3C,MAAc,EACd,QAAgB;QAEhB,IAAI,YAAgC,CAAA;QACpC,IAAI,IAAI,CAAC,cAAc,IAAI,IAAI,CAAC,cAAc,CAAC,OAAO,EAAE;YACtD,YAAY,GAAG,aAAa,CAAC,IAAI,CAAC,cAAc,CAAC,OAAO,CAAC,CAAC,MAAM,CAAC,CAAA;SAClE;QACD,OAAO,iBAAiB,CAAC,MAAM,CAAC,IAAI,YAAY,IAAI,QAAQ,CAAA;IAC9D,CAAC;IAEO,SAAS,CAAC,SAAc;QAC9B,IAAI,KAAK,CAAA;QACT,MAAM,QAAQ,GAAG,EAAE,CAAC,WAAW,CAAC,SAAS,CAAC,CAAA;QAC1C,MAAM,QAAQ,GAAG,QAAQ,IAAI,QAAQ,CAAC,QAAQ,CAAA;QAE9C,IAAI,IAAI,CAAC,UAAU,IAAI,QAAQ,EAAE;YAC/B,KAAK,GAAG,IAAI,CAAC,WAAW,CAAA;SACzB;QAED,IAAI,CAAC,QAAQ,EAAE;YACb,KAAK,GAAG,IAAI,CAAC,MAAM,CAAA;SACpB;QAED,+CAA+C;QAC/C,IAAI,KAAK,EAAE;YACT,OAAO,KAAK,CAAA;SACb;QAED,MAAM,QAAQ,GAAG,SAAS,CAAC,QAAQ,KAAK,QAAQ,CAAA;QAChD,IAAI,UAAU,GAAG,GAAG,CAAA;QACpB,IAAI,IAAI,CAAC,cAAc,EAAE;YACvB,UAAU,GAAG,IAAI,CAAC,cAAc,CAAC,UAAU,IAAI,IAAI,CAAC,WAAW,CAAC,UAAU,CAAA;SAC3E;QAED,sGAAsG;QACtG,IAAI,QAAQ,IAAI,QAAQ,CAAC,QAAQ,EAAE;YACjC,MAAM,YAAY,GAAG;gBACnB,UAAU;gBACV,SAAS,EAAE,IAAI,CAAC,UAAU;gBAC1B,KAAK,kCACA,CAAC,CAAC,QAAQ,CAAC,QAAQ,IAAI,QAAQ,CAAC,QAAQ,CAAC,IAAI;oBAC9C,SAAS,EAAE,GAAG,QAAQ,CAAC,QAAQ,IAAI,QAAQ,CAAC,QAAQ,EAAE;iBACvD,CAAC,KACF,IAAI,EAAE,QAAQ,CAAC,QAAQ,EACvB,IAAI,EAAE,QAAQ,CAAC,IAAI,GACpB;aACF,CAAA;YAED,IAAI,WAAqB,CAAA;YACzB,MAAM,SAAS,GAAG,QAAQ,CAAC,QAAQ,KAAK,QAAQ,CAAA;YAChD,IAAI,QAAQ,EAAE;gBACZ,WAAW,GAAG,SAAS,CAAC,CAAC,CAAC,MAAM,CAAC,cAAc,CAAC,CAAC,CAAC,MAAM,CAAC,aAAa,CAAA;aACvE;iBAAM;gBACL,WAAW,GAAG,SAAS,CAAC,CAAC,CAAC,MAAM,CAAC,aAAa,CAAC,CAAC,CAAC,MAAM,CAAC,YAAY,CAAA;aACrE;YAED,KAAK,GAAG,WAAW,CAAC,YAAY,CAAC,CAAA;YACjC,IAAI,CAAC,WAAW,GAAG,KAAK,CAAA;SACzB;QAED,uDAAuD;QACvD,IAAI,CAAC,KAAK,EAAE;YACV,MAAM,OAAO,GAAG,EAAC,SAAS,EAAE,IAAI,CAAC,UAAU,EAAE,UAAU,EAAC,CAAA;YACxD,KAAK,GAAG,QAAQ,CAAC,CAAC,CAAC,IAAI,KAAK,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,IAAI,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,CAAA;YACrE,IAAI,CAAC,MAAM,GAAG,KAAK,CAAA;SACpB;QAED,IAAI,QAAQ,IAAI,IAAI,CAAC,eAAe,EAAE;YACpC,wGAAwG;YACxG,kFAAkF;YAClF,mDAAmD;YACnD,KAAK,CAAC,OAAO,GAAG,MAAM,CAAC,MAAM,CAAC,KAAK,CAAC,OAAO,IAAI,EAAE,EAAE;gBACjD,kBAAkB,EAAE,KAAK;aAC1B,CAAC,CAAA;SACH;QAED,OAAO,KAAK,CAAA;IACd,CAAC;IAEO,wBAAwB,CAAC,SAAc,EAAE,QAAa;QAC5D,IAAI,UAAU,CAAA;QAEd,IAAI,IAAI,CAAC,UAAU,EAAE;YACnB,UAAU,GAAG,IAAI,CAAC,qBAAqB,CAAA;SACxC;QAED,+CAA+C;QAC/C,IAAI,UAAU,EAAE;YACd,OAAO,UAAU,CAAA;SAClB;QAED,MAAM,QAAQ,GAAG,SAAS,CAAC,QAAQ,KAAK,QAAQ,CAAA;QAChD,UAAU,GAAG,IAAI,mBAAU,iBACzB,GAAG,EAAE,QAAQ,CAAC,IAAI,EAClB,UAAU,EAAE,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IACjC,CAAC,CAAC,QAAQ,CAAC,QAAQ,IAAI,QAAQ,CAAC,QAAQ,CAAC,IAAI;YAC9C,KAAK,EAAE,SAAS,MAAM,CAAC,IAAI,CACzB,GAAG,QAAQ,CAAC,QAAQ,IAAI,QAAQ,CAAC,QAAQ,EAAE,CAC5C,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE;SACvB,CAAC,EACF,CAAA;QACF,IAAI,CAAC,qBAAqB,GAAG,UAAU,CAAA;QAEvC,IAAI,QAAQ,IAAI,IAAI,CAAC,eAAe,EAAE;YACpC,wGAAwG;YACxG,kFAAkF;YAClF,mDAAmD;YACnD,UAAU,CAAC,OAAO,GAAG,MAAM,CAAC,MAAM,CAAC,UAAU,CAAC,OAAO,CAAC,UAAU,IAAI,EAAE,EAAE;gBACtE,kBAAkB,EAAE,KAAK;aAC1B,CAAC,CAAA;SACH;QAED,OAAO,UAAU,CAAA;IACnB,CAAC;IAEa,0BAA0B,CAAC,WAAmB;;YAC1D,WAAW,GAAG,IAAI,CAAC,GAAG,CAAC,yBAAyB,EAAE,WAAW,CAAC,CAAA;YAC9D,MAAM,EAAE,GAAW,2BAA2B,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,WAAW,CAAC,CAAA;YACzE,OAAO,IAAI,OAAO,CAAC,OAAO,CAAC,EAAE,CAAC,UAAU,CAAC,GAAG,EAAE,CAAC,OAAO,EAAE,EAAE,EAAE,CAAC,CAAC,CAAA;QAChE,CAAC;KAAA;IAEa,gBAAgB,CAC5B,GAAuB,EACvB,OAA4B;;YAE5B,OAAO,IAAI,OAAO,CAAuB,CAAO,OAAO,EAAE,MAAM,EAAE,EAAE;gBACj
E,MAAM,UAAU,GAAG,GAAG,CAAC,OAAO,CAAC,UAAU,IAAI,CAAC,CAAA;gBAE9C,MAAM,QAAQ,GAAyB;oBACrC,UAAU;oBACV,MAAM,EAAE,IAAI;oBACZ,OAAO,EAAE,EAAE;iBACZ,CAAA;gBAED,uCAAuC;gBACvC,IAAI,UAAU,KAAK,SAAS,CAAC,QAAQ,EAAE;oBACrC,OAAO,CAAC,QAAQ,CAAC,CAAA;iBAClB;gBAED,+BAA+B;gBAE/B,SAAS,oBAAoB,CAAC,GAAQ,EAAE,KAAU;oBAChD,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;wBAC7B,MAAM,CAAC,GAAG,IAAI,IAAI,CAAC,KAAK,CAAC,CAAA;wBACzB,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,OAAO,EAAE,CAAC,EAAE;4BACvB,OAAO,CAAC,CAAA;yBACT;qBACF;oBAED,OAAO,KAAK,CAAA;gBACd,CAAC;gBAED,IAAI,GAAQ,CAAA;gBACZ,IAAI,QAA4B,CAAA;gBAEhC,IAAI;oBACF,QAAQ,GAAG,MAAM,GAAG,CAAC,QAAQ,EAAE,CAAA;oBAC/B,IAAI,QAAQ,IAAI,QAAQ,CAAC,MAAM,GAAG,CAAC,EAAE;wBACnC,IAAI,OAAO,IAAI,OAAO,CAAC,gBAAgB,EAAE;4BACvC,GAAG,GAAG,IAAI,CAAC,KAAK,CAAC,QAAQ,EAAE,oBAAoB,CAAC,CAAA;yBACjD;6BAAM;4BACL,GAAG,GAAG,IAAI,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAA;yBAC3B;wBAED,QAAQ,CAAC,MAAM,GAAG,GAAG,CAAA;qBACtB;oBAED,QAAQ,CAAC,OAAO,GAAG,GAAG,CAAC,OAAO,CAAC,OAAO,CAAA;iBACvC;gBAAC,OAAO,GAAG,EAAE;oBACZ,iEAAiE;iBAClE;gBAED,yDAAyD;gBACzD,IAAI,UAAU,GAAG,GAAG,EAAE;oBACpB,IAAI,GAAW,CAAA;oBAEf,0DAA0D;oBAC1D,IAAI,GAAG,IAAI,GAAG,CAAC,OAAO,EAAE;wBACtB,GAAG,GAAG,GAAG,CAAC,OAAO,CAAA;qBAClB;yBAAM,IAAI,QAAQ,IAAI,QAAQ,CAAC,MAAM,GAAG,CAAC,EAAE;wBAC1C,yEAAyE;wBACzE,GAAG,GAAG,QAAQ,CAAA;qBACf;yBAAM;wBACL,GAAG,GAAG,oBAAoB,UAAU,GAAG,CAAA;qBACxC;oBAED,MAAM,GAAG,GAAG,IAAI,eAAe,CAAC,GAAG,EAAE,UAAU,CAAC,CAAA;oBAChD,GAAG,CAAC,MAAM,GAAG,QAAQ,CAAC,MAAM,CAAA;oBAE5B,MAAM,CAAC,GAAG,CAAC,CAAA;iBACZ;qBAAM;oBACL,OAAO,CAAC,QAAQ,CAAC,CAAA;iBAClB;YACH,CAAC,CAAA,CAAC,CAAA;QACJ,CAAC;KAAA;CACF;AA7rBD,gCA6rBC;AAED,MAAM,aAAa,GAAG,CAAC,GAA2B,EAAO,EAAE,CACzD,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,MAAM,CAAC,CAAC,CAAM,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,WAAW,EAAE,CAAC,GAAG,GAAG,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,EAAE,CAAC,CAAA"} \ No newline at end of file diff --git a/node_modules/@actions/http-client/lib/interfaces.d.ts b/node_modules/@actions/http-client/lib/interfaces.d.ts new file mode 100644 index 0000000..775ced9 --- /dev/null +++ b/node_modules/@actions/http-client/lib/interfaces.d.ts @@ -0,0 +1,46 @@ +/// +/// +/// +import * as http from 'http'; +import * as https from 'https'; +import { HttpClientResponse } from './index'; +export interface HttpClient { + options(requestUrl: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise; + get(requestUrl: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise; + del(requestUrl: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise; + post(requestUrl: string, data: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise; + patch(requestUrl: string, data: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise; + put(requestUrl: string, data: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise; + sendStream(verb: string, requestUrl: string, stream: NodeJS.ReadableStream, additionalHeaders?: http.OutgoingHttpHeaders): Promise; + request(verb: string, requestUrl: string, data: string | NodeJS.ReadableStream, headers: http.OutgoingHttpHeaders): Promise; + requestRaw(info: RequestInfo, data: string | NodeJS.ReadableStream): Promise; + requestRawWithCallback(info: RequestInfo, data: string | NodeJS.ReadableStream, onResult: (err?: Error, res?: HttpClientResponse) => void): void; +} +export interface RequestHandler { + prepareRequest(options: http.RequestOptions): void; + canHandleAuthentication(response: HttpClientResponse): boolean; + handleAuthentication(httpClient: HttpClient, requestInfo: RequestInfo, data: string | NodeJS.ReadableStream | null): Promise; +} 
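The `RequestHandler` interface above is the extension point the README refers to ("Extensible handlers for others"): any object with these three methods can be passed to the `HttpClient` constructor alongside the bundled Basic/Bearer/PAT handlers. A minimal sketch, assuming a made-up header name and key (editorial illustration, not part of the vendored sources):

```typescript
import * as http from 'http';
import * as httpm from '@actions/http-client';

// Hypothetical handler that pre-authorizes every request with an API-key header.
// Structurally it satisfies the RequestHandler interface declared above.
class ApiKeyHandler {
  constructor(private apiKey: string) {}

  // Invoked for each outgoing request before it is sent.
  prepareRequest(options: http.RequestOptions): void {
    if (!options.headers) {
      options.headers = {};
    }
    options.headers['x-api-key'] = this.apiKey;
  }

  // Like the bundled handlers, this one does not react to 401 challenges.
  canHandleAuthentication(): boolean {
    return false;
  }

  async handleAuthentication(): Promise<httpm.HttpClientResponse> {
    throw new Error('not implemented');
  }
}

const client = new httpm.HttpClient('my-user-agent', [new ApiKeyHandler('my-secret')]);
```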
+export interface RequestInfo { + options: http.RequestOptions; + parsedUrl: URL; + httpModule: typeof http | typeof https; +} +export interface RequestOptions { + headers?: http.OutgoingHttpHeaders; + socketTimeout?: number; + ignoreSslError?: boolean; + allowRedirects?: boolean; + allowRedirectDowngrade?: boolean; + maxRedirects?: number; + maxSockets?: number; + keepAlive?: boolean; + deserializeDates?: boolean; + allowRetries?: boolean; + maxRetries?: number; +} +export interface TypedResponse { + statusCode: number; + result: T | null; + headers: http.IncomingHttpHeaders; +} diff --git a/node_modules/@actions/http-client/lib/interfaces.js b/node_modules/@actions/http-client/lib/interfaces.js new file mode 100644 index 0000000..db91911 --- /dev/null +++ b/node_modules/@actions/http-client/lib/interfaces.js @@ -0,0 +1,3 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=interfaces.js.map \ No newline at end of file diff --git a/node_modules/@actions/http-client/lib/interfaces.js.map b/node_modules/@actions/http-client/lib/interfaces.js.map new file mode 100644 index 0000000..8fb5f7d --- /dev/null +++ b/node_modules/@actions/http-client/lib/interfaces.js.map @@ -0,0 +1 @@ +{"version":3,"file":"interfaces.js","sourceRoot":"","sources":["../src/interfaces.ts"],"names":[],"mappings":""} \ No newline at end of file diff --git a/node_modules/@actions/http-client/lib/proxy.d.ts b/node_modules/@actions/http-client/lib/proxy.d.ts new file mode 100644 index 0000000..4599865 --- /dev/null +++ b/node_modules/@actions/http-client/lib/proxy.d.ts @@ -0,0 +1,2 @@ +export declare function getProxyUrl(reqUrl: URL): URL | undefined; +export declare function checkBypass(reqUrl: URL): boolean; diff --git a/node_modules/@actions/http-client/lib/proxy.js b/node_modules/@actions/http-client/lib/proxy.js new file mode 100644 index 0000000..fac1cfc --- /dev/null +++ b/node_modules/@actions/http-client/lib/proxy.js @@ -0,0 +1,95 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.checkBypass = exports.getProxyUrl = void 0; +function getProxyUrl(reqUrl) { + const usingSsl = reqUrl.protocol === 'https:'; + if (checkBypass(reqUrl)) { + return undefined; + } + const proxyVar = (() => { + if (usingSsl) { + return process.env['https_proxy'] || process.env['HTTPS_PROXY']; + } + else { + return process.env['http_proxy'] || process.env['HTTP_PROXY']; + } + })(); + if (proxyVar) { + try { + return new DecodedURL(proxyVar); + } + catch (_a) { + if (!proxyVar.startsWith('http://') && !proxyVar.startsWith('https://')) + return new DecodedURL(`http://${proxyVar}`); + } + } + else { + return undefined; + } +} +exports.getProxyUrl = getProxyUrl; +function checkBypass(reqUrl) { + if (!reqUrl.hostname) { + return false; + } + const reqHost = reqUrl.hostname; + if (isLoopbackAddress(reqHost)) { + return true; + } + const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || ''; + if (!noProxy) { + return false; + } + // Determine the request port + let reqPort; + if (reqUrl.port) { + reqPort = Number(reqUrl.port); + } + else if (reqUrl.protocol === 'http:') { + reqPort = 80; + } + else if (reqUrl.protocol === 'https:') { + reqPort = 443; + } + // Format the request hostname and hostname with port + const upperReqHosts = [reqUrl.hostname.toUpperCase()]; + if (typeof reqPort === 'number') { + upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`); + } + // Compare request host against noproxy + for (const upperNoProxyItem of noProxy + 
.split(',') + .map(x => x.trim().toUpperCase()) + .filter(x => x)) { + if (upperNoProxyItem === '*' || + upperReqHosts.some(x => x === upperNoProxyItem || + x.endsWith(`.${upperNoProxyItem}`) || + (upperNoProxyItem.startsWith('.') && + x.endsWith(`${upperNoProxyItem}`)))) { + return true; + } + } + return false; +} +exports.checkBypass = checkBypass; +function isLoopbackAddress(host) { + const hostLower = host.toLowerCase(); + return (hostLower === 'localhost' || + hostLower.startsWith('127.') || + hostLower.startsWith('[::1]') || + hostLower.startsWith('[0:0:0:0:0:0:0:1]')); +} +class DecodedURL extends URL { + constructor(url, base) { + super(url, base); + this._decodedUsername = decodeURIComponent(super.username); + this._decodedPassword = decodeURIComponent(super.password); + } + get username() { + return this._decodedUsername; + } + get password() { + return this._decodedPassword; + } +} +//# sourceMappingURL=proxy.js.map \ No newline at end of file diff --git a/node_modules/@actions/http-client/lib/proxy.js.map b/node_modules/@actions/http-client/lib/proxy.js.map new file mode 100644 index 0000000..0d7f8cc --- /dev/null +++ b/node_modules/@actions/http-client/lib/proxy.js.map @@ -0,0 +1 @@ +{"version":3,"file":"proxy.js","sourceRoot":"","sources":["../src/proxy.ts"],"names":[],"mappings":";;;AAAA,SAAgB,WAAW,CAAC,MAAW;IACrC,MAAM,QAAQ,GAAG,MAAM,CAAC,QAAQ,KAAK,QAAQ,CAAA;IAE7C,IAAI,WAAW,CAAC,MAAM,CAAC,EAAE;QACvB,OAAO,SAAS,CAAA;KACjB;IAED,MAAM,QAAQ,GAAG,CAAC,GAAG,EAAE;QACrB,IAAI,QAAQ,EAAE;YACZ,OAAO,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,IAAI,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,CAAA;SAChE;aAAM;YACL,OAAO,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,IAAI,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,CAAA;SAC9D;IACH,CAAC,CAAC,EAAE,CAAA;IAEJ,IAAI,QAAQ,EAAE;QACZ,IAAI;YACF,OAAO,IAAI,UAAU,CAAC,QAAQ,CAAC,CAAA;SAChC;QAAC,WAAM;YACN,IAAI,CAAC,QAAQ,CAAC,UAAU,CAAC,SAAS,CAAC,IAAI,CAAC,QAAQ,CAAC,UAAU,CAAC,UAAU,CAAC;gBACrE,OAAO,IAAI,UAAU,CAAC,UAAU,QAAQ,EAAE,CAAC,CAAA;SAC9C;KACF;SAAM;QACL,OAAO,SAAS,CAAA;KACjB;AACH,CAAC;AAzBD,kCAyBC;AAED,SAAgB,WAAW,CAAC,MAAW;IACrC,IAAI,CAAC,MAAM,CAAC,QAAQ,EAAE;QACpB,OAAO,KAAK,CAAA;KACb;IAED,MAAM,OAAO,GAAG,MAAM,CAAC,QAAQ,CAAA;IAC/B,IAAI,iBAAiB,CAAC,OAAO,CAAC,EAAE;QAC9B,OAAO,IAAI,CAAA;KACZ;IAED,MAAM,OAAO,GAAG,OAAO,CAAC,GAAG,CAAC,UAAU,CAAC,IAAI,OAAO,CAAC,GAAG,CAAC,UAAU,CAAC,IAAI,EAAE,CAAA;IACxE,IAAI,CAAC,OAAO,EAAE;QACZ,OAAO,KAAK,CAAA;KACb;IAED,6BAA6B;IAC7B,IAAI,OAA2B,CAAA;IAC/B,IAAI,MAAM,CAAC,IAAI,EAAE;QACf,OAAO,GAAG,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,CAAA;KAC9B;SAAM,IAAI,MAAM,CAAC,QAAQ,KAAK,OAAO,EAAE;QACtC,OAAO,GAAG,EAAE,CAAA;KACb;SAAM,IAAI,MAAM,CAAC,QAAQ,KAAK,QAAQ,EAAE;QACvC,OAAO,GAAG,GAAG,CAAA;KACd;IAED,qDAAqD;IACrD,MAAM,aAAa,GAAG,CAAC,MAAM,CAAC,QAAQ,CAAC,WAAW,EAAE,CAAC,CAAA;IACrD,IAAI,OAAO,OAAO,KAAK,QAAQ,EAAE;QAC/B,aAAa,CAAC,IAAI,CAAC,GAAG,aAAa,CAAC,CAAC,CAAC,IAAI,OAAO,EAAE,CAAC,CAAA;KACrD;IAED,uCAAuC;IACvC,KAAK,MAAM,gBAAgB,IAAI,OAAO;SACnC,KAAK,CAAC,GAAG,CAAC;SACV,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,EAAE,CAAC,WAAW,EAAE,CAAC;SAChC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE;QACjB,IACE,gBAAgB,KAAK,GAAG;YACxB,aAAa,CAAC,IAAI,CAChB,CAAC,CAAC,EAAE,CACF,CAAC,KAAK,gBAAgB;gBACtB,CAAC,CAAC,QAAQ,CAAC,IAAI,gBAAgB,EAAE,CAAC;gBAClC,CAAC,gBAAgB,CAAC,UAAU,CAAC,GAAG,CAAC;oBAC/B,CAAC,CAAC,QAAQ,CAAC,GAAG,gBAAgB,EAAE,CAAC,CAAC,CACvC,EACD;YACA,OAAO,IAAI,CAAA;SACZ;KACF;IAED,OAAO,KAAK,CAAA;AACd,CAAC;AAnDD,kCAmDC;AAED,SAAS,iBAAiB,CAAC,IAAY;IACrC,MAAM,SAAS,GAAG,IAAI,CAAC,WAAW,EAAE,CAAA;IACpC,OAAO,CACL,SAAS,KAAK,WAAW;QACzB,SAAS,CAAC,UAAU,CAAC,MAAM,CAAC;QAC5B,SAAS,CAAC,UAAU,CAAC,OAAO,CAAC;QAC7B,SAAS,CAAC,UAAU,CAAC,mBAAmB,CAAC,CAC1C,CAAA;AAC
H,CAAC;AAED,MAAM,UAAW,SAAQ,GAAG;IAI1B,YAAY,GAAiB,EAAE,IAAmB;QAChD,KAAK,CAAC,GAAG,EAAE,IAAI,CAAC,CAAA;QAChB,IAAI,CAAC,gBAAgB,GAAG,kBAAkB,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAA;QAC1D,IAAI,CAAC,gBAAgB,GAAG,kBAAkB,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAA;IAC5D,CAAC;IAED,IAAI,QAAQ;QACV,OAAO,IAAI,CAAC,gBAAgB,CAAA;IAC9B,CAAC;IAED,IAAI,QAAQ;QACV,OAAO,IAAI,CAAC,gBAAgB,CAAA;IAC9B,CAAC;CACF"} \ No newline at end of file diff --git a/node_modules/@actions/http-client/package.json b/node_modules/@actions/http-client/package.json new file mode 100644 index 0000000..3960a83 --- /dev/null +++ b/node_modules/@actions/http-client/package.json @@ -0,0 +1,51 @@ +{ + "name": "@actions/http-client", + "version": "2.2.3", + "description": "Actions Http Client", + "keywords": [ + "github", + "actions", + "http" + ], + "homepage": "https://github.com/actions/toolkit/tree/main/packages/http-client", + "license": "MIT", + "main": "lib/index.js", + "types": "lib/index.d.ts", + "directories": { + "lib": "lib", + "test": "__tests__" + }, + "files": [ + "lib", + "!.DS_Store" + ], + "publishConfig": { + "access": "public" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/actions/toolkit.git", + "directory": "packages/http-client" + }, + "scripts": { + "audit-moderate": "npm install && npm audit --json --audit-level=moderate > audit.json", + "test": "echo \"Error: run tests from root\" && exit 1", + "build": "tsc", + "format": "prettier --write **/*.ts", + "format-check": "prettier --check **/*.ts", + "tsc": "tsc" + }, + "bugs": { + "url": "https://github.com/actions/toolkit/issues" + }, + "devDependencies": { + "@types/node": "20.7.1", + "@types/tunnel": "0.0.3", + "proxy": "^2.1.1", + "@types/proxy": "^1.0.1" + }, + "dependencies": { + "tunnel": "^0.0.6", + "undici": "^5.25.4" + } +} \ No newline at end of file diff --git a/node_modules/@actions/io/LICENSE.md b/node_modules/@actions/io/LICENSE.md new file mode 100644 index 0000000..dbae2ed --- /dev/null +++ b/node_modules/@actions/io/LICENSE.md @@ -0,0 +1,9 @@ +The MIT License (MIT) + +Copyright 2019 GitHub + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. \ No newline at end of file diff --git a/node_modules/@actions/io/README.md b/node_modules/@actions/io/README.md new file mode 100644 index 0000000..9aadf2f --- /dev/null +++ b/node_modules/@actions/io/README.md @@ -0,0 +1,53 @@ +# `@actions/io` + +> Core functions for cli filesystem scenarios + +## Usage + +#### mkdir -p + +Recursively make a directory. 
Follows rules specified in [man mkdir](https://linux.die.net/man/1/mkdir) with the `-p` option specified: + +```js +const io = require('@actions/io'); + +await io.mkdirP('path/to/make'); +``` + +#### cp/mv + +Copy or move files or folders. Follows rules specified in [man cp](https://linux.die.net/man/1/cp) and [man mv](https://linux.die.net/man/1/mv): + +```js +const io = require('@actions/io'); + +// Recursive must be true for directories +const options = { recursive: true, force: false } + +await io.cp('path/to/directory', 'path/to/dest', options); +await io.mv('path/to/file', 'path/to/dest'); +``` + +#### rm -rf + +Remove a file or folder recursively. Follows rules specified in [man rm](https://linux.die.net/man/1/rm) with the `-r` and `-f` rules specified. + +```js +const io = require('@actions/io'); + +await io.rmRF('path/to/directory'); +await io.rmRF('path/to/file'); +``` + +#### which + +Get the path to a tool and resolves via paths. Follows the rules specified in [man which](https://linux.die.net/man/1/which). + +```js +const exec = require('@actions/exec'); +const io = require('@actions/io'); + +const pythonPath: string = await io.which('python', true) + +await exec.exec(`"${pythonPath}"`, ['main.py']); +``` diff --git a/node_modules/@actions/io/lib/io-util.d.ts b/node_modules/@actions/io/lib/io-util.d.ts new file mode 100644 index 0000000..0241e72 --- /dev/null +++ b/node_modules/@actions/io/lib/io-util.d.ts @@ -0,0 +1,21 @@ +/// <reference types="node" /> +import * as fs from 'fs'; +export declare const chmod: typeof fs.promises.chmod, copyFile: typeof fs.promises.copyFile, lstat: typeof fs.promises.lstat, mkdir: typeof fs.promises.mkdir, open: typeof fs.promises.open, readdir: typeof fs.promises.readdir, readlink: typeof fs.promises.readlink, rename: typeof fs.promises.rename, rm: typeof fs.promises.rm, rmdir: typeof fs.promises.rmdir, stat: typeof fs.promises.stat, symlink: typeof fs.promises.symlink, unlink: typeof fs.promises.unlink; +export declare const IS_WINDOWS: boolean; +export declare const UV_FS_O_EXLOCK = 268435456; +export declare const READONLY: number; +export declare function exists(fsPath: string): Promise<boolean>; +export declare function isDirectory(fsPath: string, useStat?: boolean): Promise<boolean>; +/** + * On OSX/Linux, true if path starts with '/'. On Windows, true for paths like: + * \, \hello, \\hello\share, C:, and C:\hello (and corresponding alternate separator cases). + */ +export declare function isRooted(p: string): boolean; +/** + * Best effort attempt to determine whether a file exists and is executable. + * @param filePath file path to check + * @param extensions additional file extensions to try + * @return if file exists and is executable, returns the file path. otherwise empty string. + */ +export declare function tryGetExecutablePath(filePath: string, extensions: string[]): Promise<string>; +export declare function getCmdPath(): string; diff --git a/node_modules/@actions/io/lib/io-util.js b/node_modules/@actions/io/lib/io-util.js new file mode 100644 index 0000000..f12e5b0 --- /dev/null +++ b/node_modules/@actions/io/lib/io-util.js @@ -0,0 +1,183 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var _a; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getCmdPath = exports.tryGetExecutablePath = exports.isRooted = exports.isDirectory = exports.exists = exports.READONLY = exports.UV_FS_O_EXLOCK = exports.IS_WINDOWS = exports.unlink = exports.symlink = exports.stat = exports.rmdir = exports.rm = exports.rename = exports.readlink = exports.readdir = exports.open = exports.mkdir = exports.lstat = exports.copyFile = exports.chmod = void 0; +const fs = __importStar(require("fs")); +const path = __importStar(require("path")); +_a = fs.promises +// export const {open} = 'fs' +, exports.chmod = _a.chmod, exports.copyFile = _a.copyFile, exports.lstat = _a.lstat, exports.mkdir = _a.mkdir, exports.open = _a.open, exports.readdir = _a.readdir, exports.readlink = _a.readlink, exports.rename = _a.rename, exports.rm = _a.rm, exports.rmdir = _a.rmdir, exports.stat = _a.stat, exports.symlink = _a.symlink, exports.unlink = _a.unlink; +// export const {open} = 'fs' +exports.IS_WINDOWS = process.platform === 'win32'; +// See https://github.com/nodejs/node/blob/d0153aee367422d0858105abec186da4dff0a0c5/deps/uv/include/uv/win.h#L691 +exports.UV_FS_O_EXLOCK = 0x10000000; +exports.READONLY = fs.constants.O_RDONLY; +function exists(fsPath) { + return __awaiter(this, void 0, void 0, function* () { + try { + yield exports.stat(fsPath); + } + catch (err) { + if (err.code === 'ENOENT') { + return false; + } + throw err; + } + return true; + }); +} +exports.exists = exists; +function isDirectory(fsPath, useStat = false) { + return __awaiter(this, void 0, void 0, function* () { + const stats = useStat ? yield exports.stat(fsPath) : yield exports.lstat(fsPath); + return stats.isDirectory(); + }); +} +exports.isDirectory = isDirectory; +/** + * On OSX/Linux, true if path starts with '/'. On Windows, true for paths like: + * \, \hello, \\hello\share, C:, and C:\hello (and corresponding alternate separator cases). + */ +function isRooted(p) { + p = normalizeSeparators(p); + if (!p) { + throw new Error('isRooted() parameter "p" cannot be empty'); + } + if (exports.IS_WINDOWS) { + return (p.startsWith('\\') || /^[A-Z]:/i.test(p) // e.g. \ or \hello or \\hello + ); // e.g. C: or C:\hello + } + return p.startsWith('/'); +} +exports.isRooted = isRooted; +/** + * Best effort attempt to determine whether a file exists and is executable. 
+ * @param filePath file path to check + * @param extensions additional file extensions to try + * @return if file exists and is executable, returns the file path. otherwise empty string. + */ +function tryGetExecutablePath(filePath, extensions) { + return __awaiter(this, void 0, void 0, function* () { + let stats = undefined; + try { + // test file exists + stats = yield exports.stat(filePath); + } + catch (err) { + if (err.code !== 'ENOENT') { + // eslint-disable-next-line no-console + console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`); + } + } + if (stats && stats.isFile()) { + if (exports.IS_WINDOWS) { + // on Windows, test for valid extension + const upperExt = path.extname(filePath).toUpperCase(); + if (extensions.some(validExt => validExt.toUpperCase() === upperExt)) { + return filePath; + } + } + else { + if (isUnixExecutable(stats)) { + return filePath; + } + } + } + // try each extension + const originalFilePath = filePath; + for (const extension of extensions) { + filePath = originalFilePath + extension; + stats = undefined; + try { + stats = yield exports.stat(filePath); + } + catch (err) { + if (err.code !== 'ENOENT') { + // eslint-disable-next-line no-console + console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`); + } + } + if (stats && stats.isFile()) { + if (exports.IS_WINDOWS) { + // preserve the case of the actual file (since an extension was appended) + try { + const directory = path.dirname(filePath); + const upperName = path.basename(filePath).toUpperCase(); + for (const actualName of yield exports.readdir(directory)) { + if (upperName === actualName.toUpperCase()) { + filePath = path.join(directory, actualName); + break; + } + } + } + catch (err) { + // eslint-disable-next-line no-console + console.log(`Unexpected error attempting to determine the actual case of the file '${filePath}': ${err}`); + } + return filePath; + } + else { + if (isUnixExecutable(stats)) { + return filePath; + } + } + } + } + return ''; + }); +} +exports.tryGetExecutablePath = tryGetExecutablePath; +function normalizeSeparators(p) { + p = p || ''; + if (exports.IS_WINDOWS) { + // convert slashes on Windows + p = p.replace(/\//g, '\\'); + // remove redundant slashes + return p.replace(/\\\\+/g, '\\'); + } + // remove redundant slashes + return p.replace(/\/\/+/g, '/'); +} +// on Mac/Linux, test the execute bit +// R W X R W X R W X +// 256 128 64 32 16 8 4 2 1 +function isUnixExecutable(stats) { + return ((stats.mode & 1) > 0 || + ((stats.mode & 8) > 0 && stats.gid === process.getgid()) || + ((stats.mode & 64) > 0 && stats.uid === process.getuid())); +} +// Get the path of cmd.exe in windows +function getCmdPath() { + var _a; + return (_a = process.env['COMSPEC']) !== null && _a !== void 0 ? 
_a : `cmd.exe`; +} +exports.getCmdPath = getCmdPath; +//# sourceMappingURL=io-util.js.map \ No newline at end of file diff --git a/node_modules/@actions/io/lib/io-util.js.map b/node_modules/@actions/io/lib/io-util.js.map new file mode 100644 index 0000000..9a587b3 --- /dev/null +++ b/node_modules/@actions/io/lib/io-util.js.map @@ -0,0 +1 @@ +{"version":3,"file":"io-util.js","sourceRoot":"","sources":["../src/io-util.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,uCAAwB;AACxB,2CAA4B;AAEf,KAcT,EAAE,CAAC,QAAQ;AACf,6BAA6B;EAd3B,aAAK,aACL,gBAAQ,gBACR,aAAK,aACL,aAAK,aACL,YAAI,YACJ,eAAO,eACP,gBAAQ,gBACR,cAAM,cACN,UAAE,UACF,aAAK,aACL,YAAI,YACJ,eAAO,eACP,cAAM,aACO;AACf,6BAA6B;AAChB,QAAA,UAAU,GAAG,OAAO,CAAC,QAAQ,KAAK,OAAO,CAAA;AACtD,iHAAiH;AACpG,QAAA,cAAc,GAAG,UAAU,CAAA;AAC3B,QAAA,QAAQ,GAAG,EAAE,CAAC,SAAS,CAAC,QAAQ,CAAA;AAE7C,SAAsB,MAAM,CAAC,MAAc;;QACzC,IAAI;YACF,MAAM,YAAI,CAAC,MAAM,CAAC,CAAA;SACnB;QAAC,OAAO,GAAG,EAAE;YACZ,IAAI,GAAG,CAAC,IAAI,KAAK,QAAQ,EAAE;gBACzB,OAAO,KAAK,CAAA;aACb;YAED,MAAM,GAAG,CAAA;SACV;QAED,OAAO,IAAI,CAAA;IACb,CAAC;CAAA;AAZD,wBAYC;AAED,SAAsB,WAAW,CAC/B,MAAc,EACd,OAAO,GAAG,KAAK;;QAEf,MAAM,KAAK,GAAG,OAAO,CAAC,CAAC,CAAC,MAAM,YAAI,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,MAAM,aAAK,CAAC,MAAM,CAAC,CAAA;QAChE,OAAO,KAAK,CAAC,WAAW,EAAE,CAAA;IAC5B,CAAC;CAAA;AAND,kCAMC;AAED;;;GAGG;AACH,SAAgB,QAAQ,CAAC,CAAS;IAChC,CAAC,GAAG,mBAAmB,CAAC,CAAC,CAAC,CAAA;IAC1B,IAAI,CAAC,CAAC,EAAE;QACN,MAAM,IAAI,KAAK,CAAC,0CAA0C,CAAC,CAAA;KAC5D;IAED,IAAI,kBAAU,EAAE;QACd,OAAO,CACL,CAAC,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,UAAU,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,8BAA8B;SACxE,CAAA,CAAC,sBAAsB;KACzB;IAED,OAAO,CAAC,CAAC,UAAU,CAAC,GAAG,CAAC,CAAA;AAC1B,CAAC;AAbD,4BAaC;AAED;;;;;GAKG;AACH,SAAsB,oBAAoB,CACxC,QAAgB,EAChB,UAAoB;;QAEpB,IAAI,KAAK,GAAyB,SAAS,CAAA;QAC3C,IAAI;YACF,mBAAmB;YACnB,KAAK,GAAG,MAAM,YAAI,CAAC,QAAQ,CAAC,CAAA;SAC7B;QAAC,OAAO,GAAG,EAAE;YACZ,IAAI,GAAG,CAAC,IAAI,KAAK,QAAQ,EAAE;gBACzB,sCAAsC;gBACtC,OAAO,CAAC,GAAG,CACT,uEAAuE,QAAQ,MAAM,GAAG,EAAE,CAC3F,CAAA;aACF;SACF;QACD,IAAI,KAAK,IAAI,KAAK,CAAC,MAAM,EAAE,EAAE;YAC3B,IAAI,kBAAU,EAAE;gBACd,uCAAuC;gBACvC,MAAM,QAAQ,GAAG,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC,WAAW,EAAE,CAAA;gBACrD,IAAI,UAAU,CAAC,IAAI,CAAC,QAAQ,CAAC,EAAE,CAAC,QAAQ,CAAC,WAAW,EAAE,KAAK,QAAQ,CAAC,EAAE;oBACpE,OAAO,QAAQ,CAAA;iBAChB;aACF;iBAAM;gBACL,IAAI,gBAAgB,CAAC,KAAK,CAAC,EAAE;oBAC3B,OAAO,QAAQ,CAAA;iBAChB;aACF;SACF;QAED,qBAAqB;QACrB,MAAM,gBAAgB,GAAG,QAAQ,CAAA;QACjC,KAAK,MAAM,SAAS,IAAI,UAAU,EAAE;YAClC,QAAQ,GAAG,gBAAgB,GAAG,SAAS,CAAA;YAEvC,KAAK,GAAG,SAAS,CAAA;YACjB,IAAI;gBACF,KAAK,GAAG,MAAM,YAAI,CAAC,QAAQ,CAAC,CAAA;aAC7B;YAAC,OAAO,GAAG,EAAE;gBACZ,IAAI,GAAG,CAAC,IAAI,KAAK,QAAQ,EAAE;oBACzB,sCAAsC;oBACtC,OAAO,CAAC,GAAG,CACT,uEAAuE,QAAQ,MAAM,GAAG,EAAE,CAC3F,CAAA;iBACF;aACF;YAED,IAAI,KAAK,IAAI,KAAK,CAAC,MAAM,EAAE,EAAE;gBAC3B,IAAI,kBAAU,EAAE;oBACd,yEAAyE;oBACzE,IAAI;wBACF,MAAM,SAAS,GAAG,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAA;wBACxC,MAAM,SAAS,GAAG,IAAI,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC,WAAW,EAAE,CAAA;wBACvD,KAAK,MAAM,UAAU,IAAI,MAAM,eAAO,CAAC,SAAS,CAAC,EAAE;4BACjD,IAAI,SAAS,KAAK,UAAU,CAAC,WAAW,EAAE,EAAE;gCAC1C,QAAQ,GAAG,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,UAAU,CAAC,CAAA;gCAC3C,MAAK;6BACN;yBACF;qBACF;oBAAC,OAAO,GAAG,EAAE;wBACZ,sCAAsC;wBACtC,OAAO,CAAC,GAAG,CACT,yEAAyE,QAAQ,MAAM,GAAG,EAAE,CAC7F,CAAA;qBACF;oBAED,OAAO,QAAQ,CAAA;iBAChB;qBAAM;oBACL,IAAI,gBAAgB,CAAC,KAAK,CAAC,EAAE;wBAC3B,OAAO,QAAQ,CAAA;qBAChB;iBACF;aACF;SACF;QAED,OAAO,EAAE,CAAA;IACX,CAAC;CAAA;AA5ED,oDA4EC;AAED,SAAS,mBAAmB,CAAC,CAAS;IACpC,CAAC,GAAG,CAAC,IAAI,EAAE,CAAA;IACX,IAAI,kBAAU,EAAE;QACd,6BAA6B;QAC7B,CAAC,GAAG,CAAC,CAAC,OAAO,CAAC,KAAK,EAAE,IAAI,CAAC,CAAA;QAE1B,2BAA2B;QA
C3B,OAAO,CAAC,CAAC,OAAO,CAAC,QAAQ,EAAE,IAAI,CAAC,CAAA;KACjC;IAED,2BAA2B;IAC3B,OAAO,CAAC,CAAC,OAAO,CAAC,QAAQ,EAAE,GAAG,CAAC,CAAA;AACjC,CAAC;AAED,qCAAqC;AACrC,6BAA6B;AAC7B,6BAA6B;AAC7B,SAAS,gBAAgB,CAAC,KAAe;IACvC,OAAO,CACL,CAAC,KAAK,CAAC,IAAI,GAAG,CAAC,CAAC,GAAG,CAAC;QACpB,CAAC,CAAC,KAAK,CAAC,IAAI,GAAG,CAAC,CAAC,GAAG,CAAC,IAAI,KAAK,CAAC,GAAG,KAAK,OAAO,CAAC,MAAM,EAAE,CAAC;QACxD,CAAC,CAAC,KAAK,CAAC,IAAI,GAAG,EAAE,CAAC,GAAG,CAAC,IAAI,KAAK,CAAC,GAAG,KAAK,OAAO,CAAC,MAAM,EAAE,CAAC,CAC1D,CAAA;AACH,CAAC;AAED,qCAAqC;AACrC,SAAgB,UAAU;;IACxB,aAAO,OAAO,CAAC,GAAG,CAAC,SAAS,CAAC,mCAAI,SAAS,CAAA;AAC5C,CAAC;AAFD,gCAEC"} \ No newline at end of file diff --git a/node_modules/@actions/io/lib/io.d.ts b/node_modules/@actions/io/lib/io.d.ts new file mode 100644 index 0000000..a674522 --- /dev/null +++ b/node_modules/@actions/io/lib/io.d.ts @@ -0,0 +1,64 @@ +/** + * Interface for cp/mv options + */ +export interface CopyOptions { + /** Optional. Whether to recursively copy all subdirectories. Defaults to false */ + recursive?: boolean; + /** Optional. Whether to overwrite existing files in the destination. Defaults to true */ + force?: boolean; + /** Optional. Whether to copy the source directory along with all the files. Only takes effect when recursive=true and copying a directory. Default is true*/ + copySourceDirectory?: boolean; +} +/** + * Interface for cp/mv options + */ +export interface MoveOptions { + /** Optional. Whether to overwrite existing files in the destination. Defaults to true */ + force?: boolean; +} +/** + * Copies a file or folder. + * Based off of shelljs - https://github.com/shelljs/shelljs/blob/9237f66c52e5daa40458f94f9565e18e8132f5a6/src/cp.js + * + * @param source source path + * @param dest destination path + * @param options optional. See CopyOptions. + */ +export declare function cp(source: string, dest: string, options?: CopyOptions): Promise<void>; +/** + * Moves a path. + * + * @param source source path + * @param dest destination path + * @param options optional. See MoveOptions. + */ +export declare function mv(source: string, dest: string, options?: MoveOptions): Promise<void>; +/** + * Remove a path recursively with force + * + * @param inputPath path to remove + */ +export declare function rmRF(inputPath: string): Promise<void>; +/** + * Make a directory. Creates the full path with folders in between + * Will throw if it fails + * + * @param fsPath path to create + * @returns Promise<void> + */ +export declare function mkdirP(fsPath: string): Promise<void>; +/** + * Returns path of a tool had the tool actually been invoked. Resolves via paths. + * If you check and the tool does not exist, it will throw. + * + * @param tool name of the tool + * @param check whether to check if tool exists + * @returns Promise<string> path to tool + */ +export declare function which(tool: string, check?: boolean): Promise<string>; +/** + * Returns a list of all occurrences of the given tool on the system path. + * + * @returns Promise<string[]> the paths of the tool + */ +export declare function findInPath(tool: string): Promise<string[]>; diff --git a/node_modules/@actions/io/lib/io.js b/node_modules/@actions/io/lib/io.js new file mode 100644 index 0000000..15f7d7c --- /dev/null +++ b/node_modules/@actions/io/lib/io.js @@ -0,0 +1,299 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.findInPath = exports.which = exports.mkdirP = exports.rmRF = exports.mv = exports.cp = void 0; +const assert_1 = require("assert"); +const path = __importStar(require("path")); +const ioUtil = __importStar(require("./io-util")); +/** + * Copies a file or folder. + * Based off of shelljs - https://github.com/shelljs/shelljs/blob/9237f66c52e5daa40458f94f9565e18e8132f5a6/src/cp.js + * + * @param source source path + * @param dest destination path + * @param options optional. See CopyOptions. + */ +function cp(source, dest, options = {}) { + return __awaiter(this, void 0, void 0, function* () { + const { force, recursive, copySourceDirectory } = readCopyOptions(options); + const destStat = (yield ioUtil.exists(dest)) ? yield ioUtil.stat(dest) : null; + // Dest is an existing file, but not forcing + if (destStat && destStat.isFile() && !force) { + return; + } + // If dest is an existing directory, should copy inside. + const newDest = destStat && destStat.isDirectory() && copySourceDirectory + ? path.join(dest, path.basename(source)) + : dest; + if (!(yield ioUtil.exists(source))) { + throw new Error(`no such file or directory: ${source}`); + } + const sourceStat = yield ioUtil.stat(source); + if (sourceStat.isDirectory()) { + if (!recursive) { + throw new Error(`Failed to copy. ${source} is a directory, but tried to copy without recursive flag.`); + } + else { + yield cpDirRecursive(source, newDest, 0, force); + } + } + else { + if (path.relative(source, newDest) === '') { + // a file cannot be copied to itself + throw new Error(`'${newDest}' and '${source}' are the same file`); + } + yield copyFile(source, newDest, force); + } + }); +} +exports.cp = cp; +/** + * Moves a path. + * + * @param source source path + * @param dest destination path + * @param options optional. See MoveOptions. 
+ */ +function mv(source, dest, options = {}) { + return __awaiter(this, void 0, void 0, function* () { + if (yield ioUtil.exists(dest)) { + let destExists = true; + if (yield ioUtil.isDirectory(dest)) { + // If dest is directory copy src into dest + dest = path.join(dest, path.basename(source)); + destExists = yield ioUtil.exists(dest); + } + if (destExists) { + if (options.force == null || options.force) { + yield rmRF(dest); + } + else { + throw new Error('Destination already exists'); + } + } + } + yield mkdirP(path.dirname(dest)); + yield ioUtil.rename(source, dest); + }); +} +exports.mv = mv; +/** + * Remove a path recursively with force + * + * @param inputPath path to remove + */ +function rmRF(inputPath) { + return __awaiter(this, void 0, void 0, function* () { + if (ioUtil.IS_WINDOWS) { + // Check for invalid characters + // https://docs.microsoft.com/en-us/windows/win32/fileio/naming-a-file + if (/[*"<>|]/.test(inputPath)) { + throw new Error('File path must not contain `*`, `"`, `<`, `>` or `|` on Windows'); + } + } + try { + // note if path does not exist, error is silent + yield ioUtil.rm(inputPath, { + force: true, + maxRetries: 3, + recursive: true, + retryDelay: 300 + }); + } + catch (err) { + throw new Error(`File was unable to be removed ${err}`); + } + }); +} +exports.rmRF = rmRF; +/** + * Make a directory. Creates the full path with folders in between + * Will throw if it fails + * + * @param fsPath path to create + * @returns Promise + */ +function mkdirP(fsPath) { + return __awaiter(this, void 0, void 0, function* () { + assert_1.ok(fsPath, 'a path argument must be provided'); + yield ioUtil.mkdir(fsPath, { recursive: true }); + }); +} +exports.mkdirP = mkdirP; +/** + * Returns path of a tool had the tool actually been invoked. Resolves via paths. + * If you check and the tool does not exist, it will throw. + * + * @param tool name of the tool + * @param check whether to check if tool exists + * @returns Promise path to tool + */ +function which(tool, check) { + return __awaiter(this, void 0, void 0, function* () { + if (!tool) { + throw new Error("parameter 'tool' is required"); + } + // recursive when check=true + if (check) { + const result = yield which(tool, false); + if (!result) { + if (ioUtil.IS_WINDOWS) { + throw new Error(`Unable to locate executable file: ${tool}. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. Also verify the file has a valid extension for an executable file.`); + } + else { + throw new Error(`Unable to locate executable file: ${tool}. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. Also check the file mode to verify the file is executable.`); + } + } + return result; + } + const matches = yield findInPath(tool); + if (matches && matches.length > 0) { + return matches[0]; + } + return ''; + }); +} +exports.which = which; +/** + * Returns a list of all occurrences of the given tool on the system path. 
+ * + * @returns Promise the paths of the tool + */ +function findInPath(tool) { + return __awaiter(this, void 0, void 0, function* () { + if (!tool) { + throw new Error("parameter 'tool' is required"); + } + // build the list of extensions to try + const extensions = []; + if (ioUtil.IS_WINDOWS && process.env['PATHEXT']) { + for (const extension of process.env['PATHEXT'].split(path.delimiter)) { + if (extension) { + extensions.push(extension); + } + } + } + // if it's rooted, return it if exists. otherwise return empty. + if (ioUtil.isRooted(tool)) { + const filePath = yield ioUtil.tryGetExecutablePath(tool, extensions); + if (filePath) { + return [filePath]; + } + return []; + } + // if any path separators, return empty + if (tool.includes(path.sep)) { + return []; + } + // build the list of directories + // + // Note, technically "where" checks the current directory on Windows. From a toolkit perspective, + // it feels like we should not do this. Checking the current directory seems like more of a use + // case of a shell, and the which() function exposed by the toolkit should strive for consistency + // across platforms. + const directories = []; + if (process.env.PATH) { + for (const p of process.env.PATH.split(path.delimiter)) { + if (p) { + directories.push(p); + } + } + } + // find all matches + const matches = []; + for (const directory of directories) { + const filePath = yield ioUtil.tryGetExecutablePath(path.join(directory, tool), extensions); + if (filePath) { + matches.push(filePath); + } + } + return matches; + }); +} +exports.findInPath = findInPath; +function readCopyOptions(options) { + const force = options.force == null ? true : options.force; + const recursive = Boolean(options.recursive); + const copySourceDirectory = options.copySourceDirectory == null + ? true + : Boolean(options.copySourceDirectory); + return { force, recursive, copySourceDirectory }; +} +function cpDirRecursive(sourceDir, destDir, currentDepth, force) { + return __awaiter(this, void 0, void 0, function* () { + // Ensure there is not a run away recursive copy + if (currentDepth >= 255) + return; + currentDepth++; + yield mkdirP(destDir); + const files = yield ioUtil.readdir(sourceDir); + for (const fileName of files) { + const srcFile = `${sourceDir}/${fileName}`; + const destFile = `${destDir}/${fileName}`; + const srcFileStat = yield ioUtil.lstat(srcFile); + if (srcFileStat.isDirectory()) { + // Recurse + yield cpDirRecursive(srcFile, destFile, currentDepth, force); + } + else { + yield copyFile(srcFile, destFile, force); + } + } + // Change the mode for the newly created directory + yield ioUtil.chmod(destDir, (yield ioUtil.stat(sourceDir)).mode); + }); +} +// Buffered file copy +function copyFile(srcFile, destFile, force) { + return __awaiter(this, void 0, void 0, function* () { + if ((yield ioUtil.lstat(srcFile)).isSymbolicLink()) { + // unlink/re-link it + try { + yield ioUtil.lstat(destFile); + yield ioUtil.unlink(destFile); + } + catch (e) { + // Try to override file permission + if (e.code === 'EPERM') { + yield ioUtil.chmod(destFile, '0666'); + yield ioUtil.unlink(destFile); + } + // other errors = it doesn't exist, no work to do + } + // Copy over symlink + const symlinkFull = yield ioUtil.readlink(srcFile); + yield ioUtil.symlink(symlinkFull, destFile, ioUtil.IS_WINDOWS ? 
'junction' : null); + } + else if (!(yield ioUtil.exists(destFile)) || force) { + yield ioUtil.copyFile(srcFile, destFile); + } + }); +} +//# sourceMappingURL=io.js.map \ No newline at end of file diff --git a/node_modules/@actions/io/lib/io.js.map b/node_modules/@actions/io/lib/io.js.map new file mode 100644 index 0000000..4021d28 --- /dev/null +++ b/node_modules/@actions/io/lib/io.js.map @@ -0,0 +1 @@ +{"version":3,"file":"io.js","sourceRoot":"","sources":["../src/io.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,mCAAyB;AACzB,2CAA4B;AAC5B,kDAAmC;AAsBnC;;;;;;;GAOG;AACH,SAAsB,EAAE,CACtB,MAAc,EACd,IAAY,EACZ,UAAuB,EAAE;;QAEzB,MAAM,EAAC,KAAK,EAAE,SAAS,EAAE,mBAAmB,EAAC,GAAG,eAAe,CAAC,OAAO,CAAC,CAAA;QAExE,MAAM,QAAQ,GAAG,CAAC,MAAM,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI,CAAA;QAC7E,4CAA4C;QAC5C,IAAI,QAAQ,IAAI,QAAQ,CAAC,MAAM,EAAE,IAAI,CAAC,KAAK,EAAE;YAC3C,OAAM;SACP;QAED,wDAAwD;QACxD,MAAM,OAAO,GACX,QAAQ,IAAI,QAAQ,CAAC,WAAW,EAAE,IAAI,mBAAmB;YACvD,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,EAAE,IAAI,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC;YACxC,CAAC,CAAC,IAAI,CAAA;QAEV,IAAI,CAAC,CAAC,MAAM,MAAM,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,EAAE;YAClC,MAAM,IAAI,KAAK,CAAC,8BAA8B,MAAM,EAAE,CAAC,CAAA;SACxD;QACD,MAAM,UAAU,GAAG,MAAM,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC,CAAA;QAE5C,IAAI,UAAU,CAAC,WAAW,EAAE,EAAE;YAC5B,IAAI,CAAC,SAAS,EAAE;gBACd,MAAM,IAAI,KAAK,CACb,mBAAmB,MAAM,4DAA4D,CACtF,CAAA;aACF;iBAAM;gBACL,MAAM,cAAc,CAAC,MAAM,EAAE,OAAO,EAAE,CAAC,EAAE,KAAK,CAAC,CAAA;aAChD;SACF;aAAM;YACL,IAAI,IAAI,CAAC,QAAQ,CAAC,MAAM,EAAE,OAAO,CAAC,KAAK,EAAE,EAAE;gBACzC,oCAAoC;gBACpC,MAAM,IAAI,KAAK,CAAC,IAAI,OAAO,UAAU,MAAM,qBAAqB,CAAC,CAAA;aAClE;YAED,MAAM,QAAQ,CAAC,MAAM,EAAE,OAAO,EAAE,KAAK,CAAC,CAAA;SACvC;IACH,CAAC;CAAA;AAxCD,gBAwCC;AAED;;;;;;GAMG;AACH,SAAsB,EAAE,CACtB,MAAc,EACd,IAAY,EACZ,UAAuB,EAAE;;QAEzB,IAAI,MAAM,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE;YAC7B,IAAI,UAAU,GAAG,IAAI,CAAA;YACrB,IAAI,MAAM,MAAM,CAAC,WAAW,CAAC,IAAI,CAAC,EAAE;gBAClC,0CAA0C;gBAC1C,IAAI,GAAG,IAAI,CAAC,IAAI,CAAC,IAAI,EAAE,IAAI,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAA;gBAC7C,UAAU,GAAG,MAAM,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,CAAA;aACvC;YAED,IAAI,UAAU,EAAE;gBACd,IAAI,OAAO,CAAC,KAAK,IAAI,IAAI,IAAI,OAAO,CAAC,KAAK,EAAE;oBAC1C,MAAM,IAAI,CAAC,IAAI,CAAC,CAAA;iBACjB;qBAAM;oBACL,MAAM,IAAI,KAAK,CAAC,4BAA4B,CAAC,CAAA;iBAC9C;aACF;SACF;QACD,MAAM,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,CAAA;QAChC,MAAM,MAAM,CAAC,MAAM,CAAC,MAAM,EAAE,IAAI,CAAC,CAAA;IACnC,CAAC;CAAA;AAvBD,gBAuBC;AAED;;;;GAIG;AACH,SAAsB,IAAI,CAAC,SAAiB;;QAC1C,IAAI,MAAM,CAAC,UAAU,EAAE;YACrB,+BAA+B;YAC/B,sEAAsE;YACtE,IAAI,SAAS,CAAC,IAAI,CAAC,SAAS,CAAC,EAAE;gBAC7B,MAAM,IAAI,KAAK,CACb,iEAAiE,CAClE,CAAA;aACF;SACF;QACD,IAAI;YACF,+CAA+C;YAC/C,MAAM,MAAM,CAAC,EAAE,CAAC,SAAS,EAAE;gBACzB,KAAK,EAAE,IAAI;gBACX,UAAU,EAAE,CAAC;gBACb,SAAS,EAAE,IAAI;gBACf,UAAU,EAAE,GAAG;aAChB,CAAC,CAAA;SACH;QAAC,OAAO,GAAG,EAAE;YACZ,MAAM,IAAI,KAAK,CAAC,iCAAiC,GAAG,EAAE,CAAC,CAAA;SACxD;IACH,CAAC;CAAA;AArBD,oBAqBC;AAED;;;;;;GAMG;AACH,SAAsB,MAAM,CAAC,MAAc;;QACzC,WAAE,CAAC,MAAM,EAAE,kCAAkC,CAAC,CAAA;QAC9C,MAAM,MAAM,CAAC,KAAK,CAAC,MAAM,EAAE,EAAC,SAAS,EAAE,IAAI,EAAC,CAAC,CAAA;IAC/C,CAAC;CAAA;AAHD,wBAGC;AAED;;;;;;;GAOG;AACH,SAAsB,KAAK,CAAC,IAAY,EAAE,KAAe;;QACvD,IAAI,CAAC,IAAI,EAAE;YACT,MAAM,IAAI,KAAK,CAAC,8BAA8B,CAAC,CAAA;SAChD;QAED,4BAA4B;QAC5B,IAAI,KAAK,EAAE;YACT,MAAM,MAAM,GAAW,MAAM,KAAK,CAAC,IAAI,EAAE,KAAK,CAAC,CAAA;YAE/C,IAAI,CAAC,MAAM,EAAE;gBACX,IAAI,MAAM,CAAC,UAAU,EAAE;oBACrB,MAAM,IAAI,KAAK,CACb,qCAAqC,IAAI,wMAAwM,CAClP,CAAA;iBACF;qBAAM;oBACL,MAAM,IAAI,KAAK,CACb,qCAAqC,IAAI,gMAAgM,CAC1O,CAAA;iBACF;aACF;YAED,OAAO,M
AAM,CAAA;SACd;QAED,MAAM,OAAO,GAAa,MAAM,UAAU,CAAC,IAAI,CAAC,CAAA;QAEhD,IAAI,OAAO,IAAI,OAAO,CAAC,MAAM,GAAG,CAAC,EAAE;YACjC,OAAO,OAAO,CAAC,CAAC,CAAC,CAAA;SAClB;QAED,OAAO,EAAE,CAAA;IACX,CAAC;CAAA;AA/BD,sBA+BC;AAED;;;;GAIG;AACH,SAAsB,UAAU,CAAC,IAAY;;QAC3C,IAAI,CAAC,IAAI,EAAE;YACT,MAAM,IAAI,KAAK,CAAC,8BAA8B,CAAC,CAAA;SAChD;QAED,sCAAsC;QACtC,MAAM,UAAU,GAAa,EAAE,CAAA;QAC/B,IAAI,MAAM,CAAC,UAAU,IAAI,OAAO,CAAC,GAAG,CAAC,SAAS,CAAC,EAAE;YAC/C,KAAK,MAAM,SAAS,IAAI,OAAO,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC,KAAK,CAAC,IAAI,CAAC,SAAS,CAAC,EAAE;gBACpE,IAAI,SAAS,EAAE;oBACb,UAAU,CAAC,IAAI,CAAC,SAAS,CAAC,CAAA;iBAC3B;aACF;SACF;QAED,+DAA+D;QAC/D,IAAI,MAAM,CAAC,QAAQ,CAAC,IAAI,CAAC,EAAE;YACzB,MAAM,QAAQ,GAAW,MAAM,MAAM,CAAC,oBAAoB,CAAC,IAAI,EAAE,UAAU,CAAC,CAAA;YAE5E,IAAI,QAAQ,EAAE;gBACZ,OAAO,CAAC,QAAQ,CAAC,CAAA;aAClB;YAED,OAAO,EAAE,CAAA;SACV;QAED,uCAAuC;QACvC,IAAI,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE;YAC3B,OAAO,EAAE,CAAA;SACV;QAED,gCAAgC;QAChC,EAAE;QACF,iGAAiG;QACjG,+FAA+F;QAC/F,iGAAiG;QACjG,oBAAoB;QACpB,MAAM,WAAW,GAAa,EAAE,CAAA;QAEhC,IAAI,OAAO,CAAC,GAAG,CAAC,IAAI,EAAE;YACpB,KAAK,MAAM,CAAC,IAAI,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,SAAS,CAAC,EAAE;gBACtD,IAAI,CAAC,EAAE;oBACL,WAAW,CAAC,IAAI,CAAC,CAAC,CAAC,CAAA;iBACpB;aACF;SACF;QAED,mBAAmB;QACnB,MAAM,OAAO,GAAa,EAAE,CAAA;QAE5B,KAAK,MAAM,SAAS,IAAI,WAAW,EAAE;YACnC,MAAM,QAAQ,GAAG,MAAM,MAAM,CAAC,oBAAoB,CAChD,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,IAAI,CAAC,EAC1B,UAAU,CACX,CAAA;YACD,IAAI,QAAQ,EAAE;gBACZ,OAAO,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAA;aACvB;SACF;QAED,OAAO,OAAO,CAAA;IAChB,CAAC;CAAA;AA7DD,gCA6DC;AAED,SAAS,eAAe,CAAC,OAAoB;IAC3C,MAAM,KAAK,GAAG,OAAO,CAAC,KAAK,IAAI,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,OAAO,CAAC,KAAK,CAAA;IAC1D,MAAM,SAAS,GAAG,OAAO,CAAC,OAAO,CAAC,SAAS,CAAC,CAAA;IAC5C,MAAM,mBAAmB,GACvB,OAAO,CAAC,mBAAmB,IAAI,IAAI;QACjC,CAAC,CAAC,IAAI;QACN,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,mBAAmB,CAAC,CAAA;IAC1C,OAAO,EAAC,KAAK,EAAE,SAAS,EAAE,mBAAmB,EAAC,CAAA;AAChD,CAAC;AAED,SAAe,cAAc,CAC3B,SAAiB,EACjB,OAAe,EACf,YAAoB,EACpB,KAAc;;QAEd,gDAAgD;QAChD,IAAI,YAAY,IAAI,GAAG;YAAE,OAAM;QAC/B,YAAY,EAAE,CAAA;QAEd,MAAM,MAAM,CAAC,OAAO,CAAC,CAAA;QAErB,MAAM,KAAK,GAAa,MAAM,MAAM,CAAC,OAAO,CAAC,SAAS,CAAC,CAAA;QAEvD,KAAK,MAAM,QAAQ,IAAI,KAAK,EAAE;YAC5B,MAAM,OAAO,GAAG,GAAG,SAAS,IAAI,QAAQ,EAAE,CAAA;YAC1C,MAAM,QAAQ,GAAG,GAAG,OAAO,IAAI,QAAQ,EAAE,CAAA;YACzC,MAAM,WAAW,GAAG,MAAM,MAAM,CAAC,KAAK,CAAC,OAAO,CAAC,CAAA;YAE/C,IAAI,WAAW,CAAC,WAAW,EAAE,EAAE;gBAC7B,UAAU;gBACV,MAAM,cAAc,CAAC,OAAO,EAAE,QAAQ,EAAE,YAAY,EAAE,KAAK,CAAC,CAAA;aAC7D;iBAAM;gBACL,MAAM,QAAQ,CAAC,OAAO,EAAE,QAAQ,EAAE,KAAK,CAAC,CAAA;aACzC;SACF;QAED,kDAAkD;QAClD,MAAM,MAAM,CAAC,KAAK,CAAC,OAAO,EAAE,CAAC,MAAM,MAAM,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC,CAAA;IAClE,CAAC;CAAA;AAED,qBAAqB;AACrB,SAAe,QAAQ,CACrB,OAAe,EACf,QAAgB,EAChB,KAAc;;QAEd,IAAI,CAAC,MAAM,MAAM,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,cAAc,EAAE,EAAE;YAClD,oBAAoB;YACpB,IAAI;gBACF,MAAM,MAAM,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAA;gBAC5B,MAAM,MAAM,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAA;aAC9B;YAAC,OAAO,CAAC,EAAE;gBACV,kCAAkC;gBAClC,IAAI,CAAC,CAAC,IAAI,KAAK,OAAO,EAAE;oBACtB,MAAM,MAAM,CAAC,KAAK,CAAC,QAAQ,EAAE,MAAM,CAAC,CAAA;oBACpC,MAAM,MAAM,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAA;iBAC9B;gBACD,iDAAiD;aAClD;YAED,oBAAoB;YACpB,MAAM,WAAW,GAAW,MAAM,MAAM,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAA;YAC1D,MAAM,MAAM,CAAC,OAAO,CAClB,WAAW,EACX,QAAQ,EACR,MAAM,CAAC,UAAU,CAAC,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,IAAI,CACtC,CAAA;SACF;aAAM,IAAI,CAAC,CAAC,MAAM,MAAM,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,IAAI,KAAK,EAAE;YACpD,MAAM,MAAM,CAAC,QAAQ,CAAC,OAAO,EAAE,QAAQ,CAAC,CAAA;SACzC;IACH,CAAC;CAAA"} \ No newline at end of file diff --git 
a/node_modules/@actions/io/package.json b/node_modules/@actions/io/package.json new file mode 100644 index 0000000..e8c8d8f --- /dev/null +++ b/node_modules/@actions/io/package.json @@ -0,0 +1,37 @@ +{ + "name": "@actions/io", + "version": "1.1.3", + "description": "Actions io lib", + "keywords": [ + "github", + "actions", + "io" + ], + "homepage": "https://github.com/actions/toolkit/tree/main/packages/io", + "license": "MIT", + "main": "lib/io.js", + "types": "lib/io.d.ts", + "directories": { + "lib": "lib", + "test": "__tests__" + }, + "files": [ + "lib" + ], + "publishConfig": { + "access": "public" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/actions/toolkit.git", + "directory": "packages/io" + }, + "scripts": { + "audit-moderate": "npm install && npm audit --json --audit-level=moderate > audit.json", + "test": "echo \"Error: run tests from root\" && exit 1", + "tsc": "tsc" + }, + "bugs": { + "url": "https://github.com/actions/toolkit/issues" + } +} diff --git a/node_modules/@fastify/busboy/LICENSE b/node_modules/@fastify/busboy/LICENSE new file mode 100644 index 0000000..290762e --- /dev/null +++ b/node_modules/@fastify/busboy/LICENSE @@ -0,0 +1,19 @@ +Copyright Brian White. All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. \ No newline at end of file diff --git a/node_modules/@fastify/busboy/README.md b/node_modules/@fastify/busboy/README.md new file mode 100644 index 0000000..ece3cc8 --- /dev/null +++ b/node_modules/@fastify/busboy/README.md @@ -0,0 +1,271 @@ +# busboy + +
+ +[![Build Status](https://github.com/fastify/busboy/actions/workflows/ci.yml/badge.svg)](https://github.com/fastify/busboy/actions) +[![Coverage Status](https://coveralls.io/repos/fastify/busboy/badge.svg?branch=master)](https://coveralls.io/r/fastify/busboy?branch=master) +[![js-standard-style](https://img.shields.io/badge/code%20style-standard-brightgreen.svg?style=flat)](https://standardjs.com/) +[![Security Responsible Disclosure](https://img.shields.io/badge/Security-Responsible%20Disclosure-yellow.svg)](https://github.com/fastify/.github/blob/main/SECURITY.md) + +
+ +
+ +[![NPM version](https://img.shields.io/npm/v/@fastify/busboy.svg?style=flat)](https://www.npmjs.com/package/@fastify/busboy) +[![NPM downloads](https://img.shields.io/npm/dm/@fastify/busboy.svg?style=flat)](https://www.npmjs.com/package/@fastify/busboy) + +
+ +Description +=========== + +A Node.js module for parsing incoming HTML form data. + +This is an officially supported fork by [fastify](https://github.com/fastify/) organization of the amazing library [originally created](https://github.com/mscdex/busboy) by Brian White, +aimed at addressing long-standing issues with it. + +Benchmark (Mean time for 500 Kb payload, 2000 cycles, 1000 cycle warmup): + +| Library | Version | Mean time in nanoseconds (less is better) | +|-----------------------|---------|-------------------------------------------| +| busboy | 0.3.1 | `340114` | +| @fastify/busboy | 1.0.0 | `270984` | + +[Changelog](https://github.com/fastify/busboy/blob/master/CHANGELOG.md) since busboy 0.31. + +Requirements +============ + +* [Node.js](http://nodejs.org/) 10+ + + +Install +======= + + npm i @fastify/busboy + + +Examples +======== + +* Parsing (multipart) with default options: + +```javascript +const http = require('node:http'); +const { inspect } = require('node:util'); +const Busboy = require('busboy'); + +http.createServer((req, res) => { + if (req.method === 'POST') { + const busboy = new Busboy({ headers: req.headers }); + busboy.on('file', (fieldname, file, filename, encoding, mimetype) => { + console.log(`File [${fieldname}]: filename: ${filename}, encoding: ${encoding}, mimetype: ${mimetype}`); + file.on('data', data => { + console.log(`File [${fieldname}] got ${data.length} bytes`); + }); + file.on('end', () => { + console.log(`File [${fieldname}] Finished`); + }); + }); + busboy.on('field', (fieldname, val, fieldnameTruncated, valTruncated, encoding, mimetype) => { + console.log(`Field [${fieldname}]: value: ${inspect(val)}`); + }); + busboy.on('finish', () => { + console.log('Done parsing form!'); + res.writeHead(303, { Connection: 'close', Location: '/' }); + res.end(); + }); + req.pipe(busboy); + } else if (req.method === 'GET') { + res.writeHead(200, { Connection: 'close' }); + res.end(` +
+
+
+ +
+ `); + } +}).listen(8000, () => { + console.log('Listening for requests'); +}); + +// Example output, using http://nodejs.org/images/ryan-speaker.jpg as the file: +// +// Listening for requests +// File [filefield]: filename: ryan-speaker.jpg, encoding: binary +// File [filefield] got 11971 bytes +// Field [textfield]: value: 'testing! :-)' +// File [filefield] Finished +// Done parsing form! +``` + +* Save all incoming files to disk: + +```javascript +const http = require('node:http'); +const path = require('node:path'); +const os = require('node:os'); +const fs = require('node:fs'); + +const Busboy = require('busboy'); + +http.createServer(function(req, res) { + if (req.method === 'POST') { + const busboy = new Busboy({ headers: req.headers }); + busboy.on('file', function(fieldname, file, filename, encoding, mimetype) { + var saveTo = path.join(os.tmpdir(), path.basename(fieldname)); + file.pipe(fs.createWriteStream(saveTo)); + }); + busboy.on('finish', function() { + res.writeHead(200, { 'Connection': 'close' }); + res.end("That's all folks!"); + }); + return req.pipe(busboy); + } + res.writeHead(404); + res.end(); +}).listen(8000, function() { + console.log('Listening for requests'); +}); +``` + +* Parsing (urlencoded) with default options: + +```javascript +const http = require('node:http'); +const { inspect } = require('node:util'); + +const Busboy = require('busboy'); + +http.createServer(function(req, res) { + if (req.method === 'POST') { + const busboy = new Busboy({ headers: req.headers }); + busboy.on('file', function(fieldname, file, filename, encoding, mimetype) { + console.log('File [' + fieldname + ']: filename: ' + filename); + file.on('data', function(data) { + console.log('File [' + fieldname + '] got ' + data.length + ' bytes'); + }); + file.on('end', function() { + console.log('File [' + fieldname + '] Finished'); + }); + }); + busboy.on('field', function(fieldname, val, fieldnameTruncated, valTruncated) { + console.log('Field [' + fieldname + ']: value: ' + inspect(val)); + }); + busboy.on('finish', function() { + console.log('Done parsing form!'); + res.writeHead(303, { Connection: 'close', Location: '/' }); + res.end(); + }); + req.pipe(busboy); + } else if (req.method === 'GET') { + res.writeHead(200, { Connection: 'close' }); + res.end('\ +
\ +
\ +
\ + Node.js rules!
\ + \ +
\ + '); + } +}).listen(8000, function() { + console.log('Listening for requests'); +}); + +// Example output: +// +// Listening for requests +// Field [textfield]: value: 'testing! :-)' +// Field [selectfield]: value: '9001' +// Field [checkfield]: value: 'on' +// Done parsing form! +``` + + +API +=== + +_Busboy_ is a _Writable_ stream + +Busboy (special) events +----------------------- + +* **file**(< _string_ >fieldname, < _ReadableStream_ >stream, < _string_ >filename, < _string_ >transferEncoding, < _string_ >mimeType) - Emitted for each new file form field found. `transferEncoding` contains the 'Content-Transfer-Encoding' value for the file stream. `mimeType` contains the 'Content-Type' value for the file stream. + * Note: if you listen for this event, you should always handle the `stream` no matter if you care about the file contents or not (e.g. you can simply just do `stream.resume();` if you want to discard the contents), otherwise the 'finish' event will never fire on the Busboy instance. However, if you don't care about **any** incoming files, you can simply not listen for the 'file' event at all and any/all files will be automatically and safely discarded (these discarded files do still count towards `files` and `parts` limits). + * If a configured file size limit was reached, `stream` will both have a boolean property `truncated` (best checked at the end of the stream) and emit a 'limit' event to notify you when this happens. + * The property `bytesRead` informs about the number of bytes that have been read so far. + +* **field**(< _string_ >fieldname, < _string_ >value, < _boolean_ >fieldnameTruncated, < _boolean_ >valueTruncated, < _string_ >transferEncoding, < _string_ >mimeType) - Emitted for each new non-file field found. + +* **partsLimit**() - Emitted when specified `parts` limit has been reached. No more 'file' or 'field' events will be emitted. + +* **filesLimit**() - Emitted when specified `files` limit has been reached. No more 'file' events will be emitted. + +* **fieldsLimit**() - Emitted when specified `fields` limit has been reached. No more 'field' events will be emitted. + + +Busboy methods +-------------- + +* **(constructor)**(< _object_ >config) - Creates and returns a new Busboy instance. + + * The constructor takes the following valid `config` settings: + + * **headers** - _object_ - These are the HTTP headers of the incoming request, which are used by individual parsers. + + * **autoDestroy** - _boolean_ - Whether this stream should automatically call .destroy() on itself after ending. (Default: false). + + * **highWaterMark** - _integer_ - highWaterMark to use for this Busboy instance (Default: WritableStream default). + + * **fileHwm** - _integer_ - highWaterMark to use for file streams (Default: ReadableStream default). + + * **defCharset** - _string_ - Default character set to use when one isn't defined (Default: 'utf8'). + + * **preservePath** - _boolean_ - If paths in the multipart 'filename' field shall be preserved. (Default: false). + + * **isPartAFile** - __function__ - Use this function to override the default file detection functionality. It has following parameters: + + * fieldName - __string__ The name of the field. + + * contentType - __string__ The content-type of the part, e.g. `text/plain`, `image/jpeg`, `application/octet-stream` + + * fileName - __string__ The name of a file supplied by the part. 
+ + (Default: `(fieldName, contentType, fileName) => (contentType === 'application/octet-stream' || fileName !== undefined)`) + + * **limits** - _object_ - Various limits on incoming data. Valid properties are: + + * **fieldNameSize** - _integer_ - Max field name size (in bytes) (Default: 100 bytes). + + * **fieldSize** - _integer_ - Max field value size (in bytes) (Default: 1 MiB, which is 1024 x 1024 bytes). + + * **fields** - _integer_ - Max number of non-file fields (Default: Infinity). + + * **fileSize** - _integer_ - For multipart forms, the max file size (in bytes) (Default: Infinity). + + * **files** - _integer_ - For multipart forms, the max number of file fields (Default: Infinity). + + * **parts** - _integer_ - For multipart forms, the max number of parts (fields + files) (Default: Infinity). + + * **headerPairs** - _integer_ - For multipart forms, the max number of header key=>value pairs to parse **Default:** 2000 + + * **headerSize** - _integer_ - For multipart forms, the max size of a multipart header **Default:** 81920. + + * The constructor can throw errors: + + * **Busboy expected an options-Object.** - Busboy expected an Object as first parameters. + + * **Busboy expected an options-Object with headers-attribute.** - The first parameter is lacking of a headers-attribute. + + * **Limit $limit is not a valid number** - Busboy expected the desired limit to be of type number. Busboy throws this Error to prevent a potential security issue by falling silently back to the Busboy-defaults. Potential source for this Error can be the direct use of environment variables without transforming them to the type number. + + * **Unsupported Content-Type.** - The `Content-Type` isn't one Busboy can parse. + + * **Missing Content-Type-header.** - The provided headers don't include `Content-Type` at all. diff --git a/node_modules/@fastify/busboy/deps/dicer/LICENSE b/node_modules/@fastify/busboy/deps/dicer/LICENSE new file mode 100644 index 0000000..290762e --- /dev/null +++ b/node_modules/@fastify/busboy/deps/dicer/LICENSE @@ -0,0 +1,19 @@ +Copyright Brian White. All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. 
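The constructor options, `limits`, and limit events documented in the `@fastify/busboy` README above can be combined as in the following minimal sketch. It is illustrative only and not part of any vendored file; requiring the package as `@fastify/busboy` with a Busboy-compatible export, the field names, and the 1 MiB `fileSize` cap are assumptions based solely on that README.

```javascript
// Illustrative sketch only (not a vendored file). Assumes the package can be
// required as '@fastify/busboy' and constructed as shown in the README above;
// the field names and the 1 MiB fileSize cap are arbitrary examples.
const http = require('node:http');
const Busboy = require('@fastify/busboy');

http.createServer((req, res) => {
  if (req.method !== 'POST') {
    res.writeHead(404);
    return res.end();
  }
  const busboy = new Busboy({
    headers: req.headers,
    limits: { files: 1, fileSize: 1024 * 1024, fields: 10 } // see "limits" above
  });
  busboy.on('file', (fieldname, file) => {
    // Per the 'file' event notes above, always consume the stream, even when
    // discarding the contents, otherwise 'finish' will never fire.
    file.on('limit', () => console.log(`File [${fieldname}] hit the fileSize limit`));
    file.on('end', () => console.log(`File [${fieldname}] done, truncated: ${file.truncated}`));
    file.resume();
  });
  busboy.on('filesLimit', () => console.log('files limit reached'));
  busboy.on('finish', () => {
    res.writeHead(200, { Connection: 'close' });
    res.end('ok');
  });
  req.pipe(busboy);
}).listen(8000);
```

Discarding unread file streams with `file.resume()` follows the README's note that 'finish' only fires once every emitted file stream has been handled.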
\ No newline at end of file diff --git a/node_modules/@fastify/busboy/deps/dicer/lib/Dicer.js b/node_modules/@fastify/busboy/deps/dicer/lib/Dicer.js new file mode 100644 index 0000000..3c8c081 --- /dev/null +++ b/node_modules/@fastify/busboy/deps/dicer/lib/Dicer.js @@ -0,0 +1,213 @@ +'use strict' + +const WritableStream = require('node:stream').Writable +const inherits = require('node:util').inherits + +const StreamSearch = require('../../streamsearch/sbmh') + +const PartStream = require('./PartStream') +const HeaderParser = require('./HeaderParser') + +const DASH = 45 +const B_ONEDASH = Buffer.from('-') +const B_CRLF = Buffer.from('\r\n') +const EMPTY_FN = function () {} + +function Dicer (cfg) { + if (!(this instanceof Dicer)) { return new Dicer(cfg) } + WritableStream.call(this, cfg) + + if (!cfg || (!cfg.headerFirst && typeof cfg.boundary !== 'string')) { throw new TypeError('Boundary required') } + + if (typeof cfg.boundary === 'string') { this.setBoundary(cfg.boundary) } else { this._bparser = undefined } + + this._headerFirst = cfg.headerFirst + + this._dashes = 0 + this._parts = 0 + this._finished = false + this._realFinish = false + this._isPreamble = true + this._justMatched = false + this._firstWrite = true + this._inHeader = true + this._part = undefined + this._cb = undefined + this._ignoreData = false + this._partOpts = { highWaterMark: cfg.partHwm } + this._pause = false + + const self = this + this._hparser = new HeaderParser(cfg) + this._hparser.on('header', function (header) { + self._inHeader = false + self._part.emit('header', header) + }) +} +inherits(Dicer, WritableStream) + +Dicer.prototype.emit = function (ev) { + if (ev === 'finish' && !this._realFinish) { + if (!this._finished) { + const self = this + process.nextTick(function () { + self.emit('error', new Error('Unexpected end of multipart data')) + if (self._part && !self._ignoreData) { + const type = (self._isPreamble ? 'Preamble' : 'Part') + self._part.emit('error', new Error(type + ' terminated early due to unexpected end of multipart data')) + self._part.push(null) + process.nextTick(function () { + self._realFinish = true + self.emit('finish') + self._realFinish = false + }) + return + } + self._realFinish = true + self.emit('finish') + self._realFinish = false + }) + } + } else { WritableStream.prototype.emit.apply(this, arguments) } +} + +Dicer.prototype._write = function (data, encoding, cb) { + // ignore unexpected data (e.g. 
extra trailer data after finished) + if (!this._hparser && !this._bparser) { return cb() } + + if (this._headerFirst && this._isPreamble) { + if (!this._part) { + this._part = new PartStream(this._partOpts) + if (this.listenerCount('preamble') !== 0) { this.emit('preamble', this._part) } else { this._ignore() } + } + const r = this._hparser.push(data) + if (!this._inHeader && r !== undefined && r < data.length) { data = data.slice(r) } else { return cb() } + } + + // allows for "easier" testing + if (this._firstWrite) { + this._bparser.push(B_CRLF) + this._firstWrite = false + } + + this._bparser.push(data) + + if (this._pause) { this._cb = cb } else { cb() } +} + +Dicer.prototype.reset = function () { + this._part = undefined + this._bparser = undefined + this._hparser = undefined +} + +Dicer.prototype.setBoundary = function (boundary) { + const self = this + this._bparser = new StreamSearch('\r\n--' + boundary) + this._bparser.on('info', function (isMatch, data, start, end) { + self._oninfo(isMatch, data, start, end) + }) +} + +Dicer.prototype._ignore = function () { + if (this._part && !this._ignoreData) { + this._ignoreData = true + this._part.on('error', EMPTY_FN) + // we must perform some kind of read on the stream even though we are + // ignoring the data, otherwise node's Readable stream will not emit 'end' + // after pushing null to the stream + this._part.resume() + } +} + +Dicer.prototype._oninfo = function (isMatch, data, start, end) { + let buf; const self = this; let i = 0; let r; let shouldWriteMore = true + + if (!this._part && this._justMatched && data) { + while (this._dashes < 2 && (start + i) < end) { + if (data[start + i] === DASH) { + ++i + ++this._dashes + } else { + if (this._dashes) { buf = B_ONEDASH } + this._dashes = 0 + break + } + } + if (this._dashes === 2) { + if ((start + i) < end && this.listenerCount('trailer') !== 0) { this.emit('trailer', data.slice(start + i, end)) } + this.reset() + this._finished = true + // no more parts will be added + if (self._parts === 0) { + self._realFinish = true + self.emit('finish') + self._realFinish = false + } + } + if (this._dashes) { return } + } + if (this._justMatched) { this._justMatched = false } + if (!this._part) { + this._part = new PartStream(this._partOpts) + this._part._read = function (n) { + self._unpause() + } + if (this._isPreamble && this.listenerCount('preamble') !== 0) { + this.emit('preamble', this._part) + } else if (this._isPreamble !== true && this.listenerCount('part') !== 0) { + this.emit('part', this._part) + } else { + this._ignore() + } + if (!this._isPreamble) { this._inHeader = true } + } + if (data && start < end && !this._ignoreData) { + if (this._isPreamble || !this._inHeader) { + if (buf) { shouldWriteMore = this._part.push(buf) } + shouldWriteMore = this._part.push(data.slice(start, end)) + if (!shouldWriteMore) { this._pause = true } + } else if (!this._isPreamble && this._inHeader) { + if (buf) { this._hparser.push(buf) } + r = this._hparser.push(data.slice(start, end)) + if (!this._inHeader && r !== undefined && r < end) { this._oninfo(false, data, start + r, end) } + } + } + if (isMatch) { + this._hparser.reset() + if (this._isPreamble) { this._isPreamble = false } else { + if (start !== end) { + ++this._parts + this._part.on('end', function () { + if (--self._parts === 0) { + if (self._finished) { + self._realFinish = true + self.emit('finish') + self._realFinish = false + } else { + self._unpause() + } + } + }) + } + } + this._part.push(null) + this._part = undefined + 
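+ // A boundary was matched: the finished part was closed with push(null) just above, so clear the per-part flags and record the match via _justMatched so a terminating '--' can be detected on the next pass.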
this._ignoreData = false + this._justMatched = true + this._dashes = 0 + } +} + +Dicer.prototype._unpause = function () { + if (!this._pause) { return } + + this._pause = false + if (this._cb) { + const cb = this._cb + this._cb = undefined + cb() + } +} + +module.exports = Dicer diff --git a/node_modules/@fastify/busboy/deps/dicer/lib/HeaderParser.js b/node_modules/@fastify/busboy/deps/dicer/lib/HeaderParser.js new file mode 100644 index 0000000..65f667b --- /dev/null +++ b/node_modules/@fastify/busboy/deps/dicer/lib/HeaderParser.js @@ -0,0 +1,100 @@ +'use strict' + +const EventEmitter = require('node:events').EventEmitter +const inherits = require('node:util').inherits +const getLimit = require('../../../lib/utils/getLimit') + +const StreamSearch = require('../../streamsearch/sbmh') + +const B_DCRLF = Buffer.from('\r\n\r\n') +const RE_CRLF = /\r\n/g +const RE_HDR = /^([^:]+):[ \t]?([\x00-\xFF]+)?$/ // eslint-disable-line no-control-regex + +function HeaderParser (cfg) { + EventEmitter.call(this) + + cfg = cfg || {} + const self = this + this.nread = 0 + this.maxed = false + this.npairs = 0 + this.maxHeaderPairs = getLimit(cfg, 'maxHeaderPairs', 2000) + this.maxHeaderSize = getLimit(cfg, 'maxHeaderSize', 80 * 1024) + this.buffer = '' + this.header = {} + this.finished = false + this.ss = new StreamSearch(B_DCRLF) + this.ss.on('info', function (isMatch, data, start, end) { + if (data && !self.maxed) { + if (self.nread + end - start >= self.maxHeaderSize) { + end = self.maxHeaderSize - self.nread + start + self.nread = self.maxHeaderSize + self.maxed = true + } else { self.nread += (end - start) } + + self.buffer += data.toString('binary', start, end) + } + if (isMatch) { self._finish() } + }) +} +inherits(HeaderParser, EventEmitter) + +HeaderParser.prototype.push = function (data) { + const r = this.ss.push(data) + if (this.finished) { return r } +} + +HeaderParser.prototype.reset = function () { + this.finished = false + this.buffer = '' + this.header = {} + this.ss.reset() +} + +HeaderParser.prototype._finish = function () { + if (this.buffer) { this._parseHeader() } + this.ss.matches = this.ss.maxMatches + const header = this.header + this.header = {} + this.buffer = '' + this.finished = true + this.nread = this.npairs = 0 + this.maxed = false + this.emit('header', header) +} + +HeaderParser.prototype._parseHeader = function () { + if (this.npairs === this.maxHeaderPairs) { return } + + const lines = this.buffer.split(RE_CRLF) + const len = lines.length + let m, h + + for (var i = 0; i < len; ++i) { // eslint-disable-line no-var + if (lines[i].length === 0) { continue } + if (lines[i][0] === '\t' || lines[i][0] === ' ') { + // folded header content + // RFC2822 says to just remove the CRLF and not the whitespace following + // it, so we follow the RFC and include the leading whitespace ... 
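+ // (a continuation line can only extend the header parsed on the previous iteration, hence the check that `h` is already set)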
+ if (h) { + this.header[h][this.header[h].length - 1] += lines[i] + continue + } + } + + const posColon = lines[i].indexOf(':') + if ( + posColon === -1 || + posColon === 0 + ) { + return + } + m = RE_HDR.exec(lines[i]) + h = m[1].toLowerCase() + this.header[h] = this.header[h] || [] + this.header[h].push((m[2] || '')) + if (++this.npairs === this.maxHeaderPairs) { break } + } +} + +module.exports = HeaderParser diff --git a/node_modules/@fastify/busboy/deps/dicer/lib/PartStream.js b/node_modules/@fastify/busboy/deps/dicer/lib/PartStream.js new file mode 100644 index 0000000..c91da1c --- /dev/null +++ b/node_modules/@fastify/busboy/deps/dicer/lib/PartStream.js @@ -0,0 +1,13 @@ +'use strict' + +const inherits = require('node:util').inherits +const ReadableStream = require('node:stream').Readable + +function PartStream (opts) { + ReadableStream.call(this, opts) +} +inherits(PartStream, ReadableStream) + +PartStream.prototype._read = function (n) {} + +module.exports = PartStream diff --git a/node_modules/@fastify/busboy/deps/dicer/lib/dicer.d.ts b/node_modules/@fastify/busboy/deps/dicer/lib/dicer.d.ts new file mode 100644 index 0000000..3c5b896 --- /dev/null +++ b/node_modules/@fastify/busboy/deps/dicer/lib/dicer.d.ts @@ -0,0 +1,164 @@ +// Type definitions for dicer 0.2 +// Project: https://github.com/mscdex/dicer +// Definitions by: BendingBender +// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped +// TypeScript Version: 2.2 +/// + +import stream = require("stream"); + +// tslint:disable:unified-signatures + +/** + * A very fast streaming multipart parser for node.js. + * Dicer is a WritableStream + * + * Dicer (special) events: + * - on('finish', ()) - Emitted when all parts have been parsed and the Dicer instance has been ended. + * - on('part', (stream: PartStream)) - Emitted when a new part has been found. + * - on('preamble', (stream: PartStream)) - Emitted for preamble if you should happen to need it (can usually be ignored). + * - on('trailer', (data: Buffer)) - Emitted when trailing data was found after the terminating boundary (as with the preamble, this can usually be ignored too). + */ +export class Dicer extends stream.Writable { + /** + * Creates and returns a new Dicer instance with the following valid config settings: + * + * @param config The configuration to use + */ + constructor(config: Dicer.Config); + /** + * Sets the boundary to use for parsing and performs some initialization needed for parsing. + * You should only need to use this if you set headerFirst to true in the constructor and are parsing the boundary from the preamble header. 
+ * + * @param boundary The boundary to use + */ + setBoundary(boundary: string): void; + addListener(event: "finish", listener: () => void): this; + addListener(event: "part", listener: (stream: Dicer.PartStream) => void): this; + addListener(event: "preamble", listener: (stream: Dicer.PartStream) => void): this; + addListener(event: "trailer", listener: (data: Buffer) => void): this; + addListener(event: "close", listener: () => void): this; + addListener(event: "drain", listener: () => void): this; + addListener(event: "error", listener: (err: Error) => void): this; + addListener(event: "pipe", listener: (src: stream.Readable) => void): this; + addListener(event: "unpipe", listener: (src: stream.Readable) => void): this; + addListener(event: string, listener: (...args: any[]) => void): this; + on(event: "finish", listener: () => void): this; + on(event: "part", listener: (stream: Dicer.PartStream) => void): this; + on(event: "preamble", listener: (stream: Dicer.PartStream) => void): this; + on(event: "trailer", listener: (data: Buffer) => void): this; + on(event: "close", listener: () => void): this; + on(event: "drain", listener: () => void): this; + on(event: "error", listener: (err: Error) => void): this; + on(event: "pipe", listener: (src: stream.Readable) => void): this; + on(event: "unpipe", listener: (src: stream.Readable) => void): this; + on(event: string, listener: (...args: any[]) => void): this; + once(event: "finish", listener: () => void): this; + once(event: "part", listener: (stream: Dicer.PartStream) => void): this; + once(event: "preamble", listener: (stream: Dicer.PartStream) => void): this; + once(event: "trailer", listener: (data: Buffer) => void): this; + once(event: "close", listener: () => void): this; + once(event: "drain", listener: () => void): this; + once(event: "error", listener: (err: Error) => void): this; + once(event: "pipe", listener: (src: stream.Readable) => void): this; + once(event: "unpipe", listener: (src: stream.Readable) => void): this; + once(event: string, listener: (...args: any[]) => void): this; + prependListener(event: "finish", listener: () => void): this; + prependListener(event: "part", listener: (stream: Dicer.PartStream) => void): this; + prependListener(event: "preamble", listener: (stream: Dicer.PartStream) => void): this; + prependListener(event: "trailer", listener: (data: Buffer) => void): this; + prependListener(event: "close", listener: () => void): this; + prependListener(event: "drain", listener: () => void): this; + prependListener(event: "error", listener: (err: Error) => void): this; + prependListener(event: "pipe", listener: (src: stream.Readable) => void): this; + prependListener(event: "unpipe", listener: (src: stream.Readable) => void): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + prependOnceListener(event: "finish", listener: () => void): this; + prependOnceListener(event: "part", listener: (stream: Dicer.PartStream) => void): this; + prependOnceListener(event: "preamble", listener: (stream: Dicer.PartStream) => void): this; + prependOnceListener(event: "trailer", listener: (data: Buffer) => void): this; + prependOnceListener(event: "close", listener: () => void): this; + prependOnceListener(event: "drain", listener: () => void): this; + prependOnceListener(event: "error", listener: (err: Error) => void): this; + prependOnceListener(event: "pipe", listener: (src: stream.Readable) => void): this; + prependOnceListener(event: "unpipe", listener: (src: stream.Readable) => void): 
this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + removeListener(event: "finish", listener: () => void): this; + removeListener(event: "part", listener: (stream: Dicer.PartStream) => void): this; + removeListener(event: "preamble", listener: (stream: Dicer.PartStream) => void): this; + removeListener(event: "trailer", listener: (data: Buffer) => void): this; + removeListener(event: "close", listener: () => void): this; + removeListener(event: "drain", listener: () => void): this; + removeListener(event: "error", listener: (err: Error) => void): this; + removeListener(event: "pipe", listener: (src: stream.Readable) => void): this; + removeListener(event: "unpipe", listener: (src: stream.Readable) => void): this; + removeListener(event: string, listener: (...args: any[]) => void): this; +} + +declare namespace Dicer { + interface Config { + /** + * This is the boundary used to detect the beginning of a new part. + */ + boundary?: string | undefined; + /** + * If true, preamble header parsing will be performed first. + */ + headerFirst?: boolean | undefined; + /** + * The maximum number of header key=>value pairs to parse Default: 2000 (same as node's http). + */ + maxHeaderPairs?: number | undefined; + } + + /** + * PartStream is a _ReadableStream_ + * + * PartStream (special) events: + * - on('header', (header: object)) - An object containing the header for this particular part. Each property value is an array of one or more string values. + */ + interface PartStream extends stream.Readable { + addListener(event: "header", listener: (header: object) => void): this; + addListener(event: "close", listener: () => void): this; + addListener(event: "data", listener: (chunk: Buffer | string) => void): this; + addListener(event: "end", listener: () => void): this; + addListener(event: "readable", listener: () => void): this; + addListener(event: "error", listener: (err: Error) => void): this; + addListener(event: string, listener: (...args: any[]) => void): this; + on(event: "header", listener: (header: object) => void): this; + on(event: "close", listener: () => void): this; + on(event: "data", listener: (chunk: Buffer | string) => void): this; + on(event: "end", listener: () => void): this; + on(event: "readable", listener: () => void): this; + on(event: "error", listener: (err: Error) => void): this; + on(event: string, listener: (...args: any[]) => void): this; + once(event: "header", listener: (header: object) => void): this; + once(event: "close", listener: () => void): this; + once(event: "data", listener: (chunk: Buffer | string) => void): this; + once(event: "end", listener: () => void): this; + once(event: "readable", listener: () => void): this; + once(event: "error", listener: (err: Error) => void): this; + once(event: string, listener: (...args: any[]) => void): this; + prependListener(event: "header", listener: (header: object) => void): this; + prependListener(event: "close", listener: () => void): this; + prependListener(event: "data", listener: (chunk: Buffer | string) => void): this; + prependListener(event: "end", listener: () => void): this; + prependListener(event: "readable", listener: () => void): this; + prependListener(event: "error", listener: (err: Error) => void): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + prependOnceListener(event: "header", listener: (header: object) => void): this; + prependOnceListener(event: "close", listener: () => void): this; + prependOnceListener(event: "data", 
listener: (chunk: Buffer | string) => void): this; + prependOnceListener(event: "end", listener: () => void): this; + prependOnceListener(event: "readable", listener: () => void): this; + prependOnceListener(event: "error", listener: (err: Error) => void): this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + removeListener(event: "header", listener: (header: object) => void): this; + removeListener(event: "close", listener: () => void): this; + removeListener(event: "data", listener: (chunk: Buffer | string) => void): this; + removeListener(event: "end", listener: () => void): this; + removeListener(event: "readable", listener: () => void): this; + removeListener(event: "error", listener: (err: Error) => void): this; + removeListener(event: string, listener: (...args: any[]) => void): this; + } +} \ No newline at end of file diff --git a/node_modules/@fastify/busboy/deps/streamsearch/sbmh.js b/node_modules/@fastify/busboy/deps/streamsearch/sbmh.js new file mode 100644 index 0000000..b90c0e8 --- /dev/null +++ b/node_modules/@fastify/busboy/deps/streamsearch/sbmh.js @@ -0,0 +1,228 @@ +'use strict' + +/** + * Copyright Brian White. All rights reserved. + * + * @see https://github.com/mscdex/streamsearch + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to + * deal in the Software without restriction, including without limitation the + * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or + * sell copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in + * all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS + * IN THE SOFTWARE. + * + * Based heavily on the Streaming Boyer-Moore-Horspool C++ implementation + * by Hongli Lai at: https://github.com/FooBarWidget/boyer-moore-horspool + */ +const EventEmitter = require('node:events').EventEmitter +const inherits = require('node:util').inherits + +function SBMH (needle) { + if (typeof needle === 'string') { + needle = Buffer.from(needle) + } + + if (!Buffer.isBuffer(needle)) { + throw new TypeError('The needle has to be a String or a Buffer.') + } + + const needleLength = needle.length + + if (needleLength === 0) { + throw new Error('The needle cannot be an empty String/Buffer.') + } + + if (needleLength > 256) { + throw new Error('The needle cannot have a length bigger than 256.') + } + + this.maxMatches = Infinity + this.matches = 0 + + this._occ = new Array(256) + .fill(needleLength) // Initialize occurrence table. + this._lookbehind_size = 0 + this._needle = needle + this._bufpos = 0 + + this._lookbehind = Buffer.alloc(needleLength) + + // Populate occurrence table with analysis of the needle, + // ignoring last letter. 
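+ // _occ[byte] is the Horspool bad-character shift: how far the search window may jump when that byte lines up with the last position of the needle.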
+ for (var i = 0; i < needleLength - 1; ++i) { // eslint-disable-line no-var + this._occ[needle[i]] = needleLength - 1 - i + } +} +inherits(SBMH, EventEmitter) + +SBMH.prototype.reset = function () { + this._lookbehind_size = 0 + this.matches = 0 + this._bufpos = 0 +} + +SBMH.prototype.push = function (chunk, pos) { + if (!Buffer.isBuffer(chunk)) { + chunk = Buffer.from(chunk, 'binary') + } + const chlen = chunk.length + this._bufpos = pos || 0 + let r + while (r !== chlen && this.matches < this.maxMatches) { r = this._sbmh_feed(chunk) } + return r +} + +SBMH.prototype._sbmh_feed = function (data) { + const len = data.length + const needle = this._needle + const needleLength = needle.length + const lastNeedleChar = needle[needleLength - 1] + + // Positive: points to a position in `data` + // pos == 3 points to data[3] + // Negative: points to a position in the lookbehind buffer + // pos == -2 points to lookbehind[lookbehind_size - 2] + let pos = -this._lookbehind_size + let ch + + if (pos < 0) { + // Lookbehind buffer is not empty. Perform Boyer-Moore-Horspool + // search with character lookup code that considers both the + // lookbehind buffer and the current round's haystack data. + // + // Loop until + // there is a match. + // or until + // we've moved past the position that requires the + // lookbehind buffer. In this case we switch to the + // optimized loop. + // or until + // the character to look at lies outside the haystack. + while (pos < 0 && pos <= len - needleLength) { + ch = this._sbmh_lookup_char(data, pos + needleLength - 1) + + if ( + ch === lastNeedleChar && + this._sbmh_memcmp(data, pos, needleLength - 1) + ) { + this._lookbehind_size = 0 + ++this.matches + this.emit('info', true) + + return (this._bufpos = pos + needleLength) + } + pos += this._occ[ch] + } + + // No match. + + if (pos < 0) { + // There's too few data for Boyer-Moore-Horspool to run, + // so let's use a different algorithm to skip as much as + // we can. + // Forward pos until + // the trailing part of lookbehind + data + // looks like the beginning of the needle + // or until + // pos == 0 + while (pos < 0 && !this._sbmh_memcmp(data, pos, len - pos)) { ++pos } + } + + if (pos >= 0) { + // Discard lookbehind buffer. + this.emit('info', false, this._lookbehind, 0, this._lookbehind_size) + this._lookbehind_size = 0 + } else { + // Cut off part of the lookbehind buffer that has + // been processed and append the entire haystack + // into it. + const bytesToCutOff = this._lookbehind_size + pos + if (bytesToCutOff > 0) { + // The cut off data is guaranteed not to contain the needle. + this.emit('info', false, this._lookbehind, 0, bytesToCutOff) + } + + this._lookbehind.copy(this._lookbehind, 0, bytesToCutOff, + this._lookbehind_size - bytesToCutOff) + this._lookbehind_size -= bytesToCutOff + + data.copy(this._lookbehind, this._lookbehind_size) + this._lookbehind_size += len + + this._bufpos = len + return len + } + } + + pos += (pos >= 0) * this._bufpos + + // Lookbehind buffer is now empty. We only need to check if the + // needle is in the haystack. + if (data.indexOf(needle, pos) !== -1) { + pos = data.indexOf(needle, pos) + ++this.matches + if (pos > 0) { this.emit('info', true, data, this._bufpos, pos) } else { this.emit('info', true) } + + return (this._bufpos = pos + needleLength) + } else { + pos = len - needleLength + } + + // There was no match. 
If there's trailing haystack data that we cannot + // match yet using the Boyer-Moore-Horspool algorithm (because the trailing + // data is less than the needle size) then match using a modified + // algorithm that starts matching from the beginning instead of the end. + // Whatever trailing data is left after running this algorithm is added to + // the lookbehind buffer. + while ( + pos < len && + ( + data[pos] !== needle[0] || + ( + (Buffer.compare( + data.subarray(pos, pos + len - pos), + needle.subarray(0, len - pos) + ) !== 0) + ) + ) + ) { + ++pos + } + if (pos < len) { + data.copy(this._lookbehind, 0, pos, pos + (len - pos)) + this._lookbehind_size = len - pos + } + + // Everything until pos is guaranteed not to contain needle data. + if (pos > 0) { this.emit('info', false, data, this._bufpos, pos < len ? pos : len) } + + this._bufpos = len + return len +} + +SBMH.prototype._sbmh_lookup_char = function (data, pos) { + return (pos < 0) + ? this._lookbehind[this._lookbehind_size + pos] + : data[pos] +} + +SBMH.prototype._sbmh_memcmp = function (data, pos, len) { + for (var i = 0; i < len; ++i) { // eslint-disable-line no-var + if (this._sbmh_lookup_char(data, pos + i) !== this._needle[i]) { return false } + } + return true +} + +module.exports = SBMH diff --git a/node_modules/@fastify/busboy/lib/main.d.ts b/node_modules/@fastify/busboy/lib/main.d.ts new file mode 100644 index 0000000..91b6448 --- /dev/null +++ b/node_modules/@fastify/busboy/lib/main.d.ts @@ -0,0 +1,196 @@ +// Definitions by: Jacob Baskin +// BendingBender +// Igor Savin + +/// + +import * as http from 'http'; +import { Readable, Writable } from 'stream'; +export { Dicer } from "../deps/dicer/lib/dicer"; + +export const Busboy: BusboyConstructor; +export default Busboy; + +export interface BusboyConfig { + /** + * These are the HTTP headers of the incoming request, which are used by individual parsers. + */ + headers: BusboyHeaders; + /** + * `highWaterMark` to use for this Busboy instance. + * @default WritableStream default. + */ + highWaterMark?: number | undefined; + /** + * highWaterMark to use for file streams. + * @default ReadableStream default. + */ + fileHwm?: number | undefined; + /** + * Default character set to use when one isn't defined. + * @default 'utf8' + */ + defCharset?: string | undefined; + /** + * Detect if a Part is a file. + * + * By default a file is detected if contentType + * is application/octet-stream or fileName is not + * undefined. + * + * Modify this to handle e.g. Blobs. + */ + isPartAFile?: (fieldName: string | undefined, contentType: string | undefined, fileName: string | undefined) => boolean; + /** + * If paths in the multipart 'filename' field shall be preserved. + * @default false + */ + preservePath?: boolean | undefined; + /** + * Various limits on incoming data. 
+ */ + limits?: + | { + /** + * Max field name size (in bytes) + * @default 100 bytes + */ + fieldNameSize?: number | undefined; + /** + * Max field value size (in bytes) + * @default 1MB + */ + fieldSize?: number | undefined; + /** + * Max number of non-file fields + * @default Infinity + */ + fields?: number | undefined; + /** + * For multipart forms, the max file size (in bytes) + * @default Infinity + */ + fileSize?: number | undefined; + /** + * For multipart forms, the max number of file fields + * @default Infinity + */ + files?: number | undefined; + /** + * For multipart forms, the max number of parts (fields + files) + * @default Infinity + */ + parts?: number | undefined; + /** + * For multipart forms, the max number of header key=>value pairs to parse + * @default 2000 + */ + headerPairs?: number | undefined; + + /** + * For multipart forms, the max size of a header part + * @default 81920 + */ + headerSize?: number | undefined; + } + | undefined; +} + +export type BusboyHeaders = { 'content-type': string } & http.IncomingHttpHeaders; + +export interface BusboyFileStream extends + Readable { + + truncated: boolean; + + /** + * The number of bytes that have been read so far. + */ + bytesRead: number; +} + +export interface Busboy extends Writable { + addListener(event: Event, listener: BusboyEvents[Event]): this; + + addListener(event: string | symbol, listener: (...args: any[]) => void): this; + + on(event: Event, listener: BusboyEvents[Event]): this; + + on(event: string | symbol, listener: (...args: any[]) => void): this; + + once(event: Event, listener: BusboyEvents[Event]): this; + + once(event: string | symbol, listener: (...args: any[]) => void): this; + + removeListener(event: Event, listener: BusboyEvents[Event]): this; + + removeListener(event: string | symbol, listener: (...args: any[]) => void): this; + + off(event: Event, listener: BusboyEvents[Event]): this; + + off(event: string | symbol, listener: (...args: any[]) => void): this; + + prependListener(event: Event, listener: BusboyEvents[Event]): this; + + prependListener(event: string | symbol, listener: (...args: any[]) => void): this; + + prependOnceListener(event: Event, listener: BusboyEvents[Event]): this; + + prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; +} + +export interface BusboyEvents { + /** + * Emitted for each new file form field found. + * + * * Note: if you listen for this event, you should always handle the `stream` no matter if you care about the + * file contents or not (e.g. you can simply just do `stream.resume();` if you want to discard the contents), + * otherwise the 'finish' event will never fire on the Busboy instance. However, if you don't care about **any** + * incoming files, you can simply not listen for the 'file' event at all and any/all files will be automatically + * and safely discarded (these discarded files do still count towards `files` and `parts` limits). + * * If a configured file size limit was reached, `stream` will both have a boolean property `truncated` + * (best checked at the end of the stream) and emit a 'limit' event to notify you when this happens. + * + * @param listener.transferEncoding Contains the 'Content-Transfer-Encoding' value for the file stream. + * @param listener.mimeType Contains the 'Content-Type' value for the file stream. 
+ */ + file: ( + fieldname: string, + stream: BusboyFileStream, + filename: string, + transferEncoding: string, + mimeType: string, + ) => void; + /** + * Emitted for each new non-file field found. + */ + field: ( + fieldname: string, + value: string, + fieldnameTruncated: boolean, + valueTruncated: boolean, + transferEncoding: string, + mimeType: string, + ) => void; + finish: () => void; + /** + * Emitted when specified `parts` limit has been reached. No more 'file' or 'field' events will be emitted. + */ + partsLimit: () => void; + /** + * Emitted when specified `files` limit has been reached. No more 'file' events will be emitted. + */ + filesLimit: () => void; + /** + * Emitted when specified `fields` limit has been reached. No more 'field' events will be emitted. + */ + fieldsLimit: () => void; + error: (error: unknown) => void; +} + +export interface BusboyConstructor { + (options: BusboyConfig): Busboy; + + new(options: BusboyConfig): Busboy; +} + diff --git a/node_modules/@fastify/busboy/lib/main.js b/node_modules/@fastify/busboy/lib/main.js new file mode 100644 index 0000000..8794beb --- /dev/null +++ b/node_modules/@fastify/busboy/lib/main.js @@ -0,0 +1,85 @@ +'use strict' + +const WritableStream = require('node:stream').Writable +const { inherits } = require('node:util') +const Dicer = require('../deps/dicer/lib/Dicer') + +const MultipartParser = require('./types/multipart') +const UrlencodedParser = require('./types/urlencoded') +const parseParams = require('./utils/parseParams') + +function Busboy (opts) { + if (!(this instanceof Busboy)) { return new Busboy(opts) } + + if (typeof opts !== 'object') { + throw new TypeError('Busboy expected an options-Object.') + } + if (typeof opts.headers !== 'object') { + throw new TypeError('Busboy expected an options-Object with headers-attribute.') + } + if (typeof opts.headers['content-type'] !== 'string') { + throw new TypeError('Missing Content-Type-header.') + } + + const { + headers, + ...streamOptions + } = opts + + this.opts = { + autoDestroy: false, + ...streamOptions + } + WritableStream.call(this, this.opts) + + this._done = false + this._parser = this.getParserByHeaders(headers) + this._finished = false +} +inherits(Busboy, WritableStream) + +Busboy.prototype.emit = function (ev) { + if (ev === 'finish') { + if (!this._done) { + this._parser?.end() + return + } else if (this._finished) { + return + } + this._finished = true + } + WritableStream.prototype.emit.apply(this, arguments) +} + +Busboy.prototype.getParserByHeaders = function (headers) { + const parsed = parseParams(headers['content-type']) + + const cfg = { + defCharset: this.opts.defCharset, + fileHwm: this.opts.fileHwm, + headers, + highWaterMark: this.opts.highWaterMark, + isPartAFile: this.opts.isPartAFile, + limits: this.opts.limits, + parsedConType: parsed, + preservePath: this.opts.preservePath + } + + if (MultipartParser.detect.test(parsed[0])) { + return new MultipartParser(this, cfg) + } + if (UrlencodedParser.detect.test(parsed[0])) { + return new UrlencodedParser(this, cfg) + } + throw new Error('Unsupported Content-Type.') +} + +Busboy.prototype._write = function (chunk, encoding, cb) { + this._parser.write(chunk, cb) +} + +module.exports = Busboy +module.exports.default = Busboy +module.exports.Busboy = Busboy + +module.exports.Dicer = Dicer diff --git a/node_modules/@fastify/busboy/lib/types/multipart.js b/node_modules/@fastify/busboy/lib/types/multipart.js new file mode 100644 index 0000000..d691eca --- /dev/null +++ 
b/node_modules/@fastify/busboy/lib/types/multipart.js @@ -0,0 +1,306 @@ +'use strict' + +// TODO: +// * support 1 nested multipart level +// (see second multipart example here: +// http://www.w3.org/TR/html401/interact/forms.html#didx-multipartform-data) +// * support limits.fieldNameSize +// -- this will require modifications to utils.parseParams + +const { Readable } = require('node:stream') +const { inherits } = require('node:util') + +const Dicer = require('../../deps/dicer/lib/Dicer') + +const parseParams = require('../utils/parseParams') +const decodeText = require('../utils/decodeText') +const basename = require('../utils/basename') +const getLimit = require('../utils/getLimit') + +const RE_BOUNDARY = /^boundary$/i +const RE_FIELD = /^form-data$/i +const RE_CHARSET = /^charset$/i +const RE_FILENAME = /^filename$/i +const RE_NAME = /^name$/i + +Multipart.detect = /^multipart\/form-data/i +function Multipart (boy, cfg) { + let i + let len + const self = this + let boundary + const limits = cfg.limits + const isPartAFile = cfg.isPartAFile || ((fieldName, contentType, fileName) => (contentType === 'application/octet-stream' || fileName !== undefined)) + const parsedConType = cfg.parsedConType || [] + const defCharset = cfg.defCharset || 'utf8' + const preservePath = cfg.preservePath + const fileOpts = { highWaterMark: cfg.fileHwm } + + for (i = 0, len = parsedConType.length; i < len; ++i) { + if (Array.isArray(parsedConType[i]) && + RE_BOUNDARY.test(parsedConType[i][0])) { + boundary = parsedConType[i][1] + break + } + } + + function checkFinished () { + if (nends === 0 && finished && !boy._done) { + finished = false + self.end() + } + } + + if (typeof boundary !== 'string') { throw new Error('Multipart: Boundary not found') } + + const fieldSizeLimit = getLimit(limits, 'fieldSize', 1 * 1024 * 1024) + const fileSizeLimit = getLimit(limits, 'fileSize', Infinity) + const filesLimit = getLimit(limits, 'files', Infinity) + const fieldsLimit = getLimit(limits, 'fields', Infinity) + const partsLimit = getLimit(limits, 'parts', Infinity) + const headerPairsLimit = getLimit(limits, 'headerPairs', 2000) + const headerSizeLimit = getLimit(limits, 'headerSize', 80 * 1024) + + let nfiles = 0 + let nfields = 0 + let nends = 0 + let curFile + let curField + let finished = false + + this._needDrain = false + this._pause = false + this._cb = undefined + this._nparts = 0 + this._boy = boy + + const parserCfg = { + boundary, + maxHeaderPairs: headerPairsLimit, + maxHeaderSize: headerSizeLimit, + partHwm: fileOpts.highWaterMark, + highWaterMark: cfg.highWaterMark + } + + this.parser = new Dicer(parserCfg) + this.parser.on('drain', function () { + self._needDrain = false + if (self._cb && !self._pause) { + const cb = self._cb + self._cb = undefined + cb() + } + }).on('part', function onPart (part) { + if (++self._nparts > partsLimit) { + self.parser.removeListener('part', onPart) + self.parser.on('part', skipPart) + boy.hitPartsLimit = true + boy.emit('partsLimit') + return skipPart(part) + } + + // hack because streams2 _always_ doesn't emit 'end' until nextTick, so let + // us emit 'end' early since we know the part has ended if we are already + // seeing the next part + if (curField) { + const field = curField + field.emit('end') + field.removeAllListeners('end') + } + + part.on('header', function (header) { + let contype + let fieldname + let parsed + let charset + let encoding + let filename + let nsize = 0 + + if (header['content-type']) { + parsed = parseParams(header['content-type'][0]) + if 
(parsed[0]) { + contype = parsed[0].toLowerCase() + for (i = 0, len = parsed.length; i < len; ++i) { + if (RE_CHARSET.test(parsed[i][0])) { + charset = parsed[i][1].toLowerCase() + break + } + } + } + } + + if (contype === undefined) { contype = 'text/plain' } + if (charset === undefined) { charset = defCharset } + + if (header['content-disposition']) { + parsed = parseParams(header['content-disposition'][0]) + if (!RE_FIELD.test(parsed[0])) { return skipPart(part) } + for (i = 0, len = parsed.length; i < len; ++i) { + if (RE_NAME.test(parsed[i][0])) { + fieldname = parsed[i][1] + } else if (RE_FILENAME.test(parsed[i][0])) { + filename = parsed[i][1] + if (!preservePath) { filename = basename(filename) } + } + } + } else { return skipPart(part) } + + if (header['content-transfer-encoding']) { encoding = header['content-transfer-encoding'][0].toLowerCase() } else { encoding = '7bit' } + + let onData, + onEnd + + if (isPartAFile(fieldname, contype, filename)) { + // file/binary field + if (nfiles === filesLimit) { + if (!boy.hitFilesLimit) { + boy.hitFilesLimit = true + boy.emit('filesLimit') + } + return skipPart(part) + } + + ++nfiles + + if (boy.listenerCount('file') === 0) { + self.parser._ignore() + return + } + + ++nends + const file = new FileStream(fileOpts) + curFile = file + file.on('end', function () { + --nends + self._pause = false + checkFinished() + if (self._cb && !self._needDrain) { + const cb = self._cb + self._cb = undefined + cb() + } + }) + file._read = function (n) { + if (!self._pause) { return } + self._pause = false + if (self._cb && !self._needDrain) { + const cb = self._cb + self._cb = undefined + cb() + } + } + boy.emit('file', fieldname, file, filename, encoding, contype) + + onData = function (data) { + if ((nsize += data.length) > fileSizeLimit) { + const extralen = fileSizeLimit - nsize + data.length + if (extralen > 0) { file.push(data.slice(0, extralen)) } + file.truncated = true + file.bytesRead = fileSizeLimit + part.removeAllListeners('data') + file.emit('limit') + return + } else if (!file.push(data)) { self._pause = true } + + file.bytesRead = nsize + } + + onEnd = function () { + curFile = undefined + file.push(null) + } + } else { + // non-file field + if (nfields === fieldsLimit) { + if (!boy.hitFieldsLimit) { + boy.hitFieldsLimit = true + boy.emit('fieldsLimit') + } + return skipPart(part) + } + + ++nfields + ++nends + let buffer = '' + let truncated = false + curField = part + + onData = function (data) { + if ((nsize += data.length) > fieldSizeLimit) { + const extralen = (fieldSizeLimit - (nsize - data.length)) + buffer += data.toString('binary', 0, extralen) + truncated = true + part.removeAllListeners('data') + } else { buffer += data.toString('binary') } + } + + onEnd = function () { + curField = undefined + if (buffer.length) { buffer = decodeText(buffer, 'binary', charset) } + boy.emit('field', fieldname, buffer, false, truncated, encoding, contype) + --nends + checkFinished() + } + } + + /* As of node@2efe4ab761666 (v0.10.29+/v0.11.14+), busboy had become + broken. Streams2/streams3 is a huge black box of confusion, but + somehow overriding the sync state seems to fix things again (and still + seems to work for previous node versions). 
+ */ + part._readableState.sync = false + + part.on('data', onData) + part.on('end', onEnd) + }).on('error', function (err) { + if (curFile) { curFile.emit('error', err) } + }) + }).on('error', function (err) { + boy.emit('error', err) + }).on('finish', function () { + finished = true + checkFinished() + }) +} + +Multipart.prototype.write = function (chunk, cb) { + const r = this.parser.write(chunk) + if (r && !this._pause) { + cb() + } else { + this._needDrain = !r + this._cb = cb + } +} + +Multipart.prototype.end = function () { + const self = this + + if (self.parser.writable) { + self.parser.end() + } else if (!self._boy._done) { + process.nextTick(function () { + self._boy._done = true + self._boy.emit('finish') + }) + } +} + +function skipPart (part) { + part.resume() +} + +function FileStream (opts) { + Readable.call(this, opts) + + this.bytesRead = 0 + + this.truncated = false +} + +inherits(FileStream, Readable) + +FileStream.prototype._read = function (n) {} + +module.exports = Multipart diff --git a/node_modules/@fastify/busboy/lib/types/urlencoded.js b/node_modules/@fastify/busboy/lib/types/urlencoded.js new file mode 100644 index 0000000..6f5f784 --- /dev/null +++ b/node_modules/@fastify/busboy/lib/types/urlencoded.js @@ -0,0 +1,190 @@ +'use strict' + +const Decoder = require('../utils/Decoder') +const decodeText = require('../utils/decodeText') +const getLimit = require('../utils/getLimit') + +const RE_CHARSET = /^charset$/i + +UrlEncoded.detect = /^application\/x-www-form-urlencoded/i +function UrlEncoded (boy, cfg) { + const limits = cfg.limits + const parsedConType = cfg.parsedConType + this.boy = boy + + this.fieldSizeLimit = getLimit(limits, 'fieldSize', 1 * 1024 * 1024) + this.fieldNameSizeLimit = getLimit(limits, 'fieldNameSize', 100) + this.fieldsLimit = getLimit(limits, 'fields', Infinity) + + let charset + for (var i = 0, len = parsedConType.length; i < len; ++i) { // eslint-disable-line no-var + if (Array.isArray(parsedConType[i]) && + RE_CHARSET.test(parsedConType[i][0])) { + charset = parsedConType[i][1].toLowerCase() + break + } + } + + if (charset === undefined) { charset = cfg.defCharset || 'utf8' } + + this.decoder = new Decoder() + this.charset = charset + this._fields = 0 + this._state = 'key' + this._checkingBytes = true + this._bytesKey = 0 + this._bytesVal = 0 + this._key = '' + this._val = '' + this._keyTrunc = false + this._valTrunc = false + this._hitLimit = false +} + +UrlEncoded.prototype.write = function (data, cb) { + if (this._fields === this.fieldsLimit) { + if (!this.boy.hitFieldsLimit) { + this.boy.hitFieldsLimit = true + this.boy.emit('fieldsLimit') + } + return cb() + } + + let idxeq; let idxamp; let i; let p = 0; const len = data.length + + while (p < len) { + if (this._state === 'key') { + idxeq = idxamp = undefined + for (i = p; i < len; ++i) { + if (!this._checkingBytes) { ++p } + if (data[i] === 0x3D/* = */) { + idxeq = i + break + } else if (data[i] === 0x26/* & */) { + idxamp = i + break + } + if (this._checkingBytes && this._bytesKey === this.fieldNameSizeLimit) { + this._hitLimit = true + break + } else if (this._checkingBytes) { ++this._bytesKey } + } + + if (idxeq !== undefined) { + // key with assignment + if (idxeq > p) { this._key += this.decoder.write(data.toString('binary', p, idxeq)) } + this._state = 'val' + + this._hitLimit = false + this._checkingBytes = true + this._val = '' + this._bytesVal = 0 + this._valTrunc = false + this.decoder.reset() + + p = idxeq + 1 + } else if (idxamp !== undefined) { + // key with no 
assignment + ++this._fields + let key; const keyTrunc = this._keyTrunc + if (idxamp > p) { key = (this._key += this.decoder.write(data.toString('binary', p, idxamp))) } else { key = this._key } + + this._hitLimit = false + this._checkingBytes = true + this._key = '' + this._bytesKey = 0 + this._keyTrunc = false + this.decoder.reset() + + if (key.length) { + this.boy.emit('field', decodeText(key, 'binary', this.charset), + '', + keyTrunc, + false) + } + + p = idxamp + 1 + if (this._fields === this.fieldsLimit) { return cb() } + } else if (this._hitLimit) { + // we may not have hit the actual limit if there are encoded bytes... + if (i > p) { this._key += this.decoder.write(data.toString('binary', p, i)) } + p = i + if ((this._bytesKey = this._key.length) === this.fieldNameSizeLimit) { + // yep, we actually did hit the limit + this._checkingBytes = false + this._keyTrunc = true + } + } else { + if (p < len) { this._key += this.decoder.write(data.toString('binary', p)) } + p = len + } + } else { + idxamp = undefined + for (i = p; i < len; ++i) { + if (!this._checkingBytes) { ++p } + if (data[i] === 0x26/* & */) { + idxamp = i + break + } + if (this._checkingBytes && this._bytesVal === this.fieldSizeLimit) { + this._hitLimit = true + break + } else if (this._checkingBytes) { ++this._bytesVal } + } + + if (idxamp !== undefined) { + ++this._fields + if (idxamp > p) { this._val += this.decoder.write(data.toString('binary', p, idxamp)) } + this.boy.emit('field', decodeText(this._key, 'binary', this.charset), + decodeText(this._val, 'binary', this.charset), + this._keyTrunc, + this._valTrunc) + this._state = 'key' + + this._hitLimit = false + this._checkingBytes = true + this._key = '' + this._bytesKey = 0 + this._keyTrunc = false + this.decoder.reset() + + p = idxamp + 1 + if (this._fields === this.fieldsLimit) { return cb() } + } else if (this._hitLimit) { + // we may not have hit the actual limit if there are encoded bytes... 
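+ // (percent-encoded input shrinks when decoded: '%41' becomes the single character 'A', so the decoded value length is re-checked before marking it truncated)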
+ if (i > p) { this._val += this.decoder.write(data.toString('binary', p, i)) } + p = i + if ((this._val === '' && this.fieldSizeLimit === 0) || + (this._bytesVal = this._val.length) === this.fieldSizeLimit) { + // yep, we actually did hit the limit + this._checkingBytes = false + this._valTrunc = true + } + } else { + if (p < len) { this._val += this.decoder.write(data.toString('binary', p)) } + p = len + } + } + } + cb() +} + +UrlEncoded.prototype.end = function () { + if (this.boy._done) { return } + + if (this._state === 'key' && this._key.length > 0) { + this.boy.emit('field', decodeText(this._key, 'binary', this.charset), + '', + this._keyTrunc, + false) + } else if (this._state === 'val') { + this.boy.emit('field', decodeText(this._key, 'binary', this.charset), + decodeText(this._val, 'binary', this.charset), + this._keyTrunc, + this._valTrunc) + } + this.boy._done = true + this.boy.emit('finish') +} + +module.exports = UrlEncoded diff --git a/node_modules/@fastify/busboy/lib/utils/Decoder.js b/node_modules/@fastify/busboy/lib/utils/Decoder.js new file mode 100644 index 0000000..7917678 --- /dev/null +++ b/node_modules/@fastify/busboy/lib/utils/Decoder.js @@ -0,0 +1,54 @@ +'use strict' + +const RE_PLUS = /\+/g + +const HEX = [ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, + 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 +] + +function Decoder () { + this.buffer = undefined +} +Decoder.prototype.write = function (str) { + // Replace '+' with ' ' before decoding + str = str.replace(RE_PLUS, ' ') + let res = '' + let i = 0; let p = 0; const len = str.length + for (; i < len; ++i) { + if (this.buffer !== undefined) { + if (!HEX[str.charCodeAt(i)]) { + res += '%' + this.buffer + this.buffer = undefined + --i // retry character + } else { + this.buffer += str[i] + ++p + if (this.buffer.length === 2) { + res += String.fromCharCode(parseInt(this.buffer, 16)) + this.buffer = undefined + } + } + } else if (str[i] === '%') { + if (i > p) { + res += str.substring(p, i) + p = i + } + this.buffer = '' + ++p + } + } + if (p < len && this.buffer === undefined) { res += str.substring(p) } + return res +} +Decoder.prototype.reset = function () { + this.buffer = undefined +} + +module.exports = Decoder diff --git a/node_modules/@fastify/busboy/lib/utils/basename.js b/node_modules/@fastify/busboy/lib/utils/basename.js new file mode 100644 index 0000000..db58819 --- /dev/null +++ b/node_modules/@fastify/busboy/lib/utils/basename.js @@ -0,0 +1,14 @@ +'use strict' + +module.exports = function basename (path) { + if (typeof path !== 'string') { return '' } + for (var i = path.length - 1; i >= 0; --i) { // eslint-disable-line no-var + switch (path.charCodeAt(i)) { + case 0x2F: // '/' + case 0x5C: // '\' + path = path.slice(i + 1) + return (path === '..' || path === '.' ? '' : path) + } + } + return (path === '..' || path === '.' ? 
'' : path) +} diff --git a/node_modules/@fastify/busboy/lib/utils/decodeText.js b/node_modules/@fastify/busboy/lib/utils/decodeText.js new file mode 100644 index 0000000..eac7d35 --- /dev/null +++ b/node_modules/@fastify/busboy/lib/utils/decodeText.js @@ -0,0 +1,114 @@ +'use strict' + +// Node has always utf-8 +const utf8Decoder = new TextDecoder('utf-8') +const textDecoders = new Map([ + ['utf-8', utf8Decoder], + ['utf8', utf8Decoder] +]) + +function getDecoder (charset) { + let lc + while (true) { + switch (charset) { + case 'utf-8': + case 'utf8': + return decoders.utf8 + case 'latin1': + case 'ascii': // TODO: Make these a separate, strict decoder? + case 'us-ascii': + case 'iso-8859-1': + case 'iso8859-1': + case 'iso88591': + case 'iso_8859-1': + case 'windows-1252': + case 'iso_8859-1:1987': + case 'cp1252': + case 'x-cp1252': + return decoders.latin1 + case 'utf16le': + case 'utf-16le': + case 'ucs2': + case 'ucs-2': + return decoders.utf16le + case 'base64': + return decoders.base64 + default: + if (lc === undefined) { + lc = true + charset = charset.toLowerCase() + continue + } + return decoders.other.bind(charset) + } + } +} + +const decoders = { + utf8: (data, sourceEncoding) => { + if (data.length === 0) { + return '' + } + if (typeof data === 'string') { + data = Buffer.from(data, sourceEncoding) + } + return data.utf8Slice(0, data.length) + }, + + latin1: (data, sourceEncoding) => { + if (data.length === 0) { + return '' + } + if (typeof data === 'string') { + return data + } + return data.latin1Slice(0, data.length) + }, + + utf16le: (data, sourceEncoding) => { + if (data.length === 0) { + return '' + } + if (typeof data === 'string') { + data = Buffer.from(data, sourceEncoding) + } + return data.ucs2Slice(0, data.length) + }, + + base64: (data, sourceEncoding) => { + if (data.length === 0) { + return '' + } + if (typeof data === 'string') { + data = Buffer.from(data, sourceEncoding) + } + return data.base64Slice(0, data.length) + }, + + other: (data, sourceEncoding) => { + if (data.length === 0) { + return '' + } + if (typeof data === 'string') { + data = Buffer.from(data, sourceEncoding) + } + + if (textDecoders.has(this.toString())) { + try { + return textDecoders.get(this).decode(data) + } catch {} + } + return typeof data === 'string' + ? 
data + : data.toString() + } +} + +function decodeText (text, sourceEncoding, destEncoding) { + if (text) { + return getDecoder(destEncoding)(text, sourceEncoding) + } + return text +} + +module.exports = decodeText diff --git a/node_modules/@fastify/busboy/lib/utils/getLimit.js b/node_modules/@fastify/busboy/lib/utils/getLimit.js new file mode 100644 index 0000000..cb64fd6 --- /dev/null +++ b/node_modules/@fastify/busboy/lib/utils/getLimit.js @@ -0,0 +1,16 @@ +'use strict' + +module.exports = function getLimit (limits, name, defaultLimit) { + if ( + !limits || + limits[name] === undefined || + limits[name] === null + ) { return defaultLimit } + + if ( + typeof limits[name] !== 'number' || + isNaN(limits[name]) + ) { throw new TypeError('Limit ' + name + ' is not a valid number') } + + return limits[name] +} diff --git a/node_modules/@fastify/busboy/lib/utils/parseParams.js b/node_modules/@fastify/busboy/lib/utils/parseParams.js new file mode 100644 index 0000000..1698e62 --- /dev/null +++ b/node_modules/@fastify/busboy/lib/utils/parseParams.js @@ -0,0 +1,196 @@ +/* eslint-disable object-property-newline */ +'use strict' + +const decodeText = require('./decodeText') + +const RE_ENCODED = /%[a-fA-F0-9][a-fA-F0-9]/g + +const EncodedLookup = { + '%00': '\x00', '%01': '\x01', '%02': '\x02', '%03': '\x03', '%04': '\x04', + '%05': '\x05', '%06': '\x06', '%07': '\x07', '%08': '\x08', '%09': '\x09', + '%0a': '\x0a', '%0A': '\x0a', '%0b': '\x0b', '%0B': '\x0b', '%0c': '\x0c', + '%0C': '\x0c', '%0d': '\x0d', '%0D': '\x0d', '%0e': '\x0e', '%0E': '\x0e', + '%0f': '\x0f', '%0F': '\x0f', '%10': '\x10', '%11': '\x11', '%12': '\x12', + '%13': '\x13', '%14': '\x14', '%15': '\x15', '%16': '\x16', '%17': '\x17', + '%18': '\x18', '%19': '\x19', '%1a': '\x1a', '%1A': '\x1a', '%1b': '\x1b', + '%1B': '\x1b', '%1c': '\x1c', '%1C': '\x1c', '%1d': '\x1d', '%1D': '\x1d', + '%1e': '\x1e', '%1E': '\x1e', '%1f': '\x1f', '%1F': '\x1f', '%20': '\x20', + '%21': '\x21', '%22': '\x22', '%23': '\x23', '%24': '\x24', '%25': '\x25', + '%26': '\x26', '%27': '\x27', '%28': '\x28', '%29': '\x29', '%2a': '\x2a', + '%2A': '\x2a', '%2b': '\x2b', '%2B': '\x2b', '%2c': '\x2c', '%2C': '\x2c', + '%2d': '\x2d', '%2D': '\x2d', '%2e': '\x2e', '%2E': '\x2e', '%2f': '\x2f', + '%2F': '\x2f', '%30': '\x30', '%31': '\x31', '%32': '\x32', '%33': '\x33', + '%34': '\x34', '%35': '\x35', '%36': '\x36', '%37': '\x37', '%38': '\x38', + '%39': '\x39', '%3a': '\x3a', '%3A': '\x3a', '%3b': '\x3b', '%3B': '\x3b', + '%3c': '\x3c', '%3C': '\x3c', '%3d': '\x3d', '%3D': '\x3d', '%3e': '\x3e', + '%3E': '\x3e', '%3f': '\x3f', '%3F': '\x3f', '%40': '\x40', '%41': '\x41', + '%42': '\x42', '%43': '\x43', '%44': '\x44', '%45': '\x45', '%46': '\x46', + '%47': '\x47', '%48': '\x48', '%49': '\x49', '%4a': '\x4a', '%4A': '\x4a', + '%4b': '\x4b', '%4B': '\x4b', '%4c': '\x4c', '%4C': '\x4c', '%4d': '\x4d', + '%4D': '\x4d', '%4e': '\x4e', '%4E': '\x4e', '%4f': '\x4f', '%4F': '\x4f', + '%50': '\x50', '%51': '\x51', '%52': '\x52', '%53': '\x53', '%54': '\x54', + '%55': '\x55', '%56': '\x56', '%57': '\x57', '%58': '\x58', '%59': '\x59', + '%5a': '\x5a', '%5A': '\x5a', '%5b': '\x5b', '%5B': '\x5b', '%5c': '\x5c', + '%5C': '\x5c', '%5d': '\x5d', '%5D': '\x5d', '%5e': '\x5e', '%5E': '\x5e', + '%5f': '\x5f', '%5F': '\x5f', '%60': '\x60', '%61': '\x61', '%62': '\x62', + '%63': '\x63', '%64': '\x64', '%65': '\x65', '%66': '\x66', '%67': '\x67', + '%68': '\x68', '%69': '\x69', '%6a': '\x6a', '%6A': '\x6a', '%6b': '\x6b', + '%6B': '\x6b', '%6c': '\x6c', '%6C': '\x6c', '%6d': 
'\x6d', '%6D': '\x6d', + '%6e': '\x6e', '%6E': '\x6e', '%6f': '\x6f', '%6F': '\x6f', '%70': '\x70', + '%71': '\x71', '%72': '\x72', '%73': '\x73', '%74': '\x74', '%75': '\x75', + '%76': '\x76', '%77': '\x77', '%78': '\x78', '%79': '\x79', '%7a': '\x7a', + '%7A': '\x7a', '%7b': '\x7b', '%7B': '\x7b', '%7c': '\x7c', '%7C': '\x7c', + '%7d': '\x7d', '%7D': '\x7d', '%7e': '\x7e', '%7E': '\x7e', '%7f': '\x7f', + '%7F': '\x7f', '%80': '\x80', '%81': '\x81', '%82': '\x82', '%83': '\x83', + '%84': '\x84', '%85': '\x85', '%86': '\x86', '%87': '\x87', '%88': '\x88', + '%89': '\x89', '%8a': '\x8a', '%8A': '\x8a', '%8b': '\x8b', '%8B': '\x8b', + '%8c': '\x8c', '%8C': '\x8c', '%8d': '\x8d', '%8D': '\x8d', '%8e': '\x8e', + '%8E': '\x8e', '%8f': '\x8f', '%8F': '\x8f', '%90': '\x90', '%91': '\x91', + '%92': '\x92', '%93': '\x93', '%94': '\x94', '%95': '\x95', '%96': '\x96', + '%97': '\x97', '%98': '\x98', '%99': '\x99', '%9a': '\x9a', '%9A': '\x9a', + '%9b': '\x9b', '%9B': '\x9b', '%9c': '\x9c', '%9C': '\x9c', '%9d': '\x9d', + '%9D': '\x9d', '%9e': '\x9e', '%9E': '\x9e', '%9f': '\x9f', '%9F': '\x9f', + '%a0': '\xa0', '%A0': '\xa0', '%a1': '\xa1', '%A1': '\xa1', '%a2': '\xa2', + '%A2': '\xa2', '%a3': '\xa3', '%A3': '\xa3', '%a4': '\xa4', '%A4': '\xa4', + '%a5': '\xa5', '%A5': '\xa5', '%a6': '\xa6', '%A6': '\xa6', '%a7': '\xa7', + '%A7': '\xa7', '%a8': '\xa8', '%A8': '\xa8', '%a9': '\xa9', '%A9': '\xa9', + '%aa': '\xaa', '%Aa': '\xaa', '%aA': '\xaa', '%AA': '\xaa', '%ab': '\xab', + '%Ab': '\xab', '%aB': '\xab', '%AB': '\xab', '%ac': '\xac', '%Ac': '\xac', + '%aC': '\xac', '%AC': '\xac', '%ad': '\xad', '%Ad': '\xad', '%aD': '\xad', + '%AD': '\xad', '%ae': '\xae', '%Ae': '\xae', '%aE': '\xae', '%AE': '\xae', + '%af': '\xaf', '%Af': '\xaf', '%aF': '\xaf', '%AF': '\xaf', '%b0': '\xb0', + '%B0': '\xb0', '%b1': '\xb1', '%B1': '\xb1', '%b2': '\xb2', '%B2': '\xb2', + '%b3': '\xb3', '%B3': '\xb3', '%b4': '\xb4', '%B4': '\xb4', '%b5': '\xb5', + '%B5': '\xb5', '%b6': '\xb6', '%B6': '\xb6', '%b7': '\xb7', '%B7': '\xb7', + '%b8': '\xb8', '%B8': '\xb8', '%b9': '\xb9', '%B9': '\xb9', '%ba': '\xba', + '%Ba': '\xba', '%bA': '\xba', '%BA': '\xba', '%bb': '\xbb', '%Bb': '\xbb', + '%bB': '\xbb', '%BB': '\xbb', '%bc': '\xbc', '%Bc': '\xbc', '%bC': '\xbc', + '%BC': '\xbc', '%bd': '\xbd', '%Bd': '\xbd', '%bD': '\xbd', '%BD': '\xbd', + '%be': '\xbe', '%Be': '\xbe', '%bE': '\xbe', '%BE': '\xbe', '%bf': '\xbf', + '%Bf': '\xbf', '%bF': '\xbf', '%BF': '\xbf', '%c0': '\xc0', '%C0': '\xc0', + '%c1': '\xc1', '%C1': '\xc1', '%c2': '\xc2', '%C2': '\xc2', '%c3': '\xc3', + '%C3': '\xc3', '%c4': '\xc4', '%C4': '\xc4', '%c5': '\xc5', '%C5': '\xc5', + '%c6': '\xc6', '%C6': '\xc6', '%c7': '\xc7', '%C7': '\xc7', '%c8': '\xc8', + '%C8': '\xc8', '%c9': '\xc9', '%C9': '\xc9', '%ca': '\xca', '%Ca': '\xca', + '%cA': '\xca', '%CA': '\xca', '%cb': '\xcb', '%Cb': '\xcb', '%cB': '\xcb', + '%CB': '\xcb', '%cc': '\xcc', '%Cc': '\xcc', '%cC': '\xcc', '%CC': '\xcc', + '%cd': '\xcd', '%Cd': '\xcd', '%cD': '\xcd', '%CD': '\xcd', '%ce': '\xce', + '%Ce': '\xce', '%cE': '\xce', '%CE': '\xce', '%cf': '\xcf', '%Cf': '\xcf', + '%cF': '\xcf', '%CF': '\xcf', '%d0': '\xd0', '%D0': '\xd0', '%d1': '\xd1', + '%D1': '\xd1', '%d2': '\xd2', '%D2': '\xd2', '%d3': '\xd3', '%D3': '\xd3', + '%d4': '\xd4', '%D4': '\xd4', '%d5': '\xd5', '%D5': '\xd5', '%d6': '\xd6', + '%D6': '\xd6', '%d7': '\xd7', '%D7': '\xd7', '%d8': '\xd8', '%D8': '\xd8', + '%d9': '\xd9', '%D9': '\xd9', '%da': '\xda', '%Da': '\xda', '%dA': '\xda', + '%DA': '\xda', '%db': '\xdb', '%Db': '\xdb', '%dB': '\xdb', 
'%DB': '\xdb', + '%dc': '\xdc', '%Dc': '\xdc', '%dC': '\xdc', '%DC': '\xdc', '%dd': '\xdd', + '%Dd': '\xdd', '%dD': '\xdd', '%DD': '\xdd', '%de': '\xde', '%De': '\xde', + '%dE': '\xde', '%DE': '\xde', '%df': '\xdf', '%Df': '\xdf', '%dF': '\xdf', + '%DF': '\xdf', '%e0': '\xe0', '%E0': '\xe0', '%e1': '\xe1', '%E1': '\xe1', + '%e2': '\xe2', '%E2': '\xe2', '%e3': '\xe3', '%E3': '\xe3', '%e4': '\xe4', + '%E4': '\xe4', '%e5': '\xe5', '%E5': '\xe5', '%e6': '\xe6', '%E6': '\xe6', + '%e7': '\xe7', '%E7': '\xe7', '%e8': '\xe8', '%E8': '\xe8', '%e9': '\xe9', + '%E9': '\xe9', '%ea': '\xea', '%Ea': '\xea', '%eA': '\xea', '%EA': '\xea', + '%eb': '\xeb', '%Eb': '\xeb', '%eB': '\xeb', '%EB': '\xeb', '%ec': '\xec', + '%Ec': '\xec', '%eC': '\xec', '%EC': '\xec', '%ed': '\xed', '%Ed': '\xed', + '%eD': '\xed', '%ED': '\xed', '%ee': '\xee', '%Ee': '\xee', '%eE': '\xee', + '%EE': '\xee', '%ef': '\xef', '%Ef': '\xef', '%eF': '\xef', '%EF': '\xef', + '%f0': '\xf0', '%F0': '\xf0', '%f1': '\xf1', '%F1': '\xf1', '%f2': '\xf2', + '%F2': '\xf2', '%f3': '\xf3', '%F3': '\xf3', '%f4': '\xf4', '%F4': '\xf4', + '%f5': '\xf5', '%F5': '\xf5', '%f6': '\xf6', '%F6': '\xf6', '%f7': '\xf7', + '%F7': '\xf7', '%f8': '\xf8', '%F8': '\xf8', '%f9': '\xf9', '%F9': '\xf9', + '%fa': '\xfa', '%Fa': '\xfa', '%fA': '\xfa', '%FA': '\xfa', '%fb': '\xfb', + '%Fb': '\xfb', '%fB': '\xfb', '%FB': '\xfb', '%fc': '\xfc', '%Fc': '\xfc', + '%fC': '\xfc', '%FC': '\xfc', '%fd': '\xfd', '%Fd': '\xfd', '%fD': '\xfd', + '%FD': '\xfd', '%fe': '\xfe', '%Fe': '\xfe', '%fE': '\xfe', '%FE': '\xfe', + '%ff': '\xff', '%Ff': '\xff', '%fF': '\xff', '%FF': '\xff' +} + +function encodedReplacer (match) { + return EncodedLookup[match] +} + +const STATE_KEY = 0 +const STATE_VALUE = 1 +const STATE_CHARSET = 2 +const STATE_LANG = 3 + +function parseParams (str) { + const res = [] + let state = STATE_KEY + let charset = '' + let inquote = false + let escaping = false + let p = 0 + let tmp = '' + const len = str.length + + for (var i = 0; i < len; ++i) { // eslint-disable-line no-var + const char = str[i] + if (char === '\\' && inquote) { + if (escaping) { escaping = false } else { + escaping = true + continue + } + } else if (char === '"') { + if (!escaping) { + if (inquote) { + inquote = false + state = STATE_KEY + } else { inquote = true } + continue + } else { escaping = false } + } else { + if (escaping && inquote) { tmp += '\\' } + escaping = false + if ((state === STATE_CHARSET || state === STATE_LANG) && char === "'") { + if (state === STATE_CHARSET) { + state = STATE_LANG + charset = tmp.substring(1) + } else { state = STATE_VALUE } + tmp = '' + continue + } else if (state === STATE_KEY && + (char === '*' || char === '=') && + res.length) { + state = char === '*' + ? 
STATE_CHARSET + : STATE_VALUE + res[p] = [tmp, undefined] + tmp = '' + continue + } else if (!inquote && char === ';') { + state = STATE_KEY + if (charset) { + if (tmp.length) { + tmp = decodeText(tmp.replace(RE_ENCODED, encodedReplacer), + 'binary', + charset) + } + charset = '' + } else if (tmp.length) { + tmp = decodeText(tmp, 'binary', 'utf8') + } + if (res[p] === undefined) { res[p] = tmp } else { res[p][1] = tmp } + tmp = '' + ++p + continue + } else if (!inquote && (char === ' ' || char === '\t')) { continue } + } + tmp += char + } + if (charset && tmp.length) { + tmp = decodeText(tmp.replace(RE_ENCODED, encodedReplacer), + 'binary', + charset) + } else if (tmp) { + tmp = decodeText(tmp, 'binary', 'utf8') + } + + if (res[p] === undefined) { + if (tmp) { res[p] = tmp } + } else { res[p][1] = tmp } + + return res +} + +module.exports = parseParams diff --git a/node_modules/@fastify/busboy/package.json b/node_modules/@fastify/busboy/package.json new file mode 100644 index 0000000..83693ac --- /dev/null +++ b/node_modules/@fastify/busboy/package.json @@ -0,0 +1,86 @@ +{ + "name": "@fastify/busboy", + "version": "2.1.1", + "private": false, + "author": "Brian White ", + "contributors": [ + { + "name": "Igor Savin", + "email": "kibertoad@gmail.com", + "url": "https://github.com/kibertoad" + }, + { + "name": "Aras Abbasi", + "email": "aras.abbasi@gmail.com", + "url": "https://github.com/uzlopak" + } + ], + "description": "A streaming parser for HTML form data for node.js", + "main": "lib/main", + "type": "commonjs", + "types": "lib/main.d.ts", + "scripts": { + "bench:busboy": "cd benchmarks && npm install && npm run benchmark-fastify", + "bench:dicer": "node bench/dicer/dicer-bench-multipart-parser.js", + "coveralls": "nyc report --reporter=lcov", + "lint": "npm run lint:standard", + "lint:everything": "npm run lint && npm run test:types", + "lint:fix": "standard --fix", + "lint:standard": "standard --verbose | snazzy", + "test:mocha": "tap", + "test:types": "tsd", + "test:coverage": "nyc npm run test", + "test": "npm run test:mocha" + }, + "engines": { + "node": ">=14" + }, + "devDependencies": { + "@types/node": "^20.1.0", + "busboy": "^1.0.0", + "photofinish": "^1.8.0", + "snazzy": "^9.0.0", + "standard": "^17.0.0", + "tap": "^16.3.8", + "tinybench": "^2.5.1", + "tsd": "^0.30.0", + "typescript": "^5.0.2" + }, + "keywords": [ + "uploads", + "forms", + "multipart", + "form-data" + ], + "license": "MIT", + "repository": { + "type": "git", + "url": "git+https://github.com/fastify/busboy.git" + }, + "tsd": { + "directory": "test/types", + "compilerOptions": { + "esModuleInterop": false, + "module": "commonjs", + "target": "ES2017" + } + }, + "standard": { + "globals": [ + "describe", + "it" + ], + "ignore": [ + "bench" + ] + }, + "files": [ + "README.md", + "LICENSE", + "lib/*", + "deps/encoding/*", + "deps/dicer/lib", + "deps/streamsearch/", + "deps/dicer/LICENSE" + ] +} diff --git a/node_modules/@octokit/auth-token/LICENSE b/node_modules/@octokit/auth-token/LICENSE new file mode 100644 index 0000000..ef2c18e --- /dev/null +++ b/node_modules/@octokit/auth-token/LICENSE @@ -0,0 +1,21 @@ +The MIT License + +Copyright (c) 2019 Octokit contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit 
persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/@octokit/auth-token/README.md b/node_modules/@octokit/auth-token/README.md new file mode 100644 index 0000000..a1f6d35 --- /dev/null +++ b/node_modules/@octokit/auth-token/README.md @@ -0,0 +1,290 @@ +# auth-token.js + +> GitHub API token authentication for browsers and Node.js + +[![@latest](https://img.shields.io/npm/v/@octokit/auth-token.svg)](https://www.npmjs.com/package/@octokit/auth-token) +[![Build Status](https://github.com/octokit/auth-token.js/workflows/Test/badge.svg)](https://github.com/octokit/auth-token.js/actions?query=workflow%3ATest) + +`@octokit/auth-token` is the simplest of [GitHub’s authentication strategies](https://github.com/octokit/auth.js). + +It is useful if you want to support multiple authentication strategies, as it’s API is compatible with its sibling packages for [basic](https://github.com/octokit/auth-basic.js), [GitHub App](https://github.com/octokit/auth-app.js) and [OAuth app](https://github.com/octokit/auth.js) authentication. + + + +- [Usage](#usage) +- [`createTokenAuth(token) options`](#createtokenauthtoken-options) +- [`auth()`](#auth) +- [Authentication object](#authentication-object) +- [`auth.hook(request, route, options)` or `auth.hook(request, options)`](#authhookrequest-route-options-or-authhookrequest-options) +- [Find more information](#find-more-information) + - [Find out what scopes are enabled for oauth tokens](#find-out-what-scopes-are-enabled-for-oauth-tokens) + - [Find out if token is a personal access token or if it belongs to an OAuth app](#find-out-if-token-is-a-personal-access-token-or-if-it-belongs-to-an-oauth-app) + - [Find out what permissions are enabled for a repository](#find-out-what-permissions-are-enabled-for-a-repository) + - [Use token for git operations](#use-token-for-git-operations) +- [License](#license) + + + +## Usage + + + + + + +
+Browsers + + +Load `@octokit/auth-token` directly from [cdn.skypack.dev](https://cdn.skypack.dev) + +```html + +``` + +
+Node + + +Install with npm install @octokit/auth-token + +```js +const { createTokenAuth } = require("@octokit/auth-token"); +// or: import { createTokenAuth } from "@octokit/auth-token"; +``` + +
+ +```js +const auth = createTokenAuth("ghp_PersonalAccessToken01245678900000000"); +const authentication = await auth(); +// { +// type: 'token', +// token: 'ghp_PersonalAccessToken01245678900000000', +// tokenType: 'oauth' +// } +``` + +## `createTokenAuth(token) options` + +The `createTokenAuth` method accepts a single argument of type string, which is the token. The passed token can be one of the following: + +- [Personal access token](https://help.github.com/en/articles/creating-a-personal-access-token-for-the-command-line) +- [OAuth access token](https://developer.github.com/apps/building-oauth-apps/authorizing-oauth-apps/) +- [GITHUB_TOKEN provided to GitHub Actions](https://developer.github.com/actions/creating-github-actions/accessing-the-runtime-environment/#environment-variables) +- Installation access token ([server-to-server](https://developer.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-an-installation)) +- User authentication for installation ([user-to-server](https://docs.github.com/en/developers/apps/building-github-apps/identifying-and-authorizing-users-for-github-apps)) + +Examples + +```js +// Personal access token or OAuth access token +createTokenAuth("ghp_PersonalAccessToken01245678900000000"); +// { +// type: 'token', +// token: 'ghp_PersonalAccessToken01245678900000000', +// tokenType: 'oauth' +// } + +// Installation access token or GitHub Action token +createTokenAuth("ghs_InstallallationOrActionToken00000000"); +// { +// type: 'token', +// token: 'ghs_InstallallationOrActionToken00000000', +// tokenType: 'installation' +// } + +// Installation access token or GitHub Action token +createTokenAuth("ghu_InstallationUserToServer000000000000"); +// { +// type: 'token', +// token: 'ghu_InstallationUserToServer000000000000', +// tokenType: 'user-to-server' +// } +``` + +## `auth()` + +The `auth()` method has no options. It returns a promise which resolves with the the authentication object. + +## Authentication object + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ name + + type + + description +
+ type + + string + + "token" +
+ token + + string + + The provided token. +
+ tokenType + + string + + Can be either "oauth" for personal access tokens and OAuth tokens, "installation" for installation access tokens (includes GITHUB_TOKEN provided to GitHub Actions), "app" for a GitHub App JSON Web Token, or "user-to-server" for a user authentication token through an app installation. +
+ +## `auth.hook(request, route, options)` or `auth.hook(request, options)` + +`auth.hook()` hooks directly into the request life cycle. It authenticates the request using the provided token. + +The `request` option is an instance of [`@octokit/request`](https://github.com/octokit/request.js#readme). The `route`/`options` parameters are the same as for the [`request()` method](https://github.com/octokit/request.js#request). + +`auth.hook()` can be called directly to send an authenticated request + +```js +const { data: authorizations } = await auth.hook( + request, + "GET /authorizations" +); +``` + +Or it can be passed as option to [`request()`](https://github.com/octokit/request.js#request). + +```js +const requestWithAuth = request.defaults({ + request: { + hook: auth.hook, + }, +}); + +const { data: authorizations } = await requestWithAuth("GET /authorizations"); +``` + +## Find more information + +`auth()` does not send any requests, it only transforms the provided token string into an authentication object. + +Here is a list of things you can do to retrieve further information + +### Find out what scopes are enabled for oauth tokens + +Note that this does not work for installations. There is no way to retrieve permissions based on an installation access tokens. + +```js +const TOKEN = "ghp_PersonalAccessToken01245678900000000"; + +const auth = createTokenAuth(TOKEN); +const authentication = await auth(); + +const response = await request("HEAD /", { + headers: authentication.headers, +}); +const scopes = response.headers["x-oauth-scopes"].split(/,\s+/); + +if (scopes.length) { + console.log( + `"${TOKEN}" has ${scopes.length} scopes enabled: ${scopes.join(", ")}` + ); +} else { + console.log(`"${TOKEN}" has no scopes enabled`); +} +``` + +### Find out if token is a personal access token or if it belongs to an OAuth app + +```js +const TOKEN = "ghp_PersonalAccessToken01245678900000000"; + +const auth = createTokenAuth(TOKEN); +const authentication = await auth(); + +const response = await request("HEAD /", { + headers: authentication.headers, +}); +const clientId = response.headers["x-oauth-client-id"]; + +if (clientId) { + console.log( + `"${token}" is an OAuth token, its app’s client_id is ${clientId}.` + ); +} else { + console.log(`"${token}" is a personal access token`); +} +``` + +### Find out what permissions are enabled for a repository + +Note that the `permissions` key is not set when authenticated using an installation access token. + +```js +const TOKEN = "ghp_PersonalAccessToken01245678900000000"; + +const auth = createTokenAuth(TOKEN); +const authentication = await auth(); + +const response = await request("GET /repos/{owner}/{repo}", { + owner: 'octocat', + repo: 'hello-world' + headers: authentication.headers +}); + +console.log(response.data.permissions) +// { +// admin: true, +// push: true, +// pull: true +// } +``` + +### Use token for git operations + +Both OAuth and installation access tokens can be used for git operations. However, when using with an installation, [the token must be prefixed with `x-access-token`](https://developer.github.com/apps/building-github-apps/authenticating-with-github-apps/#http-based-git-access-by-an-installation). + +This example is using the [`execa`](https://github.com/sindresorhus/execa) package to run a `git push` command. + +```js +const TOKEN = "ghp_PersonalAccessToken01245678900000000"; + +const auth = createTokenAuth(TOKEN); +const { token, tokenType } = await auth(); +const tokenWithPrefix = + tokenType === "installation" ? 
`x-access-token:${token}` : token; + +const repositoryUrl = `https://${tokenWithPrefix}@github.com/octocat/hello-world.git`; + +const { stdout } = await execa("git", ["push", repositoryUrl]); +console.log(stdout); +``` + +## License + +[MIT](LICENSE) diff --git a/node_modules/@octokit/auth-token/dist-node/index.js b/node_modules/@octokit/auth-token/dist-node/index.js new file mode 100644 index 0000000..af0f0a6 --- /dev/null +++ b/node_modules/@octokit/auth-token/dist-node/index.js @@ -0,0 +1,55 @@ +'use strict'; + +Object.defineProperty(exports, '__esModule', { value: true }); + +const REGEX_IS_INSTALLATION_LEGACY = /^v1\./; +const REGEX_IS_INSTALLATION = /^ghs_/; +const REGEX_IS_USER_TO_SERVER = /^ghu_/; +async function auth(token) { + const isApp = token.split(/\./).length === 3; + const isInstallation = REGEX_IS_INSTALLATION_LEGACY.test(token) || REGEX_IS_INSTALLATION.test(token); + const isUserToServer = REGEX_IS_USER_TO_SERVER.test(token); + const tokenType = isApp ? "app" : isInstallation ? "installation" : isUserToServer ? "user-to-server" : "oauth"; + return { + type: "token", + token: token, + tokenType + }; +} + +/** + * Prefix token for usage in the Authorization header + * + * @param token OAuth token or JSON Web Token + */ +function withAuthorizationPrefix(token) { + if (token.split(/\./).length === 3) { + return `bearer ${token}`; + } + + return `token ${token}`; +} + +async function hook(token, request, route, parameters) { + const endpoint = request.endpoint.merge(route, parameters); + endpoint.headers.authorization = withAuthorizationPrefix(token); + return request(endpoint); +} + +const createTokenAuth = function createTokenAuth(token) { + if (!token) { + throw new Error("[@octokit/auth-token] No token passed to createTokenAuth"); + } + + if (typeof token !== "string") { + throw new Error("[@octokit/auth-token] Token passed to createTokenAuth is not a string"); + } + + token = token.replace(/^(token|bearer) +/i, ""); + return Object.assign(auth.bind(null, token), { + hook: hook.bind(null, token) + }); +}; + +exports.createTokenAuth = createTokenAuth; +//# sourceMappingURL=index.js.map diff --git a/node_modules/@octokit/auth-token/dist-node/index.js.map b/node_modules/@octokit/auth-token/dist-node/index.js.map new file mode 100644 index 0000000..af0c2e2 --- /dev/null +++ b/node_modules/@octokit/auth-token/dist-node/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sources":["../dist-src/auth.js","../dist-src/with-authorization-prefix.js","../dist-src/hook.js","../dist-src/index.js"],"sourcesContent":["const REGEX_IS_INSTALLATION_LEGACY = /^v1\\./;\nconst REGEX_IS_INSTALLATION = /^ghs_/;\nconst REGEX_IS_USER_TO_SERVER = /^ghu_/;\nexport async function auth(token) {\n const isApp = token.split(/\\./).length === 3;\n const isInstallation = REGEX_IS_INSTALLATION_LEGACY.test(token) ||\n REGEX_IS_INSTALLATION.test(token);\n const isUserToServer = REGEX_IS_USER_TO_SERVER.test(token);\n const tokenType = isApp\n ? \"app\"\n : isInstallation\n ? \"installation\"\n : isUserToServer\n ? 
\"user-to-server\"\n : \"oauth\";\n return {\n type: \"token\",\n token: token,\n tokenType,\n };\n}\n","/**\n * Prefix token for usage in the Authorization header\n *\n * @param token OAuth token or JSON Web Token\n */\nexport function withAuthorizationPrefix(token) {\n if (token.split(/\\./).length === 3) {\n return `bearer ${token}`;\n }\n return `token ${token}`;\n}\n","import { withAuthorizationPrefix } from \"./with-authorization-prefix\";\nexport async function hook(token, request, route, parameters) {\n const endpoint = request.endpoint.merge(route, parameters);\n endpoint.headers.authorization = withAuthorizationPrefix(token);\n return request(endpoint);\n}\n","import { auth } from \"./auth\";\nimport { hook } from \"./hook\";\nexport const createTokenAuth = function createTokenAuth(token) {\n if (!token) {\n throw new Error(\"[@octokit/auth-token] No token passed to createTokenAuth\");\n }\n if (typeof token !== \"string\") {\n throw new Error(\"[@octokit/auth-token] Token passed to createTokenAuth is not a string\");\n }\n token = token.replace(/^(token|bearer) +/i, \"\");\n return Object.assign(auth.bind(null, token), {\n hook: hook.bind(null, token),\n });\n};\n"],"names":["REGEX_IS_INSTALLATION_LEGACY","REGEX_IS_INSTALLATION","REGEX_IS_USER_TO_SERVER","auth","token","isApp","split","length","isInstallation","test","isUserToServer","tokenType","type","withAuthorizationPrefix","hook","request","route","parameters","endpoint","merge","headers","authorization","createTokenAuth","Error","replace","Object","assign","bind"],"mappings":";;;;AAAA,MAAMA,4BAA4B,GAAG,OAArC;AACA,MAAMC,qBAAqB,GAAG,OAA9B;AACA,MAAMC,uBAAuB,GAAG,OAAhC;AACO,eAAeC,IAAf,CAAoBC,KAApB,EAA2B;AAC9B,QAAMC,KAAK,GAAGD,KAAK,CAACE,KAAN,CAAY,IAAZ,EAAkBC,MAAlB,KAA6B,CAA3C;AACA,QAAMC,cAAc,GAAGR,4BAA4B,CAACS,IAA7B,CAAkCL,KAAlC,KACnBH,qBAAqB,CAACQ,IAAtB,CAA2BL,KAA3B,CADJ;AAEA,QAAMM,cAAc,GAAGR,uBAAuB,CAACO,IAAxB,CAA6BL,KAA7B,CAAvB;AACA,QAAMO,SAAS,GAAGN,KAAK,GACjB,KADiB,GAEjBG,cAAc,GACV,cADU,GAEVE,cAAc,GACV,gBADU,GAEV,OANd;AAOA,SAAO;AACHE,IAAAA,IAAI,EAAE,OADH;AAEHR,IAAAA,KAAK,EAAEA,KAFJ;AAGHO,IAAAA;AAHG,GAAP;AAKH;;ACpBD;AACA;AACA;AACA;AACA;AACA,AAAO,SAASE,uBAAT,CAAiCT,KAAjC,EAAwC;AAC3C,MAAIA,KAAK,CAACE,KAAN,CAAY,IAAZ,EAAkBC,MAAlB,KAA6B,CAAjC,EAAoC;AAChC,WAAQ,UAASH,KAAM,EAAvB;AACH;;AACD,SAAQ,SAAQA,KAAM,EAAtB;AACH;;ACTM,eAAeU,IAAf,CAAoBV,KAApB,EAA2BW,OAA3B,EAAoCC,KAApC,EAA2CC,UAA3C,EAAuD;AAC1D,QAAMC,QAAQ,GAAGH,OAAO,CAACG,QAAR,CAAiBC,KAAjB,CAAuBH,KAAvB,EAA8BC,UAA9B,CAAjB;AACAC,EAAAA,QAAQ,CAACE,OAAT,CAAiBC,aAAjB,GAAiCR,uBAAuB,CAACT,KAAD,CAAxD;AACA,SAAOW,OAAO,CAACG,QAAD,CAAd;AACH;;MCHYI,eAAe,GAAG,SAASA,eAAT,CAAyBlB,KAAzB,EAAgC;AAC3D,MAAI,CAACA,KAAL,EAAY;AACR,UAAM,IAAImB,KAAJ,CAAU,0DAAV,CAAN;AACH;;AACD,MAAI,OAAOnB,KAAP,KAAiB,QAArB,EAA+B;AAC3B,UAAM,IAAImB,KAAJ,CAAU,uEAAV,CAAN;AACH;;AACDnB,EAAAA,KAAK,GAAGA,KAAK,CAACoB,OAAN,CAAc,oBAAd,EAAoC,EAApC,CAAR;AACA,SAAOC,MAAM,CAACC,MAAP,CAAcvB,IAAI,CAACwB,IAAL,CAAU,IAAV,EAAgBvB,KAAhB,CAAd,EAAsC;AACzCU,IAAAA,IAAI,EAAEA,IAAI,CAACa,IAAL,CAAU,IAAV,EAAgBvB,KAAhB;AADmC,GAAtC,CAAP;AAGH,CAXM;;;;"} \ No newline at end of file diff --git a/node_modules/@octokit/auth-token/dist-src/auth.js b/node_modules/@octokit/auth-token/dist-src/auth.js new file mode 100644 index 0000000..b22ce98 --- /dev/null +++ b/node_modules/@octokit/auth-token/dist-src/auth.js @@ -0,0 +1,21 @@ +const REGEX_IS_INSTALLATION_LEGACY = /^v1\./; +const REGEX_IS_INSTALLATION = /^ghs_/; +const REGEX_IS_USER_TO_SERVER = /^ghu_/; +export async function auth(token) { + const isApp = token.split(/\./).length === 3; + const isInstallation = 
REGEX_IS_INSTALLATION_LEGACY.test(token) || + REGEX_IS_INSTALLATION.test(token); + const isUserToServer = REGEX_IS_USER_TO_SERVER.test(token); + const tokenType = isApp + ? "app" + : isInstallation + ? "installation" + : isUserToServer + ? "user-to-server" + : "oauth"; + return { + type: "token", + token: token, + tokenType, + }; +} diff --git a/node_modules/@octokit/auth-token/dist-src/hook.js b/node_modules/@octokit/auth-token/dist-src/hook.js new file mode 100644 index 0000000..f8e47f0 --- /dev/null +++ b/node_modules/@octokit/auth-token/dist-src/hook.js @@ -0,0 +1,6 @@ +import { withAuthorizationPrefix } from "./with-authorization-prefix"; +export async function hook(token, request, route, parameters) { + const endpoint = request.endpoint.merge(route, parameters); + endpoint.headers.authorization = withAuthorizationPrefix(token); + return request(endpoint); +} diff --git a/node_modules/@octokit/auth-token/dist-src/index.js b/node_modules/@octokit/auth-token/dist-src/index.js new file mode 100644 index 0000000..f2ddd63 --- /dev/null +++ b/node_modules/@octokit/auth-token/dist-src/index.js @@ -0,0 +1,14 @@ +import { auth } from "./auth"; +import { hook } from "./hook"; +export const createTokenAuth = function createTokenAuth(token) { + if (!token) { + throw new Error("[@octokit/auth-token] No token passed to createTokenAuth"); + } + if (typeof token !== "string") { + throw new Error("[@octokit/auth-token] Token passed to createTokenAuth is not a string"); + } + token = token.replace(/^(token|bearer) +/i, ""); + return Object.assign(auth.bind(null, token), { + hook: hook.bind(null, token), + }); +}; diff --git a/node_modules/@octokit/auth-token/dist-src/types.js b/node_modules/@octokit/auth-token/dist-src/types.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/node_modules/@octokit/auth-token/dist-src/types.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@octokit/auth-token/dist-src/with-authorization-prefix.js b/node_modules/@octokit/auth-token/dist-src/with-authorization-prefix.js new file mode 100644 index 0000000..9035813 --- /dev/null +++ b/node_modules/@octokit/auth-token/dist-src/with-authorization-prefix.js @@ -0,0 +1,11 @@ +/** + * Prefix token for usage in the Authorization header + * + * @param token OAuth token or JSON Web Token + */ +export function withAuthorizationPrefix(token) { + if (token.split(/\./).length === 3) { + return `bearer ${token}`; + } + return `token ${token}`; +} diff --git a/node_modules/@octokit/auth-token/dist-types/auth.d.ts b/node_modules/@octokit/auth-token/dist-types/auth.d.ts new file mode 100644 index 0000000..dc41835 --- /dev/null +++ b/node_modules/@octokit/auth-token/dist-types/auth.d.ts @@ -0,0 +1,2 @@ +import { Token, Authentication } from "./types"; +export declare function auth(token: Token): Promise; diff --git a/node_modules/@octokit/auth-token/dist-types/hook.d.ts b/node_modules/@octokit/auth-token/dist-types/hook.d.ts new file mode 100644 index 0000000..21e4b6f --- /dev/null +++ b/node_modules/@octokit/auth-token/dist-types/hook.d.ts @@ -0,0 +1,2 @@ +import { AnyResponse, EndpointOptions, RequestInterface, RequestParameters, Route, Token } from "./types"; +export declare function hook(token: Token, request: RequestInterface, route: Route | EndpointOptions, parameters?: RequestParameters): Promise; diff --git a/node_modules/@octokit/auth-token/dist-types/index.d.ts b/node_modules/@octokit/auth-token/dist-types/index.d.ts new file mode 100644 index 0000000..5999429 --- /dev/null +++ 
b/node_modules/@octokit/auth-token/dist-types/index.d.ts @@ -0,0 +1,7 @@ +import { StrategyInterface, Token, Authentication } from "./types"; +export declare type Types = { + StrategyOptions: Token; + AuthOptions: never; + Authentication: Authentication; +}; +export declare const createTokenAuth: StrategyInterface; diff --git a/node_modules/@octokit/auth-token/dist-types/types.d.ts b/node_modules/@octokit/auth-token/dist-types/types.d.ts new file mode 100644 index 0000000..0ae24de --- /dev/null +++ b/node_modules/@octokit/auth-token/dist-types/types.d.ts @@ -0,0 +1,33 @@ +import * as OctokitTypes from "@octokit/types"; +export declare type AnyResponse = OctokitTypes.OctokitResponse; +export declare type StrategyInterface = OctokitTypes.StrategyInterface<[ + Token +], [ +], Authentication>; +export declare type EndpointDefaults = OctokitTypes.EndpointDefaults; +export declare type EndpointOptions = OctokitTypes.EndpointOptions; +export declare type RequestParameters = OctokitTypes.RequestParameters; +export declare type RequestInterface = OctokitTypes.RequestInterface; +export declare type Route = OctokitTypes.Route; +export declare type Token = string; +export declare type OAuthTokenAuthentication = { + type: "token"; + tokenType: "oauth"; + token: Token; +}; +export declare type InstallationTokenAuthentication = { + type: "token"; + tokenType: "installation"; + token: Token; +}; +export declare type AppAuthentication = { + type: "token"; + tokenType: "app"; + token: Token; +}; +export declare type UserToServerAuthentication = { + type: "token"; + tokenType: "user-to-server"; + token: Token; +}; +export declare type Authentication = OAuthTokenAuthentication | InstallationTokenAuthentication | AppAuthentication | UserToServerAuthentication; diff --git a/node_modules/@octokit/auth-token/dist-types/with-authorization-prefix.d.ts b/node_modules/@octokit/auth-token/dist-types/with-authorization-prefix.d.ts new file mode 100644 index 0000000..2e52c31 --- /dev/null +++ b/node_modules/@octokit/auth-token/dist-types/with-authorization-prefix.d.ts @@ -0,0 +1,6 @@ +/** + * Prefix token for usage in the Authorization header + * + * @param token OAuth token or JSON Web Token + */ +export declare function withAuthorizationPrefix(token: string): string; diff --git a/node_modules/@octokit/auth-token/dist-web/index.js b/node_modules/@octokit/auth-token/dist-web/index.js new file mode 100644 index 0000000..8b1cd7d --- /dev/null +++ b/node_modules/@octokit/auth-token/dist-web/index.js @@ -0,0 +1,55 @@ +const REGEX_IS_INSTALLATION_LEGACY = /^v1\./; +const REGEX_IS_INSTALLATION = /^ghs_/; +const REGEX_IS_USER_TO_SERVER = /^ghu_/; +async function auth(token) { + const isApp = token.split(/\./).length === 3; + const isInstallation = REGEX_IS_INSTALLATION_LEGACY.test(token) || + REGEX_IS_INSTALLATION.test(token); + const isUserToServer = REGEX_IS_USER_TO_SERVER.test(token); + const tokenType = isApp + ? "app" + : isInstallation + ? "installation" + : isUserToServer + ? 
"user-to-server" + : "oauth"; + return { + type: "token", + token: token, + tokenType, + }; +} + +/** + * Prefix token for usage in the Authorization header + * + * @param token OAuth token or JSON Web Token + */ +function withAuthorizationPrefix(token) { + if (token.split(/\./).length === 3) { + return `bearer ${token}`; + } + return `token ${token}`; +} + +async function hook(token, request, route, parameters) { + const endpoint = request.endpoint.merge(route, parameters); + endpoint.headers.authorization = withAuthorizationPrefix(token); + return request(endpoint); +} + +const createTokenAuth = function createTokenAuth(token) { + if (!token) { + throw new Error("[@octokit/auth-token] No token passed to createTokenAuth"); + } + if (typeof token !== "string") { + throw new Error("[@octokit/auth-token] Token passed to createTokenAuth is not a string"); + } + token = token.replace(/^(token|bearer) +/i, ""); + return Object.assign(auth.bind(null, token), { + hook: hook.bind(null, token), + }); +}; + +export { createTokenAuth }; +//# sourceMappingURL=index.js.map diff --git a/node_modules/@octokit/auth-token/dist-web/index.js.map b/node_modules/@octokit/auth-token/dist-web/index.js.map new file mode 100644 index 0000000..1d6197b --- /dev/null +++ b/node_modules/@octokit/auth-token/dist-web/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sources":["../dist-src/auth.js","../dist-src/with-authorization-prefix.js","../dist-src/hook.js","../dist-src/index.js"],"sourcesContent":["const REGEX_IS_INSTALLATION_LEGACY = /^v1\\./;\nconst REGEX_IS_INSTALLATION = /^ghs_/;\nconst REGEX_IS_USER_TO_SERVER = /^ghu_/;\nexport async function auth(token) {\n const isApp = token.split(/\\./).length === 3;\n const isInstallation = REGEX_IS_INSTALLATION_LEGACY.test(token) ||\n REGEX_IS_INSTALLATION.test(token);\n const isUserToServer = REGEX_IS_USER_TO_SERVER.test(token);\n const tokenType = isApp\n ? \"app\"\n : isInstallation\n ? \"installation\"\n : isUserToServer\n ? 
\"user-to-server\"\n : \"oauth\";\n return {\n type: \"token\",\n token: token,\n tokenType,\n };\n}\n","/**\n * Prefix token for usage in the Authorization header\n *\n * @param token OAuth token or JSON Web Token\n */\nexport function withAuthorizationPrefix(token) {\n if (token.split(/\\./).length === 3) {\n return `bearer ${token}`;\n }\n return `token ${token}`;\n}\n","import { withAuthorizationPrefix } from \"./with-authorization-prefix\";\nexport async function hook(token, request, route, parameters) {\n const endpoint = request.endpoint.merge(route, parameters);\n endpoint.headers.authorization = withAuthorizationPrefix(token);\n return request(endpoint);\n}\n","import { auth } from \"./auth\";\nimport { hook } from \"./hook\";\nexport const createTokenAuth = function createTokenAuth(token) {\n if (!token) {\n throw new Error(\"[@octokit/auth-token] No token passed to createTokenAuth\");\n }\n if (typeof token !== \"string\") {\n throw new Error(\"[@octokit/auth-token] Token passed to createTokenAuth is not a string\");\n }\n token = token.replace(/^(token|bearer) +/i, \"\");\n return Object.assign(auth.bind(null, token), {\n hook: hook.bind(null, token),\n });\n};\n"],"names":[],"mappings":"AAAA,MAAM,4BAA4B,GAAG,OAAO,CAAC;AAC7C,MAAM,qBAAqB,GAAG,OAAO,CAAC;AACtC,MAAM,uBAAuB,GAAG,OAAO,CAAC;AACjC,eAAe,IAAI,CAAC,KAAK,EAAE;AAClC,IAAI,MAAM,KAAK,GAAG,KAAK,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,MAAM,KAAK,CAAC,CAAC;AACjD,IAAI,MAAM,cAAc,GAAG,4BAA4B,CAAC,IAAI,CAAC,KAAK,CAAC;AACnE,QAAQ,qBAAqB,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;AAC1C,IAAI,MAAM,cAAc,GAAG,uBAAuB,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;AAC/D,IAAI,MAAM,SAAS,GAAG,KAAK;AAC3B,UAAU,KAAK;AACf,UAAU,cAAc;AACxB,cAAc,cAAc;AAC5B,cAAc,cAAc;AAC5B,kBAAkB,gBAAgB;AAClC,kBAAkB,OAAO,CAAC;AAC1B,IAAI,OAAO;AACX,QAAQ,IAAI,EAAE,OAAO;AACrB,QAAQ,KAAK,EAAE,KAAK;AACpB,QAAQ,SAAS;AACjB,KAAK,CAAC;AACN;;ACpBA;AACA;AACA;AACA;AACA;AACA,AAAO,SAAS,uBAAuB,CAAC,KAAK,EAAE;AAC/C,IAAI,IAAI,KAAK,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,MAAM,KAAK,CAAC,EAAE;AACxC,QAAQ,OAAO,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC,CAAC;AACjC,KAAK;AACL,IAAI,OAAO,CAAC,MAAM,EAAE,KAAK,CAAC,CAAC,CAAC;AAC5B,CAAC;;ACTM,eAAe,IAAI,CAAC,KAAK,EAAE,OAAO,EAAE,KAAK,EAAE,UAAU,EAAE;AAC9D,IAAI,MAAM,QAAQ,GAAG,OAAO,CAAC,QAAQ,CAAC,KAAK,CAAC,KAAK,EAAE,UAAU,CAAC,CAAC;AAC/D,IAAI,QAAQ,CAAC,OAAO,CAAC,aAAa,GAAG,uBAAuB,CAAC,KAAK,CAAC,CAAC;AACpE,IAAI,OAAO,OAAO,CAAC,QAAQ,CAAC,CAAC;AAC7B,CAAC;;ACHW,MAAC,eAAe,GAAG,SAAS,eAAe,CAAC,KAAK,EAAE;AAC/D,IAAI,IAAI,CAAC,KAAK,EAAE;AAChB,QAAQ,MAAM,IAAI,KAAK,CAAC,0DAA0D,CAAC,CAAC;AACpF,KAAK;AACL,IAAI,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;AACnC,QAAQ,MAAM,IAAI,KAAK,CAAC,uEAAuE,CAAC,CAAC;AACjG,KAAK;AACL,IAAI,KAAK,GAAG,KAAK,CAAC,OAAO,CAAC,oBAAoB,EAAE,EAAE,CAAC,CAAC;AACpD,IAAI,OAAO,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,EAAE,KAAK,CAAC,EAAE;AACjD,QAAQ,IAAI,EAAE,IAAI,CAAC,IAAI,CAAC,IAAI,EAAE,KAAK,CAAC;AACpC,KAAK,CAAC,CAAC;AACP,CAAC;;;;"} \ No newline at end of file diff --git a/node_modules/@octokit/auth-token/package.json b/node_modules/@octokit/auth-token/package.json new file mode 100644 index 0000000..1b0df71 --- /dev/null +++ b/node_modules/@octokit/auth-token/package.json @@ -0,0 +1,45 @@ +{ + "name": "@octokit/auth-token", + "description": "GitHub API token authentication for browsers and Node.js", + "version": "2.5.0", + "license": "MIT", + "files": [ + "dist-*/", + "bin/" + ], + "pika": true, + "sideEffects": false, + "keywords": [ + "github", + "octokit", + "authentication", + "api" + ], + "repository": "github:octokit/auth-token.js", + "dependencies": { + "@octokit/types": "^6.0.3" + }, + "devDependencies": { + "@octokit/core": 
"^3.0.0", + "@octokit/request": "^5.3.0", + "@pika/pack": "^0.5.0", + "@pika/plugin-build-node": "^0.9.0", + "@pika/plugin-build-web": "^0.9.0", + "@pika/plugin-ts-standard-pkg": "^0.9.0", + "@types/fetch-mock": "^7.3.1", + "@types/jest": "^27.0.0", + "fetch-mock": "^9.0.0", + "jest": "^27.0.0", + "prettier": "2.4.1", + "semantic-release": "^17.0.0", + "ts-jest": "^27.0.0-next.12", + "typescript": "^4.0.0" + }, + "publishConfig": { + "access": "public" + }, + "source": "dist-src/index.js", + "types": "dist-types/index.d.ts", + "main": "dist-node/index.js", + "module": "dist-web/index.js" +} diff --git a/node_modules/@octokit/core/LICENSE b/node_modules/@octokit/core/LICENSE new file mode 100644 index 0000000..ef2c18e --- /dev/null +++ b/node_modules/@octokit/core/LICENSE @@ -0,0 +1,21 @@ +The MIT License + +Copyright (c) 2019 Octokit contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/@octokit/core/README.md b/node_modules/@octokit/core/README.md new file mode 100644 index 0000000..b540cb9 --- /dev/null +++ b/node_modules/@octokit/core/README.md @@ -0,0 +1,448 @@ +# core.js + +> Extendable client for GitHub's REST & GraphQL APIs + +[![@latest](https://img.shields.io/npm/v/@octokit/core.svg)](https://www.npmjs.com/package/@octokit/core) +[![Build Status](https://github.com/octokit/core.js/workflows/Test/badge.svg)](https://github.com/octokit/core.js/actions?query=workflow%3ATest+branch%3Amaster) + + + +- [Usage](#usage) + - [REST API example](#rest-api-example) + - [GraphQL example](#graphql-example) +- [Options](#options) +- [Defaults](#defaults) +- [Authentication](#authentication) +- [Logging](#logging) +- [Hooks](#hooks) +- [Plugins](#plugins) +- [Build your own Octokit with Plugins and Defaults](#build-your-own-octokit-with-plugins-and-defaults) +- [LICENSE](#license) + + + +If you need a minimalistic library to utilize GitHub's [REST API](https://developer.github.com/v3/) and [GraphQL API](https://developer.github.com/v4/) which you can extend with plugins as needed, then `@octokit/core` is a great starting point. + +If you don't need the Plugin API then using [`@octokit/request`](https://github.com/octokit/request.js/) or [`@octokit/graphql`](https://github.com/octokit/graphql.js/) directly is a good alternative. + +## Usage + + + + + + +
+Browsers + +Load @octokit/core directly from cdn.skypack.dev + +```html + +``` + +
+Node + + +Install with npm install @octokit/core + +```js +const { Octokit } = require("@octokit/core"); +// or: import { Octokit } from "@octokit/core"; +``` + +
+ +### REST API example + +```js +// Create a personal access token at https://github.com/settings/tokens/new?scopes=repo +const octokit = new Octokit({ auth: `personal-access-token123` }); + +const response = await octokit.request("GET /orgs/{org}/repos", { + org: "octokit", + type: "private", +}); +``` + +See [`@octokit/request`](https://github.com/octokit/request.js) for full documentation of the `.request` method. + +### GraphQL example + +```js +const octokit = new Octokit({ auth: `secret123` }); + +const response = await octokit.graphql( + `query ($login: String!) { + organization(login: $login) { + repositories(privacy: PRIVATE) { + totalCount + } + } + }`, + { login: "octokit" } +); +``` + +See [`@octokit/graphql`](https://github.com/octokit/graphql.js) for full documentation of the `.graphql` method. + +## Options + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ name + + type + + description +
+ options.authStrategy + + Function + + Defaults to @octokit/auth-token. See Authentication below for examples. +
+ options.auth + + String or Object + + See Authentication below for examples. +
+ options.baseUrl + + String + + +When using with GitHub Enterprise Server, set `options.baseUrl` to the root URL of the API. For example, if your GitHub Enterprise Server's hostname is `github.acme-inc.com`, then set `options.baseUrl` to `https://github.acme-inc.com/api/v3`. Example + +```js +const octokit = new Octokit({ + baseUrl: "https://github.acme-inc.com/api/v3", +}); +``` + +
+ options.previews + + Array of Strings + + +Some REST API endpoints require preview headers to be set, or enable +additional features. Preview headers can be set on a per-request basis, e.g. + +```js +octokit.request("POST /repos/{owner}/{repo}/pulls", { + mediaType: { + previews: ["shadow-cat"], + }, + owner, + repo, + title: "My pull request", + base: "master", + head: "my-feature", + draft: true, +}); +``` + +You can also set previews globally, by setting the `options.previews` option on the constructor. Example: + +```js +const octokit = new Octokit({ + previews: ["shadow-cat"], +}); +``` + +
+ options.request + + Object + + +Set a default request timeout (`options.request.timeout`) or an [`http(s).Agent`](https://nodejs.org/api/http.html#http_class_http_agent) e.g. for proxy usage (Node only, `options.request.agent`). + +There are more `options.request.*` options, see [`@octokit/request` options](https://github.com/octokit/request.js#request). `options.request` can also be set on a per-request basis. + +
+ options.timeZone + + String + + +Sets the `Time-Zone` header which defines a timezone according to the [list of names from the Olson database](https://en.wikipedia.org/wiki/List_of_tz_database_time_zones). + +```js +const octokit = new Octokit({ + timeZone: "America/Los_Angeles", +}); +``` + +The time zone header will determine the timezone used for generating the timestamp when creating commits. See [GitHub's Timezones documentation](https://developer.github.com/v3/#timezones). + +
+ options.userAgent + + String + + +A custom user agent string for your app or library. Example + +```js +const octokit = new Octokit({ + userAgent: "my-app/v1.2.3", +}); +``` + +
+ +## Defaults + +You can create a new Octokit class with customized default options. + +```js +const MyOctokit = Octokit.defaults({ + auth: "personal-access-token123", + baseUrl: "https://github.acme-inc.com/api/v3", + userAgent: "my-app/v1.2.3", +}); +const octokit1 = new MyOctokit(); +const octokit2 = new MyOctokit(); +``` + +If you pass additional options to your new constructor, the options will be merged shallowly. + +```js +const MyOctokit = Octokit.defaults({ + foo: { + opt1: 1, + }, +}); +const octokit = new MyOctokit({ + foo: { + opt2: 1, + }, +}); +// options will be { foo: { opt2: 1 }} +``` + +If you need a deep or conditional merge, you can pass a function instead. + +```js +const MyOctokit = Octokit.defaults((options) => { + return { + foo: Object.assign({}, options.foo, { opt2: 1 }), + }; +}); +const octokit = new MyOctokit({ + foo: { opt2: 1 }, +}); +// options will be { foo: { opt1: 1, opt2: 1 }} +``` + +Be careful about mutating the `options` object in the `Octokit.defaults` callback, as it can have unforeseen consequences. + +## Authentication + +Authentication is optional for some REST API endpoints accessing public data, but is required for GraphQL queries. Using authentication also increases your [API rate limit](https://developer.github.com/v3/#rate-limiting). + +By default, Octokit authenticates using the [token authentication strategy](https://github.com/octokit/auth-token.js). Pass in a token using `options.auth`. It can be a personal access token, an OAuth token, an installation access token or a JSON Web Token for GitHub App authentication. The `Authorization` header will be set according to the type of token. + +```js +import { Octokit } from "@octokit/core"; + +const octokit = new Octokit({ + auth: "mypersonalaccesstoken123", +}); + +const { data } = await octokit.request("/user"); +``` + +To use a different authentication strategy, set `options.authStrategy`. A list of authentication strategies is available at [octokit/authentication-strategies.js](https://github.com/octokit/authentication-strategies.js/#readme). + +Example + +```js +import { Octokit } from "@octokit/core"; +import { createAppAuth } from "@octokit/auth-app"; + +const appOctokit = new Octokit({ + authStrategy: createAppAuth, + auth: { + appId: 123, + privateKey: process.env.PRIVATE_KEY, + }, +}); + +const { data } = await appOctokit.request("/app"); +``` + +The `.auth()` method returned by the current authentication strategy can be accessed at `octokit.auth()`. Example + +```js +const { token } = await appOctokit.auth({ + type: "installation", + installationId: 123, +}); +``` + +## Logging + +There are four built-in log methods + +1. `octokit.log.debug(message[, additionalInfo])` +1. `octokit.log.info(message[, additionalInfo])` +1. `octokit.log.warn(message[, additionalInfo])` +1. `octokit.log.error(message[, additionalInfo])` + +They can be configured using the [`log` client option](client-options). By default, `octokit.log.debug()` and `octokit.log.info()` are no-ops, while the other two call `console.warn()` and `console.error()` respectively. + +This is useful if you build reusable [plugins](#plugins). + +If you would like to make the log level configurable using an environment variable or external option, we recommend the [console-log-level](https://github.com/watson/console-log-level) package. Example + +```js +const octokit = new Octokit({ + log: require("console-log-level")({ level: "info" }), +}); +``` + +## Hooks + +You can customize Octokit's request lifecycle with hooks. 
+ +```js +octokit.hook.before("request", async (options) => { + validate(options); +}); +octokit.hook.after("request", async (response, options) => { + console.log(`${options.method} ${options.url}: ${response.status}`); +}); +octokit.hook.error("request", async (error, options) => { + if (error.status === 304) { + return findInCache(error.response.headers.etag); + } + + throw error; +}); +octokit.hook.wrap("request", async (request, options) => { + // add logic before, after, catch errors or replace the request altogether + return request(options); +}); +``` + +See [before-after-hook](https://github.com/gr2m/before-after-hook#readme) for more documentation on hooks. + +## Plugins + +Octokit’s functionality can be extended using plugins. The `Octokit.plugin()` method accepts a plugin (or many) and returns a new constructor. + +A plugin is a function which gets two arguments: + +1. the current instance +2. the options passed to the constructor. + +In order to extend `octokit`'s API, the plugin must return an object with the new methods. + +```js +// index.js +const { Octokit } = require("@octokit/core") +const MyOctokit = Octokit.plugin( + require("./lib/my-plugin"), + require("octokit-plugin-example") +); + +const octokit = new MyOctokit({ greeting: "Moin moin" }); +octokit.helloWorld(); // logs "Moin moin, world!" +octokit.request("GET /"); // logs "GET / - 200 in 123ms" + +// lib/my-plugin.js +module.exports = (octokit, options = { greeting: "Hello" }) => { + // hook into the request lifecycle + octokit.hook.wrap("request", async (request, options) => { + const time = Date.now(); + const response = await request(options); + console.log( + `${options.method} ${options.url} – ${response.status} in ${Date.now() - + time}ms` + ); + return response; + }); + + // add a custom method + return { + helloWorld: () => console.log(`${options.greeting}, world!`); + } +}; +``` + +## Build your own Octokit with Plugins and Defaults + +You can build your own Octokit class with preset default options and plugins. In fact, this is mostly how the `@octokit/` modules work, such as [`@octokit/action`](https://github.com/octokit/action.js): + +```js +const { Octokit } = require("@octokit/core"); +const MyActionOctokit = Octokit.plugin( + require("@octokit/plugin-paginate-rest").paginateRest, + require("@octokit/plugin-throttling").throttling, + require("@octokit/plugin-retry").retry +).defaults({ + throttle: { + onAbuseLimit: (retryAfter, options) => { + /* ... */ + }, + onRateLimit: (retryAfter, options) => { + /* ... 
*/ + }, + }, + authStrategy: require("@octokit/auth-action").createActionAuth, + userAgent: `my-octokit-action/v1.2.3`, +}); + +const octokit = new MyActionOctokit(); +const installations = await octokit.paginate("GET /app/installations"); +``` + +## LICENSE + +[MIT](LICENSE) diff --git a/node_modules/@octokit/core/dist-node/index.js b/node_modules/@octokit/core/dist-node/index.js new file mode 100644 index 0000000..0f46e61 --- /dev/null +++ b/node_modules/@octokit/core/dist-node/index.js @@ -0,0 +1,176 @@ +'use strict'; + +Object.defineProperty(exports, '__esModule', { value: true }); + +var universalUserAgent = require('universal-user-agent'); +var beforeAfterHook = require('before-after-hook'); +var request = require('@octokit/request'); +var graphql = require('@octokit/graphql'); +var authToken = require('@octokit/auth-token'); + +function _objectWithoutPropertiesLoose(source, excluded) { + if (source == null) return {}; + var target = {}; + var sourceKeys = Object.keys(source); + var key, i; + + for (i = 0; i < sourceKeys.length; i++) { + key = sourceKeys[i]; + if (excluded.indexOf(key) >= 0) continue; + target[key] = source[key]; + } + + return target; +} + +function _objectWithoutProperties(source, excluded) { + if (source == null) return {}; + + var target = _objectWithoutPropertiesLoose(source, excluded); + + var key, i; + + if (Object.getOwnPropertySymbols) { + var sourceSymbolKeys = Object.getOwnPropertySymbols(source); + + for (i = 0; i < sourceSymbolKeys.length; i++) { + key = sourceSymbolKeys[i]; + if (excluded.indexOf(key) >= 0) continue; + if (!Object.prototype.propertyIsEnumerable.call(source, key)) continue; + target[key] = source[key]; + } + } + + return target; +} + +const VERSION = "3.6.0"; + +const _excluded = ["authStrategy"]; +class Octokit { + constructor(options = {}) { + const hook = new beforeAfterHook.Collection(); + const requestDefaults = { + baseUrl: request.request.endpoint.DEFAULTS.baseUrl, + headers: {}, + request: Object.assign({}, options.request, { + // @ts-ignore internal usage only, no need to type + hook: hook.bind(null, "request") + }), + mediaType: { + previews: [], + format: "" + } + }; // prepend default user agent with `options.userAgent` if set + + requestDefaults.headers["user-agent"] = [options.userAgent, `octokit-core.js/${VERSION} ${universalUserAgent.getUserAgent()}`].filter(Boolean).join(" "); + + if (options.baseUrl) { + requestDefaults.baseUrl = options.baseUrl; + } + + if (options.previews) { + requestDefaults.mediaType.previews = options.previews; + } + + if (options.timeZone) { + requestDefaults.headers["time-zone"] = options.timeZone; + } + + this.request = request.request.defaults(requestDefaults); + this.graphql = graphql.withCustomRequest(this.request).defaults(requestDefaults); + this.log = Object.assign({ + debug: () => {}, + info: () => {}, + warn: console.warn.bind(console), + error: console.error.bind(console) + }, options.log); + this.hook = hook; // (1) If neither `options.authStrategy` nor `options.auth` are set, the `octokit` instance + // is unauthenticated. The `this.auth()` method is a no-op and no request hook is registered. + // (2) If only `options.auth` is set, use the default token authentication strategy. + // (3) If `options.authStrategy` is set then use it and pass in `options.auth`. Always pass own request as many strategies accept a custom request instance. + // TODO: type `options.auth` based on `options.authStrategy`. 
+ + if (!options.authStrategy) { + if (!options.auth) { + // (1) + this.auth = async () => ({ + type: "unauthenticated" + }); + } else { + // (2) + const auth = authToken.createTokenAuth(options.auth); // @ts-ignore ¯\_(ツ)_/¯ + + hook.wrap("request", auth.hook); + this.auth = auth; + } + } else { + const { + authStrategy + } = options, + otherOptions = _objectWithoutProperties(options, _excluded); + + const auth = authStrategy(Object.assign({ + request: this.request, + log: this.log, + // we pass the current octokit instance as well as its constructor options + // to allow for authentication strategies that return a new octokit instance + // that shares the same internal state as the current one. The original + // requirement for this was the "event-octokit" authentication strategy + // of https://github.com/probot/octokit-auth-probot. + octokit: this, + octokitOptions: otherOptions + }, options.auth)); // @ts-ignore ¯\_(ツ)_/¯ + + hook.wrap("request", auth.hook); + this.auth = auth; + } // apply plugins + // https://stackoverflow.com/a/16345172 + + + const classConstructor = this.constructor; + classConstructor.plugins.forEach(plugin => { + Object.assign(this, plugin(this, options)); + }); + } + + static defaults(defaults) { + const OctokitWithDefaults = class extends this { + constructor(...args) { + const options = args[0] || {}; + + if (typeof defaults === "function") { + super(defaults(options)); + return; + } + + super(Object.assign({}, defaults, options, options.userAgent && defaults.userAgent ? { + userAgent: `${options.userAgent} ${defaults.userAgent}` + } : null)); + } + + }; + return OctokitWithDefaults; + } + /** + * Attach a plugin (or many) to your Octokit instance. + * + * @example + * const API = Octokit.plugin(plugin1, plugin2, plugin3, ...) 
+ */ + + + static plugin(...newPlugins) { + var _a; + + const currentPlugins = this.plugins; + const NewOctokit = (_a = class extends this {}, _a.plugins = currentPlugins.concat(newPlugins.filter(plugin => !currentPlugins.includes(plugin))), _a); + return NewOctokit; + } + +} +Octokit.VERSION = VERSION; +Octokit.plugins = []; + +exports.Octokit = Octokit; +//# sourceMappingURL=index.js.map diff --git a/node_modules/@octokit/core/dist-node/index.js.map b/node_modules/@octokit/core/dist-node/index.js.map new file mode 100644 index 0000000..3467e52 --- /dev/null +++ b/node_modules/@octokit/core/dist-node/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sources":["../dist-src/version.js","../dist-src/index.js"],"sourcesContent":["export const VERSION = \"3.6.0\";\n","import { getUserAgent } from \"universal-user-agent\";\nimport { Collection } from \"before-after-hook\";\nimport { request } from \"@octokit/request\";\nimport { withCustomRequest } from \"@octokit/graphql\";\nimport { createTokenAuth } from \"@octokit/auth-token\";\nimport { VERSION } from \"./version\";\nexport class Octokit {\n constructor(options = {}) {\n const hook = new Collection();\n const requestDefaults = {\n baseUrl: request.endpoint.DEFAULTS.baseUrl,\n headers: {},\n request: Object.assign({}, options.request, {\n // @ts-ignore internal usage only, no need to type\n hook: hook.bind(null, \"request\"),\n }),\n mediaType: {\n previews: [],\n format: \"\",\n },\n };\n // prepend default user agent with `options.userAgent` if set\n requestDefaults.headers[\"user-agent\"] = [\n options.userAgent,\n `octokit-core.js/${VERSION} ${getUserAgent()}`,\n ]\n .filter(Boolean)\n .join(\" \");\n if (options.baseUrl) {\n requestDefaults.baseUrl = options.baseUrl;\n }\n if (options.previews) {\n requestDefaults.mediaType.previews = options.previews;\n }\n if (options.timeZone) {\n requestDefaults.headers[\"time-zone\"] = options.timeZone;\n }\n this.request = request.defaults(requestDefaults);\n this.graphql = withCustomRequest(this.request).defaults(requestDefaults);\n this.log = Object.assign({\n debug: () => { },\n info: () => { },\n warn: console.warn.bind(console),\n error: console.error.bind(console),\n }, options.log);\n this.hook = hook;\n // (1) If neither `options.authStrategy` nor `options.auth` are set, the `octokit` instance\n // is unauthenticated. The `this.auth()` method is a no-op and no request hook is registered.\n // (2) If only `options.auth` is set, use the default token authentication strategy.\n // (3) If `options.authStrategy` is set then use it and pass in `options.auth`. Always pass own request as many strategies accept a custom request instance.\n // TODO: type `options.auth` based on `options.authStrategy`.\n if (!options.authStrategy) {\n if (!options.auth) {\n // (1)\n this.auth = async () => ({\n type: \"unauthenticated\",\n });\n }\n else {\n // (2)\n const auth = createTokenAuth(options.auth);\n // @ts-ignore ¯\\_(ツ)_/¯\n hook.wrap(\"request\", auth.hook);\n this.auth = auth;\n }\n }\n else {\n const { authStrategy, ...otherOptions } = options;\n const auth = authStrategy(Object.assign({\n request: this.request,\n log: this.log,\n // we pass the current octokit instance as well as its constructor options\n // to allow for authentication strategies that return a new octokit instance\n // that shares the same internal state as the current one. 
The original\n // requirement for this was the \"event-octokit\" authentication strategy\n // of https://github.com/probot/octokit-auth-probot.\n octokit: this,\n octokitOptions: otherOptions,\n }, options.auth));\n // @ts-ignore ¯\\_(ツ)_/¯\n hook.wrap(\"request\", auth.hook);\n this.auth = auth;\n }\n // apply plugins\n // https://stackoverflow.com/a/16345172\n const classConstructor = this.constructor;\n classConstructor.plugins.forEach((plugin) => {\n Object.assign(this, plugin(this, options));\n });\n }\n static defaults(defaults) {\n const OctokitWithDefaults = class extends this {\n constructor(...args) {\n const options = args[0] || {};\n if (typeof defaults === \"function\") {\n super(defaults(options));\n return;\n }\n super(Object.assign({}, defaults, options, options.userAgent && defaults.userAgent\n ? {\n userAgent: `${options.userAgent} ${defaults.userAgent}`,\n }\n : null));\n }\n };\n return OctokitWithDefaults;\n }\n /**\n * Attach a plugin (or many) to your Octokit instance.\n *\n * @example\n * const API = Octokit.plugin(plugin1, plugin2, plugin3, ...)\n */\n static plugin(...newPlugins) {\n var _a;\n const currentPlugins = this.plugins;\n const NewOctokit = (_a = class extends this {\n },\n _a.plugins = currentPlugins.concat(newPlugins.filter((plugin) => !currentPlugins.includes(plugin))),\n _a);\n return NewOctokit;\n }\n}\nOctokit.VERSION = VERSION;\nOctokit.plugins = [];\n"],"names":["VERSION","Octokit","constructor","options","hook","Collection","requestDefaults","baseUrl","request","endpoint","DEFAULTS","headers","Object","assign","bind","mediaType","previews","format","userAgent","getUserAgent","filter","Boolean","join","timeZone","defaults","graphql","withCustomRequest","log","debug","info","warn","console","error","authStrategy","auth","type","createTokenAuth","wrap","otherOptions","octokit","octokitOptions","classConstructor","plugins","forEach","plugin","OctokitWithDefaults","args","newPlugins","_a","currentPlugins","NewOctokit","concat","includes"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAO,MAAMA,OAAO,GAAG,mBAAhB;;;ACAP,AAMO,MAAMC,OAAN,CAAc;AACjBC,EAAAA,WAAW,CAACC,OAAO,GAAG,EAAX,EAAe;AACtB,UAAMC,IAAI,GAAG,IAAIC,0BAAJ,EAAb;AACA,UAAMC,eAAe,GAAG;AACpBC,MAAAA,OAAO,EAAEC,eAAO,CAACC,QAAR,CAAiBC,QAAjB,CAA0BH,OADf;AAEpBI,MAAAA,OAAO,EAAE,EAFW;AAGpBH,MAAAA,OAAO,EAAEI,MAAM,CAACC,MAAP,CAAc,EAAd,EAAkBV,OAAO,CAACK,OAA1B,EAAmC;AACxC;AACAJ,QAAAA,IAAI,EAAEA,IAAI,CAACU,IAAL,CAAU,IAAV,EAAgB,SAAhB;AAFkC,OAAnC,CAHW;AAOpBC,MAAAA,SAAS,EAAE;AACPC,QAAAA,QAAQ,EAAE,EADH;AAEPC,QAAAA,MAAM,EAAE;AAFD;AAPS,KAAxB,CAFsB;;AAetBX,IAAAA,eAAe,CAACK,OAAhB,CAAwB,YAAxB,IAAwC,CACpCR,OAAO,CAACe,SAD4B,EAEnC,mBAAkBlB,OAAQ,IAAGmB,+BAAY,EAAG,EAFT,EAInCC,MAJmC,CAI5BC,OAJ4B,EAKnCC,IALmC,CAK9B,GAL8B,CAAxC;;AAMA,QAAInB,OAAO,CAACI,OAAZ,EAAqB;AACjBD,MAAAA,eAAe,CAACC,OAAhB,GAA0BJ,OAAO,CAACI,OAAlC;AACH;;AACD,QAAIJ,OAAO,CAACa,QAAZ,EAAsB;AAClBV,MAAAA,eAAe,CAACS,SAAhB,CAA0BC,QAA1B,GAAqCb,OAAO,CAACa,QAA7C;AACH;;AACD,QAAIb,OAAO,CAACoB,QAAZ,EAAsB;AAClBjB,MAAAA,eAAe,CAACK,OAAhB,CAAwB,WAAxB,IAAuCR,OAAO,CAACoB,QAA/C;AACH;;AACD,SAAKf,OAAL,GAAeA,eAAO,CAACgB,QAAR,CAAiBlB,eAAjB,CAAf;AACA,SAAKmB,OAAL,GAAeC,yBAAiB,CAAC,KAAKlB,OAAN,CAAjB,CAAgCgB,QAAhC,CAAyClB,eAAzC,CAAf;AACA,SAAKqB,GAAL,GAAWf,MAAM,CAACC,MAAP,CAAc;AACrBe,MAAAA,KAAK,EAAE,MAAM,EADQ;AAErBC,MAAAA,IAAI,EAAE,MAAM,EAFS;AAGrBC,MAAAA,IAAI,EAAEC,OAAO,CAACD,IAAR,CAAahB,IAAb,CAAkBiB,OAAlB,CAHe;AAIrBC,MAAAA,KAAK,EAAED,OAAO,CAACC,KAAR,CAAclB,IAAd,CAAmBiB,OAAnB;AAJc,KAAd,EAKR5B,OAAO,CAACwB,GALA,CAAX;AAMA,SAAKvB,IAAL,GAAYA,IAAZ,CAtCsB;AAwCtB;AACA;AACA;AACA;;AAC
A,QAAI,CAACD,OAAO,CAAC8B,YAAb,EAA2B;AACvB,UAAI,CAAC9B,OAAO,CAAC+B,IAAb,EAAmB;AACf;AACA,aAAKA,IAAL,GAAY,aAAa;AACrBC,UAAAA,IAAI,EAAE;AADe,SAAb,CAAZ;AAGH,OALD,MAMK;AACD;AACA,cAAMD,IAAI,GAAGE,yBAAe,CAACjC,OAAO,CAAC+B,IAAT,CAA5B,CAFC;;AAID9B,QAAAA,IAAI,CAACiC,IAAL,CAAU,SAAV,EAAqBH,IAAI,CAAC9B,IAA1B;AACA,aAAK8B,IAAL,GAAYA,IAAZ;AACH;AACJ,KAdD,MAeK;AACD,YAAM;AAAED,QAAAA;AAAF,UAAoC9B,OAA1C;AAAA,YAAyBmC,YAAzB,4BAA0CnC,OAA1C;;AACA,YAAM+B,IAAI,GAAGD,YAAY,CAACrB,MAAM,CAACC,MAAP,CAAc;AACpCL,QAAAA,OAAO,EAAE,KAAKA,OADsB;AAEpCmB,QAAAA,GAAG,EAAE,KAAKA,GAF0B;AAGpC;AACA;AACA;AACA;AACA;AACAY,QAAAA,OAAO,EAAE,IAR2B;AASpCC,QAAAA,cAAc,EAAEF;AAToB,OAAd,EAUvBnC,OAAO,CAAC+B,IAVe,CAAD,CAAzB,CAFC;;AAcD9B,MAAAA,IAAI,CAACiC,IAAL,CAAU,SAAV,EAAqBH,IAAI,CAAC9B,IAA1B;AACA,WAAK8B,IAAL,GAAYA,IAAZ;AACH,KA3EqB;AA6EtB;;;AACA,UAAMO,gBAAgB,GAAG,KAAKvC,WAA9B;AACAuC,IAAAA,gBAAgB,CAACC,OAAjB,CAAyBC,OAAzB,CAAkCC,MAAD,IAAY;AACzChC,MAAAA,MAAM,CAACC,MAAP,CAAc,IAAd,EAAoB+B,MAAM,CAAC,IAAD,EAAOzC,OAAP,CAA1B;AACH,KAFD;AAGH;;AACc,SAARqB,QAAQ,CAACA,QAAD,EAAW;AACtB,UAAMqB,mBAAmB,GAAG,cAAc,IAAd,CAAmB;AAC3C3C,MAAAA,WAAW,CAAC,GAAG4C,IAAJ,EAAU;AACjB,cAAM3C,OAAO,GAAG2C,IAAI,CAAC,CAAD,CAAJ,IAAW,EAA3B;;AACA,YAAI,OAAOtB,QAAP,KAAoB,UAAxB,EAAoC;AAChC,gBAAMA,QAAQ,CAACrB,OAAD,CAAd;AACA;AACH;;AACD,cAAMS,MAAM,CAACC,MAAP,CAAc,EAAd,EAAkBW,QAAlB,EAA4BrB,OAA5B,EAAqCA,OAAO,CAACe,SAAR,IAAqBM,QAAQ,CAACN,SAA9B,GACrC;AACEA,UAAAA,SAAS,EAAG,GAAEf,OAAO,CAACe,SAAU,IAAGM,QAAQ,CAACN,SAAU;AADxD,SADqC,GAIrC,IAJA,CAAN;AAKH;;AAZ0C,KAA/C;AAcA,WAAO2B,mBAAP;AACH;AACD;AACJ;AACA;AACA;AACA;AACA;;;AACiB,SAAND,MAAM,CAAC,GAAGG,UAAJ,EAAgB;AACzB,QAAIC,EAAJ;;AACA,UAAMC,cAAc,GAAG,KAAKP,OAA5B;AACA,UAAMQ,UAAU,IAAIF,EAAE,GAAG,cAAc,IAAd,CAAmB,EAAxB,EAEhBA,EAAE,CAACN,OAAH,GAAaO,cAAc,CAACE,MAAf,CAAsBJ,UAAU,CAAC3B,MAAX,CAAmBwB,MAAD,IAAY,CAACK,cAAc,CAACG,QAAf,CAAwBR,MAAxB,CAA/B,CAAtB,CAFG,EAGhBI,EAHY,CAAhB;AAIA,WAAOE,UAAP;AACH;;AAnHgB;AAqHrBjD,OAAO,CAACD,OAAR,GAAkBA,OAAlB;AACAC,OAAO,CAACyC,OAAR,GAAkB,EAAlB;;;;"} \ No newline at end of file diff --git a/node_modules/@octokit/core/dist-src/index.js b/node_modules/@octokit/core/dist-src/index.js new file mode 100644 index 0000000..bdbc335 --- /dev/null +++ b/node_modules/@octokit/core/dist-src/index.js @@ -0,0 +1,125 @@ +import { getUserAgent } from "universal-user-agent"; +import { Collection } from "before-after-hook"; +import { request } from "@octokit/request"; +import { withCustomRequest } from "@octokit/graphql"; +import { createTokenAuth } from "@octokit/auth-token"; +import { VERSION } from "./version"; +export class Octokit { + constructor(options = {}) { + const hook = new Collection(); + const requestDefaults = { + baseUrl: request.endpoint.DEFAULTS.baseUrl, + headers: {}, + request: Object.assign({}, options.request, { + // @ts-ignore internal usage only, no need to type + hook: hook.bind(null, "request"), + }), + mediaType: { + previews: [], + format: "", + }, + }; + // prepend default user agent with `options.userAgent` if set + requestDefaults.headers["user-agent"] = [ + options.userAgent, + `octokit-core.js/${VERSION} ${getUserAgent()}`, + ] + .filter(Boolean) + .join(" "); + if (options.baseUrl) { + requestDefaults.baseUrl = options.baseUrl; + } + if (options.previews) { + requestDefaults.mediaType.previews = options.previews; + } + if (options.timeZone) { + requestDefaults.headers["time-zone"] = options.timeZone; + } + this.request = request.defaults(requestDefaults); + this.graphql = withCustomRequest(this.request).defaults(requestDefaults); + this.log = Object.assign({ + debug: () => { }, + info: () => { }, + warn: 
console.warn.bind(console), + error: console.error.bind(console), + }, options.log); + this.hook = hook; + // (1) If neither `options.authStrategy` nor `options.auth` are set, the `octokit` instance + // is unauthenticated. The `this.auth()` method is a no-op and no request hook is registered. + // (2) If only `options.auth` is set, use the default token authentication strategy. + // (3) If `options.authStrategy` is set then use it and pass in `options.auth`. Always pass own request as many strategies accept a custom request instance. + // TODO: type `options.auth` based on `options.authStrategy`. + if (!options.authStrategy) { + if (!options.auth) { + // (1) + this.auth = async () => ({ + type: "unauthenticated", + }); + } + else { + // (2) + const auth = createTokenAuth(options.auth); + // @ts-ignore ¯\_(ツ)_/¯ + hook.wrap("request", auth.hook); + this.auth = auth; + } + } + else { + const { authStrategy, ...otherOptions } = options; + const auth = authStrategy(Object.assign({ + request: this.request, + log: this.log, + // we pass the current octokit instance as well as its constructor options + // to allow for authentication strategies that return a new octokit instance + // that shares the same internal state as the current one. The original + // requirement for this was the "event-octokit" authentication strategy + // of https://github.com/probot/octokit-auth-probot. + octokit: this, + octokitOptions: otherOptions, + }, options.auth)); + // @ts-ignore ¯\_(ツ)_/¯ + hook.wrap("request", auth.hook); + this.auth = auth; + } + // apply plugins + // https://stackoverflow.com/a/16345172 + const classConstructor = this.constructor; + classConstructor.plugins.forEach((plugin) => { + Object.assign(this, plugin(this, options)); + }); + } + static defaults(defaults) { + const OctokitWithDefaults = class extends this { + constructor(...args) { + const options = args[0] || {}; + if (typeof defaults === "function") { + super(defaults(options)); + return; + } + super(Object.assign({}, defaults, options, options.userAgent && defaults.userAgent + ? { + userAgent: `${options.userAgent} ${defaults.userAgent}`, + } + : null)); + } + }; + return OctokitWithDefaults; + } + /** + * Attach a plugin (or many) to your Octokit instance. + * + * @example + * const API = Octokit.plugin(plugin1, plugin2, plugin3, ...) 
+ */ + static plugin(...newPlugins) { + var _a; + const currentPlugins = this.plugins; + const NewOctokit = (_a = class extends this { + }, + _a.plugins = currentPlugins.concat(newPlugins.filter((plugin) => !currentPlugins.includes(plugin))), + _a); + return NewOctokit; + } +} +Octokit.VERSION = VERSION; +Octokit.plugins = []; diff --git a/node_modules/@octokit/core/dist-src/types.js b/node_modules/@octokit/core/dist-src/types.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/node_modules/@octokit/core/dist-src/types.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@octokit/core/dist-src/version.js b/node_modules/@octokit/core/dist-src/version.js new file mode 100644 index 0000000..bace1a9 --- /dev/null +++ b/node_modules/@octokit/core/dist-src/version.js @@ -0,0 +1 @@ +export const VERSION = "3.6.0"; diff --git a/node_modules/@octokit/core/dist-types/index.d.ts b/node_modules/@octokit/core/dist-types/index.d.ts new file mode 100644 index 0000000..b757c5b --- /dev/null +++ b/node_modules/@octokit/core/dist-types/index.d.ts @@ -0,0 +1,30 @@ +import { HookCollection } from "before-after-hook"; +import { request } from "@octokit/request"; +import { graphql } from "@octokit/graphql"; +import { Constructor, Hooks, OctokitOptions, OctokitPlugin, ReturnTypeOf, UnionToIntersection } from "./types"; +export declare class Octokit { + static VERSION: string; + static defaults>(this: S, defaults: OctokitOptions | Function): S; + static plugins: OctokitPlugin[]; + /** + * Attach a plugin (or many) to your Octokit instance. + * + * @example + * const API = Octokit.plugin(plugin1, plugin2, plugin3, ...) + */ + static plugin & { + plugins: any[]; + }, T extends OctokitPlugin[]>(this: S, ...newPlugins: T): S & Constructor>>; + constructor(options?: OctokitOptions); + request: typeof request; + graphql: typeof graphql; + log: { + debug: (message: string, additionalInfo?: object) => any; + info: (message: string, additionalInfo?: object) => any; + warn: (message: string, additionalInfo?: object) => any; + error: (message: string, additionalInfo?: object) => any; + [key: string]: any; + }; + hook: HookCollection; + auth: (...args: unknown[]) => Promise; +} diff --git a/node_modules/@octokit/core/dist-types/types.d.ts b/node_modules/@octokit/core/dist-types/types.d.ts new file mode 100644 index 0000000..3970d0d --- /dev/null +++ b/node_modules/@octokit/core/dist-types/types.d.ts @@ -0,0 +1,44 @@ +import * as OctokitTypes from "@octokit/types"; +import { RequestError } from "@octokit/request-error"; +import { Octokit } from "."; +export declare type RequestParameters = OctokitTypes.RequestParameters; +export interface OctokitOptions { + authStrategy?: any; + auth?: any; + userAgent?: string; + previews?: string[]; + baseUrl?: string; + log?: { + debug: (message: string) => unknown; + info: (message: string) => unknown; + warn: (message: string) => unknown; + error: (message: string) => unknown; + }; + request?: OctokitTypes.RequestRequestOptions; + timeZone?: string; + [option: string]: any; +} +export declare type Constructor = new (...args: any[]) => T; +export declare type ReturnTypeOf = T extends AnyFunction ? ReturnType : T extends AnyFunction[] ? UnionToIntersection, void>> : never; +/** + * @author https://stackoverflow.com/users/2887218/jcalz + * @see https://stackoverflow.com/a/50375286/10325032 + */ +export declare type UnionToIntersection = (Union extends any ? (argument: Union) => void : never) extends (argument: infer Intersection) => void ? 
Intersection : never; +declare type AnyFunction = (...args: any) => any; +export declare type OctokitPlugin = (octokit: Octokit, options: OctokitOptions) => { + [key: string]: any; +} | void; +export declare type Hooks = { + request: { + Options: Required; + Result: OctokitTypes.OctokitResponse; + Error: RequestError | Error; + }; + [key: string]: { + Options: unknown; + Result: unknown; + Error: unknown; + }; +}; +export {}; diff --git a/node_modules/@octokit/core/dist-types/version.d.ts b/node_modules/@octokit/core/dist-types/version.d.ts new file mode 100644 index 0000000..f1a3d02 --- /dev/null +++ b/node_modules/@octokit/core/dist-types/version.d.ts @@ -0,0 +1 @@ +export declare const VERSION = "3.6.0"; diff --git a/node_modules/@octokit/core/dist-web/index.js b/node_modules/@octokit/core/dist-web/index.js new file mode 100644 index 0000000..741d231 --- /dev/null +++ b/node_modules/@octokit/core/dist-web/index.js @@ -0,0 +1,130 @@ +import { getUserAgent } from 'universal-user-agent'; +import { Collection } from 'before-after-hook'; +import { request } from '@octokit/request'; +import { withCustomRequest } from '@octokit/graphql'; +import { createTokenAuth } from '@octokit/auth-token'; + +const VERSION = "3.6.0"; + +class Octokit { + constructor(options = {}) { + const hook = new Collection(); + const requestDefaults = { + baseUrl: request.endpoint.DEFAULTS.baseUrl, + headers: {}, + request: Object.assign({}, options.request, { + // @ts-ignore internal usage only, no need to type + hook: hook.bind(null, "request"), + }), + mediaType: { + previews: [], + format: "", + }, + }; + // prepend default user agent with `options.userAgent` if set + requestDefaults.headers["user-agent"] = [ + options.userAgent, + `octokit-core.js/${VERSION} ${getUserAgent()}`, + ] + .filter(Boolean) + .join(" "); + if (options.baseUrl) { + requestDefaults.baseUrl = options.baseUrl; + } + if (options.previews) { + requestDefaults.mediaType.previews = options.previews; + } + if (options.timeZone) { + requestDefaults.headers["time-zone"] = options.timeZone; + } + this.request = request.defaults(requestDefaults); + this.graphql = withCustomRequest(this.request).defaults(requestDefaults); + this.log = Object.assign({ + debug: () => { }, + info: () => { }, + warn: console.warn.bind(console), + error: console.error.bind(console), + }, options.log); + this.hook = hook; + // (1) If neither `options.authStrategy` nor `options.auth` are set, the `octokit` instance + // is unauthenticated. The `this.auth()` method is a no-op and no request hook is registered. + // (2) If only `options.auth` is set, use the default token authentication strategy. + // (3) If `options.authStrategy` is set then use it and pass in `options.auth`. Always pass own request as many strategies accept a custom request instance. + // TODO: type `options.auth` based on `options.authStrategy`. + if (!options.authStrategy) { + if (!options.auth) { + // (1) + this.auth = async () => ({ + type: "unauthenticated", + }); + } + else { + // (2) + const auth = createTokenAuth(options.auth); + // @ts-ignore ¯\_(ツ)_/¯ + hook.wrap("request", auth.hook); + this.auth = auth; + } + } + else { + const { authStrategy, ...otherOptions } = options; + const auth = authStrategy(Object.assign({ + request: this.request, + log: this.log, + // we pass the current octokit instance as well as its constructor options + // to allow for authentication strategies that return a new octokit instance + // that shares the same internal state as the current one. 
The original + // requirement for this was the "event-octokit" authentication strategy + // of https://github.com/probot/octokit-auth-probot. + octokit: this, + octokitOptions: otherOptions, + }, options.auth)); + // @ts-ignore ¯\_(ツ)_/¯ + hook.wrap("request", auth.hook); + this.auth = auth; + } + // apply plugins + // https://stackoverflow.com/a/16345172 + const classConstructor = this.constructor; + classConstructor.plugins.forEach((plugin) => { + Object.assign(this, plugin(this, options)); + }); + } + static defaults(defaults) { + const OctokitWithDefaults = class extends this { + constructor(...args) { + const options = args[0] || {}; + if (typeof defaults === "function") { + super(defaults(options)); + return; + } + super(Object.assign({}, defaults, options, options.userAgent && defaults.userAgent + ? { + userAgent: `${options.userAgent} ${defaults.userAgent}`, + } + : null)); + } + }; + return OctokitWithDefaults; + } + /** + * Attach a plugin (or many) to your Octokit instance. + * + * @example + * const API = Octokit.plugin(plugin1, plugin2, plugin3, ...) + */ + static plugin(...newPlugins) { + var _a; + const currentPlugins = this.plugins; + const NewOctokit = (_a = class extends this { + }, + _a.plugins = currentPlugins.concat(newPlugins.filter((plugin) => !currentPlugins.includes(plugin))), + _a); + return NewOctokit; + } +} +Octokit.VERSION = VERSION; +Octokit.plugins = []; + +export { Octokit }; +//# sourceMappingURL=index.js.map diff --git a/node_modules/@octokit/core/dist-web/index.js.map b/node_modules/@octokit/core/dist-web/index.js.map new file mode 100644 index 0000000..238c82e --- /dev/null +++ b/node_modules/@octokit/core/dist-web/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sources":["../dist-src/version.js","../dist-src/index.js"],"sourcesContent":["export const VERSION = \"3.6.0\";\n","import { getUserAgent } from \"universal-user-agent\";\nimport { Collection } from \"before-after-hook\";\nimport { request } from \"@octokit/request\";\nimport { withCustomRequest } from \"@octokit/graphql\";\nimport { createTokenAuth } from \"@octokit/auth-token\";\nimport { VERSION } from \"./version\";\nexport class Octokit {\n constructor(options = {}) {\n const hook = new Collection();\n const requestDefaults = {\n baseUrl: request.endpoint.DEFAULTS.baseUrl,\n headers: {},\n request: Object.assign({}, options.request, {\n // @ts-ignore internal usage only, no need to type\n hook: hook.bind(null, \"request\"),\n }),\n mediaType: {\n previews: [],\n format: \"\",\n },\n };\n // prepend default user agent with `options.userAgent` if set\n requestDefaults.headers[\"user-agent\"] = [\n options.userAgent,\n `octokit-core.js/${VERSION} ${getUserAgent()}`,\n ]\n .filter(Boolean)\n .join(\" \");\n if (options.baseUrl) {\n requestDefaults.baseUrl = options.baseUrl;\n }\n if (options.previews) {\n requestDefaults.mediaType.previews = options.previews;\n }\n if (options.timeZone) {\n requestDefaults.headers[\"time-zone\"] = options.timeZone;\n }\n this.request = request.defaults(requestDefaults);\n this.graphql = withCustomRequest(this.request).defaults(requestDefaults);\n this.log = Object.assign({\n debug: () => { },\n info: () => { },\n warn: console.warn.bind(console),\n error: console.error.bind(console),\n }, options.log);\n this.hook = hook;\n // (1) If neither `options.authStrategy` nor `options.auth` are set, the `octokit` instance\n // is unauthenticated. 
The `this.auth()` method is a no-op and no request hook is registered.\n // (2) If only `options.auth` is set, use the default token authentication strategy.\n // (3) If `options.authStrategy` is set then use it and pass in `options.auth`. Always pass own request as many strategies accept a custom request instance.\n // TODO: type `options.auth` based on `options.authStrategy`.\n if (!options.authStrategy) {\n if (!options.auth) {\n // (1)\n this.auth = async () => ({\n type: \"unauthenticated\",\n });\n }\n else {\n // (2)\n const auth = createTokenAuth(options.auth);\n // @ts-ignore ¯\\_(ツ)_/¯\n hook.wrap(\"request\", auth.hook);\n this.auth = auth;\n }\n }\n else {\n const { authStrategy, ...otherOptions } = options;\n const auth = authStrategy(Object.assign({\n request: this.request,\n log: this.log,\n // we pass the current octokit instance as well as its constructor options\n // to allow for authentication strategies that return a new octokit instance\n // that shares the same internal state as the current one. The original\n // requirement for this was the \"event-octokit\" authentication strategy\n // of https://github.com/probot/octokit-auth-probot.\n octokit: this,\n octokitOptions: otherOptions,\n }, options.auth));\n // @ts-ignore ¯\\_(ツ)_/¯\n hook.wrap(\"request\", auth.hook);\n this.auth = auth;\n }\n // apply plugins\n // https://stackoverflow.com/a/16345172\n const classConstructor = this.constructor;\n classConstructor.plugins.forEach((plugin) => {\n Object.assign(this, plugin(this, options));\n });\n }\n static defaults(defaults) {\n const OctokitWithDefaults = class extends this {\n constructor(...args) {\n const options = args[0] || {};\n if (typeof defaults === \"function\") {\n super(defaults(options));\n return;\n }\n super(Object.assign({}, defaults, options, options.userAgent && defaults.userAgent\n ? 
{\n userAgent: `${options.userAgent} ${defaults.userAgent}`,\n }\n : null));\n }\n };\n return OctokitWithDefaults;\n }\n /**\n * Attach a plugin (or many) to your Octokit instance.\n *\n * @example\n * const API = Octokit.plugin(plugin1, plugin2, plugin3, ...)\n */\n static plugin(...newPlugins) {\n var _a;\n const currentPlugins = this.plugins;\n const NewOctokit = (_a = class extends this {\n },\n _a.plugins = currentPlugins.concat(newPlugins.filter((plugin) => !currentPlugins.includes(plugin))),\n _a);\n return NewOctokit;\n }\n}\nOctokit.VERSION = VERSION;\nOctokit.plugins = [];\n"],"names":[],"mappings":";;;;;;AAAO,MAAM,OAAO,GAAG,mBAAmB;;ACMnC,MAAM,OAAO,CAAC;AACrB,IAAI,WAAW,CAAC,OAAO,GAAG,EAAE,EAAE;AAC9B,QAAQ,MAAM,IAAI,GAAG,IAAI,UAAU,EAAE,CAAC;AACtC,QAAQ,MAAM,eAAe,GAAG;AAChC,YAAY,OAAO,EAAE,OAAO,CAAC,QAAQ,CAAC,QAAQ,CAAC,OAAO;AACtD,YAAY,OAAO,EAAE,EAAE;AACvB,YAAY,OAAO,EAAE,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,OAAO,CAAC,OAAO,EAAE;AACxD;AACA,gBAAgB,IAAI,EAAE,IAAI,CAAC,IAAI,CAAC,IAAI,EAAE,SAAS,CAAC;AAChD,aAAa,CAAC;AACd,YAAY,SAAS,EAAE;AACvB,gBAAgB,QAAQ,EAAE,EAAE;AAC5B,gBAAgB,MAAM,EAAE,EAAE;AAC1B,aAAa;AACb,SAAS,CAAC;AACV;AACA,QAAQ,eAAe,CAAC,OAAO,CAAC,YAAY,CAAC,GAAG;AAChD,YAAY,OAAO,CAAC,SAAS;AAC7B,YAAY,CAAC,gBAAgB,EAAE,OAAO,CAAC,CAAC,EAAE,YAAY,EAAE,CAAC,CAAC;AAC1D,SAAS;AACT,aAAa,MAAM,CAAC,OAAO,CAAC;AAC5B,aAAa,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,QAAQ,IAAI,OAAO,CAAC,OAAO,EAAE;AAC7B,YAAY,eAAe,CAAC,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC;AACtD,SAAS;AACT,QAAQ,IAAI,OAAO,CAAC,QAAQ,EAAE;AAC9B,YAAY,eAAe,CAAC,SAAS,CAAC,QAAQ,GAAG,OAAO,CAAC,QAAQ,CAAC;AAClE,SAAS;AACT,QAAQ,IAAI,OAAO,CAAC,QAAQ,EAAE;AAC9B,YAAY,eAAe,CAAC,OAAO,CAAC,WAAW,CAAC,GAAG,OAAO,CAAC,QAAQ,CAAC;AACpE,SAAS;AACT,QAAQ,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC,QAAQ,CAAC,eAAe,CAAC,CAAC;AACzD,QAAQ,IAAI,CAAC,OAAO,GAAG,iBAAiB,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,QAAQ,CAAC,eAAe,CAAC,CAAC;AACjF,QAAQ,IAAI,CAAC,GAAG,GAAG,MAAM,CAAC,MAAM,CAAC;AACjC,YAAY,KAAK,EAAE,MAAM,GAAG;AAC5B,YAAY,IAAI,EAAE,MAAM,GAAG;AAC3B,YAAY,IAAI,EAAE,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC;AAC5C,YAAY,KAAK,EAAE,OAAO,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC;AAC9C,SAAS,EAAE,OAAO,CAAC,GAAG,CAAC,CAAC;AACxB,QAAQ,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC;AACzB;AACA;AACA;AACA;AACA;AACA,QAAQ,IAAI,CAAC,OAAO,CAAC,YAAY,EAAE;AACnC,YAAY,IAAI,CAAC,OAAO,CAAC,IAAI,EAAE;AAC/B;AACA,gBAAgB,IAAI,CAAC,IAAI,GAAG,aAAa;AACzC,oBAAoB,IAAI,EAAE,iBAAiB;AAC3C,iBAAiB,CAAC,CAAC;AACnB,aAAa;AACb,iBAAiB;AACjB;AACA,gBAAgB,MAAM,IAAI,GAAG,eAAe,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;AAC3D;AACA,gBAAgB,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,IAAI,CAAC,IAAI,CAAC,CAAC;AAChD,gBAAgB,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC;AACjC,aAAa;AACb,SAAS;AACT,aAAa;AACb,YAAY,MAAM,EAAE,YAAY,EAAE,GAAG,YAAY,EAAE,GAAG,OAAO,CAAC;AAC9D,YAAY,MAAM,IAAI,GAAG,YAAY,CAAC,MAAM,CAAC,MAAM,CAAC;AACpD,gBAAgB,OAAO,EAAE,IAAI,CAAC,OAAO;AACrC,gBAAgB,GAAG,EAAE,IAAI,CAAC,GAAG;AAC7B;AACA;AACA;AACA;AACA;AACA,gBAAgB,OAAO,EAAE,IAAI;AAC7B,gBAAgB,cAAc,EAAE,YAAY;AAC5C,aAAa,EAAE,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC;AAC9B;AACA,YAAY,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,IAAI,CAAC,IAAI,CAAC,CAAC;AAC5C,YAAY,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC;AAC7B,SAAS;AACT;AACA;AACA,QAAQ,MAAM,gBAAgB,GAAG,IAAI,CAAC,WAAW,CAAC;AAClD,QAAQ,gBAAgB,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC,MAAM,KAAK;AACrD,YAAY,MAAM,CAAC,MAAM,CAAC,IAAI,EAAE,MAAM,CAAC,IAAI,EAAE,OAAO,CAAC,CAAC,CAAC;AACvD,SAAS,CAAC,CAAC;AACX,KAAK;AACL,IAAI,OAAO,QAAQ,CAAC,QAAQ,EAAE;AAC9B,QAAQ,MAAM,mBAAmB,GAAG,cAAc,IAAI,CAAC;AACvD,YAAY,WAAW,CAAC,GAAG,IAAI,EAAE;AACjC,gBAAgB,MAAM,OAAO,GAAG,IAAI,CAAC,CAAC,CAAC,IAAI,EAAE,CAAC;AAC9C,gBAAgB,IAAI,OAAO,QAAQ,KAAK,UAAU,EAAE;AACpD,oBAAoB,KAAK,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,CAAC;AAC7C,oBAAoB,OAAO;AAC3B,iBAAiB;AAC
jB,gBAAgB,KAAK,CAAC,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,QAAQ,EAAE,OAAO,EAAE,OAAO,CAAC,SAAS,IAAI,QAAQ,CAAC,SAAS;AAClG,sBAAsB;AACtB,wBAAwB,SAAS,EAAE,CAAC,EAAE,OAAO,CAAC,SAAS,CAAC,CAAC,EAAE,QAAQ,CAAC,SAAS,CAAC,CAAC;AAC/E,qBAAqB;AACrB,sBAAsB,IAAI,CAAC,CAAC,CAAC;AAC7B,aAAa;AACb,SAAS,CAAC;AACV,QAAQ,OAAO,mBAAmB,CAAC;AACnC,KAAK;AACL;AACA;AACA;AACA;AACA;AACA;AACA,IAAI,OAAO,MAAM,CAAC,GAAG,UAAU,EAAE;AACjC,QAAQ,IAAI,EAAE,CAAC;AACf,QAAQ,MAAM,cAAc,GAAG,IAAI,CAAC,OAAO,CAAC;AAC5C,QAAQ,MAAM,UAAU,IAAI,EAAE,GAAG,cAAc,IAAI,CAAC;AACpD,aAAa;AACb,YAAY,EAAE,CAAC,OAAO,GAAG,cAAc,CAAC,MAAM,CAAC,UAAU,CAAC,MAAM,CAAC,CAAC,MAAM,KAAK,CAAC,cAAc,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC;AAC/G,YAAY,EAAE,CAAC,CAAC;AAChB,QAAQ,OAAO,UAAU,CAAC;AAC1B,KAAK;AACL,CAAC;AACD,OAAO,CAAC,OAAO,GAAG,OAAO,CAAC;AAC1B,OAAO,CAAC,OAAO,GAAG,EAAE,CAAC;;;;"} \ No newline at end of file diff --git a/node_modules/@octokit/core/package.json b/node_modules/@octokit/core/package.json new file mode 100644 index 0000000..548e76c --- /dev/null +++ b/node_modules/@octokit/core/package.json @@ -0,0 +1,57 @@ +{ + "name": "@octokit/core", + "description": "Extendable client for GitHub's REST & GraphQL APIs", + "version": "3.6.0", + "license": "MIT", + "files": [ + "dist-*/", + "bin/" + ], + "pika": true, + "sideEffects": false, + "keywords": [ + "octokit", + "github", + "api", + "sdk", + "toolkit" + ], + "repository": "github:octokit/core.js", + "dependencies": { + "@octokit/auth-token": "^2.4.4", + "@octokit/graphql": "^4.5.8", + "@octokit/request": "^5.6.3", + "@octokit/request-error": "^2.0.5", + "@octokit/types": "^6.0.3", + "before-after-hook": "^2.2.0", + "universal-user-agent": "^6.0.0" + }, + "devDependencies": { + "@octokit/auth": "^3.0.1", + "@pika/pack": "^0.5.0", + "@pika/plugin-build-node": "^0.9.0", + "@pika/plugin-build-web": "^0.9.0", + "@pika/plugin-ts-standard-pkg": "^0.9.0", + "@types/fetch-mock": "^7.3.1", + "@types/jest": "^27.0.0", + "@types/lolex": "^5.1.0", + "@types/node": "^14.0.4", + "fetch-mock": "^9.0.0", + "http-proxy-agent": "^5.0.0", + "jest": "^27.0.0", + "lolex": "^6.0.0", + "prettier": "2.4.1", + "proxy": "^1.0.1", + "semantic-release": "^18.0.0", + "semantic-release-plugin-update-version-in-files": "^1.0.0", + "ts-jest": "^27.0.0", + "typescript": "^4.0.2" + }, + "publishConfig": { + "access": "public" + }, + "source": "dist-src/index.js", + "types": "dist-types/index.d.ts", + "main": "dist-node/index.js", + "module": "dist-web/index.js" +} diff --git a/node_modules/@octokit/endpoint/LICENSE b/node_modules/@octokit/endpoint/LICENSE new file mode 100644 index 0000000..af5366d --- /dev/null +++ b/node_modules/@octokit/endpoint/LICENSE @@ -0,0 +1,21 @@ +The MIT License + +Copyright (c) 2018 Octokit contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/@octokit/endpoint/README.md b/node_modules/@octokit/endpoint/README.md new file mode 100644 index 0000000..1e5153f --- /dev/null +++ b/node_modules/@octokit/endpoint/README.md @@ -0,0 +1,421 @@ +# endpoint.js + +> Turns GitHub REST API endpoints into generic request options + +[![@latest](https://img.shields.io/npm/v/@octokit/endpoint.svg)](https://www.npmjs.com/package/@octokit/endpoint) +![Build Status](https://github.com/octokit/endpoint.js/workflows/Test/badge.svg) + +`@octokit/endpoint` combines [GitHub REST API routes](https://developer.github.com/v3/) with your parameters and turns them into generic request options that can be used in any request library. + + + + + +- [Usage](#usage) +- [API](#api) + - [`endpoint(route, options)` or `endpoint(options)`](#endpointroute-options-or-endpointoptions) + - [`endpoint.defaults()`](#endpointdefaults) + - [`endpoint.DEFAULTS`](#endpointdefaults) + - [`endpoint.merge(route, options)` or `endpoint.merge(options)`](#endpointmergeroute-options-or-endpointmergeoptions) + - [`endpoint.parse()`](#endpointparse) +- [Special cases](#special-cases) + - [The `data` parameter – set request body directly](#the-data-parameter-%E2%80%93-set-request-body-directly) + - [Set parameters for both the URL/query and the request body](#set-parameters-for-both-the-urlquery-and-the-request-body) +- [LICENSE](#license) + + + +## Usage + + + + + + +
+Browsers
+
+Load @octokit/endpoint directly from cdn.skypack.dev
+
+```html
+<script type="module">
+import { endpoint } from "https://cdn.skypack.dev/@octokit/endpoint";
+</script>
+```
+
+Node + + +Install with npm install @octokit/endpoint + +```js +const { endpoint } = require("@octokit/endpoint"); +// or: import { endpoint } from "@octokit/endpoint"; +``` + +
+ +Example for [List organization repositories](https://developer.github.com/v3/repos/#list-organization-repositories) + +```js +const requestOptions = endpoint("GET /orgs/{org}/repos", { + headers: { + authorization: "token 0000000000000000000000000000000000000001", + }, + org: "octokit", + type: "private", +}); +``` + +The resulting `requestOptions` looks as follows + +```json +{ + "method": "GET", + "url": "https://api.github.com/orgs/octokit/repos?type=private", + "headers": { + "accept": "application/vnd.github.v3+json", + "authorization": "token 0000000000000000000000000000000000000001", + "user-agent": "octokit/endpoint.js v1.2.3" + } +} +``` + +You can pass `requestOptions` to common request libraries + +```js +const { url, ...options } = requestOptions; +// using with fetch (https://developer.mozilla.org/en-US/docs/Web/API/Fetch_API) +fetch(url, options); +// using with request (https://github.com/request/request) +request(requestOptions); +// using with got (https://github.com/sindresorhus/got) +got[options.method](url, options); +// using with axios +axios(requestOptions); +``` + +## API + +### `endpoint(route, options)` or `endpoint(options)` + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+| name | type | description |
+| ---- | ---- | ----------- |
+| `route` | String | If set, it has to be a string consisting of URL and the request method, e.g., `GET /orgs/{org}/repos`. If it's set to a URL, only the method defaults to `GET`. |
+| `options.method` | String | Required unless `route` is set. Any supported http verb. Defaults to `GET`. |
+| `options.url` | String | Required unless `route` is set. A path or full URL which may contain `:variable` or `{variable}` placeholders, e.g., `/orgs/{org}/repos`. The `url` is parsed using url-template. |
+| `options.baseUrl` | String | Defaults to `https://api.github.com`. |
+| `options.headers` | Object | Custom headers. Passed headers are merged with defaults: `headers['user-agent']` defaults to `octokit-endpoint.js/1.2.3` (where `1.2.3` is the released version), and `headers['accept']` defaults to `application/vnd.github.v3+json`. |
+| `options.mediaType.format` | String | Media type param, such as `raw`, `diff`, or `text+json`. See Media Types. Setting `options.mediaType.format` will amend the `headers.accept` value. |
+| `options.mediaType.previews` | Array of Strings | Name of previews, such as `mercy`, `symmetra`, or `scarlet-witch`. See API Previews. If `options.mediaType.previews` was set as default, the new previews will be merged into the default ones. Setting `options.mediaType.previews` will amend the `headers.accept` value. `options.mediaType.previews` will be merged with an existing array set using `.defaults()`. |
+| `options.data` | Any | Set request body directly instead of setting it to JSON based on additional parameters. See "The `data` parameter" below. |
+| `options.request` | Object | Pass custom meta information for the request. The `request` object will be returned as is. |
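+
+For example, a minimal sketch (the repository values here are assumed) of how `options.mediaType.format` amends the default `accept` header:
+
+```js
+const { endpoint } = require("@octokit/endpoint");
+
+// Request raw file contents instead of the default JSON representation.
+const options = endpoint("GET /repos/{owner}/{repo}/contents/{path}", {
+  owner: "octocat", // assumed example values
+  repo: "hello-world",
+  path: "README.md",
+  mediaType: { format: "raw" },
+});
+
+// The default "application/vnd.github.v3+json" accept header
+// is rewritten to include the requested format:
+console.log(options.headers.accept); // "application/vnd.github.v3.raw"
+```
+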
+
+All other options will be passed depending on the `method` and `url` options.
+
+1. If the option key has a placeholder in the `url`, it will be used as the replacement. For example, if the passed options are `{url: '/orgs/{org}/repos', org: 'foo'}` the returned `options.url` is `https://api.github.com/orgs/foo/repos`.
+2. If the `method` is `GET` or `HEAD`, the option is passed as a query parameter.
+3. Otherwise, the parameter is passed in the request body as a JSON key.
+
+**Result**
+
+`endpoint()` is a synchronous method and returns an object with the following keys:
+
+| key | type | description |
+| --- | ---- | ----------- |
+| `method` | String | The http method. Always lowercase. |
+| `url` | String | The url with placeholders replaced with passed parameters. |
+| `headers` | Object | All header names are lowercased. |
+| `body` | Any | The request body if one is present. Only for `PATCH`, `POST`, `PUT`, `DELETE` requests. |
+| `request` | Object | Request meta option, it will be returned as it was passed into `endpoint()`. |
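+
+Putting the routing rules and result keys above together, here is a minimal sketch for a non-`GET` request (the issue title and label values are assumed):
+
+```js
+const { endpoint } = require("@octokit/endpoint");
+
+const requestOptions = endpoint("POST /repos/{owner}/{repo}/issues", {
+  owner: "octocat", // fills the {owner} placeholder
+  repo: "hello-world", // fills the {repo} placeholder
+  title: "Found a bug", // no placeholder and not a GET/HEAD request, so it goes into the body
+  labels: ["bug"],
+});
+
+// requestOptions.url === "https://api.github.com/repos/octocat/hello-world/issues"
+// requestOptions.body deep-equals { title: "Found a bug", labels: ["bug"] }
+// requestOptions.headers includes "content-type": "application/json; charset=utf-8"
+```
+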
+ +### `endpoint.defaults()` + +Override or set default options. Example: + +```js +const request = require("request"); +const myEndpoint = require("@octokit/endpoint").defaults({ + baseUrl: "https://github-enterprise.acme-inc.com/api/v3", + headers: { + "user-agent": "myApp/1.2.3", + authorization: `token 0000000000000000000000000000000000000001`, + }, + org: "my-project", + per_page: 100, +}); + +request(myEndpoint(`GET /orgs/{org}/repos`)); +``` + +You can call `.defaults()` again on the returned method, the defaults will cascade. + +```js +const myProjectEndpoint = endpoint.defaults({ + baseUrl: "https://github-enterprise.acme-inc.com/api/v3", + headers: { + "user-agent": "myApp/1.2.3", + }, + org: "my-project", +}); +const myProjectEndpointWithAuth = myProjectEndpoint.defaults({ + headers: { + authorization: `token 0000000000000000000000000000000000000001`, + }, +}); +``` + +`myProjectEndpointWithAuth` now defaults the `baseUrl`, `headers['user-agent']`, +`org` and `headers['authorization']` on top of `headers['accept']` that is set +by the global default. + +### `endpoint.DEFAULTS` + +The current default options. + +```js +endpoint.DEFAULTS.baseUrl; // https://api.github.com +const myEndpoint = endpoint.defaults({ + baseUrl: "https://github-enterprise.acme-inc.com/api/v3", +}); +myEndpoint.DEFAULTS.baseUrl; // https://github-enterprise.acme-inc.com/api/v3 +``` + +### `endpoint.merge(route, options)` or `endpoint.merge(options)` + +Get the defaulted endpoint options, but without parsing them into request options: + +```js +const myProjectEndpoint = endpoint.defaults({ + baseUrl: "https://github-enterprise.acme-inc.com/api/v3", + headers: { + "user-agent": "myApp/1.2.3", + }, + org: "my-project", +}); +myProjectEndpoint.merge("GET /orgs/{org}/repos", { + headers: { + authorization: `token 0000000000000000000000000000000000000001`, + }, + org: "my-secret-project", + type: "private", +}); + +// { +// baseUrl: 'https://github-enterprise.acme-inc.com/api/v3', +// method: 'GET', +// url: '/orgs/{org}/repos', +// headers: { +// accept: 'application/vnd.github.v3+json', +// authorization: `token 0000000000000000000000000000000000000001`, +// 'user-agent': 'myApp/1.2.3' +// }, +// org: 'my-secret-project', +// type: 'private' +// } +``` + +### `endpoint.parse()` + +Stateless method to turn endpoint options into request options. Calling +`endpoint(options)` is the same as calling `endpoint.parse(endpoint.merge(options))`. + +## Special cases + + + +### The `data` parameter – set request body directly + +Some endpoints such as [Render a Markdown document in raw mode](https://developer.github.com/v3/markdown/#render-a-markdown-document-in-raw-mode) don’t have parameters that are sent as request body keys, instead, the request body needs to be set directly. In these cases, set the `data` parameter. + +```js +const options = endpoint("POST /markdown/raw", { + data: "Hello world github/linguist#1 **cool**, and #1!", + headers: { + accept: "text/html;charset=utf-8", + "content-type": "text/plain", + }, +}); + +// options is +// { +// method: 'post', +// url: 'https://api.github.com/markdown/raw', +// headers: { +// accept: 'text/html;charset=utf-8', +// 'content-type': 'text/plain', +// 'user-agent': userAgent +// }, +// body: 'Hello world github/linguist#1 **cool**, and #1!' +// } +``` + +### Set parameters for both the URL/query and the request body + +There are API endpoints that accept both query parameters as well as a body. 
In that case, you need to add the query parameters as templates to `options.url`, as defined in the [RFC 6570 URI Template specification](https://tools.ietf.org/html/rfc6570). + +Example + +```js +endpoint( + "POST https://uploads.github.com/repos/octocat/Hello-World/releases/1/assets{?name,label}", + { + name: "example.zip", + label: "short description", + headers: { + "content-type": "text/plain", + "content-length": 14, + authorization: `token 0000000000000000000000000000000000000001`, + }, + data: "Hello, world!", + } +); +``` + +## LICENSE + +[MIT](LICENSE) diff --git a/node_modules/@octokit/endpoint/dist-node/index.js b/node_modules/@octokit/endpoint/dist-node/index.js new file mode 100644 index 0000000..70f24ff --- /dev/null +++ b/node_modules/@octokit/endpoint/dist-node/index.js @@ -0,0 +1,390 @@ +'use strict'; + +Object.defineProperty(exports, '__esModule', { value: true }); + +var isPlainObject = require('is-plain-object'); +var universalUserAgent = require('universal-user-agent'); + +function lowercaseKeys(object) { + if (!object) { + return {}; + } + + return Object.keys(object).reduce((newObj, key) => { + newObj[key.toLowerCase()] = object[key]; + return newObj; + }, {}); +} + +function mergeDeep(defaults, options) { + const result = Object.assign({}, defaults); + Object.keys(options).forEach(key => { + if (isPlainObject.isPlainObject(options[key])) { + if (!(key in defaults)) Object.assign(result, { + [key]: options[key] + });else result[key] = mergeDeep(defaults[key], options[key]); + } else { + Object.assign(result, { + [key]: options[key] + }); + } + }); + return result; +} + +function removeUndefinedProperties(obj) { + for (const key in obj) { + if (obj[key] === undefined) { + delete obj[key]; + } + } + + return obj; +} + +function merge(defaults, route, options) { + if (typeof route === "string") { + let [method, url] = route.split(" "); + options = Object.assign(url ? { + method, + url + } : { + url: method + }, options); + } else { + options = Object.assign({}, route); + } // lowercase header names before merging with defaults to avoid duplicates + + + options.headers = lowercaseKeys(options.headers); // remove properties with undefined values before merging + + removeUndefinedProperties(options); + removeUndefinedProperties(options.headers); + const mergedOptions = mergeDeep(defaults || {}, options); // mediaType.previews arrays are merged, instead of overwritten + + if (defaults && defaults.mediaType.previews.length) { + mergedOptions.mediaType.previews = defaults.mediaType.previews.filter(preview => !mergedOptions.mediaType.previews.includes(preview)).concat(mergedOptions.mediaType.previews); + } + + mergedOptions.mediaType.previews = mergedOptions.mediaType.previews.map(preview => preview.replace(/-preview/, "")); + return mergedOptions; +} + +function addQueryParameters(url, parameters) { + const separator = /\?/.test(url) ? 
"&" : "?"; + const names = Object.keys(parameters); + + if (names.length === 0) { + return url; + } + + return url + separator + names.map(name => { + if (name === "q") { + return "q=" + parameters.q.split("+").map(encodeURIComponent).join("+"); + } + + return `${name}=${encodeURIComponent(parameters[name])}`; + }).join("&"); +} + +const urlVariableRegex = /\{[^}]+\}/g; + +function removeNonChars(variableName) { + return variableName.replace(/^\W+|\W+$/g, "").split(/,/); +} + +function extractUrlVariableNames(url) { + const matches = url.match(urlVariableRegex); + + if (!matches) { + return []; + } + + return matches.map(removeNonChars).reduce((a, b) => a.concat(b), []); +} + +function omit(object, keysToOmit) { + return Object.keys(object).filter(option => !keysToOmit.includes(option)).reduce((obj, key) => { + obj[key] = object[key]; + return obj; + }, {}); +} + +// Based on https://github.com/bramstein/url-template, licensed under BSD +// TODO: create separate package. +// +// Copyright (c) 2012-2014, Bram Stein +// All rights reserved. +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions +// are met: +// 1. Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// 2. Redistributions in binary form must reproduce the above copyright +// notice, this list of conditions and the following disclaimer in the +// documentation and/or other materials provided with the distribution. +// 3. The name of the author may not be used to endorse or promote products +// derived from this software without specific prior written permission. +// THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR IMPLIED +// WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF +// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO +// EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, +// INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY +// OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, +// EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +/* istanbul ignore file */ +function encodeReserved(str) { + return str.split(/(%[0-9A-Fa-f]{2})/g).map(function (part) { + if (!/%[0-9A-Fa-f]/.test(part)) { + part = encodeURI(part).replace(/%5B/g, "[").replace(/%5D/g, "]"); + } + + return part; + }).join(""); +} + +function encodeUnreserved(str) { + return encodeURIComponent(str).replace(/[!'()*]/g, function (c) { + return "%" + c.charCodeAt(0).toString(16).toUpperCase(); + }); +} + +function encodeValue(operator, value, key) { + value = operator === "+" || operator === "#" ? 
encodeReserved(value) : encodeUnreserved(value); + + if (key) { + return encodeUnreserved(key) + "=" + value; + } else { + return value; + } +} + +function isDefined(value) { + return value !== undefined && value !== null; +} + +function isKeyOperator(operator) { + return operator === ";" || operator === "&" || operator === "?"; +} + +function getValues(context, operator, key, modifier) { + var value = context[key], + result = []; + + if (isDefined(value) && value !== "") { + if (typeof value === "string" || typeof value === "number" || typeof value === "boolean") { + value = value.toString(); + + if (modifier && modifier !== "*") { + value = value.substring(0, parseInt(modifier, 10)); + } + + result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : "")); + } else { + if (modifier === "*") { + if (Array.isArray(value)) { + value.filter(isDefined).forEach(function (value) { + result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : "")); + }); + } else { + Object.keys(value).forEach(function (k) { + if (isDefined(value[k])) { + result.push(encodeValue(operator, value[k], k)); + } + }); + } + } else { + const tmp = []; + + if (Array.isArray(value)) { + value.filter(isDefined).forEach(function (value) { + tmp.push(encodeValue(operator, value)); + }); + } else { + Object.keys(value).forEach(function (k) { + if (isDefined(value[k])) { + tmp.push(encodeUnreserved(k)); + tmp.push(encodeValue(operator, value[k].toString())); + } + }); + } + + if (isKeyOperator(operator)) { + result.push(encodeUnreserved(key) + "=" + tmp.join(",")); + } else if (tmp.length !== 0) { + result.push(tmp.join(",")); + } + } + } + } else { + if (operator === ";") { + if (isDefined(value)) { + result.push(encodeUnreserved(key)); + } + } else if (value === "" && (operator === "&" || operator === "?")) { + result.push(encodeUnreserved(key) + "="); + } else if (value === "") { + result.push(""); + } + } + + return result; +} + +function parseUrl(template) { + return { + expand: expand.bind(null, template) + }; +} + +function expand(template, context) { + var operators = ["+", "#", ".", "/", ";", "?", "&"]; + return template.replace(/\{([^\{\}]+)\}|([^\{\}]+)/g, function (_, expression, literal) { + if (expression) { + let operator = ""; + const values = []; + + if (operators.indexOf(expression.charAt(0)) !== -1) { + operator = expression.charAt(0); + expression = expression.substr(1); + } + + expression.split(/,/g).forEach(function (variable) { + var tmp = /([^:\*]*)(?::(\d+)|(\*))?/.exec(variable); + values.push(getValues(context, operator, tmp[1], tmp[2] || tmp[3])); + }); + + if (operator && operator !== "+") { + var separator = ","; + + if (operator === "?") { + separator = "&"; + } else if (operator !== "#") { + separator = operator; + } + + return (values.length !== 0 ? 
operator : "") + values.join(separator); + } else { + return values.join(","); + } + } else { + return encodeReserved(literal); + } + }); +} + +function parse(options) { + // https://fetch.spec.whatwg.org/#methods + let method = options.method.toUpperCase(); // replace :varname with {varname} to make it RFC 6570 compatible + + let url = (options.url || "/").replace(/:([a-z]\w+)/g, "{$1}"); + let headers = Object.assign({}, options.headers); + let body; + let parameters = omit(options, ["method", "baseUrl", "url", "headers", "request", "mediaType"]); // extract variable names from URL to calculate remaining variables later + + const urlVariableNames = extractUrlVariableNames(url); + url = parseUrl(url).expand(parameters); + + if (!/^http/.test(url)) { + url = options.baseUrl + url; + } + + const omittedParameters = Object.keys(options).filter(option => urlVariableNames.includes(option)).concat("baseUrl"); + const remainingParameters = omit(parameters, omittedParameters); + const isBinaryRequest = /application\/octet-stream/i.test(headers.accept); + + if (!isBinaryRequest) { + if (options.mediaType.format) { + // e.g. application/vnd.github.v3+json => application/vnd.github.v3.raw + headers.accept = headers.accept.split(/,/).map(preview => preview.replace(/application\/vnd(\.\w+)(\.v3)?(\.\w+)?(\+json)?$/, `application/vnd$1$2.${options.mediaType.format}`)).join(","); + } + + if (options.mediaType.previews.length) { + const previewsFromAcceptHeader = headers.accept.match(/[\w-]+(?=-preview)/g) || []; + headers.accept = previewsFromAcceptHeader.concat(options.mediaType.previews).map(preview => { + const format = options.mediaType.format ? `.${options.mediaType.format}` : "+json"; + return `application/vnd.github.${preview}-preview${format}`; + }).join(","); + } + } // for GET/HEAD requests, set URL query parameters from remaining parameters + // for PATCH/POST/PUT/DELETE requests, set request body from remaining parameters + + + if (["GET", "HEAD"].includes(method)) { + url = addQueryParameters(url, remainingParameters); + } else { + if ("data" in remainingParameters) { + body = remainingParameters.data; + } else { + if (Object.keys(remainingParameters).length) { + body = remainingParameters; + } else { + headers["content-length"] = 0; + } + } + } // default content-type for JSON if body is set + + + if (!headers["content-type"] && typeof body !== "undefined") { + headers["content-type"] = "application/json; charset=utf-8"; + } // GitHub expects 'content-length: 0' header for PUT/PATCH requests without body. + // fetch does not allow to set `content-length` header, but we can set body to an empty string + + + if (["PATCH", "PUT"].includes(method) && typeof body === "undefined") { + body = ""; + } // Only return body/request keys if present + + + return Object.assign({ + method, + url, + headers + }, typeof body !== "undefined" ? { + body + } : null, options.request ? 
{ + request: options.request + } : null); +} + +function endpointWithDefaults(defaults, route, options) { + return parse(merge(defaults, route, options)); +} + +function withDefaults(oldDefaults, newDefaults) { + const DEFAULTS = merge(oldDefaults, newDefaults); + const endpoint = endpointWithDefaults.bind(null, DEFAULTS); + return Object.assign(endpoint, { + DEFAULTS, + defaults: withDefaults.bind(null, DEFAULTS), + merge: merge.bind(null, DEFAULTS), + parse + }); +} + +const VERSION = "6.0.12"; + +const userAgent = `octokit-endpoint.js/${VERSION} ${universalUserAgent.getUserAgent()}`; // DEFAULTS has all properties set that EndpointOptions has, except url. +// So we use RequestParameters and add method as additional required property. + +const DEFAULTS = { + method: "GET", + baseUrl: "https://api.github.com", + headers: { + accept: "application/vnd.github.v3+json", + "user-agent": userAgent + }, + mediaType: { + format: "", + previews: [] + } +}; + +const endpoint = withDefaults(null, DEFAULTS); + +exports.endpoint = endpoint; +//# sourceMappingURL=index.js.map diff --git a/node_modules/@octokit/endpoint/dist-node/index.js.map b/node_modules/@octokit/endpoint/dist-node/index.js.map new file mode 100644 index 0000000..003e4f2 --- /dev/null +++ b/node_modules/@octokit/endpoint/dist-node/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sources":["../dist-src/util/lowercase-keys.js","../dist-src/util/merge-deep.js","../dist-src/util/remove-undefined-properties.js","../dist-src/merge.js","../dist-src/util/add-query-parameters.js","../dist-src/util/extract-url-variable-names.js","../dist-src/util/omit.js","../dist-src/util/url-template.js","../dist-src/parse.js","../dist-src/endpoint-with-defaults.js","../dist-src/with-defaults.js","../dist-src/version.js","../dist-src/defaults.js","../dist-src/index.js"],"sourcesContent":["export function lowercaseKeys(object) {\n if (!object) {\n return {};\n }\n return Object.keys(object).reduce((newObj, key) => {\n newObj[key.toLowerCase()] = object[key];\n return newObj;\n }, {});\n}\n","import { isPlainObject } from \"is-plain-object\";\nexport function mergeDeep(defaults, options) {\n const result = Object.assign({}, defaults);\n Object.keys(options).forEach((key) => {\n if (isPlainObject(options[key])) {\n if (!(key in defaults))\n Object.assign(result, { [key]: options[key] });\n else\n result[key] = mergeDeep(defaults[key], options[key]);\n }\n else {\n Object.assign(result, { [key]: options[key] });\n }\n });\n return result;\n}\n","export function removeUndefinedProperties(obj) {\n for (const key in obj) {\n if (obj[key] === undefined) {\n delete obj[key];\n }\n }\n return obj;\n}\n","import { lowercaseKeys } from \"./util/lowercase-keys\";\nimport { mergeDeep } from \"./util/merge-deep\";\nimport { removeUndefinedProperties } from \"./util/remove-undefined-properties\";\nexport function merge(defaults, route, options) {\n if (typeof route === \"string\") {\n let [method, url] = route.split(\" \");\n options = Object.assign(url ? 
{ method, url } : { url: method }, options);\n }\n else {\n options = Object.assign({}, route);\n }\n // lowercase header names before merging with defaults to avoid duplicates\n options.headers = lowercaseKeys(options.headers);\n // remove properties with undefined values before merging\n removeUndefinedProperties(options);\n removeUndefinedProperties(options.headers);\n const mergedOptions = mergeDeep(defaults || {}, options);\n // mediaType.previews arrays are merged, instead of overwritten\n if (defaults && defaults.mediaType.previews.length) {\n mergedOptions.mediaType.previews = defaults.mediaType.previews\n .filter((preview) => !mergedOptions.mediaType.previews.includes(preview))\n .concat(mergedOptions.mediaType.previews);\n }\n mergedOptions.mediaType.previews = mergedOptions.mediaType.previews.map((preview) => preview.replace(/-preview/, \"\"));\n return mergedOptions;\n}\n","export function addQueryParameters(url, parameters) {\n const separator = /\\?/.test(url) ? \"&\" : \"?\";\n const names = Object.keys(parameters);\n if (names.length === 0) {\n return url;\n }\n return (url +\n separator +\n names\n .map((name) => {\n if (name === \"q\") {\n return (\"q=\" + parameters.q.split(\"+\").map(encodeURIComponent).join(\"+\"));\n }\n return `${name}=${encodeURIComponent(parameters[name])}`;\n })\n .join(\"&\"));\n}\n","const urlVariableRegex = /\\{[^}]+\\}/g;\nfunction removeNonChars(variableName) {\n return variableName.replace(/^\\W+|\\W+$/g, \"\").split(/,/);\n}\nexport function extractUrlVariableNames(url) {\n const matches = url.match(urlVariableRegex);\n if (!matches) {\n return [];\n }\n return matches.map(removeNonChars).reduce((a, b) => a.concat(b), []);\n}\n","export function omit(object, keysToOmit) {\n return Object.keys(object)\n .filter((option) => !keysToOmit.includes(option))\n .reduce((obj, key) => {\n obj[key] = object[key];\n return obj;\n }, {});\n}\n","// Based on https://github.com/bramstein/url-template, licensed under BSD\n// TODO: create separate package.\n//\n// Copyright (c) 2012-2014, Bram Stein\n// All rights reserved.\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions\n// are met:\n// 1. Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n// 2. Redistributions in binary form must reproduce the above copyright\n// notice, this list of conditions and the following disclaimer in the\n// documentation and/or other materials provided with the distribution.\n// 3. The name of the author may not be used to endorse or promote products\n// derived from this software without specific prior written permission.\n// THIS SOFTWARE IS PROVIDED BY THE AUTHOR \"AS IS\" AND ANY EXPRESS OR IMPLIED\n// WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF\n// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO\n// EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,\n// INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,\n// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY\n// OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING\n// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,\n// EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n/* istanbul ignore file */\nfunction encodeReserved(str) {\n return str\n .split(/(%[0-9A-Fa-f]{2})/g)\n .map(function (part) {\n if (!/%[0-9A-Fa-f]/.test(part)) {\n part = encodeURI(part).replace(/%5B/g, \"[\").replace(/%5D/g, \"]\");\n }\n return part;\n })\n .join(\"\");\n}\nfunction encodeUnreserved(str) {\n return encodeURIComponent(str).replace(/[!'()*]/g, function (c) {\n return \"%\" + c.charCodeAt(0).toString(16).toUpperCase();\n });\n}\nfunction encodeValue(operator, value, key) {\n value =\n operator === \"+\" || operator === \"#\"\n ? encodeReserved(value)\n : encodeUnreserved(value);\n if (key) {\n return encodeUnreserved(key) + \"=\" + value;\n }\n else {\n return value;\n }\n}\nfunction isDefined(value) {\n return value !== undefined && value !== null;\n}\nfunction isKeyOperator(operator) {\n return operator === \";\" || operator === \"&\" || operator === \"?\";\n}\nfunction getValues(context, operator, key, modifier) {\n var value = context[key], result = [];\n if (isDefined(value) && value !== \"\") {\n if (typeof value === \"string\" ||\n typeof value === \"number\" ||\n typeof value === \"boolean\") {\n value = value.toString();\n if (modifier && modifier !== \"*\") {\n value = value.substring(0, parseInt(modifier, 10));\n }\n result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : \"\"));\n }\n else {\n if (modifier === \"*\") {\n if (Array.isArray(value)) {\n value.filter(isDefined).forEach(function (value) {\n result.push(encodeValue(operator, value, isKeyOperator(operator) ? 
key : \"\"));\n });\n }\n else {\n Object.keys(value).forEach(function (k) {\n if (isDefined(value[k])) {\n result.push(encodeValue(operator, value[k], k));\n }\n });\n }\n }\n else {\n const tmp = [];\n if (Array.isArray(value)) {\n value.filter(isDefined).forEach(function (value) {\n tmp.push(encodeValue(operator, value));\n });\n }\n else {\n Object.keys(value).forEach(function (k) {\n if (isDefined(value[k])) {\n tmp.push(encodeUnreserved(k));\n tmp.push(encodeValue(operator, value[k].toString()));\n }\n });\n }\n if (isKeyOperator(operator)) {\n result.push(encodeUnreserved(key) + \"=\" + tmp.join(\",\"));\n }\n else if (tmp.length !== 0) {\n result.push(tmp.join(\",\"));\n }\n }\n }\n }\n else {\n if (operator === \";\") {\n if (isDefined(value)) {\n result.push(encodeUnreserved(key));\n }\n }\n else if (value === \"\" && (operator === \"&\" || operator === \"?\")) {\n result.push(encodeUnreserved(key) + \"=\");\n }\n else if (value === \"\") {\n result.push(\"\");\n }\n }\n return result;\n}\nexport function parseUrl(template) {\n return {\n expand: expand.bind(null, template),\n };\n}\nfunction expand(template, context) {\n var operators = [\"+\", \"#\", \".\", \"/\", \";\", \"?\", \"&\"];\n return template.replace(/\\{([^\\{\\}]+)\\}|([^\\{\\}]+)/g, function (_, expression, literal) {\n if (expression) {\n let operator = \"\";\n const values = [];\n if (operators.indexOf(expression.charAt(0)) !== -1) {\n operator = expression.charAt(0);\n expression = expression.substr(1);\n }\n expression.split(/,/g).forEach(function (variable) {\n var tmp = /([^:\\*]*)(?::(\\d+)|(\\*))?/.exec(variable);\n values.push(getValues(context, operator, tmp[1], tmp[2] || tmp[3]));\n });\n if (operator && operator !== \"+\") {\n var separator = \",\";\n if (operator === \"?\") {\n separator = \"&\";\n }\n else if (operator !== \"#\") {\n separator = operator;\n }\n return (values.length !== 0 ? operator : \"\") + values.join(separator);\n }\n else {\n return values.join(\",\");\n }\n }\n else {\n return encodeReserved(literal);\n }\n });\n}\n","import { addQueryParameters } from \"./util/add-query-parameters\";\nimport { extractUrlVariableNames } from \"./util/extract-url-variable-names\";\nimport { omit } from \"./util/omit\";\nimport { parseUrl } from \"./util/url-template\";\nexport function parse(options) {\n // https://fetch.spec.whatwg.org/#methods\n let method = options.method.toUpperCase();\n // replace :varname with {varname} to make it RFC 6570 compatible\n let url = (options.url || \"/\").replace(/:([a-z]\\w+)/g, \"{$1}\");\n let headers = Object.assign({}, options.headers);\n let body;\n let parameters = omit(options, [\n \"method\",\n \"baseUrl\",\n \"url\",\n \"headers\",\n \"request\",\n \"mediaType\",\n ]);\n // extract variable names from URL to calculate remaining variables later\n const urlVariableNames = extractUrlVariableNames(url);\n url = parseUrl(url).expand(parameters);\n if (!/^http/.test(url)) {\n url = options.baseUrl + url;\n }\n const omittedParameters = Object.keys(options)\n .filter((option) => urlVariableNames.includes(option))\n .concat(\"baseUrl\");\n const remainingParameters = omit(parameters, omittedParameters);\n const isBinaryRequest = /application\\/octet-stream/i.test(headers.accept);\n if (!isBinaryRequest) {\n if (options.mediaType.format) {\n // e.g. 
application/vnd.github.v3+json => application/vnd.github.v3.raw\n headers.accept = headers.accept\n .split(/,/)\n .map((preview) => preview.replace(/application\\/vnd(\\.\\w+)(\\.v3)?(\\.\\w+)?(\\+json)?$/, `application/vnd$1$2.${options.mediaType.format}`))\n .join(\",\");\n }\n if (options.mediaType.previews.length) {\n const previewsFromAcceptHeader = headers.accept.match(/[\\w-]+(?=-preview)/g) || [];\n headers.accept = previewsFromAcceptHeader\n .concat(options.mediaType.previews)\n .map((preview) => {\n const format = options.mediaType.format\n ? `.${options.mediaType.format}`\n : \"+json\";\n return `application/vnd.github.${preview}-preview${format}`;\n })\n .join(\",\");\n }\n }\n // for GET/HEAD requests, set URL query parameters from remaining parameters\n // for PATCH/POST/PUT/DELETE requests, set request body from remaining parameters\n if ([\"GET\", \"HEAD\"].includes(method)) {\n url = addQueryParameters(url, remainingParameters);\n }\n else {\n if (\"data\" in remainingParameters) {\n body = remainingParameters.data;\n }\n else {\n if (Object.keys(remainingParameters).length) {\n body = remainingParameters;\n }\n else {\n headers[\"content-length\"] = 0;\n }\n }\n }\n // default content-type for JSON if body is set\n if (!headers[\"content-type\"] && typeof body !== \"undefined\") {\n headers[\"content-type\"] = \"application/json; charset=utf-8\";\n }\n // GitHub expects 'content-length: 0' header for PUT/PATCH requests without body.\n // fetch does not allow to set `content-length` header, but we can set body to an empty string\n if ([\"PATCH\", \"PUT\"].includes(method) && typeof body === \"undefined\") {\n body = \"\";\n }\n // Only return body/request keys if present\n return Object.assign({ method, url, headers }, typeof body !== \"undefined\" ? { body } : null, options.request ? 
{ request: options.request } : null);\n}\n","import { merge } from \"./merge\";\nimport { parse } from \"./parse\";\nexport function endpointWithDefaults(defaults, route, options) {\n return parse(merge(defaults, route, options));\n}\n","import { endpointWithDefaults } from \"./endpoint-with-defaults\";\nimport { merge } from \"./merge\";\nimport { parse } from \"./parse\";\nexport function withDefaults(oldDefaults, newDefaults) {\n const DEFAULTS = merge(oldDefaults, newDefaults);\n const endpoint = endpointWithDefaults.bind(null, DEFAULTS);\n return Object.assign(endpoint, {\n DEFAULTS,\n defaults: withDefaults.bind(null, DEFAULTS),\n merge: merge.bind(null, DEFAULTS),\n parse,\n });\n}\n","export const VERSION = \"6.0.12\";\n","import { getUserAgent } from \"universal-user-agent\";\nimport { VERSION } from \"./version\";\nconst userAgent = `octokit-endpoint.js/${VERSION} ${getUserAgent()}`;\n// DEFAULTS has all properties set that EndpointOptions has, except url.\n// So we use RequestParameters and add method as additional required property.\nexport const DEFAULTS = {\n method: \"GET\",\n baseUrl: \"https://api.github.com\",\n headers: {\n accept: \"application/vnd.github.v3+json\",\n \"user-agent\": userAgent,\n },\n mediaType: {\n format: \"\",\n previews: [],\n },\n};\n","import { withDefaults } from \"./with-defaults\";\nimport { DEFAULTS } from \"./defaults\";\nexport const endpoint = withDefaults(null, DEFAULTS);\n"],"names":["lowercaseKeys","object","Object","keys","reduce","newObj","key","toLowerCase","mergeDeep","defaults","options","result","assign","forEach","isPlainObject","removeUndefinedProperties","obj","undefined","merge","route","method","url","split","headers","mergedOptions","mediaType","previews","length","filter","preview","includes","concat","map","replace","addQueryParameters","parameters","separator","test","names","name","q","encodeURIComponent","join","urlVariableRegex","removeNonChars","variableName","extractUrlVariableNames","matches","match","a","b","omit","keysToOmit","option","encodeReserved","str","part","encodeURI","encodeUnreserved","c","charCodeAt","toString","toUpperCase","encodeValue","operator","value","isDefined","isKeyOperator","getValues","context","modifier","substring","parseInt","push","Array","isArray","k","tmp","parseUrl","template","expand","bind","operators","_","expression","literal","values","indexOf","charAt","substr","variable","exec","parse","body","urlVariableNames","baseUrl","omittedParameters","remainingParameters","isBinaryRequest","accept","format","previewsFromAcceptHeader","data","request","endpointWithDefaults","withDefaults","oldDefaults","newDefaults","DEFAULTS","endpoint","VERSION","userAgent","getUserAgent"],"mappings":";;;;;;;AAAO,SAASA,aAAT,CAAuBC,MAAvB,EAA+B;AAClC,MAAI,CAACA,MAAL,EAAa;AACT,WAAO,EAAP;AACH;;AACD,SAAOC,MAAM,CAACC,IAAP,CAAYF,MAAZ,EAAoBG,MAApB,CAA2B,CAACC,MAAD,EAASC,GAAT,KAAiB;AAC/CD,IAAAA,MAAM,CAACC,GAAG,CAACC,WAAJ,EAAD,CAAN,GAA4BN,MAAM,CAACK,GAAD,CAAlC;AACA,WAAOD,MAAP;AACH,GAHM,EAGJ,EAHI,CAAP;AAIH;;ACPM,SAASG,SAAT,CAAmBC,QAAnB,EAA6BC,OAA7B,EAAsC;AACzC,QAAMC,MAAM,GAAGT,MAAM,CAACU,MAAP,CAAc,EAAd,EAAkBH,QAAlB,CAAf;AACAP,EAAAA,MAAM,CAACC,IAAP,CAAYO,OAAZ,EAAqBG,OAArB,CAA8BP,GAAD,IAAS;AAClC,QAAIQ,2BAAa,CAACJ,OAAO,CAACJ,GAAD,CAAR,CAAjB,EAAiC;AAC7B,UAAI,EAAEA,GAAG,IAAIG,QAAT,CAAJ,EACIP,MAAM,CAACU,MAAP,CAAcD,MAAd,EAAsB;AAAE,SAACL,GAAD,GAAOI,OAAO,CAACJ,GAAD;AAAhB,OAAtB,EADJ,KAGIK,MAAM,CAACL,GAAD,CAAN,GAAcE,SAAS,CAACC,QAAQ,CAACH,GAAD,CAAT,EAAgBI,OAAO,CAACJ,GAAD,CAAvB,CAAvB;AACP,KALD,MAMK;AACDJ,MAAAA,MAAM,CAACU,MAAP,CA
AcD,MAAd,EAAsB;AAAE,SAACL,GAAD,GAAOI,OAAO,CAACJ,GAAD;AAAhB,OAAtB;AACH;AACJ,GAVD;AAWA,SAAOK,MAAP;AACH;;ACfM,SAASI,yBAAT,CAAmCC,GAAnC,EAAwC;AAC3C,OAAK,MAAMV,GAAX,IAAkBU,GAAlB,EAAuB;AACnB,QAAIA,GAAG,CAACV,GAAD,CAAH,KAAaW,SAAjB,EAA4B;AACxB,aAAOD,GAAG,CAACV,GAAD,CAAV;AACH;AACJ;;AACD,SAAOU,GAAP;AACH;;ACJM,SAASE,KAAT,CAAeT,QAAf,EAAyBU,KAAzB,EAAgCT,OAAhC,EAAyC;AAC5C,MAAI,OAAOS,KAAP,KAAiB,QAArB,EAA+B;AAC3B,QAAI,CAACC,MAAD,EAASC,GAAT,IAAgBF,KAAK,CAACG,KAAN,CAAY,GAAZ,CAApB;AACAZ,IAAAA,OAAO,GAAGR,MAAM,CAACU,MAAP,CAAcS,GAAG,GAAG;AAAED,MAAAA,MAAF;AAAUC,MAAAA;AAAV,KAAH,GAAqB;AAAEA,MAAAA,GAAG,EAAED;AAAP,KAAtC,EAAuDV,OAAvD,CAAV;AACH,GAHD,MAIK;AACDA,IAAAA,OAAO,GAAGR,MAAM,CAACU,MAAP,CAAc,EAAd,EAAkBO,KAAlB,CAAV;AACH,GAP2C;;;AAS5CT,EAAAA,OAAO,CAACa,OAAR,GAAkBvB,aAAa,CAACU,OAAO,CAACa,OAAT,CAA/B,CAT4C;;AAW5CR,EAAAA,yBAAyB,CAACL,OAAD,CAAzB;AACAK,EAAAA,yBAAyB,CAACL,OAAO,CAACa,OAAT,CAAzB;AACA,QAAMC,aAAa,GAAGhB,SAAS,CAACC,QAAQ,IAAI,EAAb,EAAiBC,OAAjB,CAA/B,CAb4C;;AAe5C,MAAID,QAAQ,IAAIA,QAAQ,CAACgB,SAAT,CAAmBC,QAAnB,CAA4BC,MAA5C,EAAoD;AAChDH,IAAAA,aAAa,CAACC,SAAd,CAAwBC,QAAxB,GAAmCjB,QAAQ,CAACgB,SAAT,CAAmBC,QAAnB,CAC9BE,MAD8B,CACtBC,OAAD,IAAa,CAACL,aAAa,CAACC,SAAd,CAAwBC,QAAxB,CAAiCI,QAAjC,CAA0CD,OAA1C,CADS,EAE9BE,MAF8B,CAEvBP,aAAa,CAACC,SAAd,CAAwBC,QAFD,CAAnC;AAGH;;AACDF,EAAAA,aAAa,CAACC,SAAd,CAAwBC,QAAxB,GAAmCF,aAAa,CAACC,SAAd,CAAwBC,QAAxB,CAAiCM,GAAjC,CAAsCH,OAAD,IAAaA,OAAO,CAACI,OAAR,CAAgB,UAAhB,EAA4B,EAA5B,CAAlD,CAAnC;AACA,SAAOT,aAAP;AACH;;ACzBM,SAASU,kBAAT,CAA4Bb,GAA5B,EAAiCc,UAAjC,EAA6C;AAChD,QAAMC,SAAS,GAAG,KAAKC,IAAL,CAAUhB,GAAV,IAAiB,GAAjB,GAAuB,GAAzC;AACA,QAAMiB,KAAK,GAAGpC,MAAM,CAACC,IAAP,CAAYgC,UAAZ,CAAd;;AACA,MAAIG,KAAK,CAACX,MAAN,KAAiB,CAArB,EAAwB;AACpB,WAAON,GAAP;AACH;;AACD,SAAQA,GAAG,GACPe,SADI,GAEJE,KAAK,CACAN,GADL,CACUO,IAAD,IAAU;AACf,QAAIA,IAAI,KAAK,GAAb,EAAkB;AACd,aAAQ,OAAOJ,UAAU,CAACK,CAAX,CAAalB,KAAb,CAAmB,GAAnB,EAAwBU,GAAxB,CAA4BS,kBAA5B,EAAgDC,IAAhD,CAAqD,GAArD,CAAf;AACH;;AACD,WAAQ,GAAEH,IAAK,IAAGE,kBAAkB,CAACN,UAAU,CAACI,IAAD,CAAX,CAAmB,EAAvD;AACH,GAND,EAOKG,IAPL,CAOU,GAPV,CAFJ;AAUH;;AChBD,MAAMC,gBAAgB,GAAG,YAAzB;;AACA,SAASC,cAAT,CAAwBC,YAAxB,EAAsC;AAClC,SAAOA,YAAY,CAACZ,OAAb,CAAqB,YAArB,EAAmC,EAAnC,EAAuCX,KAAvC,CAA6C,GAA7C,CAAP;AACH;;AACD,AAAO,SAASwB,uBAAT,CAAiCzB,GAAjC,EAAsC;AACzC,QAAM0B,OAAO,GAAG1B,GAAG,CAAC2B,KAAJ,CAAUL,gBAAV,CAAhB;;AACA,MAAI,CAACI,OAAL,EAAc;AACV,WAAO,EAAP;AACH;;AACD,SAAOA,OAAO,CAACf,GAAR,CAAYY,cAAZ,EAA4BxC,MAA5B,CAAmC,CAAC6C,CAAD,EAAIC,CAAJ,KAAUD,CAAC,CAAClB,MAAF,CAASmB,CAAT,CAA7C,EAA0D,EAA1D,CAAP;AACH;;ACVM,SAASC,IAAT,CAAclD,MAAd,EAAsBmD,UAAtB,EAAkC;AACrC,SAAOlD,MAAM,CAACC,IAAP,CAAYF,MAAZ,EACF2B,MADE,CACMyB,MAAD,IAAY,CAACD,UAAU,CAACtB,QAAX,CAAoBuB,MAApB,CADlB,EAEFjD,MAFE,CAEK,CAACY,GAAD,EAAMV,GAAN,KAAc;AACtBU,IAAAA,GAAG,CAACV,GAAD,CAAH,GAAWL,MAAM,CAACK,GAAD,CAAjB;AACA,WAAOU,GAAP;AACH,GALM,EAKJ,EALI,CAAP;AAMH;;ACPD;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AACA;AACA,SAASsC,cAAT,CAAwBC,GAAxB,EAA6B;AACzB,SAAOA,GAAG,CACLjC,KADE,CACI,oBADJ,EAEFU,GAFE,CAEE,UAAUwB,IAAV,EAAgB;AACrB,QAAI,CAAC,eAAenB,IAAf,CAAoBmB,IAApB,CAAL,EAAgC;AAC5BA,MAAAA,IAAI,GAAGC,SAAS,CAACD,IAAD,CAAT,CAAgBvB,OAAhB,CAAwB,MAAxB,EAAgC,GAAhC,EAAqCA,OAArC,CAA6C,MAA7C,EAAqD,GAArD,CAAP;AACH;;AACD,WAAOuB,IAAP;AACH,GAPM,EAQFd,IARE,CAQG,EARH,CAAP;AASH;;AACD,SAASgB,gBAAT,CAA0BH,GAA1B,EAA+B;AAC3B,SAAOd,kBAAkB,CAACc,GAAD,CAAlB,CAAwBtB,OAAxB,CAAgC,UAAhC,EAA4C,UAAU0B,CAAV,EAAa;AAC5D,WAAO,MAAMA,CAAC,CAACC,UAAF,CAAa,CAAb,EAAgBC,QAAhB,CAAyB,EAAzB,EAA6BC,WAA7B,EAAb;AACH,GAFM,CAAP;AAGH;;AACD,SAASC,WAAT,CAAqBC,QAArB,EAA+BC,KAA/B,EAAsC3D,GAAtC,EAA2C;A
ACvC2D,EAAAA,KAAK,GACDD,QAAQ,KAAK,GAAb,IAAoBA,QAAQ,KAAK,GAAjC,GACMV,cAAc,CAACW,KAAD,CADpB,GAEMP,gBAAgB,CAACO,KAAD,CAH1B;;AAIA,MAAI3D,GAAJ,EAAS;AACL,WAAOoD,gBAAgB,CAACpD,GAAD,CAAhB,GAAwB,GAAxB,GAA8B2D,KAArC;AACH,GAFD,MAGK;AACD,WAAOA,KAAP;AACH;AACJ;;AACD,SAASC,SAAT,CAAmBD,KAAnB,EAA0B;AACtB,SAAOA,KAAK,KAAKhD,SAAV,IAAuBgD,KAAK,KAAK,IAAxC;AACH;;AACD,SAASE,aAAT,CAAuBH,QAAvB,EAAiC;AAC7B,SAAOA,QAAQ,KAAK,GAAb,IAAoBA,QAAQ,KAAK,GAAjC,IAAwCA,QAAQ,KAAK,GAA5D;AACH;;AACD,SAASI,SAAT,CAAmBC,OAAnB,EAA4BL,QAA5B,EAAsC1D,GAAtC,EAA2CgE,QAA3C,EAAqD;AACjD,MAAIL,KAAK,GAAGI,OAAO,CAAC/D,GAAD,CAAnB;AAAA,MAA0BK,MAAM,GAAG,EAAnC;;AACA,MAAIuD,SAAS,CAACD,KAAD,CAAT,IAAoBA,KAAK,KAAK,EAAlC,EAAsC;AAClC,QAAI,OAAOA,KAAP,KAAiB,QAAjB,IACA,OAAOA,KAAP,KAAiB,QADjB,IAEA,OAAOA,KAAP,KAAiB,SAFrB,EAEgC;AAC5BA,MAAAA,KAAK,GAAGA,KAAK,CAACJ,QAAN,EAAR;;AACA,UAAIS,QAAQ,IAAIA,QAAQ,KAAK,GAA7B,EAAkC;AAC9BL,QAAAA,KAAK,GAAGA,KAAK,CAACM,SAAN,CAAgB,CAAhB,EAAmBC,QAAQ,CAACF,QAAD,EAAW,EAAX,CAA3B,CAAR;AACH;;AACD3D,MAAAA,MAAM,CAAC8D,IAAP,CAAYV,WAAW,CAACC,QAAD,EAAWC,KAAX,EAAkBE,aAAa,CAACH,QAAD,CAAb,GAA0B1D,GAA1B,GAAgC,EAAlD,CAAvB;AACH,KARD,MASK;AACD,UAAIgE,QAAQ,KAAK,GAAjB,EAAsB;AAClB,YAAII,KAAK,CAACC,OAAN,CAAcV,KAAd,CAAJ,EAA0B;AACtBA,UAAAA,KAAK,CAACrC,MAAN,CAAasC,SAAb,EAAwBrD,OAAxB,CAAgC,UAAUoD,KAAV,EAAiB;AAC7CtD,YAAAA,MAAM,CAAC8D,IAAP,CAAYV,WAAW,CAACC,QAAD,EAAWC,KAAX,EAAkBE,aAAa,CAACH,QAAD,CAAb,GAA0B1D,GAA1B,GAAgC,EAAlD,CAAvB;AACH,WAFD;AAGH,SAJD,MAKK;AACDJ,UAAAA,MAAM,CAACC,IAAP,CAAY8D,KAAZ,EAAmBpD,OAAnB,CAA2B,UAAU+D,CAAV,EAAa;AACpC,gBAAIV,SAAS,CAACD,KAAK,CAACW,CAAD,CAAN,CAAb,EAAyB;AACrBjE,cAAAA,MAAM,CAAC8D,IAAP,CAAYV,WAAW,CAACC,QAAD,EAAWC,KAAK,CAACW,CAAD,CAAhB,EAAqBA,CAArB,CAAvB;AACH;AACJ,WAJD;AAKH;AACJ,OAbD,MAcK;AACD,cAAMC,GAAG,GAAG,EAAZ;;AACA,YAAIH,KAAK,CAACC,OAAN,CAAcV,KAAd,CAAJ,EAA0B;AACtBA,UAAAA,KAAK,CAACrC,MAAN,CAAasC,SAAb,EAAwBrD,OAAxB,CAAgC,UAAUoD,KAAV,EAAiB;AAC7CY,YAAAA,GAAG,CAACJ,IAAJ,CAASV,WAAW,CAACC,QAAD,EAAWC,KAAX,CAApB;AACH,WAFD;AAGH,SAJD,MAKK;AACD/D,UAAAA,MAAM,CAACC,IAAP,CAAY8D,KAAZ,EAAmBpD,OAAnB,CAA2B,UAAU+D,CAAV,EAAa;AACpC,gBAAIV,SAAS,CAACD,KAAK,CAACW,CAAD,CAAN,CAAb,EAAyB;AACrBC,cAAAA,GAAG,CAACJ,IAAJ,CAASf,gBAAgB,CAACkB,CAAD,CAAzB;AACAC,cAAAA,GAAG,CAACJ,IAAJ,CAASV,WAAW,CAACC,QAAD,EAAWC,KAAK,CAACW,CAAD,CAAL,CAASf,QAAT,EAAX,CAApB;AACH;AACJ,WALD;AAMH;;AACD,YAAIM,aAAa,CAACH,QAAD,CAAjB,EAA6B;AACzBrD,UAAAA,MAAM,CAAC8D,IAAP,CAAYf,gBAAgB,CAACpD,GAAD,CAAhB,GAAwB,GAAxB,GAA8BuE,GAAG,CAACnC,IAAJ,CAAS,GAAT,CAA1C;AACH,SAFD,MAGK,IAAImC,GAAG,CAAClD,MAAJ,KAAe,CAAnB,EAAsB;AACvBhB,UAAAA,MAAM,CAAC8D,IAAP,CAAYI,GAAG,CAACnC,IAAJ,CAAS,GAAT,CAAZ;AACH;AACJ;AACJ;AACJ,GAhDD,MAiDK;AACD,QAAIsB,QAAQ,KAAK,GAAjB,EAAsB;AAClB,UAAIE,SAAS,CAACD,KAAD,CAAb,EAAsB;AAClBtD,QAAAA,MAAM,CAAC8D,IAAP,CAAYf,gBAAgB,CAACpD,GAAD,CAA5B;AACH;AACJ,KAJD,MAKK,IAAI2D,KAAK,KAAK,EAAV,KAAiBD,QAAQ,KAAK,GAAb,IAAoBA,QAAQ,KAAK,GAAlD,CAAJ,EAA4D;AAC7DrD,MAAAA,MAAM,CAAC8D,IAAP,CAAYf,gBAAgB,CAACpD,GAAD,CAAhB,GAAwB,GAApC;AACH,KAFI,MAGA,IAAI2D,KAAK,KAAK,EAAd,EAAkB;AACnBtD,MAAAA,MAAM,CAAC8D,IAAP,CAAY,EAAZ;AACH;AACJ;;AACD,SAAO9D,MAAP;AACH;;AACD,AAAO,SAASmE,QAAT,CAAkBC,QAAlB,EAA4B;AAC/B,SAAO;AACHC,IAAAA,MAAM,EAAEA,MAAM,CAACC,IAAP,CAAY,IAAZ,EAAkBF,QAAlB;AADL,GAAP;AAGH;;AACD,SAASC,MAAT,CAAgBD,QAAhB,EAA0BV,OAA1B,EAAmC;AAC/B,MAAIa,SAAS,GAAG,CAAC,GAAD,EAAM,GAAN,EAAW,GAAX,EAAgB,GAAhB,EAAqB,GAArB,EAA0B,GAA1B,EAA+B,GAA/B,CAAhB;AACA,SAAOH,QAAQ,CAAC9C,OAAT,CAAiB,4BAAjB,EAA+C,UAAUkD,CAAV,EAAaC,UAAb,EAAyBC,OAAzB,EAAkC;AACpF,QAAID,UAAJ,EAAgB;AACZ,UAAIpB,QAAQ,GAAG,EAAf;AACA,YAAMsB,MAAM,GAAG,EAAf;;AACA,UAAIJ,SAAS,CAACK,OAAV,CAAkBH,UAAU,CAACI,MAAX,CAAkB,CAAlB,CAAlB,MAA4C,CAAC,CAAjD,EAAoD;AAChDxB,QAAAA,QAAQ,GAAGoB,UAAU,CAACI,M
AAX,CAAkB,CAAlB,CAAX;AACAJ,QAAAA,UAAU,GAAGA,UAAU,CAACK,MAAX,CAAkB,CAAlB,CAAb;AACH;;AACDL,MAAAA,UAAU,CAAC9D,KAAX,CAAiB,IAAjB,EAAuBT,OAAvB,CAA+B,UAAU6E,QAAV,EAAoB;AAC/C,YAAIb,GAAG,GAAG,4BAA4Bc,IAA5B,CAAiCD,QAAjC,CAAV;AACAJ,QAAAA,MAAM,CAACb,IAAP,CAAYL,SAAS,CAACC,OAAD,EAAUL,QAAV,EAAoBa,GAAG,CAAC,CAAD,CAAvB,EAA4BA,GAAG,CAAC,CAAD,CAAH,IAAUA,GAAG,CAAC,CAAD,CAAzC,CAArB;AACH,OAHD;;AAIA,UAAIb,QAAQ,IAAIA,QAAQ,KAAK,GAA7B,EAAkC;AAC9B,YAAI5B,SAAS,GAAG,GAAhB;;AACA,YAAI4B,QAAQ,KAAK,GAAjB,EAAsB;AAClB5B,UAAAA,SAAS,GAAG,GAAZ;AACH,SAFD,MAGK,IAAI4B,QAAQ,KAAK,GAAjB,EAAsB;AACvB5B,UAAAA,SAAS,GAAG4B,QAAZ;AACH;;AACD,eAAO,CAACsB,MAAM,CAAC3D,MAAP,KAAkB,CAAlB,GAAsBqC,QAAtB,GAAiC,EAAlC,IAAwCsB,MAAM,CAAC5C,IAAP,CAAYN,SAAZ,CAA/C;AACH,OATD,MAUK;AACD,eAAOkD,MAAM,CAAC5C,IAAP,CAAY,GAAZ,CAAP;AACH;AACJ,KAxBD,MAyBK;AACD,aAAOY,cAAc,CAAC+B,OAAD,CAArB;AACH;AACJ,GA7BM,CAAP;AA8BH;;AC/JM,SAASO,KAAT,CAAelF,OAAf,EAAwB;AAC3B;AACA,MAAIU,MAAM,GAAGV,OAAO,CAACU,MAAR,CAAe0C,WAAf,EAAb,CAF2B;;AAI3B,MAAIzC,GAAG,GAAG,CAACX,OAAO,CAACW,GAAR,IAAe,GAAhB,EAAqBY,OAArB,CAA6B,cAA7B,EAA6C,MAA7C,CAAV;AACA,MAAIV,OAAO,GAAGrB,MAAM,CAACU,MAAP,CAAc,EAAd,EAAkBF,OAAO,CAACa,OAA1B,CAAd;AACA,MAAIsE,IAAJ;AACA,MAAI1D,UAAU,GAAGgB,IAAI,CAACzC,OAAD,EAAU,CAC3B,QAD2B,EAE3B,SAF2B,EAG3B,KAH2B,EAI3B,SAJ2B,EAK3B,SAL2B,EAM3B,WAN2B,CAAV,CAArB,CAP2B;;AAgB3B,QAAMoF,gBAAgB,GAAGhD,uBAAuB,CAACzB,GAAD,CAAhD;AACAA,EAAAA,GAAG,GAAGyD,QAAQ,CAACzD,GAAD,CAAR,CAAc2D,MAAd,CAAqB7C,UAArB,CAAN;;AACA,MAAI,CAAC,QAAQE,IAAR,CAAahB,GAAb,CAAL,EAAwB;AACpBA,IAAAA,GAAG,GAAGX,OAAO,CAACqF,OAAR,GAAkB1E,GAAxB;AACH;;AACD,QAAM2E,iBAAiB,GAAG9F,MAAM,CAACC,IAAP,CAAYO,OAAZ,EACrBkB,MADqB,CACbyB,MAAD,IAAYyC,gBAAgB,CAAChE,QAAjB,CAA0BuB,MAA1B,CADE,EAErBtB,MAFqB,CAEd,SAFc,CAA1B;AAGA,QAAMkE,mBAAmB,GAAG9C,IAAI,CAAChB,UAAD,EAAa6D,iBAAb,CAAhC;AACA,QAAME,eAAe,GAAG,6BAA6B7D,IAA7B,CAAkCd,OAAO,CAAC4E,MAA1C,CAAxB;;AACA,MAAI,CAACD,eAAL,EAAsB;AAClB,QAAIxF,OAAO,CAACe,SAAR,CAAkB2E,MAAtB,EAA8B;AAC1B;AACA7E,MAAAA,OAAO,CAAC4E,MAAR,GAAiB5E,OAAO,CAAC4E,MAAR,CACZ7E,KADY,CACN,GADM,EAEZU,GAFY,CAEPH,OAAD,IAAaA,OAAO,CAACI,OAAR,CAAgB,kDAAhB,EAAqE,uBAAsBvB,OAAO,CAACe,SAAR,CAAkB2E,MAAO,EAApH,CAFL,EAGZ1D,IAHY,CAGP,GAHO,CAAjB;AAIH;;AACD,QAAIhC,OAAO,CAACe,SAAR,CAAkBC,QAAlB,CAA2BC,MAA/B,EAAuC;AACnC,YAAM0E,wBAAwB,GAAG9E,OAAO,CAAC4E,MAAR,CAAenD,KAAf,CAAqB,qBAArB,KAA+C,EAAhF;AACAzB,MAAAA,OAAO,CAAC4E,MAAR,GAAiBE,wBAAwB,CACpCtE,MADY,CACLrB,OAAO,CAACe,SAAR,CAAkBC,QADb,EAEZM,GAFY,CAEPH,OAAD,IAAa;AAClB,cAAMuE,MAAM,GAAG1F,OAAO,CAACe,SAAR,CAAkB2E,MAAlB,GACR,IAAG1F,OAAO,CAACe,SAAR,CAAkB2E,MAAO,EADpB,GAET,OAFN;AAGA,eAAQ,0BAAyBvE,OAAQ,WAAUuE,MAAO,EAA1D;AACH,OAPgB,EAQZ1D,IARY,CAQP,GARO,CAAjB;AASH;AACJ,GA9C0B;AAgD3B;;;AACA,MAAI,CAAC,KAAD,EAAQ,MAAR,EAAgBZ,QAAhB,CAAyBV,MAAzB,CAAJ,EAAsC;AAClCC,IAAAA,GAAG,GAAGa,kBAAkB,CAACb,GAAD,EAAM4E,mBAAN,CAAxB;AACH,GAFD,MAGK;AACD,QAAI,UAAUA,mBAAd,EAAmC;AAC/BJ,MAAAA,IAAI,GAAGI,mBAAmB,CAACK,IAA3B;AACH,KAFD,MAGK;AACD,UAAIpG,MAAM,CAACC,IAAP,CAAY8F,mBAAZ,EAAiCtE,MAArC,EAA6C;AACzCkE,QAAAA,IAAI,GAAGI,mBAAP;AACH,OAFD,MAGK;AACD1E,QAAAA,OAAO,CAAC,gBAAD,CAAP,GAA4B,CAA5B;AACH;AACJ;AACJ,GAhE0B;;;AAkE3B,MAAI,CAACA,OAAO,CAAC,cAAD,CAAR,IAA4B,OAAOsE,IAAP,KAAgB,WAAhD,EAA6D;AACzDtE,IAAAA,OAAO,CAAC,cAAD,CAAP,GAA0B,iCAA1B;AACH,GApE0B;AAsE3B;;;AACA,MAAI,CAAC,OAAD,EAAU,KAAV,EAAiBO,QAAjB,CAA0BV,MAA1B,KAAqC,OAAOyE,IAAP,KAAgB,WAAzD,EAAsE;AAClEA,IAAAA,IAAI,GAAG,EAAP;AACH,GAzE0B;;;AA2E3B,SAAO3F,MAAM,CAACU,MAAP,CAAc;AAAEQ,IAAAA,MAAF;AAAUC,IAAAA,GAAV;AAAeE,IAAAA;AAAf,GAAd,EAAwC,OAAOsE,IAAP,KAAgB,WAAhB,GAA8B;AAAEA,IAAAA;AAAF,GAA9B,GAAyC,IAAjF,EAAuFnF,OAAO,CAAC6F,OAAR,GAAkB;AAAEA,IAAAA,OAAO,EAAE7F,OAAO,CAAC6F;AAAnB,GAAlB,GAAiD,IAAxI,CAAP;AACH;;AC9EM,SAASC,oBAAT
,CAA8B/F,QAA9B,EAAwCU,KAAxC,EAA+CT,OAA/C,EAAwD;AAC3D,SAAOkF,KAAK,CAAC1E,KAAK,CAACT,QAAD,EAAWU,KAAX,EAAkBT,OAAlB,CAAN,CAAZ;AACH;;ACDM,SAAS+F,YAAT,CAAsBC,WAAtB,EAAmCC,WAAnC,EAAgD;AACnD,QAAMC,QAAQ,GAAG1F,KAAK,CAACwF,WAAD,EAAcC,WAAd,CAAtB;AACA,QAAME,QAAQ,GAAGL,oBAAoB,CAACvB,IAArB,CAA0B,IAA1B,EAAgC2B,QAAhC,CAAjB;AACA,SAAO1G,MAAM,CAACU,MAAP,CAAciG,QAAd,EAAwB;AAC3BD,IAAAA,QAD2B;AAE3BnG,IAAAA,QAAQ,EAAEgG,YAAY,CAACxB,IAAb,CAAkB,IAAlB,EAAwB2B,QAAxB,CAFiB;AAG3B1F,IAAAA,KAAK,EAAEA,KAAK,CAAC+D,IAAN,CAAW,IAAX,EAAiB2B,QAAjB,CAHoB;AAI3BhB,IAAAA;AAJ2B,GAAxB,CAAP;AAMH;;ACZM,MAAMkB,OAAO,GAAG,mBAAhB;;ACEP,MAAMC,SAAS,GAAI,uBAAsBD,OAAQ,IAAGE,+BAAY,EAAG,EAAnE;AAEA;;AACA,AAAO,MAAMJ,QAAQ,GAAG;AACpBxF,EAAAA,MAAM,EAAE,KADY;AAEpB2E,EAAAA,OAAO,EAAE,wBAFW;AAGpBxE,EAAAA,OAAO,EAAE;AACL4E,IAAAA,MAAM,EAAE,gCADH;AAEL,kBAAcY;AAFT,GAHW;AAOpBtF,EAAAA,SAAS,EAAE;AACP2E,IAAAA,MAAM,EAAE,EADD;AAEP1E,IAAAA,QAAQ,EAAE;AAFH;AAPS,CAAjB;;MCHMmF,QAAQ,GAAGJ,YAAY,CAAC,IAAD,EAAOG,QAAP,CAA7B;;;;"} \ No newline at end of file diff --git a/node_modules/@octokit/endpoint/dist-src/defaults.js b/node_modules/@octokit/endpoint/dist-src/defaults.js new file mode 100644 index 0000000..456e586 --- /dev/null +++ b/node_modules/@octokit/endpoint/dist-src/defaults.js @@ -0,0 +1,17 @@ +import { getUserAgent } from "universal-user-agent"; +import { VERSION } from "./version"; +const userAgent = `octokit-endpoint.js/${VERSION} ${getUserAgent()}`; +// DEFAULTS has all properties set that EndpointOptions has, except url. +// So we use RequestParameters and add method as additional required property. +export const DEFAULTS = { + method: "GET", + baseUrl: "https://api.github.com", + headers: { + accept: "application/vnd.github.v3+json", + "user-agent": userAgent, + }, + mediaType: { + format: "", + previews: [], + }, +}; diff --git a/node_modules/@octokit/endpoint/dist-src/endpoint-with-defaults.js b/node_modules/@octokit/endpoint/dist-src/endpoint-with-defaults.js new file mode 100644 index 0000000..5763758 --- /dev/null +++ b/node_modules/@octokit/endpoint/dist-src/endpoint-with-defaults.js @@ -0,0 +1,5 @@ +import { merge } from "./merge"; +import { parse } from "./parse"; +export function endpointWithDefaults(defaults, route, options) { + return parse(merge(defaults, route, options)); +} diff --git a/node_modules/@octokit/endpoint/dist-src/index.js b/node_modules/@octokit/endpoint/dist-src/index.js new file mode 100644 index 0000000..599917f --- /dev/null +++ b/node_modules/@octokit/endpoint/dist-src/index.js @@ -0,0 +1,3 @@ +import { withDefaults } from "./with-defaults"; +import { DEFAULTS } from "./defaults"; +export const endpoint = withDefaults(null, DEFAULTS); diff --git a/node_modules/@octokit/endpoint/dist-src/merge.js b/node_modules/@octokit/endpoint/dist-src/merge.js new file mode 100644 index 0000000..1abcecf --- /dev/null +++ b/node_modules/@octokit/endpoint/dist-src/merge.js @@ -0,0 +1,26 @@ +import { lowercaseKeys } from "./util/lowercase-keys"; +import { mergeDeep } from "./util/merge-deep"; +import { removeUndefinedProperties } from "./util/remove-undefined-properties"; +export function merge(defaults, route, options) { + if (typeof route === "string") { + let [method, url] = route.split(" "); + options = Object.assign(url ? 
{ method, url } : { url: method }, options); + } + else { + options = Object.assign({}, route); + } + // lowercase header names before merging with defaults to avoid duplicates + options.headers = lowercaseKeys(options.headers); + // remove properties with undefined values before merging + removeUndefinedProperties(options); + removeUndefinedProperties(options.headers); + const mergedOptions = mergeDeep(defaults || {}, options); + // mediaType.previews arrays are merged, instead of overwritten + if (defaults && defaults.mediaType.previews.length) { + mergedOptions.mediaType.previews = defaults.mediaType.previews + .filter((preview) => !mergedOptions.mediaType.previews.includes(preview)) + .concat(mergedOptions.mediaType.previews); + } + mergedOptions.mediaType.previews = mergedOptions.mediaType.previews.map((preview) => preview.replace(/-preview/, "")); + return mergedOptions; +} diff --git a/node_modules/@octokit/endpoint/dist-src/parse.js b/node_modules/@octokit/endpoint/dist-src/parse.js new file mode 100644 index 0000000..6bdd1bf --- /dev/null +++ b/node_modules/@octokit/endpoint/dist-src/parse.js @@ -0,0 +1,81 @@ +import { addQueryParameters } from "./util/add-query-parameters"; +import { extractUrlVariableNames } from "./util/extract-url-variable-names"; +import { omit } from "./util/omit"; +import { parseUrl } from "./util/url-template"; +export function parse(options) { + // https://fetch.spec.whatwg.org/#methods + let method = options.method.toUpperCase(); + // replace :varname with {varname} to make it RFC 6570 compatible + let url = (options.url || "/").replace(/:([a-z]\w+)/g, "{$1}"); + let headers = Object.assign({}, options.headers); + let body; + let parameters = omit(options, [ + "method", + "baseUrl", + "url", + "headers", + "request", + "mediaType", + ]); + // extract variable names from URL to calculate remaining variables later + const urlVariableNames = extractUrlVariableNames(url); + url = parseUrl(url).expand(parameters); + if (!/^http/.test(url)) { + url = options.baseUrl + url; + } + const omittedParameters = Object.keys(options) + .filter((option) => urlVariableNames.includes(option)) + .concat("baseUrl"); + const remainingParameters = omit(parameters, omittedParameters); + const isBinaryRequest = /application\/octet-stream/i.test(headers.accept); + if (!isBinaryRequest) { + if (options.mediaType.format) { + // e.g. application/vnd.github.v3+json => application/vnd.github.v3.raw + headers.accept = headers.accept + .split(/,/) + .map((preview) => preview.replace(/application\/vnd(\.\w+)(\.v3)?(\.\w+)?(\+json)?$/, `application/vnd$1$2.${options.mediaType.format}`)) + .join(","); + } + if (options.mediaType.previews.length) { + const previewsFromAcceptHeader = headers.accept.match(/[\w-]+(?=-preview)/g) || []; + headers.accept = previewsFromAcceptHeader + .concat(options.mediaType.previews) + .map((preview) => { + const format = options.mediaType.format + ? 
`.${options.mediaType.format}` + : "+json"; + return `application/vnd.github.${preview}-preview${format}`; + }) + .join(","); + } + } + // for GET/HEAD requests, set URL query parameters from remaining parameters + // for PATCH/POST/PUT/DELETE requests, set request body from remaining parameters + if (["GET", "HEAD"].includes(method)) { + url = addQueryParameters(url, remainingParameters); + } + else { + if ("data" in remainingParameters) { + body = remainingParameters.data; + } + else { + if (Object.keys(remainingParameters).length) { + body = remainingParameters; + } + else { + headers["content-length"] = 0; + } + } + } + // default content-type for JSON if body is set + if (!headers["content-type"] && typeof body !== "undefined") { + headers["content-type"] = "application/json; charset=utf-8"; + } + // GitHub expects 'content-length: 0' header for PUT/PATCH requests without body. + // fetch does not allow to set `content-length` header, but we can set body to an empty string + if (["PATCH", "PUT"].includes(method) && typeof body === "undefined") { + body = ""; + } + // Only return body/request keys if present + return Object.assign({ method, url, headers }, typeof body !== "undefined" ? { body } : null, options.request ? { request: options.request } : null); +} diff --git a/node_modules/@octokit/endpoint/dist-src/util/add-query-parameters.js b/node_modules/@octokit/endpoint/dist-src/util/add-query-parameters.js new file mode 100644 index 0000000..d26be31 --- /dev/null +++ b/node_modules/@octokit/endpoint/dist-src/util/add-query-parameters.js @@ -0,0 +1,17 @@ +export function addQueryParameters(url, parameters) { + const separator = /\?/.test(url) ? "&" : "?"; + const names = Object.keys(parameters); + if (names.length === 0) { + return url; + } + return (url + + separator + + names + .map((name) => { + if (name === "q") { + return ("q=" + parameters.q.split("+").map(encodeURIComponent).join("+")); + } + return `${name}=${encodeURIComponent(parameters[name])}`; + }) + .join("&")); +} diff --git a/node_modules/@octokit/endpoint/dist-src/util/extract-url-variable-names.js b/node_modules/@octokit/endpoint/dist-src/util/extract-url-variable-names.js new file mode 100644 index 0000000..3e75db2 --- /dev/null +++ b/node_modules/@octokit/endpoint/dist-src/util/extract-url-variable-names.js @@ -0,0 +1,11 @@ +const urlVariableRegex = /\{[^}]+\}/g; +function removeNonChars(variableName) { + return variableName.replace(/^\W+|\W+$/g, "").split(/,/); +} +export function extractUrlVariableNames(url) { + const matches = url.match(urlVariableRegex); + if (!matches) { + return []; + } + return matches.map(removeNonChars).reduce((a, b) => a.concat(b), []); +} diff --git a/node_modules/@octokit/endpoint/dist-src/util/lowercase-keys.js b/node_modules/@octokit/endpoint/dist-src/util/lowercase-keys.js new file mode 100644 index 0000000..0780642 --- /dev/null +++ b/node_modules/@octokit/endpoint/dist-src/util/lowercase-keys.js @@ -0,0 +1,9 @@ +export function lowercaseKeys(object) { + if (!object) { + return {}; + } + return Object.keys(object).reduce((newObj, key) => { + newObj[key.toLowerCase()] = object[key]; + return newObj; + }, {}); +} diff --git a/node_modules/@octokit/endpoint/dist-src/util/merge-deep.js b/node_modules/@octokit/endpoint/dist-src/util/merge-deep.js new file mode 100644 index 0000000..d92aca3 --- /dev/null +++ b/node_modules/@octokit/endpoint/dist-src/util/merge-deep.js @@ -0,0 +1,16 @@ +import { isPlainObject } from "is-plain-object"; +export function mergeDeep(defaults, options) { + 
const result = Object.assign({}, defaults); + Object.keys(options).forEach((key) => { + if (isPlainObject(options[key])) { + if (!(key in defaults)) + Object.assign(result, { [key]: options[key] }); + else + result[key] = mergeDeep(defaults[key], options[key]); + } + else { + Object.assign(result, { [key]: options[key] }); + } + }); + return result; +} diff --git a/node_modules/@octokit/endpoint/dist-src/util/omit.js b/node_modules/@octokit/endpoint/dist-src/util/omit.js new file mode 100644 index 0000000..6245031 --- /dev/null +++ b/node_modules/@octokit/endpoint/dist-src/util/omit.js @@ -0,0 +1,8 @@ +export function omit(object, keysToOmit) { + return Object.keys(object) + .filter((option) => !keysToOmit.includes(option)) + .reduce((obj, key) => { + obj[key] = object[key]; + return obj; + }, {}); +} diff --git a/node_modules/@octokit/endpoint/dist-src/util/remove-undefined-properties.js b/node_modules/@octokit/endpoint/dist-src/util/remove-undefined-properties.js new file mode 100644 index 0000000..6b5ee5f --- /dev/null +++ b/node_modules/@octokit/endpoint/dist-src/util/remove-undefined-properties.js @@ -0,0 +1,8 @@ +export function removeUndefinedProperties(obj) { + for (const key in obj) { + if (obj[key] === undefined) { + delete obj[key]; + } + } + return obj; +} diff --git a/node_modules/@octokit/endpoint/dist-src/util/url-template.js b/node_modules/@octokit/endpoint/dist-src/util/url-template.js new file mode 100644 index 0000000..439b3fe --- /dev/null +++ b/node_modules/@octokit/endpoint/dist-src/util/url-template.js @@ -0,0 +1,164 @@ +// Based on https://github.com/bramstein/url-template, licensed under BSD +// TODO: create separate package. +// +// Copyright (c) 2012-2014, Bram Stein +// All rights reserved. +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions +// are met: +// 1. Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// 2. Redistributions in binary form must reproduce the above copyright +// notice, this list of conditions and the following disclaimer in the +// documentation and/or other materials provided with the distribution. +// 3. The name of the author may not be used to endorse or promote products +// derived from this software without specific prior written permission. +// THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR IMPLIED +// WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF +// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO +// EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, +// INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY +// OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, +// EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+/* istanbul ignore file */ +function encodeReserved(str) { + return str + .split(/(%[0-9A-Fa-f]{2})/g) + .map(function (part) { + if (!/%[0-9A-Fa-f]/.test(part)) { + part = encodeURI(part).replace(/%5B/g, "[").replace(/%5D/g, "]"); + } + return part; + }) + .join(""); +} +function encodeUnreserved(str) { + return encodeURIComponent(str).replace(/[!'()*]/g, function (c) { + return "%" + c.charCodeAt(0).toString(16).toUpperCase(); + }); +} +function encodeValue(operator, value, key) { + value = + operator === "+" || operator === "#" + ? encodeReserved(value) + : encodeUnreserved(value); + if (key) { + return encodeUnreserved(key) + "=" + value; + } + else { + return value; + } +} +function isDefined(value) { + return value !== undefined && value !== null; +} +function isKeyOperator(operator) { + return operator === ";" || operator === "&" || operator === "?"; +} +function getValues(context, operator, key, modifier) { + var value = context[key], result = []; + if (isDefined(value) && value !== "") { + if (typeof value === "string" || + typeof value === "number" || + typeof value === "boolean") { + value = value.toString(); + if (modifier && modifier !== "*") { + value = value.substring(0, parseInt(modifier, 10)); + } + result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : "")); + } + else { + if (modifier === "*") { + if (Array.isArray(value)) { + value.filter(isDefined).forEach(function (value) { + result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : "")); + }); + } + else { + Object.keys(value).forEach(function (k) { + if (isDefined(value[k])) { + result.push(encodeValue(operator, value[k], k)); + } + }); + } + } + else { + const tmp = []; + if (Array.isArray(value)) { + value.filter(isDefined).forEach(function (value) { + tmp.push(encodeValue(operator, value)); + }); + } + else { + Object.keys(value).forEach(function (k) { + if (isDefined(value[k])) { + tmp.push(encodeUnreserved(k)); + tmp.push(encodeValue(operator, value[k].toString())); + } + }); + } + if (isKeyOperator(operator)) { + result.push(encodeUnreserved(key) + "=" + tmp.join(",")); + } + else if (tmp.length !== 0) { + result.push(tmp.join(",")); + } + } + } + } + else { + if (operator === ";") { + if (isDefined(value)) { + result.push(encodeUnreserved(key)); + } + } + else if (value === "" && (operator === "&" || operator === "?")) { + result.push(encodeUnreserved(key) + "="); + } + else if (value === "") { + result.push(""); + } + } + return result; +} +export function parseUrl(template) { + return { + expand: expand.bind(null, template), + }; +} +function expand(template, context) { + var operators = ["+", "#", ".", "/", ";", "?", "&"]; + return template.replace(/\{([^\{\}]+)\}|([^\{\}]+)/g, function (_, expression, literal) { + if (expression) { + let operator = ""; + const values = []; + if (operators.indexOf(expression.charAt(0)) !== -1) { + operator = expression.charAt(0); + expression = expression.substr(1); + } + expression.split(/,/g).forEach(function (variable) { + var tmp = /([^:\*]*)(?::(\d+)|(\*))?/.exec(variable); + values.push(getValues(context, operator, tmp[1], tmp[2] || tmp[3])); + }); + if (operator && operator !== "+") { + var separator = ","; + if (operator === "?") { + separator = "&"; + } + else if (operator !== "#") { + separator = operator; + } + return (values.length !== 0 ? 
operator : "") + values.join(separator); + } + else { + return values.join(","); + } + } + else { + return encodeReserved(literal); + } + }); +} diff --git a/node_modules/@octokit/endpoint/dist-src/version.js b/node_modules/@octokit/endpoint/dist-src/version.js new file mode 100644 index 0000000..930e255 --- /dev/null +++ b/node_modules/@octokit/endpoint/dist-src/version.js @@ -0,0 +1 @@ +export const VERSION = "6.0.12"; diff --git a/node_modules/@octokit/endpoint/dist-src/with-defaults.js b/node_modules/@octokit/endpoint/dist-src/with-defaults.js new file mode 100644 index 0000000..81baf6c --- /dev/null +++ b/node_modules/@octokit/endpoint/dist-src/with-defaults.js @@ -0,0 +1,13 @@ +import { endpointWithDefaults } from "./endpoint-with-defaults"; +import { merge } from "./merge"; +import { parse } from "./parse"; +export function withDefaults(oldDefaults, newDefaults) { + const DEFAULTS = merge(oldDefaults, newDefaults); + const endpoint = endpointWithDefaults.bind(null, DEFAULTS); + return Object.assign(endpoint, { + DEFAULTS, + defaults: withDefaults.bind(null, DEFAULTS), + merge: merge.bind(null, DEFAULTS), + parse, + }); +} diff --git a/node_modules/@octokit/endpoint/dist-types/defaults.d.ts b/node_modules/@octokit/endpoint/dist-types/defaults.d.ts new file mode 100644 index 0000000..30fcd20 --- /dev/null +++ b/node_modules/@octokit/endpoint/dist-types/defaults.d.ts @@ -0,0 +1,2 @@ +import { EndpointDefaults } from "@octokit/types"; +export declare const DEFAULTS: EndpointDefaults; diff --git a/node_modules/@octokit/endpoint/dist-types/endpoint-with-defaults.d.ts b/node_modules/@octokit/endpoint/dist-types/endpoint-with-defaults.d.ts new file mode 100644 index 0000000..ff39e5e --- /dev/null +++ b/node_modules/@octokit/endpoint/dist-types/endpoint-with-defaults.d.ts @@ -0,0 +1,3 @@ +import { EndpointOptions, RequestParameters, Route } from "@octokit/types"; +import { DEFAULTS } from "./defaults"; +export declare function endpointWithDefaults(defaults: typeof DEFAULTS, route: Route | EndpointOptions, options?: RequestParameters): import("@octokit/types").RequestOptions; diff --git a/node_modules/@octokit/endpoint/dist-types/index.d.ts b/node_modules/@octokit/endpoint/dist-types/index.d.ts new file mode 100644 index 0000000..1ede136 --- /dev/null +++ b/node_modules/@octokit/endpoint/dist-types/index.d.ts @@ -0,0 +1 @@ +export declare const endpoint: import("@octokit/types").EndpointInterface; diff --git a/node_modules/@octokit/endpoint/dist-types/merge.d.ts b/node_modules/@octokit/endpoint/dist-types/merge.d.ts new file mode 100644 index 0000000..b75a15e --- /dev/null +++ b/node_modules/@octokit/endpoint/dist-types/merge.d.ts @@ -0,0 +1,2 @@ +import { EndpointDefaults, RequestParameters, Route } from "@octokit/types"; +export declare function merge(defaults: EndpointDefaults | null, route?: Route | RequestParameters, options?: RequestParameters): EndpointDefaults; diff --git a/node_modules/@octokit/endpoint/dist-types/parse.d.ts b/node_modules/@octokit/endpoint/dist-types/parse.d.ts new file mode 100644 index 0000000..fbe2144 --- /dev/null +++ b/node_modules/@octokit/endpoint/dist-types/parse.d.ts @@ -0,0 +1,2 @@ +import { EndpointDefaults, RequestOptions } from "@octokit/types"; +export declare function parse(options: EndpointDefaults): RequestOptions; diff --git a/node_modules/@octokit/endpoint/dist-types/util/add-query-parameters.d.ts b/node_modules/@octokit/endpoint/dist-types/util/add-query-parameters.d.ts new file mode 100644 index 0000000..4b192ac --- /dev/null +++ 
b/node_modules/@octokit/endpoint/dist-types/util/add-query-parameters.d.ts @@ -0,0 +1,4 @@ +export declare function addQueryParameters(url: string, parameters: { + [x: string]: string | undefined; + q?: string; +}): string; diff --git a/node_modules/@octokit/endpoint/dist-types/util/extract-url-variable-names.d.ts b/node_modules/@octokit/endpoint/dist-types/util/extract-url-variable-names.d.ts new file mode 100644 index 0000000..93586d4 --- /dev/null +++ b/node_modules/@octokit/endpoint/dist-types/util/extract-url-variable-names.d.ts @@ -0,0 +1 @@ +export declare function extractUrlVariableNames(url: string): string[]; diff --git a/node_modules/@octokit/endpoint/dist-types/util/lowercase-keys.d.ts b/node_modules/@octokit/endpoint/dist-types/util/lowercase-keys.d.ts new file mode 100644 index 0000000..1daf307 --- /dev/null +++ b/node_modules/@octokit/endpoint/dist-types/util/lowercase-keys.d.ts @@ -0,0 +1,5 @@ +export declare function lowercaseKeys(object?: { + [key: string]: any; +}): { + [key: string]: any; +}; diff --git a/node_modules/@octokit/endpoint/dist-types/util/merge-deep.d.ts b/node_modules/@octokit/endpoint/dist-types/util/merge-deep.d.ts new file mode 100644 index 0000000..914411c --- /dev/null +++ b/node_modules/@octokit/endpoint/dist-types/util/merge-deep.d.ts @@ -0,0 +1 @@ +export declare function mergeDeep(defaults: any, options: any): object; diff --git a/node_modules/@octokit/endpoint/dist-types/util/omit.d.ts b/node_modules/@octokit/endpoint/dist-types/util/omit.d.ts new file mode 100644 index 0000000..06927d6 --- /dev/null +++ b/node_modules/@octokit/endpoint/dist-types/util/omit.d.ts @@ -0,0 +1,5 @@ +export declare function omit(object: { + [key: string]: any; +}, keysToOmit: string[]): { + [key: string]: any; +}; diff --git a/node_modules/@octokit/endpoint/dist-types/util/remove-undefined-properties.d.ts b/node_modules/@octokit/endpoint/dist-types/util/remove-undefined-properties.d.ts new file mode 100644 index 0000000..92d8d85 --- /dev/null +++ b/node_modules/@octokit/endpoint/dist-types/util/remove-undefined-properties.d.ts @@ -0,0 +1 @@ +export declare function removeUndefinedProperties(obj: any): any; diff --git a/node_modules/@octokit/endpoint/dist-types/util/url-template.d.ts b/node_modules/@octokit/endpoint/dist-types/util/url-template.d.ts new file mode 100644 index 0000000..5d967ca --- /dev/null +++ b/node_modules/@octokit/endpoint/dist-types/util/url-template.d.ts @@ -0,0 +1,3 @@ +export declare function parseUrl(template: string): { + expand: (context: object) => string; +}; diff --git a/node_modules/@octokit/endpoint/dist-types/version.d.ts b/node_modules/@octokit/endpoint/dist-types/version.d.ts new file mode 100644 index 0000000..330d47a --- /dev/null +++ b/node_modules/@octokit/endpoint/dist-types/version.d.ts @@ -0,0 +1 @@ +export declare const VERSION = "6.0.12"; diff --git a/node_modules/@octokit/endpoint/dist-types/with-defaults.d.ts b/node_modules/@octokit/endpoint/dist-types/with-defaults.d.ts new file mode 100644 index 0000000..6f5afd1 --- /dev/null +++ b/node_modules/@octokit/endpoint/dist-types/with-defaults.d.ts @@ -0,0 +1,2 @@ +import { EndpointInterface, RequestParameters, EndpointDefaults } from "@octokit/types"; +export declare function withDefaults(oldDefaults: EndpointDefaults | null, newDefaults: RequestParameters): EndpointInterface; diff --git a/node_modules/@octokit/endpoint/dist-web/index.js b/node_modules/@octokit/endpoint/dist-web/index.js new file mode 100644 index 0000000..e152163 --- /dev/null +++ 
b/node_modules/@octokit/endpoint/dist-web/index.js @@ -0,0 +1,381 @@ +import { isPlainObject } from 'is-plain-object'; +import { getUserAgent } from 'universal-user-agent'; + +function lowercaseKeys(object) { + if (!object) { + return {}; + } + return Object.keys(object).reduce((newObj, key) => { + newObj[key.toLowerCase()] = object[key]; + return newObj; + }, {}); +} + +function mergeDeep(defaults, options) { + const result = Object.assign({}, defaults); + Object.keys(options).forEach((key) => { + if (isPlainObject(options[key])) { + if (!(key in defaults)) + Object.assign(result, { [key]: options[key] }); + else + result[key] = mergeDeep(defaults[key], options[key]); + } + else { + Object.assign(result, { [key]: options[key] }); + } + }); + return result; +} + +function removeUndefinedProperties(obj) { + for (const key in obj) { + if (obj[key] === undefined) { + delete obj[key]; + } + } + return obj; +} + +function merge(defaults, route, options) { + if (typeof route === "string") { + let [method, url] = route.split(" "); + options = Object.assign(url ? { method, url } : { url: method }, options); + } + else { + options = Object.assign({}, route); + } + // lowercase header names before merging with defaults to avoid duplicates + options.headers = lowercaseKeys(options.headers); + // remove properties with undefined values before merging + removeUndefinedProperties(options); + removeUndefinedProperties(options.headers); + const mergedOptions = mergeDeep(defaults || {}, options); + // mediaType.previews arrays are merged, instead of overwritten + if (defaults && defaults.mediaType.previews.length) { + mergedOptions.mediaType.previews = defaults.mediaType.previews + .filter((preview) => !mergedOptions.mediaType.previews.includes(preview)) + .concat(mergedOptions.mediaType.previews); + } + mergedOptions.mediaType.previews = mergedOptions.mediaType.previews.map((preview) => preview.replace(/-preview/, "")); + return mergedOptions; +} + +function addQueryParameters(url, parameters) { + const separator = /\?/.test(url) ? "&" : "?"; + const names = Object.keys(parameters); + if (names.length === 0) { + return url; + } + return (url + + separator + + names + .map((name) => { + if (name === "q") { + return ("q=" + parameters.q.split("+").map(encodeURIComponent).join("+")); + } + return `${name}=${encodeURIComponent(parameters[name])}`; + }) + .join("&")); +} + +const urlVariableRegex = /\{[^}]+\}/g; +function removeNonChars(variableName) { + return variableName.replace(/^\W+|\W+$/g, "").split(/,/); +} +function extractUrlVariableNames(url) { + const matches = url.match(urlVariableRegex); + if (!matches) { + return []; + } + return matches.map(removeNonChars).reduce((a, b) => a.concat(b), []); +} + +function omit(object, keysToOmit) { + return Object.keys(object) + .filter((option) => !keysToOmit.includes(option)) + .reduce((obj, key) => { + obj[key] = object[key]; + return obj; + }, {}); +} + +// Based on https://github.com/bramstein/url-template, licensed under BSD +// TODO: create separate package. +// +// Copyright (c) 2012-2014, Bram Stein +// All rights reserved. +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions +// are met: +// 1. Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// 2. 
Redistributions in binary form must reproduce the above copyright +// notice, this list of conditions and the following disclaimer in the +// documentation and/or other materials provided with the distribution. +// 3. The name of the author may not be used to endorse or promote products +// derived from this software without specific prior written permission. +// THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR IMPLIED +// WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF +// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO +// EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, +// INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY +// OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, +// EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +/* istanbul ignore file */ +function encodeReserved(str) { + return str + .split(/(%[0-9A-Fa-f]{2})/g) + .map(function (part) { + if (!/%[0-9A-Fa-f]/.test(part)) { + part = encodeURI(part).replace(/%5B/g, "[").replace(/%5D/g, "]"); + } + return part; + }) + .join(""); +} +function encodeUnreserved(str) { + return encodeURIComponent(str).replace(/[!'()*]/g, function (c) { + return "%" + c.charCodeAt(0).toString(16).toUpperCase(); + }); +} +function encodeValue(operator, value, key) { + value = + operator === "+" || operator === "#" + ? encodeReserved(value) + : encodeUnreserved(value); + if (key) { + return encodeUnreserved(key) + "=" + value; + } + else { + return value; + } +} +function isDefined(value) { + return value !== undefined && value !== null; +} +function isKeyOperator(operator) { + return operator === ";" || operator === "&" || operator === "?"; +} +function getValues(context, operator, key, modifier) { + var value = context[key], result = []; + if (isDefined(value) && value !== "") { + if (typeof value === "string" || + typeof value === "number" || + typeof value === "boolean") { + value = value.toString(); + if (modifier && modifier !== "*") { + value = value.substring(0, parseInt(modifier, 10)); + } + result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : "")); + } + else { + if (modifier === "*") { + if (Array.isArray(value)) { + value.filter(isDefined).forEach(function (value) { + result.push(encodeValue(operator, value, isKeyOperator(operator) ? 
key : "")); + }); + } + else { + Object.keys(value).forEach(function (k) { + if (isDefined(value[k])) { + result.push(encodeValue(operator, value[k], k)); + } + }); + } + } + else { + const tmp = []; + if (Array.isArray(value)) { + value.filter(isDefined).forEach(function (value) { + tmp.push(encodeValue(operator, value)); + }); + } + else { + Object.keys(value).forEach(function (k) { + if (isDefined(value[k])) { + tmp.push(encodeUnreserved(k)); + tmp.push(encodeValue(operator, value[k].toString())); + } + }); + } + if (isKeyOperator(operator)) { + result.push(encodeUnreserved(key) + "=" + tmp.join(",")); + } + else if (tmp.length !== 0) { + result.push(tmp.join(",")); + } + } + } + } + else { + if (operator === ";") { + if (isDefined(value)) { + result.push(encodeUnreserved(key)); + } + } + else if (value === "" && (operator === "&" || operator === "?")) { + result.push(encodeUnreserved(key) + "="); + } + else if (value === "") { + result.push(""); + } + } + return result; +} +function parseUrl(template) { + return { + expand: expand.bind(null, template), + }; +} +function expand(template, context) { + var operators = ["+", "#", ".", "/", ";", "?", "&"]; + return template.replace(/\{([^\{\}]+)\}|([^\{\}]+)/g, function (_, expression, literal) { + if (expression) { + let operator = ""; + const values = []; + if (operators.indexOf(expression.charAt(0)) !== -1) { + operator = expression.charAt(0); + expression = expression.substr(1); + } + expression.split(/,/g).forEach(function (variable) { + var tmp = /([^:\*]*)(?::(\d+)|(\*))?/.exec(variable); + values.push(getValues(context, operator, tmp[1], tmp[2] || tmp[3])); + }); + if (operator && operator !== "+") { + var separator = ","; + if (operator === "?") { + separator = "&"; + } + else if (operator !== "#") { + separator = operator; + } + return (values.length !== 0 ? operator : "") + values.join(separator); + } + else { + return values.join(","); + } + } + else { + return encodeReserved(literal); + } + }); +} + +function parse(options) { + // https://fetch.spec.whatwg.org/#methods + let method = options.method.toUpperCase(); + // replace :varname with {varname} to make it RFC 6570 compatible + let url = (options.url || "/").replace(/:([a-z]\w+)/g, "{$1}"); + let headers = Object.assign({}, options.headers); + let body; + let parameters = omit(options, [ + "method", + "baseUrl", + "url", + "headers", + "request", + "mediaType", + ]); + // extract variable names from URL to calculate remaining variables later + const urlVariableNames = extractUrlVariableNames(url); + url = parseUrl(url).expand(parameters); + if (!/^http/.test(url)) { + url = options.baseUrl + url; + } + const omittedParameters = Object.keys(options) + .filter((option) => urlVariableNames.includes(option)) + .concat("baseUrl"); + const remainingParameters = omit(parameters, omittedParameters); + const isBinaryRequest = /application\/octet-stream/i.test(headers.accept); + if (!isBinaryRequest) { + if (options.mediaType.format) { + // e.g. application/vnd.github.v3+json => application/vnd.github.v3.raw + headers.accept = headers.accept + .split(/,/) + .map((preview) => preview.replace(/application\/vnd(\.\w+)(\.v3)?(\.\w+)?(\+json)?$/, `application/vnd$1$2.${options.mediaType.format}`)) + .join(","); + } + if (options.mediaType.previews.length) { + const previewsFromAcceptHeader = headers.accept.match(/[\w-]+(?=-preview)/g) || []; + headers.accept = previewsFromAcceptHeader + .concat(options.mediaType.previews) + .map((preview) => { + const format = options.mediaType.format + ? 
`.${options.mediaType.format}` + : "+json"; + return `application/vnd.github.${preview}-preview${format}`; + }) + .join(","); + } + } + // for GET/HEAD requests, set URL query parameters from remaining parameters + // for PATCH/POST/PUT/DELETE requests, set request body from remaining parameters + if (["GET", "HEAD"].includes(method)) { + url = addQueryParameters(url, remainingParameters); + } + else { + if ("data" in remainingParameters) { + body = remainingParameters.data; + } + else { + if (Object.keys(remainingParameters).length) { + body = remainingParameters; + } + else { + headers["content-length"] = 0; + } + } + } + // default content-type for JSON if body is set + if (!headers["content-type"] && typeof body !== "undefined") { + headers["content-type"] = "application/json; charset=utf-8"; + } + // GitHub expects 'content-length: 0' header for PUT/PATCH requests without body. + // fetch does not allow to set `content-length` header, but we can set body to an empty string + if (["PATCH", "PUT"].includes(method) && typeof body === "undefined") { + body = ""; + } + // Only return body/request keys if present + return Object.assign({ method, url, headers }, typeof body !== "undefined" ? { body } : null, options.request ? { request: options.request } : null); +} + +function endpointWithDefaults(defaults, route, options) { + return parse(merge(defaults, route, options)); +} + +function withDefaults(oldDefaults, newDefaults) { + const DEFAULTS = merge(oldDefaults, newDefaults); + const endpoint = endpointWithDefaults.bind(null, DEFAULTS); + return Object.assign(endpoint, { + DEFAULTS, + defaults: withDefaults.bind(null, DEFAULTS), + merge: merge.bind(null, DEFAULTS), + parse, + }); +} + +const VERSION = "6.0.12"; + +const userAgent = `octokit-endpoint.js/${VERSION} ${getUserAgent()}`; +// DEFAULTS has all properties set that EndpointOptions has, except url. +// So we use RequestParameters and add method as additional required property. 
+const DEFAULTS = { + method: "GET", + baseUrl: "https://api.github.com", + headers: { + accept: "application/vnd.github.v3+json", + "user-agent": userAgent, + }, + mediaType: { + format: "", + previews: [], + }, +}; + +const endpoint = withDefaults(null, DEFAULTS); + +export { endpoint }; +//# sourceMappingURL=index.js.map diff --git a/node_modules/@octokit/endpoint/dist-web/index.js.map b/node_modules/@octokit/endpoint/dist-web/index.js.map new file mode 100644 index 0000000..1d60d02 --- /dev/null +++ b/node_modules/@octokit/endpoint/dist-web/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sources":["../dist-src/util/lowercase-keys.js","../dist-src/util/merge-deep.js","../dist-src/util/remove-undefined-properties.js","../dist-src/merge.js","../dist-src/util/add-query-parameters.js","../dist-src/util/extract-url-variable-names.js","../dist-src/util/omit.js","../dist-src/util/url-template.js","../dist-src/parse.js","../dist-src/endpoint-with-defaults.js","../dist-src/with-defaults.js","../dist-src/version.js","../dist-src/defaults.js","../dist-src/index.js"],"sourcesContent":["export function lowercaseKeys(object) {\n if (!object) {\n return {};\n }\n return Object.keys(object).reduce((newObj, key) => {\n newObj[key.toLowerCase()] = object[key];\n return newObj;\n }, {});\n}\n","import { isPlainObject } from \"is-plain-object\";\nexport function mergeDeep(defaults, options) {\n const result = Object.assign({}, defaults);\n Object.keys(options).forEach((key) => {\n if (isPlainObject(options[key])) {\n if (!(key in defaults))\n Object.assign(result, { [key]: options[key] });\n else\n result[key] = mergeDeep(defaults[key], options[key]);\n }\n else {\n Object.assign(result, { [key]: options[key] });\n }\n });\n return result;\n}\n","export function removeUndefinedProperties(obj) {\n for (const key in obj) {\n if (obj[key] === undefined) {\n delete obj[key];\n }\n }\n return obj;\n}\n","import { lowercaseKeys } from \"./util/lowercase-keys\";\nimport { mergeDeep } from \"./util/merge-deep\";\nimport { removeUndefinedProperties } from \"./util/remove-undefined-properties\";\nexport function merge(defaults, route, options) {\n if (typeof route === \"string\") {\n let [method, url] = route.split(\" \");\n options = Object.assign(url ? { method, url } : { url: method }, options);\n }\n else {\n options = Object.assign({}, route);\n }\n // lowercase header names before merging with defaults to avoid duplicates\n options.headers = lowercaseKeys(options.headers);\n // remove properties with undefined values before merging\n removeUndefinedProperties(options);\n removeUndefinedProperties(options.headers);\n const mergedOptions = mergeDeep(defaults || {}, options);\n // mediaType.previews arrays are merged, instead of overwritten\n if (defaults && defaults.mediaType.previews.length) {\n mergedOptions.mediaType.previews = defaults.mediaType.previews\n .filter((preview) => !mergedOptions.mediaType.previews.includes(preview))\n .concat(mergedOptions.mediaType.previews);\n }\n mergedOptions.mediaType.previews = mergedOptions.mediaType.previews.map((preview) => preview.replace(/-preview/, \"\"));\n return mergedOptions;\n}\n","export function addQueryParameters(url, parameters) {\n const separator = /\\?/.test(url) ? 
\"&\" : \"?\";\n const names = Object.keys(parameters);\n if (names.length === 0) {\n return url;\n }\n return (url +\n separator +\n names\n .map((name) => {\n if (name === \"q\") {\n return (\"q=\" + parameters.q.split(\"+\").map(encodeURIComponent).join(\"+\"));\n }\n return `${name}=${encodeURIComponent(parameters[name])}`;\n })\n .join(\"&\"));\n}\n","const urlVariableRegex = /\\{[^}]+\\}/g;\nfunction removeNonChars(variableName) {\n return variableName.replace(/^\\W+|\\W+$/g, \"\").split(/,/);\n}\nexport function extractUrlVariableNames(url) {\n const matches = url.match(urlVariableRegex);\n if (!matches) {\n return [];\n }\n return matches.map(removeNonChars).reduce((a, b) => a.concat(b), []);\n}\n","export function omit(object, keysToOmit) {\n return Object.keys(object)\n .filter((option) => !keysToOmit.includes(option))\n .reduce((obj, key) => {\n obj[key] = object[key];\n return obj;\n }, {});\n}\n","// Based on https://github.com/bramstein/url-template, licensed under BSD\n// TODO: create separate package.\n//\n// Copyright (c) 2012-2014, Bram Stein\n// All rights reserved.\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions\n// are met:\n// 1. Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n// 2. Redistributions in binary form must reproduce the above copyright\n// notice, this list of conditions and the following disclaimer in the\n// documentation and/or other materials provided with the distribution.\n// 3. The name of the author may not be used to endorse or promote products\n// derived from this software without specific prior written permission.\n// THIS SOFTWARE IS PROVIDED BY THE AUTHOR \"AS IS\" AND ANY EXPRESS OR IMPLIED\n// WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF\n// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO\n// EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,\n// INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,\n// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY\n// OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING\n// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,\n// EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n/* istanbul ignore file */\nfunction encodeReserved(str) {\n return str\n .split(/(%[0-9A-Fa-f]{2})/g)\n .map(function (part) {\n if (!/%[0-9A-Fa-f]/.test(part)) {\n part = encodeURI(part).replace(/%5B/g, \"[\").replace(/%5D/g, \"]\");\n }\n return part;\n })\n .join(\"\");\n}\nfunction encodeUnreserved(str) {\n return encodeURIComponent(str).replace(/[!'()*]/g, function (c) {\n return \"%\" + c.charCodeAt(0).toString(16).toUpperCase();\n });\n}\nfunction encodeValue(operator, value, key) {\n value =\n operator === \"+\" || operator === \"#\"\n ? 
encodeReserved(value)\n : encodeUnreserved(value);\n if (key) {\n return encodeUnreserved(key) + \"=\" + value;\n }\n else {\n return value;\n }\n}\nfunction isDefined(value) {\n return value !== undefined && value !== null;\n}\nfunction isKeyOperator(operator) {\n return operator === \";\" || operator === \"&\" || operator === \"?\";\n}\nfunction getValues(context, operator, key, modifier) {\n var value = context[key], result = [];\n if (isDefined(value) && value !== \"\") {\n if (typeof value === \"string\" ||\n typeof value === \"number\" ||\n typeof value === \"boolean\") {\n value = value.toString();\n if (modifier && modifier !== \"*\") {\n value = value.substring(0, parseInt(modifier, 10));\n }\n result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : \"\"));\n }\n else {\n if (modifier === \"*\") {\n if (Array.isArray(value)) {\n value.filter(isDefined).forEach(function (value) {\n result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : \"\"));\n });\n }\n else {\n Object.keys(value).forEach(function (k) {\n if (isDefined(value[k])) {\n result.push(encodeValue(operator, value[k], k));\n }\n });\n }\n }\n else {\n const tmp = [];\n if (Array.isArray(value)) {\n value.filter(isDefined).forEach(function (value) {\n tmp.push(encodeValue(operator, value));\n });\n }\n else {\n Object.keys(value).forEach(function (k) {\n if (isDefined(value[k])) {\n tmp.push(encodeUnreserved(k));\n tmp.push(encodeValue(operator, value[k].toString()));\n }\n });\n }\n if (isKeyOperator(operator)) {\n result.push(encodeUnreserved(key) + \"=\" + tmp.join(\",\"));\n }\n else if (tmp.length !== 0) {\n result.push(tmp.join(\",\"));\n }\n }\n }\n }\n else {\n if (operator === \";\") {\n if (isDefined(value)) {\n result.push(encodeUnreserved(key));\n }\n }\n else if (value === \"\" && (operator === \"&\" || operator === \"?\")) {\n result.push(encodeUnreserved(key) + \"=\");\n }\n else if (value === \"\") {\n result.push(\"\");\n }\n }\n return result;\n}\nexport function parseUrl(template) {\n return {\n expand: expand.bind(null, template),\n };\n}\nfunction expand(template, context) {\n var operators = [\"+\", \"#\", \".\", \"/\", \";\", \"?\", \"&\"];\n return template.replace(/\\{([^\\{\\}]+)\\}|([^\\{\\}]+)/g, function (_, expression, literal) {\n if (expression) {\n let operator = \"\";\n const values = [];\n if (operators.indexOf(expression.charAt(0)) !== -1) {\n operator = expression.charAt(0);\n expression = expression.substr(1);\n }\n expression.split(/,/g).forEach(function (variable) {\n var tmp = /([^:\\*]*)(?::(\\d+)|(\\*))?/.exec(variable);\n values.push(getValues(context, operator, tmp[1], tmp[2] || tmp[3]));\n });\n if (operator && operator !== \"+\") {\n var separator = \",\";\n if (operator === \"?\") {\n separator = \"&\";\n }\n else if (operator !== \"#\") {\n separator = operator;\n }\n return (values.length !== 0 ? 
operator : \"\") + values.join(separator);\n }\n else {\n return values.join(\",\");\n }\n }\n else {\n return encodeReserved(literal);\n }\n });\n}\n","import { addQueryParameters } from \"./util/add-query-parameters\";\nimport { extractUrlVariableNames } from \"./util/extract-url-variable-names\";\nimport { omit } from \"./util/omit\";\nimport { parseUrl } from \"./util/url-template\";\nexport function parse(options) {\n // https://fetch.spec.whatwg.org/#methods\n let method = options.method.toUpperCase();\n // replace :varname with {varname} to make it RFC 6570 compatible\n let url = (options.url || \"/\").replace(/:([a-z]\\w+)/g, \"{$1}\");\n let headers = Object.assign({}, options.headers);\n let body;\n let parameters = omit(options, [\n \"method\",\n \"baseUrl\",\n \"url\",\n \"headers\",\n \"request\",\n \"mediaType\",\n ]);\n // extract variable names from URL to calculate remaining variables later\n const urlVariableNames = extractUrlVariableNames(url);\n url = parseUrl(url).expand(parameters);\n if (!/^http/.test(url)) {\n url = options.baseUrl + url;\n }\n const omittedParameters = Object.keys(options)\n .filter((option) => urlVariableNames.includes(option))\n .concat(\"baseUrl\");\n const remainingParameters = omit(parameters, omittedParameters);\n const isBinaryRequest = /application\\/octet-stream/i.test(headers.accept);\n if (!isBinaryRequest) {\n if (options.mediaType.format) {\n // e.g. application/vnd.github.v3+json => application/vnd.github.v3.raw\n headers.accept = headers.accept\n .split(/,/)\n .map((preview) => preview.replace(/application\\/vnd(\\.\\w+)(\\.v3)?(\\.\\w+)?(\\+json)?$/, `application/vnd$1$2.${options.mediaType.format}`))\n .join(\",\");\n }\n if (options.mediaType.previews.length) {\n const previewsFromAcceptHeader = headers.accept.match(/[\\w-]+(?=-preview)/g) || [];\n headers.accept = previewsFromAcceptHeader\n .concat(options.mediaType.previews)\n .map((preview) => {\n const format = options.mediaType.format\n ? `.${options.mediaType.format}`\n : \"+json\";\n return `application/vnd.github.${preview}-preview${format}`;\n })\n .join(\",\");\n }\n }\n // for GET/HEAD requests, set URL query parameters from remaining parameters\n // for PATCH/POST/PUT/DELETE requests, set request body from remaining parameters\n if ([\"GET\", \"HEAD\"].includes(method)) {\n url = addQueryParameters(url, remainingParameters);\n }\n else {\n if (\"data\" in remainingParameters) {\n body = remainingParameters.data;\n }\n else {\n if (Object.keys(remainingParameters).length) {\n body = remainingParameters;\n }\n else {\n headers[\"content-length\"] = 0;\n }\n }\n }\n // default content-type for JSON if body is set\n if (!headers[\"content-type\"] && typeof body !== \"undefined\") {\n headers[\"content-type\"] = \"application/json; charset=utf-8\";\n }\n // GitHub expects 'content-length: 0' header for PUT/PATCH requests without body.\n // fetch does not allow to set `content-length` header, but we can set body to an empty string\n if ([\"PATCH\", \"PUT\"].includes(method) && typeof body === \"undefined\") {\n body = \"\";\n }\n // Only return body/request keys if present\n return Object.assign({ method, url, headers }, typeof body !== \"undefined\" ? { body } : null, options.request ? 
{ request: options.request } : null);\n}\n","import { merge } from \"./merge\";\nimport { parse } from \"./parse\";\nexport function endpointWithDefaults(defaults, route, options) {\n return parse(merge(defaults, route, options));\n}\n","import { endpointWithDefaults } from \"./endpoint-with-defaults\";\nimport { merge } from \"./merge\";\nimport { parse } from \"./parse\";\nexport function withDefaults(oldDefaults, newDefaults) {\n const DEFAULTS = merge(oldDefaults, newDefaults);\n const endpoint = endpointWithDefaults.bind(null, DEFAULTS);\n return Object.assign(endpoint, {\n DEFAULTS,\n defaults: withDefaults.bind(null, DEFAULTS),\n merge: merge.bind(null, DEFAULTS),\n parse,\n });\n}\n","export const VERSION = \"6.0.12\";\n","import { getUserAgent } from \"universal-user-agent\";\nimport { VERSION } from \"./version\";\nconst userAgent = `octokit-endpoint.js/${VERSION} ${getUserAgent()}`;\n// DEFAULTS has all properties set that EndpointOptions has, except url.\n// So we use RequestParameters and add method as additional required property.\nexport const DEFAULTS = {\n method: \"GET\",\n baseUrl: \"https://api.github.com\",\n headers: {\n accept: \"application/vnd.github.v3+json\",\n \"user-agent\": userAgent,\n },\n mediaType: {\n format: \"\",\n previews: [],\n },\n};\n","import { withDefaults } from \"./with-defaults\";\nimport { DEFAULTS } from \"./defaults\";\nexport const endpoint = withDefaults(null, DEFAULTS);\n"],"names":[],"mappings":";;;AAAO,SAAS,aAAa,CAAC,MAAM,EAAE;AACtC,IAAI,IAAI,CAAC,MAAM,EAAE;AACjB,QAAQ,OAAO,EAAE,CAAC;AAClB,KAAK;AACL,IAAI,OAAO,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,MAAM,CAAC,CAAC,MAAM,EAAE,GAAG,KAAK;AACvD,QAAQ,MAAM,CAAC,GAAG,CAAC,WAAW,EAAE,CAAC,GAAG,MAAM,CAAC,GAAG,CAAC,CAAC;AAChD,QAAQ,OAAO,MAAM,CAAC;AACtB,KAAK,EAAE,EAAE,CAAC,CAAC;AACX;;ACPO,SAAS,SAAS,CAAC,QAAQ,EAAE,OAAO,EAAE;AAC7C,IAAI,MAAM,MAAM,GAAG,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,QAAQ,CAAC,CAAC;AAC/C,IAAI,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,OAAO,CAAC,CAAC,GAAG,KAAK;AAC1C,QAAQ,IAAI,aAAa,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,EAAE;AACzC,YAAY,IAAI,EAAE,GAAG,IAAI,QAAQ,CAAC;AAClC,gBAAgB,MAAM,CAAC,MAAM,CAAC,MAAM,EAAE,EAAE,CAAC,GAAG,GAAG,OAAO,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC;AAC/D;AACA,gBAAgB,MAAM,CAAC,GAAG,CAAC,GAAG,SAAS,CAAC,QAAQ,CAAC,GAAG,CAAC,EAAE,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC;AACrE,SAAS;AACT,aAAa;AACb,YAAY,MAAM,CAAC,MAAM,CAAC,MAAM,EAAE,EAAE,CAAC,GAAG,GAAG,OAAO,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC;AAC3D,SAAS;AACT,KAAK,CAAC,CAAC;AACP,IAAI,OAAO,MAAM,CAAC;AAClB,CAAC;;ACfM,SAAS,yBAAyB,CAAC,GAAG,EAAE;AAC/C,IAAI,KAAK,MAAM,GAAG,IAAI,GAAG,EAAE;AAC3B,QAAQ,IAAI,GAAG,CAAC,GAAG,CAAC,KAAK,SAAS,EAAE;AACpC,YAAY,OAAO,GAAG,CAAC,GAAG,CAAC,CAAC;AAC5B,SAAS;AACT,KAAK;AACL,IAAI,OAAO,GAAG,CAAC;AACf,CAAC;;ACJM,SAAS,KAAK,CAAC,QAAQ,EAAE,KAAK,EAAE,OAAO,EAAE;AAChD,IAAI,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;AACnC,QAAQ,IAAI,CAAC,MAAM,EAAE,GAAG,CAAC,GAAG,KAAK,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;AAC7C,QAAQ,OAAO,GAAG,MAAM,CAAC,MAAM,CAAC,GAAG,GAAG,EAAE,MAAM,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,MAAM,EAAE,EAAE,OAAO,CAAC,CAAC;AAClF,KAAK;AACL,SAAS;AACT,QAAQ,OAAO,GAAG,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,KAAK,CAAC,CAAC;AAC3C,KAAK;AACL;AACA,IAAI,OAAO,CAAC,OAAO,GAAG,aAAa,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;AACrD;AACA,IAAI,yBAAyB,CAAC,OAAO,CAAC,CAAC;AACvC,IAAI,yBAAyB,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;AAC/C,IAAI,MAAM,aAAa,GAAG,SAAS,CAAC,QAAQ,IAAI,EAAE,EAAE,OAAO,CAAC,CAAC;AAC7D;AACA,IAAI,IAAI,QAAQ,IAAI,QAAQ,CAAC,SAAS,CAAC,QAAQ,CAAC,MAAM,EAAE;AACxD,QAAQ,aAAa,CAAC,SAAS,CAAC,QAAQ,GAAG,QAAQ,CAAC,SAAS,CAAC,QAAQ;AACtE,aAAa,MAAM,CAAC,CAAC,OAAO,KAAK,CAAC,aAAa,CAAC,SAAS,CAAC,QAAQ,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC;AACrF,
aAAa,MAAM,CAAC,aAAa,CAAC,SAAS,CAAC,QAAQ,CAAC,CAAC;AACtD,KAAK;AACL,IAAI,aAAa,CAAC,SAAS,CAAC,QAAQ,GAAG,aAAa,CAAC,SAAS,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,OAAO,KAAK,OAAO,CAAC,OAAO,CAAC,UAAU,EAAE,EAAE,CAAC,CAAC,CAAC;AAC1H,IAAI,OAAO,aAAa,CAAC;AACzB,CAAC;;ACzBM,SAAS,kBAAkB,CAAC,GAAG,EAAE,UAAU,EAAE;AACpD,IAAI,MAAM,SAAS,GAAG,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,GAAG,GAAG,GAAG,CAAC;AACjD,IAAI,MAAM,KAAK,GAAG,MAAM,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC;AAC1C,IAAI,IAAI,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE;AAC5B,QAAQ,OAAO,GAAG,CAAC;AACnB,KAAK;AACL,IAAI,QAAQ,GAAG;AACf,QAAQ,SAAS;AACjB,QAAQ,KAAK;AACb,aAAa,GAAG,CAAC,CAAC,IAAI,KAAK;AAC3B,YAAY,IAAI,IAAI,KAAK,GAAG,EAAE;AAC9B,gBAAgB,QAAQ,IAAI,GAAG,UAAU,CAAC,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,kBAAkB,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE;AAC1F,aAAa;AACb,YAAY,OAAO,CAAC,EAAE,IAAI,CAAC,CAAC,EAAE,kBAAkB,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC;AACrE,SAAS,CAAC;AACV,aAAa,IAAI,CAAC,GAAG,CAAC,EAAE;AACxB,CAAC;;AChBD,MAAM,gBAAgB,GAAG,YAAY,CAAC;AACtC,SAAS,cAAc,CAAC,YAAY,EAAE;AACtC,IAAI,OAAO,YAAY,CAAC,OAAO,CAAC,YAAY,EAAE,EAAE,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;AAC7D,CAAC;AACD,AAAO,SAAS,uBAAuB,CAAC,GAAG,EAAE;AAC7C,IAAI,MAAM,OAAO,GAAG,GAAG,CAAC,KAAK,CAAC,gBAAgB,CAAC,CAAC;AAChD,IAAI,IAAI,CAAC,OAAO,EAAE;AAClB,QAAQ,OAAO,EAAE,CAAC;AAClB,KAAK;AACL,IAAI,OAAO,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC;AACzE,CAAC;;ACVM,SAAS,IAAI,CAAC,MAAM,EAAE,UAAU,EAAE;AACzC,IAAI,OAAO,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC;AAC9B,SAAS,MAAM,CAAC,CAAC,MAAM,KAAK,CAAC,UAAU,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC;AACzD,SAAS,MAAM,CAAC,CAAC,GAAG,EAAE,GAAG,KAAK;AAC9B,QAAQ,GAAG,CAAC,GAAG,CAAC,GAAG,MAAM,CAAC,GAAG,CAAC,CAAC;AAC/B,QAAQ,OAAO,GAAG,CAAC;AACnB,KAAK,EAAE,EAAE,CAAC,CAAC;AACX,CAAC;;ACPD;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,SAAS,cAAc,CAAC,GAAG,EAAE;AAC7B,IAAI,OAAO,GAAG;AACd,SAAS,KAAK,CAAC,oBAAoB,CAAC;AACpC,SAAS,GAAG,CAAC,UAAU,IAAI,EAAE;AAC7B,QAAQ,IAAI,CAAC,cAAc,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE;AACxC,YAAY,IAAI,GAAG,SAAS,CAAC,IAAI,CAAC,CAAC,OAAO,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC,OAAO,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;AAC7E,SAAS;AACT,QAAQ,OAAO,IAAI,CAAC;AACpB,KAAK,CAAC;AACN,SAAS,IAAI,CAAC,EAAE,CAAC,CAAC;AAClB,CAAC;AACD,SAAS,gBAAgB,CAAC,GAAG,EAAE;AAC/B,IAAI,OAAO,kBAAkB,CAAC,GAAG,CAAC,CAAC,OAAO,CAAC,UAAU,EAAE,UAAU,CAAC,EAAE;AACpE,QAAQ,OAAO,GAAG,GAAG,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC,WAAW,EAAE,CAAC;AAChE,KAAK,CAAC,CAAC;AACP,CAAC;AACD,SAAS,WAAW,CAAC,QAAQ,EAAE,KAAK,EAAE,GAAG,EAAE;AAC3C,IAAI,KAAK;AACT,QAAQ,QAAQ,KAAK,GAAG,IAAI,QAAQ,KAAK,GAAG;AAC5C,cAAc,cAAc,CAAC,KAAK,CAAC;AACnC,cAAc,gBAAgB,CAAC,KAAK,CAAC,CAAC;AACtC,IAAI,IAAI,GAAG,EAAE;AACb,QAAQ,OAAO,gBAAgB,CAAC,GAAG,CAAC,GAAG,GAAG,GAAG,KAAK,CAAC;AACnD,KAAK;AACL,SAAS;AACT,QAAQ,OAAO,KAAK,CAAC;AACrB,KAAK;AACL,CAAC;AACD,SAAS,SAAS,CAAC,KAAK,EAAE;AAC1B,IAAI,OAAO,KAAK,KAAK,SAAS,IAAI,KAAK,KAAK,IAAI,CAAC;AACjD,CAAC;AACD,SAAS,aAAa,CAAC,QAAQ,EAAE;AACjC,IAAI,OAAO,QAAQ,KAAK,GAAG,IAAI,QAAQ,KAAK,GAAG,IAAI,QAAQ,KAAK,GAAG,CAAC;AACpE,CAAC;AACD,SAAS,SAAS,CAAC,OAAO,EAAE,QAAQ,EAAE,GAAG,EAAE,QAAQ,EAAE;AACrD,IAAI,IAAI,KAAK,GAAG,OAAO,CAAC,GAAG,CAAC,EAAE,MAAM,GAAG,EAAE,CAAC;AAC1C,IAAI,IAAI,SAAS,CAAC,KAAK,CAAC,IAAI,KAAK,KAAK,EAAE,EAAE;AAC1C,QAAQ,IAAI,OAAO,KAAK,KAAK,QAAQ;AACrC,YAAY,OAAO,KAAK,KAAK,QAAQ;AACrC,YAAY,OAAO,KAAK,KAAK,SAAS,EAAE;AACxC,YAAY,KAAK,GAAG,KAAK,CAAC,QAAQ,EAAE,CAAC;AACrC,YAAY,IAAI,QAAQ,IAAI,QAAQ,KAAK,GAAG,EAAE;AAC9C,gBAAgB,KAAK,GAAG,KAAK,CAAC,SAAS,CAAC,CAAC,EAAE,QAAQ,CAAC,QAAQ,EAAE,EAAE,CAAC,CAAC,CAAC;AACnE,aAAa;AACb,YAAY,M
AAM,CAAC,IAAI,CAAC,WAAW,CAAC,QAAQ,EAAE,KAAK,EAAE,aAAa,CAAC,QAAQ,CAAC,GAAG,GAAG,GAAG,EAAE,CAAC,CAAC,CAAC;AAC1F,SAAS;AACT,aAAa;AACb,YAAY,IAAI,QAAQ,KAAK,GAAG,EAAE;AAClC,gBAAgB,IAAI,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC,EAAE;AAC1C,oBAAoB,KAAK,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC,OAAO,CAAC,UAAU,KAAK,EAAE;AACrE,wBAAwB,MAAM,CAAC,IAAI,CAAC,WAAW,CAAC,QAAQ,EAAE,KAAK,EAAE,aAAa,CAAC,QAAQ,CAAC,GAAG,GAAG,GAAG,EAAE,CAAC,CAAC,CAAC;AACtG,qBAAqB,CAAC,CAAC;AACvB,iBAAiB;AACjB,qBAAqB;AACrB,oBAAoB,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,OAAO,CAAC,UAAU,CAAC,EAAE;AAC5D,wBAAwB,IAAI,SAAS,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,EAAE;AACjD,4BAA4B,MAAM,CAAC,IAAI,CAAC,WAAW,CAAC,QAAQ,EAAE,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC;AAC5E,yBAAyB;AACzB,qBAAqB,CAAC,CAAC;AACvB,iBAAiB;AACjB,aAAa;AACb,iBAAiB;AACjB,gBAAgB,MAAM,GAAG,GAAG,EAAE,CAAC;AAC/B,gBAAgB,IAAI,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC,EAAE;AAC1C,oBAAoB,KAAK,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC,OAAO,CAAC,UAAU,KAAK,EAAE;AACrE,wBAAwB,GAAG,CAAC,IAAI,CAAC,WAAW,CAAC,QAAQ,EAAE,KAAK,CAAC,CAAC,CAAC;AAC/D,qBAAqB,CAAC,CAAC;AACvB,iBAAiB;AACjB,qBAAqB;AACrB,oBAAoB,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,OAAO,CAAC,UAAU,CAAC,EAAE;AAC5D,wBAAwB,IAAI,SAAS,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,EAAE;AACjD,4BAA4B,GAAG,CAAC,IAAI,CAAC,gBAAgB,CAAC,CAAC,CAAC,CAAC,CAAC;AAC1D,4BAA4B,GAAG,CAAC,IAAI,CAAC,WAAW,CAAC,QAAQ,EAAE,KAAK,CAAC,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAC,CAAC,CAAC;AACjF,yBAAyB;AACzB,qBAAqB,CAAC,CAAC;AACvB,iBAAiB;AACjB,gBAAgB,IAAI,aAAa,CAAC,QAAQ,CAAC,EAAE;AAC7C,oBAAoB,MAAM,CAAC,IAAI,CAAC,gBAAgB,CAAC,GAAG,CAAC,GAAG,GAAG,GAAG,GAAG,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC;AAC7E,iBAAiB;AACjB,qBAAqB,IAAI,GAAG,CAAC,MAAM,KAAK,CAAC,EAAE;AAC3C,oBAAoB,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC;AAC/C,iBAAiB;AACjB,aAAa;AACb,SAAS;AACT,KAAK;AACL,SAAS;AACT,QAAQ,IAAI,QAAQ,KAAK,GAAG,EAAE;AAC9B,YAAY,IAAI,SAAS,CAAC,KAAK,CAAC,EAAE;AAClC,gBAAgB,MAAM,CAAC,IAAI,CAAC,gBAAgB,CAAC,GAAG,CAAC,CAAC,CAAC;AACnD,aAAa;AACb,SAAS;AACT,aAAa,IAAI,KAAK,KAAK,EAAE,KAAK,QAAQ,KAAK,GAAG,IAAI,QAAQ,KAAK,GAAG,CAAC,EAAE;AACzE,YAAY,MAAM,CAAC,IAAI,CAAC,gBAAgB,CAAC,GAAG,CAAC,GAAG,GAAG,CAAC,CAAC;AACrD,SAAS;AACT,aAAa,IAAI,KAAK,KAAK,EAAE,EAAE;AAC/B,YAAY,MAAM,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;AAC5B,SAAS;AACT,KAAK;AACL,IAAI,OAAO,MAAM,CAAC;AAClB,CAAC;AACD,AAAO,SAAS,QAAQ,CAAC,QAAQ,EAAE;AACnC,IAAI,OAAO;AACX,QAAQ,MAAM,EAAE,MAAM,CAAC,IAAI,CAAC,IAAI,EAAE,QAAQ,CAAC;AAC3C,KAAK,CAAC;AACN,CAAC;AACD,SAAS,MAAM,CAAC,QAAQ,EAAE,OAAO,EAAE;AACnC,IAAI,IAAI,SAAS,GAAG,CAAC,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,CAAC,CAAC;AACxD,IAAI,OAAO,QAAQ,CAAC,OAAO,CAAC,4BAA4B,EAAE,UAAU,CAAC,EAAE,UAAU,EAAE,OAAO,EAAE;AAC5F,QAAQ,IAAI,UAAU,EAAE;AACxB,YAAY,IAAI,QAAQ,GAAG,EAAE,CAAC;AAC9B,YAAY,MAAM,MAAM,GAAG,EAAE,CAAC;AAC9B,YAAY,IAAI,SAAS,CAAC,OAAO,CAAC,UAAU,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,EAAE;AAChE,gBAAgB,QAAQ,GAAG,UAAU,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC;AAChD,gBAAgB,UAAU,GAAG,UAAU,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC;AAClD,aAAa;AACb,YAAY,UAAU,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,OAAO,CAAC,UAAU,QAAQ,EAAE;AAC/D,gBAAgB,IAAI,GAAG,GAAG,2BAA2B,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;AACrE,gBAAgB,MAAM,CAAC,IAAI,CAAC,SAAS,CAAC,OAAO,EAAE,QAAQ,EAAE,GAAG,CAAC,CAAC,CAAC,EAAE,GAAG,CAAC,CAAC,CAAC,IAAI,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACpF,aAAa,CAAC,CAAC;AACf,YAAY,IAAI,QAAQ,IAAI,QAAQ,KAAK,GAAG,EAAE;AAC9C,gBAAgB,IAAI,SAAS,GAAG,GAAG,CAAC;AACpC,gBAAgB,IAAI,QAAQ,KAAK,GAAG,EAAE;AACtC,oBAAoB,SAAS,GAAG,GAAG,CAAC;AACpC,iBAAiB;AACjB,qBAAqB,IAAI,QAAQ,KAAK,GAAG,EAAE;AAC3C,oBAAoB,SAAS,GAAG,QAAQ,CAAC;AACzC,iBAAiB;AACjB,gBAAgB,OAAO,CAAC,MAAM,CAAC,MAAM,KAAK,CAAC,GAAG,QAAQ,GAAG,EAAE,IAAI,MAAM,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC;AACtF,aAAa;AACb,i
BAAiB;AACjB,gBAAgB,OAAO,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACxC,aAAa;AACb,SAAS;AACT,aAAa;AACb,YAAY,OAAO,cAAc,CAAC,OAAO,CAAC,CAAC;AAC3C,SAAS;AACT,KAAK,CAAC,CAAC;AACP,CAAC;;AC/JM,SAAS,KAAK,CAAC,OAAO,EAAE;AAC/B;AACA,IAAI,IAAI,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC,WAAW,EAAE,CAAC;AAC9C;AACA,IAAI,IAAI,GAAG,GAAG,CAAC,OAAO,CAAC,GAAG,IAAI,GAAG,EAAE,OAAO,CAAC,cAAc,EAAE,MAAM,CAAC,CAAC;AACnE,IAAI,IAAI,OAAO,GAAG,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,OAAO,CAAC,OAAO,CAAC,CAAC;AACrD,IAAI,IAAI,IAAI,CAAC;AACb,IAAI,IAAI,UAAU,GAAG,IAAI,CAAC,OAAO,EAAE;AACnC,QAAQ,QAAQ;AAChB,QAAQ,SAAS;AACjB,QAAQ,KAAK;AACb,QAAQ,SAAS;AACjB,QAAQ,SAAS;AACjB,QAAQ,WAAW;AACnB,KAAK,CAAC,CAAC;AACP;AACA,IAAI,MAAM,gBAAgB,GAAG,uBAAuB,CAAC,GAAG,CAAC,CAAC;AAC1D,IAAI,GAAG,GAAG,QAAQ,CAAC,GAAG,CAAC,CAAC,MAAM,CAAC,UAAU,CAAC,CAAC;AAC3C,IAAI,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE;AAC5B,QAAQ,GAAG,GAAG,OAAO,CAAC,OAAO,GAAG,GAAG,CAAC;AACpC,KAAK;AACL,IAAI,MAAM,iBAAiB,GAAG,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC;AAClD,SAAS,MAAM,CAAC,CAAC,MAAM,KAAK,gBAAgB,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC;AAC9D,SAAS,MAAM,CAAC,SAAS,CAAC,CAAC;AAC3B,IAAI,MAAM,mBAAmB,GAAG,IAAI,CAAC,UAAU,EAAE,iBAAiB,CAAC,CAAC;AACpE,IAAI,MAAM,eAAe,GAAG,4BAA4B,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;AAC9E,IAAI,IAAI,CAAC,eAAe,EAAE;AAC1B,QAAQ,IAAI,OAAO,CAAC,SAAS,CAAC,MAAM,EAAE;AACtC;AACA,YAAY,OAAO,CAAC,MAAM,GAAG,OAAO,CAAC,MAAM;AAC3C,iBAAiB,KAAK,CAAC,GAAG,CAAC;AAC3B,iBAAiB,GAAG,CAAC,CAAC,OAAO,KAAK,OAAO,CAAC,OAAO,CAAC,kDAAkD,EAAE,CAAC,oBAAoB,EAAE,OAAO,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC;AACzJ,iBAAiB,IAAI,CAAC,GAAG,CAAC,CAAC;AAC3B,SAAS;AACT,QAAQ,IAAI,OAAO,CAAC,SAAS,CAAC,QAAQ,CAAC,MAAM,EAAE;AAC/C,YAAY,MAAM,wBAAwB,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,qBAAqB,CAAC,IAAI,EAAE,CAAC;AAC/F,YAAY,OAAO,CAAC,MAAM,GAAG,wBAAwB;AACrD,iBAAiB,MAAM,CAAC,OAAO,CAAC,SAAS,CAAC,QAAQ,CAAC;AACnD,iBAAiB,GAAG,CAAC,CAAC,OAAO,KAAK;AAClC,gBAAgB,MAAM,MAAM,GAAG,OAAO,CAAC,SAAS,CAAC,MAAM;AACvD,sBAAsB,CAAC,CAAC,EAAE,OAAO,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC;AACpD,sBAAsB,OAAO,CAAC;AAC9B,gBAAgB,OAAO,CAAC,uBAAuB,EAAE,OAAO,CAAC,QAAQ,EAAE,MAAM,CAAC,CAAC,CAAC;AAC5E,aAAa,CAAC;AACd,iBAAiB,IAAI,CAAC,GAAG,CAAC,CAAC;AAC3B,SAAS;AACT,KAAK;AACL;AACA;AACA,IAAI,IAAI,CAAC,KAAK,EAAE,MAAM,CAAC,CAAC,QAAQ,CAAC,MAAM,CAAC,EAAE;AAC1C,QAAQ,GAAG,GAAG,kBAAkB,CAAC,GAAG,EAAE,mBAAmB,CAAC,CAAC;AAC3D,KAAK;AACL,SAAS;AACT,QAAQ,IAAI,MAAM,IAAI,mBAAmB,EAAE;AAC3C,YAAY,IAAI,GAAG,mBAAmB,CAAC,IAAI,CAAC;AAC5C,SAAS;AACT,aAAa;AACb,YAAY,IAAI,MAAM,CAAC,IAAI,CAAC,mBAAmB,CAAC,CAAC,MAAM,EAAE;AACzD,gBAAgB,IAAI,GAAG,mBAAmB,CAAC;AAC3C,aAAa;AACb,iBAAiB;AACjB,gBAAgB,OAAO,CAAC,gBAAgB,CAAC,GAAG,CAAC,CAAC;AAC9C,aAAa;AACb,SAAS;AACT,KAAK;AACL;AACA,IAAI,IAAI,CAAC,OAAO,CAAC,cAAc,CAAC,IAAI,OAAO,IAAI,KAAK,WAAW,EAAE;AACjE,QAAQ,OAAO,CAAC,cAAc,CAAC,GAAG,iCAAiC,CAAC;AACpE,KAAK;AACL;AACA;AACA,IAAI,IAAI,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC,QAAQ,CAAC,MAAM,CAAC,IAAI,OAAO,IAAI,KAAK,WAAW,EAAE;AAC1E,QAAQ,IAAI,GAAG,EAAE,CAAC;AAClB,KAAK;AACL;AACA,IAAI,OAAO,MAAM,CAAC,MAAM,CAAC,EAAE,MAAM,EAAE,GAAG,EAAE,OAAO,EAAE,EAAE,OAAO,IAAI,KAAK,WAAW,GAAG,EAAE,IAAI,EAAE,GAAG,IAAI,EAAE,OAAO,CAAC,OAAO,GAAG,EAAE,OAAO,EAAE,OAAO,CAAC,OAAO,EAAE,GAAG,IAAI,CAAC,CAAC;AACzJ,CAAC;;AC9EM,SAAS,oBAAoB,CAAC,QAAQ,EAAE,KAAK,EAAE,OAAO,EAAE;AAC/D,IAAI,OAAO,KAAK,CAAC,KAAK,CAAC,QAAQ,EAAE,KAAK,EAAE,OAAO,CAAC,CAAC,CAAC;AAClD,CAAC;;ACDM,SAAS,YAAY,CAAC,WAAW,EAAE,WAAW,EAAE;AACvD,IAAI,MAAM,QAAQ,GAAG,KAAK,CAAC,WAAW,EAAE,WAAW,CAAC,CAAC;AACrD,IAAI,MAAM,QAAQ,GAAG,oBAAoB,CAAC,IAAI,CAAC,IAAI,EAAE,QAAQ,CAAC,CAAC;AAC/D,IAAI,OAAO,MAAM,CAAC,MAAM,CAAC,QAAQ,EAAE;AACnC,QAAQ,QAAQ;AAChB,QAAQ,QAAQ,EAAE,YAAY,CAAC,IAAI,CAAC,IAAI,EAAE,QAAQ,CAAC;AACnD,QAAQ,KAAK,EAAE,KAAK,CAAC,IAAI,CAAC,IAAI,EAAE,QAAQ,CAAC;A
ACzC,QAAQ,KAAK;AACb,KAAK,CAAC,CAAC;AACP,CAAC;;ACZM,MAAM,OAAO,GAAG,mBAAmB,CAAC;;ACE3C,MAAM,SAAS,GAAG,CAAC,oBAAoB,EAAE,OAAO,CAAC,CAAC,EAAE,YAAY,EAAE,CAAC,CAAC,CAAC;AACrE;AACA;AACA,AAAO,MAAM,QAAQ,GAAG;AACxB,IAAI,MAAM,EAAE,KAAK;AACjB,IAAI,OAAO,EAAE,wBAAwB;AACrC,IAAI,OAAO,EAAE;AACb,QAAQ,MAAM,EAAE,gCAAgC;AAChD,QAAQ,YAAY,EAAE,SAAS;AAC/B,KAAK;AACL,IAAI,SAAS,EAAE;AACf,QAAQ,MAAM,EAAE,EAAE;AAClB,QAAQ,QAAQ,EAAE,EAAE;AACpB,KAAK;AACL,CAAC,CAAC;;ACdU,MAAC,QAAQ,GAAG,YAAY,CAAC,IAAI,EAAE,QAAQ,CAAC;;;;"} \ No newline at end of file diff --git a/node_modules/@octokit/endpoint/package.json b/node_modules/@octokit/endpoint/package.json new file mode 100644 index 0000000..4e4d425 --- /dev/null +++ b/node_modules/@octokit/endpoint/package.json @@ -0,0 +1,44 @@ +{ + "name": "@octokit/endpoint", + "description": "Turns REST API endpoints into generic request options", + "version": "6.0.12", + "license": "MIT", + "files": [ + "dist-*/", + "bin/" + ], + "pika": true, + "sideEffects": false, + "keywords": [ + "octokit", + "github", + "api", + "rest" + ], + "repository": "github:octokit/endpoint.js", + "dependencies": { + "@octokit/types": "^6.0.3", + "is-plain-object": "^5.0.0", + "universal-user-agent": "^6.0.0" + }, + "devDependencies": { + "@pika/pack": "^0.5.0", + "@pika/plugin-build-node": "^0.9.0", + "@pika/plugin-build-web": "^0.9.0", + "@pika/plugin-ts-standard-pkg": "^0.9.0", + "@types/jest": "^26.0.0", + "jest": "^27.0.0", + "prettier": "2.3.1", + "semantic-release": "^17.0.0", + "semantic-release-plugin-update-version-in-files": "^1.0.0", + "ts-jest": "^27.0.0-next.12", + "typescript": "^4.0.2" + }, + "publishConfig": { + "access": "public" + }, + "source": "dist-src/index.js", + "types": "dist-types/index.d.ts", + "main": "dist-node/index.js", + "module": "dist-web/index.js" +} diff --git a/node_modules/@octokit/graphql/LICENSE b/node_modules/@octokit/graphql/LICENSE new file mode 100644 index 0000000..af5366d --- /dev/null +++ b/node_modules/@octokit/graphql/LICENSE @@ -0,0 +1,21 @@ +The MIT License + +Copyright (c) 2018 Octokit contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/node_modules/@octokit/graphql/README.md b/node_modules/@octokit/graphql/README.md new file mode 100644 index 0000000..fe7881c --- /dev/null +++ b/node_modules/@octokit/graphql/README.md @@ -0,0 +1,409 @@ +# graphql.js + +> GitHub GraphQL API client for browsers and Node + +[![@latest](https://img.shields.io/npm/v/@octokit/graphql.svg)](https://www.npmjs.com/package/@octokit/graphql) +[![Build Status](https://github.com/octokit/graphql.js/workflows/Test/badge.svg)](https://github.com/octokit/graphql.js/actions?query=workflow%3ATest+branch%3Amaster) + + + +- [Usage](#usage) + - [Send a simple query](#send-a-simple-query) + - [Authentication](#authentication) + - [Variables](#variables) + - [Pass query together with headers and variables](#pass-query-together-with-headers-and-variables) + - [Use with GitHub Enterprise](#use-with-github-enterprise) + - [Use custom `@octokit/request` instance](#use-custom-octokitrequest-instance) +- [TypeScript](#typescript) + - [Additional Types](#additional-types) +- [Errors](#errors) +- [Partial responses](#partial-responses) +- [Writing tests](#writing-tests) +- [License](#license) + + + +## Usage + + + + + + +
+Browsers + + +Load `@octokit/graphql` directly from [cdn.skypack.dev](https://cdn.skypack.dev) + +```html + +``` + +
+Node + + +Install with npm install @octokit/graphql + +```js +const { graphql } = require("@octokit/graphql"); +// or: import { graphql } from "@octokit/graphql"; +``` + +
+ +### Send a simple query + +```js +const { repository } = await graphql( + ` + { + repository(owner: "octokit", name: "graphql.js") { + issues(last: 3) { + edges { + node { + title + } + } + } + } + } + `, + { + headers: { + authorization: `token secret123`, + }, + } +); +``` + +### Authentication + +The simplest way to authenticate a request is to set the `Authorization` header, e.g. to a [personal access token](https://github.com/settings/tokens/). + +```js +const graphqlWithAuth = graphql.defaults({ + headers: { + authorization: `token secret123`, + }, +}); +const { repository } = await graphqlWithAuth(` + { + repository(owner: "octokit", name: "graphql.js") { + issues(last: 3) { + edges { + node { + title + } + } + } + } + } +`); +``` + +For more complex authentication strategies such as GitHub Apps or Basic, we recommend the according authentication library exported by [`@octokit/auth`](https://github.com/octokit/auth.js). + +```js +const { createAppAuth } = require("@octokit/auth-app"); +const auth = createAppAuth({ + id: process.env.APP_ID, + privateKey: process.env.PRIVATE_KEY, + installationId: 123, +}); +const graphqlWithAuth = graphql.defaults({ + request: { + hook: auth.hook, + }, +}); + +const { repository } = await graphqlWithAuth( + `{ + repository(owner: "octokit", name: "graphql.js") { + issues(last: 3) { + edges { + node { + title + } + } + } + } + }` +); +``` + +### Variables + +⚠️ Do not use [template literals](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Template_literals) in the query strings as they make your code vulnerable to query injection attacks (see [#2](https://github.com/octokit/graphql.js/issues/2)). Use variables instead: + +```js +const { lastIssues } = await graphql( + ` + query lastIssues($owner: String!, $repo: String!, $num: Int = 3) { + repository(owner: $owner, name: $repo) { + issues(last: $num) { + edges { + node { + title + } + } + } + } + } + `, + { + owner: "octokit", + repo: "graphql.js", + headers: { + authorization: `token secret123`, + }, + } +); +``` + +### Pass query together with headers and variables + +```js +const { graphql } = require("@octokit/graphql"); +const { lastIssues } = await graphql({ + query: `query lastIssues($owner: String!, $repo: String!, $num: Int = 3) { + repository(owner:$owner, name:$repo) { + issues(last:$num) { + edges { + node { + title + } + } + } + } + }`, + owner: "octokit", + repo: "graphql.js", + headers: { + authorization: `token secret123`, + }, +}); +``` + +### Use with GitHub Enterprise + +```js +let { graphql } = require("@octokit/graphql"); +graphql = graphql.defaults({ + baseUrl: "https://github-enterprise.acme-inc.com/api", + headers: { + authorization: `token secret123`, + }, +}); +const { repository } = await graphql(` + { + repository(owner: "acme-project", name: "acme-repo") { + issues(last: 3) { + edges { + node { + title + } + } + } + } + } +`); +``` + +### Use custom `@octokit/request` instance + +```js +const { request } = require("@octokit/request"); +const { withCustomRequest } = require("@octokit/graphql"); + +let requestCounter = 0; +const myRequest = request.defaults({ + headers: { + authentication: "token secret123", + }, + request: { + hook(request, options) { + requestCounter++; + return request(options); + }, + }, +}); +const myGraphql = withCustomRequest(myRequest); +await request("/"); +await myGraphql(` + { + repository(owner: "acme-project", name: "acme-repo") { + issues(last: 3) { + edges { + node { + title + } + } + } + } + } +`); +// requestCounter is 
now 2 +``` + +## TypeScript + +`@octokit/graphql` is exposing proper types for its usage with TypeScript projects. + +### Additional Types + +Additionally, `GraphQlQueryResponseData` has been exposed to users: + +```ts +import type { GraphQlQueryResponseData } from "@octokit/graphql"; +``` + +## Errors + +In case of a GraphQL error, `error.message` is set to a combined message describing all errors returned by the endpoint. +All errors can be accessed at `error.errors`. `error.request` has the request options such as query, variables and headers set for easier debugging. + +```js +let { graphql, GraphqlResponseError } = require("@octokit/graphql"); +graphqlt = graphql.defaults({ + headers: { + authorization: `token secret123`, + }, +}); +const query = `{ + viewer { + bioHtml + } +}`; + +try { + const result = await graphql(query); +} catch (error) { + if (error instanceof GraphqlResponseError) { + // do something with the error, allowing you to detect a graphql response error, + // compared to accidentally catching unrelated errors. + + // server responds with an object like the following (as an example) + // class GraphqlResponseError { + // "headers": { + // "status": "403", + // }, + // "data": null, + // "errors": [{ + // "message": "Field 'bioHtml' doesn't exist on type 'User'", + // "locations": [{ + // "line": 3, + // "column": 5 + // }] + // }] + // } + + console.log("Request failed:", error.request); // { query, variables: {}, headers: { authorization: 'token secret123' } } + console.log(error.message); // Field 'bioHtml' doesn't exist on type 'User' + } else { + // handle non-GraphQL error + } +} +``` + +## Partial responses + +A GraphQL query may respond with partial data accompanied by errors. In this case we will throw an error but the partial data will still be accessible through `error.data` + +```js +let { graphql } = require("@octokit/graphql"); +graphql = graphql.defaults({ + headers: { + authorization: `token secret123`, + }, +}); +const query = `{ + repository(name: "probot", owner: "probot") { + name + ref(qualifiedName: "master") { + target { + ... on Commit { + history(first: 25, after: "invalid cursor") { + nodes { + message + } + } + } + } + } + } +}`; + +try { + const result = await graphql(query); +} catch (error) { + // server responds with + // { + // "data": { + // "repository": { + // "name": "probot", + // "ref": null + // } + // }, + // "errors": [ + // { + // "type": "INVALID_CURSOR_ARGUMENTS", + // "path": [ + // "repository", + // "ref", + // "target", + // "history" + // ], + // "locations": [ + // { + // "line": 7, + // "column": 11 + // } + // ], + // "message": "`invalid cursor` does not appear to be a valid cursor." + // } + // ] + // } + + console.log("Request failed:", error.request); // { query, variables: {}, headers: { authorization: 'token secret123' } } + console.log(error.message); // `invalid cursor` does not appear to be a valid cursor. + console.log(error.data); // { repository: { name: 'probot', ref: null } } +} +``` + +## Writing tests + +You can pass a replacement for [the built-in fetch implementation](https://github.com/bitinn/node-fetch) as `request.fetch` option. 
For example, using [fetch-mock](http://www.wheresrhys.co.uk/fetch-mock/) works great to write tests + +```js +const assert = require("assert"); +const fetchMock = require("fetch-mock/es5/server"); + +const { graphql } = require("@octokit/graphql"); + +graphql("{ viewer { login } }", { + headers: { + authorization: "token secret123", + }, + request: { + fetch: fetchMock + .sandbox() + .post("https://api.github.com/graphql", (url, options) => { + assert.strictEqual(options.headers.authorization, "token secret123"); + assert.strictEqual( + options.body, + '{"query":"{ viewer { login } }"}', + "Sends correct query" + ); + return { data: {} }; + }), + }, +}); +``` + +## License + +[MIT](LICENSE) diff --git a/node_modules/@octokit/graphql/dist-node/index.js b/node_modules/@octokit/graphql/dist-node/index.js new file mode 100644 index 0000000..6401417 --- /dev/null +++ b/node_modules/@octokit/graphql/dist-node/index.js @@ -0,0 +1,118 @@ +'use strict'; + +Object.defineProperty(exports, '__esModule', { value: true }); + +var request = require('@octokit/request'); +var universalUserAgent = require('universal-user-agent'); + +const VERSION = "4.8.0"; + +function _buildMessageForResponseErrors(data) { + return `Request failed due to following response errors:\n` + data.errors.map(e => ` - ${e.message}`).join("\n"); +} + +class GraphqlResponseError extends Error { + constructor(request, headers, response) { + super(_buildMessageForResponseErrors(response)); + this.request = request; + this.headers = headers; + this.response = response; + this.name = "GraphqlResponseError"; // Expose the errors and response data in their shorthand properties. + + this.errors = response.errors; + this.data = response.data; // Maintains proper stack trace (only available on V8) + + /* istanbul ignore next */ + + if (Error.captureStackTrace) { + Error.captureStackTrace(this, this.constructor); + } + } + +} + +const NON_VARIABLE_OPTIONS = ["method", "baseUrl", "url", "headers", "request", "query", "mediaType"]; +const FORBIDDEN_VARIABLE_OPTIONS = ["query", "method", "url"]; +const GHES_V3_SUFFIX_REGEX = /\/api\/v3\/?$/; +function graphql(request, query, options) { + if (options) { + if (typeof query === "string" && "query" in options) { + return Promise.reject(new Error(`[@octokit/graphql] "query" cannot be used as variable name`)); + } + + for (const key in options) { + if (!FORBIDDEN_VARIABLE_OPTIONS.includes(key)) continue; + return Promise.reject(new Error(`[@octokit/graphql] "${key}" cannot be used as variable name`)); + } + } + + const parsedOptions = typeof query === "string" ? 
Object.assign({ + query + }, options) : query; + const requestOptions = Object.keys(parsedOptions).reduce((result, key) => { + if (NON_VARIABLE_OPTIONS.includes(key)) { + result[key] = parsedOptions[key]; + return result; + } + + if (!result.variables) { + result.variables = {}; + } + + result.variables[key] = parsedOptions[key]; + return result; + }, {}); // workaround for GitHub Enterprise baseUrl set with /api/v3 suffix + // https://github.com/octokit/auth-app.js/issues/111#issuecomment-657610451 + + const baseUrl = parsedOptions.baseUrl || request.endpoint.DEFAULTS.baseUrl; + + if (GHES_V3_SUFFIX_REGEX.test(baseUrl)) { + requestOptions.url = baseUrl.replace(GHES_V3_SUFFIX_REGEX, "/api/graphql"); + } + + return request(requestOptions).then(response => { + if (response.data.errors) { + const headers = {}; + + for (const key of Object.keys(response.headers)) { + headers[key] = response.headers[key]; + } + + throw new GraphqlResponseError(requestOptions, headers, response.data); + } + + return response.data.data; + }); +} + +function withDefaults(request$1, newDefaults) { + const newRequest = request$1.defaults(newDefaults); + + const newApi = (query, options) => { + return graphql(newRequest, query, options); + }; + + return Object.assign(newApi, { + defaults: withDefaults.bind(null, newRequest), + endpoint: request.request.endpoint + }); +} + +const graphql$1 = withDefaults(request.request, { + headers: { + "user-agent": `octokit-graphql.js/${VERSION} ${universalUserAgent.getUserAgent()}` + }, + method: "POST", + url: "/graphql" +}); +function withCustomRequest(customRequest) { + return withDefaults(customRequest, { + method: "POST", + url: "/graphql" + }); +} + +exports.GraphqlResponseError = GraphqlResponseError; +exports.graphql = graphql$1; +exports.withCustomRequest = withCustomRequest; +//# sourceMappingURL=index.js.map diff --git a/node_modules/@octokit/graphql/dist-node/index.js.map b/node_modules/@octokit/graphql/dist-node/index.js.map new file mode 100644 index 0000000..873a8d4 --- /dev/null +++ b/node_modules/@octokit/graphql/dist-node/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sources":["../dist-src/version.js","../dist-src/error.js","../dist-src/graphql.js","../dist-src/with-defaults.js","../dist-src/index.js"],"sourcesContent":["export const VERSION = \"4.8.0\";\n","function _buildMessageForResponseErrors(data) {\n return (`Request failed due to following response errors:\\n` +\n data.errors.map((e) => ` - ${e.message}`).join(\"\\n\"));\n}\nexport class GraphqlResponseError extends Error {\n constructor(request, headers, response) {\n super(_buildMessageForResponseErrors(response));\n this.request = request;\n this.headers = headers;\n this.response = response;\n this.name = \"GraphqlResponseError\";\n // Expose the errors and response data in their shorthand properties.\n this.errors = response.errors;\n this.data = response.data;\n // Maintains proper stack trace (only available on V8)\n /* istanbul ignore next */\n if (Error.captureStackTrace) {\n Error.captureStackTrace(this, this.constructor);\n }\n }\n}\n","import { GraphqlResponseError } from \"./error\";\nconst NON_VARIABLE_OPTIONS = [\n \"method\",\n \"baseUrl\",\n \"url\",\n \"headers\",\n \"request\",\n \"query\",\n \"mediaType\",\n];\nconst FORBIDDEN_VARIABLE_OPTIONS = [\"query\", \"method\", \"url\"];\nconst GHES_V3_SUFFIX_REGEX = /\\/api\\/v3\\/?$/;\nexport function graphql(request, query, options) {\n if (options) {\n if (typeof query === \"string\" && \"query\" in options) {\n return 
Promise.reject(new Error(`[@octokit/graphql] \"query\" cannot be used as variable name`));\n }\n for (const key in options) {\n if (!FORBIDDEN_VARIABLE_OPTIONS.includes(key))\n continue;\n return Promise.reject(new Error(`[@octokit/graphql] \"${key}\" cannot be used as variable name`));\n }\n }\n const parsedOptions = typeof query === \"string\" ? Object.assign({ query }, options) : query;\n const requestOptions = Object.keys(parsedOptions).reduce((result, key) => {\n if (NON_VARIABLE_OPTIONS.includes(key)) {\n result[key] = parsedOptions[key];\n return result;\n }\n if (!result.variables) {\n result.variables = {};\n }\n result.variables[key] = parsedOptions[key];\n return result;\n }, {});\n // workaround for GitHub Enterprise baseUrl set with /api/v3 suffix\n // https://github.com/octokit/auth-app.js/issues/111#issuecomment-657610451\n const baseUrl = parsedOptions.baseUrl || request.endpoint.DEFAULTS.baseUrl;\n if (GHES_V3_SUFFIX_REGEX.test(baseUrl)) {\n requestOptions.url = baseUrl.replace(GHES_V3_SUFFIX_REGEX, \"/api/graphql\");\n }\n return request(requestOptions).then((response) => {\n if (response.data.errors) {\n const headers = {};\n for (const key of Object.keys(response.headers)) {\n headers[key] = response.headers[key];\n }\n throw new GraphqlResponseError(requestOptions, headers, response.data);\n }\n return response.data.data;\n });\n}\n","import { request as Request } from \"@octokit/request\";\nimport { graphql } from \"./graphql\";\nexport function withDefaults(request, newDefaults) {\n const newRequest = request.defaults(newDefaults);\n const newApi = (query, options) => {\n return graphql(newRequest, query, options);\n };\n return Object.assign(newApi, {\n defaults: withDefaults.bind(null, newRequest),\n endpoint: Request.endpoint,\n });\n}\n","import { request } from \"@octokit/request\";\nimport { getUserAgent } from \"universal-user-agent\";\nimport { VERSION } from \"./version\";\nimport { withDefaults } from \"./with-defaults\";\nexport const graphql = withDefaults(request, {\n headers: {\n \"user-agent\": `octokit-graphql.js/${VERSION} ${getUserAgent()}`,\n },\n method: \"POST\",\n url: \"/graphql\",\n});\nexport { GraphqlResponseError } from \"./error\";\nexport function withCustomRequest(customRequest) {\n return withDefaults(customRequest, {\n method: \"POST\",\n url: \"/graphql\",\n 
});\n}\n"],"names":["VERSION","_buildMessageForResponseErrors","data","errors","map","e","message","join","GraphqlResponseError","Error","constructor","request","headers","response","name","captureStackTrace","NON_VARIABLE_OPTIONS","FORBIDDEN_VARIABLE_OPTIONS","GHES_V3_SUFFIX_REGEX","graphql","query","options","Promise","reject","key","includes","parsedOptions","Object","assign","requestOptions","keys","reduce","result","variables","baseUrl","endpoint","DEFAULTS","test","url","replace","then","withDefaults","newDefaults","newRequest","defaults","newApi","bind","Request","getUserAgent","method","withCustomRequest","customRequest"],"mappings":";;;;;;;AAAO,MAAMA,OAAO,GAAG,mBAAhB;;ACAP,SAASC,8BAAT,CAAwCC,IAAxC,EAA8C;AAC1C,SAAS,oDAAD,GACJA,IAAI,CAACC,MAAL,CAAYC,GAAZ,CAAiBC,CAAD,IAAQ,MAAKA,CAAC,CAACC,OAAQ,EAAvC,EAA0CC,IAA1C,CAA+C,IAA/C,CADJ;AAEH;;AACD,AAAO,MAAMC,oBAAN,SAAmCC,KAAnC,CAAyC;AAC5CC,EAAAA,WAAW,CAACC,OAAD,EAAUC,OAAV,EAAmBC,QAAnB,EAA6B;AACpC,UAAMZ,8BAA8B,CAACY,QAAD,CAApC;AACA,SAAKF,OAAL,GAAeA,OAAf;AACA,SAAKC,OAAL,GAAeA,OAAf;AACA,SAAKC,QAAL,GAAgBA,QAAhB;AACA,SAAKC,IAAL,GAAY,sBAAZ,CALoC;;AAOpC,SAAKX,MAAL,GAAcU,QAAQ,CAACV,MAAvB;AACA,SAAKD,IAAL,GAAYW,QAAQ,CAACX,IAArB,CARoC;;AAUpC;;AACA,QAAIO,KAAK,CAACM,iBAAV,EAA6B;AACzBN,MAAAA,KAAK,CAACM,iBAAN,CAAwB,IAAxB,EAA8B,KAAKL,WAAnC;AACH;AACJ;;AAf2C;;ACHhD,MAAMM,oBAAoB,GAAG,CACzB,QADyB,EAEzB,SAFyB,EAGzB,KAHyB,EAIzB,SAJyB,EAKzB,SALyB,EAMzB,OANyB,EAOzB,WAPyB,CAA7B;AASA,MAAMC,0BAA0B,GAAG,CAAC,OAAD,EAAU,QAAV,EAAoB,KAApB,CAAnC;AACA,MAAMC,oBAAoB,GAAG,eAA7B;AACA,AAAO,SAASC,OAAT,CAAiBR,OAAjB,EAA0BS,KAA1B,EAAiCC,OAAjC,EAA0C;AAC7C,MAAIA,OAAJ,EAAa;AACT,QAAI,OAAOD,KAAP,KAAiB,QAAjB,IAA6B,WAAWC,OAA5C,EAAqD;AACjD,aAAOC,OAAO,CAACC,MAAR,CAAe,IAAId,KAAJ,CAAW,4DAAX,CAAf,CAAP;AACH;;AACD,SAAK,MAAMe,GAAX,IAAkBH,OAAlB,EAA2B;AACvB,UAAI,CAACJ,0BAA0B,CAACQ,QAA3B,CAAoCD,GAApC,CAAL,EACI;AACJ,aAAOF,OAAO,CAACC,MAAR,CAAe,IAAId,KAAJ,CAAW,uBAAsBe,GAAI,mCAArC,CAAf,CAAP;AACH;AACJ;;AACD,QAAME,aAAa,GAAG,OAAON,KAAP,KAAiB,QAAjB,GAA4BO,MAAM,CAACC,MAAP,CAAc;AAAER,IAAAA;AAAF,GAAd,EAAyBC,OAAzB,CAA5B,GAAgED,KAAtF;AACA,QAAMS,cAAc,GAAGF,MAAM,CAACG,IAAP,CAAYJ,aAAZ,EAA2BK,MAA3B,CAAkC,CAACC,MAAD,EAASR,GAAT,KAAiB;AACtE,QAAIR,oBAAoB,CAACS,QAArB,CAA8BD,GAA9B,CAAJ,EAAwC;AACpCQ,MAAAA,MAAM,CAACR,GAAD,CAAN,GAAcE,aAAa,CAACF,GAAD,CAA3B;AACA,aAAOQ,MAAP;AACH;;AACD,QAAI,CAACA,MAAM,CAACC,SAAZ,EAAuB;AACnBD,MAAAA,MAAM,CAACC,SAAP,GAAmB,EAAnB;AACH;;AACDD,IAAAA,MAAM,CAACC,SAAP,CAAiBT,GAAjB,IAAwBE,aAAa,CAACF,GAAD,CAArC;AACA,WAAOQ,MAAP;AACH,GAVsB,EAUpB,EAVoB,CAAvB,CAZ6C;AAwB7C;;AACA,QAAME,OAAO,GAAGR,aAAa,CAACQ,OAAd,IAAyBvB,OAAO,CAACwB,QAAR,CAAiBC,QAAjB,CAA0BF,OAAnE;;AACA,MAAIhB,oBAAoB,CAACmB,IAArB,CAA0BH,OAA1B,CAAJ,EAAwC;AACpCL,IAAAA,cAAc,CAACS,GAAf,GAAqBJ,OAAO,CAACK,OAAR,CAAgBrB,oBAAhB,EAAsC,cAAtC,CAArB;AACH;;AACD,SAAOP,OAAO,CAACkB,cAAD,CAAP,CAAwBW,IAAxB,CAA8B3B,QAAD,IAAc;AAC9C,QAAIA,QAAQ,CAACX,IAAT,CAAcC,MAAlB,EAA0B;AACtB,YAAMS,OAAO,GAAG,EAAhB;;AACA,WAAK,MAAMY,GAAX,IAAkBG,MAAM,CAACG,IAAP,CAAYjB,QAAQ,CAACD,OAArB,CAAlB,EAAiD;AAC7CA,QAAAA,OAAO,CAACY,GAAD,CAAP,GAAeX,QAAQ,CAACD,OAAT,CAAiBY,GAAjB,CAAf;AACH;;AACD,YAAM,IAAIhB,oBAAJ,CAAyBqB,cAAzB,EAAyCjB,OAAzC,EAAkDC,QAAQ,CAACX,IAA3D,CAAN;AACH;;AACD,WAAOW,QAAQ,CAACX,IAAT,CAAcA,IAArB;AACH,GATM,CAAP;AAUH;;ACjDM,SAASuC,YAAT,CAAsB9B,SAAtB,EAA+B+B,WAA/B,EAA4C;AAC/C,QAAMC,UAAU,GAAGhC,SAAO,CAACiC,QAAR,CAAiBF,WAAjB,CAAnB;;AACA,QAAMG,MAAM,GAAG,CAACzB,KAAD,EAAQC,OAAR,KAAoB;AAC/B,WAAOF,OAAO,CAACwB,UAAD,EAAavB,KAAb,EAAoBC,OAApB,CAAd;AACH,GAFD;;AAGA,SAAOM,MAAM,CAACC,MAAP,CAAciB,MAAd,EAAsB;AACzBD,IAAAA,QAAQ,EAAEH,YAAY,CAACK,IAAb,CAAkB,IAAlB,EAAwBH,UAAxB,CADe;AAEzBR,IAAAA,QAAQ,EAAEY,eAAO,CA
ACZ;AAFO,GAAtB,CAAP;AAIH;;MCPYhB,SAAO,GAAGsB,YAAY,CAAC9B,eAAD,EAAU;AACzCC,EAAAA,OAAO,EAAE;AACL,kBAAe,sBAAqBZ,OAAQ,IAAGgD,+BAAY,EAAG;AADzD,GADgC;AAIzCC,EAAAA,MAAM,EAAE,MAJiC;AAKzCX,EAAAA,GAAG,EAAE;AALoC,CAAV,CAA5B;AAOP,AACO,SAASY,iBAAT,CAA2BC,aAA3B,EAA0C;AAC7C,SAAOV,YAAY,CAACU,aAAD,EAAgB;AAC/BF,IAAAA,MAAM,EAAE,MADuB;AAE/BX,IAAAA,GAAG,EAAE;AAF0B,GAAhB,CAAnB;AAIH;;;;;;"} \ No newline at end of file diff --git a/node_modules/@octokit/graphql/dist-src/error.js b/node_modules/@octokit/graphql/dist-src/error.js new file mode 100644 index 0000000..182f967 --- /dev/null +++ b/node_modules/@octokit/graphql/dist-src/error.js @@ -0,0 +1,21 @@ +function _buildMessageForResponseErrors(data) { + return (`Request failed due to following response errors:\n` + + data.errors.map((e) => ` - ${e.message}`).join("\n")); +} +export class GraphqlResponseError extends Error { + constructor(request, headers, response) { + super(_buildMessageForResponseErrors(response)); + this.request = request; + this.headers = headers; + this.response = response; + this.name = "GraphqlResponseError"; + // Expose the errors and response data in their shorthand properties. + this.errors = response.errors; + this.data = response.data; + // Maintains proper stack trace (only available on V8) + /* istanbul ignore next */ + if (Error.captureStackTrace) { + Error.captureStackTrace(this, this.constructor); + } + } +} diff --git a/node_modules/@octokit/graphql/dist-src/graphql.js b/node_modules/@octokit/graphql/dist-src/graphql.js new file mode 100644 index 0000000..8297f84 --- /dev/null +++ b/node_modules/@octokit/graphql/dist-src/graphql.js @@ -0,0 +1,52 @@ +import { GraphqlResponseError } from "./error"; +const NON_VARIABLE_OPTIONS = [ + "method", + "baseUrl", + "url", + "headers", + "request", + "query", + "mediaType", +]; +const FORBIDDEN_VARIABLE_OPTIONS = ["query", "method", "url"]; +const GHES_V3_SUFFIX_REGEX = /\/api\/v3\/?$/; +export function graphql(request, query, options) { + if (options) { + if (typeof query === "string" && "query" in options) { + return Promise.reject(new Error(`[@octokit/graphql] "query" cannot be used as variable name`)); + } + for (const key in options) { + if (!FORBIDDEN_VARIABLE_OPTIONS.includes(key)) + continue; + return Promise.reject(new Error(`[@octokit/graphql] "${key}" cannot be used as variable name`)); + } + } + const parsedOptions = typeof query === "string" ? 
Object.assign({ query }, options) : query; + const requestOptions = Object.keys(parsedOptions).reduce((result, key) => { + if (NON_VARIABLE_OPTIONS.includes(key)) { + result[key] = parsedOptions[key]; + return result; + } + if (!result.variables) { + result.variables = {}; + } + result.variables[key] = parsedOptions[key]; + return result; + }, {}); + // workaround for GitHub Enterprise baseUrl set with /api/v3 suffix + // https://github.com/octokit/auth-app.js/issues/111#issuecomment-657610451 + const baseUrl = parsedOptions.baseUrl || request.endpoint.DEFAULTS.baseUrl; + if (GHES_V3_SUFFIX_REGEX.test(baseUrl)) { + requestOptions.url = baseUrl.replace(GHES_V3_SUFFIX_REGEX, "/api/graphql"); + } + return request(requestOptions).then((response) => { + if (response.data.errors) { + const headers = {}; + for (const key of Object.keys(response.headers)) { + headers[key] = response.headers[key]; + } + throw new GraphqlResponseError(requestOptions, headers, response.data); + } + return response.data.data; + }); +} diff --git a/node_modules/@octokit/graphql/dist-src/index.js b/node_modules/@octokit/graphql/dist-src/index.js new file mode 100644 index 0000000..2a7d06b --- /dev/null +++ b/node_modules/@octokit/graphql/dist-src/index.js @@ -0,0 +1,18 @@ +import { request } from "@octokit/request"; +import { getUserAgent } from "universal-user-agent"; +import { VERSION } from "./version"; +import { withDefaults } from "./with-defaults"; +export const graphql = withDefaults(request, { + headers: { + "user-agent": `octokit-graphql.js/${VERSION} ${getUserAgent()}`, + }, + method: "POST", + url: "/graphql", +}); +export { GraphqlResponseError } from "./error"; +export function withCustomRequest(customRequest) { + return withDefaults(customRequest, { + method: "POST", + url: "/graphql", + }); +} diff --git a/node_modules/@octokit/graphql/dist-src/types.js b/node_modules/@octokit/graphql/dist-src/types.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/node_modules/@octokit/graphql/dist-src/types.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@octokit/graphql/dist-src/version.js b/node_modules/@octokit/graphql/dist-src/version.js new file mode 100644 index 0000000..3a4f8ff --- /dev/null +++ b/node_modules/@octokit/graphql/dist-src/version.js @@ -0,0 +1 @@ +export const VERSION = "4.8.0"; diff --git a/node_modules/@octokit/graphql/dist-src/with-defaults.js b/node_modules/@octokit/graphql/dist-src/with-defaults.js new file mode 100644 index 0000000..6ea309e --- /dev/null +++ b/node_modules/@octokit/graphql/dist-src/with-defaults.js @@ -0,0 +1,12 @@ +import { request as Request } from "@octokit/request"; +import { graphql } from "./graphql"; +export function withDefaults(request, newDefaults) { + const newRequest = request.defaults(newDefaults); + const newApi = (query, options) => { + return graphql(newRequest, query, options); + }; + return Object.assign(newApi, { + defaults: withDefaults.bind(null, newRequest), + endpoint: Request.endpoint, + }); +} diff --git a/node_modules/@octokit/graphql/dist-types/error.d.ts b/node_modules/@octokit/graphql/dist-types/error.d.ts new file mode 100644 index 0000000..3bd37ad --- /dev/null +++ b/node_modules/@octokit/graphql/dist-types/error.d.ts @@ -0,0 +1,13 @@ +import { ResponseHeaders } from "@octokit/types"; +import { GraphQlEndpointOptions, GraphQlQueryResponse } from "./types"; +declare type ServerResponseData = Required>; +export declare class GraphqlResponseError extends Error { + readonly request: GraphQlEndpointOptions; + readonly headers: 
ResponseHeaders; + readonly response: ServerResponseData; + name: string; + readonly errors: GraphQlQueryResponse["errors"]; + readonly data: ResponseData; + constructor(request: GraphQlEndpointOptions, headers: ResponseHeaders, response: ServerResponseData); +} +export {}; diff --git a/node_modules/@octokit/graphql/dist-types/graphql.d.ts b/node_modules/@octokit/graphql/dist-types/graphql.d.ts new file mode 100644 index 0000000..2942b8b --- /dev/null +++ b/node_modules/@octokit/graphql/dist-types/graphql.d.ts @@ -0,0 +1,3 @@ +import { request as Request } from "@octokit/request"; +import { RequestParameters, GraphQlQueryResponseData } from "./types"; +export declare function graphql(request: typeof Request, query: string | RequestParameters, options?: RequestParameters): Promise; diff --git a/node_modules/@octokit/graphql/dist-types/index.d.ts b/node_modules/@octokit/graphql/dist-types/index.d.ts new file mode 100644 index 0000000..282f98a --- /dev/null +++ b/node_modules/@octokit/graphql/dist-types/index.d.ts @@ -0,0 +1,5 @@ +import { request } from "@octokit/request"; +export declare const graphql: import("./types").graphql; +export { GraphQlQueryResponseData } from "./types"; +export { GraphqlResponseError } from "./error"; +export declare function withCustomRequest(customRequest: typeof request): import("./types").graphql; diff --git a/node_modules/@octokit/graphql/dist-types/types.d.ts b/node_modules/@octokit/graphql/dist-types/types.d.ts new file mode 100644 index 0000000..b266bdb --- /dev/null +++ b/node_modules/@octokit/graphql/dist-types/types.d.ts @@ -0,0 +1,55 @@ +import { EndpointOptions, RequestParameters as RequestParametersType, EndpointInterface } from "@octokit/types"; +export declare type GraphQlEndpointOptions = EndpointOptions & { + variables?: { + [key: string]: unknown; + }; +}; +export declare type RequestParameters = RequestParametersType; +export declare type Query = string; +export interface graphql { + /** + * Sends a GraphQL query request based on endpoint options + * The GraphQL query must be specified in `options`. + * + * @param {object} endpoint Must set `method` and `url`. Plus URL, query or body parameters, as well as `headers`, `mediaType.{format|previews}`, `request`, or `baseUrl`. + */ + (options: RequestParameters): GraphQlResponse; + /** + * Sends a GraphQL query request based on endpoint options + * + * @param {string} query GraphQL query. Example: `'query { viewer { login } }'`. + * @param {object} [parameters] URL, query or body parameters, as well as `headers`, `mediaType.{format|previews}`, `request`, or `baseUrl`. 
+ */ + (query: Query, parameters?: RequestParameters): GraphQlResponse; + /** + * Returns a new `endpoint` with updated route and parameters + */ + defaults: (newDefaults: RequestParameters) => graphql; + /** + * Octokit endpoint API, see {@link https://github.com/octokit/endpoint.js|@octokit/endpoint} + */ + endpoint: EndpointInterface; +} +export declare type GraphQlResponse = Promise; +export declare type GraphQlQueryResponseData = { + [key: string]: any; +}; +export declare type GraphQlQueryResponse = { + data: ResponseData; + errors?: [ + { + type: string; + message: string; + path: [string]; + extensions: { + [key: string]: any; + }; + locations: [ + { + line: number; + column: number; + } + ]; + } + ]; +}; diff --git a/node_modules/@octokit/graphql/dist-types/version.d.ts b/node_modules/@octokit/graphql/dist-types/version.d.ts new file mode 100644 index 0000000..e80848e --- /dev/null +++ b/node_modules/@octokit/graphql/dist-types/version.d.ts @@ -0,0 +1 @@ +export declare const VERSION = "4.8.0"; diff --git a/node_modules/@octokit/graphql/dist-types/with-defaults.d.ts b/node_modules/@octokit/graphql/dist-types/with-defaults.d.ts new file mode 100644 index 0000000..03edc32 --- /dev/null +++ b/node_modules/@octokit/graphql/dist-types/with-defaults.d.ts @@ -0,0 +1,3 @@ +import { request as Request } from "@octokit/request"; +import { graphql as ApiInterface, RequestParameters } from "./types"; +export declare function withDefaults(request: typeof Request, newDefaults: RequestParameters): ApiInterface; diff --git a/node_modules/@octokit/graphql/dist-web/index.js b/node_modules/@octokit/graphql/dist-web/index.js new file mode 100644 index 0000000..5307e26 --- /dev/null +++ b/node_modules/@octokit/graphql/dist-web/index.js @@ -0,0 +1,106 @@ +import { request } from '@octokit/request'; +import { getUserAgent } from 'universal-user-agent'; + +const VERSION = "4.8.0"; + +function _buildMessageForResponseErrors(data) { + return (`Request failed due to following response errors:\n` + + data.errors.map((e) => ` - ${e.message}`).join("\n")); +} +class GraphqlResponseError extends Error { + constructor(request, headers, response) { + super(_buildMessageForResponseErrors(response)); + this.request = request; + this.headers = headers; + this.response = response; + this.name = "GraphqlResponseError"; + // Expose the errors and response data in their shorthand properties. + this.errors = response.errors; + this.data = response.data; + // Maintains proper stack trace (only available on V8) + /* istanbul ignore next */ + if (Error.captureStackTrace) { + Error.captureStackTrace(this, this.constructor); + } + } +} + +const NON_VARIABLE_OPTIONS = [ + "method", + "baseUrl", + "url", + "headers", + "request", + "query", + "mediaType", +]; +const FORBIDDEN_VARIABLE_OPTIONS = ["query", "method", "url"]; +const GHES_V3_SUFFIX_REGEX = /\/api\/v3\/?$/; +function graphql(request, query, options) { + if (options) { + if (typeof query === "string" && "query" in options) { + return Promise.reject(new Error(`[@octokit/graphql] "query" cannot be used as variable name`)); + } + for (const key in options) { + if (!FORBIDDEN_VARIABLE_OPTIONS.includes(key)) + continue; + return Promise.reject(new Error(`[@octokit/graphql] "${key}" cannot be used as variable name`)); + } + } + const parsedOptions = typeof query === "string" ? 
Object.assign({ query }, options) : query; + const requestOptions = Object.keys(parsedOptions).reduce((result, key) => { + if (NON_VARIABLE_OPTIONS.includes(key)) { + result[key] = parsedOptions[key]; + return result; + } + if (!result.variables) { + result.variables = {}; + } + result.variables[key] = parsedOptions[key]; + return result; + }, {}); + // workaround for GitHub Enterprise baseUrl set with /api/v3 suffix + // https://github.com/octokit/auth-app.js/issues/111#issuecomment-657610451 + const baseUrl = parsedOptions.baseUrl || request.endpoint.DEFAULTS.baseUrl; + if (GHES_V3_SUFFIX_REGEX.test(baseUrl)) { + requestOptions.url = baseUrl.replace(GHES_V3_SUFFIX_REGEX, "/api/graphql"); + } + return request(requestOptions).then((response) => { + if (response.data.errors) { + const headers = {}; + for (const key of Object.keys(response.headers)) { + headers[key] = response.headers[key]; + } + throw new GraphqlResponseError(requestOptions, headers, response.data); + } + return response.data.data; + }); +} + +function withDefaults(request$1, newDefaults) { + const newRequest = request$1.defaults(newDefaults); + const newApi = (query, options) => { + return graphql(newRequest, query, options); + }; + return Object.assign(newApi, { + defaults: withDefaults.bind(null, newRequest), + endpoint: request.endpoint, + }); +} + +const graphql$1 = withDefaults(request, { + headers: { + "user-agent": `octokit-graphql.js/${VERSION} ${getUserAgent()}`, + }, + method: "POST", + url: "/graphql", +}); +function withCustomRequest(customRequest) { + return withDefaults(customRequest, { + method: "POST", + url: "/graphql", + }); +} + +export { GraphqlResponseError, graphql$1 as graphql, withCustomRequest }; +//# sourceMappingURL=index.js.map diff --git a/node_modules/@octokit/graphql/dist-web/index.js.map b/node_modules/@octokit/graphql/dist-web/index.js.map new file mode 100644 index 0000000..3c6a6ed --- /dev/null +++ b/node_modules/@octokit/graphql/dist-web/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sources":["../dist-src/version.js","../dist-src/error.js","../dist-src/graphql.js","../dist-src/with-defaults.js","../dist-src/index.js"],"sourcesContent":["export const VERSION = \"4.8.0\";\n","function _buildMessageForResponseErrors(data) {\n return (`Request failed due to following response errors:\\n` +\n data.errors.map((e) => ` - ${e.message}`).join(\"\\n\"));\n}\nexport class GraphqlResponseError extends Error {\n constructor(request, headers, response) {\n super(_buildMessageForResponseErrors(response));\n this.request = request;\n this.headers = headers;\n this.response = response;\n this.name = \"GraphqlResponseError\";\n // Expose the errors and response data in their shorthand properties.\n this.errors = response.errors;\n this.data = response.data;\n // Maintains proper stack trace (only available on V8)\n /* istanbul ignore next */\n if (Error.captureStackTrace) {\n Error.captureStackTrace(this, this.constructor);\n }\n }\n}\n","import { GraphqlResponseError } from \"./error\";\nconst NON_VARIABLE_OPTIONS = [\n \"method\",\n \"baseUrl\",\n \"url\",\n \"headers\",\n \"request\",\n \"query\",\n \"mediaType\",\n];\nconst FORBIDDEN_VARIABLE_OPTIONS = [\"query\", \"method\", \"url\"];\nconst GHES_V3_SUFFIX_REGEX = /\\/api\\/v3\\/?$/;\nexport function graphql(request, query, options) {\n if (options) {\n if (typeof query === \"string\" && \"query\" in options) {\n return Promise.reject(new Error(`[@octokit/graphql] \"query\" cannot be used as variable name`));\n }\n for (const key in 
options) {\n if (!FORBIDDEN_VARIABLE_OPTIONS.includes(key))\n continue;\n return Promise.reject(new Error(`[@octokit/graphql] \"${key}\" cannot be used as variable name`));\n }\n }\n const parsedOptions = typeof query === \"string\" ? Object.assign({ query }, options) : query;\n const requestOptions = Object.keys(parsedOptions).reduce((result, key) => {\n if (NON_VARIABLE_OPTIONS.includes(key)) {\n result[key] = parsedOptions[key];\n return result;\n }\n if (!result.variables) {\n result.variables = {};\n }\n result.variables[key] = parsedOptions[key];\n return result;\n }, {});\n // workaround for GitHub Enterprise baseUrl set with /api/v3 suffix\n // https://github.com/octokit/auth-app.js/issues/111#issuecomment-657610451\n const baseUrl = parsedOptions.baseUrl || request.endpoint.DEFAULTS.baseUrl;\n if (GHES_V3_SUFFIX_REGEX.test(baseUrl)) {\n requestOptions.url = baseUrl.replace(GHES_V3_SUFFIX_REGEX, \"/api/graphql\");\n }\n return request(requestOptions).then((response) => {\n if (response.data.errors) {\n const headers = {};\n for (const key of Object.keys(response.headers)) {\n headers[key] = response.headers[key];\n }\n throw new GraphqlResponseError(requestOptions, headers, response.data);\n }\n return response.data.data;\n });\n}\n","import { request as Request } from \"@octokit/request\";\nimport { graphql } from \"./graphql\";\nexport function withDefaults(request, newDefaults) {\n const newRequest = request.defaults(newDefaults);\n const newApi = (query, options) => {\n return graphql(newRequest, query, options);\n };\n return Object.assign(newApi, {\n defaults: withDefaults.bind(null, newRequest),\n endpoint: Request.endpoint,\n });\n}\n","import { request } from \"@octokit/request\";\nimport { getUserAgent } from \"universal-user-agent\";\nimport { VERSION } from \"./version\";\nimport { withDefaults } from \"./with-defaults\";\nexport const graphql = withDefaults(request, {\n headers: {\n \"user-agent\": `octokit-graphql.js/${VERSION} ${getUserAgent()}`,\n },\n method: \"POST\",\n url: \"/graphql\",\n});\nexport { GraphqlResponseError } from \"./error\";\nexport function withCustomRequest(customRequest) {\n return withDefaults(customRequest, {\n method: \"POST\",\n url: \"/graphql\",\n 
});\n}\n"],"names":["request","Request","graphql"],"mappings":";;;AAAO,MAAM,OAAO,GAAG,mBAAmB;;ACA1C,SAAS,8BAA8B,CAAC,IAAI,EAAE;AAC9C,IAAI,QAAQ,CAAC,kDAAkD,CAAC;AAChE,QAAQ,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK,CAAC,GAAG,EAAE,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE;AAC9D,CAAC;AACD,AAAO,MAAM,oBAAoB,SAAS,KAAK,CAAC;AAChD,IAAI,WAAW,CAAC,OAAO,EAAE,OAAO,EAAE,QAAQ,EAAE;AAC5C,QAAQ,KAAK,CAAC,8BAA8B,CAAC,QAAQ,CAAC,CAAC,CAAC;AACxD,QAAQ,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC;AAC/B,QAAQ,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC;AAC/B,QAAQ,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;AACjC,QAAQ,IAAI,CAAC,IAAI,GAAG,sBAAsB,CAAC;AAC3C;AACA,QAAQ,IAAI,CAAC,MAAM,GAAG,QAAQ,CAAC,MAAM,CAAC;AACtC,QAAQ,IAAI,CAAC,IAAI,GAAG,QAAQ,CAAC,IAAI,CAAC;AAClC;AACA;AACA,QAAQ,IAAI,KAAK,CAAC,iBAAiB,EAAE;AACrC,YAAY,KAAK,CAAC,iBAAiB,CAAC,IAAI,EAAE,IAAI,CAAC,WAAW,CAAC,CAAC;AAC5D,SAAS;AACT,KAAK;AACL,CAAC;;ACnBD,MAAM,oBAAoB,GAAG;AAC7B,IAAI,QAAQ;AACZ,IAAI,SAAS;AACb,IAAI,KAAK;AACT,IAAI,SAAS;AACb,IAAI,SAAS;AACb,IAAI,OAAO;AACX,IAAI,WAAW;AACf,CAAC,CAAC;AACF,MAAM,0BAA0B,GAAG,CAAC,OAAO,EAAE,QAAQ,EAAE,KAAK,CAAC,CAAC;AAC9D,MAAM,oBAAoB,GAAG,eAAe,CAAC;AAC7C,AAAO,SAAS,OAAO,CAAC,OAAO,EAAE,KAAK,EAAE,OAAO,EAAE;AACjD,IAAI,IAAI,OAAO,EAAE;AACjB,QAAQ,IAAI,OAAO,KAAK,KAAK,QAAQ,IAAI,OAAO,IAAI,OAAO,EAAE;AAC7D,YAAY,OAAO,OAAO,CAAC,MAAM,CAAC,IAAI,KAAK,CAAC,CAAC,0DAA0D,CAAC,CAAC,CAAC,CAAC;AAC3G,SAAS;AACT,QAAQ,KAAK,MAAM,GAAG,IAAI,OAAO,EAAE;AACnC,YAAY,IAAI,CAAC,0BAA0B,CAAC,QAAQ,CAAC,GAAG,CAAC;AACzD,gBAAgB,SAAS;AACzB,YAAY,OAAO,OAAO,CAAC,MAAM,CAAC,IAAI,KAAK,CAAC,CAAC,oBAAoB,EAAE,GAAG,CAAC,iCAAiC,CAAC,CAAC,CAAC,CAAC;AAC5G,SAAS;AACT,KAAK;AACL,IAAI,MAAM,aAAa,GAAG,OAAO,KAAK,KAAK,QAAQ,GAAG,MAAM,CAAC,MAAM,CAAC,EAAE,KAAK,EAAE,EAAE,OAAO,CAAC,GAAG,KAAK,CAAC;AAChG,IAAI,MAAM,cAAc,GAAG,MAAM,CAAC,IAAI,CAAC,aAAa,CAAC,CAAC,MAAM,CAAC,CAAC,MAAM,EAAE,GAAG,KAAK;AAC9E,QAAQ,IAAI,oBAAoB,CAAC,QAAQ,CAAC,GAAG,CAAC,EAAE;AAChD,YAAY,MAAM,CAAC,GAAG,CAAC,GAAG,aAAa,CAAC,GAAG,CAAC,CAAC;AAC7C,YAAY,OAAO,MAAM,CAAC;AAC1B,SAAS;AACT,QAAQ,IAAI,CAAC,MAAM,CAAC,SAAS,EAAE;AAC/B,YAAY,MAAM,CAAC,SAAS,GAAG,EAAE,CAAC;AAClC,SAAS;AACT,QAAQ,MAAM,CAAC,SAAS,CAAC,GAAG,CAAC,GAAG,aAAa,CAAC,GAAG,CAAC,CAAC;AACnD,QAAQ,OAAO,MAAM,CAAC;AACtB,KAAK,EAAE,EAAE,CAAC,CAAC;AACX;AACA;AACA,IAAI,MAAM,OAAO,GAAG,aAAa,CAAC,OAAO,IAAI,OAAO,CAAC,QAAQ,CAAC,QAAQ,CAAC,OAAO,CAAC;AAC/E,IAAI,IAAI,oBAAoB,CAAC,IAAI,CAAC,OAAO,CAAC,EAAE;AAC5C,QAAQ,cAAc,CAAC,GAAG,GAAG,OAAO,CAAC,OAAO,CAAC,oBAAoB,EAAE,cAAc,CAAC,CAAC;AACnF,KAAK;AACL,IAAI,OAAO,OAAO,CAAC,cAAc,CAAC,CAAC,IAAI,CAAC,CAAC,QAAQ,KAAK;AACtD,QAAQ,IAAI,QAAQ,CAAC,IAAI,CAAC,MAAM,EAAE;AAClC,YAAY,MAAM,OAAO,GAAG,EAAE,CAAC;AAC/B,YAAY,KAAK,MAAM,GAAG,IAAI,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,EAAE;AAC7D,gBAAgB,OAAO,CAAC,GAAG,CAAC,GAAG,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;AACrD,aAAa;AACb,YAAY,MAAM,IAAI,oBAAoB,CAAC,cAAc,EAAE,OAAO,EAAE,QAAQ,CAAC,IAAI,CAAC,CAAC;AACnF,SAAS;AACT,QAAQ,OAAO,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC;AAClC,KAAK,CAAC,CAAC;AACP,CAAC;;ACjDM,SAAS,YAAY,CAACA,SAAO,EAAE,WAAW,EAAE;AACnD,IAAI,MAAM,UAAU,GAAGA,SAAO,CAAC,QAAQ,CAAC,WAAW,CAAC,CAAC;AACrD,IAAI,MAAM,MAAM,GAAG,CAAC,KAAK,EAAE,OAAO,KAAK;AACvC,QAAQ,OAAO,OAAO,CAAC,UAAU,EAAE,KAAK,EAAE,OAAO,CAAC,CAAC;AACnD,KAAK,CAAC;AACN,IAAI,OAAO,MAAM,CAAC,MAAM,CAAC,MAAM,EAAE;AACjC,QAAQ,QAAQ,EAAE,YAAY,CAAC,IAAI,CAAC,IAAI,EAAE,UAAU,CAAC;AACrD,QAAQ,QAAQ,EAAEC,OAAO,CAAC,QAAQ;AAClC,KAAK,CAAC,CAAC;AACP,CAAC;;ACPW,MAACC,SAAO,GAAG,YAAY,CAAC,OAAO,EAAE;AAC7C,IAAI,OAAO,EAAE;AACb,QAAQ,YAAY,EAAE,CAAC,mBAAmB,EAAE,OAAO,CAAC,CAAC,EAAE,YAAY,EAAE,CAAC,CAAC;AACvE,KAAK;AACL,IAAI,MAAM,EAAE,MAAM;AAClB,IAAI,GAAG,EAAE,UAAU;AACnB,CAAC,CAAC,CAAC;AACH,AACO,SAAS,iBAAiB,CAAC,aAAa,EAAE;AACjD,IAAI,OAAO,YAAY,C
AAC,aAAa,EAAE;AACvC,QAAQ,MAAM,EAAE,MAAM;AACtB,QAAQ,GAAG,EAAE,UAAU;AACvB,KAAK,CAAC,CAAC;AACP,CAAC;;;;"} \ No newline at end of file diff --git a/node_modules/@octokit/graphql/package.json b/node_modules/@octokit/graphql/package.json new file mode 100644 index 0000000..9ba3cb7 --- /dev/null +++ b/node_modules/@octokit/graphql/package.json @@ -0,0 +1,47 @@ +{ + "name": "@octokit/graphql", + "description": "GitHub GraphQL API client for browsers and Node", + "version": "4.8.0", + "license": "MIT", + "files": [ + "dist-*/", + "bin/" + ], + "pika": true, + "sideEffects": false, + "keywords": [ + "octokit", + "github", + "api", + "graphql" + ], + "repository": "github:octokit/graphql.js", + "dependencies": { + "@octokit/request": "^5.6.0", + "@octokit/types": "^6.0.3", + "universal-user-agent": "^6.0.0" + }, + "devDependencies": { + "@pika/pack": "^0.5.0", + "@pika/plugin-build-node": "^0.9.0", + "@pika/plugin-build-web": "^0.9.0", + "@pika/plugin-ts-standard-pkg": "^0.9.0", + "@types/fetch-mock": "^7.2.5", + "@types/jest": "^27.0.0", + "@types/node": "^14.0.4", + "fetch-mock": "^9.0.0", + "jest": "^27.0.0", + "prettier": "2.3.2", + "semantic-release": "^17.0.0", + "semantic-release-plugin-update-version-in-files": "^1.0.0", + "ts-jest": "^27.0.0-next.12", + "typescript": "^4.0.0" + }, + "publishConfig": { + "access": "public" + }, + "source": "dist-src/index.js", + "types": "dist-types/index.d.ts", + "main": "dist-node/index.js", + "module": "dist-web/index.js" +} diff --git a/node_modules/@octokit/openapi-types/LICENSE b/node_modules/@octokit/openapi-types/LICENSE new file mode 100644 index 0000000..c61fbbe --- /dev/null +++ b/node_modules/@octokit/openapi-types/LICENSE @@ -0,0 +1,7 @@ +Copyright 2020 Gregor Martynus + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
\ No newline at end of file diff --git a/node_modules/@octokit/openapi-types/README.md b/node_modules/@octokit/openapi-types/README.md new file mode 100644 index 0000000..9da833c --- /dev/null +++ b/node_modules/@octokit/openapi-types/README.md @@ -0,0 +1,17 @@ +# @octokit/openapi-types + +> Generated TypeScript definitions based on GitHub's OpenAPI spec + +This package is continously updated based on [GitHub's OpenAPI specification](https://github.com/github/rest-api-description/) + +## Usage + +```ts +import { components } from "@octokit/openapi-types"; + +type Repository = components["schemas"]["full-repository"]; +``` + +## License + +[MIT](LICENSE) diff --git a/node_modules/@octokit/openapi-types/package.json b/node_modules/@octokit/openapi-types/package.json new file mode 100644 index 0000000..24484a5 --- /dev/null +++ b/node_modules/@octokit/openapi-types/package.json @@ -0,0 +1,20 @@ +{ + "name": "@octokit/openapi-types", + "description": "Generated TypeScript definitions based on GitHub's OpenAPI spec for api.github.com", + "repository": { + "type": "git", + "url": "https://github.com/octokit/openapi-types.ts.git", + "directory": "packages/openapi-types" + }, + "publishConfig": { + "access": "public" + }, + "version": "12.11.0", + "main": "", + "types": "types.d.ts", + "author": "Gregor Martynus (https://twitter.com/gr2m)", + "license": "MIT", + "octokit": { + "openapi-version": "6.8.0" + } +} diff --git a/node_modules/@octokit/openapi-types/types.d.ts b/node_modules/@octokit/openapi-types/types.d.ts new file mode 100644 index 0000000..88dc62d --- /dev/null +++ b/node_modules/@octokit/openapi-types/types.d.ts @@ -0,0 +1,47095 @@ +/** + * This file was auto-generated by openapi-typescript. + * Do not make direct changes to the file. + */ + +export interface paths { + "/": { + /** Get Hypermedia links to resources accessible in GitHub's REST API */ + get: operations["meta/root"]; + }; + "/app": { + /** + * Returns the GitHub App associated with the authentication credentials used. To see how many app installations are associated with this GitHub App, see the `installations_count` in the response. For more details about your app's installations, see the "[List installations for the authenticated app](https://docs.github.com/rest/reference/apps#list-installations-for-the-authenticated-app)" endpoint. + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + get: operations["apps/get-authenticated"]; + }; + "/app-manifests/{code}/conversions": { + /** Use this endpoint to complete the handshake necessary when implementing the [GitHub App Manifest flow](https://docs.github.com/apps/building-github-apps/creating-github-apps-from-a-manifest/). When you create a GitHub App with the manifest flow, you receive a temporary `code` used to retrieve the GitHub App's `id`, `pem` (private key), and `webhook_secret`. */ + post: operations["apps/create-from-manifest"]; + }; + "/app/hook/config": { + /** + * Returns the webhook configuration for a GitHub App. For more information about configuring a webhook for your app, see "[Creating a GitHub App](/developers/apps/creating-a-github-app)." + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + get: operations["apps/get-webhook-config-for-app"]; + /** + * Updates the webhook configuration for a GitHub App. 
For more information about configuring a webhook for your app, see "[Creating a GitHub App](/developers/apps/creating-a-github-app)." + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + patch: operations["apps/update-webhook-config-for-app"]; + }; + "/app/hook/deliveries": { + /** + * Returns a list of webhook deliveries for the webhook configured for a GitHub App. + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + get: operations["apps/list-webhook-deliveries"]; + }; + "/app/hook/deliveries/{delivery_id}": { + /** + * Returns a delivery for the webhook configured for a GitHub App. + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + get: operations["apps/get-webhook-delivery"]; + }; + "/app/hook/deliveries/{delivery_id}/attempts": { + /** + * Redeliver a delivery for the webhook configured for a GitHub App. + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + post: operations["apps/redeliver-webhook-delivery"]; + }; + "/app/installations": { + /** + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + * + * The permissions the installation has are included under the `permissions` key. + */ + get: operations["apps/list-installations"]; + }; + "/app/installations/{installation_id}": { + /** + * Enables an authenticated GitHub App to find an installation's information using the installation id. + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + get: operations["apps/get-installation"]; + /** + * Uninstalls a GitHub App on a user, organization, or business account. If you prefer to temporarily suspend an app's access to your account's resources, then we recommend the "[Suspend an app installation](https://docs.github.com/rest/reference/apps/#suspend-an-app-installation)" endpoint. + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + delete: operations["apps/delete-installation"]; + }; + "/app/installations/{installation_id}/access_tokens": { + /** + * Creates an installation access token that enables a GitHub App to make authenticated API requests for the app's installation on an organization or individual account. Installation tokens expire one hour from the time you create them. Using an expired token produces a status code of `401 - Unauthorized`, and requires creating a new installation token. By default the installation token has access to all repositories that the installation can access. To restrict the access to specific repositories, you can provide the `repository_ids` when creating the token. When you omit `repository_ids`, the response does not contain the `repositories` key. + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. 
+ */ + post: operations["apps/create-installation-access-token"]; + }; + "/app/installations/{installation_id}/suspended": { + /** + * Suspends a GitHub App on a user, organization, or business account, which blocks the app from accessing the account's resources. When a GitHub App is suspended, the app's access to the GitHub API or webhook events is blocked for that account. + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + put: operations["apps/suspend-installation"]; + /** + * Removes a GitHub App installation suspension. + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + delete: operations["apps/unsuspend-installation"]; + }; + "/applications/grants": { + /** + * **Deprecation Notice:** GitHub will discontinue the [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations/), which is used by integrations to create personal access tokens and OAuth tokens, and you must now create these tokens using our [web application flow](https://docs.github.com/developers/apps/authorizing-oauth-apps#web-application-flow). The [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations) will be removed on November, 13, 2020. For more information, including scheduled brownouts, see the [blog post](https://developer.github.com/changes/2020-02-14-deprecating-oauth-auth-endpoint/). + * + * You can use this API to list the set of OAuth applications that have been granted access to your account. Unlike the [list your authorizations](https://docs.github.com/rest/reference/oauth-authorizations#list-your-authorizations) API, this API does not manage individual tokens. This API will return one entry for each OAuth application that has been granted access to your account, regardless of the number of tokens an application has generated for your user. The list of OAuth applications returned matches what is shown on [the application authorizations settings screen within GitHub](https://github.com/settings/applications#authorized). The `scopes` returned are the union of scopes authorized for the application. For example, if an application has one token with `repo` scope and another token with `user` scope, the grant will return `["repo", "user"]`. + */ + get: operations["oauth-authorizations/list-grants"]; + }; + "/applications/grants/{grant_id}": { + /** **Deprecation Notice:** GitHub will discontinue the [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations), which is used by integrations to create personal access tokens and OAuth tokens, and you must now create these tokens using our [web application flow](https://docs.github.com/apps/building-oauth-apps/authorizing-oauth-apps/#web-application-flow). The [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations) will be removed on November, 13, 2020. For more information, including scheduled brownouts, see the [blog post](https://developer.github.com/changes/2020-02-14-deprecating-oauth-auth-endpoint/). 
*/ + get: operations["oauth-authorizations/get-grant"]; + /** + * **Deprecation Notice:** GitHub will discontinue the [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations/), which is used by integrations to create personal access tokens and OAuth tokens, and you must now create these tokens using our [web application flow](https://docs.github.com/developers/apps/authorizing-oauth-apps#web-application-flow). The [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations/) will be removed on November, 13, 2020. For more information, including scheduled brownouts, see the [blog post](https://developer.github.com/changes/2020-02-14-deprecating-oauth-auth-endpoint/). + * + * Deleting an OAuth application's grant will also delete all OAuth tokens associated with the application for your user. Once deleted, the application has no access to your account and is no longer listed on [the application authorizations settings screen within GitHub](https://github.com/settings/applications#authorized). + */ + delete: operations["oauth-authorizations/delete-grant"]; + }; + "/applications/{client_id}/grant": { + /** + * OAuth application owners can revoke a grant for their OAuth application and a specific user. You must use [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) when accessing this endpoint, using the OAuth application's `client_id` and `client_secret` as the username and password. You must also provide a valid OAuth `access_token` as an input parameter and the grant for the token's owner will be deleted. + * Deleting an OAuth application's grant will also delete all OAuth tokens associated with the application for the user. Once deleted, the application will have no access to the user's account and will no longer be listed on [the application authorizations settings screen within GitHub](https://github.com/settings/applications#authorized). + */ + delete: operations["apps/delete-authorization"]; + }; + "/applications/{client_id}/token": { + /** OAuth applications can use a special API method for checking OAuth token validity without exceeding the normal rate limits for failed login attempts. Authentication works differently with this particular endpoint. You must use [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) to use this endpoint, where the username is the OAuth application `client_id` and the password is its `client_secret`. Invalid tokens will return `404 NOT FOUND`. */ + post: operations["apps/check-token"]; + /** OAuth application owners can revoke a single token for an OAuth application. You must use [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) when accessing this endpoint, using the OAuth application's `client_id` and `client_secret` as the username and password. */ + delete: operations["apps/delete-token"]; + /** OAuth applications can use this API method to reset a valid OAuth token without end-user involvement. Applications must save the "token" property in the response because changes take effect immediately. You must use [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) when accessing this endpoint, using the OAuth application's `client_id` and `client_secret` as the username and password. Invalid tokens will return `404 NOT FOUND`. 
*/ + patch: operations["apps/reset-token"]; + }; + "/applications/{client_id}/token/scoped": { + /** Use a non-scoped user-to-server OAuth access token to create a repository scoped and/or permission scoped user-to-server OAuth access token. You can specify which repositories the token can access and which permissions are granted to the token. You must use [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) when accessing this endpoint, using the OAuth application's `client_id` and `client_secret` as the username and password. Invalid tokens will return `404 NOT FOUND`. */ + post: operations["apps/scope-token"]; + }; + "/apps/{app_slug}": { + /** + * **Note**: The `:app_slug` is just the URL-friendly name of your GitHub App. You can find this on the settings page for your GitHub App (e.g., `https://github.com/settings/apps/:app_slug`). + * + * If the GitHub App you specify is public, you can access this endpoint without authenticating. If the GitHub App you specify is private, you must authenticate with a [personal access token](https://docs.github.com/articles/creating-a-personal-access-token-for-the-command-line/) or an [installation access token](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-an-installation) to access this endpoint. + */ + get: operations["apps/get-by-slug"]; + }; + "/authorizations": { + /** **Deprecation Notice:** GitHub will discontinue the [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations), which is used by integrations to create personal access tokens and OAuth tokens, and you must now create these tokens using our [web application flow](https://docs.github.com/apps/building-oauth-apps/authorizing-oauth-apps/#web-application-flow). The [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations) will be removed on November, 13, 2020. For more information, including scheduled brownouts, see the [blog post](https://developer.github.com/changes/2020-02-14-deprecating-oauth-auth-endpoint/). */ + get: operations["oauth-authorizations/list-authorizations"]; + /** + * **Deprecation Notice:** GitHub will discontinue the [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations), which is used by integrations to create personal access tokens and OAuth tokens, and you must now create these tokens using our [web application flow](https://docs.github.com/developers/apps/authorizing-oauth-apps#web-application-flow). The [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations) will be removed on November, 13, 2020. For more information, including scheduled brownouts, see the [blog post](https://developer.github.com/changes/2020-02-14-deprecating-oauth-auth-endpoint/). + * + * **Warning:** Apps must use the [web application flow](https://docs.github.com/apps/building-oauth-apps/authorizing-oauth-apps/#web-application-flow) to obtain OAuth tokens that work with GitHub SAML organizations. OAuth tokens created using the Authorizations API will be unable to access GitHub SAML organizations. For more information, see the [blog post](https://developer.github.com/changes/2019-11-05-deprecated-passwords-and-authorizations-api). + * + * Creates OAuth tokens using [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication). 
If you have two-factor authentication setup, Basic Authentication for this endpoint requires that you use a one-time password (OTP) and your username and password instead of tokens. For more information, see "[Working with two-factor authentication](https://docs.github.com/rest/overview/other-authentication-methods#working-with-two-factor-authentication)." + * + * To create tokens for a particular OAuth application using this endpoint, you must authenticate as the user you want to create an authorization for and provide the app's client ID and secret, found on your OAuth application's settings page. If your OAuth application intends to create multiple tokens for one user, use `fingerprint` to differentiate between them. + * + * You can also create tokens on GitHub from the [personal access tokens settings](https://github.com/settings/tokens) page. Read more about these tokens in [the GitHub Help documentation](https://docs.github.com/articles/creating-an-access-token-for-command-line-use). + * + * Organizations that enforce SAML SSO require personal access tokens to be allowed. Read more about allowing tokens in [the GitHub Help documentation](https://docs.github.com/articles/about-identity-and-access-management-with-saml-single-sign-on). + */ + post: operations["oauth-authorizations/create-authorization"]; + }; + "/authorizations/clients/{client_id}": { + /** + * **Deprecation Notice:** GitHub will discontinue the [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations/), which is used by integrations to create personal access tokens and OAuth tokens, and you must now create these tokens using our [web application flow](https://docs.github.com/developers/apps/authorizing-oauth-apps#web-application-flow). The [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations) will be removed on November, 13, 2020. For more information, including scheduled brownouts, see the [blog post](https://developer.github.com/changes/2020-02-14-deprecating-oauth-auth-endpoint/). + * + * **Warning:** Apps must use the [web application flow](https://docs.github.com/apps/building-oauth-apps/authorizing-oauth-apps/#web-application-flow) to obtain OAuth tokens that work with GitHub SAML organizations. OAuth tokens created using the Authorizations API will be unable to access GitHub SAML organizations. For more information, see the [blog post](https://developer.github.com/changes/2019-11-05-deprecated-passwords-and-authorizations-api). + * + * Creates a new authorization for the specified OAuth application, only if an authorization for that application doesn't already exist for the user. The URL includes the 20 character client ID for the OAuth app that is requesting the token. It returns the user's existing authorization for the application if one is present. Otherwise, it creates and returns a new one. + * + * If you have two-factor authentication setup, Basic Authentication for this endpoint requires that you use a one-time password (OTP) and your username and password instead of tokens. For more information, see "[Working with two-factor authentication](https://docs.github.com/rest/overview/other-authentication-methods#working-with-two-factor-authentication)." 
+ * + * **Deprecation Notice:** GitHub will discontinue the [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations/), which is used by integrations to create personal access tokens and OAuth tokens, and you must now create these tokens using our [web application flow](https://docs.github.com/developers/apps/authorizing-oauth-apps#web-application-flow). The [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations) will be removed on November, 13, 2020. For more information, including scheduled brownouts, see the [blog post](https://developer.github.com/changes/2020-02-14-deprecating-oauth-auth-endpoint/). + */ + put: operations["oauth-authorizations/get-or-create-authorization-for-app"]; + }; + "/authorizations/clients/{client_id}/{fingerprint}": { + /** + * **Deprecation Notice:** GitHub will discontinue the [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations/), which is used by integrations to create personal access tokens and OAuth tokens, and you must now create these tokens using our [web application flow](https://docs.github.com/developers/apps/authorizing-oauth-apps#web-application-flow). The [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations) will be removed on November, 13, 2020. For more information, including scheduled brownouts, see the [blog post](https://developer.github.com/changes/2020-02-14-deprecating-oauth-auth-endpoint/). + * + * **Warning:** Apps must use the [web application flow](https://docs.github.com/apps/building-oauth-apps/authorizing-oauth-apps/#web-application-flow) to obtain OAuth tokens that work with GitHub SAML organizations. OAuth tokens created using the Authorizations API will be unable to access GitHub SAML organizations. For more information, see the [blog post](https://developer.github.com/changes/2019-11-05-deprecated-passwords-and-authorizations-api). + * + * This method will create a new authorization for the specified OAuth application, only if an authorization for that application and fingerprint do not already exist for the user. The URL includes the 20 character client ID for the OAuth app that is requesting the token. `fingerprint` is a unique string to distinguish an authorization from others created for the same client ID and user. It returns the user's existing authorization for the application if one is present. Otherwise, it creates and returns a new one. + * + * If you have two-factor authentication setup, Basic Authentication for this endpoint requires that you use a one-time password (OTP) and your username and password instead of tokens. For more information, see "[Working with two-factor authentication](https://docs.github.com/rest/overview/other-authentication-methods#working-with-two-factor-authentication)." + */ + put: operations["oauth-authorizations/get-or-create-authorization-for-app-and-fingerprint"]; + }; + "/authorizations/{authorization_id}": { + /** **Deprecation Notice:** GitHub will discontinue the [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations), which is used by integrations to create personal access tokens and OAuth tokens, and you must now create these tokens using our [web application flow](https://docs.github.com/apps/building-oauth-apps/authorizing-oauth-apps/#web-application-flow). The [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations) will be removed on November, 13, 2020. 
For more information, including scheduled brownouts, see the [blog post](https://developer.github.com/changes/2020-02-14-deprecating-oauth-auth-endpoint/). */ + get: operations["oauth-authorizations/get-authorization"]; + /** **Deprecation Notice:** GitHub will discontinue the [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations), which is used by integrations to create personal access tokens and OAuth tokens, and you must now create these tokens using our [web application flow](https://docs.github.com/apps/building-oauth-apps/authorizing-oauth-apps/#web-application-flow). The [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations) will be removed on November, 13, 2020. For more information, including scheduled brownouts, see the [blog post](https://developer.github.com/changes/2020-02-14-deprecating-oauth-auth-endpoint/). */ + delete: operations["oauth-authorizations/delete-authorization"]; + /** + * **Deprecation Notice:** GitHub will discontinue the [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations/), which is used by integrations to create personal access tokens and OAuth tokens, and you must now create these tokens using our [web application flow](https://docs.github.com/developers/apps/authorizing-oauth-apps#web-application-flow). The [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations) will be removed on November, 13, 2020. For more information, including scheduled brownouts, see the [blog post](https://developer.github.com/changes/2020-02-14-deprecating-oauth-auth-endpoint/). + * + * If you have two-factor authentication setup, Basic Authentication for this endpoint requires that you use a one-time password (OTP) and your username and password instead of tokens. For more information, see "[Working with two-factor authentication](https://docs.github.com/rest/overview/other-authentication-methods#working-with-two-factor-authentication)." + * + * You can only send one of these scope keys at a time. + */ + patch: operations["oauth-authorizations/update-authorization"]; + }; + "/codes_of_conduct": { + get: operations["codes-of-conduct/get-all-codes-of-conduct"]; + }; + "/codes_of_conduct/{key}": { + get: operations["codes-of-conduct/get-conduct-code"]; + }; + "/emojis": { + /** Lists all the emojis available to use on GitHub. */ + get: operations["emojis/get"]; + }; + "/enterprise-installation/{enterprise_or_org}/server-statistics": { + /** + * Returns aggregate usage metrics for your GitHub Enterprise Server 3.5+ instance for a specified time period up to 365 days. + * + * To use this endpoint, your GitHub Enterprise Server instance must be connected to GitHub Enterprise Cloud using GitHub Connect. You must enable Server Statistics, and for the API request provide your enterprise account name or organization name connected to the GitHub Enterprise Server. For more information, see "[Enabling Server Statistics for your enterprise](/admin/configuration/configuring-github-connect/enabling-server-statistics-for-your-enterprise)" in the GitHub Enterprise Server documentation. + * + * You'll need to use a personal access token: + * - If you connected your GitHub Enterprise Server to an enterprise account and enabled Server Statistics, you'll need a personal access token with the `read:enterprise` permission. 
+ * - If you connected your GitHub Enterprise Server to an organization account and enabled Server Statistics, you'll need a personal access token with the `read:org` permission. + * + * For more information on creating a personal access token, see "[Creating a personal access token](/authentication/keeping-your-account-and-data-secure/creating-a-personal-access-token)." + */ + get: operations["enterprise-admin/get-server-statistics"]; + }; + "/enterprises/{enterprise}/actions/cache/usage": { + /** + * Gets the total GitHub Actions cache usage for an enterprise. + * The data fetched using this API is refreshed approximately every 5 minutes, so values returned from this endpoint may take at least 5 minutes to get updated. + * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. + */ + get: operations["actions/get-actions-cache-usage-for-enterprise"]; + }; + "/enterprises/{enterprise}/actions/oidc/customization/issuer": { + /** + * Sets the GitHub Actions OpenID Connect (OIDC) custom issuer policy for an enterprise. + * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. + * GitHub Apps must have the `enterprise_administration:write` permission to use this endpoint. + */ + put: operations["actions/set-actions-oidc-custom-issuer-policy-for-enterprise"]; + }; + "/enterprises/{enterprise}/actions/permissions": { + /** + * Gets the GitHub Actions permissions policy for organizations and allowed actions and reusable workflows in an enterprise. + * + * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. + */ + get: operations["enterprise-admin/get-github-actions-permissions-enterprise"]; + /** + * Sets the GitHub Actions permissions policy for organizations and allowed actions and reusable workflows in an enterprise. + * + * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. + */ + put: operations["enterprise-admin/set-github-actions-permissions-enterprise"]; + }; + "/enterprises/{enterprise}/actions/permissions/organizations": { + /** + * Lists the organizations that are selected to have GitHub Actions enabled in an enterprise. To use this endpoint, the enterprise permission policy for `enabled_organizations` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an enterprise](#set-github-actions-permissions-for-an-enterprise)." + * + * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. + */ + get: operations["enterprise-admin/list-selected-organizations-enabled-github-actions-enterprise"]; + /** + * Replaces the list of selected organizations that are enabled for GitHub Actions in an enterprise. To use this endpoint, the enterprise permission policy for `enabled_organizations` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an enterprise](#set-github-actions-permissions-for-an-enterprise)." + * + * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. + */ + put: operations["enterprise-admin/set-selected-organizations-enabled-github-actions-enterprise"]; + }; + "/enterprises/{enterprise}/actions/permissions/organizations/{org_id}": { + /** + * Adds an organization to the list of selected organizations that are enabled for GitHub Actions in an enterprise. 
To use this endpoint, the enterprise permission policy for `enabled_organizations` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an enterprise](#set-github-actions-permissions-for-an-enterprise)." + * + * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. + */ + put: operations["enterprise-admin/enable-selected-organization-github-actions-enterprise"]; + /** + * Removes an organization from the list of selected organizations that are enabled for GitHub Actions in an enterprise. To use this endpoint, the enterprise permission policy for `enabled_organizations` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an enterprise](#set-github-actions-permissions-for-an-enterprise)." + * + * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. + */ + delete: operations["enterprise-admin/disable-selected-organization-github-actions-enterprise"]; + }; + "/enterprises/{enterprise}/actions/permissions/selected-actions": { + /** + * Gets the selected actions and reusable workflows that are allowed in an enterprise. To use this endpoint, the enterprise permission policy for `allowed_actions` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an enterprise](#set-github-actions-permissions-for-an-enterprise)." + * + * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. + */ + get: operations["enterprise-admin/get-allowed-actions-enterprise"]; + /** + * Sets the actions and reusable workflows that are allowed in an enterprise. To use this endpoint, the enterprise permission policy for `allowed_actions` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an enterprise](#set-github-actions-permissions-for-an-enterprise)." + * + * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. + */ + put: operations["enterprise-admin/set-allowed-actions-enterprise"]; + }; + "/enterprises/{enterprise}/actions/permissions/workflow": { + /** + * Gets the default workflow permissions granted to the `GITHUB_TOKEN` when running workflows in an enterprise, + * as well as whether GitHub Actions can submit approving pull request reviews. For more information, see + * "[Enforcing a policy for workflow permissions in your enterprise](https://docs.github.com/enterprise-cloud@latest/admin/policies/enforcing-policies-for-your-enterprise/enforcing-policies-for-github-actions-in-your-enterprise#enforcing-a-policy-for-workflow-permissions-in-your-enterprise)." + * + * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. + * GitHub Apps must have the `enterprise_administration:write` permission to use this endpoint. + */ + get: operations["actions/get-github-actions-default-workflow-permissions-enterprise"]; + /** + * Sets the default workflow permissions granted to the `GITHUB_TOKEN` when running workflows in an enterprise, and sets + * whether GitHub Actions can submit approving pull request reviews. For more information, see + * "[Enforcing a policy for workflow permissions in your enterprise](https://docs.github.com/enterprise-cloud@latest/admin/policies/enforcing-policies-for-your-enterprise/enforcing-policies-for-github-actions-in-your-enterprise#enforcing-a-policy-for-workflow-permissions-in-your-enterprise)." 
+ * + * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. + * GitHub Apps must have the `enterprise_administration:write` permission to use this endpoint. + */ + put: operations["actions/set-github-actions-default-workflow-permissions-enterprise"]; + }; + "/enterprises/{enterprise}/actions/runner-groups": { + /** + * Lists all self-hosted runner groups for an enterprise. + * + * You must authenticate using an access token with the `manage_runners:enterprise` scope to use this endpoint. + */ + get: operations["enterprise-admin/list-self-hosted-runner-groups-for-enterprise"]; + /** + * Creates a new self-hosted runner group for an enterprise. + * + * You must authenticate using an access token with the `manage_runners:enterprise` scope to use this endpoint. + */ + post: operations["enterprise-admin/create-self-hosted-runner-group-for-enterprise"]; + }; + "/enterprises/{enterprise}/actions/runner-groups/{runner_group_id}": { + /** + * Gets a specific self-hosted runner group for an enterprise. + * + * You must authenticate using an access token with the `manage_runners:enterprise` scope to use this endpoint. + */ + get: operations["enterprise-admin/get-self-hosted-runner-group-for-enterprise"]; + /** + * Deletes a self-hosted runner group for an enterprise. + * + * You must authenticate using an access token with the `manage_runners:enterprise` scope to use this endpoint. + */ + delete: operations["enterprise-admin/delete-self-hosted-runner-group-from-enterprise"]; + /** + * Updates the `name` and `visibility` of a self-hosted runner group in an enterprise. + * + * You must authenticate using an access token with the `manage_runners:enterprise` scope to use this endpoint. + */ + patch: operations["enterprise-admin/update-self-hosted-runner-group-for-enterprise"]; + }; + "/enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/organizations": { + /** + * Lists the organizations with access to a self-hosted runner group. + * + * You must authenticate using an access token with the `manage_runners:enterprise` scope to use this endpoint. + */ + get: operations["enterprise-admin/list-org-access-to-self-hosted-runner-group-in-enterprise"]; + /** + * Replaces the list of organizations that have access to a self-hosted runner configured in an enterprise. + * + * You must authenticate using an access token with the `manage_runners:enterprise` scope to use this endpoint. + */ + put: operations["enterprise-admin/set-org-access-to-self-hosted-runner-group-in-enterprise"]; + }; + "/enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/organizations/{org_id}": { + /** + * Adds an organization to the list of selected organizations that can access a self-hosted runner group. The runner group must have `visibility` set to `selected`. For more information, see "[Create a self-hosted runner group for an enterprise](#create-a-self-hosted-runner-group-for-an-enterprise)." + * + * You must authenticate using an access token with the `manage_runners:enterprise` scope to use this endpoint. + */ + put: operations["enterprise-admin/add-org-access-to-self-hosted-runner-group-in-enterprise"]; + /** + * Removes an organization from the list of selected organizations that can access a self-hosted runner group. The runner group must have `visibility` set to `selected`. For more information, see "[Create a self-hosted runner group for an enterprise](#create-a-self-hosted-runner-group-for-an-enterprise)." 
+ * + * You must authenticate using an access token with the `manage_runners:enterprise` scope to use this endpoint. + */ + delete: operations["enterprise-admin/remove-org-access-to-self-hosted-runner-group-in-enterprise"]; + }; + "/enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/runners": { + /** + * Lists the self-hosted runners that are in a specific enterprise group. + * + * You must authenticate using an access token with the `manage_runners:enterprise` scope to use this endpoint. + */ + get: operations["enterprise-admin/list-self-hosted-runners-in-group-for-enterprise"]; + /** + * Replaces the list of self-hosted runners that are part of an enterprise runner group. + * + * You must authenticate using an access token with the `manage_runners:enterprise` scope to use this endpoint. + */ + put: operations["enterprise-admin/set-self-hosted-runners-in-group-for-enterprise"]; + }; + "/enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/runners/{runner_id}": { + /** + * Adds a self-hosted runner to a runner group configured in an enterprise. + * + * You must authenticate using an access token with the `manage_runners:enterprise` + * scope to use this endpoint. + */ + put: operations["enterprise-admin/add-self-hosted-runner-to-group-for-enterprise"]; + /** + * Removes a self-hosted runner from a group configured in an enterprise. The runner is then returned to the default group. + * + * You must authenticate using an access token with the `manage_runners:enterprise` scope to use this endpoint. + */ + delete: operations["enterprise-admin/remove-self-hosted-runner-from-group-for-enterprise"]; + }; + "/enterprises/{enterprise}/actions/runners": { + /** + * Lists all self-hosted runners configured for an enterprise. + * + * You must authenticate using an access token with the `manage_runners:enterprise` scope to use this endpoint. + */ + get: operations["enterprise-admin/list-self-hosted-runners-for-enterprise"]; + }; + "/enterprises/{enterprise}/actions/runners/downloads": { + /** + * Lists binaries for the runner application that you can download and run. + * + * You must authenticate using an access token with the `manage_runners:enterprise` scope to use this endpoint. + */ + get: operations["enterprise-admin/list-runner-applications-for-enterprise"]; + }; + "/enterprises/{enterprise}/actions/runners/registration-token": { + /** + * Returns a token that you can pass to the `config` script. The token expires after one hour. + * + * You must authenticate using an access token with the `manage_runners:enterprise` scope to use this endpoint. + * + * #### Example using registration token + * + * Configure your self-hosted runner, replacing `TOKEN` with the registration token provided by this endpoint. + * + * ``` + * ./config.sh --url https://github.com/enterprises/octo-enterprise --token TOKEN + * ``` + */ + post: operations["enterprise-admin/create-registration-token-for-enterprise"]; + }; + "/enterprises/{enterprise}/actions/runners/remove-token": { + /** + * Returns a token that you can pass to the `config` script to remove a self-hosted runner from an enterprise. The token expires after one hour. + * + * You must authenticate using an access token with the `manage_runners:enterprise` scope to use this endpoint. + * + * #### Example using remove token + * + * To remove your self-hosted runner from an enterprise, replace `TOKEN` with the remove token provided by this + * endpoint. 
+ * + * ``` + * ./config.sh remove --token TOKEN + * ``` + */ + post: operations["enterprise-admin/create-remove-token-for-enterprise"]; + }; + "/enterprises/{enterprise}/actions/runners/{runner_id}": { + /** + * Gets a specific self-hosted runner configured in an enterprise. + * + * You must authenticate using an access token with the `manage_runners:enterprise` scope to use this endpoint. + */ + get: operations["enterprise-admin/get-self-hosted-runner-for-enterprise"]; + /** + * Forces the removal of a self-hosted runner from an enterprise. You can use this endpoint to completely remove the runner when the machine you were using no longer exists. + * + * You must authenticate using an access token with the `manage_runners:enterprise` scope to use this endpoint. + */ + delete: operations["enterprise-admin/delete-self-hosted-runner-from-enterprise"]; + }; + "/enterprises/{enterprise}/actions/runners/{runner_id}/labels": { + /** + * Lists all labels for a self-hosted runner configured in an enterprise. + * + * You must authenticate using an access token with the `manage_runners:enterprise` scope to use this endpoint. + */ + get: operations["enterprise-admin/list-labels-for-self-hosted-runner-for-enterprise"]; + /** + * Remove all previous custom labels and set the new custom labels for a specific + * self-hosted runner configured in an enterprise. + * + * You must authenticate using an access token with the `manage_runners:enterprise` scope to use this endpoint. + */ + put: operations["enterprise-admin/set-custom-labels-for-self-hosted-runner-for-enterprise"]; + /** + * Add custom labels to a self-hosted runner configured in an enterprise. + * + * You must authenticate using an access token with the `manage_runners:enterprise` scope to use this endpoint. + */ + post: operations["enterprise-admin/add-custom-labels-to-self-hosted-runner-for-enterprise"]; + /** + * Remove all custom labels from a self-hosted runner configured in an + * enterprise. Returns the remaining read-only labels from the runner. + * + * You must authenticate using an access token with the `manage_runners:enterprise` scope to use this endpoint. + */ + delete: operations["enterprise-admin/remove-all-custom-labels-from-self-hosted-runner-for-enterprise"]; + }; + "/enterprises/{enterprise}/actions/runners/{runner_id}/labels/{name}": { + /** + * Remove a custom label from a self-hosted runner configured + * in an enterprise. Returns the remaining labels from the runner. + * + * This endpoint returns a `404 Not Found` status if the custom label is not + * present on the runner. + * + * You must authenticate using an access token with the `manage_runners:enterprise` scope to use this endpoint. + */ + delete: operations["enterprise-admin/remove-custom-label-from-self-hosted-runner-for-enterprise"]; + }; + "/enterprises/{enterprise}/audit-log": { + /** Gets the audit log for an enterprise. To use this endpoint, you must be an enterprise admin, and you must use an access token with the `admin:enterprise` scope. */ + get: operations["enterprise-admin/get-audit-log"]; + }; + "/enterprises/{enterprise}/code-scanning/alerts": { + /** + * Lists code scanning alerts for the default branch for all eligible repositories in an enterprise. Eligible repositories are repositories that are owned by organizations that you own or for which you are a security manager. 
For more information, see "[Managing security managers in your organization](https://docs.github.com/organizations/managing-peoples-access-to-your-organization-with-roles/managing-security-managers-in-your-organization)." + * + * To use this endpoint, you must be a member of the enterprise, + * and you must use an access token with the `repo` scope or `security_events` scope. + */ + get: operations["code-scanning/list-alerts-for-enterprise"]; + }; + "/enterprises/{enterprise}/secret-scanning/alerts": { + /** + * Lists secret scanning alerts for eligible repositories in an enterprise, from newest to oldest. + * To use this endpoint, you must be a member of the enterprise, and you must use an access token with the `repo` scope or `security_events` scope. Alerts are only returned for organizations in the enterprise for which you are an organization owner or a [security manager](https://docs.github.com/organizations/managing-peoples-access-to-your-organization-with-roles/managing-security-managers-in-your-organization). + */ + get: operations["secret-scanning/list-alerts-for-enterprise"]; + }; + "/enterprises/{enterprise}/settings/billing/actions": { + /** + * Gets the summary of the free and paid GitHub Actions minutes used. + * + * Paid minutes only apply to workflows in private repositories that use GitHub-hosted runners. Minutes used is listed for each GitHub-hosted runner operating system. Any job re-runs are also included in the usage. The usage does not include the multiplier for macOS and Windows runners and is not rounded up to the nearest whole minute. For more information, see "[Managing billing for GitHub Actions](https://docs.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-actions)". + * + * The authenticated user must be an enterprise admin. + */ + get: operations["billing/get-github-actions-billing-ghe"]; + }; + "/enterprises/{enterprise}/settings/billing/advanced-security": { + /** + * Gets the GitHub Advanced Security active committers for an enterprise per repository. + * + * Each distinct user login across all repositories is counted as a single Advanced Security seat, so the `total_advanced_security_committers` is not the sum of active_users for each repository. + * + * The total number of repositories with committer information is tracked by the `total_count` field. + */ + get: operations["billing/get-github-advanced-security-billing-ghe"]; + }; + "/enterprises/{enterprise}/settings/billing/packages": { + /** + * Gets the free and paid storage used for GitHub Packages in gigabytes. + * + * Paid minutes only apply to packages stored for private repositories. For more information, see "[Managing billing for GitHub Packages](https://docs.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-packages)." + * + * The authenticated user must be an enterprise admin. + */ + get: operations["billing/get-github-packages-billing-ghe"]; + }; + "/enterprises/{enterprise}/settings/billing/shared-storage": { + /** + * Gets the estimated paid and estimated total storage used for GitHub Actions and GitHub Packages. + * + * Paid minutes only apply to packages stored for private repositories. For more information, see "[Managing billing for GitHub Packages](https://docs.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-packages)." + * + * The authenticated user must be an enterprise admin. 
+ */ + get: operations["billing/get-shared-storage-billing-ghe"]; + }; + "/events": { + /** We delay the public events feed by five minutes, which means the most recent event returned by the public events API actually occurred at least five minutes ago. */ + get: operations["activity/list-public-events"]; + }; + "/feeds": { + /** + * GitHub provides several timeline resources in [Atom](http://en.wikipedia.org/wiki/Atom_(standard)) format. The Feeds API lists all the feeds available to the authenticated user: + * + * * **Timeline**: The GitHub global public timeline + * * **User**: The public timeline for any user, using [URI template](https://docs.github.com/rest/overview/resources-in-the-rest-api#hypermedia) + * * **Current user public**: The public timeline for the authenticated user + * * **Current user**: The private timeline for the authenticated user + * * **Current user actor**: The private timeline for activity created by the authenticated user + * * **Current user organizations**: The private timeline for the organizations the authenticated user is a member of. + * * **Security advisories**: A collection of public announcements that provide information about security-related vulnerabilities in software on GitHub. + * + * **Note**: Private feeds are only returned when [authenticating via Basic Auth](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) since current feed URIs use the older, non revocable auth tokens. + */ + get: operations["activity/get-feeds"]; + }; + "/gists": { + /** Lists the authenticated user's gists or if called anonymously, this endpoint returns all public gists: */ + get: operations["gists/list"]; + /** + * Allows you to add a new gist with one or more files. + * + * **Note:** Don't name your files "gistfile" with a numerical suffix. This is the format of the automatic naming scheme that Gist uses internally. + */ + post: operations["gists/create"]; + }; + "/gists/public": { + /** + * List public gists sorted by most recently updated to least recently updated. + * + * Note: With [pagination](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination), you can fetch up to 3000 gists. For example, you can fetch 100 pages with 30 gists per page or 30 pages with 100 gists per page. + */ + get: operations["gists/list-public"]; + }; + "/gists/starred": { + /** List the authenticated user's starred gists: */ + get: operations["gists/list-starred"]; + }; + "/gists/{gist_id}": { + get: operations["gists/get"]; + delete: operations["gists/delete"]; + /** Allows you to update or delete a gist file and rename gist files. Files from the previous version of the gist that aren't explicitly changed during an edit are unchanged. */ + patch: operations["gists/update"]; + }; + "/gists/{gist_id}/comments": { + get: operations["gists/list-comments"]; + post: operations["gists/create-comment"]; + }; + "/gists/{gist_id}/comments/{comment_id}": { + get: operations["gists/get-comment"]; + delete: operations["gists/delete-comment"]; + patch: operations["gists/update-comment"]; + }; + "/gists/{gist_id}/commits": { + get: operations["gists/list-commits"]; + }; + "/gists/{gist_id}/forks": { + get: operations["gists/list-forks"]; + /** **Note**: This was previously `/gists/:gist_id/fork`. */ + post: operations["gists/fork"]; + }; + "/gists/{gist_id}/star": { + get: operations["gists/check-is-starred"]; + /** Note that you'll need to set `Content-Length` to zero when calling out to this endpoint. 
For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)." */ + put: operations["gists/star"]; + delete: operations["gists/unstar"]; + }; + "/gists/{gist_id}/{sha}": { + get: operations["gists/get-revision"]; + }; + "/gitignore/templates": { + /** List all templates available to pass as an option when [creating a repository](https://docs.github.com/rest/reference/repos#create-a-repository-for-the-authenticated-user). */ + get: operations["gitignore/get-all-templates"]; + }; + "/gitignore/templates/{name}": { + /** + * The API also allows fetching the source of a single template. + * Use the raw [media type](https://docs.github.com/rest/overview/media-types/) to get the raw contents. + */ + get: operations["gitignore/get-template"]; + }; + "/installation/repositories": { + /** + * List repositories that an app installation can access. + * + * You must use an [installation access token](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-an-installation) to access this endpoint. + */ + get: operations["apps/list-repos-accessible-to-installation"]; + }; + "/installation/token": { + /** + * Revokes the installation token you're using to authenticate as an installation and access this endpoint. + * + * Once an installation token is revoked, the token is invalidated and cannot be used. Other endpoints that require the revoked installation token must have a new installation token to work. You can create a new token using the "[Create an installation access token for an app](https://docs.github.com/rest/reference/apps#create-an-installation-access-token-for-an-app)" endpoint. + * + * You must use an [installation access token](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-an-installation) to access this endpoint. + */ + delete: operations["apps/revoke-installation-access-token"]; + }; + "/issues": { + /** + * List issues assigned to the authenticated user across all visible repositories including owned repositories, member + * repositories, and organization repositories. You can use the `filter` query parameter to fetch issues that are not + * necessarily assigned to you. + * + * + * **Note**: GitHub's REST API v3 considers every pull request an issue, but not every issue is a pull request. For this + * reason, "Issues" endpoints may return both issues and pull requests in the response. You can identify pull requests by + * the `pull_request` key. Be aware that the `id` of a pull request returned from "Issues" endpoints will be an _issue id_. To find out the pull + * request id, use the "[List pull requests](https://docs.github.com/rest/reference/pulls#list-pull-requests)" endpoint. + */ + get: operations["issues/list"]; + }; + "/licenses": { + get: operations["licenses/get-all-commonly-used"]; + }; + "/licenses/{license}": { + get: operations["licenses/get"]; + }; + "/markdown": { + post: operations["markdown/render"]; + }; + "/markdown/raw": { + /** You must send Markdown as plain text (using a `Content-Type` header of `text/plain` or `text/x-markdown`) to this endpoint, rather than using JSON format. In raw mode, [GitHub Flavored Markdown](https://github.github.com/gfm/) is not supported and Markdown will be rendered in plain format like a README.md file. Markdown content must be 400 KB or less. 
*/ + post: operations["markdown/render-raw"]; + }; + "/marketplace_listing/accounts/{account_id}": { + /** + * Shows whether the user or organization account actively subscribes to a plan listed by the authenticated GitHub App. When someone submits a plan change that won't be processed until the end of their billing cycle, you will also see the upcoming pending change. + * + * GitHub Apps must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. OAuth Apps must use [basic authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) with their client ID and client secret to access this endpoint. + */ + get: operations["apps/get-subscription-plan-for-account"]; + }; + "/marketplace_listing/plans": { + /** + * Lists all plans that are part of your GitHub Marketplace listing. + * + * GitHub Apps must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. OAuth Apps must use [basic authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) with their client ID and client secret to access this endpoint. + */ + get: operations["apps/list-plans"]; + }; + "/marketplace_listing/plans/{plan_id}/accounts": { + /** + * Returns user and organization accounts associated with the specified plan, including free plans. For per-seat pricing, you see the list of accounts that have purchased the plan, including the number of seats purchased. When someone submits a plan change that won't be processed until the end of their billing cycle, you will also see the upcoming pending change. + * + * GitHub Apps must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. OAuth Apps must use [basic authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) with their client ID and client secret to access this endpoint. + */ + get: operations["apps/list-accounts-for-plan"]; + }; + "/marketplace_listing/stubbed/accounts/{account_id}": { + /** + * Shows whether the user or organization account actively subscribes to a plan listed by the authenticated GitHub App. When someone submits a plan change that won't be processed until the end of their billing cycle, you will also see the upcoming pending change. + * + * GitHub Apps must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. OAuth Apps must use [basic authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) with their client ID and client secret to access this endpoint. + */ + get: operations["apps/get-subscription-plan-for-account-stubbed"]; + }; + "/marketplace_listing/stubbed/plans": { + /** + * Lists all plans that are part of your GitHub Marketplace listing. + * + * GitHub Apps must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. OAuth Apps must use [basic authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) with their client ID and client secret to access this endpoint. 
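+ * As a minimal usage sketch, assuming an `Octokit` instance from `@octokit/core` authenticated with a pre-generated GitHub App JWT held in a hypothetical `APP_JWT` environment variable, the call might look like:
+ *
+ * ```
+ * const { Octokit } = require("@octokit/core");
+ * // The stubbed Marketplace endpoints expect the app's JWT as the bearer token.
+ * const octokit = new Octokit({ auth: process.env.APP_JWT });
+ *
+ * octokit
+ *   .request("GET /marketplace_listing/stubbed/plans")
+ *   .then(({ data: plans }) => plans.forEach((plan) => console.log(plan.name)))
+ *   .catch(console.error);
+ * ```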
+ */ + get: operations["apps/list-plans-stubbed"]; + }; + "/marketplace_listing/stubbed/plans/{plan_id}/accounts": { + /** + * Returns user and organization accounts associated with the specified plan, including free plans. For per-seat pricing, you see the list of accounts that have purchased the plan, including the number of seats purchased. When someone submits a plan change that won't be processed until the end of their billing cycle, you will also see the upcoming pending change. + * + * GitHub Apps must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. OAuth Apps must use [basic authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) with their client ID and client secret to access this endpoint. + */ + get: operations["apps/list-accounts-for-plan-stubbed"]; + }; + "/meta": { + /** + * Returns meta information about GitHub, including a list of GitHub's IP addresses. For more information, see "[About GitHub's IP addresses](https://docs.github.com/articles/about-github-s-ip-addresses/)." + * + * **Note:** The IP addresses shown in the documentation's response are only example values. You must always query the API directly to get the latest list of IP addresses. + */ + get: operations["meta/get"]; + }; + "/networks/{owner}/{repo}/events": { + get: operations["activity/list-public-events-for-repo-network"]; + }; + "/notifications": { + /** List all notifications for the current user, sorted by most recently updated. */ + get: operations["activity/list-notifications-for-authenticated-user"]; + /** Marks all notifications as "read" and removes them from the [default view on GitHub](https://github.com/notifications). If the number of notifications is too large to complete in one request, you will receive a `202 Accepted` status and GitHub will run an asynchronous process to mark notifications as "read." To check whether any "unread" notifications remain, you can use the [List notifications for the authenticated user](https://docs.github.com/rest/reference/activity#list-notifications-for-the-authenticated-user) endpoint and pass the query parameter `all=false`. */ + put: operations["activity/mark-notifications-as-read"]; + }; + "/notifications/threads/{thread_id}": { + get: operations["activity/get-thread"]; + patch: operations["activity/mark-thread-as-read"]; + }; + "/notifications/threads/{thread_id}/subscription": { + /** + * This checks to see if the current user is subscribed to a thread. You can also [get a repository subscription](https://docs.github.com/rest/reference/activity#get-a-repository-subscription). + * + * Note that subscriptions are only generated if a user is participating in a conversation--for example, they've replied to the thread, were **@mentioned**, or manually subscribed to a thread. + */ + get: operations["activity/get-thread-subscription-for-authenticated-user"]; + /** + * If you are watching a repository, you receive notifications for all threads by default. Use this endpoint to ignore future notifications for threads until you comment on the thread or get an **@mention**. + * + * You can also use this endpoint to subscribe to threads that you are currently not receiving notifications for or to subscribe to threads that you have previously ignored.
+ * + * Unsubscribing from a conversation in a repository that you are not watching is functionally equivalent to the [Delete a thread subscription](https://docs.github.com/rest/reference/activity#delete-a-thread-subscription) endpoint. + */ + put: operations["activity/set-thread-subscription"]; + /** Mutes all future notifications for a conversation until you comment on the thread or get an **@mention**. If you are watching the repository of the thread, you will still receive notifications. To ignore future notifications for a repository you are watching, use the [Set a thread subscription](https://docs.github.com/rest/reference/activity#set-a-thread-subscription) endpoint and set `ignore` to `true`. */ + delete: operations["activity/delete-thread-subscription"]; + }; + "/octocat": { + /** Get the octocat as ASCII art */ + get: operations["meta/get-octocat"]; + }; + "/organizations": { + /** + * Lists all organizations, in the order that they were created on GitHub. + * + * **Note:** Pagination is powered exclusively by the `since` parameter. Use the [Link header](https://docs.github.com/rest/overview/resources-in-the-rest-api#link-header) to get the URL for the next page of organizations. + */ + get: operations["orgs/list"]; + }; + "/organizations/{organization_id}/custom_roles": { + /** + * List the custom repository roles available in this organization. In order to see custom + * repository roles in an organization, the authenticated user must be an organization owner. + * + * For more information on custom repository roles, see "[Managing custom repository roles for an organization](https://docs.github.com/organizations/managing-peoples-access-to-your-organization-with-roles/managing-custom-repository-roles-for-an-organization)". + */ + get: operations["orgs/list-custom-roles"]; + }; + "/orgs/{org}": { + /** + * To see many of the organization response values, you need to be an authenticated organization owner with the `admin:org` scope. When the value of `two_factor_requirement_enabled` is `true`, the organization requires all members, billing managers, and outside collaborators to enable [two-factor authentication](https://docs.github.com/articles/securing-your-account-with-two-factor-authentication-2fa/). + * + * GitHub Apps with the `Organization plan` permission can use this endpoint to retrieve information about an organization's GitHub plan. See "[Authenticating with GitHub Apps](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/)" for details. For an example response, see 'Response with GitHub plan information' below. + */ + get: operations["orgs/get"]; + /** + * **Parameter Deprecation Notice:** GitHub will replace and discontinue `members_allowed_repository_creation_type` in favor of more granular permissions. The new input parameters are `members_can_create_public_repositories`, `members_can_create_private_repositories` for all organizations and `members_can_create_internal_repositories` for organizations associated with an enterprise account using GitHub Enterprise Cloud or GitHub Enterprise Server 2.20+. For more information, see the [blog post](https://developer.github.com/changes/2019-12-03-internal-visibility-changes). + * + * Enables an authenticated organization owner with the `admin:org` scope to update the organization's profile and member privileges. + */ + patch: operations["orgs/update"]; + }; + "/orgs/{org}/actions/cache/usage": { + /** + * Gets the total GitHub Actions cache usage for an organization.
+ * The data fetched using this API is refreshed approximately every 5 minutes, so values returned from this endpoint may take at least 5 minutes to get updated. + * You must authenticate using an access token with the `read:org` scope to use this endpoint. GitHub Apps must have the `organization_administration:read` permission to use this endpoint. + */ + get: operations["actions/get-actions-cache-usage-for-org"]; + }; + "/orgs/{org}/actions/cache/usage-by-repository": { + /** + * Lists repositories and their GitHub Actions cache usage for an organization. + * The data fetched using this API is refreshed approximately every 5 minutes, so values returned from this endpoint may take at least 5 minutes to get updated. + * You must authenticate using an access token with the `read:org` scope to use this endpoint. GitHub Apps must have the `organization_administration:read` permission to use this endpoint. + */ + get: operations["actions/get-actions-cache-usage-by-repo-for-org"]; + }; + "/orgs/{org}/actions/oidc/customization/sub": { + /** + * Gets the customization template for an OpenID Connect (OIDC) subject claim. + * You must authenticate using an access token with the `read:org` scope to use this endpoint. + * GitHub Apps must have the `organization_administration:write` permission to use this endpoint. + */ + get: operations["oidc/get-oidc-custom-sub-template-for-org"]; + /** + * Creates or updates the customization template for an OpenID Connect (OIDC) subject claim. + * You must authenticate using an access token with the `write:org` scope to use this endpoint. + * GitHub Apps must have the `admin:org` permission to use this endpoint. + */ + put: operations["oidc/update-oidc-custom-sub-template-for-org"]; + }; + "/orgs/{org}/actions/permissions": { + /** + * Gets the GitHub Actions permissions policy for repositories and allowed actions and reusable workflows in an organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. + */ + get: operations["actions/get-github-actions-permissions-organization"]; + /** + * Sets the GitHub Actions permissions policy for repositories and allowed actions and reusable workflows in an organization. + * + * If the organization belongs to an enterprise that has set restrictive permissions at the enterprise level, such as `allowed_actions` to `selected` actions and reusable workflows, then you cannot override them for the organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. + */ + put: operations["actions/set-github-actions-permissions-organization"]; + }; + "/orgs/{org}/actions/permissions/repositories": { + /** + * Lists the selected repositories that are enabled for GitHub Actions in an organization. To use this endpoint, the organization permission policy for `enabled_repositories` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an organization](#set-github-actions-permissions-for-an-organization)." + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API.
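+ * As a minimal usage sketch, assuming an `Octokit` instance from `@octokit/core`, an `admin:org` token in `GITHUB_TOKEN`, and a placeholder organization name, the call might look like:
+ *
+ * ```
+ * const { Octokit } = require("@octokit/core");
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ *
+ * octokit
+ *   .request("GET /orgs/{org}/actions/permissions/repositories", { org: "octo-org" })
+ *   .then(({ data }) =>
+ *     // data.repositories holds the repositories enabled for GitHub Actions
+ *     data.repositories.forEach((repo) => console.log(repo.full_name))
+ *   )
+ *   .catch(console.error);
+ * ```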
+ */ + get: operations["actions/list-selected-repositories-enabled-github-actions-organization"]; + /** + * Replaces the list of selected repositories that are enabled for GitHub Actions in an organization. To use this endpoint, the organization permission policy for `enabled_repositories` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an organization](#set-github-actions-permissions-for-an-organization)." + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. + */ + put: operations["actions/set-selected-repositories-enabled-github-actions-organization"]; + }; + "/orgs/{org}/actions/permissions/repositories/{repository_id}": { + /** + * Adds a repository to the list of selected repositories that are enabled for GitHub Actions in an organization. To use this endpoint, the organization permission policy for `enabled_repositories` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an organization](#set-github-actions-permissions-for-an-organization)." + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. + */ + put: operations["actions/enable-selected-repository-github-actions-organization"]; + /** + * Removes a repository from the list of selected repositories that are enabled for GitHub Actions in an organization. To use this endpoint, the organization permission policy for `enabled_repositories` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an organization](#set-github-actions-permissions-for-an-organization)." + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. + */ + delete: operations["actions/disable-selected-repository-github-actions-organization"]; + }; + "/orgs/{org}/actions/permissions/selected-actions": { + /** + * Gets the selected actions and reusable workflows that are allowed in an organization. To use this endpoint, the organization permission policy for `allowed_actions` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an organization](#set-github-actions-permissions-for-an-organization)." + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. + */ + get: operations["actions/get-allowed-actions-organization"]; + /** + * Sets the actions and reusable workflows that are allowed in an organization. To use this endpoint, the organization permission policy for `allowed_actions` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an organization](#set-github-actions-permissions-for-an-organization)." + * + * If the organization belongs to an enterprise that has `selected` actions and reusable workflows set at the enterprise level, then you cannot override any of the enterprise's allowed actions and reusable workflows settings. + * + * To use the `patterns_allowed` setting for private repositories, the organization must belong to an enterprise.
If the organization does not belong to an enterprise, then the `patterns_allowed` setting only applies to public repositories in the organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. + */ + put: operations["actions/set-allowed-actions-organization"]; + }; + "/orgs/{org}/actions/permissions/workflow": { + /** + * Gets the default workflow permissions granted to the `GITHUB_TOKEN` when running workflows in an organization, + * as well as whether GitHub Actions can submit approving pull request reviews. For more information, see + * "[Setting the permissions of the GITHUB_TOKEN for your organization](https://docs.github.com/organizations/managing-organization-settings/disabling-or-limiting-github-actions-for-your-organization#setting-the-permissions-of-the-github_token-for-your-organization)." + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. + */ + get: operations["actions/get-github-actions-default-workflow-permissions-organization"]; + /** + * Sets the default workflow permissions granted to the `GITHUB_TOKEN` when running workflows in an organization, and sets if GitHub Actions + * can submit approving pull request reviews. For more information, see + * "[Setting the permissions of the GITHUB_TOKEN for your organization](https://docs.github.com/organizations/managing-organization-settings/disabling-or-limiting-github-actions-for-your-organization#setting-the-permissions-of-the-github_token-for-your-organization)." + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. + */ + put: operations["actions/set-github-actions-default-workflow-permissions-organization"]; + }; + "/orgs/{org}/actions/runner-groups": { + /** + * The self-hosted runner groups REST API is available with GitHub Enterprise Cloud. For more information, see "[GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products)." + * + * Lists all self-hosted runner groups configured in an organization and inherited from an enterprise. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + get: operations["actions/list-self-hosted-runner-groups-for-org"]; + /** + * The self-hosted runner groups REST API is available with GitHub Enterprise Cloud and GitHub Enterprise Server. For more information, see "[GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products)." + * + * Creates a new self-hosted runner group for an organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + post: operations["actions/create-self-hosted-runner-group-for-org"]; + }; + "/orgs/{org}/actions/runner-groups/{runner_group_id}": { + /** + * The self-hosted runner groups REST API is available with GitHub Enterprise Cloud. For more information, see "[GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products)." + * + * Gets a specific self-hosted runner group for an organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. 
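+ * As a minimal usage sketch, assuming an `Octokit` instance from `@octokit/core`, an `admin:org` token in `GITHUB_TOKEN`, and placeholder organization and runner-group values, the call might look like:
+ *
+ * ```
+ * const { Octokit } = require("@octokit/core");
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ *
+ * octokit
+ *   .request("GET /orgs/{org}/actions/runner-groups/{runner_group_id}", {
+ *     org: "octo-org",    // placeholder organization
+ *     runner_group_id: 1, // placeholder runner group id
+ *   })
+ *   .then(({ data: group }) => console.log(group.name, group.visibility))
+ *   .catch(console.error);
+ * ```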
+ */ + get: operations["actions/get-self-hosted-runner-group-for-org"]; + /** + * The self-hosted runner groups REST API is available with GitHub Enterprise Cloud. For more information, see "[GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products)." + * + * Deletes a self-hosted runner group for an organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + delete: operations["actions/delete-self-hosted-runner-group-from-org"]; + /** + * The self-hosted runner groups REST API is available with GitHub Enterprise Cloud. For more information, see "[GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products)." + * + * Updates the `name` and `visibility` of a self-hosted runner group in an organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + patch: operations["actions/update-self-hosted-runner-group-for-org"]; + }; + "/orgs/{org}/actions/runner-groups/{runner_group_id}/repositories": { + /** + * The self-hosted runner groups REST API is available with GitHub Enterprise Cloud and GitHub Enterprise Server. For more information, see "[GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products)." + * + * Lists the repositories with access to a self-hosted runner group configured in an organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + get: operations["actions/list-repo-access-to-self-hosted-runner-group-in-org"]; + /** + * The self-hosted runner groups REST API is available with GitHub Enterprise Cloud. For more information, see "[GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products)." + * + * Replaces the list of repositories that have access to a self-hosted runner group configured in an organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + put: operations["actions/set-repo-access-to-self-hosted-runner-group-in-org"]; + }; + "/orgs/{org}/actions/runner-groups/{runner_group_id}/repositories/{repository_id}": { + /** + * The self-hosted runner groups REST API is available with GitHub Enterprise Cloud. For more information, see "[GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products)." + * + * + * Adds a repository to the list of selected repositories that can access a self-hosted runner group. The runner group must have `visibility` set to `selected`. For more information, see "[Create a self-hosted runner group for an organization](#create-a-self-hosted-runner-group-for-an-organization)." + * + * You must authenticate using an access token with the `admin:org` + * scope to use this endpoint. + */ + put: operations["actions/add-repo-access-to-self-hosted-runner-group-in-org"]; + /** + * The self-hosted runner groups REST API is available with GitHub Enterprise Cloud. For more information, see "[GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products)." + * + * + * Removes a repository from the list of selected repositories that can access a self-hosted runner group. The runner group must have `visibility` set to `selected`. For more information, see "[Create a self-hosted runner group for an organization](#create-a-self-hosted-runner-group-for-an-organization)." 
+ * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + delete: operations["actions/remove-repo-access-to-self-hosted-runner-group-in-org"]; + }; + "/orgs/{org}/actions/runner-groups/{runner_group_id}/runners": { + /** + * The self-hosted runner groups REST API is available with GitHub Enterprise Cloud. For more information, see "[GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products)." + * + * Lists self-hosted runners that are in a specific organization group. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + get: operations["actions/list-self-hosted-runners-in-group-for-org"]; + /** + * The self-hosted runner groups REST API is available with GitHub Enterprise Cloud. For more information, see "[GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products)." + * + * Replaces the list of self-hosted runners that are part of an organization runner group. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + put: operations["actions/set-self-hosted-runners-in-group-for-org"]; + }; + "/orgs/{org}/actions/runner-groups/{runner_group_id}/runners/{runner_id}": { + /** + * The self-hosted runner groups REST API is available with GitHub Enterprise Cloud. For more information, see "[GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products)." + * + * + * Adds a self-hosted runner to a runner group configured in an organization. + * + * You must authenticate using an access token with the `admin:org` + * scope to use this endpoint. + */ + put: operations["actions/add-self-hosted-runner-to-group-for-org"]; + /** + * The self-hosted runner groups REST API is available with GitHub Enterprise Cloud. For more information, see "[GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products)." + * + * + * Removes a self-hosted runner from a group configured in an organization. The runner is then returned to the default group. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + delete: operations["actions/remove-self-hosted-runner-from-group-for-org"]; + }; + "/orgs/{org}/actions/runners": { + /** + * Lists all self-hosted runners configured in an organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + get: operations["actions/list-self-hosted-runners-for-org"]; + }; + "/orgs/{org}/actions/runners/downloads": { + /** + * Lists binaries for the runner application that you can download and run. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + get: operations["actions/list-runner-applications-for-org"]; + }; + "/orgs/{org}/actions/runners/registration-token": { + /** + * Returns a token that you can pass to the `config` script. The token expires after one hour. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + * + * #### Example using registration token + * + * Configure your self-hosted runner, replacing `TOKEN` with the registration token provided by this endpoint. 
+ * + * ``` + * ./config.sh --url https://github.com/octo-org --token TOKEN + * ``` + */ + post: operations["actions/create-registration-token-for-org"]; + }; + "/orgs/{org}/actions/runners/remove-token": { + /** + * Returns a token that you can pass to the `config` script to remove a self-hosted runner from an organization. The token expires after one hour. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + * + * #### Example using remove token + * + * To remove your self-hosted runner from an organization, replace `TOKEN` with the remove token provided by this + * endpoint. + * + * ``` + * ./config.sh remove --token TOKEN + * ``` + */ + post: operations["actions/create-remove-token-for-org"]; + }; + "/orgs/{org}/actions/runners/{runner_id}": { + /** + * Gets a specific self-hosted runner configured in an organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + get: operations["actions/get-self-hosted-runner-for-org"]; + /** + * Forces the removal of a self-hosted runner from an organization. You can use this endpoint to completely remove the runner when the machine you were using no longer exists. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + delete: operations["actions/delete-self-hosted-runner-from-org"]; + }; + "/orgs/{org}/actions/runners/{runner_id}/labels": { + /** + * Lists all labels for a self-hosted runner configured in an organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + get: operations["actions/list-labels-for-self-hosted-runner-for-org"]; + /** + * Remove all previous custom labels and set the new custom labels for a specific + * self-hosted runner configured in an organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + put: operations["actions/set-custom-labels-for-self-hosted-runner-for-org"]; + /** + * Add custom labels to a self-hosted runner configured in an organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + post: operations["actions/add-custom-labels-to-self-hosted-runner-for-org"]; + /** + * Remove all custom labels from a self-hosted runner configured in an + * organization. Returns the remaining read-only labels from the runner. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + delete: operations["actions/remove-all-custom-labels-from-self-hosted-runner-for-org"]; + }; + "/orgs/{org}/actions/runners/{runner_id}/labels/{name}": { + /** + * Remove a custom label from a self-hosted runner configured + * in an organization. Returns the remaining labels from the runner. + * + * This endpoint returns a `404 Not Found` status if the custom label is not + * present on the runner. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + delete: operations["actions/remove-custom-label-from-self-hosted-runner-for-org"]; + }; + "/orgs/{org}/actions/secrets": { + /** Lists all secrets available in an organization without revealing their encrypted values. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to use this endpoint. 
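+ * As a minimal usage sketch, assuming an `Octokit` instance from `@octokit/core`, an `admin:org` token in `GITHUB_TOKEN`, and a placeholder organization name, the call might look like:
+ *
+ * ```
+ * const { Octokit } = require("@octokit/core");
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ *
+ * octokit
+ *   .request("GET /orgs/{org}/actions/secrets", { org: "octo-org" })
+ *   .then(({ data }) =>
+ *     // Only secret names and metadata are returned, never the encrypted values.
+ *     data.secrets.forEach((secret) => console.log(secret.name, secret.visibility))
+ *   )
+ *   .catch(console.error);
+ * ```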
*/ + get: operations["actions/list-org-secrets"]; + }; + "/orgs/{org}/actions/secrets/public-key": { + /** Gets your public key, which you need to encrypt secrets. You need to encrypt a secret before you can create or update secrets. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to use this endpoint. */ + get: operations["actions/get-org-public-key"]; + }; + "/orgs/{org}/actions/secrets/{secret_name}": { + /** Gets a single organization secret without revealing its encrypted value. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to use this endpoint. */ + get: operations["actions/get-org-secret"]; + /** + * Creates or updates an organization secret with an encrypted value. Encrypt your secret using + * [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages). You must authenticate using an access + * token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to + * use this endpoint. + * + * #### Example encrypting a secret using Node.js + * + * Encrypt your secret using the [tweetsodium](https://github.com/github/tweetsodium) library. + * + * ``` + * const sodium = require('tweetsodium'); + * + * const key = "base64-encoded-public-key"; + * const value = "plain-text-secret"; + * + * // Convert the message and key to Uint8Array's (Buffer implements that interface) + * const messageBytes = Buffer.from(value); + * const keyBytes = Buffer.from(key, 'base64'); + * + * // Encrypt using LibSodium. + * const encryptedBytes = sodium.seal(messageBytes, keyBytes); + * + * // Base64 the encrypted secret + * const encrypted = Buffer.from(encryptedBytes).toString('base64'); + * + * console.log(encrypted); + * ``` + * + * + * #### Example encrypting a secret using Python + * + * Encrypt your secret using [pynacl](https://pynacl.readthedocs.io/en/latest/public/#nacl-public-sealedbox) with Python 3. + * + * ``` + * from base64 import b64encode + * from nacl import encoding, public + * + * def encrypt(public_key: str, secret_value: str) -> str: + * """Encrypt a Unicode string using the public key.""" + * public_key = public.PublicKey(public_key.encode("utf-8"), encoding.Base64Encoder()) + * sealed_box = public.SealedBox(public_key) + * encrypted = sealed_box.encrypt(secret_value.encode("utf-8")) + * return b64encode(encrypted).decode("utf-8") + * ``` + * + * #### Example encrypting a secret using C# + * + * Encrypt your secret using the [Sodium.Core](https://www.nuget.org/packages/Sodium.Core/) package. + * + * ``` + * var secretValue = System.Text.Encoding.UTF8.GetBytes("mySecret"); + * var publicKey = Convert.FromBase64String("2Sg8iYjAxxmI2LvUXpJjkYrMxURPc8r+dB7TJyvvcCU="); + * + * var sealedPublicKeyBox = Sodium.SealedPublicKeyBox.Create(secretValue, publicKey); + * + * Console.WriteLine(Convert.ToBase64String(sealedPublicKeyBox)); + * ``` + * + * #### Example encrypting a secret using Ruby + * + * Encrypt your secret using the [rbnacl](https://github.com/RubyCrypto/rbnacl) gem. 
+ * + * ```ruby + * require "rbnacl" + * require "base64" + * + * key = Base64.decode64("+ZYvJDZMHUfBkJdyq5Zm9SKqeuBQ4sj+6sfjlH4CgG0=") + * public_key = RbNaCl::PublicKey.new(key) + * + * box = RbNaCl::Boxes::Sealed.from_public_key(public_key) + * encrypted_secret = box.encrypt("my_secret") + * + * # Print the base64 encoded secret + * puts Base64.strict_encode64(encrypted_secret) + * ``` + */ + put: operations["actions/create-or-update-org-secret"]; + /** Deletes a secret in an organization using the secret name. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to use this endpoint. */ + delete: operations["actions/delete-org-secret"]; + }; + "/orgs/{org}/actions/secrets/{secret_name}/repositories": { + /** Lists all repositories that have been selected when the `visibility` for repository access to a secret is set to `selected`. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to use this endpoint. */ + get: operations["actions/list-selected-repos-for-org-secret"]; + /** Replaces all repositories for an organization secret when the `visibility` for repository access is set to `selected`. The visibility is set when you [Create or update an organization secret](https://docs.github.com/rest/reference/actions#create-or-update-an-organization-secret). You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to use this endpoint. */ + put: operations["actions/set-selected-repos-for-org-secret"]; + }; + "/orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}": { + /** Adds a repository to an organization secret when the `visibility` for repository access is set to `selected`. The visibility is set when you [Create or update an organization secret](https://docs.github.com/rest/reference/actions#create-or-update-an-organization-secret). You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to use this endpoint. */ + put: operations["actions/add-selected-repo-to-org-secret"]; + /** Removes a repository from an organization secret when the `visibility` for repository access is set to `selected`. The visibility is set when you [Create or update an organization secret](https://docs.github.com/rest/reference/actions#create-or-update-an-organization-secret). You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to use this endpoint. */ + delete: operations["actions/remove-selected-repo-from-org-secret"]; + }; + "/orgs/{org}/audit-log": { + /** + * Gets the audit log for an organization. For more information, see "[Reviewing the audit log for your organization](https://docs.github.com/github/setting-up-and-managing-organizations-and-teams/reviewing-the-audit-log-for-your-organization)." + * + * This endpoint is available for organizations on GitHub Enterprise Cloud. To use this endpoint, you must be an organization owner, and you must use an access token with the `admin:org` scope. GitHub Apps must have the `organization_administration` read permission to use this endpoint. + * + * By default, the response includes up to 30 events from the past three months. 
Use the `phrase` parameter to filter results and retrieve older events. For example, use the `phrase` parameter with the `created` qualifier to filter events based on when the events occurred. For more information, see "[Reviewing the audit log for your organization](https://docs.github.com/organizations/keeping-your-organization-secure/managing-security-settings-for-your-organization/reviewing-the-audit-log-for-your-organization#searching-the-audit-log)." + * + * Use pagination to retrieve fewer or more than 30 events. For more information, see "[Resources in the REST API](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination)." + */ + get: operations["orgs/get-audit-log"]; + }; + "/orgs/{org}/blocks": { + /** List the users blocked by an organization. */ + get: operations["orgs/list-blocked-users"]; + }; + "/orgs/{org}/blocks/{username}": { + get: operations["orgs/check-blocked-user"]; + put: operations["orgs/block-user"]; + delete: operations["orgs/unblock-user"]; + }; + "/orgs/{org}/code-scanning/alerts": { + /** + * Lists code scanning alerts for the default branch for all eligible repositories in an organization. Eligible repositories are repositories that are owned by organizations that you own or for which you are a security manager. For more information, see "[Managing security managers in your organization](https://docs.github.com/organizations/managing-peoples-access-to-your-organization-with-roles/managing-security-managers-in-your-organization)." + * + * To use this endpoint, you must be an owner or security manager for the organization, and you must use an access token with the `repo` scope or `security_events` scope. + * + * GitHub Apps must have the `security_events` read permission to use this endpoint. + */ + get: operations["code-scanning/list-alerts-for-org"]; + }; + "/orgs/{org}/codespaces": { + /** + * Lists the codespaces associated to a specified organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + get: operations["codespaces/list-in-organization"]; + }; + "/orgs/{org}/credential-authorizations": { + /** + * Listing and deleting credential authorizations is available to organizations with GitHub Enterprise Cloud. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products). + * + * An authenticated organization owner with the `read:org` scope can list all credential authorizations for an organization that uses SAML single sign-on (SSO). The credentials are either personal access tokens or SSH keys that organization members have authorized for the organization. For more information, see [About authentication with SAML single sign-on](https://docs.github.com/en/articles/about-authentication-with-saml-single-sign-on). + */ + get: operations["orgs/list-saml-sso-authorizations"]; + }; + "/orgs/{org}/credential-authorizations/{credential_id}": { + /** + * Listing and deleting credential authorizations is available to organizations with GitHub Enterprise Cloud. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products). + * + * An authenticated organization owner with the `admin:org` scope can remove a credential authorization for an organization that uses SAML SSO. Once you remove someone's credential authorization, they will need to create a new personal access token or SSH key and authorize it for the organization they want to access. 
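+ * As a minimal usage sketch, assuming an `Octokit` instance from `@octokit/core`, an `admin:org` token in `GITHUB_TOKEN`, and placeholder organization and credential values, the call might look like:
+ *
+ * ```
+ * const { Octokit } = require("@octokit/core");
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ *
+ * octokit
+ *   .request("DELETE /orgs/{org}/credential-authorizations/{credential_id}", {
+ *     org: "octo-org", // placeholder organization
+ *     credential_id: 1, // placeholder credential authorization id
+ *   })
+ *   .then(({ status }) => console.log(status)) // 204 on success
+ *   .catch(console.error);
+ * ```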
+ */ + delete: operations["orgs/remove-saml-sso-authorization"]; + }; + "/orgs/{org}/dependabot/secrets": { + /** Lists all secrets available in an organization without revealing their encrypted values. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` organization permission to use this endpoint. */ + get: operations["dependabot/list-org-secrets"]; + }; + "/orgs/{org}/dependabot/secrets/public-key": { + /** Gets your public key, which you need to encrypt secrets. You need to encrypt a secret before you can create or update secrets. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` organization permission to use this endpoint. */ + get: operations["dependabot/get-org-public-key"]; + }; + "/orgs/{org}/dependabot/secrets/{secret_name}": { + /** Gets a single organization secret without revealing its encrypted value. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` organization permission to use this endpoint. */ + get: operations["dependabot/get-org-secret"]; + /** + * Creates or updates an organization secret with an encrypted value. Encrypt your secret using + * [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages). You must authenticate using an access + * token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` organization + * permission to use this endpoint. + * + * #### Example encrypting a secret using Node.js + * + * Encrypt your secret using the [tweetsodium](https://github.com/github/tweetsodium) library. + * + * ``` + * const sodium = require('tweetsodium'); + * + * const key = "base64-encoded-public-key"; + * const value = "plain-text-secret"; + * + * // Convert the message and key to Uint8Array's (Buffer implements that interface) + * const messageBytes = Buffer.from(value); + * const keyBytes = Buffer.from(key, 'base64'); + * + * // Encrypt using LibSodium. + * const encryptedBytes = sodium.seal(messageBytes, keyBytes); + * + * // Base64 the encrypted secret + * const encrypted = Buffer.from(encryptedBytes).toString('base64'); + * + * console.log(encrypted); + * ``` + * + * + * #### Example encrypting a secret using Python + * + * Encrypt your secret using [pynacl](https://pynacl.readthedocs.io/en/latest/public/#nacl-public-sealedbox) with Python 3. + * + * ``` + * from base64 import b64encode + * from nacl import encoding, public + * + * def encrypt(public_key: str, secret_value: str) -> str: + * """Encrypt a Unicode string using the public key.""" + * public_key = public.PublicKey(public_key.encode("utf-8"), encoding.Base64Encoder()) + * sealed_box = public.SealedBox(public_key) + * encrypted = sealed_box.encrypt(secret_value.encode("utf-8")) + * return b64encode(encrypted).decode("utf-8") + * ``` + * + * #### Example encrypting a secret using C# + * + * Encrypt your secret using the [Sodium.Core](https://www.nuget.org/packages/Sodium.Core/) package. 
+ * + * ``` + * var secretValue = System.Text.Encoding.UTF8.GetBytes("mySecret"); + * var publicKey = Convert.FromBase64String("2Sg8iYjAxxmI2LvUXpJjkYrMxURPc8r+dB7TJyvvcCU="); + * + * var sealedPublicKeyBox = Sodium.SealedPublicKeyBox.Create(secretValue, publicKey); + * + * Console.WriteLine(Convert.ToBase64String(sealedPublicKeyBox)); + * ``` + * + * #### Example encrypting a secret using Ruby + * + * Encrypt your secret using the [rbnacl](https://github.com/RubyCrypto/rbnacl) gem. + * + * ```ruby + * require "rbnacl" + * require "base64" + * + * key = Base64.decode64("+ZYvJDZMHUfBkJdyq5Zm9SKqeuBQ4sj+6sfjlH4CgG0=") + * public_key = RbNaCl::PublicKey.new(key) + * + * box = RbNaCl::Boxes::Sealed.from_public_key(public_key) + * encrypted_secret = box.encrypt("my_secret") + * + * # Print the base64 encoded secret + * puts Base64.strict_encode64(encrypted_secret) + * ``` + */ + put: operations["dependabot/create-or-update-org-secret"]; + /** Deletes a secret in an organization using the secret name. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` organization permission to use this endpoint. */ + delete: operations["dependabot/delete-org-secret"]; + }; + "/orgs/{org}/dependabot/secrets/{secret_name}/repositories": { + /** Lists all repositories that have been selected when the `visibility` for repository access to a secret is set to `selected`. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` organization permission to use this endpoint. */ + get: operations["dependabot/list-selected-repos-for-org-secret"]; + /** Replaces all repositories for an organization secret when the `visibility` for repository access is set to `selected`. The visibility is set when you [Create or update an organization secret](https://docs.github.com/rest/reference/dependabot#create-or-update-an-organization-secret). You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` organization permission to use this endpoint. */ + put: operations["dependabot/set-selected-repos-for-org-secret"]; + }; + "/orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}": { + /** Adds a repository to an organization secret when the `visibility` for repository access is set to `selected`. The visibility is set when you [Create or update an organization secret](https://docs.github.com/rest/reference/dependabot#create-or-update-an-organization-secret). You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` organization permission to use this endpoint. */ + put: operations["dependabot/add-selected-repo-to-org-secret"]; + /** Removes a repository from an organization secret when the `visibility` for repository access is set to `selected`. The visibility is set when you [Create or update an organization secret](https://docs.github.com/rest/reference/dependabot#create-or-update-an-organization-secret). You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` organization permission to use this endpoint. 
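+ * As a minimal usage sketch, assuming an `Octokit` instance from `@octokit/core`, an `admin:org` token in `GITHUB_TOKEN`, and placeholder organization, secret, and repository values, the call might look like:
+ *
+ * ```
+ * const { Octokit } = require("@octokit/core");
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ *
+ * octokit
+ *   .request("DELETE /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}", {
+ *     org: "octo-org",          // placeholder organization
+ *     secret_name: "MY_SECRET", // placeholder secret name
+ *     repository_id: 1,         // placeholder repository id
+ *   })
+ *   .then(({ status }) => console.log(status)) // 204 on success
+ *   .catch(console.error);
+ * ```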
*/ + delete: operations["dependabot/remove-selected-repo-from-org-secret"]; + }; + "/orgs/{org}/events": { + get: operations["activity/list-public-org-events"]; + }; + "/orgs/{org}/external-group/{group_id}": { + /** + * Displays information about the specific group's usage. Provides a list of the group's external members as well as a list of teams that this group is connected to. + * + * You can manage team membership with your identity provider using Enterprise Managed Users for GitHub Enterprise Cloud. For more information, see "[GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products)" in the GitHub Help documentation. + */ + get: operations["teams/external-idp-group-info-for-org"]; + }; + "/orgs/{org}/external-groups": { + /** + * Lists external groups available in an organization. You can query the groups using the `display_name` parameter, only groups with a `group_name` containing the text provided in the `display_name` parameter will be returned. You can also limit your page results using the `per_page` parameter. GitHub generates a url-encoded `page` token using a cursor value for where the next page begins. For more information on cursor pagination, see "[Offset and Cursor Pagination explained](https://dev.to/jackmarchant/offset-and-cursor-pagination-explained-b89)." + * + * You can manage team membership with your identity provider using Enterprise Managed Users for GitHub Enterprise Cloud. For more information, see "[GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products)" in the GitHub Help documentation. + */ + get: operations["teams/list-external-idp-groups-for-org"]; + }; + "/orgs/{org}/failed_invitations": { + /** The return hash contains `failed_at` and `failed_reason` fields which represent the time at which the invitation failed and the reason for the failure. */ + get: operations["orgs/list-failed-invitations"]; + }; + "/orgs/{org}/hooks": { + get: operations["orgs/list-webhooks"]; + /** Here's how you can create a hook that posts payloads in JSON format: */ + post: operations["orgs/create-webhook"]; + }; + "/orgs/{org}/hooks/{hook_id}": { + /** Returns a webhook configured in an organization. To get only the webhook `config` properties, see "[Get a webhook configuration for an organization](/rest/reference/orgs#get-a-webhook-configuration-for-an-organization)." */ + get: operations["orgs/get-webhook"]; + delete: operations["orgs/delete-webhook"]; + /** Updates a webhook configured in an organization. When you update a webhook, the `secret` will be overwritten. If you previously had a `secret` set, you must provide the same `secret` or set a new `secret` or the secret will be removed. If you are only updating individual webhook `config` properties, use "[Update a webhook configuration for an organization](/rest/reference/orgs#update-a-webhook-configuration-for-an-organization)." */ + patch: operations["orgs/update-webhook"]; + }; + "/orgs/{org}/hooks/{hook_id}/config": { + /** + * Returns the webhook configuration for an organization. To get more information about the webhook, including the `active` state and `events`, use "[Get an organization webhook ](/rest/reference/orgs#get-an-organization-webhook)." + * + * Access tokens must have the `admin:org_hook` scope, and GitHub Apps must have the `organization_hooks:read` permission. + */ + get: operations["orgs/get-webhook-config-for-org"]; + /** + * Updates the webhook configuration for an organization. 
To update more information about the webhook, including the `active` state and `events`, use "[Update an organization webhook ](/rest/reference/orgs#update-an-organization-webhook)." + * + * Access tokens must have the `admin:org_hook` scope, and GitHub Apps must have the `organization_hooks:write` permission. + */ + patch: operations["orgs/update-webhook-config-for-org"]; + }; + "/orgs/{org}/hooks/{hook_id}/deliveries": { + /** Returns a list of webhook deliveries for a webhook configured in an organization. */ + get: operations["orgs/list-webhook-deliveries"]; + }; + "/orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}": { + /** Returns a delivery for a webhook configured in an organization. */ + get: operations["orgs/get-webhook-delivery"]; + }; + "/orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}/attempts": { + /** Redeliver a delivery for a webhook configured in an organization. */ + post: operations["orgs/redeliver-webhook-delivery"]; + }; + "/orgs/{org}/hooks/{hook_id}/pings": { + /** This will trigger a [ping event](https://docs.github.com/webhooks/#ping-event) to be sent to the hook. */ + post: operations["orgs/ping-webhook"]; + }; + "/orgs/{org}/installation": { + /** + * Enables an authenticated GitHub App to find the organization's installation information. + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + get: operations["apps/get-org-installation"]; + }; + "/orgs/{org}/installations": { + /** Lists all GitHub Apps in an organization. The installation count includes all GitHub Apps installed on repositories in the organization. You must be an organization owner with `admin:read` scope to use this endpoint. */ + get: operations["orgs/list-app-installations"]; + }; + "/orgs/{org}/interaction-limits": { + /** Shows which type of GitHub user can interact with this organization and when the restriction expires. If there are no restrictions, you will see an empty response. */ + get: operations["interactions/get-restrictions-for-org"]; + /** Temporarily restricts interactions to a certain type of GitHub user in any public repository in the given organization. You must be an organization owner to set these restrictions. Setting the interaction limit at the organization level will overwrite any interaction limits that are set for individual repositories owned by the organization. */ + put: operations["interactions/set-restrictions-for-org"]; + /** Removes all interaction restrictions from public repositories in the given organization. You must be an organization owner to remove restrictions. */ + delete: operations["interactions/remove-restrictions-for-org"]; + }; + "/orgs/{org}/invitations": { + /** The return hash contains a `role` field which refers to the Organization Invitation role and will be one of the following values: `direct_member`, `admin`, `billing_manager`, `hiring_manager`, or `reinstate`. If the invitee is not a GitHub member, the `login` field in the return hash will be `null`. */ + get: operations["orgs/list-pending-invitations"]; + /** + * Invite people to an organization by using their GitHub user ID or their email address. In order to create invitations in an organization, the authenticated user must be an organization owner. + * + * This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications).
Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. + */ + post: operations["orgs/create-invitation"]; + }; + "/orgs/{org}/invitations/{invitation_id}": { + /** + * Cancel an organization invitation. In order to cancel an organization invitation, the authenticated user must be an organization owner. + * + * This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). + */ + delete: operations["orgs/cancel-invitation"]; + }; + "/orgs/{org}/invitations/{invitation_id}/teams": { + /** List all teams associated with an invitation. In order to see invitations in an organization, the authenticated user must be an organization owner. */ + get: operations["orgs/list-invitation-teams"]; + }; + "/orgs/{org}/issues": { + /** + * List issues in an organization assigned to the authenticated user. + * + * **Note**: GitHub's REST API v3 considers every pull request an issue, but not every issue is a pull request. For this + * reason, "Issues" endpoints may return both issues and pull requests in the response. You can identify pull requests by + * the `pull_request` key. Be aware that the `id` of a pull request returned from "Issues" endpoints will be an _issue id_. To find out the pull + * request id, use the "[List pull requests](https://docs.github.com/rest/reference/pulls#list-pull-requests)" endpoint. + */ + get: operations["issues/list-for-org"]; + }; + "/orgs/{org}/members": { + /** List all users who are members of an organization. If the authenticated user is also a member of this organization then both concealed and public members will be returned. */ + get: operations["orgs/list-members"]; + }; + "/orgs/{org}/members/{username}": { + /** Check if a user is, publicly or privately, a member of the organization. */ + get: operations["orgs/check-membership-for-user"]; + /** Removing a user from this list will remove them from all teams and they will no longer have any access to the organization's repositories. */ + delete: operations["orgs/remove-member"]; + }; + "/orgs/{org}/members/{username}/codespaces/{codespace_name}": { + /** + * Deletes a user's codespace. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + delete: operations["codespaces/delete-from-organization"]; + }; + "/orgs/{org}/members/{username}/codespaces/{codespace_name}/stop": { + /** + * Stops a user's codespace. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + post: operations["codespaces/stop-in-organization"]; + }; + "/orgs/{org}/memberships/{username}": { + /** In order to get a user's membership with an organization, the authenticated user must be an organization member. The `state` parameter in the response can be used to identify the user's membership status. */ + get: operations["orgs/get-membership-for-user"]; + /** + * Only authenticated organization owners can add a member to the organization or update the member's role. + * + * * If the authenticated user is _adding_ a member to the organization, the invited user will receive an email inviting them to the organization. 
The user's [membership status](https://docs.github.com/rest/reference/orgs#get-organization-membership-for-a-user) will be `pending` until they accept the invitation. + * + * * Authenticated users can _update_ a user's membership by passing the `role` parameter. If the authenticated user changes a member's role to `admin`, the affected user will receive an email notifying them that they've been made an organization owner. If the authenticated user changes an owner's role to `member`, no email will be sent. + * + * **Rate limits** + * + * To prevent abuse, the authenticated user is limited to 50 organization invitations per 24 hour period. If the organization is more than one month old or on a paid plan, the limit is 500 invitations per 24 hour period. + */ + put: operations["orgs/set-membership-for-user"]; + /** + * In order to remove a user's membership with an organization, the authenticated user must be an organization owner. + * + * If the specified user is an active member of the organization, this will remove them from the organization. If the specified user has been invited to the organization, this will cancel their invitation. The specified user will receive an email notification in both cases. + */ + delete: operations["orgs/remove-membership-for-user"]; + }; + "/orgs/{org}/migrations": { + /** Lists the most recent migrations. */ + get: operations["migrations/list-for-org"]; + /** Initiates the generation of a migration archive. */ + post: operations["migrations/start-for-org"]; + }; + "/orgs/{org}/migrations/{migration_id}": { + /** + * Fetches the status of a migration. + * + * The `state` of a migration can be one of the following values: + * + * * `pending`, which means the migration hasn't started yet. + * * `exporting`, which means the migration is in progress. + * * `exported`, which means the migration finished successfully. + * * `failed`, which means the migration failed. + */ + get: operations["migrations/get-status-for-org"]; + }; + "/orgs/{org}/migrations/{migration_id}/archive": { + /** Fetches the URL to a migration archive. */ + get: operations["migrations/download-archive-for-org"]; + /** Deletes a previous migration archive. Migration archives are automatically deleted after seven days. */ + delete: operations["migrations/delete-archive-for-org"]; + }; + "/orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock": { + /** Unlocks a repository that was locked for migration. You should unlock each migrated repository and [delete them](https://docs.github.com/rest/reference/repos#delete-a-repository) when the migration is complete and you no longer need the source data. */ + delete: operations["migrations/unlock-repo-for-org"]; + }; + "/orgs/{org}/migrations/{migration_id}/repositories": { + /** List all the repositories for this organization migration. */ + get: operations["migrations/list-repos-for-org"]; + }; + "/orgs/{org}/outside_collaborators": { + /** List all users who are outside collaborators of an organization. */ + get: operations["orgs/list-outside-collaborators"]; + }; + "/orgs/{org}/outside_collaborators/{username}": { + /** When an organization member is converted to an outside collaborator, they'll only have access to the repositories that their current team membership allows. The user will no longer be a member of the organization. For more information, see "[Converting an organization member to an outside collaborator](https://docs.github.com/articles/converting-an-organization-member-to-an-outside-collaborator/)". 
Converting an organization member to an outside collaborator may be restricted by enterprise administrators. For more information, see "[Enforcing repository management policies in your enterprise](https://docs.github.com/enterprise-cloud@latest/admin/policies/enforcing-policies-for-your-enterprise/enforcing-repository-management-policies-in-your-enterprise#enforcing-a-policy-for-inviting-outside-collaborators-to-repositories)." */ + put: operations["orgs/convert-member-to-outside-collaborator"]; + /** Removing a user from this list will remove them from all the organization's repositories. */ + delete: operations["orgs/remove-outside-collaborator"]; + }; + "/orgs/{org}/packages": { + /** + * Lists all packages in an organization readable by the user. + * + * To use this endpoint, you must authenticate using an access token with the `packages:read` scope. + * If `package_type` is not `container`, your token must also include the `repo` scope. + */ + get: operations["packages/list-packages-for-organization"]; + }; + "/orgs/{org}/packages/{package_type}/{package_name}": { + /** + * Gets a specific package in an organization. + * + * To use this endpoint, you must authenticate using an access token with the `packages:read` scope. + * If `package_type` is not `container`, your token must also include the `repo` scope. + */ + get: operations["packages/get-package-for-organization"]; + /** + * Deletes an entire package in an organization. You cannot delete a public package if any version of the package has more than 5,000 downloads. In this scenario, contact GitHub support for further assistance. + * + * To use this endpoint, you must have admin permissions in the organization and authenticate using an access token with the `packages:read` and `packages:delete` scopes. In addition: + * - If `package_type` is not `container`, your token must also include the `repo` scope. + * - If `package_type` is `container`, you must also have admin permissions to the container you want to delete. + */ + delete: operations["packages/delete-package-for-org"]; + }; + "/orgs/{org}/packages/{package_type}/{package_name}/restore": { + /** + * Restores an entire package in an organization. + * + * You can restore a deleted package under the following conditions: + * - The package was deleted within the last 30 days. + * - The same package namespace and version is still available and not reused for a new package. If the same package namespace is not available, you will not be able to restore your package. In this scenario, to restore the deleted package, you must delete the new package that uses the deleted package's namespace first. + * + * To use this endpoint, you must have admin permissions in the organization and authenticate using an access token with the `packages:read` and `packages:write` scopes. In addition: + * - If `package_type` is not `container`, your token must also include the `repo` scope. + * - If `package_type` is `container`, you must also have admin permissions to the container that you want to restore. + */ + post: operations["packages/restore-package-for-org"]; + }; + "/orgs/{org}/packages/{package_type}/{package_name}/versions": { + /** + * Returns all package versions for a package owned by an organization. + * + * To use this endpoint, you must authenticate using an access token with the `packages:read` scope. + * If `package_type` is not `container`, your token must also include the `repo` scope. 
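+ * A hypothetical sketch (added for illustration): fetching the versions with `octokit.request` from `@octokit/core`. The organization and package names are placeholders; `container` is used so that only the `packages:read` scope mentioned above is needed.
+ *
+ * ```typescript
+ * import { Octokit } from "@octokit/core";
+ *
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ *
+ * const versions = await octokit.request(
+ *   "GET /orgs/{org}/packages/{package_type}/{package_name}/versions",
+ *   { org: "octo-org", package_type: "container", package_name: "hello-world" }
+ * );
+ * console.log(versions.data);
+ * ```
+ *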
+ */ + get: operations["packages/get-all-package-versions-for-package-owned-by-org"]; + }; + "/orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}": { + /** + * Gets a specific package version in an organization. + * + * You must authenticate using an access token with the `packages:read` scope. + * If `package_type` is not `container`, your token must also include the `repo` scope. + */ + get: operations["packages/get-package-version-for-organization"]; + /** + * Deletes a specific package version in an organization. If the package is public and the package version has more than 5,000 downloads, you cannot delete the package version. In this scenario, contact GitHub support for further assistance. + * + * To use this endpoint, you must have admin permissions in the organization and authenticate using an access token with the `packages:read` and `packages:delete` scopes. In addition: + * - If `package_type` is not `container`, your token must also include the `repo` scope. + * - If `package_type` is `container`, you must also have admin permissions to the container you want to delete. + */ + delete: operations["packages/delete-package-version-for-org"]; + }; + "/orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore": { + /** + * Restores a specific package version in an organization. + * + * You can restore a deleted package under the following conditions: + * - The package was deleted within the last 30 days. + * - The same package namespace and version is still available and not reused for a new package. If the same package namespace is not available, you will not be able to restore your package. In this scenario, to restore the deleted package, you must delete the new package that uses the deleted package's namespace first. + * + * To use this endpoint, you must have admin permissions in the organization and authenticate using an access token with the `packages:read` and `packages:write` scopes. In addition: + * - If `package_type` is not `container`, your token must also include the `repo` scope. + * - If `package_type` is `container`, you must also have admin permissions to the container that you want to restore. + */ + post: operations["packages/restore-package-version-for-org"]; + }; + "/orgs/{org}/projects": { + /** Lists the projects in an organization. Returns a `404 Not Found` status if projects are disabled in the organization. If you do not have sufficient privileges to perform this action, a `401 Unauthorized` or `410 Gone` status is returned. */ + get: operations["projects/list-for-org"]; + /** Creates an organization project board. Returns a `410 Gone` status if projects are disabled in the organization or if the organization does not have existing classic projects. If you do not have sufficient privileges to perform this action, a `401 Unauthorized` or `410 Gone` status is returned. */ + post: operations["projects/create-for-org"]; + }; + "/orgs/{org}/public_members": { + /** Members of an organization can choose to have their membership publicized or not. */ + get: operations["orgs/list-public-members"]; + }; + "/orgs/{org}/public_members/{username}": { + get: operations["orgs/check-public-membership-for-user"]; + /** + * The user can publicize their own membership. (A user cannot publicize the membership for another user.) + * + * Note that you'll need to set `Content-Length` to zero when calling out to this endpoint. 
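+ * As an illustrative aside (not from the generated description): with a client such as `@octokit/core` the call is simply issued with no request body; the `Content-Length: 0` note above matters mainly when crafting the raw HTTP request yourself. The organization and username are placeholders.
+ *
+ * ```typescript
+ * import { Octokit } from "@octokit/core";
+ *
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ *
+ * // Publicize the authenticated user's own membership; no request body is sent.
+ * await octokit.request("PUT /orgs/{org}/public_members/{username}", {
+ *   org: "octo-org",
+ *   username: "octocat",
+ * });
+ * ```
+ *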
For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)." + */ + put: operations["orgs/set-public-membership-for-authenticated-user"]; + delete: operations["orgs/remove-public-membership-for-authenticated-user"]; + }; + "/orgs/{org}/repos": { + /** Lists repositories for the specified organization. */ + get: operations["repos/list-for-org"]; + /** + * Creates a new repository in the specified organization. The authenticated user must be a member of the organization. + * + * **OAuth scope requirements** + * + * When using [OAuth](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/), authorizations must include: + * + * * `public_repo` scope or `repo` scope to create a public repository. Note: For GitHub AE, use `repo` scope to create an internal repository. + * * `repo` scope to create a private repository + */ + post: operations["repos/create-in-org"]; + }; + "/orgs/{org}/secret-scanning/alerts": { + /** + * Lists secret scanning alerts for eligible repositories in an organization, from newest to oldest. + * To use this endpoint, you must be an administrator or security manager for the organization, and you must use an access token with the `repo` scope or `security_events` scope. + * For public repositories, you may instead use the `public_repo` scope. + * + * GitHub Apps must have the `secret_scanning_alerts` read permission to use this endpoint. + */ + get: operations["secret-scanning/list-alerts-for-org"]; + }; + "/orgs/{org}/settings/billing/actions": { + /** + * Gets the summary of the free and paid GitHub Actions minutes used. + * + * Paid minutes only apply to workflows in private repositories that use GitHub-hosted runners. Minutes used is listed for each GitHub-hosted runner operating system. Any job re-runs are also included in the usage. The usage returned includes any minute multipliers for macOS and Windows runners, and is rounded up to the nearest whole minute. For more information, see "[Managing billing for GitHub Actions](https://docs.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-actions)". + * + * Access tokens must have the `repo` or `admin:org` scope. + */ + get: operations["billing/get-github-actions-billing-org"]; + }; + "/orgs/{org}/settings/billing/advanced-security": { + /** + * Gets the GitHub Advanced Security active committers for an organization per repository. + * + * Each distinct user login across all repositories is counted as a single Advanced Security seat, so the `total_advanced_security_committers` is not the sum of advanced_security_committers for each repository. + * + * If this organization defers to an enterprise for billing, the `total_advanced_security_committers` returned from the organization API may include some users that are in more than one organization, so they will only consume a single Advanced Security seat at the enterprise level. + * + * The total number of repositories with committer information is tracked by the `total_count` field. + */ + get: operations["billing/get-github-advanced-security-billing-org"]; + }; + "/orgs/{org}/settings/billing/packages": { + /** + * Gets the free and paid storage used for GitHub Packages in gigabytes. + * + * Paid minutes only apply to packages stored for private repositories. 
For more information, see "[Managing billing for GitHub Packages](https://docs.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-packages)." + * + * Access tokens must have the `repo` or `admin:org` scope. + */ + get: operations["billing/get-github-packages-billing-org"]; + }; + "/orgs/{org}/settings/billing/shared-storage": { + /** + * Gets the estimated paid and estimated total storage used for GitHub Actions and GitHub Packages. + * + * Paid minutes only apply to packages stored for private repositories. For more information, see "[Managing billing for GitHub Packages](https://docs.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-packages)." + * + * Access tokens must have the `repo` or `admin:org` scope. + */ + get: operations["billing/get-shared-storage-billing-org"]; + }; + "/orgs/{org}/team-sync/groups": { + /** + * Team synchronization is available for organizations using GitHub Enterprise Cloud. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * List IdP groups available in an organization. You can limit your page results using the `per_page` parameter. GitHub generates a url-encoded `page` token using a cursor value for where the next page begins. For more information on cursor pagination, see "[Offset and Cursor Pagination explained](https://dev.to/jackmarchant/offset-and-cursor-pagination-explained-b89)." + */ + get: operations["teams/list-idp-groups-for-org"]; + }; + "/orgs/{org}/teams": { + /** Lists all teams in an organization that are visible to the authenticated user. */ + get: operations["teams/list"]; + /** + * To create a team, the authenticated user must be a member or owner of `{org}`. By default, organization members can create teams. Organization owners can limit team creation to organization owners. For more information, see "[Setting team creation permissions](https://docs.github.com/en/articles/setting-team-creation-permissions-in-your-organization)." + * + * When you create a new team, you automatically become a team maintainer without explicitly adding yourself to the optional array of `maintainers`. For more information, see "[About teams](https://docs.github.com/en/github/setting-up-and-managing-organizations-and-teams/about-teams)". + */ + post: operations["teams/create"]; + }; + "/orgs/{org}/teams/{team_slug}": { + /** + * Gets a team using the team's `slug`. GitHub generates the `slug` from the team `name`. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}`. + */ + get: operations["teams/get-by-name"]; + /** + * To delete a team, the authenticated user must be an organization owner or team maintainer. + * + * If you are an organization owner, deleting a parent team will delete all of its child teams as well. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `DELETE /organizations/{org_id}/team/{team_id}`. + */ + delete: operations["teams/delete-in-org"]; + /** + * To edit a team, the authenticated user must either be an organization owner or a team maintainer. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `PATCH /organizations/{org_id}/team/{team_id}`. 
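+ * Purely as an illustration, with placeholder values: updating a team with `octokit.request` from `@octokit/core`. The `description` and `privacy` body fields are assumptions to verify against the request schema.
+ *
+ * ```typescript
+ * import { Octokit } from "@octokit/core";
+ *
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ *
+ * await octokit.request("PATCH /orgs/{org}/teams/{team_slug}", {
+ *   org: "octo-org",
+ *   team_slug: "justice-league",
+ *   description: "A great team.", // assumed body field
+ *   privacy: "closed",            // assumed body field
+ * });
+ * ```
+ *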
+ */ + patch: operations["teams/update-in-org"]; + }; + "/orgs/{org}/teams/{team_slug}/discussions": { + /** + * List all discussions on a team's page. OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/discussions`. + */ + get: operations["teams/list-discussions-in-org"]; + /** + * Creates a new discussion post on a team's page. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `POST /organizations/{org_id}/team/{team_id}/discussions`. + */ + post: operations["teams/create-discussion-in-org"]; + }; + "/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}": { + /** + * Get a specific discussion on a team's page. OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/discussions/{discussion_number}`. + */ + get: operations["teams/get-discussion-in-org"]; + /** + * Delete a discussion from a team's page. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `DELETE /organizations/{org_id}/team/{team_id}/discussions/{discussion_number}`. + */ + delete: operations["teams/delete-discussion-in-org"]; + /** + * Edits the title and body text of a discussion post. Only the parameters you provide are updated. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `PATCH /organizations/{org_id}/team/{team_id}/discussions/{discussion_number}`. + */ + patch: operations["teams/update-discussion-in-org"]; + }; + "/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments": { + /** + * List all comments on a team discussion. OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/discussions/{discussion_number}/comments`. + */ + get: operations["teams/list-discussion-comments-in-org"]; + /** + * Creates a new comment on a team discussion. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). 
+ * + * This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `POST /organizations/{org_id}/team/{team_id}/discussions/{discussion_number}/comments`. + */ + post: operations["teams/create-discussion-comment-in-org"]; + }; + "/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}": { + /** + * Get a specific comment on a team discussion. OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/discussions/{discussion_number}/comments/{comment_number}`. + */ + get: operations["teams/get-discussion-comment-in-org"]; + /** + * Deletes a comment on a team discussion. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `DELETE /organizations/{org_id}/team/{team_id}/discussions/{discussion_number}/comments/{comment_number}`. + */ + delete: operations["teams/delete-discussion-comment-in-org"]; + /** + * Edits the body text of a discussion comment. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `PATCH /organizations/{org_id}/team/{team_id}/discussions/{discussion_number}/comments/{comment_number}`. + */ + patch: operations["teams/update-discussion-comment-in-org"]; + }; + "/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions": { + /** + * List the reactions to a [team discussion comment](https://docs.github.com/rest/reference/teams#discussion-comments/). OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/:org_id/team/:team_id/discussions/:discussion_number/comments/:comment_number/reactions`. + */ + get: operations["reactions/list-for-team-discussion-comment-in-org"]; + /** + * Create a reaction to a [team discussion comment](https://docs.github.com/rest/reference/teams#discussion-comments). OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). A response with an HTTP `200` status means that you already added the reaction type to this team discussion comment. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `POST /organizations/:org_id/team/:team_id/discussions/:discussion_number/comments/:comment_number/reactions`. 
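+ * A minimal sketch for illustration, assuming the standard reactions `content` values (for example `heart` or `+1`); all other values are placeholders.
+ *
+ * ```typescript
+ * import { Octokit } from "@octokit/core";
+ *
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ *
+ * await octokit.request(
+ *   "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions",
+ *   { org: "octo-org", team_slug: "justice-league", discussion_number: 1, comment_number: 1, content: "heart" }
+ * );
+ * ```
+ *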
+ */ + post: operations["reactions/create-for-team-discussion-comment-in-org"]; + }; + "/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}": { + /** + * **Note:** You can also specify a team or organization with `team_id` and `org_id` using the route `DELETE /organizations/:org_id/team/:team_id/discussions/:discussion_number/comments/:comment_number/reactions/:reaction_id`. + * + * Delete a reaction to a [team discussion comment](https://docs.github.com/rest/reference/teams#discussion-comments). OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + */ + delete: operations["reactions/delete-for-team-discussion-comment"]; + }; + "/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions": { + /** + * List the reactions to a [team discussion](https://docs.github.com/rest/reference/teams#discussions). OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/:org_id/team/:team_id/discussions/:discussion_number/reactions`. + */ + get: operations["reactions/list-for-team-discussion-in-org"]; + /** + * Create a reaction to a [team discussion](https://docs.github.com/rest/reference/teams#discussions). OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). A response with an HTTP `200` status means that you already added the reaction type to this team discussion. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `POST /organizations/:org_id/team/:team_id/discussions/:discussion_number/reactions`. + */ + post: operations["reactions/create-for-team-discussion-in-org"]; + }; + "/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}": { + /** + * **Note:** You can also specify a team or organization with `team_id` and `org_id` using the route `DELETE /organizations/:org_id/team/:team_id/discussions/:discussion_number/reactions/:reaction_id`. + * + * Delete a reaction to a [team discussion](https://docs.github.com/rest/reference/teams#discussions). OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + */ + delete: operations["reactions/delete-for-team-discussion"]; + }; + "/orgs/{org}/teams/{team_slug}/external-groups": { + /** + * Lists a connection between a team and an external group. + * + * You can manage team membership with your identity provider using Enterprise Managed Users for GitHub Enterprise Cloud. For more information, see "[GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products)" in the GitHub Help documentation. + */ + get: operations["teams/list-linked-external-idp-groups-to-team-for-org"]; + /** + * Deletes a connection between a team and an external group. + * + * You can manage team membership with your IdP using Enterprise Managed Users for GitHub Enterprise Cloud. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. 
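+ * For illustration only: one way to issue this call with `getOctokit` from `@actions/github` (the token and names are placeholders).
+ *
+ * ```typescript
+ * import { getOctokit } from "@actions/github";
+ *
+ * const octokit = getOctokit(process.env.GITHUB_TOKEN ?? "");
+ *
+ * // Unlink the external group currently connected to the team.
+ * await octokit.request("DELETE /orgs/{org}/teams/{team_slug}/external-groups", {
+ *   org: "octo-org",
+ *   team_slug: "justice-league",
+ * });
+ * ```
+ *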
+ */ + delete: operations["teams/unlink-external-idp-group-from-team-for-org"]; + /** + * Creates a connection between a team and an external group. Only one external group can be linked to a team. + * + * You can manage team membership with your identity provider using Enterprise Managed Users for GitHub Enterprise Cloud. For more information, see "[GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products)" in the GitHub Help documentation. + */ + patch: operations["teams/link-external-idp-group-to-team-for-org"]; + }; + "/orgs/{org}/teams/{team_slug}/invitations": { + /** + * The return hash contains a `role` field which refers to the Organization Invitation role and will be one of the following values: `direct_member`, `admin`, `billing_manager`, `hiring_manager`, or `reinstate`. If the invitee is not a GitHub member, the `login` field in the return hash will be `null`. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/invitations`. + */ + get: operations["teams/list-pending-invitations-in-org"]; + }; + "/orgs/{org}/teams/{team_slug}/members": { + /** + * Team members will include the members of child teams. + * + * To list members in a team, the team must be visible to the authenticated user. + */ + get: operations["teams/list-members-in-org"]; + }; + "/orgs/{org}/teams/{team_slug}/memberships/{username}": { + /** + * Team members will include the members of child teams. + * + * To get a user's membership with a team, the team must be visible to the authenticated user. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/memberships/{username}`. + * + * **Note:** + * The response contains the `state` of the membership and the member's `role`. + * + * The `role` for organization owners is set to `maintainer`. For more information about `maintainer` roles, see [Create a team](https://docs.github.com/rest/reference/teams#create-a-team). + */ + get: operations["teams/get-membership-for-user-in-org"]; + /** + * Team synchronization is available for organizations using GitHub Enterprise Cloud. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Adds an organization member to a team. An authenticated organization owner or team maintainer can add organization members to a team. + * + * **Note:** When you have team synchronization set up for a team with your organization's identity provider (IdP), you will see an error if you attempt to use the API for making changes to the team's membership. If you have access to manage group membership in your IdP, you can manage GitHub team membership through your identity provider, which automatically adds and removes team members in an organization. For more information, see "[Synchronizing teams between your identity provider and GitHub](https://docs.github.com/articles/synchronizing-teams-between-your-identity-provider-and-github/)." + * + * An organization owner can add someone who is not part of the team's organization to a team. When an organization owner adds someone to a team who is not an organization member, this endpoint will send an invitation to the person via email.
This newly-created membership will be in the "pending" state until the person accepts the invitation, at which point the membership will transition to the "active" state and the user will be added as a member of the team. + * + * If the user is already a member of the team, this endpoint will update the team member's role. To update the membership of a team member, the authenticated user must be an organization owner or a team maintainer. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `PUT /organizations/{org_id}/team/{team_id}/memberships/{username}`. + */ + put: operations["teams/add-or-update-membership-for-user-in-org"]; + /** + * Team synchronization is available for organizations using GitHub Enterprise Cloud. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * To remove a membership between a user and a team, the authenticated user must have 'admin' permissions to the team or be an owner of the organization that the team is associated with. Removing team membership does not delete the user; it just removes their membership from the team. + * + * **Note:** When you have team synchronization set up for a team with your organization's identity provider (IdP), you will see an error if you attempt to use the API for making changes to the team's membership. If you have access to manage group membership in your IdP, you can manage GitHub team membership through your identity provider, which automatically adds and removes team members in an organization. For more information, see "[Synchronizing teams between your identity provider and GitHub](https://docs.github.com/articles/synchronizing-teams-between-your-identity-provider-and-github/)." + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `DELETE /organizations/{org_id}/team/{team_id}/memberships/{username}`. + */ + delete: operations["teams/remove-membership-for-user-in-org"]; + }; + "/orgs/{org}/teams/{team_slug}/projects": { + /** + * Lists the organization projects for a team. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/projects`. + */ + get: operations["teams/list-projects-in-org"]; + }; + "/orgs/{org}/teams/{team_slug}/projects/{project_id}": { + /** + * Checks whether a team has `read`, `write`, or `admin` permissions for an organization project. The response includes projects inherited from a parent team. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/projects/{project_id}`. + */ + get: operations["teams/check-permissions-for-project-in-org"]; + /** + * Adds an organization project to a team. To add a project to a team or update the team's permission on a project, the authenticated user must have `admin` permissions for the project. The project and team must be part of the same organization. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `PUT /organizations/{org_id}/team/{team_id}/projects/{project_id}`. + */ + put: operations["teams/add-or-update-project-permissions-in-org"]; + /** + * Removes an organization project from a team. An organization owner or a team maintainer can remove any project from the team.
To remove a project from a team as an organization member, the authenticated user must have `read` access to both the team and project, or `admin` access to the team or project. This endpoint removes the project from the team, but does not delete the project. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `DELETE /organizations/{org_id}/team/{team_id}/projects/{project_id}`. + */ + delete: operations["teams/remove-project-in-org"]; + }; + "/orgs/{org}/teams/{team_slug}/repos": { + /** + * Lists a team's repositories visible to the authenticated user. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/repos`. + */ + get: operations["teams/list-repos-in-org"]; + }; + "/orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}": { + /** + * Checks whether a team has `admin`, `push`, `maintain`, `triage`, or `pull` permission for a repository. Repositories inherited through a parent team will also be checked. + * + * You can also get information about the specified repository, including what permissions the team grants on it, by passing the following custom [media type](https://docs.github.com/rest/overview/media-types/) via the `application/vnd.github.v3.repository+json` accept header. + * + * If a team doesn't have permission for the repository, you will receive a `404 Not Found` response status. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/repos/{owner}/{repo}`. + */ + get: operations["teams/check-permissions-for-repo-in-org"]; + /** + * To add a repository to a team or update the team's permission on a repository, the authenticated user must have admin access to the repository, and must be able to see the team. The repository must be owned by the organization, or a direct fork of a repository owned by the organization. You will get a `422 Unprocessable Entity` status if you attempt to add a repository to a team that is not owned by the organization. Note that, if you choose not to pass any parameters, you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)." + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `PUT /organizations/{org_id}/team/{team_id}/repos/{owner}/{repo}`. + * + * For more information about the permission levels, see "[Repository permission levels for an organization](https://docs.github.com/en/github/setting-up-and-managing-organizations-and-teams/repository-permission-levels-for-an-organization#permission-levels-for-repositories-owned-by-an-organization)". + */ + put: operations["teams/add-or-update-repo-permissions-in-org"]; + /** + * If the authenticated user is an organization owner or a team maintainer, they can remove any repositories from the team. To remove a repository from a team as an organization member, the authenticated user must have admin access to the repository and must be able to see the team. This does not delete the repository, it just removes it from the team. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `DELETE /organizations/{org_id}/team/{team_id}/repos/{owner}/{repo}`. 
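+ * An illustrative sketch (not from the generated description): removing a repository from a team with `octokit.request` from `@octokit/core`; all values are placeholders.
+ *
+ * ```typescript
+ * import { Octokit } from "@octokit/core";
+ *
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ *
+ * await octokit.request("DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}", {
+ *   org: "octo-org",
+ *   team_slug: "justice-league",
+ *   owner: "octo-org",
+ *   repo: "hello-world",
+ * });
+ * ```
+ *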
+ */ + delete: operations["teams/remove-repo-in-org"]; + }; + "/orgs/{org}/teams/{team_slug}/team-sync/group-mappings": { + /** + * Team synchronization is available for organizations using GitHub Enterprise Cloud. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * List IdP groups connected to a team on GitHub. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/team-sync/group-mappings`. + */ + get: operations["teams/list-idp-groups-in-org"]; + /** + * Team synchronization is available for organizations using GitHub Enterprise Cloud. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Creates, updates, or removes a connection between a team and an IdP group. When adding groups to a team, you must include all new and existing groups to avoid replacing existing groups with the new ones. Specifying an empty `groups` array will remove all connections for a team. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `PATCH /organizations/{org_id}/team/{team_id}/team-sync/group-mappings`. + */ + patch: operations["teams/create-or-update-idp-group-connections-in-org"]; + }; + "/orgs/{org}/teams/{team_slug}/teams": { + /** + * Lists the child teams of the team specified by `{team_slug}`. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/teams`. + */ + get: operations["teams/list-child-in-org"]; + }; + "/projects/columns/cards/{card_id}": { + get: operations["projects/get-card"]; + delete: operations["projects/delete-card"]; + patch: operations["projects/update-card"]; + }; + "/projects/columns/cards/{card_id}/moves": { + post: operations["projects/move-card"]; + }; + "/projects/columns/{column_id}": { + get: operations["projects/get-column"]; + delete: operations["projects/delete-column"]; + patch: operations["projects/update-column"]; + }; + "/projects/columns/{column_id}/cards": { + get: operations["projects/list-cards"]; + post: operations["projects/create-card"]; + }; + "/projects/columns/{column_id}/moves": { + post: operations["projects/move-column"]; + }; + "/projects/{project_id}": { + /** Gets a project by its `id`. Returns a `404 Not Found` status if projects are disabled. If you do not have sufficient privileges to perform this action, a `401 Unauthorized` or `410 Gone` status is returned. */ + get: operations["projects/get"]; + /** Deletes a project board. Returns a `404 Not Found` status if projects are disabled. */ + delete: operations["projects/delete"]; + /** Updates a project board's information. Returns a `404 Not Found` status if projects are disabled. If you do not have sufficient privileges to perform this action, a `401 Unauthorized` or `410 Gone` status is returned. */ + patch: operations["projects/update"]; + }; + "/projects/{project_id}/collaborators": { + /** Lists the collaborators for an organization project. For a project, the list of collaborators includes outside collaborators, organization members that are direct collaborators, organization members with access through team memberships, organization members with access through default organization permissions, and organization owners. 
You must be an organization owner or a project `admin` to list collaborators. */ + get: operations["projects/list-collaborators"]; + }; + "/projects/{project_id}/collaborators/{username}": { + /** Adds a collaborator to an organization project and sets their permission level. You must be an organization owner or a project `admin` to add a collaborator. */ + put: operations["projects/add-collaborator"]; + /** Removes a collaborator from an organization project. You must be an organization owner or a project `admin` to remove a collaborator. */ + delete: operations["projects/remove-collaborator"]; + }; + "/projects/{project_id}/collaborators/{username}/permission": { + /** Returns the collaborator's permission level for an organization project. Possible values for the `permission` key: `admin`, `write`, `read`, `none`. You must be an organization owner or a project `admin` to review a user's permission level. */ + get: operations["projects/get-permission-for-user"]; + }; + "/projects/{project_id}/columns": { + get: operations["projects/list-columns"]; + post: operations["projects/create-column"]; + }; + "/rate_limit": { + /** + * **Note:** Accessing this endpoint does not count against your REST API rate limit. + * + * **Note:** The `rate` object is deprecated. If you're writing new API client code or updating existing code, you should use the `core` object instead of the `rate` object. The `core` object contains the same information that is present in the `rate` object. + */ + get: operations["rate-limit/get"]; + }; + "/repos/{owner}/{repo}": { + /** The `parent` and `source` objects are present when the repository is a fork. `parent` is the repository this repository was forked from, `source` is the ultimate source for the network. */ + get: operations["repos/get"]; + /** + * Deleting a repository requires admin access. If OAuth is used, the `delete_repo` scope is required. + * + * If an organization owner has configured the organization to prevent members from deleting organization-owned + * repositories, you will get a `403 Forbidden` response. + */ + delete: operations["repos/delete"]; + /** **Note**: To edit a repository's topics, use the [Replace all repository topics](https://docs.github.com/rest/reference/repos#replace-all-repository-topics) endpoint. */ + patch: operations["repos/update"]; + }; + "/repos/{owner}/{repo}/actions/artifacts": { + /** Lists all artifacts for a repository. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. */ + get: operations["actions/list-artifacts-for-repo"]; + }; + "/repos/{owner}/{repo}/actions/artifacts/{artifact_id}": { + /** Gets a specific artifact for a workflow run. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. */ + get: operations["actions/get-artifact"]; + /** Deletes an artifact for a workflow run. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint. */ + delete: operations["actions/delete-artifact"]; + }; + "/repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}": { + /** + * Gets a redirect URL to download an archive for a repository. This URL expires after 1 minute. 
Look for `Location:` in + * the response header to find the URL for the download. The `:archive_format` must be `zip`. Anyone with read access to + * the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. + * GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + get: operations["actions/download-artifact"]; + }; + "/repos/{owner}/{repo}/actions/cache/usage": { + /** + * Gets GitHub Actions cache usage for a repository. + * The data fetched using this API is refreshed approximately every 5 minutes, so values returned from this endpoint may take at least 5 minutes to get updated. + * Anyone with read access to the repository can use this endpoint. If the repository is private, you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + get: operations["actions/get-actions-cache-usage"]; + }; + "/repos/{owner}/{repo}/actions/caches": { + /** + * Lists the GitHub Actions caches for a repository. + * You must authenticate using an access token with the `repo` scope to use this endpoint. + * GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + get: operations["actions/get-actions-cache-list"]; + /** + * Deletes one or more GitHub Actions caches for a repository, using a complete cache key. By default, all caches that match the provided key are deleted, but you can optionally provide a Git ref to restrict deletions to caches that match both the provided key and the Git ref. + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. + * + * GitHub Apps must have the `actions:write` permission to use this endpoint. + */ + delete: operations["actions/delete-actions-cache-by-key"]; + }; + "/repos/{owner}/{repo}/actions/caches/{cache_id}": { + /** + * Deletes a GitHub Actions cache for a repository, using a cache ID. + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. + * + * GitHub Apps must have the `actions:write` permission to use this endpoint. + */ + delete: operations["actions/delete-actions-cache-by-id"]; + }; + "/repos/{owner}/{repo}/actions/jobs/{job_id}": { + /** Gets a specific job in a workflow run. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. */ + get: operations["actions/get-job-for-workflow-run"]; + }; + "/repos/{owner}/{repo}/actions/jobs/{job_id}/logs": { + /** + * Gets a redirect URL to download a plain text file of logs for a workflow job. This link expires after 1 minute. Look + * for `Location:` in the response header to find the URL for the download. Anyone with read access to the repository can + * use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must + * have the `actions:read` permission to use this endpoint. + */ + get: operations["actions/download-job-logs-for-workflow-run"]; + }; + "/repos/{owner}/{repo}/actions/jobs/{job_id}/rerun": { + /** Re-run a job and its dependent jobs in a workflow run. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint. 
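+ * A hypothetical usage sketch (token and identifiers are placeholders), using `getOctokit` from `@actions/github`:
+ *
+ * ```typescript
+ * import { getOctokit } from "@actions/github";
+ *
+ * const octokit = getOctokit(process.env.GITHUB_TOKEN ?? "");
+ *
+ * // Re-run a single job (and its dependent jobs) from a previous workflow run.
+ * await octokit.request("POST /repos/{owner}/{repo}/actions/jobs/{job_id}/rerun", {
+ *   owner: "octo-org",
+ *   repo: "hello-world",
+ *   job_id: 123456789,
+ * });
+ * ```
+ *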
*/ + post: operations["actions/re-run-job-for-workflow-run"]; + }; + "/repos/{owner}/{repo}/actions/oidc/customization/sub": { + /** + * Gets the `opt-out` flag of a GitHub Actions OpenID Connect (OIDC) subject claim customization for a repository. + * You must authenticate using an access token with the `repo` scope to use this + * endpoint. GitHub Apps must have the `organization_administration:read` permission to use this endpoint. + */ + get: operations["actions/get-custom-oidc-sub-claim-for-repo"]; + /** + * Sets the `opt-out` flag of a GitHub Actions OpenID Connect (OIDC) subject claim customization for a repository. + * You must authenticate using an access token with the `repo` scope to use this + * endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint. + */ + put: operations["actions/set-custom-oidc-sub-claim-for-repo"]; + }; + "/repos/{owner}/{repo}/actions/permissions": { + /** + * Gets the GitHub Actions permissions policy for a repository, including whether GitHub Actions is enabled and the actions and reusable workflows allowed to run in the repository. + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `administration` repository permission to use this API. + */ + get: operations["actions/get-github-actions-permissions-repository"]; + /** + * Sets the GitHub Actions permissions policy for enabling GitHub Actions and allowed actions and reusable workflows in the repository. + * + * If the repository belongs to an organization or enterprise that has set restrictive permissions at the organization or enterprise levels, such as `allowed_actions` to `selected` actions and reusable workflows, then you cannot override them for the repository. + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `administration` repository permission to use this API. + */ + put: operations["actions/set-github-actions-permissions-repository"]; + }; + "/repos/{owner}/{repo}/actions/permissions/access": { + /** + * Gets the level of access that workflows outside of the repository have to actions and reusable workflows in the repository. + * This endpoint only applies to internal repositories. For more information, see "[Managing GitHub Actions settings for a repository](https://docs.github.com/repositories/managing-your-repositorys-settings-and-features/enabling-features-for-your-repository/managing-github-actions-settings-for-a-repository#allowing-access-to-components-in-an-internal-repository)." + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the + * repository `administration` permission to use this endpoint. + */ + get: operations["actions/get-workflow-access-to-repository"]; + /** + * Sets the level of access that workflows outside of the repository have to actions and reusable workflows in the repository. + * This endpoint only applies to internal repositories. For more information, see "[Managing GitHub Actions settings for a repository](https://docs.github.com/repositories/managing-your-repositorys-settings-and-features/enabling-features-for-your-repository/managing-github-actions-settings-for-a-repository#allowing-access-to-components-in-an-internal-repository)." + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the + * repository `administration` permission to use this endpoint. 
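+ * Sketch for illustration only; the `access_level` body field and its `"organization"` value are assumptions to verify against the request schema, and the owner and repository are placeholders.
+ *
+ * ```typescript
+ * import { Octokit } from "@octokit/core";
+ *
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ *
+ * await octokit.request("PUT /repos/{owner}/{repo}/actions/permissions/access", {
+ *   owner: "octo-org",
+ *   repo: "internal-tools",       // placeholder internal repository
+ *   access_level: "organization", // assumed enum value
+ * });
+ * ```
+ *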
+ */ + put: operations["actions/set-workflow-access-to-repository"]; + }; + "/repos/{owner}/{repo}/actions/permissions/selected-actions": { + /** + * Gets the settings for selected actions and reusable workflows that are allowed in a repository. To use this endpoint, the repository policy for `allowed_actions` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for a repository](#set-github-actions-permissions-for-a-repository)." + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `administration` repository permission to use this API. + */ + get: operations["actions/get-allowed-actions-repository"]; + /** + * Sets the actions and reusable workflows that are allowed in a repository. To use this endpoint, the repository permission policy for `allowed_actions` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for a repository](#set-github-actions-permissions-for-a-repository)." + * + * If the repository belongs to an organization or enterprise that has `selected` actions and reusable workflows set at the organization or enterprise levels, then you cannot override any of the allowed actions and reusable workflows settings. + * + * To use the `patterns_allowed` setting for private repositories, the repository must belong to an enterprise. If the repository does not belong to an enterprise, then the `patterns_allowed` setting only applies to public repositories. + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `administration` repository permission to use this API. + */ + put: operations["actions/set-allowed-actions-repository"]; + }; + "/repos/{owner}/{repo}/actions/permissions/workflow": { + /** + * Gets the default workflow permissions granted to the `GITHUB_TOKEN` when running workflows in a repository, + * as well as if GitHub Actions can submit approving pull request reviews. + * For more information, see "[Setting the permissions of the GITHUB_TOKEN for your repository](https://docs.github.com/repositories/managing-your-repositorys-settings-and-features/enabling-features-for-your-repository/managing-github-actions-settings-for-a-repository#setting-the-permissions-of-the-github_token-for-your-repository)." + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the repository `administration` permission to use this API. + */ + get: operations["actions/get-github-actions-default-workflow-permissions-repository"]; + /** + * Sets the default workflow permissions granted to the `GITHUB_TOKEN` when running workflows in a repository, and sets if GitHub Actions + * can submit approving pull request reviews. + * For more information, see "[Setting the permissions of the GITHUB_TOKEN for your repository](https://docs.github.com/repositories/managing-your-repositorys-settings-and-features/enabling-features-for-your-repository/managing-github-actions-settings-for-a-repository#setting-the-permissions-of-the-github_token-for-your-repository)." + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the repository `administration` permission to use this API. 
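+ *
+ * #### Example using `@octokit/core` (sketch)
+ *
+ * A minimal sketch of setting the default `GITHUB_TOKEN` permissions, assuming `@octokit/core` is installed; the repository names, token, and body values are placeholders.
+ *
+ * ```typescript
+ * import { Octokit } from "@octokit/core";
+ *
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ *
+ * // Restrict the default token to read-only access and disallow
+ * // GitHub Actions from approving pull request reviews.
+ * await octokit.request("PUT /repos/{owner}/{repo}/actions/permissions/workflow", {
+ *   owner: "octo-org",
+ *   repo: "octo-repo",
+ *   default_workflow_permissions: "read",
+ *   can_approve_pull_request_reviews: false,
+ * });
+ * ```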
+ */ + put: operations["actions/set-github-actions-default-workflow-permissions-repository"]; + }; + "/repos/{owner}/{repo}/actions/runners": { + /** Lists all self-hosted runners configured in a repository. You must authenticate using an access token with the `repo` scope to use this endpoint. */ + get: operations["actions/list-self-hosted-runners-for-repo"]; + }; + "/repos/{owner}/{repo}/actions/runners/downloads": { + /** + * Lists binaries for the runner application that you can download and run. + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. + */ + get: operations["actions/list-runner-applications-for-repo"]; + }; + "/repos/{owner}/{repo}/actions/runners/registration-token": { + /** + * Returns a token that you can pass to the `config` script. The token expires after one hour. You must authenticate + * using an access token with the `repo` scope to use this endpoint. + * + * #### Example using registration token + * + * Configure your self-hosted runner, replacing `TOKEN` with the registration token provided by this endpoint. + * + * ``` + * ./config.sh --url https://github.com/octo-org/octo-repo-artifacts --token TOKEN + * ``` + */ + post: operations["actions/create-registration-token-for-repo"]; + }; + "/repos/{owner}/{repo}/actions/runners/remove-token": { + /** + * Returns a token that you can pass to remove a self-hosted runner from a repository. The token expires after one hour. + * You must authenticate using an access token with the `repo` scope to use this endpoint. + * + * #### Example using remove token + * + * To remove your self-hosted runner from a repository, replace TOKEN with the remove token provided by this endpoint. + * + * ``` + * ./config.sh remove --token TOKEN + * ``` + */ + post: operations["actions/create-remove-token-for-repo"]; + }; + "/repos/{owner}/{repo}/actions/runners/{runner_id}": { + /** + * Gets a specific self-hosted runner configured in a repository. + * + * You must authenticate using an access token with the `repo` scope to use this + * endpoint. + */ + get: operations["actions/get-self-hosted-runner-for-repo"]; + /** + * Forces the removal of a self-hosted runner from a repository. You can use this endpoint to completely remove the runner when the machine you were using no longer exists. + * + * You must authenticate using an access token with the `repo` + * scope to use this endpoint. + */ + delete: operations["actions/delete-self-hosted-runner-from-repo"]; + }; + "/repos/{owner}/{repo}/actions/runners/{runner_id}/labels": { + /** + * Lists all labels for a self-hosted runner configured in a repository. + * + * You must authenticate using an access token with the `repo` scope to use this + * endpoint. + */ + get: operations["actions/list-labels-for-self-hosted-runner-for-repo"]; + /** + * Remove all previous custom labels and set the new custom labels for a specific + * self-hosted runner configured in a repository. + * + * You must authenticate using an access token with the `repo` scope to use this + * endpoint. + */ + put: operations["actions/set-custom-labels-for-self-hosted-runner-for-repo"]; + /** + * Add custom labels to a self-hosted runner configured in a repository. + * + * You must authenticate using an access token with the `repo` scope to use this + * endpoint. + */ + post: operations["actions/add-custom-labels-to-self-hosted-runner-for-repo"]; + /** + * Remove all custom labels from a self-hosted runner configured in a + * repository. 
Returns the remaining read-only labels from the runner. + * + * You must authenticate using an access token with the `repo` scope to use this + * endpoint. + */ + delete: operations["actions/remove-all-custom-labels-from-self-hosted-runner-for-repo"]; + }; + "/repos/{owner}/{repo}/actions/runners/{runner_id}/labels/{name}": { + /** + * Remove a custom label from a self-hosted runner configured + * in a repository. Returns the remaining labels from the runner. + * + * This endpoint returns a `404 Not Found` status if the custom label is not + * present on the runner. + * + * You must authenticate using an access token with the `repo` scope to use this + * endpoint. + */ + delete: operations["actions/remove-custom-label-from-self-hosted-runner-for-repo"]; + }; + "/repos/{owner}/{repo}/actions/runs": { + /** + * Lists all workflow runs for a repository. You can use parameters to narrow the list of results. For more information about using parameters, see [Parameters](https://docs.github.com/rest/overview/resources-in-the-rest-api#parameters). + * + * Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + get: operations["actions/list-workflow-runs-for-repo"]; + }; + "/repos/{owner}/{repo}/actions/runs/{run_id}": { + /** Gets a specific workflow run. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. */ + get: operations["actions/get-workflow-run"]; + /** + * Delete a specific workflow run. Anyone with write access to the repository can use this endpoint. If the repository is + * private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:write` permission to use + * this endpoint. + */ + delete: operations["actions/delete-workflow-run"]; + }; + "/repos/{owner}/{repo}/actions/runs/{run_id}/approvals": { + /** Anyone with read access to the repository can use this endpoint. If the repository is private, you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. */ + get: operations["actions/get-reviews-for-run"]; + }; + "/repos/{owner}/{repo}/actions/runs/{run_id}/approve": { + /** + * Approves a workflow run for a pull request from a public fork of a first time contributor. For more information, see ["Approving workflow runs from public forks](https://docs.github.com/actions/managing-workflow-runs/approving-workflow-runs-from-public-forks)." + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint. + */ + post: operations["actions/approve-workflow-run"]; + }; + "/repos/{owner}/{repo}/actions/runs/{run_id}/artifacts": { + /** Lists artifacts for a workflow run. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. */ + get: operations["actions/list-workflow-run-artifacts"]; + }; + "/repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}": { + /** + * Gets a specific workflow run attempt. Anyone with read access to the repository + * can use this endpoint. 
If the repository is private you must use an access token + * with the `repo` scope. GitHub Apps must have the `actions:read` permission to + * use this endpoint. + */ + get: operations["actions/get-workflow-run-attempt"]; + }; + "/repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs": { + /** Lists jobs for a specific workflow run attempt. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. You can use parameters to narrow the list of results. For more information about using parameters, see [Parameters](https://docs.github.com/rest/overview/resources-in-the-rest-api#parameters). */ + get: operations["actions/list-jobs-for-workflow-run-attempt"]; + }; + "/repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/logs": { + /** + * Gets a redirect URL to download an archive of log files for a specific workflow run attempt. This link expires after + * 1 minute. Look for `Location:` in the response header to find the URL for the download. Anyone with read access to + * the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. + * GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + get: operations["actions/download-workflow-run-attempt-logs"]; + }; + "/repos/{owner}/{repo}/actions/runs/{run_id}/cancel": { + /** Cancels a workflow run using its `id`. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint. */ + post: operations["actions/cancel-workflow-run"]; + }; + "/repos/{owner}/{repo}/actions/runs/{run_id}/jobs": { + /** Lists jobs for a workflow run. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. You can use parameters to narrow the list of results. For more information about using parameters, see [Parameters](https://docs.github.com/rest/overview/resources-in-the-rest-api#parameters). */ + get: operations["actions/list-jobs-for-workflow-run"]; + }; + "/repos/{owner}/{repo}/actions/runs/{run_id}/logs": { + /** + * Gets a redirect URL to download an archive of log files for a workflow run. This link expires after 1 minute. Look for + * `Location:` in the response header to find the URL for the download. Anyone with read access to the repository can use + * this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have + * the `actions:read` permission to use this endpoint. + */ + get: operations["actions/download-workflow-run-logs"]; + /** Deletes all logs for a workflow run. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint. */ + delete: operations["actions/delete-workflow-run-logs"]; + }; + "/repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments": { + /** + * Get all deployment environments for a workflow run that are waiting for protection rules to pass. + * + * Anyone with read access to the repository can use this endpoint. If the repository is private, you must use an access token with the `repo` scope. 
GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + get: operations["actions/get-pending-deployments-for-run"]; + /** + * Approve or reject pending deployments that are waiting on approval by a required reviewer. + * + * Required reviewers with read access to the repository contents and deployments can use this endpoint. Required reviewers must authenticate using an access token with the `repo` scope to use this endpoint. + */ + post: operations["actions/review-pending-deployments-for-run"]; + }; + "/repos/{owner}/{repo}/actions/runs/{run_id}/rerun": { + /** Re-runs your workflow run using its `id`. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint. */ + post: operations["actions/re-run-workflow"]; + }; + "/repos/{owner}/{repo}/actions/runs/{run_id}/rerun-failed-jobs": { + /** Re-run all of the failed jobs and their dependent jobs in a workflow run using the `id` of the workflow run. You must authenticate using an access token with the `repo` scope to use this endpoint. */ + post: operations["actions/re-run-workflow-failed-jobs"]; + }; + "/repos/{owner}/{repo}/actions/runs/{run_id}/timing": { + /** + * Gets the number of billable minutes and total run time for a specific workflow run. Billable minutes only apply to workflows in private repositories that use GitHub-hosted runners. Usage is listed for each GitHub-hosted runner operating system in milliseconds. Any job re-runs are also included in the usage. The usage does not include the multiplier for macOS and Windows runners and is not rounded up to the nearest whole minute. For more information, see "[Managing billing for GitHub Actions](https://docs.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-actions)". + * + * Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + get: operations["actions/get-workflow-run-usage"]; + }; + "/repos/{owner}/{repo}/actions/secrets": { + /** Lists all secrets available in a repository without revealing their encrypted values. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `secrets` repository permission to use this endpoint. */ + get: operations["actions/list-repo-secrets"]; + }; + "/repos/{owner}/{repo}/actions/secrets/public-key": { + /** Gets your public key, which you need to encrypt secrets. You need to encrypt a secret before you can create or update secrets. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `secrets` repository permission to use this endpoint. */ + get: operations["actions/get-repo-public-key"]; + }; + "/repos/{owner}/{repo}/actions/secrets/{secret_name}": { + /** Gets a single repository secret without revealing its encrypted value. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `secrets` repository permission to use this endpoint. */ + get: operations["actions/get-repo-secret"]; + /** + * Creates or updates a repository secret with an encrypted value. Encrypt your secret using + * [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages). 
You must authenticate using an access + * token with the `repo` scope to use this endpoint. GitHub Apps must have the `secrets` repository permission to use + * this endpoint. + * + * #### Example encrypting a secret using Node.js + * + * Encrypt your secret using the [tweetsodium](https://github.com/github/tweetsodium) library. + * + * ``` + * const sodium = require('tweetsodium'); + * + * const key = "base64-encoded-public-key"; + * const value = "plain-text-secret"; + * + * // Convert the message and key to Uint8Array's (Buffer implements that interface) + * const messageBytes = Buffer.from(value); + * const keyBytes = Buffer.from(key, 'base64'); + * + * // Encrypt using LibSodium. + * const encryptedBytes = sodium.seal(messageBytes, keyBytes); + * + * // Base64 the encrypted secret + * const encrypted = Buffer.from(encryptedBytes).toString('base64'); + * + * console.log(encrypted); + * ``` + * + * + * #### Example encrypting a secret using Python + * + * Encrypt your secret using [pynacl](https://pynacl.readthedocs.io/en/latest/public/#nacl-public-sealedbox) with Python 3. + * + * ``` + * from base64 import b64encode + * from nacl import encoding, public + * + * def encrypt(public_key: str, secret_value: str) -> str: + * """Encrypt a Unicode string using the public key.""" + * public_key = public.PublicKey(public_key.encode("utf-8"), encoding.Base64Encoder()) + * sealed_box = public.SealedBox(public_key) + * encrypted = sealed_box.encrypt(secret_value.encode("utf-8")) + * return b64encode(encrypted).decode("utf-8") + * ``` + * + * #### Example encrypting a secret using C# + * + * Encrypt your secret using the [Sodium.Core](https://www.nuget.org/packages/Sodium.Core/) package. + * + * ``` + * var secretValue = System.Text.Encoding.UTF8.GetBytes("mySecret"); + * var publicKey = Convert.FromBase64String("2Sg8iYjAxxmI2LvUXpJjkYrMxURPc8r+dB7TJyvvcCU="); + * + * var sealedPublicKeyBox = Sodium.SealedPublicKeyBox.Create(secretValue, publicKey); + * + * Console.WriteLine(Convert.ToBase64String(sealedPublicKeyBox)); + * ``` + * + * #### Example encrypting a secret using Ruby + * + * Encrypt your secret using the [rbnacl](https://github.com/RubyCrypto/rbnacl) gem. + * + * ```ruby + * require "rbnacl" + * require "base64" + * + * key = Base64.decode64("+ZYvJDZMHUfBkJdyq5Zm9SKqeuBQ4sj+6sfjlH4CgG0=") + * public_key = RbNaCl::PublicKey.new(key) + * + * box = RbNaCl::Boxes::Sealed.from_public_key(public_key) + * encrypted_secret = box.encrypt("my_secret") + * + * # Print the base64 encoded secret + * puts Base64.strict_encode64(encrypted_secret) + * ``` + */ + put: operations["actions/create-or-update-repo-secret"]; + /** Deletes a secret in a repository using the secret name. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `secrets` repository permission to use this endpoint. */ + delete: operations["actions/delete-repo-secret"]; + }; + "/repos/{owner}/{repo}/actions/workflows": { + /** Lists the workflows in a repository. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. */ + get: operations["actions/list-repo-workflows"]; + }; + "/repos/{owner}/{repo}/actions/workflows/{workflow_id}": { + /** Gets a specific workflow. You can replace `workflow_id` with the workflow file name. For example, you could use `main.yaml`. 
Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. */ + get: operations["actions/get-workflow"]; + }; + "/repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable": { + /** + * Disables a workflow and sets the `state` of the workflow to `disabled_manually`. You can replace `workflow_id` with the workflow file name. For example, you could use `main.yaml`. + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint. + */ + put: operations["actions/disable-workflow"]; + }; + "/repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches": { + /** + * You can use this endpoint to manually trigger a GitHub Actions workflow run. You can replace `workflow_id` with the workflow file name. For example, you could use `main.yaml`. + * + * You must configure your GitHub Actions workflow to run when the [`workflow_dispatch` webhook](/developers/webhooks-and-events/webhook-events-and-payloads#workflow_dispatch) event occurs. The `inputs` are configured in the workflow file. For more information about how to configure the `workflow_dispatch` event in the workflow file, see "[Events that trigger workflows](/actions/reference/events-that-trigger-workflows#workflow_dispatch)." + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint. For more information, see "[Creating a personal access token for the command line](https://docs.github.com/articles/creating-a-personal-access-token-for-the-command-line)." + */ + post: operations["actions/create-workflow-dispatch"]; + }; + "/repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable": { + /** + * Enables a workflow and sets the `state` of the workflow to `active`. You can replace `workflow_id` with the workflow file name. For example, you could use `main.yaml`. + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint. + */ + put: operations["actions/enable-workflow"]; + }; + "/repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs": { + /** + * List all workflow runs for a workflow. You can replace `workflow_id` with the workflow file name. For example, you could use `main.yaml`. You can use parameters to narrow the list of results. For more information about using parameters, see [Parameters](https://docs.github.com/rest/overview/resources-in-the-rest-api#parameters). + * + * Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. + */ + get: operations["actions/list-workflow-runs"]; + }; + "/repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing": { + /** + * Gets the number of billable minutes used by a specific workflow during the current billing cycle. Billable minutes only apply to workflows in private repositories that use GitHub-hosted runners. Usage is listed for each GitHub-hosted runner operating system in milliseconds. Any job re-runs are also included in the usage. The usage does not include the multiplier for macOS and Windows runners and is not rounded up to the nearest whole minute. 
For more information, see "[Managing billing for GitHub Actions](https://docs.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-actions)". + * + * You can replace `workflow_id` with the workflow file name. For example, you could use `main.yaml`. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + get: operations["actions/get-workflow-usage"]; + }; + "/repos/{owner}/{repo}/assignees": { + /** Lists the [available assignees](https://docs.github.com/articles/assigning-issues-and-pull-requests-to-other-github-users/) for issues in a repository. */ + get: operations["issues/list-assignees"]; + }; + "/repos/{owner}/{repo}/assignees/{assignee}": { + /** + * Checks if a user has permission to be assigned to an issue in this repository. + * + * If the `assignee` can be assigned to issues in the repository, a `204` header with no content is returned. + * + * Otherwise a `404` status code is returned. + */ + get: operations["issues/check-user-can-be-assigned"]; + }; + "/repos/{owner}/{repo}/autolinks": { + /** + * This returns a list of autolinks configured for the given repository. + * + * Information about autolinks are only available to repository administrators. + */ + get: operations["repos/list-autolinks"]; + /** Users with admin access to the repository can create an autolink. */ + post: operations["repos/create-autolink"]; + }; + "/repos/{owner}/{repo}/autolinks/{autolink_id}": { + /** + * This returns a single autolink reference by ID that was configured for the given repository. + * + * Information about autolinks are only available to repository administrators. + */ + get: operations["repos/get-autolink"]; + /** + * This deletes a single autolink reference by ID that was configured for the given repository. + * + * Information about autolinks are only available to repository administrators. + */ + delete: operations["repos/delete-autolink"]; + }; + "/repos/{owner}/{repo}/automated-security-fixes": { + /** Enables automated security fixes for a repository. The authenticated user must have admin access to the repository. For more information, see "[Configuring automated security fixes](https://docs.github.com/en/articles/configuring-automated-security-fixes)". */ + put: operations["repos/enable-automated-security-fixes"]; + /** Disables automated security fixes for a repository. The authenticated user must have admin access to the repository. For more information, see "[Configuring automated security fixes](https://docs.github.com/en/articles/configuring-automated-security-fixes)". */ + delete: operations["repos/disable-automated-security-fixes"]; + }; + "/repos/{owner}/{repo}/branches": { + get: operations["repos/list-branches"]; + }; + "/repos/{owner}/{repo}/branches/{branch}": { + get: operations["repos/get-branch"]; + }; + "/repos/{owner}/{repo}/branches/{branch}/protection": { + /** Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. 
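+ *
+ * #### Example using `@octokit/core` (sketch)
+ *
+ * A minimal sketch of reading the protection settings for a branch, assuming `@octokit/core` is installed; `owner`, `repo`, `branch`, and the token are placeholders.
+ *
+ * ```typescript
+ * import { Octokit } from "@octokit/core";
+ *
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ *
+ * // Fetch the current protection rules for the `main` branch.
+ * const { data: protection } = await octokit.request(
+ *   "GET /repos/{owner}/{repo}/branches/{branch}/protection",
+ *   { owner: "octo-org", repo: "octo-repo", branch: "main" }
+ * );
+ *
+ * console.log(protection.required_status_checks, protection.enforce_admins);
+ * ```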
*/ + get: operations["repos/get-branch-protection"]; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Protecting a branch requires admin or owner permissions to the repository. + * + * **Note**: Passing new arrays of `users` and `teams` replaces their previous values. + * + * **Note**: The list of users, apps, and teams in total is limited to 100 items. + */ + put: operations["repos/update-branch-protection"]; + /** Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. */ + delete: operations["repos/delete-branch-protection"]; + }; + "/repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins": { + /** Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. */ + get: operations["repos/get-admin-branch-protection"]; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Adding admin enforcement requires admin or owner permissions to the repository and branch protection to be enabled. + */ + post: operations["repos/set-admin-branch-protection"]; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Removing admin enforcement requires admin or owner permissions to the repository and branch protection to be enabled. + */ + delete: operations["repos/delete-admin-branch-protection"]; + }; + "/repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews": { + /** Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. 
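+ *
+ * #### Example using `@octokit/core` (sketch)
+ *
+ * A small sketch of reading the pull request review protection for a branch, assuming `@octokit/core` is installed; all names and the token are placeholders.
+ *
+ * ```typescript
+ * import { Octokit } from "@octokit/core";
+ *
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ *
+ * // Inspect the required-review settings for the `main` branch.
+ * const { data: reviews } = await octokit.request(
+ *   "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews",
+ *   { owner: "octo-org", repo: "octo-repo", branch: "main" }
+ * );
+ *
+ * console.log(reviews.required_approving_review_count);
+ * ```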
*/ + get: operations["repos/get-pull-request-review-protection"]; + /** Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. */ + delete: operations["repos/delete-pull-request-review-protection"]; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Updating pull request review enforcement requires admin or owner permissions to the repository and branch protection to be enabled. + * + * **Note**: Passing new arrays of `users` and `teams` replaces their previous values. + */ + patch: operations["repos/update-pull-request-review-protection"]; + }; + "/repos/{owner}/{repo}/branches/{branch}/protection/required_signatures": { + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * When authenticated with admin or owner permissions to the repository, you can use this endpoint to check whether a branch requires signed commits. An enabled status of `true` indicates you must sign commits on this branch. For more information, see [Signing commits with GPG](https://docs.github.com/articles/signing-commits-with-gpg) in GitHub Help. + * + * **Note**: You must enable branch protection to require signed commits. + */ + get: operations["repos/get-commit-signature-protection"]; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * When authenticated with admin or owner permissions to the repository, you can use this endpoint to require signed commits on a branch. You must enable branch protection to require signed commits. + */ + post: operations["repos/create-commit-signature-protection"]; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * When authenticated with admin or owner permissions to the repository, you can use this endpoint to disable required signed commits on a branch. You must enable branch protection to require signed commits. 
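+ *
+ * #### Example using `@octokit/core` (sketch)
+ *
+ * A minimal sketch of disabling required signed commits on a protected branch, assuming `@octokit/core` is installed; `owner`, `repo`, `branch`, and the token are placeholders.
+ *
+ * ```typescript
+ * import { Octokit } from "@octokit/core";
+ *
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ *
+ * // Stop requiring signed commits on the `main` branch.
+ * await octokit.request(
+ *   "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures",
+ *   { owner: "octo-org", repo: "octo-repo", branch: "main" }
+ * );
+ * ```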
+ */ + delete: operations["repos/delete-commit-signature-protection"]; + }; + "/repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks": { + /** Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. */ + get: operations["repos/get-status-checks-protection"]; + /** Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. */ + delete: operations["repos/remove-status-check-protection"]; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Updating required status checks requires admin or owner permissions to the repository and branch protection to be enabled. + */ + patch: operations["repos/update-status-check-protection"]; + }; + "/repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts": { + /** Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. */ + get: operations["repos/get-all-status-check-contexts"]; + /** Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. */ + put: operations["repos/set-status-check-contexts"]; + /** Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. */ + post: operations["repos/add-status-check-contexts"]; + /** Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. 
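+ *
+ * #### Example using `@octokit/core` (sketch)
+ *
+ * An illustrative sketch of removing a required status check context from a protected branch, assuming `@octokit/core` is installed; the `contexts` value, repository names, and token are placeholders, and the body shape should be confirmed against the generated `operations` types.
+ *
+ * ```typescript
+ * import { Octokit } from "@octokit/core";
+ *
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ *
+ * // Remove a single required status check context from the `main` branch.
+ * await octokit.request(
+ *   "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts",
+ *   {
+ *     owner: "octo-org",
+ *     repo: "octo-repo",
+ *     branch: "main",
+ *     contexts: ["continuous-integration/jenkins"],
+ *   }
+ * );
+ * ```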
*/ + delete: operations["repos/remove-status-check-contexts"]; + }; + "/repos/{owner}/{repo}/branches/{branch}/protection/restrictions": { + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Lists who has access to this protected branch. + * + * **Note**: Users, apps, and teams `restrictions` are only available for organization-owned repositories. + */ + get: operations["repos/get-access-restrictions"]; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Disables the ability to restrict who can push to this branch. + */ + delete: operations["repos/delete-access-restrictions"]; + }; + "/repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps": { + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Lists the GitHub Apps that have push access to this branch. Only installed GitHub Apps with `write` access to the `contents` permission can be added as authorized actors on a protected branch. + */ + get: operations["repos/get-apps-with-access-to-protected-branch"]; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Replaces the list of apps that have push access to this branch. This removes all apps that previously had push access and grants push access to the new list of apps. Only installed GitHub Apps with `write` access to the `contents` permission can be added as authorized actors on a protected branch. + * + * | Type | Description | + * | ------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------- | + * | `array` | The GitHub Apps that have push access to this branch. Use the app's `slug`. **Note**: The list of users, apps, and teams in total is limited to 100 items. | + */ + put: operations["repos/set-app-access-restrictions"]; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. 
For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Grants the specified apps push access for this branch. Only installed GitHub Apps with `write` access to the `contents` permission can be added as authorized actors on a protected branch. + * + * | Type | Description | + * | ------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------- | + * | `array` | The GitHub Apps that have push access to this branch. Use the app's `slug`. **Note**: The list of users, apps, and teams in total is limited to 100 items. | + */ + post: operations["repos/add-app-access-restrictions"]; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Removes the ability of an app to push to this branch. Only installed GitHub Apps with `write` access to the `contents` permission can be added as authorized actors on a protected branch. + * + * | Type | Description | + * | ------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------- | + * | `array` | The GitHub Apps that have push access to this branch. Use the app's `slug`. **Note**: The list of users, apps, and teams in total is limited to 100 items. | + */ + delete: operations["repos/remove-app-access-restrictions"]; + }; + "/repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams": { + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Lists the teams who have push access to this branch. The list includes child teams. + */ + get: operations["repos/get-teams-with-access-to-protected-branch"]; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Replaces the list of teams that have push access to this branch. This removes all teams that previously had push access and grants push access to the new list of teams. Team restrictions include child teams. + * + * | Type | Description | + * | ------- | ------------------------------------------------------------------------------------------------------------------------------------------ | + * | `array` | The teams that can have push access. Use the team's `slug`. **Note**: The list of users, apps, and teams in total is limited to 100 items. 
| + */ + put: operations["repos/set-team-access-restrictions"]; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Grants the specified teams push access for this branch. You can also give push access to child teams. + * + * | Type | Description | + * | ------- | ------------------------------------------------------------------------------------------------------------------------------------------ | + * | `array` | The teams that can have push access. Use the team's `slug`. **Note**: The list of users, apps, and teams in total is limited to 100 items. | + */ + post: operations["repos/add-team-access-restrictions"]; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Removes the ability of a team to push to this branch. You can also remove push access for child teams. + * + * | Type | Description | + * | ------- | --------------------------------------------------------------------------------------------------------------------------------------------------- | + * | `array` | Teams that should no longer have push access. Use the team's `slug`. **Note**: The list of users, apps, and teams in total is limited to 100 items. | + */ + delete: operations["repos/remove-team-access-restrictions"]; + }; + "/repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users": { + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Lists the people who have push access to this branch. + */ + get: operations["repos/get-users-with-access-to-protected-branch"]; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Replaces the list of people that have push access to this branch. This removes all people that previously had push access and grants push access to the new list of people. + * + * | Type | Description | + * | ------- | ----------------------------------------------------------------------------------------------------------------------------- | + * | `array` | Usernames for people who can have push access. **Note**: The list of users, apps, and teams in total is limited to 100 items. 
| + */ + put: operations["repos/set-user-access-restrictions"]; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Grants the specified people push access for this branch. + * + * | Type | Description | + * | ------- | ----------------------------------------------------------------------------------------------------------------------------- | + * | `array` | Usernames for people who can have push access. **Note**: The list of users, apps, and teams in total is limited to 100 items. | + */ + post: operations["repos/add-user-access-restrictions"]; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Removes the ability of a user to push to this branch. + * + * | Type | Description | + * | ------- | --------------------------------------------------------------------------------------------------------------------------------------------- | + * | `array` | Usernames of the people who should no longer have push access. **Note**: The list of users, apps, and teams in total is limited to 100 items. | + */ + delete: operations["repos/remove-user-access-restrictions"]; + }; + "/repos/{owner}/{repo}/branches/{branch}/rename": { + /** + * Renames a branch in a repository. + * + * **Note:** Although the API responds immediately, the branch rename process might take some extra time to complete in the background. You won't be able to push to the old branch name while the rename process is in progress. For more information, see "[Renaming a branch](https://docs.github.com/github/administering-a-repository/renaming-a-branch)". + * + * The permissions required to use this endpoint depends on whether you are renaming the default branch. + * + * To rename a non-default branch: + * + * * Users must have push access. + * * GitHub Apps must have the `contents:write` repository permission. + * + * To rename the default branch: + * + * * Users must have admin or owner permissions. + * * GitHub Apps must have the `administration:write` repository permission. + */ + post: operations["repos/rename-branch"]; + }; + "/repos/{owner}/{repo}/check-runs": { + /** + * **Note:** The Checks API only looks for pushes in the repository where the check suite or check run were created. Pushes to a branch in a forked repository are not detected and return an empty `pull_requests` array. + * + * Creates a new check run for a specific commit in a repository. Your GitHub App must have the `checks:write` permission to create check runs. + * + * In a check suite, GitHub limits the number of check runs with the same name to 1000. Once these check runs exceed 1000, GitHub will start to automatically delete older check runs. 
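+ *
+ * #### Example using `@octokit/core` (sketch)
+ *
+ * A minimal sketch of creating a check run for a commit, assuming `@octokit/core` is installed; the token, repository names, check name, and `head_sha` are placeholders.
+ *
+ * ```typescript
+ * import { Octokit } from "@octokit/core";
+ *
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ *
+ * // Report a completed, successful check for the given commit SHA.
+ * await octokit.request("POST /repos/{owner}/{repo}/check-runs", {
+ *   owner: "octo-org",
+ *   repo: "octo-repo",
+ *   name: "example-check",
+ *   head_sha: "ce587453ced02b1526dfb4cb910479d431683101",
+ *   status: "completed",
+ *   conclusion: "success",
+ * });
+ * ```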
+ */ + post: operations["checks/create"]; + }; + "/repos/{owner}/{repo}/check-runs/{check_run_id}": { + /** + * **Note:** The Checks API only looks for pushes in the repository where the check suite or check run were created. Pushes to a branch in a forked repository are not detected and return an empty `pull_requests` array. + * + * Gets a single check run using its `id`. GitHub Apps must have the `checks:read` permission on a private repository or pull access to a public repository to get check runs. OAuth Apps and authenticated users must have the `repo` scope to get check runs in a private repository. + */ + get: operations["checks/get"]; + /** + * **Note:** The Checks API only looks for pushes in the repository where the check suite or check run were created. Pushes to a branch in a forked repository are not detected and return an empty `pull_requests` array. + * + * Updates a check run for a specific commit in a repository. Your GitHub App must have the `checks:write` permission to edit check runs. + */ + patch: operations["checks/update"]; + }; + "/repos/{owner}/{repo}/check-runs/{check_run_id}/annotations": { + /** Lists annotations for a check run using the annotation `id`. GitHub Apps must have the `checks:read` permission on a private repository or pull access to a public repository to get annotations for a check run. OAuth Apps and authenticated users must have the `repo` scope to get annotations for a check run in a private repository. */ + get: operations["checks/list-annotations"]; + }; + "/repos/{owner}/{repo}/check-runs/{check_run_id}/rerequest": { + /** + * Triggers GitHub to rerequest an existing check run, without pushing new code to a repository. This endpoint will trigger the [`check_run` webhook](https://docs.github.com/webhooks/event-payloads/#check_run) event with the action `rerequested`. When a check run is `rerequested`, its `status` is reset to `queued` and the `conclusion` is cleared. + * + * To rerequest a check run, your GitHub App must have the `checks:read` permission on a private repository or pull access to a public repository. + */ + post: operations["checks/rerequest-run"]; + }; + "/repos/{owner}/{repo}/check-suites": { + /** + * **Note:** The Checks API only looks for pushes in the repository where the check suite or check run were created. Pushes to a branch in a forked repository are not detected and return an empty `pull_requests` array and a `null` value for `head_branch`. + * + * By default, check suites are automatically created when you create a [check run](https://docs.github.com/rest/reference/checks#check-runs). You only need to use this endpoint for manually creating check suites when you've disabled automatic creation using "[Update repository preferences for check suites](https://docs.github.com/rest/reference/checks#update-repository-preferences-for-check-suites)". Your GitHub App must have the `checks:write` permission to create check suites. + */ + post: operations["checks/create-suite"]; + }; + "/repos/{owner}/{repo}/check-suites/preferences": { + /** Changes the default automatic flow when creating check suites. By default, a check suite is automatically created each time code is pushed to a repository. When you disable the automatic creation of check suites, you can manually [Create a check suite](https://docs.github.com/rest/reference/checks#create-a-check-suite). You must have admin permissions in the repository to set preferences for check suites. 
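+ *
+ * #### Example using `@octokit/core` (sketch)
+ *
+ * A rough sketch of updating check suite preferences, assuming `@octokit/core` is installed; the `app_id`, repository names, and token are placeholders.
+ *
+ * ```typescript
+ * import { Octokit } from "@octokit/core";
+ *
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ *
+ * // Disable automatic check suite creation for one GitHub App.
+ * await octokit.request("PATCH /repos/{owner}/{repo}/check-suites/preferences", {
+ *   owner: "octo-org",
+ *   repo: "octo-repo",
+ *   auto_trigger_checks: [{ app_id: 12345, setting: false }],
+ * });
+ * ```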
*/ + patch: operations["checks/set-suites-preferences"]; + }; + "/repos/{owner}/{repo}/check-suites/{check_suite_id}": { + /** + * **Note:** The Checks API only looks for pushes in the repository where the check suite or check run were created. Pushes to a branch in a forked repository are not detected and return an empty `pull_requests` array and a `null` value for `head_branch`. + * + * Gets a single check suite using its `id`. GitHub Apps must have the `checks:read` permission on a private repository or pull access to a public repository to get check suites. OAuth Apps and authenticated users must have the `repo` scope to get check suites in a private repository. + */ + get: operations["checks/get-suite"]; + }; + "/repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs": { + /** + * **Note:** The Checks API only looks for pushes in the repository where the check suite or check run were created. Pushes to a branch in a forked repository are not detected and return an empty `pull_requests` array. + * + * Lists check runs for a check suite using its `id`. GitHub Apps must have the `checks:read` permission on a private repository or pull access to a public repository to get check runs. OAuth Apps and authenticated users must have the `repo` scope to get check runs in a private repository. + */ + get: operations["checks/list-for-suite"]; + }; + "/repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest": { + /** + * Triggers GitHub to rerequest an existing check suite, without pushing new code to a repository. This endpoint will trigger the [`check_suite` webhook](https://docs.github.com/webhooks/event-payloads/#check_suite) event with the action `rerequested`. When a check suite is `rerequested`, its `status` is reset to `queued` and the `conclusion` is cleared. + * + * To rerequest a check suite, your GitHub App must have the `checks:read` permission on a private repository or pull access to a public repository. + */ + post: operations["checks/rerequest-suite"]; + }; + "/repos/{owner}/{repo}/code-scanning/alerts": { + /** + * Lists all open code scanning alerts for the default branch (usually `main` + * or `master`). You must use an access token with the `security_events` scope to use + * this endpoint with private repos, the `public_repo` scope also grants permission to read + * security events on public repos only. GitHub Apps must have the `security_events` read + * permission to use this endpoint. + * + * The response includes a `most_recent_instance` object. + * This provides details of the most recent instance of this alert + * for the default branch or for the specified Git reference + * (if you used `ref` in the request). + */ + get: operations["code-scanning/list-alerts-for-repo"]; + }; + "/repos/{owner}/{repo}/code-scanning/alerts/{alert_number}": { + /** + * Gets a single code scanning alert. You must use an access token with the `security_events` scope to use this endpoint with private repos, the `public_repo` scope also grants permission to read security events on public repos only. GitHub Apps must have the `security_events` read permission to use this endpoint. + * + * **Deprecation notice**: + * The instances field is deprecated and will, in future, not be included in the response for this endpoint. The example response reflects this change. The same information can now be retrieved via a GET request to the URL specified by `instances_url`. + */ + get: operations["code-scanning/get-alert"]; + /** Updates the status of a single code scanning alert. 
You must use an access token with the `security_events` scope to use this endpoint with private repositories. You can also use tokens with the `public_repo` scope for public repositories only. GitHub Apps must have the `security_events` write permission to use this endpoint. */ + patch: operations["code-scanning/update-alert"]; + }; + "/repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances": { + /** + * Lists all instances of the specified code scanning alert. + * You must use an access token with the `security_events` scope to use this endpoint with private repos, + * the `public_repo` scope also grants permission to read security events on public repos only. + * GitHub Apps must have the `security_events` read permission to use this endpoint. + */ + get: operations["code-scanning/list-alert-instances"]; + }; + "/repos/{owner}/{repo}/code-scanning/analyses": { + /** + * Lists the details of all code scanning analyses for a repository, + * starting with the most recent. + * The response is paginated and you can use the `page` and `per_page` parameters + * to list the analyses you're interested in. + * By default 30 analyses are listed per page. + * + * The `rules_count` field in the response give the number of rules + * that were run in the analysis. + * For very old analyses this data is not available, + * and `0` is returned in this field. + * + * You must use an access token with the `security_events` scope to use this endpoint with private repos, + * the `public_repo` scope also grants permission to read security events on public repos only. + * GitHub Apps must have the `security_events` read permission to use this endpoint. + * + * **Deprecation notice**: + * The `tool_name` field is deprecated and will, in future, not be included in the response for this endpoint. The example response reflects this change. The tool name can now be found inside the `tool` field. + */ + get: operations["code-scanning/list-recent-analyses"]; + }; + "/repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}": { + /** + * Gets a specified code scanning analysis for a repository. + * You must use an access token with the `security_events` scope to use this endpoint with private repos, + * the `public_repo` scope also grants permission to read security events on public repos only. + * GitHub Apps must have the `security_events` read permission to use this endpoint. + * + * The default JSON response contains fields that describe the analysis. + * This includes the Git reference and commit SHA to which the analysis relates, + * the datetime of the analysis, the name of the code scanning tool, + * and the number of alerts. + * + * The `rules_count` field in the default response give the number of rules + * that were run in the analysis. + * For very old analyses this data is not available, + * and `0` is returned in this field. + * + * If you use the Accept header `application/sarif+json`, + * the response contains the analysis data that was uploaded. + * This is formatted as + * [SARIF version 2.1.0](https://docs.oasis-open.org/sarif/sarif/v2.1.0/cs01/sarif-v2.1.0-cs01.html). + */ + get: operations["code-scanning/get-analysis"]; + /** + * Deletes a specified code scanning analysis from a repository. For + * private repositories, you must use an access token with the `repo` scope. For public repositories, + * you must use an access token with `public_repo` scope. + * GitHub Apps must have the `security_events` write permission to use this endpoint. + * + * You can delete one analysis at a time. 
+ * To delete a series of analyses, start with the most recent analysis and work backwards. + * Conceptually, the process is similar to the undo function in a text editor. + * + * When you list the analyses for a repository, + * one or more will be identified as deletable in the response: + * + * ``` + * "deletable": true + * ``` + * + * An analysis is deletable when it's the most recent in a set of analyses. + * Typically, a repository will have multiple sets of analyses + * for each enabled code scanning tool, + * where a set is determined by a unique combination of analysis values: + * + * * `ref` + * * `tool` + * * `analysis_key` + * * `environment` + * + * If you attempt to delete an analysis that is not the most recent in a set, + * you'll get a 400 response with the message: + * + * ``` + * Analysis specified is not deletable. + * ``` + * + * The response from a successful `DELETE` operation provides you with + * two alternative URLs for deleting the next analysis in the set: + * `next_analysis_url` and `confirm_delete_url`. + * Use the `next_analysis_url` URL if you want to avoid accidentally deleting the final analysis + * in a set. This is a useful option if you want to preserve at least one analysis + * for the specified tool in your repository. + * Use the `confirm_delete_url` URL if you are content to remove all analyses for a tool. + * When you delete the last analysis in a set, the value of `next_analysis_url` and `confirm_delete_url` + * in the 200 response is `null`. + * + * As an example of the deletion process, + * let's imagine that you added a workflow that configured a particular code scanning tool + * to analyze the code in a repository. This tool has added 15 analyses: + * 10 on the default branch, and another 5 on a topic branch. + * You therefore have two separate sets of analyses for this tool. + * You've now decided that you want to remove all of the analyses for the tool. + * To do this you must make 15 separate deletion requests. + * To start, you must find an analysis that's identified as deletable. + * Each set of analyses always has one that's identified as deletable. + * Having found the deletable analysis for one of the two sets, + * delete this analysis and then continue deleting the next analysis in the set until they're all deleted. + * Then repeat the process for the second set. + * The procedure therefore consists of a nested loop: + * + * **Outer loop**: + * * List the analyses for the repository, filtered by tool. + * * Parse this list to find a deletable analysis. If found: + * + * **Inner loop**: + * * Delete the identified analysis. + * * Parse the response for the value of `confirm_delete_url` and, if found, use this in the next iteration. + * + * The above process assumes that you want to remove all trace of the tool's analyses from the GitHub user interface, for the specified repository, and it therefore uses the `confirm_delete_url` value. Alternatively, you could use the `next_analysis_url` value, which would leave the last analysis in each set undeleted to avoid removing a tool's analysis entirely. + */ + delete: operations["code-scanning/delete-analysis"]; + }; + "/repos/{owner}/{repo}/code-scanning/sarifs": { + /** + * Uploads SARIF data containing the results of a code scanning analysis to make the results available in a repository. You must use an access token with the `security_events` scope to use this endpoint for private repositories. You can also use tokens with the `public_repo` scope for public repositories only. 
GitHub Apps must have the `security_events` write permission to use this endpoint. + * + * There are two places where you can upload code scanning results. + * - If you upload to a pull request, for example `--ref refs/pull/42/merge` or `--ref refs/pull/42/head`, then the results appear as alerts in a pull request check. For more information, see "[Triaging code scanning alerts in pull requests](/code-security/secure-coding/triaging-code-scanning-alerts-in-pull-requests)." + * - If you upload to a branch, for example `--ref refs/heads/my-branch`, then the results appear in the **Security** tab for your repository. For more information, see "[Managing code scanning alerts for your repository](/code-security/secure-coding/managing-code-scanning-alerts-for-your-repository#viewing-the-alerts-for-a-repository)." + * + * You must compress the SARIF-formatted analysis data that you want to upload, using `gzip`, and then encode it as a Base64 format string. For example: + * + * ``` + * gzip -c analysis-data.sarif | base64 -w0 + * ``` + * + * SARIF upload supports a maximum of 5000 results per analysis run. Any results over this limit are ignored and any SARIF uploads with more than 25,000 results are rejected. Typically, but not necessarily, a SARIF file contains a single run of a single tool. If a code scanning tool generates too many results, you should update the analysis configuration to run only the most important rules or queries. + * + * The `202 Accepted` response includes an `id` value. + * You can use this ID to check the status of the upload by using it with the `/sarifs/{sarif_id}` endpoint. + * For more information, see "[Get information about a SARIF upload](/rest/reference/code-scanning#get-information-about-a-sarif-upload)." + */ + post: operations["code-scanning/upload-sarif"]; + }; + "/repos/{owner}/{repo}/code-scanning/sarifs/{sarif_id}": { + /** Gets information about a SARIF upload, including the status and the URL of the analysis that was uploaded so that you can retrieve details of the analysis. For more information, see "[Get a code scanning analysis for a repository](/rest/reference/code-scanning#get-a-code-scanning-analysis-for-a-repository)." You must use an access token with the `security_events` scope to use this endpoint with private repos, the `public_repo` scope also grants permission to read security events on public repos only. GitHub Apps must have the `security_events` read permission to use this endpoint. */ + get: operations["code-scanning/get-sarif"]; + }; + "/repos/{owner}/{repo}/codeowners/errors": { + /** + * List any syntax errors that are detected in the CODEOWNERS + * file. + * + * For more information about the correct CODEOWNERS syntax, + * see "[About code owners](https://docs.github.com/repositories/managing-your-repositorys-settings-and-features/customizing-your-repository/about-code-owners)." + */ + get: operations["repos/codeowners-errors"]; + }; + "/repos/{owner}/{repo}/codespaces": { + /** + * Lists the codespaces associated with a specified repository and the authenticated user. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have read access to the `codespaces` repository permission to use this endpoint. + */ + get: operations["codespaces/list-in-repository-for-authenticated-user"]; + /** + * Creates a codespace owned by the authenticated user in the specified repository.
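+ *
+ * #### Example of uploading a SARIF file using Node.js
+ *
+ * Referring back to the SARIF upload endpoint documented above: a minimal sketch using [@octokit/core](https://github.com/octokit/core.js) together with Node's `fs` and `zlib` modules. The file name, token, owner, repository, ref, and commit SHA below are illustrative placeholders.
+ *
+ * ```
+ * const fs = require("fs");
+ * const zlib = require("zlib");
+ * const { Octokit } = require("@octokit/core");
+ *
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ *
+ * (async () => {
+ *   // gzip the SARIF file and encode it as a Base64 string, as the endpoint requires.
+ *   const sarif = zlib.gzipSync(fs.readFileSync("analysis-data.sarif")).toString("base64");
+ *
+ *   const { data } = await octokit.request("POST /repos/{owner}/{repo}/code-scanning/sarifs", {
+ *     owner: "octo-org",
+ *     repo: "octo-repo",
+ *     commit_sha: "4b6472266afd7b471e86085a6659e8c7f2b119da",
+ *     ref: "refs/heads/main",
+ *     sarif,
+ *   });
+ *
+ *   // The 202 response includes an id that can be used with the /sarifs/{sarif_id} endpoint.
+ *   console.log(data.id);
+ * })();
+ * ```
+ *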
+ * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have write access to the `codespaces` repository permission to use this endpoint. + */ + post: operations["codespaces/create-with-repo-for-authenticated-user"]; + }; + "/repos/{owner}/{repo}/codespaces/devcontainers": { + /** + * Lists the devcontainer.json files associated with a specified repository and the authenticated user. These files + * specify launchpoint configurations for codespaces created within the repository. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have read access to the `codespaces_metadata` repository permission to use this endpoint. + */ + get: operations["codespaces/list-devcontainers-in-repository-for-authenticated-user"]; + }; + "/repos/{owner}/{repo}/codespaces/machines": { + /** + * List the machine types available for a given repository based on its configuration. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have write access to the `codespaces_metadata` repository permission to use this endpoint. + */ + get: operations["codespaces/repo-machines-for-authenticated-user"]; + }; + "/repos/{owner}/{repo}/codespaces/new": { + /** + * Gets the default attributes for codespaces created by the user with the repository. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have write access to the `codespaces` repository permission to use this endpoint. + */ + get: operations["codespaces/pre-flight-with-repo-for-authenticated-user"]; + }; + "/repos/{owner}/{repo}/codespaces/secrets": { + /** Lists all secrets available in a repository without revealing their encrypted values. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `codespaces_secrets` repository permission to use this endpoint. */ + get: operations["codespaces/list-repo-secrets"]; + }; + "/repos/{owner}/{repo}/codespaces/secrets/public-key": { + /** Gets your public key, which you need to encrypt secrets. You need to encrypt a secret before you can create or update secrets. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `codespaces_secrets` repository permission to use this endpoint. */ + get: operations["codespaces/get-repo-public-key"]; + }; + "/repos/{owner}/{repo}/codespaces/secrets/{secret_name}": { + /** Gets a single repository secret without revealing its encrypted value. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `codespaces_secrets` repository permission to use this endpoint. */ + get: operations["codespaces/get-repo-secret"]; + /** + * Creates or updates a repository secret with an encrypted value. Encrypt your secret using + * [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages). You must authenticate using an access + * token with the `repo` scope to use this endpoint. GitHub Apps must have the `codespaces_secrets` repository + * permission to use this endpoint. + * + * #### Example of encrypting a secret using Node.js + * + * Encrypt your secret using the [tweetsodium](https://github.com/github/tweetsodium) library. 
+ * + * ``` + * const sodium = require('tweetsodium'); + * + * const key = "base64-encoded-public-key"; + * const value = "plain-text-secret"; + * + * // Convert the message and key to Uint8Array's (Buffer implements that interface) + * const messageBytes = Buffer.from(value); + * const keyBytes = Buffer.from(key, 'base64'); + * + * // Encrypt using LibSodium. + * const encryptedBytes = sodium.seal(messageBytes, keyBytes); + * + * // Base64 the encrypted secret + * const encrypted = Buffer.from(encryptedBytes).toString('base64'); + * + * console.log(encrypted); + * ``` + * + * + * #### Example of encrypting a secret using Python + * + * Encrypt your secret using [pynacl](https://pynacl.readthedocs.io/en/latest/public/#nacl-public-sealedbox) with Python 3. + * + * ``` + * from base64 import b64encode + * from nacl import encoding, public + * + * def encrypt(public_key: str, secret_value: str) -> str: + * """Encrypt a Unicode string using the public key.""" + * public_key = public.PublicKey(public_key.encode("utf-8"), encoding.Base64Encoder()) + * sealed_box = public.SealedBox(public_key) + * encrypted = sealed_box.encrypt(secret_value.encode("utf-8")) + * return b64encode(encrypted).decode("utf-8") + * ``` + * + * #### Example of encrypting a secret using C# + * + * Encrypt your secret using the [Sodium.Core](https://www.nuget.org/packages/Sodium.Core/) package. + * + * ``` + * var secretValue = System.Text.Encoding.UTF8.GetBytes("mySecret"); + * var publicKey = Convert.FromBase64String("2Sg8iYjAxxmI2LvUXpJjkYrMxURPc8r+dB7TJyvvcCU="); + * + * var sealedPublicKeyBox = Sodium.SealedPublicKeyBox.Create(secretValue, publicKey); + * + * Console.WriteLine(Convert.ToBase64String(sealedPublicKeyBox)); + * ``` + * + * #### Example of encrypting a secret using Ruby + * + * Encrypt your secret using the [rbnacl](https://github.com/RubyCrypto/rbnacl) gem. + * + * ```ruby + * require "rbnacl" + * require "base64" + * + * key = Base64.decode64("+ZYvJDZMHUfBkJdyq5Zm9SKqeuBQ4sj+6sfjlH4CgG0=") + * public_key = RbNaCl::PublicKey.new(key) + * + * box = RbNaCl::Boxes::Sealed.from_public_key(public_key) + * encrypted_secret = box.encrypt("my_secret") + * + * # Print the base64 encoded secret + * puts Base64.strict_encode64(encrypted_secret) + * ``` + */ + put: operations["codespaces/create-or-update-repo-secret"]; + /** Deletes a secret in a repository using the secret name. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `codespaces_secrets` repository permission to use this endpoint. */ + delete: operations["codespaces/delete-repo-secret"]; + }; + "/repos/{owner}/{repo}/collaborators": { + /** + * For organization-owned repositories, the list of collaborators includes outside collaborators, organization members that are direct collaborators, organization members with access through team memberships, organization members with access through default organization permissions, and organization owners. + * Organization members with write, maintain, or admin privileges on the organization-owned repository can use this endpoint. + * + * Team members will include the members of child teams. + * + * You must authenticate using an access token with the `read:org` and `repo` scopes with push access to use this + * endpoint. GitHub Apps must have the `members` organization permission and `metadata` repository permission to use this + * endpoint. 
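+ *
+ * #### Example of listing collaborators using Node.js
+ *
+ * A minimal sketch using [@octokit/core](https://github.com/octokit/core.js); the token, owner, and repository values are illustrative placeholders.
+ *
+ * ```
+ * const { Octokit } = require("@octokit/core");
+ *
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ *
+ * (async () => {
+ *   const { data: collaborators } = await octokit.request(
+ *     "GET /repos/{owner}/{repo}/collaborators",
+ *     { owner: "octo-org", repo: "octo-repo", affiliation: "all", per_page: 100 }
+ *   );
+ *   console.log(collaborators.map((c) => c.login));
+ * })();
+ * ```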
+ */ + get: operations["repos/list-collaborators"]; + }; + "/repos/{owner}/{repo}/collaborators/{username}": { + /** + * For organization-owned repositories, the list of collaborators includes outside collaborators, organization members that are direct collaborators, organization members with access through team memberships, organization members with access through default organization permissions, and organization owners. + * + * Team members will include the members of child teams. + * + * You must authenticate using an access token with the `read:org` and `repo` scopes with push access to use this + * endpoint. GitHub Apps must have the `members` organization permission and `metadata` repository permission to use this + * endpoint. + */ + get: operations["repos/check-collaborator"]; + /** + * This endpoint triggers [notifications](https://docs.github.com/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. + * + * Adding an outside collaborator may be restricted by enterprise administrators. For more information, see "[Enforcing repository management policies in your enterprise](https://docs.github.com/enterprise-cloud@latest/admin/policies/enforcing-policies-for-your-enterprise/enforcing-repository-management-policies-in-your-enterprise#enforcing-a-policy-for-inviting-outside-collaborators-to-repositories)." + * + * For more information on permission levels, see "[Repository permission levels for an organization](https://docs.github.com/github/setting-up-and-managing-organizations-and-teams/repository-permission-levels-for-an-organization#permission-levels-for-repositories-owned-by-an-organization)". There are restrictions on which permissions can be granted to organization members when an organization base role is in place. In this case, the permission being given must be equal to or higher than the org base permission. Otherwise, the request will fail with: + * + * ``` + * Cannot assign {member} permission of {role name} + * ``` + * + * Note that, if you choose not to pass any parameters, you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)." + * + * The invitee will receive a notification that they have been invited to the repository, which they must accept or decline. They may do this via the notifications page, the email they receive, or by using the [repository invitations API endpoints](https://docs.github.com/rest/reference/repos#invitations). + * + * **Updating an existing collaborator's permission level** + * + * The endpoint can also be used to change the permissions of an existing collaborator without first removing and re-adding the collaborator. To change the permissions, use the same endpoint and pass a different `permission` parameter. The response will be a `204`, with no other indication that the permission level changed. + * + * **Rate limits** + * + * You are limited to sending 50 invitations to a repository per 24 hour period. Note there is no limit if you are inviting organization members to an organization repository. 
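+ *
+ * #### Example of inviting a collaborator using Node.js
+ *
+ * A minimal sketch using [@octokit/core](https://github.com/octokit/core.js); the token, owner, repository, and username values are illustrative placeholders.
+ *
+ * ```
+ * const { Octokit } = require("@octokit/core");
+ *
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ *
+ * (async () => {
+ *   // A 201 response returns the pending invitation; 204 means the user already had access.
+ *   const response = await octokit.request(
+ *     "PUT /repos/{owner}/{repo}/collaborators/{username}",
+ *     { owner: "octo-org", repo: "octo-repo", username: "octocat", permission: "push" }
+ *   );
+ *   console.log(response.status);
+ * })();
+ * ```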
+ */ + put: operations["repos/add-collaborator"]; + delete: operations["repos/remove-collaborator"]; + }; + "/repos/{owner}/{repo}/collaborators/{username}/permission": { + /** Checks the repository permission of a collaborator. The possible repository permissions are `admin`, `write`, `read`, and `none`. */ + get: operations["repos/get-collaborator-permission-level"]; + }; + "/repos/{owner}/{repo}/comments": { + /** + * Commit Comments use [these custom media types](https://docs.github.com/rest/reference/repos#custom-media-types). You can read more about the use of media types in the API [here](https://docs.github.com/rest/overview/media-types/). + * + * Comments are ordered by ascending ID. + */ + get: operations["repos/list-commit-comments-for-repo"]; + }; + "/repos/{owner}/{repo}/comments/{comment_id}": { + get: operations["repos/get-commit-comment"]; + delete: operations["repos/delete-commit-comment"]; + patch: operations["repos/update-commit-comment"]; + }; + "/repos/{owner}/{repo}/comments/{comment_id}/reactions": { + /** List the reactions to a [commit comment](https://docs.github.com/rest/reference/repos#comments). */ + get: operations["reactions/list-for-commit-comment"]; + /** Create a reaction to a [commit comment](https://docs.github.com/rest/reference/repos#comments). A response with an HTTP `200` status means that you already added the reaction type to this commit comment. */ + post: operations["reactions/create-for-commit-comment"]; + }; + "/repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}": { + /** + * **Note:** You can also specify a repository by `repository_id` using the route `DELETE /repositories/:repository_id/comments/:comment_id/reactions/:reaction_id`. + * + * Delete a reaction to a [commit comment](https://docs.github.com/rest/reference/repos#comments). + */ + delete: operations["reactions/delete-for-commit-comment"]; + }; + "/repos/{owner}/{repo}/commits": { + /** + * **Signature verification object** + * + * The response will include a `verification` object that describes the result of verifying the commit's signature. The following fields are included in the `verification` object: + * + * | Name | Type | Description | + * | ---- | ---- | ----------- | + * | `verified` | `boolean` | Indicates whether GitHub considers the signature in this commit to be verified. | + * | `reason` | `string` | The reason for verified value. Possible values and their meanings are enumerated in table below. | + * | `signature` | `string` | The signature that was extracted from the commit. | + * | `payload` | `string` | The value that was signed. | + * + * These are the possible values for `reason` in the `verification` object: + * + * | Value | Description | + * | ----- | ----------- | + * | `expired_key` | The key that made the signature is expired. | + * | `not_signing_key` | The "signing" flag is not among the usage flags in the GPG key that made the signature. | + * | `gpgverify_error` | There was an error communicating with the signature verification service. | + * | `gpgverify_unavailable` | The signature verification service is currently unavailable. | + * | `unsigned` | The object does not include a signature. | + * | `unknown_signature_type` | A non-PGP signature was found in the commit. | + * | `no_user` | No user was associated with the `committer` email address in the commit. | + * | `unverified_email` | The `committer` email address in the commit was associated with a user, but the email address is not verified on her/his account. 
| + * | `bad_email` | The `committer` email address in the commit is not included in the identities of the PGP key that made the signature. | + * | `unknown_key` | The key that made the signature has not been registered with any user's account. | + * | `malformed_signature` | There was an error parsing the signature. | + * | `invalid` | The signature could not be cryptographically verified using the key whose key-id was found in the signature. | + * | `valid` | None of the above errors applied, so the signature is considered to be verified. | + */ + get: operations["repos/list-commits"]; + }; + "/repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head": { + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Returns all branches where the given commit SHA is the HEAD, or latest commit for the branch. + */ + get: operations["repos/list-branches-for-head-commit"]; + }; + "/repos/{owner}/{repo}/commits/{commit_sha}/comments": { + /** Use the `:commit_sha` to specify the commit that will have its comments listed. */ + get: operations["repos/list-comments-for-commit"]; + /** + * Create a comment for a commit using its `:commit_sha`. + * + * This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. + */ + post: operations["repos/create-commit-comment"]; + }; + "/repos/{owner}/{repo}/commits/{commit_sha}/pulls": { + /** Lists the merged pull request that introduced the commit to the repository. If the commit is not present in the default branch, additionally returns open pull requests associated with the commit. The results may include open and closed pull requests. */ + get: operations["repos/list-pull-requests-associated-with-commit"]; + }; + "/repos/{owner}/{repo}/commits/{ref}": { + /** + * Returns the contents of a single commit reference. You must have `read` access for the repository to use this endpoint. + * + * **Note:** If there are more than 300 files in the commit diff, the response will include pagination link headers for the remaining files, up to a limit of 3000 files. Each page contains the static commit information, and the only changes are to the file listing. + * + * You can pass the appropriate [media type](https://docs.github.com/rest/overview/media-types/#commits-commit-comparison-and-pull-requests) to fetch `diff` and `patch` formats. Diffs with binary data will have no `patch` property. + * + * To return only the SHA-1 hash of the commit reference, you can provide the `sha` custom [media type](https://docs.github.com/rest/overview/media-types/#commits-commit-comparison-and-pull-requests) in the `Accept` header. 
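+ *
+ * For example, a minimal sketch of requesting only the SHA-1 hash with [@octokit/core](https://github.com/octokit/core.js), where the `mediaType.format` option sets the `Accept` header; the owner, repository, and ref values are illustrative placeholders:
+ *
+ * ```
+ * const { Octokit } = require("@octokit/core");
+ *
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ *
+ * (async () => {
+ *   const { data: sha } = await octokit.request("GET /repos/{owner}/{repo}/commits/{ref}", {
+ *     owner: "octo-org",
+ *     repo: "octo-repo",
+ *     ref: "main",
+ *     mediaType: { format: "sha" },
+ *   });
+ *   console.log(sha); // the bare commit SHA as a string
+ * })();
+ * ```
+ *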
You can use this endpoint to check if a remote reference's SHA-1 hash is the same as your local reference's SHA-1 hash by providing the local SHA-1 reference as the ETag. + * + * **Signature verification object** + * + * The response will include a `verification` object that describes the result of verifying the commit's signature. The following fields are included in the `verification` object: + * + * | Name | Type | Description | + * | ---- | ---- | ----------- | + * | `verified` | `boolean` | Indicates whether GitHub considers the signature in this commit to be verified. | + * | `reason` | `string` | The reason for verified value. Possible values and their meanings are enumerated in table below. | + * | `signature` | `string` | The signature that was extracted from the commit. | + * | `payload` | `string` | The value that was signed. | + * + * These are the possible values for `reason` in the `verification` object: + * + * | Value | Description | + * | ----- | ----------- | + * | `expired_key` | The key that made the signature is expired. | + * | `not_signing_key` | The "signing" flag is not among the usage flags in the GPG key that made the signature. | + * | `gpgverify_error` | There was an error communicating with the signature verification service. | + * | `gpgverify_unavailable` | The signature verification service is currently unavailable. | + * | `unsigned` | The object does not include a signature. | + * | `unknown_signature_type` | A non-PGP signature was found in the commit. | + * | `no_user` | No user was associated with the `committer` email address in the commit. | + * | `unverified_email` | The `committer` email address in the commit was associated with a user, but the email address is not verified on her/his account. | + * | `bad_email` | The `committer` email address in the commit is not included in the identities of the PGP key that made the signature. | + * | `unknown_key` | The key that made the signature has not been registered with any user's account. | + * | `malformed_signature` | There was an error parsing the signature. | + * | `invalid` | The signature could not be cryptographically verified using the key whose key-id was found in the signature. | + * | `valid` | None of the above errors applied, so the signature is considered to be verified. | + */ + get: operations["repos/get-commit"]; + }; + "/repos/{owner}/{repo}/commits/{ref}/check-runs": { + /** + * **Note:** The Checks API only looks for pushes in the repository where the check suite or check run were created. Pushes to a branch in a forked repository are not detected and return an empty `pull_requests` array. + * + * Lists check runs for a commit ref. The `ref` can be a SHA, branch name, or a tag name. GitHub Apps must have the `checks:read` permission on a private repository or pull access to a public repository to get check runs. OAuth Apps and authenticated users must have the `repo` scope to get check runs in a private repository. + */ + get: operations["checks/list-for-ref"]; + }; + "/repos/{owner}/{repo}/commits/{ref}/check-suites": { + /** + * **Note:** The Checks API only looks for pushes in the repository where the check suite or check run were created. Pushes to a branch in a forked repository are not detected and return an empty `pull_requests` array and a `null` value for `head_branch`. + * + * Lists check suites for a commit `ref`. The `ref` can be a SHA, branch name, or a tag name. 
GitHub Apps must have the `checks:read` permission on a private repository or pull access to a public repository to list check suites. OAuth Apps and authenticated users must have the `repo` scope to get check suites in a private repository. + */ + get: operations["checks/list-suites-for-ref"]; + }; + "/repos/{owner}/{repo}/commits/{ref}/status": { + /** + * Users with pull access in a repository can access a combined view of commit statuses for a given ref. The ref can be a SHA, a branch name, or a tag name. + * + * + * Additionally, a combined `state` is returned. The `state` is one of: + * + * * **failure** if any of the contexts report as `error` or `failure` + * * **pending** if there are no statuses or a context is `pending` + * * **success** if the latest status for all contexts is `success` + */ + get: operations["repos/get-combined-status-for-ref"]; + }; + "/repos/{owner}/{repo}/commits/{ref}/statuses": { + /** + * Users with pull access in a repository can view commit statuses for a given ref. The ref can be a SHA, a branch name, or a tag name. Statuses are returned in reverse chronological order. The first status in the list will be the latest one. + * + * This resource is also available via a legacy route: `GET /repos/:owner/:repo/statuses/:ref`. + */ + get: operations["repos/list-commit-statuses-for-ref"]; + }; + "/repos/{owner}/{repo}/community/profile": { + /** + * This endpoint will return all community profile metrics, including an + * overall health score, repository description, the presence of documentation, detected + * code of conduct, detected license, and the presence of ISSUE\_TEMPLATE, PULL\_REQUEST\_TEMPLATE, + * README, and CONTRIBUTING files. + * + * The `health_percentage` score is defined as a percentage of how many of + * these four documents are present: README, CONTRIBUTING, LICENSE, and + * CODE_OF_CONDUCT. For example, if all four documents are present, then + * the `health_percentage` is `100`. If only one is present, then the + * `health_percentage` is `25`. + * + * `content_reports_enabled` is only returned for organization-owned repositories. + */ + get: operations["repos/get-community-profile-metrics"]; + }; + "/repos/{owner}/{repo}/compare/{basehead}": { + /** + * The `basehead` param is comprised of two parts: `base` and `head`. Both must be branch names in `repo`. To compare branches across other repositories in the same network as `repo`, use the format `:branch`. + * + * The response from the API is equivalent to running the `git log base..head` command; however, commits are returned in chronological order. Pass the appropriate [media type](https://docs.github.com/rest/overview/media-types/#commits-commit-comparison-and-pull-requests) to fetch diff and patch formats. + * + * The response also includes details on the files that were changed between the two commits. This includes the status of the change (for example, if a file was added, removed, modified, or renamed), and details of the change itself. For example, files with a `renamed` status have a `previous_filename` field showing the previous filename of the file, and files with a `modified` status have a `patch` field showing the changes made to the file. + * + * **Working with large comparisons** + * + * To process a response with a large number of commits, you can use (`per_page` or `page`) to paginate the results. When using paging, the list of changed files is only returned with page 1, but includes all changed files for the entire comparison. 
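+ *
+ * For example, a minimal sketch of a paginated comparison using [@octokit/core](https://github.com/octokit/core.js); the owner, repository, and branch names are illustrative placeholders:
+ *
+ * ```
+ * const { Octokit } = require("@octokit/core");
+ *
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ *
+ * (async () => {
+ *   const { data } = await octokit.request("GET /repos/{owner}/{repo}/compare/{basehead}", {
+ *     owner: "octo-org",
+ *     repo: "octo-repo",
+ *     basehead: "main...topic-branch",
+ *     per_page: 100,
+ *     page: 1, // the full list of changed files is only returned with page 1
+ *   });
+ *   console.log(data.total_commits, data.files.length);
+ * })();
+ * ```
+ *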
For more information on working with pagination, see "[Traversing with pagination](/rest/guides/traversing-with-pagination)." + * + * When calling this API without any paging parameters (`per_page` or `page`), the returned list is limited to 250 commits and the last commit in the list is the most recent of the entire comparison. When a paging parameter is specified, the first commit in the returned list of each page is the earliest. + * + * **Signature verification object** + * + * The response will include a `verification` object that describes the result of verifying the commit's signature. The following fields are included in the `verification` object: + * + * | Name | Type | Description | + * | ---- | ---- | ----------- | + * | `verified` | `boolean` | Indicates whether GitHub considers the signature in this commit to be verified. | + * | `reason` | `string` | The reason for verified value. Possible values and their meanings are enumerated in table below. | + * | `signature` | `string` | The signature that was extracted from the commit. | + * | `payload` | `string` | The value that was signed. | + * + * These are the possible values for `reason` in the `verification` object: + * + * | Value | Description | + * | ----- | ----------- | + * | `expired_key` | The key that made the signature is expired. | + * | `not_signing_key` | The "signing" flag is not among the usage flags in the GPG key that made the signature. | + * | `gpgverify_error` | There was an error communicating with the signature verification service. | + * | `gpgverify_unavailable` | The signature verification service is currently unavailable. | + * | `unsigned` | The object does not include a signature. | + * | `unknown_signature_type` | A non-PGP signature was found in the commit. | + * | `no_user` | No user was associated with the `committer` email address in the commit. | + * | `unverified_email` | The `committer` email address in the commit was associated with a user, but the email address is not verified on her/his account. | + * | `bad_email` | The `committer` email address in the commit is not included in the identities of the PGP key that made the signature. | + * | `unknown_key` | The key that made the signature has not been registered with any user's account. | + * | `malformed_signature` | There was an error parsing the signature. | + * | `invalid` | The signature could not be cryptographically verified using the key whose key-id was found in the signature. | + * | `valid` | None of the above errors applied, so the signature is considered to be verified. | + */ + get: operations["repos/compare-commits-with-basehead"]; + }; + "/repos/{owner}/{repo}/contents/{path}": { + /** + * Gets the contents of a file or directory in a repository. Specify the file path or directory in `:path`. If you omit + * `:path`, you will receive the contents of the repository's root directory. See the description below regarding what the API response includes for directories. + * + * Files and symlinks support [a custom media type](https://docs.github.com/rest/reference/repos#custom-media-types) for + * retrieving the raw content or rendered HTML (when supported). All content types support [a custom media + * type](https://docs.github.com/rest/reference/repos#custom-media-types) to ensure the content is returned in a consistent + * object format. + * + * **Note**: + * * To get a repository's contents recursively, you can [recursively get the tree](https://docs.github.com/rest/reference/git#trees). 
+ * * This API has an upper limit of 1,000 files for a directory. If you need to retrieve more files, use the [Git Trees + * API](https://docs.github.com/rest/reference/git#get-a-tree). + * + * #### Size limits + * If the requested file's size is: + * * 1 MB or smaller: All features of this endpoint are supported. + * * Between 1-100 MB: Only the `raw` or `object` [custom media types](https://docs.github.com/rest/repos/contents#custom-media-types-for-repository-contents) are supported. Both will work as normal, except that when using the `object` media type, the `content` field will be an empty string and the `encoding` field will be `"none"`. To get the contents of these larger files, use the `raw` media type. + * * Greater than 100 MB: This endpoint is not supported. + * + * #### If the content is a directory + * The response will be an array of objects, one object for each item in the directory. + * When listing the contents of a directory, submodules have their "type" specified as "file". Logically, the value + * _should_ be "submodule". This behavior exists in API v3 [for backwards compatibility purposes](https://git.io/v1YCW). + * In the next major version of the API, the type will be returned as "submodule". + * + * #### If the content is a symlink + * If the requested `:path` points to a symlink, and the symlink's target is a normal file in the repository, then the + * API responds with the content of the file (in the format shown in the example. Otherwise, the API responds with an object + * describing the symlink itself. + * + * #### If the content is a submodule + * The `submodule_git_url` identifies the location of the submodule repository, and the `sha` identifies a specific + * commit within the submodule repository. Git uses the given URL when cloning the submodule repository, and checks out + * the submodule at that specific commit. + * + * If the submodule repository is not hosted on github.com, the Git URLs (`git_url` and `_links["git"]`) and the + * github.com URLs (`html_url` and `_links["html"]`) will have null values. + */ + get: operations["repos/get-content"]; + /** Creates a new file or replaces an existing file in a repository. */ + put: operations["repos/create-or-update-file-contents"]; + /** + * Deletes a file in a repository. + * + * You can provide an additional `committer` parameter, which is an object containing information about the committer. Or, you can provide an `author` parameter, which is an object containing information about the author. + * + * The `author` section is optional and is filled in with the `committer` information if omitted. If the `committer` information is omitted, the authenticated user's information is used. + * + * You must provide values for both `name` and `email`, whether you choose to use `author` or `committer`. Otherwise, you'll receive a `422` status code. + */ + delete: operations["repos/delete-file"]; + }; + "/repos/{owner}/{repo}/contributors": { + /** + * Lists contributors to the specified repository and sorts them by the number of commits per contributor in descending order. This endpoint may return information that is a few hours old because the GitHub REST API v3 caches contributor data to improve performance. + * + * GitHub identifies contributors by author email address. This endpoint groups contribution counts by GitHub user, which includes all associated email addresses. To improve performance, only the first 500 author email addresses in the repository link to GitHub users. 
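+ *
+ * For example, a minimal sketch of listing contributors, including anonymous ones, using [@octokit/core](https://github.com/octokit/core.js); the token, owner, and repository values are illustrative placeholders:
+ *
+ * ```
+ * const { Octokit } = require("@octokit/core");
+ *
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ *
+ * (async () => {
+ *   const { data: contributors } = await octokit.request("GET /repos/{owner}/{repo}/contributors", {
+ *     owner: "octo-org",
+ *     repo: "octo-repo",
+ *     anon: "true", // include anonymous contributors (identified only by email) in the results
+ *     per_page: 100,
+ *   });
+ *   console.log(contributors.length);
+ * })();
+ * ```
+ *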
The rest will appear as anonymous contributors without associated GitHub user information. + */ + get: operations["repos/list-contributors"]; + }; + "/repos/{owner}/{repo}/dependabot/secrets": { + /** Lists all secrets available in a repository without revealing their encrypted values. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` repository permission to use this endpoint. */ + get: operations["dependabot/list-repo-secrets"]; + }; + "/repos/{owner}/{repo}/dependabot/secrets/public-key": { + /** Gets your public key, which you need to encrypt secrets. You need to encrypt a secret before you can create or update secrets. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `dependabot_secrets` repository permission to use this endpoint. */ + get: operations["dependabot/get-repo-public-key"]; + }; + "/repos/{owner}/{repo}/dependabot/secrets/{secret_name}": { + /** Gets a single repository secret without revealing its encrypted value. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` repository permission to use this endpoint. */ + get: operations["dependabot/get-repo-secret"]; + /** + * Creates or updates a repository secret with an encrypted value. Encrypt your secret using + * [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages). You must authenticate using an access + * token with the `repo` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` repository + * permission to use this endpoint. + * + * #### Example encrypting a secret using Node.js + * + * Encrypt your secret using the [tweetsodium](https://github.com/github/tweetsodium) library. + * + * ``` + * const sodium = require('tweetsodium'); + * + * const key = "base64-encoded-public-key"; + * const value = "plain-text-secret"; + * + * // Convert the message and key to Uint8Array's (Buffer implements that interface) + * const messageBytes = Buffer.from(value); + * const keyBytes = Buffer.from(key, 'base64'); + * + * // Encrypt using LibSodium. + * const encryptedBytes = sodium.seal(messageBytes, keyBytes); + * + * // Base64 the encrypted secret + * const encrypted = Buffer.from(encryptedBytes).toString('base64'); + * + * console.log(encrypted); + * ``` + * + * + * #### Example encrypting a secret using Python + * + * Encrypt your secret using [pynacl](https://pynacl.readthedocs.io/en/latest/public/#nacl-public-sealedbox) with Python 3. + * + * ``` + * from base64 import b64encode + * from nacl import encoding, public + * + * def encrypt(public_key: str, secret_value: str) -> str: + * """Encrypt a Unicode string using the public key.""" + * public_key = public.PublicKey(public_key.encode("utf-8"), encoding.Base64Encoder()) + * sealed_box = public.SealedBox(public_key) + * encrypted = sealed_box.encrypt(secret_value.encode("utf-8")) + * return b64encode(encrypted).decode("utf-8") + * ``` + * + * #### Example encrypting a secret using C# + * + * Encrypt your secret using the [Sodium.Core](https://www.nuget.org/packages/Sodium.Core/) package. 
+ * + * ``` + * var secretValue = System.Text.Encoding.UTF8.GetBytes("mySecret"); + * var publicKey = Convert.FromBase64String("2Sg8iYjAxxmI2LvUXpJjkYrMxURPc8r+dB7TJyvvcCU="); + * + * var sealedPublicKeyBox = Sodium.SealedPublicKeyBox.Create(secretValue, publicKey); + * + * Console.WriteLine(Convert.ToBase64String(sealedPublicKeyBox)); + * ``` + * + * #### Example encrypting a secret using Ruby + * + * Encrypt your secret using the [rbnacl](https://github.com/RubyCrypto/rbnacl) gem. + * + * ```ruby + * require "rbnacl" + * require "base64" + * + * key = Base64.decode64("+ZYvJDZMHUfBkJdyq5Zm9SKqeuBQ4sj+6sfjlH4CgG0=") + * public_key = RbNaCl::PublicKey.new(key) + * + * box = RbNaCl::Boxes::Sealed.from_public_key(public_key) + * encrypted_secret = box.encrypt("my_secret") + * + * # Print the base64 encoded secret + * puts Base64.strict_encode64(encrypted_secret) + * ``` + */ + put: operations["dependabot/create-or-update-repo-secret"]; + /** Deletes a secret in a repository using the secret name. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` repository permission to use this endpoint. */ + delete: operations["dependabot/delete-repo-secret"]; + }; + "/repos/{owner}/{repo}/dependency-graph/compare/{basehead}": { + /** Gets the diff of the dependency changes between two commits of a repository, based on the changes to the dependency manifests made in those commits. */ + get: operations["dependency-graph/diff-range"]; + }; + "/repos/{owner}/{repo}/dependency-graph/snapshots": { + /** Create a new snapshot of a repository's dependencies. You must authenticate using an access token with the `repo` scope to use this endpoint for a repository that the requesting user has access to. */ + post: operations["dependency-graph/create-repository-snapshot"]; + }; + "/repos/{owner}/{repo}/deployments": { + /** Simple filtering of deployments is available via query parameters: */ + get: operations["repos/list-deployments"]; + /** + * Deployments offer a few configurable parameters with certain defaults. + * + * The `ref` parameter can be any named branch, tag, or SHA. At GitHub we often deploy branches and verify them + * before we merge a pull request. + * + * The `environment` parameter allows deployments to be issued to different runtime environments. Teams often have + * multiple environments for verifying their applications, such as `production`, `staging`, and `qa`. This parameter + * makes it easier to track which environments have requested deployments. The default environment is `production`. + * + * The `auto_merge` parameter is used to ensure that the requested ref is not behind the repository's default branch. If + * the ref _is_ behind the default branch for the repository, we will attempt to merge it for you. If the merge succeeds, + * the API will return a successful merge commit. If merge conflicts prevent the merge from succeeding, the API will + * return a failure response. + * + * By default, [commit statuses](https://docs.github.com/rest/commits/statuses) for every submitted context must be in a `success` + * state. The `required_contexts` parameter allows you to specify a subset of contexts that must be `success`, or to + * specify contexts that have not yet been submitted. You are not required to use commit statuses to deploy. If you do + * not require any contexts or create any commit statuses, the deployment will always succeed. 
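+ *
+ * #### Example of creating a deployment using Node.js
+ *
+ * A minimal sketch of the parameters described here, using [@octokit/core](https://github.com/octokit/core.js); the token, owner, repository, ref, and environment values are illustrative placeholders.
+ *
+ * ```
+ * const { Octokit } = require("@octokit/core");
+ *
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ *
+ * (async () => {
+ *   const { data: deployment } = await octokit.request("POST /repos/{owner}/{repo}/deployments", {
+ *     owner: "octo-org",
+ *     repo: "octo-repo",
+ *     ref: "topic-branch",
+ *     environment: "staging",
+ *     auto_merge: false,     // do not attempt to merge the default branch into the ref
+ *     required_contexts: [], // skip commit status checks for this deployment
+ *     description: "Deploy request from the REST API",
+ *   });
+ *   console.log(deployment.id, deployment.environment);
+ * })();
+ * ```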
+ * + * The `payload` parameter is available for any extra information that a deployment system might need. It is a JSON text + * field that will be passed on when a deployment event is dispatched. + * + * The `task` parameter is used by the deployment system to allow different execution paths. In the web world this might + * be `deploy:migrations` to run schema changes on the system. In the compiled world this could be a flag to compile an + * application with debugging enabled. + * + * Users with `repo` or `repo_deployment` scopes can create a deployment for a given ref. + * + * #### Merged branch response + * You will see this response when GitHub automatically merges the base branch into the topic branch instead of creating + * a deployment. This auto-merge happens when: + * * Auto-merge option is enabled in the repository + * * Topic branch does not include the latest changes on the base branch, which is `master` in the response example + * * There are no merge conflicts + * + * If there are no new commits in the base branch, a new request to create a deployment should give a successful + * response. + * + * #### Merge conflict response + * This error happens when the `auto_merge` option is enabled and when the default branch (in this case `master`), can't + * be merged into the branch that's being deployed (in this case `topic-branch`), due to merge conflicts. + * + * #### Failed commit status checks + * This error happens when the `required_contexts` parameter indicates that one or more contexts need to have a `success` + * status for the commit to be deployed, but one or more of the required contexts do not have a state of `success`. + */ + post: operations["repos/create-deployment"]; + }; + "/repos/{owner}/{repo}/deployments/{deployment_id}": { + get: operations["repos/get-deployment"]; + /** + * If the repository only has one deployment, you can delete the deployment regardless of its status. If the repository has more than one deployment, you can only delete inactive deployments. This ensures that repositories with multiple deployments will always have an active deployment. Anyone with `repo` or `repo_deployment` scopes can delete a deployment. + * + * To set a deployment as inactive, you must: + * + * * Create a new deployment that is active so that the system has a record of the current state, then delete the previously active deployment. + * * Mark the active deployment as inactive by adding any non-successful deployment status. + * + * For more information, see "[Create a deployment](https://docs.github.com/rest/reference/repos/#create-a-deployment)" and "[Create a deployment status](https://docs.github.com/rest/reference/repos#create-a-deployment-status)." + */ + delete: operations["repos/delete-deployment"]; + }; + "/repos/{owner}/{repo}/deployments/{deployment_id}/statuses": { + /** Users with pull access can view deployment statuses for a deployment: */ + get: operations["repos/list-deployment-statuses"]; + /** + * Users with `push` access can create deployment statuses for a given deployment. + * + * GitHub Apps require `read & write` access to "Deployments" and `read-only` access to "Repo contents" (for private repos). OAuth Apps require the `repo_deployment` scope. 
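+ *
+ * #### Example of creating a deployment status using Node.js
+ *
+ * A minimal sketch using [@octokit/core](https://github.com/octokit/core.js); the token, owner, repository, deployment id, and URLs are illustrative placeholders.
+ *
+ * ```
+ * const { Octokit } = require("@octokit/core");
+ *
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ *
+ * (async () => {
+ *   const { data: status } = await octokit.request(
+ *     "POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses",
+ *     {
+ *       owner: "octo-org",
+ *       repo: "octo-repo",
+ *       deployment_id: 42,
+ *       state: "success",
+ *       log_url: "https://example.com/deployments/42/output",
+ *       description: "Deployment finished successfully.",
+ *     }
+ *   );
+ *   console.log(status.state);
+ * })();
+ * ```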
+ */ + post: operations["repos/create-deployment-status"]; + }; + "/repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}": { + /** Users with pull access can view a deployment status for a deployment: */ + get: operations["repos/get-deployment-status"]; + }; + "/repos/{owner}/{repo}/dispatches": { + /** + * You can use this endpoint to trigger a webhook event called `repository_dispatch` when you want activity that happens outside of GitHub to trigger a GitHub Actions workflow or GitHub App webhook. You must configure your GitHub Actions workflow or GitHub App to run when the `repository_dispatch` event occurs. For an example `repository_dispatch` webhook payload, see "[RepositoryDispatchEvent](https://docs.github.com/webhooks/event-payloads/#repository_dispatch)." + * + * The `client_payload` parameter is available for any extra information that your workflow might need. This parameter is a JSON payload that will be passed on when the webhook event is dispatched. For example, the `client_payload` can include a message that a user would like to send using a GitHub Actions workflow. Or the `client_payload` can be used as a test to debug your workflow. + * + * This endpoint requires write access to the repository by providing either: + * + * - Personal access tokens with `repo` scope. For more information, see "[Creating a personal access token for the command line](https://docs.github.com/articles/creating-a-personal-access-token-for-the-command-line)" in the GitHub Help documentation. + * - GitHub Apps with both `metadata:read` and `contents:read&write` permissions. + * + * This input example shows how you can use the `client_payload` as a test to debug your workflow. + */ + post: operations["repos/create-dispatch-event"]; + }; + "/repos/{owner}/{repo}/environments": { + /** + * Get all environments for a repository. + * + * Anyone with read access to the repository can use this endpoint. If the repository is private, you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + get: operations["repos/get-all-environments"]; + }; + "/repos/{owner}/{repo}/environments/{environment_name}": { + /** Anyone with read access to the repository can use this endpoint. If the repository is private, you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. */ + get: operations["repos/get-environment"]; + /** + * Create or update an environment with protection rules, such as required reviewers. For more information about environment protection rules, see "[Environments](/actions/reference/environments#environment-protection-rules)." + * + * **Note:** Although you can use this operation to specify that only branches that match specified name patterns can deploy to this environment, you must use the UI to set the name patterns. For more information, see "[Environments](/actions/reference/environments#deployment-branches)." + * + * **Note:** To create or update secrets for an environment, see "[Secrets](/rest/reference/actions#secrets)." + * + * You must authenticate using an access token with the repo scope to use this endpoint. + */ + put: operations["repos/create-or-update-environment"]; + /** You must authenticate using an access token with the repo scope to use this endpoint. 
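+ *
+ * #### Example of triggering a `repository_dispatch` event using Node.js
+ *
+ * Referring back to the `/repos/{owner}/{repo}/dispatches` endpoint documented above: a minimal sketch using [@octokit/core](https://github.com/octokit/core.js). The token, owner, repository, event type, and payload values are illustrative placeholders.
+ *
+ * ```
+ * const { Octokit } = require("@octokit/core");
+ *
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ *
+ * (async () => {
+ *   // A 204 response indicates that the event was dispatched.
+ *   const response = await octokit.request("POST /repos/{owner}/{repo}/dispatches", {
+ *     owner: "octo-org",
+ *     repo: "octo-repo",
+ *     event_type: "run-integration-tests",
+ *     client_payload: { unit: false, integration: true },
+ *   });
+ *   console.log(response.status);
+ * })();
+ * ```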
*/ + delete: operations["repos/delete-an-environment"]; + }; + "/repos/{owner}/{repo}/events": { + get: operations["activity/list-repo-events"]; + }; + "/repos/{owner}/{repo}/forks": { + get: operations["repos/list-forks"]; + /** + * Create a fork for the authenticated user. + * + * **Note**: Forking a Repository happens asynchronously. You may have to wait a short period of time before you can access the git objects. If this takes longer than 5 minutes, be sure to contact [GitHub Support](https://support.github.com/contact?tags=dotcom-rest-api). + */ + post: operations["repos/create-fork"]; + }; + "/repos/{owner}/{repo}/git/blobs": { + post: operations["git/create-blob"]; + }; + "/repos/{owner}/{repo}/git/blobs/{file_sha}": { + /** + * The `content` in the response will always be Base64 encoded. + * + * _Note_: This API supports blobs up to 100 megabytes in size. + */ + get: operations["git/get-blob"]; + }; + "/repos/{owner}/{repo}/git/commits": { + /** + * Creates a new Git [commit object](https://git-scm.com/book/en/v1/Git-Internals-Git-Objects#Commit-Objects). + * + * **Signature verification object** + * + * The response will include a `verification` object that describes the result of verifying the commit's signature. The following fields are included in the `verification` object: + * + * | Name | Type | Description | + * | ---- | ---- | ----------- | + * | `verified` | `boolean` | Indicates whether GitHub considers the signature in this commit to be verified. | + * | `reason` | `string` | The reason for verified value. Possible values and their meanings are enumerated in the table below. | + * | `signature` | `string` | The signature that was extracted from the commit. | + * | `payload` | `string` | The value that was signed. | + * + * These are the possible values for `reason` in the `verification` object: + * + * | Value | Description | + * | ----- | ----------- | + * | `expired_key` | The key that made the signature is expired. | + * | `not_signing_key` | The "signing" flag is not among the usage flags in the GPG key that made the signature. | + * | `gpgverify_error` | There was an error communicating with the signature verification service. | + * | `gpgverify_unavailable` | The signature verification service is currently unavailable. | + * | `unsigned` | The object does not include a signature. | + * | `unknown_signature_type` | A non-PGP signature was found in the commit. | + * | `no_user` | No user was associated with the `committer` email address in the commit. | + * | `unverified_email` | The `committer` email address in the commit was associated with a user, but the email address is not verified on her/his account. | + * | `bad_email` | The `committer` email address in the commit is not included in the identities of the PGP key that made the signature. | + * | `unknown_key` | The key that made the signature has not been registered with any user's account. | + * | `malformed_signature` | There was an error parsing the signature. | + * | `invalid` | The signature could not be cryptographically verified using the key whose key-id was found in the signature. | + * | `valid` | None of the above errors applied, so the signature is considered to be verified. | + */ + post: operations["git/create-commit"]; + }; + "/repos/{owner}/{repo}/git/commits/{commit_sha}": { + /** + * Gets a Git [commit object](https://git-scm.com/book/en/v1/Git-Internals-Git-Objects#Commit-Objects). 
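+ *
+ * For example, a minimal sketch of fetching a commit object and inspecting its `verification` result with [@octokit/core](https://github.com/octokit/core.js); the token, owner, repository, and commit SHA are illustrative placeholders:
+ *
+ * ```
+ * const { Octokit } = require("@octokit/core");
+ *
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ *
+ * (async () => {
+ *   const { data: commit } = await octokit.request(
+ *     "GET /repos/{owner}/{repo}/git/commits/{commit_sha}",
+ *     {
+ *       owner: "octo-org",
+ *       repo: "octo-repo",
+ *       commit_sha: "7638417db6d59f3c431d3e1f261cc637155684cd",
+ *     }
+ *   );
+ *   // verified and reason are described in the tables below.
+ *   console.log(commit.verification.verified, commit.verification.reason);
+ * })();
+ * ```
+ *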
+ * + * **Signature verification object** + * + * The response will include a `verification` object that describes the result of verifying the commit's signature. The following fields are included in the `verification` object: + * + * | Name | Type | Description | + * | ---- | ---- | ----------- | + * | `verified` | `boolean` | Indicates whether GitHub considers the signature in this commit to be verified. | + * | `reason` | `string` | The reason for verified value. Possible values and their meanings are enumerated in the table below. | + * | `signature` | `string` | The signature that was extracted from the commit. | + * | `payload` | `string` | The value that was signed. | + * + * These are the possible values for `reason` in the `verification` object: + * + * | Value | Description | + * | ----- | ----------- | + * | `expired_key` | The key that made the signature is expired. | + * | `not_signing_key` | The "signing" flag is not among the usage flags in the GPG key that made the signature. | + * | `gpgverify_error` | There was an error communicating with the signature verification service. | + * | `gpgverify_unavailable` | The signature verification service is currently unavailable. | + * | `unsigned` | The object does not include a signature. | + * | `unknown_signature_type` | A non-PGP signature was found in the commit. | + * | `no_user` | No user was associated with the `committer` email address in the commit. | + * | `unverified_email` | The `committer` email address in the commit was associated with a user, but the email address is not verified on her/his account. | + * | `bad_email` | The `committer` email address in the commit is not included in the identities of the PGP key that made the signature. | + * | `unknown_key` | The key that made the signature has not been registered with any user's account. | + * | `malformed_signature` | There was an error parsing the signature. | + * | `invalid` | The signature could not be cryptographically verified using the key whose key-id was found in the signature. | + * | `valid` | None of the above errors applied, so the signature is considered to be verified. | + */ + get: operations["git/get-commit"]; + }; + "/repos/{owner}/{repo}/git/matching-refs/{ref}": { + /** + * Returns an array of references from your Git database that match the supplied name. The `:ref` in the URL must be formatted as `heads/` for branches and `tags/` for tags. If the `:ref` doesn't exist in the repository, but existing refs start with `:ref`, they will be returned as an array. + * + * When you use this endpoint without providing a `:ref`, it will return an array of all the references from your Git database, including notes and stashes if they exist on the server. Anything in the namespace is returned, not just `heads` and `tags`. + * + * **Note:** You need to explicitly [request a pull request](https://docs.github.com/rest/reference/pulls#get-a-pull-request) to trigger a test merge commit, which checks the mergeability of pull requests. For more information, see "[Checking mergeability of pull requests](https://docs.github.com/rest/guides/getting-started-with-the-git-database-api#checking-mergeability-of-pull-requests)". + * + * If you request matching references for a branch named `feature` but the branch `feature` doesn't exist, the response can still include other matching head refs that start with the word `feature`, such as `featureA` and `featureB`. 
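+ *
+ * A short sketch, assuming an authenticated `octokit` client; the ref below is a placeholder branch prefix:
+ *
+ * @example
+ * // Returns refs such as feature, featureA, featureB, ... if any of those branches exist.
+ * const { data: refs } = await octokit.request(
+ *   "GET /repos/{owner}/{repo}/git/matching-refs/{ref}",
+ *   { owner: "octocat", repo: "hello-world", ref: "heads/feature" }
+ * );
+ * console.log(refs.map((r) => r.ref));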
+ */ + get: operations["git/list-matching-refs"]; + }; + "/repos/{owner}/{repo}/git/ref/{ref}": { + /** + * Returns a single reference from your Git database. The `:ref` in the URL must be formatted as `heads/` for branches and `tags/` for tags. If the `:ref` doesn't match an existing ref, a `404` is returned. + * + * **Note:** You need to explicitly [request a pull request](https://docs.github.com/rest/reference/pulls#get-a-pull-request) to trigger a test merge commit, which checks the mergeability of pull requests. For more information, see "[Checking mergeability of pull requests](https://docs.github.com/rest/guides/getting-started-with-the-git-database-api#checking-mergeability-of-pull-requests)". + */ + get: operations["git/get-ref"]; + }; + "/repos/{owner}/{repo}/git/refs": { + /** Creates a reference for your repository. You are unable to create new references for empty repositories, even if the commit SHA-1 hash used exists. Empty repositories are repositories without branches. */ + post: operations["git/create-ref"]; + }; + "/repos/{owner}/{repo}/git/refs/{ref}": { + delete: operations["git/delete-ref"]; + patch: operations["git/update-ref"]; + }; + "/repos/{owner}/{repo}/git/tags": { + /** + * Note that creating a tag object does not create the reference that makes a tag in Git. If you want to create an annotated tag in Git, you have to do this call to create the tag object, and then [create](https://docs.github.com/rest/reference/git#create-a-reference) the `refs/tags/[tag]` reference. If you want to create a lightweight tag, you only have to [create](https://docs.github.com/rest/reference/git#create-a-reference) the tag reference - this call would be unnecessary. + * + * **Signature verification object** + * + * The response will include a `verification` object that describes the result of verifying the commit's signature. The following fields are included in the `verification` object: + * + * | Name | Type | Description | + * | ---- | ---- | ----------- | + * | `verified` | `boolean` | Indicates whether GitHub considers the signature in this commit to be verified. | + * | `reason` | `string` | The reason for verified value. Possible values and their meanings are enumerated in table below. | + * | `signature` | `string` | The signature that was extracted from the commit. | + * | `payload` | `string` | The value that was signed. | + * + * These are the possible values for `reason` in the `verification` object: + * + * | Value | Description | + * | ----- | ----------- | + * | `expired_key` | The key that made the signature is expired. | + * | `not_signing_key` | The "signing" flag is not among the usage flags in the GPG key that made the signature. | + * | `gpgverify_error` | There was an error communicating with the signature verification service. | + * | `gpgverify_unavailable` | The signature verification service is currently unavailable. | + * | `unsigned` | The object does not include a signature. | + * | `unknown_signature_type` | A non-PGP signature was found in the commit. | + * | `no_user` | No user was associated with the `committer` email address in the commit. | + * | `unverified_email` | The `committer` email address in the commit was associated with a user, but the email address is not verified on her/his account. | + * | `bad_email` | The `committer` email address in the commit is not included in the identities of the PGP key that made the signature. | + * | `unknown_key` | The key that made the signature has not been registered with any user's account. 
| + * | `malformed_signature` | There was an error parsing the signature. | + * | `invalid` | The signature could not be cryptographically verified using the key whose key-id was found in the signature. | + * | `valid` | None of the above errors applied, so the signature is considered to be verified. | + */ + post: operations["git/create-tag"]; + }; + "/repos/{owner}/{repo}/git/tags/{tag_sha}": { + /** + * **Signature verification object** + * + * The response will include a `verification` object that describes the result of verifying the commit's signature. The following fields are included in the `verification` object: + * + * | Name | Type | Description | + * | ---- | ---- | ----------- | + * | `verified` | `boolean` | Indicates whether GitHub considers the signature in this commit to be verified. | + * | `reason` | `string` | The reason for verified value. Possible values and their meanings are enumerated in table below. | + * | `signature` | `string` | The signature that was extracted from the commit. | + * | `payload` | `string` | The value that was signed. | + * + * These are the possible values for `reason` in the `verification` object: + * + * | Value | Description | + * | ----- | ----------- | + * | `expired_key` | The key that made the signature is expired. | + * | `not_signing_key` | The "signing" flag is not among the usage flags in the GPG key that made the signature. | + * | `gpgverify_error` | There was an error communicating with the signature verification service. | + * | `gpgverify_unavailable` | The signature verification service is currently unavailable. | + * | `unsigned` | The object does not include a signature. | + * | `unknown_signature_type` | A non-PGP signature was found in the commit. | + * | `no_user` | No user was associated with the `committer` email address in the commit. | + * | `unverified_email` | The `committer` email address in the commit was associated with a user, but the email address is not verified on her/his account. | + * | `bad_email` | The `committer` email address in the commit is not included in the identities of the PGP key that made the signature. | + * | `unknown_key` | The key that made the signature has not been registered with any user's account. | + * | `malformed_signature` | There was an error parsing the signature. | + * | `invalid` | The signature could not be cryptographically verified using the key whose key-id was found in the signature. | + * | `valid` | None of the above errors applied, so the signature is considered to be verified. | + */ + get: operations["git/get-tag"]; + }; + "/repos/{owner}/{repo}/git/trees": { + /** + * The tree creation API accepts nested entries. If you specify both a tree and a nested path modifying that tree, this endpoint will overwrite the contents of the tree with the new path contents, and create a new tree structure. + * + * If you use this endpoint to add, delete, or modify the file contents in a tree, you will need to commit the tree and then update a branch to point to the commit. For more information see "[Create a commit](https://docs.github.com/rest/reference/git#create-a-commit)" and "[Update a reference](https://docs.github.com/rest/reference/git#update-a-reference)." + */ + post: operations["git/create-tree"]; + }; + "/repos/{owner}/{repo}/git/trees/{tree_sha}": { + /** + * Returns a single tree using the SHA1 value for that tree. + * + * If `truncated` is `true` in the response then the number of items in the `tree` array exceeded our maximum limit. 
If you need to fetch more items, use the non-recursive method of fetching trees, and fetch one sub-tree at a time. + */ + get: operations["git/get-tree"]; + }; + "/repos/{owner}/{repo}/hooks": { + get: operations["repos/list-webhooks"]; + /** + * Repositories can have multiple webhooks installed. Each webhook should have a unique `config`. Multiple webhooks can + * share the same `config` as long as those webhooks do not have any `events` that overlap. + */ + post: operations["repos/create-webhook"]; + }; + "/repos/{owner}/{repo}/hooks/{hook_id}": { + /** Returns a webhook configured in a repository. To get only the webhook `config` properties, see "[Get a webhook configuration for a repository](/rest/reference/repos#get-a-webhook-configuration-for-a-repository)." */ + get: operations["repos/get-webhook"]; + delete: operations["repos/delete-webhook"]; + /** Updates a webhook configured in a repository. If you previously had a `secret` set, you must provide the same `secret` or set a new `secret` or the secret will be removed. If you are only updating individual webhook `config` properties, use "[Update a webhook configuration for a repository](/rest/reference/repos#update-a-webhook-configuration-for-a-repository)." */ + patch: operations["repos/update-webhook"]; + }; + "/repos/{owner}/{repo}/hooks/{hook_id}/config": { + /** + * Returns the webhook configuration for a repository. To get more information about the webhook, including the `active` state and `events`, use "[Get a repository webhook](/rest/reference/orgs#get-a-repository-webhook)." + * + * Access tokens must have the `read:repo_hook` or `repo` scope, and GitHub Apps must have the `repository_hooks:read` permission. + */ + get: operations["repos/get-webhook-config-for-repo"]; + /** + * Updates the webhook configuration for a repository. To update more information about the webhook, including the `active` state and `events`, use "[Update a repository webhook](/rest/reference/orgs#update-a-repository-webhook)." + * + * Access tokens must have the `write:repo_hook` or `repo` scope, and GitHub Apps must have the `repository_hooks:write` permission. + */ + patch: operations["repos/update-webhook-config-for-repo"]; + }; + "/repos/{owner}/{repo}/hooks/{hook_id}/deliveries": { + /** Returns a list of webhook deliveries for a webhook configured in a repository. */ + get: operations["repos/list-webhook-deliveries"]; + }; + "/repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}": { + /** Returns a delivery for a webhook configured in a repository. */ + get: operations["repos/get-webhook-delivery"]; + }; + "/repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}/attempts": { + /** Redeliver a webhook delivery for a webhook configured in a repository. */ + post: operations["repos/redeliver-webhook-delivery"]; + }; + "/repos/{owner}/{repo}/hooks/{hook_id}/pings": { + /** This will trigger a [ping event](https://docs.github.com/webhooks/#ping-event) to be sent to the hook. */ + post: operations["repos/ping-webhook"]; + }; + "/repos/{owner}/{repo}/hooks/{hook_id}/tests": { + /** + * This will trigger the hook with the latest push to the current repository if the hook is subscribed to `push` events. If the hook is not subscribed to `push` events, the server will respond with 204 but no test POST will be generated. + * + * **Note**: Previously `/repos/:owner/:repo/hooks/:hook_id/test` + */ + post: operations["repos/test-push-webhook"]; + }; + "/repos/{owner}/{repo}/import": { + /** + * View the progress of an import. 
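+ *
+ * A minimal polling sketch, assuming an authenticated `octokit` client; the `status` values it
+ * checks are the ones listed below:
+ *
+ * @example
+ * const { data: progress } = await octokit.request("GET /repos/{owner}/{repo}/import", {
+ *   owner: "octocat", // placeholder owner
+ *   repo: "imported", // placeholder repository
+ * });
+ * if (progress.status === "complete") console.log("import finished");
+ * else if (progress.status === "error") console.error(progress.failed_step);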
+ * + * **Import status** + * + * This section includes details about the possible values of the `status` field of the Import Progress response. + * + * An import that does not have errors will progress through these steps: + * + * * `detecting` - the "detection" step of the import is in progress because the request did not include a `vcs` parameter. The import is identifying the type of source control present at the URL. + * * `importing` - the "raw" step of the import is in progress. This is where commit data is fetched from the original repository. The import progress response will include `commit_count` (the total number of raw commits that will be imported) and `percent` (0 - 100, the current progress through the import). + * * `mapping` - the "rewrite" step of the import is in progress. This is where SVN branches are converted to Git branches, and where author updates are applied. The import progress response does not include progress information. + * * `pushing` - the "push" step of the import is in progress. This is where the importer updates the repository on GitHub. The import progress response will include `push_percent`, which is the percent value reported by `git push` when it is "Writing objects". + * * `complete` - the import is complete, and the repository is ready on GitHub. + * + * If there are problems, you will see one of these in the `status` field: + * + * * `auth_failed` - the import requires authentication in order to connect to the original repository. To update authentication for the import, please see the [Update an import](https://docs.github.com/rest/reference/migrations#update-an-import) section. + * * `error` - the import encountered an error. The import progress response will include the `failed_step` and an error message. Contact [GitHub Support](https://support.github.com/contact?tags=dotcom-rest-api) for more information. + * * `detection_needs_auth` - the importer requires authentication for the originating repository to continue detection. To update authentication for the import, please see the [Update an import](https://docs.github.com/rest/reference/migrations#update-an-import) section. + * * `detection_found_nothing` - the importer didn't recognize any source control at the URL. To resolve, [Cancel the import](https://docs.github.com/rest/reference/migrations#cancel-an-import) and [retry](https://docs.github.com/rest/reference/migrations#start-an-import) with the correct URL. + * * `detection_found_multiple` - the importer found several projects or repositories at the provided URL. When this is the case, the Import Progress response will also include a `project_choices` field with the possible project choices as values. To update project choice, please see the [Update an import](https://docs.github.com/rest/reference/migrations#update-an-import) section. + * + * **The project_choices field** + * + * When multiple projects are found at the provided URL, the response hash will include a `project_choices` field, the value of which is an array of hashes each representing a project choice. The exact key/value pairs of the project hashes will differ depending on the version control type. + * + * **Git LFS related fields** + * + * This section includes details about Git LFS related fields that may be present in the Import Progress response. + * + * * `use_lfs` - describes whether the import has been opted in or out of using Git LFS. The value can be `opt_in`, `opt_out`, or `undecided` if no action has been taken. 
+ * * `has_large_files` - the boolean value describing whether files larger than 100MB were found during the `importing` step. + * * `large_files_size` - the total size in gigabytes of files larger than 100MB found in the originating repository. + * * `large_files_count` - the total number of files larger than 100MB found in the originating repository. To see a list of these files, make a "Get Large Files" request. + */ + get: operations["migrations/get-import-status"]; + /** Start a source import to a GitHub repository using GitHub Importer. */ + put: operations["migrations/start-import"]; + /** Stop an import for a repository. */ + delete: operations["migrations/cancel-import"]; + /** + * An import can be updated with credentials or a project choice by passing in the appropriate parameters in this API + * request. If no parameters are provided, the import will be restarted. + * + * Some servers (e.g. TFS servers) can have several projects at a single URL. In those cases the import progress will + * have the status `detection_found_multiple` and the Import Progress response will include a `project_choices` array. + * You can select the project to import by providing one of the objects in the `project_choices` array in the update request. + */ + patch: operations["migrations/update-import"]; + }; + "/repos/{owner}/{repo}/import/authors": { + /** + * Each type of source control system represents authors in a different way. For example, a Git commit author has a display name and an email address, but a Subversion commit author just has a username. The GitHub Importer will make the author information valid, but the author might not be correct. For example, it will change the bare Subversion username `hubot` into something like `hubot `. + * + * This endpoint and the [Map a commit author](https://docs.github.com/rest/reference/migrations#map-a-commit-author) endpoint allow you to provide correct Git author information. + */ + get: operations["migrations/get-commit-authors"]; + }; + "/repos/{owner}/{repo}/import/authors/{author_id}": { + /** Update an author's identity for the import. Your application can continue updating authors any time before you push new commits to the repository. */ + patch: operations["migrations/map-commit-author"]; + }; + "/repos/{owner}/{repo}/import/large_files": { + /** List files larger than 100MB found during the import */ + get: operations["migrations/get-large-files"]; + }; + "/repos/{owner}/{repo}/import/lfs": { + /** You can import repositories from Subversion, Mercurial, and TFS that include files larger than 100MB. This ability is powered by [Git LFS](https://git-lfs.github.com). You can learn more about our LFS feature and working with large files [on our help site](https://docs.github.com/articles/versioning-large-files/). */ + patch: operations["migrations/set-lfs-preference"]; + }; + "/repos/{owner}/{repo}/installation": { + /** + * Enables an authenticated GitHub App to find the repository's installation information. The installation's account type will be either an organization or a user account, depending which account the repository belongs to. + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + get: operations["apps/get-repo-installation"]; + }; + "/repos/{owner}/{repo}/interaction-limits": { + /** Shows which type of GitHub user can interact with this repository and when the restriction expires. 
If there are no restrictions, you will see an empty response. */ + get: operations["interactions/get-restrictions-for-repo"]; + /** Temporarily restricts interactions to a certain type of GitHub user within the given repository. You must have owner or admin access to set these restrictions. If an interaction limit is set for the user or organization that owns this repository, you will receive a `409 Conflict` response and will not be able to use this endpoint to change the interaction limit for a single repository. */ + put: operations["interactions/set-restrictions-for-repo"]; + /** Removes all interaction restrictions from the given repository. You must have owner or admin access to remove restrictions. If the interaction limit is set for the user or organization that owns this repository, you will receive a `409 Conflict` response and will not be able to use this endpoint to change the interaction limit for a single repository. */ + delete: operations["interactions/remove-restrictions-for-repo"]; + }; + "/repos/{owner}/{repo}/invitations": { + /** When authenticating as a user with admin rights to a repository, this endpoint will list all currently open repository invitations. */ + get: operations["repos/list-invitations"]; + }; + "/repos/{owner}/{repo}/invitations/{invitation_id}": { + delete: operations["repos/delete-invitation"]; + patch: operations["repos/update-invitation"]; + }; + "/repos/{owner}/{repo}/issues": { + /** + * List issues in a repository. + * + * **Note**: GitHub's REST API v3 considers every pull request an issue, but not every issue is a pull request. For this + * reason, "Issues" endpoints may return both issues and pull requests in the response. You can identify pull requests by + * the `pull_request` key. Be aware that the `id` of a pull request returned from "Issues" endpoints will be an _issue id_. To find out the pull + * request id, use the "[List pull requests](https://docs.github.com/rest/reference/pulls#list-pull-requests)" endpoint. + */ + get: operations["issues/list-for-repo"]; + /** + * Any user with pull access to a repository can create an issue. If [issues are disabled in the repository](https://docs.github.com/articles/disabling-issues/), the API returns a `410 Gone` status. + * + * This endpoint triggers [notifications](https://docs.github.com/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. + */ + post: operations["issues/create"]; + }; + "/repos/{owner}/{repo}/issues/comments": { + /** By default, Issue Comments are ordered by ascending ID. */ + get: operations["issues/list-comments-for-repo"]; + }; + "/repos/{owner}/{repo}/issues/comments/{comment_id}": { + get: operations["issues/get-comment"]; + delete: operations["issues/delete-comment"]; + patch: operations["issues/update-comment"]; + }; + "/repos/{owner}/{repo}/issues/comments/{comment_id}/reactions": { + /** List the reactions to an [issue comment](https://docs.github.com/rest/reference/issues#comments). */ + get: operations["reactions/list-for-issue-comment"]; + /** Create a reaction to an [issue comment](https://docs.github.com/rest/reference/issues#comments). 
A response with an HTTP `200` status means that you already added the reaction type to this issue comment. */ + post: operations["reactions/create-for-issue-comment"]; + }; + "/repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}": { + /** + * **Note:** You can also specify a repository by `repository_id` using the route `DELETE delete /repositories/:repository_id/issues/comments/:comment_id/reactions/:reaction_id`. + * + * Delete a reaction to an [issue comment](https://docs.github.com/rest/reference/issues#comments). + */ + delete: operations["reactions/delete-for-issue-comment"]; + }; + "/repos/{owner}/{repo}/issues/events": { + get: operations["issues/list-events-for-repo"]; + }; + "/repos/{owner}/{repo}/issues/events/{event_id}": { + get: operations["issues/get-event"]; + }; + "/repos/{owner}/{repo}/issues/{issue_number}": { + /** + * The API returns a [`301 Moved Permanently` status](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-redirects-redirects) if the issue was + * [transferred](https://docs.github.com/articles/transferring-an-issue-to-another-repository/) to another repository. If + * the issue was transferred to or deleted from a repository where the authenticated user lacks read access, the API + * returns a `404 Not Found` status. If the issue was deleted from a repository where the authenticated user has read + * access, the API returns a `410 Gone` status. To receive webhook events for transferred and deleted issues, subscribe + * to the [`issues`](https://docs.github.com/webhooks/event-payloads/#issues) webhook. + * + * **Note**: GitHub's REST API v3 considers every pull request an issue, but not every issue is a pull request. For this + * reason, "Issues" endpoints may return both issues and pull requests in the response. You can identify pull requests by + * the `pull_request` key. Be aware that the `id` of a pull request returned from "Issues" endpoints will be an _issue id_. To find out the pull + * request id, use the "[List pull requests](https://docs.github.com/rest/reference/pulls#list-pull-requests)" endpoint. + */ + get: operations["issues/get"]; + /** Issue owners and users with push access can edit an issue. */ + patch: operations["issues/update"]; + }; + "/repos/{owner}/{repo}/issues/{issue_number}/assignees": { + /** Adds up to 10 assignees to an issue. Users already assigned to an issue are not replaced. */ + post: operations["issues/add-assignees"]; + /** Removes one or more assignees from an issue. */ + delete: operations["issues/remove-assignees"]; + }; + "/repos/{owner}/{repo}/issues/{issue_number}/comments": { + /** Issue Comments are ordered by ascending ID. */ + get: operations["issues/list-comments"]; + /** This endpoint triggers [notifications](https://docs.github.com/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. 
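+ *
+ * A minimal sketch, assuming an authenticated `octokit` client; the issue number and body are placeholders:
+ *
+ * @example
+ * await octokit.request("POST /repos/{owner}/{repo}/issues/{issue_number}/comments", {
+ *   owner: "octocat",
+ *   repo: "hello-world",
+ *   issue_number: 1,
+ *   body: "Thanks for the report, looking into it.",
+ * });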
*/ + post: operations["issues/create-comment"]; + }; + "/repos/{owner}/{repo}/issues/{issue_number}/events": { + get: operations["issues/list-events"]; + }; + "/repos/{owner}/{repo}/issues/{issue_number}/labels": { + get: operations["issues/list-labels-on-issue"]; + /** Removes any previous labels and sets the new labels for an issue. */ + put: operations["issues/set-labels"]; + post: operations["issues/add-labels"]; + delete: operations["issues/remove-all-labels"]; + }; + "/repos/{owner}/{repo}/issues/{issue_number}/labels/{name}": { + /** Removes the specified label from the issue, and returns the remaining labels on the issue. This endpoint returns a `404 Not Found` status if the label does not exist. */ + delete: operations["issues/remove-label"]; + }; + "/repos/{owner}/{repo}/issues/{issue_number}/lock": { + /** + * Users with push access can lock an issue or pull request's conversation. + * + * Note that, if you choose not to pass any parameters, you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)." + */ + put: operations["issues/lock"]; + /** Users with push access can unlock an issue's conversation. */ + delete: operations["issues/unlock"]; + }; + "/repos/{owner}/{repo}/issues/{issue_number}/reactions": { + /** List the reactions to an [issue](https://docs.github.com/rest/reference/issues). */ + get: operations["reactions/list-for-issue"]; + /** Create a reaction to an [issue](https://docs.github.com/rest/reference/issues/). A response with an HTTP `200` status means that you already added the reaction type to this issue. */ + post: operations["reactions/create-for-issue"]; + }; + "/repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}": { + /** + * **Note:** You can also specify a repository by `repository_id` using the route `DELETE /repositories/:repository_id/issues/:issue_number/reactions/:reaction_id`. + * + * Delete a reaction to an [issue](https://docs.github.com/rest/reference/issues/). + */ + delete: operations["reactions/delete-for-issue"]; + }; + "/repos/{owner}/{repo}/issues/{issue_number}/timeline": { + get: operations["issues/list-events-for-timeline"]; + }; + "/repos/{owner}/{repo}/keys": { + get: operations["repos/list-deploy-keys"]; + /** You can create a read-only deploy key. */ + post: operations["repos/create-deploy-key"]; + }; + "/repos/{owner}/{repo}/keys/{key_id}": { + get: operations["repos/get-deploy-key"]; + /** Deploy keys are immutable. If you need to update a key, remove the key and create a new one instead. */ + delete: operations["repos/delete-deploy-key"]; + }; + "/repos/{owner}/{repo}/labels": { + get: operations["issues/list-labels-for-repo"]; + post: operations["issues/create-label"]; + }; + "/repos/{owner}/{repo}/labels/{name}": { + get: operations["issues/get-label"]; + delete: operations["issues/delete-label"]; + patch: operations["issues/update-label"]; + }; + "/repos/{owner}/{repo}/languages": { + /** Lists languages for the specified repository. The value shown for each language is the number of bytes of code written in that language. */ + get: operations["repos/list-languages"]; + }; + "/repos/{owner}/{repo}/lfs": { + put: operations["repos/enable-lfs-for-repo"]; + delete: operations["repos/disable-lfs-for-repo"]; + }; + "/repos/{owner}/{repo}/license": { + /** + * This method returns the contents of the repository's license file, if one is detected. 
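+ *
+ * A minimal sketch, assuming an authenticated `octokit` client and placeholder owner/repo:
+ *
+ * @example
+ * const { data } = await octokit.request("GET /repos/{owner}/{repo}/license", {
+ *   owner: "octocat",
+ *   repo: "hello-world",
+ * });
+ * console.log(data.license.spdx_id); // e.g. "MIT"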
+ * + * Similar to [Get repository content](https://docs.github.com/rest/reference/repos#get-repository-content), this method also supports [custom media types](https://docs.github.com/rest/overview/media-types) for retrieving the raw license content or rendered license HTML. + */ + get: operations["licenses/get-for-repo"]; + }; + "/repos/{owner}/{repo}/merge-upstream": { + /** Sync a branch of a forked repository to keep it up-to-date with the upstream repository. */ + post: operations["repos/merge-upstream"]; + }; + "/repos/{owner}/{repo}/merges": { + post: operations["repos/merge"]; + }; + "/repos/{owner}/{repo}/milestones": { + get: operations["issues/list-milestones"]; + post: operations["issues/create-milestone"]; + }; + "/repos/{owner}/{repo}/milestones/{milestone_number}": { + get: operations["issues/get-milestone"]; + delete: operations["issues/delete-milestone"]; + patch: operations["issues/update-milestone"]; + }; + "/repos/{owner}/{repo}/milestones/{milestone_number}/labels": { + get: operations["issues/list-labels-for-milestone"]; + }; + "/repos/{owner}/{repo}/notifications": { + /** List all notifications for the current user. */ + get: operations["activity/list-repo-notifications-for-authenticated-user"]; + /** Marks all notifications in a repository as "read" removes them from the [default view on GitHub](https://github.com/notifications). If the number of notifications is too large to complete in one request, you will receive a `202 Accepted` status and GitHub will run an asynchronous process to mark notifications as "read." To check whether any "unread" notifications remain, you can use the [List repository notifications for the authenticated user](https://docs.github.com/rest/reference/activity#list-repository-notifications-for-the-authenticated-user) endpoint and pass the query parameter `all=false`. */ + put: operations["activity/mark-repo-notifications-as-read"]; + }; + "/repos/{owner}/{repo}/pages": { + get: operations["repos/get-pages"]; + /** Updates information for a GitHub Pages site. For more information, see "[About GitHub Pages](/github/working-with-github-pages/about-github-pages). */ + put: operations["repos/update-information-about-pages-site"]; + /** Configures a GitHub Pages site. For more information, see "[About GitHub Pages](/github/working-with-github-pages/about-github-pages)." */ + post: operations["repos/create-pages-site"]; + delete: operations["repos/delete-pages-site"]; + }; + "/repos/{owner}/{repo}/pages/builds": { + get: operations["repos/list-pages-builds"]; + /** + * You can request that your site be built from the latest revision on the default branch. This has the same effect as pushing a commit to your default branch, but does not require an additional commit. Manually triggering page builds can be helpful when diagnosing build warnings and failures. + * + * Build requests are limited to one concurrent build per repository and one concurrent build per requester. If you request a build while another is still in progress, the second request will be queued until the first completes. + */ + post: operations["repos/request-pages-build"]; + }; + "/repos/{owner}/{repo}/pages/builds/latest": { + get: operations["repos/get-latest-pages-build"]; + }; + "/repos/{owner}/{repo}/pages/builds/{build_id}": { + get: operations["repos/get-pages-build"]; + }; + "/repos/{owner}/{repo}/pages/health": { + /** + * Gets a health check of the DNS settings for the `CNAME` record configured for a repository's GitHub Pages. 
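+ *
+ * A minimal sketch, assuming an authenticated `octokit` client with admin access; it follows the
+ * asynchronous 202/200 behavior described below, so a retry may be needed:
+ *
+ * @example
+ * const { status, data } = await octokit.request("GET /repos/{owner}/{repo}/pages/health", {
+ *   owner: "octocat",
+ *   repo: "octocat.github.io", // placeholder Pages repository
+ * });
+ * if (status === 202) {
+ *   // background check started; call again later for the results
+ * } else {
+ *   console.log(data.domain);
+ * }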
+ * + * The first request to this endpoint returns a `202 Accepted` status and starts an asynchronous background task to get the results for the domain. After the background task completes, subsequent requests to this endpoint return a `200 OK` status with the health check results in the response. + * + * Users must have admin or owner permissions. GitHub Apps must have the `pages:write` and `administration:write` permission to use this endpoint. + */ + get: operations["repos/get-pages-health-check"]; + }; + "/repos/{owner}/{repo}/projects": { + /** Lists the projects in a repository. Returns a `404 Not Found` status if projects are disabled in the repository. If you do not have sufficient privileges to perform this action, a `401 Unauthorized` or `410 Gone` status is returned. */ + get: operations["projects/list-for-repo"]; + /** Creates a repository project board. Returns a `410 Gone` status if projects are disabled in the repository or if the repository does not have existing classic projects. If you do not have sufficient privileges to perform this action, a `401 Unauthorized` or `410 Gone` status is returned. */ + post: operations["projects/create-for-repo"]; + }; + "/repos/{owner}/{repo}/pulls": { + /** Draft pull requests are available in public repositories with GitHub Free and GitHub Free for organizations, GitHub Pro, and legacy per-repository billing plans, and in public and private repositories with GitHub Team and GitHub Enterprise Cloud. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. */ + get: operations["pulls/list"]; + /** + * Draft pull requests are available in public repositories with GitHub Free and GitHub Free for organizations, GitHub Pro, and legacy per-repository billing plans, and in public and private repositories with GitHub Team and GitHub Enterprise Cloud. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * To open or update a pull request in a public repository, you must have write access to the head or the source branch. For organization-owned repositories, you must be a member of the organization that owns the repository to open or update a pull request. + * + * You can create a new pull request. + * + * This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-rate-limits)" for details. + */ + post: operations["pulls/create"]; + }; + "/repos/{owner}/{repo}/pulls/comments": { + /** Lists review comments for all pull requests in a repository. By default, review comments are in ascending order by ID. */ + get: operations["pulls/list-review-comments-for-repo"]; + }; + "/repos/{owner}/{repo}/pulls/comments/{comment_id}": { + /** Provides details for a review comment. */ + get: operations["pulls/get-review-comment"]; + /** Deletes a review comment. */ + delete: operations["pulls/delete-review-comment"]; + /** Enables you to edit a review comment. 
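+ *
+ * A minimal sketch, assuming an authenticated `octokit` client; the comment id and body are placeholders:
+ *
+ * @example
+ * await octokit.request("PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}", {
+ *   owner: "octocat",
+ *   repo: "hello-world",
+ *   comment_id: 1,
+ *   body: "Updated suggestion after the latest push.",
+ * });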
*/ + patch: operations["pulls/update-review-comment"]; + }; + "/repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions": { + /** List the reactions to a [pull request review comment](https://docs.github.com/rest/reference/pulls#review-comments). */ + get: operations["reactions/list-for-pull-request-review-comment"]; + /** Create a reaction to a [pull request review comment](https://docs.github.com/rest/reference/pulls#comments). A response with an HTTP `200` status means that you already added the reaction type to this pull request review comment. */ + post: operations["reactions/create-for-pull-request-review-comment"]; + }; + "/repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}": { + /** + * **Note:** You can also specify a repository by `repository_id` using the route `DELETE /repositories/:repository_id/pulls/comments/:comment_id/reactions/:reaction_id.` + * + * Delete a reaction to a [pull request review comment](https://docs.github.com/rest/reference/pulls#review-comments). + */ + delete: operations["reactions/delete-for-pull-request-comment"]; + }; + "/repos/{owner}/{repo}/pulls/{pull_number}": { + /** + * Draft pull requests are available in public repositories with GitHub Free and GitHub Free for organizations, GitHub Pro, and legacy per-repository billing plans, and in public and private repositories with GitHub Team and GitHub Enterprise Cloud. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Lists details of a pull request by providing its number. + * + * When you get, [create](https://docs.github.com/rest/reference/pulls/#create-a-pull-request), or [edit](https://docs.github.com/rest/reference/pulls#update-a-pull-request) a pull request, GitHub creates a merge commit to test whether the pull request can be automatically merged into the base branch. This test commit is not added to the base branch or the head branch. You can review the status of the test commit using the `mergeable` key. For more information, see "[Checking mergeability of pull requests](https://docs.github.com/rest/guides/getting-started-with-the-git-database-api#checking-mergeability-of-pull-requests)". + * + * The value of the `mergeable` attribute can be `true`, `false`, or `null`. If the value is `null`, then GitHub has started a background job to compute the mergeability. After giving the job time to complete, resubmit the request. When the job finishes, you will see a non-`null` value for the `mergeable` attribute in the response. If `mergeable` is `true`, then `merge_commit_sha` will be the SHA of the _test_ merge commit. + * + * The value of the `merge_commit_sha` attribute changes depending on the state of the pull request. Before merging a pull request, the `merge_commit_sha` attribute holds the SHA of the _test_ merge commit. After merging a pull request, the `merge_commit_sha` attribute changes depending on how you merged the pull request: + * + * * If merged as a [merge commit](https://docs.github.com/articles/about-merge-methods-on-github/), `merge_commit_sha` represents the SHA of the merge commit. + * * If merged via a [squash](https://docs.github.com/articles/about-merge-methods-on-github/#squashing-your-merge-commits), `merge_commit_sha` represents the SHA of the squashed commit on the base branch. 
+ * * If [rebased](https://docs.github.com/articles/about-merge-methods-on-github/#rebasing-and-merging-your-commits), `merge_commit_sha` represents the commit that the base branch was updated to. + * + * Pass the appropriate [media type](https://docs.github.com/rest/overview/media-types/#commits-commit-comparison-and-pull-requests) to fetch diff and patch formats. + */ + get: operations["pulls/get"]; + /** + * Draft pull requests are available in public repositories with GitHub Free and GitHub Free for organizations, GitHub Pro, and legacy per-repository billing plans, and in public and private repositories with GitHub Team and GitHub Enterprise Cloud. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * To open or update a pull request in a public repository, you must have write access to the head or the source branch. For organization-owned repositories, you must be a member of the organization that owns the repository to open or update a pull request. + */ + patch: operations["pulls/update"]; + }; + "/repos/{owner}/{repo}/pulls/{pull_number}/codespaces": { + /** + * Creates a codespace owned by the authenticated user for the specified pull request. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have write access to the `codespaces` repository permission to use this endpoint. + */ + post: operations["codespaces/create-with-pr-for-authenticated-user"]; + }; + "/repos/{owner}/{repo}/pulls/{pull_number}/comments": { + /** Lists all review comments for a pull request. By default, review comments are in ascending order by ID. */ + get: operations["pulls/list-review-comments"]; + /** + * Creates a review comment in the pull request diff. To add a regular comment to a pull request timeline, see "[Create an issue comment](https://docs.github.com/rest/reference/issues#create-an-issue-comment)." We recommend creating a review comment using `line`, `side`, and optionally `start_line` and `start_side` if your comment applies to more than one line in the pull request diff. + * + * The `position` parameter is deprecated. If you use `position`, the `line`, `side`, `start_line`, and `start_side` parameters are not required. + * + * **Note:** The position value equals the number of lines down from the first "@@" hunk header in the file you want to add a comment. The line just below the "@@" line is position 1, the next line is position 2, and so on. The position in the diff continues to increase through lines of whitespace and additional hunks until the beginning of a new file. + * + * This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. + */ + post: operations["pulls/create-review-comment"]; + }; + "/repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies": { + /** + * Creates a reply to a review comment for a pull request. For the `comment_id`, provide the ID of the review comment you are replying to. 
This must be the ID of a _top-level review comment_, not a reply to that comment. Replies to replies are not supported. + * + * This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. + */ + post: operations["pulls/create-reply-for-review-comment"]; + }; + "/repos/{owner}/{repo}/pulls/{pull_number}/commits": { + /** Lists a maximum of 250 commits for a pull request. To receive a complete commit list for pull requests with more than 250 commits, use the [List commits](https://docs.github.com/rest/reference/repos#list-commits) endpoint. */ + get: operations["pulls/list-commits"]; + }; + "/repos/{owner}/{repo}/pulls/{pull_number}/files": { + /** **Note:** Responses include a maximum of 3000 files. The paginated response returns 30 files per page by default. */ + get: operations["pulls/list-files"]; + }; + "/repos/{owner}/{repo}/pulls/{pull_number}/merge": { + get: operations["pulls/check-if-merged"]; + /** This endpoint triggers [notifications](https://docs.github.com/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. */ + put: operations["pulls/merge"]; + }; + "/repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers": { + /** Lists the users or teams whose review is requested for a pull request. Once a requested reviewer submits a review, they are no longer considered a requested reviewer. Their review will instead be returned by the [List reviews for a pull request](https://docs.github.com/rest/pulls/reviews#list-reviews-for-a-pull-request) operation. */ + get: operations["pulls/list-requested-reviewers"]; + /** This endpoint triggers [notifications](https://docs.github.com/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. */ + post: operations["pulls/request-reviewers"]; + delete: operations["pulls/remove-requested-reviewers"]; + }; + "/repos/{owner}/{repo}/pulls/{pull_number}/reviews": { + /** The list of reviews returns in chronological order. */ + get: operations["pulls/list-reviews"]; + /** + * This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. 
See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. + * + * Pull request reviews created in the `PENDING` state do not include the `submitted_at` property in the response. + * + * **Note:** To comment on a specific line in a file, you need to first determine the _position_ of that line in the diff. The GitHub REST API v3 offers the `application/vnd.github.v3.diff` [media type](https://docs.github.com/rest/overview/media-types#commits-commit-comparison-and-pull-requests). To see a pull request diff, add this media type to the `Accept` header of a call to the [single pull request](https://docs.github.com/rest/reference/pulls#get-a-pull-request) endpoint. + * + * The `position` value equals the number of lines down from the first "@@" hunk header in the file you want to add a comment. The line just below the "@@" line is position 1, the next line is position 2, and so on. The position in the diff continues to increase through lines of whitespace and additional hunks until the beginning of a new file. + */ + post: operations["pulls/create-review"]; + }; + "/repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}": { + get: operations["pulls/get-review"]; + /** Update the review summary comment with new text. */ + put: operations["pulls/update-review"]; + delete: operations["pulls/delete-pending-review"]; + }; + "/repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments": { + /** List comments for a specific pull request review. */ + get: operations["pulls/list-comments-for-review"]; + }; + "/repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals": { + /** **Note:** To dismiss a pull request review on a [protected branch](https://docs.github.com/rest/reference/repos#branches), you must be a repository administrator or be included in the list of people or teams who can dismiss pull request reviews. */ + put: operations["pulls/dismiss-review"]; + }; + "/repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events": { + post: operations["pulls/submit-review"]; + }; + "/repos/{owner}/{repo}/pulls/{pull_number}/update-branch": { + /** Updates the pull request branch with the latest upstream changes by merging HEAD from the base branch into the pull request branch. */ + put: operations["pulls/update-branch"]; + }; + "/repos/{owner}/{repo}/readme": { + /** + * Gets the preferred README for a repository. + * + * READMEs support [custom media types](https://docs.github.com/rest/reference/repos#custom-media-types) for retrieving the raw content or rendered HTML. + */ + get: operations["repos/get-readme"]; + }; + "/repos/{owner}/{repo}/readme/{dir}": { + /** + * Gets the README from a repository directory. + * + * READMEs support [custom media types](https://docs.github.com/rest/reference/repos#custom-media-types) for retrieving the raw content or rendered HTML. + */ + get: operations["repos/get-readme-in-directory"]; + }; + "/repos/{owner}/{repo}/releases": { + /** + * This returns a list of releases, which does not include regular Git tags that have not been associated with a release. To get a list of Git tags, use the [Repository Tags API](https://docs.github.com/rest/reference/repos#list-repository-tags). + * + * Information about published releases are available to everyone. 
Only users with push access will receive listings for draft releases. + */ + get: operations["repos/list-releases"]; + /** + * Users with push access to the repository can create a release. + * + * This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. + */ + post: operations["repos/create-release"]; + }; + "/repos/{owner}/{repo}/releases/assets/{asset_id}": { + /** To download the asset's binary content, set the `Accept` header of the request to [`application/octet-stream`](https://docs.github.com/rest/overview/media-types). The API will either redirect the client to the location, or stream it directly if possible. API clients should handle both a `200` or `302` response. */ + get: operations["repos/get-release-asset"]; + delete: operations["repos/delete-release-asset"]; + /** Users with push access to the repository can edit a release asset. */ + patch: operations["repos/update-release-asset"]; + }; + "/repos/{owner}/{repo}/releases/generate-notes": { + /** Generate a name and body describing a [release](https://docs.github.com/rest/reference/repos#releases). The body content will be markdown formatted and contain information like the changes since last release and users who contributed. The generated release notes are not saved anywhere. They are intended to be generated and used when creating a new release. */ + post: operations["repos/generate-release-notes"]; + }; + "/repos/{owner}/{repo}/releases/latest": { + /** + * View the latest published full release for the repository. + * + * The latest release is the most recent non-prerelease, non-draft release, sorted by the `created_at` attribute. The `created_at` attribute is the date of the commit used for the release, and not the date when the release was drafted or published. + */ + get: operations["repos/get-latest-release"]; + }; + "/repos/{owner}/{repo}/releases/tags/{tag}": { + /** Get a published release with the specified tag. */ + get: operations["repos/get-release-by-tag"]; + }; + "/repos/{owner}/{repo}/releases/{release_id}": { + /** **Note:** This returns an `upload_url` key corresponding to the endpoint for uploading release assets. This key is a [hypermedia resource](https://docs.github.com/rest/overview/resources-in-the-rest-api#hypermedia). */ + get: operations["repos/get-release"]; + /** Users with push access to the repository can delete a release. */ + delete: operations["repos/delete-release"]; + /** Users with push access to the repository can edit a release. */ + patch: operations["repos/update-release"]; + }; + "/repos/{owner}/{repo}/releases/{release_id}/assets": { + get: operations["repos/list-release-assets"]; + /** + * This endpoint makes use of [a Hypermedia relation](https://docs.github.com/rest/overview/resources-in-the-rest-api#hypermedia) to determine which URL to access. The endpoint you call to upload release assets is specific to your release. Use the `upload_url` returned in + * the response of the [Create a release endpoint](https://docs.github.com/rest/reference/repos#create-a-release) to upload a release asset. 
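+ *
+ * One possible sketch (not the only approach), assuming an authenticated `octokit` client; it
+ * follows the raw-binary body and `Content-Type` requirements described below, and the file
+ * name, release id, and upload-host override are placeholders/assumptions:
+ *
+ * @example
+ * import { readFile } from "node:fs/promises";
+ * const data = await readFile("dist/app.zip");
+ * await octokit.request("POST /repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}", {
+ *   baseUrl: "https://uploads.github.com", // asset uploads go to the upload host, not api.github.com
+ *   owner: "octocat",
+ *   repo: "hello-world",
+ *   release_id: 1,
+ *   name: "app.zip",
+ *   headers: { "content-type": "application/zip" },
+ *   data,
+ * });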
+ * + * You need to use an HTTP client which supports [SNI](http://en.wikipedia.org/wiki/Server_Name_Indication) to make calls to this endpoint. + * + * Most libraries will set the required `Content-Length` header automatically. Use the required `Content-Type` header to provide the media type of the asset. For a list of media types, see [Media Types](https://www.iana.org/assignments/media-types/media-types.xhtml). For example: + * + * `application/zip` + * + * GitHub expects the asset data in its raw binary form, rather than JSON. You will send the raw binary content of the asset as the request body. Everything else about the endpoint is the same as the rest of the API. For example, + * you'll still need to pass your authentication to be able to upload an asset. + * + * When an upstream failure occurs, you will receive a `502 Bad Gateway` status. This may leave an empty asset with a state of `starter`. It can be safely deleted. + * + * **Notes:** + * * GitHub renames asset filenames that have special characters, non-alphanumeric characters, and leading or trailing periods. The "[List assets for a release](https://docs.github.com/rest/reference/repos#list-assets-for-a-release)" + * endpoint lists the renamed filenames. For more information and help, contact [GitHub Support](https://support.github.com/contact?tags=dotcom-rest-api). + * * If you upload an asset with the same filename as another uploaded asset, you'll receive an error and must delete the old file before you can re-upload the new asset. + */ + post: operations["repos/upload-release-asset"]; + }; + "/repos/{owner}/{repo}/releases/{release_id}/reactions": { + /** List the reactions to a [release](https://docs.github.com/rest/reference/repos#releases). */ + get: operations["reactions/list-for-release"]; + /** Create a reaction to a [release](https://docs.github.com/rest/reference/repos#releases). A response with a `Status: 200 OK` means that you already added the reaction type to this release. */ + post: operations["reactions/create-for-release"]; + }; + "/repos/{owner}/{repo}/releases/{release_id}/reactions/{reaction_id}": { + /** + * **Note:** You can also specify a repository by `repository_id` using the route `DELETE delete /repositories/:repository_id/releases/:release_id/reactions/:reaction_id`. + * + * Delete a reaction to a [release](https://docs.github.com/rest/reference/repos#releases). + */ + delete: operations["reactions/delete-for-release"]; + }; + "/repos/{owner}/{repo}/secret-scanning/alerts": { + /** + * Lists secret scanning alerts for an eligible repository, from newest to oldest. + * To use this endpoint, you must be an administrator for the repository or for the organization that owns the repository, and you must use a personal access token with the `repo` scope or `security_events` scope. + * For public repositories, you may instead use the `public_repo` scope. + * + * GitHub Apps must have the `secret_scanning_alerts` read permission to use this endpoint. + */ + get: operations["secret-scanning/list-alerts-for-repo"]; + }; + "/repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}": { + /** + * Gets a single secret scanning alert detected in an eligible repository. + * To use this endpoint, you must be an administrator for the repository or for the organization that owns the repository, and you must use a personal access token with the `repo` scope or `security_events` scope. + * For public repositories, you may instead use the `public_repo` scope. 
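+ *
+ * A minimal sketch, assuming an authenticated `octokit` client with one of the scopes noted above;
+ * the alert number is a placeholder:
+ *
+ * @example
+ * const { data: alert } = await octokit.request(
+ *   "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}",
+ *   { owner: "octocat", repo: "hello-world", alert_number: 42 }
+ * );
+ * console.log(alert.state, alert.secret_type);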
+ * + * GitHub Apps must have the `secret_scanning_alerts` read permission to use this endpoint. + */ + get: operations["secret-scanning/get-alert"]; + /** + * Updates the status of a secret scanning alert in an eligible repository. + * To use this endpoint, you must be an administrator for the repository or for the organization that owns the repository, and you must use a personal access token with the `repo` scope or `security_events` scope. + * For public repositories, you may instead use the `public_repo` scope. + * + * GitHub Apps must have the `secret_scanning_alerts` write permission to use this endpoint. + */ + patch: operations["secret-scanning/update-alert"]; + }; + "/repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations": { + /** + * Lists all locations for a given secret scanning alert for an eligible repository. + * To use this endpoint, you must be an administrator for the repository or for the organization that owns the repository, and you must use a personal access token with the `repo` scope or `security_events` scope. + * For public repositories, you may instead use the `public_repo` scope. + * + * GitHub Apps must have the `secret_scanning_alerts` read permission to use this endpoint. + */ + get: operations["secret-scanning/list-locations-for-alert"]; + }; + "/repos/{owner}/{repo}/stargazers": { + /** + * Lists the people that have starred the repository. + * + * You can also find out _when_ stars were created by passing the following custom [media type](https://docs.github.com/rest/overview/media-types/) via the `Accept` header: + */ + get: operations["activity/list-stargazers-for-repo"]; + }; + "/repos/{owner}/{repo}/stats/code_frequency": { + /** Returns a weekly aggregate of the number of additions and deletions pushed to a repository. */ + get: operations["repos/get-code-frequency-stats"]; + }; + "/repos/{owner}/{repo}/stats/commit_activity": { + /** Returns the last year of commit activity grouped by week. The `days` array is a group of commits per day, starting on `Sunday`. */ + get: operations["repos/get-commit-activity-stats"]; + }; + "/repos/{owner}/{repo}/stats/contributors": { + /** + * Returns the `total` number of commits authored by the contributor. In addition, the response includes a Weekly Hash (`weeks` array) with the following information: + * + * * `w` - Start of the week, given as a [Unix timestamp](http://en.wikipedia.org/wiki/Unix_time). + * * `a` - Number of additions + * * `d` - Number of deletions + * * `c` - Number of commits + */ + get: operations["repos/get-contributors-stats"]; + }; + "/repos/{owner}/{repo}/stats/participation": { + /** + * Returns the total commit counts for the `owner` and total commit counts in `all`. `all` is everyone combined, including the `owner` in the last 52 weeks. If you'd like to get the commit counts for non-owners, you can subtract `owner` from `all`. + * + * The array order is oldest week (index 0) to most recent week. + */ + get: operations["repos/get-participation-stats"]; + }; + "/repos/{owner}/{repo}/stats/punch_card": { + /** + * Each array contains the day number, hour number, and number of commits: + * + * * `0-6`: Sunday - Saturday + * * `0-23`: Hour of day + * * Number of commits + * + * For example, `[2, 14, 25]` indicates that there were 25 total commits, during the 2:00pm hour on Tuesdays. All times are based on the time zone of individual commits. 
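As a small illustrative sketch of consuming the punch-card format described above (not part of the spec), assuming @octokit/core, a hypothetical repository, and a `GITHUB_TOKEN` environment variable; note that the statistics endpoints can return `202 Accepted` while the data is still being generated:

```typescript
// Sketch: read the punch card ([day, hour, commits] triples) and report the
// busiest slot. Owner and repo are hypothetical.
import { Octokit } from "@octokit/core";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

const { status, data } = await octokit.request(
  "GET /repos/{owner}/{repo}/stats/punch_card",
  { owner: "octocat", repo: "Hello-World" }
);

if (status === 202) {
  console.log("Stats are still being generated; retry shortly.");
} else {
  const days = ["Sun", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat"];
  const busiest = data.reduce((max, cur) => (cur[2] > max[2] ? cur : max));
  console.log(`Busiest: ${days[busiest[0]]} ${busiest[1]}:00, ${busiest[2]} commits`);
}
```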
+ */ + get: operations["repos/get-punch-card-stats"]; + }; + "/repos/{owner}/{repo}/statuses/{sha}": { + /** + * Users with push access in a repository can create commit statuses for a given SHA. + * + * Note: there is a limit of 1000 statuses per `sha` and `context` within a repository. Attempts to create more than 1000 statuses will result in a validation error. + */ + post: operations["repos/create-commit-status"]; + }; + "/repos/{owner}/{repo}/subscribers": { + /** Lists the people watching the specified repository. */ + get: operations["activity/list-watchers-for-repo"]; + }; + "/repos/{owner}/{repo}/subscription": { + get: operations["activity/get-repo-subscription"]; + /** If you would like to watch a repository, set `subscribed` to `true`. If you would like to ignore notifications made within a repository, set `ignored` to `true`. If you would like to stop watching a repository, [delete the repository's subscription](https://docs.github.com/rest/reference/activity#delete-a-repository-subscription) completely. */ + put: operations["activity/set-repo-subscription"]; + /** This endpoint should only be used to stop watching a repository. To control whether or not you wish to receive notifications from a repository, [set the repository's subscription manually](https://docs.github.com/rest/reference/activity#set-a-repository-subscription). */ + delete: operations["activity/delete-repo-subscription"]; + }; + "/repos/{owner}/{repo}/tags": { + get: operations["repos/list-tags"]; + }; + "/repos/{owner}/{repo}/tags/protection": { + /** + * This returns the tag protection states of a repository. + * + * This information is only available to repository administrators. + */ + get: operations["repos/list-tag-protection"]; + /** + * This creates a tag protection state for a repository. + * This endpoint is only available to repository administrators. + */ + post: operations["repos/create-tag-protection"]; + }; + "/repos/{owner}/{repo}/tags/protection/{tag_protection_id}": { + /** + * This deletes a tag protection state for a repository. + * This endpoint is only available to repository administrators. + */ + delete: operations["repos/delete-tag-protection"]; + }; + "/repos/{owner}/{repo}/tarball/{ref}": { + /** + * Gets a redirect URL to download a tar archive for a repository. If you omit `:ref`, the repository’s default branch (usually + * `master`) will be used. Please make sure your HTTP framework is configured to follow redirects or you will need to use + * the `Location` header to make a second `GET` request. + * **Note**: For private repositories, these links are temporary and expire after five minutes. + */ + get: operations["repos/download-tarball-archive"]; + }; + "/repos/{owner}/{repo}/teams": { + get: operations["repos/list-teams"]; + }; + "/repos/{owner}/{repo}/topics": { + get: operations["repos/get-all-topics"]; + put: operations["repos/replace-all-topics"]; + }; + "/repos/{owner}/{repo}/traffic/clones": { + /** Get the total number of clones and breakdown per day or week for the last 14 days. Timestamps are aligned to UTC midnight of the beginning of the day or week. Week begins on Monday. */ + get: operations["repos/get-clones"]; + }; + "/repos/{owner}/{repo}/traffic/popular/paths": { + /** Get the top 10 popular contents over the last 14 days. */ + get: operations["repos/get-top-paths"]; + }; + "/repos/{owner}/{repo}/traffic/popular/referrers": { + /** Get the top 10 referrers over the last 14 days. 
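For the tarball/zipball redirect behaviour described a few entries above, a minimal sketch under the same assumptions (hypothetical repository and ref, `GITHUB_TOKEN` set); Node's global `fetch` follows the `Location` redirect automatically:

```typescript
// Sketch: download a repository tarball; fetch follows the 302 redirect
// to the temporary archive URL for us. Repository and ref are hypothetical.
import { writeFile } from "node:fs/promises";

const res = await fetch(
  "https://api.github.com/repos/octocat/Hello-World/tarball/main",
  { headers: { authorization: `Bearer ${process.env.GITHUB_TOKEN}` } }
);

await writeFile("hello-world.tar.gz", Buffer.from(await res.arrayBuffer()));
console.log("final URL after redirect:", res.url);
```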
*/ + get: operations["repos/get-top-referrers"]; + }; + "/repos/{owner}/{repo}/traffic/views": { + /** Get the total number of views and breakdown per day or week for the last 14 days. Timestamps are aligned to UTC midnight of the beginning of the day or week. Week begins on Monday. */ + get: operations["repos/get-views"]; + }; + "/repos/{owner}/{repo}/transfer": { + /** A transfer request will need to be accepted by the new owner when transferring a personal repository to another user. The response will contain the original `owner`, and the transfer will continue asynchronously. For more details on the requirements to transfer personal and organization-owned repositories, see [about repository transfers](https://docs.github.com/articles/about-repository-transfers/). */ + post: operations["repos/transfer"]; + }; + "/repos/{owner}/{repo}/vulnerability-alerts": { + /** Shows whether dependency alerts are enabled or disabled for a repository. The authenticated user must have admin read access to the repository. For more information, see "[About security alerts for vulnerable dependencies](https://docs.github.com/en/articles/about-security-alerts-for-vulnerable-dependencies)". */ + get: operations["repos/check-vulnerability-alerts"]; + /** Enables dependency alerts and the dependency graph for a repository. The authenticated user must have admin access to the repository. For more information, see "[About security alerts for vulnerable dependencies](https://docs.github.com/en/articles/about-security-alerts-for-vulnerable-dependencies)". */ + put: operations["repos/enable-vulnerability-alerts"]; + /** Disables dependency alerts and the dependency graph for a repository. The authenticated user must have admin access to the repository. For more information, see "[About security alerts for vulnerable dependencies](https://docs.github.com/en/articles/about-security-alerts-for-vulnerable-dependencies)". */ + delete: operations["repos/disable-vulnerability-alerts"]; + }; + "/repos/{owner}/{repo}/zipball/{ref}": { + /** + * Gets a redirect URL to download a zip archive for a repository. If you omit `:ref`, the repository’s default branch (usually + * `master`) will be used. Please make sure your HTTP framework is configured to follow redirects or you will need to use + * the `Location` header to make a second `GET` request. + * **Note**: For private repositories, these links are temporary and expire after five minutes. + */ + get: operations["repos/download-zipball-archive"]; + }; + "/repos/{template_owner}/{template_repo}/generate": { + /** + * Creates a new repository using a repository template. Use the `template_owner` and `template_repo` route parameters to specify the repository to use as the template. The authenticated user must own or be a member of an organization that owns the repository. To check if a repository is available to use as a template, get the repository's information using the [Get a repository](https://docs.github.com/rest/reference/repos#get-a-repository) endpoint and check that the `is_template` key is `true`. + * + * **OAuth scope requirements** + * + * When using [OAuth](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/), authorizations must include: + * + * * `public_repo` scope or `repo` scope to create a public repository. Note: For GitHub AE, use `repo` scope to create an internal repository. 
+ * * `repo` scope to create a private repository + */ + post: operations["repos/create-using-template"]; + }; + "/repositories": { + /** + * Lists all public repositories in the order that they were created. + * + * Note: + * - For GitHub Enterprise Server, this endpoint will only list repositories available to all users on the enterprise. + * - Pagination is powered exclusively by the `since` parameter. Use the [Link header](https://docs.github.com/rest/overview/resources-in-the-rest-api#link-header) to get the URL for the next page of repositories. + */ + get: operations["repos/list-public"]; + }; + "/repositories/{repository_id}/environments/{environment_name}/secrets": { + /** Lists all secrets available in an environment without revealing their encrypted values. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `secrets` repository permission to use this endpoint. */ + get: operations["actions/list-environment-secrets"]; + }; + "/repositories/{repository_id}/environments/{environment_name}/secrets/public-key": { + /** Get the public key for an environment, which you need to encrypt environment secrets. You need to encrypt a secret before you can create or update secrets. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `secrets` repository permission to use this endpoint. */ + get: operations["actions/get-environment-public-key"]; + }; + "/repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}": { + /** Gets a single environment secret without revealing its encrypted value. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `secrets` repository permission to use this endpoint. */ + get: operations["actions/get-environment-secret"]; + /** + * Creates or updates an environment secret with an encrypted value. Encrypt your secret using + * [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages). You must authenticate using an access + * token with the `repo` scope to use this endpoint. GitHub Apps must have the `secrets` repository permission to use + * this endpoint. + * + * #### Example encrypting a secret using Node.js + * + * Encrypt your secret using the [tweetsodium](https://github.com/github/tweetsodium) library. + * + * ``` + * const sodium = require('tweetsodium'); + * + * const key = "base64-encoded-public-key"; + * const value = "plain-text-secret"; + * + * // Convert the message and key to Uint8Array's (Buffer implements that interface) + * const messageBytes = Buffer.from(value); + * const keyBytes = Buffer.from(key, 'base64'); + * + * // Encrypt using LibSodium. + * const encryptedBytes = sodium.seal(messageBytes, keyBytes); + * + * // Base64 the encrypted secret + * const encrypted = Buffer.from(encryptedBytes).toString('base64'); + * + * console.log(encrypted); + * ``` + * + * + * #### Example encrypting a secret using Python + * + * Encrypt your secret using [pynacl](https://pynacl.readthedocs.io/en/latest/public/#nacl-public-sealedbox) with Python 3. 
+ * + * ``` + * from base64 import b64encode + * from nacl import encoding, public + * + * def encrypt(public_key: str, secret_value: str) -> str: + * """Encrypt a Unicode string using the public key.""" + * public_key = public.PublicKey(public_key.encode("utf-8"), encoding.Base64Encoder()) + * sealed_box = public.SealedBox(public_key) + * encrypted = sealed_box.encrypt(secret_value.encode("utf-8")) + * return b64encode(encrypted).decode("utf-8") + * ``` + * + * #### Example encrypting a secret using C# + * + * Encrypt your secret using the [Sodium.Core](https://www.nuget.org/packages/Sodium.Core/) package. + * + * ``` + * var secretValue = System.Text.Encoding.UTF8.GetBytes("mySecret"); + * var publicKey = Convert.FromBase64String("2Sg8iYjAxxmI2LvUXpJjkYrMxURPc8r+dB7TJyvvcCU="); + * + * var sealedPublicKeyBox = Sodium.SealedPublicKeyBox.Create(secretValue, publicKey); + * + * Console.WriteLine(Convert.ToBase64String(sealedPublicKeyBox)); + * ``` + * + * #### Example encrypting a secret using Ruby + * + * Encrypt your secret using the [rbnacl](https://github.com/RubyCrypto/rbnacl) gem. + * + * ```ruby + * require "rbnacl" + * require "base64" + * + * key = Base64.decode64("+ZYvJDZMHUfBkJdyq5Zm9SKqeuBQ4sj+6sfjlH4CgG0=") + * public_key = RbNaCl::PublicKey.new(key) + * + * box = RbNaCl::Boxes::Sealed.from_public_key(public_key) + * encrypted_secret = box.encrypt("my_secret") + * + * # Print the base64 encoded secret + * puts Base64.strict_encode64(encrypted_secret) + * ``` + */ + put: operations["actions/create-or-update-environment-secret"]; + /** Deletes a secret in an environment using the secret name. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `secrets` repository permission to use this endpoint. */ + delete: operations["actions/delete-environment-secret"]; + }; + "/scim/v2/enterprises/{enterprise}/Groups": { + /** **Note:** The SCIM API endpoints for enterprise accounts are currently in beta and are subject to change. */ + get: operations["enterprise-admin/list-provisioned-groups-enterprise"]; + /** + * **Note:** The SCIM API endpoints for enterprise accounts are currently in beta and are subject to change. + * + * Provision an enterprise group, and invite users to the group. This sends invitation emails to the email address of the invited users to join the GitHub organization that the SCIM group corresponds to. + */ + post: operations["enterprise-admin/provision-and-invite-enterprise-group"]; + }; + "/scim/v2/enterprises/{enterprise}/Groups/{scim_group_id}": { + /** **Note:** The SCIM API endpoints for enterprise accounts are currently in beta and are subject to change. */ + get: operations["enterprise-admin/get-provisioning-information-for-enterprise-group"]; + /** + * **Note:** The SCIM API endpoints for enterprise accounts are currently in beta and are subject to change. + * + * Replaces an existing provisioned group’s information. You must provide all the information required for the group as if you were provisioning it for the first time. Any existing group information that you don't provide will be removed, including group membership. If you want to only update a specific attribute, use the [Update an attribute for a SCIM enterprise group](#update-an-attribute-for-a-scim-enterprise-group) endpoint instead. + */ + put: operations["enterprise-admin/set-information-for-provisioned-enterprise-group"]; + /** **Note:** The SCIM API endpoints for enterprise accounts are currently in beta and are subject to change. 
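Tying the encryption examples above to the actual environment-secret API calls, a minimal Node.js-flavoured sketch using @octokit/core and tweetsodium; the repository ID, environment name, and secret name are hypothetical, and `GITHUB_TOKEN` is assumed to be set:

```typescript
// Sketch: fetch the environment public key, seal the secret, then PUT it.
import { Octokit } from "@octokit/core";
import sodium from "tweetsodium";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

// 1. Get the environment public key ("actions/get-environment-public-key").
const { data: pub } = await octokit.request(
  "GET /repositories/{repository_id}/environments/{environment_name}/secrets/public-key",
  { repository_id: 1296269, environment_name: "production" } // hypothetical values
);

// 2. Seal the plain-text value with the public key (LibSodium sealed box).
const encrypted = Buffer.from(
  sodium.seal(Buffer.from("plain-text-secret"), Buffer.from(pub.key, "base64"))
).toString("base64");

// 3. Create or update the secret ("actions/create-or-update-environment-secret").
await octokit.request(
  "PUT /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}",
  {
    repository_id: 1296269,
    environment_name: "production",
    secret_name: "MY_SECRET", // hypothetical secret name
    encrypted_value: encrypted,
    key_id: pub.key_id,
  }
);
```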
*/ + delete: operations["enterprise-admin/delete-scim-group-from-enterprise"]; + /** + * **Note:** The SCIM API endpoints for enterprise accounts are currently in beta and are subject to change. + * + * Allows you to change a provisioned group’s individual attributes. To change a group’s values, you must provide a specific Operations JSON format that contains at least one of the add, remove, or replace operations. For examples and more information on the SCIM operations format, see the [SCIM specification](https://tools.ietf.org/html/rfc7644#section-3.5.2). + */ + patch: operations["enterprise-admin/update-attribute-for-enterprise-group"]; + }; + "/scim/v2/enterprises/{enterprise}/Users": { + /** + * **Note:** The SCIM API endpoints for enterprise accounts are currently in beta and are subject to change. + * + * Retrieves a paginated list of all provisioned enterprise members, including pending invitations. + * + * When a user with a SAML-provisioned external identity leaves (or is removed from) an enterprise, the account's metadata is immediately removed. However, the returned list of user accounts might not always match the organization or enterprise member list you see on GitHub. This can happen in certain cases where an external identity associated with an organization will not match an organization member: + * - When a user with a SCIM-provisioned external identity is removed from an enterprise, the account's metadata is preserved to allow the user to re-join the organization in the future. + * - When inviting a user to join an organization, you can expect to see their external identity in the results before they accept the invitation, or if the invitation is cancelled (or never accepted). + * - When a user is invited over SCIM, an external identity is created that matches with the invitee's email address. However, this identity is only linked to a user account when the user accepts the invitation by going through SAML SSO. + * + * The returned list of external identities can include an entry for a `null` user. These are unlinked SAML identities that are created when a user goes through the following Single Sign-On (SSO) process but does not sign in to their GitHub account after completing SSO: + * + * 1. The user is granted access by the IdP and is not a member of the GitHub enterprise. + * + * 1. The user attempts to access the GitHub enterprise and initiates the SAML SSO process, and is not currently signed in to their GitHub account. + * + * 1. After successfully authenticating with the SAML SSO IdP, the `null` external identity entry is created and the user is prompted to sign in to their GitHub account: + * - If the user signs in, their GitHub account is linked to this entry. + * - If the user does not sign in (or does not create a new account when prompted), they are not added to the GitHub enterprise, and the external identity `null` entry remains in place. + */ + get: operations["enterprise-admin/list-provisioned-identities-enterprise"]; + /** + * **Note:** The SCIM API endpoints for enterprise accounts are currently in beta and are subject to change. + * + * Provision enterprise membership for a user, and send organization invitation emails to the email address. + * + * You can optionally include the groups a user will be invited to join. If you do not provide a list of `groups`, the user is provisioned for the enterprise, but no organization invitation emails will be sent. 
+ */ + post: operations["enterprise-admin/provision-and-invite-enterprise-user"]; + }; + "/scim/v2/enterprises/{enterprise}/Users/{scim_user_id}": { + /** **Note:** The SCIM API endpoints for enterprise accounts are currently in beta and are subject to change. */ + get: operations["enterprise-admin/get-provisioning-information-for-enterprise-user"]; + /** + * **Note:** The SCIM API endpoints for enterprise accounts are currently in beta and are subject to change. + * + * Replaces an existing provisioned user's information. You must provide all the information required for the user as if you were provisioning them for the first time. Any existing user information that you don't provide will be removed. If you want to only update a specific attribute, use the [Update an attribute for a SCIM user](#update-an-attribute-for-an-enterprise-scim-user) endpoint instead. + * + * You must at least provide the required values for the user: `userName`, `name`, and `emails`. + * + * **Warning:** Setting `active: false` removes the user from the enterprise, deletes the external identity, and deletes the associated `{scim_user_id}`. + */ + put: operations["enterprise-admin/set-information-for-provisioned-enterprise-user"]; + /** **Note:** The SCIM API endpoints for enterprise accounts are currently in beta and are subject to change. */ + delete: operations["enterprise-admin/delete-user-from-enterprise"]; + /** + * **Note:** The SCIM API endpoints for enterprise accounts are currently in beta and are subject to change. + * + * Allows you to change a provisioned user's individual attributes. To change a user's values, you must provide a specific `Operations` JSON format that contains at least one of the `add`, `remove`, or `replace` operations. For examples and more information on the SCIM operations format, see the [SCIM specification](https://tools.ietf.org/html/rfc7644#section-3.5.2). + * + * **Note:** Complicated SCIM `path` selectors that include filters are not supported. For example, a `path` selector defined as `"path": "emails[type eq \"work\"]"` will not work. + * + * **Warning:** If you set `active:false` using the `replace` operation (as shown in the JSON example below), it removes the user from the enterprise, deletes the external identity, and deletes the associated `:scim_user_id`. + * + * ``` + * { + * "Operations":[{ + * "op":"replace", + * "value":{ + * "active":false + * } + * }] + * } + * ``` + */ + patch: operations["enterprise-admin/update-attribute-for-enterprise-user"]; + }; + "/scim/v2/organizations/{org}/Users": { + /** + * Retrieves a paginated list of all provisioned organization members, including pending invitations. If you provide the `filter` parameter, the resources for all matching provisioned members are returned. + * + * When a user with a SAML-provisioned external identity leaves (or is removed from) an organization, the account's metadata is immediately removed. However, the returned list of user accounts might not always match the organization or enterprise member list you see on GitHub. This can happen in certain cases where an external identity associated with an organization will not match an organization member: + * - When a user with a SCIM-provisioned external identity is removed from an organization, the account's metadata is preserved to allow the user to re-join the organization in the future.
+ * - When inviting a user to join an organization, you can expect to see their external identity in the results before they accept the invitation, or if the invitation is cancelled (or never accepted). + * - When a user is invited over SCIM, an external identity is created that matches with the invitee's email address. However, this identity is only linked to a user account when the user accepts the invitation by going through SAML SSO. + * + * The returned list of external identities can include an entry for a `null` user. These are unlinked SAML identities that are created when a user goes through the following Single Sign-On (SSO) process but does not sign in to their GitHub account after completing SSO: + * + * 1. The user is granted access by the IdP and is not a member of the GitHub organization. + * + * 1. The user attempts to access the GitHub organization and initiates the SAML SSO process, and is not currently signed in to their GitHub account. + * + * 1. After successfully authenticating with the SAML SSO IdP, the `null` external identity entry is created and the user is prompted to sign in to their GitHub account: + * - If the user signs in, their GitHub account is linked to this entry. + * - If the user does not sign in (or does not create a new account when prompted), they are not added to the GitHub organization, and the external identity `null` entry remains in place. + */ + get: operations["scim/list-provisioned-identities"]; + /** Provision organization membership for a user, and send an activation email to the email address. */ + post: operations["scim/provision-and-invite-user"]; + }; + "/scim/v2/organizations/{org}/Users/{scim_user_id}": { + get: operations["scim/get-provisioning-information-for-user"]; + /** + * Replaces an existing provisioned user's information. You must provide all the information required for the user as if you were provisioning them for the first time. Any existing user information that you don't provide will be removed. If you want to only update a specific attribute, use the [Update an attribute for a SCIM user](https://docs.github.com/rest/reference/scim#update-an-attribute-for-a-scim-user) endpoint instead. + * + * You must at least provide the required values for the user: `userName`, `name`, and `emails`. + * + * **Warning:** Setting `active: false` removes the user from the organization, deletes the external identity, and deletes the associated `{scim_user_id}`. + */ + put: operations["scim/set-information-for-provisioned-user"]; + delete: operations["scim/delete-user-from-org"]; + /** + * Allows you to change a provisioned user's individual attributes. To change a user's values, you must provide a specific `Operations` JSON format that contains at least one of the `add`, `remove`, or `replace` operations. For examples and more information on the SCIM operations format, see the [SCIM specification](https://tools.ietf.org/html/rfc7644#section-3.5.2). + * + * **Note:** Complicated SCIM `path` selectors that include filters are not supported. For example, a `path` selector defined as `"path": "emails[type eq \"work\"]"` will not work. + * + * **Warning:** If you set `active:false` using the `replace` operation (as shown in the JSON example below), it removes the user from the organization, deletes the external identity, and deletes the associated `:scim_user_id`. 
+ * + * ``` + * { + * "Operations":[{ + * "op":"replace", + * "value":{ + * "active":false + * } + * }] + * } + * ``` + */ + patch: operations["scim/update-attribute-for-user"]; + }; + "/search/code": { + /** + * Searches for query terms inside of a file. This method returns up to 100 results [per page](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination). + * + * When searching for code, you can get text match metadata for the file **content** and file **path** fields when you pass the `text-match` media type. For more details about how to receive highlighted search results, see [Text match metadata](https://docs.github.com/rest/reference/search#text-match-metadata). + * + * For example, if you want to find the definition of the `addClass` function inside the [jQuery](https://github.com/jquery/jquery) repository, your query would look something like this: + * + * `q=addClass+in:file+language:js+repo:jquery/jquery` + * + * This query searches for the keyword `addClass` within a file's contents. The query limits the search to files where the language is JavaScript in the `jquery/jquery` repository. + * + * #### Considerations for code search + * + * Due to the complexity of searching code, there are a few restrictions on how searches are performed: + * + * * Only the _default branch_ is considered. In most cases, this will be the `master` branch. + * * Only files smaller than 384 KB are searchable. + * * You must always include at least one search term when searching source code. For example, searching for [`language:go`](https://github.com/search?utf8=%E2%9C%93&q=language%3Ago&type=Code) is not valid, while [`amazing + * language:go`](https://github.com/search?utf8=%E2%9C%93&q=amazing+language%3Ago&type=Code) is. + */ + get: operations["search/code"]; + }; + "/search/commits": { + /** + * Find commits via various criteria on the default branch (usually `master`). This method returns up to 100 results [per page](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination). + * + * When searching for commits, you can get text match metadata for the **message** field when you provide the `text-match` media type. For more details about how to receive highlighted search results, see [Text match + * metadata](https://docs.github.com/rest/reference/search#text-match-metadata). + * + * For example, if you want to find commits related to CSS in the [octocat/Spoon-Knife](https://github.com/octocat/Spoon-Knife) repository, your query would look something like this: + * + * `q=repo:octocat/Spoon-Knife+css` + */ + get: operations["search/commits"]; + }; + "/search/issues": { + /** + * Find issues by state and keyword. This method returns up to 100 results [per page](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination). + * + * When searching for issues, you can get text match metadata for the issue **title**, issue **body**, and issue **comment body** fields when you pass the `text-match` media type. For more details about how to receive highlighted + * search results, see [Text match metadata](https://docs.github.com/rest/reference/search#text-match-metadata). + * + * For example, if you want to find the oldest unresolved Python bugs on Windows, your query might look something like this: + * + * `q=windows+label:bug+language:python+state:open&sort=created&order=asc` + * + * This query searches for the keyword `windows`, within any open issue that is labeled as `bug`. The search runs across repositories whose primary language is Python.
The results are sorted by creation date in ascending order, which means the oldest issues appear first in the search results. + * + * **Note:** For [user-to-server](https://docs.github.com/developers/apps/identifying-and-authorizing-users-for-github-apps#user-to-server-requests) GitHub App requests, you can't retrieve a combination of issues and pull requests in a single query. Requests that don't include the `is:issue` or `is:pull-request` qualifier will receive an HTTP `422 Unprocessable Entity` response. To get results for both issues and pull requests, you must send separate queries for issues and pull requests. For more information about the `is` qualifier, see "[Searching only issues or pull requests](https://docs.github.com/github/searching-for-information-on-github/searching-issues-and-pull-requests#search-only-issues-or-pull-requests)." + */ + get: operations["search/issues-and-pull-requests"]; + }; + "/search/labels": { + /** + * Find labels in a repository with names or descriptions that match search keywords. Returns up to 100 results [per page](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination). + * + * When searching for labels, you can get text match metadata for the label **name** and **description** fields when you pass the `text-match` media type. For more details about how to receive highlighted search results, see [Text match metadata](https://docs.github.com/rest/reference/search#text-match-metadata). + * + * For example, if you want to find labels in the `linguist` repository that match `bug`, `defect`, or `enhancement`, your query might look like this: + * + * `q=bug+defect+enhancement&repository_id=64778136` + * + * The labels that best match the query appear first in the search results. + */ + get: operations["search/labels"]; + }; + "/search/repositories": { + /** + * Find repositories via various criteria. This method returns up to 100 results [per page](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination). + * + * When searching for repositories, you can get text match metadata for the **name** and **description** fields when you pass the `text-match` media type. For more details about how to receive highlighted search results, see [Text match metadata](https://docs.github.com/rest/reference/search#text-match-metadata). + * + * For example, if you want to search for popular Tetris repositories written in assembly code, your query might look like this: + * + * `q=tetris+language:assembly&sort=stars&order=desc` + * + * This query searches for repositories with the word `tetris` in the name, the description, or the README. The results are limited to repositories where the primary language is assembly. The results are sorted by stars in descending order, so that the most popular repositories appear first in the search results. + */ + get: operations["search/repos"]; + }; + "/search/topics": { + /** + * Find topics via various criteria. Results are sorted by best match. This method returns up to 100 results [per page](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination). See "[Searching topics](https://docs.github.com/articles/searching-topics/)" for a detailed list of qualifiers. + * + * When searching for topics, you can get text match metadata for the topic's **short\_description**, **description**, **name**, or **display\_name** field when you pass the `text-match` media type.
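As a minimal sketch of issuing one of the search queries described above with @octokit/core, under stated assumptions (the query string is the `addClass` code-search example from these notes, `GITHUB_TOKEN` is set, and the `text-match` media type is requested via the `Accept` header):

```typescript
// Sketch: run the code search described above and print matching paths.
import { Octokit } from "@octokit/core";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

const { data } = await octokit.request("GET /search/code", {
  q: "addClass in:file language:js repo:jquery/jquery",
  per_page: 10,
  headers: { accept: "application/vnd.github.text-match+json" },
});

for (const item of data.items) {
  console.log(item.repository.full_name, item.path);
}
```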
For more details about how to receive highlighted search results, see [Text match metadata](https://docs.github.com/rest/reference/search#text-match-metadata). + * + * For example, if you want to search for topics related to Ruby that are featured on https://github.com/topics, your query might look like this: + * + * `q=ruby+is:featured` + * + * This query searches for topics with the keyword `ruby` and limits the results to find only topics that are featured. The topics that are the best match for the query appear first in the search results. + */ + get: operations["search/topics"]; + }; + "/search/users": { + /** + * Find users via various criteria. This method returns up to 100 results [per page](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination). + * + * When searching for users, you can get text match metadata for the user **login**, **email**, and **name** fields when you pass the `text-match` media type. For more details about how to receive highlighted search results, see [Text match metadata](https://docs.github.com/rest/reference/search#text-match-metadata). + * + * For example, if you're looking for a list of popular users, you might try this query: + * + * `q=tom+repos:%3E42+followers:%3E1000` + * + * This query searches for users with the name `tom`. The results are restricted to users with more than 42 repositories and over 1,000 followers. + */ + get: operations["search/users"]; + }; + "/teams/{team_id}": { + /** **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the [Get a team by name](https://docs.github.com/rest/reference/teams#get-a-team-by-name) endpoint. */ + get: operations["teams/get-legacy"]; + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Delete a team](https://docs.github.com/rest/reference/teams#delete-a-team) endpoint. + * + * To delete a team, the authenticated user must be an organization owner or team maintainer. + * + * If you are an organization owner, deleting a parent team will delete all of its child teams as well. + */ + delete: operations["teams/delete-legacy"]; + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Update a team](https://docs.github.com/rest/reference/teams#update-a-team) endpoint. + * + * To edit a team, the authenticated user must either be an organization owner or a team maintainer. + * + * **Note:** With nested teams, the `privacy` for parent teams cannot be `secret`. + */ + patch: operations["teams/update-legacy"]; + }; + "/teams/{team_id}/discussions": { + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [`List discussions`](https://docs.github.com/rest/reference/teams#list-discussions) endpoint. + * + * List all discussions on a team's page. OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/).
+ */ + get: operations["teams/list-discussions-legacy"]; + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [`Create a discussion`](https://docs.github.com/rest/reference/teams#create-a-discussion) endpoint. + * + * Creates a new discussion post on a team's page. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. + */ + post: operations["teams/create-discussion-legacy"]; + }; + "/teams/{team_id}/discussions/{discussion_number}": { + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Get a discussion](https://docs.github.com/rest/reference/teams#get-a-discussion) endpoint. + * + * Get a specific discussion on a team's page. OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + */ + get: operations["teams/get-discussion-legacy"]; + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [`Delete a discussion`](https://docs.github.com/rest/reference/teams#delete-a-discussion) endpoint. + * + * Delete a discussion from a team's page. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + */ + delete: operations["teams/delete-discussion-legacy"]; + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Update a discussion](https://docs.github.com/rest/reference/teams#update-a-discussion) endpoint. + * + * Edits the title and body text of a discussion post. Only the parameters you provide are updated. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + */ + patch: operations["teams/update-discussion-legacy"]; + }; + "/teams/{team_id}/discussions/{discussion_number}/comments": { + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [List discussion comments](https://docs.github.com/rest/reference/teams#list-discussion-comments) endpoint. + * + * List all comments on a team discussion. OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + */ + get: operations["teams/list-discussion-comments-legacy"]; + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. 
We recommend migrating your existing code to use the new [Create a discussion comment](https://docs.github.com/rest/reference/teams#create-a-discussion-comment) endpoint. + * + * Creates a new comment on a team discussion. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. + */ + post: operations["teams/create-discussion-comment-legacy"]; + }; + "/teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}": { + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Get a discussion comment](https://docs.github.com/rest/reference/teams#get-a-discussion-comment) endpoint. + * + * Get a specific comment on a team discussion. OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + */ + get: operations["teams/get-discussion-comment-legacy"]; + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Delete a discussion comment](https://docs.github.com/rest/reference/teams#delete-a-discussion-comment) endpoint. + * + * Deletes a comment on a team discussion. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + */ + delete: operations["teams/delete-discussion-comment-legacy"]; + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Update a discussion comment](https://docs.github.com/rest/reference/teams#update-a-discussion-comment) endpoint. + * + * Edits the body text of a discussion comment. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + */ + patch: operations["teams/update-discussion-comment-legacy"]; + }; + "/teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions": { + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [`List reactions for a team discussion comment`](https://docs.github.com/rest/reference/reactions#list-reactions-for-a-team-discussion-comment) endpoint. + * + * List the reactions to a [team discussion comment](https://docs.github.com/rest/reference/teams#discussion-comments). OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + */ + get: operations["reactions/list-for-team-discussion-comment-legacy"]; + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. 
We recommend migrating your existing code to use the new "[Create reaction for a team discussion comment](https://docs.github.com/rest/reference/reactions#create-reaction-for-a-team-discussion-comment)" endpoint. + * + * Create a reaction to a [team discussion comment](https://docs.github.com/rest/reference/teams#discussion-comments). OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). A response with an HTTP `200` status means that you already added the reaction type to this team discussion comment. + */ + post: operations["reactions/create-for-team-discussion-comment-legacy"]; + }; + "/teams/{team_id}/discussions/{discussion_number}/reactions": { + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [`List reactions for a team discussion`](https://docs.github.com/rest/reference/reactions#list-reactions-for-a-team-discussion) endpoint. + * + * List the reactions to a [team discussion](https://docs.github.com/rest/reference/teams#discussions). OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + */ + get: operations["reactions/list-for-team-discussion-legacy"]; + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [`Create reaction for a team discussion`](https://docs.github.com/rest/reference/reactions#create-reaction-for-a-team-discussion) endpoint. + * + * Create a reaction to a [team discussion](https://docs.github.com/rest/reference/teams#discussions). OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). A response with an HTTP `200` status means that you already added the reaction type to this team discussion. + */ + post: operations["reactions/create-for-team-discussion-legacy"]; + }; + "/teams/{team_id}/invitations": { + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [`List pending team invitations`](https://docs.github.com/rest/reference/teams#list-pending-team-invitations) endpoint. + * + * The return hash contains a `role` field which refers to the Organization Invitation role and will be one of the following values: `direct_member`, `admin`, `billing_manager`, `hiring_manager`, or `reinstate`. If the invitee is not a GitHub member, the `login` field in the return hash will be `null`. + */ + get: operations["teams/list-pending-invitations-legacy"]; + }; + "/teams/{team_id}/members": { + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [`List team members`](https://docs.github.com/rest/reference/teams#list-team-members) endpoint. + * + * Team members will include the members of child teams. + */ + get: operations["teams/list-members-legacy"]; + }; + "/teams/{team_id}/members/{username}": { + /** + * The "Get team member" endpoint (described below) is deprecated. + * + * We recommend using the [Get team membership for a user](https://docs.github.com/rest/reference/teams#get-team-membership-for-a-user) endpoint instead. 
It allows you to get both active and pending memberships. + * + * To list members in a team, the team must be visible to the authenticated user. + */ + get: operations["teams/get-member-legacy"]; + /** + * The "Add team member" endpoint (described below) is deprecated. + * + * We recommend using the [Add or update team membership for a user](https://docs.github.com/rest/reference/teams#add-or-update-team-membership-for-a-user) endpoint instead. It allows you to invite new organization members to your teams. + * + * Team synchronization is available for organizations using GitHub Enterprise Cloud. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * To add someone to a team, the authenticated user must be an organization owner or a team maintainer in the team they're changing. The person being added to the team must be a member of the team's organization. + * + * **Note:** When you have team synchronization set up for a team with your organization's identity provider (IdP), you will see an error if you attempt to use the API for making changes to the team's membership. If you have access to manage group membership in your IdP, you can manage GitHub team membership through your identity provider, which automatically adds and removes team members in an organization. For more information, see "[Synchronizing teams between your identity provider and GitHub](https://docs.github.com/articles/synchronizing-teams-between-your-identity-provider-and-github/)." + * + * Note that you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)." + */ + put: operations["teams/add-member-legacy"]; + /** + * The "Remove team member" endpoint (described below) is deprecated. + * + * We recommend using the [Remove team membership for a user](https://docs.github.com/rest/reference/teams#remove-team-membership-for-a-user) endpoint instead. It allows you to remove both active and pending memberships. + * + * Team synchronization is available for organizations using GitHub Enterprise Cloud. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * To remove a team member, the authenticated user must have 'admin' permissions to the team or be an owner of the org that the team is associated with. Removing a team member does not delete the user, it just removes them from the team. + * + * **Note:** When you have team synchronization set up for a team with your organization's identity provider (IdP), you will see an error if you attempt to use the API for making changes to the team's membership. If you have access to manage group membership in your IdP, you can manage GitHub team membership through your identity provider, which automatically adds and removes team members in an organization. For more information, see "[Synchronizing teams between your identity provider and GitHub](https://docs.github.com/articles/synchronizing-teams-between-your-identity-provider-and-github/)." + */ + delete: operations["teams/remove-member-legacy"]; + }; + "/teams/{team_id}/memberships/{username}": { + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. 
We recommend migrating your existing code to use the new [Get team membership for a user](https://docs.github.com/rest/reference/teams#get-team-membership-for-a-user) endpoint. + * + * Team members will include the members of child teams. + * + * To get a user's membership with a team, the team must be visible to the authenticated user. + * + * **Note:** + * The response contains the `state` of the membership and the member's `role`. + * + * The `role` for organization owners is set to `maintainer`. For more information about `maintainer` roles, see [Create a team](https://docs.github.com/rest/reference/teams#create-a-team). + */ + get: operations["teams/get-membership-for-user-legacy"]; + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Add or update team membership for a user](https://docs.github.com/rest/reference/teams#add-or-update-team-membership-for-a-user) endpoint. + * + * Team synchronization is available for organizations using GitHub Enterprise Cloud. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * If the user is already a member of the team's organization, this endpoint will add the user to the team. To add a membership between an organization member and a team, the authenticated user must be an organization owner or a team maintainer. + * + * **Note:** When you have team synchronization set up for a team with your organization's identity provider (IdP), you will see an error if you attempt to use the API for making changes to the team's membership. If you have access to manage group membership in your IdP, you can manage GitHub team membership through your identity provider, which automatically adds and removes team members in an organization. For more information, see "[Synchronizing teams between your identity provider and GitHub](https://docs.github.com/articles/synchronizing-teams-between-your-identity-provider-and-github/)." + * + * If the user is unaffiliated with the team's organization, this endpoint will send an invitation to the user via email. This newly-created membership will be in the "pending" state until the user accepts the invitation, at which point the membership will transition to the "active" state and the user will be added as a member of the team. To add a membership between an unaffiliated user and a team, the authenticated user must be an organization owner. + * + * If the user is already a member of the team, this endpoint will update the team member's role. To update the membership of a team member, the authenticated user must be an organization owner or a team maintainer. + */ + put: operations["teams/add-or-update-membership-for-user-legacy"]; + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Remove team membership for a user](https://docs.github.com/rest/reference/teams#remove-team-membership-for-a-user) endpoint. + * + * Team synchronization is available for organizations using GitHub Enterprise Cloud. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation.
+ * + * To remove a membership between a user and a team, the authenticated user must have 'admin' permissions to the team or be an owner of the organization that the team is associated with. Removing team membership does not delete the user, it just removes their membership from the team. + * + * **Note:** When you have team synchronization set up for a team with your organization's identity provider (IdP), you will see an error if you attempt to use the API for making changes to the team's membership. If you have access to manage group membership in your IdP, you can manage GitHub team membership through your identity provider, which automatically adds and removes team members in an organization. For more information, see "[Synchronizing teams between your identity provider and GitHub](https://docs.github.com/articles/synchronizing-teams-between-your-identity-provider-and-github/)." + */ + delete: operations["teams/remove-membership-for-user-legacy"]; + }; + "/teams/{team_id}/projects": { + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [`List team projects`](https://docs.github.com/rest/reference/teams#list-team-projects) endpoint. + * + * Lists the organization projects for a team. + */ + get: operations["teams/list-projects-legacy"]; + }; + "/teams/{team_id}/projects/{project_id}": { + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Check team permissions for a project](https://docs.github.com/rest/reference/teams#check-team-permissions-for-a-project) endpoint. + * + * Checks whether a team has `read`, `write`, or `admin` permissions for an organization project. The response includes projects inherited from a parent team. + */ + get: operations["teams/check-permissions-for-project-legacy"]; + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Add or update team project permissions](https://docs.github.com/rest/reference/teams#add-or-update-team-project-permissions) endpoint. + * + * Adds an organization project to a team. To add a project to a team or update the team's permission on a project, the authenticated user must have `admin` permissions for the project. The project and team must be part of the same organization. + */ + put: operations["teams/add-or-update-project-permissions-legacy"]; + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Remove a project from a team](https://docs.github.com/rest/reference/teams#remove-a-project-from-a-team) endpoint. + * + * Removes an organization project from a team. An organization owner or a team maintainer can remove any project from the team. To remove a project from a team as an organization member, the authenticated user must have `read` access to both the team and project, or `admin` access to the team or project. **Note:** This endpoint removes the project from the team, but does not delete it. + */ + delete: operations["teams/remove-project-legacy"]; + }; + "/teams/{team_id}/repos": { + /** **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. 
We recommend migrating your existing code to use the new [List team repositories](https://docs.github.com/rest/reference/teams#list-team-repositories) endpoint. */ + get: operations["teams/list-repos-legacy"]; + }; + "/teams/{team_id}/repos/{owner}/{repo}": { + /** + * **Note**: Repositories inherited through a parent team will also be checked. + * + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Check team permissions for a repository](https://docs.github.com/rest/reference/teams#check-team-permissions-for-a-repository) endpoint. + * + * You can also get information about the specified repository, including what permissions the team grants on it, by passing the following custom [media type](https://docs.github.com/rest/overview/media-types/) via the `Accept` header: + */ + get: operations["teams/check-permissions-for-repo-legacy"]; + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new "[Add or update team repository permissions](https://docs.github.com/rest/reference/teams#add-or-update-team-repository-permissions)" endpoint. + * + * To add a repository to a team or update the team's permission on a repository, the authenticated user must have admin access to the repository, and must be able to see the team. The repository must be owned by the organization, or a direct fork of a repository owned by the organization. You will get a `422 Unprocessable Entity` status if you attempt to add a repository to a team that is not owned by the organization. + * + * Note that, if you choose not to pass any parameters, you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)." + */ + put: operations["teams/add-or-update-repo-permissions-legacy"]; + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Remove a repository from a team](https://docs.github.com/rest/reference/teams#remove-a-repository-from-a-team) endpoint. + * + * If the authenticated user is an organization owner or a team maintainer, they can remove any repositories from the team. To remove a repository from a team as an organization member, the authenticated user must have admin access to the repository and must be able to see the team. NOTE: This does not delete the repository, it just removes it from the team. + */ + delete: operations["teams/remove-repo-legacy"]; + }; + "/teams/{team_id}/team-sync/group-mappings": { + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [`List IdP groups for a team`](https://docs.github.com/rest/reference/teams#list-idp-groups-for-a-team) endpoint. + * + * Team synchronization is available for organizations using GitHub Enterprise Cloud. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * List IdP groups connected to a team on GitHub. + */ + get: operations["teams/list-idp-groups-for-legacy"]; + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. 
We recommend migrating your existing code to use the new [`Create or update IdP group connections`](https://docs.github.com/rest/reference/teams#create-or-update-idp-group-connections) endpoint. + * + * Team synchronization is available for organizations using GitHub Enterprise Cloud. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Creates, updates, or removes a connection between a team and an IdP group. When adding groups to a team, you must include all new and existing groups to avoid replacing existing groups with the new ones. Specifying an empty `groups` array will remove all connections for a team. + */ + patch: operations["teams/create-or-update-idp-group-connections-legacy"]; + }; + "/teams/{team_id}/teams": { + /** **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [`List child teams`](https://docs.github.com/rest/reference/teams#list-child-teams) endpoint. */ + get: operations["teams/list-child-legacy"]; + }; + "/user": { + /** + * If the authenticated user is authenticated through basic authentication or OAuth with the `user` scope, then the response lists public and private profile information. + * + * If the authenticated user is authenticated through OAuth without the `user` scope, then the response lists only public profile information. + */ + get: operations["users/get-authenticated"]; + /** **Note:** If your email is set to private and you send an `email` parameter as part of this request to update your profile, your privacy settings are still enforced: the email address will not be displayed on your public profile or via the API. */ + patch: operations["users/update-authenticated"]; + }; + "/user/blocks": { + /** List the users you've blocked on your personal account. */ + get: operations["users/list-blocked-by-authenticated-user"]; + }; + "/user/blocks/{username}": { + get: operations["users/check-blocked"]; + put: operations["users/block"]; + delete: operations["users/unblock"]; + }; + "/user/codespaces": { + /** + * Lists the authenticated user's codespaces. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have read access to the `codespaces` repository permission to use this endpoint. + */ + get: operations["codespaces/list-for-authenticated-user"]; + /** + * Creates a new codespace, owned by the authenticated user. + * + * This endpoint requires either a `repository_id` OR a `pull_request` but not both. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have write access to the `codespaces` repository permission to use this endpoint. + */ + post: operations["codespaces/create-for-authenticated-user"]; + }; + "/user/codespaces/secrets": { + /** + * Lists all secrets available for a user's Codespaces without revealing their + * encrypted values. + * + * You must authenticate using an access token with the `codespace` or `codespace:secrets` scope to use this endpoint. User must have Codespaces access to use this endpoint. + * + * GitHub Apps must have read access to the `codespaces_user_secrets` user permission to use this endpoint. 
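+ *
+ * A minimal sketch (not part of the upstream documentation), assuming the `@octokit/core` client vendored in this patch and a personal access token with the `codespace` scope in `GITHUB_TOKEN`:
+ *
+ * ```
+ * const { Octokit } = require("@octokit/core");
+ *
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ *
+ * (async () => {
+ *   // Secret names and metadata are returned; encrypted values never are.
+ *   const { data } = await octokit.request("GET /user/codespaces/secrets");
+ *   console.log(`${data.total_count} secret(s)`, data.secrets.map((s) => s.name));
+ * })();
+ * ```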
+ */ + get: operations["codespaces/list-secrets-for-authenticated-user"]; + }; + "/user/codespaces/secrets/public-key": { + /** + * Gets your public key, which you need to encrypt secrets. You need to encrypt a secret before you can create or update secrets. + * + * You must authenticate using an access token with the `codespace` or `codespace:secrets` scope to use this endpoint. User must have Codespaces access to use this endpoint. + * + * GitHub Apps must have read access to the `codespaces_user_secrets` user permission to use this endpoint. + */ + get: operations["codespaces/get-public-key-for-authenticated-user"]; + }; + "/user/codespaces/secrets/{secret_name}": { + /** + * Gets a secret available to a user's codespaces without revealing its encrypted value. + * + * You must authenticate using an access token with the `codespace` or `codespace:secrets` scope to use this endpoint. User must have Codespaces access to use this endpoint. + * + * GitHub Apps must have read access to the `codespaces_user_secrets` user permission to use this endpoint. + */ + get: operations["codespaces/get-secret-for-authenticated-user"]; + /** + * Creates or updates a secret for a user's codespace with an encrypted value. Encrypt your secret using + * [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages). + * + * You must authenticate using an access token with the `codespace` or `codespace:secrets` scope to use this endpoint. User must also have Codespaces access to use this endpoint. + * + * GitHub Apps must have read access to the `codespaces_user_secrets` user permission and `codespaces_secrets` repository permission on all referenced repositories to use this endpoint. + * + * #### Example encrypting a secret using Node.js + * + * Encrypt your secret using the [tweetsodium](https://github.com/github/tweetsodium) library. + * + * ``` + * const sodium = require('tweetsodium'); + * + * const key = "base64-encoded-public-key"; + * const value = "plain-text-secret"; + * + * // Convert the message and key to Uint8Array's (Buffer implements that interface) + * const messageBytes = Buffer.from(value); + * const keyBytes = Buffer.from(key, 'base64'); + * + * // Encrypt using LibSodium. + * const encryptedBytes = sodium.seal(messageBytes, keyBytes); + * + * // Base64 the encrypted secret + * const encrypted = Buffer.from(encryptedBytes).toString('base64'); + * + * console.log(encrypted); + * ``` + * + * + * #### Example encrypting a secret using Python + * + * Encrypt your secret using [pynacl](https://pynacl.readthedocs.io/en/latest/public/#nacl-public-sealedbox) with Python 3. + * + * ``` + * from base64 import b64encode + * from nacl import encoding, public + * + * def encrypt(public_key: str, secret_value: str) -> str: + * """Encrypt a Unicode string using the public key.""" + * public_key = public.PublicKey(public_key.encode("utf-8"), encoding.Base64Encoder()) + * sealed_box = public.SealedBox(public_key) + * encrypted = sealed_box.encrypt(secret_value.encode("utf-8")) + * return b64encode(encrypted).decode("utf-8") + * ``` + * + * #### Example encrypting a secret using C# + * + * Encrypt your secret using the [Sodium.Core](https://www.nuget.org/packages/Sodium.Core/) package. 
+ * + * ``` + * var secretValue = System.Text.Encoding.UTF8.GetBytes("mySecret"); + * var publicKey = Convert.FromBase64String("2Sg8iYjAxxmI2LvUXpJjkYrMxURPc8r+dB7TJyvvcCU="); + * + * var sealedPublicKeyBox = Sodium.SealedPublicKeyBox.Create(secretValue, publicKey); + * + * Console.WriteLine(Convert.ToBase64String(sealedPublicKeyBox)); + * ``` + * + * #### Example encrypting a secret using Ruby + * + * Encrypt your secret using the [rbnacl](https://github.com/RubyCrypto/rbnacl) gem. + * + * ```ruby + * require "rbnacl" + * require "base64" + * + * key = Base64.decode64("+ZYvJDZMHUfBkJdyq5Zm9SKqeuBQ4sj+6sfjlH4CgG0=") + * public_key = RbNaCl::PublicKey.new(key) + * + * box = RbNaCl::Boxes::Sealed.from_public_key(public_key) + * encrypted_secret = box.encrypt("my_secret") + * + * # Print the base64 encoded secret + * puts Base64.strict_encode64(encrypted_secret) + * ``` + */ + put: operations["codespaces/create-or-update-secret-for-authenticated-user"]; + /** + * Deletes a secret from a user's codespaces using the secret name. Deleting the secret will remove access from all codespaces that were allowed to access the secret. + * + * You must authenticate using an access token with the `codespace` or `codespace:secrets` scope to use this endpoint. User must have Codespaces access to use this endpoint. + * + * GitHub Apps must have write access to the `codespaces_user_secrets` user permission to use this endpoint. + */ + delete: operations["codespaces/delete-secret-for-authenticated-user"]; + }; + "/user/codespaces/secrets/{secret_name}/repositories": { + /** + * List the repositories that have been granted the ability to use a user's codespace secret. + * + * You must authenticate using an access token with the `codespace` or `codespace:secrets` scope to use this endpoint. User must have Codespaces access to use this endpoint. + * + * GitHub Apps must have read access to the `codespaces_user_secrets` user permission and write access to the `codespaces_secrets` repository permission on all referenced repositories to use this endpoint. + */ + get: operations["codespaces/list-repositories-for-secret-for-authenticated-user"]; + /** + * Select the repositories that will use a user's codespace secret. + * + * You must authenticate using an access token with the `codespace` or `codespace:secrets` scope to use this endpoint. User must have Codespaces access to use this endpoint. + * + * GitHub Apps must have write access to the `codespaces_user_secrets` user permission and write access to the `codespaces_secrets` repository permission on all referenced repositories to use this endpoint. + */ + put: operations["codespaces/set-repositories-for-secret-for-authenticated-user"]; + }; + "/user/codespaces/secrets/{secret_name}/repositories/{repository_id}": { + /** + * Adds a repository to the selected repositories for a user's codespace secret. + * You must authenticate using an access token with the `codespace` or `codespace:secrets` scope to use this endpoint. User must have Codespaces access to use this endpoint. + * GitHub Apps must have write access to the `codespaces_user_secrets` user permission and write access to the `codespaces_secrets` repository permission on the referenced repository to use this endpoint. + */ + put: operations["codespaces/add-repository-for-secret-for-authenticated-user"]; + /** + * Removes a repository from the selected repositories for a user's codespace secret. 
+ * You must authenticate using an access token with the `codespace` or `codespace:secrets` scope to use this endpoint. User must have Codespaces access to use this endpoint. + * GitHub Apps must have write access to the `codespaces_user_secrets` user permission to use this endpoint. + */ + delete: operations["codespaces/remove-repository-for-secret-for-authenticated-user"]; + }; + "/user/codespaces/{codespace_name}": { + /** + * Gets information about a user's codespace. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have read access to the `codespaces` repository permission to use this endpoint. + */ + get: operations["codespaces/get-for-authenticated-user"]; + /** + * Deletes a user's codespace. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have write access to the `codespaces` repository permission to use this endpoint. + */ + delete: operations["codespaces/delete-for-authenticated-user"]; + /** + * Updates a codespace owned by the authenticated user. Currently only the codespace's machine type and recent folders can be modified using this endpoint. + * + * If you specify a new machine type it will be applied the next time your codespace is started. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have write access to the `codespaces` repository permission to use this endpoint. + */ + patch: operations["codespaces/update-for-authenticated-user"]; + }; + "/user/codespaces/{codespace_name}/exports": { + /** + * Triggers an export of the specified codespace and returns a URL and ID where the status of the export can be monitored. + * + * You must authenticate using a personal access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have write access to the `codespaces_lifecycle_admin` repository permission to use this endpoint. + */ + post: operations["codespaces/export-for-authenticated-user"]; + }; + "/user/codespaces/{codespace_name}/exports/{export_id}": { + /** + * Gets information about an export of a codespace. + * + * You must authenticate using a personal access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have read access to the `codespaces_lifecycle_admin` repository permission to use this endpoint. + */ + get: operations["codespaces/get-export-details-for-authenticated-user"]; + }; + "/user/codespaces/{codespace_name}/machines": { + /** + * List the machine types a codespace can transition to use. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have read access to the `codespaces_metadata` repository permission to use this endpoint. + */ + get: operations["codespaces/codespace-machines-for-authenticated-user"]; + }; + "/user/codespaces/{codespace_name}/start": { + /** + * Starts a user's codespace. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have write access to the `codespaces_lifecycle_admin` repository permission to use this endpoint. + */ + post: operations["codespaces/start-for-authenticated-user"]; + }; + "/user/codespaces/{codespace_name}/stop": { + /** + * Stops a user's codespace. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. 
+ * + * GitHub Apps must have write access to the `codespaces_lifecycle_admin` repository permission to use this endpoint. + */ + post: operations["codespaces/stop-for-authenticated-user"]; + }; + "/user/email/visibility": { + /** Sets the visibility for your primary email addresses. */ + patch: operations["users/set-primary-email-visibility-for-authenticated-user"]; + }; + "/user/emails": { + /** Lists all of your email addresses, and specifies which one is visible to the public. This endpoint is accessible with the `user:email` scope. */ + get: operations["users/list-emails-for-authenticated-user"]; + /** This endpoint is accessible with the `user` scope. */ + post: operations["users/add-email-for-authenticated-user"]; + /** This endpoint is accessible with the `user` scope. */ + delete: operations["users/delete-email-for-authenticated-user"]; + }; + "/user/followers": { + /** Lists the people following the authenticated user. */ + get: operations["users/list-followers-for-authenticated-user"]; + }; + "/user/following": { + /** Lists the people who the authenticated user follows. */ + get: operations["users/list-followed-by-authenticated-user"]; + }; + "/user/following/{username}": { + get: operations["users/check-person-is-followed-by-authenticated"]; + /** + * Note that you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)." + * + * Following a user requires the user to be logged in and authenticated with basic auth or OAuth with the `user:follow` scope. + */ + put: operations["users/follow"]; + /** Unfollowing a user requires the user to be logged in and authenticated with basic auth or OAuth with the `user:follow` scope. */ + delete: operations["users/unfollow"]; + }; + "/user/gpg_keys": { + /** Lists the current user's GPG keys. Requires that you are authenticated via Basic Auth or via OAuth with at least `read:gpg_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). */ + get: operations["users/list-gpg-keys-for-authenticated-user"]; + /** Adds a GPG key to the authenticated user's GitHub account. Requires that you are authenticated via Basic Auth, or OAuth with at least `write:gpg_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). */ + post: operations["users/create-gpg-key-for-authenticated-user"]; + }; + "/user/gpg_keys/{gpg_key_id}": { + /** View extended details for a single GPG key. Requires that you are authenticated via Basic Auth or via OAuth with at least `read:gpg_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). */ + get: operations["users/get-gpg-key-for-authenticated-user"]; + /** Removes a GPG key from the authenticated user's GitHub account. Requires that you are authenticated via Basic Auth or via OAuth with at least `admin:gpg_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). */ + delete: operations["users/delete-gpg-key-for-authenticated-user"]; + }; + "/user/installations": { + /** + * Lists installations of your GitHub App that the authenticated user has explicit permission (`:read`, `:write`, or `:admin`) to access. 
+ * + * You must use a [user-to-server OAuth access token](https://docs.github.com/apps/building-github-apps/identifying-and-authorizing-users-for-github-apps/#identifying-users-on-your-site), created for a user who has authorized your GitHub App, to access this endpoint. + * + * The authenticated user has explicit permission to access repositories they own, repositories where they are a collaborator, and repositories that they can access through an organization membership. + * + * You can find the permissions for the installation under the `permissions` key. + */ + get: operations["apps/list-installations-for-authenticated-user"]; + }; + "/user/installations/{installation_id}/repositories": { + /** + * List repositories that the authenticated user has explicit permission (`:read`, `:write`, or `:admin`) to access for an installation. + * + * The authenticated user has explicit permission to access repositories they own, repositories where they are a collaborator, and repositories that they can access through an organization membership. + * + * You must use a [user-to-server OAuth access token](https://docs.github.com/apps/building-github-apps/identifying-and-authorizing-users-for-github-apps/#identifying-users-on-your-site), created for a user who has authorized your GitHub App, to access this endpoint. + * + * The access the user has to each repository is included in the hash under the `permissions` key. + */ + get: operations["apps/list-installation-repos-for-authenticated-user"]; + }; + "/user/installations/{installation_id}/repositories/{repository_id}": { + /** + * Add a single repository to an installation. The authenticated user must have admin access to the repository. + * + * You must use a personal access token (which you can create via the [command line](https://docs.github.com/github/authenticating-to-github/creating-a-personal-access-token) or [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication)) to access this endpoint. + */ + put: operations["apps/add-repo-to-installation-for-authenticated-user"]; + /** + * Remove a single repository from an installation. The authenticated user must have admin access to the repository. + * + * You must use a personal access token (which you can create via the [command line](https://docs.github.com/github/authenticating-to-github/creating-a-personal-access-token) or [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication)) to access this endpoint. + */ + delete: operations["apps/remove-repo-from-installation-for-authenticated-user"]; + }; + "/user/interaction-limits": { + /** Shows which type of GitHub user can interact with your public repositories and when the restriction expires. */ + get: operations["interactions/get-restrictions-for-authenticated-user"]; + /** Temporarily restricts which type of GitHub user can interact with your public repositories. Setting the interaction limit at the user level will overwrite any interaction limits that are set for individual repositories owned by the user. */ + put: operations["interactions/set-restrictions-for-authenticated-user"]; + /** Removes any interaction restrictions from your public repositories. */ + delete: operations["interactions/remove-restrictions-for-authenticated-user"]; + }; + "/user/issues": { + /** + * List issues across owned and member repositories assigned to the authenticated user. 
+ * + * **Note**: GitHub's REST API v3 considers every pull request an issue, but not every issue is a pull request. For this + * reason, "Issues" endpoints may return both issues and pull requests in the response. You can identify pull requests by + * the `pull_request` key. Be aware that the `id` of a pull request returned from "Issues" endpoints will be an _issue id_. To find out the pull + * request id, use the "[List pull requests](https://docs.github.com/rest/reference/pulls#list-pull-requests)" endpoint. + */ + get: operations["issues/list-for-authenticated-user"]; + }; + "/user/keys": { + /** Lists the public SSH keys for the authenticated user's GitHub account. Requires that you are authenticated via Basic Auth or via OAuth with at least `read:public_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). */ + get: operations["users/list-public-ssh-keys-for-authenticated-user"]; + /** Adds a public SSH key to the authenticated user's GitHub account. Requires that you are authenticated via Basic Auth, or OAuth with at least `write:public_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). */ + post: operations["users/create-public-ssh-key-for-authenticated-user"]; + }; + "/user/keys/{key_id}": { + /** View extended details for a single public SSH key. Requires that you are authenticated via Basic Auth or via OAuth with at least `read:public_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). */ + get: operations["users/get-public-ssh-key-for-authenticated-user"]; + /** Removes a public SSH key from the authenticated user's GitHub account. Requires that you are authenticated via Basic Auth or via OAuth with at least `admin:public_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). */ + delete: operations["users/delete-public-ssh-key-for-authenticated-user"]; + }; + "/user/marketplace_purchases": { + /** Lists the active subscriptions for the authenticated user. You must use a [user-to-server OAuth access token](https://docs.github.com/apps/building-github-apps/identifying-and-authorizing-users-for-github-apps/#identifying-users-on-your-site), created for a user who has authorized your GitHub App, to access this endpoint. OAuth Apps must authenticate using an [OAuth token](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/). */ + get: operations["apps/list-subscriptions-for-authenticated-user"]; + }; + "/user/marketplace_purchases/stubbed": { + /** Lists the active subscriptions for the authenticated user. You must use a [user-to-server OAuth access token](https://docs.github.com/apps/building-github-apps/identifying-and-authorizing-users-for-github-apps/#identifying-users-on-your-site), created for a user who has authorized your GitHub App, to access this endpoint. OAuth Apps must authenticate using an [OAuth token](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/). */ + get: operations["apps/list-subscriptions-for-authenticated-user-stubbed"]; + }; + "/user/memberships/orgs": { + get: operations["orgs/list-memberships-for-authenticated-user"]; + }; + "/user/memberships/orgs/{org}": { + get: operations["orgs/get-membership-for-authenticated-user"]; + patch: operations["orgs/update-membership-for-authenticated-user"]; + }; + "/user/migrations": { + /** Lists all migrations a user has started.
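+ *
+ * A minimal sketch (not part of the upstream documentation), assuming `@octokit/core` and an appropriately scoped personal access token in `GITHUB_TOKEN`; it prints each migration's `id` and `state`:
+ *
+ * ```
+ * const { Octokit } = require("@octokit/core");
+ *
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ *
+ * (async () => {
+ *   const { data: migrations } = await octokit.request("GET /user/migrations");
+ *   // `state` is one of: pending, exporting, exported, failed (described below).
+ *   migrations.forEach((m) => console.log(m.id, m.state));
+ * })();
+ * ```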
*/ + get: operations["migrations/list-for-authenticated-user"]; + /** Initiates the generation of a user migration archive. */ + post: operations["migrations/start-for-authenticated-user"]; + }; + "/user/migrations/{migration_id}": { + /** + * Fetches a single user migration. The response includes the `state` of the migration, which can be one of the following values: + * + * * `pending` - the migration hasn't started yet. + * * `exporting` - the migration is in progress. + * * `exported` - the migration finished successfully. + * * `failed` - the migration failed. + * + * Once the migration has been `exported` you can [download the migration archive](https://docs.github.com/rest/reference/migrations#download-a-user-migration-archive). + */ + get: operations["migrations/get-status-for-authenticated-user"]; + }; + "/user/migrations/{migration_id}/archive": { + /** + * Fetches the URL to download the migration archive as a `tar.gz` file. Depending on the resources your repository uses, the migration archive can contain JSON files with data for these objects: + * + * * attachments + * * bases + * * commit\_comments + * * issue\_comments + * * issue\_events + * * issues + * * milestones + * * organizations + * * projects + * * protected\_branches + * * pull\_request\_reviews + * * pull\_requests + * * releases + * * repositories + * * review\_comments + * * schema + * * users + * + * The archive will also contain an `attachments` directory that includes all attachment files uploaded to GitHub.com and a `repositories` directory that contains the repository's Git data. + */ + get: operations["migrations/get-archive-for-authenticated-user"]; + /** Deletes a previous migration archive. Downloadable migration archives are automatically deleted after seven days. Migration metadata, which is returned in the [List user migrations](https://docs.github.com/rest/reference/migrations#list-user-migrations) and [Get a user migration status](https://docs.github.com/rest/reference/migrations#get-a-user-migration-status) endpoints, will continue to be available even after an archive is deleted. */ + delete: operations["migrations/delete-archive-for-authenticated-user"]; + }; + "/user/migrations/{migration_id}/repos/{repo_name}/lock": { + /** Unlocks a repository. You can lock repositories when you [start a user migration](https://docs.github.com/rest/reference/migrations#start-a-user-migration). Once the migration is complete you can unlock each repository to begin using it again or [delete the repository](https://docs.github.com/rest/reference/repos#delete-a-repository) if you no longer need the source data. Returns a status of `404 Not Found` if the repository is not locked. */ + delete: operations["migrations/unlock-repo-for-authenticated-user"]; + }; + "/user/migrations/{migration_id}/repositories": { + /** Lists all the repositories for this user migration. */ + get: operations["migrations/list-repos-for-authenticated-user"]; + }; + "/user/orgs": { + /** + * List organizations for the authenticated user. + * + * **OAuth scope requirements** + * + * This only lists organizations that your authorization allows you to operate on in some way (e.g., you can list teams with `read:org` scope, you can publicize your organization membership with `user` scope, etc.). Therefore, this API requires at least `user` or `read:org` scope. OAuth requests with insufficient scope receive a `403 Forbidden` response. 
+ */ + get: operations["orgs/list-for-authenticated-user"]; + }; + "/user/packages": { + /** + * Lists packages owned by the authenticated user within the user's namespace. + * + * To use this endpoint, you must authenticate using an access token with the `packages:read` scope. + * If `package_type` is not `container`, your token must also include the `repo` scope. + */ + get: operations["packages/list-packages-for-authenticated-user"]; + }; + "/user/packages/{package_type}/{package_name}": { + /** + * Gets a specific package for a package owned by the authenticated user. + * + * To use this endpoint, you must authenticate using an access token with the `packages:read` scope. + * If `package_type` is not `container`, your token must also include the `repo` scope. + */ + get: operations["packages/get-package-for-authenticated-user"]; + /** + * Deletes a package owned by the authenticated user. You cannot delete a public package if any version of the package has more than 5,000 downloads. In this scenario, contact GitHub support for further assistance. + * + * To use this endpoint, you must authenticate using an access token with the `packages:read` and `packages:delete` scopes. + * If `package_type` is not `container`, your token must also include the `repo` scope. + */ + delete: operations["packages/delete-package-for-authenticated-user"]; + }; + "/user/packages/{package_type}/{package_name}/restore": { + /** + * Restores a package owned by the authenticated user. + * + * You can restore a deleted package under the following conditions: + * - The package was deleted within the last 30 days. + * - The same package namespace and version is still available and not reused for a new package. If the same package namespace is not available, you will not be able to restore your package. In this scenario, to restore the deleted package, you must delete the new package that uses the deleted package's namespace first. + * + * To use this endpoint, you must authenticate using an access token with the `packages:read` and `packages:write` scopes. If `package_type` is not `container`, your token must also include the `repo` scope. + */ + post: operations["packages/restore-package-for-authenticated-user"]; + }; + "/user/packages/{package_type}/{package_name}/versions": { + /** + * Returns all package versions for a package owned by the authenticated user. + * + * To use this endpoint, you must authenticate using an access token with the `packages:read` scope. + * If `package_type` is not `container`, your token must also include the `repo` scope. + */ + get: operations["packages/get-all-package-versions-for-package-owned-by-authenticated-user"]; + }; + "/user/packages/{package_type}/{package_name}/versions/{package_version_id}": { + /** + * Gets a specific package version for a package owned by the authenticated user. + * + * To use this endpoint, you must authenticate using an access token with the `packages:read` scope. + * If `package_type` is not `container`, your token must also include the `repo` scope. + */ + get: operations["packages/get-package-version-for-authenticated-user"]; + /** + * Deletes a specific package version for a package owned by the authenticated user. If the package is public and the package version has more than 5,000 downloads, you cannot delete the package version. In this scenario, contact GitHub support for further assistance. 
+ * + * To use this endpoint, you must have admin permissions in the organization and authenticate using an access token with the `packages:read` and `packages:delete` scopes. + * If `package_type` is not `container`, your token must also include the `repo` scope. + */ + delete: operations["packages/delete-package-version-for-authenticated-user"]; + }; + "/user/packages/{package_type}/{package_name}/versions/{package_version_id}/restore": { + /** + * Restores a package version owned by the authenticated user. + * + * You can restore a deleted package version under the following conditions: + * - The package was deleted within the last 30 days. + * - The same package namespace and version is still available and not reused for a new package. If the same package namespace is not available, you will not be able to restore your package. In this scenario, to restore the deleted package, you must delete the new package that uses the deleted package's namespace first. + * + * To use this endpoint, you must authenticate using an access token with the `packages:read` and `packages:write` scopes. If `package_type` is not `container`, your token must also include the `repo` scope. + */ + post: operations["packages/restore-package-version-for-authenticated-user"]; + }; + "/user/projects": { + /** Creates a user project board. Returns a `410 Gone` status if the user does not have existing classic projects. If you do not have sufficient privileges to perform this action, a `401 Unauthorized` or `410 Gone` status is returned. */ + post: operations["projects/create-for-authenticated-user"]; + }; + "/user/public_emails": { + /** Lists your publicly visible email address, which you can set with the [Set primary email visibility for the authenticated user](https://docs.github.com/rest/reference/users#set-primary-email-visibility-for-the-authenticated-user) endpoint. This endpoint is accessible with the `user:email` scope. */ + get: operations["users/list-public-emails-for-authenticated-user"]; + }; + "/user/repos": { + /** + * Lists repositories that the authenticated user has explicit permission (`:read`, `:write`, or `:admin`) to access. + * + * The authenticated user has explicit permission to access repositories they own, repositories where they are a collaborator, and repositories that they can access through an organization membership. + */ + get: operations["repos/list-for-authenticated-user"]; + /** + * Creates a new repository for the authenticated user. + * + * **OAuth scope requirements** + * + * When using [OAuth](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/), authorizations must include: + * + * * `public_repo` scope or `repo` scope to create a public repository. Note: For GitHub AE, use `repo` scope to create an internal repository. + * * `repo` scope to create a private repository. + */ + post: operations["repos/create-for-authenticated-user"]; + }; + "/user/repository_invitations": { + /** When authenticating as a user, this endpoint will list all currently open repository invitations for that user. */ + get: operations["repos/list-invitations-for-authenticated-user"]; + }; + "/user/repository_invitations/{invitation_id}": { + delete: operations["repos/decline-invitation-for-authenticated-user"]; + patch: operations["repos/accept-invitation-for-authenticated-user"]; + }; + "/user/starred": { + /** + * Lists repositories the authenticated user has starred. 
+ * + * You can also find out _when_ stars were created by passing the following custom [media type](https://docs.github.com/rest/overview/media-types/) via the `Accept` header: + */ + get: operations["activity/list-repos-starred-by-authenticated-user"]; + }; + "/user/starred/{owner}/{repo}": { + get: operations["activity/check-repo-is-starred-by-authenticated-user"]; + /** Note that you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)." */ + put: operations["activity/star-repo-for-authenticated-user"]; + delete: operations["activity/unstar-repo-for-authenticated-user"]; + }; + "/user/subscriptions": { + /** Lists repositories the authenticated user is watching. */ + get: operations["activity/list-watched-repos-for-authenticated-user"]; + }; + "/user/teams": { + /** List all of the teams across all of the organizations to which the authenticated user belongs. This method requires `user`, `repo`, or `read:org` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/) when authenticating via [OAuth](https://docs.github.com/apps/building-oauth-apps/). */ + get: operations["teams/list-for-authenticated-user"]; + }; + "/users": { + /** + * Lists all users, in the order that they signed up on GitHub. This list includes personal user accounts and organization accounts. + * + * Note: Pagination is powered exclusively by the `since` parameter. Use the [Link header](https://docs.github.com/rest/overview/resources-in-the-rest-api#link-header) to get the URL for the next page of users. + */ + get: operations["users/list"]; + }; + "/users/{username}": { + /** + * Provides publicly available information about someone with a GitHub account. + * + * GitHub Apps with the `Plan` user permission can use this endpoint to retrieve information about a user's GitHub plan. The GitHub App must be authenticated as a user. See "[Identifying and authorizing users for GitHub Apps](https://docs.github.com/apps/building-github-apps/identifying-and-authorizing-users-for-github-apps/)" for details about authentication. For an example response, see 'Response with GitHub plan information' below. + * + * The `email` key in the following response is the publicly visible email address from your GitHub [profile page](https://github.com/settings/profile). When setting up your profile, you can select a primary email address to be “public” which provides an email entry for this endpoint. If you do not set a public email address for `email`, then it will have a value of `null`. You only see publicly visible email addresses when authenticated with GitHub. For more information, see [Authentication](https://docs.github.com/rest/overview/resources-in-the-rest-api#authentication). + * + * The Emails API enables you to list all of your email addresses, and toggle a primary email to be visible publicly. For more information, see "[Emails API](https://docs.github.com/rest/reference/users#emails)". + */ + get: operations["users/get-by-username"]; + }; + "/users/{username}/events": { + /** If you are authenticated as the given user, you will see your private events. Otherwise, you'll only see public events. */ + get: operations["activity/list-events-for-authenticated-user"]; + }; + "/users/{username}/events/orgs/{org}": { + /** This is the user's organization dashboard. You must be authenticated as the user to view this.
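+ *
+ * A minimal sketch (not part of the upstream documentation), assuming `@octokit/core`; `octocat` and `github` are placeholder values, and the token must belong to that same user:
+ *
+ * ```
+ * const { Octokit } = require("@octokit/core");
+ *
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ *
+ * (async () => {
+ *   const { data: events } = await octokit.request("GET /users/{username}/events/orgs/{org}", {
+ *     username: "octocat",
+ *     org: "github",
+ *   });
+ *   events.forEach((e) => console.log(e.type, e.repo && e.repo.name));
+ * })();
+ * ```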
*/ + get: operations["activity/list-org-events-for-authenticated-user"]; + }; + "/users/{username}/events/public": { + get: operations["activity/list-public-events-for-user"]; + }; + "/users/{username}/followers": { + /** Lists the people following the specified user. */ + get: operations["users/list-followers-for-user"]; + }; + "/users/{username}/following": { + /** Lists the people who the specified user follows. */ + get: operations["users/list-following-for-user"]; + }; + "/users/{username}/following/{target_user}": { + get: operations["users/check-following-for-user"]; + }; + "/users/{username}/gists": { + /** Lists public gists for the specified user: */ + get: operations["gists/list-for-user"]; + }; + "/users/{username}/gpg_keys": { + /** Lists the GPG keys for a user. This information is accessible by anyone. */ + get: operations["users/list-gpg-keys-for-user"]; + }; + "/users/{username}/hovercard": { + /** + * Provides hovercard information when authenticated through basic auth or OAuth with the `repo` scope. You can find out more about someone in relation to their pull requests, issues, repositories, and organizations. + * + * The `subject_type` and `subject_id` parameters provide context for the person's hovercard, which returns more information than without the parameters. For example, if you wanted to find out more about `octocat` who owns the `Spoon-Knife` repository via cURL, it would look like this: + * + * ```shell + * curl -u username:token + * https://api.github.com/users/octocat/hovercard?subject_type=repository&subject_id=1300192 + * ``` + */ + get: operations["users/get-context-for-user"]; + }; + "/users/{username}/installation": { + /** + * Enables an authenticated GitHub App to find the user’s installation information. + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + get: operations["apps/get-user-installation"]; + }; + "/users/{username}/keys": { + /** Lists the _verified_ public SSH keys for a user. This is accessible by anyone. */ + get: operations["users/list-public-keys-for-user"]; + }; + "/users/{username}/orgs": { + /** + * List [public organization memberships](https://docs.github.com/articles/publicizing-or-concealing-organization-membership) for the specified user. + * + * This method only lists _public_ memberships, regardless of authentication. If you need to fetch all of the organization memberships (public and private) for the authenticated user, use the [List organizations for the authenticated user](https://docs.github.com/rest/reference/orgs#list-organizations-for-the-authenticated-user) API instead. + */ + get: operations["orgs/list-for-user"]; + }; + "/users/{username}/packages": { + /** + * Lists all packages in a user's namespace for which the requesting user has access. + * + * To use this endpoint, you must authenticate using an access token with the `packages:read` scope. + * If `package_type` is not `container`, your token must also include the `repo` scope. + */ + get: operations["packages/list-packages-for-user"]; + }; + "/users/{username}/packages/{package_type}/{package_name}": { + /** + * Gets a specific package metadata for a public package owned by a user. + * + * To use this endpoint, you must authenticate using an access token with the `packages:read` scope. + * If `package_type` is not `container`, your token must also include the `repo` scope. 
+ */ + get: operations["packages/get-package-for-user"]; + /** + * Deletes an entire package for a user. You cannot delete a public package if any version of the package has more than 5,000 downloads. In this scenario, contact GitHub support for further assistance. + * + * To use this endpoint, you must authenticate using an access token with the `packages:read` and `packages:delete` scopes. In addition: + * - If `package_type` is not `container`, your token must also include the `repo` scope. + * - If `package_type` is `container`, you must also have admin permissions to the container you want to delete. + */ + delete: operations["packages/delete-package-for-user"]; + }; + "/users/{username}/packages/{package_type}/{package_name}/restore": { + /** + * Restores an entire package for a user. + * + * You can restore a deleted package under the following conditions: + * - The package was deleted within the last 30 days. + * - The same package namespace and version is still available and not reused for a new package. If the same package namespace is not available, you will not be able to restore your package. In this scenario, to restore the deleted package, you must delete the new package that uses the deleted package's namespace first. + * + * To use this endpoint, you must authenticate using an access token with the `packages:read` and `packages:write` scopes. In addition: + * - If `package_type` is not `container`, your token must also include the `repo` scope. + * - If `package_type` is `container`, you must also have admin permissions to the container that you want to restore. + */ + post: operations["packages/restore-package-for-user"]; + }; + "/users/{username}/packages/{package_type}/{package_name}/versions": { + /** + * Returns all package versions for a public package owned by a specified user. + * + * To use this endpoint, you must authenticate using an access token with the `packages:read` scope. + * If `package_type` is not `container`, your token must also include the `repo` scope. + */ + get: operations["packages/get-all-package-versions-for-package-owned-by-user"]; + }; + "/users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}": { + /** + * Gets a specific package version for a public package owned by a specified user. + * + * At this time, to use this endpoint, you must authenticate using an access token with the `packages:read` scope. + * If `package_type` is not `container`, your token must also include the `repo` scope. + */ + get: operations["packages/get-package-version-for-user"]; + /** + * Deletes a specific package version for a user. If the package is public and the package version has more than 5,000 downloads, you cannot delete the package version. In this scenario, contact GitHub support for further assistance. + * + * To use this endpoint, you must authenticate using an access token with the `packages:read` and `packages:delete` scopes. In addition: + * - If `package_type` is not `container`, your token must also include the `repo` scope. + * - If `package_type` is `container`, you must also have admin permissions to the container you want to delete. + */ + delete: operations["packages/delete-package-version-for-user"]; + }; + "/users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore": { + /** + * Restores a specific package version for a user. + * + * You can restore a deleted package under the following conditions: + * - The package was deleted within the last 30 days. 
+ * - The same package namespace and version is still available and not reused for a new package. If the same package namespace is not available, you will not be able to restore your package. In this scenario, to restore the deleted package, you must delete the new package that uses the deleted package's namespace first. + * + * To use this endpoint, you must authenticate using an access token with the `packages:read` and `packages:write` scopes. In addition: + * - If `package_type` is not `container`, your token must also include the `repo` scope. + * - If `package_type` is `container`, you must also have admin permissions to the container that you want to restore. + */ + post: operations["packages/restore-package-version-for-user"]; + }; + "/users/{username}/projects": { + get: operations["projects/list-for-user"]; + }; + "/users/{username}/received_events": { + /** These are events that you've received by watching repos and following users. If you are authenticated as the given user, you will see private events. Otherwise, you'll only see public events. */ + get: operations["activity/list-received-events-for-user"]; + }; + "/users/{username}/received_events/public": { + get: operations["activity/list-received-public-events-for-user"]; + }; + "/users/{username}/repos": { + /** Lists public repositories for the specified user. Note: For GitHub AE, this endpoint will list internal repositories for the specified user. */ + get: operations["repos/list-for-user"]; + }; + "/users/{username}/settings/billing/actions": { + /** + * Gets the summary of the free and paid GitHub Actions minutes used. + * + * Paid minutes only apply to workflows in private repositories that use GitHub-hosted runners. Minutes used is listed for each GitHub-hosted runner operating system. Any job re-runs are also included in the usage. The usage returned includes any minute multipliers for macOS and Windows runners, and is rounded up to the nearest whole minute. For more information, see "[Managing billing for GitHub Actions](https://docs.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-actions)". + * + * Access tokens must have the `user` scope. + */ + get: operations["billing/get-github-actions-billing-user"]; + }; + "/users/{username}/settings/billing/packages": { + /** + * Gets the free and paid storage used for GitHub Packages in gigabytes. + * + * Paid minutes only apply to packages stored for private repositories. For more information, see "[Managing billing for GitHub Packages](https://docs.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-packages)." + * + * Access tokens must have the `user` scope. + */ + get: operations["billing/get-github-packages-billing-user"]; + }; + "/users/{username}/settings/billing/shared-storage": { + /** + * Gets the estimated paid and estimated total storage used for GitHub Actions and GitHub Packages. + * + * Paid minutes only apply to packages stored for private repositories. For more information, see "[Managing billing for GitHub Packages](https://docs.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-packages)." + * + * Access tokens must have the `user` scope. + */ + get: operations["billing/get-shared-storage-billing-user"]; + }; + "/users/{username}/starred": { + /** + * Lists repositories a user has starred. 
+ * + * You can also find out _when_ stars were created by passing the following custom [media type](https://docs.github.com/rest/overview/media-types/) via the `Accept` header: + */ + get: operations["activity/list-repos-starred-by-user"]; + }; + "/users/{username}/subscriptions": { + /** Lists repositories a user is watching. */ + get: operations["activity/list-repos-watched-by-user"]; + }; + "/zen": { + /** Get a random sentence from the Zen of GitHub */ + get: operations["meta/get-zen"]; + }; + "/repos/{owner}/{repo}/compare/{base}...{head}": { + /** + * **Deprecated**: Use `repos.compareCommitsWithBasehead()` (`GET /repos/{owner}/{repo}/compare/{basehead}`) instead. Both `:base` and `:head` must be branch names in `:repo`. To compare branches across other repositories in the same network as `:repo`, use the format `:branch`. + * + * The response from the API is equivalent to running the `git log base..head` command; however, commits are returned in chronological order. Pass the appropriate [media type](https://docs.github.com/rest/overview/media-types/#commits-commit-comparison-and-pull-requests) to fetch diff and patch formats. + * + * The response also includes details on the files that were changed between the two commits. This includes the status of the change (for example, if a file was added, removed, modified, or renamed), and details of the change itself. For example, files with a `renamed` status have a `previous_filename` field showing the previous filename of the file, and files with a `modified` status have a `patch` field showing the changes made to the file. + * + * **Working with large comparisons** + * + * To process a response with a large number of commits, you can use (`per_page` or `page`) to paginate the results. When using paging, the list of changed files is only returned with page 1, but includes all changed files for the entire comparison. For more information on working with pagination, see "[Traversing with pagination](/rest/guides/traversing-with-pagination)." + * + * When calling this API without any paging parameters (`per_page` or `page`), the returned list is limited to 250 commits and the last commit in the list is the most recent of the entire comparison. When a paging parameter is specified, the first commit in the returned list of each page is the earliest. + * + * **Signature verification object** + * + * The response will include a `verification` object that describes the result of verifying the commit's signature. The following fields are included in the `verification` object: + * + * | Name | Type | Description | + * | ---- | ---- | ----------- | + * | `verified` | `boolean` | Indicates whether GitHub considers the signature in this commit to be verified. | + * | `reason` | `string` | The reason for verified value. Possible values and their meanings are enumerated in table below. | + * | `signature` | `string` | The signature that was extracted from the commit. | + * | `payload` | `string` | The value that was signed. | + * + * These are the possible values for `reason` in the `verification` object: + * + * | Value | Description | + * | ----- | ----------- | + * | `expired_key` | The key that made the signature is expired. | + * | `not_signing_key` | The "signing" flag is not among the usage flags in the GPG key that made the signature. | + * | `gpgverify_error` | There was an error communicating with the signature verification service. | + * | `gpgverify_unavailable` | The signature verification service is currently unavailable. 
| + * | `unsigned` | The object does not include a signature. | + * | `unknown_signature_type` | A non-PGP signature was found in the commit. | + * | `no_user` | No user was associated with the `committer` email address in the commit. | + * | `unverified_email` | The `committer` email address in the commit was associated with a user, but the email address is not verified on her/his account. | + * | `bad_email` | The `committer` email address in the commit is not included in the identities of the PGP key that made the signature. | + * | `unknown_key` | The key that made the signature has not been registered with any user's account. | + * | `malformed_signature` | There was an error parsing the signature. | + * | `invalid` | The signature could not be cryptographically verified using the key whose key-id was found in the signature. | + * | `valid` | None of the above errors applied, so the signature is considered to be verified. | + */ + get: operations["repos/compare-commits"]; + }; +} + +export interface components { + schemas: { + root: { + /** Format: uri-template */ + current_user_url: string; + /** Format: uri-template */ + current_user_authorizations_html_url: string; + /** Format: uri-template */ + authorizations_url: string; + /** Format: uri-template */ + code_search_url: string; + /** Format: uri-template */ + commit_search_url: string; + /** Format: uri-template */ + emails_url: string; + /** Format: uri-template */ + emojis_url: string; + /** Format: uri-template */ + events_url: string; + /** Format: uri-template */ + feeds_url: string; + /** Format: uri-template */ + followers_url: string; + /** Format: uri-template */ + following_url: string; + /** Format: uri-template */ + gists_url: string; + /** Format: uri-template */ + hub_url: string; + /** Format: uri-template */ + issue_search_url: string; + /** Format: uri-template */ + issues_url: string; + /** Format: uri-template */ + keys_url: string; + /** Format: uri-template */ + label_search_url: string; + /** Format: uri-template */ + notifications_url: string; + /** Format: uri-template */ + organization_url: string; + /** Format: uri-template */ + organization_repositories_url: string; + /** Format: uri-template */ + organization_teams_url: string; + /** Format: uri-template */ + public_gists_url: string; + /** Format: uri-template */ + rate_limit_url: string; + /** Format: uri-template */ + repository_url: string; + /** Format: uri-template */ + repository_search_url: string; + /** Format: uri-template */ + current_user_repositories_url: string; + /** Format: uri-template */ + starred_url: string; + /** Format: uri-template */ + starred_gists_url: string; + /** Format: uri-template */ + topic_search_url?: string; + /** Format: uri-template */ + user_url: string; + /** Format: uri-template */ + user_organizations_url: string; + /** Format: uri-template */ + user_repositories_url: string; + /** Format: uri-template */ + user_search_url: string; + }; + /** + * Simple User + * @description Simple User + */ + "nullable-simple-user": { + name?: string | null; + email?: string | null; + /** @example octocat */ + login: string; + /** @example 1 */ + id: number; + /** @example MDQ6VXNlcjE= */ + node_id: string; + /** + * Format: uri + * @example https://github.com/images/error/octocat_happy.gif + */ + avatar_url: string; + /** @example 41d064eb2195891e12d0413f63227ea7 */ + gravatar_id: string | null; + /** + * Format: uri + * @example https://api.github.com/users/octocat + */ + url: string; + /** + * Format: uri + * @example 
https://github.com/octocat + */ + html_url: string; + /** + * Format: uri + * @example https://api.github.com/users/octocat/followers + */ + followers_url: string; + /** @example https://api.github.com/users/octocat/following{/other_user} */ + following_url: string; + /** @example https://api.github.com/users/octocat/gists{/gist_id} */ + gists_url: string; + /** @example https://api.github.com/users/octocat/starred{/owner}{/repo} */ + starred_url: string; + /** + * Format: uri + * @example https://api.github.com/users/octocat/subscriptions + */ + subscriptions_url: string; + /** + * Format: uri + * @example https://api.github.com/users/octocat/orgs + */ + organizations_url: string; + /** + * Format: uri + * @example https://api.github.com/users/octocat/repos + */ + repos_url: string; + /** @example https://api.github.com/users/octocat/events{/privacy} */ + events_url: string; + /** + * Format: uri + * @example https://api.github.com/users/octocat/received_events + */ + received_events_url: string; + /** @example User */ + type: string; + site_admin: boolean; + /** @example "2020-07-09T00:17:55Z" */ + starred_at?: string; + } | null; + /** + * GitHub app + * @description GitHub apps are a new way to extend GitHub. They can be installed directly on organizations and user accounts and granted access to specific repositories. They come with granular permissions and built-in webhooks. GitHub apps are first class actors within GitHub. + */ + integration: { + /** + * @description Unique identifier of the GitHub app + * @example 37 + */ + id: number; + /** + * @description The slug name of the GitHub app + * @example probot-owners + */ + slug?: string; + /** @example MDExOkludGVncmF0aW9uMQ== */ + node_id: string; + owner: components["schemas"]["nullable-simple-user"]; + /** + * @description The name of the GitHub app + * @example Probot Owners + */ + name: string; + /** @example The description of the app. 
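As a hedged sketch of how the `nullable-simple-user` schema declared above might be consumed from the generated types (the `@octokit/openapi-types` import path and the `describeUser` helper are assumptions for illustration, not part of this file):

```ts
import type { components } from "@octokit/openapi-types"; // assumed package path

type MaybeUser = components["schemas"]["nullable-simple-user"];

// Narrow away the null case before touching required fields.
function describeUser(user: MaybeUser): string {
  if (user === null) {
    return "no associated user";
  }
  // `login`, `id` and `html_url` are required on the schema.
  return `${user.login} (#${user.id}) at ${user.html_url}`;
}
```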
*/ + description: string | null; + /** + * Format: uri + * @example https://example.com + */ + external_url: string; + /** + * Format: uri + * @example https://github.com/apps/super-ci + */ + html_url: string; + /** + * Format: date-time + * @example 2017-07-08T16:18:44-04:00 + */ + created_at: string; + /** + * Format: date-time + * @example 2017-07-08T16:18:44-04:00 + */ + updated_at: string; + /** + * @description The set of permissions for the GitHub app + * @example { + * "issues": "read", + * "deployments": "write" + * } + */ + permissions: { + issues?: string; + checks?: string; + metadata?: string; + contents?: string; + deployments?: string; + } & { [key: string]: string }; + /** + * @description The list of events for the GitHub app + * @example [ + * "label", + * "deployment" + * ] + */ + events: string[]; + /** + * @description The number of installations associated with the GitHub app + * @example 5 + */ + installations_count?: number; + /** @example "Iv1.25b5d1e65ffc4022" */ + client_id?: string; + /** @example "1d4b2097ac622ba702d19de498f005747a8b21d3" */ + client_secret?: string; + /** @example "6fba8f2fc8a7e8f2cca5577eddd82ca7586b3b6b" */ + webhook_secret?: string | null; + /** @example "-----BEGIN RSA PRIVATE KEY-----\nMIIEogIBAAKCAQEArYxrNYD/iT5CZVpRJu4rBKmmze3PVmT/gCo2ATUvDvZTPTey\nxcGJ3vvrJXazKk06pN05TN29o98jrYz4cengG3YGsXPNEpKsIrEl8NhbnxapEnM9\nJCMRe0P5JcPsfZlX6hmiT7136GRWiGOUba2X9+HKh8QJVLG5rM007TBER9/z9mWm\nrJuNh+m5l320oBQY/Qq3A7wzdEfZw8qm/mIN0FCeoXH1L6B8xXWaAYBwhTEh6SSn\nZHlO1Xu1JWDmAvBCi0RO5aRSKM8q9QEkvvHP4yweAtK3N8+aAbZ7ovaDhyGz8r6r\nzhU1b8Uo0Z2ysf503WqzQgIajr7Fry7/kUwpgQIDAQABAoIBADwJp80Ko1xHPZDy\nfcCKBDfIuPvkmSW6KumbsLMaQv1aGdHDwwTGv3t0ixSay8CGlxMRtRDyZPib6SvQ\n6OH/lpfpbMdW2ErkksgtoIKBVrDilfrcAvrNZu7NxRNbhCSvN8q0s4ICecjbbVQh\nnueSdlA6vGXbW58BHMq68uRbHkP+k+mM9U0mDJ1HMch67wlg5GbayVRt63H7R2+r\nVxcna7B80J/lCEjIYZznawgiTvp3MSanTglqAYi+m1EcSsP14bJIB9vgaxS79kTu\noiSo93leJbBvuGo8QEiUqTwMw4tDksmkLsoqNKQ1q9P7LZ9DGcujtPy4EZsamSJT\ny8OJt0ECgYEA2lxOxJsQk2kI325JgKFjo92mQeUObIvPfSNWUIZQDTjniOI6Gv63\nGLWVFrZcvQBWjMEQraJA9xjPbblV8PtfO87MiJGLWCHFxmPz2dzoedN+2Coxom8m\nV95CLz8QUShuao6u/RYcvUaZEoYs5bHcTmy5sBK80JyEmafJPtCQVxMCgYEAy3ar\nZr3yv4xRPEPMat4rseswmuMooSaK3SKub19WFI5IAtB/e7qR1Rj9JhOGcZz+OQrl\nT78O2OFYlgOIkJPvRMrPpK5V9lslc7tz1FSh3BZMRGq5jSyD7ETSOQ0c8T2O/s7v\nbeEPbVbDe4mwvM24XByH0GnWveVxaDl51ABD65sCgYB3ZAspUkOA5egVCh8kNpnd\nSd6SnuQBE3ySRlT2WEnCwP9Ph6oPgn+oAfiPX4xbRqkL8q/k0BdHQ4h+zNwhk7+h\nWtPYRAP1Xxnc/F+jGjb+DVaIaKGU18MWPg7f+FI6nampl3Q0KvfxwX0GdNhtio8T\nTj1E+SnFwh56SRQuxSh2gwKBgHKjlIO5NtNSflsUYFM+hyQiPiqnHzddfhSG+/3o\nm5nNaSmczJesUYreH5San7/YEy2UxAugvP7aSY2MxB+iGsiJ9WD2kZzTUlDZJ7RV\nUzWsoqBR+eZfVJ2FUWWvy8TpSG6trh4dFxImNtKejCR1TREpSiTV3Zb1dmahK9GV\nrK9NAoGAbBxRLoC01xfxCTgt5BDiBcFVh4fp5yYKwavJPLzHSpuDOrrI9jDn1oKN\nonq5sDU1i391zfQvdrbX4Ova48BN+B7p63FocP/MK5tyyBoT8zQEk2+vWDOw7H/Z\nu5dTCPxTIsoIwUw1I+7yIxqJzLPFgR2gVBwY1ra/8iAqCj+zeBw=\n-----END RSA PRIVATE KEY-----\n" */ + pem?: string; + }; + /** + * Basic Error + * @description Basic Error + */ + "basic-error": { + message?: string; + documentation_url?: string; + url?: string; + status?: string; + }; + /** + * Validation Error Simple + * @description Validation Error Simple + */ + "validation-error-simple": { + message: string; + documentation_url: string; + errors?: string[]; + }; + /** + * Format: uri + * @description The URL to which the payloads will be delivered. + * @example https://example.com/webhook + */ + "webhook-config-url": string; + /** + * @description The media type used to serialize the payloads. 
Supported values include `json` and `form`. The default is `form`. + * @example "json" + */ + "webhook-config-content-type": string; + /** + * @description If provided, the `secret` will be used as the `key` to generate the HMAC hex digest value for [delivery signature headers](https://docs.github.com/webhooks/event-payloads/#delivery-headers). + * @example "********" + */ + "webhook-config-secret": string; + "webhook-config-insecure-ssl": string | number; + /** + * Webhook Configuration + * @description Configuration object of the webhook + */ + "webhook-config": { + url?: components["schemas"]["webhook-config-url"]; + content_type?: components["schemas"]["webhook-config-content-type"]; + secret?: components["schemas"]["webhook-config-secret"]; + insecure_ssl?: components["schemas"]["webhook-config-insecure-ssl"]; + }; + /** + * Simple webhook delivery + * @description Delivery made by a webhook, without request and response information. + */ + "hook-delivery-item": { + /** + * @description Unique identifier of the webhook delivery. + * @example 42 + */ + id: number; + /** + * @description Unique identifier for the event (shared with all deliveries for all webhooks that subscribe to this event). + * @example 58474f00-b361-11eb-836d-0e4f3503ccbe + */ + guid: string; + /** + * Format: date-time + * @description Time when the webhook delivery occurred. + * @example 2021-05-12T20:33:44Z + */ + delivered_at: string; + /** + * @description Whether the webhook delivery is a redelivery. + * @example false + */ + redelivery: boolean; + /** + * @description Time spent delivering. + * @example 0.03 + */ + duration: number; + /** + * @description Describes the response returned after attempting the delivery. + * @example failed to connect + */ + status: string; + /** + * @description Status code received when delivery was made. + * @example 502 + */ + status_code: number; + /** + * @description The event that triggered the delivery. + * @example issues + */ + event: string; + /** + * @description The type of activity for the event that triggered the delivery. + * @example opened + */ + action: string | null; + /** + * @description The id of the GitHub App installation associated with this event. + * @example 123 + */ + installation_id: number | null; + /** + * @description The id of the repository associated with this event. + * @example 123 + */ + repository_id: number | null; + }; + /** + * Scim Error + * @description Scim Error + */ + "scim-error": { + message?: string | null; + documentation_url?: string | null; + detail?: string | null; + status?: number; + scimType?: string | null; + schemas?: string[]; + }; + /** + * Validation Error + * @description Validation Error + */ + "validation-error": { + message: string; + documentation_url: string; + errors?: { + resource?: string; + field?: string; + message?: string; + code: string; + index?: number; + value?: (string | null) | (number | null) | (string[] | null); + }[]; + }; + /** + * Webhook delivery + * @description Delivery made by a webhook. + */ + "hook-delivery": { + /** + * @description Unique identifier of the delivery. + * @example 42 + */ + id: number; + /** + * @description Unique identifier for the event (shared with all deliveries for all webhooks that subscribe to this event). + * @example 58474f00-b361-11eb-836d-0e4f3503ccbe + */ + guid: string; + /** + * Format: date-time + * @description Time when the delivery was delivered. 
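A minimal sketch of a value conforming to the `webhook-config` schema declared above, assuming these declarations are exported as `components` from `@octokit/openapi-types` (the import path and the `config` constant are illustrative assumptions):

```ts
import type { components } from "@octokit/openapi-types"; // assumed package path

type WebhookConfig = components["schemas"]["webhook-config"];

// Every field is optional; `content_type` defaults to "form" and
// `insecure_ssl` accepts either a string or a number.
export const config: WebhookConfig = {
  url: "https://example.com/webhook",
  content_type: "json",
  secret: "********",
  insecure_ssl: 0,
};
```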
+ * @example 2021-05-12T20:33:44Z + */ + delivered_at: string; + /** + * @description Whether the delivery is a redelivery. + * @example false + */ + redelivery: boolean; + /** + * @description Time spent delivering. + * @example 0.03 + */ + duration: number; + /** + * @description Description of the status of the attempted delivery + * @example failed to connect + */ + status: string; + /** + * @description Status code received when delivery was made. + * @example 502 + */ + status_code: number; + /** + * @description The event that triggered the delivery. + * @example issues + */ + event: string; + /** + * @description The type of activity for the event that triggered the delivery. + * @example opened + */ + action: string | null; + /** + * @description The id of the GitHub App installation associated with this event. + * @example 123 + */ + installation_id: number | null; + /** + * @description The id of the repository associated with this event. + * @example 123 + */ + repository_id: number | null; + /** + * @description The URL target of the delivery. + * @example https://www.example.com + */ + url?: string; + request: { + /** @description The request headers sent with the webhook delivery. */ + headers: { [key: string]: unknown } | null; + /** @description The webhook payload. */ + payload: { [key: string]: unknown } | null; + }; + response: { + /** @description The response headers received when the delivery was made. */ + headers: { [key: string]: unknown } | null; + /** @description The response payload received. */ + payload: string | null; + }; + }; + /** + * Simple User + * @description Simple User + */ + "simple-user": { + name?: string | null; + email?: string | null; + /** @example octocat */ + login: string; + /** @example 1 */ + id: number; + /** @example MDQ6VXNlcjE= */ + node_id: string; + /** + * Format: uri + * @example https://github.com/images/error/octocat_happy.gif + */ + avatar_url: string; + /** @example 41d064eb2195891e12d0413f63227ea7 */ + gravatar_id: string | null; + /** + * Format: uri + * @example https://api.github.com/users/octocat + */ + url: string; + /** + * Format: uri + * @example https://github.com/octocat + */ + html_url: string; + /** + * Format: uri + * @example https://api.github.com/users/octocat/followers + */ + followers_url: string; + /** @example https://api.github.com/users/octocat/following{/other_user} */ + following_url: string; + /** @example https://api.github.com/users/octocat/gists{/gist_id} */ + gists_url: string; + /** @example https://api.github.com/users/octocat/starred{/owner}{/repo} */ + starred_url: string; + /** + * Format: uri + * @example https://api.github.com/users/octocat/subscriptions + */ + subscriptions_url: string; + /** + * Format: uri + * @example https://api.github.com/users/octocat/orgs + */ + organizations_url: string; + /** + * Format: uri + * @example https://api.github.com/users/octocat/repos + */ + repos_url: string; + /** @example https://api.github.com/users/octocat/events{/privacy} */ + events_url: string; + /** + * Format: uri + * @example https://api.github.com/users/octocat/received_events + */ + received_events_url: string; + /** @example User */ + type: string; + site_admin: boolean; + /** @example "2020-07-09T00:17:55Z" */ + starred_at?: string; + }; + /** + * Enterprise + * @description An enterprise account + */ + enterprise: { + /** @description A short description of the enterprise. 
*/ + description?: string | null; + /** + * Format: uri + * @example https://github.com/enterprises/octo-business + */ + html_url: string; + /** + * Format: uri + * @description The enterprise's website URL. + */ + website_url?: string | null; + /** + * @description Unique identifier of the enterprise + * @example 42 + */ + id: number; + /** @example MDEwOlJlcG9zaXRvcnkxMjk2MjY5 */ + node_id: string; + /** + * @description The name of the enterprise. + * @example Octo Business + */ + name: string; + /** + * @description The slug url identifier for the enterprise. + * @example octo-business + */ + slug: string; + /** + * Format: date-time + * @example 2019-01-26T19:01:12Z + */ + created_at: string | null; + /** + * Format: date-time + * @example 2019-01-26T19:14:43Z + */ + updated_at: string | null; + /** Format: uri */ + avatar_url: string; + }; + /** + * App Permissions + * @description The permissions granted to the user-to-server access token. + * @example { + * "contents": "read", + * "issues": "read", + * "deployments": "write", + * "single_file": "read" + * } + */ + "app-permissions": { + /** + * @description The level of permission to grant the access token for GitHub Actions workflows, workflow runs, and artifacts. + * @enum {string} + */ + actions?: "read" | "write"; + /** + * @description The level of permission to grant the access token for repository creation, deletion, settings, teams, and collaborators creation. + * @enum {string} + */ + administration?: "read" | "write"; + /** + * @description The level of permission to grant the access token for checks on code. + * @enum {string} + */ + checks?: "read" | "write"; + /** + * @description The level of permission to grant the access token for repository contents, commits, branches, downloads, releases, and merges. + * @enum {string} + */ + contents?: "read" | "write"; + /** + * @description The level of permission to grant the access token for deployments and deployment statuses. + * @enum {string} + */ + deployments?: "read" | "write"; + /** + * @description The level of permission to grant the access token for managing repository environments. + * @enum {string} + */ + environments?: "read" | "write"; + /** + * @description The level of permission to grant the access token for issues and related comments, assignees, labels, and milestones. + * @enum {string} + */ + issues?: "read" | "write"; + /** + * @description The level of permission to grant the access token to search repositories, list collaborators, and access repository metadata. + * @enum {string} + */ + metadata?: "read" | "write"; + /** + * @description The level of permission to grant the access token for packages published to GitHub Packages. + * @enum {string} + */ + packages?: "read" | "write"; + /** + * @description The level of permission to grant the access token to retrieve Pages statuses, configuration, and builds, as well as create new builds. + * @enum {string} + */ + pages?: "read" | "write"; + /** + * @description The level of permission to grant the access token for pull requests and related comments, assignees, labels, milestones, and merges. + * @enum {string} + */ + pull_requests?: "read" | "write"; + /** + * @description The level of permission to grant the access token to manage the post-receive hooks for a repository. + * @enum {string} + */ + repository_hooks?: "read" | "write"; + /** + * @description The level of permission to grant the access token to manage repository projects, columns, and cards. 
+ * @enum {string} + */ + repository_projects?: "read" | "write" | "admin"; + /** + * @description The level of permission to grant the access token to view and manage secret scanning alerts. + * @enum {string} + */ + secret_scanning_alerts?: "read" | "write"; + /** + * @description The level of permission to grant the access token to manage repository secrets. + * @enum {string} + */ + secrets?: "read" | "write"; + /** + * @description The level of permission to grant the access token to view and manage security events like code scanning alerts. + * @enum {string} + */ + security_events?: "read" | "write"; + /** + * @description The level of permission to grant the access token to manage just a single file. + * @enum {string} + */ + single_file?: "read" | "write"; + /** + * @description The level of permission to grant the access token for commit statuses. + * @enum {string} + */ + statuses?: "read" | "write"; + /** + * @description The level of permission to grant the access token to manage Dependabot alerts. + * @enum {string} + */ + vulnerability_alerts?: "read" | "write"; + /** + * @description The level of permission to grant the access token to update GitHub Actions workflow files. + * @enum {string} + */ + workflows?: "write"; + /** + * @description The level of permission to grant the access token for organization teams and members. + * @enum {string} + */ + members?: "read" | "write"; + /** + * @description The level of permission to grant the access token to manage access to an organization. + * @enum {string} + */ + organization_administration?: "read" | "write"; + /** + * @description The level of permission to grant the access token to manage the post-receive hooks for an organization. + * @enum {string} + */ + organization_hooks?: "read" | "write"; + /** + * @description The level of permission to grant the access token for viewing an organization's plan. + * @enum {string} + */ + organization_plan?: "read"; + /** + * @description The level of permission to grant the access token to manage organization projects and projects beta (where available). + * @enum {string} + */ + organization_projects?: "read" | "write" | "admin"; + /** + * @description The level of permission to grant the access token for organization packages published to GitHub Packages. + * @enum {string} + */ + organization_packages?: "read" | "write"; + /** + * @description The level of permission to grant the access token to manage organization secrets. + * @enum {string} + */ + organization_secrets?: "read" | "write"; + /** + * @description The level of permission to grant the access token to view and manage GitHub Actions self-hosted runners available to an organization. + * @enum {string} + */ + organization_self_hosted_runners?: "read" | "write"; + /** + * @description The level of permission to grant the access token to view and manage users blocked by the organization. + * @enum {string} + */ + organization_user_blocking?: "read" | "write"; + /** + * @description The level of permission to grant the access token to manage team discussions and related comments. + * @enum {string} + */ + team_discussions?: "read" | "write"; + }; + /** + * Installation + * @description Installation + */ + installation: { + /** + * @description The ID of the installation. 
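A sketch of a value typed against the `app-permissions` schema declared above, for instance when scoping an installation access token; the `@octokit/openapi-types` import path and the `requestedPermissions` name are assumptions:

```ts
import type { components } from "@octokit/openapi-types"; // assumed package path

type AppPermissions = components["schemas"]["app-permissions"];

// Every key is optional and constrained to the enums declared above,
// e.g. `contents` is "read" | "write" while `workflows` only allows "write".
export const requestedPermissions: AppPermissions = {
  contents: "read",
  issues: "write",
  workflows: "write",
};
```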
+ * @example 1 + */ + id: number; + account: + | (Partial & + Partial) + | null; + /** + * @description Describe whether all repositories have been selected or there's a selection involved + * @enum {string} + */ + repository_selection: "all" | "selected"; + /** + * Format: uri + * @example https://api.github.com/installations/1/access_tokens + */ + access_tokens_url: string; + /** + * Format: uri + * @example https://api.github.com/installation/repositories + */ + repositories_url: string; + /** + * Format: uri + * @example https://github.com/organizations/github/settings/installations/1 + */ + html_url: string; + /** @example 1 */ + app_id: number; + /** @description The ID of the user or organization this token is being scoped to. */ + target_id: number; + /** @example Organization */ + target_type: string; + permissions: components["schemas"]["app-permissions"]; + events: string[]; + /** Format: date-time */ + created_at: string; + /** Format: date-time */ + updated_at: string; + /** @example config.yaml */ + single_file_name: string | null; + /** @example true */ + has_multiple_single_files?: boolean; + /** + * @example [ + * "config.yml", + * ".github/issue_TEMPLATE.md" + * ] + */ + single_file_paths?: string[]; + /** @example github-actions */ + app_slug: string; + suspended_by: components["schemas"]["nullable-simple-user"]; + /** Format: date-time */ + suspended_at: string | null; + /** @example "test_13f1e99741e3e004@d7e1eb0bc0a1ba12.com" */ + contact_email?: string | null; + }; + /** + * License Simple + * @description License Simple + */ + "nullable-license-simple": { + /** @example mit */ + key: string; + /** @example MIT License */ + name: string; + /** + * Format: uri + * @example https://api.github.com/licenses/mit + */ + url: string | null; + /** @example MIT */ + spdx_id: string | null; + /** @example MDc6TGljZW5zZW1pdA== */ + node_id: string; + /** Format: uri */ + html_url?: string; + } | null; + /** + * Repository + * @description A git repository + */ + repository: { + /** + * @description Unique identifier of the repository + * @example 42 + */ + id: number; + /** @example MDEwOlJlcG9zaXRvcnkxMjk2MjY5 */ + node_id: string; + /** + * @description The name of the repository. + * @example Team Environment + */ + name: string; + /** @example octocat/Hello-World */ + full_name: string; + license: components["schemas"]["nullable-license-simple"]; + organization?: components["schemas"]["nullable-simple-user"]; + forks: number; + permissions?: { + admin: boolean; + pull: boolean; + triage?: boolean; + push: boolean; + maintain?: boolean; + }; + owner: components["schemas"]["simple-user"]; + /** + * @description Whether the repository is private or public. + * @default false + */ + private: boolean; + /** + * Format: uri + * @example https://github.com/octocat/Hello-World + */ + html_url: string; + /** @example This your first repo! 
*/ + description: string | null; + fork: boolean; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World + */ + url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/{archive_format}{/ref} */ + archive_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/assignees{/user} */ + assignees_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/git/blobs{/sha} */ + blobs_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/branches{/branch} */ + branches_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/collaborators{/collaborator} */ + collaborators_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/comments{/number} */ + comments_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/commits{/sha} */ + commits_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/compare/{base}...{head} */ + compare_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/contents/{+path} */ + contents_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/contributors + */ + contributors_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/deployments + */ + deployments_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/downloads + */ + downloads_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/events + */ + events_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/forks + */ + forks_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/git/commits{/sha} */ + git_commits_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/git/refs{/sha} */ + git_refs_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/git/tags{/sha} */ + git_tags_url: string; + /** @example git:github.com/octocat/Hello-World.git */ + git_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/issues/comments{/number} */ + issue_comment_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/issues/events{/number} */ + issue_events_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/issues{/number} */ + issues_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/keys{/key_id} */ + keys_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/labels{/name} */ + labels_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/languages + */ + languages_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/merges + */ + merges_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/milestones{/number} */ + milestones_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/notifications{?since,all,participating} */ + notifications_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/pulls{/number} */ + pulls_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/releases{/id} */ + releases_url: string; + /** @example git@github.com:octocat/Hello-World.git */ + ssh_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/stargazers 
+ */ + stargazers_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/statuses/{sha} */ + statuses_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/subscribers + */ + subscribers_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/subscription + */ + subscription_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/tags + */ + tags_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/teams + */ + teams_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/git/trees{/sha} */ + trees_url: string; + /** @example https://github.com/octocat/Hello-World.git */ + clone_url: string; + /** + * Format: uri + * @example git:git.example.com/octocat/Hello-World + */ + mirror_url: string | null; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/hooks + */ + hooks_url: string; + /** + * Format: uri + * @example https://svn.github.com/octocat/Hello-World + */ + svn_url: string; + /** + * Format: uri + * @example https://github.com + */ + homepage: string | null; + language: string | null; + /** @example 9 */ + forks_count: number; + /** @example 80 */ + stargazers_count: number; + /** @example 80 */ + watchers_count: number; + /** @example 108 */ + size: number; + /** + * @description The default branch of the repository. + * @example master + */ + default_branch: string; + /** @example 0 */ + open_issues_count: number; + /** + * @description Whether this repository acts as a template that can be used to generate new repositories. + * @default false + * @example true + */ + is_template?: boolean; + topics?: string[]; + /** + * @description Whether issues are enabled. + * @default true + * @example true + */ + has_issues: boolean; + /** + * @description Whether projects are enabled. + * @default true + * @example true + */ + has_projects: boolean; + /** + * @description Whether the wiki is enabled. + * @default true + * @example true + */ + has_wiki: boolean; + has_pages: boolean; + /** + * @description Whether downloads are enabled. + * @default true + * @example true + */ + has_downloads: boolean; + /** + * @description Whether the repository is archived. + * @default false + */ + archived: boolean; + /** @description Returns whether or not this repository disabled. */ + disabled: boolean; + /** + * @description The repository visibility: public, private, or internal. + * @default public + */ + visibility?: string; + /** + * Format: date-time + * @example 2011-01-26T19:06:43Z + */ + pushed_at: string | null; + /** + * Format: date-time + * @example 2011-01-26T19:01:12Z + */ + created_at: string | null; + /** + * Format: date-time + * @example 2011-01-26T19:14:43Z + */ + updated_at: string | null; + /** + * @description Whether to allow rebase merges for pull requests. 
+ * @default true + * @example true + */ + allow_rebase_merge?: boolean; + template_repository?: { + id?: number; + node_id?: string; + name?: string; + full_name?: string; + owner?: { + login?: string; + id?: number; + node_id?: string; + avatar_url?: string; + gravatar_id?: string; + url?: string; + html_url?: string; + followers_url?: string; + following_url?: string; + gists_url?: string; + starred_url?: string; + subscriptions_url?: string; + organizations_url?: string; + repos_url?: string; + events_url?: string; + received_events_url?: string; + type?: string; + site_admin?: boolean; + }; + private?: boolean; + html_url?: string; + description?: string; + fork?: boolean; + url?: string; + archive_url?: string; + assignees_url?: string; + blobs_url?: string; + branches_url?: string; + collaborators_url?: string; + comments_url?: string; + commits_url?: string; + compare_url?: string; + contents_url?: string; + contributors_url?: string; + deployments_url?: string; + downloads_url?: string; + events_url?: string; + forks_url?: string; + git_commits_url?: string; + git_refs_url?: string; + git_tags_url?: string; + git_url?: string; + issue_comment_url?: string; + issue_events_url?: string; + issues_url?: string; + keys_url?: string; + labels_url?: string; + languages_url?: string; + merges_url?: string; + milestones_url?: string; + notifications_url?: string; + pulls_url?: string; + releases_url?: string; + ssh_url?: string; + stargazers_url?: string; + statuses_url?: string; + subscribers_url?: string; + subscription_url?: string; + tags_url?: string; + teams_url?: string; + trees_url?: string; + clone_url?: string; + mirror_url?: string; + hooks_url?: string; + svn_url?: string; + homepage?: string; + language?: string; + forks_count?: number; + stargazers_count?: number; + watchers_count?: number; + size?: number; + default_branch?: string; + open_issues_count?: number; + is_template?: boolean; + topics?: string[]; + has_issues?: boolean; + has_projects?: boolean; + has_wiki?: boolean; + has_pages?: boolean; + has_downloads?: boolean; + archived?: boolean; + disabled?: boolean; + visibility?: string; + pushed_at?: string; + created_at?: string; + updated_at?: string; + permissions?: { + admin?: boolean; + maintain?: boolean; + push?: boolean; + triage?: boolean; + pull?: boolean; + }; + allow_rebase_merge?: boolean; + temp_clone_token?: string; + allow_squash_merge?: boolean; + allow_auto_merge?: boolean; + delete_branch_on_merge?: boolean; + allow_update_branch?: boolean; + use_squash_pr_title_as_default?: boolean; + allow_merge_commit?: boolean; + subscribers_count?: number; + network_count?: number; + } | null; + temp_clone_token?: string; + /** + * @description Whether to allow squash merges for pull requests. + * @default true + * @example true + */ + allow_squash_merge?: boolean; + /** + * @description Whether to allow Auto-merge to be used on pull requests. + * @default false + * @example false + */ + allow_auto_merge?: boolean; + /** + * @description Whether to delete head branches when pull requests are merged + * @default false + * @example false + */ + delete_branch_on_merge?: boolean; + /** + * @description Whether or not a pull request head branch that is behind its base branch can always be updated even if it is not required to be up to date before merging. + * @default false + * @example false + */ + allow_update_branch?: boolean; + /** + * @description Whether a squash merge commit can use the pull request title as default. 
+ * @default false + */ + use_squash_pr_title_as_default?: boolean; + /** + * @description Whether to allow merge commits for pull requests. + * @default true + * @example true + */ + allow_merge_commit?: boolean; + /** @description Whether to allow forking this repo */ + allow_forking?: boolean; + subscribers_count?: number; + network_count?: number; + open_issues: number; + watchers: number; + master_branch?: string; + /** @example "2020-07-09T00:17:42Z" */ + starred_at?: string; + }; + /** + * Installation Token + * @description Authentication token for a GitHub App installed on a user or org. + */ + "installation-token": { + token: string; + expires_at: string; + permissions?: components["schemas"]["app-permissions"]; + /** @enum {string} */ + repository_selection?: "all" | "selected"; + repositories?: components["schemas"]["repository"][]; + /** @example README.md */ + single_file?: string; + /** @example true */ + has_multiple_single_files?: boolean; + /** + * @example [ + * "config.yml", + * ".github/issue_TEMPLATE.md" + * ] + */ + single_file_paths?: string[]; + }; + /** + * Application Grant + * @description The authorization associated with an OAuth Access. + */ + "application-grant": { + /** @example 1 */ + id: number; + /** + * Format: uri + * @example https://api.github.com/applications/grants/1 + */ + url: string; + app: { + client_id: string; + name: string; + /** Format: uri */ + url: string; + }; + /** + * Format: date-time + * @example 2011-09-06T17:26:27Z + */ + created_at: string; + /** + * Format: date-time + * @example 2011-09-06T20:39:23Z + */ + updated_at: string; + /** + * @example [ + * "public_repo" + * ] + */ + scopes: string[]; + user?: components["schemas"]["nullable-simple-user"]; + }; + /** Scoped Installation */ + "nullable-scoped-installation": { + permissions: components["schemas"]["app-permissions"]; + /** + * @description Describe whether all repositories have been selected or there's a selection involved + * @enum {string} + */ + repository_selection: "all" | "selected"; + /** @example config.yaml */ + single_file_name: string | null; + /** @example true */ + has_multiple_single_files?: boolean; + /** + * @example [ + * "config.yml", + * ".github/issue_TEMPLATE.md" + * ] + */ + single_file_paths?: string[]; + /** + * Format: uri + * @example https://api.github.com/users/octocat/repos + */ + repositories_url: string; + account: components["schemas"]["simple-user"]; + } | null; + /** + * Authorization + * @description The authorization for an OAuth app, GitHub App, or a Personal Access Token. + */ + authorization: { + id: number; + /** Format: uri */ + url: string; + /** @description A list of scopes that this authorization is in. 
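A minimal sketch of consuming the `installation-token` schema declared above, including the narrowing that the optional `repositories` array invites (the `@octokit/openapi-types` import path and the `scopedRepoNames` helper are assumptions):

```ts
import type { components } from "@octokit/openapi-types"; // assumed package path

type InstallationToken = components["schemas"]["installation-token"];

export function scopedRepoNames(token: InstallationToken): string[] {
  // `repositories` is only populated when the token is limited to a
  // selection of repositories (`repository_selection: "selected"`).
  if (token.repository_selection === "selected" && token.repositories) {
    return token.repositories.map((repo) => repo.full_name);
  }
  return [];
}
```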
*/ + scopes: string[] | null; + token: string; + token_last_eight: string | null; + hashed_token: string | null; + app: { + client_id: string; + name: string; + /** Format: uri */ + url: string; + }; + note: string | null; + /** Format: uri */ + note_url: string | null; + /** Format: date-time */ + updated_at: string; + /** Format: date-time */ + created_at: string; + fingerprint: string | null; + user?: components["schemas"]["nullable-simple-user"]; + installation?: components["schemas"]["nullable-scoped-installation"]; + /** Format: date-time */ + expires_at: string | null; + }; + /** + * Code Of Conduct + * @description Code Of Conduct + */ + "code-of-conduct": { + /** @example contributor_covenant */ + key: string; + /** @example Contributor Covenant */ + name: string; + /** + * Format: uri + * @example https://api.github.com/codes_of_conduct/contributor_covenant + */ + url: string; + /** + * @example # Contributor Covenant Code of Conduct + * + * ## Our Pledge + * + * In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to making participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, gender identity and expression, level of experience, nationality, personal appearance, race, religion, or sexual identity and orientation. + * + * ## Our Standards + * + * Examples of behavior that contributes to creating a positive environment include: + * + * * Using welcoming and inclusive language + * * Being respectful of differing viewpoints and experiences + * * Gracefully accepting constructive criticism + * * Focusing on what is best for the community + * * Showing empathy towards other community members + * + * Examples of unacceptable behavior by participants include: + * + * * The use of sexualized language or imagery and unwelcome sexual attention or advances + * * Trolling, insulting/derogatory comments, and personal or political attacks + * * Public or private harassment + * * Publishing others' private information, such as a physical or electronic address, without explicit permission + * * Other conduct which could reasonably be considered inappropriate in a professional setting + * + * ## Our Responsibilities + * + * Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response + * to any instances of unacceptable behavior. + * + * Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful. + * + * ## Scope + * + * This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. Examples of representing a project or community include using an official project e-mail address, + * posting via an official social media account, or acting as an appointed representative at an online or offline event. Representation of a project may be further defined and clarified by project maintainers. + * + * ## Enforcement + * + * Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team at [EMAIL]. 
The project team will review and investigate all complaints, and will respond in a way that it deems appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately. + * + * Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership. + * + * ## Attribution + * + * This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, available at [http://contributor-covenant.org/version/1/4][version] + * + * [homepage]: http://contributor-covenant.org + * [version]: http://contributor-covenant.org/version/1/4/ + */ + body?: string; + /** Format: uri */ + html_url: string | null; + }; + /** + * Server Statistics Proxy Endpoint + * @description Response of S4 Proxy endpoint that provides GHES statistics + */ + "server-statistics": { + server_id?: string; + collection_date?: string; + schema_version?: string; + ghes_version?: string; + host_name?: string; + github_connect?: { + features_enabled?: string[]; + }; + ghe_stats?: { + comments?: { + total_commit_comments?: number; + total_gist_comments?: number; + total_issue_comments?: number; + total_pull_request_comments?: number; + }; + gists?: { + total_gists?: number; + private_gists?: number; + public_gists?: number; + }; + hooks?: { + total_hooks?: number; + active_hooks?: number; + inactive_hooks?: number; + }; + issues?: { + total_issues?: number; + open_issues?: number; + closed_issues?: number; + }; + milestones?: { + total_milestones?: number; + open_milestones?: number; + closed_milestones?: number; + }; + orgs?: { + total_orgs?: number; + disabled_orgs?: number; + total_teams?: number; + total_team_members?: number; + }; + pages?: { + total_pages?: number; + }; + pulls?: { + total_pulls?: number; + merged_pulls?: number; + mergeable_pulls?: number; + unmergeable_pulls?: number; + }; + repos?: { + total_repos?: number; + root_repos?: number; + fork_repos?: number; + org_repos?: number; + total_pushes?: number; + total_wikis?: number; + }; + users?: { + total_users?: number; + admin_users?: number; + suspended_users?: number; + }; + }; + dormant_users?: { + total_dormant_users?: number; + dormancy_threshold?: string; + }; + }; + "actions-cache-usage-org-enterprise": { + /** @description The count of active caches across all repositories of an enterprise or an organization. */ + total_active_caches_count: number; + /** @description The total size in bytes of all active cache items across all repositories of an enterprise or an organization. */ + total_active_caches_size_in_bytes: number; + }; + "actions-oidc-custom-issuer-policy-for-enterprise": { + /** + * @description Whether the enterprise customer requested a custom issuer URL. + * @example true + */ + include_enterprise_slug?: boolean; + }; + /** + * @description The policy that controls the organizations in the enterprise that are allowed to run GitHub Actions. + * @enum {string} + */ + "enabled-organizations": "all" | "none" | "selected"; + /** + * @description The permissions policy that controls the actions and reusable workflows that are allowed to run. + * @enum {string} + */ + "allowed-actions": "all" | "local_only" | "selected"; + /** @description The API URL to use to get or set the actions and reusable workflows that are allowed to run, when `allowed_actions` is set to `selected`. 
*/ + "selected-actions-url": string; + "actions-enterprise-permissions": { + enabled_organizations: components["schemas"]["enabled-organizations"]; + /** @description The API URL to use to get or set the selected organizations that are allowed to run GitHub Actions, when `enabled_organizations` is set to `selected`. */ + selected_organizations_url?: string; + allowed_actions?: components["schemas"]["allowed-actions"]; + selected_actions_url?: components["schemas"]["selected-actions-url"]; + }; + /** + * Organization Simple + * @description Organization Simple + */ + "organization-simple": { + /** @example github */ + login: string; + /** @example 1 */ + id: number; + /** @example MDEyOk9yZ2FuaXphdGlvbjE= */ + node_id: string; + /** + * Format: uri + * @example https://api.github.com/orgs/github + */ + url: string; + /** + * Format: uri + * @example https://api.github.com/orgs/github/repos + */ + repos_url: string; + /** + * Format: uri + * @example https://api.github.com/orgs/github/events + */ + events_url: string; + /** @example https://api.github.com/orgs/github/hooks */ + hooks_url: string; + /** @example https://api.github.com/orgs/github/issues */ + issues_url: string; + /** @example https://api.github.com/orgs/github/members{/member} */ + members_url: string; + /** @example https://api.github.com/orgs/github/public_members{/member} */ + public_members_url: string; + /** @example https://github.com/images/error/octocat_happy.gif */ + avatar_url: string; + /** @example A great organization */ + description: string | null; + }; + "selected-actions": { + /** @description Whether GitHub-owned actions are allowed. For example, this includes the actions in the `actions` organization. */ + github_owned_allowed?: boolean; + /** @description Whether actions from GitHub Marketplace verified creators are allowed. Set to `true` to allow all actions by GitHub Marketplace verified creators. */ + verified_allowed?: boolean; + /** @description Specifies a list of string-matching patterns to allow specific action(s) and reusable workflow(s). Wildcards, tags, and SHAs are allowed. For example, `monalisa/octocat@*`, `monalisa/octocat@v2`, `monalisa/*`." */ + patterns_allowed?: string[]; + }; + /** + * @description The default workflow permissions granted to the GITHUB_TOKEN when running workflows. + * @enum {string} + */ + "actions-default-workflow-permissions": "read" | "write"; + /** @description Whether GitHub Actions can approve pull requests. Enabling this can be a security risk. */ + "actions-can-approve-pull-request-reviews": boolean; + "actions-get-default-workflow-permissions": { + default_workflow_permissions: components["schemas"]["actions-default-workflow-permissions"]; + can_approve_pull_request_reviews: components["schemas"]["actions-can-approve-pull-request-reviews"]; + }; + "actions-set-default-workflow-permissions": { + default_workflow_permissions?: components["schemas"]["actions-default-workflow-permissions"]; + can_approve_pull_request_reviews?: components["schemas"]["actions-can-approve-pull-request-reviews"]; + }; + "runner-groups-enterprise": { + id: number; + name: string; + visibility: string; + default: boolean; + selected_organizations_url?: string; + runners_url: string; + allows_public_repositories: boolean; + /** + * @description If `true`, the `restricted_to_workflows` and `selected_workflows` fields cannot be modified. 
+ * @default false + */ + workflow_restrictions_read_only?: boolean; + /** + * @description If `true`, the runner group will be restricted to running only the workflows specified in the `selected_workflows` array. + * @default false + */ + restricted_to_workflows?: boolean; + /** @description List of workflows the runner group should be allowed to run. This setting will be ignored unless `restricted_to_workflows` is set to `true`. */ + selected_workflows?: string[]; + }; + /** + * Self hosted runner label + * @description A label for a self hosted runner + */ + "runner-label": { + /** @description Unique identifier of the label. */ + id?: number; + /** @description Name of the label. */ + name: string; + /** + * @description The type of label. Read-only labels are applied automatically when the runner is configured. + * @enum {string} + */ + type?: "read-only" | "custom"; + }; + /** + * Self hosted runners + * @description A self hosted runner + */ + runner: { + /** + * @description The id of the runner. + * @example 5 + */ + id: number; + /** + * @description The name of the runner. + * @example iMac + */ + name: string; + /** + * @description The Operating System of the runner. + * @example macos + */ + os: string; + /** + * @description The status of the runner. + * @example online + */ + status: string; + busy: boolean; + labels: components["schemas"]["runner-label"][]; + }; + /** + * Runner Application + * @description Runner Application + */ + "runner-application": { + os: string; + architecture: string; + download_url: string; + filename: string; + /** @description A short lived bearer token used to download the runner, if needed. */ + temp_download_token?: string; + sha256_checksum?: string; + }; + /** + * Authentication Token + * @description Authentication Token + */ + "authentication-token": { + /** + * @description The token used for authentication + * @example v1.1f699f1069f60xxx + */ + token: string; + /** + * Format: date-time + * @description The time this token expires + * @example 2016-07-11T22:14:10Z + */ + expires_at: string; + /** + * @example { + * "issues": "read", + * "deployments": "write" + * } + */ + permissions?: { [key: string]: unknown }; + /** @description The repositories this token has access to */ + repositories?: components["schemas"]["repository"][]; + /** @example config.yaml */ + single_file?: string | null; + /** + * @description Describe whether all repositories have been selected or there's a selection involved + * @enum {string} + */ + repository_selection?: "all" | "selected"; + }; + "audit-log-event": { + /** @description The time the audit log event occurred, given as a [Unix timestamp](http://en.wikipedia.org/wiki/Unix_time). */ + "@timestamp"?: number; + /** @description The name of the action that was performed, for example `user.login` or `repo.create`. */ + action?: string; + active?: boolean; + active_was?: boolean; + /** @description The actor who performed the action. */ + actor?: string; + /** @description The id of the actor who performed the action. */ + actor_id?: number; + actor_location?: { + country_name?: string; + }; + data?: { [key: string]: unknown }; + org_id?: number; + /** @description The username of the account being blocked. */ + blocked_user?: string; + business?: string; + config?: { [key: string]: unknown }[]; + config_was?: { [key: string]: unknown }[]; + content_type?: string; + /** @description The time the audit log event was recorded, given as a [Unix timestamp](http://en.wikipedia.org/wiki/Unix_time). 
*/ + created_at?: number; + deploy_key_fingerprint?: string; + /** @description A unique identifier for an audit event. */ + _document_id?: string; + emoji?: string; + events?: { [key: string]: unknown }[]; + events_were?: { [key: string]: unknown }[]; + explanation?: string; + fingerprint?: string; + hook_id?: number; + limited_availability?: boolean; + message?: string; + name?: string; + old_user?: string; + openssh_public_key?: string; + org?: string; + previous_visibility?: string; + read_only?: boolean; + /** @description The name of the repository. */ + repo?: string; + /** @description The name of the repository. */ + repository?: string; + repository_public?: boolean; + target_login?: string; + team?: string; + /** @description The type of protocol (for example, HTTP or SSH) used to transfer Git data. */ + transport_protocol?: number; + /** @description A human readable name for the protocol (for example, HTTP or SSH) used to transfer Git data. */ + transport_protocol_name?: string; + /** @description The user that was affected by the action performed (if available). */ + user?: string; + /** @description The repository visibility, for example `public` or `private`. */ + visibility?: string; + }; + /** @description The name of the tool used to generate the code scanning analysis. */ + "code-scanning-analysis-tool-name": string; + /** @description The GUID of the tool used to generate the code scanning analysis, if provided in the uploaded SARIF data. */ + "code-scanning-analysis-tool-guid": string | null; + /** + * @description State of a code scanning alert. + * @enum {string} + */ + "code-scanning-alert-state": "open" | "closed" | "dismissed" | "fixed"; + /** @description The security alert number. */ + "alert-number": number; + /** + * Format: date-time + * @description The time that the alert was created in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + "alert-created-at": string; + /** + * Format: date-time + * @description The time that the alert was last updated in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + "alert-updated-at": string; + /** + * Format: uri + * @description The REST API URL of the alert resource. + */ + "alert-url": string; + /** + * Format: uri + * @description The GitHub URL of the alert resource. + */ + "alert-html-url": string; + /** + * Format: uri + * @description The REST API URL for fetching the list of instances for an alert. + */ + "alert-instances-url": string; + /** + * Format: date-time + * @description The time that the alert was no longer detected and was considered fixed in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + "code-scanning-alert-fixed-at": string | null; + /** + * Format: date-time + * @description The time that the alert was dismissed in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + "code-scanning-alert-dismissed-at": string | null; + /** + * @description **Required when the state is dismissed.** The reason for dismissing or closing the alert. + * @enum {string|null} + */ + "code-scanning-alert-dismissed-reason": + | (null | "false positive" | "won't fix" | "used in tests") + | null; + /** @description The dismissal comment associated with the dismissal of the alert. */ + "code-scanning-alert-dismissed-comment": string | null; + "code-scanning-alert-rule": { + /** @description A unique identifier for the rule used to detect the alert. */ + id?: string | null; + /** @description The name of the rule used to detect the alert. */ + name?: string; + /** + * @description The severity of the alert. 
+ * @enum {string|null} + */ + severity?: ("none" | "note" | "warning" | "error") | null; + /** + * @description The security severity of the alert. + * @enum {string|null} + */ + security_severity_level?: ("low" | "medium" | "high" | "critical") | null; + /** @description A short description of the rule used to detect the alert. */ + description?: string; + /** @description description of the rule used to detect the alert. */ + full_description?: string; + /** @description A set of tags applicable for the rule. */ + tags?: string[] | null; + /** @description Detailed documentation for the rule as GitHub Flavored Markdown. */ + help?: string | null; + }; + /** @description The version of the tool used to generate the code scanning analysis. */ + "code-scanning-analysis-tool-version": string | null; + "code-scanning-analysis-tool": { + name?: components["schemas"]["code-scanning-analysis-tool-name"]; + version?: components["schemas"]["code-scanning-analysis-tool-version"]; + guid?: components["schemas"]["code-scanning-analysis-tool-guid"]; + }; + /** + * @description The full Git reference, formatted as `refs/heads/`, + * `refs/pull//merge`, or `refs/pull//head`. + */ + "code-scanning-ref": string; + /** @description Identifies the configuration under which the analysis was executed. For example, in GitHub Actions this includes the workflow filename and job name. */ + "code-scanning-analysis-analysis-key": string; + /** @description Identifies the variable values associated with the environment in which the analysis that generated this alert instance was performed, such as the language that was analyzed. */ + "code-scanning-alert-environment": string; + /** @description Identifies the configuration under which the analysis was executed. Used to distinguish between multiple analyses for the same tool and commit, but performed on different languages or different parts of the code. */ + "code-scanning-analysis-category": string; + /** @description Describe a region within a file for the alert. */ + "code-scanning-alert-location": { + path?: string; + start_line?: number; + end_line?: number; + start_column?: number; + end_column?: number; + }; + /** + * @description A classification of the file. For example to identify it as generated. + * @enum {string|null} + */ + "code-scanning-alert-classification": + | ("source" | "generated" | "test" | "library") + | null; + "code-scanning-alert-instance": { + ref?: components["schemas"]["code-scanning-ref"]; + analysis_key?: components["schemas"]["code-scanning-analysis-analysis-key"]; + environment?: components["schemas"]["code-scanning-alert-environment"]; + category?: components["schemas"]["code-scanning-analysis-category"]; + state?: components["schemas"]["code-scanning-alert-state"]; + commit_sha?: string; + message?: { + text?: string; + }; + location?: components["schemas"]["code-scanning-alert-location"]; + html_url?: string; + /** + * @description Classifications that have been applied to the file that triggered the alert. + * For example identifying it as documentation, or a generated file. + */ + classifications?: components["schemas"]["code-scanning-alert-classification"][]; + }; + /** + * Simple Repository + * @description Simple Repository + */ + "simple-repository": { + /** + * @description A unique identifier of the repository. + * @example 1296269 + */ + id: number; + /** + * @description The GraphQL identifier of the repository. 
+ * @example MDEwOlJlcG9zaXRvcnkxMjk2MjY5 + */ + node_id: string; + /** + * @description The name of the repository. + * @example Hello-World + */ + name: string; + /** + * @description The full, globally unique, name of the repository. + * @example octocat/Hello-World + */ + full_name: string; + owner: components["schemas"]["simple-user"]; + /** @description Whether the repository is private. */ + private: boolean; + /** + * Format: uri + * @description The URL to view the repository on GitHub.com. + * @example https://github.com/octocat/Hello-World + */ + html_url: string; + /** + * @description The repository description. + * @example This your first repo! + */ + description: string | null; + /** @description Whether the repository is a fork. */ + fork: boolean; + /** + * Format: uri + * @description The URL to get more information about the repository from the GitHub API. + * @example https://api.github.com/repos/octocat/Hello-World + */ + url: string; + /** + * @description A template for the API URL to download the repository as an archive. + * @example https://api.github.com/repos/octocat/Hello-World/{archive_format}{/ref} + */ + archive_url: string; + /** + * @description A template for the API URL to list the available assignees for issues in the repository. + * @example https://api.github.com/repos/octocat/Hello-World/assignees{/user} + */ + assignees_url: string; + /** + * @description A template for the API URL to create or retrieve a raw Git blob in the repository. + * @example https://api.github.com/repos/octocat/Hello-World/git/blobs{/sha} + */ + blobs_url: string; + /** + * @description A template for the API URL to get information about branches in the repository. + * @example https://api.github.com/repos/octocat/Hello-World/branches{/branch} + */ + branches_url: string; + /** + * @description A template for the API URL to get information about collaborators of the repository. + * @example https://api.github.com/repos/octocat/Hello-World/collaborators{/collaborator} + */ + collaborators_url: string; + /** + * @description A template for the API URL to get information about comments on the repository. + * @example https://api.github.com/repos/octocat/Hello-World/comments{/number} + */ + comments_url: string; + /** + * @description A template for the API URL to get information about commits on the repository. + * @example https://api.github.com/repos/octocat/Hello-World/commits{/sha} + */ + commits_url: string; + /** + * @description A template for the API URL to compare two commits or refs. + * @example https://api.github.com/repos/octocat/Hello-World/compare/{base}...{head} + */ + compare_url: string; + /** + * @description A template for the API URL to get the contents of the repository. + * @example https://api.github.com/repos/octocat/Hello-World/contents/{+path} + */ + contents_url: string; + /** + * Format: uri + * @description A template for the API URL to list the contributors to the repository. + * @example https://api.github.com/repos/octocat/Hello-World/contributors + */ + contributors_url: string; + /** + * Format: uri + * @description The API URL to list the deployments of the repository. + * @example https://api.github.com/repos/octocat/Hello-World/deployments + */ + deployments_url: string; + /** + * Format: uri + * @description The API URL to list the downloads on the repository. 
+ * @example https://api.github.com/repos/octocat/Hello-World/downloads + */ + downloads_url: string; + /** + * Format: uri + * @description The API URL to list the events of the repository. + * @example https://api.github.com/repos/octocat/Hello-World/events + */ + events_url: string; + /** + * Format: uri + * @description The API URL to list the forks of the repository. + * @example https://api.github.com/repos/octocat/Hello-World/forks + */ + forks_url: string; + /** + * @description A template for the API URL to get information about Git commits of the repository. + * @example https://api.github.com/repos/octocat/Hello-World/git/commits{/sha} + */ + git_commits_url: string; + /** + * @description A template for the API URL to get information about Git refs of the repository. + * @example https://api.github.com/repos/octocat/Hello-World/git/refs{/sha} + */ + git_refs_url: string; + /** + * @description A template for the API URL to get information about Git tags of the repository. + * @example https://api.github.com/repos/octocat/Hello-World/git/tags{/sha} + */ + git_tags_url: string; + /** + * @description A template for the API URL to get information about issue comments on the repository. + * @example https://api.github.com/repos/octocat/Hello-World/issues/comments{/number} + */ + issue_comment_url: string; + /** + * @description A template for the API URL to get information about issue events on the repository. + * @example https://api.github.com/repos/octocat/Hello-World/issues/events{/number} + */ + issue_events_url: string; + /** + * @description A template for the API URL to get information about issues on the repository. + * @example https://api.github.com/repos/octocat/Hello-World/issues{/number} + */ + issues_url: string; + /** + * @description A template for the API URL to get information about deploy keys on the repository. + * @example https://api.github.com/repos/octocat/Hello-World/keys{/key_id} + */ + keys_url: string; + /** + * @description A template for the API URL to get information about labels of the repository. + * @example https://api.github.com/repos/octocat/Hello-World/labels{/name} + */ + labels_url: string; + /** + * Format: uri + * @description The API URL to get information about the languages of the repository. + * @example https://api.github.com/repos/octocat/Hello-World/languages + */ + languages_url: string; + /** + * Format: uri + * @description The API URL to merge branches in the repository. + * @example https://api.github.com/repos/octocat/Hello-World/merges + */ + merges_url: string; + /** + * @description A template for the API URL to get information about milestones of the repository. + * @example https://api.github.com/repos/octocat/Hello-World/milestones{/number} + */ + milestones_url: string; + /** + * @description A template for the API URL to get information about notifications on the repository. + * @example https://api.github.com/repos/octocat/Hello-World/notifications{?since,all,participating} + */ + notifications_url: string; + /** + * @description A template for the API URL to get information about pull requests on the repository. + * @example https://api.github.com/repos/octocat/Hello-World/pulls{/number} + */ + pulls_url: string; + /** + * @description A template for the API URL to get information about releases on the repository. + * @example https://api.github.com/repos/octocat/Hello-World/releases{/id} + */ + releases_url: string; + /** + * Format: uri + * @description The API URL to list the stargazers on the repository. 
+ * @example https://api.github.com/repos/octocat/Hello-World/stargazers + */ + stargazers_url: string; + /** + * @description A template for the API URL to get information about statuses of a commit. + * @example https://api.github.com/repos/octocat/Hello-World/statuses/{sha} + */ + statuses_url: string; + /** + * Format: uri + * @description The API URL to list the subscribers on the repository. + * @example https://api.github.com/repos/octocat/Hello-World/subscribers + */ + subscribers_url: string; + /** + * Format: uri + * @description The API URL to subscribe to notifications for this repository. + * @example https://api.github.com/repos/octocat/Hello-World/subscription + */ + subscription_url: string; + /** + * Format: uri + * @description The API URL to get information about tags on the repository. + * @example https://api.github.com/repos/octocat/Hello-World/tags + */ + tags_url: string; + /** + * Format: uri + * @description The API URL to list the teams on the repository. + * @example https://api.github.com/repos/octocat/Hello-World/teams + */ + teams_url: string; + /** + * @description A template for the API URL to create or retrieve a raw Git tree of the repository. + * @example https://api.github.com/repos/octocat/Hello-World/git/trees{/sha} + */ + trees_url: string; + /** + * Format: uri + * @description The API URL to list the hooks on the repository. + * @example https://api.github.com/repos/octocat/Hello-World/hooks + */ + hooks_url: string; + }; + "code-scanning-organization-alert-items": { + number: components["schemas"]["alert-number"]; + created_at: components["schemas"]["alert-created-at"]; + updated_at?: components["schemas"]["alert-updated-at"]; + url: components["schemas"]["alert-url"]; + html_url: components["schemas"]["alert-html-url"]; + instances_url: components["schemas"]["alert-instances-url"]; + state: components["schemas"]["code-scanning-alert-state"]; + fixed_at?: components["schemas"]["code-scanning-alert-fixed-at"]; + dismissed_by: components["schemas"]["nullable-simple-user"]; + dismissed_at: components["schemas"]["code-scanning-alert-dismissed-at"]; + dismissed_reason: components["schemas"]["code-scanning-alert-dismissed-reason"]; + dismissed_comment?: components["schemas"]["code-scanning-alert-dismissed-comment"]; + rule: components["schemas"]["code-scanning-alert-rule"]; + tool: components["schemas"]["code-scanning-analysis-tool"]; + most_recent_instance: components["schemas"]["code-scanning-alert-instance"]; + repository: components["schemas"]["simple-repository"]; + }; + /** + * Format: date-time + * @description The time that the alert was last updated in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + "nullable-alert-updated-at": string | null; + /** + * @description Sets the state of the secret scanning alert. Can be either `open` or `resolved`. You must provide `resolution` when you set the state to `resolved`. + * @enum {string} + */ + "secret-scanning-alert-state": "open" | "resolved"; + /** + * @description **Required when the `state` is `resolved`.** The reason for resolving the alert. 
+ * @enum {string|null} + */ + "secret-scanning-alert-resolution": + | (null | "false_positive" | "wont_fix" | "revoked" | "used_in_tests") + | null; + "organization-secret-scanning-alert": { + number?: components["schemas"]["alert-number"]; + created_at?: components["schemas"]["alert-created-at"]; + updated_at?: components["schemas"]["nullable-alert-updated-at"]; + url?: components["schemas"]["alert-url"]; + html_url?: components["schemas"]["alert-html-url"]; + /** + * Format: uri + * @description The REST API URL of the code locations for this alert. + */ + locations_url?: string; + state?: components["schemas"]["secret-scanning-alert-state"]; + resolution?: components["schemas"]["secret-scanning-alert-resolution"]; + /** + * Format: date-time + * @description The time that the alert was resolved in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + resolved_at?: string | null; + resolved_by?: components["schemas"]["nullable-simple-user"]; + /** @description The type of secret that secret scanning detected. */ + secret_type?: string; + /** + * @description User-friendly name for the detected secret, matching the `secret_type`. + * For a list of built-in patterns, see "[Secret scanning patterns](https://docs.github.com/code-security/secret-scanning/secret-scanning-patterns#supported-secrets-for-advanced-security)." + */ + secret_type_display_name?: string; + /** @description The secret that was detected. */ + secret?: string; + repository?: components["schemas"]["simple-repository"]; + /** @description Whether push protection was bypassed for the detected secret. */ + push_protection_bypassed?: boolean | null; + push_protection_bypassed_by?: components["schemas"]["nullable-simple-user"]; + /** + * Format: date-time + * @description The time that push protection was bypassed in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + push_protection_bypassed_at?: string | null; + }; + "actions-billing-usage": { + /** @description The sum of the free and paid GitHub Actions minutes used. */ + total_minutes_used: number; + /** @description The total paid GitHub Actions minutes used. */ + total_paid_minutes_used: number; + /** @description The amount of free GitHub Actions minutes available. */ + included_minutes: number; + minutes_used_breakdown: { + /** @description Total minutes used on Ubuntu runner machines. */ + UBUNTU?: number; + /** @description Total minutes used on macOS runner machines. */ + MACOS?: number; + /** @description Total minutes used on Windows runner machines. */ + WINDOWS?: number; + /** @description Total minutes used on Ubuntu 4 core runner machines. */ + ubuntu_4_core?: number; + /** @description Total minutes used on Ubuntu 8 core runner machines. */ + ubuntu_8_core?: number; + /** @description Total minutes used on Ubuntu 16 core runner machines. */ + ubuntu_16_core?: number; + /** @description Total minutes used on Ubuntu 32 core runner machines. */ + ubuntu_32_core?: number; + /** @description Total minutes used on Ubuntu 64 core runner machines. */ + ubuntu_64_core?: number; + /** @description Total minutes used on Windows 4 core runner machines. */ + windows_4_core?: number; + /** @description Total minutes used on Windows 8 core runner machines. */ + windows_8_core?: number; + /** @description Total minutes used on Windows 16 core runner machines. */ + windows_16_core?: number; + /** @description Total minutes used on Windows 32 core runner machines. */ + windows_32_core?: number; + /** @description Total minutes used on Windows 64 core runner machines. 
*/
+ windows_64_core?: number;
+ /** @description Total minutes used on all runner machines. */
+ total?: number;
+ };
+ };
+ "advanced-security-active-committers-user": {
+ user_login: string;
+ /** @example 2021-11-03 */
+ last_pushed_date: string;
+ };
+ "advanced-security-active-committers-repository": {
+ /** @example octocat/Hello-World */
+ name: string;
+ /** @example 25 */
+ advanced_security_committers: number;
+ advanced_security_committers_breakdown: components["schemas"]["advanced-security-active-committers-user"][];
+ };
+ "advanced-security-active-committers": {
+ /** @example 25 */
+ total_advanced_security_committers?: number;
+ /** @example 2 */
+ total_count?: number;
+ repositories: components["schemas"]["advanced-security-active-committers-repository"][];
+ };
+ "packages-billing-usage": {
+ /** @description Sum of the free and paid storage space (GB) for GitHub Packages. */
+ total_gigabytes_bandwidth_used: number;
+ /** @description Total paid storage space (GB) for GitHub Packages. */
+ total_paid_gigabytes_bandwidth_used: number;
+ /** @description Free storage space (GB) for GitHub Packages. */
+ included_gigabytes_bandwidth: number;
+ };
+ "combined-billing-usage": {
+ /** @description Number of days left in billing cycle. */
+ days_left_in_billing_cycle: number;
+ /** @description Estimated storage space (GB) used in billing cycle. */
+ estimated_paid_storage_for_month: number;
+ /** @description Estimated sum of free and paid storage space (GB) used in billing cycle. */
+ estimated_storage_for_month: number;
+ };
+ /**
+ * Actor
+ * @description Actor
+ */
+ actor: {
+ id: number;
+ login: string;
+ display_login?: string;
+ gravatar_id: string | null;
+ /** Format: uri */
+ url: string;
+ /** Format: uri */
+ avatar_url: string;
+ };
+ /**
+ * Milestone
+ * @description A collection of related issues and pull requests.
+ */
+ "nullable-milestone": {
+ /**
+ * Format: uri
+ * @example https://api.github.com/repos/octocat/Hello-World/milestones/1
+ */
+ url: string;
+ /**
+ * Format: uri
+ * @example https://github.com/octocat/Hello-World/milestones/v1.0
+ */
+ html_url: string;
+ /**
+ * Format: uri
+ * @example https://api.github.com/repos/octocat/Hello-World/milestones/1/labels
+ */
+ labels_url: string;
+ /** @example 1002604 */
+ id: number;
+ /** @example MDk6TWlsZXN0b25lMTAwMjYwNA== */
+ node_id: string;
+ /**
+ * @description The number of the milestone.
+ * @example 42
+ */
+ number: number;
+ /**
+ * @description The state of the milestone.
+ * @default open
+ * @example open
+ * @enum {string}
+ */
+ state: "open" | "closed";
+ /**
+ * @description The title of the milestone.
+ * @example v1.0
+ */
+ title: string;
+ /** @example Tracking milestone for version 1.0 */
+ description: string | null;
+ creator: components["schemas"]["nullable-simple-user"];
+ /** @example 4 */
+ open_issues: number;
+ /** @example 8 */
+ closed_issues: number;
+ /**
+ * Format: date-time
+ * @example 2011-04-10T20:09:31Z
+ */
+ created_at: string;
+ /**
+ * Format: date-time
+ * @example 2014-03-03T18:58:10Z
+ */
+ updated_at: string;
+ /**
+ * Format: date-time
+ * @example 2013-02-12T13:22:01Z
+ */
+ closed_at: string | null;
+ /**
+ * Format: date-time
+ * @example 2012-10-09T23:39:01Z
+ */
+ due_on: string | null;
+ } | null;
+ /**
+ * GitHub app
+ * @description GitHub apps are a new way to extend GitHub. They can be installed directly on organizations and user accounts and granted access to specific repositories.
They come with granular permissions and built-in webhooks. GitHub apps are first class actors within GitHub. + */ + "nullable-integration": { + /** + * @description Unique identifier of the GitHub app + * @example 37 + */ + id: number; + /** + * @description The slug name of the GitHub app + * @example probot-owners + */ + slug?: string; + /** @example MDExOkludGVncmF0aW9uMQ== */ + node_id: string; + owner: components["schemas"]["nullable-simple-user"]; + /** + * @description The name of the GitHub app + * @example Probot Owners + */ + name: string; + /** @example The description of the app. */ + description: string | null; + /** + * Format: uri + * @example https://example.com + */ + external_url: string; + /** + * Format: uri + * @example https://github.com/apps/super-ci + */ + html_url: string; + /** + * Format: date-time + * @example 2017-07-08T16:18:44-04:00 + */ + created_at: string; + /** + * Format: date-time + * @example 2017-07-08T16:18:44-04:00 + */ + updated_at: string; + /** + * @description The set of permissions for the GitHub app + * @example { + * "issues": "read", + * "deployments": "write" + * } + */ + permissions: { + issues?: string; + checks?: string; + metadata?: string; + contents?: string; + deployments?: string; + } & { [key: string]: string }; + /** + * @description The list of events for the GitHub app + * @example [ + * "label", + * "deployment" + * ] + */ + events: string[]; + /** + * @description The number of installations associated with the GitHub app + * @example 5 + */ + installations_count?: number; + /** @example "Iv1.25b5d1e65ffc4022" */ + client_id?: string; + /** @example "1d4b2097ac622ba702d19de498f005747a8b21d3" */ + client_secret?: string; + /** @example "6fba8f2fc8a7e8f2cca5577eddd82ca7586b3b6b" */ + webhook_secret?: string | null; + /** @example "-----BEGIN RSA PRIVATE KEY-----\nMIIEogIBAAKCAQEArYxrNYD/iT5CZVpRJu4rBKmmze3PVmT/gCo2ATUvDvZTPTey\nxcGJ3vvrJXazKk06pN05TN29o98jrYz4cengG3YGsXPNEpKsIrEl8NhbnxapEnM9\nJCMRe0P5JcPsfZlX6hmiT7136GRWiGOUba2X9+HKh8QJVLG5rM007TBER9/z9mWm\nrJuNh+m5l320oBQY/Qq3A7wzdEfZw8qm/mIN0FCeoXH1L6B8xXWaAYBwhTEh6SSn\nZHlO1Xu1JWDmAvBCi0RO5aRSKM8q9QEkvvHP4yweAtK3N8+aAbZ7ovaDhyGz8r6r\nzhU1b8Uo0Z2ysf503WqzQgIajr7Fry7/kUwpgQIDAQABAoIBADwJp80Ko1xHPZDy\nfcCKBDfIuPvkmSW6KumbsLMaQv1aGdHDwwTGv3t0ixSay8CGlxMRtRDyZPib6SvQ\n6OH/lpfpbMdW2ErkksgtoIKBVrDilfrcAvrNZu7NxRNbhCSvN8q0s4ICecjbbVQh\nnueSdlA6vGXbW58BHMq68uRbHkP+k+mM9U0mDJ1HMch67wlg5GbayVRt63H7R2+r\nVxcna7B80J/lCEjIYZznawgiTvp3MSanTglqAYi+m1EcSsP14bJIB9vgaxS79kTu\noiSo93leJbBvuGo8QEiUqTwMw4tDksmkLsoqNKQ1q9P7LZ9DGcujtPy4EZsamSJT\ny8OJt0ECgYEA2lxOxJsQk2kI325JgKFjo92mQeUObIvPfSNWUIZQDTjniOI6Gv63\nGLWVFrZcvQBWjMEQraJA9xjPbblV8PtfO87MiJGLWCHFxmPz2dzoedN+2Coxom8m\nV95CLz8QUShuao6u/RYcvUaZEoYs5bHcTmy5sBK80JyEmafJPtCQVxMCgYEAy3ar\nZr3yv4xRPEPMat4rseswmuMooSaK3SKub19WFI5IAtB/e7qR1Rj9JhOGcZz+OQrl\nT78O2OFYlgOIkJPvRMrPpK5V9lslc7tz1FSh3BZMRGq5jSyD7ETSOQ0c8T2O/s7v\nbeEPbVbDe4mwvM24XByH0GnWveVxaDl51ABD65sCgYB3ZAspUkOA5egVCh8kNpnd\nSd6SnuQBE3ySRlT2WEnCwP9Ph6oPgn+oAfiPX4xbRqkL8q/k0BdHQ4h+zNwhk7+h\nWtPYRAP1Xxnc/F+jGjb+DVaIaKGU18MWPg7f+FI6nampl3Q0KvfxwX0GdNhtio8T\nTj1E+SnFwh56SRQuxSh2gwKBgHKjlIO5NtNSflsUYFM+hyQiPiqnHzddfhSG+/3o\nm5nNaSmczJesUYreH5San7/YEy2UxAugvP7aSY2MxB+iGsiJ9WD2kZzTUlDZJ7RV\nUzWsoqBR+eZfVJ2FUWWvy8TpSG6trh4dFxImNtKejCR1TREpSiTV3Zb1dmahK9GV\nrK9NAoGAbBxRLoC01xfxCTgt5BDiBcFVh4fp5yYKwavJPLzHSpuDOrrI9jDn1oKN\nonq5sDU1i391zfQvdrbX4Ova48BN+B7p63FocP/MK5tyyBoT8zQEk2+vWDOw7H/Z\nu5dTCPxTIsoIwUw1I+7yIxqJzLPFgR2gVBwY1ra/8iAqCj+zeBw=\n-----END RSA PRIVATE KEY-----\n" */ + pem?: string; + } | 
null; + /** + * author_association + * @description How the author is associated with the repository. + * @example OWNER + * @enum {string} + */ + "author-association": + | "COLLABORATOR" + | "CONTRIBUTOR" + | "FIRST_TIMER" + | "FIRST_TIME_CONTRIBUTOR" + | "MANNEQUIN" + | "MEMBER" + | "NONE" + | "OWNER"; + /** Reaction Rollup */ + "reaction-rollup": { + /** Format: uri */ + url: string; + total_count: number; + "+1": number; + "-1": number; + laugh: number; + confused: number; + heart: number; + hooray: number; + eyes: number; + rocket: number; + }; + /** + * Issue + * @description Issues are a great way to keep track of tasks, enhancements, and bugs for your projects. + */ + issue: { + id: number; + node_id: string; + /** + * Format: uri + * @description URL for the issue + * @example https://api.github.com/repositories/42/issues/1 + */ + url: string; + /** Format: uri */ + repository_url: string; + labels_url: string; + /** Format: uri */ + comments_url: string; + /** Format: uri */ + events_url: string; + /** Format: uri */ + html_url: string; + /** + * @description Number uniquely identifying the issue within its repository + * @example 42 + */ + number: number; + /** + * @description State of the issue; either 'open' or 'closed' + * @example open + */ + state: string; + /** + * @description The reason for the current state + * @example not_planned + */ + state_reason?: string | null; + /** + * @description Title of the issue + * @example Widget creation fails in Safari on OS X 10.8 + */ + title: string; + /** + * @description Contents of the issue + * @example It looks like the new widget form is broken on Safari. When I try and create the widget, Safari crashes. This is reproducible on 10.8, but not 10.9. Maybe a browser bug? + */ + body?: string | null; + user: components["schemas"]["nullable-simple-user"]; + /** + * @description Labels to associate with this issue; pass one or more label names to replace the set of labels on this issue; send an empty array to clear all labels from the issue; note that the labels are silently dropped for users without push access to the repository + * @example [ + * "bug", + * "registration" + * ] + */ + labels: ( + | string + | { + /** Format: int64 */ + id?: number; + node_id?: string; + /** Format: uri */ + url?: string; + name?: string; + description?: string | null; + color?: string | null; + default?: boolean; + } + )[]; + assignee: components["schemas"]["nullable-simple-user"]; + assignees?: components["schemas"]["simple-user"][] | null; + milestone: components["schemas"]["nullable-milestone"]; + locked: boolean; + active_lock_reason?: string | null; + comments: number; + pull_request?: { + /** Format: date-time */ + merged_at?: string | null; + /** Format: uri */ + diff_url: string | null; + /** Format: uri */ + html_url: string | null; + /** Format: uri */ + patch_url: string | null; + /** Format: uri */ + url: string | null; + }; + /** Format: date-time */ + closed_at: string | null; + /** Format: date-time */ + created_at: string; + /** Format: date-time */ + updated_at: string; + draft?: boolean; + closed_by?: components["schemas"]["nullable-simple-user"]; + body_html?: string; + body_text?: string; + /** Format: uri */ + timeline_url?: string; + repository?: components["schemas"]["repository"]; + performed_via_github_app?: components["schemas"]["nullable-integration"]; + author_association: components["schemas"]["author-association"]; + reactions?: components["schemas"]["reaction-rollup"]; + }; + /** + * Issue Comment + * @description 
Comments provide a way for people to collaborate on an issue. + */ + "issue-comment": { + /** + * @description Unique identifier of the issue comment + * @example 42 + */ + id: number; + node_id: string; + /** + * Format: uri + * @description URL for the issue comment + * @example https://api.github.com/repositories/42/issues/comments/1 + */ + url: string; + /** + * @description Contents of the issue comment + * @example What version of Safari were you using when you observed this bug? + */ + body?: string; + body_text?: string; + body_html?: string; + /** Format: uri */ + html_url: string; + user: components["schemas"]["nullable-simple-user"]; + /** + * Format: date-time + * @example 2011-04-14T16:00:49Z + */ + created_at: string; + /** + * Format: date-time + * @example 2011-04-14T16:00:49Z + */ + updated_at: string; + /** Format: uri */ + issue_url: string; + author_association: components["schemas"]["author-association"]; + performed_via_github_app?: components["schemas"]["nullable-integration"]; + reactions?: components["schemas"]["reaction-rollup"]; + }; + /** + * Event + * @description Event + */ + event: { + id: string; + type: string | null; + actor: components["schemas"]["actor"]; + repo: { + id: number; + name: string; + /** Format: uri */ + url: string; + }; + org?: components["schemas"]["actor"]; + payload: { + action?: string; + issue?: components["schemas"]["issue"]; + comment?: components["schemas"]["issue-comment"]; + pages?: { + page_name?: string; + title?: string; + summary?: string | null; + action?: string; + sha?: string; + html_url?: string; + }[]; + }; + public: boolean; + /** Format: date-time */ + created_at: string | null; + }; + /** + * Link With Type + * @description Hypermedia Link with Type + */ + "link-with-type": { + href: string; + type: string; + }; + /** + * Feed + * @description Feed + */ + feed: { + /** @example https://github.com/timeline */ + timeline_url: string; + /** @example https://github.com/{user} */ + user_url: string; + /** @example https://github.com/octocat */ + current_user_public_url?: string; + /** @example https://github.com/octocat.private?token=abc123 */ + current_user_url?: string; + /** @example https://github.com/octocat.private.actor?token=abc123 */ + current_user_actor_url?: string; + /** @example https://github.com/octocat-org */ + current_user_organization_url?: string; + /** + * @example [ + * "https://github.com/organizations/github/octocat.private.atom?token=abc123" + * ] + */ + current_user_organization_urls?: string[]; + /** @example https://github.com/security-advisories */ + security_advisories_url?: string; + _links: { + timeline: components["schemas"]["link-with-type"]; + user: components["schemas"]["link-with-type"]; + security_advisories?: components["schemas"]["link-with-type"]; + current_user?: components["schemas"]["link-with-type"]; + current_user_public?: components["schemas"]["link-with-type"]; + current_user_actor?: components["schemas"]["link-with-type"]; + current_user_organization?: components["schemas"]["link-with-type"]; + current_user_organizations?: components["schemas"]["link-with-type"][]; + }; + }; + /** + * Base Gist + * @description Base Gist + */ + "base-gist": { + /** Format: uri */ + url: string; + /** Format: uri */ + forks_url: string; + /** Format: uri */ + commits_url: string; + id: string; + node_id: string; + /** Format: uri */ + git_pull_url: string; + /** Format: uri */ + git_push_url: string; + /** Format: uri */ + html_url: string; + files: { + [key: string]: { + filename?: string; + 
type?: string; + language?: string; + raw_url?: string; + size?: number; + }; + }; + public: boolean; + /** Format: date-time */ + created_at: string; + /** Format: date-time */ + updated_at: string; + description: string | null; + comments: number; + user: components["schemas"]["nullable-simple-user"]; + /** Format: uri */ + comments_url: string; + owner?: components["schemas"]["simple-user"]; + truncated?: boolean; + forks?: unknown[]; + history?: unknown[]; + }; + /** + * Public User + * @description Public User + */ + "public-user": { + login: string; + id: number; + node_id: string; + /** Format: uri */ + avatar_url: string; + gravatar_id: string | null; + /** Format: uri */ + url: string; + /** Format: uri */ + html_url: string; + /** Format: uri */ + followers_url: string; + following_url: string; + gists_url: string; + starred_url: string; + /** Format: uri */ + subscriptions_url: string; + /** Format: uri */ + organizations_url: string; + /** Format: uri */ + repos_url: string; + events_url: string; + /** Format: uri */ + received_events_url: string; + type: string; + site_admin: boolean; + name: string | null; + company: string | null; + blog: string | null; + location: string | null; + /** Format: email */ + email: string | null; + hireable: boolean | null; + bio: string | null; + twitter_username?: string | null; + public_repos: number; + public_gists: number; + followers: number; + following: number; + /** Format: date-time */ + created_at: string; + /** Format: date-time */ + updated_at: string; + plan?: { + collaborators: number; + name: string; + space: number; + private_repos: number; + }; + /** Format: date-time */ + suspended_at?: string | null; + /** @example 1 */ + private_gists?: number; + /** @example 2 */ + total_private_repos?: number; + /** @example 2 */ + owned_private_repos?: number; + /** @example 1 */ + disk_usage?: number; + /** @example 3 */ + collaborators?: number; + }; + /** + * Gist History + * @description Gist History + */ + "gist-history": { + user?: components["schemas"]["nullable-simple-user"]; + version?: string; + /** Format: date-time */ + committed_at?: string; + change_status?: { + total?: number; + additions?: number; + deletions?: number; + }; + /** Format: uri */ + url?: string; + }; + /** + * Gist Simple + * @description Gist Simple + */ + "gist-simple": { + /** @deprecated */ + forks?: + | { + id?: string; + /** Format: uri */ + url?: string; + user?: components["schemas"]["public-user"]; + /** Format: date-time */ + created_at?: string; + /** Format: date-time */ + updated_at?: string; + }[] + | null; + /** @deprecated */ + history?: components["schemas"]["gist-history"][] | null; + /** + * Gist + * @description Gist + */ + fork_of?: { + /** Format: uri */ + url: string; + /** Format: uri */ + forks_url: string; + /** Format: uri */ + commits_url: string; + id: string; + node_id: string; + /** Format: uri */ + git_pull_url: string; + /** Format: uri */ + git_push_url: string; + /** Format: uri */ + html_url: string; + files: { + [key: string]: { + filename?: string; + type?: string; + language?: string; + raw_url?: string; + size?: number; + }; + }; + public: boolean; + /** Format: date-time */ + created_at: string; + /** Format: date-time */ + updated_at: string; + description: string | null; + comments: number; + user: components["schemas"]["nullable-simple-user"]; + /** Format: uri */ + comments_url: string; + owner?: components["schemas"]["nullable-simple-user"]; + truncated?: boolean; + forks?: unknown[]; + history?: unknown[]; + } | 
null; + url?: string; + forks_url?: string; + commits_url?: string; + id?: string; + node_id?: string; + git_pull_url?: string; + git_push_url?: string; + html_url?: string; + files?: { + [key: string]: { + filename?: string; + type?: string; + language?: string; + raw_url?: string; + size?: number; + truncated?: boolean; + content?: string; + } | null; + }; + public?: boolean; + created_at?: string; + updated_at?: string; + description?: string | null; + comments?: number; + user?: string | null; + comments_url?: string; + owner?: components["schemas"]["simple-user"]; + truncated?: boolean; + }; + /** + * Gist Comment + * @description A comment made to a gist. + */ + "gist-comment": { + /** @example 1 */ + id: number; + /** @example MDExOkdpc3RDb21tZW50MQ== */ + node_id: string; + /** + * Format: uri + * @example https://api.github.com/gists/a6db0bec360bb87e9418/comments/1 + */ + url: string; + /** + * @description The comment text. + * @example Body of the attachment + */ + body: string; + user: components["schemas"]["nullable-simple-user"]; + /** + * Format: date-time + * @example 2011-04-18T23:23:56Z + */ + created_at: string; + /** + * Format: date-time + * @example 2011-04-18T23:23:56Z + */ + updated_at: string; + author_association: components["schemas"]["author-association"]; + }; + /** + * Gist Commit + * @description Gist Commit + */ + "gist-commit": { + /** + * Format: uri + * @example https://api.github.com/gists/aa5a315d61ae9438b18d/57a7f021a713b1c5a6a199b54cc514735d2d462f + */ + url: string; + /** @example 57a7f021a713b1c5a6a199b54cc514735d2d462f */ + version: string; + user: components["schemas"]["nullable-simple-user"]; + change_status: { + total?: number; + additions?: number; + deletions?: number; + }; + /** + * Format: date-time + * @example 2010-04-14T02:15:15Z + */ + committed_at: string; + }; + /** + * Gitignore Template + * @description Gitignore Template + */ + "gitignore-template": { + /** @example C */ + name: string; + /** + * @example # Object files + * *.o + * + * # Libraries + * *.lib + * *.a + * + * # Shared objects (inc. Windows DLLs) + * *.dll + * *.so + * *.so.* + * *.dylib + * + * # Executables + * *.exe + * *.out + * *.app + */ + source: string; + }; + /** + * License Simple + * @description License Simple + */ + "license-simple": { + /** @example mit */ + key: string; + /** @example MIT License */ + name: string; + /** + * Format: uri + * @example https://api.github.com/licenses/mit + */ + url: string | null; + /** @example MIT */ + spdx_id: string | null; + /** @example MDc6TGljZW5zZW1pdA== */ + node_id: string; + /** Format: uri */ + html_url?: string; + }; + /** + * License + * @description License + */ + license: { + /** @example mit */ + key: string; + /** @example MIT License */ + name: string; + /** @example MIT */ + spdx_id: string | null; + /** + * Format: uri + * @example https://api.github.com/licenses/mit + */ + url: string | null; + /** @example MDc6TGljZW5zZW1pdA== */ + node_id: string; + /** + * Format: uri + * @example http://choosealicense.com/licenses/mit/ + */ + html_url: string; + /** @example A permissive license that is short and to the point. It lets people do anything with your code with proper attribution and without warranty. */ + description: string; + /** @example Create a text file (typically named LICENSE or LICENSE.txt) in the root of your source code and copy the text of the license into the file. Replace [year] with the current year and [fullname] with the name (or names) of the copyright holders. 
*/ + implementation: string; + /** + * @example [ + * "commercial-use", + * "modifications", + * "distribution", + * "sublicense", + * "private-use" + * ] + */ + permissions: string[]; + /** + * @example [ + * "include-copyright" + * ] + */ + conditions: string[]; + /** + * @example [ + * "no-liability" + * ] + */ + limitations: string[]; + /** + * @example + * + * The MIT License (MIT) + * + * Copyright (c) [year] [fullname] + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + body: string; + /** @example true */ + featured: boolean; + }; + /** + * Marketplace Listing Plan + * @description Marketplace Listing Plan + */ + "marketplace-listing-plan": { + /** + * Format: uri + * @example https://api.github.com/marketplace_listing/plans/1313 + */ + url: string; + /** + * Format: uri + * @example https://api.github.com/marketplace_listing/plans/1313/accounts + */ + accounts_url: string; + /** @example 1313 */ + id: number; + /** @example 3 */ + number: number; + /** @example Pro */ + name: string; + /** @example A professional-grade CI solution */ + description: string; + /** @example 1099 */ + monthly_price_in_cents: number; + /** @example 11870 */ + yearly_price_in_cents: number; + /** @example flat-rate */ + price_model: string; + /** @example true */ + has_free_trial: boolean; + unit_name: string | null; + /** @example published */ + state: string; + /** + * @example [ + * "Up to 25 private repositories", + * "11 concurrent builds" + * ] + */ + bullets: string[]; + }; + /** + * Marketplace Purchase + * @description Marketplace Purchase + */ + "marketplace-purchase": { + url: string; + type: string; + id: number; + login: string; + organization_billing_email?: string; + email?: string | null; + marketplace_pending_change?: { + is_installed?: boolean; + effective_date?: string; + unit_count?: number | null; + id?: number; + plan?: components["schemas"]["marketplace-listing-plan"]; + } | null; + marketplace_purchase: { + billing_cycle?: string; + next_billing_date?: string | null; + is_installed?: boolean; + unit_count?: number | null; + on_free_trial?: boolean; + free_trial_ends_on?: string | null; + updated_at?: string; + plan?: components["schemas"]["marketplace-listing-plan"]; + }; + }; + /** + * Api Overview + * @description Api Overview + */ + "api-overview": { + /** @example true */ + verifiable_password_authentication: boolean; + ssh_key_fingerprints?: { + SHA256_RSA?: string; + SHA256_DSA?: string; + SHA256_ECDSA?: string; + SHA256_ED25519?: string; + }; + 
/** + * @example [ + * "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIOMqqnkVzrm0SdG6UOoqKLsabgH5C9okWi0dh2l9GKJl" + * ] + */ + ssh_keys?: string[]; + /** + * @example [ + * "127.0.0.1/32" + * ] + */ + hooks?: string[]; + /** + * @example [ + * "127.0.0.1/32" + * ] + */ + web?: string[]; + /** + * @example [ + * "127.0.0.1/32" + * ] + */ + api?: string[]; + /** + * @example [ + * "127.0.0.1/32" + * ] + */ + git?: string[]; + /** + * @example [ + * "13.65.0.0/16", + * "157.55.204.33/32", + * "2a01:111:f403:f90c::/62" + * ] + */ + packages?: string[]; + /** + * @example [ + * "192.30.252.153/32", + * "192.30.252.154/32" + * ] + */ + pages?: string[]; + /** + * @example [ + * "54.158.161.132", + * "54.226.70.38" + * ] + */ + importer?: string[]; + /** + * @example [ + * "13.64.0.0/16", + * "13.65.0.0/16" + * ] + */ + actions?: string[]; + /** + * @example [ + * "192.168.7.15/32", + * "192.168.7.16/32" + * ] + */ + dependabot?: string[]; + }; + /** + * Repository + * @description A git repository + */ + "nullable-repository": { + /** + * @description Unique identifier of the repository + * @example 42 + */ + id: number; + /** @example MDEwOlJlcG9zaXRvcnkxMjk2MjY5 */ + node_id: string; + /** + * @description The name of the repository. + * @example Team Environment + */ + name: string; + /** @example octocat/Hello-World */ + full_name: string; + license: components["schemas"]["nullable-license-simple"]; + organization?: components["schemas"]["nullable-simple-user"]; + forks: number; + permissions?: { + admin: boolean; + pull: boolean; + triage?: boolean; + push: boolean; + maintain?: boolean; + }; + owner: components["schemas"]["simple-user"]; + /** + * @description Whether the repository is private or public. + * @default false + */ + private: boolean; + /** + * Format: uri + * @example https://github.com/octocat/Hello-World + */ + html_url: string; + /** @example This your first repo! 
*/ + description: string | null; + fork: boolean; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World + */ + url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/{archive_format}{/ref} */ + archive_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/assignees{/user} */ + assignees_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/git/blobs{/sha} */ + blobs_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/branches{/branch} */ + branches_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/collaborators{/collaborator} */ + collaborators_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/comments{/number} */ + comments_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/commits{/sha} */ + commits_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/compare/{base}...{head} */ + compare_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/contents/{+path} */ + contents_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/contributors + */ + contributors_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/deployments + */ + deployments_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/downloads + */ + downloads_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/events + */ + events_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/forks + */ + forks_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/git/commits{/sha} */ + git_commits_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/git/refs{/sha} */ + git_refs_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/git/tags{/sha} */ + git_tags_url: string; + /** @example git:github.com/octocat/Hello-World.git */ + git_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/issues/comments{/number} */ + issue_comment_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/issues/events{/number} */ + issue_events_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/issues{/number} */ + issues_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/keys{/key_id} */ + keys_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/labels{/name} */ + labels_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/languages + */ + languages_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/merges + */ + merges_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/milestones{/number} */ + milestones_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/notifications{?since,all,participating} */ + notifications_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/pulls{/number} */ + pulls_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/releases{/id} */ + releases_url: string; + /** @example git@github.com:octocat/Hello-World.git */ + ssh_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/stargazers 
+ */
+ stargazers_url: string;
+ /** @example http://api.github.com/repos/octocat/Hello-World/statuses/{sha} */
+ statuses_url: string;
+ /**
+ * Format: uri
+ * @example http://api.github.com/repos/octocat/Hello-World/subscribers
+ */
+ subscribers_url: string;
+ /**
+ * Format: uri
+ * @example http://api.github.com/repos/octocat/Hello-World/subscription
+ */
+ subscription_url: string;
+ /**
+ * Format: uri
+ * @example http://api.github.com/repos/octocat/Hello-World/tags
+ */
+ tags_url: string;
+ /**
+ * Format: uri
+ * @example http://api.github.com/repos/octocat/Hello-World/teams
+ */
+ teams_url: string;
+ /** @example http://api.github.com/repos/octocat/Hello-World/git/trees{/sha} */
+ trees_url: string;
+ /** @example https://github.com/octocat/Hello-World.git */
+ clone_url: string;
+ /**
+ * Format: uri
+ * @example git:git.example.com/octocat/Hello-World
+ */
+ mirror_url: string | null;
+ /**
+ * Format: uri
+ * @example http://api.github.com/repos/octocat/Hello-World/hooks
+ */
+ hooks_url: string;
+ /**
+ * Format: uri
+ * @example https://svn.github.com/octocat/Hello-World
+ */
+ svn_url: string;
+ /**
+ * Format: uri
+ * @example https://github.com
+ */
+ homepage: string | null;
+ language: string | null;
+ /** @example 9 */
+ forks_count: number;
+ /** @example 80 */
+ stargazers_count: number;
+ /** @example 80 */
+ watchers_count: number;
+ /** @example 108 */
+ size: number;
+ /**
+ * @description The default branch of the repository.
+ * @example master
+ */
+ default_branch: string;
+ /** @example 0 */
+ open_issues_count: number;
+ /**
+ * @description Whether this repository acts as a template that can be used to generate new repositories.
+ * @default false
+ * @example true
+ */
+ is_template?: boolean;
+ topics?: string[];
+ /**
+ * @description Whether issues are enabled.
+ * @default true
+ * @example true
+ */
+ has_issues: boolean;
+ /**
+ * @description Whether projects are enabled.
+ * @default true
+ * @example true
+ */
+ has_projects: boolean;
+ /**
+ * @description Whether the wiki is enabled.
+ * @default true
+ * @example true
+ */
+ has_wiki: boolean;
+ has_pages: boolean;
+ /**
+ * @description Whether downloads are enabled.
+ * @default true
+ * @example true
+ */
+ has_downloads: boolean;
+ /**
+ * @description Whether the repository is archived.
+ * @default false
+ */
+ archived: boolean;
+ /** @description Returns whether or not this repository is disabled. */
+ disabled: boolean;
+ /**
+ * @description The repository visibility: public, private, or internal.
+ * @default public
+ */
+ visibility?: string;
+ /**
+ * Format: date-time
+ * @example 2011-01-26T19:06:43Z
+ */
+ pushed_at: string | null;
+ /**
+ * Format: date-time
+ * @example 2011-01-26T19:01:12Z
+ */
+ created_at: string | null;
+ /**
+ * Format: date-time
+ * @example 2011-01-26T19:14:43Z
+ */
+ updated_at: string | null;
+ /**
+ * @description Whether to allow rebase merges for pull requests.
+ * @default true + * @example true + */ + allow_rebase_merge?: boolean; + template_repository?: { + id?: number; + node_id?: string; + name?: string; + full_name?: string; + owner?: { + login?: string; + id?: number; + node_id?: string; + avatar_url?: string; + gravatar_id?: string; + url?: string; + html_url?: string; + followers_url?: string; + following_url?: string; + gists_url?: string; + starred_url?: string; + subscriptions_url?: string; + organizations_url?: string; + repos_url?: string; + events_url?: string; + received_events_url?: string; + type?: string; + site_admin?: boolean; + }; + private?: boolean; + html_url?: string; + description?: string; + fork?: boolean; + url?: string; + archive_url?: string; + assignees_url?: string; + blobs_url?: string; + branches_url?: string; + collaborators_url?: string; + comments_url?: string; + commits_url?: string; + compare_url?: string; + contents_url?: string; + contributors_url?: string; + deployments_url?: string; + downloads_url?: string; + events_url?: string; + forks_url?: string; + git_commits_url?: string; + git_refs_url?: string; + git_tags_url?: string; + git_url?: string; + issue_comment_url?: string; + issue_events_url?: string; + issues_url?: string; + keys_url?: string; + labels_url?: string; + languages_url?: string; + merges_url?: string; + milestones_url?: string; + notifications_url?: string; + pulls_url?: string; + releases_url?: string; + ssh_url?: string; + stargazers_url?: string; + statuses_url?: string; + subscribers_url?: string; + subscription_url?: string; + tags_url?: string; + teams_url?: string; + trees_url?: string; + clone_url?: string; + mirror_url?: string; + hooks_url?: string; + svn_url?: string; + homepage?: string; + language?: string; + forks_count?: number; + stargazers_count?: number; + watchers_count?: number; + size?: number; + default_branch?: string; + open_issues_count?: number; + is_template?: boolean; + topics?: string[]; + has_issues?: boolean; + has_projects?: boolean; + has_wiki?: boolean; + has_pages?: boolean; + has_downloads?: boolean; + archived?: boolean; + disabled?: boolean; + visibility?: string; + pushed_at?: string; + created_at?: string; + updated_at?: string; + permissions?: { + admin?: boolean; + maintain?: boolean; + push?: boolean; + triage?: boolean; + pull?: boolean; + }; + allow_rebase_merge?: boolean; + temp_clone_token?: string; + allow_squash_merge?: boolean; + allow_auto_merge?: boolean; + delete_branch_on_merge?: boolean; + allow_update_branch?: boolean; + use_squash_pr_title_as_default?: boolean; + allow_merge_commit?: boolean; + subscribers_count?: number; + network_count?: number; + } | null; + temp_clone_token?: string; + /** + * @description Whether to allow squash merges for pull requests. + * @default true + * @example true + */ + allow_squash_merge?: boolean; + /** + * @description Whether to allow Auto-merge to be used on pull requests. + * @default false + * @example false + */ + allow_auto_merge?: boolean; + /** + * @description Whether to delete head branches when pull requests are merged + * @default false + * @example false + */ + delete_branch_on_merge?: boolean; + /** + * @description Whether or not a pull request head branch that is behind its base branch can always be updated even if it is not required to be up to date before merging. + * @default false + * @example false + */ + allow_update_branch?: boolean; + /** + * @description Whether a squash merge commit can use the pull request title as default. 
+ * @default false + */ + use_squash_pr_title_as_default?: boolean; + /** + * @description Whether to allow merge commits for pull requests. + * @default true + * @example true + */ + allow_merge_commit?: boolean; + /** @description Whether to allow forking this repo */ + allow_forking?: boolean; + subscribers_count?: number; + network_count?: number; + open_issues: number; + watchers: number; + master_branch?: string; + /** @example "2020-07-09T00:17:42Z" */ + starred_at?: string; + } | null; + /** + * Minimal Repository + * @description Minimal Repository + */ + "minimal-repository": { + /** @example 1296269 */ + id: number; + /** @example MDEwOlJlcG9zaXRvcnkxMjk2MjY5 */ + node_id: string; + /** @example Hello-World */ + name: string; + /** @example octocat/Hello-World */ + full_name: string; + owner: components["schemas"]["simple-user"]; + private: boolean; + /** + * Format: uri + * @example https://github.com/octocat/Hello-World + */ + html_url: string; + /** @example This your first repo! */ + description: string | null; + fork: boolean; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World + */ + url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/{archive_format}{/ref} */ + archive_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/assignees{/user} */ + assignees_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/git/blobs{/sha} */ + blobs_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/branches{/branch} */ + branches_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/collaborators{/collaborator} */ + collaborators_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/comments{/number} */ + comments_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/commits{/sha} */ + commits_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/compare/{base}...{head} */ + compare_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/contents/{+path} */ + contents_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/contributors + */ + contributors_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/deployments + */ + deployments_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/downloads + */ + downloads_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/events + */ + events_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/forks + */ + forks_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/git/commits{/sha} */ + git_commits_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/git/refs{/sha} */ + git_refs_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/git/tags{/sha} */ + git_tags_url: string; + git_url?: string; + /** @example http://api.github.com/repos/octocat/Hello-World/issues/comments{/number} */ + issue_comment_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/issues/events{/number} */ + issue_events_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/issues{/number} */ + issues_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/keys{/key_id} */ + keys_url: string; + /** 
@example http://api.github.com/repos/octocat/Hello-World/labels{/name} */ + labels_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/languages + */ + languages_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/merges + */ + merges_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/milestones{/number} */ + milestones_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/notifications{?since,all,participating} */ + notifications_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/pulls{/number} */ + pulls_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/releases{/id} */ + releases_url: string; + ssh_url?: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/stargazers + */ + stargazers_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/statuses/{sha} */ + statuses_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/subscribers + */ + subscribers_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/subscription + */ + subscription_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/tags + */ + tags_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/teams + */ + teams_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/git/trees{/sha} */ + trees_url: string; + clone_url?: string; + mirror_url?: string | null; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/hooks + */ + hooks_url: string; + svn_url?: string; + homepage?: string | null; + language?: string | null; + forks_count?: number; + stargazers_count?: number; + watchers_count?: number; + size?: number; + default_branch?: string; + open_issues_count?: number; + is_template?: boolean; + topics?: string[]; + has_issues?: boolean; + has_projects?: boolean; + has_wiki?: boolean; + has_pages?: boolean; + has_downloads?: boolean; + archived?: boolean; + disabled?: boolean; + visibility?: string; + /** + * Format: date-time + * @example 2011-01-26T19:06:43Z + */ + pushed_at?: string | null; + /** + * Format: date-time + * @example 2011-01-26T19:01:12Z + */ + created_at?: string | null; + /** + * Format: date-time + * @example 2011-01-26T19:14:43Z + */ + updated_at?: string | null; + permissions?: { + admin?: boolean; + maintain?: boolean; + push?: boolean; + triage?: boolean; + pull?: boolean; + }; + /** @example admin */ + role_name?: string; + template_repository?: components["schemas"]["nullable-repository"]; + temp_clone_token?: string; + delete_branch_on_merge?: boolean; + subscribers_count?: number; + network_count?: number; + code_of_conduct?: components["schemas"]["code-of-conduct"]; + license?: { + key?: string; + name?: string; + spdx_id?: string; + url?: string; + node_id?: string; + } | null; + /** @example 0 */ + forks?: number; + /** @example 0 */ + open_issues?: number; + /** @example 0 */ + watchers?: number; + allow_forking?: boolean; + }; + /** + * Thread + * @description Thread + */ + thread: { + id: string; + repository: components["schemas"]["minimal-repository"]; + subject: { + title: string; + url: string; + latest_comment_url: string; + type: string; + }; + reason: string; + unread: boolean; + updated_at: string; + last_read_at: string | null; 
+ url: string; + /** @example https://api.github.com/notifications/threads/2/subscription */ + subscription_url: string; + }; + /** + * Thread Subscription + * @description Thread Subscription + */ + "thread-subscription": { + /** @example true */ + subscribed: boolean; + ignored: boolean; + reason: string | null; + /** + * Format: date-time + * @example 2012-10-06T21:34:12Z + */ + created_at: string | null; + /** + * Format: uri + * @example https://api.github.com/notifications/threads/1/subscription + */ + url: string; + /** + * Format: uri + * @example https://api.github.com/notifications/threads/1 + */ + thread_url?: string; + /** + * Format: uri + * @example https://api.github.com/repos/1 + */ + repository_url?: string; + }; + /** + * Organization Custom Repository Role + * @description Custom repository roles created by organization administrators + */ + "organization-custom-repository-role": { + id: number; + name: string; + }; + /** + * Organization Full + * @description Organization Full + */ + "organization-full": { + /** @example github */ + login: string; + /** @example 1 */ + id: number; + /** @example MDEyOk9yZ2FuaXphdGlvbjE= */ + node_id: string; + /** + * Format: uri + * @example https://api.github.com/orgs/github + */ + url: string; + /** + * Format: uri + * @example https://api.github.com/orgs/github/repos + */ + repos_url: string; + /** + * Format: uri + * @example https://api.github.com/orgs/github/events + */ + events_url: string; + /** @example https://api.github.com/orgs/github/hooks */ + hooks_url: string; + /** @example https://api.github.com/orgs/github/issues */ + issues_url: string; + /** @example https://api.github.com/orgs/github/members{/member} */ + members_url: string; + /** @example https://api.github.com/orgs/github/public_members{/member} */ + public_members_url: string; + /** @example https://github.com/images/error/octocat_happy.gif */ + avatar_url: string; + /** @example A great organization */ + description: string | null; + /** @example github */ + name?: string; + /** @example GitHub */ + company?: string; + /** + * Format: uri + * @example https://github.com/blog + */ + blog?: string; + /** @example San Francisco */ + location?: string; + /** + * Format: email + * @example octocat@github.com + */ + email?: string; + /** @example github */ + twitter_username?: string | null; + /** @example true */ + is_verified?: boolean; + /** @example true */ + has_organization_projects: boolean; + /** @example true */ + has_repository_projects: boolean; + /** @example 2 */ + public_repos: number; + /** @example 1 */ + public_gists: number; + /** @example 20 */ + followers: number; + /** @example 0 */ + following: number; + /** + * Format: uri + * @example https://github.com/octocat + */ + html_url: string; + /** + * Format: date-time + * @example 2008-01-14T04:33:35Z + */ + created_at: string; + /** @example Organization */ + type: string; + /** @example 100 */ + total_private_repos?: number; + /** @example 100 */ + owned_private_repos?: number; + /** @example 81 */ + private_gists?: number | null; + /** @example 10000 */ + disk_usage?: number | null; + /** @example 8 */ + collaborators?: number | null; + /** + * Format: email + * @example org@example.com + */ + billing_email?: string | null; + plan?: { + name: string; + space: number; + private_repos: number; + filled_seats?: number; + seats?: number; + }; + default_repository_permission?: string | null; + /** @example true */ + members_can_create_repositories?: boolean | null; + /** @example true */ + 
two_factor_requirement_enabled?: boolean | null; + /** @example all */ + members_allowed_repository_creation_type?: string; + /** @example true */ + members_can_create_public_repositories?: boolean; + /** @example true */ + members_can_create_private_repositories?: boolean; + /** @example true */ + members_can_create_internal_repositories?: boolean; + /** @example true */ + members_can_create_pages?: boolean; + /** @example true */ + members_can_create_public_pages?: boolean; + /** @example true */ + members_can_create_private_pages?: boolean; + /** @example false */ + members_can_fork_private_repositories?: boolean | null; + /** Format: date-time */ + updated_at: string; + }; + /** + * Actions Cache Usage by repository + * @description GitHub Actions Cache Usage by repository. + */ + "actions-cache-usage-by-repository": { + /** + * @description The repository owner and name for the cache usage being shown. + * @example octo-org/Hello-World + */ + full_name: string; + /** + * @description The sum of the size in bytes of all the active cache items in the repository. + * @example 2322142 + */ + active_caches_size_in_bytes: number; + /** + * @description The number of active caches in the repository. + * @example 3 + */ + active_caches_count: number; + }; + /** + * Actions OIDC Subject customization + * @description Actions OIDC Subject customization + */ + "oidc-custom-sub": { + include_claim_keys: string[]; + }; + /** + * Empty Object + * @description An object without any properties. + */ + "empty-object": { [key: string]: unknown }; + /** + * @description The policy that controls the repositories in the organization that are allowed to run GitHub Actions. + * @enum {string} + */ + "enabled-repositories": "all" | "none" | "selected"; + "actions-organization-permissions": { + enabled_repositories: components["schemas"]["enabled-repositories"]; + /** @description The API URL to use to get or set the selected repositories that are allowed to run GitHub Actions, when `enabled_repositories` is set to `selected`. */ + selected_repositories_url?: string; + allowed_actions?: components["schemas"]["allowed-actions"]; + selected_actions_url?: components["schemas"]["selected-actions-url"]; + }; + "runner-groups-org": { + id: number; + name: string; + visibility: string; + default: boolean; + /** @description Link to the selected repositories resource for this runner group. Not present unless visibility was set to `selected` */ + selected_repositories_url?: string; + runners_url: string; + inherited: boolean; + inherited_allows_public_repositories?: boolean; + allows_public_repositories: boolean; + /** + * @description If `true`, the `restricted_to_workflows` and `selected_workflows` fields cannot be modified. + * @default false + */ + workflow_restrictions_read_only?: boolean; + /** + * @description If `true`, the runner group will be restricted to running only the workflows specified in the `selected_workflows` array. + * @default false + */ + restricted_to_workflows?: boolean; + /** @description List of workflows the runner group should be allowed to run. This setting will be ignored unless `restricted_to_workflows` is set to `true`. */ + selected_workflows?: string[]; + }; + /** + * Actions Secret for an Organization + * @description Secrets for GitHub Actions for an organization. + */ + "organization-actions-secret": { + /** + * @description The name of the secret. 
+ * @example SECRET_TOKEN + */ + name: string; + /** Format: date-time */ + created_at: string; + /** Format: date-time */ + updated_at: string; + /** + * @description Visibility of a secret + * @enum {string} + */ + visibility: "all" | "private" | "selected"; + /** + * Format: uri + * @example https://api.github.com/organizations/org/secrets/my_secret/repositories + */ + selected_repositories_url?: string; + }; + /** + * ActionsPublicKey + * @description The public key used for setting Actions Secrets. + */ + "actions-public-key": { + /** + * @description The identifier for the key. + * @example 1234567 + */ + key_id: string; + /** + * @description The Base64 encoded public key. + * @example hBT5WZEj8ZoOv6TYJsfWq7MxTEQopZO5/IT3ZCVQPzs= + */ + key: string; + /** @example 2 */ + id?: number; + /** @example https://api.github.com/user/keys/2 */ + url?: string; + /** @example ssh-rsa AAAAB3NzaC1yc2EAAA */ + title?: string; + /** @example 2011-01-26T19:01:12Z */ + created_at?: string; + }; + /** + * Codespace machine + * @description A description of the machine powering a codespace. + */ + "nullable-codespace-machine": { + /** + * @description The name of the machine. + * @example standardLinux + */ + name: string; + /** + * @description The display name of the machine includes cores, memory, and storage. + * @example 4 cores, 8 GB RAM, 64 GB storage + */ + display_name: string; + /** + * @description The operating system of the machine. + * @example linux + */ + operating_system: string; + /** + * @description How much storage is available to the codespace. + * @example 68719476736 + */ + storage_in_bytes: number; + /** + * @description How much memory is available to the codespace. + * @example 8589934592 + */ + memory_in_bytes: number; + /** + * @description How many cores are available to the codespace. + * @example 4 + */ + cpus: number; + /** + * @description Whether a prebuild is currently available when creating a codespace for this machine and repository. If a branch was not specified as a ref, the default branch will be assumed. Value will be "null" if prebuilds are not supported or prebuild availability could not be determined. Value will be "none" if no prebuild is available. Latest values "ready" and "in_progress" indicate the prebuild availability status. + * @example ready + * @enum {string|null} + */ + prebuild_availability: ("none" | "ready" | "in_progress") | null; + } | null; + /** + * Codespace + * @description A codespace. + */ + codespace: { + /** @example 1 */ + id: number; + /** + * @description Automatically generated name of this codespace. + * @example monalisa-octocat-hello-world-g4wpq6h95q + */ + name: string; + /** + * @description Display name for this codespace. + * @example bookish space pancake + */ + display_name?: string | null; + /** + * @description UUID identifying this codespace's environment. + * @example 26a7c758-7299-4a73-b978-5a92a7ae98a0 + */ + environment_id: string | null; + owner: components["schemas"]["simple-user"]; + billable_owner: components["schemas"]["simple-user"]; + repository: components["schemas"]["minimal-repository"]; + machine: components["schemas"]["nullable-codespace-machine"]; + /** + * @description Path to devcontainer.json from repo root used to create Codespace. + * @example .devcontainer/example/devcontainer.json + */ + devcontainer_path?: string | null; + /** + * @description Whether the codespace was created from a prebuild. 
+ * @example false + */ + prebuild: boolean | null; + /** + * Format: date-time + * @example 2011-01-26T19:01:12Z + */ + created_at: string; + /** + * Format: date-time + * @example 2011-01-26T19:01:12Z + */ + updated_at: string; + /** + * Format: date-time + * @description Last known time this codespace was started. + * @example 2011-01-26T19:01:12Z + */ + last_used_at: string; + /** + * @description State of this codespace. + * @example Available + * @enum {string} + */ + state: + | "Unknown" + | "Created" + | "Queued" + | "Provisioning" + | "Available" + | "Awaiting" + | "Unavailable" + | "Deleted" + | "Moved" + | "Shutdown" + | "Archived" + | "Starting" + | "ShuttingDown" + | "Failed" + | "Exporting" + | "Updating" + | "Rebuilding"; + /** + * Format: uri + * @description API URL for this codespace. + */ + url: string; + /** @description Details about the codespace's git repository. */ + git_status: { + /** + * @description The number of commits the local repository is ahead of the remote. + * @example 0 + */ + ahead?: number; + /** + * @description The number of commits the local repository is behind the remote. + * @example 0 + */ + behind?: number; + /** @description Whether the local repository has unpushed changes. */ + has_unpushed_changes?: boolean; + /** @description Whether the local repository has uncommitted changes. */ + has_uncommitted_changes?: boolean; + /** + * @description The current branch (or SHA if in detached HEAD state) of the local repository. + * @example main + */ + ref?: string; + }; + /** + * @description The Azure region where this codespace is located. + * @example WestUs2 + * @enum {string} + */ + location: "EastUs" | "SouthEastAsia" | "WestEurope" | "WestUs2"; + /** + * @description The number of minutes of inactivity after which this codespace will be automatically stopped. + * @example 60 + */ + idle_timeout_minutes: number | null; + /** + * Format: uri + * @description URL to access this codespace on the web. + */ + web_url: string; + /** + * Format: uri + * @description API URL to access available alternate machine types for this codespace. + */ + machines_url: string; + /** + * Format: uri + * @description API URL to start this codespace. + */ + start_url: string; + /** + * Format: uri + * @description API URL to stop this codespace. + */ + stop_url: string; + /** + * Format: uri + * @description API URL for the Pull Request associated with this codespace, if any. + */ + pulls_url: string | null; + recent_folders: string[]; + runtime_constraints?: { + /** @description The privacy settings a user can select from when forwarding a port. */ + allowed_port_privacy_settings?: string[] | null; + }; + /** @description Whether or not a codespace has a pending async operation. This would mean that the codespace is temporarily unavailable. The only thing that you can do with a codespace in this state is delete it. */ + pending_operation?: boolean | null; + /** @description Text to show user when codespace is disabled by a pending operation */ + pending_operation_disabled_reason?: string | null; + /** @description Text to show user when codespace idle timeout minutes has been overriden by an organization policy */ + idle_timeout_notice?: string | null; + }; + /** + * Credential Authorization + * @description Credential Authorization + */ + "credential-authorization": { + /** + * @description User login that owns the underlying credential. + * @example monalisa + */ + login: string; + /** + * @description Unique identifier for the credential. 
+ * @example 1 + */ + credential_id: number; + /** + * @description Human-readable description of the credential type. + * @example SSH Key + */ + credential_type: string; + /** + * @description Last eight characters of the credential. Only included in responses with credential_type of personal access token. + * @example 12345678 + */ + token_last_eight?: string; + /** + * Format: date-time + * @description Date when the credential was authorized for use. + * @example 2011-01-26T19:06:43Z + */ + credential_authorized_at: string; + /** + * @description List of oauth scopes the token has been granted. + * @example [ + * "user", + * "repo" + * ] + */ + scopes?: string[]; + /** + * @description Unique string to distinguish the credential. Only included in responses with credential_type of SSH Key. + * @example jklmnop12345678 + */ + fingerprint?: string; + /** + * Format: date-time + * @description Date when the credential was last accessed. May be null if it was never accessed + * @example 2011-01-26T19:06:43Z + */ + credential_accessed_at: string | null; + /** @example 12345678 */ + authorized_credential_id: number | null; + /** + * @description The title given to the ssh key. This will only be present when the credential is an ssh key. + * @example my ssh key + */ + authorized_credential_title?: string | null; + /** + * @description The note given to the token. This will only be present when the credential is a token. + * @example my token + */ + authorized_credential_note?: string | null; + /** + * Format: date-time + * @description The expiry for the token. This will only be present when the credential is a token. + */ + authorized_credential_expires_at?: string | null; + }; + /** + * Dependabot Secret for an Organization + * @description Secrets for GitHub Dependabot for an organization. + */ + "organization-dependabot-secret": { + /** + * @description The name of the secret. + * @example SECRET_TOKEN + */ + name: string; + /** Format: date-time */ + created_at: string; + /** Format: date-time */ + updated_at: string; + /** + * @description Visibility of a secret + * @enum {string} + */ + visibility: "all" | "private" | "selected"; + /** + * Format: uri + * @example https://api.github.com/organizations/org/dependabot/secrets/my_secret/repositories + */ + selected_repositories_url?: string; + }; + /** + * DependabotPublicKey + * @description The public key used for setting Dependabot Secrets. + */ + "dependabot-public-key": { + /** + * @description The identifier for the key. + * @example 1234567 + */ + key_id: string; + /** + * @description The Base64 encoded public key. 
+ * @example hBT5WZEj8ZoOv6TYJsfWq7MxTEQopZO5/IT3ZCVQPzs= + */ + key: string; + }; + /** + * ExternalGroup + * @description Information about an external group's usage and its members + */ + "external-group": { + /** + * @description The internal ID of the group + * @example 1 + */ + group_id: number; + /** + * @description The display name for the group + * @example group-azuread-test + */ + group_name: string; + /** + * @description The date when the group was last updated_at + * @example 2021-01-03 22:27:15:000 -700 + */ + updated_at?: string; + /** + * @description An array of teams linked to this group + * @example [ + * { + * "team_id": 1, + * "team_name": "team-test" + * }, + * { + * "team_id": 2, + * "team_name": "team-test2" + * } + * ] + */ + teams: { + /** + * @description The id for a team + * @example 1 + */ + team_id: number; + /** + * @description The name of the team + * @example team-test + */ + team_name: string; + }[]; + /** + * @description An array of external members linked to this group + * @example [ + * { + * "member_id": 1, + * "member_login": "mona-lisa_eocsaxrs", + * "member_name": "Mona Lisa", + * "member_email": "mona_lisa@github.com" + * }, + * { + * "member_id": 2, + * "member_login": "octo-lisa_eocsaxrs", + * "member_name": "Octo Lisa", + * "member_email": "octo_lisa@github.com" + * } + * ] + */ + members: { + /** + * @description The internal user ID of the identity + * @example 1 + */ + member_id: number; + /** + * @description The handle/login for the user + * @example mona-lisa_eocsaxrs + */ + member_login: string; + /** + * @description The user display name/profile name + * @example Mona Lisa + */ + member_name: string; + /** + * @description An email attached to a user + * @example mona_lisa@github.com + */ + member_email: string; + }[]; + }; + /** + * ExternalGroups + * @description A list of external groups available to be connected to a team + */ + "external-groups": { + /** + * @description An array of external groups available to be mapped to a team + * @example [ + * { + * "group_id": 1, + * "group_name": "group-azuread-test", + * "updated_at": "2021-01-03 22:27:15:000 -700" + * }, + * { + * "group_id": 2, + * "group_name": "group-azuread-test2", + * "updated_at": "2021-06-03 22:27:15:000 -700" + * } + * ] + */ + groups?: { + /** + * @description The internal ID of the group + * @example 1 + */ + group_id: number; + /** + * @description The display name of the group + * @example group-azuread-test + */ + group_name: string; + /** + * @description The time of the last update for this group + * @example 2019-06-03 22:27:15:000 -700 + */ + updated_at: string; + }[]; + }; + /** + * Organization Invitation + * @description Organization Invitation + */ + "organization-invitation": { + id: number; + login: string | null; + email: string | null; + role: string; + created_at: string; + failed_at?: string | null; + failed_reason?: string | null; + inviter: components["schemas"]["simple-user"]; + team_count: number; + /** @example "MDIyOk9yZ2FuaXphdGlvbkludml0YXRpb24x" */ + node_id: string; + /** @example "https://api.github.com/organizations/16/invitations/1/teams" */ + invitation_teams_url: string; + }; + /** + * Org Hook + * @description Org Hook + */ + "org-hook": { + /** @example 1 */ + id: number; + /** + * Format: uri + * @example https://api.github.com/orgs/octocat/hooks/1 + */ + url: string; + /** + * Format: uri + * @example https://api.github.com/orgs/octocat/hooks/1/pings + */ + ping_url: string; + /** + * Format: uri + * @example 
https://api.github.com/orgs/octocat/hooks/1/deliveries + */ + deliveries_url?: string; + /** @example web */ + name: string; + /** + * @example [ + * "push", + * "pull_request" + * ] + */ + events: string[]; + /** @example true */ + active: boolean; + config: { + /** @example "http://example.com/2" */ + url?: string; + /** @example "0" */ + insecure_ssl?: string; + /** @example "form" */ + content_type?: string; + /** @example "********" */ + secret?: string; + }; + /** + * Format: date-time + * @example 2011-09-06T20:39:23Z + */ + updated_at: string; + /** + * Format: date-time + * @example 2011-09-06T17:26:27Z + */ + created_at: string; + type: string; + }; + /** + * @description The type of GitHub user that can comment, open issues, or create pull requests while the interaction limit is in effect. + * @example collaborators_only + * @enum {string} + */ + "interaction-group": + | "existing_users" + | "contributors_only" + | "collaborators_only"; + /** + * Interaction Limits + * @description Interaction limit settings. + */ + "interaction-limit-response": { + limit: components["schemas"]["interaction-group"]; + /** @example repository */ + origin: string; + /** + * Format: date-time + * @example 2018-08-17T04:18:39Z + */ + expires_at: string; + }; + /** + * @description The duration of the interaction restriction. Default: `one_day`. + * @example one_month + * @enum {string} + */ + "interaction-expiry": + | "one_day" + | "three_days" + | "one_week" + | "one_month" + | "six_months"; + /** + * Interaction Restrictions + * @description Limit interactions to a specific type of user for a specified duration + */ + "interaction-limit": { + limit: components["schemas"]["interaction-group"]; + expiry?: components["schemas"]["interaction-expiry"]; + }; + /** + * Team Simple + * @description Groups of organization members that gives permissions on specified repositories. + */ + "nullable-team-simple": { + /** + * @description Unique identifier of the team + * @example 1 + */ + id: number; + /** @example MDQ6VGVhbTE= */ + node_id: string; + /** + * Format: uri + * @description URL for the team + * @example https://api.github.com/organizations/1/team/1 + */ + url: string; + /** @example https://api.github.com/organizations/1/team/1/members{/member} */ + members_url: string; + /** + * @description Name of the team + * @example Justice League + */ + name: string; + /** + * @description Description of the team + * @example A great team. + */ + description: string | null; + /** + * @description Permission that the team will have for its repositories + * @example admin + */ + permission: string; + /** + * @description The level of privacy this team should have + * @example closed + */ + privacy?: string; + /** + * Format: uri + * @example https://github.com/orgs/rails/teams/core + */ + html_url: string; + /** + * Format: uri + * @example https://api.github.com/organizations/1/team/1/repos + */ + repositories_url: string; + /** @example justice-league */ + slug: string; + /** + * @description Distinguished Name (DN) that team maps to within LDAP environment + * @example uid=example,ou=users,dc=github,dc=com + */ + ldap_dn?: string; + } | null; + /** + * Team + * @description Groups of organization members that gives permissions on specified repositories. 
+ */ + team: { + id: number; + node_id: string; + name: string; + slug: string; + description: string | null; + privacy?: string; + permission: string; + permissions?: { + pull: boolean; + triage: boolean; + push: boolean; + maintain: boolean; + admin: boolean; + }; + /** Format: uri */ + url: string; + /** + * Format: uri + * @example https://github.com/orgs/rails/teams/core + */ + html_url: string; + members_url: string; + /** Format: uri */ + repositories_url: string; + parent: components["schemas"]["nullable-team-simple"]; + }; + /** + * Org Membership + * @description Org Membership + */ + "org-membership": { + /** + * Format: uri + * @example https://api.github.com/orgs/octocat/memberships/defunkt + */ + url: string; + /** + * @description The state of the member in the organization. The `pending` state indicates the user has not yet accepted an invitation. + * @example active + * @enum {string} + */ + state: "active" | "pending"; + /** + * @description The user's membership type in the organization. + * @example admin + * @enum {string} + */ + role: "admin" | "member" | "billing_manager"; + /** + * Format: uri + * @example https://api.github.com/orgs/octocat + */ + organization_url: string; + organization: components["schemas"]["organization-simple"]; + user: components["schemas"]["nullable-simple-user"]; + permissions?: { + can_create_repository: boolean; + }; + }; + /** + * Migration + * @description A migration. + */ + migration: { + /** @example 79 */ + id: number; + owner: components["schemas"]["nullable-simple-user"]; + /** @example 0b989ba4-242f-11e5-81e1-c7b6966d2516 */ + guid: string; + /** @example pending */ + state: string; + /** @example true */ + lock_repositories: boolean; + exclude_metadata: boolean; + exclude_git_data: boolean; + exclude_attachments: boolean; + exclude_releases: boolean; + exclude_owner_projects: boolean; + org_metadata_only: boolean; + repositories: components["schemas"]["repository"][]; + /** + * Format: uri + * @example https://api.github.com/orgs/octo-org/migrations/79 + */ + url: string; + /** + * Format: date-time + * @example 2015-07-06T15:33:38-07:00 + */ + created_at: string; + /** + * Format: date-time + * @example 2015-07-06T15:33:38-07:00 + */ + updated_at: string; + node_id: string; + /** Format: uri */ + archive_url?: string; + exclude?: unknown[]; + }; + /** + * Minimal Repository + * @description Minimal Repository + */ + "nullable-minimal-repository": { + /** @example 1296269 */ + id: number; + /** @example MDEwOlJlcG9zaXRvcnkxMjk2MjY5 */ + node_id: string; + /** @example Hello-World */ + name: string; + /** @example octocat/Hello-World */ + full_name: string; + owner: components["schemas"]["simple-user"]; + private: boolean; + /** + * Format: uri + * @example https://github.com/octocat/Hello-World + */ + html_url: string; + /** @example This your first repo! 
*/ + description: string | null; + fork: boolean; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World + */ + url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/{archive_format}{/ref} */ + archive_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/assignees{/user} */ + assignees_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/git/blobs{/sha} */ + blobs_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/branches{/branch} */ + branches_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/collaborators{/collaborator} */ + collaborators_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/comments{/number} */ + comments_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/commits{/sha} */ + commits_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/compare/{base}...{head} */ + compare_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/contents/{+path} */ + contents_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/contributors + */ + contributors_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/deployments + */ + deployments_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/downloads + */ + downloads_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/events + */ + events_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/forks + */ + forks_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/git/commits{/sha} */ + git_commits_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/git/refs{/sha} */ + git_refs_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/git/tags{/sha} */ + git_tags_url: string; + git_url?: string; + /** @example http://api.github.com/repos/octocat/Hello-World/issues/comments{/number} */ + issue_comment_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/issues/events{/number} */ + issue_events_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/issues{/number} */ + issues_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/keys{/key_id} */ + keys_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/labels{/name} */ + labels_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/languages + */ + languages_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/merges + */ + merges_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/milestones{/number} */ + milestones_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/notifications{?since,all,participating} */ + notifications_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/pulls{/number} */ + pulls_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/releases{/id} */ + releases_url: string; + ssh_url?: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/stargazers + */ + stargazers_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/statuses/{sha} */ 
+ statuses_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/subscribers + */ + subscribers_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/subscription + */ + subscription_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/tags + */ + tags_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/teams + */ + teams_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/git/trees{/sha} */ + trees_url: string; + clone_url?: string; + mirror_url?: string | null; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/hooks + */ + hooks_url: string; + svn_url?: string; + homepage?: string | null; + language?: string | null; + forks_count?: number; + stargazers_count?: number; + watchers_count?: number; + size?: number; + default_branch?: string; + open_issues_count?: number; + is_template?: boolean; + topics?: string[]; + has_issues?: boolean; + has_projects?: boolean; + has_wiki?: boolean; + has_pages?: boolean; + has_downloads?: boolean; + archived?: boolean; + disabled?: boolean; + visibility?: string; + /** + * Format: date-time + * @example 2011-01-26T19:06:43Z + */ + pushed_at?: string | null; + /** + * Format: date-time + * @example 2011-01-26T19:01:12Z + */ + created_at?: string | null; + /** + * Format: date-time + * @example 2011-01-26T19:14:43Z + */ + updated_at?: string | null; + permissions?: { + admin?: boolean; + maintain?: boolean; + push?: boolean; + triage?: boolean; + pull?: boolean; + }; + /** @example admin */ + role_name?: string; + template_repository?: components["schemas"]["nullable-repository"]; + temp_clone_token?: string; + delete_branch_on_merge?: boolean; + subscribers_count?: number; + network_count?: number; + code_of_conduct?: components["schemas"]["code-of-conduct"]; + license?: { + key?: string; + name?: string; + spdx_id?: string; + url?: string; + node_id?: string; + } | null; + /** @example 0 */ + forks?: number; + /** @example 0 */ + open_issues?: number; + /** @example 0 */ + watchers?: number; + allow_forking?: boolean; + } | null; + /** + * Package + * @description A software package + */ + package: { + /** + * @description Unique identifier of the package. + * @example 1 + */ + id: number; + /** + * @description The name of the package. + * @example super-linter + */ + name: string; + /** + * @example docker + * @enum {string} + */ + package_type: + | "npm" + | "maven" + | "rubygems" + | "docker" + | "nuget" + | "container"; + /** @example https://api.github.com/orgs/github/packages/container/super-linter */ + url: string; + /** @example https://github.com/orgs/github/packages/container/package/super-linter */ + html_url: string; + /** + * @description The number of versions of the package. + * @example 1 + */ + version_count: number; + /** + * @example private + * @enum {string} + */ + visibility: "private" | "public"; + owner?: components["schemas"]["nullable-simple-user"]; + repository?: components["schemas"]["nullable-minimal-repository"]; + /** Format: date-time */ + created_at: string; + /** Format: date-time */ + updated_at: string; + }; + /** + * Package Version + * @description A version of a software package + */ + "package-version": { + /** + * @description Unique identifier of the package version. + * @example 1 + */ + id: number; + /** + * @description The name of the package version. 
+ * @example latest + */ + name: string; + /** @example https://api.github.com/orgs/github/packages/container/super-linter/versions/786068 */ + url: string; + /** @example https://github.com/orgs/github/packages/container/package/super-linter */ + package_html_url: string; + /** @example https://github.com/orgs/github/packages/container/super-linter/786068 */ + html_url?: string; + /** @example MIT */ + license?: string; + description?: string; + /** + * Format: date-time + * @example 2011-04-10T20:09:31Z + */ + created_at: string; + /** + * Format: date-time + * @example 2014-03-03T18:58:10Z + */ + updated_at: string; + /** + * Format: date-time + * @example 2014-03-03T18:58:10Z + */ + deleted_at?: string; + /** Package Version Metadata */ + metadata?: { + /** + * @example docker + * @enum {string} + */ + package_type: + | "npm" + | "maven" + | "rubygems" + | "docker" + | "nuget" + | "container"; + /** Container Metadata */ + container?: { + tags: string[]; + }; + /** Docker Metadata */ + docker?: { + tag?: string[]; + } & { + tags: unknown; + }; + }; + }; + /** + * Project + * @description Projects are a way to organize columns and cards of work. + */ + project: { + /** + * Format: uri + * @example https://api.github.com/repos/api-playground/projects-test + */ + owner_url: string; + /** + * Format: uri + * @example https://api.github.com/projects/1002604 + */ + url: string; + /** + * Format: uri + * @example https://github.com/api-playground/projects-test/projects/12 + */ + html_url: string; + /** + * Format: uri + * @example https://api.github.com/projects/1002604/columns + */ + columns_url: string; + /** @example 1002604 */ + id: number; + /** @example MDc6UHJvamVjdDEwMDI2MDQ= */ + node_id: string; + /** + * @description Name of the project + * @example Week One Sprint + */ + name: string; + /** + * @description Body of the project + * @example This project represents the sprint of the first week in January + */ + body: string | null; + /** @example 1 */ + number: number; + /** + * @description State of the project; either 'open' or 'closed' + * @example open + */ + state: string; + creator: components["schemas"]["nullable-simple-user"]; + /** + * Format: date-time + * @example 2011-04-10T20:09:31Z + */ + created_at: string; + /** + * Format: date-time + * @example 2014-03-03T18:58:10Z + */ + updated_at: string; + /** + * @description The baseline permission that all organization members have on this project. Only present if owner is an organization. + * @enum {string} + */ + organization_permission?: "read" | "write" | "admin" | "none"; + /** @description Whether or not this project can be seen by everyone. Only present if owner is an organization. 
*/ + private?: boolean; + }; + /** + * GroupMapping + * @description External Groups to be mapped to a team for membership + */ + "group-mapping": { + /** + * @description Array of groups to be mapped to this team + * @example [ + * { + * "group_id": "111a1a11-aaa1-1aaa-11a1-a1a1a1a1a1aa", + * "group_name": "saml-azuread-test", + * "group_description": "A group of Developers working on AzureAD SAML SSO" + * }, + * { + * "group_id": "2bb2bb2b-bb22-22bb-2bb2-bb2bbb2bb2b2", + * "group_name": "saml-azuread-test2", + * "group_description": "Another group of Developers working on AzureAD SAML SSO" + * } + * ] + */ + groups?: { + /** + * @description The ID of the group + * @example 111a1a11-aaa1-1aaa-11a1-a1a1a1a1a1aa + */ + group_id: string; + /** + * @description The name of the group + * @example saml-azuread-test + */ + group_name: string; + /** + * @description a description of the group + * @example A group of Developers working on AzureAD SAML SSO + */ + group_description: string; + /** + * @description synchronization status for this group mapping + * @example unsynced + */ + status?: string; + /** + * @description the time of the last sync for this group-mapping + * @example 2019-06-03 22:27:15:000 -700 + */ + synced_at?: string | null; + }[]; + }; + /** + * Full Team + * @description Groups of organization members that gives permissions on specified repositories. + */ + "team-full": { + /** + * @description Unique identifier of the team + * @example 42 + */ + id: number; + /** @example MDQ6VGVhbTE= */ + node_id: string; + /** + * Format: uri + * @description URL for the team + * @example https://api.github.com/organizations/1/team/1 + */ + url: string; + /** + * Format: uri + * @example https://github.com/orgs/rails/teams/core + */ + html_url: string; + /** + * @description Name of the team + * @example Developers + */ + name: string; + /** @example justice-league */ + slug: string; + /** @example A great team. */ + description: string | null; + /** + * @description The level of privacy this team should have + * @example closed + * @enum {string} + */ + privacy?: "closed" | "secret"; + /** + * @description Permission that the team will have for its repositories + * @example push + */ + permission: string; + /** @example https://api.github.com/organizations/1/team/1/members{/member} */ + members_url: string; + /** + * Format: uri + * @example https://api.github.com/organizations/1/team/1/repos + */ + repositories_url: string; + parent?: components["schemas"]["nullable-team-simple"]; + /** @example 3 */ + members_count: number; + /** @example 10 */ + repos_count: number; + /** + * Format: date-time + * @example 2017-07-14T16:53:42Z + */ + created_at: string; + /** + * Format: date-time + * @example 2017-08-17T12:37:15Z + */ + updated_at: string; + organization: components["schemas"]["organization-full"]; + /** + * @description Distinguished Name (DN) that team maps to within LDAP environment + * @example uid=example,ou=users,dc=github,dc=com + */ + ldap_dn?: string; + }; + /** + * Team Discussion + * @description A team discussion is a persistent record of a free-form conversation within a team. + */ + "team-discussion": { + author: components["schemas"]["nullable-simple-user"]; + /** + * @description The main text of the discussion. + * @example Please suggest improvements to our workflow in comments. + */ + body: string; + /** @example
Hi! This is an area for us to collaborate as a team
*/ + body_html: string; + /** + * @description The current version of the body content. If provided, this update operation will be rejected if the given version does not match the latest version on the server. + * @example 0307116bbf7ced493b8d8a346c650b71 + */ + body_version: string; + /** @example 0 */ + comments_count: number; + /** + * Format: uri + * @example https://api.github.com/organizations/1/team/2343027/discussions/1/comments + */ + comments_url: string; + /** + * Format: date-time + * @example 2018-01-25T18:56:31Z + */ + created_at: string; + /** Format: date-time */ + last_edited_at: string | null; + /** + * Format: uri + * @example https://github.com/orgs/github/teams/justice-league/discussions/1 + */ + html_url: string; + /** @example MDE0OlRlYW1EaXNjdXNzaW9uMQ== */ + node_id: string; + /** + * @description The unique sequence number of a team discussion. + * @example 42 + */ + number: number; + /** + * @description Whether or not this discussion should be pinned for easy retrieval. + * @example true + */ + pinned: boolean; + /** + * @description Whether or not this discussion should be restricted to team members and organization administrators. + * @example true + */ + private: boolean; + /** + * Format: uri + * @example https://api.github.com/organizations/1/team/2343027 + */ + team_url: string; + /** + * @description The title of the discussion. + * @example How can we improve our workflow? + */ + title: string; + /** + * Format: date-time + * @example 2018-01-25T18:56:31Z + */ + updated_at: string; + /** + * Format: uri + * @example https://api.github.com/organizations/1/team/2343027/discussions/1 + */ + url: string; + reactions?: components["schemas"]["reaction-rollup"]; + }; + /** + * Team Discussion Comment + * @description A reply to a discussion within a team. + */ + "team-discussion-comment": { + author: components["schemas"]["nullable-simple-user"]; + /** + * @description The main text of the comment. + * @example I agree with this suggestion. + */ + body: string; + /** @example
Do you like apples?
*/ + body_html: string; + /** + * @description The current version of the body content. If provided, this update operation will be rejected if the given version does not match the latest version on the server. + * @example 0307116bbf7ced493b8d8a346c650b71 + */ + body_version: string; + /** + * Format: date-time + * @example 2018-01-15T23:53:58Z + */ + created_at: string; + /** Format: date-time */ + last_edited_at: string | null; + /** + * Format: uri + * @example https://api.github.com/organizations/1/team/2403582/discussions/1 + */ + discussion_url: string; + /** + * Format: uri + * @example https://github.com/orgs/github/teams/justice-league/discussions/1/comments/1 + */ + html_url: string; + /** @example MDIxOlRlYW1EaXNjdXNzaW9uQ29tbWVudDE= */ + node_id: string; + /** + * @description The unique sequence number of a team discussion comment. + * @example 42 + */ + number: number; + /** + * Format: date-time + * @example 2018-01-15T23:53:58Z + */ + updated_at: string; + /** + * Format: uri + * @example https://api.github.com/organizations/1/team/2403582/discussions/1/comments/1 + */ + url: string; + reactions?: components["schemas"]["reaction-rollup"]; + }; + /** + * Reaction + * @description Reactions to conversations provide a way to help people express their feelings more simply and effectively. + */ + reaction: { + /** @example 1 */ + id: number; + /** @example MDg6UmVhY3Rpb24x */ + node_id: string; + user: components["schemas"]["nullable-simple-user"]; + /** + * @description The reaction to use + * @example heart + * @enum {string} + */ + content: + | "+1" + | "-1" + | "laugh" + | "confused" + | "heart" + | "hooray" + | "rocket" + | "eyes"; + /** + * Format: date-time + * @example 2016-05-20T20:09:31Z + */ + created_at: string; + }; + /** + * Team Membership + * @description Team Membership + */ + "team-membership": { + /** Format: uri */ + url: string; + /** + * @description The role of the user in the team. + * @default member + * @example member + * @enum {string} + */ + role: "member" | "maintainer"; + /** + * @description The state of the user's membership in the team. + * @enum {string} + */ + state: "active" | "pending"; + }; + /** + * Team Project + * @description A team's access to a project. + */ + "team-project": { + owner_url: string; + url: string; + html_url: string; + columns_url: string; + id: number; + node_id: string; + name: string; + body: string | null; + number: number; + state: string; + creator: components["schemas"]["simple-user"]; + created_at: string; + updated_at: string; + /** @description The organization permission for this project. Only present when owner is an organization. */ + organization_permission?: string; + /** @description Whether the project is private or not. Only present when owner is an organization. */ + private?: boolean; + permissions: { + read: boolean; + write: boolean; + admin: boolean; + }; + }; + /** + * Team Repository + * @description A team's access to a repository. + */ + "team-repository": { + /** + * @description Unique identifier of the repository + * @example 42 + */ + id: number; + /** @example MDEwOlJlcG9zaXRvcnkxMjk2MjY5 */ + node_id: string; + /** + * @description The name of the repository. 
+ * @example Team Environment + */ + name: string; + /** @example octocat/Hello-World */ + full_name: string; + license: components["schemas"]["nullable-license-simple"]; + forks: number; + permissions?: { + admin: boolean; + pull: boolean; + triage?: boolean; + push: boolean; + maintain?: boolean; + }; + /** @example admin */ + role_name?: string; + owner: components["schemas"]["nullable-simple-user"]; + /** + * @description Whether the repository is private or public. + * @default false + */ + private: boolean; + /** + * Format: uri + * @example https://github.com/octocat/Hello-World + */ + html_url: string; + /** @example This your first repo! */ + description: string | null; + fork: boolean; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World + */ + url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/{archive_format}{/ref} */ + archive_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/assignees{/user} */ + assignees_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/git/blobs{/sha} */ + blobs_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/branches{/branch} */ + branches_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/collaborators{/collaborator} */ + collaborators_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/comments{/number} */ + comments_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/commits{/sha} */ + commits_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/compare/{base}...{head} */ + compare_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/contents/{+path} */ + contents_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/contributors + */ + contributors_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/deployments + */ + deployments_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/downloads + */ + downloads_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/events + */ + events_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/forks + */ + forks_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/git/commits{/sha} */ + git_commits_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/git/refs{/sha} */ + git_refs_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/git/tags{/sha} */ + git_tags_url: string; + /** @example git:github.com/octocat/Hello-World.git */ + git_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/issues/comments{/number} */ + issue_comment_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/issues/events{/number} */ + issue_events_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/issues{/number} */ + issues_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/keys{/key_id} */ + keys_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/labels{/name} */ + labels_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/languages + */ + languages_url: string; + /** + * Format: uri + * @example 
http://api.github.com/repos/octocat/Hello-World/merges + */ + merges_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/milestones{/number} */ + milestones_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/notifications{?since,all,participating} */ + notifications_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/pulls{/number} */ + pulls_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/releases{/id} */ + releases_url: string; + /** @example git@github.com:octocat/Hello-World.git */ + ssh_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/stargazers + */ + stargazers_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/statuses/{sha} */ + statuses_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/subscribers + */ + subscribers_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/subscription + */ + subscription_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/tags + */ + tags_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/teams + */ + teams_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/git/trees{/sha} */ + trees_url: string; + /** @example https://github.com/octocat/Hello-World.git */ + clone_url: string; + /** + * Format: uri + * @example git:git.example.com/octocat/Hello-World + */ + mirror_url: string | null; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/hooks + */ + hooks_url: string; + /** + * Format: uri + * @example https://svn.github.com/octocat/Hello-World + */ + svn_url: string; + /** + * Format: uri + * @example https://github.com + */ + homepage: string | null; + language: string | null; + /** @example 9 */ + forks_count: number; + /** @example 80 */ + stargazers_count: number; + /** @example 80 */ + watchers_count: number; + /** @example 108 */ + size: number; + /** + * @description The default branch of the repository. + * @example master + */ + default_branch: string; + /** @example 0 */ + open_issues_count: number; + /** + * @description Whether this repository acts as a template that can be used to generate new repositories. + * @default false + * @example true + */ + is_template?: boolean; + topics?: string[]; + /** + * @description Whether issues are enabled. + * @default true + * @example true + */ + has_issues: boolean; + /** + * @description Whether projects are enabled. + * @default true + * @example true + */ + has_projects: boolean; + /** + * @description Whether the wiki is enabled. + * @default true + * @example true + */ + has_wiki: boolean; + has_pages: boolean; + /** + * @description Whether downloads are enabled. + * @default true + * @example true + */ + has_downloads: boolean; + /** + * @description Whether the repository is archived. + * @default false + */ + archived: boolean; + /** @description Returns whether or not this repository disabled. */ + disabled: boolean; + /** + * @description The repository visibility: public, private, or internal. 
+ * @default public + */ + visibility?: string; + /** + * Format: date-time + * @example 2011-01-26T19:06:43Z + */ + pushed_at: string | null; + /** + * Format: date-time + * @example 2011-01-26T19:01:12Z + */ + created_at: string | null; + /** + * Format: date-time + * @example 2011-01-26T19:14:43Z + */ + updated_at: string | null; + /** + * @description Whether to allow rebase merges for pull requests. + * @default true + * @example true + */ + allow_rebase_merge?: boolean; + template_repository?: components["schemas"]["nullable-repository"]; + temp_clone_token?: string; + /** + * @description Whether to allow squash merges for pull requests. + * @default true + * @example true + */ + allow_squash_merge?: boolean; + /** + * @description Whether to allow Auto-merge to be used on pull requests. + * @default false + * @example false + */ + allow_auto_merge?: boolean; + /** + * @description Whether to delete head branches when pull requests are merged + * @default false + * @example false + */ + delete_branch_on_merge?: boolean; + /** + * @description Whether to allow merge commits for pull requests. + * @default true + * @example true + */ + allow_merge_commit?: boolean; + /** + * @description Whether to allow forking this repo + * @default false + * @example false + */ + allow_forking?: boolean; + subscribers_count?: number; + network_count?: number; + open_issues: number; + watchers: number; + master_branch?: string; + }; + /** + * Project Card + * @description Project cards represent a scope of work. + */ + "project-card": { + /** + * Format: uri + * @example https://api.github.com/projects/columns/cards/1478 + */ + url: string; + /** + * @description The project card's ID + * @example 42 + */ + id: number; + /** @example MDExOlByb2plY3RDYXJkMTQ3OA== */ + node_id: string; + /** @example Add payload for delete Project column */ + note: string | null; + creator: components["schemas"]["nullable-simple-user"]; + /** + * Format: date-time + * @example 2016-09-05T14:21:06Z + */ + created_at: string; + /** + * Format: date-time + * @example 2016-09-05T14:20:22Z + */ + updated_at: string; + /** + * @description Whether or not the card is archived + * @example false + */ + archived?: boolean; + column_name?: string; + project_id?: string; + /** + * Format: uri + * @example https://api.github.com/projects/columns/367 + */ + column_url: string; + /** + * Format: uri + * @example https://api.github.com/repos/api-playground/projects-test/issues/3 + */ + content_url?: string; + /** + * Format: uri + * @example https://api.github.com/projects/120 + */ + project_url: string; + }; + /** + * Project Column + * @description Project columns contain cards of work. 
+ */ + "project-column": { + /** + * Format: uri + * @example https://api.github.com/projects/columns/367 + */ + url: string; + /** + * Format: uri + * @example https://api.github.com/projects/120 + */ + project_url: string; + /** + * Format: uri + * @example https://api.github.com/projects/columns/367/cards + */ + cards_url: string; + /** + * @description The unique identifier of the project column + * @example 42 + */ + id: number; + /** @example MDEzOlByb2plY3RDb2x1bW4zNjc= */ + node_id: string; + /** + * @description Name of the project column + * @example Remaining tasks + */ + name: string; + /** + * Format: date-time + * @example 2016-09-05T14:18:44Z + */ + created_at: string; + /** + * Format: date-time + * @example 2016-09-05T14:22:28Z + */ + updated_at: string; + }; + /** + * Project Collaborator Permission + * @description Project Collaborator Permission + */ + "project-collaborator-permission": { + permission: string; + user: components["schemas"]["nullable-simple-user"]; + }; + /** Rate Limit */ + "rate-limit": { + limit: number; + remaining: number; + reset: number; + used: number; + }; + /** + * Rate Limit Overview + * @description Rate Limit Overview + */ + "rate-limit-overview": { + resources: { + core: components["schemas"]["rate-limit"]; + graphql?: components["schemas"]["rate-limit"]; + search: components["schemas"]["rate-limit"]; + source_import?: components["schemas"]["rate-limit"]; + integration_manifest?: components["schemas"]["rate-limit"]; + code_scanning_upload?: components["schemas"]["rate-limit"]; + actions_runner_registration?: components["schemas"]["rate-limit"]; + scim?: components["schemas"]["rate-limit"]; + dependency_snapshots?: components["schemas"]["rate-limit"]; + }; + rate: components["schemas"]["rate-limit"]; + }; + /** + * Code Of Conduct Simple + * @description Code of Conduct Simple + */ + "code-of-conduct-simple": { + /** + * Format: uri + * @example https://api.github.com/repos/github/docs/community/code_of_conduct + */ + url: string; + /** @example citizen_code_of_conduct */ + key: string; + /** @example Citizen Code of Conduct */ + name: string; + /** + * Format: uri + * @example https://github.com/github/docs/blob/main/CODE_OF_CONDUCT.md + */ + html_url: string | null; + }; + "security-and-analysis": { + advanced_security?: { + /** @enum {string} */ + status?: "enabled" | "disabled"; + }; + secret_scanning?: { + /** @enum {string} */ + status?: "enabled" | "disabled"; + }; + secret_scanning_push_protection?: { + /** @enum {string} */ + status?: "enabled" | "disabled"; + }; + } | null; + /** + * Full Repository + * @description Full Repository + */ + "full-repository": { + /** @example 1296269 */ + id: number; + /** @example MDEwOlJlcG9zaXRvcnkxMjk2MjY5 */ + node_id: string; + /** @example Hello-World */ + name: string; + /** @example octocat/Hello-World */ + full_name: string; + owner: components["schemas"]["simple-user"]; + private: boolean; + /** + * Format: uri + * @example https://github.com/octocat/Hello-World + */ + html_url: string; + /** @example This your first repo! 
*/ + description: string | null; + fork: boolean; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World + */ + url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/{archive_format}{/ref} */ + archive_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/assignees{/user} */ + assignees_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/git/blobs{/sha} */ + blobs_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/branches{/branch} */ + branches_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/collaborators{/collaborator} */ + collaborators_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/comments{/number} */ + comments_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/commits{/sha} */ + commits_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/compare/{base}...{head} */ + compare_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/contents/{+path} */ + contents_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/contributors + */ + contributors_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/deployments + */ + deployments_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/downloads + */ + downloads_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/events + */ + events_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/forks + */ + forks_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/git/commits{/sha} */ + git_commits_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/git/refs{/sha} */ + git_refs_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/git/tags{/sha} */ + git_tags_url: string; + /** @example git:github.com/octocat/Hello-World.git */ + git_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/issues/comments{/number} */ + issue_comment_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/issues/events{/number} */ + issue_events_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/issues{/number} */ + issues_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/keys{/key_id} */ + keys_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/labels{/name} */ + labels_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/languages + */ + languages_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/merges + */ + merges_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/milestones{/number} */ + milestones_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/notifications{?since,all,participating} */ + notifications_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/pulls{/number} */ + pulls_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/releases{/id} */ + releases_url: string; + /** @example git@github.com:octocat/Hello-World.git */ + ssh_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/stargazers 
+ */ + stargazers_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/statuses/{sha} */ + statuses_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/subscribers + */ + subscribers_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/subscription + */ + subscription_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/tags + */ + tags_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/teams + */ + teams_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/git/trees{/sha} */ + trees_url: string; + /** @example https://github.com/octocat/Hello-World.git */ + clone_url: string; + /** + * Format: uri + * @example git:git.example.com/octocat/Hello-World + */ + mirror_url: string | null; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/hooks + */ + hooks_url: string; + /** + * Format: uri + * @example https://svn.github.com/octocat/Hello-World + */ + svn_url: string; + /** + * Format: uri + * @example https://github.com + */ + homepage: string | null; + language: string | null; + /** @example 9 */ + forks_count: number; + /** @example 80 */ + stargazers_count: number; + /** @example 80 */ + watchers_count: number; + /** @example 108 */ + size: number; + /** @example master */ + default_branch: string; + /** @example 0 */ + open_issues_count: number; + /** @example true */ + is_template?: boolean; + /** + * @example [ + * "octocat", + * "atom", + * "electron", + * "API" + * ] + */ + topics?: string[]; + /** @example true */ + has_issues: boolean; + /** @example true */ + has_projects: boolean; + /** @example true */ + has_wiki: boolean; + has_pages: boolean; + /** @example true */ + has_downloads: boolean; + archived: boolean; + /** @description Returns whether or not this repository disabled. */ + disabled: boolean; + /** + * @description The repository visibility: public, private, or internal. + * @example public + */ + visibility?: string; + /** + * Format: date-time + * @example 2011-01-26T19:06:43Z + */ + pushed_at: string; + /** + * Format: date-time + * @example 2011-01-26T19:01:12Z + */ + created_at: string; + /** + * Format: date-time + * @example 2011-01-26T19:14:43Z + */ + updated_at: string; + permissions?: { + admin: boolean; + maintain?: boolean; + push: boolean; + triage?: boolean; + pull: boolean; + }; + /** @example true */ + allow_rebase_merge?: boolean; + template_repository?: components["schemas"]["nullable-repository"]; + temp_clone_token?: string | null; + /** @example true */ + allow_squash_merge?: boolean; + /** @example false */ + allow_auto_merge?: boolean; + /** @example false */ + delete_branch_on_merge?: boolean; + /** @example true */ + allow_merge_commit?: boolean; + /** @example true */ + allow_update_branch?: boolean; + /** @example false */ + use_squash_pr_title_as_default?: boolean; + /** @example true */ + allow_forking?: boolean; + /** @example 42 */ + subscribers_count: number; + /** @example 0 */ + network_count: number; + license: components["schemas"]["nullable-license-simple"]; + organization?: components["schemas"]["nullable-simple-user"]; + parent?: components["schemas"]["repository"]; + source?: components["schemas"]["repository"]; + forks: number; + master_branch?: string; + open_issues: number; + watchers: number; + /** + * @description Whether anonymous git access is allowed. 
+ * @default true + */ + anonymous_access_enabled?: boolean; + code_of_conduct?: components["schemas"]["code-of-conduct-simple"]; + security_and_analysis?: components["schemas"]["security-and-analysis"]; + }; + /** + * Artifact + * @description An artifact + */ + artifact: { + /** @example 5 */ + id: number; + /** @example MDEwOkNoZWNrU3VpdGU1 */ + node_id: string; + /** + * @description The name of the artifact. + * @example AdventureWorks.Framework + */ + name: string; + /** + * @description The size in bytes of the artifact. + * @example 12345 + */ + size_in_bytes: number; + /** @example https://api.github.com/repos/github/hello-world/actions/artifacts/5 */ + url: string; + /** @example https://api.github.com/repos/github/hello-world/actions/artifacts/5/zip */ + archive_download_url: string; + /** @description Whether or not the artifact has expired. */ + expired: boolean; + /** Format: date-time */ + created_at: string | null; + /** Format: date-time */ + expires_at: string | null; + /** Format: date-time */ + updated_at: string | null; + workflow_run?: { + /** @example 10 */ + id?: number; + /** @example 42 */ + repository_id?: number; + /** @example 42 */ + head_repository_id?: number; + /** @example main */ + head_branch?: string; + /** @example 009b8a3a9ccbb128af87f9b1c0f4c62e8a304f6d */ + head_sha?: string; + } | null; + }; + /** + * Repository actions caches + * @description Repository actions caches + */ + "actions-cache-list": { + /** + * @description Total number of caches + * @example 2 + */ + total_count: number; + /** @description Array of caches */ + actions_caches: { + /** @example 2 */ + id?: number; + /** @example refs/heads/main */ + ref?: string; + /** @example Linux-node-958aff96db2d75d67787d1e634ae70b659de937b */ + key?: string; + /** @example 73885106f58cc52a7df9ec4d4a5622a5614813162cb516c759a30af6bf56e6f0 */ + version?: string; + /** + * Format: date-time + * @example 2019-01-24T22:45:36.000Z + */ + last_accessed_at?: string; + /** + * Format: date-time + * @example 2019-01-24T22:45:36.000Z + */ + created_at?: string; + /** @example 1024 */ + size_in_bytes?: number; + }[]; + }; + /** + * Job + * @description Information of a job execution in a workflow run + */ + job: { + /** + * @description The id of the job. + * @example 21 + */ + id: number; + /** + * @description The id of the associated workflow run. + * @example 5 + */ + run_id: number; + /** @example https://api.github.com/repos/github/hello-world/actions/runs/5 */ + run_url: string; + /** + * @description Attempt number of the associated workflow run, 1 for first attempt and higher if the workflow was re-run. + * @example 1 + */ + run_attempt?: number; + /** @example MDg6Q2hlY2tSdW40 */ + node_id: string; + /** + * @description The SHA of the commit that is being run. + * @example 009b8a3a9ccbb128af87f9b1c0f4c62e8a304f6d + */ + head_sha: string; + /** @example https://api.github.com/repos/github/hello-world/actions/jobs/21 */ + url: string; + /** @example https://github.com/github/hello-world/runs/4 */ + html_url: string | null; + /** + * @description The phase of the lifecycle that the job is currently in. + * @example queued + * @enum {string} + */ + status: "queued" | "in_progress" | "completed"; + /** + * @description The outcome of the job. + * @example success + */ + conclusion: string | null; + /** + * Format: date-time + * @description The time that the job started, in ISO 8601 format. 
+ * @example 2019-08-08T08:00:00-07:00 + */ + started_at: string; + /** + * Format: date-time + * @description The time that the job finished, in ISO 8601 format. + * @example 2019-08-08T08:00:00-07:00 + */ + completed_at: string | null; + /** + * @description The name of the job. + * @example test-coverage + */ + name: string; + /** @description Steps in this job. */ + steps?: { + /** + * @description The phase of the lifecycle that the job is currently in. + * @example queued + * @enum {string} + */ + status: "queued" | "in_progress" | "completed"; + /** + * @description The outcome of the job. + * @example success + */ + conclusion: string | null; + /** + * @description The name of the job. + * @example test-coverage + */ + name: string; + /** @example 1 */ + number: number; + /** + * Format: date-time + * @description The time that the step started, in ISO 8601 format. + * @example 2019-08-08T08:00:00-07:00 + */ + started_at?: string | null; + /** + * Format: date-time + * @description The time that the job finished, in ISO 8601 format. + * @example 2019-08-08T08:00:00-07:00 + */ + completed_at?: string | null; + }[]; + /** @example https://api.github.com/repos/github/hello-world/check-runs/4 */ + check_run_url: string; + /** + * @description Labels for the workflow job. Specified by the "runs_on" attribute in the action's workflow file. + * @example [ + * "self-hosted", + * "foo", + * "bar" + * ] + */ + labels: string[]; + /** + * @description The ID of the runner to which this job has been assigned. (If a runner hasn't yet been assigned, this will be null.) + * @example 1 + */ + runner_id: number | null; + /** + * @description The name of the runner to which this job has been assigned. (If a runner hasn't yet been assigned, this will be null.) + * @example my runner + */ + runner_name: string | null; + /** + * @description The ID of the runner group to which this job has been assigned. (If a runner hasn't yet been assigned, this will be null.) + * @example 2 + */ + runner_group_id: number | null; + /** + * @description The name of the runner group to which this job has been assigned. (If a runner hasn't yet been assigned, this will be null.) + * @example my runner group + */ + runner_group_name: string | null; + }; + /** + * The json payload enables/disables the use of sub claim customization + * @description OIDC Customer Subject + */ + "opt-out-oidc-custom-sub": { + use_default: boolean; + }; + /** @description Whether GitHub Actions is enabled on the repository. */ + "actions-enabled": boolean; + "actions-repository-permissions": { + enabled: components["schemas"]["actions-enabled"]; + allowed_actions?: components["schemas"]["allowed-actions"]; + selected_actions_url?: components["schemas"]["selected-actions-url"]; + }; + "actions-workflow-access-to-repository": { + /** + * @description Defines the level of access that workflows outside of the repository have to actions and reusable workflows within the + * repository. `none` means access is only possible from workflows in this repository. 
+ * @enum {string} + */ + access_level: "none" | "organization" | "enterprise"; + }; + /** + * Referenced workflow + * @description A workflow referenced/reused by the initial caller workflow + */ + "referenced-workflow": { + path: string; + sha: string; + ref?: string; + }; + /** Pull Request Minimal */ + "pull-request-minimal": { + id: number; + number: number; + url: string; + head: { + ref: string; + sha: string; + repo: { + id: number; + url: string; + name: string; + }; + }; + base: { + ref: string; + sha: string; + repo: { + id: number; + url: string; + name: string; + }; + }; + }; + /** + * Simple Commit + * @description Simple Commit + */ + "nullable-simple-commit": { + id: string; + tree_id: string; + message: string; + /** Format: date-time */ + timestamp: string; + author: { + name: string; + email: string; + } | null; + committer: { + name: string; + email: string; + } | null; + } | null; + /** + * Workflow Run + * @description An invocation of a workflow + */ + "workflow-run": { + /** + * @description The ID of the workflow run. + * @example 5 + */ + id: number; + /** + * @description The name of the workflow run. + * @example Build + */ + name?: string | null; + /** @example MDEwOkNoZWNrU3VpdGU1 */ + node_id: string; + /** + * @description The ID of the associated check suite. + * @example 42 + */ + check_suite_id?: number; + /** + * @description The node ID of the associated check suite. + * @example MDEwOkNoZWNrU3VpdGU0Mg== + */ + check_suite_node_id?: string; + /** @example master */ + head_branch: string | null; + /** + * @description The SHA of the head commit that points to the version of the workflow being run. + * @example 009b8a3a9ccbb128af87f9b1c0f4c62e8a304f6d + */ + head_sha: string; + /** + * @description The full path of the workflow + * @example octocat/octo-repo/.github/workflows/ci.yml@main + */ + path: string; + /** + * @description The auto incrementing run number for the workflow run. + * @example 106 + */ + run_number: number; + /** + * @description Attempt number of the run, 1 for first attempt and higher if the workflow was re-run. + * @example 1 + */ + run_attempt?: number; + referenced_workflows?: + | components["schemas"]["referenced-workflow"][] + | null; + /** @example push */ + event: string; + /** @example completed */ + status: string | null; + /** @example neutral */ + conclusion: string | null; + /** + * @description The ID of the parent workflow. + * @example 5 + */ + workflow_id: number; + /** + * @description The URL to the workflow run. + * @example https://api.github.com/repos/github/hello-world/actions/runs/5 + */ + url: string; + /** @example https://github.com/github/hello-world/suites/4 */ + html_url: string; + pull_requests: components["schemas"]["pull-request-minimal"][] | null; + /** Format: date-time */ + created_at: string; + /** Format: date-time */ + updated_at: string; + actor?: components["schemas"]["simple-user"]; + triggering_actor?: components["schemas"]["simple-user"]; + /** + * Format: date-time + * @description The start time of the latest run. Resets on re-run. + */ + run_started_at?: string; + /** + * @description The URL to the jobs for the workflow run. + * @example https://api.github.com/repos/github/hello-world/actions/runs/5/jobs + */ + jobs_url: string; + /** + * @description The URL to download the logs for the workflow run. + * @example https://api.github.com/repos/github/hello-world/actions/runs/5/logs + */ + logs_url: string; + /** + * @description The URL to the associated check suite. 
+ * @example https://api.github.com/repos/github/hello-world/check-suites/12 + */ + check_suite_url: string; + /** + * @description The URL to the artifacts for the workflow run. + * @example https://api.github.com/repos/github/hello-world/actions/runs/5/rerun/artifacts + */ + artifacts_url: string; + /** + * @description The URL to cancel the workflow run. + * @example https://api.github.com/repos/github/hello-world/actions/runs/5/cancel + */ + cancel_url: string; + /** + * @description The URL to rerun the workflow run. + * @example https://api.github.com/repos/github/hello-world/actions/runs/5/rerun + */ + rerun_url: string; + /** + * @description The URL to the previous attempted run of this workflow, if one exists. + * @example https://api.github.com/repos/github/hello-world/actions/runs/5/attempts/3 + */ + previous_attempt_url?: string | null; + /** + * @description The URL to the workflow. + * @example https://api.github.com/repos/github/hello-world/actions/workflows/main.yaml + */ + workflow_url: string; + head_commit: components["schemas"]["nullable-simple-commit"]; + repository: components["schemas"]["minimal-repository"]; + head_repository: components["schemas"]["minimal-repository"]; + /** @example 5 */ + head_repository_id?: number; + }; + /** + * Environment Approval + * @description An entry in the reviews log for environment deployments + */ + "environment-approvals": { + /** @description The list of environments that were approved or rejected */ + environments: { + /** + * @description The id of the environment. + * @example 56780428 + */ + id?: number; + /** @example MDExOkVudmlyb25tZW50NTY3ODA0Mjg= */ + node_id?: string; + /** + * @description The name of the environment. + * @example staging + */ + name?: string; + /** @example https://api.github.com/repos/github/hello-world/environments/staging */ + url?: string; + /** @example https://github.com/github/hello-world/deployments/activity_log?environments_filter=staging */ + html_url?: string; + /** + * Format: date-time + * @description The time that the environment was created, in ISO 8601 format. + * @example 2020-11-23T22:00:40Z + */ + created_at?: string; + /** + * Format: date-time + * @description The time that the environment was last updated, in ISO 8601 format. + * @example 2020-11-23T22:00:40Z + */ + updated_at?: string; + }[]; + /** + * @description Whether deployment to the environment(s) was approved or rejected + * @example approved + * @enum {string} + */ + state: "approved" | "rejected"; + user: components["schemas"]["simple-user"]; + /** + * @description The comment submitted with the deployment review + * @example Ship it! + */ + comment: string; + }; + /** + * @description The type of reviewer. + * @example User + * @enum {string} + */ + "deployment-reviewer-type": "User" | "Team"; + /** + * Pending Deployment + * @description Details of a deployment that is waiting for protection rules to pass + */ + "pending-deployment": { + environment: { + /** + * @description The id of the environment. + * @example 56780428 + */ + id?: number; + /** @example MDExOkVudmlyb25tZW50NTY3ODA0Mjg= */ + node_id?: string; + /** + * @description The name of the environment. 
+ * @example staging + */ + name?: string; + /** @example https://api.github.com/repos/github/hello-world/environments/staging */ + url?: string; + /** @example https://github.com/github/hello-world/deployments/activity_log?environments_filter=staging */ + html_url?: string; + }; + /** + * @description The set duration of the wait timer + * @example 30 + */ + wait_timer: number; + /** + * Format: date-time + * @description The time that the wait timer began. + * @example 2020-11-23T22:00:40Z + */ + wait_timer_started_at: string | null; + /** + * @description Whether the currently authenticated user can approve the deployment + * @example true + */ + current_user_can_approve: boolean; + /** @description The people or teams that may approve jobs that reference the environment. You can list up to six users or teams as reviewers. The reviewers must have at least read access to the repository. Only one of the required reviewers needs to approve the job for it to proceed. */ + reviewers: { + type?: components["schemas"]["deployment-reviewer-type"]; + reviewer?: Partial & + Partial; + }[]; + }; + /** + * Deployment + * @description A request for a specific ref(branch,sha,tag) to be deployed + */ + deployment: { + /** + * Format: uri + * @example https://api.github.com/repos/octocat/example/deployments/1 + */ + url: string; + /** + * @description Unique identifier of the deployment + * @example 42 + */ + id: number; + /** @example MDEwOkRlcGxveW1lbnQx */ + node_id: string; + /** @example a84d88e7554fc1fa21bcbc4efae3c782a70d2b9d */ + sha: string; + /** + * @description The ref to deploy. This can be a branch, tag, or sha. + * @example topic-branch + */ + ref: string; + /** + * @description Parameter to specify a task to execute + * @example deploy + */ + task: string; + payload: { [key: string]: unknown } | string; + /** @example staging */ + original_environment?: string; + /** + * @description Name for the target deployment environment. + * @example production + */ + environment: string; + /** @example Deploy request from hubot */ + description: string | null; + creator: components["schemas"]["nullable-simple-user"]; + /** + * Format: date-time + * @example 2012-07-20T01:19:13Z + */ + created_at: string; + /** + * Format: date-time + * @example 2012-07-20T01:19:13Z + */ + updated_at: string; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/example/deployments/1/statuses + */ + statuses_url: string; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/example + */ + repository_url: string; + /** + * @description Specifies if the given environment is will no longer exist at some point in the future. Default: false. + * @example true + */ + transient_environment?: boolean; + /** + * @description Specifies if the given environment is one that end-users directly interact with. Default: false. 
+ * @example true + */ + production_environment?: boolean; + performed_via_github_app?: components["schemas"]["nullable-integration"]; + }; + /** + * Workflow Run Usage + * @description Workflow Run Usage + */ + "workflow-run-usage": { + billable: { + UBUNTU?: { + total_ms: number; + jobs: number; + job_runs?: { + job_id: number; + duration_ms: number; + }[]; + }; + MACOS?: { + total_ms: number; + jobs: number; + job_runs?: { + job_id: number; + duration_ms: number; + }[]; + }; + WINDOWS?: { + total_ms: number; + jobs: number; + job_runs?: { + job_id: number; + duration_ms: number; + }[]; + }; + }; + run_duration_ms?: number; + }; + /** + * Actions Secret + * @description Set secrets for GitHub Actions. + */ + "actions-secret": { + /** + * @description The name of the secret. + * @example SECRET_TOKEN + */ + name: string; + /** Format: date-time */ + created_at: string; + /** Format: date-time */ + updated_at: string; + }; + /** + * Workflow + * @description A GitHub Actions workflow + */ + workflow: { + /** @example 5 */ + id: number; + /** @example MDg6V29ya2Zsb3cxMg== */ + node_id: string; + /** @example CI */ + name: string; + /** @example ruby.yaml */ + path: string; + /** + * @example active + * @enum {string} + */ + state: + | "active" + | "deleted" + | "disabled_fork" + | "disabled_inactivity" + | "disabled_manually"; + /** + * Format: date-time + * @example 2019-12-06T14:20:20.000Z + */ + created_at: string; + /** + * Format: date-time + * @example 2019-12-06T14:20:20.000Z + */ + updated_at: string; + /** @example https://api.github.com/repos/actions/setup-ruby/workflows/5 */ + url: string; + /** @example https://github.com/actions/setup-ruby/blob/master/.github/workflows/ruby.yaml */ + html_url: string; + /** @example https://github.com/actions/setup-ruby/workflows/CI/badge.svg */ + badge_url: string; + /** + * Format: date-time + * @example 2019-12-06T14:20:20.000Z + */ + deleted_at?: string; + }; + /** + * Workflow Usage + * @description Workflow Usage + */ + "workflow-usage": { + billable: { + UBUNTU?: { + total_ms?: number; + }; + MACOS?: { + total_ms?: number; + }; + WINDOWS?: { + total_ms?: number; + }; + }; + }; + /** + * Autolink reference + * @description An autolink reference. + */ + autolink: { + /** @example 3 */ + id: number; + /** + * @description The prefix of a key that is linkified. + * @example TICKET- + */ + key_prefix: string; + /** + * @description A template for the target URL that is generated if a key was found. + * @example https://example.com/TICKET?query= + */ + url_template: string; + /** @description Whether this autolink reference matches alphanumeric characters. If false, this autolink reference is a legacy autolink that only matches numeric characters. 
*/ + is_alphanumeric?: boolean; + }; + /** + * Protected Branch Required Status Check + * @description Protected Branch Required Status Check + */ + "protected-branch-required-status-check": { + url?: string; + enforcement_level?: string; + contexts: string[]; + checks: { + context: string; + app_id: number | null; + }[]; + contexts_url?: string; + strict?: boolean; + }; + /** + * Protected Branch Admin Enforced + * @description Protected Branch Admin Enforced + */ + "protected-branch-admin-enforced": { + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/branches/master/protection/enforce_admins + */ + url: string; + /** @example true */ + enabled: boolean; + }; + /** + * Protected Branch Pull Request Review + * @description Protected Branch Pull Request Review + */ + "protected-branch-pull-request-review": { + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/branches/master/protection/dismissal_restrictions + */ + url?: string; + dismissal_restrictions?: { + /** @description The list of users with review dismissal access. */ + users?: components["schemas"]["simple-user"][]; + /** @description The list of teams with review dismissal access. */ + teams?: components["schemas"]["team"][]; + /** @description The list of apps with review dismissal access. */ + apps?: components["schemas"]["integration"][]; + /** @example "https://api.github.com/repos/the-org/an-org-repo/branches/master/protection/dismissal_restrictions" */ + url?: string; + /** @example "https://api.github.com/repos/the-org/an-org-repo/branches/master/protection/dismissal_restrictions/users" */ + users_url?: string; + /** @example "https://api.github.com/repos/the-org/an-org-repo/branches/master/protection/dismissal_restrictions/teams" */ + teams_url?: string; + }; + /** @description Allow specific users, teams, or apps to bypass pull request requirements. */ + bypass_pull_request_allowances?: { + /** @description The list of users allowed to bypass pull request requirements. */ + users?: components["schemas"]["simple-user"][]; + /** @description The list of teams allowed to bypass pull request requirements. */ + teams?: components["schemas"]["team"][]; + /** @description The list of apps allowed to bypass pull request requirements. 
*/ + apps?: components["schemas"]["integration"][]; + }; + /** @example true */ + dismiss_stale_reviews: boolean; + /** @example true */ + require_code_owner_reviews: boolean; + /** @example 2 */ + required_approving_review_count?: number; + }; + /** + * Branch Restriction Policy + * @description Branch Restriction Policy + */ + "branch-restriction-policy": { + /** Format: uri */ + url: string; + /** Format: uri */ + users_url: string; + /** Format: uri */ + teams_url: string; + /** Format: uri */ + apps_url: string; + users: { + login?: string; + id?: number; + node_id?: string; + avatar_url?: string; + gravatar_id?: string; + url?: string; + html_url?: string; + followers_url?: string; + following_url?: string; + gists_url?: string; + starred_url?: string; + subscriptions_url?: string; + organizations_url?: string; + repos_url?: string; + events_url?: string; + received_events_url?: string; + type?: string; + site_admin?: boolean; + }[]; + teams: { + id?: number; + node_id?: string; + url?: string; + html_url?: string; + name?: string; + slug?: string; + description?: string | null; + privacy?: string; + permission?: string; + members_url?: string; + repositories_url?: string; + parent?: string | null; + }[]; + apps: { + id?: number; + slug?: string; + node_id?: string; + owner?: { + login?: string; + id?: number; + node_id?: string; + url?: string; + repos_url?: string; + events_url?: string; + hooks_url?: string; + issues_url?: string; + members_url?: string; + public_members_url?: string; + avatar_url?: string; + description?: string; + /** @example "" */ + gravatar_id?: string; + /** @example "https://github.com/testorg-ea8ec76d71c3af4b" */ + html_url?: string; + /** @example "https://api.github.com/users/testorg-ea8ec76d71c3af4b/followers" */ + followers_url?: string; + /** @example "https://api.github.com/users/testorg-ea8ec76d71c3af4b/following{/other_user}" */ + following_url?: string; + /** @example "https://api.github.com/users/testorg-ea8ec76d71c3af4b/gists{/gist_id}" */ + gists_url?: string; + /** @example "https://api.github.com/users/testorg-ea8ec76d71c3af4b/starred{/owner}{/repo}" */ + starred_url?: string; + /** @example "https://api.github.com/users/testorg-ea8ec76d71c3af4b/subscriptions" */ + subscriptions_url?: string; + /** @example "https://api.github.com/users/testorg-ea8ec76d71c3af4b/orgs" */ + organizations_url?: string; + /** @example "https://api.github.com/users/testorg-ea8ec76d71c3af4b/received_events" */ + received_events_url?: string; + /** @example "Organization" */ + type?: string; + /** @example false */ + site_admin?: boolean; + }; + name?: string; + description?: string; + external_url?: string; + html_url?: string; + created_at?: string; + updated_at?: string; + permissions?: { + metadata?: string; + contents?: string; + issues?: string; + single_file?: string; + }; + events?: string[]; + }[]; + }; + /** + * Branch Protection + * @description Branch Protection + */ + "branch-protection": { + url?: string; + enabled?: boolean; + required_status_checks?: components["schemas"]["protected-branch-required-status-check"]; + enforce_admins?: components["schemas"]["protected-branch-admin-enforced"]; + required_pull_request_reviews?: components["schemas"]["protected-branch-pull-request-review"]; + restrictions?: components["schemas"]["branch-restriction-policy"]; + required_linear_history?: { + enabled?: boolean; + }; + allow_force_pushes?: { + enabled?: boolean; + }; + allow_deletions?: { + enabled?: boolean; + }; + block_creations?: { + enabled?: boolean; + 
}; + required_conversation_resolution?: { + enabled?: boolean; + }; + /** @example "branch/with/protection" */ + name?: string; + /** @example "https://api.github.com/repos/owner-79e94e2d36b3fd06a32bb213/AAA_Public_Repo/branches/branch/with/protection/protection" */ + protection_url?: string; + required_signatures?: { + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/branches/master/protection/required_signatures + */ + url: string; + /** @example true */ + enabled: boolean; + }; + }; + /** + * Short Branch + * @description Short Branch + */ + "short-branch": { + name: string; + commit: { + sha: string; + /** Format: uri */ + url: string; + }; + protected: boolean; + protection?: components["schemas"]["branch-protection"]; + /** Format: uri */ + protection_url?: string; + }; + /** + * Git User + * @description Metaproperties for Git author/committer information. + */ + "nullable-git-user": { + /** @example "Chris Wanstrath" */ + name?: string; + /** @example "chris@ozmm.org" */ + email?: string; + /** @example "2007-10-29T02:42:39.000-07:00" */ + date?: string; + } | null; + /** Verification */ + verification: { + verified: boolean; + reason: string; + payload: string | null; + signature: string | null; + }; + /** + * Diff Entry + * @description Diff Entry + */ + "diff-entry": { + /** @example bbcd538c8e72b8c175046e27cc8f907076331401 */ + sha: string; + /** @example file1.txt */ + filename: string; + /** + * @example added + * @enum {string} + */ + status: + | "added" + | "removed" + | "modified" + | "renamed" + | "copied" + | "changed" + | "unchanged"; + /** @example 103 */ + additions: number; + /** @example 21 */ + deletions: number; + /** @example 124 */ + changes: number; + /** + * Format: uri + * @example https://github.com/octocat/Hello-World/blob/6dcb09b5b57875f334f61aebed695e2e4193db5e/file1.txt + */ + blob_url: string; + /** + * Format: uri + * @example https://github.com/octocat/Hello-World/raw/6dcb09b5b57875f334f61aebed695e2e4193db5e/file1.txt + */ + raw_url: string; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/contents/file1.txt?ref=6dcb09b5b57875f334f61aebed695e2e4193db5e + */ + contents_url: string; + /** @example @@ -132,7 +132,7 @@ module Test @@ -1000,7 +1000,7 @@ module Test */ + patch?: string; + /** @example file.txt */ + previous_filename?: string; + }; + /** + * Commit + * @description Commit + */ + commit: { + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/commits/6dcb09b5b57875f334f61aebed695e2e4193db5e + */ + url: string; + /** @example 6dcb09b5b57875f334f61aebed695e2e4193db5e */ + sha: string; + /** @example MDY6Q29tbWl0NmRjYjA5YjViNTc4NzVmMzM0ZjYxYWViZWQ2OTVlMmU0MTkzZGI1ZQ== */ + node_id: string; + /** + * Format: uri + * @example https://github.com/octocat/Hello-World/commit/6dcb09b5b57875f334f61aebed695e2e4193db5e + */ + html_url: string; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/commits/6dcb09b5b57875f334f61aebed695e2e4193db5e/comments + */ + comments_url: string; + commit: { + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/commits/6dcb09b5b57875f334f61aebed695e2e4193db5e + */ + url: string; + author: components["schemas"]["nullable-git-user"]; + committer: components["schemas"]["nullable-git-user"]; + /** @example Fix all the bugs */ + message: string; + /** @example 0 */ + comment_count: number; + tree: { + /** @example 827efc6d56897b048c772eb4087f854f46256132 */ + sha: 
string; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/tree/827efc6d56897b048c772eb4087f854f46256132 + */ + url: string; + }; + verification?: components["schemas"]["verification"]; + }; + author: components["schemas"]["nullable-simple-user"]; + committer: components["schemas"]["nullable-simple-user"]; + parents: { + /** @example 7638417db6d59f3c431d3e1f261cc637155684cd */ + sha: string; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/commits/7638417db6d59f3c431d3e1f261cc637155684cd + */ + url: string; + /** + * Format: uri + * @example https://github.com/octocat/Hello-World/commit/7638417db6d59f3c431d3e1f261cc637155684cd + */ + html_url?: string; + }[]; + stats?: { + additions?: number; + deletions?: number; + total?: number; + }; + files?: components["schemas"]["diff-entry"][]; + }; + /** + * Branch With Protection + * @description Branch With Protection + */ + "branch-with-protection": { + name: string; + commit: components["schemas"]["commit"]; + _links: { + html: string; + /** Format: uri */ + self: string; + }; + protected: boolean; + protection: components["schemas"]["branch-protection"]; + /** Format: uri */ + protection_url: string; + /** @example "mas*" */ + pattern?: string; + /** @example 1 */ + required_approving_review_count?: number; + }; + /** + * Status Check Policy + * @description Status Check Policy + */ + "status-check-policy": { + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/branches/master/protection/required_status_checks + */ + url: string; + /** @example true */ + strict: boolean; + /** + * @example [ + * "continuous-integration/travis-ci" + * ] + */ + contexts: string[]; + checks: { + /** @example continuous-integration/travis-ci */ + context: string; + app_id: number | null; + }[]; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/branches/master/protection/required_status_checks/contexts + */ + contexts_url: string; + }; + /** + * Protected Branch + * @description Branch protections protect branches + */ + "protected-branch": { + /** Format: uri */ + url: string; + required_status_checks?: components["schemas"]["status-check-policy"]; + required_pull_request_reviews?: { + /** Format: uri */ + url: string; + dismiss_stale_reviews?: boolean; + require_code_owner_reviews?: boolean; + required_approving_review_count?: number; + dismissal_restrictions?: { + /** Format: uri */ + url: string; + /** Format: uri */ + users_url: string; + /** Format: uri */ + teams_url: string; + users: components["schemas"]["simple-user"][]; + teams: components["schemas"]["team"][]; + apps?: components["schemas"]["integration"][]; + }; + bypass_pull_request_allowances?: { + users: components["schemas"]["simple-user"][]; + teams: components["schemas"]["team"][]; + apps?: components["schemas"]["integration"][]; + }; + }; + required_signatures?: { + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/branches/master/protection/required_signatures + */ + url: string; + /** @example true */ + enabled: boolean; + }; + enforce_admins?: { + /** Format: uri */ + url: string; + enabled: boolean; + }; + required_linear_history?: { + enabled: boolean; + }; + allow_force_pushes?: { + enabled: boolean; + }; + allow_deletions?: { + enabled: boolean; + }; + restrictions?: components["schemas"]["branch-restriction-policy"]; + required_conversation_resolution?: { + enabled?: boolean; + }; + block_creations?: { + enabled: boolean; + 
}; + }; + /** + * Deployment + * @description A deployment created as the result of an Actions check run from a workflow that references an environment + */ + "deployment-simple": { + /** + * Format: uri + * @example https://api.github.com/repos/octocat/example/deployments/1 + */ + url: string; + /** + * @description Unique identifier of the deployment + * @example 42 + */ + id: number; + /** @example MDEwOkRlcGxveW1lbnQx */ + node_id: string; + /** + * @description Parameter to specify a task to execute + * @example deploy + */ + task: string; + /** @example staging */ + original_environment?: string; + /** + * @description Name for the target deployment environment. + * @example production + */ + environment: string; + /** @example Deploy request from hubot */ + description: string | null; + /** + * Format: date-time + * @example 2012-07-20T01:19:13Z + */ + created_at: string; + /** + * Format: date-time + * @example 2012-07-20T01:19:13Z + */ + updated_at: string; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/example/deployments/1/statuses + */ + statuses_url: string; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/example + */ + repository_url: string; + /** + * @description Specifies if the given environment is will no longer exist at some point in the future. Default: false. + * @example true + */ + transient_environment?: boolean; + /** + * @description Specifies if the given environment is one that end-users directly interact with. Default: false. + * @example true + */ + production_environment?: boolean; + performed_via_github_app?: components["schemas"]["nullable-integration"]; + }; + /** + * CheckRun + * @description A check performed on the code of a given code change + */ + "check-run": { + /** + * @description The id of the check. + * @example 21 + */ + id: number; + /** + * @description The SHA of the commit that is being checked. + * @example 009b8a3a9ccbb128af87f9b1c0f4c62e8a304f6d + */ + head_sha: string; + /** @example MDg6Q2hlY2tSdW40 */ + node_id: string; + /** @example 42 */ + external_id: string | null; + /** @example https://api.github.com/repos/github/hello-world/check-runs/4 */ + url: string; + /** @example https://github.com/github/hello-world/runs/4 */ + html_url: string | null; + /** @example https://example.com */ + details_url: string | null; + /** + * @description The phase of the lifecycle that the check is currently in. + * @example queued + * @enum {string} + */ + status: "queued" | "in_progress" | "completed"; + /** + * @example neutral + * @enum {string|null} + */ + conclusion: + | ( + | "success" + | "failure" + | "neutral" + | "cancelled" + | "skipped" + | "timed_out" + | "action_required" + ) + | null; + /** + * Format: date-time + * @example 2018-05-04T01:14:52Z + */ + started_at: string | null; + /** + * Format: date-time + * @example 2018-05-04T01:14:52Z + */ + completed_at: string | null; + output: { + title: string | null; + summary: string | null; + text: string | null; + annotations_count: number; + /** Format: uri */ + annotations_url: string; + }; + /** + * @description The name of the check. 
+ * @example test-coverage + */ + name: string; + check_suite: { + id: number; + } | null; + app: components["schemas"]["nullable-integration"]; + pull_requests: components["schemas"]["pull-request-minimal"][]; + deployment?: components["schemas"]["deployment-simple"]; + }; + /** + * Check Annotation + * @description Check Annotation + */ + "check-annotation": { + /** @example README.md */ + path: string; + /** @example 2 */ + start_line: number; + /** @example 2 */ + end_line: number; + /** @example 5 */ + start_column: number | null; + /** @example 10 */ + end_column: number | null; + /** @example warning */ + annotation_level: string | null; + /** @example Spell Checker */ + title: string | null; + /** @example Check your spelling for 'banaas'. */ + message: string | null; + /** @example Do you mean 'bananas' or 'banana'? */ + raw_details: string | null; + blob_href: string; + }; + /** + * Simple Commit + * @description Simple Commit + */ + "simple-commit": { + id: string; + tree_id: string; + message: string; + /** Format: date-time */ + timestamp: string; + author: { + name: string; + email: string; + } | null; + committer: { + name: string; + email: string; + } | null; + }; + /** + * CheckSuite + * @description A suite of checks performed on the code of a given code change + */ + "check-suite": { + /** @example 5 */ + id: number; + /** @example MDEwOkNoZWNrU3VpdGU1 */ + node_id: string; + /** @example master */ + head_branch: string | null; + /** + * @description The SHA of the head commit that is being checked. + * @example 009b8a3a9ccbb128af87f9b1c0f4c62e8a304f6d + */ + head_sha: string; + /** + * @example completed + * @enum {string|null} + */ + status: ("queued" | "in_progress" | "completed") | null; + /** + * @example neutral + * @enum {string|null} + */ + conclusion: + | ( + | "success" + | "failure" + | "neutral" + | "cancelled" + | "skipped" + | "timed_out" + | "action_required" + ) + | null; + /** @example https://api.github.com/repos/github/hello-world/check-suites/5 */ + url: string | null; + /** @example 146e867f55c26428e5f9fade55a9bbf5e95a7912 */ + before: string | null; + /** @example d6fde92930d4715a2b49857d24b940956b26d2d3 */ + after: string | null; + pull_requests: components["schemas"]["pull-request-minimal"][] | null; + app: components["schemas"]["nullable-integration"]; + repository: components["schemas"]["minimal-repository"]; + /** Format: date-time */ + created_at: string | null; + /** Format: date-time */ + updated_at: string | null; + head_commit: components["schemas"]["simple-commit"]; + latest_check_runs_count: number; + check_runs_url: string; + rerequestable?: boolean; + runs_rerequestable?: boolean; + }; + /** + * Check Suite Preference + * @description Check suite configuration preferences for a repository. + */ + "check-suite-preference": { + preferences: { + auto_trigger_checks?: { + app_id: number; + setting: boolean; + }[]; + }; + repository: components["schemas"]["minimal-repository"]; + }; + "code-scanning-alert-rule-summary": { + /** @description A unique identifier for the rule used to detect the alert. */ + id?: string | null; + /** @description The name of the rule used to detect the alert. */ + name?: string; + /** @description A set of tags applicable for the rule. */ + tags?: string[] | null; + /** + * @description The severity of the alert. + * @enum {string|null} + */ + severity?: ("none" | "note" | "warning" | "error") | null; + /** @description A short description of the rule used to detect the alert. 
*/ + description?: string; + }; + "code-scanning-alert-items": { + number: components["schemas"]["alert-number"]; + created_at: components["schemas"]["alert-created-at"]; + updated_at?: components["schemas"]["alert-updated-at"]; + url: components["schemas"]["alert-url"]; + html_url: components["schemas"]["alert-html-url"]; + instances_url: components["schemas"]["alert-instances-url"]; + state: components["schemas"]["code-scanning-alert-state"]; + fixed_at?: components["schemas"]["code-scanning-alert-fixed-at"]; + dismissed_by: components["schemas"]["nullable-simple-user"]; + dismissed_at: components["schemas"]["code-scanning-alert-dismissed-at"]; + dismissed_reason: components["schemas"]["code-scanning-alert-dismissed-reason"]; + dismissed_comment?: components["schemas"]["code-scanning-alert-dismissed-comment"]; + rule: components["schemas"]["code-scanning-alert-rule-summary"]; + tool: components["schemas"]["code-scanning-analysis-tool"]; + most_recent_instance: components["schemas"]["code-scanning-alert-instance"]; + }; + "code-scanning-alert": { + number: components["schemas"]["alert-number"]; + created_at: components["schemas"]["alert-created-at"]; + updated_at?: components["schemas"]["alert-updated-at"]; + url: components["schemas"]["alert-url"]; + html_url: components["schemas"]["alert-html-url"]; + instances_url: components["schemas"]["alert-instances-url"]; + state: components["schemas"]["code-scanning-alert-state"]; + fixed_at?: components["schemas"]["code-scanning-alert-fixed-at"]; + dismissed_by: components["schemas"]["nullable-simple-user"]; + dismissed_at: components["schemas"]["code-scanning-alert-dismissed-at"]; + dismissed_reason: components["schemas"]["code-scanning-alert-dismissed-reason"]; + dismissed_comment?: components["schemas"]["code-scanning-alert-dismissed-comment"]; + rule: components["schemas"]["code-scanning-alert-rule"]; + tool: components["schemas"]["code-scanning-analysis-tool"]; + most_recent_instance: components["schemas"]["code-scanning-alert-instance"]; + }; + /** + * @description Sets the state of the code scanning alert. You must provide `dismissed_reason` when you set the state to `dismissed`. + * @enum {string} + */ + "code-scanning-alert-set-state": "open" | "dismissed"; + /** + * @description An identifier for the upload. + * @example 6c81cd8e-b078-4ac3-a3be-1dad7dbd0b53 + */ + "code-scanning-analysis-sarif-id": string; + /** @description The SHA of the commit to which the analysis you are uploading relates. */ + "code-scanning-analysis-commit-sha": string; + /** @description Identifies the variable values associated with the environment in which this analysis was performed. */ + "code-scanning-analysis-environment": string; + /** + * Format: date-time + * @description The time that the analysis was created in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + "code-scanning-analysis-created-at": string; + /** + * Format: uri + * @description The REST API URL of the analysis resource. 
+ */ + "code-scanning-analysis-url": string; + "code-scanning-analysis": { + ref: components["schemas"]["code-scanning-ref"]; + commit_sha: components["schemas"]["code-scanning-analysis-commit-sha"]; + analysis_key: components["schemas"]["code-scanning-analysis-analysis-key"]; + environment: components["schemas"]["code-scanning-analysis-environment"]; + category?: components["schemas"]["code-scanning-analysis-category"]; + /** @example error reading field xyz */ + error: string; + created_at: components["schemas"]["code-scanning-analysis-created-at"]; + /** @description The total number of results in the analysis. */ + results_count: number; + /** @description The total number of rules used in the analysis. */ + rules_count: number; + /** @description Unique identifier for this analysis. */ + id: number; + url: components["schemas"]["code-scanning-analysis-url"]; + sarif_id: components["schemas"]["code-scanning-analysis-sarif-id"]; + tool: components["schemas"]["code-scanning-analysis-tool"]; + deletable: boolean; + /** + * @description Warning generated when processing the analysis + * @example 123 results were ignored + */ + warning: string; + }; + /** + * Analysis deletion + * @description Successful deletion of a code scanning analysis + */ + "code-scanning-analysis-deletion": { + /** + * Format: uri + * @description Next deletable analysis in chain, without last analysis deletion confirmation + */ + next_analysis_url: string | null; + /** + * Format: uri + * @description Next deletable analysis in chain, with last analysis deletion confirmation + */ + confirm_delete_url: string | null; + }; + /** @description A Base64 string representing the SARIF file to upload. You must first compress your SARIF file using [`gzip`](http://www.gnu.org/software/gzip/manual/gzip.html) and then translate the contents of the file into a Base64 encoding string. For more information, see "[SARIF support for code scanning](https://docs.github.com/code-security/secure-coding/sarif-support-for-code-scanning)." */ + "code-scanning-analysis-sarif-file": string; + "code-scanning-sarifs-receipt": { + id?: components["schemas"]["code-scanning-analysis-sarif-id"]; + /** + * Format: uri + * @description The REST API URL for checking the status of the upload. + */ + url?: string; + }; + "code-scanning-sarifs-status": { + /** + * @description `pending` files have not yet been processed, while `complete` means results from the SARIF have been stored. `failed` files have either not been processed at all, or could only be partially processed. + * @enum {string} + */ + processing_status?: "pending" | "complete" | "failed"; + /** + * Format: uri + * @description The REST API URL for getting the analyses associated with the upload. + */ + analyses_url?: string | null; + /** @description Any errors that ocurred during processing of the delivery. */ + errors?: string[] | null; + }; + /** + * CODEOWNERS errors + * @description A list of errors found in a repo's CODEOWNERS file + */ + "codeowners-errors": { + errors: { + /** + * @description The line number where this errors occurs. + * @example 7 + */ + line: number; + /** + * @description The column number where this errors occurs. + * @example 3 + */ + column: number; + /** + * @description The contents of the line where the error occurs. + * @example * user + */ + source?: string; + /** + * @description The type of error. + * @example Invalid owner + */ + kind: string; + /** + * @description Suggested action to fix the error. 
This will usually be `null`, but is provided for some common errors. + * @example The pattern `/` will never match anything, did you mean `*` instead? + */ + suggestion?: string | null; + /** + * @description A human-readable description of the error, combining information from multiple fields, laid out for display in a monospaced typeface (for example, a command-line setting). + * @example Invalid owner on line 7: + * + * * user + * ^ + */ + message: string; + /** + * @description The path of the file where the error occured. + * @example .github/CODEOWNERS + */ + path: string; + }[]; + }; + /** + * Codespace machine + * @description A description of the machine powering a codespace. + */ + "codespace-machine": { + /** + * @description The name of the machine. + * @example standardLinux + */ + name: string; + /** + * @description The display name of the machine includes cores, memory, and storage. + * @example 4 cores, 8 GB RAM, 64 GB storage + */ + display_name: string; + /** + * @description The operating system of the machine. + * @example linux + */ + operating_system: string; + /** + * @description How much storage is available to the codespace. + * @example 68719476736 + */ + storage_in_bytes: number; + /** + * @description How much memory is available to the codespace. + * @example 8589934592 + */ + memory_in_bytes: number; + /** + * @description How many cores are available to the codespace. + * @example 4 + */ + cpus: number; + /** + * @description Whether a prebuild is currently available when creating a codespace for this machine and repository. If a branch was not specified as a ref, the default branch will be assumed. Value will be "null" if prebuilds are not supported or prebuild availability could not be determined. Value will be "none" if no prebuild is available. Latest values "ready" and "in_progress" indicate the prebuild availability status. + * @example ready + * @enum {string|null} + */ + prebuild_availability: ("none" | "ready" | "in_progress") | null; + }; + /** + * Codespaces Secret + * @description Set repository secrets for GitHub Codespaces. + */ + "repo-codespaces-secret": { + /** + * @description The name of the secret. + * @example SECRET_TOKEN + */ + name: string; + /** Format: date-time */ + created_at: string; + /** Format: date-time */ + updated_at: string; + }; + /** + * CodespacesPublicKey + * @description The public key used for setting Codespaces secrets. + */ + "codespaces-public-key": { + /** + * @description The identifier for the key. + * @example 1234567 + */ + key_id: string; + /** + * @description The Base64 encoded public key. 
+ * @example hBT5WZEj8ZoOv6TYJsfWq7MxTEQopZO5/IT3ZCVQPzs= + */ + key: string; + /** @example 2 */ + id?: number; + /** @example https://api.github.com/user/keys/2 */ + url?: string; + /** @example ssh-rsa AAAAB3NzaC1yc2EAAA */ + title?: string; + /** @example 2011-01-26T19:01:12Z */ + created_at?: string; + }; + /** + * Collaborator + * @description Collaborator + */ + collaborator: { + /** @example octocat */ + login: string; + /** @example 1 */ + id: number; + email?: string | null; + name?: string | null; + /** @example MDQ6VXNlcjE= */ + node_id: string; + /** + * Format: uri + * @example https://github.com/images/error/octocat_happy.gif + */ + avatar_url: string; + /** @example 41d064eb2195891e12d0413f63227ea7 */ + gravatar_id: string | null; + /** + * Format: uri + * @example https://api.github.com/users/octocat + */ + url: string; + /** + * Format: uri + * @example https://github.com/octocat + */ + html_url: string; + /** + * Format: uri + * @example https://api.github.com/users/octocat/followers + */ + followers_url: string; + /** @example https://api.github.com/users/octocat/following{/other_user} */ + following_url: string; + /** @example https://api.github.com/users/octocat/gists{/gist_id} */ + gists_url: string; + /** @example https://api.github.com/users/octocat/starred{/owner}{/repo} */ + starred_url: string; + /** + * Format: uri + * @example https://api.github.com/users/octocat/subscriptions + */ + subscriptions_url: string; + /** + * Format: uri + * @example https://api.github.com/users/octocat/orgs + */ + organizations_url: string; + /** + * Format: uri + * @example https://api.github.com/users/octocat/repos + */ + repos_url: string; + /** @example https://api.github.com/users/octocat/events{/privacy} */ + events_url: string; + /** + * Format: uri + * @example https://api.github.com/users/octocat/received_events + */ + received_events_url: string; + /** @example User */ + type: string; + site_admin: boolean; + permissions?: { + pull: boolean; + triage?: boolean; + push: boolean; + maintain?: boolean; + admin: boolean; + }; + /** @example admin */ + role_name: string; + }; + /** + * Repository Invitation + * @description Repository invitations let you manage who you collaborate with. + */ + "repository-invitation": { + /** + * @description Unique identifier of the repository invitation. + * @example 42 + */ + id: number; + repository: components["schemas"]["minimal-repository"]; + invitee: components["schemas"]["nullable-simple-user"]; + inviter: components["schemas"]["nullable-simple-user"]; + /** + * @description The permission associated with the invitation. 
+ * @example read + * @enum {string} + */ + permissions: "read" | "write" | "admin" | "triage" | "maintain"; + /** + * Format: date-time + * @example 2016-06-13T14:52:50-05:00 + */ + created_at: string; + /** @description Whether or not the invitation has expired */ + expired?: boolean; + /** + * @description URL for the repository invitation + * @example https://api.github.com/user/repository-invitations/1 + */ + url: string; + /** @example https://github.com/octocat/Hello-World/invitations */ + html_url: string; + node_id: string; + }; + /** + * Collaborator + * @description Collaborator + */ + "nullable-collaborator": { + /** @example octocat */ + login: string; + /** @example 1 */ + id: number; + email?: string | null; + name?: string | null; + /** @example MDQ6VXNlcjE= */ + node_id: string; + /** + * Format: uri + * @example https://github.com/images/error/octocat_happy.gif + */ + avatar_url: string; + /** @example 41d064eb2195891e12d0413f63227ea7 */ + gravatar_id: string | null; + /** + * Format: uri + * @example https://api.github.com/users/octocat + */ + url: string; + /** + * Format: uri + * @example https://github.com/octocat + */ + html_url: string; + /** + * Format: uri + * @example https://api.github.com/users/octocat/followers + */ + followers_url: string; + /** @example https://api.github.com/users/octocat/following{/other_user} */ + following_url: string; + /** @example https://api.github.com/users/octocat/gists{/gist_id} */ + gists_url: string; + /** @example https://api.github.com/users/octocat/starred{/owner}{/repo} */ + starred_url: string; + /** + * Format: uri + * @example https://api.github.com/users/octocat/subscriptions + */ + subscriptions_url: string; + /** + * Format: uri + * @example https://api.github.com/users/octocat/orgs + */ + organizations_url: string; + /** + * Format: uri + * @example https://api.github.com/users/octocat/repos + */ + repos_url: string; + /** @example https://api.github.com/users/octocat/events{/privacy} */ + events_url: string; + /** + * Format: uri + * @example https://api.github.com/users/octocat/received_events + */ + received_events_url: string; + /** @example User */ + type: string; + site_admin: boolean; + permissions?: { + pull: boolean; + triage?: boolean; + push: boolean; + maintain?: boolean; + admin: boolean; + }; + /** @example admin */ + role_name: string; + } | null; + /** + * Repository Collaborator Permission + * @description Repository Collaborator Permission + */ + "repository-collaborator-permission": { + permission: string; + /** @example admin */ + role_name: string; + user: components["schemas"]["nullable-collaborator"]; + }; + /** + * Commit Comment + * @description Commit Comment + */ + "commit-comment": { + /** Format: uri */ + html_url: string; + /** Format: uri */ + url: string; + id: number; + node_id: string; + body: string; + path: string | null; + position: number | null; + line: number | null; + commit_id: string; + user: components["schemas"]["nullable-simple-user"]; + /** Format: date-time */ + created_at: string; + /** Format: date-time */ + updated_at: string; + author_association: components["schemas"]["author-association"]; + reactions?: components["schemas"]["reaction-rollup"]; + }; + /** + * Branch Short + * @description Branch Short + */ + "branch-short": { + name: string; + commit: { + sha: string; + url: string; + }; + protected: boolean; + }; + /** + * Link + * @description Hypermedia Link + */ + link: { + href: string; + }; + /** + * Auto merge + * @description The status of auto merging a 
pull request. + */ + "auto-merge": { + enabled_by: components["schemas"]["simple-user"]; + /** + * @description The merge method to use. + * @enum {string} + */ + merge_method: "merge" | "squash" | "rebase"; + /** @description Title for the merge commit message. */ + commit_title: string; + /** @description Commit message for the merge commit. */ + commit_message: string; + } | null; + /** + * Pull Request Simple + * @description Pull Request Simple + */ + "pull-request-simple": { + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/pulls/1347 + */ + url: string; + /** @example 1 */ + id: number; + /** @example MDExOlB1bGxSZXF1ZXN0MQ== */ + node_id: string; + /** + * Format: uri + * @example https://github.com/octocat/Hello-World/pull/1347 + */ + html_url: string; + /** + * Format: uri + * @example https://github.com/octocat/Hello-World/pull/1347.diff + */ + diff_url: string; + /** + * Format: uri + * @example https://github.com/octocat/Hello-World/pull/1347.patch + */ + patch_url: string; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/issues/1347 + */ + issue_url: string; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/pulls/1347/commits + */ + commits_url: string; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/pulls/1347/comments + */ + review_comments_url: string; + /** @example https://api.github.com/repos/octocat/Hello-World/pulls/comments{/number} */ + review_comment_url: string; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/issues/1347/comments + */ + comments_url: string; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/statuses/6dcb09b5b57875f334f61aebed695e2e4193db5e + */ + statuses_url: string; + /** @example 1347 */ + number: number; + /** @example open */ + state: string; + /** @example true */ + locked: boolean; + /** @example new-feature */ + title: string; + user: components["schemas"]["nullable-simple-user"]; + /** @example Please pull these awesome changes */ + body: string | null; + labels: { + /** Format: int64 */ + id: number; + node_id: string; + url: string; + name: string; + description: string; + color: string; + default: boolean; + }[]; + milestone: components["schemas"]["nullable-milestone"]; + /** @example too heated */ + active_lock_reason?: string | null; + /** + * Format: date-time + * @example 2011-01-26T19:01:12Z + */ + created_at: string; + /** + * Format: date-time + * @example 2011-01-26T19:01:12Z + */ + updated_at: string; + /** + * Format: date-time + * @example 2011-01-26T19:01:12Z + */ + closed_at: string | null; + /** + * Format: date-time + * @example 2011-01-26T19:01:12Z + */ + merged_at: string | null; + /** @example e5bd3914e2e596debea16f433f57875b5b90bcd6 */ + merge_commit_sha: string | null; + assignee: components["schemas"]["nullable-simple-user"]; + assignees?: components["schemas"]["simple-user"][] | null; + requested_reviewers?: components["schemas"]["simple-user"][] | null; + requested_teams?: components["schemas"]["team"][] | null; + head: { + label: string; + ref: string; + repo: components["schemas"]["repository"]; + sha: string; + user: components["schemas"]["nullable-simple-user"]; + }; + base: { + label: string; + ref: string; + repo: components["schemas"]["repository"]; + sha: string; + user: components["schemas"]["nullable-simple-user"]; + }; + _links: { + comments: components["schemas"]["link"]; + commits: 
components["schemas"]["link"]; + statuses: components["schemas"]["link"]; + html: components["schemas"]["link"]; + issue: components["schemas"]["link"]; + review_comments: components["schemas"]["link"]; + review_comment: components["schemas"]["link"]; + self: components["schemas"]["link"]; + }; + author_association: components["schemas"]["author-association"]; + auto_merge: components["schemas"]["auto-merge"]; + /** + * @description Indicates whether or not the pull request is a draft. + * @example false + */ + draft?: boolean; + }; + /** Simple Commit Status */ + "simple-commit-status": { + description: string | null; + id: number; + node_id: string; + state: string; + context: string; + /** Format: uri */ + target_url: string; + required?: boolean | null; + /** Format: uri */ + avatar_url: string | null; + /** Format: uri */ + url: string; + /** Format: date-time */ + created_at: string; + /** Format: date-time */ + updated_at: string; + }; + /** + * Combined Commit Status + * @description Combined Commit Status + */ + "combined-commit-status": { + state: string; + statuses: components["schemas"]["simple-commit-status"][]; + sha: string; + total_count: number; + repository: components["schemas"]["minimal-repository"]; + /** Format: uri */ + commit_url: string; + /** Format: uri */ + url: string; + }; + /** + * Status + * @description The status of a commit. + */ + status: { + url: string; + avatar_url: string | null; + id: number; + node_id: string; + state: string; + description: string; + target_url: string; + context: string; + created_at: string; + updated_at: string; + creator: components["schemas"]["nullable-simple-user"]; + }; + /** + * Code Of Conduct Simple + * @description Code of Conduct Simple + */ + "nullable-code-of-conduct-simple": { + /** + * Format: uri + * @example https://api.github.com/repos/github/docs/community/code_of_conduct + */ + url: string; + /** @example citizen_code_of_conduct */ + key: string; + /** @example Citizen Code of Conduct */ + name: string; + /** + * Format: uri + * @example https://github.com/github/docs/blob/main/CODE_OF_CONDUCT.md + */ + html_url: string | null; + } | null; + /** Community Health File */ + "nullable-community-health-file": { + /** Format: uri */ + url: string; + /** Format: uri */ + html_url: string; + } | null; + /** + * Community Profile + * @description Community Profile + */ + "community-profile": { + /** @example 100 */ + health_percentage: number; + /** @example My first repository on GitHub! 
*/ + description: string | null; + /** @example example.com */ + documentation: string | null; + files: { + code_of_conduct: components["schemas"]["nullable-code-of-conduct-simple"]; + code_of_conduct_file: components["schemas"]["nullable-community-health-file"]; + license: components["schemas"]["nullable-license-simple"]; + contributing: components["schemas"]["nullable-community-health-file"]; + readme: components["schemas"]["nullable-community-health-file"]; + issue_template: components["schemas"]["nullable-community-health-file"]; + pull_request_template: components["schemas"]["nullable-community-health-file"]; + }; + /** + * Format: date-time + * @example 2017-02-28T19:09:29Z + */ + updated_at: string | null; + /** @example true */ + content_reports_enabled?: boolean; + }; + /** + * Commit Comparison + * @description Commit Comparison + */ + "commit-comparison": { + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/compare/master...topic + */ + url: string; + /** + * Format: uri + * @example https://github.com/octocat/Hello-World/compare/master...topic + */ + html_url: string; + /** + * Format: uri + * @example https://github.com/octocat/Hello-World/compare/octocat:bbcd538c8e72b8c175046e27cc8f907076331401...octocat:0328041d1152db8ae77652d1618a02e57f745f17 + */ + permalink_url: string; + /** + * Format: uri + * @example https://github.com/octocat/Hello-World/compare/master...topic.diff + */ + diff_url: string; + /** + * Format: uri + * @example https://github.com/octocat/Hello-World/compare/master...topic.patch + */ + patch_url: string; + base_commit: components["schemas"]["commit"]; + merge_base_commit: components["schemas"]["commit"]; + /** + * @example ahead + * @enum {string} + */ + status: "diverged" | "ahead" | "behind" | "identical"; + /** @example 4 */ + ahead_by: number; + /** @example 5 */ + behind_by: number; + /** @example 6 */ + total_commits: number; + commits: components["schemas"]["commit"][]; + files?: components["schemas"]["diff-entry"][]; + }; + /** + * Content Tree + * @description Content Tree + */ + "content-tree": { + type: string; + size: number; + name: string; + path: string; + sha: string; + /** Format: uri */ + url: string; + /** Format: uri */ + git_url: string | null; + /** Format: uri */ + html_url: string | null; + /** Format: uri */ + download_url: string | null; + entries?: { + type: string; + size: number; + name: string; + path: string; + content?: string; + sha: string; + /** Format: uri */ + url: string; + /** Format: uri */ + git_url: string | null; + /** Format: uri */ + html_url: string | null; + /** Format: uri */ + download_url: string | null; + _links: { + /** Format: uri */ + git: string | null; + /** Format: uri */ + html: string | null; + /** Format: uri */ + self: string; + }; + }[]; + _links: { + /** Format: uri */ + git: string | null; + /** Format: uri */ + html: string | null; + /** Format: uri */ + self: string; + }; + } & { + content: unknown; + encoding: unknown; + }; + /** + * Content Directory + * @description A list of directory items + */ + "content-directory": { + type: string; + size: number; + name: string; + path: string; + content?: string; + sha: string; + /** Format: uri */ + url: string; + /** Format: uri */ + git_url: string | null; + /** Format: uri */ + html_url: string | null; + /** Format: uri */ + download_url: string | null; + _links: { + /** Format: uri */ + git: string | null; + /** Format: uri */ + html: string | null; + /** Format: uri */ + self: string; + }; + }[]; + /** + * 
Content File + * @description Content File + */ + "content-file": { + type: string; + encoding: string; + size: number; + name: string; + path: string; + content: string; + sha: string; + /** Format: uri */ + url: string; + /** Format: uri */ + git_url: string | null; + /** Format: uri */ + html_url: string | null; + /** Format: uri */ + download_url: string | null; + _links: { + /** Format: uri */ + git: string | null; + /** Format: uri */ + html: string | null; + /** Format: uri */ + self: string; + }; + /** @example "actual/actual.md" */ + target?: string; + /** @example "git://example.com/defunkt/dotjs.git" */ + submodule_git_url?: string; + }; + /** + * Symlink Content + * @description An object describing a symlink + */ + "content-symlink": { + type: string; + target: string; + size: number; + name: string; + path: string; + sha: string; + /** Format: uri */ + url: string; + /** Format: uri */ + git_url: string | null; + /** Format: uri */ + html_url: string | null; + /** Format: uri */ + download_url: string | null; + _links: { + /** Format: uri */ + git: string | null; + /** Format: uri */ + html: string | null; + /** Format: uri */ + self: string; + }; + }; + /** + * Symlink Content + * @description An object describing a symlink + */ + "content-submodule": { + type: string; + /** Format: uri */ + submodule_git_url: string; + size: number; + name: string; + path: string; + sha: string; + /** Format: uri */ + url: string; + /** Format: uri */ + git_url: string | null; + /** Format: uri */ + html_url: string | null; + /** Format: uri */ + download_url: string | null; + _links: { + /** Format: uri */ + git: string | null; + /** Format: uri */ + html: string | null; + /** Format: uri */ + self: string; + }; + }; + /** + * File Commit + * @description File Commit + */ + "file-commit": { + content: { + name?: string; + path?: string; + sha?: string; + size?: number; + url?: string; + html_url?: string; + git_url?: string; + download_url?: string; + type?: string; + _links?: { + self?: string; + git?: string; + html?: string; + }; + } | null; + commit: { + sha?: string; + node_id?: string; + url?: string; + html_url?: string; + author?: { + date?: string; + name?: string; + email?: string; + }; + committer?: { + date?: string; + name?: string; + email?: string; + }; + message?: string; + tree?: { + url?: string; + sha?: string; + }; + parents?: { + url?: string; + html_url?: string; + sha?: string; + }[]; + verification?: { + verified?: boolean; + reason?: string; + signature?: string | null; + payload?: string | null; + }; + }; + }; + /** + * Contributor + * @description Contributor + */ + contributor: { + login?: string; + id?: number; + node_id?: string; + /** Format: uri */ + avatar_url?: string; + gravatar_id?: string | null; + /** Format: uri */ + url?: string; + /** Format: uri */ + html_url?: string; + /** Format: uri */ + followers_url?: string; + following_url?: string; + gists_url?: string; + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + repos_url?: string; + events_url?: string; + /** Format: uri */ + received_events_url?: string; + type: string; + site_admin?: boolean; + contributions: number; + email?: string; + name?: string; + }; + /** + * Dependabot Secret + * @description Set secrets for Dependabot. + */ + "dependabot-secret": { + /** + * @description The name of the secret. 
+ * @example MY_ARTIFACTORY_PASSWORD + */ + name: string; + /** Format: date-time */ + created_at: string; + /** Format: date-time */ + updated_at: string; + }; + /** + * Dependency Graph Diff + * @description A diff of the dependencies between two commits. + */ + "dependency-graph-diff": { + /** @enum {string} */ + change_type: "added" | "removed"; + /** @example path/to/package-lock.json */ + manifest: string; + /** @example npm */ + ecosystem: string; + /** @example @actions/core */ + name: string; + /** @example 1.0.0 */ + version: string; + /** @example pkg:/npm/%40actions/core@1.1.0 */ + package_url: string | null; + /** @example MIT */ + license: string | null; + /** @example https://github.com/github/actions */ + source_repository_url: string | null; + vulnerabilities: { + /** @example critical */ + severity: string; + /** @example GHSA-rf4j-j272-fj86 */ + advisory_ghsa_id: string; + /** @example A summary of the advisory. */ + advisory_summary: string; + /** @example https://github.com/advisories/GHSA-rf4j-j272-fj86 */ + advisory_url: string; + }[]; + }[]; + /** + * metadata + * @description User-defined metadata to store domain-specific information limited to 8 keys with scalar values. + */ + metadata: { + [key: string]: Partial<string> & Partial<number> & Partial<boolean>; + }; + /** + * Dependency + * @description A single package dependency. + */ + dependency: { + /** + * @description Package-url (PURL) of dependency. See https://github.com/package-url/purl-spec for more details. + * @example pkg:/npm/%40actions/http-client@1.0.11 + */ + package_url?: string; + metadata?: components["schemas"]["metadata"]; + /** + * @description A notation of whether a dependency is requested directly by this manifest or is a dependency of another dependency. + * @example direct + * @enum {string} + */ + relationship?: "direct" | "indirect"; + /** + * @description A notation of whether the dependency is required for the primary build artifact (runtime) or is only used for development. Future versions of this specification may allow for more granular scopes. + * @example runtime + * @enum {string} + */ + scope?: "runtime" | "development"; + /** + * @description Array of package-url (PURLs) of direct child dependencies. + * @example @actions/http-client + */ + dependencies?: string[]; + }; + /** + * manifest + * @description A collection of related dependencies declared in a file or representing a logical group of dependencies. + */ + manifest: { + /** + * @description The name of the manifest. + * @example package-lock.json + */ + name: string; + file?: { + /** + * @description The path of the manifest file relative to the root of the Git repository. + * @example /src/build/package-lock.json + */ + source_location?: string; + }; + metadata?: components["schemas"]["metadata"]; + resolved?: { [key: string]: components["schemas"]["dependency"] }; + }; + /** + * snapshot + * @description Create a new snapshot of a repository's dependencies. + */ + snapshot: { + /** @description The version of the repository snapshot submission. */ + version: number; + job: { + /** + * @description The external ID of the job. + * @example 5622a2b0-63f6-4732-8c34-a1ab27e102a11 + */ + id: string; + /** + * @description Correlator provides a key that is used to group snapshots submitted over time. Only the "latest" submitted snapshot for a given combination of `job.correlator` and `detector.name` will be considered when calculating a repository's current dependencies. 
Correlator should be as unique as it takes to distinguish all detection runs for a given "wave" of CI workflow you run. If you're using GitHub Actions, a good default value for this could be the environment variables GITHUB_WORKFLOW and GITHUB_JOB concatenated together. If you're using a build matrix, then you'll also need to add additional key(s) to distinguish between each submission inside a matrix variation. + * @example yourworkflowname_yourjobname + */ + correlator: string; + /** + * @description The url for the job. + * @example http://example.com/build + */ + html_url?: string; + }; + /** + * @description The commit SHA associated with this dependency snapshot. + * @example ddc951f4b1293222421f2c8df679786153acf689 + */ + sha: string; + /** + * @description The repository branch that triggered this snapshot. + * @example refs/heads/main + */ + ref: string; + /** @description A description of the detector used. */ + detector: { + /** + * @description The name of the detector used. + * @example docker buildtime detector + */ + name: string; + /** + * @description The version of the detector used. + * @example 1.0.0 + */ + version: string; + /** + * @description The url of the detector used. + * @example http://example.com/docker-buildtimer-detector + */ + url: string; + }; + metadata?: components["schemas"]["metadata"]; + /** @description A collection of package manifests */ + manifests?: { [key: string]: components["schemas"]["manifest"] }; + /** + * Format: date-time + * @description The time at which the snapshot was scanned. + * @example 2020-06-13T14:52:50-05:00 + */ + scanned: string; + }; + /** + * Deployment Status + * @description The status of a deployment. + */ + "deployment-status": { + /** + * Format: uri + * @example https://api.github.com/repos/octocat/example/deployments/42/statuses/1 + */ + url: string; + /** @example 1 */ + id: number; + /** @example MDE2OkRlcGxveW1lbnRTdGF0dXMx */ + node_id: string; + /** + * @description The state of the status. + * @example success + * @enum {string} + */ + state: + | "error" + | "failure" + | "inactive" + | "pending" + | "success" + | "queued" + | "in_progress"; + creator: components["schemas"]["nullable-simple-user"]; + /** + * @description A short description of the status. + * @default + * @example Deployment finished successfully. + */ + description: string; + /** + * @description The environment of the deployment that the status is for. + * @default + * @example production + */ + environment?: string; + /** + * Format: uri + * @description Deprecated: the URL to associate with this status. + * @default + * @example https://example.com/deployment/42/output + */ + target_url: string; + /** + * Format: date-time + * @example 2012-07-20T01:19:13Z + */ + created_at: string; + /** + * Format: date-time + * @example 2012-07-20T01:19:13Z + */ + updated_at: string; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/example/deployments/42 + */ + deployment_url: string; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/example + */ + repository_url: string; + /** + * Format: uri + * @description The URL for accessing your environment. + * @default + * @example https://staging.example.com/ + */ + environment_url?: string; + /** + * Format: uri + * @description The URL to associate with this status. 
+ * @default + * @example https://example.com/deployment/42/output + */ + log_url?: string; + performed_via_github_app?: components["schemas"]["nullable-integration"]; + }; + /** + * @description The amount of time to delay a job after the job is initially triggered. The time (in minutes) must be an integer between 0 and 43,200 (30 days). + * @example 30 + */ + "wait-timer": number; + /** @description The type of deployment branch policy for this environment. To allow all branches to deploy, set to `null`. */ + "deployment-branch-policy": { + /** @description Whether only branches with branch protection rules can deploy to this environment. If `protected_branches` is `true`, `custom_branch_policies` must be `false`; if `protected_branches` is `false`, `custom_branch_policies` must be `true`. */ + protected_branches: boolean; + /** @description Whether only branches that match the specified name patterns can deploy to this environment. If `custom_branch_policies` is `true`, `protected_branches` must be `false`; if `custom_branch_policies` is `false`, `protected_branches` must be `true`. */ + custom_branch_policies: boolean; + } | null; + /** + * Environment + * @description Details of a deployment environment + */ + environment: { + /** + * @description The id of the environment. + * @example 56780428 + */ + id: number; + /** @example MDExOkVudmlyb25tZW50NTY3ODA0Mjg= */ + node_id: string; + /** + * @description The name of the environment. + * @example staging + */ + name: string; + /** @example https://api.github.com/repos/github/hello-world/environments/staging */ + url: string; + /** @example https://github.com/github/hello-world/deployments/activity_log?environments_filter=staging */ + html_url: string; + /** + * Format: date-time + * @description The time that the environment was created, in ISO 8601 format. + * @example 2020-11-23T22:00:40Z + */ + created_at: string; + /** + * Format: date-time + * @description The time that the environment was last updated, in ISO 8601 format. + * @example 2020-11-23T22:00:40Z + */ + updated_at: string; + protection_rules?: (Partial<{ + /** @example 3515 */ + id: number; + /** @example MDQ6R2F0ZTM1MTU= */ + node_id: string; + /** @example wait_timer */ + type: string; + wait_timer?: components["schemas"]["wait-timer"]; + }> & + Partial<{ + /** @example 3755 */ + id: number; + /** @example MDQ6R2F0ZTM3NTU= */ + node_id: string; + /** @example required_reviewers */ + type: string; + /** @description The people or teams that may approve jobs that reference the environment. You can list up to six users or teams as reviewers. The reviewers must have at least read access to the repository. Only one of the required reviewers needs to approve the job for it to proceed. 
*/ + reviewers?: { + type?: components["schemas"]["deployment-reviewer-type"]; + reviewer?: Partial<components["schemas"]["simple-user"]> & + Partial<components["schemas"]["team"]>; + }[]; + }> & + Partial<{ + /** @example 3515 */ + id: number; + /** @example MDQ6R2F0ZTM1MTU= */ + node_id: string; + /** @example branch_policy */ + type: string; + }>)[]; + deployment_branch_policy?: components["schemas"]["deployment-branch-policy"]; + }; + /** + * Short Blob + * @description Short Blob + */ + "short-blob": { + url: string; + sha: string; + }; + /** + * Blob + * @description Blob + */ + blob: { + content: string; + encoding: string; + /** Format: uri */ + url: string; + sha: string; + size: number | null; + node_id: string; + highlighted_content?: string; + }; + /** + * Git Commit + * @description Low-level Git commit operations within a repository + */ + "git-commit": { + /** + * @description SHA for the commit + * @example 7638417db6d59f3c431d3e1f261cc637155684cd + */ + sha: string; + node_id: string; + /** Format: uri */ + url: string; + /** @description Identifying information for the git-user */ + author: { + /** + * Format: date-time + * @description Timestamp of the commit + * @example 2014-08-09T08:02:04+12:00 + */ + date: string; + /** + * @description Git email address of the user + * @example monalisa.octocat@example.com + */ + email: string; + /** + * @description Name of the git user + * @example Monalisa Octocat + */ + name: string; + }; + /** @description Identifying information for the git-user */ + committer: { + /** + * Format: date-time + * @description Timestamp of the commit + * @example 2014-08-09T08:02:04+12:00 + */ + date: string; + /** + * @description Git email address of the user + * @example monalisa.octocat@example.com + */ + email: string; + /** + * @description Name of the git user + * @example Monalisa Octocat + */ + name: string; + }; + /** + * @description Message describing the purpose of the commit + * @example Fix #42 + */ + message: string; + tree: { + /** + * @description SHA for the commit + * @example 7638417db6d59f3c431d3e1f261cc637155684cd + */ + sha: string; + /** Format: uri */ + url: string; + }; + parents: { + /** + * @description SHA for the commit + * @example 7638417db6d59f3c431d3e1f261cc637155684cd + */ + sha: string; + /** Format: uri */ + url: string; + /** Format: uri */ + html_url: string; + }[]; + verification: { + verified: boolean; + reason: string; + signature: string | null; + payload: string | null; + }; + /** Format: uri */ + html_url: string; + }; + /** + * Git Reference + * @description Git references within a repository + */ + "git-ref": { + ref: string; + node_id: string; + /** Format: uri */ + url: string; + object: { + type: string; + /** + * @description SHA for the reference + * @example 7638417db6d59f3c431d3e1f261cc637155684cd + */ + sha: string; + /** Format: uri */ + url: string; + }; + }; + /** + * Git Tag + * @description Metadata for a Git tag + */ + "git-tag": { + /** @example MDM6VGFnOTQwYmQzMzYyNDhlZmFlMGY5ZWU1YmM3YjJkNWM5ODU4ODdiMTZhYw== */ + node_id: string; + /** + * @description Name of the tag + * @example v0.0.1 + */ + tag: string; + /** @example 940bd336248efae0f9ee5bc7b2d5c985887b16ac */ + sha: string; + /** + * Format: uri + * @description URL for the tag + * @example https://api.github.com/repositories/42/git/tags/940bd336248efae0f9ee5bc7b2d5c985887b16ac + */ + url: string; + /** + * @description Message describing the purpose of the tag + * @example Initial public release + */ + message: string; + tagger: { + date: string; + email: string; + name: string; + }; + object: 
{ + sha: string; + type: string; + /** Format: uri */ + url: string; + }; + verification?: components["schemas"]["verification"]; + }; + /** + * Git Tree + * @description The hierarchy between files in a Git repository. + */ + "git-tree": { + sha: string; + /** Format: uri */ + url: string; + truncated: boolean; + /** + * @description Objects specifying a tree structure + * @example [ + * { + * "path": "file.rb", + * "mode": "100644", + * "type": "blob", + * "size": 30, + * "sha": "44b4fc6d56897b048c772eb4087f854f46256132", + * "url": "https://api.github.com/repos/octocat/Hello-World/git/blobs/44b4fc6d56897b048c772eb4087f854f46256132", + * "properties": { + * "path": { + * "type": "string" + * }, + * "mode": { + * "type": "string" + * }, + * "type": { + * "type": "string" + * }, + * "size": { + * "type": "integer" + * }, + * "sha": { + * "type": "string" + * }, + * "url": { + * "type": "string" + * } + * }, + * "required": [ + * "path", + * "mode", + * "type", + * "sha", + * "url", + * "size" + * ] + * } + * ] + */ + tree: { + /** @example test/file.rb */ + path?: string; + /** @example 040000 */ + mode?: string; + /** @example tree */ + type?: string; + /** @example 23f6827669e43831def8a7ad935069c8bd418261 */ + sha?: string; + /** @example 12 */ + size?: number; + /** @example https://api.github.com/repos/owner-482f3203ecf01f67e9deb18e/BBB_Private_Repo/git/blobs/23f6827669e43831def8a7ad935069c8bd418261 */ + url?: string; + }[]; + }; + /** Hook Response */ + "hook-response": { + code: number | null; + status: string | null; + message: string | null; + }; + /** + * Webhook + * @description Webhooks for repositories. + */ + hook: { + type: string; + /** + * @description Unique identifier of the webhook. + * @example 42 + */ + id: number; + /** + * @description The name of a valid service, use 'web' for a webhook. + * @example web + */ + name: string; + /** + * @description Determines whether the hook is actually triggered on pushes. + * @example true + */ + active: boolean; + /** + * @description Determines what events the hook is triggered for. Default: ['push']. + * @example [ + * "push", + * "pull_request" + * ] + */ + events: string[]; + config: { + /** @example "foo@bar.com" */ + email?: string; + /** @example "foo" */ + password?: string; + /** @example "roomer" */ + room?: string; + /** @example "foo" */ + subdomain?: string; + url?: components["schemas"]["webhook-config-url"]; + insecure_ssl?: components["schemas"]["webhook-config-insecure-ssl"]; + content_type?: components["schemas"]["webhook-config-content-type"]; + /** @example "sha256" */ + digest?: string; + secret?: components["schemas"]["webhook-config-secret"]; + /** @example "abc" */ + token?: string; + }; + /** + * Format: date-time + * @example 2011-09-06T20:39:23Z + */ + updated_at: string; + /** + * Format: date-time + * @example 2011-09-06T17:26:27Z + */ + created_at: string; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/hooks/1 + */ + url: string; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/hooks/1/test + */ + test_url: string; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/hooks/1/pings + */ + ping_url: string; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/hooks/1/deliveries + */ + deliveries_url?: string; + last_response: components["schemas"]["hook-response"]; + }; + /** + * Import + * @description A repository import from an external source. 
+ */ + import: { + vcs: string | null; + use_lfs?: boolean; + /** @description The URL of the originating repository. */ + vcs_url: string; + svc_root?: string; + tfvc_project?: string; + /** @enum {string} */ + status: + | "auth" + | "error" + | "none" + | "detecting" + | "choose" + | "auth_failed" + | "importing" + | "mapping" + | "waiting_to_push" + | "pushing" + | "complete" + | "setup" + | "unknown" + | "detection_found_multiple" + | "detection_found_nothing" + | "detection_needs_auth"; + status_text?: string | null; + failed_step?: string | null; + error_message?: string | null; + import_percent?: number | null; + commit_count?: number | null; + push_percent?: number | null; + has_large_files?: boolean; + large_files_size?: number; + large_files_count?: number; + project_choices?: { + vcs?: string; + tfvc_project?: string; + human_name?: string; + }[]; + message?: string; + authors_count?: number | null; + /** Format: uri */ + url: string; + /** Format: uri */ + html_url: string; + /** Format: uri */ + authors_url: string; + /** Format: uri */ + repository_url: string; + svn_root?: string; + }; + /** + * Porter Author + * @description Porter Author + */ + "porter-author": { + id: number; + remote_id: string; + remote_name: string; + email: string; + name: string; + /** Format: uri */ + url: string; + /** Format: uri */ + import_url: string; + }; + /** + * Porter Large File + * @description Porter Large File + */ + "porter-large-file": { + ref_name: string; + path: string; + oid: string; + size: number; + }; + /** + * Issue + * @description Issues are a great way to keep track of tasks, enhancements, and bugs for your projects. + */ + "nullable-issue": { + id: number; + node_id: string; + /** + * Format: uri + * @description URL for the issue + * @example https://api.github.com/repositories/42/issues/1 + */ + url: string; + /** Format: uri */ + repository_url: string; + labels_url: string; + /** Format: uri */ + comments_url: string; + /** Format: uri */ + events_url: string; + /** Format: uri */ + html_url: string; + /** + * @description Number uniquely identifying the issue within its repository + * @example 42 + */ + number: number; + /** + * @description State of the issue; either 'open' or 'closed' + * @example open + */ + state: string; + /** + * @description The reason for the current state + * @example not_planned + */ + state_reason?: string | null; + /** + * @description Title of the issue + * @example Widget creation fails in Safari on OS X 10.8 + */ + title: string; + /** + * @description Contents of the issue + * @example It looks like the new widget form is broken on Safari. When I try and create the widget, Safari crashes. This is reproducible on 10.8, but not 10.9. Maybe a browser bug? 
+ */ + body?: string | null; + user: components["schemas"]["nullable-simple-user"]; + /** + * @description Labels to associate with this issue; pass one or more label names to replace the set of labels on this issue; send an empty array to clear all labels from the issue; note that the labels are silently dropped for users without push access to the repository + * @example [ + * "bug", + * "registration" + * ] + */ + labels: ( + | string + | { + /** Format: int64 */ + id?: number; + node_id?: string; + /** Format: uri */ + url?: string; + name?: string; + description?: string | null; + color?: string | null; + default?: boolean; + } + )[]; + assignee: components["schemas"]["nullable-simple-user"]; + assignees?: components["schemas"]["simple-user"][] | null; + milestone: components["schemas"]["nullable-milestone"]; + locked: boolean; + active_lock_reason?: string | null; + comments: number; + pull_request?: { + /** Format: date-time */ + merged_at?: string | null; + /** Format: uri */ + diff_url: string | null; + /** Format: uri */ + html_url: string | null; + /** Format: uri */ + patch_url: string | null; + /** Format: uri */ + url: string | null; + }; + /** Format: date-time */ + closed_at: string | null; + /** Format: date-time */ + created_at: string; + /** Format: date-time */ + updated_at: string; + draft?: boolean; + closed_by?: components["schemas"]["nullable-simple-user"]; + body_html?: string; + body_text?: string; + /** Format: uri */ + timeline_url?: string; + repository?: components["schemas"]["repository"]; + performed_via_github_app?: components["schemas"]["nullable-integration"]; + author_association: components["schemas"]["author-association"]; + reactions?: components["schemas"]["reaction-rollup"]; + } | null; + /** + * Issue Event Label + * @description Issue Event Label + */ + "issue-event-label": { + name: string | null; + color: string | null; + }; + /** Issue Event Dismissed Review */ + "issue-event-dismissed-review": { + state: string; + review_id: number; + dismissal_message: string | null; + dismissal_commit_id?: string | null; + }; + /** + * Issue Event Milestone + * @description Issue Event Milestone + */ + "issue-event-milestone": { + title: string; + }; + /** + * Issue Event Project Card + * @description Issue Event Project Card + */ + "issue-event-project-card": { + /** Format: uri */ + url: string; + id: number; + /** Format: uri */ + project_url: string; + project_id: number; + column_name: string; + previous_column_name?: string; + }; + /** + * Issue Event Rename + * @description Issue Event Rename + */ + "issue-event-rename": { + from: string; + to: string; + }; + /** + * Issue Event + * @description Issue Event + */ + "issue-event": { + /** @example 1 */ + id: number; + /** @example MDEwOklzc3VlRXZlbnQx */ + node_id: string; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/issues/events/1 + */ + url: string; + actor: components["schemas"]["nullable-simple-user"]; + /** @example closed */ + event: string; + /** @example 6dcb09b5b57875f334f61aebed695e2e4193db5e */ + commit_id: string | null; + /** @example https://api.github.com/repos/octocat/Hello-World/commits/6dcb09b5b57875f334f61aebed695e2e4193db5e */ + commit_url: string | null; + /** + * Format: date-time + * @example 2011-04-14T16:00:49Z + */ + created_at: string; + issue?: components["schemas"]["nullable-issue"]; + label?: components["schemas"]["issue-event-label"]; + assignee?: components["schemas"]["nullable-simple-user"]; + assigner?: 
components["schemas"]["nullable-simple-user"]; + review_requester?: components["schemas"]["nullable-simple-user"]; + requested_reviewer?: components["schemas"]["nullable-simple-user"]; + requested_team?: components["schemas"]["team"]; + dismissed_review?: components["schemas"]["issue-event-dismissed-review"]; + milestone?: components["schemas"]["issue-event-milestone"]; + project_card?: components["schemas"]["issue-event-project-card"]; + rename?: components["schemas"]["issue-event-rename"]; + author_association?: components["schemas"]["author-association"]; + lock_reason?: string | null; + performed_via_github_app?: components["schemas"]["nullable-integration"]; + }; + /** + * Labeled Issue Event + * @description Labeled Issue Event + */ + "labeled-issue-event": { + id: number; + node_id: string; + url: string; + actor: components["schemas"]["simple-user"]; + event: string; + commit_id: string | null; + commit_url: string | null; + created_at: string; + performed_via_github_app: components["schemas"]["nullable-integration"]; + label: { + name: string; + color: string; + }; + }; + /** + * Unlabeled Issue Event + * @description Unlabeled Issue Event + */ + "unlabeled-issue-event": { + id: number; + node_id: string; + url: string; + actor: components["schemas"]["simple-user"]; + event: string; + commit_id: string | null; + commit_url: string | null; + created_at: string; + performed_via_github_app: components["schemas"]["nullable-integration"]; + label: { + name: string; + color: string; + }; + }; + /** + * Assigned Issue Event + * @description Assigned Issue Event + */ + "assigned-issue-event": { + id: number; + node_id: string; + url: string; + actor: components["schemas"]["simple-user"]; + event: string; + commit_id: string | null; + commit_url: string | null; + created_at: string; + performed_via_github_app: components["schemas"]["integration"]; + assignee: components["schemas"]["simple-user"]; + assigner: components["schemas"]["simple-user"]; + }; + /** + * Unassigned Issue Event + * @description Unassigned Issue Event + */ + "unassigned-issue-event": { + id: number; + node_id: string; + url: string; + actor: components["schemas"]["simple-user"]; + event: string; + commit_id: string | null; + commit_url: string | null; + created_at: string; + performed_via_github_app: components["schemas"]["nullable-integration"]; + assignee: components["schemas"]["simple-user"]; + assigner: components["schemas"]["simple-user"]; + }; + /** + * Milestoned Issue Event + * @description Milestoned Issue Event + */ + "milestoned-issue-event": { + id: number; + node_id: string; + url: string; + actor: components["schemas"]["simple-user"]; + event: string; + commit_id: string | null; + commit_url: string | null; + created_at: string; + performed_via_github_app: components["schemas"]["nullable-integration"]; + milestone: { + title: string; + }; + }; + /** + * Demilestoned Issue Event + * @description Demilestoned Issue Event + */ + "demilestoned-issue-event": { + id: number; + node_id: string; + url: string; + actor: components["schemas"]["simple-user"]; + event: string; + commit_id: string | null; + commit_url: string | null; + created_at: string; + performed_via_github_app: components["schemas"]["nullable-integration"]; + milestone: { + title: string; + }; + }; + /** + * Renamed Issue Event + * @description Renamed Issue Event + */ + "renamed-issue-event": { + id: number; + node_id: string; + url: string; + actor: components["schemas"]["simple-user"]; + event: string; + commit_id: string | null; + commit_url: 
string | null; + created_at: string; + performed_via_github_app: components["schemas"]["nullable-integration"]; + rename: { + from: string; + to: string; + }; + }; + /** + * Review Requested Issue Event + * @description Review Requested Issue Event + */ + "review-requested-issue-event": { + id: number; + node_id: string; + url: string; + actor: components["schemas"]["simple-user"]; + event: string; + commit_id: string | null; + commit_url: string | null; + created_at: string; + performed_via_github_app: components["schemas"]["nullable-integration"]; + review_requester: components["schemas"]["simple-user"]; + requested_team?: components["schemas"]["team"]; + requested_reviewer?: components["schemas"]["simple-user"]; + }; + /** + * Review Request Removed Issue Event + * @description Review Request Removed Issue Event + */ + "review-request-removed-issue-event": { + id: number; + node_id: string; + url: string; + actor: components["schemas"]["simple-user"]; + event: string; + commit_id: string | null; + commit_url: string | null; + created_at: string; + performed_via_github_app: components["schemas"]["nullable-integration"]; + review_requester: components["schemas"]["simple-user"]; + requested_team?: components["schemas"]["team"]; + requested_reviewer?: components["schemas"]["simple-user"]; + }; + /** + * Review Dismissed Issue Event + * @description Review Dismissed Issue Event + */ + "review-dismissed-issue-event": { + id: number; + node_id: string; + url: string; + actor: components["schemas"]["simple-user"]; + event: string; + commit_id: string | null; + commit_url: string | null; + created_at: string; + performed_via_github_app: components["schemas"]["nullable-integration"]; + dismissed_review: { + state: string; + review_id: number; + dismissal_message: string | null; + dismissal_commit_id?: string; + }; + }; + /** + * Locked Issue Event + * @description Locked Issue Event + */ + "locked-issue-event": { + id: number; + node_id: string; + url: string; + actor: components["schemas"]["simple-user"]; + event: string; + commit_id: string | null; + commit_url: string | null; + created_at: string; + performed_via_github_app: components["schemas"]["nullable-integration"]; + /** @example "off-topic" */ + lock_reason: string | null; + }; + /** + * Added to Project Issue Event + * @description Added to Project Issue Event + */ + "added-to-project-issue-event": { + id: number; + node_id: string; + url: string; + actor: components["schemas"]["simple-user"]; + event: string; + commit_id: string | null; + commit_url: string | null; + created_at: string; + performed_via_github_app: components["schemas"]["nullable-integration"]; + project_card?: { + id: number; + /** Format: uri */ + url: string; + project_id: number; + /** Format: uri */ + project_url: string; + column_name: string; + previous_column_name?: string; + }; + }; + /** + * Moved Column in Project Issue Event + * @description Moved Column in Project Issue Event + */ + "moved-column-in-project-issue-event": { + id: number; + node_id: string; + url: string; + actor: components["schemas"]["simple-user"]; + event: string; + commit_id: string | null; + commit_url: string | null; + created_at: string; + performed_via_github_app: components["schemas"]["nullable-integration"]; + project_card?: { + id: number; + /** Format: uri */ + url: string; + project_id: number; + /** Format: uri */ + project_url: string; + column_name: string; + previous_column_name?: string; + }; + }; + /** + * Removed from Project Issue Event + * @description Removed from 
Project Issue Event + */ + "removed-from-project-issue-event": { + id: number; + node_id: string; + url: string; + actor: components["schemas"]["simple-user"]; + event: string; + commit_id: string | null; + commit_url: string | null; + created_at: string; + performed_via_github_app: components["schemas"]["nullable-integration"]; + project_card?: { + id: number; + /** Format: uri */ + url: string; + project_id: number; + /** Format: uri */ + project_url: string; + column_name: string; + previous_column_name?: string; + }; + }; + /** + * Converted Note to Issue Issue Event + * @description Converted Note to Issue Issue Event + */ + "converted-note-to-issue-issue-event": { + id: number; + node_id: string; + url: string; + actor: components["schemas"]["simple-user"]; + event: string; + commit_id: string | null; + commit_url: string | null; + created_at: string; + performed_via_github_app: components["schemas"]["integration"]; + project_card?: { + id: number; + /** Format: uri */ + url: string; + project_id: number; + /** Format: uri */ + project_url: string; + column_name: string; + previous_column_name?: string; + }; + }; + /** + * Issue Event for Issue + * @description Issue Event for Issue + */ + "issue-event-for-issue": Partial< + components["schemas"]["labeled-issue-event"] + > & + Partial<components["schemas"]["unlabeled-issue-event"]> & + Partial<components["schemas"]["assigned-issue-event"]> & + Partial<components["schemas"]["unassigned-issue-event"]> & + Partial<components["schemas"]["milestoned-issue-event"]> & + Partial<components["schemas"]["demilestoned-issue-event"]> & + Partial<components["schemas"]["renamed-issue-event"]> & + Partial<components["schemas"]["review-requested-issue-event"]> & + Partial<components["schemas"]["review-request-removed-issue-event"]> & + Partial<components["schemas"]["review-dismissed-issue-event"]> & + Partial<components["schemas"]["locked-issue-event"]> & + Partial<components["schemas"]["added-to-project-issue-event"]> & + Partial<components["schemas"]["moved-column-in-project-issue-event"]> & + Partial<components["schemas"]["removed-from-project-issue-event"]> & + Partial<components["schemas"]["converted-note-to-issue-issue-event"]>; + /** + * Label + * @description Color-coded labels help you categorize and filter your issues (just like labels in Gmail). + */ + label: { + /** + * Format: int64 + * @example 208045946 + */ + id: number; + /** @example MDU6TGFiZWwyMDgwNDU5NDY= */ + node_id: string; + /** + * Format: uri + * @description URL for the label + * @example https://api.github.com/repositories/42/labels/bug + */ + url: string; + /** + * @description The name of the label. + * @example bug + */ + name: string; + /** @example Something isn't working */ + description: string | null; + /** + * @description 6-character hex code, without the leading #, identifying the color + * @example FFFFFF + */ + color: string; + /** @example true */ + default: boolean; + }; + /** + * Timeline Comment Event + * @description Timeline Comment Event + */ + "timeline-comment-event": { + event: string; + actor: components["schemas"]["simple-user"]; + /** + * @description Unique identifier of the issue comment + * @example 42 + */ + id: number; + node_id: string; + /** + * Format: uri + * @description URL for the issue comment + * @example https://api.github.com/repositories/42/issues/comments/1 + */ + url: string; + /** + * @description Contents of the issue comment + * @example What version of Safari were you using when you observed this bug? 
+ */ + body?: string; + body_text?: string; + body_html?: string; + /** Format: uri */ + html_url: string; + user: components["schemas"]["simple-user"]; + /** + * Format: date-time + * @example 2011-04-14T16:00:49Z + */ + created_at: string; + /** + * Format: date-time + * @example 2011-04-14T16:00:49Z + */ + updated_at: string; + /** Format: uri */ + issue_url: string; + author_association: components["schemas"]["author-association"]; + performed_via_github_app?: components["schemas"]["nullable-integration"]; + reactions?: components["schemas"]["reaction-rollup"]; + }; + /** + * Timeline Cross Referenced Event + * @description Timeline Cross Referenced Event + */ + "timeline-cross-referenced-event": { + event: string; + actor?: components["schemas"]["simple-user"]; + /** Format: date-time */ + created_at: string; + /** Format: date-time */ + updated_at: string; + source: { + type?: string; + issue?: components["schemas"]["issue"]; + }; + }; + /** + * Timeline Committed Event + * @description Timeline Committed Event + */ + "timeline-committed-event": { + event?: string; + /** + * @description SHA for the commit + * @example 7638417db6d59f3c431d3e1f261cc637155684cd + */ + sha: string; + node_id: string; + /** Format: uri */ + url: string; + /** @description Identifying information for the git-user */ + author: { + /** + * Format: date-time + * @description Timestamp of the commit + * @example 2014-08-09T08:02:04+12:00 + */ + date: string; + /** + * @description Git email address of the user + * @example monalisa.octocat@example.com + */ + email: string; + /** + * @description Name of the git user + * @example Monalisa Octocat + */ + name: string; + }; + /** @description Identifying information for the git-user */ + committer: { + /** + * Format: date-time + * @description Timestamp of the commit + * @example 2014-08-09T08:02:04+12:00 + */ + date: string; + /** + * @description Git email address of the user + * @example monalisa.octocat@example.com + */ + email: string; + /** + * @description Name of the git user + * @example Monalisa Octocat + */ + name: string; + }; + /** + * @description Message describing the purpose of the commit + * @example Fix #42 + */ + message: string; + tree: { + /** + * @description SHA for the commit + * @example 7638417db6d59f3c431d3e1f261cc637155684cd + */ + sha: string; + /** Format: uri */ + url: string; + }; + parents: { + /** + * @description SHA for the commit + * @example 7638417db6d59f3c431d3e1f261cc637155684cd + */ + sha: string; + /** Format: uri */ + url: string; + /** Format: uri */ + html_url: string; + }[]; + verification: { + verified: boolean; + reason: string; + signature: string | null; + payload: string | null; + }; + /** Format: uri */ + html_url: string; + }; + /** + * Timeline Reviewed Event + * @description Timeline Reviewed Event + */ + "timeline-reviewed-event": { + event: string; + /** + * @description Unique identifier of the review + * @example 42 + */ + id: number; + /** @example MDE3OlB1bGxSZXF1ZXN0UmV2aWV3ODA= */ + node_id: string; + user: components["schemas"]["simple-user"]; + /** + * @description The text of the review. + * @example This looks great. 
+ */ + body: string | null; + /** @example CHANGES_REQUESTED */ + state: string; + /** + * Format: uri + * @example https://github.com/octocat/Hello-World/pull/12#pullrequestreview-80 + */ + html_url: string; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/pulls/12 + */ + pull_request_url: string; + _links: { + html: { + href: string; + }; + pull_request: { + href: string; + }; + }; + /** Format: date-time */ + submitted_at?: string; + /** + * @description A commit SHA for the review. + * @example 54bb654c9e6025347f57900a4a5c2313a96b8035 + */ + commit_id: string; + body_html?: string; + body_text?: string; + author_association: components["schemas"]["author-association"]; + }; + /** + * Pull Request Review Comment + * @description Pull Request Review Comments are comments on a portion of the Pull Request's diff. + */ + "pull-request-review-comment": { + /** + * @description URL for the pull request review comment + * @example https://api.github.com/repos/octocat/Hello-World/pulls/comments/1 + */ + url: string; + /** + * @description The ID of the pull request review to which the comment belongs. + * @example 42 + */ + pull_request_review_id: number | null; + /** + * @description The ID of the pull request review comment. + * @example 1 + */ + id: number; + /** + * @description The node ID of the pull request review comment. + * @example MDI0OlB1bGxSZXF1ZXN0UmV2aWV3Q29tbWVudDEw + */ + node_id: string; + /** + * @description The diff of the line that the comment refers to. + * @example @@ -16,33 +16,40 @@ public class Connection : IConnection... + */ + diff_hunk: string; + /** + * @description The relative path of the file to which the comment applies. + * @example config/database.yaml + */ + path: string; + /** + * @description The line index in the diff to which the comment applies. This field is deprecated; use `line` instead. + * @example 1 + */ + position: number; + /** + * @description The index of the original line in the diff to which the comment applies. This field is deprecated; use `original_line` instead. + * @example 4 + */ + original_position: number; + /** + * @description The SHA of the commit to which the comment applies. + * @example 6dcb09b5b57875f334f61aebed695e2e4193db5e + */ + commit_id: string; + /** + * @description The SHA of the original commit to which the comment applies. + * @example 9c48853fa3dc5c1c3d6f1f1cd1f2743e72652840 + */ + original_commit_id: string; + /** + * @description The comment ID to reply to. + * @example 8 + */ + in_reply_to_id?: number; + user: components["schemas"]["simple-user"]; + /** + * @description The text of the comment. + * @example We should probably include a check for null values here. + */ + body: string; + /** + * Format: date-time + * @example 2011-04-14T16:00:49Z + */ + created_at: string; + /** + * Format: date-time + * @example 2011-04-14T16:00:49Z + */ + updated_at: string; + /** + * Format: uri + * @description HTML URL for the pull request review comment. + * @example https://github.com/octocat/Hello-World/pull/1#discussion-diff-1 + */ + html_url: string; + /** + * Format: uri + * @description URL for the pull request that the review comment belongs to. 
+ * @example https://api.github.com/repos/octocat/Hello-World/pulls/1 + */ + pull_request_url: string; + author_association: components["schemas"]["author-association"]; + _links: { + self: { + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/pulls/comments/1 + */ + href: string; + }; + html: { + /** + * Format: uri + * @example https://github.com/octocat/Hello-World/pull/1#discussion-diff-1 + */ + href: string; + }; + pull_request: { + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/pulls/1 + */ + href: string; + }; + }; + /** + * @description The first line of the range for a multi-line comment. + * @example 2 + */ + start_line?: number | null; + /** + * @description The first line of the range for a multi-line comment. + * @example 2 + */ + original_start_line?: number | null; + /** + * @description The side of the first line of the range for a multi-line comment. + * @default RIGHT + * @enum {string|null} + */ + start_side?: ("LEFT" | "RIGHT") | null; + /** + * @description The line of the blob to which the comment applies. The last line of the range for a multi-line comment + * @example 2 + */ + line?: number; + /** + * @description The line of the blob to which the comment applies. The last line of the range for a multi-line comment + * @example 2 + */ + original_line?: number; + /** + * @description The side of the diff to which the comment applies. The side of the last line of the range for a multi-line comment + * @default RIGHT + * @enum {string} + */ + side?: "LEFT" | "RIGHT"; + reactions?: components["schemas"]["reaction-rollup"]; + /** @example "<p>comment body</p>" */ + body_html?: string; + /** @example "comment body" */ + body_text?: string; + }; + /** + * Timeline Line Commented Event + * @description Timeline Line Commented Event + */ + "timeline-line-commented-event": { + event?: string; + node_id?: string; + comments?: components["schemas"]["pull-request-review-comment"][]; + }; + /** + * Timeline Commit Commented Event + * @description Timeline Commit Commented Event + */ + "timeline-commit-commented-event": { + event?: string; + node_id?: string; + commit_id?: string; + comments?: components["schemas"]["commit-comment"][]; + }; + /** + * Timeline Assigned Issue Event + * @description Timeline Assigned Issue Event + */ + "timeline-assigned-issue-event": { + id: number; + node_id: string; + url: string; + actor: components["schemas"]["simple-user"]; + event: string; + commit_id: string | null; + commit_url: string | null; + created_at: string; + performed_via_github_app: components["schemas"]["nullable-integration"]; + assignee: components["schemas"]["simple-user"]; + }; + /** + * Timeline Unassigned Issue Event + * @description Timeline Unassigned Issue Event + */ + "timeline-unassigned-issue-event": { + id: number; + node_id: string; + url: string; + actor: components["schemas"]["simple-user"]; + event: string; + commit_id: string | null; + commit_url: string | null; + created_at: string; + performed_via_github_app: components["schemas"]["nullable-integration"]; + assignee: components["schemas"]["simple-user"]; + }; + /** + * State Change Issue Event + * @description State Change Issue Event + */ + "state-change-issue-event": { + id: number; + node_id: string; + url: string; + actor: components["schemas"]["simple-user"]; + event: string; + commit_id: string | null; + commit_url: string | null; + created_at: string; + performed_via_github_app: components["schemas"]["nullable-integration"]; + state_reason?: string | null; + }; + /** + * Timeline Event + * @description Timeline Event + */ + "timeline-issue-events": Partial< + components["schemas"]["labeled-issue-event"] + > & + Partial<components["schemas"]["unlabeled-issue-event"]> & + Partial<components["schemas"]["milestoned-issue-event"]> & + Partial<components["schemas"]["demilestoned-issue-event"]> & + Partial<components["schemas"]["renamed-issue-event"]> & + Partial<components["schemas"]["review-requested-issue-event"]> & + Partial<components["schemas"]["review-request-removed-issue-event"]> & + Partial<components["schemas"]["review-dismissed-issue-event"]> & + Partial<components["schemas"]["locked-issue-event"]> & + Partial<components["schemas"]["added-to-project-issue-event"]> & + Partial<components["schemas"]["moved-column-in-project-issue-event"]> & + Partial<components["schemas"]["removed-from-project-issue-event"]> & + Partial<components["schemas"]["converted-note-to-issue-issue-event"]> & + Partial<components["schemas"]["timeline-comment-event"]> & + Partial<components["schemas"]["timeline-cross-referenced-event"]> & + Partial<components["schemas"]["timeline-committed-event"]> & + Partial<components["schemas"]["timeline-reviewed-event"]> & + Partial<components["schemas"]["timeline-line-commented-event"]> & + Partial<components["schemas"]["timeline-commit-commented-event"]> & + Partial<components["schemas"]["timeline-assigned-issue-event"]> & + Partial<components["schemas"]["timeline-unassigned-issue-event"]> & + Partial<components["schemas"]["state-change-issue-event"]>; + /** + * Deploy Key + * @description An SSH key granting access to a single repository. 
+ */ + "deploy-key": { + id: number; + key: string; + url: string; + title: string; + verified: boolean; + created_at: string; + read_only: boolean; + }; + /** + * Language + * @description Language + */ + language: { [key: string]: number }; + /** + * License Content + * @description License Content + */ + "license-content": { + name: string; + path: string; + sha: string; + size: number; + /** Format: uri */ + url: string; + /** Format: uri */ + html_url: string | null; + /** Format: uri */ + git_url: string | null; + /** Format: uri */ + download_url: string | null; + type: string; + content: string; + encoding: string; + _links: { + /** Format: uri */ + git: string | null; + /** Format: uri */ + html: string | null; + /** Format: uri */ + self: string; + }; + license: components["schemas"]["nullable-license-simple"]; + }; + /** + * Merged upstream + * @description Results of a successful merge upstream request + */ + "merged-upstream": { + message?: string; + /** @enum {string} */ + merge_type?: "merge" | "fast-forward" | "none"; + base_branch?: string; + }; + /** + * Milestone + * @description A collection of related issues and pull requests. + */ + milestone: { + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/milestones/1 + */ + url: string; + /** + * Format: uri + * @example https://github.com/octocat/Hello-World/milestones/v1.0 + */ + html_url: string; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/milestones/1/labels + */ + labels_url: string; + /** @example 1002604 */ + id: number; + /** @example MDk6TWlsZXN0b25lMTAwMjYwNA== */ + node_id: string; + /** + * @description The number of the milestone. + * @example 42 + */ + number: number; + /** + * @description The state of the milestone. + * @default open + * @example open + * @enum {string} + */ + state: "open" | "closed"; + /** + * @description The title of the milestone. + * @example v1.0 + */ + title: string; + /** @example Tracking milestone for version 1.0 */ + description: string | null; + creator: components["schemas"]["nullable-simple-user"]; + /** @example 4 */ + open_issues: number; + /** @example 8 */ + closed_issues: number; + /** + * Format: date-time + * @example 2011-04-10T20:09:31Z + */ + created_at: string; + /** + * Format: date-time + * @example 2014-03-03T18:58:10Z + */ + updated_at: string; + /** + * Format: date-time + * @example 2013-02-12T13:22:01Z + */ + closed_at: string | null; + /** + * Format: date-time + * @example 2012-10-09T23:39:01Z + */ + due_on: string | null; + }; + /** Pages Source Hash */ + "pages-source-hash": { + branch: string; + path: string; + }; + /** Pages Https Certificate */ + "pages-https-certificate": { + /** + * @example approved + * @enum {string} + */ + state: + | "new" + | "authorization_created" + | "authorization_pending" + | "authorized" + | "authorization_revoked" + | "issued" + | "uploaded" + | "approved" + | "errored" + | "bad_authz" + | "destroy_pending" + | "dns_changed"; + /** @example Certificate is approved */ + description: string; + /** + * @description Array of the domain set and its alternate name (if it is configured) + * @example [ + * "example.com", + * "www.example.com" + * ] + */ + domains: string[]; + /** Format: date */ + expires_at?: string; + }; + /** + * GitHub Pages + * @description The configuration for GitHub Pages for a repository. + */ + page: { + /** + * Format: uri + * @description The API address for accessing this Page resource. 
+ * @example https://api.github.com/repos/github/hello-world/pages + */ + url: string; + /** + * @description The status of the most recent build of the Page. + * @example built + * @enum {string|null} + */ + status: ("built" | "building" | "errored") | null; + /** + * @description The Pages site's custom domain + * @example example.com + */ + cname: string | null; + /** + * @description The state if the domain is verified + * @example pending + * @enum {string|null} + */ + protected_domain_state?: ("pending" | "verified" | "unverified") | null; + /** + * Format: date-time + * @description The timestamp when a pending domain becomes unverified. + */ + pending_domain_unverified_at?: string | null; + /** + * @description Whether the Page has a custom 404 page. + * @default false + * @example false + */ + custom_404: boolean; + /** + * Format: uri + * @description The web address the Page can be accessed from. + * @example https://example.com + */ + html_url?: string; + /** + * @description The process in which the Page will be built. + * @example legacy + * @enum {string|null} + */ + build_type?: ("legacy" | "workflow") | null; + source?: components["schemas"]["pages-source-hash"]; + /** + * @description Whether the GitHub Pages site is publicly visible. If set to `true`, the site is accessible to anyone on the internet. If set to `false`, the site will only be accessible to users who have at least `read` access to the repository that published the site. + * @example true + */ + public: boolean; + https_certificate?: components["schemas"]["pages-https-certificate"]; + /** + * @description Whether https is enabled on the domain + * @example true + */ + https_enforced?: boolean; + }; + /** + * Page Build + * @description Page Build + */ + "page-build": { + /** Format: uri */ + url: string; + status: string; + error: { + message: string | null; + }; + pusher: components["schemas"]["nullable-simple-user"]; + commit: string; + duration: number; + /** Format: date-time */ + created_at: string; + /** Format: date-time */ + updated_at: string; + }; + /** + * Page Build Status + * @description Page Build Status + */ + "page-build-status": { + /** + * Format: uri + * @example https://api.github.com/repos/github/hello-world/pages/builds/latest + */ + url: string; + /** @example queued */ + status: string; + }; + /** + * Pages Health Check Status + * @description Pages Health Check Status + */ + "pages-health-check": { + domain?: { + host?: string; + uri?: string; + nameservers?: string; + dns_resolves?: boolean; + is_proxied?: boolean | null; + is_cloudflare_ip?: boolean | null; + is_fastly_ip?: boolean | null; + is_old_ip_address?: boolean | null; + is_a_record?: boolean | null; + has_cname_record?: boolean | null; + has_mx_records_present?: boolean | null; + is_valid_domain?: boolean; + is_apex_domain?: boolean; + should_be_a_record?: boolean | null; + is_cname_to_github_user_domain?: boolean | null; + is_cname_to_pages_dot_github_dot_com?: boolean | null; + is_cname_to_fastly?: boolean | null; + is_pointed_to_github_pages_ip?: boolean | null; + is_non_github_pages_ip_present?: boolean | null; + is_pages_domain?: boolean; + is_served_by_pages?: boolean | null; + is_valid?: boolean; + reason?: string | null; + responds_to_https?: boolean; + enforces_https?: boolean; + https_error?: string | null; + is_https_eligible?: boolean | null; + caa_error?: string | null; + }; + alt_domain?: { + host?: string; + uri?: string; + nameservers?: string; + dns_resolves?: boolean; + is_proxied?: boolean | null; + 
is_cloudflare_ip?: boolean | null; + is_fastly_ip?: boolean | null; + is_old_ip_address?: boolean | null; + is_a_record?: boolean | null; + has_cname_record?: boolean | null; + has_mx_records_present?: boolean | null; + is_valid_domain?: boolean; + is_apex_domain?: boolean; + should_be_a_record?: boolean | null; + is_cname_to_github_user_domain?: boolean | null; + is_cname_to_pages_dot_github_dot_com?: boolean | null; + is_cname_to_fastly?: boolean | null; + is_pointed_to_github_pages_ip?: boolean | null; + is_non_github_pages_ip_present?: boolean | null; + is_pages_domain?: boolean; + is_served_by_pages?: boolean | null; + is_valid?: boolean; + reason?: string | null; + responds_to_https?: boolean; + enforces_https?: boolean; + https_error?: string | null; + is_https_eligible?: boolean | null; + caa_error?: string | null; + } | null; + }; + /** + * Team Simple + * @description Groups of organization members that gives permissions on specified repositories. + */ + "team-simple": { + /** + * @description Unique identifier of the team + * @example 1 + */ + id: number; + /** @example MDQ6VGVhbTE= */ + node_id: string; + /** + * Format: uri + * @description URL for the team + * @example https://api.github.com/organizations/1/team/1 + */ + url: string; + /** @example https://api.github.com/organizations/1/team/1/members{/member} */ + members_url: string; + /** + * @description Name of the team + * @example Justice League + */ + name: string; + /** + * @description Description of the team + * @example A great team. + */ + description: string | null; + /** + * @description Permission that the team will have for its repositories + * @example admin + */ + permission: string; + /** + * @description The level of privacy this team should have + * @example closed + */ + privacy?: string; + /** + * Format: uri + * @example https://github.com/orgs/rails/teams/core + */ + html_url: string; + /** + * Format: uri + * @example https://api.github.com/organizations/1/team/1/repos + */ + repositories_url: string; + /** @example justice-league */ + slug: string; + /** + * @description Distinguished Name (DN) that team maps to within LDAP environment + * @example uid=example,ou=users,dc=github,dc=com + */ + ldap_dn?: string; + }; + /** + * Pull Request + * @description Pull requests let you tell others about changes you've pushed to a repository on GitHub. Once a pull request is sent, interested parties can review the set of changes, discuss potential modifications, and even push follow-up commits if necessary. 
+ */ + "pull-request": { + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/pulls/1347 + */ + url: string; + /** @example 1 */ + id: number; + /** @example MDExOlB1bGxSZXF1ZXN0MQ== */ + node_id: string; + /** + * Format: uri + * @example https://github.com/octocat/Hello-World/pull/1347 + */ + html_url: string; + /** + * Format: uri + * @example https://github.com/octocat/Hello-World/pull/1347.diff + */ + diff_url: string; + /** + * Format: uri + * @example https://github.com/octocat/Hello-World/pull/1347.patch + */ + patch_url: string; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/issues/1347 + */ + issue_url: string; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/pulls/1347/commits + */ + commits_url: string; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/pulls/1347/comments + */ + review_comments_url: string; + /** @example https://api.github.com/repos/octocat/Hello-World/pulls/comments{/number} */ + review_comment_url: string; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/issues/1347/comments + */ + comments_url: string; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/statuses/6dcb09b5b57875f334f61aebed695e2e4193db5e + */ + statuses_url: string; + /** + * @description Number uniquely identifying the pull request within its repository. + * @example 42 + */ + number: number; + /** + * @description State of this Pull Request. Either `open` or `closed`. + * @example open + * @enum {string} + */ + state: "open" | "closed"; + /** @example true */ + locked: boolean; + /** + * @description The title of the pull request. + * @example Amazing new feature + */ + title: string; + user: components["schemas"]["nullable-simple-user"]; + /** @example Please pull these awesome changes */ + body: string | null; + labels: { + /** Format: int64 */ + id: number; + node_id: string; + url: string; + name: string; + description: string | null; + color: string; + default: boolean; + }[]; + milestone: components["schemas"]["nullable-milestone"]; + /** @example too heated */ + active_lock_reason?: string | null; + /** + * Format: date-time + * @example 2011-01-26T19:01:12Z + */ + created_at: string; + /** + * Format: date-time + * @example 2011-01-26T19:01:12Z + */ + updated_at: string; + /** + * Format: date-time + * @example 2011-01-26T19:01:12Z + */ + closed_at: string | null; + /** + * Format: date-time + * @example 2011-01-26T19:01:12Z + */ + merged_at: string | null; + /** @example e5bd3914e2e596debea16f433f57875b5b90bcd6 */ + merge_commit_sha: string | null; + assignee: components["schemas"]["nullable-simple-user"]; + assignees?: components["schemas"]["simple-user"][] | null; + requested_reviewers?: components["schemas"]["simple-user"][] | null; + requested_teams?: components["schemas"]["team-simple"][] | null; + head: { + label: string; + ref: string; + repo: { + archive_url: string; + assignees_url: string; + blobs_url: string; + branches_url: string; + collaborators_url: string; + comments_url: string; + commits_url: string; + compare_url: string; + contents_url: string; + /** Format: uri */ + contributors_url: string; + /** Format: uri */ + deployments_url: string; + description: string | null; + /** Format: uri */ + downloads_url: string; + /** Format: uri */ + events_url: string; + fork: boolean; + /** Format: uri */ + forks_url: string; + full_name: string; + git_commits_url: string; 
+ git_refs_url: string; + git_tags_url: string; + /** Format: uri */ + hooks_url: string; + /** Format: uri */ + html_url: string; + id: number; + node_id: string; + issue_comment_url: string; + issue_events_url: string; + issues_url: string; + keys_url: string; + labels_url: string; + /** Format: uri */ + languages_url: string; + /** Format: uri */ + merges_url: string; + milestones_url: string; + name: string; + notifications_url: string; + owner: { + /** Format: uri */ + avatar_url: string; + events_url: string; + /** Format: uri */ + followers_url: string; + following_url: string; + gists_url: string; + gravatar_id: string | null; + /** Format: uri */ + html_url: string; + id: number; + node_id: string; + login: string; + /** Format: uri */ + organizations_url: string; + /** Format: uri */ + received_events_url: string; + /** Format: uri */ + repos_url: string; + site_admin: boolean; + starred_url: string; + /** Format: uri */ + subscriptions_url: string; + type: string; + /** Format: uri */ + url: string; + }; + private: boolean; + pulls_url: string; + releases_url: string; + /** Format: uri */ + stargazers_url: string; + statuses_url: string; + /** Format: uri */ + subscribers_url: string; + /** Format: uri */ + subscription_url: string; + /** Format: uri */ + tags_url: string; + /** Format: uri */ + teams_url: string; + trees_url: string; + /** Format: uri */ + url: string; + clone_url: string; + default_branch: string; + forks: number; + forks_count: number; + git_url: string; + has_downloads: boolean; + has_issues: boolean; + has_projects: boolean; + has_wiki: boolean; + has_pages: boolean; + /** Format: uri */ + homepage: string | null; + language: string | null; + master_branch?: string; + archived: boolean; + disabled: boolean; + /** @description The repository visibility: public, private, or internal. 
*/ + visibility?: string; + /** Format: uri */ + mirror_url: string | null; + open_issues: number; + open_issues_count: number; + permissions?: { + admin: boolean; + maintain?: boolean; + push: boolean; + triage?: boolean; + pull: boolean; + }; + temp_clone_token?: string; + allow_merge_commit?: boolean; + allow_squash_merge?: boolean; + allow_rebase_merge?: boolean; + license: { + key: string; + name: string; + /** Format: uri */ + url: string | null; + spdx_id: string | null; + node_id: string; + } | null; + /** Format: date-time */ + pushed_at: string; + size: number; + ssh_url: string; + stargazers_count: number; + /** Format: uri */ + svn_url: string; + topics?: string[]; + watchers: number; + watchers_count: number; + /** Format: date-time */ + created_at: string; + /** Format: date-time */ + updated_at: string; + allow_forking?: boolean; + is_template?: boolean; + } | null; + sha: string; + user: { + /** Format: uri */ + avatar_url: string; + events_url: string; + /** Format: uri */ + followers_url: string; + following_url: string; + gists_url: string; + gravatar_id: string | null; + /** Format: uri */ + html_url: string; + id: number; + node_id: string; + login: string; + /** Format: uri */ + organizations_url: string; + /** Format: uri */ + received_events_url: string; + /** Format: uri */ + repos_url: string; + site_admin: boolean; + starred_url: string; + /** Format: uri */ + subscriptions_url: string; + type: string; + /** Format: uri */ + url: string; + }; + }; + base: { + label: string; + ref: string; + repo: { + archive_url: string; + assignees_url: string; + blobs_url: string; + branches_url: string; + collaborators_url: string; + comments_url: string; + commits_url: string; + compare_url: string; + contents_url: string; + /** Format: uri */ + contributors_url: string; + /** Format: uri */ + deployments_url: string; + description: string | null; + /** Format: uri */ + downloads_url: string; + /** Format: uri */ + events_url: string; + fork: boolean; + /** Format: uri */ + forks_url: string; + full_name: string; + git_commits_url: string; + git_refs_url: string; + git_tags_url: string; + /** Format: uri */ + hooks_url: string; + /** Format: uri */ + html_url: string; + id: number; + is_template?: boolean; + node_id: string; + issue_comment_url: string; + issue_events_url: string; + issues_url: string; + keys_url: string; + labels_url: string; + /** Format: uri */ + languages_url: string; + /** Format: uri */ + merges_url: string; + milestones_url: string; + name: string; + notifications_url: string; + owner: { + /** Format: uri */ + avatar_url: string; + events_url: string; + /** Format: uri */ + followers_url: string; + following_url: string; + gists_url: string; + gravatar_id: string | null; + /** Format: uri */ + html_url: string; + id: number; + node_id: string; + login: string; + /** Format: uri */ + organizations_url: string; + /** Format: uri */ + received_events_url: string; + /** Format: uri */ + repos_url: string; + site_admin: boolean; + starred_url: string; + /** Format: uri */ + subscriptions_url: string; + type: string; + /** Format: uri */ + url: string; + }; + private: boolean; + pulls_url: string; + releases_url: string; + /** Format: uri */ + stargazers_url: string; + statuses_url: string; + /** Format: uri */ + subscribers_url: string; + /** Format: uri */ + subscription_url: string; + /** Format: uri */ + tags_url: string; + /** Format: uri */ + teams_url: string; + trees_url: string; + /** Format: uri */ + url: string; + clone_url: string; + 
default_branch: string; + forks: number; + forks_count: number; + git_url: string; + has_downloads: boolean; + has_issues: boolean; + has_projects: boolean; + has_wiki: boolean; + has_pages: boolean; + /** Format: uri */ + homepage: string | null; + language: string | null; + master_branch?: string; + archived: boolean; + disabled: boolean; + /** @description The repository visibility: public, private, or internal. */ + visibility?: string; + /** Format: uri */ + mirror_url: string | null; + open_issues: number; + open_issues_count: number; + permissions?: { + admin: boolean; + maintain?: boolean; + push: boolean; + triage?: boolean; + pull: boolean; + }; + temp_clone_token?: string; + allow_merge_commit?: boolean; + allow_squash_merge?: boolean; + allow_rebase_merge?: boolean; + license: components["schemas"]["nullable-license-simple"]; + /** Format: date-time */ + pushed_at: string; + size: number; + ssh_url: string; + stargazers_count: number; + /** Format: uri */ + svn_url: string; + topics?: string[]; + watchers: number; + watchers_count: number; + /** Format: date-time */ + created_at: string; + /** Format: date-time */ + updated_at: string; + allow_forking?: boolean; + }; + sha: string; + user: { + /** Format: uri */ + avatar_url: string; + events_url: string; + /** Format: uri */ + followers_url: string; + following_url: string; + gists_url: string; + gravatar_id: string | null; + /** Format: uri */ + html_url: string; + id: number; + node_id: string; + login: string; + /** Format: uri */ + organizations_url: string; + /** Format: uri */ + received_events_url: string; + /** Format: uri */ + repos_url: string; + site_admin: boolean; + starred_url: string; + /** Format: uri */ + subscriptions_url: string; + type: string; + /** Format: uri */ + url: string; + }; + }; + _links: { + comments: components["schemas"]["link"]; + commits: components["schemas"]["link"]; + statuses: components["schemas"]["link"]; + html: components["schemas"]["link"]; + issue: components["schemas"]["link"]; + review_comments: components["schemas"]["link"]; + review_comment: components["schemas"]["link"]; + self: components["schemas"]["link"]; + }; + author_association: components["schemas"]["author-association"]; + auto_merge: components["schemas"]["auto-merge"]; + /** + * @description Indicates whether or not the pull request is a draft. + * @example false + */ + draft?: boolean; + merged: boolean; + /** @example true */ + mergeable: boolean | null; + /** @example true */ + rebaseable?: boolean | null; + /** @example clean */ + mergeable_state: string; + merged_by: components["schemas"]["nullable-simple-user"]; + /** @example 10 */ + comments: number; + /** @example 0 */ + review_comments: number; + /** + * @description Indicates whether maintainers can modify the pull request. + * @example true + */ + maintainer_can_modify: boolean; + /** @example 3 */ + commits: number; + /** @example 100 */ + additions: number; + /** @example 3 */ + deletions: number; + /** @example 5 */ + changed_files: number; + }; + /** + * Pull Request Merge Result + * @description Pull Request Merge Result + */ + "pull-request-merge-result": { + sha: string; + merged: boolean; + message: string; + }; + /** + * Pull Request Review Request + * @description Pull Request Review Request + */ + "pull-request-review-request": { + users: components["schemas"]["simple-user"][]; + teams: components["schemas"]["team"][]; + }; + /** + * Pull Request Review + * @description Pull Request Reviews are reviews on pull requests. 
+ */ + "pull-request-review": { + /** + * @description Unique identifier of the review + * @example 42 + */ + id: number; + /** @example MDE3OlB1bGxSZXF1ZXN0UmV2aWV3ODA= */ + node_id: string; + user: components["schemas"]["nullable-simple-user"]; + /** + * @description The text of the review. + * @example This looks great. + */ + body: string; + /** @example CHANGES_REQUESTED */ + state: string; + /** + * Format: uri + * @example https://github.com/octocat/Hello-World/pull/12#pullrequestreview-80 + */ + html_url: string; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/pulls/12 + */ + pull_request_url: string; + _links: { + html: { + href: string; + }; + pull_request: { + href: string; + }; + }; + /** Format: date-time */ + submitted_at?: string; + /** + * @description A commit SHA for the review. + * @example 54bb654c9e6025347f57900a4a5c2313a96b8035 + */ + commit_id: string; + body_html?: string; + body_text?: string; + author_association: components["schemas"]["author-association"]; + }; + /** + * Legacy Review Comment + * @description Legacy Review Comment + */ + "review-comment": { + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/pulls/comments/1 + */ + url: string; + /** @example 42 */ + pull_request_review_id: number | null; + /** @example 10 */ + id: number; + /** @example MDI0OlB1bGxSZXF1ZXN0UmV2aWV3Q29tbWVudDEw */ + node_id: string; + /** @example @@ -16,33 +16,40 @@ public class Connection : IConnection... */ + diff_hunk: string; + /** @example file1.txt */ + path: string; + /** @example 1 */ + position: number | null; + /** @example 4 */ + original_position: number; + /** @example 6dcb09b5b57875f334f61aebed695e2e4193db5e */ + commit_id: string; + /** @example 9c48853fa3dc5c1c3d6f1f1cd1f2743e72652840 */ + original_commit_id: string; + /** @example 8 */ + in_reply_to_id?: number; + user: components["schemas"]["nullable-simple-user"]; + /** @example Great stuff */ + body: string; + /** + * Format: date-time + * @example 2011-04-14T16:00:49Z + */ + created_at: string; + /** + * Format: date-time + * @example 2011-04-14T16:00:49Z + */ + updated_at: string; + /** + * Format: uri + * @example https://github.com/octocat/Hello-World/pull/1#discussion-diff-1 + */ + html_url: string; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/pulls/1 + */ + pull_request_url: string; + author_association: components["schemas"]["author-association"]; + _links: { + self: components["schemas"]["link"]; + html: components["schemas"]["link"]; + pull_request: components["schemas"]["link"]; + }; + body_text?: string; + body_html?: string; + reactions?: components["schemas"]["reaction-rollup"]; + /** + * @description The side of the first line of the range for a multi-line comment. + * @default RIGHT + * @enum {string} + */ + side?: "LEFT" | "RIGHT"; + /** + * @description The side of the first line of the range for a multi-line comment. + * @default RIGHT + * @enum {string|null} + */ + start_side?: ("LEFT" | "RIGHT") | null; + /** + * @description The line of the blob to which the comment applies. The last line of the range for a multi-line comment + * @example 2 + */ + line?: number; + /** + * @description The original line of the blob to which the comment applies. The last line of the range for a multi-line comment + * @example 2 + */ + original_line?: number; + /** + * @description The first line of the range for a multi-line comment. 
+ * @example 2 + */ + start_line?: number | null; + /** + * @description The original first line of the range for a multi-line comment. + * @example 2 + */ + original_start_line?: number | null; + }; + /** + * Release Asset + * @description Data related to a release. + */ + "release-asset": { + /** Format: uri */ + url: string; + /** Format: uri */ + browser_download_url: string; + id: number; + node_id: string; + /** + * @description The file name of the asset. + * @example Team Environment + */ + name: string; + label: string | null; + /** + * @description State of the release asset. + * @enum {string} + */ + state: "uploaded" | "open"; + content_type: string; + size: number; + download_count: number; + /** Format: date-time */ + created_at: string; + /** Format: date-time */ + updated_at: string; + uploader: components["schemas"]["nullable-simple-user"]; + }; + /** + * Release + * @description A release. + */ + release: { + /** Format: uri */ + url: string; + /** Format: uri */ + html_url: string; + /** Format: uri */ + assets_url: string; + upload_url: string; + /** Format: uri */ + tarball_url: string | null; + /** Format: uri */ + zipball_url: string | null; + id: number; + node_id: string; + /** + * @description The name of the tag. + * @example v1.0.0 + */ + tag_name: string; + /** + * @description Specifies the commitish value that determines where the Git tag is created from. + * @example master + */ + target_commitish: string; + name: string | null; + body?: string | null; + /** + * @description true to create a draft (unpublished) release, false to create a published one. + * @example false + */ + draft: boolean; + /** + * @description Whether to identify the release as a prerelease or a full release. + * @example false + */ + prerelease: boolean; + /** Format: date-time */ + created_at: string; + /** Format: date-time */ + published_at: string | null; + author: components["schemas"]["simple-user"]; + assets: components["schemas"]["release-asset"][]; + body_html?: string; + body_text?: string; + mentions_count?: number; + /** + * Format: uri + * @description The URL of the release discussion. + */ + discussion_url?: string; + reactions?: components["schemas"]["reaction-rollup"]; + }; + /** + * Generated Release Notes Content + * @description Generated name and body describing a release + */ + "release-notes-content": { + /** + * @description The generated name of the release + * @example Release v1.0.0 is now available! + */ + name: string; + /** @description The generated body describing the contents of the release supporting markdown formatting */ + body: string; + }; + "secret-scanning-alert": { + number?: components["schemas"]["alert-number"]; + created_at?: components["schemas"]["alert-created-at"]; + updated_at?: components["schemas"]["alert-updated-at"]; + url?: components["schemas"]["alert-url"]; + html_url?: components["schemas"]["alert-html-url"]; + /** + * Format: uri + * @description The REST API URL of the code locations for this alert. + */ + locations_url?: string; + state?: components["schemas"]["secret-scanning-alert-state"]; + resolution?: components["schemas"]["secret-scanning-alert-resolution"]; + /** + * Format: date-time + * @description The time that the alert was resolved in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + resolved_at?: string | null; + resolved_by?: components["schemas"]["nullable-simple-user"]; + /** @description The type of secret that secret scanning detected. 
*/ + secret_type?: string; + /** + * @description User-friendly name for the detected secret, matching the `secret_type`. + * For a list of built-in patterns, see "[Secret scanning patterns](https://docs.github.com/code-security/secret-scanning/secret-scanning-patterns#supported-secrets-for-advanced-security)." + */ + secret_type_display_name?: string; + /** @description The secret that was detected. */ + secret?: string; + /** @description Whether push protection was bypassed for the detected secret. */ + push_protection_bypassed?: boolean | null; + push_protection_bypassed_by?: components["schemas"]["nullable-simple-user"]; + /** + * Format: date-time + * @description The time that push protection was bypassed in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + push_protection_bypassed_at?: string | null; + }; + /** @description Represents a 'commit' secret scanning location type. This location type shows that a secret was detected inside a commit to a repository. */ + "secret-scanning-location-commit": { + /** + * @description The file path in the repository + * @example /example/secrets.txt + */ + path: string; + /** @description Line number at which the secret starts in the file */ + start_line: number; + /** @description Line number at which the secret ends in the file */ + end_line: number; + /** @description The column at which the secret starts within the start line when the file is interpreted as 8BIT ASCII */ + start_column: number; + /** @description The column at which the secret ends within the end line when the file is interpreted as 8BIT ASCII */ + end_column: number; + /** + * @description SHA-1 hash ID of the associated blob + * @example af5626b4a114abcb82d63db7c8082c3c4756e51b + */ + blob_sha: string; + /** @description The API URL to get the associated blob resource */ + blob_url: string; + /** + * @description SHA-1 hash ID of the associated commit + * @example af5626b4a114abcb82d63db7c8082c3c4756e51b + */ + commit_sha: string; + /** @description The API URL to get the associated commit resource */ + commit_url: string; + }; + "secret-scanning-location": { + /** + * @description The location type. Because secrets may be found in different types of resources (ie. code, comments, issues), this field identifies the type of resource where the secret was found. 
+ * @example commit + * @enum {string} + */ + type: "commit"; + details: components["schemas"]["secret-scanning-location-commit"]; + }; + /** + * Stargazer + * @description Stargazer + */ + stargazer: { + /** Format: date-time */ + starred_at: string; + user: components["schemas"]["nullable-simple-user"]; + }; + /** + * Code Frequency Stat + * @description Code Frequency Stat + */ + "code-frequency-stat": number[]; + /** + * Commit Activity + * @description Commit Activity + */ + "commit-activity": { + /** + * @example [ + * 0, + * 3, + * 26, + * 20, + * 39, + * 1, + * 0 + * ] + */ + days: number[]; + /** @example 89 */ + total: number; + /** @example 1336280400 */ + week: number; + }; + /** + * Contributor Activity + * @description Contributor Activity + */ + "contributor-activity": { + author: components["schemas"]["nullable-simple-user"]; + /** @example 135 */ + total: number; + /** + * @example [ + * { + * "w": "1367712000", + * "a": 6898, + * "d": 77, + * "c": 10 + * } + * ] + */ + weeks: { + w?: number; + a?: number; + d?: number; + c?: number; + }[]; + }; + /** Participation Stats */ + "participation-stats": { + all: number[]; + owner: number[]; + }; + /** + * Repository Invitation + * @description Repository invitations let you manage who you collaborate with. + */ + "repository-subscription": { + /** + * @description Determines if notifications should be received from this repository. + * @example true + */ + subscribed: boolean; + /** @description Determines if all notifications should be blocked from this repository. */ + ignored: boolean; + reason: string | null; + /** + * Format: date-time + * @example 2012-10-06T21:34:12Z + */ + created_at: string; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/example/subscription + */ + url: string; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/example + */ + repository_url: string; + }; + /** + * Tag + * @description Tag + */ + tag: { + /** @example v0.1 */ + name: string; + commit: { + sha: string; + /** Format: uri */ + url: string; + }; + /** + * Format: uri + * @example https://github.com/octocat/Hello-World/zipball/v0.1 + */ + zipball_url: string; + /** + * Format: uri + * @example https://github.com/octocat/Hello-World/tarball/v0.1 + */ + tarball_url: string; + node_id: string; + }; + /** + * Tag protection + * @description Tag protection + */ + "tag-protection": { + /** @example 2 */ + id?: number; + /** @example 2011-01-26T19:01:12Z */ + created_at?: string; + /** @example 2011-01-26T19:01:12Z */ + updated_at?: string; + /** @example true */ + enabled?: boolean; + /** @example v1.* */ + pattern: string; + }; + /** + * Topic + * @description A topic aggregates entities that are related to a subject. + */ + topic: { + names: string[]; + }; + /** Traffic */ + traffic: { + /** Format: date-time */ + timestamp: string; + uniques: number; + count: number; + }; + /** + * Clone Traffic + * @description Clone Traffic + */ + "clone-traffic": { + /** @example 173 */ + count: number; + /** @example 128 */ + uniques: number; + clones: components["schemas"]["traffic"][]; + }; + /** + * Content Traffic + * @description Content Traffic + */ + "content-traffic": { + /** @example /github/hubot */ + path: string; + /** @example github/hubot: A customizable life embetterment robot. 
*/ + title: string; + /** @example 3542 */ + count: number; + /** @example 2225 */ + uniques: number; + }; + /** + * Referrer Traffic + * @description Referrer Traffic + */ + "referrer-traffic": { + /** @example Google */ + referrer: string; + /** @example 4 */ + count: number; + /** @example 3 */ + uniques: number; + }; + /** + * View Traffic + * @description View Traffic + */ + "view-traffic": { + /** @example 14850 */ + count: number; + /** @example 3782 */ + uniques: number; + views: components["schemas"]["traffic"][]; + }; + "scim-group-list-enterprise": { + schemas: string[]; + totalResults: number; + itemsPerPage: number; + startIndex: number; + Resources: { + schemas: string[]; + id: string; + externalId?: string | null; + displayName?: string; + members?: { + value?: string; + $ref?: string; + display?: string; + }[]; + meta?: { + resourceType?: string; + created?: string; + lastModified?: string; + location?: string; + }; + }[]; + }; + "scim-enterprise-group": { + schemas: string[]; + id: string; + externalId?: string | null; + displayName?: string; + members?: { + value?: string; + $ref?: string; + display?: string; + }[]; + meta?: { + resourceType?: string; + created?: string; + lastModified?: string; + location?: string; + }; + }; + "scim-user-list-enterprise": { + schemas: string[]; + totalResults: number; + itemsPerPage: number; + startIndex: number; + Resources: { + schemas: string[]; + id: string; + externalId?: string; + userName?: string; + name?: { + givenName?: string; + familyName?: string; + }; + emails?: { + value?: string; + primary?: boolean; + type?: string; + }[]; + groups?: { + value?: string; + }[]; + active?: boolean; + meta?: { + resourceType?: string; + created?: string; + lastModified?: string; + location?: string; + }; + }[]; + }; + "scim-enterprise-user": { + schemas: string[]; + id: string; + externalId?: string; + userName?: string; + name?: { + givenName?: string; + familyName?: string; + }; + emails?: { + value?: string; + type?: string; + primary?: boolean; + }[]; + groups?: { + value?: string; + }[]; + active?: boolean; + meta?: { + resourceType?: string; + created?: string; + lastModified?: string; + location?: string; + }; + }; + /** + * SCIM /Users + * @description SCIM /Users provisioning endpoints + */ + "scim-user": { + /** @description SCIM schema used. */ + schemas: string[]; + /** + * @description Unique identifier of an external identity + * @example 1b78eada-9baa-11e6-9eb6-a431576d590e + */ + id: string; + /** + * @description The ID of the User. + * @example a7b0f98395 + */ + externalId: string | null; + /** + * @description Configured by the admin. Could be an email, login, or username + * @example someone@example.com + */ + userName: string | null; + /** + * @description The name of the user, suitable for display to end-users + * @example Jon Doe + */ + displayName?: string | null; + /** + * @example { + * "givenName": "Jane", + * "familyName": "User" + * } + */ + name: { + givenName: string | null; + familyName: string | null; + formatted?: string | null; + }; + /** + * @description user emails + * @example [ + * { + * "value": "someone@example.com", + * "primary": true + * }, + * { + * "value": "another@example.com", + * "primary": false + * } + * ] + */ + emails: { + value: string; + primary?: boolean; + }[]; + /** + * @description The active status of the User. 
+ * @example true + */ + active: boolean; + meta: { + /** @example User */ + resourceType?: string; + /** + * Format: date-time + * @example 2019-01-24T22:45:36.000Z + */ + created?: string; + /** + * Format: date-time + * @example 2019-01-24T22:45:36.000Z + */ + lastModified?: string; + /** + * Format: uri + * @example https://api.github.com/scim/v2/organizations/myorg-123abc55141bfd8f/Users/c42772b5-2029-11e9-8543-9264a97dec8d + */ + location?: string; + }; + /** @description The ID of the organization. */ + organization_id?: number; + /** + * @description Set of operations to be performed + * @example [ + * { + * "op": "replace", + * "value": { + * "active": false + * } + * } + * ] + */ + operations?: { + /** @enum {string} */ + op: "add" | "remove" | "replace"; + path?: string; + value?: string | { [key: string]: unknown } | unknown[]; + }[]; + /** @description associated groups */ + groups?: { + value?: string; + display?: string; + }[]; + }; + /** + * SCIM User List + * @description SCIM User List + */ + "scim-user-list": { + /** @description SCIM schema used. */ + schemas: string[]; + /** @example 3 */ + totalResults: number; + /** @example 10 */ + itemsPerPage: number; + /** @example 1 */ + startIndex: number; + Resources: components["schemas"]["scim-user"][]; + }; + /** Search Result Text Matches */ + "search-result-text-matches": { + object_url?: string; + object_type?: string | null; + property?: string; + fragment?: string; + matches?: { + text?: string; + indices?: number[]; + }[]; + }[]; + /** + * Code Search Result Item + * @description Code Search Result Item + */ + "code-search-result-item": { + name: string; + path: string; + sha: string; + /** Format: uri */ + url: string; + /** Format: uri */ + git_url: string; + /** Format: uri */ + html_url: string; + repository: components["schemas"]["minimal-repository"]; + score: number; + file_size?: number; + language?: string | null; + /** Format: date-time */ + last_modified_at?: string; + /** + * @example [ + * "73..77", + * "77..78" + * ] + */ + line_numbers?: string[]; + text_matches?: components["schemas"]["search-result-text-matches"]; + }; + /** + * Commit Search Result Item + * @description Commit Search Result Item + */ + "commit-search-result-item": { + /** Format: uri */ + url: string; + sha: string; + /** Format: uri */ + html_url: string; + /** Format: uri */ + comments_url: string; + commit: { + author: { + name: string; + email: string; + /** Format: date-time */ + date: string; + }; + committer: components["schemas"]["nullable-git-user"]; + comment_count: number; + message: string; + tree: { + sha: string; + /** Format: uri */ + url: string; + }; + /** Format: uri */ + url: string; + verification?: components["schemas"]["verification"]; + }; + author: components["schemas"]["nullable-simple-user"]; + committer: components["schemas"]["nullable-git-user"]; + parents: { + url?: string; + html_url?: string; + sha?: string; + }[]; + repository: components["schemas"]["minimal-repository"]; + score: number; + node_id: string; + text_matches?: components["schemas"]["search-result-text-matches"]; + }; + /** + * Issue Search Result Item + * @description Issue Search Result Item + */ + "issue-search-result-item": { + /** Format: uri */ + url: string; + /** Format: uri */ + repository_url: string; + labels_url: string; + /** Format: uri */ + comments_url: string; + /** Format: uri */ + events_url: string; + /** Format: uri */ + html_url: string; + id: number; + node_id: string; + number: number; + title: string; + locked: 
boolean; + active_lock_reason?: string | null; + assignees?: components["schemas"]["simple-user"][] | null; + user: components["schemas"]["nullable-simple-user"]; + labels: { + /** Format: int64 */ + id?: number; + node_id?: string; + url?: string; + name?: string; + color?: string; + default?: boolean; + description?: string | null; + }[]; + state: string; + state_reason?: string | null; + assignee: components["schemas"]["nullable-simple-user"]; + milestone: components["schemas"]["nullable-milestone"]; + comments: number; + /** Format: date-time */ + created_at: string; + /** Format: date-time */ + updated_at: string; + /** Format: date-time */ + closed_at: string | null; + text_matches?: components["schemas"]["search-result-text-matches"]; + pull_request?: { + /** Format: date-time */ + merged_at?: string | null; + /** Format: uri */ + diff_url: string | null; + /** Format: uri */ + html_url: string | null; + /** Format: uri */ + patch_url: string | null; + /** Format: uri */ + url: string | null; + }; + body?: string; + score: number; + author_association: components["schemas"]["author-association"]; + draft?: boolean; + repository?: components["schemas"]["repository"]; + body_html?: string; + body_text?: string; + /** Format: uri */ + timeline_url?: string; + performed_via_github_app?: components["schemas"]["nullable-integration"]; + reactions?: components["schemas"]["reaction-rollup"]; + }; + /** + * Label Search Result Item + * @description Label Search Result Item + */ + "label-search-result-item": { + id: number; + node_id: string; + /** Format: uri */ + url: string; + name: string; + color: string; + default: boolean; + description: string | null; + score: number; + text_matches?: components["schemas"]["search-result-text-matches"]; + }; + /** + * Repo Search Result Item + * @description Repo Search Result Item + */ + "repo-search-result-item": { + id: number; + node_id: string; + name: string; + full_name: string; + owner: components["schemas"]["nullable-simple-user"]; + private: boolean; + /** Format: uri */ + html_url: string; + description: string | null; + fork: boolean; + /** Format: uri */ + url: string; + /** Format: date-time */ + created_at: string; + /** Format: date-time */ + updated_at: string; + /** Format: date-time */ + pushed_at: string; + /** Format: uri */ + homepage: string | null; + size: number; + stargazers_count: number; + watchers_count: number; + language: string | null; + forks_count: number; + open_issues_count: number; + master_branch?: string; + default_branch: string; + score: number; + /** Format: uri */ + forks_url: string; + keys_url: string; + collaborators_url: string; + /** Format: uri */ + teams_url: string; + /** Format: uri */ + hooks_url: string; + issue_events_url: string; + /** Format: uri */ + events_url: string; + assignees_url: string; + branches_url: string; + /** Format: uri */ + tags_url: string; + blobs_url: string; + git_tags_url: string; + git_refs_url: string; + trees_url: string; + statuses_url: string; + /** Format: uri */ + languages_url: string; + /** Format: uri */ + stargazers_url: string; + /** Format: uri */ + contributors_url: string; + /** Format: uri */ + subscribers_url: string; + /** Format: uri */ + subscription_url: string; + commits_url: string; + git_commits_url: string; + comments_url: string; + issue_comment_url: string; + contents_url: string; + compare_url: string; + /** Format: uri */ + merges_url: string; + archive_url: string; + /** Format: uri */ + downloads_url: string; + issues_url: string; + 
pulls_url: string; + milestones_url: string; + notifications_url: string; + labels_url: string; + releases_url: string; + /** Format: uri */ + deployments_url: string; + git_url: string; + ssh_url: string; + clone_url: string; + /** Format: uri */ + svn_url: string; + forks: number; + open_issues: number; + watchers: number; + topics?: string[]; + /** Format: uri */ + mirror_url: string | null; + has_issues: boolean; + has_projects: boolean; + has_pages: boolean; + has_wiki: boolean; + has_downloads: boolean; + archived: boolean; + /** @description Returns whether or not this repository disabled. */ + disabled: boolean; + /** @description The repository visibility: public, private, or internal. */ + visibility?: string; + license: components["schemas"]["nullable-license-simple"]; + permissions?: { + admin: boolean; + maintain?: boolean; + push: boolean; + triage?: boolean; + pull: boolean; + }; + text_matches?: components["schemas"]["search-result-text-matches"]; + temp_clone_token?: string; + allow_merge_commit?: boolean; + allow_squash_merge?: boolean; + allow_rebase_merge?: boolean; + allow_auto_merge?: boolean; + delete_branch_on_merge?: boolean; + allow_forking?: boolean; + is_template?: boolean; + }; + /** + * Topic Search Result Item + * @description Topic Search Result Item + */ + "topic-search-result-item": { + name: string; + display_name: string | null; + short_description: string | null; + description: string | null; + created_by: string | null; + released: string | null; + /** Format: date-time */ + created_at: string; + /** Format: date-time */ + updated_at: string; + featured: boolean; + curated: boolean; + score: number; + repository_count?: number | null; + /** Format: uri */ + logo_url?: string | null; + text_matches?: components["schemas"]["search-result-text-matches"]; + related?: + | { + topic_relation?: { + id?: number; + name?: string; + topic_id?: number; + relation_type?: string; + }; + }[] + | null; + aliases?: + | { + topic_relation?: { + id?: number; + name?: string; + topic_id?: number; + relation_type?: string; + }; + }[] + | null; + }; + /** + * User Search Result Item + * @description User Search Result Item + */ + "user-search-result-item": { + login: string; + id: number; + node_id: string; + /** Format: uri */ + avatar_url: string; + gravatar_id: string | null; + /** Format: uri */ + url: string; + /** Format: uri */ + html_url: string; + /** Format: uri */ + followers_url: string; + /** Format: uri */ + subscriptions_url: string; + /** Format: uri */ + organizations_url: string; + /** Format: uri */ + repos_url: string; + /** Format: uri */ + received_events_url: string; + type: string; + score: number; + following_url: string; + gists_url: string; + starred_url: string; + events_url: string; + public_repos?: number; + public_gists?: number; + followers?: number; + following?: number; + /** Format: date-time */ + created_at?: string; + /** Format: date-time */ + updated_at?: string; + name?: string | null; + bio?: string | null; + /** Format: email */ + email?: string | null; + location?: string | null; + site_admin: boolean; + hireable?: boolean | null; + text_matches?: components["schemas"]["search-result-text-matches"]; + blog?: string | null; + company?: string | null; + /** Format: date-time */ + suspended_at?: string | null; + }; + /** + * Private User + * @description Private User + */ + "private-user": { + /** @example octocat */ + login: string; + /** @example 1 */ + id: number; + /** @example MDQ6VXNlcjE= */ + node_id: string; + /** + * Format: 
uri + * @example https://github.com/images/error/octocat_happy.gif + */ + avatar_url: string; + /** @example 41d064eb2195891e12d0413f63227ea7 */ + gravatar_id: string | null; + /** + * Format: uri + * @example https://api.github.com/users/octocat + */ + url: string; + /** + * Format: uri + * @example https://github.com/octocat + */ + html_url: string; + /** + * Format: uri + * @example https://api.github.com/users/octocat/followers + */ + followers_url: string; + /** @example https://api.github.com/users/octocat/following{/other_user} */ + following_url: string; + /** @example https://api.github.com/users/octocat/gists{/gist_id} */ + gists_url: string; + /** @example https://api.github.com/users/octocat/starred{/owner}{/repo} */ + starred_url: string; + /** + * Format: uri + * @example https://api.github.com/users/octocat/subscriptions + */ + subscriptions_url: string; + /** + * Format: uri + * @example https://api.github.com/users/octocat/orgs + */ + organizations_url: string; + /** + * Format: uri + * @example https://api.github.com/users/octocat/repos + */ + repos_url: string; + /** @example https://api.github.com/users/octocat/events{/privacy} */ + events_url: string; + /** + * Format: uri + * @example https://api.github.com/users/octocat/received_events + */ + received_events_url: string; + /** @example User */ + type: string; + site_admin: boolean; + /** @example monalisa octocat */ + name: string | null; + /** @example GitHub */ + company: string | null; + /** @example https://github.com/blog */ + blog: string | null; + /** @example San Francisco */ + location: string | null; + /** + * Format: email + * @example octocat@github.com + */ + email: string | null; + hireable: boolean | null; + /** @example There once was... */ + bio: string | null; + /** @example monalisa */ + twitter_username?: string | null; + /** @example 2 */ + public_repos: number; + /** @example 1 */ + public_gists: number; + /** @example 20 */ + followers: number; + /** @example 0 */ + following: number; + /** + * Format: date-time + * @example 2008-01-14T04:33:35Z + */ + created_at: string; + /** + * Format: date-time + * @example 2008-01-14T04:33:35Z + */ + updated_at: string; + /** @example 81 */ + private_gists: number; + /** @example 100 */ + total_private_repos: number; + /** @example 100 */ + owned_private_repos: number; + /** @example 10000 */ + disk_usage: number; + /** @example 8 */ + collaborators: number; + /** @example true */ + two_factor_authentication: boolean; + plan?: { + collaborators: number; + name: string; + space: number; + private_repos: number; + }; + /** Format: date-time */ + suspended_at?: string | null; + business_plus?: boolean; + ldap_dn?: string; + }; + /** + * Codespaces Secret + * @description Secrets for a GitHub Codespace. 
+ */ + "codespaces-secret": { + /** + * @description The name of the secret + * @example SECRET_NAME + */ + name: string; + /** + * Format: date-time + * @description Secret created at + */ + created_at: string; + /** + * Format: date-time + * @description Secret last updated at + */ + updated_at: string; + /** + * @description The type of repositories in the organization that the secret is visible to + * @enum {string} + */ + visibility: "all" | "private" | "selected"; + /** + * Format: uri + * @description API URL at which the list of repositories this secret is vicible can be retrieved + * @example https://api.github.com/user/secrets/SECRET_NAME/repositories + */ + selected_repositories_url: string; + }; + /** + * CodespacesUserPublicKey + * @description The public key used for setting user Codespaces' Secrets. + */ + "codespaces-user-public-key": { + /** + * @description The identifier for the key. + * @example 1234567 + */ + key_id: string; + /** + * @description The Base64 encoded public key. + * @example hBT5WZEj8ZoOv6TYJsfWq7MxTEQopZO5/IT3ZCVQPzs= + */ + key: string; + }; + /** + * Fetches information about an export of a codespace. + * @description An export of a codespace. Also, latest export details for a codespace can be fetched with id = latest + */ + "codespace-export-details": { + /** + * @description State of the latest export + * @example succeeded | failed | in_progress + */ + state?: string | null; + /** + * Format: date-time + * @description Completion time of the last export operation + * @example 2021-01-01T19:01:12Z + */ + completed_at?: string | null; + /** + * @description Name of the exported branch + * @example codespace-monalisa-octocat-hello-world-g4wpq6h95q + */ + branch?: string | null; + /** + * @description Git commit SHA of the exported branch + * @example fd95a81ca01e48ede9f39c799ecbcef817b8a3b2 + */ + sha?: string | null; + /** + * @description Id for the export details + * @example latest + */ + id?: string; + /** + * @description Url for fetching export details + * @example https://api.github.com/user/codespaces/:name/exports/latest + */ + export_url?: string; + /** + * @description Web url for the exported branch + * @example https://github.com/octocat/hello-world/tree/:branch + */ + html_url?: string | null; + }; + /** + * Email + * @description Email + */ + email: { + /** + * Format: email + * @example octocat@github.com + */ + email: string; + /** @example true */ + primary: boolean; + /** @example true */ + verified: boolean; + /** @example public */ + visibility: string | null; + }; + /** + * GPG Key + * @description A unique encryption key + */ + "gpg-key": { + /** @example 3 */ + id: number; + /** @example Octocat's GPG Key */ + name?: string | null; + primary_key_id: number | null; + /** @example 3262EFF25BA0D270 */ + key_id: string; + /** @example xsBNBFayYZ... 
*/ + public_key: string; + /** + * @example [ + * { + * "email": "octocat@users.noreply.github.com", + * "verified": true + * } + * ] + */ + emails: { + email?: string; + verified?: boolean; + }[]; + /** + * @example [ + * { + * "id": 4, + * "primary_key_id": 3, + * "key_id": "4A595D4C72EE49C7", + * "public_key": "zsBNBFayYZ...", + * "emails": [], + * "subkeys": [], + * "can_sign": false, + * "can_encrypt_comms": true, + * "can_encrypt_storage": true, + * "can_certify": false, + * "created_at": "2016-03-24T11:31:04-06:00", + * "expires_at": null, + * "revoked": false + * } + * ] + */ + subkeys: { + id?: number; + primary_key_id?: number; + key_id?: string; + public_key?: string; + emails?: unknown[]; + subkeys?: unknown[]; + can_sign?: boolean; + can_encrypt_comms?: boolean; + can_encrypt_storage?: boolean; + can_certify?: boolean; + created_at?: string; + expires_at?: string | null; + raw_key?: string | null; + revoked?: boolean; + }[]; + /** @example true */ + can_sign: boolean; + can_encrypt_comms: boolean; + can_encrypt_storage: boolean; + /** @example true */ + can_certify: boolean; + /** + * Format: date-time + * @example 2016-03-24T11:31:04-06:00 + */ + created_at: string; + /** Format: date-time */ + expires_at: string | null; + /** @example true */ + revoked: boolean; + raw_key: string | null; + }; + /** + * Key + * @description Key + */ + key: { + key: string; + id: number; + url: string; + title: string; + /** Format: date-time */ + created_at: string; + verified: boolean; + read_only: boolean; + }; + /** Marketplace Account */ + "marketplace-account": { + /** Format: uri */ + url: string; + id: number; + type: string; + node_id?: string; + login: string; + /** Format: email */ + email?: string | null; + /** Format: email */ + organization_billing_email?: string | null; + }; + /** + * User Marketplace Purchase + * @description User Marketplace Purchase + */ + "user-marketplace-purchase": { + /** @example monthly */ + billing_cycle: string; + /** + * Format: date-time + * @example 2017-11-11T00:00:00Z + */ + next_billing_date: string | null; + unit_count: number | null; + /** @example true */ + on_free_trial: boolean; + /** + * Format: date-time + * @example 2017-11-11T00:00:00Z + */ + free_trial_ends_on: string | null; + /** + * Format: date-time + * @example 2017-11-02T01:12:12Z + */ + updated_at: string | null; + account: components["schemas"]["marketplace-account"]; + plan: components["schemas"]["marketplace-listing-plan"]; + }; + /** + * Starred Repository + * @description Starred Repository + */ + "starred-repository": { + /** Format: date-time */ + starred_at: string; + repo: components["schemas"]["repository"]; + }; + /** + * Hovercard + * @description Hovercard + */ + hovercard: { + contexts: { + message: string; + octicon: string; + }[]; + }; + /** + * Key Simple + * @description Key Simple + */ + "key-simple": { + id: number; + key: string; + }; + }; + responses: { + /** Resource not found */ + not_found: { + content: { + "application/json": components["schemas"]["basic-error"]; + }; + }; + /** Validation failed */ + validation_failed_simple: { + content: { + "application/json": components["schemas"]["validation-error-simple"]; + }; + }; + /** Bad Request */ + bad_request: { + content: { + "application/json": components["schemas"]["basic-error"]; + "application/scim+json": components["schemas"]["scim-error"]; + }; + }; + /** Validation failed */ + validation_failed: { + content: { + "application/json": components["schemas"]["validation-error"]; + }; + }; + /** Accepted 
*/ + accepted: { + content: { + "application/json": { [key: string]: unknown }; + }; + }; + /** Forbidden */ + forbidden: { + content: { + "application/json": components["schemas"]["basic-error"]; + }; + }; + /** Requires authentication */ + requires_authentication: { + content: { + "application/json": components["schemas"]["basic-error"]; + }; + }; + /** Not modified */ + not_modified: unknown; + /** Gone */ + gone: { + content: { + "application/json": components["schemas"]["basic-error"]; + }; + }; + /** Response */ + actions_runner_labels: { + content: { + "application/json": { + total_count: number; + labels: components["schemas"]["runner-label"][]; + }; + }; + }; + /** Response */ + actions_runner_labels_readonly: { + content: { + "application/json": { + total_count: number; + labels: components["schemas"]["runner-label"][]; + }; + }; + }; + /** Response if GitHub Advanced Security is not enabled for this repository */ + code_scanning_forbidden_read: { + content: { + "application/json": components["schemas"]["basic-error"]; + }; + }; + /** Service unavailable */ + service_unavailable: { + content: { + "application/json": { + code?: string; + message?: string; + documentation_url?: string; + }; + }; + }; + /** Forbidden Gist */ + forbidden_gist: { + content: { + "application/json": { + block?: { + reason?: string; + created_at?: string; + html_url?: string | null; + }; + message?: string; + documentation_url?: string; + }; + }; + }; + /** Moved permanently */ + moved_permanently: { + content: { + "application/json": components["schemas"]["basic-error"]; + }; + }; + /** Conflict */ + conflict: { + content: { + "application/json": components["schemas"]["basic-error"]; + }; + }; + /** Internal Error */ + internal_error: { + content: { + "application/json": components["schemas"]["basic-error"]; + }; + }; + /** Temporary Redirect */ + temporary_redirect: { + content: { + "application/json": components["schemas"]["basic-error"]; + }; + }; + /** Response if the repository is archived or if github advanced security is not enabled for this repository */ + code_scanning_forbidden_write: { + content: { + "application/json": components["schemas"]["basic-error"]; + }; + }; + /** Found */ + found: unknown; + /** A header with no content is returned. */ + no_content: unknown; + /** Resource not found */ + scim_not_found: { + content: { + "application/json": components["schemas"]["scim-error"]; + "application/scim+json": components["schemas"]["scim-error"]; + }; + }; + /** Forbidden */ + scim_forbidden: { + content: { + "application/json": components["schemas"]["scim-error"]; + "application/scim+json": components["schemas"]["scim-error"]; + }; + }; + /** Bad Request */ + scim_bad_request: { + content: { + "application/json": components["schemas"]["scim-error"]; + "application/scim+json": components["schemas"]["scim-error"]; + }; + }; + /** Too Many Requests */ + scim_too_many_requests: { + content: { + "application/json": components["schemas"]["scim-error"]; + "application/scim+json": components["schemas"]["scim-error"]; + }; + }; + /** Internal Error */ + scim_internal_error: { + content: { + "application/json": components["schemas"]["scim-error"]; + "application/scim+json": components["schemas"]["scim-error"]; + }; + }; + /** Conflict */ + scim_conflict: { + content: { + "application/json": components["schemas"]["scim-error"]; + "application/scim+json": components["schemas"]["scim-error"]; + }; + }; + }; + parameters: { + /** @description The number of results per page (max 100). 
*/ + "per-page": number; + /** @description Used for pagination: the starting delivery from which the page of deliveries is fetched. Refer to the `link` header for the next and previous page cursors. */ + cursor: string; + "delivery-id": number; + /** @description Page number of the results to fetch. */ + page: number; + /** @description Only show notifications updated after the given time. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. */ + since: string; + /** @description The unique identifier of the installation. */ + "installation-id": number; + /** @description The unique identifier of the grant. */ + "grant-id": number; + /** @description The client ID of the GitHub app. */ + "client-id": string; + "app-slug": string; + /** @description The client ID of the OAuth app. */ + "oauth-client-id": string; + /** @description The unique identifier of the authorization. */ + "authorization-id": number; + /** @description The slug version of the enterprise name or the login of an organization. */ + "enterprise-or-org": string; + /** @description The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ + enterprise: string; + /** @description The unique identifier of the organization. */ + "org-id": number; + /** @description Only return runner groups that are allowed to be used by this organization. */ + "visible-to-organization": string; + /** @description Unique identifier of the self-hosted runner group. */ + "runner-group-id": number; + /** @description Unique identifier of the self-hosted runner. */ + "runner-id": number; + /** @description The name of a self-hosted runner's custom label. */ + "runner-label-name": string; + /** @description A search phrase. For more information, see [Searching the audit log](https://docs.github.com/github/setting-up-and-managing-organizations-and-teams/reviewing-the-audit-log-for-your-organization#searching-the-audit-log). */ + "audit-log-phrase": string; + /** + * @description The event types to include: + * + * - `web` - returns web (non-Git) events. + * - `git` - returns Git events. + * - `all` - returns both web and Git events. + * + * The default is `web`. + */ + "audit-log-include": "web" | "git" | "all"; + /** @description A cursor, as given in the [Link header](https://docs.github.com/rest/overview/resources-in-the-rest-api#link-header). If specified, the query only searches for events after this cursor. */ + "audit-log-after": string; + /** @description A cursor, as given in the [Link header](https://docs.github.com/rest/overview/resources-in-the-rest-api#link-header). If specified, the query only searches for events before this cursor. */ + "audit-log-before": string; + /** + * @description The order of audit log events. To list newest events first, specify `desc`. To list oldest events first, specify `asc`. + * + * The default is `desc`. + */ + "audit-log-order": "desc" | "asc"; + /** @description The name of a code scanning tool. Only results by this tool will be listed. You can specify the tool by using either `tool_name` or `tool_guid`, but not both. */ + "tool-name": components["schemas"]["code-scanning-analysis-tool-name"]; + /** @description The GUID of a code scanning tool. Only results by this tool will be listed. Note that some code scanning tools may not include a GUID in their analysis data. You can specify the tool by using either `tool_guid` or `tool_name`, but not both. 
*/ + "tool-guid": components["schemas"]["code-scanning-analysis-tool-guid"]; + /** @description A cursor, as given in the [Link header](https://docs.github.com/rest/overview/resources-in-the-rest-api#link-header). If specified, the query only searches for events before this cursor. */ + "pagination-before": string; + /** @description A cursor, as given in the [Link header](https://docs.github.com/rest/overview/resources-in-the-rest-api#link-header). If specified, the query only searches for events after this cursor. */ + "pagination-after": string; + /** @description The direction to sort the results by. */ + direction: "asc" | "desc"; + /** @description Set to `open` or `resolved` to only list secret scanning alerts in a specific state. */ + "secret-scanning-alert-state": "open" | "resolved"; + /** + * @description A comma-separated list of secret types to return. By default all secret types are returned. + * See "[Secret scanning patterns](https://docs.github.com/code-security/secret-scanning/secret-scanning-patterns#supported-secrets-for-advanced-security)" + * for a complete list of secret types. + */ + "secret-scanning-alert-secret-type": string; + /** @description A comma-separated list of resolutions. Only secret scanning alerts with one of these resolutions are listed. Valid resolutions are `false_positive`, `wont_fix`, `revoked`, `pattern_edited`, `pattern_deleted` or `used_in_tests`. */ + "secret-scanning-alert-resolution": string; + /** @description The property to sort the results by. `created` means when the alert was created. `updated` means when the alert was updated or resolved. */ + "secret-scanning-alert-sort": "created" | "updated"; + /** @description The unique identifier of the gist. */ + "gist-id": string; + /** @description The unique identifier of the comment. */ + "comment-id": number; + /** @description A list of comma separated label names. Example: `bug,ui,@high` */ + labels: string; + /** @description account_id parameter */ + "account-id": number; + /** @description The unique identifier of the plan. */ + "plan-id": number; + /** @description The property to sort the results by. `created` means when the repository was starred. `updated` means when the repository was last pushed to. */ + sort: "created" | "updated"; + /** @description The account owner of the repository. The name is not case sensitive. */ + owner: string; + /** @description The name of the repository. The name is not case sensitive. */ + repo: string; + /** @description If `true`, show notifications marked as read. */ + all: boolean; + /** @description If `true`, only shows notifications in which the user is directly participating or mentioned. */ + participating: boolean; + /** @description Only show notifications updated before the given time. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. */ + before: string; + /** @description The unique identifier of the pull request thread. */ + "thread-id": number; + /** @description An organization ID. Only return organizations with an ID greater than this ID. */ + "since-org": number; + /** @description The organization name. The name is not case sensitive. */ + org: string; + /** @description The unique identifier of the repository. */ + "repository-id": number; + /** @description Only return runner groups that are allowed to be used by this repository. */ + "visible-to-repository": string; + /** @description The name of the secret. 
*/ + "secret-name": string; + /** @description The handle for the GitHub user account. */ + username: string; + /** @description The unique identifier of the group. */ + "group-id": number; + /** @description The unique identifier of the hook. */ + "hook-id": number; + /** @description The unique identifier of the invitation. */ + "invitation-id": number; + /** @description The name of the codespace. */ + "codespace-name": string; + /** @description The unique identifier of the migration. */ + "migration-id": number; + /** @description repo_name parameter */ + "repo-name": string; + /** @description The selected visibility of the packages. Only `container` package_types currently support `internal` visibility properly. For other ecosystems `internal` is synonymous with `private`. This parameter is optional and only filters an existing result set. */ + "package-visibility": "public" | "private" | "internal"; + /** @description The type of supported package. Packages in GitHub's Gradle registry have the type `maven`. Docker images pushed to GitHub's Container registry (`ghcr.io`) have the type `container`. You can use the type `docker` to find images that were pushed to GitHub's Docker registry (`docker.pkg.github.com`), even if these have now been migrated to the Container registry. */ + "package-type": + | "npm" + | "maven" + | "rubygems" + | "docker" + | "nuget" + | "container"; + /** @description The name of the package. */ + "package-name": string; + /** @description Unique identifier of the package version. */ + "package-version-id": number; + /** @description A cursor, as given in the [Link header](https://docs.github.com/rest/overview/resources-in-the-rest-api#link-header). If specified, the query only searches for events before this cursor. To receive an initial cursor on your first request, include an empty "before" query string. */ + "secret-scanning-pagination-before-org-repo": string; + /** @description A cursor, as given in the [Link header](https://docs.github.com/rest/overview/resources-in-the-rest-api#link-header). If specified, the query only searches for events after this cursor. To receive an initial cursor on your first request, include an empty "after" query string. */ + "secret-scanning-pagination-after-org-repo": string; + /** @description The slug of the team name. */ + "team-slug": string; + /** @description The number that identifies the discussion. */ + "discussion-number": number; + /** @description The number that identifies the comment. */ + "comment-number": number; + /** @description The unique identifier of the reaction. */ + "reaction-id": number; + /** @description The unique identifier of the project. */ + "project-id": number; + /** @description The unique identifier of the card. */ + "card-id": number; + /** @description The unique identifier of the column. */ + "column-id": number; + /** @description The unique identifier of the artifact. */ + "artifact-id": number; + /** @description The Git reference for the results you want to list. The `ref` for a branch can be formatted either as `refs/heads/` or simply ``. To reference a pull request use `refs/pull//merge`. */ + "git-ref": components["schemas"]["code-scanning-ref"]; + /** @description An explicit key or prefix for identifying the cache */ + "actions-cache-key": string; + /** @description The property to sort the results by. `created_at` means when the cache was created. `last_accessed_at` means when the cache was last accessed. `size_in_bytes` is the size of the cache in bytes. 
*/ + "actions-cache-list-sort": + | "created_at" + | "last_accessed_at" + | "size_in_bytes"; + /** @description A key for identifying the cache. */ + "actions-cache-key-required": string; + /** @description The unique identifier of the GitHub Actions cache. */ + "cache-id": number; + /** @description The unique identifier of the job. */ + "job-id": number; + /** @description Returns someone's workflow runs. Use the login for the user who created the `push` associated with the check suite or workflow run. */ + actor: string; + /** @description Returns workflow runs associated with a branch. Use the name of the branch of the `push`. */ + "workflow-run-branch": string; + /** @description Returns workflow run triggered by the event you specify. For example, `push`, `pull_request` or `issue`. For more information, see "[Events that trigger workflows](https://docs.github.com/en/actions/automating-your-workflow-with-github-actions/events-that-trigger-workflows)." */ + event: string; + /** @description Returns workflow runs with the check run `status` or `conclusion` that you specify. For example, a conclusion can be `success` or a status can be `in_progress`. Only GitHub can set a status of `waiting` or `requested`. For a list of the possible `status` and `conclusion` options, see "[Create a check run](https://docs.github.com/rest/reference/checks#create-a-check-run)." */ + "workflow-run-status": + | "completed" + | "action_required" + | "cancelled" + | "failure" + | "neutral" + | "skipped" + | "stale" + | "success" + | "timed_out" + | "in_progress" + | "queued" + | "requested" + | "waiting"; + /** @description Returns workflow runs created within the given date-time range. For more information on the syntax, see "[Understanding the search syntax](https://docs.github.com/search-github/getting-started-with-searching-on-github/understanding-the-search-syntax#query-for-dates)." */ + created: string; + /** @description If `true` pull requests are omitted from the response (empty array). */ + "exclude-pull-requests": boolean; + /** @description Returns workflow runs with the `check_suite_id` that you specify. */ + "workflow-run-check-suite-id": number; + /** @description The unique identifier of the workflow run. */ + "run-id": number; + /** @description The attempt number of the workflow run. */ + "attempt-number": number; + /** @description The ID of the workflow. You can also pass the workflow file name as a string. */ + "workflow-id": number | string; + /** @description The unique identifier of the autolink. */ + "autolink-id": number; + /** @description The name of the branch. */ + branch: string; + /** @description The unique identifier of the check run. */ + "check-run-id": number; + /** @description The unique identifier of the check suite. */ + "check-suite-id": number; + /** @description Returns check runs with the specified `name`. */ + "check-name": string; + /** @description Returns check runs with the specified `status`. */ + status: "queued" | "in_progress" | "completed"; + /** @description The number that identifies an alert. You can find this at the end of the URL for a code scanning alert within GitHub, and in the `number` field in the response from the `GET /repos/{owner}/{repo}/code-scanning/alerts` operation. */ + "alert-number": components["schemas"]["alert-number"]; + /** @description The SHA of the commit. */ + "commit-sha": string; + /** @description The full path, relative to the repository root, of the dependency manifest file. 
*/ + "manifest-path": string; + /** @description deployment_id parameter */ + "deployment-id": number; + /** @description The name of the environment */ + "environment-name": string; + /** @description A user ID. Only return users with an ID greater than this ID. */ + "since-user": number; + /** @description The number that identifies the issue. */ + "issue-number": number; + /** @description The unique identifier of the key. */ + "key-id": number; + /** @description The number that identifies the milestone. */ + "milestone-number": number; + /** @description The number that identifies the pull request. */ + "pull-number": number; + /** @description The unique identifier of the review. */ + "review-id": number; + /** @description The unique identifier of the asset. */ + "asset-id": number; + /** @description The unique identifier of the release. */ + "release-id": number; + /** @description The unique identifier of the tag protection. */ + "tag-protection-id": number; + /** @description The time frame to display results for. */ + per: "" | "day" | "week"; + /** @description A repository ID. Only return repositories with an ID greater than this ID. */ + "since-repo": number; + /** @description Used for pagination: the index of the first result to return. */ + "start-index": number; + /** @description Used for pagination: the number of results to return. */ + count: number; + /** @description Identifier generated by the GitHub SCIM endpoint. */ + "scim-group-id": string; + /** @description The unique identifier of the SCIM user. */ + "scim-user-id": string; + /** @description Determines whether the first search result returned is the highest number of matches (`desc`) or lowest number of matches (`asc`). This parameter is ignored unless you provide `sort`. */ + order: "desc" | "asc"; + /** @description The unique identifier of the team. */ + "team-id": number; + /** @description ID of the Repository to filter on */ + "repository-id-in-query": number; + /** @description The ID of the export operation, or `latest`. Currently only `latest` is currently supported. */ + "export-id": string; + /** @description The unique identifier of the GPG key. */ + "gpg-key-id": number; + }; + headers: { + link?: string; + "content-type"?: string; + "x-common-marker-version"?: string; + "x-rate-limit-limit"?: number; + "x-rate-limit-remaining"?: number; + "x-rate-limit-reset"?: number; + location?: string; + }; +} + +export interface operations { + /** Get Hypermedia links to resources accessible in GitHub's REST API */ + "meta/root": { + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["root"]; + }; + }; + }; + }; + /** + * Returns the GitHub App associated with the authentication credentials used. To see how many app installations are associated with this GitHub App, see the `installations_count` in the response. For more details about your app's installations, see the "[List installations for the authenticated app](https://docs.github.com/rest/reference/apps#list-installations-for-the-authenticated-app)" endpoint. + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. 
+ */ + "apps/get-authenticated": { + parameters: {}; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["integration"]; + }; + }; + }; + }; + /** Use this endpoint to complete the handshake necessary when implementing the [GitHub App Manifest flow](https://docs.github.com/apps/building-github-apps/creating-github-apps-from-a-manifest/). When you create a GitHub App with the manifest flow, you receive a temporary `code` used to retrieve the GitHub App's `id`, `pem` (private key), and `webhook_secret`. */ + "apps/create-from-manifest": { + parameters: { + path: { + code: string; + }; + }; + responses: { + /** Response */ + 201: { + content: { + "application/json": components["schemas"]["integration"] & + ({ + client_id: string; + client_secret: string; + webhook_secret: string | null; + pem: string; + } & { [key: string]: unknown }); + }; + }; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed_simple"]; + }; + }; + /** + * Returns the webhook configuration for a GitHub App. For more information about configuring a webhook for your app, see "[Creating a GitHub App](/developers/apps/creating-a-github-app)." + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + "apps/get-webhook-config-for-app": { + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["webhook-config"]; + }; + }; + }; + }; + /** + * Updates the webhook configuration for a GitHub App. For more information about configuring a webhook for your app, see "[Creating a GitHub App](/developers/apps/creating-a-github-app)." + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + "apps/update-webhook-config-for-app": { + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["webhook-config"]; + }; + }; + }; + requestBody: { + content: { + "application/json": { + url?: components["schemas"]["webhook-config-url"]; + content_type?: components["schemas"]["webhook-config-content-type"]; + secret?: components["schemas"]["webhook-config-secret"]; + insecure_ssl?: components["schemas"]["webhook-config-insecure-ssl"]; + }; + }; + }; + }; + /** + * Returns a list of webhook deliveries for the webhook configured for a GitHub App. + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + "apps/list-webhook-deliveries": { + parameters: { + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Used for pagination: the starting delivery from which the page of deliveries is fetched. Refer to the `link` header for the next and previous page cursors. */ + cursor?: components["parameters"]["cursor"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["hook-delivery-item"][]; + }; + }; + 400: components["responses"]["bad_request"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Returns a delivery for the webhook configured for a GitHub App. 
+ * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + "apps/get-webhook-delivery": { + parameters: { + path: { + delivery_id: components["parameters"]["delivery-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["hook-delivery"]; + }; + }; + 400: components["responses"]["bad_request"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Redeliver a delivery for the webhook configured for a GitHub App. + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + "apps/redeliver-webhook-delivery": { + parameters: { + path: { + delivery_id: components["parameters"]["delivery-id"]; + }; + }; + responses: { + 202: components["responses"]["accepted"]; + 400: components["responses"]["bad_request"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + * + * The permissions the installation has are included under the `permissions` key. + */ + "apps/list-installations": { + parameters: { + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + /** Only show notifications updated after the given time. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. */ + since?: components["parameters"]["since"]; + outdated?: string; + }; + }; + responses: { + /** The permissions the installation has are included under the `permissions` key. */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["installation"][]; + }; + }; + }; + }; + /** + * Enables an authenticated GitHub App to find an installation's information using the installation id. + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + "apps/get-installation": { + parameters: { + path: { + /** The unique identifier of the installation. */ + installation_id: components["parameters"]["installation-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["installation"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Uninstalls a GitHub App on a user, organization, or business account. If you prefer to temporarily suspend an app's access to your account's resources, then we recommend the "[Suspend an app installation](https://docs.github.com/rest/reference/apps/#suspend-an-app-installation)" endpoint. + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + "apps/delete-installation": { + parameters: { + path: { + /** The unique identifier of the installation. 
*/ + installation_id: components["parameters"]["installation-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Creates an installation access token that enables a GitHub App to make authenticated API requests for the app's installation on an organization or individual account. Installation tokens expire one hour from the time you create them. Using an expired token produces a status code of `401 - Unauthorized`, and requires creating a new installation token. By default the installation token has access to all repositories that the installation can access. To restrict the access to specific repositories, you can provide the `repository_ids` when creating the token. When you omit `repository_ids`, the response does not contain the `repositories` key. + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + "apps/create-installation-access-token": { + parameters: { + path: { + /** The unique identifier of the installation. */ + installation_id: components["parameters"]["installation-id"]; + }; + }; + responses: { + /** Response */ + 201: { + content: { + "application/json": components["schemas"]["installation-token"]; + }; + }; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description List of repository names that the token should have access to */ + repositories?: string[]; + /** + * @description List of repository IDs that the token should have access to + * @example [ + * 1 + * ] + */ + repository_ids?: number[]; + permissions?: components["schemas"]["app-permissions"]; + }; + }; + }; + }; + /** + * Suspends a GitHub App on a user, organization, or business account, which blocks the app from accessing the account's resources. When a GitHub App is suspended, the app's access to the GitHub API or webhook events is blocked for that account. + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + "apps/suspend-installation": { + parameters: { + path: { + /** The unique identifier of the installation. */ + installation_id: components["parameters"]["installation-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Removes a GitHub App installation suspension. + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + "apps/unsuspend-installation": { + parameters: { + path: { + /** The unique identifier of the installation. 
*/ + installation_id: components["parameters"]["installation-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + 404: components["responses"]["not_found"]; + }; + }; + /** + * **Deprecation Notice:** GitHub will discontinue the [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations/), which is used by integrations to create personal access tokens and OAuth tokens, and you must now create these tokens using our [web application flow](https://docs.github.com/developers/apps/authorizing-oauth-apps#web-application-flow). The [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations) will be removed on November, 13, 2020. For more information, including scheduled brownouts, see the [blog post](https://developer.github.com/changes/2020-02-14-deprecating-oauth-auth-endpoint/). + * + * You can use this API to list the set of OAuth applications that have been granted access to your account. Unlike the [list your authorizations](https://docs.github.com/rest/reference/oauth-authorizations#list-your-authorizations) API, this API does not manage individual tokens. This API will return one entry for each OAuth application that has been granted access to your account, regardless of the number of tokens an application has generated for your user. The list of OAuth applications returned matches what is shown on [the application authorizations settings screen within GitHub](https://github.com/settings/applications#authorized). The `scopes` returned are the union of scopes authorized for the application. For example, if an application has one token with `repo` scope and another token with `user` scope, the grant will return `["repo", "user"]`. + */ + "oauth-authorizations/list-grants": { + parameters: { + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + /** The client ID of your GitHub app. */ + client_id?: string; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["application-grant"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** **Deprecation Notice:** GitHub will discontinue the [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations), which is used by integrations to create personal access tokens and OAuth tokens, and you must now create these tokens using our [web application flow](https://docs.github.com/apps/building-oauth-apps/authorizing-oauth-apps/#web-application-flow). The [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations) will be removed on November, 13, 2020. For more information, including scheduled brownouts, see the [blog post](https://developer.github.com/changes/2020-02-14-deprecating-oauth-auth-endpoint/). */ + "oauth-authorizations/get-grant": { + parameters: { + path: { + /** The unique identifier of the grant. 
*/ + grant_id: components["parameters"]["grant-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["application-grant"]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + }; + }; + /** + * **Deprecation Notice:** GitHub will discontinue the [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations/), which is used by integrations to create personal access tokens and OAuth tokens, and you must now create these tokens using our [web application flow](https://docs.github.com/developers/apps/authorizing-oauth-apps#web-application-flow). The [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations/) will be removed on November, 13, 2020. For more information, including scheduled brownouts, see the [blog post](https://developer.github.com/changes/2020-02-14-deprecating-oauth-auth-endpoint/). + * + * Deleting an OAuth application's grant will also delete all OAuth tokens associated with the application for your user. Once deleted, the application has no access to your account and is no longer listed on [the application authorizations settings screen within GitHub](https://github.com/settings/applications#authorized). + */ + "oauth-authorizations/delete-grant": { + parameters: { + path: { + /** The unique identifier of the grant. */ + grant_id: components["parameters"]["grant-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + }; + }; + /** + * OAuth application owners can revoke a grant for their OAuth application and a specific user. You must use [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) when accessing this endpoint, using the OAuth application's `client_id` and `client_secret` as the username and password. You must also provide a valid OAuth `access_token` as an input parameter and the grant for the token's owner will be deleted. + * Deleting an OAuth application's grant will also delete all OAuth tokens associated with the application for the user. Once deleted, the application will have no access to the user's account and will no longer be listed on [the application authorizations settings screen within GitHub](https://github.com/settings/applications#authorized). + */ + "apps/delete-authorization": { + parameters: { + path: { + /** The client ID of the GitHub app. */ + client_id: components["parameters"]["client-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The OAuth access token used to authenticate to the GitHub API. */ + access_token: string; + }; + }; + }; + }; + /** OAuth applications can use a special API method for checking OAuth token validity without exceeding the normal rate limits for failed login attempts. Authentication works differently with this particular endpoint. You must use [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) to use this endpoint, where the username is the OAuth application `client_id` and the password is its `client_secret`. Invalid tokens will return `404 NOT FOUND`. 
*/ + "apps/check-token": { + parameters: { + path: { + /** The client ID of the GitHub app. */ + client_id: components["parameters"]["client-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["authorization"]; + }; + }; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The access_token of the OAuth application. */ + access_token: string; + }; + }; + }; + }; + /** OAuth application owners can revoke a single token for an OAuth application. You must use [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) when accessing this endpoint, using the OAuth application's `client_id` and `client_secret` as the username and password. */ + "apps/delete-token": { + parameters: { + path: { + /** The client ID of the GitHub app. */ + client_id: components["parameters"]["client-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The OAuth access token used to authenticate to the GitHub API. */ + access_token: string; + }; + }; + }; + }; + /** OAuth applications can use this API method to reset a valid OAuth token without end-user involvement. Applications must save the "token" property in the response because changes take effect immediately. You must use [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) when accessing this endpoint, using the OAuth application's `client_id` and `client_secret` as the username and password. Invalid tokens will return `404 NOT FOUND`. */ + "apps/reset-token": { + parameters: { + path: { + /** The client ID of the GitHub app. */ + client_id: components["parameters"]["client-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["authorization"]; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The access_token of the OAuth application. */ + access_token: string; + }; + }; + }; + }; + /** Use a non-scoped user-to-server OAuth access token to create a repository scoped and/or permission scoped user-to-server OAuth access token. You can specify which repositories the token can access and which permissions are granted to the token. You must use [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) when accessing this endpoint, using the OAuth application's `client_id` and `client_secret` as the username and password. Invalid tokens will return `404 NOT FOUND`. */ + "apps/scope-token": { + parameters: { + path: { + /** The client ID of the GitHub app. */ + client_id: components["parameters"]["client-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["authorization"]; + }; + }; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** + * @description The OAuth access token used to authenticate to the GitHub API. 
+ * @example e72e16c7e42f292c6912e7710c838347ae178b4a + */ + access_token: string; + /** + * @description The name of the user or organization to scope the user-to-server access token to. **Required** unless `target_id` is specified. + * @example octocat + */ + target?: string; + /** + * @description The ID of the user or organization to scope the user-to-server access token to. **Required** unless `target` is specified. + * @example 1 + */ + target_id?: number; + /** @description The list of repository names to scope the user-to-server access token to. `repositories` may not be specified if `repository_ids` is specified. */ + repositories?: string[]; + /** + * @description The list of repository IDs to scope the user-to-server access token to. `repository_ids` may not be specified if `repositories` is specified. + * @example [ + * 1 + * ] + */ + repository_ids?: number[]; + permissions?: components["schemas"]["app-permissions"]; + }; + }; + }; + }; + /** + * **Note**: The `:app_slug` is just the URL-friendly name of your GitHub App. You can find this on the settings page for your GitHub App (e.g., `https://github.com/settings/apps/:app_slug`). + * + * If the GitHub App you specify is public, you can access this endpoint without authenticating. If the GitHub App you specify is private, you must authenticate with a [personal access token](https://docs.github.com/articles/creating-a-personal-access-token-for-the-command-line/) or an [installation access token](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-an-installation) to access this endpoint. + */ + "apps/get-by-slug": { + parameters: { + path: { + app_slug: components["parameters"]["app-slug"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["integration"]; + }; + }; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** **Deprecation Notice:** GitHub will discontinue the [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations), which is used by integrations to create personal access tokens and OAuth tokens, and you must now create these tokens using our [web application flow](https://docs.github.com/apps/building-oauth-apps/authorizing-oauth-apps/#web-application-flow). The [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations) will be removed on November, 13, 2020. For more information, including scheduled brownouts, see the [blog post](https://developer.github.com/changes/2020-02-14-deprecating-oauth-auth-endpoint/). */ + "oauth-authorizations/list-authorizations": { + parameters: { + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + /** The client ID of your GitHub app. 
*/ + client_id?: string; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["authorization"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * **Deprecation Notice:** GitHub will discontinue the [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations), which is used by integrations to create personal access tokens and OAuth tokens, and you must now create these tokens using our [web application flow](https://docs.github.com/developers/apps/authorizing-oauth-apps#web-application-flow). The [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations) will be removed on November, 13, 2020. For more information, including scheduled brownouts, see the [blog post](https://developer.github.com/changes/2020-02-14-deprecating-oauth-auth-endpoint/). + * + * **Warning:** Apps must use the [web application flow](https://docs.github.com/apps/building-oauth-apps/authorizing-oauth-apps/#web-application-flow) to obtain OAuth tokens that work with GitHub SAML organizations. OAuth tokens created using the Authorizations API will be unable to access GitHub SAML organizations. For more information, see the [blog post](https://developer.github.com/changes/2019-11-05-deprecated-passwords-and-authorizations-api). + * + * Creates OAuth tokens using [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication). If you have two-factor authentication setup, Basic Authentication for this endpoint requires that you use a one-time password (OTP) and your username and password instead of tokens. For more information, see "[Working with two-factor authentication](https://docs.github.com/rest/overview/other-authentication-methods#working-with-two-factor-authentication)." + * + * To create tokens for a particular OAuth application using this endpoint, you must authenticate as the user you want to create an authorization for and provide the app's client ID and secret, found on your OAuth application's settings page. If your OAuth application intends to create multiple tokens for one user, use `fingerprint` to differentiate between them. + * + * You can also create tokens on GitHub from the [personal access tokens settings](https://github.com/settings/tokens) page. Read more about these tokens in [the GitHub Help documentation](https://docs.github.com/articles/creating-an-access-token-for-command-line-use). + * + * Organizations that enforce SAML SSO require personal access tokens to be allowed. Read more about allowing tokens in [the GitHub Help documentation](https://docs.github.com/articles/about-identity-and-access-management-with-saml-single-sign-on). + */ + "oauth-authorizations/create-authorization": { + parameters: {}; + responses: { + /** Response */ + 201: { + headers: { + Location?: string; + }; + content: { + "application/json": components["schemas"]["authorization"]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 410: components["responses"]["gone"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** + * @description A list of scopes that this authorization is in. 
+ * @example [ + * "public_repo", + * "user" + * ] + */ + scopes?: string[] | null; + /** + * @description A note to remind you what the OAuth token is for. + * @example Update all gems + */ + note?: string; + /** @description A URL to remind you what app the OAuth token is for. */ + note_url?: string; + /** @description The OAuth app client key for which to create the token. */ + client_id?: string; + /** @description The OAuth app client secret for which to create the token. */ + client_secret?: string; + /** @description A unique string to distinguish an authorization from others created for the same client ID and user. */ + fingerprint?: string; + }; + }; + }; + }; + /** + * **Deprecation Notice:** GitHub will discontinue the [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations/), which is used by integrations to create personal access tokens and OAuth tokens, and you must now create these tokens using our [web application flow](https://docs.github.com/developers/apps/authorizing-oauth-apps#web-application-flow). The [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations) will be removed on November, 13, 2020. For more information, including scheduled brownouts, see the [blog post](https://developer.github.com/changes/2020-02-14-deprecating-oauth-auth-endpoint/). + * + * **Warning:** Apps must use the [web application flow](https://docs.github.com/apps/building-oauth-apps/authorizing-oauth-apps/#web-application-flow) to obtain OAuth tokens that work with GitHub SAML organizations. OAuth tokens created using the Authorizations API will be unable to access GitHub SAML organizations. For more information, see the [blog post](https://developer.github.com/changes/2019-11-05-deprecated-passwords-and-authorizations-api). + * + * Creates a new authorization for the specified OAuth application, only if an authorization for that application doesn't already exist for the user. The URL includes the 20 character client ID for the OAuth app that is requesting the token. It returns the user's existing authorization for the application if one is present. Otherwise, it creates and returns a new one. + * + * If you have two-factor authentication setup, Basic Authentication for this endpoint requires that you use a one-time password (OTP) and your username and password instead of tokens. For more information, see "[Working with two-factor authentication](https://docs.github.com/rest/overview/other-authentication-methods#working-with-two-factor-authentication)." + * + * **Deprecation Notice:** GitHub will discontinue the [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations/), which is used by integrations to create personal access tokens and OAuth tokens, and you must now create these tokens using our [web application flow](https://docs.github.com/developers/apps/authorizing-oauth-apps#web-application-flow). The [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations) will be removed on November, 13, 2020. For more information, including scheduled brownouts, see the [blog post](https://developer.github.com/changes/2020-02-14-deprecating-oauth-auth-endpoint/). + */ + "oauth-authorizations/get-or-create-authorization-for-app": { + parameters: { + path: { + /** The client ID of the OAuth app. 
*/ + client_id: components["parameters"]["oauth-client-id"]; + }; + }; + responses: { + /** if returning an existing token */ + 200: { + headers: { + Location?: string; + }; + content: { + "application/json": components["schemas"]["authorization"]; + }; + }; + /** **Deprecation Notice:** GitHub will discontinue the [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations), which is used by integrations to create personal access tokens and OAuth tokens, and you must now create these tokens using our [web application flow](https://docs.github.com/apps/building-oauth-apps/authorizing-oauth-apps/#web-application-flow). The [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations) will be removed on November, 13, 2020. For more information, including scheduled brownouts, see the [blog post](https://developer.github.com/changes/2020-02-14-deprecating-oauth-auth-endpoint/). */ + 201: { + headers: { + Location?: string; + }; + content: { + "application/json": components["schemas"]["authorization"]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The OAuth app client secret for which to create the token. */ + client_secret: string; + /** + * @description A list of scopes that this authorization is in. + * @example [ + * "public_repo", + * "user" + * ] + */ + scopes?: string[] | null; + /** + * @description A note to remind you what the OAuth token is for. + * @example Update all gems + */ + note?: string; + /** @description A URL to remind you what app the OAuth token is for. */ + note_url?: string; + /** @description A unique string to distinguish an authorization from others created for the same client ID and user. */ + fingerprint?: string; + }; + }; + }; + }; + /** + * **Deprecation Notice:** GitHub will discontinue the [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations/), which is used by integrations to create personal access tokens and OAuth tokens, and you must now create these tokens using our [web application flow](https://docs.github.com/developers/apps/authorizing-oauth-apps#web-application-flow). The [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations) will be removed on November, 13, 2020. For more information, including scheduled brownouts, see the [blog post](https://developer.github.com/changes/2020-02-14-deprecating-oauth-auth-endpoint/). + * + * **Warning:** Apps must use the [web application flow](https://docs.github.com/apps/building-oauth-apps/authorizing-oauth-apps/#web-application-flow) to obtain OAuth tokens that work with GitHub SAML organizations. OAuth tokens created using the Authorizations API will be unable to access GitHub SAML organizations. For more information, see the [blog post](https://developer.github.com/changes/2019-11-05-deprecated-passwords-and-authorizations-api). + * + * This method will create a new authorization for the specified OAuth application, only if an authorization for that application and fingerprint do not already exist for the user. The URL includes the 20 character client ID for the OAuth app that is requesting the token. `fingerprint` is a unique string to distinguish an authorization from others created for the same client ID and user. 
It returns the user's existing authorization for the application if one is present. Otherwise, it creates and returns a new one. + * + * If you have two-factor authentication setup, Basic Authentication for this endpoint requires that you use a one-time password (OTP) and your username and password instead of tokens. For more information, see "[Working with two-factor authentication](https://docs.github.com/rest/overview/other-authentication-methods#working-with-two-factor-authentication)." + */ + "oauth-authorizations/get-or-create-authorization-for-app-and-fingerprint": { + parameters: { + path: { + /** The client ID of the OAuth app. */ + client_id: components["parameters"]["oauth-client-id"]; + fingerprint: string; + }; + }; + responses: { + /** if returning an existing token */ + 200: { + headers: { + Location?: string; + }; + content: { + "application/json": components["schemas"]["authorization"]; + }; + }; + /** Response if returning a new token */ + 201: { + headers: { + Location?: string; + }; + content: { + "application/json": components["schemas"]["authorization"]; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The OAuth app client secret for which to create the token. */ + client_secret: string; + /** + * @description A list of scopes that this authorization is in. + * @example [ + * "public_repo", + * "user" + * ] + */ + scopes?: string[] | null; + /** + * @description A note to remind you what the OAuth token is for. + * @example Update all gems + */ + note?: string; + /** @description A URL to remind you what app the OAuth token is for. */ + note_url?: string; + }; + }; + }; + }; + /** **Deprecation Notice:** GitHub will discontinue the [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations), which is used by integrations to create personal access tokens and OAuth tokens, and you must now create these tokens using our [web application flow](https://docs.github.com/apps/building-oauth-apps/authorizing-oauth-apps/#web-application-flow). The [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations) will be removed on November, 13, 2020. For more information, including scheduled brownouts, see the [blog post](https://developer.github.com/changes/2020-02-14-deprecating-oauth-auth-endpoint/). */ + "oauth-authorizations/get-authorization": { + parameters: { + path: { + /** The unique identifier of the authorization. */ + authorization_id: components["parameters"]["authorization-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["authorization"]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + }; + }; + /** **Deprecation Notice:** GitHub will discontinue the [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations), which is used by integrations to create personal access tokens and OAuth tokens, and you must now create these tokens using our [web application flow](https://docs.github.com/apps/building-oauth-apps/authorizing-oauth-apps/#web-application-flow). The [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations) will be removed on November, 13, 2020. 
For more information, including scheduled brownouts, see the [blog post](https://developer.github.com/changes/2020-02-14-deprecating-oauth-auth-endpoint/). */ + "oauth-authorizations/delete-authorization": { + parameters: { + path: { + /** The unique identifier of the authorization. */ + authorization_id: components["parameters"]["authorization-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + }; + }; + /** + * **Deprecation Notice:** GitHub will discontinue the [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations/), which is used by integrations to create personal access tokens and OAuth tokens, and you must now create these tokens using our [web application flow](https://docs.github.com/developers/apps/authorizing-oauth-apps#web-application-flow). The [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations) will be removed on November, 13, 2020. For more information, including scheduled brownouts, see the [blog post](https://developer.github.com/changes/2020-02-14-deprecating-oauth-auth-endpoint/). + * + * If you have two-factor authentication setup, Basic Authentication for this endpoint requires that you use a one-time password (OTP) and your username and password instead of tokens. For more information, see "[Working with two-factor authentication](https://docs.github.com/rest/overview/other-authentication-methods#working-with-two-factor-authentication)." + * + * You can only send one of these scope keys at a time. + */ + "oauth-authorizations/update-authorization": { + parameters: { + path: { + /** The unique identifier of the authorization. */ + authorization_id: components["parameters"]["authorization-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["authorization"]; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** + * @description A list of scopes that this authorization is in. + * @example [ + * "public_repo", + * "user" + * ] + */ + scopes?: string[] | null; + /** @description A list of scopes to add to this authorization. */ + add_scopes?: string[]; + /** @description A list of scopes to remove from this authorization. */ + remove_scopes?: string[]; + /** + * @description A note to remind you what the OAuth token is for. + * @example Update all gems + */ + note?: string; + /** @description A URL to remind you what app the OAuth token is for. */ + note_url?: string; + /** @description A unique string to distinguish an authorization from others created for the same client ID and user. */ + fingerprint?: string; + }; + }; + }; + }; + "codes-of-conduct/get-all-codes-of-conduct": { + parameters: {}; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["code-of-conduct"][]; + }; + }; + 304: components["responses"]["not_modified"]; + }; + }; + "codes-of-conduct/get-conduct-code": { + parameters: { + path: { + key: string; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["code-of-conduct"]; + }; + }; + 304: components["responses"]["not_modified"]; + 404: components["responses"]["not_found"]; + }; + }; + /** Lists all the emojis available to use on GitHub. 
*/ + "emojis/get": { + parameters: {}; + responses: { + /** Response */ + 200: { + content: { + "application/json": { [key: string]: string }; + }; + }; + 304: components["responses"]["not_modified"]; + }; + }; + /** + * Returns aggregate usage metrics for your GitHub Enterprise Server 3.5+ instance for a specified time period up to 365 days. + * + * To use this endpoint, your GitHub Enterprise Server instance must be connected to GitHub Enterprise Cloud using GitHub Connect. You must enable Server Statistics, and for the API request provide your enterprise account name or organization name connected to the GitHub Enterprise Server. For more information, see "[Enabling Server Statistics for your enterprise](/admin/configuration/configuring-github-connect/enabling-server-statistics-for-your-enterprise)" in the GitHub Enterprise Server documentation. + * + * You'll need to use a personal access token: + * - If you connected your GitHub Enterprise Server to an enterprise account and enabled Server Statistics, you'll need a personal access token with the `read:enterprise` permission. + * - If you connected your GitHub Enterprise Server to an organization account and enabled Server Statistics, you'll need a personal access token with the `read:org` permission. + * + * For more information on creating a personal access token, see "[Creating a personal access token](/authentication/keeping-your-account-and-data-secure/creating-a-personal-access-token)." + */ + "enterprise-admin/get-server-statistics": { + parameters: { + path: { + /** The slug version of the enterprise name or the login of an organization. */ + enterprise_or_org: components["parameters"]["enterprise-or-org"]; + }; + query: { + /** A cursor, as given in the [Link header](https://docs.github.com/rest/overview/resources-in-the-rest-api#link-header). If specified, the query only searches for events after this cursor. */ + date_start?: string; + /** A cursor, as given in the [Link header](https://docs.github.com/rest/overview/resources-in-the-rest-api#link-header). If specified, the query only searches for events before this cursor. */ + date_end?: string; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["server-statistics"]; + }; + }; + }; + }; + /** + * Gets the total GitHub Actions cache usage for an enterprise. + * The data fetched using this API is refreshed approximately every 5 minutes, so values returned from this endpoint may take at least 5 minutes to get updated. + * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. + */ + "actions/get-actions-cache-usage-for-enterprise": { + parameters: { + path: { + /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ + enterprise: components["parameters"]["enterprise"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["actions-cache-usage-org-enterprise"]; + }; + }; + }; + }; + /** + * Sets the GitHub Actions OpenID Connect (OIDC) custom issuer policy for an enterprise. + * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. + * GitHub Apps must have the `enterprise_administration:write` permission to use this endpoint. + */ + "actions/set-actions-oidc-custom-issuer-policy-for-enterprise": { + parameters: { + path: { + /** The slug version of the enterprise name. 
You can also substitute this value with the enterprise id. */ + enterprise: components["parameters"]["enterprise"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + requestBody: { + content: { + "application/json": components["schemas"]["actions-oidc-custom-issuer-policy-for-enterprise"]; + }; + }; + }; + /** + * Gets the GitHub Actions permissions policy for organizations and allowed actions and reusable workflows in an enterprise. + * + * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. + */ + "enterprise-admin/get-github-actions-permissions-enterprise": { + parameters: { + path: { + /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ + enterprise: components["parameters"]["enterprise"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["actions-enterprise-permissions"]; + }; + }; + }; + }; + /** + * Sets the GitHub Actions permissions policy for organizations and allowed actions and reusable workflows in an enterprise. + * + * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. + */ + "enterprise-admin/set-github-actions-permissions-enterprise": { + parameters: { + path: { + /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ + enterprise: components["parameters"]["enterprise"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + requestBody: { + content: { + "application/json": { + enabled_organizations: components["schemas"]["enabled-organizations"]; + allowed_actions?: components["schemas"]["allowed-actions"]; + }; + }; + }; + }; + /** + * Lists the organizations that are selected to have GitHub Actions enabled in an enterprise. To use this endpoint, the enterprise permission policy for `enabled_organizations` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an enterprise](#set-github-actions-permissions-for-an-enterprise)." + * + * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. + */ + "enterprise-admin/list-selected-organizations-enabled-github-actions-enterprise": { + parameters: { + path: { + /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ + enterprise: components["parameters"]["enterprise"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": { + total_count: number; + organizations: components["schemas"]["organization-simple"][]; + }; + }; + }; + }; + }; + /** + * Replaces the list of selected organizations that are enabled for GitHub Actions in an enterprise. To use this endpoint, the enterprise permission policy for `enabled_organizations` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an enterprise](#set-github-actions-permissions-for-an-enterprise)." + * + * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. + */ + "enterprise-admin/set-selected-organizations-enabled-github-actions-enterprise": { + parameters: { + path: { + /** The slug version of the enterprise name. 
You can also substitute this value with the enterprise id. */ + enterprise: components["parameters"]["enterprise"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + requestBody: { + content: { + "application/json": { + /** @description List of organization IDs to enable for GitHub Actions. */ + selected_organization_ids: number[]; + }; + }; + }; + }; + /** + * Adds an organization to the list of selected organizations that are enabled for GitHub Actions in an enterprise. To use this endpoint, the enterprise permission policy for `enabled_organizations` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an enterprise](#set-github-actions-permissions-for-an-enterprise)." + * + * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. + */ + "enterprise-admin/enable-selected-organization-github-actions-enterprise": { + parameters: { + path: { + /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ + enterprise: components["parameters"]["enterprise"]; + /** The unique identifier of the organization. */ + org_id: components["parameters"]["org-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** + * Removes an organization from the list of selected organizations that are enabled for GitHub Actions in an enterprise. To use this endpoint, the enterprise permission policy for `enabled_organizations` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an enterprise](#set-github-actions-permissions-for-an-enterprise)." + * + * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. + */ + "enterprise-admin/disable-selected-organization-github-actions-enterprise": { + parameters: { + path: { + /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ + enterprise: components["parameters"]["enterprise"]; + /** The unique identifier of the organization. */ + org_id: components["parameters"]["org-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** + * Gets the selected actions and reusable workflows that are allowed in an enterprise. To use this endpoint, the enterprise permission policy for `allowed_actions` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an enterprise](#set-github-actions-permissions-for-an-enterprise)." + * + * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. + */ + "enterprise-admin/get-allowed-actions-enterprise": { + parameters: { + path: { + /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ + enterprise: components["parameters"]["enterprise"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["selected-actions"]; + }; + }; + }; + }; + /** + * Sets the actions and reusable workflows that are allowed in an enterprise. To use this endpoint, the enterprise permission policy for `allowed_actions` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an enterprise](#set-github-actions-permissions-for-an-enterprise)." + * + * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. 
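+ *
+ * A minimal usage sketch, assuming this operation is served at `PUT /enterprises/{enterprise}/actions/permissions/selected-actions` and that the request body follows the `selected-actions` schema (route, client, and field names are assumptions, not taken from this file):
+ *
+ * ```
+ * import { Octokit } from "@octokit/core";
+ *
+ * const octokit = new Octokit({ auth: process.env.ENTERPRISE_ADMIN_TOKEN });
+ * // Allow GitHub-owned actions plus one specific pattern; a 204 response
+ * // means the policy was applied.
+ * await octokit.request(
+ *   "PUT /enterprises/{enterprise}/actions/permissions/selected-actions",
+ *   {
+ *     enterprise: "octo-enterprise",
+ *     github_owned_allowed: true,
+ *     verified_allowed: false,
+ *     patterns_allowed: ["octocat/hello-world@*"],
+ *   }
+ * );
+ * ```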
+ */ + "enterprise-admin/set-allowed-actions-enterprise": { + parameters: { + path: { + /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ + enterprise: components["parameters"]["enterprise"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + requestBody: { + content: { + "application/json": components["schemas"]["selected-actions"]; + }; + }; + }; + /** + * Gets the default workflow permissions granted to the `GITHUB_TOKEN` when running workflows in an enterprise, + * as well as whether GitHub Actions can submit approving pull request reviews. For more information, see + * "[Enforcing a policy for workflow permissions in your enterprise](https://docs.github.com/enterprise-cloud@latest/admin/policies/enforcing-policies-for-your-enterprise/enforcing-policies-for-github-actions-in-your-enterprise#enforcing-a-policy-for-workflow-permissions-in-your-enterprise)." + * + * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. + * GitHub Apps must have the `enterprise_administration:write` permission to use this endpoint. + */ + "actions/get-github-actions-default-workflow-permissions-enterprise": { + parameters: { + path: { + /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ + enterprise: components["parameters"]["enterprise"]; + }; + }; + responses: { + /** Success response */ + 200: { + content: { + "application/json": components["schemas"]["actions-get-default-workflow-permissions"]; + }; + }; + }; + }; + /** + * Sets the default workflow permissions granted to the `GITHUB_TOKEN` when running workflows in an enterprise, and sets + * whether GitHub Actions can submit approving pull request reviews. For more information, see + * "[Enforcing a policy for workflow permissions in your enterprise](https://docs.github.com/enterprise-cloud@latest/admin/policies/enforcing-policies-for-your-enterprise/enforcing-policies-for-github-actions-in-your-enterprise#enforcing-a-policy-for-workflow-permissions-in-your-enterprise)." + * + * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. + * GitHub Apps must have the `enterprise_administration:write` permission to use this endpoint. + */ + "actions/set-github-actions-default-workflow-permissions-enterprise": { + parameters: { + path: { + /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ + enterprise: components["parameters"]["enterprise"]; + }; + }; + responses: { + /** Success response */ + 204: never; + }; + requestBody: { + content: { + "application/json": components["schemas"]["actions-set-default-workflow-permissions"]; + }; + }; + }; + /** + * Lists all self-hosted runner groups for an enterprise. + * + * You must authenticate using an access token with the `manage_runners:enterprise` scope to use this endpoint. + */ + "enterprise-admin/list-self-hosted-runner-groups-for-enterprise": { + parameters: { + path: { + /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ + enterprise: components["parameters"]["enterprise"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + /** Only return runner groups that are allowed to be used by this organization. 
*/ + visible_to_organization?: components["parameters"]["visible-to-organization"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": { + total_count: number; + runner_groups: components["schemas"]["runner-groups-enterprise"][]; + }; + }; + }; + }; + }; + /** + * Creates a new self-hosted runner group for an enterprise. + * + * You must authenticate using an access token with the `manage_runners:enterprise` scope to use this endpoint. + */ + "enterprise-admin/create-self-hosted-runner-group-for-enterprise": { + parameters: { + path: { + /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ + enterprise: components["parameters"]["enterprise"]; + }; + }; + responses: { + /** Response */ + 201: { + content: { + "application/json": components["schemas"]["runner-groups-enterprise"]; + }; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description Name of the runner group. */ + name: string; + /** + * @description Visibility of a runner group. You can select all organizations or select individual organization. + * @enum {string} + */ + visibility?: "selected" | "all"; + /** @description List of organization IDs that can access the runner group. */ + selected_organization_ids?: number[]; + /** @description List of runner IDs to add to the runner group. */ + runners?: number[]; + /** + * @description Whether the runner group can be used by `public` repositories. + * @default false + */ + allows_public_repositories?: boolean; + /** + * @description If `true`, the runner group will be restricted to running only the workflows specified in the `selected_workflows` array. + * @default false + */ + restricted_to_workflows?: boolean; + /** @description List of workflows the runner group should be allowed to run. This setting will be ignored unless `restricted_to_workflows` is set to `true`. */ + selected_workflows?: string[]; + }; + }; + }; + }; + /** + * Gets a specific self-hosted runner group for an enterprise. + * + * You must authenticate using an access token with the `manage_runners:enterprise` scope to use this endpoint. + */ + "enterprise-admin/get-self-hosted-runner-group-for-enterprise": { + parameters: { + path: { + /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ + enterprise: components["parameters"]["enterprise"]; + /** Unique identifier of the self-hosted runner group. */ + runner_group_id: components["parameters"]["runner-group-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["runner-groups-enterprise"]; + }; + }; + }; + }; + /** + * Deletes a self-hosted runner group for an enterprise. + * + * You must authenticate using an access token with the `manage_runners:enterprise` scope to use this endpoint. + */ + "enterprise-admin/delete-self-hosted-runner-group-from-enterprise": { + parameters: { + path: { + /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ + enterprise: components["parameters"]["enterprise"]; + /** Unique identifier of the self-hosted runner group. */ + runner_group_id: components["parameters"]["runner-group-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** + * Updates the `name` and `visibility` of a self-hosted runner group in an enterprise. 
+ * + * You must authenticate using an access token with the `manage_runners:enterprise` scope to use this endpoint. + */ + "enterprise-admin/update-self-hosted-runner-group-for-enterprise": { + parameters: { + path: { + /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ + enterprise: components["parameters"]["enterprise"]; + /** Unique identifier of the self-hosted runner group. */ + runner_group_id: components["parameters"]["runner-group-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["runner-groups-enterprise"]; + }; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description Name of the runner group. */ + name?: string; + /** + * @description Visibility of a runner group. You can select all organizations or select individual organizations. + * @default all + * @enum {string} + */ + visibility?: "selected" | "all"; + /** + * @description Whether the runner group can be used by `public` repositories. + * @default false + */ + allows_public_repositories?: boolean; + /** + * @description If `true`, the runner group will be restricted to running only the workflows specified in the `selected_workflows` array. + * @default false + */ + restricted_to_workflows?: boolean; + /** @description List of workflows the runner group should be allowed to run. This setting will be ignored unless `restricted_to_workflows` is set to `true`. */ + selected_workflows?: string[]; + }; + }; + }; + }; + /** + * Lists the organizations with access to a self-hosted runner group. + * + * You must authenticate using an access token with the `manage_runners:enterprise` scope to use this endpoint. + */ + "enterprise-admin/list-org-access-to-self-hosted-runner-group-in-enterprise": { + parameters: { + path: { + /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ + enterprise: components["parameters"]["enterprise"]; + /** Unique identifier of the self-hosted runner group. */ + runner_group_id: components["parameters"]["runner-group-id"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": { + total_count: number; + organizations: components["schemas"]["organization-simple"][]; + }; + }; + }; + }; + }; + /** + * Replaces the list of organizations that have access to a self-hosted runner configured in an enterprise. + * + * You must authenticate using an access token with the `manage_runners:enterprise` scope to use this endpoint. + */ + "enterprise-admin/set-org-access-to-self-hosted-runner-group-in-enterprise": { + parameters: { + path: { + /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ + enterprise: components["parameters"]["enterprise"]; + /** Unique identifier of the self-hosted runner group. */ + runner_group_id: components["parameters"]["runner-group-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + requestBody: { + content: { + "application/json": { + /** @description List of organization IDs that can access the runner group. */ + selected_organization_ids: number[]; + }; + }; + }; + }; + /** + * Adds an organization to the list of selected organizations that can access a self-hosted runner group. 
The runner group must have `visibility` set to `selected`. For more information, see "[Create a self-hosted runner group for an enterprise](#create-a-self-hosted-runner-group-for-an-enterprise)." + * + * You must authenticate using an access token with the `manage_runners:enterprise` scope to use this endpoint. + */ + "enterprise-admin/add-org-access-to-self-hosted-runner-group-in-enterprise": { + parameters: { + path: { + /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ + enterprise: components["parameters"]["enterprise"]; + /** Unique identifier of the self-hosted runner group. */ + runner_group_id: components["parameters"]["runner-group-id"]; + /** The unique identifier of the organization. */ + org_id: components["parameters"]["org-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** + * Removes an organization from the list of selected organizations that can access a self-hosted runner group. The runner group must have `visibility` set to `selected`. For more information, see "[Create a self-hosted runner group for an enterprise](#create-a-self-hosted-runner-group-for-an-enterprise)." + * + * You must authenticate using an access token with the `manage_runners:enterprise` scope to use this endpoint. + */ + "enterprise-admin/remove-org-access-to-self-hosted-runner-group-in-enterprise": { + parameters: { + path: { + /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ + enterprise: components["parameters"]["enterprise"]; + /** Unique identifier of the self-hosted runner group. */ + runner_group_id: components["parameters"]["runner-group-id"]; + /** The unique identifier of the organization. */ + org_id: components["parameters"]["org-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** + * Lists the self-hosted runners that are in a specific enterprise group. + * + * You must authenticate using an access token with the `manage_runners:enterprise` scope to use this endpoint. + */ + "enterprise-admin/list-self-hosted-runners-in-group-for-enterprise": { + parameters: { + path: { + /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ + enterprise: components["parameters"]["enterprise"]; + /** Unique identifier of the self-hosted runner group. */ + runner_group_id: components["parameters"]["runner-group-id"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": { + total_count: number; + runners: components["schemas"]["runner"][]; + }; + }; + }; + }; + }; + /** + * Replaces the list of self-hosted runners that are part of an enterprise runner group. + * + * You must authenticate using an access token with the `manage_runners:enterprise` scope to use this endpoint. + */ + "enterprise-admin/set-self-hosted-runners-in-group-for-enterprise": { + parameters: { + path: { + /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ + enterprise: components["parameters"]["enterprise"]; + /** Unique identifier of the self-hosted runner group. 
*/ + runner_group_id: components["parameters"]["runner-group-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + requestBody: { + content: { + "application/json": { + /** @description List of runner IDs to add to the runner group. */ + runners: number[]; + }; + }; + }; + }; + /** + * Adds a self-hosted runner to a runner group configured in an enterprise. + * + * You must authenticate using an access token with the `manage_runners:enterprise` + * scope to use this endpoint. + */ + "enterprise-admin/add-self-hosted-runner-to-group-for-enterprise": { + parameters: { + path: { + /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ + enterprise: components["parameters"]["enterprise"]; + /** Unique identifier of the self-hosted runner group. */ + runner_group_id: components["parameters"]["runner-group-id"]; + /** Unique identifier of the self-hosted runner. */ + runner_id: components["parameters"]["runner-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** + * Removes a self-hosted runner from a group configured in an enterprise. The runner is then returned to the default group. + * + * You must authenticate using an access token with the `manage_runners:enterprise` scope to use this endpoint. + */ + "enterprise-admin/remove-self-hosted-runner-from-group-for-enterprise": { + parameters: { + path: { + /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ + enterprise: components["parameters"]["enterprise"]; + /** Unique identifier of the self-hosted runner group. */ + runner_group_id: components["parameters"]["runner-group-id"]; + /** Unique identifier of the self-hosted runner. */ + runner_id: components["parameters"]["runner-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** + * Lists all self-hosted runners configured for an enterprise. + * + * You must authenticate using an access token with the `manage_runners:enterprise` scope to use this endpoint. + */ + "enterprise-admin/list-self-hosted-runners-for-enterprise": { + parameters: { + path: { + /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ + enterprise: components["parameters"]["enterprise"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": { + total_count?: number; + runners?: components["schemas"]["runner"][]; + }; + }; + }; + }; + }; + /** + * Lists binaries for the runner application that you can download and run. + * + * You must authenticate using an access token with the `manage_runners:enterprise` scope to use this endpoint. + */ + "enterprise-admin/list-runner-applications-for-enterprise": { + parameters: { + path: { + /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ + enterprise: components["parameters"]["enterprise"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["runner-application"][]; + }; + }; + }; + }; + /** + * Returns a token that you can pass to the `config` script. The token expires after one hour. 
+ * + * You must authenticate using an access token with the `manage_runners:enterprise` scope to use this endpoint. + * + * #### Example using registration token + * + * Configure your self-hosted runner, replacing `TOKEN` with the registration token provided by this endpoint. + * + * ``` + * ./config.sh --url https://github.com/enterprises/octo-enterprise --token TOKEN + * ``` + */ + "enterprise-admin/create-registration-token-for-enterprise": { + parameters: { + path: { + /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ + enterprise: components["parameters"]["enterprise"]; + }; + }; + responses: { + /** Response */ + 201: { + content: { + "application/json": components["schemas"]["authentication-token"]; + }; + }; + }; + }; + /** + * Returns a token that you can pass to the `config` script to remove a self-hosted runner from an enterprise. The token expires after one hour. + * + * You must authenticate using an access token with the `manage_runners:enterprise` scope to use this endpoint. + * + * #### Example using remove token + * + * To remove your self-hosted runner from an enterprise, replace `TOKEN` with the remove token provided by this + * endpoint. + * + * ``` + * ./config.sh remove --token TOKEN + * ``` + */ + "enterprise-admin/create-remove-token-for-enterprise": { + parameters: { + path: { + /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ + enterprise: components["parameters"]["enterprise"]; + }; + }; + responses: { + /** Response */ + 201: { + content: { + "application/json": components["schemas"]["authentication-token"]; + }; + }; + }; + }; + /** + * Gets a specific self-hosted runner configured in an enterprise. + * + * You must authenticate using an access token with the `manage_runners:enterprise` scope to use this endpoint. + */ + "enterprise-admin/get-self-hosted-runner-for-enterprise": { + parameters: { + path: { + /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ + enterprise: components["parameters"]["enterprise"]; + /** Unique identifier of the self-hosted runner. */ + runner_id: components["parameters"]["runner-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["runner"]; + }; + }; + }; + }; + /** + * Forces the removal of a self-hosted runner from an enterprise. You can use this endpoint to completely remove the runner when the machine you were using no longer exists. + * + * You must authenticate using an access token with the `manage_runners:enterprise` scope to use this endpoint. + */ + "enterprise-admin/delete-self-hosted-runner-from-enterprise": { + parameters: { + path: { + /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ + enterprise: components["parameters"]["enterprise"]; + /** Unique identifier of the self-hosted runner. */ + runner_id: components["parameters"]["runner-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** + * Lists all labels for a self-hosted runner configured in an enterprise. + * + * You must authenticate using an access token with the `manage_runners:enterprise` scope to use this endpoint. + */ + "enterprise-admin/list-labels-for-self-hosted-runner-for-enterprise": { + parameters: { + path: { + /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. 
*/ + enterprise: components["parameters"]["enterprise"]; + /** Unique identifier of the self-hosted runner. */ + runner_id: components["parameters"]["runner-id"]; + }; + }; + responses: { + 200: components["responses"]["actions_runner_labels"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Remove all previous custom labels and set the new custom labels for a specific + * self-hosted runner configured in an enterprise. + * + * You must authenticate using an access token with the `manage_runners:enterprise` scope to use this endpoint. + */ + "enterprise-admin/set-custom-labels-for-self-hosted-runner-for-enterprise": { + parameters: { + path: { + /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ + enterprise: components["parameters"]["enterprise"]; + /** Unique identifier of the self-hosted runner. */ + runner_id: components["parameters"]["runner-id"]; + }; + }; + responses: { + 200: components["responses"]["actions_runner_labels"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed_simple"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The names of the custom labels to set for the runner. You can pass an empty array to remove all custom labels. */ + labels: string[]; + }; + }; + }; + }; + /** + * Add custom labels to a self-hosted runner configured in an enterprise. + * + * You must authenticate using an access token with the `manage_runners:enterprise` scope to use this endpoint. + */ + "enterprise-admin/add-custom-labels-to-self-hosted-runner-for-enterprise": { + parameters: { + path: { + /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ + enterprise: components["parameters"]["enterprise"]; + /** Unique identifier of the self-hosted runner. */ + runner_id: components["parameters"]["runner-id"]; + }; + }; + responses: { + 200: components["responses"]["actions_runner_labels"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed_simple"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The names of the custom labels to add to the runner. */ + labels: string[]; + }; + }; + }; + }; + /** + * Remove all custom labels from a self-hosted runner configured in an + * enterprise. Returns the remaining read-only labels from the runner. + * + * You must authenticate using an access token with the `manage_runners:enterprise` scope to use this endpoint. + */ + "enterprise-admin/remove-all-custom-labels-from-self-hosted-runner-for-enterprise": { + parameters: { + path: { + /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ + enterprise: components["parameters"]["enterprise"]; + /** Unique identifier of the self-hosted runner. */ + runner_id: components["parameters"]["runner-id"]; + }; + }; + responses: { + 200: components["responses"]["actions_runner_labels_readonly"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed_simple"]; + }; + }; + /** + * Remove a custom label from a self-hosted runner configured + * in an enterprise. Returns the remaining labels from the runner. + * + * This endpoint returns a `404 Not Found` status if the custom label is not + * present on the runner. + * + * You must authenticate using an access token with the `manage_runners:enterprise` scope to use this endpoint. 
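+ *
+ * A minimal usage sketch, assuming this operation is served at `DELETE /enterprises/{enterprise}/actions/runners/{runner_id}/labels/{name}` and is called with `octokit.request` from `@octokit/core` (route, client, and response shape are assumptions, not taken from this file):
+ *
+ * ```
+ * import { Octokit } from "@octokit/core";
+ *
+ * const octokit = new Octokit({ auth: process.env.ENTERPRISE_ADMIN_TOKEN });
+ * // Removes the "gpu" label from runner 42; the response body is expected to
+ * // list the labels that remain on the runner.
+ * const { data } = await octokit.request(
+ *   "DELETE /enterprises/{enterprise}/actions/runners/{runner_id}/labels/{name}",
+ *   { enterprise: "octo-enterprise", runner_id: 42, name: "gpu" }
+ * );
+ * console.log(data.labels.map((label) => label.name));
+ * ```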
+ */ + "enterprise-admin/remove-custom-label-from-self-hosted-runner-for-enterprise": { + parameters: { + path: { + /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ + enterprise: components["parameters"]["enterprise"]; + /** Unique identifier of the self-hosted runner. */ + runner_id: components["parameters"]["runner-id"]; + /** The name of a self-hosted runner's custom label. */ + name: components["parameters"]["runner-label-name"]; + }; + }; + responses: { + 200: components["responses"]["actions_runner_labels"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed_simple"]; + }; + }; + /** Gets the audit log for an enterprise. To use this endpoint, you must be an enterprise admin, and you must use an access token with the `admin:enterprise` scope. */ + "enterprise-admin/get-audit-log": { + parameters: { + path: { + /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ + enterprise: components["parameters"]["enterprise"]; + }; + query: { + /** A search phrase. For more information, see [Searching the audit log](https://docs.github.com/github/setting-up-and-managing-organizations-and-teams/reviewing-the-audit-log-for-your-organization#searching-the-audit-log). */ + phrase?: components["parameters"]["audit-log-phrase"]; + /** + * The event types to include: + * + * - `web` - returns web (non-Git) events. + * - `git` - returns Git events. + * - `all` - returns both web and Git events. + * + * The default is `web`. + */ + include?: components["parameters"]["audit-log-include"]; + /** A cursor, as given in the [Link header](https://docs.github.com/rest/overview/resources-in-the-rest-api#link-header). If specified, the query only searches for events after this cursor. */ + after?: components["parameters"]["audit-log-after"]; + /** A cursor, as given in the [Link header](https://docs.github.com/rest/overview/resources-in-the-rest-api#link-header). If specified, the query only searches for events before this cursor. */ + before?: components["parameters"]["audit-log-before"]; + /** + * The order of audit log events. To list newest events first, specify `desc`. To list oldest events first, specify `asc`. + * + * The default is `desc`. + */ + order?: components["parameters"]["audit-log-order"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["audit-log-event"][]; + }; + }; + }; + }; + /** + * Lists code scanning alerts for the default branch for all eligible repositories in an enterprise. Eligible repositories are repositories that are owned by organizations that you own or for which you are a security manager. For more information, see "[Managing security managers in your organization](https://docs.github.com/organizations/managing-peoples-access-to-your-organization-with-roles/managing-security-managers-in-your-organization)." + * + * To use this endpoint, you must be a member of the enterprise, + * and you must use an access token with the `repo` scope or `security_events` scope. + */ + "code-scanning/list-alerts-for-enterprise": { + parameters: { + path: { + /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. 
*/ + enterprise: components["parameters"]["enterprise"]; + }; + query: { + /** The name of a code scanning tool. Only results by this tool will be listed. You can specify the tool by using either `tool_name` or `tool_guid`, but not both. */ + tool_name?: components["parameters"]["tool-name"]; + /** The GUID of a code scanning tool. Only results by this tool will be listed. Note that some code scanning tools may not include a GUID in their analysis data. You can specify the tool by using either `tool_guid` or `tool_name`, but not both. */ + tool_guid?: components["parameters"]["tool-guid"]; + /** A cursor, as given in the [Link header](https://docs.github.com/rest/overview/resources-in-the-rest-api#link-header). If specified, the query only searches for events before this cursor. */ + before?: components["parameters"]["pagination-before"]; + /** A cursor, as given in the [Link header](https://docs.github.com/rest/overview/resources-in-the-rest-api#link-header). If specified, the query only searches for events after this cursor. */ + after?: components["parameters"]["pagination-after"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** The direction to sort the results by. */ + direction?: components["parameters"]["direction"]; + /** If specified, only code scanning alerts with this state will be returned. */ + state?: components["schemas"]["code-scanning-alert-state"]; + /** The property by which to sort the results. */ + sort?: "created" | "updated"; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["code-scanning-organization-alert-items"][]; + }; + }; + 403: components["responses"]["code_scanning_forbidden_read"]; + 404: components["responses"]["not_found"]; + 503: components["responses"]["service_unavailable"]; + }; + }; + /** + * Lists secret scanning alerts for eligible repositories in an enterprise, from newest to oldest. + * To use this endpoint, you must be a member of the enterprise, and you must use an access token with the `repo` scope or `security_events` scope. Alerts are only returned for organizations in the enterprise for which you are an organization owner or a [security manager](https://docs.github.com/organizations/managing-peoples-access-to-your-organization-with-roles/managing-security-managers-in-your-organization). + */ + "secret-scanning/list-alerts-for-enterprise": { + parameters: { + path: { + /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ + enterprise: components["parameters"]["enterprise"]; + }; + query: { + /** Set to `open` or `resolved` to only list secret scanning alerts in a specific state. */ + state?: components["parameters"]["secret-scanning-alert-state"]; + /** + * A comma-separated list of secret types to return. By default all secret types are returned. + * See "[Secret scanning patterns](https://docs.github.com/code-security/secret-scanning/secret-scanning-patterns#supported-secrets-for-advanced-security)" + * for a complete list of secret types. + */ + secret_type?: components["parameters"]["secret-scanning-alert-secret-type"]; + /** A comma-separated list of resolutions. Only secret scanning alerts with one of these resolutions are listed. Valid resolutions are `false_positive`, `wont_fix`, `revoked`, `pattern_edited`, `pattern_deleted` or `used_in_tests`. 
*/ + resolution?: components["parameters"]["secret-scanning-alert-resolution"]; + /** The property to sort the results by. `created` means when the alert was created. `updated` means when the alert was updated or resolved. */ + sort?: components["parameters"]["secret-scanning-alert-sort"]; + /** The direction to sort the results by. */ + direction?: components["parameters"]["direction"]; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** A cursor, as given in the [Link header](https://docs.github.com/rest/overview/resources-in-the-rest-api#link-header). If specified, the query only searches for events before this cursor. */ + before?: components["parameters"]["pagination-before"]; + /** A cursor, as given in the [Link header](https://docs.github.com/rest/overview/resources-in-the-rest-api#link-header). If specified, the query only searches for events after this cursor. */ + after?: components["parameters"]["pagination-after"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["organization-secret-scanning-alert"][]; + }; + }; + 404: components["responses"]["not_found"]; + 503: components["responses"]["service_unavailable"]; + }; + }; + /** + * Gets the summary of the free and paid GitHub Actions minutes used. + * + * Paid minutes only apply to workflows in private repositories that use GitHub-hosted runners. Minutes used is listed for each GitHub-hosted runner operating system. Any job re-runs are also included in the usage. The usage does not include the multiplier for macOS and Windows runners and is not rounded up to the nearest whole minute. For more information, see "[Managing billing for GitHub Actions](https://docs.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-actions)". + * + * The authenticated user must be an enterprise admin. + */ + "billing/get-github-actions-billing-ghe": { + parameters: { + path: { + /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ + enterprise: components["parameters"]["enterprise"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["actions-billing-usage"]; + }; + }; + }; + }; + /** + * Gets the GitHub Advanced Security active committers for an enterprise per repository. + * + * Each distinct user login across all repositories is counted as a single Advanced Security seat, so the `total_advanced_security_committers` is not the sum of active_users for each repository. + * + * The total number of repositories with committer information is tracked by the `total_count` field. + */ + "billing/get-github-advanced-security-billing-ghe": { + parameters: { + path: { + /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ + enterprise: components["parameters"]["enterprise"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Success */ + 200: { + content: { + "application/json": components["schemas"]["advanced-security-active-committers"]; + }; + }; + 403: components["responses"]["code_scanning_forbidden_read"]; + }; + }; + /** + * Gets the free and paid storage used for GitHub Packages in gigabytes. 
+ * + * Paid minutes only apply to packages stored for private repositories. For more information, see "[Managing billing for GitHub Packages](https://docs.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-packages)." + * + * The authenticated user must be an enterprise admin. + */ + "billing/get-github-packages-billing-ghe": { + parameters: { + path: { + /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ + enterprise: components["parameters"]["enterprise"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["packages-billing-usage"]; + }; + }; + }; + }; + /** + * Gets the estimated paid and estimated total storage used for GitHub Actions and GitHub Packages. + * + * Paid minutes only apply to packages stored for private repositories. For more information, see "[Managing billing for GitHub Packages](https://docs.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-packages)." + * + * The authenticated user must be an enterprise admin. + */ + "billing/get-shared-storage-billing-ghe": { + parameters: { + path: { + /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ + enterprise: components["parameters"]["enterprise"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["combined-billing-usage"]; + }; + }; + }; + }; + /** We delay the public events feed by five minutes, which means the most recent event returned by the public events API actually occurred at least five minutes ago. */ + "activity/list-public-events": { + parameters: { + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["event"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 403: components["responses"]["forbidden"]; + 503: components["responses"]["service_unavailable"]; + }; + }; + /** + * GitHub provides several timeline resources in [Atom](http://en.wikipedia.org/wiki/Atom_(standard)) format. The Feeds API lists all the feeds available to the authenticated user: + * + * * **Timeline**: The GitHub global public timeline + * * **User**: The public timeline for any user, using [URI template](https://docs.github.com/rest/overview/resources-in-the-rest-api#hypermedia) + * * **Current user public**: The public timeline for the authenticated user + * * **Current user**: The private timeline for the authenticated user + * * **Current user actor**: The private timeline for activity created by the authenticated user + * * **Current user organizations**: The private timeline for the organizations the authenticated user is a member of. + * * **Security advisories**: A collection of public announcements that provide information about security-related vulnerabilities in software on GitHub. + * + * **Note**: Private feeds are only returned when [authenticating via Basic Auth](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) since current feed URIs use the older, non revocable auth tokens. 
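+ *
+ * A minimal usage sketch, assuming this operation is served at `GET /feeds` and is called with `octokit.request` from `@octokit/core` (route and client are assumptions, not taken from this file):
+ *
+ * ```
+ * import { Octokit } from "@octokit/core";
+ *
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ * // The response matches the `feed` schema referenced below; unauthenticated
+ * // requests only see the public feed URLs.
+ * const { data } = await octokit.request("GET /feeds");
+ * console.log(data.timeline_url, data.user_url);
+ * ```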
+ */ + "activity/get-feeds": { + parameters: {}; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["feed"]; + }; + }; + }; + }; + /** Lists the authenticated user's gists or if called anonymously, this endpoint returns all public gists: */ + "gists/list": { + parameters: { + query: { + /** Only show notifications updated after the given time. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. */ + since?: components["parameters"]["since"]; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["base-gist"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 403: components["responses"]["forbidden"]; + }; + }; + /** + * Allows you to add a new gist with one or more files. + * + * **Note:** Don't name your files "gistfile" with a numerical suffix. This is the format of the automatic naming scheme that Gist uses internally. + */ + "gists/create": { + parameters: {}; + responses: { + /** Response */ + 201: { + headers: { + Location?: string; + }; + content: { + "application/json": components["schemas"]["gist-simple"]; + }; + }; + 304: components["responses"]["not_modified"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** + * @description Description of the gist + * @example Example Ruby script + */ + description?: string; + /** + * @description Names and content for the files that make up the gist + * @example { + * "hello.rb": { + * "content": "puts \"Hello, World!\"" + * } + * } + */ + files: { + [key: string]: { + /** @description Content of the file */ + content: string; + }; + }; + public?: boolean | ("true" | "false"); + }; + }; + }; + }; + /** + * List public gists sorted by most recently updated to least recently updated. + * + * Note: With [pagination](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination), you can fetch up to 3000 gists. For example, you can fetch 100 pages with 30 gists per page or 30 pages with 100 gists per page. + */ + "gists/list-public": { + parameters: { + query: { + /** Only show notifications updated after the given time. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. */ + since?: components["parameters"]["since"]; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["base-gist"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 403: components["responses"]["forbidden"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** List the authenticated user's starred gists: */ + "gists/list-starred": { + parameters: { + query: { + /** Only show notifications updated after the given time. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. 
*/ + since?: components["parameters"]["since"]; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["base-gist"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + }; + }; + "gists/get": { + parameters: { + path: { + /** The unique identifier of the gist. */ + gist_id: components["parameters"]["gist-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["gist-simple"]; + }; + }; + 304: components["responses"]["not_modified"]; + 403: components["responses"]["forbidden_gist"]; + 404: components["responses"]["not_found"]; + }; + }; + "gists/delete": { + parameters: { + path: { + /** The unique identifier of the gist. */ + gist_id: components["parameters"]["gist-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + 304: components["responses"]["not_modified"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** Allows you to update or delete a gist file and rename gist files. Files from the previous version of the gist that aren't explicitly changed during an edit are unchanged. */ + "gists/update": { + parameters: { + path: { + /** The unique identifier of the gist. */ + gist_id: components["parameters"]["gist-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["gist-simple"]; + }; + }; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** + * @description Description of the gist + * @example Example Ruby script + */ + description?: string; + /** + * @description Names of files to be updated + * @example { + * "hello.rb": { + * "content": "blah", + * "filename": "goodbye.rb" + * } + * } + */ + files?: { [key: string]: Partial<{ [key: string]: unknown }> }; + } | null; + }; + }; + }; + "gists/list-comments": { + parameters: { + path: { + /** The unique identifier of the gist. */ + gist_id: components["parameters"]["gist-id"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["gist-comment"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + "gists/create-comment": { + parameters: { + path: { + /** The unique identifier of the gist. */ + gist_id: components["parameters"]["gist-id"]; + }; + }; + responses: { + /** Response */ + 201: { + headers: { + Location?: string; + }; + content: { + "application/json": components["schemas"]["gist-comment"]; + }; + }; + 304: components["responses"]["not_modified"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + requestBody: { + content: { + "application/json": { + /** + * @description The comment text. 
+ * @example Body of the attachment + */ + body: string; + }; + }; + }; + }; + "gists/get-comment": { + parameters: { + path: { + /** The unique identifier of the gist. */ + gist_id: components["parameters"]["gist-id"]; + /** The unique identifier of the comment. */ + comment_id: components["parameters"]["comment-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["gist-comment"]; + }; + }; + 304: components["responses"]["not_modified"]; + 403: components["responses"]["forbidden_gist"]; + 404: components["responses"]["not_found"]; + }; + }; + "gists/delete-comment": { + parameters: { + path: { + /** The unique identifier of the gist. */ + gist_id: components["parameters"]["gist-id"]; + /** The unique identifier of the comment. */ + comment_id: components["parameters"]["comment-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + 304: components["responses"]["not_modified"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + "gists/update-comment": { + parameters: { + path: { + /** The unique identifier of the gist. */ + gist_id: components["parameters"]["gist-id"]; + /** The unique identifier of the comment. */ + comment_id: components["parameters"]["comment-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["gist-comment"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + requestBody: { + content: { + "application/json": { + /** + * @description The comment text. + * @example Body of the attachment + */ + body: string; + }; + }; + }; + }; + "gists/list-commits": { + parameters: { + path: { + /** The unique identifier of the gist. */ + gist_id: components["parameters"]["gist-id"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: { + Link?: string; + }; + content: { + "application/json": components["schemas"]["gist-commit"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + "gists/list-forks": { + parameters: { + path: { + /** The unique identifier of the gist. */ + gist_id: components["parameters"]["gist-id"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["gist-simple"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** **Note**: This was previously `/gists/:gist_id/fork`. */ + "gists/fork": { + parameters: { + path: { + /** The unique identifier of the gist. 
*/ + gist_id: components["parameters"]["gist-id"]; + }; + }; + responses: { + /** Response */ + 201: { + headers: { + Location?: string; + }; + content: { + "application/json": components["schemas"]["base-gist"]; + }; + }; + 304: components["responses"]["not_modified"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + "gists/check-is-starred": { + parameters: { + path: { + /** The unique identifier of the gist. */ + gist_id: components["parameters"]["gist-id"]; + }; + }; + responses: { + /** Response if gist is starred */ + 204: never; + 304: components["responses"]["not_modified"]; + 403: components["responses"]["forbidden"]; + /** Not Found if gist is not starred */ + 404: { + content: { + "application/json": { [key: string]: unknown }; + }; + }; + }; + }; + /** Note that you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)." */ + "gists/star": { + parameters: { + path: { + /** The unique identifier of the gist. */ + gist_id: components["parameters"]["gist-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + 304: components["responses"]["not_modified"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + "gists/unstar": { + parameters: { + path: { + /** The unique identifier of the gist. */ + gist_id: components["parameters"]["gist-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + 304: components["responses"]["not_modified"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + "gists/get-revision": { + parameters: { + path: { + /** The unique identifier of the gist. */ + gist_id: components["parameters"]["gist-id"]; + sha: string; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["gist-simple"]; + }; + }; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** List all templates available to pass as an option when [creating a repository](https://docs.github.com/rest/reference/repos#create-a-repository-for-the-authenticated-user). */ + "gitignore/get-all-templates": { + parameters: {}; + responses: { + /** Response */ + 200: { + content: { + "application/json": string[]; + }; + }; + 304: components["responses"]["not_modified"]; + }; + }; + /** + * The API also allows fetching the source of a single template. + * Use the raw [media type](https://docs.github.com/rest/overview/media-types/) to get the raw contents. + */ + "gitignore/get-template": { + parameters: { + path: { + name: string; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["gitignore-template"]; + }; + }; + 304: components["responses"]["not_modified"]; + }; + }; + /** + * List repositories that an app installation can access. + * + * You must use an [installation access token](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-an-installation) to access this endpoint. + */ + "apps/list-repos-accessible-to-installation": { + parameters: { + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. 
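The `gists/star` and `gists/check-is-starred` operations above use empty bodies and signal "not starred" with a 404 rather than a payload; a small sketch of that contract, assuming the same placeholder token and gist id as before:

```ts
// Sketch: star a gist, then test the 204/404 contract of gists/check-is-starred.
import { Octokit } from "@octokit/core";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
const gist_id = "aa5a315d61ae9438b18d"; // placeholder id

async function main() {
  // gists/star: per the note above, this PUT must be sent with Content-Length: 0.
  await octokit.request("PUT /gists/{gist_id}/star", { gist_id });

  try {
    await octokit.request("GET /gists/{gist_id}/star", { gist_id }); // 204 when starred
    console.log("starred");
  } catch (error: any) {
    if (error.status === 404) console.log("not starred"); // "Not Found if gist is not starred"
    else throw error;
  }
}

main().catch(console.error);
```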
*/ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": { + total_count: number; + repositories: components["schemas"]["repository"][]; + /** @example selected */ + repository_selection?: string; + }; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + }; + }; + /** + * Revokes the installation token you're using to authenticate as an installation and access this endpoint. + * + * Once an installation token is revoked, the token is invalidated and cannot be used. Other endpoints that require the revoked installation token must have a new installation token to work. You can create a new token using the "[Create an installation access token for an app](https://docs.github.com/rest/reference/apps#create-an-installation-access-token-for-an-app)" endpoint. + * + * You must use an [installation access token](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-an-installation) to access this endpoint. + */ + "apps/revoke-installation-access-token": { + parameters: {}; + responses: { + /** Response */ + 204: never; + }; + }; + /** + * List issues assigned to the authenticated user across all visible repositories including owned repositories, member + * repositories, and organization repositories. You can use the `filter` query parameter to fetch issues that are not + * necessarily assigned to you. + * + * + * **Note**: GitHub's REST API v3 considers every pull request an issue, but not every issue is a pull request. For this + * reason, "Issues" endpoints may return both issues and pull requests in the response. You can identify pull requests by + * the `pull_request` key. Be aware that the `id` of a pull request returned from "Issues" endpoints will be an _issue id_. To find out the pull + * request id, use the "[List pull requests](https://docs.github.com/rest/reference/pulls#list-pull-requests)" endpoint. + */ + "issues/list": { + parameters: { + query: { + /** Indicates which sorts of issues to return. `assigned` means issues assigned to you. `created` means issues created by you. `mentioned` means issues mentioning you. `subscribed` means issues you're subscribed to updates for. `all` or `repos` means all issues you can see, regardless of participation or creation. */ + filter?: + | "assigned" + | "created" + | "mentioned" + | "subscribed" + | "repos" + | "all"; + /** Indicates the state of the issues to return. Can be either `open`, `closed`, or `all`. */ + state?: "open" | "closed" | "all"; + /** A list of comma separated label names. Example: `bug,ui,@high` */ + labels?: components["parameters"]["labels"]; + /** What to sort results by. Can be either `created`, `updated`, `comments`. */ + sort?: "created" | "updated" | "comments"; + /** The direction to sort the results by. */ + direction?: components["parameters"]["direction"]; + /** Only show notifications updated after the given time. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. */ + since?: components["parameters"]["since"]; + collab?: boolean; + orgs?: boolean; + owned?: boolean; + pulls?: boolean; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. 
*/ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["issue"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + "licenses/get-all-commonly-used": { + parameters: { + query: { + featured?: boolean; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["license-simple"][]; + }; + }; + 304: components["responses"]["not_modified"]; + }; + }; + "licenses/get": { + parameters: { + path: { + license: string; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["license"]; + }; + }; + 304: components["responses"]["not_modified"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + "markdown/render": { + parameters: {}; + responses: { + /** Response */ + 200: { + headers: { + "Content-Length"?: string; + }; + content: { + "text/html": string; + }; + }; + 304: components["responses"]["not_modified"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The Markdown text to render in HTML. */ + text: string; + /** + * @description The rendering mode. Can be either `markdown` or `gfm`. + * @default markdown + * @example markdown + * @enum {string} + */ + mode?: "markdown" | "gfm"; + /** @description The repository context to use when creating references in `gfm` mode. For example, setting `context` to `octo-org/octo-repo` will change the text `#42` into an HTML link to issue 42 in the `octo-org/octo-repo` repository. */ + context?: string; + }; + }; + }; + }; + /** You must send Markdown as plain text (using a `Content-Type` header of `text/plain` or `text/x-markdown`) to this endpoint, rather than using JSON format. In raw mode, [GitHub Flavored Markdown](https://github.github.com/gfm/) is not supported and Markdown will be rendered in plain format like a README.md file. Markdown content must be 400 KB or less. */ + "markdown/render-raw": { + parameters: {}; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "text/html": string; + }; + }; + 304: components["responses"]["not_modified"]; + }; + requestBody: { + content: { + "text/plain": string; + "text/x-markdown": string; + }; + }; + }; + /** + * Shows whether the user or organization account actively subscribes to a plan listed by the authenticated GitHub App. When someone submits a plan change that won't be processed until the end of their billing cycle, you will also see the upcoming pending change. + * + * GitHub Apps must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. OAuth Apps must use [basic authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) with their client ID and client secret to access this endpoint. 
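The `issues/list` and `markdown/render` operations above compose naturally: list issues assigned to you, then render an issue body to HTML. A minimal sketch, with `GITHUB_TOKEN` and the `context` repository as placeholder assumptions:

```ts
// Sketch: list open issues assigned to the authenticated user, render one body as HTML.
import { Octokit } from "@octokit/core";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

async function main() {
  const { data: issues } = await octokit.request("GET /issues", {
    filter: "assigned",
    state: "open",
    labels: "bug,ui",
    sort: "updated",
    per_page: 10,
  });

  if (issues.length > 0) {
    const { data: html } = await octokit.request("POST /markdown", {
      text: issues[0].body ?? "",
      mode: "gfm",
      context: "octo-org/octo-repo", // placeholder repository context
    });
    console.log(html);
  }
}

main().catch(console.error);
```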
+ */ + "apps/get-subscription-plan-for-account": { + parameters: { + path: { + /** account_id parameter */ + account_id: components["parameters"]["account-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["marketplace-purchase"]; + }; + }; + 401: components["responses"]["requires_authentication"]; + /** Not Found when the account has not purchased the listing */ + 404: { + content: { + "application/json": components["schemas"]["basic-error"]; + }; + }; + }; + }; + /** + * Lists all plans that are part of your GitHub Marketplace listing. + * + * GitHub Apps must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. OAuth Apps must use [basic authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) with their client ID and client secret to access this endpoint. + */ + "apps/list-plans": { + parameters: { + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["marketplace-listing-plan"][]; + }; + }; + 401: components["responses"]["requires_authentication"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Returns user and organization accounts associated with the specified plan, including free plans. For per-seat pricing, you see the list of accounts that have purchased the plan, including the number of seats purchased. When someone submits a plan change that won't be processed until the end of their billing cycle, you will also see the upcoming pending change. + * + * GitHub Apps must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. OAuth Apps must use [basic authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) with their client ID and client secret to access this endpoint. + */ + "apps/list-accounts-for-plan": { + parameters: { + path: { + /** The unique identifier of the plan. */ + plan_id: components["parameters"]["plan-id"]; + }; + query: { + /** The property to sort the results by. `created` means when the repository was starred. `updated` means when the repository was last pushed to. */ + sort?: components["parameters"]["sort"]; + /** To return the oldest accounts first, set to `asc`. Ignored without the `sort` parameter. */ + direction?: "asc" | "desc"; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["marketplace-purchase"][]; + }; + }; + 401: components["responses"]["requires_authentication"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Shows whether the user or organization account actively subscribes to a plan listed by the authenticated GitHub App. When someone submits a plan change that won't be processed until the end of their billing cycle, you will also see the upcoming pending change. 
+ * + * GitHub Apps must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. OAuth Apps must use [basic authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) with their client ID and client secret to access this endpoint. + */ + "apps/get-subscription-plan-for-account-stubbed": { + parameters: { + path: { + /** account_id parameter */ + account_id: components["parameters"]["account-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["marketplace-purchase"]; + }; + }; + 401: components["responses"]["requires_authentication"]; + /** Not Found when the account has not purchased the listing */ + 404: unknown; + }; + }; + /** + * Lists all plans that are part of your GitHub Marketplace listing. + * + * GitHub Apps must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. OAuth Apps must use [basic authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) with their client ID and client secret to access this endpoint. + */ + "apps/list-plans-stubbed": { + parameters: { + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["marketplace-listing-plan"][]; + }; + }; + 401: components["responses"]["requires_authentication"]; + }; + }; + /** + * Returns repository and organization accounts associated with the specified plan, including free plans. For per-seat pricing, you see the list of accounts that have purchased the plan, including the number of seats purchased. When someone submits a plan change that won't be processed until the end of their billing cycle, you will also see the upcoming pending change. + * + * GitHub Apps must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. OAuth Apps must use [basic authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) with their client ID and client secret to access this endpoint. + */ + "apps/list-accounts-for-plan-stubbed": { + parameters: { + path: { + /** The unique identifier of the plan. */ + plan_id: components["parameters"]["plan-id"]; + }; + query: { + /** The property to sort the results by. `created` means when the repository was starred. `updated` means when the repository was last pushed to. */ + sort?: components["parameters"]["sort"]; + /** To return the oldest accounts first, set to `asc`. Ignored without the `sort` parameter. */ + direction?: "asc" | "desc"; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["marketplace-purchase"][]; + }; + }; + 401: components["responses"]["requires_authentication"]; + }; + }; + /** + * Returns meta information about GitHub, including a list of GitHub's IP addresses. 
For more information, see "[About GitHub's IP addresses](https://docs.github.com/articles/about-github-s-ip-addresses/)." + * + * **Note:** The IP addresses shown in the documentation's response are only example values. You must always query the API directly to get the latest list of IP addresses. + */ + "meta/get": { + parameters: {}; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["api-overview"]; + }; + }; + 304: components["responses"]["not_modified"]; + }; + }; + "activity/list-public-events-for-repo-network": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["event"][]; + }; + }; + 301: components["responses"]["moved_permanently"]; + 304: components["responses"]["not_modified"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** List all notifications for the current user, sorted by most recently updated. */ + "activity/list-notifications-for-authenticated-user": { + parameters: { + query: { + /** If `true`, show notifications marked as read. */ + all?: components["parameters"]["all"]; + /** If `true`, only shows notifications in which the user is directly participating or mentioned. */ + participating?: components["parameters"]["participating"]; + /** Only show notifications updated after the given time. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. */ + since?: components["parameters"]["since"]; + /** Only show notifications updated before the given time. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. */ + before?: components["parameters"]["before"]; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["thread"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** Marks all notifications as "read" removes it from the [default view on GitHub](https://github.com/notifications). If the number of notifications is too large to complete in one request, you will receive a `202 Accepted` status and GitHub will run an asynchronous process to mark notifications as "read." To check whether any "unread" notifications remain, you can use the [List notifications for the authenticated user](https://docs.github.com/rest/reference/activity#list-notifications-for-the-authenticated-user) endpoint and pass the query parameter `all=false`. 
*/ + "activity/mark-notifications-as-read": { + parameters: {}; + responses: { + /** Response */ + 202: { + content: { + "application/json": { + message?: string; + }; + }; + }; + /** Reset Content */ + 205: unknown; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + }; + requestBody: { + content: { + "application/json": { + /** + * Format: date-time + * @description Describes the last point that notifications were checked. Anything updated since this time will not be marked as read. If you omit this parameter, all notifications are marked as read. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. Default: The current timestamp. + */ + last_read_at?: string; + /** @description Whether the notification has been read. */ + read?: boolean; + }; + }; + }; + }; + "activity/get-thread": { + parameters: { + path: { + /** The unique identifier of the pull request thread. */ + thread_id: components["parameters"]["thread-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["thread"]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + }; + }; + "activity/mark-thread-as-read": { + parameters: { + path: { + /** The unique identifier of the pull request thread. */ + thread_id: components["parameters"]["thread-id"]; + }; + }; + responses: { + /** Reset Content */ + 205: unknown; + 304: components["responses"]["not_modified"]; + 403: components["responses"]["forbidden"]; + }; + }; + /** + * This checks to see if the current user is subscribed to a thread. You can also [get a repository subscription](https://docs.github.com/rest/reference/activity#get-a-repository-subscription). + * + * Note that subscriptions are only generated if a user is participating in a conversation--for example, they've replied to the thread, were **@mentioned**, or manually subscribe to a thread. + */ + "activity/get-thread-subscription-for-authenticated-user": { + parameters: { + path: { + /** The unique identifier of the pull request thread. */ + thread_id: components["parameters"]["thread-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["thread-subscription"]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + }; + }; + /** + * If you are watching a repository, you receive notifications for all threads by default. Use this endpoint to ignore future notifications for threads until you comment on the thread or get an **@mention**. + * + * You can also use this endpoint to subscribe to threads that you are currently not receiving notifications for or to subscribed to threads that you have previously ignored. + * + * Unsubscribing from a conversation in a repository that you are not watching is functionally equivalent to the [Delete a thread subscription](https://docs.github.com/rest/reference/activity#delete-a-thread-subscription) endpoint. + */ + "activity/set-thread-subscription": { + parameters: { + path: { + /** The unique identifier of the pull request thread. 
*/ + thread_id: components["parameters"]["thread-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["thread-subscription"]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + }; + requestBody: { + content: { + "application/json": { + /** + * @description Whether to block all notifications from a thread. + * @default false + */ + ignored?: boolean; + }; + }; + }; + }; + /** Mutes all future notifications for a conversation until you comment on the thread or get an **@mention**. If you are watching the repository of the thread, you will still receive notifications. To ignore future notifications for a repository you are watching, use the [Set a thread subscription](https://docs.github.com/rest/reference/activity#set-a-thread-subscription) endpoint and set `ignore` to `true`. */ + "activity/delete-thread-subscription": { + parameters: { + path: { + /** The unique identifier of the pull request thread. */ + thread_id: components["parameters"]["thread-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + }; + }; + /** Get the octocat as ASCII art */ + "meta/get-octocat": { + parameters: { + query: { + /** The words to show in Octocat's speech bubble */ + s?: string; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/octocat-stream": string; + }; + }; + }; + }; + /** + * Lists all organizations, in the order that they were created on GitHub. + * + * **Note:** Pagination is powered exclusively by the `since` parameter. Use the [Link header](https://docs.github.com/rest/overview/resources-in-the-rest-api#link-header) to get the URL for the next page of organizations. + */ + "orgs/list": { + parameters: { + query: { + /** An organization ID. Only return organizations with an ID greater than this ID. */ + since?: components["parameters"]["since-org"]; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: { + Link?: string; + }; + content: { + "application/json": components["schemas"]["organization-simple"][]; + }; + }; + 304: components["responses"]["not_modified"]; + }; + }; + /** + * List the custom repository roles available in this organization. In order to see custom + * repository roles in an organization, the authenticated user must be an organization owner. + * + * For more information on custom repository roles, see "[Managing custom repository roles for an organization](https://docs.github.com/organizations/managing-peoples-access-to-your-organization-with-roles/managing-custom-repository-roles-for-an-organization)". + */ + "orgs/list-custom-roles": { + parameters: { + path: { + organization_id: string; + }; + }; + responses: { + /** Response - list of custom role names */ + 200: { + content: { + "application/json": { + /** + * @description The number of custom roles in this organization + * @example 3 + */ + total_count?: number; + custom_roles?: components["schemas"]["organization-custom-repository-role"][]; + }; + }; + }; + }; + }; + /** + * To see many of the organization response values, you need to be an authenticated organization owner with the `admin:org` scope. 
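The thread-subscription operations above (`activity/get-thread-subscription-for-authenticated-user`, `activity/set-thread-subscription`) take a numeric thread id from the notifications list; the id below is a placeholder:

```ts
// Sketch: inspect a thread's subscription, then ignore future notifications for it.
import { Octokit } from "@octokit/core";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
const thread_id = 123; // placeholder; real ids come from GET /notifications

async function main() {
  const { data: sub } = await octokit.request(
    "GET /notifications/threads/{thread_id}/subscription",
    { thread_id }
  );
  console.log("currently ignored:", sub.ignored);

  // activity/set-thread-subscription: `ignored: true` mutes the thread
  // until you comment on it or are @mentioned.
  await octokit.request("PUT /notifications/threads/{thread_id}/subscription", {
    thread_id,
    ignored: true,
  });
}

main().catch(console.error);
```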
When the value of `two_factor_requirement_enabled` is `true`, the organization requires all members, billing managers, and outside collaborators to enable [two-factor authentication](https://docs.github.com/articles/securing-your-account-with-two-factor-authentication-2fa/). + * + * GitHub Apps with the `Organization plan` permission can use this endpoint to retrieve information about an organization's GitHub plan. See "[Authenticating with GitHub Apps](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/)" for details. For an example response, see 'Response with GitHub plan information' below." + */ + "orgs/get": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["organization-full"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * **Parameter Deprecation Notice:** GitHub will replace and discontinue `members_allowed_repository_creation_type` in favor of more granular permissions. The new input parameters are `members_can_create_public_repositories`, `members_can_create_private_repositories` for all organizations and `members_can_create_internal_repositories` for organizations associated with an enterprise account using GitHub Enterprise Cloud or GitHub Enterprise Server 2.20+. For more information, see the [blog post](https://developer.github.com/changes/2019-12-03-internal-visibility-changes). + * + * Enables an authenticated organization owner with the `admin:org` scope to update the organization's profile and member privileges. + */ + "orgs/update": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["organization-full"]; + }; + }; + 409: components["responses"]["conflict"]; + /** Validation failed */ + 422: { + content: { + "application/json": + | components["schemas"]["validation-error"] + | components["schemas"]["validation-error-simple"]; + }; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description Billing email address. This address is not publicized. */ + billing_email?: string; + /** @description The company name. */ + company?: string; + /** @description The publicly visible email address. */ + email?: string; + /** @description The Twitter username of the company. */ + twitter_username?: string; + /** @description The location. */ + location?: string; + /** @description The shorthand name of the company. */ + name?: string; + /** @description The description of the company. */ + description?: string; + /** @description Whether an organization can use organization projects. */ + has_organization_projects?: boolean; + /** @description Whether repositories that belong to the organization can use repository projects. */ + has_repository_projects?: boolean; + /** + * @description Default permission level members have for organization repositories. + * @default read + * @enum {string} + */ + default_repository_permission?: "read" | "write" | "admin" | "none"; + /** + * @description Whether of non-admin organization members can create repositories. **Note:** A parameter can override this parameter. See `members_allowed_repository_creation_type` in this table for details. 
+ * @default true + */ + members_can_create_repositories?: boolean; + /** @description Whether organization members can create internal repositories, which are visible to all enterprise members. You can only allow members to create internal repositories if your organization is associated with an enterprise account using GitHub Enterprise Cloud or GitHub Enterprise Server 2.20+. For more information, see "[Restricting repository creation in your organization](https://docs.github.com/github/setting-up-and-managing-organizations-and-teams/restricting-repository-creation-in-your-organization)" in the GitHub Help documentation. */ + members_can_create_internal_repositories?: boolean; + /** @description Whether organization members can create private repositories, which are visible to organization members with permission. For more information, see "[Restricting repository creation in your organization](https://docs.github.com/github/setting-up-and-managing-organizations-and-teams/restricting-repository-creation-in-your-organization)" in the GitHub Help documentation. */ + members_can_create_private_repositories?: boolean; + /** @description Whether organization members can create public repositories, which are visible to anyone. For more information, see "[Restricting repository creation in your organization](https://docs.github.com/github/setting-up-and-managing-organizations-and-teams/restricting-repository-creation-in-your-organization)" in the GitHub Help documentation. */ + members_can_create_public_repositories?: boolean; + /** + * @description Specifies which types of repositories non-admin organization members can create. `private` is only available to repositories that are part of an organization on GitHub Enterprise Cloud. + * **Note:** This parameter is deprecated and will be removed in the future. Its return value ignores internal repositories. Using this parameter overrides values set in `members_can_create_repositories`. See the parameter deprecation notice in the operation description for details. + * @enum {string} + */ + members_allowed_repository_creation_type?: "all" | "private" | "none"; + /** + * @description Whether organization members can create GitHub Pages sites. Existing published sites will not be impacted. + * @default true + */ + members_can_create_pages?: boolean; + /** + * @description Whether organization members can create public GitHub Pages sites. Existing published sites will not be impacted. + * @default true + */ + members_can_create_public_pages?: boolean; + /** + * @description Whether organization members can create private GitHub Pages sites. Existing published sites will not be impacted. + * @default true + */ + members_can_create_private_pages?: boolean; + /** + * @description Whether organization members can fork private organization repositories. + * @default false + */ + members_can_fork_private_repositories?: boolean; + /** @example "http://github.blog" */ + blog?: string; + }; + }; + }; + }; + /** + * Gets the total GitHub Actions cache usage for an organization. + * The data fetched using this API is refreshed approximately every 5 minutes, so values returned from this endpoint may take at least 5 minutes to get updated. + * You must authenticate using an access token with the `read:org` scope to use this endpoint. GitHub Apps must have the `organization_admistration:read` permission to use this endpoint. + */ + "actions/get-actions-cache-usage-for-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. 
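The `orgs/get` and `orgs/update` operations above accept the member-privilege fields listed in the request body; a minimal sketch, with `octo-org` as a placeholder organization and a token that carries the `admin:org` scope assumed:

```ts
// Sketch: read an organization, then update a few profile and permission settings.
import { Octokit } from "@octokit/core";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
const org = "octo-org"; // placeholder

async function main() {
  const { data: before } = await octokit.request("GET /orgs/{org}", { org });
  console.log(before.default_repository_permission);

  // orgs/update: only the fields you send are changed.
  await octokit.request("PATCH /orgs/{org}", {
    org,
    description: "Tools and libraries",
    default_repository_permission: "read",
    members_can_create_public_repositories: false,
    members_can_create_pages: true,
  });
}

main().catch(console.error);
```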
*/ + org: components["parameters"]["org"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["actions-cache-usage-org-enterprise"]; + }; + }; + }; + }; + /** + * Lists repositories and their GitHub Actions cache usage for an organization. + * The data fetched using this API is refreshed approximately every 5 minutes, so values returned from this endpoint may take at least 5 minutes to get updated. + * You must authenticate using an access token with the `read:org` scope to use this endpoint. GitHub Apps must have the `organization_admistration:read` permission to use this endpoint. + */ + "actions/get-actions-cache-usage-by-repo-for-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": { + total_count: number; + repository_cache_usages: components["schemas"]["actions-cache-usage-by-repository"][]; + }; + }; + }; + }; + }; + /** + * Gets the customization template for an OpenID Connect (OIDC) subject claim. + * You must authenticate using an access token with the `read:org` scope to use this endpoint. + * GitHub Apps must have the `organization_administration:write` permission to use this endpoint. + */ + "oidc/get-oidc-custom-sub-template-for-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + }; + responses: { + /** A JSON serialized template for OIDC subject claim customization */ + 200: { + content: { + "application/json": components["schemas"]["oidc-custom-sub"]; + }; + }; + }; + }; + /** + * Creates or updates the customization template for an OpenID Connect (OIDC) subject claim. + * You must authenticate using an access token with the `write:org` scope to use this endpoint. + * GitHub Apps must have the `admin:org` permission to use this endpoint. + */ + "oidc/update-oidc-custom-sub-template-for-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + }; + responses: { + /** Empty response */ + 201: { + content: { + "application/json": components["schemas"]["empty-object"]; + }; + }; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + requestBody: { + content: { + "application/json": components["schemas"]["oidc-custom-sub"]; + }; + }; + }; + /** + * Gets the GitHub Actions permissions policy for repositories and allowed actions and reusable workflows in an organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. + */ + "actions/get-github-actions-permissions-organization": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. 
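The cache-usage operations above return org-wide totals plus a per-repository breakdown. The response field names in this sketch come from the referenced `actions-cache-usage-*` schemas in the GitHub REST docs rather than from the excerpt itself, so treat them as assumptions:

```ts
// Sketch: report overall Actions cache usage for an org and per-repository sizes.
import { Octokit } from "@octokit/core";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
const org = "octo-org"; // placeholder

async function main() {
  const { data: total } = await octokit.request("GET /orgs/{org}/actions/cache/usage", { org });
  console.log(total.total_active_caches_size_in_bytes, "bytes across",
    total.total_active_caches_count, "caches");

  const { data: byRepo } = await octokit.request(
    "GET /orgs/{org}/actions/cache/usage-by-repository",
    { org, per_page: 100 }
  );
  for (const repo of byRepo.repository_cache_usages) {
    console.log(repo.full_name, repo.active_caches_size_in_bytes);
  }
}

main().catch(console.error);
```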
*/ + org: components["parameters"]["org"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["actions-organization-permissions"]; + }; + }; + }; + }; + /** + * Sets the GitHub Actions permissions policy for repositories and allowed actions and reusable workflows in an organization. + * + * If the organization belongs to an enterprise that has set restrictive permissions at the enterprise level, such as `allowed_actions` to `selected` actions and reusable workflows, then you cannot override them for the organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. + */ + "actions/set-github-actions-permissions-organization": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + requestBody: { + content: { + "application/json": { + enabled_repositories: components["schemas"]["enabled-repositories"]; + allowed_actions?: components["schemas"]["allowed-actions"]; + }; + }; + }; + }; + /** + * Lists the selected repositories that are enabled for GitHub Actions in an organization. To use this endpoint, the organization permission policy for `enabled_repositories` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an organization](#set-github-actions-permissions-for-an-organization)." + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. + */ + "actions/list-selected-repositories-enabled-github-actions-organization": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": { + total_count: number; + repositories: components["schemas"]["repository"][]; + }; + }; + }; + }; + }; + /** + * Replaces the list of selected repositories that are enabled for GitHub Actions in an organization. To use this endpoint, the organization permission policy for `enabled_repositories` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an organization](#set-github-actions-permissions-for-an-organization)." + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. + */ + "actions/set-selected-repositories-enabled-github-actions-organization": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + requestBody: { + content: { + "application/json": { + /** @description List of repository IDs to enable for GitHub Actions. */ + selected_repository_ids: number[]; + }; + }; + }; + }; + /** + * Adds a repository to the list of selected repositories that are enabled for GitHub Actions in an organization. 
To use this endpoint, the organization permission policy for `enabled_repositories` must be must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an organization](#set-github-actions-permissions-for-an-organization)." + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. + */ + "actions/enable-selected-repository-github-actions-organization": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The unique identifier of the repository. */ + repository_id: components["parameters"]["repository-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** + * Removes a repository from the list of selected repositories that are enabled for GitHub Actions in an organization. To use this endpoint, the organization permission policy for `enabled_repositories` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an organization](#set-github-actions-permissions-for-an-organization)." + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. + */ + "actions/disable-selected-repository-github-actions-organization": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The unique identifier of the repository. */ + repository_id: components["parameters"]["repository-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** + * Gets the selected actions and reusable workflows that are allowed in an organization. To use this endpoint, the organization permission policy for `allowed_actions` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an organization](#set-github-actions-permissions-for-an-organization)."" + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. + */ + "actions/get-allowed-actions-organization": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["selected-actions"]; + }; + }; + }; + }; + /** + * Sets the actions and reusable workflows that are allowed in an organization. To use this endpoint, the organization permission policy for `allowed_actions` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an organization](#set-github-actions-permissions-for-an-organization)." + * + * If the organization belongs to an enterprise that has `selected` actions and reusable workflows set at the enterprise level, then you cannot override any of the enterprise's allowed actions and reusable workflows settings. + * + * To use the `patterns_allowed` setting for private repositories, the organization must belong to an enterprise. If the organization does not belong to an enterprise, then the `patterns_allowed` setting only applies to public repositories in the organization. 
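The permissions operations above flip an organization to `selected` repositories and then manage that selection; a minimal sketch with placeholder org and repository ids:

```ts
// Sketch: restrict Actions to selected repositories in an org, then enable one repository.
import { Octokit } from "@octokit/core";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
const org = "octo-org"; // placeholder

async function main() {
  // actions/set-github-actions-permissions-organization
  await octokit.request("PUT /orgs/{org}/actions/permissions", {
    org,
    enabled_repositories: "selected",
    allowed_actions: "selected",
  });

  // actions/list-selected-repositories-enabled-github-actions-organization
  const { data } = await octokit.request("GET /orgs/{org}/actions/permissions/repositories", {
    org,
    per_page: 100,
  });
  console.log(data.total_count, "repositories currently enabled");

  // actions/enable-selected-repository-github-actions-organization
  await octokit.request("PUT /orgs/{org}/actions/permissions/repositories/{repository_id}", {
    org,
    repository_id: 123456, // placeholder
  });
}

main().catch(console.error);
```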
+ * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. + */ + "actions/set-allowed-actions-organization": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + requestBody: { + content: { + "application/json": components["schemas"]["selected-actions"]; + }; + }; + }; + /** + * Gets the default workflow permissions granted to the `GITHUB_TOKEN` when running workflows in an organization, + * as well as whether GitHub Actions can submit approving pull request reviews. For more information, see + * "[Setting the permissions of the GITHUB_TOKEN for your organization](https://docs.github.com/organizations/managing-organization-settings/disabling-or-limiting-github-actions-for-your-organization#setting-the-permissions-of-the-github_token-for-your-organization)." + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. + */ + "actions/get-github-actions-default-workflow-permissions-organization": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["actions-get-default-workflow-permissions"]; + }; + }; + }; + }; + /** + * Sets the default workflow permissions granted to the `GITHUB_TOKEN` when running workflows in an organization, and sets if GitHub Actions + * can submit approving pull request reviews. For more information, see + * "[Setting the permissions of the GITHUB_TOKEN for your organization](https://docs.github.com/organizations/managing-organization-settings/disabling-or-limiting-github-actions-for-your-organization#setting-the-permissions-of-the-github_token-for-your-organization)." + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. + */ + "actions/set-github-actions-default-workflow-permissions-organization": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + }; + responses: { + /** Success response */ + 204: never; + /** Conflict response when changing a setting is prevented by the owning enterprise */ + 409: unknown; + }; + requestBody: { + content: { + "application/json": components["schemas"]["actions-set-default-workflow-permissions"]; + }; + }; + }; + /** + * The self-hosted runner groups REST API is available with GitHub Enterprise Cloud. For more information, see "[GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products)." + * + * Lists all self-hosted runner groups configured in an organization and inherited from an enterprise. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + "actions/list-self-hosted-runner-groups-for-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + query: { + /** The number of results per page (max 100). 
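The `actions/set-allowed-actions-organization` and workflow-permission operations above take the `selected-actions` and `actions-set-default-workflow-permissions` schemas; the concrete field names in this sketch follow the GitHub REST docs for those schemas, not the excerpt, so treat them as assumptions:

```ts
// Sketch: set the allow-list of actions and the default GITHUB_TOKEN workflow permissions.
import { Octokit } from "@octokit/core";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
const org = "octo-org"; // placeholder

async function main() {
  // actions/set-allowed-actions-organization (allowed_actions must already be "selected")
  await octokit.request("PUT /orgs/{org}/actions/permissions/selected-actions", {
    org,
    github_owned_allowed: true,
    verified_allowed: true,
    patterns_allowed: ["octo-org/*", "docker/login-action@*"],
  });

  // actions/set-github-actions-default-workflow-permissions-organization
  await octokit.request("PUT /orgs/{org}/actions/permissions/workflow", {
    org,
    default_workflow_permissions: "read",
    can_approve_pull_request_reviews: false,
  });
}

main().catch(console.error);
```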
*/ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + /** Only return runner groups that are allowed to be used by this repository. */ + visible_to_repository?: components["parameters"]["visible-to-repository"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": { + total_count: number; + runner_groups: components["schemas"]["runner-groups-org"][]; + }; + }; + }; + }; + }; + /** + * The self-hosted runner groups REST API is available with GitHub Enterprise Cloud and GitHub Enterprise Server. For more information, see "[GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products)." + * + * Creates a new self-hosted runner group for an organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + "actions/create-self-hosted-runner-group-for-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + }; + responses: { + /** Response */ + 201: { + content: { + "application/json": components["schemas"]["runner-groups-org"]; + }; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description Name of the runner group. */ + name: string; + /** + * @description Visibility of a runner group. You can select all repositories, select individual repositories, or limit access to private repositories. + * @default all + * @enum {string} + */ + visibility?: "selected" | "all" | "private"; + /** @description List of repository IDs that can access the runner group. */ + selected_repository_ids?: number[]; + /** @description List of runner IDs to add to the runner group. */ + runners?: number[]; + /** + * @description Whether the runner group can be used by `public` repositories. + * @default false + */ + allows_public_repositories?: boolean; + /** + * @description If `true`, the runner group will be restricted to running only the workflows specified in the `selected_workflows` array. + * @default false + */ + restricted_to_workflows?: boolean; + /** @description List of workflows the runner group should be allowed to run. This setting will be ignored unless `restricted_to_workflows` is set to `true`. */ + selected_workflows?: string[]; + }; + }; + }; + }; + /** + * The self-hosted runner groups REST API is available with GitHub Enterprise Cloud. For more information, see "[GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products)." + * + * Gets a specific self-hosted runner group for an organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + "actions/get-self-hosted-runner-group-for-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** Unique identifier of the self-hosted runner group. */ + runner_group_id: components["parameters"]["runner-group-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["runner-groups-org"]; + }; + }; + }; + }; + /** + * The self-hosted runner groups REST API is available with GitHub Enterprise Cloud. For more information, see "[GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products)." + * + * Deletes a self-hosted runner group for an organization. 
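The runner-group operations above (a GitHub Enterprise Cloud feature, per the notes) accept the request-body fields shown in `actions/create-self-hosted-runner-group-for-org`; a minimal sketch with placeholder ids, and a `selected_workflows` entry whose `owner/repo/path@ref` shape is assumed from the GitHub docs:

```ts
// Sketch: create a restricted self-hosted runner group, then read it back.
import { Octokit } from "@octokit/core";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
const org = "octo-org"; // placeholder

async function main() {
  // actions/create-self-hosted-runner-group-for-org
  const { data: group } = await octokit.request("POST /orgs/{org}/actions/runner-groups", {
    org,
    name: "deploy-runners",
    visibility: "selected",
    selected_repository_ids: [123456, 789012], // placeholders
    restricted_to_workflows: true,
    selected_workflows: ["octo-org/deploy/.github/workflows/release.yml@main"],
  });
  console.log("created runner group", group.id);

  // actions/get-self-hosted-runner-group-for-org
  await octokit.request("GET /orgs/{org}/actions/runner-groups/{runner_group_id}", {
    org,
    runner_group_id: group.id,
  });
}

main().catch(console.error);
```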
+ * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + "actions/delete-self-hosted-runner-group-from-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** Unique identifier of the self-hosted runner group. */ + runner_group_id: components["parameters"]["runner-group-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** + * The self-hosted runner groups REST API is available with GitHub Enterprise Cloud. For more information, see "[GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products)." + * + * Updates the `name` and `visibility` of a self-hosted runner group in an organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + "actions/update-self-hosted-runner-group-for-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** Unique identifier of the self-hosted runner group. */ + runner_group_id: components["parameters"]["runner-group-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["runner-groups-org"]; + }; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description Name of the runner group. */ + name: string; + /** + * @description Visibility of a runner group. You can select all repositories, select individual repositories, or all private repositories. + * @enum {string} + */ + visibility?: "selected" | "all" | "private"; + /** + * @description Whether the runner group can be used by `public` repositories. + * @default false + */ + allows_public_repositories?: boolean; + /** + * @description If `true`, the runner group will be restricted to running only the workflows specified in the `selected_workflows` array. + * @default false + */ + restricted_to_workflows?: boolean; + /** @description List of workflows the runner group should be allowed to run. This setting will be ignored unless `restricted_to_workflows` is set to `true`. */ + selected_workflows?: string[]; + }; + }; + }; + }; + /** + * The self-hosted runner groups REST API is available with GitHub Enterprise Cloud and GitHub Enterprise Server. For more information, see "[GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products)." + * + * Lists the repositories with access to a self-hosted runner group configured in an organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + "actions/list-repo-access-to-self-hosted-runner-group-in-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** Unique identifier of the self-hosted runner group. */ + runner_group_id: components["parameters"]["runner-group-id"]; + }; + query: { + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": { + total_count: number; + repositories: components["schemas"]["minimal-repository"][]; + }; + }; + }; + }; + }; + /** + * The self-hosted runner groups REST API is available with GitHub Enterprise Cloud. 
For more information, see "[GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products)." + * + * Replaces the list of repositories that have access to a self-hosted runner group configured in an organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + "actions/set-repo-access-to-self-hosted-runner-group-in-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** Unique identifier of the self-hosted runner group. */ + runner_group_id: components["parameters"]["runner-group-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + requestBody: { + content: { + "application/json": { + /** @description List of repository IDs that can access the runner group. */ + selected_repository_ids: number[]; + }; + }; + }; + }; + /** + * The self-hosted runner groups REST API is available with GitHub Enterprise Cloud. For more information, see "[GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products)." + * + * + * Adds a repository to the list of selected repositories that can access a self-hosted runner group. The runner group must have `visibility` set to `selected`. For more information, see "[Create a self-hosted runner group for an organization](#create-a-self-hosted-runner-group-for-an-organization)." + * + * You must authenticate using an access token with the `admin:org` + * scope to use this endpoint. + */ + "actions/add-repo-access-to-self-hosted-runner-group-in-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** Unique identifier of the self-hosted runner group. */ + runner_group_id: components["parameters"]["runner-group-id"]; + /** The unique identifier of the repository. */ + repository_id: components["parameters"]["repository-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** + * The self-hosted runner groups REST API is available with GitHub Enterprise Cloud. For more information, see "[GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products)." + * + * + * Removes a repository from the list of selected repositories that can access a self-hosted runner group. The runner group must have `visibility` set to `selected`. For more information, see "[Create a self-hosted runner group for an organization](#create-a-self-hosted-runner-group-for-an-organization)." + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + "actions/remove-repo-access-to-self-hosted-runner-group-in-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** Unique identifier of the self-hosted runner group. */ + runner_group_id: components["parameters"]["runner-group-id"]; + /** The unique identifier of the repository. */ + repository_id: components["parameters"]["repository-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** + * The self-hosted runner groups REST API is available with GitHub Enterprise Cloud. For more information, see "[GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products)." + * + * Lists self-hosted runners that are in a specific organization group. 
+ * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + "actions/list-self-hosted-runners-in-group-for-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** Unique identifier of the self-hosted runner group. */ + runner_group_id: components["parameters"]["runner-group-id"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": { + total_count: number; + runners: components["schemas"]["runner"][]; + }; + }; + }; + }; + }; + /** + * The self-hosted runner groups REST API is available with GitHub Enterprise Cloud. For more information, see "[GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products)." + * + * Replaces the list of self-hosted runners that are part of an organization runner group. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + "actions/set-self-hosted-runners-in-group-for-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** Unique identifier of the self-hosted runner group. */ + runner_group_id: components["parameters"]["runner-group-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + requestBody: { + content: { + "application/json": { + /** @description List of runner IDs to add to the runner group. */ + runners: number[]; + }; + }; + }; + }; + /** + * The self-hosted runner groups REST API is available with GitHub Enterprise Cloud. For more information, see "[GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products)." + * + * + * Adds a self-hosted runner to a runner group configured in an organization. + * + * You must authenticate using an access token with the `admin:org` + * scope to use this endpoint. + */ + "actions/add-self-hosted-runner-to-group-for-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** Unique identifier of the self-hosted runner group. */ + runner_group_id: components["parameters"]["runner-group-id"]; + /** Unique identifier of the self-hosted runner. */ + runner_id: components["parameters"]["runner-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** + * The self-hosted runner groups REST API is available with GitHub Enterprise Cloud. For more information, see "[GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products)." + * + * + * Removes a self-hosted runner from a group configured in an organization. The runner is then returned to the default group. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + "actions/remove-self-hosted-runner-from-group-for-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** Unique identifier of the self-hosted runner group. */ + runner_group_id: components["parameters"]["runner-group-id"]; + /** Unique identifier of the self-hosted runner. 
*/ + runner_id: components["parameters"]["runner-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** + * Lists all self-hosted runners configured in an organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + "actions/list-self-hosted-runners-for-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": { + total_count: number; + runners: components["schemas"]["runner"][]; + }; + }; + }; + }; + }; + /** + * Lists binaries for the runner application that you can download and run. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + "actions/list-runner-applications-for-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["runner-application"][]; + }; + }; + }; + }; + /** + * Returns a token that you can pass to the `config` script. The token expires after one hour. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + * + * #### Example using registration token + * + * Configure your self-hosted runner, replacing `TOKEN` with the registration token provided by this endpoint. + * + * ``` + * ./config.sh --url https://github.com/octo-org --token TOKEN + * ``` + */ + "actions/create-registration-token-for-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + }; + responses: { + /** Response */ + 201: { + content: { + "application/json": components["schemas"]["authentication-token"]; + }; + }; + }; + }; + /** + * Returns a token that you can pass to the `config` script to remove a self-hosted runner from an organization. The token expires after one hour. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + * + * #### Example using remove token + * + * To remove your self-hosted runner from an organization, replace `TOKEN` with the remove token provided by this + * endpoint. + * + * ``` + * ./config.sh remove --token TOKEN + * ``` + */ + "actions/create-remove-token-for-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + }; + responses: { + /** Response */ + 201: { + content: { + "application/json": components["schemas"]["authentication-token"]; + }; + }; + }; + }; + /** + * Gets a specific self-hosted runner configured in an organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + "actions/get-self-hosted-runner-for-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** Unique identifier of the self-hosted runner. 
*/ + runner_id: components["parameters"]["runner-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["runner"]; + }; + }; + }; + }; + /** + * Forces the removal of a self-hosted runner from an organization. You can use this endpoint to completely remove the runner when the machine you were using no longer exists. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + "actions/delete-self-hosted-runner-from-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** Unique identifier of the self-hosted runner. */ + runner_id: components["parameters"]["runner-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** + * Lists all labels for a self-hosted runner configured in an organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + "actions/list-labels-for-self-hosted-runner-for-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** Unique identifier of the self-hosted runner. */ + runner_id: components["parameters"]["runner-id"]; + }; + }; + responses: { + 200: components["responses"]["actions_runner_labels"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Remove all previous custom labels and set the new custom labels for a specific + * self-hosted runner configured in an organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + "actions/set-custom-labels-for-self-hosted-runner-for-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** Unique identifier of the self-hosted runner. */ + runner_id: components["parameters"]["runner-id"]; + }; + }; + responses: { + 200: components["responses"]["actions_runner_labels"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed_simple"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The names of the custom labels to set for the runner. You can pass an empty array to remove all custom labels. */ + labels: string[]; + }; + }; + }; + }; + /** + * Add custom labels to a self-hosted runner configured in an organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + "actions/add-custom-labels-to-self-hosted-runner-for-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** Unique identifier of the self-hosted runner. */ + runner_id: components["parameters"]["runner-id"]; + }; + }; + responses: { + 200: components["responses"]["actions_runner_labels"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed_simple"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The names of the custom labels to add to the runner. */ + labels: string[]; + }; + }; + }; + }; + /** + * Remove all custom labels from a self-hosted runner configured in an + * organization. Returns the remaining read-only labels from the runner. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. 
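+ *
+ * As an editorial illustration only (not part of the generated description), a minimal sketch with `@octokit/core`, assuming an authenticated `octokit` instance:
+ *
+ * ```typescript
+ * // "octo-org" and runner_id 42 are hypothetical placeholders.
+ * const { data } = await octokit.request(
+ *   "DELETE /orgs/{org}/actions/runners/{runner_id}/labels",
+ *   { org: "octo-org", runner_id: 42 }
+ * );
+ * // The response lists the remaining read-only labels.
+ * console.log(data.labels.map((label) => label.name));
+ * ```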
+ */ + "actions/remove-all-custom-labels-from-self-hosted-runner-for-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** Unique identifier of the self-hosted runner. */ + runner_id: components["parameters"]["runner-id"]; + }; + }; + responses: { + 200: components["responses"]["actions_runner_labels_readonly"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Remove a custom label from a self-hosted runner configured + * in an organization. Returns the remaining labels from the runner. + * + * This endpoint returns a `404 Not Found` status if the custom label is not + * present on the runner. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + "actions/remove-custom-label-from-self-hosted-runner-for-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** Unique identifier of the self-hosted runner. */ + runner_id: components["parameters"]["runner-id"]; + /** The name of a self-hosted runner's custom label. */ + name: components["parameters"]["runner-label-name"]; + }; + }; + responses: { + 200: components["responses"]["actions_runner_labels"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed_simple"]; + }; + }; + /** Lists all secrets available in an organization without revealing their encrypted values. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to use this endpoint. */ + "actions/list-org-secrets": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": { + total_count: number; + secrets: components["schemas"]["organization-actions-secret"][]; + }; + }; + }; + }; + }; + /** Gets your public key, which you need to encrypt secrets. You need to encrypt a secret before you can create or update secrets. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to use this endpoint. */ + "actions/get-org-public-key": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["actions-public-key"]; + }; + }; + }; + }; + /** Gets a single organization secret without revealing its encrypted value. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to use this endpoint. */ + "actions/get-org-secret": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The name of the secret. 
*/ + secret_name: components["parameters"]["secret-name"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["organization-actions-secret"]; + }; + }; + }; + }; + /** + * Creates or updates an organization secret with an encrypted value. Encrypt your secret using + * [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages). You must authenticate using an access + * token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to + * use this endpoint. + * + * #### Example encrypting a secret using Node.js + * + * Encrypt your secret using the [tweetsodium](https://github.com/github/tweetsodium) library. + * + * ``` + * const sodium = require('tweetsodium'); + * + * const key = "base64-encoded-public-key"; + * const value = "plain-text-secret"; + * + * // Convert the message and key to Uint8Array's (Buffer implements that interface) + * const messageBytes = Buffer.from(value); + * const keyBytes = Buffer.from(key, 'base64'); + * + * // Encrypt using LibSodium. + * const encryptedBytes = sodium.seal(messageBytes, keyBytes); + * + * // Base64 the encrypted secret + * const encrypted = Buffer.from(encryptedBytes).toString('base64'); + * + * console.log(encrypted); + * ``` + * + * + * #### Example encrypting a secret using Python + * + * Encrypt your secret using [pynacl](https://pynacl.readthedocs.io/en/latest/public/#nacl-public-sealedbox) with Python 3. + * + * ``` + * from base64 import b64encode + * from nacl import encoding, public + * + * def encrypt(public_key: str, secret_value: str) -> str: + * """Encrypt a Unicode string using the public key.""" + * public_key = public.PublicKey(public_key.encode("utf-8"), encoding.Base64Encoder()) + * sealed_box = public.SealedBox(public_key) + * encrypted = sealed_box.encrypt(secret_value.encode("utf-8")) + * return b64encode(encrypted).decode("utf-8") + * ``` + * + * #### Example encrypting a secret using C# + * + * Encrypt your secret using the [Sodium.Core](https://www.nuget.org/packages/Sodium.Core/) package. + * + * ``` + * var secretValue = System.Text.Encoding.UTF8.GetBytes("mySecret"); + * var publicKey = Convert.FromBase64String("2Sg8iYjAxxmI2LvUXpJjkYrMxURPc8r+dB7TJyvvcCU="); + * + * var sealedPublicKeyBox = Sodium.SealedPublicKeyBox.Create(secretValue, publicKey); + * + * Console.WriteLine(Convert.ToBase64String(sealedPublicKeyBox)); + * ``` + * + * #### Example encrypting a secret using Ruby + * + * Encrypt your secret using the [rbnacl](https://github.com/RubyCrypto/rbnacl) gem. + * + * ```ruby + * require "rbnacl" + * require "base64" + * + * key = Base64.decode64("+ZYvJDZMHUfBkJdyq5Zm9SKqeuBQ4sj+6sfjlH4CgG0=") + * public_key = RbNaCl::PublicKey.new(key) + * + * box = RbNaCl::Boxes::Sealed.from_public_key(public_key) + * encrypted_secret = box.encrypt("my_secret") + * + * # Print the base64 encoded secret + * puts Base64.strict_encode64(encrypted_secret) + * ``` + */ + "actions/create-or-update-org-secret": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The name of the secret. 
*/ + secret_name: components["parameters"]["secret-name"]; + }; + }; + responses: { + /** Response when creating a secret */ + 201: { + content: { + "application/json": components["schemas"]["empty-object"]; + }; + }; + /** Response when updating a secret */ + 204: never; + }; + requestBody: { + content: { + "application/json": { + /** @description Value for your secret, encrypted with [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages) using the public key retrieved from the [Get an organization public key](https://docs.github.com/rest/reference/actions#get-an-organization-public-key) endpoint. */ + encrypted_value?: string; + /** @description ID of the key you used to encrypt the secret. */ + key_id?: string; + /** + * @description Which type of organization repositories have access to the organization secret. `selected` means only the repositories specified by `selected_repository_ids` can access the secret. + * @enum {string} + */ + visibility: "all" | "private" | "selected"; + /** @description An array of repository ids that can access the organization secret. You can only provide a list of repository ids when the `visibility` is set to `selected`. You can manage the list of selected repositories using the [List selected repositories for an organization secret](https://docs.github.com/rest/reference/actions#list-selected-repositories-for-an-organization-secret), [Set selected repositories for an organization secret](https://docs.github.com/rest/reference/actions#set-selected-repositories-for-an-organization-secret), and [Remove selected repository from an organization secret](https://docs.github.com/rest/reference/actions#remove-selected-repository-from-an-organization-secret) endpoints. */ + selected_repository_ids?: (Partial<number> & Partial<string>)[]; + }; + }; + }; + }; + /** Deletes a secret in an organization using the secret name. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to use this endpoint. */ + "actions/delete-org-secret": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The name of the secret. */ + secret_name: components["parameters"]["secret-name"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** Lists all repositories that have been selected when the `visibility` for repository access to a secret is set to `selected`. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to use this endpoint. */ + "actions/list-selected-repos-for-org-secret": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The name of the secret. */ + secret_name: components["parameters"]["secret-name"]; + }; + query: { + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": { + total_count: number; + repositories: components["schemas"]["minimal-repository"][]; + }; + }; + }; + }; + }; + /** Replaces all repositories for an organization secret when the `visibility` for repository access is set to `selected`.
The visibility is set when you [Create or update an organization secret](https://docs.github.com/rest/reference/actions#create-or-update-an-organization-secret). You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to use this endpoint. */ + "actions/set-selected-repos-for-org-secret": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The name of the secret. */ + secret_name: components["parameters"]["secret-name"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + requestBody: { + content: { + "application/json": { + /** @description An array of repository ids that can access the organization secret. You can only provide a list of repository ids when the `visibility` is set to `selected`. You can add and remove individual repositories using the [Set selected repositories for an organization secret](https://docs.github.com/rest/reference/actions#set-selected-repositories-for-an-organization-secret) and [Remove selected repository from an organization secret](https://docs.github.com/rest/reference/actions#remove-selected-repository-from-an-organization-secret) endpoints. */ + selected_repository_ids: number[]; + }; + }; + }; + }; + /** Adds a repository to an organization secret when the `visibility` for repository access is set to `selected`. The visibility is set when you [Create or update an organization secret](https://docs.github.com/rest/reference/actions#create-or-update-an-organization-secret). You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to use this endpoint. */ + "actions/add-selected-repo-to-org-secret": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The name of the secret. */ + secret_name: components["parameters"]["secret-name"]; + repository_id: number; + }; + }; + responses: { + /** No Content when repository was added to the selected list */ + 204: never; + /** Conflict when visibility type is not set to selected */ + 409: unknown; + }; + }; + /** Removes a repository from an organization secret when the `visibility` for repository access is set to `selected`. The visibility is set when you [Create or update an organization secret](https://docs.github.com/rest/reference/actions#create-or-update-an-organization-secret). You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to use this endpoint. */ + "actions/remove-selected-repo-from-org-secret": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The name of the secret. */ + secret_name: components["parameters"]["secret-name"]; + repository_id: number; + }; + }; + responses: { + /** Response when repository was removed from the selected list */ + 204: never; + /** Conflict when visibility type not set to selected */ + 409: unknown; + }; + }; + /** + * Gets the audit log for an organization. For more information, see "[Reviewing the audit log for your organization](https://docs.github.com/github/setting-up-and-managing-organizations-and-teams/reviewing-the-audit-log-for-your-organization)." 
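+ *
+ * As an editorial illustration only (not part of the generated description), a minimal sketch with `@octokit/core`, assuming an authenticated `octokit` instance; the organization name and search phrase below are placeholders:
+ *
+ * ```typescript
+ * const { data: events } = await octokit.request("GET /orgs/{org}/audit-log", {
+ *   org: "octo-org", // hypothetical organization
+ *   phrase: "action:org.update_member", // hypothetical filter phrase
+ *   include: "all",
+ *   per_page: 100,
+ * });
+ * console.log(events.length);
+ * ```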
+ * + * This endpoint is available for organizations on GitHub Enterprise Cloud. To use this endpoint, you must be an organization owner, and you must use an access token with the `admin:org` scope. GitHub Apps must have the `organization_administration` read permission to use this endpoint. + * + * By default, the response includes up to 30 events from the past three months. Use the `phrase` parameter to filter results and retrieve older events. For example, use the `phrase` parameter with the `created` qualifier to filter events based on when the events occurred. For more information, see "[Reviewing the audit log for your organization](https://docs.github.com/organizations/keeping-your-organization-secure/managing-security-settings-for-your-organization/reviewing-the-audit-log-for-your-organization#searching-the-audit-log)." + * + * Use pagination to retrieve fewer or more than 30 events. For more information, see "[Resources in the REST API](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination)." + */ + "orgs/get-audit-log": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + query: { + /** A search phrase. For more information, see [Searching the audit log](https://docs.github.com/github/setting-up-and-managing-organizations-and-teams/reviewing-the-audit-log-for-your-organization#searching-the-audit-log). */ + phrase?: components["parameters"]["audit-log-phrase"]; + /** + * The event types to include: + * + * - `web` - returns web (non-Git) events. + * - `git` - returns Git events. + * - `all` - returns both web and Git events. + * + * The default is `web`. + */ + include?: components["parameters"]["audit-log-include"]; + /** A cursor, as given in the [Link header](https://docs.github.com/rest/overview/resources-in-the-rest-api#link-header). If specified, the query only searches for events after this cursor. */ + after?: components["parameters"]["audit-log-after"]; + /** A cursor, as given in the [Link header](https://docs.github.com/rest/overview/resources-in-the-rest-api#link-header). If specified, the query only searches for events before this cursor. */ + before?: components["parameters"]["audit-log-before"]; + /** + * The order of audit log events. To list newest events first, specify `desc`. To list oldest events first, specify `asc`. + * + * The default is `desc`. + */ + order?: components["parameters"]["audit-log-order"]; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["audit-log-event"][]; + }; + }; + }; + }; + /** List the users blocked by an organization. */ + "orgs/list-blocked-users": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["simple-user"][]; + }; + }; + }; + }; + "orgs/check-blocked-user": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The handle for the GitHub user account. 
*/ + username: components["parameters"]["username"]; + }; + }; + responses: { + /** If the user is blocked: */ + 204: never; + /** If the user is not blocked: */ + 404: { + content: { + "application/json": components["schemas"]["basic-error"]; + }; + }; + }; + }; + "orgs/block-user": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + }; + responses: { + /** Response */ + 204: never; + 422: components["responses"]["validation_failed"]; + }; + }; + "orgs/unblock-user": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** + * Lists code scanning alerts for the default branch for all eligible repositories in an organization. Eligible repositories are repositories that are owned by organizations that you own or for which you are a security manager. For more information, see "[Managing security managers in your organization](https://docs.github.com/organizations/managing-peoples-access-to-your-organization-with-roles/managing-security-managers-in-your-organization)." + * + * To use this endpoint, you must be an owner or security manager for the organization, and you must use an access token with the `repo` scope or `security_events` scope. + * + * GitHub Apps must have the `security_events` read permission to use this endpoint. + */ + "code-scanning/list-alerts-for-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + query: { + /** The name of a code scanning tool. Only results by this tool will be listed. You can specify the tool by using either `tool_name` or `tool_guid`, but not both. */ + tool_name?: components["parameters"]["tool-name"]; + /** The GUID of a code scanning tool. Only results by this tool will be listed. Note that some code scanning tools may not include a GUID in their analysis data. You can specify the tool by using either `tool_guid` or `tool_name`, but not both. */ + tool_guid?: components["parameters"]["tool-guid"]; + /** A cursor, as given in the [Link header](https://docs.github.com/rest/overview/resources-in-the-rest-api#link-header). If specified, the query only searches for events before this cursor. */ + before?: components["parameters"]["pagination-before"]; + /** A cursor, as given in the [Link header](https://docs.github.com/rest/overview/resources-in-the-rest-api#link-header). If specified, the query only searches for events after this cursor. */ + after?: components["parameters"]["pagination-after"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** The direction to sort the results by. */ + direction?: components["parameters"]["direction"]; + /** If specified, only code scanning alerts with this state will be returned. */ + state?: components["schemas"]["code-scanning-alert-state"]; + /** The property by which to sort the results. 
*/ + sort?: "created" | "updated"; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["code-scanning-organization-alert-items"][]; + }; + }; + 403: components["responses"]["code_scanning_forbidden_read"]; + 404: components["responses"]["not_found"]; + 503: components["responses"]["service_unavailable"]; + }; + }; + /** + * Lists the codespaces associated to a specified organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + "codespaces/list-in-organization": { + parameters: { + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": { + total_count: number; + codespaces: components["schemas"]["codespace"][]; + }; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 500: components["responses"]["internal_error"]; + }; + }; + /** + * Listing and deleting credential authorizations is available to organizations with GitHub Enterprise Cloud. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products). + * + * An authenticated organization owner with the `read:org` scope can list all credential authorizations for an organization that uses SAML single sign-on (SSO). The credentials are either personal access tokens or SSH keys that organization members have authorized for the organization. For more information, see [About authentication with SAML single sign-on](https://docs.github.com/en/articles/about-authentication-with-saml-single-sign-on). + */ + "orgs/list-saml-sso-authorizations": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page token */ + page?: number; + /** Limits the list of credentials authorizations for an organization to a specific login */ + login?: string; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["credential-authorization"][]; + }; + }; + }; + }; + /** + * Listing and deleting credential authorizations is available to organizations with GitHub Enterprise Cloud. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products). + * + * An authenticated organization owner with the `admin:org` scope can remove a credential authorization for an organization that uses SAML SSO. Once you remove someone's credential authorization, they will need to create a new personal access token or SSH key and authorize it for the organization they want to access. + */ + "orgs/remove-saml-sso-authorization": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. 
*/ + org: components["parameters"]["org"]; + credential_id: number; + }; + }; + responses: { + /** Response */ + 204: never; + 404: components["responses"]["not_found"]; + }; + }; + /** Lists all secrets available in an organization without revealing their encrypted values. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` organization permission to use this endpoint. */ + "dependabot/list-org-secrets": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": { + total_count: number; + secrets: components["schemas"]["organization-dependabot-secret"][]; + }; + }; + }; + }; + }; + /** Gets your public key, which you need to encrypt secrets. You need to encrypt a secret before you can create or update secrets. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` organization permission to use this endpoint. */ + "dependabot/get-org-public-key": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["dependabot-public-key"]; + }; + }; + }; + }; + /** Gets a single organization secret without revealing its encrypted value. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` organization permission to use this endpoint. */ + "dependabot/get-org-secret": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The name of the secret. */ + secret_name: components["parameters"]["secret-name"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["organization-dependabot-secret"]; + }; + }; + }; + }; + /** + * Creates or updates an organization secret with an encrypted value. Encrypt your secret using + * [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages). You must authenticate using an access + * token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` organization + * permission to use this endpoint. + * + * #### Example encrypting a secret using Node.js + * + * Encrypt your secret using the [tweetsodium](https://github.com/github/tweetsodium) library. + * + * ``` + * const sodium = require('tweetsodium'); + * + * const key = "base64-encoded-public-key"; + * const value = "plain-text-secret"; + * + * // Convert the message and key to Uint8Array's (Buffer implements that interface) + * const messageBytes = Buffer.from(value); + * const keyBytes = Buffer.from(key, 'base64'); + * + * // Encrypt using LibSodium. 
+ * const encryptedBytes = sodium.seal(messageBytes, keyBytes); + * + * // Base64 the encrypted secret + * const encrypted = Buffer.from(encryptedBytes).toString('base64'); + * + * console.log(encrypted); + * ``` + * + * + * #### Example encrypting a secret using Python + * + * Encrypt your secret using [pynacl](https://pynacl.readthedocs.io/en/latest/public/#nacl-public-sealedbox) with Python 3. + * + * ``` + * from base64 import b64encode + * from nacl import encoding, public + * + * def encrypt(public_key: str, secret_value: str) -> str: + * """Encrypt a Unicode string using the public key.""" + * public_key = public.PublicKey(public_key.encode("utf-8"), encoding.Base64Encoder()) + * sealed_box = public.SealedBox(public_key) + * encrypted = sealed_box.encrypt(secret_value.encode("utf-8")) + * return b64encode(encrypted).decode("utf-8") + * ``` + * + * #### Example encrypting a secret using C# + * + * Encrypt your secret using the [Sodium.Core](https://www.nuget.org/packages/Sodium.Core/) package. + * + * ``` + * var secretValue = System.Text.Encoding.UTF8.GetBytes("mySecret"); + * var publicKey = Convert.FromBase64String("2Sg8iYjAxxmI2LvUXpJjkYrMxURPc8r+dB7TJyvvcCU="); + * + * var sealedPublicKeyBox = Sodium.SealedPublicKeyBox.Create(secretValue, publicKey); + * + * Console.WriteLine(Convert.ToBase64String(sealedPublicKeyBox)); + * ``` + * + * #### Example encrypting a secret using Ruby + * + * Encrypt your secret using the [rbnacl](https://github.com/RubyCrypto/rbnacl) gem. + * + * ```ruby + * require "rbnacl" + * require "base64" + * + * key = Base64.decode64("+ZYvJDZMHUfBkJdyq5Zm9SKqeuBQ4sj+6sfjlH4CgG0=") + * public_key = RbNaCl::PublicKey.new(key) + * + * box = RbNaCl::Boxes::Sealed.from_public_key(public_key) + * encrypted_secret = box.encrypt("my_secret") + * + * # Print the base64 encoded secret + * puts Base64.strict_encode64(encrypted_secret) + * ``` + */ + "dependabot/create-or-update-org-secret": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The name of the secret. */ + secret_name: components["parameters"]["secret-name"]; + }; + }; + responses: { + /** Response when creating a secret */ + 201: { + content: { + "application/json": components["schemas"]["empty-object"]; + }; + }; + /** Response when updating a secret */ + 204: never; + }; + requestBody: { + content: { + "application/json": { + /** @description Value for your secret, encrypted with [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages) using the public key retrieved from the [Get an organization public key](https://docs.github.com/rest/reference/dependabot#get-an-organization-public-key) endpoint. */ + encrypted_value?: string; + /** @description ID of the key you used to encrypt the secret. */ + key_id?: string; + /** + * @description Which type of organization repositories have access to the organization secret. `selected` means only the repositories specified by `selected_repository_ids` can access the secret. + * @enum {string} + */ + visibility: "all" | "private" | "selected"; + /** @description An array of repository ids that can access the organization secret. You can only provide a list of repository ids when the `visibility` is set to `selected`. 
You can manage the list of selected repositories using the [List selected repositories for an organization secret](https://docs.github.com/rest/reference/dependabot#list-selected-repositories-for-an-organization-secret), [Set selected repositories for an organization secret](https://docs.github.com/rest/reference/dependabot#set-selected-repositories-for-an-organization-secret), and [Remove selected repository from an organization secret](https://docs.github.com/rest/reference/dependabot#remove-selected-repository-from-an-organization-secret) endpoints. */ + selected_repository_ids?: (Partial<number> & Partial<string>)[]; + }; + }; + }; + }; + /** Deletes a secret in an organization using the secret name. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` organization permission to use this endpoint. */ + "dependabot/delete-org-secret": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The name of the secret. */ + secret_name: components["parameters"]["secret-name"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** Lists all repositories that have been selected when the `visibility` for repository access to a secret is set to `selected`. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` organization permission to use this endpoint. */ + "dependabot/list-selected-repos-for-org-secret": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The name of the secret. */ + secret_name: components["parameters"]["secret-name"]; + }; + query: { + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": { + total_count: number; + repositories: components["schemas"]["minimal-repository"][]; + }; + }; + }; + }; + }; + /** Replaces all repositories for an organization secret when the `visibility` for repository access is set to `selected`. The visibility is set when you [Create or update an organization secret](https://docs.github.com/rest/reference/dependabot#create-or-update-an-organization-secret). You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` organization permission to use this endpoint. */ + "dependabot/set-selected-repos-for-org-secret": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The name of the secret. */ + secret_name: components["parameters"]["secret-name"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + requestBody: { + content: { + "application/json": { + /** @description An array of repository ids that can access the organization secret. You can only provide a list of repository ids when the `visibility` is set to `selected`.
You can add and remove individual repositories using the [Set selected repositories for an organization secret](https://docs.github.com/rest/reference/dependabot#set-selected-repositories-for-an-organization-secret) and [Remove selected repository from an organization secret](https://docs.github.com/rest/reference/dependabot#remove-selected-repository-from-an-organization-secret) endpoints. */ + selected_repository_ids: number[]; + }; + }; + }; + }; + /** Adds a repository to an organization secret when the `visibility` for repository access is set to `selected`. The visibility is set when you [Create or update an organization secret](https://docs.github.com/rest/reference/dependabot#create-or-update-an-organization-secret). You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` organization permission to use this endpoint. */ + "dependabot/add-selected-repo-to-org-secret": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The name of the secret. */ + secret_name: components["parameters"]["secret-name"]; + repository_id: number; + }; + }; + responses: { + /** No Content when repository was added to the selected list */ + 204: never; + /** Conflict when visibility type is not set to selected */ + 409: unknown; + }; + }; + /** Removes a repository from an organization secret when the `visibility` for repository access is set to `selected`. The visibility is set when you [Create or update an organization secret](https://docs.github.com/rest/reference/dependabot#create-or-update-an-organization-secret). You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` organization permission to use this endpoint. */ + "dependabot/remove-selected-repo-from-org-secret": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The name of the secret. */ + secret_name: components["parameters"]["secret-name"]; + repository_id: number; + }; + }; + responses: { + /** Response when repository was removed from the selected list */ + 204: never; + /** Conflict when visibility type not set to selected */ + 409: unknown; + }; + }; + "activity/list-public-org-events": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["event"][]; + }; + }; + }; + }; + /** + * Displays information about the specific group's usage. Provides a list of the group's external members as well as a list of teams that this group is connected to. + * + * You can manage team membership with your identity provider using Enterprise Managed Users for GitHub Enterprise Cloud. For more information, see "[GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products)" in the GitHub Help documentation. + */ + "teams/external-idp-group-info-for-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. 
*/ + org: components["parameters"]["org"]; + /** The unique identifier of the group. */ + group_id: components["parameters"]["group-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["external-group"]; + }; + }; + }; + }; + /** + * Lists external groups available in an organization. You can query the groups using the `display_name` parameter, only groups with a `group_name` containing the text provided in the `display_name` parameter will be returned. You can also limit your page results using the `per_page` parameter. GitHub generates a url-encoded `page` token using a cursor value for where the next page begins. For more information on cursor pagination, see "[Offset and Cursor Pagination explained](https://dev.to/jackmarchant/offset-and-cursor-pagination-explained-b89)." + * + * You can manage team membership with your identity provider using Enterprise Managed Users for GitHub Enterprise Cloud. For more information, see "[GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products)" in the GitHub Help documentation. + */ + "teams/list-external-idp-groups-for-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page token */ + page?: number; + /** Limits the list to groups containing the text in the group name */ + display_name?: string; + }; + }; + responses: { + /** Response */ + 200: { + headers: { + Link?: string; + }; + content: { + "application/json": components["schemas"]["external-groups"]; + }; + }; + }; + }; + /** The return hash contains `failed_at` and `failed_reason` fields which represent the time at which the invitation failed and the reason for the failure. */ + "orgs/list-failed-invitations": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["organization-invitation"][]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + "orgs/list-webhooks": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["org-hook"][]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** Here's how you can create a hook that posts payloads in JSON format: */ + "orgs/create-webhook": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. 
*/ + org: components["parameters"]["org"]; + }; + }; + responses: { + /** Response */ + 201: { + headers: { + Location?: string; + }; + content: { + "application/json": components["schemas"]["org-hook"]; + }; + }; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description Must be passed as "web". */ + name: string; + /** @description Key/value pairs to provide settings for this webhook. [These are defined below](https://docs.github.com/rest/reference/orgs#create-hook-config-params). */ + config: { + url: components["schemas"]["webhook-config-url"]; + content_type?: components["schemas"]["webhook-config-content-type"]; + secret?: components["schemas"]["webhook-config-secret"]; + insecure_ssl?: components["schemas"]["webhook-config-insecure-ssl"]; + /** @example "kdaigle" */ + username?: string; + /** @example "password" */ + password?: string; + }; + /** + * @description Determines what [events](https://docs.github.com/webhooks/event-payloads) the hook is triggered for. + * @default [ + * "push" + * ] + */ + events?: string[]; + /** + * @description Determines if notifications are sent when the webhook is triggered. Set to `true` to send notifications. + * @default true + */ + active?: boolean; + }; + }; + }; + }; + /** Returns a webhook configured in an organization. To get only the webhook `config` properties, see "[Get a webhook configuration for an organization](/rest/reference/orgs#get-a-webhook-configuration-for-an-organization)." */ + "orgs/get-webhook": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The unique identifier of the hook. */ + hook_id: components["parameters"]["hook-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["org-hook"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + "orgs/delete-webhook": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The unique identifier of the hook. */ + hook_id: components["parameters"]["hook-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + 404: components["responses"]["not_found"]; + }; + }; + /** Updates a webhook configured in an organization. When you update a webhook, the `secret` will be overwritten. If you previously had a `secret` set, you must provide the same `secret` or set a new `secret` or the secret will be removed. If you are only updating individual webhook `config` properties, use "[Update a webhook configuration for an organization](/rest/reference/orgs#update-a-webhook-configuration-for-an-organization)." */ + "orgs/update-webhook": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The unique identifier of the hook. */ + hook_id: components["parameters"]["hook-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["org-hook"]; + }; + }; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description Key/value pairs to provide settings for this webhook. [These are defined below](https://docs.github.com/rest/reference/orgs#update-hook-config-params). 
*/ + config?: { + url: components["schemas"]["webhook-config-url"]; + content_type?: components["schemas"]["webhook-config-content-type"]; + secret?: components["schemas"]["webhook-config-secret"]; + insecure_ssl?: components["schemas"]["webhook-config-insecure-ssl"]; + }; + /** + * @description Determines what [events](https://docs.github.com/webhooks/event-payloads) the hook is triggered for. + * @default [ + * "push" + * ] + */ + events?: string[]; + /** + * @description Determines if notifications are sent when the webhook is triggered. Set to `true` to send notifications. + * @default true + */ + active?: boolean; + /** @example "web" */ + name?: string; + }; + }; + }; + }; + /** + * Returns the webhook configuration for an organization. To get more information about the webhook, including the `active` state and `events`, use "[Get an organization webhook ](/rest/reference/orgs#get-an-organization-webhook)." + * + * Access tokens must have the `admin:org_hook` scope, and GitHub Apps must have the `organization_hooks:read` permission. + */ + "orgs/get-webhook-config-for-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The unique identifier of the hook. */ + hook_id: components["parameters"]["hook-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["webhook-config"]; + }; + }; + }; + }; + /** + * Updates the webhook configuration for an organization. To update more information about the webhook, including the `active` state and `events`, use "[Update an organization webhook ](/rest/reference/orgs#update-an-organization-webhook)." + * + * Access tokens must have the `admin:org_hook` scope, and GitHub Apps must have the `organization_hooks:write` permission. + */ + "orgs/update-webhook-config-for-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The unique identifier of the hook. */ + hook_id: components["parameters"]["hook-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["webhook-config"]; + }; + }; + }; + requestBody: { + content: { + "application/json": { + url?: components["schemas"]["webhook-config-url"]; + content_type?: components["schemas"]["webhook-config-content-type"]; + secret?: components["schemas"]["webhook-config-secret"]; + insecure_ssl?: components["schemas"]["webhook-config-insecure-ssl"]; + }; + }; + }; + }; + /** Returns a list of webhook deliveries for a webhook configured in an organization. */ + "orgs/list-webhook-deliveries": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The unique identifier of the hook. */ + hook_id: components["parameters"]["hook-id"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Used for pagination: the starting delivery from which the page of deliveries is fetched. Refer to the `link` header for the next and previous page cursors. 
*/ + cursor?: components["parameters"]["cursor"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["hook-delivery-item"][]; + }; + }; + 400: components["responses"]["bad_request"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** Returns a delivery for a webhook configured in an organization. */ + "orgs/get-webhook-delivery": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The unique identifier of the hook. */ + hook_id: components["parameters"]["hook-id"]; + delivery_id: components["parameters"]["delivery-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["hook-delivery"]; + }; + }; + 400: components["responses"]["bad_request"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** Redeliver a delivery for a webhook configured in an organization. */ + "orgs/redeliver-webhook-delivery": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The unique identifier of the hook. */ + hook_id: components["parameters"]["hook-id"]; + delivery_id: components["parameters"]["delivery-id"]; + }; + }; + responses: { + 202: components["responses"]["accepted"]; + 400: components["responses"]["bad_request"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** This will trigger a [ping event](https://docs.github.com/webhooks/#ping-event) to be sent to the hook. */ + "orgs/ping-webhook": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The unique identifier of the hook. */ + hook_id: components["parameters"]["hook-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Enables an authenticated GitHub App to find the organization's installation information. + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + "apps/get-org-installation": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["installation"]; + }; + }; + }; + }; + /** Lists all GitHub Apps in an organization. The installation count includes all GitHub Apps installed on repositories in the organization. You must be an organization owner with `admin:read` scope to use this endpoint. */ + "orgs/list-app-installations": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": { + total_count: number; + installations: components["schemas"]["installation"][]; + }; + }; + }; + }; + }; + /** Shows which type of GitHub user can interact with this organization and when the restriction expires. If there are no restrictions, you will see an empty response. 
*/ + "interactions/get-restrictions-for-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": Partial< + components["schemas"]["interaction-limit-response"] + > & + Partial<{ [key: string]: unknown }>; + }; + }; + }; + }; + /** Temporarily restricts interactions to a certain type of GitHub user in any public repository in the given organization. You must be an organization owner to set these restrictions. Setting the interaction limit at the organization level will overwrite any interaction limits that are set for individual repositories owned by the organization. */ + "interactions/set-restrictions-for-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["interaction-limit-response"]; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": components["schemas"]["interaction-limit"]; + }; + }; + }; + /** Removes all interaction restrictions from public repositories in the given organization. You must be an organization owner to remove restrictions. */ + "interactions/remove-restrictions-for-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** The return hash contains a `role` field which refers to the Organization Invitation role and will be one of the following values: `direct_member`, `admin`, `billing_manager`, `hiring_manager`, or `reinstate`. If the invitee is not a GitHub member, the `login` field in the return hash will be `null`. */ + "orgs/list-pending-invitations": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["organization-invitation"][]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Invite people to an organization by using their GitHub user ID or their email address. In order to create invitations in an organization, the authenticated user must be an organization owner. + * + * This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. + */ + "orgs/create-invitation": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. 
*/ + org: components["parameters"]["org"]; + }; + }; + responses: { + /** Response */ + 201: { + content: { + "application/json": components["schemas"]["organization-invitation"]; + }; + }; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description **Required unless you provide `email`**. GitHub user ID for the person you are inviting. */ + invitee_id?: number; + /** @description **Required unless you provide `invitee_id`**. Email address of the person you are inviting, which can be an existing GitHub user. */ + email?: string; + /** + * @description The role for the new member. + * \* `admin` - Organization owners with full administrative rights to the organization and complete access to all repositories and teams. + * \* `direct_member` - Non-owner organization members with ability to see other members and join teams by invitation. + * \* `billing_manager` - Non-owner organization members with ability to manage the billing settings of your organization. + * @default direct_member + * @enum {string} + */ + role?: "admin" | "direct_member" | "billing_manager"; + /** @description Specify IDs for the teams you want to invite new members to. */ + team_ids?: number[]; + }; + }; + }; + }; + /** + * Cancel an organization invitation. In order to cancel an organization invitation, the authenticated user must be an organization owner. + * + * This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). + */ + "orgs/cancel-invitation": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The unique identifier of the invitation. */ + invitation_id: components["parameters"]["invitation-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** List all teams associated with an invitation. In order to see invitations in an organization, the authenticated user must be an organization owner. */ + "orgs/list-invitation-teams": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The unique identifier of the invitation. */ + invitation_id: components["parameters"]["invitation-id"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["team"][]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * List issues in an organization assigned to the authenticated user. + * + * **Note**: GitHub's REST API v3 considers every pull request an issue, but not every issue is a pull request. For this + * reason, "Issues" endpoints may return both issues and pull requests in the response. You can identify pull requests by + * the `pull_request` key. Be aware that the `id` of a pull request returned from "Issues" endpoints will be an _issue id_. To find out the pull + * request id, use the "[List pull requests](https://docs.github.com/rest/reference/pulls#list-pull-requests)" endpoint. 
+ */ + "issues/list-for-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + query: { + /** Indicates which sorts of issues to return. `assigned` means issues assigned to you. `created` means issues created by you. `mentioned` means issues mentioning you. `subscribed` means issues you're subscribed to updates for. `all` or `repos` means all issues you can see, regardless of participation or creation. */ + filter?: + | "assigned" + | "created" + | "mentioned" + | "subscribed" + | "repos" + | "all"; + /** Indicates the state of the issues to return. Can be either `open`, `closed`, or `all`. */ + state?: "open" | "closed" | "all"; + /** A list of comma separated label names. Example: `bug,ui,@high` */ + labels?: components["parameters"]["labels"]; + /** What to sort results by. Can be either `created`, `updated`, `comments`. */ + sort?: "created" | "updated" | "comments"; + /** The direction to sort the results by. */ + direction?: components["parameters"]["direction"]; + /** Only show notifications updated after the given time. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. */ + since?: components["parameters"]["since"]; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["issue"][]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** List all users who are members of an organization. If the authenticated user is also a member of this organization then both concealed and public members will be returned. */ + "orgs/list-members": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + query: { + /** Filter members returned in the list. `2fa_disabled` means that only members without [two-factor authentication](https://github.com/blog/1614-two-factor-authentication) enabled will be returned. This options is only available for organization owners. */ + filter?: "2fa_disabled" | "all"; + /** Filter members returned by their role. */ + role?: "all" | "admin" | "member"; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["simple-user"][]; + }; + }; + /** Response if requester is not an organization member */ + 302: never; + 422: components["responses"]["validation_failed"]; + }; + }; + /** Check if a user is, publicly or privately, a member of the organization. */ + "orgs/check-membership-for-user": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The handle for the GitHub user account. 
*/ + username: components["parameters"]["username"]; + }; + }; + responses: { + /** Response if requester is an organization member and user is a member */ + 204: never; + /** Response if requester is not an organization member */ + 302: never; + /** Not Found if requester is an organization member and user is not a member */ + 404: unknown; + }; + }; + /** Removing a user from this list will remove them from all teams and they will no longer have any access to the organization's repositories. */ + "orgs/remove-member": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + }; + responses: { + /** Response */ + 204: never; + 403: components["responses"]["forbidden"]; + }; + }; + /** + * Deletes a user's codespace. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + "codespaces/delete-from-organization": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + /** The name of the codespace. */ + codespace_name: components["parameters"]["codespace-name"]; + }; + }; + responses: { + 202: components["responses"]["accepted"]; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 500: components["responses"]["internal_error"]; + }; + }; + /** + * Stops a user's codespace. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + "codespaces/stop-in-organization": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + /** The name of the codespace. */ + codespace_name: components["parameters"]["codespace-name"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["codespace"]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 500: components["responses"]["internal_error"]; + }; + }; + /** In order to get a user's membership with an organization, the authenticated user must be an organization member. The `state` parameter in the response can be used to identify the user's membership status. */ + "orgs/get-membership-for-user": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["org-membership"]; + }; + }; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Only authenticated organization owners can add a member to the organization or update the member's role. 
+ * + * * If the authenticated user is _adding_ a member to the organization, the invited user will receive an email inviting them to the organization. The user's [membership status](https://docs.github.com/rest/reference/orgs#get-organization-membership-for-a-user) will be `pending` until they accept the invitation. + * + * * Authenticated users can _update_ a user's membership by passing the `role` parameter. If the authenticated user changes a member's role to `admin`, the affected user will receive an email notifying them that they've been made an organization owner. If the authenticated user changes an owner's role to `member`, no email will be sent. + * + * **Rate limits** + * + * To prevent abuse, the authenticated user is limited to 50 organization invitations per 24 hour period. If the organization is more than one month old or on a paid plan, the limit is 500 invitations per 24 hour period. + */ + "orgs/set-membership-for-user": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["org-membership"]; + }; + }; + 403: components["responses"]["forbidden"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** + * @description The role to give the user in the organization. Can be one of: + * \* `admin` - The user will become an owner of the organization. + * \* `member` - The user will become a non-owner member of the organization. + * @default member + * @enum {string} + */ + role?: "admin" | "member"; + }; + }; + }; + }; + /** + * In order to remove a user's membership with an organization, the authenticated user must be an organization owner. + * + * If the specified user is an active member of the organization, this will remove them from the organization. If the specified user has been invited to the organization, this will cancel their invitation. The specified user will receive an email notification in both cases. + */ + "orgs/remove-membership-for-user": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + }; + responses: { + /** Response */ + 204: never; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** Lists the most recent migrations. */ + "migrations/list-for-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + /** Exclude attributes from the API response to improve performance */ + exclude?: "repositories"[]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["migration"][]; + }; + }; + }; + }; + /** Initiates the generation of a migration archive. */ + "migrations/start-for-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. 
*/ + org: components["parameters"]["org"]; + }; + }; + responses: { + /** Response */ + 201: { + content: { + "application/json": components["schemas"]["migration"]; + }; + }; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description A list of repository names indicating which repositories should be migrated. */ + repositories: string[]; + /** + * @description Indicates whether repositories should be locked (to prevent manipulation) while migrating data. + * @default false + * @example true + */ + lock_repositories?: boolean; + /** + * @description Indicates whether metadata should be excluded and only git source should be included for the migration. + * @default false + */ + exclude_metadata?: boolean; + /** + * @description Indicates whether the repository git data should be excluded from the migration. + * @default false + */ + exclude_git_data?: boolean; + /** + * @description Indicates whether attachments should be excluded from the migration (to reduce migration archive file size). + * @default false + * @example true + */ + exclude_attachments?: boolean; + /** + * @description Indicates whether releases should be excluded from the migration (to reduce migration archive file size). + * @default false + * @example true + */ + exclude_releases?: boolean; + /** + * @description Indicates whether projects owned by the organization or users should be excluded from the migration. + * @default false + * @example true + */ + exclude_owner_projects?: boolean; + /** + * @description Indicates whether this should only include organization metadata (repositories array should be empty and will ignore other flags). + * @default false + * @example true + */ + org_metadata_only?: boolean; + /** @description Exclude related items from being returned in the response in order to improve performance of the request. The array can include any of: `"repositories"`. */ + exclude?: "repositories"[]; + }; + }; + }; + }; + /** + * Fetches the status of a migration. + * + * The `state` of a migration can be one of the following values: + * + * * `pending`, which means the migration hasn't started yet. + * * `exporting`, which means the migration is in progress. + * * `exported`, which means the migration finished successfully. + * * `failed`, which means the migration failed. + */ + "migrations/get-status-for-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The unique identifier of the migration. */ + migration_id: components["parameters"]["migration-id"]; + }; + query: { + /** Exclude attributes from the API response to improve performance */ + exclude?: "repositories"[]; + }; + }; + responses: { + /** + * * `pending`, which means the migration hasn't started yet. + * * `exporting`, which means the migration is in progress. + * * `exported`, which means the migration finished successfully. + * * `failed`, which means the migration failed. + */ + 200: { + content: { + "application/json": components["schemas"]["migration"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** Fetches the URL to a migration archive. */ + "migrations/download-archive-for-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The unique identifier of the migration. 
*/ + migration_id: components["parameters"]["migration-id"]; + }; + }; + responses: { + /** Response */ + 302: never; + 404: components["responses"]["not_found"]; + }; + }; + /** Deletes a previous migration archive. Migration archives are automatically deleted after seven days. */ + "migrations/delete-archive-for-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The unique identifier of the migration. */ + migration_id: components["parameters"]["migration-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + 404: components["responses"]["not_found"]; + }; + }; + /** Unlocks a repository that was locked for migration. You should unlock each migrated repository and [delete them](https://docs.github.com/rest/reference/repos#delete-a-repository) when the migration is complete and you no longer need the source data. */ + "migrations/unlock-repo-for-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The unique identifier of the migration. */ + migration_id: components["parameters"]["migration-id"]; + /** repo_name parameter */ + repo_name: components["parameters"]["repo-name"]; + }; + }; + responses: { + /** Response */ + 204: never; + 404: components["responses"]["not_found"]; + }; + }; + /** List all the repositories for this organization migration. */ + "migrations/list-repos-for-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The unique identifier of the migration. */ + migration_id: components["parameters"]["migration-id"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["minimal-repository"][]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** List all users who are outside collaborators of an organization. */ + "orgs/list-outside-collaborators": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + query: { + /** Filter the list of outside collaborators. `2fa_disabled` means that only outside collaborators without [two-factor authentication](https://github.com/blog/1614-two-factor-authentication) enabled will be returned. */ + filter?: "2fa_disabled" | "all"; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["simple-user"][]; + }; + }; + }; + }; + /** When an organization member is converted to an outside collaborator, they'll only have access to the repositories that their current team membership allows. The user will no longer be a member of the organization. For more information, see "[Converting an organization member to an outside collaborator](https://docs.github.com/articles/converting-an-organization-member-to-an-outside-collaborator/)". Converting an organization member to an outside collaborator may be restricted by enterprise administrators. 
For more information, see "[Enforcing repository management policies in your enterprise](https://docs.github.com/enterprise-cloud@latest/admin/policies/enforcing-policies-for-your-enterprise/enforcing-repository-management-policies-in-your-enterprise#enforcing-a-policy-for-inviting-outside-collaborators-to-repositories)." */ + "orgs/convert-member-to-outside-collaborator": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + }; + responses: { + /** User is getting converted asynchronously */ + 202: { + content: { + "application/json": { [key: string]: unknown }; + }; + }; + /** User was converted */ + 204: never; + /** Forbidden if user is the last owner of the organization, not a member of the organization, or if the enterprise enforces a policy for inviting outside collaborators. For more information, see "[Enforcing repository management policies in your enterprise](https://docs.github.com/en/enterprise-cloud@latest/admin/policies/enforcing-policies-for-your-enterprise/enforcing-repository-management-policies-in-your-enterprise#enforcing-a-policy-for-inviting-outside-collaborators-to-repositories)." */ + 403: unknown; + 404: components["responses"]["not_found"]; + }; + requestBody: { + content: { + "application/json": { + /** + * @description When set to `true`, the request will be performed asynchronously. Returns a 202 status code when the job is successfully queued. + * @default false + */ + async?: boolean; + }; + }; + }; + }; + /** Removing a user from this list will remove them from all the organization's repositories. */ + "orgs/remove-outside-collaborator": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + }; + responses: { + /** Response */ + 204: never; + /** Unprocessable Entity if user is a member of the organization */ + 422: { + content: { + "application/json": { + message?: string; + documentation_url?: string; + }; + }; + }; + }; + }; + /** + * Lists all packages in an organization readable by the user. + * + * To use this endpoint, you must authenticate using an access token with the `packages:read` scope. + * If `package_type` is not `container`, your token must also include the `repo` scope. + */ + "packages/list-packages-for-organization": { + parameters: { + query: { + /** The type of supported package. Packages in GitHub's Gradle registry have the type `maven`. Docker images pushed to GitHub's Container registry (`ghcr.io`) have the type `container`. You can use the type `docker` to find images that were pushed to GitHub's Docker registry (`docker.pkg.github.com`), even if these have now been migrated to the Container registry. */ + package_type: + | "npm" + | "maven" + | "rubygems" + | "docker" + | "nuget" + | "container"; + /** The selected visibility of the packages. Only `container` package_types currently support `internal` visibility properly. For other ecosystems `internal` is synonymous with `private`. This parameter is optional and only filters an existing result set. */ + visibility?: components["parameters"]["package-visibility"]; + }; + path: { + /** The organization name. The name is not case sensitive. 
*/ + org: components["parameters"]["org"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["package"][]; + }; + }; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + }; + }; + /** + * Gets a specific package in an organization. + * + * To use this endpoint, you must authenticate using an access token with the `packages:read` scope. + * If `package_type` is not `container`, your token must also include the `repo` scope. + */ + "packages/get-package-for-organization": { + parameters: { + path: { + /** The type of supported package. Packages in GitHub's Gradle registry have the type `maven`. Docker images pushed to GitHub's Container registry (`ghcr.io`) have the type `container`. You can use the type `docker` to find images that were pushed to GitHub's Docker registry (`docker.pkg.github.com`), even if these have now been migrated to the Container registry. */ + package_type: components["parameters"]["package-type"]; + /** The name of the package. */ + package_name: components["parameters"]["package-name"]; + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["package"]; + }; + }; + }; + }; + /** + * Deletes an entire package in an organization. You cannot delete a public package if any version of the package has more than 5,000 downloads. In this scenario, contact GitHub support for further assistance. + * + * To use this endpoint, you must have admin permissions in the organization and authenticate using an access token with the `packages:read` and `packages:delete` scopes. In addition: + * - If `package_type` is not `container`, your token must also include the `repo` scope. + * - If `package_type` is `container`, you must also have admin permissions to the container you want to delete. + */ + "packages/delete-package-for-org": { + parameters: { + path: { + /** The type of supported package. Packages in GitHub's Gradle registry have the type `maven`. Docker images pushed to GitHub's Container registry (`ghcr.io`) have the type `container`. You can use the type `docker` to find images that were pushed to GitHub's Docker registry (`docker.pkg.github.com`), even if these have now been migrated to the Container registry. */ + package_type: components["parameters"]["package-type"]; + /** The name of the package. */ + package_name: components["parameters"]["package-name"]; + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + }; + responses: { + /** Response */ + 204: never; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Restores an entire package in an organization. + * + * You can restore a deleted package under the following conditions: + * - The package was deleted within the last 30 days. + * - The same package namespace and version is still available and not reused for a new package. If the same package namespace is not available, you will not be able to restore your package. In this scenario, to restore the deleted package, you must delete the new package that uses the deleted package's namespace first. 
+ * + * To use this endpoint, you must have admin permissions in the organization and authenticate using an access token with the `packages:read` and `packages:write` scopes. In addition: + * - If `package_type` is not `container`, your token must also include the `repo` scope. + * - If `package_type` is `container`, you must also have admin permissions to the container that you want to restore. + */ + "packages/restore-package-for-org": { + parameters: { + path: { + /** The type of supported package. Packages in GitHub's Gradle registry have the type `maven`. Docker images pushed to GitHub's Container registry (`ghcr.io`) have the type `container`. You can use the type `docker` to find images that were pushed to GitHub's Docker registry (`docker.pkg.github.com`), even if these have now been migrated to the Container registry. */ + package_type: components["parameters"]["package-type"]; + /** The name of the package. */ + package_name: components["parameters"]["package-name"]; + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + query: { + /** package token */ + token?: string; + }; + }; + responses: { + /** Response */ + 204: never; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Returns all package versions for a package owned by an organization. + * + * To use this endpoint, you must authenticate using an access token with the `packages:read` scope. + * If `package_type` is not `container`, your token must also include the `repo` scope. + */ + "packages/get-all-package-versions-for-package-owned-by-org": { + parameters: { + path: { + /** The type of supported package. Packages in GitHub's Gradle registry have the type `maven`. Docker images pushed to GitHub's Container registry (`ghcr.io`) have the type `container`. You can use the type `docker` to find images that were pushed to GitHub's Docker registry (`docker.pkg.github.com`), even if these have now been migrated to the Container registry. */ + package_type: components["parameters"]["package-type"]; + /** The name of the package. */ + package_name: components["parameters"]["package-name"]; + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + query: { + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** The state of the package, either active or deleted. */ + state?: "active" | "deleted"; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["package-version"][]; + }; + }; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Gets a specific package version in an organization. + * + * You must authenticate using an access token with the `packages:read` scope. + * If `package_type` is not `container`, your token must also include the `repo` scope. + */ + "packages/get-package-version-for-organization": { + parameters: { + path: { + /** The type of supported package. Packages in GitHub's Gradle registry have the type `maven`. Docker images pushed to GitHub's Container registry (`ghcr.io`) have the type `container`. 
You can use the type `docker` to find images that were pushed to GitHub's Docker registry (`docker.pkg.github.com`), even if these have now been migrated to the Container registry. */ + package_type: components["parameters"]["package-type"]; + /** The name of the package. */ + package_name: components["parameters"]["package-name"]; + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** Unique identifier of the package version. */ + package_version_id: components["parameters"]["package-version-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["package-version"]; + }; + }; + }; + }; + /** + * Deletes a specific package version in an organization. If the package is public and the package version has more than 5,000 downloads, you cannot delete the package version. In this scenario, contact GitHub support for further assistance. + * + * To use this endpoint, you must have admin permissions in the organization and authenticate using an access token with the `packages:read` and `packages:delete` scopes. In addition: + * - If `package_type` is not `container`, your token must also include the `repo` scope. + * - If `package_type` is `container`, you must also have admin permissions to the container you want to delete. + */ + "packages/delete-package-version-for-org": { + parameters: { + path: { + /** The type of supported package. Packages in GitHub's Gradle registry have the type `maven`. Docker images pushed to GitHub's Container registry (`ghcr.io`) have the type `container`. You can use the type `docker` to find images that were pushed to GitHub's Docker registry (`docker.pkg.github.com`), even if these have now been migrated to the Container registry. */ + package_type: components["parameters"]["package-type"]; + /** The name of the package. */ + package_name: components["parameters"]["package-name"]; + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** Unique identifier of the package version. */ + package_version_id: components["parameters"]["package-version-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Restores a specific package version in an organization. + * + * You can restore a deleted package under the following conditions: + * - The package was deleted within the last 30 days. + * - The same package namespace and version is still available and not reused for a new package. If the same package namespace is not available, you will not be able to restore your package. In this scenario, to restore the deleted package, you must delete the new package that uses the deleted package's namespace first. + * + * To use this endpoint, you must have admin permissions in the organization and authenticate using an access token with the `packages:read` and `packages:write` scopes. In addition: + * - If `package_type` is not `container`, your token must also include the `repo` scope. + * - If `package_type` is `container`, you must also have admin permissions to the container that you want to restore. + */ + "packages/restore-package-version-for-org": { + parameters: { + path: { + /** The type of supported package. Packages in GitHub's Gradle registry have the type `maven`. 
Docker images pushed to GitHub's Container registry (`ghcr.io`) have the type `container`. You can use the type `docker` to find images that were pushed to GitHub's Docker registry (`docker.pkg.github.com`), even if these have now been migrated to the Container registry. */ + package_type: components["parameters"]["package-type"]; + /** The name of the package. */ + package_name: components["parameters"]["package-name"]; + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** Unique identifier of the package version. */ + package_version_id: components["parameters"]["package-version-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** Lists the projects in an organization. Returns a `404 Not Found` status if projects are disabled in the organization. If you do not have sufficient privileges to perform this action, a `401 Unauthorized` or `410 Gone` status is returned. */ + "projects/list-for-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + query: { + /** Indicates the state of the projects to return. Can be either `open`, `closed`, or `all`. */ + state?: "open" | "closed" | "all"; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["project"][]; + }; + }; + 422: components["responses"]["validation_failed_simple"]; + }; + }; + /** Creates an organization project board. Returns a `410 Gone` status if projects are disabled in the organization or if the organization does not have existing classic projects. If you do not have sufficient privileges to perform this action, a `401 Unauthorized` or `410 Gone` status is returned. */ + "projects/create-for-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + }; + responses: { + /** Response */ + 201: { + content: { + "application/json": components["schemas"]["project"]; + }; + }; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 410: components["responses"]["gone"]; + 422: components["responses"]["validation_failed_simple"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The name of the project. */ + name: string; + /** @description The description of the project. */ + body?: string; + }; + }; + }; + }; + /** Members of an organization can choose to have their membership publicized or not. */ + "orgs/list-public-members": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. 
*/ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["simple-user"][]; + }; + }; + }; + }; + "orgs/check-public-membership-for-user": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + }; + responses: { + /** Response if user is a public member */ + 204: never; + /** Not Found if user is not a public member */ + 404: unknown; + }; + }; + /** + * The user can publicize their own membership. (A user cannot publicize the membership for another user.) + * + * Note that you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)." + */ + "orgs/set-public-membership-for-authenticated-user": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + }; + responses: { + /** Response */ + 204: never; + 403: components["responses"]["forbidden"]; + }; + }; + "orgs/remove-public-membership-for-authenticated-user": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** Lists repositories for the specified organization. */ + "repos/list-for-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + query: { + /** Specifies the types of repositories you want returned. If your organization is associated with an enterprise account using GitHub Enterprise Cloud or GitHub Enterprise Server 2.20+, `type` can also be `internal`. However, the `internal` value is not yet supported when a GitHub App calls this API with an installation access token. */ + type?: + | "all" + | "public" + | "private" + | "forks" + | "sources" + | "member" + | "internal"; + /** The property to sort the results by. */ + sort?: "created" | "updated" | "pushed" | "full_name"; + /** The order to sort by. Default: `asc` when using `full_name`, otherwise `desc`. */ + direction?: "asc" | "desc"; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["minimal-repository"][]; + }; + }; + }; + }; + /** + * Creates a new repository in the specified organization. The authenticated user must be a member of the organization. + * + * **OAuth scope requirements** + * + * When using [OAuth](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/), authorizations must include: + * + * * `public_repo` scope or `repo` scope to create a public repository. Note: For GitHub AE, use `repo` scope to create an internal repository. 
+ * * `repo` scope to create a private repository + */ + "repos/create-in-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + }; + responses: { + /** Response */ + 201: { + headers: { + Location?: string; + }; + content: { + "application/json": components["schemas"]["repository"]; + }; + }; + 403: components["responses"]["forbidden"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The name of the repository. */ + name: string; + /** @description A short description of the repository. */ + description?: string; + /** @description A URL with more information about the repository. */ + homepage?: string; + /** + * @description Whether the repository is private. + * @default false + */ + private?: boolean; + /** + * @description Can be `public` or `private`. If your organization is associated with an enterprise account using GitHub Enterprise Cloud or GitHub Enterprise Server 2.20+, `visibility` can also be `internal`. Note: For GitHub Enterprise Server and GitHub AE, this endpoint will only list repositories available to all users on the enterprise. For more information, see "[Creating an internal repository](https://docs.github.com/en/github/creating-cloning-and-archiving-repositories/about-repository-visibility#about-internal-repositories)" in the GitHub Help documentation. + * @enum {string} + */ + visibility?: "public" | "private" | "internal"; + /** + * @description Either `true` to enable issues for this repository or `false` to disable them. + * @default true + */ + has_issues?: boolean; + /** + * @description Either `true` to enable projects for this repository or `false` to disable them. **Note:** If you're creating a repository in an organization that has disabled repository projects, the default is `false`, and if you pass `true`, the API returns an error. + * @default true + */ + has_projects?: boolean; + /** + * @description Either `true` to enable the wiki for this repository or `false` to disable it. + * @default true + */ + has_wiki?: boolean; + /** + * @description Either `true` to make this repo available as a template repository or `false` to prevent it. + * @default false + */ + is_template?: boolean; + /** @description The id of the team that will be granted access to this repository. This is only valid when creating a repository in an organization. */ + team_id?: number; + /** + * @description Pass `true` to create an initial commit with empty README. + * @default false + */ + auto_init?: boolean; + /** @description Desired language or platform [.gitignore template](https://github.com/github/gitignore) to apply. Use the name of the template without the extension. For example, "Haskell". */ + gitignore_template?: string; + /** @description Choose an [open source license template](https://choosealicense.com/) that best suits your needs, and then use the [license keyword](https://docs.github.com/articles/licensing-a-repository/#searching-github-by-license-type) as the `license_template` string. For example, "mit" or "mpl-2.0". */ + license_template?: string; + /** + * @description Either `true` to allow squash-merging pull requests, or `false` to prevent squash-merging. + * @default true + */ + allow_squash_merge?: boolean; + /** + * @description Either `true` to allow merging pull requests with a merge commit, or `false` to prevent merging pull requests with merge commits. 
+ * @default true + */ + allow_merge_commit?: boolean; + /** + * @description Either `true` to allow rebase-merging pull requests, or `false` to prevent rebase-merging. + * @default true + */ + allow_rebase_merge?: boolean; + /** + * @description Either `true` to allow auto-merge on pull requests, or `false` to disallow auto-merge. + * @default false + */ + allow_auto_merge?: boolean; + /** + * @description Either `true` to allow automatically deleting head branches when pull requests are merged, or `false` to prevent automatic deletion. + * @default false + */ + delete_branch_on_merge?: boolean; + /** + * @description Either `true` to allow squash-merge commits to use pull request title, or `false` to use commit message. + * @default false + */ + use_squash_pr_title_as_default?: boolean; + }; + }; + }; + }; + /** + * Lists secret scanning alerts for eligible repositories in an organization, from newest to oldest. + * To use this endpoint, you must be an administrator or security manager for the organization, and you must use an access token with the `repo` scope or `security_events` scope. + * For public repositories, you may instead use the `public_repo` scope. + * + * GitHub Apps must have the `secret_scanning_alerts` read permission to use this endpoint. + */ + "secret-scanning/list-alerts-for-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + query: { + /** Set to `open` or `resolved` to only list secret scanning alerts in a specific state. */ + state?: components["parameters"]["secret-scanning-alert-state"]; + /** + * A comma-separated list of secret types to return. By default all secret types are returned. + * See "[Secret scanning patterns](https://docs.github.com/code-security/secret-scanning/secret-scanning-patterns#supported-secrets-for-advanced-security)" + * for a complete list of secret types. + */ + secret_type?: components["parameters"]["secret-scanning-alert-secret-type"]; + /** A comma-separated list of resolutions. Only secret scanning alerts with one of these resolutions are listed. Valid resolutions are `false_positive`, `wont_fix`, `revoked`, `pattern_edited`, `pattern_deleted` or `used_in_tests`. */ + resolution?: components["parameters"]["secret-scanning-alert-resolution"]; + /** The property to sort the results by. `created` means when the alert was created. `updated` means when the alert was updated or resolved. */ + sort?: components["parameters"]["secret-scanning-alert-sort"]; + /** The direction to sort the results by. */ + direction?: components["parameters"]["direction"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** A cursor, as given in the [Link header](https://docs.github.com/rest/overview/resources-in-the-rest-api#link-header). If specified, the query only searches for events before this cursor. To receive an initial cursor on your first request, include an empty "before" query string. */ + before?: components["parameters"]["secret-scanning-pagination-before-org-repo"]; + /** A cursor, as given in the [Link header](https://docs.github.com/rest/overview/resources-in-the-rest-api#link-header). If specified, the query only searches for events after this cursor. To receive an initial cursor on your first request, include an empty "after" query string. 
*/ + after?: components["parameters"]["secret-scanning-pagination-after-org-repo"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["organization-secret-scanning-alert"][]; + }; + }; + 404: components["responses"]["not_found"]; + 503: components["responses"]["service_unavailable"]; + }; + }; + /** + * Gets the summary of the free and paid GitHub Actions minutes used. + * + * Paid minutes only apply to workflows in private repositories that use GitHub-hosted runners. Minutes used is listed for each GitHub-hosted runner operating system. Any job re-runs are also included in the usage. The usage returned includes any minute multipliers for macOS and Windows runners, and is rounded up to the nearest whole minute. For more information, see "[Managing billing for GitHub Actions](https://docs.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-actions)". + * + * Access tokens must have the `repo` or `admin:org` scope. + */ + "billing/get-github-actions-billing-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["actions-billing-usage"]; + }; + }; + }; + }; + /** + * Gets the GitHub Advanced Security active committers for an organization per repository. + * + * Each distinct user login across all repositories is counted as a single Advanced Security seat, so the `total_advanced_security_committers` is not the sum of advanced_security_committers for each repository. + * + * If this organization defers to an enterprise for billing, the `total_advanced_security_committers` returned from the organization API may include some users that are in more than one organization, so they will only consume a single Advanced Security seat at the enterprise level. + * + * The total number of repositories with committer information is tracked by the `total_count` field. + */ + "billing/get-github-advanced-security-billing-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Success */ + 200: { + content: { + "application/json": components["schemas"]["advanced-security-active-committers"]; + }; + }; + 403: components["responses"]["code_scanning_forbidden_read"]; + }; + }; + /** + * Gets the free and paid storage used for GitHub Packages in gigabytes. + * + * Paid minutes only apply to packages stored for private repositories. For more information, see "[Managing billing for GitHub Packages](https://docs.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-packages)." + * + * Access tokens must have the `repo` or `admin:org` scope. + */ + "billing/get-github-packages-billing-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. 
*/ + org: components["parameters"]["org"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["packages-billing-usage"]; + }; + }; + }; + }; + /** + * Gets the estimated paid and estimated total storage used for GitHub Actions and GitHub Packages. + * + * Paid minutes only apply to packages stored for private repositories. For more information, see "[Managing billing for GitHub Packages](https://docs.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-packages)." + * + * Access tokens must have the `repo` or `admin:org` scope. + */ + "billing/get-shared-storage-billing-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["combined-billing-usage"]; + }; + }; + }; + }; + /** + * Team synchronization is available for organizations using GitHub Enterprise Cloud. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * List IdP groups available in an organization. You can limit your page results using the `per_page` parameter. GitHub generates a url-encoded `page` token using a cursor value for where the next page begins. For more information on cursor pagination, see "[Offset and Cursor Pagination explained](https://dev.to/jackmarchant/offset-and-cursor-pagination-explained-b89)." + */ + "teams/list-idp-groups-for-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page token */ + page?: string; + }; + }; + responses: { + /** Response */ + 200: { + headers: { + Link?: string; + }; + content: { + "application/json": components["schemas"]["group-mapping"]; + }; + }; + }; + }; + /** Lists all teams in an organization that are visible to the authenticated user. */ + "teams/list": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["team"][]; + }; + }; + 403: components["responses"]["forbidden"]; + }; + }; + /** + * To create a team, the authenticated user must be a member or owner of `{org}`. By default, organization members can create teams. Organization owners can limit team creation to organization owners. For more information, see "[Setting team creation permissions](https://docs.github.com/en/articles/setting-team-creation-permissions-in-your-organization)." + * + * When you create a new team, you automatically become a team maintainer without explicitly adding yourself to the optional array of `maintainers`. For more information, see "[About teams](https://docs.github.com/en/github/setting-up-and-managing-organizations-and-teams/about-teams)". + */ + "teams/create": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. 
*/ + org: components["parameters"]["org"]; + }; + }; + responses: { + /** Response */ + 201: { + content: { + "application/json": components["schemas"]["team-full"]; + }; + }; + 403: components["responses"]["forbidden"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The name of the team. */ + name: string; + /** @description The description of the team. */ + description?: string; + /** @description List GitHub IDs for organization members who will become team maintainers. */ + maintainers?: string[]; + /** @description The full name (e.g., "organization-name/repository-name") of repositories to add the team to. */ + repo_names?: string[]; + /** + * @description The level of privacy this team should have. The options are: + * **For a non-nested team:** + * \* `secret` - only visible to organization owners and members of this team. + * \* `closed` - visible to all members of this organization. + * Default: `secret` + * **For a parent or child team:** + * \* `closed` - visible to all members of this organization. + * Default for child team: `closed` + * @enum {string} + */ + privacy?: "secret" | "closed"; + /** + * @description **Deprecated**. The permission that new repositories will be added to the team with when none is specified. + * @default pull + * @enum {string} + */ + permission?: "pull" | "push"; + /** @description The ID of a team to set as the parent team. */ + parent_team_id?: number; + }; + }; + }; + }; + /** + * Gets a team using the team's `slug`. GitHub generates the `slug` from the team `name`. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}`. + */ + "teams/get-by-name": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The slug of the team name. */ + team_slug: components["parameters"]["team-slug"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["team-full"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * To delete a team, the authenticated user must be an organization owner or team maintainer. + * + * If you are an organization owner, deleting a parent team will delete all of its child teams as well. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `DELETE /organizations/{org_id}/team/{team_id}`. + */ + "teams/delete-in-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The slug of the team name. */ + team_slug: components["parameters"]["team-slug"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** + * To edit a team, the authenticated user must either be an organization owner or a team maintainer. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `PATCH /organizations/{org_id}/team/{team_id}`. + */ + "teams/update-in-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The slug of the team name. 
*/ + team_slug: components["parameters"]["team-slug"]; + }; + }; + responses: { + /** Response */ + 201: { + content: { + "application/json": components["schemas"]["team-full"]; + }; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The name of the team. */ + name?: string; + /** @description The description of the team. */ + description?: string; + /** + * @description The level of privacy this team should have. Editing teams without specifying this parameter leaves `privacy` intact. When a team is nested, the `privacy` for parent teams cannot be `secret`. The options are: + * **For a non-nested team:** + * \* `secret` - only visible to organization owners and members of this team. + * \* `closed` - visible to all members of this organization. + * **For a parent or child team:** + * \* `closed` - visible to all members of this organization. + * @enum {string} + */ + privacy?: "secret" | "closed"; + /** + * @description **Deprecated**. The permission that new repositories will be added to the team with when none is specified. + * @default pull + * @enum {string} + */ + permission?: "pull" | "push" | "admin"; + /** @description The ID of a team to set as the parent team. */ + parent_team_id?: number | null; + }; + }; + }; + }; + /** + * List all discussions on a team's page. OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/discussions`. + */ + "teams/list-discussions-in-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The slug of the team name. */ + team_slug: components["parameters"]["team-slug"]; + }; + query: { + /** The direction to sort the results by. */ + direction?: components["parameters"]["direction"]; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + /** Pinned discussions only filter */ + pinned?: string; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["team-discussion"][]; + }; + }; + }; + }; + /** + * Creates a new discussion post on a team's page. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `POST /organizations/{org_id}/team/{team_id}/discussions`. + */ + "teams/create-discussion-in-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The slug of the team name. 
*/ + team_slug: components["parameters"]["team-slug"]; + }; + }; + responses: { + /** Response */ + 201: { + content: { + "application/json": components["schemas"]["team-discussion"]; + }; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The discussion post's title. */ + title: string; + /** @description The discussion post's body text. */ + body: string; + /** + * @description Private posts are only visible to team members, organization owners, and team maintainers. Public posts are visible to all members of the organization. Set to `true` to create a private post. + * @default false + */ + private?: boolean; + }; + }; + }; + }; + /** + * Get a specific discussion on a team's page. OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/discussions/{discussion_number}`. + */ + "teams/get-discussion-in-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The slug of the team name. */ + team_slug: components["parameters"]["team-slug"]; + /** The number that identifies the discussion. */ + discussion_number: components["parameters"]["discussion-number"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["team-discussion"]; + }; + }; + }; + }; + /** + * Delete a discussion from a team's page. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `DELETE /organizations/{org_id}/team/{team_id}/discussions/{discussion_number}`. + */ + "teams/delete-discussion-in-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The slug of the team name. */ + team_slug: components["parameters"]["team-slug"]; + /** The number that identifies the discussion. */ + discussion_number: components["parameters"]["discussion-number"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** + * Edits the title and body text of a discussion post. Only the parameters you provide are updated. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `PATCH /organizations/{org_id}/team/{team_id}/discussions/{discussion_number}`. + */ + "teams/update-discussion-in-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The slug of the team name. */ + team_slug: components["parameters"]["team-slug"]; + /** The number that identifies the discussion. */ + discussion_number: components["parameters"]["discussion-number"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["team-discussion"]; + }; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The discussion post's title. */ + title?: string; + /** @description The discussion post's body text. 
*/ + body?: string; + }; + }; + }; + }; + /** + * List all comments on a team discussion. OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/discussions/{discussion_number}/comments`. + */ + "teams/list-discussion-comments-in-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The slug of the team name. */ + team_slug: components["parameters"]["team-slug"]; + /** The number that identifies the discussion. */ + discussion_number: components["parameters"]["discussion-number"]; + }; + query: { + /** The direction to sort the results by. */ + direction?: components["parameters"]["direction"]; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["team-discussion-comment"][]; + }; + }; + }; + }; + /** + * Creates a new comment on a team discussion. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `POST /organizations/{org_id}/team/{team_id}/discussions/{discussion_number}/comments`. + */ + "teams/create-discussion-comment-in-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The slug of the team name. */ + team_slug: components["parameters"]["team-slug"]; + /** The number that identifies the discussion. */ + discussion_number: components["parameters"]["discussion-number"]; + }; + }; + responses: { + /** Response */ + 201: { + content: { + "application/json": components["schemas"]["team-discussion-comment"]; + }; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The discussion comment's body text. */ + body: string; + }; + }; + }; + }; + /** + * Get a specific comment on a team discussion. OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/discussions/{discussion_number}/comments/{comment_number}`. + */ + "teams/get-discussion-comment-in-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The slug of the team name. 
*/ + team_slug: components["parameters"]["team-slug"]; + /** The number that identifies the discussion. */ + discussion_number: components["parameters"]["discussion-number"]; + /** The number that identifies the comment. */ + comment_number: components["parameters"]["comment-number"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["team-discussion-comment"]; + }; + }; + }; + }; + /** + * Deletes a comment on a team discussion. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `DELETE /organizations/{org_id}/team/{team_id}/discussions/{discussion_number}/comments/{comment_number}`. + */ + "teams/delete-discussion-comment-in-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The slug of the team name. */ + team_slug: components["parameters"]["team-slug"]; + /** The number that identifies the discussion. */ + discussion_number: components["parameters"]["discussion-number"]; + /** The number that identifies the comment. */ + comment_number: components["parameters"]["comment-number"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** + * Edits the body text of a discussion comment. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `PATCH /organizations/{org_id}/team/{team_id}/discussions/{discussion_number}/comments/{comment_number}`. + */ + "teams/update-discussion-comment-in-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The slug of the team name. */ + team_slug: components["parameters"]["team-slug"]; + /** The number that identifies the discussion. */ + discussion_number: components["parameters"]["discussion-number"]; + /** The number that identifies the comment. */ + comment_number: components["parameters"]["comment-number"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["team-discussion-comment"]; + }; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The discussion comment's body text. */ + body: string; + }; + }; + }; + }; + /** + * List the reactions to a [team discussion comment](https://docs.github.com/rest/reference/teams#discussion-comments/). OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/:org_id/team/:team_id/discussions/:discussion_number/comments/:comment_number/reactions`. + */ + "reactions/list-for-team-discussion-comment-in-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The slug of the team name. */ + team_slug: components["parameters"]["team-slug"]; + /** The number that identifies the discussion. */ + discussion_number: components["parameters"]["discussion-number"]; + /** The number that identifies the comment. 
*/ + comment_number: components["parameters"]["comment-number"]; + }; + query: { + /** Returns a single [reaction type](https://docs.github.com/rest/reference/reactions#reaction-types). Omit this parameter to list all reactions to a team discussion comment. */ + content?: + | "+1" + | "-1" + | "laugh" + | "confused" + | "heart" + | "hooray" + | "rocket" + | "eyes"; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["reaction"][]; + }; + }; + }; + }; + /** + * Create a reaction to a [team discussion comment](https://docs.github.com/rest/reference/teams#discussion-comments). OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). A response with an HTTP `200` status means that you already added the reaction type to this team discussion comment. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `POST /organizations/:org_id/team/:team_id/discussions/:discussion_number/comments/:comment_number/reactions`. + */ + "reactions/create-for-team-discussion-comment-in-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The slug of the team name. */ + team_slug: components["parameters"]["team-slug"]; + /** The number that identifies the discussion. */ + discussion_number: components["parameters"]["discussion-number"]; + /** The number that identifies the comment. */ + comment_number: components["parameters"]["comment-number"]; + }; + }; + responses: { + /** Response when the reaction type has already been added to this team discussion comment */ + 200: { + content: { + "application/json": components["schemas"]["reaction"]; + }; + }; + /** Response */ + 201: { + content: { + "application/json": components["schemas"]["reaction"]; + }; + }; + }; + requestBody: { + content: { + "application/json": { + /** + * @description The [reaction type](https://docs.github.com/rest/reference/reactions#reaction-types) to add to the team discussion comment. + * @enum {string} + */ + content: + | "+1" + | "-1" + | "laugh" + | "confused" + | "heart" + | "hooray" + | "rocket" + | "eyes"; + }; + }; + }; + }; + /** + * **Note:** You can also specify a team or organization with `team_id` and `org_id` using the route `DELETE /organizations/:org_id/team/:team_id/discussions/:discussion_number/comments/:comment_number/reactions/:reaction_id`. + * + * Delete a reaction to a [team discussion comment](https://docs.github.com/rest/reference/teams#discussion-comments). OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + */ + "reactions/delete-for-team-discussion-comment": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The slug of the team name. */ + team_slug: components["parameters"]["team-slug"]; + /** The number that identifies the discussion. */ + discussion_number: components["parameters"]["discussion-number"]; + /** The number that identifies the comment. */ + comment_number: components["parameters"]["comment-number"]; + /** The unique identifier of the reaction. 
*/ + reaction_id: components["parameters"]["reaction-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** + * List the reactions to a [team discussion](https://docs.github.com/rest/reference/teams#discussions). OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/:org_id/team/:team_id/discussions/:discussion_number/reactions`. + */ + "reactions/list-for-team-discussion-in-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The slug of the team name. */ + team_slug: components["parameters"]["team-slug"]; + /** The number that identifies the discussion. */ + discussion_number: components["parameters"]["discussion-number"]; + }; + query: { + /** Returns a single [reaction type](https://docs.github.com/rest/reference/reactions#reaction-types). Omit this parameter to list all reactions to a team discussion. */ + content?: + | "+1" + | "-1" + | "laugh" + | "confused" + | "heart" + | "hooray" + | "rocket" + | "eyes"; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["reaction"][]; + }; + }; + }; + }; + /** + * Create a reaction to a [team discussion](https://docs.github.com/rest/reference/teams#discussions). OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). A response with an HTTP `200` status means that you already added the reaction type to this team discussion. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `POST /organizations/:org_id/team/:team_id/discussions/:discussion_number/reactions`. + */ + "reactions/create-for-team-discussion-in-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The slug of the team name. */ + team_slug: components["parameters"]["team-slug"]; + /** The number that identifies the discussion. */ + discussion_number: components["parameters"]["discussion-number"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["reaction"]; + }; + }; + /** Response */ + 201: { + content: { + "application/json": components["schemas"]["reaction"]; + }; + }; + }; + requestBody: { + content: { + "application/json": { + /** + * @description The [reaction type](https://docs.github.com/rest/reference/reactions#reaction-types) to add to the team discussion. + * @enum {string} + */ + content: + | "+1" + | "-1" + | "laugh" + | "confused" + | "heart" + | "hooray" + | "rocket" + | "eyes"; + }; + }; + }; + }; + /** + * **Note:** You can also specify a team or organization with `team_id` and `org_id` using the route `DELETE /organizations/:org_id/team/:team_id/discussions/:discussion_number/reactions/:reaction_id`. + * + * Delete a reaction to a [team discussion](https://docs.github.com/rest/reference/teams#discussions). 
OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + */ + "reactions/delete-for-team-discussion": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The slug of the team name. */ + team_slug: components["parameters"]["team-slug"]; + /** The number that identifies the discussion. */ + discussion_number: components["parameters"]["discussion-number"]; + /** The unique identifier of the reaction. */ + reaction_id: components["parameters"]["reaction-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** + * Lists a connection between a team and an external group. + * + * You can manage team membership with your identity provider using Enterprise Managed Users for GitHub Enterprise Cloud. For more information, see "[GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products)" in the GitHub Help documentation. + */ + "teams/list-linked-external-idp-groups-to-team-for-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The slug of the team name. */ + team_slug: components["parameters"]["team-slug"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["external-groups"]; + }; + }; + }; + }; + /** + * Deletes a connection between a team and an external group. + * + * You can manage team membership with your IdP using Enterprise Managed Users for GitHub Enterprise Cloud. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + */ + "teams/unlink-external-idp-group-from-team-for-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The slug of the team name. */ + team_slug: components["parameters"]["team-slug"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** + * Creates a connection between a team and an external group. Only one external group can be linked to a team. + * + * You can manage team membership with your identity provider using Enterprise Managed Users for GitHub Enterprise Cloud. For more information, see "[GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products)" in the GitHub Help documentation. + */ + "teams/link-external-idp-group-to-team-for-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The slug of the team name. */ + team_slug: components["parameters"]["team-slug"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["external-group"]; + }; + }; + }; + requestBody: { + content: { + "application/json": { + /** + * @description External Group Id + * @example 1 + */ + group_id: number; + }; + }; + }; + }; + /** + * The return hash contains a `role` field which refers to the Organization Invitation role and will be one of the following values: `direct_member`, `admin`, `billing_manager`, `hiring_manager`, or `reinstate`. If the invitee is not a GitHub member, the `login` field in the return hash will be `null`. 
+ * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/invitations`. + */ + "teams/list-pending-invitations-in-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The slug of the team name. */ + team_slug: components["parameters"]["team-slug"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["organization-invitation"][]; + }; + }; + }; + }; + /** + * Team members will include the members of child teams. + * + * To list members in a team, the team must be visible to the authenticated user. + */ + "teams/list-members-in-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The slug of the team name. */ + team_slug: components["parameters"]["team-slug"]; + }; + query: { + /** Filters members returned by their role in the team. */ + role?: "member" | "maintainer" | "all"; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["simple-user"][]; + }; + }; + }; + }; + /** + * Team members will include the members of child teams. + * + * To get a user's membership with a team, the team must be visible to the authenticated user. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/memberships/{username}`. + * + * **Note:** + * The response contains the `state` of the membership and the member's `role`. + * + * The `role` for organization owners is set to `maintainer`. For more information about `maintainer` roles, see [Create a team](https://docs.github.com/rest/reference/teams#create-a-team). + */ + "teams/get-membership-for-user-in-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The slug of the team name. */ + team_slug: components["parameters"]["team-slug"]; + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["team-membership"]; + }; + }; + /** if user has no team membership */ + 404: unknown; + }; + }; + /** + * Team synchronization is available for organizations using GitHub Enterprise Cloud. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Adds an organization member to a team. An authenticated organization owner or team maintainer can add organization members to a team. + * + * **Note:** When you have team synchronization set up for a team with your organization's identity provider (IdP), you will see an error if you attempt to use the API for making changes to the team's membership.
If you have access to manage group membership in your IdP, you can manage GitHub team membership through your identity provider, which automatically adds and removes team members in an organization. For more information, see "[Synchronizing teams between your identity provider and GitHub](https://docs.github.com/articles/synchronizing-teams-between-your-identity-provider-and-github/)." + * + * An organization owner can add someone who is not part of the team's organization to a team. When an organization owner adds someone to a team who is not an organization member, this endpoint will send an invitation to the person via email. This newly-created membership will be in the "pending" state until the person accepts the invitation, at which point the membership will transition to the "active" state and the user will be added as a member of the team. + * + * If the user is already a member of the team, this endpoint will update the team member's role. To update the membership of a team member, the authenticated user must be an organization owner or a team maintainer. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `PUT /organizations/{org_id}/team/{team_id}/memberships/{username}`. + */ + "teams/add-or-update-membership-for-user-in-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The slug of the team name. */ + team_slug: components["parameters"]["team-slug"]; + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["team-membership"]; + }; + }; + /** Forbidden if team synchronization is set up */ + 403: unknown; + /** Unprocessable Entity if you attempt to add an organization to a team */ + 422: unknown; + }; + requestBody: { + content: { + "application/json": { + /** + * @description The role that this user should have in the team. + * @default member + * @enum {string} + */ + role?: "member" | "maintainer"; + }; + }; + }; + }; + /** + * Team synchronization is available for organizations using GitHub Enterprise Cloud. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * To remove a membership between a user and a team, the authenticated user must have 'admin' permissions to the team or be an owner of the organization that the team is associated with. Removing team membership does not delete the user; it just removes their membership from the team. + * + * **Note:** When you have team synchronization set up for a team with your organization's identity provider (IdP), you will see an error if you attempt to use the API for making changes to the team's membership. If you have access to manage group membership in your IdP, you can manage GitHub team membership through your identity provider, which automatically adds and removes team members in an organization. For more information, see "[Synchronizing teams between your identity provider and GitHub](https://docs.github.com/articles/synchronizing-teams-between-your-identity-provider-and-github/)." + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `DELETE /organizations/{org_id}/team/{team_id}/memberships/{username}`.
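+ *
+ * Illustrative usage sketch (not generated from the upstream OpenAPI description; the org, team, and
+ * user values below are placeholders): this operation maps to
+ * `DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}` and is typically invoked through
+ * `@octokit/core`'s `request` method.
+ *
+ * @example
+ * import { Octokit } from "@octokit/core";
+ *
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ * // Resolves with status 204 on success; a RequestError with status 403 is thrown
+ * // when team synchronization is set up for the team.
+ * await octokit.request(
+ *   "DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}",
+ *   { org: "example-org", team_slug: "example-team", username: "octocat" }
+ * );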
+ */ + "teams/remove-membership-for-user-in-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The slug of the team name. */ + team_slug: components["parameters"]["team-slug"]; + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + }; + responses: { + /** Response */ + 204: never; + /** Forbidden if team synchronization is set up */ + 403: unknown; + }; + }; + /** + * Lists the organization projects for a team. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/projects`. + */ + "teams/list-projects-in-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The slug of the team name. */ + team_slug: components["parameters"]["team-slug"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["team-project"][]; + }; + }; + }; + }; + /** + * Checks whether a team has `read`, `write`, or `admin` permissions for an organization project. The response includes projects inherited from a parent team. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/projects/{project_id}`. + */ + "teams/check-permissions-for-project-in-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The slug of the team name. */ + team_slug: components["parameters"]["team-slug"]; + /** The unique identifier of the project. */ + project_id: components["parameters"]["project-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["team-project"]; + }; + }; + /** Not Found if project is not managed by this team */ + 404: unknown; + }; + }; + /** + * Adds an organization project to a team. To add a project to a team or update the team's permission on a project, the authenticated user must have `admin` permissions for the project. The project and team must be part of the same organization. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `PUT /organizations/{org_id}/team/{team_id}/projects/{project_id}`. + */ + "teams/add-or-update-project-permissions-in-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The slug of the team name. */ + team_slug: components["parameters"]["team-slug"]; + /** The unique identifier of the project. */ + project_id: components["parameters"]["project-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + /** Forbidden if the project is not owned by the organization */ + 403: { + content: { + "application/json": { + message?: string; + documentation_url?: string; + }; + }; + }; + }; + requestBody: { + content: { + "application/json": { + /** + * @description The permission to grant to the team for this project. Default: the team's `permission` attribute will be used to determine what permission to grant the team on this project. 
Note that, if you choose not to pass any parameters, you'll need to set `Content-Length` to zero when calling this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)." + * @enum {string} + */ + permission?: "read" | "write" | "admin"; + } | null; + }; + }; + }; + /** + * Removes an organization project from a team. An organization owner or a team maintainer can remove any project from the team. To remove a project from a team as an organization member, the authenticated user must have `read` access to both the team and project, or `admin` access to the team or project. This endpoint removes the project from the team, but does not delete the project. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `DELETE /organizations/{org_id}/team/{team_id}/projects/{project_id}`. + */ + "teams/remove-project-in-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The slug of the team name. */ + team_slug: components["parameters"]["team-slug"]; + /** The unique identifier of the project. */ + project_id: components["parameters"]["project-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** + * Lists a team's repositories visible to the authenticated user. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/repos`. + */ + "teams/list-repos-in-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The slug of the team name. */ + team_slug: components["parameters"]["team-slug"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["minimal-repository"][]; + }; + }; + }; + }; + /** + * Checks whether a team has `admin`, `push`, `maintain`, `triage`, or `pull` permission for a repository. Repositories inherited through a parent team will also be checked. + * + * You can also get information about the specified repository, including what permissions the team grants on it, by passing the custom [media type](https://docs.github.com/rest/overview/media-types/) `application/vnd.github.v3.repository+json` in the `Accept` header. + * + * If a team doesn't have permission for the repository, you will receive a `404 Not Found` response status. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/repos/{owner}/{repo}`. + */ + "teams/check-permissions-for-repo-in-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The slug of the team name. */ + team_slug: components["parameters"]["team-slug"]; + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive.
*/ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Alternative response with repository permissions */ + 200: { + content: { + "application/json": components["schemas"]["team-repository"]; + }; + }; + /** Response if team has permission for the repository. This is the response when the repository media type hasn't been provided in the Accept header. */ + 204: never; + /** Not Found if team does not have permission for the repository */ + 404: unknown; + }; + }; + /** + * To add a repository to a team or update the team's permission on a repository, the authenticated user must have admin access to the repository, and must be able to see the team. The repository must be owned by the organization, or a direct fork of a repository owned by the organization. You will get a `422 Unprocessable Entity` status if you attempt to add a repository to a team that is not owned by the organization. Note that, if you choose not to pass any parameters, you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)." + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `PUT /organizations/{org_id}/team/{team_id}/repos/{owner}/{repo}`. + * + * For more information about the permission levels, see "[Repository permission levels for an organization](https://docs.github.com/en/github/setting-up-and-managing-organizations-and-teams/repository-permission-levels-for-an-organization#permission-levels-for-repositories-owned-by-an-organization)". + */ + "teams/add-or-update-repo-permissions-in-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The slug of the team name. */ + team_slug: components["parameters"]["team-slug"]; + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + requestBody: { + content: { + "application/json": { + /** + * @description The permission to grant the team on this repository. In addition to the enumerated values, you can also specify a custom repository role name, if the owning organization has defined any. If no permission is specified, the team's `permission` attribute will be used to determine what permission to grant the team on this repository. + * @default push + * @enum {string} + */ + permission?: "pull" | "push" | "admin" | "maintain" | "triage"; + }; + }; + }; + }; + /** + * If the authenticated user is an organization owner or a team maintainer, they can remove any repositories from the team. To remove a repository from a team as an organization member, the authenticated user must have admin access to the repository and must be able to see the team. This does not delete the repository; it just removes it from the team. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `DELETE /organizations/{org_id}/team/{team_id}/repos/{owner}/{repo}`. + */ + "teams/remove-repo-in-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The slug of the team name.
*/ + team_slug: components["parameters"]["team-slug"]; + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** + * Team synchronization is available for organizations using GitHub Enterprise Cloud. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * List IdP groups connected to a team on GitHub. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/team-sync/group-mappings`. + */ + "teams/list-idp-groups-in-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The slug of the team name. */ + team_slug: components["parameters"]["team-slug"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["group-mapping"]; + }; + }; + }; + }; + /** + * Team synchronization is available for organizations using GitHub Enterprise Cloud. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Creates, updates, or removes a connection between a team and an IdP group. When adding groups to a team, you must include all new and existing groups to avoid replacing existing groups with the new ones. Specifying an empty `groups` array will remove all connections for a team. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `PATCH /organizations/{org_id}/team/{team_id}/team-sync/group-mappings`. + */ + "teams/create-or-update-idp-group-connections-in-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The slug of the team name. */ + team_slug: components["parameters"]["team-slug"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["group-mapping"]; + }; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The IdP groups you want to connect to a GitHub team. When updating, the new `groups` object will replace the original one. You must include any existing groups that you don't want to remove. */ + groups?: { + /** @description ID of the IdP group. */ + group_id: string; + /** @description Name of the IdP group. */ + group_name: string; + /** @description Description of the IdP group. */ + group_description: string; + }[]; + }; + }; + }; + }; + /** + * Lists the child teams of the team specified by `{team_slug}`. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/teams`. + */ + "teams/list-child-in-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The slug of the team name. */ + team_slug: components["parameters"]["team-slug"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. 
*/ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** if child teams exist */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["team"][]; + }; + }; + }; + }; + "projects/get-card": { + parameters: { + path: { + /** The unique identifier of the card. */ + card_id: components["parameters"]["card-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["project-card"]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + "projects/delete-card": { + parameters: { + path: { + /** The unique identifier of the card. */ + card_id: components["parameters"]["card-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + /** Forbidden */ + 403: { + content: { + "application/json": { + message?: string; + documentation_url?: string; + errors?: string[]; + }; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + "projects/update-card": { + parameters: { + path: { + /** The unique identifier of the card. */ + card_id: components["parameters"]["card-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["project-card"]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed_simple"]; + }; + requestBody: { + content: { + "application/json": { + /** + * @description The project card's note + * @example Update all gems + */ + note?: string | null; + /** + * @description Whether or not the card is archived + * @example false + */ + archived?: boolean; + }; + }; + }; + }; + "projects/move-card": { + parameters: { + path: { + /** The unique identifier of the card. */ + card_id: components["parameters"]["card-id"]; + }; + }; + responses: { + /** Response */ + 201: { + content: { + "application/json": { [key: string]: unknown }; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + /** Forbidden */ + 403: { + content: { + "application/json": { + message?: string; + documentation_url?: string; + errors?: { + code?: string; + message?: string; + resource?: string; + field?: string; + }[]; + }; + }; + }; + 422: components["responses"]["validation_failed"]; + /** Response */ + 503: { + content: { + "application/json": { + code?: string; + message?: string; + documentation_url?: string; + errors?: { + code?: string; + message?: string; + }[]; + }; + }; + }; + }; + requestBody: { + content: { + "application/json": { + /** + * @description The position of the card in a column. Can be one of: `top`, `bottom`, or `after:<card_id>` to place after the specified card. + * @example bottom + */ + position: string; + /** + * @description The unique identifier of the column the card should be moved to + * @example 42 + */ + column_id?: number; + }; + }; + }; + }; + "projects/get-column": { + parameters: { + path: { + /** The unique identifier of the column.
*/ + column_id: components["parameters"]["column-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["project-column"]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + "projects/delete-column": { + parameters: { + path: { + /** The unique identifier of the column. */ + column_id: components["parameters"]["column-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + }; + }; + "projects/update-column": { + parameters: { + path: { + /** The unique identifier of the column. */ + column_id: components["parameters"]["column-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["project-column"]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + }; + requestBody: { + content: { + "application/json": { + /** + * @description Name of the project column + * @example Remaining tasks + */ + name: string; + }; + }; + }; + }; + "projects/list-cards": { + parameters: { + path: { + /** The unique identifier of the column. */ + column_id: components["parameters"]["column-id"]; + }; + query: { + /** Filters the project cards that are returned by the card's state. */ + archived_state?: "all" | "archived" | "not_archived"; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["project-card"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + }; + }; + "projects/create-card": { + parameters: { + path: { + /** The unique identifier of the column. 
*/ + column_id: components["parameters"]["column-id"]; + }; + }; + responses: { + /** Response */ + 201: { + content: { + "application/json": components["schemas"]["project-card"]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + /** Validation failed */ + 422: { + content: { + "application/json": + | components["schemas"]["validation-error"] + | components["schemas"]["validation-error-simple"]; + }; + }; + /** Response */ + 503: { + content: { + "application/json": { + code?: string; + message?: string; + documentation_url?: string; + errors?: { + code?: string; + message?: string; + }[]; + }; + }; + }; + }; + requestBody: { + content: { + "application/json": + | { + /** + * @description The project card's note + * @example Update all gems + */ + note: string | null; + } + | { + /** + * @description The unique identifier of the content associated with the card + * @example 42 + */ + content_id: number; + /** + * @description The piece of content associated with the card + * @example PullRequest + */ + content_type: string; + }; + }; + }; + }; + "projects/move-column": { + parameters: { + path: { + /** The unique identifier of the column. */ + column_id: components["parameters"]["column-id"]; + }; + }; + responses: { + /** Response */ + 201: { + content: { + "application/json": { [key: string]: unknown }; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 422: components["responses"]["validation_failed_simple"]; + }; + requestBody: { + content: { + "application/json": { + /** + * @description The position of the column in a project. Can be one of: `first`, `last`, or `after:<column_id>` to place after the specified column. + * @example last + */ + position: string; + }; + }; + }; + }; + /** Gets a project by its `id`. Returns a `404 Not Found` status if projects are disabled. If you do not have sufficient privileges to perform this action, a `401 Unauthorized` or `410 Gone` status is returned. */ + "projects/get": { + parameters: { + path: { + /** The unique identifier of the project. */ + project_id: components["parameters"]["project-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["project"]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + }; + }; + /** Deletes a project board. Returns a `404 Not Found` status if projects are disabled. */ + "projects/delete": { + parameters: { + path: { + /** The unique identifier of the project. */ + project_id: components["parameters"]["project-id"]; + }; + }; + responses: { + /** Delete Success */ + 204: never; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + /** Forbidden */ + 403: { + content: { + "application/json": { + message?: string; + documentation_url?: string; + errors?: string[]; + }; + }; + }; + 404: components["responses"]["not_found"]; + 410: components["responses"]["gone"]; + }; + }; + /** Updates a project board's information. Returns a `404 Not Found` status if projects are disabled. If you do not have sufficient privileges to perform this action, a `401 Unauthorized` or `410 Gone` status is returned. 
*/ + "projects/update": { + parameters: { + path: { + /** The unique identifier of the project. */ + project_id: components["parameters"]["project-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["project"]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + /** Forbidden */ + 403: { + content: { + "application/json": { + message?: string; + documentation_url?: string; + errors?: string[]; + }; + }; + }; + /** Not Found if the authenticated user does not have access to the project */ + 404: unknown; + 410: components["responses"]["gone"]; + 422: components["responses"]["validation_failed_simple"]; + }; + requestBody: { + content: { + "application/json": { + /** + * @description Name of the project + * @example Week One Sprint + */ + name?: string; + /** + * @description Body of the project + * @example This project represents the sprint of the first week in January + */ + body?: string | null; + /** + * @description State of the project; either 'open' or 'closed' + * @example open + */ + state?: string; + /** + * @description The baseline permission that all organization members have on this project + * @enum {string} + */ + organization_permission?: "read" | "write" | "admin" | "none"; + /** @description Whether or not this project can be seen by everyone. */ + private?: boolean; + }; + }; + }; + }; + /** Lists the collaborators for an organization project. For a project, the list of collaborators includes outside collaborators, organization members that are direct collaborators, organization members with access through team memberships, organization members with access through default organization permissions, and organization owners. You must be an organization owner or a project `admin` to list collaborators. */ + "projects/list-collaborators": { + parameters: { + path: { + /** The unique identifier of the project. */ + project_id: components["parameters"]["project-id"]; + }; + query: { + /** Filters the collaborators by their affiliation. `outside` means outside collaborators of a project that are not a member of the project's organization. `direct` means collaborators with permissions to a project, regardless of organization membership status. `all` means all collaborators the authenticated user can see. */ + affiliation?: "outside" | "direct" | "all"; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["simple-user"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** Adds a collaborator to an organization project and sets their permission level. You must be an organization owner or a project `admin` to add a collaborator. */ + "projects/add-collaborator": { + parameters: { + path: { + /** The unique identifier of the project. */ + project_id: components["parameters"]["project-id"]; + /** The handle for the GitHub user account. 
*/ + username: components["parameters"]["username"]; + }; + }; + responses: { + /** Response */ + 204: never; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** + * @description The permission to grant the collaborator. + * @default write + * @example write + * @enum {string} + */ + permission?: "read" | "write" | "admin"; + } | null; + }; + }; + }; + /** Removes a collaborator from an organization project. You must be an organization owner or a project `admin` to remove a collaborator. */ + "projects/remove-collaborator": { + parameters: { + path: { + /** The unique identifier of the project. */ + project_id: components["parameters"]["project-id"]; + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + }; + responses: { + /** Response */ + 204: never; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** Returns the collaborator's permission level for an organization project. Possible values for the `permission` key: `admin`, `write`, `read`, `none`. You must be an organization owner or a project `admin` to review a user's permission level. */ + "projects/get-permission-for-user": { + parameters: { + path: { + /** The unique identifier of the project. */ + project_id: components["parameters"]["project-id"]; + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["project-collaborator-permission"]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + "projects/list-columns": { + parameters: { + path: { + /** The unique identifier of the project. */ + project_id: components["parameters"]["project-id"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["project-column"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + }; + }; + "projects/create-column": { + parameters: { + path: { + /** The unique identifier of the project. 
*/ + project_id: components["parameters"]["project-id"]; + }; + }; + responses: { + /** Response */ + 201: { + content: { + "application/json": components["schemas"]["project-column"]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 422: components["responses"]["validation_failed_simple"]; + }; + requestBody: { + content: { + "application/json": { + /** + * @description Name of the project column + * @example Remaining tasks + */ + name: string; + }; + }; + }; + }; + /** + * **Note:** Accessing this endpoint does not count against your REST API rate limit. + * + * **Note:** The `rate` object is deprecated. If you're writing new API client code or updating existing code, you should use the `core` object instead of the `rate` object. The `core` object contains the same information that is present in the `rate` object. + */ + "rate-limit/get": { + parameters: {}; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["rate-limit-overview"]; + }; + }; + 304: components["responses"]["not_modified"]; + 404: components["responses"]["not_found"]; + }; + }; + /** The `parent` and `source` objects are present when the repository is a fork. `parent` is the repository this repository was forked from, `source` is the ultimate source for the network. */ + "repos/get": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["full-repository"]; + }; + }; + 301: components["responses"]["moved_permanently"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Deleting a repository requires admin access. If OAuth is used, the `delete_repo` scope is required. + * + * If an organization owner has configured the organization to prevent members from deleting organization-owned + * repositories, you will get a `403 Forbidden` response. + */ + "repos/delete": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 204: never; + 307: components["responses"]["temporary_redirect"]; + /** If an organization owner has configured the organization to prevent members from deleting organization-owned repositories, a member will get this response: */ + 403: { + content: { + "application/json": { + message?: string; + documentation_url?: string; + }; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** **Note**: To edit a repository's topics, use the [Replace all repository topics](https://docs.github.com/rest/reference/repos#replace-all-repository-topics) endpoint. */ + "repos/update": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. 
*/ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["full-repository"]; + }; + }; + 307: components["responses"]["temporary_redirect"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The name of the repository. */ + name?: string; + /** @description A short description of the repository. */ + description?: string; + /** @description A URL with more information about the repository. */ + homepage?: string; + /** + * @description Either `true` to make the repository private or `false` to make it public. Default: `false`. + * **Note**: You will get a `422` error if the organization restricts [changing repository visibility](https://docs.github.com/articles/repository-permission-levels-for-an-organization#changing-the-visibility-of-repositories) to organization owners and a non-owner tries to change the value of private. **Note**: You will get a `422` error if the organization restricts [changing repository visibility](https://docs.github.com/articles/repository-permission-levels-for-an-organization#changing-the-visibility-of-repositories) to organization owners and a non-owner tries to change the value of private. + * @default false + */ + private?: boolean; + /** + * @description Can be `public` or `private`. If your organization is associated with an enterprise account using GitHub Enterprise Cloud or GitHub Enterprise Server 2.20+, `visibility` can also be `internal`." + * @enum {string} + */ + visibility?: "public" | "private" | "internal"; + /** @description Specify which security and analysis features to enable or disable. For example, to enable GitHub Advanced Security, use this data in the body of the PATCH request: `{"security_and_analysis": {"advanced_security": {"status": "enabled"}}}`. If you have admin permissions for a private repository covered by an Advanced Security license, you can check which security and analysis features are currently enabled by using a `GET /repos/{owner}/{repo}` request. */ + security_and_analysis?: { + /** @description Use the `status` property to enable or disable GitHub Advanced Security for this repository. For more information, see "[About GitHub Advanced Security](/github/getting-started-with-github/learning-about-github/about-github-advanced-security)." */ + advanced_security?: { + /** @description Can be `enabled` or `disabled`. */ + status?: string; + }; + /** @description Use the `status` property to enable or disable secret scanning for this repository. For more information, see "[About secret scanning](/code-security/secret-security/about-secret-scanning)." */ + secret_scanning?: { + /** @description Can be `enabled` or `disabled`. */ + status?: string; + }; + /** @description Use the `status` property to enable or disable secret scanning push protection for this repository. For more information, see "[Protecting pushes with secret scanning](/code-security/secret-scanning/protecting-pushes-with-secret-scanning)." */ + secret_scanning_push_protection?: { + /** @description Can be `enabled` or `disabled`. */ + status?: string; + }; + } | null; + /** + * @description Either `true` to enable issues for this repository or `false` to disable them. 
+ * @default true + */ + has_issues?: boolean; + /** + * @description Either `true` to enable projects for this repository or `false` to disable them. **Note:** If you're creating a repository in an organization that has disabled repository projects, the default is `false`, and if you pass `true`, the API returns an error. + * @default true + */ + has_projects?: boolean; + /** + * @description Either `true` to enable the wiki for this repository or `false` to disable it. + * @default true + */ + has_wiki?: boolean; + /** + * @description Either `true` to make this repo available as a template repository or `false` to prevent it. + * @default false + */ + is_template?: boolean; + /** @description Updates the default branch for this repository. */ + default_branch?: string; + /** + * @description Either `true` to allow squash-merging pull requests, or `false` to prevent squash-merging. + * @default true + */ + allow_squash_merge?: boolean; + /** + * @description Either `true` to allow merging pull requests with a merge commit, or `false` to prevent merging pull requests with merge commits. + * @default true + */ + allow_merge_commit?: boolean; + /** + * @description Either `true` to allow rebase-merging pull requests, or `false` to prevent rebase-merging. + * @default true + */ + allow_rebase_merge?: boolean; + /** + * @description Either `true` to allow auto-merge on pull requests, or `false` to disallow auto-merge. + * @default false + */ + allow_auto_merge?: boolean; + /** + * @description Either `true` to allow automatically deleting head branches when pull requests are merged, or `false` to prevent automatic deletion. + * @default false + */ + delete_branch_on_merge?: boolean; + /** + * @description Either `true` to always allow a pull request head branch that is behind its base branch to be updated even if it is not required to be up to date before merging, or false otherwise. + * @default false + */ + allow_update_branch?: boolean; + /** + * @description Either `true` to allow squash-merge commits to use pull request title, or `false` to use commit message. + * @default false + */ + use_squash_pr_title_as_default?: boolean; + /** + * @description `true` to archive this repository. **Note**: You cannot unarchive repositories through the API. + * @default false + */ + archived?: boolean; + /** + * @description Either `true` to allow private forks, or `false` to prevent private forks. + * @default false + */ + allow_forking?: boolean; + }; + }; + }; + }; + /** Lists all artifacts for a repository. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. */ + "actions/list-artifacts-for-repo": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": { + total_count: number; + artifacts: components["schemas"]["artifact"][]; + }; + }; + }; + }; + }; + /** Gets a specific artifact for a workflow run. 
Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. */ + "actions/get-artifact": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the artifact. */ + artifact_id: components["parameters"]["artifact-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["artifact"]; + }; + }; + }; + }; + /** Deletes an artifact for a workflow run. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint. */ + "actions/delete-artifact": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the artifact. */ + artifact_id: components["parameters"]["artifact-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** + * Gets a redirect URL to download an archive for a repository. This URL expires after 1 minute. Look for `Location:` in + * the response header to find the URL for the download. The `:archive_format` must be `zip`. Anyone with read access to + * the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. + * GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + "actions/download-artifact": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the artifact. */ + artifact_id: components["parameters"]["artifact-id"]; + archive_format: string; + }; + }; + responses: { + /** Response */ + 302: never; + 410: components["responses"]["gone"]; + }; + }; + /** + * Gets GitHub Actions cache usage for a repository. + * The data fetched using this API is refreshed approximately every 5 minutes, so values returned from this endpoint may take at least 5 minutes to get updated. + * Anyone with read access to the repository can use this endpoint. If the repository is private, you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + "actions/get-actions-cache-usage": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["actions-cache-usage-by-repository"]; + }; + }; + }; + }; + /** + * Lists the GitHub Actions caches for a repository. + * You must authenticate using an access token with the `repo` scope to use this endpoint. 
+ * GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + "actions/get-actions-cache-list": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + /** The Git reference for the results you want to list. The `ref` for a branch can be formatted either as `refs/heads/<branch name>` or simply `<branch name>`. To reference a pull request use `refs/pull/<number>/merge`. */ + ref?: components["parameters"]["git-ref"]; + /** An explicit key or prefix for identifying the cache */ + key?: components["parameters"]["actions-cache-key"]; + /** The property to sort the results by. `created_at` means when the cache was created. `last_accessed_at` means when the cache was last accessed. `size_in_bytes` is the size of the cache in bytes. */ + sort?: components["parameters"]["actions-cache-list-sort"]; + /** The direction to sort the results by. */ + direction?: components["parameters"]["direction"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["actions-cache-list"]; + }; + }; + }; + }; + /** + * Deletes one or more GitHub Actions caches for a repository, using a complete cache key. By default, all caches that match the provided key are deleted, but you can optionally provide a Git ref to restrict deletions to caches that match both the provided key and the Git ref. + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. + * + * GitHub Apps must have the `actions:write` permission to use this endpoint. + */ + "actions/delete-actions-cache-by-key": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + query: { + /** A key for identifying the cache. */ + key: components["parameters"]["actions-cache-key-required"]; + /** The Git reference for the results you want to list. The `ref` for a branch can be formatted either as `refs/heads/<branch name>` or simply `<branch name>`. To reference a pull request use `refs/pull/<number>/merge`. */ + ref?: components["parameters"]["git-ref"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["actions-cache-list"]; + }; + }; + }; + }; + /** + * Deletes a GitHub Actions cache for a repository, using a cache ID. + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. + * + * GitHub Apps must have the `actions:write` permission to use this endpoint. + */ + "actions/delete-actions-cache-by-id": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the GitHub Actions cache. */ + cache_id: components["parameters"]["cache-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** Gets a specific job in a workflow run. 
Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. */ + "actions/get-job-for-workflow-run": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the job. */ + job_id: components["parameters"]["job-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["job"]; + }; + }; + }; + }; + /** + * Gets a redirect URL to download a plain text file of logs for a workflow job. This link expires after 1 minute. Look + * for `Location:` in the response header to find the URL for the download. Anyone with read access to the repository can + * use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must + * have the `actions:read` permission to use this endpoint. + */ + "actions/download-job-logs-for-workflow-run": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the job. */ + job_id: components["parameters"]["job-id"]; + }; + }; + responses: { + /** Response */ + 302: never; + }; + }; + /** Re-run a job and its dependent jobs in a workflow run. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint. */ + "actions/re-run-job-for-workflow-run": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the job. */ + job_id: components["parameters"]["job-id"]; + }; + }; + responses: { + /** Response */ + 201: { + content: { + "application/json": components["schemas"]["empty-object"]; + }; + }; + 403: components["responses"]["forbidden"]; + }; + requestBody: { + content: { + "application/json": { + /** + * @description Whether to enable debug logging for the re-run. + * @default false + */ + enable_debug_logging?: boolean; + } | null; + }; + }; + }; + /** + * Gets the `opt-out` flag of a GitHub Actions OpenID Connect (OIDC) subject claim customization for a repository. + * You must authenticate using an access token with the `repo` scope to use this + * endpoint. GitHub Apps must have the `organization_administration:read` permission to use this endpoint. + */ + "actions/get-custom-oidc-sub-claim-for-repo": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. 
*/ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Status response */ + 200: { + content: { + "application/json": components["schemas"]["opt-out-oidc-custom-sub"]; + }; + }; + 400: components["responses"]["bad_request"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Sets the `opt-out` flag of a GitHub Actions OpenID Connect (OIDC) subject claim customization for a repository. + * You must authenticate using an access token with the `repo` scope to use this + * endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint. + */ + "actions/set-custom-oidc-sub-claim-for-repo": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Empty response */ + 201: { + content: { + "application/json": components["schemas"]["empty-object"]; + }; + }; + 400: components["responses"]["bad_request"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed_simple"]; + }; + requestBody: { + content: { + "application/json": components["schemas"]["opt-out-oidc-custom-sub"]; + }; + }; + }; + /** + * Gets the GitHub Actions permissions policy for a repository, including whether GitHub Actions is enabled and the actions and reusable workflows allowed to run in the repository. + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `administration` repository permission to use this API. + */ + "actions/get-github-actions-permissions-repository": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["actions-repository-permissions"]; + }; + }; + }; + }; + /** + * Sets the GitHub Actions permissions policy for enabling GitHub Actions and allowed actions and reusable workflows in the repository. + * + * If the repository belongs to an organization or enterprise that has set restrictive permissions at the organization or enterprise levels, such as `allowed_actions` to `selected` actions and reusable workflows, then you cannot override them for the repository. + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `administration` repository permission to use this API. + */ + "actions/set-github-actions-permissions-repository": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + requestBody: { + content: { + "application/json": { + enabled: components["schemas"]["actions-enabled"]; + allowed_actions?: components["schemas"]["allowed-actions"]; + }; + }; + }; + }; + /** + * Gets the level of access that workflows outside of the repository have to actions and reusable workflows in the repository. + * This endpoint only applies to internal repositories. 
For more information, see "[Managing GitHub Actions settings for a repository](https://docs.github.com/repositories/managing-your-repositorys-settings-and-features/enabling-features-for-your-repository/managing-github-actions-settings-for-a-repository#allowing-access-to-components-in-an-internal-repository)." + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the + * repository `administration` permission to use this endpoint. + */ + "actions/get-workflow-access-to-repository": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["actions-workflow-access-to-repository"]; + }; + }; + }; + }; + /** + * Sets the level of access that workflows outside of the repository have to actions and reusable workflows in the repository. + * This endpoint only applies to internal repositories. For more information, see "[Managing GitHub Actions settings for a repository](https://docs.github.com/repositories/managing-your-repositorys-settings-and-features/enabling-features-for-your-repository/managing-github-actions-settings-for-a-repository#allowing-access-to-components-in-an-internal-repository)." + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the + * repository `administration` permission to use this endpoint. + */ + "actions/set-workflow-access-to-repository": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + requestBody: { + content: { + "application/json": components["schemas"]["actions-workflow-access-to-repository"]; + }; + }; + }; + /** + * Gets the settings for selected actions and reusable workflows that are allowed in a repository. To use this endpoint, the repository policy for `allowed_actions` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for a repository](#set-github-actions-permissions-for-a-repository)." + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `administration` repository permission to use this API. + */ + "actions/get-allowed-actions-repository": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["selected-actions"]; + }; + }; + }; + }; + /** + * Sets the actions and reusable workflows that are allowed in a repository. To use this endpoint, the repository permission policy for `allowed_actions` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for a repository](#set-github-actions-permissions-for-a-repository)." 
+ * + * If the repository belongs to an organization or enterprise that has `selected` actions and reusable workflows set at the organization or enterprise levels, then you cannot override any of the allowed actions and reusable workflows settings. + * + * To use the `patterns_allowed` setting for private repositories, the repository must belong to an enterprise. If the repository does not belong to an enterprise, then the `patterns_allowed` setting only applies to public repositories. + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `administration` repository permission to use this API. + */ + "actions/set-allowed-actions-repository": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + requestBody: { + content: { + "application/json": components["schemas"]["selected-actions"]; + }; + }; + }; + /** + * Gets the default workflow permissions granted to the `GITHUB_TOKEN` when running workflows in a repository, + * as well as if GitHub Actions can submit approving pull request reviews. + * For more information, see "[Setting the permissions of the GITHUB_TOKEN for your repository](https://docs.github.com/repositories/managing-your-repositorys-settings-and-features/enabling-features-for-your-repository/managing-github-actions-settings-for-a-repository#setting-the-permissions-of-the-github_token-for-your-repository)." + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the repository `administration` permission to use this API. + */ + "actions/get-github-actions-default-workflow-permissions-repository": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["actions-get-default-workflow-permissions"]; + }; + }; + }; + }; + /** + * Sets the default workflow permissions granted to the `GITHUB_TOKEN` when running workflows in a repository, and sets if GitHub Actions + * can submit approving pull request reviews. + * For more information, see "[Setting the permissions of the GITHUB_TOKEN for your repository](https://docs.github.com/repositories/managing-your-repositorys-settings-and-features/enabling-features-for-your-repository/managing-github-actions-settings-for-a-repository#setting-the-permissions-of-the-github_token-for-your-repository)." + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the repository `administration` permission to use this API. + */ + "actions/set-github-actions-default-workflow-permissions-repository": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. 
*/ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Success response */ + 204: never; + /** Conflict response when changing a setting is prevented by the owning organization or enterprise */ + 409: unknown; + }; + requestBody: { + content: { + "application/json": components["schemas"]["actions-set-default-workflow-permissions"]; + }; + }; + }; + /** Lists all self-hosted runners configured in a repository. You must authenticate using an access token with the `repo` scope to use this endpoint. */ + "actions/list-self-hosted-runners-for-repo": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": { + total_count: number; + runners: components["schemas"]["runner"][]; + }; + }; + }; + }; + }; + /** + * Lists binaries for the runner application that you can download and run. + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. + */ + "actions/list-runner-applications-for-repo": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["runner-application"][]; + }; + }; + }; + }; + /** + * Returns a token that you can pass to the `config` script. The token expires after one hour. You must authenticate + * using an access token with the `repo` scope to use this endpoint. + * + * #### Example using registration token + * + * Configure your self-hosted runner, replacing `TOKEN` with the registration token provided by this endpoint. + * + * ``` + * ./config.sh --url https://github.com/octo-org/octo-repo-artifacts --token TOKEN + * ``` + */ + "actions/create-registration-token-for-repo": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 201: { + content: { + "application/json": components["schemas"]["authentication-token"]; + }; + }; + }; + }; + /** + * Returns a token that you can pass to remove a self-hosted runner from a repository. The token expires after one hour. + * You must authenticate using an access token with the `repo` scope to use this endpoint. + * + * #### Example using remove token + * + * To remove your self-hosted runner from a repository, replace TOKEN with the remove token provided by this endpoint. + * + * ``` + * ./config.sh remove --token TOKEN + * ``` + */ + "actions/create-remove-token-for-repo": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. 
*/ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 201: { + content: { + "application/json": components["schemas"]["authentication-token"]; + }; + }; + }; + }; + /** + * Gets a specific self-hosted runner configured in a repository. + * + * You must authenticate using an access token with the `repo` scope to use this + * endpoint. + */ + "actions/get-self-hosted-runner-for-repo": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** Unique identifier of the self-hosted runner. */ + runner_id: components["parameters"]["runner-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["runner"]; + }; + }; + }; + }; + /** + * Forces the removal of a self-hosted runner from a repository. You can use this endpoint to completely remove the runner when the machine you were using no longer exists. + * + * You must authenticate using an access token with the `repo` + * scope to use this endpoint. + */ + "actions/delete-self-hosted-runner-from-repo": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** Unique identifier of the self-hosted runner. */ + runner_id: components["parameters"]["runner-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** + * Lists all labels for a self-hosted runner configured in a repository. + * + * You must authenticate using an access token with the `repo` scope to use this + * endpoint. + */ + "actions/list-labels-for-self-hosted-runner-for-repo": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** Unique identifier of the self-hosted runner. */ + runner_id: components["parameters"]["runner-id"]; + }; + }; + responses: { + 200: components["responses"]["actions_runner_labels"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Remove all previous custom labels and set the new custom labels for a specific + * self-hosted runner configured in a repository. + * + * You must authenticate using an access token with the `repo` scope to use this + * endpoint. + */ + "actions/set-custom-labels-for-self-hosted-runner-for-repo": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** Unique identifier of the self-hosted runner. */ + runner_id: components["parameters"]["runner-id"]; + }; + }; + responses: { + 200: components["responses"]["actions_runner_labels"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed_simple"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The names of the custom labels to set for the runner. You can pass an empty array to remove all custom labels. 
*/ + labels: string[]; + }; + }; + }; + }; + /** + * Add custom labels to a self-hosted runner configured in a repository. + * + * You must authenticate using an access token with the `repo` scope to use this + * endpoint. + */ + "actions/add-custom-labels-to-self-hosted-runner-for-repo": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** Unique identifier of the self-hosted runner. */ + runner_id: components["parameters"]["runner-id"]; + }; + }; + responses: { + 200: components["responses"]["actions_runner_labels"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed_simple"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The names of the custom labels to add to the runner. */ + labels: string[]; + }; + }; + }; + }; + /** + * Remove all custom labels from a self-hosted runner configured in a + * repository. Returns the remaining read-only labels from the runner. + * + * You must authenticate using an access token with the `repo` scope to use this + * endpoint. + */ + "actions/remove-all-custom-labels-from-self-hosted-runner-for-repo": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** Unique identifier of the self-hosted runner. */ + runner_id: components["parameters"]["runner-id"]; + }; + }; + responses: { + 200: components["responses"]["actions_runner_labels_readonly"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Remove a custom label from a self-hosted runner configured + * in a repository. Returns the remaining labels from the runner. + * + * This endpoint returns a `404 Not Found` status if the custom label is not + * present on the runner. + * + * You must authenticate using an access token with the `repo` scope to use this + * endpoint. + */ + "actions/remove-custom-label-from-self-hosted-runner-for-repo": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** Unique identifier of the self-hosted runner. */ + runner_id: components["parameters"]["runner-id"]; + /** The name of a self-hosted runner's custom label. */ + name: components["parameters"]["runner-label-name"]; + }; + }; + responses: { + 200: components["responses"]["actions_runner_labels"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed_simple"]; + }; + }; + /** + * Lists all workflow runs for a repository. You can use parameters to narrow the list of results. For more information about using parameters, see [Parameters](https://docs.github.com/rest/overview/resources-in-the-rest-api#parameters). + * + * Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + "actions/list-workflow-runs-for-repo": { + parameters: { + path: { + /** The account owner of the repository. 
The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + query: { + /** Returns someone's workflow runs. Use the login for the user who created the `push` associated with the check suite or workflow run. */ + actor?: components["parameters"]["actor"]; + /** Returns workflow runs associated with a branch. Use the name of the branch of the `push`. */ + branch?: components["parameters"]["workflow-run-branch"]; + /** Returns workflow run triggered by the event you specify. For example, `push`, `pull_request` or `issue`. For more information, see "[Events that trigger workflows](https://docs.github.com/en/actions/automating-your-workflow-with-github-actions/events-that-trigger-workflows)." */ + event?: components["parameters"]["event"]; + /** Returns workflow runs with the check run `status` or `conclusion` that you specify. For example, a conclusion can be `success` or a status can be `in_progress`. Only GitHub can set a status of `waiting` or `requested`. For a list of the possible `status` and `conclusion` options, see "[Create a check run](https://docs.github.com/rest/reference/checks#create-a-check-run)." */ + status?: components["parameters"]["workflow-run-status"]; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + /** Returns workflow runs created within the given date-time range. For more information on the syntax, see "[Understanding the search syntax](https://docs.github.com/search-github/getting-started-with-searching-on-github/understanding-the-search-syntax#query-for-dates)." */ + created?: components["parameters"]["created"]; + /** If `true` pull requests are omitted from the response (empty array). */ + exclude_pull_requests?: components["parameters"]["exclude-pull-requests"]; + /** Returns workflow runs with the `check_suite_id` that you specify. */ + check_suite_id?: components["parameters"]["workflow-run-check-suite-id"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": { + total_count: number; + workflow_runs: components["schemas"]["workflow-run"][]; + }; + }; + }; + }; + }; + /** Gets a specific workflow run. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. */ + "actions/get-workflow-run": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the workflow run. */ + run_id: components["parameters"]["run-id"]; + }; + query: { + /** If `true` pull requests are omitted from the response (empty array). */ + exclude_pull_requests?: components["parameters"]["exclude-pull-requests"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["workflow-run"]; + }; + }; + }; + }; + /** + * Delete a specific workflow run. Anyone with write access to the repository can use this endpoint. If the repository is + * private you must use an access token with the `repo` scope. 
GitHub Apps must have the `actions:write` permission to use + * this endpoint. + */ + "actions/delete-workflow-run": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the workflow run. */ + run_id: components["parameters"]["run-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** Anyone with read access to the repository can use this endpoint. If the repository is private, you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. */ + "actions/get-reviews-for-run": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the workflow run. */ + run_id: components["parameters"]["run-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["environment-approvals"][]; + }; + }; + }; + }; + /** + * Approves a workflow run for a pull request from a public fork of a first time contributor. For more information, see ["Approving workflow runs from public forks](https://docs.github.com/actions/managing-workflow-runs/approving-workflow-runs-from-public-forks)." + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint. + */ + "actions/approve-workflow-run": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the workflow run. */ + run_id: components["parameters"]["run-id"]; + }; + }; + responses: { + /** Response */ + 201: { + content: { + "application/json": components["schemas"]["empty-object"]; + }; + }; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** Lists artifacts for a workflow run. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. */ + "actions/list-workflow-run-artifacts": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the workflow run. */ + run_id: components["parameters"]["run-id"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": { + total_count: number; + artifacts: components["schemas"]["artifact"][]; + }; + }; + }; + }; + }; + /** + * Gets a specific workflow run attempt. 
Anyone with read access to the repository + * can use this endpoint. If the repository is private you must use an access token + * with the `repo` scope. GitHub Apps must have the `actions:read` permission to + * use this endpoint. + */ + "actions/get-workflow-run-attempt": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the workflow run. */ + run_id: components["parameters"]["run-id"]; + /** The attempt number of the workflow run. */ + attempt_number: components["parameters"]["attempt-number"]; + }; + query: { + /** If `true` pull requests are omitted from the response (empty array). */ + exclude_pull_requests?: components["parameters"]["exclude-pull-requests"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["workflow-run"]; + }; + }; + }; + }; + /** Lists jobs for a specific workflow run attempt. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. You can use parameters to narrow the list of results. For more information about using parameters, see [Parameters](https://docs.github.com/rest/overview/resources-in-the-rest-api#parameters). */ + "actions/list-jobs-for-workflow-run-attempt": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the workflow run. */ + run_id: components["parameters"]["run-id"]; + /** The attempt number of the workflow run. */ + attempt_number: components["parameters"]["attempt-number"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": { + total_count: number; + jobs: components["schemas"]["job"][]; + }; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Gets a redirect URL to download an archive of log files for a specific workflow run attempt. This link expires after + * 1 minute. Look for `Location:` in the response header to find the URL for the download. Anyone with read access to + * the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. + * GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + "actions/download-workflow-run-attempt-logs": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the workflow run. */ + run_id: components["parameters"]["run-id"]; + /** The attempt number of the workflow run. 
*/ + attempt_number: components["parameters"]["attempt-number"]; + }; + }; + responses: { + /** Response */ + 302: never; + }; + }; + /** Cancels a workflow run using its `id`. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint. */ + "actions/cancel-workflow-run": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the workflow run. */ + run_id: components["parameters"]["run-id"]; + }; + }; + responses: { + /** Response */ + 202: { + content: { + "application/json": { [key: string]: unknown }; + }; + }; + 409: components["responses"]["conflict"]; + }; + }; + /** Lists jobs for a workflow run. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. You can use parameters to narrow the list of results. For more information about using parameters, see [Parameters](https://docs.github.com/rest/overview/resources-in-the-rest-api#parameters). */ + "actions/list-jobs-for-workflow-run": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the workflow run. */ + run_id: components["parameters"]["run-id"]; + }; + query: { + /** Filters jobs by their `completed_at` timestamp. `latest` returns jobs from the most recent execution of the workflow run. `all` returns all jobs for a workflow run, including from old executions of the workflow run. */ + filter?: "latest" | "all"; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": { + total_count: number; + jobs: components["schemas"]["job"][]; + }; + }; + }; + }; + }; + /** + * Gets a redirect URL to download an archive of log files for a workflow run. This link expires after 1 minute. Look for + * `Location:` in the response header to find the URL for the download. Anyone with read access to the repository can use + * this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have + * the `actions:read` permission to use this endpoint. + */ + "actions/download-workflow-run-logs": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the workflow run. */ + run_id: components["parameters"]["run-id"]; + }; + }; + responses: { + /** Response */ + 302: never; + }; + }; + /** Deletes all logs for a workflow run. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint. 
*/ + "actions/delete-workflow-run-logs": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the workflow run. */ + run_id: components["parameters"]["run-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + 403: components["responses"]["forbidden"]; + 500: components["responses"]["internal_error"]; + }; + }; + /** + * Get all deployment environments for a workflow run that are waiting for protection rules to pass. + * + * Anyone with read access to the repository can use this endpoint. If the repository is private, you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + "actions/get-pending-deployments-for-run": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the workflow run. */ + run_id: components["parameters"]["run-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["pending-deployment"][]; + }; + }; + }; + }; + /** + * Approve or reject pending deployments that are waiting on approval by a required reviewer. + * + * Required reviewers with read access to the repository contents and deployments can use this endpoint. Required reviewers must authenticate using an access token with the `repo` scope to use this endpoint. + */ + "actions/review-pending-deployments-for-run": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the workflow run. */ + run_id: components["parameters"]["run-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["deployment"][]; + }; + }; + }; + requestBody: { + content: { + "application/json": { + /** + * @description The list of environment ids to approve or reject + * @example [ + * 161171787, + * 161171795 + * ] + */ + environment_ids: number[]; + /** + * @description Whether to approve or reject deployment to the specified environments. + * @example approved + * @enum {string} + */ + state: "approved" | "rejected"; + /** + * @description A comment to accompany the deployment review + * @example Ship it! + */ + comment: string; + }; + }; + }; + }; + /** Re-runs your workflow run using its `id`. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint. */ + "actions/re-run-workflow": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the workflow run. 
*/ + run_id: components["parameters"]["run-id"]; + }; + }; + responses: { + /** Response */ + 201: { + content: { + "application/json": { [key: string]: unknown }; + }; + }; + }; + requestBody: { + content: { + "application/json": { + /** + * @description Whether to enable debug logging for the re-run. + * @default false + */ + enable_debug_logging?: boolean; + } | null; + }; + }; + }; + /** Re-run all of the failed jobs and their dependent jobs in a workflow run using the `id` of the workflow run. You must authenticate using an access token with the `repo` scope to use this endpoint. */ + "actions/re-run-workflow-failed-jobs": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the workflow run. */ + run_id: components["parameters"]["run-id"]; + }; + }; + responses: { + /** Response */ + 201: { + content: { + "application/json": components["schemas"]["empty-object"]; + }; + }; + }; + requestBody: { + content: { + "application/json": { + /** + * @description Whether to enable debug logging for the re-run. + * @default false + */ + enable_debug_logging?: boolean; + } | null; + }; + }; + }; + /** + * Gets the number of billable minutes and total run time for a specific workflow run. Billable minutes only apply to workflows in private repositories that use GitHub-hosted runners. Usage is listed for each GitHub-hosted runner operating system in milliseconds. Any job re-runs are also included in the usage. The usage does not include the multiplier for macOS and Windows runners and is not rounded up to the nearest whole minute. For more information, see "[Managing billing for GitHub Actions](https://docs.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-actions)". + * + * Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + "actions/get-workflow-run-usage": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the workflow run. */ + run_id: components["parameters"]["run-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["workflow-run-usage"]; + }; + }; + }; + }; + /** Lists all secrets available in a repository without revealing their encrypted values. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `secrets` repository permission to use this endpoint. */ + "actions/list-repo-secrets": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. 
*/ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": { + total_count: number; + secrets: components["schemas"]["actions-secret"][]; + }; + }; + }; + }; + }; + /** Gets your public key, which you need to encrypt secrets. You need to encrypt a secret before you can create or update secrets. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `secrets` repository permission to use this endpoint. */ + "actions/get-repo-public-key": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["actions-public-key"]; + }; + }; + }; + }; + /** Gets a single repository secret without revealing its encrypted value. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `secrets` repository permission to use this endpoint. */ + "actions/get-repo-secret": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The name of the secret. */ + secret_name: components["parameters"]["secret-name"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["actions-secret"]; + }; + }; + }; + }; + /** + * Creates or updates a repository secret with an encrypted value. Encrypt your secret using + * [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages). You must authenticate using an access + * token with the `repo` scope to use this endpoint. GitHub Apps must have the `secrets` repository permission to use + * this endpoint. + * + * #### Example encrypting a secret using Node.js + * + * Encrypt your secret using the [tweetsodium](https://github.com/github/tweetsodium) library. + * + * ``` + * const sodium = require('tweetsodium'); + * + * const key = "base64-encoded-public-key"; + * const value = "plain-text-secret"; + * + * // Convert the message and key to Uint8Array's (Buffer implements that interface) + * const messageBytes = Buffer.from(value); + * const keyBytes = Buffer.from(key, 'base64'); + * + * // Encrypt using LibSodium. + * const encryptedBytes = sodium.seal(messageBytes, keyBytes); + * + * // Base64 the encrypted secret + * const encrypted = Buffer.from(encryptedBytes).toString('base64'); + * + * console.log(encrypted); + * ``` + * + * + * #### Example encrypting a secret using Python + * + * Encrypt your secret using [pynacl](https://pynacl.readthedocs.io/en/latest/public/#nacl-public-sealedbox) with Python 3. 
+ * + * ``` + * from base64 import b64encode + * from nacl import encoding, public + * + * def encrypt(public_key: str, secret_value: str) -> str: + * """Encrypt a Unicode string using the public key.""" + * public_key = public.PublicKey(public_key.encode("utf-8"), encoding.Base64Encoder()) + * sealed_box = public.SealedBox(public_key) + * encrypted = sealed_box.encrypt(secret_value.encode("utf-8")) + * return b64encode(encrypted).decode("utf-8") + * ``` + * + * #### Example encrypting a secret using C# + * + * Encrypt your secret using the [Sodium.Core](https://www.nuget.org/packages/Sodium.Core/) package. + * + * ``` + * var secretValue = System.Text.Encoding.UTF8.GetBytes("mySecret"); + * var publicKey = Convert.FromBase64String("2Sg8iYjAxxmI2LvUXpJjkYrMxURPc8r+dB7TJyvvcCU="); + * + * var sealedPublicKeyBox = Sodium.SealedPublicKeyBox.Create(secretValue, publicKey); + * + * Console.WriteLine(Convert.ToBase64String(sealedPublicKeyBox)); + * ``` + * + * #### Example encrypting a secret using Ruby + * + * Encrypt your secret using the [rbnacl](https://github.com/RubyCrypto/rbnacl) gem. + * + * ```ruby + * require "rbnacl" + * require "base64" + * + * key = Base64.decode64("+ZYvJDZMHUfBkJdyq5Zm9SKqeuBQ4sj+6sfjlH4CgG0=") + * public_key = RbNaCl::PublicKey.new(key) + * + * box = RbNaCl::Boxes::Sealed.from_public_key(public_key) + * encrypted_secret = box.encrypt("my_secret") + * + * # Print the base64 encoded secret + * puts Base64.strict_encode64(encrypted_secret) + * ``` + */ + "actions/create-or-update-repo-secret": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The name of the secret. */ + secret_name: components["parameters"]["secret-name"]; + }; + }; + responses: { + /** Response when creating a secret */ + 201: { + content: { + "application/json": { [key: string]: unknown }; + }; + }; + /** Response when updating a secret */ + 204: never; + }; + requestBody: { + content: { + "application/json": { + /** @description Value for your secret, encrypted with [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages) using the public key retrieved from the [Get a repository public key](https://docs.github.com/rest/reference/actions#get-a-repository-public-key) endpoint. */ + encrypted_value?: string; + /** @description ID of the key you used to encrypt the secret. */ + key_id?: string; + }; + }; + }; + }; + /** Deletes a secret in a repository using the secret name. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `secrets` repository permission to use this endpoint. */ + "actions/delete-repo-secret": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The name of the secret. */ + secret_name: components["parameters"]["secret-name"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** Lists the workflows in a repository. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. 
*/ + "actions/list-repo-workflows": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": { + total_count: number; + workflows: components["schemas"]["workflow"][]; + }; + }; + }; + }; + }; + /** Gets a specific workflow. You can replace `workflow_id` with the workflow file name. For example, you could use `main.yaml`. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. */ + "actions/get-workflow": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The ID of the workflow. You can also pass the workflow file name as a string. */ + workflow_id: components["parameters"]["workflow-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["workflow"]; + }; + }; + }; + }; + /** + * Disables a workflow and sets the `state` of the workflow to `disabled_manually`. You can replace `workflow_id` with the workflow file name. For example, you could use `main.yaml`. + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint. + */ + "actions/disable-workflow": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The ID of the workflow. You can also pass the workflow file name as a string. */ + workflow_id: components["parameters"]["workflow-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** + * You can use this endpoint to manually trigger a GitHub Actions workflow run. You can replace `workflow_id` with the workflow file name. For example, you could use `main.yaml`. + * + * You must configure your GitHub Actions workflow to run when the [`workflow_dispatch` webhook](/developers/webhooks-and-events/webhook-events-and-payloads#workflow_dispatch) event occurs. The `inputs` are configured in the workflow file. For more information about how to configure the `workflow_dispatch` event in the workflow file, see "[Events that trigger workflows](/actions/reference/events-that-trigger-workflows#workflow_dispatch)." + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint. For more information, see "[Creating a personal access token for the command line](https://docs.github.com/articles/creating-a-personal-access-token-for-the-command-line)." 
+ */ + "actions/create-workflow-dispatch": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The ID of the workflow. You can also pass the workflow file name as a string. */ + workflow_id: components["parameters"]["workflow-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + requestBody: { + content: { + "application/json": { + /** @description The git reference for the workflow. The reference can be a branch or tag name. */ + ref: string; + /** @description Input keys and values configured in the workflow file. The maximum number of properties is 10. Any default properties configured in the workflow file will be used when `inputs` are omitted. */ + inputs?: { [key: string]: string }; + }; + }; + }; + }; + /** + * Enables a workflow and sets the `state` of the workflow to `active`. You can replace `workflow_id` with the workflow file name. For example, you could use `main.yaml`. + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint. + */ + "actions/enable-workflow": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The ID of the workflow. You can also pass the workflow file name as a string. */ + workflow_id: components["parameters"]["workflow-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** + * List all workflow runs for a workflow. You can replace `workflow_id` with the workflow file name. For example, you could use `main.yaml`. You can use parameters to narrow the list of results. For more information about using parameters, see [Parameters](https://docs.github.com/rest/overview/resources-in-the-rest-api#parameters). + * + * Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. + */ + "actions/list-workflow-runs": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The ID of the workflow. You can also pass the workflow file name as a string. */ + workflow_id: components["parameters"]["workflow-id"]; + }; + query: { + /** Returns someone's workflow runs. Use the login for the user who created the `push` associated with the check suite or workflow run. */ + actor?: components["parameters"]["actor"]; + /** Returns workflow runs associated with a branch. Use the name of the branch of the `push`. */ + branch?: components["parameters"]["workflow-run-branch"]; + /** Returns workflow run triggered by the event you specify. For example, `push`, `pull_request` or `issue`. For more information, see "[Events that trigger workflows](https://docs.github.com/en/actions/automating-your-workflow-with-github-actions/events-that-trigger-workflows)." */ + event?: components["parameters"]["event"]; + /** Returns workflow runs with the check run `status` or `conclusion` that you specify. 
For example, a conclusion can be `success` or a status can be `in_progress`. Only GitHub can set a status of `waiting` or `requested`. For a list of the possible `status` and `conclusion` options, see "[Create a check run](https://docs.github.com/rest/reference/checks#create-a-check-run)." */ + status?: components["parameters"]["workflow-run-status"]; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + /** Returns workflow runs created within the given date-time range. For more information on the syntax, see "[Understanding the search syntax](https://docs.github.com/search-github/getting-started-with-searching-on-github/understanding-the-search-syntax#query-for-dates)." */ + created?: components["parameters"]["created"]; + /** If `true` pull requests are omitted from the response (empty array). */ + exclude_pull_requests?: components["parameters"]["exclude-pull-requests"]; + /** Returns workflow runs with the `check_suite_id` that you specify. */ + check_suite_id?: components["parameters"]["workflow-run-check-suite-id"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": { + total_count: number; + workflow_runs: components["schemas"]["workflow-run"][]; + }; + }; + }; + }; + }; + /** + * Gets the number of billable minutes used by a specific workflow during the current billing cycle. Billable minutes only apply to workflows in private repositories that use GitHub-hosted runners. Usage is listed for each GitHub-hosted runner operating system in milliseconds. Any job re-runs are also included in the usage. The usage does not include the multiplier for macOS and Windows runners and is not rounded up to the nearest whole minute. For more information, see "[Managing billing for GitHub Actions](https://docs.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-actions)". + * + * You can replace `workflow_id` with the workflow file name. For example, you could use `main.yaml`. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + "actions/get-workflow-usage": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The ID of the workflow. You can also pass the workflow file name as a string. */ + workflow_id: components["parameters"]["workflow-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["workflow-usage"]; + }; + }; + }; + }; + /** Lists the [available assignees](https://docs.github.com/articles/assigning-issues-and-pull-requests-to-other-github-users/) for issues in a repository. */ + "issues/list-assignees": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + query: { + /** The number of results per page (max 100). 
*/ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["simple-user"][]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Checks if a user has permission to be assigned to an issue in this repository. + * + * If the `assignee` can be assigned to issues in the repository, a `204` header with no content is returned. + * + * Otherwise a `404` status code is returned. + */ + "issues/check-user-can-be-assigned": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + assignee: string; + }; + }; + responses: { + /** If the `assignee` can be assigned to issues in the repository, a `204` header with no content is returned. */ + 204: never; + /** Otherwise a `404` status code is returned. */ + 404: { + content: { + "application/json": components["schemas"]["basic-error"]; + }; + }; + }; + }; + /** + * This returns a list of autolinks configured for the given repository. + * + * Information about autolinks are only available to repository administrators. + */ + "repos/list-autolinks": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + query: { + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["autolink"][]; + }; + }; + }; + }; + /** Users with admin access to the repository can create an autolink. */ + "repos/create-autolink": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** response */ + 201: { + headers: { + Location?: string; + }; + content: { + "application/json": components["schemas"]["autolink"]; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The prefix appended by alphanumeric characters will generate a link any time it is found in an issue, pull request, or commit. */ + key_prefix: string; + /** @description The URL must contain `` for the reference number. `` matches alphanumeric characters `A-Z` (case insensitive), `0-9`, and `-`. */ + url_template: string; + }; + }; + }; + }; + /** + * This returns a single autolink reference by ID that was configured for the given repository. + * + * Information about autolinks are only available to repository administrators. + */ + "repos/get-autolink": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the autolink. 
*/ + autolink_id: components["parameters"]["autolink-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["autolink"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * This deletes a single autolink reference by ID that was configured for the given repository. + * + * Information about autolinks are only available to repository administrators. + */ + "repos/delete-autolink": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the autolink. */ + autolink_id: components["parameters"]["autolink-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + 404: components["responses"]["not_found"]; + }; + }; + /** Enables automated security fixes for a repository. The authenticated user must have admin access to the repository. For more information, see "[Configuring automated security fixes](https://docs.github.com/en/articles/configuring-automated-security-fixes)". */ + "repos/enable-automated-security-fixes": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** Disables automated security fixes for a repository. The authenticated user must have admin access to the repository. For more information, see "[Configuring automated security fixes](https://docs.github.com/en/articles/configuring-automated-security-fixes)". */ + "repos/disable-automated-security-fixes": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + "repos/list-branches": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + query: { + /** Setting to `true` returns only protected branches. When set to `false`, only unprotected branches are returned. Omitting this parameter returns all branches. */ + protected?: boolean; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["short-branch"][]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + "repos/get-branch": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The name of the branch. 
*/ + branch: components["parameters"]["branch"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["branch-with-protection"]; + }; + }; + 301: components["responses"]["moved_permanently"]; + 404: components["responses"]["not_found"]; + }; + }; + /** Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. */ + "repos/get-branch-protection": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The name of the branch. */ + branch: components["parameters"]["branch"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["branch-protection"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Protecting a branch requires admin or owner permissions to the repository. + * + * **Note**: Passing new arrays of `users` and `teams` replaces their previous values. + * + * **Note**: The list of users, apps, and teams in total is limited to 100 items. + */ + "repos/update-branch-protection": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The name of the branch. */ + branch: components["parameters"]["branch"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["protected-branch"]; + }; + }; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed_simple"]; + }; + requestBody: { + content: { + "application/json": { + /** @description Require status checks to pass before merging. Set to `null` to disable. */ + required_status_checks: { + /** @description Require branches to be up to date before merging. */ + strict: boolean; + /** + * @deprecated + * @description **Deprecated**: The list of status checks to require in order to merge into this branch. If any of these checks have recently been set by a particular GitHub App, they will be required to come from that app in future for the branch to merge. Use `checks` instead of `contexts` for more fine-grained control. + */ + contexts: string[]; + /** @description The list of status checks to require in order to merge into this branch. */ + checks?: { + /** @description The name of the required check */ + context: string; + /** @description The ID of the GitHub App that must provide this check. 
Omit this field to automatically select the GitHub App that has recently provided this check, or any app if it was not set by a GitHub App. Pass -1 to explicitly allow any app to set the status. */ + app_id?: number; + }[]; + } | null; + /** @description Enforce all configured restrictions for administrators. Set to `true` to enforce required status checks for repository administrators. Set to `null` to disable. */ + enforce_admins: boolean | null; + /** @description Require at least one approving review on a pull request, before merging. Set to `null` to disable. */ + required_pull_request_reviews: { + /** @description Specify which users, teams, and apps can dismiss pull request reviews. Pass an empty `dismissal_restrictions` object to disable. User and team `dismissal_restrictions` are only available for organization-owned repositories. Omit this parameter for personal repositories. */ + dismissal_restrictions?: { + /** @description The list of user `login`s with dismissal access */ + users?: string[]; + /** @description The list of team `slug`s with dismissal access */ + teams?: string[]; + /** @description The list of app `slug`s with dismissal access */ + apps?: string[]; + }; + /** @description Set to `true` if you want to automatically dismiss approving reviews when someone pushes a new commit. */ + dismiss_stale_reviews?: boolean; + /** @description Blocks merging pull requests until [code owners](https://docs.github.com/articles/about-code-owners/) review them. */ + require_code_owner_reviews?: boolean; + /** @description Specify the number of reviewers required to approve pull requests. Use a number between 1 and 6 or 0 to not require reviewers. */ + required_approving_review_count?: number; + /** @description Allow specific users, teams, or apps to bypass pull request requirements. */ + bypass_pull_request_allowances?: { + /** @description The list of user `login`s allowed to bypass pull request requirements. */ + users?: string[]; + /** @description The list of team `slug`s allowed to bypass pull request requirements. */ + teams?: string[]; + /** @description The list of app `slug`s allowed to bypass pull request requirements. */ + apps?: string[]; + }; + } | null; + /** @description Restrict who can push to the protected branch. User, app, and team `restrictions` are only available for organization-owned repositories. Set to `null` to disable. */ + restrictions: { + /** @description The list of user `login`s with push access */ + users: string[]; + /** @description The list of team `slug`s with push access */ + teams: string[]; + /** @description The list of app `slug`s with push access */ + apps?: string[]; + } | null; + /** @description Enforces a linear commit Git history, which prevents anyone from pushing merge commits to a branch. Set to `true` to enforce a linear commit history. Set to `false` to disable a linear commit Git history. Your repository must allow squash merging or rebase merging before you can enable a linear commit history. Default: `false`. For more information, see "[Requiring a linear commit history](https://docs.github.com/github/administering-a-repository/requiring-a-linear-commit-history)" in the GitHub Help documentation. */ + required_linear_history?: boolean; + /** @description Permits force pushes to the protected branch by anyone with write access to the repository. Set to `true` to allow force pushes. Set to `false` or `null` to block force pushes. Default: `false`. 
For more information, see "[Enabling force pushes to a protected branch](https://docs.github.com/en/github/administering-a-repository/enabling-force-pushes-to-a-protected-branch)" in the GitHub Help documentation." */ + allow_force_pushes?: boolean | null; + /** @description Allows deletion of the protected branch by anyone with write access to the repository. Set to `false` to prevent deletion of the protected branch. Default: `false`. For more information, see "[Enabling force pushes to a protected branch](https://docs.github.com/en/github/administering-a-repository/enabling-force-pushes-to-a-protected-branch)" in the GitHub Help documentation. */ + allow_deletions?: boolean; + /** @description If set to `true`, the `restrictions` branch protection settings which limits who can push will also block pushes which create new branches, unless the push is initiated by a user, team, or app which has the ability to push. Set to `true` to restrict new branch creation. Default: `false`. */ + block_creations?: boolean; + /** @description Requires all conversations on code to be resolved before a pull request can be merged into a branch that matches this rule. Set to `false` to disable. Default: `false`. */ + required_conversation_resolution?: boolean; + }; + }; + }; + }; + /** Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. */ + "repos/delete-branch-protection": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The name of the branch. */ + branch: components["parameters"]["branch"]; + }; + }; + responses: { + /** Response */ + 204: never; + 403: components["responses"]["forbidden"]; + }; + }; + /** Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. */ + "repos/get-admin-branch-protection": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The name of the branch. */ + branch: components["parameters"]["branch"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["protected-branch-admin-enforced"]; + }; + }; + }; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. 
+ * + * Adding admin enforcement requires admin or owner permissions to the repository and branch protection to be enabled. + */ + "repos/set-admin-branch-protection": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The name of the branch. */ + branch: components["parameters"]["branch"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["protected-branch-admin-enforced"]; + }; + }; + }; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Removing admin enforcement requires admin or owner permissions to the repository and branch protection to be enabled. + */ + "repos/delete-admin-branch-protection": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The name of the branch. */ + branch: components["parameters"]["branch"]; + }; + }; + responses: { + /** Response */ + 204: never; + 404: components["responses"]["not_found"]; + }; + }; + /** Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. */ + "repos/get-pull-request-review-protection": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The name of the branch. */ + branch: components["parameters"]["branch"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["protected-branch-pull-request-review"]; + }; + }; + }; + }; + /** Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. */ + "repos/delete-pull-request-review-protection": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The name of the branch. 
*/ + branch: components["parameters"]["branch"]; + }; + }; + responses: { + /** Response */ + 204: never; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Updating pull request review enforcement requires admin or owner permissions to the repository and branch protection to be enabled. + * + * **Note**: Passing new arrays of `users` and `teams` replaces their previous values. + */ + "repos/update-pull-request-review-protection": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The name of the branch. */ + branch: components["parameters"]["branch"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["protected-branch-pull-request-review"]; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description Specify which users, teams, and apps can dismiss pull request reviews. Pass an empty `dismissal_restrictions` object to disable. User and team `dismissal_restrictions` are only available for organization-owned repositories. Omit this parameter for personal repositories. */ + dismissal_restrictions?: { + /** @description The list of user `login`s with dismissal access */ + users?: string[]; + /** @description The list of team `slug`s with dismissal access */ + teams?: string[]; + /** @description The list of app `slug`s with dismissal access */ + apps?: string[]; + }; + /** @description Set to `true` if you want to automatically dismiss approving reviews when someone pushes a new commit. */ + dismiss_stale_reviews?: boolean; + /** @description Blocks merging pull requests until [code owners](https://docs.github.com/articles/about-code-owners/) have reviewed. */ + require_code_owner_reviews?: boolean; + /** @description Specifies the number of reviewers required to approve pull requests. Use a number between 1 and 6 or 0 to not require reviewers. */ + required_approving_review_count?: number; + /** @description Allow specific users, teams, or apps to bypass pull request requirements. */ + bypass_pull_request_allowances?: { + /** @description The list of user `login`s allowed to bypass pull request requirements. */ + users?: string[]; + /** @description The list of team `slug`s allowed to bypass pull request requirements. */ + teams?: string[]; + /** @description The list of app `slug`s allowed to bypass pull request requirements. */ + apps?: string[]; + }; + }; + }; + }; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. 
+ * + * When authenticated with admin or owner permissions to the repository, you can use this endpoint to check whether a branch requires signed commits. An enabled status of `true` indicates you must sign commits on this branch. For more information, see [Signing commits with GPG](https://docs.github.com/articles/signing-commits-with-gpg) in GitHub Help. + * + * **Note**: You must enable branch protection to require signed commits. + */ + "repos/get-commit-signature-protection": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The name of the branch. */ + branch: components["parameters"]["branch"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["protected-branch-admin-enforced"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * When authenticated with admin or owner permissions to the repository, you can use this endpoint to require signed commits on a branch. You must enable branch protection to require signed commits. + */ + "repos/create-commit-signature-protection": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The name of the branch. */ + branch: components["parameters"]["branch"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["protected-branch-admin-enforced"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * When authenticated with admin or owner permissions to the repository, you can use this endpoint to disable required signed commits on a branch. You must enable branch protection to require signed commits. + */ + "repos/delete-commit-signature-protection": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The name of the branch. 
*/ + branch: components["parameters"]["branch"]; + }; + }; + responses: { + /** Response */ + 204: never; + 404: components["responses"]["not_found"]; + }; + }; + /** Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. */ + "repos/get-status-checks-protection": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The name of the branch. */ + branch: components["parameters"]["branch"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["status-check-policy"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. */ + "repos/remove-status-check-protection": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The name of the branch. */ + branch: components["parameters"]["branch"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Updating required status checks requires admin or owner permissions to the repository and branch protection to be enabled. + */ + "repos/update-status-check-protection": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The name of the branch. */ + branch: components["parameters"]["branch"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["status-check-policy"]; + }; + }; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description Require branches to be up to date before merging. */ + strict?: boolean; + /** + * @deprecated + * @description **Deprecated**: The list of status checks to require in order to merge into this branch. 
If any of these checks have recently been set by a particular GitHub App, they will be required to come from that app in future for the branch to merge. Use `checks` instead of `contexts` for more fine-grained control. + */ + contexts?: string[]; + /** @description The list of status checks to require in order to merge into this branch. */ + checks?: { + /** @description The name of the required check */ + context: string; + /** @description The ID of the GitHub App that must provide this check. Omit this field to automatically select the GitHub App that has recently provided this check, or any app if it was not set by a GitHub App. Pass -1 to explicitly allow any app to set the status. */ + app_id?: number; + }[]; + }; + }; + }; + }; + /** Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. */ + "repos/get-all-status-check-contexts": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The name of the branch. */ + branch: components["parameters"]["branch"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": string[]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. */ + "repos/set-status-check-contexts": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The name of the branch. */ + branch: components["parameters"]["branch"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": string[]; + }; + }; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description contexts parameter */ + contexts: string[]; + }; + }; + }; + }; + /** Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. */ + "repos/add-status-check-contexts": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The name of the branch. 
*/ + branch: components["parameters"]["branch"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": string[]; + }; + }; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description contexts parameter */ + contexts: string[]; + }; + }; + }; + }; + /** Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. */ + "repos/remove-status-check-contexts": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The name of the branch. */ + branch: components["parameters"]["branch"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": string[]; + }; + }; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description contexts parameter */ + contexts: string[]; + }; + }; + }; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Lists who has access to this protected branch. + * + * **Note**: Users, apps, and teams `restrictions` are only available for organization-owned repositories. + */ + "repos/get-access-restrictions": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The name of the branch. */ + branch: components["parameters"]["branch"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["branch-restriction-policy"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Disables the ability to restrict who can push to this branch. + */ + "repos/delete-access-restrictions": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The name of the branch. 
*/ + branch: components["parameters"]["branch"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Lists the GitHub Apps that have push access to this branch. Only installed GitHub Apps with `write` access to the `contents` permission can be added as authorized actors on a protected branch. + */ + "repos/get-apps-with-access-to-protected-branch": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The name of the branch. */ + branch: components["parameters"]["branch"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["integration"][]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Replaces the list of apps that have push access to this branch. This removes all apps that previously had push access and grants push access to the new list of apps. Only installed GitHub Apps with `write` access to the `contents` permission can be added as authorized actors on a protected branch. + * + * | Type | Description | + * | ------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------- | + * | `array` | The GitHub Apps that have push access to this branch. Use the app's `slug`. **Note**: The list of users, apps, and teams in total is limited to 100 items. | + */ + "repos/set-app-access-restrictions": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The name of the branch. */ + branch: components["parameters"]["branch"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["integration"][]; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description apps parameter */ + apps: string[]; + }; + }; + }; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. 
+ * + * Grants the specified apps push access for this branch. Only installed GitHub Apps with `write` access to the `contents` permission can be added as authorized actors on a protected branch. + * + * | Type | Description | + * | ------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------- | + * | `array` | The GitHub Apps that have push access to this branch. Use the app's `slug`. **Note**: The list of users, apps, and teams in total is limited to 100 items. | + */ + "repos/add-app-access-restrictions": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The name of the branch. */ + branch: components["parameters"]["branch"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["integration"][]; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description apps parameter */ + apps: string[]; + }; + }; + }; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Removes the ability of an app to push to this branch. Only installed GitHub Apps with `write` access to the `contents` permission can be added as authorized actors on a protected branch. + * + * | Type | Description | + * | ------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------- | + * | `array` | The GitHub Apps that have push access to this branch. Use the app's `slug`. **Note**: The list of users, apps, and teams in total is limited to 100 items. | + */ + "repos/remove-app-access-restrictions": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The name of the branch. */ + branch: components["parameters"]["branch"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["integration"][]; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description apps parameter */ + apps: string[]; + }; + }; + }; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Lists the teams who have push access to this branch. The list includes child teams. 
+ */ + "repos/get-teams-with-access-to-protected-branch": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The name of the branch. */ + branch: components["parameters"]["branch"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["team"][]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Replaces the list of teams that have push access to this branch. This removes all teams that previously had push access and grants push access to the new list of teams. Team restrictions include child teams. + * + * | Type | Description | + * | ------- | ------------------------------------------------------------------------------------------------------------------------------------------ | + * | `array` | The teams that can have push access. Use the team's `slug`. **Note**: The list of users, apps, and teams in total is limited to 100 items. | + */ + "repos/set-team-access-restrictions": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The name of the branch. */ + branch: components["parameters"]["branch"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["team"][]; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description teams parameter */ + teams: string[]; + }; + }; + }; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Grants the specified teams push access for this branch. You can also give push access to child teams. + * + * | Type | Description | + * | ------- | ------------------------------------------------------------------------------------------------------------------------------------------ | + * | `array` | The teams that can have push access. Use the team's `slug`. **Note**: The list of users, apps, and teams in total is limited to 100 items. | + */ + "repos/add-team-access-restrictions": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The name of the branch. 
*/ + branch: components["parameters"]["branch"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["team"][]; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description teams parameter */ + teams: string[]; + }; + }; + }; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Removes the ability of a team to push to this branch. You can also remove push access for child teams. + * + * | Type | Description | + * | ------- | --------------------------------------------------------------------------------------------------------------------------------------------------- | + * | `array` | Teams that should no longer have push access. Use the team's `slug`. **Note**: The list of users, apps, and teams in total is limited to 100 items. | + */ + "repos/remove-team-access-restrictions": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The name of the branch. */ + branch: components["parameters"]["branch"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["team"][]; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description teams parameter */ + teams: string[]; + }; + }; + }; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Lists the people who have push access to this branch. + */ + "repos/get-users-with-access-to-protected-branch": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The name of the branch. */ + branch: components["parameters"]["branch"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["simple-user"][]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Replaces the list of people that have push access to this branch. 
This removes all people that previously had push access and grants push access to the new list of people. + * + * | Type | Description | + * | ------- | ----------------------------------------------------------------------------------------------------------------------------- | + * | `array` | Usernames for people who can have push access. **Note**: The list of users, apps, and teams in total is limited to 100 items. | + */ + "repos/set-user-access-restrictions": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The name of the branch. */ + branch: components["parameters"]["branch"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["simple-user"][]; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description users parameter */ + users: string[]; + }; + }; + }; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Grants the specified people push access for this branch. + * + * | Type | Description | + * | ------- | ----------------------------------------------------------------------------------------------------------------------------- | + * | `array` | Usernames for people who can have push access. **Note**: The list of users, apps, and teams in total is limited to 100 items. | + */ + "repos/add-user-access-restrictions": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The name of the branch. */ + branch: components["parameters"]["branch"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["simple-user"][]; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description users parameter */ + users: string[]; + }; + }; + }; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Removes the ability of a user to push to this branch. + * + * | Type | Description | + * | ------- | --------------------------------------------------------------------------------------------------------------------------------------------- | + * | `array` | Usernames of the people who should no longer have push access. **Note**: The list of users, apps, and teams in total is limited to 100 items. 
| + */ + "repos/remove-user-access-restrictions": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The name of the branch. */ + branch: components["parameters"]["branch"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["simple-user"][]; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description users parameter */ + users: string[]; + }; + }; + }; + }; + /** + * Renames a branch in a repository. + * + * **Note:** Although the API responds immediately, the branch rename process might take some extra time to complete in the background. You won't be able to push to the old branch name while the rename process is in progress. For more information, see "[Renaming a branch](https://docs.github.com/github/administering-a-repository/renaming-a-branch)". + * + * The permissions required to use this endpoint depends on whether you are renaming the default branch. + * + * To rename a non-default branch: + * + * * Users must have push access. + * * GitHub Apps must have the `contents:write` repository permission. + * + * To rename the default branch: + * + * * Users must have admin or owner permissions. + * * GitHub Apps must have the `administration:write` repository permission. + */ + "repos/rename-branch": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The name of the branch. */ + branch: components["parameters"]["branch"]; + }; + }; + responses: { + /** Response */ + 201: { + content: { + "application/json": components["schemas"]["branch-with-protection"]; + }; + }; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The new name of the branch. */ + new_name: string; + }; + }; + }; + }; + /** + * **Note:** The Checks API only looks for pushes in the repository where the check suite or check run were created. Pushes to a branch in a forked repository are not detected and return an empty `pull_requests` array. + * + * Creates a new check run for a specific commit in a repository. Your GitHub App must have the `checks:write` permission to create check runs. + * + * In a check suite, GitHub limits the number of check runs with the same name to 1000. Once these check runs exceed 1000, GitHub will start to automatically delete older check runs. + */ + "checks/create": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. 
*/ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 201: { + content: { + "application/json": components["schemas"]["check-run"]; + }; + }; + }; + requestBody: { + content: { + "application/json": ( + | ({ + /** @enum {undefined} */ + status: "completed"; + } & { + conclusion: unknown; + } & { [key: string]: unknown }) + | ({ + /** @enum {undefined} */ + status?: "queued" | "in_progress"; + } & { [key: string]: unknown }) + ) & { + /** @description The name of the check. For example, "code-coverage". */ + name: string; + /** @description The SHA of the commit. */ + head_sha: string; + /** @description The URL of the integrator's site that has the full details of the check. If the integrator does not provide this, then the homepage of the GitHub app is used. */ + details_url?: string; + /** @description A reference for the run on the integrator's system. */ + external_id?: string; + /** + * @description The current status. + * @default queued + * @enum {string} + */ + status?: "queued" | "in_progress" | "completed"; + /** + * Format: date-time + * @description The time that the check run began. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + started_at?: string; + /** + * @description **Required if you provide `completed_at` or a `status` of `completed`**. The final conclusion of the check. + * **Note:** Providing `conclusion` will automatically set the `status` parameter to `completed`. You cannot change a check run conclusion to `stale`, only GitHub can set this. + * @enum {string} + */ + conclusion?: + | "action_required" + | "cancelled" + | "failure" + | "neutral" + | "success" + | "skipped" + | "stale" + | "timed_out"; + /** + * Format: date-time + * @description The time the check completed. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + completed_at?: string; + /** @description Check runs can accept a variety of data in the `output` object, including a `title` and `summary` and can optionally provide descriptive details about the run. See the [`output` object](https://docs.github.com/rest/reference/checks#output-object) description. */ + output?: { + /** @description The title of the check run. */ + title: string; + /** @description The summary of the check run. This parameter supports Markdown. */ + summary: string; + /** @description The details of the check run. This parameter supports Markdown. */ + text?: string; + /** @description Adds information from your analysis to specific lines of code. Annotations are visible on GitHub in the **Checks** and **Files changed** tab of the pull request. The Checks API limits the number of annotations to a maximum of 50 per API request. To create more than 50 annotations, you have to make multiple requests to the [Update a check run](https://docs.github.com/rest/reference/checks#update-a-check-run) endpoint. Each time you update the check run, annotations are appended to the list of annotations that already exist for the check run. For details about how you can view annotations on GitHub, see "[About status checks](https://docs.github.com/articles/about-status-checks#checks)". See the [`annotations` object](https://docs.github.com/rest/reference/checks#annotations-object) description for details about how to use this parameter. */ + annotations?: { + /** @description The path of the file to add an annotation to. For example, `assets/css/main.css`. 
*/ + path: string; + /** @description The start line of the annotation. */ + start_line: number; + /** @description The end line of the annotation. */ + end_line: number; + /** @description The start column of the annotation. Annotations only support `start_column` and `end_column` on the same line. Omit this parameter if `start_line` and `end_line` have different values. */ + start_column?: number; + /** @description The end column of the annotation. Annotations only support `start_column` and `end_column` on the same line. Omit this parameter if `start_line` and `end_line` have different values. */ + end_column?: number; + /** + * @description The level of the annotation. + * @enum {string} + */ + annotation_level: "notice" | "warning" | "failure"; + /** @description A short description of the feedback for these lines of code. The maximum size is 64 KB. */ + message: string; + /** @description The title that represents the annotation. The maximum size is 255 characters. */ + title?: string; + /** @description Details about this annotation. The maximum size is 64 KB. */ + raw_details?: string; + }[]; + /** @description Adds images to the output displayed in the GitHub pull request UI. See the [`images` object](https://docs.github.com/rest/reference/checks#images-object) description for details. */ + images?: { + /** @description The alternative text for the image. */ + alt: string; + /** @description The full URL of the image. */ + image_url: string; + /** @description A short image description. */ + caption?: string; + }[]; + }; + /** @description Displays a button on GitHub that can be clicked to alert your app to do additional tasks. For example, a code linting app can display a button that automatically fixes detected errors. The button created in this object is displayed after the check run completes. When a user clicks the button, GitHub sends the [`check_run.requested_action` webhook](https://docs.github.com/webhooks/event-payloads/#check_run) to your app. Each action includes a `label`, `identifier` and `description`. A maximum of three actions are accepted. See the [`actions` object](https://docs.github.com/rest/reference/checks#actions-object) description. To learn more about check runs and requested actions, see "[Check runs and requested actions](https://docs.github.com/rest/reference/checks#check-runs-and-requested-actions)." */ + actions?: { + /** @description The text to be displayed on a button in the web UI. The maximum size is 20 characters. */ + label: string; + /** @description A short explanation of what this action would do. The maximum size is 40 characters. */ + description: string; + /** @description A reference for the action on the integrator's system. The maximum size is 20 characters. */ + identifier: string; + }[]; + }; + }; + }; + }; + /** + * **Note:** The Checks API only looks for pushes in the repository where the check suite or check run were created. Pushes to a branch in a forked repository are not detected and return an empty `pull_requests` array. + * + * Gets a single check run using its `id`. GitHub Apps must have the `checks:read` permission on a private repository or pull access to a public repository to get check runs. OAuth Apps and authenticated users must have the `repo` scope to get check runs in a private repository. + */ + "checks/get": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. 
The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the check run. */ + check_run_id: components["parameters"]["check-run-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["check-run"]; + }; + }; + }; + }; + /** + * **Note:** The Checks API only looks for pushes in the repository where the check suite or check run were created. Pushes to a branch in a forked repository are not detected and return an empty `pull_requests` array. + * + * Updates a check run for a specific commit in a repository. Your GitHub App must have the `checks:write` permission to edit check runs. + */ + "checks/update": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the check run. */ + check_run_id: components["parameters"]["check-run-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["check-run"]; + }; + }; + }; + requestBody: { + content: { + "application/json": (Partial< + { + /** @enum {undefined} */ + status?: "completed"; + } & { + conclusion: unknown; + } & { [key: string]: unknown } + > & + Partial< + { + /** @enum {undefined} */ + status?: "queued" | "in_progress"; + } & { [key: string]: unknown } + >) & { + /** @description The name of the check. For example, "code-coverage". */ + name?: string; + /** @description The URL of the integrator's site that has the full details of the check. */ + details_url?: string; + /** @description A reference for the run on the integrator's system. */ + external_id?: string; + /** + * Format: date-time + * @description This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + started_at?: string; + /** + * @description The current status. + * @enum {string} + */ + status?: "queued" | "in_progress" | "completed"; + /** + * @description **Required if you provide `completed_at` or a `status` of `completed`**. The final conclusion of the check. + * **Note:** Providing `conclusion` will automatically set the `status` parameter to `completed`. You cannot change a check run conclusion to `stale`, only GitHub can set this. + * @enum {string} + */ + conclusion?: + | "action_required" + | "cancelled" + | "failure" + | "neutral" + | "success" + | "skipped" + | "stale" + | "timed_out"; + /** + * Format: date-time + * @description The time the check completed. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + completed_at?: string; + /** @description Check runs can accept a variety of data in the `output` object, including a `title` and `summary` and can optionally provide descriptive details about the run. See the [`output` object](https://docs.github.com/rest/reference/checks#output-object-1) description. */ + output?: { + /** @description **Required**. */ + title?: string; + /** @description Can contain Markdown. */ + summary: string; + /** @description Can contain Markdown. */ + text?: string; + /** @description Adds information from your analysis to specific lines of code. Annotations are visible in GitHub's pull request UI. Annotations are visible in GitHub's pull request UI. 
The Checks API limits the number of annotations to a maximum of 50 per API request. To create more than 50 annotations, you have to make multiple requests to the [Update a check run](https://docs.github.com/rest/reference/checks#update-a-check-run) endpoint. Each time you update the check run, annotations are appended to the list of annotations that already exist for the check run. For details about annotations in the UI, see "[About status checks](https://docs.github.com/articles/about-status-checks#checks)". See the [`annotations` object](https://docs.github.com/rest/reference/checks#annotations-object-1) description for details. */ + annotations?: { + /** @description The path of the file to add an annotation to. For example, `assets/css/main.css`. */ + path: string; + /** @description The start line of the annotation. */ + start_line: number; + /** @description The end line of the annotation. */ + end_line: number; + /** @description The start column of the annotation. Annotations only support `start_column` and `end_column` on the same line. Omit this parameter if `start_line` and `end_line` have different values. */ + start_column?: number; + /** @description The end column of the annotation. Annotations only support `start_column` and `end_column` on the same line. Omit this parameter if `start_line` and `end_line` have different values. */ + end_column?: number; + /** + * @description The level of the annotation. + * @enum {string} + */ + annotation_level: "notice" | "warning" | "failure"; + /** @description A short description of the feedback for these lines of code. The maximum size is 64 KB. */ + message: string; + /** @description The title that represents the annotation. The maximum size is 255 characters. */ + title?: string; + /** @description Details about this annotation. The maximum size is 64 KB. */ + raw_details?: string; + }[]; + /** @description Adds images to the output displayed in the GitHub pull request UI. See the [`images` object](https://docs.github.com/rest/reference/checks#annotations-object-1) description for details. */ + images?: { + /** @description The alternative text for the image. */ + alt: string; + /** @description The full URL of the image. */ + image_url: string; + /** @description A short image description. */ + caption?: string; + }[]; + }; + /** @description Possible further actions the integrator can perform, which a user may trigger. Each action includes a `label`, `identifier` and `description`. A maximum of three actions are accepted. See the [`actions` object](https://docs.github.com/rest/reference/checks#actions-object) description. To learn more about check runs and requested actions, see "[Check runs and requested actions](https://docs.github.com/rest/reference/checks#check-runs-and-requested-actions)." */ + actions?: { + /** @description The text to be displayed on a button in the web UI. The maximum size is 20 characters. */ + label: string; + /** @description A short explanation of what this action would do. The maximum size is 40 characters. */ + description: string; + /** @description A reference for the action on the integrator's system. The maximum size is 20 characters. */ + identifier: string; + }[]; + }; + }; + }; + }; + /** Lists annotations for a check run using the annotation `id`. GitHub Apps must have the `checks:read` permission on a private repository or pull access to a public repository to get annotations for a check run. 
OAuth Apps and authenticated users must have the `repo` scope to get annotations for a check run in a private repository. */ + "checks/list-annotations": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the check run. */ + check_run_id: components["parameters"]["check-run-id"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["check-annotation"][]; + }; + }; + }; + }; + /** + * Triggers GitHub to rerequest an existing check run, without pushing new code to a repository. This endpoint will trigger the [`check_run` webhook](https://docs.github.com/webhooks/event-payloads/#check_run) event with the action `rerequested`. When a check run is `rerequested`, its `status` is reset to `queued` and the `conclusion` is cleared. + * + * To rerequest a check run, your GitHub App must have the `checks:read` permission on a private repository or pull access to a public repository. + */ + "checks/rerequest-run": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the check run. */ + check_run_id: components["parameters"]["check-run-id"]; + }; + }; + responses: { + /** Response */ + 201: { + content: { + "application/json": { [key: string]: unknown }; + }; + }; + /** Forbidden if the check run is not rerequestable or doesn't belong to the authenticated GitHub App */ + 403: { + content: { + "application/json": components["schemas"]["basic-error"]; + }; + }; + 404: components["responses"]["not_found"]; + /** Validation error if the check run is not rerequestable */ + 422: { + content: { + "application/json": components["schemas"]["basic-error"]; + }; + }; + }; + }; + /** + * **Note:** The Checks API only looks for pushes in the repository where the check suite or check run were created. Pushes to a branch in a forked repository are not detected and return an empty `pull_requests` array and a `null` value for `head_branch`. + * + * By default, check suites are automatically created when you create a [check run](https://docs.github.com/rest/reference/checks#check-runs). You only need to use this endpoint for manually creating check suites when you've disabled automatic creation using "[Update repository preferences for check suites](https://docs.github.com/rest/reference/checks#update-repository-preferences-for-check-suites)". Your GitHub App must have the `checks:write` permission to create check suites. + */ + "checks/create-suite": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. 
*/ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response when the suite already exists */ + 200: { + content: { + "application/json": components["schemas"]["check-suite"]; + }; + }; + /** Response when the suite was created */ + 201: { + content: { + "application/json": components["schemas"]["check-suite"]; + }; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The sha of the head commit. */ + head_sha: string; + }; + }; + }; + }; + /** Changes the default automatic flow when creating check suites. By default, a check suite is automatically created each time code is pushed to a repository. When you disable the automatic creation of check suites, you can manually [Create a check suite](https://docs.github.com/rest/reference/checks#create-a-check-suite). You must have admin permissions in the repository to set preferences for check suites. */ + "checks/set-suites-preferences": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["check-suite-preference"]; + }; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description Enables or disables automatic creation of CheckSuite events upon pushes to the repository. Enabled by default. See the [`auto_trigger_checks` object](https://docs.github.com/rest/reference/checks#auto_trigger_checks-object) description for details. */ + auto_trigger_checks?: { + /** @description The `id` of the GitHub App. */ + app_id: number; + /** + * @description Set to `true` to enable automatic creation of CheckSuite events upon pushes to the repository, or `false` to disable them. + * @default true + */ + setting: boolean; + }[]; + }; + }; + }; + }; + /** + * **Note:** The Checks API only looks for pushes in the repository where the check suite or check run were created. Pushes to a branch in a forked repository are not detected and return an empty `pull_requests` array and a `null` value for `head_branch`. + * + * Gets a single check suite using its `id`. GitHub Apps must have the `checks:read` permission on a private repository or pull access to a public repository to get check suites. OAuth Apps and authenticated users must have the `repo` scope to get check suites in a private repository. + */ + "checks/get-suite": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the check suite. */ + check_suite_id: components["parameters"]["check-suite-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["check-suite"]; + }; + }; + }; + }; + /** + * **Note:** The Checks API only looks for pushes in the repository where the check suite or check run were created. Pushes to a branch in a forked repository are not detected and return an empty `pull_requests` array. + * + * Lists check runs for a check suite using its `id`. GitHub Apps must have the `checks:read` permission on a private repository or pull access to a public repository to get check runs. 
OAuth Apps and authenticated users must have the `repo` scope to get check runs in a private repository. + */ + "checks/list-for-suite": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the check suite. */ + check_suite_id: components["parameters"]["check-suite-id"]; + }; + query: { + /** Returns check runs with the specified `name`. */ + check_name?: components["parameters"]["check-name"]; + /** Returns check runs with the specified `status`. */ + status?: components["parameters"]["status"]; + /** Filters check runs by their `completed_at` timestamp. `latest` returns the most recent check runs. */ + filter?: "latest" | "all"; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": { + total_count: number; + check_runs: components["schemas"]["check-run"][]; + }; + }; + }; + }; + }; + /** + * Triggers GitHub to rerequest an existing check suite, without pushing new code to a repository. This endpoint will trigger the [`check_suite` webhook](https://docs.github.com/webhooks/event-payloads/#check_suite) event with the action `rerequested`. When a check suite is `rerequested`, its `status` is reset to `queued` and the `conclusion` is cleared. + * + * To rerequest a check suite, your GitHub App must have the `checks:read` permission on a private repository or pull access to a public repository. + */ + "checks/rerequest-suite": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the check suite. */ + check_suite_id: components["parameters"]["check-suite-id"]; + }; + }; + responses: { + /** Response */ + 201: { + content: { + "application/json": { [key: string]: unknown }; + }; + }; + }; + }; + /** + * Lists all open code scanning alerts for the default branch (usually `main` + * or `master`). You must use an access token with the `security_events` scope to use + * this endpoint with private repos, the `public_repo` scope also grants permission to read + * security events on public repos only. GitHub Apps must have the `security_events` read + * permission to use this endpoint. + * + * The response includes a `most_recent_instance` object. + * This provides details of the most recent instance of this alert + * for the default branch or for the specified Git reference + * (if you used `ref` in the request). + */ + "code-scanning/list-alerts-for-repo": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + query: { + /** The name of a code scanning tool. Only results by this tool will be listed. You can specify the tool by using either `tool_name` or `tool_guid`, but not both. */ + tool_name?: components["parameters"]["tool-name"]; + /** The GUID of a code scanning tool. 
Only results by this tool will be listed. Note that some code scanning tools may not include a GUID in their analysis data. You can specify the tool by using either `tool_guid` or `tool_name`, but not both. */ + tool_guid?: components["parameters"]["tool-guid"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** The Git reference for the results you want to list. The `ref` for a branch can be formatted either as `refs/heads/` or simply ``. To reference a pull request use `refs/pull//merge`. */ + ref?: components["parameters"]["git-ref"]; + /** The direction to sort the results by. */ + direction?: components["parameters"]["direction"]; + /** The property by which to sort the results. . `number` is deprecated - we recommend that you use `created` instead. */ + sort?: "created" | "number" | "updated"; + /** Set to `open`, `closed, `fixed`, or `dismissed` to list code scanning alerts in a specific state. */ + state?: components["schemas"]["code-scanning-alert-state"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["code-scanning-alert-items"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 403: components["responses"]["code_scanning_forbidden_read"]; + 404: components["responses"]["not_found"]; + 503: components["responses"]["service_unavailable"]; + }; + }; + /** + * Gets a single code scanning alert. You must use an access token with the `security_events` scope to use this endpoint with private repos, the `public_repo` scope also grants permission to read security events on public repos only. GitHub Apps must have the `security_events` read permission to use this endpoint. + * + * **Deprecation notice**: + * The instances field is deprecated and will, in future, not be included in the response for this endpoint. The example response reflects this change. The same information can now be retrieved via a GET request to the URL specified by `instances_url`. + */ + "code-scanning/get-alert": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies an alert. You can find this at the end of the URL for a code scanning alert within GitHub, and in the `number` field in the response from the `GET /repos/{owner}/{repo}/code-scanning/alerts` operation. */ + alert_number: components["parameters"]["alert-number"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["code-scanning-alert"]; + }; + }; + 304: components["responses"]["not_modified"]; + 403: components["responses"]["code_scanning_forbidden_read"]; + 404: components["responses"]["not_found"]; + 503: components["responses"]["service_unavailable"]; + }; + }; + /** Updates the status of a single code scanning alert. You must use an access token with the `security_events` scope to use this endpoint with private repositories. You can also use tokens with the `public_repo` scope for public repositories only. GitHub Apps must have the `security_events` write permission to use this endpoint. */ + "code-scanning/update-alert": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. 
*/ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies an alert. You can find this at the end of the URL for a code scanning alert within GitHub, and in the `number` field in the response from the `GET /repos/{owner}/{repo}/code-scanning/alerts` operation. */ + alert_number: components["parameters"]["alert-number"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["code-scanning-alert"]; + }; + }; + 403: components["responses"]["code_scanning_forbidden_write"]; + 404: components["responses"]["not_found"]; + 503: components["responses"]["service_unavailable"]; + }; + requestBody: { + content: { + "application/json": { + state: components["schemas"]["code-scanning-alert-set-state"]; + dismissed_reason?: components["schemas"]["code-scanning-alert-dismissed-reason"]; + dismissed_comment?: components["schemas"]["code-scanning-alert-dismissed-comment"]; + }; + }; + }; + }; + /** + * Lists all instances of the specified code scanning alert. + * You must use an access token with the `security_events` scope to use this endpoint with private repos; + * the `public_repo` scope also grants permission to read security events on public repos only. + * GitHub Apps must have the `security_events` read permission to use this endpoint. + */ + "code-scanning/list-alert-instances": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies an alert. You can find this at the end of the URL for a code scanning alert within GitHub, and in the `number` field in the response from the `GET /repos/{owner}/{repo}/code-scanning/alerts` operation. */ + alert_number: components["parameters"]["alert-number"]; + }; + query: { + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** The Git reference for the results you want to list. The `ref` for a branch can be formatted either as `refs/heads/<branch name>` or simply `<branch name>`. To reference a pull request use `refs/pull/<number>/merge`. */ + ref?: components["parameters"]["git-ref"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["code-scanning-alert-instance"][]; + }; + }; + 403: components["responses"]["code_scanning_forbidden_read"]; + 404: components["responses"]["not_found"]; + 503: components["responses"]["service_unavailable"]; + }; + }; + /** + * Lists the details of all code scanning analyses for a repository, + * starting with the most recent. + * The response is paginated and you can use the `page` and `per_page` parameters + * to list the analyses you're interested in. + * By default 30 analyses are listed per page. + * + * The `rules_count` field in the response gives the number of rules + * that were run in the analysis. + * For very old analyses this data is not available, + * and `0` is returned in this field. + * + * You must use an access token with the `security_events` scope to use this endpoint with private repos; + * the `public_repo` scope also grants permission to read security events on public repos only.
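A minimal sketch of the `code-scanning/update-alert` operation typed above, i.e. `PATCH /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}`; the owner, repo, alert number and comment text are placeholders:

```ts
import { Octokit } from "@octokit/core";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

// Dismiss a single alert; `state`, `dismissed_reason` and `dismissed_comment`
// correspond to the requestBody fields in the type definition above.
async function dismissAlert(owner: string, repo: string, alertNumber: number) {
  const res = await octokit.request(
    "PATCH /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}",
    {
      owner,
      repo,
      alert_number: alertNumber,
      state: "dismissed",
      dismissed_reason: "won't fix",
      dismissed_comment: "Accepted risk for this release.",
    }
  );
  return res.data;
}
```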
+ * GitHub Apps must have the `security_events` read permission to use this endpoint. + * + * **Deprecation notice**: + * The `tool_name` field is deprecated and will, in future, not be included in the response for this endpoint. The example response reflects this change. The tool name can now be found inside the `tool` field. + */ + "code-scanning/list-recent-analyses": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + query: { + /** The name of a code scanning tool. Only results by this tool will be listed. You can specify the tool by using either `tool_name` or `tool_guid`, but not both. */ + tool_name?: components["parameters"]["tool-name"]; + /** The GUID of a code scanning tool. Only results by this tool will be listed. Note that some code scanning tools may not include a GUID in their analysis data. You can specify the tool by using either `tool_guid` or `tool_name`, but not both. */ + tool_guid?: components["parameters"]["tool-guid"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** The Git reference for the analyses you want to list. The `ref` for a branch can be formatted either as `refs/heads/<branch name>` or simply `<branch name>`. To reference a pull request use `refs/pull/<number>/merge`. */ + ref?: components["schemas"]["code-scanning-ref"]; + /** Filter analyses belonging to the same SARIF upload. */ + sarif_id?: components["schemas"]["code-scanning-analysis-sarif-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["code-scanning-analysis"][]; + }; + }; + 403: components["responses"]["code_scanning_forbidden_read"]; + 404: components["responses"]["not_found"]; + 503: components["responses"]["service_unavailable"]; + }; + }; + /** + * Gets a specified code scanning analysis for a repository. + * You must use an access token with the `security_events` scope to use this endpoint with private repos; + * the `public_repo` scope also grants permission to read security events on public repos only. + * GitHub Apps must have the `security_events` read permission to use this endpoint. + * + * The default JSON response contains fields that describe the analysis. + * This includes the Git reference and commit SHA to which the analysis relates, + * the datetime of the analysis, the name of the code scanning tool, + * and the number of alerts. + * + * The `rules_count` field in the default response gives the number of rules + * that were run in the analysis. + * For very old analyses this data is not available, + * and `0` is returned in this field. + * + * If you use the Accept header `application/sarif+json`, + * the response contains the analysis data that was uploaded. + * This is formatted as + * [SARIF version 2.1.0](https://docs.oasis-open.org/sarif/sarif/v2.1.0/cs01/sarif-v2.1.0-cs01.html). + */ + "code-scanning/get-analysis": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive.
*/ + repo: components["parameters"]["repo"]; + /** The ID of the analysis, as returned from the `GET /repos/{owner}/{repo}/code-scanning/analyses` operation. */ + analysis_id: number; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["code-scanning-analysis"]; + "application/json+sarif": { [key: string]: unknown }; + }; + }; + 403: components["responses"]["code_scanning_forbidden_read"]; + 404: components["responses"]["not_found"]; + 503: components["responses"]["service_unavailable"]; + }; + }; + /** + * Deletes a specified code scanning analysis from a repository. For + * private repositories, you must use an access token with the `repo` scope. For public repositories, + * you must use an access token with `public_repo` scope. + * GitHub Apps must have the `security_events` write permission to use this endpoint. + * + * You can delete one analysis at a time. + * To delete a series of analyses, start with the most recent analysis and work backwards. + * Conceptually, the process is similar to the undo function in a text editor. + * + * When you list the analyses for a repository, + * one or more will be identified as deletable in the response: + * + * ``` + * "deletable": true + * ``` + * + * An analysis is deletable when it's the most recent in a set of analyses. + * Typically, a repository will have multiple sets of analyses + * for each enabled code scanning tool, + * where a set is determined by a unique combination of analysis values: + * + * * `ref` + * * `tool` + * * `analysis_key` + * * `environment` + * + * If you attempt to delete an analysis that is not the most recent in a set, + * you'll get a 400 response with the message: + * + * ``` + * Analysis specified is not deletable. + * ``` + * + * The response from a successful `DELETE` operation provides you with + * two alternative URLs for deleting the next analysis in the set: + * `next_analysis_url` and `confirm_delete_url`. + * Use the `next_analysis_url` URL if you want to avoid accidentally deleting the final analysis + * in a set. This is a useful option if you want to preserve at least one analysis + * for the specified tool in your repository. + * Use the `confirm_delete_url` URL if you are content to remove all analyses for a tool. + * When you delete the last analysis in a set, the value of `next_analysis_url` and `confirm_delete_url` + * in the 200 response is `null`. + * + * As an example of the deletion process, + * let's imagine that you added a workflow that configured a particular code scanning tool + * to analyze the code in a repository. This tool has added 15 analyses: + * 10 on the default branch, and another 5 on a topic branch. + * You therefore have two separate sets of analyses for this tool. + * You've now decided that you want to remove all of the analyses for the tool. + * To do this you must make 15 separate deletion requests. + * To start, you must find an analysis that's identified as deletable. + * Each set of analyses always has one that's identified as deletable. + * Having found the deletable analysis for one of the two sets, + * delete this analysis and then continue deleting the next analysis in the set until they're all deleted. + * Then repeat the process for the second set. + * The procedure therefore consists of a nested loop: + * + * **Outer loop**: + * * List the analyses for the repository, filtered by tool. + * * Parse this list to find a deletable analysis. 
If found: + * + * **Inner loop**: + * * Delete the identified analysis. + * * Parse the response for the value of `confirm_delete_url` and, if found, use this in the next iteration. + * + * The above process assumes that you want to remove all trace of the tool's analyses from the GitHub user interface, for the specified repository, and it therefore uses the `confirm_delete_url` value. Alternatively, you could use the `next_analysis_url` value, which would leave the last analysis in each set undeleted to avoid removing a tool's analysis entirely. + */ + "code-scanning/delete-analysis": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The ID of the analysis, as returned from the `GET /repos/{owner}/{repo}/code-scanning/analyses` operation. */ + analysis_id: number; + }; + query: { + /** Allow deletion if the specified analysis is the last in a set. If you attempt to delete the final analysis in a set without setting this parameter to `true`, you'll get a 400 response with the message: `Analysis is last of its type and deletion may result in the loss of historical alert data. Please specify confirm_delete.` */ + confirm_delete?: string | null; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["code-scanning-analysis-deletion"]; + }; + }; + 400: components["responses"]["bad_request"]; + 403: components["responses"]["code_scanning_forbidden_write"]; + 404: components["responses"]["not_found"]; + 503: components["responses"]["service_unavailable"]; + }; + }; + /** + * Uploads SARIF data containing the results of a code scanning analysis to make the results available in a repository. You must use an access token with the `security_events` scope to use this endpoint for private repositories. You can also use tokens with the `public_repo` scope for public repositories only. GitHub Apps must have the `security_events` write permission to use this endpoint. + * + * There are two places where you can upload code scanning results. + * - If you upload to a pull request, for example `--ref refs/pull/42/merge` or `--ref refs/pull/42/head`, then the results appear as alerts in a pull request check. For more information, see "[Triaging code scanning alerts in pull requests](/code-security/secure-coding/triaging-code-scanning-alerts-in-pull-requests)." + * - If you upload to a branch, for example `--ref refs/heads/my-branch`, then the results appear in the **Security** tab for your repository. For more information, see "[Managing code scanning alerts for your repository](/code-security/secure-coding/managing-code-scanning-alerts-for-your-repository#viewing-the-alerts-for-a-repository)." + * + * You must compress the SARIF-formatted analysis data that you want to upload, using `gzip`, and then encode it as a Base64 format string. For example: + * + * ``` + * gzip -c analysis-data.sarif | base64 -w0 + * ``` + * + * SARIF upload supports a maximum of 5000 results per analysis run. Any results over this limit are ignored and any SARIF uploads with more than 25,000 results are rejected. Typically, but not necessarily, a SARIF file contains a single run of a single tool. If a code scanning tool generates too many results, you should update the analysis configuration to run only the most important rules or queries. 
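The deletion procedure described above (outer loop: list analyses filtered by tool; inner loop: delete and follow `confirm_delete_url` until it is `null`) could be sketched roughly as follows, again assuming the vendored `@octokit/core` client; the tool name and repository are placeholders, and passing the returned URL straight back to `octokit.request` is an assumption of this sketch:

```ts
import { Octokit } from "@octokit/core";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

// Remove every analysis produced by one tool, set by set, as described above.
async function deleteAnalysesForTool(owner: string, repo: string, toolName: string) {
  // Outer loop: list the analyses for the repository, filtered by tool.
  const { data: analyses } = await octokit.request(
    "GET /repos/{owner}/{repo}/code-scanning/analyses",
    { owner, repo, tool_name: toolName, per_page: 100 }
  );
  for (const analysis of analyses) {
    if (!analysis.deletable) continue;
    // Inner loop: delete the deletable analysis, then keep following
    // confirm_delete_url until it comes back null (the whole set is gone).
    let next: string | null = `${analysis.url}?confirm_delete=true`;
    while (next) {
      const res = await octokit.request(`DELETE ${next}`);
      next = (res.data && res.data.confirm_delete_url) || null;
    }
  }
}
```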
+ * + * The `202 Accepted`, response includes an `id` value. + * You can use this ID to check the status of the upload by using this for the `/sarifs/{sarif_id}` endpoint. + * For more information, see "[Get information about a SARIF upload](/rest/reference/code-scanning#get-information-about-a-sarif-upload)." + */ + "code-scanning/upload-sarif": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 202: { + content: { + "application/json": components["schemas"]["code-scanning-sarifs-receipt"]; + }; + }; + /** Bad Request if the sarif field is invalid */ + 400: unknown; + 403: components["responses"]["code_scanning_forbidden_write"]; + 404: components["responses"]["not_found"]; + /** Payload Too Large if the sarif field is too large */ + 413: unknown; + 503: components["responses"]["service_unavailable"]; + }; + requestBody: { + content: { + "application/json": { + commit_sha: components["schemas"]["code-scanning-analysis-commit-sha"]; + ref: components["schemas"]["code-scanning-ref"]; + sarif: components["schemas"]["code-scanning-analysis-sarif-file"]; + /** + * Format: uri + * @description The base directory used in the analysis, as it appears in the SARIF file. + * This property is used to convert file paths from absolute to relative, so that alerts can be mapped to their correct location in the repository. + * @example file:///github/workspace/ + */ + checkout_uri?: string; + /** + * Format: date-time + * @description The time that the analysis run began. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + started_at?: string; + /** @description The name of the tool used to generate the code scanning analysis. If this parameter is not used, the tool name defaults to "API". If the uploaded SARIF contains a tool GUID, this will be available for filtering using the `tool_guid` parameter of operations such as `GET /repos/{owner}/{repo}/code-scanning/alerts`. */ + tool_name?: string; + }; + }; + }; + }; + /** Gets information about a SARIF upload, including the status and the URL of the analysis that was uploaded so that you can retrieve details of the analysis. For more information, see "[Get a code scanning analysis for a repository](/rest/reference/code-scanning#get-a-code-scanning-analysis-for-a-repository)." You must use an access token with the `security_events` scope to use this endpoint with private repos, the `public_repo` scope also grants permission to read security events on public repos only. GitHub Apps must have the `security_events` read permission to use this endpoint. */ + "code-scanning/get-sarif": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The SARIF ID obtained after uploading. 
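A sketch of the `code-scanning/upload-sarif` operation above: gzip the SARIF file and Base64-encode it (the programmatic equivalent of the `gzip -c analysis-data.sarif | base64 -w0` command in the description), then POST it. The file name, ref and commit SHA below are placeholders:

```ts
import { readFileSync } from "node:fs";
import { gzipSync } from "node:zlib";
import { Octokit } from "@octokit/core";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

async function uploadSarif(owner: string, repo: string, commitSha: string, ref: string) {
  // Compress the SARIF report and encode it as a Base64 string, as required.
  const sarif = gzipSync(readFileSync("analysis-data.sarif")).toString("base64");

  const res = await octokit.request("POST /repos/{owner}/{repo}/code-scanning/sarifs", {
    owner,
    repo,
    commit_sha: commitSha,
    ref, // e.g. "refs/heads/my-branch" or "refs/pull/42/merge"
    sarif,
  });
  return res.data.id; // use this id with GET .../code-scanning/sarifs/{sarif_id}
}
```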
*/ + sarif_id: string; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["code-scanning-sarifs-status"]; + }; + }; + 403: components["responses"]["code_scanning_forbidden_read"]; + /** Not Found if the sarif id does not match any upload */ + 404: unknown; + 503: components["responses"]["service_unavailable"]; + }; + }; + /** + * List any syntax errors that are detected in the CODEOWNERS + * file. + * + * For more information about the correct CODEOWNERS syntax, + * see "[About code owners](https://docs.github.com/repositories/managing-your-repositorys-settings-and-features/customizing-your-repository/about-code-owners)." + */ + "repos/codeowners-errors": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + query: { + /** A branch, tag or commit name used to determine which version of the CODEOWNERS file to use. Default: the repository's default branch (e.g. `main`) */ + ref?: string; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["codeowners-errors"]; + }; + }; + /** Resource not found */ + 404: unknown; + }; + }; + /** + * Lists the codespaces associated to a specified repository and the authenticated user. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have read access to the `codespaces` repository permission to use this endpoint. + */ + "codespaces/list-in-repository-for-authenticated-user": { + parameters: { + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": { + total_count: number; + codespaces: components["schemas"]["codespace"][]; + }; + }; + }; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 500: components["responses"]["internal_error"]; + }; + }; + /** + * Creates a codespace owned by the authenticated user in the specified repository. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have write access to the `codespaces` repository permission to use this endpoint. + */ + "codespaces/create-with-repo-for-authenticated-user": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. 
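For the `repos/codeowners-errors` operation above, a small sketch (owner and repo are placeholders) that surfaces CODEOWNERS syntax problems:

```ts
import { Octokit } from "@octokit/core";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

// Report any syntax errors GitHub found in the repository's CODEOWNERS file.
async function reportCodeownersErrors(owner: string, repo: string) {
  const res = await octokit.request("GET /repos/{owner}/{repo}/codeowners/errors", {
    owner,
    repo,
    // `ref` is optional; it defaults to the repository's default branch.
  });
  for (const err of res.data.errors) {
    console.log(`${err.path}:${err.line}:${err.column} ${err.kind}`);
  }
}
```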
*/ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response when the codespace was successfully created */ + 201: { + content: { + "application/json": components["schemas"]["codespace"]; + }; + }; + /** Response when the codespace creation partially failed but is being retried in the background */ + 202: { + content: { + "application/json": components["schemas"]["codespace"]; + }; + }; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + requestBody: { + content: { + "application/json": { + /** @description Git ref (typically a branch name) for this codespace */ + ref?: string; + /** @description Location for this codespace. Assigned by IP if not provided */ + location?: string; + /** @description IP for location auto-detection when proxying a request */ + client_ip?: string; + /** @description Machine type to use for this codespace */ + machine?: string; + /** @description Path to devcontainer.json config to use for this codespace */ + devcontainer_path?: string; + /** @description Whether to authorize requested permissions from devcontainer.json */ + multi_repo_permissions_opt_out?: boolean; + /** @description Working directory for this codespace */ + working_directory?: string; + /** @description Time in minutes before codespace stops from inactivity */ + idle_timeout_minutes?: number; + /** @description Display name for this codespace */ + display_name?: string; + /** @description Duration in minutes after codespace has gone idle in which it will be deleted. Must be integer minutes between 0 and 43200 (30 days). */ + retention_period_minutes?: number; + } | null; + }; + }; + }; + /** + * Lists the devcontainer.json files associated with a specified repository and the authenticated user. These files + * specify launchpoint configurations for codespaces created within the repository. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have read access to the `codespaces_metadata` repository permission to use this endpoint. + */ + "codespaces/list-devcontainers-in-repository-for-authenticated-user": { + parameters: { + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": { + total_count: number; + devcontainers: { + path: string; + name?: string; + }[]; + }; + }; + }; + 400: components["responses"]["bad_request"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 500: components["responses"]["internal_error"]; + }; + }; + /** + * List the machine types available for a given repository based on its configuration. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have write access to the `codespaces_metadata` repository permission to use this endpoint. 
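A hedged sketch of `codespaces/create-with-repo-for-authenticated-user` above (`POST /repos/{owner}/{repo}/codespaces`); the branch name and machine type are placeholders and depend on what the repository actually offers:

```ts
import { Octokit } from "@octokit/core";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

// Create a codespace on a branch; every request-body field shown here is
// optional in the type definition above.
async function createCodespace(owner: string, repo: string) {
  const res = await octokit.request("POST /repos/{owner}/{repo}/codespaces", {
    owner,
    repo,
    ref: "main",               // placeholder branch
    machine: "basicLinux32gb", // placeholder machine type; list machines first
    display_name: "review-box",
    idle_timeout_minutes: 30,
  });
  return res.data.web_url; // 201 and 202 both return a codespace object
}
```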
+ */ + "codespaces/repo-machines-for-authenticated-user": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + query: { + /** The location to check for available machines. Assigned by IP if not provided. */ + location?: string; + /** IP for location auto-detection when proxying a request */ + client_ip?: string; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": { + total_count: number; + machines: components["schemas"]["codespace-machine"][]; + }; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 500: components["responses"]["internal_error"]; + }; + }; + /** + * Gets the default attributes for codespaces created by the user with the repository. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have write access to the `codespaces` repository permission to use this endpoint. + */ + "codespaces/pre-flight-with-repo-for-authenticated-user": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + query: { + /** The branch or commit to check for a default devcontainer path. If not specified, the default branch will be checked. */ + ref?: string; + /** An alternative IP for default location auto-detection, such as when proxying a request. */ + client_ip?: string; + }; + }; + responses: { + /** Response when a user is able to create codespaces from the repository. */ + 200: { + content: { + "application/json": { + billable_owner?: components["schemas"]["simple-user"]; + defaults?: { + location: string; + devcontainer_path: string | null; + }; + }; + }; + }; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** Lists all secrets available in a repository without revealing their encrypted values. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `codespaces_secrets` repository permission to use this endpoint. */ + "codespaces/list-repo-secrets": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": { + total_count: number; + secrets: components["schemas"]["repo-codespaces-secret"][]; + }; + }; + }; + }; + }; + /** Gets your public key, which you need to encrypt secrets. You need to encrypt a secret before you can create or update secrets. Anyone with read access to the repository can use this endpoint. 
If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `codespaces_secrets` repository permission to use this endpoint. */ + "codespaces/get-repo-public-key": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["codespaces-public-key"]; + }; + }; + }; + }; + /** Gets a single repository secret without revealing its encrypted value. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `codespaces_secrets` repository permission to use this endpoint. */ + "codespaces/get-repo-secret": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The name of the secret. */ + secret_name: components["parameters"]["secret-name"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["repo-codespaces-secret"]; + }; + }; + }; + }; + /** + * Creates or updates a repository secret with an encrypted value. Encrypt your secret using + * [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages). You must authenticate using an access + * token with the `repo` scope to use this endpoint. GitHub Apps must have the `codespaces_secrets` repository + * permission to use this endpoint. + * + * #### Example of encrypting a secret using Node.js + * + * Encrypt your secret using the [tweetsodium](https://github.com/github/tweetsodium) library. + * + * ``` + * const sodium = require('tweetsodium'); + * + * const key = "base64-encoded-public-key"; + * const value = "plain-text-secret"; + * + * // Convert the message and key to Uint8Array's (Buffer implements that interface) + * const messageBytes = Buffer.from(value); + * const keyBytes = Buffer.from(key, 'base64'); + * + * // Encrypt using LibSodium. + * const encryptedBytes = sodium.seal(messageBytes, keyBytes); + * + * // Base64 the encrypted secret + * const encrypted = Buffer.from(encryptedBytes).toString('base64'); + * + * console.log(encrypted); + * ``` + * + * + * #### Example of encrypting a secret using Python + * + * Encrypt your secret using [pynacl](https://pynacl.readthedocs.io/en/latest/public/#nacl-public-sealedbox) with Python 3. + * + * ``` + * from base64 import b64encode + * from nacl import encoding, public + * + * def encrypt(public_key: str, secret_value: str) -> str: + * """Encrypt a Unicode string using the public key.""" + * public_key = public.PublicKey(public_key.encode("utf-8"), encoding.Base64Encoder()) + * sealed_box = public.SealedBox(public_key) + * encrypted = sealed_box.encrypt(secret_value.encode("utf-8")) + * return b64encode(encrypted).decode("utf-8") + * ``` + * + * #### Example of encrypting a secret using C# + * + * Encrypt your secret using the [Sodium.Core](https://www.nuget.org/packages/Sodium.Core/) package. 
+ * + * ``` + * var secretValue = System.Text.Encoding.UTF8.GetBytes("mySecret"); + * var publicKey = Convert.FromBase64String("2Sg8iYjAxxmI2LvUXpJjkYrMxURPc8r+dB7TJyvvcCU="); + * + * var sealedPublicKeyBox = Sodium.SealedPublicKeyBox.Create(secretValue, publicKey); + * + * Console.WriteLine(Convert.ToBase64String(sealedPublicKeyBox)); + * ``` + * + * #### Example of encrypting a secret using Ruby + * + * Encrypt your secret using the [rbnacl](https://github.com/RubyCrypto/rbnacl) gem. + * + * ```ruby + * require "rbnacl" + * require "base64" + * + * key = Base64.decode64("+ZYvJDZMHUfBkJdyq5Zm9SKqeuBQ4sj+6sfjlH4CgG0=") + * public_key = RbNaCl::PublicKey.new(key) + * + * box = RbNaCl::Boxes::Sealed.from_public_key(public_key) + * encrypted_secret = box.encrypt("my_secret") + * + * # Print the base64 encoded secret + * puts Base64.strict_encode64(encrypted_secret) + * ``` + */ + "codespaces/create-or-update-repo-secret": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The name of the secret. */ + secret_name: components["parameters"]["secret-name"]; + }; + }; + responses: { + /** Response when creating a secret */ + 201: { + content: { + "application/json": { [key: string]: unknown }; + }; + }; + /** Response when updating a secret */ + 204: never; + }; + requestBody: { + content: { + "application/json": { + /** @description Value for your secret, encrypted with [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages) using the public key retrieved from the [Get a repository public key](https://docs.github.com/rest/reference/codespaces#get-a-repository-public-key) endpoint. */ + encrypted_value?: string; + /** @description ID of the key you used to encrypt the secret. */ + key_id?: string; + }; + }; + }; + }; + /** Deletes a secret in a repository using the secret name. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `codespaces_secrets` repository permission to use this endpoint. */ + "codespaces/delete-repo-secret": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The name of the secret. */ + secret_name: components["parameters"]["secret-name"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** + * For organization-owned repositories, the list of collaborators includes outside collaborators, organization members that are direct collaborators, organization members with access through team memberships, organization members with access through default organization permissions, and organization owners. + * Organization members with write, maintain, or admin privileges on the organization-owned repository can use this endpoint. + * + * Team members will include the members of child teams. + * + * You must authenticate using an access token with the `read:org` and `repo` scopes with push access to use this + * endpoint. GitHub Apps must have the `members` organization permission and `metadata` repository permission to use this + * endpoint. + */ + "repos/list-collaborators": { + parameters: { + path: { + /** The account owner of the repository. 
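Tying the encryption examples above to the `codespaces/create-or-update-repo-secret` operation: a sketch that fetches the repository public key, seals the value with the same `tweetsodium` call shown in the Node.js example (the `tweetsodium` package itself is an assumed dependency, not part of this patch), and PUTs the secret. Owner, repo, secret name and value are placeholders:

```ts
import { Octokit } from "@octokit/core";
// Assumed dependency, mirroring the Node.js example in the description above.
import * as tweetsodium from "tweetsodium";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

async function putCodespacesSecret(owner: string, repo: string, name: string, value: string) {
  // 1. Get the repository public key used to encrypt secrets.
  const { data: key } = await octokit.request(
    "GET /repos/{owner}/{repo}/codespaces/secrets/public-key",
    { owner, repo }
  );

  // 2. Seal the plain-text value with the public key (LibSodium sealed box).
  const encryptedBytes = tweetsodium.seal(
    Buffer.from(value),
    Buffer.from(key.key, "base64")
  );

  // 3. Create or update the secret with the encrypted value and key id.
  await octokit.request("PUT /repos/{owner}/{repo}/codespaces/secrets/{secret_name}", {
    owner,
    repo,
    secret_name: name,
    encrypted_value: Buffer.from(encryptedBytes).toString("base64"),
    key_id: key.key_id,
  });
}
```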
The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + query: { + /** Filter collaborators returned by their affiliation. `outside` means all outside collaborators of an organization-owned repository. `direct` means all collaborators with permissions to an organization-owned repository, regardless of organization membership status. `all` means all collaborators the authenticated user can see. */ + affiliation?: "outside" | "direct" | "all"; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["collaborator"][]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * For organization-owned repositories, the list of collaborators includes outside collaborators, organization members that are direct collaborators, organization members with access through team memberships, organization members with access through default organization permissions, and organization owners. + * + * Team members will include the members of child teams. + * + * You must authenticate using an access token with the `read:org` and `repo` scopes with push access to use this + * endpoint. GitHub Apps must have the `members` organization permission and `metadata` repository permission to use this + * endpoint. + */ + "repos/check-collaborator": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + }; + responses: { + /** Response if user is a collaborator */ + 204: never; + /** Not Found if user is not a collaborator */ + 404: unknown; + }; + }; + /** + * This endpoint triggers [notifications](https://docs.github.com/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. + * + * Adding an outside collaborator may be restricted by enterprise administrators. For more information, see "[Enforcing repository management policies in your enterprise](https://docs.github.com/enterprise-cloud@latest/admin/policies/enforcing-policies-for-your-enterprise/enforcing-repository-management-policies-in-your-enterprise#enforcing-a-policy-for-inviting-outside-collaborators-to-repositories)." + * + * For more information on permission levels, see "[Repository permission levels for an organization](https://docs.github.com/github/setting-up-and-managing-organizations-and-teams/repository-permission-levels-for-an-organization#permission-levels-for-repositories-owned-by-an-organization)". There are restrictions on which permissions can be granted to organization members when an organization base role is in place. 
In this case, the permission being given must be equal to or higher than the org base permission. Otherwise, the request will fail with: + * + * ``` + * Cannot assign {member} permission of {role name} + * ``` + * + * Note that, if you choose not to pass any parameters, you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)." + * + * The invitee will receive a notification that they have been invited to the repository, which they must accept or decline. They may do this via the notifications page, the email they receive, or by using the [repository invitations API endpoints](https://docs.github.com/rest/reference/repos#invitations). + * + * **Updating an existing collaborator's permission level** + * + * The endpoint can also be used to change the permissions of an existing collaborator without first removing and re-adding the collaborator. To change the permissions, use the same endpoint and pass a different `permission` parameter. The response will be a `204`, with no other indication that the permission level changed. + * + * **Rate limits** + * + * You are limited to sending 50 invitations to a repository per 24 hour period. Note there is no limit if you are inviting organization members to an organization repository. + */ + "repos/add-collaborator": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + }; + responses: { + /** Response when a new invitation is created */ + 201: { + content: { + "application/json": components["schemas"]["repository-invitation"]; + }; + }; + /** + * Response when: + * - an existing collaborator is added as a collaborator + * - an organization member is added as an individual collaborator + * - an existing team member (whose team is also a repository collaborator) is added as an individual collaborator + */ + 204: never; + 403: components["responses"]["forbidden"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** + * @description The permission to grant the collaborator. **Only valid on organization-owned repositories.** In addition to the enumerated values, you can also specify a custom repository role name, if the owning organization has defined any. + * @default push + * @enum {string} + */ + permission?: "pull" | "push" | "admin" | "maintain" | "triage"; + }; + }; + }; + }; + "repos/remove-collaborator": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** Checks the repository permission of a collaborator. The possible repository permissions are `admin`, `write`, `read`, and `none`. */ + "repos/get-collaborator-permission-level": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. 
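For `repos/add-collaborator` above, a minimal sketch of inviting a user with `push` permission (the username is a placeholder; a `201` means an invitation was created, a `204` means the user already had access):

```ts
import { Octokit } from "@octokit/core";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

async function inviteCollaborator(owner: string, repo: string, username: string) {
  const res = await octokit.request("PUT /repos/{owner}/{repo}/collaborators/{username}", {
    owner,
    repo,
    username,
    permission: "push", // the documented default
  });
  // 201 returns a repository-invitation object; 204 returns no body.
  return res.status === 201 ? res.data : null;
}
```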
*/ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + }; + responses: { + /** if user has admin permissions */ + 200: { + content: { + "application/json": components["schemas"]["repository-collaborator-permission"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Commit Comments use [these custom media types](https://docs.github.com/rest/reference/repos#custom-media-types). You can read more about the use of media types in the API [here](https://docs.github.com/rest/overview/media-types/). + * + * Comments are ordered by ascending ID. + */ + "repos/list-commit-comments-for-repo": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["commit-comment"][]; + }; + }; + }; + }; + "repos/get-commit-comment": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the comment. */ + comment_id: components["parameters"]["comment-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["commit-comment"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + "repos/delete-commit-comment": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the comment. */ + comment_id: components["parameters"]["comment-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + 404: components["responses"]["not_found"]; + }; + }; + "repos/update-commit-comment": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the comment. */ + comment_id: components["parameters"]["comment-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["commit-comment"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The contents of the comment */ + body: string; + }; + }; + }; + }; + /** List the reactions to a [commit comment](https://docs.github.com/rest/reference/repos#comments). */ + "reactions/list-for-commit-comment": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. 
*/ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the comment. */ + comment_id: components["parameters"]["comment-id"]; + }; + query: { + /** Returns a single [reaction type](https://docs.github.com/rest/reference/reactions#reaction-types). Omit this parameter to list all reactions to a commit comment. */ + content?: + | "+1" + | "-1" + | "laugh" + | "confused" + | "heart" + | "hooray" + | "rocket" + | "eyes"; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["reaction"][]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** Create a reaction to a [commit comment](https://docs.github.com/rest/reference/repos#comments). A response with an HTTP `200` status means that you already added the reaction type to this commit comment. */ + "reactions/create-for-commit-comment": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the comment. */ + comment_id: components["parameters"]["comment-id"]; + }; + }; + responses: { + /** Reaction exists */ + 200: { + content: { + "application/json": components["schemas"]["reaction"]; + }; + }; + /** Reaction created */ + 201: { + content: { + "application/json": components["schemas"]["reaction"]; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** + * @description The [reaction type](https://docs.github.com/rest/reference/reactions#reaction-types) to add to the commit comment. + * @enum {string} + */ + content: + | "+1" + | "-1" + | "laugh" + | "confused" + | "heart" + | "hooray" + | "rocket" + | "eyes"; + }; + }; + }; + }; + /** + * **Note:** You can also specify a repository by `repository_id` using the route `DELETE /repositories/:repository_id/comments/:comment_id/reactions/:reaction_id`. + * + * Delete a reaction to a [commit comment](https://docs.github.com/rest/reference/repos#comments). + */ + "reactions/delete-for-commit-comment": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the comment. */ + comment_id: components["parameters"]["comment-id"]; + /** The unique identifier of the reaction. */ + reaction_id: components["parameters"]["reaction-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** + * **Signature verification object** + * + * The response will include a `verification` object that describes the result of verifying the commit's signature. The following fields are included in the `verification` object: + * + * | Name | Type | Description | + * | ---- | ---- | ----------- | + * | `verified` | `boolean` | Indicates whether GitHub considers the signature in this commit to be verified. | + * | `reason` | `string` | The reason for verified value. 
Possible values and their meanings are enumerated in table below. | + * | `signature` | `string` | The signature that was extracted from the commit. | + * | `payload` | `string` | The value that was signed. | + * + * These are the possible values for `reason` in the `verification` object: + * + * | Value | Description | + * | ----- | ----------- | + * | `expired_key` | The key that made the signature is expired. | + * | `not_signing_key` | The "signing" flag is not among the usage flags in the GPG key that made the signature. | + * | `gpgverify_error` | There was an error communicating with the signature verification service. | + * | `gpgverify_unavailable` | The signature verification service is currently unavailable. | + * | `unsigned` | The object does not include a signature. | + * | `unknown_signature_type` | A non-PGP signature was found in the commit. | + * | `no_user` | No user was associated with the `committer` email address in the commit. | + * | `unverified_email` | The `committer` email address in the commit was associated with a user, but the email address is not verified on her/his account. | + * | `bad_email` | The `committer` email address in the commit is not included in the identities of the PGP key that made the signature. | + * | `unknown_key` | The key that made the signature has not been registered with any user's account. | + * | `malformed_signature` | There was an error parsing the signature. | + * | `invalid` | The signature could not be cryptographically verified using the key whose key-id was found in the signature. | + * | `valid` | None of the above errors applied, so the signature is considered to be verified. | + */ + "repos/list-commits": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + query: { + /** SHA or branch to start listing commits from. Default: the repository’s default branch (usually `master`). */ + sha?: string; + /** Only commits containing this file path will be returned. */ + path?: string; + /** GitHub login or email address by which to filter by commit author. */ + author?: string; + /** Only show notifications updated after the given time. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. */ + since?: components["parameters"]["since"]; + /** Only commits before this date will be returned. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. */ + until?: string; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["commit"][]; + }; + }; + 400: components["responses"]["bad_request"]; + 404: components["responses"]["not_found"]; + 409: components["responses"]["conflict"]; + 500: components["responses"]["internal_error"]; + }; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. 
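A usage sketch for the `repos/list-commits` operation documented above, filtering by author and date (both placeholders) and reading one page of results:

```ts
import { Octokit } from "@octokit/core";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

async function listRecentCommits(owner: string, repo: string) {
  const res = await octokit.request("GET /repos/{owner}/{repo}/commits", {
    owner,
    repo,
    author: "octocat",             // placeholder login or email address
    since: "2024-01-01T00:00:00Z", // ISO 8601, as described above
    per_page: 50,
  });
  for (const c of res.data) {
    console.log(c.sha.slice(0, 7), c.commit.message.split("\n")[0]);
  }
}
```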
For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Returns all branches where the given commit SHA is the HEAD, or latest commit for the branch. + */ + "repos/list-branches-for-head-commit": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The SHA of the commit. */ + commit_sha: components["parameters"]["commit-sha"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["branch-short"][]; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + }; + /** Use the `:commit_sha` to specify the commit that will have its comments listed. */ + "repos/list-comments-for-commit": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The SHA of the commit. */ + commit_sha: components["parameters"]["commit-sha"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["commit-comment"][]; + }; + }; + }; + }; + /** + * Create a comment for a commit using its `:commit_sha`. + * + * This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. + */ + "repos/create-commit-comment": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The SHA of the commit. */ + commit_sha: components["parameters"]["commit-sha"]; + }; + }; + responses: { + /** Response */ + 201: { + headers: { + Location?: string; + }; + content: { + "application/json": components["schemas"]["commit-comment"]; + }; + }; + 403: components["responses"]["forbidden"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The contents of the comment. */ + body: string; + /** @description Relative path of the file to comment on. */ + path?: string; + /** @description Line index in the diff to comment on. */ + position?: number; + /** @description **Deprecated**. Use **position** parameter instead. Line number in the file to comment on. */ + line?: number; + }; + }; + }; + }; + /** Lists the merged pull request that introduced the commit to the repository. 
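For `repos/create-commit-comment` above, a short sketch that comments on a specific line of a file in a commit; the SHA, path, position and comment text are placeholders:

```ts
import { Octokit } from "@octokit/core";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

async function commentOnCommit(owner: string, repo: string, commitSha: string) {
  const res = await octokit.request(
    "POST /repos/{owner}/{repo}/commits/{commit_sha}/comments",
    {
      owner,
      repo,
      commit_sha: commitSha,
      body: "Consider extracting this into a helper.",
      path: "src/index.js", // optional: file to attach the comment to
      position: 4,          // optional: line index in the diff
    }
  );
  return res.data.html_url;
}
```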
If the commit is not present in the default branch, additionally returns open pull requests associated with the commit. The results may include open and closed pull requests. */ + "repos/list-pull-requests-associated-with-commit": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The SHA of the commit. */ + commit_sha: components["parameters"]["commit-sha"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["pull-request-simple"][]; + }; + }; + }; + }; + /** + * Returns the contents of a single commit reference. You must have `read` access for the repository to use this endpoint. + * + * **Note:** If there are more than 300 files in the commit diff, the response will include pagination link headers for the remaining files, up to a limit of 3000 files. Each page contains the static commit information, and the only changes are to the file listing. + * + * You can pass the appropriate [media type](https://docs.github.com/rest/overview/media-types/#commits-commit-comparison-and-pull-requests) to fetch `diff` and `patch` formats. Diffs with binary data will have no `patch` property. + * + * To return only the SHA-1 hash of the commit reference, you can provide the `sha` custom [media type](https://docs.github.com/rest/overview/media-types/#commits-commit-comparison-and-pull-requests) in the `Accept` header. You can use this endpoint to check if a remote reference's SHA-1 hash is the same as your local reference's SHA-1 hash by providing the local SHA-1 reference as the ETag. + * + * **Signature verification object** + * + * The response will include a `verification` object that describes the result of verifying the commit's signature. The following fields are included in the `verification` object: + * + * | Name | Type | Description | + * | ---- | ---- | ----------- | + * | `verified` | `boolean` | Indicates whether GitHub considers the signature in this commit to be verified. | + * | `reason` | `string` | The reason for verified value. Possible values and their meanings are enumerated in table below. | + * | `signature` | `string` | The signature that was extracted from the commit. | + * | `payload` | `string` | The value that was signed. | + * + * These are the possible values for `reason` in the `verification` object: + * + * | Value | Description | + * | ----- | ----------- | + * | `expired_key` | The key that made the signature is expired. | + * | `not_signing_key` | The "signing" flag is not among the usage flags in the GPG key that made the signature. | + * | `gpgverify_error` | There was an error communicating with the signature verification service. | + * | `gpgverify_unavailable` | The signature verification service is currently unavailable. | + * | `unsigned` | The object does not include a signature. | + * | `unknown_signature_type` | A non-PGP signature was found in the commit. | + * | `no_user` | No user was associated with the `committer` email address in the commit. 
| + * | `unverified_email` | The `committer` email address in the commit was associated with a user, but the email address is not verified on her/his account. | + * | `bad_email` | The `committer` email address in the commit is not included in the identities of the PGP key that made the signature. | + * | `unknown_key` | The key that made the signature has not been registered with any user's account. | + * | `malformed_signature` | There was an error parsing the signature. | + * | `invalid` | The signature could not be cryptographically verified using the key whose key-id was found in the signature. | + * | `valid` | None of the above errors applied, so the signature is considered to be verified. | + */ + "repos/get-commit": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** ref parameter */ + ref: string; + }; + query: { + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["commit"]; + }; + }; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + 500: components["responses"]["internal_error"]; + }; + }; + /** + * **Note:** The Checks API only looks for pushes in the repository where the check suite or check run were created. Pushes to a branch in a forked repository are not detected and return an empty `pull_requests` array. + * + * Lists check runs for a commit ref. The `ref` can be a SHA, branch name, or a tag name. GitHub Apps must have the `checks:read` permission on a private repository or pull access to a public repository to get check runs. OAuth Apps and authenticated users must have the `repo` scope to get check runs in a private repository. + */ + "checks/list-for-ref": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** ref parameter */ + ref: string; + }; + query: { + /** Returns check runs with the specified `name`. */ + check_name?: components["parameters"]["check-name"]; + /** Returns check runs with the specified `status`. */ + status?: components["parameters"]["status"]; + /** Filters check runs by their `completed_at` timestamp. `latest` returns the most recent check runs. */ + filter?: "latest" | "all"; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + app_id?: number; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": { + total_count: number; + check_runs: components["schemas"]["check-run"][]; + }; + }; + }; + }; + }; + /** + * **Note:** The Checks API only looks for pushes in the repository where the check suite or check run were created. Pushes to a branch in a forked repository are not detected and return an empty `pull_requests` array and a `null` value for `head_branch`. + * + * Lists check suites for a commit `ref`. 
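A hedged sketch of reading a single commit and its `verification` object via the `repos/get-commit` operation described above. The owner/repo/ref arguments and the token variable are placeholders for illustration only.

```typescript
import { Octokit } from "@octokit/core";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

async function describeCommit(owner: string, repo: string, ref: string) {
  // GET /repos/{owner}/{repo}/commits/{ref}
  const { data } = await octokit.request("GET /repos/{owner}/{repo}/commits/{ref}", {
    owner,
    repo,
    ref,
  });

  console.log(`${data.sha}: ${data.commit.message.split("\n")[0]}`);

  const verification = data.commit.verification;
  if (verification) {
    // `reason` takes one of the values enumerated in the table above
    // (e.g. "valid", "unsigned", "unknown_key").
    console.log(`signature verified=${verification.verified} reason=${verification.reason}`);
  }
}
```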
The `ref` can be a SHA, branch name, or a tag name. GitHub Apps must have the `checks:read` permission on a private repository or pull access to a public repository to list check suites. OAuth Apps and authenticated users must have the `repo` scope to get check suites in a private repository. + */ + "checks/list-suites-for-ref": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** ref parameter */ + ref: string; + }; + query: { + /** Filters check suites by GitHub App `id`. */ + app_id?: number; + /** Returns check runs with the specified `name`. */ + check_name?: components["parameters"]["check-name"]; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": { + total_count: number; + check_suites: components["schemas"]["check-suite"][]; + }; + }; + }; + }; + }; + /** + * Users with pull access in a repository can access a combined view of commit statuses for a given ref. The ref can be a SHA, a branch name, or a tag name. + * + * + * Additionally, a combined `state` is returned. The `state` is one of: + * + * * **failure** if any of the contexts report as `error` or `failure` + * * **pending** if there are no statuses or a context is `pending` + * * **success** if the latest status for all contexts is `success` + */ + "repos/get-combined-status-for-ref": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** ref parameter */ + ref: string; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["combined-commit-status"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Users with pull access in a repository can view commit statuses for a given ref. The ref can be a SHA, a branch name, or a tag name. Statuses are returned in reverse chronological order. The first status in the list will be the latest one. + * + * This resource is also available via a legacy route: `GET /repos/:owner/:repo/statuses/:ref`. + */ + "repos/list-commit-statuses-for-ref": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** ref parameter */ + ref: string; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. 
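The following is a speculative example that combines `repos/get-combined-status-for-ref` and `checks/list-for-ref` (both described above) into a single CI summary for a ref; the owner/repo/ref values are assumed placeholders.

```typescript
import { Octokit } from "@octokit/core";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

async function ciSummary(owner: string, repo: string, ref: string) {
  // Combined commit status: `state` is "success", "pending" or "failure".
  const status = await octokit.request(
    "GET /repos/{owner}/{repo}/commits/{ref}/status",
    { owner, repo, ref }
  );

  // Check runs for the same ref; `filter: "latest"` keeps the most recent run per check.
  const checks = await octokit.request(
    "GET /repos/{owner}/{repo}/commits/{ref}/check-runs",
    { owner, repo, ref, filter: "latest", per_page: 100 }
  );

  console.log(`combined status: ${status.data.state}`);
  for (const run of checks.data.check_runs) {
    console.log(`- ${run.name}: ${run.status} (${run.conclusion ?? "no conclusion yet"})`);
  }
}
```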
*/ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["status"][]; + }; + }; + 301: components["responses"]["moved_permanently"]; + }; + }; + /** + * This endpoint will return all community profile metrics, including an + * overall health score, repository description, the presence of documentation, detected + * code of conduct, detected license, and the presence of ISSUE\_TEMPLATE, PULL\_REQUEST\_TEMPLATE, + * README, and CONTRIBUTING files. + * + * The `health_percentage` score is defined as a percentage of how many of + * these four documents are present: README, CONTRIBUTING, LICENSE, and + * CODE_OF_CONDUCT. For example, if all four documents are present, then + * the `health_percentage` is `100`. If only one is present, then the + * `health_percentage` is `25`. + * + * `content_reports_enabled` is only returned for organization-owned repositories. + */ + "repos/get-community-profile-metrics": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["community-profile"]; + }; + }; + }; + }; + /** + * The `basehead` param is comprised of two parts: `base` and `head`. Both must be branch names in `repo`. To compare branches across other repositories in the same network as `repo`, use the format `:branch`. + * + * The response from the API is equivalent to running the `git log base..head` command; however, commits are returned in chronological order. Pass the appropriate [media type](https://docs.github.com/rest/overview/media-types/#commits-commit-comparison-and-pull-requests) to fetch diff and patch formats. + * + * The response also includes details on the files that were changed between the two commits. This includes the status of the change (for example, if a file was added, removed, modified, or renamed), and details of the change itself. For example, files with a `renamed` status have a `previous_filename` field showing the previous filename of the file, and files with a `modified` status have a `patch` field showing the changes made to the file. + * + * **Working with large comparisons** + * + * To process a response with a large number of commits, you can use (`per_page` or `page`) to paginate the results. When using paging, the list of changed files is only returned with page 1, but includes all changed files for the entire comparison. For more information on working with pagination, see "[Traversing with pagination](/rest/guides/traversing-with-pagination)." + * + * When calling this API without any paging parameters (`per_page` or `page`), the returned list is limited to 250 commits and the last commit in the list is the most recent of the entire comparison. When a paging parameter is specified, the first commit in the returned list of each page is the earliest. + * + * **Signature verification object** + * + * The response will include a `verification` object that describes the result of verifying the commit's signature. 
The following fields are included in the `verification` object: + * + * | Name | Type | Description | + * | ---- | ---- | ----------- | + * | `verified` | `boolean` | Indicates whether GitHub considers the signature in this commit to be verified. | + * | `reason` | `string` | The reason for verified value. Possible values and their meanings are enumerated in table below. | + * | `signature` | `string` | The signature that was extracted from the commit. | + * | `payload` | `string` | The value that was signed. | + * + * These are the possible values for `reason` in the `verification` object: + * + * | Value | Description | + * | ----- | ----------- | + * | `expired_key` | The key that made the signature is expired. | + * | `not_signing_key` | The "signing" flag is not among the usage flags in the GPG key that made the signature. | + * | `gpgverify_error` | There was an error communicating with the signature verification service. | + * | `gpgverify_unavailable` | The signature verification service is currently unavailable. | + * | `unsigned` | The object does not include a signature. | + * | `unknown_signature_type` | A non-PGP signature was found in the commit. | + * | `no_user` | No user was associated with the `committer` email address in the commit. | + * | `unverified_email` | The `committer` email address in the commit was associated with a user, but the email address is not verified on her/his account. | + * | `bad_email` | The `committer` email address in the commit is not included in the identities of the PGP key that made the signature. | + * | `unknown_key` | The key that made the signature has not been registered with any user's account. | + * | `malformed_signature` | There was an error parsing the signature. | + * | `invalid` | The signature could not be cryptographically verified using the key whose key-id was found in the signature. | + * | `valid` | None of the above errors applied, so the signature is considered to be verified. | + */ + "repos/compare-commits-with-basehead": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The base branch and head branch to compare. This parameter expects the format `{base}...{head}`. */ + basehead: string; + }; + query: { + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["commit-comparison"]; + }; + }; + 404: components["responses"]["not_found"]; + 500: components["responses"]["internal_error"]; + }; + }; + /** + * Gets the contents of a file or directory in a repository. Specify the file path or directory in `:path`. If you omit + * `:path`, you will receive the contents of the repository's root directory. See the description below regarding what the API response includes for directories. + * + * Files and symlinks support [a custom media type](https://docs.github.com/rest/reference/repos#custom-media-types) for + * retrieving the raw content or rendered HTML (when supported). All content types support [a custom media + * type](https://docs.github.com/rest/reference/repos#custom-media-types) to ensure the content is returned in a consistent + * object format. 
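A small sketch of calling `repos/compare-commits-with-basehead` as described above, building the `{base}...{head}` string from two branch names. Branch and repository names are placeholders.

```typescript
import { Octokit } from "@octokit/core";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

async function compareBranches(owner: string, repo: string, base: string, head: string) {
  // GET /repos/{owner}/{repo}/compare/{basehead}
  const { data } = await octokit.request(
    "GET /repos/{owner}/{repo}/compare/{basehead}",
    { owner, repo, basehead: `${base}...${head}`, per_page: 100, page: 1 }
  );

  console.log(
    `${head} is ${data.ahead_by} ahead / ${data.behind_by} behind ${base} (${data.status})`
  );

  // As noted above, the full changed-file list is only returned on page 1.
  for (const file of data.files ?? []) {
    console.log(`${file.status}\t${file.filename}`);
  }
}
```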
+ * + * **Note**: + * * To get a repository's contents recursively, you can [recursively get the tree](https://docs.github.com/rest/reference/git#trees). + * * This API has an upper limit of 1,000 files for a directory. If you need to retrieve more files, use the [Git Trees + * API](https://docs.github.com/rest/reference/git#get-a-tree). + * + * #### Size limits + * If the requested file's size is: + * * 1 MB or smaller: All features of this endpoint are supported. + * * Between 1-100 MB: Only the `raw` or `object` [custom media types](https://docs.github.com/rest/repos/contents#custom-media-types-for-repository-contents) are supported. Both will work as normal, except that when using the `object` media type, the `content` field will be an empty string and the `encoding` field will be `"none"`. To get the contents of these larger files, use the `raw` media type. + * * Greater than 100 MB: This endpoint is not supported. + * + * #### If the content is a directory + * The response will be an array of objects, one object for each item in the directory. + * When listing the contents of a directory, submodules have their "type" specified as "file". Logically, the value + * _should_ be "submodule". This behavior exists in API v3 [for backwards compatibility purposes](https://git.io/v1YCW). + * In the next major version of the API, the type will be returned as "submodule". + * + * #### If the content is a symlink + * If the requested `:path` points to a symlink, and the symlink's target is a normal file in the repository, then the + * API responds with the content of the file (in the format shown in the example. Otherwise, the API responds with an object + * describing the symlink itself. + * + * #### If the content is a submodule + * The `submodule_git_url` identifies the location of the submodule repository, and the `sha` identifies a specific + * commit within the submodule repository. Git uses the given URL when cloning the submodule repository, and checks out + * the submodule at that specific commit. + * + * If the submodule repository is not hosted on github.com, the Git URLs (`git_url` and `_links["git"]`) and the + * github.com URLs (`html_url` and `_links["html"]`) will have null values. + */ + "repos/get-content": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** path parameter */ + path: string; + }; + query: { + /** The name of the commit/branch/tag. Default: the repository’s default branch (usually `master`) */ + ref?: string; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/vnd.github.v3.object": components["schemas"]["content-tree"]; + "application/json": + | components["schemas"]["content-directory"] + | components["schemas"]["content-file"] + | components["schemas"]["content-symlink"] + | components["schemas"]["content-submodule"]; + }; + }; + 302: components["responses"]["found"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** Creates a new file or replaces an existing file in a repository. */ + "repos/create-or-update-file-contents": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. 
*/ + repo: components["parameters"]["repo"]; + /** path parameter */ + path: string; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["file-commit"]; + }; + }; + /** Response */ + 201: { + content: { + "application/json": components["schemas"]["file-commit"]; + }; + }; + 404: components["responses"]["not_found"]; + 409: components["responses"]["conflict"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The commit message. */ + message: string; + /** @description The new file content, using Base64 encoding. */ + content: string; + /** @description **Required if you are updating a file**. The blob SHA of the file being replaced. */ + sha?: string; + /** @description The branch name. Default: the repository’s default branch (usually `master`) */ + branch?: string; + /** @description The person that committed the file. Default: the authenticated user. */ + committer?: { + /** @description The name of the author or committer of the commit. You'll receive a `422` status code if `name` is omitted. */ + name: string; + /** @description The email of the author or committer of the commit. You'll receive a `422` status code if `email` is omitted. */ + email: string; + /** @example "2013-01-05T13:13:22+05:00" */ + date?: string; + }; + /** @description The author of the file. Default: The `committer` or the authenticated user if you omit `committer`. */ + author?: { + /** @description The name of the author or committer of the commit. You'll receive a `422` status code if `name` is omitted. */ + name: string; + /** @description The email of the author or committer of the commit. You'll receive a `422` status code if `email` is omitted. */ + email: string; + /** @example "2013-01-15T17:13:22+05:00" */ + date?: string; + }; + }; + }; + }; + }; + /** + * Deletes a file in a repository. + * + * You can provide an additional `committer` parameter, which is an object containing information about the committer. Or, you can provide an `author` parameter, which is an object containing information about the author. + * + * The `author` section is optional and is filled in with the `committer` information if omitted. If the `committer` information is omitted, the authenticated user's information is used. + * + * You must provide values for both `name` and `email`, whether you choose to use `author` or `committer`. Otherwise, you'll receive a `422` status code. + */ + "repos/delete-file": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** path parameter */ + path: string; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["file-commit"]; + }; + }; + 404: components["responses"]["not_found"]; + 409: components["responses"]["conflict"]; + 422: components["responses"]["validation_failed"]; + 503: components["responses"]["service_unavailable"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The commit message. */ + message: string; + /** @description The blob SHA of the file being replaced. */ + sha: string; + /** @description The branch name. 
Default: the repository’s default branch (usually `master`) */ + branch?: string; + /** @description object containing information about the committer. */ + committer?: { + /** @description The name of the author (or committer) of the commit */ + name?: string; + /** @description The email of the author (or committer) of the commit */ + email?: string; + }; + /** @description object containing information about the author. */ + author?: { + /** @description The name of the author (or committer) of the commit */ + name?: string; + /** @description The email of the author (or committer) of the commit */ + email?: string; + }; + }; + }; + }; + }; + /** + * Lists contributors to the specified repository and sorts them by the number of commits per contributor in descending order. This endpoint may return information that is a few hours old because the GitHub REST API v3 caches contributor data to improve performance. + * + * GitHub identifies contributors by author email address. This endpoint groups contribution counts by GitHub user, which includes all associated email addresses. To improve performance, only the first 500 author email addresses in the repository link to GitHub users. The rest will appear as anonymous contributors without associated GitHub user information. + */ + "repos/list-contributors": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + query: { + /** Set to `1` or `true` to include anonymous contributors in results. */ + anon?: string; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** if repository contains content */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["contributor"][]; + }; + }; + /** Response if repository is empty */ + 204: never; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** Lists all secrets available in a repository without revealing their encrypted values. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` repository permission to use this endpoint. */ + "dependabot/list-repo-secrets": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": { + total_count: number; + secrets: components["schemas"]["dependabot-secret"][]; + }; + }; + }; + }; + }; + /** Gets your public key, which you need to encrypt secrets. You need to encrypt a secret before you can create or update secrets. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. 
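A possible way to exercise `repos/get-content` and `repos/create-or-update-file-contents` together, following the Base64 and `sha` requirements described above. The append-a-line behaviour, file path, and credentials are assumptions for the sketch.

```typescript
import { Octokit } from "@octokit/core";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

async function appendLine(owner: string, repo: string, path: string, line: string) {
  // Read the current contents; the response may describe a file, directory,
  // symlink, or submodule, so narrow before using file-only fields.
  const existing = await octokit.request("GET /repos/{owner}/{repo}/contents/{path}", {
    owner,
    repo,
    path,
  });
  const data = existing.data;
  if (Array.isArray(data) || !("content" in data)) {
    throw new Error(`${path} is not a regular file`);
  }

  const current = Buffer.from(data.content, "base64").toString("utf8");
  const updated = Buffer.from(current + line + "\n").toString("base64");

  // PUT /repos/{owner}/{repo}/contents/{path};
  // the existing blob `sha` is required when replacing a file.
  await octokit.request("PUT /repos/{owner}/{repo}/contents/{path}", {
    owner,
    repo,
    path,
    message: `Append a line to ${path}`,
    content: updated,
    sha: data.sha,
  });
}
```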
GitHub Apps must have the `dependabot_secrets` repository permission to use this endpoint. */ + "dependabot/get-repo-public-key": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["dependabot-public-key"]; + }; + }; + }; + }; + /** Gets a single repository secret without revealing its encrypted value. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` repository permission to use this endpoint. */ + "dependabot/get-repo-secret": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The name of the secret. */ + secret_name: components["parameters"]["secret-name"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["dependabot-secret"]; + }; + }; + }; + }; + /** + * Creates or updates a repository secret with an encrypted value. Encrypt your secret using + * [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages). You must authenticate using an access + * token with the `repo` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` repository + * permission to use this endpoint. + * + * #### Example encrypting a secret using Node.js + * + * Encrypt your secret using the [tweetsodium](https://github.com/github/tweetsodium) library. + * + * ``` + * const sodium = require('tweetsodium'); + * + * const key = "base64-encoded-public-key"; + * const value = "plain-text-secret"; + * + * // Convert the message and key to Uint8Array's (Buffer implements that interface) + * const messageBytes = Buffer.from(value); + * const keyBytes = Buffer.from(key, 'base64'); + * + * // Encrypt using LibSodium. + * const encryptedBytes = sodium.seal(messageBytes, keyBytes); + * + * // Base64 the encrypted secret + * const encrypted = Buffer.from(encryptedBytes).toString('base64'); + * + * console.log(encrypted); + * ``` + * + * + * #### Example encrypting a secret using Python + * + * Encrypt your secret using [pynacl](https://pynacl.readthedocs.io/en/latest/public/#nacl-public-sealedbox) with Python 3. + * + * ``` + * from base64 import b64encode + * from nacl import encoding, public + * + * def encrypt(public_key: str, secret_value: str) -> str: + * """Encrypt a Unicode string using the public key.""" + * public_key = public.PublicKey(public_key.encode("utf-8"), encoding.Base64Encoder()) + * sealed_box = public.SealedBox(public_key) + * encrypted = sealed_box.encrypt(secret_value.encode("utf-8")) + * return b64encode(encrypted).decode("utf-8") + * ``` + * + * #### Example encrypting a secret using C# + * + * Encrypt your secret using the [Sodium.Core](https://www.nuget.org/packages/Sodium.Core/) package. 
+ * + * ``` + * var secretValue = System.Text.Encoding.UTF8.GetBytes("mySecret"); + * var publicKey = Convert.FromBase64String("2Sg8iYjAxxmI2LvUXpJjkYrMxURPc8r+dB7TJyvvcCU="); + * + * var sealedPublicKeyBox = Sodium.SealedPublicKeyBox.Create(secretValue, publicKey); + * + * Console.WriteLine(Convert.ToBase64String(sealedPublicKeyBox)); + * ``` + * + * #### Example encrypting a secret using Ruby + * + * Encrypt your secret using the [rbnacl](https://github.com/RubyCrypto/rbnacl) gem. + * + * ```ruby + * require "rbnacl" + * require "base64" + * + * key = Base64.decode64("+ZYvJDZMHUfBkJdyq5Zm9SKqeuBQ4sj+6sfjlH4CgG0=") + * public_key = RbNaCl::PublicKey.new(key) + * + * box = RbNaCl::Boxes::Sealed.from_public_key(public_key) + * encrypted_secret = box.encrypt("my_secret") + * + * # Print the base64 encoded secret + * puts Base64.strict_encode64(encrypted_secret) + * ``` + */ + "dependabot/create-or-update-repo-secret": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The name of the secret. */ + secret_name: components["parameters"]["secret-name"]; + }; + }; + responses: { + /** Response when creating a secret */ + 201: { + content: { + "application/json": components["schemas"]["empty-object"]; + }; + }; + /** Response when updating a secret */ + 204: never; + }; + requestBody: { + content: { + "application/json": { + /** @description Value for your secret, encrypted with [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages) using the public key retrieved from the [Get a repository public key](https://docs.github.com/rest/reference/dependabot#get-a-repository-public-key) endpoint. */ + encrypted_value?: string; + /** @description ID of the key you used to encrypt the secret. */ + key_id?: string; + }; + }; + }; + }; + /** Deletes a secret in a repository using the secret name. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` repository permission to use this endpoint. */ + "dependabot/delete-repo-secret": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The name of the secret. */ + secret_name: components["parameters"]["secret-name"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** Gets the diff of the dependency changes between two commits of a repository, based on the changes to the dependency manifests made in those commits. */ + "dependency-graph/diff-range": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The base and head Git revisions to compare. The Git revisions will be resolved to commit SHAs. Named revisions will be resolved to their corresponding HEAD commits, and an appropriate merge base will be determined. This parameter expects the format `{base}...{head}`. */ + basehead: string; + }; + query: { + /** The full path, relative to the repository root, of the dependency manifest file. 
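A sketch that strings together `dependabot/get-repo-public-key` and `dependabot/create-or-update-repo-secret`, reusing the tweetsodium sealing shown in the Node.js example above. The secret name, value, and repository identifiers are placeholders.

```typescript
import { Octokit } from "@octokit/core";
// tweetsodium ships without TypeScript types, so require() is used here.
const sodium = require("tweetsodium");

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

async function putDependabotSecret(owner: string, repo: string, name: string, value: string) {
  // 1. Fetch the repository public key used for sealing secrets.
  const { data: publicKey } = await octokit.request(
    "GET /repos/{owner}/{repo}/dependabot/secrets/public-key",
    { owner, repo }
  );

  // 2. Seal the plain-text value with LibSodium, as in the example above.
  const messageBytes = Buffer.from(value);
  const keyBytes = Buffer.from(publicKey.key, "base64");
  const encryptedBytes: Uint8Array = sodium.seal(messageBytes, keyBytes);

  // 3. Store the encrypted value together with the key id.
  await octokit.request("PUT /repos/{owner}/{repo}/dependabot/secrets/{secret_name}", {
    owner,
    repo,
    secret_name: name,
    encrypted_value: Buffer.from(encryptedBytes).toString("base64"),
    key_id: publicKey.key_id,
  });
}
```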
*/ + name?: components["parameters"]["manifest-path"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["dependency-graph-diff"]; + }; + }; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** Create a new snapshot of a repository's dependencies. You must authenticate using an access token with the `repo` scope to use this endpoint for a repository that the requesting user has access to. */ + "dependency-graph/create-repository-snapshot": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 201: { + content: { + "application/json": { + /** @description ID of the created snapshot. */ + id: number; + /** @description The time at which the snapshot was created. */ + created_at: string; + /** @description Either "SUCCESS", "ACCEPTED", or "INVALID". "SUCCESS" indicates that the snapshot was successfully created and the repository's dependencies were updated. "ACCEPTED" indicates that the snapshot was successfully created, but the repository's dependencies were not updated. "INVALID" indicates that the snapshot was malformed. */ + result: string; + /** @description A message providing further details about the result, such as why the dependencies were not updated. */ + message: string; + }; + }; + }; + }; + requestBody: { + content: { + "application/json": components["schemas"]["snapshot"]; + }; + }; + }; + /** Simple filtering of deployments is available via query parameters: */ + "repos/list-deployments": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + query: { + /** The SHA recorded at creation time. */ + sha?: string; + /** The name of the ref. This can be a branch, tag, or SHA. */ + ref?: string; + /** The name of the task for the deployment (e.g., `deploy` or `deploy:migrations`). */ + task?: string; + /** The name of the environment that was deployed to (e.g., `staging` or `production`). */ + environment?: string | null; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["deployment"][]; + }; + }; + }; + }; + /** + * Deployments offer a few configurable parameters with certain defaults. + * + * The `ref` parameter can be any named branch, tag, or SHA. At GitHub we often deploy branches and verify them + * before we merge a pull request. + * + * The `environment` parameter allows deployments to be issued to different runtime environments. Teams often have + * multiple environments for verifying their applications, such as `production`, `staging`, and `qa`. This parameter + * makes it easier to track which environments have requested deployments. The default environment is `production`. + * + * The `auto_merge` parameter is used to ensure that the requested ref is not behind the repository's default branch. 
If + * the ref _is_ behind the default branch for the repository, we will attempt to merge it for you. If the merge succeeds, + * the API will return a successful merge commit. If merge conflicts prevent the merge from succeeding, the API will + * return a failure response. + * + * By default, [commit statuses](https://docs.github.com/rest/commits/statuses) for every submitted context must be in a `success` + * state. The `required_contexts` parameter allows you to specify a subset of contexts that must be `success`, or to + * specify contexts that have not yet been submitted. You are not required to use commit statuses to deploy. If you do + * not require any contexts or create any commit statuses, the deployment will always succeed. + * + * The `payload` parameter is available for any extra information that a deployment system might need. It is a JSON text + * field that will be passed on when a deployment event is dispatched. + * + * The `task` parameter is used by the deployment system to allow different execution paths. In the web world this might + * be `deploy:migrations` to run schema changes on the system. In the compiled world this could be a flag to compile an + * application with debugging enabled. + * + * Users with `repo` or `repo_deployment` scopes can create a deployment for a given ref. + * + * #### Merged branch response + * You will see this response when GitHub automatically merges the base branch into the topic branch instead of creating + * a deployment. This auto-merge happens when: + * * Auto-merge option is enabled in the repository + * * Topic branch does not include the latest changes on the base branch, which is `master` in the response example + * * There are no merge conflicts + * + * If there are no new commits in the base branch, a new request to create a deployment should give a successful + * response. + * + * #### Merge conflict response + * This error happens when the `auto_merge` option is enabled and when the default branch (in this case `master`), can't + * be merged into the branch that's being deployed (in this case `topic-branch`), due to merge conflicts. + * + * #### Failed commit status checks + * This error happens when the `required_contexts` parameter indicates that one or more contexts need to have a `success` + * status for the commit to be deployed, but one or more of the required contexts do not have a state of `success`. + */ + "repos/create-deployment": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 201: { + content: { + "application/json": components["schemas"]["deployment"]; + }; + }; + /** Merged branch response */ + 202: { + content: { + "application/json": { + message?: string; + }; + }; + }; + /** Conflict when there is a merge conflict or the commit's status checks failed */ + 409: unknown; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The ref to deploy. This can be a branch, tag, or SHA. */ + ref: string; + /** + * @description Specifies a task to execute (e.g., `deploy` or `deploy:migrations`). + * @default deploy + */ + task?: string; + /** + * @description Attempts to automatically merge the default branch into the requested ref, if it's behind the default branch. 
+ * @default true + */ + auto_merge?: boolean; + /** @description The [status](https://docs.github.com/rest/commits/statuses) contexts to verify against commit status checks. If you omit this parameter, GitHub verifies all unique contexts before creating a deployment. To bypass checking entirely, pass an empty array. Defaults to all unique contexts. */ + required_contexts?: string[]; + payload?: { [key: string]: unknown } | string; + /** + * @description Name for the target deployment environment (e.g., `production`, `staging`, `qa`). + * @default production + */ + environment?: string; + /** + * @description Short description of the deployment. + * @default + */ + description?: string | null; + /** + * @description Specifies if the given environment is specific to the deployment and will no longer exist at some point in the future. Default: `false` + * @default false + */ + transient_environment?: boolean; + /** @description Specifies if the given environment is one that end-users directly interact with. Default: `true` when `environment` is `production` and `false` otherwise. */ + production_environment?: boolean; + }; + }; + }; + }; + "repos/get-deployment": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** deployment_id parameter */ + deployment_id: components["parameters"]["deployment-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["deployment"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * If the repository only has one deployment, you can delete the deployment regardless of its status. If the repository has more than one deployment, you can only delete inactive deployments. This ensures that repositories with multiple deployments will always have an active deployment. Anyone with `repo` or `repo_deployment` scopes can delete a deployment. + * + * To set a deployment as inactive, you must: + * + * * Create a new deployment that is active so that the system has a record of the current state, then delete the previously active deployment. + * * Mark the active deployment as inactive by adding any non-successful deployment status. + * + * For more information, see "[Create a deployment](https://docs.github.com/rest/reference/repos/#create-a-deployment)" and "[Create a deployment status](https://docs.github.com/rest/reference/repos#create-a-deployment-status)." + */ + "repos/delete-deployment": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** deployment_id parameter */ + deployment_id: components["parameters"]["deployment-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed_simple"]; + }; + }; + /** Users with pull access can view deployment statuses for a deployment: */ + "repos/list-deployment-statuses": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. 
*/ + repo: components["parameters"]["repo"]; + /** deployment_id parameter */ + deployment_id: components["parameters"]["deployment-id"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["deployment-status"][]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Users with `push` access can create deployment statuses for a given deployment. + * + * GitHub Apps require `read & write` access to "Deployments" and `read-only` access to "Repo contents" (for private repos). OAuth Apps require the `repo_deployment` scope. + */ + "repos/create-deployment-status": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** deployment_id parameter */ + deployment_id: components["parameters"]["deployment-id"]; + }; + }; + responses: { + /** Response */ + 201: { + headers: { + Location?: string; + }; + content: { + "application/json": components["schemas"]["deployment-status"]; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** + * @description The state of the status. When you set a transient deployment to `inactive`, the deployment will be shown as `destroyed` in GitHub. + * @enum {string} + */ + state: + | "error" + | "failure" + | "inactive" + | "in_progress" + | "queued" + | "pending" + | "success"; + /** + * @description The target URL to associate with this status. This URL should contain output to keep the user updated while the task is running or serve as historical information for what happened in the deployment. **Note:** It's recommended to use the `log_url` parameter, which replaces `target_url`. + * @default + */ + target_url?: string; + /** + * @description The full URL of the deployment's output. This parameter replaces `target_url`. We will continue to accept `target_url` to support legacy uses, but we recommend replacing `target_url` with `log_url`. Setting `log_url` will automatically set `target_url` to the same value. Default: `""` + * @default + */ + log_url?: string; + /** + * @description A short description of the status. The maximum description length is 140 characters. + * @default + */ + description?: string; + /** + * @description Name for the target deployment environment, which can be changed when setting a deploy status. For example, `production`, `staging`, or `qa`. + * @enum {string} + */ + environment?: "production" | "staging" | "qa"; + /** + * @description Sets the URL for accessing your environment. Default: `""` + * @default + */ + environment_url?: string; + /** @description Adds a new `inactive` status to all prior non-transient, non-production environment deployments with the same repository and `environment` name as the created status's deployment. An `inactive` status is only added to deployments that had a `success` state. Default: `true` */ + auto_inactive?: boolean; + }; + }; + }; + }; + /** Users with pull access can view a deployment status for a deployment: */ + "repos/get-deployment-status": { + parameters: { + path: { + /** The account owner of the repository. 
The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** deployment_id parameter */ + deployment_id: components["parameters"]["deployment-id"]; + status_id: number; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["deployment-status"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * You can use this endpoint to trigger a webhook event called `repository_dispatch` when you want activity that happens outside of GitHub to trigger a GitHub Actions workflow or GitHub App webhook. You must configure your GitHub Actions workflow or GitHub App to run when the `repository_dispatch` event occurs. For an example `repository_dispatch` webhook payload, see "[RepositoryDispatchEvent](https://docs.github.com/webhooks/event-payloads/#repository_dispatch)." + * + * The `client_payload` parameter is available for any extra information that your workflow might need. This parameter is a JSON payload that will be passed on when the webhook event is dispatched. For example, the `client_payload` can include a message that a user would like to send using a GitHub Actions workflow. Or the `client_payload` can be used as a test to debug your workflow. + * + * This endpoint requires write access to the repository by providing either: + * + * - Personal access tokens with `repo` scope. For more information, see "[Creating a personal access token for the command line](https://docs.github.com/articles/creating-a-personal-access-token-for-the-command-line)" in the GitHub Help documentation. + * - GitHub Apps with both `metadata:read` and `contents:read&write` permissions. + * + * This input example shows how you can use the `client_payload` as a test to debug your workflow. + */ + "repos/create-dispatch-event": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 204: never; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description A custom webhook event name. Must be 100 characters or fewer. */ + event_type: string; + /** @description JSON payload with extra information about the webhook event that your action or workflow may use. */ + client_payload?: { [key: string]: unknown }; + }; + }; + }; + }; + /** + * Get all environments for a repository. + * + * Anyone with read access to the repository can use this endpoint. If the repository is private, you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + "repos/get-all-environments": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. 
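A brief sketch of firing a `repository_dispatch` event via `repos/create-dispatch-event`, as described above. The event name and `client_payload` fields are made up for illustration.

```typescript
import { Octokit } from "@octokit/core";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

async function triggerWorkflow(owner: string, repo: string) {
  // POST /repos/{owner}/{repo}/dispatches returns 204 with no body.
  await octokit.request("POST /repos/{owner}/{repo}/dispatches", {
    owner,
    repo,
    event_type: "run-nightly-build",             // must be 100 characters or fewer
    client_payload: { reason: "manual trigger" }, // arbitrary JSON passed to the workflow
  });
}
```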
*/ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": { + /** + * @description The number of environments in this repository + * @example 5 + */ + total_count?: number; + environments?: components["schemas"]["environment"][]; + }; + }; + }; + }; + }; + /** Anyone with read access to the repository can use this endpoint. If the repository is private, you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. */ + "repos/get-environment": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The name of the environment */ + environment_name: components["parameters"]["environment-name"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["environment"]; + }; + }; + }; + }; + /** + * Create or update an environment with protection rules, such as required reviewers. For more information about environment protection rules, see "[Environments](/actions/reference/environments#environment-protection-rules)." + * + * **Note:** Although you can use this operation to specify that only branches that match specified name patterns can deploy to this environment, you must use the UI to set the name patterns. For more information, see "[Environments](/actions/reference/environments#deployment-branches)." + * + * **Note:** To create or update secrets for an environment, see "[Secrets](/rest/reference/actions#secrets)." + * + * You must authenticate using an access token with the repo scope to use this endpoint. + */ + "repos/create-or-update-environment": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The name of the environment */ + environment_name: components["parameters"]["environment-name"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["environment"]; + }; + }; + /** Validation error when the environment name is invalid or when `protected_branches` and `custom_branch_policies` in `deployment_branch_policy` are set to the same value */ + 422: { + content: { + "application/json": components["schemas"]["basic-error"]; + }; + }; + }; + requestBody: { + content: { + "application/json": { + wait_timer?: components["schemas"]["wait-timer"]; + /** @description The people or teams that may review jobs that reference the environment. You can list up to six users or teams as reviewers. The reviewers must have at least read access to the repository. Only one of the required reviewers needs to approve the job for it to proceed. */ + reviewers?: + | { + type?: components["schemas"]["deployment-reviewer-type"]; + /** + * @description The id of the user or team who can review the deployment + * @example 4532992 + */ + id?: number; + }[] + | null; + deployment_branch_policy?: components["schemas"]["deployment-branch-policy"]; + } | null; + }; + }; + }; + /** You must authenticate using an access token with the repo scope to use this endpoint. 
*/ + "repos/delete-an-environment": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The name of the environment */ + environment_name: components["parameters"]["environment-name"]; + }; + }; + responses: { + /** Default response */ + 204: never; + }; + }; + "activity/list-repo-events": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["event"][]; + }; + }; + }; + }; + "repos/list-forks": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + query: { + /** The sort order. Can be either `newest`, `oldest`, or `stargazers`. */ + sort?: "newest" | "oldest" | "stargazers" | "watchers"; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["minimal-repository"][]; + }; + }; + 400: components["responses"]["bad_request"]; + }; + }; + /** + * Create a fork for the authenticated user. + * + * **Note**: Forking a Repository happens asynchronously. You may have to wait a short period of time before you can access the git objects. If this takes longer than 5 minutes, be sure to contact [GitHub Support](https://support.github.com/contact?tags=dotcom-rest-api). + */ + "repos/create-fork": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 202: { + content: { + "application/json": components["schemas"]["full-repository"]; + }; + }; + 400: components["responses"]["bad_request"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description Optional parameter to specify the organization name if forking into an organization. */ + organization?: string; + /** @description When forking from an existing repository, a new name for the fork. */ + name?: string; + } | null; + }; + }; + }; + "git/create-blob": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. 
*/ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 201: { + headers: { + Location?: string; + }; + content: { + "application/json": components["schemas"]["short-blob"]; + }; + }; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 409: components["responses"]["conflict"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The new blob's content. */ + content: string; + /** + * @description The encoding used for `content`. Currently, `"utf-8"` and `"base64"` are supported. + * @default utf-8 + */ + encoding?: string; + }; + }; + }; + }; + /** + * The `content` in the response will always be Base64 encoded. + * + * _Note_: This API supports blobs up to 100 megabytes in size. + */ + "git/get-blob": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + file_sha: string; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["blob"]; + }; + }; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Creates a new Git [commit object](https://git-scm.com/book/en/v1/Git-Internals-Git-Objects#Commit-Objects). + * + * **Signature verification object** + * + * The response will include a `verification` object that describes the result of verifying the commit's signature. The following fields are included in the `verification` object: + * + * | Name | Type | Description | + * | ---- | ---- | ----------- | + * | `verified` | `boolean` | Indicates whether GitHub considers the signature in this commit to be verified. | + * | `reason` | `string` | The reason for verified value. Possible values and their meanings are enumerated in the table below. | + * | `signature` | `string` | The signature that was extracted from the commit. | + * | `payload` | `string` | The value that was signed. | + * + * These are the possible values for `reason` in the `verification` object: + * + * | Value | Description | + * | ----- | ----------- | + * | `expired_key` | The key that made the signature is expired. | + * | `not_signing_key` | The "signing" flag is not among the usage flags in the GPG key that made the signature. | + * | `gpgverify_error` | There was an error communicating with the signature verification service. | + * | `gpgverify_unavailable` | The signature verification service is currently unavailable. | + * | `unsigned` | The object does not include a signature. | + * | `unknown_signature_type` | A non-PGP signature was found in the commit. | + * | `no_user` | No user was associated with the `committer` email address in the commit. | + * | `unverified_email` | The `committer` email address in the commit was associated with a user, but the email address is not verified on her/his account. | + * | `bad_email` | The `committer` email address in the commit is not included in the identities of the PGP key that made the signature. | + * | `unknown_key` | The key that made the signature has not been registered with any user's account. | + * | `malformed_signature` | There was an error parsing the signature. 
| + * | `invalid` | The signature could not be cryptographically verified using the key whose key-id was found in the signature. | + * | `valid` | None of the above errors applied, so the signature is considered to be verified. | + */ + "git/create-commit": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 201: { + headers: { + Location?: string; + }; + content: { + "application/json": components["schemas"]["git-commit"]; + }; + }; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The commit message */ + message: string; + /** @description The SHA of the tree object this commit points to */ + tree: string; + /** @description The SHAs of the commits that were the parents of this commit. If omitted or empty, the commit will be written as a root commit. For a single parent, an array of one SHA should be provided; for a merge commit, an array of more than one should be provided. */ + parents?: string[]; + /** @description Information about the author of the commit. By default, the `author` will be the authenticated user and the current date. See the `author` and `committer` object below for details. */ + author?: { + /** @description The name of the author (or committer) of the commit */ + name: string; + /** @description The email of the author (or committer) of the commit */ + email: string; + /** + * Format: date-time + * @description Indicates when this commit was authored (or committed). This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + date?: string; + }; + /** @description Information about the person who is making the commit. By default, `committer` will use the information set in `author`. See the `author` and `committer` object below for details. */ + committer?: { + /** @description The name of the author (or committer) of the commit */ + name?: string; + /** @description The email of the author (or committer) of the commit */ + email?: string; + /** + * Format: date-time + * @description Indicates when this commit was authored (or committed). This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + date?: string; + }; + /** @description The [PGP signature](https://en.wikipedia.org/wiki/Pretty_Good_Privacy) of the commit. GitHub adds the signature to the `gpgsig` header of the created commit. For a commit signature to be verifiable by Git or GitHub, it must be an ASCII-armored detached PGP signature over the string commit as it would be written to the object database. To pass a `signature` parameter, you need to first manually create a valid PGP signature, which can be complicated. You may find it easier to [use the command line](https://git-scm.com/book/id/v2/Git-Tools-Signing-Your-Work) to create signed commits. */ + signature?: string; + }; + }; + }; + }; + /** + * Gets a Git [commit object](https://git-scm.com/book/en/v1/Git-Internals-Git-Objects#Commit-Objects). + * + * **Signature verification object** + * + * The response will include a `verification` object that describes the result of verifying the commit's signature. 
The following fields are included in the `verification` object: + * + * | Name | Type | Description | + * | ---- | ---- | ----------- | + * | `verified` | `boolean` | Indicates whether GitHub considers the signature in this commit to be verified. | + * | `reason` | `string` | The reason for verified value. Possible values and their meanings are enumerated in the table below. | + * | `signature` | `string` | The signature that was extracted from the commit. | + * | `payload` | `string` | The value that was signed. | + * + * These are the possible values for `reason` in the `verification` object: + * + * | Value | Description | + * | ----- | ----------- | + * | `expired_key` | The key that made the signature is expired. | + * | `not_signing_key` | The "signing" flag is not among the usage flags in the GPG key that made the signature. | + * | `gpgverify_error` | There was an error communicating with the signature verification service. | + * | `gpgverify_unavailable` | The signature verification service is currently unavailable. | + * | `unsigned` | The object does not include a signature. | + * | `unknown_signature_type` | A non-PGP signature was found in the commit. | + * | `no_user` | No user was associated with the `committer` email address in the commit. | + * | `unverified_email` | The `committer` email address in the commit was associated with a user, but the email address is not verified on her/his account. | + * | `bad_email` | The `committer` email address in the commit is not included in the identities of the PGP key that made the signature. | + * | `unknown_key` | The key that made the signature has not been registered with any user's account. | + * | `malformed_signature` | There was an error parsing the signature. | + * | `invalid` | The signature could not be cryptographically verified using the key whose key-id was found in the signature. | + * | `valid` | None of the above errors applied, so the signature is considered to be verified. | + */ + "git/get-commit": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The SHA of the commit. */ + commit_sha: components["parameters"]["commit-sha"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["git-commit"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Returns an array of references from your Git database that match the supplied name. The `:ref` in the URL must be formatted as `heads/` for branches and `tags/` for tags. If the `:ref` doesn't exist in the repository, but existing refs start with `:ref`, they will be returned as an array. + * + * When you use this endpoint without providing a `:ref`, it will return an array of all the references from your Git database, including notes and stashes if they exist on the server. Anything in the namespace is returned, not just `heads` and `tags`. + * + * **Note:** You need to explicitly [request a pull request](https://docs.github.com/rest/reference/pulls#get-a-pull-request) to trigger a test merge commit, which checks the mergeability of pull requests. For more information, see "[Checking mergeability of pull requests](https://docs.github.com/rest/guides/getting-started-with-the-git-database-api#checking-mergeability-of-pull-requests)". 
+ * + * If you request matching references for a branch named `feature` but the branch `feature` doesn't exist, the response can still include other matching head refs that start with the word `feature`, such as `featureA` and `featureB`. + */ + "git/list-matching-refs": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** ref parameter */ + ref: string; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["git-ref"][]; + }; + }; + }; + }; + /** + * Returns a single reference from your Git database. The `:ref` in the URL must be formatted as `heads/` for branches and `tags/` for tags. If the `:ref` doesn't match an existing ref, a `404` is returned. + * + * **Note:** You need to explicitly [request a pull request](https://docs.github.com/rest/reference/pulls#get-a-pull-request) to trigger a test merge commit, which checks the mergeability of pull requests. For more information, see "[Checking mergeability of pull requests](https://docs.github.com/rest/guides/getting-started-with-the-git-database-api#checking-mergeability-of-pull-requests)". + */ + "git/get-ref": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** ref parameter */ + ref: string; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["git-ref"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** Creates a reference for your repository. You are unable to create new references for empty repositories, even if the commit SHA-1 hash used exists. Empty repositories are repositories without branches. */ + "git/create-ref": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 201: { + headers: { + Location?: string; + }; + content: { + "application/json": components["schemas"]["git-ref"]; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The name of the fully qualified reference (ie: `refs/heads/master`). If it doesn't start with 'refs' and have at least two slashes, it will be rejected. */ + ref: string; + /** @description The SHA1 value for this reference. */ + sha: string; + /** @example "refs/heads/newbranch" */ + key?: string; + }; + }; + }; + }; + "git/delete-ref": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. 
*/ + repo: components["parameters"]["repo"]; + /** ref parameter */ + ref: string; + }; + }; + responses: { + /** Response */ + 204: never; + 422: components["responses"]["validation_failed"]; + }; + }; + "git/update-ref": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** ref parameter */ + ref: string; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["git-ref"]; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The SHA1 value to set this reference to */ + sha: string; + /** + * @description Indicates whether to force the update or to make sure the update is a fast-forward update. Leaving this out or setting it to `false` will make sure you're not overwriting work. + * @default false + */ + force?: boolean; + }; + }; + }; + }; + /** + * Note that creating a tag object does not create the reference that makes a tag in Git. If you want to create an annotated tag in Git, you have to do this call to create the tag object, and then [create](https://docs.github.com/rest/reference/git#create-a-reference) the `refs/tags/[tag]` reference. If you want to create a lightweight tag, you only have to [create](https://docs.github.com/rest/reference/git#create-a-reference) the tag reference - this call would be unnecessary. + * + * **Signature verification object** + * + * The response will include a `verification` object that describes the result of verifying the commit's signature. The following fields are included in the `verification` object: + * + * | Name | Type | Description | + * | ---- | ---- | ----------- | + * | `verified` | `boolean` | Indicates whether GitHub considers the signature in this commit to be verified. | + * | `reason` | `string` | The reason for verified value. Possible values and their meanings are enumerated in table below. | + * | `signature` | `string` | The signature that was extracted from the commit. | + * | `payload` | `string` | The value that was signed. | + * + * These are the possible values for `reason` in the `verification` object: + * + * | Value | Description | + * | ----- | ----------- | + * | `expired_key` | The key that made the signature is expired. | + * | `not_signing_key` | The "signing" flag is not among the usage flags in the GPG key that made the signature. | + * | `gpgverify_error` | There was an error communicating with the signature verification service. | + * | `gpgverify_unavailable` | The signature verification service is currently unavailable. | + * | `unsigned` | The object does not include a signature. | + * | `unknown_signature_type` | A non-PGP signature was found in the commit. | + * | `no_user` | No user was associated with the `committer` email address in the commit. | + * | `unverified_email` | The `committer` email address in the commit was associated with a user, but the email address is not verified on her/his account. | + * | `bad_email` | The `committer` email address in the commit is not included in the identities of the PGP key that made the signature. | + * | `unknown_key` | The key that made the signature has not been registered with any user's account. | + * | `malformed_signature` | There was an error parsing the signature. 
| + * | `invalid` | The signature could not be cryptographically verified using the key whose key-id was found in the signature. | + * | `valid` | None of the above errors applied, so the signature is considered to be verified. | + */ + "git/create-tag": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 201: { + headers: { + Location?: string; + }; + content: { + "application/json": components["schemas"]["git-tag"]; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The tag's name. This is typically a version (e.g., "v0.0.1"). */ + tag: string; + /** @description The tag message. */ + message: string; + /** @description The SHA of the git object this is tagging. */ + object: string; + /** + * @description The type of the object we're tagging. Normally this is a `commit` but it can also be a `tree` or a `blob`. + * @enum {string} + */ + type: "commit" | "tree" | "blob"; + /** @description An object with information about the individual creating the tag. */ + tagger?: { + /** @description The name of the author of the tag */ + name: string; + /** @description The email of the author of the tag */ + email: string; + /** + * Format: date-time + * @description When this object was tagged. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + date?: string; + }; + }; + }; + }; + }; + /** + * **Signature verification object** + * + * The response will include a `verification` object that describes the result of verifying the commit's signature. The following fields are included in the `verification` object: + * + * | Name | Type | Description | + * | ---- | ---- | ----------- | + * | `verified` | `boolean` | Indicates whether GitHub considers the signature in this commit to be verified. | + * | `reason` | `string` | The reason for verified value. Possible values and their meanings are enumerated in table below. | + * | `signature` | `string` | The signature that was extracted from the commit. | + * | `payload` | `string` | The value that was signed. | + * + * These are the possible values for `reason` in the `verification` object: + * + * | Value | Description | + * | ----- | ----------- | + * | `expired_key` | The key that made the signature is expired. | + * | `not_signing_key` | The "signing" flag is not among the usage flags in the GPG key that made the signature. | + * | `gpgverify_error` | There was an error communicating with the signature verification service. | + * | `gpgverify_unavailable` | The signature verification service is currently unavailable. | + * | `unsigned` | The object does not include a signature. | + * | `unknown_signature_type` | A non-PGP signature was found in the commit. | + * | `no_user` | No user was associated with the `committer` email address in the commit. | + * | `unverified_email` | The `committer` email address in the commit was associated with a user, but the email address is not verified on her/his account. | + * | `bad_email` | The `committer` email address in the commit is not included in the identities of the PGP key that made the signature. | + * | `unknown_key` | The key that made the signature has not been registered with any user's account. 
| + * | `malformed_signature` | There was an error parsing the signature. | + * | `invalid` | The signature could not be cryptographically verified using the key whose key-id was found in the signature. | + * | `valid` | None of the above errors applied, so the signature is considered to be verified. | + */ + "git/get-tag": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + tag_sha: string; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["git-tag"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * The tree creation API accepts nested entries. If you specify both a tree and a nested path modifying that tree, this endpoint will overwrite the contents of the tree with the new path contents, and create a new tree structure. + * + * If you use this endpoint to add, delete, or modify the file contents in a tree, you will need to commit the tree and then update a branch to point to the commit. For more information see "[Create a commit](https://docs.github.com/rest/reference/git#create-a-commit)" and "[Update a reference](https://docs.github.com/rest/reference/git#update-a-reference)." + */ + "git/create-tree": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 201: { + headers: { + Location?: string; + }; + content: { + "application/json": components["schemas"]["git-tree"]; + }; + }; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description Objects (of `path`, `mode`, `type`, and `sha`) specifying a tree structure. */ + tree: { + /** @description The file referenced in the tree. */ + path?: string; + /** + * @description The file mode; one of `100644` for file (blob), `100755` for executable (blob), `040000` for subdirectory (tree), `160000` for submodule (commit), or `120000` for a blob that specifies the path of a symlink. + * @enum {string} + */ + mode?: "100644" | "100755" | "040000" | "160000" | "120000"; + /** + * @description Either `blob`, `tree`, or `commit`. + * @enum {string} + */ + type?: "blob" | "tree" | "commit"; + /** + * @description The SHA1 checksum ID of the object in the tree. Also called `tree.sha`. If the value is `null` then the file will be deleted. + * + * **Note:** Use either `tree.sha` or `content` to specify the contents of the entry. Using both `tree.sha` and `content` will return an error. + */ + sha?: string | null; + /** + * @description The content you want this file to have. GitHub will write this blob out and use that SHA for this entry. Use either this, or `tree.sha`. + * + * **Note:** Use either `tree.sha` or `content` to specify the contents of the entry. Using both `tree.sha` and `content` will return an error. + */ + content?: string; + }[]; + /** + * @description The SHA1 of an existing Git tree object which will be used as the base for the new tree. 
If provided, a new Git tree object will be created from entries in the Git tree object pointed to by `base_tree` and entries defined in the `tree` parameter. Entries defined in the `tree` parameter will overwrite items from `base_tree` with the same `path`. If you're creating new changes on a branch, then normally you'd set `base_tree` to the SHA1 of the Git tree object of the current latest commit on the branch you're working on. + * If not provided, GitHub will create a new Git tree object from only the entries defined in the `tree` parameter. If you create a new commit pointing to such a tree, then all files which were a part of the parent commit's tree and were not defined in the `tree` parameter will be listed as deleted by the new commit. + */ + base_tree?: string; + }; + }; + }; + }; + /** + * Returns a single tree using the SHA1 value for that tree. + * + * If `truncated` is `true` in the response then the number of items in the `tree` array exceeded our maximum limit. If you need to fetch more items, use the non-recursive method of fetching trees, and fetch one sub-tree at a time. + */ + "git/get-tree": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + tree_sha: string; + }; + query: { + /** Setting this parameter to any value returns the objects or subtrees referenced by the tree specified in `:tree_sha`. For example, setting `recursive` to any of the following will enable returning objects or subtrees: `0`, `1`, `"true"`, and `"false"`. Omit this parameter to prevent recursively returning objects or subtrees. */ + recursive?: string; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["git-tree"]; + }; + }; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + "repos/list-webhooks": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["hook"][]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Repositories can have multiple webhooks installed. Each webhook should have a unique `config`. Multiple webhooks can + * share the same `config` as long as those webhooks do not have any `events` that overlap. + */ + "repos/create-webhook": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. 
*/ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 201: { + headers: { + Location?: string; + }; + content: { + "application/json": components["schemas"]["hook"]; + }; + }; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description Use `web` to create a webhook. Default: `web`. This parameter only accepts the value `web`. */ + name?: string; + /** @description Key/value pairs to provide settings for this webhook. [These are defined below](https://docs.github.com/rest/reference/repos#create-hook-config-params). */ + config?: { + url?: components["schemas"]["webhook-config-url"]; + content_type?: components["schemas"]["webhook-config-content-type"]; + secret?: components["schemas"]["webhook-config-secret"]; + insecure_ssl?: components["schemas"]["webhook-config-insecure-ssl"]; + /** @example "abc" */ + token?: string; + /** @example "sha256" */ + digest?: string; + }; + /** + * @description Determines what [events](https://docs.github.com/webhooks/event-payloads) the hook is triggered for. + * @default [ + * "push" + * ] + */ + events?: string[]; + /** + * @description Determines if notifications are sent when the webhook is triggered. Set to `true` to send notifications. + * @default true + */ + active?: boolean; + } | null; + }; + }; + }; + /** Returns a webhook configured in a repository. To get only the webhook `config` properties, see "[Get a webhook configuration for a repository](/rest/reference/repos#get-a-webhook-configuration-for-a-repository)." */ + "repos/get-webhook": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the hook. */ + hook_id: components["parameters"]["hook-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["hook"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + "repos/delete-webhook": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the hook. */ + hook_id: components["parameters"]["hook-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + 404: components["responses"]["not_found"]; + }; + }; + /** Updates a webhook configured in a repository. If you previously had a `secret` set, you must provide the same `secret` or set a new `secret` or the secret will be removed. If you are only updating individual webhook `config` properties, use "[Update a webhook configuration for a repository](/rest/reference/repos#update-a-webhook-configuration-for-a-repository)." */ + "repos/update-webhook": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the hook. 
*/ + hook_id: components["parameters"]["hook-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["hook"]; + }; + }; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description Key/value pairs to provide settings for this webhook. [These are defined below](https://docs.github.com/rest/reference/repos#create-hook-config-params). */ + config?: { + url: components["schemas"]["webhook-config-url"]; + content_type?: components["schemas"]["webhook-config-content-type"]; + secret?: components["schemas"]["webhook-config-secret"]; + insecure_ssl?: components["schemas"]["webhook-config-insecure-ssl"]; + /** @example "bar@example.com" */ + address?: string; + /** @example "The Serious Room" */ + room?: string; + }; + /** + * @description Determines what [events](https://docs.github.com/webhooks/event-payloads) the hook is triggered for. This replaces the entire array of events. + * @default [ + * "push" + * ] + */ + events?: string[]; + /** @description Determines a list of events to be added to the list of events that the Hook triggers for. */ + add_events?: string[]; + /** @description Determines a list of events to be removed from the list of events that the Hook triggers for. */ + remove_events?: string[]; + /** + * @description Determines if notifications are sent when the webhook is triggered. Set to `true` to send notifications. + * @default true + */ + active?: boolean; + }; + }; + }; + }; + /** + * Returns the webhook configuration for a repository. To get more information about the webhook, including the `active` state and `events`, use "[Get a repository webhook](/rest/reference/orgs#get-a-repository-webhook)." + * + * Access tokens must have the `read:repo_hook` or `repo` scope, and GitHub Apps must have the `repository_hooks:read` permission. + */ + "repos/get-webhook-config-for-repo": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the hook. */ + hook_id: components["parameters"]["hook-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["webhook-config"]; + }; + }; + }; + }; + /** + * Updates the webhook configuration for a repository. To update more information about the webhook, including the `active` state and `events`, use "[Update a repository webhook](/rest/reference/orgs#update-a-repository-webhook)." + * + * Access tokens must have the `write:repo_hook` or `repo` scope, and GitHub Apps must have the `repository_hooks:write` permission. + */ + "repos/update-webhook-config-for-repo": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the hook. 
*/ + hook_id: components["parameters"]["hook-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["webhook-config"]; + }; + }; + }; + requestBody: { + content: { + "application/json": { + url?: components["schemas"]["webhook-config-url"]; + content_type?: components["schemas"]["webhook-config-content-type"]; + secret?: components["schemas"]["webhook-config-secret"]; + insecure_ssl?: components["schemas"]["webhook-config-insecure-ssl"]; + }; + }; + }; + }; + /** Returns a list of webhook deliveries for a webhook configured in a repository. */ + "repos/list-webhook-deliveries": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the hook. */ + hook_id: components["parameters"]["hook-id"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Used for pagination: the starting delivery from which the page of deliveries is fetched. Refer to the `link` header for the next and previous page cursors. */ + cursor?: components["parameters"]["cursor"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["hook-delivery-item"][]; + }; + }; + 400: components["responses"]["bad_request"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** Returns a delivery for a webhook configured in a repository. */ + "repos/get-webhook-delivery": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the hook. */ + hook_id: components["parameters"]["hook-id"]; + delivery_id: components["parameters"]["delivery-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["hook-delivery"]; + }; + }; + 400: components["responses"]["bad_request"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** Redeliver a webhook delivery for a webhook configured in a repository. */ + "repos/redeliver-webhook-delivery": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the hook. */ + hook_id: components["parameters"]["hook-id"]; + delivery_id: components["parameters"]["delivery-id"]; + }; + }; + responses: { + 202: components["responses"]["accepted"]; + 400: components["responses"]["bad_request"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** This will trigger a [ping event](https://docs.github.com/webhooks/#ping-event) to be sent to the hook. */ + "repos/ping-webhook": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the hook. 
*/ + hook_id: components["parameters"]["hook-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + 404: components["responses"]["not_found"]; + }; + }; + /** + * This will trigger the hook with the latest push to the current repository if the hook is subscribed to `push` events. If the hook is not subscribed to `push` events, the server will respond with 204 but no test POST will be generated. + * + * **Note**: Previously `/repos/:owner/:repo/hooks/:hook_id/test` + */ + "repos/test-push-webhook": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the hook. */ + hook_id: components["parameters"]["hook-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + 404: components["responses"]["not_found"]; + }; + }; + /** + * View the progress of an import. + * + * **Import status** + * + * This section includes details about the possible values of the `status` field of the Import Progress response. + * + * An import that does not have errors will progress through these steps: + * + * * `detecting` - the "detection" step of the import is in progress because the request did not include a `vcs` parameter. The import is identifying the type of source control present at the URL. + * * `importing` - the "raw" step of the import is in progress. This is where commit data is fetched from the original repository. The import progress response will include `commit_count` (the total number of raw commits that will be imported) and `percent` (0 - 100, the current progress through the import). + * * `mapping` - the "rewrite" step of the import is in progress. This is where SVN branches are converted to Git branches, and where author updates are applied. The import progress response does not include progress information. + * * `pushing` - the "push" step of the import is in progress. This is where the importer updates the repository on GitHub. The import progress response will include `push_percent`, which is the percent value reported by `git push` when it is "Writing objects". + * * `complete` - the import is complete, and the repository is ready on GitHub. + * + * If there are problems, you will see one of these in the `status` field: + * + * * `auth_failed` - the import requires authentication in order to connect to the original repository. To update authentication for the import, please see the [Update an import](https://docs.github.com/rest/reference/migrations#update-an-import) section. + * * `error` - the import encountered an error. The import progress response will include the `failed_step` and an error message. Contact [GitHub Support](https://support.github.com/contact?tags=dotcom-rest-api) for more information. + * * `detection_needs_auth` - the importer requires authentication for the originating repository to continue detection. To update authentication for the import, please see the [Update an import](https://docs.github.com/rest/reference/migrations#update-an-import) section. + * * `detection_found_nothing` - the importer didn't recognize any source control at the URL. To resolve, [Cancel the import](https://docs.github.com/rest/reference/migrations#cancel-an-import) and [retry](https://docs.github.com/rest/reference/migrations#start-an-import) with the correct URL. 
+ * * `detection_found_multiple` - the importer found several projects or repositories at the provided URL. When this is the case, the Import Progress response will also include a `project_choices` field with the possible project choices as values. To update project choice, please see the [Update an import](https://docs.github.com/rest/reference/migrations#update-an-import) section. + * + * **The project_choices field** + * + * When multiple projects are found at the provided URL, the response hash will include a `project_choices` field, the value of which is an array of hashes each representing a project choice. The exact key/value pairs of the project hashes will differ depending on the version control type. + * + * **Git LFS related fields** + * + * This section includes details about Git LFS related fields that may be present in the Import Progress response. + * + * * `use_lfs` - describes whether the import has been opted in or out of using Git LFS. The value can be `opt_in`, `opt_out`, or `undecided` if no action has been taken. + * * `has_large_files` - the boolean value describing whether files larger than 100MB were found during the `importing` step. + * * `large_files_size` - the total size in gigabytes of files larger than 100MB found in the originating repository. + * * `large_files_count` - the total number of files larger than 100MB found in the originating repository. To see a list of these files, make a "Get Large Files" request. + */ + "migrations/get-import-status": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["import"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** Start a source import to a GitHub repository using GitHub Importer. */ + "migrations/start-import": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 201: { + headers: { + Location?: string; + }; + content: { + "application/json": components["schemas"]["import"]; + }; + }; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The URL of the originating repository. */ + vcs_url: string; + /** + * @description The originating VCS type. Without this parameter, the import job will take additional time to detect the VCS type before beginning the import. This detection step will be reflected in the response. + * @enum {string} + */ + vcs?: "subversion" | "git" | "mercurial" | "tfvc"; + /** @description If authentication is required, the username to provide to `vcs_url`. */ + vcs_username?: string; + /** @description If authentication is required, the password to provide to `vcs_url`. */ + vcs_password?: string; + /** @description For a tfvc import, the name of the project that is being imported. */ + tfvc_project?: string; + }; + }; + }; + }; + /** Stop an import for a repository. */ + "migrations/cancel-import": { + parameters: { + path: { + /** The account owner of the repository. 
The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** + * An import can be updated with credentials or a project choice by passing in the appropriate parameters in this API + * request. If no parameters are provided, the import will be restarted. + * + * Some servers (e.g. TFS servers) can have several projects at a single URL. In those cases the import progress will + * have the status `detection_found_multiple` and the Import Progress response will include a `project_choices` array. + * You can select the project to import by providing one of the objects in the `project_choices` array in the update request. + */ + "migrations/update-import": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["import"]; + }; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The username to provide to the originating repository. */ + vcs_username?: string; + /** @description The password to provide to the originating repository. */ + vcs_password?: string; + /** + * @description The type of version control system you are migrating from. + * @example "git" + * @enum {string} + */ + vcs?: "subversion" | "tfvc" | "git" | "mercurial"; + /** + * @description For a tfvc import, the name of the project that is being imported. + * @example "project1" + */ + tfvc_project?: string; + } | null; + }; + }; + }; + /** + * Each type of source control system represents authors in a different way. For example, a Git commit author has a display name and an email address, but a Subversion commit author just has a username. The GitHub Importer will make the author information valid, but the author might not be correct. For example, it will change the bare Subversion username `hubot` into something like `hubot `. + * + * This endpoint and the [Map a commit author](https://docs.github.com/rest/reference/migrations#map-a-commit-author) endpoint allow you to provide correct Git author information. + */ + "migrations/get-commit-authors": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + query: { + /** A user ID. Only return users with an ID greater than this ID. */ + since?: components["parameters"]["since-user"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["porter-author"][]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** Update an author's identity for the import. Your application can continue updating authors any time before you push new commits to the repository. */ + "migrations/map-commit-author": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. 
*/ + repo: components["parameters"]["repo"]; + author_id: number; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["porter-author"]; + }; + }; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The new Git author email. */ + email?: string; + /** @description The new Git author name. */ + name?: string; + }; + }; + }; + }; + /** List files larger than 100MB found during the import */ + "migrations/get-large-files": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["porter-large-file"][]; + }; + }; + }; + }; + /** You can import repositories from Subversion, Mercurial, and TFS that include files larger than 100MB. This ability is powered by [Git LFS](https://git-lfs.github.com). You can learn more about our LFS feature and working with large files [on our help site](https://docs.github.com/articles/versioning-large-files/). */ + "migrations/set-lfs-preference": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["import"]; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** + * @description Whether to store large files during the import. `opt_in` means large files will be stored using Git LFS. `opt_out` means large files will be removed during the import. + * @enum {string} + */ + use_lfs: "opt_in" | "opt_out"; + }; + }; + }; + }; + /** + * Enables an authenticated GitHub App to find the repository's installation information. The installation's account type will be either an organization or a user account, depending which account the repository belongs to. + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + "apps/get-repo-installation": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["installation"]; + }; + }; + 301: components["responses"]["moved_permanently"]; + 404: components["responses"]["not_found"]; + }; + }; + /** Shows which type of GitHub user can interact with this repository and when the restriction expires. If there are no restrictions, you will see an empty response. */ + "interactions/get-restrictions-for-repo": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. 
*/ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": Partial< + components["schemas"]["interaction-limit-response"] + > & + Partial<{ [key: string]: unknown }>; + }; + }; + }; + }; + /** Temporarily restricts interactions to a certain type of GitHub user within the given repository. You must have owner or admin access to set these restrictions. If an interaction limit is set for the user or organization that owns this repository, you will receive a `409 Conflict` response and will not be able to use this endpoint to change the interaction limit for a single repository. */ + "interactions/set-restrictions-for-repo": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["interaction-limit-response"]; + }; + }; + /** Response */ + 409: unknown; + }; + requestBody: { + content: { + "application/json": components["schemas"]["interaction-limit"]; + }; + }; + }; + /** Removes all interaction restrictions from the given repository. You must have owner or admin access to remove restrictions. If the interaction limit is set for the user or organization that owns this repository, you will receive a `409 Conflict` response and will not be able to use this endpoint to change the interaction limit for a single repository. */ + "interactions/remove-restrictions-for-repo": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 204: never; + /** Response */ + 409: unknown; + }; + }; + /** When authenticating as a user with admin rights to a repository, this endpoint will list all currently open repository invitations. */ + "repos/list-invitations": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["repository-invitation"][]; + }; + }; + }; + }; + "repos/delete-invitation": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the invitation. */ + invitation_id: components["parameters"]["invitation-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + "repos/update-invitation": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. 
The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the invitation. */ + invitation_id: components["parameters"]["invitation-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["repository-invitation"]; + }; + }; + }; + requestBody: { + content: { + "application/json": { + /** + * @description The permissions that the associated user will have on the repository. Valid values are `read`, `write`, `maintain`, `triage`, and `admin`. + * @enum {string} + */ + permissions?: "read" | "write" | "maintain" | "triage" | "admin"; + }; + }; + }; + }; + /** + * List issues in a repository. + * + * **Note**: GitHub's REST API v3 considers every pull request an issue, but not every issue is a pull request. For this + * reason, "Issues" endpoints may return both issues and pull requests in the response. You can identify pull requests by + * the `pull_request` key. Be aware that the `id` of a pull request returned from "Issues" endpoints will be an _issue id_. To find out the pull + * request id, use the "[List pull requests](https://docs.github.com/rest/reference/pulls#list-pull-requests)" endpoint. + */ + "issues/list-for-repo": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + query: { + /** If an `integer` is passed, it should refer to a milestone by its `number` field. If the string `*` is passed, issues with any milestone are accepted. If the string `none` is passed, issues without milestones are returned. */ + milestone?: string; + /** Indicates the state of the issues to return. Can be either `open`, `closed`, or `all`. */ + state?: "open" | "closed" | "all"; + /** Can be the name of a user. Pass in `none` for issues with no assigned user, and `*` for issues assigned to any user. */ + assignee?: string; + /** The user that created the issue. */ + creator?: string; + /** A user that's mentioned in the issue. */ + mentioned?: string; + /** A list of comma separated label names. Example: `bug,ui,@high` */ + labels?: components["parameters"]["labels"]; + /** What to sort results by. Can be either `created`, `updated`, `comments`. */ + sort?: "created" | "updated" | "comments"; + /** The direction to sort the results by. */ + direction?: components["parameters"]["direction"]; + /** Only show notifications updated after the given time. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. */ + since?: components["parameters"]["since"]; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["issue"][]; + }; + }; + 301: components["responses"]["moved_permanently"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Any user with pull access to a repository can create an issue. If [issues are disabled in the repository](https://docs.github.com/articles/disabling-issues/), the API returns a `410 Gone` status. 
+ * + * This endpoint triggers [notifications](https://docs.github.com/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. + */ + "issues/create": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 201: { + headers: { + Location?: string; + }; + content: { + "application/json": components["schemas"]["issue"]; + }; + }; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 410: components["responses"]["gone"]; + 422: components["responses"]["validation_failed"]; + 503: components["responses"]["service_unavailable"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The title of the issue. */ + title: string | number; + /** @description The contents of the issue. */ + body?: string; + /** @description Login for the user that this issue should be assigned to. _NOTE: Only users with push access can set the assignee for new issues. The assignee is silently dropped otherwise. **This field is deprecated.**_ */ + assignee?: string | null; + milestone?: (string | number) | null; + /** @description Labels to associate with this issue. _NOTE: Only users with push access can set labels for new issues. Labels are silently dropped otherwise._ */ + labels?: ( + | string + | { + id?: number; + name?: string; + description?: string | null; + color?: string | null; + } + )[]; + /** @description Logins for Users to assign to this issue. _NOTE: Only users with push access can set assignees for new issues. Assignees are silently dropped otherwise._ */ + assignees?: string[]; + }; + }; + }; + }; + /** By default, Issue Comments are ordered by ascending ID. */ + "issues/list-comments-for-repo": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + query: { + /** The property to sort the results by. `created` means when the repository was starred. `updated` means when the repository was last pushed to. */ + sort?: components["parameters"]["sort"]; + /** Either `asc` or `desc`. Ignored without the `sort` parameter. */ + direction?: "asc" | "desc"; + /** Only show notifications updated after the given time. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. */ + since?: components["parameters"]["since"]; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. 
*/ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["issue-comment"][]; + }; + }; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + "issues/get-comment": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the comment. */ + comment_id: components["parameters"]["comment-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["issue-comment"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + "issues/delete-comment": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the comment. */ + comment_id: components["parameters"]["comment-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + "issues/update-comment": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the comment. */ + comment_id: components["parameters"]["comment-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["issue-comment"]; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The contents of the comment. */ + body: string; + }; + }; + }; + }; + /** List the reactions to an [issue comment](https://docs.github.com/rest/reference/issues#comments). */ + "reactions/list-for-issue-comment": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the comment. */ + comment_id: components["parameters"]["comment-id"]; + }; + query: { + /** Returns a single [reaction type](https://docs.github.com/rest/reference/reactions#reaction-types). Omit this parameter to list all reactions to an issue comment. */ + content?: + | "+1" + | "-1" + | "laugh" + | "confused" + | "heart" + | "hooray" + | "rocket" + | "eyes"; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["reaction"][]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** Create a reaction to an [issue comment](https://docs.github.com/rest/reference/issues#comments). A response with an HTTP `200` status means that you already added the reaction type to this issue comment. 
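+ *
+ * Illustrative usage (an added comment sketch, not part of the upstream OpenAPI
+ * description), assuming an authenticated `octokit` instance from `@octokit/core`;
+ * the owner, repo, and comment_id values are placeholders:
+ *
+ * @example
+ * await octokit.request(
+ *   "POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions",
+ *   { owner: "octocat", repo: "hello-world", comment_id: 1, content: "heart" }
+ * );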
*/ + "reactions/create-for-issue-comment": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the comment. */ + comment_id: components["parameters"]["comment-id"]; + }; + }; + responses: { + /** Reaction exists */ + 200: { + content: { + "application/json": components["schemas"]["reaction"]; + }; + }; + /** Reaction created */ + 201: { + content: { + "application/json": components["schemas"]["reaction"]; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** + * @description The [reaction type](https://docs.github.com/rest/reference/reactions#reaction-types) to add to the issue comment. + * @enum {string} + */ + content: + | "+1" + | "-1" + | "laugh" + | "confused" + | "heart" + | "hooray" + | "rocket" + | "eyes"; + }; + }; + }; + }; + /** + * **Note:** You can also specify a repository by `repository_id` using the route `DELETE delete /repositories/:repository_id/issues/comments/:comment_id/reactions/:reaction_id`. + * + * Delete a reaction to an [issue comment](https://docs.github.com/rest/reference/issues#comments). + */ + "reactions/delete-for-issue-comment": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the comment. */ + comment_id: components["parameters"]["comment-id"]; + /** The unique identifier of the reaction. */ + reaction_id: components["parameters"]["reaction-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + "issues/list-events-for-repo": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["issue-event"][]; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + }; + "issues/get-event": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + event_id: number; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["issue-event"]; + }; + }; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 410: components["responses"]["gone"]; + }; + }; + /** + * The API returns a [`301 Moved Permanently` status](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-redirects-redirects) if the issue was + * [transferred](https://docs.github.com/articles/transferring-an-issue-to-another-repository/) to another repository. 
If + * the issue was transferred to or deleted from a repository where the authenticated user lacks read access, the API + * returns a `404 Not Found` status. If the issue was deleted from a repository where the authenticated user has read + * access, the API returns a `410 Gone` status. To receive webhook events for transferred and deleted issues, subscribe + * to the [`issues`](https://docs.github.com/webhooks/event-payloads/#issues) webhook. + * + * **Note**: GitHub's REST API v3 considers every pull request an issue, but not every issue is a pull request. For this + * reason, "Issues" endpoints may return both issues and pull requests in the response. You can identify pull requests by + * the `pull_request` key. Be aware that the `id` of a pull request returned from "Issues" endpoints will be an _issue id_. To find out the pull + * request id, use the "[List pull requests](https://docs.github.com/rest/reference/pulls#list-pull-requests)" endpoint. + */ + "issues/get": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies the issue. */ + issue_number: components["parameters"]["issue-number"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["issue"]; + }; + }; + 301: components["responses"]["moved_permanently"]; + 304: components["responses"]["not_modified"]; + 404: components["responses"]["not_found"]; + 410: components["responses"]["gone"]; + }; + }; + /** Issue owners and users with push access can edit an issue. */ + "issues/update": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies the issue. */ + issue_number: components["parameters"]["issue-number"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["issue"]; + }; + }; + 301: components["responses"]["moved_permanently"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 410: components["responses"]["gone"]; + 422: components["responses"]["validation_failed"]; + 503: components["responses"]["service_unavailable"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The title of the issue. */ + title?: (string | number) | null; + /** @description The contents of the issue. */ + body?: string | null; + /** @description Login for the user that this issue should be assigned to. **This field is deprecated.** */ + assignee?: string | null; + /** + * @description State of the issue. Either `open` or `closed`. + * @enum {string} + */ + state?: "open" | "closed"; + milestone?: (string | number) | null; + /** @description Labels to associate with this issue. Pass one or more Labels to _replace_ the set of Labels on this Issue. Send an empty array (`[]`) to clear all Labels from the Issue. _NOTE: Only users with push access can set labels for issues. Labels are silently dropped otherwise._ */ + labels?: ( + | string + | { + id?: number; + name?: string; + description?: string | null; + color?: string | null; + } + )[]; + /** @description Logins for Users to assign to this issue. 
Pass one or more user logins to _replace_ the set of assignees on this Issue. Send an empty array (`[]`) to clear all assignees from the Issue. _NOTE: Only users with push access can set assignees for new issues. Assignees are silently dropped otherwise._ */ + assignees?: string[]; + }; + }; + }; + }; + /** Adds up to 10 assignees to an issue. Users already assigned to an issue are not replaced. */ + "issues/add-assignees": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies the issue. */ + issue_number: components["parameters"]["issue-number"]; + }; + }; + responses: { + /** Response */ + 201: { + content: { + "application/json": components["schemas"]["issue"]; + }; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description Usernames of people to assign this issue to. _NOTE: Only users with push access can add assignees to an issue. Assignees are silently ignored otherwise._ */ + assignees?: string[]; + }; + }; + }; + }; + /** Removes one or more assignees from an issue. */ + "issues/remove-assignees": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies the issue. */ + issue_number: components["parameters"]["issue-number"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["issue"]; + }; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description Usernames of assignees to remove from an issue. _NOTE: Only users with push access can remove assignees from an issue. Assignees are silently ignored otherwise._ */ + assignees: string[]; + }; + }; + }; + }; + /** Issue Comments are ordered by ascending ID. */ + "issues/list-comments": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies the issue. */ + issue_number: components["parameters"]["issue-number"]; + }; + query: { + /** Only show notifications updated after the given time. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. */ + since?: components["parameters"]["since"]; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["issue-comment"][]; + }; + }; + 404: components["responses"]["not_found"]; + 410: components["responses"]["gone"]; + }; + }; + /** This endpoint triggers [notifications](https://docs.github.com/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. 
See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. */ + "issues/create-comment": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies the issue. */ + issue_number: components["parameters"]["issue-number"]; + }; + }; + responses: { + /** Response */ + 201: { + headers: { + Location?: string; + }; + content: { + "application/json": components["schemas"]["issue-comment"]; + }; + }; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 410: components["responses"]["gone"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The contents of the comment. */ + body: string; + }; + }; + }; + }; + "issues/list-events": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies the issue. */ + issue_number: components["parameters"]["issue-number"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["issue-event-for-issue"][]; + }; + }; + 410: components["responses"]["gone"]; + }; + }; + "issues/list-labels-on-issue": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies the issue. */ + issue_number: components["parameters"]["issue-number"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["label"][]; + }; + }; + 410: components["responses"]["gone"]; + }; + }; + /** Removes any previous labels and sets the new labels for an issue. */ + "issues/set-labels": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies the issue. 
*/ + issue_number: components["parameters"]["issue-number"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["label"][]; + }; + }; + 410: components["responses"]["gone"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": + | { + /** @description The names of the labels to set for the issue. The labels you set replace any existing labels. You can pass an empty array to remove all labels. Alternatively, you can pass a single label as a `string` or an `array` of labels directly, but GitHub recommends passing an object with the `labels` key. You can also add labels to the existing labels for an issue. For more information, see "[Add labels to an issue](https://docs.github.com/rest/reference/issues#add-labels-to-an-issue)." */ + labels?: string[]; + } + | { + labels?: { + name: string; + }[]; + }; + }; + }; + }; + "issues/add-labels": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies the issue. */ + issue_number: components["parameters"]["issue-number"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["label"][]; + }; + }; + 410: components["responses"]["gone"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": + | { + /** @description The names of the labels to add to the issue's existing labels. You can pass an empty array to remove all labels. Alternatively, you can pass a single label as a `string` or an `array` of labels directly, but GitHub recommends passing an object with the `labels` key. You can also replace all of the labels for an issue. For more information, see "[Set labels for an issue](https://docs.github.com/rest/reference/issues#set-labels-for-an-issue)." */ + labels?: string[]; + } + | { + labels?: { + name: string; + }[]; + }; + }; + }; + }; + "issues/remove-all-labels": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies the issue. */ + issue_number: components["parameters"]["issue-number"]; + }; + }; + responses: { + /** Response */ + 204: never; + 410: components["responses"]["gone"]; + }; + }; + /** Removes the specified label from the issue, and returns the remaining labels on the issue. This endpoint returns a `404 Not Found` status if the label does not exist. */ + "issues/remove-label": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies the issue. 
*/ + issue_number: components["parameters"]["issue-number"]; + name: string; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["label"][]; + }; + }; + 404: components["responses"]["not_found"]; + 410: components["responses"]["gone"]; + }; + }; + /** + * Users with push access can lock an issue or pull request's conversation. + * + * Note that, if you choose not to pass any parameters, you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)." + */ + "issues/lock": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies the issue. */ + issue_number: components["parameters"]["issue-number"]; + }; + }; + responses: { + /** Response */ + 204: never; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 410: components["responses"]["gone"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** + * @description The reason for locking the issue or pull request conversation. Lock will fail if you don't use one of these reasons: + * \* `off-topic` + * \* `too heated` + * \* `resolved` + * \* `spam` + * @enum {string} + */ + lock_reason?: "off-topic" | "too heated" | "resolved" | "spam"; + } | null; + }; + }; + }; + /** Users with push access can unlock an issue's conversation. */ + "issues/unlock": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies the issue. */ + issue_number: components["parameters"]["issue-number"]; + }; + }; + responses: { + /** Response */ + 204: never; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** List the reactions to an [issue](https://docs.github.com/rest/reference/issues). */ + "reactions/list-for-issue": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies the issue. */ + issue_number: components["parameters"]["issue-number"]; + }; + query: { + /** Returns a single [reaction type](https://docs.github.com/rest/reference/reactions#reaction-types). Omit this parameter to list all reactions to an issue. */ + content?: + | "+1" + | "-1" + | "laugh" + | "confused" + | "heart" + | "hooray" + | "rocket" + | "eyes"; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. 
*/ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["reaction"][]; + }; + }; + 404: components["responses"]["not_found"]; + 410: components["responses"]["gone"]; + }; + }; + /** Create a reaction to an [issue](https://docs.github.com/rest/reference/issues/). A response with an HTTP `200` status means that you already added the reaction type to this issue. */ + "reactions/create-for-issue": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies the issue. */ + issue_number: components["parameters"]["issue-number"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["reaction"]; + }; + }; + /** Response */ + 201: { + content: { + "application/json": components["schemas"]["reaction"]; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** + * @description The [reaction type](https://docs.github.com/rest/reference/reactions#reaction-types) to add to the issue. + * @enum {string} + */ + content: + | "+1" + | "-1" + | "laugh" + | "confused" + | "heart" + | "hooray" + | "rocket" + | "eyes"; + }; + }; + }; + }; + /** + * **Note:** You can also specify a repository by `repository_id` using the route `DELETE /repositories/:repository_id/issues/:issue_number/reactions/:reaction_id`. + * + * Delete a reaction to an [issue](https://docs.github.com/rest/reference/issues/). + */ + "reactions/delete-for-issue": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies the issue. */ + issue_number: components["parameters"]["issue-number"]; + /** The unique identifier of the reaction. */ + reaction_id: components["parameters"]["reaction-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + "issues/list-events-for-timeline": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies the issue. */ + issue_number: components["parameters"]["issue-number"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["timeline-issue-events"][]; + }; + }; + 404: components["responses"]["not_found"]; + 410: components["responses"]["gone"]; + }; + }; + "repos/list-deploy-keys": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. 
*/ + repo: components["parameters"]["repo"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["deploy-key"][]; + }; + }; + }; + }; + /** You can create a read-only deploy key. */ + "repos/create-deploy-key": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 201: { + headers: { + Location?: string; + }; + content: { + "application/json": components["schemas"]["deploy-key"]; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description A name for the key. */ + title?: string; + /** @description The contents of the key. */ + key: string; + /** + * @description If `true`, the key will only be able to read repository contents. Otherwise, the key will be able to read and write. + * + * Deploy keys with write access can perform the same actions as an organization member with admin access, or a collaborator on a personal repository. For more information, see "[Repository permission levels for an organization](https://docs.github.com/articles/repository-permission-levels-for-an-organization/)" and "[Permission levels for a user account repository](https://docs.github.com/articles/permission-levels-for-a-user-account-repository/)." + */ + read_only?: boolean; + }; + }; + }; + }; + "repos/get-deploy-key": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the key. */ + key_id: components["parameters"]["key-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["deploy-key"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** Deploy keys are immutable. If you need to update a key, remove the key and create a new one instead. */ + "repos/delete-deploy-key": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the key. */ + key_id: components["parameters"]["key-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + "issues/list-labels-for-repo": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. 
*/ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["label"][]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + "issues/create-label": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 201: { + headers: { + Location?: string; + }; + content: { + "application/json": components["schemas"]["label"]; + }; + }; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The name of the label. Emoji can be added to label names, using either native emoji or colon-style markup. For example, typing `:strawberry:` will render the emoji ![:strawberry:](https://github.githubassets.com/images/icons/emoji/unicode/1f353.png ":strawberry:"). For a full list of available emoji and codes, see "[Emoji cheat sheet](https://github.com/ikatyang/emoji-cheat-sheet)." */ + name: string; + /** @description The [hexadecimal color code](http://www.color-hex.com/) for the label, without the leading `#`. */ + color?: string; + /** @description A short description of the label. Must be 100 characters or fewer. */ + description?: string; + }; + }; + }; + }; + "issues/get-label": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + name: string; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["label"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + "issues/delete-label": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + name: string; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + "issues/update-label": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + name: string; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["label"]; + }; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The new name of the label. Emoji can be added to label names, using either native emoji or colon-style markup. For example, typing `:strawberry:` will render the emoji ![:strawberry:](https://github.githubassets.com/images/icons/emoji/unicode/1f353.png ":strawberry:"). For a full list of available emoji and codes, see "[Emoji cheat sheet](https://github.com/ikatyang/emoji-cheat-sheet)." */ + new_name?: string; + /** @description The [hexadecimal color code](http://www.color-hex.com/) for the label, without the leading `#`. */ + color?: string; + /** @description A short description of the label. Must be 100 characters or fewer. 
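+ * (Illustrative note, not part of the upstream description: with an authenticated
+ * `octokit` instance, a label is typically updated via
+ * `await octokit.request("PATCH /repos/{owner}/{repo}/labels/{name}", { owner, repo, name: "bug", new_name: "confirmed-bug" })`;
+ * all values shown are placeholders.)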
*/ + description?: string; + }; + }; + }; + }; + /** Lists languages for the specified repository. The value shown for each language is the number of bytes of code written in that language. */ + "repos/list-languages": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["language"]; + }; + }; + }; + }; + "repos/enable-lfs-for-repo": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + 202: components["responses"]["accepted"]; + /** + * We will return a 403 with one of the following messages: + * + * - Git LFS support not enabled because Git LFS is globally disabled. + * - Git LFS support not enabled because Git LFS is disabled for the root repository in the network. + * - Git LFS support not enabled because Git LFS is disabled for . + */ + 403: unknown; + }; + }; + "repos/disable-lfs-for-repo": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** + * This method returns the contents of the repository's license file, if one is detected. + * + * Similar to [Get repository content](https://docs.github.com/rest/reference/repos#get-repository-content), this method also supports [custom media types](https://docs.github.com/rest/overview/media-types) for retrieving the raw license content or rendered license HTML. + */ + "licenses/get-for-repo": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["license-content"]; + }; + }; + }; + }; + /** Sync a branch of a forked repository to keep it up-to-date with the upstream repository. */ + "repos/merge-upstream": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** The branch has been successfully synced with the upstream repository */ + 200: { + content: { + "application/json": components["schemas"]["merged-upstream"]; + }; + }; + /** The branch could not be synced because of a merge conflict */ + 409: unknown; + /** The branch could not be synced for some other reason */ + 422: unknown; + }; + requestBody: { + content: { + "application/json": { + /** @description The name of the branch which should be updated to match upstream. */ + branch: string; + }; + }; + }; + }; + "repos/merge": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. 
*/ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Successful Response (The resulting merge commit) */ + 201: { + content: { + "application/json": components["schemas"]["commit"]; + }; + }; + /** Response when already merged */ + 204: never; + 403: components["responses"]["forbidden"]; + /** Not Found when the base or head does not exist */ + 404: unknown; + /** Conflict when there is a merge conflict */ + 409: unknown; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The name of the base branch that the head will be merged into. */ + base: string; + /** @description The head to merge. This can be a branch name or a commit SHA1. */ + head: string; + /** @description Commit message to use for the merge commit. If omitted, a default message will be used. */ + commit_message?: string; + }; + }; + }; + }; + "issues/list-milestones": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + query: { + /** The state of the milestone. Either `open`, `closed`, or `all`. */ + state?: "open" | "closed" | "all"; + /** What to sort results by. Either `due_on` or `completeness`. */ + sort?: "due_on" | "completeness"; + /** The direction of the sort. Either `asc` or `desc`. */ + direction?: "asc" | "desc"; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["milestone"][]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + "issues/create-milestone": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 201: { + headers: { + Location?: string; + }; + content: { + "application/json": components["schemas"]["milestone"]; + }; + }; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The title of the milestone. */ + title: string; + /** + * @description The state of the milestone. Either `open` or `closed`. + * @default open + * @enum {string} + */ + state?: "open" | "closed"; + /** @description A description of the milestone. */ + description?: string; + /** + * Format: date-time + * @description The milestone due date. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + due_on?: string; + }; + }; + }; + }; + "issues/get-milestone": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies the milestone. 
*/ + milestone_number: components["parameters"]["milestone-number"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["milestone"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + "issues/delete-milestone": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies the milestone. */ + milestone_number: components["parameters"]["milestone-number"]; + }; + }; + responses: { + /** Response */ + 204: never; + 404: components["responses"]["not_found"]; + }; + }; + "issues/update-milestone": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies the milestone. */ + milestone_number: components["parameters"]["milestone-number"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["milestone"]; + }; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The title of the milestone. */ + title?: string; + /** + * @description The state of the milestone. Either `open` or `closed`. + * @default open + * @enum {string} + */ + state?: "open" | "closed"; + /** @description A description of the milestone. */ + description?: string; + /** + * Format: date-time + * @description The milestone due date. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + due_on?: string; + }; + }; + }; + }; + "issues/list-labels-for-milestone": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies the milestone. */ + milestone_number: components["parameters"]["milestone-number"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["label"][]; + }; + }; + }; + }; + /** List all notifications for the current user. */ + "activity/list-repo-notifications-for-authenticated-user": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + query: { + /** If `true`, show notifications marked as read. */ + all?: components["parameters"]["all"]; + /** If `true`, only shows notifications in which the user is directly participating or mentioned. */ + participating?: components["parameters"]["participating"]; + /** Only show notifications updated after the given time. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. 
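+ * (Illustrative note, not part of the upstream description: a value in this format can be
+ * produced with `new Date(Date.now() - 60 * 60 * 1000).toISOString()` and passed as `since`
+ * when calling `GET /repos/{owner}/{repo}/notifications` through `octokit.request`.)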
*/ + since?: components["parameters"]["since"]; + /** Only show notifications updated before the given time. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. */ + before?: components["parameters"]["before"]; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["thread"][]; + }; + }; + }; + }; + /** Marks all notifications in a repository as "read" removes them from the [default view on GitHub](https://github.com/notifications). If the number of notifications is too large to complete in one request, you will receive a `202 Accepted` status and GitHub will run an asynchronous process to mark notifications as "read." To check whether any "unread" notifications remain, you can use the [List repository notifications for the authenticated user](https://docs.github.com/rest/reference/activity#list-repository-notifications-for-the-authenticated-user) endpoint and pass the query parameter `all=false`. */ + "activity/mark-repo-notifications-as-read": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 202: { + content: { + "application/json": { + message?: string; + url?: string; + }; + }; + }; + /** Reset Content */ + 205: unknown; + }; + requestBody: { + content: { + "application/json": { + /** + * Format: date-time + * @description Describes the last point that notifications were checked. Anything updated since this time will not be marked as read. If you omit this parameter, all notifications are marked as read. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. Default: The current timestamp. + */ + last_read_at?: string; + }; + }; + }; + }; + "repos/get-pages": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["page"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** Updates information for a GitHub Pages site. For more information, see "[About GitHub Pages](/github/working-with-github-pages/about-github-pages). */ + "repos/update-information-about-pages-site": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 204: never; + 400: components["responses"]["bad_request"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description Specify a custom domain for the repository. Sending a `null` value will remove the custom domain. 
For more about custom domains, see "[Using a custom domain with GitHub Pages](https://docs.github.com/articles/using-a-custom-domain-with-github-pages/)." */ + cname?: string | null; + /** @description Specify whether HTTPS should be enforced for the repository. */ + https_enforced?: boolean; + /** @description Configures access controls for the GitHub Pages site. If public is set to `true`, the site is accessible to anyone on the internet. If set to `false`, the site will only be accessible to users who have at least `read` access to the repository that published the site. This includes anyone in your Enterprise if the repository is set to `internal` visibility. This feature is only available to repositories in an organization on an Enterprise plan. */ + public?: boolean; + source?: Partial<"gh-pages" | "master" | "master /docs"> & + Partial<{ + /** @description The repository branch used to publish your site's source files. */ + branch: string; + /** + * @description The repository directory that includes the source files for the Pages site. Allowed paths are `/` or `/docs`. + * @enum {string} + */ + path: "/" | "/docs"; + }>; + }; + }; + }; + }; + /** Configures a GitHub Pages site. For more information, see "[About GitHub Pages](/github/working-with-github-pages/about-github-pages)." */ + "repos/create-pages-site": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 201: { + content: { + "application/json": components["schemas"]["page"]; + }; + }; + 409: components["responses"]["conflict"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The source branch and directory used to publish your Pages site. */ + source?: { + /** @description The repository branch used to publish your site's source files. */ + branch: string; + /** + * @description The repository directory that includes the source files for the Pages site. Allowed paths are `/` or `/docs`. Default: `/` + * @default / + * @enum {string} + */ + path?: "/" | "/docs"; + }; + } | null; + }; + }; + }; + "repos/delete-pages-site": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 204: never; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + "repos/list-pages-builds": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["page-build"][]; + }; + }; + }; + }; + /** + * You can request that your site be built from the latest revision on the default branch. 
This has the same effect as pushing a commit to your default branch, but does not require an additional commit. Manually triggering page builds can be helpful when diagnosing build warnings and failures. + * + * Build requests are limited to one concurrent build per repository and one concurrent build per requester. If you request a build while another is still in progress, the second request will be queued until the first completes. + */ + "repos/request-pages-build": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 201: { + content: { + "application/json": components["schemas"]["page-build-status"]; + }; + }; + }; + }; + "repos/get-latest-pages-build": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["page-build"]; + }; + }; + }; + }; + "repos/get-pages-build": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + build_id: number; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["page-build"]; + }; + }; + }; + }; + /** + * Gets a health check of the DNS settings for the `CNAME` record configured for a repository's GitHub Pages. + * + * The first request to this endpoint returns a `202 Accepted` status and starts an asynchronous background task to get the results for the domain. After the background task completes, subsequent requests to this endpoint return a `200 OK` status with the health check results in the response. + * + * Users must have admin or owner permissions. GitHub Apps must have the `pages:write` and `administration:write` permission to use this endpoint. + */ + "repos/get-pages-health-check": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["pages-health-check"]; + }; + }; + /** Empty response */ + 202: { + content: { + "application/json": components["schemas"]["empty-object"]; + }; + }; + /** Custom domains are not available for GitHub Pages */ + 400: unknown; + 404: components["responses"]["not_found"]; + /** There isn't a CNAME for this page */ + 422: unknown; + }; + }; + /** Lists the projects in a repository. Returns a `404 Not Found` status if projects are disabled in the repository. If you do not have sufficient privileges to perform this action, a `401 Unauthorized` or `410 Gone` status is returned. */ + "projects/list-for-repo": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. 
The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + query: { + /** Indicates the state of the projects to return. Can be either `open`, `closed`, or `all`. */ + state?: "open" | "closed" | "all"; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["project"][]; + }; + }; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 410: components["responses"]["gone"]; + 422: components["responses"]["validation_failed_simple"]; + }; + }; + /** Creates a repository project board. Returns a `410 Gone` status if projects are disabled in the repository or if the repository does not have existing classic projects. If you do not have sufficient privileges to perform this action, a `401 Unauthorized` or `410 Gone` status is returned. */ + "projects/create-for-repo": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 201: { + content: { + "application/json": components["schemas"]["project"]; + }; + }; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 410: components["responses"]["gone"]; + 422: components["responses"]["validation_failed_simple"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The name of the project. */ + name: string; + /** @description The description of the project. */ + body?: string; + }; + }; + }; + }; + /** Draft pull requests are available in public repositories with GitHub Free and GitHub Free for organizations, GitHub Pro, and legacy per-repository billing plans, and in public and private repositories with GitHub Team and GitHub Enterprise Cloud. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. */ + "pulls/list": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + query: { + /** Either `open`, `closed`, or `all` to filter by state. */ + state?: "open" | "closed" | "all"; + /** Filter pulls by head user or head organization and branch name in the format of `user:ref-name` or `organization:ref-name`. For example: `github:new-script-format` or `octocat:test-branch`. */ + head?: string; + /** Filter pulls by base branch name. Example: `gh-pages`. */ + base?: string; + /** What to sort results by. Can be either `created`, `updated`, `popularity` (comment count) or `long-running` (age, filtering by pulls updated in the last month). */ + sort?: "created" | "updated" | "popularity" | "long-running"; + /** The direction of the sort. Can be either `asc` or `desc`. Default: `desc` when sort is `created` or sort is not specified, otherwise `asc`. 
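+ *
+ * Illustrative usage of the sort and direction parameters (an added comment sketch, not
+ * part of the upstream description), assuming an authenticated `octokit` instance; owner
+ * and repo are placeholders:
+ *
+ * @example
+ * await octokit.request("GET /repos/{owner}/{repo}/pulls", {
+ *   owner: "octocat",
+ *   repo: "hello-world",
+ *   state: "open",
+ *   sort: "updated",
+ *   direction: "desc",
+ * });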
*/ + direction?: "asc" | "desc"; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["pull-request-simple"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Draft pull requests are available in public repositories with GitHub Free and GitHub Free for organizations, GitHub Pro, and legacy per-repository billing plans, and in public and private repositories with GitHub Team and GitHub Enterprise Cloud. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * To open or update a pull request in a public repository, you must have write access to the head or the source branch. For organization-owned repositories, you must be a member of the organization that owns the repository to open or update a pull request. + * + * You can create a new pull request. + * + * This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-rate-limits)" for details. + */ + "pulls/create": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 201: { + headers: { + Location?: string; + }; + content: { + "application/json": components["schemas"]["pull-request"]; + }; + }; + 403: components["responses"]["forbidden"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The title of the new pull request. */ + title?: string; + /** @description The name of the branch where your changes are implemented. For cross-repository pull requests in the same network, namespace `head` with a user like this: `username:branch`. */ + head: string; + /** @description The name of the branch you want the changes pulled into. This should be an existing branch on the current repository. You cannot submit a pull request to one repository that requests a merge to a base of another repository. */ + base: string; + /** @description The contents of the pull request. */ + body?: string; + /** @description Indicates whether [maintainers can modify](https://docs.github.com/articles/allowing-changes-to-a-pull-request-branch-created-from-a-fork/) the pull request. */ + maintainer_can_modify?: boolean; + /** @description Indicates whether the pull request is a draft. See "[Draft Pull Requests](https://docs.github.com/en/articles/about-pull-requests#draft-pull-requests)" in the GitHub Help documentation to learn more. */ + draft?: boolean; + /** @example 1 */ + issue?: number; + }; + }; + }; + }; + /** Lists review comments for all pull requests in a repository. 
By default, review comments are in ascending order by ID. */ + "pulls/list-review-comments-for-repo": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + query: { + sort?: "created" | "updated" | "created_at"; + /** Can be either `asc` or `desc`. Ignored without `sort` parameter. */ + direction?: "asc" | "desc"; + /** Only show notifications updated after the given time. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. */ + since?: components["parameters"]["since"]; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["pull-request-review-comment"][]; + }; + }; + }; + }; + /** Provides details for a review comment. */ + "pulls/get-review-comment": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the comment. */ + comment_id: components["parameters"]["comment-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["pull-request-review-comment"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** Deletes a review comment. */ + "pulls/delete-review-comment": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the comment. */ + comment_id: components["parameters"]["comment-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + 404: components["responses"]["not_found"]; + }; + }; + /** Enables you to edit a review comment. */ + "pulls/update-review-comment": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the comment. */ + comment_id: components["parameters"]["comment-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["pull-request-review-comment"]; + }; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The text of the reply to the review comment. */ + body: string; + }; + }; + }; + }; + /** List the reactions to a [pull request review comment](https://docs.github.com/rest/reference/pulls#review-comments). */ + "reactions/list-for-pull-request-review-comment": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the comment. 
*/ + comment_id: components["parameters"]["comment-id"]; + }; + query: { + /** Returns a single [reaction type](https://docs.github.com/rest/reference/reactions#reaction-types). Omit this parameter to list all reactions to a pull request review comment. */ + content?: + | "+1" + | "-1" + | "laugh" + | "confused" + | "heart" + | "hooray" + | "rocket" + | "eyes"; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["reaction"][]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** Create a reaction to a [pull request review comment](https://docs.github.com/rest/reference/pulls#comments). A response with an HTTP `200` status means that you already added the reaction type to this pull request review comment. */ + "reactions/create-for-pull-request-review-comment": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the comment. */ + comment_id: components["parameters"]["comment-id"]; + }; + }; + responses: { + /** Reaction exists */ + 200: { + content: { + "application/json": components["schemas"]["reaction"]; + }; + }; + /** Reaction created */ + 201: { + content: { + "application/json": components["schemas"]["reaction"]; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** + * @description The [reaction type](https://docs.github.com/rest/reference/reactions#reaction-types) to add to the pull request review comment. + * @enum {string} + */ + content: + | "+1" + | "-1" + | "laugh" + | "confused" + | "heart" + | "hooray" + | "rocket" + | "eyes"; + }; + }; + }; + }; + /** + * **Note:** You can also specify a repository by `repository_id` using the route `DELETE /repositories/:repository_id/pulls/comments/:comment_id/reactions/:reaction_id.` + * + * Delete a reaction to a [pull request review comment](https://docs.github.com/rest/reference/pulls#review-comments). + */ + "reactions/delete-for-pull-request-comment": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the comment. */ + comment_id: components["parameters"]["comment-id"]; + /** The unique identifier of the reaction. */ + reaction_id: components["parameters"]["reaction-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** + * Draft pull requests are available in public repositories with GitHub Free and GitHub Free for organizations, GitHub Pro, and legacy per-repository billing plans, and in public and private repositories with GitHub Team and GitHub Enterprise Cloud. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Lists details of a pull request by providing its number. 
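 *
 * @example
 * // Usage sketch (assumed, reusing the authenticated `octokit` client from the
 * // earlier "pulls/list" example): "pulls/get" maps to
 * // GET /repos/{owner}/{repo}/pulls/{pull_number}; all values are placeholders.
 * const { data: pr } = await octokit.request(
 *   "GET /repos/{owner}/{repo}/pulls/{pull_number}",
 *   { owner: "octocat", repo: "hello-world", pull_number: 1347 }
 * );
 * // `mergeable` may be null while GitHub computes mergeability in the background.
 * console.log(pr.mergeable, pr.merge_commit_sha);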
+ * + * When you get, [create](https://docs.github.com/rest/reference/pulls/#create-a-pull-request), or [edit](https://docs.github.com/rest/reference/pulls#update-a-pull-request) a pull request, GitHub creates a merge commit to test whether the pull request can be automatically merged into the base branch. This test commit is not added to the base branch or the head branch. You can review the status of the test commit using the `mergeable` key. For more information, see "[Checking mergeability of pull requests](https://docs.github.com/rest/guides/getting-started-with-the-git-database-api#checking-mergeability-of-pull-requests)". + * + * The value of the `mergeable` attribute can be `true`, `false`, or `null`. If the value is `null`, then GitHub has started a background job to compute the mergeability. After giving the job time to complete, resubmit the request. When the job finishes, you will see a non-`null` value for the `mergeable` attribute in the response. If `mergeable` is `true`, then `merge_commit_sha` will be the SHA of the _test_ merge commit. + * + * The value of the `merge_commit_sha` attribute changes depending on the state of the pull request. Before merging a pull request, the `merge_commit_sha` attribute holds the SHA of the _test_ merge commit. After merging a pull request, the `merge_commit_sha` attribute changes depending on how you merged the pull request: + * + * * If merged as a [merge commit](https://docs.github.com/articles/about-merge-methods-on-github/), `merge_commit_sha` represents the SHA of the merge commit. + * * If merged via a [squash](https://docs.github.com/articles/about-merge-methods-on-github/#squashing-your-merge-commits), `merge_commit_sha` represents the SHA of the squashed commit on the base branch. + * * If [rebased](https://docs.github.com/articles/about-merge-methods-on-github/#rebasing-and-merging-your-commits), `merge_commit_sha` represents the commit that the base branch was updated to. + * + * Pass the appropriate [media type](https://docs.github.com/rest/overview/media-types/#commits-commit-comparison-and-pull-requests) to fetch diff and patch formats. + */ + "pulls/get": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies the pull request. */ + pull_number: components["parameters"]["pull-number"]; + }; + }; + responses: { + /** Pass the appropriate [media type](https://docs.github.com/rest/overview/media-types/#commits-commit-comparison-and-pull-requests) to fetch diff and patch formats. */ + 200: { + content: { + "application/json": components["schemas"]["pull-request"]; + }; + }; + 304: components["responses"]["not_modified"]; + 404: components["responses"]["not_found"]; + 500: components["responses"]["internal_error"]; + }; + }; + /** + * Draft pull requests are available in public repositories with GitHub Free and GitHub Free for organizations, GitHub Pro, and legacy per-repository billing plans, and in public and private repositories with GitHub Team and GitHub Enterprise Cloud. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * To open or update a pull request in a public repository, you must have write access to the head or the source branch. 
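 *
 * @example
 * // Usage sketch (assumed, same `octokit` client as above): "pulls/update" maps
 * // to PATCH /repos/{owner}/{repo}/pulls/{pull_number}; values are placeholders.
 * await octokit.request("PATCH /repos/{owner}/{repo}/pulls/{pull_number}", {
 *   owner: "octocat",
 *   repo: "hello-world",
 *   pull_number: 1347,
 *   title: "Refine the retry logic",
 *   body: "Updated description after review feedback.",
 * });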
For organization-owned repositories, you must be a member of the organization that owns the repository to open or update a pull request. + */ + "pulls/update": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies the pull request. */ + pull_number: components["parameters"]["pull-number"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["pull-request"]; + }; + }; + 403: components["responses"]["forbidden"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The title of the pull request. */ + title?: string; + /** @description The contents of the pull request. */ + body?: string; + /** + * @description State of this Pull Request. Either `open` or `closed`. + * @enum {string} + */ + state?: "open" | "closed"; + /** @description The name of the branch you want your changes pulled into. This should be an existing branch on the current repository. You cannot update the base branch on a pull request to point to another repository. */ + base?: string; + /** @description Indicates whether [maintainers can modify](https://docs.github.com/articles/allowing-changes-to-a-pull-request-branch-created-from-a-fork/) the pull request. */ + maintainer_can_modify?: boolean; + }; + }; + }; + }; + /** + * Creates a codespace owned by the authenticated user for the specified pull request. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have write access to the `codespaces` repository permission to use this endpoint. + */ + "codespaces/create-with-pr-for-authenticated-user": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies the pull request. */ + pull_number: components["parameters"]["pull-number"]; + }; + }; + responses: { + /** Response when the codespace was successfully created */ + 201: { + content: { + "application/json": components["schemas"]["codespace"]; + }; + }; + /** Response when the codespace creation partially failed but is being retried in the background */ + 202: { + content: { + "application/json": components["schemas"]["codespace"]; + }; + }; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + requestBody: { + content: { + "application/json": { + /** @description Location for this codespace. 
Assigned by IP if not provided */ + location?: string; + /** @description IP for location auto-detection when proxying a request */ + client_ip?: string; + /** @description Machine type to use for this codespace */ + machine?: string; + /** @description Path to devcontainer.json config to use for this codespace */ + devcontainer_path?: string; + /** @description Whether to authorize requested permissions from devcontainer.json */ + multi_repo_permissions_opt_out?: boolean; + /** @description Working directory for this codespace */ + working_directory?: string; + /** @description Time in minutes before codespace stops from inactivity */ + idle_timeout_minutes?: number; + /** @description Display name for this codespace */ + display_name?: string; + /** @description Duration in minutes after codespace has gone idle in which it will be deleted. Must be integer minutes between 0 and 43200 (30 days). */ + retention_period_minutes?: number; + } | null; + }; + }; + }; + /** Lists all review comments for a pull request. By default, review comments are in ascending order by ID. */ + "pulls/list-review-comments": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies the pull request. */ + pull_number: components["parameters"]["pull-number"]; + }; + query: { + /** The property to sort the results by. `created` means when the repository was starred. `updated` means when the repository was last pushed to. */ + sort?: components["parameters"]["sort"]; + /** Can be either `asc` or `desc`. Ignored without `sort` parameter. */ + direction?: "asc" | "desc"; + /** Only show notifications updated after the given time. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. */ + since?: components["parameters"]["since"]; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["pull-request-review-comment"][]; + }; + }; + }; + }; + /** + * Creates a review comment in the pull request diff. To add a regular comment to a pull request timeline, see "[Create an issue comment](https://docs.github.com/rest/reference/issues#create-an-issue-comment)." We recommend creating a review comment using `line`, `side`, and optionally `start_line` and `start_side` if your comment applies to more than one line in the pull request diff. + * + * The `position` parameter is deprecated. If you use `position`, the `line`, `side`, `start_line`, and `start_side` parameters are not required. + * + * **Note:** The position value equals the number of lines down from the first "@@" hunk header in the file you want to add a comment. The line just below the "@@" line is position 1, the next line is position 2, and so on. The position in the diff continues to increase through lines of whitespace and additional hunks until the beginning of a new file. + * + * This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. 
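 *
 * @example
 * // Usage sketch (assumed, same `octokit` client as above): a single-line review
 * // comment using `line` and `side`, as recommended above. The SHA, path and
 * // other values are placeholders.
 * await octokit.request("POST /repos/{owner}/{repo}/pulls/{pull_number}/comments", {
 *   owner: "octocat",
 *   repo: "hello-world",
 *   pull_number: 1347,
 *   commit_id: "6dcb09b5b57875f334f61aebed695e2e4193db5e",
 *   path: "README.md",
 *   line: 5,
 *   side: "RIGHT",
 *   body: "Consider documenting the new flag here.",
 * });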
See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. + */ + "pulls/create-review-comment": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies the pull request. */ + pull_number: components["parameters"]["pull-number"]; + }; + }; + responses: { + /** Response */ + 201: { + headers: { + Location?: string; + }; + content: { + "application/json": components["schemas"]["pull-request-review-comment"]; + }; + }; + 403: components["responses"]["forbidden"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The text of the review comment. */ + body: string; + /** @description The SHA of the commit needing a comment. Not using the latest commit SHA may render your comment outdated if a subsequent commit modifies the line you specify as the `position`. */ + commit_id?: string; + /** @description The relative path to the file that necessitates a comment. */ + path?: string; + /** + * @deprecated + * @description **This parameter is deprecated. Use `line` instead**. The position in the diff where you want to add a review comment. Note this value is not the same as the line number in the file. For help finding the position value, read the note above. + */ + position?: number; + /** + * @description In a split diff view, the side of the diff that the pull request's changes appear on. Can be `LEFT` or `RIGHT`. Use `LEFT` for deletions that appear in red. Use `RIGHT` for additions that appear in green or unchanged lines that appear in white and are shown for context. For a multi-line comment, side represents whether the last line of the comment range is a deletion or addition. For more information, see "[Diff view options](https://docs.github.com/en/articles/about-comparing-branches-in-pull-requests#diff-view-options)" in the GitHub Help documentation. + * @enum {string} + */ + side?: "LEFT" | "RIGHT"; + /** @description The line of the blob in the pull request diff that the comment applies to. For a multi-line comment, the last line of the range that your comment applies to. */ + line?: number; + /** @description **Required when using multi-line comments unless using `in_reply_to`**. The `start_line` is the first line in the pull request diff that your multi-line comment applies to. To learn more about multi-line comments, see "[Commenting on a pull request](https://docs.github.com/en/articles/commenting-on-a-pull-request#adding-line-comments-to-a-pull-request)" in the GitHub Help documentation. */ + start_line?: number; + /** + * @description **Required when using multi-line comments unless using `in_reply_to`**. The `start_side` is the starting side of the diff that the comment applies to. Can be `LEFT` or `RIGHT`. To learn more about multi-line comments, see "[Commenting on a pull request](https://docs.github.com/en/articles/commenting-on-a-pull-request#adding-line-comments-to-a-pull-request)" in the GitHub Help documentation. See `side` in this table for additional context. 
+ * @enum {string} + */ + start_side?: "LEFT" | "RIGHT" | "side"; + /** + * @description The ID of the review comment to reply to. To find the ID of a review comment with ["List review comments on a pull request"](#list-review-comments-on-a-pull-request). When specified, all parameters other than `body` in the request body are ignored. + * @example 2 + */ + in_reply_to?: number; + }; + }; + }; + }; + /** + * Creates a reply to a review comment for a pull request. For the `comment_id`, provide the ID of the review comment you are replying to. This must be the ID of a _top-level review comment_, not a reply to that comment. Replies to replies are not supported. + * + * This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. + */ + "pulls/create-reply-for-review-comment": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies the pull request. */ + pull_number: components["parameters"]["pull-number"]; + /** The unique identifier of the comment. */ + comment_id: components["parameters"]["comment-id"]; + }; + }; + responses: { + /** Response */ + 201: { + headers: { + Location?: string; + }; + content: { + "application/json": components["schemas"]["pull-request-review-comment"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The text of the review comment. */ + body: string; + }; + }; + }; + }; + /** Lists a maximum of 250 commits for a pull request. To receive a complete commit list for pull requests with more than 250 commits, use the [List commits](https://docs.github.com/rest/reference/repos#list-commits) endpoint. */ + "pulls/list-commits": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies the pull request. */ + pull_number: components["parameters"]["pull-number"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["commit"][]; + }; + }; + }; + }; + /** **Note:** Responses include a maximum of 3000 files. The paginated response returns 30 files per page by default. */ + "pulls/list-files": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies the pull request. 
*/ + pull_number: components["parameters"]["pull-number"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["diff-entry"][]; + }; + }; + 422: components["responses"]["validation_failed"]; + 500: components["responses"]["internal_error"]; + }; + }; + "pulls/check-if-merged": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies the pull request. */ + pull_number: components["parameters"]["pull-number"]; + }; + }; + responses: { + /** Response if pull request has been merged */ + 204: never; + /** Not Found if pull request has not been merged */ + 404: unknown; + }; + }; + /** This endpoint triggers [notifications](https://docs.github.com/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. */ + "pulls/merge": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies the pull request. */ + pull_number: components["parameters"]["pull-number"]; + }; + }; + responses: { + /** if merge was successful */ + 200: { + content: { + "application/json": components["schemas"]["pull-request-merge-result"]; + }; + }; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + /** Method Not Allowed if merge cannot be performed */ + 405: { + content: { + "application/json": { + message?: string; + documentation_url?: string; + }; + }; + }; + /** Conflict if sha was provided and pull request head did not match */ + 409: { + content: { + "application/json": { + message?: string; + documentation_url?: string; + }; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description Title for the automatic commit message. */ + commit_title?: string; + /** @description Extra detail to append to automatic commit message. */ + commit_message?: string; + /** @description SHA that pull request head must match to allow merge. */ + sha?: string; + /** + * @description Merge method to use. Possible values are `merge`, `squash` or `rebase`. Default is `merge`. + * @enum {string} + */ + merge_method?: "merge" | "squash" | "rebase"; + } | null; + }; + }; + }; + /** Lists the users or teams whose review is requested for a pull request. Once a requested reviewer submits a review, they are no longer considered a requested reviewer. Their review will instead be returned by the [List reviews for a pull request](https://docs.github.com/rest/pulls/reviews#list-reviews-for-a-pull-request) operation. 
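 *
 * @example
 * // Usage sketch (assumed, same `octokit` client as above):
 * // GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers returns
 * // `users` and `teams` arrays; values are placeholders.
 * const { data: reviewers } = await octokit.request(
 *   "GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers",
 *   { owner: "octocat", repo: "hello-world", pull_number: 1347 }
 * );
 * console.log(
 *   reviewers.users.map((u) => u.login),
 *   reviewers.teams.map((t) => t.slug)
 * );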
*/ + "pulls/list-requested-reviewers": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies the pull request. */ + pull_number: components["parameters"]["pull-number"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["pull-request-review-request"]; + }; + }; + }; + }; + /** This endpoint triggers [notifications](https://docs.github.com/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. */ + "pulls/request-reviewers": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies the pull request. */ + pull_number: components["parameters"]["pull-number"]; + }; + }; + responses: { + /** Response */ + 201: { + content: { + "application/json": components["schemas"]["pull-request-simple"]; + }; + }; + 403: components["responses"]["forbidden"]; + /** Unprocessable Entity if user is not a collaborator */ + 422: unknown; + }; + requestBody: { + content: { + "application/json": { + /** @description An array of user `login`s that will be requested. */ + reviewers?: string[]; + /** @description An array of team `slug`s that will be requested. */ + team_reviewers?: string[]; + }; + }; + }; + }; + "pulls/remove-requested-reviewers": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies the pull request. */ + pull_number: components["parameters"]["pull-number"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["pull-request-simple"]; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description An array of user `login`s that will be removed. */ + reviewers: string[]; + /** @description An array of team `slug`s that will be removed. */ + team_reviewers?: string[]; + }; + }; + }; + }; + /** The list of reviews returns in chronological order. */ + "pulls/list-reviews": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies the pull request. 
*/ + pull_number: components["parameters"]["pull-number"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** The list of reviews returns in chronological order. */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["pull-request-review"][]; + }; + }; + }; + }; + /** + * This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. + * + * Pull request reviews created in the `PENDING` state do not include the `submitted_at` property in the response. + * + * **Note:** To comment on a specific line in a file, you need to first determine the _position_ of that line in the diff. The GitHub REST API v3 offers the `application/vnd.github.v3.diff` [media type](https://docs.github.com/rest/overview/media-types#commits-commit-comparison-and-pull-requests). To see a pull request diff, add this media type to the `Accept` header of a call to the [single pull request](https://docs.github.com/rest/reference/pulls#get-a-pull-request) endpoint. + * + * The `position` value equals the number of lines down from the first "@@" hunk header in the file you want to add a comment. The line just below the "@@" line is position 1, the next line is position 2, and so on. The position in the diff continues to increase through lines of whitespace and additional hunks until the beginning of a new file. + */ + "pulls/create-review": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies the pull request. */ + pull_number: components["parameters"]["pull-number"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["pull-request-review"]; + }; + }; + 403: components["responses"]["forbidden"]; + 422: components["responses"]["validation_failed_simple"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The SHA of the commit that needs a review. Not using the latest commit SHA may render your review comment outdated if a subsequent commit modifies the line you specify as the `position`. Defaults to the most recent commit in the pull request when you do not specify a value. */ + commit_id?: string; + /** @description **Required** when using `REQUEST_CHANGES` or `COMMENT` for the `event` parameter. The body text of the pull request review. */ + body?: string; + /** + * @description The review action you want to perform. The review actions include: `APPROVE`, `REQUEST_CHANGES`, or `COMMENT`. By leaving this blank, you set the review action state to `PENDING`, which means you will need to [submit the pull request review](https://docs.github.com/rest/reference/pulls#submit-a-review-for-a-pull-request) when you are ready. 
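 *
 * @example
 * // Usage sketch (assumed, same `octokit` client as above): submit a complete
 * // review in one call by setting `event`; path/line values are placeholders.
 * await octokit.request("POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews", {
 *   owner: "octocat",
 *   repo: "hello-world",
 *   pull_number: 1347,
 *   event: "REQUEST_CHANGES",
 *   body: "Please address the inline note before merging.",
 *   comments: [
 *     { path: "README.md", line: 12, side: "RIGHT", body: "Typo: 'recieve'." },
 *   ],
 * });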
+ * @enum {string} + */ + event?: "APPROVE" | "REQUEST_CHANGES" | "COMMENT"; + /** @description Use the following table to specify the location, destination, and contents of the draft review comment. */ + comments?: { + /** @description The relative path to the file that necessitates a review comment. */ + path: string; + /** @description The position in the diff where you want to add a review comment. Note this value is not the same as the line number in the file. For help finding the position value, read the note below. */ + position?: number; + /** @description Text of the review comment. */ + body: string; + /** @example 28 */ + line?: number; + /** @example RIGHT */ + side?: string; + /** @example 26 */ + start_line?: number; + /** @example LEFT */ + start_side?: string; + }[]; + }; + }; + }; + }; + "pulls/get-review": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies the pull request. */ + pull_number: components["parameters"]["pull-number"]; + /** The unique identifier of the review. */ + review_id: components["parameters"]["review-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["pull-request-review"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** Update the review summary comment with new text. */ + "pulls/update-review": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies the pull request. */ + pull_number: components["parameters"]["pull-number"]; + /** The unique identifier of the review. */ + review_id: components["parameters"]["review-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["pull-request-review"]; + }; + }; + 422: components["responses"]["validation_failed_simple"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The body text of the pull request review. */ + body: string; + }; + }; + }; + }; + "pulls/delete-pending-review": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies the pull request. */ + pull_number: components["parameters"]["pull-number"]; + /** The unique identifier of the review. */ + review_id: components["parameters"]["review-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["pull-request-review"]; + }; + }; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed_simple"]; + }; + }; + /** List comments for a specific pull request review. */ + "pulls/list-comments-for-review": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. 
*/ + repo: components["parameters"]["repo"]; + /** The number that identifies the pull request. */ + pull_number: components["parameters"]["pull-number"]; + /** The unique identifier of the review. */ + review_id: components["parameters"]["review-id"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["review-comment"][]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** **Note:** To dismiss a pull request review on a [protected branch](https://docs.github.com/rest/reference/repos#branches), you must be a repository administrator or be included in the list of people or teams who can dismiss pull request reviews. */ + "pulls/dismiss-review": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies the pull request. */ + pull_number: components["parameters"]["pull-number"]; + /** The unique identifier of the review. */ + review_id: components["parameters"]["review-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["pull-request-review"]; + }; + }; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed_simple"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The message for the pull request review dismissal */ + message: string; + /** @example "APPROVE" */ + event?: string; + }; + }; + }; + }; + "pulls/submit-review": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies the pull request. */ + pull_number: components["parameters"]["pull-number"]; + /** The unique identifier of the review. */ + review_id: components["parameters"]["review-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["pull-request-review"]; + }; + }; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed_simple"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The body text of the pull request review */ + body?: string; + /** + * @description The review action you want to perform. The review actions include: `APPROVE`, `REQUEST_CHANGES`, or `COMMENT`. When you leave this blank, the API returns _HTTP 422 (Unrecognizable entity)_ and sets the review action state to `PENDING`, which means you will need to re-submit the pull request review using a review action. + * @enum {string} + */ + event: "APPROVE" | "REQUEST_CHANGES" | "COMMENT"; + }; + }; + }; + }; + /** Updates the pull request branch with the latest upstream changes by merging HEAD from the base branch into the pull request branch. */ + "pulls/update-branch": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. 
*/ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies the pull request. */ + pull_number: components["parameters"]["pull-number"]; + }; + }; + responses: { + /** Response */ + 202: { + content: { + "application/json": { + message?: string; + url?: string; + }; + }; + }; + 403: components["responses"]["forbidden"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The expected SHA of the pull request's HEAD ref. This is the most recent commit on the pull request's branch. If the expected SHA does not match the pull request's HEAD, you will receive a `422 Unprocessable Entity` status. You can use the "[List commits](https://docs.github.com/rest/reference/repos#list-commits)" endpoint to find the most recent commit SHA. Default: SHA of the pull request's current HEAD ref. */ + expected_head_sha?: string; + } | null; + }; + }; + }; + /** + * Gets the preferred README for a repository. + * + * READMEs support [custom media types](https://docs.github.com/rest/reference/repos#custom-media-types) for retrieving the raw content or rendered HTML. + */ + "repos/get-readme": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + query: { + /** The name of the commit/branch/tag. Default: the repository’s default branch (usually `master`) */ + ref?: string; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["content-file"]; + }; + }; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Gets the README from a repository directory. + * + * READMEs support [custom media types](https://docs.github.com/rest/reference/repos#custom-media-types) for retrieving the raw content or rendered HTML. + */ + "repos/get-readme-in-directory": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The alternate path to look for a README file */ + dir: string; + }; + query: { + /** The name of the commit/branch/tag. Default: the repository’s default branch (usually `master`) */ + ref?: string; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["content-file"]; + }; + }; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * This returns a list of releases, which does not include regular Git tags that have not been associated with a release. To get a list of Git tags, use the [Repository Tags API](https://docs.github.com/rest/reference/repos#list-repository-tags). + * + * Information about published releases are available to everyone. Only users with push access will receive listings for draft releases. + */ + "repos/list-releases": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. 
*/ + repo: components["parameters"]["repo"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["release"][]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Users with push access to the repository can create a release. + * + * This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. + */ + "repos/create-release": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 201: { + headers: { + Location?: string; + }; + content: { + "application/json": components["schemas"]["release"]; + }; + }; + /** Not Found if the discussion category name is invalid */ + 404: { + content: { + "application/json": components["schemas"]["basic-error"]; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The name of the tag. */ + tag_name: string; + /** @description Specifies the commitish value that determines where the Git tag is created from. Can be any branch or commit SHA. Unused if the Git tag already exists. Default: the repository's default branch (usually `master`). */ + target_commitish?: string; + /** @description The name of the release. */ + name?: string; + /** @description Text describing the contents of the tag. */ + body?: string; + /** + * @description `true` to create a draft (unpublished) release, `false` to create a published one. + * @default false + */ + draft?: boolean; + /** + * @description `true` to identify the release as a prerelease. `false` to identify the release as a full release. + * @default false + */ + prerelease?: boolean; + /** @description If specified, a discussion of the specified category is created and linked to the release. The value must be a category that already exists in the repository. For more information, see "[Managing categories for discussions in your repository](https://docs.github.com/discussions/managing-discussions-for-your-community/managing-categories-for-discussions-in-your-repository)." */ + discussion_category_name?: string; + /** + * @description Whether to automatically generate the name and body for this release. If `name` is specified, the specified name will be used; otherwise, a name will be automatically generated. If `body` is specified, the body will be pre-pended to the automatically generated notes. + * @default false + */ + generate_release_notes?: boolean; + }; + }; + }; + }; + /** To download the asset's binary content, set the `Accept` header of the request to [`application/octet-stream`](https://docs.github.com/rest/overview/media-types). 
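 *
 * @example
 * // Usage sketch (assumed, same `octokit` client as above): request the raw
 * // binary by overriding the `Accept` header; owner/repo/asset_id are placeholders.
 * const { data: binary } = await octokit.request(
 *   "GET /repos/{owner}/{repo}/releases/assets/{asset_id}",
 *   {
 *     owner: "octocat",
 *     repo: "hello-world",
 *     asset_id: 1,
 *     headers: { accept: "application/octet-stream" },
 *   }
 * );
 * // `binary` holds the asset bytes (typically an ArrayBuffer) instead of the JSON metadata.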
The API will either redirect the client to the location, or stream it directly if possible. API clients should handle both a `200` or `302` response. */ + "repos/get-release-asset": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the asset. */ + asset_id: components["parameters"]["asset-id"]; + }; + }; + responses: { + /** To download the asset's binary content, set the `Accept` header of the request to [`application/octet-stream`](https://docs.github.com/rest/overview/media-types). The API will either redirect the client to the location, or stream it directly if possible. API clients should handle both a `200` or `302` response. */ + 200: { + content: { + "application/json": components["schemas"]["release-asset"]; + }; + }; + 302: components["responses"]["found"]; + 404: components["responses"]["not_found"]; + }; + }; + "repos/delete-release-asset": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the asset. */ + asset_id: components["parameters"]["asset-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** Users with push access to the repository can edit a release asset. */ + "repos/update-release-asset": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the asset. */ + asset_id: components["parameters"]["asset-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["release-asset"]; + }; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The file name of the asset. */ + name?: string; + /** @description An alternate short description of the asset. Used in place of the filename. */ + label?: string; + /** @example "uploaded" */ + state?: string; + }; + }; + }; + }; + /** Generate a name and body describing a [release](https://docs.github.com/rest/reference/repos#releases). The body content will be markdown formatted and contain information like the changes since last release and users who contributed. The generated release notes are not saved anywhere. They are intended to be generated and used when creating a new release. */ + "repos/generate-release-notes": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Name and body of generated release notes */ + 200: { + content: { + "application/json": components["schemas"]["release-notes-content"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The tag name for the release. This can be an existing tag or a new one. 
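 *
 * @example
 * // Usage sketch (assumed, same `octokit` client as above): preview generated
 * // notes for a new tag without publishing anything; tag names are placeholders.
 * const { data: notes } = await octokit.request(
 *   "POST /repos/{owner}/{repo}/releases/generate-notes",
 *   {
 *     owner: "octocat",
 *     repo: "hello-world",
 *     tag_name: "v1.2.0",
 *     previous_tag_name: "v1.1.0",
 *   }
 * );
 * console.log(notes.name, notes.body);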
*/ + tag_name: string; + /** @description Specifies the commitish value that will be the target for the release's tag. Required if the supplied tag_name does not reference an existing tag. Ignored if the tag_name already exists. */ + target_commitish?: string; + /** @description The name of the previous tag to use as the starting point for the release notes. Use to manually specify the range for the set of changes considered as part this release. */ + previous_tag_name?: string; + /** @description Specifies a path to a file in the repository containing configuration settings used for generating the release notes. If unspecified, the configuration file located in the repository at '.github/release.yml' or '.github/release.yaml' will be used. If that is not present, the default configuration will be used. */ + configuration_file_path?: string; + }; + }; + }; + }; + /** + * View the latest published full release for the repository. + * + * The latest release is the most recent non-prerelease, non-draft release, sorted by the `created_at` attribute. The `created_at` attribute is the date of the commit used for the release, and not the date when the release was drafted or published. + */ + "repos/get-latest-release": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["release"]; + }; + }; + }; + }; + /** Get a published release with the specified tag. */ + "repos/get-release-by-tag": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** tag parameter */ + tag: string; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["release"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** **Note:** This returns an `upload_url` key corresponding to the endpoint for uploading release assets. This key is a [hypermedia resource](https://docs.github.com/rest/overview/resources-in-the-rest-api#hypermedia). */ + "repos/get-release": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the release. */ + release_id: components["parameters"]["release-id"]; + }; + }; + responses: { + /** **Note:** This returns an `upload_url` key corresponding to the endpoint for uploading release assets. This key is a [hypermedia resource](https://docs.github.com/rest/overview/resources-in-the-rest-api#hypermedia). */ + 200: { + content: { + "application/json": components["schemas"]["release"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** Users with push access to the repository can delete a release. */ + "repos/delete-release": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. 
*/ + repo: components["parameters"]["repo"]; + /** The unique identifier of the release. */ + release_id: components["parameters"]["release-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** Users with push access to the repository can edit a release. */ + "repos/update-release": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the release. */ + release_id: components["parameters"]["release-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["release"]; + }; + }; + /** Not Found if the discussion category name is invalid */ + 404: { + content: { + "application/json": components["schemas"]["basic-error"]; + }; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The name of the tag. */ + tag_name?: string; + /** @description Specifies the commitish value that determines where the Git tag is created from. Can be any branch or commit SHA. Unused if the Git tag already exists. Default: the repository's default branch (usually `master`). */ + target_commitish?: string; + /** @description The name of the release. */ + name?: string; + /** @description Text describing the contents of the tag. */ + body?: string; + /** @description `true` makes the release a draft, and `false` publishes the release. */ + draft?: boolean; + /** @description `true` to identify the release as a prerelease, `false` to identify the release as a full release. */ + prerelease?: boolean; + /** @description If specified, a discussion of the specified category is created and linked to the release. The value must be a category that already exists in the repository. If there is already a discussion linked to the release, this parameter is ignored. For more information, see "[Managing categories for discussions in your repository](https://docs.github.com/discussions/managing-discussions-for-your-community/managing-categories-for-discussions-in-your-repository)." */ + discussion_category_name?: string; + }; + }; + }; + }; + "repos/list-release-assets": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the release. */ + release_id: components["parameters"]["release-id"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["release-asset"][]; + }; + }; + }; + }; + /** + * This endpoint makes use of [a Hypermedia relation](https://docs.github.com/rest/overview/resources-in-the-rest-api#hypermedia) to determine which URL to access. The endpoint you call to upload release assets is specific to your release. Use the `upload_url` returned in + * the response of the [Create a release endpoint](https://docs.github.com/rest/reference/repos#create-a-release) to upload a release asset. 
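To illustrate `repos/update-release` as typed above, a hedged sketch that publishes a draft release; owner, repo, and the release id are placeholders:

```ts
import { Octokit } from "@octokit/core";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

// Publish a draft release and mark it as a full (non-pre) release.
async function publishDraft(owner: string, repo: string, release_id: number) {
  const { data } = await octokit.request(
    "PATCH /repos/{owner}/{repo}/releases/{release_id}",
    {
      owner,
      repo,
      release_id,
      draft: false,
      prerelease: false,
      // Optional: also open a linked discussion in an existing category.
      // discussion_category_name: "Announcements",
    }
  );
  console.log(`published ${data.tag_name} at ${data.html_url}`);
}

publishDraft("octocat", "hello-world", 1).catch(console.error);
```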
+ * + * You need to use an HTTP client which supports [SNI](http://en.wikipedia.org/wiki/Server_Name_Indication) to make calls to this endpoint. + * + * Most libraries will set the required `Content-Length` header automatically. Use the required `Content-Type` header to provide the media type of the asset. For a list of media types, see [Media Types](https://www.iana.org/assignments/media-types/media-types.xhtml). For example: + * + * `application/zip` + * + * GitHub expects the asset data in its raw binary form, rather than JSON. You will send the raw binary content of the asset as the request body. Everything else about the endpoint is the same as the rest of the API. For example, + * you'll still need to pass your authentication to be able to upload an asset. + * + * When an upstream failure occurs, you will receive a `502 Bad Gateway` status. This may leave an empty asset with a state of `starter`. It can be safely deleted. + * + * **Notes:** + * * GitHub renames asset filenames that have special characters, non-alphanumeric characters, and leading or trailing periods. The "[List assets for a release](https://docs.github.com/rest/reference/repos#list-assets-for-a-release)" + * endpoint lists the renamed filenames. For more information and help, contact [GitHub Support](https://support.github.com/contact?tags=dotcom-rest-api). + * * If you upload an asset with the same filename as another uploaded asset, you'll receive an error and must delete the old file before you can re-upload the new asset. + */ + "repos/upload-release-asset": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the release. */ + release_id: components["parameters"]["release-id"]; + }; + query: { + name: string; + label?: string; + }; + }; + responses: { + /** Response for successful upload */ + 201: { + content: { + "application/json": components["schemas"]["release-asset"]; + }; + }; + /** Response if you upload an asset with the same filename as another uploaded asset */ + 422: unknown; + }; + requestBody: { + content: { + "*/*": string; + }; + }; + }; + /** List the reactions to a [release](https://docs.github.com/rest/reference/repos#releases). */ + "reactions/list-for-release": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the release. */ + release_id: components["parameters"]["release-id"]; + }; + query: { + /** Returns a single [reaction type](https://docs.github.com/rest/reference/reactions#reaction-types). Omit this parameter to list all reactions to a release. */ + content?: "+1" | "laugh" | "heart" | "hooray" | "rocket" | "eyes"; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["reaction"][]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** Create a reaction to a [release](https://docs.github.com/rest/reference/repos#releases). 
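A sketch of `repos/upload-release-asset` following the description above: fetch the release, expand its `upload_url` hypermedia template, and send the raw bytes with an explicit `Content-Type`. The file name, label, and media type are assumptions:

```ts
import { readFileSync } from "node:fs";
import { Octokit } from "@octokit/core";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

async function uploadAsset(owner: string, repo: string, release_id: number) {
  // The upload endpoint is release-specific, so read the hypermedia
  // `upload_url` (an RFC 6570 template ending in `{?name,label}`) first.
  const { data: release } = await octokit.request(
    "GET /repos/{owner}/{repo}/releases/{release_id}",
    { owner, repo, release_id }
  );

  const upload = await octokit.request({
    method: "POST",
    url: release.upload_url,                        // template expanded with name/label
    headers: { "content-type": "application/zip" }, // media type of the asset
    name: "build.zip",
    label: "Build artifacts",
    data: readFileSync("build.zip"),                // raw binary body, not JSON
  });
  console.log("upload finished with status", upload.status);
}

uploadAsset("octocat", "hello-world", 1).catch(console.error);
```

Setting `data` makes the request use it verbatim as the body instead of JSON-encoding the remaining parameters, which is what the "raw binary form" note above asks for.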
A response with a `Status: 200 OK` means that you already added the reaction type to this release. */ + "reactions/create-for-release": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the release. */ + release_id: components["parameters"]["release-id"]; + }; + }; + responses: { + /** Reaction exists */ + 200: { + content: { + "application/json": components["schemas"]["reaction"]; + }; + }; + /** Reaction created */ + 201: { + content: { + "application/json": components["schemas"]["reaction"]; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** + * @description The [reaction type](https://docs.github.com/rest/reference/reactions#reaction-types) to add to the release. + * @enum {string} + */ + content: "+1" | "laugh" | "heart" | "hooray" | "rocket" | "eyes"; + }; + }; + }; + }; + /** + * **Note:** You can also specify a repository by `repository_id` using the route `DELETE delete /repositories/:repository_id/releases/:release_id/reactions/:reaction_id`. + * + * Delete a reaction to a [release](https://docs.github.com/rest/reference/repos#releases). + */ + "reactions/delete-for-release": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the release. */ + release_id: components["parameters"]["release-id"]; + /** The unique identifier of the reaction. */ + reaction_id: components["parameters"]["reaction-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** + * Lists secret scanning alerts for an eligible repository, from newest to oldest. + * To use this endpoint, you must be an administrator for the repository or for the organization that owns the repository, and you must use a personal access token with the `repo` scope or `security_events` scope. + * For public repositories, you may instead use the `public_repo` scope. + * + * GitHub Apps must have the `secret_scanning_alerts` read permission to use this endpoint. + */ + "secret-scanning/list-alerts-for-repo": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + query: { + /** Set to `open` or `resolved` to only list secret scanning alerts in a specific state. */ + state?: components["parameters"]["secret-scanning-alert-state"]; + /** + * A comma-separated list of secret types to return. By default all secret types are returned. + * See "[Secret scanning patterns](https://docs.github.com/code-security/secret-scanning/secret-scanning-patterns#supported-secrets-for-advanced-security)" + * for a complete list of secret types. + */ + secret_type?: components["parameters"]["secret-scanning-alert-secret-type"]; + /** A comma-separated list of resolutions. Only secret scanning alerts with one of these resolutions are listed. Valid resolutions are `false_positive`, `wont_fix`, `revoked`, `pattern_edited`, `pattern_deleted` or `used_in_tests`. 
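For `reactions/create-for-release`, a short sketch distinguishing the `200` (already reacted) and `201` (created) cases described above; all values are placeholders:

```ts
import { Octokit } from "@octokit/core";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

async function cheerRelease(owner: string, repo: string, release_id: number) {
  const response = await octokit.request(
    "POST /repos/{owner}/{repo}/releases/{release_id}/reactions",
    { owner, repo, release_id, content: "hooray" }
  );
  // 201 = newly created; 200 = this user had already added the same reaction.
  console.log(response.status === 201 ? "reaction created" : "reaction already existed");
}

cheerRelease("octocat", "hello-world", 1).catch(console.error);
```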
*/ + resolution?: components["parameters"]["secret-scanning-alert-resolution"]; + /** The property to sort the results by. `created` means when the alert was created. `updated` means when the alert was updated or resolved. */ + sort?: components["parameters"]["secret-scanning-alert-sort"]; + /** The direction to sort the results by. */ + direction?: components["parameters"]["direction"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** A cursor, as given in the [Link header](https://docs.github.com/rest/overview/resources-in-the-rest-api#link-header). If specified, the query only searches for events before this cursor. To receive an initial cursor on your first request, include an empty "before" query string. */ + before?: components["parameters"]["secret-scanning-pagination-before-org-repo"]; + /** A cursor, as given in the [Link header](https://docs.github.com/rest/overview/resources-in-the-rest-api#link-header). If specified, the query only searches for events after this cursor. To receive an initial cursor on your first request, include an empty "after" query string. */ + after?: components["parameters"]["secret-scanning-pagination-after-org-repo"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["secret-scanning-alert"][]; + }; + }; + /** Repository is public or secret scanning is disabled for the repository */ + 404: unknown; + 503: components["responses"]["service_unavailable"]; + }; + }; + /** + * Gets a single secret scanning alert detected in an eligible repository. + * To use this endpoint, you must be an administrator for the repository or for the organization that owns the repository, and you must use a personal access token with the `repo` scope or `security_events` scope. + * For public repositories, you may instead use the `public_repo` scope. + * + * GitHub Apps must have the `secret_scanning_alerts` read permission to use this endpoint. + */ + "secret-scanning/get-alert": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies an alert. You can find this at the end of the URL for a code scanning alert within GitHub, and in the `number` field in the response from the `GET /repos/{owner}/{repo}/code-scanning/alerts` operation. */ + alert_number: components["parameters"]["alert-number"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["secret-scanning-alert"]; + }; + }; + 304: components["responses"]["not_modified"]; + /** Repository is public, or secret scanning is disabled for the repository, or the resource is not found */ + 404: unknown; + 503: components["responses"]["service_unavailable"]; + }; + }; + /** + * Updates the status of a secret scanning alert in an eligible repository. + * To use this endpoint, you must be an administrator for the repository or for the organization that owns the repository, and you must use a personal access token with the `repo` scope or `security_events` scope. + * For public repositories, you may instead use the `public_repo` scope. + * + * GitHub Apps must have the `secret_scanning_alerts` write permission to use this endpoint. 
+ */ + "secret-scanning/update-alert": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies an alert. You can find this at the end of the URL for a code scanning alert within GitHub, and in the `number` field in the response from the `GET /repos/{owner}/{repo}/code-scanning/alerts` operation. */ + alert_number: components["parameters"]["alert-number"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["secret-scanning-alert"]; + }; + }; + /** Repository is public, or secret scanning is disabled for the repository, or the resource is not found */ + 404: unknown; + /** State does not match the resolution */ + 422: unknown; + 503: components["responses"]["service_unavailable"]; + }; + requestBody: { + content: { + "application/json": { + state: components["schemas"]["secret-scanning-alert-state"]; + resolution?: components["schemas"]["secret-scanning-alert-resolution"]; + }; + }; + }; + }; + /** + * Lists all locations for a given secret scanning alert for an eligible repository. + * To use this endpoint, you must be an administrator for the repository or for the organization that owns the repository, and you must use a personal access token with the `repo` scope or `security_events` scope. + * For public repositories, you may instead use the `public_repo` scope. + * + * GitHub Apps must have the `secret_scanning_alerts` read permission to use this endpoint. + */ + "secret-scanning/list-locations-for-alert": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies an alert. You can find this at the end of the URL for a code scanning alert within GitHub, and in the `number` field in the response from the `GET /repos/{owner}/{repo}/code-scanning/alerts` operation. */ + alert_number: components["parameters"]["alert-number"]; + }; + query: { + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["secret-scanning-location"][]; + }; + }; + /** Repository is public, or secret scanning is disabled for the repository, or the resource is not found */ + 404: unknown; + 503: components["responses"]["service_unavailable"]; + }; + }; + /** + * Lists the people that have starred the repository. + * + * You can also find out _when_ stars were created by passing the following custom [media type](https://docs.github.com/rest/overview/media-types/) via the `Accept` header: + */ + "activity/list-stargazers-for-repo": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. 
*/ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": Partial & + Partial; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + }; + /** Returns a weekly aggregate of the number of additions and deletions pushed to a repository. */ + "repos/get-code-frequency-stats": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Returns a weekly aggregate of the number of additions and deletions pushed to a repository. */ + 200: { + content: { + "application/json": components["schemas"]["code-frequency-stat"][]; + }; + }; + 202: components["responses"]["accepted"]; + 204: components["responses"]["no_content"]; + }; + }; + /** Returns the last year of commit activity grouped by week. The `days` array is a group of commits per day, starting on `Sunday`. */ + "repos/get-commit-activity-stats": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["commit-activity"][]; + }; + }; + 202: components["responses"]["accepted"]; + 204: components["responses"]["no_content"]; + }; + }; + /** + * Returns the `total` number of commits authored by the contributor. In addition, the response includes a Weekly Hash (`weeks` array) with the following information: + * + * * `w` - Start of the week, given as a [Unix timestamp](http://en.wikipedia.org/wiki/Unix_time). + * * `a` - Number of additions + * * `d` - Number of deletions + * * `c` - Number of commits + */ + "repos/get-contributors-stats": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** + * * `w` - Start of the week, given as a [Unix timestamp](http://en.wikipedia.org/wiki/Unix_time). + * * `a` - Number of additions + * * `d` - Number of deletions + * * `c` - Number of commits + */ + 200: { + content: { + "application/json": components["schemas"]["contributor-activity"][]; + }; + }; + 202: components["responses"]["accepted"]; + 204: components["responses"]["no_content"]; + }; + }; + /** + * Returns the total commit counts for the `owner` and total commit counts in `all`. `all` is everyone combined, including the `owner` in the last 52 weeks. If you'd like to get the commit counts for non-owners, you can subtract `owner` from `all`. + * + * The array order is oldest week (index 0) to most recent week. + */ + "repos/get-participation-stats": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** The array order is oldest week (index 0) to most recent week. 
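Following the `repos/get-participation-stats` note above ("subtract `owner` from `all`"), a small sketch computing non-owner commits per week; owner/repo are placeholders and the `202`/retry case is only noted in a comment:

```ts
import { Octokit } from "@octokit/core";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

async function nonOwnerCommitsPerWeek(owner: string, repo: string) {
  // Note: the stats endpoints can answer 202 while GitHub is still
  // aggregating data; a production caller should retry after a short delay.
  const { data } = await octokit.request(
    "GET /repos/{owner}/{repo}/stats/participation",
    { owner, repo }
  );
  // `all` and `owner` are 52 weekly totals, oldest week first (index 0).
  return data.all.map((total, week) => total - data.owner[week]);
}

nonOwnerCommitsPerWeek("octocat", "hello-world").then(console.log).catch(console.error);
```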
*/ + 200: { + content: { + "application/json": components["schemas"]["participation-stats"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Each array contains the day number, hour number, and number of commits: + * + * * `0-6`: Sunday - Saturday + * * `0-23`: Hour of day + * * Number of commits + * + * For example, `[2, 14, 25]` indicates that there were 25 total commits, during the 2:00pm hour on Tuesdays. All times are based on the time zone of individual commits. + */ + "repos/get-punch-card-stats": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** For example, `[2, 14, 25]` indicates that there were 25 total commits, during the 2:00pm hour on Tuesdays. All times are based on the time zone of individual commits. */ + 200: { + content: { + "application/json": components["schemas"]["code-frequency-stat"][]; + }; + }; + 204: components["responses"]["no_content"]; + }; + }; + /** + * Users with push access in a repository can create commit statuses for a given SHA. + * + * Note: there is a limit of 1000 statuses per `sha` and `context` within a repository. Attempts to create more than 1000 statuses will result in a validation error. + */ + "repos/create-commit-status": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + sha: string; + }; + }; + responses: { + /** Response */ + 201: { + headers: { + Location?: string; + }; + content: { + "application/json": components["schemas"]["status"]; + }; + }; + }; + requestBody: { + content: { + "application/json": { + /** + * @description The state of the status. + * @enum {string} + */ + state: "error" | "failure" | "pending" | "success"; + /** + * @description The target URL to associate with this status. This URL will be linked from the GitHub UI to allow users to easily see the source of the status. + * For example, if your continuous integration system is posting build status, you would want to provide the deep link for the build output for this specific SHA: + * `http://ci.example.com/user/repo/build/sha` + */ + target_url?: string; + /** @description A short description of the status. */ + description?: string; + /** + * @description A string label to differentiate this status from the status of other systems. This field is case-insensitive. + * @default default + */ + context?: string; + }; + }; + }; + }; + /** Lists the people watching the specified repository. */ + "activity/list-watchers-for-repo": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. 
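A sketch for `repos/create-commit-status` using the `state`, `target_url`, `description`, and `context` fields typed above; the CI URL, context label, and SHA are invented placeholders:

```ts
import { Octokit } from "@octokit/core";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

async function reportBuild(owner: string, repo: string, sha: string, ok: boolean) {
  await octokit.request("POST /repos/{owner}/{repo}/statuses/{sha}", {
    owner,
    repo,
    sha,
    state: ok ? "success" : "failure",
    // Deep link to the build output for this specific SHA (placeholder URL).
    target_url: `https://ci.example.com/${owner}/${repo}/build/${sha}`,
    description: ok ? "Build passed" : "Build failed",
    context: "ci/example-build", // case-insensitive label distinguishing this status
  });
}

reportBuild("octocat", "hello-world", "6dcb09b5b5", true).catch(console.error);
```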
*/ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["simple-user"][]; + }; + }; + }; + }; + "activity/get-repo-subscription": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** if you subscribe to the repository */ + 200: { + content: { + "application/json": components["schemas"]["repository-subscription"]; + }; + }; + 403: components["responses"]["forbidden"]; + /** Not Found if you don't subscribe to the repository */ + 404: unknown; + }; + }; + /** If you would like to watch a repository, set `subscribed` to `true`. If you would like to ignore notifications made within a repository, set `ignored` to `true`. If you would like to stop watching a repository, [delete the repository's subscription](https://docs.github.com/rest/reference/activity#delete-a-repository-subscription) completely. */ + "activity/set-repo-subscription": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["repository-subscription"]; + }; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description Determines if notifications should be received from this repository. */ + subscribed?: boolean; + /** @description Determines if all notifications should be blocked from this repository. */ + ignored?: boolean; + }; + }; + }; + }; + /** This endpoint should only be used to stop watching a repository. To control whether or not you wish to receive notifications from a repository, [set the repository's subscription manually](https://docs.github.com/rest/reference/activity#set-a-repository-subscription). */ + "activity/delete-repo-subscription": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + "repos/list-tags": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["tag"][]; + }; + }; + }; + }; + /** + * This returns the tag protection states of a repository. + * + * This information is only available to repository administrators. + */ + "repos/list-tag-protection": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. 
*/ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["tag-protection"][]; + }; + }; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * This creates a tag protection state for a repository. + * This endpoint is only available to repository administrators. + */ + "repos/create-tag-protection": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 201: { + content: { + "application/json": components["schemas"]["tag-protection"]; + }; + }; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + requestBody: { + content: { + "application/json": { + /** @description An optional glob pattern to match against when enforcing tag protection. */ + pattern: string; + }; + }; + }; + }; + /** + * This deletes a tag protection state for a repository. + * This endpoint is only available to repository administrators. + */ + "repos/delete-tag-protection": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the tag protection. */ + tag_protection_id: components["parameters"]["tag-protection-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Gets a redirect URL to download a tar archive for a repository. If you omit `:ref`, the repository’s default branch (usually + * `master`) will be used. Please make sure your HTTP framework is configured to follow redirects or you will need to use + * the `Location` header to make a second `GET` request. + * **Note**: For private repositories, these links are temporary and expire after five minutes. + */ + "repos/download-tarball-archive": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + ref: string; + }; + }; + responses: { + /** Response */ + 302: never; + }; + }; + "repos/list-teams": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["team"][]; + }; + }; + }; + }; + "repos/get-all-topics": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. 
*/ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + query: { + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["topic"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + "repos/replace-all-topics": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["topic"]; + }; + }; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed_simple"]; + }; + requestBody: { + content: { + "application/json": { + /** @description An array of topics to add to the repository. Pass one or more topics to _replace_ the set of existing topics. Send an empty array (`[]`) to clear all topics from the repository. **Note:** Topic `names` cannot contain uppercase letters. */ + names: string[]; + }; + }; + }; + }; + /** Get the total number of clones and breakdown per day or week for the last 14 days. Timestamps are aligned to UTC midnight of the beginning of the day or week. Week begins on Monday. */ + "repos/get-clones": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + query: { + /** The time frame to display results for. */ + per?: components["parameters"]["per"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["clone-traffic"]; + }; + }; + 403: components["responses"]["forbidden"]; + }; + }; + /** Get the top 10 popular contents over the last 14 days. */ + "repos/get-top-paths": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["content-traffic"][]; + }; + }; + 403: components["responses"]["forbidden"]; + }; + }; + /** Get the top 10 referrers over the last 14 days. */ + "repos/get-top-referrers": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["referrer-traffic"][]; + }; + }; + 403: components["responses"]["forbidden"]; + }; + }; + /** Get the total number of views and breakdown per day or week for the last 14 days. Timestamps are aligned to UTC midnight of the beginning of the day or week. Week begins on Monday. 
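For `repos/replace-all-topics`, a short sketch; the topic names are examples and, as the description above says, the array replaces the existing set (an empty array clears it):

```ts
import { Octokit } from "@octokit/core";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

// Topic names must be lowercase; the response echoes the resulting set.
octokit
  .request("PUT /repos/{owner}/{repo}/topics", {
    owner: "octocat",    // placeholder
    repo: "hello-world", // placeholder
    names: ["github-api", "typescript", "automation"],
  })
  .then(({ data }) => console.log("topics are now:", data.names))
  .catch(console.error);
```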
*/ + "repos/get-views": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + query: { + /** The time frame to display results for. */ + per?: components["parameters"]["per"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["view-traffic"]; + }; + }; + 403: components["responses"]["forbidden"]; + }; + }; + /** A transfer request will need to be accepted by the new owner when transferring a personal repository to another user. The response will contain the original `owner`, and the transfer will continue asynchronously. For more details on the requirements to transfer personal and organization-owned repositories, see [about repository transfers](https://docs.github.com/articles/about-repository-transfers/). */ + "repos/transfer": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 202: { + content: { + "application/json": components["schemas"]["minimal-repository"]; + }; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The username or organization name the repository will be transferred to. */ + new_owner: string; + /** @description ID of the team or teams to add to the repository. Teams can only be added to organization-owned repositories. */ + team_ids?: number[]; + }; + }; + }; + }; + /** Shows whether dependency alerts are enabled or disabled for a repository. The authenticated user must have admin read access to the repository. For more information, see "[About security alerts for vulnerable dependencies](https://docs.github.com/en/articles/about-security-alerts-for-vulnerable-dependencies)". */ + "repos/check-vulnerability-alerts": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response if repository is enabled with vulnerability alerts */ + 204: never; + /** Not Found if repository is not enabled with vulnerability alerts */ + 404: unknown; + }; + }; + /** Enables dependency alerts and the dependency graph for a repository. The authenticated user must have admin access to the repository. For more information, see "[About security alerts for vulnerable dependencies](https://docs.github.com/en/articles/about-security-alerts-for-vulnerable-dependencies)". */ + "repos/enable-vulnerability-alerts": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** Disables dependency alerts and the dependency graph for a repository. The authenticated user must have admin access to the repository. 
For more information, see "[About security alerts for vulnerable dependencies](https://docs.github.com/en/articles/about-security-alerts-for-vulnerable-dependencies)". */ + "repos/disable-vulnerability-alerts": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** + * Gets a redirect URL to download a zip archive for a repository. If you omit `:ref`, the repository’s default branch (usually + * `master`) will be used. Please make sure your HTTP framework is configured to follow redirects or you will need to use + * the `Location` header to make a second `GET` request. + * **Note**: For private repositories, these links are temporary and expire after five minutes. + */ + "repos/download-zipball-archive": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + ref: string; + }; + }; + responses: { + /** Response */ + 302: never; + }; + }; + /** + * Creates a new repository using a repository template. Use the `template_owner` and `template_repo` route parameters to specify the repository to use as the template. The authenticated user must own or be a member of an organization that owns the repository. To check if a repository is available to use as a template, get the repository's information using the [Get a repository](https://docs.github.com/rest/reference/repos#get-a-repository) endpoint and check that the `is_template` key is `true`. + * + * **OAuth scope requirements** + * + * When using [OAuth](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/), authorizations must include: + * + * * `public_repo` scope or `repo` scope to create a public repository. Note: For GitHub AE, use `repo` scope to create an internal repository. + * * `repo` scope to create a private repository + */ + "repos/create-using-template": { + parameters: { + path: { + template_owner: string; + template_repo: string; + }; + }; + responses: { + /** Response */ + 201: { + headers: { + Location?: string; + }; + content: { + "application/json": components["schemas"]["repository"]; + }; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The organization or person who will own the new repository. To create a new repository in an organization, the authenticated user must be a member of the specified organization. */ + owner?: string; + /** @description The name of the new repository. */ + name: string; + /** @description A short description of the new repository. */ + description?: string; + /** + * @description Set to `true` to include the directory structure and files from all branches in the template repository, and not just the default branch. Default: `false`. + * @default false + */ + include_all_branches?: boolean; + /** + * @description Either `true` to create a new private repository or `false` to create a new public one. + * @default false + */ + private?: boolean; + }; + }; + }; + }; + /** + * Lists all public repositories in the order that they were created. 
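A sketch of `repos/create-using-template` with the body fields typed above; the template and target names are placeholders, and the scope requirements are as stated in the description:

```ts
import { Octokit } from "@octokit/core";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

async function createFromTemplate() {
  const { data } = await octokit.request(
    "POST /repos/{template_owner}/{template_repo}/generate",
    {
      template_owner: "octocat",        // placeholder template repository
      template_repo: "template-repo",
      owner: "my-org",                  // optional; defaults to the authenticated user
      name: "new-service",
      description: "Bootstrapped from a template",
      private: true,                    // needs `repo` scope; public needs `public_repo`
      include_all_branches: false,      // default: only the template's default branch
    }
  );
  console.log("created", data.full_name);
}

createFromTemplate().catch(console.error);
```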
+ * + * Note: + * - For GitHub Enterprise Server, this endpoint will only list repositories available to all users on the enterprise. + * - Pagination is powered exclusively by the `since` parameter. Use the [Link header](https://docs.github.com/rest/overview/resources-in-the-rest-api#link-header) to get the URL for the next page of repositories. + */ + "repos/list-public": { + parameters: { + query: { + /** A repository ID. Only return repositories with an ID greater than this ID. */ + since?: components["parameters"]["since-repo"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: { + Link?: string; + }; + content: { + "application/json": components["schemas"]["minimal-repository"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** Lists all secrets available in an environment without revealing their encrypted values. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `secrets` repository permission to use this endpoint. */ + "actions/list-environment-secrets": { + parameters: { + path: { + /** The unique identifier of the repository. */ + repository_id: components["parameters"]["repository-id"]; + /** The name of the environment */ + environment_name: components["parameters"]["environment-name"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": { + total_count: number; + secrets: components["schemas"]["actions-secret"][]; + }; + }; + }; + }; + }; + /** Get the public key for an environment, which you need to encrypt environment secrets. You need to encrypt a secret before you can create or update secrets. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `secrets` repository permission to use this endpoint. */ + "actions/get-environment-public-key": { + parameters: { + path: { + /** The unique identifier of the repository. */ + repository_id: components["parameters"]["repository-id"]; + /** The name of the environment */ + environment_name: components["parameters"]["environment-name"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["actions-public-key"]; + }; + }; + }; + }; + /** Gets a single environment secret without revealing its encrypted value. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `secrets` repository permission to use this endpoint. */ + "actions/get-environment-secret": { + parameters: { + path: { + /** The unique identifier of the repository. */ + repository_id: components["parameters"]["repository-id"]; + /** The name of the environment */ + environment_name: components["parameters"]["environment-name"]; + /** The name of the secret. */ + secret_name: components["parameters"]["secret-name"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["actions-secret"]; + }; + }; + }; + }; + /** + * Creates or updates an environment secret with an encrypted value. 
Encrypt your secret using + * [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages). You must authenticate using an access + * token with the `repo` scope to use this endpoint. GitHub Apps must have the `secrets` repository permission to use + * this endpoint. + * + * #### Example encrypting a secret using Node.js + * + * Encrypt your secret using the [tweetsodium](https://github.com/github/tweetsodium) library. + * + * ``` + * const sodium = require('tweetsodium'); + * + * const key = "base64-encoded-public-key"; + * const value = "plain-text-secret"; + * + * // Convert the message and key to Uint8Array's (Buffer implements that interface) + * const messageBytes = Buffer.from(value); + * const keyBytes = Buffer.from(key, 'base64'); + * + * // Encrypt using LibSodium. + * const encryptedBytes = sodium.seal(messageBytes, keyBytes); + * + * // Base64 the encrypted secret + * const encrypted = Buffer.from(encryptedBytes).toString('base64'); + * + * console.log(encrypted); + * ``` + * + * + * #### Example encrypting a secret using Python + * + * Encrypt your secret using [pynacl](https://pynacl.readthedocs.io/en/latest/public/#nacl-public-sealedbox) with Python 3. + * + * ``` + * from base64 import b64encode + * from nacl import encoding, public + * + * def encrypt(public_key: str, secret_value: str) -> str: + * """Encrypt a Unicode string using the public key.""" + * public_key = public.PublicKey(public_key.encode("utf-8"), encoding.Base64Encoder()) + * sealed_box = public.SealedBox(public_key) + * encrypted = sealed_box.encrypt(secret_value.encode("utf-8")) + * return b64encode(encrypted).decode("utf-8") + * ``` + * + * #### Example encrypting a secret using C# + * + * Encrypt your secret using the [Sodium.Core](https://www.nuget.org/packages/Sodium.Core/) package. + * + * ``` + * var secretValue = System.Text.Encoding.UTF8.GetBytes("mySecret"); + * var publicKey = Convert.FromBase64String("2Sg8iYjAxxmI2LvUXpJjkYrMxURPc8r+dB7TJyvvcCU="); + * + * var sealedPublicKeyBox = Sodium.SealedPublicKeyBox.Create(secretValue, publicKey); + * + * Console.WriteLine(Convert.ToBase64String(sealedPublicKeyBox)); + * ``` + * + * #### Example encrypting a secret using Ruby + * + * Encrypt your secret using the [rbnacl](https://github.com/RubyCrypto/rbnacl) gem. + * + * ```ruby + * require "rbnacl" + * require "base64" + * + * key = Base64.decode64("+ZYvJDZMHUfBkJdyq5Zm9SKqeuBQ4sj+6sfjlH4CgG0=") + * public_key = RbNaCl::PublicKey.new(key) + * + * box = RbNaCl::Boxes::Sealed.from_public_key(public_key) + * encrypted_secret = box.encrypt("my_secret") + * + * # Print the base64 encoded secret + * puts Base64.strict_encode64(encrypted_secret) + * ``` + */ + "actions/create-or-update-environment-secret": { + parameters: { + path: { + /** The unique identifier of the repository. */ + repository_id: components["parameters"]["repository-id"]; + /** The name of the environment */ + environment_name: components["parameters"]["environment-name"]; + /** The name of the secret. 
*/ + secret_name: components["parameters"]["secret-name"]; + }; + }; + responses: { + /** Response when creating a secret */ + 201: { + content: { + "application/json": components["schemas"]["empty-object"]; + }; + }; + /** Response when updating a secret */ + 204: never; + }; + requestBody: { + content: { + "application/json": { + /** @description Value for your secret, encrypted with [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages) using the public key retrieved from the [Get an environment public key](https://docs.github.com/rest/reference/actions#get-an-environment-public-key) endpoint. */ + encrypted_value: string; + /** @description ID of the key you used to encrypt the secret. */ + key_id: string; + }; + }; + }; + }; + /** Deletes a secret in an environment using the secret name. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `secrets` repository permission to use this endpoint. */ + "actions/delete-environment-secret": { + parameters: { + path: { + /** The unique identifier of the repository. */ + repository_id: components["parameters"]["repository-id"]; + /** The name of the environment */ + environment_name: components["parameters"]["environment-name"]; + /** The name of the secret. */ + secret_name: components["parameters"]["secret-name"]; + }; + }; + responses: { + /** Default response */ + 204: never; + }; + }; + /** **Note:** The SCIM API endpoints for enterprise accounts are currently in beta and are subject to change. */ + "enterprise-admin/list-provisioned-groups-enterprise": { + parameters: { + path: { + /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ + enterprise: components["parameters"]["enterprise"]; + }; + query: { + /** Used for pagination: the index of the first result to return. */ + startIndex?: components["parameters"]["start-index"]; + /** Used for pagination: the number of results to return. */ + count?: components["parameters"]["count"]; + /** filter results */ + filter?: string; + /** attributes to exclude */ + excludedAttributes?: string; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["scim-group-list-enterprise"]; + }; + }; + }; + }; + /** + * **Note:** The SCIM API endpoints for enterprise accounts are currently in beta and are subject to change. + * + * Provision an enterprise group, and invite users to the group. This sends invitation emails to the email address of the invited users to join the GitHub organization that the SCIM group corresponds to. + */ + "enterprise-admin/provision-and-invite-enterprise-group": { + parameters: { + path: { + /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ + enterprise: components["parameters"]["enterprise"]; + }; + }; + responses: { + /** Response */ + 201: { + content: { + "application/json": components["schemas"]["scim-enterprise-group"]; + }; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The SCIM schema URIs. */ + schemas: string[]; + /** @description The name of the SCIM group. This must match the GitHub organization that the group maps to. */ + displayName: string; + members?: { + /** @description The SCIM user ID for a user. */ + value: string; + }[]; + }; + }; + }; + }; + /** **Note:** The SCIM API endpoints for enterprise accounts are currently in beta and are subject to change. 
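Tying the encryption examples above to the `actions/create-or-update-environment-secret` call itself, a hedged end-to-end sketch; the `/repositories/{repository_id}/...` route strings, the `tweetsodium` import shape, and all ids and names are assumptions here:

```ts
import * as sodium from "tweetsodium"; // same library as the Node.js example above
import { Octokit } from "@octokit/core";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

async function putEnvironmentSecret(
  repository_id: number,
  environment_name: string,
  secret_name: string,
  plaintext: string
) {
  // 1. Fetch the environment public key used for sealing the secret.
  const { data: key } = await octokit.request(
    "GET /repositories/{repository_id}/environments/{environment_name}/secrets/public-key",
    { repository_id, environment_name }
  );

  // 2. Seal the plaintext with LibSodium, as in the encryption examples above.
  const encryptedBytes = sodium.seal(
    Buffer.from(plaintext),
    Buffer.from(key.key, "base64")
  );

  // 3. Send the sealed value with the key id; 201 = created, 204 = updated.
  const response = await octokit.request(
    "PUT /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}",
    {
      repository_id,
      environment_name,
      secret_name,
      encrypted_value: Buffer.from(encryptedBytes).toString("base64"),
      key_id: key.key_id,
    }
  );
  console.log(response.status === 201 ? "secret created" : "secret updated");
}

putEnvironmentSecret(1296269, "production", "DEPLOY_TOKEN", "s3cr3t").catch(console.error);
```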
*/ + "enterprise-admin/get-provisioning-information-for-enterprise-group": { + parameters: { + path: { + /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ + enterprise: components["parameters"]["enterprise"]; + /** Identifier generated by the GitHub SCIM endpoint. */ + scim_group_id: components["parameters"]["scim-group-id"]; + }; + query: { + /** Attributes to exclude. */ + excludedAttributes?: string; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["scim-enterprise-group"]; + }; + }; + }; + }; + /** + * **Note:** The SCIM API endpoints for enterprise accounts are currently in beta and are subject to change. + * + * Replaces an existing provisioned group’s information. You must provide all the information required for the group as if you were provisioning it for the first time. Any existing group information that you don't provide will be removed, including group membership. If you want to only update a specific attribute, use the [Update an attribute for a SCIM enterprise group](#update-an-attribute-for-a-scim-enterprise-group) endpoint instead. + */ + "enterprise-admin/set-information-for-provisioned-enterprise-group": { + parameters: { + path: { + /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ + enterprise: components["parameters"]["enterprise"]; + /** Identifier generated by the GitHub SCIM endpoint. */ + scim_group_id: components["parameters"]["scim-group-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["scim-enterprise-group"]; + }; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The SCIM schema URIs. */ + schemas: string[]; + /** @description The name of the SCIM group. This must match the GitHub organization that the group maps to. */ + displayName: string; + members?: { + /** @description The SCIM user ID for a user. */ + value: string; + }[]; + }; + }; + }; + }; + /** **Note:** The SCIM API endpoints for enterprise accounts are currently in beta and are subject to change. */ + "enterprise-admin/delete-scim-group-from-enterprise": { + parameters: { + path: { + /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ + enterprise: components["parameters"]["enterprise"]; + /** Identifier generated by the GitHub SCIM endpoint. */ + scim_group_id: components["parameters"]["scim-group-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** + * **Note:** The SCIM API endpoints for enterprise accounts are currently in beta and are subject to change. + * + * Allows you to change a provisioned group’s individual attributes. To change a group’s values, you must provide a specific Operations JSON format that contains at least one of the add, remove, or replace operations. For examples and more information on the SCIM operations format, see the [SCIM specification](https://tools.ietf.org/html/rfc7644#section-3.5.2). + */ + "enterprise-admin/update-attribute-for-enterprise-group": { + parameters: { + path: { + /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ + enterprise: components["parameters"]["enterprise"]; + /** Identifier generated by the GitHub SCIM endpoint. 
*/ + scim_group_id: components["parameters"]["scim-group-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["scim-enterprise-group"]; + }; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The SCIM schema URIs. */ + schemas: string[]; + /** @description Array of [SCIM operations](https://tools.ietf.org/html/rfc7644#section-3.5.2). */ + Operations: { + /** @enum {string} */ + op: "add" | "Add" | "remove" | "Remove" | "replace" | "Replace"; + path?: string; + /** @description Can be any value - string, number, array or object. */ + value?: unknown; + }[]; + }; + }; + }; + }; + /** + * **Note:** The SCIM API endpoints for enterprise accounts are currently in beta and are subject to change. + * + * Retrieves a paginated list of all provisioned enterprise members, including pending invitations. + * + * When a user with a SAML-provisioned external identity leaves (or is removed from) an enterprise, the account's metadata is immediately removed. However, the returned list of user accounts might not always match the organization or enterprise member list you see on GitHub. This can happen in certain cases where an external identity associated with an organization will not match an organization member: + * - When a user with a SCIM-provisioned external identity is removed from an enterprise, the account's metadata is preserved to allow the user to re-join the organization in the future. + * - When inviting a user to join an organization, you can expect to see their external identity in the results before they accept the invitation, or if the invitation is cancelled (or never accepted). + * - When a user is invited over SCIM, an external identity is created that matches with the invitee's email address. However, this identity is only linked to a user account when the user accepts the invitation by going through SAML SSO. + * + * The returned list of external identities can include an entry for a `null` user. These are unlinked SAML identities that are created when a user goes through the following Single Sign-On (SSO) process but does not sign in to their GitHub account after completing SSO: + * + * 1. The user is granted access by the IdP and is not a member of the GitHub enterprise. + * + * 1. The user attempts to access the GitHub enterprise and initiates the SAML SSO process, and is not currently signed in to their GitHub account. + * + * 1. After successfully authenticating with the SAML SSO IdP, the `null` external identity entry is created and the user is prompted to sign in to their GitHub account: + * - If the user signs in, their GitHub account is linked to this entry. + * - If the user does not sign in (or does not create a new account when prompted), they are not added to the GitHub enterprise, and the external identity `null` entry remains in place. + */ + "enterprise-admin/list-provisioned-identities-enterprise": { + parameters: { + path: { + /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ + enterprise: components["parameters"]["enterprise"]; + }; + query: { + /** Used for pagination: the index of the first result to return. */ + startIndex?: components["parameters"]["start-index"]; + /** Used for pagination: the number of results to return. 
*/ + count?: components["parameters"]["count"]; + /** filter results */ + filter?: string; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["scim-user-list-enterprise"]; + }; + }; + }; + }; + /** + * **Note:** The SCIM API endpoints for enterprise accounts are currently in beta and are subject to change. + * + * Provision enterprise membership for a user, and send organization invitation emails to the email address. + * + * You can optionally include the groups a user will be invited to join. If you do not provide a list of `groups`, the user is provisioned for the enterprise, but no organization invitation emails will be sent. + */ + "enterprise-admin/provision-and-invite-enterprise-user": { + parameters: { + path: { + /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ + enterprise: components["parameters"]["enterprise"]; + }; + }; + responses: { + /** Response */ + 201: { + content: { + "application/json": components["schemas"]["scim-enterprise-user"]; + }; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The SCIM schema URIs. */ + schemas: string[]; + /** @description The username for the user. */ + userName: string; + name: { + /** @description The first name of the user. */ + givenName: string; + /** @description The last name of the user. */ + familyName: string; + }; + /** @description List of user emails. */ + emails: { + /** @description The email address. */ + value: string; + /** @description The type of email address. */ + type: string; + /** @description Whether this email address is the primary address. */ + primary: boolean; + }[]; + /** @description List of SCIM group IDs the user is a member of. */ + groups?: { + value?: string; + }[]; + }; + }; + }; + }; + /** **Note:** The SCIM API endpoints for enterprise accounts are currently in beta and are subject to change. */ + "enterprise-admin/get-provisioning-information-for-enterprise-user": { + parameters: { + path: { + /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ + enterprise: components["parameters"]["enterprise"]; + /** The unique identifier of the SCIM user. */ + scim_user_id: components["parameters"]["scim-user-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["scim-enterprise-user"]; + }; + }; + }; + }; + /** + * **Note:** The SCIM API endpoints for enterprise accounts are currently in beta and are subject to change. + * + * Replaces an existing provisioned user's information. You must provide all the information required for the user as if you were provisioning them for the first time. Any existing user information that you don't provide will be removed. If you want to only update a specific attribute, use the [Update an attribute for a SCIM user](#update-an-attribute-for-an-enterprise-scim-user) endpoint instead. + * + * You must at least provide the required values for the user: `userName`, `name`, and `emails`. + * + * **Warning:** Setting `active: false` removes the user from the enterprise, deletes the external identity, and deletes the associated `{scim_user_id}`. + */ + "enterprise-admin/set-information-for-provisioned-enterprise-user": { + parameters: { + path: { + /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. 
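The `provision-and-invite-enterprise-user` request body above requires `schemas`, `userName`, `name`, and `emails`, with `groups` optional. One possible call, with placeholder values and an assumed route:

```ts
import { Octokit } from "@octokit/core";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

// Provision an enterprise member with the required fields from the request body
// declared above. Values are placeholders and the URL path is assumed from
// GitHub's SCIM documentation.
const { status } = await octokit.request("POST /scim/v2/enterprises/{enterprise}/Users", {
  enterprise: "my-enterprise",
  schemas: ["urn:ietf:params:scim:schemas:core:2.0:User"],
  userName: "mona@example.com",
  name: { givenName: "Mona", familyName: "Octocat" },
  emails: [{ value: "mona@example.com", type: "work", primary: true }],
  // groups: [{ value: "abcd1234" }], // optional: SCIM group ids to invite into
});
console.log(status); // 201 on success, per the response map above
```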
*/ + enterprise: components["parameters"]["enterprise"]; + /** The unique identifier of the SCIM user. */ + scim_user_id: components["parameters"]["scim-user-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["scim-enterprise-user"]; + }; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The SCIM schema URIs. */ + schemas: string[]; + /** @description The username for the user. */ + userName: string; + name: { + /** @description The first name of the user. */ + givenName: string; + /** @description The last name of the user. */ + familyName: string; + }; + /** @description List of user emails. */ + emails: { + /** @description The email address. */ + value: string; + /** @description The type of email address. */ + type: string; + /** @description Whether this email address is the primary address. */ + primary: boolean; + }[]; + /** @description List of SCIM group IDs the user is a member of. */ + groups?: { + value?: string; + }[]; + }; + }; + }; + }; + /** **Note:** The SCIM API endpoints for enterprise accounts are currently in beta and are subject to change. */ + "enterprise-admin/delete-user-from-enterprise": { + parameters: { + path: { + /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ + enterprise: components["parameters"]["enterprise"]; + /** The unique identifier of the SCIM user. */ + scim_user_id: components["parameters"]["scim-user-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** + * **Note:** The SCIM API endpoints for enterprise accounts are currently in beta and are subject to change. + * + * Allows you to change a provisioned user's individual attributes. To change a user's values, you must provide a specific `Operations` JSON format that contains at least one of the `add`, `remove`, or `replace` operations. For examples and more information on the SCIM operations format, see the [SCIM specification](https://tools.ietf.org/html/rfc7644#section-3.5.2). + * + * **Note:** Complicated SCIM `path` selectors that include filters are not supported. For example, a `path` selector defined as `"path": "emails[type eq \"work\"]"` will not work. + * + * **Warning:** If you set `active:false` using the `replace` operation (as shown in the JSON example below), it removes the user from the enterprise, deletes the external identity, and deletes the associated `:scim_user_id`. + * + * ``` + * { + * "Operations":[{ + * "op":"replace", + * "value":{ + * "active":false + * } + * }] + * } + * ``` + */ + "enterprise-admin/update-attribute-for-enterprise-user": { + parameters: { + path: { + /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ + enterprise: components["parameters"]["enterprise"]; + /** The unique identifier of the SCIM user. */ + scim_user_id: components["parameters"]["scim-user-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["scim-enterprise-user"]; + }; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The SCIM schema URIs. */ + schemas: string[]; + /** @description Array of [SCIM operations](https://tools.ietf.org/html/rfc7644#section-3.5.2). */ + Operations: { [key: string]: unknown }[]; + }; + }; + }; + }; + /** + * Retrieves a paginated list of all provisioned organization members, including pending invitations. 
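The warning above gives the exact `Operations` payload for deactivating a user. Translated into a request (route and ids are assumptions, not part of the generated types):

```ts
import { Octokit } from "@octokit/core";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

// Soft-deactivate an enterprise SCIM user with the `replace` operation shown in
// the warning above. Note that this removes the user from the enterprise and
// deletes the external identity.
await octokit.request(
  "PATCH /scim/v2/enterprises/{enterprise}/Users/{scim_user_id}",
  {
    enterprise: "my-enterprise", // placeholder
    scim_user_id: "efgh5678",    // placeholder
    schemas: ["urn:ietf:params:scim:api:messages:2.0:PatchOp"],
    Operations: [{ op: "replace", value: { active: false } }],
  }
);
```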
If you provide the `filter` parameter, the resources for all matching provisioned members are returned. + * + * When a user with a SAML-provisioned external identity leaves (or is removed from) an organization, the account's metadata is immediately removed. However, the returned list of user accounts might not always match the organization or enterprise member list you see on GitHub. This can happen in certain cases where an external identity associated with an organization will not match an organization member: + * - When a user with a SCIM-provisioned external identity is removed from an organization, the account's metadata is preserved to allow the user to re-join the organization in the future. + * - When inviting a user to join an organization, you can expect to see their external identity in the results before they accept the invitation, or if the invitation is cancelled (or never accepted). + * - When a user is invited over SCIM, an external identity is created that matches with the invitee's email address. However, this identity is only linked to a user account when the user accepts the invitation by going through SAML SSO. + * + * The returned list of external identities can include an entry for a `null` user. These are unlinked SAML identities that are created when a user goes through the following Single Sign-On (SSO) process but does not sign in to their GitHub account after completing SSO: + * + * 1. The user is granted access by the IdP and is not a member of the GitHub organization. + * + * 1. The user attempts to access the GitHub organization and initiates the SAML SSO process, and is not currently signed in to their GitHub account. + * + * 1. After successfully authenticating with the SAML SSO IdP, the `null` external identity entry is created and the user is prompted to sign in to their GitHub account: + * - If the user signs in, their GitHub account is linked to this entry. + * - If the user does not sign in (or does not create a new account when prompted), they are not added to the GitHub organization, and the external identity `null` entry remains in place. + */ + "scim/list-provisioned-identities": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + query: { + /** Used for pagination: the index of the first result to return. */ + startIndex?: number; + /** Used for pagination: the number of results to return. */ + count?: number; + /** + * Filters results using the equals query parameter operator (`eq`). You can filter results that are equal to `id`, `userName`, `emails`, and `external_id`. For example, to search for an identity with the `userName` Octocat, you would use this query: + * + * `?filter=userName%20eq%20\"Octocat\"`. + * + * To filter results for the identity with the email `octocat@github.com`, you would use this query: + * + * `?filter=emails%20eq%20\"octocat@github.com\"`. + */ + filter?: string; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/scim+json": components["schemas"]["scim-user-list"]; + }; + }; + 304: components["responses"]["not_modified"]; + 400: components["responses"]["scim_bad_request"]; + 403: components["responses"]["scim_forbidden"]; + 404: components["responses"]["scim_not_found"]; + 429: components["responses"]["scim_too_many_requests"]; + }; + }; + /** Provision organization membership for a user, and send an activation email to the email address. 
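The `filter` query parameter documented above takes a SCIM `eq` expression. A small sketch of looking up one identity by `userName`, assuming the usual organization SCIM route and a placeholder org; Octokit URL-encodes the value into `?filter=userName%20eq%20%22Octocat%22`:

```ts
import { Octokit } from "@octokit/core";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

// List provisioned identities for an org, filtered to a single userName.
// The route string is an assumption; the generated type records only the operation id.
const { data } = await octokit.request("GET /scim/v2/organizations/{org}/Users", {
  org: "my-org",               // placeholder
  filter: 'userName eq "Octocat"',
  startIndex: 1,
  count: 10,
});
console.log(data.totalResults, data.Resources?.length);
```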
*/ + "scim/provision-and-invite-user": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + }; + responses: { + /** Response */ + 201: { + content: { + "application/scim+json": components["schemas"]["scim-user"]; + }; + }; + 304: components["responses"]["not_modified"]; + 400: components["responses"]["scim_bad_request"]; + 403: components["responses"]["scim_forbidden"]; + 404: components["responses"]["scim_not_found"]; + 409: components["responses"]["scim_conflict"]; + 500: components["responses"]["scim_internal_error"]; + }; + requestBody: { + content: { + "application/json": { + /** + * @description Configured by the admin. Could be an email, login, or username + * @example someone@example.com + */ + userName: string; + /** + * @description The name of the user, suitable for display to end-users + * @example Jon Doe + */ + displayName?: string; + /** + * @example { + * "givenName": "Jane", + * "familyName": "User" + * } + */ + name: { + givenName: string; + familyName: string; + formatted?: string; + }; + /** + * @description user emails + * @example [ + * { + * "value": "someone@example.com", + * "primary": true + * }, + * { + * "value": "another@example.com", + * "primary": false + * } + * ] + */ + emails: { + value: string; + primary?: boolean; + type?: string; + }[]; + schemas?: string[]; + externalId?: string; + groups?: string[]; + active?: boolean; + }; + }; + }; + }; + "scim/get-provisioning-information-for-user": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The unique identifier of the SCIM user. */ + scim_user_id: components["parameters"]["scim-user-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/scim+json": components["schemas"]["scim-user"]; + }; + }; + 304: components["responses"]["not_modified"]; + 403: components["responses"]["scim_forbidden"]; + 404: components["responses"]["scim_not_found"]; + }; + }; + /** + * Replaces an existing provisioned user's information. You must provide all the information required for the user as if you were provisioning them for the first time. Any existing user information that you don't provide will be removed. If you want to only update a specific attribute, use the [Update an attribute for a SCIM user](https://docs.github.com/rest/reference/scim#update-an-attribute-for-a-scim-user) endpoint instead. + * + * You must at least provide the required values for the user: `userName`, `name`, and `emails`. + * + * **Warning:** Setting `active: false` removes the user from the organization, deletes the external identity, and deletes the associated `{scim_user_id}`. + */ + "scim/set-information-for-provisioned-user": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The unique identifier of the SCIM user. 
*/ + scim_user_id: components["parameters"]["scim-user-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/scim+json": components["schemas"]["scim-user"]; + }; + }; + 304: components["responses"]["not_modified"]; + 403: components["responses"]["scim_forbidden"]; + 404: components["responses"]["scim_not_found"]; + }; + requestBody: { + content: { + "application/json": { + schemas?: string[]; + /** + * @description The name of the user, suitable for display to end-users + * @example Jon Doe + */ + displayName?: string; + externalId?: string; + groups?: string[]; + active?: boolean; + /** + * @description Configured by the admin. Could be an email, login, or username + * @example someone@example.com + */ + userName: string; + /** + * @example { + * "givenName": "Jane", + * "familyName": "User" + * } + */ + name: { + givenName: string; + familyName: string; + formatted?: string; + }; + /** + * @description user emails + * @example [ + * { + * "value": "someone@example.com", + * "primary": true + * }, + * { + * "value": "another@example.com", + * "primary": false + * } + * ] + */ + emails: { + type?: string; + value: string; + primary?: boolean; + }[]; + }; + }; + }; + }; + "scim/delete-user-from-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The unique identifier of the SCIM user. */ + scim_user_id: components["parameters"]["scim-user-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + 304: components["responses"]["not_modified"]; + 403: components["responses"]["scim_forbidden"]; + 404: components["responses"]["scim_not_found"]; + }; + }; + /** + * Allows you to change a provisioned user's individual attributes. To change a user's values, you must provide a specific `Operations` JSON format that contains at least one of the `add`, `remove`, or `replace` operations. For examples and more information on the SCIM operations format, see the [SCIM specification](https://tools.ietf.org/html/rfc7644#section-3.5.2). + * + * **Note:** Complicated SCIM `path` selectors that include filters are not supported. For example, a `path` selector defined as `"path": "emails[type eq \"work\"]"` will not work. + * + * **Warning:** If you set `active:false` using the `replace` operation (as shown in the JSON example below), it removes the user from the organization, deletes the external identity, and deletes the associated `:scim_user_id`. + * + * ``` + * { + * "Operations":[{ + * "op":"replace", + * "value":{ + * "active":false + * } + * }] + * } + * ``` + */ + "scim/update-attribute-for-user": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The unique identifier of the SCIM user. 
*/ + scim_user_id: components["parameters"]["scim-user-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/scim+json": components["schemas"]["scim-user"]; + }; + }; + 304: components["responses"]["not_modified"]; + 400: components["responses"]["scim_bad_request"]; + 403: components["responses"]["scim_forbidden"]; + 404: components["responses"]["scim_not_found"]; + /** Response */ + 429: { + content: { + "application/json": components["schemas"]["basic-error"]; + }; + }; + }; + requestBody: { + content: { + "application/json": { + schemas?: string[]; + /** + * @description Set of operations to be performed + * @example [ + * { + * "op": "replace", + * "value": { + * "active": false + * } + * } + * ] + */ + Operations: { + /** @enum {string} */ + op: "add" | "remove" | "replace"; + path?: string; + value?: + | { + active?: boolean | null; + userName?: string | null; + externalId?: string | null; + givenName?: string | null; + familyName?: string | null; + } + | { + value?: string; + primary?: boolean; + }[] + | string; + }[]; + }; + }; + }; + }; + /** + * Searches for query terms inside of a file. This method returns up to 100 results [per page](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination). + * + * When searching for code, you can get text match metadata for the file **content** and file **path** fields when you pass the `text-match` media type. For more details about how to receive highlighted search results, see [Text match metadata](https://docs.github.com/rest/reference/search#text-match-metadata). + * + * For example, if you want to find the definition of the `addClass` function inside [jQuery](https://github.com/jquery/jquery) repository, your query would look something like this: + * + * `q=addClass+in:file+language:js+repo:jquery/jquery` + * + * This query searches for the keyword `addClass` within a file's contents. The query limits the search to files where the language is JavaScript in the `jquery/jquery` repository. + * + * #### Considerations for code search + * + * Due to the complexity of searching code, there are a few restrictions on how searches are performed: + * + * * Only the _default branch_ is considered. In most cases, this will be the `master` branch. + * * Only files smaller than 384 KB are searchable. + * * You must always include at least one search term when searching source code. For example, searching for [`language:go`](https://github.com/search?utf8=%E2%9C%93&q=language%3Ago&type=Code) is not valid, while [`amazing + * language:go`](https://github.com/search?utf8=%E2%9C%93&q=amazing+language%3Ago&type=Code) is. + */ + "search/code": { + parameters: { + query: { + /** The query contains one or more search keywords and qualifiers. Qualifiers allow you to limit your search to specific areas of GitHub. The REST API supports the same qualifiers as the web interface for GitHub. To learn more about the format of the query, see [Constructing a search query](https://docs.github.com/rest/reference/search#constructing-a-search-query). See "[Searching code](https://docs.github.com/search-github/searching-on-github/searching-code)" for a detailed list of qualifiers. */ + q: string; + /** Sorts the results of your query. Can only be `indexed`, which indicates how recently a file has been indexed by the GitHub search infrastructure. 
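For `scim/update-attribute-for-user`, the typed `Operations` array accepts an object, an array, or a plain string as `value`. A sketch using the simple-path form; per the note above, filtered selectors such as `emails[type eq "work"]` are not supported. The route and identifiers are placeholders, not taken from this file:

```ts
import { Octokit } from "@octokit/core";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

// Update a single attribute on an organization SCIM identity using a simple
// dotted path and a string value, one of the three value shapes typed above.
await octokit.request("PATCH /scim/v2/organizations/{org}/Users/{scim_user_id}", {
  org: "my-org",            // placeholder
  scim_user_id: "efgh5678", // placeholder
  schemas: ["urn:ietf:params:scim:api:messages:2.0:PatchOp"],
  Operations: [{ op: "replace", path: "name.familyName", value: "Lisa" }],
});
```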
Default: [best match](https://docs.github.com/rest/reference/search#ranking-search-results) */ + sort?: "indexed"; + /** Determines whether the first search result returned is the highest number of matches (`desc`) or lowest number of matches (`asc`). This parameter is ignored unless you provide `sort`. */ + order?: components["parameters"]["order"]; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": { + total_count: number; + incomplete_results: boolean; + items: components["schemas"]["code-search-result-item"][]; + }; + }; + }; + 304: components["responses"]["not_modified"]; + 403: components["responses"]["forbidden"]; + 422: components["responses"]["validation_failed"]; + 503: components["responses"]["service_unavailable"]; + }; + }; + /** + * Find commits via various criteria on the default branch (usually `master`). This method returns up to 100 results [per page](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination). + * + * When searching for commits, you can get text match metadata for the **message** field when you provide the `text-match` media type. For more details about how to receive highlighted search results, see [Text match + * metadata](https://docs.github.com/rest/reference/search#text-match-metadata). + * + * For example, if you want to find commits related to CSS in the [octocat/Spoon-Knife](https://github.com/octocat/Spoon-Knife) repository. Your query would look something like this: + * + * `q=repo:octocat/Spoon-Knife+css` + */ + "search/commits": { + parameters: { + query: { + /** The query contains one or more search keywords and qualifiers. Qualifiers allow you to limit your search to specific areas of GitHub. The REST API supports the same qualifiers as the web interface for GitHub. To learn more about the format of the query, see [Constructing a search query](https://docs.github.com/rest/reference/search#constructing-a-search-query). See "[Searching commits](https://docs.github.com/search-github/searching-on-github/searching-commits)" for a detailed list of qualifiers. */ + q: string; + /** Sorts the results of your query by `author-date` or `committer-date`. Default: [best match](https://docs.github.com/rest/reference/search#ranking-search-results) */ + sort?: "author-date" | "committer-date"; + /** Determines whether the first search result returned is the highest number of matches (`desc`) or lowest number of matches (`asc`). This parameter is ignored unless you provide `sort`. */ + order?: components["parameters"]["order"]; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": { + total_count: number; + incomplete_results: boolean; + items: components["schemas"]["commit-search-result-item"][]; + }; + }; + }; + 304: components["responses"]["not_modified"]; + }; + }; + /** + * Find issues by state and keyword. This method returns up to 100 results [per page](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination). 
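The `search/code` description above already gives a query; here is the same search issued through `@octokit/core` (qualifiers are URL-encoded automatically; the token and paging values are placeholders):

```ts
import { Octokit } from "@octokit/core";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

// Code search mirroring the addClass example in the description above.
const { data } = await octokit.request("GET /search/code", {
  q: "addClass in:file language:js repo:jquery/jquery",
  sort: "indexed", // the only supported sort value for this endpoint
  order: "desc",
  per_page: 30,
});
console.log(data.total_count, data.incomplete_results);
```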
+ * + * When searching for issues, you can get text match metadata for the issue **title**, issue **body**, and issue **comment body** fields when you pass the `text-match` media type. For more details about how to receive highlighted + * search results, see [Text match metadata](https://docs.github.com/rest/reference/search#text-match-metadata). + * + * For example, if you want to find the oldest unresolved Python bugs on Windows. Your query might look something like this. + * + * `q=windows+label:bug+language:python+state:open&sort=created&order=asc` + * + * This query searches for the keyword `windows`, within any open issue that is labeled as `bug`. The search runs across repositories whose primary language is Python. The results are sorted by creation date in ascending order, which means the oldest issues appear first in the search results. + * + * **Note:** For [user-to-server](https://docs.github.com/developers/apps/identifying-and-authorizing-users-for-github-apps#user-to-server-requests) GitHub App requests, you can't retrieve a combination of issues and pull requests in a single query. Requests that don't include the `is:issue` or `is:pull-request` qualifier will receive an HTTP `422 Unprocessable Entity` response. To get results for both issues and pull requests, you must send separate queries for issues and pull requests. For more information about the `is` qualifier, see "[Searching only issues or pull requests](https://docs.github.com/github/searching-for-information-on-github/searching-issues-and-pull-requests#search-only-issues-or-pull-requests)." + */ + "search/issues-and-pull-requests": { + parameters: { + query: { + /** The query contains one or more search keywords and qualifiers. Qualifiers allow you to limit your search to specific areas of GitHub. The REST API supports the same qualifiers as the web interface for GitHub. To learn more about the format of the query, see [Constructing a search query](https://docs.github.com/rest/reference/search#constructing-a-search-query). See "[Searching issues and pull requests](https://docs.github.com/search-github/searching-on-github/searching-issues-and-pull-requests)" for a detailed list of qualifiers. */ + q: string; + /** Sorts the results of your query by the number of `comments`, `reactions`, `reactions-+1`, `reactions--1`, `reactions-smile`, `reactions-thinking_face`, `reactions-heart`, `reactions-tada`, or `interactions`. You can also sort results by how recently the items were `created` or `updated`, Default: [best match](https://docs.github.com/rest/reference/search#ranking-search-results) */ + sort?: + | "comments" + | "reactions" + | "reactions-+1" + | "reactions--1" + | "reactions-smile" + | "reactions-thinking_face" + | "reactions-heart" + | "reactions-tada" + | "interactions" + | "created" + | "updated"; + /** Determines whether the first search result returned is the highest number of matches (`desc`) or lowest number of matches (`asc`). This parameter is ignored unless you provide `sort`. */ + order?: components["parameters"]["order"]; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. 
*/ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": { + total_count: number; + incomplete_results: boolean; + items: components["schemas"]["issue-search-result-item"][]; + }; + }; + }; + 304: components["responses"]["not_modified"]; + 403: components["responses"]["forbidden"]; + 422: components["responses"]["validation_failed"]; + 503: components["responses"]["service_unavailable"]; + }; + }; + /** + * Find labels in a repository with names or descriptions that match search keywords. Returns up to 100 results [per page](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination). + * + * When searching for labels, you can get text match metadata for the label **name** and **description** fields when you pass the `text-match` media type. For more details about how to receive highlighted search results, see [Text match metadata](https://docs.github.com/rest/reference/search#text-match-metadata). + * + * For example, if you want to find labels in the `linguist` repository that match `bug`, `defect`, or `enhancement`. Your query might look like this: + * + * `q=bug+defect+enhancement&repository_id=64778136` + * + * The labels that best match the query appear first in the search results. + */ + "search/labels": { + parameters: { + query: { + /** The id of the repository. */ + repository_id: number; + /** The search keywords. This endpoint does not accept qualifiers in the query. To learn more about the format of the query, see [Constructing a search query](https://docs.github.com/rest/reference/search#constructing-a-search-query). */ + q: string; + /** Sorts the results of your query by when the label was `created` or `updated`. Default: [best match](https://docs.github.com/rest/reference/search#ranking-search-results) */ + sort?: "created" | "updated"; + /** Determines whether the first search result returned is the highest number of matches (`desc`) or lowest number of matches (`asc`). This parameter is ignored unless you provide `sort`. */ + order?: components["parameters"]["order"]; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": { + total_count: number; + incomplete_results: boolean; + items: components["schemas"]["label-search-result-item"][]; + }; + }; + }; + 304: components["responses"]["not_modified"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Find repositories via various criteria. This method returns up to 100 results [per page](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination). + * + * When searching for repositories, you can get text match metadata for the **name** and **description** fields when you pass the `text-match` media type. For more details about how to receive highlighted search results, see [Text match metadata](https://docs.github.com/rest/reference/search#text-match-metadata). + * + * For example, if you want to search for popular Tetris repositories written in assembly code, your query might look like this: + * + * `q=tetris+language:assembly&sort=stars&order=desc` + * + * This query searches for repositories with the word `tetris` in the name, the description, or the README. 
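A sketch of the issue search described above, adding an explicit `is:issue` qualifier so that user-to-server GitHub App requests avoid the `422` mentioned in the note; the query and paging values are placeholders:

```ts
import { Octokit } from "@octokit/core";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

// Oldest open Python bugs on Windows, sorted by creation date ascending,
// restricted to issues (not pull requests).
const { data } = await octokit.request("GET /search/issues", {
  q: "windows label:bug language:python state:open is:issue",
  sort: "created",
  order: "asc",
  per_page: 50,
});
for (const issue of data.items) console.log(issue.created_at, issue.title);
```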
The results are limited to repositories where the primary language is assembly. The results are sorted by stars in descending order, so that the most popular repositories appear first in the search results. + */ + "search/repos": { + parameters: { + query: { + /** The query contains one or more search keywords and qualifiers. Qualifiers allow you to limit your search to specific areas of GitHub. The REST API supports the same qualifiers as the web interface for GitHub. To learn more about the format of the query, see [Constructing a search query](https://docs.github.com/rest/reference/search#constructing-a-search-query). See "[Searching for repositories](https://docs.github.com/articles/searching-for-repositories/)" for a detailed list of qualifiers. */ + q: string; + /** Sorts the results of your query by number of `stars`, `forks`, or `help-wanted-issues` or how recently the items were `updated`. Default: [best match](https://docs.github.com/rest/reference/search#ranking-search-results) */ + sort?: "stars" | "forks" | "help-wanted-issues" | "updated"; + /** Determines whether the first search result returned is the highest number of matches (`desc`) or lowest number of matches (`asc`). This parameter is ignored unless you provide `sort`. */ + order?: components["parameters"]["order"]; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": { + total_count: number; + incomplete_results: boolean; + items: components["schemas"]["repo-search-result-item"][]; + }; + }; + }; + 304: components["responses"]["not_modified"]; + 422: components["responses"]["validation_failed"]; + 503: components["responses"]["service_unavailable"]; + }; + }; + /** + * Find topics via various criteria. Results are sorted by best match. This method returns up to 100 results [per page](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination). See "[Searching topics](https://docs.github.com/articles/searching-topics/)" for a detailed list of qualifiers. + * + * When searching for topics, you can get text match metadata for the topic's **short\_description**, **description**, **name**, or **display\_name** field when you pass the `text-match` media type. For more details about how to receive highlighted search results, see [Text match metadata](https://docs.github.com/rest/reference/search#text-match-metadata). + * + * For example, if you want to search for topics related to Ruby that are featured on https://github.com/topics. Your query might look like this: + * + * `q=ruby+is:featured` + * + * This query searches for topics with the keyword `ruby` and limits the results to find only topics that are featured. The topics that are the best match for the query appear first in the search results. + */ + "search/topics": { + parameters: { + query: { + /** The query contains one or more search keywords and qualifiers. Qualifiers allow you to limit your search to specific areas of GitHub. The REST API supports the same qualifiers as the web interface for GitHub. To learn more about the format of the query, see [Constructing a search query](https://docs.github.com/rest/reference/search#constructing-a-search-query). */ + q: string; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. 
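The Tetris example above, issued against what is assumed to be the underlying `GET /search/repositories` route (the generated type records only the `search/repos` operation id):

```ts
import { Octokit } from "@octokit/core";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

// Popular Tetris repositories written in assembly, most-starred first.
const { data } = await octokit.request("GET /search/repositories", {
  q: "tetris language:assembly",
  sort: "stars",
  order: "desc",
});
console.log(data.items.map((repo: any) => repo.full_name).slice(0, 5));
```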
*/ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": { + total_count: number; + incomplete_results: boolean; + items: components["schemas"]["topic-search-result-item"][]; + }; + }; + }; + 304: components["responses"]["not_modified"]; + }; + }; + /** + * Find users via various criteria. This method returns up to 100 results [per page](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination). + * + * When searching for users, you can get text match metadata for the issue **login**, **email**, and **name** fields when you pass the `text-match` media type. For more details about highlighting search results, see [Text match metadata](https://docs.github.com/rest/reference/search#text-match-metadata). For more details about how to receive highlighted search results, see [Text match metadata](https://docs.github.com/rest/reference/search#text-match-metadata). + * + * For example, if you're looking for a list of popular users, you might try this query: + * + * `q=tom+repos:%3E42+followers:%3E1000` + * + * This query searches for users with the name `tom`. The results are restricted to users with more than 42 repositories and over 1,000 followers. + */ + "search/users": { + parameters: { + query: { + /** The query contains one or more search keywords and qualifiers. Qualifiers allow you to limit your search to specific areas of GitHub. The REST API supports the same qualifiers as the web interface for GitHub. To learn more about the format of the query, see [Constructing a search query](https://docs.github.com/rest/reference/search#constructing-a-search-query). See "[Searching users](https://docs.github.com/search-github/searching-on-github/searching-users)" for a detailed list of qualifiers. */ + q: string; + /** Sorts the results of your query by number of `followers` or `repositories`, or when the person `joined` GitHub. Default: [best match](https://docs.github.com/rest/reference/search#ranking-search-results) */ + sort?: "followers" | "repositories" | "joined"; + /** Determines whether the first search result returned is the highest number of matches (`desc`) or lowest number of matches (`asc`). This parameter is ignored unless you provide `sort`. */ + order?: components["parameters"]["order"]; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": { + total_count: number; + incomplete_results: boolean; + items: components["schemas"]["user-search-result-item"][]; + }; + }; + }; + 304: components["responses"]["not_modified"]; + 422: components["responses"]["validation_failed"]; + 503: components["responses"]["service_unavailable"]; + }; + }; + /** **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the [Get a team by name](https://docs.github.com/rest/reference/teams#get-a-team-by-name) endpoint. */ + "teams/get-legacy": { + parameters: { + path: { + /** The unique identifier of the team. 
*/ + team_id: components["parameters"]["team-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["team-full"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Delete a team](https://docs.github.com/rest/reference/teams#delete-a-team) endpoint. + * + * To delete a team, the authenticated user must be an organization owner or team maintainer. + * + * If you are an organization owner, deleting a parent team will delete all of its child teams as well. + */ + "teams/delete-legacy": { + parameters: { + path: { + /** The unique identifier of the team. */ + team_id: components["parameters"]["team-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Update a team](https://docs.github.com/rest/reference/teams#update-a-team) endpoint. + * + * To edit a team, the authenticated user must either be an organization owner or a team maintainer. + * + * **Note:** With nested teams, the `privacy` for parent teams cannot be `secret`. + */ + "teams/update-legacy": { + parameters: { + path: { + /** The unique identifier of the team. */ + team_id: components["parameters"]["team-id"]; + }; + }; + responses: { + /** Response when the updated information already exists */ + 200: { + content: { + "application/json": components["schemas"]["team-full"]; + }; + }; + /** Response */ + 201: { + content: { + "application/json": components["schemas"]["team-full"]; + }; + }; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The name of the team. */ + name: string; + /** @description The description of the team. */ + description?: string; + /** + * @description The level of privacy this team should have. Editing teams without specifying this parameter leaves `privacy` intact. The options are: + * **For a non-nested team:** + * \* `secret` - only visible to organization owners and members of this team. + * \* `closed` - visible to all members of this organization. + * **For a parent or child team:** + * \* `closed` - visible to all members of this organization. + * @enum {string} + */ + privacy?: "secret" | "closed"; + /** + * @description **Deprecated**. The permission that new repositories will be added to the team with when none is specified. + * @default pull + * @enum {string} + */ + permission?: "pull" | "push" | "admin"; + /** @description The ID of a team to set as the parent team. */ + parent_team_id?: number | null; + }; + }; + }; + }; + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [`List discussions`](https://docs.github.com/rest/reference/teams#list-discussions) endpoint. + * + * List all discussions on a team's page. OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). 
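The deprecation notices above point from the numeric `team_id` routes to the org/slug equivalents. A side-by-side sketch with placeholder ids; the replacement route is taken from the linked "Get a team by name" documentation rather than from this file:

```ts
import { Octokit } from "@octokit/core";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

// Deprecated: address the team by its numeric id.
const legacy = await octokit.request("GET /teams/{team_id}", { team_id: 123456 });

// Preferred replacement per the deprecation notice: address the team by org + slug.
const current = await octokit.request("GET /orgs/{org}/teams/{team_slug}", {
  org: "my-org",        // placeholder
  team_slug: "platform", // placeholder
});
console.log(legacy.data.id === current.data.id);
```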
+ */ + "teams/list-discussions-legacy": { + parameters: { + path: { + /** The unique identifier of the team. */ + team_id: components["parameters"]["team-id"]; + }; + query: { + /** The direction to sort the results by. */ + direction?: components["parameters"]["direction"]; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["team-discussion"][]; + }; + }; + }; + }; + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [`Create a discussion`](https://docs.github.com/rest/reference/teams#create-a-discussion) endpoint. + * + * Creates a new discussion post on a team's page. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. + */ + "teams/create-discussion-legacy": { + parameters: { + path: { + /** The unique identifier of the team. */ + team_id: components["parameters"]["team-id"]; + }; + }; + responses: { + /** Response */ + 201: { + content: { + "application/json": components["schemas"]["team-discussion"]; + }; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The discussion post's title. */ + title: string; + /** @description The discussion post's body text. */ + body: string; + /** + * @description Private posts are only visible to team members, organization owners, and team maintainers. Public posts are visible to all members of the organization. Set to `true` to create a private post. + * @default false + */ + private?: boolean; + }; + }; + }; + }; + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Get a discussion](https://docs.github.com/rest/reference/teams#get-a-discussion) endpoint. + * + * Get a specific discussion on a team's page. OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + */ + "teams/get-discussion-legacy": { + parameters: { + path: { + /** The unique identifier of the team. */ + team_id: components["parameters"]["team-id"]; + /** The number that identifies the discussion. */ + discussion_number: components["parameters"]["discussion-number"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["team-discussion"]; + }; + }; + }; + }; + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [`Delete a discussion`](https://docs.github.com/rest/reference/teams#delete-a-discussion) endpoint. 
+ * + * Delete a discussion from a team's page. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + */ + "teams/delete-discussion-legacy": { + parameters: { + path: { + /** The unique identifier of the team. */ + team_id: components["parameters"]["team-id"]; + /** The number that identifies the discussion. */ + discussion_number: components["parameters"]["discussion-number"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Update a discussion](https://docs.github.com/rest/reference/teams#update-a-discussion) endpoint. + * + * Edits the title and body text of a discussion post. Only the parameters you provide are updated. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + */ + "teams/update-discussion-legacy": { + parameters: { + path: { + /** The unique identifier of the team. */ + team_id: components["parameters"]["team-id"]; + /** The number that identifies the discussion. */ + discussion_number: components["parameters"]["discussion-number"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["team-discussion"]; + }; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The discussion post's title. */ + title?: string; + /** @description The discussion post's body text. */ + body?: string; + }; + }; + }; + }; + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [List discussion comments](https://docs.github.com/rest/reference/teams#list-discussion-comments) endpoint. + * + * List all comments on a team discussion. OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + */ + "teams/list-discussion-comments-legacy": { + parameters: { + path: { + /** The unique identifier of the team. */ + team_id: components["parameters"]["team-id"]; + /** The number that identifies the discussion. */ + discussion_number: components["parameters"]["discussion-number"]; + }; + query: { + /** The direction to sort the results by. */ + direction?: components["parameters"]["direction"]; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["team-discussion-comment"][]; + }; + }; + }; + }; + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Create a discussion comment](https://docs.github.com/rest/reference/teams#create-a-discussion-comment) endpoint. + * + * Creates a new comment on a team discussion. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). 
+ * + * This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. + */ + "teams/create-discussion-comment-legacy": { + parameters: { + path: { + /** The unique identifier of the team. */ + team_id: components["parameters"]["team-id"]; + /** The number that identifies the discussion. */ + discussion_number: components["parameters"]["discussion-number"]; + }; + }; + responses: { + /** Response */ + 201: { + content: { + "application/json": components["schemas"]["team-discussion-comment"]; + }; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The discussion comment's body text. */ + body: string; + }; + }; + }; + }; + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Get a discussion comment](https://docs.github.com/rest/reference/teams#get-a-discussion-comment) endpoint. + * + * Get a specific comment on a team discussion. OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + */ + "teams/get-discussion-comment-legacy": { + parameters: { + path: { + /** The unique identifier of the team. */ + team_id: components["parameters"]["team-id"]; + /** The number that identifies the discussion. */ + discussion_number: components["parameters"]["discussion-number"]; + /** The number that identifies the comment. */ + comment_number: components["parameters"]["comment-number"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["team-discussion-comment"]; + }; + }; + }; + }; + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Delete a discussion comment](https://docs.github.com/rest/reference/teams#delete-a-discussion-comment) endpoint. + * + * Deletes a comment on a team discussion. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + */ + "teams/delete-discussion-comment-legacy": { + parameters: { + path: { + /** The unique identifier of the team. */ + team_id: components["parameters"]["team-id"]; + /** The number that identifies the discussion. */ + discussion_number: components["parameters"]["discussion-number"]; + /** The number that identifies the comment. */ + comment_number: components["parameters"]["comment-number"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Update a discussion comment](https://docs.github.com/rest/reference/teams#update-a-discussion-comment) endpoint. + * + * Edits the body text of a discussion comment. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). 
+ */ + "teams/update-discussion-comment-legacy": { + parameters: { + path: { + /** The unique identifier of the team. */ + team_id: components["parameters"]["team-id"]; + /** The number that identifies the discussion. */ + discussion_number: components["parameters"]["discussion-number"]; + /** The number that identifies the comment. */ + comment_number: components["parameters"]["comment-number"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["team-discussion-comment"]; + }; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The discussion comment's body text. */ + body: string; + }; + }; + }; + }; + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [`List reactions for a team discussion comment`](https://docs.github.com/rest/reference/reactions#list-reactions-for-a-team-discussion-comment) endpoint. + * + * List the reactions to a [team discussion comment](https://docs.github.com/rest/reference/teams#discussion-comments). OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + */ + "reactions/list-for-team-discussion-comment-legacy": { + parameters: { + path: { + /** The unique identifier of the team. */ + team_id: components["parameters"]["team-id"]; + /** The number that identifies the discussion. */ + discussion_number: components["parameters"]["discussion-number"]; + /** The number that identifies the comment. */ + comment_number: components["parameters"]["comment-number"]; + }; + query: { + /** Returns a single [reaction type](https://docs.github.com/rest/reference/reactions#reaction-types). Omit this parameter to list all reactions to a team discussion comment. */ + content?: + | "+1" + | "-1" + | "laugh" + | "confused" + | "heart" + | "hooray" + | "rocket" + | "eyes"; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["reaction"][]; + }; + }; + }; + }; + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new "[Create reaction for a team discussion comment](https://docs.github.com/rest/reference/reactions#create-reaction-for-a-team-discussion-comment)" endpoint. + * + * Create a reaction to a [team discussion comment](https://docs.github.com/rest/reference/teams#discussion-comments). OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). A response with an HTTP `200` status means that you already added the reaction type to this team discussion comment. + */ + "reactions/create-for-team-discussion-comment-legacy": { + parameters: { + path: { + /** The unique identifier of the team. */ + team_id: components["parameters"]["team-id"]; + /** The number that identifies the discussion. */ + discussion_number: components["parameters"]["discussion-number"]; + /** The number that identifies the comment. 
*/ + comment_number: components["parameters"]["comment-number"]; + }; + }; + responses: { + /** Response */ + 201: { + content: { + "application/json": components["schemas"]["reaction"]; + }; + }; + }; + requestBody: { + content: { + "application/json": { + /** + * @description The [reaction type](https://docs.github.com/rest/reference/reactions#reaction-types) to add to the team discussion comment. + * @enum {string} + */ + content: + | "+1" + | "-1" + | "laugh" + | "confused" + | "heart" + | "hooray" + | "rocket" + | "eyes"; + }; + }; + }; + }; + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [`List reactions for a team discussion`](https://docs.github.com/rest/reference/reactions#list-reactions-for-a-team-discussion) endpoint. + * + * List the reactions to a [team discussion](https://docs.github.com/rest/reference/teams#discussions). OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + */ + "reactions/list-for-team-discussion-legacy": { + parameters: { + path: { + /** The unique identifier of the team. */ + team_id: components["parameters"]["team-id"]; + /** The number that identifies the discussion. */ + discussion_number: components["parameters"]["discussion-number"]; + }; + query: { + /** Returns a single [reaction type](https://docs.github.com/rest/reference/reactions#reaction-types). Omit this parameter to list all reactions to a team discussion. */ + content?: + | "+1" + | "-1" + | "laugh" + | "confused" + | "heart" + | "hooray" + | "rocket" + | "eyes"; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["reaction"][]; + }; + }; + }; + }; + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [`Create reaction for a team discussion`](https://docs.github.com/rest/reference/reactions#create-reaction-for-a-team-discussion) endpoint. + * + * Create a reaction to a [team discussion](https://docs.github.com/rest/reference/teams#discussions). OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). A response with an HTTP `200` status means that you already added the reaction type to this team discussion. + */ + "reactions/create-for-team-discussion-legacy": { + parameters: { + path: { + /** The unique identifier of the team. */ + team_id: components["parameters"]["team-id"]; + /** The number that identifies the discussion. */ + discussion_number: components["parameters"]["discussion-number"]; + }; + }; + responses: { + /** Response */ + 201: { + content: { + "application/json": components["schemas"]["reaction"]; + }; + }; + }; + requestBody: { + content: { + "application/json": { + /** + * @description The [reaction type](https://docs.github.com/rest/reference/reactions#reaction-types) to add to the team discussion. 
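The reaction `content` enum above allows exactly eight values. A sketch of adding one to a discussion comment on the assumed legacy route; per the note above, a `200` instead of a `201` means the reaction already existed:

```ts
import { Octokit } from "@octokit/core";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

// React to a team discussion comment; ids are placeholders and the route is an
// assumption reconstructed from the legacy Teams API paths.
const res = await octokit.request(
  "POST /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions",
  { team_id: 123456, discussion_number: 1, comment_number: 2, content: "hooray" }
);
console.log(res.status); // 201 created, 200 if it already existed
```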
+ * @enum {string} + */ + content: + | "+1" + | "-1" + | "laugh" + | "confused" + | "heart" + | "hooray" + | "rocket" + | "eyes"; + }; + }; + }; + }; + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [`List pending team invitations`](https://docs.github.com/rest/reference/teams#list-pending-team-invitations) endpoint. + * + * The return hash contains a `role` field which refers to the Organization Invitation role and will be one of the following values: `direct_member`, `admin`, `billing_manager`, `hiring_manager`, or `reinstate`. If the invitee is not a GitHub member, the `login` field in the return hash will be `null`. + */ + "teams/list-pending-invitations-legacy": { + parameters: { + path: { + /** The unique identifier of the team. */ + team_id: components["parameters"]["team-id"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["organization-invitation"][]; + }; + }; + }; + }; + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [`List team members`](https://docs.github.com/rest/reference/teams#list-team-members) endpoint. + * + * Team members will include the members of child teams. + */ + "teams/list-members-legacy": { + parameters: { + path: { + /** The unique identifier of the team. */ + team_id: components["parameters"]["team-id"]; + }; + query: { + /** Filters members returned by their role in the team. */ + role?: "member" | "maintainer" | "all"; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["simple-user"][]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * The "Get team member" endpoint (described below) is deprecated. + * + * We recommend using the [Get team membership for a user](https://docs.github.com/rest/reference/teams#get-team-membership-for-a-user) endpoint instead. It allows you to get both active and pending memberships. + * + * To list members in a team, the team must be visible to the authenticated user. + */ + "teams/get-member-legacy": { + parameters: { + path: { + /** The unique identifier of the team. */ + team_id: components["parameters"]["team-id"]; + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + }; + responses: { + /** if user is a member */ + 204: never; + /** if user is not a member */ + 404: unknown; + }; + }; + /** + * The "Add team member" endpoint (described below) is deprecated. + * + * We recommend using the [Add or update team membership for a user](https://docs.github.com/rest/reference/teams#add-or-update-team-membership-for-a-user) endpoint instead. It allows you to invite new organization members to your teams. + * + * Team synchronization is available for organizations using GitHub Enterprise Cloud. 
For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * To add someone to a team, the authenticated user must be an organization owner or a team maintainer in the team they're changing. The person being added to the team must be a member of the team's organization. + * + * **Note:** When you have team synchronization set up for a team with your organization's identity provider (IdP), you will see an error if you attempt to use the API for making changes to the team's membership. If you have access to manage group membership in your IdP, you can manage GitHub team membership through your identity provider, which automatically adds and removes team members in an organization. For more information, see "[Synchronizing teams between your identity provider and GitHub](https://docs.github.com/articles/synchronizing-teams-between-your-identity-provider-and-github/)." + * + * Note that you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)." + */ + "teams/add-member-legacy": { + parameters: { + path: { + /** The unique identifier of the team. */ + team_id: components["parameters"]["team-id"]; + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + }; + responses: { + /** Response */ + 204: never; + 403: components["responses"]["forbidden"]; + /** Not Found if team synchronization is set up */ + 404: unknown; + /** Unprocessable Entity if you attempt to add an organization to a team or you attempt to add a user to a team when they are not a member of at least one other team in the same organization */ + 422: unknown; + }; + }; + /** + * The "Remove team member" endpoint (described below) is deprecated. + * + * We recommend using the [Remove team membership for a user](https://docs.github.com/rest/reference/teams#remove-team-membership-for-a-user) endpoint instead. It allows you to remove both active and pending memberships. + * + * Team synchronization is available for organizations using GitHub Enterprise Cloud. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * To remove a team member, the authenticated user must have 'admin' permissions to the team or be an owner of the org that the team is associated with. Removing a team member does not delete the user, it just removes them from the team. + * + * **Note:** When you have team synchronization set up for a team with your organization's identity provider (IdP), you will see an error if you attempt to use the API for making changes to the team's membership. If you have access to manage group membership in your IdP, you can manage GitHub team membership through your identity provider, which automatically adds and removes team members in an organization. For more information, see "[Synchronizing teams between your identity provider and GitHub](https://docs.github.com/articles/synchronizing-teams-between-your-identity-provider-and-github/)." + */ + "teams/remove-member-legacy": { + parameters: { + path: { + /** The unique identifier of the team. */ + team_id: components["parameters"]["team-id"]; + /** The handle for the GitHub user account. 
*/ + username: components["parameters"]["username"]; + }; + }; + responses: { + /** Response */ + 204: never; + /** Not Found if team synchronization is set up */ + 404: unknown; + }; + }; + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Get team membership for a user](https://docs.github.com/rest/reference/teams#get-team-membership-for-a-user) endpoint. + * + * Team members will include the members of child teams. + * + * To get a user's membership with a team, the team must be visible to the authenticated user. + * + * **Note:** + * The response contains the `state` of the membership and the member's `role`. + * + * The `role` for organization owners is set to `maintainer`. For more information about `maintainer` roles, see [Create a team](https://docs.github.com/rest/reference/teams#create-a-team). + */ + "teams/get-membership-for-user-legacy": { + parameters: { + path: { + /** The unique identifier of the team. */ + team_id: components["parameters"]["team-id"]; + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["team-membership"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Add or update team membership for a user](https://docs.github.com/rest/reference/teams#add-or-update-team-membership-for-a-user) endpoint. + * + * Team synchronization is available for organizations using GitHub Enterprise Cloud. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * If the user is already a member of the team's organization, this endpoint will add the user to the team. To add a membership between an organization member and a team, the authenticated user must be an organization owner or a team maintainer. + * + * **Note:** When you have team synchronization set up for a team with your organization's identity provider (IdP), you will see an error if you attempt to use the API for making changes to the team's membership. If you have access to manage group membership in your IdP, you can manage GitHub team membership through your identity provider, which automatically adds and removes team members in an organization. For more information, see "[Synchronizing teams between your identity provider and GitHub](https://docs.github.com/articles/synchronizing-teams-between-your-identity-provider-and-github/)." + * + * If the user is unaffiliated with the team's organization, this endpoint will send an invitation to the user via email. This newly-created membership will be in the "pending" state until the user accepts the invitation, at which point the membership will transition to the "active" state and the user will be added as a member of the team. To add a membership between an unaffiliated user and a team, the authenticated user must be an organization owner. + * + * If the user is already a member of the team, this endpoint will update the team member's role. To update the membership of a team member, the authenticated user must be an organization owner or a team maintainer. 
+ */ + "teams/add-or-update-membership-for-user-legacy": { + parameters: { + path: { + /** The unique identifier of the team. */ + team_id: components["parameters"]["team-id"]; + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["team-membership"]; + }; + }; + /** Forbidden if team synchronization is set up */ + 403: unknown; + 404: components["responses"]["not_found"]; + /** Unprocessable Entity if you attempt to add an organization to a team */ + 422: unknown; + }; + requestBody: { + content: { + "application/json": { + /** + * @description The role that this user should have in the team. + * @default member + * @enum {string} + */ + role?: "member" | "maintainer"; + }; + }; + }; + }; + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Remove team membership for a user](https://docs.github.com/rest/reference/teams#remove-team-membership-for-a-user) endpoint. + * + * Team synchronization is available for organizations using GitHub Enterprise Cloud. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * To remove a membership between a user and a team, the authenticated user must have 'admin' permissions to the team or be an owner of the organization that the team is associated with. Removing team membership does not delete the user, it just removes their membership from the team. + * + * **Note:** When you have team synchronization set up for a team with your organization's identity provider (IdP), you will see an error if you attempt to use the API for making changes to the team's membership. If you have access to manage group membership in your IdP, you can manage GitHub team membership through your identity provider, which automatically adds and removes team members in an organization. For more information, see "[Synchronizing teams between your identity provider and GitHub](https://docs.github.com/articles/synchronizing-teams-between-your-identity-provider-and-github/)." + */ + "teams/remove-membership-for-user-legacy": { + parameters: { + path: { + /** The unique identifier of the team. */ + team_id: components["parameters"]["team-id"]; + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + }; + responses: { + /** Response */ + 204: never; + /** if team synchronization is set up */ + 403: unknown; + }; + }; + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [`List team projects`](https://docs.github.com/rest/reference/teams#list-team-projects) endpoint. + * + * Lists the organization projects for a team. + */ + "teams/list-projects-legacy": { + parameters: { + path: { + /** The unique identifier of the team. */ + team_id: components["parameters"]["team-id"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. 
*/ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["team-project"][]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Check team permissions for a project](https://docs.github.com/rest/reference/teams#check-team-permissions-for-a-project) endpoint. + * + * Checks whether a team has `read`, `write`, or `admin` permissions for an organization project. The response includes projects inherited from a parent team. + */ + "teams/check-permissions-for-project-legacy": { + parameters: { + path: { + /** The unique identifier of the team. */ + team_id: components["parameters"]["team-id"]; + /** The unique identifier of the project. */ + project_id: components["parameters"]["project-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["team-project"]; + }; + }; + /** Not Found if project is not managed by this team */ + 404: unknown; + }; + }; + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Add or update team project permissions](https://docs.github.com/rest/reference/teams#add-or-update-team-project-permissions) endpoint. + * + * Adds an organization project to a team. To add a project to a team or update the team's permission on a project, the authenticated user must have `admin` permissions for the project. The project and team must be part of the same organization. + */ + "teams/add-or-update-project-permissions-legacy": { + parameters: { + path: { + /** The unique identifier of the team. */ + team_id: components["parameters"]["team-id"]; + /** The unique identifier of the project. */ + project_id: components["parameters"]["project-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + /** Forbidden if the project is not owned by the organization */ + 403: { + content: { + "application/json": { + message?: string; + documentation_url?: string; + }; + }; + }; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** + * @description The permission to grant to the team for this project. Default: the team's `permission` attribute will be used to determine what permission to grant the team on this project. Note that, if you choose not to pass any parameters, you'll need to set `Content-Length` to zero when calling this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)." + * @enum {string} + */ + permission?: "read" | "write" | "admin"; + }; + }; + }; + }; + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Remove a project from a team](https://docs.github.com/rest/reference/teams#remove-a-project-from-a-team) endpoint. + * + * Removes an organization project from a team. An organization owner or a team maintainer can remove any project from the team. To remove a project from a team as an organization member, the authenticated user must have `read` access to both the team and project, or `admin` access to the team or project. 
**Note:** This endpoint removes the project from the team, but does not delete it. + */ + "teams/remove-project-legacy": { + parameters: { + path: { + /** The unique identifier of the team. */ + team_id: components["parameters"]["team-id"]; + /** The unique identifier of the project. */ + project_id: components["parameters"]["project-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [List team repositories](https://docs.github.com/rest/reference/teams#list-team-repositories) endpoint. */ + "teams/list-repos-legacy": { + parameters: { + path: { + /** The unique identifier of the team. */ + team_id: components["parameters"]["team-id"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["minimal-repository"][]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * **Note**: Repositories inherited through a parent team will also be checked. + * + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Check team permissions for a repository](https://docs.github.com/rest/reference/teams#check-team-permissions-for-a-repository) endpoint. + * + * You can also get information about the specified repository, including what permissions the team grants on it, by passing the following custom [media type](https://docs.github.com/rest/overview/media-types/) via the `Accept` header: + */ + "teams/check-permissions-for-repo-legacy": { + parameters: { + path: { + /** The unique identifier of the team. */ + team_id: components["parameters"]["team-id"]; + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Alternative response with extra repository information */ + 200: { + content: { + "application/json": components["schemas"]["team-repository"]; + }; + }; + /** Response if repository is managed by this team */ + 204: never; + /** Not Found if repository is not managed by this team */ + 404: unknown; + }; + }; + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new "[Add or update team repository permissions](https://docs.github.com/rest/reference/teams#add-or-update-team-repository-permissions)" endpoint. + * + * To add a repository to a team or update the team's permission on a repository, the authenticated user must have admin access to the repository, and must be able to see the team. The repository must be owned by the organization, or a direct fork of a repository owned by the organization. You will get a `422 Unprocessable Entity` status if you attempt to add a repository to a team that is not owned by the organization. 
+ * + * Note that, if you choose not to pass any parameters, you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)." + */ + "teams/add-or-update-repo-permissions-legacy": { + parameters: { + path: { + /** The unique identifier of the team. */ + team_id: components["parameters"]["team-id"]; + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 204: never; + 403: components["responses"]["forbidden"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** + * @description The permission to grant the team on this repository. If no permission is specified, the team's `permission` attribute will be used to determine what permission to grant the team on this repository. + * @enum {string} + */ + permission?: "pull" | "push" | "admin"; + }; + }; + }; + }; + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Remove a repository from a team](https://docs.github.com/rest/reference/teams#remove-a-repository-from-a-team) endpoint. + * + * If the authenticated user is an organization owner or a team maintainer, they can remove any repositories from the team. To remove a repository from a team as an organization member, the authenticated user must have admin access to the repository and must be able to see the team. NOTE: This does not delete the repository, it just removes it from the team. + */ + "teams/remove-repo-legacy": { + parameters: { + path: { + /** The unique identifier of the team. */ + team_id: components["parameters"]["team-id"]; + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [`List IdP groups for a team`](https://docs.github.com/rest/reference/teams#list-idp-groups-for-a-team) endpoint. + * + * Team synchronization is available for organizations using GitHub Enterprise Cloud. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * List IdP groups connected to a team on GitHub. + */ + "teams/list-idp-groups-for-legacy": { + parameters: { + path: { + /** The unique identifier of the team. */ + team_id: components["parameters"]["team-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["group-mapping"]; + }; + }; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. 
We recommend migrating your existing code to use the new [`Create or update IdP group connections`](https://docs.github.com/rest/reference/teams#create-or-update-idp-group-connections) endpoint. + * + * Team synchronization is available for organizations using GitHub Enterprise Cloud. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Creates, updates, or removes a connection between a team and an IdP group. When adding groups to a team, you must include all new and existing groups to avoid replacing existing groups with the new ones. Specifying an empty `groups` array will remove all connections for a team. + */ + "teams/create-or-update-idp-group-connections-legacy": { + parameters: { + path: { + /** The unique identifier of the team. */ + team_id: components["parameters"]["team-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["group-mapping"]; + }; + }; + 403: components["responses"]["forbidden"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The IdP groups you want to connect to a GitHub team. When updating, the new `groups` object will replace the original one. You must include any existing groups that you don't want to remove. */ + groups: { + /** @description ID of the IdP group. */ + group_id: string; + /** @description Name of the IdP group. */ + group_name: string; + /** @description Description of the IdP group. */ + group_description: string; + /** @example "caceab43fc9ffa20081c" */ + id?: string; + /** @example "external-team-6c13e7288ef7" */ + name?: string; + /** @example "moar cheese pleese" */ + description?: string; + }[]; + /** @example "I am not a timestamp" */ + synced_at?: string; + }; + }; + }; + }; + /** **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [`List child teams`](https://docs.github.com/rest/reference/teams#list-child-teams) endpoint. */ + "teams/list-child-legacy": { + parameters: { + path: { + /** The unique identifier of the team. */ + team_id: components["parameters"]["team-id"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** if child teams exist */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["team"][]; + }; + }; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * If the authenticated user is authenticated through basic authentication or OAuth with the `user` scope, then the response lists public and private profile information. + * + * If the authenticated user is authenticated through OAuth without the `user` scope, then the response lists only public profile information. 
+ */ + "users/get-authenticated": { + parameters: {}; + responses: { + /** Response */ + 200: { + content: { + "application/json": + | components["schemas"]["private-user"] + | components["schemas"]["public-user"]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + }; + }; + /** **Note:** If your email is set to private and you send an `email` parameter as part of this request to update your profile, your privacy settings are still enforced: the email address will not be displayed on your public profile or via the API. */ + "users/update-authenticated": { + parameters: {}; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["private-user"]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** + * @description The new name of the user. + * @example Omar Jahandar + */ + name?: string; + /** + * @description The publicly visible email address of the user. + * @example omar@example.com + */ + email?: string; + /** + * @description The new blog URL of the user. + * @example blog.example.com + */ + blog?: string; + /** + * @description The new Twitter username of the user. + * @example therealomarj + */ + twitter_username?: string | null; + /** + * @description The new company of the user. + * @example Acme corporation + */ + company?: string; + /** + * @description The new location of the user. + * @example Berlin, Germany + */ + location?: string; + /** @description The new hiring availability of the user. */ + hireable?: boolean; + /** @description The new short biography of the user. */ + bio?: string; + }; + }; + }; + }; + /** List the users you've blocked on your personal account. */ + "users/list-blocked-by-authenticated-user": { + parameters: {}; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["simple-user"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + "users/check-blocked": { + parameters: { + path: { + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + }; + responses: { + /** If the user is blocked: */ + 204: never; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + /** If the user is not blocked: */ + 404: { + content: { + "application/json": components["schemas"]["basic-error"]; + }; + }; + }; + }; + "users/block": { + parameters: { + path: { + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + }; + responses: { + /** Response */ + 204: never; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + "users/unblock": { + parameters: { + path: { + /** The handle for the GitHub user account. 
*/ + username: components["parameters"]["username"]; + }; + }; + responses: { + /** Response */ + 204: never; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Lists the authenticated user's codespaces. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have read access to the `codespaces` repository permission to use this endpoint. + */ + "codespaces/list-for-authenticated-user": { + parameters: { + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + /** ID of the Repository to filter on */ + repository_id?: components["parameters"]["repository-id-in-query"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": { + total_count: number; + codespaces: components["schemas"]["codespace"][]; + }; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 500: components["responses"]["internal_error"]; + }; + }; + /** + * Creates a new codespace, owned by the authenticated user. + * + * This endpoint requires either a `repository_id` OR a `pull_request` but not both. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have write access to the `codespaces` repository permission to use this endpoint. + */ + "codespaces/create-for-authenticated-user": { + responses: { + /** Response when the codespace was successfully created */ + 201: { + content: { + "application/json": components["schemas"]["codespace"]; + }; + }; + /** Response when the codespace creation partially failed but is being retried in the background */ + 202: { + content: { + "application/json": components["schemas"]["codespace"]; + }; + }; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + requestBody: { + content: { + "application/json": + | { + /** @description Repository id for this codespace */ + repository_id: number; + /** @description Git ref (typically a branch name) for this codespace */ + ref?: string; + /** @description Location for this codespace. Assigned by IP if not provided */ + location?: string; + /** @description IP for location auto-detection when proxying a request */ + client_ip?: string; + /** @description Machine type to use for this codespace */ + machine?: string; + /** @description Path to devcontainer.json config to use for this codespace */ + devcontainer_path?: string; + /** @description Whether to authorize requested permissions from devcontainer.json */ + multi_repo_permissions_opt_out?: boolean; + /** @description Working directory for this codespace */ + working_directory?: string; + /** @description Time in minutes before codespace stops from inactivity */ + idle_timeout_minutes?: number; + /** @description Display name for this codespace */ + display_name?: string; + /** @description Duration in minutes after codespace has gone idle in which it will be deleted. Must be integer minutes between 0 and 43200 (30 days). 
*/ + retention_period_minutes?: number; + } + | { + /** @description Pull request number for this codespace */ + pull_request: { + /** @description Pull request number */ + pull_request_number: number; + /** @description Repository id for this codespace */ + repository_id: number; + }; + /** @description Location for this codespace. Assigned by IP if not provided */ + location?: string; + /** @description Machine type to use for this codespace */ + machine?: string; + /** @description Path to devcontainer.json config to use for this codespace */ + devcontainer_path?: string; + /** @description Working directory for this codespace */ + working_directory?: string; + /** @description Time in minutes before codespace stops from inactivity */ + idle_timeout_minutes?: number; + }; + }; + }; + }; + /** + * Lists all secrets available for a user's Codespaces without revealing their + * encrypted values. + * + * You must authenticate using an access token with the `codespace` or `codespace:secrets` scope to use this endpoint. User must have Codespaces access to use this endpoint. + * + * GitHub Apps must have read access to the `codespaces_user_secrets` user permission to use this endpoint. + */ + "codespaces/list-secrets-for-authenticated-user": { + parameters: { + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": { + total_count: number; + secrets: components["schemas"]["codespaces-secret"][]; + }; + }; + }; + }; + }; + /** + * Gets your public key, which you need to encrypt secrets. You need to encrypt a secret before you can create or update secrets. + * + * You must authenticate using an access token with the `codespace` or `codespace:secrets` scope to use this endpoint. User must have Codespaces access to use this endpoint. + * + * GitHub Apps must have read access to the `codespaces_user_secrets` user permission to use this endpoint. + */ + "codespaces/get-public-key-for-authenticated-user": { + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["codespaces-user-public-key"]; + }; + }; + }; + }; + /** + * Gets a secret available to a user's codespaces without revealing its encrypted value. + * + * You must authenticate using an access token with the `codespace` or `codespace:secrets` scope to use this endpoint. User must have Codespaces access to use this endpoint. + * + * GitHub Apps must have read access to the `codespaces_user_secrets` user permission to use this endpoint. + */ + "codespaces/get-secret-for-authenticated-user": { + parameters: { + path: { + /** The name of the secret. */ + secret_name: components["parameters"]["secret-name"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["codespaces-secret"]; + }; + }; + }; + }; + /** + * Creates or updates a secret for a user's codespace with an encrypted value. Encrypt your secret using + * [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages). + * + * You must authenticate using an access token with the `codespace` or `codespace:secrets` scope to use this endpoint. User must also have Codespaces access to use this endpoint. 
+ * + * GitHub Apps must have read access to the `codespaces_user_secrets` user permission and `codespaces_secrets` repository permission on all referenced repositories to use this endpoint. + * + * #### Example encrypting a secret using Node.js + * + * Encrypt your secret using the [tweetsodium](https://github.com/github/tweetsodium) library. + * + * ``` + * const sodium = require('tweetsodium'); + * + * const key = "base64-encoded-public-key"; + * const value = "plain-text-secret"; + * + * // Convert the message and key to Uint8Array's (Buffer implements that interface) + * const messageBytes = Buffer.from(value); + * const keyBytes = Buffer.from(key, 'base64'); + * + * // Encrypt using LibSodium. + * const encryptedBytes = sodium.seal(messageBytes, keyBytes); + * + * // Base64 the encrypted secret + * const encrypted = Buffer.from(encryptedBytes).toString('base64'); + * + * console.log(encrypted); + * ``` + * + * + * #### Example encrypting a secret using Python + * + * Encrypt your secret using [pynacl](https://pynacl.readthedocs.io/en/latest/public/#nacl-public-sealedbox) with Python 3. + * + * ``` + * from base64 import b64encode + * from nacl import encoding, public + * + * def encrypt(public_key: str, secret_value: str) -> str: + * """Encrypt a Unicode string using the public key.""" + * public_key = public.PublicKey(public_key.encode("utf-8"), encoding.Base64Encoder()) + * sealed_box = public.SealedBox(public_key) + * encrypted = sealed_box.encrypt(secret_value.encode("utf-8")) + * return b64encode(encrypted).decode("utf-8") + * ``` + * + * #### Example encrypting a secret using C# + * + * Encrypt your secret using the [Sodium.Core](https://www.nuget.org/packages/Sodium.Core/) package. + * + * ``` + * var secretValue = System.Text.Encoding.UTF8.GetBytes("mySecret"); + * var publicKey = Convert.FromBase64String("2Sg8iYjAxxmI2LvUXpJjkYrMxURPc8r+dB7TJyvvcCU="); + * + * var sealedPublicKeyBox = Sodium.SealedPublicKeyBox.Create(secretValue, publicKey); + * + * Console.WriteLine(Convert.ToBase64String(sealedPublicKeyBox)); + * ``` + * + * #### Example encrypting a secret using Ruby + * + * Encrypt your secret using the [rbnacl](https://github.com/RubyCrypto/rbnacl) gem. + * + * ```ruby + * require "rbnacl" + * require "base64" + * + * key = Base64.decode64("+ZYvJDZMHUfBkJdyq5Zm9SKqeuBQ4sj+6sfjlH4CgG0=") + * public_key = RbNaCl::PublicKey.new(key) + * + * box = RbNaCl::Boxes::Sealed.from_public_key(public_key) + * encrypted_secret = box.encrypt("my_secret") + * + * # Print the base64 encoded secret + * puts Base64.strict_encode64(encrypted_secret) + * ``` + */ + "codespaces/create-or-update-secret-for-authenticated-user": { + parameters: { + path: { + /** The name of the secret. */ + secret_name: components["parameters"]["secret-name"]; + }; + }; + responses: { + /** Response after successfully creating a secret */ + 201: { + content: { + "application/json": { [key: string]: unknown }; + }; + }; + /** Response after successfully updating a secret */ + 204: never; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description Value for your secret, encrypted with [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages) using the public key retrieved from the [Get the public key for the authenticated user](https://docs.github.com/rest/reference/codespaces#get-the-public-key-for-the-authenticated-user) endpoint. 
*/ + encrypted_value?: string; + /** @description ID of the key you used to encrypt the secret. */ + key_id: string; + /** @description An array of repository ids that can access the user secret. You can manage the list of selected repositories using the [List selected repositories for a user secret](https://docs.github.com/rest/reference/codespaces#list-selected-repositories-for-a-user-secret), [Set selected repositories for a user secret](https://docs.github.com/rest/reference/codespaces#set-selected-repositories-for-a-user-secret), and [Remove a selected repository from a user secret](https://docs.github.com/rest/reference/codespaces#remove-a-selected-repository-from-a-user-secret) endpoints. */ + selected_repository_ids?: string[]; + }; + }; + }; + }; + /** + * Deletes a secret from a user's codespaces using the secret name. Deleting the secret will remove access from all codespaces that were allowed to access the secret. + * + * You must authenticate using an access token with the `codespace` or `codespace:secrets` scope to use this endpoint. User must have Codespaces access to use this endpoint. + * + * GitHub Apps must have write access to the `codespaces_user_secrets` user permission to use this endpoint. + */ + "codespaces/delete-secret-for-authenticated-user": { + parameters: { + path: { + /** The name of the secret. */ + secret_name: components["parameters"]["secret-name"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** + * List the repositories that have been granted the ability to use a user's codespace secret. + * + * You must authenticate using an access token with the `codespace` or `codespace:secrets` scope to use this endpoint. User must have Codespaces access to use this endpoint. + * + * GitHub Apps must have read access to the `codespaces_user_secrets` user permission and write access to the `codespaces_secrets` repository permission on all referenced repositories to use this endpoint. + */ + "codespaces/list-repositories-for-secret-for-authenticated-user": { + parameters: { + path: { + /** The name of the secret. */ + secret_name: components["parameters"]["secret-name"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": { + total_count: number; + repositories: components["schemas"]["minimal-repository"][]; + }; + }; + }; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 500: components["responses"]["internal_error"]; + }; + }; + /** + * Select the repositories that will use a user's codespace secret. + * + * You must authenticate using an access token with the `codespace` or `codespace:secrets` scope to use this endpoint. User must have Codespaces access to use this endpoint. + * + * GitHub Apps must have write access to the `codespaces_user_secrets` user permission and write access to the `codespaces_secrets` repository permission on all referenced repositories to use this endpoint. + */ + "codespaces/set-repositories-for-secret-for-authenticated-user": { + parameters: { + path: { + /** The name of the secret. 
*/ + secret_name: components["parameters"]["secret-name"]; + }; + }; + responses: { + /** No Content when repositories were added to the selected list */ + 204: never; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 500: components["responses"]["internal_error"]; + }; + requestBody: { + content: { + "application/json": { + /** @description An array of repository ids for which a codespace can access the secret. You can manage the list of selected repositories using the [List selected repositories for a user secret](https://docs.github.com/rest/reference/codespaces#list-selected-repositories-for-a-user-secret), [Add a selected repository to a user secret](https://docs.github.com/rest/reference/codespaces#add-a-selected-repository-to-a-user-secret), and [Remove a selected repository from a user secret](https://docs.github.com/rest/reference/codespaces#remove-a-selected-repository-from-a-user-secret) endpoints. */ + selected_repository_ids: number[]; + }; + }; + }; + }; + /** + * Adds a repository to the selected repositories for a user's codespace secret. + * You must authenticate using an access token with the `codespace` or `codespace:secrets` scope to use this endpoint. User must have Codespaces access to use this endpoint. + * GitHub Apps must have write access to the `codespaces_user_secrets` user permission and write access to the `codespaces_secrets` repository permission on the referenced repository to use this endpoint. + */ + "codespaces/add-repository-for-secret-for-authenticated-user": { + parameters: { + path: { + /** The name of the secret. */ + secret_name: components["parameters"]["secret-name"]; + repository_id: number; + }; + }; + responses: { + /** No Content when repository was added to the selected list */ + 204: never; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 500: components["responses"]["internal_error"]; + }; + }; + /** + * Removes a repository from the selected repositories for a user's codespace secret. + * You must authenticate using an access token with the `codespace` or `codespace:secrets` scope to use this endpoint. User must have Codespaces access to use this endpoint. + * GitHub Apps must have write access to the `codespaces_user_secrets` user permission to use this endpoint. + */ + "codespaces/remove-repository-for-secret-for-authenticated-user": { + parameters: { + path: { + /** The name of the secret. */ + secret_name: components["parameters"]["secret-name"]; + repository_id: number; + }; + }; + responses: { + /** No Content when repository was removed from the selected list */ + 204: never; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 500: components["responses"]["internal_error"]; + }; + }; + /** + * Gets information about a user's codespace. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have read access to the `codespaces` repository permission to use this endpoint. + */ + "codespaces/get-for-authenticated-user": { + parameters: { + path: { + /** The name of the codespace. 
*/ + codespace_name: components["parameters"]["codespace-name"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["codespace"]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 500: components["responses"]["internal_error"]; + }; + }; + /** + * Deletes a user's codespace. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have write access to the `codespaces` repository permission to use this endpoint. + */ + "codespaces/delete-for-authenticated-user": { + parameters: { + path: { + /** The name of the codespace. */ + codespace_name: components["parameters"]["codespace-name"]; + }; + }; + responses: { + 202: components["responses"]["accepted"]; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 500: components["responses"]["internal_error"]; + }; + }; + /** + * Updates a codespace owned by the authenticated user. Currently only the codespace's machine type and recent folders can be modified using this endpoint. + * + * If you specify a new machine type it will be applied the next time your codespace is started. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have write access to the `codespaces` repository permission to use this endpoint. + */ + "codespaces/update-for-authenticated-user": { + parameters: { + path: { + /** The name of the codespace. */ + codespace_name: components["parameters"]["codespace-name"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["codespace"]; + }; + }; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + requestBody: { + content: { + "application/json": { + /** @description A valid machine to transition this codespace to. */ + machine?: string; + /** @description Display name for this codespace */ + display_name?: string; + /** @description Recently opened folders inside the codespace. It is currently used by the clients to determine the folder path to load the codespace in. */ + recent_folders?: string[]; + }; + }; + }; + }; + /** + * Triggers an export of the specified codespace and returns a URL and ID where the status of the export can be monitored. + * + * You must authenticate using a personal access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have write access to the `codespaces_lifecycle_admin` repository permission to use this endpoint. + */ + "codespaces/export-for-authenticated-user": { + parameters: { + path: { + /** The name of the codespace. 
*/ + codespace_name: components["parameters"]["codespace-name"]; + }; + }; + responses: { + /** Response */ + 202: { + content: { + "application/json": components["schemas"]["codespace-export-details"]; + }; + }; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + 500: components["responses"]["internal_error"]; + }; + }; + /** + * Gets information about an export of a codespace. + * + * You must authenticate using a personal access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have read access to the `codespaces_lifecycle_admin` repository permission to use this endpoint. + */ + "codespaces/get-export-details-for-authenticated-user": { + parameters: { + path: { + /** The name of the codespace. */ + codespace_name: components["parameters"]["codespace-name"]; + /** The ID of the export operation, or `latest`. Currently only `latest` is supported. */ + export_id: components["parameters"]["export-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["codespace-export-details"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * List the machine types a codespace can transition to use. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have read access to the `codespaces_metadata` repository permission to use this endpoint. + */ + "codespaces/codespace-machines-for-authenticated-user": { + parameters: { + path: { + /** The name of the codespace. */ + codespace_name: components["parameters"]["codespace-name"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": { + total_count: number; + machines: components["schemas"]["codespace-machine"][]; + }; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 500: components["responses"]["internal_error"]; + }; + }; + /** + * Starts a user's codespace. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have write access to the `codespaces_lifecycle_admin` repository permission to use this endpoint. + */ + "codespaces/start-for-authenticated-user": { + parameters: { + path: { + /** The name of the codespace. */ + codespace_name: components["parameters"]["codespace-name"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["codespace"]; + }; + }; + 304: components["responses"]["not_modified"]; + 400: components["responses"]["bad_request"]; + 401: components["responses"]["requires_authentication"]; + /** Payment required */ + 402: { + content: { + "application/json": components["schemas"]["basic-error"]; + }; + }; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 409: components["responses"]["conflict"]; + 500: components["responses"]["internal_error"]; + }; + }; + /** + * Stops a user's codespace. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have write access to the `codespaces_lifecycle_admin` repository permission to use this endpoint. 
+ */ + "codespaces/stop-for-authenticated-user": { + parameters: { + path: { + /** The name of the codespace. */ + codespace_name: components["parameters"]["codespace-name"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["codespace"]; + }; + }; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 500: components["responses"]["internal_error"]; + }; + }; + /** Sets the visibility for your primary email addresses. */ + "users/set-primary-email-visibility-for-authenticated-user": { + parameters: {}; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["email"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** + * @description Denotes whether an email is publicly visible. + * @enum {string} + */ + visibility: "public" | "private"; + }; + }; + }; + }; + /** Lists all of your email addresses, and specifies which one is visible to the public. This endpoint is accessible with the `user:email` scope. */ + "users/list-emails-for-authenticated-user": { + parameters: { + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["email"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** This endpoint is accessible with the `user` scope. */ + "users/add-email-for-authenticated-user": { + parameters: {}; + responses: { + /** Response */ + 201: { + content: { + "application/json": components["schemas"]["email"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** + * @description Adds one or more email addresses to your GitHub account. Must contain at least one email address. **Note:** Alternatively, you can pass a single email address or an `array` of emails addresses directly, but we recommend that you pass an object using the `emails` key. + * @example [] + */ + emails: string[]; + }; + }; + }; + }; + /** This endpoint is accessible with the `user` scope. */ + "users/delete-email-for-authenticated-user": { + parameters: {}; + responses: { + /** Response */ + 204: never; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description Email addresses associated with the GitHub user account. 
*/ + emails: string[]; + }; + }; + }; + }; + /** Lists the people following the authenticated user. */ + "users/list-followers-for-authenticated-user": { + parameters: { + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["simple-user"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + }; + }; + /** Lists the people who the authenticated user follows. */ + "users/list-followed-by-authenticated-user": { + parameters: { + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["simple-user"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + }; + }; + "users/check-person-is-followed-by-authenticated": { + parameters: { + path: { + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + }; + responses: { + /** if the person is followed by the authenticated user */ + 204: never; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + /** if the person is not followed by the authenticated user */ + 404: { + content: { + "application/json": components["schemas"]["basic-error"]; + }; + }; + }; + }; + /** + * Note that you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)." + * + * Following a user requires the user to be logged in and authenticated with basic auth or OAuth with the `user:follow` scope. + */ + "users/follow": { + parameters: { + path: { + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + }; + responses: { + /** Response */ + 204: never; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** Unfollowing a user requires the user to be logged in and authenticated with basic auth or OAuth with the `user:follow` scope. */ + "users/unfollow": { + parameters: { + path: { + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + }; + responses: { + /** Response */ + 204: never; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** Lists the current user's GPG keys. Requires that you are authenticated via Basic Auth or via OAuth with at least `read:gpg_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). 
*/ + "users/list-gpg-keys-for-authenticated-user": { + parameters: { + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["gpg-key"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** Adds a GPG key to the authenticated user's GitHub account. Requires that you are authenticated via Basic Auth, or OAuth with at least `write:gpg_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). */ + "users/create-gpg-key-for-authenticated-user": { + parameters: {}; + responses: { + /** Response */ + 201: { + content: { + "application/json": components["schemas"]["gpg-key"]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description A descriptive name for the new key. */ + name?: string; + /** @description A GPG key in ASCII-armored format. */ + armored_public_key: string; + }; + }; + }; + }; + /** View extended details for a single GPG key. Requires that you are authenticated via Basic Auth or via OAuth with at least `read:gpg_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). */ + "users/get-gpg-key-for-authenticated-user": { + parameters: { + path: { + /** The unique identifier of the GPG key. */ + gpg_key_id: components["parameters"]["gpg-key-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["gpg-key"]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** Removes a GPG key from the authenticated user's GitHub account. Requires that you are authenticated via Basic Auth or via OAuth with at least `admin:gpg_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). */ + "users/delete-gpg-key-for-authenticated-user": { + parameters: { + path: { + /** The unique identifier of the GPG key. */ + gpg_key_id: components["parameters"]["gpg-key-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Lists installations of your GitHub App that the authenticated user has explicit permission (`:read`, `:write`, or `:admin`) to access. + * + * You must use a [user-to-server OAuth access token](https://docs.github.com/apps/building-github-apps/identifying-and-authorizing-users-for-github-apps/#identifying-users-on-your-site), created for a user who has authorized your GitHub App, to access this endpoint. 
+ * + * The authenticated user has explicit permission to access repositories they own, repositories where they are a collaborator, and repositories that they can access through an organization membership. + * + * You can find the permissions for the installation under the `permissions` key. + */ + "apps/list-installations-for-authenticated-user": { + parameters: { + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** You can find the permissions for the installation under the `permissions` key. */ + 200: { + headers: {}; + content: { + "application/json": { + total_count: number; + installations: components["schemas"]["installation"][]; + }; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + }; + }; + /** + * List repositories that the authenticated user has explicit permission (`:read`, `:write`, or `:admin`) to access for an installation. + * + * The authenticated user has explicit permission to access repositories they own, repositories where they are a collaborator, and repositories that they can access through an organization membership. + * + * You must use a [user-to-server OAuth access token](https://docs.github.com/apps/building-github-apps/identifying-and-authorizing-users-for-github-apps/#identifying-users-on-your-site), created for a user who has authorized your GitHub App, to access this endpoint. + * + * The access the user has to each repository is included in the hash under the `permissions` key. + */ + "apps/list-installation-repos-for-authenticated-user": { + parameters: { + path: { + /** The unique identifier of the installation. */ + installation_id: components["parameters"]["installation-id"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** The access the user has to each repository is included in the hash under the `permissions` key. */ + 200: { + headers: {}; + content: { + "application/json": { + total_count: number; + repository_selection?: string; + repositories: components["schemas"]["repository"][]; + }; + }; + }; + 304: components["responses"]["not_modified"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Add a single repository to an installation. The authenticated user must have admin access to the repository. + * + * You must use a personal access token (which you can create via the [command line](https://docs.github.com/github/authenticating-to-github/creating-a-personal-access-token) or [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication)) to access this endpoint. + */ + "apps/add-repo-to-installation-for-authenticated-user": { + parameters: { + path: { + /** The unique identifier of the installation. */ + installation_id: components["parameters"]["installation-id"]; + /** The unique identifier of the repository. 
*/ + repository_id: components["parameters"]["repository-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + 304: components["responses"]["not_modified"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Remove a single repository from an installation. The authenticated user must have admin access to the repository. + * + * You must use a personal access token (which you can create via the [command line](https://docs.github.com/github/authenticating-to-github/creating-a-personal-access-token) or [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication)) to access this endpoint. + */ + "apps/remove-repo-from-installation-for-authenticated-user": { + parameters: { + path: { + /** The unique identifier of the installation. */ + installation_id: components["parameters"]["installation-id"]; + /** The unique identifier of the repository. */ + repository_id: components["parameters"]["repository-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + 304: components["responses"]["not_modified"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** Shows which type of GitHub user can interact with your public repositories and when the restriction expires. */ + "interactions/get-restrictions-for-authenticated-user": { + responses: { + /** Default response */ + 200: { + content: { + "application/json": Partial< + components["schemas"]["interaction-limit-response"] + > & + Partial<{ [key: string]: unknown }>; + }; + }; + /** Response when there are no restrictions */ + 204: never; + }; + }; + /** Temporarily restricts which type of GitHub user can interact with your public repositories. Setting the interaction limit at the user level will overwrite any interaction limits that are set for individual repositories owned by the user. */ + "interactions/set-restrictions-for-authenticated-user": { + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["interaction-limit-response"]; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": components["schemas"]["interaction-limit"]; + }; + }; + }; + /** Removes any interaction restrictions from your public repositories. */ + "interactions/remove-restrictions-for-authenticated-user": { + responses: { + /** Response */ + 204: never; + }; + }; + /** + * List issues across owned and member repositories assigned to the authenticated user. + * + * **Note**: GitHub's REST API v3 considers every pull request an issue, but not every issue is a pull request. For this + * reason, "Issues" endpoints may return both issues and pull requests in the response. You can identify pull requests by + * the `pull_request` key. Be aware that the `id` of a pull request returned from "Issues" endpoints will be an _issue id_. To find out the pull + * request id, use the "[List pull requests](https://docs.github.com/rest/reference/pulls#list-pull-requests)" endpoint. + */ + "issues/list-for-authenticated-user": { + parameters: { + query: { + /** Indicates which sorts of issues to return. `assigned` means issues assigned to you. `created` means issues created by you. `mentioned` means issues mentioning you. `subscribed` means issues you're subscribed to updates for. `all` or `repos` means all issues you can see, regardless of participation or creation. 
*/ + filter?: + | "assigned" + | "created" + | "mentioned" + | "subscribed" + | "repos" + | "all"; + /** Indicates the state of the issues to return. Can be either `open`, `closed`, or `all`. */ + state?: "open" | "closed" | "all"; + /** A list of comma separated label names. Example: `bug,ui,@high` */ + labels?: components["parameters"]["labels"]; + /** What to sort results by. Can be either `created`, `updated`, `comments`. */ + sort?: "created" | "updated" | "comments"; + /** The direction to sort the results by. */ + direction?: components["parameters"]["direction"]; + /** Only show notifications updated after the given time. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. */ + since?: components["parameters"]["since"]; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["issue"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 404: components["responses"]["not_found"]; + }; + }; + /** Lists the public SSH keys for the authenticated user's GitHub account. Requires that you are authenticated via Basic Auth or via OAuth with at least `read:public_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). */ + "users/list-public-ssh-keys-for-authenticated-user": { + parameters: { + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["key"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** Adds a public SSH key to the authenticated user's GitHub account. Requires that you are authenticated via Basic Auth, or OAuth with at least `write:public_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). */ + "users/create-public-ssh-key-for-authenticated-user": { + parameters: {}; + responses: { + /** Response */ + 201: { + content: { + "application/json": components["schemas"]["key"]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** + * @description A descriptive name for the new key. + * @example Personal MacBook Air + */ + title?: string; + /** @description The public SSH key to add to your GitHub account. */ + key: string; + }; + }; + }; + }; + /** View extended details for a single public SSH key. Requires that you are authenticated via Basic Auth or via OAuth with at least `read:public_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). */ + "users/get-public-ssh-key-for-authenticated-user": { + parameters: { + path: { + /** The unique identifier of the key. 
*/ + key_id: components["parameters"]["key-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["key"]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** Removes a public SSH key from the authenticated user's GitHub account. Requires that you are authenticated via Basic Auth or via OAuth with at least `admin:public_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). */ + "users/delete-public-ssh-key-for-authenticated-user": { + parameters: { + path: { + /** The unique identifier of the key. */ + key_id: components["parameters"]["key-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** Lists the active subscriptions for the authenticated user. You must use a [user-to-server OAuth access token](https://docs.github.com/apps/building-github-apps/identifying-and-authorizing-users-for-github-apps/#identifying-users-on-your-site), created for a user who has authorized your GitHub App, to access this endpoint. OAuth Apps must authenticate using an [OAuth token](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/). */ + "apps/list-subscriptions-for-authenticated-user": { + parameters: { + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["user-marketplace-purchase"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 404: components["responses"]["not_found"]; + }; + }; + /** Lists the active subscriptions for the authenticated user. You must use a [user-to-server OAuth access token](https://docs.github.com/apps/building-github-apps/identifying-and-authorizing-users-for-github-apps/#identifying-users-on-your-site), created for a user who has authorized your GitHub App, to access this endpoint. OAuth Apps must authenticate using an [OAuth token](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/). */ + "apps/list-subscriptions-for-authenticated-user-stubbed": { + parameters: { + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["user-marketplace-purchase"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + }; + }; + "orgs/list-memberships-for-authenticated-user": { + parameters: { + query: { + /** Indicates the state of the memberships to return. Can be either `active` or `pending`. If not specified, the API returns both active and pending memberships. */ + state?: "active" | "pending"; + /** The number of results per page (max 100). 
*/ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["org-membership"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 422: components["responses"]["validation_failed"]; + }; + }; + "orgs/get-membership-for-authenticated-user": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["org-membership"]; + }; + }; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + "orgs/update-membership-for-authenticated-user": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["org-membership"]; + }; + }; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** + * @description The state that the membership should be in. Only `"active"` will be accepted. + * @enum {string} + */ + state: "active"; + }; + }; + }; + }; + /** Lists all migrations a user has started. */ + "migrations/list-for-authenticated-user": { + parameters: { + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["migration"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + }; + }; + /** Initiates the generation of a user migration archive. */ + "migrations/start-for-authenticated-user": { + parameters: {}; + responses: { + /** Response */ + 201: { + content: { + "application/json": components["schemas"]["migration"]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** + * @description Lock the repositories being migrated at the start of the migration + * @example true + */ + lock_repositories?: boolean; + /** + * @description Indicates whether metadata should be excluded and only git source should be included for the migration. + * @example true + */ + exclude_metadata?: boolean; + /** + * @description Indicates whether the repository git data should be excluded from the migration. 
+ * @example true + */ + exclude_git_data?: boolean; + /** + * @description Do not include attachments in the migration + * @example true + */ + exclude_attachments?: boolean; + /** + * @description Do not include releases in the migration + * @example true + */ + exclude_releases?: boolean; + /** + * @description Indicates whether projects owned by the organization or users should be excluded. + * @example true + */ + exclude_owner_projects?: boolean; + /** + * @description Indicates whether this should only include organization metadata (repositories array should be empty and will ignore other flags). + * @default false + * @example true + */ + org_metadata_only?: boolean; + /** + * @description Exclude attributes from the API response to improve performance + * @example [ + * "repositories" + * ] + */ + exclude?: "repositories"[]; + repositories: string[]; + }; + }; + }; + }; + /** + * Fetches a single user migration. The response includes the `state` of the migration, which can be one of the following values: + * + * * `pending` - the migration hasn't started yet. + * * `exporting` - the migration is in progress. + * * `exported` - the migration finished successfully. + * * `failed` - the migration failed. + * + * Once the migration has been `exported` you can [download the migration archive](https://docs.github.com/rest/reference/migrations#download-a-user-migration-archive). + */ + "migrations/get-status-for-authenticated-user": { + parameters: { + path: { + /** The unique identifier of the migration. */ + migration_id: components["parameters"]["migration-id"]; + }; + query: { + exclude?: string[]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["migration"]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Fetches the URL to download the migration archive as a `tar.gz` file. Depending on the resources your repository uses, the migration archive can contain JSON files with data for these objects: + * + * * attachments + * * bases + * * commit\_comments + * * issue\_comments + * * issue\_events + * * issues + * * milestones + * * organizations + * * projects + * * protected\_branches + * * pull\_request\_reviews + * * pull\_requests + * * releases + * * repositories + * * review\_comments + * * schema + * * users + * + * The archive will also contain an `attachments` directory that includes all attachment files uploaded to GitHub.com and a `repositories` directory that contains the repository's Git data. + */ + "migrations/get-archive-for-authenticated-user": { + parameters: { + path: { + /** The unique identifier of the migration. */ + migration_id: components["parameters"]["migration-id"]; + }; + }; + responses: { + /** Response */ + 302: never; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + }; + }; + /** Deletes a previous migration archive. Downloadable migration archives are automatically deleted after seven days. Migration metadata, which is returned in the [List user migrations](https://docs.github.com/rest/reference/migrations#list-user-migrations) and [Get a user migration status](https://docs.github.com/rest/reference/migrations#get-a-user-migration-status) endpoints, will continue to be available even after an archive is deleted. 
*/ + "migrations/delete-archive-for-authenticated-user": { + parameters: { + path: { + /** The unique identifier of the migration. */ + migration_id: components["parameters"]["migration-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** Unlocks a repository. You can lock repositories when you [start a user migration](https://docs.github.com/rest/reference/migrations#start-a-user-migration). Once the migration is complete you can unlock each repository to begin using it again or [delete the repository](https://docs.github.com/rest/reference/repos#delete-a-repository) if you no longer need the source data. Returns a status of `404 Not Found` if the repository is not locked. */ + "migrations/unlock-repo-for-authenticated-user": { + parameters: { + path: { + /** The unique identifier of the migration. */ + migration_id: components["parameters"]["migration-id"]; + /** repo_name parameter */ + repo_name: components["parameters"]["repo-name"]; + }; + }; + responses: { + /** Response */ + 204: never; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** Lists all the repositories for this user migration. */ + "migrations/list-repos-for-authenticated-user": { + parameters: { + path: { + /** The unique identifier of the migration. */ + migration_id: components["parameters"]["migration-id"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["minimal-repository"][]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * List organizations for the authenticated user. + * + * **OAuth scope requirements** + * + * This only lists organizations that your authorization allows you to operate on in some way (e.g., you can list teams with `read:org` scope, you can publicize your organization membership with `user` scope, etc.). Therefore, this API requires at least `user` or `read:org` scope. OAuth requests with insufficient scope receive a `403 Forbidden` response. + */ + "orgs/list-for-authenticated-user": { + parameters: { + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["organization-simple"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + }; + }; + /** + * Lists packages owned by the authenticated user within the user's namespace. + * + * To use this endpoint, you must authenticate using an access token with the `packages:read` scope. + * If `package_type` is not `container`, your token must also include the `repo` scope. + */ + "packages/list-packages-for-authenticated-user": { + parameters: { + query: { + /** The type of supported package. 
Packages in GitHub's Gradle registry have the type `maven`. Docker images pushed to GitHub's Container registry (`ghcr.io`) have the type `container`. You can use the type `docker` to find images that were pushed to GitHub's Docker registry (`docker.pkg.github.com`), even if these have now been migrated to the Container registry. */ + package_type: + | "npm" + | "maven" + | "rubygems" + | "docker" + | "nuget" + | "container"; + /** The selected visibility of the packages. Only `container` package_types currently support `internal` visibility properly. For other ecosystems `internal` is synonymous with `private`. This parameter is optional and only filters an existing result set. */ + visibility?: components["parameters"]["package-visibility"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["package"][]; + }; + }; + }; + }; + /** + * Gets a specific package for a package owned by the authenticated user. + * + * To use this endpoint, you must authenticate using an access token with the `packages:read` scope. + * If `package_type` is not `container`, your token must also include the `repo` scope. + */ + "packages/get-package-for-authenticated-user": { + parameters: { + path: { + /** The type of supported package. Packages in GitHub's Gradle registry have the type `maven`. Docker images pushed to GitHub's Container registry (`ghcr.io`) have the type `container`. You can use the type `docker` to find images that were pushed to GitHub's Docker registry (`docker.pkg.github.com`), even if these have now been migrated to the Container registry. */ + package_type: components["parameters"]["package-type"]; + /** The name of the package. */ + package_name: components["parameters"]["package-name"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["package"]; + }; + }; + }; + }; + /** + * Deletes a package owned by the authenticated user. You cannot delete a public package if any version of the package has more than 5,000 downloads. In this scenario, contact GitHub support for further assistance. + * + * To use this endpoint, you must authenticate using an access token with the `packages:read` and `packages:delete` scopes. + * If `package_type` is not `container`, your token must also include the `repo` scope. + */ + "packages/delete-package-for-authenticated-user": { + parameters: { + path: { + /** The type of supported package. Packages in GitHub's Gradle registry have the type `maven`. Docker images pushed to GitHub's Container registry (`ghcr.io`) have the type `container`. You can use the type `docker` to find images that were pushed to GitHub's Docker registry (`docker.pkg.github.com`), even if these have now been migrated to the Container registry. */ + package_type: components["parameters"]["package-type"]; + /** The name of the package. */ + package_name: components["parameters"]["package-name"]; + }; + }; + responses: { + /** Response */ + 204: never; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Restores a package owned by the authenticated user. + * + * You can restore a deleted package under the following conditions: + * - The package was deleted within the last 30 days. + * - The same package namespace and version is still available and not reused for a new package. 
If the same package namespace is not available, you will not be able to restore your package. In this scenario, to restore the deleted package, you must delete the new package that uses the deleted package's namespace first. + * + * To use this endpoint, you must authenticate using an access token with the `packages:read` and `packages:write` scopes. If `package_type` is not `container`, your token must also include the `repo` scope. + */ + "packages/restore-package-for-authenticated-user": { + parameters: { + path: { + /** The type of supported package. Packages in GitHub's Gradle registry have the type `maven`. Docker images pushed to GitHub's Container registry (`ghcr.io`) have the type `container`. You can use the type `docker` to find images that were pushed to GitHub's Docker registry (`docker.pkg.github.com`), even if these have now been migrated to the Container registry. */ + package_type: components["parameters"]["package-type"]; + /** The name of the package. */ + package_name: components["parameters"]["package-name"]; + }; + query: { + /** package token */ + token?: string; + }; + }; + responses: { + /** Response */ + 204: never; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Returns all package versions for a package owned by the authenticated user. + * + * To use this endpoint, you must authenticate using an access token with the `packages:read` scope. + * If `package_type` is not `container`, your token must also include the `repo` scope. + */ + "packages/get-all-package-versions-for-package-owned-by-authenticated-user": { + parameters: { + path: { + /** The type of supported package. Packages in GitHub's Gradle registry have the type `maven`. Docker images pushed to GitHub's Container registry (`ghcr.io`) have the type `container`. You can use the type `docker` to find images that were pushed to GitHub's Docker registry (`docker.pkg.github.com`), even if these have now been migrated to the Container registry. */ + package_type: components["parameters"]["package-type"]; + /** The name of the package. */ + package_name: components["parameters"]["package-name"]; + }; + query: { + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** The state of the package, either active or deleted. */ + state?: "active" | "deleted"; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["package-version"][]; + }; + }; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Gets a specific package version for a package owned by the authenticated user. + * + * To use this endpoint, you must authenticate using an access token with the `packages:read` scope. + * If `package_type` is not `container`, your token must also include the `repo` scope. + */ + "packages/get-package-version-for-authenticated-user": { + parameters: { + path: { + /** The type of supported package. Packages in GitHub's Gradle registry have the type `maven`. Docker images pushed to GitHub's Container registry (`ghcr.io`) have the type `container`. 
You can use the type `docker` to find images that were pushed to GitHub's Docker registry (`docker.pkg.github.com`), even if these have now been migrated to the Container registry. */ + package_type: components["parameters"]["package-type"]; + /** The name of the package. */ + package_name: components["parameters"]["package-name"]; + /** Unique identifier of the package version. */ + package_version_id: components["parameters"]["package-version-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["package-version"]; + }; + }; + }; + }; + /** + * Deletes a specific package version for a package owned by the authenticated user. If the package is public and the package version has more than 5,000 downloads, you cannot delete the package version. In this scenario, contact GitHub support for further assistance. + * + * To use this endpoint, you must have admin permissions in the organization and authenticate using an access token with the `packages:read` and `packages:delete` scopes. + * If `package_type` is not `container`, your token must also include the `repo` scope. + */ + "packages/delete-package-version-for-authenticated-user": { + parameters: { + path: { + /** The type of supported package. Packages in GitHub's Gradle registry have the type `maven`. Docker images pushed to GitHub's Container registry (`ghcr.io`) have the type `container`. You can use the type `docker` to find images that were pushed to GitHub's Docker registry (`docker.pkg.github.com`), even if these have now been migrated to the Container registry. */ + package_type: components["parameters"]["package-type"]; + /** The name of the package. */ + package_name: components["parameters"]["package-name"]; + /** Unique identifier of the package version. */ + package_version_id: components["parameters"]["package-version-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Restores a package version owned by the authenticated user. + * + * You can restore a deleted package version under the following conditions: + * - The package was deleted within the last 30 days. + * - The same package namespace and version is still available and not reused for a new package. If the same package namespace is not available, you will not be able to restore your package. In this scenario, to restore the deleted package, you must delete the new package that uses the deleted package's namespace first. + * + * To use this endpoint, you must authenticate using an access token with the `packages:read` and `packages:write` scopes. If `package_type` is not `container`, your token must also include the `repo` scope. + */ + "packages/restore-package-version-for-authenticated-user": { + parameters: { + path: { + /** The type of supported package. Packages in GitHub's Gradle registry have the type `maven`. Docker images pushed to GitHub's Container registry (`ghcr.io`) have the type `container`. You can use the type `docker` to find images that were pushed to GitHub's Docker registry (`docker.pkg.github.com`), even if these have now been migrated to the Container registry. */ + package_type: components["parameters"]["package-type"]; + /** The name of the package. */ + package_name: components["parameters"]["package-name"]; + /** Unique identifier of the package version. 
*/ + package_version_id: components["parameters"]["package-version-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** Creates a user project board. Returns a `410 Gone` status if the user does not have existing classic projects. If you do not have sufficient privileges to perform this action, a `401 Unauthorized` or `410 Gone` status is returned. */ + "projects/create-for-authenticated-user": { + parameters: {}; + responses: { + /** Response */ + 201: { + content: { + "application/json": components["schemas"]["project"]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 422: components["responses"]["validation_failed_simple"]; + }; + requestBody: { + content: { + "application/json": { + /** + * @description Name of the project + * @example Week One Sprint + */ + name: string; + /** + * @description Body of the project + * @example This project represents the sprint of the first week in January + */ + body?: string | null; + }; + }; + }; + }; + /** Lists your publicly visible email address, which you can set with the [Set primary email visibility for the authenticated user](https://docs.github.com/rest/reference/users#set-primary-email-visibility-for-the-authenticated-user) endpoint. This endpoint is accessible with the `user:email` scope. */ + "users/list-public-emails-for-authenticated-user": { + parameters: { + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["email"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Lists repositories that the authenticated user has explicit permission (`:read`, `:write`, or `:admin`) to access. + * + * The authenticated user has explicit permission to access repositories they own, repositories where they are a collaborator, and repositories that they can access through an organization membership. + */ + "repos/list-for-authenticated-user": { + parameters: { + query: { + /** Limit results to repositories with the specified visibility. */ + visibility?: "all" | "public" | "private"; + /** + * Comma-separated list of values. Can include: + * \* `owner`: Repositories that are owned by the authenticated user. + * \* `collaborator`: Repositories that the user has been added to as a collaborator. + * \* `organization_member`: Repositories that the user has access to through being a member of an organization. This includes every repository on every team that the user is on. + */ + affiliation?: string; + /** Limit results to repositories of the specified type. Will cause a `422` error if used in the same request as **visibility** or **affiliation**. */ + type?: "all" | "owner" | "public" | "private" | "member"; + /** The property to sort the results by. */ + sort?: "created" | "updated" | "pushed" | "full_name"; + /** The order to sort by. Default: `asc` when using `full_name`, otherwise `desc`. 
*/ + direction?: "asc" | "desc"; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + /** Only show notifications updated after the given time. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. */ + since?: components["parameters"]["since"]; + /** Only show notifications updated before the given time. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. */ + before?: components["parameters"]["before"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["repository"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Creates a new repository for the authenticated user. + * + * **OAuth scope requirements** + * + * When using [OAuth](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/), authorizations must include: + * + * * `public_repo` scope or `repo` scope to create a public repository. Note: For GitHub AE, use `repo` scope to create an internal repository. + * * `repo` scope to create a private repository. + */ + "repos/create-for-authenticated-user": { + parameters: {}; + responses: { + /** Response */ + 201: { + headers: { + Location?: string; + }; + content: { + "application/json": components["schemas"]["repository"]; + }; + }; + 304: components["responses"]["not_modified"]; + 400: components["responses"]["bad_request"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** + * @description The name of the repository. + * @example Team Environment + */ + name: string; + /** @description A short description of the repository. */ + description?: string; + /** @description A URL with more information about the repository. */ + homepage?: string; + /** + * @description Whether the repository is private. + * @default false + */ + private?: boolean; + /** + * @description Whether issues are enabled. + * @default true + * @example true + */ + has_issues?: boolean; + /** + * @description Whether projects are enabled. + * @default true + * @example true + */ + has_projects?: boolean; + /** + * @description Whether the wiki is enabled. + * @default true + * @example true + */ + has_wiki?: boolean; + /** @description The id of the team that will be granted access to this repository. This is only valid when creating a repository in an organization. */ + team_id?: number; + /** + * @description Whether the repository is initialized with a minimal README. + * @default false + */ + auto_init?: boolean; + /** + * @description The desired language or platform to apply to the .gitignore. + * @example Haskell + */ + gitignore_template?: string; + /** + * @description The license keyword of the open source license for this repository. + * @example mit + */ + license_template?: string; + /** + * @description Whether to allow squash merges for pull requests. 
+ * @default true + * @example true + */ + allow_squash_merge?: boolean; + /** + * @description Whether to allow merge commits for pull requests. + * @default true + * @example true + */ + allow_merge_commit?: boolean; + /** + * @description Whether to allow rebase merges for pull requests. + * @default true + * @example true + */ + allow_rebase_merge?: boolean; + /** + * @description Whether to allow Auto-merge to be used on pull requests. + * @default false + * @example false + */ + allow_auto_merge?: boolean; + /** + * @description Whether to delete head branches when pull requests are merged + * @default false + * @example false + */ + delete_branch_on_merge?: boolean; + /** + * @description Whether downloads are enabled. + * @default true + * @example true + */ + has_downloads?: boolean; + /** + * @description Whether this repository acts as a template that can be used to generate new repositories. + * @default false + * @example true + */ + is_template?: boolean; + }; + }; + }; + }; + /** When authenticating as a user, this endpoint will list all currently open repository invitations for that user. */ + "repos/list-invitations-for-authenticated-user": { + parameters: { + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["repository-invitation"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + "repos/decline-invitation-for-authenticated-user": { + parameters: { + path: { + /** The unique identifier of the invitation. */ + invitation_id: components["parameters"]["invitation-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + 304: components["responses"]["not_modified"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 409: components["responses"]["conflict"]; + }; + }; + "repos/accept-invitation-for-authenticated-user": { + parameters: { + path: { + /** The unique identifier of the invitation. */ + invitation_id: components["parameters"]["invitation-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + 304: components["responses"]["not_modified"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 409: components["responses"]["conflict"]; + }; + }; + /** + * Lists repositories the authenticated user has starred. + * + * You can also find out _when_ stars were created by passing the following custom [media type](https://docs.github.com/rest/overview/media-types/) via the `Accept` header: + */ + "activity/list-repos-starred-by-authenticated-user": { + parameters: { + query: { + /** The property to sort the results by. `created` means when the repository was starred. `updated` means when the repository was last pushed to. */ + sort?: components["parameters"]["sort"]; + /** The direction to sort the results by. */ + direction?: components["parameters"]["direction"]; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. 
*/ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["repository"][]; + "application/vnd.github.v3.star+json": components["schemas"]["starred-repository"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + }; + }; + "activity/check-repo-is-starred-by-authenticated-user": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response if this repository is starred by you */ + 204: never; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + /** Not Found if this repository is not starred by you */ + 404: { + content: { + "application/json": components["schemas"]["basic-error"]; + }; + }; + }; + }; + /** Note that you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)." */ + "activity/star-repo-for-authenticated-user": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 204: never; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + "activity/unstar-repo-for-authenticated-user": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 204: never; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** Lists repositories the authenticated user is watching. */ + "activity/list-watched-repos-for-authenticated-user": { + parameters: { + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["minimal-repository"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + }; + }; + /** List all of the teams across all of the organizations to which the authenticated user belongs. This method requires `user`, `repo`, or `read:org` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/) when authenticating via [OAuth](https://docs.github.com/apps/building-oauth-apps/). 
*/ + "teams/list-for-authenticated-user": { + parameters: { + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["team-full"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Lists all users, in the order that they signed up on GitHub. This list includes personal user accounts and organization accounts. + * + * Note: Pagination is powered exclusively by the `since` parameter. Use the [Link header](https://docs.github.com/rest/overview/resources-in-the-rest-api#link-header) to get the URL for the next page of users. + */ + "users/list": { + parameters: { + query: { + /** A user ID. Only return users with an ID greater than this ID. */ + since?: components["parameters"]["since-user"]; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: { + Link?: string; + }; + content: { + "application/json": components["schemas"]["simple-user"][]; + }; + }; + 304: components["responses"]["not_modified"]; + }; + }; + /** + * Provides publicly available information about someone with a GitHub account. + * + * GitHub Apps with the `Plan` user permission can use this endpoint to retrieve information about a user's GitHub plan. The GitHub App must be authenticated as a user. See "[Identifying and authorizing users for GitHub Apps](https://docs.github.com/apps/building-github-apps/identifying-and-authorizing-users-for-github-apps/)" for details about authentication. For an example response, see 'Response with GitHub plan information' below" + * + * The `email` key in the following response is the publicly visible email address from your GitHub [profile page](https://github.com/settings/profile). When setting up your profile, you can select a primary email address to be “public” which provides an email entry for this endpoint. If you do not set a public email address for `email`, then it will have a value of `null`. You only see publicly visible email addresses when authenticated with GitHub. For more information, see [Authentication](https://docs.github.com/rest/overview/resources-in-the-rest-api#authentication). + * + * The Emails API enables you to list all of your email addresses, and toggle a primary email to be visible publicly. For more information, see "[Emails API](https://docs.github.com/rest/reference/users#emails)". + */ + "users/get-by-username": { + parameters: { + path: { + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": + | components["schemas"]["private-user"] + | components["schemas"]["public-user"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** If you are authenticated as the given user, you will see your private events. Otherwise, you'll only see public events. */ + "activity/list-events-for-authenticated-user": { + parameters: { + path: { + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + query: { + /** The number of results per page (max 100). 
*/ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["event"][]; + }; + }; + }; + }; + /** This is the user's organization dashboard. You must be authenticated as the user to view this. */ + "activity/list-org-events-for-authenticated-user": { + parameters: { + path: { + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["event"][]; + }; + }; + }; + }; + "activity/list-public-events-for-user": { + parameters: { + path: { + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["event"][]; + }; + }; + }; + }; + /** Lists the people following the specified user. */ + "users/list-followers-for-user": { + parameters: { + path: { + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["simple-user"][]; + }; + }; + }; + }; + /** Lists the people who the specified user follows. */ + "users/list-following-for-user": { + parameters: { + path: { + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["simple-user"][]; + }; + }; + }; + }; + "users/check-following-for-user": { + parameters: { + path: { + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + target_user: string; + }; + }; + responses: { + /** if the user follows the target user */ + 204: never; + /** if the user does not follow the target user */ + 404: unknown; + }; + }; + /** Lists public gists for the specified user: */ + "gists/list-for-user": { + parameters: { + path: { + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + query: { + /** Only show notifications updated after the given time. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. */ + since?: components["parameters"]["since"]; + /** The number of results per page (max 100). 
*/ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["base-gist"][]; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + }; + /** Lists the GPG keys for a user. This information is accessible by anyone. */ + "users/list-gpg-keys-for-user": { + parameters: { + path: { + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["gpg-key"][]; + }; + }; + }; + }; + /** + * Provides hovercard information when authenticated through basic auth or OAuth with the `repo` scope. You can find out more about someone in relation to their pull requests, issues, repositories, and organizations. + * + * The `subject_type` and `subject_id` parameters provide context for the person's hovercard, which returns more information than without the parameters. For example, if you wanted to find out more about `octocat` who owns the `Spoon-Knife` repository via cURL, it would look like this: + * + * ```shell + * curl -u username:token + * https://api.github.com/users/octocat/hovercard?subject_type=repository&subject_id=1300192 + * ``` + */ + "users/get-context-for-user": { + parameters: { + path: { + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + query: { + /** Identifies which additional information you'd like to receive about the person's hovercard. Can be `organization`, `repository`, `issue`, `pull_request`. **Required** when using `subject_id`. */ + subject_type?: "organization" | "repository" | "issue" | "pull_request"; + /** Uses the ID for the `subject_type` you specified. **Required** when using `subject_type`. */ + subject_id?: string; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["hovercard"]; + }; + }; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Enables an authenticated GitHub App to find the user’s installation information. + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + "apps/get-user-installation": { + parameters: { + path: { + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["installation"]; + }; + }; + }; + }; + /** Lists the _verified_ public SSH keys for a user. This is accessible by anyone. */ + "users/list-public-keys-for-user": { + parameters: { + path: { + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. 
*/ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["key-simple"][]; + }; + }; + }; + }; + /** + * List [public organization memberships](https://docs.github.com/articles/publicizing-or-concealing-organization-membership) for the specified user. + * + * This method only lists _public_ memberships, regardless of authentication. If you need to fetch all of the organization memberships (public and private) for the authenticated user, use the [List organizations for the authenticated user](https://docs.github.com/rest/reference/orgs#list-organizations-for-the-authenticated-user) API instead. + */ + "orgs/list-for-user": { + parameters: { + path: { + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["organization-simple"][]; + }; + }; + }; + }; + /** + * Lists all packages in a user's namespace for which the requesting user has access. + * + * To use this endpoint, you must authenticate using an access token with the `packages:read` scope. + * If `package_type` is not `container`, your token must also include the `repo` scope. + */ + "packages/list-packages-for-user": { + parameters: { + query: { + /** The type of supported package. Packages in GitHub's Gradle registry have the type `maven`. Docker images pushed to GitHub's Container registry (`ghcr.io`) have the type `container`. You can use the type `docker` to find images that were pushed to GitHub's Docker registry (`docker.pkg.github.com`), even if these have now been migrated to the Container registry. */ + package_type: + | "npm" + | "maven" + | "rubygems" + | "docker" + | "nuget" + | "container"; + /** The selected visibility of the packages. Only `container` package_types currently support `internal` visibility properly. For other ecosystems `internal` is synonymous with `private`. This parameter is optional and only filters an existing result set. */ + visibility?: components["parameters"]["package-visibility"]; + }; + path: { + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["package"][]; + }; + }; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + }; + }; + /** + * Gets a specific package metadata for a public package owned by a user. + * + * To use this endpoint, you must authenticate using an access token with the `packages:read` scope. + * If `package_type` is not `container`, your token must also include the `repo` scope. + */ + "packages/get-package-for-user": { + parameters: { + path: { + /** The type of supported package. Packages in GitHub's Gradle registry have the type `maven`. Docker images pushed to GitHub's Container registry (`ghcr.io`) have the type `container`. You can use the type `docker` to find images that were pushed to GitHub's Docker registry (`docker.pkg.github.com`), even if these have now been migrated to the Container registry. 
*/ + package_type: components["parameters"]["package-type"]; + /** The name of the package. */ + package_name: components["parameters"]["package-name"]; + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["package"]; + }; + }; + }; + }; + /** + * Deletes an entire package for a user. You cannot delete a public package if any version of the package has more than 5,000 downloads. In this scenario, contact GitHub support for further assistance. + * + * To use this endpoint, you must authenticate using an access token with the `packages:read` and `packages:delete` scopes. In addition: + * - If `package_type` is not `container`, your token must also include the `repo` scope. + * - If `package_type` is `container`, you must also have admin permissions to the container you want to delete. + */ + "packages/delete-package-for-user": { + parameters: { + path: { + /** The type of supported package. Packages in GitHub's Gradle registry have the type `maven`. Docker images pushed to GitHub's Container registry (`ghcr.io`) have the type `container`. You can use the type `docker` to find images that were pushed to GitHub's Docker registry (`docker.pkg.github.com`), even if these have now been migrated to the Container registry. */ + package_type: components["parameters"]["package-type"]; + /** The name of the package. */ + package_name: components["parameters"]["package-name"]; + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + }; + responses: { + /** Response */ + 204: never; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Restores an entire package for a user. + * + * You can restore a deleted package under the following conditions: + * - The package was deleted within the last 30 days. + * - The same package namespace and version is still available and not reused for a new package. If the same package namespace is not available, you will not be able to restore your package. In this scenario, to restore the deleted package, you must delete the new package that uses the deleted package's namespace first. + * + * To use this endpoint, you must authenticate using an access token with the `packages:read` and `packages:write` scopes. In addition: + * - If `package_type` is not `container`, your token must also include the `repo` scope. + * - If `package_type` is `container`, you must also have admin permissions to the container that you want to restore. + */ + "packages/restore-package-for-user": { + parameters: { + path: { + /** The type of supported package. Packages in GitHub's Gradle registry have the type `maven`. Docker images pushed to GitHub's Container registry (`ghcr.io`) have the type `container`. You can use the type `docker` to find images that were pushed to GitHub's Docker registry (`docker.pkg.github.com`), even if these have now been migrated to the Container registry. */ + package_type: components["parameters"]["package-type"]; + /** The name of the package. */ + package_name: components["parameters"]["package-name"]; + /** The handle for the GitHub user account. 
*/ + username: components["parameters"]["username"]; + }; + query: { + /** package token */ + token?: string; + }; + }; + responses: { + /** Response */ + 204: never; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Returns all package versions for a public package owned by a specified user. + * + * To use this endpoint, you must authenticate using an access token with the `packages:read` scope. + * If `package_type` is not `container`, your token must also include the `repo` scope. + */ + "packages/get-all-package-versions-for-package-owned-by-user": { + parameters: { + path: { + /** The type of supported package. Packages in GitHub's Gradle registry have the type `maven`. Docker images pushed to GitHub's Container registry (`ghcr.io`) have the type `container`. You can use the type `docker` to find images that were pushed to GitHub's Docker registry (`docker.pkg.github.com`), even if these have now been migrated to the Container registry. */ + package_type: components["parameters"]["package-type"]; + /** The name of the package. */ + package_name: components["parameters"]["package-name"]; + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["package-version"][]; + }; + }; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Gets a specific package version for a public package owned by a specified user. + * + * At this time, to use this endpoint, you must authenticate using an access token with the `packages:read` scope. + * If `package_type` is not `container`, your token must also include the `repo` scope. + */ + "packages/get-package-version-for-user": { + parameters: { + path: { + /** The type of supported package. Packages in GitHub's Gradle registry have the type `maven`. Docker images pushed to GitHub's Container registry (`ghcr.io`) have the type `container`. You can use the type `docker` to find images that were pushed to GitHub's Docker registry (`docker.pkg.github.com`), even if these have now been migrated to the Container registry. */ + package_type: components["parameters"]["package-type"]; + /** The name of the package. */ + package_name: components["parameters"]["package-name"]; + /** Unique identifier of the package version. */ + package_version_id: components["parameters"]["package-version-id"]; + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["package-version"]; + }; + }; + }; + }; + /** + * Deletes a specific package version for a user. If the package is public and the package version has more than 5,000 downloads, you cannot delete the package version. In this scenario, contact GitHub support for further assistance. + * + * To use this endpoint, you must authenticate using an access token with the `packages:read` and `packages:delete` scopes. In addition: + * - If `package_type` is not `container`, your token must also include the `repo` scope. + * - If `package_type` is `container`, you must also have admin permissions to the container you want to delete. 
+ */ + "packages/delete-package-version-for-user": { + parameters: { + path: { + /** The type of supported package. Packages in GitHub's Gradle registry have the type `maven`. Docker images pushed to GitHub's Container registry (`ghcr.io`) have the type `container`. You can use the type `docker` to find images that were pushed to GitHub's Docker registry (`docker.pkg.github.com`), even if these have now been migrated to the Container registry. */ + package_type: components["parameters"]["package-type"]; + /** The name of the package. */ + package_name: components["parameters"]["package-name"]; + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + /** Unique identifier of the package version. */ + package_version_id: components["parameters"]["package-version-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Restores a specific package version for a user. + * + * You can restore a deleted package under the following conditions: + * - The package was deleted within the last 30 days. + * - The same package namespace and version is still available and not reused for a new package. If the same package namespace is not available, you will not be able to restore your package. In this scenario, to restore the deleted package, you must delete the new package that uses the deleted package's namespace first. + * + * To use this endpoint, you must authenticate using an access token with the `packages:read` and `packages:write` scopes. In addition: + * - If `package_type` is not `container`, your token must also include the `repo` scope. + * - If `package_type` is `container`, you must also have admin permissions to the container that you want to restore. + */ + "packages/restore-package-version-for-user": { + parameters: { + path: { + /** The type of supported package. Packages in GitHub's Gradle registry have the type `maven`. Docker images pushed to GitHub's Container registry (`ghcr.io`) have the type `container`. You can use the type `docker` to find images that were pushed to GitHub's Docker registry (`docker.pkg.github.com`), even if these have now been migrated to the Container registry. */ + package_type: components["parameters"]["package-type"]; + /** The name of the package. */ + package_name: components["parameters"]["package-name"]; + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + /** Unique identifier of the package version. */ + package_version_id: components["parameters"]["package-version-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + "projects/list-for-user": { + parameters: { + path: { + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + query: { + /** Indicates the state of the projects to return. Can be either `open`, `closed`, or `all`. */ + state?: "open" | "closed" | "all"; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. 
*/ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["project"][]; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + }; + /** These are events that you've received by watching repos and following users. If you are authenticated as the given user, you will see private events. Otherwise, you'll only see public events. */ + "activity/list-received-events-for-user": { + parameters: { + path: { + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["event"][]; + }; + }; + }; + }; + "activity/list-received-public-events-for-user": { + parameters: { + path: { + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["event"][]; + }; + }; + }; + }; + /** Lists public repositories for the specified user. Note: For GitHub AE, this endpoint will list internal repositories for the specified user. */ + "repos/list-for-user": { + parameters: { + path: { + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + query: { + /** Limit results to repositories of the specified type. */ + type?: "all" | "owner" | "member"; + /** The property to sort the results by. */ + sort?: "created" | "updated" | "pushed" | "full_name"; + /** The order to sort by. Default: `asc` when using `full_name`, otherwise `desc`. */ + direction?: "asc" | "desc"; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["minimal-repository"][]; + }; + }; + }; + }; + /** + * Gets the summary of the free and paid GitHub Actions minutes used. + * + * Paid minutes only apply to workflows in private repositories that use GitHub-hosted runners. Minutes used is listed for each GitHub-hosted runner operating system. Any job re-runs are also included in the usage. The usage returned includes any minute multipliers for macOS and Windows runners, and is rounded up to the nearest whole minute. For more information, see "[Managing billing for GitHub Actions](https://docs.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-actions)". + * + * Access tokens must have the `user` scope. + */ + "billing/get-github-actions-billing-user": { + parameters: { + path: { + /** The handle for the GitHub user account. 
*/ + username: components["parameters"]["username"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["actions-billing-usage"]; + }; + }; + }; + }; + /** + * Gets the free and paid storage used for GitHub Packages in gigabytes. + * + * Paid minutes only apply to packages stored for private repositories. For more information, see "[Managing billing for GitHub Packages](https://docs.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-packages)." + * + * Access tokens must have the `user` scope. + */ + "billing/get-github-packages-billing-user": { + parameters: { + path: { + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["packages-billing-usage"]; + }; + }; + }; + }; + /** + * Gets the estimated paid and estimated total storage used for GitHub Actions and GitHub Packages. + * + * Paid minutes only apply to packages stored for private repositories. For more information, see "[Managing billing for GitHub Packages](https://docs.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-packages)." + * + * Access tokens must have the `user` scope. + */ + "billing/get-shared-storage-billing-user": { + parameters: { + path: { + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["combined-billing-usage"]; + }; + }; + }; + }; + /** + * Lists repositories a user has starred. + * + * You can also find out _when_ stars were created by passing the following custom [media type](https://docs.github.com/rest/overview/media-types/) via the `Accept` header: + */ + "activity/list-repos-starred-by-user": { + parameters: { + path: { + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + query: { + /** The property to sort the results by. `created` means when the repository was starred. `updated` means when the repository was last pushed to. */ + sort?: components["parameters"]["sort"]; + /** The direction to sort the results by. */ + direction?: components["parameters"]["direction"]; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": Partial< + components["schemas"]["starred-repository"][] + > & + Partial; + }; + }; + }; + }; + /** Lists repositories a user is watching. */ + "activity/list-repos-watched-by-user": { + parameters: { + path: { + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. 
*/ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["minimal-repository"][]; + }; + }; + }; + }; + /** Get a random sentence from the Zen of GitHub */ + "meta/get-zen": { + responses: { + /** Response */ + 200: { + content: { + "text/plain": string; + }; + }; + }; + }; + /** + * **Deprecated**: Use `repos.compareCommitsWithBasehead()` (`GET /repos/{owner}/{repo}/compare/{basehead}`) instead. Both `:base` and `:head` must be branch names in `:repo`. To compare branches across other repositories in the same network as `:repo`, use the format `:branch`. + * + * The response from the API is equivalent to running the `git log base..head` command; however, commits are returned in chronological order. Pass the appropriate [media type](https://docs.github.com/rest/overview/media-types/#commits-commit-comparison-and-pull-requests) to fetch diff and patch formats. + * + * The response also includes details on the files that were changed between the two commits. This includes the status of the change (for example, if a file was added, removed, modified, or renamed), and details of the change itself. For example, files with a `renamed` status have a `previous_filename` field showing the previous filename of the file, and files with a `modified` status have a `patch` field showing the changes made to the file. + * + * **Working with large comparisons** + * + * To process a response with a large number of commits, you can use (`per_page` or `page`) to paginate the results. When using paging, the list of changed files is only returned with page 1, but includes all changed files for the entire comparison. For more information on working with pagination, see "[Traversing with pagination](/rest/guides/traversing-with-pagination)." + * + * When calling this API without any paging parameters (`per_page` or `page`), the returned list is limited to 250 commits and the last commit in the list is the most recent of the entire comparison. When a paging parameter is specified, the first commit in the returned list of each page is the earliest. + * + * **Signature verification object** + * + * The response will include a `verification` object that describes the result of verifying the commit's signature. The following fields are included in the `verification` object: + * + * | Name | Type | Description | + * | ---- | ---- | ----------- | + * | `verified` | `boolean` | Indicates whether GitHub considers the signature in this commit to be verified. | + * | `reason` | `string` | The reason for verified value. Possible values and their meanings are enumerated in table below. | + * | `signature` | `string` | The signature that was extracted from the commit. | + * | `payload` | `string` | The value that was signed. | + * + * These are the possible values for `reason` in the `verification` object: + * + * | Value | Description | + * | ----- | ----------- | + * | `expired_key` | The key that made the signature is expired. | + * | `not_signing_key` | The "signing" flag is not among the usage flags in the GPG key that made the signature. | + * | `gpgverify_error` | There was an error communicating with the signature verification service. | + * | `gpgverify_unavailable` | The signature verification service is currently unavailable. | + * | `unsigned` | The object does not include a signature. | + * | `unknown_signature_type` | A non-PGP signature was found in the commit. 
| + * | `no_user` | No user was associated with the `committer` email address in the commit. | + * | `unverified_email` | The `committer` email address in the commit was associated with a user, but the email address is not verified on her/his account. | + * | `bad_email` | The `committer` email address in the commit is not included in the identities of the PGP key that made the signature. | + * | `unknown_key` | The key that made the signature has not been registered with any user's account. | + * | `malformed_signature` | There was an error parsing the signature. | + * | `invalid` | The signature could not be cryptographically verified using the key whose key-id was found in the signature. | + * | `valid` | None of the above errors applied, so the signature is considered to be verified. | + */ + "repos/compare-commits": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + base: string; + head: string; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["commit-comparison"]; + }; + }; + 404: components["responses"]["not_found"]; + 500: components["responses"]["internal_error"]; + }; + }; +} + +export interface external {} diff --git a/node_modules/@octokit/plugin-paginate-rest/LICENSE b/node_modules/@octokit/plugin-paginate-rest/LICENSE new file mode 100644 index 0000000..57bee5f --- /dev/null +++ b/node_modules/@octokit/plugin-paginate-rest/LICENSE @@ -0,0 +1,7 @@ +MIT License Copyright (c) 2019 Octokit contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice (including the next paragraph) shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
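A minimal sketch of how the generated operation types above can be consumed, assuming they are the `operations` interface exported by `@octokit/openapi-types` (the exact import path is an assumption and may differ in your dependency tree):

```typescript
// Sketch: deriving a response type from the generated `operations` interface.
// Assumption: these declarations are exported by "@octokit/openapi-types";
// adjust the import path to match your dependency tree.
import type { operations } from "@octokit/openapi-types";

// The 200 response body of "users/get-by-username", as declared above,
// is a union of the "private-user" and "public-user" schemas.
type GetUserResponse =
  operations["users/get-by-username"]["responses"][200]["content"]["application/json"];

// `login` is present on both members of the union, so this access is type-safe.
function describeUser(user: GetUserResponse): string {
  return `@${user.login}`;
}
```

The same indexed-access pattern works for any other operation key declared above, for example `operations["repos/compare-commits"]`.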
diff --git a/node_modules/@octokit/plugin-paginate-rest/README.md b/node_modules/@octokit/plugin-paginate-rest/README.md new file mode 100644 index 0000000..1e3c0ba --- /dev/null +++ b/node_modules/@octokit/plugin-paginate-rest/README.md @@ -0,0 +1,269 @@ +# plugin-paginate-rest.js + +> Octokit plugin to paginate REST API endpoint responses + +[![@latest](https://img.shields.io/npm/v/@octokit/plugin-paginate-rest.svg)](https://www.npmjs.com/package/@octokit/plugin-paginate-rest) +[![Build Status](https://github.com/octokit/plugin-paginate-rest.js/workflows/Test/badge.svg)](https://github.com/octokit/plugin-paginate-rest.js/actions?workflow=Test) + +## Usage + + + + + + +
+Browsers + + +Load `@octokit/plugin-paginate-rest` and [`@octokit/core`](https://github.com/octokit/core.js) (or a core-compatible module) directly from [cdn.skypack.dev](https://cdn.skypack.dev) + +```html +<script type="module"> +import { Octokit } from "https://cdn.skypack.dev/@octokit/core"; +import { + paginateRest, + composePaginateRest, +} from "https://cdn.skypack.dev/@octokit/plugin-paginate-rest"; +</script> +``` + +
+Node + + +Install with `npm install @octokit/core @octokit/plugin-paginate-rest`. Optionally replace `@octokit/core` with a core-compatible module. + +```js +const { Octokit } = require("@octokit/core"); +const { + paginateRest, + composePaginateRest, +} = require("@octokit/plugin-paginate-rest"); +``` + +
+ +```js +const MyOctokit = Octokit.plugin(paginateRest); +const octokit = new MyOctokit({ auth: "secret123" }); + +// See https://developer.github.com/v3/issues/#list-issues-for-a-repository +const issues = await octokit.paginate("GET /repos/{owner}/{repo}/issues", { + owner: "octocat", + repo: "hello-world", + since: "2010-10-01", + per_page: 100, +}); +``` + +If you want to utilize the pagination methods in another plugin, use `composePaginateRest`. + +```js +function myPlugin(octokit, options) { + return { + allStars({ owner, repo }) { + return composePaginateRest( + octokit, + "GET /repos/{owner}/{repo}/stargazers", + { owner, repo } + ); + }, + }; +} +``` + +## `octokit.paginate()` + +The `paginateRest` plugin adds a new `octokit.paginate()` method which accepts the same parameters as [`octokit.request`](https://github.com/octokit/request.js#request). Only "List ..." endpoints such as [List issues for a repository](https://developer.github.com/v3/issues/#list-issues-for-a-repository) support pagination. Their [response includes a Link header](https://developer.github.com/v3/issues/#response-1). For other endpoints, `octokit.paginate()` behaves the same as `octokit.request()`. + +The `per_page` parameter usually defaults to `30` and can be set to up to `100`, which helps retrieve a large amount of data without hitting the rate limits too soon. + +An optional `mapFunction` can be passed to map each page response to a new value, usually an array with only the data you need. This can help to reduce memory usage, as only the relevant data has to be kept in memory until the pagination is complete. + +```js +const issueTitles = await octokit.paginate( + "GET /repos/{owner}/{repo}/issues", + { + owner: "octocat", + repo: "hello-world", + since: "2010-10-01", + per_page: 100, + }, + (response) => response.data.map((issue) => issue.title) +); +``` + +The `mapFunction` gets a 2nd argument `done` which can be called to end the pagination early. + +```js +const issues = await octokit.paginate( + "GET /repos/{owner}/{repo}/issues", + { + owner: "octocat", + repo: "hello-world", + since: "2010-10-01", + per_page: 100, + }, + (response, done) => { + if (response.data.find((issue) => issue.title.includes("something"))) { + done(); + } + return response.data; + } +); +``` + +Alternatively you can pass a `request` method as first argument. This is great when using in combination with [`@octokit/plugin-rest-endpoint-methods`](https://github.com/octokit/plugin-rest-endpoint-methods.js/): + +```js +const issues = await octokit.paginate(octokit.rest.issues.listForRepo, { + owner: "octocat", + repo: "hello-world", + since: "2010-10-01", + per_page: 100, +}); +``` + +## `octokit.paginate.iterator()` + +If your target runtime environment supports async iterators (such as most modern browsers and Node 10+), you can iterate through each response: + +```js +const parameters = { + owner: "octocat", + repo: "hello-world", + since: "2010-10-01", + per_page: 100, +}; +for await (const response of octokit.paginate.iterator( + "GET /repos/{owner}/{repo}/issues", + parameters +)) { + // do whatever you want with each response, break out of the loop, etc. + const issues = response.data; + console.log("%d issues found", issues.length); +} +``` + +Alternatively you can pass a `request` method as first argument.
This is great when using in combination with [`@octokit/plugin-rest-endpoint-methods`](https://github.com/octokit/plugin-rest-endpoint-methods.js/): + +```js +const parameters = { + owner: "octocat", + repo: "hello-world", + since: "2010-10-01", + per_page: 100, +}; +for await (const response of octokit.paginate.iterator( + octokit.rest.issues.listForRepo, + parameters +)) { + // do whatever you want with each response, break out of the loop, etc. + const issues = response.data; + console.log("%d issues found", issues.length); +} +``` + +## `composePaginateRest` and `composePaginateRest.iterator` + +The `compose*` methods work just like their `octokit.*` counterparts described above, with the difference that both methods require an `octokit` instance to be passed as the first argument. + +## How it works + +`octokit.paginate()` wraps `octokit.request()`. As long as a `rel="next"` link value is present in the response's `Link` header, it sends another request for that URL, and so on. + +Most of GitHub's paginating REST API endpoints return an array, but there are a few exceptions which return an object with a key that includes the items array. For example: + +- [Search repositories](https://developer.github.com/v3/search/#example) (key: `items`) +- [List check runs for a specific ref](https://developer.github.com/v3/checks/runs/#response-3) (key: `check_runs`) +- [List check suites for a specific ref](https://developer.github.com/v3/checks/suites/#response-1) (key: `check_suites`) +- [List repositories](https://developer.github.com/v3/apps/installations/#list-repositories) for an installation (key: `repositories`) +- [List installations for a user](https://developer.github.com/v3/apps/installations/#response-1) (key: `installations`) + +`octokit.paginate()` works around these inconsistencies so you don't have to worry about them. + +If a response lacks the `Link` header, `octokit.paginate()` still resolves with an array, even if the response returns a single object. + +## Types + +The plugin also exposes some types and runtime type guards for TypeScript projects. + + + + + +
+Types + + +```typescript +import { + PaginateInterface, + PaginatingEndpoints, +} from "@octokit/plugin-paginate-rest"; +``` + +
+Guards + + +```typescript +import { isPaginatingEndpoint } from "@octokit/plugin-paginate-rest"; +``` + +
+ +### PaginateInterface + +An `interface` that declares all the overloads of the `.paginate` method. + +### PaginatingEndpoints + +An `interface` which describes all API endpoints supported by the plugin. Some overloads of `.paginate()` method and `composePaginateRest()` function depend on `PaginatingEndpoints`, using the `keyof PaginatingEndpoints` as a type for one of its arguments. + +```typescript +import { Octokit } from "@octokit/core"; +import { + PaginatingEndpoints, + composePaginateRest, +} from "@octokit/plugin-paginate-rest"; + +type DataType = "data" extends keyof T ? T["data"] : unknown; + +async function myPaginatePlugin( + octokit: Octokit, + endpoint: E, + parameters?: PaginatingEndpoints[E]["parameters"] +): Promise> { + return await composePaginateRest(octokit, endpoint, parameters); +} +``` + +### isPaginatingEndpoint + +A type guard, `isPaginatingEndpoint(arg)` returns `true` if `arg` is one of the keys in `PaginatingEndpoints` (is `keyof PaginatingEndpoints`). + +```typescript +import { Octokit } from "@octokit/core"; +import { + isPaginatingEndpoint, + composePaginateRest, +} from "@octokit/plugin-paginate-rest"; + +async function myPlugin(octokit: Octokit, arg: unknown) { + if (isPaginatingEndpoint(arg)) { + return await composePaginateRest(octokit, arg); + } + // ... +} +``` + +## Contributing + +See [CONTRIBUTING.md](CONTRIBUTING.md) + +## License + +[MIT](LICENSE) diff --git a/node_modules/@octokit/plugin-paginate-rest/dist-node/index.js b/node_modules/@octokit/plugin-paginate-rest/dist-node/index.js new file mode 100644 index 0000000..d5fc599 --- /dev/null +++ b/node_modules/@octokit/plugin-paginate-rest/dist-node/index.js @@ -0,0 +1,205 @@ +'use strict'; + +Object.defineProperty(exports, '__esModule', { value: true }); + +const VERSION = "2.21.3"; + +function ownKeys(object, enumerableOnly) { + var keys = Object.keys(object); + + if (Object.getOwnPropertySymbols) { + var symbols = Object.getOwnPropertySymbols(object); + enumerableOnly && (symbols = symbols.filter(function (sym) { + return Object.getOwnPropertyDescriptor(object, sym).enumerable; + })), keys.push.apply(keys, symbols); + } + + return keys; +} + +function _objectSpread2(target) { + for (var i = 1; i < arguments.length; i++) { + var source = null != arguments[i] ? arguments[i] : {}; + i % 2 ? ownKeys(Object(source), !0).forEach(function (key) { + _defineProperty(target, key, source[key]); + }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) { + Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); + }); + } + + return target; +} + +function _defineProperty(obj, key, value) { + if (key in obj) { + Object.defineProperty(obj, key, { + value: value, + enumerable: true, + configurable: true, + writable: true + }); + } else { + obj[key] = value; + } + + return obj; +} + +/** + * Some “list” response that can be paginated have a different response structure + * + * They have a `total_count` key in the response (search also has `incomplete_results`, + * /installation/repositories also has `repository_selection`), as well as a key with + * the list of the items which name varies from endpoint to endpoint. + * + * Octokit normalizes these responses so that paginated results are always returned following + * the same structure. 
One challenge is that if the list response has only one page, no Link + * header is provided, so this header alone is not sufficient to check wether a response is + * paginated or not. + * + * We check if a "total_count" key is present in the response data, but also make sure that + * a "url" property is not, as the "Get the combined status for a specific ref" endpoint would + * otherwise match: https://developer.github.com/v3/repos/statuses/#get-the-combined-status-for-a-specific-ref + */ +function normalizePaginatedListResponse(response) { + // endpoints can respond with 204 if repository is empty + if (!response.data) { + return _objectSpread2(_objectSpread2({}, response), {}, { + data: [] + }); + } + + const responseNeedsNormalization = "total_count" in response.data && !("url" in response.data); + if (!responseNeedsNormalization) return response; // keep the additional properties intact as there is currently no other way + // to retrieve the same information. + + const incompleteResults = response.data.incomplete_results; + const repositorySelection = response.data.repository_selection; + const totalCount = response.data.total_count; + delete response.data.incomplete_results; + delete response.data.repository_selection; + delete response.data.total_count; + const namespaceKey = Object.keys(response.data)[0]; + const data = response.data[namespaceKey]; + response.data = data; + + if (typeof incompleteResults !== "undefined") { + response.data.incomplete_results = incompleteResults; + } + + if (typeof repositorySelection !== "undefined") { + response.data.repository_selection = repositorySelection; + } + + response.data.total_count = totalCount; + return response; +} + +function iterator(octokit, route, parameters) { + const options = typeof route === "function" ? route.endpoint(parameters) : octokit.request.endpoint(route, parameters); + const requestMethod = typeof route === "function" ? route : octokit.request; + const method = options.method; + const headers = options.headers; + let url = options.url; + return { + [Symbol.asyncIterator]: () => ({ + async next() { + if (!url) return { + done: true + }; + + try { + const response = await requestMethod({ + method, + url, + headers + }); + const normalizedResponse = normalizePaginatedListResponse(response); // `response.headers.link` format: + // '; rel="next", ; rel="last"' + // sets `url` to undefined if "next" URL is not present or `link` header is not set + + url = ((normalizedResponse.headers.link || "").match(/<([^>]+)>;\s*rel="next"/) || [])[1]; + return { + value: normalizedResponse + }; + } catch (error) { + if (error.status !== 409) throw error; + url = ""; + return { + value: { + status: 200, + headers: {}, + data: [] + } + }; + } + } + + }) + }; +} + +function paginate(octokit, route, parameters, mapFn) { + if (typeof parameters === "function") { + mapFn = parameters; + parameters = undefined; + } + + return gather(octokit, [], iterator(octokit, route, parameters)[Symbol.asyncIterator](), mapFn); +} + +function gather(octokit, results, iterator, mapFn) { + return iterator.next().then(result => { + if (result.done) { + return results; + } + + let earlyExit = false; + + function done() { + earlyExit = true; + } + + results = results.concat(mapFn ? 
mapFn(result.value, done) : result.value.data); + + if (earlyExit) { + return results; + } + + return gather(octokit, results, iterator, mapFn); + }); +} + +const composePaginateRest = Object.assign(paginate, { + iterator +}); + +const paginatingEndpoints = ["GET /app/hook/deliveries", "GET /app/installations", "GET /applications/grants", "GET /authorizations", "GET /enterprises/{enterprise}/actions/permissions/organizations", "GET /enterprises/{enterprise}/actions/runner-groups", "GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/organizations", "GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/runners", "GET /enterprises/{enterprise}/actions/runners", "GET /enterprises/{enterprise}/audit-log", "GET /enterprises/{enterprise}/secret-scanning/alerts", "GET /enterprises/{enterprise}/settings/billing/advanced-security", "GET /events", "GET /gists", "GET /gists/public", "GET /gists/starred", "GET /gists/{gist_id}/comments", "GET /gists/{gist_id}/commits", "GET /gists/{gist_id}/forks", "GET /installation/repositories", "GET /issues", "GET /licenses", "GET /marketplace_listing/plans", "GET /marketplace_listing/plans/{plan_id}/accounts", "GET /marketplace_listing/stubbed/plans", "GET /marketplace_listing/stubbed/plans/{plan_id}/accounts", "GET /networks/{owner}/{repo}/events", "GET /notifications", "GET /organizations", "GET /orgs/{org}/actions/cache/usage-by-repository", "GET /orgs/{org}/actions/permissions/repositories", "GET /orgs/{org}/actions/runner-groups", "GET /orgs/{org}/actions/runner-groups/{runner_group_id}/repositories", "GET /orgs/{org}/actions/runner-groups/{runner_group_id}/runners", "GET /orgs/{org}/actions/runners", "GET /orgs/{org}/actions/secrets", "GET /orgs/{org}/actions/secrets/{secret_name}/repositories", "GET /orgs/{org}/audit-log", "GET /orgs/{org}/blocks", "GET /orgs/{org}/code-scanning/alerts", "GET /orgs/{org}/codespaces", "GET /orgs/{org}/credential-authorizations", "GET /orgs/{org}/dependabot/secrets", "GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories", "GET /orgs/{org}/events", "GET /orgs/{org}/external-groups", "GET /orgs/{org}/failed_invitations", "GET /orgs/{org}/hooks", "GET /orgs/{org}/hooks/{hook_id}/deliveries", "GET /orgs/{org}/installations", "GET /orgs/{org}/invitations", "GET /orgs/{org}/invitations/{invitation_id}/teams", "GET /orgs/{org}/issues", "GET /orgs/{org}/members", "GET /orgs/{org}/migrations", "GET /orgs/{org}/migrations/{migration_id}/repositories", "GET /orgs/{org}/outside_collaborators", "GET /orgs/{org}/packages", "GET /orgs/{org}/packages/{package_type}/{package_name}/versions", "GET /orgs/{org}/projects", "GET /orgs/{org}/public_members", "GET /orgs/{org}/repos", "GET /orgs/{org}/secret-scanning/alerts", "GET /orgs/{org}/settings/billing/advanced-security", "GET /orgs/{org}/team-sync/groups", "GET /orgs/{org}/teams", "GET /orgs/{org}/teams/{team_slug}/discussions", "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments", "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", "GET /orgs/{org}/teams/{team_slug}/invitations", "GET /orgs/{org}/teams/{team_slug}/members", "GET /orgs/{org}/teams/{team_slug}/projects", "GET /orgs/{org}/teams/{team_slug}/repos", "GET /orgs/{org}/teams/{team_slug}/teams", "GET /projects/columns/{column_id}/cards", "GET /projects/{project_id}/collaborators", "GET /projects/{project_id}/columns", "GET 
/repos/{owner}/{repo}/actions/artifacts", "GET /repos/{owner}/{repo}/actions/caches", "GET /repos/{owner}/{repo}/actions/runners", "GET /repos/{owner}/{repo}/actions/runs", "GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts", "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs", "GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs", "GET /repos/{owner}/{repo}/actions/secrets", "GET /repos/{owner}/{repo}/actions/workflows", "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs", "GET /repos/{owner}/{repo}/assignees", "GET /repos/{owner}/{repo}/branches", "GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations", "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs", "GET /repos/{owner}/{repo}/code-scanning/alerts", "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", "GET /repos/{owner}/{repo}/code-scanning/analyses", "GET /repos/{owner}/{repo}/codespaces", "GET /repos/{owner}/{repo}/codespaces/devcontainers", "GET /repos/{owner}/{repo}/codespaces/secrets", "GET /repos/{owner}/{repo}/collaborators", "GET /repos/{owner}/{repo}/comments", "GET /repos/{owner}/{repo}/comments/{comment_id}/reactions", "GET /repos/{owner}/{repo}/commits", "GET /repos/{owner}/{repo}/commits/{commit_sha}/comments", "GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls", "GET /repos/{owner}/{repo}/commits/{ref}/check-runs", "GET /repos/{owner}/{repo}/commits/{ref}/check-suites", "GET /repos/{owner}/{repo}/commits/{ref}/status", "GET /repos/{owner}/{repo}/commits/{ref}/statuses", "GET /repos/{owner}/{repo}/contributors", "GET /repos/{owner}/{repo}/dependabot/secrets", "GET /repos/{owner}/{repo}/deployments", "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses", "GET /repos/{owner}/{repo}/environments", "GET /repos/{owner}/{repo}/events", "GET /repos/{owner}/{repo}/forks", "GET /repos/{owner}/{repo}/git/matching-refs/{ref}", "GET /repos/{owner}/{repo}/hooks", "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries", "GET /repos/{owner}/{repo}/invitations", "GET /repos/{owner}/{repo}/issues", "GET /repos/{owner}/{repo}/issues/comments", "GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", "GET /repos/{owner}/{repo}/issues/events", "GET /repos/{owner}/{repo}/issues/{issue_number}/comments", "GET /repos/{owner}/{repo}/issues/{issue_number}/events", "GET /repos/{owner}/{repo}/issues/{issue_number}/labels", "GET /repos/{owner}/{repo}/issues/{issue_number}/reactions", "GET /repos/{owner}/{repo}/issues/{issue_number}/timeline", "GET /repos/{owner}/{repo}/keys", "GET /repos/{owner}/{repo}/labels", "GET /repos/{owner}/{repo}/milestones", "GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels", "GET /repos/{owner}/{repo}/notifications", "GET /repos/{owner}/{repo}/pages/builds", "GET /repos/{owner}/{repo}/projects", "GET /repos/{owner}/{repo}/pulls", "GET /repos/{owner}/{repo}/pulls/comments", "GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", "GET /repos/{owner}/{repo}/pulls/{pull_number}/comments", "GET /repos/{owner}/{repo}/pulls/{pull_number}/commits", "GET /repos/{owner}/{repo}/pulls/{pull_number}/files", "GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers", "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews", "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments", "GET /repos/{owner}/{repo}/releases", "GET /repos/{owner}/{repo}/releases/{release_id}/assets", "GET /repos/{owner}/{repo}/releases/{release_id}/reactions", "GET 
/repos/{owner}/{repo}/secret-scanning/alerts", "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations", "GET /repos/{owner}/{repo}/stargazers", "GET /repos/{owner}/{repo}/subscribers", "GET /repos/{owner}/{repo}/tags", "GET /repos/{owner}/{repo}/teams", "GET /repos/{owner}/{repo}/topics", "GET /repositories", "GET /repositories/{repository_id}/environments/{environment_name}/secrets", "GET /search/code", "GET /search/commits", "GET /search/issues", "GET /search/labels", "GET /search/repositories", "GET /search/topics", "GET /search/users", "GET /teams/{team_id}/discussions", "GET /teams/{team_id}/discussions/{discussion_number}/comments", "GET /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions", "GET /teams/{team_id}/discussions/{discussion_number}/reactions", "GET /teams/{team_id}/invitations", "GET /teams/{team_id}/members", "GET /teams/{team_id}/projects", "GET /teams/{team_id}/repos", "GET /teams/{team_id}/teams", "GET /user/blocks", "GET /user/codespaces", "GET /user/codespaces/secrets", "GET /user/emails", "GET /user/followers", "GET /user/following", "GET /user/gpg_keys", "GET /user/installations", "GET /user/installations/{installation_id}/repositories", "GET /user/issues", "GET /user/keys", "GET /user/marketplace_purchases", "GET /user/marketplace_purchases/stubbed", "GET /user/memberships/orgs", "GET /user/migrations", "GET /user/migrations/{migration_id}/repositories", "GET /user/orgs", "GET /user/packages", "GET /user/packages/{package_type}/{package_name}/versions", "GET /user/public_emails", "GET /user/repos", "GET /user/repository_invitations", "GET /user/starred", "GET /user/subscriptions", "GET /user/teams", "GET /users", "GET /users/{username}/events", "GET /users/{username}/events/orgs/{org}", "GET /users/{username}/events/public", "GET /users/{username}/followers", "GET /users/{username}/following", "GET /users/{username}/gists", "GET /users/{username}/gpg_keys", "GET /users/{username}/keys", "GET /users/{username}/orgs", "GET /users/{username}/packages", "GET /users/{username}/projects", "GET /users/{username}/received_events", "GET /users/{username}/received_events/public", "GET /users/{username}/repos", "GET /users/{username}/starred", "GET /users/{username}/subscriptions"]; + +function isPaginatingEndpoint(arg) { + if (typeof arg === "string") { + return paginatingEndpoints.includes(arg); + } else { + return false; + } +} + +/** + * @param octokit Octokit instance + * @param options Options passed to Octokit constructor + */ + +function paginateRest(octokit) { + return { + paginate: Object.assign(paginate.bind(null, octokit), { + iterator: iterator.bind(null, octokit) + }) + }; +} +paginateRest.VERSION = VERSION; + +exports.composePaginateRest = composePaginateRest; +exports.isPaginatingEndpoint = isPaginatingEndpoint; +exports.paginateRest = paginateRest; +exports.paginatingEndpoints = paginatingEndpoints; +//# sourceMappingURL=index.js.map diff --git a/node_modules/@octokit/plugin-paginate-rest/dist-node/index.js.map b/node_modules/@octokit/plugin-paginate-rest/dist-node/index.js.map new file mode 100644 index 0000000..fb04fff --- /dev/null +++ b/node_modules/@octokit/plugin-paginate-rest/dist-node/index.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"index.js","sources":["../dist-src/version.js","../dist-src/normalize-paginated-list-response.js","../dist-src/iterator.js","../dist-src/paginate.js","../dist-src/compose-paginate.js","../dist-src/generated/paginating-endpoints.js","../dist-src/paginating-endpoints.js","../dist-src/index.js"],"sourcesContent":["export const VERSION = \"2.21.3\";\n","/**\n * Some “list” response that can be paginated have a different response structure\n *\n * They have a `total_count` key in the response (search also has `incomplete_results`,\n * /installation/repositories also has `repository_selection`), as well as a key with\n * the list of the items which name varies from endpoint to endpoint.\n *\n * Octokit normalizes these responses so that paginated results are always returned following\n * the same structure. One challenge is that if the list response has only one page, no Link\n * header is provided, so this header alone is not sufficient to check wether a response is\n * paginated or not.\n *\n * We check if a \"total_count\" key is present in the response data, but also make sure that\n * a \"url\" property is not, as the \"Get the combined status for a specific ref\" endpoint would\n * otherwise match: https://developer.github.com/v3/repos/statuses/#get-the-combined-status-for-a-specific-ref\n */\nexport function normalizePaginatedListResponse(response) {\n // endpoints can respond with 204 if repository is empty\n if (!response.data) {\n return {\n ...response,\n data: [],\n };\n }\n const responseNeedsNormalization = \"total_count\" in response.data && !(\"url\" in response.data);\n if (!responseNeedsNormalization)\n return response;\n // keep the additional properties intact as there is currently no other way\n // to retrieve the same information.\n const incompleteResults = response.data.incomplete_results;\n const repositorySelection = response.data.repository_selection;\n const totalCount = response.data.total_count;\n delete response.data.incomplete_results;\n delete response.data.repository_selection;\n delete response.data.total_count;\n const namespaceKey = Object.keys(response.data)[0];\n const data = response.data[namespaceKey];\n response.data = data;\n if (typeof incompleteResults !== \"undefined\") {\n response.data.incomplete_results = incompleteResults;\n }\n if (typeof repositorySelection !== \"undefined\") {\n response.data.repository_selection = repositorySelection;\n }\n response.data.total_count = totalCount;\n return response;\n}\n","import { normalizePaginatedListResponse } from \"./normalize-paginated-list-response\";\nexport function iterator(octokit, route, parameters) {\n const options = typeof route === \"function\"\n ? route.endpoint(parameters)\n : octokit.request.endpoint(route, parameters);\n const requestMethod = typeof route === \"function\" ? 
route : octokit.request;\n const method = options.method;\n const headers = options.headers;\n let url = options.url;\n return {\n [Symbol.asyncIterator]: () => ({\n async next() {\n if (!url)\n return { done: true };\n try {\n const response = await requestMethod({ method, url, headers });\n const normalizedResponse = normalizePaginatedListResponse(response);\n // `response.headers.link` format:\n // '; rel=\"next\", ; rel=\"last\"'\n // sets `url` to undefined if \"next\" URL is not present or `link` header is not set\n url = ((normalizedResponse.headers.link || \"\").match(/<([^>]+)>;\\s*rel=\"next\"/) || [])[1];\n return { value: normalizedResponse };\n }\n catch (error) {\n if (error.status !== 409)\n throw error;\n url = \"\";\n return {\n value: {\n status: 200,\n headers: {},\n data: [],\n },\n };\n }\n },\n }),\n };\n}\n","import { iterator } from \"./iterator\";\nexport function paginate(octokit, route, parameters, mapFn) {\n if (typeof parameters === \"function\") {\n mapFn = parameters;\n parameters = undefined;\n }\n return gather(octokit, [], iterator(octokit, route, parameters)[Symbol.asyncIterator](), mapFn);\n}\nfunction gather(octokit, results, iterator, mapFn) {\n return iterator.next().then((result) => {\n if (result.done) {\n return results;\n }\n let earlyExit = false;\n function done() {\n earlyExit = true;\n }\n results = results.concat(mapFn ? mapFn(result.value, done) : result.value.data);\n if (earlyExit) {\n return results;\n }\n return gather(octokit, results, iterator, mapFn);\n });\n}\n","import { paginate } from \"./paginate\";\nimport { iterator } from \"./iterator\";\nexport const composePaginateRest = Object.assign(paginate, {\n iterator,\n});\n","export const paginatingEndpoints = [\n \"GET /app/hook/deliveries\",\n \"GET /app/installations\",\n \"GET /applications/grants\",\n \"GET /authorizations\",\n \"GET /enterprises/{enterprise}/actions/permissions/organizations\",\n \"GET /enterprises/{enterprise}/actions/runner-groups\",\n \"GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/organizations\",\n \"GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/runners\",\n \"GET /enterprises/{enterprise}/actions/runners\",\n \"GET /enterprises/{enterprise}/audit-log\",\n \"GET /enterprises/{enterprise}/secret-scanning/alerts\",\n \"GET /enterprises/{enterprise}/settings/billing/advanced-security\",\n \"GET /events\",\n \"GET /gists\",\n \"GET /gists/public\",\n \"GET /gists/starred\",\n \"GET /gists/{gist_id}/comments\",\n \"GET /gists/{gist_id}/commits\",\n \"GET /gists/{gist_id}/forks\",\n \"GET /installation/repositories\",\n \"GET /issues\",\n \"GET /licenses\",\n \"GET /marketplace_listing/plans\",\n \"GET /marketplace_listing/plans/{plan_id}/accounts\",\n \"GET /marketplace_listing/stubbed/plans\",\n \"GET /marketplace_listing/stubbed/plans/{plan_id}/accounts\",\n \"GET /networks/{owner}/{repo}/events\",\n \"GET /notifications\",\n \"GET /organizations\",\n \"GET /orgs/{org}/actions/cache/usage-by-repository\",\n \"GET /orgs/{org}/actions/permissions/repositories\",\n \"GET /orgs/{org}/actions/runner-groups\",\n \"GET /orgs/{org}/actions/runner-groups/{runner_group_id}/repositories\",\n \"GET /orgs/{org}/actions/runner-groups/{runner_group_id}/runners\",\n \"GET /orgs/{org}/actions/runners\",\n \"GET /orgs/{org}/actions/secrets\",\n \"GET /orgs/{org}/actions/secrets/{secret_name}/repositories\",\n \"GET /orgs/{org}/audit-log\",\n \"GET /orgs/{org}/blocks\",\n \"GET /orgs/{org}/code-scanning/alerts\",\n \"GET 
/orgs/{org}/codespaces\",\n \"GET /orgs/{org}/credential-authorizations\",\n \"GET /orgs/{org}/dependabot/secrets\",\n \"GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories\",\n \"GET /orgs/{org}/events\",\n \"GET /orgs/{org}/external-groups\",\n \"GET /orgs/{org}/failed_invitations\",\n \"GET /orgs/{org}/hooks\",\n \"GET /orgs/{org}/hooks/{hook_id}/deliveries\",\n \"GET /orgs/{org}/installations\",\n \"GET /orgs/{org}/invitations\",\n \"GET /orgs/{org}/invitations/{invitation_id}/teams\",\n \"GET /orgs/{org}/issues\",\n \"GET /orgs/{org}/members\",\n \"GET /orgs/{org}/migrations\",\n \"GET /orgs/{org}/migrations/{migration_id}/repositories\",\n \"GET /orgs/{org}/outside_collaborators\",\n \"GET /orgs/{org}/packages\",\n \"GET /orgs/{org}/packages/{package_type}/{package_name}/versions\",\n \"GET /orgs/{org}/projects\",\n \"GET /orgs/{org}/public_members\",\n \"GET /orgs/{org}/repos\",\n \"GET /orgs/{org}/secret-scanning/alerts\",\n \"GET /orgs/{org}/settings/billing/advanced-security\",\n \"GET /orgs/{org}/team-sync/groups\",\n \"GET /orgs/{org}/teams\",\n \"GET /orgs/{org}/teams/{team_slug}/discussions\",\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments\",\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions\",\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions\",\n \"GET /orgs/{org}/teams/{team_slug}/invitations\",\n \"GET /orgs/{org}/teams/{team_slug}/members\",\n \"GET /orgs/{org}/teams/{team_slug}/projects\",\n \"GET /orgs/{org}/teams/{team_slug}/repos\",\n \"GET /orgs/{org}/teams/{team_slug}/teams\",\n \"GET /projects/columns/{column_id}/cards\",\n \"GET /projects/{project_id}/collaborators\",\n \"GET /projects/{project_id}/columns\",\n \"GET /repos/{owner}/{repo}/actions/artifacts\",\n \"GET /repos/{owner}/{repo}/actions/caches\",\n \"GET /repos/{owner}/{repo}/actions/runners\",\n \"GET /repos/{owner}/{repo}/actions/runs\",\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts\",\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs\",\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs\",\n \"GET /repos/{owner}/{repo}/actions/secrets\",\n \"GET /repos/{owner}/{repo}/actions/workflows\",\n \"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs\",\n \"GET /repos/{owner}/{repo}/assignees\",\n \"GET /repos/{owner}/{repo}/branches\",\n \"GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations\",\n \"GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs\",\n \"GET /repos/{owner}/{repo}/code-scanning/alerts\",\n \"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances\",\n \"GET /repos/{owner}/{repo}/code-scanning/analyses\",\n \"GET /repos/{owner}/{repo}/codespaces\",\n \"GET /repos/{owner}/{repo}/codespaces/devcontainers\",\n \"GET /repos/{owner}/{repo}/codespaces/secrets\",\n \"GET /repos/{owner}/{repo}/collaborators\",\n \"GET /repos/{owner}/{repo}/comments\",\n \"GET /repos/{owner}/{repo}/comments/{comment_id}/reactions\",\n \"GET /repos/{owner}/{repo}/commits\",\n \"GET /repos/{owner}/{repo}/commits/{commit_sha}/comments\",\n \"GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls\",\n \"GET /repos/{owner}/{repo}/commits/{ref}/check-runs\",\n \"GET /repos/{owner}/{repo}/commits/{ref}/check-suites\",\n \"GET /repos/{owner}/{repo}/commits/{ref}/status\",\n \"GET /repos/{owner}/{repo}/commits/{ref}/statuses\",\n \"GET /repos/{owner}/{repo}/contributors\",\n \"GET 
/repos/{owner}/{repo}/dependabot/secrets\",\n \"GET /repos/{owner}/{repo}/deployments\",\n \"GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses\",\n \"GET /repos/{owner}/{repo}/environments\",\n \"GET /repos/{owner}/{repo}/events\",\n \"GET /repos/{owner}/{repo}/forks\",\n \"GET /repos/{owner}/{repo}/git/matching-refs/{ref}\",\n \"GET /repos/{owner}/{repo}/hooks\",\n \"GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries\",\n \"GET /repos/{owner}/{repo}/invitations\",\n \"GET /repos/{owner}/{repo}/issues\",\n \"GET /repos/{owner}/{repo}/issues/comments\",\n \"GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions\",\n \"GET /repos/{owner}/{repo}/issues/events\",\n \"GET /repos/{owner}/{repo}/issues/{issue_number}/comments\",\n \"GET /repos/{owner}/{repo}/issues/{issue_number}/events\",\n \"GET /repos/{owner}/{repo}/issues/{issue_number}/labels\",\n \"GET /repos/{owner}/{repo}/issues/{issue_number}/reactions\",\n \"GET /repos/{owner}/{repo}/issues/{issue_number}/timeline\",\n \"GET /repos/{owner}/{repo}/keys\",\n \"GET /repos/{owner}/{repo}/labels\",\n \"GET /repos/{owner}/{repo}/milestones\",\n \"GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels\",\n \"GET /repos/{owner}/{repo}/notifications\",\n \"GET /repos/{owner}/{repo}/pages/builds\",\n \"GET /repos/{owner}/{repo}/projects\",\n \"GET /repos/{owner}/{repo}/pulls\",\n \"GET /repos/{owner}/{repo}/pulls/comments\",\n \"GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions\",\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/comments\",\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/commits\",\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/files\",\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\",\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews\",\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments\",\n \"GET /repos/{owner}/{repo}/releases\",\n \"GET /repos/{owner}/{repo}/releases/{release_id}/assets\",\n \"GET /repos/{owner}/{repo}/releases/{release_id}/reactions\",\n \"GET /repos/{owner}/{repo}/secret-scanning/alerts\",\n \"GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations\",\n \"GET /repos/{owner}/{repo}/stargazers\",\n \"GET /repos/{owner}/{repo}/subscribers\",\n \"GET /repos/{owner}/{repo}/tags\",\n \"GET /repos/{owner}/{repo}/teams\",\n \"GET /repos/{owner}/{repo}/topics\",\n \"GET /repositories\",\n \"GET /repositories/{repository_id}/environments/{environment_name}/secrets\",\n \"GET /search/code\",\n \"GET /search/commits\",\n \"GET /search/issues\",\n \"GET /search/labels\",\n \"GET /search/repositories\",\n \"GET /search/topics\",\n \"GET /search/users\",\n \"GET /teams/{team_id}/discussions\",\n \"GET /teams/{team_id}/discussions/{discussion_number}/comments\",\n \"GET /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions\",\n \"GET /teams/{team_id}/discussions/{discussion_number}/reactions\",\n \"GET /teams/{team_id}/invitations\",\n \"GET /teams/{team_id}/members\",\n \"GET /teams/{team_id}/projects\",\n \"GET /teams/{team_id}/repos\",\n \"GET /teams/{team_id}/teams\",\n \"GET /user/blocks\",\n \"GET /user/codespaces\",\n \"GET /user/codespaces/secrets\",\n \"GET /user/emails\",\n \"GET /user/followers\",\n \"GET /user/following\",\n \"GET /user/gpg_keys\",\n \"GET /user/installations\",\n \"GET /user/installations/{installation_id}/repositories\",\n \"GET /user/issues\",\n \"GET /user/keys\",\n \"GET /user/marketplace_purchases\",\n \"GET 
/user/marketplace_purchases/stubbed\",\n \"GET /user/memberships/orgs\",\n \"GET /user/migrations\",\n \"GET /user/migrations/{migration_id}/repositories\",\n \"GET /user/orgs\",\n \"GET /user/packages\",\n \"GET /user/packages/{package_type}/{package_name}/versions\",\n \"GET /user/public_emails\",\n \"GET /user/repos\",\n \"GET /user/repository_invitations\",\n \"GET /user/starred\",\n \"GET /user/subscriptions\",\n \"GET /user/teams\",\n \"GET /users\",\n \"GET /users/{username}/events\",\n \"GET /users/{username}/events/orgs/{org}\",\n \"GET /users/{username}/events/public\",\n \"GET /users/{username}/followers\",\n \"GET /users/{username}/following\",\n \"GET /users/{username}/gists\",\n \"GET /users/{username}/gpg_keys\",\n \"GET /users/{username}/keys\",\n \"GET /users/{username}/orgs\",\n \"GET /users/{username}/packages\",\n \"GET /users/{username}/projects\",\n \"GET /users/{username}/received_events\",\n \"GET /users/{username}/received_events/public\",\n \"GET /users/{username}/repos\",\n \"GET /users/{username}/starred\",\n \"GET /users/{username}/subscriptions\",\n];\n","import { paginatingEndpoints, } from \"./generated/paginating-endpoints\";\nexport { paginatingEndpoints } from \"./generated/paginating-endpoints\";\nexport function isPaginatingEndpoint(arg) {\n if (typeof arg === \"string\") {\n return paginatingEndpoints.includes(arg);\n }\n else {\n return false;\n }\n}\n","import { VERSION } from \"./version\";\nimport { paginate } from \"./paginate\";\nimport { iterator } from \"./iterator\";\nexport { composePaginateRest } from \"./compose-paginate\";\nexport { isPaginatingEndpoint, paginatingEndpoints, } from \"./paginating-endpoints\";\n/**\n * @param octokit Octokit instance\n * @param options Options passed to Octokit constructor\n */\nexport function paginateRest(octokit) {\n return {\n paginate: Object.assign(paginate.bind(null, octokit), {\n iterator: iterator.bind(null, octokit),\n }),\n };\n}\npaginateRest.VERSION = 
VERSION;\n"],"names":["VERSION","normalizePaginatedListResponse","response","data","responseNeedsNormalization","incompleteResults","incomplete_results","repositorySelection","repository_selection","totalCount","total_count","namespaceKey","Object","keys","iterator","octokit","route","parameters","options","endpoint","request","requestMethod","method","headers","url","Symbol","asyncIterator","next","done","normalizedResponse","link","match","value","error","status","paginate","mapFn","undefined","gather","results","then","result","earlyExit","concat","composePaginateRest","assign","paginatingEndpoints","isPaginatingEndpoint","arg","includes","paginateRest","bind"],"mappings":";;;;AAAO,MAAMA,OAAO,GAAG,mBAAhB;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;ACAP;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,AAAO,SAASC,8BAAT,CAAwCC,QAAxC,EAAkD;;EAErD,IAAI,CAACA,QAAQ,CAACC,IAAd,EAAoB;IAChB,yCACOD,QADP;MAEIC,IAAI,EAAE;;;;EAGd,MAAMC,0BAA0B,GAAG,iBAAiBF,QAAQ,CAACC,IAA1B,IAAkC,EAAE,SAASD,QAAQ,CAACC,IAApB,CAArE;EACA,IAAI,CAACC,0BAAL,EACI,OAAOF,QAAP,CAViD;;;EAarD,MAAMG,iBAAiB,GAAGH,QAAQ,CAACC,IAAT,CAAcG,kBAAxC;EACA,MAAMC,mBAAmB,GAAGL,QAAQ,CAACC,IAAT,CAAcK,oBAA1C;EACA,MAAMC,UAAU,GAAGP,QAAQ,CAACC,IAAT,CAAcO,WAAjC;EACA,OAAOR,QAAQ,CAACC,IAAT,CAAcG,kBAArB;EACA,OAAOJ,QAAQ,CAACC,IAAT,CAAcK,oBAArB;EACA,OAAON,QAAQ,CAACC,IAAT,CAAcO,WAArB;EACA,MAAMC,YAAY,GAAGC,MAAM,CAACC,IAAP,CAAYX,QAAQ,CAACC,IAArB,EAA2B,CAA3B,CAArB;EACA,MAAMA,IAAI,GAAGD,QAAQ,CAACC,IAAT,CAAcQ,YAAd,CAAb;EACAT,QAAQ,CAACC,IAAT,GAAgBA,IAAhB;;EACA,IAAI,OAAOE,iBAAP,KAA6B,WAAjC,EAA8C;IAC1CH,QAAQ,CAACC,IAAT,CAAcG,kBAAd,GAAmCD,iBAAnC;;;EAEJ,IAAI,OAAOE,mBAAP,KAA+B,WAAnC,EAAgD;IAC5CL,QAAQ,CAACC,IAAT,CAAcK,oBAAd,GAAqCD,mBAArC;;;EAEJL,QAAQ,CAACC,IAAT,CAAcO,WAAd,GAA4BD,UAA5B;EACA,OAAOP,QAAP;AACH;;AC7CM,SAASY,QAAT,CAAkBC,OAAlB,EAA2BC,KAA3B,EAAkCC,UAAlC,EAA8C;EACjD,MAAMC,OAAO,GAAG,OAAOF,KAAP,KAAiB,UAAjB,GACVA,KAAK,CAACG,QAAN,CAAeF,UAAf,CADU,GAEVF,OAAO,CAACK,OAAR,CAAgBD,QAAhB,CAAyBH,KAAzB,EAAgCC,UAAhC,CAFN;EAGA,MAAMI,aAAa,GAAG,OAAOL,KAAP,KAAiB,UAAjB,GAA8BA,KAA9B,GAAsCD,OAAO,CAACK,OAApE;EACA,MAAME,MAAM,GAAGJ,OAAO,CAACI,MAAvB;EACA,MAAMC,OAAO,GAAGL,OAAO,CAACK,OAAxB;EACA,IAAIC,GAAG,GAAGN,OAAO,CAACM,GAAlB;EACA,OAAO;IACH,CAACC,MAAM,CAACC,aAAR,GAAwB,OAAO;MAC3B,MAAMC,IAAN,GAAa;QACT,IAAI,CAACH,GAAL,EACI,OAAO;UAAEI,IAAI,EAAE;SAAf;;QACJ,IAAI;UACA,MAAM1B,QAAQ,GAAG,MAAMmB,aAAa,CAAC;YAAEC,MAAF;YAAUE,GAAV;YAAeD;WAAhB,CAApC;UACA,MAAMM,kBAAkB,GAAG5B,8BAA8B,CAACC,QAAD,CAAzD,CAFA;;;;UAMAsB,GAAG,GAAG,CAAC,CAACK,kBAAkB,CAACN,OAAnB,CAA2BO,IAA3B,IAAmC,EAApC,EAAwCC,KAAxC,CAA8C,yBAA9C,KAA4E,EAA7E,EAAiF,CAAjF,CAAN;UACA,OAAO;YAAEC,KAAK,EAAEH;WAAhB;SAPJ,CASA,OAAOI,KAAP,EAAc;UACV,IAAIA,KAAK,CAACC,MAAN,KAAiB,GAArB,EACI,MAAMD,KAAN;UACJT,GAAG,GAAG,EAAN;UACA,OAAO;YACHQ,KAAK,EAAE;cACHE,MAAM,EAAE,GADL;cAEHX,OAAO,EAAE,EAFN;cAGHpB,IAAI,EAAE;;WAJd;;;;KAjBY;GAD5B;AA6BH;;ACrCM,SAASgC,QAAT,CAAkBpB,OAAlB,EAA2BC,KAA3B,EAAkCC,UAAlC,EAA8CmB,KAA9C,EAAqD;EACxD,IAAI,OAAOnB,UAAP,KAAsB,UAA1B,EAAsC;IAClCmB,KAAK,GAAGnB,UAAR;IACAA,UAAU,GAAGoB,SAAb;;;EAEJ,OAAOC,MAAM,CAACvB,OAAD,EAAU,EAAV,EAAcD,QAAQ,CAACC,OAAD,EAAUC,KAAV,EAAiBC,UAAjB,CAAR,CAAqCQ,MAAM,CAACC,aAA5C,GAAd,EAA4EU,KAA5E,CAAb;AACH;;AACD,SAASE,MAAT,CAAgBvB,OAAhB,EAAyBwB,OAAzB,EAAkCzB,QAAlC,EAA4CsB,KAA5C,EAAmD;EAC/C,OAAOtB,QAAQ,CAACa,IAAT,GAAgBa,IAAhB,CAAsBC,MAAD,IAAY;IACpC,IAAIA,MAAM,CAACb,IAAX,EAAiB;MACb,OAAOW,OAAP;;;IAEJ,IAAIG,SAAS,GAAG,KAAhB;;IACA,SAASd,IAAT,GAAgB;MACZc,SAAS,GAAG,IAAZ;;;IAEJH,OAAO,GAAGA,OAAO,CAACI,MAAR,CAAeP,KAAK,GAAGA,KAAK,CAACK,MAAM,CAACT,KAAR,EAAeJ,IAAf,CAAR,GAA+Ba,MAAM,CAACT,KAAP,CAAa7B,IAAhE,CAAV;;IACA,IAA
IuC,SAAJ,EAAe;MACX,OAAOH,OAAP;;;IAEJ,OAAOD,MAAM,CAACvB,OAAD,EAAUwB,OAAV,EAAmBzB,QAAnB,EAA6BsB,KAA7B,CAAb;GAZG,CAAP;AAcH;;MCrBYQ,mBAAmB,GAAGhC,MAAM,CAACiC,MAAP,CAAcV,QAAd,EAAwB;EACvDrB;AADuD,CAAxB,CAA5B;;MCFMgC,mBAAmB,GAAG,CAC/B,0BAD+B,EAE/B,wBAF+B,EAG/B,0BAH+B,EAI/B,qBAJ+B,EAK/B,iEAL+B,EAM/B,qDAN+B,EAO/B,qFAP+B,EAQ/B,+EAR+B,EAS/B,+CAT+B,EAU/B,yCAV+B,EAW/B,sDAX+B,EAY/B,kEAZ+B,EAa/B,aAb+B,EAc/B,YAd+B,EAe/B,mBAf+B,EAgB/B,oBAhB+B,EAiB/B,+BAjB+B,EAkB/B,8BAlB+B,EAmB/B,4BAnB+B,EAoB/B,gCApB+B,EAqB/B,aArB+B,EAsB/B,eAtB+B,EAuB/B,gCAvB+B,EAwB/B,mDAxB+B,EAyB/B,wCAzB+B,EA0B/B,2DA1B+B,EA2B/B,qCA3B+B,EA4B/B,oBA5B+B,EA6B/B,oBA7B+B,EA8B/B,mDA9B+B,EA+B/B,kDA/B+B,EAgC/B,uCAhC+B,EAiC/B,sEAjC+B,EAkC/B,iEAlC+B,EAmC/B,iCAnC+B,EAoC/B,iCApC+B,EAqC/B,4DArC+B,EAsC/B,2BAtC+B,EAuC/B,wBAvC+B,EAwC/B,sCAxC+B,EAyC/B,4BAzC+B,EA0C/B,2CA1C+B,EA2C/B,oCA3C+B,EA4C/B,+DA5C+B,EA6C/B,wBA7C+B,EA8C/B,iCA9C+B,EA+C/B,oCA/C+B,EAgD/B,uBAhD+B,EAiD/B,4CAjD+B,EAkD/B,+BAlD+B,EAmD/B,6BAnD+B,EAoD/B,mDApD+B,EAqD/B,wBArD+B,EAsD/B,yBAtD+B,EAuD/B,4BAvD+B,EAwD/B,wDAxD+B,EAyD/B,uCAzD+B,EA0D/B,0BA1D+B,EA2D/B,iEA3D+B,EA4D/B,0BA5D+B,EA6D/B,gCA7D+B,EA8D/B,uBA9D+B,EA+D/B,wCA/D+B,EAgE/B,oDAhE+B,EAiE/B,kCAjE+B,EAkE/B,uBAlE+B,EAmE/B,+CAnE+B,EAoE/B,4EApE+B,EAqE/B,uGArE+B,EAsE/B,6EAtE+B,EAuE/B,+CAvE+B,EAwE/B,2CAxE+B,EAyE/B,4CAzE+B,EA0E/B,yCA1E+B,EA2E/B,yCA3E+B,EA4E/B,yCA5E+B,EA6E/B,0CA7E+B,EA8E/B,oCA9E+B,EA+E/B,6CA/E+B,EAgF/B,0CAhF+B,EAiF/B,2CAjF+B,EAkF/B,wCAlF+B,EAmF/B,2DAnF+B,EAoF/B,gFApF+B,EAqF/B,sDArF+B,EAsF/B,2CAtF+B,EAuF/B,6CAvF+B,EAwF/B,gEAxF+B,EAyF/B,qCAzF+B,EA0F/B,oCA1F+B,EA2F/B,iEA3F+B,EA4F/B,oEA5F+B,EA6F/B,gDA7F+B,EA8F/B,yEA9F+B,EA+F/B,kDA/F+B,EAgG/B,sCAhG+B,EAiG/B,oDAjG+B,EAkG/B,8CAlG+B,EAmG/B,yCAnG+B,EAoG/B,oCApG+B,EAqG/B,2DArG+B,EAsG/B,mCAtG+B,EAuG/B,yDAvG+B,EAwG/B,sDAxG+B,EAyG/B,oDAzG+B,EA0G/B,sDA1G+B,EA2G/B,gDA3G+B,EA4G/B,kDA5G+B,EA6G/B,wCA7G+B,EA8G/B,8CA9G+B,EA+G/B,uCA/G+B,EAgH/B,gEAhH+B,EAiH/B,wCAjH+B,EAkH/B,kCAlH+B,EAmH/B,iCAnH+B,EAoH/B,mDApH+B,EAqH/B,iCArH+B,EAsH/B,sDAtH+B,EAuH/B,uCAvH+B,EAwH/B,kCAxH+B,EAyH/B,2CAzH+B,EA0H/B,kEA1H+B,EA2H/B,yCA3H+B,EA4H/B,0DA5H+B,EA6H/B,wDA7H+B,EA8H/B,wDA9H+B,EA+H/B,2DA/H+B,EAgI/B,0DAhI+B,EAiI/B,gCAjI+B,EAkI/B,kCAlI+B,EAmI/B,sCAnI+B,EAoI/B,gEApI+B,EAqI/B,yCArI+B,EAsI/B,wCAtI+B,EAuI/B,oCAvI+B,EAwI/B,iCAxI+B,EAyI/B,0CAzI+B,EA0I/B,iEA1I+B,EA2I/B,wDA3I+B,EA4I/B,uDA5I+B,EA6I/B,qDA7I+B,EA8I/B,mEA9I+B,EA+I/B,uDA/I+B,EAgJ/B,4EAhJ+B,EAiJ/B,oCAjJ+B,EAkJ/B,wDAlJ+B,EAmJ/B,2DAnJ+B,EAoJ/B,kDApJ+B,EAqJ/B,2EArJ+B,EAsJ/B,sCAtJ+B,EAuJ/B,uCAvJ+B,EAwJ/B,gCAxJ+B,EAyJ/B,iCAzJ+B,EA0J/B,kCA1J+B,EA2J/B,mBA3J+B,EA4J/B,2EA5J+B,EA6J/B,kBA7J+B,EA8J/B,qBA9J+B,EA+J/B,oBA/J+B,EAgK/B,oBAhK+B,EAiK/B,0BAjK+B,EAkK/B,oBAlK+B,EAmK/B,mBAnK+B,EAoK/B,kCApK+B,EAqK/B,+DArK+B,EAsK/B,0FAtK+B,EAuK/B,gEAvK+B,EAwK/B,kCAxK+B,EAyK/B,8BAzK+B,EA0K/B,+BA1K+B,EA2K/B,4BA3K+B,EA4K/B,4BA5K+B,EA6K/B,kBA7K+B,EA8K/B,sBA9K+B,EA+K/B,8BA/K+B,EAgL/B,kBAhL+B,EAiL/B,qBAjL+B,EAkL/B,qBAlL+B,EAmL/B,oBAnL+B,EAoL/B,yBApL+B,EAqL/B,wDArL+B,EAsL/B,kBAtL+B,EAuL/B,gBAvL+B,EAwL/B,iCAxL+B,EAyL/B,yCAzL+B,EA0L/B,4BA1L+B,EA2L/B,sBA3L+B,EA4L/B,kDA5L+B,EA6L/B,gBA7L+B,EA8L/B,oBA9L+B,EA+L/B,2DA/L+B,EAgM/B,yBAhM+B,EAiM/B,iBAjM+B,EAkM/B,kCAlM+B,EAmM/B,mBAnM+B,EAoM/B,yBApM+B,EAqM/B,iBArM+B,EAsM/B,YAtM+B,EAuM/B,8BAvM+B,EAwM/B,yCAxM+B,EAyM/B,qCAzM+B,EA0M/B,iCA1M+B,EA2M/B,iCA3M+B,EA4M/B,6BA5M+B,EA6M/B,gCA7M+B,EA8M/B,4BA9M+B,EA+M/B,4BA/M+B,EAgN/B,gCAhN+B,EAiN/B,gCAjN+B,EAkN/B,uCAlN+B,EAmN/B,8CAnN+B,EAoN/B,6BApN+B,EAqN/B,+BArN+B,EAsN/B,qCAtN+B,CAA5B;;ACEA,SAASC,oBAAT,CAA8BC,GAA9B,EAAmC;EACtC,IAAI,OAAOA,GAAP,KAAe,QAAnB,EAA6B;IACzB,OAAOF,mBAAmB,CAACG,QAApB,CAA6BD,GAA7B,CAAP;GADJ,MAGK;IACD,OAAO,KAAP;;A
AEP;;ACJD;AACA;AACA;AACA;;AACA,AAAO,SAASE,YAAT,CAAsBnC,OAAtB,EAA+B;EAClC,OAAO;IACHoB,QAAQ,EAAEvB,MAAM,CAACiC,MAAP,CAAcV,QAAQ,CAACgB,IAAT,CAAc,IAAd,EAAoBpC,OAApB,CAAd,EAA4C;MAClDD,QAAQ,EAAEA,QAAQ,CAACqC,IAAT,CAAc,IAAd,EAAoBpC,OAApB;KADJ;GADd;AAKH;AACDmC,YAAY,CAAClD,OAAb,GAAuBA,OAAvB;;;;;;;"} \ No newline at end of file diff --git a/node_modules/@octokit/plugin-paginate-rest/dist-src/compose-paginate.js b/node_modules/@octokit/plugin-paginate-rest/dist-src/compose-paginate.js new file mode 100644 index 0000000..09ca53f --- /dev/null +++ b/node_modules/@octokit/plugin-paginate-rest/dist-src/compose-paginate.js @@ -0,0 +1,5 @@ +import { paginate } from "./paginate"; +import { iterator } from "./iterator"; +export const composePaginateRest = Object.assign(paginate, { + iterator, +}); diff --git a/node_modules/@octokit/plugin-paginate-rest/dist-src/generated/paginating-endpoints.js b/node_modules/@octokit/plugin-paginate-rest/dist-src/generated/paginating-endpoints.js new file mode 100644 index 0000000..863af10 --- /dev/null +++ b/node_modules/@octokit/plugin-paginate-rest/dist-src/generated/paginating-endpoints.js @@ -0,0 +1,216 @@ +export const paginatingEndpoints = [ + "GET /app/hook/deliveries", + "GET /app/installations", + "GET /applications/grants", + "GET /authorizations", + "GET /enterprises/{enterprise}/actions/permissions/organizations", + "GET /enterprises/{enterprise}/actions/runner-groups", + "GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/organizations", + "GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/runners", + "GET /enterprises/{enterprise}/actions/runners", + "GET /enterprises/{enterprise}/audit-log", + "GET /enterprises/{enterprise}/secret-scanning/alerts", + "GET /enterprises/{enterprise}/settings/billing/advanced-security", + "GET /events", + "GET /gists", + "GET /gists/public", + "GET /gists/starred", + "GET /gists/{gist_id}/comments", + "GET /gists/{gist_id}/commits", + "GET /gists/{gist_id}/forks", + "GET /installation/repositories", + "GET /issues", + "GET /licenses", + "GET /marketplace_listing/plans", + "GET /marketplace_listing/plans/{plan_id}/accounts", + "GET /marketplace_listing/stubbed/plans", + "GET /marketplace_listing/stubbed/plans/{plan_id}/accounts", + "GET /networks/{owner}/{repo}/events", + "GET /notifications", + "GET /organizations", + "GET /orgs/{org}/actions/cache/usage-by-repository", + "GET /orgs/{org}/actions/permissions/repositories", + "GET /orgs/{org}/actions/runner-groups", + "GET /orgs/{org}/actions/runner-groups/{runner_group_id}/repositories", + "GET /orgs/{org}/actions/runner-groups/{runner_group_id}/runners", + "GET /orgs/{org}/actions/runners", + "GET /orgs/{org}/actions/secrets", + "GET /orgs/{org}/actions/secrets/{secret_name}/repositories", + "GET /orgs/{org}/audit-log", + "GET /orgs/{org}/blocks", + "GET /orgs/{org}/code-scanning/alerts", + "GET /orgs/{org}/codespaces", + "GET /orgs/{org}/credential-authorizations", + "GET /orgs/{org}/dependabot/secrets", + "GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories", + "GET /orgs/{org}/events", + "GET /orgs/{org}/external-groups", + "GET /orgs/{org}/failed_invitations", + "GET /orgs/{org}/hooks", + "GET /orgs/{org}/hooks/{hook_id}/deliveries", + "GET /orgs/{org}/installations", + "GET /orgs/{org}/invitations", + "GET /orgs/{org}/invitations/{invitation_id}/teams", + "GET /orgs/{org}/issues", + "GET /orgs/{org}/members", + "GET /orgs/{org}/migrations", + "GET /orgs/{org}/migrations/{migration_id}/repositories", + "GET 
/orgs/{org}/outside_collaborators", + "GET /orgs/{org}/packages", + "GET /orgs/{org}/packages/{package_type}/{package_name}/versions", + "GET /orgs/{org}/projects", + "GET /orgs/{org}/public_members", + "GET /orgs/{org}/repos", + "GET /orgs/{org}/secret-scanning/alerts", + "GET /orgs/{org}/settings/billing/advanced-security", + "GET /orgs/{org}/team-sync/groups", + "GET /orgs/{org}/teams", + "GET /orgs/{org}/teams/{team_slug}/discussions", + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments", + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", + "GET /orgs/{org}/teams/{team_slug}/invitations", + "GET /orgs/{org}/teams/{team_slug}/members", + "GET /orgs/{org}/teams/{team_slug}/projects", + "GET /orgs/{org}/teams/{team_slug}/repos", + "GET /orgs/{org}/teams/{team_slug}/teams", + "GET /projects/columns/{column_id}/cards", + "GET /projects/{project_id}/collaborators", + "GET /projects/{project_id}/columns", + "GET /repos/{owner}/{repo}/actions/artifacts", + "GET /repos/{owner}/{repo}/actions/caches", + "GET /repos/{owner}/{repo}/actions/runners", + "GET /repos/{owner}/{repo}/actions/runs", + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts", + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs", + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs", + "GET /repos/{owner}/{repo}/actions/secrets", + "GET /repos/{owner}/{repo}/actions/workflows", + "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs", + "GET /repos/{owner}/{repo}/assignees", + "GET /repos/{owner}/{repo}/branches", + "GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations", + "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs", + "GET /repos/{owner}/{repo}/code-scanning/alerts", + "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", + "GET /repos/{owner}/{repo}/code-scanning/analyses", + "GET /repos/{owner}/{repo}/codespaces", + "GET /repos/{owner}/{repo}/codespaces/devcontainers", + "GET /repos/{owner}/{repo}/codespaces/secrets", + "GET /repos/{owner}/{repo}/collaborators", + "GET /repos/{owner}/{repo}/comments", + "GET /repos/{owner}/{repo}/comments/{comment_id}/reactions", + "GET /repos/{owner}/{repo}/commits", + "GET /repos/{owner}/{repo}/commits/{commit_sha}/comments", + "GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls", + "GET /repos/{owner}/{repo}/commits/{ref}/check-runs", + "GET /repos/{owner}/{repo}/commits/{ref}/check-suites", + "GET /repos/{owner}/{repo}/commits/{ref}/status", + "GET /repos/{owner}/{repo}/commits/{ref}/statuses", + "GET /repos/{owner}/{repo}/contributors", + "GET /repos/{owner}/{repo}/dependabot/secrets", + "GET /repos/{owner}/{repo}/deployments", + "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses", + "GET /repos/{owner}/{repo}/environments", + "GET /repos/{owner}/{repo}/events", + "GET /repos/{owner}/{repo}/forks", + "GET /repos/{owner}/{repo}/git/matching-refs/{ref}", + "GET /repos/{owner}/{repo}/hooks", + "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries", + "GET /repos/{owner}/{repo}/invitations", + "GET /repos/{owner}/{repo}/issues", + "GET /repos/{owner}/{repo}/issues/comments", + "GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", + "GET /repos/{owner}/{repo}/issues/events", + "GET /repos/{owner}/{repo}/issues/{issue_number}/comments", + "GET /repos/{owner}/{repo}/issues/{issue_number}/events", + 
"GET /repos/{owner}/{repo}/issues/{issue_number}/labels", + "GET /repos/{owner}/{repo}/issues/{issue_number}/reactions", + "GET /repos/{owner}/{repo}/issues/{issue_number}/timeline", + "GET /repos/{owner}/{repo}/keys", + "GET /repos/{owner}/{repo}/labels", + "GET /repos/{owner}/{repo}/milestones", + "GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels", + "GET /repos/{owner}/{repo}/notifications", + "GET /repos/{owner}/{repo}/pages/builds", + "GET /repos/{owner}/{repo}/projects", + "GET /repos/{owner}/{repo}/pulls", + "GET /repos/{owner}/{repo}/pulls/comments", + "GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", + "GET /repos/{owner}/{repo}/pulls/{pull_number}/comments", + "GET /repos/{owner}/{repo}/pulls/{pull_number}/commits", + "GET /repos/{owner}/{repo}/pulls/{pull_number}/files", + "GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers", + "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews", + "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments", + "GET /repos/{owner}/{repo}/releases", + "GET /repos/{owner}/{repo}/releases/{release_id}/assets", + "GET /repos/{owner}/{repo}/releases/{release_id}/reactions", + "GET /repos/{owner}/{repo}/secret-scanning/alerts", + "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations", + "GET /repos/{owner}/{repo}/stargazers", + "GET /repos/{owner}/{repo}/subscribers", + "GET /repos/{owner}/{repo}/tags", + "GET /repos/{owner}/{repo}/teams", + "GET /repos/{owner}/{repo}/topics", + "GET /repositories", + "GET /repositories/{repository_id}/environments/{environment_name}/secrets", + "GET /search/code", + "GET /search/commits", + "GET /search/issues", + "GET /search/labels", + "GET /search/repositories", + "GET /search/topics", + "GET /search/users", + "GET /teams/{team_id}/discussions", + "GET /teams/{team_id}/discussions/{discussion_number}/comments", + "GET /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions", + "GET /teams/{team_id}/discussions/{discussion_number}/reactions", + "GET /teams/{team_id}/invitations", + "GET /teams/{team_id}/members", + "GET /teams/{team_id}/projects", + "GET /teams/{team_id}/repos", + "GET /teams/{team_id}/teams", + "GET /user/blocks", + "GET /user/codespaces", + "GET /user/codespaces/secrets", + "GET /user/emails", + "GET /user/followers", + "GET /user/following", + "GET /user/gpg_keys", + "GET /user/installations", + "GET /user/installations/{installation_id}/repositories", + "GET /user/issues", + "GET /user/keys", + "GET /user/marketplace_purchases", + "GET /user/marketplace_purchases/stubbed", + "GET /user/memberships/orgs", + "GET /user/migrations", + "GET /user/migrations/{migration_id}/repositories", + "GET /user/orgs", + "GET /user/packages", + "GET /user/packages/{package_type}/{package_name}/versions", + "GET /user/public_emails", + "GET /user/repos", + "GET /user/repository_invitations", + "GET /user/starred", + "GET /user/subscriptions", + "GET /user/teams", + "GET /users", + "GET /users/{username}/events", + "GET /users/{username}/events/orgs/{org}", + "GET /users/{username}/events/public", + "GET /users/{username}/followers", + "GET /users/{username}/following", + "GET /users/{username}/gists", + "GET /users/{username}/gpg_keys", + "GET /users/{username}/keys", + "GET /users/{username}/orgs", + "GET /users/{username}/packages", + "GET /users/{username}/projects", + "GET /users/{username}/received_events", + "GET /users/{username}/received_events/public", + "GET /users/{username}/repos", + 
"GET /users/{username}/starred", + "GET /users/{username}/subscriptions", +]; diff --git a/node_modules/@octokit/plugin-paginate-rest/dist-src/index.js b/node_modules/@octokit/plugin-paginate-rest/dist-src/index.js new file mode 100644 index 0000000..5ba74de --- /dev/null +++ b/node_modules/@octokit/plugin-paginate-rest/dist-src/index.js @@ -0,0 +1,17 @@ +import { VERSION } from "./version"; +import { paginate } from "./paginate"; +import { iterator } from "./iterator"; +export { composePaginateRest } from "./compose-paginate"; +export { isPaginatingEndpoint, paginatingEndpoints, } from "./paginating-endpoints"; +/** + * @param octokit Octokit instance + * @param options Options passed to Octokit constructor + */ +export function paginateRest(octokit) { + return { + paginate: Object.assign(paginate.bind(null, octokit), { + iterator: iterator.bind(null, octokit), + }), + }; +} +paginateRest.VERSION = VERSION; diff --git a/node_modules/@octokit/plugin-paginate-rest/dist-src/iterator.js b/node_modules/@octokit/plugin-paginate-rest/dist-src/iterator.js new file mode 100644 index 0000000..7f9ee64 --- /dev/null +++ b/node_modules/@octokit/plugin-paginate-rest/dist-src/iterator.js @@ -0,0 +1,39 @@ +import { normalizePaginatedListResponse } from "./normalize-paginated-list-response"; +export function iterator(octokit, route, parameters) { + const options = typeof route === "function" + ? route.endpoint(parameters) + : octokit.request.endpoint(route, parameters); + const requestMethod = typeof route === "function" ? route : octokit.request; + const method = options.method; + const headers = options.headers; + let url = options.url; + return { + [Symbol.asyncIterator]: () => ({ + async next() { + if (!url) + return { done: true }; + try { + const response = await requestMethod({ method, url, headers }); + const normalizedResponse = normalizePaginatedListResponse(response); + // `response.headers.link` format: + // '; rel="next", ; rel="last"' + // sets `url` to undefined if "next" URL is not present or `link` header is not set + url = ((normalizedResponse.headers.link || "").match(/<([^>]+)>;\s*rel="next"/) || [])[1]; + return { value: normalizedResponse }; + } + catch (error) { + if (error.status !== 409) + throw error; + url = ""; + return { + value: { + status: 200, + headers: {}, + data: [], + }, + }; + } + }, + }), + }; +} diff --git a/node_modules/@octokit/plugin-paginate-rest/dist-src/normalize-paginated-list-response.js b/node_modules/@octokit/plugin-paginate-rest/dist-src/normalize-paginated-list-response.js new file mode 100644 index 0000000..a87028b --- /dev/null +++ b/node_modules/@octokit/plugin-paginate-rest/dist-src/normalize-paginated-list-response.js @@ -0,0 +1,47 @@ +/** + * Some “list” response that can be paginated have a different response structure + * + * They have a `total_count` key in the response (search also has `incomplete_results`, + * /installation/repositories also has `repository_selection`), as well as a key with + * the list of the items which name varies from endpoint to endpoint. + * + * Octokit normalizes these responses so that paginated results are always returned following + * the same structure. One challenge is that if the list response has only one page, no Link + * header is provided, so this header alone is not sufficient to check wether a response is + * paginated or not. 
+ * + * We check if a "total_count" key is present in the response data, but also make sure that + * a "url" property is not, as the "Get the combined status for a specific ref" endpoint would + * otherwise match: https://developer.github.com/v3/repos/statuses/#get-the-combined-status-for-a-specific-ref + */ +export function normalizePaginatedListResponse(response) { + // endpoints can respond with 204 if repository is empty + if (!response.data) { + return { + ...response, + data: [], + }; + } + const responseNeedsNormalization = "total_count" in response.data && !("url" in response.data); + if (!responseNeedsNormalization) + return response; + // keep the additional properties intact as there is currently no other way + // to retrieve the same information. + const incompleteResults = response.data.incomplete_results; + const repositorySelection = response.data.repository_selection; + const totalCount = response.data.total_count; + delete response.data.incomplete_results; + delete response.data.repository_selection; + delete response.data.total_count; + const namespaceKey = Object.keys(response.data)[0]; + const data = response.data[namespaceKey]; + response.data = data; + if (typeof incompleteResults !== "undefined") { + response.data.incomplete_results = incompleteResults; + } + if (typeof repositorySelection !== "undefined") { + response.data.repository_selection = repositorySelection; + } + response.data.total_count = totalCount; + return response; +} diff --git a/node_modules/@octokit/plugin-paginate-rest/dist-src/paginate.js b/node_modules/@octokit/plugin-paginate-rest/dist-src/paginate.js new file mode 100644 index 0000000..8d18a60 --- /dev/null +++ b/node_modules/@octokit/plugin-paginate-rest/dist-src/paginate.js @@ -0,0 +1,24 @@ +import { iterator } from "./iterator"; +export function paginate(octokit, route, parameters, mapFn) { + if (typeof parameters === "function") { + mapFn = parameters; + parameters = undefined; + } + return gather(octokit, [], iterator(octokit, route, parameters)[Symbol.asyncIterator](), mapFn); +} +function gather(octokit, results, iterator, mapFn) { + return iterator.next().then((result) => { + if (result.done) { + return results; + } + let earlyExit = false; + function done() { + earlyExit = true; + } + results = results.concat(mapFn ? 
mapFn(result.value, done) : result.value.data); + if (earlyExit) { + return results; + } + return gather(octokit, results, iterator, mapFn); + }); +} diff --git a/node_modules/@octokit/plugin-paginate-rest/dist-src/paginating-endpoints.js b/node_modules/@octokit/plugin-paginate-rest/dist-src/paginating-endpoints.js new file mode 100644 index 0000000..1e52899 --- /dev/null +++ b/node_modules/@octokit/plugin-paginate-rest/dist-src/paginating-endpoints.js @@ -0,0 +1,10 @@ +import { paginatingEndpoints, } from "./generated/paginating-endpoints"; +export { paginatingEndpoints } from "./generated/paginating-endpoints"; +export function isPaginatingEndpoint(arg) { + if (typeof arg === "string") { + return paginatingEndpoints.includes(arg); + } + else { + return false; + } +} diff --git a/node_modules/@octokit/plugin-paginate-rest/dist-src/types.js b/node_modules/@octokit/plugin-paginate-rest/dist-src/types.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/node_modules/@octokit/plugin-paginate-rest/dist-src/types.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@octokit/plugin-paginate-rest/dist-src/version.js b/node_modules/@octokit/plugin-paginate-rest/dist-src/version.js new file mode 100644 index 0000000..af4553e --- /dev/null +++ b/node_modules/@octokit/plugin-paginate-rest/dist-src/version.js @@ -0,0 +1 @@ +export const VERSION = "2.21.3"; diff --git a/node_modules/@octokit/plugin-paginate-rest/dist-types/compose-paginate.d.ts b/node_modules/@octokit/plugin-paginate-rest/dist-types/compose-paginate.d.ts new file mode 100644 index 0000000..38a7432 --- /dev/null +++ b/node_modules/@octokit/plugin-paginate-rest/dist-types/compose-paginate.d.ts @@ -0,0 +1,2 @@ +import { ComposePaginateInterface } from "./types"; +export declare const composePaginateRest: ComposePaginateInterface; diff --git a/node_modules/@octokit/plugin-paginate-rest/dist-types/generated/paginating-endpoints.d.ts b/node_modules/@octokit/plugin-paginate-rest/dist-types/generated/paginating-endpoints.d.ts new file mode 100644 index 0000000..4882d94 --- /dev/null +++ b/node_modules/@octokit/plugin-paginate-rest/dist-types/generated/paginating-endpoints.d.ts @@ -0,0 +1,1612 @@ +import { Endpoints } from "@octokit/types"; +export interface PaginatingEndpoints { + /** + * @see https://docs.github.com/rest/reference/apps#list-deliveries-for-an-app-webhook + */ + "GET /app/hook/deliveries": { + parameters: Endpoints["GET /app/hook/deliveries"]["parameters"]; + response: Endpoints["GET /app/hook/deliveries"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/apps#list-installations-for-the-authenticated-app + */ + "GET /app/installations": { + parameters: Endpoints["GET /app/installations"]["parameters"]; + response: Endpoints["GET /app/installations"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/oauth-authorizations#list-your-grants + */ + "GET /applications/grants": { + parameters: Endpoints["GET /applications/grants"]["parameters"]; + response: Endpoints["GET /applications/grants"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/oauth-authorizations#list-your-authorizations + */ + "GET /authorizations": { + parameters: Endpoints["GET /authorizations"]["parameters"]; + response: Endpoints["GET /authorizations"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/actions#list-selected-organizations-enabled-for-github-actions-in-an-enterprise + */ + "GET /enterprises/{enterprise}/actions/permissions/organizations": { + 
parameters: Endpoints["GET /enterprises/{enterprise}/actions/permissions/organizations"]["parameters"]; + response: Endpoints["GET /enterprises/{enterprise}/actions/permissions/organizations"]["response"] & { + data: Endpoints["GET /enterprises/{enterprise}/actions/permissions/organizations"]["response"]["data"]["organizations"]; + }; + }; + /** + * @see https://docs.github.com/rest/reference/actions#list-self-hosted-runner-groups-for-an-enterprise + */ + "GET /enterprises/{enterprise}/actions/runner-groups": { + parameters: Endpoints["GET /enterprises/{enterprise}/actions/runner-groups"]["parameters"]; + response: Endpoints["GET /enterprises/{enterprise}/actions/runner-groups"]["response"] & { + data: Endpoints["GET /enterprises/{enterprise}/actions/runner-groups"]["response"]["data"]["runner_groups"]; + }; + }; + /** + * @see https://docs.github.com/rest/reference/actions#list-organization-access-to-a-self-hosted-runner-group-in-a-enterprise + */ + "GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/organizations": { + parameters: Endpoints["GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/organizations"]["parameters"]; + response: Endpoints["GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/organizations"]["response"] & { + data: Endpoints["GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/organizations"]["response"]["data"]["organizations"]; + }; + }; + /** + * @see https://docs.github.com/rest/reference/actions#list-self-hosted-runners-in-a-group-for-an-enterprise + */ + "GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/runners": { + parameters: Endpoints["GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/runners"]["parameters"]; + response: Endpoints["GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/runners"]["response"] & { + data: Endpoints["GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/runners"]["response"]["data"]["runners"]; + }; + }; + /** + * @see https://docs.github.com/rest/reference/actions#list-self-hosted-runners-for-an-enterprise + */ + "GET /enterprises/{enterprise}/actions/runners": { + parameters: Endpoints["GET /enterprises/{enterprise}/actions/runners"]["parameters"]; + response: Endpoints["GET /enterprises/{enterprise}/actions/runners"]["response"] & { + data: Endpoints["GET /enterprises/{enterprise}/actions/runners"]["response"]["data"]["runners"]; + }; + }; + /** + * @see https://docs.github.com/rest/reference/enterprise-admin#get-the-audit-log-for-an-enterprise + */ + "GET /enterprises/{enterprise}/audit-log": { + parameters: Endpoints["GET /enterprises/{enterprise}/audit-log"]["parameters"]; + response: Endpoints["GET /enterprises/{enterprise}/audit-log"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/secret-scanning#list-secret-scanning-alerts-for-an-enterprise + */ + "GET /enterprises/{enterprise}/secret-scanning/alerts": { + parameters: Endpoints["GET /enterprises/{enterprise}/secret-scanning/alerts"]["parameters"]; + response: Endpoints["GET /enterprises/{enterprise}/secret-scanning/alerts"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/billing#export-advanced-security-active-committers-data-for-enterprise + */ + "GET /enterprises/{enterprise}/settings/billing/advanced-security": { + parameters: Endpoints["GET /enterprises/{enterprise}/settings/billing/advanced-security"]["parameters"]; + response: Endpoints["GET 
/enterprises/{enterprise}/settings/billing/advanced-security"]["response"] & { + data: Endpoints["GET /enterprises/{enterprise}/settings/billing/advanced-security"]["response"]["data"]["repositories"]; + }; + }; + /** + * @see https://docs.github.com/rest/reference/activity#list-public-events + */ + "GET /events": { + parameters: Endpoints["GET /events"]["parameters"]; + response: Endpoints["GET /events"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/gists#list-gists-for-the-authenticated-user + */ + "GET /gists": { + parameters: Endpoints["GET /gists"]["parameters"]; + response: Endpoints["GET /gists"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/gists#list-public-gists + */ + "GET /gists/public": { + parameters: Endpoints["GET /gists/public"]["parameters"]; + response: Endpoints["GET /gists/public"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/gists#list-starred-gists + */ + "GET /gists/starred": { + parameters: Endpoints["GET /gists/starred"]["parameters"]; + response: Endpoints["GET /gists/starred"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/gists#list-gist-comments + */ + "GET /gists/{gist_id}/comments": { + parameters: Endpoints["GET /gists/{gist_id}/comments"]["parameters"]; + response: Endpoints["GET /gists/{gist_id}/comments"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/gists#list-gist-commits + */ + "GET /gists/{gist_id}/commits": { + parameters: Endpoints["GET /gists/{gist_id}/commits"]["parameters"]; + response: Endpoints["GET /gists/{gist_id}/commits"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/gists#list-gist-forks + */ + "GET /gists/{gist_id}/forks": { + parameters: Endpoints["GET /gists/{gist_id}/forks"]["parameters"]; + response: Endpoints["GET /gists/{gist_id}/forks"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/apps#list-repositories-accessible-to-the-app-installation + */ + "GET /installation/repositories": { + parameters: Endpoints["GET /installation/repositories"]["parameters"]; + response: Endpoints["GET /installation/repositories"]["response"] & { + data: Endpoints["GET /installation/repositories"]["response"]["data"]["repositories"]; + }; + }; + /** + * @see https://docs.github.com/rest/reference/issues#list-issues-assigned-to-the-authenticated-user + */ + "GET /issues": { + parameters: Endpoints["GET /issues"]["parameters"]; + response: Endpoints["GET /issues"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/licenses#get-all-commonly-used-licenses + */ + "GET /licenses": { + parameters: Endpoints["GET /licenses"]["parameters"]; + response: Endpoints["GET /licenses"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/apps#list-plans + */ + "GET /marketplace_listing/plans": { + parameters: Endpoints["GET /marketplace_listing/plans"]["parameters"]; + response: Endpoints["GET /marketplace_listing/plans"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/apps#list-accounts-for-a-plan + */ + "GET /marketplace_listing/plans/{plan_id}/accounts": { + parameters: Endpoints["GET /marketplace_listing/plans/{plan_id}/accounts"]["parameters"]; + response: Endpoints["GET /marketplace_listing/plans/{plan_id}/accounts"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/apps#list-plans-stubbed + */ + "GET /marketplace_listing/stubbed/plans": { + parameters: Endpoints["GET 
/marketplace_listing/stubbed/plans"]["parameters"]; + response: Endpoints["GET /marketplace_listing/stubbed/plans"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/apps#list-accounts-for-a-plan-stubbed + */ + "GET /marketplace_listing/stubbed/plans/{plan_id}/accounts": { + parameters: Endpoints["GET /marketplace_listing/stubbed/plans/{plan_id}/accounts"]["parameters"]; + response: Endpoints["GET /marketplace_listing/stubbed/plans/{plan_id}/accounts"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/activity#list-public-events-for-a-network-of-repositories + */ + "GET /networks/{owner}/{repo}/events": { + parameters: Endpoints["GET /networks/{owner}/{repo}/events"]["parameters"]; + response: Endpoints["GET /networks/{owner}/{repo}/events"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/activity#list-notifications-for-the-authenticated-user + */ + "GET /notifications": { + parameters: Endpoints["GET /notifications"]["parameters"]; + response: Endpoints["GET /notifications"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/orgs#list-organizations + */ + "GET /organizations": { + parameters: Endpoints["GET /organizations"]["parameters"]; + response: Endpoints["GET /organizations"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/actions#list-repositories-with-github-actions-cache-usage-for-an-organization + */ + "GET /orgs/{org}/actions/cache/usage-by-repository": { + parameters: Endpoints["GET /orgs/{org}/actions/cache/usage-by-repository"]["parameters"]; + response: Endpoints["GET /orgs/{org}/actions/cache/usage-by-repository"]["response"] & { + data: Endpoints["GET /orgs/{org}/actions/cache/usage-by-repository"]["response"]["data"]["repository_cache_usages"]; + }; + }; + /** + * @see https://docs.github.com/rest/reference/actions#list-selected-repositories-enabled-for-github-actions-in-an-organization + */ + "GET /orgs/{org}/actions/permissions/repositories": { + parameters: Endpoints["GET /orgs/{org}/actions/permissions/repositories"]["parameters"]; + response: Endpoints["GET /orgs/{org}/actions/permissions/repositories"]["response"] & { + data: Endpoints["GET /orgs/{org}/actions/permissions/repositories"]["response"]["data"]["repositories"]; + }; + }; + /** + * @see https://docs.github.com/rest/reference/actions#list-self-hosted-runner-groups-for-an-organization + */ + "GET /orgs/{org}/actions/runner-groups": { + parameters: Endpoints["GET /orgs/{org}/actions/runner-groups"]["parameters"]; + response: Endpoints["GET /orgs/{org}/actions/runner-groups"]["response"] & { + data: Endpoints["GET /orgs/{org}/actions/runner-groups"]["response"]["data"]["runner_groups"]; + }; + }; + /** + * @see https://docs.github.com/rest/reference/actions#list-repository-access-to-a-self-hosted-runner-group-in-an-organization + */ + "GET /orgs/{org}/actions/runner-groups/{runner_group_id}/repositories": { + parameters: Endpoints["GET /orgs/{org}/actions/runner-groups/{runner_group_id}/repositories"]["parameters"]; + response: Endpoints["GET /orgs/{org}/actions/runner-groups/{runner_group_id}/repositories"]["response"] & { + data: Endpoints["GET /orgs/{org}/actions/runner-groups/{runner_group_id}/repositories"]["response"]["data"]["repositories"]; + }; + }; + /** + * @see https://docs.github.com/rest/reference/actions#list-self-hosted-runners-in-a-group-for-an-organization + */ + "GET /orgs/{org}/actions/runner-groups/{runner_group_id}/runners": { + parameters: Endpoints["GET 
/orgs/{org}/actions/runner-groups/{runner_group_id}/runners"]["parameters"]; + response: Endpoints["GET /orgs/{org}/actions/runner-groups/{runner_group_id}/runners"]["response"] & { + data: Endpoints["GET /orgs/{org}/actions/runner-groups/{runner_group_id}/runners"]["response"]["data"]["runners"]; + }; + }; + /** + * @see https://docs.github.com/rest/reference/actions#list-self-hosted-runners-for-an-organization + */ + "GET /orgs/{org}/actions/runners": { + parameters: Endpoints["GET /orgs/{org}/actions/runners"]["parameters"]; + response: Endpoints["GET /orgs/{org}/actions/runners"]["response"] & { + data: Endpoints["GET /orgs/{org}/actions/runners"]["response"]["data"]["runners"]; + }; + }; + /** + * @see https://docs.github.com/rest/reference/actions#list-organization-secrets + */ + "GET /orgs/{org}/actions/secrets": { + parameters: Endpoints["GET /orgs/{org}/actions/secrets"]["parameters"]; + response: Endpoints["GET /orgs/{org}/actions/secrets"]["response"] & { + data: Endpoints["GET /orgs/{org}/actions/secrets"]["response"]["data"]["secrets"]; + }; + }; + /** + * @see https://docs.github.com/rest/reference/actions#list-selected-repositories-for-an-organization-secret + */ + "GET /orgs/{org}/actions/secrets/{secret_name}/repositories": { + parameters: Endpoints["GET /orgs/{org}/actions/secrets/{secret_name}/repositories"]["parameters"]; + response: Endpoints["GET /orgs/{org}/actions/secrets/{secret_name}/repositories"]["response"] & { + data: Endpoints["GET /orgs/{org}/actions/secrets/{secret_name}/repositories"]["response"]["data"]["repositories"]; + }; + }; + /** + * @see https://docs.github.com/rest/reference/orgs#get-audit-log + */ + "GET /orgs/{org}/audit-log": { + parameters: Endpoints["GET /orgs/{org}/audit-log"]["parameters"]; + response: Endpoints["GET /orgs/{org}/audit-log"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/orgs#list-users-blocked-by-an-organization + */ + "GET /orgs/{org}/blocks": { + parameters: Endpoints["GET /orgs/{org}/blocks"]["parameters"]; + response: Endpoints["GET /orgs/{org}/blocks"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/code-scanning#list-code-scanning-alerts-by-organization + */ + "GET /orgs/{org}/code-scanning/alerts": { + parameters: Endpoints["GET /orgs/{org}/code-scanning/alerts"]["parameters"]; + response: Endpoints["GET /orgs/{org}/code-scanning/alerts"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/codespaces#list-in-organization + */ + "GET /orgs/{org}/codespaces": { + parameters: Endpoints["GET /orgs/{org}/codespaces"]["parameters"]; + response: Endpoints["GET /orgs/{org}/codespaces"]["response"] & { + data: Endpoints["GET /orgs/{org}/codespaces"]["response"]["data"]["codespaces"]; + }; + }; + /** + * @see https://docs.github.com/rest/reference/orgs#list-saml-sso-authorizations-for-an-organization + */ + "GET /orgs/{org}/credential-authorizations": { + parameters: Endpoints["GET /orgs/{org}/credential-authorizations"]["parameters"]; + response: Endpoints["GET /orgs/{org}/credential-authorizations"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/dependabot#list-organization-secrets + */ + "GET /orgs/{org}/dependabot/secrets": { + parameters: Endpoints["GET /orgs/{org}/dependabot/secrets"]["parameters"]; + response: Endpoints["GET /orgs/{org}/dependabot/secrets"]["response"] & { + data: Endpoints["GET /orgs/{org}/dependabot/secrets"]["response"]["data"]["secrets"]; + }; + }; + /** + * @see 
https://docs.github.com/rest/reference/dependabot#list-selected-repositories-for-an-organization-secret + */ + "GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories": { + parameters: Endpoints["GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories"]["parameters"]; + response: Endpoints["GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories"]["response"] & { + data: Endpoints["GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories"]["response"]["data"]["repositories"]; + }; + }; + /** + * @see https://docs.github.com/rest/reference/activity#list-public-organization-events + */ + "GET /orgs/{org}/events": { + parameters: Endpoints["GET /orgs/{org}/events"]["parameters"]; + response: Endpoints["GET /orgs/{org}/events"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/teams#list-external-idp-groups-for-an-organization + */ + "GET /orgs/{org}/external-groups": { + parameters: Endpoints["GET /orgs/{org}/external-groups"]["parameters"]; + response: Endpoints["GET /orgs/{org}/external-groups"]["response"] & { + data: Endpoints["GET /orgs/{org}/external-groups"]["response"]["data"]["groups"]; + }; + }; + /** + * @see https://docs.github.com/rest/reference/orgs#list-failed-organization-invitations + */ + "GET /orgs/{org}/failed_invitations": { + parameters: Endpoints["GET /orgs/{org}/failed_invitations"]["parameters"]; + response: Endpoints["GET /orgs/{org}/failed_invitations"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/orgs#list-organization-webhooks + */ + "GET /orgs/{org}/hooks": { + parameters: Endpoints["GET /orgs/{org}/hooks"]["parameters"]; + response: Endpoints["GET /orgs/{org}/hooks"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/orgs#list-deliveries-for-an-organization-webhook + */ + "GET /orgs/{org}/hooks/{hook_id}/deliveries": { + parameters: Endpoints["GET /orgs/{org}/hooks/{hook_id}/deliveries"]["parameters"]; + response: Endpoints["GET /orgs/{org}/hooks/{hook_id}/deliveries"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/orgs#list-app-installations-for-an-organization + */ + "GET /orgs/{org}/installations": { + parameters: Endpoints["GET /orgs/{org}/installations"]["parameters"]; + response: Endpoints["GET /orgs/{org}/installations"]["response"] & { + data: Endpoints["GET /orgs/{org}/installations"]["response"]["data"]["installations"]; + }; + }; + /** + * @see https://docs.github.com/rest/reference/orgs#list-pending-organization-invitations + */ + "GET /orgs/{org}/invitations": { + parameters: Endpoints["GET /orgs/{org}/invitations"]["parameters"]; + response: Endpoints["GET /orgs/{org}/invitations"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/orgs#list-organization-invitation-teams + */ + "GET /orgs/{org}/invitations/{invitation_id}/teams": { + parameters: Endpoints["GET /orgs/{org}/invitations/{invitation_id}/teams"]["parameters"]; + response: Endpoints["GET /orgs/{org}/invitations/{invitation_id}/teams"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/issues#list-organization-issues-assigned-to-the-authenticated-user + */ + "GET /orgs/{org}/issues": { + parameters: Endpoints["GET /orgs/{org}/issues"]["parameters"]; + response: Endpoints["GET /orgs/{org}/issues"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/orgs#list-organization-members + */ + "GET /orgs/{org}/members": { + parameters: Endpoints["GET /orgs/{org}/members"]["parameters"]; + response: Endpoints["GET 
/orgs/{org}/members"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/migrations#list-organization-migrations + */ + "GET /orgs/{org}/migrations": { + parameters: Endpoints["GET /orgs/{org}/migrations"]["parameters"]; + response: Endpoints["GET /orgs/{org}/migrations"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/migrations#list-repositories-in-an-organization-migration + */ + "GET /orgs/{org}/migrations/{migration_id}/repositories": { + parameters: Endpoints["GET /orgs/{org}/migrations/{migration_id}/repositories"]["parameters"]; + response: Endpoints["GET /orgs/{org}/migrations/{migration_id}/repositories"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/orgs#list-outside-collaborators-for-an-organization + */ + "GET /orgs/{org}/outside_collaborators": { + parameters: Endpoints["GET /orgs/{org}/outside_collaborators"]["parameters"]; + response: Endpoints["GET /orgs/{org}/outside_collaborators"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/packages#list-packages-for-an-organization + */ + "GET /orgs/{org}/packages": { + parameters: Endpoints["GET /orgs/{org}/packages"]["parameters"]; + response: Endpoints["GET /orgs/{org}/packages"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/packages#get-all-package-versions-for-a-package-owned-by-an-organization + */ + "GET /orgs/{org}/packages/{package_type}/{package_name}/versions": { + parameters: Endpoints["GET /orgs/{org}/packages/{package_type}/{package_name}/versions"]["parameters"]; + response: Endpoints["GET /orgs/{org}/packages/{package_type}/{package_name}/versions"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/projects#list-organization-projects + */ + "GET /orgs/{org}/projects": { + parameters: Endpoints["GET /orgs/{org}/projects"]["parameters"]; + response: Endpoints["GET /orgs/{org}/projects"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/orgs#list-public-organization-members + */ + "GET /orgs/{org}/public_members": { + parameters: Endpoints["GET /orgs/{org}/public_members"]["parameters"]; + response: Endpoints["GET /orgs/{org}/public_members"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/repos#list-organization-repositories + */ + "GET /orgs/{org}/repos": { + parameters: Endpoints["GET /orgs/{org}/repos"]["parameters"]; + response: Endpoints["GET /orgs/{org}/repos"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/secret-scanning#list-secret-scanning-alerts-for-an-organization + */ + "GET /orgs/{org}/secret-scanning/alerts": { + parameters: Endpoints["GET /orgs/{org}/secret-scanning/alerts"]["parameters"]; + response: Endpoints["GET /orgs/{org}/secret-scanning/alerts"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/billing#get-github-advanced-security-active-committers-for-an-organization + */ + "GET /orgs/{org}/settings/billing/advanced-security": { + parameters: Endpoints["GET /orgs/{org}/settings/billing/advanced-security"]["parameters"]; + response: Endpoints["GET /orgs/{org}/settings/billing/advanced-security"]["response"] & { + data: Endpoints["GET /orgs/{org}/settings/billing/advanced-security"]["response"]["data"]["repositories"]; + }; + }; + /** + * @see https://docs.github.com/rest/reference/teams#list-idp-groups-for-an-organization + */ + "GET /orgs/{org}/team-sync/groups": { + parameters: Endpoints["GET /orgs/{org}/team-sync/groups"]["parameters"]; + response: 
Endpoints["GET /orgs/{org}/team-sync/groups"]["response"] & { + data: Endpoints["GET /orgs/{org}/team-sync/groups"]["response"]["data"]["groups"]; + }; + }; + /** + * @see https://docs.github.com/rest/reference/teams#list-teams + */ + "GET /orgs/{org}/teams": { + parameters: Endpoints["GET /orgs/{org}/teams"]["parameters"]; + response: Endpoints["GET /orgs/{org}/teams"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/teams#list-discussions + */ + "GET /orgs/{org}/teams/{team_slug}/discussions": { + parameters: Endpoints["GET /orgs/{org}/teams/{team_slug}/discussions"]["parameters"]; + response: Endpoints["GET /orgs/{org}/teams/{team_slug}/discussions"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/teams#list-discussion-comments + */ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments": { + parameters: Endpoints["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments"]["parameters"]; + response: Endpoints["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/reactions#list-reactions-for-a-team-discussion-comment + */ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions": { + parameters: Endpoints["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions"]["parameters"]; + response: Endpoints["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/reactions#list-reactions-for-a-team-discussion + */ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions": { + parameters: Endpoints["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions"]["parameters"]; + response: Endpoints["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/teams#list-pending-team-invitations + */ + "GET /orgs/{org}/teams/{team_slug}/invitations": { + parameters: Endpoints["GET /orgs/{org}/teams/{team_slug}/invitations"]["parameters"]; + response: Endpoints["GET /orgs/{org}/teams/{team_slug}/invitations"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/teams#list-team-members + */ + "GET /orgs/{org}/teams/{team_slug}/members": { + parameters: Endpoints["GET /orgs/{org}/teams/{team_slug}/members"]["parameters"]; + response: Endpoints["GET /orgs/{org}/teams/{team_slug}/members"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/teams#list-team-projects + */ + "GET /orgs/{org}/teams/{team_slug}/projects": { + parameters: Endpoints["GET /orgs/{org}/teams/{team_slug}/projects"]["parameters"]; + response: Endpoints["GET /orgs/{org}/teams/{team_slug}/projects"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/teams#list-team-repositories + */ + "GET /orgs/{org}/teams/{team_slug}/repos": { + parameters: Endpoints["GET /orgs/{org}/teams/{team_slug}/repos"]["parameters"]; + response: Endpoints["GET /orgs/{org}/teams/{team_slug}/repos"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/teams#list-child-teams + */ + "GET /orgs/{org}/teams/{team_slug}/teams": { + parameters: Endpoints["GET /orgs/{org}/teams/{team_slug}/teams"]["parameters"]; + response: Endpoints["GET 
/orgs/{org}/teams/{team_slug}/teams"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/projects#list-project-cards + */ + "GET /projects/columns/{column_id}/cards": { + parameters: Endpoints["GET /projects/columns/{column_id}/cards"]["parameters"]; + response: Endpoints["GET /projects/columns/{column_id}/cards"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/projects#list-project-collaborators + */ + "GET /projects/{project_id}/collaborators": { + parameters: Endpoints["GET /projects/{project_id}/collaborators"]["parameters"]; + response: Endpoints["GET /projects/{project_id}/collaborators"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/projects#list-project-columns + */ + "GET /projects/{project_id}/columns": { + parameters: Endpoints["GET /projects/{project_id}/columns"]["parameters"]; + response: Endpoints["GET /projects/{project_id}/columns"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/actions#list-artifacts-for-a-repository + */ + "GET /repos/{owner}/{repo}/actions/artifacts": { + parameters: Endpoints["GET /repos/{owner}/{repo}/actions/artifacts"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/actions/artifacts"]["response"] & { + data: Endpoints["GET /repos/{owner}/{repo}/actions/artifacts"]["response"]["data"]["artifacts"]; + }; + }; + /** + * @see https://docs.github.com/rest/actions/cache#list-github-actions-caches-for-a-repository + */ + "GET /repos/{owner}/{repo}/actions/caches": { + parameters: Endpoints["GET /repos/{owner}/{repo}/actions/caches"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/actions/caches"]["response"] & { + data: Endpoints["GET /repos/{owner}/{repo}/actions/caches"]["response"]["data"]["actions_caches"]; + }; + }; + /** + * @see https://docs.github.com/rest/reference/actions#list-self-hosted-runners-for-a-repository + */ + "GET /repos/{owner}/{repo}/actions/runners": { + parameters: Endpoints["GET /repos/{owner}/{repo}/actions/runners"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/actions/runners"]["response"] & { + data: Endpoints["GET /repos/{owner}/{repo}/actions/runners"]["response"]["data"]["runners"]; + }; + }; + /** + * @see https://docs.github.com/rest/reference/actions#list-workflow-runs-for-a-repository + */ + "GET /repos/{owner}/{repo}/actions/runs": { + parameters: Endpoints["GET /repos/{owner}/{repo}/actions/runs"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/actions/runs"]["response"] & { + data: Endpoints["GET /repos/{owner}/{repo}/actions/runs"]["response"]["data"]["workflow_runs"]; + }; + }; + /** + * @see https://docs.github.com/rest/reference/actions#list-workflow-run-artifacts + */ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts": { + parameters: Endpoints["GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts"]["response"] & { + data: Endpoints["GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts"]["response"]["data"]["artifacts"]; + }; + }; + /** + * @see https://docs.github.com/rest/reference/actions#list-jobs-for-a-workflow-run-attempt + */ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs": { + parameters: Endpoints["GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs"]["parameters"]; + response: Endpoints["GET 
/repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs"]["response"] & { + data: Endpoints["GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs"]["response"]["data"]["jobs"]; + }; + }; + /** + * @see https://docs.github.com/rest/reference/actions#list-jobs-for-a-workflow-run + */ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs": { + parameters: Endpoints["GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs"]["response"] & { + data: Endpoints["GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs"]["response"]["data"]["jobs"]; + }; + }; + /** + * @see https://docs.github.com/rest/reference/actions#list-repository-secrets + */ + "GET /repos/{owner}/{repo}/actions/secrets": { + parameters: Endpoints["GET /repos/{owner}/{repo}/actions/secrets"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/actions/secrets"]["response"] & { + data: Endpoints["GET /repos/{owner}/{repo}/actions/secrets"]["response"]["data"]["secrets"]; + }; + }; + /** + * @see https://docs.github.com/rest/reference/actions#list-repository-workflows + */ + "GET /repos/{owner}/{repo}/actions/workflows": { + parameters: Endpoints["GET /repos/{owner}/{repo}/actions/workflows"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/actions/workflows"]["response"] & { + data: Endpoints["GET /repos/{owner}/{repo}/actions/workflows"]["response"]["data"]["workflows"]; + }; + }; + /** + * @see https://docs.github.com/rest/reference/actions#list-workflow-runs + */ + "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs": { + parameters: Endpoints["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs"]["response"] & { + data: Endpoints["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs"]["response"]["data"]["workflow_runs"]; + }; + }; + /** + * @see https://docs.github.com/rest/reference/issues#list-assignees + */ + "GET /repos/{owner}/{repo}/assignees": { + parameters: Endpoints["GET /repos/{owner}/{repo}/assignees"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/assignees"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/repos#list-branches + */ + "GET /repos/{owner}/{repo}/branches": { + parameters: Endpoints["GET /repos/{owner}/{repo}/branches"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/branches"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/checks#list-check-run-annotations + */ + "GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations": { + parameters: Endpoints["GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/checks#list-check-runs-in-a-check-suite + */ + "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs": { + parameters: Endpoints["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs"]["response"] & { + data: Endpoints["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs"]["response"]["data"]["check_runs"]; + }; + }; + /** + * @see 
https://docs.github.com/rest/reference/code-scanning#list-code-scanning-alerts-for-a-repository + */ + "GET /repos/{owner}/{repo}/code-scanning/alerts": { + parameters: Endpoints["GET /repos/{owner}/{repo}/code-scanning/alerts"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/code-scanning/alerts"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/code-scanning#list-instances-of-a-code-scanning-alert + */ + "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances": { + parameters: Endpoints["GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/code-scanning#list-code-scanning-analyses-for-a-repository + */ + "GET /repos/{owner}/{repo}/code-scanning/analyses": { + parameters: Endpoints["GET /repos/{owner}/{repo}/code-scanning/analyses"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/code-scanning/analyses"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/codespaces#list-codespaces-in-a-repository-for-the-authenticated-user + */ + "GET /repos/{owner}/{repo}/codespaces": { + parameters: Endpoints["GET /repos/{owner}/{repo}/codespaces"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/codespaces"]["response"] & { + data: Endpoints["GET /repos/{owner}/{repo}/codespaces"]["response"]["data"]["codespaces"]; + }; + }; + /** + * @see https://docs.github.com/rest/reference/codespaces#list-devcontainers-in-a-repository-for-the-authenticated-user + */ + "GET /repos/{owner}/{repo}/codespaces/devcontainers": { + parameters: Endpoints["GET /repos/{owner}/{repo}/codespaces/devcontainers"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/codespaces/devcontainers"]["response"] & { + data: Endpoints["GET /repos/{owner}/{repo}/codespaces/devcontainers"]["response"]["data"]["devcontainers"]; + }; + }; + /** + * @see https://docs.github.com/rest/reference/codespaces#list-repository-secrets + */ + "GET /repos/{owner}/{repo}/codespaces/secrets": { + parameters: Endpoints["GET /repos/{owner}/{repo}/codespaces/secrets"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/codespaces/secrets"]["response"] & { + data: Endpoints["GET /repos/{owner}/{repo}/codespaces/secrets"]["response"]["data"]["secrets"]; + }; + }; + /** + * @see https://docs.github.com/rest/reference/repos#list-repository-collaborators + */ + "GET /repos/{owner}/{repo}/collaborators": { + parameters: Endpoints["GET /repos/{owner}/{repo}/collaborators"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/collaborators"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/repos#list-commit-comments-for-a-repository + */ + "GET /repos/{owner}/{repo}/comments": { + parameters: Endpoints["GET /repos/{owner}/{repo}/comments"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/comments"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/reactions#list-reactions-for-a-commit-comment + */ + "GET /repos/{owner}/{repo}/comments/{comment_id}/reactions": { + parameters: Endpoints["GET /repos/{owner}/{repo}/comments/{comment_id}/reactions"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/comments/{comment_id}/reactions"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/repos#list-commits + */ + "GET 
/repos/{owner}/{repo}/commits": { + parameters: Endpoints["GET /repos/{owner}/{repo}/commits"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/commits"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/repos#list-commit-comments + */ + "GET /repos/{owner}/{repo}/commits/{commit_sha}/comments": { + parameters: Endpoints["GET /repos/{owner}/{repo}/commits/{commit_sha}/comments"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/commits/{commit_sha}/comments"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/repos#list-pull-requests-associated-with-a-commit + */ + "GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls": { + parameters: Endpoints["GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/checks#list-check-runs-for-a-git-reference + */ + "GET /repos/{owner}/{repo}/commits/{ref}/check-runs": { + parameters: Endpoints["GET /repos/{owner}/{repo}/commits/{ref}/check-runs"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/commits/{ref}/check-runs"]["response"] & { + data: Endpoints["GET /repos/{owner}/{repo}/commits/{ref}/check-runs"]["response"]["data"]["check_runs"]; + }; + }; + /** + * @see https://docs.github.com/rest/reference/checks#list-check-suites-for-a-git-reference + */ + "GET /repos/{owner}/{repo}/commits/{ref}/check-suites": { + parameters: Endpoints["GET /repos/{owner}/{repo}/commits/{ref}/check-suites"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/commits/{ref}/check-suites"]["response"] & { + data: Endpoints["GET /repos/{owner}/{repo}/commits/{ref}/check-suites"]["response"]["data"]["check_suites"]; + }; + }; + /** + * @see https://docs.github.com/rest/reference/repos#get-the-combined-status-for-a-specific-reference + */ + "GET /repos/{owner}/{repo}/commits/{ref}/status": { + parameters: Endpoints["GET /repos/{owner}/{repo}/commits/{ref}/status"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/commits/{ref}/status"]["response"] & { + data: Endpoints["GET /repos/{owner}/{repo}/commits/{ref}/status"]["response"]["data"]["statuses"]; + }; + }; + /** + * @see https://docs.github.com/rest/reference/repos#list-commit-statuses-for-a-reference + */ + "GET /repos/{owner}/{repo}/commits/{ref}/statuses": { + parameters: Endpoints["GET /repos/{owner}/{repo}/commits/{ref}/statuses"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/commits/{ref}/statuses"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/repos#list-repository-contributors + */ + "GET /repos/{owner}/{repo}/contributors": { + parameters: Endpoints["GET /repos/{owner}/{repo}/contributors"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/contributors"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/dependabot#list-repository-secrets + */ + "GET /repos/{owner}/{repo}/dependabot/secrets": { + parameters: Endpoints["GET /repos/{owner}/{repo}/dependabot/secrets"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/dependabot/secrets"]["response"] & { + data: Endpoints["GET /repos/{owner}/{repo}/dependabot/secrets"]["response"]["data"]["secrets"]; + }; + }; + /** + * @see https://docs.github.com/rest/reference/repos#list-deployments + */ + "GET /repos/{owner}/{repo}/deployments": { + parameters: Endpoints["GET 
/repos/{owner}/{repo}/deployments"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/deployments"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/repos#list-deployment-statuses + */ + "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses": { + parameters: Endpoints["GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/repos#get-all-environments + */ + "GET /repos/{owner}/{repo}/environments": { + parameters: Endpoints["GET /repos/{owner}/{repo}/environments"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/environments"]["response"] & { + data: Endpoints["GET /repos/{owner}/{repo}/environments"]["response"]["data"]["environments"]; + }; + }; + /** + * @see https://docs.github.com/rest/reference/activity#list-repository-events + */ + "GET /repos/{owner}/{repo}/events": { + parameters: Endpoints["GET /repos/{owner}/{repo}/events"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/events"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/repos#list-forks + */ + "GET /repos/{owner}/{repo}/forks": { + parameters: Endpoints["GET /repos/{owner}/{repo}/forks"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/forks"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/git#list-matching-references + */ + "GET /repos/{owner}/{repo}/git/matching-refs/{ref}": { + parameters: Endpoints["GET /repos/{owner}/{repo}/git/matching-refs/{ref}"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/git/matching-refs/{ref}"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/repos#list-repository-webhooks + */ + "GET /repos/{owner}/{repo}/hooks": { + parameters: Endpoints["GET /repos/{owner}/{repo}/hooks"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/hooks"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/repos#list-deliveries-for-a-repository-webhook + */ + "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries": { + parameters: Endpoints["GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/repos#list-repository-invitations + */ + "GET /repos/{owner}/{repo}/invitations": { + parameters: Endpoints["GET /repos/{owner}/{repo}/invitations"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/invitations"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/issues#list-repository-issues + */ + "GET /repos/{owner}/{repo}/issues": { + parameters: Endpoints["GET /repos/{owner}/{repo}/issues"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/issues"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/issues#list-issue-comments-for-a-repository + */ + "GET /repos/{owner}/{repo}/issues/comments": { + parameters: Endpoints["GET /repos/{owner}/{repo}/issues/comments"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/issues/comments"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/reactions#list-reactions-for-an-issue-comment + */ + "GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions": { + parameters: Endpoints["GET 
/repos/{owner}/{repo}/issues/comments/{comment_id}/reactions"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/issues#list-issue-events-for-a-repository + */ + "GET /repos/{owner}/{repo}/issues/events": { + parameters: Endpoints["GET /repos/{owner}/{repo}/issues/events"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/issues/events"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/issues#list-issue-comments + */ + "GET /repos/{owner}/{repo}/issues/{issue_number}/comments": { + parameters: Endpoints["GET /repos/{owner}/{repo}/issues/{issue_number}/comments"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/issues/{issue_number}/comments"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/issues#list-issue-events + */ + "GET /repos/{owner}/{repo}/issues/{issue_number}/events": { + parameters: Endpoints["GET /repos/{owner}/{repo}/issues/{issue_number}/events"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/issues/{issue_number}/events"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/issues#list-labels-for-an-issue + */ + "GET /repos/{owner}/{repo}/issues/{issue_number}/labels": { + parameters: Endpoints["GET /repos/{owner}/{repo}/issues/{issue_number}/labels"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/issues/{issue_number}/labels"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/reactions#list-reactions-for-an-issue + */ + "GET /repos/{owner}/{repo}/issues/{issue_number}/reactions": { + parameters: Endpoints["GET /repos/{owner}/{repo}/issues/{issue_number}/reactions"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/issues/{issue_number}/reactions"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/issues#list-timeline-events-for-an-issue + */ + "GET /repos/{owner}/{repo}/issues/{issue_number}/timeline": { + parameters: Endpoints["GET /repos/{owner}/{repo}/issues/{issue_number}/timeline"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/issues/{issue_number}/timeline"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/repos#list-deploy-keys + */ + "GET /repos/{owner}/{repo}/keys": { + parameters: Endpoints["GET /repos/{owner}/{repo}/keys"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/keys"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/issues#list-labels-for-a-repository + */ + "GET /repos/{owner}/{repo}/labels": { + parameters: Endpoints["GET /repos/{owner}/{repo}/labels"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/labels"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/issues#list-milestones + */ + "GET /repos/{owner}/{repo}/milestones": { + parameters: Endpoints["GET /repos/{owner}/{repo}/milestones"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/milestones"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/issues#list-labels-for-issues-in-a-milestone + */ + "GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels": { + parameters: Endpoints["GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels"]["response"]; + }; + /** + * @see 
https://docs.github.com/rest/reference/activity#list-repository-notifications-for-the-authenticated-user + */ + "GET /repos/{owner}/{repo}/notifications": { + parameters: Endpoints["GET /repos/{owner}/{repo}/notifications"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/notifications"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/repos#list-github-pages-builds + */ + "GET /repos/{owner}/{repo}/pages/builds": { + parameters: Endpoints["GET /repos/{owner}/{repo}/pages/builds"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/pages/builds"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/projects#list-repository-projects + */ + "GET /repos/{owner}/{repo}/projects": { + parameters: Endpoints["GET /repos/{owner}/{repo}/projects"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/projects"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/pulls#list-pull-requests + */ + "GET /repos/{owner}/{repo}/pulls": { + parameters: Endpoints["GET /repos/{owner}/{repo}/pulls"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/pulls"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/pulls#list-review-comments-in-a-repository + */ + "GET /repos/{owner}/{repo}/pulls/comments": { + parameters: Endpoints["GET /repos/{owner}/{repo}/pulls/comments"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/pulls/comments"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/reactions#list-reactions-for-a-pull-request-review-comment + */ + "GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions": { + parameters: Endpoints["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/pulls#list-review-comments-on-a-pull-request + */ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/comments": { + parameters: Endpoints["GET /repos/{owner}/{repo}/pulls/{pull_number}/comments"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/pulls/{pull_number}/comments"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/pulls#list-commits-on-a-pull-request + */ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/commits": { + parameters: Endpoints["GET /repos/{owner}/{repo}/pulls/{pull_number}/commits"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/pulls/{pull_number}/commits"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/pulls#list-pull-requests-files + */ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/files": { + parameters: Endpoints["GET /repos/{owner}/{repo}/pulls/{pull_number}/files"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/pulls/{pull_number}/files"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/pulls#list-requested-reviewers-for-a-pull-request + */ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers": { + parameters: Endpoints["GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"]["response"] & { + data: Endpoints["GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"]["response"]["data"]["users"]; + }; + }; + /** + * @see 
https://docs.github.com/rest/reference/pulls#list-reviews-for-a-pull-request + */ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews": { + parameters: Endpoints["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/pulls#list-comments-for-a-pull-request-review + */ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments": { + parameters: Endpoints["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/repos#list-releases + */ + "GET /repos/{owner}/{repo}/releases": { + parameters: Endpoints["GET /repos/{owner}/{repo}/releases"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/releases"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/repos#list-release-assets + */ + "GET /repos/{owner}/{repo}/releases/{release_id}/assets": { + parameters: Endpoints["GET /repos/{owner}/{repo}/releases/{release_id}/assets"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/releases/{release_id}/assets"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/reactions/#list-reactions-for-a-release + */ + "GET /repos/{owner}/{repo}/releases/{release_id}/reactions": { + parameters: Endpoints["GET /repos/{owner}/{repo}/releases/{release_id}/reactions"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/releases/{release_id}/reactions"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/secret-scanning#list-secret-scanning-alerts-for-a-repository + */ + "GET /repos/{owner}/{repo}/secret-scanning/alerts": { + parameters: Endpoints["GET /repos/{owner}/{repo}/secret-scanning/alerts"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/secret-scanning/alerts"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/secret-scanning#list-locations-for-a-secret-scanning-alert + */ + "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations": { + parameters: Endpoints["GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/activity#list-stargazers + */ + "GET /repos/{owner}/{repo}/stargazers": { + parameters: Endpoints["GET /repos/{owner}/{repo}/stargazers"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/stargazers"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/activity#list-watchers + */ + "GET /repos/{owner}/{repo}/subscribers": { + parameters: Endpoints["GET /repos/{owner}/{repo}/subscribers"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/subscribers"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/repos#list-repository-tags + */ + "GET /repos/{owner}/{repo}/tags": { + parameters: Endpoints["GET /repos/{owner}/{repo}/tags"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/tags"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/repos#list-repository-teams + */ + "GET /repos/{owner}/{repo}/teams": { + parameters: Endpoints["GET 
/repos/{owner}/{repo}/teams"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/teams"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/repos#get-all-repository-topics + */ + "GET /repos/{owner}/{repo}/topics": { + parameters: Endpoints["GET /repos/{owner}/{repo}/topics"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/topics"]["response"] & { + data: Endpoints["GET /repos/{owner}/{repo}/topics"]["response"]["data"]["names"]; + }; + }; + /** + * @see https://docs.github.com/rest/reference/repos#list-public-repositories + */ + "GET /repositories": { + parameters: Endpoints["GET /repositories"]["parameters"]; + response: Endpoints["GET /repositories"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/actions#list-environment-secrets + */ + "GET /repositories/{repository_id}/environments/{environment_name}/secrets": { + parameters: Endpoints["GET /repositories/{repository_id}/environments/{environment_name}/secrets"]["parameters"]; + response: Endpoints["GET /repositories/{repository_id}/environments/{environment_name}/secrets"]["response"] & { + data: Endpoints["GET /repositories/{repository_id}/environments/{environment_name}/secrets"]["response"]["data"]["secrets"]; + }; + }; + /** + * @see https://docs.github.com/rest/reference/search#search-code + */ + "GET /search/code": { + parameters: Endpoints["GET /search/code"]["parameters"]; + response: Endpoints["GET /search/code"]["response"] & { + data: Endpoints["GET /search/code"]["response"]["data"]["items"]; + }; + }; + /** + * @see https://docs.github.com/rest/reference/search#search-commits + */ + "GET /search/commits": { + parameters: Endpoints["GET /search/commits"]["parameters"]; + response: Endpoints["GET /search/commits"]["response"] & { + data: Endpoints["GET /search/commits"]["response"]["data"]["items"]; + }; + }; + /** + * @see https://docs.github.com/rest/reference/search#search-issues-and-pull-requests + */ + "GET /search/issues": { + parameters: Endpoints["GET /search/issues"]["parameters"]; + response: Endpoints["GET /search/issues"]["response"] & { + data: Endpoints["GET /search/issues"]["response"]["data"]["items"]; + }; + }; + /** + * @see https://docs.github.com/rest/reference/search#search-labels + */ + "GET /search/labels": { + parameters: Endpoints["GET /search/labels"]["parameters"]; + response: Endpoints["GET /search/labels"]["response"] & { + data: Endpoints["GET /search/labels"]["response"]["data"]["items"]; + }; + }; + /** + * @see https://docs.github.com/rest/reference/search#search-repositories + */ + "GET /search/repositories": { + parameters: Endpoints["GET /search/repositories"]["parameters"]; + response: Endpoints["GET /search/repositories"]["response"] & { + data: Endpoints["GET /search/repositories"]["response"]["data"]["items"]; + }; + }; + /** + * @see https://docs.github.com/rest/reference/search#search-topics + */ + "GET /search/topics": { + parameters: Endpoints["GET /search/topics"]["parameters"]; + response: Endpoints["GET /search/topics"]["response"] & { + data: Endpoints["GET /search/topics"]["response"]["data"]["items"]; + }; + }; + /** + * @see https://docs.github.com/rest/reference/search#search-users + */ + "GET /search/users": { + parameters: Endpoints["GET /search/users"]["parameters"]; + response: Endpoints["GET /search/users"]["response"] & { + data: Endpoints["GET /search/users"]["response"]["data"]["items"]; + }; + }; + /** + * @see https://docs.github.com/rest/reference/teams#list-discussions-legacy + */ + "GET 
/teams/{team_id}/discussions": { + parameters: Endpoints["GET /teams/{team_id}/discussions"]["parameters"]; + response: Endpoints["GET /teams/{team_id}/discussions"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/teams#list-discussion-comments-legacy + */ + "GET /teams/{team_id}/discussions/{discussion_number}/comments": { + parameters: Endpoints["GET /teams/{team_id}/discussions/{discussion_number}/comments"]["parameters"]; + response: Endpoints["GET /teams/{team_id}/discussions/{discussion_number}/comments"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/reactions/#list-reactions-for-a-team-discussion-comment-legacy + */ + "GET /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions": { + parameters: Endpoints["GET /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions"]["parameters"]; + response: Endpoints["GET /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/reactions/#list-reactions-for-a-team-discussion-legacy + */ + "GET /teams/{team_id}/discussions/{discussion_number}/reactions": { + parameters: Endpoints["GET /teams/{team_id}/discussions/{discussion_number}/reactions"]["parameters"]; + response: Endpoints["GET /teams/{team_id}/discussions/{discussion_number}/reactions"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/teams#list-pending-team-invitations-legacy + */ + "GET /teams/{team_id}/invitations": { + parameters: Endpoints["GET /teams/{team_id}/invitations"]["parameters"]; + response: Endpoints["GET /teams/{team_id}/invitations"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/teams#list-team-members-legacy + */ + "GET /teams/{team_id}/members": { + parameters: Endpoints["GET /teams/{team_id}/members"]["parameters"]; + response: Endpoints["GET /teams/{team_id}/members"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/teams/#list-team-projects-legacy + */ + "GET /teams/{team_id}/projects": { + parameters: Endpoints["GET /teams/{team_id}/projects"]["parameters"]; + response: Endpoints["GET /teams/{team_id}/projects"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/teams/#list-team-repositories-legacy + */ + "GET /teams/{team_id}/repos": { + parameters: Endpoints["GET /teams/{team_id}/repos"]["parameters"]; + response: Endpoints["GET /teams/{team_id}/repos"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/teams/#list-child-teams-legacy + */ + "GET /teams/{team_id}/teams": { + parameters: Endpoints["GET /teams/{team_id}/teams"]["parameters"]; + response: Endpoints["GET /teams/{team_id}/teams"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/users#list-users-blocked-by-the-authenticated-user + */ + "GET /user/blocks": { + parameters: Endpoints["GET /user/blocks"]["parameters"]; + response: Endpoints["GET /user/blocks"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/codespaces#list-codespaces-for-the-authenticated-user + */ + "GET /user/codespaces": { + parameters: Endpoints["GET /user/codespaces"]["parameters"]; + response: Endpoints["GET /user/codespaces"]["response"] & { + data: Endpoints["GET /user/codespaces"]["response"]["data"]["codespaces"]; + }; + }; + /** + * @see https://docs.github.com/rest/reference/codespaces#list-secrets-for-the-authenticated-user + */ + "GET /user/codespaces/secrets": { 
+ parameters: Endpoints["GET /user/codespaces/secrets"]["parameters"]; + response: Endpoints["GET /user/codespaces/secrets"]["response"] & { + data: Endpoints["GET /user/codespaces/secrets"]["response"]["data"]["secrets"]; + }; + }; + /** + * @see https://docs.github.com/rest/reference/users#list-email-addresses-for-the-authenticated-user + */ + "GET /user/emails": { + parameters: Endpoints["GET /user/emails"]["parameters"]; + response: Endpoints["GET /user/emails"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/users#list-followers-of-the-authenticated-user + */ + "GET /user/followers": { + parameters: Endpoints["GET /user/followers"]["parameters"]; + response: Endpoints["GET /user/followers"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/users#list-the-people-the-authenticated-user-follows + */ + "GET /user/following": { + parameters: Endpoints["GET /user/following"]["parameters"]; + response: Endpoints["GET /user/following"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/users#list-gpg-keys-for-the-authenticated-user + */ + "GET /user/gpg_keys": { + parameters: Endpoints["GET /user/gpg_keys"]["parameters"]; + response: Endpoints["GET /user/gpg_keys"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/apps#list-app-installations-accessible-to-the-user-access-token + */ + "GET /user/installations": { + parameters: Endpoints["GET /user/installations"]["parameters"]; + response: Endpoints["GET /user/installations"]["response"] & { + data: Endpoints["GET /user/installations"]["response"]["data"]["installations"]; + }; + }; + /** + * @see https://docs.github.com/rest/reference/apps#list-repositories-accessible-to-the-user-access-token + */ + "GET /user/installations/{installation_id}/repositories": { + parameters: Endpoints["GET /user/installations/{installation_id}/repositories"]["parameters"]; + response: Endpoints["GET /user/installations/{installation_id}/repositories"]["response"] & { + data: Endpoints["GET /user/installations/{installation_id}/repositories"]["response"]["data"]["repositories"]; + }; + }; + /** + * @see https://docs.github.com/rest/reference/issues#list-user-account-issues-assigned-to-the-authenticated-user + */ + "GET /user/issues": { + parameters: Endpoints["GET /user/issues"]["parameters"]; + response: Endpoints["GET /user/issues"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/users#list-public-ssh-keys-for-the-authenticated-user + */ + "GET /user/keys": { + parameters: Endpoints["GET /user/keys"]["parameters"]; + response: Endpoints["GET /user/keys"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/apps#list-subscriptions-for-the-authenticated-user + */ + "GET /user/marketplace_purchases": { + parameters: Endpoints["GET /user/marketplace_purchases"]["parameters"]; + response: Endpoints["GET /user/marketplace_purchases"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/apps#list-subscriptions-for-the-authenticated-user-stubbed + */ + "GET /user/marketplace_purchases/stubbed": { + parameters: Endpoints["GET /user/marketplace_purchases/stubbed"]["parameters"]; + response: Endpoints["GET /user/marketplace_purchases/stubbed"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/orgs#list-organization-memberships-for-the-authenticated-user + */ + "GET /user/memberships/orgs": { + parameters: Endpoints["GET /user/memberships/orgs"]["parameters"]; + response: Endpoints["GET 
/user/memberships/orgs"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/migrations#list-user-migrations + */ + "GET /user/migrations": { + parameters: Endpoints["GET /user/migrations"]["parameters"]; + response: Endpoints["GET /user/migrations"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/migrations#list-repositories-for-a-user-migration + */ + "GET /user/migrations/{migration_id}/repositories": { + parameters: Endpoints["GET /user/migrations/{migration_id}/repositories"]["parameters"]; + response: Endpoints["GET /user/migrations/{migration_id}/repositories"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/orgs#list-organizations-for-the-authenticated-user + */ + "GET /user/orgs": { + parameters: Endpoints["GET /user/orgs"]["parameters"]; + response: Endpoints["GET /user/orgs"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/packages#list-packages-for-the-authenticated-user + */ + "GET /user/packages": { + parameters: Endpoints["GET /user/packages"]["parameters"]; + response: Endpoints["GET /user/packages"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/packages#get-all-package-versions-for-a-package-owned-by-the-authenticated-user + */ + "GET /user/packages/{package_type}/{package_name}/versions": { + parameters: Endpoints["GET /user/packages/{package_type}/{package_name}/versions"]["parameters"]; + response: Endpoints["GET /user/packages/{package_type}/{package_name}/versions"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/users#list-public-email-addresses-for-the-authenticated-user + */ + "GET /user/public_emails": { + parameters: Endpoints["GET /user/public_emails"]["parameters"]; + response: Endpoints["GET /user/public_emails"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/repos#list-repositories-for-the-authenticated-user + */ + "GET /user/repos": { + parameters: Endpoints["GET /user/repos"]["parameters"]; + response: Endpoints["GET /user/repos"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/repos#list-repository-invitations-for-the-authenticated-user + */ + "GET /user/repository_invitations": { + parameters: Endpoints["GET /user/repository_invitations"]["parameters"]; + response: Endpoints["GET /user/repository_invitations"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/activity#list-repositories-starred-by-the-authenticated-user + */ + "GET /user/starred": { + parameters: Endpoints["GET /user/starred"]["parameters"]; + response: Endpoints["GET /user/starred"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/activity#list-repositories-watched-by-the-authenticated-user + */ + "GET /user/subscriptions": { + parameters: Endpoints["GET /user/subscriptions"]["parameters"]; + response: Endpoints["GET /user/subscriptions"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/teams#list-teams-for-the-authenticated-user + */ + "GET /user/teams": { + parameters: Endpoints["GET /user/teams"]["parameters"]; + response: Endpoints["GET /user/teams"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/users#list-users + */ + "GET /users": { + parameters: Endpoints["GET /users"]["parameters"]; + response: Endpoints["GET /users"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/activity#list-events-for-the-authenticated-user + */ + "GET /users/{username}/events": { + parameters: 
Endpoints["GET /users/{username}/events"]["parameters"]; + response: Endpoints["GET /users/{username}/events"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/activity#list-organization-events-for-the-authenticated-user + */ + "GET /users/{username}/events/orgs/{org}": { + parameters: Endpoints["GET /users/{username}/events/orgs/{org}"]["parameters"]; + response: Endpoints["GET /users/{username}/events/orgs/{org}"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/activity#list-public-events-for-a-user + */ + "GET /users/{username}/events/public": { + parameters: Endpoints["GET /users/{username}/events/public"]["parameters"]; + response: Endpoints["GET /users/{username}/events/public"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/users#list-followers-of-a-user + */ + "GET /users/{username}/followers": { + parameters: Endpoints["GET /users/{username}/followers"]["parameters"]; + response: Endpoints["GET /users/{username}/followers"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/users#list-the-people-a-user-follows + */ + "GET /users/{username}/following": { + parameters: Endpoints["GET /users/{username}/following"]["parameters"]; + response: Endpoints["GET /users/{username}/following"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/gists#list-gists-for-a-user + */ + "GET /users/{username}/gists": { + parameters: Endpoints["GET /users/{username}/gists"]["parameters"]; + response: Endpoints["GET /users/{username}/gists"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/users#list-gpg-keys-for-a-user + */ + "GET /users/{username}/gpg_keys": { + parameters: Endpoints["GET /users/{username}/gpg_keys"]["parameters"]; + response: Endpoints["GET /users/{username}/gpg_keys"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/users#list-public-keys-for-a-user + */ + "GET /users/{username}/keys": { + parameters: Endpoints["GET /users/{username}/keys"]["parameters"]; + response: Endpoints["GET /users/{username}/keys"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/orgs#list-organizations-for-a-user + */ + "GET /users/{username}/orgs": { + parameters: Endpoints["GET /users/{username}/orgs"]["parameters"]; + response: Endpoints["GET /users/{username}/orgs"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/packages#list-packages-for-user + */ + "GET /users/{username}/packages": { + parameters: Endpoints["GET /users/{username}/packages"]["parameters"]; + response: Endpoints["GET /users/{username}/packages"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/projects#list-user-projects + */ + "GET /users/{username}/projects": { + parameters: Endpoints["GET /users/{username}/projects"]["parameters"]; + response: Endpoints["GET /users/{username}/projects"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/activity#list-events-received-by-the-authenticated-user + */ + "GET /users/{username}/received_events": { + parameters: Endpoints["GET /users/{username}/received_events"]["parameters"]; + response: Endpoints["GET /users/{username}/received_events"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/activity#list-public-events-received-by-a-user + */ + "GET /users/{username}/received_events/public": { + parameters: Endpoints["GET /users/{username}/received_events/public"]["parameters"]; + response: Endpoints["GET 
/users/{username}/received_events/public"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/repos#list-repositories-for-a-user + */ + "GET /users/{username}/repos": { + parameters: Endpoints["GET /users/{username}/repos"]["parameters"]; + response: Endpoints["GET /users/{username}/repos"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/activity#list-repositories-starred-by-a-user + */ + "GET /users/{username}/starred": { + parameters: Endpoints["GET /users/{username}/starred"]["parameters"]; + response: Endpoints["GET /users/{username}/starred"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/activity#list-repositories-watched-by-a-user + */ + "GET /users/{username}/subscriptions": { + parameters: Endpoints["GET /users/{username}/subscriptions"]["parameters"]; + response: Endpoints["GET /users/{username}/subscriptions"]["response"]; + }; +} +export declare const paginatingEndpoints: (keyof PaginatingEndpoints)[]; diff --git a/node_modules/@octokit/plugin-paginate-rest/dist-types/index.d.ts b/node_modules/@octokit/plugin-paginate-rest/dist-types/index.d.ts new file mode 100644 index 0000000..4d84aae --- /dev/null +++ b/node_modules/@octokit/plugin-paginate-rest/dist-types/index.d.ts @@ -0,0 +1,16 @@ +import { Octokit } from "@octokit/core"; +import { PaginateInterface } from "./types"; +export { PaginateInterface } from "./types"; +export { PaginatingEndpoints } from "./types"; +export { composePaginateRest } from "./compose-paginate"; +export { isPaginatingEndpoint, paginatingEndpoints, } from "./paginating-endpoints"; +/** + * @param octokit Octokit instance + * @param options Options passed to Octokit constructor + */ +export declare function paginateRest(octokit: Octokit): { + paginate: PaginateInterface; +}; +export declare namespace paginateRest { + var VERSION: string; +} diff --git a/node_modules/@octokit/plugin-paginate-rest/dist-types/iterator.d.ts b/node_modules/@octokit/plugin-paginate-rest/dist-types/iterator.d.ts new file mode 100644 index 0000000..931d530 --- /dev/null +++ b/node_modules/@octokit/plugin-paginate-rest/dist-types/iterator.d.ts @@ -0,0 +1,20 @@ +import { Octokit } from "@octokit/core"; +import { RequestInterface, RequestParameters, Route } from "./types"; +export declare function iterator(octokit: Octokit, route: Route | RequestInterface, parameters?: RequestParameters): { + [Symbol.asyncIterator]: () => { + next(): Promise<{ + done: boolean; + value?: undefined; + } | { + value: import("@octokit/types/dist-types/OctokitResponse").OctokitResponse; + done?: undefined; + } | { + value: { + status: number; + headers: {}; + data: never[]; + }; + done?: undefined; + }>; + }; +}; diff --git a/node_modules/@octokit/plugin-paginate-rest/dist-types/normalize-paginated-list-response.d.ts b/node_modules/@octokit/plugin-paginate-rest/dist-types/normalize-paginated-list-response.d.ts new file mode 100644 index 0000000..f948a78 --- /dev/null +++ b/node_modules/@octokit/plugin-paginate-rest/dist-types/normalize-paginated-list-response.d.ts @@ -0,0 +1,18 @@ +/** + * Some “list” response that can be paginated have a different response structure + * + * They have a `total_count` key in the response (search also has `incomplete_results`, + * /installation/repositories also has `repository_selection`), as well as a key with + * the list of the items which name varies from endpoint to endpoint. 
+ * + * Octokit normalizes these responses so that paginated results are always returned following + * the same structure. One challenge is that if the list response has only one page, no Link + * header is provided, so this header alone is not sufficient to check wether a response is + * paginated or not. + * + * We check if a "total_count" key is present in the response data, but also make sure that + * a "url" property is not, as the "Get the combined status for a specific ref" endpoint would + * otherwise match: https://developer.github.com/v3/repos/statuses/#get-the-combined-status-for-a-specific-ref + */ +import { OctokitResponse } from "./types"; +export declare function normalizePaginatedListResponse(response: OctokitResponse): OctokitResponse; diff --git a/node_modules/@octokit/plugin-paginate-rest/dist-types/paginate.d.ts b/node_modules/@octokit/plugin-paginate-rest/dist-types/paginate.d.ts new file mode 100644 index 0000000..774c604 --- /dev/null +++ b/node_modules/@octokit/plugin-paginate-rest/dist-types/paginate.d.ts @@ -0,0 +1,3 @@ +import { Octokit } from "@octokit/core"; +import { MapFunction, PaginationResults, RequestParameters, Route, RequestInterface } from "./types"; +export declare function paginate(octokit: Octokit, route: Route | RequestInterface, parameters?: RequestParameters, mapFn?: MapFunction): Promise>; diff --git a/node_modules/@octokit/plugin-paginate-rest/dist-types/paginating-endpoints.d.ts b/node_modules/@octokit/plugin-paginate-rest/dist-types/paginating-endpoints.d.ts new file mode 100644 index 0000000..f6a4d7b --- /dev/null +++ b/node_modules/@octokit/plugin-paginate-rest/dist-types/paginating-endpoints.d.ts @@ -0,0 +1,3 @@ +import { PaginatingEndpoints } from "./generated/paginating-endpoints"; +export { paginatingEndpoints } from "./generated/paginating-endpoints"; +export declare function isPaginatingEndpoint(arg: unknown): arg is keyof PaginatingEndpoints; diff --git a/node_modules/@octokit/plugin-paginate-rest/dist-types/types.d.ts b/node_modules/@octokit/plugin-paginate-rest/dist-types/types.d.ts new file mode 100644 index 0000000..0634907 --- /dev/null +++ b/node_modules/@octokit/plugin-paginate-rest/dist-types/types.d.ts @@ -0,0 +1,242 @@ +import { Octokit } from "@octokit/core"; +import * as OctokitTypes from "@octokit/types"; +export { EndpointOptions, RequestInterface, OctokitResponse, RequestParameters, Route, } from "@octokit/types"; +export { PaginatingEndpoints } from "./generated/paginating-endpoints"; +import { PaginatingEndpoints } from "./generated/paginating-endpoints"; +declare type KnownKeys = Extract<{ + [K in keyof T]: string extends K ? never : number extends K ? never : K; +} extends { + [_ in keyof T]: infer U; +} ? U : never, keyof T>; +declare type KeysMatching = { + [K in keyof T]: T[K] extends V ? K : never; +}[keyof T]; +declare type KnownKeysMatching = KeysMatching>, V>; +declare type GetResultsType = T extends { + data: any[]; +} ? T["data"] : T extends { + data: object; +} ? T["data"][KnownKeysMatching] : never; +declare type NormalizeResponse = T & { + data: GetResultsType; +}; +declare type DataType = "data" extends keyof T ? T["data"] : unknown; +export interface MapFunction>, M = unknown[]> { + (response: T, done: () => void): M; +} +export declare type PaginationResults = T[]; +export interface PaginateInterface { + /** + * Paginate a request using endpoint options and map each response to a custom array + * + * @param {object} options Must set `method` and `url`. 
Plus URL, query or body parameters, as well as `headers`, `mediaType.{format|previews}`, `request`, or `baseUrl`. + * @param {function} mapFn Optional method to map each response to a custom array + */ + (options: OctokitTypes.EndpointOptions, mapFn: MapFunction>, M[]>): Promise>; + /** + * Paginate a request using endpoint options + * + * @param {object} options Must set `method` and `url`. Plus URL, query or body parameters, as well as `headers`, `mediaType.{format|previews}`, `request`, or `baseUrl`. + */ + (options: OctokitTypes.EndpointOptions): Promise>; + /** + * Paginate a request using a known endpoint route string and map each response to a custom array + * + * @param {string} route Request method + URL. Example: `'GET /orgs/{org}'` + * @param {function} mapFn Optional method to map each response to a custom array + */ + (route: R, mapFn: MapFunction): Promise; + /** + * Paginate a request using a known endpoint route string and parameters, and map each response to a custom array + * + * @param {string} route Request method + URL. Example: `'GET /orgs/{org}'` + * @param {object} parameters URL, query or body parameters, as well as `headers`, `mediaType.{format|previews}`, `request`, or `baseUrl`. + * @param {function} mapFn Optional method to map each response to a custom array + */ + (route: R, parameters: PaginatingEndpoints[R]["parameters"], mapFn: MapFunction): Promise; + /** + * Paginate a request using an known endpoint route string + * + * @param {string} route Request method + URL. Example: `'GET /orgs/{org}'` + * @param {object} parameters? URL, query or body parameters, as well as `headers`, `mediaType.{format|previews}`, `request`, or `baseUrl`. + */ + (route: R, parameters?: PaginatingEndpoints[R]["parameters"]): Promise>; + /** + * Paginate a request using an unknown endpoint route string + * + * @param {string} route Request method + URL. Example: `'GET /orgs/{org}'` + * @param {object} parameters? URL, query or body parameters, as well as `headers`, `mediaType.{format|previews}`, `request`, or `baseUrl`. + */ + (route: R, parameters?: R extends keyof PaginatingEndpoints ? PaginatingEndpoints[R]["parameters"] : OctokitTypes.RequestParameters): Promise; + /** + * Paginate a request using an endpoint method and a map function + * + * @param {string} request Request method (`octokit.request` or `@octokit/request`) + * @param {function} mapFn? Optional method to map each response to a custom array + */ + (request: R, mapFn: MapFunction>, M>): Promise; + /** + * Paginate a request using an endpoint method, parameters, and a map function + * + * @param {string} request Request method (`octokit.request` or `@octokit/request`) + * @param {object} parameters URL, query or body parameters, as well as `headers`, `mediaType.{format|previews}`, `request`, or `baseUrl`. + * @param {function} mapFn? Optional method to map each response to a custom array + */ + (request: R, parameters: Parameters[0], mapFn: MapFunction>, M>): Promise; + /** + * Paginate a request using an endpoint method and parameters + * + * @param {string} request Request method (`octokit.request` or `@octokit/request`) + * @param {object} parameters? URL, query or body parameters, as well as `headers`, `mediaType.{format|previews}`, `request`, or `baseUrl`. 
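For orientation, a minimal sketch of how the `paginate` overloads documented above are typically called, using a known route string, parameters, and an optional map function. The `auth` token, `octocat/hello-world` repository, and `per_page` value are placeholders, not part of this declaration file.

```ts
import { Octokit } from "@octokit/core";
import { paginateRest } from "@octokit/plugin-paginate-rest";

// Register the plugin so `octokit.paginate` exists on the instance.
const MyOctokit = Octokit.plugin(paginateRest);
const octokit = new MyOctokit({ auth: "secret123" });

// Route string + parameters + optional map function: the map function is called
// once per page and its return values are flattened into a single array.
const titles = await octokit.paginate(
  "GET /repos/{owner}/{repo}/issues",
  { owner: "octocat", repo: "hello-world", per_page: 100 },
  (response) => response.data.map((issue) => issue.title)
);
console.log(`fetched ${titles.length} issue titles`);
```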
+ */ + (request: R, parameters?: Parameters[0]): Promise>["data"]>; + iterator: { + /** + * Get an async iterator to paginate a request using endpoint options + * + * @see {link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/for-await...of} for await...of + * @param {object} options Must set `method` and `url`. Plus URL, query or body parameters, as well as `headers`, `mediaType.{format|previews}`, `request`, or `baseUrl`. + */ + (options: OctokitTypes.EndpointOptions): AsyncIterableIterator>>; + /** + * Get an async iterator to paginate a request using a known endpoint route string and optional parameters + * + * @see {link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/for-await...of} for await...of + * @param {string} route Request method + URL. Example: `'GET /orgs/{org}'` + * @param {object} [parameters] URL, query or body parameters, as well as `headers`, `mediaType.{format|previews}`, `request`, or `baseUrl`. + */ + (route: R, parameters?: PaginatingEndpoints[R]["parameters"]): AsyncIterableIterator>>; + /** + * Get an async iterator to paginate a request using an unknown endpoint route string and optional parameters + * + * @see {link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/for-await...of} for await...of + * @param {string} route Request method + URL. Example: `'GET /orgs/{org}'` + * @param {object} [parameters] URL, query or body parameters, as well as `headers`, `mediaType.{format|previews}`, `request`, or `baseUrl`. + */ + (route: R, parameters?: R extends keyof PaginatingEndpoints ? PaginatingEndpoints[R]["parameters"] : OctokitTypes.RequestParameters): AsyncIterableIterator>>; + /** + * Get an async iterator to paginate a request using a request method and optional parameters + * + * @see {link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/for-await...of} for await...of + * @param {string} request `@octokit/request` or `octokit.request` method + * @param {object} [parameters] URL, query or body parameters, as well as `headers`, `mediaType.{format|previews}`, `request`, or `baseUrl`. + */ + (request: R, parameters?: Parameters[0]): AsyncIterableIterator>>; + }; +} +export interface ComposePaginateInterface { + /** + * Paginate a request using endpoint options and map each response to a custom array + * + * @param {object} octokit Octokit instance + * @param {object} options Must set `method` and `url`. Plus URL, query or body parameters, as well as `headers`, `mediaType.{format|previews}`, `request`, or `baseUrl`. + * @param {function} mapFn Optional method to map each response to a custom array + */ + (octokit: Octokit, options: OctokitTypes.EndpointOptions, mapFn: MapFunction>, M[]>): Promise>; + /** + * Paginate a request using endpoint options + * + * @param {object} octokit Octokit instance + * @param {object} options Must set `method` and `url`. Plus URL, query or body parameters, as well as `headers`, `mediaType.{format|previews}`, `request`, or `baseUrl`. + */ + (octokit: Octokit, options: OctokitTypes.EndpointOptions): Promise>; + /** + * Paginate a request using a known endpoint route string and map each response to a custom array + * + * @param {object} octokit Octokit instance + * @param {string} route Request method + URL. 
Example: `'GET /orgs/{org}'` + * @param {function} mapFn Optional method to map each response to a custom array + */ + (octokit: Octokit, route: R, mapFn: MapFunction): Promise; + /** + * Paginate a request using a known endpoint route string and parameters, and map each response to a custom array + * + * @param {object} octokit Octokit instance + * @param {string} route Request method + URL. Example: `'GET /orgs/{org}'` + * @param {object} parameters URL, query or body parameters, as well as `headers`, `mediaType.{format|previews}`, `request`, or `baseUrl`. + * @param {function} mapFn Optional method to map each response to a custom array + */ + (octokit: Octokit, route: R, parameters: PaginatingEndpoints[R]["parameters"], mapFn: MapFunction): Promise; + /** + * Paginate a request using an known endpoint route string + * + * @param {object} octokit Octokit instance + * @param {string} route Request method + URL. Example: `'GET /orgs/{org}'` + * @param {object} parameters? URL, query or body parameters, as well as `headers`, `mediaType.{format|previews}`, `request`, or `baseUrl`. + */ + (octokit: Octokit, route: R, parameters?: PaginatingEndpoints[R]["parameters"]): Promise>; + /** + * Paginate a request using an unknown endpoint route string + * + * @param {object} octokit Octokit instance + * @param {string} route Request method + URL. Example: `'GET /orgs/{org}'` + * @param {object} parameters? URL, query or body parameters, as well as `headers`, `mediaType.{format|previews}`, `request`, or `baseUrl`. + */ + (octokit: Octokit, route: R, parameters?: R extends keyof PaginatingEndpoints ? PaginatingEndpoints[R]["parameters"] : OctokitTypes.RequestParameters): Promise; + /** + * Paginate a request using an endpoint method and a map function + * + * @param {object} octokit Octokit instance + * @param {string} request Request method (`octokit.request` or `@octokit/request`) + * @param {function} mapFn? Optional method to map each response to a custom array + */ + (octokit: Octokit, request: R, mapFn: MapFunction>, M>): Promise; + /** + * Paginate a request using an endpoint method, parameters, and a map function + * + * @param {object} octokit Octokit instance + * @param {string} request Request method (`octokit.request` or `@octokit/request`) + * @param {object} parameters URL, query or body parameters, as well as `headers`, `mediaType.{format|previews}`, `request`, or `baseUrl`. + * @param {function} mapFn? Optional method to map each response to a custom array + */ + (octokit: Octokit, request: R, parameters: Parameters[0], mapFn: MapFunction>, M>): Promise; + /** + * Paginate a request using an endpoint method and parameters + * + * @param {object} octokit Octokit instance + * @param {string} request Request method (`octokit.request` or `@octokit/request`) + * @param {object} parameters? URL, query or body parameters, as well as `headers`, `mediaType.{format|previews}`, `request`, or `baseUrl`. + */ + (octokit: Octokit, request: R, parameters?: Parameters[0]): Promise>["data"]>; + iterator: { + /** + * Get an async iterator to paginate a request using endpoint options + * + * @see {link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/for-await...of} for await...of + * + * @param {object} octokit Octokit instance + * @param {object} options Must set `method` and `url`. Plus URL, query or body parameters, as well as `headers`, `mediaType.{format|previews}`, `request`, or `baseUrl`. 
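A hedged sketch of the `composePaginateRest` form described here, which takes the Octokit instance as its first argument; its `.iterator` yields one response per page for use with `for await...of`. The organization name and `per_page` value are illustrative only.

```ts
import { Octokit } from "@octokit/core";
import { composePaginateRest } from "@octokit/plugin-paginate-rest";

const octokit = new Octokit({ auth: "secret123" });

// Iterate page by page instead of collecting every result up front.
for await (const response of composePaginateRest.iterator(
  octokit,
  "GET /orgs/{org}/repos",
  { org: "octokit", per_page: 100 }
)) {
  console.log(`page with ${response.data.length} repositories`);
}
```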
+ */ + (octokit: Octokit, options: OctokitTypes.EndpointOptions): AsyncIterableIterator>>; + /** + * Get an async iterator to paginate a request using a known endpoint route string and optional parameters + * + * @see {link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/for-await...of} for await...of + * + * @param {object} octokit Octokit instance + * @param {string} route Request method + URL. Example: `'GET /orgs/{org}'` + * @param {object} [parameters] URL, query or body parameters, as well as `headers`, `mediaType.{format|previews}`, `request`, or `baseUrl`. + */ + (octokit: Octokit, route: R, parameters?: PaginatingEndpoints[R]["parameters"]): AsyncIterableIterator>>; + /** + * Get an async iterator to paginate a request using an unknown endpoint route string and optional parameters + * + * @see {link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/for-await...of} for await...of + * + * @param {object} octokit Octokit instance + * @param {string} route Request method + URL. Example: `'GET /orgs/{org}'` + * @param {object} [parameters] URL, query or body parameters, as well as `headers`, `mediaType.{format|previews}`, `request`, or `baseUrl`. + */ + (octokit: Octokit, route: R, parameters?: R extends keyof PaginatingEndpoints ? PaginatingEndpoints[R]["parameters"] : OctokitTypes.RequestParameters): AsyncIterableIterator>>; + /** + * Get an async iterator to paginate a request using a request method and optional parameters + * + * @see {link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/for-await...of} for await...of + * + * @param {object} octokit Octokit instance + * @param {string} request `@octokit/request` or `octokit.request` method + * @param {object} [parameters] URL, query or body parameters, as well as `headers`, `mediaType.{format|previews}`, `request`, or `baseUrl`. + */ + (octokit: Octokit, request: R, parameters?: Parameters[0]): AsyncIterableIterator>>; + }; +} diff --git a/node_modules/@octokit/plugin-paginate-rest/dist-types/version.d.ts b/node_modules/@octokit/plugin-paginate-rest/dist-types/version.d.ts new file mode 100644 index 0000000..77e3d51 --- /dev/null +++ b/node_modules/@octokit/plugin-paginate-rest/dist-types/version.d.ts @@ -0,0 +1 @@ +export declare const VERSION = "2.21.3"; diff --git a/node_modules/@octokit/plugin-paginate-rest/dist-web/index.js b/node_modules/@octokit/plugin-paginate-rest/dist-web/index.js new file mode 100644 index 0000000..e76659f --- /dev/null +++ b/node_modules/@octokit/plugin-paginate-rest/dist-web/index.js @@ -0,0 +1,358 @@ +const VERSION = "2.21.3"; + +/** + * Some “list” response that can be paginated have a different response structure + * + * They have a `total_count` key in the response (search also has `incomplete_results`, + * /installation/repositories also has `repository_selection`), as well as a key with + * the list of the items which name varies from endpoint to endpoint. + * + * Octokit normalizes these responses so that paginated results are always returned following + * the same structure. One challenge is that if the list response has only one page, no Link + * header is provided, so this header alone is not sufficient to check wether a response is + * paginated or not. 
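As a concrete illustration of the rule this comment describes, a small sketch with invented shapes (they are not taken from the package sources): a search-style page carries `total_count`, `incomplete_results`, and the item list under an endpoint-specific key, and normalization turns that into a plain array with the counters re-attached.

```ts
// Shapes invented for illustration: a search-style page before normalization.
const page = {
  data: {
    total_count: 2,
    incomplete_results: false,
    items: [{ id: 1 }, { id: 2 }],
  },
};

// The detection rule restated from this comment: `total_count` present, `url` absent.
const needsNormalization =
  "total_count" in page.data && !("url" in page.data);
console.log(needsNormalization); // true

// After normalization the page data is the item array itself, with the extra
// counters re-attached to it, e.g.:
//   data = Object.assign([{ id: 1 }, { id: 2 }], { total_count: 2, incomplete_results: false })
// A response like "Get the combined status for a specific ref", whose data has a
// `url` key, is passed through unchanged.
```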
+ * + * We check if a "total_count" key is present in the response data, but also make sure that + * a "url" property is not, as the "Get the combined status for a specific ref" endpoint would + * otherwise match: https://developer.github.com/v3/repos/statuses/#get-the-combined-status-for-a-specific-ref + */ +function normalizePaginatedListResponse(response) { + // endpoints can respond with 204 if repository is empty + if (!response.data) { + return { + ...response, + data: [], + }; + } + const responseNeedsNormalization = "total_count" in response.data && !("url" in response.data); + if (!responseNeedsNormalization) + return response; + // keep the additional properties intact as there is currently no other way + // to retrieve the same information. + const incompleteResults = response.data.incomplete_results; + const repositorySelection = response.data.repository_selection; + const totalCount = response.data.total_count; + delete response.data.incomplete_results; + delete response.data.repository_selection; + delete response.data.total_count; + const namespaceKey = Object.keys(response.data)[0]; + const data = response.data[namespaceKey]; + response.data = data; + if (typeof incompleteResults !== "undefined") { + response.data.incomplete_results = incompleteResults; + } + if (typeof repositorySelection !== "undefined") { + response.data.repository_selection = repositorySelection; + } + response.data.total_count = totalCount; + return response; +} + +function iterator(octokit, route, parameters) { + const options = typeof route === "function" + ? route.endpoint(parameters) + : octokit.request.endpoint(route, parameters); + const requestMethod = typeof route === "function" ? route : octokit.request; + const method = options.method; + const headers = options.headers; + let url = options.url; + return { + [Symbol.asyncIterator]: () => ({ + async next() { + if (!url) + return { done: true }; + try { + const response = await requestMethod({ method, url, headers }); + const normalizedResponse = normalizePaginatedListResponse(response); + // `response.headers.link` format: + // '; rel="next", ; rel="last"' + // sets `url` to undefined if "next" URL is not present or `link` header is not set + url = ((normalizedResponse.headers.link || "").match(/<([^>]+)>;\s*rel="next"/) || [])[1]; + return { value: normalizedResponse }; + } + catch (error) { + if (error.status !== 409) + throw error; + url = ""; + return { + value: { + status: 200, + headers: {}, + data: [], + }, + }; + } + }, + }), + }; +} + +function paginate(octokit, route, parameters, mapFn) { + if (typeof parameters === "function") { + mapFn = parameters; + parameters = undefined; + } + return gather(octokit, [], iterator(octokit, route, parameters)[Symbol.asyncIterator](), mapFn); +} +function gather(octokit, results, iterator, mapFn) { + return iterator.next().then((result) => { + if (result.done) { + return results; + } + let earlyExit = false; + function done() { + earlyExit = true; + } + results = results.concat(mapFn ? 
mapFn(result.value, done) : result.value.data); + if (earlyExit) { + return results; + } + return gather(octokit, results, iterator, mapFn); + }); +} + +const composePaginateRest = Object.assign(paginate, { + iterator, +}); + +const paginatingEndpoints = [ + "GET /app/hook/deliveries", + "GET /app/installations", + "GET /applications/grants", + "GET /authorizations", + "GET /enterprises/{enterprise}/actions/permissions/organizations", + "GET /enterprises/{enterprise}/actions/runner-groups", + "GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/organizations", + "GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/runners", + "GET /enterprises/{enterprise}/actions/runners", + "GET /enterprises/{enterprise}/audit-log", + "GET /enterprises/{enterprise}/secret-scanning/alerts", + "GET /enterprises/{enterprise}/settings/billing/advanced-security", + "GET /events", + "GET /gists", + "GET /gists/public", + "GET /gists/starred", + "GET /gists/{gist_id}/comments", + "GET /gists/{gist_id}/commits", + "GET /gists/{gist_id}/forks", + "GET /installation/repositories", + "GET /issues", + "GET /licenses", + "GET /marketplace_listing/plans", + "GET /marketplace_listing/plans/{plan_id}/accounts", + "GET /marketplace_listing/stubbed/plans", + "GET /marketplace_listing/stubbed/plans/{plan_id}/accounts", + "GET /networks/{owner}/{repo}/events", + "GET /notifications", + "GET /organizations", + "GET /orgs/{org}/actions/cache/usage-by-repository", + "GET /orgs/{org}/actions/permissions/repositories", + "GET /orgs/{org}/actions/runner-groups", + "GET /orgs/{org}/actions/runner-groups/{runner_group_id}/repositories", + "GET /orgs/{org}/actions/runner-groups/{runner_group_id}/runners", + "GET /orgs/{org}/actions/runners", + "GET /orgs/{org}/actions/secrets", + "GET /orgs/{org}/actions/secrets/{secret_name}/repositories", + "GET /orgs/{org}/audit-log", + "GET /orgs/{org}/blocks", + "GET /orgs/{org}/code-scanning/alerts", + "GET /orgs/{org}/codespaces", + "GET /orgs/{org}/credential-authorizations", + "GET /orgs/{org}/dependabot/secrets", + "GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories", + "GET /orgs/{org}/events", + "GET /orgs/{org}/external-groups", + "GET /orgs/{org}/failed_invitations", + "GET /orgs/{org}/hooks", + "GET /orgs/{org}/hooks/{hook_id}/deliveries", + "GET /orgs/{org}/installations", + "GET /orgs/{org}/invitations", + "GET /orgs/{org}/invitations/{invitation_id}/teams", + "GET /orgs/{org}/issues", + "GET /orgs/{org}/members", + "GET /orgs/{org}/migrations", + "GET /orgs/{org}/migrations/{migration_id}/repositories", + "GET /orgs/{org}/outside_collaborators", + "GET /orgs/{org}/packages", + "GET /orgs/{org}/packages/{package_type}/{package_name}/versions", + "GET /orgs/{org}/projects", + "GET /orgs/{org}/public_members", + "GET /orgs/{org}/repos", + "GET /orgs/{org}/secret-scanning/alerts", + "GET /orgs/{org}/settings/billing/advanced-security", + "GET /orgs/{org}/team-sync/groups", + "GET /orgs/{org}/teams", + "GET /orgs/{org}/teams/{team_slug}/discussions", + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments", + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", + "GET /orgs/{org}/teams/{team_slug}/invitations", + "GET /orgs/{org}/teams/{team_slug}/members", + "GET /orgs/{org}/teams/{team_slug}/projects", + "GET /orgs/{org}/teams/{team_slug}/repos", + "GET 
/orgs/{org}/teams/{team_slug}/teams", + "GET /projects/columns/{column_id}/cards", + "GET /projects/{project_id}/collaborators", + "GET /projects/{project_id}/columns", + "GET /repos/{owner}/{repo}/actions/artifacts", + "GET /repos/{owner}/{repo}/actions/caches", + "GET /repos/{owner}/{repo}/actions/runners", + "GET /repos/{owner}/{repo}/actions/runs", + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts", + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs", + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs", + "GET /repos/{owner}/{repo}/actions/secrets", + "GET /repos/{owner}/{repo}/actions/workflows", + "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs", + "GET /repos/{owner}/{repo}/assignees", + "GET /repos/{owner}/{repo}/branches", + "GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations", + "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs", + "GET /repos/{owner}/{repo}/code-scanning/alerts", + "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", + "GET /repos/{owner}/{repo}/code-scanning/analyses", + "GET /repos/{owner}/{repo}/codespaces", + "GET /repos/{owner}/{repo}/codespaces/devcontainers", + "GET /repos/{owner}/{repo}/codespaces/secrets", + "GET /repos/{owner}/{repo}/collaborators", + "GET /repos/{owner}/{repo}/comments", + "GET /repos/{owner}/{repo}/comments/{comment_id}/reactions", + "GET /repos/{owner}/{repo}/commits", + "GET /repos/{owner}/{repo}/commits/{commit_sha}/comments", + "GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls", + "GET /repos/{owner}/{repo}/commits/{ref}/check-runs", + "GET /repos/{owner}/{repo}/commits/{ref}/check-suites", + "GET /repos/{owner}/{repo}/commits/{ref}/status", + "GET /repos/{owner}/{repo}/commits/{ref}/statuses", + "GET /repos/{owner}/{repo}/contributors", + "GET /repos/{owner}/{repo}/dependabot/secrets", + "GET /repos/{owner}/{repo}/deployments", + "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses", + "GET /repos/{owner}/{repo}/environments", + "GET /repos/{owner}/{repo}/events", + "GET /repos/{owner}/{repo}/forks", + "GET /repos/{owner}/{repo}/git/matching-refs/{ref}", + "GET /repos/{owner}/{repo}/hooks", + "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries", + "GET /repos/{owner}/{repo}/invitations", + "GET /repos/{owner}/{repo}/issues", + "GET /repos/{owner}/{repo}/issues/comments", + "GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", + "GET /repos/{owner}/{repo}/issues/events", + "GET /repos/{owner}/{repo}/issues/{issue_number}/comments", + "GET /repos/{owner}/{repo}/issues/{issue_number}/events", + "GET /repos/{owner}/{repo}/issues/{issue_number}/labels", + "GET /repos/{owner}/{repo}/issues/{issue_number}/reactions", + "GET /repos/{owner}/{repo}/issues/{issue_number}/timeline", + "GET /repos/{owner}/{repo}/keys", + "GET /repos/{owner}/{repo}/labels", + "GET /repos/{owner}/{repo}/milestones", + "GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels", + "GET /repos/{owner}/{repo}/notifications", + "GET /repos/{owner}/{repo}/pages/builds", + "GET /repos/{owner}/{repo}/projects", + "GET /repos/{owner}/{repo}/pulls", + "GET /repos/{owner}/{repo}/pulls/comments", + "GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", + "GET /repos/{owner}/{repo}/pulls/{pull_number}/comments", + "GET /repos/{owner}/{repo}/pulls/{pull_number}/commits", + "GET /repos/{owner}/{repo}/pulls/{pull_number}/files", + "GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers", + "GET 
/repos/{owner}/{repo}/pulls/{pull_number}/reviews", + "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments", + "GET /repos/{owner}/{repo}/releases", + "GET /repos/{owner}/{repo}/releases/{release_id}/assets", + "GET /repos/{owner}/{repo}/releases/{release_id}/reactions", + "GET /repos/{owner}/{repo}/secret-scanning/alerts", + "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations", + "GET /repos/{owner}/{repo}/stargazers", + "GET /repos/{owner}/{repo}/subscribers", + "GET /repos/{owner}/{repo}/tags", + "GET /repos/{owner}/{repo}/teams", + "GET /repos/{owner}/{repo}/topics", + "GET /repositories", + "GET /repositories/{repository_id}/environments/{environment_name}/secrets", + "GET /search/code", + "GET /search/commits", + "GET /search/issues", + "GET /search/labels", + "GET /search/repositories", + "GET /search/topics", + "GET /search/users", + "GET /teams/{team_id}/discussions", + "GET /teams/{team_id}/discussions/{discussion_number}/comments", + "GET /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions", + "GET /teams/{team_id}/discussions/{discussion_number}/reactions", + "GET /teams/{team_id}/invitations", + "GET /teams/{team_id}/members", + "GET /teams/{team_id}/projects", + "GET /teams/{team_id}/repos", + "GET /teams/{team_id}/teams", + "GET /user/blocks", + "GET /user/codespaces", + "GET /user/codespaces/secrets", + "GET /user/emails", + "GET /user/followers", + "GET /user/following", + "GET /user/gpg_keys", + "GET /user/installations", + "GET /user/installations/{installation_id}/repositories", + "GET /user/issues", + "GET /user/keys", + "GET /user/marketplace_purchases", + "GET /user/marketplace_purchases/stubbed", + "GET /user/memberships/orgs", + "GET /user/migrations", + "GET /user/migrations/{migration_id}/repositories", + "GET /user/orgs", + "GET /user/packages", + "GET /user/packages/{package_type}/{package_name}/versions", + "GET /user/public_emails", + "GET /user/repos", + "GET /user/repository_invitations", + "GET /user/starred", + "GET /user/subscriptions", + "GET /user/teams", + "GET /users", + "GET /users/{username}/events", + "GET /users/{username}/events/orgs/{org}", + "GET /users/{username}/events/public", + "GET /users/{username}/followers", + "GET /users/{username}/following", + "GET /users/{username}/gists", + "GET /users/{username}/gpg_keys", + "GET /users/{username}/keys", + "GET /users/{username}/orgs", + "GET /users/{username}/packages", + "GET /users/{username}/projects", + "GET /users/{username}/received_events", + "GET /users/{username}/received_events/public", + "GET /users/{username}/repos", + "GET /users/{username}/starred", + "GET /users/{username}/subscriptions", +]; + +function isPaginatingEndpoint(arg) { + if (typeof arg === "string") { + return paginatingEndpoints.includes(arg); + } + else { + return false; + } +} + +/** + * @param octokit Octokit instance + * @param options Options passed to Octokit constructor + */ +function paginateRest(octokit) { + return { + paginate: Object.assign(paginate.bind(null, octokit), { + iterator: iterator.bind(null, octokit), + }), + }; +} +paginateRest.VERSION = VERSION; + +export { composePaginateRest, isPaginatingEndpoint, paginateRest, paginatingEndpoints }; +//# sourceMappingURL=index.js.map diff --git a/node_modules/@octokit/plugin-paginate-rest/dist-web/index.js.map b/node_modules/@octokit/plugin-paginate-rest/dist-web/index.js.map new file mode 100644 index 0000000..878b8e4 --- /dev/null +++ 
b/node_modules/@octokit/plugin-paginate-rest/dist-web/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sources":["../dist-src/version.js","../dist-src/normalize-paginated-list-response.js","../dist-src/iterator.js","../dist-src/paginate.js","../dist-src/compose-paginate.js","../dist-src/generated/paginating-endpoints.js","../dist-src/paginating-endpoints.js","../dist-src/index.js"],"sourcesContent":["export const VERSION = \"2.21.3\";\n","/**\n * Some “list” response that can be paginated have a different response structure\n *\n * They have a `total_count` key in the response (search also has `incomplete_results`,\n * /installation/repositories also has `repository_selection`), as well as a key with\n * the list of the items which name varies from endpoint to endpoint.\n *\n * Octokit normalizes these responses so that paginated results are always returned following\n * the same structure. One challenge is that if the list response has only one page, no Link\n * header is provided, so this header alone is not sufficient to check wether a response is\n * paginated or not.\n *\n * We check if a \"total_count\" key is present in the response data, but also make sure that\n * a \"url\" property is not, as the \"Get the combined status for a specific ref\" endpoint would\n * otherwise match: https://developer.github.com/v3/repos/statuses/#get-the-combined-status-for-a-specific-ref\n */\nexport function normalizePaginatedListResponse(response) {\n // endpoints can respond with 204 if repository is empty\n if (!response.data) {\n return {\n ...response,\n data: [],\n };\n }\n const responseNeedsNormalization = \"total_count\" in response.data && !(\"url\" in response.data);\n if (!responseNeedsNormalization)\n return response;\n // keep the additional properties intact as there is currently no other way\n // to retrieve the same information.\n const incompleteResults = response.data.incomplete_results;\n const repositorySelection = response.data.repository_selection;\n const totalCount = response.data.total_count;\n delete response.data.incomplete_results;\n delete response.data.repository_selection;\n delete response.data.total_count;\n const namespaceKey = Object.keys(response.data)[0];\n const data = response.data[namespaceKey];\n response.data = data;\n if (typeof incompleteResults !== \"undefined\") {\n response.data.incomplete_results = incompleteResults;\n }\n if (typeof repositorySelection !== \"undefined\") {\n response.data.repository_selection = repositorySelection;\n }\n response.data.total_count = totalCount;\n return response;\n}\n","import { normalizePaginatedListResponse } from \"./normalize-paginated-list-response\";\nexport function iterator(octokit, route, parameters) {\n const options = typeof route === \"function\"\n ? route.endpoint(parameters)\n : octokit.request.endpoint(route, parameters);\n const requestMethod = typeof route === \"function\" ? 
route : octokit.request;\n const method = options.method;\n const headers = options.headers;\n let url = options.url;\n return {\n [Symbol.asyncIterator]: () => ({\n async next() {\n if (!url)\n return { done: true };\n try {\n const response = await requestMethod({ method, url, headers });\n const normalizedResponse = normalizePaginatedListResponse(response);\n // `response.headers.link` format:\n // '; rel=\"next\", ; rel=\"last\"'\n // sets `url` to undefined if \"next\" URL is not present or `link` header is not set\n url = ((normalizedResponse.headers.link || \"\").match(/<([^>]+)>;\\s*rel=\"next\"/) || [])[1];\n return { value: normalizedResponse };\n }\n catch (error) {\n if (error.status !== 409)\n throw error;\n url = \"\";\n return {\n value: {\n status: 200,\n headers: {},\n data: [],\n },\n };\n }\n },\n }),\n };\n}\n","import { iterator } from \"./iterator\";\nexport function paginate(octokit, route, parameters, mapFn) {\n if (typeof parameters === \"function\") {\n mapFn = parameters;\n parameters = undefined;\n }\n return gather(octokit, [], iterator(octokit, route, parameters)[Symbol.asyncIterator](), mapFn);\n}\nfunction gather(octokit, results, iterator, mapFn) {\n return iterator.next().then((result) => {\n if (result.done) {\n return results;\n }\n let earlyExit = false;\n function done() {\n earlyExit = true;\n }\n results = results.concat(mapFn ? mapFn(result.value, done) : result.value.data);\n if (earlyExit) {\n return results;\n }\n return gather(octokit, results, iterator, mapFn);\n });\n}\n","import { paginate } from \"./paginate\";\nimport { iterator } from \"./iterator\";\nexport const composePaginateRest = Object.assign(paginate, {\n iterator,\n});\n","export const paginatingEndpoints = [\n \"GET /app/hook/deliveries\",\n \"GET /app/installations\",\n \"GET /applications/grants\",\n \"GET /authorizations\",\n \"GET /enterprises/{enterprise}/actions/permissions/organizations\",\n \"GET /enterprises/{enterprise}/actions/runner-groups\",\n \"GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/organizations\",\n \"GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/runners\",\n \"GET /enterprises/{enterprise}/actions/runners\",\n \"GET /enterprises/{enterprise}/audit-log\",\n \"GET /enterprises/{enterprise}/secret-scanning/alerts\",\n \"GET /enterprises/{enterprise}/settings/billing/advanced-security\",\n \"GET /events\",\n \"GET /gists\",\n \"GET /gists/public\",\n \"GET /gists/starred\",\n \"GET /gists/{gist_id}/comments\",\n \"GET /gists/{gist_id}/commits\",\n \"GET /gists/{gist_id}/forks\",\n \"GET /installation/repositories\",\n \"GET /issues\",\n \"GET /licenses\",\n \"GET /marketplace_listing/plans\",\n \"GET /marketplace_listing/plans/{plan_id}/accounts\",\n \"GET /marketplace_listing/stubbed/plans\",\n \"GET /marketplace_listing/stubbed/plans/{plan_id}/accounts\",\n \"GET /networks/{owner}/{repo}/events\",\n \"GET /notifications\",\n \"GET /organizations\",\n \"GET /orgs/{org}/actions/cache/usage-by-repository\",\n \"GET /orgs/{org}/actions/permissions/repositories\",\n \"GET /orgs/{org}/actions/runner-groups\",\n \"GET /orgs/{org}/actions/runner-groups/{runner_group_id}/repositories\",\n \"GET /orgs/{org}/actions/runner-groups/{runner_group_id}/runners\",\n \"GET /orgs/{org}/actions/runners\",\n \"GET /orgs/{org}/actions/secrets\",\n \"GET /orgs/{org}/actions/secrets/{secret_name}/repositories\",\n \"GET /orgs/{org}/audit-log\",\n \"GET /orgs/{org}/blocks\",\n \"GET /orgs/{org}/code-scanning/alerts\",\n \"GET 
/orgs/{org}/codespaces\",\n \"GET /orgs/{org}/credential-authorizations\",\n \"GET /orgs/{org}/dependabot/secrets\",\n \"GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories\",\n \"GET /orgs/{org}/events\",\n \"GET /orgs/{org}/external-groups\",\n \"GET /orgs/{org}/failed_invitations\",\n \"GET /orgs/{org}/hooks\",\n \"GET /orgs/{org}/hooks/{hook_id}/deliveries\",\n \"GET /orgs/{org}/installations\",\n \"GET /orgs/{org}/invitations\",\n \"GET /orgs/{org}/invitations/{invitation_id}/teams\",\n \"GET /orgs/{org}/issues\",\n \"GET /orgs/{org}/members\",\n \"GET /orgs/{org}/migrations\",\n \"GET /orgs/{org}/migrations/{migration_id}/repositories\",\n \"GET /orgs/{org}/outside_collaborators\",\n \"GET /orgs/{org}/packages\",\n \"GET /orgs/{org}/packages/{package_type}/{package_name}/versions\",\n \"GET /orgs/{org}/projects\",\n \"GET /orgs/{org}/public_members\",\n \"GET /orgs/{org}/repos\",\n \"GET /orgs/{org}/secret-scanning/alerts\",\n \"GET /orgs/{org}/settings/billing/advanced-security\",\n \"GET /orgs/{org}/team-sync/groups\",\n \"GET /orgs/{org}/teams\",\n \"GET /orgs/{org}/teams/{team_slug}/discussions\",\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments\",\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions\",\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions\",\n \"GET /orgs/{org}/teams/{team_slug}/invitations\",\n \"GET /orgs/{org}/teams/{team_slug}/members\",\n \"GET /orgs/{org}/teams/{team_slug}/projects\",\n \"GET /orgs/{org}/teams/{team_slug}/repos\",\n \"GET /orgs/{org}/teams/{team_slug}/teams\",\n \"GET /projects/columns/{column_id}/cards\",\n \"GET /projects/{project_id}/collaborators\",\n \"GET /projects/{project_id}/columns\",\n \"GET /repos/{owner}/{repo}/actions/artifacts\",\n \"GET /repos/{owner}/{repo}/actions/caches\",\n \"GET /repos/{owner}/{repo}/actions/runners\",\n \"GET /repos/{owner}/{repo}/actions/runs\",\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts\",\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs\",\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs\",\n \"GET /repos/{owner}/{repo}/actions/secrets\",\n \"GET /repos/{owner}/{repo}/actions/workflows\",\n \"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs\",\n \"GET /repos/{owner}/{repo}/assignees\",\n \"GET /repos/{owner}/{repo}/branches\",\n \"GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations\",\n \"GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs\",\n \"GET /repos/{owner}/{repo}/code-scanning/alerts\",\n \"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances\",\n \"GET /repos/{owner}/{repo}/code-scanning/analyses\",\n \"GET /repos/{owner}/{repo}/codespaces\",\n \"GET /repos/{owner}/{repo}/codespaces/devcontainers\",\n \"GET /repos/{owner}/{repo}/codespaces/secrets\",\n \"GET /repos/{owner}/{repo}/collaborators\",\n \"GET /repos/{owner}/{repo}/comments\",\n \"GET /repos/{owner}/{repo}/comments/{comment_id}/reactions\",\n \"GET /repos/{owner}/{repo}/commits\",\n \"GET /repos/{owner}/{repo}/commits/{commit_sha}/comments\",\n \"GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls\",\n \"GET /repos/{owner}/{repo}/commits/{ref}/check-runs\",\n \"GET /repos/{owner}/{repo}/commits/{ref}/check-suites\",\n \"GET /repos/{owner}/{repo}/commits/{ref}/status\",\n \"GET /repos/{owner}/{repo}/commits/{ref}/statuses\",\n \"GET /repos/{owner}/{repo}/contributors\",\n \"GET 
/repos/{owner}/{repo}/dependabot/secrets\",\n \"GET /repos/{owner}/{repo}/deployments\",\n \"GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses\",\n \"GET /repos/{owner}/{repo}/environments\",\n \"GET /repos/{owner}/{repo}/events\",\n \"GET /repos/{owner}/{repo}/forks\",\n \"GET /repos/{owner}/{repo}/git/matching-refs/{ref}\",\n \"GET /repos/{owner}/{repo}/hooks\",\n \"GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries\",\n \"GET /repos/{owner}/{repo}/invitations\",\n \"GET /repos/{owner}/{repo}/issues\",\n \"GET /repos/{owner}/{repo}/issues/comments\",\n \"GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions\",\n \"GET /repos/{owner}/{repo}/issues/events\",\n \"GET /repos/{owner}/{repo}/issues/{issue_number}/comments\",\n \"GET /repos/{owner}/{repo}/issues/{issue_number}/events\",\n \"GET /repos/{owner}/{repo}/issues/{issue_number}/labels\",\n \"GET /repos/{owner}/{repo}/issues/{issue_number}/reactions\",\n \"GET /repos/{owner}/{repo}/issues/{issue_number}/timeline\",\n \"GET /repos/{owner}/{repo}/keys\",\n \"GET /repos/{owner}/{repo}/labels\",\n \"GET /repos/{owner}/{repo}/milestones\",\n \"GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels\",\n \"GET /repos/{owner}/{repo}/notifications\",\n \"GET /repos/{owner}/{repo}/pages/builds\",\n \"GET /repos/{owner}/{repo}/projects\",\n \"GET /repos/{owner}/{repo}/pulls\",\n \"GET /repos/{owner}/{repo}/pulls/comments\",\n \"GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions\",\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/comments\",\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/commits\",\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/files\",\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\",\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews\",\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments\",\n \"GET /repos/{owner}/{repo}/releases\",\n \"GET /repos/{owner}/{repo}/releases/{release_id}/assets\",\n \"GET /repos/{owner}/{repo}/releases/{release_id}/reactions\",\n \"GET /repos/{owner}/{repo}/secret-scanning/alerts\",\n \"GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations\",\n \"GET /repos/{owner}/{repo}/stargazers\",\n \"GET /repos/{owner}/{repo}/subscribers\",\n \"GET /repos/{owner}/{repo}/tags\",\n \"GET /repos/{owner}/{repo}/teams\",\n \"GET /repos/{owner}/{repo}/topics\",\n \"GET /repositories\",\n \"GET /repositories/{repository_id}/environments/{environment_name}/secrets\",\n \"GET /search/code\",\n \"GET /search/commits\",\n \"GET /search/issues\",\n \"GET /search/labels\",\n \"GET /search/repositories\",\n \"GET /search/topics\",\n \"GET /search/users\",\n \"GET /teams/{team_id}/discussions\",\n \"GET /teams/{team_id}/discussions/{discussion_number}/comments\",\n \"GET /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions\",\n \"GET /teams/{team_id}/discussions/{discussion_number}/reactions\",\n \"GET /teams/{team_id}/invitations\",\n \"GET /teams/{team_id}/members\",\n \"GET /teams/{team_id}/projects\",\n \"GET /teams/{team_id}/repos\",\n \"GET /teams/{team_id}/teams\",\n \"GET /user/blocks\",\n \"GET /user/codespaces\",\n \"GET /user/codespaces/secrets\",\n \"GET /user/emails\",\n \"GET /user/followers\",\n \"GET /user/following\",\n \"GET /user/gpg_keys\",\n \"GET /user/installations\",\n \"GET /user/installations/{installation_id}/repositories\",\n \"GET /user/issues\",\n \"GET /user/keys\",\n \"GET /user/marketplace_purchases\",\n \"GET 
/user/marketplace_purchases/stubbed\",\n \"GET /user/memberships/orgs\",\n \"GET /user/migrations\",\n \"GET /user/migrations/{migration_id}/repositories\",\n \"GET /user/orgs\",\n \"GET /user/packages\",\n \"GET /user/packages/{package_type}/{package_name}/versions\",\n \"GET /user/public_emails\",\n \"GET /user/repos\",\n \"GET /user/repository_invitations\",\n \"GET /user/starred\",\n \"GET /user/subscriptions\",\n \"GET /user/teams\",\n \"GET /users\",\n \"GET /users/{username}/events\",\n \"GET /users/{username}/events/orgs/{org}\",\n \"GET /users/{username}/events/public\",\n \"GET /users/{username}/followers\",\n \"GET /users/{username}/following\",\n \"GET /users/{username}/gists\",\n \"GET /users/{username}/gpg_keys\",\n \"GET /users/{username}/keys\",\n \"GET /users/{username}/orgs\",\n \"GET /users/{username}/packages\",\n \"GET /users/{username}/projects\",\n \"GET /users/{username}/received_events\",\n \"GET /users/{username}/received_events/public\",\n \"GET /users/{username}/repos\",\n \"GET /users/{username}/starred\",\n \"GET /users/{username}/subscriptions\",\n];\n","import { paginatingEndpoints, } from \"./generated/paginating-endpoints\";\nexport { paginatingEndpoints } from \"./generated/paginating-endpoints\";\nexport function isPaginatingEndpoint(arg) {\n if (typeof arg === \"string\") {\n return paginatingEndpoints.includes(arg);\n }\n else {\n return false;\n }\n}\n","import { VERSION } from \"./version\";\nimport { paginate } from \"./paginate\";\nimport { iterator } from \"./iterator\";\nexport { composePaginateRest } from \"./compose-paginate\";\nexport { isPaginatingEndpoint, paginatingEndpoints, } from \"./paginating-endpoints\";\n/**\n * @param octokit Octokit instance\n * @param options Options passed to Octokit constructor\n */\nexport function paginateRest(octokit) {\n return {\n paginate: Object.assign(paginate.bind(null, octokit), {\n iterator: iterator.bind(null, octokit),\n }),\n };\n}\npaginateRest.VERSION = 
VERSION;\n"],"names":[],"mappings":"AAAO,MAAM,OAAO,GAAG,mBAAmB;;ACA1C;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,AAAO,SAAS,8BAA8B,CAAC,QAAQ,EAAE;AACzD;AACA,IAAI,IAAI,CAAC,QAAQ,CAAC,IAAI,EAAE;AACxB,QAAQ,OAAO;AACf,YAAY,GAAG,QAAQ;AACvB,YAAY,IAAI,EAAE,EAAE;AACpB,SAAS,CAAC;AACV,KAAK;AACL,IAAI,MAAM,0BAA0B,GAAG,aAAa,IAAI,QAAQ,CAAC,IAAI,IAAI,EAAE,KAAK,IAAI,QAAQ,CAAC,IAAI,CAAC,CAAC;AACnG,IAAI,IAAI,CAAC,0BAA0B;AACnC,QAAQ,OAAO,QAAQ,CAAC;AACxB;AACA;AACA,IAAI,MAAM,iBAAiB,GAAG,QAAQ,CAAC,IAAI,CAAC,kBAAkB,CAAC;AAC/D,IAAI,MAAM,mBAAmB,GAAG,QAAQ,CAAC,IAAI,CAAC,oBAAoB,CAAC;AACnE,IAAI,MAAM,UAAU,GAAG,QAAQ,CAAC,IAAI,CAAC,WAAW,CAAC;AACjD,IAAI,OAAO,QAAQ,CAAC,IAAI,CAAC,kBAAkB,CAAC;AAC5C,IAAI,OAAO,QAAQ,CAAC,IAAI,CAAC,oBAAoB,CAAC;AAC9C,IAAI,OAAO,QAAQ,CAAC,IAAI,CAAC,WAAW,CAAC;AACrC,IAAI,MAAM,YAAY,GAAG,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC;AACvD,IAAI,MAAM,IAAI,GAAG,QAAQ,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;AAC7C,IAAI,QAAQ,CAAC,IAAI,GAAG,IAAI,CAAC;AACzB,IAAI,IAAI,OAAO,iBAAiB,KAAK,WAAW,EAAE;AAClD,QAAQ,QAAQ,CAAC,IAAI,CAAC,kBAAkB,GAAG,iBAAiB,CAAC;AAC7D,KAAK;AACL,IAAI,IAAI,OAAO,mBAAmB,KAAK,WAAW,EAAE;AACpD,QAAQ,QAAQ,CAAC,IAAI,CAAC,oBAAoB,GAAG,mBAAmB,CAAC;AACjE,KAAK;AACL,IAAI,QAAQ,CAAC,IAAI,CAAC,WAAW,GAAG,UAAU,CAAC;AAC3C,IAAI,OAAO,QAAQ,CAAC;AACpB,CAAC;;AC7CM,SAAS,QAAQ,CAAC,OAAO,EAAE,KAAK,EAAE,UAAU,EAAE;AACrD,IAAI,MAAM,OAAO,GAAG,OAAO,KAAK,KAAK,UAAU;AAC/C,UAAU,KAAK,CAAC,QAAQ,CAAC,UAAU,CAAC;AACpC,UAAU,OAAO,CAAC,OAAO,CAAC,QAAQ,CAAC,KAAK,EAAE,UAAU,CAAC,CAAC;AACtD,IAAI,MAAM,aAAa,GAAG,OAAO,KAAK,KAAK,UAAU,GAAG,KAAK,GAAG,OAAO,CAAC,OAAO,CAAC;AAChF,IAAI,MAAM,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC;AAClC,IAAI,MAAM,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC;AACpC,IAAI,IAAI,GAAG,GAAG,OAAO,CAAC,GAAG,CAAC;AAC1B,IAAI,OAAO;AACX,QAAQ,CAAC,MAAM,CAAC,aAAa,GAAG,OAAO;AACvC,YAAY,MAAM,IAAI,GAAG;AACzB,gBAAgB,IAAI,CAAC,GAAG;AACxB,oBAAoB,OAAO,EAAE,IAAI,EAAE,IAAI,EAAE,CAAC;AAC1C,gBAAgB,IAAI;AACpB,oBAAoB,MAAM,QAAQ,GAAG,MAAM,aAAa,CAAC,EAAE,MAAM,EAAE,GAAG,EAAE,OAAO,EAAE,CAAC,CAAC;AACnF,oBAAoB,MAAM,kBAAkB,GAAG,8BAA8B,CAAC,QAAQ,CAAC,CAAC;AACxF;AACA;AACA;AACA,oBAAoB,GAAG,GAAG,CAAC,CAAC,kBAAkB,CAAC,OAAO,CAAC,IAAI,IAAI,EAAE,EAAE,KAAK,CAAC,yBAAyB,CAAC,IAAI,EAAE,EAAE,CAAC,CAAC,CAAC;AAC9G,oBAAoB,OAAO,EAAE,KAAK,EAAE,kBAAkB,EAAE,CAAC;AACzD,iBAAiB;AACjB,gBAAgB,OAAO,KAAK,EAAE;AAC9B,oBAAoB,IAAI,KAAK,CAAC,MAAM,KAAK,GAAG;AAC5C,wBAAwB,MAAM,KAAK,CAAC;AACpC,oBAAoB,GAAG,GAAG,EAAE,CAAC;AAC7B,oBAAoB,OAAO;AAC3B,wBAAwB,KAAK,EAAE;AAC/B,4BAA4B,MAAM,EAAE,GAAG;AACvC,4BAA4B,OAAO,EAAE,EAAE;AACvC,4BAA4B,IAAI,EAAE,EAAE;AACpC,yBAAyB;AACzB,qBAAqB,CAAC;AACtB,iBAAiB;AACjB,aAAa;AACb,SAAS,CAAC;AACV,KAAK,CAAC;AACN,CAAC;;ACrCM,SAAS,QAAQ,CAAC,OAAO,EAAE,KAAK,EAAE,UAAU,EAAE,KAAK,EAAE;AAC5D,IAAI,IAAI,OAAO,UAAU,KAAK,UAAU,EAAE;AAC1C,QAAQ,KAAK,GAAG,UAAU,CAAC;AAC3B,QAAQ,UAAU,GAAG,SAAS,CAAC;AAC/B,KAAK;AACL,IAAI,OAAO,MAAM,CAAC,OAAO,EAAE,EAAE,EAAE,QAAQ,CAAC,OAAO,EAAE,KAAK,EAAE,UAAU,CAAC,CAAC,MAAM,CAAC,aAAa,CAAC,EAAE,EAAE,KAAK,CAAC,CAAC;AACpG,CAAC;AACD,SAAS,MAAM,CAAC,OAAO,EAAE,OAAO,EAAE,QAAQ,EAAE,KAAK,EAAE;AACnD,IAAI,OAAO,QAAQ,CAAC,IAAI,EAAE,CAAC,IAAI,CAAC,CAAC,MAAM,KAAK;AAC5C,QAAQ,IAAI,MAAM,CAAC,IAAI,EAAE;AACzB,YAAY,OAAO,OAAO,CAAC;AAC3B,SAAS;AACT,QAAQ,IAAI,SAAS,GAAG,KAAK,CAAC;AAC9B,QAAQ,SAAS,IAAI,GAAG;AACxB,YAAY,SAAS,GAAG,IAAI,CAAC;AAC7B,SAAS;AACT,QAAQ,OAAO,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,KAAK,CAAC,MAAM,CAAC,KAAK,EAAE,IAAI,CAAC,GAAG,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;AACxF,QAAQ,IAAI,SAAS,EAAE;AACvB,YAAY,OAAO,OAAO,CAAC;AAC3B,SAAS;AACT,QAAQ,OAAO,MAAM,CAAC,OAAO,EAAE,OAAO,EAAE,QAAQ,EAAE,KAAK,CAAC,CAAC;AACzD,KAAK,CAAC,CAAC;AACP,CAAC;;ACrBW,MAAC,mBAAmB,GAAG,MAAM,CAAC,MAAM,CAAC,QAAQ
,EAAE;AAC3D,IAAI,QAAQ;AACZ,CAAC,CAAC;;ACJU,MAAC,mBAAmB,GAAG;AACnC,IAAI,0BAA0B;AAC9B,IAAI,wBAAwB;AAC5B,IAAI,0BAA0B;AAC9B,IAAI,qBAAqB;AACzB,IAAI,iEAAiE;AACrE,IAAI,qDAAqD;AACzD,IAAI,qFAAqF;AACzF,IAAI,+EAA+E;AACnF,IAAI,+CAA+C;AACnD,IAAI,yCAAyC;AAC7C,IAAI,sDAAsD;AAC1D,IAAI,kEAAkE;AACtE,IAAI,aAAa;AACjB,IAAI,YAAY;AAChB,IAAI,mBAAmB;AACvB,IAAI,oBAAoB;AACxB,IAAI,+BAA+B;AACnC,IAAI,8BAA8B;AAClC,IAAI,4BAA4B;AAChC,IAAI,gCAAgC;AACpC,IAAI,aAAa;AACjB,IAAI,eAAe;AACnB,IAAI,gCAAgC;AACpC,IAAI,mDAAmD;AACvD,IAAI,wCAAwC;AAC5C,IAAI,2DAA2D;AAC/D,IAAI,qCAAqC;AACzC,IAAI,oBAAoB;AACxB,IAAI,oBAAoB;AACxB,IAAI,mDAAmD;AACvD,IAAI,kDAAkD;AACtD,IAAI,uCAAuC;AAC3C,IAAI,sEAAsE;AAC1E,IAAI,iEAAiE;AACrE,IAAI,iCAAiC;AACrC,IAAI,iCAAiC;AACrC,IAAI,4DAA4D;AAChE,IAAI,2BAA2B;AAC/B,IAAI,wBAAwB;AAC5B,IAAI,sCAAsC;AAC1C,IAAI,4BAA4B;AAChC,IAAI,2CAA2C;AAC/C,IAAI,oCAAoC;AACxC,IAAI,+DAA+D;AACnE,IAAI,wBAAwB;AAC5B,IAAI,iCAAiC;AACrC,IAAI,oCAAoC;AACxC,IAAI,uBAAuB;AAC3B,IAAI,4CAA4C;AAChD,IAAI,+BAA+B;AACnC,IAAI,6BAA6B;AACjC,IAAI,mDAAmD;AACvD,IAAI,wBAAwB;AAC5B,IAAI,yBAAyB;AAC7B,IAAI,4BAA4B;AAChC,IAAI,wDAAwD;AAC5D,IAAI,uCAAuC;AAC3C,IAAI,0BAA0B;AAC9B,IAAI,iEAAiE;AACrE,IAAI,0BAA0B;AAC9B,IAAI,gCAAgC;AACpC,IAAI,uBAAuB;AAC3B,IAAI,wCAAwC;AAC5C,IAAI,oDAAoD;AACxD,IAAI,kCAAkC;AACtC,IAAI,uBAAuB;AAC3B,IAAI,+CAA+C;AACnD,IAAI,4EAA4E;AAChF,IAAI,uGAAuG;AAC3G,IAAI,6EAA6E;AACjF,IAAI,+CAA+C;AACnD,IAAI,2CAA2C;AAC/C,IAAI,4CAA4C;AAChD,IAAI,yCAAyC;AAC7C,IAAI,yCAAyC;AAC7C,IAAI,yCAAyC;AAC7C,IAAI,0CAA0C;AAC9C,IAAI,oCAAoC;AACxC,IAAI,6CAA6C;AACjD,IAAI,0CAA0C;AAC9C,IAAI,2CAA2C;AAC/C,IAAI,wCAAwC;AAC5C,IAAI,2DAA2D;AAC/D,IAAI,gFAAgF;AACpF,IAAI,sDAAsD;AAC1D,IAAI,2CAA2C;AAC/C,IAAI,6CAA6C;AACjD,IAAI,gEAAgE;AACpE,IAAI,qCAAqC;AACzC,IAAI,oCAAoC;AACxC,IAAI,iEAAiE;AACrE,IAAI,oEAAoE;AACxE,IAAI,gDAAgD;AACpD,IAAI,yEAAyE;AAC7E,IAAI,kDAAkD;AACtD,IAAI,sCAAsC;AAC1C,IAAI,oDAAoD;AACxD,IAAI,8CAA8C;AAClD,IAAI,yCAAyC;AAC7C,IAAI,oCAAoC;AACxC,IAAI,2DAA2D;AAC/D,IAAI,mCAAmC;AACvC,IAAI,yDAAyD;AAC7D,IAAI,sDAAsD;AAC1D,IAAI,oDAAoD;AACxD,IAAI,sDAAsD;AAC1D,IAAI,gDAAgD;AACpD,IAAI,kDAAkD;AACtD,IAAI,wCAAwC;AAC5C,IAAI,8CAA8C;AAClD,IAAI,uCAAuC;AAC3C,IAAI,gEAAgE;AACpE,IAAI,wCAAwC;AAC5C,IAAI,kCAAkC;AACtC,IAAI,iCAAiC;AACrC,IAAI,mDAAmD;AACvD,IAAI,iCAAiC;AACrC,IAAI,sDAAsD;AAC1D,IAAI,uCAAuC;AAC3C,IAAI,kCAAkC;AACtC,IAAI,2CAA2C;AAC/C,IAAI,kEAAkE;AACtE,IAAI,yCAAyC;AAC7C,IAAI,0DAA0D;AAC9D,IAAI,wDAAwD;AAC5D,IAAI,wDAAwD;AAC5D,IAAI,2DAA2D;AAC/D,IAAI,0DAA0D;AAC9D,IAAI,gCAAgC;AACpC,IAAI,kCAAkC;AACtC,IAAI,sCAAsC;AAC1C,IAAI,gEAAgE;AACpE,IAAI,yCAAyC;AAC7C,IAAI,wCAAwC;AAC5C,IAAI,oCAAoC;AACxC,IAAI,iCAAiC;AACrC,IAAI,0CAA0C;AAC9C,IAAI,iEAAiE;AACrE,IAAI,wDAAwD;AAC5D,IAAI,uDAAuD;AAC3D,IAAI,qDAAqD;AACzD,IAAI,mEAAmE;AACvE,IAAI,uDAAuD;AAC3D,IAAI,4EAA4E;AAChF,IAAI,oCAAoC;AACxC,IAAI,wDAAwD;AAC5D,IAAI,2DAA2D;AAC/D,IAAI,kDAAkD;AACtD,IAAI,2EAA2E;AAC/E,IAAI,sCAAsC;AAC1C,IAAI,uCAAuC;AAC3C,IAAI,gCAAgC;AACpC,IAAI,iCAAiC;AACrC,IAAI,kCAAkC;AACtC,IAAI,mBAAmB;AACvB,IAAI,2EAA2E;AAC/E,IAAI,kBAAkB;AACtB,IAAI,qBAAqB;AACzB,IAAI,oBAAoB;AACxB,IAAI,oBAAoB;AACxB,IAAI,0BAA0B;AAC9B,IAAI,oBAAoB;AACxB,IAAI,mBAAmB;AACvB,IAAI,kCAAkC;AACtC,IAAI,+DAA+D;AACnE,IAAI,0FAA0F;AAC9F,IAAI,gEAAgE;AACpE,IAAI,kCAAkC;AACtC,IAAI,8BAA8B;AAClC,IAAI,+BAA+B;AACnC,IAAI,4BAA4B;AAChC,IAAI,4BAA4B;AAChC,IAAI,kBAAkB;AACtB,IAAI,sBAAsB;AAC1B,IAAI,8BAA8B;AAClC,IAAI,kBAAkB;AACtB,IAAI,qBAAqB;AACzB,IAAI,qBAAqB;AACzB,IAAI,oBAAoB;AACxB,IAAI,yBAAyB;AAC7B,IAAI,wDAAwD;AAC5D,IAAI,kBAAkB;AACtB,IAAI,gBAAgB;AACpB,IAAI,iCAAiC;AACrC,IAAI,yCAAyC;AAC7C,IAAI,4BAA4B;AAChC,IAAI,sBAAsB;AAC1B,IAAI,kDAAkD;AACtD,IAAI,gBAAgB;AACpB,IAAI,oBAAoB;AACxB,IAAI,2DAA2D;AAC/D,IAAI,yBAAyB;AAC7B,IAAI,iBAAiB;AACrB,IAAI,kCAAkC;AACtC,IAAI,
mBAAmB;AACvB,IAAI,yBAAyB;AAC7B,IAAI,iBAAiB;AACrB,IAAI,YAAY;AAChB,IAAI,8BAA8B;AAClC,IAAI,yCAAyC;AAC7C,IAAI,qCAAqC;AACzC,IAAI,iCAAiC;AACrC,IAAI,iCAAiC;AACrC,IAAI,6BAA6B;AACjC,IAAI,gCAAgC;AACpC,IAAI,4BAA4B;AAChC,IAAI,4BAA4B;AAChC,IAAI,gCAAgC;AACpC,IAAI,gCAAgC;AACpC,IAAI,uCAAuC;AAC3C,IAAI,8CAA8C;AAClD,IAAI,6BAA6B;AACjC,IAAI,+BAA+B;AACnC,IAAI,qCAAqC;AACzC,CAAC;;ACrNM,SAAS,oBAAoB,CAAC,GAAG,EAAE;AAC1C,IAAI,IAAI,OAAO,GAAG,KAAK,QAAQ,EAAE;AACjC,QAAQ,OAAO,mBAAmB,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC;AACjD,KAAK;AACL,SAAS;AACT,QAAQ,OAAO,KAAK,CAAC;AACrB,KAAK;AACL,CAAC;;ACJD;AACA;AACA;AACA;AACA,AAAO,SAAS,YAAY,CAAC,OAAO,EAAE;AACtC,IAAI,OAAO;AACX,QAAQ,QAAQ,EAAE,MAAM,CAAC,MAAM,CAAC,QAAQ,CAAC,IAAI,CAAC,IAAI,EAAE,OAAO,CAAC,EAAE;AAC9D,YAAY,QAAQ,EAAE,QAAQ,CAAC,IAAI,CAAC,IAAI,EAAE,OAAO,CAAC;AAClD,SAAS,CAAC;AACV,KAAK,CAAC;AACN,CAAC;AACD,YAAY,CAAC,OAAO,GAAG,OAAO,CAAC;;;;"} \ No newline at end of file diff --git a/node_modules/@octokit/plugin-paginate-rest/package.json b/node_modules/@octokit/plugin-paginate-rest/package.json new file mode 100644 index 0000000..db9a6dc --- /dev/null +++ b/node_modules/@octokit/plugin-paginate-rest/package.json @@ -0,0 +1,51 @@ +{ + "name": "@octokit/plugin-paginate-rest", + "description": "Octokit plugin to paginate REST API endpoint responses", + "version": "2.21.3", + "license": "MIT", + "files": [ + "dist-*/", + "bin/" + ], + "source": "dist-src/index.js", + "types": "dist-types/index.d.ts", + "main": "dist-node/index.js", + "module": "dist-web/index.js", + "pika": true, + "sideEffects": false, + "keywords": [ + "github", + "api", + "sdk", + "toolkit" + ], + "repository": "github:octokit/plugin-paginate-rest.js", + "dependencies": { + "@octokit/types": "^6.40.0" + }, + "peerDependencies": { + "@octokit/core": ">=2" + }, + "devDependencies": { + "@octokit/core": "^4.0.0", + "@octokit/plugin-rest-endpoint-methods": "^6.0.0", + "@pika/pack": "^0.3.7", + "@pika/plugin-build-node": "^0.9.0", + "@pika/plugin-build-web": "^0.9.0", + "@pika/plugin-ts-standard-pkg": "^0.9.0", + "@types/fetch-mock": "^7.3.1", + "@types/jest": "^28.0.0", + "@types/node": "^16.0.0", + "fetch-mock": "^9.0.0", + "github-openapi-graphql-query": "^2.0.0", + "jest": "^28.0.0", + "npm-run-all": "^4.1.5", + "prettier": "2.7.1", + "semantic-release-plugin-update-version-in-files": "^1.0.0", + "ts-jest": "^28.0.0", + "typescript": "^4.0.2" + }, + "publishConfig": { + "access": "public" + } +} diff --git a/node_modules/@octokit/plugin-rest-endpoint-methods/LICENSE b/node_modules/@octokit/plugin-rest-endpoint-methods/LICENSE new file mode 100644 index 0000000..57bee5f --- /dev/null +++ b/node_modules/@octokit/plugin-rest-endpoint-methods/LICENSE @@ -0,0 +1,7 @@ +MIT License Copyright (c) 2019 Octokit contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice (including the next paragraph) shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/@octokit/plugin-rest-endpoint-methods/README.md b/node_modules/@octokit/plugin-rest-endpoint-methods/README.md new file mode 100644 index 0000000..b23ff58 --- /dev/null +++ b/node_modules/@octokit/plugin-rest-endpoint-methods/README.md @@ -0,0 +1,76 @@ +# plugin-rest-endpoint-methods.js + +> Octokit plugin adding one method for all of api.github.com REST API endpoints + +[![@latest](https://img.shields.io/npm/v/@octokit/plugin-rest-endpoint-methods.svg)](https://www.npmjs.com/package/@octokit/plugin-rest-endpoint-methods) +[![Build Status](https://github.com/octokit/plugin-rest-endpoint-methods.js/workflows/Test/badge.svg)](https://github.com/octokit/plugin-rest-endpoint-methods.js/actions?workflow=Test) + +## Usage + + + + + + +
+Browsers + + +Load `@octokit/plugin-rest-endpoint-methods` and [`@octokit/core`](https://github.com/octokit/core.js) (or core-compatible module) directly from [cdn.skypack.dev](https://cdn.skypack.dev) + +```html + +``` + +
+Node + + +Install with `npm install @octokit/core @octokit/plugin-rest-endpoint-methods`. Optionally replace `@octokit/core` with a compatible module + +```js +const { Octokit } = require("@octokit/core"); +const { + restEndpointMethods, +} = require("@octokit/plugin-rest-endpoint-methods"); +``` + +
+ +```js +const MyOctokit = Octokit.plugin(restEndpointMethods); +const octokit = new MyOctokit({ auth: "secret123" }); + +// https://developer.github.com/v3/users/#get-the-authenticated-user +octokit.rest.users.getAuthenticated(); +``` + +There is one method for each REST API endpoint documented at [https://developer.github.com/v3](https://developer.github.com/v3). All endpoint methods are documented in the [docs/](docs/) folder, e.g. [docs/users/getAuthenticated.md](docs/users/getAuthenticated.md) + +## TypeScript + +Parameter and response types for all endpoint methods exported as `{ RestEndpointMethodTypes }`. + +Example + +```ts +import { RestEndpointMethodTypes } from "@octokit/plugin-rest-endpoint-methods"; + +type UpdateLabelParameters = + RestEndpointMethodTypes["issues"]["updateLabel"]["parameters"]; +type UpdateLabelResponse = + RestEndpointMethodTypes["issues"]["updateLabel"]["response"]; +``` + +In order to get types beyond parameters and responses, check out [`@octokit/openapi-types`](https://github.com/octokit/openapi-types.ts/#readme), which is a direct transpilation from GitHub's official OpenAPI specification. + +## Contributing + +See [CONTRIBUTING.md](CONTRIBUTING.md) + +## License + +[MIT](LICENSE) diff --git a/node_modules/@octokit/plugin-rest-endpoint-methods/dist-node/index.js b/node_modules/@octokit/plugin-rest-endpoint-methods/dist-node/index.js new file mode 100644 index 0000000..a7c5c16 --- /dev/null +++ b/node_modules/@octokit/plugin-rest-endpoint-methods/dist-node/index.js @@ -0,0 +1,1107 @@ +'use strict'; + +Object.defineProperty(exports, '__esModule', { value: true }); + +function ownKeys(object, enumerableOnly) { + var keys = Object.keys(object); + + if (Object.getOwnPropertySymbols) { + var symbols = Object.getOwnPropertySymbols(object); + + if (enumerableOnly) { + symbols = symbols.filter(function (sym) { + return Object.getOwnPropertyDescriptor(object, sym).enumerable; + }); + } + + keys.push.apply(keys, symbols); + } + + return keys; +} + +function _objectSpread2(target) { + for (var i = 1; i < arguments.length; i++) { + var source = arguments[i] != null ? 
arguments[i] : {}; + + if (i % 2) { + ownKeys(Object(source), true).forEach(function (key) { + _defineProperty(target, key, source[key]); + }); + } else if (Object.getOwnPropertyDescriptors) { + Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)); + } else { + ownKeys(Object(source)).forEach(function (key) { + Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); + }); + } + } + + return target; +} + +function _defineProperty(obj, key, value) { + if (key in obj) { + Object.defineProperty(obj, key, { + value: value, + enumerable: true, + configurable: true, + writable: true + }); + } else { + obj[key] = value; + } + + return obj; +} + +const Endpoints = { + actions: { + addCustomLabelsToSelfHostedRunnerForOrg: ["POST /orgs/{org}/actions/runners/{runner_id}/labels"], + addCustomLabelsToSelfHostedRunnerForRepo: ["POST /repos/{owner}/{repo}/actions/runners/{runner_id}/labels"], + addSelectedRepoToOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"], + approveWorkflowRun: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/approve"], + cancelWorkflowRun: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel"], + createOrUpdateEnvironmentSecret: ["PUT /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}"], + createOrUpdateOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}"], + createOrUpdateRepoSecret: ["PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}"], + createRegistrationTokenForOrg: ["POST /orgs/{org}/actions/runners/registration-token"], + createRegistrationTokenForRepo: ["POST /repos/{owner}/{repo}/actions/runners/registration-token"], + createRemoveTokenForOrg: ["POST /orgs/{org}/actions/runners/remove-token"], + createRemoveTokenForRepo: ["POST /repos/{owner}/{repo}/actions/runners/remove-token"], + createWorkflowDispatch: ["POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches"], + deleteActionsCacheById: ["DELETE /repos/{owner}/{repo}/actions/caches/{cache_id}"], + deleteActionsCacheByKey: ["DELETE /repos/{owner}/{repo}/actions/caches{?key,ref}"], + deleteArtifact: ["DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"], + deleteEnvironmentSecret: ["DELETE /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}"], + deleteOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}"], + deleteRepoSecret: ["DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}"], + deleteSelfHostedRunnerFromOrg: ["DELETE /orgs/{org}/actions/runners/{runner_id}"], + deleteSelfHostedRunnerFromRepo: ["DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}"], + deleteWorkflowRun: ["DELETE /repos/{owner}/{repo}/actions/runs/{run_id}"], + deleteWorkflowRunLogs: ["DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs"], + disableSelectedRepositoryGithubActionsOrganization: ["DELETE /orgs/{org}/actions/permissions/repositories/{repository_id}"], + disableWorkflow: ["PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable"], + downloadArtifact: ["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}"], + downloadJobLogsForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs"], + downloadWorkflowRunAttemptLogs: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/logs"], + downloadWorkflowRunLogs: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs"], + enableSelectedRepositoryGithubActionsOrganization: ["PUT 
/orgs/{org}/actions/permissions/repositories/{repository_id}"], + enableWorkflow: ["PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable"], + getActionsCacheList: ["GET /repos/{owner}/{repo}/actions/caches"], + getActionsCacheUsage: ["GET /repos/{owner}/{repo}/actions/cache/usage"], + getActionsCacheUsageByRepoForOrg: ["GET /orgs/{org}/actions/cache/usage-by-repository"], + getActionsCacheUsageForEnterprise: ["GET /enterprises/{enterprise}/actions/cache/usage"], + getActionsCacheUsageForOrg: ["GET /orgs/{org}/actions/cache/usage"], + getAllowedActionsOrganization: ["GET /orgs/{org}/actions/permissions/selected-actions"], + getAllowedActionsRepository: ["GET /repos/{owner}/{repo}/actions/permissions/selected-actions"], + getArtifact: ["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"], + getEnvironmentPublicKey: ["GET /repositories/{repository_id}/environments/{environment_name}/secrets/public-key"], + getEnvironmentSecret: ["GET /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}"], + getGithubActionsDefaultWorkflowPermissionsEnterprise: ["GET /enterprises/{enterprise}/actions/permissions/workflow"], + getGithubActionsDefaultWorkflowPermissionsOrganization: ["GET /orgs/{org}/actions/permissions/workflow"], + getGithubActionsDefaultWorkflowPermissionsRepository: ["GET /repos/{owner}/{repo}/actions/permissions/workflow"], + getGithubActionsPermissionsOrganization: ["GET /orgs/{org}/actions/permissions"], + getGithubActionsPermissionsRepository: ["GET /repos/{owner}/{repo}/actions/permissions"], + getJobForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}"], + getOrgPublicKey: ["GET /orgs/{org}/actions/secrets/public-key"], + getOrgSecret: ["GET /orgs/{org}/actions/secrets/{secret_name}"], + getPendingDeploymentsForRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments"], + getRepoPermissions: ["GET /repos/{owner}/{repo}/actions/permissions", {}, { + renamed: ["actions", "getGithubActionsPermissionsRepository"] + }], + getRepoPublicKey: ["GET /repos/{owner}/{repo}/actions/secrets/public-key"], + getRepoSecret: ["GET /repos/{owner}/{repo}/actions/secrets/{secret_name}"], + getReviewsForRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/approvals"], + getSelfHostedRunnerForOrg: ["GET /orgs/{org}/actions/runners/{runner_id}"], + getSelfHostedRunnerForRepo: ["GET /repos/{owner}/{repo}/actions/runners/{runner_id}"], + getWorkflow: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}"], + getWorkflowAccessToRepository: ["GET /repos/{owner}/{repo}/actions/permissions/access"], + getWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}"], + getWorkflowRunAttempt: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}"], + getWorkflowRunUsage: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing"], + getWorkflowUsage: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing"], + listArtifactsForRepo: ["GET /repos/{owner}/{repo}/actions/artifacts"], + listEnvironmentSecrets: ["GET /repositories/{repository_id}/environments/{environment_name}/secrets"], + listJobsForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs"], + listJobsForWorkflowRunAttempt: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs"], + listLabelsForSelfHostedRunnerForOrg: ["GET /orgs/{org}/actions/runners/{runner_id}/labels"], + listLabelsForSelfHostedRunnerForRepo: ["GET /repos/{owner}/{repo}/actions/runners/{runner_id}/labels"], + 
listOrgSecrets: ["GET /orgs/{org}/actions/secrets"], + listRepoSecrets: ["GET /repos/{owner}/{repo}/actions/secrets"], + listRepoWorkflows: ["GET /repos/{owner}/{repo}/actions/workflows"], + listRunnerApplicationsForOrg: ["GET /orgs/{org}/actions/runners/downloads"], + listRunnerApplicationsForRepo: ["GET /repos/{owner}/{repo}/actions/runners/downloads"], + listSelectedReposForOrgSecret: ["GET /orgs/{org}/actions/secrets/{secret_name}/repositories"], + listSelectedRepositoriesEnabledGithubActionsOrganization: ["GET /orgs/{org}/actions/permissions/repositories"], + listSelfHostedRunnersForOrg: ["GET /orgs/{org}/actions/runners"], + listSelfHostedRunnersForRepo: ["GET /repos/{owner}/{repo}/actions/runners"], + listWorkflowRunArtifacts: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts"], + listWorkflowRuns: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs"], + listWorkflowRunsForRepo: ["GET /repos/{owner}/{repo}/actions/runs"], + reRunJobForWorkflowRun: ["POST /repos/{owner}/{repo}/actions/jobs/{job_id}/rerun"], + reRunWorkflow: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun"], + reRunWorkflowFailedJobs: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun-failed-jobs"], + removeAllCustomLabelsFromSelfHostedRunnerForOrg: ["DELETE /orgs/{org}/actions/runners/{runner_id}/labels"], + removeAllCustomLabelsFromSelfHostedRunnerForRepo: ["DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels"], + removeCustomLabelFromSelfHostedRunnerForOrg: ["DELETE /orgs/{org}/actions/runners/{runner_id}/labels/{name}"], + removeCustomLabelFromSelfHostedRunnerForRepo: ["DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels/{name}"], + removeSelectedRepoFromOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"], + reviewPendingDeploymentsForRun: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments"], + setAllowedActionsOrganization: ["PUT /orgs/{org}/actions/permissions/selected-actions"], + setAllowedActionsRepository: ["PUT /repos/{owner}/{repo}/actions/permissions/selected-actions"], + setCustomLabelsForSelfHostedRunnerForOrg: ["PUT /orgs/{org}/actions/runners/{runner_id}/labels"], + setCustomLabelsForSelfHostedRunnerForRepo: ["PUT /repos/{owner}/{repo}/actions/runners/{runner_id}/labels"], + setGithubActionsDefaultWorkflowPermissionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions/workflow"], + setGithubActionsDefaultWorkflowPermissionsOrganization: ["PUT /orgs/{org}/actions/permissions/workflow"], + setGithubActionsDefaultWorkflowPermissionsRepository: ["PUT /repos/{owner}/{repo}/actions/permissions/workflow"], + setGithubActionsPermissionsOrganization: ["PUT /orgs/{org}/actions/permissions"], + setGithubActionsPermissionsRepository: ["PUT /repos/{owner}/{repo}/actions/permissions"], + setSelectedReposForOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}/repositories"], + setSelectedRepositoriesEnabledGithubActionsOrganization: ["PUT /orgs/{org}/actions/permissions/repositories"], + setWorkflowAccessToRepository: ["PUT /repos/{owner}/{repo}/actions/permissions/access"] + }, + activity: { + checkRepoIsStarredByAuthenticatedUser: ["GET /user/starred/{owner}/{repo}"], + deleteRepoSubscription: ["DELETE /repos/{owner}/{repo}/subscription"], + deleteThreadSubscription: ["DELETE /notifications/threads/{thread_id}/subscription"], + getFeeds: ["GET /feeds"], + getRepoSubscription: ["GET /repos/{owner}/{repo}/subscription"], + getThread: ["GET /notifications/threads/{thread_id}"], + 
getThreadSubscriptionForAuthenticatedUser: ["GET /notifications/threads/{thread_id}/subscription"], + listEventsForAuthenticatedUser: ["GET /users/{username}/events"], + listNotificationsForAuthenticatedUser: ["GET /notifications"], + listOrgEventsForAuthenticatedUser: ["GET /users/{username}/events/orgs/{org}"], + listPublicEvents: ["GET /events"], + listPublicEventsForRepoNetwork: ["GET /networks/{owner}/{repo}/events"], + listPublicEventsForUser: ["GET /users/{username}/events/public"], + listPublicOrgEvents: ["GET /orgs/{org}/events"], + listReceivedEventsForUser: ["GET /users/{username}/received_events"], + listReceivedPublicEventsForUser: ["GET /users/{username}/received_events/public"], + listRepoEvents: ["GET /repos/{owner}/{repo}/events"], + listRepoNotificationsForAuthenticatedUser: ["GET /repos/{owner}/{repo}/notifications"], + listReposStarredByAuthenticatedUser: ["GET /user/starred"], + listReposStarredByUser: ["GET /users/{username}/starred"], + listReposWatchedByUser: ["GET /users/{username}/subscriptions"], + listStargazersForRepo: ["GET /repos/{owner}/{repo}/stargazers"], + listWatchedReposForAuthenticatedUser: ["GET /user/subscriptions"], + listWatchersForRepo: ["GET /repos/{owner}/{repo}/subscribers"], + markNotificationsAsRead: ["PUT /notifications"], + markRepoNotificationsAsRead: ["PUT /repos/{owner}/{repo}/notifications"], + markThreadAsRead: ["PATCH /notifications/threads/{thread_id}"], + setRepoSubscription: ["PUT /repos/{owner}/{repo}/subscription"], + setThreadSubscription: ["PUT /notifications/threads/{thread_id}/subscription"], + starRepoForAuthenticatedUser: ["PUT /user/starred/{owner}/{repo}"], + unstarRepoForAuthenticatedUser: ["DELETE /user/starred/{owner}/{repo}"] + }, + apps: { + addRepoToInstallation: ["PUT /user/installations/{installation_id}/repositories/{repository_id}", {}, { + renamed: ["apps", "addRepoToInstallationForAuthenticatedUser"] + }], + addRepoToInstallationForAuthenticatedUser: ["PUT /user/installations/{installation_id}/repositories/{repository_id}"], + checkToken: ["POST /applications/{client_id}/token"], + createFromManifest: ["POST /app-manifests/{code}/conversions"], + createInstallationAccessToken: ["POST /app/installations/{installation_id}/access_tokens"], + deleteAuthorization: ["DELETE /applications/{client_id}/grant"], + deleteInstallation: ["DELETE /app/installations/{installation_id}"], + deleteToken: ["DELETE /applications/{client_id}/token"], + getAuthenticated: ["GET /app"], + getBySlug: ["GET /apps/{app_slug}"], + getInstallation: ["GET /app/installations/{installation_id}"], + getOrgInstallation: ["GET /orgs/{org}/installation"], + getRepoInstallation: ["GET /repos/{owner}/{repo}/installation"], + getSubscriptionPlanForAccount: ["GET /marketplace_listing/accounts/{account_id}"], + getSubscriptionPlanForAccountStubbed: ["GET /marketplace_listing/stubbed/accounts/{account_id}"], + getUserInstallation: ["GET /users/{username}/installation"], + getWebhookConfigForApp: ["GET /app/hook/config"], + getWebhookDelivery: ["GET /app/hook/deliveries/{delivery_id}"], + listAccountsForPlan: ["GET /marketplace_listing/plans/{plan_id}/accounts"], + listAccountsForPlanStubbed: ["GET /marketplace_listing/stubbed/plans/{plan_id}/accounts"], + listInstallationReposForAuthenticatedUser: ["GET /user/installations/{installation_id}/repositories"], + listInstallations: ["GET /app/installations"], + listInstallationsForAuthenticatedUser: ["GET /user/installations"], + listPlans: ["GET /marketplace_listing/plans"], + listPlansStubbed: ["GET 
/marketplace_listing/stubbed/plans"], + listReposAccessibleToInstallation: ["GET /installation/repositories"], + listSubscriptionsForAuthenticatedUser: ["GET /user/marketplace_purchases"], + listSubscriptionsForAuthenticatedUserStubbed: ["GET /user/marketplace_purchases/stubbed"], + listWebhookDeliveries: ["GET /app/hook/deliveries"], + redeliverWebhookDelivery: ["POST /app/hook/deliveries/{delivery_id}/attempts"], + removeRepoFromInstallation: ["DELETE /user/installations/{installation_id}/repositories/{repository_id}", {}, { + renamed: ["apps", "removeRepoFromInstallationForAuthenticatedUser"] + }], + removeRepoFromInstallationForAuthenticatedUser: ["DELETE /user/installations/{installation_id}/repositories/{repository_id}"], + resetToken: ["PATCH /applications/{client_id}/token"], + revokeInstallationAccessToken: ["DELETE /installation/token"], + scopeToken: ["POST /applications/{client_id}/token/scoped"], + suspendInstallation: ["PUT /app/installations/{installation_id}/suspended"], + unsuspendInstallation: ["DELETE /app/installations/{installation_id}/suspended"], + updateWebhookConfigForApp: ["PATCH /app/hook/config"] + }, + billing: { + getGithubActionsBillingOrg: ["GET /orgs/{org}/settings/billing/actions"], + getGithubActionsBillingUser: ["GET /users/{username}/settings/billing/actions"], + getGithubAdvancedSecurityBillingGhe: ["GET /enterprises/{enterprise}/settings/billing/advanced-security"], + getGithubAdvancedSecurityBillingOrg: ["GET /orgs/{org}/settings/billing/advanced-security"], + getGithubPackagesBillingOrg: ["GET /orgs/{org}/settings/billing/packages"], + getGithubPackagesBillingUser: ["GET /users/{username}/settings/billing/packages"], + getSharedStorageBillingOrg: ["GET /orgs/{org}/settings/billing/shared-storage"], + getSharedStorageBillingUser: ["GET /users/{username}/settings/billing/shared-storage"] + }, + checks: { + create: ["POST /repos/{owner}/{repo}/check-runs"], + createSuite: ["POST /repos/{owner}/{repo}/check-suites"], + get: ["GET /repos/{owner}/{repo}/check-runs/{check_run_id}"], + getSuite: ["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}"], + listAnnotations: ["GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations"], + listForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-runs"], + listForSuite: ["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs"], + listSuitesForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-suites"], + rerequestRun: ["POST /repos/{owner}/{repo}/check-runs/{check_run_id}/rerequest"], + rerequestSuite: ["POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest"], + setSuitesPreferences: ["PATCH /repos/{owner}/{repo}/check-suites/preferences"], + update: ["PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}"] + }, + codeScanning: { + deleteAnalysis: ["DELETE /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}{?confirm_delete}"], + getAlert: ["GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}", {}, { + renamedParameters: { + alert_id: "alert_number" + } + }], + getAnalysis: ["GET /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}"], + getSarif: ["GET /repos/{owner}/{repo}/code-scanning/sarifs/{sarif_id}"], + listAlertInstances: ["GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances"], + listAlertsForOrg: ["GET /orgs/{org}/code-scanning/alerts"], + listAlertsForRepo: ["GET /repos/{owner}/{repo}/code-scanning/alerts"], + listAlertsInstances: ["GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", {}, { + renamed: 
["codeScanning", "listAlertInstances"] + }], + listRecentAnalyses: ["GET /repos/{owner}/{repo}/code-scanning/analyses"], + updateAlert: ["PATCH /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}"], + uploadSarif: ["POST /repos/{owner}/{repo}/code-scanning/sarifs"] + }, + codesOfConduct: { + getAllCodesOfConduct: ["GET /codes_of_conduct"], + getConductCode: ["GET /codes_of_conduct/{key}"] + }, + codespaces: { + addRepositoryForSecretForAuthenticatedUser: ["PUT /user/codespaces/secrets/{secret_name}/repositories/{repository_id}"], + codespaceMachinesForAuthenticatedUser: ["GET /user/codespaces/{codespace_name}/machines"], + createForAuthenticatedUser: ["POST /user/codespaces"], + createOrUpdateRepoSecret: ["PUT /repos/{owner}/{repo}/codespaces/secrets/{secret_name}"], + createOrUpdateSecretForAuthenticatedUser: ["PUT /user/codespaces/secrets/{secret_name}"], + createWithPrForAuthenticatedUser: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/codespaces"], + createWithRepoForAuthenticatedUser: ["POST /repos/{owner}/{repo}/codespaces"], + deleteForAuthenticatedUser: ["DELETE /user/codespaces/{codespace_name}"], + deleteFromOrganization: ["DELETE /orgs/{org}/members/{username}/codespaces/{codespace_name}"], + deleteRepoSecret: ["DELETE /repos/{owner}/{repo}/codespaces/secrets/{secret_name}"], + deleteSecretForAuthenticatedUser: ["DELETE /user/codespaces/secrets/{secret_name}"], + exportForAuthenticatedUser: ["POST /user/codespaces/{codespace_name}/exports"], + getExportDetailsForAuthenticatedUser: ["GET /user/codespaces/{codespace_name}/exports/{export_id}"], + getForAuthenticatedUser: ["GET /user/codespaces/{codespace_name}"], + getPublicKeyForAuthenticatedUser: ["GET /user/codespaces/secrets/public-key"], + getRepoPublicKey: ["GET /repos/{owner}/{repo}/codespaces/secrets/public-key"], + getRepoSecret: ["GET /repos/{owner}/{repo}/codespaces/secrets/{secret_name}"], + getSecretForAuthenticatedUser: ["GET /user/codespaces/secrets/{secret_name}"], + listDevcontainersInRepositoryForAuthenticatedUser: ["GET /repos/{owner}/{repo}/codespaces/devcontainers"], + listForAuthenticatedUser: ["GET /user/codespaces"], + listInOrganization: ["GET /orgs/{org}/codespaces", {}, { + renamedParameters: { + org_id: "org" + } + }], + listInRepositoryForAuthenticatedUser: ["GET /repos/{owner}/{repo}/codespaces"], + listRepoSecrets: ["GET /repos/{owner}/{repo}/codespaces/secrets"], + listRepositoriesForSecretForAuthenticatedUser: ["GET /user/codespaces/secrets/{secret_name}/repositories"], + listSecretsForAuthenticatedUser: ["GET /user/codespaces/secrets"], + removeRepositoryForSecretForAuthenticatedUser: ["DELETE /user/codespaces/secrets/{secret_name}/repositories/{repository_id}"], + repoMachinesForAuthenticatedUser: ["GET /repos/{owner}/{repo}/codespaces/machines"], + setRepositoriesForSecretForAuthenticatedUser: ["PUT /user/codespaces/secrets/{secret_name}/repositories"], + startForAuthenticatedUser: ["POST /user/codespaces/{codespace_name}/start"], + stopForAuthenticatedUser: ["POST /user/codespaces/{codespace_name}/stop"], + stopInOrganization: ["POST /orgs/{org}/members/{username}/codespaces/{codespace_name}/stop"], + updateForAuthenticatedUser: ["PATCH /user/codespaces/{codespace_name}"] + }, + dependabot: { + addSelectedRepoToOrgSecret: ["PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}"], + createOrUpdateOrgSecret: ["PUT /orgs/{org}/dependabot/secrets/{secret_name}"], + createOrUpdateRepoSecret: ["PUT /repos/{owner}/{repo}/dependabot/secrets/{secret_name}"], + 
deleteOrgSecret: ["DELETE /orgs/{org}/dependabot/secrets/{secret_name}"], + deleteRepoSecret: ["DELETE /repos/{owner}/{repo}/dependabot/secrets/{secret_name}"], + getOrgPublicKey: ["GET /orgs/{org}/dependabot/secrets/public-key"], + getOrgSecret: ["GET /orgs/{org}/dependabot/secrets/{secret_name}"], + getRepoPublicKey: ["GET /repos/{owner}/{repo}/dependabot/secrets/public-key"], + getRepoSecret: ["GET /repos/{owner}/{repo}/dependabot/secrets/{secret_name}"], + listOrgSecrets: ["GET /orgs/{org}/dependabot/secrets"], + listRepoSecrets: ["GET /repos/{owner}/{repo}/dependabot/secrets"], + listSelectedReposForOrgSecret: ["GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories"], + removeSelectedRepoFromOrgSecret: ["DELETE /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}"], + setSelectedReposForOrgSecret: ["PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories"] + }, + dependencyGraph: { + createRepositorySnapshot: ["POST /repos/{owner}/{repo}/dependency-graph/snapshots"], + diffRange: ["GET /repos/{owner}/{repo}/dependency-graph/compare/{basehead}"] + }, + emojis: { + get: ["GET /emojis"] + }, + enterpriseAdmin: { + addCustomLabelsToSelfHostedRunnerForEnterprise: ["POST /enterprises/{enterprise}/actions/runners/{runner_id}/labels"], + disableSelectedOrganizationGithubActionsEnterprise: ["DELETE /enterprises/{enterprise}/actions/permissions/organizations/{org_id}"], + enableSelectedOrganizationGithubActionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions/organizations/{org_id}"], + getAllowedActionsEnterprise: ["GET /enterprises/{enterprise}/actions/permissions/selected-actions"], + getGithubActionsPermissionsEnterprise: ["GET /enterprises/{enterprise}/actions/permissions"], + getServerStatistics: ["GET /enterprise-installation/{enterprise_or_org}/server-statistics"], + listLabelsForSelfHostedRunnerForEnterprise: ["GET /enterprises/{enterprise}/actions/runners/{runner_id}/labels"], + listSelectedOrganizationsEnabledGithubActionsEnterprise: ["GET /enterprises/{enterprise}/actions/permissions/organizations"], + removeAllCustomLabelsFromSelfHostedRunnerForEnterprise: ["DELETE /enterprises/{enterprise}/actions/runners/{runner_id}/labels"], + removeCustomLabelFromSelfHostedRunnerForEnterprise: ["DELETE /enterprises/{enterprise}/actions/runners/{runner_id}/labels/{name}"], + setAllowedActionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions/selected-actions"], + setCustomLabelsForSelfHostedRunnerForEnterprise: ["PUT /enterprises/{enterprise}/actions/runners/{runner_id}/labels"], + setGithubActionsPermissionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions"], + setSelectedOrganizationsEnabledGithubActionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions/organizations"] + }, + gists: { + checkIsStarred: ["GET /gists/{gist_id}/star"], + create: ["POST /gists"], + createComment: ["POST /gists/{gist_id}/comments"], + delete: ["DELETE /gists/{gist_id}"], + deleteComment: ["DELETE /gists/{gist_id}/comments/{comment_id}"], + fork: ["POST /gists/{gist_id}/forks"], + get: ["GET /gists/{gist_id}"], + getComment: ["GET /gists/{gist_id}/comments/{comment_id}"], + getRevision: ["GET /gists/{gist_id}/{sha}"], + list: ["GET /gists"], + listComments: ["GET /gists/{gist_id}/comments"], + listCommits: ["GET /gists/{gist_id}/commits"], + listForUser: ["GET /users/{username}/gists"], + listForks: ["GET /gists/{gist_id}/forks"], + listPublic: ["GET /gists/public"], + listStarred: ["GET /gists/starred"], + star: ["PUT 
/gists/{gist_id}/star"], + unstar: ["DELETE /gists/{gist_id}/star"], + update: ["PATCH /gists/{gist_id}"], + updateComment: ["PATCH /gists/{gist_id}/comments/{comment_id}"] + }, + git: { + createBlob: ["POST /repos/{owner}/{repo}/git/blobs"], + createCommit: ["POST /repos/{owner}/{repo}/git/commits"], + createRef: ["POST /repos/{owner}/{repo}/git/refs"], + createTag: ["POST /repos/{owner}/{repo}/git/tags"], + createTree: ["POST /repos/{owner}/{repo}/git/trees"], + deleteRef: ["DELETE /repos/{owner}/{repo}/git/refs/{ref}"], + getBlob: ["GET /repos/{owner}/{repo}/git/blobs/{file_sha}"], + getCommit: ["GET /repos/{owner}/{repo}/git/commits/{commit_sha}"], + getRef: ["GET /repos/{owner}/{repo}/git/ref/{ref}"], + getTag: ["GET /repos/{owner}/{repo}/git/tags/{tag_sha}"], + getTree: ["GET /repos/{owner}/{repo}/git/trees/{tree_sha}"], + listMatchingRefs: ["GET /repos/{owner}/{repo}/git/matching-refs/{ref}"], + updateRef: ["PATCH /repos/{owner}/{repo}/git/refs/{ref}"] + }, + gitignore: { + getAllTemplates: ["GET /gitignore/templates"], + getTemplate: ["GET /gitignore/templates/{name}"] + }, + interactions: { + getRestrictionsForAuthenticatedUser: ["GET /user/interaction-limits"], + getRestrictionsForOrg: ["GET /orgs/{org}/interaction-limits"], + getRestrictionsForRepo: ["GET /repos/{owner}/{repo}/interaction-limits"], + getRestrictionsForYourPublicRepos: ["GET /user/interaction-limits", {}, { + renamed: ["interactions", "getRestrictionsForAuthenticatedUser"] + }], + removeRestrictionsForAuthenticatedUser: ["DELETE /user/interaction-limits"], + removeRestrictionsForOrg: ["DELETE /orgs/{org}/interaction-limits"], + removeRestrictionsForRepo: ["DELETE /repos/{owner}/{repo}/interaction-limits"], + removeRestrictionsForYourPublicRepos: ["DELETE /user/interaction-limits", {}, { + renamed: ["interactions", "removeRestrictionsForAuthenticatedUser"] + }], + setRestrictionsForAuthenticatedUser: ["PUT /user/interaction-limits"], + setRestrictionsForOrg: ["PUT /orgs/{org}/interaction-limits"], + setRestrictionsForRepo: ["PUT /repos/{owner}/{repo}/interaction-limits"], + setRestrictionsForYourPublicRepos: ["PUT /user/interaction-limits", {}, { + renamed: ["interactions", "setRestrictionsForAuthenticatedUser"] + }] + }, + issues: { + addAssignees: ["POST /repos/{owner}/{repo}/issues/{issue_number}/assignees"], + addLabels: ["POST /repos/{owner}/{repo}/issues/{issue_number}/labels"], + checkUserCanBeAssigned: ["GET /repos/{owner}/{repo}/assignees/{assignee}"], + create: ["POST /repos/{owner}/{repo}/issues"], + createComment: ["POST /repos/{owner}/{repo}/issues/{issue_number}/comments"], + createLabel: ["POST /repos/{owner}/{repo}/labels"], + createMilestone: ["POST /repos/{owner}/{repo}/milestones"], + deleteComment: ["DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}"], + deleteLabel: ["DELETE /repos/{owner}/{repo}/labels/{name}"], + deleteMilestone: ["DELETE /repos/{owner}/{repo}/milestones/{milestone_number}"], + get: ["GET /repos/{owner}/{repo}/issues/{issue_number}"], + getComment: ["GET /repos/{owner}/{repo}/issues/comments/{comment_id}"], + getEvent: ["GET /repos/{owner}/{repo}/issues/events/{event_id}"], + getLabel: ["GET /repos/{owner}/{repo}/labels/{name}"], + getMilestone: ["GET /repos/{owner}/{repo}/milestones/{milestone_number}"], + list: ["GET /issues"], + listAssignees: ["GET /repos/{owner}/{repo}/assignees"], + listComments: ["GET /repos/{owner}/{repo}/issues/{issue_number}/comments"], + listCommentsForRepo: ["GET /repos/{owner}/{repo}/issues/comments"], + listEvents: ["GET 
/repos/{owner}/{repo}/issues/{issue_number}/events"], + listEventsForRepo: ["GET /repos/{owner}/{repo}/issues/events"], + listEventsForTimeline: ["GET /repos/{owner}/{repo}/issues/{issue_number}/timeline"], + listForAuthenticatedUser: ["GET /user/issues"], + listForOrg: ["GET /orgs/{org}/issues"], + listForRepo: ["GET /repos/{owner}/{repo}/issues"], + listLabelsForMilestone: ["GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels"], + listLabelsForRepo: ["GET /repos/{owner}/{repo}/labels"], + listLabelsOnIssue: ["GET /repos/{owner}/{repo}/issues/{issue_number}/labels"], + listMilestones: ["GET /repos/{owner}/{repo}/milestones"], + lock: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/lock"], + removeAllLabels: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels"], + removeAssignees: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees"], + removeLabel: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}"], + setLabels: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/labels"], + unlock: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock"], + update: ["PATCH /repos/{owner}/{repo}/issues/{issue_number}"], + updateComment: ["PATCH /repos/{owner}/{repo}/issues/comments/{comment_id}"], + updateLabel: ["PATCH /repos/{owner}/{repo}/labels/{name}"], + updateMilestone: ["PATCH /repos/{owner}/{repo}/milestones/{milestone_number}"] + }, + licenses: { + get: ["GET /licenses/{license}"], + getAllCommonlyUsed: ["GET /licenses"], + getForRepo: ["GET /repos/{owner}/{repo}/license"] + }, + markdown: { + render: ["POST /markdown"], + renderRaw: ["POST /markdown/raw", { + headers: { + "content-type": "text/plain; charset=utf-8" + } + }] + }, + meta: { + get: ["GET /meta"], + getOctocat: ["GET /octocat"], + getZen: ["GET /zen"], + root: ["GET /"] + }, + migrations: { + cancelImport: ["DELETE /repos/{owner}/{repo}/import"], + deleteArchiveForAuthenticatedUser: ["DELETE /user/migrations/{migration_id}/archive"], + deleteArchiveForOrg: ["DELETE /orgs/{org}/migrations/{migration_id}/archive"], + downloadArchiveForOrg: ["GET /orgs/{org}/migrations/{migration_id}/archive"], + getArchiveForAuthenticatedUser: ["GET /user/migrations/{migration_id}/archive"], + getCommitAuthors: ["GET /repos/{owner}/{repo}/import/authors"], + getImportStatus: ["GET /repos/{owner}/{repo}/import"], + getLargeFiles: ["GET /repos/{owner}/{repo}/import/large_files"], + getStatusForAuthenticatedUser: ["GET /user/migrations/{migration_id}"], + getStatusForOrg: ["GET /orgs/{org}/migrations/{migration_id}"], + listForAuthenticatedUser: ["GET /user/migrations"], + listForOrg: ["GET /orgs/{org}/migrations"], + listReposForAuthenticatedUser: ["GET /user/migrations/{migration_id}/repositories"], + listReposForOrg: ["GET /orgs/{org}/migrations/{migration_id}/repositories"], + listReposForUser: ["GET /user/migrations/{migration_id}/repositories", {}, { + renamed: ["migrations", "listReposForAuthenticatedUser"] + }], + mapCommitAuthor: ["PATCH /repos/{owner}/{repo}/import/authors/{author_id}"], + setLfsPreference: ["PATCH /repos/{owner}/{repo}/import/lfs"], + startForAuthenticatedUser: ["POST /user/migrations"], + startForOrg: ["POST /orgs/{org}/migrations"], + startImport: ["PUT /repos/{owner}/{repo}/import"], + unlockRepoForAuthenticatedUser: ["DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock"], + unlockRepoForOrg: ["DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock"], + updateImport: ["PATCH /repos/{owner}/{repo}/import"] + }, + orgs: { + blockUser: ["PUT 
/orgs/{org}/blocks/{username}"], + cancelInvitation: ["DELETE /orgs/{org}/invitations/{invitation_id}"], + checkBlockedUser: ["GET /orgs/{org}/blocks/{username}"], + checkMembershipForUser: ["GET /orgs/{org}/members/{username}"], + checkPublicMembershipForUser: ["GET /orgs/{org}/public_members/{username}"], + convertMemberToOutsideCollaborator: ["PUT /orgs/{org}/outside_collaborators/{username}"], + createInvitation: ["POST /orgs/{org}/invitations"], + createWebhook: ["POST /orgs/{org}/hooks"], + deleteWebhook: ["DELETE /orgs/{org}/hooks/{hook_id}"], + get: ["GET /orgs/{org}"], + getMembershipForAuthenticatedUser: ["GET /user/memberships/orgs/{org}"], + getMembershipForUser: ["GET /orgs/{org}/memberships/{username}"], + getWebhook: ["GET /orgs/{org}/hooks/{hook_id}"], + getWebhookConfigForOrg: ["GET /orgs/{org}/hooks/{hook_id}/config"], + getWebhookDelivery: ["GET /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}"], + list: ["GET /organizations"], + listAppInstallations: ["GET /orgs/{org}/installations"], + listBlockedUsers: ["GET /orgs/{org}/blocks"], + listCustomRoles: ["GET /organizations/{organization_id}/custom_roles"], + listFailedInvitations: ["GET /orgs/{org}/failed_invitations"], + listForAuthenticatedUser: ["GET /user/orgs"], + listForUser: ["GET /users/{username}/orgs"], + listInvitationTeams: ["GET /orgs/{org}/invitations/{invitation_id}/teams"], + listMembers: ["GET /orgs/{org}/members"], + listMembershipsForAuthenticatedUser: ["GET /user/memberships/orgs"], + listOutsideCollaborators: ["GET /orgs/{org}/outside_collaborators"], + listPendingInvitations: ["GET /orgs/{org}/invitations"], + listPublicMembers: ["GET /orgs/{org}/public_members"], + listWebhookDeliveries: ["GET /orgs/{org}/hooks/{hook_id}/deliveries"], + listWebhooks: ["GET /orgs/{org}/hooks"], + pingWebhook: ["POST /orgs/{org}/hooks/{hook_id}/pings"], + redeliverWebhookDelivery: ["POST /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}/attempts"], + removeMember: ["DELETE /orgs/{org}/members/{username}"], + removeMembershipForUser: ["DELETE /orgs/{org}/memberships/{username}"], + removeOutsideCollaborator: ["DELETE /orgs/{org}/outside_collaborators/{username}"], + removePublicMembershipForAuthenticatedUser: ["DELETE /orgs/{org}/public_members/{username}"], + setMembershipForUser: ["PUT /orgs/{org}/memberships/{username}"], + setPublicMembershipForAuthenticatedUser: ["PUT /orgs/{org}/public_members/{username}"], + unblockUser: ["DELETE /orgs/{org}/blocks/{username}"], + update: ["PATCH /orgs/{org}"], + updateMembershipForAuthenticatedUser: ["PATCH /user/memberships/orgs/{org}"], + updateWebhook: ["PATCH /orgs/{org}/hooks/{hook_id}"], + updateWebhookConfigForOrg: ["PATCH /orgs/{org}/hooks/{hook_id}/config"] + }, + packages: { + deletePackageForAuthenticatedUser: ["DELETE /user/packages/{package_type}/{package_name}"], + deletePackageForOrg: ["DELETE /orgs/{org}/packages/{package_type}/{package_name}"], + deletePackageForUser: ["DELETE /users/{username}/packages/{package_type}/{package_name}"], + deletePackageVersionForAuthenticatedUser: ["DELETE /user/packages/{package_type}/{package_name}/versions/{package_version_id}"], + deletePackageVersionForOrg: ["DELETE /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}"], + deletePackageVersionForUser: ["DELETE /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}"], + getAllPackageVersionsForAPackageOwnedByAnOrg: ["GET /orgs/{org}/packages/{package_type}/{package_name}/versions", {}, { + renamed: 
["packages", "getAllPackageVersionsForPackageOwnedByOrg"] + }], + getAllPackageVersionsForAPackageOwnedByTheAuthenticatedUser: ["GET /user/packages/{package_type}/{package_name}/versions", {}, { + renamed: ["packages", "getAllPackageVersionsForPackageOwnedByAuthenticatedUser"] + }], + getAllPackageVersionsForPackageOwnedByAuthenticatedUser: ["GET /user/packages/{package_type}/{package_name}/versions"], + getAllPackageVersionsForPackageOwnedByOrg: ["GET /orgs/{org}/packages/{package_type}/{package_name}/versions"], + getAllPackageVersionsForPackageOwnedByUser: ["GET /users/{username}/packages/{package_type}/{package_name}/versions"], + getPackageForAuthenticatedUser: ["GET /user/packages/{package_type}/{package_name}"], + getPackageForOrganization: ["GET /orgs/{org}/packages/{package_type}/{package_name}"], + getPackageForUser: ["GET /users/{username}/packages/{package_type}/{package_name}"], + getPackageVersionForAuthenticatedUser: ["GET /user/packages/{package_type}/{package_name}/versions/{package_version_id}"], + getPackageVersionForOrganization: ["GET /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}"], + getPackageVersionForUser: ["GET /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}"], + listPackagesForAuthenticatedUser: ["GET /user/packages"], + listPackagesForOrganization: ["GET /orgs/{org}/packages"], + listPackagesForUser: ["GET /users/{username}/packages"], + restorePackageForAuthenticatedUser: ["POST /user/packages/{package_type}/{package_name}/restore{?token}"], + restorePackageForOrg: ["POST /orgs/{org}/packages/{package_type}/{package_name}/restore{?token}"], + restorePackageForUser: ["POST /users/{username}/packages/{package_type}/{package_name}/restore{?token}"], + restorePackageVersionForAuthenticatedUser: ["POST /user/packages/{package_type}/{package_name}/versions/{package_version_id}/restore"], + restorePackageVersionForOrg: ["POST /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore"], + restorePackageVersionForUser: ["POST /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore"] + }, + projects: { + addCollaborator: ["PUT /projects/{project_id}/collaborators/{username}"], + createCard: ["POST /projects/columns/{column_id}/cards"], + createColumn: ["POST /projects/{project_id}/columns"], + createForAuthenticatedUser: ["POST /user/projects"], + createForOrg: ["POST /orgs/{org}/projects"], + createForRepo: ["POST /repos/{owner}/{repo}/projects"], + delete: ["DELETE /projects/{project_id}"], + deleteCard: ["DELETE /projects/columns/cards/{card_id}"], + deleteColumn: ["DELETE /projects/columns/{column_id}"], + get: ["GET /projects/{project_id}"], + getCard: ["GET /projects/columns/cards/{card_id}"], + getColumn: ["GET /projects/columns/{column_id}"], + getPermissionForUser: ["GET /projects/{project_id}/collaborators/{username}/permission"], + listCards: ["GET /projects/columns/{column_id}/cards"], + listCollaborators: ["GET /projects/{project_id}/collaborators"], + listColumns: ["GET /projects/{project_id}/columns"], + listForOrg: ["GET /orgs/{org}/projects"], + listForRepo: ["GET /repos/{owner}/{repo}/projects"], + listForUser: ["GET /users/{username}/projects"], + moveCard: ["POST /projects/columns/cards/{card_id}/moves"], + moveColumn: ["POST /projects/columns/{column_id}/moves"], + removeCollaborator: ["DELETE /projects/{project_id}/collaborators/{username}"], + update: ["PATCH /projects/{project_id}"], + updateCard: ["PATCH 
/projects/columns/cards/{card_id}"], + updateColumn: ["PATCH /projects/columns/{column_id}"] + }, + pulls: { + checkIfMerged: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/merge"], + create: ["POST /repos/{owner}/{repo}/pulls"], + createReplyForReviewComment: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies"], + createReview: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews"], + createReviewComment: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/comments"], + deletePendingReview: ["DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"], + deleteReviewComment: ["DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}"], + dismissReview: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals"], + get: ["GET /repos/{owner}/{repo}/pulls/{pull_number}"], + getReview: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"], + getReviewComment: ["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}"], + list: ["GET /repos/{owner}/{repo}/pulls"], + listCommentsForReview: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments"], + listCommits: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/commits"], + listFiles: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/files"], + listRequestedReviewers: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"], + listReviewComments: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/comments"], + listReviewCommentsForRepo: ["GET /repos/{owner}/{repo}/pulls/comments"], + listReviews: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews"], + merge: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge"], + removeRequestedReviewers: ["DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"], + requestReviewers: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"], + submitReview: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events"], + update: ["PATCH /repos/{owner}/{repo}/pulls/{pull_number}"], + updateBranch: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch"], + updateReview: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"], + updateReviewComment: ["PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}"] + }, + rateLimit: { + get: ["GET /rate_limit"] + }, + reactions: { + createForCommitComment: ["POST /repos/{owner}/{repo}/comments/{comment_id}/reactions"], + createForIssue: ["POST /repos/{owner}/{repo}/issues/{issue_number}/reactions"], + createForIssueComment: ["POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions"], + createForPullRequestReviewComment: ["POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions"], + createForRelease: ["POST /repos/{owner}/{repo}/releases/{release_id}/reactions"], + createForTeamDiscussionCommentInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions"], + createForTeamDiscussionInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions"], + deleteForCommitComment: ["DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}"], + deleteForIssue: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}"], + deleteForIssueComment: ["DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}"], + deleteForPullRequestComment: ["DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}"], + deleteForRelease: ["DELETE 
/repos/{owner}/{repo}/releases/{release_id}/reactions/{reaction_id}"], + deleteForTeamDiscussion: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}"], + deleteForTeamDiscussionComment: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}"], + listForCommitComment: ["GET /repos/{owner}/{repo}/comments/{comment_id}/reactions"], + listForIssue: ["GET /repos/{owner}/{repo}/issues/{issue_number}/reactions"], + listForIssueComment: ["GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions"], + listForPullRequestReviewComment: ["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions"], + listForRelease: ["GET /repos/{owner}/{repo}/releases/{release_id}/reactions"], + listForTeamDiscussionCommentInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions"], + listForTeamDiscussionInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions"] + }, + repos: { + acceptInvitation: ["PATCH /user/repository_invitations/{invitation_id}", {}, { + renamed: ["repos", "acceptInvitationForAuthenticatedUser"] + }], + acceptInvitationForAuthenticatedUser: ["PATCH /user/repository_invitations/{invitation_id}"], + addAppAccessRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, { + mapToData: "apps" + }], + addCollaborator: ["PUT /repos/{owner}/{repo}/collaborators/{username}"], + addStatusCheckContexts: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, { + mapToData: "contexts" + }], + addTeamAccessRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, { + mapToData: "teams" + }], + addUserAccessRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, { + mapToData: "users" + }], + checkCollaborator: ["GET /repos/{owner}/{repo}/collaborators/{username}"], + checkVulnerabilityAlerts: ["GET /repos/{owner}/{repo}/vulnerability-alerts"], + codeownersErrors: ["GET /repos/{owner}/{repo}/codeowners/errors"], + compareCommits: ["GET /repos/{owner}/{repo}/compare/{base}...{head}"], + compareCommitsWithBasehead: ["GET /repos/{owner}/{repo}/compare/{basehead}"], + createAutolink: ["POST /repos/{owner}/{repo}/autolinks"], + createCommitComment: ["POST /repos/{owner}/{repo}/commits/{commit_sha}/comments"], + createCommitSignatureProtection: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures"], + createCommitStatus: ["POST /repos/{owner}/{repo}/statuses/{sha}"], + createDeployKey: ["POST /repos/{owner}/{repo}/keys"], + createDeployment: ["POST /repos/{owner}/{repo}/deployments"], + createDeploymentStatus: ["POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses"], + createDispatchEvent: ["POST /repos/{owner}/{repo}/dispatches"], + createForAuthenticatedUser: ["POST /user/repos"], + createFork: ["POST /repos/{owner}/{repo}/forks"], + createInOrg: ["POST /orgs/{org}/repos"], + createOrUpdateEnvironment: ["PUT /repos/{owner}/{repo}/environments/{environment_name}"], + createOrUpdateFileContents: ["PUT /repos/{owner}/{repo}/contents/{path}"], + createPagesSite: ["POST /repos/{owner}/{repo}/pages"], + createRelease: ["POST /repos/{owner}/{repo}/releases"], + createTagProtection: ["POST /repos/{owner}/{repo}/tags/protection"], + createUsingTemplate: ["POST /repos/{template_owner}/{template_repo}/generate"], + createWebhook: 
["POST /repos/{owner}/{repo}/hooks"], + declineInvitation: ["DELETE /user/repository_invitations/{invitation_id}", {}, { + renamed: ["repos", "declineInvitationForAuthenticatedUser"] + }], + declineInvitationForAuthenticatedUser: ["DELETE /user/repository_invitations/{invitation_id}"], + delete: ["DELETE /repos/{owner}/{repo}"], + deleteAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions"], + deleteAdminBranchProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"], + deleteAnEnvironment: ["DELETE /repos/{owner}/{repo}/environments/{environment_name}"], + deleteAutolink: ["DELETE /repos/{owner}/{repo}/autolinks/{autolink_id}"], + deleteBranchProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection"], + deleteCommitComment: ["DELETE /repos/{owner}/{repo}/comments/{comment_id}"], + deleteCommitSignatureProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures"], + deleteDeployKey: ["DELETE /repos/{owner}/{repo}/keys/{key_id}"], + deleteDeployment: ["DELETE /repos/{owner}/{repo}/deployments/{deployment_id}"], + deleteFile: ["DELETE /repos/{owner}/{repo}/contents/{path}"], + deleteInvitation: ["DELETE /repos/{owner}/{repo}/invitations/{invitation_id}"], + deletePagesSite: ["DELETE /repos/{owner}/{repo}/pages"], + deletePullRequestReviewProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"], + deleteRelease: ["DELETE /repos/{owner}/{repo}/releases/{release_id}"], + deleteReleaseAsset: ["DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}"], + deleteTagProtection: ["DELETE /repos/{owner}/{repo}/tags/protection/{tag_protection_id}"], + deleteWebhook: ["DELETE /repos/{owner}/{repo}/hooks/{hook_id}"], + disableAutomatedSecurityFixes: ["DELETE /repos/{owner}/{repo}/automated-security-fixes"], + disableLfsForRepo: ["DELETE /repos/{owner}/{repo}/lfs"], + disableVulnerabilityAlerts: ["DELETE /repos/{owner}/{repo}/vulnerability-alerts"], + downloadArchive: ["GET /repos/{owner}/{repo}/zipball/{ref}", {}, { + renamed: ["repos", "downloadZipballArchive"] + }], + downloadTarballArchive: ["GET /repos/{owner}/{repo}/tarball/{ref}"], + downloadZipballArchive: ["GET /repos/{owner}/{repo}/zipball/{ref}"], + enableAutomatedSecurityFixes: ["PUT /repos/{owner}/{repo}/automated-security-fixes"], + enableLfsForRepo: ["PUT /repos/{owner}/{repo}/lfs"], + enableVulnerabilityAlerts: ["PUT /repos/{owner}/{repo}/vulnerability-alerts"], + generateReleaseNotes: ["POST /repos/{owner}/{repo}/releases/generate-notes"], + get: ["GET /repos/{owner}/{repo}"], + getAccessRestrictions: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions"], + getAdminBranchProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"], + getAllEnvironments: ["GET /repos/{owner}/{repo}/environments"], + getAllStatusCheckContexts: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts"], + getAllTopics: ["GET /repos/{owner}/{repo}/topics"], + getAppsWithAccessToProtectedBranch: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps"], + getAutolink: ["GET /repos/{owner}/{repo}/autolinks/{autolink_id}"], + getBranch: ["GET /repos/{owner}/{repo}/branches/{branch}"], + getBranchProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection"], + getClones: ["GET /repos/{owner}/{repo}/traffic/clones"], + getCodeFrequencyStats: ["GET /repos/{owner}/{repo}/stats/code_frequency"], + 
getCollaboratorPermissionLevel: ["GET /repos/{owner}/{repo}/collaborators/{username}/permission"], + getCombinedStatusForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/status"], + getCommit: ["GET /repos/{owner}/{repo}/commits/{ref}"], + getCommitActivityStats: ["GET /repos/{owner}/{repo}/stats/commit_activity"], + getCommitComment: ["GET /repos/{owner}/{repo}/comments/{comment_id}"], + getCommitSignatureProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures"], + getCommunityProfileMetrics: ["GET /repos/{owner}/{repo}/community/profile"], + getContent: ["GET /repos/{owner}/{repo}/contents/{path}"], + getContributorsStats: ["GET /repos/{owner}/{repo}/stats/contributors"], + getDeployKey: ["GET /repos/{owner}/{repo}/keys/{key_id}"], + getDeployment: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}"], + getDeploymentStatus: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}"], + getEnvironment: ["GET /repos/{owner}/{repo}/environments/{environment_name}"], + getLatestPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/latest"], + getLatestRelease: ["GET /repos/{owner}/{repo}/releases/latest"], + getPages: ["GET /repos/{owner}/{repo}/pages"], + getPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/{build_id}"], + getPagesHealthCheck: ["GET /repos/{owner}/{repo}/pages/health"], + getParticipationStats: ["GET /repos/{owner}/{repo}/stats/participation"], + getPullRequestReviewProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"], + getPunchCardStats: ["GET /repos/{owner}/{repo}/stats/punch_card"], + getReadme: ["GET /repos/{owner}/{repo}/readme"], + getReadmeInDirectory: ["GET /repos/{owner}/{repo}/readme/{dir}"], + getRelease: ["GET /repos/{owner}/{repo}/releases/{release_id}"], + getReleaseAsset: ["GET /repos/{owner}/{repo}/releases/assets/{asset_id}"], + getReleaseByTag: ["GET /repos/{owner}/{repo}/releases/tags/{tag}"], + getStatusChecksProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"], + getTeamsWithAccessToProtectedBranch: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams"], + getTopPaths: ["GET /repos/{owner}/{repo}/traffic/popular/paths"], + getTopReferrers: ["GET /repos/{owner}/{repo}/traffic/popular/referrers"], + getUsersWithAccessToProtectedBranch: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users"], + getViews: ["GET /repos/{owner}/{repo}/traffic/views"], + getWebhook: ["GET /repos/{owner}/{repo}/hooks/{hook_id}"], + getWebhookConfigForRepo: ["GET /repos/{owner}/{repo}/hooks/{hook_id}/config"], + getWebhookDelivery: ["GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}"], + listAutolinks: ["GET /repos/{owner}/{repo}/autolinks"], + listBranches: ["GET /repos/{owner}/{repo}/branches"], + listBranchesForHeadCommit: ["GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head"], + listCollaborators: ["GET /repos/{owner}/{repo}/collaborators"], + listCommentsForCommit: ["GET /repos/{owner}/{repo}/commits/{commit_sha}/comments"], + listCommitCommentsForRepo: ["GET /repos/{owner}/{repo}/comments"], + listCommitStatusesForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/statuses"], + listCommits: ["GET /repos/{owner}/{repo}/commits"], + listContributors: ["GET /repos/{owner}/{repo}/contributors"], + listDeployKeys: ["GET /repos/{owner}/{repo}/keys"], + listDeploymentStatuses: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses"], + listDeployments: 
["GET /repos/{owner}/{repo}/deployments"], + listForAuthenticatedUser: ["GET /user/repos"], + listForOrg: ["GET /orgs/{org}/repos"], + listForUser: ["GET /users/{username}/repos"], + listForks: ["GET /repos/{owner}/{repo}/forks"], + listInvitations: ["GET /repos/{owner}/{repo}/invitations"], + listInvitationsForAuthenticatedUser: ["GET /user/repository_invitations"], + listLanguages: ["GET /repos/{owner}/{repo}/languages"], + listPagesBuilds: ["GET /repos/{owner}/{repo}/pages/builds"], + listPublic: ["GET /repositories"], + listPullRequestsAssociatedWithCommit: ["GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls"], + listReleaseAssets: ["GET /repos/{owner}/{repo}/releases/{release_id}/assets"], + listReleases: ["GET /repos/{owner}/{repo}/releases"], + listTagProtection: ["GET /repos/{owner}/{repo}/tags/protection"], + listTags: ["GET /repos/{owner}/{repo}/tags"], + listTeams: ["GET /repos/{owner}/{repo}/teams"], + listWebhookDeliveries: ["GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries"], + listWebhooks: ["GET /repos/{owner}/{repo}/hooks"], + merge: ["POST /repos/{owner}/{repo}/merges"], + mergeUpstream: ["POST /repos/{owner}/{repo}/merge-upstream"], + pingWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/pings"], + redeliverWebhookDelivery: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}/attempts"], + removeAppAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, { + mapToData: "apps" + }], + removeCollaborator: ["DELETE /repos/{owner}/{repo}/collaborators/{username}"], + removeStatusCheckContexts: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, { + mapToData: "contexts" + }], + removeStatusCheckProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"], + removeTeamAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, { + mapToData: "teams" + }], + removeUserAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, { + mapToData: "users" + }], + renameBranch: ["POST /repos/{owner}/{repo}/branches/{branch}/rename"], + replaceAllTopics: ["PUT /repos/{owner}/{repo}/topics"], + requestPagesBuild: ["POST /repos/{owner}/{repo}/pages/builds"], + setAdminBranchProtection: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"], + setAppAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, { + mapToData: "apps" + }], + setStatusCheckContexts: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, { + mapToData: "contexts" + }], + setTeamAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, { + mapToData: "teams" + }], + setUserAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, { + mapToData: "users" + }], + testPushWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/tests"], + transfer: ["POST /repos/{owner}/{repo}/transfer"], + update: ["PATCH /repos/{owner}/{repo}"], + updateBranchProtection: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection"], + updateCommitComment: ["PATCH /repos/{owner}/{repo}/comments/{comment_id}"], + updateInformationAboutPagesSite: ["PUT /repos/{owner}/{repo}/pages"], + updateInvitation: ["PATCH /repos/{owner}/{repo}/invitations/{invitation_id}"], + updatePullRequestReviewProtection: ["PATCH 
/repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"], + updateRelease: ["PATCH /repos/{owner}/{repo}/releases/{release_id}"], + updateReleaseAsset: ["PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}"], + updateStatusCheckPotection: ["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks", {}, { + renamed: ["repos", "updateStatusCheckProtection"] + }], + updateStatusCheckProtection: ["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"], + updateWebhook: ["PATCH /repos/{owner}/{repo}/hooks/{hook_id}"], + updateWebhookConfigForRepo: ["PATCH /repos/{owner}/{repo}/hooks/{hook_id}/config"], + uploadReleaseAsset: ["POST /repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}", { + baseUrl: "https://uploads.github.com" + }] + }, + search: { + code: ["GET /search/code"], + commits: ["GET /search/commits"], + issuesAndPullRequests: ["GET /search/issues"], + labels: ["GET /search/labels"], + repos: ["GET /search/repositories"], + topics: ["GET /search/topics"], + users: ["GET /search/users"] + }, + secretScanning: { + getAlert: ["GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}"], + listAlertsForEnterprise: ["GET /enterprises/{enterprise}/secret-scanning/alerts"], + listAlertsForOrg: ["GET /orgs/{org}/secret-scanning/alerts"], + listAlertsForRepo: ["GET /repos/{owner}/{repo}/secret-scanning/alerts"], + listLocationsForAlert: ["GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations"], + updateAlert: ["PATCH /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}"] + }, + teams: { + addOrUpdateMembershipForUserInOrg: ["PUT /orgs/{org}/teams/{team_slug}/memberships/{username}"], + addOrUpdateProjectPermissionsInOrg: ["PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}"], + addOrUpdateRepoPermissionsInOrg: ["PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"], + checkPermissionsForProjectInOrg: ["GET /orgs/{org}/teams/{team_slug}/projects/{project_id}"], + checkPermissionsForRepoInOrg: ["GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"], + create: ["POST /orgs/{org}/teams"], + createDiscussionCommentInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments"], + createDiscussionInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions"], + deleteDiscussionCommentInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"], + deleteDiscussionInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"], + deleteInOrg: ["DELETE /orgs/{org}/teams/{team_slug}"], + getByName: ["GET /orgs/{org}/teams/{team_slug}"], + getDiscussionCommentInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"], + getDiscussionInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"], + getMembershipForUserInOrg: ["GET /orgs/{org}/teams/{team_slug}/memberships/{username}"], + list: ["GET /orgs/{org}/teams"], + listChildInOrg: ["GET /orgs/{org}/teams/{team_slug}/teams"], + listDiscussionCommentsInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments"], + listDiscussionsInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions"], + listForAuthenticatedUser: ["GET /user/teams"], + listMembersInOrg: ["GET /orgs/{org}/teams/{team_slug}/members"], + listPendingInvitationsInOrg: ["GET /orgs/{org}/teams/{team_slug}/invitations"], + listProjectsInOrg: ["GET /orgs/{org}/teams/{team_slug}/projects"], + 
listReposInOrg: ["GET /orgs/{org}/teams/{team_slug}/repos"], + removeMembershipForUserInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}"], + removeProjectInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}"], + removeRepoInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"], + updateDiscussionCommentInOrg: ["PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"], + updateDiscussionInOrg: ["PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"], + updateInOrg: ["PATCH /orgs/{org}/teams/{team_slug}"] + }, + users: { + addEmailForAuthenticated: ["POST /user/emails", {}, { + renamed: ["users", "addEmailForAuthenticatedUser"] + }], + addEmailForAuthenticatedUser: ["POST /user/emails"], + block: ["PUT /user/blocks/{username}"], + checkBlocked: ["GET /user/blocks/{username}"], + checkFollowingForUser: ["GET /users/{username}/following/{target_user}"], + checkPersonIsFollowedByAuthenticated: ["GET /user/following/{username}"], + createGpgKeyForAuthenticated: ["POST /user/gpg_keys", {}, { + renamed: ["users", "createGpgKeyForAuthenticatedUser"] + }], + createGpgKeyForAuthenticatedUser: ["POST /user/gpg_keys"], + createPublicSshKeyForAuthenticated: ["POST /user/keys", {}, { + renamed: ["users", "createPublicSshKeyForAuthenticatedUser"] + }], + createPublicSshKeyForAuthenticatedUser: ["POST /user/keys"], + deleteEmailForAuthenticated: ["DELETE /user/emails", {}, { + renamed: ["users", "deleteEmailForAuthenticatedUser"] + }], + deleteEmailForAuthenticatedUser: ["DELETE /user/emails"], + deleteGpgKeyForAuthenticated: ["DELETE /user/gpg_keys/{gpg_key_id}", {}, { + renamed: ["users", "deleteGpgKeyForAuthenticatedUser"] + }], + deleteGpgKeyForAuthenticatedUser: ["DELETE /user/gpg_keys/{gpg_key_id}"], + deletePublicSshKeyForAuthenticated: ["DELETE /user/keys/{key_id}", {}, { + renamed: ["users", "deletePublicSshKeyForAuthenticatedUser"] + }], + deletePublicSshKeyForAuthenticatedUser: ["DELETE /user/keys/{key_id}"], + follow: ["PUT /user/following/{username}"], + getAuthenticated: ["GET /user"], + getByUsername: ["GET /users/{username}"], + getContextForUser: ["GET /users/{username}/hovercard"], + getGpgKeyForAuthenticated: ["GET /user/gpg_keys/{gpg_key_id}", {}, { + renamed: ["users", "getGpgKeyForAuthenticatedUser"] + }], + getGpgKeyForAuthenticatedUser: ["GET /user/gpg_keys/{gpg_key_id}"], + getPublicSshKeyForAuthenticated: ["GET /user/keys/{key_id}", {}, { + renamed: ["users", "getPublicSshKeyForAuthenticatedUser"] + }], + getPublicSshKeyForAuthenticatedUser: ["GET /user/keys/{key_id}"], + list: ["GET /users"], + listBlockedByAuthenticated: ["GET /user/blocks", {}, { + renamed: ["users", "listBlockedByAuthenticatedUser"] + }], + listBlockedByAuthenticatedUser: ["GET /user/blocks"], + listEmailsForAuthenticated: ["GET /user/emails", {}, { + renamed: ["users", "listEmailsForAuthenticatedUser"] + }], + listEmailsForAuthenticatedUser: ["GET /user/emails"], + listFollowedByAuthenticated: ["GET /user/following", {}, { + renamed: ["users", "listFollowedByAuthenticatedUser"] + }], + listFollowedByAuthenticatedUser: ["GET /user/following"], + listFollowersForAuthenticatedUser: ["GET /user/followers"], + listFollowersForUser: ["GET /users/{username}/followers"], + listFollowingForUser: ["GET /users/{username}/following"], + listGpgKeysForAuthenticated: ["GET /user/gpg_keys", {}, { + renamed: ["users", "listGpgKeysForAuthenticatedUser"] + }], + listGpgKeysForAuthenticatedUser: ["GET /user/gpg_keys"], + 
listGpgKeysForUser: ["GET /users/{username}/gpg_keys"], + listPublicEmailsForAuthenticated: ["GET /user/public_emails", {}, { + renamed: ["users", "listPublicEmailsForAuthenticatedUser"] + }], + listPublicEmailsForAuthenticatedUser: ["GET /user/public_emails"], + listPublicKeysForUser: ["GET /users/{username}/keys"], + listPublicSshKeysForAuthenticated: ["GET /user/keys", {}, { + renamed: ["users", "listPublicSshKeysForAuthenticatedUser"] + }], + listPublicSshKeysForAuthenticatedUser: ["GET /user/keys"], + setPrimaryEmailVisibilityForAuthenticated: ["PATCH /user/email/visibility", {}, { + renamed: ["users", "setPrimaryEmailVisibilityForAuthenticatedUser"] + }], + setPrimaryEmailVisibilityForAuthenticatedUser: ["PATCH /user/email/visibility"], + unblock: ["DELETE /user/blocks/{username}"], + unfollow: ["DELETE /user/following/{username}"], + updateAuthenticated: ["PATCH /user"] + } +}; + +const VERSION = "5.16.2"; + +function endpointsToMethods(octokit, endpointsMap) { + const newMethods = {}; + + for (const [scope, endpoints] of Object.entries(endpointsMap)) { + for (const [methodName, endpoint] of Object.entries(endpoints)) { + const [route, defaults, decorations] = endpoint; + const [method, url] = route.split(/ /); + const endpointDefaults = Object.assign({ + method, + url + }, defaults); + + if (!newMethods[scope]) { + newMethods[scope] = {}; + } + + const scopeMethods = newMethods[scope]; + + if (decorations) { + scopeMethods[methodName] = decorate(octokit, scope, methodName, endpointDefaults, decorations); + continue; + } + + scopeMethods[methodName] = octokit.request.defaults(endpointDefaults); + } + } + + return newMethods; +} + +function decorate(octokit, scope, methodName, defaults, decorations) { + const requestWithDefaults = octokit.request.defaults(defaults); + /* istanbul ignore next */ + + function withDecorations(...args) { + // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488 + let options = requestWithDefaults.endpoint.merge(...args); // There are currently no other decorations than `.mapToData` + + if (decorations.mapToData) { + options = Object.assign({}, options, { + data: options[decorations.mapToData], + [decorations.mapToData]: undefined + }); + return requestWithDefaults(options); + } + + if (decorations.renamed) { + const [newScope, newMethodName] = decorations.renamed; + octokit.log.warn(`octokit.${scope}.${methodName}() has been renamed to octokit.${newScope}.${newMethodName}()`); + } + + if (decorations.deprecated) { + octokit.log.warn(decorations.deprecated); + } + + if (decorations.renamedParameters) { + // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488 + const options = requestWithDefaults.endpoint.merge(...args); + + for (const [name, alias] of Object.entries(decorations.renamedParameters)) { + if (name in options) { + octokit.log.warn(`"${name}" parameter is deprecated for "octokit.${scope}.${methodName}()". 
Use "${alias}" instead`); + + if (!(alias in options)) { + options[alias] = options[name]; + } + + delete options[name]; + } + } + + return requestWithDefaults(options); + } // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488 + + + return requestWithDefaults(...args); + } + + return Object.assign(withDecorations, requestWithDefaults); +} + +function restEndpointMethods(octokit) { + const api = endpointsToMethods(octokit, Endpoints); + return { + rest: api + }; +} +restEndpointMethods.VERSION = VERSION; +function legacyRestEndpointMethods(octokit) { + const api = endpointsToMethods(octokit, Endpoints); + return _objectSpread2(_objectSpread2({}, api), {}, { + rest: api + }); +} +legacyRestEndpointMethods.VERSION = VERSION; + +exports.legacyRestEndpointMethods = legacyRestEndpointMethods; +exports.restEndpointMethods = restEndpointMethods; +//# sourceMappingURL=index.js.map diff --git a/node_modules/@octokit/plugin-rest-endpoint-methods/dist-node/index.js.map b/node_modules/@octokit/plugin-rest-endpoint-methods/dist-node/index.js.map new file mode 100644 index 0000000..20eff5f --- /dev/null +++ b/node_modules/@octokit/plugin-rest-endpoint-methods/dist-node/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sources":["../dist-src/generated/endpoints.js","../dist-src/version.js","../dist-src/endpoints-to-methods.js","../dist-src/index.js"],"sourcesContent":["const Endpoints = {\n actions: {\n addCustomLabelsToSelfHostedRunnerForOrg: [\n \"POST /orgs/{org}/actions/runners/{runner_id}/labels\",\n ],\n addCustomLabelsToSelfHostedRunnerForRepo: [\n \"POST /repos/{owner}/{repo}/actions/runners/{runner_id}/labels\",\n ],\n addSelectedRepoToOrgSecret: [\n \"PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}\",\n ],\n approveWorkflowRun: [\n \"POST /repos/{owner}/{repo}/actions/runs/{run_id}/approve\",\n ],\n cancelWorkflowRun: [\n \"POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel\",\n ],\n createOrUpdateEnvironmentSecret: [\n \"PUT /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}\",\n ],\n createOrUpdateOrgSecret: [\"PUT /orgs/{org}/actions/secrets/{secret_name}\"],\n createOrUpdateRepoSecret: [\n \"PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}\",\n ],\n createRegistrationTokenForOrg: [\n \"POST /orgs/{org}/actions/runners/registration-token\",\n ],\n createRegistrationTokenForRepo: [\n \"POST /repos/{owner}/{repo}/actions/runners/registration-token\",\n ],\n createRemoveTokenForOrg: [\"POST /orgs/{org}/actions/runners/remove-token\"],\n createRemoveTokenForRepo: [\n \"POST /repos/{owner}/{repo}/actions/runners/remove-token\",\n ],\n createWorkflowDispatch: [\n \"POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches\",\n ],\n deleteActionsCacheById: [\n \"DELETE /repos/{owner}/{repo}/actions/caches/{cache_id}\",\n ],\n deleteActionsCacheByKey: [\n \"DELETE /repos/{owner}/{repo}/actions/caches{?key,ref}\",\n ],\n deleteArtifact: [\n \"DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}\",\n ],\n deleteEnvironmentSecret: [\n \"DELETE /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}\",\n ],\n deleteOrgSecret: [\"DELETE /orgs/{org}/actions/secrets/{secret_name}\"],\n deleteRepoSecret: [\n \"DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}\",\n ],\n deleteSelfHostedRunnerFromOrg: [\n \"DELETE /orgs/{org}/actions/runners/{runner_id}\",\n ],\n deleteSelfHostedRunnerFromRepo: [\n \"DELETE 
/repos/{owner}/{repo}/actions/runners/{runner_id}\",\n ],\n deleteWorkflowRun: [\"DELETE /repos/{owner}/{repo}/actions/runs/{run_id}\"],\n deleteWorkflowRunLogs: [\n \"DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs\",\n ],\n disableSelectedRepositoryGithubActionsOrganization: [\n \"DELETE /orgs/{org}/actions/permissions/repositories/{repository_id}\",\n ],\n disableWorkflow: [\n \"PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable\",\n ],\n downloadArtifact: [\n \"GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}\",\n ],\n downloadJobLogsForWorkflowRun: [\n \"GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs\",\n ],\n downloadWorkflowRunAttemptLogs: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/logs\",\n ],\n downloadWorkflowRunLogs: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs\",\n ],\n enableSelectedRepositoryGithubActionsOrganization: [\n \"PUT /orgs/{org}/actions/permissions/repositories/{repository_id}\",\n ],\n enableWorkflow: [\n \"PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable\",\n ],\n getActionsCacheList: [\"GET /repos/{owner}/{repo}/actions/caches\"],\n getActionsCacheUsage: [\"GET /repos/{owner}/{repo}/actions/cache/usage\"],\n getActionsCacheUsageByRepoForOrg: [\n \"GET /orgs/{org}/actions/cache/usage-by-repository\",\n ],\n getActionsCacheUsageForEnterprise: [\n \"GET /enterprises/{enterprise}/actions/cache/usage\",\n ],\n getActionsCacheUsageForOrg: [\"GET /orgs/{org}/actions/cache/usage\"],\n getAllowedActionsOrganization: [\n \"GET /orgs/{org}/actions/permissions/selected-actions\",\n ],\n getAllowedActionsRepository: [\n \"GET /repos/{owner}/{repo}/actions/permissions/selected-actions\",\n ],\n getArtifact: [\"GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}\"],\n getEnvironmentPublicKey: [\n \"GET /repositories/{repository_id}/environments/{environment_name}/secrets/public-key\",\n ],\n getEnvironmentSecret: [\n \"GET /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}\",\n ],\n getGithubActionsDefaultWorkflowPermissionsEnterprise: [\n \"GET /enterprises/{enterprise}/actions/permissions/workflow\",\n ],\n getGithubActionsDefaultWorkflowPermissionsOrganization: [\n \"GET /orgs/{org}/actions/permissions/workflow\",\n ],\n getGithubActionsDefaultWorkflowPermissionsRepository: [\n \"GET /repos/{owner}/{repo}/actions/permissions/workflow\",\n ],\n getGithubActionsPermissionsOrganization: [\n \"GET /orgs/{org}/actions/permissions\",\n ],\n getGithubActionsPermissionsRepository: [\n \"GET /repos/{owner}/{repo}/actions/permissions\",\n ],\n getJobForWorkflowRun: [\"GET /repos/{owner}/{repo}/actions/jobs/{job_id}\"],\n getOrgPublicKey: [\"GET /orgs/{org}/actions/secrets/public-key\"],\n getOrgSecret: [\"GET /orgs/{org}/actions/secrets/{secret_name}\"],\n getPendingDeploymentsForRun: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments\",\n ],\n getRepoPermissions: [\n \"GET /repos/{owner}/{repo}/actions/permissions\",\n {},\n { renamed: [\"actions\", \"getGithubActionsPermissionsRepository\"] },\n ],\n getRepoPublicKey: [\"GET /repos/{owner}/{repo}/actions/secrets/public-key\"],\n getRepoSecret: [\"GET /repos/{owner}/{repo}/actions/secrets/{secret_name}\"],\n getReviewsForRun: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/approvals\",\n ],\n getSelfHostedRunnerForOrg: [\"GET /orgs/{org}/actions/runners/{runner_id}\"],\n getSelfHostedRunnerForRepo: [\n \"GET 
/repos/{owner}/{repo}/actions/runners/{runner_id}\",\n ],\n getWorkflow: [\"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}\"],\n getWorkflowAccessToRepository: [\n \"GET /repos/{owner}/{repo}/actions/permissions/access\",\n ],\n getWorkflowRun: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}\"],\n getWorkflowRunAttempt: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}\",\n ],\n getWorkflowRunUsage: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing\",\n ],\n getWorkflowUsage: [\n \"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing\",\n ],\n listArtifactsForRepo: [\"GET /repos/{owner}/{repo}/actions/artifacts\"],\n listEnvironmentSecrets: [\n \"GET /repositories/{repository_id}/environments/{environment_name}/secrets\",\n ],\n listJobsForWorkflowRun: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs\",\n ],\n listJobsForWorkflowRunAttempt: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs\",\n ],\n listLabelsForSelfHostedRunnerForOrg: [\n \"GET /orgs/{org}/actions/runners/{runner_id}/labels\",\n ],\n listLabelsForSelfHostedRunnerForRepo: [\n \"GET /repos/{owner}/{repo}/actions/runners/{runner_id}/labels\",\n ],\n listOrgSecrets: [\"GET /orgs/{org}/actions/secrets\"],\n listRepoSecrets: [\"GET /repos/{owner}/{repo}/actions/secrets\"],\n listRepoWorkflows: [\"GET /repos/{owner}/{repo}/actions/workflows\"],\n listRunnerApplicationsForOrg: [\"GET /orgs/{org}/actions/runners/downloads\"],\n listRunnerApplicationsForRepo: [\n \"GET /repos/{owner}/{repo}/actions/runners/downloads\",\n ],\n listSelectedReposForOrgSecret: [\n \"GET /orgs/{org}/actions/secrets/{secret_name}/repositories\",\n ],\n listSelectedRepositoriesEnabledGithubActionsOrganization: [\n \"GET /orgs/{org}/actions/permissions/repositories\",\n ],\n listSelfHostedRunnersForOrg: [\"GET /orgs/{org}/actions/runners\"],\n listSelfHostedRunnersForRepo: [\"GET /repos/{owner}/{repo}/actions/runners\"],\n listWorkflowRunArtifacts: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts\",\n ],\n listWorkflowRuns: [\n \"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs\",\n ],\n listWorkflowRunsForRepo: [\"GET /repos/{owner}/{repo}/actions/runs\"],\n reRunJobForWorkflowRun: [\n \"POST /repos/{owner}/{repo}/actions/jobs/{job_id}/rerun\",\n ],\n reRunWorkflow: [\"POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun\"],\n reRunWorkflowFailedJobs: [\n \"POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun-failed-jobs\",\n ],\n removeAllCustomLabelsFromSelfHostedRunnerForOrg: [\n \"DELETE /orgs/{org}/actions/runners/{runner_id}/labels\",\n ],\n removeAllCustomLabelsFromSelfHostedRunnerForRepo: [\n \"DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels\",\n ],\n removeCustomLabelFromSelfHostedRunnerForOrg: [\n \"DELETE /orgs/{org}/actions/runners/{runner_id}/labels/{name}\",\n ],\n removeCustomLabelFromSelfHostedRunnerForRepo: [\n \"DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels/{name}\",\n ],\n removeSelectedRepoFromOrgSecret: [\n \"DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}\",\n ],\n reviewPendingDeploymentsForRun: [\n \"POST /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments\",\n ],\n setAllowedActionsOrganization: [\n \"PUT /orgs/{org}/actions/permissions/selected-actions\",\n ],\n setAllowedActionsRepository: [\n \"PUT /repos/{owner}/{repo}/actions/permissions/selected-actions\",\n ],\n setCustomLabelsForSelfHostedRunnerForOrg: [\n 
\"PUT /orgs/{org}/actions/runners/{runner_id}/labels\",\n ],\n setCustomLabelsForSelfHostedRunnerForRepo: [\n \"PUT /repos/{owner}/{repo}/actions/runners/{runner_id}/labels\",\n ],\n setGithubActionsDefaultWorkflowPermissionsEnterprise: [\n \"PUT /enterprises/{enterprise}/actions/permissions/workflow\",\n ],\n setGithubActionsDefaultWorkflowPermissionsOrganization: [\n \"PUT /orgs/{org}/actions/permissions/workflow\",\n ],\n setGithubActionsDefaultWorkflowPermissionsRepository: [\n \"PUT /repos/{owner}/{repo}/actions/permissions/workflow\",\n ],\n setGithubActionsPermissionsOrganization: [\n \"PUT /orgs/{org}/actions/permissions\",\n ],\n setGithubActionsPermissionsRepository: [\n \"PUT /repos/{owner}/{repo}/actions/permissions\",\n ],\n setSelectedReposForOrgSecret: [\n \"PUT /orgs/{org}/actions/secrets/{secret_name}/repositories\",\n ],\n setSelectedRepositoriesEnabledGithubActionsOrganization: [\n \"PUT /orgs/{org}/actions/permissions/repositories\",\n ],\n setWorkflowAccessToRepository: [\n \"PUT /repos/{owner}/{repo}/actions/permissions/access\",\n ],\n },\n activity: {\n checkRepoIsStarredByAuthenticatedUser: [\"GET /user/starred/{owner}/{repo}\"],\n deleteRepoSubscription: [\"DELETE /repos/{owner}/{repo}/subscription\"],\n deleteThreadSubscription: [\n \"DELETE /notifications/threads/{thread_id}/subscription\",\n ],\n getFeeds: [\"GET /feeds\"],\n getRepoSubscription: [\"GET /repos/{owner}/{repo}/subscription\"],\n getThread: [\"GET /notifications/threads/{thread_id}\"],\n getThreadSubscriptionForAuthenticatedUser: [\n \"GET /notifications/threads/{thread_id}/subscription\",\n ],\n listEventsForAuthenticatedUser: [\"GET /users/{username}/events\"],\n listNotificationsForAuthenticatedUser: [\"GET /notifications\"],\n listOrgEventsForAuthenticatedUser: [\n \"GET /users/{username}/events/orgs/{org}\",\n ],\n listPublicEvents: [\"GET /events\"],\n listPublicEventsForRepoNetwork: [\"GET /networks/{owner}/{repo}/events\"],\n listPublicEventsForUser: [\"GET /users/{username}/events/public\"],\n listPublicOrgEvents: [\"GET /orgs/{org}/events\"],\n listReceivedEventsForUser: [\"GET /users/{username}/received_events\"],\n listReceivedPublicEventsForUser: [\n \"GET /users/{username}/received_events/public\",\n ],\n listRepoEvents: [\"GET /repos/{owner}/{repo}/events\"],\n listRepoNotificationsForAuthenticatedUser: [\n \"GET /repos/{owner}/{repo}/notifications\",\n ],\n listReposStarredByAuthenticatedUser: [\"GET /user/starred\"],\n listReposStarredByUser: [\"GET /users/{username}/starred\"],\n listReposWatchedByUser: [\"GET /users/{username}/subscriptions\"],\n listStargazersForRepo: [\"GET /repos/{owner}/{repo}/stargazers\"],\n listWatchedReposForAuthenticatedUser: [\"GET /user/subscriptions\"],\n listWatchersForRepo: [\"GET /repos/{owner}/{repo}/subscribers\"],\n markNotificationsAsRead: [\"PUT /notifications\"],\n markRepoNotificationsAsRead: [\"PUT /repos/{owner}/{repo}/notifications\"],\n markThreadAsRead: [\"PATCH /notifications/threads/{thread_id}\"],\n setRepoSubscription: [\"PUT /repos/{owner}/{repo}/subscription\"],\n setThreadSubscription: [\n \"PUT /notifications/threads/{thread_id}/subscription\",\n ],\n starRepoForAuthenticatedUser: [\"PUT /user/starred/{owner}/{repo}\"],\n unstarRepoForAuthenticatedUser: [\"DELETE /user/starred/{owner}/{repo}\"],\n },\n apps: {\n addRepoToInstallation: [\n \"PUT /user/installations/{installation_id}/repositories/{repository_id}\",\n {},\n { renamed: [\"apps\", \"addRepoToInstallationForAuthenticatedUser\"] },\n ],\n 
addRepoToInstallationForAuthenticatedUser: [\n \"PUT /user/installations/{installation_id}/repositories/{repository_id}\",\n ],\n checkToken: [\"POST /applications/{client_id}/token\"],\n createFromManifest: [\"POST /app-manifests/{code}/conversions\"],\n createInstallationAccessToken: [\n \"POST /app/installations/{installation_id}/access_tokens\",\n ],\n deleteAuthorization: [\"DELETE /applications/{client_id}/grant\"],\n deleteInstallation: [\"DELETE /app/installations/{installation_id}\"],\n deleteToken: [\"DELETE /applications/{client_id}/token\"],\n getAuthenticated: [\"GET /app\"],\n getBySlug: [\"GET /apps/{app_slug}\"],\n getInstallation: [\"GET /app/installations/{installation_id}\"],\n getOrgInstallation: [\"GET /orgs/{org}/installation\"],\n getRepoInstallation: [\"GET /repos/{owner}/{repo}/installation\"],\n getSubscriptionPlanForAccount: [\n \"GET /marketplace_listing/accounts/{account_id}\",\n ],\n getSubscriptionPlanForAccountStubbed: [\n \"GET /marketplace_listing/stubbed/accounts/{account_id}\",\n ],\n getUserInstallation: [\"GET /users/{username}/installation\"],\n getWebhookConfigForApp: [\"GET /app/hook/config\"],\n getWebhookDelivery: [\"GET /app/hook/deliveries/{delivery_id}\"],\n listAccountsForPlan: [\"GET /marketplace_listing/plans/{plan_id}/accounts\"],\n listAccountsForPlanStubbed: [\n \"GET /marketplace_listing/stubbed/plans/{plan_id}/accounts\",\n ],\n listInstallationReposForAuthenticatedUser: [\n \"GET /user/installations/{installation_id}/repositories\",\n ],\n listInstallations: [\"GET /app/installations\"],\n listInstallationsForAuthenticatedUser: [\"GET /user/installations\"],\n listPlans: [\"GET /marketplace_listing/plans\"],\n listPlansStubbed: [\"GET /marketplace_listing/stubbed/plans\"],\n listReposAccessibleToInstallation: [\"GET /installation/repositories\"],\n listSubscriptionsForAuthenticatedUser: [\"GET /user/marketplace_purchases\"],\n listSubscriptionsForAuthenticatedUserStubbed: [\n \"GET /user/marketplace_purchases/stubbed\",\n ],\n listWebhookDeliveries: [\"GET /app/hook/deliveries\"],\n redeliverWebhookDelivery: [\n \"POST /app/hook/deliveries/{delivery_id}/attempts\",\n ],\n removeRepoFromInstallation: [\n \"DELETE /user/installations/{installation_id}/repositories/{repository_id}\",\n {},\n { renamed: [\"apps\", \"removeRepoFromInstallationForAuthenticatedUser\"] },\n ],\n removeRepoFromInstallationForAuthenticatedUser: [\n \"DELETE /user/installations/{installation_id}/repositories/{repository_id}\",\n ],\n resetToken: [\"PATCH /applications/{client_id}/token\"],\n revokeInstallationAccessToken: [\"DELETE /installation/token\"],\n scopeToken: [\"POST /applications/{client_id}/token/scoped\"],\n suspendInstallation: [\"PUT /app/installations/{installation_id}/suspended\"],\n unsuspendInstallation: [\n \"DELETE /app/installations/{installation_id}/suspended\",\n ],\n updateWebhookConfigForApp: [\"PATCH /app/hook/config\"],\n },\n billing: {\n getGithubActionsBillingOrg: [\"GET /orgs/{org}/settings/billing/actions\"],\n getGithubActionsBillingUser: [\n \"GET /users/{username}/settings/billing/actions\",\n ],\n getGithubAdvancedSecurityBillingGhe: [\n \"GET /enterprises/{enterprise}/settings/billing/advanced-security\",\n ],\n getGithubAdvancedSecurityBillingOrg: [\n \"GET /orgs/{org}/settings/billing/advanced-security\",\n ],\n getGithubPackagesBillingOrg: [\"GET /orgs/{org}/settings/billing/packages\"],\n getGithubPackagesBillingUser: [\n \"GET /users/{username}/settings/billing/packages\",\n ],\n getSharedStorageBillingOrg: [\n \"GET 
/orgs/{org}/settings/billing/shared-storage\",\n ],\n getSharedStorageBillingUser: [\n \"GET /users/{username}/settings/billing/shared-storage\",\n ],\n },\n checks: {\n create: [\"POST /repos/{owner}/{repo}/check-runs\"],\n createSuite: [\"POST /repos/{owner}/{repo}/check-suites\"],\n get: [\"GET /repos/{owner}/{repo}/check-runs/{check_run_id}\"],\n getSuite: [\"GET /repos/{owner}/{repo}/check-suites/{check_suite_id}\"],\n listAnnotations: [\n \"GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations\",\n ],\n listForRef: [\"GET /repos/{owner}/{repo}/commits/{ref}/check-runs\"],\n listForSuite: [\n \"GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs\",\n ],\n listSuitesForRef: [\"GET /repos/{owner}/{repo}/commits/{ref}/check-suites\"],\n rerequestRun: [\n \"POST /repos/{owner}/{repo}/check-runs/{check_run_id}/rerequest\",\n ],\n rerequestSuite: [\n \"POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest\",\n ],\n setSuitesPreferences: [\n \"PATCH /repos/{owner}/{repo}/check-suites/preferences\",\n ],\n update: [\"PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}\"],\n },\n codeScanning: {\n deleteAnalysis: [\n \"DELETE /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}{?confirm_delete}\",\n ],\n getAlert: [\n \"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}\",\n {},\n { renamedParameters: { alert_id: \"alert_number\" } },\n ],\n getAnalysis: [\n \"GET /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}\",\n ],\n getSarif: [\"GET /repos/{owner}/{repo}/code-scanning/sarifs/{sarif_id}\"],\n listAlertInstances: [\n \"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances\",\n ],\n listAlertsForOrg: [\"GET /orgs/{org}/code-scanning/alerts\"],\n listAlertsForRepo: [\"GET /repos/{owner}/{repo}/code-scanning/alerts\"],\n listAlertsInstances: [\n \"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances\",\n {},\n { renamed: [\"codeScanning\", \"listAlertInstances\"] },\n ],\n listRecentAnalyses: [\"GET /repos/{owner}/{repo}/code-scanning/analyses\"],\n updateAlert: [\n \"PATCH /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}\",\n ],\n uploadSarif: [\"POST /repos/{owner}/{repo}/code-scanning/sarifs\"],\n },\n codesOfConduct: {\n getAllCodesOfConduct: [\"GET /codes_of_conduct\"],\n getConductCode: [\"GET /codes_of_conduct/{key}\"],\n },\n codespaces: {\n addRepositoryForSecretForAuthenticatedUser: [\n \"PUT /user/codespaces/secrets/{secret_name}/repositories/{repository_id}\",\n ],\n codespaceMachinesForAuthenticatedUser: [\n \"GET /user/codespaces/{codespace_name}/machines\",\n ],\n createForAuthenticatedUser: [\"POST /user/codespaces\"],\n createOrUpdateRepoSecret: [\n \"PUT /repos/{owner}/{repo}/codespaces/secrets/{secret_name}\",\n ],\n createOrUpdateSecretForAuthenticatedUser: [\n \"PUT /user/codespaces/secrets/{secret_name}\",\n ],\n createWithPrForAuthenticatedUser: [\n \"POST /repos/{owner}/{repo}/pulls/{pull_number}/codespaces\",\n ],\n createWithRepoForAuthenticatedUser: [\n \"POST /repos/{owner}/{repo}/codespaces\",\n ],\n deleteForAuthenticatedUser: [\"DELETE /user/codespaces/{codespace_name}\"],\n deleteFromOrganization: [\n \"DELETE /orgs/{org}/members/{username}/codespaces/{codespace_name}\",\n ],\n deleteRepoSecret: [\n \"DELETE /repos/{owner}/{repo}/codespaces/secrets/{secret_name}\",\n ],\n deleteSecretForAuthenticatedUser: [\n \"DELETE /user/codespaces/secrets/{secret_name}\",\n ],\n exportForAuthenticatedUser: [\n \"POST /user/codespaces/{codespace_name}/exports\",\n 
],\n getExportDetailsForAuthenticatedUser: [\n \"GET /user/codespaces/{codespace_name}/exports/{export_id}\",\n ],\n getForAuthenticatedUser: [\"GET /user/codespaces/{codespace_name}\"],\n getPublicKeyForAuthenticatedUser: [\n \"GET /user/codespaces/secrets/public-key\",\n ],\n getRepoPublicKey: [\n \"GET /repos/{owner}/{repo}/codespaces/secrets/public-key\",\n ],\n getRepoSecret: [\n \"GET /repos/{owner}/{repo}/codespaces/secrets/{secret_name}\",\n ],\n getSecretForAuthenticatedUser: [\n \"GET /user/codespaces/secrets/{secret_name}\",\n ],\n listDevcontainersInRepositoryForAuthenticatedUser: [\n \"GET /repos/{owner}/{repo}/codespaces/devcontainers\",\n ],\n listForAuthenticatedUser: [\"GET /user/codespaces\"],\n listInOrganization: [\n \"GET /orgs/{org}/codespaces\",\n {},\n { renamedParameters: { org_id: \"org\" } },\n ],\n listInRepositoryForAuthenticatedUser: [\n \"GET /repos/{owner}/{repo}/codespaces\",\n ],\n listRepoSecrets: [\"GET /repos/{owner}/{repo}/codespaces/secrets\"],\n listRepositoriesForSecretForAuthenticatedUser: [\n \"GET /user/codespaces/secrets/{secret_name}/repositories\",\n ],\n listSecretsForAuthenticatedUser: [\"GET /user/codespaces/secrets\"],\n removeRepositoryForSecretForAuthenticatedUser: [\n \"DELETE /user/codespaces/secrets/{secret_name}/repositories/{repository_id}\",\n ],\n repoMachinesForAuthenticatedUser: [\n \"GET /repos/{owner}/{repo}/codespaces/machines\",\n ],\n setRepositoriesForSecretForAuthenticatedUser: [\n \"PUT /user/codespaces/secrets/{secret_name}/repositories\",\n ],\n startForAuthenticatedUser: [\"POST /user/codespaces/{codespace_name}/start\"],\n stopForAuthenticatedUser: [\"POST /user/codespaces/{codespace_name}/stop\"],\n stopInOrganization: [\n \"POST /orgs/{org}/members/{username}/codespaces/{codespace_name}/stop\",\n ],\n updateForAuthenticatedUser: [\"PATCH /user/codespaces/{codespace_name}\"],\n },\n dependabot: {\n addSelectedRepoToOrgSecret: [\n \"PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}\",\n ],\n createOrUpdateOrgSecret: [\n \"PUT /orgs/{org}/dependabot/secrets/{secret_name}\",\n ],\n createOrUpdateRepoSecret: [\n \"PUT /repos/{owner}/{repo}/dependabot/secrets/{secret_name}\",\n ],\n deleteOrgSecret: [\"DELETE /orgs/{org}/dependabot/secrets/{secret_name}\"],\n deleteRepoSecret: [\n \"DELETE /repos/{owner}/{repo}/dependabot/secrets/{secret_name}\",\n ],\n getOrgPublicKey: [\"GET /orgs/{org}/dependabot/secrets/public-key\"],\n getOrgSecret: [\"GET /orgs/{org}/dependabot/secrets/{secret_name}\"],\n getRepoPublicKey: [\n \"GET /repos/{owner}/{repo}/dependabot/secrets/public-key\",\n ],\n getRepoSecret: [\n \"GET /repos/{owner}/{repo}/dependabot/secrets/{secret_name}\",\n ],\n listOrgSecrets: [\"GET /orgs/{org}/dependabot/secrets\"],\n listRepoSecrets: [\"GET /repos/{owner}/{repo}/dependabot/secrets\"],\n listSelectedReposForOrgSecret: [\n \"GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories\",\n ],\n removeSelectedRepoFromOrgSecret: [\n \"DELETE /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}\",\n ],\n setSelectedReposForOrgSecret: [\n \"PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories\",\n ],\n },\n dependencyGraph: {\n createRepositorySnapshot: [\n \"POST /repos/{owner}/{repo}/dependency-graph/snapshots\",\n ],\n diffRange: [\n \"GET /repos/{owner}/{repo}/dependency-graph/compare/{basehead}\",\n ],\n },\n emojis: { get: [\"GET /emojis\"] },\n enterpriseAdmin: {\n addCustomLabelsToSelfHostedRunnerForEnterprise: [\n \"POST 
/enterprises/{enterprise}/actions/runners/{runner_id}/labels\",\n ],\n disableSelectedOrganizationGithubActionsEnterprise: [\n \"DELETE /enterprises/{enterprise}/actions/permissions/organizations/{org_id}\",\n ],\n enableSelectedOrganizationGithubActionsEnterprise: [\n \"PUT /enterprises/{enterprise}/actions/permissions/organizations/{org_id}\",\n ],\n getAllowedActionsEnterprise: [\n \"GET /enterprises/{enterprise}/actions/permissions/selected-actions\",\n ],\n getGithubActionsPermissionsEnterprise: [\n \"GET /enterprises/{enterprise}/actions/permissions\",\n ],\n getServerStatistics: [\n \"GET /enterprise-installation/{enterprise_or_org}/server-statistics\",\n ],\n listLabelsForSelfHostedRunnerForEnterprise: [\n \"GET /enterprises/{enterprise}/actions/runners/{runner_id}/labels\",\n ],\n listSelectedOrganizationsEnabledGithubActionsEnterprise: [\n \"GET /enterprises/{enterprise}/actions/permissions/organizations\",\n ],\n removeAllCustomLabelsFromSelfHostedRunnerForEnterprise: [\n \"DELETE /enterprises/{enterprise}/actions/runners/{runner_id}/labels\",\n ],\n removeCustomLabelFromSelfHostedRunnerForEnterprise: [\n \"DELETE /enterprises/{enterprise}/actions/runners/{runner_id}/labels/{name}\",\n ],\n setAllowedActionsEnterprise: [\n \"PUT /enterprises/{enterprise}/actions/permissions/selected-actions\",\n ],\n setCustomLabelsForSelfHostedRunnerForEnterprise: [\n \"PUT /enterprises/{enterprise}/actions/runners/{runner_id}/labels\",\n ],\n setGithubActionsPermissionsEnterprise: [\n \"PUT /enterprises/{enterprise}/actions/permissions\",\n ],\n setSelectedOrganizationsEnabledGithubActionsEnterprise: [\n \"PUT /enterprises/{enterprise}/actions/permissions/organizations\",\n ],\n },\n gists: {\n checkIsStarred: [\"GET /gists/{gist_id}/star\"],\n create: [\"POST /gists\"],\n createComment: [\"POST /gists/{gist_id}/comments\"],\n delete: [\"DELETE /gists/{gist_id}\"],\n deleteComment: [\"DELETE /gists/{gist_id}/comments/{comment_id}\"],\n fork: [\"POST /gists/{gist_id}/forks\"],\n get: [\"GET /gists/{gist_id}\"],\n getComment: [\"GET /gists/{gist_id}/comments/{comment_id}\"],\n getRevision: [\"GET /gists/{gist_id}/{sha}\"],\n list: [\"GET /gists\"],\n listComments: [\"GET /gists/{gist_id}/comments\"],\n listCommits: [\"GET /gists/{gist_id}/commits\"],\n listForUser: [\"GET /users/{username}/gists\"],\n listForks: [\"GET /gists/{gist_id}/forks\"],\n listPublic: [\"GET /gists/public\"],\n listStarred: [\"GET /gists/starred\"],\n star: [\"PUT /gists/{gist_id}/star\"],\n unstar: [\"DELETE /gists/{gist_id}/star\"],\n update: [\"PATCH /gists/{gist_id}\"],\n updateComment: [\"PATCH /gists/{gist_id}/comments/{comment_id}\"],\n },\n git: {\n createBlob: [\"POST /repos/{owner}/{repo}/git/blobs\"],\n createCommit: [\"POST /repos/{owner}/{repo}/git/commits\"],\n createRef: [\"POST /repos/{owner}/{repo}/git/refs\"],\n createTag: [\"POST /repos/{owner}/{repo}/git/tags\"],\n createTree: [\"POST /repos/{owner}/{repo}/git/trees\"],\n deleteRef: [\"DELETE /repos/{owner}/{repo}/git/refs/{ref}\"],\n getBlob: [\"GET /repos/{owner}/{repo}/git/blobs/{file_sha}\"],\n getCommit: [\"GET /repos/{owner}/{repo}/git/commits/{commit_sha}\"],\n getRef: [\"GET /repos/{owner}/{repo}/git/ref/{ref}\"],\n getTag: [\"GET /repos/{owner}/{repo}/git/tags/{tag_sha}\"],\n getTree: [\"GET /repos/{owner}/{repo}/git/trees/{tree_sha}\"],\n listMatchingRefs: [\"GET /repos/{owner}/{repo}/git/matching-refs/{ref}\"],\n updateRef: [\"PATCH /repos/{owner}/{repo}/git/refs/{ref}\"],\n },\n gitignore: {\n getAllTemplates: [\"GET 
/gitignore/templates\"],\n getTemplate: [\"GET /gitignore/templates/{name}\"],\n },\n interactions: {\n getRestrictionsForAuthenticatedUser: [\"GET /user/interaction-limits\"],\n getRestrictionsForOrg: [\"GET /orgs/{org}/interaction-limits\"],\n getRestrictionsForRepo: [\"GET /repos/{owner}/{repo}/interaction-limits\"],\n getRestrictionsForYourPublicRepos: [\n \"GET /user/interaction-limits\",\n {},\n { renamed: [\"interactions\", \"getRestrictionsForAuthenticatedUser\"] },\n ],\n removeRestrictionsForAuthenticatedUser: [\"DELETE /user/interaction-limits\"],\n removeRestrictionsForOrg: [\"DELETE /orgs/{org}/interaction-limits\"],\n removeRestrictionsForRepo: [\n \"DELETE /repos/{owner}/{repo}/interaction-limits\",\n ],\n removeRestrictionsForYourPublicRepos: [\n \"DELETE /user/interaction-limits\",\n {},\n { renamed: [\"interactions\", \"removeRestrictionsForAuthenticatedUser\"] },\n ],\n setRestrictionsForAuthenticatedUser: [\"PUT /user/interaction-limits\"],\n setRestrictionsForOrg: [\"PUT /orgs/{org}/interaction-limits\"],\n setRestrictionsForRepo: [\"PUT /repos/{owner}/{repo}/interaction-limits\"],\n setRestrictionsForYourPublicRepos: [\n \"PUT /user/interaction-limits\",\n {},\n { renamed: [\"interactions\", \"setRestrictionsForAuthenticatedUser\"] },\n ],\n },\n issues: {\n addAssignees: [\n \"POST /repos/{owner}/{repo}/issues/{issue_number}/assignees\",\n ],\n addLabels: [\"POST /repos/{owner}/{repo}/issues/{issue_number}/labels\"],\n checkUserCanBeAssigned: [\"GET /repos/{owner}/{repo}/assignees/{assignee}\"],\n create: [\"POST /repos/{owner}/{repo}/issues\"],\n createComment: [\n \"POST /repos/{owner}/{repo}/issues/{issue_number}/comments\",\n ],\n createLabel: [\"POST /repos/{owner}/{repo}/labels\"],\n createMilestone: [\"POST /repos/{owner}/{repo}/milestones\"],\n deleteComment: [\n \"DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}\",\n ],\n deleteLabel: [\"DELETE /repos/{owner}/{repo}/labels/{name}\"],\n deleteMilestone: [\n \"DELETE /repos/{owner}/{repo}/milestones/{milestone_number}\",\n ],\n get: [\"GET /repos/{owner}/{repo}/issues/{issue_number}\"],\n getComment: [\"GET /repos/{owner}/{repo}/issues/comments/{comment_id}\"],\n getEvent: [\"GET /repos/{owner}/{repo}/issues/events/{event_id}\"],\n getLabel: [\"GET /repos/{owner}/{repo}/labels/{name}\"],\n getMilestone: [\"GET /repos/{owner}/{repo}/milestones/{milestone_number}\"],\n list: [\"GET /issues\"],\n listAssignees: [\"GET /repos/{owner}/{repo}/assignees\"],\n listComments: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/comments\"],\n listCommentsForRepo: [\"GET /repos/{owner}/{repo}/issues/comments\"],\n listEvents: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/events\"],\n listEventsForRepo: [\"GET /repos/{owner}/{repo}/issues/events\"],\n listEventsForTimeline: [\n \"GET /repos/{owner}/{repo}/issues/{issue_number}/timeline\",\n ],\n listForAuthenticatedUser: [\"GET /user/issues\"],\n listForOrg: [\"GET /orgs/{org}/issues\"],\n listForRepo: [\"GET /repos/{owner}/{repo}/issues\"],\n listLabelsForMilestone: [\n \"GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels\",\n ],\n listLabelsForRepo: [\"GET /repos/{owner}/{repo}/labels\"],\n listLabelsOnIssue: [\n \"GET /repos/{owner}/{repo}/issues/{issue_number}/labels\",\n ],\n listMilestones: [\"GET /repos/{owner}/{repo}/milestones\"],\n lock: [\"PUT /repos/{owner}/{repo}/issues/{issue_number}/lock\"],\n removeAllLabels: [\n \"DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels\",\n ],\n removeAssignees: [\n \"DELETE 
/repos/{owner}/{repo}/issues/{issue_number}/assignees\",\n ],\n removeLabel: [\n \"DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}\",\n ],\n setLabels: [\"PUT /repos/{owner}/{repo}/issues/{issue_number}/labels\"],\n unlock: [\"DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock\"],\n update: [\"PATCH /repos/{owner}/{repo}/issues/{issue_number}\"],\n updateComment: [\"PATCH /repos/{owner}/{repo}/issues/comments/{comment_id}\"],\n updateLabel: [\"PATCH /repos/{owner}/{repo}/labels/{name}\"],\n updateMilestone: [\n \"PATCH /repos/{owner}/{repo}/milestones/{milestone_number}\",\n ],\n },\n licenses: {\n get: [\"GET /licenses/{license}\"],\n getAllCommonlyUsed: [\"GET /licenses\"],\n getForRepo: [\"GET /repos/{owner}/{repo}/license\"],\n },\n markdown: {\n render: [\"POST /markdown\"],\n renderRaw: [\n \"POST /markdown/raw\",\n { headers: { \"content-type\": \"text/plain; charset=utf-8\" } },\n ],\n },\n meta: {\n get: [\"GET /meta\"],\n getOctocat: [\"GET /octocat\"],\n getZen: [\"GET /zen\"],\n root: [\"GET /\"],\n },\n migrations: {\n cancelImport: [\"DELETE /repos/{owner}/{repo}/import\"],\n deleteArchiveForAuthenticatedUser: [\n \"DELETE /user/migrations/{migration_id}/archive\",\n ],\n deleteArchiveForOrg: [\n \"DELETE /orgs/{org}/migrations/{migration_id}/archive\",\n ],\n downloadArchiveForOrg: [\n \"GET /orgs/{org}/migrations/{migration_id}/archive\",\n ],\n getArchiveForAuthenticatedUser: [\n \"GET /user/migrations/{migration_id}/archive\",\n ],\n getCommitAuthors: [\"GET /repos/{owner}/{repo}/import/authors\"],\n getImportStatus: [\"GET /repos/{owner}/{repo}/import\"],\n getLargeFiles: [\"GET /repos/{owner}/{repo}/import/large_files\"],\n getStatusForAuthenticatedUser: [\"GET /user/migrations/{migration_id}\"],\n getStatusForOrg: [\"GET /orgs/{org}/migrations/{migration_id}\"],\n listForAuthenticatedUser: [\"GET /user/migrations\"],\n listForOrg: [\"GET /orgs/{org}/migrations\"],\n listReposForAuthenticatedUser: [\n \"GET /user/migrations/{migration_id}/repositories\",\n ],\n listReposForOrg: [\"GET /orgs/{org}/migrations/{migration_id}/repositories\"],\n listReposForUser: [\n \"GET /user/migrations/{migration_id}/repositories\",\n {},\n { renamed: [\"migrations\", \"listReposForAuthenticatedUser\"] },\n ],\n mapCommitAuthor: [\"PATCH /repos/{owner}/{repo}/import/authors/{author_id}\"],\n setLfsPreference: [\"PATCH /repos/{owner}/{repo}/import/lfs\"],\n startForAuthenticatedUser: [\"POST /user/migrations\"],\n startForOrg: [\"POST /orgs/{org}/migrations\"],\n startImport: [\"PUT /repos/{owner}/{repo}/import\"],\n unlockRepoForAuthenticatedUser: [\n \"DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock\",\n ],\n unlockRepoForOrg: [\n \"DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock\",\n ],\n updateImport: [\"PATCH /repos/{owner}/{repo}/import\"],\n },\n orgs: {\n blockUser: [\"PUT /orgs/{org}/blocks/{username}\"],\n cancelInvitation: [\"DELETE /orgs/{org}/invitations/{invitation_id}\"],\n checkBlockedUser: [\"GET /orgs/{org}/blocks/{username}\"],\n checkMembershipForUser: [\"GET /orgs/{org}/members/{username}\"],\n checkPublicMembershipForUser: [\"GET /orgs/{org}/public_members/{username}\"],\n convertMemberToOutsideCollaborator: [\n \"PUT /orgs/{org}/outside_collaborators/{username}\",\n ],\n createInvitation: [\"POST /orgs/{org}/invitations\"],\n createWebhook: [\"POST /orgs/{org}/hooks\"],\n deleteWebhook: [\"DELETE /orgs/{org}/hooks/{hook_id}\"],\n get: [\"GET /orgs/{org}\"],\n getMembershipForAuthenticatedUser: [\"GET 
/user/memberships/orgs/{org}\"],\n getMembershipForUser: [\"GET /orgs/{org}/memberships/{username}\"],\n getWebhook: [\"GET /orgs/{org}/hooks/{hook_id}\"],\n getWebhookConfigForOrg: [\"GET /orgs/{org}/hooks/{hook_id}/config\"],\n getWebhookDelivery: [\n \"GET /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}\",\n ],\n list: [\"GET /organizations\"],\n listAppInstallations: [\"GET /orgs/{org}/installations\"],\n listBlockedUsers: [\"GET /orgs/{org}/blocks\"],\n listCustomRoles: [\"GET /organizations/{organization_id}/custom_roles\"],\n listFailedInvitations: [\"GET /orgs/{org}/failed_invitations\"],\n listForAuthenticatedUser: [\"GET /user/orgs\"],\n listForUser: [\"GET /users/{username}/orgs\"],\n listInvitationTeams: [\"GET /orgs/{org}/invitations/{invitation_id}/teams\"],\n listMembers: [\"GET /orgs/{org}/members\"],\n listMembershipsForAuthenticatedUser: [\"GET /user/memberships/orgs\"],\n listOutsideCollaborators: [\"GET /orgs/{org}/outside_collaborators\"],\n listPendingInvitations: [\"GET /orgs/{org}/invitations\"],\n listPublicMembers: [\"GET /orgs/{org}/public_members\"],\n listWebhookDeliveries: [\"GET /orgs/{org}/hooks/{hook_id}/deliveries\"],\n listWebhooks: [\"GET /orgs/{org}/hooks\"],\n pingWebhook: [\"POST /orgs/{org}/hooks/{hook_id}/pings\"],\n redeliverWebhookDelivery: [\n \"POST /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}/attempts\",\n ],\n removeMember: [\"DELETE /orgs/{org}/members/{username}\"],\n removeMembershipForUser: [\"DELETE /orgs/{org}/memberships/{username}\"],\n removeOutsideCollaborator: [\n \"DELETE /orgs/{org}/outside_collaborators/{username}\",\n ],\n removePublicMembershipForAuthenticatedUser: [\n \"DELETE /orgs/{org}/public_members/{username}\",\n ],\n setMembershipForUser: [\"PUT /orgs/{org}/memberships/{username}\"],\n setPublicMembershipForAuthenticatedUser: [\n \"PUT /orgs/{org}/public_members/{username}\",\n ],\n unblockUser: [\"DELETE /orgs/{org}/blocks/{username}\"],\n update: [\"PATCH /orgs/{org}\"],\n updateMembershipForAuthenticatedUser: [\n \"PATCH /user/memberships/orgs/{org}\",\n ],\n updateWebhook: [\"PATCH /orgs/{org}/hooks/{hook_id}\"],\n updateWebhookConfigForOrg: [\"PATCH /orgs/{org}/hooks/{hook_id}/config\"],\n },\n packages: {\n deletePackageForAuthenticatedUser: [\n \"DELETE /user/packages/{package_type}/{package_name}\",\n ],\n deletePackageForOrg: [\n \"DELETE /orgs/{org}/packages/{package_type}/{package_name}\",\n ],\n deletePackageForUser: [\n \"DELETE /users/{username}/packages/{package_type}/{package_name}\",\n ],\n deletePackageVersionForAuthenticatedUser: [\n \"DELETE /user/packages/{package_type}/{package_name}/versions/{package_version_id}\",\n ],\n deletePackageVersionForOrg: [\n \"DELETE /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}\",\n ],\n deletePackageVersionForUser: [\n \"DELETE /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}\",\n ],\n getAllPackageVersionsForAPackageOwnedByAnOrg: [\n \"GET /orgs/{org}/packages/{package_type}/{package_name}/versions\",\n {},\n { renamed: [\"packages\", \"getAllPackageVersionsForPackageOwnedByOrg\"] },\n ],\n getAllPackageVersionsForAPackageOwnedByTheAuthenticatedUser: [\n \"GET /user/packages/{package_type}/{package_name}/versions\",\n {},\n {\n renamed: [\n \"packages\",\n \"getAllPackageVersionsForPackageOwnedByAuthenticatedUser\",\n ],\n },\n ],\n getAllPackageVersionsForPackageOwnedByAuthenticatedUser: [\n \"GET /user/packages/{package_type}/{package_name}/versions\",\n ],\n 
getAllPackageVersionsForPackageOwnedByOrg: [\n \"GET /orgs/{org}/packages/{package_type}/{package_name}/versions\",\n ],\n getAllPackageVersionsForPackageOwnedByUser: [\n \"GET /users/{username}/packages/{package_type}/{package_name}/versions\",\n ],\n getPackageForAuthenticatedUser: [\n \"GET /user/packages/{package_type}/{package_name}\",\n ],\n getPackageForOrganization: [\n \"GET /orgs/{org}/packages/{package_type}/{package_name}\",\n ],\n getPackageForUser: [\n \"GET /users/{username}/packages/{package_type}/{package_name}\",\n ],\n getPackageVersionForAuthenticatedUser: [\n \"GET /user/packages/{package_type}/{package_name}/versions/{package_version_id}\",\n ],\n getPackageVersionForOrganization: [\n \"GET /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}\",\n ],\n getPackageVersionForUser: [\n \"GET /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}\",\n ],\n listPackagesForAuthenticatedUser: [\"GET /user/packages\"],\n listPackagesForOrganization: [\"GET /orgs/{org}/packages\"],\n listPackagesForUser: [\"GET /users/{username}/packages\"],\n restorePackageForAuthenticatedUser: [\n \"POST /user/packages/{package_type}/{package_name}/restore{?token}\",\n ],\n restorePackageForOrg: [\n \"POST /orgs/{org}/packages/{package_type}/{package_name}/restore{?token}\",\n ],\n restorePackageForUser: [\n \"POST /users/{username}/packages/{package_type}/{package_name}/restore{?token}\",\n ],\n restorePackageVersionForAuthenticatedUser: [\n \"POST /user/packages/{package_type}/{package_name}/versions/{package_version_id}/restore\",\n ],\n restorePackageVersionForOrg: [\n \"POST /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore\",\n ],\n restorePackageVersionForUser: [\n \"POST /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore\",\n ],\n },\n projects: {\n addCollaborator: [\"PUT /projects/{project_id}/collaborators/{username}\"],\n createCard: [\"POST /projects/columns/{column_id}/cards\"],\n createColumn: [\"POST /projects/{project_id}/columns\"],\n createForAuthenticatedUser: [\"POST /user/projects\"],\n createForOrg: [\"POST /orgs/{org}/projects\"],\n createForRepo: [\"POST /repos/{owner}/{repo}/projects\"],\n delete: [\"DELETE /projects/{project_id}\"],\n deleteCard: [\"DELETE /projects/columns/cards/{card_id}\"],\n deleteColumn: [\"DELETE /projects/columns/{column_id}\"],\n get: [\"GET /projects/{project_id}\"],\n getCard: [\"GET /projects/columns/cards/{card_id}\"],\n getColumn: [\"GET /projects/columns/{column_id}\"],\n getPermissionForUser: [\n \"GET /projects/{project_id}/collaborators/{username}/permission\",\n ],\n listCards: [\"GET /projects/columns/{column_id}/cards\"],\n listCollaborators: [\"GET /projects/{project_id}/collaborators\"],\n listColumns: [\"GET /projects/{project_id}/columns\"],\n listForOrg: [\"GET /orgs/{org}/projects\"],\n listForRepo: [\"GET /repos/{owner}/{repo}/projects\"],\n listForUser: [\"GET /users/{username}/projects\"],\n moveCard: [\"POST /projects/columns/cards/{card_id}/moves\"],\n moveColumn: [\"POST /projects/columns/{column_id}/moves\"],\n removeCollaborator: [\n \"DELETE /projects/{project_id}/collaborators/{username}\",\n ],\n update: [\"PATCH /projects/{project_id}\"],\n updateCard: [\"PATCH /projects/columns/cards/{card_id}\"],\n updateColumn: [\"PATCH /projects/columns/{column_id}\"],\n },\n pulls: {\n checkIfMerged: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/merge\"],\n create: [\"POST 
/repos/{owner}/{repo}/pulls\"],\n createReplyForReviewComment: [\n \"POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies\",\n ],\n createReview: [\"POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews\"],\n createReviewComment: [\n \"POST /repos/{owner}/{repo}/pulls/{pull_number}/comments\",\n ],\n deletePendingReview: [\n \"DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}\",\n ],\n deleteReviewComment: [\n \"DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}\",\n ],\n dismissReview: [\n \"PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals\",\n ],\n get: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}\"],\n getReview: [\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}\",\n ],\n getReviewComment: [\"GET /repos/{owner}/{repo}/pulls/comments/{comment_id}\"],\n list: [\"GET /repos/{owner}/{repo}/pulls\"],\n listCommentsForReview: [\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments\",\n ],\n listCommits: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/commits\"],\n listFiles: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/files\"],\n listRequestedReviewers: [\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\",\n ],\n listReviewComments: [\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/comments\",\n ],\n listReviewCommentsForRepo: [\"GET /repos/{owner}/{repo}/pulls/comments\"],\n listReviews: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews\"],\n merge: [\"PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge\"],\n removeRequestedReviewers: [\n \"DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\",\n ],\n requestReviewers: [\n \"POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\",\n ],\n submitReview: [\n \"POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events\",\n ],\n update: [\"PATCH /repos/{owner}/{repo}/pulls/{pull_number}\"],\n updateBranch: [\n \"PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch\",\n ],\n updateReview: [\n \"PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}\",\n ],\n updateReviewComment: [\n \"PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}\",\n ],\n },\n rateLimit: { get: [\"GET /rate_limit\"] },\n reactions: {\n createForCommitComment: [\n \"POST /repos/{owner}/{repo}/comments/{comment_id}/reactions\",\n ],\n createForIssue: [\n \"POST /repos/{owner}/{repo}/issues/{issue_number}/reactions\",\n ],\n createForIssueComment: [\n \"POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions\",\n ],\n createForPullRequestReviewComment: [\n \"POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions\",\n ],\n createForRelease: [\n \"POST /repos/{owner}/{repo}/releases/{release_id}/reactions\",\n ],\n createForTeamDiscussionCommentInOrg: [\n \"POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions\",\n ],\n createForTeamDiscussionInOrg: [\n \"POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions\",\n ],\n deleteForCommitComment: [\n \"DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}\",\n ],\n deleteForIssue: [\n \"DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}\",\n ],\n deleteForIssueComment: [\n \"DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}\",\n ],\n deleteForPullRequestComment: [\n \"DELETE 
/repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}\",\n ],\n deleteForRelease: [\n \"DELETE /repos/{owner}/{repo}/releases/{release_id}/reactions/{reaction_id}\",\n ],\n deleteForTeamDiscussion: [\n \"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}\",\n ],\n deleteForTeamDiscussionComment: [\n \"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}\",\n ],\n listForCommitComment: [\n \"GET /repos/{owner}/{repo}/comments/{comment_id}/reactions\",\n ],\n listForIssue: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/reactions\"],\n listForIssueComment: [\n \"GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions\",\n ],\n listForPullRequestReviewComment: [\n \"GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions\",\n ],\n listForRelease: [\n \"GET /repos/{owner}/{repo}/releases/{release_id}/reactions\",\n ],\n listForTeamDiscussionCommentInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions\",\n ],\n listForTeamDiscussionInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions\",\n ],\n },\n repos: {\n acceptInvitation: [\n \"PATCH /user/repository_invitations/{invitation_id}\",\n {},\n { renamed: [\"repos\", \"acceptInvitationForAuthenticatedUser\"] },\n ],\n acceptInvitationForAuthenticatedUser: [\n \"PATCH /user/repository_invitations/{invitation_id}\",\n ],\n addAppAccessRestrictions: [\n \"POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\",\n {},\n { mapToData: \"apps\" },\n ],\n addCollaborator: [\"PUT /repos/{owner}/{repo}/collaborators/{username}\"],\n addStatusCheckContexts: [\n \"POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\",\n {},\n { mapToData: \"contexts\" },\n ],\n addTeamAccessRestrictions: [\n \"POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\",\n {},\n { mapToData: \"teams\" },\n ],\n addUserAccessRestrictions: [\n \"POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\",\n {},\n { mapToData: \"users\" },\n ],\n checkCollaborator: [\"GET /repos/{owner}/{repo}/collaborators/{username}\"],\n checkVulnerabilityAlerts: [\n \"GET /repos/{owner}/{repo}/vulnerability-alerts\",\n ],\n codeownersErrors: [\"GET /repos/{owner}/{repo}/codeowners/errors\"],\n compareCommits: [\"GET /repos/{owner}/{repo}/compare/{base}...{head}\"],\n compareCommitsWithBasehead: [\n \"GET /repos/{owner}/{repo}/compare/{basehead}\",\n ],\n createAutolink: [\"POST /repos/{owner}/{repo}/autolinks\"],\n createCommitComment: [\n \"POST /repos/{owner}/{repo}/commits/{commit_sha}/comments\",\n ],\n createCommitSignatureProtection: [\n \"POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures\",\n ],\n createCommitStatus: [\"POST /repos/{owner}/{repo}/statuses/{sha}\"],\n createDeployKey: [\"POST /repos/{owner}/{repo}/keys\"],\n createDeployment: [\"POST /repos/{owner}/{repo}/deployments\"],\n createDeploymentStatus: [\n \"POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses\",\n ],\n createDispatchEvent: [\"POST /repos/{owner}/{repo}/dispatches\"],\n createForAuthenticatedUser: [\"POST /user/repos\"],\n createFork: [\"POST /repos/{owner}/{repo}/forks\"],\n createInOrg: [\"POST /orgs/{org}/repos\"],\n createOrUpdateEnvironment: [\n \"PUT /repos/{owner}/{repo}/environments/{environment_name}\",\n ],\n 
createOrUpdateFileContents: [\"PUT /repos/{owner}/{repo}/contents/{path}\"],\n createPagesSite: [\"POST /repos/{owner}/{repo}/pages\"],\n createRelease: [\"POST /repos/{owner}/{repo}/releases\"],\n createTagProtection: [\"POST /repos/{owner}/{repo}/tags/protection\"],\n createUsingTemplate: [\n \"POST /repos/{template_owner}/{template_repo}/generate\",\n ],\n createWebhook: [\"POST /repos/{owner}/{repo}/hooks\"],\n declineInvitation: [\n \"DELETE /user/repository_invitations/{invitation_id}\",\n {},\n { renamed: [\"repos\", \"declineInvitationForAuthenticatedUser\"] },\n ],\n declineInvitationForAuthenticatedUser: [\n \"DELETE /user/repository_invitations/{invitation_id}\",\n ],\n delete: [\"DELETE /repos/{owner}/{repo}\"],\n deleteAccessRestrictions: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions\",\n ],\n deleteAdminBranchProtection: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins\",\n ],\n deleteAnEnvironment: [\n \"DELETE /repos/{owner}/{repo}/environments/{environment_name}\",\n ],\n deleteAutolink: [\"DELETE /repos/{owner}/{repo}/autolinks/{autolink_id}\"],\n deleteBranchProtection: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection\",\n ],\n deleteCommitComment: [\"DELETE /repos/{owner}/{repo}/comments/{comment_id}\"],\n deleteCommitSignatureProtection: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures\",\n ],\n deleteDeployKey: [\"DELETE /repos/{owner}/{repo}/keys/{key_id}\"],\n deleteDeployment: [\n \"DELETE /repos/{owner}/{repo}/deployments/{deployment_id}\",\n ],\n deleteFile: [\"DELETE /repos/{owner}/{repo}/contents/{path}\"],\n deleteInvitation: [\n \"DELETE /repos/{owner}/{repo}/invitations/{invitation_id}\",\n ],\n deletePagesSite: [\"DELETE /repos/{owner}/{repo}/pages\"],\n deletePullRequestReviewProtection: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews\",\n ],\n deleteRelease: [\"DELETE /repos/{owner}/{repo}/releases/{release_id}\"],\n deleteReleaseAsset: [\n \"DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}\",\n ],\n deleteTagProtection: [\n \"DELETE /repos/{owner}/{repo}/tags/protection/{tag_protection_id}\",\n ],\n deleteWebhook: [\"DELETE /repos/{owner}/{repo}/hooks/{hook_id}\"],\n disableAutomatedSecurityFixes: [\n \"DELETE /repos/{owner}/{repo}/automated-security-fixes\",\n ],\n disableLfsForRepo: [\"DELETE /repos/{owner}/{repo}/lfs\"],\n disableVulnerabilityAlerts: [\n \"DELETE /repos/{owner}/{repo}/vulnerability-alerts\",\n ],\n downloadArchive: [\n \"GET /repos/{owner}/{repo}/zipball/{ref}\",\n {},\n { renamed: [\"repos\", \"downloadZipballArchive\"] },\n ],\n downloadTarballArchive: [\"GET /repos/{owner}/{repo}/tarball/{ref}\"],\n downloadZipballArchive: [\"GET /repos/{owner}/{repo}/zipball/{ref}\"],\n enableAutomatedSecurityFixes: [\n \"PUT /repos/{owner}/{repo}/automated-security-fixes\",\n ],\n enableLfsForRepo: [\"PUT /repos/{owner}/{repo}/lfs\"],\n enableVulnerabilityAlerts: [\n \"PUT /repos/{owner}/{repo}/vulnerability-alerts\",\n ],\n generateReleaseNotes: [\n \"POST /repos/{owner}/{repo}/releases/generate-notes\",\n ],\n get: [\"GET /repos/{owner}/{repo}\"],\n getAccessRestrictions: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions\",\n ],\n getAdminBranchProtection: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins\",\n ],\n getAllEnvironments: [\"GET /repos/{owner}/{repo}/environments\"],\n getAllStatusCheckContexts: [\n \"GET 
/repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\",\n ],\n getAllTopics: [\"GET /repos/{owner}/{repo}/topics\"],\n getAppsWithAccessToProtectedBranch: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\",\n ],\n getAutolink: [\"GET /repos/{owner}/{repo}/autolinks/{autolink_id}\"],\n getBranch: [\"GET /repos/{owner}/{repo}/branches/{branch}\"],\n getBranchProtection: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection\",\n ],\n getClones: [\"GET /repos/{owner}/{repo}/traffic/clones\"],\n getCodeFrequencyStats: [\"GET /repos/{owner}/{repo}/stats/code_frequency\"],\n getCollaboratorPermissionLevel: [\n \"GET /repos/{owner}/{repo}/collaborators/{username}/permission\",\n ],\n getCombinedStatusForRef: [\"GET /repos/{owner}/{repo}/commits/{ref}/status\"],\n getCommit: [\"GET /repos/{owner}/{repo}/commits/{ref}\"],\n getCommitActivityStats: [\"GET /repos/{owner}/{repo}/stats/commit_activity\"],\n getCommitComment: [\"GET /repos/{owner}/{repo}/comments/{comment_id}\"],\n getCommitSignatureProtection: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures\",\n ],\n getCommunityProfileMetrics: [\"GET /repos/{owner}/{repo}/community/profile\"],\n getContent: [\"GET /repos/{owner}/{repo}/contents/{path}\"],\n getContributorsStats: [\"GET /repos/{owner}/{repo}/stats/contributors\"],\n getDeployKey: [\"GET /repos/{owner}/{repo}/keys/{key_id}\"],\n getDeployment: [\"GET /repos/{owner}/{repo}/deployments/{deployment_id}\"],\n getDeploymentStatus: [\n \"GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}\",\n ],\n getEnvironment: [\n \"GET /repos/{owner}/{repo}/environments/{environment_name}\",\n ],\n getLatestPagesBuild: [\"GET /repos/{owner}/{repo}/pages/builds/latest\"],\n getLatestRelease: [\"GET /repos/{owner}/{repo}/releases/latest\"],\n getPages: [\"GET /repos/{owner}/{repo}/pages\"],\n getPagesBuild: [\"GET /repos/{owner}/{repo}/pages/builds/{build_id}\"],\n getPagesHealthCheck: [\"GET /repos/{owner}/{repo}/pages/health\"],\n getParticipationStats: [\"GET /repos/{owner}/{repo}/stats/participation\"],\n getPullRequestReviewProtection: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews\",\n ],\n getPunchCardStats: [\"GET /repos/{owner}/{repo}/stats/punch_card\"],\n getReadme: [\"GET /repos/{owner}/{repo}/readme\"],\n getReadmeInDirectory: [\"GET /repos/{owner}/{repo}/readme/{dir}\"],\n getRelease: [\"GET /repos/{owner}/{repo}/releases/{release_id}\"],\n getReleaseAsset: [\"GET /repos/{owner}/{repo}/releases/assets/{asset_id}\"],\n getReleaseByTag: [\"GET /repos/{owner}/{repo}/releases/tags/{tag}\"],\n getStatusChecksProtection: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\",\n ],\n getTeamsWithAccessToProtectedBranch: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\",\n ],\n getTopPaths: [\"GET /repos/{owner}/{repo}/traffic/popular/paths\"],\n getTopReferrers: [\"GET /repos/{owner}/{repo}/traffic/popular/referrers\"],\n getUsersWithAccessToProtectedBranch: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\",\n ],\n getViews: [\"GET /repos/{owner}/{repo}/traffic/views\"],\n getWebhook: [\"GET /repos/{owner}/{repo}/hooks/{hook_id}\"],\n getWebhookConfigForRepo: [\n \"GET /repos/{owner}/{repo}/hooks/{hook_id}/config\",\n ],\n getWebhookDelivery: [\n \"GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}\",\n ],\n listAutolinks: 
[\"GET /repos/{owner}/{repo}/autolinks\"],\n listBranches: [\"GET /repos/{owner}/{repo}/branches\"],\n listBranchesForHeadCommit: [\n \"GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head\",\n ],\n listCollaborators: [\"GET /repos/{owner}/{repo}/collaborators\"],\n listCommentsForCommit: [\n \"GET /repos/{owner}/{repo}/commits/{commit_sha}/comments\",\n ],\n listCommitCommentsForRepo: [\"GET /repos/{owner}/{repo}/comments\"],\n listCommitStatusesForRef: [\n \"GET /repos/{owner}/{repo}/commits/{ref}/statuses\",\n ],\n listCommits: [\"GET /repos/{owner}/{repo}/commits\"],\n listContributors: [\"GET /repos/{owner}/{repo}/contributors\"],\n listDeployKeys: [\"GET /repos/{owner}/{repo}/keys\"],\n listDeploymentStatuses: [\n \"GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses\",\n ],\n listDeployments: [\"GET /repos/{owner}/{repo}/deployments\"],\n listForAuthenticatedUser: [\"GET /user/repos\"],\n listForOrg: [\"GET /orgs/{org}/repos\"],\n listForUser: [\"GET /users/{username}/repos\"],\n listForks: [\"GET /repos/{owner}/{repo}/forks\"],\n listInvitations: [\"GET /repos/{owner}/{repo}/invitations\"],\n listInvitationsForAuthenticatedUser: [\"GET /user/repository_invitations\"],\n listLanguages: [\"GET /repos/{owner}/{repo}/languages\"],\n listPagesBuilds: [\"GET /repos/{owner}/{repo}/pages/builds\"],\n listPublic: [\"GET /repositories\"],\n listPullRequestsAssociatedWithCommit: [\n \"GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls\",\n ],\n listReleaseAssets: [\n \"GET /repos/{owner}/{repo}/releases/{release_id}/assets\",\n ],\n listReleases: [\"GET /repos/{owner}/{repo}/releases\"],\n listTagProtection: [\"GET /repos/{owner}/{repo}/tags/protection\"],\n listTags: [\"GET /repos/{owner}/{repo}/tags\"],\n listTeams: [\"GET /repos/{owner}/{repo}/teams\"],\n listWebhookDeliveries: [\n \"GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries\",\n ],\n listWebhooks: [\"GET /repos/{owner}/{repo}/hooks\"],\n merge: [\"POST /repos/{owner}/{repo}/merges\"],\n mergeUpstream: [\"POST /repos/{owner}/{repo}/merge-upstream\"],\n pingWebhook: [\"POST /repos/{owner}/{repo}/hooks/{hook_id}/pings\"],\n redeliverWebhookDelivery: [\n \"POST /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}/attempts\",\n ],\n removeAppAccessRestrictions: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\",\n {},\n { mapToData: \"apps\" },\n ],\n removeCollaborator: [\n \"DELETE /repos/{owner}/{repo}/collaborators/{username}\",\n ],\n removeStatusCheckContexts: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\",\n {},\n { mapToData: \"contexts\" },\n ],\n removeStatusCheckProtection: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\",\n ],\n removeTeamAccessRestrictions: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\",\n {},\n { mapToData: \"teams\" },\n ],\n removeUserAccessRestrictions: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\",\n {},\n { mapToData: \"users\" },\n ],\n renameBranch: [\"POST /repos/{owner}/{repo}/branches/{branch}/rename\"],\n replaceAllTopics: [\"PUT /repos/{owner}/{repo}/topics\"],\n requestPagesBuild: [\"POST /repos/{owner}/{repo}/pages/builds\"],\n setAdminBranchProtection: [\n \"POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins\",\n ],\n setAppAccessRestrictions: [\n \"PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\",\n {},\n { 
mapToData: \"apps\" },\n ],\n setStatusCheckContexts: [\n \"PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\",\n {},\n { mapToData: \"contexts\" },\n ],\n setTeamAccessRestrictions: [\n \"PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\",\n {},\n { mapToData: \"teams\" },\n ],\n setUserAccessRestrictions: [\n \"PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\",\n {},\n { mapToData: \"users\" },\n ],\n testPushWebhook: [\"POST /repos/{owner}/{repo}/hooks/{hook_id}/tests\"],\n transfer: [\"POST /repos/{owner}/{repo}/transfer\"],\n update: [\"PATCH /repos/{owner}/{repo}\"],\n updateBranchProtection: [\n \"PUT /repos/{owner}/{repo}/branches/{branch}/protection\",\n ],\n updateCommitComment: [\"PATCH /repos/{owner}/{repo}/comments/{comment_id}\"],\n updateInformationAboutPagesSite: [\"PUT /repos/{owner}/{repo}/pages\"],\n updateInvitation: [\n \"PATCH /repos/{owner}/{repo}/invitations/{invitation_id}\",\n ],\n updatePullRequestReviewProtection: [\n \"PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews\",\n ],\n updateRelease: [\"PATCH /repos/{owner}/{repo}/releases/{release_id}\"],\n updateReleaseAsset: [\n \"PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}\",\n ],\n updateStatusCheckPotection: [\n \"PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\",\n {},\n { renamed: [\"repos\", \"updateStatusCheckProtection\"] },\n ],\n updateStatusCheckProtection: [\n \"PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\",\n ],\n updateWebhook: [\"PATCH /repos/{owner}/{repo}/hooks/{hook_id}\"],\n updateWebhookConfigForRepo: [\n \"PATCH /repos/{owner}/{repo}/hooks/{hook_id}/config\",\n ],\n uploadReleaseAsset: [\n \"POST /repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}\",\n { baseUrl: \"https://uploads.github.com\" },\n ],\n },\n search: {\n code: [\"GET /search/code\"],\n commits: [\"GET /search/commits\"],\n issuesAndPullRequests: [\"GET /search/issues\"],\n labels: [\"GET /search/labels\"],\n repos: [\"GET /search/repositories\"],\n topics: [\"GET /search/topics\"],\n users: [\"GET /search/users\"],\n },\n secretScanning: {\n getAlert: [\n \"GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}\",\n ],\n listAlertsForEnterprise: [\n \"GET /enterprises/{enterprise}/secret-scanning/alerts\",\n ],\n listAlertsForOrg: [\"GET /orgs/{org}/secret-scanning/alerts\"],\n listAlertsForRepo: [\"GET /repos/{owner}/{repo}/secret-scanning/alerts\"],\n listLocationsForAlert: [\n \"GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations\",\n ],\n updateAlert: [\n \"PATCH /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}\",\n ],\n },\n teams: {\n addOrUpdateMembershipForUserInOrg: [\n \"PUT /orgs/{org}/teams/{team_slug}/memberships/{username}\",\n ],\n addOrUpdateProjectPermissionsInOrg: [\n \"PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}\",\n ],\n addOrUpdateRepoPermissionsInOrg: [\n \"PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}\",\n ],\n checkPermissionsForProjectInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/projects/{project_id}\",\n ],\n checkPermissionsForRepoInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}\",\n ],\n create: [\"POST /orgs/{org}/teams\"],\n createDiscussionCommentInOrg: [\n \"POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments\",\n ],\n createDiscussionInOrg: [\"POST 
/orgs/{org}/teams/{team_slug}/discussions\"],\n deleteDiscussionCommentInOrg: [\n \"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}\",\n ],\n deleteDiscussionInOrg: [\n \"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}\",\n ],\n deleteInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}\"],\n getByName: [\"GET /orgs/{org}/teams/{team_slug}\"],\n getDiscussionCommentInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}\",\n ],\n getDiscussionInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}\",\n ],\n getMembershipForUserInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/memberships/{username}\",\n ],\n list: [\"GET /orgs/{org}/teams\"],\n listChildInOrg: [\"GET /orgs/{org}/teams/{team_slug}/teams\"],\n listDiscussionCommentsInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments\",\n ],\n listDiscussionsInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions\"],\n listForAuthenticatedUser: [\"GET /user/teams\"],\n listMembersInOrg: [\"GET /orgs/{org}/teams/{team_slug}/members\"],\n listPendingInvitationsInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/invitations\",\n ],\n listProjectsInOrg: [\"GET /orgs/{org}/teams/{team_slug}/projects\"],\n listReposInOrg: [\"GET /orgs/{org}/teams/{team_slug}/repos\"],\n removeMembershipForUserInOrg: [\n \"DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}\",\n ],\n removeProjectInOrg: [\n \"DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}\",\n ],\n removeRepoInOrg: [\n \"DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}\",\n ],\n updateDiscussionCommentInOrg: [\n \"PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}\",\n ],\n updateDiscussionInOrg: [\n \"PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}\",\n ],\n updateInOrg: [\"PATCH /orgs/{org}/teams/{team_slug}\"],\n },\n users: {\n addEmailForAuthenticated: [\n \"POST /user/emails\",\n {},\n { renamed: [\"users\", \"addEmailForAuthenticatedUser\"] },\n ],\n addEmailForAuthenticatedUser: [\"POST /user/emails\"],\n block: [\"PUT /user/blocks/{username}\"],\n checkBlocked: [\"GET /user/blocks/{username}\"],\n checkFollowingForUser: [\"GET /users/{username}/following/{target_user}\"],\n checkPersonIsFollowedByAuthenticated: [\"GET /user/following/{username}\"],\n createGpgKeyForAuthenticated: [\n \"POST /user/gpg_keys\",\n {},\n { renamed: [\"users\", \"createGpgKeyForAuthenticatedUser\"] },\n ],\n createGpgKeyForAuthenticatedUser: [\"POST /user/gpg_keys\"],\n createPublicSshKeyForAuthenticated: [\n \"POST /user/keys\",\n {},\n { renamed: [\"users\", \"createPublicSshKeyForAuthenticatedUser\"] },\n ],\n createPublicSshKeyForAuthenticatedUser: [\"POST /user/keys\"],\n deleteEmailForAuthenticated: [\n \"DELETE /user/emails\",\n {},\n { renamed: [\"users\", \"deleteEmailForAuthenticatedUser\"] },\n ],\n deleteEmailForAuthenticatedUser: [\"DELETE /user/emails\"],\n deleteGpgKeyForAuthenticated: [\n \"DELETE /user/gpg_keys/{gpg_key_id}\",\n {},\n { renamed: [\"users\", \"deleteGpgKeyForAuthenticatedUser\"] },\n ],\n deleteGpgKeyForAuthenticatedUser: [\"DELETE /user/gpg_keys/{gpg_key_id}\"],\n deletePublicSshKeyForAuthenticated: [\n \"DELETE /user/keys/{key_id}\",\n {},\n { renamed: [\"users\", \"deletePublicSshKeyForAuthenticatedUser\"] },\n ],\n deletePublicSshKeyForAuthenticatedUser: [\"DELETE /user/keys/{key_id}\"],\n follow: [\"PUT 
/user/following/{username}\"],\n getAuthenticated: [\"GET /user\"],\n getByUsername: [\"GET /users/{username}\"],\n getContextForUser: [\"GET /users/{username}/hovercard\"],\n getGpgKeyForAuthenticated: [\n \"GET /user/gpg_keys/{gpg_key_id}\",\n {},\n { renamed: [\"users\", \"getGpgKeyForAuthenticatedUser\"] },\n ],\n getGpgKeyForAuthenticatedUser: [\"GET /user/gpg_keys/{gpg_key_id}\"],\n getPublicSshKeyForAuthenticated: [\n \"GET /user/keys/{key_id}\",\n {},\n { renamed: [\"users\", \"getPublicSshKeyForAuthenticatedUser\"] },\n ],\n getPublicSshKeyForAuthenticatedUser: [\"GET /user/keys/{key_id}\"],\n list: [\"GET /users\"],\n listBlockedByAuthenticated: [\n \"GET /user/blocks\",\n {},\n { renamed: [\"users\", \"listBlockedByAuthenticatedUser\"] },\n ],\n listBlockedByAuthenticatedUser: [\"GET /user/blocks\"],\n listEmailsForAuthenticated: [\n \"GET /user/emails\",\n {},\n { renamed: [\"users\", \"listEmailsForAuthenticatedUser\"] },\n ],\n listEmailsForAuthenticatedUser: [\"GET /user/emails\"],\n listFollowedByAuthenticated: [\n \"GET /user/following\",\n {},\n { renamed: [\"users\", \"listFollowedByAuthenticatedUser\"] },\n ],\n listFollowedByAuthenticatedUser: [\"GET /user/following\"],\n listFollowersForAuthenticatedUser: [\"GET /user/followers\"],\n listFollowersForUser: [\"GET /users/{username}/followers\"],\n listFollowingForUser: [\"GET /users/{username}/following\"],\n listGpgKeysForAuthenticated: [\n \"GET /user/gpg_keys\",\n {},\n { renamed: [\"users\", \"listGpgKeysForAuthenticatedUser\"] },\n ],\n listGpgKeysForAuthenticatedUser: [\"GET /user/gpg_keys\"],\n listGpgKeysForUser: [\"GET /users/{username}/gpg_keys\"],\n listPublicEmailsForAuthenticated: [\n \"GET /user/public_emails\",\n {},\n { renamed: [\"users\", \"listPublicEmailsForAuthenticatedUser\"] },\n ],\n listPublicEmailsForAuthenticatedUser: [\"GET /user/public_emails\"],\n listPublicKeysForUser: [\"GET /users/{username}/keys\"],\n listPublicSshKeysForAuthenticated: [\n \"GET /user/keys\",\n {},\n { renamed: [\"users\", \"listPublicSshKeysForAuthenticatedUser\"] },\n ],\n listPublicSshKeysForAuthenticatedUser: [\"GET /user/keys\"],\n setPrimaryEmailVisibilityForAuthenticated: [\n \"PATCH /user/email/visibility\",\n {},\n { renamed: [\"users\", \"setPrimaryEmailVisibilityForAuthenticatedUser\"] },\n ],\n setPrimaryEmailVisibilityForAuthenticatedUser: [\n \"PATCH /user/email/visibility\",\n ],\n unblock: [\"DELETE /user/blocks/{username}\"],\n unfollow: [\"DELETE /user/following/{username}\"],\n updateAuthenticated: [\"PATCH /user\"],\n },\n};\nexport default Endpoints;\n","export const VERSION = \"5.16.2\";\n","export function endpointsToMethods(octokit, endpointsMap) {\n const newMethods = {};\n for (const [scope, endpoints] of Object.entries(endpointsMap)) {\n for (const [methodName, endpoint] of Object.entries(endpoints)) {\n const [route, defaults, decorations] = endpoint;\n const [method, url] = route.split(/ /);\n const endpointDefaults = Object.assign({ method, url }, defaults);\n if (!newMethods[scope]) {\n newMethods[scope] = {};\n }\n const scopeMethods = newMethods[scope];\n if (decorations) {\n scopeMethods[methodName] = decorate(octokit, scope, methodName, endpointDefaults, decorations);\n continue;\n }\n scopeMethods[methodName] = octokit.request.defaults(endpointDefaults);\n }\n }\n return newMethods;\n}\nfunction decorate(octokit, scope, methodName, defaults, decorations) {\n const requestWithDefaults = octokit.request.defaults(defaults);\n /* istanbul ignore next */\n function withDecorations(...args) 
{\n // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488\n let options = requestWithDefaults.endpoint.merge(...args);\n // There are currently no other decorations than `.mapToData`\n if (decorations.mapToData) {\n options = Object.assign({}, options, {\n data: options[decorations.mapToData],\n [decorations.mapToData]: undefined,\n });\n return requestWithDefaults(options);\n }\n if (decorations.renamed) {\n const [newScope, newMethodName] = decorations.renamed;\n octokit.log.warn(`octokit.${scope}.${methodName}() has been renamed to octokit.${newScope}.${newMethodName}()`);\n }\n if (decorations.deprecated) {\n octokit.log.warn(decorations.deprecated);\n }\n if (decorations.renamedParameters) {\n // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488\n const options = requestWithDefaults.endpoint.merge(...args);\n for (const [name, alias] of Object.entries(decorations.renamedParameters)) {\n if (name in options) {\n octokit.log.warn(`\"${name}\" parameter is deprecated for \"octokit.${scope}.${methodName}()\". Use \"${alias}\" instead`);\n if (!(alias in options)) {\n options[alias] = options[name];\n }\n delete options[name];\n }\n }\n return requestWithDefaults(options);\n }\n // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488\n return requestWithDefaults(...args);\n }\n return Object.assign(withDecorations, requestWithDefaults);\n}\n","import ENDPOINTS from \"./generated/endpoints\";\nimport { VERSION } from \"./version\";\nimport { endpointsToMethods } from \"./endpoints-to-methods\";\nexport function restEndpointMethods(octokit) {\n const api = endpointsToMethods(octokit, ENDPOINTS);\n return {\n rest: api,\n };\n}\nrestEndpointMethods.VERSION = VERSION;\nexport function legacyRestEndpointMethods(octokit) {\n const api = endpointsToMethods(octokit, ENDPOINTS);\n return {\n ...api,\n rest: api,\n };\n}\nlegacyRestEndpointMethods.VERSION = 
VERSION;\n"],"names":["Endpoints","actions","addCustomLabelsToSelfHostedRunnerForOrg","addCustomLabelsToSelfHostedRunnerForRepo","addSelectedRepoToOrgSecret","approveWorkflowRun","cancelWorkflowRun","createOrUpdateEnvironmentSecret","createOrUpdateOrgSecret","createOrUpdateRepoSecret","createRegistrationTokenForOrg","createRegistrationTokenForRepo","createRemoveTokenForOrg","createRemoveTokenForRepo","createWorkflowDispatch","deleteActionsCacheById","deleteActionsCacheByKey","deleteArtifact","deleteEnvironmentSecret","deleteOrgSecret","deleteRepoSecret","deleteSelfHostedRunnerFromOrg","deleteSelfHostedRunnerFromRepo","deleteWorkflowRun","deleteWorkflowRunLogs","disableSelectedRepositoryGithubActionsOrganization","disableWorkflow","downloadArtifact","downloadJobLogsForWorkflowRun","downloadWorkflowRunAttemptLogs","downloadWorkflowRunLogs","enableSelectedRepositoryGithubActionsOrganization","enableWorkflow","getActionsCacheList","getActionsCacheUsage","getActionsCacheUsageByRepoForOrg","getActionsCacheUsageForEnterprise","getActionsCacheUsageForOrg","getAllowedActionsOrganization","getAllowedActionsRepository","getArtifact","getEnvironmentPublicKey","getEnvironmentSecret","getGithubActionsDefaultWorkflowPermissionsEnterprise","getGithubActionsDefaultWorkflowPermissionsOrganization","getGithubActionsDefaultWorkflowPermissionsRepository","getGithubActionsPermissionsOrganization","getGithubActionsPermissionsRepository","getJobForWorkflowRun","getOrgPublicKey","getOrgSecret","getPendingDeploymentsForRun","getRepoPermissions","renamed","getRepoPublicKey","getRepoSecret","getReviewsForRun","getSelfHostedRunnerForOrg","getSelfHostedRunnerForRepo","getWorkflow","getWorkflowAccessToRepository","getWorkflowRun","getWorkflowRunAttempt","getWorkflowRunUsage","getWorkflowUsage","listArtifactsForRepo","listEnvironmentSecrets","listJobsForWorkflowRun","listJobsForWorkflowRunAttempt","listLabelsForSelfHostedRunnerForOrg","listLabelsForSelfHostedRunnerForRepo","listOrgSecrets","listRepoSecrets","listRepoWorkflows","listRunnerApplicationsForOrg","listRunnerApplicationsForRepo","listSelectedReposForOrgSecret","listSelectedRepositoriesEnabledGithubActionsOrganization","listSelfHostedRunnersForOrg","listSelfHostedRunnersForRepo","listWorkflowRunArtifacts","listWorkflowRuns","listWorkflowRunsForRepo","reRunJobForWorkflowRun","reRunWorkflow","reRunWorkflowFailedJobs","removeAllCustomLabelsFromSelfHostedRunnerForOrg","removeAllCustomLabelsFromSelfHostedRunnerForRepo","removeCustomLabelFromSelfHostedRunnerForOrg","removeCustomLabelFromSelfHostedRunnerForRepo","removeSelectedRepoFromOrgSecret","reviewPendingDeploymentsForRun","setAllowedActionsOrganization","setAllowedActionsRepository","setCustomLabelsForSelfHostedRunnerForOrg","setCustomLabelsForSelfHostedRunnerForRepo","setGithubActionsDefaultWorkflowPermissionsEnterprise","setGithubActionsDefaultWorkflowPermissionsOrganization","setGithubActionsDefaultWorkflowPermissionsRepository","setGithubActionsPermissionsOrganization","setGithubActionsPermissionsRepository","setSelectedReposForOrgSecret","setSelectedRepositoriesEnabledGithubActionsOrganization","setWorkflowAccessToRepository","activity","checkRepoIsStarredByAuthenticatedUser","deleteRepoSubscription","deleteThreadSubscription","getFeeds","getRepoSubscription","getThread","getThreadSubscriptionForAuthenticatedUser","listEventsForAuthenticatedUser","listNotificationsForAuthenticatedUser","listOrgEventsForAuthenticatedUser","listPublicEvents","listPublicEventsForRepoNetwork","listPublicEventsForUser","listPublic
OrgEvents","listReceivedEventsForUser","listReceivedPublicEventsForUser","listRepoEvents","listRepoNotificationsForAuthenticatedUser","listReposStarredByAuthenticatedUser","listReposStarredByUser","listReposWatchedByUser","listStargazersForRepo","listWatchedReposForAuthenticatedUser","listWatchersForRepo","markNotificationsAsRead","markRepoNotificationsAsRead","markThreadAsRead","setRepoSubscription","setThreadSubscription","starRepoForAuthenticatedUser","unstarRepoForAuthenticatedUser","apps","addRepoToInstallation","addRepoToInstallationForAuthenticatedUser","checkToken","createFromManifest","createInstallationAccessToken","deleteAuthorization","deleteInstallation","deleteToken","getAuthenticated","getBySlug","getInstallation","getOrgInstallation","getRepoInstallation","getSubscriptionPlanForAccount","getSubscriptionPlanForAccountStubbed","getUserInstallation","getWebhookConfigForApp","getWebhookDelivery","listAccountsForPlan","listAccountsForPlanStubbed","listInstallationReposForAuthenticatedUser","listInstallations","listInstallationsForAuthenticatedUser","listPlans","listPlansStubbed","listReposAccessibleToInstallation","listSubscriptionsForAuthenticatedUser","listSubscriptionsForAuthenticatedUserStubbed","listWebhookDeliveries","redeliverWebhookDelivery","removeRepoFromInstallation","removeRepoFromInstallationForAuthenticatedUser","resetToken","revokeInstallationAccessToken","scopeToken","suspendInstallation","unsuspendInstallation","updateWebhookConfigForApp","billing","getGithubActionsBillingOrg","getGithubActionsBillingUser","getGithubAdvancedSecurityBillingGhe","getGithubAdvancedSecurityBillingOrg","getGithubPackagesBillingOrg","getGithubPackagesBillingUser","getSharedStorageBillingOrg","getSharedStorageBillingUser","checks","create","createSuite","get","getSuite","listAnnotations","listForRef","listForSuite","listSuitesForRef","rerequestRun","rerequestSuite","setSuitesPreferences","update","codeScanning","deleteAnalysis","getAlert","renamedParameters","alert_id","getAnalysis","getSarif","listAlertInstances","listAlertsForOrg","listAlertsForRepo","listAlertsInstances","listRecentAnalyses","updateAlert","uploadSarif","codesOfConduct","getAllCodesOfConduct","getConductCode","codespaces","addRepositoryForSecretForAuthenticatedUser","codespaceMachinesForAuthenticatedUser","createForAuthenticatedUser","createOrUpdateSecretForAuthenticatedUser","createWithPrForAuthenticatedUser","createWithRepoForAuthenticatedUser","deleteForAuthenticatedUser","deleteFromOrganization","deleteSecretForAuthenticatedUser","exportForAuthenticatedUser","getExportDetailsForAuthenticatedUser","getForAuthenticatedUser","getPublicKeyForAuthenticatedUser","getSecretForAuthenticatedUser","listDevcontainersInRepositoryForAuthenticatedUser","listForAuthenticatedUser","listInOrganization","org_id","listInRepositoryForAuthenticatedUser","listRepositoriesForSecretForAuthenticatedUser","listSecretsForAuthenticatedUser","removeRepositoryForSecretForAuthenticatedUser","repoMachinesForAuthenticatedUser","setRepositoriesForSecretForAuthenticatedUser","startForAuthenticatedUser","stopForAuthenticatedUser","stopInOrganization","updateForAuthenticatedUser","dependabot","dependencyGraph","createRepositorySnapshot","diffRange","emojis","enterpriseAdmin","addCustomLabelsToSelfHostedRunnerForEnterprise","disableSelectedOrganizationGithubActionsEnterprise","enableSelectedOrganizationGithubActionsEnterprise","getAllowedActionsEnterprise","getGithubActionsPermissionsEnterprise","getServerStatistics","listLabelsForSelfHostedRunnerForE
nterprise","listSelectedOrganizationsEnabledGithubActionsEnterprise","removeAllCustomLabelsFromSelfHostedRunnerForEnterprise","removeCustomLabelFromSelfHostedRunnerForEnterprise","setAllowedActionsEnterprise","setCustomLabelsForSelfHostedRunnerForEnterprise","setGithubActionsPermissionsEnterprise","setSelectedOrganizationsEnabledGithubActionsEnterprise","gists","checkIsStarred","createComment","delete","deleteComment","fork","getComment","getRevision","list","listComments","listCommits","listForUser","listForks","listPublic","listStarred","star","unstar","updateComment","git","createBlob","createCommit","createRef","createTag","createTree","deleteRef","getBlob","getCommit","getRef","getTag","getTree","listMatchingRefs","updateRef","gitignore","getAllTemplates","getTemplate","interactions","getRestrictionsForAuthenticatedUser","getRestrictionsForOrg","getRestrictionsForRepo","getRestrictionsForYourPublicRepos","removeRestrictionsForAuthenticatedUser","removeRestrictionsForOrg","removeRestrictionsForRepo","removeRestrictionsForYourPublicRepos","setRestrictionsForAuthenticatedUser","setRestrictionsForOrg","setRestrictionsForRepo","setRestrictionsForYourPublicRepos","issues","addAssignees","addLabels","checkUserCanBeAssigned","createLabel","createMilestone","deleteLabel","deleteMilestone","getEvent","getLabel","getMilestone","listAssignees","listCommentsForRepo","listEvents","listEventsForRepo","listEventsForTimeline","listForOrg","listForRepo","listLabelsForMilestone","listLabelsForRepo","listLabelsOnIssue","listMilestones","lock","removeAllLabels","removeAssignees","removeLabel","setLabels","unlock","updateLabel","updateMilestone","licenses","getAllCommonlyUsed","getForRepo","markdown","render","renderRaw","headers","meta","getOctocat","getZen","root","migrations","cancelImport","deleteArchiveForAuthenticatedUser","deleteArchiveForOrg","downloadArchiveForOrg","getArchiveForAuthenticatedUser","getCommitAuthors","getImportStatus","getLargeFiles","getStatusForAuthenticatedUser","getStatusForOrg","listReposForAuthenticatedUser","listReposForOrg","listReposForUser","mapCommitAuthor","setLfsPreference","startForOrg","startImport","unlockRepoForAuthenticatedUser","unlockRepoForOrg","updateImport","orgs","blockUser","cancelInvitation","checkBlockedUser","checkMembershipForUser","checkPublicMembershipForUser","convertMemberToOutsideCollaborator","createInvitation","createWebhook","deleteWebhook","getMembershipForAuthenticatedUser","getMembershipForUser","getWebhook","getWebhookConfigForOrg","listAppInstallations","listBlockedUsers","listCustomRoles","listFailedInvitations","listInvitationTeams","listMembers","listMembershipsForAuthenticatedUser","listOutsideCollaborators","listPendingInvitations","listPublicMembers","listWebhooks","pingWebhook","removeMember","removeMembershipForUser","removeOutsideCollaborator","removePublicMembershipForAuthenticatedUser","setMembershipForUser","setPublicMembershipForAuthenticatedUser","unblockUser","updateMembershipForAuthenticatedUser","updateWebhook","updateWebhookConfigForOrg","packages","deletePackageForAuthenticatedUser","deletePackageForOrg","deletePackageForUser","deletePackageVersionForAuthenticatedUser","deletePackageVersionForOrg","deletePackageVersionForUser","getAllPackageVersionsForAPackageOwnedByAnOrg","getAllPackageVersionsForAPackageOwnedByTheAuthenticatedUser","getAllPackageVersionsForPackageOwnedByAuthenticatedUser","getAllPackageVersionsForPackageOwnedByOrg","getAllPackageVersionsForPackageOwnedByUser","getPackageForAuthenticatedUser","getPackageF
orOrganization","getPackageForUser","getPackageVersionForAuthenticatedUser","getPackageVersionForOrganization","getPackageVersionForUser","listPackagesForAuthenticatedUser","listPackagesForOrganization","listPackagesForUser","restorePackageForAuthenticatedUser","restorePackageForOrg","restorePackageForUser","restorePackageVersionForAuthenticatedUser","restorePackageVersionForOrg","restorePackageVersionForUser","projects","addCollaborator","createCard","createColumn","createForOrg","createForRepo","deleteCard","deleteColumn","getCard","getColumn","getPermissionForUser","listCards","listCollaborators","listColumns","moveCard","moveColumn","removeCollaborator","updateCard","updateColumn","pulls","checkIfMerged","createReplyForReviewComment","createReview","createReviewComment","deletePendingReview","deleteReviewComment","dismissReview","getReview","getReviewComment","listCommentsForReview","listFiles","listRequestedReviewers","listReviewComments","listReviewCommentsForRepo","listReviews","merge","removeRequestedReviewers","requestReviewers","submitReview","updateBranch","updateReview","updateReviewComment","rateLimit","reactions","createForCommitComment","createForIssue","createForIssueComment","createForPullRequestReviewComment","createForRelease","createForTeamDiscussionCommentInOrg","createForTeamDiscussionInOrg","deleteForCommitComment","deleteForIssue","deleteForIssueComment","deleteForPullRequestComment","deleteForRelease","deleteForTeamDiscussion","deleteForTeamDiscussionComment","listForCommitComment","listForIssue","listForIssueComment","listForPullRequestReviewComment","listForRelease","listForTeamDiscussionCommentInOrg","listForTeamDiscussionInOrg","repos","acceptInvitation","acceptInvitationForAuthenticatedUser","addAppAccessRestrictions","mapToData","addStatusCheckContexts","addTeamAccessRestrictions","addUserAccessRestrictions","checkCollaborator","checkVulnerabilityAlerts","codeownersErrors","compareCommits","compareCommitsWithBasehead","createAutolink","createCommitComment","createCommitSignatureProtection","createCommitStatus","createDeployKey","createDeployment","createDeploymentStatus","createDispatchEvent","createFork","createInOrg","createOrUpdateEnvironment","createOrUpdateFileContents","createPagesSite","createRelease","createTagProtection","createUsingTemplate","declineInvitation","declineInvitationForAuthenticatedUser","deleteAccessRestrictions","deleteAdminBranchProtection","deleteAnEnvironment","deleteAutolink","deleteBranchProtection","deleteCommitComment","deleteCommitSignatureProtection","deleteDeployKey","deleteDeployment","deleteFile","deleteInvitation","deletePagesSite","deletePullRequestReviewProtection","deleteRelease","deleteReleaseAsset","deleteTagProtection","disableAutomatedSecurityFixes","disableLfsForRepo","disableVulnerabilityAlerts","downloadArchive","downloadTarballArchive","downloadZipballArchive","enableAutomatedSecurityFixes","enableLfsForRepo","enableVulnerabilityAlerts","generateReleaseNotes","getAccessRestrictions","getAdminBranchProtection","getAllEnvironments","getAllStatusCheckContexts","getAllTopics","getAppsWithAccessToProtectedBranch","getAutolink","getBranch","getBranchProtection","getClones","getCodeFrequencyStats","getCollaboratorPermissionLevel","getCombinedStatusForRef","getCommitActivityStats","getCommitComment","getCommitSignatureProtection","getCommunityProfileMetrics","getContent","getContributorsStats","getDeployKey","getDeployment","getDeploymentStatus","getEnvironment","getLatestPagesBuild","getLatestRelease","getPages","getPa
gesBuild","getPagesHealthCheck","getParticipationStats","getPullRequestReviewProtection","getPunchCardStats","getReadme","getReadmeInDirectory","getRelease","getReleaseAsset","getReleaseByTag","getStatusChecksProtection","getTeamsWithAccessToProtectedBranch","getTopPaths","getTopReferrers","getUsersWithAccessToProtectedBranch","getViews","getWebhookConfigForRepo","listAutolinks","listBranches","listBranchesForHeadCommit","listCommentsForCommit","listCommitCommentsForRepo","listCommitStatusesForRef","listContributors","listDeployKeys","listDeploymentStatuses","listDeployments","listInvitations","listInvitationsForAuthenticatedUser","listLanguages","listPagesBuilds","listPullRequestsAssociatedWithCommit","listReleaseAssets","listReleases","listTagProtection","listTags","listTeams","mergeUpstream","removeAppAccessRestrictions","removeStatusCheckContexts","removeStatusCheckProtection","removeTeamAccessRestrictions","removeUserAccessRestrictions","renameBranch","replaceAllTopics","requestPagesBuild","setAdminBranchProtection","setAppAccessRestrictions","setStatusCheckContexts","setTeamAccessRestrictions","setUserAccessRestrictions","testPushWebhook","transfer","updateBranchProtection","updateCommitComment","updateInformationAboutPagesSite","updateInvitation","updatePullRequestReviewProtection","updateRelease","updateReleaseAsset","updateStatusCheckPotection","updateStatusCheckProtection","updateWebhookConfigForRepo","uploadReleaseAsset","baseUrl","search","code","commits","issuesAndPullRequests","labels","topics","users","secretScanning","listAlertsForEnterprise","listLocationsForAlert","teams","addOrUpdateMembershipForUserInOrg","addOrUpdateProjectPermissionsInOrg","addOrUpdateRepoPermissionsInOrg","checkPermissionsForProjectInOrg","checkPermissionsForRepoInOrg","createDiscussionCommentInOrg","createDiscussionInOrg","deleteDiscussionCommentInOrg","deleteDiscussionInOrg","deleteInOrg","getByName","getDiscussionCommentInOrg","getDiscussionInOrg","getMembershipForUserInOrg","listChildInOrg","listDiscussionCommentsInOrg","listDiscussionsInOrg","listMembersInOrg","listPendingInvitationsInOrg","listProjectsInOrg","listReposInOrg","removeMembershipForUserInOrg","removeProjectInOrg","removeRepoInOrg","updateDiscussionCommentInOrg","updateDiscussionInOrg","updateInOrg","addEmailForAuthenticated","addEmailForAuthenticatedUser","block","checkBlocked","checkFollowingForUser","checkPersonIsFollowedByAuthenticated","createGpgKeyForAuthenticated","createGpgKeyForAuthenticatedUser","createPublicSshKeyForAuthenticated","createPublicSshKeyForAuthenticatedUser","deleteEmailForAuthenticated","deleteEmailForAuthenticatedUser","deleteGpgKeyForAuthenticated","deleteGpgKeyForAuthenticatedUser","deletePublicSshKeyForAuthenticated","deletePublicSshKeyForAuthenticatedUser","follow","getByUsername","getContextForUser","getGpgKeyForAuthenticated","getGpgKeyForAuthenticatedUser","getPublicSshKeyForAuthenticated","getPublicSshKeyForAuthenticatedUser","listBlockedByAuthenticated","listBlockedByAuthenticatedUser","listEmailsForAuthenticated","listEmailsForAuthenticatedUser","listFollowedByAuthenticated","listFollowedByAuthenticatedUser","listFollowersForAuthenticatedUser","listFollowersForUser","listFollowingForUser","listGpgKeysForAuthenticated","listGpgKeysForAuthenticatedUser","listGpgKeysForUser","listPublicEmailsForAuthenticated","listPublicEmailsForAuthenticatedUser","listPublicKeysForUser","listPublicSshKeysForAuthenticated","listPublicSshKeysForAuthenticatedUser","setPrimaryEmailVisibilityForAuthenticated","setPrimaryE
mailVisibilityForAuthenticatedUser","unblock","unfollow","updateAuthenticated","VERSION","endpointsToMethods","octokit","endpointsMap","newMethods","scope","endpoints","Object","entries","methodName","endpoint","route","defaults","decorations","method","url","split","endpointDefaults","assign","scopeMethods","decorate","request","requestWithDefaults","withDecorations","args","options","data","undefined","newScope","newMethodName","log","warn","deprecated","name","alias","restEndpointMethods","api","ENDPOINTS","rest","legacyRestEndpointMethods"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,MAAMA,SAAS,GAAG;AACdC,EAAAA,OAAO,EAAE;AACLC,IAAAA,uCAAuC,EAAE,CACrC,qDADqC,CADpC;AAILC,IAAAA,wCAAwC,EAAE,CACtC,+DADsC,CAJrC;AAOLC,IAAAA,0BAA0B,EAAE,CACxB,4EADwB,CAPvB;AAULC,IAAAA,kBAAkB,EAAE,CAChB,0DADgB,CAVf;AAaLC,IAAAA,iBAAiB,EAAE,CACf,yDADe,CAbd;AAgBLC,IAAAA,+BAA+B,EAAE,CAC7B,yFAD6B,CAhB5B;AAmBLC,IAAAA,uBAAuB,EAAE,CAAC,+CAAD,CAnBpB;AAoBLC,IAAAA,wBAAwB,EAAE,CACtB,yDADsB,CApBrB;AAuBLC,IAAAA,6BAA6B,EAAE,CAC3B,qDAD2B,CAvB1B;AA0BLC,IAAAA,8BAA8B,EAAE,CAC5B,+DAD4B,CA1B3B;AA6BLC,IAAAA,uBAAuB,EAAE,CAAC,+CAAD,CA7BpB;AA8BLC,IAAAA,wBAAwB,EAAE,CACtB,yDADsB,CA9BrB;AAiCLC,IAAAA,sBAAsB,EAAE,CACpB,uEADoB,CAjCnB;AAoCLC,IAAAA,sBAAsB,EAAE,CACpB,wDADoB,CApCnB;AAuCLC,IAAAA,uBAAuB,EAAE,CACrB,uDADqB,CAvCpB;AA0CLC,IAAAA,cAAc,EAAE,CACZ,8DADY,CA1CX;AA6CLC,IAAAA,uBAAuB,EAAE,CACrB,4FADqB,CA7CpB;AAgDLC,IAAAA,eAAe,EAAE,CAAC,kDAAD,CAhDZ;AAiDLC,IAAAA,gBAAgB,EAAE,CACd,4DADc,CAjDb;AAoDLC,IAAAA,6BAA6B,EAAE,CAC3B,gDAD2B,CApD1B;AAuDLC,IAAAA,8BAA8B,EAAE,CAC5B,0DAD4B,CAvD3B;AA0DLC,IAAAA,iBAAiB,EAAE,CAAC,oDAAD,CA1Dd;AA2DLC,IAAAA,qBAAqB,EAAE,CACnB,yDADmB,CA3DlB;AA8DLC,IAAAA,kDAAkD,EAAE,CAChD,qEADgD,CA9D/C;AAiELC,IAAAA,eAAe,EAAE,CACb,mEADa,CAjEZ;AAoELC,IAAAA,gBAAgB,EAAE,CACd,4EADc,CApEb;AAuELC,IAAAA,6BAA6B,EAAE,CAC3B,sDAD2B,CAvE1B;AA0ELC,IAAAA,8BAA8B,EAAE,CAC5B,gFAD4B,CA1E3B;AA6ELC,IAAAA,uBAAuB,EAAE,CACrB,sDADqB,CA7EpB;AAgFLC,IAAAA,iDAAiD,EAAE,CAC/C,kEAD+C,CAhF9C;AAmFLC,IAAAA,cAAc,EAAE,CACZ,kEADY,CAnFX;AAsFLC,IAAAA,mBAAmB,EAAE,CAAC,0CAAD,CAtFhB;AAuFLC,IAAAA,oBAAoB,EAAE,CAAC,+CAAD,CAvFjB;AAwFLC,IAAAA,gCAAgC,EAAE,CAC9B,mDAD8B,CAxF7B;AA2FLC,IAAAA,iCAAiC,EAAE,CAC/B,mDAD+B,CA3F9B;AA8FLC,IAAAA,0BAA0B,EAAE,CAAC,qCAAD,CA9FvB;AA+FLC,IAAAA,6BAA6B,EAAE,CAC3B,sDAD2B,CA/F1B;AAkGLC,IAAAA,2BAA2B,EAAE,CACzB,gEADyB,CAlGxB;AAqGLC,IAAAA,WAAW,EAAE,CAAC,2DAAD,CArGR;AAsGLC,IAAAA,uBAAuB,EAAE,CACrB,sFADqB,CAtGpB;AAyGLC,IAAAA,oBAAoB,EAAE,CAClB,yFADkB,CAzGjB;AA4GLC,IAAAA,oDAAoD,EAAE,CAClD,4DADkD,CA5GjD;AA+GLC,IAAAA,sDAAsD,EAAE,CACpD,8CADoD,CA/GnD;AAkHLC,IAAAA,oDAAoD,EAAE,CAClD,wDADkD,CAlHjD;AAqHLC,IAAAA,uCAAuC,EAAE,CACrC,qCADqC,CArHpC;AAwHLC,IAAAA,qCAAqC,EAAE,CACnC,+CADmC,CAxHlC;AA2HLC,IAAAA,oBAAoB,EAAE,CAAC,iDAAD,CA3HjB;AA4HLC,IAAAA,eAAe,EAAE,CAAC,4CAAD,CA5HZ;AA6HLC,IAAAA,YAAY,EAAE,CAAC,+CAAD,CA7HT;AA8HLC,IAAAA,2BAA2B,EAAE,CACzB,qEADyB,CA9HxB;AAiILC,IAAAA,kBAAkB,EAAE,CAChB,+CADgB,EAEhB,EAFgB,EAGhB;AAAEC,MAAAA,OAAO,EAAE,CAAC,SAAD,EAAY,uCAAZ;AAAX,KAHgB,CAjIf;AAsILC,IAAAA,gBAAgB,EAAE,CAAC,sDAAD,CAtIb;AAuILC,IAAAA,aAAa,EAAE,CAAC,yDAAD,CAvIV;AAwILC,IAAAA,gBAAgB,EAAE,CACd,2DADc,CAxIb;AA2ILC,IAAAA,yBAAyB,EAAE,CAAC,6CAAD,CA3ItB;AA4ILC,IAAAA,0BAA0B,EAAE,CACxB,uDADwB,CA5IvB;AA+ILC,IAAAA,WAAW,EAAE,CAAC,2DAAD,CA/IR;AAgJLC,IAAAA,6BAA6B,EAAE,CAC3B,sDAD2B,CAhJ1B;AAmJLC,IAAAA,cAAc,EAAE,CAAC,iDAAD,CAnJX;AAoJLC,IAAAA,qBAAqB,EAAE,CACnB,2EADmB,CApJlB;AAuJLC,IAAAA,mBAAmB,EAAE,CACjB,wDADiB,CAvJhB;AA0JLC,IAAAA,gBAAgB,EAAE,CACd,kEADc,CA1Jb;AA6JLC,IAAAA,oBAAoB,EAAE,CAAC,6CAAD,CA7JjB;AA8JLC,IAAAA,sBAAsB,EAAE,CACpB,2EADoB,CA9JnB;AAiKLC,IAAAA,sBAAsB,EAAE,CACpB,sDADoB,C
AjKnB;AAoKLC,IAAAA,6BAA6B,EAAE,CAC3B,gFAD2B,CApK1B;AAuKLC,IAAAA,mCAAmC,EAAE,CACjC,oDADiC,CAvKhC;AA0KLC,IAAAA,oCAAoC,EAAE,CAClC,8DADkC,CA1KjC;AA6KLC,IAAAA,cAAc,EAAE,CAAC,iCAAD,CA7KX;AA8KLC,IAAAA,eAAe,EAAE,CAAC,2CAAD,CA9KZ;AA+KLC,IAAAA,iBAAiB,EAAE,CAAC,6CAAD,CA/Kd;AAgLLC,IAAAA,4BAA4B,EAAE,CAAC,2CAAD,CAhLzB;AAiLLC,IAAAA,6BAA6B,EAAE,CAC3B,qDAD2B,CAjL1B;AAoLLC,IAAAA,6BAA6B,EAAE,CAC3B,4DAD2B,CApL1B;AAuLLC,IAAAA,wDAAwD,EAAE,CACtD,kDADsD,CAvLrD;AA0LLC,IAAAA,2BAA2B,EAAE,CAAC,iCAAD,CA1LxB;AA2LLC,IAAAA,4BAA4B,EAAE,CAAC,2CAAD,CA3LzB;AA4LLC,IAAAA,wBAAwB,EAAE,CACtB,2DADsB,CA5LrB;AA+LLC,IAAAA,gBAAgB,EAAE,CACd,gEADc,CA/Lb;AAkMLC,IAAAA,uBAAuB,EAAE,CAAC,wCAAD,CAlMpB;AAmMLC,IAAAA,sBAAsB,EAAE,CACpB,wDADoB,CAnMnB;AAsMLC,IAAAA,aAAa,EAAE,CAAC,wDAAD,CAtMV;AAuMLC,IAAAA,uBAAuB,EAAE,CACrB,oEADqB,CAvMpB;AA0MLC,IAAAA,+CAA+C,EAAE,CAC7C,uDAD6C,CA1M5C;AA6MLC,IAAAA,gDAAgD,EAAE,CAC9C,iEAD8C,CA7M7C;AAgNLC,IAAAA,2CAA2C,EAAE,CACzC,8DADyC,CAhNxC;AAmNLC,IAAAA,4CAA4C,EAAE,CAC1C,wEAD0C,CAnNzC;AAsNLC,IAAAA,+BAA+B,EAAE,CAC7B,+EAD6B,CAtN5B;AAyNLC,IAAAA,8BAA8B,EAAE,CAC5B,sEAD4B,CAzN3B;AA4NLC,IAAAA,6BAA6B,EAAE,CAC3B,sDAD2B,CA5N1B;AA+NLC,IAAAA,2BAA2B,EAAE,CACzB,gEADyB,CA/NxB;AAkOLC,IAAAA,wCAAwC,EAAE,CACtC,oDADsC,CAlOrC;AAqOLC,IAAAA,yCAAyC,EAAE,CACvC,8DADuC,CArOtC;AAwOLC,IAAAA,oDAAoD,EAAE,CAClD,4DADkD,CAxOjD;AA2OLC,IAAAA,sDAAsD,EAAE,CACpD,8CADoD,CA3OnD;AA8OLC,IAAAA,oDAAoD,EAAE,CAClD,wDADkD,CA9OjD;AAiPLC,IAAAA,uCAAuC,EAAE,CACrC,qCADqC,CAjPpC;AAoPLC,IAAAA,qCAAqC,EAAE,CACnC,+CADmC,CApPlC;AAuPLC,IAAAA,4BAA4B,EAAE,CAC1B,4DAD0B,CAvPzB;AA0PLC,IAAAA,uDAAuD,EAAE,CACrD,kDADqD,CA1PpD;AA6PLC,IAAAA,6BAA6B,EAAE,CAC3B,sDAD2B;AA7P1B,GADK;AAkQdC,EAAAA,QAAQ,EAAE;AACNC,IAAAA,qCAAqC,EAAE,CAAC,kCAAD,CADjC;AAENC,IAAAA,sBAAsB,EAAE,CAAC,2CAAD,CAFlB;AAGNC,IAAAA,wBAAwB,EAAE,CACtB,wDADsB,CAHpB;AAMNC,IAAAA,QAAQ,EAAE,CAAC,YAAD,CANJ;AAONC,IAAAA,mBAAmB,EAAE,CAAC,wCAAD,CAPf;AAQNC,IAAAA,SAAS,EAAE,CAAC,wCAAD,CARL;AASNC,IAAAA,yCAAyC,EAAE,CACvC,qDADuC,CATrC;AAYNC,IAAAA,8BAA8B,EAAE,CAAC,8BAAD,CAZ1B;AAaNC,IAAAA,qCAAqC,EAAE,CAAC,oBAAD,CAbjC;AAcNC,IAAAA,iCAAiC,EAAE,CAC/B,yCAD+B,CAd7B;AAiBNC,IAAAA,gBAAgB,EAAE,CAAC,aAAD,CAjBZ;AAkBNC,IAAAA,8BAA8B,EAAE,CAAC,qCAAD,CAlB1B;AAmBNC,IAAAA,uBAAuB,EAAE,CAAC,qCAAD,CAnBnB;AAoBNC,IAAAA,mBAAmB,EAAE,CAAC,wBAAD,CApBf;AAqBNC,IAAAA,yBAAyB,EAAE,CAAC,uCAAD,CArBrB;AAsBNC,IAAAA,+BAA+B,EAAE,CAC7B,8CAD6B,CAtB3B;AAyBNC,IAAAA,cAAc,EAAE,CAAC,kCAAD,CAzBV;AA0BNC,IAAAA,yCAAyC,EAAE,CACvC,yCADuC,CA1BrC;AA6BNC,IAAAA,mCAAmC,EAAE,CAAC,mBAAD,CA7B/B;AA8BNC,IAAAA,sBAAsB,EAAE,CAAC,+BAAD,CA9BlB;AA+BNC,IAAAA,sBAAsB,EAAE,CAAC,qCAAD,CA/BlB;AAgCNC,IAAAA,qBAAqB,EAAE,CAAC,sCAAD,CAhCjB;AAiCNC,IAAAA,oCAAoC,EAAE,CAAC,yBAAD,CAjChC;AAkCNC,IAAAA,mBAAmB,EAAE,CAAC,uCAAD,CAlCf;AAmCNC,IAAAA,uBAAuB,EAAE,CAAC,oBAAD,CAnCnB;AAoCNC,IAAAA,2BAA2B,EAAE,CAAC,yCAAD,CApCvB;AAqCNC,IAAAA,gBAAgB,EAAE,CAAC,0CAAD,CArCZ;AAsCNC,IAAAA,mBAAmB,EAAE,CAAC,wCAAD,CAtCf;AAuCNC,IAAAA,qBAAqB,EAAE,CACnB,qDADmB,CAvCjB;AA0CNC,IAAAA,4BAA4B,EAAE,CAAC,kCAAD,CA1CxB;AA2CNC,IAAAA,8BAA8B,EAAE,CAAC,qCAAD;AA3C1B,GAlQI;AA+SdC,EAAAA,IAAI,EAAE;AACFC,IAAAA,qBAAqB,EAAE,CACnB,wEADmB,EAEnB,EAFmB,EAGnB;AAAEpF,MAAAA,OAAO,EAAE,CAAC,MAAD,EAAS,2CAAT;AAAX,KAHmB,CADrB;AAMFqF,IAAAA,yCAAyC,EAAE,CACvC,wEADuC,CANzC;AASFC,IAAAA,UAAU,EAAE,CAAC,sCAAD,CATV;AAUFC,IAAAA,kBAAkB,EAAE,CAAC,wCAAD,CAVlB;AAWFC,IAAAA,6BAA6B,EAAE,CAC3B,yDAD2B,CAX7B;AAcFC,IAAAA,mBAAmB,EAAE,CAAC,wCAAD,CAdnB;AAeFC,IAAAA,kBAAkB,EAAE,CAAC,6CAAD,CAflB;AAgBFC,IAAAA,WAAW,EAAE,CAAC,wCAAD,CAhBX;AAiBFC,IAAAA,gBAAgB,EAAE,CAAC,UAAD,CAjBhB;AAkBFC,IAAAA,SAAS,EAAE,CAAC,sBAAD,CAlBT;AAmBFC,IAAAA,eAAe,EAAE,CAAC,0CAAD,CAnBf;AAoBFC,IAAAA,kBAAkB,EAAE,CAAC,8BAAD,CApBlB;AAqBFC,IAAAA,mBAAmB,EAAE
,CAAC,wCAAD,CArBnB;AAsBFC,IAAAA,6BAA6B,EAAE,CAC3B,gDAD2B,CAtB7B;AAyBFC,IAAAA,oCAAoC,EAAE,CAClC,wDADkC,CAzBpC;AA4BFC,IAAAA,mBAAmB,EAAE,CAAC,oCAAD,CA5BnB;AA6BFC,IAAAA,sBAAsB,EAAE,CAAC,sBAAD,CA7BtB;AA8BFC,IAAAA,kBAAkB,EAAE,CAAC,wCAAD,CA9BlB;AA+BFC,IAAAA,mBAAmB,EAAE,CAAC,mDAAD,CA/BnB;AAgCFC,IAAAA,0BAA0B,EAAE,CACxB,2DADwB,CAhC1B;AAmCFC,IAAAA,yCAAyC,EAAE,CACvC,wDADuC,CAnCzC;AAsCFC,IAAAA,iBAAiB,EAAE,CAAC,wBAAD,CAtCjB;AAuCFC,IAAAA,qCAAqC,EAAE,CAAC,yBAAD,CAvCrC;AAwCFC,IAAAA,SAAS,EAAE,CAAC,gCAAD,CAxCT;AAyCFC,IAAAA,gBAAgB,EAAE,CAAC,wCAAD,CAzChB;AA0CFC,IAAAA,iCAAiC,EAAE,CAAC,gCAAD,CA1CjC;AA2CFC,IAAAA,qCAAqC,EAAE,CAAC,iCAAD,CA3CrC;AA4CFC,IAAAA,4CAA4C,EAAE,CAC1C,yCAD0C,CA5C5C;AA+CFC,IAAAA,qBAAqB,EAAE,CAAC,0BAAD,CA/CrB;AAgDFC,IAAAA,wBAAwB,EAAE,CACtB,kDADsB,CAhDxB;AAmDFC,IAAAA,0BAA0B,EAAE,CACxB,2EADwB,EAExB,EAFwB,EAGxB;AAAElH,MAAAA,OAAO,EAAE,CAAC,MAAD,EAAS,gDAAT;AAAX,KAHwB,CAnD1B;AAwDFmH,IAAAA,8CAA8C,EAAE,CAC5C,2EAD4C,CAxD9C;AA2DFC,IAAAA,UAAU,EAAE,CAAC,uCAAD,CA3DV;AA4DFC,IAAAA,6BAA6B,EAAE,CAAC,4BAAD,CA5D7B;AA6DFC,IAAAA,UAAU,EAAE,CAAC,6CAAD,CA7DV;AA8DFC,IAAAA,mBAAmB,EAAE,CAAC,oDAAD,CA9DnB;AA+DFC,IAAAA,qBAAqB,EAAE,CACnB,uDADmB,CA/DrB;AAkEFC,IAAAA,yBAAyB,EAAE,CAAC,wBAAD;AAlEzB,GA/SQ;AAmXdC,EAAAA,OAAO,EAAE;AACLC,IAAAA,0BAA0B,EAAE,CAAC,0CAAD,CADvB;AAELC,IAAAA,2BAA2B,EAAE,CACzB,gDADyB,CAFxB;AAKLC,IAAAA,mCAAmC,EAAE,CACjC,kEADiC,CALhC;AAQLC,IAAAA,mCAAmC,EAAE,CACjC,oDADiC,CARhC;AAWLC,IAAAA,2BAA2B,EAAE,CAAC,2CAAD,CAXxB;AAYLC,IAAAA,4BAA4B,EAAE,CAC1B,iDAD0B,CAZzB;AAeLC,IAAAA,0BAA0B,EAAE,CACxB,iDADwB,CAfvB;AAkBLC,IAAAA,2BAA2B,EAAE,CACzB,uDADyB;AAlBxB,GAnXK;AAyYdC,EAAAA,MAAM,EAAE;AACJC,IAAAA,MAAM,EAAE,CAAC,uCAAD,CADJ;AAEJC,IAAAA,WAAW,EAAE,CAAC,yCAAD,CAFT;AAGJC,IAAAA,GAAG,EAAE,CAAC,qDAAD,CAHD;AAIJC,IAAAA,QAAQ,EAAE,CAAC,yDAAD,CAJN;AAKJC,IAAAA,eAAe,EAAE,CACb,iEADa,CALb;AAQJC,IAAAA,UAAU,EAAE,CAAC,oDAAD,CARR;AASJC,IAAAA,YAAY,EAAE,CACV,oEADU,CATV;AAYJC,IAAAA,gBAAgB,EAAE,CAAC,sDAAD,CAZd;AAaJC,IAAAA,YAAY,EAAE,CACV,gEADU,CAbV;AAgBJC,IAAAA,cAAc,EAAE,CACZ,oEADY,CAhBZ;AAmBJC,IAAAA,oBAAoB,EAAE,CAClB,sDADkB,CAnBlB;AAsBJC,IAAAA,MAAM,EAAE,CAAC,uDAAD;AAtBJ,GAzYM;AAiadC,EAAAA,YAAY,EAAE;AACVC,IAAAA,cAAc,EAAE,CACZ,oFADY,CADN;AAIVC,IAAAA,QAAQ,EAAE,CACN,+DADM,EAEN,EAFM,EAGN;AAAEC,MAAAA,iBAAiB,EAAE;AAAEC,QAAAA,QAAQ,EAAE;AAAZ;AAArB,KAHM,CAJA;AASVC,IAAAA,WAAW,EAAE,CACT,gEADS,CATH;AAYVC,IAAAA,QAAQ,EAAE,CAAC,2DAAD,CAZA;AAaVC,IAAAA,kBAAkB,EAAE,CAChB,yEADgB,CAbV;AAgBVC,IAAAA,gBAAgB,EAAE,CAAC,sCAAD,CAhBR;AAiBVC,IAAAA,iBAAiB,EAAE,CAAC,gDAAD,CAjBT;AAkBVC,IAAAA,mBAAmB,EAAE,CACjB,yEADiB,EAEjB,EAFiB,EAGjB;AAAE1J,MAAAA,OAAO,EAAE,CAAC,cAAD,EAAiB,oBAAjB;AAAX,KAHiB,CAlBX;AAuBV2J,IAAAA,kBAAkB,EAAE,CAAC,kDAAD,CAvBV;AAwBVC,IAAAA,WAAW,EAAE,CACT,iEADS,CAxBH;AA2BVC,IAAAA,WAAW,EAAE,CAAC,iDAAD;AA3BH,GAjaA;AA8bdC,EAAAA,cAAc,EAAE;AACZC,IAAAA,oBAAoB,EAAE,CAAC,uBAAD,CADV;AAEZC,IAAAA,cAAc,EAAE,CAAC,6BAAD;AAFJ,GA9bF;AAkcdC,EAAAA,UAAU,EAAE;AACRC,IAAAA,0CAA0C,EAAE,CACxC,yEADwC,CADpC;AAIRC,IAAAA,qCAAqC,EAAE,CACnC,gDADmC,CAJ/B;AAORC,IAAAA,0BAA0B,EAAE,CAAC,uBAAD,CAPpB;AAQRhN,IAAAA,wBAAwB,EAAE,CACtB,4DADsB,CARlB;AAWRiN,IAAAA,wCAAwC,EAAE,CACtC,4CADsC,CAXlC;AAcRC,IAAAA,gCAAgC,EAAE,CAC9B,2DAD8B,CAd1B;AAiBRC,IAAAA,kCAAkC,EAAE,CAChC,uCADgC,CAjB5B;AAoBRC,IAAAA,0BAA0B,EAAE,CAAC,0CAAD,CApBpB;AAqBRC,IAAAA,sBAAsB,EAAE,CACpB,mEADoB,CArBhB;AAwBR1M,IAAAA,gBAAgB,EAAE,CACd,+DADc,CAxBV;AA2BR2M,IAAAA,gCAAgC,EAAE,CAC9B,+CAD8B,CA3B1B;AA8BRC,IAAAA,0BAA0B,EAAE,CACxB,gDADwB,CA9BpB;AAiCRC,IAAAA,oCAAoC,EAAE,CAClC,2DADkC,CAjC9B;AAoCRC,IAAAA,uBAAuB,EAAE,CAAC,uCAAD,CApCjB;AAqCRC,IAAAA,gCAAgC,EAAE,CAC9B,yCAD8B,CArC1B;AAwCR7K,IAAAA,gBAAgB,EAAE,CACd,yDADc,CAxCV;AA2CRC,IAAAA,aAAa,EAAE,CACX,4DADW,CA3CP;AA8CR6
K,IAAAA,6BAA6B,EAAE,CAC3B,4CAD2B,CA9CvB;AAiDRC,IAAAA,iDAAiD,EAAE,CAC/C,oDAD+C,CAjD3C;AAoDRC,IAAAA,wBAAwB,EAAE,CAAC,sBAAD,CApDlB;AAqDRC,IAAAA,kBAAkB,EAAE,CAChB,4BADgB,EAEhB,EAFgB,EAGhB;AAAE/B,MAAAA,iBAAiB,EAAE;AAAEgC,QAAAA,MAAM,EAAE;AAAV;AAArB,KAHgB,CArDZ;AA0DRC,IAAAA,oCAAoC,EAAE,CAClC,sCADkC,CA1D9B;AA6DRjK,IAAAA,eAAe,EAAE,CAAC,8CAAD,CA7DT;AA8DRkK,IAAAA,6CAA6C,EAAE,CAC3C,yDAD2C,CA9DvC;AAiERC,IAAAA,+BAA+B,EAAE,CAAC,8BAAD,CAjEzB;AAkERC,IAAAA,6CAA6C,EAAE,CAC3C,4EAD2C,CAlEvC;AAqERC,IAAAA,gCAAgC,EAAE,CAC9B,+CAD8B,CArE1B;AAwERC,IAAAA,4CAA4C,EAAE,CAC1C,yDAD0C,CAxEtC;AA2ERC,IAAAA,yBAAyB,EAAE,CAAC,8CAAD,CA3EnB;AA4ERC,IAAAA,wBAAwB,EAAE,CAAC,6CAAD,CA5ElB;AA6ERC,IAAAA,kBAAkB,EAAE,CAChB,sEADgB,CA7EZ;AAgFRC,IAAAA,0BAA0B,EAAE,CAAC,yCAAD;AAhFpB,GAlcE;AAohBdC,EAAAA,UAAU,EAAE;AACR/O,IAAAA,0BAA0B,EAAE,CACxB,+EADwB,CADpB;AAIRI,IAAAA,uBAAuB,EAAE,CACrB,kDADqB,CAJjB;AAORC,IAAAA,wBAAwB,EAAE,CACtB,4DADsB,CAPlB;AAURU,IAAAA,eAAe,EAAE,CAAC,qDAAD,CAVT;AAWRC,IAAAA,gBAAgB,EAAE,CACd,+DADc,CAXV;AAcR6B,IAAAA,eAAe,EAAE,CAAC,+CAAD,CAdT;AAeRC,IAAAA,YAAY,EAAE,CAAC,kDAAD,CAfN;AAgBRI,IAAAA,gBAAgB,EAAE,CACd,yDADc,CAhBV;AAmBRC,IAAAA,aAAa,EAAE,CACX,4DADW,CAnBP;AAsBRgB,IAAAA,cAAc,EAAE,CAAC,oCAAD,CAtBR;AAuBRC,IAAAA,eAAe,EAAE,CAAC,8CAAD,CAvBT;AAwBRI,IAAAA,6BAA6B,EAAE,CAC3B,+DAD2B,CAxBvB;AA2BRc,IAAAA,+BAA+B,EAAE,CAC7B,kFAD6B,CA3BzB;AA8BRW,IAAAA,4BAA4B,EAAE,CAC1B,+DAD0B;AA9BtB,GAphBE;AAsjBd+I,EAAAA,eAAe,EAAE;AACbC,IAAAA,wBAAwB,EAAE,CACtB,uDADsB,CADb;AAIbC,IAAAA,SAAS,EAAE,CACP,+DADO;AAJE,GAtjBH;AA8jBdC,EAAAA,MAAM,EAAE;AAAE5D,IAAAA,GAAG,EAAE,CAAC,aAAD;AAAP,GA9jBM;AA+jBd6D,EAAAA,eAAe,EAAE;AACbC,IAAAA,8CAA8C,EAAE,CAC5C,mEAD4C,CADnC;AAIbC,IAAAA,kDAAkD,EAAE,CAChD,6EADgD,CAJvC;AAObC,IAAAA,iDAAiD,EAAE,CAC/C,0EAD+C,CAPtC;AAUbC,IAAAA,2BAA2B,EAAE,CACzB,oEADyB,CAVhB;AAabC,IAAAA,qCAAqC,EAAE,CACnC,mDADmC,CAb1B;AAgBbC,IAAAA,mBAAmB,EAAE,CACjB,oEADiB,CAhBR;AAmBbC,IAAAA,0CAA0C,EAAE,CACxC,kEADwC,CAnB/B;AAsBbC,IAAAA,uDAAuD,EAAE,CACrD,iEADqD,CAtB5C;AAyBbC,IAAAA,sDAAsD,EAAE,CACpD,qEADoD,CAzB3C;AA4BbC,IAAAA,kDAAkD,EAAE,CAChD,4EADgD,CA5BvC;AA+BbC,IAAAA,2BAA2B,EAAE,CACzB,oEADyB,CA/BhB;AAkCbC,IAAAA,+CAA+C,EAAE,CAC7C,kEAD6C,CAlCpC;AAqCbC,IAAAA,qCAAqC,EAAE,CACnC,mDADmC,CArC1B;AAwCbC,IAAAA,sDAAsD,EAAE,CACpD,iEADoD;AAxC3C,GA/jBH;AA2mBdC,EAAAA,KAAK,EAAE;AACHC,IAAAA,cAAc,EAAE,CAAC,2BAAD,CADb;AAEH/E,IAAAA,MAAM,EAAE,CAAC,aAAD,CAFL;AAGHgF,IAAAA,aAAa,EAAE,CAAC,gCAAD,CAHZ;AAIHC,IAAAA,MAAM,EAAE,CAAC,yBAAD,CAJL;AAKHC,IAAAA,aAAa,EAAE,CAAC,+CAAD,CALZ;AAMHC,IAAAA,IAAI,EAAE,CAAC,6BAAD,CANH;AAOHjF,IAAAA,GAAG,EAAE,CAAC,sBAAD,CAPF;AAQHkF,IAAAA,UAAU,EAAE,CAAC,4CAAD,CART;AASHC,IAAAA,WAAW,EAAE,CAAC,4BAAD,CATV;AAUHC,IAAAA,IAAI,EAAE,CAAC,YAAD,CAVH;AAWHC,IAAAA,YAAY,EAAE,CAAC,+BAAD,CAXX;AAYHC,IAAAA,WAAW,EAAE,CAAC,8BAAD,CAZV;AAaHC,IAAAA,WAAW,EAAE,CAAC,6BAAD,CAbV;AAcHC,IAAAA,SAAS,EAAE,CAAC,4BAAD,CAdR;AAeHC,IAAAA,UAAU,EAAE,CAAC,mBAAD,CAfT;AAgBHC,IAAAA,WAAW,EAAE,CAAC,oBAAD,CAhBV;AAiBHC,IAAAA,IAAI,EAAE,CAAC,2BAAD,CAjBH;AAkBHC,IAAAA,MAAM,EAAE,CAAC,8BAAD,CAlBL;AAmBHnF,IAAAA,MAAM,EAAE,CAAC,wBAAD,CAnBL;AAoBHoF,IAAAA,aAAa,EAAE,CAAC,8CAAD;AApBZ,GA3mBO;AAioBdC,EAAAA,GAAG,EAAE;AACDC,IAAAA,UAAU,EAAE,CAAC,sCAAD,CADX;AAEDC,IAAAA,YAAY,EAAE,CAAC,wCAAD,CAFb;AAGDC,IAAAA,SAAS,EAAE,CAAC,qCAAD,CAHV;AAIDC,IAAAA,SAAS,EAAE,CAAC,qCAAD,CAJV;AAKDC,IAAAA,UAAU,EAAE,CAAC,sCAAD,CALX;AAMDC,IAAAA,SAAS,EAAE,CAAC,6CAAD,CANV;AAODC,IAAAA,OAAO,EAAE,CAAC,gDAAD,CAPR;AAQDC,IAAAA,SAAS,EAAE,CAAC,oDAAD,CARV;AASDC,IAAAA,MAAM,EAAE,CAAC,yCAAD,CATP;AAUDC,IAAAA,MAAM,EAAE,CAAC,8CAAD,CAVP;AAWDC,IAAAA,OAAO,EAAE,CAAC,gDAAD,CAXR;AAYDC,IAAAA,gBAAgB,EAAE,CAAC,mDAAD,CAZjB;AAaDC,IAAAA,SAAS,EAAE,CAAC,4CAAD;AAbV,GAjoBS;AAgpBdC,EAAAA,SAAS,EAAE;AACPC,IA
AAA,eAAe,EAAE,CAAC,0BAAD,CADV;AAEPC,IAAAA,WAAW,EAAE,CAAC,iCAAD;AAFN,GAhpBG;AAopBdC,EAAAA,YAAY,EAAE;AACVC,IAAAA,mCAAmC,EAAE,CAAC,8BAAD,CAD3B;AAEVC,IAAAA,qBAAqB,EAAE,CAAC,oCAAD,CAFb;AAGVC,IAAAA,sBAAsB,EAAE,CAAC,8CAAD,CAHd;AAIVC,IAAAA,iCAAiC,EAAE,CAC/B,8BAD+B,EAE/B,EAF+B,EAG/B;AAAEzP,MAAAA,OAAO,EAAE,CAAC,cAAD,EAAiB,qCAAjB;AAAX,KAH+B,CAJzB;AASV0P,IAAAA,sCAAsC,EAAE,CAAC,iCAAD,CAT9B;AAUVC,IAAAA,wBAAwB,EAAE,CAAC,uCAAD,CAVhB;AAWVC,IAAAA,yBAAyB,EAAE,CACvB,iDADuB,CAXjB;AAcVC,IAAAA,oCAAoC,EAAE,CAClC,iCADkC,EAElC,EAFkC,EAGlC;AAAE7P,MAAAA,OAAO,EAAE,CAAC,cAAD,EAAiB,wCAAjB;AAAX,KAHkC,CAd5B;AAmBV8P,IAAAA,mCAAmC,EAAE,CAAC,8BAAD,CAnB3B;AAoBVC,IAAAA,qBAAqB,EAAE,CAAC,oCAAD,CApBb;AAqBVC,IAAAA,sBAAsB,EAAE,CAAC,8CAAD,CArBd;AAsBVC,IAAAA,iCAAiC,EAAE,CAC/B,8BAD+B,EAE/B,EAF+B,EAG/B;AAAEjQ,MAAAA,OAAO,EAAE,CAAC,cAAD,EAAiB,qCAAjB;AAAX,KAH+B;AAtBzB,GAppBA;AAgrBdkQ,EAAAA,MAAM,EAAE;AACJC,IAAAA,YAAY,EAAE,CACV,4DADU,CADV;AAIJC,IAAAA,SAAS,EAAE,CAAC,yDAAD,CAJP;AAKJC,IAAAA,sBAAsB,EAAE,CAAC,gDAAD,CALpB;AAMJjI,IAAAA,MAAM,EAAE,CAAC,mCAAD,CANJ;AAOJgF,IAAAA,aAAa,EAAE,CACX,2DADW,CAPX;AAUJkD,IAAAA,WAAW,EAAE,CAAC,mCAAD,CAVT;AAWJC,IAAAA,eAAe,EAAE,CAAC,uCAAD,CAXb;AAYJjD,IAAAA,aAAa,EAAE,CACX,2DADW,CAZX;AAeJkD,IAAAA,WAAW,EAAE,CAAC,4CAAD,CAfT;AAgBJC,IAAAA,eAAe,EAAE,CACb,4DADa,CAhBb;AAmBJnI,IAAAA,GAAG,EAAE,CAAC,iDAAD,CAnBD;AAoBJkF,IAAAA,UAAU,EAAE,CAAC,wDAAD,CApBR;AAqBJkD,IAAAA,QAAQ,EAAE,CAAC,oDAAD,CArBN;AAsBJC,IAAAA,QAAQ,EAAE,CAAC,yCAAD,CAtBN;AAuBJC,IAAAA,YAAY,EAAE,CAAC,yDAAD,CAvBV;AAwBJlD,IAAAA,IAAI,EAAE,CAAC,aAAD,CAxBF;AAyBJmD,IAAAA,aAAa,EAAE,CAAC,qCAAD,CAzBX;AA0BJlD,IAAAA,YAAY,EAAE,CAAC,0DAAD,CA1BV;AA2BJmD,IAAAA,mBAAmB,EAAE,CAAC,2CAAD,CA3BjB;AA4BJC,IAAAA,UAAU,EAAE,CAAC,wDAAD,CA5BR;AA6BJC,IAAAA,iBAAiB,EAAE,CAAC,yCAAD,CA7Bf;AA8BJC,IAAAA,qBAAqB,EAAE,CACnB,0DADmB,CA9BnB;AAiCJhG,IAAAA,wBAAwB,EAAE,CAAC,kBAAD,CAjCtB;AAkCJiG,IAAAA,UAAU,EAAE,CAAC,wBAAD,CAlCR;AAmCJC,IAAAA,WAAW,EAAE,CAAC,kCAAD,CAnCT;AAoCJC,IAAAA,sBAAsB,EAAE,CACpB,gEADoB,CApCpB;AAuCJC,IAAAA,iBAAiB,EAAE,CAAC,kCAAD,CAvCf;AAwCJC,IAAAA,iBAAiB,EAAE,CACf,wDADe,CAxCf;AA2CJC,IAAAA,cAAc,EAAE,CAAC,sCAAD,CA3CZ;AA4CJC,IAAAA,IAAI,EAAE,CAAC,sDAAD,CA5CF;AA6CJC,IAAAA,eAAe,EAAE,CACb,2DADa,CA7Cb;AAgDJC,IAAAA,eAAe,EAAE,CACb,8DADa,CAhDb;AAmDJC,IAAAA,WAAW,EAAE,CACT,kEADS,CAnDT;AAsDJC,IAAAA,SAAS,EAAE,CAAC,wDAAD,CAtDP;AAuDJC,IAAAA,MAAM,EAAE,CAAC,yDAAD,CAvDJ;AAwDJ9I,IAAAA,MAAM,EAAE,CAAC,mDAAD,CAxDJ;AAyDJoF,IAAAA,aAAa,EAAE,CAAC,0DAAD,CAzDX;AA0DJ2D,IAAAA,WAAW,EAAE,CAAC,2CAAD,CA1DT;AA2DJC,IAAAA,eAAe,EAAE,CACb,2DADa;AA3Db,GAhrBM;AA+uBdC,EAAAA,QAAQ,EAAE;AACN1J,IAAAA,GAAG,EAAE,CAAC,yBAAD,CADC;AAEN2J,IAAAA,kBAAkB,EAAE,CAAC,eAAD,CAFd;AAGNC,IAAAA,UAAU,EAAE,CAAC,mCAAD;AAHN,GA/uBI;AAovBdC,EAAAA,QAAQ,EAAE;AACNC,IAAAA,MAAM,EAAE,CAAC,gBAAD,CADF;AAENC,IAAAA,SAAS,EAAE,CACP,oBADO,EAEP;AAAEC,MAAAA,OAAO,EAAE;AAAE,wBAAgB;AAAlB;AAAX,KAFO;AAFL,GApvBI;AA2vBdC,EAAAA,IAAI,EAAE;AACFjK,IAAAA,GAAG,EAAE,CAAC,WAAD,CADH;AAEFkK,IAAAA,UAAU,EAAE,CAAC,cAAD,CAFV;AAGFC,IAAAA,MAAM,EAAE,CAAC,UAAD,CAHN;AAIFC,IAAAA,IAAI,EAAE,CAAC,OAAD;AAJJ,GA3vBQ;AAiwBdC,EAAAA,UAAU,EAAE;AACRC,IAAAA,YAAY,EAAE,CAAC,qCAAD,CADN;AAERC,IAAAA,iCAAiC,EAAE,CAC/B,gDAD+B,CAF3B;AAKRC,IAAAA,mBAAmB,EAAE,CACjB,sDADiB,CALb;AAQRC,IAAAA,qBAAqB,EAAE,CACnB,mDADmB,CARf;AAWRC,IAAAA,8BAA8B,EAAE,CAC5B,6CAD4B,CAXxB;AAcRC,IAAAA,gBAAgB,EAAE,CAAC,0CAAD,CAdV;AAeRC,IAAAA,eAAe,EAAE,CAAC,kCAAD,CAfT;AAgBRC,IAAAA,aAAa,EAAE,CAAC,8CAAD,CAhBP;AAiBRC,IAAAA,6BAA6B,EAAE,CAAC,qCAAD,CAjBvB;AAkBRC,IAAAA,eAAe,EAAE,CAAC,2CAAD,CAlBT;AAmBRpI,IAAAA,wBAAwB,EAAE,CAAC,sBAAD,CAnBlB;AAoBRiG,IAAAA,UAAU,EAAE,CAAC,4BAAD,CApBJ;AAqBRoC,IAAAA,6BAA6B,EAAE,CAC3B,kDAD2B,CArBvB;AAwBRC,IAAAA,eAAe,EAAE,CAAC,wDAAD,CAxBT;AAyBRC,I
AAAA,gBAAgB,EAAE,CACd,kDADc,EAEd,EAFc,EAGd;AAAExT,MAAAA,OAAO,EAAE,CAAC,YAAD,EAAe,+BAAf;AAAX,KAHc,CAzBV;AA8BRyT,IAAAA,eAAe,EAAE,CAAC,wDAAD,CA9BT;AA+BRC,IAAAA,gBAAgB,EAAE,CAAC,wCAAD,CA/BV;AAgCRhI,IAAAA,yBAAyB,EAAE,CAAC,uBAAD,CAhCnB;AAiCRiI,IAAAA,WAAW,EAAE,CAAC,6BAAD,CAjCL;AAkCRC,IAAAA,WAAW,EAAE,CAAC,kCAAD,CAlCL;AAmCRC,IAAAA,8BAA8B,EAAE,CAC5B,+DAD4B,CAnCxB;AAsCRC,IAAAA,gBAAgB,EAAE,CACd,qEADc,CAtCV;AAyCRC,IAAAA,YAAY,EAAE,CAAC,oCAAD;AAzCN,GAjwBE;AA4yBdC,EAAAA,IAAI,EAAE;AACFC,IAAAA,SAAS,EAAE,CAAC,mCAAD,CADT;AAEFC,IAAAA,gBAAgB,EAAE,CAAC,gDAAD,CAFhB;AAGFC,IAAAA,gBAAgB,EAAE,CAAC,mCAAD,CAHhB;AAIFC,IAAAA,sBAAsB,EAAE,CAAC,oCAAD,CAJtB;AAKFC,IAAAA,4BAA4B,EAAE,CAAC,2CAAD,CAL5B;AAMFC,IAAAA,kCAAkC,EAAE,CAChC,kDADgC,CANlC;AASFC,IAAAA,gBAAgB,EAAE,CAAC,8BAAD,CAThB;AAUFC,IAAAA,aAAa,EAAE,CAAC,wBAAD,CAVb;AAWFC,IAAAA,aAAa,EAAE,CAAC,oCAAD,CAXb;AAYFnM,IAAAA,GAAG,EAAE,CAAC,iBAAD,CAZH;AAaFoM,IAAAA,iCAAiC,EAAE,CAAC,kCAAD,CAbjC;AAcFC,IAAAA,oBAAoB,EAAE,CAAC,wCAAD,CAdpB;AAeFC,IAAAA,UAAU,EAAE,CAAC,iCAAD,CAfV;AAgBFC,IAAAA,sBAAsB,EAAE,CAAC,wCAAD,CAhBtB;AAiBFxO,IAAAA,kBAAkB,EAAE,CAChB,0DADgB,CAjBlB;AAoBFqH,IAAAA,IAAI,EAAE,CAAC,oBAAD,CApBJ;AAqBFoH,IAAAA,oBAAoB,EAAE,CAAC,+BAAD,CArBpB;AAsBFC,IAAAA,gBAAgB,EAAE,CAAC,wBAAD,CAtBhB;AAuBFC,IAAAA,eAAe,EAAE,CAAC,mDAAD,CAvBf;AAwBFC,IAAAA,qBAAqB,EAAE,CAAC,oCAAD,CAxBrB;AAyBFhK,IAAAA,wBAAwB,EAAE,CAAC,gBAAD,CAzBxB;AA0BF4C,IAAAA,WAAW,EAAE,CAAC,4BAAD,CA1BX;AA2BFqH,IAAAA,mBAAmB,EAAE,CAAC,mDAAD,CA3BnB;AA4BFC,IAAAA,WAAW,EAAE,CAAC,yBAAD,CA5BX;AA6BFC,IAAAA,mCAAmC,EAAE,CAAC,4BAAD,CA7BnC;AA8BFC,IAAAA,wBAAwB,EAAE,CAAC,uCAAD,CA9BxB;AA+BFC,IAAAA,sBAAsB,EAAE,CAAC,6BAAD,CA/BtB;AAgCFC,IAAAA,iBAAiB,EAAE,CAAC,gCAAD,CAhCjB;AAiCFvO,IAAAA,qBAAqB,EAAE,CAAC,4CAAD,CAjCrB;AAkCFwO,IAAAA,YAAY,EAAE,CAAC,uBAAD,CAlCZ;AAmCFC,IAAAA,WAAW,EAAE,CAAC,wCAAD,CAnCX;AAoCFxO,IAAAA,wBAAwB,EAAE,CACtB,oEADsB,CApCxB;AAuCFyO,IAAAA,YAAY,EAAE,CAAC,uCAAD,CAvCZ;AAwCFC,IAAAA,uBAAuB,EAAE,CAAC,2CAAD,CAxCvB;AAyCFC,IAAAA,yBAAyB,EAAE,CACvB,qDADuB,CAzCzB;AA4CFC,IAAAA,0CAA0C,EAAE,CACxC,8CADwC,CA5C1C;AA+CFC,IAAAA,oBAAoB,EAAE,CAAC,wCAAD,CA/CpB;AAgDFC,IAAAA,uCAAuC,EAAE,CACrC,2CADqC,CAhDvC;AAmDFC,IAAAA,WAAW,EAAE,CAAC,sCAAD,CAnDX;AAoDFjN,IAAAA,MAAM,EAAE,CAAC,mBAAD,CApDN;AAqDFkN,IAAAA,oCAAoC,EAAE,CAClC,oCADkC,CArDpC;AAwDFC,IAAAA,aAAa,EAAE,CAAC,mCAAD,CAxDb;AAyDFC,IAAAA,yBAAyB,EAAE,CAAC,0CAAD;AAzDzB,GA5yBQ;AAu2BdC,EAAAA,QAAQ,EAAE;AACNC,IAAAA,iCAAiC,EAAE,CAC/B,qDAD+B,CAD7B;AAINC,IAAAA,mBAAmB,EAAE,CACjB,2DADiB,CAJf;AAONC,IAAAA,oBAAoB,EAAE,CAClB,iEADkB,CAPhB;AAUNC,IAAAA,wCAAwC,EAAE,CACtC,mFADsC,CAVpC;AAaNC,IAAAA,0BAA0B,EAAE,CACxB,yFADwB,CAbtB;AAgBNC,IAAAA,2BAA2B,EAAE,CACzB,+FADyB,CAhBvB;AAmBNC,IAAAA,4CAA4C,EAAE,CAC1C,iEAD0C,EAE1C,EAF0C,EAG1C;AAAE3W,MAAAA,OAAO,EAAE,CAAC,UAAD,EAAa,2CAAb;AAAX,KAH0C,CAnBxC;AAwBN4W,IAAAA,2DAA2D,EAAE,CACzD,2DADyD,EAEzD,EAFyD,EAGzD;AACI5W,MAAAA,OAAO,EAAE,CACL,UADK,EAEL,yDAFK;AADb,KAHyD,CAxBvD;AAkCN6W,IAAAA,uDAAuD,EAAE,CACrD,2DADqD,CAlCnD;AAqCNC,IAAAA,yCAAyC,EAAE,CACvC,iEADuC,CArCrC;AAwCNC,IAAAA,0CAA0C,EAAE,CACxC,uEADwC,CAxCtC;AA2CNC,IAAAA,8BAA8B,EAAE,CAC5B,kDAD4B,CA3C1B;AA8CNC,IAAAA,yBAAyB,EAAE,CACvB,wDADuB,CA9CrB;AAiDNC,IAAAA,iBAAiB,EAAE,CACf,8DADe,CAjDb;AAoDNC,IAAAA,qCAAqC,EAAE,CACnC,gFADmC,CApDjC;AAuDNC,IAAAA,gCAAgC,EAAE,CAC9B,sFAD8B,CAvD5B;AA0DNC,IAAAA,wBAAwB,EAAE,CACtB,4FADsB,CA1DpB;AA6DNC,IAAAA,gCAAgC,EAAE,CAAC,oBAAD,CA7D5B;AA8DNC,IAAAA,2BAA2B,EAAE,CAAC,0BAAD,CA9DvB;AA+DNC,IAAAA,mBAAmB,EAAE,CAAC,gCAAD,CA/Df;AAgENC,IAAAA,kCAAkC,EAAE,CAChC,mEADgC,CAhE9B;AAmENC,IAAAA,oBAAoB,EAAE,CAClB,yEADkB,CAnEhB;AAsENC,IAAAA,qBAAqB,EAAE,CACnB,+EADmB,CAtEjB;AAyENC,IAAAA,yCAAyC,EAAE,CACvC,yFADuC,CAzErC;AA4ENC,IAAAA,2BAA2B,EAAE,CACzB,+FADyB,CA5E
vB;AA+ENC,IAAAA,4BAA4B,EAAE,CAC1B,qGAD0B;AA/ExB,GAv2BI;AA07BdC,EAAAA,QAAQ,EAAE;AACNC,IAAAA,eAAe,EAAE,CAAC,qDAAD,CADX;AAENC,IAAAA,UAAU,EAAE,CAAC,0CAAD,CAFN;AAGNC,IAAAA,YAAY,EAAE,CAAC,qCAAD,CAHR;AAIN9N,IAAAA,0BAA0B,EAAE,CAAC,qBAAD,CAJtB;AAKN+N,IAAAA,YAAY,EAAE,CAAC,2BAAD,CALR;AAMNC,IAAAA,aAAa,EAAE,CAAC,qCAAD,CANT;AAON/K,IAAAA,MAAM,EAAE,CAAC,+BAAD,CAPF;AAQNgL,IAAAA,UAAU,EAAE,CAAC,0CAAD,CARN;AASNC,IAAAA,YAAY,EAAE,CAAC,sCAAD,CATR;AAUNhQ,IAAAA,GAAG,EAAE,CAAC,4BAAD,CAVC;AAWNiQ,IAAAA,OAAO,EAAE,CAAC,uCAAD,CAXH;AAYNC,IAAAA,SAAS,EAAE,CAAC,mCAAD,CAZL;AAaNC,IAAAA,oBAAoB,EAAE,CAClB,gEADkB,CAbhB;AAgBNC,IAAAA,SAAS,EAAE,CAAC,yCAAD,CAhBL;AAiBNC,IAAAA,iBAAiB,EAAE,CAAC,0CAAD,CAjBb;AAkBNC,IAAAA,WAAW,EAAE,CAAC,oCAAD,CAlBP;AAmBN1H,IAAAA,UAAU,EAAE,CAAC,0BAAD,CAnBN;AAoBNC,IAAAA,WAAW,EAAE,CAAC,oCAAD,CApBP;AAqBNtD,IAAAA,WAAW,EAAE,CAAC,gCAAD,CArBP;AAsBNgL,IAAAA,QAAQ,EAAE,CAAC,8CAAD,CAtBJ;AAuBNC,IAAAA,UAAU,EAAE,CAAC,0CAAD,CAvBN;AAwBNC,IAAAA,kBAAkB,EAAE,CAChB,wDADgB,CAxBd;AA2BNhQ,IAAAA,MAAM,EAAE,CAAC,8BAAD,CA3BF;AA4BNiQ,IAAAA,UAAU,EAAE,CAAC,yCAAD,CA5BN;AA6BNC,IAAAA,YAAY,EAAE,CAAC,qCAAD;AA7BR,GA17BI;AAy9BdC,EAAAA,KAAK,EAAE;AACHC,IAAAA,aAAa,EAAE,CAAC,qDAAD,CADZ;AAEH/Q,IAAAA,MAAM,EAAE,CAAC,kCAAD,CAFL;AAGHgR,IAAAA,2BAA2B,EAAE,CACzB,8EADyB,CAH1B;AAMHC,IAAAA,YAAY,EAAE,CAAC,wDAAD,CANX;AAOHC,IAAAA,mBAAmB,EAAE,CACjB,yDADiB,CAPlB;AAUHC,IAAAA,mBAAmB,EAAE,CACjB,sEADiB,CAVlB;AAaHC,IAAAA,mBAAmB,EAAE,CACjB,0DADiB,CAblB;AAgBHC,IAAAA,aAAa,EAAE,CACX,8EADW,CAhBZ;AAmBHnR,IAAAA,GAAG,EAAE,CAAC,+CAAD,CAnBF;AAoBHoR,IAAAA,SAAS,EAAE,CACP,mEADO,CApBR;AAuBHC,IAAAA,gBAAgB,EAAE,CAAC,uDAAD,CAvBf;AAwBHjM,IAAAA,IAAI,EAAE,CAAC,iCAAD,CAxBH;AAyBHkM,IAAAA,qBAAqB,EAAE,CACnB,4EADmB,CAzBpB;AA4BHhM,IAAAA,WAAW,EAAE,CAAC,uDAAD,CA5BV;AA6BHiM,IAAAA,SAAS,EAAE,CAAC,qDAAD,CA7BR;AA8BHC,IAAAA,sBAAsB,EAAE,CACpB,mEADoB,CA9BrB;AAiCHC,IAAAA,kBAAkB,EAAE,CAChB,wDADgB,CAjCjB;AAoCHC,IAAAA,yBAAyB,EAAE,CAAC,0CAAD,CApCxB;AAqCHC,IAAAA,WAAW,EAAE,CAAC,uDAAD,CArCV;AAsCHC,IAAAA,KAAK,EAAE,CAAC,qDAAD,CAtCJ;AAuCHC,IAAAA,wBAAwB,EAAE,CACtB,sEADsB,CAvCvB;AA0CHC,IAAAA,gBAAgB,EAAE,CACd,oEADc,CA1Cf;AA6CHC,IAAAA,YAAY,EAAE,CACV,2EADU,CA7CX;AAgDHtR,IAAAA,MAAM,EAAE,CAAC,iDAAD,CAhDL;AAiDHuR,IAAAA,YAAY,EAAE,CACV,6DADU,CAjDX;AAoDHC,IAAAA,YAAY,EAAE,CACV,mEADU,CApDX;AAuDHC,IAAAA,mBAAmB,EAAE,CACjB,yDADiB;AAvDlB,GAz9BO;AAohCdC,EAAAA,SAAS,EAAE;AAAEnS,IAAAA,GAAG,EAAE,CAAC,iBAAD;AAAP,GAphCG;AAqhCdoS,EAAAA,SAAS,EAAE;AACPC,IAAAA,sBAAsB,EAAE,CACpB,4DADoB,CADjB;AAIPC,IAAAA,cAAc,EAAE,CACZ,4DADY,CAJT;AAOPC,IAAAA,qBAAqB,EAAE,CACnB,mEADmB,CAPhB;AAUPC,IAAAA,iCAAiC,EAAE,CAC/B,kEAD+B,CAV5B;AAaPC,IAAAA,gBAAgB,EAAE,CACd,4DADc,CAbX;AAgBPC,IAAAA,mCAAmC,EAAE,CACjC,wGADiC,CAhB9B;AAmBPC,IAAAA,4BAA4B,EAAE,CAC1B,8EAD0B,CAnBvB;AAsBPC,IAAAA,sBAAsB,EAAE,CACpB,4EADoB,CAtBjB;AAyBPC,IAAAA,cAAc,EAAE,CACZ,4EADY,CAzBT;AA4BPC,IAAAA,qBAAqB,EAAE,CACnB,mFADmB,CA5BhB;AA+BPC,IAAAA,2BAA2B,EAAE,CACzB,kFADyB,CA/BtB;AAkCPC,IAAAA,gBAAgB,EAAE,CACd,4EADc,CAlCX;AAqCPC,IAAAA,uBAAuB,EAAE,CACrB,8FADqB,CArClB;AAwCPC,IAAAA,8BAA8B,EAAE,CAC5B,wHAD4B,CAxCzB;AA2CPC,IAAAA,oBAAoB,EAAE,CAClB,2DADkB,CA3Cf;AA8CPC,IAAAA,YAAY,EAAE,CAAC,2DAAD,CA9CP;AA+CPC,IAAAA,mBAAmB,EAAE,CACjB,kEADiB,CA/Cd;AAkDPC,IAAAA,+BAA+B,EAAE,CAC7B,iEAD6B,CAlD1B;AAqDPC,IAAAA,cAAc,EAAE,CACZ,2DADY,CArDT;AAwDPC,IAAAA,iCAAiC,EAAE,CAC/B,uGAD+B,CAxD5B;AA2DPC,IAAAA,0BAA0B,EAAE,CACxB,6EADwB;AA3DrB,GArhCG;AAolCdC,EAAAA,KAAK,EAAE;AACHC,IAAAA,gBAAgB,EAAE,CACd,oDADc,EAEd,EAFc,EAGd;AAAEjc,MAAAA,OAAO,EAAE,CAAC,OAAD,EAAU,sCAAV;AAAX,KAHc,CADf;AAMHkc,IAAAA,oCAAoC,EAAE,CAClC,oDADkC,CANnC;AASHC,IAAAA,wBAAwB,EAAE,CACtB,2EADsB,EAEtB,EAFsB,EAGtB;AAAEC,MAAAA,SAAS,EAAE;AAAb,KAHsB,CATvB;AAcHpE,IAAAA,eAAe,EAAE,CAAC,oDA
AD,CAdd;AAeHqE,IAAAA,sBAAsB,EAAE,CACpB,yFADoB,EAEpB,EAFoB,EAGpB;AAAED,MAAAA,SAAS,EAAE;AAAb,KAHoB,CAfrB;AAoBHE,IAAAA,yBAAyB,EAAE,CACvB,4EADuB,EAEvB,EAFuB,EAGvB;AAAEF,MAAAA,SAAS,EAAE;AAAb,KAHuB,CApBxB;AAyBHG,IAAAA,yBAAyB,EAAE,CACvB,4EADuB,EAEvB,EAFuB,EAGvB;AAAEH,MAAAA,SAAS,EAAE;AAAb,KAHuB,CAzBxB;AA8BHI,IAAAA,iBAAiB,EAAE,CAAC,oDAAD,CA9BhB;AA+BHC,IAAAA,wBAAwB,EAAE,CACtB,gDADsB,CA/BvB;AAkCHC,IAAAA,gBAAgB,EAAE,CAAC,6CAAD,CAlCf;AAmCHC,IAAAA,cAAc,EAAE,CAAC,mDAAD,CAnCb;AAoCHC,IAAAA,0BAA0B,EAAE,CACxB,8CADwB,CApCzB;AAuCHC,IAAAA,cAAc,EAAE,CAAC,sCAAD,CAvCb;AAwCHC,IAAAA,mBAAmB,EAAE,CACjB,0DADiB,CAxClB;AA2CHC,IAAAA,+BAA+B,EAAE,CAC7B,6EAD6B,CA3C9B;AA8CHC,IAAAA,kBAAkB,EAAE,CAAC,2CAAD,CA9CjB;AA+CHC,IAAAA,eAAe,EAAE,CAAC,iCAAD,CA/Cd;AAgDHC,IAAAA,gBAAgB,EAAE,CAAC,wCAAD,CAhDf;AAiDHC,IAAAA,sBAAsB,EAAE,CACpB,iEADoB,CAjDrB;AAoDHC,IAAAA,mBAAmB,EAAE,CAAC,uCAAD,CApDlB;AAqDHhT,IAAAA,0BAA0B,EAAE,CAAC,kBAAD,CArDzB;AAsDHiT,IAAAA,UAAU,EAAE,CAAC,kCAAD,CAtDT;AAuDHC,IAAAA,WAAW,EAAE,CAAC,wBAAD,CAvDV;AAwDHC,IAAAA,yBAAyB,EAAE,CACvB,2DADuB,CAxDxB;AA2DHC,IAAAA,0BAA0B,EAAE,CAAC,2CAAD,CA3DzB;AA4DHC,IAAAA,eAAe,EAAE,CAAC,kCAAD,CA5Dd;AA6DHC,IAAAA,aAAa,EAAE,CAAC,qCAAD,CA7DZ;AA8DHC,IAAAA,mBAAmB,EAAE,CAAC,4CAAD,CA9DlB;AA+DHC,IAAAA,mBAAmB,EAAE,CACjB,uDADiB,CA/DlB;AAkEHpJ,IAAAA,aAAa,EAAE,CAAC,kCAAD,CAlEZ;AAmEHqJ,IAAAA,iBAAiB,EAAE,CACf,qDADe,EAEf,EAFe,EAGf;AAAE7d,MAAAA,OAAO,EAAE,CAAC,OAAD,EAAU,uCAAV;AAAX,KAHe,CAnEhB;AAwEH8d,IAAAA,qCAAqC,EAAE,CACnC,qDADmC,CAxEpC;AA2EHzQ,IAAAA,MAAM,EAAE,CAAC,8BAAD,CA3EL;AA4EH0Q,IAAAA,wBAAwB,EAAE,CACtB,wEADsB,CA5EvB;AA+EHC,IAAAA,2BAA2B,EAAE,CACzB,0EADyB,CA/E1B;AAkFHC,IAAAA,mBAAmB,EAAE,CACjB,8DADiB,CAlFlB;AAqFHC,IAAAA,cAAc,EAAE,CAAC,sDAAD,CArFb;AAsFHC,IAAAA,sBAAsB,EAAE,CACpB,2DADoB,CAtFrB;AAyFHC,IAAAA,mBAAmB,EAAE,CAAC,oDAAD,CAzFlB;AA0FHC,IAAAA,+BAA+B,EAAE,CAC7B,+EAD6B,CA1F9B;AA6FHC,IAAAA,eAAe,EAAE,CAAC,4CAAD,CA7Fd;AA8FHC,IAAAA,gBAAgB,EAAE,CACd,0DADc,CA9Ff;AAiGHC,IAAAA,UAAU,EAAE,CAAC,8CAAD,CAjGT;AAkGHC,IAAAA,gBAAgB,EAAE,CACd,0DADc,CAlGf;AAqGHC,IAAAA,eAAe,EAAE,CAAC,oCAAD,CArGd;AAsGHC,IAAAA,iCAAiC,EAAE,CAC/B,yFAD+B,CAtGhC;AAyGHC,IAAAA,aAAa,EAAE,CAAC,oDAAD,CAzGZ;AA0GHC,IAAAA,kBAAkB,EAAE,CAChB,yDADgB,CA1GjB;AA6GHC,IAAAA,mBAAmB,EAAE,CACjB,kEADiB,CA7GlB;AAgHHrK,IAAAA,aAAa,EAAE,CAAC,8CAAD,CAhHZ;AAiHHsK,IAAAA,6BAA6B,EAAE,CAC3B,uDAD2B,CAjH5B;AAoHHC,IAAAA,iBAAiB,EAAE,CAAC,kCAAD,CApHhB;AAqHHC,IAAAA,0BAA0B,EAAE,CACxB,mDADwB,CArHzB;AAwHHC,IAAAA,eAAe,EAAE,CACb,yCADa,EAEb,EAFa,EAGb;AAAElf,MAAAA,OAAO,EAAE,CAAC,OAAD,EAAU,wBAAV;AAAX,KAHa,CAxHd;AA6HHmf,IAAAA,sBAAsB,EAAE,CAAC,yCAAD,CA7HrB;AA8HHC,IAAAA,sBAAsB,EAAE,CAAC,yCAAD,CA9HrB;AA+HHC,IAAAA,4BAA4B,EAAE,CAC1B,oDAD0B,CA/H3B;AAkIHC,IAAAA,gBAAgB,EAAE,CAAC,+BAAD,CAlIf;AAmIHC,IAAAA,yBAAyB,EAAE,CACvB,gDADuB,CAnIxB;AAsIHC,IAAAA,oBAAoB,EAAE,CAClB,oDADkB,CAtInB;AAyIHlX,IAAAA,GAAG,EAAE,CAAC,2BAAD,CAzIF;AA0IHmX,IAAAA,qBAAqB,EAAE,CACnB,qEADmB,CA1IpB;AA6IHC,IAAAA,wBAAwB,EAAE,CACtB,uEADsB,CA7IvB;AAgJHC,IAAAA,kBAAkB,EAAE,CAAC,wCAAD,CAhJjB;AAiJHC,IAAAA,yBAAyB,EAAE,CACvB,wFADuB,CAjJxB;AAoJHC,IAAAA,YAAY,EAAE,CAAC,kCAAD,CApJX;AAqJHC,IAAAA,kCAAkC,EAAE,CAChC,0EADgC,CArJjC;AAwJHC,IAAAA,WAAW,EAAE,CAAC,mDAAD,CAxJV;AAyJHC,IAAAA,SAAS,EAAE,CAAC,6CAAD,CAzJR;AA0JHC,IAAAA,mBAAmB,EAAE,CACjB,wDADiB,CA1JlB;AA6JHC,IAAAA,SAAS,EAAE,CAAC,0CAAD,CA7JR;AA8JHC,IAAAA,qBAAqB,EAAE,CAAC,gDAAD,CA9JpB;AA+JHC,IAAAA,8BAA8B,EAAE,CAC5B,+DAD4B,CA/J7B;AAkKHC,IAAAA,uBAAuB,EAAE,CAAC,gDAAD,CAlKtB;AAmKHzR,IAAAA,SAAS,EAAE,CAAC,yCAAD,CAnKR;AAoKH0R,IAAAA,sBAAsB,EAAE,CAAC,iDAAD,CApKrB;AAqKHC,IAAAA,gBAAgB,EAAE,CAAC,iDAAD,CArKf;AAsKHC,IAAAA,4BAA4B,EAAE,CAC1B,4EAD0B,CAtK3B;AAyKHC,IAAAA,0BAA0B,EAAE,CAAC,6CAAD,CAzKzB;AA0KHC,IAAAA,UAAU,EAAE,CAAC,
2CAAD,CA1KT;AA2KHC,IAAAA,oBAAoB,EAAE,CAAC,8CAAD,CA3KnB;AA4KHC,IAAAA,YAAY,EAAE,CAAC,yCAAD,CA5KX;AA6KHC,IAAAA,aAAa,EAAE,CAAC,uDAAD,CA7KZ;AA8KHC,IAAAA,mBAAmB,EAAE,CACjB,4EADiB,CA9KlB;AAiLHC,IAAAA,cAAc,EAAE,CACZ,2DADY,CAjLb;AAoLHC,IAAAA,mBAAmB,EAAE,CAAC,+CAAD,CApLlB;AAqLHC,IAAAA,gBAAgB,EAAE,CAAC,2CAAD,CArLf;AAsLHC,IAAAA,QAAQ,EAAE,CAAC,iCAAD,CAtLP;AAuLHC,IAAAA,aAAa,EAAE,CAAC,mDAAD,CAvLZ;AAwLHC,IAAAA,mBAAmB,EAAE,CAAC,wCAAD,CAxLlB;AAyLHC,IAAAA,qBAAqB,EAAE,CAAC,+CAAD,CAzLpB;AA0LHC,IAAAA,8BAA8B,EAAE,CAC5B,sFAD4B,CA1L7B;AA6LHC,IAAAA,iBAAiB,EAAE,CAAC,4CAAD,CA7LhB;AA8LHC,IAAAA,SAAS,EAAE,CAAC,kCAAD,CA9LR;AA+LHC,IAAAA,oBAAoB,EAAE,CAAC,wCAAD,CA/LnB;AAgMHC,IAAAA,UAAU,EAAE,CAAC,iDAAD,CAhMT;AAiMHC,IAAAA,eAAe,EAAE,CAAC,sDAAD,CAjMd;AAkMHC,IAAAA,eAAe,EAAE,CAAC,+CAAD,CAlMd;AAmMHC,IAAAA,yBAAyB,EAAE,CACvB,+EADuB,CAnMxB;AAsMHC,IAAAA,mCAAmC,EAAE,CACjC,2EADiC,CAtMlC;AAyMHC,IAAAA,WAAW,EAAE,CAAC,iDAAD,CAzMV;AA0MHC,IAAAA,eAAe,EAAE,CAAC,qDAAD,CA1Md;AA2MHC,IAAAA,mCAAmC,EAAE,CACjC,2EADiC,CA3MlC;AA8MHC,IAAAA,QAAQ,EAAE,CAAC,yCAAD,CA9MP;AA+MHtN,IAAAA,UAAU,EAAE,CAAC,2CAAD,CA/MT;AAgNHuN,IAAAA,uBAAuB,EAAE,CACrB,kDADqB,CAhNtB;AAmNH9b,IAAAA,kBAAkB,EAAE,CAChB,oEADgB,CAnNjB;AAsNH+b,IAAAA,aAAa,EAAE,CAAC,qCAAD,CAtNZ;AAuNHC,IAAAA,YAAY,EAAE,CAAC,oCAAD,CAvNX;AAwNHC,IAAAA,yBAAyB,EAAE,CACvB,oEADuB,CAxNxB;AA2NH3J,IAAAA,iBAAiB,EAAE,CAAC,yCAAD,CA3NhB;AA4NH4J,IAAAA,qBAAqB,EAAE,CACnB,yDADmB,CA5NpB;AA+NHC,IAAAA,yBAAyB,EAAE,CAAC,oCAAD,CA/NxB;AAgOHC,IAAAA,wBAAwB,EAAE,CACtB,kDADsB,CAhOvB;AAmOH7U,IAAAA,WAAW,EAAE,CAAC,mCAAD,CAnOV;AAoOH8U,IAAAA,gBAAgB,EAAE,CAAC,wCAAD,CApOf;AAqOHC,IAAAA,cAAc,EAAE,CAAC,gCAAD,CArOb;AAsOHC,IAAAA,sBAAsB,EAAE,CACpB,gEADoB,CAtOrB;AAyOHC,IAAAA,eAAe,EAAE,CAAC,uCAAD,CAzOd;AA0OH5X,IAAAA,wBAAwB,EAAE,CAAC,iBAAD,CA1OvB;AA2OHiG,IAAAA,UAAU,EAAE,CAAC,uBAAD,CA3OT;AA4OHrD,IAAAA,WAAW,EAAE,CAAC,6BAAD,CA5OV;AA6OHC,IAAAA,SAAS,EAAE,CAAC,iCAAD,CA7OR;AA8OHgV,IAAAA,eAAe,EAAE,CAAC,uCAAD,CA9Od;AA+OHC,IAAAA,mCAAmC,EAAE,CAAC,kCAAD,CA/OlC;AAgPHC,IAAAA,aAAa,EAAE,CAAC,qCAAD,CAhPZ;AAiPHC,IAAAA,eAAe,EAAE,CAAC,wCAAD,CAjPd;AAkPHlV,IAAAA,UAAU,EAAE,CAAC,mBAAD,CAlPT;AAmPHmV,IAAAA,oCAAoC,EAAE,CAClC,sDADkC,CAnPnC;AAsPHC,IAAAA,iBAAiB,EAAE,CACf,wDADe,CAtPhB;AAyPHC,IAAAA,YAAY,EAAE,CAAC,oCAAD,CAzPX;AA0PHC,IAAAA,iBAAiB,EAAE,CAAC,2CAAD,CA1PhB;AA2PHC,IAAAA,QAAQ,EAAE,CAAC,gCAAD,CA3PP;AA4PHC,IAAAA,SAAS,EAAE,CAAC,iCAAD,CA5PR;AA6PHvc,IAAAA,qBAAqB,EAAE,CACnB,sDADmB,CA7PpB;AAgQHwO,IAAAA,YAAY,EAAE,CAAC,iCAAD,CAhQX;AAiQH0E,IAAAA,KAAK,EAAE,CAAC,mCAAD,CAjQJ;AAkQHsJ,IAAAA,aAAa,EAAE,CAAC,2CAAD,CAlQZ;AAmQH/N,IAAAA,WAAW,EAAE,CAAC,kDAAD,CAnQV;AAoQHxO,IAAAA,wBAAwB,EAAE,CACtB,8EADsB,CApQvB;AAuQHwc,IAAAA,2BAA2B,EAAE,CACzB,6EADyB,EAEzB,EAFyB,EAGzB;AAAErH,MAAAA,SAAS,EAAE;AAAb,KAHyB,CAvQ1B;AA4QHrD,IAAAA,kBAAkB,EAAE,CAChB,uDADgB,CA5QjB;AA+QH2K,IAAAA,yBAAyB,EAAE,CACvB,2FADuB,EAEvB,EAFuB,EAGvB;AAAEtH,MAAAA,SAAS,EAAE;AAAb,KAHuB,CA/QxB;AAoRHuH,IAAAA,2BAA2B,EAAE,CACzB,kFADyB,CApR1B;AAuRHC,IAAAA,4BAA4B,EAAE,CAC1B,8EAD0B,EAE1B,EAF0B,EAG1B;AAAExH,MAAAA,SAAS,EAAE;AAAb,KAH0B,CAvR3B;AA4RHyH,IAAAA,4BAA4B,EAAE,CAC1B,8EAD0B,EAE1B,EAF0B,EAG1B;AAAEzH,MAAAA,SAAS,EAAE;AAAb,KAH0B,CA5R3B;AAiSH0H,IAAAA,YAAY,EAAE,CAAC,qDAAD,CAjSX;AAkSHC,IAAAA,gBAAgB,EAAE,CAAC,kCAAD,CAlSf;AAmSHC,IAAAA,iBAAiB,EAAE,CAAC,yCAAD,CAnShB;AAoSHC,IAAAA,wBAAwB,EAAE,CACtB,wEADsB,CApSvB;AAuSHC,IAAAA,wBAAwB,EAAE,CACtB,0EADsB,EAEtB,EAFsB,EAGtB;AAAE9H,MAAAA,SAAS,EAAE;AAAb,KAHsB,CAvSvB;AA4SH+H,IAAAA,sBAAsB,EAAE,CACpB,wFADoB,EAEpB,EAFoB,EAGpB;AAAE/H,MAAAA,SAAS,EAAE;AAAb,KAHoB,CA5SrB;AAiTHgI,IAAAA,yBAAyB,EAAE,CACvB,2EADuB,EAEvB,EAFuB,EAGvB;AAAEhI,MAAAA,SAAS,EAAE;AAAb,KAHuB,CAjTxB;AAsTHiI,IAAAA,yBAAyB,EAAE,CACvB,2EADuB,EAEvB,EAFuB,EAGvB;AAAEjI,MA
AAA,SAAS,EAAE;AAAb,KAHuB,CAtTxB;AA2THkI,IAAAA,eAAe,EAAE,CAAC,kDAAD,CA3Td;AA4THC,IAAAA,QAAQ,EAAE,CAAC,qCAAD,CA5TP;AA6THxb,IAAAA,MAAM,EAAE,CAAC,6BAAD,CA7TL;AA8THyb,IAAAA,sBAAsB,EAAE,CACpB,wDADoB,CA9TrB;AAiUHC,IAAAA,mBAAmB,EAAE,CAAC,mDAAD,CAjUlB;AAkUHC,IAAAA,+BAA+B,EAAE,CAAC,iCAAD,CAlU9B;AAmUHC,IAAAA,gBAAgB,EAAE,CACd,yDADc,CAnUf;AAsUHC,IAAAA,iCAAiC,EAAE,CAC/B,wFAD+B,CAtUhC;AAyUHC,IAAAA,aAAa,EAAE,CAAC,mDAAD,CAzUZ;AA0UHC,IAAAA,kBAAkB,EAAE,CAChB,wDADgB,CA1UjB;AA6UHC,IAAAA,0BAA0B,EAAE,CACxB,iFADwB,EAExB,EAFwB,EAGxB;AAAE/kB,MAAAA,OAAO,EAAE,CAAC,OAAD,EAAU,6BAAV;AAAX,KAHwB,CA7UzB;AAkVHglB,IAAAA,2BAA2B,EAAE,CACzB,iFADyB,CAlV1B;AAqVH9O,IAAAA,aAAa,EAAE,CAAC,6CAAD,CArVZ;AAsVH+O,IAAAA,0BAA0B,EAAE,CACxB,oDADwB,CAtVzB;AAyVHC,IAAAA,kBAAkB,EAAE,CAChB,sEADgB,EAEhB;AAAEC,MAAAA,OAAO,EAAE;AAAX,KAFgB;AAzVjB,GAplCO;AAk7CdC,EAAAA,MAAM,EAAE;AACJC,IAAAA,IAAI,EAAE,CAAC,kBAAD,CADF;AAEJC,IAAAA,OAAO,EAAE,CAAC,qBAAD,CAFL;AAGJC,IAAAA,qBAAqB,EAAE,CAAC,oBAAD,CAHnB;AAIJC,IAAAA,MAAM,EAAE,CAAC,oBAAD,CAJJ;AAKJxJ,IAAAA,KAAK,EAAE,CAAC,0BAAD,CALH;AAMJyJ,IAAAA,MAAM,EAAE,CAAC,oBAAD,CANJ;AAOJC,IAAAA,KAAK,EAAE,CAAC,mBAAD;AAPH,GAl7CM;AA27CdC,EAAAA,cAAc,EAAE;AACZzc,IAAAA,QAAQ,EAAE,CACN,iEADM,CADE;AAIZ0c,IAAAA,uBAAuB,EAAE,CACrB,sDADqB,CAJb;AAOZpc,IAAAA,gBAAgB,EAAE,CAAC,wCAAD,CAPN;AAQZC,IAAAA,iBAAiB,EAAE,CAAC,kDAAD,CARP;AASZoc,IAAAA,qBAAqB,EAAE,CACnB,2EADmB,CATX;AAYZjc,IAAAA,WAAW,EAAE,CACT,mEADS;AAZD,GA37CF;AA28Cdkc,EAAAA,KAAK,EAAE;AACHC,IAAAA,iCAAiC,EAAE,CAC/B,0DAD+B,CADhC;AAIHC,IAAAA,kCAAkC,EAAE,CAChC,yDADgC,CAJjC;AAOHC,IAAAA,+BAA+B,EAAE,CAC7B,wDAD6B,CAP9B;AAUHC,IAAAA,+BAA+B,EAAE,CAC7B,yDAD6B,CAV9B;AAaHC,IAAAA,4BAA4B,EAAE,CAC1B,wDAD0B,CAb3B;AAgBH/d,IAAAA,MAAM,EAAE,CAAC,wBAAD,CAhBL;AAiBHge,IAAAA,4BAA4B,EAAE,CAC1B,6EAD0B,CAjB3B;AAoBHC,IAAAA,qBAAqB,EAAE,CAAC,gDAAD,CApBpB;AAqBHC,IAAAA,4BAA4B,EAAE,CAC1B,gGAD0B,CArB3B;AAwBHC,IAAAA,qBAAqB,EAAE,CACnB,sEADmB,CAxBpB;AA2BHC,IAAAA,WAAW,EAAE,CAAC,sCAAD,CA3BV;AA4BHC,IAAAA,SAAS,EAAE,CAAC,mCAAD,CA5BR;AA6BHC,IAAAA,yBAAyB,EAAE,CACvB,6FADuB,CA7BxB;AAgCHC,IAAAA,kBAAkB,EAAE,CAChB,mEADgB,CAhCjB;AAmCHC,IAAAA,yBAAyB,EAAE,CACvB,0DADuB,CAnCxB;AAsCHlZ,IAAAA,IAAI,EAAE,CAAC,uBAAD,CAtCH;AAuCHmZ,IAAAA,cAAc,EAAE,CAAC,yCAAD,CAvCb;AAwCHC,IAAAA,2BAA2B,EAAE,CACzB,4EADyB,CAxC1B;AA2CHC,IAAAA,oBAAoB,EAAE,CAAC,+CAAD,CA3CnB;AA4CH9b,IAAAA,wBAAwB,EAAE,CAAC,iBAAD,CA5CvB;AA6CH+b,IAAAA,gBAAgB,EAAE,CAAC,2CAAD,CA7Cf;AA8CHC,IAAAA,2BAA2B,EAAE,CACzB,+CADyB,CA9C1B;AAiDHC,IAAAA,iBAAiB,EAAE,CAAC,4CAAD,CAjDhB;AAkDHC,IAAAA,cAAc,EAAE,CAAC,yCAAD,CAlDb;AAmDHC,IAAAA,4BAA4B,EAAE,CAC1B,6DAD0B,CAnD3B;AAsDHC,IAAAA,kBAAkB,EAAE,CAChB,4DADgB,CAtDjB;AAyDHC,IAAAA,eAAe,EAAE,CACb,2DADa,CAzDd;AA4DHC,IAAAA,4BAA4B,EAAE,CAC1B,+FAD0B,CA5D3B;AA+DHC,IAAAA,qBAAqB,EAAE,CACnB,qEADmB,CA/DpB;AAkEHC,IAAAA,WAAW,EAAE,CAAC,qCAAD;AAlEV,GA38CO;AA+gDd/B,EAAAA,KAAK,EAAE;AACHgC,IAAAA,wBAAwB,EAAE,CACtB,mBADsB,EAEtB,EAFsB,EAGtB;AAAE1nB,MAAAA,OAAO,EAAE,CAAC,OAAD,EAAU,8BAAV;AAAX,KAHsB,CADvB;AAMH2nB,IAAAA,4BAA4B,EAAE,CAAC,mBAAD,CAN3B;AAOHC,IAAAA,KAAK,EAAE,CAAC,6BAAD,CAPJ;AAQHC,IAAAA,YAAY,EAAE,CAAC,6BAAD,CARX;AASHC,IAAAA,qBAAqB,EAAE,CAAC,+CAAD,CATpB;AAUHC,IAAAA,oCAAoC,EAAE,CAAC,gCAAD,CAVnC;AAWHC,IAAAA,4BAA4B,EAAE,CAC1B,qBAD0B,EAE1B,EAF0B,EAG1B;AAAEhoB,MAAAA,OAAO,EAAE,CAAC,OAAD,EAAU,kCAAV;AAAX,KAH0B,CAX3B;AAgBHioB,IAAAA,gCAAgC,EAAE,CAAC,qBAAD,CAhB/B;AAiBHC,IAAAA,kCAAkC,EAAE,CAChC,iBADgC,EAEhC,EAFgC,EAGhC;AAAEloB,MAAAA,OAAO,EAAE,CAAC,OAAD,EAAU,wCAAV;AAAX,KAHgC,CAjBjC;AAsBHmoB,IAAAA,sCAAsC,EAAE,CAAC,iBAAD,CAtBrC;AAuBHC,IAAAA,2BAA2B,EAAE,CACzB,qBADyB,EAEzB,EAFyB,EAGzB;AAAEpoB,MAAAA,OAAO,EAAE,CAAC,OAAD,EAAU,iCAAV;AAAX,KAHyB,CAvB1B;AA4BHqoB,IAAAA,+BAA+B,EAAE,CAAC,qBAAD,CA5B9B;AA6BHC,IAAAA,4
BAA4B,EAAE,CAC1B,oCAD0B,EAE1B,EAF0B,EAG1B;AAAEtoB,MAAAA,OAAO,EAAE,CAAC,OAAD,EAAU,kCAAV;AAAX,KAH0B,CA7B3B;AAkCHuoB,IAAAA,gCAAgC,EAAE,CAAC,oCAAD,CAlC/B;AAmCHC,IAAAA,kCAAkC,EAAE,CAChC,4BADgC,EAEhC,EAFgC,EAGhC;AAAExoB,MAAAA,OAAO,EAAE,CAAC,OAAD,EAAU,wCAAV;AAAX,KAHgC,CAnCjC;AAwCHyoB,IAAAA,sCAAsC,EAAE,CAAC,4BAAD,CAxCrC;AAyCHC,IAAAA,MAAM,EAAE,CAAC,gCAAD,CAzCL;AA0CH9iB,IAAAA,gBAAgB,EAAE,CAAC,WAAD,CA1Cf;AA2CH+iB,IAAAA,aAAa,EAAE,CAAC,uBAAD,CA3CZ;AA4CHC,IAAAA,iBAAiB,EAAE,CAAC,iCAAD,CA5ChB;AA6CHC,IAAAA,yBAAyB,EAAE,CACvB,iCADuB,EAEvB,EAFuB,EAGvB;AAAE7oB,MAAAA,OAAO,EAAE,CAAC,OAAD,EAAU,+BAAV;AAAX,KAHuB,CA7CxB;AAkDH8oB,IAAAA,6BAA6B,EAAE,CAAC,iCAAD,CAlD5B;AAmDHC,IAAAA,+BAA+B,EAAE,CAC7B,yBAD6B,EAE7B,EAF6B,EAG7B;AAAE/oB,MAAAA,OAAO,EAAE,CAAC,OAAD,EAAU,qCAAV;AAAX,KAH6B,CAnD9B;AAwDHgpB,IAAAA,mCAAmC,EAAE,CAAC,yBAAD,CAxDlC;AAyDHtb,IAAAA,IAAI,EAAE,CAAC,YAAD,CAzDH;AA0DHub,IAAAA,0BAA0B,EAAE,CACxB,kBADwB,EAExB,EAFwB,EAGxB;AAAEjpB,MAAAA,OAAO,EAAE,CAAC,OAAD,EAAU,gCAAV;AAAX,KAHwB,CA1DzB;AA+DHkpB,IAAAA,8BAA8B,EAAE,CAAC,kBAAD,CA/D7B;AAgEHC,IAAAA,0BAA0B,EAAE,CACxB,kBADwB,EAExB,EAFwB,EAGxB;AAAEnpB,MAAAA,OAAO,EAAE,CAAC,OAAD,EAAU,gCAAV;AAAX,KAHwB,CAhEzB;AAqEHopB,IAAAA,8BAA8B,EAAE,CAAC,kBAAD,CArE7B;AAsEHC,IAAAA,2BAA2B,EAAE,CACzB,qBADyB,EAEzB,EAFyB,EAGzB;AAAErpB,MAAAA,OAAO,EAAE,CAAC,OAAD,EAAU,iCAAV;AAAX,KAHyB,CAtE1B;AA2EHspB,IAAAA,+BAA+B,EAAE,CAAC,qBAAD,CA3E9B;AA4EHC,IAAAA,iCAAiC,EAAE,CAAC,qBAAD,CA5EhC;AA6EHC,IAAAA,oBAAoB,EAAE,CAAC,iCAAD,CA7EnB;AA8EHC,IAAAA,oBAAoB,EAAE,CAAC,iCAAD,CA9EnB;AA+EHC,IAAAA,2BAA2B,EAAE,CACzB,oBADyB,EAEzB,EAFyB,EAGzB;AAAE1pB,MAAAA,OAAO,EAAE,CAAC,OAAD,EAAU,iCAAV;AAAX,KAHyB,CA/E1B;AAoFH2pB,IAAAA,+BAA+B,EAAE,CAAC,oBAAD,CApF9B;AAqFHC,IAAAA,kBAAkB,EAAE,CAAC,gCAAD,CArFjB;AAsFHC,IAAAA,gCAAgC,EAAE,CAC9B,yBAD8B,EAE9B,EAF8B,EAG9B;AAAE7pB,MAAAA,OAAO,EAAE,CAAC,OAAD,EAAU,sCAAV;AAAX,KAH8B,CAtF/B;AA2FH8pB,IAAAA,oCAAoC,EAAE,CAAC,yBAAD,CA3FnC;AA4FHC,IAAAA,qBAAqB,EAAE,CAAC,4BAAD,CA5FpB;AA6FHC,IAAAA,iCAAiC,EAAE,CAC/B,gBAD+B,EAE/B,EAF+B,EAG/B;AAAEhqB,MAAAA,OAAO,EAAE,CAAC,OAAD,EAAU,uCAAV;AAAX,KAH+B,CA7FhC;AAkGHiqB,IAAAA,qCAAqC,EAAE,CAAC,gBAAD,CAlGpC;AAmGHC,IAAAA,yCAAyC,EAAE,CACvC,8BADuC,EAEvC,EAFuC,EAGvC;AAAElqB,MAAAA,OAAO,EAAE,CAAC,OAAD,EAAU,+CAAV;AAAX,KAHuC,CAnGxC;AAwGHmqB,IAAAA,6CAA6C,EAAE,CAC3C,8BAD2C,CAxG5C;AA2GHC,IAAAA,OAAO,EAAE,CAAC,gCAAD,CA3GN;AA4GHC,IAAAA,QAAQ,EAAE,CAAC,mCAAD,CA5GP;AA6GHC,IAAAA,mBAAmB,EAAE,CAAC,aAAD;AA7GlB;AA/gDO,CAAlB;;ACAO,MAAMC,OAAO,GAAG,mBAAhB;;ACAA,SAASC,kBAAT,CAA4BC,OAA5B,EAAqCC,YAArC,EAAmD;AACtD,QAAMC,UAAU,GAAG,EAAnB;;AACA,OAAK,MAAM,CAACC,KAAD,EAAQC,SAAR,CAAX,IAAiCC,MAAM,CAACC,OAAP,CAAeL,YAAf,CAAjC,EAA+D;AAC3D,SAAK,MAAM,CAACM,UAAD,EAAaC,QAAb,CAAX,IAAqCH,MAAM,CAACC,OAAP,CAAeF,SAAf,CAArC,EAAgE;AAC5D,YAAM,CAACK,KAAD,EAAQC,QAAR,EAAkBC,WAAlB,IAAiCH,QAAvC;AACA,YAAM,CAACI,MAAD,EAASC,GAAT,IAAgBJ,KAAK,CAACK,KAAN,CAAY,GAAZ,CAAtB;AACA,YAAMC,gBAAgB,GAAGV,MAAM,CAACW,MAAP,CAAc;AAAEJ,QAAAA,MAAF;AAAUC,QAAAA;AAAV,OAAd,EAA+BH,QAA/B,CAAzB;;AACA,UAAI,CAACR,UAAU,CAACC,KAAD,CAAf,EAAwB;AACpBD,QAAAA,UAAU,CAACC,KAAD,CAAV,GAAoB,EAApB;AACH;;AACD,YAAMc,YAAY,GAAGf,UAAU,CAACC,KAAD,CAA/B;;AACA,UAAIQ,WAAJ,EAAiB;AACbM,QAAAA,YAAY,CAACV,UAAD,CAAZ,GAA2BW,QAAQ,CAAClB,OAAD,EAAUG,KAAV,EAAiBI,UAAjB,EAA6BQ,gBAA7B,EAA+CJ,WAA/C,CAAnC;AACA;AACH;;AACDM,MAAAA,YAAY,CAACV,UAAD,CAAZ,GAA2BP,OAAO,CAACmB,OAAR,CAAgBT,QAAhB,CAAyBK,gBAAzB,CAA3B;AACH;AACJ;;AACD,SAAOb,UAAP;AACH;;AACD,SAASgB,QAAT,CAAkBlB,OAAlB,EAA2BG,KAA3B,EAAkCI,UAAlC,EAA8CG,QAA9C,EAAwDC,WAAxD,EAAqE;AACjE,QAAMS,mBAAmB,GAAGpB,OAAO,CAACmB,OAAR,CAAgBT,QAAhB,CAAyBA,QAAzB,CAA5B;AACA;;AACA,WAASW,eAAT,CAAyB,GAAGC,IAA5B,EAAkC;AAC9B;AACA,QAAIC,OAAO,GAAGH,mBAAmB,CAACZ,QAApB,CAA6B/Q,KAA7B,CAAm
C,GAAG6R,IAAtC,CAAd,CAF8B;;AAI9B,QAAIX,WAAW,CAAChP,SAAhB,EAA2B;AACvB4P,MAAAA,OAAO,GAAGlB,MAAM,CAACW,MAAP,CAAc,EAAd,EAAkBO,OAAlB,EAA2B;AACjCC,QAAAA,IAAI,EAAED,OAAO,CAACZ,WAAW,CAAChP,SAAb,CADoB;AAEjC,SAACgP,WAAW,CAAChP,SAAb,GAAyB8P;AAFQ,OAA3B,CAAV;AAIA,aAAOL,mBAAmB,CAACG,OAAD,CAA1B;AACH;;AACD,QAAIZ,WAAW,CAACprB,OAAhB,EAAyB;AACrB,YAAM,CAACmsB,QAAD,EAAWC,aAAX,IAA4BhB,WAAW,CAACprB,OAA9C;AACAyqB,MAAAA,OAAO,CAAC4B,GAAR,CAAYC,IAAZ,CAAkB,WAAU1B,KAAM,IAAGI,UAAW,kCAAiCmB,QAAS,IAAGC,aAAc,IAA3G;AACH;;AACD,QAAIhB,WAAW,CAACmB,UAAhB,EAA4B;AACxB9B,MAAAA,OAAO,CAAC4B,GAAR,CAAYC,IAAZ,CAAiBlB,WAAW,CAACmB,UAA7B;AACH;;AACD,QAAInB,WAAW,CAACjiB,iBAAhB,EAAmC;AAC/B;AACA,YAAM6iB,OAAO,GAAGH,mBAAmB,CAACZ,QAApB,CAA6B/Q,KAA7B,CAAmC,GAAG6R,IAAtC,CAAhB;;AACA,WAAK,MAAM,CAACS,IAAD,EAAOC,KAAP,CAAX,IAA4B3B,MAAM,CAACC,OAAP,CAAeK,WAAW,CAACjiB,iBAA3B,CAA5B,EAA2E;AACvE,YAAIqjB,IAAI,IAAIR,OAAZ,EAAqB;AACjBvB,UAAAA,OAAO,CAAC4B,GAAR,CAAYC,IAAZ,CAAkB,IAAGE,IAAK,0CAAyC5B,KAAM,IAAGI,UAAW,aAAYyB,KAAM,WAAzG;;AACA,cAAI,EAAEA,KAAK,IAAIT,OAAX,CAAJ,EAAyB;AACrBA,YAAAA,OAAO,CAACS,KAAD,CAAP,GAAiBT,OAAO,CAACQ,IAAD,CAAxB;AACH;;AACD,iBAAOR,OAAO,CAACQ,IAAD,CAAd;AACH;AACJ;;AACD,aAAOX,mBAAmB,CAACG,OAAD,CAA1B;AACH,KA/B6B;;;AAiC9B,WAAOH,mBAAmB,CAAC,GAAGE,IAAJ,CAA1B;AACH;;AACD,SAAOjB,MAAM,CAACW,MAAP,CAAcK,eAAd,EAA+BD,mBAA/B,CAAP;AACH;;ACxDM,SAASa,mBAAT,CAA6BjC,OAA7B,EAAsC;AACzC,QAAMkC,GAAG,GAAGnC,kBAAkB,CAACC,OAAD,EAAUmC,SAAV,CAA9B;AACA,SAAO;AACHC,IAAAA,IAAI,EAAEF;AADH,GAAP;AAGH;AACDD,mBAAmB,CAACnC,OAApB,GAA8BA,OAA9B;AACA,AAAO,SAASuC,yBAAT,CAAmCrC,OAAnC,EAA4C;AAC/C,QAAMkC,GAAG,GAAGnC,kBAAkB,CAACC,OAAD,EAAUmC,SAAV,CAA9B;AACA,2CACOD,GADP;AAEIE,IAAAA,IAAI,EAAEF;AAFV;AAIH;AACDG,yBAAyB,CAACvC,OAA1B,GAAoCA,OAApC;;;;;"} \ No newline at end of file diff --git a/node_modules/@octokit/plugin-rest-endpoint-methods/dist-src/endpoints-to-methods.js b/node_modules/@octokit/plugin-rest-endpoint-methods/dist-src/endpoints-to-methods.js new file mode 100644 index 0000000..32c2f39 --- /dev/null +++ b/node_modules/@octokit/plugin-rest-endpoint-methods/dist-src/endpoints-to-methods.js @@ -0,0 +1,60 @@ +export function endpointsToMethods(octokit, endpointsMap) { + const newMethods = {}; + for (const [scope, endpoints] of Object.entries(endpointsMap)) { + for (const [methodName, endpoint] of Object.entries(endpoints)) { + const [route, defaults, decorations] = endpoint; + const [method, url] = route.split(/ /); + const endpointDefaults = Object.assign({ method, url }, defaults); + if (!newMethods[scope]) { + newMethods[scope] = {}; + } + const scopeMethods = newMethods[scope]; + if (decorations) { + scopeMethods[methodName] = decorate(octokit, scope, methodName, endpointDefaults, decorations); + continue; + } + scopeMethods[methodName] = octokit.request.defaults(endpointDefaults); + } + } + return newMethods; +} +function decorate(octokit, scope, methodName, defaults, decorations) { + const requestWithDefaults = octokit.request.defaults(defaults); + /* istanbul ignore next */ + function withDecorations(...args) { + // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488 + let options = requestWithDefaults.endpoint.merge(...args); + // There are currently no other decorations than `.mapToData` + if (decorations.mapToData) { + options = Object.assign({}, options, { + data: options[decorations.mapToData], + [decorations.mapToData]: undefined, + }); + return requestWithDefaults(options); + } + if (decorations.renamed) { + const [newScope, newMethodName] = decorations.renamed; + octokit.log.warn(`octokit.${scope}.${methodName}() has been renamed to 
octokit.${newScope}.${newMethodName}()`); + } + if (decorations.deprecated) { + octokit.log.warn(decorations.deprecated); + } + if (decorations.renamedParameters) { + // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488 + const options = requestWithDefaults.endpoint.merge(...args); + for (const [name, alias] of Object.entries(decorations.renamedParameters)) { + if (name in options) { + octokit.log.warn(`"${name}" parameter is deprecated for "octokit.${scope}.${methodName}()". Use "${alias}" instead`); + if (!(alias in options)) { + options[alias] = options[name]; + } + delete options[name]; + } + } + return requestWithDefaults(options); + } + // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488 + return requestWithDefaults(...args); + } + return Object.assign(withDecorations, requestWithDefaults); +} diff --git a/node_modules/@octokit/plugin-rest-endpoint-methods/dist-src/generated/endpoints.js b/node_modules/@octokit/plugin-rest-endpoint-methods/dist-src/generated/endpoints.js new file mode 100644 index 0000000..e61f6ac --- /dev/null +++ b/node_modules/@octokit/plugin-rest-endpoint-methods/dist-src/generated/endpoints.js @@ -0,0 +1,1664 @@ +const Endpoints = { + actions: { + addCustomLabelsToSelfHostedRunnerForOrg: [ + "POST /orgs/{org}/actions/runners/{runner_id}/labels", + ], + addCustomLabelsToSelfHostedRunnerForRepo: [ + "POST /repos/{owner}/{repo}/actions/runners/{runner_id}/labels", + ], + addSelectedRepoToOrgSecret: [ + "PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}", + ], + approveWorkflowRun: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/approve", + ], + cancelWorkflowRun: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel", + ], + createOrUpdateEnvironmentSecret: [ + "PUT /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}", + ], + createOrUpdateOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}"], + createOrUpdateRepoSecret: [ + "PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}", + ], + createRegistrationTokenForOrg: [ + "POST /orgs/{org}/actions/runners/registration-token", + ], + createRegistrationTokenForRepo: [ + "POST /repos/{owner}/{repo}/actions/runners/registration-token", + ], + createRemoveTokenForOrg: ["POST /orgs/{org}/actions/runners/remove-token"], + createRemoveTokenForRepo: [ + "POST /repos/{owner}/{repo}/actions/runners/remove-token", + ], + createWorkflowDispatch: [ + "POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches", + ], + deleteActionsCacheById: [ + "DELETE /repos/{owner}/{repo}/actions/caches/{cache_id}", + ], + deleteActionsCacheByKey: [ + "DELETE /repos/{owner}/{repo}/actions/caches{?key,ref}", + ], + deleteArtifact: [ + "DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}", + ], + deleteEnvironmentSecret: [ + "DELETE /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}", + ], + deleteOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}"], + deleteRepoSecret: [ + "DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}", + ], + deleteSelfHostedRunnerFromOrg: [ + "DELETE /orgs/{org}/actions/runners/{runner_id}", + ], + deleteSelfHostedRunnerFromRepo: [ + "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}", + ], + deleteWorkflowRun: ["DELETE /repos/{owner}/{repo}/actions/runs/{run_id}"], + deleteWorkflowRunLogs: [ + "DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs", + ], + disableSelectedRepositoryGithubActionsOrganization: [ + "DELETE 
/orgs/{org}/actions/permissions/repositories/{repository_id}", + ], + disableWorkflow: [ + "PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable", + ], + downloadArtifact: [ + "GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}", + ], + downloadJobLogsForWorkflowRun: [ + "GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs", + ], + downloadWorkflowRunAttemptLogs: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/logs", + ], + downloadWorkflowRunLogs: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs", + ], + enableSelectedRepositoryGithubActionsOrganization: [ + "PUT /orgs/{org}/actions/permissions/repositories/{repository_id}", + ], + enableWorkflow: [ + "PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable", + ], + getActionsCacheList: ["GET /repos/{owner}/{repo}/actions/caches"], + getActionsCacheUsage: ["GET /repos/{owner}/{repo}/actions/cache/usage"], + getActionsCacheUsageByRepoForOrg: [ + "GET /orgs/{org}/actions/cache/usage-by-repository", + ], + getActionsCacheUsageForEnterprise: [ + "GET /enterprises/{enterprise}/actions/cache/usage", + ], + getActionsCacheUsageForOrg: ["GET /orgs/{org}/actions/cache/usage"], + getAllowedActionsOrganization: [ + "GET /orgs/{org}/actions/permissions/selected-actions", + ], + getAllowedActionsRepository: [ + "GET /repos/{owner}/{repo}/actions/permissions/selected-actions", + ], + getArtifact: ["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"], + getEnvironmentPublicKey: [ + "GET /repositories/{repository_id}/environments/{environment_name}/secrets/public-key", + ], + getEnvironmentSecret: [ + "GET /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}", + ], + getGithubActionsDefaultWorkflowPermissionsEnterprise: [ + "GET /enterprises/{enterprise}/actions/permissions/workflow", + ], + getGithubActionsDefaultWorkflowPermissionsOrganization: [ + "GET /orgs/{org}/actions/permissions/workflow", + ], + getGithubActionsDefaultWorkflowPermissionsRepository: [ + "GET /repos/{owner}/{repo}/actions/permissions/workflow", + ], + getGithubActionsPermissionsOrganization: [ + "GET /orgs/{org}/actions/permissions", + ], + getGithubActionsPermissionsRepository: [ + "GET /repos/{owner}/{repo}/actions/permissions", + ], + getJobForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}"], + getOrgPublicKey: ["GET /orgs/{org}/actions/secrets/public-key"], + getOrgSecret: ["GET /orgs/{org}/actions/secrets/{secret_name}"], + getPendingDeploymentsForRun: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments", + ], + getRepoPermissions: [ + "GET /repos/{owner}/{repo}/actions/permissions", + {}, + { renamed: ["actions", "getGithubActionsPermissionsRepository"] }, + ], + getRepoPublicKey: ["GET /repos/{owner}/{repo}/actions/secrets/public-key"], + getRepoSecret: ["GET /repos/{owner}/{repo}/actions/secrets/{secret_name}"], + getReviewsForRun: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/approvals", + ], + getSelfHostedRunnerForOrg: ["GET /orgs/{org}/actions/runners/{runner_id}"], + getSelfHostedRunnerForRepo: [ + "GET /repos/{owner}/{repo}/actions/runners/{runner_id}", + ], + getWorkflow: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}"], + getWorkflowAccessToRepository: [ + "GET /repos/{owner}/{repo}/actions/permissions/access", + ], + getWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}"], + getWorkflowRunAttempt: [ + "GET 
/repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}", + ], + getWorkflowRunUsage: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing", + ], + getWorkflowUsage: [ + "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing", + ], + listArtifactsForRepo: ["GET /repos/{owner}/{repo}/actions/artifacts"], + listEnvironmentSecrets: [ + "GET /repositories/{repository_id}/environments/{environment_name}/secrets", + ], + listJobsForWorkflowRun: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs", + ], + listJobsForWorkflowRunAttempt: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs", + ], + listLabelsForSelfHostedRunnerForOrg: [ + "GET /orgs/{org}/actions/runners/{runner_id}/labels", + ], + listLabelsForSelfHostedRunnerForRepo: [ + "GET /repos/{owner}/{repo}/actions/runners/{runner_id}/labels", + ], + listOrgSecrets: ["GET /orgs/{org}/actions/secrets"], + listRepoSecrets: ["GET /repos/{owner}/{repo}/actions/secrets"], + listRepoWorkflows: ["GET /repos/{owner}/{repo}/actions/workflows"], + listRunnerApplicationsForOrg: ["GET /orgs/{org}/actions/runners/downloads"], + listRunnerApplicationsForRepo: [ + "GET /repos/{owner}/{repo}/actions/runners/downloads", + ], + listSelectedReposForOrgSecret: [ + "GET /orgs/{org}/actions/secrets/{secret_name}/repositories", + ], + listSelectedRepositoriesEnabledGithubActionsOrganization: [ + "GET /orgs/{org}/actions/permissions/repositories", + ], + listSelfHostedRunnersForOrg: ["GET /orgs/{org}/actions/runners"], + listSelfHostedRunnersForRepo: ["GET /repos/{owner}/{repo}/actions/runners"], + listWorkflowRunArtifacts: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts", + ], + listWorkflowRuns: [ + "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs", + ], + listWorkflowRunsForRepo: ["GET /repos/{owner}/{repo}/actions/runs"], + reRunJobForWorkflowRun: [ + "POST /repos/{owner}/{repo}/actions/jobs/{job_id}/rerun", + ], + reRunWorkflow: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun"], + reRunWorkflowFailedJobs: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun-failed-jobs", + ], + removeAllCustomLabelsFromSelfHostedRunnerForOrg: [ + "DELETE /orgs/{org}/actions/runners/{runner_id}/labels", + ], + removeAllCustomLabelsFromSelfHostedRunnerForRepo: [ + "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels", + ], + removeCustomLabelFromSelfHostedRunnerForOrg: [ + "DELETE /orgs/{org}/actions/runners/{runner_id}/labels/{name}", + ], + removeCustomLabelFromSelfHostedRunnerForRepo: [ + "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels/{name}", + ], + removeSelectedRepoFromOrgSecret: [ + "DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}", + ], + reviewPendingDeploymentsForRun: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments", + ], + setAllowedActionsOrganization: [ + "PUT /orgs/{org}/actions/permissions/selected-actions", + ], + setAllowedActionsRepository: [ + "PUT /repos/{owner}/{repo}/actions/permissions/selected-actions", + ], + setCustomLabelsForSelfHostedRunnerForOrg: [ + "PUT /orgs/{org}/actions/runners/{runner_id}/labels", + ], + setCustomLabelsForSelfHostedRunnerForRepo: [ + "PUT /repos/{owner}/{repo}/actions/runners/{runner_id}/labels", + ], + setGithubActionsDefaultWorkflowPermissionsEnterprise: [ + "PUT /enterprises/{enterprise}/actions/permissions/workflow", + ], + setGithubActionsDefaultWorkflowPermissionsOrganization: [ + "PUT 
/orgs/{org}/actions/permissions/workflow", + ], + setGithubActionsDefaultWorkflowPermissionsRepository: [ + "PUT /repos/{owner}/{repo}/actions/permissions/workflow", + ], + setGithubActionsPermissionsOrganization: [ + "PUT /orgs/{org}/actions/permissions", + ], + setGithubActionsPermissionsRepository: [ + "PUT /repos/{owner}/{repo}/actions/permissions", + ], + setSelectedReposForOrgSecret: [ + "PUT /orgs/{org}/actions/secrets/{secret_name}/repositories", + ], + setSelectedRepositoriesEnabledGithubActionsOrganization: [ + "PUT /orgs/{org}/actions/permissions/repositories", + ], + setWorkflowAccessToRepository: [ + "PUT /repos/{owner}/{repo}/actions/permissions/access", + ], + }, + activity: { + checkRepoIsStarredByAuthenticatedUser: ["GET /user/starred/{owner}/{repo}"], + deleteRepoSubscription: ["DELETE /repos/{owner}/{repo}/subscription"], + deleteThreadSubscription: [ + "DELETE /notifications/threads/{thread_id}/subscription", + ], + getFeeds: ["GET /feeds"], + getRepoSubscription: ["GET /repos/{owner}/{repo}/subscription"], + getThread: ["GET /notifications/threads/{thread_id}"], + getThreadSubscriptionForAuthenticatedUser: [ + "GET /notifications/threads/{thread_id}/subscription", + ], + listEventsForAuthenticatedUser: ["GET /users/{username}/events"], + listNotificationsForAuthenticatedUser: ["GET /notifications"], + listOrgEventsForAuthenticatedUser: [ + "GET /users/{username}/events/orgs/{org}", + ], + listPublicEvents: ["GET /events"], + listPublicEventsForRepoNetwork: ["GET /networks/{owner}/{repo}/events"], + listPublicEventsForUser: ["GET /users/{username}/events/public"], + listPublicOrgEvents: ["GET /orgs/{org}/events"], + listReceivedEventsForUser: ["GET /users/{username}/received_events"], + listReceivedPublicEventsForUser: [ + "GET /users/{username}/received_events/public", + ], + listRepoEvents: ["GET /repos/{owner}/{repo}/events"], + listRepoNotificationsForAuthenticatedUser: [ + "GET /repos/{owner}/{repo}/notifications", + ], + listReposStarredByAuthenticatedUser: ["GET /user/starred"], + listReposStarredByUser: ["GET /users/{username}/starred"], + listReposWatchedByUser: ["GET /users/{username}/subscriptions"], + listStargazersForRepo: ["GET /repos/{owner}/{repo}/stargazers"], + listWatchedReposForAuthenticatedUser: ["GET /user/subscriptions"], + listWatchersForRepo: ["GET /repos/{owner}/{repo}/subscribers"], + markNotificationsAsRead: ["PUT /notifications"], + markRepoNotificationsAsRead: ["PUT /repos/{owner}/{repo}/notifications"], + markThreadAsRead: ["PATCH /notifications/threads/{thread_id}"], + setRepoSubscription: ["PUT /repos/{owner}/{repo}/subscription"], + setThreadSubscription: [ + "PUT /notifications/threads/{thread_id}/subscription", + ], + starRepoForAuthenticatedUser: ["PUT /user/starred/{owner}/{repo}"], + unstarRepoForAuthenticatedUser: ["DELETE /user/starred/{owner}/{repo}"], + }, + apps: { + addRepoToInstallation: [ + "PUT /user/installations/{installation_id}/repositories/{repository_id}", + {}, + { renamed: ["apps", "addRepoToInstallationForAuthenticatedUser"] }, + ], + addRepoToInstallationForAuthenticatedUser: [ + "PUT /user/installations/{installation_id}/repositories/{repository_id}", + ], + checkToken: ["POST /applications/{client_id}/token"], + createFromManifest: ["POST /app-manifests/{code}/conversions"], + createInstallationAccessToken: [ + "POST /app/installations/{installation_id}/access_tokens", + ], + deleteAuthorization: ["DELETE /applications/{client_id}/grant"], + deleteInstallation: ["DELETE /app/installations/{installation_id}"], + 
deleteToken: ["DELETE /applications/{client_id}/token"], + getAuthenticated: ["GET /app"], + getBySlug: ["GET /apps/{app_slug}"], + getInstallation: ["GET /app/installations/{installation_id}"], + getOrgInstallation: ["GET /orgs/{org}/installation"], + getRepoInstallation: ["GET /repos/{owner}/{repo}/installation"], + getSubscriptionPlanForAccount: [ + "GET /marketplace_listing/accounts/{account_id}", + ], + getSubscriptionPlanForAccountStubbed: [ + "GET /marketplace_listing/stubbed/accounts/{account_id}", + ], + getUserInstallation: ["GET /users/{username}/installation"], + getWebhookConfigForApp: ["GET /app/hook/config"], + getWebhookDelivery: ["GET /app/hook/deliveries/{delivery_id}"], + listAccountsForPlan: ["GET /marketplace_listing/plans/{plan_id}/accounts"], + listAccountsForPlanStubbed: [ + "GET /marketplace_listing/stubbed/plans/{plan_id}/accounts", + ], + listInstallationReposForAuthenticatedUser: [ + "GET /user/installations/{installation_id}/repositories", + ], + listInstallations: ["GET /app/installations"], + listInstallationsForAuthenticatedUser: ["GET /user/installations"], + listPlans: ["GET /marketplace_listing/plans"], + listPlansStubbed: ["GET /marketplace_listing/stubbed/plans"], + listReposAccessibleToInstallation: ["GET /installation/repositories"], + listSubscriptionsForAuthenticatedUser: ["GET /user/marketplace_purchases"], + listSubscriptionsForAuthenticatedUserStubbed: [ + "GET /user/marketplace_purchases/stubbed", + ], + listWebhookDeliveries: ["GET /app/hook/deliveries"], + redeliverWebhookDelivery: [ + "POST /app/hook/deliveries/{delivery_id}/attempts", + ], + removeRepoFromInstallation: [ + "DELETE /user/installations/{installation_id}/repositories/{repository_id}", + {}, + { renamed: ["apps", "removeRepoFromInstallationForAuthenticatedUser"] }, + ], + removeRepoFromInstallationForAuthenticatedUser: [ + "DELETE /user/installations/{installation_id}/repositories/{repository_id}", + ], + resetToken: ["PATCH /applications/{client_id}/token"], + revokeInstallationAccessToken: ["DELETE /installation/token"], + scopeToken: ["POST /applications/{client_id}/token/scoped"], + suspendInstallation: ["PUT /app/installations/{installation_id}/suspended"], + unsuspendInstallation: [ + "DELETE /app/installations/{installation_id}/suspended", + ], + updateWebhookConfigForApp: ["PATCH /app/hook/config"], + }, + billing: { + getGithubActionsBillingOrg: ["GET /orgs/{org}/settings/billing/actions"], + getGithubActionsBillingUser: [ + "GET /users/{username}/settings/billing/actions", + ], + getGithubAdvancedSecurityBillingGhe: [ + "GET /enterprises/{enterprise}/settings/billing/advanced-security", + ], + getGithubAdvancedSecurityBillingOrg: [ + "GET /orgs/{org}/settings/billing/advanced-security", + ], + getGithubPackagesBillingOrg: ["GET /orgs/{org}/settings/billing/packages"], + getGithubPackagesBillingUser: [ + "GET /users/{username}/settings/billing/packages", + ], + getSharedStorageBillingOrg: [ + "GET /orgs/{org}/settings/billing/shared-storage", + ], + getSharedStorageBillingUser: [ + "GET /users/{username}/settings/billing/shared-storage", + ], + }, + checks: { + create: ["POST /repos/{owner}/{repo}/check-runs"], + createSuite: ["POST /repos/{owner}/{repo}/check-suites"], + get: ["GET /repos/{owner}/{repo}/check-runs/{check_run_id}"], + getSuite: ["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}"], + listAnnotations: [ + "GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations", + ], + listForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-runs"], + 
listForSuite: [ + "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs", + ], + listSuitesForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-suites"], + rerequestRun: [ + "POST /repos/{owner}/{repo}/check-runs/{check_run_id}/rerequest", + ], + rerequestSuite: [ + "POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest", + ], + setSuitesPreferences: [ + "PATCH /repos/{owner}/{repo}/check-suites/preferences", + ], + update: ["PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}"], + }, + codeScanning: { + deleteAnalysis: [ + "DELETE /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}{?confirm_delete}", + ], + getAlert: [ + "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}", + {}, + { renamedParameters: { alert_id: "alert_number" } }, + ], + getAnalysis: [ + "GET /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}", + ], + getSarif: ["GET /repos/{owner}/{repo}/code-scanning/sarifs/{sarif_id}"], + listAlertInstances: [ + "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", + ], + listAlertsForOrg: ["GET /orgs/{org}/code-scanning/alerts"], + listAlertsForRepo: ["GET /repos/{owner}/{repo}/code-scanning/alerts"], + listAlertsInstances: [ + "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", + {}, + { renamed: ["codeScanning", "listAlertInstances"] }, + ], + listRecentAnalyses: ["GET /repos/{owner}/{repo}/code-scanning/analyses"], + updateAlert: [ + "PATCH /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}", + ], + uploadSarif: ["POST /repos/{owner}/{repo}/code-scanning/sarifs"], + }, + codesOfConduct: { + getAllCodesOfConduct: ["GET /codes_of_conduct"], + getConductCode: ["GET /codes_of_conduct/{key}"], + }, + codespaces: { + addRepositoryForSecretForAuthenticatedUser: [ + "PUT /user/codespaces/secrets/{secret_name}/repositories/{repository_id}", + ], + codespaceMachinesForAuthenticatedUser: [ + "GET /user/codespaces/{codespace_name}/machines", + ], + createForAuthenticatedUser: ["POST /user/codespaces"], + createOrUpdateRepoSecret: [ + "PUT /repos/{owner}/{repo}/codespaces/secrets/{secret_name}", + ], + createOrUpdateSecretForAuthenticatedUser: [ + "PUT /user/codespaces/secrets/{secret_name}", + ], + createWithPrForAuthenticatedUser: [ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/codespaces", + ], + createWithRepoForAuthenticatedUser: [ + "POST /repos/{owner}/{repo}/codespaces", + ], + deleteForAuthenticatedUser: ["DELETE /user/codespaces/{codespace_name}"], + deleteFromOrganization: [ + "DELETE /orgs/{org}/members/{username}/codespaces/{codespace_name}", + ], + deleteRepoSecret: [ + "DELETE /repos/{owner}/{repo}/codespaces/secrets/{secret_name}", + ], + deleteSecretForAuthenticatedUser: [ + "DELETE /user/codespaces/secrets/{secret_name}", + ], + exportForAuthenticatedUser: [ + "POST /user/codespaces/{codespace_name}/exports", + ], + getExportDetailsForAuthenticatedUser: [ + "GET /user/codespaces/{codespace_name}/exports/{export_id}", + ], + getForAuthenticatedUser: ["GET /user/codespaces/{codespace_name}"], + getPublicKeyForAuthenticatedUser: [ + "GET /user/codespaces/secrets/public-key", + ], + getRepoPublicKey: [ + "GET /repos/{owner}/{repo}/codespaces/secrets/public-key", + ], + getRepoSecret: [ + "GET /repos/{owner}/{repo}/codespaces/secrets/{secret_name}", + ], + getSecretForAuthenticatedUser: [ + "GET /user/codespaces/secrets/{secret_name}", + ], + listDevcontainersInRepositoryForAuthenticatedUser: [ + "GET /repos/{owner}/{repo}/codespaces/devcontainers", + ], + 
listForAuthenticatedUser: ["GET /user/codespaces"], + listInOrganization: [ + "GET /orgs/{org}/codespaces", + {}, + { renamedParameters: { org_id: "org" } }, + ], + listInRepositoryForAuthenticatedUser: [ + "GET /repos/{owner}/{repo}/codespaces", + ], + listRepoSecrets: ["GET /repos/{owner}/{repo}/codespaces/secrets"], + listRepositoriesForSecretForAuthenticatedUser: [ + "GET /user/codespaces/secrets/{secret_name}/repositories", + ], + listSecretsForAuthenticatedUser: ["GET /user/codespaces/secrets"], + removeRepositoryForSecretForAuthenticatedUser: [ + "DELETE /user/codespaces/secrets/{secret_name}/repositories/{repository_id}", + ], + repoMachinesForAuthenticatedUser: [ + "GET /repos/{owner}/{repo}/codespaces/machines", + ], + setRepositoriesForSecretForAuthenticatedUser: [ + "PUT /user/codespaces/secrets/{secret_name}/repositories", + ], + startForAuthenticatedUser: ["POST /user/codespaces/{codespace_name}/start"], + stopForAuthenticatedUser: ["POST /user/codespaces/{codespace_name}/stop"], + stopInOrganization: [ + "POST /orgs/{org}/members/{username}/codespaces/{codespace_name}/stop", + ], + updateForAuthenticatedUser: ["PATCH /user/codespaces/{codespace_name}"], + }, + dependabot: { + addSelectedRepoToOrgSecret: [ + "PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}", + ], + createOrUpdateOrgSecret: [ + "PUT /orgs/{org}/dependabot/secrets/{secret_name}", + ], + createOrUpdateRepoSecret: [ + "PUT /repos/{owner}/{repo}/dependabot/secrets/{secret_name}", + ], + deleteOrgSecret: ["DELETE /orgs/{org}/dependabot/secrets/{secret_name}"], + deleteRepoSecret: [ + "DELETE /repos/{owner}/{repo}/dependabot/secrets/{secret_name}", + ], + getOrgPublicKey: ["GET /orgs/{org}/dependabot/secrets/public-key"], + getOrgSecret: ["GET /orgs/{org}/dependabot/secrets/{secret_name}"], + getRepoPublicKey: [ + "GET /repos/{owner}/{repo}/dependabot/secrets/public-key", + ], + getRepoSecret: [ + "GET /repos/{owner}/{repo}/dependabot/secrets/{secret_name}", + ], + listOrgSecrets: ["GET /orgs/{org}/dependabot/secrets"], + listRepoSecrets: ["GET /repos/{owner}/{repo}/dependabot/secrets"], + listSelectedReposForOrgSecret: [ + "GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories", + ], + removeSelectedRepoFromOrgSecret: [ + "DELETE /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}", + ], + setSelectedReposForOrgSecret: [ + "PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories", + ], + }, + dependencyGraph: { + createRepositorySnapshot: [ + "POST /repos/{owner}/{repo}/dependency-graph/snapshots", + ], + diffRange: [ + "GET /repos/{owner}/{repo}/dependency-graph/compare/{basehead}", + ], + }, + emojis: { get: ["GET /emojis"] }, + enterpriseAdmin: { + addCustomLabelsToSelfHostedRunnerForEnterprise: [ + "POST /enterprises/{enterprise}/actions/runners/{runner_id}/labels", + ], + disableSelectedOrganizationGithubActionsEnterprise: [ + "DELETE /enterprises/{enterprise}/actions/permissions/organizations/{org_id}", + ], + enableSelectedOrganizationGithubActionsEnterprise: [ + "PUT /enterprises/{enterprise}/actions/permissions/organizations/{org_id}", + ], + getAllowedActionsEnterprise: [ + "GET /enterprises/{enterprise}/actions/permissions/selected-actions", + ], + getGithubActionsPermissionsEnterprise: [ + "GET /enterprises/{enterprise}/actions/permissions", + ], + getServerStatistics: [ + "GET /enterprise-installation/{enterprise_or_org}/server-statistics", + ], + listLabelsForSelfHostedRunnerForEnterprise: [ + "GET 
/enterprises/{enterprise}/actions/runners/{runner_id}/labels", + ], + listSelectedOrganizationsEnabledGithubActionsEnterprise: [ + "GET /enterprises/{enterprise}/actions/permissions/organizations", + ], + removeAllCustomLabelsFromSelfHostedRunnerForEnterprise: [ + "DELETE /enterprises/{enterprise}/actions/runners/{runner_id}/labels", + ], + removeCustomLabelFromSelfHostedRunnerForEnterprise: [ + "DELETE /enterprises/{enterprise}/actions/runners/{runner_id}/labels/{name}", + ], + setAllowedActionsEnterprise: [ + "PUT /enterprises/{enterprise}/actions/permissions/selected-actions", + ], + setCustomLabelsForSelfHostedRunnerForEnterprise: [ + "PUT /enterprises/{enterprise}/actions/runners/{runner_id}/labels", + ], + setGithubActionsPermissionsEnterprise: [ + "PUT /enterprises/{enterprise}/actions/permissions", + ], + setSelectedOrganizationsEnabledGithubActionsEnterprise: [ + "PUT /enterprises/{enterprise}/actions/permissions/organizations", + ], + }, + gists: { + checkIsStarred: ["GET /gists/{gist_id}/star"], + create: ["POST /gists"], + createComment: ["POST /gists/{gist_id}/comments"], + delete: ["DELETE /gists/{gist_id}"], + deleteComment: ["DELETE /gists/{gist_id}/comments/{comment_id}"], + fork: ["POST /gists/{gist_id}/forks"], + get: ["GET /gists/{gist_id}"], + getComment: ["GET /gists/{gist_id}/comments/{comment_id}"], + getRevision: ["GET /gists/{gist_id}/{sha}"], + list: ["GET /gists"], + listComments: ["GET /gists/{gist_id}/comments"], + listCommits: ["GET /gists/{gist_id}/commits"], + listForUser: ["GET /users/{username}/gists"], + listForks: ["GET /gists/{gist_id}/forks"], + listPublic: ["GET /gists/public"], + listStarred: ["GET /gists/starred"], + star: ["PUT /gists/{gist_id}/star"], + unstar: ["DELETE /gists/{gist_id}/star"], + update: ["PATCH /gists/{gist_id}"], + updateComment: ["PATCH /gists/{gist_id}/comments/{comment_id}"], + }, + git: { + createBlob: ["POST /repos/{owner}/{repo}/git/blobs"], + createCommit: ["POST /repos/{owner}/{repo}/git/commits"], + createRef: ["POST /repos/{owner}/{repo}/git/refs"], + createTag: ["POST /repos/{owner}/{repo}/git/tags"], + createTree: ["POST /repos/{owner}/{repo}/git/trees"], + deleteRef: ["DELETE /repos/{owner}/{repo}/git/refs/{ref}"], + getBlob: ["GET /repos/{owner}/{repo}/git/blobs/{file_sha}"], + getCommit: ["GET /repos/{owner}/{repo}/git/commits/{commit_sha}"], + getRef: ["GET /repos/{owner}/{repo}/git/ref/{ref}"], + getTag: ["GET /repos/{owner}/{repo}/git/tags/{tag_sha}"], + getTree: ["GET /repos/{owner}/{repo}/git/trees/{tree_sha}"], + listMatchingRefs: ["GET /repos/{owner}/{repo}/git/matching-refs/{ref}"], + updateRef: ["PATCH /repos/{owner}/{repo}/git/refs/{ref}"], + }, + gitignore: { + getAllTemplates: ["GET /gitignore/templates"], + getTemplate: ["GET /gitignore/templates/{name}"], + }, + interactions: { + getRestrictionsForAuthenticatedUser: ["GET /user/interaction-limits"], + getRestrictionsForOrg: ["GET /orgs/{org}/interaction-limits"], + getRestrictionsForRepo: ["GET /repos/{owner}/{repo}/interaction-limits"], + getRestrictionsForYourPublicRepos: [ + "GET /user/interaction-limits", + {}, + { renamed: ["interactions", "getRestrictionsForAuthenticatedUser"] }, + ], + removeRestrictionsForAuthenticatedUser: ["DELETE /user/interaction-limits"], + removeRestrictionsForOrg: ["DELETE /orgs/{org}/interaction-limits"], + removeRestrictionsForRepo: [ + "DELETE /repos/{owner}/{repo}/interaction-limits", + ], + removeRestrictionsForYourPublicRepos: [ + "DELETE /user/interaction-limits", + {}, + { renamed: ["interactions", 
"removeRestrictionsForAuthenticatedUser"] }, + ], + setRestrictionsForAuthenticatedUser: ["PUT /user/interaction-limits"], + setRestrictionsForOrg: ["PUT /orgs/{org}/interaction-limits"], + setRestrictionsForRepo: ["PUT /repos/{owner}/{repo}/interaction-limits"], + setRestrictionsForYourPublicRepos: [ + "PUT /user/interaction-limits", + {}, + { renamed: ["interactions", "setRestrictionsForAuthenticatedUser"] }, + ], + }, + issues: { + addAssignees: [ + "POST /repos/{owner}/{repo}/issues/{issue_number}/assignees", + ], + addLabels: ["POST /repos/{owner}/{repo}/issues/{issue_number}/labels"], + checkUserCanBeAssigned: ["GET /repos/{owner}/{repo}/assignees/{assignee}"], + create: ["POST /repos/{owner}/{repo}/issues"], + createComment: [ + "POST /repos/{owner}/{repo}/issues/{issue_number}/comments", + ], + createLabel: ["POST /repos/{owner}/{repo}/labels"], + createMilestone: ["POST /repos/{owner}/{repo}/milestones"], + deleteComment: [ + "DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}", + ], + deleteLabel: ["DELETE /repos/{owner}/{repo}/labels/{name}"], + deleteMilestone: [ + "DELETE /repos/{owner}/{repo}/milestones/{milestone_number}", + ], + get: ["GET /repos/{owner}/{repo}/issues/{issue_number}"], + getComment: ["GET /repos/{owner}/{repo}/issues/comments/{comment_id}"], + getEvent: ["GET /repos/{owner}/{repo}/issues/events/{event_id}"], + getLabel: ["GET /repos/{owner}/{repo}/labels/{name}"], + getMilestone: ["GET /repos/{owner}/{repo}/milestones/{milestone_number}"], + list: ["GET /issues"], + listAssignees: ["GET /repos/{owner}/{repo}/assignees"], + listComments: ["GET /repos/{owner}/{repo}/issues/{issue_number}/comments"], + listCommentsForRepo: ["GET /repos/{owner}/{repo}/issues/comments"], + listEvents: ["GET /repos/{owner}/{repo}/issues/{issue_number}/events"], + listEventsForRepo: ["GET /repos/{owner}/{repo}/issues/events"], + listEventsForTimeline: [ + "GET /repos/{owner}/{repo}/issues/{issue_number}/timeline", + ], + listForAuthenticatedUser: ["GET /user/issues"], + listForOrg: ["GET /orgs/{org}/issues"], + listForRepo: ["GET /repos/{owner}/{repo}/issues"], + listLabelsForMilestone: [ + "GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels", + ], + listLabelsForRepo: ["GET /repos/{owner}/{repo}/labels"], + listLabelsOnIssue: [ + "GET /repos/{owner}/{repo}/issues/{issue_number}/labels", + ], + listMilestones: ["GET /repos/{owner}/{repo}/milestones"], + lock: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/lock"], + removeAllLabels: [ + "DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels", + ], + removeAssignees: [ + "DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees", + ], + removeLabel: [ + "DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}", + ], + setLabels: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/labels"], + unlock: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock"], + update: ["PATCH /repos/{owner}/{repo}/issues/{issue_number}"], + updateComment: ["PATCH /repos/{owner}/{repo}/issues/comments/{comment_id}"], + updateLabel: ["PATCH /repos/{owner}/{repo}/labels/{name}"], + updateMilestone: [ + "PATCH /repos/{owner}/{repo}/milestones/{milestone_number}", + ], + }, + licenses: { + get: ["GET /licenses/{license}"], + getAllCommonlyUsed: ["GET /licenses"], + getForRepo: ["GET /repos/{owner}/{repo}/license"], + }, + markdown: { + render: ["POST /markdown"], + renderRaw: [ + "POST /markdown/raw", + { headers: { "content-type": "text/plain; charset=utf-8" } }, + ], + }, + meta: { + get: ["GET /meta"], + 
getOctocat: ["GET /octocat"], + getZen: ["GET /zen"], + root: ["GET /"], + }, + migrations: { + cancelImport: ["DELETE /repos/{owner}/{repo}/import"], + deleteArchiveForAuthenticatedUser: [ + "DELETE /user/migrations/{migration_id}/archive", + ], + deleteArchiveForOrg: [ + "DELETE /orgs/{org}/migrations/{migration_id}/archive", + ], + downloadArchiveForOrg: [ + "GET /orgs/{org}/migrations/{migration_id}/archive", + ], + getArchiveForAuthenticatedUser: [ + "GET /user/migrations/{migration_id}/archive", + ], + getCommitAuthors: ["GET /repos/{owner}/{repo}/import/authors"], + getImportStatus: ["GET /repos/{owner}/{repo}/import"], + getLargeFiles: ["GET /repos/{owner}/{repo}/import/large_files"], + getStatusForAuthenticatedUser: ["GET /user/migrations/{migration_id}"], + getStatusForOrg: ["GET /orgs/{org}/migrations/{migration_id}"], + listForAuthenticatedUser: ["GET /user/migrations"], + listForOrg: ["GET /orgs/{org}/migrations"], + listReposForAuthenticatedUser: [ + "GET /user/migrations/{migration_id}/repositories", + ], + listReposForOrg: ["GET /orgs/{org}/migrations/{migration_id}/repositories"], + listReposForUser: [ + "GET /user/migrations/{migration_id}/repositories", + {}, + { renamed: ["migrations", "listReposForAuthenticatedUser"] }, + ], + mapCommitAuthor: ["PATCH /repos/{owner}/{repo}/import/authors/{author_id}"], + setLfsPreference: ["PATCH /repos/{owner}/{repo}/import/lfs"], + startForAuthenticatedUser: ["POST /user/migrations"], + startForOrg: ["POST /orgs/{org}/migrations"], + startImport: ["PUT /repos/{owner}/{repo}/import"], + unlockRepoForAuthenticatedUser: [ + "DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock", + ], + unlockRepoForOrg: [ + "DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock", + ], + updateImport: ["PATCH /repos/{owner}/{repo}/import"], + }, + orgs: { + blockUser: ["PUT /orgs/{org}/blocks/{username}"], + cancelInvitation: ["DELETE /orgs/{org}/invitations/{invitation_id}"], + checkBlockedUser: ["GET /orgs/{org}/blocks/{username}"], + checkMembershipForUser: ["GET /orgs/{org}/members/{username}"], + checkPublicMembershipForUser: ["GET /orgs/{org}/public_members/{username}"], + convertMemberToOutsideCollaborator: [ + "PUT /orgs/{org}/outside_collaborators/{username}", + ], + createInvitation: ["POST /orgs/{org}/invitations"], + createWebhook: ["POST /orgs/{org}/hooks"], + deleteWebhook: ["DELETE /orgs/{org}/hooks/{hook_id}"], + get: ["GET /orgs/{org}"], + getMembershipForAuthenticatedUser: ["GET /user/memberships/orgs/{org}"], + getMembershipForUser: ["GET /orgs/{org}/memberships/{username}"], + getWebhook: ["GET /orgs/{org}/hooks/{hook_id}"], + getWebhookConfigForOrg: ["GET /orgs/{org}/hooks/{hook_id}/config"], + getWebhookDelivery: [ + "GET /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}", + ], + list: ["GET /organizations"], + listAppInstallations: ["GET /orgs/{org}/installations"], + listBlockedUsers: ["GET /orgs/{org}/blocks"], + listCustomRoles: ["GET /organizations/{organization_id}/custom_roles"], + listFailedInvitations: ["GET /orgs/{org}/failed_invitations"], + listForAuthenticatedUser: ["GET /user/orgs"], + listForUser: ["GET /users/{username}/orgs"], + listInvitationTeams: ["GET /orgs/{org}/invitations/{invitation_id}/teams"], + listMembers: ["GET /orgs/{org}/members"], + listMembershipsForAuthenticatedUser: ["GET /user/memberships/orgs"], + listOutsideCollaborators: ["GET /orgs/{org}/outside_collaborators"], + listPendingInvitations: ["GET /orgs/{org}/invitations"], + listPublicMembers: ["GET 
/orgs/{org}/public_members"], + listWebhookDeliveries: ["GET /orgs/{org}/hooks/{hook_id}/deliveries"], + listWebhooks: ["GET /orgs/{org}/hooks"], + pingWebhook: ["POST /orgs/{org}/hooks/{hook_id}/pings"], + redeliverWebhookDelivery: [ + "POST /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}/attempts", + ], + removeMember: ["DELETE /orgs/{org}/members/{username}"], + removeMembershipForUser: ["DELETE /orgs/{org}/memberships/{username}"], + removeOutsideCollaborator: [ + "DELETE /orgs/{org}/outside_collaborators/{username}", + ], + removePublicMembershipForAuthenticatedUser: [ + "DELETE /orgs/{org}/public_members/{username}", + ], + setMembershipForUser: ["PUT /orgs/{org}/memberships/{username}"], + setPublicMembershipForAuthenticatedUser: [ + "PUT /orgs/{org}/public_members/{username}", + ], + unblockUser: ["DELETE /orgs/{org}/blocks/{username}"], + update: ["PATCH /orgs/{org}"], + updateMembershipForAuthenticatedUser: [ + "PATCH /user/memberships/orgs/{org}", + ], + updateWebhook: ["PATCH /orgs/{org}/hooks/{hook_id}"], + updateWebhookConfigForOrg: ["PATCH /orgs/{org}/hooks/{hook_id}/config"], + }, + packages: { + deletePackageForAuthenticatedUser: [ + "DELETE /user/packages/{package_type}/{package_name}", + ], + deletePackageForOrg: [ + "DELETE /orgs/{org}/packages/{package_type}/{package_name}", + ], + deletePackageForUser: [ + "DELETE /users/{username}/packages/{package_type}/{package_name}", + ], + deletePackageVersionForAuthenticatedUser: [ + "DELETE /user/packages/{package_type}/{package_name}/versions/{package_version_id}", + ], + deletePackageVersionForOrg: [ + "DELETE /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}", + ], + deletePackageVersionForUser: [ + "DELETE /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}", + ], + getAllPackageVersionsForAPackageOwnedByAnOrg: [ + "GET /orgs/{org}/packages/{package_type}/{package_name}/versions", + {}, + { renamed: ["packages", "getAllPackageVersionsForPackageOwnedByOrg"] }, + ], + getAllPackageVersionsForAPackageOwnedByTheAuthenticatedUser: [ + "GET /user/packages/{package_type}/{package_name}/versions", + {}, + { + renamed: [ + "packages", + "getAllPackageVersionsForPackageOwnedByAuthenticatedUser", + ], + }, + ], + getAllPackageVersionsForPackageOwnedByAuthenticatedUser: [ + "GET /user/packages/{package_type}/{package_name}/versions", + ], + getAllPackageVersionsForPackageOwnedByOrg: [ + "GET /orgs/{org}/packages/{package_type}/{package_name}/versions", + ], + getAllPackageVersionsForPackageOwnedByUser: [ + "GET /users/{username}/packages/{package_type}/{package_name}/versions", + ], + getPackageForAuthenticatedUser: [ + "GET /user/packages/{package_type}/{package_name}", + ], + getPackageForOrganization: [ + "GET /orgs/{org}/packages/{package_type}/{package_name}", + ], + getPackageForUser: [ + "GET /users/{username}/packages/{package_type}/{package_name}", + ], + getPackageVersionForAuthenticatedUser: [ + "GET /user/packages/{package_type}/{package_name}/versions/{package_version_id}", + ], + getPackageVersionForOrganization: [ + "GET /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}", + ], + getPackageVersionForUser: [ + "GET /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}", + ], + listPackagesForAuthenticatedUser: ["GET /user/packages"], + listPackagesForOrganization: ["GET /orgs/{org}/packages"], + listPackagesForUser: ["GET /users/{username}/packages"], + restorePackageForAuthenticatedUser: 
[ + "POST /user/packages/{package_type}/{package_name}/restore{?token}", + ], + restorePackageForOrg: [ + "POST /orgs/{org}/packages/{package_type}/{package_name}/restore{?token}", + ], + restorePackageForUser: [ + "POST /users/{username}/packages/{package_type}/{package_name}/restore{?token}", + ], + restorePackageVersionForAuthenticatedUser: [ + "POST /user/packages/{package_type}/{package_name}/versions/{package_version_id}/restore", + ], + restorePackageVersionForOrg: [ + "POST /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore", + ], + restorePackageVersionForUser: [ + "POST /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore", + ], + }, + projects: { + addCollaborator: ["PUT /projects/{project_id}/collaborators/{username}"], + createCard: ["POST /projects/columns/{column_id}/cards"], + createColumn: ["POST /projects/{project_id}/columns"], + createForAuthenticatedUser: ["POST /user/projects"], + createForOrg: ["POST /orgs/{org}/projects"], + createForRepo: ["POST /repos/{owner}/{repo}/projects"], + delete: ["DELETE /projects/{project_id}"], + deleteCard: ["DELETE /projects/columns/cards/{card_id}"], + deleteColumn: ["DELETE /projects/columns/{column_id}"], + get: ["GET /projects/{project_id}"], + getCard: ["GET /projects/columns/cards/{card_id}"], + getColumn: ["GET /projects/columns/{column_id}"], + getPermissionForUser: [ + "GET /projects/{project_id}/collaborators/{username}/permission", + ], + listCards: ["GET /projects/columns/{column_id}/cards"], + listCollaborators: ["GET /projects/{project_id}/collaborators"], + listColumns: ["GET /projects/{project_id}/columns"], + listForOrg: ["GET /orgs/{org}/projects"], + listForRepo: ["GET /repos/{owner}/{repo}/projects"], + listForUser: ["GET /users/{username}/projects"], + moveCard: ["POST /projects/columns/cards/{card_id}/moves"], + moveColumn: ["POST /projects/columns/{column_id}/moves"], + removeCollaborator: [ + "DELETE /projects/{project_id}/collaborators/{username}", + ], + update: ["PATCH /projects/{project_id}"], + updateCard: ["PATCH /projects/columns/cards/{card_id}"], + updateColumn: ["PATCH /projects/columns/{column_id}"], + }, + pulls: { + checkIfMerged: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/merge"], + create: ["POST /repos/{owner}/{repo}/pulls"], + createReplyForReviewComment: [ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies", + ], + createReview: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews"], + createReviewComment: [ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/comments", + ], + deletePendingReview: [ + "DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}", + ], + deleteReviewComment: [ + "DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}", + ], + dismissReview: [ + "PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals", + ], + get: ["GET /repos/{owner}/{repo}/pulls/{pull_number}"], + getReview: [ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}", + ], + getReviewComment: ["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}"], + list: ["GET /repos/{owner}/{repo}/pulls"], + listCommentsForReview: [ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments", + ], + listCommits: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/commits"], + listFiles: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/files"], + listRequestedReviewers: [ + "GET 
/repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers", + ], + listReviewComments: [ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/comments", + ], + listReviewCommentsForRepo: ["GET /repos/{owner}/{repo}/pulls/comments"], + listReviews: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews"], + merge: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge"], + removeRequestedReviewers: [ + "DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers", + ], + requestReviewers: [ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers", + ], + submitReview: [ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events", + ], + update: ["PATCH /repos/{owner}/{repo}/pulls/{pull_number}"], + updateBranch: [ + "PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch", + ], + updateReview: [ + "PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}", + ], + updateReviewComment: [ + "PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}", + ], + }, + rateLimit: { get: ["GET /rate_limit"] }, + reactions: { + createForCommitComment: [ + "POST /repos/{owner}/{repo}/comments/{comment_id}/reactions", + ], + createForIssue: [ + "POST /repos/{owner}/{repo}/issues/{issue_number}/reactions", + ], + createForIssueComment: [ + "POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", + ], + createForPullRequestReviewComment: [ + "POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", + ], + createForRelease: [ + "POST /repos/{owner}/{repo}/releases/{release_id}/reactions", + ], + createForTeamDiscussionCommentInOrg: [ + "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", + ], + createForTeamDiscussionInOrg: [ + "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", + ], + deleteForCommitComment: [ + "DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}", + ], + deleteForIssue: [ + "DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}", + ], + deleteForIssueComment: [ + "DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}", + ], + deleteForPullRequestComment: [ + "DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}", + ], + deleteForRelease: [ + "DELETE /repos/{owner}/{repo}/releases/{release_id}/reactions/{reaction_id}", + ], + deleteForTeamDiscussion: [ + "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}", + ], + deleteForTeamDiscussionComment: [ + "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}", + ], + listForCommitComment: [ + "GET /repos/{owner}/{repo}/comments/{comment_id}/reactions", + ], + listForIssue: ["GET /repos/{owner}/{repo}/issues/{issue_number}/reactions"], + listForIssueComment: [ + "GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", + ], + listForPullRequestReviewComment: [ + "GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", + ], + listForRelease: [ + "GET /repos/{owner}/{repo}/releases/{release_id}/reactions", + ], + listForTeamDiscussionCommentInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", + ], + listForTeamDiscussionInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", + ], + }, + repos: { + acceptInvitation: [ + "PATCH /user/repository_invitations/{invitation_id}", + 
{}, + { renamed: ["repos", "acceptInvitationForAuthenticatedUser"] }, + ], + acceptInvitationForAuthenticatedUser: [ + "PATCH /user/repository_invitations/{invitation_id}", + ], + addAppAccessRestrictions: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", + {}, + { mapToData: "apps" }, + ], + addCollaborator: ["PUT /repos/{owner}/{repo}/collaborators/{username}"], + addStatusCheckContexts: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", + {}, + { mapToData: "contexts" }, + ], + addTeamAccessRestrictions: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", + {}, + { mapToData: "teams" }, + ], + addUserAccessRestrictions: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", + {}, + { mapToData: "users" }, + ], + checkCollaborator: ["GET /repos/{owner}/{repo}/collaborators/{username}"], + checkVulnerabilityAlerts: [ + "GET /repos/{owner}/{repo}/vulnerability-alerts", + ], + codeownersErrors: ["GET /repos/{owner}/{repo}/codeowners/errors"], + compareCommits: ["GET /repos/{owner}/{repo}/compare/{base}...{head}"], + compareCommitsWithBasehead: [ + "GET /repos/{owner}/{repo}/compare/{basehead}", + ], + createAutolink: ["POST /repos/{owner}/{repo}/autolinks"], + createCommitComment: [ + "POST /repos/{owner}/{repo}/commits/{commit_sha}/comments", + ], + createCommitSignatureProtection: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures", + ], + createCommitStatus: ["POST /repos/{owner}/{repo}/statuses/{sha}"], + createDeployKey: ["POST /repos/{owner}/{repo}/keys"], + createDeployment: ["POST /repos/{owner}/{repo}/deployments"], + createDeploymentStatus: [ + "POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses", + ], + createDispatchEvent: ["POST /repos/{owner}/{repo}/dispatches"], + createForAuthenticatedUser: ["POST /user/repos"], + createFork: ["POST /repos/{owner}/{repo}/forks"], + createInOrg: ["POST /orgs/{org}/repos"], + createOrUpdateEnvironment: [ + "PUT /repos/{owner}/{repo}/environments/{environment_name}", + ], + createOrUpdateFileContents: ["PUT /repos/{owner}/{repo}/contents/{path}"], + createPagesSite: ["POST /repos/{owner}/{repo}/pages"], + createRelease: ["POST /repos/{owner}/{repo}/releases"], + createTagProtection: ["POST /repos/{owner}/{repo}/tags/protection"], + createUsingTemplate: [ + "POST /repos/{template_owner}/{template_repo}/generate", + ], + createWebhook: ["POST /repos/{owner}/{repo}/hooks"], + declineInvitation: [ + "DELETE /user/repository_invitations/{invitation_id}", + {}, + { renamed: ["repos", "declineInvitationForAuthenticatedUser"] }, + ], + declineInvitationForAuthenticatedUser: [ + "DELETE /user/repository_invitations/{invitation_id}", + ], + delete: ["DELETE /repos/{owner}/{repo}"], + deleteAccessRestrictions: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions", + ], + deleteAdminBranchProtection: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins", + ], + deleteAnEnvironment: [ + "DELETE /repos/{owner}/{repo}/environments/{environment_name}", + ], + deleteAutolink: ["DELETE /repos/{owner}/{repo}/autolinks/{autolink_id}"], + deleteBranchProtection: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection", + ], + deleteCommitComment: ["DELETE /repos/{owner}/{repo}/comments/{comment_id}"], + deleteCommitSignatureProtection: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures", + ], + 
deleteDeployKey: ["DELETE /repos/{owner}/{repo}/keys/{key_id}"], + deleteDeployment: [ + "DELETE /repos/{owner}/{repo}/deployments/{deployment_id}", + ], + deleteFile: ["DELETE /repos/{owner}/{repo}/contents/{path}"], + deleteInvitation: [ + "DELETE /repos/{owner}/{repo}/invitations/{invitation_id}", + ], + deletePagesSite: ["DELETE /repos/{owner}/{repo}/pages"], + deletePullRequestReviewProtection: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews", + ], + deleteRelease: ["DELETE /repos/{owner}/{repo}/releases/{release_id}"], + deleteReleaseAsset: [ + "DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}", + ], + deleteTagProtection: [ + "DELETE /repos/{owner}/{repo}/tags/protection/{tag_protection_id}", + ], + deleteWebhook: ["DELETE /repos/{owner}/{repo}/hooks/{hook_id}"], + disableAutomatedSecurityFixes: [ + "DELETE /repos/{owner}/{repo}/automated-security-fixes", + ], + disableLfsForRepo: ["DELETE /repos/{owner}/{repo}/lfs"], + disableVulnerabilityAlerts: [ + "DELETE /repos/{owner}/{repo}/vulnerability-alerts", + ], + downloadArchive: [ + "GET /repos/{owner}/{repo}/zipball/{ref}", + {}, + { renamed: ["repos", "downloadZipballArchive"] }, + ], + downloadTarballArchive: ["GET /repos/{owner}/{repo}/tarball/{ref}"], + downloadZipballArchive: ["GET /repos/{owner}/{repo}/zipball/{ref}"], + enableAutomatedSecurityFixes: [ + "PUT /repos/{owner}/{repo}/automated-security-fixes", + ], + enableLfsForRepo: ["PUT /repos/{owner}/{repo}/lfs"], + enableVulnerabilityAlerts: [ + "PUT /repos/{owner}/{repo}/vulnerability-alerts", + ], + generateReleaseNotes: [ + "POST /repos/{owner}/{repo}/releases/generate-notes", + ], + get: ["GET /repos/{owner}/{repo}"], + getAccessRestrictions: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions", + ], + getAdminBranchProtection: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins", + ], + getAllEnvironments: ["GET /repos/{owner}/{repo}/environments"], + getAllStatusCheckContexts: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", + ], + getAllTopics: ["GET /repos/{owner}/{repo}/topics"], + getAppsWithAccessToProtectedBranch: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", + ], + getAutolink: ["GET /repos/{owner}/{repo}/autolinks/{autolink_id}"], + getBranch: ["GET /repos/{owner}/{repo}/branches/{branch}"], + getBranchProtection: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection", + ], + getClones: ["GET /repos/{owner}/{repo}/traffic/clones"], + getCodeFrequencyStats: ["GET /repos/{owner}/{repo}/stats/code_frequency"], + getCollaboratorPermissionLevel: [ + "GET /repos/{owner}/{repo}/collaborators/{username}/permission", + ], + getCombinedStatusForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/status"], + getCommit: ["GET /repos/{owner}/{repo}/commits/{ref}"], + getCommitActivityStats: ["GET /repos/{owner}/{repo}/stats/commit_activity"], + getCommitComment: ["GET /repos/{owner}/{repo}/comments/{comment_id}"], + getCommitSignatureProtection: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures", + ], + getCommunityProfileMetrics: ["GET /repos/{owner}/{repo}/community/profile"], + getContent: ["GET /repos/{owner}/{repo}/contents/{path}"], + getContributorsStats: ["GET /repos/{owner}/{repo}/stats/contributors"], + getDeployKey: ["GET /repos/{owner}/{repo}/keys/{key_id}"], + getDeployment: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}"], + getDeploymentStatus: [ + 
"GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}", + ], + getEnvironment: [ + "GET /repos/{owner}/{repo}/environments/{environment_name}", + ], + getLatestPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/latest"], + getLatestRelease: ["GET /repos/{owner}/{repo}/releases/latest"], + getPages: ["GET /repos/{owner}/{repo}/pages"], + getPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/{build_id}"], + getPagesHealthCheck: ["GET /repos/{owner}/{repo}/pages/health"], + getParticipationStats: ["GET /repos/{owner}/{repo}/stats/participation"], + getPullRequestReviewProtection: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews", + ], + getPunchCardStats: ["GET /repos/{owner}/{repo}/stats/punch_card"], + getReadme: ["GET /repos/{owner}/{repo}/readme"], + getReadmeInDirectory: ["GET /repos/{owner}/{repo}/readme/{dir}"], + getRelease: ["GET /repos/{owner}/{repo}/releases/{release_id}"], + getReleaseAsset: ["GET /repos/{owner}/{repo}/releases/assets/{asset_id}"], + getReleaseByTag: ["GET /repos/{owner}/{repo}/releases/tags/{tag}"], + getStatusChecksProtection: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks", + ], + getTeamsWithAccessToProtectedBranch: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", + ], + getTopPaths: ["GET /repos/{owner}/{repo}/traffic/popular/paths"], + getTopReferrers: ["GET /repos/{owner}/{repo}/traffic/popular/referrers"], + getUsersWithAccessToProtectedBranch: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", + ], + getViews: ["GET /repos/{owner}/{repo}/traffic/views"], + getWebhook: ["GET /repos/{owner}/{repo}/hooks/{hook_id}"], + getWebhookConfigForRepo: [ + "GET /repos/{owner}/{repo}/hooks/{hook_id}/config", + ], + getWebhookDelivery: [ + "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}", + ], + listAutolinks: ["GET /repos/{owner}/{repo}/autolinks"], + listBranches: ["GET /repos/{owner}/{repo}/branches"], + listBranchesForHeadCommit: [ + "GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head", + ], + listCollaborators: ["GET /repos/{owner}/{repo}/collaborators"], + listCommentsForCommit: [ + "GET /repos/{owner}/{repo}/commits/{commit_sha}/comments", + ], + listCommitCommentsForRepo: ["GET /repos/{owner}/{repo}/comments"], + listCommitStatusesForRef: [ + "GET /repos/{owner}/{repo}/commits/{ref}/statuses", + ], + listCommits: ["GET /repos/{owner}/{repo}/commits"], + listContributors: ["GET /repos/{owner}/{repo}/contributors"], + listDeployKeys: ["GET /repos/{owner}/{repo}/keys"], + listDeploymentStatuses: [ + "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses", + ], + listDeployments: ["GET /repos/{owner}/{repo}/deployments"], + listForAuthenticatedUser: ["GET /user/repos"], + listForOrg: ["GET /orgs/{org}/repos"], + listForUser: ["GET /users/{username}/repos"], + listForks: ["GET /repos/{owner}/{repo}/forks"], + listInvitations: ["GET /repos/{owner}/{repo}/invitations"], + listInvitationsForAuthenticatedUser: ["GET /user/repository_invitations"], + listLanguages: ["GET /repos/{owner}/{repo}/languages"], + listPagesBuilds: ["GET /repos/{owner}/{repo}/pages/builds"], + listPublic: ["GET /repositories"], + listPullRequestsAssociatedWithCommit: [ + "GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls", + ], + listReleaseAssets: [ + "GET /repos/{owner}/{repo}/releases/{release_id}/assets", + ], + listReleases: ["GET /repos/{owner}/{repo}/releases"], + 
listTagProtection: ["GET /repos/{owner}/{repo}/tags/protection"], + listTags: ["GET /repos/{owner}/{repo}/tags"], + listTeams: ["GET /repos/{owner}/{repo}/teams"], + listWebhookDeliveries: [ + "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries", + ], + listWebhooks: ["GET /repos/{owner}/{repo}/hooks"], + merge: ["POST /repos/{owner}/{repo}/merges"], + mergeUpstream: ["POST /repos/{owner}/{repo}/merge-upstream"], + pingWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/pings"], + redeliverWebhookDelivery: [ + "POST /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}/attempts", + ], + removeAppAccessRestrictions: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", + {}, + { mapToData: "apps" }, + ], + removeCollaborator: [ + "DELETE /repos/{owner}/{repo}/collaborators/{username}", + ], + removeStatusCheckContexts: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", + {}, + { mapToData: "contexts" }, + ], + removeStatusCheckProtection: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks", + ], + removeTeamAccessRestrictions: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", + {}, + { mapToData: "teams" }, + ], + removeUserAccessRestrictions: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", + {}, + { mapToData: "users" }, + ], + renameBranch: ["POST /repos/{owner}/{repo}/branches/{branch}/rename"], + replaceAllTopics: ["PUT /repos/{owner}/{repo}/topics"], + requestPagesBuild: ["POST /repos/{owner}/{repo}/pages/builds"], + setAdminBranchProtection: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins", + ], + setAppAccessRestrictions: [ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", + {}, + { mapToData: "apps" }, + ], + setStatusCheckContexts: [ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", + {}, + { mapToData: "contexts" }, + ], + setTeamAccessRestrictions: [ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", + {}, + { mapToData: "teams" }, + ], + setUserAccessRestrictions: [ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", + {}, + { mapToData: "users" }, + ], + testPushWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/tests"], + transfer: ["POST /repos/{owner}/{repo}/transfer"], + update: ["PATCH /repos/{owner}/{repo}"], + updateBranchProtection: [ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection", + ], + updateCommitComment: ["PATCH /repos/{owner}/{repo}/comments/{comment_id}"], + updateInformationAboutPagesSite: ["PUT /repos/{owner}/{repo}/pages"], + updateInvitation: [ + "PATCH /repos/{owner}/{repo}/invitations/{invitation_id}", + ], + updatePullRequestReviewProtection: [ + "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews", + ], + updateRelease: ["PATCH /repos/{owner}/{repo}/releases/{release_id}"], + updateReleaseAsset: [ + "PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}", + ], + updateStatusCheckPotection: [ + "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks", + {}, + { renamed: ["repos", "updateStatusCheckProtection"] }, + ], + updateStatusCheckProtection: [ + "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks", + ], + updateWebhook: ["PATCH /repos/{owner}/{repo}/hooks/{hook_id}"], + updateWebhookConfigForRepo: [ + 
"PATCH /repos/{owner}/{repo}/hooks/{hook_id}/config", + ], + uploadReleaseAsset: [ + "POST /repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}", + { baseUrl: "https://uploads.github.com" }, + ], + }, + search: { + code: ["GET /search/code"], + commits: ["GET /search/commits"], + issuesAndPullRequests: ["GET /search/issues"], + labels: ["GET /search/labels"], + repos: ["GET /search/repositories"], + topics: ["GET /search/topics"], + users: ["GET /search/users"], + }, + secretScanning: { + getAlert: [ + "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}", + ], + listAlertsForEnterprise: [ + "GET /enterprises/{enterprise}/secret-scanning/alerts", + ], + listAlertsForOrg: ["GET /orgs/{org}/secret-scanning/alerts"], + listAlertsForRepo: ["GET /repos/{owner}/{repo}/secret-scanning/alerts"], + listLocationsForAlert: [ + "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations", + ], + updateAlert: [ + "PATCH /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}", + ], + }, + teams: { + addOrUpdateMembershipForUserInOrg: [ + "PUT /orgs/{org}/teams/{team_slug}/memberships/{username}", + ], + addOrUpdateProjectPermissionsInOrg: [ + "PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}", + ], + addOrUpdateRepoPermissionsInOrg: [ + "PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}", + ], + checkPermissionsForProjectInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/projects/{project_id}", + ], + checkPermissionsForRepoInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}", + ], + create: ["POST /orgs/{org}/teams"], + createDiscussionCommentInOrg: [ + "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments", + ], + createDiscussionInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions"], + deleteDiscussionCommentInOrg: [ + "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}", + ], + deleteDiscussionInOrg: [ + "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}", + ], + deleteInOrg: ["DELETE /orgs/{org}/teams/{team_slug}"], + getByName: ["GET /orgs/{org}/teams/{team_slug}"], + getDiscussionCommentInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}", + ], + getDiscussionInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}", + ], + getMembershipForUserInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/memberships/{username}", + ], + list: ["GET /orgs/{org}/teams"], + listChildInOrg: ["GET /orgs/{org}/teams/{team_slug}/teams"], + listDiscussionCommentsInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments", + ], + listDiscussionsInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions"], + listForAuthenticatedUser: ["GET /user/teams"], + listMembersInOrg: ["GET /orgs/{org}/teams/{team_slug}/members"], + listPendingInvitationsInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/invitations", + ], + listProjectsInOrg: ["GET /orgs/{org}/teams/{team_slug}/projects"], + listReposInOrg: ["GET /orgs/{org}/teams/{team_slug}/repos"], + removeMembershipForUserInOrg: [ + "DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}", + ], + removeProjectInOrg: [ + "DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}", + ], + removeRepoInOrg: [ + "DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}", + ], + updateDiscussionCommentInOrg: [ + "PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}", + ], + updateDiscussionInOrg: 
[ + "PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}", + ], + updateInOrg: ["PATCH /orgs/{org}/teams/{team_slug}"], + }, + users: { + addEmailForAuthenticated: [ + "POST /user/emails", + {}, + { renamed: ["users", "addEmailForAuthenticatedUser"] }, + ], + addEmailForAuthenticatedUser: ["POST /user/emails"], + block: ["PUT /user/blocks/{username}"], + checkBlocked: ["GET /user/blocks/{username}"], + checkFollowingForUser: ["GET /users/{username}/following/{target_user}"], + checkPersonIsFollowedByAuthenticated: ["GET /user/following/{username}"], + createGpgKeyForAuthenticated: [ + "POST /user/gpg_keys", + {}, + { renamed: ["users", "createGpgKeyForAuthenticatedUser"] }, + ], + createGpgKeyForAuthenticatedUser: ["POST /user/gpg_keys"], + createPublicSshKeyForAuthenticated: [ + "POST /user/keys", + {}, + { renamed: ["users", "createPublicSshKeyForAuthenticatedUser"] }, + ], + createPublicSshKeyForAuthenticatedUser: ["POST /user/keys"], + deleteEmailForAuthenticated: [ + "DELETE /user/emails", + {}, + { renamed: ["users", "deleteEmailForAuthenticatedUser"] }, + ], + deleteEmailForAuthenticatedUser: ["DELETE /user/emails"], + deleteGpgKeyForAuthenticated: [ + "DELETE /user/gpg_keys/{gpg_key_id}", + {}, + { renamed: ["users", "deleteGpgKeyForAuthenticatedUser"] }, + ], + deleteGpgKeyForAuthenticatedUser: ["DELETE /user/gpg_keys/{gpg_key_id}"], + deletePublicSshKeyForAuthenticated: [ + "DELETE /user/keys/{key_id}", + {}, + { renamed: ["users", "deletePublicSshKeyForAuthenticatedUser"] }, + ], + deletePublicSshKeyForAuthenticatedUser: ["DELETE /user/keys/{key_id}"], + follow: ["PUT /user/following/{username}"], + getAuthenticated: ["GET /user"], + getByUsername: ["GET /users/{username}"], + getContextForUser: ["GET /users/{username}/hovercard"], + getGpgKeyForAuthenticated: [ + "GET /user/gpg_keys/{gpg_key_id}", + {}, + { renamed: ["users", "getGpgKeyForAuthenticatedUser"] }, + ], + getGpgKeyForAuthenticatedUser: ["GET /user/gpg_keys/{gpg_key_id}"], + getPublicSshKeyForAuthenticated: [ + "GET /user/keys/{key_id}", + {}, + { renamed: ["users", "getPublicSshKeyForAuthenticatedUser"] }, + ], + getPublicSshKeyForAuthenticatedUser: ["GET /user/keys/{key_id}"], + list: ["GET /users"], + listBlockedByAuthenticated: [ + "GET /user/blocks", + {}, + { renamed: ["users", "listBlockedByAuthenticatedUser"] }, + ], + listBlockedByAuthenticatedUser: ["GET /user/blocks"], + listEmailsForAuthenticated: [ + "GET /user/emails", + {}, + { renamed: ["users", "listEmailsForAuthenticatedUser"] }, + ], + listEmailsForAuthenticatedUser: ["GET /user/emails"], + listFollowedByAuthenticated: [ + "GET /user/following", + {}, + { renamed: ["users", "listFollowedByAuthenticatedUser"] }, + ], + listFollowedByAuthenticatedUser: ["GET /user/following"], + listFollowersForAuthenticatedUser: ["GET /user/followers"], + listFollowersForUser: ["GET /users/{username}/followers"], + listFollowingForUser: ["GET /users/{username}/following"], + listGpgKeysForAuthenticated: [ + "GET /user/gpg_keys", + {}, + { renamed: ["users", "listGpgKeysForAuthenticatedUser"] }, + ], + listGpgKeysForAuthenticatedUser: ["GET /user/gpg_keys"], + listGpgKeysForUser: ["GET /users/{username}/gpg_keys"], + listPublicEmailsForAuthenticated: [ + "GET /user/public_emails", + {}, + { renamed: ["users", "listPublicEmailsForAuthenticatedUser"] }, + ], + listPublicEmailsForAuthenticatedUser: ["GET /user/public_emails"], + listPublicKeysForUser: ["GET /users/{username}/keys"], + listPublicSshKeysForAuthenticated: [ + "GET /user/keys", + {}, + { 
renamed: ["users", "listPublicSshKeysForAuthenticatedUser"] }, + ], + listPublicSshKeysForAuthenticatedUser: ["GET /user/keys"], + setPrimaryEmailVisibilityForAuthenticated: [ + "PATCH /user/email/visibility", + {}, + { renamed: ["users", "setPrimaryEmailVisibilityForAuthenticatedUser"] }, + ], + setPrimaryEmailVisibilityForAuthenticatedUser: [ + "PATCH /user/email/visibility", + ], + unblock: ["DELETE /user/blocks/{username}"], + unfollow: ["DELETE /user/following/{username}"], + updateAuthenticated: ["PATCH /user"], + }, +}; +export default Endpoints; diff --git a/node_modules/@octokit/plugin-rest-endpoint-methods/dist-src/generated/method-types.js b/node_modules/@octokit/plugin-rest-endpoint-methods/dist-src/generated/method-types.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/node_modules/@octokit/plugin-rest-endpoint-methods/dist-src/generated/method-types.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@octokit/plugin-rest-endpoint-methods/dist-src/generated/parameters-and-response-types.js b/node_modules/@octokit/plugin-rest-endpoint-methods/dist-src/generated/parameters-and-response-types.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/node_modules/@octokit/plugin-rest-endpoint-methods/dist-src/generated/parameters-and-response-types.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@octokit/plugin-rest-endpoint-methods/dist-src/index.js b/node_modules/@octokit/plugin-rest-endpoint-methods/dist-src/index.js new file mode 100644 index 0000000..53d539d --- /dev/null +++ b/node_modules/@octokit/plugin-rest-endpoint-methods/dist-src/index.js @@ -0,0 +1,18 @@ +import ENDPOINTS from "./generated/endpoints"; +import { VERSION } from "./version"; +import { endpointsToMethods } from "./endpoints-to-methods"; +export function restEndpointMethods(octokit) { + const api = endpointsToMethods(octokit, ENDPOINTS); + return { + rest: api, + }; +} +restEndpointMethods.VERSION = VERSION; +export function legacyRestEndpointMethods(octokit) { + const api = endpointsToMethods(octokit, ENDPOINTS); + return { + ...api, + rest: api, + }; +} +legacyRestEndpointMethods.VERSION = VERSION; diff --git a/node_modules/@octokit/plugin-rest-endpoint-methods/dist-src/types.js b/node_modules/@octokit/plugin-rest-endpoint-methods/dist-src/types.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/node_modules/@octokit/plugin-rest-endpoint-methods/dist-src/types.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@octokit/plugin-rest-endpoint-methods/dist-src/version.js b/node_modules/@octokit/plugin-rest-endpoint-methods/dist-src/version.js new file mode 100644 index 0000000..80a383f --- /dev/null +++ b/node_modules/@octokit/plugin-rest-endpoint-methods/dist-src/version.js @@ -0,0 +1 @@ +export const VERSION = "5.16.2"; diff --git a/node_modules/@octokit/plugin-rest-endpoint-methods/dist-types/endpoints-to-methods.d.ts b/node_modules/@octokit/plugin-rest-endpoint-methods/dist-types/endpoints-to-methods.d.ts new file mode 100644 index 0000000..2a97a4b --- /dev/null +++ b/node_modules/@octokit/plugin-rest-endpoint-methods/dist-types/endpoints-to-methods.d.ts @@ -0,0 +1,4 @@ +import { Octokit } from "@octokit/core"; +import { EndpointsDefaultsAndDecorations } from "./types"; +import { RestEndpointMethods } from "./generated/method-types"; +export declare function endpointsToMethods(octokit: Octokit, endpointsMap: EndpointsDefaultsAndDecorations): RestEndpointMethods; diff --git 
a/node_modules/@octokit/plugin-rest-endpoint-methods/dist-types/generated/endpoints.d.ts b/node_modules/@octokit/plugin-rest-endpoint-methods/dist-types/generated/endpoints.d.ts new file mode 100644 index 0000000..a3c1d92 --- /dev/null +++ b/node_modules/@octokit/plugin-rest-endpoint-methods/dist-types/generated/endpoints.d.ts @@ -0,0 +1,3 @@ +import { EndpointsDefaultsAndDecorations } from "../types"; +declare const Endpoints: EndpointsDefaultsAndDecorations; +export default Endpoints; diff --git a/node_modules/@octokit/plugin-rest-endpoint-methods/dist-types/generated/method-types.d.ts b/node_modules/@octokit/plugin-rest-endpoint-methods/dist-types/generated/method-types.d.ts new file mode 100644 index 0000000..91c55f3 --- /dev/null +++ b/node_modules/@octokit/plugin-rest-endpoint-methods/dist-types/generated/method-types.d.ts @@ -0,0 +1,9945 @@ +import { EndpointInterface, RequestInterface } from "@octokit/types"; +import { RestEndpointMethodTypes } from "./parameters-and-response-types"; +export declare type RestEndpointMethods = { + actions: { + /** + * Add custom labels to a self-hosted runner configured in an organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + addCustomLabelsToSelfHostedRunnerForOrg: { + (params?: RestEndpointMethodTypes["actions"]["addCustomLabelsToSelfHostedRunnerForOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Add custom labels to a self-hosted runner configured in a repository. + * + * You must authenticate using an access token with the `repo` scope to use this + * endpoint. + */ + addCustomLabelsToSelfHostedRunnerForRepo: { + (params?: RestEndpointMethodTypes["actions"]["addCustomLabelsToSelfHostedRunnerForRepo"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Adds a repository to an organization secret when the `visibility` for repository access is set to `selected`. The visibility is set when you [Create or update an organization secret](https://docs.github.com/rest/reference/actions#create-or-update-an-organization-secret). You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to use this endpoint. + */ + addSelectedRepoToOrgSecret: { + (params?: RestEndpointMethodTypes["actions"]["addSelectedRepoToOrgSecret"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Approves a workflow run for a pull request from a public fork of a first time contributor. For more information, see ["Approving workflow runs from public forks](https://docs.github.com/actions/managing-workflow-runs/approving-workflow-runs-from-public-forks)." + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint. + */ + approveWorkflowRun: { + (params?: RestEndpointMethodTypes["actions"]["approveWorkflowRun"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Cancels a workflow run using its `id`. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint. 
+ */ + cancelWorkflowRun: { + (params?: RestEndpointMethodTypes["actions"]["cancelWorkflowRun"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Creates or updates an environment secret with an encrypted value. Encrypt your secret using + * [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages). You must authenticate using an access + * token with the `repo` scope to use this endpoint. GitHub Apps must have the `secrets` repository permission to use + * this endpoint. + * + * #### Example encrypting a secret using Node.js + * + * Encrypt your secret using the [tweetsodium](https://github.com/github/tweetsodium) library. + * + * ``` + * const sodium = require('tweetsodium'); + * + * const key = "base64-encoded-public-key"; + * const value = "plain-text-secret"; + * + * // Convert the message and key to Uint8Array's (Buffer implements that interface) + * const messageBytes = Buffer.from(value); + * const keyBytes = Buffer.from(key, 'base64'); + * + * // Encrypt using LibSodium. + * const encryptedBytes = sodium.seal(messageBytes, keyBytes); + * + * // Base64 the encrypted secret + * const encrypted = Buffer.from(encryptedBytes).toString('base64'); + * + * console.log(encrypted); + * ``` + * + * + * #### Example encrypting a secret using Python + * + * Encrypt your secret using [pynacl](https://pynacl.readthedocs.io/en/latest/public/#nacl-public-sealedbox) with Python 3. + * + * ``` + * from base64 import b64encode + * from nacl import encoding, public + * + * def encrypt(public_key: str, secret_value: str) -> str: + * """Encrypt a Unicode string using the public key.""" + * public_key = public.PublicKey(public_key.encode("utf-8"), encoding.Base64Encoder()) + * sealed_box = public.SealedBox(public_key) + * encrypted = sealed_box.encrypt(secret_value.encode("utf-8")) + * return b64encode(encrypted).decode("utf-8") + * ``` + * + * #### Example encrypting a secret using C# + * + * Encrypt your secret using the [Sodium.Core](https://www.nuget.org/packages/Sodium.Core/) package. + * + * ``` + * var secretValue = System.Text.Encoding.UTF8.GetBytes("mySecret"); + * var publicKey = Convert.FromBase64String("2Sg8iYjAxxmI2LvUXpJjkYrMxURPc8r+dB7TJyvvcCU="); + * + * var sealedPublicKeyBox = Sodium.SealedPublicKeyBox.Create(secretValue, publicKey); + * + * Console.WriteLine(Convert.ToBase64String(sealedPublicKeyBox)); + * ``` + * + * #### Example encrypting a secret using Ruby + * + * Encrypt your secret using the [rbnacl](https://github.com/RubyCrypto/rbnacl) gem. + * + * ```ruby + * require "rbnacl" + * require "base64" + * + * key = Base64.decode64("+ZYvJDZMHUfBkJdyq5Zm9SKqeuBQ4sj+6sfjlH4CgG0=") + * public_key = RbNaCl::PublicKey.new(key) + * + * box = RbNaCl::Boxes::Sealed.from_public_key(public_key) + * encrypted_secret = box.encrypt("my_secret") + * + * # Print the base64 encoded secret + * puts Base64.strict_encode64(encrypted_secret) + * ``` + */ + createOrUpdateEnvironmentSecret: { + (params?: RestEndpointMethodTypes["actions"]["createOrUpdateEnvironmentSecret"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Creates or updates an organization secret with an encrypted value. Encrypt your secret using + * [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages). You must authenticate using an access + * token with the `admin:org` scope to use this endpoint. 
GitHub Apps must have the `secrets` organization permission to + * use this endpoint. + * + * #### Example encrypting a secret using Node.js + * + * Encrypt your secret using the [tweetsodium](https://github.com/github/tweetsodium) library. + * + * ``` + * const sodium = require('tweetsodium'); + * + * const key = "base64-encoded-public-key"; + * const value = "plain-text-secret"; + * + * // Convert the message and key to Uint8Array's (Buffer implements that interface) + * const messageBytes = Buffer.from(value); + * const keyBytes = Buffer.from(key, 'base64'); + * + * // Encrypt using LibSodium. + * const encryptedBytes = sodium.seal(messageBytes, keyBytes); + * + * // Base64 the encrypted secret + * const encrypted = Buffer.from(encryptedBytes).toString('base64'); + * + * console.log(encrypted); + * ``` + * + * + * #### Example encrypting a secret using Python + * + * Encrypt your secret using [pynacl](https://pynacl.readthedocs.io/en/latest/public/#nacl-public-sealedbox) with Python 3. + * + * ``` + * from base64 import b64encode + * from nacl import encoding, public + * + * def encrypt(public_key: str, secret_value: str) -> str: + * """Encrypt a Unicode string using the public key.""" + * public_key = public.PublicKey(public_key.encode("utf-8"), encoding.Base64Encoder()) + * sealed_box = public.SealedBox(public_key) + * encrypted = sealed_box.encrypt(secret_value.encode("utf-8")) + * return b64encode(encrypted).decode("utf-8") + * ``` + * + * #### Example encrypting a secret using C# + * + * Encrypt your secret using the [Sodium.Core](https://www.nuget.org/packages/Sodium.Core/) package. + * + * ``` + * var secretValue = System.Text.Encoding.UTF8.GetBytes("mySecret"); + * var publicKey = Convert.FromBase64String("2Sg8iYjAxxmI2LvUXpJjkYrMxURPc8r+dB7TJyvvcCU="); + * + * var sealedPublicKeyBox = Sodium.SealedPublicKeyBox.Create(secretValue, publicKey); + * + * Console.WriteLine(Convert.ToBase64String(sealedPublicKeyBox)); + * ``` + * + * #### Example encrypting a secret using Ruby + * + * Encrypt your secret using the [rbnacl](https://github.com/RubyCrypto/rbnacl) gem. + * + * ```ruby + * require "rbnacl" + * require "base64" + * + * key = Base64.decode64("+ZYvJDZMHUfBkJdyq5Zm9SKqeuBQ4sj+6sfjlH4CgG0=") + * public_key = RbNaCl::PublicKey.new(key) + * + * box = RbNaCl::Boxes::Sealed.from_public_key(public_key) + * encrypted_secret = box.encrypt("my_secret") + * + * # Print the base64 encoded secret + * puts Base64.strict_encode64(encrypted_secret) + * ``` + */ + createOrUpdateOrgSecret: { + (params?: RestEndpointMethodTypes["actions"]["createOrUpdateOrgSecret"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Creates or updates a repository secret with an encrypted value. Encrypt your secret using + * [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages). You must authenticate using an access + * token with the `repo` scope to use this endpoint. GitHub Apps must have the `secrets` repository permission to use + * this endpoint. + * + * #### Example encrypting a secret using Node.js + * + * Encrypt your secret using the [tweetsodium](https://github.com/github/tweetsodium) library. 
+ * + * ``` + * const sodium = require('tweetsodium'); + * + * const key = "base64-encoded-public-key"; + * const value = "plain-text-secret"; + * + * // Convert the message and key to Uint8Array's (Buffer implements that interface) + * const messageBytes = Buffer.from(value); + * const keyBytes = Buffer.from(key, 'base64'); + * + * // Encrypt using LibSodium. + * const encryptedBytes = sodium.seal(messageBytes, keyBytes); + * + * // Base64 the encrypted secret + * const encrypted = Buffer.from(encryptedBytes).toString('base64'); + * + * console.log(encrypted); + * ``` + * + * + * #### Example encrypting a secret using Python + * + * Encrypt your secret using [pynacl](https://pynacl.readthedocs.io/en/latest/public/#nacl-public-sealedbox) with Python 3. + * + * ``` + * from base64 import b64encode + * from nacl import encoding, public + * + * def encrypt(public_key: str, secret_value: str) -> str: + * """Encrypt a Unicode string using the public key.""" + * public_key = public.PublicKey(public_key.encode("utf-8"), encoding.Base64Encoder()) + * sealed_box = public.SealedBox(public_key) + * encrypted = sealed_box.encrypt(secret_value.encode("utf-8")) + * return b64encode(encrypted).decode("utf-8") + * ``` + * + * #### Example encrypting a secret using C# + * + * Encrypt your secret using the [Sodium.Core](https://www.nuget.org/packages/Sodium.Core/) package. + * + * ``` + * var secretValue = System.Text.Encoding.UTF8.GetBytes("mySecret"); + * var publicKey = Convert.FromBase64String("2Sg8iYjAxxmI2LvUXpJjkYrMxURPc8r+dB7TJyvvcCU="); + * + * var sealedPublicKeyBox = Sodium.SealedPublicKeyBox.Create(secretValue, publicKey); + * + * Console.WriteLine(Convert.ToBase64String(sealedPublicKeyBox)); + * ``` + * + * #### Example encrypting a secret using Ruby + * + * Encrypt your secret using the [rbnacl](https://github.com/RubyCrypto/rbnacl) gem. + * + * ```ruby + * require "rbnacl" + * require "base64" + * + * key = Base64.decode64("+ZYvJDZMHUfBkJdyq5Zm9SKqeuBQ4sj+6sfjlH4CgG0=") + * public_key = RbNaCl::PublicKey.new(key) + * + * box = RbNaCl::Boxes::Sealed.from_public_key(public_key) + * encrypted_secret = box.encrypt("my_secret") + * + * # Print the base64 encoded secret + * puts Base64.strict_encode64(encrypted_secret) + * ``` + */ + createOrUpdateRepoSecret: { + (params?: RestEndpointMethodTypes["actions"]["createOrUpdateRepoSecret"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Returns a token that you can pass to the `config` script. The token expires after one hour. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + * + * #### Example using registration token + * + * Configure your self-hosted runner, replacing `TOKEN` with the registration token provided by this endpoint. + * + * ``` + * ./config.sh --url https://github.com/octo-org --token TOKEN + * ``` + */ + createRegistrationTokenForOrg: { + (params?: RestEndpointMethodTypes["actions"]["createRegistrationTokenForOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Returns a token that you can pass to the `config` script. The token expires after one hour. You must authenticate + * using an access token with the `repo` scope to use this endpoint. + * + * #### Example using registration token + * + * Configure your self-hosted runner, replacing `TOKEN` with the registration token provided by this endpoint. 
+ * + * ``` + * ./config.sh --url https://github.com/octo-org/octo-repo-artifacts --token TOKEN + * ``` + */ + createRegistrationTokenForRepo: { + (params?: RestEndpointMethodTypes["actions"]["createRegistrationTokenForRepo"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Returns a token that you can pass to the `config` script to remove a self-hosted runner from an organization. The token expires after one hour. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + * + * #### Example using remove token + * + * To remove your self-hosted runner from an organization, replace `TOKEN` with the remove token provided by this + * endpoint. + * + * ``` + * ./config.sh remove --token TOKEN + * ``` + */ + createRemoveTokenForOrg: { + (params?: RestEndpointMethodTypes["actions"]["createRemoveTokenForOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Returns a token that you can pass to remove a self-hosted runner from a repository. The token expires after one hour. + * You must authenticate using an access token with the `repo` scope to use this endpoint. + * + * #### Example using remove token + * + * To remove your self-hosted runner from a repository, replace TOKEN with the remove token provided by this endpoint. + * + * ``` + * ./config.sh remove --token TOKEN + * ``` + */ + createRemoveTokenForRepo: { + (params?: RestEndpointMethodTypes["actions"]["createRemoveTokenForRepo"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * You can use this endpoint to manually trigger a GitHub Actions workflow run. You can replace `workflow_id` with the workflow file name. For example, you could use `main.yaml`. + * + * You must configure your GitHub Actions workflow to run when the [`workflow_dispatch` webhook](/developers/webhooks-and-events/webhook-events-and-payloads#workflow_dispatch) event occurs. The `inputs` are configured in the workflow file. For more information about how to configure the `workflow_dispatch` event in the workflow file, see "[Events that trigger workflows](/actions/reference/events-that-trigger-workflows#workflow_dispatch)." + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint. For more information, see "[Creating a personal access token for the command line](https://docs.github.com/articles/creating-a-personal-access-token-for-the-command-line)." + */ + createWorkflowDispatch: { + (params?: RestEndpointMethodTypes["actions"]["createWorkflowDispatch"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Deletes a GitHub Actions cache for a repository, using a cache ID. + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. + * + * GitHub Apps must have the `actions:write` permission to use this endpoint. + */ + deleteActionsCacheById: { + (params?: RestEndpointMethodTypes["actions"]["deleteActionsCacheById"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Deletes one or more GitHub Actions caches for a repository, using a complete cache key. 
By default, all caches that match the provided key are deleted, but you can optionally provide a Git ref to restrict deletions to caches that match both the provided key and the Git ref. + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. + * + * GitHub Apps must have the `actions:write` permission to use this endpoint. + */ + deleteActionsCacheByKey: { + (params?: RestEndpointMethodTypes["actions"]["deleteActionsCacheByKey"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Deletes an artifact for a workflow run. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint. + */ + deleteArtifact: { + (params?: RestEndpointMethodTypes["actions"]["deleteArtifact"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Deletes a secret in an environment using the secret name. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `secrets` repository permission to use this endpoint. + */ + deleteEnvironmentSecret: { + (params?: RestEndpointMethodTypes["actions"]["deleteEnvironmentSecret"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Deletes a secret in an organization using the secret name. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to use this endpoint. + */ + deleteOrgSecret: { + (params?: RestEndpointMethodTypes["actions"]["deleteOrgSecret"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Deletes a secret in a repository using the secret name. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `secrets` repository permission to use this endpoint. + */ + deleteRepoSecret: { + (params?: RestEndpointMethodTypes["actions"]["deleteRepoSecret"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Forces the removal of a self-hosted runner from an organization. You can use this endpoint to completely remove the runner when the machine you were using no longer exists. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + deleteSelfHostedRunnerFromOrg: { + (params?: RestEndpointMethodTypes["actions"]["deleteSelfHostedRunnerFromOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Forces the removal of a self-hosted runner from a repository. You can use this endpoint to completely remove the runner when the machine you were using no longer exists. + * + * You must authenticate using an access token with the `repo` + * scope to use this endpoint. + */ + deleteSelfHostedRunnerFromRepo: { + (params?: RestEndpointMethodTypes["actions"]["deleteSelfHostedRunnerFromRepo"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Delete a specific workflow run. 
Anyone with write access to the repository can use this endpoint. If the repository is + * private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:write` permission to use + * this endpoint. + */ + deleteWorkflowRun: { + (params?: RestEndpointMethodTypes["actions"]["deleteWorkflowRun"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Deletes all logs for a workflow run. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint. + */ + deleteWorkflowRunLogs: { + (params?: RestEndpointMethodTypes["actions"]["deleteWorkflowRunLogs"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Removes a repository from the list of selected repositories that are enabled for GitHub Actions in an organization. To use this endpoint, the organization permission policy for `enabled_repositories` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an organization](#set-github-actions-permissions-for-an-organization)." + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. + */ + disableSelectedRepositoryGithubActionsOrganization: { + (params?: RestEndpointMethodTypes["actions"]["disableSelectedRepositoryGithubActionsOrganization"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Disables a workflow and sets the `state` of the workflow to `disabled_manually`. You can replace `workflow_id` with the workflow file name. For example, you could use `main.yaml`. + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint. + */ + disableWorkflow: { + (params?: RestEndpointMethodTypes["actions"]["disableWorkflow"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets a redirect URL to download an archive for a repository. This URL expires after 1 minute. Look for `Location:` in + * the response header to find the URL for the download. The `:archive_format` must be `zip`. Anyone with read access to + * the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. + * GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + downloadArtifact: { + (params?: RestEndpointMethodTypes["actions"]["downloadArtifact"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets a redirect URL to download a plain text file of logs for a workflow job. This link expires after 1 minute. Look + * for `Location:` in the response header to find the URL for the download. Anyone with read access to the repository can + * use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must + * have the `actions:read` permission to use this endpoint. 
+ */ + downloadJobLogsForWorkflowRun: { + (params?: RestEndpointMethodTypes["actions"]["downloadJobLogsForWorkflowRun"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets a redirect URL to download an archive of log files for a specific workflow run attempt. This link expires after + * 1 minute. Look for `Location:` in the response header to find the URL for the download. Anyone with read access to + * the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. + * GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + downloadWorkflowRunAttemptLogs: { + (params?: RestEndpointMethodTypes["actions"]["downloadWorkflowRunAttemptLogs"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets a redirect URL to download an archive of log files for a workflow run. This link expires after 1 minute. Look for + * `Location:` in the response header to find the URL for the download. Anyone with read access to the repository can use + * this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have + * the `actions:read` permission to use this endpoint. + */ + downloadWorkflowRunLogs: { + (params?: RestEndpointMethodTypes["actions"]["downloadWorkflowRunLogs"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Adds a repository to the list of selected repositories that are enabled for GitHub Actions in an organization. To use this endpoint, the organization permission policy for `enabled_repositories` must be must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an organization](#set-github-actions-permissions-for-an-organization)." + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. + */ + enableSelectedRepositoryGithubActionsOrganization: { + (params?: RestEndpointMethodTypes["actions"]["enableSelectedRepositoryGithubActionsOrganization"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Enables a workflow and sets the `state` of the workflow to `active`. You can replace `workflow_id` with the workflow file name. For example, you could use `main.yaml`. + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint. + */ + enableWorkflow: { + (params?: RestEndpointMethodTypes["actions"]["enableWorkflow"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists the GitHub Actions caches for a repository. + * You must authenticate using an access token with the `repo` scope to use this endpoint. + * GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + getActionsCacheList: { + (params?: RestEndpointMethodTypes["actions"]["getActionsCacheList"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets GitHub Actions cache usage for a repository. 
+ * The data fetched using this API is refreshed approximately every 5 minutes, so values returned from this endpoint may take at least 5 minutes to get updated. + * Anyone with read access to the repository can use this endpoint. If the repository is private, you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + getActionsCacheUsage: { + (params?: RestEndpointMethodTypes["actions"]["getActionsCacheUsage"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists repositories and their GitHub Actions cache usage for an organization. + * The data fetched using this API is refreshed approximately every 5 minutes, so values returned from this endpoint may take at least 5 minutes to get updated. + * You must authenticate using an access token with the `read:org` scope to use this endpoint. GitHub Apps must have the `organization_admistration:read` permission to use this endpoint. + */ + getActionsCacheUsageByRepoForOrg: { + (params?: RestEndpointMethodTypes["actions"]["getActionsCacheUsageByRepoForOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets the total GitHub Actions cache usage for an enterprise. + * The data fetched using this API is refreshed approximately every 5 minutes, so values returned from this endpoint may take at least 5 minutes to get updated. + * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. + */ + getActionsCacheUsageForEnterprise: { + (params?: RestEndpointMethodTypes["actions"]["getActionsCacheUsageForEnterprise"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets the total GitHub Actions cache usage for an organization. + * The data fetched using this API is refreshed approximately every 5 minutes, so values returned from this endpoint may take at least 5 minutes to get updated. + * You must authenticate using an access token with the `read:org` scope to use this endpoint. GitHub Apps must have the `organization_admistration:read` permission to use this endpoint. + */ + getActionsCacheUsageForOrg: { + (params?: RestEndpointMethodTypes["actions"]["getActionsCacheUsageForOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets the selected actions and reusable workflows that are allowed in an organization. To use this endpoint, the organization permission policy for `allowed_actions` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an organization](#set-github-actions-permissions-for-an-organization)."" + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. + */ + getAllowedActionsOrganization: { + (params?: RestEndpointMethodTypes["actions"]["getAllowedActionsOrganization"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets the settings for selected actions and reusable workflows that are allowed in a repository. To use this endpoint, the repository policy for `allowed_actions` must be configured to `selected`. 
For more information, see "[Set GitHub Actions permissions for a repository](#set-github-actions-permissions-for-a-repository)." + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `administration` repository permission to use this API. + */ + getAllowedActionsRepository: { + (params?: RestEndpointMethodTypes["actions"]["getAllowedActionsRepository"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets a specific artifact for a workflow run. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + getArtifact: { + (params?: RestEndpointMethodTypes["actions"]["getArtifact"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Get the public key for an environment, which you need to encrypt environment secrets. You need to encrypt a secret before you can create or update secrets. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `secrets` repository permission to use this endpoint. + */ + getEnvironmentPublicKey: { + (params?: RestEndpointMethodTypes["actions"]["getEnvironmentPublicKey"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets a single environment secret without revealing its encrypted value. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `secrets` repository permission to use this endpoint. + */ + getEnvironmentSecret: { + (params?: RestEndpointMethodTypes["actions"]["getEnvironmentSecret"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets the default workflow permissions granted to the `GITHUB_TOKEN` when running workflows in an enterprise, + * as well as whether GitHub Actions can submit approving pull request reviews. For more information, see + * "[Enforcing a policy for workflow permissions in your enterprise](https://docs.github.com/enterprise-cloud@latest/admin/policies/enforcing-policies-for-your-enterprise/enforcing-policies-for-github-actions-in-your-enterprise#enforcing-a-policy-for-workflow-permissions-in-your-enterprise)." + * + * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. + * GitHub Apps must have the `enterprise_administration:write` permission to use this endpoint. + */ + getGithubActionsDefaultWorkflowPermissionsEnterprise: { + (params?: RestEndpointMethodTypes["actions"]["getGithubActionsDefaultWorkflowPermissionsEnterprise"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets the default workflow permissions granted to the `GITHUB_TOKEN` when running workflows in an organization, + * as well as whether GitHub Actions can submit approving pull request reviews. 
For more information, see + * "[Setting the permissions of the GITHUB_TOKEN for your organization](https://docs.github.com/organizations/managing-organization-settings/disabling-or-limiting-github-actions-for-your-organization#setting-the-permissions-of-the-github_token-for-your-organization)." + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. + */ + getGithubActionsDefaultWorkflowPermissionsOrganization: { + (params?: RestEndpointMethodTypes["actions"]["getGithubActionsDefaultWorkflowPermissionsOrganization"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets the default workflow permissions granted to the `GITHUB_TOKEN` when running workflows in a repository, + * as well as if GitHub Actions can submit approving pull request reviews. + * For more information, see "[Setting the permissions of the GITHUB_TOKEN for your repository](https://docs.github.com/repositories/managing-your-repositorys-settings-and-features/enabling-features-for-your-repository/managing-github-actions-settings-for-a-repository#setting-the-permissions-of-the-github_token-for-your-repository)." + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the repository `administration` permission to use this API. + */ + getGithubActionsDefaultWorkflowPermissionsRepository: { + (params?: RestEndpointMethodTypes["actions"]["getGithubActionsDefaultWorkflowPermissionsRepository"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets the GitHub Actions permissions policy for repositories and allowed actions and reusable workflows in an organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. + */ + getGithubActionsPermissionsOrganization: { + (params?: RestEndpointMethodTypes["actions"]["getGithubActionsPermissionsOrganization"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets the GitHub Actions permissions policy for a repository, including whether GitHub Actions is enabled and the actions and reusable workflows allowed to run in the repository. + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `administration` repository permission to use this API. + */ + getGithubActionsPermissionsRepository: { + (params?: RestEndpointMethodTypes["actions"]["getGithubActionsPermissionsRepository"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets a specific job in a workflow run. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + getJobForWorkflowRun: { + (params?: RestEndpointMethodTypes["actions"]["getJobForWorkflowRun"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets your public key, which you need to encrypt secrets. 
You need to encrypt a secret before you can create or update secrets. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to use this endpoint. + */ + getOrgPublicKey: { + (params?: RestEndpointMethodTypes["actions"]["getOrgPublicKey"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets a single organization secret without revealing its encrypted value. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to use this endpoint. + */ + getOrgSecret: { + (params?: RestEndpointMethodTypes["actions"]["getOrgSecret"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Get all deployment environments for a workflow run that are waiting for protection rules to pass. + * + * Anyone with read access to the repository can use this endpoint. If the repository is private, you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + getPendingDeploymentsForRun: { + (params?: RestEndpointMethodTypes["actions"]["getPendingDeploymentsForRun"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets the GitHub Actions permissions policy for a repository, including whether GitHub Actions is enabled and the actions and reusable workflows allowed to run in the repository. + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `administration` repository permission to use this API. + * @deprecated octokit.rest.actions.getRepoPermissions() has been renamed to octokit.rest.actions.getGithubActionsPermissionsRepository() (2020-11-10) + */ + getRepoPermissions: { + (params?: RestEndpointMethodTypes["actions"]["getRepoPermissions"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets your public key, which you need to encrypt secrets. You need to encrypt a secret before you can create or update secrets. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `secrets` repository permission to use this endpoint. + */ + getRepoPublicKey: { + (params?: RestEndpointMethodTypes["actions"]["getRepoPublicKey"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets a single repository secret without revealing its encrypted value. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `secrets` repository permission to use this endpoint. + */ + getRepoSecret: { + (params?: RestEndpointMethodTypes["actions"]["getRepoSecret"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Anyone with read access to the repository can use this endpoint. If the repository is private, you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. 
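// Illustrative sketch (editor's addition, not part of the vendored typings): fetching the
// repository public key needed to encrypt a secret, then reading an existing secret's
// metadata. The secret name is a placeholder; the sealed-box encryption step itself is not shown.
import * as github from "@actions/github";

const octokit = github.getOctokit(process.env.GITHUB_TOKEN ?? "");

async function inspectSecret(secretName: string): Promise<void> {
  const { data: key } = await octokit.rest.actions.getRepoPublicKey(github.context.repo);
  console.log("encrypt against key_id", key.key_id); // base64 public key is in key.key

  const { data: secret } = await octokit.rest.actions.getRepoSecret({
    ...github.context.repo,
    secret_name: secretName,
  });
  console.log(secret.name, "last updated", secret.updated_at);
}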
+ */ + getReviewsForRun: { + (params?: RestEndpointMethodTypes["actions"]["getReviewsForRun"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets a specific self-hosted runner configured in an organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + getSelfHostedRunnerForOrg: { + (params?: RestEndpointMethodTypes["actions"]["getSelfHostedRunnerForOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets a specific self-hosted runner configured in a repository. + * + * You must authenticate using an access token with the `repo` scope to use this + * endpoint. + */ + getSelfHostedRunnerForRepo: { + (params?: RestEndpointMethodTypes["actions"]["getSelfHostedRunnerForRepo"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets a specific workflow. You can replace `workflow_id` with the workflow file name. For example, you could use `main.yaml`. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + getWorkflow: { + (params?: RestEndpointMethodTypes["actions"]["getWorkflow"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets the level of access that workflows outside of the repository have to actions and reusable workflows in the repository. + * This endpoint only applies to internal repositories. For more information, see "[Managing GitHub Actions settings for a repository](https://docs.github.com/repositories/managing-your-repositorys-settings-and-features/enabling-features-for-your-repository/managing-github-actions-settings-for-a-repository#allowing-access-to-components-in-an-internal-repository)." + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the + * repository `administration` permission to use this endpoint. + */ + getWorkflowAccessToRepository: { + (params?: RestEndpointMethodTypes["actions"]["getWorkflowAccessToRepository"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets a specific workflow run. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + getWorkflowRun: { + (params?: RestEndpointMethodTypes["actions"]["getWorkflowRun"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets a specific workflow run attempt. Anyone with read access to the repository + * can use this endpoint. If the repository is private you must use an access token + * with the `repo` scope. GitHub Apps must have the `actions:read` permission to + * use this endpoint. 
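// Illustrative sketch (editor's addition, not part of the vendored typings): looking up a
// workflow by file name (the comments above note that `workflow_id` accepts the file name)
// and then a specific run of it. "main.yaml" and the run id are placeholders.
import * as github from "@actions/github";

const octokit = github.getOctokit(process.env.GITHUB_TOKEN ?? "");

async function describeRun(runId: number): Promise<void> {
  const { data: workflow } = await octokit.rest.actions.getWorkflow({
    ...github.context.repo,
    workflow_id: "main.yaml",
  });
  const { data: run } = await octokit.rest.actions.getWorkflowRun({
    ...github.context.repo,
    run_id: runId,
  });
  console.log(`${workflow.name}: run #${run.run_number} is ${run.status}`);
}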
+ */ + getWorkflowRunAttempt: { + (params?: RestEndpointMethodTypes["actions"]["getWorkflowRunAttempt"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets the number of billable minutes and total run time for a specific workflow run. Billable minutes only apply to workflows in private repositories that use GitHub-hosted runners. Usage is listed for each GitHub-hosted runner operating system in milliseconds. Any job re-runs are also included in the usage. The usage does not include the multiplier for macOS and Windows runners and is not rounded up to the nearest whole minute. For more information, see "[Managing billing for GitHub Actions](https://docs.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-actions)". + * + * Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + getWorkflowRunUsage: { + (params?: RestEndpointMethodTypes["actions"]["getWorkflowRunUsage"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets the number of billable minutes used by a specific workflow during the current billing cycle. Billable minutes only apply to workflows in private repositories that use GitHub-hosted runners. Usage is listed for each GitHub-hosted runner operating system in milliseconds. Any job re-runs are also included in the usage. The usage does not include the multiplier for macOS and Windows runners and is not rounded up to the nearest whole minute. For more information, see "[Managing billing for GitHub Actions](https://docs.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-actions)". + * + * You can replace `workflow_id` with the workflow file name. For example, you could use `main.yaml`. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + getWorkflowUsage: { + (params?: RestEndpointMethodTypes["actions"]["getWorkflowUsage"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists all artifacts for a repository. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + listArtifactsForRepo: { + (params?: RestEndpointMethodTypes["actions"]["listArtifactsForRepo"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists all secrets available in an environment without revealing their encrypted values. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `secrets` repository permission to use this endpoint. + */ + listEnvironmentSecrets: { + (params?: RestEndpointMethodTypes["actions"]["listEnvironmentSecrets"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists jobs for a workflow run. 
Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. You can use parameters to narrow the list of results. For more information about using parameters, see [Parameters](https://docs.github.com/rest/overview/resources-in-the-rest-api#parameters). + */ + listJobsForWorkflowRun: { + (params?: RestEndpointMethodTypes["actions"]["listJobsForWorkflowRun"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists jobs for a specific workflow run attempt. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. You can use parameters to narrow the list of results. For more information about using parameters, see [Parameters](https://docs.github.com/rest/overview/resources-in-the-rest-api#parameters). + */ + listJobsForWorkflowRunAttempt: { + (params?: RestEndpointMethodTypes["actions"]["listJobsForWorkflowRunAttempt"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists all labels for a self-hosted runner configured in an organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + listLabelsForSelfHostedRunnerForOrg: { + (params?: RestEndpointMethodTypes["actions"]["listLabelsForSelfHostedRunnerForOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists all labels for a self-hosted runner configured in a repository. + * + * You must authenticate using an access token with the `repo` scope to use this + * endpoint. + */ + listLabelsForSelfHostedRunnerForRepo: { + (params?: RestEndpointMethodTypes["actions"]["listLabelsForSelfHostedRunnerForRepo"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists all secrets available in an organization without revealing their encrypted values. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to use this endpoint. + */ + listOrgSecrets: { + (params?: RestEndpointMethodTypes["actions"]["listOrgSecrets"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists all secrets available in a repository without revealing their encrypted values. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `secrets` repository permission to use this endpoint. + */ + listRepoSecrets: { + (params?: RestEndpointMethodTypes["actions"]["listRepoSecrets"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists the workflows in a repository. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. 
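// Illustrative sketch (editor's addition, not part of the vendored typings): listing the
// jobs of a workflow run, as documented above, and printing each job's outcome. The run id
// is a placeholder.
import * as github from "@actions/github";

const octokit = github.getOctokit(process.env.GITHUB_TOKEN ?? "");

async function printJobs(runId: number): Promise<void> {
  const { data } = await octokit.rest.actions.listJobsForWorkflowRun({
    ...github.context.repo,
    run_id: runId,
    per_page: 100,
  });
  for (const job of data.jobs) {
    console.log(job.name, job.status, job.conclusion ?? "pending");
  }
}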
+ */ + listRepoWorkflows: { + (params?: RestEndpointMethodTypes["actions"]["listRepoWorkflows"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists binaries for the runner application that you can download and run. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + listRunnerApplicationsForOrg: { + (params?: RestEndpointMethodTypes["actions"]["listRunnerApplicationsForOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists binaries for the runner application that you can download and run. + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. + */ + listRunnerApplicationsForRepo: { + (params?: RestEndpointMethodTypes["actions"]["listRunnerApplicationsForRepo"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists all repositories that have been selected when the `visibility` for repository access to a secret is set to `selected`. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to use this endpoint. + */ + listSelectedReposForOrgSecret: { + (params?: RestEndpointMethodTypes["actions"]["listSelectedReposForOrgSecret"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists the selected repositories that are enabled for GitHub Actions in an organization. To use this endpoint, the organization permission policy for `enabled_repositories` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an organization](#set-github-actions-permissions-for-an-organization)." + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. + */ + listSelectedRepositoriesEnabledGithubActionsOrganization: { + (params?: RestEndpointMethodTypes["actions"]["listSelectedRepositoriesEnabledGithubActionsOrganization"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists all self-hosted runners configured in an organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + listSelfHostedRunnersForOrg: { + (params?: RestEndpointMethodTypes["actions"]["listSelfHostedRunnersForOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists all self-hosted runners configured in a repository. You must authenticate using an access token with the `repo` scope to use this endpoint. + */ + listSelfHostedRunnersForRepo: { + (params?: RestEndpointMethodTypes["actions"]["listSelfHostedRunnersForRepo"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists artifacts for a workflow run. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. 
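// Illustrative sketch (editor's addition, not part of the vendored typings): enumerating
// the workflows of the current repository via listRepoWorkflows, documented above.
import * as github from "@actions/github";

const octokit = github.getOctokit(process.env.GITHUB_TOKEN ?? "");

async function printWorkflows(): Promise<void> {
  const { data } = await octokit.rest.actions.listRepoWorkflows(github.context.repo);
  for (const workflow of data.workflows) {
    console.log(`${workflow.name} (${workflow.path}) is ${workflow.state}`);
  }
}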
+ */ + listWorkflowRunArtifacts: { + (params?: RestEndpointMethodTypes["actions"]["listWorkflowRunArtifacts"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * List all workflow runs for a workflow. You can replace `workflow_id` with the workflow file name. For example, you could use `main.yaml`. You can use parameters to narrow the list of results. For more information about using parameters, see [Parameters](https://docs.github.com/rest/overview/resources-in-the-rest-api#parameters). + * + * Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. + */ + listWorkflowRuns: { + (params?: RestEndpointMethodTypes["actions"]["listWorkflowRuns"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists all workflow runs for a repository. You can use parameters to narrow the list of results. For more information about using parameters, see [Parameters](https://docs.github.com/rest/overview/resources-in-the-rest-api#parameters). + * + * Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + listWorkflowRunsForRepo: { + (params?: RestEndpointMethodTypes["actions"]["listWorkflowRunsForRepo"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Re-run a job and its dependent jobs in a workflow run. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint. + */ + reRunJobForWorkflowRun: { + (params?: RestEndpointMethodTypes["actions"]["reRunJobForWorkflowRun"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Re-runs your workflow run using its `id`. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint. + */ + reRunWorkflow: { + (params?: RestEndpointMethodTypes["actions"]["reRunWorkflow"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Re-run all of the failed jobs and their dependent jobs in a workflow run using the `id` of the workflow run. You must authenticate using an access token with the `repo` scope to use this endpoint. + */ + reRunWorkflowFailedJobs: { + (params?: RestEndpointMethodTypes["actions"]["reRunWorkflowFailedJobs"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Remove all custom labels from a self-hosted runner configured in an + * organization. Returns the remaining read-only labels from the runner. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. 
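// Illustrative sketch (editor's addition, not part of the vendored typings): finding the
// most recent failed run in the repository and re-running only its failed jobs, combining
// two endpoints documented above. The `status: "failure"` filter value is an assumption
// based on the REST docs.
import * as github from "@actions/github";

const octokit = github.getOctokit(process.env.GITHUB_TOKEN ?? "");

async function retryLatestFailure(): Promise<void> {
  const { data } = await octokit.rest.actions.listWorkflowRunsForRepo({
    ...github.context.repo,
    status: "failure",
    per_page: 1,
  });
  const run = data.workflow_runs[0];
  if (!run) return;
  await octokit.rest.actions.reRunWorkflowFailedJobs({ ...github.context.repo, run_id: run.id });
}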
+ */ + removeAllCustomLabelsFromSelfHostedRunnerForOrg: { + (params?: RestEndpointMethodTypes["actions"]["removeAllCustomLabelsFromSelfHostedRunnerForOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Remove all custom labels from a self-hosted runner configured in a + * repository. Returns the remaining read-only labels from the runner. + * + * You must authenticate using an access token with the `repo` scope to use this + * endpoint. + */ + removeAllCustomLabelsFromSelfHostedRunnerForRepo: { + (params?: RestEndpointMethodTypes["actions"]["removeAllCustomLabelsFromSelfHostedRunnerForRepo"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Remove a custom label from a self-hosted runner configured + * in an organization. Returns the remaining labels from the runner. + * + * This endpoint returns a `404 Not Found` status if the custom label is not + * present on the runner. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + removeCustomLabelFromSelfHostedRunnerForOrg: { + (params?: RestEndpointMethodTypes["actions"]["removeCustomLabelFromSelfHostedRunnerForOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Remove a custom label from a self-hosted runner configured + * in a repository. Returns the remaining labels from the runner. + * + * This endpoint returns a `404 Not Found` status if the custom label is not + * present on the runner. + * + * You must authenticate using an access token with the `repo` scope to use this + * endpoint. + */ + removeCustomLabelFromSelfHostedRunnerForRepo: { + (params?: RestEndpointMethodTypes["actions"]["removeCustomLabelFromSelfHostedRunnerForRepo"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Removes a repository from an organization secret when the `visibility` for repository access is set to `selected`. The visibility is set when you [Create or update an organization secret](https://docs.github.com/rest/reference/actions#create-or-update-an-organization-secret). You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to use this endpoint. + */ + removeSelectedRepoFromOrgSecret: { + (params?: RestEndpointMethodTypes["actions"]["removeSelectedRepoFromOrgSecret"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Approve or reject pending deployments that are waiting on approval by a required reviewer. + * + * Required reviewers with read access to the repository contents and deployments can use this endpoint. Required reviewers must authenticate using an access token with the `repo` scope to use this endpoint. + */ + reviewPendingDeploymentsForRun: { + (params?: RestEndpointMethodTypes["actions"]["reviewPendingDeploymentsForRun"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Sets the actions and reusable workflows that are allowed in an organization. To use this endpoint, the organization permission policy for `allowed_actions` must be configured to `selected`. 
For more information, see "[Set GitHub Actions permissions for an organization](#set-github-actions-permissions-for-an-organization)." + * + * If the organization belongs to an enterprise that has `selected` actions and reusable workflows set at the enterprise level, then you cannot override any of the enterprise's allowed actions and reusable workflows settings. + * + * To use the `patterns_allowed` setting for private repositories, the organization must belong to an enterprise. If the organization does not belong to an enterprise, then the `patterns_allowed` setting only applies to public repositories in the organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. + */ + setAllowedActionsOrganization: { + (params?: RestEndpointMethodTypes["actions"]["setAllowedActionsOrganization"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Sets the actions and reusable workflows that are allowed in a repository. To use this endpoint, the repository permission policy for `allowed_actions` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for a repository](#set-github-actions-permissions-for-a-repository)." + * + * If the repository belongs to an organization or enterprise that has `selected` actions and reusable workflows set at the organization or enterprise levels, then you cannot override any of the allowed actions and reusable workflows settings. + * + * To use the `patterns_allowed` setting for private repositories, the repository must belong to an enterprise. If the repository does not belong to an enterprise, then the `patterns_allowed` setting only applies to public repositories. + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `administration` repository permission to use this API. + */ + setAllowedActionsRepository: { + (params?: RestEndpointMethodTypes["actions"]["setAllowedActionsRepository"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Remove all previous custom labels and set the new custom labels for a specific + * self-hosted runner configured in an organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + setCustomLabelsForSelfHostedRunnerForOrg: { + (params?: RestEndpointMethodTypes["actions"]["setCustomLabelsForSelfHostedRunnerForOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Remove all previous custom labels and set the new custom labels for a specific + * self-hosted runner configured in a repository. + * + * You must authenticate using an access token with the `repo` scope to use this + * endpoint. + */ + setCustomLabelsForSelfHostedRunnerForRepo: { + (params?: RestEndpointMethodTypes["actions"]["setCustomLabelsForSelfHostedRunnerForRepo"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Sets the default workflow permissions granted to the `GITHUB_TOKEN` when running workflows in an enterprise, and sets + * whether GitHub Actions can submit approving pull request reviews. 
For more information, see + * "[Enforcing a policy for workflow permissions in your enterprise](https://docs.github.com/enterprise-cloud@latest/admin/policies/enforcing-policies-for-your-enterprise/enforcing-policies-for-github-actions-in-your-enterprise#enforcing-a-policy-for-workflow-permissions-in-your-enterprise)." + * + * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. + * GitHub Apps must have the `enterprise_administration:write` permission to use this endpoint. + */ + setGithubActionsDefaultWorkflowPermissionsEnterprise: { + (params?: RestEndpointMethodTypes["actions"]["setGithubActionsDefaultWorkflowPermissionsEnterprise"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Sets the default workflow permissions granted to the `GITHUB_TOKEN` when running workflows in an organization, and sets if GitHub Actions + * can submit approving pull request reviews. For more information, see + * "[Setting the permissions of the GITHUB_TOKEN for your organization](https://docs.github.com/organizations/managing-organization-settings/disabling-or-limiting-github-actions-for-your-organization#setting-the-permissions-of-the-github_token-for-your-organization)." + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. + */ + setGithubActionsDefaultWorkflowPermissionsOrganization: { + (params?: RestEndpointMethodTypes["actions"]["setGithubActionsDefaultWorkflowPermissionsOrganization"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Sets the default workflow permissions granted to the `GITHUB_TOKEN` when running workflows in a repository, and sets if GitHub Actions + * can submit approving pull request reviews. + * For more information, see "[Setting the permissions of the GITHUB_TOKEN for your repository](https://docs.github.com/repositories/managing-your-repositorys-settings-and-features/enabling-features-for-your-repository/managing-github-actions-settings-for-a-repository#setting-the-permissions-of-the-github_token-for-your-repository)." + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the repository `administration` permission to use this API. + */ + setGithubActionsDefaultWorkflowPermissionsRepository: { + (params?: RestEndpointMethodTypes["actions"]["setGithubActionsDefaultWorkflowPermissionsRepository"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Sets the GitHub Actions permissions policy for repositories and allowed actions and reusable workflows in an organization. + * + * If the organization belongs to an enterprise that has set restrictive permissions at the enterprise level, such as `allowed_actions` to `selected` actions and reusable workflows, then you cannot override them for the organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. 
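// Illustrative sketch (editor's addition, not part of the vendored typings): restricting
// the default GITHUB_TOKEN permissions for a repository via the endpoint documented above.
// The body field names (default_workflow_permissions, can_approve_pull_request_reviews)
// are assumptions based on the REST docs.
import * as github from "@actions/github";

const octokit = github.getOctokit(process.env.GITHUB_TOKEN ?? "");

async function hardenTokenPermissions(): Promise<void> {
  await octokit.rest.actions.setGithubActionsDefaultWorkflowPermissionsRepository({
    ...github.context.repo,
    default_workflow_permissions: "read",
    can_approve_pull_request_reviews: false,
  });
}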
+ */ + setGithubActionsPermissionsOrganization: { + (params?: RestEndpointMethodTypes["actions"]["setGithubActionsPermissionsOrganization"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Sets the GitHub Actions permissions policy for enabling GitHub Actions and allowed actions and reusable workflows in the repository. + * + * If the repository belongs to an organization or enterprise that has set restrictive permissions at the organization or enterprise levels, such as `allowed_actions` to `selected` actions and reusable workflows, then you cannot override them for the repository. + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `administration` repository permission to use this API. + */ + setGithubActionsPermissionsRepository: { + (params?: RestEndpointMethodTypes["actions"]["setGithubActionsPermissionsRepository"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Replaces all repositories for an organization secret when the `visibility` for repository access is set to `selected`. The visibility is set when you [Create or update an organization secret](https://docs.github.com/rest/reference/actions#create-or-update-an-organization-secret). You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to use this endpoint. + */ + setSelectedReposForOrgSecret: { + (params?: RestEndpointMethodTypes["actions"]["setSelectedReposForOrgSecret"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Replaces the list of selected repositories that are enabled for GitHub Actions in an organization. To use this endpoint, the organization permission policy for `enabled_repositories` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an organization](#set-github-actions-permissions-for-an-organization)." + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. + */ + setSelectedRepositoriesEnabledGithubActionsOrganization: { + (params?: RestEndpointMethodTypes["actions"]["setSelectedRepositoriesEnabledGithubActionsOrganization"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Sets the level of access that workflows outside of the repository have to actions and reusable workflows in the repository. + * This endpoint only applies to internal repositories. For more information, see "[Managing GitHub Actions settings for a repository](https://docs.github.com/repositories/managing-your-repositorys-settings-and-features/enabling-features-for-your-repository/managing-github-actions-settings-for-a-repository#allowing-access-to-components-in-an-internal-repository)." + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the + * repository `administration` permission to use this endpoint. 
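// Illustrative sketch (editor's addition, not part of the vendored typings): reading and
// then tightening a repository's Actions permissions policy with the get/set pair
// documented in this section. The "selected" policy value mirrors the comments above.
import * as github from "@actions/github";

const octokit = github.getOctokit(process.env.GITHUB_TOKEN ?? "");

async function restrictActions(): Promise<void> {
  const { data } = await octokit.rest.actions.getGithubActionsPermissionsRepository(
    github.context.repo
  );
  console.log("enabled:", data.enabled, "allowed:", data.allowed_actions);

  await octokit.rest.actions.setGithubActionsPermissionsRepository({
    ...github.context.repo,
    enabled: true,
    allowed_actions: "selected",
  });
}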
+ */ + setWorkflowAccessToRepository: { + (params?: RestEndpointMethodTypes["actions"]["setWorkflowAccessToRepository"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + }; + activity: { + checkRepoIsStarredByAuthenticatedUser: { + (params?: RestEndpointMethodTypes["activity"]["checkRepoIsStarredByAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * This endpoint should only be used to stop watching a repository. To control whether or not you wish to receive notifications from a repository, [set the repository's subscription manually](https://docs.github.com/rest/reference/activity#set-a-repository-subscription). + */ + deleteRepoSubscription: { + (params?: RestEndpointMethodTypes["activity"]["deleteRepoSubscription"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Mutes all future notifications for a conversation until you comment on the thread or get an **@mention**. If you are watching the repository of the thread, you will still receive notifications. To ignore future notifications for a repository you are watching, use the [Set a thread subscription](https://docs.github.com/rest/reference/activity#set-a-thread-subscription) endpoint and set `ignore` to `true`. + */ + deleteThreadSubscription: { + (params?: RestEndpointMethodTypes["activity"]["deleteThreadSubscription"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * GitHub provides several timeline resources in [Atom](http://en.wikipedia.org/wiki/Atom_(standard)) format. The Feeds API lists all the feeds available to the authenticated user: + * + * * **Timeline**: The GitHub global public timeline + * * **User**: The public timeline for any user, using [URI template](https://docs.github.com/rest/overview/resources-in-the-rest-api#hypermedia) + * * **Current user public**: The public timeline for the authenticated user + * * **Current user**: The private timeline for the authenticated user + * * **Current user actor**: The private timeline for activity created by the authenticated user + * * **Current user organizations**: The private timeline for the organizations the authenticated user is a member of. + * * **Security advisories**: A collection of public announcements that provide information about security-related vulnerabilities in software on GitHub. + * + * **Note**: Private feeds are only returned when [authenticating via Basic Auth](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) since current feed URIs use the older, non revocable auth tokens. + */ + getFeeds: { + (params?: RestEndpointMethodTypes["activity"]["getFeeds"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + getRepoSubscription: { + (params?: RestEndpointMethodTypes["activity"]["getRepoSubscription"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + getThread: { + (params?: RestEndpointMethodTypes["activity"]["getThread"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * This checks to see if the current user is subscribed to a thread. 
You can also [get a repository subscription](https://docs.github.com/rest/reference/activity#get-a-repository-subscription). + * + * Note that subscriptions are only generated if a user is participating in a conversation--for example, they've replied to the thread, were **@mentioned**, or manually subscribe to a thread. + */ + getThreadSubscriptionForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["activity"]["getThreadSubscriptionForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * If you are authenticated as the given user, you will see your private events. Otherwise, you'll only see public events. + */ + listEventsForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["activity"]["listEventsForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * List all notifications for the current user, sorted by most recently updated. + */ + listNotificationsForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["activity"]["listNotificationsForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * This is the user's organization dashboard. You must be authenticated as the user to view this. + */ + listOrgEventsForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["activity"]["listOrgEventsForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * We delay the public events feed by five minutes, which means the most recent event returned by the public events API actually occurred at least five minutes ago. + */ + listPublicEvents: { + (params?: RestEndpointMethodTypes["activity"]["listPublicEvents"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + listPublicEventsForRepoNetwork: { + (params?: RestEndpointMethodTypes["activity"]["listPublicEventsForRepoNetwork"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + listPublicEventsForUser: { + (params?: RestEndpointMethodTypes["activity"]["listPublicEventsForUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + listPublicOrgEvents: { + (params?: RestEndpointMethodTypes["activity"]["listPublicOrgEvents"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * These are events that you've received by watching repos and following users. If you are authenticated as the given user, you will see private events. Otherwise, you'll only see public events. 
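// Illustrative sketch (editor's addition, not part of the vendored typings): listing the
// authenticated user's unread notifications, as documented above, and printing each
// thread's subject.
import * as github from "@actions/github";

const octokit = github.getOctokit(process.env.GITHUB_TOKEN ?? "");

async function printUnreadNotifications(): Promise<void> {
  const { data } = await octokit.rest.activity.listNotificationsForAuthenticatedUser({
    all: false,
    per_page: 50,
  });
  for (const thread of data) {
    console.log(`[${thread.subject.type}] ${thread.subject.title}`);
  }
}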
+ */ + listReceivedEventsForUser: { + (params?: RestEndpointMethodTypes["activity"]["listReceivedEventsForUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + listReceivedPublicEventsForUser: { + (params?: RestEndpointMethodTypes["activity"]["listReceivedPublicEventsForUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + listRepoEvents: { + (params?: RestEndpointMethodTypes["activity"]["listRepoEvents"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * List all notifications for the current user. + */ + listRepoNotificationsForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["activity"]["listRepoNotificationsForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists repositories the authenticated user has starred. + * + * You can also find out _when_ stars were created by passing the following custom [media type](https://docs.github.com/rest/overview/media-types/) via the `Accept` header: + */ + listReposStarredByAuthenticatedUser: { + (params?: RestEndpointMethodTypes["activity"]["listReposStarredByAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists repositories a user has starred. + * + * You can also find out _when_ stars were created by passing the following custom [media type](https://docs.github.com/rest/overview/media-types/) via the `Accept` header: + */ + listReposStarredByUser: { + (params?: RestEndpointMethodTypes["activity"]["listReposStarredByUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists repositories a user is watching. + */ + listReposWatchedByUser: { + (params?: RestEndpointMethodTypes["activity"]["listReposWatchedByUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists the people that have starred the repository. + * + * You can also find out _when_ stars were created by passing the following custom [media type](https://docs.github.com/rest/overview/media-types/) via the `Accept` header: + */ + listStargazersForRepo: { + (params?: RestEndpointMethodTypes["activity"]["listStargazersForRepo"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists repositories the authenticated user is watching. + */ + listWatchedReposForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["activity"]["listWatchedReposForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists the people watching the specified repository. + */ + listWatchersForRepo: { + (params?: RestEndpointMethodTypes["activity"]["listWatchersForRepo"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Marks all notifications as "read" and removes them from the [default view on GitHub](https://github.com/notifications).
If the number of notifications is too large to complete in one request, you will receive a `202 Accepted` status and GitHub will run an asynchronous process to mark notifications as "read." To check whether any "unread" notifications remain, you can use the [List notifications for the authenticated user](https://docs.github.com/rest/reference/activity#list-notifications-for-the-authenticated-user) endpoint and pass the query parameter `all=false`. + */ + markNotificationsAsRead: { + (params?: RestEndpointMethodTypes["activity"]["markNotificationsAsRead"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Marks all notifications in a repository as "read" and removes them from the [default view on GitHub](https://github.com/notifications). If the number of notifications is too large to complete in one request, you will receive a `202 Accepted` status and GitHub will run an asynchronous process to mark notifications as "read." To check whether any "unread" notifications remain, you can use the [List repository notifications for the authenticated user](https://docs.github.com/rest/reference/activity#list-repository-notifications-for-the-authenticated-user) endpoint and pass the query parameter `all=false`. + */ + markRepoNotificationsAsRead: { + (params?: RestEndpointMethodTypes["activity"]["markRepoNotificationsAsRead"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + markThreadAsRead: { + (params?: RestEndpointMethodTypes["activity"]["markThreadAsRead"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * If you would like to watch a repository, set `subscribed` to `true`. If you would like to ignore notifications made within a repository, set `ignored` to `true`. If you would like to stop watching a repository, [delete the repository's subscription](https://docs.github.com/rest/reference/activity#delete-a-repository-subscription) completely. + */ + setRepoSubscription: { + (params?: RestEndpointMethodTypes["activity"]["setRepoSubscription"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * If you are watching a repository, you receive notifications for all threads by default. Use this endpoint to ignore future notifications for threads until you comment on the thread or get an **@mention**. + * + * You can also use this endpoint to subscribe to threads that you are currently not receiving notifications for or to subscribe to threads that you have previously ignored. + * + * Unsubscribing from a conversation in a repository that you are not watching is functionally equivalent to the [Delete a thread subscription](https://docs.github.com/rest/reference/activity#delete-a-thread-subscription) endpoint. + */ + setThreadSubscription: { + (params?: RestEndpointMethodTypes["activity"]["setThreadSubscription"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Note that you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)."
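// Illustrative sketch (editor's addition, not part of the vendored typings): watching a
// repository and then marking its notifications as read, combining setRepoSubscription and
// markRepoNotificationsAsRead from the comments above.
import * as github from "@actions/github";

const octokit = github.getOctokit(process.env.GITHUB_TOKEN ?? "");

async function watchAndClearNotifications(): Promise<void> {
  await octokit.rest.activity.setRepoSubscription({
    ...github.context.repo,
    subscribed: true,
    ignored: false,
  });
  await octokit.rest.activity.markRepoNotificationsAsRead(github.context.repo);
}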
+ */ + starRepoForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["activity"]["starRepoForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + unstarRepoForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["activity"]["unstarRepoForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + }; + apps: { + /** + * Add a single repository to an installation. The authenticated user must have admin access to the repository. + * + * You must use a personal access token (which you can create via the [command line](https://docs.github.com/github/authenticating-to-github/creating-a-personal-access-token) or [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication)) to access this endpoint. + * @deprecated octokit.rest.apps.addRepoToInstallation() has been renamed to octokit.rest.apps.addRepoToInstallationForAuthenticatedUser() (2021-10-05) + */ + addRepoToInstallation: { + (params?: RestEndpointMethodTypes["apps"]["addRepoToInstallation"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Add a single repository to an installation. The authenticated user must have admin access to the repository. + * + * You must use a personal access token (which you can create via the [command line](https://docs.github.com/github/authenticating-to-github/creating-a-personal-access-token) or [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication)) to access this endpoint. + */ + addRepoToInstallationForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["apps"]["addRepoToInstallationForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * OAuth applications can use a special API method for checking OAuth token validity without exceeding the normal rate limits for failed login attempts. Authentication works differently with this particular endpoint. You must use [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) to use this endpoint, where the username is the OAuth application `client_id` and the password is its `client_secret`. Invalid tokens will return `404 NOT FOUND`. + */ + checkToken: { + (params?: RestEndpointMethodTypes["apps"]["checkToken"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Use this endpoint to complete the handshake necessary when implementing the [GitHub App Manifest flow](https://docs.github.com/apps/building-github-apps/creating-github-apps-from-a-manifest/). When you create a GitHub App with the manifest flow, you receive a temporary `code` used to retrieve the GitHub App's `id`, `pem` (private key), and `webhook_secret`. + */ + createFromManifest: { + (params?: RestEndpointMethodTypes["apps"]["createFromManifest"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Creates an installation access token that enables a GitHub App to make authenticated API requests for the app's installation on an organization or individual account. 
Installation tokens expire one hour from the time you create them. Using an expired token produces a status code of `401 - Unauthorized`, and requires creating a new installation token. By default the installation token has access to all repositories that the installation can access. To restrict the access to specific repositories, you can provide the `repository_ids` when creating the token. When you omit `repository_ids`, the response does not contain the `repositories` key. + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + createInstallationAccessToken: { + (params?: RestEndpointMethodTypes["apps"]["createInstallationAccessToken"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * OAuth application owners can revoke a grant for their OAuth application and a specific user. You must use [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) when accessing this endpoint, using the OAuth application's `client_id` and `client_secret` as the username and password. You must also provide a valid OAuth `access_token` as an input parameter and the grant for the token's owner will be deleted. + * Deleting an OAuth application's grant will also delete all OAuth tokens associated with the application for the user. Once deleted, the application will have no access to the user's account and will no longer be listed on [the application authorizations settings screen within GitHub](https://github.com/settings/applications#authorized). + */ + deleteAuthorization: { + (params?: RestEndpointMethodTypes["apps"]["deleteAuthorization"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Uninstalls a GitHub App on a user, organization, or business account. If you prefer to temporarily suspend an app's access to your account's resources, then we recommend the "[Suspend an app installation](https://docs.github.com/rest/reference/apps/#suspend-an-app-installation)" endpoint. + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + deleteInstallation: { + (params?: RestEndpointMethodTypes["apps"]["deleteInstallation"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * OAuth application owners can revoke a single token for an OAuth application. You must use [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) when accessing this endpoint, using the OAuth application's `client_id` and `client_secret` as the username and password. + */ + deleteToken: { + (params?: RestEndpointMethodTypes["apps"]["deleteToken"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Returns the GitHub App associated with the authentication credentials used. To see how many app installations are associated with this GitHub App, see the `installations_count` in the response. 
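As a sketch of the installation-token flow described above, assuming `appJwt` and `installationId` are produced elsewhere and that passing a JWT to getOctokit yields bearer authentication (both assumptions, not part of this patch):

```ts
import { getOctokit } from "@actions/github";

declare const appJwt: string; // GitHub App JWT, generated elsewhere (assumed)
declare const installationId: number;

async function mintInstallationToken() {
  // @octokit/auth-token sends JWTs with a `bearer` prefix, so the JWT can be passed straight in.
  const appClient = getOctokit(appJwt);

  const { data } = await appClient.rest.apps.createInstallationAccessToken({
    installation_id: installationId,
    repository_ids: [123456], // optional restriction; placeholder repository id
  });

  // `data.token` expires after one hour; use it to build a normal installation client.
  return getOctokit(data.token);
}
```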
For more details about your app's installations, see the "[List installations for the authenticated app](https://docs.github.com/rest/reference/apps#list-installations-for-the-authenticated-app)" endpoint. + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + getAuthenticated: { + (params?: RestEndpointMethodTypes["apps"]["getAuthenticated"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * **Note**: The `:app_slug` is just the URL-friendly name of your GitHub App. You can find this on the settings page for your GitHub App (e.g., `https://github.com/settings/apps/:app_slug`). + * + * If the GitHub App you specify is public, you can access this endpoint without authenticating. If the GitHub App you specify is private, you must authenticate with a [personal access token](https://docs.github.com/articles/creating-a-personal-access-token-for-the-command-line/) or an [installation access token](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-an-installation) to access this endpoint. + */ + getBySlug: { + (params?: RestEndpointMethodTypes["apps"]["getBySlug"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Enables an authenticated GitHub App to find an installation's information using the installation id. + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + getInstallation: { + (params?: RestEndpointMethodTypes["apps"]["getInstallation"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Enables an authenticated GitHub App to find the organization's installation information. + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + getOrgInstallation: { + (params?: RestEndpointMethodTypes["apps"]["getOrgInstallation"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Enables an authenticated GitHub App to find the repository's installation information. The installation's account type will be either an organization or a user account, depending which account the repository belongs to. + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + getRepoInstallation: { + (params?: RestEndpointMethodTypes["apps"]["getRepoInstallation"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Shows whether the user or organization account actively subscribes to a plan listed by the authenticated GitHub App. When someone submits a plan change that won't be processed until the end of their billing cycle, you will also see the upcoming pending change. + * + * GitHub Apps must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. 
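A small, hedged sketch of the lookup methods declared above; the app slug is a placeholder and the JWT client setup is assumed:

```ts
import { getOctokit } from "@actions/github";

declare const appJwt: string; // GitHub App JWT, generated elsewhere (assumed)
const appClient = getOctokit(appJwt);

async function describeInstallation(owner: string, repo: string) {
  // Public apps can be fetched by slug; private apps need a suitable token, as noted above.
  const app = await appClient.rest.apps.getBySlug({ app_slug: "my-app" }); // placeholder slug

  // Find the app's installation on this repository, if any.
  const installation = await appClient.rest.apps.getRepoInstallation({ owner, repo });
  console.log(app.data.name, "installation id:", installation.data.id);
}
```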
OAuth Apps must use [basic authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) with their client ID and client secret to access this endpoint. + */ + getSubscriptionPlanForAccount: { + (params?: RestEndpointMethodTypes["apps"]["getSubscriptionPlanForAccount"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Shows whether the user or organization account actively subscribes to a plan listed by the authenticated GitHub App. When someone submits a plan change that won't be processed until the end of their billing cycle, you will also see the upcoming pending change. + * + * GitHub Apps must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. OAuth Apps must use [basic authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) with their client ID and client secret to access this endpoint. + */ + getSubscriptionPlanForAccountStubbed: { + (params?: RestEndpointMethodTypes["apps"]["getSubscriptionPlanForAccountStubbed"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Enables an authenticated GitHub App to find the user’s installation information. + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + getUserInstallation: { + (params?: RestEndpointMethodTypes["apps"]["getUserInstallation"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Returns the webhook configuration for a GitHub App. For more information about configuring a webhook for your app, see "[Creating a GitHub App](/developers/apps/creating-a-github-app)." + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + getWebhookConfigForApp: { + (params?: RestEndpointMethodTypes["apps"]["getWebhookConfigForApp"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Returns a delivery for the webhook configured for a GitHub App. + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + getWebhookDelivery: { + (params?: RestEndpointMethodTypes["apps"]["getWebhookDelivery"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Returns user and organization accounts associated with the specified plan, including free plans. For per-seat pricing, you see the list of accounts that have purchased the plan, including the number of seats purchased. When someone submits a plan change that won't be processed until the end of their billing cycle, you will also see the upcoming pending change. + * + * GitHub Apps must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. 
OAuth Apps must use [basic authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) with their client ID and client secret to access this endpoint. + */ + listAccountsForPlan: { + (params?: RestEndpointMethodTypes["apps"]["listAccountsForPlan"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Returns repository and organization accounts associated with the specified plan, including free plans. For per-seat pricing, you see the list of accounts that have purchased the plan, including the number of seats purchased. When someone submits a plan change that won't be processed until the end of their billing cycle, you will also see the upcoming pending change. + * + * GitHub Apps must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. OAuth Apps must use [basic authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) with their client ID and client secret to access this endpoint. + */ + listAccountsForPlanStubbed: { + (params?: RestEndpointMethodTypes["apps"]["listAccountsForPlanStubbed"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * List repositories that the authenticated user has explicit permission (`:read`, `:write`, or `:admin`) to access for an installation. + * + * The authenticated user has explicit permission to access repositories they own, repositories where they are a collaborator, and repositories that they can access through an organization membership. + * + * You must use a [user-to-server OAuth access token](https://docs.github.com/apps/building-github-apps/identifying-and-authorizing-users-for-github-apps/#identifying-users-on-your-site), created for a user who has authorized your GitHub App, to access this endpoint. + * + * The access the user has to each repository is included in the hash under the `permissions` key. + */ + listInstallationReposForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["apps"]["listInstallationReposForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + * + * The permissions the installation has are included under the `permissions` key. + */ + listInstallations: { + (params?: RestEndpointMethodTypes["apps"]["listInstallations"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists installations of your GitHub App that the authenticated user has explicit permission (`:read`, `:write`, or `:admin`) to access. + * + * You must use a [user-to-server OAuth access token](https://docs.github.com/apps/building-github-apps/identifying-and-authorizing-users-for-github-apps/#identifying-users-on-your-site), created for a user who has authorized your GitHub App, to access this endpoint. + * + * The authenticated user has explicit permission to access repositories they own, repositories where they are a collaborator, and repositories that they can access through an organization membership. 
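A hedged sketch of walking a Marketplace plan's accounts with the declared listAccountsForPlan method, assuming the client exposes the paginate helper and that `planId` is supplied elsewhere:

```ts
import { getOctokit } from "@actions/github";

declare const appJwt: string; // Marketplace endpoints want a JWT (or OAuth basic auth); assumed here
const appClient = getOctokit(appJwt);

async function listPlanAccounts(planId: number) {
  // Assumption: `paginate` is available because the pagination plugin ships with getOctokit.
  const accounts = await appClient.paginate(appClient.rest.apps.listAccountsForPlan, {
    plan_id: planId,
    per_page: 100,
  });

  for (const account of accounts) {
    // `marketplace_purchase` holds the current plan; a pending change appears separately.
    console.log(account.login, account.marketplace_purchase.plan?.name);
  }
}
```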
+ * + * You can find the permissions for the installation under the `permissions` key. + */ + listInstallationsForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["apps"]["listInstallationsForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists all plans that are part of your GitHub Marketplace listing. + * + * GitHub Apps must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. OAuth Apps must use [basic authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) with their client ID and client secret to access this endpoint. + */ + listPlans: { + (params?: RestEndpointMethodTypes["apps"]["listPlans"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists all plans that are part of your GitHub Marketplace listing. + * + * GitHub Apps must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. OAuth Apps must use [basic authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) with their client ID and client secret to access this endpoint. + */ + listPlansStubbed: { + (params?: RestEndpointMethodTypes["apps"]["listPlansStubbed"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * List repositories that an app installation can access. + * + * You must use an [installation access token](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-an-installation) to access this endpoint. + */ + listReposAccessibleToInstallation: { + (params?: RestEndpointMethodTypes["apps"]["listReposAccessibleToInstallation"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists the active subscriptions for the authenticated user. You must use a [user-to-server OAuth access token](https://docs.github.com/apps/building-github-apps/identifying-and-authorizing-users-for-github-apps/#identifying-users-on-your-site), created for a user who has authorized your GitHub App, to access this endpoint. . OAuth Apps must authenticate using an [OAuth token](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/). + */ + listSubscriptionsForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["apps"]["listSubscriptionsForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists the active subscriptions for the authenticated user. You must use a [user-to-server OAuth access token](https://docs.github.com/apps/building-github-apps/identifying-and-authorizing-users-for-github-apps/#identifying-users-on-your-site), created for a user who has authorized your GitHub App, to access this endpoint. . OAuth Apps must authenticate using an [OAuth token](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/). 
+ */ + listSubscriptionsForAuthenticatedUserStubbed: { + (params?: RestEndpointMethodTypes["apps"]["listSubscriptionsForAuthenticatedUserStubbed"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Returns a list of webhook deliveries for the webhook configured for a GitHub App. + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + listWebhookDeliveries: { + (params?: RestEndpointMethodTypes["apps"]["listWebhookDeliveries"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Redeliver a delivery for the webhook configured for a GitHub App. + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + redeliverWebhookDelivery: { + (params?: RestEndpointMethodTypes["apps"]["redeliverWebhookDelivery"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Remove a single repository from an installation. The authenticated user must have admin access to the repository. + * + * You must use a personal access token (which you can create via the [command line](https://docs.github.com/github/authenticating-to-github/creating-a-personal-access-token) or [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication)) to access this endpoint. + * @deprecated octokit.rest.apps.removeRepoFromInstallation() has been renamed to octokit.rest.apps.removeRepoFromInstallationForAuthenticatedUser() (2021-10-05) + */ + removeRepoFromInstallation: { + (params?: RestEndpointMethodTypes["apps"]["removeRepoFromInstallation"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Remove a single repository from an installation. The authenticated user must have admin access to the repository. + * + * You must use a personal access token (which you can create via the [command line](https://docs.github.com/github/authenticating-to-github/creating-a-personal-access-token) or [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication)) to access this endpoint. + */ + removeRepoFromInstallationForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["apps"]["removeRepoFromInstallationForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * OAuth applications can use this API method to reset a valid OAuth token without end-user involvement. Applications must save the "token" property in the response because changes take effect immediately. You must use [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) when accessing this endpoint, using the OAuth application's `client_id` and `client_secret` as the username and password. Invalid tokens will return `404 NOT FOUND`. 
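A hedged sketch of the webhook-delivery methods above: list recent deliveries and redeliver anything that did not succeed; the failure heuristic (`status_code >= 400`) is an assumption, not an API rule:

```ts
import { getOctokit } from "@actions/github";

declare const appJwt: string; // GitHub App JWT, generated elsewhere (assumed)
const appClient = getOctokit(appJwt);

async function redeliverFailures() {
  const { data: deliveries } = await appClient.rest.apps.listWebhookDeliveries({ per_page: 50 });

  for (const delivery of deliveries) {
    // Treat any non-success status code as a failed delivery (heuristic).
    if (delivery.status_code >= 400) {
      await appClient.rest.apps.redeliverWebhookDelivery({ delivery_id: delivery.id });
    }
  }
}
```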
+ */ + resetToken: { + (params?: RestEndpointMethodTypes["apps"]["resetToken"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Revokes the installation token you're using to authenticate as an installation and access this endpoint. + * + * Once an installation token is revoked, the token is invalidated and cannot be used. Other endpoints that require the revoked installation token must have a new installation token to work. You can create a new token using the "[Create an installation access token for an app](https://docs.github.com/rest/reference/apps#create-an-installation-access-token-for-an-app)" endpoint. + * + * You must use an [installation access token](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-an-installation) to access this endpoint. + */ + revokeInstallationAccessToken: { + (params?: RestEndpointMethodTypes["apps"]["revokeInstallationAccessToken"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Use a non-scoped user-to-server OAuth access token to create a repository scoped and/or permission scoped user-to-server OAuth access token. You can specify which repositories the token can access and which permissions are granted to the token. You must use [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) when accessing this endpoint, using the OAuth application's `client_id` and `client_secret` as the username and password. Invalid tokens will return `404 NOT FOUND`. + */ + scopeToken: { + (params?: RestEndpointMethodTypes["apps"]["scopeToken"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Suspends a GitHub App on a user, organization, or business account, which blocks the app from accessing the account's resources. When a GitHub App is suspended, the app's access to the GitHub API or webhook events is blocked for that account. + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + suspendInstallation: { + (params?: RestEndpointMethodTypes["apps"]["suspendInstallation"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Removes a GitHub App installation suspension. + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + unsuspendInstallation: { + (params?: RestEndpointMethodTypes["apps"]["unsuspendInstallation"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Updates the webhook configuration for a GitHub App. For more information about configuring a webhook for your app, see "[Creating a GitHub App](/developers/apps/creating-a-github-app)." + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. 
+ */ + updateWebhookConfigForApp: { + (params?: RestEndpointMethodTypes["apps"]["updateWebhookConfigForApp"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + }; + billing: { + /** + * Gets the summary of the free and paid GitHub Actions minutes used. + * + * Paid minutes only apply to workflows in private repositories that use GitHub-hosted runners. Minutes used is listed for each GitHub-hosted runner operating system. Any job re-runs are also included in the usage. The usage returned includes any minute multipliers for macOS and Windows runners, and is rounded up to the nearest whole minute. For more information, see "[Managing billing for GitHub Actions](https://docs.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-actions)". + * + * Access tokens must have the `repo` or `admin:org` scope. + */ + getGithubActionsBillingOrg: { + (params?: RestEndpointMethodTypes["billing"]["getGithubActionsBillingOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets the summary of the free and paid GitHub Actions minutes used. + * + * Paid minutes only apply to workflows in private repositories that use GitHub-hosted runners. Minutes used is listed for each GitHub-hosted runner operating system. Any job re-runs are also included in the usage. The usage returned includes any minute multipliers for macOS and Windows runners, and is rounded up to the nearest whole minute. For more information, see "[Managing billing for GitHub Actions](https://docs.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-actions)". + * + * Access tokens must have the `user` scope. + */ + getGithubActionsBillingUser: { + (params?: RestEndpointMethodTypes["billing"]["getGithubActionsBillingUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets the GitHub Advanced Security active committers for an enterprise per repository. + * Each distinct user login across all repositories is counted as a single Advanced Security seat, so the total_advanced_security_committers is not the sum of active_users for each repository. + */ + getGithubAdvancedSecurityBillingGhe: { + (params?: RestEndpointMethodTypes["billing"]["getGithubAdvancedSecurityBillingGhe"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets the GitHub Advanced Security active committers for an organization per repository. + * Each distinct user login across all repositories is counted as a single Advanced Security seat, so the total_advanced_security_committers is not the sum of advanced_security_committers for each repository. + * If this organization defers to an enterprise for billing, the total_advanced_security_committers returned from the organization API may include some users that are in more than one organization, so they will only consume a single Advanced Security seat at the enterprise level. + */ + getGithubAdvancedSecurityBillingOrg: { + (params?: RestEndpointMethodTypes["billing"]["getGithubAdvancedSecurityBillingOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets the free and paid storage used for GitHub Packages in gigabytes. 
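A short usage sketch for the billing methods declared above, assuming a token with the `admin:org` or `repo` scope; the field names follow the documented billing response:

```ts
import { getOctokit } from "@actions/github";

const octokit = getOctokit(process.env.GITHUB_TOKEN ?? ""); // token with `repo` or `admin:org` scope

async function reportActionsMinutes(org: string) {
  const { data } = await octokit.rest.billing.getGithubActionsBillingOrg({ org });

  // Used, paid, and included minutes, as returned by the billing endpoint.
  console.log(
    `${org}: ${data.total_minutes_used} minutes used,`,
    `${data.total_paid_minutes_used} paid, ${data.included_minutes} included`
  );
}
```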
+ * + * Paid minutes only apply to packages stored for private repositories. For more information, see "[Managing billing for GitHub Packages](https://docs.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-packages)." + * + * Access tokens must have the `repo` or `admin:org` scope. + */ + getGithubPackagesBillingOrg: { + (params?: RestEndpointMethodTypes["billing"]["getGithubPackagesBillingOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets the free and paid storage used for GitHub Packages in gigabytes. + * + * Paid minutes only apply to packages stored for private repositories. For more information, see "[Managing billing for GitHub Packages](https://docs.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-packages)." + * + * Access tokens must have the `user` scope. + */ + getGithubPackagesBillingUser: { + (params?: RestEndpointMethodTypes["billing"]["getGithubPackagesBillingUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets the estimated paid and estimated total storage used for GitHub Actions and GitHub Packages. + * + * Paid minutes only apply to packages stored for private repositories. For more information, see "[Managing billing for GitHub Packages](https://docs.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-packages)." + * + * Access tokens must have the `repo` or `admin:org` scope. + */ + getSharedStorageBillingOrg: { + (params?: RestEndpointMethodTypes["billing"]["getSharedStorageBillingOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets the estimated paid and estimated total storage used for GitHub Actions and GitHub Packages. + * + * Paid minutes only apply to packages stored for private repositories. For more information, see "[Managing billing for GitHub Packages](https://docs.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-packages)." + * + * Access tokens must have the `user` scope. + */ + getSharedStorageBillingUser: { + (params?: RestEndpointMethodTypes["billing"]["getSharedStorageBillingUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + }; + checks: { + /** + * **Note:** The Checks API only looks for pushes in the repository where the check suite or check run were created. Pushes to a branch in a forked repository are not detected and return an empty `pull_requests` array. + * + * Creates a new check run for a specific commit in a repository. Your GitHub App must have the `checks:write` permission to create check runs. + * + * In a check suite, GitHub limits the number of check runs with the same name to 1000. Once these check runs exceed 1000, GitHub will start to automatically delete older check runs. + */ + create: { + (params?: RestEndpointMethodTypes["checks"]["create"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * **Note:** The Checks API only looks for pushes in the repository where the check suite or check run were created. 
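To make the documented check-run flow concrete, a hedged sketch that opens a check run for a commit and later completes it; the check name, output text, and token are placeholders:

```ts
import { getOctokit } from "@actions/github";

const octokit = getOctokit(process.env.GITHUB_TOKEN ?? ""); // installation token with `checks:write`

async function runCheck(owner: string, repo: string, headSha: string) {
  // Open the check run in an "in progress" state against the commit.
  const { data: run } = await octokit.rest.checks.create({
    owner,
    repo,
    name: "example-check", // placeholder check name
    head_sha: headSha,
    status: "in_progress",
  });

  // ...do the real work, then report a conclusion.
  await octokit.rest.checks.update({
    owner,
    repo,
    check_run_id: run.id,
    status: "completed",
    conclusion: "success",
    output: { title: "Example check", summary: "Everything passed." },
  });
}
```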
Pushes to a branch in a forked repository are not detected and return an empty `pull_requests` array and a `null` value for `head_branch`. + * + * By default, check suites are automatically created when you create a [check run](https://docs.github.com/rest/reference/checks#check-runs). You only need to use this endpoint for manually creating check suites when you've disabled automatic creation using "[Update repository preferences for check suites](https://docs.github.com/rest/reference/checks#update-repository-preferences-for-check-suites)". Your GitHub App must have the `checks:write` permission to create check suites. + */ + createSuite: { + (params?: RestEndpointMethodTypes["checks"]["createSuite"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * **Note:** The Checks API only looks for pushes in the repository where the check suite or check run were created. Pushes to a branch in a forked repository are not detected and return an empty `pull_requests` array. + * + * Gets a single check run using its `id`. GitHub Apps must have the `checks:read` permission on a private repository or pull access to a public repository to get check runs. OAuth Apps and authenticated users must have the `repo` scope to get check runs in a private repository. + */ + get: { + (params?: RestEndpointMethodTypes["checks"]["get"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * **Note:** The Checks API only looks for pushes in the repository where the check suite or check run were created. Pushes to a branch in a forked repository are not detected and return an empty `pull_requests` array and a `null` value for `head_branch`. + * + * Gets a single check suite using its `id`. GitHub Apps must have the `checks:read` permission on a private repository or pull access to a public repository to get check suites. OAuth Apps and authenticated users must have the `repo` scope to get check suites in a private repository. + */ + getSuite: { + (params?: RestEndpointMethodTypes["checks"]["getSuite"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists annotations for a check run using the annotation `id`. GitHub Apps must have the `checks:read` permission on a private repository or pull access to a public repository to get annotations for a check run. OAuth Apps and authenticated users must have the `repo` scope to get annotations for a check run in a private repository. + */ + listAnnotations: { + (params?: RestEndpointMethodTypes["checks"]["listAnnotations"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * **Note:** The Checks API only looks for pushes in the repository where the check suite or check run were created. Pushes to a branch in a forked repository are not detected and return an empty `pull_requests` array. + * + * Lists check runs for a commit ref. The `ref` can be a SHA, branch name, or a tag name. GitHub Apps must have the `checks:read` permission on a private repository or pull access to a public repository to get check runs. OAuth Apps and authenticated users must have the `repo` scope to get check runs in a private repository. 
+ */ + listForRef: { + (params?: RestEndpointMethodTypes["checks"]["listForRef"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * **Note:** The Checks API only looks for pushes in the repository where the check suite or check run were created. Pushes to a branch in a forked repository are not detected and return an empty `pull_requests` array. + * + * Lists check runs for a check suite using its `id`. GitHub Apps must have the `checks:read` permission on a private repository or pull access to a public repository to get check runs. OAuth Apps and authenticated users must have the `repo` scope to get check runs in a private repository. + */ + listForSuite: { + (params?: RestEndpointMethodTypes["checks"]["listForSuite"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * **Note:** The Checks API only looks for pushes in the repository where the check suite or check run were created. Pushes to a branch in a forked repository are not detected and return an empty `pull_requests` array and a `null` value for `head_branch`. + * + * Lists check suites for a commit `ref`. The `ref` can be a SHA, branch name, or a tag name. GitHub Apps must have the `checks:read` permission on a private repository or pull access to a public repository to list check suites. OAuth Apps and authenticated users must have the `repo` scope to get check suites in a private repository. + */ + listSuitesForRef: { + (params?: RestEndpointMethodTypes["checks"]["listSuitesForRef"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Triggers GitHub to rerequest an existing check run, without pushing new code to a repository. This endpoint will trigger the [`check_run` webhook](https://docs.github.com/webhooks/event-payloads/#check_run) event with the action `rerequested`. When a check run is `rerequested`, its `status` is reset to `queued` and the `conclusion` is cleared. + * + * To rerequest a check run, your GitHub App must have the `checks:read` permission on a private repository or pull access to a public repository. + */ + rerequestRun: { + (params?: RestEndpointMethodTypes["checks"]["rerequestRun"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Triggers GitHub to rerequest an existing check suite, without pushing new code to a repository. This endpoint will trigger the [`check_suite` webhook](https://docs.github.com/webhooks/event-payloads/#check_suite) event with the action `rerequested`. When a check suite is `rerequested`, its `status` is reset to `queued` and the `conclusion` is cleared. + * + * To rerequest a check suite, your GitHub App must have the `checks:read` permission on a private repository or pull access to a public repository. + */ + rerequestSuite: { + (params?: RestEndpointMethodTypes["checks"]["rerequestSuite"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Changes the default automatic flow when creating check suites. By default, a check suite is automatically created each time code is pushed to a repository. When you disable the automatic creation of check suites, you can manually [Create a check suite](https://docs.github.com/rest/reference/checks#create-a-check-suite). 
You must have admin permissions in the repository to set preferences for check suites. + */ + setSuitesPreferences: { + (params?: RestEndpointMethodTypes["checks"]["setSuitesPreferences"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * **Note:** The Checks API only looks for pushes in the repository where the check suite or check run were created. Pushes to a branch in a forked repository are not detected and return an empty `pull_requests` array. + * + * Updates a check run for a specific commit in a repository. Your GitHub App must have the `checks:write` permission to edit check runs. + */ + update: { + (params?: RestEndpointMethodTypes["checks"]["update"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + }; + codeScanning: { + /** + * Deletes a specified code scanning analysis from a repository. For + * private repositories, you must use an access token with the `repo` scope. For public repositories, + * you must use an access token with `public_repo` scope. + * GitHub Apps must have the `security_events` write permission to use this endpoint. + * + * You can delete one analysis at a time. + * To delete a series of analyses, start with the most recent analysis and work backwards. + * Conceptually, the process is similar to the undo function in a text editor. + * + * When you list the analyses for a repository, + * one or more will be identified as deletable in the response: + * + * ``` + * "deletable": true + * ``` + * + * An analysis is deletable when it's the most recent in a set of analyses. + * Typically, a repository will have multiple sets of analyses + * for each enabled code scanning tool, + * where a set is determined by a unique combination of analysis values: + * + * * `ref` + * * `tool` + * * `analysis_key` + * * `environment` + * + * If you attempt to delete an analysis that is not the most recent in a set, + * you'll get a 400 response with the message: + * + * ``` + * Analysis specified is not deletable. + * ``` + * + * The response from a successful `DELETE` operation provides you with + * two alternative URLs for deleting the next analysis in the set: + * `next_analysis_url` and `confirm_delete_url`. + * Use the `next_analysis_url` URL if you want to avoid accidentally deleting the final analysis + * in a set. This is a useful option if you want to preserve at least one analysis + * for the specified tool in your repository. + * Use the `confirm_delete_url` URL if you are content to remove all analyses for a tool. + * When you delete the last analysis in a set, the value of `next_analysis_url` and `confirm_delete_url` + * in the 200 response is `null`. + * + * As an example of the deletion process, + * let's imagine that you added a workflow that configured a particular code scanning tool + * to analyze the code in a repository. This tool has added 15 analyses: + * 10 on the default branch, and another 5 on a topic branch. + * You therefore have two separate sets of analyses for this tool. + * You've now decided that you want to remove all of the analyses for the tool. + * To do this you must make 15 separate deletion requests. + * To start, you must find an analysis that's identified as deletable. + * Each set of analyses always has one that's identified as deletable. 
+ * Having found the deletable analysis for one of the two sets, + * delete this analysis and then continue deleting the next analysis in the set until they're all deleted. + * Then repeat the process for the second set. + * The procedure therefore consists of a nested loop: + * + * **Outer loop**: + * * List the analyses for the repository, filtered by tool. + * * Parse this list to find a deletable analysis. If found: + * + * **Inner loop**: + * * Delete the identified analysis. + * * Parse the response for the value of `confirm_delete_url` and, if found, use this in the next iteration. + * + * The above process assumes that you want to remove all trace of the tool's analyses from the GitHub user interface, for the specified repository, and it therefore uses the `confirm_delete_url` value. Alternatively, you could use the `next_analysis_url` value, which would leave the last analysis in each set undeleted to avoid removing a tool's analysis entirely. + */ + deleteAnalysis: { + (params?: RestEndpointMethodTypes["codeScanning"]["deleteAnalysis"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets a single code scanning alert. You must use an access token with the `security_events` scope to use this endpoint with private repos, the `public_repo` scope also grants permission to read security events on public repos only. GitHub Apps must have the `security_events` read permission to use this endpoint. + * + * **Deprecation notice**: + * The instances field is deprecated and will, in future, not be included in the response for this endpoint. The example response reflects this change. The same information can now be retrieved via a GET request to the URL specified by `instances_url`. + */ + getAlert: { + (params?: RestEndpointMethodTypes["codeScanning"]["getAlert"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets a specified code scanning analysis for a repository. + * You must use an access token with the `security_events` scope to use this endpoint with private repos, + * the `public_repo` scope also grants permission to read security events on public repos only. + * GitHub Apps must have the `security_events` read permission to use this endpoint. + * + * The default JSON response contains fields that describe the analysis. + * This includes the Git reference and commit SHA to which the analysis relates, + * the datetime of the analysis, the name of the code scanning tool, + * and the number of alerts. + * + * The `rules_count` field in the default response give the number of rules + * that were run in the analysis. + * For very old analyses this data is not available, + * and `0` is returned in this field. + * + * If you use the Accept header `application/sarif+json`, + * the response contains the analysis data that was uploaded. + * This is formatted as + * [SARIF version 2.1.0](https://docs.oasis-open.org/sarif/sarif/v2.1.0/cs01/sarif-v2.1.0-cs01.html). + */ + getAnalysis: { + (params?: RestEndpointMethodTypes["codeScanning"]["getAnalysis"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets information about a SARIF upload, including the status and the URL of the analysis that was uploaded so that you can retrieve details of the analysis. 
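The deletion walk described above maps onto a short loop. A hedged sketch for one set of analyses, following `confirm_delete_url` until it comes back null; repeating the outer step per set is left out, and the tool name is a placeholder:

```ts
import { getOctokit } from "@actions/github";

const octokit = getOctokit(process.env.GITHUB_TOKEN ?? ""); // token with `repo` / `security_events`

async function deleteToolAnalyses(owner: string, repo: string, tool: string) {
  // Outer step: list analyses for the tool and find one marked deletable (one per set).
  const { data: analyses } = await octokit.rest.codeScanning.listRecentAnalyses({
    owner,
    repo,
    tool_name: tool,
  });
  const deletable = analyses.find((a) => a.deletable);
  if (!deletable) return;

  // Inner loop: delete it, then keep following `confirm_delete_url` until it is null.
  let { data: result } = await octokit.rest.codeScanning.deleteAnalysis({
    owner,
    repo,
    analysis_id: deletable.id,
    confirm_delete: "true",
  });
  while (result.confirm_delete_url) {
    const next = await octokit.request({ method: "DELETE", url: result.confirm_delete_url });
    result = next.data;
  }
}
```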
For more information, see "[Get a code scanning analysis for a repository](/rest/reference/code-scanning#get-a-code-scanning-analysis-for-a-repository)." You must use an access token with the `security_events` scope to use this endpoint with private repos, the `public_repo` scope also grants permission to read security events on public repos only. GitHub Apps must have the `security_events` read permission to use this endpoint. + */ + getSarif: { + (params?: RestEndpointMethodTypes["codeScanning"]["getSarif"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists all instances of the specified code scanning alert. + * You must use an access token with the `security_events` scope to use this endpoint with private repos, + * the `public_repo` scope also grants permission to read security events on public repos only. + * GitHub Apps must have the `security_events` read permission to use this endpoint. + */ + listAlertInstances: { + (params?: RestEndpointMethodTypes["codeScanning"]["listAlertInstances"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists all code scanning alerts for the default branch (usually `main` + * or `master`) for all eligible repositories in an organization. + * To use this endpoint, you must be an administrator or security manager for the organization, and you must use an access token with the `repo` scope or `security_events` scope. + * + * GitHub Apps must have the `security_events` read permission to use this endpoint. + */ + listAlertsForOrg: { + (params?: RestEndpointMethodTypes["codeScanning"]["listAlertsForOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists all open code scanning alerts for the default branch (usually `main` + * or `master`). You must use an access token with the `security_events` scope to use + * this endpoint with private repos, the `public_repo` scope also grants permission to read + * security events on public repos only. GitHub Apps must have the `security_events` read + * permission to use this endpoint. + * + * The response includes a `most_recent_instance` object. + * This provides details of the most recent instance of this alert + * for the default branch or for the specified Git reference + * (if you used `ref` in the request). + */ + listAlertsForRepo: { + (params?: RestEndpointMethodTypes["codeScanning"]["listAlertsForRepo"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists all instances of the specified code scanning alert. + * You must use an access token with the `security_events` scope to use this endpoint with private repos, + * the `public_repo` scope also grants permission to read security events on public repos only. + * GitHub Apps must have the `security_events` read permission to use this endpoint. 
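As a companion to the alert endpoints above, a hedged sketch that lists open alerts and dismisses one by number; the dismissal reason is one of the documented enum values, and the alert number is a placeholder:

```ts
import { getOctokit } from "@actions/github";

const octokit = getOctokit(process.env.GITHUB_TOKEN ?? ""); // token with the `security_events` scope

async function dismissAlert(owner: string, repo: string, alertNumber: number) {
  // `most_recent_instance` on each alert says where it last showed up.
  const { data: alerts } = await octokit.rest.codeScanning.listAlertsForRepo({
    owner,
    repo,
    state: "open",
  });
  console.log(`${alerts.length} open alerts`);

  await octokit.rest.codeScanning.updateAlert({
    owner,
    repo,
    alert_number: alertNumber,
    state: "dismissed",
    dismissed_reason: "won't fix", // one of the documented enum values
  });
}
```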
+ * @deprecated octokit.rest.codeScanning.listAlertsInstances() has been renamed to octokit.rest.codeScanning.listAlertInstances() (2021-04-30) + */ + listAlertsInstances: { + (params?: RestEndpointMethodTypes["codeScanning"]["listAlertsInstances"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists the details of all code scanning analyses for a repository, + * starting with the most recent. + * The response is paginated and you can use the `page` and `per_page` parameters + * to list the analyses you're interested in. + * By default 30 analyses are listed per page. + * + * The `rules_count` field in the response give the number of rules + * that were run in the analysis. + * For very old analyses this data is not available, + * and `0` is returned in this field. + * + * You must use an access token with the `security_events` scope to use this endpoint with private repos, + * the `public_repo` scope also grants permission to read security events on public repos only. + * GitHub Apps must have the `security_events` read permission to use this endpoint. + * + * **Deprecation notice**: + * The `tool_name` field is deprecated and will, in future, not be included in the response for this endpoint. The example response reflects this change. The tool name can now be found inside the `tool` field. + */ + listRecentAnalyses: { + (params?: RestEndpointMethodTypes["codeScanning"]["listRecentAnalyses"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Updates the status of a single code scanning alert. You must use an access token with the `security_events` scope to use this endpoint with private repositories. You can also use tokens with the `public_repo` scope for public repositories only. GitHub Apps must have the `security_events` write permission to use this endpoint. + */ + updateAlert: { + (params?: RestEndpointMethodTypes["codeScanning"]["updateAlert"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Uploads SARIF data containing the results of a code scanning analysis to make the results available in a repository. You must use an access token with the `security_events` scope to use this endpoint for private repositories. You can also use tokens with the `public_repo` scope for public repositories only. GitHub Apps must have the `security_events` write permission to use this endpoint. + * + * There are two places where you can upload code scanning results. + * - If you upload to a pull request, for example `--ref refs/pull/42/merge` or `--ref refs/pull/42/head`, then the results appear as alerts in a pull request check. For more information, see "[Triaging code scanning alerts in pull requests](/code-security/secure-coding/triaging-code-scanning-alerts-in-pull-requests)." + * - If you upload to a branch, for example `--ref refs/heads/my-branch`, then the results appear in the **Security** tab for your repository. For more information, see "[Managing code scanning alerts for your repository](/code-security/secure-coding/managing-code-scanning-alerts-for-your-repository#viewing-the-alerts-for-a-repository)." + * + * You must compress the SARIF-formatted analysis data that you want to upload, using `gzip`, and then encode it as a Base64 format string. 
For example: + * + * ``` + * gzip -c analysis-data.sarif | base64 -w0 + * ``` + * + * SARIF upload supports a maximum of 5000 results per analysis run. Any results over this limit are ignored and any SARIF uploads with more than 25,000 results are rejected. Typically, but not necessarily, a SARIF file contains a single run of a single tool. If a code scanning tool generates too many results, you should update the analysis configuration to run only the most important rules or queries. + * + * The `202 Accepted`, response includes an `id` value. + * You can use this ID to check the status of the upload by using this for the `/sarifs/{sarif_id}` endpoint. + * For more information, see "[Get information about a SARIF upload](/rest/reference/code-scanning#get-information-about-a-sarif-upload)." + */ + uploadSarif: { + (params?: RestEndpointMethodTypes["codeScanning"]["uploadSarif"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + }; + codesOfConduct: { + getAllCodesOfConduct: { + (params?: RestEndpointMethodTypes["codesOfConduct"]["getAllCodesOfConduct"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + getConductCode: { + (params?: RestEndpointMethodTypes["codesOfConduct"]["getConductCode"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + }; + codespaces: { + /** + * Adds a repository to the selected repositories for a user's codespace secret. + * You must authenticate using an access token with the `codespace` or `codespace:secrets` scope to use this endpoint. User must have Codespaces access to use this endpoint. + * GitHub Apps must have write access to the `codespaces_user_secrets` user permission and write access to the `codespaces_secrets` repository permission on the referenced repository to use this endpoint. + */ + addRepositoryForSecretForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["codespaces"]["addRepositoryForSecretForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * List the machine types a codespace can transition to use. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have read access to the `codespaces_metadata` repository permission to use this endpoint. + */ + codespaceMachinesForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["codespaces"]["codespaceMachinesForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Creates a new codespace, owned by the authenticated user. + * + * This endpoint requires either a `repository_id` OR a `pull_request` but not both. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have write access to the `codespaces` repository permission to use this endpoint. + */ + createForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["codespaces"]["createForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Creates or updates a repository secret with an encrypted value. 
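The gzip-and-Base64 step above is the part that usually goes wrong, so here is a hedged Node.js sketch of the full upload using the built-in zlib module; the file path, ref, and commit SHA are placeholders:

```ts
import { readFileSync } from "node:fs";
import { gzipSync } from "node:zlib";
import { getOctokit } from "@actions/github";

const octokit = getOctokit(process.env.GITHUB_TOKEN ?? ""); // token with the `security_events` scope

async function uploadAnalysis(owner: string, repo: string, commitSha: string, ref: string) {
  // Gzip the SARIF file and Base64-encode it, as required above.
  const sarif = gzipSync(readFileSync("analysis-data.sarif")).toString("base64");

  const { data } = await octokit.rest.codeScanning.uploadSarif({
    owner,
    repo,
    commit_sha: commitSha,
    ref, // e.g. "refs/heads/my-branch" or "refs/pull/42/merge"
    sarif,
  });

  // The 202 response carries an id that can be polled via getSarif.
  console.log("SARIF upload id:", data.id);
}
```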
Encrypt your secret using + * [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages). You must authenticate using an access + * token with the `repo` scope to use this endpoint. GitHub Apps must have the `codespaces_secrets` repository + * permission to use this endpoint. + * + * #### Example of encrypting a secret using Node.js + * + * Encrypt your secret using the [tweetsodium](https://github.com/github/tweetsodium) library. + * + * ``` + * const sodium = require('tweetsodium'); + * + * const key = "base64-encoded-public-key"; + * const value = "plain-text-secret"; + * + * // Convert the message and key to Uint8Array's (Buffer implements that interface) + * const messageBytes = Buffer.from(value); + * const keyBytes = Buffer.from(key, 'base64'); + * + * // Encrypt using LibSodium. + * const encryptedBytes = sodium.seal(messageBytes, keyBytes); + * + * // Base64 the encrypted secret + * const encrypted = Buffer.from(encryptedBytes).toString('base64'); + * + * console.log(encrypted); + * ``` + * + * + * #### Example of encrypting a secret using Python + * + * Encrypt your secret using [pynacl](https://pynacl.readthedocs.io/en/latest/public/#nacl-public-sealedbox) with Python 3. + * + * ``` + * from base64 import b64encode + * from nacl import encoding, public + * + * def encrypt(public_key: str, secret_value: str) -> str: + * """Encrypt a Unicode string using the public key.""" + * public_key = public.PublicKey(public_key.encode("utf-8"), encoding.Base64Encoder()) + * sealed_box = public.SealedBox(public_key) + * encrypted = sealed_box.encrypt(secret_value.encode("utf-8")) + * return b64encode(encrypted).decode("utf-8") + * ``` + * + * #### Example of encrypting a secret using C# + * + * Encrypt your secret using the [Sodium.Core](https://www.nuget.org/packages/Sodium.Core/) package. + * + * ``` + * var secretValue = System.Text.Encoding.UTF8.GetBytes("mySecret"); + * var publicKey = Convert.FromBase64String("2Sg8iYjAxxmI2LvUXpJjkYrMxURPc8r+dB7TJyvvcCU="); + * + * var sealedPublicKeyBox = Sodium.SealedPublicKeyBox.Create(secretValue, publicKey); + * + * Console.WriteLine(Convert.ToBase64String(sealedPublicKeyBox)); + * ``` + * + * #### Example of encrypting a secret using Ruby + * + * Encrypt your secret using the [rbnacl](https://github.com/RubyCrypto/rbnacl) gem. + * + * ```ruby + * require "rbnacl" + * require "base64" + * + * key = Base64.decode64("+ZYvJDZMHUfBkJdyq5Zm9SKqeuBQ4sj+6sfjlH4CgG0=") + * public_key = RbNaCl::PublicKey.new(key) + * + * box = RbNaCl::Boxes::Sealed.from_public_key(public_key) + * encrypted_secret = box.encrypt("my_secret") + * + * # Print the base64 encoded secret + * puts Base64.strict_encode64(encrypted_secret) + * ``` + */ + createOrUpdateRepoSecret: { + (params?: RestEndpointMethodTypes["codespaces"]["createOrUpdateRepoSecret"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Creates or updates a secret for a user's codespace with an encrypted value. Encrypt your secret using + * [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages). + * + * You must authenticate using an access token with the `codespace` or `codespace:secrets` scope to use this endpoint. User must also have Codespaces access to use this endpoint. + * + * GitHub Apps must have read access to the `codespaces_user_secrets` user permission and `codespaces_secrets` repository permission on all referenced repositories to use this endpoint. 
+ * + * #### Example encrypting a secret using Node.js + * + * Encrypt your secret using the [tweetsodium](https://github.com/github/tweetsodium) library. + * + * ``` + * const sodium = require('tweetsodium'); + * + * const key = "base64-encoded-public-key"; + * const value = "plain-text-secret"; + * + * // Convert the message and key to Uint8Array's (Buffer implements that interface) + * const messageBytes = Buffer.from(value); + * const keyBytes = Buffer.from(key, 'base64'); + * + * // Encrypt using LibSodium. + * const encryptedBytes = sodium.seal(messageBytes, keyBytes); + * + * // Base64 the encrypted secret + * const encrypted = Buffer.from(encryptedBytes).toString('base64'); + * + * console.log(encrypted); + * ``` + * + * + * #### Example encrypting a secret using Python + * + * Encrypt your secret using [pynacl](https://pynacl.readthedocs.io/en/latest/public/#nacl-public-sealedbox) with Python 3. + * + * ``` + * from base64 import b64encode + * from nacl import encoding, public + * + * def encrypt(public_key: str, secret_value: str) -> str: + * """Encrypt a Unicode string using the public key.""" + * public_key = public.PublicKey(public_key.encode("utf-8"), encoding.Base64Encoder()) + * sealed_box = public.SealedBox(public_key) + * encrypted = sealed_box.encrypt(secret_value.encode("utf-8")) + * return b64encode(encrypted).decode("utf-8") + * ``` + * + * #### Example encrypting a secret using C# + * + * Encrypt your secret using the [Sodium.Core](https://www.nuget.org/packages/Sodium.Core/) package. + * + * ``` + * var secretValue = System.Text.Encoding.UTF8.GetBytes("mySecret"); + * var publicKey = Convert.FromBase64String("2Sg8iYjAxxmI2LvUXpJjkYrMxURPc8r+dB7TJyvvcCU="); + * + * var sealedPublicKeyBox = Sodium.SealedPublicKeyBox.Create(secretValue, publicKey); + * + * Console.WriteLine(Convert.ToBase64String(sealedPublicKeyBox)); + * ``` + * + * #### Example encrypting a secret using Ruby + * + * Encrypt your secret using the [rbnacl](https://github.com/RubyCrypto/rbnacl) gem. + * + * ```ruby + * require "rbnacl" + * require "base64" + * + * key = Base64.decode64("+ZYvJDZMHUfBkJdyq5Zm9SKqeuBQ4sj+6sfjlH4CgG0=") + * public_key = RbNaCl::PublicKey.new(key) + * + * box = RbNaCl::Boxes::Sealed.from_public_key(public_key) + * encrypted_secret = box.encrypt("my_secret") + * + * # Print the base64 encoded secret + * puts Base64.strict_encode64(encrypted_secret) + * ``` + */ + createOrUpdateSecretForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["codespaces"]["createOrUpdateSecretForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Creates a codespace owned by the authenticated user for the specified pull request. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have write access to the `codespaces` repository permission to use this endpoint. + */ + createWithPrForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["codespaces"]["createWithPrForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Creates a codespace owned by the authenticated user in the specified repository. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have write access to the `codespaces` repository permission to use this endpoint. 
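+ *
+ * **Editor's sketch (not from the upstream documentation):** a minimal TypeScript call, assuming the `@octokit/rest` client, a token with the `codespace` scope, and placeholder owner/repo/machine values; the optional parameter names are assumed from the underlying REST endpoint.
+ *
+ * ```typescript
+ * import { Octokit } from "@octokit/rest";
+ *
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ *
+ * const { data: codespace } = await octokit.rest.codespaces.createWithRepoForAuthenticatedUser({
+ *   owner: "octocat",          // placeholder repository owner
+ *   repo: "hello-world",       // placeholder repository name
+ *   ref: "main",               // optional: branch to base the codespace on
+ *   machine: "basicLinux32gb", // optional: a machine type, e.g. one returned by repoMachinesForAuthenticatedUser
+ * });
+ *
+ * console.log(codespace.name, codespace.web_url);
+ * ```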
+ */ + createWithRepoForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["codespaces"]["createWithRepoForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Deletes a user's codespace. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have write access to the `codespaces` repository permission to use this endpoint. + */ + deleteForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["codespaces"]["deleteForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Deletes a user's codespace. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + deleteFromOrganization: { + (params?: RestEndpointMethodTypes["codespaces"]["deleteFromOrganization"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Deletes a secret in a repository using the secret name. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `codespaces_secrets` repository permission to use this endpoint. + */ + deleteRepoSecret: { + (params?: RestEndpointMethodTypes["codespaces"]["deleteRepoSecret"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Deletes a secret from a user's codespaces using the secret name. Deleting the secret will remove access from all codespaces that were allowed to access the secret. + * + * You must authenticate using an access token with the `codespace` or `codespace:secrets` scope to use this endpoint. User must have Codespaces access to use this endpoint. + * + * GitHub Apps must have write access to the `codespaces_user_secrets` user permission to use this endpoint. + */ + deleteSecretForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["codespaces"]["deleteSecretForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Triggers an export of the specified codespace and returns a URL and ID where the status of the export can be monitored. + * + * You must authenticate using a personal access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have write access to the `codespaces_lifecycle_admin` repository permission to use this endpoint. + */ + exportForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["codespaces"]["exportForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets information about an export of a codespace. + * + * You must authenticate using a personal access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have read access to the `codespaces_lifecycle_admin` repository permission to use this endpoint. + */ + getExportDetailsForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["codespaces"]["getExportDetailsForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets information about a user's codespace. 
+ * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have read access to the `codespaces` repository permission to use this endpoint. + */ + getForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["codespaces"]["getForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets your public key, which you need to encrypt secrets. You need to encrypt a secret before you can create or update secrets. + * + * You must authenticate using an access token with the `codespace` or `codespace:secrets` scope to use this endpoint. User must have Codespaces access to use this endpoint. + * + * GitHub Apps must have read access to the `codespaces_user_secrets` user permission to use this endpoint. + */ + getPublicKeyForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["codespaces"]["getPublicKeyForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets your public key, which you need to encrypt secrets. You need to encrypt a secret before you can create or update secrets. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `codespaces_secrets` repository permission to use this endpoint. + */ + getRepoPublicKey: { + (params?: RestEndpointMethodTypes["codespaces"]["getRepoPublicKey"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets a single repository secret without revealing its encrypted value. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `codespaces_secrets` repository permission to use this endpoint. + */ + getRepoSecret: { + (params?: RestEndpointMethodTypes["codespaces"]["getRepoSecret"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets a secret available to a user's codespaces without revealing its encrypted value. + * + * You must authenticate using an access token with the `codespace` or `codespace:secrets` scope to use this endpoint. User must have Codespaces access to use this endpoint. + * + * GitHub Apps must have read access to the `codespaces_user_secrets` user permission to use this endpoint. + */ + getSecretForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["codespaces"]["getSecretForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists the devcontainer.json files associated with a specified repository and the authenticated user. These files + * specify launchpoint configurations for codespaces created within the repository. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have read access to the `codespaces_metadata` repository permission to use this endpoint. 
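+ *
+ * **Editor's sketch (not from the upstream documentation):** listing the devcontainer paths for a repository, assuming the `@octokit/rest` client and a token with the `codespace` scope; owner and repo are placeholders.
+ *
+ * ```typescript
+ * import { Octokit } from "@octokit/rest";
+ *
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ *
+ * const { data } = await octokit.rest.codespaces.listDevcontainersInRepositoryForAuthenticatedUser({
+ *   owner: "octocat",
+ *   repo: "hello-world",
+ * });
+ *
+ * // Each entry points at a devcontainer.json that can serve as a codespace launchpoint.
+ * for (const devcontainer of data.devcontainers) {
+ *   console.log(devcontainer.path);
+ * }
+ * ```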
+ */ + listDevcontainersInRepositoryForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["codespaces"]["listDevcontainersInRepositoryForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists the authenticated user's codespaces. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have read access to the `codespaces` repository permission to use this endpoint. + */ + listForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["codespaces"]["listForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists the codespaces associated to a specified organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + listInOrganization: { + (params?: RestEndpointMethodTypes["codespaces"]["listInOrganization"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists the codespaces associated to a specified repository and the authenticated user. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have read access to the `codespaces` repository permission to use this endpoint. + */ + listInRepositoryForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["codespaces"]["listInRepositoryForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists all secrets available in a repository without revealing their encrypted values. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `codespaces_secrets` repository permission to use this endpoint. + */ + listRepoSecrets: { + (params?: RestEndpointMethodTypes["codespaces"]["listRepoSecrets"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * List the repositories that have been granted the ability to use a user's codespace secret. + * + * You must authenticate using an access token with the `codespace` or `codespace:secrets` scope to use this endpoint. User must have Codespaces access to use this endpoint. + * + * GitHub Apps must have read access to the `codespaces_user_secrets` user permission and write access to the `codespaces_secrets` repository permission on all referenced repositories to use this endpoint. + */ + listRepositoriesForSecretForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["codespaces"]["listRepositoriesForSecretForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists all secrets available for a user's Codespaces without revealing their + * encrypted values. + * + * You must authenticate using an access token with the `codespace` or `codespace:secrets` scope to use this endpoint. User must have Codespaces access to use this endpoint. + * + * GitHub Apps must have read access to the `codespaces_user_secrets` user permission to use this endpoint. 
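+ *
+ * **Editor's sketch (not from the upstream documentation):** listing the names of the authenticated user's Codespaces secrets, assuming the `@octokit/rest` client and a token with the `codespace:secrets` scope.
+ *
+ * ```typescript
+ * import { Octokit } from "@octokit/rest";
+ *
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ *
+ * const { data } = await octokit.rest.codespaces.listSecretsForAuthenticatedUser({ per_page: 100 });
+ *
+ * // Only metadata is returned; encrypted values are never exposed.
+ * console.log(data.total_count, data.secrets.map((secret) => secret.name));
+ * ```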
+ */ + listSecretsForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["codespaces"]["listSecretsForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Removes a repository from the selected repositories for a user's codespace secret. + * You must authenticate using an access token with the `codespace` or `codespace:secrets` scope to use this endpoint. User must have Codespaces access to use this endpoint. + * GitHub Apps must have write access to the `codespaces_user_secrets` user permission to use this endpoint. + */ + removeRepositoryForSecretForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["codespaces"]["removeRepositoryForSecretForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * List the machine types available for a given repository based on its configuration. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have write access to the `codespaces_metadata` repository permission to use this endpoint. + */ + repoMachinesForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["codespaces"]["repoMachinesForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Select the repositories that will use a user's codespace secret. + * + * You must authenticate using an access token with the `codespace` or `codespace:secrets` scope to use this endpoint. User must have Codespaces access to use this endpoint. + * + * GitHub Apps must have write access to the `codespaces_user_secrets` user permission and write access to the `codespaces_secrets` repository permission on all referenced repositories to use this endpoint. + */ + setRepositoriesForSecretForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["codespaces"]["setRepositoriesForSecretForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Starts a user's codespace. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have write access to the `codespaces_lifecycle_admin` repository permission to use this endpoint. + */ + startForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["codespaces"]["startForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Stops a user's codespace. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have write access to the `codespaces_lifecycle_admin` repository permission to use this endpoint. + */ + stopForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["codespaces"]["stopForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Stops a user's codespace. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. 
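+ *
+ * **Editor's sketch (not from the upstream documentation):** stopping a member's codespace from an organization context, assuming the `@octokit/rest` client, a token with the `admin:org` scope, and placeholder org/user/codespace names taken from the underlying REST endpoint's parameters.
+ *
+ * ```typescript
+ * import { Octokit } from "@octokit/rest";
+ *
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ *
+ * const { data: codespace } = await octokit.rest.codespaces.stopInOrganization({
+ *   org: "my-org",                               // placeholder organization
+ *   username: "octocat",                         // placeholder codespace owner
+ *   codespace_name: "literate-space-guacamole",  // placeholder codespace name
+ * });
+ *
+ * console.log(codespace.state); // the returned codespace reflects the stop request
+ * ```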
+ */ + stopInOrganization: { + (params?: RestEndpointMethodTypes["codespaces"]["stopInOrganization"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Updates a codespace owned by the authenticated user. Currently only the codespace's machine type and recent folders can be modified using this endpoint. + * + * If you specify a new machine type it will be applied the next time your codespace is started. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have write access to the `codespaces` repository permission to use this endpoint. + */ + updateForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["codespaces"]["updateForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + }; + dependabot: { + /** + * Adds a repository to an organization secret when the `visibility` for repository access is set to `selected`. The visibility is set when you [Create or update an organization secret](https://docs.github.com/rest/reference/dependabot#create-or-update-an-organization-secret). You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` organization permission to use this endpoint. + */ + addSelectedRepoToOrgSecret: { + (params?: RestEndpointMethodTypes["dependabot"]["addSelectedRepoToOrgSecret"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Creates or updates an organization secret with an encrypted value. Encrypt your secret using + * [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages). You must authenticate using an access + * token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` organization + * permission to use this endpoint. + * + * #### Example encrypting a secret using Node.js + * + * Encrypt your secret using the [tweetsodium](https://github.com/github/tweetsodium) library. + * + * ``` + * const sodium = require('tweetsodium'); + * + * const key = "base64-encoded-public-key"; + * const value = "plain-text-secret"; + * + * // Convert the message and key to Uint8Array's (Buffer implements that interface) + * const messageBytes = Buffer.from(value); + * const keyBytes = Buffer.from(key, 'base64'); + * + * // Encrypt using LibSodium. + * const encryptedBytes = sodium.seal(messageBytes, keyBytes); + * + * // Base64 the encrypted secret + * const encrypted = Buffer.from(encryptedBytes).toString('base64'); + * + * console.log(encrypted); + * ``` + * + * + * #### Example encrypting a secret using Python + * + * Encrypt your secret using [pynacl](https://pynacl.readthedocs.io/en/latest/public/#nacl-public-sealedbox) with Python 3. 
+ * + * ``` + * from base64 import b64encode + * from nacl import encoding, public + * + * def encrypt(public_key: str, secret_value: str) -> str: + * """Encrypt a Unicode string using the public key.""" + * public_key = public.PublicKey(public_key.encode("utf-8"), encoding.Base64Encoder()) + * sealed_box = public.SealedBox(public_key) + * encrypted = sealed_box.encrypt(secret_value.encode("utf-8")) + * return b64encode(encrypted).decode("utf-8") + * ``` + * + * #### Example encrypting a secret using C# + * + * Encrypt your secret using the [Sodium.Core](https://www.nuget.org/packages/Sodium.Core/) package. + * + * ``` + * var secretValue = System.Text.Encoding.UTF8.GetBytes("mySecret"); + * var publicKey = Convert.FromBase64String("2Sg8iYjAxxmI2LvUXpJjkYrMxURPc8r+dB7TJyvvcCU="); + * + * var sealedPublicKeyBox = Sodium.SealedPublicKeyBox.Create(secretValue, publicKey); + * + * Console.WriteLine(Convert.ToBase64String(sealedPublicKeyBox)); + * ``` + * + * #### Example encrypting a secret using Ruby + * + * Encrypt your secret using the [rbnacl](https://github.com/RubyCrypto/rbnacl) gem. + * + * ```ruby + * require "rbnacl" + * require "base64" + * + * key = Base64.decode64("+ZYvJDZMHUfBkJdyq5Zm9SKqeuBQ4sj+6sfjlH4CgG0=") + * public_key = RbNaCl::PublicKey.new(key) + * + * box = RbNaCl::Boxes::Sealed.from_public_key(public_key) + * encrypted_secret = box.encrypt("my_secret") + * + * # Print the base64 encoded secret + * puts Base64.strict_encode64(encrypted_secret) + * ``` + */ + createOrUpdateOrgSecret: { + (params?: RestEndpointMethodTypes["dependabot"]["createOrUpdateOrgSecret"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Creates or updates a repository secret with an encrypted value. Encrypt your secret using + * [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages). You must authenticate using an access + * token with the `repo` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` repository + * permission to use this endpoint. + * + * #### Example encrypting a secret using Node.js + * + * Encrypt your secret using the [tweetsodium](https://github.com/github/tweetsodium) library. + * + * ``` + * const sodium = require('tweetsodium'); + * + * const key = "base64-encoded-public-key"; + * const value = "plain-text-secret"; + * + * // Convert the message and key to Uint8Array's (Buffer implements that interface) + * const messageBytes = Buffer.from(value); + * const keyBytes = Buffer.from(key, 'base64'); + * + * // Encrypt using LibSodium. + * const encryptedBytes = sodium.seal(messageBytes, keyBytes); + * + * // Base64 the encrypted secret + * const encrypted = Buffer.from(encryptedBytes).toString('base64'); + * + * console.log(encrypted); + * ``` + * + * + * #### Example encrypting a secret using Python + * + * Encrypt your secret using [pynacl](https://pynacl.readthedocs.io/en/latest/public/#nacl-public-sealedbox) with Python 3. 
+ * + * ``` + * from base64 import b64encode + * from nacl import encoding, public + * + * def encrypt(public_key: str, secret_value: str) -> str: + * """Encrypt a Unicode string using the public key.""" + * public_key = public.PublicKey(public_key.encode("utf-8"), encoding.Base64Encoder()) + * sealed_box = public.SealedBox(public_key) + * encrypted = sealed_box.encrypt(secret_value.encode("utf-8")) + * return b64encode(encrypted).decode("utf-8") + * ``` + * + * #### Example encrypting a secret using C# + * + * Encrypt your secret using the [Sodium.Core](https://www.nuget.org/packages/Sodium.Core/) package. + * + * ``` + * var secretValue = System.Text.Encoding.UTF8.GetBytes("mySecret"); + * var publicKey = Convert.FromBase64String("2Sg8iYjAxxmI2LvUXpJjkYrMxURPc8r+dB7TJyvvcCU="); + * + * var sealedPublicKeyBox = Sodium.SealedPublicKeyBox.Create(secretValue, publicKey); + * + * Console.WriteLine(Convert.ToBase64String(sealedPublicKeyBox)); + * ``` + * + * #### Example encrypting a secret using Ruby + * + * Encrypt your secret using the [rbnacl](https://github.com/RubyCrypto/rbnacl) gem. + * + * ```ruby + * require "rbnacl" + * require "base64" + * + * key = Base64.decode64("+ZYvJDZMHUfBkJdyq5Zm9SKqeuBQ4sj+6sfjlH4CgG0=") + * public_key = RbNaCl::PublicKey.new(key) + * + * box = RbNaCl::Boxes::Sealed.from_public_key(public_key) + * encrypted_secret = box.encrypt("my_secret") + * + * # Print the base64 encoded secret + * puts Base64.strict_encode64(encrypted_secret) + * ``` + */ + createOrUpdateRepoSecret: { + (params?: RestEndpointMethodTypes["dependabot"]["createOrUpdateRepoSecret"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Deletes a secret in an organization using the secret name. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` organization permission to use this endpoint. + */ + deleteOrgSecret: { + (params?: RestEndpointMethodTypes["dependabot"]["deleteOrgSecret"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Deletes a secret in a repository using the secret name. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` repository permission to use this endpoint. + */ + deleteRepoSecret: { + (params?: RestEndpointMethodTypes["dependabot"]["deleteRepoSecret"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets your public key, which you need to encrypt secrets. You need to encrypt a secret before you can create or update secrets. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` organization permission to use this endpoint. + */ + getOrgPublicKey: { + (params?: RestEndpointMethodTypes["dependabot"]["getOrgPublicKey"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets a single organization secret without revealing its encrypted value. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` organization permission to use this endpoint. 
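+ *
+ * **Editor's sketch (not from the upstream documentation):** fetching the metadata of one organization Dependabot secret, assuming the `@octokit/rest` client, a token with the `admin:org` scope, and placeholder org/secret names.
+ *
+ * ```typescript
+ * import { Octokit } from "@octokit/rest";
+ *
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ *
+ * const { data: secret } = await octokit.rest.dependabot.getOrgSecret({
+ *   org: "my-org",
+ *   secret_name: "NPM_TOKEN",
+ * });
+ *
+ * // Metadata only; the encrypted value is not part of the response.
+ * console.log(secret.name, secret.visibility, secret.updated_at);
+ * ```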
+ */ + getOrgSecret: { + (params?: RestEndpointMethodTypes["dependabot"]["getOrgSecret"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets your public key, which you need to encrypt secrets. You need to encrypt a secret before you can create or update secrets. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `dependabot_secrets` repository permission to use this endpoint. + */ + getRepoPublicKey: { + (params?: RestEndpointMethodTypes["dependabot"]["getRepoPublicKey"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets a single repository secret without revealing its encrypted value. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` repository permission to use this endpoint. + */ + getRepoSecret: { + (params?: RestEndpointMethodTypes["dependabot"]["getRepoSecret"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists all secrets available in an organization without revealing their encrypted values. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` organization permission to use this endpoint. + */ + listOrgSecrets: { + (params?: RestEndpointMethodTypes["dependabot"]["listOrgSecrets"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists all secrets available in a repository without revealing their encrypted values. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` repository permission to use this endpoint. + */ + listRepoSecrets: { + (params?: RestEndpointMethodTypes["dependabot"]["listRepoSecrets"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists all repositories that have been selected when the `visibility` for repository access to a secret is set to `selected`. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` organization permission to use this endpoint. + */ + listSelectedReposForOrgSecret: { + (params?: RestEndpointMethodTypes["dependabot"]["listSelectedReposForOrgSecret"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Removes a repository from an organization secret when the `visibility` for repository access is set to `selected`. The visibility is set when you [Create or update an organization secret](https://docs.github.com/rest/reference/dependabot#create-or-update-an-organization-secret). You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` organization permission to use this endpoint. 
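+ *
+ * **Editor's sketch (not from the upstream documentation):** revoking one repository's access to a `selected`-visibility organization secret, assuming the `@octokit/rest` client, a token with the `admin:org` scope, and placeholder identifiers; the repository is referenced by its numeric ID.
+ *
+ * ```typescript
+ * import { Octokit } from "@octokit/rest";
+ *
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ *
+ * const response = await octokit.rest.dependabot.removeSelectedRepoFromOrgSecret({
+ *   org: "my-org",
+ *   secret_name: "NPM_TOKEN",
+ *   repository_id: 1296269, // placeholder numeric repository ID
+ * });
+ *
+ * console.log(response.status); // a successful removal returns an empty 204 response
+ * ```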
+ */ + removeSelectedRepoFromOrgSecret: { + (params?: RestEndpointMethodTypes["dependabot"]["removeSelectedRepoFromOrgSecret"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Replaces all repositories for an organization secret when the `visibility` for repository access is set to `selected`. The visibility is set when you [Create or update an organization secret](https://docs.github.com/rest/reference/dependabot#create-or-update-an-organization-secret). You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` organization permission to use this endpoint. + */ + setSelectedReposForOrgSecret: { + (params?: RestEndpointMethodTypes["dependabot"]["setSelectedReposForOrgSecret"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + }; + dependencyGraph: { + /** + * Create a new snapshot of a repository's dependencies. You must authenticate using an access token with the `repo` scope to use this endpoint for a repository that the requesting user has access to. + */ + createRepositorySnapshot: { + (params?: RestEndpointMethodTypes["dependencyGraph"]["createRepositorySnapshot"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets the diff of the dependency changes between two commits of a repository, based on the changes to the dependency manifests made in those commits. + */ + diffRange: { + (params?: RestEndpointMethodTypes["dependencyGraph"]["diffRange"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + }; + emojis: { + /** + * Lists all the emojis available to use on GitHub. + */ + get: { + (params?: RestEndpointMethodTypes["emojis"]["get"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + }; + enterpriseAdmin: { + /** + * Add custom labels to a self-hosted runner configured in an enterprise. + * + * You must authenticate using an access token with the `manage_runners:enterprise` scope to use this endpoint. + */ + addCustomLabelsToSelfHostedRunnerForEnterprise: { + (params?: RestEndpointMethodTypes["enterpriseAdmin"]["addCustomLabelsToSelfHostedRunnerForEnterprise"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Removes an organization from the list of selected organizations that are enabled for GitHub Actions in an enterprise. To use this endpoint, the enterprise permission policy for `enabled_organizations` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an enterprise](#set-github-actions-permissions-for-an-enterprise)." + * + * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. + */ + disableSelectedOrganizationGithubActionsEnterprise: { + (params?: RestEndpointMethodTypes["enterpriseAdmin"]["disableSelectedOrganizationGithubActionsEnterprise"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Adds an organization to the list of selected organizations that are enabled for GitHub Actions in an enterprise. 
To use this endpoint, the enterprise permission policy for `enabled_organizations` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an enterprise](#set-github-actions-permissions-for-an-enterprise)." + * + * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. + */ + enableSelectedOrganizationGithubActionsEnterprise: { + (params?: RestEndpointMethodTypes["enterpriseAdmin"]["enableSelectedOrganizationGithubActionsEnterprise"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets the selected actions and reusable workflows that are allowed in an enterprise. To use this endpoint, the enterprise permission policy for `allowed_actions` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an enterprise](#set-github-actions-permissions-for-an-enterprise)." + * + * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. + */ + getAllowedActionsEnterprise: { + (params?: RestEndpointMethodTypes["enterpriseAdmin"]["getAllowedActionsEnterprise"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets the GitHub Actions permissions policy for organizations and allowed actions and reusable workflows in an enterprise. + * + * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. + */ + getGithubActionsPermissionsEnterprise: { + (params?: RestEndpointMethodTypes["enterpriseAdmin"]["getGithubActionsPermissionsEnterprise"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Returns aggregate usage metrics for your GitHub Enterprise Server 3.5+ instance for a specified time period up to 365 days. + * + * To use this endpoint, your GitHub Enterprise Server instance must be connected to GitHub Enterprise Cloud using GitHub Connect. You must enable Server Statistics, and for the API request provide your enterprise account name or organization name connected to the GitHub Enterprise Server. For more information, see "[Enabling Server Statistics for your enterprise](/admin/configuration/configuring-github-connect/enabling-server-statistics-for-your-enterprise)" in the GitHub Enterprise Server documentation. + * + * You'll need to use a personal access token: + * - If you connected your GitHub Enterprise Server to an enterprise account and enabled Server Statistics, you'll need a personal access token with the `read:enterprise` permission. + * - If you connected your GitHub Enterprise Server to an organization account and enabled Server Statistics, you'll need a personal access token with the `read:org` permission. + * + * For more information on creating a personal access token, see "[Creating a personal access token](/authentication/keeping-your-account-and-data-secure/creating-a-personal-access-token)." + */ + getServerStatistics: { + (params?: RestEndpointMethodTypes["enterpriseAdmin"]["getServerStatistics"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists all labels for a self-hosted runner configured in an enterprise. + * + * You must authenticate using an access token with the `manage_runners:enterprise` scope to use this endpoint. 
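+ *
+ * **Editor's sketch (not from the upstream documentation):** reading the labels of one enterprise runner, assuming the `@octokit/rest` client, a token with the `manage_runners:enterprise` scope, and a placeholder enterprise slug and runner ID.
+ *
+ * ```typescript
+ * import { Octokit } from "@octokit/rest";
+ *
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ *
+ * const { data } = await octokit.rest.enterpriseAdmin.listLabelsForSelfHostedRunnerForEnterprise({
+ *   enterprise: "my-enterprise", // placeholder enterprise slug
+ *   runner_id: 42,               // placeholder runner ID
+ * });
+ *
+ * // Labels carry a `type` of "read-only" (built in) or "custom".
+ * console.log(data.labels.map((label) => `${label.name} (${label.type})`));
+ * ```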
+ */ + listLabelsForSelfHostedRunnerForEnterprise: { + (params?: RestEndpointMethodTypes["enterpriseAdmin"]["listLabelsForSelfHostedRunnerForEnterprise"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists the organizations that are selected to have GitHub Actions enabled in an enterprise. To use this endpoint, the enterprise permission policy for `enabled_organizations` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an enterprise](#set-github-actions-permissions-for-an-enterprise)." + * + * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. + */ + listSelectedOrganizationsEnabledGithubActionsEnterprise: { + (params?: RestEndpointMethodTypes["enterpriseAdmin"]["listSelectedOrganizationsEnabledGithubActionsEnterprise"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Remove all custom labels from a self-hosted runner configured in an + * enterprise. Returns the remaining read-only labels from the runner. + * + * You must authenticate using an access token with the `manage_runners:enterprise` scope to use this endpoint. + */ + removeAllCustomLabelsFromSelfHostedRunnerForEnterprise: { + (params?: RestEndpointMethodTypes["enterpriseAdmin"]["removeAllCustomLabelsFromSelfHostedRunnerForEnterprise"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Remove a custom label from a self-hosted runner configured + * in an enterprise. Returns the remaining labels from the runner. + * + * This endpoint returns a `404 Not Found` status if the custom label is not + * present on the runner. + * + * You must authenticate using an access token with the `manage_runners:enterprise` scope to use this endpoint. + */ + removeCustomLabelFromSelfHostedRunnerForEnterprise: { + (params?: RestEndpointMethodTypes["enterpriseAdmin"]["removeCustomLabelFromSelfHostedRunnerForEnterprise"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Sets the actions and reusable workflows that are allowed in an enterprise. To use this endpoint, the enterprise permission policy for `allowed_actions` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an enterprise](#set-github-actions-permissions-for-an-enterprise)." + * + * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. + */ + setAllowedActionsEnterprise: { + (params?: RestEndpointMethodTypes["enterpriseAdmin"]["setAllowedActionsEnterprise"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Remove all previous custom labels and set the new custom labels for a specific + * self-hosted runner configured in an enterprise. + * + * You must authenticate using an access token with the `manage_runners:enterprise` scope to use this endpoint. 
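+ *
+ * **Editor's sketch (not from the upstream documentation):** replacing a runner's custom labels in one call, assuming the `@octokit/rest` client, a token with the `manage_runners:enterprise` scope, and placeholder values; `labels` is the full replacement list, not an addition.
+ *
+ * ```typescript
+ * import { Octokit } from "@octokit/rest";
+ *
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ *
+ * const { data } = await octokit.rest.enterpriseAdmin.setCustomLabelsForSelfHostedRunnerForEnterprise({
+ *   enterprise: "my-enterprise",
+ *   runner_id: 42,
+ *   labels: ["gpu", "europe-west"], // previous custom labels are removed and replaced by this list
+ * });
+ *
+ * console.log(data.labels.map((label) => label.name));
+ * ```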
+ */ + setCustomLabelsForSelfHostedRunnerForEnterprise: { + (params?: RestEndpointMethodTypes["enterpriseAdmin"]["setCustomLabelsForSelfHostedRunnerForEnterprise"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Sets the GitHub Actions permissions policy for organizations and allowed actions and reusable workflows in an enterprise. + * + * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. + */ + setGithubActionsPermissionsEnterprise: { + (params?: RestEndpointMethodTypes["enterpriseAdmin"]["setGithubActionsPermissionsEnterprise"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Replaces the list of selected organizations that are enabled for GitHub Actions in an enterprise. To use this endpoint, the enterprise permission policy for `enabled_organizations` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an enterprise](#set-github-actions-permissions-for-an-enterprise)." + * + * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. + */ + setSelectedOrganizationsEnabledGithubActionsEnterprise: { + (params?: RestEndpointMethodTypes["enterpriseAdmin"]["setSelectedOrganizationsEnabledGithubActionsEnterprise"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + }; + gists: { + checkIsStarred: { + (params?: RestEndpointMethodTypes["gists"]["checkIsStarred"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Allows you to add a new gist with one or more files. + * + * **Note:** Don't name your files "gistfile" with a numerical suffix. This is the format of the automatic naming scheme that Gist uses internally. + */ + create: { + (params?: RestEndpointMethodTypes["gists"]["create"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + createComment: { + (params?: RestEndpointMethodTypes["gists"]["createComment"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + delete: { + (params?: RestEndpointMethodTypes["gists"]["delete"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + deleteComment: { + (params?: RestEndpointMethodTypes["gists"]["deleteComment"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * **Note**: This was previously `/gists/:gist_id/fork`. 
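+ *
+ * **Editor's sketch (not from the upstream documentation):** forking a gist by ID, assuming the `@octokit/rest` client and a token that can create gists; the gist ID is a placeholder.
+ *
+ * ```typescript
+ * import { Octokit } from "@octokit/rest";
+ *
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ *
+ * const { data: forkedGist } = await octokit.rest.gists.fork({ gist_id: "aa5a315d61ae9438b18d" });
+ *
+ * console.log(forkedGist.id, forkedGist.html_url); // the newly created fork
+ * ```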
+ */ + fork: { + (params?: RestEndpointMethodTypes["gists"]["fork"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + get: { + (params?: RestEndpointMethodTypes["gists"]["get"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + getComment: { + (params?: RestEndpointMethodTypes["gists"]["getComment"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + getRevision: { + (params?: RestEndpointMethodTypes["gists"]["getRevision"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists the authenticated user's gists or if called anonymously, this endpoint returns all public gists: + */ + list: { + (params?: RestEndpointMethodTypes["gists"]["list"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + listComments: { + (params?: RestEndpointMethodTypes["gists"]["listComments"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + listCommits: { + (params?: RestEndpointMethodTypes["gists"]["listCommits"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists public gists for the specified user: + */ + listForUser: { + (params?: RestEndpointMethodTypes["gists"]["listForUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + listForks: { + (params?: RestEndpointMethodTypes["gists"]["listForks"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * List public gists sorted by most recently updated to least recently updated. + * + * Note: With [pagination](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination), you can fetch up to 3000 gists. For example, you can fetch 100 pages with 30 gists per page or 30 pages with 100 gists per page. + */ + listPublic: { + (params?: RestEndpointMethodTypes["gists"]["listPublic"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * List the authenticated user's starred gists: + */ + listStarred: { + (params?: RestEndpointMethodTypes["gists"]["listStarred"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Note that you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)." + */ + star: { + (params?: RestEndpointMethodTypes["gists"]["star"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + unstar: { + (params?: RestEndpointMethodTypes["gists"]["unstar"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Allows you to update or delete a gist file and rename gist files. Files from the previous version of the gist that aren't explicitly changed during an edit are unchanged. 
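+ *
+ * **Editor's sketch (not from the upstream documentation):** updating one file and renaming another in a single call, assuming the `@octokit/rest` client and a placeholder gist ID; files omitted from `files` are left untouched, as described above.
+ *
+ * ```typescript
+ * import { Octokit } from "@octokit/rest";
+ *
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ *
+ * const { data: gist } = await octokit.rest.gists.update({
+ *   gist_id: "aa5a315d61ae9438b18d",
+ *   files: {
+ *     "notes.md": { content: "updated contents" },   // update in place
+ *     "old-name.txt": { filename: "new-name.txt" },  // rename, keeping the contents
+ *     // per the REST description, setting a file entry to null deletes that file
+ *   },
+ * });
+ *
+ * console.log(Object.keys(gist.files ?? {}));
+ * ```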
+ */ + update: { + (params?: RestEndpointMethodTypes["gists"]["update"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + updateComment: { + (params?: RestEndpointMethodTypes["gists"]["updateComment"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + }; + git: { + createBlob: { + (params?: RestEndpointMethodTypes["git"]["createBlob"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Creates a new Git [commit object](https://git-scm.com/book/en/v1/Git-Internals-Git-Objects#Commit-Objects). + * + * **Signature verification object** + * + * The response will include a `verification` object that describes the result of verifying the commit's signature. The following fields are included in the `verification` object: + * + * | Name | Type | Description | + * | ---- | ---- | ----------- | + * | `verified` | `boolean` | Indicates whether GitHub considers the signature in this commit to be verified. | + * | `reason` | `string` | The reason for verified value. Possible values and their meanings are enumerated in the table below. | + * | `signature` | `string` | The signature that was extracted from the commit. | + * | `payload` | `string` | The value that was signed. | + * + * These are the possible values for `reason` in the `verification` object: + * + * | Value | Description | + * | ----- | ----------- | + * | `expired_key` | The key that made the signature is expired. | + * | `not_signing_key` | The "signing" flag is not among the usage flags in the GPG key that made the signature. | + * | `gpgverify_error` | There was an error communicating with the signature verification service. | + * | `gpgverify_unavailable` | The signature verification service is currently unavailable. | + * | `unsigned` | The object does not include a signature. | + * | `unknown_signature_type` | A non-PGP signature was found in the commit. | + * | `no_user` | No user was associated with the `committer` email address in the commit. | + * | `unverified_email` | The `committer` email address in the commit was associated with a user, but the email address is not verified on her/his account. | + * | `bad_email` | The `committer` email address in the commit is not included in the identities of the PGP key that made the signature. | + * | `unknown_key` | The key that made the signature has not been registered with any user's account. | + * | `malformed_signature` | There was an error parsing the signature. | + * | `invalid` | The signature could not be cryptographically verified using the key whose key-id was found in the signature. | + * | `valid` | None of the above errors applied, so the signature is considered to be verified. | + */ + createCommit: { + (params?: RestEndpointMethodTypes["git"]["createCommit"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Creates a reference for your repository. You are unable to create new references for empty repositories, even if the commit SHA-1 hash used exists. Empty repositories are repositories without branches. 
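+ *
+ * **Editor's sketch (not from the upstream documentation):** creating a branch reference that points at an existing commit, assuming the `@octokit/rest` client and placeholder owner/repo/SHA values; the endpoint expects a fully qualified `ref` such as `refs/heads/...`.
+ *
+ * ```typescript
+ * import { Octokit } from "@octokit/rest";
+ *
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ *
+ * const { data: ref } = await octokit.rest.git.createRef({
+ *   owner: "octocat",
+ *   repo: "hello-world",
+ *   ref: "refs/heads/feature-a",                     // fully qualified reference name
+ *   sha: "aa218f56b14c9653891f9e74264a383fa43fefbd", // an existing commit SHA in the repository
+ * });
+ *
+ * console.log(ref.ref, ref.object.sha);
+ * ```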
+ */ + createRef: { + (params?: RestEndpointMethodTypes["git"]["createRef"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Note that creating a tag object does not create the reference that makes a tag in Git. If you want to create an annotated tag in Git, you have to do this call to create the tag object, and then [create](https://docs.github.com/rest/reference/git#create-a-reference) the `refs/tags/[tag]` reference. If you want to create a lightweight tag, you only have to [create](https://docs.github.com/rest/reference/git#create-a-reference) the tag reference - this call would be unnecessary. + * + * **Signature verification object** + * + * The response will include a `verification` object that describes the result of verifying the commit's signature. The following fields are included in the `verification` object: + * + * | Name | Type | Description | + * | ---- | ---- | ----------- | + * | `verified` | `boolean` | Indicates whether GitHub considers the signature in this commit to be verified. | + * | `reason` | `string` | The reason for verified value. Possible values and their meanings are enumerated in table below. | + * | `signature` | `string` | The signature that was extracted from the commit. | + * | `payload` | `string` | The value that was signed. | + * + * These are the possible values for `reason` in the `verification` object: + * + * | Value | Description | + * | ----- | ----------- | + * | `expired_key` | The key that made the signature is expired. | + * | `not_signing_key` | The "signing" flag is not among the usage flags in the GPG key that made the signature. | + * | `gpgverify_error` | There was an error communicating with the signature verification service. | + * | `gpgverify_unavailable` | The signature verification service is currently unavailable. | + * | `unsigned` | The object does not include a signature. | + * | `unknown_signature_type` | A non-PGP signature was found in the commit. | + * | `no_user` | No user was associated with the `committer` email address in the commit. | + * | `unverified_email` | The `committer` email address in the commit was associated with a user, but the email address is not verified on her/his account. | + * | `bad_email` | The `committer` email address in the commit is not included in the identities of the PGP key that made the signature. | + * | `unknown_key` | The key that made the signature has not been registered with any user's account. | + * | `malformed_signature` | There was an error parsing the signature. | + * | `invalid` | The signature could not be cryptographically verified using the key whose key-id was found in the signature. | + * | `valid` | None of the above errors applied, so the signature is considered to be verified. | + */ + createTag: { + (params?: RestEndpointMethodTypes["git"]["createTag"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * The tree creation API accepts nested entries. If you specify both a tree and a nested path modifying that tree, this endpoint will overwrite the contents of the tree with the new path contents, and create a new tree structure. + * + * If you use this endpoint to add, delete, or modify the file contents in a tree, you will need to commit the tree and then update a branch to point to the commit. 
For more information see "[Create a commit](https://docs.github.com/rest/reference/git#create-a-commit)" and "[Update a reference](https://docs.github.com/rest/reference/git#update-a-reference)." + */ + createTree: { + (params?: RestEndpointMethodTypes["git"]["createTree"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + deleteRef: { + (params?: RestEndpointMethodTypes["git"]["deleteRef"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * The `content` in the response will always be Base64 encoded. + * + * _Note_: This API supports blobs up to 100 megabytes in size. + */ + getBlob: { + (params?: RestEndpointMethodTypes["git"]["getBlob"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets a Git [commit object](https://git-scm.com/book/en/v1/Git-Internals-Git-Objects#Commit-Objects). + * + * **Signature verification object** + * + * The response will include a `verification` object that describes the result of verifying the commit's signature. The following fields are included in the `verification` object: + * + * | Name | Type | Description | + * | ---- | ---- | ----------- | + * | `verified` | `boolean` | Indicates whether GitHub considers the signature in this commit to be verified. | + * | `reason` | `string` | The reason for verified value. Possible values and their meanings are enumerated in the table below. | + * | `signature` | `string` | The signature that was extracted from the commit. | + * | `payload` | `string` | The value that was signed. | + * + * These are the possible values for `reason` in the `verification` object: + * + * | Value | Description | + * | ----- | ----------- | + * | `expired_key` | The key that made the signature is expired. | + * | `not_signing_key` | The "signing" flag is not among the usage flags in the GPG key that made the signature. | + * | `gpgverify_error` | There was an error communicating with the signature verification service. | + * | `gpgverify_unavailable` | The signature verification service is currently unavailable. | + * | `unsigned` | The object does not include a signature. | + * | `unknown_signature_type` | A non-PGP signature was found in the commit. | + * | `no_user` | No user was associated with the `committer` email address in the commit. | + * | `unverified_email` | The `committer` email address in the commit was associated with a user, but the email address is not verified on her/his account. | + * | `bad_email` | The `committer` email address in the commit is not included in the identities of the PGP key that made the signature. | + * | `unknown_key` | The key that made the signature has not been registered with any user's account. | + * | `malformed_signature` | There was an error parsing the signature. | + * | `invalid` | The signature could not be cryptographically verified using the key whose key-id was found in the signature. | + * | `valid` | None of the above errors applied, so the signature is considered to be verified. | + */ + getCommit: { + (params?: RestEndpointMethodTypes["git"]["getCommit"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Returns a single reference from your Git database. The `:ref` in the URL must be formatted as `heads/` for branches and `tags/` for tags. 
If the `:ref` doesn't match an existing ref, a `404` is returned. + * + * **Note:** You need to explicitly [request a pull request](https://docs.github.com/rest/reference/pulls#get-a-pull-request) to trigger a test merge commit, which checks the mergeability of pull requests. For more information, see "[Checking mergeability of pull requests](https://docs.github.com/rest/guides/getting-started-with-the-git-database-api#checking-mergeability-of-pull-requests)". + */ + getRef: { + (params?: RestEndpointMethodTypes["git"]["getRef"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * **Signature verification object** + * + * The response will include a `verification` object that describes the result of verifying the commit's signature. The following fields are included in the `verification` object: + * + * | Name | Type | Description | + * | ---- | ---- | ----------- | + * | `verified` | `boolean` | Indicates whether GitHub considers the signature in this commit to be verified. | + * | `reason` | `string` | The reason for verified value. Possible values and their meanings are enumerated in table below. | + * | `signature` | `string` | The signature that was extracted from the commit. | + * | `payload` | `string` | The value that was signed. | + * + * These are the possible values for `reason` in the `verification` object: + * + * | Value | Description | + * | ----- | ----------- | + * | `expired_key` | The key that made the signature is expired. | + * | `not_signing_key` | The "signing" flag is not among the usage flags in the GPG key that made the signature. | + * | `gpgverify_error` | There was an error communicating with the signature verification service. | + * | `gpgverify_unavailable` | The signature verification service is currently unavailable. | + * | `unsigned` | The object does not include a signature. | + * | `unknown_signature_type` | A non-PGP signature was found in the commit. | + * | `no_user` | No user was associated with the `committer` email address in the commit. | + * | `unverified_email` | The `committer` email address in the commit was associated with a user, but the email address is not verified on her/his account. | + * | `bad_email` | The `committer` email address in the commit is not included in the identities of the PGP key that made the signature. | + * | `unknown_key` | The key that made the signature has not been registered with any user's account. | + * | `malformed_signature` | There was an error parsing the signature. | + * | `invalid` | The signature could not be cryptographically verified using the key whose key-id was found in the signature. | + * | `valid` | None of the above errors applied, so the signature is considered to be verified. | + */ + getTag: { + (params?: RestEndpointMethodTypes["git"]["getTag"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Returns a single tree using the SHA1 value for that tree. + * + * If `truncated` is `true` in the response then the number of items in the `tree` array exceeded our maximum limit. If you need to fetch more items, use the non-recursive method of fetching trees, and fetch one sub-tree at a time. 
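+ *
+ * **Editor's sketch (not from the upstream documentation):** fetching a tree recursively and checking the `truncated` flag described above, assuming the `@octokit/rest` client and placeholder owner/repo values; per the underlying endpoint, passing any value for `recursive` requests the full nested tree.
+ *
+ * ```typescript
+ * import { Octokit } from "@octokit/rest";
+ *
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ *
+ * const { data: tree } = await octokit.rest.git.getTree({
+ *   owner: "octocat",
+ *   repo: "hello-world",
+ *   tree_sha: "main",   // a tree SHA, or a ref that resolves to one
+ *   recursive: "true",  // any value enables recursion
+ * });
+ *
+ * if (tree.truncated) {
+ *   // Too many entries for one response: fall back to non-recursive calls, one sub-tree at a time.
+ * }
+ * console.log(tree.tree.map((entry) => entry.path));
+ * ```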
+ */ + getTree: { + (params?: RestEndpointMethodTypes["git"]["getTree"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Returns an array of references from your Git database that match the supplied name. The `:ref` in the URL must be formatted as `heads/` for branches and `tags/` for tags. If the `:ref` doesn't exist in the repository, but existing refs start with `:ref`, they will be returned as an array. + * + * When you use this endpoint without providing a `:ref`, it will return an array of all the references from your Git database, including notes and stashes if they exist on the server. Anything in the namespace is returned, not just `heads` and `tags`. + * + * **Note:** You need to explicitly [request a pull request](https://docs.github.com/rest/reference/pulls#get-a-pull-request) to trigger a test merge commit, which checks the mergeability of pull requests. For more information, see "[Checking mergeability of pull requests](https://docs.github.com/rest/guides/getting-started-with-the-git-database-api#checking-mergeability-of-pull-requests)". + * + * If you request matching references for a branch named `feature` but the branch `feature` doesn't exist, the response can still include other matching head refs that start with the word `feature`, such as `featureA` and `featureB`. + */ + listMatchingRefs: { + (params?: RestEndpointMethodTypes["git"]["listMatchingRefs"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + updateRef: { + (params?: RestEndpointMethodTypes["git"]["updateRef"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + }; + gitignore: { + /** + * List all templates available to pass as an option when [creating a repository](https://docs.github.com/rest/reference/repos#create-a-repository-for-the-authenticated-user). + */ + getAllTemplates: { + (params?: RestEndpointMethodTypes["gitignore"]["getAllTemplates"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * The API also allows fetching the source of a single template. + * Use the raw [media type](https://docs.github.com/rest/overview/media-types/) to get the raw contents. + */ + getTemplate: { + (params?: RestEndpointMethodTypes["gitignore"]["getTemplate"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + }; + interactions: { + /** + * Shows which type of GitHub user can interact with your public repositories and when the restriction expires. + */ + getRestrictionsForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["interactions"]["getRestrictionsForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Shows which type of GitHub user can interact with this organization and when the restriction expires. If there is no restrictions, you will see an empty response. + */ + getRestrictionsForOrg: { + (params?: RestEndpointMethodTypes["interactions"]["getRestrictionsForOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Shows which type of GitHub user can interact with this repository and when the restriction expires. 
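For the `gitignore` namespace typed above, a short usage sketch under the same assumed setup (`Node` is just an example template name):

import { Octokit } from "@octokit/core";
import { restEndpointMethods } from "@octokit/plugin-rest-endpoint-methods";

const octokit = new (Octokit.plugin(restEndpointMethods))({ auth: process.env.GITHUB_TOKEN });

const { data: names } = await octokit.rest.gitignore.getAllTemplates(); // array of template names, e.g. includes "Node"
const { data: template } = await octokit.rest.gitignore.getTemplate({ name: "Node" });
console.log(template.source); // template body; pass mediaType: { format: "raw" } to receive it as plain text instead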
If there are no restrictions, you will see an empty response. + */ + getRestrictionsForRepo: { + (params?: RestEndpointMethodTypes["interactions"]["getRestrictionsForRepo"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Shows which type of GitHub user can interact with your public repositories and when the restriction expires. + * @deprecated octokit.rest.interactions.getRestrictionsForYourPublicRepos() has been renamed to octokit.rest.interactions.getRestrictionsForAuthenticatedUser() (2021-02-02) + */ + getRestrictionsForYourPublicRepos: { + (params?: RestEndpointMethodTypes["interactions"]["getRestrictionsForYourPublicRepos"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Removes any interaction restrictions from your public repositories. + */ + removeRestrictionsForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["interactions"]["removeRestrictionsForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Removes all interaction restrictions from public repositories in the given organization. You must be an organization owner to remove restrictions. + */ + removeRestrictionsForOrg: { + (params?: RestEndpointMethodTypes["interactions"]["removeRestrictionsForOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Removes all interaction restrictions from the given repository. You must have owner or admin access to remove restrictions. If the interaction limit is set for the user or organization that owns this repository, you will receive a `409 Conflict` response and will not be able to use this endpoint to change the interaction limit for a single repository. + */ + removeRestrictionsForRepo: { + (params?: RestEndpointMethodTypes["interactions"]["removeRestrictionsForRepo"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Removes any interaction restrictions from your public repositories. + * @deprecated octokit.rest.interactions.removeRestrictionsForYourPublicRepos() has been renamed to octokit.rest.interactions.removeRestrictionsForAuthenticatedUser() (2021-02-02) + */ + removeRestrictionsForYourPublicRepos: { + (params?: RestEndpointMethodTypes["interactions"]["removeRestrictionsForYourPublicRepos"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Temporarily restricts which type of GitHub user can interact with your public repositories. Setting the interaction limit at the user level will overwrite any interaction limits that are set for individual repositories owned by the user. + */ + setRestrictionsForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["interactions"]["setRestrictionsForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Temporarily restricts interactions to a certain type of GitHub user in any public repository in the given organization. You must be an organization owner to set these restrictions. 
Setting the interaction limit at the organization level will overwrite any interaction limits that are set for individual repositories owned by the organization. + */ + setRestrictionsForOrg: { + (params?: RestEndpointMethodTypes["interactions"]["setRestrictionsForOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Temporarily restricts interactions to a certain type of GitHub user within the given repository. You must have owner or admin access to set these restrictions. If an interaction limit is set for the user or organization that owns this repository, you will receive a `409 Conflict` response and will not be able to use this endpoint to change the interaction limit for a single repository. + */ + setRestrictionsForRepo: { + (params?: RestEndpointMethodTypes["interactions"]["setRestrictionsForRepo"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Temporarily restricts which type of GitHub user can interact with your public repositories. Setting the interaction limit at the user level will overwrite any interaction limits that are set for individual repositories owned by the user. + * @deprecated octokit.rest.interactions.setRestrictionsForYourPublicRepos() has been renamed to octokit.rest.interactions.setRestrictionsForAuthenticatedUser() (2021-02-02) + */ + setRestrictionsForYourPublicRepos: { + (params?: RestEndpointMethodTypes["interactions"]["setRestrictionsForYourPublicRepos"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + }; + issues: { + /** + * Adds up to 10 assignees to an issue. Users already assigned to an issue are not replaced. + */ + addAssignees: { + (params?: RestEndpointMethodTypes["issues"]["addAssignees"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + addLabels: { + (params?: RestEndpointMethodTypes["issues"]["addLabels"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Checks if a user has permission to be assigned to an issue in this repository. + * + * If the `assignee` can be assigned to issues in the repository, a `204` header with no content is returned. + * + * Otherwise a `404` status code is returned. + */ + checkUserCanBeAssigned: { + (params?: RestEndpointMethodTypes["issues"]["checkUserCanBeAssigned"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Any user with pull access to a repository can create an issue. If [issues are disabled in the repository](https://docs.github.com/articles/disabling-issues/), the API returns a `410 Gone` status. + * + * This endpoint triggers [notifications](https://docs.github.com/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. 
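A sketch of the repository-level interaction-limit calls typed above, using the documented `limit`/`expiry` enumeration values and a placeholder repository:

import { Octokit } from "@octokit/core";
import { restEndpointMethods } from "@octokit/plugin-rest-endpoint-methods";

const octokit = new (Octokit.plugin(restEndpointMethods))({ auth: process.env.GITHUB_TOKEN });
const owner = "octo-org", repo = "octo-repo"; // placeholders

// Restrict interactions to collaborators for a week, read the restriction back, then lift it.
await octokit.rest.interactions.setRestrictionsForRepo({ owner, repo, limit: "collaborators_only", expiry: "one_week" });
const { data: restriction } = await octokit.rest.interactions.getRestrictionsForRepo({ owner, repo });
console.log(restriction); // empty response when no restriction is active
await octokit.rest.interactions.removeRestrictionsForRepo({ owner, repo });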
+ */ + create: { + (params?: RestEndpointMethodTypes["issues"]["create"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * This endpoint triggers [notifications](https://docs.github.com/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. + */ + createComment: { + (params?: RestEndpointMethodTypes["issues"]["createComment"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + createLabel: { + (params?: RestEndpointMethodTypes["issues"]["createLabel"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + createMilestone: { + (params?: RestEndpointMethodTypes["issues"]["createMilestone"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + deleteComment: { + (params?: RestEndpointMethodTypes["issues"]["deleteComment"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + deleteLabel: { + (params?: RestEndpointMethodTypes["issues"]["deleteLabel"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + deleteMilestone: { + (params?: RestEndpointMethodTypes["issues"]["deleteMilestone"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * The API returns a [`301 Moved Permanently` status](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-redirects-redirects) if the issue was + * [transferred](https://docs.github.com/articles/transferring-an-issue-to-another-repository/) to another repository. If + * the issue was transferred to or deleted from a repository where the authenticated user lacks read access, the API + * returns a `404 Not Found` status. If the issue was deleted from a repository where the authenticated user has read + * access, the API returns a `410 Gone` status. To receive webhook events for transferred and deleted issues, subscribe + * to the [`issues`](https://docs.github.com/webhooks/event-payloads/#issues) webhook. + * + * **Note**: GitHub's REST API v3 considers every pull request an issue, but not every issue is a pull request. For this + * reason, "Issues" endpoints may return both issues and pull requests in the response. You can identify pull requests by + * the `pull_request` key. Be aware that the `id` of a pull request returned from "Issues" endpoints will be an _issue id_. To find out the pull + * request id, use the "[List pull requests](https://docs.github.com/rest/reference/pulls#list-pull-requests)" endpoint. 
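A sketch pairing `issues.create` with `issues.createComment` as typed above (placeholder repository and text; both calls are subject to the secondary-rate-limit caveat noted in the description):

import { Octokit } from "@octokit/core";
import { restEndpointMethods } from "@octokit/plugin-rest-endpoint-methods";

const octokit = new (Octokit.plugin(restEndpointMethods))({ auth: process.env.GITHUB_TOKEN });
const owner = "octo-org", repo = "octo-repo"; // placeholders

const { data: issue } = await octokit.rest.issues.create({
  owner,
  repo,
  title: "Nightly build failed",
  body: "Opened automatically; see the build logs.",
  labels: ["ci"],
});
await octokit.rest.issues.createComment({ owner, repo, issue_number: issue.number, body: "Triaged." });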
+ */ + get: { + (params?: RestEndpointMethodTypes["issues"]["get"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + getComment: { + (params?: RestEndpointMethodTypes["issues"]["getComment"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + getEvent: { + (params?: RestEndpointMethodTypes["issues"]["getEvent"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + getLabel: { + (params?: RestEndpointMethodTypes["issues"]["getLabel"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + getMilestone: { + (params?: RestEndpointMethodTypes["issues"]["getMilestone"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * List issues assigned to the authenticated user across all visible repositories including owned repositories, member + * repositories, and organization repositories. You can use the `filter` query parameter to fetch issues that are not + * necessarily assigned to you. + * + * + * **Note**: GitHub's REST API v3 considers every pull request an issue, but not every issue is a pull request. For this + * reason, "Issues" endpoints may return both issues and pull requests in the response. You can identify pull requests by + * the `pull_request` key. Be aware that the `id` of a pull request returned from "Issues" endpoints will be an _issue id_. To find out the pull + * request id, use the "[List pull requests](https://docs.github.com/rest/reference/pulls#list-pull-requests)" endpoint. + */ + list: { + (params?: RestEndpointMethodTypes["issues"]["list"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists the [available assignees](https://docs.github.com/articles/assigning-issues-and-pull-requests-to-other-github-users/) for issues in a repository. + */ + listAssignees: { + (params?: RestEndpointMethodTypes["issues"]["listAssignees"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Issue Comments are ordered by ascending ID. + */ + listComments: { + (params?: RestEndpointMethodTypes["issues"]["listComments"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * By default, Issue Comments are ordered by ascending ID. 
+ */ + listCommentsForRepo: { + (params?: RestEndpointMethodTypes["issues"]["listCommentsForRepo"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + listEvents: { + (params?: RestEndpointMethodTypes["issues"]["listEvents"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + listEventsForRepo: { + (params?: RestEndpointMethodTypes["issues"]["listEventsForRepo"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + listEventsForTimeline: { + (params?: RestEndpointMethodTypes["issues"]["listEventsForTimeline"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * List issues across owned and member repositories assigned to the authenticated user. + * + * **Note**: GitHub's REST API v3 considers every pull request an issue, but not every issue is a pull request. For this + * reason, "Issues" endpoints may return both issues and pull requests in the response. You can identify pull requests by + * the `pull_request` key. Be aware that the `id` of a pull request returned from "Issues" endpoints will be an _issue id_. To find out the pull + * request id, use the "[List pull requests](https://docs.github.com/rest/reference/pulls#list-pull-requests)" endpoint. + */ + listForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["issues"]["listForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * List issues in an organization assigned to the authenticated user. + * + * **Note**: GitHub's REST API v3 considers every pull request an issue, but not every issue is a pull request. For this + * reason, "Issues" endpoints may return both issues and pull requests in the response. You can identify pull requests by + * the `pull_request` key. Be aware that the `id` of a pull request returned from "Issues" endpoints will be an _issue id_. To find out the pull + * request id, use the "[List pull requests](https://docs.github.com/rest/reference/pulls#list-pull-requests)" endpoint. + */ + listForOrg: { + (params?: RestEndpointMethodTypes["issues"]["listForOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * List issues in a repository. + * + * **Note**: GitHub's REST API v3 considers every pull request an issue, but not every issue is a pull request. For this + * reason, "Issues" endpoints may return both issues and pull requests in the response. You can identify pull requests by + * the `pull_request` key. Be aware that the `id` of a pull request returned from "Issues" endpoints will be an _issue id_. To find out the pull + * request id, use the "[List pull requests](https://docs.github.com/rest/reference/pulls#list-pull-requests)" endpoint. 
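Because every pull request is also an issue (the note repeated above), callers of `issues.listForRepo` usually filter on the `pull_request` key; a sketch with placeholder names:

import { Octokit } from "@octokit/core";
import { restEndpointMethods } from "@octokit/plugin-rest-endpoint-methods";

const octokit = new (Octokit.plugin(restEndpointMethods))({ auth: process.env.GITHUB_TOKEN });

const { data: items } = await octokit.rest.issues.listForRepo({
  owner: "octo-org", // placeholder
  repo: "octo-repo", // placeholder
  state: "open",
  per_page: 100,
});
const issuesOnly = items.filter((item) => !item.pull_request); // drop pull requests
console.log(`${issuesOnly.length} open issues (PRs excluded)`);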
+ */ + listForRepo: { + (params?: RestEndpointMethodTypes["issues"]["listForRepo"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + listLabelsForMilestone: { + (params?: RestEndpointMethodTypes["issues"]["listLabelsForMilestone"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + listLabelsForRepo: { + (params?: RestEndpointMethodTypes["issues"]["listLabelsForRepo"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + listLabelsOnIssue: { + (params?: RestEndpointMethodTypes["issues"]["listLabelsOnIssue"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + listMilestones: { + (params?: RestEndpointMethodTypes["issues"]["listMilestones"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Users with push access can lock an issue or pull request's conversation. + * + * Note that, if you choose not to pass any parameters, you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)." + */ + lock: { + (params?: RestEndpointMethodTypes["issues"]["lock"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + removeAllLabels: { + (params?: RestEndpointMethodTypes["issues"]["removeAllLabels"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Removes one or more assignees from an issue. + */ + removeAssignees: { + (params?: RestEndpointMethodTypes["issues"]["removeAssignees"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Removes the specified label from the issue, and returns the remaining labels on the issue. This endpoint returns a `404 Not Found` status if the label does not exist. + */ + removeLabel: { + (params?: RestEndpointMethodTypes["issues"]["removeLabel"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Removes any previous labels and sets the new labels for an issue. + */ + setLabels: { + (params?: RestEndpointMethodTypes["issues"]["setLabels"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Users with push access can unlock an issue's conversation. + */ + unlock: { + (params?: RestEndpointMethodTypes["issues"]["unlock"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Issue owners and users with push access can edit an issue. 
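A sketch of the label/close/lock flow these `issues` methods type, with a placeholder issue number and one of the documented `lock_reason` values:

import { Octokit } from "@octokit/core";
import { restEndpointMethods } from "@octokit/plugin-rest-endpoint-methods";

const octokit = new (Octokit.plugin(restEndpointMethods))({ auth: process.env.GITHUB_TOKEN });
const owner = "octo-org", repo = "octo-repo", issue_number = 42; // placeholders

await octokit.rest.issues.setLabels({ owner, repo, issue_number, labels: ["triaged"] }); // replaces any existing labels
await octokit.rest.issues.update({ owner, repo, issue_number, state: "closed" });
await octokit.rest.issues.lock({ owner, repo, issue_number, lock_reason: "resolved" });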
+ */ + update: { + (params?: RestEndpointMethodTypes["issues"]["update"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + updateComment: { + (params?: RestEndpointMethodTypes["issues"]["updateComment"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + updateLabel: { + (params?: RestEndpointMethodTypes["issues"]["updateLabel"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + updateMilestone: { + (params?: RestEndpointMethodTypes["issues"]["updateMilestone"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + }; + licenses: { + get: { + (params?: RestEndpointMethodTypes["licenses"]["get"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + getAllCommonlyUsed: { + (params?: RestEndpointMethodTypes["licenses"]["getAllCommonlyUsed"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * This method returns the contents of the repository's license file, if one is detected. + * + * Similar to [Get repository content](https://docs.github.com/rest/reference/repos#get-repository-content), this method also supports [custom media types](https://docs.github.com/rest/overview/media-types) for retrieving the raw license content or rendered license HTML. + */ + getForRepo: { + (params?: RestEndpointMethodTypes["licenses"]["getForRepo"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + }; + markdown: { + render: { + (params?: RestEndpointMethodTypes["markdown"]["render"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * You must send Markdown as plain text (using a `Content-Type` header of `text/plain` or `text/x-markdown`) to this endpoint, rather than using JSON format. In raw mode, [GitHub Flavored Markdown](https://github.github.com/gfm/) is not supported and Markdown will be rendered in plain format like a README.md file. Markdown content must be 400 KB or less. + */ + renderRaw: { + (params?: RestEndpointMethodTypes["markdown"]["renderRaw"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + }; + meta: { + /** + * Returns meta information about GitHub, including a list of GitHub's IP addresses. For more information, see "[About GitHub's IP addresses](https://docs.github.com/articles/about-github-s-ip-addresses/)." + * + * **Note:** The IP addresses shown in the documentation's response are only example values. You must always query the API directly to get the latest list of IP addresses. 
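The `meta` endpoints typed above take no required parameters; a sketch that pulls the live IP allow-lists instead of hard-coding the documented examples:

import { Octokit } from "@octokit/core";
import { restEndpointMethods } from "@octokit/plugin-rest-endpoint-methods";

const octokit = new (Octokit.plugin(restEndpointMethods))({ auth: process.env.GITHUB_TOKEN });

const { data: meta } = await octokit.rest.meta.get();
console.log("webhook sources:", meta.hooks); // always query live; the documented IPs are examples only
console.log("api addresses:", meta.api);
const { data: zen } = await octokit.rest.meta.getZen();
console.log(zen); // a random Zen of GitHub sentence, returned as plain text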
+ */ + get: { + (params?: RestEndpointMethodTypes["meta"]["get"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Get the octocat as ASCII art + */ + getOctocat: { + (params?: RestEndpointMethodTypes["meta"]["getOctocat"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Get a random sentence from the Zen of GitHub + */ + getZen: { + (params?: RestEndpointMethodTypes["meta"]["getZen"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Get Hypermedia links to resources accessible in GitHub's REST API + */ + root: { + (params?: RestEndpointMethodTypes["meta"]["root"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + }; + migrations: { + /** + * Stop an import for a repository. + */ + cancelImport: { + (params?: RestEndpointMethodTypes["migrations"]["cancelImport"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Deletes a previous migration archive. Downloadable migration archives are automatically deleted after seven days. Migration metadata, which is returned in the [List user migrations](https://docs.github.com/rest/reference/migrations#list-user-migrations) and [Get a user migration status](https://docs.github.com/rest/reference/migrations#get-a-user-migration-status) endpoints, will continue to be available even after an archive is deleted. + */ + deleteArchiveForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["migrations"]["deleteArchiveForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Deletes a previous migration archive. Migration archives are automatically deleted after seven days. + */ + deleteArchiveForOrg: { + (params?: RestEndpointMethodTypes["migrations"]["deleteArchiveForOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Fetches the URL to a migration archive. + */ + downloadArchiveForOrg: { + (params?: RestEndpointMethodTypes["migrations"]["downloadArchiveForOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Fetches the URL to download the migration archive as a `tar.gz` file. Depending on the resources your repository uses, the migration archive can contain JSON files with data for these objects: + * + * * attachments + * * bases + * * commit\_comments + * * issue\_comments + * * issue\_events + * * issues + * * milestones + * * organizations + * * projects + * * protected\_branches + * * pull\_request\_reviews + * * pull\_requests + * * releases + * * repositories + * * review\_comments + * * schema + * * users + * + * The archive will also contain an `attachments` directory that includes all attachment files uploaded to GitHub.com and a `repositories` directory that contains the repository's Git data. 
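A sketch of the user-migration archive flow typed above (`startForAuthenticatedUser`, then status, then `getArchiveForAuthenticatedUser`), with a placeholder repository list:

import { Octokit } from "@octokit/core";
import { restEndpointMethods } from "@octokit/plugin-rest-endpoint-methods";

const octokit = new (Octokit.plugin(restEndpointMethods))({ auth: process.env.GITHUB_TOKEN });

const { data: migration } = await octokit.rest.migrations.startForAuthenticatedUser({
  repositories: ["octo-org/octo-repo"], // placeholder
  lock_repositories: false,
});
// Once the state reaches "exported" (poll getStatusForAuthenticatedUser), fetch the tar.gz archive.
const { data: status } = await octokit.rest.migrations.getStatusForAuthenticatedUser({ migration_id: migration.id });
if (status.state === "exported") {
  const archive = await octokit.rest.migrations.getArchiveForAuthenticatedUser({ migration_id: migration.id });
  // archive.data holds the binary tarball; write it to disk as needed
}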
+ */ + getArchiveForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["migrations"]["getArchiveForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Each type of source control system represents authors in a different way. For example, a Git commit author has a display name and an email address, but a Subversion commit author just has a username. The GitHub Importer will make the author information valid, but the author might not be correct. For example, it will change the bare Subversion username `hubot` into something like `hubot `. + * + * This endpoint and the [Map a commit author](https://docs.github.com/rest/reference/migrations#map-a-commit-author) endpoint allow you to provide correct Git author information. + */ + getCommitAuthors: { + (params?: RestEndpointMethodTypes["migrations"]["getCommitAuthors"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * View the progress of an import. + * + * **Import status** + * + * This section includes details about the possible values of the `status` field of the Import Progress response. + * + * An import that does not have errors will progress through these steps: + * + * * `detecting` - the "detection" step of the import is in progress because the request did not include a `vcs` parameter. The import is identifying the type of source control present at the URL. + * * `importing` - the "raw" step of the import is in progress. This is where commit data is fetched from the original repository. The import progress response will include `commit_count` (the total number of raw commits that will be imported) and `percent` (0 - 100, the current progress through the import). + * * `mapping` - the "rewrite" step of the import is in progress. This is where SVN branches are converted to Git branches, and where author updates are applied. The import progress response does not include progress information. + * * `pushing` - the "push" step of the import is in progress. This is where the importer updates the repository on GitHub. The import progress response will include `push_percent`, which is the percent value reported by `git push` when it is "Writing objects". + * * `complete` - the import is complete, and the repository is ready on GitHub. + * + * If there are problems, you will see one of these in the `status` field: + * + * * `auth_failed` - the import requires authentication in order to connect to the original repository. To update authentication for the import, please see the [Update an import](https://docs.github.com/rest/reference/migrations#update-an-import) section. + * * `error` - the import encountered an error. The import progress response will include the `failed_step` and an error message. Contact [GitHub Support](https://support.github.com/contact?tags=dotcom-rest-api) for more information. + * * `detection_needs_auth` - the importer requires authentication for the originating repository to continue detection. To update authentication for the import, please see the [Update an import](https://docs.github.com/rest/reference/migrations#update-an-import) section. + * * `detection_found_nothing` - the importer didn't recognize any source control at the URL. 
To resolve, [Cancel the import](https://docs.github.com/rest/reference/migrations#cancel-an-import) and [retry](https://docs.github.com/rest/reference/migrations#start-an-import) with the correct URL. + * * `detection_found_multiple` - the importer found several projects or repositories at the provided URL. When this is the case, the Import Progress response will also include a `project_choices` field with the possible project choices as values. To update project choice, please see the [Update an import](https://docs.github.com/rest/reference/migrations#update-an-import) section. + * + * **The project_choices field** + * + * When multiple projects are found at the provided URL, the response hash will include a `project_choices` field, the value of which is an array of hashes each representing a project choice. The exact key/value pairs of the project hashes will differ depending on the version control type. + * + * **Git LFS related fields** + * + * This section includes details about Git LFS related fields that may be present in the Import Progress response. + * + * * `use_lfs` - describes whether the import has been opted in or out of using Git LFS. The value can be `opt_in`, `opt_out`, or `undecided` if no action has been taken. + * * `has_large_files` - the boolean value describing whether files larger than 100MB were found during the `importing` step. + * * `large_files_size` - the total size in gigabytes of files larger than 100MB found in the originating repository. + * * `large_files_count` - the total number of files larger than 100MB found in the originating repository. To see a list of these files, make a "Get Large Files" request. + */ + getImportStatus: { + (params?: RestEndpointMethodTypes["migrations"]["getImportStatus"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * List files larger than 100MB found during the import + */ + getLargeFiles: { + (params?: RestEndpointMethodTypes["migrations"]["getLargeFiles"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Fetches a single user migration. The response includes the `state` of the migration, which can be one of the following values: + * + * * `pending` - the migration hasn't started yet. + * * `exporting` - the migration is in progress. + * * `exported` - the migration finished successfully. + * * `failed` - the migration failed. + * + * Once the migration has been `exported` you can [download the migration archive](https://docs.github.com/rest/reference/migrations#download-a-user-migration-archive). + */ + getStatusForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["migrations"]["getStatusForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Fetches the status of a migration. + * + * The `state` of a migration can be one of the following values: + * + * * `pending`, which means the migration hasn't started yet. + * * `exporting`, which means the migration is in progress. + * * `exported`, which means the migration finished successfully. + * * `failed`, which means the migration failed. 
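A polling sketch around `getImportStatus` and the `status` values enumerated above (hypothetical helper, placeholder repository, same assumed Octokit setup):

import { Octokit } from "@octokit/core";
import { restEndpointMethods } from "@octokit/plugin-rest-endpoint-methods";

const octokit = new (Octokit.plugin(restEndpointMethods))({ auth: process.env.GITHUB_TOKEN });

async function waitForImport(owner: string, repo: string): Promise<void> {
  for (;;) {
    const { data } = await octokit.rest.migrations.getImportStatus({ owner, repo });
    if (data.status === "complete") return; // repository is ready on GitHub
    if (data.status === "error" || data.status === "auth_failed") {
      throw new Error(`import failed at ${data.failed_step ?? data.status}`); // see the failure statuses listed above
    }
    await new Promise((resolve) => setTimeout(resolve, 10_000)); // detecting, importing, mapping, pushing
  }
}
await waitForImport("octo-org", "octo-repo"); // placeholders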
+ */ + getStatusForOrg: { + (params?: RestEndpointMethodTypes["migrations"]["getStatusForOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists all migrations a user has started. + */ + listForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["migrations"]["listForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists the most recent migrations. + */ + listForOrg: { + (params?: RestEndpointMethodTypes["migrations"]["listForOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists all the repositories for this user migration. + */ + listReposForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["migrations"]["listReposForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * List all the repositories for this organization migration. + */ + listReposForOrg: { + (params?: RestEndpointMethodTypes["migrations"]["listReposForOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists all the repositories for this user migration. + * @deprecated octokit.rest.migrations.listReposForUser() has been renamed to octokit.rest.migrations.listReposForAuthenticatedUser() (2021-10-05) + */ + listReposForUser: { + (params?: RestEndpointMethodTypes["migrations"]["listReposForUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Update an author's identity for the import. Your application can continue updating authors any time before you push new commits to the repository. + */ + mapCommitAuthor: { + (params?: RestEndpointMethodTypes["migrations"]["mapCommitAuthor"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * You can import repositories from Subversion, Mercurial, and TFS that include files larger than 100MB. This ability is powered by [Git LFS](https://git-lfs.github.com). You can learn more about our LFS feature and working with large files [on our help site](https://docs.github.com/articles/versioning-large-files/). + */ + setLfsPreference: { + (params?: RestEndpointMethodTypes["migrations"]["setLfsPreference"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Initiates the generation of a user migration archive. + */ + startForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["migrations"]["startForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Initiates the generation of a migration archive. + */ + startForOrg: { + (params?: RestEndpointMethodTypes["migrations"]["startForOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Start a source import to a GitHub repository using GitHub Importer. 
+ */ + startImport: { + (params?: RestEndpointMethodTypes["migrations"]["startImport"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Unlocks a repository. You can lock repositories when you [start a user migration](https://docs.github.com/rest/reference/migrations#start-a-user-migration). Once the migration is complete you can unlock each repository to begin using it again or [delete the repository](https://docs.github.com/rest/reference/repos#delete-a-repository) if you no longer need the source data. Returns a status of `404 Not Found` if the repository is not locked. + */ + unlockRepoForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["migrations"]["unlockRepoForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Unlocks a repository that was locked for migration. You should unlock each migrated repository and [delete them](https://docs.github.com/rest/reference/repos#delete-a-repository) when the migration is complete and you no longer need the source data. + */ + unlockRepoForOrg: { + (params?: RestEndpointMethodTypes["migrations"]["unlockRepoForOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * An import can be updated with credentials or a project choice by passing in the appropriate parameters in this API + * request. If no parameters are provided, the import will be restarted. + * + * Some servers (e.g. TFS servers) can have several projects at a single URL. In those cases the import progress will + * have the status `detection_found_multiple` and the Import Progress response will include a `project_choices` array. + * You can select the project to import by providing one of the objects in the `project_choices` array in the update request. + */ + updateImport: { + (params?: RestEndpointMethodTypes["migrations"]["updateImport"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + }; + orgs: { + blockUser: { + (params?: RestEndpointMethodTypes["orgs"]["blockUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Cancel an organization invitation. In order to cancel an organization invitation, the authenticated user must be an organization owner. + * + * This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). + */ + cancelInvitation: { + (params?: RestEndpointMethodTypes["orgs"]["cancelInvitation"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + checkBlockedUser: { + (params?: RestEndpointMethodTypes["orgs"]["checkBlockedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Check if a user is, publicly or privately, a member of the organization. 
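`checkMembershipForUser` answers with a status code (`204` when the user is a member) rather than a meaningful body, so callers usually wrap it; a sketch with a hypothetical helper:

import { Octokit } from "@octokit/core";
import { restEndpointMethods } from "@octokit/plugin-rest-endpoint-methods";

const octokit = new (Octokit.plugin(restEndpointMethods))({ auth: process.env.GITHUB_TOKEN });

async function isOrgMember(org: string, username: string): Promise<boolean> {
  try {
    await octokit.rest.orgs.checkMembershipForUser({ org, username });
    return true;                            // 204: public or private member
  } catch (error: any) {
    if (error.status === 404) return false; // not a member (a 302 can also occur when the requester is not a member)
    throw error;
  }
}
console.log(await isOrgMember("octo-org", "octocat")); // placeholders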
+ */ + checkMembershipForUser: { + (params?: RestEndpointMethodTypes["orgs"]["checkMembershipForUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + checkPublicMembershipForUser: { + (params?: RestEndpointMethodTypes["orgs"]["checkPublicMembershipForUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * When an organization member is converted to an outside collaborator, they'll only have access to the repositories that their current team membership allows. The user will no longer be a member of the organization. For more information, see "[Converting an organization member to an outside collaborator](https://docs.github.com/articles/converting-an-organization-member-to-an-outside-collaborator/)". Converting an organization member to an outside collaborator may be restricted by enterprise administrators. For more information, see "[Enforcing repository management policies in your enterprise](https://docs.github.com/enterprise-cloud@latest/admin/policies/enforcing-policies-for-your-enterprise/enforcing-repository-management-policies-in-your-enterprise#enforcing-a-policy-for-inviting-outside-collaborators-to-repositories)." + */ + convertMemberToOutsideCollaborator: { + (params?: RestEndpointMethodTypes["orgs"]["convertMemberToOutsideCollaborator"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Invite people to an organization by using their GitHub user ID or their email address. In order to create invitations in an organization, the authenticated user must be an organization owner. + * + * This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. + */ + createInvitation: { + (params?: RestEndpointMethodTypes["orgs"]["createInvitation"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Here's how you can create a hook that posts payloads in JSON format: + */ + createWebhook: { + (params?: RestEndpointMethodTypes["orgs"]["createWebhook"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + deleteWebhook: { + (params?: RestEndpointMethodTypes["orgs"]["deleteWebhook"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * To see many of the organization response values, you need to be an authenticated organization owner with the `admin:org` scope. When the value of `two_factor_requirement_enabled` is `true`, the organization requires all members, billing managers, and outside collaborators to enable [two-factor authentication](https://docs.github.com/articles/securing-your-account-with-two-factor-authentication-2fa/). + * + * GitHub Apps with the `Organization plan` permission can use this endpoint to retrieve information about an organization's GitHub plan. 
See "[Authenticating with GitHub Apps](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/)" for details. For an example response, see 'Response with GitHub plan information' below." + */ + get: { + (params?: RestEndpointMethodTypes["orgs"]["get"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + getMembershipForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["orgs"]["getMembershipForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * In order to get a user's membership with an organization, the authenticated user must be an organization member. The `state` parameter in the response can be used to identify the user's membership status. + */ + getMembershipForUser: { + (params?: RestEndpointMethodTypes["orgs"]["getMembershipForUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Returns a webhook configured in an organization. To get only the webhook `config` properties, see "[Get a webhook configuration for an organization](/rest/reference/orgs#get-a-webhook-configuration-for-an-organization)." + */ + getWebhook: { + (params?: RestEndpointMethodTypes["orgs"]["getWebhook"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Returns the webhook configuration for an organization. To get more information about the webhook, including the `active` state and `events`, use "[Get an organization webhook ](/rest/reference/orgs#get-an-organization-webhook)." + * + * Access tokens must have the `admin:org_hook` scope, and GitHub Apps must have the `organization_hooks:read` permission. + */ + getWebhookConfigForOrg: { + (params?: RestEndpointMethodTypes["orgs"]["getWebhookConfigForOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Returns a delivery for a webhook configured in an organization. + */ + getWebhookDelivery: { + (params?: RestEndpointMethodTypes["orgs"]["getWebhookDelivery"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists all organizations, in the order that they were created on GitHub. + * + * **Note:** Pagination is powered exclusively by the `since` parameter. Use the [Link header](https://docs.github.com/rest/overview/resources-in-the-rest-api#link-header) to get the URL for the next page of organizations. + */ + list: { + (params?: RestEndpointMethodTypes["orgs"]["list"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists all GitHub Apps in an organization. The installation count includes all GitHub Apps installed on repositories in the organization. You must be an organization owner with `admin:read` scope to use this endpoint. + */ + listAppInstallations: { + (params?: RestEndpointMethodTypes["orgs"]["listAppInstallations"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * List the users blocked by an organization. 
+ */ + listBlockedUsers: { + (params?: RestEndpointMethodTypes["orgs"]["listBlockedUsers"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * List the custom repository roles available in this organization. In order to see custom + * repository roles in an organization, the authenticated user must be an organization owner. + * + * For more information on custom repository roles, see "[Managing custom repository roles for an organization](https://docs.github.com/organizations/managing-peoples-access-to-your-organization-with-roles/managing-custom-repository-roles-for-an-organization)". + */ + listCustomRoles: { + (params?: RestEndpointMethodTypes["orgs"]["listCustomRoles"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * The return hash contains `failed_at` and `failed_reason` fields which represent the time at which the invitation failed and the reason for the failure. + */ + listFailedInvitations: { + (params?: RestEndpointMethodTypes["orgs"]["listFailedInvitations"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * List organizations for the authenticated user. + * + * **OAuth scope requirements** + * + * This only lists organizations that your authorization allows you to operate on in some way (e.g., you can list teams with `read:org` scope, you can publicize your organization membership with `user` scope, etc.). Therefore, this API requires at least `user` or `read:org` scope. OAuth requests with insufficient scope receive a `403 Forbidden` response. + */ + listForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["orgs"]["listForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * List [public organization memberships](https://docs.github.com/articles/publicizing-or-concealing-organization-membership) for the specified user. + * + * This method only lists _public_ memberships, regardless of authentication. If you need to fetch all of the organization memberships (public and private) for the authenticated user, use the [List organizations for the authenticated user](https://docs.github.com/rest/reference/orgs#list-organizations-for-the-authenticated-user) API instead. + */ + listForUser: { + (params?: RestEndpointMethodTypes["orgs"]["listForUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * List all teams associated with an invitation. In order to see invitations in an organization, the authenticated user must be an organization owner. + */ + listInvitationTeams: { + (params?: RestEndpointMethodTypes["orgs"]["listInvitationTeams"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * List all users who are members of an organization. If the authenticated user is also a member of this organization then both concealed and public members will be returned. 
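`listMembers` is paginated; a sketch that walks pages manually with `per_page`/`page` (placeholder org; `octokit.paginate` from the paginate plugin would also work when that plugin is installed):

import { Octokit } from "@octokit/core";
import { restEndpointMethods } from "@octokit/plugin-rest-endpoint-methods";

const octokit = new (Octokit.plugin(restEndpointMethods))({ auth: process.env.GITHUB_TOKEN });

const logins: string[] = [];
for (let page = 1; ; page++) {
  const { data } = await octokit.rest.orgs.listMembers({ org: "octo-org", per_page: 100, page }); // placeholder org
  logins.push(...data.map((member) => member.login));
  if (data.length < 100) break; // last page
}
console.log(`${logins.length} members (concealed members appear only when the caller is also a member)`);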
+ */ + listMembers: { + (params?: RestEndpointMethodTypes["orgs"]["listMembers"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + listMembershipsForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["orgs"]["listMembershipsForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * List all users who are outside collaborators of an organization. + */ + listOutsideCollaborators: { + (params?: RestEndpointMethodTypes["orgs"]["listOutsideCollaborators"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * The return hash contains a `role` field which refers to the Organization Invitation role and will be one of the following values: `direct_member`, `admin`, `billing_manager`, `hiring_manager`, or `reinstate`. If the invitee is not a GitHub member, the `login` field in the return hash will be `null`. + */ + listPendingInvitations: { + (params?: RestEndpointMethodTypes["orgs"]["listPendingInvitations"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Members of an organization can choose to have their membership publicized or not. + */ + listPublicMembers: { + (params?: RestEndpointMethodTypes["orgs"]["listPublicMembers"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Returns a list of webhook deliveries for a webhook configured in an organization. + */ + listWebhookDeliveries: { + (params?: RestEndpointMethodTypes["orgs"]["listWebhookDeliveries"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + listWebhooks: { + (params?: RestEndpointMethodTypes["orgs"]["listWebhooks"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * This will trigger a [ping event](https://docs.github.com/webhooks/#ping-event) to be sent to the hook. + */ + pingWebhook: { + (params?: RestEndpointMethodTypes["orgs"]["pingWebhook"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Redeliver a delivery for a webhook configured in an organization. + */ + redeliverWebhookDelivery: { + (params?: RestEndpointMethodTypes["orgs"]["redeliverWebhookDelivery"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Removing a user from this list will remove them from all teams and they will no longer have any access to the organization's repositories. + */ + removeMember: { + (params?: RestEndpointMethodTypes["orgs"]["removeMember"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * In order to remove a user's membership with an organization, the authenticated user must be an organization owner. + * + * If the specified user is an active member of the organization, this will remove them from the organization. If the specified user has been invited to the organization, this will cancel their invitation. The specified user will receive an email notification in both cases. 
+ */ + removeMembershipForUser: { + (params?: RestEndpointMethodTypes["orgs"]["removeMembershipForUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Removing a user from this list will remove them from all the organization's repositories. + */ + removeOutsideCollaborator: { + (params?: RestEndpointMethodTypes["orgs"]["removeOutsideCollaborator"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + removePublicMembershipForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["orgs"]["removePublicMembershipForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Only authenticated organization owners can add a member to the organization or update the member's role. + * + * * If the authenticated user is _adding_ a member to the organization, the invited user will receive an email inviting them to the organization. The user's [membership status](https://docs.github.com/rest/reference/orgs#get-organization-membership-for-a-user) will be `pending` until they accept the invitation. + * + * * Authenticated users can _update_ a user's membership by passing the `role` parameter. If the authenticated user changes a member's role to `admin`, the affected user will receive an email notifying them that they've been made an organization owner. If the authenticated user changes an owner's role to `member`, no email will be sent. + * + * **Rate limits** + * + * To prevent abuse, the authenticated user is limited to 50 organization invitations per 24 hour period. If the organization is more than one month old or on a paid plan, the limit is 500 invitations per 24 hour period. + */ + setMembershipForUser: { + (params?: RestEndpointMethodTypes["orgs"]["setMembershipForUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * The user can publicize their own membership. (A user cannot publicize the membership for another user.) + * + * Note that you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)." + */ + setPublicMembershipForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["orgs"]["setPublicMembershipForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + unblockUser: { + (params?: RestEndpointMethodTypes["orgs"]["unblockUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * **Parameter Deprecation Notice:** GitHub will replace and discontinue `members_allowed_repository_creation_type` in favor of more granular permissions. The new input parameters are `members_can_create_public_repositories`, `members_can_create_private_repositories` for all organizations and `members_can_create_internal_repositories` for organizations associated with an enterprise account using GitHub Enterprise Cloud or GitHub Enterprise Server 2.20+. For more information, see the [blog post](https://developer.github.com/changes/2019-12-03-internal-visibility-changes). 
+ * + * Enables an authenticated organization owner with the `admin:org` scope to update the organization's profile and member privileges. + */ + update: { + (params?: RestEndpointMethodTypes["orgs"]["update"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + updateMembershipForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["orgs"]["updateMembershipForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Updates a webhook configured in an organization. When you update a webhook, the `secret` will be overwritten. If you previously had a `secret` set, you must provide the same `secret` or set a new `secret` or the secret will be removed. If you are only updating individual webhook `config` properties, use "[Update a webhook configuration for an organization](/rest/reference/orgs#update-a-webhook-configuration-for-an-organization)." + */ + updateWebhook: { + (params?: RestEndpointMethodTypes["orgs"]["updateWebhook"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Updates the webhook configuration for an organization. To update more information about the webhook, including the `active` state and `events`, use "[Update an organization webhook ](/rest/reference/orgs#update-an-organization-webhook)." + * + * Access tokens must have the `admin:org_hook` scope, and GitHub Apps must have the `organization_hooks:write` permission. + */ + updateWebhookConfigForOrg: { + (params?: RestEndpointMethodTypes["orgs"]["updateWebhookConfigForOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + }; + packages: { + /** + * Deletes a package owned by the authenticated user. You cannot delete a public package if any version of the package has more than 5,000 downloads. In this scenario, contact GitHub support for further assistance. + * + * To use this endpoint, you must authenticate using an access token with the `packages:read` and `packages:delete` scopes. + * If `package_type` is not `container`, your token must also include the `repo` scope. + */ + deletePackageForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["packages"]["deletePackageForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Deletes an entire package in an organization. You cannot delete a public package if any version of the package has more than 5,000 downloads. In this scenario, contact GitHub support for further assistance. + * + * To use this endpoint, you must have admin permissions in the organization and authenticate using an access token with the `packages:read` and `packages:delete` scopes. In addition: + * - If `package_type` is not `container`, your token must also include the `repo` scope. + * - If `package_type` is `container`, you must also have admin permissions to the container you want to delete. + */ + deletePackageForOrg: { + (params?: RestEndpointMethodTypes["packages"]["deletePackageForOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Deletes an entire package for a user. You cannot delete a public package if any version of the package has more than 5,000 downloads. 
In this scenario, contact GitHub support for further assistance. + * + * To use this endpoint, you must authenticate using an access token with the `packages:read` and `packages:delete` scopes. In addition: + * - If `package_type` is not `container`, your token must also include the `repo` scope. + * - If `package_type` is `container`, you must also have admin permissions to the container you want to delete. + */ + deletePackageForUser: { + (params?: RestEndpointMethodTypes["packages"]["deletePackageForUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Deletes a specific package version for a package owned by the authenticated user. If the package is public and the package version has more than 5,000 downloads, you cannot delete the package version. In this scenario, contact GitHub support for further assistance. + * + * To use this endpoint, you must have admin permissions in the organization and authenticate using an access token with the `packages:read` and `packages:delete` scopes. + * If `package_type` is not `container`, your token must also include the `repo` scope. + */ + deletePackageVersionForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["packages"]["deletePackageVersionForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Deletes a specific package version in an organization. If the package is public and the package version has more than 5,000 downloads, you cannot delete the package version. In this scenario, contact GitHub support for further assistance. + * + * To use this endpoint, you must have admin permissions in the organization and authenticate using an access token with the `packages:read` and `packages:delete` scopes. In addition: + * - If `package_type` is not `container`, your token must also include the `repo` scope. + * - If `package_type` is `container`, you must also have admin permissions to the container you want to delete. + */ + deletePackageVersionForOrg: { + (params?: RestEndpointMethodTypes["packages"]["deletePackageVersionForOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Deletes a specific package version for a user. If the package is public and the package version has more than 5,000 downloads, you cannot delete the package version. In this scenario, contact GitHub support for further assistance. + * + * To use this endpoint, you must authenticate using an access token with the `packages:read` and `packages:delete` scopes. In addition: + * - If `package_type` is not `container`, your token must also include the `repo` scope. + * - If `package_type` is `container`, you must also have admin permissions to the container you want to delete. + */ + deletePackageVersionForUser: { + (params?: RestEndpointMethodTypes["packages"]["deletePackageVersionForUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Returns all package versions for a package owned by an organization. + * + * To use this endpoint, you must authenticate using an access token with the `packages:read` scope. + * If `package_type` is not `container`, your token must also include the `repo` scope. 
+ * @deprecated octokit.rest.packages.getAllPackageVersionsForAPackageOwnedByAnOrg() has been renamed to octokit.rest.packages.getAllPackageVersionsForPackageOwnedByOrg() (2021-03-24) + */ + getAllPackageVersionsForAPackageOwnedByAnOrg: { + (params?: RestEndpointMethodTypes["packages"]["getAllPackageVersionsForAPackageOwnedByAnOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Returns all package versions for a package owned by the authenticated user. + * + * To use this endpoint, you must authenticate using an access token with the `packages:read` scope. + * If `package_type` is not `container`, your token must also include the `repo` scope. + * @deprecated octokit.rest.packages.getAllPackageVersionsForAPackageOwnedByTheAuthenticatedUser() has been renamed to octokit.rest.packages.getAllPackageVersionsForPackageOwnedByAuthenticatedUser() (2021-03-24) + */ + getAllPackageVersionsForAPackageOwnedByTheAuthenticatedUser: { + (params?: RestEndpointMethodTypes["packages"]["getAllPackageVersionsForAPackageOwnedByTheAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Returns all package versions for a package owned by the authenticated user. + * + * To use this endpoint, you must authenticate using an access token with the `packages:read` scope. + * If `package_type` is not `container`, your token must also include the `repo` scope. + */ + getAllPackageVersionsForPackageOwnedByAuthenticatedUser: { + (params?: RestEndpointMethodTypes["packages"]["getAllPackageVersionsForPackageOwnedByAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Returns all package versions for a package owned by an organization. + * + * To use this endpoint, you must authenticate using an access token with the `packages:read` scope. + * If `package_type` is not `container`, your token must also include the `repo` scope. + */ + getAllPackageVersionsForPackageOwnedByOrg: { + (params?: RestEndpointMethodTypes["packages"]["getAllPackageVersionsForPackageOwnedByOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Returns all package versions for a public package owned by a specified user. + * + * To use this endpoint, you must authenticate using an access token with the `packages:read` scope. + * If `package_type` is not `container`, your token must also include the `repo` scope. + */ + getAllPackageVersionsForPackageOwnedByUser: { + (params?: RestEndpointMethodTypes["packages"]["getAllPackageVersionsForPackageOwnedByUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets a specific package for a package owned by the authenticated user. + * + * To use this endpoint, you must authenticate using an access token with the `packages:read` scope. + * If `package_type` is not `container`, your token must also include the `repo` scope. + */ + getPackageForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["packages"]["getPackageForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets a specific package in an organization. 
+ * + * To use this endpoint, you must authenticate using an access token with the `packages:read` scope. + * If `package_type` is not `container`, your token must also include the `repo` scope. + */ + getPackageForOrganization: { + (params?: RestEndpointMethodTypes["packages"]["getPackageForOrganization"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets a specific package metadata for a public package owned by a user. + * + * To use this endpoint, you must authenticate using an access token with the `packages:read` scope. + * If `package_type` is not `container`, your token must also include the `repo` scope. + */ + getPackageForUser: { + (params?: RestEndpointMethodTypes["packages"]["getPackageForUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets a specific package version for a package owned by the authenticated user. + * + * To use this endpoint, you must authenticate using an access token with the `packages:read` scope. + * If `package_type` is not `container`, your token must also include the `repo` scope. + */ + getPackageVersionForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["packages"]["getPackageVersionForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets a specific package version in an organization. + * + * You must authenticate using an access token with the `packages:read` scope. + * If `package_type` is not `container`, your token must also include the `repo` scope. + */ + getPackageVersionForOrganization: { + (params?: RestEndpointMethodTypes["packages"]["getPackageVersionForOrganization"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets a specific package version for a public package owned by a specified user. + * + * At this time, to use this endpoint, you must authenticate using an access token with the `packages:read` scope. + * If `package_type` is not `container`, your token must also include the `repo` scope. + */ + getPackageVersionForUser: { + (params?: RestEndpointMethodTypes["packages"]["getPackageVersionForUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists packages owned by the authenticated user within the user's namespace. + * + * To use this endpoint, you must authenticate using an access token with the `packages:read` scope. + * If `package_type` is not `container`, your token must also include the `repo` scope. + */ + listPackagesForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["packages"]["listPackagesForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists all packages in an organization readable by the user. + * + * To use this endpoint, you must authenticate using an access token with the `packages:read` scope. + * If `package_type` is not `container`, your token must also include the `repo` scope. 
+ */ + listPackagesForOrganization: { + (params?: RestEndpointMethodTypes["packages"]["listPackagesForOrganization"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists all packages in a user's namespace for which the requesting user has access. + * + * To use this endpoint, you must authenticate using an access token with the `packages:read` scope. + * If `package_type` is not `container`, your token must also include the `repo` scope. + */ + listPackagesForUser: { + (params?: RestEndpointMethodTypes["packages"]["listPackagesForUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Restores a package owned by the authenticated user. + * + * You can restore a deleted package under the following conditions: + * - The package was deleted within the last 30 days. + * - The same package namespace and version is still available and not reused for a new package. If the same package namespace is not available, you will not be able to restore your package. In this scenario, to restore the deleted package, you must delete the new package that uses the deleted package's namespace first. + * + * To use this endpoint, you must authenticate using an access token with the `packages:read` and `packages:write` scopes. If `package_type` is not `container`, your token must also include the `repo` scope. + */ + restorePackageForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["packages"]["restorePackageForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Restores an entire package in an organization. + * + * You can restore a deleted package under the following conditions: + * - The package was deleted within the last 30 days. + * - The same package namespace and version is still available and not reused for a new package. If the same package namespace is not available, you will not be able to restore your package. In this scenario, to restore the deleted package, you must delete the new package that uses the deleted package's namespace first. + * + * To use this endpoint, you must have admin permissions in the organization and authenticate using an access token with the `packages:read` and `packages:write` scopes. In addition: + * - If `package_type` is not `container`, your token must also include the `repo` scope. + * - If `package_type` is `container`, you must also have admin permissions to the container that you want to restore. + */ + restorePackageForOrg: { + (params?: RestEndpointMethodTypes["packages"]["restorePackageForOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Restores an entire package for a user. + * + * You can restore a deleted package under the following conditions: + * - The package was deleted within the last 30 days. + * - The same package namespace and version is still available and not reused for a new package. If the same package namespace is not available, you will not be able to restore your package. In this scenario, to restore the deleted package, you must delete the new package that uses the deleted package's namespace first. + * + * To use this endpoint, you must authenticate using an access token with the `packages:read` and `packages:write` scopes. 
In addition: + * - If `package_type` is not `container`, your token must also include the `repo` scope. + * - If `package_type` is `container`, you must also have admin permissions to the container that you want to restore. + */ + restorePackageForUser: { + (params?: RestEndpointMethodTypes["packages"]["restorePackageForUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Restores a package version owned by the authenticated user. + * + * You can restore a deleted package version under the following conditions: + * - The package was deleted within the last 30 days. + * - The same package namespace and version is still available and not reused for a new package. If the same package namespace is not available, you will not be able to restore your package. In this scenario, to restore the deleted package, you must delete the new package that uses the deleted package's namespace first. + * + * To use this endpoint, you must authenticate using an access token with the `packages:read` and `packages:write` scopes. If `package_type` is not `container`, your token must also include the `repo` scope. + */ + restorePackageVersionForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["packages"]["restorePackageVersionForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Restores a specific package version in an organization. + * + * You can restore a deleted package under the following conditions: + * - The package was deleted within the last 30 days. + * - The same package namespace and version is still available and not reused for a new package. If the same package namespace is not available, you will not be able to restore your package. In this scenario, to restore the deleted package, you must delete the new package that uses the deleted package's namespace first. + * + * To use this endpoint, you must have admin permissions in the organization and authenticate using an access token with the `packages:read` and `packages:write` scopes. In addition: + * - If `package_type` is not `container`, your token must also include the `repo` scope. + * - If `package_type` is `container`, you must also have admin permissions to the container that you want to restore. + */ + restorePackageVersionForOrg: { + (params?: RestEndpointMethodTypes["packages"]["restorePackageVersionForOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Restores a specific package version for a user. + * + * You can restore a deleted package under the following conditions: + * - The package was deleted within the last 30 days. + * - The same package namespace and version is still available and not reused for a new package. If the same package namespace is not available, you will not be able to restore your package. In this scenario, to restore the deleted package, you must delete the new package that uses the deleted package's namespace first. + * + * To use this endpoint, you must authenticate using an access token with the `packages:read` and `packages:write` scopes. In addition: + * - If `package_type` is not `container`, your token must also include the `repo` scope. + * - If `package_type` is `container`, you must also have admin permissions to the container that you want to restore. 
+ */ + restorePackageVersionForUser: { + (params?: RestEndpointMethodTypes["packages"]["restorePackageVersionForUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + }; + projects: { + /** + * Adds a collaborator to an organization project and sets their permission level. You must be an organization owner or a project `admin` to add a collaborator. + */ + addCollaborator: { + (params?: RestEndpointMethodTypes["projects"]["addCollaborator"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + createCard: { + (params?: RestEndpointMethodTypes["projects"]["createCard"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + createColumn: { + (params?: RestEndpointMethodTypes["projects"]["createColumn"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + createForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["projects"]["createForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Creates an organization project board. Returns a `404 Not Found` status if projects are disabled in the organization. If you do not have sufficient privileges to perform this action, a `401 Unauthorized` or `410 Gone` status is returned. + */ + createForOrg: { + (params?: RestEndpointMethodTypes["projects"]["createForOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Creates a repository project board. Returns a `404 Not Found` status if projects are disabled in the repository. If you do not have sufficient privileges to perform this action, a `401 Unauthorized` or `410 Gone` status is returned. + */ + createForRepo: { + (params?: RestEndpointMethodTypes["projects"]["createForRepo"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Deletes a project board. Returns a `404 Not Found` status if projects are disabled. + */ + delete: { + (params?: RestEndpointMethodTypes["projects"]["delete"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + deleteCard: { + (params?: RestEndpointMethodTypes["projects"]["deleteCard"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + deleteColumn: { + (params?: RestEndpointMethodTypes["projects"]["deleteColumn"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets a project by its `id`. Returns a `404 Not Found` status if projects are disabled. If you do not have sufficient privileges to perform this action, a `401 Unauthorized` or `410 Gone` status is returned. 
+ */ + get: { + (params?: RestEndpointMethodTypes["projects"]["get"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + getCard: { + (params?: RestEndpointMethodTypes["projects"]["getCard"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + getColumn: { + (params?: RestEndpointMethodTypes["projects"]["getColumn"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Returns the collaborator's permission level for an organization project. Possible values for the `permission` key: `admin`, `write`, `read`, `none`. You must be an organization owner or a project `admin` to review a user's permission level. + */ + getPermissionForUser: { + (params?: RestEndpointMethodTypes["projects"]["getPermissionForUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + listCards: { + (params?: RestEndpointMethodTypes["projects"]["listCards"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists the collaborators for an organization project. For a project, the list of collaborators includes outside collaborators, organization members that are direct collaborators, organization members with access through team memberships, organization members with access through default organization permissions, and organization owners. You must be an organization owner or a project `admin` to list collaborators. + */ + listCollaborators: { + (params?: RestEndpointMethodTypes["projects"]["listCollaborators"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + listColumns: { + (params?: RestEndpointMethodTypes["projects"]["listColumns"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists the projects in an organization. Returns a `404 Not Found` status if projects are disabled in the organization. If you do not have sufficient privileges to perform this action, a `401 Unauthorized` or `410 Gone` status is returned. + */ + listForOrg: { + (params?: RestEndpointMethodTypes["projects"]["listForOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists the projects in a repository. Returns a `404 Not Found` status if projects are disabled in the repository. If you do not have sufficient privileges to perform this action, a `401 Unauthorized` or `410 Gone` status is returned. 
+ */ + listForRepo: { + (params?: RestEndpointMethodTypes["projects"]["listForRepo"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + listForUser: { + (params?: RestEndpointMethodTypes["projects"]["listForUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + moveCard: { + (params?: RestEndpointMethodTypes["projects"]["moveCard"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + moveColumn: { + (params?: RestEndpointMethodTypes["projects"]["moveColumn"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Removes a collaborator from an organization project. You must be an organization owner or a project `admin` to remove a collaborator. + */ + removeCollaborator: { + (params?: RestEndpointMethodTypes["projects"]["removeCollaborator"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Updates a project board's information. Returns a `404 Not Found` status if projects are disabled. If you do not have sufficient privileges to perform this action, a `401 Unauthorized` or `410 Gone` status is returned. + */ + update: { + (params?: RestEndpointMethodTypes["projects"]["update"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + updateCard: { + (params?: RestEndpointMethodTypes["projects"]["updateCard"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + updateColumn: { + (params?: RestEndpointMethodTypes["projects"]["updateColumn"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + }; + pulls: { + checkIfMerged: { + (params?: RestEndpointMethodTypes["pulls"]["checkIfMerged"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Draft pull requests are available in public repositories with GitHub Free and GitHub Free for organizations, GitHub Pro, and legacy per-repository billing plans, and in public and private repositories with GitHub Team and GitHub Enterprise Cloud. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * To open or update a pull request in a public repository, you must have write access to the head or the source branch. For organization-owned repositories, you must be a member of the organization that owns the repository to open or update a pull request. + * + * You can create a new pull request. + * + * This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-rate-limits)" for details. 
+ */ + create: { + (params?: RestEndpointMethodTypes["pulls"]["create"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Creates a reply to a review comment for a pull request. For the `comment_id`, provide the ID of the review comment you are replying to. This must be the ID of a _top-level review comment_, not a reply to that comment. Replies to replies are not supported. + * + * This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. + */ + createReplyForReviewComment: { + (params?: RestEndpointMethodTypes["pulls"]["createReplyForReviewComment"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. + * + * Pull request reviews created in the `PENDING` state do not include the `submitted_at` property in the response. + * + * **Note:** To comment on a specific line in a file, you need to first determine the _position_ of that line in the diff. The GitHub REST API v3 offers the `application/vnd.github.v3.diff` [media type](https://docs.github.com/rest/overview/media-types#commits-commit-comparison-and-pull-requests). To see a pull request diff, add this media type to the `Accept` header of a call to the [single pull request](https://docs.github.com/rest/reference/pulls#get-a-pull-request) endpoint. + * + * The `position` value equals the number of lines down from the first "@@" hunk header in the file you want to add a comment. The line just below the "@@" line is position 1, the next line is position 2, and so on. The position in the diff continues to increase through lines of whitespace and additional hunks until the beginning of a new file. + */ + createReview: { + (params?: RestEndpointMethodTypes["pulls"]["createReview"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Creates a review comment in the pull request diff. To add a regular comment to a pull request timeline, see "[Create an issue comment](https://docs.github.com/rest/reference/issues#create-an-issue-comment)." We recommend creating a review comment using `line`, `side`, and optionally `start_line` and `start_side` if your comment applies to more than one line in the pull request diff. + * + * The `position` parameter is deprecated. If you use `position`, the `line`, `side`, `start_line`, and `start_side` parameters are not required. 
+ * + * **Note:** The position value equals the number of lines down from the first "@@" hunk header in the file you want to add a comment. The line just below the "@@" line is position 1, the next line is position 2, and so on. The position in the diff continues to increase through lines of whitespace and additional hunks until the beginning of a new file. + * + * This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. + */ + createReviewComment: { + (params?: RestEndpointMethodTypes["pulls"]["createReviewComment"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + deletePendingReview: { + (params?: RestEndpointMethodTypes["pulls"]["deletePendingReview"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Deletes a review comment. + */ + deleteReviewComment: { + (params?: RestEndpointMethodTypes["pulls"]["deleteReviewComment"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * **Note:** To dismiss a pull request review on a [protected branch](https://docs.github.com/rest/reference/repos#branches), you must be a repository administrator or be included in the list of people or teams who can dismiss pull request reviews. + */ + dismissReview: { + (params?: RestEndpointMethodTypes["pulls"]["dismissReview"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Draft pull requests are available in public repositories with GitHub Free and GitHub Free for organizations, GitHub Pro, and legacy per-repository billing plans, and in public and private repositories with GitHub Team and GitHub Enterprise Cloud. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Lists details of a pull request by providing its number. + * + * When you get, [create](https://docs.github.com/rest/reference/pulls/#create-a-pull-request), or [edit](https://docs.github.com/rest/reference/pulls#update-a-pull-request) a pull request, GitHub creates a merge commit to test whether the pull request can be automatically merged into the base branch. This test commit is not added to the base branch or the head branch. You can review the status of the test commit using the `mergeable` key. For more information, see "[Checking mergeability of pull requests](https://docs.github.com/rest/guides/getting-started-with-the-git-database-api#checking-mergeability-of-pull-requests)". + * + * The value of the `mergeable` attribute can be `true`, `false`, or `null`. If the value is `null`, then GitHub has started a background job to compute the mergeability. After giving the job time to complete, resubmit the request. When the job finishes, you will see a non-`null` value for the `mergeable` attribute in the response. 
If `mergeable` is `true`, then `merge_commit_sha` will be the SHA of the _test_ merge commit. + * + * The value of the `merge_commit_sha` attribute changes depending on the state of the pull request. Before merging a pull request, the `merge_commit_sha` attribute holds the SHA of the _test_ merge commit. After merging a pull request, the `merge_commit_sha` attribute changes depending on how you merged the pull request: + * + * * If merged as a [merge commit](https://docs.github.com/articles/about-merge-methods-on-github/), `merge_commit_sha` represents the SHA of the merge commit. + * * If merged via a [squash](https://docs.github.com/articles/about-merge-methods-on-github/#squashing-your-merge-commits), `merge_commit_sha` represents the SHA of the squashed commit on the base branch. + * * If [rebased](https://docs.github.com/articles/about-merge-methods-on-github/#rebasing-and-merging-your-commits), `merge_commit_sha` represents the commit that the base branch was updated to. + * + * Pass the appropriate [media type](https://docs.github.com/rest/overview/media-types/#commits-commit-comparison-and-pull-requests) to fetch diff and patch formats. + */ + get: { + (params?: RestEndpointMethodTypes["pulls"]["get"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + getReview: { + (params?: RestEndpointMethodTypes["pulls"]["getReview"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Provides details for a review comment. + */ + getReviewComment: { + (params?: RestEndpointMethodTypes["pulls"]["getReviewComment"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Draft pull requests are available in public repositories with GitHub Free and GitHub Free for organizations, GitHub Pro, and legacy per-repository billing plans, and in public and private repositories with GitHub Team and GitHub Enterprise Cloud. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + */ + list: { + (params?: RestEndpointMethodTypes["pulls"]["list"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * List comments for a specific pull request review. + */ + listCommentsForReview: { + (params?: RestEndpointMethodTypes["pulls"]["listCommentsForReview"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists a maximum of 250 commits for a pull request. To receive a complete commit list for pull requests with more than 250 commits, use the [List commits](https://docs.github.com/rest/reference/repos#list-commits) endpoint. + */ + listCommits: { + (params?: RestEndpointMethodTypes["pulls"]["listCommits"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * **Note:** Responses include a maximum of 3000 files. The paginated response returns 30 files per page by default. 
+ */ + listFiles: { + (params?: RestEndpointMethodTypes["pulls"]["listFiles"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + listRequestedReviewers: { + (params?: RestEndpointMethodTypes["pulls"]["listRequestedReviewers"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists all review comments for a pull request. By default, review comments are in ascending order by ID. + */ + listReviewComments: { + (params?: RestEndpointMethodTypes["pulls"]["listReviewComments"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists review comments for all pull requests in a repository. By default, review comments are in ascending order by ID. + */ + listReviewCommentsForRepo: { + (params?: RestEndpointMethodTypes["pulls"]["listReviewCommentsForRepo"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * The list of reviews returns in chronological order. + */ + listReviews: { + (params?: RestEndpointMethodTypes["pulls"]["listReviews"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * This endpoint triggers [notifications](https://docs.github.com/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. + */ + merge: { + (params?: RestEndpointMethodTypes["pulls"]["merge"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + removeRequestedReviewers: { + (params?: RestEndpointMethodTypes["pulls"]["removeRequestedReviewers"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * This endpoint triggers [notifications](https://docs.github.com/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. + */ + requestReviewers: { + (params?: RestEndpointMethodTypes["pulls"]["requestReviewers"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + submitReview: { + (params?: RestEndpointMethodTypes["pulls"]["submitReview"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Draft pull requests are available in public repositories with GitHub Free and GitHub Free for organizations, GitHub Pro, and legacy per-repository billing plans, and in public and private repositories with GitHub Team and GitHub Enterprise Cloud. 
For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * To open or update a pull request in a public repository, you must have write access to the head or the source branch. For organization-owned repositories, you must be a member of the organization that owns the repository to open or update a pull request. + */ + update: { + (params?: RestEndpointMethodTypes["pulls"]["update"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Updates the pull request branch with the latest upstream changes by merging HEAD from the base branch into the pull request branch. + */ + updateBranch: { + (params?: RestEndpointMethodTypes["pulls"]["updateBranch"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Update the review summary comment with new text. + */ + updateReview: { + (params?: RestEndpointMethodTypes["pulls"]["updateReview"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Enables you to edit a review comment. + */ + updateReviewComment: { + (params?: RestEndpointMethodTypes["pulls"]["updateReviewComment"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + }; + rateLimit: { + /** + * **Note:** Accessing this endpoint does not count against your REST API rate limit. + * + * **Note:** The `rate` object is deprecated. If you're writing new API client code or updating existing code, you should use the `core` object instead of the `rate` object. The `core` object contains the same information that is present in the `rate` object. + */ + get: { + (params?: RestEndpointMethodTypes["rateLimit"]["get"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + }; + reactions: { + /** + * Create a reaction to a [commit comment](https://docs.github.com/rest/reference/repos#comments). A response with an HTTP `200` status means that you already added the reaction type to this commit comment. + */ + createForCommitComment: { + (params?: RestEndpointMethodTypes["reactions"]["createForCommitComment"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Create a reaction to an [issue](https://docs.github.com/rest/reference/issues/). A response with an HTTP `200` status means that you already added the reaction type to this issue. + */ + createForIssue: { + (params?: RestEndpointMethodTypes["reactions"]["createForIssue"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Create a reaction to an [issue comment](https://docs.github.com/rest/reference/issues#comments). A response with an HTTP `200` status means that you already added the reaction type to this issue comment. + */ + createForIssueComment: { + (params?: RestEndpointMethodTypes["reactions"]["createForIssueComment"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Create a reaction to a [pull request review comment](https://docs.github.com/rest/reference/pulls#comments). 
A response with an HTTP `200` status means that you already added the reaction type to this pull request review comment. + */ + createForPullRequestReviewComment: { + (params?: RestEndpointMethodTypes["reactions"]["createForPullRequestReviewComment"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Create a reaction to a [release](https://docs.github.com/rest/reference/repos#releases). A response with a `Status: 200 OK` means that you already added the reaction type to this release. + */ + createForRelease: { + (params?: RestEndpointMethodTypes["reactions"]["createForRelease"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Create a reaction to a [team discussion comment](https://docs.github.com/rest/reference/teams#discussion-comments). OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). A response with an HTTP `200` status means that you already added the reaction type to this team discussion comment. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `POST /organizations/:org_id/team/:team_id/discussions/:discussion_number/comments/:comment_number/reactions`. + */ + createForTeamDiscussionCommentInOrg: { + (params?: RestEndpointMethodTypes["reactions"]["createForTeamDiscussionCommentInOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Create a reaction to a [team discussion](https://docs.github.com/rest/reference/teams#discussions). OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). A response with an HTTP `200` status means that you already added the reaction type to this team discussion. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `POST /organizations/:org_id/team/:team_id/discussions/:discussion_number/reactions`. + */ + createForTeamDiscussionInOrg: { + (params?: RestEndpointMethodTypes["reactions"]["createForTeamDiscussionInOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * **Note:** You can also specify a repository by `repository_id` using the route `DELETE /repositories/:repository_id/comments/:comment_id/reactions/:reaction_id`. + * + * Delete a reaction to a [commit comment](https://docs.github.com/rest/reference/repos#comments). + */ + deleteForCommitComment: { + (params?: RestEndpointMethodTypes["reactions"]["deleteForCommitComment"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * **Note:** You can also specify a repository by `repository_id` using the route `DELETE /repositories/:repository_id/issues/:issue_number/reactions/:reaction_id`. + * + * Delete a reaction to an [issue](https://docs.github.com/rest/reference/issues/). 
+ */ + deleteForIssue: { + (params?: RestEndpointMethodTypes["reactions"]["deleteForIssue"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * **Note:** You can also specify a repository by `repository_id` using the route `DELETE delete /repositories/:repository_id/issues/comments/:comment_id/reactions/:reaction_id`. + * + * Delete a reaction to an [issue comment](https://docs.github.com/rest/reference/issues#comments). + */ + deleteForIssueComment: { + (params?: RestEndpointMethodTypes["reactions"]["deleteForIssueComment"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * **Note:** You can also specify a repository by `repository_id` using the route `DELETE /repositories/:repository_id/pulls/comments/:comment_id/reactions/:reaction_id.` + * + * Delete a reaction to a [pull request review comment](https://docs.github.com/rest/reference/pulls#review-comments). + */ + deleteForPullRequestComment: { + (params?: RestEndpointMethodTypes["reactions"]["deleteForPullRequestComment"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * **Note:** You can also specify a repository by `repository_id` using the route `DELETE delete /repositories/:repository_id/releases/:release_id/reactions/:reaction_id`. + * + * Delete a reaction to a [release](https://docs.github.com/rest/reference/repos#releases). + */ + deleteForRelease: { + (params?: RestEndpointMethodTypes["reactions"]["deleteForRelease"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * **Note:** You can also specify a team or organization with `team_id` and `org_id` using the route `DELETE /organizations/:org_id/team/:team_id/discussions/:discussion_number/reactions/:reaction_id`. + * + * Delete a reaction to a [team discussion](https://docs.github.com/rest/reference/teams#discussions). OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + */ + deleteForTeamDiscussion: { + (params?: RestEndpointMethodTypes["reactions"]["deleteForTeamDiscussion"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * **Note:** You can also specify a team or organization with `team_id` and `org_id` using the route `DELETE /organizations/:org_id/team/:team_id/discussions/:discussion_number/comments/:comment_number/reactions/:reaction_id`. + * + * Delete a reaction to a [team discussion comment](https://docs.github.com/rest/reference/teams#discussion-comments). OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + */ + deleteForTeamDiscussionComment: { + (params?: RestEndpointMethodTypes["reactions"]["deleteForTeamDiscussionComment"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * List the reactions to a [commit comment](https://docs.github.com/rest/reference/repos#comments). 
+ */ + listForCommitComment: { + (params?: RestEndpointMethodTypes["reactions"]["listForCommitComment"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * List the reactions to an [issue](https://docs.github.com/rest/reference/issues). + */ + listForIssue: { + (params?: RestEndpointMethodTypes["reactions"]["listForIssue"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * List the reactions to an [issue comment](https://docs.github.com/rest/reference/issues#comments). + */ + listForIssueComment: { + (params?: RestEndpointMethodTypes["reactions"]["listForIssueComment"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * List the reactions to a [pull request review comment](https://docs.github.com/rest/reference/pulls#review-comments). + */ + listForPullRequestReviewComment: { + (params?: RestEndpointMethodTypes["reactions"]["listForPullRequestReviewComment"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * List the reactions to a [release](https://docs.github.com/rest/reference/repos#releases). + */ + listForRelease: { + (params?: RestEndpointMethodTypes["reactions"]["listForRelease"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * List the reactions to a [team discussion comment](https://docs.github.com/rest/reference/teams#discussion-comments/). OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/:org_id/team/:team_id/discussions/:discussion_number/comments/:comment_number/reactions`. + */ + listForTeamDiscussionCommentInOrg: { + (params?: RestEndpointMethodTypes["reactions"]["listForTeamDiscussionCommentInOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * List the reactions to a [team discussion](https://docs.github.com/rest/reference/teams#discussions). OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/:org_id/team/:team_id/discussions/:discussion_number/reactions`. 
+ */ + listForTeamDiscussionInOrg: { + (params?: RestEndpointMethodTypes["reactions"]["listForTeamDiscussionInOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + }; + repos: { + /** + * @deprecated octokit.rest.repos.acceptInvitation() has been renamed to octokit.rest.repos.acceptInvitationForAuthenticatedUser() (2021-10-05) + */ + acceptInvitation: { + (params?: RestEndpointMethodTypes["repos"]["acceptInvitation"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + acceptInvitationForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["repos"]["acceptInvitationForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Grants the specified apps push access for this branch. Only installed GitHub Apps with `write` access to the `contents` permission can be added as authorized actors on a protected branch. + * + * | Type | Description | + * | ------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------- | + * | `array` | The GitHub Apps that have push access to this branch. Use the app's `slug`. **Note**: The list of users, apps, and teams in total is limited to 100 items. | + */ + addAppAccessRestrictions: { + (params?: RestEndpointMethodTypes["repos"]["addAppAccessRestrictions"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * This endpoint triggers [notifications](https://docs.github.com/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. + * + * Adding an outside collaborator may be restricted by enterprise administrators. For more information, see "[Enforcing repository management policies in your enterprise](https://docs.github.com/enterprise-cloud@latest/admin/policies/enforcing-policies-for-your-enterprise/enforcing-repository-management-policies-in-your-enterprise#enforcing-a-policy-for-inviting-outside-collaborators-to-repositories)." + * + * For more information on permission levels, see "[Repository permission levels for an organization](https://docs.github.com/github/setting-up-and-managing-organizations-and-teams/repository-permission-levels-for-an-organization#permission-levels-for-repositories-owned-by-an-organization)". There are restrictions on which permissions can be granted to organization members when an organization base role is in place. In this case, the permission being given must be equal to or higher than the org base permission. 
Otherwise, the request will fail with: + * + * ``` + * Cannot assign {member} permission of {role name} + * ``` + * + * Note that, if you choose not to pass any parameters, you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)." + * + * The invitee will receive a notification that they have been invited to the repository, which they must accept or decline. They may do this via the notifications page, the email they receive, or by using the [repository invitations API endpoints](https://docs.github.com/rest/reference/repos#invitations). + * + * **Updating an existing collaborator's permission level** + * + * The endpoint can also be used to change the permissions of an existing collaborator without first removing and re-adding the collaborator. To change the permissions, use the same endpoint and pass a different `permission` parameter. The response will be a `204`, with no other indication that the permission level changed. + * + * **Rate limits** + * + * You are limited to sending 50 invitations to a repository per 24 hour period. Note there is no limit if you are inviting organization members to an organization repository. + */ + addCollaborator: { + (params?: RestEndpointMethodTypes["repos"]["addCollaborator"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + */ + addStatusCheckContexts: { + (params?: RestEndpointMethodTypes["repos"]["addStatusCheckContexts"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Grants the specified teams push access for this branch. You can also give push access to child teams. + * + * | Type | Description | + * | ------- | ------------------------------------------------------------------------------------------------------------------------------------------ | + * | `array` | The teams that can have push access. Use the team's `slug`. **Note**: The list of users, apps, and teams in total is limited to 100 items. | + */ + addTeamAccessRestrictions: { + (params?: RestEndpointMethodTypes["repos"]["addTeamAccessRestrictions"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. 
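Since the `addCollaborator` description above covers both inviting a new collaborator and changing an existing collaborator's permission via the same call, a minimal sketch (placeholder repository and username, token assumed in `GITHUB_TOKEN`) might read:

```ts
import { getOctokit } from "@actions/github";

async function inviteOrUpdateCollaborator(): Promise<void> {
  const octokit = getOctokit(process.env.GITHUB_TOKEN ?? "");
  // First call sends an invitation; repeating it with a different `permission`
  // updates an existing collaborator and comes back as a bare 204.
  const response = await octokit.rest.repos.addCollaborator({
    owner: "octocat",      // placeholder
    repo: "hello-world",   // placeholder
    username: "some-user", // placeholder
    permission: "push",
  });
  console.log(`addCollaborator returned HTTP ${response.status}`);
}
```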
For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Grants the specified people push access for this branch. + * + * | Type | Description | + * | ------- | ----------------------------------------------------------------------------------------------------------------------------- | + * | `array` | Usernames for people who can have push access. **Note**: The list of users, apps, and teams in total is limited to 100 items. | + */ + addUserAccessRestrictions: { + (params?: RestEndpointMethodTypes["repos"]["addUserAccessRestrictions"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * For organization-owned repositories, the list of collaborators includes outside collaborators, organization members that are direct collaborators, organization members with access through team memberships, organization members with access through default organization permissions, and organization owners. + * + * Team members will include the members of child teams. + * + * You must authenticate using an access token with the `read:org` and `repo` scopes with push access to use this + * endpoint. GitHub Apps must have the `members` organization permission and `metadata` repository permission to use this + * endpoint. + */ + checkCollaborator: { + (params?: RestEndpointMethodTypes["repos"]["checkCollaborator"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Shows whether dependency alerts are enabled or disabled for a repository. The authenticated user must have admin read access to the repository. For more information, see "[About security alerts for vulnerable dependencies](https://docs.github.com/en/articles/about-security-alerts-for-vulnerable-dependencies)". + */ + checkVulnerabilityAlerts: { + (params?: RestEndpointMethodTypes["repos"]["checkVulnerabilityAlerts"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * List any syntax errors that are detected in the CODEOWNERS + * file. + * + * For more information about the correct CODEOWNERS syntax, + * see "[About code owners](https://docs.github.com/repositories/managing-your-repositorys-settings-and-features/customizing-your-repository/about-code-owners)." + */ + codeownersErrors: { + (params?: RestEndpointMethodTypes["repos"]["codeownersErrors"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * **Deprecated**: Use `repos.compareCommitsWithBasehead()` (`GET /repos/{owner}/{repo}/compare/{basehead}`) instead. Both `:base` and `:head` must be branch names in `:repo`. To compare branches across other repositories in the same network as `:repo`, use the format `:branch`. + * + * The response from the API is equivalent to running the `git log base..head` command; however, commits are returned in chronological order. Pass the appropriate [media type](https://docs.github.com/rest/overview/media-types/#commits-commit-comparison-and-pull-requests) to fetch diff and patch formats. + * + * The response also includes details on the files that were changed between the two commits. This includes the status of the change (for example, if a file was added, removed, modified, or renamed), and details of the change itself. 
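Because `checkVulnerabilityAlerts` (described above) answers its enabled/disabled question purely through the response status, a sketch has to treat the 404 case as a normal outcome rather than an error (placeholder repository, token assumed):

```ts
import { getOctokit } from "@actions/github";

async function dependencyAlertsEnabled(): Promise<boolean> {
  const octokit = getOctokit(process.env.GITHUB_TOKEN ?? "");
  try {
    // 204 No Content means dependency alerts are enabled for the repository.
    await octokit.rest.repos.checkVulnerabilityAlerts({
      owner: "octocat",     // placeholder
      repo: "hello-world",  // placeholder
    });
    return true;
  } catch (error: any) {
    // Here a 404 means "alerts disabled", not "repository not found".
    if (error?.status === 404) return false;
    throw error;
  }
}
```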
For example, files with a `renamed` status have a `previous_filename` field showing the previous filename of the file, and files with a `modified` status have a `patch` field showing the changes made to the file. + * + * **Working with large comparisons** + * + * To process a response with a large number of commits, you can use (`per_page` or `page`) to paginate the results. When using paging, the list of changed files is only returned with page 1, but includes all changed files for the entire comparison. For more information on working with pagination, see "[Traversing with pagination](/rest/guides/traversing-with-pagination)." + * + * When calling this API without any paging parameters (`per_page` or `page`), the returned list is limited to 250 commits and the last commit in the list is the most recent of the entire comparison. When a paging parameter is specified, the first commit in the returned list of each page is the earliest. + * + * **Signature verification object** + * + * The response will include a `verification` object that describes the result of verifying the commit's signature. The following fields are included in the `verification` object: + * + * | Name | Type | Description | + * | ---- | ---- | ----------- | + * | `verified` | `boolean` | Indicates whether GitHub considers the signature in this commit to be verified. | + * | `reason` | `string` | The reason for verified value. Possible values and their meanings are enumerated in table below. | + * | `signature` | `string` | The signature that was extracted from the commit. | + * | `payload` | `string` | The value that was signed. | + * + * These are the possible values for `reason` in the `verification` object: + * + * | Value | Description | + * | ----- | ----------- | + * | `expired_key` | The key that made the signature is expired. | + * | `not_signing_key` | The "signing" flag is not among the usage flags in the GPG key that made the signature. | + * | `gpgverify_error` | There was an error communicating with the signature verification service. | + * | `gpgverify_unavailable` | The signature verification service is currently unavailable. | + * | `unsigned` | The object does not include a signature. | + * | `unknown_signature_type` | A non-PGP signature was found in the commit. | + * | `no_user` | No user was associated with the `committer` email address in the commit. | + * | `unverified_email` | The `committer` email address in the commit was associated with a user, but the email address is not verified on her/his account. | + * | `bad_email` | The `committer` email address in the commit is not included in the identities of the PGP key that made the signature. | + * | `unknown_key` | The key that made the signature has not been registered with any user's account. | + * | `malformed_signature` | There was an error parsing the signature. | + * | `invalid` | The signature could not be cryptographically verified using the key whose key-id was found in the signature. | + * | `valid` | None of the above errors applied, so the signature is considered to be verified. | + */ + compareCommits: { + (params?: RestEndpointMethodTypes["repos"]["compareCommits"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * The `basehead` param is comprised of two parts: `base` and `head`. Both must be branch names in `repo`. To compare branches across other repositories in the same network as `repo`, use the format `:branch`. 
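Since `compareCommits` above is marked deprecated in favour of `compareCommitsWithBasehead` (declared a little further down), a usage sketch of the replacement, with a hypothetical `main...my-feature-branch` range, could look like this; note that the single `basehead` parameter takes the place of the separate `base`/`head` pair:

```ts
import { getOctokit } from "@actions/github";

async function compareBranches(): Promise<void> {
  const octokit = getOctokit(process.env.GITHUB_TOKEN ?? "");
  const { data } = await octokit.rest.repos.compareCommitsWithBasehead({
    owner: "octocat",                      // placeholder
    repo: "hello-world",                   // placeholder
    basehead: "main...my-feature-branch",  // "BASE...HEAD"
  });
  // `status` reports ahead/behind/identical/diverged relative to the base.
  console.log(`${data.status}: ahead by ${data.ahead_by}, behind by ${data.behind_by}`);
  // Without paging parameters the commit list is capped at 250 entries.
  console.log(`commits returned: ${data.commits.length}`);
}
```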
+ * + * The response from the API is equivalent to running the `git log base..head` command; however, commits are returned in chronological order. Pass the appropriate [media type](https://docs.github.com/rest/overview/media-types/#commits-commit-comparison-and-pull-requests) to fetch diff and patch formats. + * + * The response also includes details on the files that were changed between the two commits. This includes the status of the change (for example, if a file was added, removed, modified, or renamed), and details of the change itself. For example, files with a `renamed` status have a `previous_filename` field showing the previous filename of the file, and files with a `modified` status have a `patch` field showing the changes made to the file. + * + * **Working with large comparisons** + * + * To process a response with a large number of commits, you can use (`per_page` or `page`) to paginate the results. When using paging, the list of changed files is only returned with page 1, but includes all changed files for the entire comparison. For more information on working with pagination, see "[Traversing with pagination](/rest/guides/traversing-with-pagination)." + * + * When calling this API without any paging parameters (`per_page` or `page`), the returned list is limited to 250 commits and the last commit in the list is the most recent of the entire comparison. When a paging parameter is specified, the first commit in the returned list of each page is the earliest. + * + * **Signature verification object** + * + * The response will include a `verification` object that describes the result of verifying the commit's signature. The following fields are included in the `verification` object: + * + * | Name | Type | Description | + * | ---- | ---- | ----------- | + * | `verified` | `boolean` | Indicates whether GitHub considers the signature in this commit to be verified. | + * | `reason` | `string` | The reason for verified value. Possible values and their meanings are enumerated in table below. | + * | `signature` | `string` | The signature that was extracted from the commit. | + * | `payload` | `string` | The value that was signed. | + * + * These are the possible values for `reason` in the `verification` object: + * + * | Value | Description | + * | ----- | ----------- | + * | `expired_key` | The key that made the signature is expired. | + * | `not_signing_key` | The "signing" flag is not among the usage flags in the GPG key that made the signature. | + * | `gpgverify_error` | There was an error communicating with the signature verification service. | + * | `gpgverify_unavailable` | The signature verification service is currently unavailable. | + * | `unsigned` | The object does not include a signature. | + * | `unknown_signature_type` | A non-PGP signature was found in the commit. | + * | `no_user` | No user was associated with the `committer` email address in the commit. | + * | `unverified_email` | The `committer` email address in the commit was associated with a user, but the email address is not verified on her/his account. | + * | `bad_email` | The `committer` email address in the commit is not included in the identities of the PGP key that made the signature. | + * | `unknown_key` | The key that made the signature has not been registered with any user's account. | + * | `malformed_signature` | There was an error parsing the signature. | + * | `invalid` | The signature could not be cryptographically verified using the key whose key-id was found in the signature. 
| + * | `valid` | None of the above errors applied, so the signature is considered to be verified. | + */ + compareCommitsWithBasehead: { + (params?: RestEndpointMethodTypes["repos"]["compareCommitsWithBasehead"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Users with admin access to the repository can create an autolink. + */ + createAutolink: { + (params?: RestEndpointMethodTypes["repos"]["createAutolink"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Create a comment for a commit using its `:commit_sha`. + * + * This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. + */ + createCommitComment: { + (params?: RestEndpointMethodTypes["repos"]["createCommitComment"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * When authenticated with admin or owner permissions to the repository, you can use this endpoint to require signed commits on a branch. You must enable branch protection to require signed commits. + */ + createCommitSignatureProtection: { + (params?: RestEndpointMethodTypes["repos"]["createCommitSignatureProtection"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Users with push access in a repository can create commit statuses for a given SHA. + * + * Note: there is a limit of 1000 statuses per `sha` and `context` within a repository. Attempts to create more than 1000 statuses will result in a validation error. + */ + createCommitStatus: { + (params?: RestEndpointMethodTypes["repos"]["createCommitStatus"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * You can create a read-only deploy key. + */ + createDeployKey: { + (params?: RestEndpointMethodTypes["repos"]["createDeployKey"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Deployments offer a few configurable parameters with certain defaults. + * + * The `ref` parameter can be any named branch, tag, or SHA. At GitHub we often deploy branches and verify them + * before we merge a pull request. + * + * The `environment` parameter allows deployments to be issued to different runtime environments. Teams often have + * multiple environments for verifying their applications, such as `production`, `staging`, and `qa`. 
This parameter + * makes it easier to track which environments have requested deployments. The default environment is `production`. + * + * The `auto_merge` parameter is used to ensure that the requested ref is not behind the repository's default branch. If + * the ref _is_ behind the default branch for the repository, we will attempt to merge it for you. If the merge succeeds, + * the API will return a successful merge commit. If merge conflicts prevent the merge from succeeding, the API will + * return a failure response. + * + * By default, [commit statuses](https://docs.github.com/rest/commits/statuses) for every submitted context must be in a `success` + * state. The `required_contexts` parameter allows you to specify a subset of contexts that must be `success`, or to + * specify contexts that have not yet been submitted. You are not required to use commit statuses to deploy. If you do + * not require any contexts or create any commit statuses, the deployment will always succeed. + * + * The `payload` parameter is available for any extra information that a deployment system might need. It is a JSON text + * field that will be passed on when a deployment event is dispatched. + * + * The `task` parameter is used by the deployment system to allow different execution paths. In the web world this might + * be `deploy:migrations` to run schema changes on the system. In the compiled world this could be a flag to compile an + * application with debugging enabled. + * + * Users with `repo` or `repo_deployment` scopes can create a deployment for a given ref. + * + * #### Merged branch response + * You will see this response when GitHub automatically merges the base branch into the topic branch instead of creating + * a deployment. This auto-merge happens when: + * * Auto-merge option is enabled in the repository + * * Topic branch does not include the latest changes on the base branch, which is `master` in the response example + * * There are no merge conflicts + * + * If there are no new commits in the base branch, a new request to create a deployment should give a successful + * response. + * + * #### Merge conflict response + * This error happens when the `auto_merge` option is enabled and when the default branch (in this case `master`), can't + * be merged into the branch that's being deployed (in this case `topic-branch`), due to merge conflicts. + * + * #### Failed commit status checks + * This error happens when the `required_contexts` parameter indicates that one or more contexts need to have a `success` + * status for the commit to be deployed, but one or more of the required contexts do not have a state of `success`. + */ + createDeployment: { + (params?: RestEndpointMethodTypes["repos"]["createDeployment"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Users with `push` access can create deployment statuses for a given deployment. + * + * GitHub Apps require `read & write` access to "Deployments" and `read-only` access to "Repo contents" (for private repos). OAuth Apps require the `repo_deployment` scope. 
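Tying the deployment parameters described above together, a sketch that creates a deployment for a hypothetical `production` environment and then reports a status for it (owner, repo, and token are placeholders) might look like:

```ts
import { getOctokit } from "@actions/github";

async function deployAndReport(): Promise<void> {
  const octokit = getOctokit(process.env.GITHUB_TOKEN ?? "");
  const deployment = await octokit.rest.repos.createDeployment({
    owner: "octocat",                 // placeholder
    repo: "hello-world",              // placeholder
    ref: "main",                      // branch, tag, or SHA
    environment: "production",        // the default environment anyway
    auto_merge: false,                // skip the default-branch merge check
    required_contexts: [],            // do not gate on commit statuses
    payload: { region: "eu-west-1" }, // arbitrary extra deployment data
  });

  // A merged-branch response has no `id`, so narrow the union before using it.
  if ("id" in deployment.data) {
    await octokit.rest.repos.createDeploymentStatus({
      owner: "octocat",
      repo: "hello-world",
      deployment_id: deployment.data.id,
      state: "success",
    });
  }
}
```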
+ */ + createDeploymentStatus: { + (params?: RestEndpointMethodTypes["repos"]["createDeploymentStatus"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * You can use this endpoint to trigger a webhook event called `repository_dispatch` when you want activity that happens outside of GitHub to trigger a GitHub Actions workflow or GitHub App webhook. You must configure your GitHub Actions workflow or GitHub App to run when the `repository_dispatch` event occurs. For an example `repository_dispatch` webhook payload, see "[RepositoryDispatchEvent](https://docs.github.com/webhooks/event-payloads/#repository_dispatch)." + * + * The `client_payload` parameter is available for any extra information that your workflow might need. This parameter is a JSON payload that will be passed on when the webhook event is dispatched. For example, the `client_payload` can include a message that a user would like to send using a GitHub Actions workflow. Or the `client_payload` can be used as a test to debug your workflow. + * + * This endpoint requires write access to the repository by providing either: + * + * - Personal access tokens with `repo` scope. For more information, see "[Creating a personal access token for the command line](https://docs.github.com/articles/creating-a-personal-access-token-for-the-command-line)" in the GitHub Help documentation. + * - GitHub Apps with both `metadata:read` and `contents:read&write` permissions. + * + * This input example shows how you can use the `client_payload` as a test to debug your workflow. + */ + createDispatchEvent: { + (params?: RestEndpointMethodTypes["repos"]["createDispatchEvent"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Creates a new repository for the authenticated user. + * + * **OAuth scope requirements** + * + * When using [OAuth](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/), authorizations must include: + * + * * `public_repo` scope or `repo` scope to create a public repository. Note: For GitHub AE, use `repo` scope to create an internal repository. + * * `repo` scope to create a private repository. + */ + createForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["repos"]["createForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Create a fork for the authenticated user. + * + * **Note**: Forking a Repository happens asynchronously. You may have to wait a short period of time before you can access the git objects. If this takes longer than 5 minutes, be sure to contact [GitHub Support](https://support.github.com/contact?tags=dotcom-rest-api). + */ + createFork: { + (params?: RestEndpointMethodTypes["repos"]["createFork"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Creates a new repository in the specified organization. The authenticated user must be a member of the organization. + * + * **OAuth scope requirements** + * + * When using [OAuth](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/), authorizations must include: + * + * * `public_repo` scope or `repo` scope to create a public repository. Note: For GitHub AE, use `repo` scope to create an internal repository. 
+ * * `repo` scope to create a private repository + */ + createInOrg: { + (params?: RestEndpointMethodTypes["repos"]["createInOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Create or update an environment with protection rules, such as required reviewers. For more information about environment protection rules, see "[Environments](/actions/reference/environments#environment-protection-rules)." + * + * **Note:** Although you can use this operation to specify that only branches that match specified name patterns can deploy to this environment, you must use the UI to set the name patterns. For more information, see "[Environments](/actions/reference/environments#deployment-branches)." + * + * **Note:** To create or update secrets for an environment, see "[Secrets](/rest/reference/actions#secrets)." + * + * You must authenticate using an access token with the repo scope to use this endpoint. + */ + createOrUpdateEnvironment: { + (params?: RestEndpointMethodTypes["repos"]["createOrUpdateEnvironment"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Creates a new file or replaces an existing file in a repository. + */ + createOrUpdateFileContents: { + (params?: RestEndpointMethodTypes["repos"]["createOrUpdateFileContents"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Configures a GitHub Pages site. For more information, see "[About GitHub Pages](/github/working-with-github-pages/about-github-pages)." + */ + createPagesSite: { + (params?: RestEndpointMethodTypes["repos"]["createPagesSite"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Users with push access to the repository can create a release. + * + * This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. + */ + createRelease: { + (params?: RestEndpointMethodTypes["repos"]["createRelease"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * This creates a tag protection state for a repository. + * This endpoint is only available to repository administrators. + */ + createTagProtection: { + (params?: RestEndpointMethodTypes["repos"]["createTagProtection"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Creates a new repository using a repository template. Use the `template_owner` and `template_repo` route parameters to specify the repository to use as the template. The authenticated user must own or be a member of an organization that owns the repository. 
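For `createOrUpdateFileContents` (documented just above), the practical details are that `content` must be Base64-encoded and that replacing an existing file additionally requires the current blob `sha`; a minimal create-only sketch with a placeholder path could be:

```ts
import { getOctokit } from "@actions/github";

async function writeFile(): Promise<void> {
  const octokit = getOctokit(process.env.GITHUB_TOKEN ?? "");
  await octokit.rest.repos.createOrUpdateFileContents({
    owner: "octocat",        // placeholder
    repo: "hello-world",     // placeholder
    path: "docs/notes.md",   // placeholder
    message: "Add notes",
    // The API expects Base64, not raw text.
    content: Buffer.from("# Notes\n", "utf8").toString("base64"),
    // To replace an existing file, also pass the current file's `sha`.
  });
}
```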
To check if a repository is available to use as a template, get the repository's information using the [Get a repository](https://docs.github.com/rest/reference/repos#get-a-repository) endpoint and check that the `is_template` key is `true`. + * + * **OAuth scope requirements** + * + * When using [OAuth](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/), authorizations must include: + * + * * `public_repo` scope or `repo` scope to create a public repository. Note: For GitHub AE, use `repo` scope to create an internal repository. + * * `repo` scope to create a private repository + */ + createUsingTemplate: { + (params?: RestEndpointMethodTypes["repos"]["createUsingTemplate"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Repositories can have multiple webhooks installed. Each webhook should have a unique `config`. Multiple webhooks can + * share the same `config` as long as those webhooks do not have any `events` that overlap. + */ + createWebhook: { + (params?: RestEndpointMethodTypes["repos"]["createWebhook"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * @deprecated octokit.rest.repos.declineInvitation() has been renamed to octokit.rest.repos.declineInvitationForAuthenticatedUser() (2021-10-05) + */ + declineInvitation: { + (params?: RestEndpointMethodTypes["repos"]["declineInvitation"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + declineInvitationForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["repos"]["declineInvitationForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Deleting a repository requires admin access. If OAuth is used, the `delete_repo` scope is required. + * + * If an organization owner has configured the organization to prevent members from deleting organization-owned + * repositories, you will get a `403 Forbidden` response. + */ + delete: { + (params?: RestEndpointMethodTypes["repos"]["delete"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Disables the ability to restrict who can push to this branch. + */ + deleteAccessRestrictions: { + (params?: RestEndpointMethodTypes["repos"]["deleteAccessRestrictions"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. 
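Given the note above that multiple webhooks may share a `config` only when their `events` do not overlap, a sketch that registers a push/pull_request webhook (placeholder payload URL and secret) might look like:

```ts
import { getOctokit } from "@actions/github";

async function addWebhook(): Promise<void> {
  const octokit = getOctokit(process.env.GITHUB_TOKEN ?? "");
  await octokit.rest.repos.createWebhook({
    owner: "octocat",      // placeholder
    repo: "hello-world",   // placeholder
    config: {
      url: "https://example.com/github-webhook", // placeholder receiver
      content_type: "json",
      secret: process.env.WEBHOOK_SECRET,         // used to sign deliveries
    },
    events: ["push", "pull_request"],
    active: true,
  });
}
```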
+ * + * Removing admin enforcement requires admin or owner permissions to the repository and branch protection to be enabled. + */ + deleteAdminBranchProtection: { + (params?: RestEndpointMethodTypes["repos"]["deleteAdminBranchProtection"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * You must authenticate using an access token with the repo scope to use this endpoint. + */ + deleteAnEnvironment: { + (params?: RestEndpointMethodTypes["repos"]["deleteAnEnvironment"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * This deletes a single autolink reference by ID that was configured for the given repository. + * + * Information about autolinks are only available to repository administrators. + */ + deleteAutolink: { + (params?: RestEndpointMethodTypes["repos"]["deleteAutolink"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + */ + deleteBranchProtection: { + (params?: RestEndpointMethodTypes["repos"]["deleteBranchProtection"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + deleteCommitComment: { + (params?: RestEndpointMethodTypes["repos"]["deleteCommitComment"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * When authenticated with admin or owner permissions to the repository, you can use this endpoint to disable required signed commits on a branch. You must enable branch protection to require signed commits. + */ + deleteCommitSignatureProtection: { + (params?: RestEndpointMethodTypes["repos"]["deleteCommitSignatureProtection"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Deploy keys are immutable. If you need to update a key, remove the key and create a new one instead. + */ + deleteDeployKey: { + (params?: RestEndpointMethodTypes["repos"]["deleteDeployKey"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * If the repository only has one deployment, you can delete the deployment regardless of its status. If the repository has more than one deployment, you can only delete inactive deployments. This ensures that repositories with multiple deployments will always have an active deployment. Anyone with `repo` or `repo_deployment` scopes can delete a deployment. 
+ * + * To set a deployment as inactive, you must: + * + * * Create a new deployment that is active so that the system has a record of the current state, then delete the previously active deployment. + * * Mark the active deployment as inactive by adding any non-successful deployment status. + * + * For more information, see "[Create a deployment](https://docs.github.com/rest/reference/repos/#create-a-deployment)" and "[Create a deployment status](https://docs.github.com/rest/reference/repos#create-a-deployment-status)." + */ + deleteDeployment: { + (params?: RestEndpointMethodTypes["repos"]["deleteDeployment"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Deletes a file in a repository. + * + * You can provide an additional `committer` parameter, which is an object containing information about the committer. Or, you can provide an `author` parameter, which is an object containing information about the author. + * + * The `author` section is optional and is filled in with the `committer` information if omitted. If the `committer` information is omitted, the authenticated user's information is used. + * + * You must provide values for both `name` and `email`, whether you choose to use `author` or `committer`. Otherwise, you'll receive a `422` status code. + */ + deleteFile: { + (params?: RestEndpointMethodTypes["repos"]["deleteFile"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + deleteInvitation: { + (params?: RestEndpointMethodTypes["repos"]["deleteInvitation"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + deletePagesSite: { + (params?: RestEndpointMethodTypes["repos"]["deletePagesSite"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + */ + deletePullRequestReviewProtection: { + (params?: RestEndpointMethodTypes["repos"]["deletePullRequestReviewProtection"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Users with push access to the repository can delete a release. + */ + deleteRelease: { + (params?: RestEndpointMethodTypes["repos"]["deleteRelease"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + deleteReleaseAsset: { + (params?: RestEndpointMethodTypes["repos"]["deleteReleaseAsset"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * This deletes a tag protection state for a repository. + * This endpoint is only available to repository administrators. 
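The `deleteFile` description above notes that the target blob `sha` is required and that a `committer`/`author` object must include both `name` and `email`; a sketch that looks up the blob first (placeholder values throughout) might be:

```ts
import { getOctokit } from "@actions/github";

async function removeFile(): Promise<void> {
  const octokit = getOctokit(process.env.GITHUB_TOKEN ?? "");
  const owner = "octocat";          // placeholder
  const repo = "hello-world";       // placeholder
  const path = "docs/obsolete.md";  // placeholder

  // Fetch the current blob SHA, which deleteFile requires.
  const existing = await octokit.rest.repos.getContent({ owner, repo, path });
  if (Array.isArray(existing.data) || existing.data.type !== "file") {
    throw new Error(`${path} is not a regular file`);
  }

  await octokit.rest.repos.deleteFile({
    owner,
    repo,
    path,
    message: "Remove obsolete notes",
    sha: existing.data.sha,
    committer: { name: "CI Bot", email: "ci-bot@example.com" }, // placeholder identity
  });
}
```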
+ */ + deleteTagProtection: { + (params?: RestEndpointMethodTypes["repos"]["deleteTagProtection"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + deleteWebhook: { + (params?: RestEndpointMethodTypes["repos"]["deleteWebhook"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Disables automated security fixes for a repository. The authenticated user must have admin access to the repository. For more information, see "[Configuring automated security fixes](https://docs.github.com/en/articles/configuring-automated-security-fixes)". + */ + disableAutomatedSecurityFixes: { + (params?: RestEndpointMethodTypes["repos"]["disableAutomatedSecurityFixes"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + disableLfsForRepo: { + (params?: RestEndpointMethodTypes["repos"]["disableLfsForRepo"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Disables dependency alerts and the dependency graph for a repository. The authenticated user must have admin access to the repository. For more information, see "[About security alerts for vulnerable dependencies](https://docs.github.com/en/articles/about-security-alerts-for-vulnerable-dependencies)". + */ + disableVulnerabilityAlerts: { + (params?: RestEndpointMethodTypes["repos"]["disableVulnerabilityAlerts"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets a redirect URL to download a zip archive for a repository. If you omit `:ref`, the repository’s default branch (usually + * `master`) will be used. Please make sure your HTTP framework is configured to follow redirects or you will need to use + * the `Location` header to make a second `GET` request. + * **Note**: For private repositories, these links are temporary and expire after five minutes. + * @deprecated octokit.rest.repos.downloadArchive() has been renamed to octokit.rest.repos.downloadZipballArchive() (2020-09-17) + */ + downloadArchive: { + (params?: RestEndpointMethodTypes["repos"]["downloadArchive"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets a redirect URL to download a tar archive for a repository. If you omit `:ref`, the repository’s default branch (usually + * `master`) will be used. Please make sure your HTTP framework is configured to follow redirects or you will need to use + * the `Location` header to make a second `GET` request. + * **Note**: For private repositories, these links are temporary and expire after five minutes. + */ + downloadTarballArchive: { + (params?: RestEndpointMethodTypes["repos"]["downloadTarballArchive"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets a redirect URL to download a zip archive for a repository. If you omit `:ref`, the repository’s default branch (usually + * `master`) will be used. Please make sure your HTTP framework is configured to follow redirects or you will need to use + * the `Location` header to make a second `GET` request. + * **Note**: For private repositories, these links are temporary and expire after five minutes. 
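Since the archive endpoints above answer with a redirect to a short-lived download URL, a sketch either lets the HTTP client follow the redirect or reads the `Location` header itself; with this client the redirect is normally followed, so the resolved URL is available on the response (placeholder repo and ref):

```ts
import { getOctokit } from "@actions/github";

async function locateSourceTarball(): Promise<void> {
  const octokit = getOctokit(process.env.GITHUB_TOKEN ?? "");
  const response = await octokit.rest.repos.downloadTarballArchive({
    owner: "octocat",     // placeholder
    repo: "hello-world",  // placeholder
    ref: "main",          // omit `ref` to use the default branch
  });
  // `response.url` is the post-redirect archive URL; for private repositories
  // it expires after about five minutes, as noted above.
  console.log(response.status, response.url);
}
```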
+ */ + downloadZipballArchive: { + (params?: RestEndpointMethodTypes["repos"]["downloadZipballArchive"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Enables automated security fixes for a repository. The authenticated user must have admin access to the repository. For more information, see "[Configuring automated security fixes](https://docs.github.com/en/articles/configuring-automated-security-fixes)". + */ + enableAutomatedSecurityFixes: { + (params?: RestEndpointMethodTypes["repos"]["enableAutomatedSecurityFixes"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + enableLfsForRepo: { + (params?: RestEndpointMethodTypes["repos"]["enableLfsForRepo"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Enables dependency alerts and the dependency graph for a repository. The authenticated user must have admin access to the repository. For more information, see "[About security alerts for vulnerable dependencies](https://docs.github.com/en/articles/about-security-alerts-for-vulnerable-dependencies)". + */ + enableVulnerabilityAlerts: { + (params?: RestEndpointMethodTypes["repos"]["enableVulnerabilityAlerts"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Generate a name and body describing a [release](https://docs.github.com/rest/reference/repos#releases). The body content will be markdown formatted and contain information like the changes since last release and users who contributed. The generated release notes are not saved anywhere. They are intended to be generated and used when creating a new release. + */ + generateReleaseNotes: { + (params?: RestEndpointMethodTypes["repos"]["generateReleaseNotes"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * The `parent` and `source` objects are present when the repository is a fork. `parent` is the repository this repository was forked from, `source` is the ultimate source for the network. + */ + get: { + (params?: RestEndpointMethodTypes["repos"]["get"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Lists who has access to this protected branch. + * + * **Note**: Users, apps, and teams `restrictions` are only available for organization-owned repositories. + */ + getAccessRestrictions: { + (params?: RestEndpointMethodTypes["repos"]["getAccessRestrictions"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. 
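Because `generateReleaseNotes` only computes a name and body without saving anything, it pairs naturally with `createRelease` (documented earlier in this block); a sketch for a hypothetical `v1.2.0` tag:

```ts
import { getOctokit } from "@actions/github";

async function publishRelease(): Promise<void> {
  const octokit = getOctokit(process.env.GITHUB_TOKEN ?? "");
  const owner = "octocat";     // placeholder
  const repo = "hello-world";  // placeholder

  // Generated notes are returned to the caller, not stored by GitHub.
  const notes = await octokit.rest.repos.generateReleaseNotes({
    owner,
    repo,
    tag_name: "v1.2.0",            // placeholder tag
    previous_tag_name: "v1.1.0",   // placeholder previous tag
  });

  await octokit.rest.repos.createRelease({
    owner,
    repo,
    tag_name: "v1.2.0",
    name: notes.data.name,
    body: notes.data.body,
    draft: true, // review before publishing
  });
}
```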
For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + */ + getAdminBranchProtection: { + (params?: RestEndpointMethodTypes["repos"]["getAdminBranchProtection"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Get all environments for a repository. + * + * Anyone with read access to the repository can use this endpoint. If the repository is private, you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + getAllEnvironments: { + (params?: RestEndpointMethodTypes["repos"]["getAllEnvironments"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + */ + getAllStatusCheckContexts: { + (params?: RestEndpointMethodTypes["repos"]["getAllStatusCheckContexts"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + getAllTopics: { + (params?: RestEndpointMethodTypes["repos"]["getAllTopics"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Lists the GitHub Apps that have push access to this branch. Only installed GitHub Apps with `write` access to the `contents` permission can be added as authorized actors on a protected branch. + */ + getAppsWithAccessToProtectedBranch: { + (params?: RestEndpointMethodTypes["repos"]["getAppsWithAccessToProtectedBranch"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * This returns a single autolink reference by ID that was configured for the given repository. + * + * Information about autolinks are only available to repository administrators. + */ + getAutolink: { + (params?: RestEndpointMethodTypes["repos"]["getAutolink"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + getBranch: { + (params?: RestEndpointMethodTypes["repos"]["getBranch"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. 
For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + */ + getBranchProtection: { + (params?: RestEndpointMethodTypes["repos"]["getBranchProtection"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Get the total number of clones and breakdown per day or week for the last 14 days. Timestamps are aligned to UTC midnight of the beginning of the day or week. Week begins on Monday. + */ + getClones: { + (params?: RestEndpointMethodTypes["repos"]["getClones"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Returns a weekly aggregate of the number of additions and deletions pushed to a repository. + */ + getCodeFrequencyStats: { + (params?: RestEndpointMethodTypes["repos"]["getCodeFrequencyStats"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Checks the repository permission of a collaborator. The possible repository permissions are `admin`, `write`, `read`, and `none`. + */ + getCollaboratorPermissionLevel: { + (params?: RestEndpointMethodTypes["repos"]["getCollaboratorPermissionLevel"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Users with pull access in a repository can access a combined view of commit statuses for a given ref. The ref can be a SHA, a branch name, or a tag name. + * + * + * Additionally, a combined `state` is returned. The `state` is one of: + * + * * **failure** if any of the contexts report as `error` or `failure` + * * **pending** if there are no statuses or a context is `pending` + * * **success** if the latest status for all contexts is `success` + */ + getCombinedStatusForRef: { + (params?: RestEndpointMethodTypes["repos"]["getCombinedStatusForRef"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Returns the contents of a single commit reference. You must have `read` access for the repository to use this endpoint. + * + * **Note:** If there are more than 300 files in the commit diff, the response will include pagination link headers for the remaining files, up to a limit of 3000 files. Each page contains the static commit information, and the only changes are to the file listing. + * + * You can pass the appropriate [media type](https://docs.github.com/rest/overview/media-types/#commits-commit-comparison-and-pull-requests) to fetch `diff` and `patch` formats. Diffs with binary data will have no `patch` property. + * + * To return only the SHA-1 hash of the commit reference, you can provide the `sha` custom [media type](https://docs.github.com/rest/overview/media-types/#commits-commit-comparison-and-pull-requests) in the `Accept` header. You can use this endpoint to check if a remote reference's SHA-1 hash is the same as your local reference's SHA-1 hash by providing the local SHA-1 reference as the ETag. + * + * **Signature verification object** + * + * The response will include a `verification` object that describes the result of verifying the commit's signature. 
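The combined-status rules quoted above (failure / pending / success) make `getCombinedStatusForRef` a convenient single check before promoting a ref; a sketch with a placeholder repository:

```ts
import { getOctokit } from "@actions/github";

async function refIsGreen(ref: string): Promise<boolean> {
  const octokit = getOctokit(process.env.GITHUB_TOKEN ?? "");
  const { data } = await octokit.rest.repos.getCombinedStatusForRef({
    owner: "octocat",     // placeholder
    repo: "hello-world",  // placeholder
    ref,                  // SHA, branch name, or tag name
  });
  // `state` is "failure", "pending", or "success" per the rules above;
  // the individual contexts are available under `data.statuses`.
  return data.state === "success";
}
```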
The following fields are included in the `verification` object: + * + * | Name | Type | Description | + * | ---- | ---- | ----------- | + * | `verified` | `boolean` | Indicates whether GitHub considers the signature in this commit to be verified. | + * | `reason` | `string` | The reason for verified value. Possible values and their meanings are enumerated in table below. | + * | `signature` | `string` | The signature that was extracted from the commit. | + * | `payload` | `string` | The value that was signed. | + * + * These are the possible values for `reason` in the `verification` object: + * + * | Value | Description | + * | ----- | ----------- | + * | `expired_key` | The key that made the signature is expired. | + * | `not_signing_key` | The "signing" flag is not among the usage flags in the GPG key that made the signature. | + * | `gpgverify_error` | There was an error communicating with the signature verification service. | + * | `gpgverify_unavailable` | The signature verification service is currently unavailable. | + * | `unsigned` | The object does not include a signature. | + * | `unknown_signature_type` | A non-PGP signature was found in the commit. | + * | `no_user` | No user was associated with the `committer` email address in the commit. | + * | `unverified_email` | The `committer` email address in the commit was associated with a user, but the email address is not verified on her/his account. | + * | `bad_email` | The `committer` email address in the commit is not included in the identities of the PGP key that made the signature. | + * | `unknown_key` | The key that made the signature has not been registered with any user's account. | + * | `malformed_signature` | There was an error parsing the signature. | + * | `invalid` | The signature could not be cryptographically verified using the key whose key-id was found in the signature. | + * | `valid` | None of the above errors applied, so the signature is considered to be verified. | + */ + getCommit: { + (params?: RestEndpointMethodTypes["repos"]["getCommit"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Returns the last year of commit activity grouped by week. The `days` array is a group of commits per day, starting on `Sunday`. + */ + getCommitActivityStats: { + (params?: RestEndpointMethodTypes["repos"]["getCommitActivityStats"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + getCommitComment: { + (params?: RestEndpointMethodTypes["repos"]["getCommitComment"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * When authenticated with admin or owner permissions to the repository, you can use this endpoint to check whether a branch requires signed commits. An enabled status of `true` indicates you must sign commits on this branch. For more information, see [Signing commits with GPG](https://docs.github.com/articles/signing-commits-with-gpg) in GitHub Help. 
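To make the `verification` table above concrete, a sketch that fetches a single commit (placeholder repository, caller-supplied ref) and reports its signature verdict:

```ts
import { getOctokit } from "@actions/github";

async function reportSignature(ref: string): Promise<void> {
  const octokit = getOctokit(process.env.GITHUB_TOKEN ?? "");
  const { data } = await octokit.rest.repos.getCommit({
    owner: "octocat",     // placeholder
    repo: "hello-world",  // placeholder
    ref,                  // SHA, branch name, or tag name
  });
  const verification = data.commit.verification;
  // `reason` is one of the values enumerated above, e.g. "valid" or "unsigned".
  console.log(
    `verified=${verification?.verified ?? false} reason=${verification?.reason ?? "n/a"}`
  );
}
```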
+ * + * **Note**: You must enable branch protection to require signed commits. + */ + getCommitSignatureProtection: { + (params?: RestEndpointMethodTypes["repos"]["getCommitSignatureProtection"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * This endpoint will return all community profile metrics, including an + * overall health score, repository description, the presence of documentation, detected + * code of conduct, detected license, and the presence of ISSUE\_TEMPLATE, PULL\_REQUEST\_TEMPLATE, + * README, and CONTRIBUTING files. + * + * The `health_percentage` score is defined as a percentage of how many of + * these four documents are present: README, CONTRIBUTING, LICENSE, and + * CODE_OF_CONDUCT. For example, if all four documents are present, then + * the `health_percentage` is `100`. If only one is present, then the + * `health_percentage` is `25`. + * + * `content_reports_enabled` is only returned for organization-owned repositories. + */ + getCommunityProfileMetrics: { + (params?: RestEndpointMethodTypes["repos"]["getCommunityProfileMetrics"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets the contents of a file or directory in a repository. Specify the file path or directory in `:path`. If you omit + * `:path`, you will receive the contents of the repository's root directory. See the description below regarding what the API response includes for directories. + * + * Files and symlinks support [a custom media type](https://docs.github.com/rest/reference/repos#custom-media-types) for + * retrieving the raw content or rendered HTML (when supported). All content types support [a custom media + * type](https://docs.github.com/rest/reference/repos#custom-media-types) to ensure the content is returned in a consistent + * object format. + * + * **Note**: + * * To get a repository's contents recursively, you can [recursively get the tree](https://docs.github.com/rest/reference/git#trees). + * * This API has an upper limit of 1,000 files for a directory. If you need to retrieve more files, use the [Git Trees + * API](https://docs.github.com/rest/reference/git#get-a-tree). + * + * #### Size limits + * If the requested file's size is: + * * 1 MB or smaller: All features of this endpoint are supported. + * * Between 1-100 MB: Only the `raw` or `object` [custom media types](https://docs.github.com/rest/repos/contents#custom-media-types-for-repository-contents) are supported. Both will work as normal, except that when using the `object` media type, the `content` field will be an empty string and the `encoding` field will be `"none"`. To get the contents of these larger files, use the `raw` media type. + * * Greater than 100 MB: This endpoint is not supported. + * + * #### If the content is a directory + * The response will be an array of objects, one object for each item in the directory. + * When listing the contents of a directory, submodules have their "type" specified as "file". Logically, the value + * _should_ be "submodule". This behavior exists in API v3 [for backwards compatibility purposes](https://git.io/v1YCW). + * In the next major version of the API, the type will be returned as "submodule". 
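As a concrete reading of the `health_percentage` definition above (25 points for each of README, CONTRIBUTING, LICENSE, and CODE_OF_CONDUCT detected), a small sketch with a placeholder repository:

```ts
import { getOctokit } from "@actions/github";

async function communityHealth(): Promise<void> {
  const octokit = getOctokit(process.env.GITHUB_TOKEN ?? "");
  const { data } = await octokit.rest.repos.getCommunityProfileMetrics({
    owner: "octocat",     // placeholder
    repo: "hello-world",  // placeholder
  });
  // 100 means all four of README, CONTRIBUTING, LICENSE and CODE_OF_CONDUCT were found.
  console.log(`health: ${data.health_percentage}%`);
  console.log(`code of conduct: ${data.files.code_of_conduct ? "present" : "missing"}`);
}
```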
+ * + * #### If the content is a symlink + * If the requested `:path` points to a symlink, and the symlink's target is a normal file in the repository, then the + * API responds with the content of the file (in the format shown in the example. Otherwise, the API responds with an object + * describing the symlink itself. + * + * #### If the content is a submodule + * The `submodule_git_url` identifies the location of the submodule repository, and the `sha` identifies a specific + * commit within the submodule repository. Git uses the given URL when cloning the submodule repository, and checks out + * the submodule at that specific commit. + * + * If the submodule repository is not hosted on github.com, the Git URLs (`git_url` and `_links["git"]`) and the + * github.com URLs (`html_url` and `_links["html"]`) will have null values. + */ + getContent: { + (params?: RestEndpointMethodTypes["repos"]["getContent"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Returns the `total` number of commits authored by the contributor. In addition, the response includes a Weekly Hash (`weeks` array) with the following information: + * + * * `w` - Start of the week, given as a [Unix timestamp](http://en.wikipedia.org/wiki/Unix_time). + * * `a` - Number of additions + * * `d` - Number of deletions + * * `c` - Number of commits + */ + getContributorsStats: { + (params?: RestEndpointMethodTypes["repos"]["getContributorsStats"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + getDeployKey: { + (params?: RestEndpointMethodTypes["repos"]["getDeployKey"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + getDeployment: { + (params?: RestEndpointMethodTypes["repos"]["getDeployment"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Users with pull access can view a deployment status for a deployment: + */ + getDeploymentStatus: { + (params?: RestEndpointMethodTypes["repos"]["getDeploymentStatus"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Anyone with read access to the repository can use this endpoint. If the repository is private, you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + getEnvironment: { + (params?: RestEndpointMethodTypes["repos"]["getEnvironment"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + getLatestPagesBuild: { + (params?: RestEndpointMethodTypes["repos"]["getLatestPagesBuild"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * View the latest published full release for the repository. + * + * The latest release is the most recent non-prerelease, non-draft release, sorted by the `created_at` attribute. The `created_at` attribute is the date of the commit used for the release, and not the date when the release was drafted or published. 
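+ *
+ * Illustrative sketch only (not part of the upstream documentation): calling this method through the
+ * generated `octokit.rest` namespace. The client setup via `@actions/github` and the `owner`/`repo`
+ * values are placeholder assumptions.
+ *
+ * ```ts
+ * import * as github from "@actions/github";
+ *
+ * const octokit = github.getOctokit(process.env.GITHUB_TOKEN ?? "");
+ * const { data: release } = await octokit.rest.repos.getLatestRelease({
+ *   owner: "octocat",      // placeholder
+ *   repo: "hello-world",   // placeholder
+ * });
+ * // `created_at` is the date of the commit used for the release, per the note above.
+ * console.log(release.tag_name, release.created_at);
+ * ```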
+ */ + getLatestRelease: { + (params?: RestEndpointMethodTypes["repos"]["getLatestRelease"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + getPages: { + (params?: RestEndpointMethodTypes["repos"]["getPages"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + getPagesBuild: { + (params?: RestEndpointMethodTypes["repos"]["getPagesBuild"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets a health check of the DNS settings for the `CNAME` record configured for a repository's GitHub Pages. + * + * The first request to this endpoint returns a `202 Accepted` status and starts an asynchronous background task to get the results for the domain. After the background task completes, subsequent requests to this endpoint return a `200 OK` status with the health check results in the response. + * + * Users must have admin or owner permissions. GitHub Apps must have the `pages:write` and `administration:write` permission to use this endpoint. + */ + getPagesHealthCheck: { + (params?: RestEndpointMethodTypes["repos"]["getPagesHealthCheck"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Returns the total commit counts for the `owner` and total commit counts in `all`. `all` is everyone combined, including the `owner` in the last 52 weeks. If you'd like to get the commit counts for non-owners, you can subtract `owner` from `all`. + * + * The array order is oldest week (index 0) to most recent week. + */ + getParticipationStats: { + (params?: RestEndpointMethodTypes["repos"]["getParticipationStats"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + */ + getPullRequestReviewProtection: { + (params?: RestEndpointMethodTypes["repos"]["getPullRequestReviewProtection"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Each array contains the day number, hour number, and number of commits: + * + * * `0-6`: Sunday - Saturday + * * `0-23`: Hour of day + * * Number of commits + * + * For example, `[2, 14, 25]` indicates that there were 25 total commits, during the 2:00pm hour on Tuesdays. All times are based on the time zone of individual commits. + */ + getPunchCardStats: { + (params?: RestEndpointMethodTypes["repos"]["getPunchCardStats"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets the preferred README for a repository. + * + * READMEs support [custom media types](https://docs.github.com/rest/reference/repos#custom-media-types) for retrieving the raw content or rendered HTML. 
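+ *
+ * A minimal sketch (illustrative, not upstream documentation) of fetching the preferred README and
+ * decoding its default base64 payload; the client and the `owner`/`repo` values are assumptions.
+ *
+ * ```ts
+ * // octokit: an authenticated client, e.g. github.getOctokit(token)
+ * const { data: readme } = await octokit.rest.repos.getReadme({ owner: "octocat", repo: "hello-world" });
+ * // Without a custom media type the content arrives base64-encoded.
+ * const text = Buffer.from(readme.content, "base64").toString("utf8");
+ * console.log(text.slice(0, 200));
+ * ```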
+ */ + getReadme: { + (params?: RestEndpointMethodTypes["repos"]["getReadme"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets the README from a repository directory. + * + * READMEs support [custom media types](https://docs.github.com/rest/reference/repos#custom-media-types) for retrieving the raw content or rendered HTML. + */ + getReadmeInDirectory: { + (params?: RestEndpointMethodTypes["repos"]["getReadmeInDirectory"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * **Note:** This returns an `upload_url` key corresponding to the endpoint for uploading release assets. This key is a [hypermedia resource](https://docs.github.com/rest/overview/resources-in-the-rest-api#hypermedia). + */ + getRelease: { + (params?: RestEndpointMethodTypes["repos"]["getRelease"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * To download the asset's binary content, set the `Accept` header of the request to [`application/octet-stream`](https://docs.github.com/rest/overview/media-types). The API will either redirect the client to the location, or stream it directly if possible. API clients should handle both a `200` or `302` response. + */ + getReleaseAsset: { + (params?: RestEndpointMethodTypes["repos"]["getReleaseAsset"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Get a published release with the specified tag. + */ + getReleaseByTag: { + (params?: RestEndpointMethodTypes["repos"]["getReleaseByTag"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + */ + getStatusChecksProtection: { + (params?: RestEndpointMethodTypes["repos"]["getStatusChecksProtection"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Lists the teams who have push access to this branch. The list includes child teams. + */ + getTeamsWithAccessToProtectedBranch: { + (params?: RestEndpointMethodTypes["repos"]["getTeamsWithAccessToProtectedBranch"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Get the top 10 popular contents over the last 14 days. 
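+ *
+ * Illustrative sketch (not from the upstream docs); the client and repository values are assumptions.
+ *
+ * ```ts
+ * // octokit: an authenticated client, e.g. github.getOctokit(token)
+ * const { data: paths } = await octokit.rest.repos.getTopPaths({ owner: "octocat", repo: "hello-world" });
+ * for (const p of paths) console.log(`${p.path}: ${p.count} views (${p.uniques} unique)`);
+ * ```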
+ */ + getTopPaths: { + (params?: RestEndpointMethodTypes["repos"]["getTopPaths"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Get the top 10 referrers over the last 14 days. + */ + getTopReferrers: { + (params?: RestEndpointMethodTypes["repos"]["getTopReferrers"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Lists the people who have push access to this branch. + */ + getUsersWithAccessToProtectedBranch: { + (params?: RestEndpointMethodTypes["repos"]["getUsersWithAccessToProtectedBranch"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Get the total number of views and breakdown per day or week for the last 14 days. Timestamps are aligned to UTC midnight of the beginning of the day or week. Week begins on Monday. + */ + getViews: { + (params?: RestEndpointMethodTypes["repos"]["getViews"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Returns a webhook configured in a repository. To get only the webhook `config` properties, see "[Get a webhook configuration for a repository](/rest/reference/repos#get-a-webhook-configuration-for-a-repository)." + */ + getWebhook: { + (params?: RestEndpointMethodTypes["repos"]["getWebhook"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Returns the webhook configuration for a repository. To get more information about the webhook, including the `active` state and `events`, use "[Get a repository webhook](/rest/reference/orgs#get-a-repository-webhook)." + * + * Access tokens must have the `read:repo_hook` or `repo` scope, and GitHub Apps must have the `repository_hooks:read` permission. + */ + getWebhookConfigForRepo: { + (params?: RestEndpointMethodTypes["repos"]["getWebhookConfigForRepo"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Returns a delivery for a webhook configured in a repository. + */ + getWebhookDelivery: { + (params?: RestEndpointMethodTypes["repos"]["getWebhookDelivery"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * This returns a list of autolinks configured for the given repository. + * + * Information about autolinks are only available to repository administrators. 
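+ *
+ * Illustrative sketch (not from the upstream docs); it assumes a client with the admin access noted
+ * above and placeholder repository values.
+ *
+ * ```ts
+ * // octokit: an authenticated client, e.g. github.getOctokit(token)
+ * const { data: autolinks } = await octokit.rest.repos.listAutolinks({ owner: "octocat", repo: "hello-world" });
+ * for (const a of autolinks) console.log(`${a.key_prefix} -> ${a.url_template}`);
+ * ```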
+ */ + listAutolinks: { + (params?: RestEndpointMethodTypes["repos"]["listAutolinks"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + listBranches: { + (params?: RestEndpointMethodTypes["repos"]["listBranches"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Returns all branches where the given commit SHA is the HEAD, or latest commit for the branch. + */ + listBranchesForHeadCommit: { + (params?: RestEndpointMethodTypes["repos"]["listBranchesForHeadCommit"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * For organization-owned repositories, the list of collaborators includes outside collaborators, organization members that are direct collaborators, organization members with access through team memberships, organization members with access through default organization permissions, and organization owners. + * Organization members with write, maintain, or admin privileges on the organization-owned repository can use this endpoint. + * + * Team members will include the members of child teams. + * + * You must authenticate using an access token with the `read:org` and `repo` scopes with push access to use this + * endpoint. GitHub Apps must have the `members` organization permission and `metadata` repository permission to use this + * endpoint. + */ + listCollaborators: { + (params?: RestEndpointMethodTypes["repos"]["listCollaborators"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Use the `:commit_sha` to specify the commit that will have its comments listed. + */ + listCommentsForCommit: { + (params?: RestEndpointMethodTypes["repos"]["listCommentsForCommit"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Commit Comments use [these custom media types](https://docs.github.com/rest/reference/repos#custom-media-types). You can read more about the use of media types in the API [here](https://docs.github.com/rest/overview/media-types/). + * + * Comments are ordered by ascending ID. + */ + listCommitCommentsForRepo: { + (params?: RestEndpointMethodTypes["repos"]["listCommitCommentsForRepo"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Users with pull access in a repository can view commit statuses for a given ref. The ref can be a SHA, a branch name, or a tag name. Statuses are returned in reverse chronological order. The first status in the list will be the latest one. + * + * This resource is also available via a legacy route: `GET /repos/:owner/:repo/statuses/:ref`. 
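+ *
+ * Illustrative sketch (not from the upstream docs): because statuses come back newest first, the first
+ * element is the latest status. The client, repository, and `ref` values are assumptions.
+ *
+ * ```ts
+ * // octokit: an authenticated client, e.g. github.getOctokit(token)
+ * const { data: statuses } = await octokit.rest.repos.listCommitStatusesForRef({
+ *   owner: "octocat",
+ *   repo: "hello-world",
+ *   ref: "main",           // a SHA, branch name, or tag name
+ * });
+ * const latest = statuses[0];
+ * if (latest) console.log(`${latest.context}: ${latest.state}`);
+ * ```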
+ */ + listCommitStatusesForRef: { + (params?: RestEndpointMethodTypes["repos"]["listCommitStatusesForRef"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * **Signature verification object** + * + * The response will include a `verification` object that describes the result of verifying the commit's signature. The following fields are included in the `verification` object: + * + * | Name | Type | Description | + * | ---- | ---- | ----------- | + * | `verified` | `boolean` | Indicates whether GitHub considers the signature in this commit to be verified. | + * | `reason` | `string` | The reason for verified value. Possible values and their meanings are enumerated in table below. | + * | `signature` | `string` | The signature that was extracted from the commit. | + * | `payload` | `string` | The value that was signed. | + * + * These are the possible values for `reason` in the `verification` object: + * + * | Value | Description | + * | ----- | ----------- | + * | `expired_key` | The key that made the signature is expired. | + * | `not_signing_key` | The "signing" flag is not among the usage flags in the GPG key that made the signature. | + * | `gpgverify_error` | There was an error communicating with the signature verification service. | + * | `gpgverify_unavailable` | The signature verification service is currently unavailable. | + * | `unsigned` | The object does not include a signature. | + * | `unknown_signature_type` | A non-PGP signature was found in the commit. | + * | `no_user` | No user was associated with the `committer` email address in the commit. | + * | `unverified_email` | The `committer` email address in the commit was associated with a user, but the email address is not verified on her/his account. | + * | `bad_email` | The `committer` email address in the commit is not included in the identities of the PGP key that made the signature. | + * | `unknown_key` | The key that made the signature has not been registered with any user's account. | + * | `malformed_signature` | There was an error parsing the signature. | + * | `invalid` | The signature could not be cryptographically verified using the key whose key-id was found in the signature. | + * | `valid` | None of the above errors applied, so the signature is considered to be verified. | + */ + listCommits: { + (params?: RestEndpointMethodTypes["repos"]["listCommits"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists contributors to the specified repository and sorts them by the number of commits per contributor in descending order. This endpoint may return information that is a few hours old because the GitHub REST API v3 caches contributor data to improve performance. + * + * GitHub identifies contributors by author email address. This endpoint groups contribution counts by GitHub user, which includes all associated email addresses. To improve performance, only the first 500 author email addresses in the repository link to GitHub users. The rest will appear as anonymous contributors without associated GitHub user information. 
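+ *
+ * Illustrative sketch (not from the upstream docs) that pages through all contributors; it assumes the
+ * pagination plugin bundled with `@actions/github` plus placeholder repository values.
+ *
+ * ```ts
+ * // octokit: an authenticated client, e.g. github.getOctokit(token)
+ * const contributors = await octokit.paginate(octokit.rest.repos.listContributors, {
+ *   owner: "octocat",
+ *   repo: "hello-world",
+ *   per_page: 100,
+ * });
+ * for (const c of contributors) {
+ *   // Anonymous contributors (beyond the first 500 linked authors) have no `login`.
+ *   console.log(`${c.login ?? "anonymous"}: ${c.contributions} commits`);
+ * }
+ * ```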
+ */ + listContributors: { + (params?: RestEndpointMethodTypes["repos"]["listContributors"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + listDeployKeys: { + (params?: RestEndpointMethodTypes["repos"]["listDeployKeys"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Users with pull access can view deployment statuses for a deployment: + */ + listDeploymentStatuses: { + (params?: RestEndpointMethodTypes["repos"]["listDeploymentStatuses"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Simple filtering of deployments is available via query parameters: + */ + listDeployments: { + (params?: RestEndpointMethodTypes["repos"]["listDeployments"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists repositories that the authenticated user has explicit permission (`:read`, `:write`, or `:admin`) to access. + * + * The authenticated user has explicit permission to access repositories they own, repositories where they are a collaborator, and repositories that they can access through an organization membership. + */ + listForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["repos"]["listForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists repositories for the specified organization. + */ + listForOrg: { + (params?: RestEndpointMethodTypes["repos"]["listForOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists public repositories for the specified user. Note: For GitHub AE, this endpoint will list internal repositories for the specified user. + */ + listForUser: { + (params?: RestEndpointMethodTypes["repos"]["listForUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + listForks: { + (params?: RestEndpointMethodTypes["repos"]["listForks"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * When authenticating as a user with admin rights to a repository, this endpoint will list all currently open repository invitations. + */ + listInvitations: { + (params?: RestEndpointMethodTypes["repos"]["listInvitations"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * When authenticating as a user, this endpoint will list all currently open repository invitations for that user. + */ + listInvitationsForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["repos"]["listInvitationsForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists languages for the specified repository. The value shown for each language is the number of bytes of code written in that language. 
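+ *
+ * Because the response maps each language name to a byte count, relative shares can be derived
+ * client-side. Illustrative sketch only; the client and repository values are assumptions.
+ *
+ * ```ts
+ * // octokit: an authenticated client, e.g. github.getOctokit(token)
+ * const { data: languages } = await octokit.rest.repos.listLanguages({ owner: "octocat", repo: "hello-world" });
+ * const total = Object.values(languages).reduce((sum, bytes) => sum + bytes, 0);
+ * for (const [name, bytes] of Object.entries(languages)) {
+ *   console.log(`${name}: ${((bytes / total) * 100).toFixed(1)}%`);
+ * }
+ * ```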
+ */ + listLanguages: { + (params?: RestEndpointMethodTypes["repos"]["listLanguages"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + listPagesBuilds: { + (params?: RestEndpointMethodTypes["repos"]["listPagesBuilds"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists all public repositories in the order that they were created. + * + * Note: + * - For GitHub Enterprise Server, this endpoint will only list repositories available to all users on the enterprise. + * - Pagination is powered exclusively by the `since` parameter. Use the [Link header](https://docs.github.com/rest/overview/resources-in-the-rest-api#link-header) to get the URL for the next page of repositories. + */ + listPublic: { + (params?: RestEndpointMethodTypes["repos"]["listPublic"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists the merged pull request that introduced the commit to the repository. If the commit is not present in the default branch, additionally returns open pull requests associated with the commit. The results may include open and closed pull requests. + */ + listPullRequestsAssociatedWithCommit: { + (params?: RestEndpointMethodTypes["repos"]["listPullRequestsAssociatedWithCommit"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + listReleaseAssets: { + (params?: RestEndpointMethodTypes["repos"]["listReleaseAssets"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * This returns a list of releases, which does not include regular Git tags that have not been associated with a release. To get a list of Git tags, use the [Repository Tags API](https://docs.github.com/rest/reference/repos#list-repository-tags). + * + * Information about published releases are available to everyone. Only users with push access will receive listings for draft releases. + */ + listReleases: { + (params?: RestEndpointMethodTypes["repos"]["listReleases"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * This returns the tag protection states of a repository. + * + * This information is only available to repository administrators. + */ + listTagProtection: { + (params?: RestEndpointMethodTypes["repos"]["listTagProtection"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + listTags: { + (params?: RestEndpointMethodTypes["repos"]["listTags"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + listTeams: { + (params?: RestEndpointMethodTypes["repos"]["listTeams"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Returns a list of webhook deliveries for a webhook configured in a repository. 
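+ *
+ * Illustrative sketch (not from the upstream docs); the client, repository values, and `hook_id` are
+ * assumptions.
+ *
+ * ```ts
+ * // octokit: an authenticated client, e.g. github.getOctokit(token)
+ * const { data: deliveries } = await octokit.rest.repos.listWebhookDeliveries({
+ *   owner: "octocat",
+ *   repo: "hello-world",
+ *   hook_id: 12345678,     // placeholder webhook id
+ *   per_page: 30,
+ * });
+ * for (const d of deliveries) console.log(`${d.event} -> HTTP ${d.status_code} at ${d.delivered_at}`);
+ * ```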
+ */ + listWebhookDeliveries: { + (params?: RestEndpointMethodTypes["repos"]["listWebhookDeliveries"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + listWebhooks: { + (params?: RestEndpointMethodTypes["repos"]["listWebhooks"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + merge: { + (params?: RestEndpointMethodTypes["repos"]["merge"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Sync a branch of a forked repository to keep it up-to-date with the upstream repository. + */ + mergeUpstream: { + (params?: RestEndpointMethodTypes["repos"]["mergeUpstream"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * This will trigger a [ping event](https://docs.github.com/webhooks/#ping-event) to be sent to the hook. + */ + pingWebhook: { + (params?: RestEndpointMethodTypes["repos"]["pingWebhook"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Redeliver a webhook delivery for a webhook configured in a repository. + */ + redeliverWebhookDelivery: { + (params?: RestEndpointMethodTypes["repos"]["redeliverWebhookDelivery"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Removes the ability of an app to push to this branch. Only installed GitHub Apps with `write` access to the `contents` permission can be added as authorized actors on a protected branch. + * + * | Type | Description | + * | ------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------- | + * | `array` | The GitHub Apps that have push access to this branch. Use the app's `slug`. **Note**: The list of users, apps, and teams in total is limited to 100 items. | + */ + removeAppAccessRestrictions: { + (params?: RestEndpointMethodTypes["repos"]["removeAppAccessRestrictions"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + removeCollaborator: { + (params?: RestEndpointMethodTypes["repos"]["removeCollaborator"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. 
+ */ + removeStatusCheckContexts: { + (params?: RestEndpointMethodTypes["repos"]["removeStatusCheckContexts"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + */ + removeStatusCheckProtection: { + (params?: RestEndpointMethodTypes["repos"]["removeStatusCheckProtection"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Removes the ability of a team to push to this branch. You can also remove push access for child teams. + * + * | Type | Description | + * | ------- | --------------------------------------------------------------------------------------------------------------------------------------------------- | + * | `array` | Teams that should no longer have push access. Use the team's `slug`. **Note**: The list of users, apps, and teams in total is limited to 100 items. | + */ + removeTeamAccessRestrictions: { + (params?: RestEndpointMethodTypes["repos"]["removeTeamAccessRestrictions"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Removes the ability of a user to push to this branch. + * + * | Type | Description | + * | ------- | --------------------------------------------------------------------------------------------------------------------------------------------- | + * | `array` | Usernames of the people who should no longer have push access. **Note**: The list of users, apps, and teams in total is limited to 100 items. | + */ + removeUserAccessRestrictions: { + (params?: RestEndpointMethodTypes["repos"]["removeUserAccessRestrictions"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Renames a branch in a repository. + * + * **Note:** Although the API responds immediately, the branch rename process might take some extra time to complete in the background. You won't be able to push to the old branch name while the rename process is in progress. For more information, see "[Renaming a branch](https://docs.github.com/github/administering-a-repository/renaming-a-branch)". + * + * The permissions required to use this endpoint depends on whether you are renaming the default branch. 
+ * + * To rename a non-default branch: + * + * * Users must have push access. + * * GitHub Apps must have the `contents:write` repository permission. + * + * To rename the default branch: + * + * * Users must have admin or owner permissions. + * * GitHub Apps must have the `administration:write` repository permission. + */ + renameBranch: { + (params?: RestEndpointMethodTypes["repos"]["renameBranch"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + replaceAllTopics: { + (params?: RestEndpointMethodTypes["repos"]["replaceAllTopics"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * You can request that your site be built from the latest revision on the default branch. This has the same effect as pushing a commit to your default branch, but does not require an additional commit. Manually triggering page builds can be helpful when diagnosing build warnings and failures. + * + * Build requests are limited to one concurrent build per repository and one concurrent build per requester. If you request a build while another is still in progress, the second request will be queued until the first completes. + */ + requestPagesBuild: { + (params?: RestEndpointMethodTypes["repos"]["requestPagesBuild"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Adding admin enforcement requires admin or owner permissions to the repository and branch protection to be enabled. + */ + setAdminBranchProtection: { + (params?: RestEndpointMethodTypes["repos"]["setAdminBranchProtection"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Replaces the list of apps that have push access to this branch. This removes all apps that previously had push access and grants push access to the new list of apps. Only installed GitHub Apps with `write` access to the `contents` permission can be added as authorized actors on a protected branch. + * + * | Type | Description | + * | ------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------- | + * | `array` | The GitHub Apps that have push access to this branch. Use the app's `slug`. **Note**: The list of users, apps, and teams in total is limited to 100 items. 
| + */ + setAppAccessRestrictions: { + (params?: RestEndpointMethodTypes["repos"]["setAppAccessRestrictions"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + */ + setStatusCheckContexts: { + (params?: RestEndpointMethodTypes["repos"]["setStatusCheckContexts"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Replaces the list of teams that have push access to this branch. This removes all teams that previously had push access and grants push access to the new list of teams. Team restrictions include child teams. + * + * | Type | Description | + * | ------- | ------------------------------------------------------------------------------------------------------------------------------------------ | + * | `array` | The teams that can have push access. Use the team's `slug`. **Note**: The list of users, apps, and teams in total is limited to 100 items. | + */ + setTeamAccessRestrictions: { + (params?: RestEndpointMethodTypes["repos"]["setTeamAccessRestrictions"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Replaces the list of people that have push access to this branch. This removes all people that previously had push access and grants push access to the new list of people. + * + * | Type | Description | + * | ------- | ----------------------------------------------------------------------------------------------------------------------------- | + * | `array` | Usernames for people who can have push access. **Note**: The list of users, apps, and teams in total is limited to 100 items. | + */ + setUserAccessRestrictions: { + (params?: RestEndpointMethodTypes["repos"]["setUserAccessRestrictions"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * This will trigger the hook with the latest push to the current repository if the hook is subscribed to `push` events. If the hook is not subscribed to `push` events, the server will respond with 204 but no test POST will be generated. 
+ * + * **Note**: Previously `/repos/:owner/:repo/hooks/:hook_id/test` + */ + testPushWebhook: { + (params?: RestEndpointMethodTypes["repos"]["testPushWebhook"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * A transfer request will need to be accepted by the new owner when transferring a personal repository to another user. The response will contain the original `owner`, and the transfer will continue asynchronously. For more details on the requirements to transfer personal and organization-owned repositories, see [about repository transfers](https://docs.github.com/articles/about-repository-transfers/). + */ + transfer: { + (params?: RestEndpointMethodTypes["repos"]["transfer"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * **Note**: To edit a repository's topics, use the [Replace all repository topics](https://docs.github.com/rest/reference/repos#replace-all-repository-topics) endpoint. + */ + update: { + (params?: RestEndpointMethodTypes["repos"]["update"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Protecting a branch requires admin or owner permissions to the repository. + * + * **Note**: Passing new arrays of `users` and `teams` replaces their previous values. + * + * **Note**: The list of users, apps, and teams in total is limited to 100 items. + */ + updateBranchProtection: { + (params?: RestEndpointMethodTypes["repos"]["updateBranchProtection"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + updateCommitComment: { + (params?: RestEndpointMethodTypes["repos"]["updateCommitComment"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Updates information for a GitHub Pages site. For more information, see "[About GitHub Pages](/github/working-with-github-pages/about-github-pages). + */ + updateInformationAboutPagesSite: { + (params?: RestEndpointMethodTypes["repos"]["updateInformationAboutPagesSite"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + updateInvitation: { + (params?: RestEndpointMethodTypes["repos"]["updateInvitation"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Updating pull request review enforcement requires admin or owner permissions to the repository and branch protection to be enabled. 
+ * + * **Note**: Passing new arrays of `users` and `teams` replaces their previous values. + */ + updatePullRequestReviewProtection: { + (params?: RestEndpointMethodTypes["repos"]["updatePullRequestReviewProtection"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Users with push access to the repository can edit a release. + */ + updateRelease: { + (params?: RestEndpointMethodTypes["repos"]["updateRelease"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Users with push access to the repository can edit a release asset. + */ + updateReleaseAsset: { + (params?: RestEndpointMethodTypes["repos"]["updateReleaseAsset"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Updating required status checks requires admin or owner permissions to the repository and branch protection to be enabled. + * @deprecated octokit.rest.repos.updateStatusCheckPotection() has been renamed to octokit.rest.repos.updateStatusCheckProtection() (2020-09-17) + */ + updateStatusCheckPotection: { + (params?: RestEndpointMethodTypes["repos"]["updateStatusCheckPotection"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Updating required status checks requires admin or owner permissions to the repository and branch protection to be enabled. + */ + updateStatusCheckProtection: { + (params?: RestEndpointMethodTypes["repos"]["updateStatusCheckProtection"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Updates a webhook configured in a repository. If you previously had a `secret` set, you must provide the same `secret` or set a new `secret` or the secret will be removed. If you are only updating individual webhook `config` properties, use "[Update a webhook configuration for a repository](/rest/reference/repos#update-a-webhook-configuration-for-a-repository)." + */ + updateWebhook: { + (params?: RestEndpointMethodTypes["repos"]["updateWebhook"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Updates the webhook configuration for a repository. To update more information about the webhook, including the `active` state and `events`, use "[Update a repository webhook](/rest/reference/orgs#update-a-repository-webhook)." 
+ * + * Access tokens must have the `write:repo_hook` or `repo` scope, and GitHub Apps must have the `repository_hooks:write` permission. + */ + updateWebhookConfigForRepo: { + (params?: RestEndpointMethodTypes["repos"]["updateWebhookConfigForRepo"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * This endpoint makes use of [a Hypermedia relation](https://docs.github.com/rest/overview/resources-in-the-rest-api#hypermedia) to determine which URL to access. The endpoint you call to upload release assets is specific to your release. Use the `upload_url` returned in + * the response of the [Create a release endpoint](https://docs.github.com/rest/reference/repos#create-a-release) to upload a release asset. + * + * You need to use an HTTP client which supports [SNI](http://en.wikipedia.org/wiki/Server_Name_Indication) to make calls to this endpoint. + * + * Most libraries will set the required `Content-Length` header automatically. Use the required `Content-Type` header to provide the media type of the asset. For a list of media types, see [Media Types](https://www.iana.org/assignments/media-types/media-types.xhtml). For example: + * + * `application/zip` + * + * GitHub expects the asset data in its raw binary form, rather than JSON. You will send the raw binary content of the asset as the request body. Everything else about the endpoint is the same as the rest of the API. For example, + * you'll still need to pass your authentication to be able to upload an asset. + * + * When an upstream failure occurs, you will receive a `502 Bad Gateway` status. This may leave an empty asset with a state of `starter`. It can be safely deleted. + * + * **Notes:** + * * GitHub renames asset filenames that have special characters, non-alphanumeric characters, and leading or trailing periods. The "[List assets for a release](https://docs.github.com/rest/reference/repos#list-assets-for-a-release)" + * endpoint lists the renamed filenames. For more information and help, contact [GitHub Support](https://support.github.com/contact?tags=dotcom-rest-api). + * * If you upload an asset with the same filename as another uploaded asset, you'll receive an error and must delete the old file before you can re-upload the new asset. + */ + uploadReleaseAsset: { + (params?: RestEndpointMethodTypes["repos"]["uploadReleaseAsset"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + }; + search: { + /** + * Searches for query terms inside of a file. This method returns up to 100 results [per page](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination). + * + * When searching for code, you can get text match metadata for the file **content** and file **path** fields when you pass the `text-match` media type. For more details about how to receive highlighted search results, see [Text match metadata](https://docs.github.com/rest/reference/search#text-match-metadata). + * + * For example, if you want to find the definition of the `addClass` function inside [jQuery](https://github.com/jquery/jquery) repository, your query would look something like this: + * + * `q=addClass+in:file+language:js+repo:jquery/jquery` + * + * This query searches for the keyword `addClass` within a file's contents. The query limits the search to files where the language is JavaScript in the `jquery/jquery` repository. 
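+ *
+ * As an illustration only (not from the upstream docs), that query can be passed to this method as a
+ * plain string; the client setup is an assumption, and Octokit handles the URL encoding.
+ *
+ * ```ts
+ * // octokit: an authenticated client, e.g. github.getOctokit(token)
+ * const { data: results } = await octokit.rest.search.code({
+ *   q: "addClass in:file language:js repo:jquery/jquery",
+ *   per_page: 10,
+ * });
+ * console.log(`${results.total_count} matches`);
+ * for (const item of results.items) console.log(item.repository.full_name, item.path);
+ * ```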
+ * + * #### Considerations for code search + * + * Due to the complexity of searching code, there are a few restrictions on how searches are performed: + * + * * Only the _default branch_ is considered. In most cases, this will be the `master` branch. + * * Only files smaller than 384 KB are searchable. + * * You must always include at least one search term when searching source code. For example, searching for [`language:go`](https://github.com/search?utf8=%E2%9C%93&q=language%3Ago&type=Code) is not valid, while [`amazing + * language:go`](https://github.com/search?utf8=%E2%9C%93&q=amazing+language%3Ago&type=Code) is. + */ + code: { + (params?: RestEndpointMethodTypes["search"]["code"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Find commits via various criteria on the default branch (usually `master`). This method returns up to 100 results [per page](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination). + * + * When searching for commits, you can get text match metadata for the **message** field when you provide the `text-match` media type. For more details about how to receive highlighted search results, see [Text match + * metadata](https://docs.github.com/rest/reference/search#text-match-metadata). + * + * For example, if you want to find commits related to CSS in the [octocat/Spoon-Knife](https://github.com/octocat/Spoon-Knife) repository. Your query would look something like this: + * + * `q=repo:octocat/Spoon-Knife+css` + */ + commits: { + (params?: RestEndpointMethodTypes["search"]["commits"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Find issues by state and keyword. This method returns up to 100 results [per page](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination). + * + * When searching for issues, you can get text match metadata for the issue **title**, issue **body**, and issue **comment body** fields when you pass the `text-match` media type. For more details about how to receive highlighted + * search results, see [Text match metadata](https://docs.github.com/rest/reference/search#text-match-metadata). + * + * For example, if you want to find the oldest unresolved Python bugs on Windows. Your query might look something like this. + * + * `q=windows+label:bug+language:python+state:open&sort=created&order=asc` + * + * This query searches for the keyword `windows`, within any open issue that is labeled as `bug`. The search runs across repositories whose primary language is Python. The results are sorted by creation date in ascending order, which means the oldest issues appear first in the search results. + * + * **Note:** For [user-to-server](https://docs.github.com/developers/apps/identifying-and-authorizing-users-for-github-apps#user-to-server-requests) GitHub App requests, you can't retrieve a combination of issues and pull requests in a single query. Requests that don't include the `is:issue` or `is:pull-request` qualifier will receive an HTTP `422 Unprocessable Entity` response. To get results for both issues and pull requests, you must send separate queries for issues and pull requests. For more information about the `is` qualifier, see "[Searching only issues or pull requests](https://docs.github.com/github/searching-for-information-on-github/searching-issues-and-pull-requests#search-only-issues-or-pull-requests)." 
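+ *
+ * Illustrative sketch (not from the upstream docs) of the "oldest open Python bugs on Windows" query
+ * above, with the `is:issue` qualifier from the note included; the client setup is an assumption.
+ *
+ * ```ts
+ * // octokit: an authenticated client, e.g. github.getOctokit(token)
+ * const { data: results } = await octokit.rest.search.issuesAndPullRequests({
+ *   q: "windows label:bug language:python state:open is:issue",
+ *   sort: "created",
+ *   order: "asc",
+ * });
+ * for (const issue of results.items) console.log(`#${issue.number} ${issue.title}`);
+ * ```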
+ */ + issuesAndPullRequests: { + (params?: RestEndpointMethodTypes["search"]["issuesAndPullRequests"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Find labels in a repository with names or descriptions that match search keywords. Returns up to 100 results [per page](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination). + * + * When searching for labels, you can get text match metadata for the label **name** and **description** fields when you pass the `text-match` media type. For more details about how to receive highlighted search results, see [Text match metadata](https://docs.github.com/rest/reference/search#text-match-metadata). + * + * For example, if you want to find labels in the `linguist` repository that match `bug`, `defect`, or `enhancement`. Your query might look like this: + * + * `q=bug+defect+enhancement&repository_id=64778136` + * + * The labels that best match the query appear first in the search results. + */ + labels: { + (params?: RestEndpointMethodTypes["search"]["labels"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Find repositories via various criteria. This method returns up to 100 results [per page](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination). + * + * When searching for repositories, you can get text match metadata for the **name** and **description** fields when you pass the `text-match` media type. For more details about how to receive highlighted search results, see [Text match metadata](https://docs.github.com/rest/reference/search#text-match-metadata). + * + * For example, if you want to search for popular Tetris repositories written in assembly code, your query might look like this: + * + * `q=tetris+language:assembly&sort=stars&order=desc` + * + * This query searches for repositories with the word `tetris` in the name, the description, or the README. The results are limited to repositories where the primary language is assembly. The results are sorted by stars in descending order, so that the most popular repositories appear first in the search results. + */ + repos: { + (params?: RestEndpointMethodTypes["search"]["repos"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Find topics via various criteria. Results are sorted by best match. This method returns up to 100 results [per page](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination). See "[Searching topics](https://docs.github.com/articles/searching-topics/)" for a detailed list of qualifiers. + * + * When searching for topics, you can get text match metadata for the topic's **short\_description**, **description**, **name**, or **display\_name** field when you pass the `text-match` media type. For more details about how to receive highlighted search results, see [Text match metadata](https://docs.github.com/rest/reference/search#text-match-metadata). + * + * For example, if you want to search for topics related to Ruby that are featured on https://github.com/topics. Your query might look like this: + * + * `q=ruby+is:featured` + * + * This query searches for topics with the keyword `ruby` and limits the results to find only topics that are featured. The topics that are the best match for the query appear first in the search results. 
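+ *
+ * Illustrative sketch (not from the upstream docs) of the featured-Ruby-topics query above; the client
+ * setup is an assumption.
+ *
+ * ```ts
+ * // octokit: an authenticated client, e.g. github.getOctokit(token)
+ * const { data: results } = await octokit.rest.search.topics({ q: "ruby is:featured" });
+ * for (const topic of results.items) console.log(topic.name, "-", topic.short_description);
+ * ```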
+ */ + topics: { + (params?: RestEndpointMethodTypes["search"]["topics"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Find users via various criteria. This method returns up to 100 results [per page](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination). + * + * When searching for users, you can get text match metadata for the user **login**, **email**, and **name** fields when you pass the `text-match` media type. For more details about how to receive highlighted search results, see [Text match metadata](https://docs.github.com/rest/reference/search#text-match-metadata). + * + * For example, if you're looking for a list of popular users, you might try this query: + * + * `q=tom+repos:%3E42+followers:%3E1000` + * + * This query searches for users with the name `tom`. The results are restricted to users with more than 42 repositories and over 1,000 followers. + */ + users: { + (params?: RestEndpointMethodTypes["search"]["users"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + }; + secretScanning: { + /** + * Gets a single secret scanning alert detected in an eligible repository. + * To use this endpoint, you must be an administrator for the repository or for the organization that owns the repository, and you must use a personal access token with the `repo` scope or `security_events` scope. + * For public repositories, you may instead use the `public_repo` scope. + * + * GitHub Apps must have the `secret_scanning_alerts` read permission to use this endpoint. + */ + getAlert: { + (params?: RestEndpointMethodTypes["secretScanning"]["getAlert"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists secret scanning alerts for eligible repositories in an enterprise, from newest to oldest. + * To use this endpoint, you must be a member of the enterprise, and you must use an access token with the `repo` scope or `security_events` scope. Alerts are only returned for organizations in the enterprise for which you are an organization owner or a [security manager](https://docs.github.com/organizations/managing-peoples-access-to-your-organization-with-roles/managing-security-managers-in-your-organization). + */ + listAlertsForEnterprise: { + (params?: RestEndpointMethodTypes["secretScanning"]["listAlertsForEnterprise"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists secret scanning alerts for eligible repositories in an organization, from newest to oldest. + * To use this endpoint, you must be an administrator or security manager for the organization, and you must use an access token with the `repo` scope or `security_events` scope. + * For public repositories, you may instead use the `public_repo` scope. + * + * GitHub Apps must have the `secret_scanning_alerts` read permission to use this endpoint.
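+ *
+ * Illustrative sketch only; the organization name and the `state` filter below are placeholders:
+ *
+ * ```typescript
+ * // `octokit` is an authenticated client with sufficient scopes, as described above
+ * const { data: alerts } = await octokit.rest.secretScanning.listAlertsForOrg({
+ *   org: "my-org",
+ *   state: "open",
+ * });
+ * ```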
+ */ + listAlertsForOrg: { + (params?: RestEndpointMethodTypes["secretScanning"]["listAlertsForOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists secret scanning alerts for an eligible repository, from newest to oldest. + * To use this endpoint, you must be an administrator for the repository or for the organization that owns the repository, and you must use a personal access token with the `repo` scope or `security_events` scope. + * For public repositories, you may instead use the `public_repo` scope. + * + * GitHub Apps must have the `secret_scanning_alerts` read permission to use this endpoint. + */ + listAlertsForRepo: { + (params?: RestEndpointMethodTypes["secretScanning"]["listAlertsForRepo"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists all locations for a given secret scanning alert for an eligible repository. + * To use this endpoint, you must be an administrator for the repository or for the organization that owns the repository, and you must use a personal access token with the `repo` scope or `security_events` scope. + * For public repositories, you may instead use the `public_repo` scope. + * + * GitHub Apps must have the `secret_scanning_alerts` read permission to use this endpoint. + */ + listLocationsForAlert: { + (params?: RestEndpointMethodTypes["secretScanning"]["listLocationsForAlert"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Updates the status of a secret scanning alert in an eligible repository. + * To use this endpoint, you must be an administrator for the repository or for the organization that owns the repository, and you must use a personal access token with the `repo` scope or `security_events` scope. + * For public repositories, you may instead use the `public_repo` scope. + * + * GitHub Apps must have the `secret_scanning_alerts` write permission to use this endpoint. + */ + updateAlert: { + (params?: RestEndpointMethodTypes["secretScanning"]["updateAlert"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + }; + teams: { + /** + * Team synchronization is available for organizations using GitHub Enterprise Cloud. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Adds an organization member to a team. An authenticated organization owner or team maintainer can add organization members to a team. + * + * **Note:** When you have team synchronization set up for a team with your organization's identity provider (IdP), you will see an error if you attempt to use the API for making changes to the team's membership. If you have access to manage group membership in your IdP, you can manage GitHub team membership through your identity provider, which automatically adds and removes team members in an organization. For more information, see "[Synchronizing teams between your identity provider and GitHub](https://docs.github.com/articles/synchronizing-teams-between-your-identity-provider-and-github/)." + * + * An organization owner can add someone who is not part of the team's organization to a team. 
When an organization owner adds someone to a team who is not an organization member, this endpoint will send an invitation to the person via email. This newly-created membership will be in the "pending" state until the person accepts the invitation, at which point the membership will transition to the "active" state and the user will be added as a member of the team. + * + * If the user is already a member of the team, this endpoint will update the team member's role. To update the membership of a team member, the authenticated user must be an organization owner or a team maintainer. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `PUT /organizations/{org_id}/team/{team_id}/memberships/{username}`. + */ + addOrUpdateMembershipForUserInOrg: { + (params?: RestEndpointMethodTypes["teams"]["addOrUpdateMembershipForUserInOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Adds an organization project to a team. To add a project to a team or update the team's permission on a project, the authenticated user must have `admin` permissions for the project. The project and team must be part of the same organization. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `PUT /organizations/{org_id}/team/{team_id}/projects/{project_id}`. + */ + addOrUpdateProjectPermissionsInOrg: { + (params?: RestEndpointMethodTypes["teams"]["addOrUpdateProjectPermissionsInOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * To add a repository to a team or update the team's permission on a repository, the authenticated user must have admin access to the repository, and must be able to see the team. The repository must be owned by the organization, or a direct fork of a repository owned by the organization. You will get a `422 Unprocessable Entity` status if you attempt to add a repository to a team that is not owned by the organization. Note that, if you choose not to pass any parameters, you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)." + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `PUT /organizations/{org_id}/team/{team_id}/repos/{owner}/{repo}`. + * + * For more information about the permission levels, see "[Repository permission levels for an organization](https://docs.github.com/en/github/setting-up-and-managing-organizations-and-teams/repository-permission-levels-for-an-organization#permission-levels-for-repositories-owned-by-an-organization)". + */ + addOrUpdateRepoPermissionsInOrg: { + (params?: RestEndpointMethodTypes["teams"]["addOrUpdateRepoPermissionsInOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Checks whether a team has `read`, `write`, or `admin` permissions for an organization project. The response includes projects inherited from a parent team. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/projects/{project_id}`.
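+ *
+ * Illustrative sketch only; `my-org`, `my-team`, and the project id are placeholders:
+ *
+ * ```typescript
+ * // `octokit` is an authenticated client as in the earlier sketch
+ * const { data: permission } = await octokit.rest.teams.checkPermissionsForProjectInOrg({
+ *   org: "my-org",
+ *   team_slug: "my-team",
+ *   project_id: 12345,
+ * });
+ * ```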
+ */ + checkPermissionsForProjectInOrg: { + (params?: RestEndpointMethodTypes["teams"]["checkPermissionsForProjectInOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Checks whether a team has `admin`, `push`, `maintain`, `triage`, or `pull` permission for a repository. Repositories inherited through a parent team will also be checked. + * + * You can also get information about the specified repository, including what permissions the team grants on it, by passing the following custom [media type](https://docs.github.com/rest/overview/media-types/) via the `application/vnd.github.v3.repository+json` accept header. + * + * If a team doesn't have permission for the repository, you will receive a `404 Not Found` response status. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/repos/{owner}/{repo}`. + */ + checkPermissionsForRepoInOrg: { + (params?: RestEndpointMethodTypes["teams"]["checkPermissionsForRepoInOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * To create a team, the authenticated user must be a member or owner of `{org}`. By default, organization members can create teams. Organization owners can limit team creation to organization owners. For more information, see "[Setting team creation permissions](https://docs.github.com/en/articles/setting-team-creation-permissions-in-your-organization)." + * + * When you create a new team, you automatically become a team maintainer without explicitly adding yourself to the optional array of `maintainers`. For more information, see "[About teams](https://docs.github.com/en/github/setting-up-and-managing-organizations-and-teams/about-teams)". + */ + create: { + (params?: RestEndpointMethodTypes["teams"]["create"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Creates a new comment on a team discussion. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `POST /organizations/{org_id}/team/{team_id}/discussions/{discussion_number}/comments`. + */ + createDiscussionCommentInOrg: { + (params?: RestEndpointMethodTypes["teams"]["createDiscussionCommentInOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Creates a new discussion post on a team's page. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). 
+ * + * This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `POST /organizations/{org_id}/team/{team_id}/discussions`. + */ + createDiscussionInOrg: { + (params?: RestEndpointMethodTypes["teams"]["createDiscussionInOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Deletes a comment on a team discussion. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `DELETE /organizations/{org_id}/team/{team_id}/discussions/{discussion_number}/comments/{comment_number}`. + */ + deleteDiscussionCommentInOrg: { + (params?: RestEndpointMethodTypes["teams"]["deleteDiscussionCommentInOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Delete a discussion from a team's page. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `DELETE /organizations/{org_id}/team/{team_id}/discussions/{discussion_number}`. + */ + deleteDiscussionInOrg: { + (params?: RestEndpointMethodTypes["teams"]["deleteDiscussionInOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * To delete a team, the authenticated user must be an organization owner or team maintainer. + * + * If you are an organization owner, deleting a parent team will delete all of its child teams as well. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `DELETE /organizations/{org_id}/team/{team_id}`. + */ + deleteInOrg: { + (params?: RestEndpointMethodTypes["teams"]["deleteInOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets a team using the team's `slug`. GitHub generates the `slug` from the team `name`. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}`. + */ + getByName: { + (params?: RestEndpointMethodTypes["teams"]["getByName"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Get a specific comment on a team discussion. OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/discussions/{discussion_number}/comments/{comment_number}`. 
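+ *
+ * Illustrative sketch only; the org, team slug, and numbers are placeholders:
+ *
+ * ```typescript
+ * // `octokit` is an authenticated client with `read:discussion`
+ * const { data: comment } = await octokit.rest.teams.getDiscussionCommentInOrg({
+ *   org: "my-org",
+ *   team_slug: "my-team",
+ *   discussion_number: 1,
+ *   comment_number: 1,
+ * });
+ * ```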
+ */ + getDiscussionCommentInOrg: { + (params?: RestEndpointMethodTypes["teams"]["getDiscussionCommentInOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Get a specific discussion on a team's page. OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/discussions/{discussion_number}`. + */ + getDiscussionInOrg: { + (params?: RestEndpointMethodTypes["teams"]["getDiscussionInOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Team members will include the members of child teams. + * + * To get a user's membership with a team, the team must be visible to the authenticated user. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/memberships/{username}`. + * + * **Note:** + * The response contains the `state` of the membership and the member's `role`. + * + * The `role` for organization owners is set to `maintainer`. For more information about `maintainer` roles, see [Create a team](https://docs.github.com/rest/reference/teams#create-a-team). + */ + getMembershipForUserInOrg: { + (params?: RestEndpointMethodTypes["teams"]["getMembershipForUserInOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists all teams in an organization that are visible to the authenticated user. + */ + list: { + (params?: RestEndpointMethodTypes["teams"]["list"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists the child teams of the team specified by `{team_slug}`. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/teams`. + */ + listChildInOrg: { + (params?: RestEndpointMethodTypes["teams"]["listChildInOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * List all comments on a team discussion. OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/discussions/{discussion_number}/comments`. + */ + listDiscussionCommentsInOrg: { + (params?: RestEndpointMethodTypes["teams"]["listDiscussionCommentsInOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * List all discussions on a team's page. OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/discussions`.
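+ *
+ * Illustrative sketch only; `my-org` and `my-team` are placeholders:
+ *
+ * ```typescript
+ * // `octokit` is an authenticated client with `read:discussion`
+ * const { data: discussions } = await octokit.rest.teams.listDiscussionsInOrg({
+ *   org: "my-org",
+ *   team_slug: "my-team",
+ * });
+ * ```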
+ */ + listDiscussionsInOrg: { + (params?: RestEndpointMethodTypes["teams"]["listDiscussionsInOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * List all of the teams across all of the organizations to which the authenticated user belongs. This method requires `user`, `repo`, or `read:org` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/) when authenticating via [OAuth](https://docs.github.com/apps/building-oauth-apps/). + */ + listForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["teams"]["listForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Team members will include the members of child teams. + * + * To list members in a team, the team must be visible to the authenticated user. + */ + listMembersInOrg: { + (params?: RestEndpointMethodTypes["teams"]["listMembersInOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * The return hash contains a `role` field which refers to the Organization Invitation role and will be one of the following values: `direct_member`, `admin`, `billing_manager`, `hiring_manager`, or `reinstate`. If the invitee is not a GitHub member, the `login` field in the return hash will be `null`. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/invitations`. + */ + listPendingInvitationsInOrg: { + (params?: RestEndpointMethodTypes["teams"]["listPendingInvitationsInOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists the organization projects for a team. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/projects`. + */ + listProjectsInOrg: { + (params?: RestEndpointMethodTypes["teams"]["listProjectsInOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists a team's repositories visible to the authenticated user. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/repos`. + */ + listReposInOrg: { + (params?: RestEndpointMethodTypes["teams"]["listReposInOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Team synchronization is available for organizations using GitHub Enterprise Cloud. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * To remove a membership between a user and a team, the authenticated user must have 'admin' permissions to the team or be an owner of the organization that the team is associated with. Removing team membership does not delete the user, it just removes their membership from the team. + * + * **Note:** When you have team synchronization set up for a team with your organization's identity provider (IdP), you will see an error if you attempt to use the API for making changes to the team's membership. 
If you have access to manage group membership in your IdP, you can manage GitHub team membership through your identity provider, which automatically adds and removes team members in an organization. For more information, see "[Synchronizing teams between your identity provider and GitHub](https://docs.github.com/articles/synchronizing-teams-between-your-identity-provider-and-github/)." + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `DELETE /organizations/{org_id}/team/{team_id}/memberships/{username}`. + */ + removeMembershipForUserInOrg: { + (params?: RestEndpointMethodTypes["teams"]["removeMembershipForUserInOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Removes an organization project from a team. An organization owner or a team maintainer can remove any project from the team. To remove a project from a team as an organization member, the authenticated user must have `read` access to both the team and project, or `admin` access to the team or project. This endpoint removes the project from the team, but does not delete the project. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `DELETE /organizations/{org_id}/team/{team_id}/projects/{project_id}`. + */ + removeProjectInOrg: { + (params?: RestEndpointMethodTypes["teams"]["removeProjectInOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * If the authenticated user is an organization owner or a team maintainer, they can remove any repositories from the team. To remove a repository from a team as an organization member, the authenticated user must have admin access to the repository and must be able to see the team. This does not delete the repository, it just removes it from the team. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `DELETE /organizations/{org_id}/team/{team_id}/repos/{owner}/{repo}`. + */ + removeRepoInOrg: { + (params?: RestEndpointMethodTypes["teams"]["removeRepoInOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Edits the body text of a discussion comment. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `PATCH /organizations/{org_id}/team/{team_id}/discussions/{discussion_number}/comments/{comment_number}`. + */ + updateDiscussionCommentInOrg: { + (params?: RestEndpointMethodTypes["teams"]["updateDiscussionCommentInOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Edits the title and body text of a discussion post. Only the parameters you provide are updated. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `PATCH /organizations/{org_id}/team/{team_id}/discussions/{discussion_number}`. 
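+ *
+ * Illustrative sketch only; the identifiers and the new title are placeholders, and only the fields you pass are changed:
+ *
+ * ```typescript
+ * // `octokit` is an authenticated client with `write:discussion`
+ * await octokit.rest.teams.updateDiscussionInOrg({
+ *   org: "my-org",
+ *   team_slug: "my-team",
+ *   discussion_number: 1,
+ *   title: "Updated discussion title",
+ * });
+ * ```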
+ */ + updateDiscussionInOrg: { + (params?: RestEndpointMethodTypes["teams"]["updateDiscussionInOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * To edit a team, the authenticated user must either be an organization owner or a team maintainer. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `PATCH /organizations/{org_id}/team/{team_id}`. + */ + updateInOrg: { + (params?: RestEndpointMethodTypes["teams"]["updateInOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + }; + users: { + /** + * This endpoint is accessible with the `user` scope. + * @deprecated octokit.rest.users.addEmailForAuthenticated() has been renamed to octokit.rest.users.addEmailForAuthenticatedUser() (2021-10-05) + */ + addEmailForAuthenticated: { + (params?: RestEndpointMethodTypes["users"]["addEmailForAuthenticated"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * This endpoint is accessible with the `user` scope. + */ + addEmailForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["users"]["addEmailForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + block: { + (params?: RestEndpointMethodTypes["users"]["block"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + checkBlocked: { + (params?: RestEndpointMethodTypes["users"]["checkBlocked"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + checkFollowingForUser: { + (params?: RestEndpointMethodTypes["users"]["checkFollowingForUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + checkPersonIsFollowedByAuthenticated: { + (params?: RestEndpointMethodTypes["users"]["checkPersonIsFollowedByAuthenticated"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Adds a GPG key to the authenticated user's GitHub account. Requires that you are authenticated via Basic Auth, or OAuth with at least `write:gpg_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * @deprecated octokit.rest.users.createGpgKeyForAuthenticated() has been renamed to octokit.rest.users.createGpgKeyForAuthenticatedUser() (2021-10-05) + */ + createGpgKeyForAuthenticated: { + (params?: RestEndpointMethodTypes["users"]["createGpgKeyForAuthenticated"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Adds a GPG key to the authenticated user's GitHub account. Requires that you are authenticated via Basic Auth, or OAuth with at least `write:gpg_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + */ + createGpgKeyForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["users"]["createGpgKeyForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Adds a public SSH key to the authenticated user's GitHub account. 
Requires that you are authenticated via Basic Auth, or OAuth with at least `write:public_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * @deprecated octokit.rest.users.createPublicSshKeyForAuthenticated() has been renamed to octokit.rest.users.createPublicSshKeyForAuthenticatedUser() (2021-10-05) + */ + createPublicSshKeyForAuthenticated: { + (params?: RestEndpointMethodTypes["users"]["createPublicSshKeyForAuthenticated"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Adds a public SSH key to the authenticated user's GitHub account. Requires that you are authenticated via Basic Auth, or OAuth with at least `write:public_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + */ + createPublicSshKeyForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["users"]["createPublicSshKeyForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * This endpoint is accessible with the `user` scope. + * @deprecated octokit.rest.users.deleteEmailForAuthenticated() has been renamed to octokit.rest.users.deleteEmailForAuthenticatedUser() (2021-10-05) + */ + deleteEmailForAuthenticated: { + (params?: RestEndpointMethodTypes["users"]["deleteEmailForAuthenticated"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * This endpoint is accessible with the `user` scope. + */ + deleteEmailForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["users"]["deleteEmailForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Removes a GPG key from the authenticated user's GitHub account. Requires that you are authenticated via Basic Auth or via OAuth with at least `admin:gpg_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * @deprecated octokit.rest.users.deleteGpgKeyForAuthenticated() has been renamed to octokit.rest.users.deleteGpgKeyForAuthenticatedUser() (2021-10-05) + */ + deleteGpgKeyForAuthenticated: { + (params?: RestEndpointMethodTypes["users"]["deleteGpgKeyForAuthenticated"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Removes a GPG key from the authenticated user's GitHub account. Requires that you are authenticated via Basic Auth or via OAuth with at least `admin:gpg_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + */ + deleteGpgKeyForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["users"]["deleteGpgKeyForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Removes a public SSH key from the authenticated user's GitHub account. Requires that you are authenticated via Basic Auth or via OAuth with at least `admin:public_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). 
+ * @deprecated octokit.rest.users.deletePublicSshKeyForAuthenticated() has been renamed to octokit.rest.users.deletePublicSshKeyForAuthenticatedUser() (2021-10-05) + */ + deletePublicSshKeyForAuthenticated: { + (params?: RestEndpointMethodTypes["users"]["deletePublicSshKeyForAuthenticated"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Removes a public SSH key from the authenticated user's GitHub account. Requires that you are authenticated via Basic Auth or via OAuth with at least `admin:public_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + */ + deletePublicSshKeyForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["users"]["deletePublicSshKeyForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Note that you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)." + * + * Following a user requires the user to be logged in and authenticated with basic auth or OAuth with the `user:follow` scope. + */ + follow: { + (params?: RestEndpointMethodTypes["users"]["follow"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * If the authenticated user is authenticated through basic authentication or OAuth with the `user` scope, then the response lists public and private profile information. + * + * If the authenticated user is authenticated through OAuth without the `user` scope, then the response lists only public profile information. + */ + getAuthenticated: { + (params?: RestEndpointMethodTypes["users"]["getAuthenticated"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Provides publicly available information about someone with a GitHub account. + * + * GitHub Apps with the `Plan` user permission can use this endpoint to retrieve information about a user's GitHub plan. The GitHub App must be authenticated as a user. See "[Identifying and authorizing users for GitHub Apps](https://docs.github.com/apps/building-github-apps/identifying-and-authorizing-users-for-github-apps/)" for details about authentication. For an example response, see 'Response with GitHub plan information' below. + * + * The `email` key in the following response is the publicly visible email address from your GitHub [profile page](https://github.com/settings/profile). When setting up your profile, you can select a primary email address to be “public” which provides an email entry for this endpoint. If you do not set a public email address for `email`, then it will have a value of `null`. You only see publicly visible email addresses when authenticated with GitHub. For more information, see [Authentication](https://docs.github.com/rest/overview/resources-in-the-rest-api#authentication). + * + * The Emails API enables you to list all of your email addresses, and toggle a primary email to be visible publicly. For more information, see "[Emails API](https://docs.github.com/rest/reference/users#emails)".
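+ *
+ * Illustrative sketch only, reusing the `octocat` account mentioned above:
+ *
+ * ```typescript
+ * // `octokit` is an authenticated client as in the earlier sketch
+ * const { data: user } = await octokit.rest.users.getByUsername({ username: "octocat" });
+ * console.log(user.email); // `null` unless the user has a public email address
+ * ```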
+ */ + getByUsername: { + (params?: RestEndpointMethodTypes["users"]["getByUsername"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Provides hovercard information when authenticated through basic auth or OAuth with the `repo` scope. You can find out more about someone in relation to their pull requests, issues, repositories, and organizations. + * + * The `subject_type` and `subject_id` parameters provide context for the person's hovercard, which returns more information than without the parameters. For example, if you wanted to find out more about `octocat` who owns the `Spoon-Knife` repository via cURL, it would look like this: + * + * ```shell + * curl -u username:token \ + * "https://api.github.com/users/octocat/hovercard?subject_type=repository&subject_id=1300192" + * ``` + */ + getContextForUser: { + (params?: RestEndpointMethodTypes["users"]["getContextForUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * View extended details for a single GPG key. Requires that you are authenticated via Basic Auth or via OAuth with at least `read:gpg_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * @deprecated octokit.rest.users.getGpgKeyForAuthenticated() has been renamed to octokit.rest.users.getGpgKeyForAuthenticatedUser() (2021-10-05) + */ + getGpgKeyForAuthenticated: { + (params?: RestEndpointMethodTypes["users"]["getGpgKeyForAuthenticated"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * View extended details for a single GPG key. Requires that you are authenticated via Basic Auth or via OAuth with at least `read:gpg_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + */ + getGpgKeyForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["users"]["getGpgKeyForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * View extended details for a single public SSH key. Requires that you are authenticated via Basic Auth or via OAuth with at least `read:public_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * @deprecated octokit.rest.users.getPublicSshKeyForAuthenticated() has been renamed to octokit.rest.users.getPublicSshKeyForAuthenticatedUser() (2021-10-05) + */ + getPublicSshKeyForAuthenticated: { + (params?: RestEndpointMethodTypes["users"]["getPublicSshKeyForAuthenticated"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * View extended details for a single public SSH key. Requires that you are authenticated via Basic Auth or via OAuth with at least `read:public_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + */ + getPublicSshKeyForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["users"]["getPublicSshKeyForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists all users, in the order that they signed up on GitHub. This list includes personal user accounts and organization accounts.
+ * + * Note: Pagination is powered exclusively by the `since` parameter. Use the [Link header](https://docs.github.com/rest/overview/resources-in-the-rest-api#link-header) to get the URL for the next page of users. + */ + list: { + (params?: RestEndpointMethodTypes["users"]["list"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * List the users you've blocked on your personal account. + * @deprecated octokit.rest.users.listBlockedByAuthenticated() has been renamed to octokit.rest.users.listBlockedByAuthenticatedUser() (2021-10-05) + */ + listBlockedByAuthenticated: { + (params?: RestEndpointMethodTypes["users"]["listBlockedByAuthenticated"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * List the users you've blocked on your personal account. + */ + listBlockedByAuthenticatedUser: { + (params?: RestEndpointMethodTypes["users"]["listBlockedByAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists all of your email addresses, and specifies which one is visible to the public. This endpoint is accessible with the `user:email` scope. + * @deprecated octokit.rest.users.listEmailsForAuthenticated() has been renamed to octokit.rest.users.listEmailsForAuthenticatedUser() (2021-10-05) + */ + listEmailsForAuthenticated: { + (params?: RestEndpointMethodTypes["users"]["listEmailsForAuthenticated"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists all of your email addresses, and specifies which one is visible to the public. This endpoint is accessible with the `user:email` scope. + */ + listEmailsForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["users"]["listEmailsForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists the people who the authenticated user follows. + * @deprecated octokit.rest.users.listFollowedByAuthenticated() has been renamed to octokit.rest.users.listFollowedByAuthenticatedUser() (2021-10-05) + */ + listFollowedByAuthenticated: { + (params?: RestEndpointMethodTypes["users"]["listFollowedByAuthenticated"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists the people who the authenticated user follows. + */ + listFollowedByAuthenticatedUser: { + (params?: RestEndpointMethodTypes["users"]["listFollowedByAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists the people following the authenticated user. + */ + listFollowersForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["users"]["listFollowersForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists the people following the specified user. + */ + listFollowersForUser: { + (params?: RestEndpointMethodTypes["users"]["listFollowersForUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists the people who the specified user follows. 
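+ *
+ * Illustrative sketch only:
+ *
+ * ```typescript
+ * // `octokit` is an authenticated client as in the earlier sketch
+ * const { data: following } = await octokit.rest.users.listFollowingForUser({ username: "octocat" });
+ * ```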
+ */ + listFollowingForUser: { + (params?: RestEndpointMethodTypes["users"]["listFollowingForUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists the current user's GPG keys. Requires that you are authenticated via Basic Auth or via OAuth with at least `read:gpg_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * @deprecated octokit.rest.users.listGpgKeysForAuthenticated() has been renamed to octokit.rest.users.listGpgKeysForAuthenticatedUser() (2021-10-05) + */ + listGpgKeysForAuthenticated: { + (params?: RestEndpointMethodTypes["users"]["listGpgKeysForAuthenticated"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists the current user's GPG keys. Requires that you are authenticated via Basic Auth or via OAuth with at least `read:gpg_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + */ + listGpgKeysForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["users"]["listGpgKeysForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists the GPG keys for a user. This information is accessible by anyone. + */ + listGpgKeysForUser: { + (params?: RestEndpointMethodTypes["users"]["listGpgKeysForUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists your publicly visible email address, which you can set with the [Set primary email visibility for the authenticated user](https://docs.github.com/rest/reference/users#set-primary-email-visibility-for-the-authenticated-user) endpoint. This endpoint is accessible with the `user:email` scope. + * @deprecated octokit.rest.users.listPublicEmailsForAuthenticated() has been renamed to octokit.rest.users.listPublicEmailsForAuthenticatedUser() (2021-10-05) + */ + listPublicEmailsForAuthenticated: { + (params?: RestEndpointMethodTypes["users"]["listPublicEmailsForAuthenticated"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists your publicly visible email address, which you can set with the [Set primary email visibility for the authenticated user](https://docs.github.com/rest/reference/users#set-primary-email-visibility-for-the-authenticated-user) endpoint. This endpoint is accessible with the `user:email` scope. + */ + listPublicEmailsForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["users"]["listPublicEmailsForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists the _verified_ public SSH keys for a user. This is accessible by anyone. + */ + listPublicKeysForUser: { + (params?: RestEndpointMethodTypes["users"]["listPublicKeysForUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists the public SSH keys for the authenticated user's GitHub account. Requires that you are authenticated via Basic Auth or via OAuth with at least `read:public_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). 
+ * @deprecated octokit.rest.users.listPublicSshKeysForAuthenticated() has been renamed to octokit.rest.users.listPublicSshKeysForAuthenticatedUser() (2021-10-05) + */ + listPublicSshKeysForAuthenticated: { + (params?: RestEndpointMethodTypes["users"]["listPublicSshKeysForAuthenticated"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists the public SSH keys for the authenticated user's GitHub account. Requires that you are authenticated via Basic Auth or via OAuth with at least `read:public_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + */ + listPublicSshKeysForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["users"]["listPublicSshKeysForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Sets the visibility for your primary email addresses. + * @deprecated octokit.rest.users.setPrimaryEmailVisibilityForAuthenticated() has been renamed to octokit.rest.users.setPrimaryEmailVisibilityForAuthenticatedUser() (2021-10-05) + */ + setPrimaryEmailVisibilityForAuthenticated: { + (params?: RestEndpointMethodTypes["users"]["setPrimaryEmailVisibilityForAuthenticated"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Sets the visibility for your primary email addresses. + */ + setPrimaryEmailVisibilityForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["users"]["setPrimaryEmailVisibilityForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + unblock: { + (params?: RestEndpointMethodTypes["users"]["unblock"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Unfollowing a user requires the user to be logged in and authenticated with basic auth or OAuth with the `user:follow` scope. + */ + unfollow: { + (params?: RestEndpointMethodTypes["users"]["unfollow"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * **Note:** If your email is set to private and you send an `email` parameter as part of this request to update your profile, your privacy settings are still enforced: the email address will not be displayed on your public profile or via the API. 
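+ *
+ * Illustrative sketch only; the profile fields shown are placeholders:
+ *
+ * ```typescript
+ * // `octokit` is a client authenticated as the user whose profile is being updated
+ * await octokit.rest.users.updateAuthenticated({
+ *   bio: "Maintainer of example projects",
+ *   hireable: false,
+ * });
+ * ```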
+ */ + updateAuthenticated: { + (params?: RestEndpointMethodTypes["users"]["updateAuthenticated"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + }; +}; diff --git a/node_modules/@octokit/plugin-rest-endpoint-methods/dist-types/generated/parameters-and-response-types.d.ts b/node_modules/@octokit/plugin-rest-endpoint-methods/dist-types/generated/parameters-and-response-types.d.ts new file mode 100644 index 0000000..ce2a2f1 --- /dev/null +++ b/node_modules/@octokit/plugin-rest-endpoint-methods/dist-types/generated/parameters-and-response-types.d.ts @@ -0,0 +1,3211 @@ +import { Endpoints, RequestParameters } from "@octokit/types"; +export declare type RestEndpointMethodTypes = { + actions: { + addCustomLabelsToSelfHostedRunnerForOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /orgs/{org}/actions/runners/{runner_id}/labels"]["response"]; + }; + addCustomLabelsToSelfHostedRunnerForRepo: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/actions/runners/{runner_id}/labels"]["response"]; + }; + addSelectedRepoToOrgSecret: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"]["response"]; + }; + approveWorkflowRun: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/actions/runs/{run_id}/approve"]["response"]; + }; + cancelWorkflowRun: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel"]["response"]; + }; + createOrUpdateEnvironmentSecret: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}"]["response"]; + }; + createOrUpdateOrgSecret: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /orgs/{org}/actions/secrets/{secret_name}"]["response"]; + }; + createOrUpdateRepoSecret: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}"]["response"]; + }; + createRegistrationTokenForOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /orgs/{org}/actions/runners/registration-token"]["response"]; + }; + createRegistrationTokenForRepo: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/actions/runners/registration-token"]["response"]; + }; + createRemoveTokenForOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /orgs/{org}/actions/runners/remove-token"]["response"]; + }; + createRemoveTokenForRepo: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/actions/runners/remove-token"]["response"]; + }; + createWorkflowDispatch: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches"]["response"]; + }; + deleteActionsCacheById: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/actions/caches/{cache_id}"]["response"]; + }; + deleteActionsCacheByKey: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/actions/caches{?key,ref}"]["response"]; + }; + deleteArtifact: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"]["response"]; + }; 
+ deleteEnvironmentSecret: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}"]["response"]; + }; + deleteOrgSecret: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /orgs/{org}/actions/secrets/{secret_name}"]["response"]; + }; + deleteRepoSecret: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}"]["response"]; + }; + deleteSelfHostedRunnerFromOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /orgs/{org}/actions/runners/{runner_id}"]["response"]; + }; + deleteSelfHostedRunnerFromRepo: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}"]["response"]; + }; + deleteWorkflowRun: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/actions/runs/{run_id}"]["response"]; + }; + deleteWorkflowRunLogs: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs"]["response"]; + }; + disableSelectedRepositoryGithubActionsOrganization: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /orgs/{org}/actions/permissions/repositories/{repository_id}"]["response"]; + }; + disableWorkflow: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable"]["response"]; + }; + downloadArtifact: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}"]["response"]; + }; + downloadJobLogsForWorkflowRun: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs"]["response"]; + }; + downloadWorkflowRunAttemptLogs: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/logs"]["response"]; + }; + downloadWorkflowRunLogs: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs"]["response"]; + }; + enableSelectedRepositoryGithubActionsOrganization: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /orgs/{org}/actions/permissions/repositories/{repository_id}"]["response"]; + }; + enableWorkflow: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable"]["response"]; + }; + getActionsCacheList: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/actions/caches"]["response"]; + }; + getActionsCacheUsage: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/actions/cache/usage"]["response"]; + }; + getActionsCacheUsageByRepoForOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/actions/cache/usage-by-repository"]["response"]; + }; + getActionsCacheUsageForEnterprise: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /enterprises/{enterprise}/actions/cache/usage"]["response"]; + }; + getActionsCacheUsageForOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/actions/cache/usage"]["response"]; + }; + getAllowedActionsOrganization: { + parameters: RequestParameters & Omit; + response: Endpoints["GET 
/orgs/{org}/actions/permissions/selected-actions"]["response"]; + }; + getAllowedActionsRepository: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/actions/permissions/selected-actions"]["response"]; + }; + getArtifact: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"]["response"]; + }; + getEnvironmentPublicKey: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repositories/{repository_id}/environments/{environment_name}/secrets/public-key"]["response"]; + }; + getEnvironmentSecret: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}"]["response"]; + }; + getGithubActionsDefaultWorkflowPermissionsEnterprise: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /enterprises/{enterprise}/actions/permissions/workflow"]["response"]; + }; + getGithubActionsDefaultWorkflowPermissionsOrganization: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/actions/permissions/workflow"]["response"]; + }; + getGithubActionsDefaultWorkflowPermissionsRepository: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/actions/permissions/workflow"]["response"]; + }; + getGithubActionsPermissionsOrganization: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/actions/permissions"]["response"]; + }; + getGithubActionsPermissionsRepository: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/actions/permissions"]["response"]; + }; + getJobForWorkflowRun: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/actions/jobs/{job_id}"]["response"]; + }; + getOrgPublicKey: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/actions/secrets/public-key"]["response"]; + }; + getOrgSecret: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/actions/secrets/{secret_name}"]["response"]; + }; + getPendingDeploymentsForRun: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments"]["response"]; + }; + getRepoPermissions: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/actions/permissions"]["response"]; + }; + getRepoPublicKey: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/actions/secrets/public-key"]["response"]; + }; + getRepoSecret: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/actions/secrets/{secret_name}"]["response"]; + }; + getReviewsForRun: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/actions/runs/{run_id}/approvals"]["response"]; + }; + getSelfHostedRunnerForOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/actions/runners/{runner_id}"]["response"]; + }; + getSelfHostedRunnerForRepo: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/actions/runners/{runner_id}"]["response"]; + }; + getWorkflow: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}"]["response"]; + }; + getWorkflowAccessToRepository: { + parameters: RequestParameters & Omit; + 
response: Endpoints["GET /repos/{owner}/{repo}/actions/permissions/access"]["response"]; + }; + getWorkflowRun: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/actions/runs/{run_id}"]["response"]; + }; + getWorkflowRunAttempt: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}"]["response"]; + }; + getWorkflowRunUsage: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing"]["response"]; + }; + getWorkflowUsage: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing"]["response"]; + }; + listArtifactsForRepo: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/actions/artifacts"]["response"]; + }; + listEnvironmentSecrets: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repositories/{repository_id}/environments/{environment_name}/secrets"]["response"]; + }; + listJobsForWorkflowRun: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs"]["response"]; + }; + listJobsForWorkflowRunAttempt: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs"]["response"]; + }; + listLabelsForSelfHostedRunnerForOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/actions/runners/{runner_id}/labels"]["response"]; + }; + listLabelsForSelfHostedRunnerForRepo: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/actions/runners/{runner_id}/labels"]["response"]; + }; + listOrgSecrets: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/actions/secrets"]["response"]; + }; + listRepoSecrets: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/actions/secrets"]["response"]; + }; + listRepoWorkflows: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/actions/workflows"]["response"]; + }; + listRunnerApplicationsForOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/actions/runners/downloads"]["response"]; + }; + listRunnerApplicationsForRepo: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/actions/runners/downloads"]["response"]; + }; + listSelectedReposForOrgSecret: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/actions/secrets/{secret_name}/repositories"]["response"]; + }; + listSelectedRepositoriesEnabledGithubActionsOrganization: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/actions/permissions/repositories"]["response"]; + }; + listSelfHostedRunnersForOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/actions/runners"]["response"]; + }; + listSelfHostedRunnersForRepo: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/actions/runners"]["response"]; + }; + listWorkflowRunArtifacts: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts"]["response"]; + }; + listWorkflowRuns: { + parameters: RequestParameters & Omit; + response: Endpoints["GET 
/repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs"]["response"]; + }; + listWorkflowRunsForRepo: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/actions/runs"]["response"]; + }; + reRunJobForWorkflowRun: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/actions/jobs/{job_id}/rerun"]["response"]; + }; + reRunWorkflow: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun"]["response"]; + }; + reRunWorkflowFailedJobs: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun-failed-jobs"]["response"]; + }; + removeAllCustomLabelsFromSelfHostedRunnerForOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /orgs/{org}/actions/runners/{runner_id}/labels"]["response"]; + }; + removeAllCustomLabelsFromSelfHostedRunnerForRepo: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels"]["response"]; + }; + removeCustomLabelFromSelfHostedRunnerForOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /orgs/{org}/actions/runners/{runner_id}/labels/{name}"]["response"]; + }; + removeCustomLabelFromSelfHostedRunnerForRepo: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels/{name}"]["response"]; + }; + removeSelectedRepoFromOrgSecret: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"]["response"]; + }; + reviewPendingDeploymentsForRun: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments"]["response"]; + }; + setAllowedActionsOrganization: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /orgs/{org}/actions/permissions/selected-actions"]["response"]; + }; + setAllowedActionsRepository: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /repos/{owner}/{repo}/actions/permissions/selected-actions"]["response"]; + }; + setCustomLabelsForSelfHostedRunnerForOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /orgs/{org}/actions/runners/{runner_id}/labels"]["response"]; + }; + setCustomLabelsForSelfHostedRunnerForRepo: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /repos/{owner}/{repo}/actions/runners/{runner_id}/labels"]["response"]; + }; + setGithubActionsDefaultWorkflowPermissionsEnterprise: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /enterprises/{enterprise}/actions/permissions/workflow"]["response"]; + }; + setGithubActionsDefaultWorkflowPermissionsOrganization: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /orgs/{org}/actions/permissions/workflow"]["response"]; + }; + setGithubActionsDefaultWorkflowPermissionsRepository: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /repos/{owner}/{repo}/actions/permissions/workflow"]["response"]; + }; + setGithubActionsPermissionsOrganization: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /orgs/{org}/actions/permissions"]["response"]; + }; + setGithubActionsPermissionsRepository: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /repos/{owner}/{repo}/actions/permissions"]["response"]; + }; + 
setSelectedReposForOrgSecret: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /orgs/{org}/actions/secrets/{secret_name}/repositories"]["response"]; + }; + setSelectedRepositoriesEnabledGithubActionsOrganization: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /orgs/{org}/actions/permissions/repositories"]["response"]; + }; + setWorkflowAccessToRepository: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /repos/{owner}/{repo}/actions/permissions/access"]["response"]; + }; + }; + activity: { + checkRepoIsStarredByAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/starred/{owner}/{repo}"]["response"]; + }; + deleteRepoSubscription: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/subscription"]["response"]; + }; + deleteThreadSubscription: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /notifications/threads/{thread_id}/subscription"]["response"]; + }; + getFeeds: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /feeds"]["response"]; + }; + getRepoSubscription: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/subscription"]["response"]; + }; + getThread: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /notifications/threads/{thread_id}"]["response"]; + }; + getThreadSubscriptionForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /notifications/threads/{thread_id}/subscription"]["response"]; + }; + listEventsForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /users/{username}/events"]["response"]; + }; + listNotificationsForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /notifications"]["response"]; + }; + listOrgEventsForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /users/{username}/events/orgs/{org}"]["response"]; + }; + listPublicEvents: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /events"]["response"]; + }; + listPublicEventsForRepoNetwork: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /networks/{owner}/{repo}/events"]["response"]; + }; + listPublicEventsForUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /users/{username}/events/public"]["response"]; + }; + listPublicOrgEvents: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/events"]["response"]; + }; + listReceivedEventsForUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /users/{username}/received_events"]["response"]; + }; + listReceivedPublicEventsForUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /users/{username}/received_events/public"]["response"]; + }; + listRepoEvents: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/events"]["response"]; + }; + listRepoNotificationsForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/notifications"]["response"]; + }; + listReposStarredByAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/starred"]["response"]; + }; + listReposStarredByUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /users/{username}/starred"]["response"]; + }; + listReposWatchedByUser: { + 
parameters: RequestParameters & Omit; + response: Endpoints["GET /users/{username}/subscriptions"]["response"]; + }; + listStargazersForRepo: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/stargazers"]["response"]; + }; + listWatchedReposForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/subscriptions"]["response"]; + }; + listWatchersForRepo: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/subscribers"]["response"]; + }; + markNotificationsAsRead: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /notifications"]["response"]; + }; + markRepoNotificationsAsRead: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /repos/{owner}/{repo}/notifications"]["response"]; + }; + markThreadAsRead: { + parameters: RequestParameters & Omit; + response: Endpoints["PATCH /notifications/threads/{thread_id}"]["response"]; + }; + setRepoSubscription: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /repos/{owner}/{repo}/subscription"]["response"]; + }; + setThreadSubscription: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /notifications/threads/{thread_id}/subscription"]["response"]; + }; + starRepoForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /user/starred/{owner}/{repo}"]["response"]; + }; + unstarRepoForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /user/starred/{owner}/{repo}"]["response"]; + }; + }; + apps: { + addRepoToInstallation: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /user/installations/{installation_id}/repositories/{repository_id}"]["response"]; + }; + addRepoToInstallationForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /user/installations/{installation_id}/repositories/{repository_id}"]["response"]; + }; + checkToken: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /applications/{client_id}/token"]["response"]; + }; + createFromManifest: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /app-manifests/{code}/conversions"]["response"]; + }; + createInstallationAccessToken: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /app/installations/{installation_id}/access_tokens"]["response"]; + }; + deleteAuthorization: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /applications/{client_id}/grant"]["response"]; + }; + deleteInstallation: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /app/installations/{installation_id}"]["response"]; + }; + deleteToken: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /applications/{client_id}/token"]["response"]; + }; + getAuthenticated: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /app"]["response"]; + }; + getBySlug: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /apps/{app_slug}"]["response"]; + }; + getInstallation: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /app/installations/{installation_id}"]["response"]; + }; + getOrgInstallation: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/installation"]["response"]; + }; + getRepoInstallation: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/installation"]["response"]; + 
}; + getSubscriptionPlanForAccount: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /marketplace_listing/accounts/{account_id}"]["response"]; + }; + getSubscriptionPlanForAccountStubbed: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /marketplace_listing/stubbed/accounts/{account_id}"]["response"]; + }; + getUserInstallation: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /users/{username}/installation"]["response"]; + }; + getWebhookConfigForApp: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /app/hook/config"]["response"]; + }; + getWebhookDelivery: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /app/hook/deliveries/{delivery_id}"]["response"]; + }; + listAccountsForPlan: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /marketplace_listing/plans/{plan_id}/accounts"]["response"]; + }; + listAccountsForPlanStubbed: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /marketplace_listing/stubbed/plans/{plan_id}/accounts"]["response"]; + }; + listInstallationReposForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/installations/{installation_id}/repositories"]["response"]; + }; + listInstallations: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /app/installations"]["response"]; + }; + listInstallationsForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/installations"]["response"]; + }; + listPlans: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /marketplace_listing/plans"]["response"]; + }; + listPlansStubbed: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /marketplace_listing/stubbed/plans"]["response"]; + }; + listReposAccessibleToInstallation: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /installation/repositories"]["response"]; + }; + listSubscriptionsForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/marketplace_purchases"]["response"]; + }; + listSubscriptionsForAuthenticatedUserStubbed: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/marketplace_purchases/stubbed"]["response"]; + }; + listWebhookDeliveries: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /app/hook/deliveries"]["response"]; + }; + redeliverWebhookDelivery: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /app/hook/deliveries/{delivery_id}/attempts"]["response"]; + }; + removeRepoFromInstallation: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /user/installations/{installation_id}/repositories/{repository_id}"]["response"]; + }; + removeRepoFromInstallationForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /user/installations/{installation_id}/repositories/{repository_id}"]["response"]; + }; + resetToken: { + parameters: RequestParameters & Omit; + response: Endpoints["PATCH /applications/{client_id}/token"]["response"]; + }; + revokeInstallationAccessToken: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /installation/token"]["response"]; + }; + scopeToken: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /applications/{client_id}/token/scoped"]["response"]; + }; + suspendInstallation: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT 
/app/installations/{installation_id}/suspended"]["response"]; + }; + unsuspendInstallation: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /app/installations/{installation_id}/suspended"]["response"]; + }; + updateWebhookConfigForApp: { + parameters: RequestParameters & Omit; + response: Endpoints["PATCH /app/hook/config"]["response"]; + }; + }; + billing: { + getGithubActionsBillingOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/settings/billing/actions"]["response"]; + }; + getGithubActionsBillingUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /users/{username}/settings/billing/actions"]["response"]; + }; + getGithubAdvancedSecurityBillingGhe: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /enterprises/{enterprise}/settings/billing/advanced-security"]["response"]; + }; + getGithubAdvancedSecurityBillingOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/settings/billing/advanced-security"]["response"]; + }; + getGithubPackagesBillingOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/settings/billing/packages"]["response"]; + }; + getGithubPackagesBillingUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /users/{username}/settings/billing/packages"]["response"]; + }; + getSharedStorageBillingOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/settings/billing/shared-storage"]["response"]; + }; + getSharedStorageBillingUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /users/{username}/settings/billing/shared-storage"]["response"]; + }; + }; + checks: { + create: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/check-runs"]["response"]; + }; + createSuite: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/check-suites"]["response"]; + }; + get: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/check-runs/{check_run_id}"]["response"]; + }; + getSuite: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}"]["response"]; + }; + listAnnotations: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations"]["response"]; + }; + listForRef: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/commits/{ref}/check-runs"]["response"]; + }; + listForSuite: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs"]["response"]; + }; + listSuitesForRef: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/commits/{ref}/check-suites"]["response"]; + }; + rerequestRun: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/check-runs/{check_run_id}/rerequest"]["response"]; + }; + rerequestSuite: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest"]["response"]; + }; + setSuitesPreferences: { + parameters: RequestParameters & Omit; + response: Endpoints["PATCH /repos/{owner}/{repo}/check-suites/preferences"]["response"]; + }; + update: { + parameters: RequestParameters & Omit; + response: Endpoints["PATCH 
/repos/{owner}/{repo}/check-runs/{check_run_id}"]["response"]; + }; + }; + codeScanning: { + deleteAnalysis: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}{?confirm_delete}"]["response"]; + }; + getAlert: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}"]["response"]; + }; + getAnalysis: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}"]["response"]; + }; + getSarif: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/code-scanning/sarifs/{sarif_id}"]["response"]; + }; + listAlertInstances: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances"]["response"]; + }; + listAlertsForOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/code-scanning/alerts"]["response"]; + }; + listAlertsForRepo: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/code-scanning/alerts"]["response"]; + }; + listAlertsInstances: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances"]["response"]; + }; + listRecentAnalyses: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/code-scanning/analyses"]["response"]; + }; + updateAlert: { + parameters: RequestParameters & Omit; + response: Endpoints["PATCH /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}"]["response"]; + }; + uploadSarif: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/code-scanning/sarifs"]["response"]; + }; + }; + codesOfConduct: { + getAllCodesOfConduct: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /codes_of_conduct"]["response"]; + }; + getConductCode: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /codes_of_conduct/{key}"]["response"]; + }; + }; + codespaces: { + addRepositoryForSecretForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /user/codespaces/secrets/{secret_name}/repositories/{repository_id}"]["response"]; + }; + codespaceMachinesForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/codespaces/{codespace_name}/machines"]["response"]; + }; + createForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /user/codespaces"]["response"]; + }; + createOrUpdateRepoSecret: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /repos/{owner}/{repo}/codespaces/secrets/{secret_name}"]["response"]; + }; + createOrUpdateSecretForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /user/codespaces/secrets/{secret_name}"]["response"]; + }; + createWithPrForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/pulls/{pull_number}/codespaces"]["response"]; + }; + createWithRepoForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/codespaces"]["response"]; + }; + deleteForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /user/codespaces/{codespace_name}"]["response"]; + }; + 
deleteFromOrganization: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /orgs/{org}/members/{username}/codespaces/{codespace_name}"]["response"]; + }; + deleteRepoSecret: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/codespaces/secrets/{secret_name}"]["response"]; + }; + deleteSecretForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /user/codespaces/secrets/{secret_name}"]["response"]; + }; + exportForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /user/codespaces/{codespace_name}/exports"]["response"]; + }; + getExportDetailsForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/codespaces/{codespace_name}/exports/{export_id}"]["response"]; + }; + getForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/codespaces/{codespace_name}"]["response"]; + }; + getPublicKeyForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/codespaces/secrets/public-key"]["response"]; + }; + getRepoPublicKey: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/codespaces/secrets/public-key"]["response"]; + }; + getRepoSecret: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/codespaces/secrets/{secret_name}"]["response"]; + }; + getSecretForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/codespaces/secrets/{secret_name}"]["response"]; + }; + listDevcontainersInRepositoryForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/codespaces/devcontainers"]["response"]; + }; + listForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/codespaces"]["response"]; + }; + listInOrganization: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/codespaces"]["response"]; + }; + listInRepositoryForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/codespaces"]["response"]; + }; + listRepoSecrets: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/codespaces/secrets"]["response"]; + }; + listRepositoriesForSecretForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/codespaces/secrets/{secret_name}/repositories"]["response"]; + }; + listSecretsForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/codespaces/secrets"]["response"]; + }; + removeRepositoryForSecretForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /user/codespaces/secrets/{secret_name}/repositories/{repository_id}"]["response"]; + }; + repoMachinesForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/codespaces/machines"]["response"]; + }; + setRepositoriesForSecretForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /user/codespaces/secrets/{secret_name}/repositories"]["response"]; + }; + startForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /user/codespaces/{codespace_name}/start"]["response"]; + }; + stopForAuthenticatedUser: { + parameters: RequestParameters & Omit; + 
response: Endpoints["POST /user/codespaces/{codespace_name}/stop"]["response"]; + }; + stopInOrganization: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /orgs/{org}/members/{username}/codespaces/{codespace_name}/stop"]["response"]; + }; + updateForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["PATCH /user/codespaces/{codespace_name}"]["response"]; + }; + }; + dependabot: { + addSelectedRepoToOrgSecret: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}"]["response"]; + }; + createOrUpdateOrgSecret: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /orgs/{org}/dependabot/secrets/{secret_name}"]["response"]; + }; + createOrUpdateRepoSecret: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /repos/{owner}/{repo}/dependabot/secrets/{secret_name}"]["response"]; + }; + deleteOrgSecret: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /orgs/{org}/dependabot/secrets/{secret_name}"]["response"]; + }; + deleteRepoSecret: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/dependabot/secrets/{secret_name}"]["response"]; + }; + getOrgPublicKey: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/dependabot/secrets/public-key"]["response"]; + }; + getOrgSecret: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/dependabot/secrets/{secret_name}"]["response"]; + }; + getRepoPublicKey: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/dependabot/secrets/public-key"]["response"]; + }; + getRepoSecret: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/dependabot/secrets/{secret_name}"]["response"]; + }; + listOrgSecrets: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/dependabot/secrets"]["response"]; + }; + listRepoSecrets: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/dependabot/secrets"]["response"]; + }; + listSelectedReposForOrgSecret: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories"]["response"]; + }; + removeSelectedRepoFromOrgSecret: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}"]["response"]; + }; + setSelectedReposForOrgSecret: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories"]["response"]; + }; + }; + dependencyGraph: { + createRepositorySnapshot: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/dependency-graph/snapshots"]["response"]; + }; + diffRange: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/dependency-graph/compare/{basehead}"]["response"]; + }; + }; + emojis: { + get: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /emojis"]["response"]; + }; + }; + enterpriseAdmin: { + addCustomLabelsToSelfHostedRunnerForEnterprise: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /enterprises/{enterprise}/actions/runners/{runner_id}/labels"]["response"]; + }; + disableSelectedOrganizationGithubActionsEnterprise: { + parameters: RequestParameters & Omit; + 
response: Endpoints["DELETE /enterprises/{enterprise}/actions/permissions/organizations/{org_id}"]["response"]; + }; + enableSelectedOrganizationGithubActionsEnterprise: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /enterprises/{enterprise}/actions/permissions/organizations/{org_id}"]["response"]; + }; + getAllowedActionsEnterprise: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /enterprises/{enterprise}/actions/permissions/selected-actions"]["response"]; + }; + getGithubActionsPermissionsEnterprise: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /enterprises/{enterprise}/actions/permissions"]["response"]; + }; + getServerStatistics: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /enterprise-installation/{enterprise_or_org}/server-statistics"]["response"]; + }; + listLabelsForSelfHostedRunnerForEnterprise: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /enterprises/{enterprise}/actions/runners/{runner_id}/labels"]["response"]; + }; + listSelectedOrganizationsEnabledGithubActionsEnterprise: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /enterprises/{enterprise}/actions/permissions/organizations"]["response"]; + }; + removeAllCustomLabelsFromSelfHostedRunnerForEnterprise: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /enterprises/{enterprise}/actions/runners/{runner_id}/labels"]["response"]; + }; + removeCustomLabelFromSelfHostedRunnerForEnterprise: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /enterprises/{enterprise}/actions/runners/{runner_id}/labels/{name}"]["response"]; + }; + setAllowedActionsEnterprise: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /enterprises/{enterprise}/actions/permissions/selected-actions"]["response"]; + }; + setCustomLabelsForSelfHostedRunnerForEnterprise: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /enterprises/{enterprise}/actions/runners/{runner_id}/labels"]["response"]; + }; + setGithubActionsPermissionsEnterprise: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /enterprises/{enterprise}/actions/permissions"]["response"]; + }; + setSelectedOrganizationsEnabledGithubActionsEnterprise: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /enterprises/{enterprise}/actions/permissions/organizations"]["response"]; + }; + }; + gists: { + checkIsStarred: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /gists/{gist_id}/star"]["response"]; + }; + create: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /gists"]["response"]; + }; + createComment: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /gists/{gist_id}/comments"]["response"]; + }; + delete: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /gists/{gist_id}"]["response"]; + }; + deleteComment: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /gists/{gist_id}/comments/{comment_id}"]["response"]; + }; + fork: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /gists/{gist_id}/forks"]["response"]; + }; + get: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /gists/{gist_id}"]["response"]; + }; + getComment: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /gists/{gist_id}/comments/{comment_id}"]["response"]; + }; + getRevision: { + parameters: RequestParameters & Omit; + response: 
Endpoints["GET /gists/{gist_id}/{sha}"]["response"]; + }; + list: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /gists"]["response"]; + }; + listComments: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /gists/{gist_id}/comments"]["response"]; + }; + listCommits: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /gists/{gist_id}/commits"]["response"]; + }; + listForUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /users/{username}/gists"]["response"]; + }; + listForks: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /gists/{gist_id}/forks"]["response"]; + }; + listPublic: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /gists/public"]["response"]; + }; + listStarred: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /gists/starred"]["response"]; + }; + star: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /gists/{gist_id}/star"]["response"]; + }; + unstar: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /gists/{gist_id}/star"]["response"]; + }; + update: { + parameters: RequestParameters & Omit; + response: Endpoints["PATCH /gists/{gist_id}"]["response"]; + }; + updateComment: { + parameters: RequestParameters & Omit; + response: Endpoints["PATCH /gists/{gist_id}/comments/{comment_id}"]["response"]; + }; + }; + git: { + createBlob: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/git/blobs"]["response"]; + }; + createCommit: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/git/commits"]["response"]; + }; + createRef: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/git/refs"]["response"]; + }; + createTag: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/git/tags"]["response"]; + }; + createTree: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/git/trees"]["response"]; + }; + deleteRef: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/git/refs/{ref}"]["response"]; + }; + getBlob: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/git/blobs/{file_sha}"]["response"]; + }; + getCommit: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/git/commits/{commit_sha}"]["response"]; + }; + getRef: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/git/ref/{ref}"]["response"]; + }; + getTag: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/git/tags/{tag_sha}"]["response"]; + }; + getTree: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/git/trees/{tree_sha}"]["response"]; + }; + listMatchingRefs: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/git/matching-refs/{ref}"]["response"]; + }; + updateRef: { + parameters: RequestParameters & Omit; + response: Endpoints["PATCH /repos/{owner}/{repo}/git/refs/{ref}"]["response"]; + }; + }; + gitignore: { + getAllTemplates: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /gitignore/templates"]["response"]; + }; + getTemplate: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /gitignore/templates/{name}"]["response"]; + }; + }; + 
interactions: { + getRestrictionsForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/interaction-limits"]["response"]; + }; + getRestrictionsForOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/interaction-limits"]["response"]; + }; + getRestrictionsForRepo: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/interaction-limits"]["response"]; + }; + getRestrictionsForYourPublicRepos: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/interaction-limits"]["response"]; + }; + removeRestrictionsForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /user/interaction-limits"]["response"]; + }; + removeRestrictionsForOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /orgs/{org}/interaction-limits"]["response"]; + }; + removeRestrictionsForRepo: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/interaction-limits"]["response"]; + }; + removeRestrictionsForYourPublicRepos: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /user/interaction-limits"]["response"]; + }; + setRestrictionsForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /user/interaction-limits"]["response"]; + }; + setRestrictionsForOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /orgs/{org}/interaction-limits"]["response"]; + }; + setRestrictionsForRepo: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /repos/{owner}/{repo}/interaction-limits"]["response"]; + }; + setRestrictionsForYourPublicRepos: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /user/interaction-limits"]["response"]; + }; + }; + issues: { + addAssignees: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/issues/{issue_number}/assignees"]["response"]; + }; + addLabels: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/issues/{issue_number}/labels"]["response"]; + }; + checkUserCanBeAssigned: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/assignees/{assignee}"]["response"]; + }; + create: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/issues"]["response"]; + }; + createComment: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/issues/{issue_number}/comments"]["response"]; + }; + createLabel: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/labels"]["response"]; + }; + createMilestone: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/milestones"]["response"]; + }; + deleteComment: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}"]["response"]; + }; + deleteLabel: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/labels/{name}"]["response"]; + }; + deleteMilestone: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/milestones/{milestone_number}"]["response"]; + }; + get: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/issues/{issue_number}"]["response"]; + }; + getComment: { + parameters: 
RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/issues/comments/{comment_id}"]["response"]; + }; + getEvent: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/issues/events/{event_id}"]["response"]; + }; + getLabel: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/labels/{name}"]["response"]; + }; + getMilestone: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/milestones/{milestone_number}"]["response"]; + }; + list: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /issues"]["response"]; + }; + listAssignees: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/assignees"]["response"]; + }; + listComments: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/issues/{issue_number}/comments"]["response"]; + }; + listCommentsForRepo: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/issues/comments"]["response"]; + }; + listEvents: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/issues/{issue_number}/events"]["response"]; + }; + listEventsForRepo: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/issues/events"]["response"]; + }; + listEventsForTimeline: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/issues/{issue_number}/timeline"]["response"]; + }; + listForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/issues"]["response"]; + }; + listForOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/issues"]["response"]; + }; + listForRepo: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/issues"]["response"]; + }; + listLabelsForMilestone: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels"]["response"]; + }; + listLabelsForRepo: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/labels"]["response"]; + }; + listLabelsOnIssue: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/issues/{issue_number}/labels"]["response"]; + }; + listMilestones: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/milestones"]["response"]; + }; + lock: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /repos/{owner}/{repo}/issues/{issue_number}/lock"]["response"]; + }; + removeAllLabels: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels"]["response"]; + }; + removeAssignees: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees"]["response"]; + }; + removeLabel: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}"]["response"]; + }; + setLabels: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /repos/{owner}/{repo}/issues/{issue_number}/labels"]["response"]; + }; + unlock: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock"]["response"]; + }; + update: { + parameters: 
RequestParameters & Omit; + response: Endpoints["PATCH /repos/{owner}/{repo}/issues/{issue_number}"]["response"]; + }; + updateComment: { + parameters: RequestParameters & Omit; + response: Endpoints["PATCH /repos/{owner}/{repo}/issues/comments/{comment_id}"]["response"]; + }; + updateLabel: { + parameters: RequestParameters & Omit; + response: Endpoints["PATCH /repos/{owner}/{repo}/labels/{name}"]["response"]; + }; + updateMilestone: { + parameters: RequestParameters & Omit; + response: Endpoints["PATCH /repos/{owner}/{repo}/milestones/{milestone_number}"]["response"]; + }; + }; + licenses: { + get: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /licenses/{license}"]["response"]; + }; + getAllCommonlyUsed: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /licenses"]["response"]; + }; + getForRepo: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/license"]["response"]; + }; + }; + markdown: { + render: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /markdown"]["response"]; + }; + renderRaw: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /markdown/raw"]["response"]; + }; + }; + meta: { + get: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /meta"]["response"]; + }; + getOctocat: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /octocat"]["response"]; + }; + getZen: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /zen"]["response"]; + }; + root: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /"]["response"]; + }; + }; + migrations: { + cancelImport: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/import"]["response"]; + }; + deleteArchiveForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /user/migrations/{migration_id}/archive"]["response"]; + }; + deleteArchiveForOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /orgs/{org}/migrations/{migration_id}/archive"]["response"]; + }; + downloadArchiveForOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/migrations/{migration_id}/archive"]["response"]; + }; + getArchiveForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/migrations/{migration_id}/archive"]["response"]; + }; + getCommitAuthors: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/import/authors"]["response"]; + }; + getImportStatus: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/import"]["response"]; + }; + getLargeFiles: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/import/large_files"]["response"]; + }; + getStatusForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/migrations/{migration_id}"]["response"]; + }; + getStatusForOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/migrations/{migration_id}"]["response"]; + }; + listForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/migrations"]["response"]; + }; + listForOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/migrations"]["response"]; + }; + listReposForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET 
/user/migrations/{migration_id}/repositories"]["response"]; + }; + listReposForOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/migrations/{migration_id}/repositories"]["response"]; + }; + listReposForUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/migrations/{migration_id}/repositories"]["response"]; + }; + mapCommitAuthor: { + parameters: RequestParameters & Omit; + response: Endpoints["PATCH /repos/{owner}/{repo}/import/authors/{author_id}"]["response"]; + }; + setLfsPreference: { + parameters: RequestParameters & Omit; + response: Endpoints["PATCH /repos/{owner}/{repo}/import/lfs"]["response"]; + }; + startForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /user/migrations"]["response"]; + }; + startForOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /orgs/{org}/migrations"]["response"]; + }; + startImport: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /repos/{owner}/{repo}/import"]["response"]; + }; + unlockRepoForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock"]["response"]; + }; + unlockRepoForOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock"]["response"]; + }; + updateImport: { + parameters: RequestParameters & Omit; + response: Endpoints["PATCH /repos/{owner}/{repo}/import"]["response"]; + }; + }; + orgs: { + blockUser: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /orgs/{org}/blocks/{username}"]["response"]; + }; + cancelInvitation: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /orgs/{org}/invitations/{invitation_id}"]["response"]; + }; + checkBlockedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/blocks/{username}"]["response"]; + }; + checkMembershipForUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/members/{username}"]["response"]; + }; + checkPublicMembershipForUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/public_members/{username}"]["response"]; + }; + convertMemberToOutsideCollaborator: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /orgs/{org}/outside_collaborators/{username}"]["response"]; + }; + createInvitation: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /orgs/{org}/invitations"]["response"]; + }; + createWebhook: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /orgs/{org}/hooks"]["response"]; + }; + deleteWebhook: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /orgs/{org}/hooks/{hook_id}"]["response"]; + }; + get: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}"]["response"]; + }; + getMembershipForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/memberships/orgs/{org}"]["response"]; + }; + getMembershipForUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/memberships/{username}"]["response"]; + }; + getWebhook: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/hooks/{hook_id}"]["response"]; + }; + getWebhookConfigForOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["GET 
/orgs/{org}/hooks/{hook_id}/config"]["response"]; + }; + getWebhookDelivery: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}"]["response"]; + }; + list: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /organizations"]["response"]; + }; + listAppInstallations: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/installations"]["response"]; + }; + listBlockedUsers: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/blocks"]["response"]; + }; + listCustomRoles: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /organizations/{organization_id}/custom_roles"]["response"]; + }; + listFailedInvitations: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/failed_invitations"]["response"]; + }; + listForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/orgs"]["response"]; + }; + listForUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /users/{username}/orgs"]["response"]; + }; + listInvitationTeams: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/invitations/{invitation_id}/teams"]["response"]; + }; + listMembers: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/members"]["response"]; + }; + listMembershipsForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/memberships/orgs"]["response"]; + }; + listOutsideCollaborators: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/outside_collaborators"]["response"]; + }; + listPendingInvitations: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/invitations"]["response"]; + }; + listPublicMembers: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/public_members"]["response"]; + }; + listWebhookDeliveries: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/hooks/{hook_id}/deliveries"]["response"]; + }; + listWebhooks: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/hooks"]["response"]; + }; + pingWebhook: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /orgs/{org}/hooks/{hook_id}/pings"]["response"]; + }; + redeliverWebhookDelivery: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}/attempts"]["response"]; + }; + removeMember: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /orgs/{org}/members/{username}"]["response"]; + }; + removeMembershipForUser: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /orgs/{org}/memberships/{username}"]["response"]; + }; + removeOutsideCollaborator: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /orgs/{org}/outside_collaborators/{username}"]["response"]; + }; + removePublicMembershipForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /orgs/{org}/public_members/{username}"]["response"]; + }; + setMembershipForUser: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /orgs/{org}/memberships/{username}"]["response"]; + }; + setPublicMembershipForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /orgs/{org}/public_members/{username}"]["response"]; 
+ }; + unblockUser: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /orgs/{org}/blocks/{username}"]["response"]; + }; + update: { + parameters: RequestParameters & Omit; + response: Endpoints["PATCH /orgs/{org}"]["response"]; + }; + updateMembershipForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["PATCH /user/memberships/orgs/{org}"]["response"]; + }; + updateWebhook: { + parameters: RequestParameters & Omit; + response: Endpoints["PATCH /orgs/{org}/hooks/{hook_id}"]["response"]; + }; + updateWebhookConfigForOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["PATCH /orgs/{org}/hooks/{hook_id}/config"]["response"]; + }; + }; + packages: { + deletePackageForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /user/packages/{package_type}/{package_name}"]["response"]; + }; + deletePackageForOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /orgs/{org}/packages/{package_type}/{package_name}"]["response"]; + }; + deletePackageForUser: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /users/{username}/packages/{package_type}/{package_name}"]["response"]; + }; + deletePackageVersionForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /user/packages/{package_type}/{package_name}/versions/{package_version_id}"]["response"]; + }; + deletePackageVersionForOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}"]["response"]; + }; + deletePackageVersionForUser: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}"]["response"]; + }; + getAllPackageVersionsForAPackageOwnedByAnOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/packages/{package_type}/{package_name}/versions"]["response"]; + }; + getAllPackageVersionsForAPackageOwnedByTheAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/packages/{package_type}/{package_name}/versions"]["response"]; + }; + getAllPackageVersionsForPackageOwnedByAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/packages/{package_type}/{package_name}/versions"]["response"]; + }; + getAllPackageVersionsForPackageOwnedByOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/packages/{package_type}/{package_name}/versions"]["response"]; + }; + getAllPackageVersionsForPackageOwnedByUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /users/{username}/packages/{package_type}/{package_name}/versions"]["response"]; + }; + getPackageForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/packages/{package_type}/{package_name}"]["response"]; + }; + getPackageForOrganization: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/packages/{package_type}/{package_name}"]["response"]; + }; + getPackageForUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /users/{username}/packages/{package_type}/{package_name}"]["response"]; + }; + getPackageVersionForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET 
/user/packages/{package_type}/{package_name}/versions/{package_version_id}"]["response"]; + }; + getPackageVersionForOrganization: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}"]["response"]; + }; + getPackageVersionForUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}"]["response"]; + }; + listPackagesForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/packages"]["response"]; + }; + listPackagesForOrganization: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/packages"]["response"]; + }; + listPackagesForUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /users/{username}/packages"]["response"]; + }; + restorePackageForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /user/packages/{package_type}/{package_name}/restore{?token}"]["response"]; + }; + restorePackageForOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /orgs/{org}/packages/{package_type}/{package_name}/restore{?token}"]["response"]; + }; + restorePackageForUser: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /users/{username}/packages/{package_type}/{package_name}/restore{?token}"]["response"]; + }; + restorePackageVersionForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /user/packages/{package_type}/{package_name}/versions/{package_version_id}/restore"]["response"]; + }; + restorePackageVersionForOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore"]["response"]; + }; + restorePackageVersionForUser: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore"]["response"]; + }; + }; + projects: { + addCollaborator: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /projects/{project_id}/collaborators/{username}"]["response"]; + }; + createCard: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /projects/columns/{column_id}/cards"]["response"]; + }; + createColumn: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /projects/{project_id}/columns"]["response"]; + }; + createForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /user/projects"]["response"]; + }; + createForOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /orgs/{org}/projects"]["response"]; + }; + createForRepo: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/projects"]["response"]; + }; + delete: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /projects/{project_id}"]["response"]; + }; + deleteCard: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /projects/columns/cards/{card_id}"]["response"]; + }; + deleteColumn: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /projects/columns/{column_id}"]["response"]; + }; + get: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /projects/{project_id}"]["response"]; + }; + getCard: { + parameters: RequestParameters & Omit; 
+ response: Endpoints["GET /projects/columns/cards/{card_id}"]["response"]; + }; + getColumn: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /projects/columns/{column_id}"]["response"]; + }; + getPermissionForUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /projects/{project_id}/collaborators/{username}/permission"]["response"]; + }; + listCards: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /projects/columns/{column_id}/cards"]["response"]; + }; + listCollaborators: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /projects/{project_id}/collaborators"]["response"]; + }; + listColumns: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /projects/{project_id}/columns"]["response"]; + }; + listForOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/projects"]["response"]; + }; + listForRepo: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/projects"]["response"]; + }; + listForUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /users/{username}/projects"]["response"]; + }; + moveCard: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /projects/columns/cards/{card_id}/moves"]["response"]; + }; + moveColumn: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /projects/columns/{column_id}/moves"]["response"]; + }; + removeCollaborator: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /projects/{project_id}/collaborators/{username}"]["response"]; + }; + update: { + parameters: RequestParameters & Omit; + response: Endpoints["PATCH /projects/{project_id}"]["response"]; + }; + updateCard: { + parameters: RequestParameters & Omit; + response: Endpoints["PATCH /projects/columns/cards/{card_id}"]["response"]; + }; + updateColumn: { + parameters: RequestParameters & Omit; + response: Endpoints["PATCH /projects/columns/{column_id}"]["response"]; + }; + }; + pulls: { + checkIfMerged: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/pulls/{pull_number}/merge"]["response"]; + }; + create: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/pulls"]["response"]; + }; + createReplyForReviewComment: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies"]["response"]; + }; + createReview: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews"]["response"]; + }; + createReviewComment: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/pulls/{pull_number}/comments"]["response"]; + }; + deletePendingReview: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"]["response"]; + }; + deleteReviewComment: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}"]["response"]; + }; + dismissReview: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals"]["response"]; + }; + get: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/pulls/{pull_number}"]["response"]; + }; + getReview: { + parameters: 
RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"]["response"]; + }; + getReviewComment: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}"]["response"]; + }; + list: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/pulls"]["response"]; + }; + listCommentsForReview: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments"]["response"]; + }; + listCommits: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/pulls/{pull_number}/commits"]["response"]; + }; + listFiles: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/pulls/{pull_number}/files"]["response"]; + }; + listRequestedReviewers: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"]["response"]; + }; + listReviewComments: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/pulls/{pull_number}/comments"]["response"]; + }; + listReviewCommentsForRepo: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/pulls/comments"]["response"]; + }; + listReviews: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews"]["response"]; + }; + merge: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge"]["response"]; + }; + removeRequestedReviewers: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"]["response"]; + }; + requestReviewers: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"]["response"]; + }; + submitReview: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events"]["response"]; + }; + update: { + parameters: RequestParameters & Omit; + response: Endpoints["PATCH /repos/{owner}/{repo}/pulls/{pull_number}"]["response"]; + }; + updateBranch: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch"]["response"]; + }; + updateReview: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"]["response"]; + }; + updateReviewComment: { + parameters: RequestParameters & Omit; + response: Endpoints["PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}"]["response"]; + }; + }; + rateLimit: { + get: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /rate_limit"]["response"]; + }; + }; + reactions: { + createForCommitComment: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/comments/{comment_id}/reactions"]["response"]; + }; + createForIssue: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/issues/{issue_number}/reactions"]["response"]; + }; + createForIssueComment: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions"]["response"]; + }; + 
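// Usage sketch for the generated `pulls` declarations above, assuming the published
// @octokit/core and @octokit/plugin-rest-endpoint-methods packages; the owner, repo,
// and pull_number values are placeholders. In the published typings each `parameters`
// entry is RequestParameters intersected with
// Omit<Endpoints[route]["parameters"], "baseUrl" | "headers" | "mediaType">.
import { Octokit } from "@octokit/core";
import {
  restEndpointMethods,
  RestEndpointMethodTypes,
} from "@octokit/plugin-rest-endpoint-methods";

// Response payload type for GET /repos/{owner}/{repo}/pulls/{pull_number}/files.
type PullRequestFiles =
  RestEndpointMethodTypes["pulls"]["listFiles"]["response"]["data"];

const RestOctokit = Octokit.plugin(restEndpointMethods);

async function changedFiles(token: string): Promise<PullRequestFiles> {
  const octokit = new RestOctokit({ auth: token });
  // Both the argument object and the resolved value are checked against the
  // Endpoints lookup used by the declarations above.
  const { data } = await octokit.rest.pulls.listFiles({
    owner: "octocat",
    repo: "hello-world",
    pull_number: 1,
  });
  return data;
}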
createForPullRequestReviewComment: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions"]["response"]; + }; + createForRelease: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/releases/{release_id}/reactions"]["response"]; + }; + createForTeamDiscussionCommentInOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions"]["response"]; + }; + createForTeamDiscussionInOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions"]["response"]; + }; + deleteForCommitComment: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}"]["response"]; + }; + deleteForIssue: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}"]["response"]; + }; + deleteForIssueComment: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}"]["response"]; + }; + deleteForPullRequestComment: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}"]["response"]; + }; + deleteForRelease: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/releases/{release_id}/reactions/{reaction_id}"]["response"]; + }; + deleteForTeamDiscussion: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}"]["response"]; + }; + deleteForTeamDiscussionComment: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}"]["response"]; + }; + listForCommitComment: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/comments/{comment_id}/reactions"]["response"]; + }; + listForIssue: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/issues/{issue_number}/reactions"]["response"]; + }; + listForIssueComment: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions"]["response"]; + }; + listForPullRequestReviewComment: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions"]["response"]; + }; + listForRelease: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/releases/{release_id}/reactions"]["response"]; + }; + listForTeamDiscussionCommentInOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions"]["response"]; + }; + listForTeamDiscussionInOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions"]["response"]; + }; + }; + repos: { + acceptInvitation: { + parameters: RequestParameters & Omit; + response: Endpoints["PATCH 
/user/repository_invitations/{invitation_id}"]["response"]; + }; + acceptInvitationForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["PATCH /user/repository_invitations/{invitation_id}"]["response"]; + }; + addAppAccessRestrictions: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps"]["response"]; + }; + addCollaborator: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /repos/{owner}/{repo}/collaborators/{username}"]["response"]; + }; + addStatusCheckContexts: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts"]["response"]; + }; + addTeamAccessRestrictions: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams"]["response"]; + }; + addUserAccessRestrictions: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users"]["response"]; + }; + checkCollaborator: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/collaborators/{username}"]["response"]; + }; + checkVulnerabilityAlerts: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/vulnerability-alerts"]["response"]; + }; + codeownersErrors: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/codeowners/errors"]["response"]; + }; + compareCommits: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/compare/{base}...{head}"]["response"]; + }; + compareCommitsWithBasehead: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/compare/{basehead}"]["response"]; + }; + createAutolink: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/autolinks"]["response"]; + }; + createCommitComment: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/commits/{commit_sha}/comments"]["response"]; + }; + createCommitSignatureProtection: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures"]["response"]; + }; + createCommitStatus: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/statuses/{sha}"]["response"]; + }; + createDeployKey: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/keys"]["response"]; + }; + createDeployment: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/deployments"]["response"]; + }; + createDeploymentStatus: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses"]["response"]; + }; + createDispatchEvent: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/dispatches"]["response"]; + }; + createForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /user/repos"]["response"]; + }; + createFork: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/forks"]["response"]; + }; + createInOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["POST 
/orgs/{org}/repos"]["response"]; + }; + createOrUpdateEnvironment: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /repos/{owner}/{repo}/environments/{environment_name}"]["response"]; + }; + createOrUpdateFileContents: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /repos/{owner}/{repo}/contents/{path}"]["response"]; + }; + createPagesSite: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/pages"]["response"]; + }; + createRelease: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/releases"]["response"]; + }; + createTagProtection: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/tags/protection"]["response"]; + }; + createUsingTemplate: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{template_owner}/{template_repo}/generate"]["response"]; + }; + createWebhook: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/hooks"]["response"]; + }; + declineInvitation: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /user/repository_invitations/{invitation_id}"]["response"]; + }; + declineInvitationForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /user/repository_invitations/{invitation_id}"]["response"]; + }; + delete: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}"]["response"]; + }; + deleteAccessRestrictions: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions"]["response"]; + }; + deleteAdminBranchProtection: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"]["response"]; + }; + deleteAnEnvironment: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/environments/{environment_name}"]["response"]; + }; + deleteAutolink: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/autolinks/{autolink_id}"]["response"]; + }; + deleteBranchProtection: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/branches/{branch}/protection"]["response"]; + }; + deleteCommitComment: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/comments/{comment_id}"]["response"]; + }; + deleteCommitSignatureProtection: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures"]["response"]; + }; + deleteDeployKey: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/keys/{key_id}"]["response"]; + }; + deleteDeployment: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/deployments/{deployment_id}"]["response"]; + }; + deleteFile: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/contents/{path}"]["response"]; + }; + deleteInvitation: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/invitations/{invitation_id}"]["response"]; + }; + deletePagesSite: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/pages"]["response"]; + }; + 
deletePullRequestReviewProtection: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"]["response"]; + }; + deleteRelease: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/releases/{release_id}"]["response"]; + }; + deleteReleaseAsset: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}"]["response"]; + }; + deleteTagProtection: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/tags/protection/{tag_protection_id}"]["response"]; + }; + deleteWebhook: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/hooks/{hook_id}"]["response"]; + }; + disableAutomatedSecurityFixes: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/automated-security-fixes"]["response"]; + }; + disableLfsForRepo: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/lfs"]["response"]; + }; + disableVulnerabilityAlerts: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/vulnerability-alerts"]["response"]; + }; + downloadArchive: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/zipball/{ref}"]["response"]; + }; + downloadTarballArchive: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/tarball/{ref}"]["response"]; + }; + downloadZipballArchive: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/zipball/{ref}"]["response"]; + }; + enableAutomatedSecurityFixes: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /repos/{owner}/{repo}/automated-security-fixes"]["response"]; + }; + enableLfsForRepo: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /repos/{owner}/{repo}/lfs"]["response"]; + }; + enableVulnerabilityAlerts: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /repos/{owner}/{repo}/vulnerability-alerts"]["response"]; + }; + generateReleaseNotes: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/releases/generate-notes"]["response"]; + }; + get: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}"]["response"]; + }; + getAccessRestrictions: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions"]["response"]; + }; + getAdminBranchProtection: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"]["response"]; + }; + getAllEnvironments: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/environments"]["response"]; + }; + getAllStatusCheckContexts: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts"]["response"]; + }; + getAllTopics: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/topics"]["response"]; + }; + getAppsWithAccessToProtectedBranch: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps"]["response"]; + }; + 
getAutolink: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/autolinks/{autolink_id}"]["response"]; + }; + getBranch: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/branches/{branch}"]["response"]; + }; + getBranchProtection: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/branches/{branch}/protection"]["response"]; + }; + getClones: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/traffic/clones"]["response"]; + }; + getCodeFrequencyStats: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/stats/code_frequency"]["response"]; + }; + getCollaboratorPermissionLevel: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/collaborators/{username}/permission"]["response"]; + }; + getCombinedStatusForRef: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/commits/{ref}/status"]["response"]; + }; + getCommit: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/commits/{ref}"]["response"]; + }; + getCommitActivityStats: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/stats/commit_activity"]["response"]; + }; + getCommitComment: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/comments/{comment_id}"]["response"]; + }; + getCommitSignatureProtection: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures"]["response"]; + }; + getCommunityProfileMetrics: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/community/profile"]["response"]; + }; + getContent: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/contents/{path}"]["response"]; + }; + getContributorsStats: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/stats/contributors"]["response"]; + }; + getDeployKey: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/keys/{key_id}"]["response"]; + }; + getDeployment: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/deployments/{deployment_id}"]["response"]; + }; + getDeploymentStatus: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}"]["response"]; + }; + getEnvironment: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/environments/{environment_name}"]["response"]; + }; + getLatestPagesBuild: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/pages/builds/latest"]["response"]; + }; + getLatestRelease: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/releases/latest"]["response"]; + }; + getPages: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/pages"]["response"]; + }; + getPagesBuild: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/pages/builds/{build_id}"]["response"]; + }; + getPagesHealthCheck: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/pages/health"]["response"]; + }; 
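// Sketch of reusing the response type of repos.get (GET /repos/{owner}/{repo})
// declared above; the helper below is illustrative only.
import { RestEndpointMethodTypes } from "@octokit/plugin-rest-endpoint-methods";

type Repository = RestEndpointMethodTypes["repos"]["get"]["response"]["data"];

// Summarize a fetched repository without re-declaring any of its fields by hand.
function describeRepository(repo: Repository): string {
  const visibility = repo.private ? "private" : "public";
  return `${repo.full_name} (${visibility}, default branch ${repo.default_branch})`;
}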
+ getParticipationStats: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/stats/participation"]["response"]; + }; + getPullRequestReviewProtection: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"]["response"]; + }; + getPunchCardStats: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/stats/punch_card"]["response"]; + }; + getReadme: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/readme"]["response"]; + }; + getReadmeInDirectory: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/readme/{dir}"]["response"]; + }; + getRelease: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/releases/{release_id}"]["response"]; + }; + getReleaseAsset: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/releases/assets/{asset_id}"]["response"]; + }; + getReleaseByTag: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/releases/tags/{tag}"]["response"]; + }; + getStatusChecksProtection: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"]["response"]; + }; + getTeamsWithAccessToProtectedBranch: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams"]["response"]; + }; + getTopPaths: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/traffic/popular/paths"]["response"]; + }; + getTopReferrers: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/traffic/popular/referrers"]["response"]; + }; + getUsersWithAccessToProtectedBranch: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users"]["response"]; + }; + getViews: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/traffic/views"]["response"]; + }; + getWebhook: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/hooks/{hook_id}"]["response"]; + }; + getWebhookConfigForRepo: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/hooks/{hook_id}/config"]["response"]; + }; + getWebhookDelivery: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}"]["response"]; + }; + listAutolinks: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/autolinks"]["response"]; + }; + listBranches: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/branches"]["response"]; + }; + listBranchesForHeadCommit: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head"]["response"]; + }; + listCollaborators: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/collaborators"]["response"]; + }; + listCommentsForCommit: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/commits/{commit_sha}/comments"]["response"]; + }; + listCommitCommentsForRepo: { + 
parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/comments"]["response"]; + }; + listCommitStatusesForRef: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/commits/{ref}/statuses"]["response"]; + }; + listCommits: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/commits"]["response"]; + }; + listContributors: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/contributors"]["response"]; + }; + listDeployKeys: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/keys"]["response"]; + }; + listDeploymentStatuses: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses"]["response"]; + }; + listDeployments: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/deployments"]["response"]; + }; + listForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/repos"]["response"]; + }; + listForOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/repos"]["response"]; + }; + listForUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /users/{username}/repos"]["response"]; + }; + listForks: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/forks"]["response"]; + }; + listInvitations: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/invitations"]["response"]; + }; + listInvitationsForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/repository_invitations"]["response"]; + }; + listLanguages: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/languages"]["response"]; + }; + listPagesBuilds: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/pages/builds"]["response"]; + }; + listPublic: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repositories"]["response"]; + }; + listPullRequestsAssociatedWithCommit: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls"]["response"]; + }; + listReleaseAssets: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/releases/{release_id}/assets"]["response"]; + }; + listReleases: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/releases"]["response"]; + }; + listTagProtection: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/tags/protection"]["response"]; + }; + listTags: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/tags"]["response"]; + }; + listTeams: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/teams"]["response"]; + }; + listWebhookDeliveries: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries"]["response"]; + }; + listWebhooks: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/hooks"]["response"]; + }; + merge: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/merges"]["response"]; + }; + mergeUpstream: { + parameters: 
RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/merge-upstream"]["response"]; + }; + pingWebhook: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/hooks/{hook_id}/pings"]["response"]; + }; + redeliverWebhookDelivery: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}/attempts"]["response"]; + }; + removeAppAccessRestrictions: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps"]["response"]; + }; + removeCollaborator: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/collaborators/{username}"]["response"]; + }; + removeStatusCheckContexts: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts"]["response"]; + }; + removeStatusCheckProtection: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"]["response"]; + }; + removeTeamAccessRestrictions: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams"]["response"]; + }; + removeUserAccessRestrictions: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users"]["response"]; + }; + renameBranch: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/branches/{branch}/rename"]["response"]; + }; + replaceAllTopics: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /repos/{owner}/{repo}/topics"]["response"]; + }; + requestPagesBuild: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/pages/builds"]["response"]; + }; + setAdminBranchProtection: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"]["response"]; + }; + setAppAccessRestrictions: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps"]["response"]; + }; + setStatusCheckContexts: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts"]["response"]; + }; + setTeamAccessRestrictions: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams"]["response"]; + }; + setUserAccessRestrictions: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users"]["response"]; + }; + testPushWebhook: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/hooks/{hook_id}/tests"]["response"]; + }; + transfer: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/transfer"]["response"]; + }; + update: { + parameters: RequestParameters & Omit; + response: Endpoints["PATCH /repos/{owner}/{repo}"]["response"]; + }; + updateBranchProtection: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /repos/{owner}/{repo}/branches/{branch}/protection"]["response"]; + }; + 
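// Sketch of the Endpoints lookup these declarations delegate to, using
// updateBranchProtection (PUT /repos/{owner}/{repo}/branches/{branch}/protection)
// as the example; the owner, repo, branch, and status-check context are placeholders.
import { Endpoints } from "@octokit/types";

type UpdateBranchProtectionParams =
  Endpoints["PUT /repos/{owner}/{repo}/branches/{branch}/protection"]["parameters"];
type UpdateBranchProtectionResponse =
  Endpoints["PUT /repos/{owner}/{repo}/branches/{branch}/protection"]["response"];

const protectMain: UpdateBranchProtectionParams = {
  owner: "octocat",
  repo: "hello-world",
  branch: "main",
  required_status_checks: { strict: true, contexts: ["ci/test"] },
  enforce_admins: true,
  required_pull_request_reviews: null,
  restrictions: null,
};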
updateCommitComment: { + parameters: RequestParameters & Omit; + response: Endpoints["PATCH /repos/{owner}/{repo}/comments/{comment_id}"]["response"]; + }; + updateInformationAboutPagesSite: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /repos/{owner}/{repo}/pages"]["response"]; + }; + updateInvitation: { + parameters: RequestParameters & Omit; + response: Endpoints["PATCH /repos/{owner}/{repo}/invitations/{invitation_id}"]["response"]; + }; + updatePullRequestReviewProtection: { + parameters: RequestParameters & Omit; + response: Endpoints["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"]["response"]; + }; + updateRelease: { + parameters: RequestParameters & Omit; + response: Endpoints["PATCH /repos/{owner}/{repo}/releases/{release_id}"]["response"]; + }; + updateReleaseAsset: { + parameters: RequestParameters & Omit; + response: Endpoints["PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}"]["response"]; + }; + updateStatusCheckPotection: { + parameters: RequestParameters & Omit; + response: Endpoints["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"]["response"]; + }; + updateStatusCheckProtection: { + parameters: RequestParameters & Omit; + response: Endpoints["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"]["response"]; + }; + updateWebhook: { + parameters: RequestParameters & Omit; + response: Endpoints["PATCH /repos/{owner}/{repo}/hooks/{hook_id}"]["response"]; + }; + updateWebhookConfigForRepo: { + parameters: RequestParameters & Omit; + response: Endpoints["PATCH /repos/{owner}/{repo}/hooks/{hook_id}/config"]["response"]; + }; + uploadReleaseAsset: { + parameters: RequestParameters & Omit; + response: Endpoints["POST {origin}/repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}"]["response"]; + }; + }; + search: { + code: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /search/code"]["response"]; + }; + commits: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /search/commits"]["response"]; + }; + issuesAndPullRequests: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /search/issues"]["response"]; + }; + labels: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /search/labels"]["response"]; + }; + repos: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /search/repositories"]["response"]; + }; + topics: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /search/topics"]["response"]; + }; + users: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /search/users"]["response"]; + }; + }; + secretScanning: { + getAlert: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}"]["response"]; + }; + listAlertsForEnterprise: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /enterprises/{enterprise}/secret-scanning/alerts"]["response"]; + }; + listAlertsForOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/secret-scanning/alerts"]["response"]; + }; + listAlertsForRepo: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/secret-scanning/alerts"]["response"]; + }; + listLocationsForAlert: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations"]["response"]; + }; + 
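// Sketch of calling the typed search.issuesAndPullRequests method (GET /search/issues)
// declared above from a GitHub Action, assuming the @actions/core and @actions/github
// packages vendored elsewhere in this patch; the `github-token` input name and the
// search query are placeholders.
import * as core from "@actions/core";
import * as github from "@actions/github";

async function countOpenPullRequests(repo: string): Promise<number> {
  // getOctokit wires up the rest-endpoint-methods plugin, so `.rest.search` carries
  // the parameter and response types shown in these declarations.
  const octokit = github.getOctokit(core.getInput("github-token"));
  const { data } = await octokit.rest.search.issuesAndPullRequests({
    q: `repo:${repo} is:pr is:open`,
    per_page: 1,
  });
  return data.total_count;
}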
updateAlert: { + parameters: RequestParameters & Omit; + response: Endpoints["PATCH /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}"]["response"]; + }; + }; + teams: { + addOrUpdateMembershipForUserInOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /orgs/{org}/teams/{team_slug}/memberships/{username}"]["response"]; + }; + addOrUpdateProjectPermissionsInOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}"]["response"]; + }; + addOrUpdateRepoPermissionsInOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"]["response"]; + }; + checkPermissionsForProjectInOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/teams/{team_slug}/projects/{project_id}"]["response"]; + }; + checkPermissionsForRepoInOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"]["response"]; + }; + create: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /orgs/{org}/teams"]["response"]; + }; + createDiscussionCommentInOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments"]["response"]; + }; + createDiscussionInOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /orgs/{org}/teams/{team_slug}/discussions"]["response"]; + }; + deleteDiscussionCommentInOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"]["response"]; + }; + deleteDiscussionInOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"]["response"]; + }; + deleteInOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /orgs/{org}/teams/{team_slug}"]["response"]; + }; + getByName: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/teams/{team_slug}"]["response"]; + }; + getDiscussionCommentInOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"]["response"]; + }; + getDiscussionInOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"]["response"]; + }; + getMembershipForUserInOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/teams/{team_slug}/memberships/{username}"]["response"]; + }; + list: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/teams"]["response"]; + }; + listChildInOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/teams/{team_slug}/teams"]["response"]; + }; + listDiscussionCommentsInOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments"]["response"]; + }; + listDiscussionsInOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/teams/{team_slug}/discussions"]["response"]; + }; + listForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/teams"]["response"]; + }; + listMembersInOrg: { + parameters: RequestParameters & Omit; + response: 
Endpoints["GET /orgs/{org}/teams/{team_slug}/members"]["response"]; + }; + listPendingInvitationsInOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/teams/{team_slug}/invitations"]["response"]; + }; + listProjectsInOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/teams/{team_slug}/projects"]["response"]; + }; + listReposInOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/teams/{team_slug}/repos"]["response"]; + }; + removeMembershipForUserInOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}"]["response"]; + }; + removeProjectInOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}"]["response"]; + }; + removeRepoInOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"]["response"]; + }; + updateDiscussionCommentInOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"]["response"]; + }; + updateDiscussionInOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"]["response"]; + }; + updateInOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["PATCH /orgs/{org}/teams/{team_slug}"]["response"]; + }; + }; + users: { + addEmailForAuthenticated: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /user/emails"]["response"]; + }; + addEmailForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /user/emails"]["response"]; + }; + block: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /user/blocks/{username}"]["response"]; + }; + checkBlocked: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/blocks/{username}"]["response"]; + }; + checkFollowingForUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /users/{username}/following/{target_user}"]["response"]; + }; + checkPersonIsFollowedByAuthenticated: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/following/{username}"]["response"]; + }; + createGpgKeyForAuthenticated: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /user/gpg_keys"]["response"]; + }; + createGpgKeyForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /user/gpg_keys"]["response"]; + }; + createPublicSshKeyForAuthenticated: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /user/keys"]["response"]; + }; + createPublicSshKeyForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /user/keys"]["response"]; + }; + deleteEmailForAuthenticated: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /user/emails"]["response"]; + }; + deleteEmailForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /user/emails"]["response"]; + }; + deleteGpgKeyForAuthenticated: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /user/gpg_keys/{gpg_key_id}"]["response"]; + }; + deleteGpgKeyForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /user/gpg_keys/{gpg_key_id}"]["response"]; + }; + 
deletePublicSshKeyForAuthenticated: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /user/keys/{key_id}"]["response"]; + }; + deletePublicSshKeyForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /user/keys/{key_id}"]["response"]; + }; + follow: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /user/following/{username}"]["response"]; + }; + getAuthenticated: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user"]["response"]; + }; + getByUsername: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /users/{username}"]["response"]; + }; + getContextForUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /users/{username}/hovercard"]["response"]; + }; + getGpgKeyForAuthenticated: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/gpg_keys/{gpg_key_id}"]["response"]; + }; + getGpgKeyForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/gpg_keys/{gpg_key_id}"]["response"]; + }; + getPublicSshKeyForAuthenticated: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/keys/{key_id}"]["response"]; + }; + getPublicSshKeyForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/keys/{key_id}"]["response"]; + }; + list: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /users"]["response"]; + }; + listBlockedByAuthenticated: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/blocks"]["response"]; + }; + listBlockedByAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/blocks"]["response"]; + }; + listEmailsForAuthenticated: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/emails"]["response"]; + }; + listEmailsForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/emails"]["response"]; + }; + listFollowedByAuthenticated: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/following"]["response"]; + }; + listFollowedByAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/following"]["response"]; + }; + listFollowersForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/followers"]["response"]; + }; + listFollowersForUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /users/{username}/followers"]["response"]; + }; + listFollowingForUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /users/{username}/following"]["response"]; + }; + listGpgKeysForAuthenticated: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/gpg_keys"]["response"]; + }; + listGpgKeysForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/gpg_keys"]["response"]; + }; + listGpgKeysForUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /users/{username}/gpg_keys"]["response"]; + }; + listPublicEmailsForAuthenticated: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/public_emails"]["response"]; + }; + listPublicEmailsForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/public_emails"]["response"]; + }; + listPublicKeysForUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET 
/users/{username}/keys"]["response"]; + }; + listPublicSshKeysForAuthenticated: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/keys"]["response"]; + }; + listPublicSshKeysForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/keys"]["response"]; + }; + setPrimaryEmailVisibilityForAuthenticated: { + parameters: RequestParameters & Omit; + response: Endpoints["PATCH /user/email/visibility"]["response"]; + }; + setPrimaryEmailVisibilityForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["PATCH /user/email/visibility"]["response"]; + }; + unblock: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /user/blocks/{username}"]["response"]; + }; + unfollow: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /user/following/{username}"]["response"]; + }; + updateAuthenticated: { + parameters: RequestParameters & Omit; + response: Endpoints["PATCH /user"]["response"]; + }; + }; +}; diff --git a/node_modules/@octokit/plugin-rest-endpoint-methods/dist-types/index.d.ts b/node_modules/@octokit/plugin-rest-endpoint-methods/dist-types/index.d.ts new file mode 100644 index 0000000..a81f3b0 --- /dev/null +++ b/node_modules/@octokit/plugin-rest-endpoint-methods/dist-types/index.d.ts @@ -0,0 +1,11 @@ +import { Octokit } from "@octokit/core"; +export { RestEndpointMethodTypes } from "./generated/parameters-and-response-types"; +import { Api } from "./types"; +export declare function restEndpointMethods(octokit: Octokit): Api; +export declare namespace restEndpointMethods { + var VERSION: string; +} +export declare function legacyRestEndpointMethods(octokit: Octokit): Api["rest"] & Api; +export declare namespace legacyRestEndpointMethods { + var VERSION: string; +} diff --git a/node_modules/@octokit/plugin-rest-endpoint-methods/dist-types/types.d.ts b/node_modules/@octokit/plugin-rest-endpoint-methods/dist-types/types.d.ts new file mode 100644 index 0000000..3628f02 --- /dev/null +++ b/node_modules/@octokit/plugin-rest-endpoint-methods/dist-types/types.d.ts @@ -0,0 +1,18 @@ +import { Route, RequestParameters } from "@octokit/types"; +import { RestEndpointMethods } from "./generated/method-types"; +export declare type Api = { + rest: RestEndpointMethods; +}; +export declare type EndpointDecorations = { + mapToData?: string; + deprecated?: string; + renamed?: [string, string]; + renamedParameters?: { + [name: string]: string; + }; +}; +export declare type EndpointsDefaultsAndDecorations = { + [scope: string]: { + [methodName: string]: [Route, RequestParameters?, EndpointDecorations?]; + }; +}; diff --git a/node_modules/@octokit/plugin-rest-endpoint-methods/dist-types/version.d.ts b/node_modules/@octokit/plugin-rest-endpoint-methods/dist-types/version.d.ts new file mode 100644 index 0000000..04c17e0 --- /dev/null +++ b/node_modules/@octokit/plugin-rest-endpoint-methods/dist-types/version.d.ts @@ -0,0 +1 @@ +export declare const VERSION = "5.16.2"; diff --git a/node_modules/@octokit/plugin-rest-endpoint-methods/dist-web/index.js b/node_modules/@octokit/plugin-rest-endpoint-methods/dist-web/index.js new file mode 100644 index 0000000..e35644d --- /dev/null +++ b/node_modules/@octokit/plugin-rest-endpoint-methods/dist-web/index.js @@ -0,0 +1,1745 @@ +const Endpoints = { + actions: { + addCustomLabelsToSelfHostedRunnerForOrg: [ + "POST /orgs/{org}/actions/runners/{runner_id}/labels", + ], + addCustomLabelsToSelfHostedRunnerForRepo: [ + "POST 
/repos/{owner}/{repo}/actions/runners/{runner_id}/labels", + ], + addSelectedRepoToOrgSecret: [ + "PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}", + ], + approveWorkflowRun: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/approve", + ], + cancelWorkflowRun: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel", + ], + createOrUpdateEnvironmentSecret: [ + "PUT /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}", + ], + createOrUpdateOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}"], + createOrUpdateRepoSecret: [ + "PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}", + ], + createRegistrationTokenForOrg: [ + "POST /orgs/{org}/actions/runners/registration-token", + ], + createRegistrationTokenForRepo: [ + "POST /repos/{owner}/{repo}/actions/runners/registration-token", + ], + createRemoveTokenForOrg: ["POST /orgs/{org}/actions/runners/remove-token"], + createRemoveTokenForRepo: [ + "POST /repos/{owner}/{repo}/actions/runners/remove-token", + ], + createWorkflowDispatch: [ + "POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches", + ], + deleteActionsCacheById: [ + "DELETE /repos/{owner}/{repo}/actions/caches/{cache_id}", + ], + deleteActionsCacheByKey: [ + "DELETE /repos/{owner}/{repo}/actions/caches{?key,ref}", + ], + deleteArtifact: [ + "DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}", + ], + deleteEnvironmentSecret: [ + "DELETE /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}", + ], + deleteOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}"], + deleteRepoSecret: [ + "DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}", + ], + deleteSelfHostedRunnerFromOrg: [ + "DELETE /orgs/{org}/actions/runners/{runner_id}", + ], + deleteSelfHostedRunnerFromRepo: [ + "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}", + ], + deleteWorkflowRun: ["DELETE /repos/{owner}/{repo}/actions/runs/{run_id}"], + deleteWorkflowRunLogs: [ + "DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs", + ], + disableSelectedRepositoryGithubActionsOrganization: [ + "DELETE /orgs/{org}/actions/permissions/repositories/{repository_id}", + ], + disableWorkflow: [ + "PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable", + ], + downloadArtifact: [ + "GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}", + ], + downloadJobLogsForWorkflowRun: [ + "GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs", + ], + downloadWorkflowRunAttemptLogs: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/logs", + ], + downloadWorkflowRunLogs: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs", + ], + enableSelectedRepositoryGithubActionsOrganization: [ + "PUT /orgs/{org}/actions/permissions/repositories/{repository_id}", + ], + enableWorkflow: [ + "PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable", + ], + getActionsCacheList: ["GET /repos/{owner}/{repo}/actions/caches"], + getActionsCacheUsage: ["GET /repos/{owner}/{repo}/actions/cache/usage"], + getActionsCacheUsageByRepoForOrg: [ + "GET /orgs/{org}/actions/cache/usage-by-repository", + ], + getActionsCacheUsageForEnterprise: [ + "GET /enterprises/{enterprise}/actions/cache/usage", + ], + getActionsCacheUsageForOrg: ["GET /orgs/{org}/actions/cache/usage"], + getAllowedActionsOrganization: [ + "GET /orgs/{org}/actions/permissions/selected-actions", + ], + getAllowedActionsRepository: [ + "GET 
/repos/{owner}/{repo}/actions/permissions/selected-actions", + ], + getArtifact: ["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"], + getEnvironmentPublicKey: [ + "GET /repositories/{repository_id}/environments/{environment_name}/secrets/public-key", + ], + getEnvironmentSecret: [ + "GET /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}", + ], + getGithubActionsDefaultWorkflowPermissionsEnterprise: [ + "GET /enterprises/{enterprise}/actions/permissions/workflow", + ], + getGithubActionsDefaultWorkflowPermissionsOrganization: [ + "GET /orgs/{org}/actions/permissions/workflow", + ], + getGithubActionsDefaultWorkflowPermissionsRepository: [ + "GET /repos/{owner}/{repo}/actions/permissions/workflow", + ], + getGithubActionsPermissionsOrganization: [ + "GET /orgs/{org}/actions/permissions", + ], + getGithubActionsPermissionsRepository: [ + "GET /repos/{owner}/{repo}/actions/permissions", + ], + getJobForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}"], + getOrgPublicKey: ["GET /orgs/{org}/actions/secrets/public-key"], + getOrgSecret: ["GET /orgs/{org}/actions/secrets/{secret_name}"], + getPendingDeploymentsForRun: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments", + ], + getRepoPermissions: [ + "GET /repos/{owner}/{repo}/actions/permissions", + {}, + { renamed: ["actions", "getGithubActionsPermissionsRepository"] }, + ], + getRepoPublicKey: ["GET /repos/{owner}/{repo}/actions/secrets/public-key"], + getRepoSecret: ["GET /repos/{owner}/{repo}/actions/secrets/{secret_name}"], + getReviewsForRun: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/approvals", + ], + getSelfHostedRunnerForOrg: ["GET /orgs/{org}/actions/runners/{runner_id}"], + getSelfHostedRunnerForRepo: [ + "GET /repos/{owner}/{repo}/actions/runners/{runner_id}", + ], + getWorkflow: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}"], + getWorkflowAccessToRepository: [ + "GET /repos/{owner}/{repo}/actions/permissions/access", + ], + getWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}"], + getWorkflowRunAttempt: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}", + ], + getWorkflowRunUsage: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing", + ], + getWorkflowUsage: [ + "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing", + ], + listArtifactsForRepo: ["GET /repos/{owner}/{repo}/actions/artifacts"], + listEnvironmentSecrets: [ + "GET /repositories/{repository_id}/environments/{environment_name}/secrets", + ], + listJobsForWorkflowRun: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs", + ], + listJobsForWorkflowRunAttempt: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs", + ], + listLabelsForSelfHostedRunnerForOrg: [ + "GET /orgs/{org}/actions/runners/{runner_id}/labels", + ], + listLabelsForSelfHostedRunnerForRepo: [ + "GET /repos/{owner}/{repo}/actions/runners/{runner_id}/labels", + ], + listOrgSecrets: ["GET /orgs/{org}/actions/secrets"], + listRepoSecrets: ["GET /repos/{owner}/{repo}/actions/secrets"], + listRepoWorkflows: ["GET /repos/{owner}/{repo}/actions/workflows"], + listRunnerApplicationsForOrg: ["GET /orgs/{org}/actions/runners/downloads"], + listRunnerApplicationsForRepo: [ + "GET /repos/{owner}/{repo}/actions/runners/downloads", + ], + listSelectedReposForOrgSecret: [ + "GET /orgs/{org}/actions/secrets/{secret_name}/repositories", + ], + listSelectedRepositoriesEnabledGithubActionsOrganization: [ + "GET 
/orgs/{org}/actions/permissions/repositories", + ], + listSelfHostedRunnersForOrg: ["GET /orgs/{org}/actions/runners"], + listSelfHostedRunnersForRepo: ["GET /repos/{owner}/{repo}/actions/runners"], + listWorkflowRunArtifacts: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts", + ], + listWorkflowRuns: [ + "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs", + ], + listWorkflowRunsForRepo: ["GET /repos/{owner}/{repo}/actions/runs"], + reRunJobForWorkflowRun: [ + "POST /repos/{owner}/{repo}/actions/jobs/{job_id}/rerun", + ], + reRunWorkflow: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun"], + reRunWorkflowFailedJobs: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun-failed-jobs", + ], + removeAllCustomLabelsFromSelfHostedRunnerForOrg: [ + "DELETE /orgs/{org}/actions/runners/{runner_id}/labels", + ], + removeAllCustomLabelsFromSelfHostedRunnerForRepo: [ + "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels", + ], + removeCustomLabelFromSelfHostedRunnerForOrg: [ + "DELETE /orgs/{org}/actions/runners/{runner_id}/labels/{name}", + ], + removeCustomLabelFromSelfHostedRunnerForRepo: [ + "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels/{name}", + ], + removeSelectedRepoFromOrgSecret: [ + "DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}", + ], + reviewPendingDeploymentsForRun: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments", + ], + setAllowedActionsOrganization: [ + "PUT /orgs/{org}/actions/permissions/selected-actions", + ], + setAllowedActionsRepository: [ + "PUT /repos/{owner}/{repo}/actions/permissions/selected-actions", + ], + setCustomLabelsForSelfHostedRunnerForOrg: [ + "PUT /orgs/{org}/actions/runners/{runner_id}/labels", + ], + setCustomLabelsForSelfHostedRunnerForRepo: [ + "PUT /repos/{owner}/{repo}/actions/runners/{runner_id}/labels", + ], + setGithubActionsDefaultWorkflowPermissionsEnterprise: [ + "PUT /enterprises/{enterprise}/actions/permissions/workflow", + ], + setGithubActionsDefaultWorkflowPermissionsOrganization: [ + "PUT /orgs/{org}/actions/permissions/workflow", + ], + setGithubActionsDefaultWorkflowPermissionsRepository: [ + "PUT /repos/{owner}/{repo}/actions/permissions/workflow", + ], + setGithubActionsPermissionsOrganization: [ + "PUT /orgs/{org}/actions/permissions", + ], + setGithubActionsPermissionsRepository: [ + "PUT /repos/{owner}/{repo}/actions/permissions", + ], + setSelectedReposForOrgSecret: [ + "PUT /orgs/{org}/actions/secrets/{secret_name}/repositories", + ], + setSelectedRepositoriesEnabledGithubActionsOrganization: [ + "PUT /orgs/{org}/actions/permissions/repositories", + ], + setWorkflowAccessToRepository: [ + "PUT /repos/{owner}/{repo}/actions/permissions/access", + ], + }, + activity: { + checkRepoIsStarredByAuthenticatedUser: ["GET /user/starred/{owner}/{repo}"], + deleteRepoSubscription: ["DELETE /repos/{owner}/{repo}/subscription"], + deleteThreadSubscription: [ + "DELETE /notifications/threads/{thread_id}/subscription", + ], + getFeeds: ["GET /feeds"], + getRepoSubscription: ["GET /repos/{owner}/{repo}/subscription"], + getThread: ["GET /notifications/threads/{thread_id}"], + getThreadSubscriptionForAuthenticatedUser: [ + "GET /notifications/threads/{thread_id}/subscription", + ], + listEventsForAuthenticatedUser: ["GET /users/{username}/events"], + listNotificationsForAuthenticatedUser: ["GET /notifications"], + listOrgEventsForAuthenticatedUser: [ + "GET /users/{username}/events/orgs/{org}", + ], + listPublicEvents: ["GET 
/events"], + listPublicEventsForRepoNetwork: ["GET /networks/{owner}/{repo}/events"], + listPublicEventsForUser: ["GET /users/{username}/events/public"], + listPublicOrgEvents: ["GET /orgs/{org}/events"], + listReceivedEventsForUser: ["GET /users/{username}/received_events"], + listReceivedPublicEventsForUser: [ + "GET /users/{username}/received_events/public", + ], + listRepoEvents: ["GET /repos/{owner}/{repo}/events"], + listRepoNotificationsForAuthenticatedUser: [ + "GET /repos/{owner}/{repo}/notifications", + ], + listReposStarredByAuthenticatedUser: ["GET /user/starred"], + listReposStarredByUser: ["GET /users/{username}/starred"], + listReposWatchedByUser: ["GET /users/{username}/subscriptions"], + listStargazersForRepo: ["GET /repos/{owner}/{repo}/stargazers"], + listWatchedReposForAuthenticatedUser: ["GET /user/subscriptions"], + listWatchersForRepo: ["GET /repos/{owner}/{repo}/subscribers"], + markNotificationsAsRead: ["PUT /notifications"], + markRepoNotificationsAsRead: ["PUT /repos/{owner}/{repo}/notifications"], + markThreadAsRead: ["PATCH /notifications/threads/{thread_id}"], + setRepoSubscription: ["PUT /repos/{owner}/{repo}/subscription"], + setThreadSubscription: [ + "PUT /notifications/threads/{thread_id}/subscription", + ], + starRepoForAuthenticatedUser: ["PUT /user/starred/{owner}/{repo}"], + unstarRepoForAuthenticatedUser: ["DELETE /user/starred/{owner}/{repo}"], + }, + apps: { + addRepoToInstallation: [ + "PUT /user/installations/{installation_id}/repositories/{repository_id}", + {}, + { renamed: ["apps", "addRepoToInstallationForAuthenticatedUser"] }, + ], + addRepoToInstallationForAuthenticatedUser: [ + "PUT /user/installations/{installation_id}/repositories/{repository_id}", + ], + checkToken: ["POST /applications/{client_id}/token"], + createFromManifest: ["POST /app-manifests/{code}/conversions"], + createInstallationAccessToken: [ + "POST /app/installations/{installation_id}/access_tokens", + ], + deleteAuthorization: ["DELETE /applications/{client_id}/grant"], + deleteInstallation: ["DELETE /app/installations/{installation_id}"], + deleteToken: ["DELETE /applications/{client_id}/token"], + getAuthenticated: ["GET /app"], + getBySlug: ["GET /apps/{app_slug}"], + getInstallation: ["GET /app/installations/{installation_id}"], + getOrgInstallation: ["GET /orgs/{org}/installation"], + getRepoInstallation: ["GET /repos/{owner}/{repo}/installation"], + getSubscriptionPlanForAccount: [ + "GET /marketplace_listing/accounts/{account_id}", + ], + getSubscriptionPlanForAccountStubbed: [ + "GET /marketplace_listing/stubbed/accounts/{account_id}", + ], + getUserInstallation: ["GET /users/{username}/installation"], + getWebhookConfigForApp: ["GET /app/hook/config"], + getWebhookDelivery: ["GET /app/hook/deliveries/{delivery_id}"], + listAccountsForPlan: ["GET /marketplace_listing/plans/{plan_id}/accounts"], + listAccountsForPlanStubbed: [ + "GET /marketplace_listing/stubbed/plans/{plan_id}/accounts", + ], + listInstallationReposForAuthenticatedUser: [ + "GET /user/installations/{installation_id}/repositories", + ], + listInstallations: ["GET /app/installations"], + listInstallationsForAuthenticatedUser: ["GET /user/installations"], + listPlans: ["GET /marketplace_listing/plans"], + listPlansStubbed: ["GET /marketplace_listing/stubbed/plans"], + listReposAccessibleToInstallation: ["GET /installation/repositories"], + listSubscriptionsForAuthenticatedUser: ["GET /user/marketplace_purchases"], + listSubscriptionsForAuthenticatedUserStubbed: [ + "GET 
/user/marketplace_purchases/stubbed", + ], + listWebhookDeliveries: ["GET /app/hook/deliveries"], + redeliverWebhookDelivery: [ + "POST /app/hook/deliveries/{delivery_id}/attempts", + ], + removeRepoFromInstallation: [ + "DELETE /user/installations/{installation_id}/repositories/{repository_id}", + {}, + { renamed: ["apps", "removeRepoFromInstallationForAuthenticatedUser"] }, + ], + removeRepoFromInstallationForAuthenticatedUser: [ + "DELETE /user/installations/{installation_id}/repositories/{repository_id}", + ], + resetToken: ["PATCH /applications/{client_id}/token"], + revokeInstallationAccessToken: ["DELETE /installation/token"], + scopeToken: ["POST /applications/{client_id}/token/scoped"], + suspendInstallation: ["PUT /app/installations/{installation_id}/suspended"], + unsuspendInstallation: [ + "DELETE /app/installations/{installation_id}/suspended", + ], + updateWebhookConfigForApp: ["PATCH /app/hook/config"], + }, + billing: { + getGithubActionsBillingOrg: ["GET /orgs/{org}/settings/billing/actions"], + getGithubActionsBillingUser: [ + "GET /users/{username}/settings/billing/actions", + ], + getGithubAdvancedSecurityBillingGhe: [ + "GET /enterprises/{enterprise}/settings/billing/advanced-security", + ], + getGithubAdvancedSecurityBillingOrg: [ + "GET /orgs/{org}/settings/billing/advanced-security", + ], + getGithubPackagesBillingOrg: ["GET /orgs/{org}/settings/billing/packages"], + getGithubPackagesBillingUser: [ + "GET /users/{username}/settings/billing/packages", + ], + getSharedStorageBillingOrg: [ + "GET /orgs/{org}/settings/billing/shared-storage", + ], + getSharedStorageBillingUser: [ + "GET /users/{username}/settings/billing/shared-storage", + ], + }, + checks: { + create: ["POST /repos/{owner}/{repo}/check-runs"], + createSuite: ["POST /repos/{owner}/{repo}/check-suites"], + get: ["GET /repos/{owner}/{repo}/check-runs/{check_run_id}"], + getSuite: ["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}"], + listAnnotations: [ + "GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations", + ], + listForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-runs"], + listForSuite: [ + "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs", + ], + listSuitesForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-suites"], + rerequestRun: [ + "POST /repos/{owner}/{repo}/check-runs/{check_run_id}/rerequest", + ], + rerequestSuite: [ + "POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest", + ], + setSuitesPreferences: [ + "PATCH /repos/{owner}/{repo}/check-suites/preferences", + ], + update: ["PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}"], + }, + codeScanning: { + deleteAnalysis: [ + "DELETE /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}{?confirm_delete}", + ], + getAlert: [ + "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}", + {}, + { renamedParameters: { alert_id: "alert_number" } }, + ], + getAnalysis: [ + "GET /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}", + ], + getSarif: ["GET /repos/{owner}/{repo}/code-scanning/sarifs/{sarif_id}"], + listAlertInstances: [ + "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", + ], + listAlertsForOrg: ["GET /orgs/{org}/code-scanning/alerts"], + listAlertsForRepo: ["GET /repos/{owner}/{repo}/code-scanning/alerts"], + listAlertsInstances: [ + "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", + {}, + { renamed: ["codeScanning", "listAlertInstances"] }, + ], + listRecentAnalyses: ["GET 
/repos/{owner}/{repo}/code-scanning/analyses"], + updateAlert: [ + "PATCH /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}", + ], + uploadSarif: ["POST /repos/{owner}/{repo}/code-scanning/sarifs"], + }, + codesOfConduct: { + getAllCodesOfConduct: ["GET /codes_of_conduct"], + getConductCode: ["GET /codes_of_conduct/{key}"], + }, + codespaces: { + addRepositoryForSecretForAuthenticatedUser: [ + "PUT /user/codespaces/secrets/{secret_name}/repositories/{repository_id}", + ], + codespaceMachinesForAuthenticatedUser: [ + "GET /user/codespaces/{codespace_name}/machines", + ], + createForAuthenticatedUser: ["POST /user/codespaces"], + createOrUpdateRepoSecret: [ + "PUT /repos/{owner}/{repo}/codespaces/secrets/{secret_name}", + ], + createOrUpdateSecretForAuthenticatedUser: [ + "PUT /user/codespaces/secrets/{secret_name}", + ], + createWithPrForAuthenticatedUser: [ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/codespaces", + ], + createWithRepoForAuthenticatedUser: [ + "POST /repos/{owner}/{repo}/codespaces", + ], + deleteForAuthenticatedUser: ["DELETE /user/codespaces/{codespace_name}"], + deleteFromOrganization: [ + "DELETE /orgs/{org}/members/{username}/codespaces/{codespace_name}", + ], + deleteRepoSecret: [ + "DELETE /repos/{owner}/{repo}/codespaces/secrets/{secret_name}", + ], + deleteSecretForAuthenticatedUser: [ + "DELETE /user/codespaces/secrets/{secret_name}", + ], + exportForAuthenticatedUser: [ + "POST /user/codespaces/{codespace_name}/exports", + ], + getExportDetailsForAuthenticatedUser: [ + "GET /user/codespaces/{codespace_name}/exports/{export_id}", + ], + getForAuthenticatedUser: ["GET /user/codespaces/{codespace_name}"], + getPublicKeyForAuthenticatedUser: [ + "GET /user/codespaces/secrets/public-key", + ], + getRepoPublicKey: [ + "GET /repos/{owner}/{repo}/codespaces/secrets/public-key", + ], + getRepoSecret: [ + "GET /repos/{owner}/{repo}/codespaces/secrets/{secret_name}", + ], + getSecretForAuthenticatedUser: [ + "GET /user/codespaces/secrets/{secret_name}", + ], + listDevcontainersInRepositoryForAuthenticatedUser: [ + "GET /repos/{owner}/{repo}/codespaces/devcontainers", + ], + listForAuthenticatedUser: ["GET /user/codespaces"], + listInOrganization: [ + "GET /orgs/{org}/codespaces", + {}, + { renamedParameters: { org_id: "org" } }, + ], + listInRepositoryForAuthenticatedUser: [ + "GET /repos/{owner}/{repo}/codespaces", + ], + listRepoSecrets: ["GET /repos/{owner}/{repo}/codespaces/secrets"], + listRepositoriesForSecretForAuthenticatedUser: [ + "GET /user/codespaces/secrets/{secret_name}/repositories", + ], + listSecretsForAuthenticatedUser: ["GET /user/codespaces/secrets"], + removeRepositoryForSecretForAuthenticatedUser: [ + "DELETE /user/codespaces/secrets/{secret_name}/repositories/{repository_id}", + ], + repoMachinesForAuthenticatedUser: [ + "GET /repos/{owner}/{repo}/codespaces/machines", + ], + setRepositoriesForSecretForAuthenticatedUser: [ + "PUT /user/codespaces/secrets/{secret_name}/repositories", + ], + startForAuthenticatedUser: ["POST /user/codespaces/{codespace_name}/start"], + stopForAuthenticatedUser: ["POST /user/codespaces/{codespace_name}/stop"], + stopInOrganization: [ + "POST /orgs/{org}/members/{username}/codespaces/{codespace_name}/stop", + ], + updateForAuthenticatedUser: ["PATCH /user/codespaces/{codespace_name}"], + }, + dependabot: { + addSelectedRepoToOrgSecret: [ + "PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}", + ], + createOrUpdateOrgSecret: [ + "PUT /orgs/{org}/dependabot/secrets/{secret_name}", + 
], + createOrUpdateRepoSecret: [ + "PUT /repos/{owner}/{repo}/dependabot/secrets/{secret_name}", + ], + deleteOrgSecret: ["DELETE /orgs/{org}/dependabot/secrets/{secret_name}"], + deleteRepoSecret: [ + "DELETE /repos/{owner}/{repo}/dependabot/secrets/{secret_name}", + ], + getOrgPublicKey: ["GET /orgs/{org}/dependabot/secrets/public-key"], + getOrgSecret: ["GET /orgs/{org}/dependabot/secrets/{secret_name}"], + getRepoPublicKey: [ + "GET /repos/{owner}/{repo}/dependabot/secrets/public-key", + ], + getRepoSecret: [ + "GET /repos/{owner}/{repo}/dependabot/secrets/{secret_name}", + ], + listOrgSecrets: ["GET /orgs/{org}/dependabot/secrets"], + listRepoSecrets: ["GET /repos/{owner}/{repo}/dependabot/secrets"], + listSelectedReposForOrgSecret: [ + "GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories", + ], + removeSelectedRepoFromOrgSecret: [ + "DELETE /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}", + ], + setSelectedReposForOrgSecret: [ + "PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories", + ], + }, + dependencyGraph: { + createRepositorySnapshot: [ + "POST /repos/{owner}/{repo}/dependency-graph/snapshots", + ], + diffRange: [ + "GET /repos/{owner}/{repo}/dependency-graph/compare/{basehead}", + ], + }, + emojis: { get: ["GET /emojis"] }, + enterpriseAdmin: { + addCustomLabelsToSelfHostedRunnerForEnterprise: [ + "POST /enterprises/{enterprise}/actions/runners/{runner_id}/labels", + ], + disableSelectedOrganizationGithubActionsEnterprise: [ + "DELETE /enterprises/{enterprise}/actions/permissions/organizations/{org_id}", + ], + enableSelectedOrganizationGithubActionsEnterprise: [ + "PUT /enterprises/{enterprise}/actions/permissions/organizations/{org_id}", + ], + getAllowedActionsEnterprise: [ + "GET /enterprises/{enterprise}/actions/permissions/selected-actions", + ], + getGithubActionsPermissionsEnterprise: [ + "GET /enterprises/{enterprise}/actions/permissions", + ], + getServerStatistics: [ + "GET /enterprise-installation/{enterprise_or_org}/server-statistics", + ], + listLabelsForSelfHostedRunnerForEnterprise: [ + "GET /enterprises/{enterprise}/actions/runners/{runner_id}/labels", + ], + listSelectedOrganizationsEnabledGithubActionsEnterprise: [ + "GET /enterprises/{enterprise}/actions/permissions/organizations", + ], + removeAllCustomLabelsFromSelfHostedRunnerForEnterprise: [ + "DELETE /enterprises/{enterprise}/actions/runners/{runner_id}/labels", + ], + removeCustomLabelFromSelfHostedRunnerForEnterprise: [ + "DELETE /enterprises/{enterprise}/actions/runners/{runner_id}/labels/{name}", + ], + setAllowedActionsEnterprise: [ + "PUT /enterprises/{enterprise}/actions/permissions/selected-actions", + ], + setCustomLabelsForSelfHostedRunnerForEnterprise: [ + "PUT /enterprises/{enterprise}/actions/runners/{runner_id}/labels", + ], + setGithubActionsPermissionsEnterprise: [ + "PUT /enterprises/{enterprise}/actions/permissions", + ], + setSelectedOrganizationsEnabledGithubActionsEnterprise: [ + "PUT /enterprises/{enterprise}/actions/permissions/organizations", + ], + }, + gists: { + checkIsStarred: ["GET /gists/{gist_id}/star"], + create: ["POST /gists"], + createComment: ["POST /gists/{gist_id}/comments"], + delete: ["DELETE /gists/{gist_id}"], + deleteComment: ["DELETE /gists/{gist_id}/comments/{comment_id}"], + fork: ["POST /gists/{gist_id}/forks"], + get: ["GET /gists/{gist_id}"], + getComment: ["GET /gists/{gist_id}/comments/{comment_id}"], + getRevision: ["GET /gists/{gist_id}/{sha}"], + list: ["GET /gists"], + listComments: ["GET 
/gists/{gist_id}/comments"], + listCommits: ["GET /gists/{gist_id}/commits"], + listForUser: ["GET /users/{username}/gists"], + listForks: ["GET /gists/{gist_id}/forks"], + listPublic: ["GET /gists/public"], + listStarred: ["GET /gists/starred"], + star: ["PUT /gists/{gist_id}/star"], + unstar: ["DELETE /gists/{gist_id}/star"], + update: ["PATCH /gists/{gist_id}"], + updateComment: ["PATCH /gists/{gist_id}/comments/{comment_id}"], + }, + git: { + createBlob: ["POST /repos/{owner}/{repo}/git/blobs"], + createCommit: ["POST /repos/{owner}/{repo}/git/commits"], + createRef: ["POST /repos/{owner}/{repo}/git/refs"], + createTag: ["POST /repos/{owner}/{repo}/git/tags"], + createTree: ["POST /repos/{owner}/{repo}/git/trees"], + deleteRef: ["DELETE /repos/{owner}/{repo}/git/refs/{ref}"], + getBlob: ["GET /repos/{owner}/{repo}/git/blobs/{file_sha}"], + getCommit: ["GET /repos/{owner}/{repo}/git/commits/{commit_sha}"], + getRef: ["GET /repos/{owner}/{repo}/git/ref/{ref}"], + getTag: ["GET /repos/{owner}/{repo}/git/tags/{tag_sha}"], + getTree: ["GET /repos/{owner}/{repo}/git/trees/{tree_sha}"], + listMatchingRefs: ["GET /repos/{owner}/{repo}/git/matching-refs/{ref}"], + updateRef: ["PATCH /repos/{owner}/{repo}/git/refs/{ref}"], + }, + gitignore: { + getAllTemplates: ["GET /gitignore/templates"], + getTemplate: ["GET /gitignore/templates/{name}"], + }, + interactions: { + getRestrictionsForAuthenticatedUser: ["GET /user/interaction-limits"], + getRestrictionsForOrg: ["GET /orgs/{org}/interaction-limits"], + getRestrictionsForRepo: ["GET /repos/{owner}/{repo}/interaction-limits"], + getRestrictionsForYourPublicRepos: [ + "GET /user/interaction-limits", + {}, + { renamed: ["interactions", "getRestrictionsForAuthenticatedUser"] }, + ], + removeRestrictionsForAuthenticatedUser: ["DELETE /user/interaction-limits"], + removeRestrictionsForOrg: ["DELETE /orgs/{org}/interaction-limits"], + removeRestrictionsForRepo: [ + "DELETE /repos/{owner}/{repo}/interaction-limits", + ], + removeRestrictionsForYourPublicRepos: [ + "DELETE /user/interaction-limits", + {}, + { renamed: ["interactions", "removeRestrictionsForAuthenticatedUser"] }, + ], + setRestrictionsForAuthenticatedUser: ["PUT /user/interaction-limits"], + setRestrictionsForOrg: ["PUT /orgs/{org}/interaction-limits"], + setRestrictionsForRepo: ["PUT /repos/{owner}/{repo}/interaction-limits"], + setRestrictionsForYourPublicRepos: [ + "PUT /user/interaction-limits", + {}, + { renamed: ["interactions", "setRestrictionsForAuthenticatedUser"] }, + ], + }, + issues: { + addAssignees: [ + "POST /repos/{owner}/{repo}/issues/{issue_number}/assignees", + ], + addLabels: ["POST /repos/{owner}/{repo}/issues/{issue_number}/labels"], + checkUserCanBeAssigned: ["GET /repos/{owner}/{repo}/assignees/{assignee}"], + create: ["POST /repos/{owner}/{repo}/issues"], + createComment: [ + "POST /repos/{owner}/{repo}/issues/{issue_number}/comments", + ], + createLabel: ["POST /repos/{owner}/{repo}/labels"], + createMilestone: ["POST /repos/{owner}/{repo}/milestones"], + deleteComment: [ + "DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}", + ], + deleteLabel: ["DELETE /repos/{owner}/{repo}/labels/{name}"], + deleteMilestone: [ + "DELETE /repos/{owner}/{repo}/milestones/{milestone_number}", + ], + get: ["GET /repos/{owner}/{repo}/issues/{issue_number}"], + getComment: ["GET /repos/{owner}/{repo}/issues/comments/{comment_id}"], + getEvent: ["GET /repos/{owner}/{repo}/issues/events/{event_id}"], + getLabel: ["GET /repos/{owner}/{repo}/labels/{name}"], + getMilestone: ["GET 
/repos/{owner}/{repo}/milestones/{milestone_number}"], + list: ["GET /issues"], + listAssignees: ["GET /repos/{owner}/{repo}/assignees"], + listComments: ["GET /repos/{owner}/{repo}/issues/{issue_number}/comments"], + listCommentsForRepo: ["GET /repos/{owner}/{repo}/issues/comments"], + listEvents: ["GET /repos/{owner}/{repo}/issues/{issue_number}/events"], + listEventsForRepo: ["GET /repos/{owner}/{repo}/issues/events"], + listEventsForTimeline: [ + "GET /repos/{owner}/{repo}/issues/{issue_number}/timeline", + ], + listForAuthenticatedUser: ["GET /user/issues"], + listForOrg: ["GET /orgs/{org}/issues"], + listForRepo: ["GET /repos/{owner}/{repo}/issues"], + listLabelsForMilestone: [ + "GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels", + ], + listLabelsForRepo: ["GET /repos/{owner}/{repo}/labels"], + listLabelsOnIssue: [ + "GET /repos/{owner}/{repo}/issues/{issue_number}/labels", + ], + listMilestones: ["GET /repos/{owner}/{repo}/milestones"], + lock: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/lock"], + removeAllLabels: [ + "DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels", + ], + removeAssignees: [ + "DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees", + ], + removeLabel: [ + "DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}", + ], + setLabels: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/labels"], + unlock: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock"], + update: ["PATCH /repos/{owner}/{repo}/issues/{issue_number}"], + updateComment: ["PATCH /repos/{owner}/{repo}/issues/comments/{comment_id}"], + updateLabel: ["PATCH /repos/{owner}/{repo}/labels/{name}"], + updateMilestone: [ + "PATCH /repos/{owner}/{repo}/milestones/{milestone_number}", + ], + }, + licenses: { + get: ["GET /licenses/{license}"], + getAllCommonlyUsed: ["GET /licenses"], + getForRepo: ["GET /repos/{owner}/{repo}/license"], + }, + markdown: { + render: ["POST /markdown"], + renderRaw: [ + "POST /markdown/raw", + { headers: { "content-type": "text/plain; charset=utf-8" } }, + ], + }, + meta: { + get: ["GET /meta"], + getOctocat: ["GET /octocat"], + getZen: ["GET /zen"], + root: ["GET /"], + }, + migrations: { + cancelImport: ["DELETE /repos/{owner}/{repo}/import"], + deleteArchiveForAuthenticatedUser: [ + "DELETE /user/migrations/{migration_id}/archive", + ], + deleteArchiveForOrg: [ + "DELETE /orgs/{org}/migrations/{migration_id}/archive", + ], + downloadArchiveForOrg: [ + "GET /orgs/{org}/migrations/{migration_id}/archive", + ], + getArchiveForAuthenticatedUser: [ + "GET /user/migrations/{migration_id}/archive", + ], + getCommitAuthors: ["GET /repos/{owner}/{repo}/import/authors"], + getImportStatus: ["GET /repos/{owner}/{repo}/import"], + getLargeFiles: ["GET /repos/{owner}/{repo}/import/large_files"], + getStatusForAuthenticatedUser: ["GET /user/migrations/{migration_id}"], + getStatusForOrg: ["GET /orgs/{org}/migrations/{migration_id}"], + listForAuthenticatedUser: ["GET /user/migrations"], + listForOrg: ["GET /orgs/{org}/migrations"], + listReposForAuthenticatedUser: [ + "GET /user/migrations/{migration_id}/repositories", + ], + listReposForOrg: ["GET /orgs/{org}/migrations/{migration_id}/repositories"], + listReposForUser: [ + "GET /user/migrations/{migration_id}/repositories", + {}, + { renamed: ["migrations", "listReposForAuthenticatedUser"] }, + ], + mapCommitAuthor: ["PATCH /repos/{owner}/{repo}/import/authors/{author_id}"], + setLfsPreference: ["PATCH /repos/{owner}/{repo}/import/lfs"], + startForAuthenticatedUser: ["POST 
/user/migrations"], + startForOrg: ["POST /orgs/{org}/migrations"], + startImport: ["PUT /repos/{owner}/{repo}/import"], + unlockRepoForAuthenticatedUser: [ + "DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock", + ], + unlockRepoForOrg: [ + "DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock", + ], + updateImport: ["PATCH /repos/{owner}/{repo}/import"], + }, + orgs: { + blockUser: ["PUT /orgs/{org}/blocks/{username}"], + cancelInvitation: ["DELETE /orgs/{org}/invitations/{invitation_id}"], + checkBlockedUser: ["GET /orgs/{org}/blocks/{username}"], + checkMembershipForUser: ["GET /orgs/{org}/members/{username}"], + checkPublicMembershipForUser: ["GET /orgs/{org}/public_members/{username}"], + convertMemberToOutsideCollaborator: [ + "PUT /orgs/{org}/outside_collaborators/{username}", + ], + createInvitation: ["POST /orgs/{org}/invitations"], + createWebhook: ["POST /orgs/{org}/hooks"], + deleteWebhook: ["DELETE /orgs/{org}/hooks/{hook_id}"], + get: ["GET /orgs/{org}"], + getMembershipForAuthenticatedUser: ["GET /user/memberships/orgs/{org}"], + getMembershipForUser: ["GET /orgs/{org}/memberships/{username}"], + getWebhook: ["GET /orgs/{org}/hooks/{hook_id}"], + getWebhookConfigForOrg: ["GET /orgs/{org}/hooks/{hook_id}/config"], + getWebhookDelivery: [ + "GET /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}", + ], + list: ["GET /organizations"], + listAppInstallations: ["GET /orgs/{org}/installations"], + listBlockedUsers: ["GET /orgs/{org}/blocks"], + listCustomRoles: ["GET /organizations/{organization_id}/custom_roles"], + listFailedInvitations: ["GET /orgs/{org}/failed_invitations"], + listForAuthenticatedUser: ["GET /user/orgs"], + listForUser: ["GET /users/{username}/orgs"], + listInvitationTeams: ["GET /orgs/{org}/invitations/{invitation_id}/teams"], + listMembers: ["GET /orgs/{org}/members"], + listMembershipsForAuthenticatedUser: ["GET /user/memberships/orgs"], + listOutsideCollaborators: ["GET /orgs/{org}/outside_collaborators"], + listPendingInvitations: ["GET /orgs/{org}/invitations"], + listPublicMembers: ["GET /orgs/{org}/public_members"], + listWebhookDeliveries: ["GET /orgs/{org}/hooks/{hook_id}/deliveries"], + listWebhooks: ["GET /orgs/{org}/hooks"], + pingWebhook: ["POST /orgs/{org}/hooks/{hook_id}/pings"], + redeliverWebhookDelivery: [ + "POST /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}/attempts", + ], + removeMember: ["DELETE /orgs/{org}/members/{username}"], + removeMembershipForUser: ["DELETE /orgs/{org}/memberships/{username}"], + removeOutsideCollaborator: [ + "DELETE /orgs/{org}/outside_collaborators/{username}", + ], + removePublicMembershipForAuthenticatedUser: [ + "DELETE /orgs/{org}/public_members/{username}", + ], + setMembershipForUser: ["PUT /orgs/{org}/memberships/{username}"], + setPublicMembershipForAuthenticatedUser: [ + "PUT /orgs/{org}/public_members/{username}", + ], + unblockUser: ["DELETE /orgs/{org}/blocks/{username}"], + update: ["PATCH /orgs/{org}"], + updateMembershipForAuthenticatedUser: [ + "PATCH /user/memberships/orgs/{org}", + ], + updateWebhook: ["PATCH /orgs/{org}/hooks/{hook_id}"], + updateWebhookConfigForOrg: ["PATCH /orgs/{org}/hooks/{hook_id}/config"], + }, + packages: { + deletePackageForAuthenticatedUser: [ + "DELETE /user/packages/{package_type}/{package_name}", + ], + deletePackageForOrg: [ + "DELETE /orgs/{org}/packages/{package_type}/{package_name}", + ], + deletePackageForUser: [ + "DELETE /users/{username}/packages/{package_type}/{package_name}", + ], + 
deletePackageVersionForAuthenticatedUser: [ + "DELETE /user/packages/{package_type}/{package_name}/versions/{package_version_id}", + ], + deletePackageVersionForOrg: [ + "DELETE /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}", + ], + deletePackageVersionForUser: [ + "DELETE /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}", + ], + getAllPackageVersionsForAPackageOwnedByAnOrg: [ + "GET /orgs/{org}/packages/{package_type}/{package_name}/versions", + {}, + { renamed: ["packages", "getAllPackageVersionsForPackageOwnedByOrg"] }, + ], + getAllPackageVersionsForAPackageOwnedByTheAuthenticatedUser: [ + "GET /user/packages/{package_type}/{package_name}/versions", + {}, + { + renamed: [ + "packages", + "getAllPackageVersionsForPackageOwnedByAuthenticatedUser", + ], + }, + ], + getAllPackageVersionsForPackageOwnedByAuthenticatedUser: [ + "GET /user/packages/{package_type}/{package_name}/versions", + ], + getAllPackageVersionsForPackageOwnedByOrg: [ + "GET /orgs/{org}/packages/{package_type}/{package_name}/versions", + ], + getAllPackageVersionsForPackageOwnedByUser: [ + "GET /users/{username}/packages/{package_type}/{package_name}/versions", + ], + getPackageForAuthenticatedUser: [ + "GET /user/packages/{package_type}/{package_name}", + ], + getPackageForOrganization: [ + "GET /orgs/{org}/packages/{package_type}/{package_name}", + ], + getPackageForUser: [ + "GET /users/{username}/packages/{package_type}/{package_name}", + ], + getPackageVersionForAuthenticatedUser: [ + "GET /user/packages/{package_type}/{package_name}/versions/{package_version_id}", + ], + getPackageVersionForOrganization: [ + "GET /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}", + ], + getPackageVersionForUser: [ + "GET /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}", + ], + listPackagesForAuthenticatedUser: ["GET /user/packages"], + listPackagesForOrganization: ["GET /orgs/{org}/packages"], + listPackagesForUser: ["GET /users/{username}/packages"], + restorePackageForAuthenticatedUser: [ + "POST /user/packages/{package_type}/{package_name}/restore{?token}", + ], + restorePackageForOrg: [ + "POST /orgs/{org}/packages/{package_type}/{package_name}/restore{?token}", + ], + restorePackageForUser: [ + "POST /users/{username}/packages/{package_type}/{package_name}/restore{?token}", + ], + restorePackageVersionForAuthenticatedUser: [ + "POST /user/packages/{package_type}/{package_name}/versions/{package_version_id}/restore", + ], + restorePackageVersionForOrg: [ + "POST /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore", + ], + restorePackageVersionForUser: [ + "POST /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore", + ], + }, + projects: { + addCollaborator: ["PUT /projects/{project_id}/collaborators/{username}"], + createCard: ["POST /projects/columns/{column_id}/cards"], + createColumn: ["POST /projects/{project_id}/columns"], + createForAuthenticatedUser: ["POST /user/projects"], + createForOrg: ["POST /orgs/{org}/projects"], + createForRepo: ["POST /repos/{owner}/{repo}/projects"], + delete: ["DELETE /projects/{project_id}"], + deleteCard: ["DELETE /projects/columns/cards/{card_id}"], + deleteColumn: ["DELETE /projects/columns/{column_id}"], + get: ["GET /projects/{project_id}"], + getCard: ["GET /projects/columns/cards/{card_id}"], + getColumn: ["GET /projects/columns/{column_id}"], + 
getPermissionForUser: [ + "GET /projects/{project_id}/collaborators/{username}/permission", + ], + listCards: ["GET /projects/columns/{column_id}/cards"], + listCollaborators: ["GET /projects/{project_id}/collaborators"], + listColumns: ["GET /projects/{project_id}/columns"], + listForOrg: ["GET /orgs/{org}/projects"], + listForRepo: ["GET /repos/{owner}/{repo}/projects"], + listForUser: ["GET /users/{username}/projects"], + moveCard: ["POST /projects/columns/cards/{card_id}/moves"], + moveColumn: ["POST /projects/columns/{column_id}/moves"], + removeCollaborator: [ + "DELETE /projects/{project_id}/collaborators/{username}", + ], + update: ["PATCH /projects/{project_id}"], + updateCard: ["PATCH /projects/columns/cards/{card_id}"], + updateColumn: ["PATCH /projects/columns/{column_id}"], + }, + pulls: { + checkIfMerged: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/merge"], + create: ["POST /repos/{owner}/{repo}/pulls"], + createReplyForReviewComment: [ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies", + ], + createReview: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews"], + createReviewComment: [ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/comments", + ], + deletePendingReview: [ + "DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}", + ], + deleteReviewComment: [ + "DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}", + ], + dismissReview: [ + "PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals", + ], + get: ["GET /repos/{owner}/{repo}/pulls/{pull_number}"], + getReview: [ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}", + ], + getReviewComment: ["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}"], + list: ["GET /repos/{owner}/{repo}/pulls"], + listCommentsForReview: [ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments", + ], + listCommits: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/commits"], + listFiles: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/files"], + listRequestedReviewers: [ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers", + ], + listReviewComments: [ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/comments", + ], + listReviewCommentsForRepo: ["GET /repos/{owner}/{repo}/pulls/comments"], + listReviews: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews"], + merge: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge"], + removeRequestedReviewers: [ + "DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers", + ], + requestReviewers: [ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers", + ], + submitReview: [ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events", + ], + update: ["PATCH /repos/{owner}/{repo}/pulls/{pull_number}"], + updateBranch: [ + "PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch", + ], + updateReview: [ + "PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}", + ], + updateReviewComment: [ + "PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}", + ], + }, + rateLimit: { get: ["GET /rate_limit"] }, + reactions: { + createForCommitComment: [ + "POST /repos/{owner}/{repo}/comments/{comment_id}/reactions", + ], + createForIssue: [ + "POST /repos/{owner}/{repo}/issues/{issue_number}/reactions", + ], + createForIssueComment: [ + "POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", + ], + createForPullRequestReviewComment: [ + "POST 
/repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", + ], + createForRelease: [ + "POST /repos/{owner}/{repo}/releases/{release_id}/reactions", + ], + createForTeamDiscussionCommentInOrg: [ + "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", + ], + createForTeamDiscussionInOrg: [ + "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", + ], + deleteForCommitComment: [ + "DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}", + ], + deleteForIssue: [ + "DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}", + ], + deleteForIssueComment: [ + "DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}", + ], + deleteForPullRequestComment: [ + "DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}", + ], + deleteForRelease: [ + "DELETE /repos/{owner}/{repo}/releases/{release_id}/reactions/{reaction_id}", + ], + deleteForTeamDiscussion: [ + "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}", + ], + deleteForTeamDiscussionComment: [ + "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}", + ], + listForCommitComment: [ + "GET /repos/{owner}/{repo}/comments/{comment_id}/reactions", + ], + listForIssue: ["GET /repos/{owner}/{repo}/issues/{issue_number}/reactions"], + listForIssueComment: [ + "GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", + ], + listForPullRequestReviewComment: [ + "GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", + ], + listForRelease: [ + "GET /repos/{owner}/{repo}/releases/{release_id}/reactions", + ], + listForTeamDiscussionCommentInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", + ], + listForTeamDiscussionInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", + ], + }, + repos: { + acceptInvitation: [ + "PATCH /user/repository_invitations/{invitation_id}", + {}, + { renamed: ["repos", "acceptInvitationForAuthenticatedUser"] }, + ], + acceptInvitationForAuthenticatedUser: [ + "PATCH /user/repository_invitations/{invitation_id}", + ], + addAppAccessRestrictions: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", + {}, + { mapToData: "apps" }, + ], + addCollaborator: ["PUT /repos/{owner}/{repo}/collaborators/{username}"], + addStatusCheckContexts: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", + {}, + { mapToData: "contexts" }, + ], + addTeamAccessRestrictions: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", + {}, + { mapToData: "teams" }, + ], + addUserAccessRestrictions: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", + {}, + { mapToData: "users" }, + ], + checkCollaborator: ["GET /repos/{owner}/{repo}/collaborators/{username}"], + checkVulnerabilityAlerts: [ + "GET /repos/{owner}/{repo}/vulnerability-alerts", + ], + codeownersErrors: ["GET /repos/{owner}/{repo}/codeowners/errors"], + compareCommits: ["GET /repos/{owner}/{repo}/compare/{base}...{head}"], + compareCommitsWithBasehead: [ + "GET /repos/{owner}/{repo}/compare/{basehead}", + ], + createAutolink: ["POST /repos/{owner}/{repo}/autolinks"], + createCommitComment: [ + "POST /repos/{owner}/{repo}/commits/{commit_sha}/comments", + ], + 
createCommitSignatureProtection: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures", + ], + createCommitStatus: ["POST /repos/{owner}/{repo}/statuses/{sha}"], + createDeployKey: ["POST /repos/{owner}/{repo}/keys"], + createDeployment: ["POST /repos/{owner}/{repo}/deployments"], + createDeploymentStatus: [ + "POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses", + ], + createDispatchEvent: ["POST /repos/{owner}/{repo}/dispatches"], + createForAuthenticatedUser: ["POST /user/repos"], + createFork: ["POST /repos/{owner}/{repo}/forks"], + createInOrg: ["POST /orgs/{org}/repos"], + createOrUpdateEnvironment: [ + "PUT /repos/{owner}/{repo}/environments/{environment_name}", + ], + createOrUpdateFileContents: ["PUT /repos/{owner}/{repo}/contents/{path}"], + createPagesSite: ["POST /repos/{owner}/{repo}/pages"], + createRelease: ["POST /repos/{owner}/{repo}/releases"], + createTagProtection: ["POST /repos/{owner}/{repo}/tags/protection"], + createUsingTemplate: [ + "POST /repos/{template_owner}/{template_repo}/generate", + ], + createWebhook: ["POST /repos/{owner}/{repo}/hooks"], + declineInvitation: [ + "DELETE /user/repository_invitations/{invitation_id}", + {}, + { renamed: ["repos", "declineInvitationForAuthenticatedUser"] }, + ], + declineInvitationForAuthenticatedUser: [ + "DELETE /user/repository_invitations/{invitation_id}", + ], + delete: ["DELETE /repos/{owner}/{repo}"], + deleteAccessRestrictions: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions", + ], + deleteAdminBranchProtection: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins", + ], + deleteAnEnvironment: [ + "DELETE /repos/{owner}/{repo}/environments/{environment_name}", + ], + deleteAutolink: ["DELETE /repos/{owner}/{repo}/autolinks/{autolink_id}"], + deleteBranchProtection: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection", + ], + deleteCommitComment: ["DELETE /repos/{owner}/{repo}/comments/{comment_id}"], + deleteCommitSignatureProtection: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures", + ], + deleteDeployKey: ["DELETE /repos/{owner}/{repo}/keys/{key_id}"], + deleteDeployment: [ + "DELETE /repos/{owner}/{repo}/deployments/{deployment_id}", + ], + deleteFile: ["DELETE /repos/{owner}/{repo}/contents/{path}"], + deleteInvitation: [ + "DELETE /repos/{owner}/{repo}/invitations/{invitation_id}", + ], + deletePagesSite: ["DELETE /repos/{owner}/{repo}/pages"], + deletePullRequestReviewProtection: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews", + ], + deleteRelease: ["DELETE /repos/{owner}/{repo}/releases/{release_id}"], + deleteReleaseAsset: [ + "DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}", + ], + deleteTagProtection: [ + "DELETE /repos/{owner}/{repo}/tags/protection/{tag_protection_id}", + ], + deleteWebhook: ["DELETE /repos/{owner}/{repo}/hooks/{hook_id}"], + disableAutomatedSecurityFixes: [ + "DELETE /repos/{owner}/{repo}/automated-security-fixes", + ], + disableLfsForRepo: ["DELETE /repos/{owner}/{repo}/lfs"], + disableVulnerabilityAlerts: [ + "DELETE /repos/{owner}/{repo}/vulnerability-alerts", + ], + downloadArchive: [ + "GET /repos/{owner}/{repo}/zipball/{ref}", + {}, + { renamed: ["repos", "downloadZipballArchive"] }, + ], + downloadTarballArchive: ["GET /repos/{owner}/{repo}/tarball/{ref}"], + downloadZipballArchive: ["GET /repos/{owner}/{repo}/zipball/{ref}"], + enableAutomatedSecurityFixes: [ + "PUT 
/repos/{owner}/{repo}/automated-security-fixes", + ], + enableLfsForRepo: ["PUT /repos/{owner}/{repo}/lfs"], + enableVulnerabilityAlerts: [ + "PUT /repos/{owner}/{repo}/vulnerability-alerts", + ], + generateReleaseNotes: [ + "POST /repos/{owner}/{repo}/releases/generate-notes", + ], + get: ["GET /repos/{owner}/{repo}"], + getAccessRestrictions: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions", + ], + getAdminBranchProtection: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins", + ], + getAllEnvironments: ["GET /repos/{owner}/{repo}/environments"], + getAllStatusCheckContexts: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", + ], + getAllTopics: ["GET /repos/{owner}/{repo}/topics"], + getAppsWithAccessToProtectedBranch: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", + ], + getAutolink: ["GET /repos/{owner}/{repo}/autolinks/{autolink_id}"], + getBranch: ["GET /repos/{owner}/{repo}/branches/{branch}"], + getBranchProtection: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection", + ], + getClones: ["GET /repos/{owner}/{repo}/traffic/clones"], + getCodeFrequencyStats: ["GET /repos/{owner}/{repo}/stats/code_frequency"], + getCollaboratorPermissionLevel: [ + "GET /repos/{owner}/{repo}/collaborators/{username}/permission", + ], + getCombinedStatusForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/status"], + getCommit: ["GET /repos/{owner}/{repo}/commits/{ref}"], + getCommitActivityStats: ["GET /repos/{owner}/{repo}/stats/commit_activity"], + getCommitComment: ["GET /repos/{owner}/{repo}/comments/{comment_id}"], + getCommitSignatureProtection: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures", + ], + getCommunityProfileMetrics: ["GET /repos/{owner}/{repo}/community/profile"], + getContent: ["GET /repos/{owner}/{repo}/contents/{path}"], + getContributorsStats: ["GET /repos/{owner}/{repo}/stats/contributors"], + getDeployKey: ["GET /repos/{owner}/{repo}/keys/{key_id}"], + getDeployment: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}"], + getDeploymentStatus: [ + "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}", + ], + getEnvironment: [ + "GET /repos/{owner}/{repo}/environments/{environment_name}", + ], + getLatestPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/latest"], + getLatestRelease: ["GET /repos/{owner}/{repo}/releases/latest"], + getPages: ["GET /repos/{owner}/{repo}/pages"], + getPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/{build_id}"], + getPagesHealthCheck: ["GET /repos/{owner}/{repo}/pages/health"], + getParticipationStats: ["GET /repos/{owner}/{repo}/stats/participation"], + getPullRequestReviewProtection: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews", + ], + getPunchCardStats: ["GET /repos/{owner}/{repo}/stats/punch_card"], + getReadme: ["GET /repos/{owner}/{repo}/readme"], + getReadmeInDirectory: ["GET /repos/{owner}/{repo}/readme/{dir}"], + getRelease: ["GET /repos/{owner}/{repo}/releases/{release_id}"], + getReleaseAsset: ["GET /repos/{owner}/{repo}/releases/assets/{asset_id}"], + getReleaseByTag: ["GET /repos/{owner}/{repo}/releases/tags/{tag}"], + getStatusChecksProtection: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks", + ], + getTeamsWithAccessToProtectedBranch: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", + ], + getTopPaths: ["GET 
/repos/{owner}/{repo}/traffic/popular/paths"], + getTopReferrers: ["GET /repos/{owner}/{repo}/traffic/popular/referrers"], + getUsersWithAccessToProtectedBranch: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", + ], + getViews: ["GET /repos/{owner}/{repo}/traffic/views"], + getWebhook: ["GET /repos/{owner}/{repo}/hooks/{hook_id}"], + getWebhookConfigForRepo: [ + "GET /repos/{owner}/{repo}/hooks/{hook_id}/config", + ], + getWebhookDelivery: [ + "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}", + ], + listAutolinks: ["GET /repos/{owner}/{repo}/autolinks"], + listBranches: ["GET /repos/{owner}/{repo}/branches"], + listBranchesForHeadCommit: [ + "GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head", + ], + listCollaborators: ["GET /repos/{owner}/{repo}/collaborators"], + listCommentsForCommit: [ + "GET /repos/{owner}/{repo}/commits/{commit_sha}/comments", + ], + listCommitCommentsForRepo: ["GET /repos/{owner}/{repo}/comments"], + listCommitStatusesForRef: [ + "GET /repos/{owner}/{repo}/commits/{ref}/statuses", + ], + listCommits: ["GET /repos/{owner}/{repo}/commits"], + listContributors: ["GET /repos/{owner}/{repo}/contributors"], + listDeployKeys: ["GET /repos/{owner}/{repo}/keys"], + listDeploymentStatuses: [ + "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses", + ], + listDeployments: ["GET /repos/{owner}/{repo}/deployments"], + listForAuthenticatedUser: ["GET /user/repos"], + listForOrg: ["GET /orgs/{org}/repos"], + listForUser: ["GET /users/{username}/repos"], + listForks: ["GET /repos/{owner}/{repo}/forks"], + listInvitations: ["GET /repos/{owner}/{repo}/invitations"], + listInvitationsForAuthenticatedUser: ["GET /user/repository_invitations"], + listLanguages: ["GET /repos/{owner}/{repo}/languages"], + listPagesBuilds: ["GET /repos/{owner}/{repo}/pages/builds"], + listPublic: ["GET /repositories"], + listPullRequestsAssociatedWithCommit: [ + "GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls", + ], + listReleaseAssets: [ + "GET /repos/{owner}/{repo}/releases/{release_id}/assets", + ], + listReleases: ["GET /repos/{owner}/{repo}/releases"], + listTagProtection: ["GET /repos/{owner}/{repo}/tags/protection"], + listTags: ["GET /repos/{owner}/{repo}/tags"], + listTeams: ["GET /repos/{owner}/{repo}/teams"], + listWebhookDeliveries: [ + "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries", + ], + listWebhooks: ["GET /repos/{owner}/{repo}/hooks"], + merge: ["POST /repos/{owner}/{repo}/merges"], + mergeUpstream: ["POST /repos/{owner}/{repo}/merge-upstream"], + pingWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/pings"], + redeliverWebhookDelivery: [ + "POST /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}/attempts", + ], + removeAppAccessRestrictions: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", + {}, + { mapToData: "apps" }, + ], + removeCollaborator: [ + "DELETE /repos/{owner}/{repo}/collaborators/{username}", + ], + removeStatusCheckContexts: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", + {}, + { mapToData: "contexts" }, + ], + removeStatusCheckProtection: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks", + ], + removeTeamAccessRestrictions: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", + {}, + { mapToData: "teams" }, + ], + removeUserAccessRestrictions: [ + "DELETE 
/repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", + {}, + { mapToData: "users" }, + ], + renameBranch: ["POST /repos/{owner}/{repo}/branches/{branch}/rename"], + replaceAllTopics: ["PUT /repos/{owner}/{repo}/topics"], + requestPagesBuild: ["POST /repos/{owner}/{repo}/pages/builds"], + setAdminBranchProtection: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins", + ], + setAppAccessRestrictions: [ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", + {}, + { mapToData: "apps" }, + ], + setStatusCheckContexts: [ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", + {}, + { mapToData: "contexts" }, + ], + setTeamAccessRestrictions: [ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", + {}, + { mapToData: "teams" }, + ], + setUserAccessRestrictions: [ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", + {}, + { mapToData: "users" }, + ], + testPushWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/tests"], + transfer: ["POST /repos/{owner}/{repo}/transfer"], + update: ["PATCH /repos/{owner}/{repo}"], + updateBranchProtection: [ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection", + ], + updateCommitComment: ["PATCH /repos/{owner}/{repo}/comments/{comment_id}"], + updateInformationAboutPagesSite: ["PUT /repos/{owner}/{repo}/pages"], + updateInvitation: [ + "PATCH /repos/{owner}/{repo}/invitations/{invitation_id}", + ], + updatePullRequestReviewProtection: [ + "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews", + ], + updateRelease: ["PATCH /repos/{owner}/{repo}/releases/{release_id}"], + updateReleaseAsset: [ + "PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}", + ], + updateStatusCheckPotection: [ + "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks", + {}, + { renamed: ["repos", "updateStatusCheckProtection"] }, + ], + updateStatusCheckProtection: [ + "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks", + ], + updateWebhook: ["PATCH /repos/{owner}/{repo}/hooks/{hook_id}"], + updateWebhookConfigForRepo: [ + "PATCH /repos/{owner}/{repo}/hooks/{hook_id}/config", + ], + uploadReleaseAsset: [ + "POST /repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}", + { baseUrl: "https://uploads.github.com" }, + ], + }, + search: { + code: ["GET /search/code"], + commits: ["GET /search/commits"], + issuesAndPullRequests: ["GET /search/issues"], + labels: ["GET /search/labels"], + repos: ["GET /search/repositories"], + topics: ["GET /search/topics"], + users: ["GET /search/users"], + }, + secretScanning: { + getAlert: [ + "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}", + ], + listAlertsForEnterprise: [ + "GET /enterprises/{enterprise}/secret-scanning/alerts", + ], + listAlertsForOrg: ["GET /orgs/{org}/secret-scanning/alerts"], + listAlertsForRepo: ["GET /repos/{owner}/{repo}/secret-scanning/alerts"], + listLocationsForAlert: [ + "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations", + ], + updateAlert: [ + "PATCH /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}", + ], + }, + teams: { + addOrUpdateMembershipForUserInOrg: [ + "PUT /orgs/{org}/teams/{team_slug}/memberships/{username}", + ], + addOrUpdateProjectPermissionsInOrg: [ + "PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}", + ], + addOrUpdateRepoPermissionsInOrg: [ + "PUT 
/orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}", + ], + checkPermissionsForProjectInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/projects/{project_id}", + ], + checkPermissionsForRepoInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}", + ], + create: ["POST /orgs/{org}/teams"], + createDiscussionCommentInOrg: [ + "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments", + ], + createDiscussionInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions"], + deleteDiscussionCommentInOrg: [ + "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}", + ], + deleteDiscussionInOrg: [ + "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}", + ], + deleteInOrg: ["DELETE /orgs/{org}/teams/{team_slug}"], + getByName: ["GET /orgs/{org}/teams/{team_slug}"], + getDiscussionCommentInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}", + ], + getDiscussionInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}", + ], + getMembershipForUserInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/memberships/{username}", + ], + list: ["GET /orgs/{org}/teams"], + listChildInOrg: ["GET /orgs/{org}/teams/{team_slug}/teams"], + listDiscussionCommentsInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments", + ], + listDiscussionsInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions"], + listForAuthenticatedUser: ["GET /user/teams"], + listMembersInOrg: ["GET /orgs/{org}/teams/{team_slug}/members"], + listPendingInvitationsInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/invitations", + ], + listProjectsInOrg: ["GET /orgs/{org}/teams/{team_slug}/projects"], + listReposInOrg: ["GET /orgs/{org}/teams/{team_slug}/repos"], + removeMembershipForUserInOrg: [ + "DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}", + ], + removeProjectInOrg: [ + "DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}", + ], + removeRepoInOrg: [ + "DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}", + ], + updateDiscussionCommentInOrg: [ + "PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}", + ], + updateDiscussionInOrg: [ + "PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}", + ], + updateInOrg: ["PATCH /orgs/{org}/teams/{team_slug}"], + }, + users: { + addEmailForAuthenticated: [ + "POST /user/emails", + {}, + { renamed: ["users", "addEmailForAuthenticatedUser"] }, + ], + addEmailForAuthenticatedUser: ["POST /user/emails"], + block: ["PUT /user/blocks/{username}"], + checkBlocked: ["GET /user/blocks/{username}"], + checkFollowingForUser: ["GET /users/{username}/following/{target_user}"], + checkPersonIsFollowedByAuthenticated: ["GET /user/following/{username}"], + createGpgKeyForAuthenticated: [ + "POST /user/gpg_keys", + {}, + { renamed: ["users", "createGpgKeyForAuthenticatedUser"] }, + ], + createGpgKeyForAuthenticatedUser: ["POST /user/gpg_keys"], + createPublicSshKeyForAuthenticated: [ + "POST /user/keys", + {}, + { renamed: ["users", "createPublicSshKeyForAuthenticatedUser"] }, + ], + createPublicSshKeyForAuthenticatedUser: ["POST /user/keys"], + deleteEmailForAuthenticated: [ + "DELETE /user/emails", + {}, + { renamed: ["users", "deleteEmailForAuthenticatedUser"] }, + ], + deleteEmailForAuthenticatedUser: ["DELETE /user/emails"], + deleteGpgKeyForAuthenticated: [ + "DELETE /user/gpg_keys/{gpg_key_id}", + {}, + { renamed: ["users", "deleteGpgKeyForAuthenticatedUser"] 
}, + ], + deleteGpgKeyForAuthenticatedUser: ["DELETE /user/gpg_keys/{gpg_key_id}"], + deletePublicSshKeyForAuthenticated: [ + "DELETE /user/keys/{key_id}", + {}, + { renamed: ["users", "deletePublicSshKeyForAuthenticatedUser"] }, + ], + deletePublicSshKeyForAuthenticatedUser: ["DELETE /user/keys/{key_id}"], + follow: ["PUT /user/following/{username}"], + getAuthenticated: ["GET /user"], + getByUsername: ["GET /users/{username}"], + getContextForUser: ["GET /users/{username}/hovercard"], + getGpgKeyForAuthenticated: [ + "GET /user/gpg_keys/{gpg_key_id}", + {}, + { renamed: ["users", "getGpgKeyForAuthenticatedUser"] }, + ], + getGpgKeyForAuthenticatedUser: ["GET /user/gpg_keys/{gpg_key_id}"], + getPublicSshKeyForAuthenticated: [ + "GET /user/keys/{key_id}", + {}, + { renamed: ["users", "getPublicSshKeyForAuthenticatedUser"] }, + ], + getPublicSshKeyForAuthenticatedUser: ["GET /user/keys/{key_id}"], + list: ["GET /users"], + listBlockedByAuthenticated: [ + "GET /user/blocks", + {}, + { renamed: ["users", "listBlockedByAuthenticatedUser"] }, + ], + listBlockedByAuthenticatedUser: ["GET /user/blocks"], + listEmailsForAuthenticated: [ + "GET /user/emails", + {}, + { renamed: ["users", "listEmailsForAuthenticatedUser"] }, + ], + listEmailsForAuthenticatedUser: ["GET /user/emails"], + listFollowedByAuthenticated: [ + "GET /user/following", + {}, + { renamed: ["users", "listFollowedByAuthenticatedUser"] }, + ], + listFollowedByAuthenticatedUser: ["GET /user/following"], + listFollowersForAuthenticatedUser: ["GET /user/followers"], + listFollowersForUser: ["GET /users/{username}/followers"], + listFollowingForUser: ["GET /users/{username}/following"], + listGpgKeysForAuthenticated: [ + "GET /user/gpg_keys", + {}, + { renamed: ["users", "listGpgKeysForAuthenticatedUser"] }, + ], + listGpgKeysForAuthenticatedUser: ["GET /user/gpg_keys"], + listGpgKeysForUser: ["GET /users/{username}/gpg_keys"], + listPublicEmailsForAuthenticated: [ + "GET /user/public_emails", + {}, + { renamed: ["users", "listPublicEmailsForAuthenticatedUser"] }, + ], + listPublicEmailsForAuthenticatedUser: ["GET /user/public_emails"], + listPublicKeysForUser: ["GET /users/{username}/keys"], + listPublicSshKeysForAuthenticated: [ + "GET /user/keys", + {}, + { renamed: ["users", "listPublicSshKeysForAuthenticatedUser"] }, + ], + listPublicSshKeysForAuthenticatedUser: ["GET /user/keys"], + setPrimaryEmailVisibilityForAuthenticated: [ + "PATCH /user/email/visibility", + {}, + { renamed: ["users", "setPrimaryEmailVisibilityForAuthenticatedUser"] }, + ], + setPrimaryEmailVisibilityForAuthenticatedUser: [ + "PATCH /user/email/visibility", + ], + unblock: ["DELETE /user/blocks/{username}"], + unfollow: ["DELETE /user/following/{username}"], + updateAuthenticated: ["PATCH /user"], + }, +}; + +const VERSION = "5.16.2"; + +function endpointsToMethods(octokit, endpointsMap) { + const newMethods = {}; + for (const [scope, endpoints] of Object.entries(endpointsMap)) { + for (const [methodName, endpoint] of Object.entries(endpoints)) { + const [route, defaults, decorations] = endpoint; + const [method, url] = route.split(/ /); + const endpointDefaults = Object.assign({ method, url }, defaults); + if (!newMethods[scope]) { + newMethods[scope] = {}; + } + const scopeMethods = newMethods[scope]; + if (decorations) { + scopeMethods[methodName] = decorate(octokit, scope, methodName, endpointDefaults, decorations); + continue; + } + scopeMethods[methodName] = octokit.request.defaults(endpointDefaults); + } + } + return newMethods; +} +function 
decorate(octokit, scope, methodName, defaults, decorations) { + const requestWithDefaults = octokit.request.defaults(defaults); + /* istanbul ignore next */ + function withDecorations(...args) { + // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488 + let options = requestWithDefaults.endpoint.merge(...args); + // There are currently no other decorations than `.mapToData` + if (decorations.mapToData) { + options = Object.assign({}, options, { + data: options[decorations.mapToData], + [decorations.mapToData]: undefined, + }); + return requestWithDefaults(options); + } + if (decorations.renamed) { + const [newScope, newMethodName] = decorations.renamed; + octokit.log.warn(`octokit.${scope}.${methodName}() has been renamed to octokit.${newScope}.${newMethodName}()`); + } + if (decorations.deprecated) { + octokit.log.warn(decorations.deprecated); + } + if (decorations.renamedParameters) { + // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488 + const options = requestWithDefaults.endpoint.merge(...args); + for (const [name, alias] of Object.entries(decorations.renamedParameters)) { + if (name in options) { + octokit.log.warn(`"${name}" parameter is deprecated for "octokit.${scope}.${methodName}()". Use "${alias}" instead`); + if (!(alias in options)) { + options[alias] = options[name]; + } + delete options[name]; + } + } + return requestWithDefaults(options); + } + // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488 + return requestWithDefaults(...args); + } + return Object.assign(withDecorations, requestWithDefaults); +} + +function restEndpointMethods(octokit) { + const api = endpointsToMethods(octokit, Endpoints); + return { + rest: api, + }; +} +restEndpointMethods.VERSION = VERSION; +function legacyRestEndpointMethods(octokit) { + const api = endpointsToMethods(octokit, Endpoints); + return { + ...api, + rest: api, + }; +} +legacyRestEndpointMethods.VERSION = VERSION; + +export { legacyRestEndpointMethods, restEndpointMethods }; +//# sourceMappingURL=index.js.map diff --git a/node_modules/@octokit/plugin-rest-endpoint-methods/dist-web/index.js.map b/node_modules/@octokit/plugin-rest-endpoint-methods/dist-web/index.js.map new file mode 100644 index 0000000..d33402a --- /dev/null +++ b/node_modules/@octokit/plugin-rest-endpoint-methods/dist-web/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sources":["../dist-src/generated/endpoints.js","../dist-src/version.js","../dist-src/endpoints-to-methods.js","../dist-src/index.js"],"sourcesContent":["const Endpoints = {\n actions: {\n addCustomLabelsToSelfHostedRunnerForOrg: [\n \"POST /orgs/{org}/actions/runners/{runner_id}/labels\",\n ],\n addCustomLabelsToSelfHostedRunnerForRepo: [\n \"POST /repos/{owner}/{repo}/actions/runners/{runner_id}/labels\",\n ],\n addSelectedRepoToOrgSecret: [\n \"PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}\",\n ],\n approveWorkflowRun: [\n \"POST /repos/{owner}/{repo}/actions/runs/{run_id}/approve\",\n ],\n cancelWorkflowRun: [\n \"POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel\",\n ],\n createOrUpdateEnvironmentSecret: [\n \"PUT /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}\",\n ],\n createOrUpdateOrgSecret: [\"PUT /orgs/{org}/actions/secrets/{secret_name}\"],\n createOrUpdateRepoSecret: [\n \"PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}\",\n ],\n createRegistrationTokenForOrg: [\n \"POST /orgs/{org}/actions/runners/registration-token\",\n ],\n createRegistrationTokenForRepo: 
[\n \"POST /repos/{owner}/{repo}/actions/runners/registration-token\",\n ],\n createRemoveTokenForOrg: [\"POST /orgs/{org}/actions/runners/remove-token\"],\n createRemoveTokenForRepo: [\n \"POST /repos/{owner}/{repo}/actions/runners/remove-token\",\n ],\n createWorkflowDispatch: [\n \"POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches\",\n ],\n deleteActionsCacheById: [\n \"DELETE /repos/{owner}/{repo}/actions/caches/{cache_id}\",\n ],\n deleteActionsCacheByKey: [\n \"DELETE /repos/{owner}/{repo}/actions/caches{?key,ref}\",\n ],\n deleteArtifact: [\n \"DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}\",\n ],\n deleteEnvironmentSecret: [\n \"DELETE /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}\",\n ],\n deleteOrgSecret: [\"DELETE /orgs/{org}/actions/secrets/{secret_name}\"],\n deleteRepoSecret: [\n \"DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}\",\n ],\n deleteSelfHostedRunnerFromOrg: [\n \"DELETE /orgs/{org}/actions/runners/{runner_id}\",\n ],\n deleteSelfHostedRunnerFromRepo: [\n \"DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}\",\n ],\n deleteWorkflowRun: [\"DELETE /repos/{owner}/{repo}/actions/runs/{run_id}\"],\n deleteWorkflowRunLogs: [\n \"DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs\",\n ],\n disableSelectedRepositoryGithubActionsOrganization: [\n \"DELETE /orgs/{org}/actions/permissions/repositories/{repository_id}\",\n ],\n disableWorkflow: [\n \"PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable\",\n ],\n downloadArtifact: [\n \"GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}\",\n ],\n downloadJobLogsForWorkflowRun: [\n \"GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs\",\n ],\n downloadWorkflowRunAttemptLogs: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/logs\",\n ],\n downloadWorkflowRunLogs: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs\",\n ],\n enableSelectedRepositoryGithubActionsOrganization: [\n \"PUT /orgs/{org}/actions/permissions/repositories/{repository_id}\",\n ],\n enableWorkflow: [\n \"PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable\",\n ],\n getActionsCacheList: [\"GET /repos/{owner}/{repo}/actions/caches\"],\n getActionsCacheUsage: [\"GET /repos/{owner}/{repo}/actions/cache/usage\"],\n getActionsCacheUsageByRepoForOrg: [\n \"GET /orgs/{org}/actions/cache/usage-by-repository\",\n ],\n getActionsCacheUsageForEnterprise: [\n \"GET /enterprises/{enterprise}/actions/cache/usage\",\n ],\n getActionsCacheUsageForOrg: [\"GET /orgs/{org}/actions/cache/usage\"],\n getAllowedActionsOrganization: [\n \"GET /orgs/{org}/actions/permissions/selected-actions\",\n ],\n getAllowedActionsRepository: [\n \"GET /repos/{owner}/{repo}/actions/permissions/selected-actions\",\n ],\n getArtifact: [\"GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}\"],\n getEnvironmentPublicKey: [\n \"GET /repositories/{repository_id}/environments/{environment_name}/secrets/public-key\",\n ],\n getEnvironmentSecret: [\n \"GET /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}\",\n ],\n getGithubActionsDefaultWorkflowPermissionsEnterprise: [\n \"GET /enterprises/{enterprise}/actions/permissions/workflow\",\n ],\n getGithubActionsDefaultWorkflowPermissionsOrganization: [\n \"GET /orgs/{org}/actions/permissions/workflow\",\n ],\n getGithubActionsDefaultWorkflowPermissionsRepository: [\n \"GET /repos/{owner}/{repo}/actions/permissions/workflow\",\n ],\n 
getGithubActionsPermissionsOrganization: [\n \"GET /orgs/{org}/actions/permissions\",\n ],\n getGithubActionsPermissionsRepository: [\n \"GET /repos/{owner}/{repo}/actions/permissions\",\n ],\n getJobForWorkflowRun: [\"GET /repos/{owner}/{repo}/actions/jobs/{job_id}\"],\n getOrgPublicKey: [\"GET /orgs/{org}/actions/secrets/public-key\"],\n getOrgSecret: [\"GET /orgs/{org}/actions/secrets/{secret_name}\"],\n getPendingDeploymentsForRun: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments\",\n ],\n getRepoPermissions: [\n \"GET /repos/{owner}/{repo}/actions/permissions\",\n {},\n { renamed: [\"actions\", \"getGithubActionsPermissionsRepository\"] },\n ],\n getRepoPublicKey: [\"GET /repos/{owner}/{repo}/actions/secrets/public-key\"],\n getRepoSecret: [\"GET /repos/{owner}/{repo}/actions/secrets/{secret_name}\"],\n getReviewsForRun: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/approvals\",\n ],\n getSelfHostedRunnerForOrg: [\"GET /orgs/{org}/actions/runners/{runner_id}\"],\n getSelfHostedRunnerForRepo: [\n \"GET /repos/{owner}/{repo}/actions/runners/{runner_id}\",\n ],\n getWorkflow: [\"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}\"],\n getWorkflowAccessToRepository: [\n \"GET /repos/{owner}/{repo}/actions/permissions/access\",\n ],\n getWorkflowRun: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}\"],\n getWorkflowRunAttempt: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}\",\n ],\n getWorkflowRunUsage: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing\",\n ],\n getWorkflowUsage: [\n \"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing\",\n ],\n listArtifactsForRepo: [\"GET /repos/{owner}/{repo}/actions/artifacts\"],\n listEnvironmentSecrets: [\n \"GET /repositories/{repository_id}/environments/{environment_name}/secrets\",\n ],\n listJobsForWorkflowRun: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs\",\n ],\n listJobsForWorkflowRunAttempt: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs\",\n ],\n listLabelsForSelfHostedRunnerForOrg: [\n \"GET /orgs/{org}/actions/runners/{runner_id}/labels\",\n ],\n listLabelsForSelfHostedRunnerForRepo: [\n \"GET /repos/{owner}/{repo}/actions/runners/{runner_id}/labels\",\n ],\n listOrgSecrets: [\"GET /orgs/{org}/actions/secrets\"],\n listRepoSecrets: [\"GET /repos/{owner}/{repo}/actions/secrets\"],\n listRepoWorkflows: [\"GET /repos/{owner}/{repo}/actions/workflows\"],\n listRunnerApplicationsForOrg: [\"GET /orgs/{org}/actions/runners/downloads\"],\n listRunnerApplicationsForRepo: [\n \"GET /repos/{owner}/{repo}/actions/runners/downloads\",\n ],\n listSelectedReposForOrgSecret: [\n \"GET /orgs/{org}/actions/secrets/{secret_name}/repositories\",\n ],\n listSelectedRepositoriesEnabledGithubActionsOrganization: [\n \"GET /orgs/{org}/actions/permissions/repositories\",\n ],\n listSelfHostedRunnersForOrg: [\"GET /orgs/{org}/actions/runners\"],\n listSelfHostedRunnersForRepo: [\"GET /repos/{owner}/{repo}/actions/runners\"],\n listWorkflowRunArtifacts: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts\",\n ],\n listWorkflowRuns: [\n \"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs\",\n ],\n listWorkflowRunsForRepo: [\"GET /repos/{owner}/{repo}/actions/runs\"],\n reRunJobForWorkflowRun: [\n \"POST /repos/{owner}/{repo}/actions/jobs/{job_id}/rerun\",\n ],\n reRunWorkflow: [\"POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun\"],\n reRunWorkflowFailedJobs: [\n \"POST 
/repos/{owner}/{repo}/actions/runs/{run_id}/rerun-failed-jobs\",\n ],\n removeAllCustomLabelsFromSelfHostedRunnerForOrg: [\n \"DELETE /orgs/{org}/actions/runners/{runner_id}/labels\",\n ],\n removeAllCustomLabelsFromSelfHostedRunnerForRepo: [\n \"DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels\",\n ],\n removeCustomLabelFromSelfHostedRunnerForOrg: [\n \"DELETE /orgs/{org}/actions/runners/{runner_id}/labels/{name}\",\n ],\n removeCustomLabelFromSelfHostedRunnerForRepo: [\n \"DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels/{name}\",\n ],\n removeSelectedRepoFromOrgSecret: [\n \"DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}\",\n ],\n reviewPendingDeploymentsForRun: [\n \"POST /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments\",\n ],\n setAllowedActionsOrganization: [\n \"PUT /orgs/{org}/actions/permissions/selected-actions\",\n ],\n setAllowedActionsRepository: [\n \"PUT /repos/{owner}/{repo}/actions/permissions/selected-actions\",\n ],\n setCustomLabelsForSelfHostedRunnerForOrg: [\n \"PUT /orgs/{org}/actions/runners/{runner_id}/labels\",\n ],\n setCustomLabelsForSelfHostedRunnerForRepo: [\n \"PUT /repos/{owner}/{repo}/actions/runners/{runner_id}/labels\",\n ],\n setGithubActionsDefaultWorkflowPermissionsEnterprise: [\n \"PUT /enterprises/{enterprise}/actions/permissions/workflow\",\n ],\n setGithubActionsDefaultWorkflowPermissionsOrganization: [\n \"PUT /orgs/{org}/actions/permissions/workflow\",\n ],\n setGithubActionsDefaultWorkflowPermissionsRepository: [\n \"PUT /repos/{owner}/{repo}/actions/permissions/workflow\",\n ],\n setGithubActionsPermissionsOrganization: [\n \"PUT /orgs/{org}/actions/permissions\",\n ],\n setGithubActionsPermissionsRepository: [\n \"PUT /repos/{owner}/{repo}/actions/permissions\",\n ],\n setSelectedReposForOrgSecret: [\n \"PUT /orgs/{org}/actions/secrets/{secret_name}/repositories\",\n ],\n setSelectedRepositoriesEnabledGithubActionsOrganization: [\n \"PUT /orgs/{org}/actions/permissions/repositories\",\n ],\n setWorkflowAccessToRepository: [\n \"PUT /repos/{owner}/{repo}/actions/permissions/access\",\n ],\n },\n activity: {\n checkRepoIsStarredByAuthenticatedUser: [\"GET /user/starred/{owner}/{repo}\"],\n deleteRepoSubscription: [\"DELETE /repos/{owner}/{repo}/subscription\"],\n deleteThreadSubscription: [\n \"DELETE /notifications/threads/{thread_id}/subscription\",\n ],\n getFeeds: [\"GET /feeds\"],\n getRepoSubscription: [\"GET /repos/{owner}/{repo}/subscription\"],\n getThread: [\"GET /notifications/threads/{thread_id}\"],\n getThreadSubscriptionForAuthenticatedUser: [\n \"GET /notifications/threads/{thread_id}/subscription\",\n ],\n listEventsForAuthenticatedUser: [\"GET /users/{username}/events\"],\n listNotificationsForAuthenticatedUser: [\"GET /notifications\"],\n listOrgEventsForAuthenticatedUser: [\n \"GET /users/{username}/events/orgs/{org}\",\n ],\n listPublicEvents: [\"GET /events\"],\n listPublicEventsForRepoNetwork: [\"GET /networks/{owner}/{repo}/events\"],\n listPublicEventsForUser: [\"GET /users/{username}/events/public\"],\n listPublicOrgEvents: [\"GET /orgs/{org}/events\"],\n listReceivedEventsForUser: [\"GET /users/{username}/received_events\"],\n listReceivedPublicEventsForUser: [\n \"GET /users/{username}/received_events/public\",\n ],\n listRepoEvents: [\"GET /repos/{owner}/{repo}/events\"],\n listRepoNotificationsForAuthenticatedUser: [\n \"GET /repos/{owner}/{repo}/notifications\",\n ],\n listReposStarredByAuthenticatedUser: [\"GET /user/starred\"],\n 
listReposStarredByUser: [\"GET /users/{username}/starred\"],\n listReposWatchedByUser: [\"GET /users/{username}/subscriptions\"],\n listStargazersForRepo: [\"GET /repos/{owner}/{repo}/stargazers\"],\n listWatchedReposForAuthenticatedUser: [\"GET /user/subscriptions\"],\n listWatchersForRepo: [\"GET /repos/{owner}/{repo}/subscribers\"],\n markNotificationsAsRead: [\"PUT /notifications\"],\n markRepoNotificationsAsRead: [\"PUT /repos/{owner}/{repo}/notifications\"],\n markThreadAsRead: [\"PATCH /notifications/threads/{thread_id}\"],\n setRepoSubscription: [\"PUT /repos/{owner}/{repo}/subscription\"],\n setThreadSubscription: [\n \"PUT /notifications/threads/{thread_id}/subscription\",\n ],\n starRepoForAuthenticatedUser: [\"PUT /user/starred/{owner}/{repo}\"],\n unstarRepoForAuthenticatedUser: [\"DELETE /user/starred/{owner}/{repo}\"],\n },\n apps: {\n addRepoToInstallation: [\n \"PUT /user/installations/{installation_id}/repositories/{repository_id}\",\n {},\n { renamed: [\"apps\", \"addRepoToInstallationForAuthenticatedUser\"] },\n ],\n addRepoToInstallationForAuthenticatedUser: [\n \"PUT /user/installations/{installation_id}/repositories/{repository_id}\",\n ],\n checkToken: [\"POST /applications/{client_id}/token\"],\n createFromManifest: [\"POST /app-manifests/{code}/conversions\"],\n createInstallationAccessToken: [\n \"POST /app/installations/{installation_id}/access_tokens\",\n ],\n deleteAuthorization: [\"DELETE /applications/{client_id}/grant\"],\n deleteInstallation: [\"DELETE /app/installations/{installation_id}\"],\n deleteToken: [\"DELETE /applications/{client_id}/token\"],\n getAuthenticated: [\"GET /app\"],\n getBySlug: [\"GET /apps/{app_slug}\"],\n getInstallation: [\"GET /app/installations/{installation_id}\"],\n getOrgInstallation: [\"GET /orgs/{org}/installation\"],\n getRepoInstallation: [\"GET /repos/{owner}/{repo}/installation\"],\n getSubscriptionPlanForAccount: [\n \"GET /marketplace_listing/accounts/{account_id}\",\n ],\n getSubscriptionPlanForAccountStubbed: [\n \"GET /marketplace_listing/stubbed/accounts/{account_id}\",\n ],\n getUserInstallation: [\"GET /users/{username}/installation\"],\n getWebhookConfigForApp: [\"GET /app/hook/config\"],\n getWebhookDelivery: [\"GET /app/hook/deliveries/{delivery_id}\"],\n listAccountsForPlan: [\"GET /marketplace_listing/plans/{plan_id}/accounts\"],\n listAccountsForPlanStubbed: [\n \"GET /marketplace_listing/stubbed/plans/{plan_id}/accounts\",\n ],\n listInstallationReposForAuthenticatedUser: [\n \"GET /user/installations/{installation_id}/repositories\",\n ],\n listInstallations: [\"GET /app/installations\"],\n listInstallationsForAuthenticatedUser: [\"GET /user/installations\"],\n listPlans: [\"GET /marketplace_listing/plans\"],\n listPlansStubbed: [\"GET /marketplace_listing/stubbed/plans\"],\n listReposAccessibleToInstallation: [\"GET /installation/repositories\"],\n listSubscriptionsForAuthenticatedUser: [\"GET /user/marketplace_purchases\"],\n listSubscriptionsForAuthenticatedUserStubbed: [\n \"GET /user/marketplace_purchases/stubbed\",\n ],\n listWebhookDeliveries: [\"GET /app/hook/deliveries\"],\n redeliverWebhookDelivery: [\n \"POST /app/hook/deliveries/{delivery_id}/attempts\",\n ],\n removeRepoFromInstallation: [\n \"DELETE /user/installations/{installation_id}/repositories/{repository_id}\",\n {},\n { renamed: [\"apps\", \"removeRepoFromInstallationForAuthenticatedUser\"] },\n ],\n removeRepoFromInstallationForAuthenticatedUser: [\n \"DELETE /user/installations/{installation_id}/repositories/{repository_id}\",\n 
],\n resetToken: [\"PATCH /applications/{client_id}/token\"],\n revokeInstallationAccessToken: [\"DELETE /installation/token\"],\n scopeToken: [\"POST /applications/{client_id}/token/scoped\"],\n suspendInstallation: [\"PUT /app/installations/{installation_id}/suspended\"],\n unsuspendInstallation: [\n \"DELETE /app/installations/{installation_id}/suspended\",\n ],\n updateWebhookConfigForApp: [\"PATCH /app/hook/config\"],\n },\n billing: {\n getGithubActionsBillingOrg: [\"GET /orgs/{org}/settings/billing/actions\"],\n getGithubActionsBillingUser: [\n \"GET /users/{username}/settings/billing/actions\",\n ],\n getGithubAdvancedSecurityBillingGhe: [\n \"GET /enterprises/{enterprise}/settings/billing/advanced-security\",\n ],\n getGithubAdvancedSecurityBillingOrg: [\n \"GET /orgs/{org}/settings/billing/advanced-security\",\n ],\n getGithubPackagesBillingOrg: [\"GET /orgs/{org}/settings/billing/packages\"],\n getGithubPackagesBillingUser: [\n \"GET /users/{username}/settings/billing/packages\",\n ],\n getSharedStorageBillingOrg: [\n \"GET /orgs/{org}/settings/billing/shared-storage\",\n ],\n getSharedStorageBillingUser: [\n \"GET /users/{username}/settings/billing/shared-storage\",\n ],\n },\n checks: {\n create: [\"POST /repos/{owner}/{repo}/check-runs\"],\n createSuite: [\"POST /repos/{owner}/{repo}/check-suites\"],\n get: [\"GET /repos/{owner}/{repo}/check-runs/{check_run_id}\"],\n getSuite: [\"GET /repos/{owner}/{repo}/check-suites/{check_suite_id}\"],\n listAnnotations: [\n \"GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations\",\n ],\n listForRef: [\"GET /repos/{owner}/{repo}/commits/{ref}/check-runs\"],\n listForSuite: [\n \"GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs\",\n ],\n listSuitesForRef: [\"GET /repos/{owner}/{repo}/commits/{ref}/check-suites\"],\n rerequestRun: [\n \"POST /repos/{owner}/{repo}/check-runs/{check_run_id}/rerequest\",\n ],\n rerequestSuite: [\n \"POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest\",\n ],\n setSuitesPreferences: [\n \"PATCH /repos/{owner}/{repo}/check-suites/preferences\",\n ],\n update: [\"PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}\"],\n },\n codeScanning: {\n deleteAnalysis: [\n \"DELETE /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}{?confirm_delete}\",\n ],\n getAlert: [\n \"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}\",\n {},\n { renamedParameters: { alert_id: \"alert_number\" } },\n ],\n getAnalysis: [\n \"GET /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}\",\n ],\n getSarif: [\"GET /repos/{owner}/{repo}/code-scanning/sarifs/{sarif_id}\"],\n listAlertInstances: [\n \"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances\",\n ],\n listAlertsForOrg: [\"GET /orgs/{org}/code-scanning/alerts\"],\n listAlertsForRepo: [\"GET /repos/{owner}/{repo}/code-scanning/alerts\"],\n listAlertsInstances: [\n \"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances\",\n {},\n { renamed: [\"codeScanning\", \"listAlertInstances\"] },\n ],\n listRecentAnalyses: [\"GET /repos/{owner}/{repo}/code-scanning/analyses\"],\n updateAlert: [\n \"PATCH /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}\",\n ],\n uploadSarif: [\"POST /repos/{owner}/{repo}/code-scanning/sarifs\"],\n },\n codesOfConduct: {\n getAllCodesOfConduct: [\"GET /codes_of_conduct\"],\n getConductCode: [\"GET /codes_of_conduct/{key}\"],\n },\n codespaces: {\n addRepositoryForSecretForAuthenticatedUser: [\n \"PUT 
/user/codespaces/secrets/{secret_name}/repositories/{repository_id}\",\n ],\n codespaceMachinesForAuthenticatedUser: [\n \"GET /user/codespaces/{codespace_name}/machines\",\n ],\n createForAuthenticatedUser: [\"POST /user/codespaces\"],\n createOrUpdateRepoSecret: [\n \"PUT /repos/{owner}/{repo}/codespaces/secrets/{secret_name}\",\n ],\n createOrUpdateSecretForAuthenticatedUser: [\n \"PUT /user/codespaces/secrets/{secret_name}\",\n ],\n createWithPrForAuthenticatedUser: [\n \"POST /repos/{owner}/{repo}/pulls/{pull_number}/codespaces\",\n ],\n createWithRepoForAuthenticatedUser: [\n \"POST /repos/{owner}/{repo}/codespaces\",\n ],\n deleteForAuthenticatedUser: [\"DELETE /user/codespaces/{codespace_name}\"],\n deleteFromOrganization: [\n \"DELETE /orgs/{org}/members/{username}/codespaces/{codespace_name}\",\n ],\n deleteRepoSecret: [\n \"DELETE /repos/{owner}/{repo}/codespaces/secrets/{secret_name}\",\n ],\n deleteSecretForAuthenticatedUser: [\n \"DELETE /user/codespaces/secrets/{secret_name}\",\n ],\n exportForAuthenticatedUser: [\n \"POST /user/codespaces/{codespace_name}/exports\",\n ],\n getExportDetailsForAuthenticatedUser: [\n \"GET /user/codespaces/{codespace_name}/exports/{export_id}\",\n ],\n getForAuthenticatedUser: [\"GET /user/codespaces/{codespace_name}\"],\n getPublicKeyForAuthenticatedUser: [\n \"GET /user/codespaces/secrets/public-key\",\n ],\n getRepoPublicKey: [\n \"GET /repos/{owner}/{repo}/codespaces/secrets/public-key\",\n ],\n getRepoSecret: [\n \"GET /repos/{owner}/{repo}/codespaces/secrets/{secret_name}\",\n ],\n getSecretForAuthenticatedUser: [\n \"GET /user/codespaces/secrets/{secret_name}\",\n ],\n listDevcontainersInRepositoryForAuthenticatedUser: [\n \"GET /repos/{owner}/{repo}/codespaces/devcontainers\",\n ],\n listForAuthenticatedUser: [\"GET /user/codespaces\"],\n listInOrganization: [\n \"GET /orgs/{org}/codespaces\",\n {},\n { renamedParameters: { org_id: \"org\" } },\n ],\n listInRepositoryForAuthenticatedUser: [\n \"GET /repos/{owner}/{repo}/codespaces\",\n ],\n listRepoSecrets: [\"GET /repos/{owner}/{repo}/codespaces/secrets\"],\n listRepositoriesForSecretForAuthenticatedUser: [\n \"GET /user/codespaces/secrets/{secret_name}/repositories\",\n ],\n listSecretsForAuthenticatedUser: [\"GET /user/codespaces/secrets\"],\n removeRepositoryForSecretForAuthenticatedUser: [\n \"DELETE /user/codespaces/secrets/{secret_name}/repositories/{repository_id}\",\n ],\n repoMachinesForAuthenticatedUser: [\n \"GET /repos/{owner}/{repo}/codespaces/machines\",\n ],\n setRepositoriesForSecretForAuthenticatedUser: [\n \"PUT /user/codespaces/secrets/{secret_name}/repositories\",\n ],\n startForAuthenticatedUser: [\"POST /user/codespaces/{codespace_name}/start\"],\n stopForAuthenticatedUser: [\"POST /user/codespaces/{codespace_name}/stop\"],\n stopInOrganization: [\n \"POST /orgs/{org}/members/{username}/codespaces/{codespace_name}/stop\",\n ],\n updateForAuthenticatedUser: [\"PATCH /user/codespaces/{codespace_name}\"],\n },\n dependabot: {\n addSelectedRepoToOrgSecret: [\n \"PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}\",\n ],\n createOrUpdateOrgSecret: [\n \"PUT /orgs/{org}/dependabot/secrets/{secret_name}\",\n ],\n createOrUpdateRepoSecret: [\n \"PUT /repos/{owner}/{repo}/dependabot/secrets/{secret_name}\",\n ],\n deleteOrgSecret: [\"DELETE /orgs/{org}/dependabot/secrets/{secret_name}\"],\n deleteRepoSecret: [\n \"DELETE /repos/{owner}/{repo}/dependabot/secrets/{secret_name}\",\n ],\n getOrgPublicKey: [\"GET 
/orgs/{org}/dependabot/secrets/public-key\"],\n getOrgSecret: [\"GET /orgs/{org}/dependabot/secrets/{secret_name}\"],\n getRepoPublicKey: [\n \"GET /repos/{owner}/{repo}/dependabot/secrets/public-key\",\n ],\n getRepoSecret: [\n \"GET /repos/{owner}/{repo}/dependabot/secrets/{secret_name}\",\n ],\n listOrgSecrets: [\"GET /orgs/{org}/dependabot/secrets\"],\n listRepoSecrets: [\"GET /repos/{owner}/{repo}/dependabot/secrets\"],\n listSelectedReposForOrgSecret: [\n \"GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories\",\n ],\n removeSelectedRepoFromOrgSecret: [\n \"DELETE /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}\",\n ],\n setSelectedReposForOrgSecret: [\n \"PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories\",\n ],\n },\n dependencyGraph: {\n createRepositorySnapshot: [\n \"POST /repos/{owner}/{repo}/dependency-graph/snapshots\",\n ],\n diffRange: [\n \"GET /repos/{owner}/{repo}/dependency-graph/compare/{basehead}\",\n ],\n },\n emojis: { get: [\"GET /emojis\"] },\n enterpriseAdmin: {\n addCustomLabelsToSelfHostedRunnerForEnterprise: [\n \"POST /enterprises/{enterprise}/actions/runners/{runner_id}/labels\",\n ],\n disableSelectedOrganizationGithubActionsEnterprise: [\n \"DELETE /enterprises/{enterprise}/actions/permissions/organizations/{org_id}\",\n ],\n enableSelectedOrganizationGithubActionsEnterprise: [\n \"PUT /enterprises/{enterprise}/actions/permissions/organizations/{org_id}\",\n ],\n getAllowedActionsEnterprise: [\n \"GET /enterprises/{enterprise}/actions/permissions/selected-actions\",\n ],\n getGithubActionsPermissionsEnterprise: [\n \"GET /enterprises/{enterprise}/actions/permissions\",\n ],\n getServerStatistics: [\n \"GET /enterprise-installation/{enterprise_or_org}/server-statistics\",\n ],\n listLabelsForSelfHostedRunnerForEnterprise: [\n \"GET /enterprises/{enterprise}/actions/runners/{runner_id}/labels\",\n ],\n listSelectedOrganizationsEnabledGithubActionsEnterprise: [\n \"GET /enterprises/{enterprise}/actions/permissions/organizations\",\n ],\n removeAllCustomLabelsFromSelfHostedRunnerForEnterprise: [\n \"DELETE /enterprises/{enterprise}/actions/runners/{runner_id}/labels\",\n ],\n removeCustomLabelFromSelfHostedRunnerForEnterprise: [\n \"DELETE /enterprises/{enterprise}/actions/runners/{runner_id}/labels/{name}\",\n ],\n setAllowedActionsEnterprise: [\n \"PUT /enterprises/{enterprise}/actions/permissions/selected-actions\",\n ],\n setCustomLabelsForSelfHostedRunnerForEnterprise: [\n \"PUT /enterprises/{enterprise}/actions/runners/{runner_id}/labels\",\n ],\n setGithubActionsPermissionsEnterprise: [\n \"PUT /enterprises/{enterprise}/actions/permissions\",\n ],\n setSelectedOrganizationsEnabledGithubActionsEnterprise: [\n \"PUT /enterprises/{enterprise}/actions/permissions/organizations\",\n ],\n },\n gists: {\n checkIsStarred: [\"GET /gists/{gist_id}/star\"],\n create: [\"POST /gists\"],\n createComment: [\"POST /gists/{gist_id}/comments\"],\n delete: [\"DELETE /gists/{gist_id}\"],\n deleteComment: [\"DELETE /gists/{gist_id}/comments/{comment_id}\"],\n fork: [\"POST /gists/{gist_id}/forks\"],\n get: [\"GET /gists/{gist_id}\"],\n getComment: [\"GET /gists/{gist_id}/comments/{comment_id}\"],\n getRevision: [\"GET /gists/{gist_id}/{sha}\"],\n list: [\"GET /gists\"],\n listComments: [\"GET /gists/{gist_id}/comments\"],\n listCommits: [\"GET /gists/{gist_id}/commits\"],\n listForUser: [\"GET /users/{username}/gists\"],\n listForks: [\"GET /gists/{gist_id}/forks\"],\n listPublic: [\"GET /gists/public\"],\n listStarred: [\"GET 
/gists/starred\"],\n star: [\"PUT /gists/{gist_id}/star\"],\n unstar: [\"DELETE /gists/{gist_id}/star\"],\n update: [\"PATCH /gists/{gist_id}\"],\n updateComment: [\"PATCH /gists/{gist_id}/comments/{comment_id}\"],\n },\n git: {\n createBlob: [\"POST /repos/{owner}/{repo}/git/blobs\"],\n createCommit: [\"POST /repos/{owner}/{repo}/git/commits\"],\n createRef: [\"POST /repos/{owner}/{repo}/git/refs\"],\n createTag: [\"POST /repos/{owner}/{repo}/git/tags\"],\n createTree: [\"POST /repos/{owner}/{repo}/git/trees\"],\n deleteRef: [\"DELETE /repos/{owner}/{repo}/git/refs/{ref}\"],\n getBlob: [\"GET /repos/{owner}/{repo}/git/blobs/{file_sha}\"],\n getCommit: [\"GET /repos/{owner}/{repo}/git/commits/{commit_sha}\"],\n getRef: [\"GET /repos/{owner}/{repo}/git/ref/{ref}\"],\n getTag: [\"GET /repos/{owner}/{repo}/git/tags/{tag_sha}\"],\n getTree: [\"GET /repos/{owner}/{repo}/git/trees/{tree_sha}\"],\n listMatchingRefs: [\"GET /repos/{owner}/{repo}/git/matching-refs/{ref}\"],\n updateRef: [\"PATCH /repos/{owner}/{repo}/git/refs/{ref}\"],\n },\n gitignore: {\n getAllTemplates: [\"GET /gitignore/templates\"],\n getTemplate: [\"GET /gitignore/templates/{name}\"],\n },\n interactions: {\n getRestrictionsForAuthenticatedUser: [\"GET /user/interaction-limits\"],\n getRestrictionsForOrg: [\"GET /orgs/{org}/interaction-limits\"],\n getRestrictionsForRepo: [\"GET /repos/{owner}/{repo}/interaction-limits\"],\n getRestrictionsForYourPublicRepos: [\n \"GET /user/interaction-limits\",\n {},\n { renamed: [\"interactions\", \"getRestrictionsForAuthenticatedUser\"] },\n ],\n removeRestrictionsForAuthenticatedUser: [\"DELETE /user/interaction-limits\"],\n removeRestrictionsForOrg: [\"DELETE /orgs/{org}/interaction-limits\"],\n removeRestrictionsForRepo: [\n \"DELETE /repos/{owner}/{repo}/interaction-limits\",\n ],\n removeRestrictionsForYourPublicRepos: [\n \"DELETE /user/interaction-limits\",\n {},\n { renamed: [\"interactions\", \"removeRestrictionsForAuthenticatedUser\"] },\n ],\n setRestrictionsForAuthenticatedUser: [\"PUT /user/interaction-limits\"],\n setRestrictionsForOrg: [\"PUT /orgs/{org}/interaction-limits\"],\n setRestrictionsForRepo: [\"PUT /repos/{owner}/{repo}/interaction-limits\"],\n setRestrictionsForYourPublicRepos: [\n \"PUT /user/interaction-limits\",\n {},\n { renamed: [\"interactions\", \"setRestrictionsForAuthenticatedUser\"] },\n ],\n },\n issues: {\n addAssignees: [\n \"POST /repos/{owner}/{repo}/issues/{issue_number}/assignees\",\n ],\n addLabels: [\"POST /repos/{owner}/{repo}/issues/{issue_number}/labels\"],\n checkUserCanBeAssigned: [\"GET /repos/{owner}/{repo}/assignees/{assignee}\"],\n create: [\"POST /repos/{owner}/{repo}/issues\"],\n createComment: [\n \"POST /repos/{owner}/{repo}/issues/{issue_number}/comments\",\n ],\n createLabel: [\"POST /repos/{owner}/{repo}/labels\"],\n createMilestone: [\"POST /repos/{owner}/{repo}/milestones\"],\n deleteComment: [\n \"DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}\",\n ],\n deleteLabel: [\"DELETE /repos/{owner}/{repo}/labels/{name}\"],\n deleteMilestone: [\n \"DELETE /repos/{owner}/{repo}/milestones/{milestone_number}\",\n ],\n get: [\"GET /repos/{owner}/{repo}/issues/{issue_number}\"],\n getComment: [\"GET /repos/{owner}/{repo}/issues/comments/{comment_id}\"],\n getEvent: [\"GET /repos/{owner}/{repo}/issues/events/{event_id}\"],\n getLabel: [\"GET /repos/{owner}/{repo}/labels/{name}\"],\n getMilestone: [\"GET /repos/{owner}/{repo}/milestones/{milestone_number}\"],\n list: [\"GET /issues\"],\n listAssignees: [\"GET 
/repos/{owner}/{repo}/assignees\"],\n listComments: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/comments\"],\n listCommentsForRepo: [\"GET /repos/{owner}/{repo}/issues/comments\"],\n listEvents: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/events\"],\n listEventsForRepo: [\"GET /repos/{owner}/{repo}/issues/events\"],\n listEventsForTimeline: [\n \"GET /repos/{owner}/{repo}/issues/{issue_number}/timeline\",\n ],\n listForAuthenticatedUser: [\"GET /user/issues\"],\n listForOrg: [\"GET /orgs/{org}/issues\"],\n listForRepo: [\"GET /repos/{owner}/{repo}/issues\"],\n listLabelsForMilestone: [\n \"GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels\",\n ],\n listLabelsForRepo: [\"GET /repos/{owner}/{repo}/labels\"],\n listLabelsOnIssue: [\n \"GET /repos/{owner}/{repo}/issues/{issue_number}/labels\",\n ],\n listMilestones: [\"GET /repos/{owner}/{repo}/milestones\"],\n lock: [\"PUT /repos/{owner}/{repo}/issues/{issue_number}/lock\"],\n removeAllLabels: [\n \"DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels\",\n ],\n removeAssignees: [\n \"DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees\",\n ],\n removeLabel: [\n \"DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}\",\n ],\n setLabels: [\"PUT /repos/{owner}/{repo}/issues/{issue_number}/labels\"],\n unlock: [\"DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock\"],\n update: [\"PATCH /repos/{owner}/{repo}/issues/{issue_number}\"],\n updateComment: [\"PATCH /repos/{owner}/{repo}/issues/comments/{comment_id}\"],\n updateLabel: [\"PATCH /repos/{owner}/{repo}/labels/{name}\"],\n updateMilestone: [\n \"PATCH /repos/{owner}/{repo}/milestones/{milestone_number}\",\n ],\n },\n licenses: {\n get: [\"GET /licenses/{license}\"],\n getAllCommonlyUsed: [\"GET /licenses\"],\n getForRepo: [\"GET /repos/{owner}/{repo}/license\"],\n },\n markdown: {\n render: [\"POST /markdown\"],\n renderRaw: [\n \"POST /markdown/raw\",\n { headers: { \"content-type\": \"text/plain; charset=utf-8\" } },\n ],\n },\n meta: {\n get: [\"GET /meta\"],\n getOctocat: [\"GET /octocat\"],\n getZen: [\"GET /zen\"],\n root: [\"GET /\"],\n },\n migrations: {\n cancelImport: [\"DELETE /repos/{owner}/{repo}/import\"],\n deleteArchiveForAuthenticatedUser: [\n \"DELETE /user/migrations/{migration_id}/archive\",\n ],\n deleteArchiveForOrg: [\n \"DELETE /orgs/{org}/migrations/{migration_id}/archive\",\n ],\n downloadArchiveForOrg: [\n \"GET /orgs/{org}/migrations/{migration_id}/archive\",\n ],\n getArchiveForAuthenticatedUser: [\n \"GET /user/migrations/{migration_id}/archive\",\n ],\n getCommitAuthors: [\"GET /repos/{owner}/{repo}/import/authors\"],\n getImportStatus: [\"GET /repos/{owner}/{repo}/import\"],\n getLargeFiles: [\"GET /repos/{owner}/{repo}/import/large_files\"],\n getStatusForAuthenticatedUser: [\"GET /user/migrations/{migration_id}\"],\n getStatusForOrg: [\"GET /orgs/{org}/migrations/{migration_id}\"],\n listForAuthenticatedUser: [\"GET /user/migrations\"],\n listForOrg: [\"GET /orgs/{org}/migrations\"],\n listReposForAuthenticatedUser: [\n \"GET /user/migrations/{migration_id}/repositories\",\n ],\n listReposForOrg: [\"GET /orgs/{org}/migrations/{migration_id}/repositories\"],\n listReposForUser: [\n \"GET /user/migrations/{migration_id}/repositories\",\n {},\n { renamed: [\"migrations\", \"listReposForAuthenticatedUser\"] },\n ],\n mapCommitAuthor: [\"PATCH /repos/{owner}/{repo}/import/authors/{author_id}\"],\n setLfsPreference: [\"PATCH /repos/{owner}/{repo}/import/lfs\"],\n startForAuthenticatedUser: [\"POST 
/user/migrations\"],\n startForOrg: [\"POST /orgs/{org}/migrations\"],\n startImport: [\"PUT /repos/{owner}/{repo}/import\"],\n unlockRepoForAuthenticatedUser: [\n \"DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock\",\n ],\n unlockRepoForOrg: [\n \"DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock\",\n ],\n updateImport: [\"PATCH /repos/{owner}/{repo}/import\"],\n },\n orgs: {\n blockUser: [\"PUT /orgs/{org}/blocks/{username}\"],\n cancelInvitation: [\"DELETE /orgs/{org}/invitations/{invitation_id}\"],\n checkBlockedUser: [\"GET /orgs/{org}/blocks/{username}\"],\n checkMembershipForUser: [\"GET /orgs/{org}/members/{username}\"],\n checkPublicMembershipForUser: [\"GET /orgs/{org}/public_members/{username}\"],\n convertMemberToOutsideCollaborator: [\n \"PUT /orgs/{org}/outside_collaborators/{username}\",\n ],\n createInvitation: [\"POST /orgs/{org}/invitations\"],\n createWebhook: [\"POST /orgs/{org}/hooks\"],\n deleteWebhook: [\"DELETE /orgs/{org}/hooks/{hook_id}\"],\n get: [\"GET /orgs/{org}\"],\n getMembershipForAuthenticatedUser: [\"GET /user/memberships/orgs/{org}\"],\n getMembershipForUser: [\"GET /orgs/{org}/memberships/{username}\"],\n getWebhook: [\"GET /orgs/{org}/hooks/{hook_id}\"],\n getWebhookConfigForOrg: [\"GET /orgs/{org}/hooks/{hook_id}/config\"],\n getWebhookDelivery: [\n \"GET /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}\",\n ],\n list: [\"GET /organizations\"],\n listAppInstallations: [\"GET /orgs/{org}/installations\"],\n listBlockedUsers: [\"GET /orgs/{org}/blocks\"],\n listCustomRoles: [\"GET /organizations/{organization_id}/custom_roles\"],\n listFailedInvitations: [\"GET /orgs/{org}/failed_invitations\"],\n listForAuthenticatedUser: [\"GET /user/orgs\"],\n listForUser: [\"GET /users/{username}/orgs\"],\n listInvitationTeams: [\"GET /orgs/{org}/invitations/{invitation_id}/teams\"],\n listMembers: [\"GET /orgs/{org}/members\"],\n listMembershipsForAuthenticatedUser: [\"GET /user/memberships/orgs\"],\n listOutsideCollaborators: [\"GET /orgs/{org}/outside_collaborators\"],\n listPendingInvitations: [\"GET /orgs/{org}/invitations\"],\n listPublicMembers: [\"GET /orgs/{org}/public_members\"],\n listWebhookDeliveries: [\"GET /orgs/{org}/hooks/{hook_id}/deliveries\"],\n listWebhooks: [\"GET /orgs/{org}/hooks\"],\n pingWebhook: [\"POST /orgs/{org}/hooks/{hook_id}/pings\"],\n redeliverWebhookDelivery: [\n \"POST /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}/attempts\",\n ],\n removeMember: [\"DELETE /orgs/{org}/members/{username}\"],\n removeMembershipForUser: [\"DELETE /orgs/{org}/memberships/{username}\"],\n removeOutsideCollaborator: [\n \"DELETE /orgs/{org}/outside_collaborators/{username}\",\n ],\n removePublicMembershipForAuthenticatedUser: [\n \"DELETE /orgs/{org}/public_members/{username}\",\n ],\n setMembershipForUser: [\"PUT /orgs/{org}/memberships/{username}\"],\n setPublicMembershipForAuthenticatedUser: [\n \"PUT /orgs/{org}/public_members/{username}\",\n ],\n unblockUser: [\"DELETE /orgs/{org}/blocks/{username}\"],\n update: [\"PATCH /orgs/{org}\"],\n updateMembershipForAuthenticatedUser: [\n \"PATCH /user/memberships/orgs/{org}\",\n ],\n updateWebhook: [\"PATCH /orgs/{org}/hooks/{hook_id}\"],\n updateWebhookConfigForOrg: [\"PATCH /orgs/{org}/hooks/{hook_id}/config\"],\n },\n packages: {\n deletePackageForAuthenticatedUser: [\n \"DELETE /user/packages/{package_type}/{package_name}\",\n ],\n deletePackageForOrg: [\n \"DELETE /orgs/{org}/packages/{package_type}/{package_name}\",\n ],\n deletePackageForUser: [\n \"DELETE 
/users/{username}/packages/{package_type}/{package_name}\",\n ],\n deletePackageVersionForAuthenticatedUser: [\n \"DELETE /user/packages/{package_type}/{package_name}/versions/{package_version_id}\",\n ],\n deletePackageVersionForOrg: [\n \"DELETE /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}\",\n ],\n deletePackageVersionForUser: [\n \"DELETE /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}\",\n ],\n getAllPackageVersionsForAPackageOwnedByAnOrg: [\n \"GET /orgs/{org}/packages/{package_type}/{package_name}/versions\",\n {},\n { renamed: [\"packages\", \"getAllPackageVersionsForPackageOwnedByOrg\"] },\n ],\n getAllPackageVersionsForAPackageOwnedByTheAuthenticatedUser: [\n \"GET /user/packages/{package_type}/{package_name}/versions\",\n {},\n {\n renamed: [\n \"packages\",\n \"getAllPackageVersionsForPackageOwnedByAuthenticatedUser\",\n ],\n },\n ],\n getAllPackageVersionsForPackageOwnedByAuthenticatedUser: [\n \"GET /user/packages/{package_type}/{package_name}/versions\",\n ],\n getAllPackageVersionsForPackageOwnedByOrg: [\n \"GET /orgs/{org}/packages/{package_type}/{package_name}/versions\",\n ],\n getAllPackageVersionsForPackageOwnedByUser: [\n \"GET /users/{username}/packages/{package_type}/{package_name}/versions\",\n ],\n getPackageForAuthenticatedUser: [\n \"GET /user/packages/{package_type}/{package_name}\",\n ],\n getPackageForOrganization: [\n \"GET /orgs/{org}/packages/{package_type}/{package_name}\",\n ],\n getPackageForUser: [\n \"GET /users/{username}/packages/{package_type}/{package_name}\",\n ],\n getPackageVersionForAuthenticatedUser: [\n \"GET /user/packages/{package_type}/{package_name}/versions/{package_version_id}\",\n ],\n getPackageVersionForOrganization: [\n \"GET /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}\",\n ],\n getPackageVersionForUser: [\n \"GET /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}\",\n ],\n listPackagesForAuthenticatedUser: [\"GET /user/packages\"],\n listPackagesForOrganization: [\"GET /orgs/{org}/packages\"],\n listPackagesForUser: [\"GET /users/{username}/packages\"],\n restorePackageForAuthenticatedUser: [\n \"POST /user/packages/{package_type}/{package_name}/restore{?token}\",\n ],\n restorePackageForOrg: [\n \"POST /orgs/{org}/packages/{package_type}/{package_name}/restore{?token}\",\n ],\n restorePackageForUser: [\n \"POST /users/{username}/packages/{package_type}/{package_name}/restore{?token}\",\n ],\n restorePackageVersionForAuthenticatedUser: [\n \"POST /user/packages/{package_type}/{package_name}/versions/{package_version_id}/restore\",\n ],\n restorePackageVersionForOrg: [\n \"POST /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore\",\n ],\n restorePackageVersionForUser: [\n \"POST /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore\",\n ],\n },\n projects: {\n addCollaborator: [\"PUT /projects/{project_id}/collaborators/{username}\"],\n createCard: [\"POST /projects/columns/{column_id}/cards\"],\n createColumn: [\"POST /projects/{project_id}/columns\"],\n createForAuthenticatedUser: [\"POST /user/projects\"],\n createForOrg: [\"POST /orgs/{org}/projects\"],\n createForRepo: [\"POST /repos/{owner}/{repo}/projects\"],\n delete: [\"DELETE /projects/{project_id}\"],\n deleteCard: [\"DELETE /projects/columns/cards/{card_id}\"],\n deleteColumn: [\"DELETE /projects/columns/{column_id}\"],\n get: [\"GET 
/projects/{project_id}\"],\n getCard: [\"GET /projects/columns/cards/{card_id}\"],\n getColumn: [\"GET /projects/columns/{column_id}\"],\n getPermissionForUser: [\n \"GET /projects/{project_id}/collaborators/{username}/permission\",\n ],\n listCards: [\"GET /projects/columns/{column_id}/cards\"],\n listCollaborators: [\"GET /projects/{project_id}/collaborators\"],\n listColumns: [\"GET /projects/{project_id}/columns\"],\n listForOrg: [\"GET /orgs/{org}/projects\"],\n listForRepo: [\"GET /repos/{owner}/{repo}/projects\"],\n listForUser: [\"GET /users/{username}/projects\"],\n moveCard: [\"POST /projects/columns/cards/{card_id}/moves\"],\n moveColumn: [\"POST /projects/columns/{column_id}/moves\"],\n removeCollaborator: [\n \"DELETE /projects/{project_id}/collaborators/{username}\",\n ],\n update: [\"PATCH /projects/{project_id}\"],\n updateCard: [\"PATCH /projects/columns/cards/{card_id}\"],\n updateColumn: [\"PATCH /projects/columns/{column_id}\"],\n },\n pulls: {\n checkIfMerged: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/merge\"],\n create: [\"POST /repos/{owner}/{repo}/pulls\"],\n createReplyForReviewComment: [\n \"POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies\",\n ],\n createReview: [\"POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews\"],\n createReviewComment: [\n \"POST /repos/{owner}/{repo}/pulls/{pull_number}/comments\",\n ],\n deletePendingReview: [\n \"DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}\",\n ],\n deleteReviewComment: [\n \"DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}\",\n ],\n dismissReview: [\n \"PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals\",\n ],\n get: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}\"],\n getReview: [\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}\",\n ],\n getReviewComment: [\"GET /repos/{owner}/{repo}/pulls/comments/{comment_id}\"],\n list: [\"GET /repos/{owner}/{repo}/pulls\"],\n listCommentsForReview: [\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments\",\n ],\n listCommits: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/commits\"],\n listFiles: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/files\"],\n listRequestedReviewers: [\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\",\n ],\n listReviewComments: [\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/comments\",\n ],\n listReviewCommentsForRepo: [\"GET /repos/{owner}/{repo}/pulls/comments\"],\n listReviews: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews\"],\n merge: [\"PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge\"],\n removeRequestedReviewers: [\n \"DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\",\n ],\n requestReviewers: [\n \"POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\",\n ],\n submitReview: [\n \"POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events\",\n ],\n update: [\"PATCH /repos/{owner}/{repo}/pulls/{pull_number}\"],\n updateBranch: [\n \"PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch\",\n ],\n updateReview: [\n \"PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}\",\n ],\n updateReviewComment: [\n \"PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}\",\n ],\n },\n rateLimit: { get: [\"GET /rate_limit\"] },\n reactions: {\n createForCommitComment: [\n \"POST /repos/{owner}/{repo}/comments/{comment_id}/reactions\",\n ],\n createForIssue: [\n \"POST 
/repos/{owner}/{repo}/issues/{issue_number}/reactions\",\n ],\n createForIssueComment: [\n \"POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions\",\n ],\n createForPullRequestReviewComment: [\n \"POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions\",\n ],\n createForRelease: [\n \"POST /repos/{owner}/{repo}/releases/{release_id}/reactions\",\n ],\n createForTeamDiscussionCommentInOrg: [\n \"POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions\",\n ],\n createForTeamDiscussionInOrg: [\n \"POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions\",\n ],\n deleteForCommitComment: [\n \"DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}\",\n ],\n deleteForIssue: [\n \"DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}\",\n ],\n deleteForIssueComment: [\n \"DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}\",\n ],\n deleteForPullRequestComment: [\n \"DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}\",\n ],\n deleteForRelease: [\n \"DELETE /repos/{owner}/{repo}/releases/{release_id}/reactions/{reaction_id}\",\n ],\n deleteForTeamDiscussion: [\n \"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}\",\n ],\n deleteForTeamDiscussionComment: [\n \"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}\",\n ],\n listForCommitComment: [\n \"GET /repos/{owner}/{repo}/comments/{comment_id}/reactions\",\n ],\n listForIssue: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/reactions\"],\n listForIssueComment: [\n \"GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions\",\n ],\n listForPullRequestReviewComment: [\n \"GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions\",\n ],\n listForRelease: [\n \"GET /repos/{owner}/{repo}/releases/{release_id}/reactions\",\n ],\n listForTeamDiscussionCommentInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions\",\n ],\n listForTeamDiscussionInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions\",\n ],\n },\n repos: {\n acceptInvitation: [\n \"PATCH /user/repository_invitations/{invitation_id}\",\n {},\n { renamed: [\"repos\", \"acceptInvitationForAuthenticatedUser\"] },\n ],\n acceptInvitationForAuthenticatedUser: [\n \"PATCH /user/repository_invitations/{invitation_id}\",\n ],\n addAppAccessRestrictions: [\n \"POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\",\n {},\n { mapToData: \"apps\" },\n ],\n addCollaborator: [\"PUT /repos/{owner}/{repo}/collaborators/{username}\"],\n addStatusCheckContexts: [\n \"POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\",\n {},\n { mapToData: \"contexts\" },\n ],\n addTeamAccessRestrictions: [\n \"POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\",\n {},\n { mapToData: \"teams\" },\n ],\n addUserAccessRestrictions: [\n \"POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\",\n {},\n { mapToData: \"users\" },\n ],\n checkCollaborator: [\"GET /repos/{owner}/{repo}/collaborators/{username}\"],\n checkVulnerabilityAlerts: [\n \"GET /repos/{owner}/{repo}/vulnerability-alerts\",\n ],\n codeownersErrors: [\"GET /repos/{owner}/{repo}/codeowners/errors\"],\n compareCommits: [\"GET 
/repos/{owner}/{repo}/compare/{base}...{head}\"],\n compareCommitsWithBasehead: [\n \"GET /repos/{owner}/{repo}/compare/{basehead}\",\n ],\n createAutolink: [\"POST /repos/{owner}/{repo}/autolinks\"],\n createCommitComment: [\n \"POST /repos/{owner}/{repo}/commits/{commit_sha}/comments\",\n ],\n createCommitSignatureProtection: [\n \"POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures\",\n ],\n createCommitStatus: [\"POST /repos/{owner}/{repo}/statuses/{sha}\"],\n createDeployKey: [\"POST /repos/{owner}/{repo}/keys\"],\n createDeployment: [\"POST /repos/{owner}/{repo}/deployments\"],\n createDeploymentStatus: [\n \"POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses\",\n ],\n createDispatchEvent: [\"POST /repos/{owner}/{repo}/dispatches\"],\n createForAuthenticatedUser: [\"POST /user/repos\"],\n createFork: [\"POST /repos/{owner}/{repo}/forks\"],\n createInOrg: [\"POST /orgs/{org}/repos\"],\n createOrUpdateEnvironment: [\n \"PUT /repos/{owner}/{repo}/environments/{environment_name}\",\n ],\n createOrUpdateFileContents: [\"PUT /repos/{owner}/{repo}/contents/{path}\"],\n createPagesSite: [\"POST /repos/{owner}/{repo}/pages\"],\n createRelease: [\"POST /repos/{owner}/{repo}/releases\"],\n createTagProtection: [\"POST /repos/{owner}/{repo}/tags/protection\"],\n createUsingTemplate: [\n \"POST /repos/{template_owner}/{template_repo}/generate\",\n ],\n createWebhook: [\"POST /repos/{owner}/{repo}/hooks\"],\n declineInvitation: [\n \"DELETE /user/repository_invitations/{invitation_id}\",\n {},\n { renamed: [\"repos\", \"declineInvitationForAuthenticatedUser\"] },\n ],\n declineInvitationForAuthenticatedUser: [\n \"DELETE /user/repository_invitations/{invitation_id}\",\n ],\n delete: [\"DELETE /repos/{owner}/{repo}\"],\n deleteAccessRestrictions: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions\",\n ],\n deleteAdminBranchProtection: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins\",\n ],\n deleteAnEnvironment: [\n \"DELETE /repos/{owner}/{repo}/environments/{environment_name}\",\n ],\n deleteAutolink: [\"DELETE /repos/{owner}/{repo}/autolinks/{autolink_id}\"],\n deleteBranchProtection: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection\",\n ],\n deleteCommitComment: [\"DELETE /repos/{owner}/{repo}/comments/{comment_id}\"],\n deleteCommitSignatureProtection: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures\",\n ],\n deleteDeployKey: [\"DELETE /repos/{owner}/{repo}/keys/{key_id}\"],\n deleteDeployment: [\n \"DELETE /repos/{owner}/{repo}/deployments/{deployment_id}\",\n ],\n deleteFile: [\"DELETE /repos/{owner}/{repo}/contents/{path}\"],\n deleteInvitation: [\n \"DELETE /repos/{owner}/{repo}/invitations/{invitation_id}\",\n ],\n deletePagesSite: [\"DELETE /repos/{owner}/{repo}/pages\"],\n deletePullRequestReviewProtection: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews\",\n ],\n deleteRelease: [\"DELETE /repos/{owner}/{repo}/releases/{release_id}\"],\n deleteReleaseAsset: [\n \"DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}\",\n ],\n deleteTagProtection: [\n \"DELETE /repos/{owner}/{repo}/tags/protection/{tag_protection_id}\",\n ],\n deleteWebhook: [\"DELETE /repos/{owner}/{repo}/hooks/{hook_id}\"],\n disableAutomatedSecurityFixes: [\n \"DELETE /repos/{owner}/{repo}/automated-security-fixes\",\n ],\n disableLfsForRepo: [\"DELETE /repos/{owner}/{repo}/lfs\"],\n disableVulnerabilityAlerts: [\n \"DELETE 
/repos/{owner}/{repo}/vulnerability-alerts\",\n ],\n downloadArchive: [\n \"GET /repos/{owner}/{repo}/zipball/{ref}\",\n {},\n { renamed: [\"repos\", \"downloadZipballArchive\"] },\n ],\n downloadTarballArchive: [\"GET /repos/{owner}/{repo}/tarball/{ref}\"],\n downloadZipballArchive: [\"GET /repos/{owner}/{repo}/zipball/{ref}\"],\n enableAutomatedSecurityFixes: [\n \"PUT /repos/{owner}/{repo}/automated-security-fixes\",\n ],\n enableLfsForRepo: [\"PUT /repos/{owner}/{repo}/lfs\"],\n enableVulnerabilityAlerts: [\n \"PUT /repos/{owner}/{repo}/vulnerability-alerts\",\n ],\n generateReleaseNotes: [\n \"POST /repos/{owner}/{repo}/releases/generate-notes\",\n ],\n get: [\"GET /repos/{owner}/{repo}\"],\n getAccessRestrictions: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions\",\n ],\n getAdminBranchProtection: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins\",\n ],\n getAllEnvironments: [\"GET /repos/{owner}/{repo}/environments\"],\n getAllStatusCheckContexts: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\",\n ],\n getAllTopics: [\"GET /repos/{owner}/{repo}/topics\"],\n getAppsWithAccessToProtectedBranch: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\",\n ],\n getAutolink: [\"GET /repos/{owner}/{repo}/autolinks/{autolink_id}\"],\n getBranch: [\"GET /repos/{owner}/{repo}/branches/{branch}\"],\n getBranchProtection: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection\",\n ],\n getClones: [\"GET /repos/{owner}/{repo}/traffic/clones\"],\n getCodeFrequencyStats: [\"GET /repos/{owner}/{repo}/stats/code_frequency\"],\n getCollaboratorPermissionLevel: [\n \"GET /repos/{owner}/{repo}/collaborators/{username}/permission\",\n ],\n getCombinedStatusForRef: [\"GET /repos/{owner}/{repo}/commits/{ref}/status\"],\n getCommit: [\"GET /repos/{owner}/{repo}/commits/{ref}\"],\n getCommitActivityStats: [\"GET /repos/{owner}/{repo}/stats/commit_activity\"],\n getCommitComment: [\"GET /repos/{owner}/{repo}/comments/{comment_id}\"],\n getCommitSignatureProtection: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures\",\n ],\n getCommunityProfileMetrics: [\"GET /repos/{owner}/{repo}/community/profile\"],\n getContent: [\"GET /repos/{owner}/{repo}/contents/{path}\"],\n getContributorsStats: [\"GET /repos/{owner}/{repo}/stats/contributors\"],\n getDeployKey: [\"GET /repos/{owner}/{repo}/keys/{key_id}\"],\n getDeployment: [\"GET /repos/{owner}/{repo}/deployments/{deployment_id}\"],\n getDeploymentStatus: [\n \"GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}\",\n ],\n getEnvironment: [\n \"GET /repos/{owner}/{repo}/environments/{environment_name}\",\n ],\n getLatestPagesBuild: [\"GET /repos/{owner}/{repo}/pages/builds/latest\"],\n getLatestRelease: [\"GET /repos/{owner}/{repo}/releases/latest\"],\n getPages: [\"GET /repos/{owner}/{repo}/pages\"],\n getPagesBuild: [\"GET /repos/{owner}/{repo}/pages/builds/{build_id}\"],\n getPagesHealthCheck: [\"GET /repos/{owner}/{repo}/pages/health\"],\n getParticipationStats: [\"GET /repos/{owner}/{repo}/stats/participation\"],\n getPullRequestReviewProtection: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews\",\n ],\n getPunchCardStats: [\"GET /repos/{owner}/{repo}/stats/punch_card\"],\n getReadme: [\"GET /repos/{owner}/{repo}/readme\"],\n getReadmeInDirectory: [\"GET /repos/{owner}/{repo}/readme/{dir}\"],\n getRelease: [\"GET 
/repos/{owner}/{repo}/releases/{release_id}\"],\n getReleaseAsset: [\"GET /repos/{owner}/{repo}/releases/assets/{asset_id}\"],\n getReleaseByTag: [\"GET /repos/{owner}/{repo}/releases/tags/{tag}\"],\n getStatusChecksProtection: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\",\n ],\n getTeamsWithAccessToProtectedBranch: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\",\n ],\n getTopPaths: [\"GET /repos/{owner}/{repo}/traffic/popular/paths\"],\n getTopReferrers: [\"GET /repos/{owner}/{repo}/traffic/popular/referrers\"],\n getUsersWithAccessToProtectedBranch: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\",\n ],\n getViews: [\"GET /repos/{owner}/{repo}/traffic/views\"],\n getWebhook: [\"GET /repos/{owner}/{repo}/hooks/{hook_id}\"],\n getWebhookConfigForRepo: [\n \"GET /repos/{owner}/{repo}/hooks/{hook_id}/config\",\n ],\n getWebhookDelivery: [\n \"GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}\",\n ],\n listAutolinks: [\"GET /repos/{owner}/{repo}/autolinks\"],\n listBranches: [\"GET /repos/{owner}/{repo}/branches\"],\n listBranchesForHeadCommit: [\n \"GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head\",\n ],\n listCollaborators: [\"GET /repos/{owner}/{repo}/collaborators\"],\n listCommentsForCommit: [\n \"GET /repos/{owner}/{repo}/commits/{commit_sha}/comments\",\n ],\n listCommitCommentsForRepo: [\"GET /repos/{owner}/{repo}/comments\"],\n listCommitStatusesForRef: [\n \"GET /repos/{owner}/{repo}/commits/{ref}/statuses\",\n ],\n listCommits: [\"GET /repos/{owner}/{repo}/commits\"],\n listContributors: [\"GET /repos/{owner}/{repo}/contributors\"],\n listDeployKeys: [\"GET /repos/{owner}/{repo}/keys\"],\n listDeploymentStatuses: [\n \"GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses\",\n ],\n listDeployments: [\"GET /repos/{owner}/{repo}/deployments\"],\n listForAuthenticatedUser: [\"GET /user/repos\"],\n listForOrg: [\"GET /orgs/{org}/repos\"],\n listForUser: [\"GET /users/{username}/repos\"],\n listForks: [\"GET /repos/{owner}/{repo}/forks\"],\n listInvitations: [\"GET /repos/{owner}/{repo}/invitations\"],\n listInvitationsForAuthenticatedUser: [\"GET /user/repository_invitations\"],\n listLanguages: [\"GET /repos/{owner}/{repo}/languages\"],\n listPagesBuilds: [\"GET /repos/{owner}/{repo}/pages/builds\"],\n listPublic: [\"GET /repositories\"],\n listPullRequestsAssociatedWithCommit: [\n \"GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls\",\n ],\n listReleaseAssets: [\n \"GET /repos/{owner}/{repo}/releases/{release_id}/assets\",\n ],\n listReleases: [\"GET /repos/{owner}/{repo}/releases\"],\n listTagProtection: [\"GET /repos/{owner}/{repo}/tags/protection\"],\n listTags: [\"GET /repos/{owner}/{repo}/tags\"],\n listTeams: [\"GET /repos/{owner}/{repo}/teams\"],\n listWebhookDeliveries: [\n \"GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries\",\n ],\n listWebhooks: [\"GET /repos/{owner}/{repo}/hooks\"],\n merge: [\"POST /repos/{owner}/{repo}/merges\"],\n mergeUpstream: [\"POST /repos/{owner}/{repo}/merge-upstream\"],\n pingWebhook: [\"POST /repos/{owner}/{repo}/hooks/{hook_id}/pings\"],\n redeliverWebhookDelivery: [\n \"POST /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}/attempts\",\n ],\n removeAppAccessRestrictions: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\",\n {},\n { mapToData: \"apps\" },\n ],\n removeCollaborator: [\n \"DELETE 
/repos/{owner}/{repo}/collaborators/{username}\",\n ],\n removeStatusCheckContexts: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\",\n {},\n { mapToData: \"contexts\" },\n ],\n removeStatusCheckProtection: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\",\n ],\n removeTeamAccessRestrictions: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\",\n {},\n { mapToData: \"teams\" },\n ],\n removeUserAccessRestrictions: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\",\n {},\n { mapToData: \"users\" },\n ],\n renameBranch: [\"POST /repos/{owner}/{repo}/branches/{branch}/rename\"],\n replaceAllTopics: [\"PUT /repos/{owner}/{repo}/topics\"],\n requestPagesBuild: [\"POST /repos/{owner}/{repo}/pages/builds\"],\n setAdminBranchProtection: [\n \"POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins\",\n ],\n setAppAccessRestrictions: [\n \"PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\",\n {},\n { mapToData: \"apps\" },\n ],\n setStatusCheckContexts: [\n \"PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\",\n {},\n { mapToData: \"contexts\" },\n ],\n setTeamAccessRestrictions: [\n \"PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\",\n {},\n { mapToData: \"teams\" },\n ],\n setUserAccessRestrictions: [\n \"PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\",\n {},\n { mapToData: \"users\" },\n ],\n testPushWebhook: [\"POST /repos/{owner}/{repo}/hooks/{hook_id}/tests\"],\n transfer: [\"POST /repos/{owner}/{repo}/transfer\"],\n update: [\"PATCH /repos/{owner}/{repo}\"],\n updateBranchProtection: [\n \"PUT /repos/{owner}/{repo}/branches/{branch}/protection\",\n ],\n updateCommitComment: [\"PATCH /repos/{owner}/{repo}/comments/{comment_id}\"],\n updateInformationAboutPagesSite: [\"PUT /repos/{owner}/{repo}/pages\"],\n updateInvitation: [\n \"PATCH /repos/{owner}/{repo}/invitations/{invitation_id}\",\n ],\n updatePullRequestReviewProtection: [\n \"PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews\",\n ],\n updateRelease: [\"PATCH /repos/{owner}/{repo}/releases/{release_id}\"],\n updateReleaseAsset: [\n \"PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}\",\n ],\n updateStatusCheckPotection: [\n \"PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\",\n {},\n { renamed: [\"repos\", \"updateStatusCheckProtection\"] },\n ],\n updateStatusCheckProtection: [\n \"PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\",\n ],\n updateWebhook: [\"PATCH /repos/{owner}/{repo}/hooks/{hook_id}\"],\n updateWebhookConfigForRepo: [\n \"PATCH /repos/{owner}/{repo}/hooks/{hook_id}/config\",\n ],\n uploadReleaseAsset: [\n \"POST /repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}\",\n { baseUrl: \"https://uploads.github.com\" },\n ],\n },\n search: {\n code: [\"GET /search/code\"],\n commits: [\"GET /search/commits\"],\n issuesAndPullRequests: [\"GET /search/issues\"],\n labels: [\"GET /search/labels\"],\n repos: [\"GET /search/repositories\"],\n topics: [\"GET /search/topics\"],\n users: [\"GET /search/users\"],\n },\n secretScanning: {\n getAlert: [\n \"GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}\",\n ],\n listAlertsForEnterprise: [\n \"GET /enterprises/{enterprise}/secret-scanning/alerts\",\n ],\n 
listAlertsForOrg: [\"GET /orgs/{org}/secret-scanning/alerts\"],\n listAlertsForRepo: [\"GET /repos/{owner}/{repo}/secret-scanning/alerts\"],\n listLocationsForAlert: [\n \"GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations\",\n ],\n updateAlert: [\n \"PATCH /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}\",\n ],\n },\n teams: {\n addOrUpdateMembershipForUserInOrg: [\n \"PUT /orgs/{org}/teams/{team_slug}/memberships/{username}\",\n ],\n addOrUpdateProjectPermissionsInOrg: [\n \"PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}\",\n ],\n addOrUpdateRepoPermissionsInOrg: [\n \"PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}\",\n ],\n checkPermissionsForProjectInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/projects/{project_id}\",\n ],\n checkPermissionsForRepoInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}\",\n ],\n create: [\"POST /orgs/{org}/teams\"],\n createDiscussionCommentInOrg: [\n \"POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments\",\n ],\n createDiscussionInOrg: [\"POST /orgs/{org}/teams/{team_slug}/discussions\"],\n deleteDiscussionCommentInOrg: [\n \"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}\",\n ],\n deleteDiscussionInOrg: [\n \"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}\",\n ],\n deleteInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}\"],\n getByName: [\"GET /orgs/{org}/teams/{team_slug}\"],\n getDiscussionCommentInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}\",\n ],\n getDiscussionInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}\",\n ],\n getMembershipForUserInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/memberships/{username}\",\n ],\n list: [\"GET /orgs/{org}/teams\"],\n listChildInOrg: [\"GET /orgs/{org}/teams/{team_slug}/teams\"],\n listDiscussionCommentsInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments\",\n ],\n listDiscussionsInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions\"],\n listForAuthenticatedUser: [\"GET /user/teams\"],\n listMembersInOrg: [\"GET /orgs/{org}/teams/{team_slug}/members\"],\n listPendingInvitationsInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/invitations\",\n ],\n listProjectsInOrg: [\"GET /orgs/{org}/teams/{team_slug}/projects\"],\n listReposInOrg: [\"GET /orgs/{org}/teams/{team_slug}/repos\"],\n removeMembershipForUserInOrg: [\n \"DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}\",\n ],\n removeProjectInOrg: [\n \"DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}\",\n ],\n removeRepoInOrg: [\n \"DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}\",\n ],\n updateDiscussionCommentInOrg: [\n \"PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}\",\n ],\n updateDiscussionInOrg: [\n \"PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}\",\n ],\n updateInOrg: [\"PATCH /orgs/{org}/teams/{team_slug}\"],\n },\n users: {\n addEmailForAuthenticated: [\n \"POST /user/emails\",\n {},\n { renamed: [\"users\", \"addEmailForAuthenticatedUser\"] },\n ],\n addEmailForAuthenticatedUser: [\"POST /user/emails\"],\n block: [\"PUT /user/blocks/{username}\"],\n checkBlocked: [\"GET /user/blocks/{username}\"],\n checkFollowingForUser: [\"GET /users/{username}/following/{target_user}\"],\n checkPersonIsFollowedByAuthenticated: [\"GET /user/following/{username}\"],\n 
createGpgKeyForAuthenticated: [\n \"POST /user/gpg_keys\",\n {},\n { renamed: [\"users\", \"createGpgKeyForAuthenticatedUser\"] },\n ],\n createGpgKeyForAuthenticatedUser: [\"POST /user/gpg_keys\"],\n createPublicSshKeyForAuthenticated: [\n \"POST /user/keys\",\n {},\n { renamed: [\"users\", \"createPublicSshKeyForAuthenticatedUser\"] },\n ],\n createPublicSshKeyForAuthenticatedUser: [\"POST /user/keys\"],\n deleteEmailForAuthenticated: [\n \"DELETE /user/emails\",\n {},\n { renamed: [\"users\", \"deleteEmailForAuthenticatedUser\"] },\n ],\n deleteEmailForAuthenticatedUser: [\"DELETE /user/emails\"],\n deleteGpgKeyForAuthenticated: [\n \"DELETE /user/gpg_keys/{gpg_key_id}\",\n {},\n { renamed: [\"users\", \"deleteGpgKeyForAuthenticatedUser\"] },\n ],\n deleteGpgKeyForAuthenticatedUser: [\"DELETE /user/gpg_keys/{gpg_key_id}\"],\n deletePublicSshKeyForAuthenticated: [\n \"DELETE /user/keys/{key_id}\",\n {},\n { renamed: [\"users\", \"deletePublicSshKeyForAuthenticatedUser\"] },\n ],\n deletePublicSshKeyForAuthenticatedUser: [\"DELETE /user/keys/{key_id}\"],\n follow: [\"PUT /user/following/{username}\"],\n getAuthenticated: [\"GET /user\"],\n getByUsername: [\"GET /users/{username}\"],\n getContextForUser: [\"GET /users/{username}/hovercard\"],\n getGpgKeyForAuthenticated: [\n \"GET /user/gpg_keys/{gpg_key_id}\",\n {},\n { renamed: [\"users\", \"getGpgKeyForAuthenticatedUser\"] },\n ],\n getGpgKeyForAuthenticatedUser: [\"GET /user/gpg_keys/{gpg_key_id}\"],\n getPublicSshKeyForAuthenticated: [\n \"GET /user/keys/{key_id}\",\n {},\n { renamed: [\"users\", \"getPublicSshKeyForAuthenticatedUser\"] },\n ],\n getPublicSshKeyForAuthenticatedUser: [\"GET /user/keys/{key_id}\"],\n list: [\"GET /users\"],\n listBlockedByAuthenticated: [\n \"GET /user/blocks\",\n {},\n { renamed: [\"users\", \"listBlockedByAuthenticatedUser\"] },\n ],\n listBlockedByAuthenticatedUser: [\"GET /user/blocks\"],\n listEmailsForAuthenticated: [\n \"GET /user/emails\",\n {},\n { renamed: [\"users\", \"listEmailsForAuthenticatedUser\"] },\n ],\n listEmailsForAuthenticatedUser: [\"GET /user/emails\"],\n listFollowedByAuthenticated: [\n \"GET /user/following\",\n {},\n { renamed: [\"users\", \"listFollowedByAuthenticatedUser\"] },\n ],\n listFollowedByAuthenticatedUser: [\"GET /user/following\"],\n listFollowersForAuthenticatedUser: [\"GET /user/followers\"],\n listFollowersForUser: [\"GET /users/{username}/followers\"],\n listFollowingForUser: [\"GET /users/{username}/following\"],\n listGpgKeysForAuthenticated: [\n \"GET /user/gpg_keys\",\n {},\n { renamed: [\"users\", \"listGpgKeysForAuthenticatedUser\"] },\n ],\n listGpgKeysForAuthenticatedUser: [\"GET /user/gpg_keys\"],\n listGpgKeysForUser: [\"GET /users/{username}/gpg_keys\"],\n listPublicEmailsForAuthenticated: [\n \"GET /user/public_emails\",\n {},\n { renamed: [\"users\", \"listPublicEmailsForAuthenticatedUser\"] },\n ],\n listPublicEmailsForAuthenticatedUser: [\"GET /user/public_emails\"],\n listPublicKeysForUser: [\"GET /users/{username}/keys\"],\n listPublicSshKeysForAuthenticated: [\n \"GET /user/keys\",\n {},\n { renamed: [\"users\", \"listPublicSshKeysForAuthenticatedUser\"] },\n ],\n listPublicSshKeysForAuthenticatedUser: [\"GET /user/keys\"],\n setPrimaryEmailVisibilityForAuthenticated: [\n \"PATCH /user/email/visibility\",\n {},\n { renamed: [\"users\", \"setPrimaryEmailVisibilityForAuthenticatedUser\"] },\n ],\n setPrimaryEmailVisibilityForAuthenticatedUser: [\n \"PATCH /user/email/visibility\",\n ],\n unblock: [\"DELETE /user/blocks/{username}\"],\n 
unfollow: [\"DELETE /user/following/{username}\"],\n updateAuthenticated: [\"PATCH /user\"],\n },\n};\nexport default Endpoints;\n","export const VERSION = \"5.16.2\";\n","export function endpointsToMethods(octokit, endpointsMap) {\n const newMethods = {};\n for (const [scope, endpoints] of Object.entries(endpointsMap)) {\n for (const [methodName, endpoint] of Object.entries(endpoints)) {\n const [route, defaults, decorations] = endpoint;\n const [method, url] = route.split(/ /);\n const endpointDefaults = Object.assign({ method, url }, defaults);\n if (!newMethods[scope]) {\n newMethods[scope] = {};\n }\n const scopeMethods = newMethods[scope];\n if (decorations) {\n scopeMethods[methodName] = decorate(octokit, scope, methodName, endpointDefaults, decorations);\n continue;\n }\n scopeMethods[methodName] = octokit.request.defaults(endpointDefaults);\n }\n }\n return newMethods;\n}\nfunction decorate(octokit, scope, methodName, defaults, decorations) {\n const requestWithDefaults = octokit.request.defaults(defaults);\n /* istanbul ignore next */\n function withDecorations(...args) {\n // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488\n let options = requestWithDefaults.endpoint.merge(...args);\n // There are currently no other decorations than `.mapToData`\n if (decorations.mapToData) {\n options = Object.assign({}, options, {\n data: options[decorations.mapToData],\n [decorations.mapToData]: undefined,\n });\n return requestWithDefaults(options);\n }\n if (decorations.renamed) {\n const [newScope, newMethodName] = decorations.renamed;\n octokit.log.warn(`octokit.${scope}.${methodName}() has been renamed to octokit.${newScope}.${newMethodName}()`);\n }\n if (decorations.deprecated) {\n octokit.log.warn(decorations.deprecated);\n }\n if (decorations.renamedParameters) {\n // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488\n const options = requestWithDefaults.endpoint.merge(...args);\n for (const [name, alias] of Object.entries(decorations.renamedParameters)) {\n if (name in options) {\n octokit.log.warn(`\"${name}\" parameter is deprecated for \"octokit.${scope}.${methodName}()\". 
Use \"${alias}\" instead`);\n if (!(alias in options)) {\n options[alias] = options[name];\n }\n delete options[name];\n }\n }\n return requestWithDefaults(options);\n }\n // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488\n return requestWithDefaults(...args);\n }\n return Object.assign(withDecorations, requestWithDefaults);\n}\n","import ENDPOINTS from \"./generated/endpoints\";\nimport { VERSION } from \"./version\";\nimport { endpointsToMethods } from \"./endpoints-to-methods\";\nexport function restEndpointMethods(octokit) {\n const api = endpointsToMethods(octokit, ENDPOINTS);\n return {\n rest: api,\n };\n}\nrestEndpointMethods.VERSION = VERSION;\nexport function legacyRestEndpointMethods(octokit) {\n const api = endpointsToMethods(octokit, ENDPOINTS);\n return {\n ...api,\n rest: api,\n };\n}\nlegacyRestEndpointMethods.VERSION = VERSION;\n"],"names":["ENDPOINTS"],"mappings":"AAAA,MAAM,SAAS,GAAG;AAClB,IAAI,OAAO,EAAE;AACb,QAAQ,uCAAuC,EAAE;AACjD,YAAY,qDAAqD;AACjE,SAAS;AACT,QAAQ,wCAAwC,EAAE;AAClD,YAAY,+DAA+D;AAC3E,SAAS;AACT,QAAQ,0BAA0B,EAAE;AACpC,YAAY,4EAA4E;AACxF,SAAS;AACT,QAAQ,kBAAkB,EAAE;AAC5B,YAAY,0DAA0D;AACtE,SAAS;AACT,QAAQ,iBAAiB,EAAE;AAC3B,YAAY,yDAAyD;AACrE,SAAS;AACT,QAAQ,+BAA+B,EAAE;AACzC,YAAY,yFAAyF;AACrG,SAAS;AACT,QAAQ,uBAAuB,EAAE,CAAC,+CAA+C,CAAC;AAClF,QAAQ,wBAAwB,EAAE;AAClC,YAAY,yDAAyD;AACrE,SAAS;AACT,QAAQ,6BAA6B,EAAE;AACvC,YAAY,qDAAqD;AACjE,SAAS;AACT,QAAQ,8BAA8B,EAAE;AACxC,YAAY,+DAA+D;AAC3E,SAAS;AACT,QAAQ,uBAAuB,EAAE,CAAC,+CAA+C,CAAC;AAClF,QAAQ,wBAAwB,EAAE;AAClC,YAAY,yDAAyD;AACrE,SAAS;AACT,QAAQ,sBAAsB,EAAE;AAChC,YAAY,uEAAuE;AACnF,SAAS;AACT,QAAQ,sBAAsB,EAAE;AAChC,YAAY,wDAAwD;AACpE,SAAS;AACT,QAAQ,uBAAuB,EAAE;AACjC,YAAY,uDAAuD;AACnE,SAAS;AACT,QAAQ,cAAc,EAAE;AACxB,YAAY,8DAA8D;AAC1E,SAAS;AACT,QAAQ,uBAAuB,EAAE;AACjC,YAAY,4FAA4F;AACxG,SAAS;AACT,QAAQ,eAAe,EAAE,CAAC,kDAAkD,CAAC;AAC7E,QAAQ,gBAAgB,EAAE;AAC1B,YAAY,4DAA4D;AACxE,SAAS;AACT,QAAQ,6BAA6B,EAAE;AACvC,YAAY,gDAAgD;AAC5D,SAAS;AACT,QAAQ,8BAA8B,EAAE;AACxC,YAAY,0DAA0D;AACtE,SAAS;AACT,QAAQ,iBAAiB,EAAE,CAAC,oDAAoD,CAAC;AACjF,QAAQ,qBAAqB,EAAE;AAC/B,YAAY,yDAAyD;AACrE,SAAS;AACT,QAAQ,kDAAkD,EAAE;AAC5D,YAAY,qEAAqE;AACjF,SAAS;AACT,QAAQ,eAAe,EAAE;AACzB,YAAY,mEAAmE;AAC/E,SAAS;AACT,QAAQ,gBAAgB,EAAE;AAC1B,YAAY,4EAA4E;AACxF,SAAS;AACT,QAAQ,6BAA6B,EAAE;AACvC,YAAY,sDAAsD;AAClE,SAAS;AACT,QAAQ,8BAA8B,EAAE;AACxC,YAAY,gFAAgF;AAC5F,SAAS;AACT,QAAQ,uBAAuB,EAAE;AACjC,YAAY,sDAAsD;AAClE,SAAS;AACT,QAAQ,iDAAiD,EAAE;AAC3D,YAAY,kEAAkE;AAC9E,SAAS;AACT,QAAQ,cAAc,EAAE;AACxB,YAAY,kEAAkE;AAC9E,SAAS;AACT,QAAQ,mBAAmB,EAAE,CAAC,0CAA0C,CAAC;AACzE,QAAQ,oBAAoB,EAAE,CAAC,+CAA+C,CAAC;AAC/E,QAAQ,gCAAgC,EAAE;AAC1C,YAAY,mDAAmD;AAC/D,SAAS;AACT,QAAQ,iCAAiC,EAAE;AAC3C,YAAY,mDAAmD;AAC/D,SAAS;AACT,QAAQ,0BAA0B,EAAE,CAAC,qCAAqC,CAAC;AAC3E,QAAQ,6BAA6B,EAAE;AACvC,YAAY,sDAAsD;AAClE,SAAS;AACT,QAAQ,2BAA2B,EAAE;AACrC,YAAY,gEAAgE;AAC5E,SAAS;AACT,QAAQ,WAAW,EAAE,CAAC,2DAA2D,CAAC;AAClF,QAAQ,uBAAuB,EAAE;AACjC,YAAY,sFAAsF;AAClG,SAAS;AACT,QAAQ,oBAAoB,EAAE;AAC9B,YAAY,yFAAyF;AACrG,SAAS;AACT,QAAQ,oDAAoD,EAAE;AAC9D,YAAY,4DAA4D;AACxE,SAAS;AACT,QAAQ,sDAAsD,EAAE;AAChE,YAAY,8CAA8C;AAC1D,SAAS;AACT,QAAQ,oDAAoD,EAAE;AAC9D,YAAY,wDAAwD;AACpE,SAAS;AACT,QAAQ,uCAAuC,EAAE;AACjD,YAAY,qCAAqC;AACjD,SAAS;AACT,QAAQ,qCAAqC,EAAE;AAC/C,YAAY,+CAA+C;AAC3D,SAAS;AACT,QAAQ,oBAAoB,EAAE,CAAC,iDAAiD,CAAC;AACjF,QAAQ,eAAe,EAAE,CAAC,4CAA4C,CAAC;AACvE,QAAQ,YAAY,EAAE,CAAC,+CAA+C,CAAC;AACvE,QAAQ,2BAA2B,EAAE;AACrC,YAAY,qEAAqE;AACjF,SAAS;AACT,QAAQ,kBAAkB,EAAE;AAC5B,YAAY,+CAA+C;AAC3D,YAAY,EAAE;AACd,YAAY,EAAE,OAAO,EAAE,CAAC,SAAS,EAAE,uCAAuC,CAAC,EAAE;AAC7E,SAAS;AACT,QAAQ,gBAAgB,EAAE,CAAC,sDAAsD,CAAC;AAClF,QAAQ,aAAa,EAAE,CAA
C,yDAAyD,CAAC;AAClF,QAAQ,gBAAgB,EAAE;AAC1B,YAAY,2DAA2D;AACvE,SAAS;AACT,QAAQ,yBAAyB,EAAE,CAAC,6CAA6C,CAAC;AAClF,QAAQ,0BAA0B,EAAE;AACpC,YAAY,uDAAuD;AACnE,SAAS;AACT,QAAQ,WAAW,EAAE,CAAC,2DAA2D,CAAC;AAClF,QAAQ,6BAA6B,EAAE;AACvC,YAAY,sDAAsD;AAClE,SAAS;AACT,QAAQ,cAAc,EAAE,CAAC,iDAAiD,CAAC;AAC3E,QAAQ,qBAAqB,EAAE;AAC/B,YAAY,2EAA2E;AACvF,SAAS;AACT,QAAQ,mBAAmB,EAAE;AAC7B,YAAY,wDAAwD;AACpE,SAAS;AACT,QAAQ,gBAAgB,EAAE;AAC1B,YAAY,kEAAkE;AAC9E,SAAS;AACT,QAAQ,oBAAoB,EAAE,CAAC,6CAA6C,CAAC;AAC7E,QAAQ,sBAAsB,EAAE;AAChC,YAAY,2EAA2E;AACvF,SAAS;AACT,QAAQ,sBAAsB,EAAE;AAChC,YAAY,sDAAsD;AAClE,SAAS;AACT,QAAQ,6BAA6B,EAAE;AACvC,YAAY,gFAAgF;AAC5F,SAAS;AACT,QAAQ,mCAAmC,EAAE;AAC7C,YAAY,oDAAoD;AAChE,SAAS;AACT,QAAQ,oCAAoC,EAAE;AAC9C,YAAY,8DAA8D;AAC1E,SAAS;AACT,QAAQ,cAAc,EAAE,CAAC,iCAAiC,CAAC;AAC3D,QAAQ,eAAe,EAAE,CAAC,2CAA2C,CAAC;AACtE,QAAQ,iBAAiB,EAAE,CAAC,6CAA6C,CAAC;AAC1E,QAAQ,4BAA4B,EAAE,CAAC,2CAA2C,CAAC;AACnF,QAAQ,6BAA6B,EAAE;AACvC,YAAY,qDAAqD;AACjE,SAAS;AACT,QAAQ,6BAA6B,EAAE;AACvC,YAAY,4DAA4D;AACxE,SAAS;AACT,QAAQ,wDAAwD,EAAE;AAClE,YAAY,kDAAkD;AAC9D,SAAS;AACT,QAAQ,2BAA2B,EAAE,CAAC,iCAAiC,CAAC;AACxE,QAAQ,4BAA4B,EAAE,CAAC,2CAA2C,CAAC;AACnF,QAAQ,wBAAwB,EAAE;AAClC,YAAY,2DAA2D;AACvE,SAAS;AACT,QAAQ,gBAAgB,EAAE;AAC1B,YAAY,gEAAgE;AAC5E,SAAS;AACT,QAAQ,uBAAuB,EAAE,CAAC,wCAAwC,CAAC;AAC3E,QAAQ,sBAAsB,EAAE;AAChC,YAAY,wDAAwD;AACpE,SAAS;AACT,QAAQ,aAAa,EAAE,CAAC,wDAAwD,CAAC;AACjF,QAAQ,uBAAuB,EAAE;AACjC,YAAY,oEAAoE;AAChF,SAAS;AACT,QAAQ,+CAA+C,EAAE;AACzD,YAAY,uDAAuD;AACnE,SAAS;AACT,QAAQ,gDAAgD,EAAE;AAC1D,YAAY,iEAAiE;AAC7E,SAAS;AACT,QAAQ,2CAA2C,EAAE;AACrD,YAAY,8DAA8D;AAC1E,SAAS;AACT,QAAQ,4CAA4C,EAAE;AACtD,YAAY,wEAAwE;AACpF,SAAS;AACT,QAAQ,+BAA+B,EAAE;AACzC,YAAY,+EAA+E;AAC3F,SAAS;AACT,QAAQ,8BAA8B,EAAE;AACxC,YAAY,sEAAsE;AAClF,SAAS;AACT,QAAQ,6BAA6B,EAAE;AACvC,YAAY,sDAAsD;AAClE,SAAS;AACT,QAAQ,2BAA2B,EAAE;AACrC,YAAY,gEAAgE;AAC5E,SAAS;AACT,QAAQ,wCAAwC,EAAE;AAClD,YAAY,oDAAoD;AAChE,SAAS;AACT,QAAQ,yCAAyC,EAAE;AACnD,YAAY,8DAA8D;AAC1E,SAAS;AACT,QAAQ,oDAAoD,EAAE;AAC9D,YAAY,4DAA4D;AACxE,SAAS;AACT,QAAQ,sDAAsD,EAAE;AAChE,YAAY,8CAA8C;AAC1D,SAAS;AACT,QAAQ,oDAAoD,EAAE;AAC9D,YAAY,wDAAwD;AACpE,SAAS;AACT,QAAQ,uCAAuC,EAAE;AACjD,YAAY,qCAAqC;AACjD,SAAS;AACT,QAAQ,qCAAqC,EAAE;AAC/C,YAAY,+CAA+C;AAC3D,SAAS;AACT,QAAQ,4BAA4B,EAAE;AACtC,YAAY,4DAA4D;AACxE,SAAS;AACT,QAAQ,uDAAuD,EAAE;AACjE,YAAY,kDAAkD;AAC9D,SAAS;AACT,QAAQ,6BAA6B,EAAE;AACvC,YAAY,sDAAsD;AAClE,SAAS;AACT,KAAK;AACL,IAAI,QAAQ,EAAE;AACd,QAAQ,qCAAqC,EAAE,CAAC,kCAAkC,CAAC;AACnF,QAAQ,sBAAsB,EAAE,CAAC,2CAA2C,CAAC;AAC7E,QAAQ,wBAAwB,EAAE;AAClC,YAAY,wDAAwD;AACpE,SAAS;AACT,QAAQ,QAAQ,EAAE,CAAC,YAAY,CAAC;AAChC,QAAQ,mBAAmB,EAAE,CAAC,wCAAwC,CAAC;AACvE,QAAQ,SAAS,EAAE,CAAC,wCAAwC,CAAC;AAC7D,QAAQ,yCAAyC,EAAE;AACnD,YAAY,qDAAqD;AACjE,SAAS;AACT,QAAQ,8BAA8B,EAAE,CAAC,8BAA8B,CAAC;AACxE,QAAQ,qCAAqC,EAAE,CAAC,oBAAoB,CAAC;AACrE,QAAQ,iCAAiC,EAAE;AAC3C,YAAY,yCAAyC;AACrD,SAAS;AACT,QAAQ,gBAAgB,EAAE,CAAC,aAAa,CAAC;AACzC,QAAQ,8BAA8B,EAAE,CAAC,qCAAqC,CAAC;AAC/E,QAAQ,uBAAuB,EAAE,CAAC,qCAAqC,CAAC;AACxE,QAAQ,mBAAmB,EAAE,CAAC,wBAAwB,CAAC;AACvD,QAAQ,yBAAyB,EAAE,CAAC,uCAAuC,CAAC;AAC5E,QAAQ,+BAA+B,EAAE;AACzC,YAAY,8CAA8C;AAC1D,SAAS;AACT,QAAQ,cAAc,EAAE,CAAC,kCAAkC,CAAC;AAC5D,QAAQ,yCAAyC,EAAE;AACnD,YAAY,yCAAyC;AACrD,SAAS;AACT,QAAQ,mCAAmC,EAAE,CAAC,mBAAmB,CAAC;AAClE,QAAQ,sBAAsB,EAAE,CAAC,+BAA+B,CAAC;AACjE,QAAQ,sBAAsB,EAAE,CAAC,qCAAqC,CAAC;AACvE,QAAQ,qBAAqB,EAAE,CAAC,sCAAsC,CAAC;AACvE,QAAQ,oCAAoC,EAAE,CAAC,yBAAyB,CAAC;AACzE,QAAQ,mBAAmB,EAAE,CAAC,uCAAuC,CAAC;AACtE,QAAQ,uBAAuB,EAAE,CAAC,oBAAoB,CAAC;AACvD,QAAQ,2BAA2B,EAAE,CAAC,yCAAyC,CAAC;AAChF,QAAQ,gBAAgB,EAAE,CAAC,0CAA0C,CAAC;AACtE,QAAQ,mBAAmB,EAAE,CAAC,wCAAwC,CAAC;AACvE,QAAQ,qBAAqB,EAAE;AAC/B,YAAY,qDAAqD
;AACjE,SAAS;AACT,QAAQ,4BAA4B,EAAE,CAAC,kCAAkC,CAAC;AAC1E,QAAQ,8BAA8B,EAAE,CAAC,qCAAqC,CAAC;AAC/E,KAAK;AACL,IAAI,IAAI,EAAE;AACV,QAAQ,qBAAqB,EAAE;AAC/B,YAAY,wEAAwE;AACpF,YAAY,EAAE;AACd,YAAY,EAAE,OAAO,EAAE,CAAC,MAAM,EAAE,2CAA2C,CAAC,EAAE;AAC9E,SAAS;AACT,QAAQ,yCAAyC,EAAE;AACnD,YAAY,wEAAwE;AACpF,SAAS;AACT,QAAQ,UAAU,EAAE,CAAC,sCAAsC,CAAC;AAC5D,QAAQ,kBAAkB,EAAE,CAAC,wCAAwC,CAAC;AACtE,QAAQ,6BAA6B,EAAE;AACvC,YAAY,yDAAyD;AACrE,SAAS;AACT,QAAQ,mBAAmB,EAAE,CAAC,wCAAwC,CAAC;AACvE,QAAQ,kBAAkB,EAAE,CAAC,6CAA6C,CAAC;AAC3E,QAAQ,WAAW,EAAE,CAAC,wCAAwC,CAAC;AAC/D,QAAQ,gBAAgB,EAAE,CAAC,UAAU,CAAC;AACtC,QAAQ,SAAS,EAAE,CAAC,sBAAsB,CAAC;AAC3C,QAAQ,eAAe,EAAE,CAAC,0CAA0C,CAAC;AACrE,QAAQ,kBAAkB,EAAE,CAAC,8BAA8B,CAAC;AAC5D,QAAQ,mBAAmB,EAAE,CAAC,wCAAwC,CAAC;AACvE,QAAQ,6BAA6B,EAAE;AACvC,YAAY,gDAAgD;AAC5D,SAAS;AACT,QAAQ,oCAAoC,EAAE;AAC9C,YAAY,wDAAwD;AACpE,SAAS;AACT,QAAQ,mBAAmB,EAAE,CAAC,oCAAoC,CAAC;AACnE,QAAQ,sBAAsB,EAAE,CAAC,sBAAsB,CAAC;AACxD,QAAQ,kBAAkB,EAAE,CAAC,wCAAwC,CAAC;AACtE,QAAQ,mBAAmB,EAAE,CAAC,mDAAmD,CAAC;AAClF,QAAQ,0BAA0B,EAAE;AACpC,YAAY,2DAA2D;AACvE,SAAS;AACT,QAAQ,yCAAyC,EAAE;AACnD,YAAY,wDAAwD;AACpE,SAAS;AACT,QAAQ,iBAAiB,EAAE,CAAC,wBAAwB,CAAC;AACrD,QAAQ,qCAAqC,EAAE,CAAC,yBAAyB,CAAC;AAC1E,QAAQ,SAAS,EAAE,CAAC,gCAAgC,CAAC;AACrD,QAAQ,gBAAgB,EAAE,CAAC,wCAAwC,CAAC;AACpE,QAAQ,iCAAiC,EAAE,CAAC,gCAAgC,CAAC;AAC7E,QAAQ,qCAAqC,EAAE,CAAC,iCAAiC,CAAC;AAClF,QAAQ,4CAA4C,EAAE;AACtD,YAAY,yCAAyC;AACrD,SAAS;AACT,QAAQ,qBAAqB,EAAE,CAAC,0BAA0B,CAAC;AAC3D,QAAQ,wBAAwB,EAAE;AAClC,YAAY,kDAAkD;AAC9D,SAAS;AACT,QAAQ,0BAA0B,EAAE;AACpC,YAAY,2EAA2E;AACvF,YAAY,EAAE;AACd,YAAY,EAAE,OAAO,EAAE,CAAC,MAAM,EAAE,gDAAgD,CAAC,EAAE;AACnF,SAAS;AACT,QAAQ,8CAA8C,EAAE;AACxD,YAAY,2EAA2E;AACvF,SAAS;AACT,QAAQ,UAAU,EAAE,CAAC,uCAAuC,CAAC;AAC7D,QAAQ,6BAA6B,EAAE,CAAC,4BAA4B,CAAC;AACrE,QAAQ,UAAU,EAAE,CAAC,6CAA6C,CAAC;AACnE,QAAQ,mBAAmB,EAAE,CAAC,oDAAoD,CAAC;AACnF,QAAQ,qBAAqB,EAAE;AAC/B,YAAY,uDAAuD;AACnE,SAAS;AACT,QAAQ,yBAAyB,EAAE,CAAC,wBAAwB,CAAC;AAC7D,KAAK;AACL,IAAI,OAAO,EAAE;AACb,QAAQ,0BAA0B,EAAE,CAAC,0CAA0C,CAAC;AAChF,QAAQ,2BAA2B,EAAE;AACrC,YAAY,gDAAgD;AAC5D,SAAS;AACT,QAAQ,mCAAmC,EAAE;AAC7C,YAAY,kEAAkE;AAC9E,SAAS;AACT,QAAQ,mCAAmC,EAAE;AAC7C,YAAY,oDAAoD;AAChE,SAAS;AACT,QAAQ,2BAA2B,EAAE,CAAC,2CAA2C,CAAC;AAClF,QAAQ,4BAA4B,EAAE;AACtC,YAAY,iDAAiD;AAC7D,SAAS;AACT,QAAQ,0BAA0B,EAAE;AACpC,YAAY,iDAAiD;AAC7D,SAAS;AACT,QAAQ,2BAA2B,EAAE;AACrC,YAAY,uDAAuD;AACnE,SAAS;AACT,KAAK;AACL,IAAI,MAAM,EAAE;AACZ,QAAQ,MAAM,EAAE,CAAC,uCAAuC,CAAC;AACzD,QAAQ,WAAW,EAAE,CAAC,yCAAyC,CAAC;AAChE,QAAQ,GAAG,EAAE,CAAC,qDAAqD,CAAC;AACpE,QAAQ,QAAQ,EAAE,CAAC,yDAAyD,CAAC;AAC7E,QAAQ,eAAe,EAAE;AACzB,YAAY,iEAAiE;AAC7E,SAAS;AACT,QAAQ,UAAU,EAAE,CAAC,oDAAoD,CAAC;AAC1E,QAAQ,YAAY,EAAE;AACtB,YAAY,oEAAoE;AAChF,SAAS;AACT,QAAQ,gBAAgB,EAAE,CAAC,sDAAsD,CAAC;AAClF,QAAQ,YAAY,EAAE;AACtB,YAAY,gEAAgE;AAC5E,SAAS;AACT,QAAQ,cAAc,EAAE;AACxB,YAAY,oEAAoE;AAChF,SAAS;AACT,QAAQ,oBAAoB,EAAE;AAC9B,YAAY,sDAAsD;AAClE,SAAS;AACT,QAAQ,MAAM,EAAE,CAAC,uDAAuD,CAAC;AACzE,KAAK;AACL,IAAI,YAAY,EAAE;AAClB,QAAQ,cAAc,EAAE;AACxB,YAAY,oFAAoF;AAChG,SAAS;AACT,QAAQ,QAAQ,EAAE;AAClB,YAAY,+DAA+D;AAC3E,YAAY,EAAE;AACd,YAAY,EAAE,iBAAiB,EAAE,EAAE,QAAQ,EAAE,cAAc,EAAE,EAAE;AAC/D,SAAS;AACT,QAAQ,WAAW,EAAE;AACrB,YAAY,gEAAgE;AAC5E,SAAS;AACT,QAAQ,QAAQ,EAAE,CAAC,2DAA2D,CAAC;AAC/E,QAAQ,kBAAkB,EAAE;AAC5B,YAAY,yEAAyE;AACrF,SAAS;AACT,QAAQ,gBAAgB,EAAE,CAAC,sCAAsC,CAAC;AAClE,QAAQ,iBAAiB,EAAE,CAAC,gDAAgD,CAAC;AAC7E,QAAQ,mBAAmB,EAAE;AAC7B,YAAY,yEAAyE;AACrF,YAAY,EAAE;AACd,YAAY,EAAE,OAAO,EAAE,CAAC,cAAc,EAAE,oBAAoB,CAAC,EAAE;AAC/D,SAAS;AACT,QAAQ,kBAAkB,EAAE,CAAC,kDAAkD,CAAC;AAChF,QAAQ,WAAW,EAAE;AACrB,YAAY,iEAAiE;AAC7E,SAAS;AACT,QAAQ,WAAW,EAAE,CAAC,iDAAiD,CAAC;AACxE,KAAK;AACL,IAAI
,cAAc,EAAE;AACpB,QAAQ,oBAAoB,EAAE,CAAC,uBAAuB,CAAC;AACvD,QAAQ,cAAc,EAAE,CAAC,6BAA6B,CAAC;AACvD,KAAK;AACL,IAAI,UAAU,EAAE;AAChB,QAAQ,0CAA0C,EAAE;AACpD,YAAY,yEAAyE;AACrF,SAAS;AACT,QAAQ,qCAAqC,EAAE;AAC/C,YAAY,gDAAgD;AAC5D,SAAS;AACT,QAAQ,0BAA0B,EAAE,CAAC,uBAAuB,CAAC;AAC7D,QAAQ,wBAAwB,EAAE;AAClC,YAAY,4DAA4D;AACxE,SAAS;AACT,QAAQ,wCAAwC,EAAE;AAClD,YAAY,4CAA4C;AACxD,SAAS;AACT,QAAQ,gCAAgC,EAAE;AAC1C,YAAY,2DAA2D;AACvE,SAAS;AACT,QAAQ,kCAAkC,EAAE;AAC5C,YAAY,uCAAuC;AACnD,SAAS;AACT,QAAQ,0BAA0B,EAAE,CAAC,0CAA0C,CAAC;AAChF,QAAQ,sBAAsB,EAAE;AAChC,YAAY,mEAAmE;AAC/E,SAAS;AACT,QAAQ,gBAAgB,EAAE;AAC1B,YAAY,+DAA+D;AAC3E,SAAS;AACT,QAAQ,gCAAgC,EAAE;AAC1C,YAAY,+CAA+C;AAC3D,SAAS;AACT,QAAQ,0BAA0B,EAAE;AACpC,YAAY,gDAAgD;AAC5D,SAAS;AACT,QAAQ,oCAAoC,EAAE;AAC9C,YAAY,2DAA2D;AACvE,SAAS;AACT,QAAQ,uBAAuB,EAAE,CAAC,uCAAuC,CAAC;AAC1E,QAAQ,gCAAgC,EAAE;AAC1C,YAAY,yCAAyC;AACrD,SAAS;AACT,QAAQ,gBAAgB,EAAE;AAC1B,YAAY,yDAAyD;AACrE,SAAS;AACT,QAAQ,aAAa,EAAE;AACvB,YAAY,4DAA4D;AACxE,SAAS;AACT,QAAQ,6BAA6B,EAAE;AACvC,YAAY,4CAA4C;AACxD,SAAS;AACT,QAAQ,iDAAiD,EAAE;AAC3D,YAAY,oDAAoD;AAChE,SAAS;AACT,QAAQ,wBAAwB,EAAE,CAAC,sBAAsB,CAAC;AAC1D,QAAQ,kBAAkB,EAAE;AAC5B,YAAY,4BAA4B;AACxC,YAAY,EAAE;AACd,YAAY,EAAE,iBAAiB,EAAE,EAAE,MAAM,EAAE,KAAK,EAAE,EAAE;AACpD,SAAS;AACT,QAAQ,oCAAoC,EAAE;AAC9C,YAAY,sCAAsC;AAClD,SAAS;AACT,QAAQ,eAAe,EAAE,CAAC,8CAA8C,CAAC;AACzE,QAAQ,6CAA6C,EAAE;AACvD,YAAY,yDAAyD;AACrE,SAAS;AACT,QAAQ,+BAA+B,EAAE,CAAC,8BAA8B,CAAC;AACzE,QAAQ,6CAA6C,EAAE;AACvD,YAAY,4EAA4E;AACxF,SAAS;AACT,QAAQ,gCAAgC,EAAE;AAC1C,YAAY,+CAA+C;AAC3D,SAAS;AACT,QAAQ,4CAA4C,EAAE;AACtD,YAAY,yDAAyD;AACrE,SAAS;AACT,QAAQ,yBAAyB,EAAE,CAAC,8CAA8C,CAAC;AACnF,QAAQ,wBAAwB,EAAE,CAAC,6CAA6C,CAAC;AACjF,QAAQ,kBAAkB,EAAE;AAC5B,YAAY,sEAAsE;AAClF,SAAS;AACT,QAAQ,0BAA0B,EAAE,CAAC,yCAAyC,CAAC;AAC/E,KAAK;AACL,IAAI,UAAU,EAAE;AAChB,QAAQ,0BAA0B,EAAE;AACpC,YAAY,+EAA+E;AAC3F,SAAS;AACT,QAAQ,uBAAuB,EAAE;AACjC,YAAY,kDAAkD;AAC9D,SAAS;AACT,QAAQ,wBAAwB,EAAE;AAClC,YAAY,4DAA4D;AACxE,SAAS;AACT,QAAQ,eAAe,EAAE,CAAC,qDAAqD,CAAC;AAChF,QAAQ,gBAAgB,EAAE;AAC1B,YAAY,+DAA+D;AAC3E,SAAS;AACT,QAAQ,eAAe,EAAE,CAAC,+CAA+C,CAAC;AAC1E,QAAQ,YAAY,EAAE,CAAC,kDAAkD,CAAC;AAC1E,QAAQ,gBAAgB,EAAE;AAC1B,YAAY,yDAAyD;AACrE,SAAS;AACT,QAAQ,aAAa,EAAE;AACvB,YAAY,4DAA4D;AACxE,SAAS;AACT,QAAQ,cAAc,EAAE,CAAC,oCAAoC,CAAC;AAC9D,QAAQ,eAAe,EAAE,CAAC,8CAA8C,CAAC;AACzE,QAAQ,6BAA6B,EAAE;AACvC,YAAY,+DAA+D;AAC3E,SAAS;AACT,QAAQ,+BAA+B,EAAE;AACzC,YAAY,kFAAkF;AAC9F,SAAS;AACT,QAAQ,4BAA4B,EAAE;AACtC,YAAY,+DAA+D;AAC3E,SAAS;AACT,KAAK;AACL,IAAI,eAAe,EAAE;AACrB,QAAQ,wBAAwB,EAAE;AAClC,YAAY,uDAAuD;AACnE,SAAS;AACT,QAAQ,SAAS,EAAE;AACnB,YAAY,+DAA+D;AAC3E,SAAS;AACT,KAAK;AACL,IAAI,MAAM,EAAE,EAAE,GAAG,EAAE,CAAC,aAAa,CAAC,EAAE;AACpC,IAAI,eAAe,EAAE;AACrB,QAAQ,8CAA8C,EAAE;AACxD,YAAY,mEAAmE;AAC/E,SAAS;AACT,QAAQ,kDAAkD,EAAE;AAC5D,YAAY,6EAA6E;AACzF,SAAS;AACT,QAAQ,iDAAiD,EAAE;AAC3D,YAAY,0EAA0E;AACtF,SAAS;AACT,QAAQ,2BAA2B,EAAE;AACrC,YAAY,oEAAoE;AAChF,SAAS;AACT,QAAQ,qCAAqC,EAAE;AAC/C,YAAY,mDAAmD;AAC/D,SAAS;AACT,QAAQ,mBAAmB,EAAE;AAC7B,YAAY,oEAAoE;AAChF,SAAS;AACT,QAAQ,0CAA0C,EAAE;AACpD,YAAY,kEAAkE;AAC9E,SAAS;AACT,QAAQ,uDAAuD,EAAE;AACjE,YAAY,iEAAiE;AAC7E,SAAS;AACT,QAAQ,sDAAsD,EAAE;AAChE,YAAY,qEAAqE;AACjF,SAAS;AACT,QAAQ,kDAAkD,EAAE;AAC5D,YAAY,4EAA4E;AACxF,SAAS;AACT,QAAQ,2BAA2B,EAAE;AACrC,YAAY,oEAAoE;AAChF,SAAS;AACT,QAAQ,+CAA+C,EAAE;AACzD,YAAY,kEAAkE;AAC9E,SAAS;AACT,QAAQ,qCAAqC,EAAE;AAC/C,YAAY,mDAAmD;AAC/D,SAAS;AACT,QAAQ,sDAAsD,EAAE;AAChE,YAAY,iEAAiE;AAC7E,SAAS;AACT,KAAK;AACL,IAAI,KAAK,EAAE;AACX,QAAQ,cAAc,EAAE,CAAC,2BAA2B,CAAC;AACrD,QAAQ,MAAM,EAAE,CAAC,aAAa,CAAC;AAC/B,QAAQ,aAAa,EAAE,CAAC,gCAAgC,CAAC;AACzD,QAAQ,MAAM,EAAE,CAAC,yBAAyB,CAAC;AAC3C,QAAQ,aAAa,EAAE,CAAC,+CAA+C,CAAC;AACxE
,QAAQ,IAAI,EAAE,CAAC,6BAA6B,CAAC;AAC7C,QAAQ,GAAG,EAAE,CAAC,sBAAsB,CAAC;AACrC,QAAQ,UAAU,EAAE,CAAC,4CAA4C,CAAC;AAClE,QAAQ,WAAW,EAAE,CAAC,4BAA4B,CAAC;AACnD,QAAQ,IAAI,EAAE,CAAC,YAAY,CAAC;AAC5B,QAAQ,YAAY,EAAE,CAAC,+BAA+B,CAAC;AACvD,QAAQ,WAAW,EAAE,CAAC,8BAA8B,CAAC;AACrD,QAAQ,WAAW,EAAE,CAAC,6BAA6B,CAAC;AACpD,QAAQ,SAAS,EAAE,CAAC,4BAA4B,CAAC;AACjD,QAAQ,UAAU,EAAE,CAAC,mBAAmB,CAAC;AACzC,QAAQ,WAAW,EAAE,CAAC,oBAAoB,CAAC;AAC3C,QAAQ,IAAI,EAAE,CAAC,2BAA2B,CAAC;AAC3C,QAAQ,MAAM,EAAE,CAAC,8BAA8B,CAAC;AAChD,QAAQ,MAAM,EAAE,CAAC,wBAAwB,CAAC;AAC1C,QAAQ,aAAa,EAAE,CAAC,8CAA8C,CAAC;AACvE,KAAK;AACL,IAAI,GAAG,EAAE;AACT,QAAQ,UAAU,EAAE,CAAC,sCAAsC,CAAC;AAC5D,QAAQ,YAAY,EAAE,CAAC,wCAAwC,CAAC;AAChE,QAAQ,SAAS,EAAE,CAAC,qCAAqC,CAAC;AAC1D,QAAQ,SAAS,EAAE,CAAC,qCAAqC,CAAC;AAC1D,QAAQ,UAAU,EAAE,CAAC,sCAAsC,CAAC;AAC5D,QAAQ,SAAS,EAAE,CAAC,6CAA6C,CAAC;AAClE,QAAQ,OAAO,EAAE,CAAC,gDAAgD,CAAC;AACnE,QAAQ,SAAS,EAAE,CAAC,oDAAoD,CAAC;AACzE,QAAQ,MAAM,EAAE,CAAC,yCAAyC,CAAC;AAC3D,QAAQ,MAAM,EAAE,CAAC,8CAA8C,CAAC;AAChE,QAAQ,OAAO,EAAE,CAAC,gDAAgD,CAAC;AACnE,QAAQ,gBAAgB,EAAE,CAAC,mDAAmD,CAAC;AAC/E,QAAQ,SAAS,EAAE,CAAC,4CAA4C,CAAC;AACjE,KAAK;AACL,IAAI,SAAS,EAAE;AACf,QAAQ,eAAe,EAAE,CAAC,0BAA0B,CAAC;AACrD,QAAQ,WAAW,EAAE,CAAC,iCAAiC,CAAC;AACxD,KAAK;AACL,IAAI,YAAY,EAAE;AAClB,QAAQ,mCAAmC,EAAE,CAAC,8BAA8B,CAAC;AAC7E,QAAQ,qBAAqB,EAAE,CAAC,oCAAoC,CAAC;AACrE,QAAQ,sBAAsB,EAAE,CAAC,8CAA8C,CAAC;AAChF,QAAQ,iCAAiC,EAAE;AAC3C,YAAY,8BAA8B;AAC1C,YAAY,EAAE;AACd,YAAY,EAAE,OAAO,EAAE,CAAC,cAAc,EAAE,qCAAqC,CAAC,EAAE;AAChF,SAAS;AACT,QAAQ,sCAAsC,EAAE,CAAC,iCAAiC,CAAC;AACnF,QAAQ,wBAAwB,EAAE,CAAC,uCAAuC,CAAC;AAC3E,QAAQ,yBAAyB,EAAE;AACnC,YAAY,iDAAiD;AAC7D,SAAS;AACT,QAAQ,oCAAoC,EAAE;AAC9C,YAAY,iCAAiC;AAC7C,YAAY,EAAE;AACd,YAAY,EAAE,OAAO,EAAE,CAAC,cAAc,EAAE,wCAAwC,CAAC,EAAE;AACnF,SAAS;AACT,QAAQ,mCAAmC,EAAE,CAAC,8BAA8B,CAAC;AAC7E,QAAQ,qBAAqB,EAAE,CAAC,oCAAoC,CAAC;AACrE,QAAQ,sBAAsB,EAAE,CAAC,8CAA8C,CAAC;AAChF,QAAQ,iCAAiC,EAAE;AAC3C,YAAY,8BAA8B;AAC1C,YAAY,EAAE;AACd,YAAY,EAAE,OAAO,EAAE,CAAC,cAAc,EAAE,qCAAqC,CAAC,EAAE;AAChF,SAAS;AACT,KAAK;AACL,IAAI,MAAM,EAAE;AACZ,QAAQ,YAAY,EAAE;AACtB,YAAY,4DAA4D;AACxE,SAAS;AACT,QAAQ,SAAS,EAAE,CAAC,yDAAyD,CAAC;AAC9E,QAAQ,sBAAsB,EAAE,CAAC,gDAAgD,CAAC;AAClF,QAAQ,MAAM,EAAE,CAAC,mCAAmC,CAAC;AACrD,QAAQ,aAAa,EAAE;AACvB,YAAY,2DAA2D;AACvE,SAAS;AACT,QAAQ,WAAW,EAAE,CAAC,mCAAmC,CAAC;AAC1D,QAAQ,eAAe,EAAE,CAAC,uCAAuC,CAAC;AAClE,QAAQ,aAAa,EAAE;AACvB,YAAY,2DAA2D;AACvE,SAAS;AACT,QAAQ,WAAW,EAAE,CAAC,4CAA4C,CAAC;AACnE,QAAQ,eAAe,EAAE;AACzB,YAAY,4DAA4D;AACxE,SAAS;AACT,QAAQ,GAAG,EAAE,CAAC,iDAAiD,CAAC;AAChE,QAAQ,UAAU,EAAE,CAAC,wDAAwD,CAAC;AAC9E,QAAQ,QAAQ,EAAE,CAAC,oDAAoD,CAAC;AACxE,QAAQ,QAAQ,EAAE,CAAC,yCAAyC,CAAC;AAC7D,QAAQ,YAAY,EAAE,CAAC,yDAAyD,CAAC;AACjF,QAAQ,IAAI,EAAE,CAAC,aAAa,CAAC;AAC7B,QAAQ,aAAa,EAAE,CAAC,qCAAqC,CAAC;AAC9D,QAAQ,YAAY,EAAE,CAAC,0DAA0D,CAAC;AAClF,QAAQ,mBAAmB,EAAE,CAAC,2CAA2C,CAAC;AAC1E,QAAQ,UAAU,EAAE,CAAC,wDAAwD,CAAC;AAC9E,QAAQ,iBAAiB,EAAE,CAAC,yCAAyC,CAAC;AACtE,QAAQ,qBAAqB,EAAE;AAC/B,YAAY,0DAA0D;AACtE,SAAS;AACT,QAAQ,wBAAwB,EAAE,CAAC,kBAAkB,CAAC;AACtD,QAAQ,UAAU,EAAE,CAAC,wBAAwB,CAAC;AAC9C,QAAQ,WAAW,EAAE,CAAC,kCAAkC,CAAC;AACzD,QAAQ,sBAAsB,EAAE;AAChC,YAAY,gEAAgE;AAC5E,SAAS;AACT,QAAQ,iBAAiB,EAAE,CAAC,kCAAkC,CAAC;AAC/D,QAAQ,iBAAiB,EAAE;AAC3B,YAAY,wDAAwD;AACpE,SAAS;AACT,QAAQ,cAAc,EAAE,CAAC,sCAAsC,CAAC;AAChE,QAAQ,IAAI,EAAE,CAAC,sDAAsD,CAAC;AACtE,QAAQ,eAAe,EAAE;AACzB,YAAY,2DAA2D;AACvE,SAAS;AACT,QAAQ,eAAe,EAAE;AACzB,YAAY,8DAA8D;AAC1E,SAAS;AACT,QAAQ,WAAW,EAAE;AACrB,YAAY,kEAAkE;AAC9E,SAAS;AACT,QAAQ,SAAS,EAAE,CAAC,wDAAwD,CAAC;AAC7E,QAAQ,MAAM,EAAE,CAAC,yDAAyD,CAAC;AAC3E,QAAQ,MAAM,EAAE,CAAC,mDAAmD,CAAC;AACrE,QAAQ,aAAa,EAAE,CAAC,0DAA0D,CAAC;AACnF,QAAQ,WAAW,EAAE,CAAC,
2CAA2C,CAAC;AAClE,QAAQ,eAAe,EAAE;AACzB,YAAY,2DAA2D;AACvE,SAAS;AACT,KAAK;AACL,IAAI,QAAQ,EAAE;AACd,QAAQ,GAAG,EAAE,CAAC,yBAAyB,CAAC;AACxC,QAAQ,kBAAkB,EAAE,CAAC,eAAe,CAAC;AAC7C,QAAQ,UAAU,EAAE,CAAC,mCAAmC,CAAC;AACzD,KAAK;AACL,IAAI,QAAQ,EAAE;AACd,QAAQ,MAAM,EAAE,CAAC,gBAAgB,CAAC;AAClC,QAAQ,SAAS,EAAE;AACnB,YAAY,oBAAoB;AAChC,YAAY,EAAE,OAAO,EAAE,EAAE,cAAc,EAAE,2BAA2B,EAAE,EAAE;AACxE,SAAS;AACT,KAAK;AACL,IAAI,IAAI,EAAE;AACV,QAAQ,GAAG,EAAE,CAAC,WAAW,CAAC;AAC1B,QAAQ,UAAU,EAAE,CAAC,cAAc,CAAC;AACpC,QAAQ,MAAM,EAAE,CAAC,UAAU,CAAC;AAC5B,QAAQ,IAAI,EAAE,CAAC,OAAO,CAAC;AACvB,KAAK;AACL,IAAI,UAAU,EAAE;AAChB,QAAQ,YAAY,EAAE,CAAC,qCAAqC,CAAC;AAC7D,QAAQ,iCAAiC,EAAE;AAC3C,YAAY,gDAAgD;AAC5D,SAAS;AACT,QAAQ,mBAAmB,EAAE;AAC7B,YAAY,sDAAsD;AAClE,SAAS;AACT,QAAQ,qBAAqB,EAAE;AAC/B,YAAY,mDAAmD;AAC/D,SAAS;AACT,QAAQ,8BAA8B,EAAE;AACxC,YAAY,6CAA6C;AACzD,SAAS;AACT,QAAQ,gBAAgB,EAAE,CAAC,0CAA0C,CAAC;AACtE,QAAQ,eAAe,EAAE,CAAC,kCAAkC,CAAC;AAC7D,QAAQ,aAAa,EAAE,CAAC,8CAA8C,CAAC;AACvE,QAAQ,6BAA6B,EAAE,CAAC,qCAAqC,CAAC;AAC9E,QAAQ,eAAe,EAAE,CAAC,2CAA2C,CAAC;AACtE,QAAQ,wBAAwB,EAAE,CAAC,sBAAsB,CAAC;AAC1D,QAAQ,UAAU,EAAE,CAAC,4BAA4B,CAAC;AAClD,QAAQ,6BAA6B,EAAE;AACvC,YAAY,kDAAkD;AAC9D,SAAS;AACT,QAAQ,eAAe,EAAE,CAAC,wDAAwD,CAAC;AACnF,QAAQ,gBAAgB,EAAE;AAC1B,YAAY,kDAAkD;AAC9D,YAAY,EAAE;AACd,YAAY,EAAE,OAAO,EAAE,CAAC,YAAY,EAAE,+BAA+B,CAAC,EAAE;AACxE,SAAS;AACT,QAAQ,eAAe,EAAE,CAAC,wDAAwD,CAAC;AACnF,QAAQ,gBAAgB,EAAE,CAAC,wCAAwC,CAAC;AACpE,QAAQ,yBAAyB,EAAE,CAAC,uBAAuB,CAAC;AAC5D,QAAQ,WAAW,EAAE,CAAC,6BAA6B,CAAC;AACpD,QAAQ,WAAW,EAAE,CAAC,kCAAkC,CAAC;AACzD,QAAQ,8BAA8B,EAAE;AACxC,YAAY,+DAA+D;AAC3E,SAAS;AACT,QAAQ,gBAAgB,EAAE;AAC1B,YAAY,qEAAqE;AACjF,SAAS;AACT,QAAQ,YAAY,EAAE,CAAC,oCAAoC,CAAC;AAC5D,KAAK;AACL,IAAI,IAAI,EAAE;AACV,QAAQ,SAAS,EAAE,CAAC,mCAAmC,CAAC;AACxD,QAAQ,gBAAgB,EAAE,CAAC,gDAAgD,CAAC;AAC5E,QAAQ,gBAAgB,EAAE,CAAC,mCAAmC,CAAC;AAC/D,QAAQ,sBAAsB,EAAE,CAAC,oCAAoC,CAAC;AACtE,QAAQ,4BAA4B,EAAE,CAAC,2CAA2C,CAAC;AACnF,QAAQ,kCAAkC,EAAE;AAC5C,YAAY,kDAAkD;AAC9D,SAAS;AACT,QAAQ,gBAAgB,EAAE,CAAC,8BAA8B,CAAC;AAC1D,QAAQ,aAAa,EAAE,CAAC,wBAAwB,CAAC;AACjD,QAAQ,aAAa,EAAE,CAAC,oCAAoC,CAAC;AAC7D,QAAQ,GAAG,EAAE,CAAC,iBAAiB,CAAC;AAChC,QAAQ,iCAAiC,EAAE,CAAC,kCAAkC,CAAC;AAC/E,QAAQ,oBAAoB,EAAE,CAAC,wCAAwC,CAAC;AACxE,QAAQ,UAAU,EAAE,CAAC,iCAAiC,CAAC;AACvD,QAAQ,sBAAsB,EAAE,CAAC,wCAAwC,CAAC;AAC1E,QAAQ,kBAAkB,EAAE;AAC5B,YAAY,0DAA0D;AACtE,SAAS;AACT,QAAQ,IAAI,EAAE,CAAC,oBAAoB,CAAC;AACpC,QAAQ,oBAAoB,EAAE,CAAC,+BAA+B,CAAC;AAC/D,QAAQ,gBAAgB,EAAE,CAAC,wBAAwB,CAAC;AACpD,QAAQ,eAAe,EAAE,CAAC,mDAAmD,CAAC;AAC9E,QAAQ,qBAAqB,EAAE,CAAC,oCAAoC,CAAC;AACrE,QAAQ,wBAAwB,EAAE,CAAC,gBAAgB,CAAC;AACpD,QAAQ,WAAW,EAAE,CAAC,4BAA4B,CAAC;AACnD,QAAQ,mBAAmB,EAAE,CAAC,mDAAmD,CAAC;AAClF,QAAQ,WAAW,EAAE,CAAC,yBAAyB,CAAC;AAChD,QAAQ,mCAAmC,EAAE,CAAC,4BAA4B,CAAC;AAC3E,QAAQ,wBAAwB,EAAE,CAAC,uCAAuC,CAAC;AAC3E,QAAQ,sBAAsB,EAAE,CAAC,6BAA6B,CAAC;AAC/D,QAAQ,iBAAiB,EAAE,CAAC,gCAAgC,CAAC;AAC7D,QAAQ,qBAAqB,EAAE,CAAC,4CAA4C,CAAC;AAC7E,QAAQ,YAAY,EAAE,CAAC,uBAAuB,CAAC;AAC/C,QAAQ,WAAW,EAAE,CAAC,wCAAwC,CAAC;AAC/D,QAAQ,wBAAwB,EAAE;AAClC,YAAY,oEAAoE;AAChF,SAAS;AACT,QAAQ,YAAY,EAAE,CAAC,uCAAuC,CAAC;AAC/D,QAAQ,uBAAuB,EAAE,CAAC,2CAA2C,CAAC;AAC9E,QAAQ,yBAAyB,EAAE;AACnC,YAAY,qDAAqD;AACjE,SAAS;AACT,QAAQ,0CAA0C,EAAE;AACpD,YAAY,8CAA8C;AAC1D,SAAS;AACT,QAAQ,oBAAoB,EAAE,CAAC,wCAAwC,CAAC;AACxE,QAAQ,uCAAuC,EAAE;AACjD,YAAY,2CAA2C;AACvD,SAAS;AACT,QAAQ,WAAW,EAAE,CAAC,sCAAsC,CAAC;AAC7D,QAAQ,MAAM,EAAE,CAAC,mBAAmB,CAAC;AACrC,QAAQ,oCAAoC,EAAE;AAC9C,YAAY,oCAAoC;AAChD,SAAS;AACT,QAAQ,aAAa,EAAE,CAAC,mCAAmC,CAAC;AAC5D,QAAQ,yBAAyB,EAAE,CAAC,0CAA0C,CAAC;AAC/E,KAAK;AACL,IAAI,QAAQ,EAAE;AACd,QAAQ,iCAAiC,EAAE;AAC3C,YAAY,qDAAqD;AACjE,SAAS;AACT,QAAQ,mBAAmB,EAAE;AAC7B,YA
AY,2DAA2D;AACvE,SAAS;AACT,QAAQ,oBAAoB,EAAE;AAC9B,YAAY,iEAAiE;AAC7E,SAAS;AACT,QAAQ,wCAAwC,EAAE;AAClD,YAAY,mFAAmF;AAC/F,SAAS;AACT,QAAQ,0BAA0B,EAAE;AACpC,YAAY,yFAAyF;AACrG,SAAS;AACT,QAAQ,2BAA2B,EAAE;AACrC,YAAY,+FAA+F;AAC3G,SAAS;AACT,QAAQ,4CAA4C,EAAE;AACtD,YAAY,iEAAiE;AAC7E,YAAY,EAAE;AACd,YAAY,EAAE,OAAO,EAAE,CAAC,UAAU,EAAE,2CAA2C,CAAC,EAAE;AAClF,SAAS;AACT,QAAQ,2DAA2D,EAAE;AACrE,YAAY,2DAA2D;AACvE,YAAY,EAAE;AACd,YAAY;AACZ,gBAAgB,OAAO,EAAE;AACzB,oBAAoB,UAAU;AAC9B,oBAAoB,yDAAyD;AAC7E,iBAAiB;AACjB,aAAa;AACb,SAAS;AACT,QAAQ,uDAAuD,EAAE;AACjE,YAAY,2DAA2D;AACvE,SAAS;AACT,QAAQ,yCAAyC,EAAE;AACnD,YAAY,iEAAiE;AAC7E,SAAS;AACT,QAAQ,0CAA0C,EAAE;AACpD,YAAY,uEAAuE;AACnF,SAAS;AACT,QAAQ,8BAA8B,EAAE;AACxC,YAAY,kDAAkD;AAC9D,SAAS;AACT,QAAQ,yBAAyB,EAAE;AACnC,YAAY,wDAAwD;AACpE,SAAS;AACT,QAAQ,iBAAiB,EAAE;AAC3B,YAAY,8DAA8D;AAC1E,SAAS;AACT,QAAQ,qCAAqC,EAAE;AAC/C,YAAY,gFAAgF;AAC5F,SAAS;AACT,QAAQ,gCAAgC,EAAE;AAC1C,YAAY,sFAAsF;AAClG,SAAS;AACT,QAAQ,wBAAwB,EAAE;AAClC,YAAY,4FAA4F;AACxG,SAAS;AACT,QAAQ,gCAAgC,EAAE,CAAC,oBAAoB,CAAC;AAChE,QAAQ,2BAA2B,EAAE,CAAC,0BAA0B,CAAC;AACjE,QAAQ,mBAAmB,EAAE,CAAC,gCAAgC,CAAC;AAC/D,QAAQ,kCAAkC,EAAE;AAC5C,YAAY,mEAAmE;AAC/E,SAAS;AACT,QAAQ,oBAAoB,EAAE;AAC9B,YAAY,yEAAyE;AACrF,SAAS;AACT,QAAQ,qBAAqB,EAAE;AAC/B,YAAY,+EAA+E;AAC3F,SAAS;AACT,QAAQ,yCAAyC,EAAE;AACnD,YAAY,yFAAyF;AACrG,SAAS;AACT,QAAQ,2BAA2B,EAAE;AACrC,YAAY,+FAA+F;AAC3G,SAAS;AACT,QAAQ,4BAA4B,EAAE;AACtC,YAAY,qGAAqG;AACjH,SAAS;AACT,KAAK;AACL,IAAI,QAAQ,EAAE;AACd,QAAQ,eAAe,EAAE,CAAC,qDAAqD,CAAC;AAChF,QAAQ,UAAU,EAAE,CAAC,0CAA0C,CAAC;AAChE,QAAQ,YAAY,EAAE,CAAC,qCAAqC,CAAC;AAC7D,QAAQ,0BAA0B,EAAE,CAAC,qBAAqB,CAAC;AAC3D,QAAQ,YAAY,EAAE,CAAC,2BAA2B,CAAC;AACnD,QAAQ,aAAa,EAAE,CAAC,qCAAqC,CAAC;AAC9D,QAAQ,MAAM,EAAE,CAAC,+BAA+B,CAAC;AACjD,QAAQ,UAAU,EAAE,CAAC,0CAA0C,CAAC;AAChE,QAAQ,YAAY,EAAE,CAAC,sCAAsC,CAAC;AAC9D,QAAQ,GAAG,EAAE,CAAC,4BAA4B,CAAC;AAC3C,QAAQ,OAAO,EAAE,CAAC,uCAAuC,CAAC;AAC1D,QAAQ,SAAS,EAAE,CAAC,mCAAmC,CAAC;AACxD,QAAQ,oBAAoB,EAAE;AAC9B,YAAY,gEAAgE;AAC5E,SAAS;AACT,QAAQ,SAAS,EAAE,CAAC,yCAAyC,CAAC;AAC9D,QAAQ,iBAAiB,EAAE,CAAC,0CAA0C,CAAC;AACvE,QAAQ,WAAW,EAAE,CAAC,oCAAoC,CAAC;AAC3D,QAAQ,UAAU,EAAE,CAAC,0BAA0B,CAAC;AAChD,QAAQ,WAAW,EAAE,CAAC,oCAAoC,CAAC;AAC3D,QAAQ,WAAW,EAAE,CAAC,gCAAgC,CAAC;AACvD,QAAQ,QAAQ,EAAE,CAAC,8CAA8C,CAAC;AAClE,QAAQ,UAAU,EAAE,CAAC,0CAA0C,CAAC;AAChE,QAAQ,kBAAkB,EAAE;AAC5B,YAAY,wDAAwD;AACpE,SAAS;AACT,QAAQ,MAAM,EAAE,CAAC,8BAA8B,CAAC;AAChD,QAAQ,UAAU,EAAE,CAAC,yCAAyC,CAAC;AAC/D,QAAQ,YAAY,EAAE,CAAC,qCAAqC,CAAC;AAC7D,KAAK;AACL,IAAI,KAAK,EAAE;AACX,QAAQ,aAAa,EAAE,CAAC,qDAAqD,CAAC;AAC9E,QAAQ,MAAM,EAAE,CAAC,kCAAkC,CAAC;AACpD,QAAQ,2BAA2B,EAAE;AACrC,YAAY,8EAA8E;AAC1F,SAAS;AACT,QAAQ,YAAY,EAAE,CAAC,wDAAwD,CAAC;AAChF,QAAQ,mBAAmB,EAAE;AAC7B,YAAY,yDAAyD;AACrE,SAAS;AACT,QAAQ,mBAAmB,EAAE;AAC7B,YAAY,sEAAsE;AAClF,SAAS;AACT,QAAQ,mBAAmB,EAAE;AAC7B,YAAY,0DAA0D;AACtE,SAAS;AACT,QAAQ,aAAa,EAAE;AACvB,YAAY,8EAA8E;AAC1F,SAAS;AACT,QAAQ,GAAG,EAAE,CAAC,+CAA+C,CAAC;AAC9D,QAAQ,SAAS,EAAE;AACnB,YAAY,mEAAmE;AAC/E,SAAS;AACT,QAAQ,gBAAgB,EAAE,CAAC,uDAAuD,CAAC;AACnF,QAAQ,IAAI,EAAE,CAAC,iCAAiC,CAAC;AACjD,QAAQ,qBAAqB,EAAE;AAC/B,YAAY,4EAA4E;AACxF,SAAS;AACT,QAAQ,WAAW,EAAE,CAAC,uDAAuD,CAAC;AAC9E,QAAQ,SAAS,EAAE,CAAC,qDAAqD,CAAC;AAC1E,QAAQ,sBAAsB,EAAE;AAChC,YAAY,mEAAmE;AAC/E,SAAS;AACT,QAAQ,kBAAkB,EAAE;AAC5B,YAAY,wDAAwD;AACpE,SAAS;AACT,QAAQ,yBAAyB,EAAE,CAAC,0CAA0C,CAAC;AAC/E,QAAQ,WAAW,EAAE,CAAC,uDAAuD,CAAC;AAC9E,QAAQ,KAAK,EAAE,CAAC,qDAAqD,CAAC;AACtE,QAAQ,wBAAwB,EAAE;AAClC,YAAY,sEAAsE;AAClF,SAAS;AACT,QAAQ,gBAAgB,EAAE;AAC1B,YAAY,oEAAoE;AAChF,SAAS;AACT,QAAQ,YAAY,EAAE;AACtB,YAAY,2EAA2E;AACvF,SAAS;AACT,QAAQ,MAAM,EAAE,CAAC,iDAAiD,CAAC;AACnE,QAAQ,YAAY,EAAE;AACtB,YAAY,6DAA6D;AACzE,SAAS;AACT,Q
AAQ,YAAY,EAAE;AACtB,YAAY,mEAAmE;AAC/E,SAAS;AACT,QAAQ,mBAAmB,EAAE;AAC7B,YAAY,yDAAyD;AACrE,SAAS;AACT,KAAK;AACL,IAAI,SAAS,EAAE,EAAE,GAAG,EAAE,CAAC,iBAAiB,CAAC,EAAE;AAC3C,IAAI,SAAS,EAAE;AACf,QAAQ,sBAAsB,EAAE;AAChC,YAAY,4DAA4D;AACxE,SAAS;AACT,QAAQ,cAAc,EAAE;AACxB,YAAY,4DAA4D;AACxE,SAAS;AACT,QAAQ,qBAAqB,EAAE;AAC/B,YAAY,mEAAmE;AAC/E,SAAS;AACT,QAAQ,iCAAiC,EAAE;AAC3C,YAAY,kEAAkE;AAC9E,SAAS;AACT,QAAQ,gBAAgB,EAAE;AAC1B,YAAY,4DAA4D;AACxE,SAAS;AACT,QAAQ,mCAAmC,EAAE;AAC7C,YAAY,wGAAwG;AACpH,SAAS;AACT,QAAQ,4BAA4B,EAAE;AACtC,YAAY,8EAA8E;AAC1F,SAAS;AACT,QAAQ,sBAAsB,EAAE;AAChC,YAAY,4EAA4E;AACxF,SAAS;AACT,QAAQ,cAAc,EAAE;AACxB,YAAY,4EAA4E;AACxF,SAAS;AACT,QAAQ,qBAAqB,EAAE;AAC/B,YAAY,mFAAmF;AAC/F,SAAS;AACT,QAAQ,2BAA2B,EAAE;AACrC,YAAY,kFAAkF;AAC9F,SAAS;AACT,QAAQ,gBAAgB,EAAE;AAC1B,YAAY,4EAA4E;AACxF,SAAS;AACT,QAAQ,uBAAuB,EAAE;AACjC,YAAY,8FAA8F;AAC1G,SAAS;AACT,QAAQ,8BAA8B,EAAE;AACxC,YAAY,wHAAwH;AACpI,SAAS;AACT,QAAQ,oBAAoB,EAAE;AAC9B,YAAY,2DAA2D;AACvE,SAAS;AACT,QAAQ,YAAY,EAAE,CAAC,2DAA2D,CAAC;AACnF,QAAQ,mBAAmB,EAAE;AAC7B,YAAY,kEAAkE;AAC9E,SAAS;AACT,QAAQ,+BAA+B,EAAE;AACzC,YAAY,iEAAiE;AAC7E,SAAS;AACT,QAAQ,cAAc,EAAE;AACxB,YAAY,2DAA2D;AACvE,SAAS;AACT,QAAQ,iCAAiC,EAAE;AAC3C,YAAY,uGAAuG;AACnH,SAAS;AACT,QAAQ,0BAA0B,EAAE;AACpC,YAAY,6EAA6E;AACzF,SAAS;AACT,KAAK;AACL,IAAI,KAAK,EAAE;AACX,QAAQ,gBAAgB,EAAE;AAC1B,YAAY,oDAAoD;AAChE,YAAY,EAAE;AACd,YAAY,EAAE,OAAO,EAAE,CAAC,OAAO,EAAE,sCAAsC,CAAC,EAAE;AAC1E,SAAS;AACT,QAAQ,oCAAoC,EAAE;AAC9C,YAAY,oDAAoD;AAChE,SAAS;AACT,QAAQ,wBAAwB,EAAE;AAClC,YAAY,2EAA2E;AACvF,YAAY,EAAE;AACd,YAAY,EAAE,SAAS,EAAE,MAAM,EAAE;AACjC,SAAS;AACT,QAAQ,eAAe,EAAE,CAAC,oDAAoD,CAAC;AAC/E,QAAQ,sBAAsB,EAAE;AAChC,YAAY,yFAAyF;AACrG,YAAY,EAAE;AACd,YAAY,EAAE,SAAS,EAAE,UAAU,EAAE;AACrC,SAAS;AACT,QAAQ,yBAAyB,EAAE;AACnC,YAAY,4EAA4E;AACxF,YAAY,EAAE;AACd,YAAY,EAAE,SAAS,EAAE,OAAO,EAAE;AAClC,SAAS;AACT,QAAQ,yBAAyB,EAAE;AACnC,YAAY,4EAA4E;AACxF,YAAY,EAAE;AACd,YAAY,EAAE,SAAS,EAAE,OAAO,EAAE;AAClC,SAAS;AACT,QAAQ,iBAAiB,EAAE,CAAC,oDAAoD,CAAC;AACjF,QAAQ,wBAAwB,EAAE;AAClC,YAAY,gDAAgD;AAC5D,SAAS;AACT,QAAQ,gBAAgB,EAAE,CAAC,6CAA6C,CAAC;AACzE,QAAQ,cAAc,EAAE,CAAC,mDAAmD,CAAC;AAC7E,QAAQ,0BAA0B,EAAE;AACpC,YAAY,8CAA8C;AAC1D,SAAS;AACT,QAAQ,cAAc,EAAE,CAAC,sCAAsC,CAAC;AAChE,QAAQ,mBAAmB,EAAE;AAC7B,YAAY,0DAA0D;AACtE,SAAS;AACT,QAAQ,+BAA+B,EAAE;AACzC,YAAY,6EAA6E;AACzF,SAAS;AACT,QAAQ,kBAAkB,EAAE,CAAC,2CAA2C,CAAC;AACzE,QAAQ,eAAe,EAAE,CAAC,iCAAiC,CAAC;AAC5D,QAAQ,gBAAgB,EAAE,CAAC,wCAAwC,CAAC;AACpE,QAAQ,sBAAsB,EAAE;AAChC,YAAY,iEAAiE;AAC7E,SAAS;AACT,QAAQ,mBAAmB,EAAE,CAAC,uCAAuC,CAAC;AACtE,QAAQ,0BAA0B,EAAE,CAAC,kBAAkB,CAAC;AACxD,QAAQ,UAAU,EAAE,CAAC,kCAAkC,CAAC;AACxD,QAAQ,WAAW,EAAE,CAAC,wBAAwB,CAAC;AAC/C,QAAQ,yBAAyB,EAAE;AACnC,YAAY,2DAA2D;AACvE,SAAS;AACT,QAAQ,0BAA0B,EAAE,CAAC,2CAA2C,CAAC;AACjF,QAAQ,eAAe,EAAE,CAAC,kCAAkC,CAAC;AAC7D,QAAQ,aAAa,EAAE,CAAC,qCAAqC,CAAC;AAC9D,QAAQ,mBAAmB,EAAE,CAAC,4CAA4C,CAAC;AAC3E,QAAQ,mBAAmB,EAAE;AAC7B,YAAY,uDAAuD;AACnE,SAAS;AACT,QAAQ,aAAa,EAAE,CAAC,kCAAkC,CAAC;AAC3D,QAAQ,iBAAiB,EAAE;AAC3B,YAAY,qDAAqD;AACjE,YAAY,EAAE;AACd,YAAY,EAAE,OAAO,EAAE,CAAC,OAAO,EAAE,uCAAuC,CAAC,EAAE;AAC3E,SAAS;AACT,QAAQ,qCAAqC,EAAE;AAC/C,YAAY,qDAAqD;AACjE,SAAS;AACT,QAAQ,MAAM,EAAE,CAAC,8BAA8B,CAAC;AAChD,QAAQ,wBAAwB,EAAE;AAClC,YAAY,wEAAwE;AACpF,SAAS;AACT,QAAQ,2BAA2B,EAAE;AACrC,YAAY,0EAA0E;AACtF,SAAS;AACT,QAAQ,mBAAmB,EAAE;AAC7B,YAAY,8DAA8D;AAC1E,SAAS;AACT,QAAQ,cAAc,EAAE,CAAC,sDAAsD,CAAC;AAChF,QAAQ,sBAAsB,EAAE;AAChC,YAAY,2DAA2D;AACvE,SAAS;AACT,QAAQ,mBAAmB,EAAE,CAAC,oDAAoD,CAAC;AACnF,QAAQ,+BAA+B,EAAE;AACzC,YAAY,+EAA+E;AAC3F,SAAS;AACT,QAAQ,eAAe,EAAE,CAAC,4CAA4C,CAAC;AACvE,QAAQ,gBAAgB,EAAE;AAC1B,YAAY,0DAA0D;AACtE,SAAS;AACT,QAAQ,UAAU,EAAE,CAAC,8CAA8C,CAAC;AACpE,QAAQ,gBAAgB
,EAAE;AAC1B,YAAY,0DAA0D;AACtE,SAAS;AACT,QAAQ,eAAe,EAAE,CAAC,oCAAoC,CAAC;AAC/D,QAAQ,iCAAiC,EAAE;AAC3C,YAAY,yFAAyF;AACrG,SAAS;AACT,QAAQ,aAAa,EAAE,CAAC,oDAAoD,CAAC;AAC7E,QAAQ,kBAAkB,EAAE;AAC5B,YAAY,yDAAyD;AACrE,SAAS;AACT,QAAQ,mBAAmB,EAAE;AAC7B,YAAY,kEAAkE;AAC9E,SAAS;AACT,QAAQ,aAAa,EAAE,CAAC,8CAA8C,CAAC;AACvE,QAAQ,6BAA6B,EAAE;AACvC,YAAY,uDAAuD;AACnE,SAAS;AACT,QAAQ,iBAAiB,EAAE,CAAC,kCAAkC,CAAC;AAC/D,QAAQ,0BAA0B,EAAE;AACpC,YAAY,mDAAmD;AAC/D,SAAS;AACT,QAAQ,eAAe,EAAE;AACzB,YAAY,yCAAyC;AACrD,YAAY,EAAE;AACd,YAAY,EAAE,OAAO,EAAE,CAAC,OAAO,EAAE,wBAAwB,CAAC,EAAE;AAC5D,SAAS;AACT,QAAQ,sBAAsB,EAAE,CAAC,yCAAyC,CAAC;AAC3E,QAAQ,sBAAsB,EAAE,CAAC,yCAAyC,CAAC;AAC3E,QAAQ,4BAA4B,EAAE;AACtC,YAAY,oDAAoD;AAChE,SAAS;AACT,QAAQ,gBAAgB,EAAE,CAAC,+BAA+B,CAAC;AAC3D,QAAQ,yBAAyB,EAAE;AACnC,YAAY,gDAAgD;AAC5D,SAAS;AACT,QAAQ,oBAAoB,EAAE;AAC9B,YAAY,oDAAoD;AAChE,SAAS;AACT,QAAQ,GAAG,EAAE,CAAC,2BAA2B,CAAC;AAC1C,QAAQ,qBAAqB,EAAE;AAC/B,YAAY,qEAAqE;AACjF,SAAS;AACT,QAAQ,wBAAwB,EAAE;AAClC,YAAY,uEAAuE;AACnF,SAAS;AACT,QAAQ,kBAAkB,EAAE,CAAC,wCAAwC,CAAC;AACtE,QAAQ,yBAAyB,EAAE;AACnC,YAAY,wFAAwF;AACpG,SAAS;AACT,QAAQ,YAAY,EAAE,CAAC,kCAAkC,CAAC;AAC1D,QAAQ,kCAAkC,EAAE;AAC5C,YAAY,0EAA0E;AACtF,SAAS;AACT,QAAQ,WAAW,EAAE,CAAC,mDAAmD,CAAC;AAC1E,QAAQ,SAAS,EAAE,CAAC,6CAA6C,CAAC;AAClE,QAAQ,mBAAmB,EAAE;AAC7B,YAAY,wDAAwD;AACpE,SAAS;AACT,QAAQ,SAAS,EAAE,CAAC,0CAA0C,CAAC;AAC/D,QAAQ,qBAAqB,EAAE,CAAC,gDAAgD,CAAC;AACjF,QAAQ,8BAA8B,EAAE;AACxC,YAAY,+DAA+D;AAC3E,SAAS;AACT,QAAQ,uBAAuB,EAAE,CAAC,gDAAgD,CAAC;AACnF,QAAQ,SAAS,EAAE,CAAC,yCAAyC,CAAC;AAC9D,QAAQ,sBAAsB,EAAE,CAAC,iDAAiD,CAAC;AACnF,QAAQ,gBAAgB,EAAE,CAAC,iDAAiD,CAAC;AAC7E,QAAQ,4BAA4B,EAAE;AACtC,YAAY,4EAA4E;AACxF,SAAS;AACT,QAAQ,0BAA0B,EAAE,CAAC,6CAA6C,CAAC;AACnF,QAAQ,UAAU,EAAE,CAAC,2CAA2C,CAAC;AACjE,QAAQ,oBAAoB,EAAE,CAAC,8CAA8C,CAAC;AAC9E,QAAQ,YAAY,EAAE,CAAC,yCAAyC,CAAC;AACjE,QAAQ,aAAa,EAAE,CAAC,uDAAuD,CAAC;AAChF,QAAQ,mBAAmB,EAAE;AAC7B,YAAY,4EAA4E;AACxF,SAAS;AACT,QAAQ,cAAc,EAAE;AACxB,YAAY,2DAA2D;AACvE,SAAS;AACT,QAAQ,mBAAmB,EAAE,CAAC,+CAA+C,CAAC;AAC9E,QAAQ,gBAAgB,EAAE,CAAC,2CAA2C,CAAC;AACvE,QAAQ,QAAQ,EAAE,CAAC,iCAAiC,CAAC;AACrD,QAAQ,aAAa,EAAE,CAAC,mDAAmD,CAAC;AAC5E,QAAQ,mBAAmB,EAAE,CAAC,wCAAwC,CAAC;AACvE,QAAQ,qBAAqB,EAAE,CAAC,+CAA+C,CAAC;AAChF,QAAQ,8BAA8B,EAAE;AACxC,YAAY,sFAAsF;AAClG,SAAS;AACT,QAAQ,iBAAiB,EAAE,CAAC,4CAA4C,CAAC;AACzE,QAAQ,SAAS,EAAE,CAAC,kCAAkC,CAAC;AACvD,QAAQ,oBAAoB,EAAE,CAAC,wCAAwC,CAAC;AACxE,QAAQ,UAAU,EAAE,CAAC,iDAAiD,CAAC;AACvE,QAAQ,eAAe,EAAE,CAAC,sDAAsD,CAAC;AACjF,QAAQ,eAAe,EAAE,CAAC,+CAA+C,CAAC;AAC1E,QAAQ,yBAAyB,EAAE;AACnC,YAAY,+EAA+E;AAC3F,SAAS;AACT,QAAQ,mCAAmC,EAAE;AAC7C,YAAY,2EAA2E;AACvF,SAAS;AACT,QAAQ,WAAW,EAAE,CAAC,iDAAiD,CAAC;AACxE,QAAQ,eAAe,EAAE,CAAC,qDAAqD,CAAC;AAChF,QAAQ,mCAAmC,EAAE;AAC7C,YAAY,2EAA2E;AACvF,SAAS;AACT,QAAQ,QAAQ,EAAE,CAAC,yCAAyC,CAAC;AAC7D,QAAQ,UAAU,EAAE,CAAC,2CAA2C,CAAC;AACjE,QAAQ,uBAAuB,EAAE;AACjC,YAAY,kDAAkD;AAC9D,SAAS;AACT,QAAQ,kBAAkB,EAAE;AAC5B,YAAY,oEAAoE;AAChF,SAAS;AACT,QAAQ,aAAa,EAAE,CAAC,qCAAqC,CAAC;AAC9D,QAAQ,YAAY,EAAE,CAAC,oCAAoC,CAAC;AAC5D,QAAQ,yBAAyB,EAAE;AACnC,YAAY,oEAAoE;AAChF,SAAS;AACT,QAAQ,iBAAiB,EAAE,CAAC,yCAAyC,CAAC;AACtE,QAAQ,qBAAqB,EAAE;AAC/B,YAAY,yDAAyD;AACrE,SAAS;AACT,QAAQ,yBAAyB,EAAE,CAAC,oCAAoC,CAAC;AACzE,QAAQ,wBAAwB,EAAE;AAClC,YAAY,kDAAkD;AAC9D,SAAS;AACT,QAAQ,WAAW,EAAE,CAAC,mCAAmC,CAAC;AAC1D,QAAQ,gBAAgB,EAAE,CAAC,wCAAwC,CAAC;AACpE,QAAQ,cAAc,EAAE,CAAC,gCAAgC,CAAC;AAC1D,QAAQ,sBAAsB,EAAE;AAChC,YAAY,gEAAgE;AAC5E,SAAS;AACT,QAAQ,eAAe,EAAE,CAAC,uCAAuC,CAAC;AAClE,QAAQ,wBAAwB,EAAE,CAAC,iBAAiB,CAAC;AACrD,QAAQ,UAAU,EAAE,CAAC,uBAAuB,CAAC;AAC7C,QAAQ,WAAW,EAAE,CAAC,6BAA6B,CAAC;AACpD,QAAQ,SAAS,EAAE,CAAC,iCAAiC,CAAC;AACtD,QAAQ,eAAe,EAAE,CAAC,uCAAuC,CAAC;AAClE,
QAAQ,mCAAmC,EAAE,CAAC,kCAAkC,CAAC;AACjF,QAAQ,aAAa,EAAE,CAAC,qCAAqC,CAAC;AAC9D,QAAQ,eAAe,EAAE,CAAC,wCAAwC,CAAC;AACnE,QAAQ,UAAU,EAAE,CAAC,mBAAmB,CAAC;AACzC,QAAQ,oCAAoC,EAAE;AAC9C,YAAY,sDAAsD;AAClE,SAAS;AACT,QAAQ,iBAAiB,EAAE;AAC3B,YAAY,wDAAwD;AACpE,SAAS;AACT,QAAQ,YAAY,EAAE,CAAC,oCAAoC,CAAC;AAC5D,QAAQ,iBAAiB,EAAE,CAAC,2CAA2C,CAAC;AACxE,QAAQ,QAAQ,EAAE,CAAC,gCAAgC,CAAC;AACpD,QAAQ,SAAS,EAAE,CAAC,iCAAiC,CAAC;AACtD,QAAQ,qBAAqB,EAAE;AAC/B,YAAY,sDAAsD;AAClE,SAAS;AACT,QAAQ,YAAY,EAAE,CAAC,iCAAiC,CAAC;AACzD,QAAQ,KAAK,EAAE,CAAC,mCAAmC,CAAC;AACpD,QAAQ,aAAa,EAAE,CAAC,2CAA2C,CAAC;AACpE,QAAQ,WAAW,EAAE,CAAC,kDAAkD,CAAC;AACzE,QAAQ,wBAAwB,EAAE;AAClC,YAAY,8EAA8E;AAC1F,SAAS;AACT,QAAQ,2BAA2B,EAAE;AACrC,YAAY,6EAA6E;AACzF,YAAY,EAAE;AACd,YAAY,EAAE,SAAS,EAAE,MAAM,EAAE;AACjC,SAAS;AACT,QAAQ,kBAAkB,EAAE;AAC5B,YAAY,uDAAuD;AACnE,SAAS;AACT,QAAQ,yBAAyB,EAAE;AACnC,YAAY,2FAA2F;AACvG,YAAY,EAAE;AACd,YAAY,EAAE,SAAS,EAAE,UAAU,EAAE;AACrC,SAAS;AACT,QAAQ,2BAA2B,EAAE;AACrC,YAAY,kFAAkF;AAC9F,SAAS;AACT,QAAQ,4BAA4B,EAAE;AACtC,YAAY,8EAA8E;AAC1F,YAAY,EAAE;AACd,YAAY,EAAE,SAAS,EAAE,OAAO,EAAE;AAClC,SAAS;AACT,QAAQ,4BAA4B,EAAE;AACtC,YAAY,8EAA8E;AAC1F,YAAY,EAAE;AACd,YAAY,EAAE,SAAS,EAAE,OAAO,EAAE;AAClC,SAAS;AACT,QAAQ,YAAY,EAAE,CAAC,qDAAqD,CAAC;AAC7E,QAAQ,gBAAgB,EAAE,CAAC,kCAAkC,CAAC;AAC9D,QAAQ,iBAAiB,EAAE,CAAC,yCAAyC,CAAC;AACtE,QAAQ,wBAAwB,EAAE;AAClC,YAAY,wEAAwE;AACpF,SAAS;AACT,QAAQ,wBAAwB,EAAE;AAClC,YAAY,0EAA0E;AACtF,YAAY,EAAE;AACd,YAAY,EAAE,SAAS,EAAE,MAAM,EAAE;AACjC,SAAS;AACT,QAAQ,sBAAsB,EAAE;AAChC,YAAY,wFAAwF;AACpG,YAAY,EAAE;AACd,YAAY,EAAE,SAAS,EAAE,UAAU,EAAE;AACrC,SAAS;AACT,QAAQ,yBAAyB,EAAE;AACnC,YAAY,2EAA2E;AACvF,YAAY,EAAE;AACd,YAAY,EAAE,SAAS,EAAE,OAAO,EAAE;AAClC,SAAS;AACT,QAAQ,yBAAyB,EAAE;AACnC,YAAY,2EAA2E;AACvF,YAAY,EAAE;AACd,YAAY,EAAE,SAAS,EAAE,OAAO,EAAE;AAClC,SAAS;AACT,QAAQ,eAAe,EAAE,CAAC,kDAAkD,CAAC;AAC7E,QAAQ,QAAQ,EAAE,CAAC,qCAAqC,CAAC;AACzD,QAAQ,MAAM,EAAE,CAAC,6BAA6B,CAAC;AAC/C,QAAQ,sBAAsB,EAAE;AAChC,YAAY,wDAAwD;AACpE,SAAS;AACT,QAAQ,mBAAmB,EAAE,CAAC,mDAAmD,CAAC;AAClF,QAAQ,+BAA+B,EAAE,CAAC,iCAAiC,CAAC;AAC5E,QAAQ,gBAAgB,EAAE;AAC1B,YAAY,yDAAyD;AACrE,SAAS;AACT,QAAQ,iCAAiC,EAAE;AAC3C,YAAY,wFAAwF;AACpG,SAAS;AACT,QAAQ,aAAa,EAAE,CAAC,mDAAmD,CAAC;AAC5E,QAAQ,kBAAkB,EAAE;AAC5B,YAAY,wDAAwD;AACpE,SAAS;AACT,QAAQ,0BAA0B,EAAE;AACpC,YAAY,iFAAiF;AAC7F,YAAY,EAAE;AACd,YAAY,EAAE,OAAO,EAAE,CAAC,OAAO,EAAE,6BAA6B,CAAC,EAAE;AACjE,SAAS;AACT,QAAQ,2BAA2B,EAAE;AACrC,YAAY,iFAAiF;AAC7F,SAAS;AACT,QAAQ,aAAa,EAAE,CAAC,6CAA6C,CAAC;AACtE,QAAQ,0BAA0B,EAAE;AACpC,YAAY,oDAAoD;AAChE,SAAS;AACT,QAAQ,kBAAkB,EAAE;AAC5B,YAAY,sEAAsE;AAClF,YAAY,EAAE,OAAO,EAAE,4BAA4B,EAAE;AACrD,SAAS;AACT,KAAK;AACL,IAAI,MAAM,EAAE;AACZ,QAAQ,IAAI,EAAE,CAAC,kBAAkB,CAAC;AAClC,QAAQ,OAAO,EAAE,CAAC,qBAAqB,CAAC;AACxC,QAAQ,qBAAqB,EAAE,CAAC,oBAAoB,CAAC;AACrD,QAAQ,MAAM,EAAE,CAAC,oBAAoB,CAAC;AACtC,QAAQ,KAAK,EAAE,CAAC,0BAA0B,CAAC;AAC3C,QAAQ,MAAM,EAAE,CAAC,oBAAoB,CAAC;AACtC,QAAQ,KAAK,EAAE,CAAC,mBAAmB,CAAC;AACpC,KAAK;AACL,IAAI,cAAc,EAAE;AACpB,QAAQ,QAAQ,EAAE;AAClB,YAAY,iEAAiE;AAC7E,SAAS;AACT,QAAQ,uBAAuB,EAAE;AACjC,YAAY,sDAAsD;AAClE,SAAS;AACT,QAAQ,gBAAgB,EAAE,CAAC,wCAAwC,CAAC;AACpE,QAAQ,iBAAiB,EAAE,CAAC,kDAAkD,CAAC;AAC/E,QAAQ,qBAAqB,EAAE;AAC/B,YAAY,2EAA2E;AACvF,SAAS;AACT,QAAQ,WAAW,EAAE;AACrB,YAAY,mEAAmE;AAC/E,SAAS;AACT,KAAK;AACL,IAAI,KAAK,EAAE;AACX,QAAQ,iCAAiC,EAAE;AAC3C,YAAY,0DAA0D;AACtE,SAAS;AACT,QAAQ,kCAAkC,EAAE;AAC5C,YAAY,yDAAyD;AACrE,SAAS;AACT,QAAQ,+BAA+B,EAAE;AACzC,YAAY,wDAAwD;AACpE,SAAS;AACT,QAAQ,+BAA+B,EAAE;AACzC,YAAY,yDAAyD;AACrE,SAAS;AACT,QAAQ,4BAA4B,EAAE;AACtC,YAAY,wDAAwD;AACpE,SAAS;AACT,QAAQ,MAAM,EAAE,CAAC,wBAAwB,CAAC;AAC1C,QAAQ,4BAA4B,EAAE;AACtC,YAAY,6EAA6E;AACzF,SAAS;AACT,QAAQ,qBAAqB,EAAE,CAAC,gDAAgD,CAA
C;AACjF,QAAQ,4BAA4B,EAAE;AACtC,YAAY,gGAAgG;AAC5G,SAAS;AACT,QAAQ,qBAAqB,EAAE;AAC/B,YAAY,sEAAsE;AAClF,SAAS;AACT,QAAQ,WAAW,EAAE,CAAC,sCAAsC,CAAC;AAC7D,QAAQ,SAAS,EAAE,CAAC,mCAAmC,CAAC;AACxD,QAAQ,yBAAyB,EAAE;AACnC,YAAY,6FAA6F;AACzG,SAAS;AACT,QAAQ,kBAAkB,EAAE;AAC5B,YAAY,mEAAmE;AAC/E,SAAS;AACT,QAAQ,yBAAyB,EAAE;AACnC,YAAY,0DAA0D;AACtE,SAAS;AACT,QAAQ,IAAI,EAAE,CAAC,uBAAuB,CAAC;AACvC,QAAQ,cAAc,EAAE,CAAC,yCAAyC,CAAC;AACnE,QAAQ,2BAA2B,EAAE;AACrC,YAAY,4EAA4E;AACxF,SAAS;AACT,QAAQ,oBAAoB,EAAE,CAAC,+CAA+C,CAAC;AAC/E,QAAQ,wBAAwB,EAAE,CAAC,iBAAiB,CAAC;AACrD,QAAQ,gBAAgB,EAAE,CAAC,2CAA2C,CAAC;AACvE,QAAQ,2BAA2B,EAAE;AACrC,YAAY,+CAA+C;AAC3D,SAAS;AACT,QAAQ,iBAAiB,EAAE,CAAC,4CAA4C,CAAC;AACzE,QAAQ,cAAc,EAAE,CAAC,yCAAyC,CAAC;AACnE,QAAQ,4BAA4B,EAAE;AACtC,YAAY,6DAA6D;AACzE,SAAS;AACT,QAAQ,kBAAkB,EAAE;AAC5B,YAAY,4DAA4D;AACxE,SAAS;AACT,QAAQ,eAAe,EAAE;AACzB,YAAY,2DAA2D;AACvE,SAAS;AACT,QAAQ,4BAA4B,EAAE;AACtC,YAAY,+FAA+F;AAC3G,SAAS;AACT,QAAQ,qBAAqB,EAAE;AAC/B,YAAY,qEAAqE;AACjF,SAAS;AACT,QAAQ,WAAW,EAAE,CAAC,qCAAqC,CAAC;AAC5D,KAAK;AACL,IAAI,KAAK,EAAE;AACX,QAAQ,wBAAwB,EAAE;AAClC,YAAY,mBAAmB;AAC/B,YAAY,EAAE;AACd,YAAY,EAAE,OAAO,EAAE,CAAC,OAAO,EAAE,8BAA8B,CAAC,EAAE;AAClE,SAAS;AACT,QAAQ,4BAA4B,EAAE,CAAC,mBAAmB,CAAC;AAC3D,QAAQ,KAAK,EAAE,CAAC,6BAA6B,CAAC;AAC9C,QAAQ,YAAY,EAAE,CAAC,6BAA6B,CAAC;AACrD,QAAQ,qBAAqB,EAAE,CAAC,+CAA+C,CAAC;AAChF,QAAQ,oCAAoC,EAAE,CAAC,gCAAgC,CAAC;AAChF,QAAQ,4BAA4B,EAAE;AACtC,YAAY,qBAAqB;AACjC,YAAY,EAAE;AACd,YAAY,EAAE,OAAO,EAAE,CAAC,OAAO,EAAE,kCAAkC,CAAC,EAAE;AACtE,SAAS;AACT,QAAQ,gCAAgC,EAAE,CAAC,qBAAqB,CAAC;AACjE,QAAQ,kCAAkC,EAAE;AAC5C,YAAY,iBAAiB;AAC7B,YAAY,EAAE;AACd,YAAY,EAAE,OAAO,EAAE,CAAC,OAAO,EAAE,wCAAwC,CAAC,EAAE;AAC5E,SAAS;AACT,QAAQ,sCAAsC,EAAE,CAAC,iBAAiB,CAAC;AACnE,QAAQ,2BAA2B,EAAE;AACrC,YAAY,qBAAqB;AACjC,YAAY,EAAE;AACd,YAAY,EAAE,OAAO,EAAE,CAAC,OAAO,EAAE,iCAAiC,CAAC,EAAE;AACrE,SAAS;AACT,QAAQ,+BAA+B,EAAE,CAAC,qBAAqB,CAAC;AAChE,QAAQ,4BAA4B,EAAE;AACtC,YAAY,oCAAoC;AAChD,YAAY,EAAE;AACd,YAAY,EAAE,OAAO,EAAE,CAAC,OAAO,EAAE,kCAAkC,CAAC,EAAE;AACtE,SAAS;AACT,QAAQ,gCAAgC,EAAE,CAAC,oCAAoC,CAAC;AAChF,QAAQ,kCAAkC,EAAE;AAC5C,YAAY,4BAA4B;AACxC,YAAY,EAAE;AACd,YAAY,EAAE,OAAO,EAAE,CAAC,OAAO,EAAE,wCAAwC,CAAC,EAAE;AAC5E,SAAS;AACT,QAAQ,sCAAsC,EAAE,CAAC,4BAA4B,CAAC;AAC9E,QAAQ,MAAM,EAAE,CAAC,gCAAgC,CAAC;AAClD,QAAQ,gBAAgB,EAAE,CAAC,WAAW,CAAC;AACvC,QAAQ,aAAa,EAAE,CAAC,uBAAuB,CAAC;AAChD,QAAQ,iBAAiB,EAAE,CAAC,iCAAiC,CAAC;AAC9D,QAAQ,yBAAyB,EAAE;AACnC,YAAY,iCAAiC;AAC7C,YAAY,EAAE;AACd,YAAY,EAAE,OAAO,EAAE,CAAC,OAAO,EAAE,+BAA+B,CAAC,EAAE;AACnE,SAAS;AACT,QAAQ,6BAA6B,EAAE,CAAC,iCAAiC,CAAC;AAC1E,QAAQ,+BAA+B,EAAE;AACzC,YAAY,yBAAyB;AACrC,YAAY,EAAE;AACd,YAAY,EAAE,OAAO,EAAE,CAAC,OAAO,EAAE,qCAAqC,CAAC,EAAE;AACzE,SAAS;AACT,QAAQ,mCAAmC,EAAE,CAAC,yBAAyB,CAAC;AACxE,QAAQ,IAAI,EAAE,CAAC,YAAY,CAAC;AAC5B,QAAQ,0BAA0B,EAAE;AACpC,YAAY,kBAAkB;AAC9B,YAAY,EAAE;AACd,YAAY,EAAE,OAAO,EAAE,CAAC,OAAO,EAAE,gCAAgC,CAAC,EAAE;AACpE,SAAS;AACT,QAAQ,8BAA8B,EAAE,CAAC,kBAAkB,CAAC;AAC5D,QAAQ,0BAA0B,EAAE;AACpC,YAAY,kBAAkB;AAC9B,YAAY,EAAE;AACd,YAAY,EAAE,OAAO,EAAE,CAAC,OAAO,EAAE,gCAAgC,CAAC,EAAE;AACpE,SAAS;AACT,QAAQ,8BAA8B,EAAE,CAAC,kBAAkB,CAAC;AAC5D,QAAQ,2BAA2B,EAAE;AACrC,YAAY,qBAAqB;AACjC,YAAY,EAAE;AACd,YAAY,EAAE,OAAO,EAAE,CAAC,OAAO,EAAE,iCAAiC,CAAC,EAAE;AACrE,SAAS;AACT,QAAQ,+BAA+B,EAAE,CAAC,qBAAqB,CAAC;AAChE,QAAQ,iCAAiC,EAAE,CAAC,qBAAqB,CAAC;AAClE,QAAQ,oBAAoB,EAAE,CAAC,iCAAiC,CAAC;AACjE,QAAQ,oBAAoB,EAAE,CAAC,iCAAiC,CAAC;AACjE,QAAQ,2BAA2B,EAAE;AACrC,YAAY,oBAAoB;AAChC,YAAY,EAAE;AACd,YAAY,EAAE,OAAO,EAAE,CAAC,OAAO,EAAE,iCAAiC,CAAC,EAAE;AACrE,SAAS;AACT,QAAQ,+BAA+B,EAAE,CAAC,oBAAoB,CAAC;AAC/D,QAAQ,kBAAkB,EAAE,CAAC,gCAAgC,CAAC;AAC9D,QAAQ,gCAAgC,EAAE;AAC1C,YAAY,yBAAyB;AACrC,YAAY
,EAAE;AACd,YAAY,EAAE,OAAO,EAAE,CAAC,OAAO,EAAE,sCAAsC,CAAC,EAAE;AAC1E,SAAS;AACT,QAAQ,oCAAoC,EAAE,CAAC,yBAAyB,CAAC;AACzE,QAAQ,qBAAqB,EAAE,CAAC,4BAA4B,CAAC;AAC7D,QAAQ,iCAAiC,EAAE;AAC3C,YAAY,gBAAgB;AAC5B,YAAY,EAAE;AACd,YAAY,EAAE,OAAO,EAAE,CAAC,OAAO,EAAE,uCAAuC,CAAC,EAAE;AAC3E,SAAS;AACT,QAAQ,qCAAqC,EAAE,CAAC,gBAAgB,CAAC;AACjE,QAAQ,yCAAyC,EAAE;AACnD,YAAY,8BAA8B;AAC1C,YAAY,EAAE;AACd,YAAY,EAAE,OAAO,EAAE,CAAC,OAAO,EAAE,+CAA+C,CAAC,EAAE;AACnF,SAAS;AACT,QAAQ,6CAA6C,EAAE;AACvD,YAAY,8BAA8B;AAC1C,SAAS;AACT,QAAQ,OAAO,EAAE,CAAC,gCAAgC,CAAC;AACnD,QAAQ,QAAQ,EAAE,CAAC,mCAAmC,CAAC;AACvD,QAAQ,mBAAmB,EAAE,CAAC,aAAa,CAAC;AAC5C,KAAK;AACL,CAAC;;AC9nDM,MAAM,OAAO,GAAG,mBAAmB,CAAC;;ACApC,SAAS,kBAAkB,CAAC,OAAO,EAAE,YAAY,EAAE;AAC1D,IAAI,MAAM,UAAU,GAAG,EAAE,CAAC;AAC1B,IAAI,KAAK,MAAM,CAAC,KAAK,EAAE,SAAS,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,YAAY,CAAC,EAAE;AACnE,QAAQ,KAAK,MAAM,CAAC,UAAU,EAAE,QAAQ,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,SAAS,CAAC,EAAE;AACxE,YAAY,MAAM,CAAC,KAAK,EAAE,QAAQ,EAAE,WAAW,CAAC,GAAG,QAAQ,CAAC;AAC5D,YAAY,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,GAAG,KAAK,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;AACnD,YAAY,MAAM,gBAAgB,GAAG,MAAM,CAAC,MAAM,CAAC,EAAE,MAAM,EAAE,GAAG,EAAE,EAAE,QAAQ,CAAC,CAAC;AAC9E,YAAY,IAAI,CAAC,UAAU,CAAC,KAAK,CAAC,EAAE;AACpC,gBAAgB,UAAU,CAAC,KAAK,CAAC,GAAG,EAAE,CAAC;AACvC,aAAa;AACb,YAAY,MAAM,YAAY,GAAG,UAAU,CAAC,KAAK,CAAC,CAAC;AACnD,YAAY,IAAI,WAAW,EAAE;AAC7B,gBAAgB,YAAY,CAAC,UAAU,CAAC,GAAG,QAAQ,CAAC,OAAO,EAAE,KAAK,EAAE,UAAU,EAAE,gBAAgB,EAAE,WAAW,CAAC,CAAC;AAC/G,gBAAgB,SAAS;AACzB,aAAa;AACb,YAAY,YAAY,CAAC,UAAU,CAAC,GAAG,OAAO,CAAC,OAAO,CAAC,QAAQ,CAAC,gBAAgB,CAAC,CAAC;AAClF,SAAS;AACT,KAAK;AACL,IAAI,OAAO,UAAU,CAAC;AACtB,CAAC;AACD,SAAS,QAAQ,CAAC,OAAO,EAAE,KAAK,EAAE,UAAU,EAAE,QAAQ,EAAE,WAAW,EAAE;AACrE,IAAI,MAAM,mBAAmB,GAAG,OAAO,CAAC,OAAO,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC;AACnE;AACA,IAAI,SAAS,eAAe,CAAC,GAAG,IAAI,EAAE;AACtC;AACA,QAAQ,IAAI,OAAO,GAAG,mBAAmB,CAAC,QAAQ,CAAC,KAAK,CAAC,GAAG,IAAI,CAAC,CAAC;AAClE;AACA,QAAQ,IAAI,WAAW,CAAC,SAAS,EAAE;AACnC,YAAY,OAAO,GAAG,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,OAAO,EAAE;AACjD,gBAAgB,IAAI,EAAE,OAAO,CAAC,WAAW,CAAC,SAAS,CAAC;AACpD,gBAAgB,CAAC,WAAW,CAAC,SAAS,GAAG,SAAS;AAClD,aAAa,CAAC,CAAC;AACf,YAAY,OAAO,mBAAmB,CAAC,OAAO,CAAC,CAAC;AAChD,SAAS;AACT,QAAQ,IAAI,WAAW,CAAC,OAAO,EAAE;AACjC,YAAY,MAAM,CAAC,QAAQ,EAAE,aAAa,CAAC,GAAG,WAAW,CAAC,OAAO,CAAC;AAClE,YAAY,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC,QAAQ,EAAE,KAAK,CAAC,CAAC,EAAE,UAAU,CAAC,+BAA+B,EAAE,QAAQ,CAAC,CAAC,EAAE,aAAa,CAAC,EAAE,CAAC,CAAC,CAAC;AAC5H,SAAS;AACT,QAAQ,IAAI,WAAW,CAAC,UAAU,EAAE;AACpC,YAAY,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,WAAW,CAAC,UAAU,CAAC,CAAC;AACrD,SAAS;AACT,QAAQ,IAAI,WAAW,CAAC,iBAAiB,EAAE;AAC3C;AACA,YAAY,MAAM,OAAO,GAAG,mBAAmB,CAAC,QAAQ,CAAC,KAAK,CAAC,GAAG,IAAI,CAAC,CAAC;AACxE,YAAY,KAAK,MAAM,CAAC,IAAI,EAAE,KAAK,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,WAAW,CAAC,iBAAiB,CAAC,EAAE;AACvF,gBAAgB,IAAI,IAAI,IAAI,OAAO,EAAE;AACrC,oBAAoB,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,IAAI,CAAC,uCAAuC,EAAE,KAAK,CAAC,CAAC,EAAE,UAAU,CAAC,UAAU,EAAE,KAAK,CAAC,SAAS,CAAC,CAAC,CAAC;AACzI,oBAAoB,IAAI,EAAE,KAAK,IAAI,OAAO,CAAC,EAAE;AAC7C,wBAAwB,OAAO,CAAC,KAAK,CAAC,GAAG,OAAO,CAAC,IAAI,CAAC,CAAC;AACvD,qBAAqB;AACrB,oBAAoB,OAAO,OAAO,CAAC,IAAI,CAAC,CAAC;AACzC,iBAAiB;AACjB,aAAa;AACb,YAAY,OAAO,mBAAmB,CAAC,OAAO,CAAC,CAAC;AAChD,SAAS;AACT;AACA,QAAQ,OAAO,mBAAmB,CAAC,GAAG,IAAI,CAAC,CAAC;AAC5C,KAAK;AACL,IAAI,OAAO,MAAM,CAAC,MAAM,CAAC,eAAe,EAAE,mBAAmB,CAAC,CAAC;AAC/D,CAAC;;ACxDM,SAAS,mBAAmB,CAAC,OAAO,EAAE;AAC7C,IAAI,MAAM,GAAG,GAAG,kBAAkB,CAAC,OAAO,EAAEA,SAAS,CAAC,CAAC;AACvD,IAAI,OAAO;AACX,QAAQ,IAAI,EAAE,GAAG;AACjB,KAAK,CAAC;AACN,CAAC;AACD,mBAAmB,CAAC,OAAO,GAAG,OAAO,CAAC;AACtC,AAAO,SAAS,yBAAyB,CAAC,OAAO,EAAE;AACnD
,IAAI,MAAM,GAAG,GAAG,kBAAkB,CAAC,OAAO,EAAEA,SAAS,CAAC,CAAC;AACvD,IAAI,OAAO;AACX,QAAQ,GAAG,GAAG;AACd,QAAQ,IAAI,EAAE,GAAG;AACjB,KAAK,CAAC;AACN,CAAC;AACD,yBAAyB,CAAC,OAAO,GAAG,OAAO,CAAC;;;;"} \ No newline at end of file diff --git a/node_modules/@octokit/plugin-rest-endpoint-methods/package.json b/node_modules/@octokit/plugin-rest-endpoint-methods/package.json new file mode 100644 index 0000000..42c5ce3 --- /dev/null +++ b/node_modules/@octokit/plugin-rest-endpoint-methods/package.json @@ -0,0 +1,59 @@ +{ + "name": "@octokit/plugin-rest-endpoint-methods", + "description": "Octokit plugin adding one method for all of api.github.com REST API endpoints", + "version": "5.16.2", + "license": "MIT", + "files": [ + "dist-*/", + "bin/" + ], + "pika": true, + "sideEffects": false, + "keywords": [ + "github", + "api", + "sdk", + "toolkit" + ], + "repository": "github:octokit/plugin-rest-endpoint-methods.js", + "dependencies": { + "@octokit/types": "^6.39.0", + "deprecation": "^2.3.1" + }, + "peerDependencies": { + "@octokit/core": ">=3" + }, + "devDependencies": { + "@gimenete/type-writer": "^0.1.5", + "@octokit/core": "^3.0.0", + "@pika/pack": "^0.5.0", + "@pika/plugin-build-node": "^0.9.0", + "@pika/plugin-build-web": "^0.9.0", + "@pika/plugin-ts-standard-pkg": "^0.9.0", + "@types/fetch-mock": "^7.3.1", + "@types/jest": "^27.0.0", + "@types/node": "^16.0.0", + "fetch-mock": "^9.0.0", + "fs-extra": "^10.0.0", + "github-openapi-graphql-query": "^2.0.0", + "jest": "^27.0.0", + "lodash.camelcase": "^4.3.0", + "lodash.set": "^4.3.2", + "lodash.upperfirst": "^4.3.1", + "mustache": "^4.0.0", + "npm-run-all": "^4.1.5", + "prettier": "2.7.1", + "semantic-release-plugin-update-version-in-files": "^1.0.0", + "sort-keys": "^4.2.0", + "string-to-jsdoc-comment": "^1.0.0", + "ts-jest": "^27.0.0-next.12", + "typescript": "^4.0.2" + }, + "publishConfig": { + "access": "public" + }, + "source": "dist-src/index.js", + "types": "dist-types/index.d.ts", + "main": "dist-node/index.js", + "module": "dist-web/index.js" +} diff --git a/node_modules/@octokit/request-error/LICENSE b/node_modules/@octokit/request-error/LICENSE new file mode 100644 index 0000000..ef2c18e --- /dev/null +++ b/node_modules/@octokit/request-error/LICENSE @@ -0,0 +1,21 @@ +The MIT License + +Copyright (c) 2019 Octokit contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
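As a usage note for the `@octokit/plugin-rest-endpoint-methods` package added above: its package.json describes it as a plugin that exposes every api.github.com REST endpoint as a method, and it is registered on `@octokit/core` (its peer dependency). A minimal sketch with placeholder owner/repo values:

```js
const { Octokit } = require("@octokit/core");
const {
  restEndpointMethods,
} = require("@octokit/plugin-rest-endpoint-methods");

// Register the plugin so every REST API endpoint is exposed as a method,
// grouped by scope, e.g. octokit.rest.repos.*, octokit.rest.issues.*
const MyOctokit = Octokit.plugin(restEndpointMethods);
const octokit = new MyOctokit(); // pass { auth: "<token>" } for private data

octokit.rest.repos
  .get({ owner: "octokit", repo: "plugin-rest-endpoint-methods.js" })
  .then(({ data }) => console.log(data.full_name));
```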
diff --git a/node_modules/@octokit/request-error/README.md b/node_modules/@octokit/request-error/README.md new file mode 100644 index 0000000..1bf5384 --- /dev/null +++ b/node_modules/@octokit/request-error/README.md @@ -0,0 +1,67 @@ +# http-error.js + +> Error class for Octokit request errors + +[![@latest](https://img.shields.io/npm/v/@octokit/request-error.svg)](https://www.npmjs.com/package/@octokit/request-error) +[![Build Status](https://github.com/octokit/request-error.js/workflows/Test/badge.svg)](https://github.com/octokit/request-error.js/actions?query=workflow%3ATest) + +## Usage + + + + + + +
+Browsers
+
+Load @octokit/request-error directly from cdn.skypack.dev
+
+```html
+<script type="module">
+import { RequestError } from "https://cdn.skypack.dev/@octokit/request-error";
+</script>
+```
+
+Node + + +Install with npm install @octokit/request-error + +```js +const { RequestError } = require("@octokit/request-error"); +// or: import { RequestError } from "@octokit/request-error"; +``` + +
+ +```js +const error = new RequestError("Oops", 500, { + headers: { + "x-github-request-id": "1:2:3:4", + }, // response headers + request: { + method: "POST", + url: "https://api.github.com/foo", + body: { + bar: "baz", + }, + headers: { + authorization: "token secret123", + }, + }, +}); + +error.message; // Oops +error.status; // 500 +error.request.method; // POST +error.request.url; // https://api.github.com/foo +error.request.body; // { bar: 'baz' } +error.request.headers; // { authorization: 'token [REDACTED]' } +error.response; // { url, status, headers, data } +``` + +## LICENSE + +[MIT](LICENSE) diff --git a/node_modules/@octokit/request-error/dist-node/index.js b/node_modules/@octokit/request-error/dist-node/index.js new file mode 100644 index 0000000..619f462 --- /dev/null +++ b/node_modules/@octokit/request-error/dist-node/index.js @@ -0,0 +1,74 @@ +'use strict'; + +Object.defineProperty(exports, '__esModule', { value: true }); + +function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } + +var deprecation = require('deprecation'); +var once = _interopDefault(require('once')); + +const logOnceCode = once(deprecation => console.warn(deprecation)); +const logOnceHeaders = once(deprecation => console.warn(deprecation)); +/** + * Error with extra properties to help with debugging + */ + +class RequestError extends Error { + constructor(message, statusCode, options) { + super(message); // Maintains proper stack trace (only available on V8) + + /* istanbul ignore next */ + + if (Error.captureStackTrace) { + Error.captureStackTrace(this, this.constructor); + } + + this.name = "HttpError"; + this.status = statusCode; + let headers; + + if ("headers" in options && typeof options.headers !== "undefined") { + headers = options.headers; + } + + if ("response" in options) { + this.response = options.response; + headers = options.response.headers; + } // redact request credentials without mutating original request options + + + const requestCopy = Object.assign({}, options.request); + + if (options.request.headers.authorization) { + requestCopy.headers = Object.assign({}, options.request.headers, { + authorization: options.request.headers.authorization.replace(/ .*$/, " [REDACTED]") + }); + } + + requestCopy.url = requestCopy.url // client_id & client_secret can be passed as URL query parameters to increase rate limit + // see https://developer.github.com/v3/#increasing-the-unauthenticated-rate-limit-for-oauth-applications + .replace(/\bclient_secret=\w+/g, "client_secret=[REDACTED]") // OAuth tokens can be passed as URL query parameters, although it is not recommended + // see https://developer.github.com/v3/#oauth2-token-sent-in-a-header + .replace(/\baccess_token=\w+/g, "access_token=[REDACTED]"); + this.request = requestCopy; // deprecations + + Object.defineProperty(this, "code", { + get() { + logOnceCode(new deprecation.Deprecation("[@octokit/request-error] `error.code` is deprecated, use `error.status`.")); + return statusCode; + } + + }); + Object.defineProperty(this, "headers", { + get() { + logOnceHeaders(new deprecation.Deprecation("[@octokit/request-error] `error.headers` is deprecated, use `error.response.headers`.")); + return headers || {}; + } + + }); + } + +} + +exports.RequestError = RequestError; +//# sourceMappingURL=index.js.map diff --git a/node_modules/@octokit/request-error/dist-node/index.js.map b/node_modules/@octokit/request-error/dist-node/index.js.map new file mode 100644 index 0000000..9134ddb --- 
/dev/null +++ b/node_modules/@octokit/request-error/dist-node/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sources":["../dist-src/index.js"],"sourcesContent":["import { Deprecation } from \"deprecation\";\nimport once from \"once\";\nconst logOnceCode = once((deprecation) => console.warn(deprecation));\nconst logOnceHeaders = once((deprecation) => console.warn(deprecation));\n/**\n * Error with extra properties to help with debugging\n */\nexport class RequestError extends Error {\n constructor(message, statusCode, options) {\n super(message);\n // Maintains proper stack trace (only available on V8)\n /* istanbul ignore next */\n if (Error.captureStackTrace) {\n Error.captureStackTrace(this, this.constructor);\n }\n this.name = \"HttpError\";\n this.status = statusCode;\n let headers;\n if (\"headers\" in options && typeof options.headers !== \"undefined\") {\n headers = options.headers;\n }\n if (\"response\" in options) {\n this.response = options.response;\n headers = options.response.headers;\n }\n // redact request credentials without mutating original request options\n const requestCopy = Object.assign({}, options.request);\n if (options.request.headers.authorization) {\n requestCopy.headers = Object.assign({}, options.request.headers, {\n authorization: options.request.headers.authorization.replace(/ .*$/, \" [REDACTED]\"),\n });\n }\n requestCopy.url = requestCopy.url\n // client_id & client_secret can be passed as URL query parameters to increase rate limit\n // see https://developer.github.com/v3/#increasing-the-unauthenticated-rate-limit-for-oauth-applications\n .replace(/\\bclient_secret=\\w+/g, \"client_secret=[REDACTED]\")\n // OAuth tokens can be passed as URL query parameters, although it is not recommended\n // see https://developer.github.com/v3/#oauth2-token-sent-in-a-header\n .replace(/\\baccess_token=\\w+/g, \"access_token=[REDACTED]\");\n this.request = requestCopy;\n // deprecations\n Object.defineProperty(this, \"code\", {\n get() {\n logOnceCode(new Deprecation(\"[@octokit/request-error] `error.code` is deprecated, use `error.status`.\"));\n return statusCode;\n },\n });\n Object.defineProperty(this, \"headers\", {\n get() {\n logOnceHeaders(new Deprecation(\"[@octokit/request-error] `error.headers` is deprecated, use `error.response.headers`.\"));\n return headers || {};\n },\n });\n 
}\n}\n"],"names":["logOnceCode","once","deprecation","console","warn","logOnceHeaders","RequestError","Error","constructor","message","statusCode","options","captureStackTrace","name","status","headers","response","requestCopy","Object","assign","request","authorization","replace","url","defineProperty","get","Deprecation"],"mappings":";;;;;;;;;AAEA,MAAMA,WAAW,GAAGC,IAAI,CAAEC,WAAD,IAAiBC,OAAO,CAACC,IAAR,CAAaF,WAAb,CAAlB,CAAxB;AACA,MAAMG,cAAc,GAAGJ,IAAI,CAAEC,WAAD,IAAiBC,OAAO,CAACC,IAAR,CAAaF,WAAb,CAAlB,CAA3B;AACA;AACA;AACA;;AACO,MAAMI,YAAN,SAA2BC,KAA3B,CAAiC;AACpCC,EAAAA,WAAW,CAACC,OAAD,EAAUC,UAAV,EAAsBC,OAAtB,EAA+B;AACtC,UAAMF,OAAN,EADsC;;AAGtC;;AACA,QAAIF,KAAK,CAACK,iBAAV,EAA6B;AACzBL,MAAAA,KAAK,CAACK,iBAAN,CAAwB,IAAxB,EAA8B,KAAKJ,WAAnC;AACH;;AACD,SAAKK,IAAL,GAAY,WAAZ;AACA,SAAKC,MAAL,GAAcJ,UAAd;AACA,QAAIK,OAAJ;;AACA,QAAI,aAAaJ,OAAb,IAAwB,OAAOA,OAAO,CAACI,OAAf,KAA2B,WAAvD,EAAoE;AAChEA,MAAAA,OAAO,GAAGJ,OAAO,CAACI,OAAlB;AACH;;AACD,QAAI,cAAcJ,OAAlB,EAA2B;AACvB,WAAKK,QAAL,GAAgBL,OAAO,CAACK,QAAxB;AACAD,MAAAA,OAAO,GAAGJ,OAAO,CAACK,QAAR,CAAiBD,OAA3B;AACH,KAhBqC;;;AAkBtC,UAAME,WAAW,GAAGC,MAAM,CAACC,MAAP,CAAc,EAAd,EAAkBR,OAAO,CAACS,OAA1B,CAApB;;AACA,QAAIT,OAAO,CAACS,OAAR,CAAgBL,OAAhB,CAAwBM,aAA5B,EAA2C;AACvCJ,MAAAA,WAAW,CAACF,OAAZ,GAAsBG,MAAM,CAACC,MAAP,CAAc,EAAd,EAAkBR,OAAO,CAACS,OAAR,CAAgBL,OAAlC,EAA2C;AAC7DM,QAAAA,aAAa,EAAEV,OAAO,CAACS,OAAR,CAAgBL,OAAhB,CAAwBM,aAAxB,CAAsCC,OAAtC,CAA8C,MAA9C,EAAsD,aAAtD;AAD8C,OAA3C,CAAtB;AAGH;;AACDL,IAAAA,WAAW,CAACM,GAAZ,GAAkBN,WAAW,CAACM,GAAZ;AAEd;AAFc,KAGbD,OAHa,CAGL,sBAHK,EAGmB,0BAHnB;AAKd;AALc,KAMbA,OANa,CAML,qBANK,EAMkB,yBANlB,CAAlB;AAOA,SAAKF,OAAL,GAAeH,WAAf,CA/BsC;;AAiCtCC,IAAAA,MAAM,CAACM,cAAP,CAAsB,IAAtB,EAA4B,MAA5B,EAAoC;AAChCC,MAAAA,GAAG,GAAG;AACFzB,QAAAA,WAAW,CAAC,IAAI0B,uBAAJ,CAAgB,0EAAhB,CAAD,CAAX;AACA,eAAOhB,UAAP;AACH;;AAJ+B,KAApC;AAMAQ,IAAAA,MAAM,CAACM,cAAP,CAAsB,IAAtB,EAA4B,SAA5B,EAAuC;AACnCC,MAAAA,GAAG,GAAG;AACFpB,QAAAA,cAAc,CAAC,IAAIqB,uBAAJ,CAAgB,uFAAhB,CAAD,CAAd;AACA,eAAOX,OAAO,IAAI,EAAlB;AACH;;AAJkC,KAAvC;AAMH;;AA9CmC;;;;"} \ No newline at end of file diff --git a/node_modules/@octokit/request-error/dist-src/index.js b/node_modules/@octokit/request-error/dist-src/index.js new file mode 100644 index 0000000..5eb1927 --- /dev/null +++ b/node_modules/@octokit/request-error/dist-src/index.js @@ -0,0 +1,55 @@ +import { Deprecation } from "deprecation"; +import once from "once"; +const logOnceCode = once((deprecation) => console.warn(deprecation)); +const logOnceHeaders = once((deprecation) => console.warn(deprecation)); +/** + * Error with extra properties to help with debugging + */ +export class RequestError extends Error { + constructor(message, statusCode, options) { + super(message); + // Maintains proper stack trace (only available on V8) + /* istanbul ignore next */ + if (Error.captureStackTrace) { + Error.captureStackTrace(this, this.constructor); + } + this.name = "HttpError"; + this.status = statusCode; + let headers; + if ("headers" in options && typeof options.headers !== "undefined") { + headers = options.headers; + } + if ("response" in options) { + this.response = options.response; + headers = options.response.headers; + } + // redact request credentials without mutating original request options + const requestCopy = Object.assign({}, options.request); + if (options.request.headers.authorization) { + requestCopy.headers = Object.assign({}, options.request.headers, { + authorization: options.request.headers.authorization.replace(/ .*$/, " [REDACTED]"), + }); + } + requestCopy.url = requestCopy.url + // client_id & client_secret 
can be passed as URL query parameters to increase rate limit + // see https://developer.github.com/v3/#increasing-the-unauthenticated-rate-limit-for-oauth-applications + .replace(/\bclient_secret=\w+/g, "client_secret=[REDACTED]") + // OAuth tokens can be passed as URL query parameters, although it is not recommended + // see https://developer.github.com/v3/#oauth2-token-sent-in-a-header + .replace(/\baccess_token=\w+/g, "access_token=[REDACTED]"); + this.request = requestCopy; + // deprecations + Object.defineProperty(this, "code", { + get() { + logOnceCode(new Deprecation("[@octokit/request-error] `error.code` is deprecated, use `error.status`.")); + return statusCode; + }, + }); + Object.defineProperty(this, "headers", { + get() { + logOnceHeaders(new Deprecation("[@octokit/request-error] `error.headers` is deprecated, use `error.response.headers`.")); + return headers || {}; + }, + }); + } +} diff --git a/node_modules/@octokit/request-error/dist-src/types.js b/node_modules/@octokit/request-error/dist-src/types.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/node_modules/@octokit/request-error/dist-src/types.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@octokit/request-error/dist-types/index.d.ts b/node_modules/@octokit/request-error/dist-types/index.d.ts new file mode 100644 index 0000000..d6e089c --- /dev/null +++ b/node_modules/@octokit/request-error/dist-types/index.d.ts @@ -0,0 +1,33 @@ +import { RequestOptions, ResponseHeaders, OctokitResponse } from "@octokit/types"; +import { RequestErrorOptions } from "./types"; +/** + * Error with extra properties to help with debugging + */ +export declare class RequestError extends Error { + name: "HttpError"; + /** + * http status code + */ + status: number; + /** + * http status code + * + * @deprecated `error.code` is deprecated in favor of `error.status` + */ + code: number; + /** + * Request options that lead to the error. 
+ */ + request: RequestOptions; + /** + * error response headers + * + * @deprecated `error.headers` is deprecated in favor of `error.response.headers` + */ + headers: ResponseHeaders; + /** + * Response object if a response was received + */ + response?: OctokitResponse; + constructor(message: string, statusCode: number, options: RequestErrorOptions); +} diff --git a/node_modules/@octokit/request-error/dist-types/types.d.ts b/node_modules/@octokit/request-error/dist-types/types.d.ts new file mode 100644 index 0000000..7785231 --- /dev/null +++ b/node_modules/@octokit/request-error/dist-types/types.d.ts @@ -0,0 +1,9 @@ +import { RequestOptions, ResponseHeaders, OctokitResponse } from "@octokit/types"; +export declare type RequestErrorOptions = { + /** @deprecated set `response` instead */ + headers?: ResponseHeaders; + request: RequestOptions; +} | { + response: OctokitResponse; + request: RequestOptions; +}; diff --git a/node_modules/@octokit/request-error/dist-web/index.js b/node_modules/@octokit/request-error/dist-web/index.js new file mode 100644 index 0000000..0fb64be --- /dev/null +++ b/node_modules/@octokit/request-error/dist-web/index.js @@ -0,0 +1,59 @@ +import { Deprecation } from 'deprecation'; +import once from 'once'; + +const logOnceCode = once((deprecation) => console.warn(deprecation)); +const logOnceHeaders = once((deprecation) => console.warn(deprecation)); +/** + * Error with extra properties to help with debugging + */ +class RequestError extends Error { + constructor(message, statusCode, options) { + super(message); + // Maintains proper stack trace (only available on V8) + /* istanbul ignore next */ + if (Error.captureStackTrace) { + Error.captureStackTrace(this, this.constructor); + } + this.name = "HttpError"; + this.status = statusCode; + let headers; + if ("headers" in options && typeof options.headers !== "undefined") { + headers = options.headers; + } + if ("response" in options) { + this.response = options.response; + headers = options.response.headers; + } + // redact request credentials without mutating original request options + const requestCopy = Object.assign({}, options.request); + if (options.request.headers.authorization) { + requestCopy.headers = Object.assign({}, options.request.headers, { + authorization: options.request.headers.authorization.replace(/ .*$/, " [REDACTED]"), + }); + } + requestCopy.url = requestCopy.url + // client_id & client_secret can be passed as URL query parameters to increase rate limit + // see https://developer.github.com/v3/#increasing-the-unauthenticated-rate-limit-for-oauth-applications + .replace(/\bclient_secret=\w+/g, "client_secret=[REDACTED]") + // OAuth tokens can be passed as URL query parameters, although it is not recommended + // see https://developer.github.com/v3/#oauth2-token-sent-in-a-header + .replace(/\baccess_token=\w+/g, "access_token=[REDACTED]"); + this.request = requestCopy; + // deprecations + Object.defineProperty(this, "code", { + get() { + logOnceCode(new Deprecation("[@octokit/request-error] `error.code` is deprecated, use `error.status`.")); + return statusCode; + }, + }); + Object.defineProperty(this, "headers", { + get() { + logOnceHeaders(new Deprecation("[@octokit/request-error] `error.headers` is deprecated, use `error.response.headers`.")); + return headers || {}; + }, + }); + } +} + +export { RequestError }; +//# sourceMappingURL=index.js.map diff --git a/node_modules/@octokit/request-error/dist-web/index.js.map b/node_modules/@octokit/request-error/dist-web/index.js.map new file mode 
100644 index 0000000..78f677f --- /dev/null +++ b/node_modules/@octokit/request-error/dist-web/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sources":["../dist-src/index.js"],"sourcesContent":["import { Deprecation } from \"deprecation\";\nimport once from \"once\";\nconst logOnceCode = once((deprecation) => console.warn(deprecation));\nconst logOnceHeaders = once((deprecation) => console.warn(deprecation));\n/**\n * Error with extra properties to help with debugging\n */\nexport class RequestError extends Error {\n constructor(message, statusCode, options) {\n super(message);\n // Maintains proper stack trace (only available on V8)\n /* istanbul ignore next */\n if (Error.captureStackTrace) {\n Error.captureStackTrace(this, this.constructor);\n }\n this.name = \"HttpError\";\n this.status = statusCode;\n let headers;\n if (\"headers\" in options && typeof options.headers !== \"undefined\") {\n headers = options.headers;\n }\n if (\"response\" in options) {\n this.response = options.response;\n headers = options.response.headers;\n }\n // redact request credentials without mutating original request options\n const requestCopy = Object.assign({}, options.request);\n if (options.request.headers.authorization) {\n requestCopy.headers = Object.assign({}, options.request.headers, {\n authorization: options.request.headers.authorization.replace(/ .*$/, \" [REDACTED]\"),\n });\n }\n requestCopy.url = requestCopy.url\n // client_id & client_secret can be passed as URL query parameters to increase rate limit\n // see https://developer.github.com/v3/#increasing-the-unauthenticated-rate-limit-for-oauth-applications\n .replace(/\\bclient_secret=\\w+/g, \"client_secret=[REDACTED]\")\n // OAuth tokens can be passed as URL query parameters, although it is not recommended\n // see https://developer.github.com/v3/#oauth2-token-sent-in-a-header\n .replace(/\\baccess_token=\\w+/g, \"access_token=[REDACTED]\");\n this.request = requestCopy;\n // deprecations\n Object.defineProperty(this, \"code\", {\n get() {\n logOnceCode(new Deprecation(\"[@octokit/request-error] `error.code` is deprecated, use `error.status`.\"));\n return statusCode;\n },\n });\n Object.defineProperty(this, \"headers\", {\n get() {\n logOnceHeaders(new Deprecation(\"[@octokit/request-error] `error.headers` is deprecated, use `error.response.headers`.\"));\n return headers || {};\n },\n });\n 
}\n}\n"],"names":[],"mappings":";;;AAEA,MAAM,WAAW,GAAG,IAAI,CAAC,CAAC,WAAW,KAAK,OAAO,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC,CAAC;AACrE,MAAM,cAAc,GAAG,IAAI,CAAC,CAAC,WAAW,KAAK,OAAO,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC,CAAC;AACxE;AACA;AACA;AACO,MAAM,YAAY,SAAS,KAAK,CAAC;AACxC,IAAI,WAAW,CAAC,OAAO,EAAE,UAAU,EAAE,OAAO,EAAE;AAC9C,QAAQ,KAAK,CAAC,OAAO,CAAC,CAAC;AACvB;AACA;AACA,QAAQ,IAAI,KAAK,CAAC,iBAAiB,EAAE;AACrC,YAAY,KAAK,CAAC,iBAAiB,CAAC,IAAI,EAAE,IAAI,CAAC,WAAW,CAAC,CAAC;AAC5D,SAAS;AACT,QAAQ,IAAI,CAAC,IAAI,GAAG,WAAW,CAAC;AAChC,QAAQ,IAAI,CAAC,MAAM,GAAG,UAAU,CAAC;AACjC,QAAQ,IAAI,OAAO,CAAC;AACpB,QAAQ,IAAI,SAAS,IAAI,OAAO,IAAI,OAAO,OAAO,CAAC,OAAO,KAAK,WAAW,EAAE;AAC5E,YAAY,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC;AACtC,SAAS;AACT,QAAQ,IAAI,UAAU,IAAI,OAAO,EAAE;AACnC,YAAY,IAAI,CAAC,QAAQ,GAAG,OAAO,CAAC,QAAQ,CAAC;AAC7C,YAAY,OAAO,GAAG,OAAO,CAAC,QAAQ,CAAC,OAAO,CAAC;AAC/C,SAAS;AACT;AACA,QAAQ,MAAM,WAAW,GAAG,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,OAAO,CAAC,OAAO,CAAC,CAAC;AAC/D,QAAQ,IAAI,OAAO,CAAC,OAAO,CAAC,OAAO,CAAC,aAAa,EAAE;AACnD,YAAY,WAAW,CAAC,OAAO,GAAG,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,OAAO,CAAC,OAAO,CAAC,OAAO,EAAE;AAC7E,gBAAgB,aAAa,EAAE,OAAO,CAAC,OAAO,CAAC,OAAO,CAAC,aAAa,CAAC,OAAO,CAAC,MAAM,EAAE,aAAa,CAAC;AACnG,aAAa,CAAC,CAAC;AACf,SAAS;AACT,QAAQ,WAAW,CAAC,GAAG,GAAG,WAAW,CAAC,GAAG;AACzC;AACA;AACA,aAAa,OAAO,CAAC,sBAAsB,EAAE,0BAA0B,CAAC;AACxE;AACA;AACA,aAAa,OAAO,CAAC,qBAAqB,EAAE,yBAAyB,CAAC,CAAC;AACvE,QAAQ,IAAI,CAAC,OAAO,GAAG,WAAW,CAAC;AACnC;AACA,QAAQ,MAAM,CAAC,cAAc,CAAC,IAAI,EAAE,MAAM,EAAE;AAC5C,YAAY,GAAG,GAAG;AAClB,gBAAgB,WAAW,CAAC,IAAI,WAAW,CAAC,0EAA0E,CAAC,CAAC,CAAC;AACzH,gBAAgB,OAAO,UAAU,CAAC;AAClC,aAAa;AACb,SAAS,CAAC,CAAC;AACX,QAAQ,MAAM,CAAC,cAAc,CAAC,IAAI,EAAE,SAAS,EAAE;AAC/C,YAAY,GAAG,GAAG;AAClB,gBAAgB,cAAc,CAAC,IAAI,WAAW,CAAC,uFAAuF,CAAC,CAAC,CAAC;AACzI,gBAAgB,OAAO,OAAO,IAAI,EAAE,CAAC;AACrC,aAAa;AACb,SAAS,CAAC,CAAC;AACX,KAAK;AACL;;;;"} \ No newline at end of file diff --git a/node_modules/@octokit/request-error/package.json b/node_modules/@octokit/request-error/package.json new file mode 100644 index 0000000..2f5b239 --- /dev/null +++ b/node_modules/@octokit/request-error/package.json @@ -0,0 +1,47 @@ +{ + "name": "@octokit/request-error", + "description": "Error class for Octokit request errors", + "version": "2.1.0", + "license": "MIT", + "files": [ + "dist-*/", + "bin/" + ], + "pika": true, + "sideEffects": false, + "keywords": [ + "octokit", + "github", + "api", + "error" + ], + "repository": "github:octokit/request-error.js", + "dependencies": { + "@octokit/types": "^6.0.3", + "deprecation": "^2.0.0", + "once": "^1.4.0" + }, + "devDependencies": { + "@pika/pack": "^0.5.0", + "@pika/plugin-build-node": "^0.9.0", + "@pika/plugin-build-web": "^0.9.0", + "@pika/plugin-bundle-web": "^0.9.0", + "@pika/plugin-ts-standard-pkg": "^0.9.0", + "@types/jest": "^26.0.0", + "@types/node": "^14.0.4", + "@types/once": "^1.4.0", + "jest": "^27.0.0", + "pika-plugin-unpkg-field": "^1.1.0", + "prettier": "2.3.1", + "semantic-release": "^17.0.0", + "ts-jest": "^27.0.0-next.12", + "typescript": "^4.0.0" + }, + "publishConfig": { + "access": "public" + }, + "source": "dist-src/index.js", + "types": "dist-types/index.d.ts", + "main": "dist-node/index.js", + "module": "dist-web/index.js" +} diff --git a/node_modules/@octokit/request/LICENSE b/node_modules/@octokit/request/LICENSE new file mode 100644 index 0000000..af5366d --- /dev/null +++ b/node_modules/@octokit/request/LICENSE @@ -0,0 +1,21 @@ +The MIT License + +Copyright (c) 2018 Octokit contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and 
associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/@octokit/request/README.md b/node_modules/@octokit/request/README.md new file mode 100644 index 0000000..747a670 --- /dev/null +++ b/node_modules/@octokit/request/README.md @@ -0,0 +1,551 @@ +# request.js + +> Send parameterized requests to GitHub’s APIs with sensible defaults in browsers and Node + +[![@latest](https://img.shields.io/npm/v/@octokit/request.svg)](https://www.npmjs.com/package/@octokit/request) +[![Build Status](https://github.com/octokit/request.js/workflows/Test/badge.svg)](https://github.com/octokit/request.js/actions?query=workflow%3ATest+branch%3Amaster) + +`@octokit/request` is a request library for browsers & node that makes it easier +to interact with [GitHub’s REST API](https://developer.github.com/v3/) and +[GitHub’s GraphQL API](https://developer.github.com/v4/guides/forming-calls/#the-graphql-endpoint). + +It uses [`@octokit/endpoint`](https://github.com/octokit/endpoint.js) to parse +the passed options and sends the request using [fetch](https://developer.mozilla.org/en-US/docs/Web/API/Fetch_API) +([node-fetch](https://github.com/bitinn/node-fetch) in Node). + + + + + +- [Features](#features) +- [Usage](#usage) + - [REST API example](#rest-api-example) + - [GraphQL example](#graphql-example) + - [Alternative: pass `method` & `url` as part of options](#alternative-pass-method--url-as-part-of-options) +- [Authentication](#authentication) +- [request()](#request) +- [`request.defaults()`](#requestdefaults) +- [`request.endpoint`](#requestendpoint) +- [Special cases](#special-cases) + - [The `data` parameter – set request body directly](#the-data-parameter-%E2%80%93-set-request-body-directly) + - [Set parameters for both the URL/query and the request body](#set-parameters-for-both-the-urlquery-and-the-request-body) +- [LICENSE](#license) + + + +## Features + +🤩 1:1 mapping of REST API endpoint documentation, e.g. [Add labels to an issue](https://developer.github.com/v3/issues/labels/#add-labels-to-an-issue) becomes + +```js +request("POST /repos/{owner}/{repo}/issues/{number}/labels", { + mediaType: { + previews: ["symmetra"], + }, + owner: "octokit", + repo: "request.js", + number: 1, + labels: ["🐛 bug"], +}); +``` + +👶 [Small bundle size](https://bundlephobia.com/result?p=@octokit/request@5.0.3) (\<4kb minified + gzipped) + +😎 [Authenticate](#authentication) with any of [GitHubs Authentication Strategies](https://github.com/octokit/auth.js). + +👍 Sensible defaults + +- `baseUrl`: `https://api.github.com` +- `headers.accept`: `application/vnd.github.v3+json` +- `headers.agent`: `octokit-request.js/ `, e.g. 
`octokit-request.js/1.2.3 Node.js/10.15.0 (macOS Mojave; x64)` + +👌 Simple to test: mock requests by passing a custom fetch method. + +🧐 Simple to debug: Sets `error.request` to request options causing the error (with redacted credentials). + +## Usage + + + + + + +
+Browsers
+
+Load @octokit/request directly from cdn.skypack.dev
+
+```html
+<script type="module">
+import { request } from "https://cdn.skypack.dev/@octokit/request";
+</script>
+```
+
+Node + + +Install with npm install @octokit/request + +```js +const { request } = require("@octokit/request"); +// or: import { request } from "@octokit/request"; +``` + +
+ +### REST API example + +```js +// Following GitHub docs formatting: +// https://developer.github.com/v3/repos/#list-organization-repositories +const result = await request("GET /orgs/{org}/repos", { + headers: { + authorization: "token 0000000000000000000000000000000000000001", + }, + org: "octokit", + type: "private", +}); + +console.log(`${result.data.length} repos found.`); +``` + +### GraphQL example + +For GraphQL request we recommend using [`@octokit/graphql`](https://github.com/octokit/graphql.js#readme) + +```js +const result = await request("POST /graphql", { + headers: { + authorization: "token 0000000000000000000000000000000000000001", + }, + query: `query ($login: String!) { + organization(login: $login) { + repositories(privacy: PRIVATE) { + totalCount + } + } + }`, + variables: { + login: "octokit", + }, +}); +``` + +### Alternative: pass `method` & `url` as part of options + +Alternatively, pass in a method and a url + +```js +const result = await request({ + method: "GET", + url: "/orgs/{org}/repos", + headers: { + authorization: "token 0000000000000000000000000000000000000001", + }, + org: "octokit", + type: "private", +}); +``` + +## Authentication + +The simplest way to authenticate a request is to set the `Authorization` header directly, e.g. to a [personal access token](https://github.com/settings/tokens/). + +```js +const requestWithAuth = request.defaults({ + headers: { + authorization: "token 0000000000000000000000000000000000000001", + }, +}); +const result = await requestWithAuth("GET /user"); +``` + +For more complex authentication strategies such as GitHub Apps or Basic, we recommend the according authentication library exported by [`@octokit/auth`](https://github.com/octokit/auth.js). + +```js +const { createAppAuth } = require("@octokit/auth-app"); +const auth = createAppAuth({ + appId: process.env.APP_ID, + privateKey: process.env.PRIVATE_KEY, + installationId: 123, +}); +const requestWithAuth = request.defaults({ + request: { + hook: auth.hook, + }, + mediaType: { + previews: ["machine-man"], + }, +}); + +const { data: app } = await requestWithAuth("GET /app"); +const { data: app } = await requestWithAuth( + "POST /repos/{owner}/{repo}/issues", + { + owner: "octocat", + repo: "hello-world", + title: "Hello from the engine room", + } +); +``` + +## request() + +`request(route, options)` or `request(options)`. + +**Options** + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+| name | type | description |
+| ---- | ---- | ----------- |
+| `route` | String | **Required**. If `route` is set it has to be a string consisting of the request method and URL, e.g. `GET /orgs/{org}` |
+| `options.baseUrl` | String | The base URL that `route` or `url` will be prefixed with, if they use relative paths. Defaults to `https://api.github.com`. |
+| `options.headers` | Object | Custom headers. Passed headers are merged with defaults:<br>`headers['user-agent']` defaults to `octokit-rest.js/1.2.3` (where `1.2.3` is the released version).<br>`headers['accept']` defaults to `application/vnd.github.v3+json`.<br>Use `options.mediaType.{format,previews}` to request API previews and custom media types. |
+| `options.mediaType.format` | String | Media type param, such as `raw`, `html`, or `full`. See Media Types. |
+| `options.mediaType.previews` | Array of strings | Names of previews, such as `mercy`, `symmetra`, or `scarlet-witch`. See API Previews. |
+| `options.method` | String | Any supported HTTP verb, case insensitive. Defaults to `GET`. |
+| `options.url` | String | **Required**. A path or full URL which may contain `:variable` or `{variable}` placeholders, e.g. `/orgs/{org}/repos`. The `url` is parsed using url-template. |
+| `options.data` | Any | Set the request body directly instead of setting it to JSON based on additional parameters. See "The `data` parameter" below. |
+| `options.request.agent` | http(s).Agent instance | Node only. Useful for custom proxy, certificate, or DNS lookup. |
+| `options.request.fetch` | Function | Custom replacement for the built-in fetch method. Useful for testing or request hooks. |
+| `options.request.hook` | Function | Function with the signature `hook(request, endpointOptions)`, where `endpointOptions` are the parsed options as returned by `endpoint.merge()`, and `request` is `request()`. This option works great in conjunction with before-after-hook. |
+| `options.request.signal` | `new AbortController().signal` | Use an `AbortController` instance to cancel a request. In Node you can only cancel streamed requests. |
+| `options.request.log` | object | Used for internal logging. Defaults to `console`. |
+| `options.request.timeout` | Number | Node only. Request/response timeout in ms; it resets on redirect. `0` to disable (OS limit applies). `options.request.signal` is recommended instead. |
+
+All other options except `options.request.*` are passed on, depending on the `method` and `url` options:
+
+1. If the option key is a placeholder in the `url`, it is used as the replacement. For example, if the passed options are `{url: '/orgs/{org}/repos', org: 'foo'}`, the returned `options.url` is `https://api.github.com/orgs/foo/repos`.
+2. If the `method` is `GET` or `HEAD`, the option is passed as a query parameter.
+3. Otherwise the parameter is sent in the request body as a JSON key.
+
+**Result**
+
+`request` returns a promise and resolves with 4 keys:
+
+| key | type | description |
+| --- | ---- | ----------- |
+| `status` | Integer | Response status code |
+| `url` | String | URL of the response. If a request results in redirects, this is the final URL. You can send a `HEAD` request to retrieve it without loading the full response body. |
+| `headers` | Object | All response headers |
+| `data` | Any | The response body as returned from the server. If the response is JSON, it is parsed into an object. |
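+
+A minimal usage sketch tying these keys together (the organization and `per_page` values below are placeholders, not defaults):
+
+```js
+const { request } = require("@octokit/request");
+
+async function main() {
+  const response = await request("GET /orgs/{org}/repos", {
+    org: "octokit", // fills the {org} placeholder in the url
+    per_page: 5, // GET request, so this becomes a query parameter
+  });
+
+  console.log(response.status); // e.g. 200
+  console.log(response.url); // final URL after any redirects
+  console.log(response.headers["x-ratelimit-remaining"]); // one of the response headers
+  console.log(response.data.length); // data is the parsed JSON body (an array here)
+}
+
+main().catch(console.error);
+```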
+ +If an error occurs, the `error` instance has additional properties to help with debugging + +- `error.status` The http response status code +- `error.request` The request options such as `method`, `url` and `data` +- `error.response` The http response object with `url`, `headers`, and `data` + +## `request.defaults()` + +Override or set default options. Example: + +```js +const myrequest = require("@octokit/request").defaults({ + baseUrl: "https://github-enterprise.acme-inc.com/api/v3", + headers: { + "user-agent": "myApp/1.2.3", + authorization: `token 0000000000000000000000000000000000000001`, + }, + org: "my-project", + per_page: 100, +}); + +myrequest(`GET /orgs/{org}/repos`); +``` + +You can call `.defaults()` again on the returned method, the defaults will cascade. + +```js +const myProjectRequest = request.defaults({ + baseUrl: "https://github-enterprise.acme-inc.com/api/v3", + headers: { + "user-agent": "myApp/1.2.3", + }, + org: "my-project", +}); +const myProjectRequestWithAuth = myProjectRequest.defaults({ + headers: { + authorization: `token 0000000000000000000000000000000000000001`, + }, +}); +``` + +`myProjectRequest` now defaults the `baseUrl`, `headers['user-agent']`, +`org` and `headers['authorization']` on top of `headers['accept']` that is set +by the global default. + +## `request.endpoint` + +See https://github.com/octokit/endpoint.js. Example + +```js +const options = request.endpoint("GET /orgs/{org}/repos", { + org: "my-project", + type: "private", +}); + +// { +// method: 'GET', +// url: 'https://api.github.com/orgs/my-project/repos?type=private', +// headers: { +// accept: 'application/vnd.github.v3+json', +// authorization: 'token 0000000000000000000000000000000000000001', +// 'user-agent': 'octokit/endpoint.js v1.2.3' +// } +// } +``` + +All of the [`@octokit/endpoint`](https://github.com/octokit/endpoint.js) API can be used: + +- [`octokitRequest.endpoint()`](#endpoint) +- [`octokitRequest.endpoint.defaults()`](#endpointdefaults) +- [`octokitRequest.endpoint.merge()`](#endpointdefaults) +- [`octokitRequest.endpoint.parse()`](#endpointmerge) + +## Special cases + + + +### The `data` parameter – set request body directly + +Some endpoints such as [Render a Markdown document in raw mode](https://developer.github.com/v3/markdown/#render-a-markdown-document-in-raw-mode) don’t have parameters that are sent as request body keys, instead the request body needs to be set directly. In these cases, set the `data` parameter. + +```js +const response = await request("POST /markdown/raw", { + data: "Hello world github/linguist#1 **cool**, and #1!", + headers: { + accept: "text/html;charset=utf-8", + "content-type": "text/plain", + }, +}); + +// Request is sent as +// +// { +// method: 'post', +// url: 'https://api.github.com/markdown/raw', +// headers: { +// accept: 'text/html;charset=utf-8', +// 'content-type': 'text/plain', +// 'user-agent': userAgent +// }, +// body: 'Hello world github/linguist#1 **cool**, and #1!' +// } +// +// not as +// +// { +// ... +// body: '{"data": "Hello world github/linguist#1 **cool**, and #1!"}' +// } +``` + +### Set parameters for both the URL/query and the request body + +There are API endpoints that accept both query parameters as well as a body. In that case you need to add the query parameters as templates to `options.url`, as defined in the [RFC 6570 URI Template specification](https://tools.ietf.org/html/rfc6570). 
+ +Example + +```js +request( + "POST https://uploads.github.com/repos/octocat/Hello-World/releases/1/assets{?name,label}", + { + name: "example.zip", + label: "short description", + headers: { + "content-type": "text/plain", + "content-length": 14, + authorization: `token 0000000000000000000000000000000000000001`, + }, + data: "Hello, world!", + } +); +``` + +## LICENSE + +[MIT](LICENSE) diff --git a/node_modules/@octokit/request/dist-node/index.js b/node_modules/@octokit/request/dist-node/index.js new file mode 100644 index 0000000..685e2f5 --- /dev/null +++ b/node_modules/@octokit/request/dist-node/index.js @@ -0,0 +1,177 @@ +'use strict'; + +Object.defineProperty(exports, '__esModule', { value: true }); + +function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } + +var endpoint = require('@octokit/endpoint'); +var universalUserAgent = require('universal-user-agent'); +var isPlainObject = require('is-plain-object'); +var nodeFetch = _interopDefault(require('node-fetch')); +var requestError = require('@octokit/request-error'); + +const VERSION = "5.6.3"; + +function getBufferResponse(response) { + return response.arrayBuffer(); +} + +function fetchWrapper(requestOptions) { + const log = requestOptions.request && requestOptions.request.log ? requestOptions.request.log : console; + + if (isPlainObject.isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) { + requestOptions.body = JSON.stringify(requestOptions.body); + } + + let headers = {}; + let status; + let url; + const fetch = requestOptions.request && requestOptions.request.fetch || nodeFetch; + return fetch(requestOptions.url, Object.assign({ + method: requestOptions.method, + body: requestOptions.body, + headers: requestOptions.headers, + redirect: requestOptions.redirect + }, // `requestOptions.request.agent` type is incompatible + // see https://github.com/octokit/types.ts/pull/264 + requestOptions.request)).then(async response => { + url = response.url; + status = response.status; + + for (const keyAndValue of response.headers) { + headers[keyAndValue[0]] = keyAndValue[1]; + } + + if ("deprecation" in headers) { + const matches = headers.link && headers.link.match(/<([^>]+)>; rel="deprecation"/); + const deprecationLink = matches && matches.pop(); + log.warn(`[@octokit/request] "${requestOptions.method} ${requestOptions.url}" is deprecated. It is scheduled to be removed on ${headers.sunset}${deprecationLink ? `. 
See ${deprecationLink}` : ""}`); + } + + if (status === 204 || status === 205) { + return; + } // GitHub API returns 200 for HEAD requests + + + if (requestOptions.method === "HEAD") { + if (status < 400) { + return; + } + + throw new requestError.RequestError(response.statusText, status, { + response: { + url, + status, + headers, + data: undefined + }, + request: requestOptions + }); + } + + if (status === 304) { + throw new requestError.RequestError("Not modified", status, { + response: { + url, + status, + headers, + data: await getResponseData(response) + }, + request: requestOptions + }); + } + + if (status >= 400) { + const data = await getResponseData(response); + const error = new requestError.RequestError(toErrorMessage(data), status, { + response: { + url, + status, + headers, + data + }, + request: requestOptions + }); + throw error; + } + + return getResponseData(response); + }).then(data => { + return { + status, + url, + headers, + data + }; + }).catch(error => { + if (error instanceof requestError.RequestError) throw error; + throw new requestError.RequestError(error.message, 500, { + request: requestOptions + }); + }); +} + +async function getResponseData(response) { + const contentType = response.headers.get("content-type"); + + if (/application\/json/.test(contentType)) { + return response.json(); + } + + if (!contentType || /^text\/|charset=utf-8$/.test(contentType)) { + return response.text(); + } + + return getBufferResponse(response); +} + +function toErrorMessage(data) { + if (typeof data === "string") return data; // istanbul ignore else - just in case + + if ("message" in data) { + if (Array.isArray(data.errors)) { + return `${data.message}: ${data.errors.map(JSON.stringify).join(", ")}`; + } + + return data.message; + } // istanbul ignore next - just in case + + + return `Unknown error: ${JSON.stringify(data)}`; +} + +function withDefaults(oldEndpoint, newDefaults) { + const endpoint = oldEndpoint.defaults(newDefaults); + + const newApi = function (route, parameters) { + const endpointOptions = endpoint.merge(route, parameters); + + if (!endpointOptions.request || !endpointOptions.request.hook) { + return fetchWrapper(endpoint.parse(endpointOptions)); + } + + const request = (route, parameters) => { + return fetchWrapper(endpoint.parse(endpoint.merge(route, parameters))); + }; + + Object.assign(request, { + endpoint, + defaults: withDefaults.bind(null, endpoint) + }); + return endpointOptions.request.hook(request, endpointOptions); + }; + + return Object.assign(newApi, { + endpoint, + defaults: withDefaults.bind(null, endpoint) + }); +} + +const request = withDefaults(endpoint.endpoint, { + headers: { + "user-agent": `octokit-request.js/${VERSION} ${universalUserAgent.getUserAgent()}` + } +}); + +exports.request = request; +//# sourceMappingURL=index.js.map diff --git a/node_modules/@octokit/request/dist-node/index.js.map b/node_modules/@octokit/request/dist-node/index.js.map new file mode 100644 index 0000000..9a51ed1 --- /dev/null +++ b/node_modules/@octokit/request/dist-node/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sources":["../dist-src/version.js","../dist-src/get-buffer-response.js","../dist-src/fetch-wrapper.js","../dist-src/with-defaults.js","../dist-src/index.js"],"sourcesContent":["export const VERSION = \"5.6.3\";\n","export default function getBufferResponse(response) {\n return response.arrayBuffer();\n}\n","import { isPlainObject } from \"is-plain-object\";\nimport nodeFetch from \"node-fetch\";\nimport { RequestError } from 
\"@octokit/request-error\";\nimport getBuffer from \"./get-buffer-response\";\nexport default function fetchWrapper(requestOptions) {\n const log = requestOptions.request && requestOptions.request.log\n ? requestOptions.request.log\n : console;\n if (isPlainObject(requestOptions.body) ||\n Array.isArray(requestOptions.body)) {\n requestOptions.body = JSON.stringify(requestOptions.body);\n }\n let headers = {};\n let status;\n let url;\n const fetch = (requestOptions.request && requestOptions.request.fetch) || nodeFetch;\n return fetch(requestOptions.url, Object.assign({\n method: requestOptions.method,\n body: requestOptions.body,\n headers: requestOptions.headers,\n redirect: requestOptions.redirect,\n }, \n // `requestOptions.request.agent` type is incompatible\n // see https://github.com/octokit/types.ts/pull/264\n requestOptions.request))\n .then(async (response) => {\n url = response.url;\n status = response.status;\n for (const keyAndValue of response.headers) {\n headers[keyAndValue[0]] = keyAndValue[1];\n }\n if (\"deprecation\" in headers) {\n const matches = headers.link && headers.link.match(/<([^>]+)>; rel=\"deprecation\"/);\n const deprecationLink = matches && matches.pop();\n log.warn(`[@octokit/request] \"${requestOptions.method} ${requestOptions.url}\" is deprecated. It is scheduled to be removed on ${headers.sunset}${deprecationLink ? `. See ${deprecationLink}` : \"\"}`);\n }\n if (status === 204 || status === 205) {\n return;\n }\n // GitHub API returns 200 for HEAD requests\n if (requestOptions.method === \"HEAD\") {\n if (status < 400) {\n return;\n }\n throw new RequestError(response.statusText, status, {\n response: {\n url,\n status,\n headers,\n data: undefined,\n },\n request: requestOptions,\n });\n }\n if (status === 304) {\n throw new RequestError(\"Not modified\", status, {\n response: {\n url,\n status,\n headers,\n data: await getResponseData(response),\n },\n request: requestOptions,\n });\n }\n if (status >= 400) {\n const data = await getResponseData(response);\n const error = new RequestError(toErrorMessage(data), status, {\n response: {\n url,\n status,\n headers,\n data,\n },\n request: requestOptions,\n });\n throw error;\n }\n return getResponseData(response);\n })\n .then((data) => {\n return {\n status,\n url,\n headers,\n data,\n };\n })\n .catch((error) => {\n if (error instanceof RequestError)\n throw error;\n throw new RequestError(error.message, 500, {\n request: requestOptions,\n });\n });\n}\nasync function getResponseData(response) {\n const contentType = response.headers.get(\"content-type\");\n if (/application\\/json/.test(contentType)) {\n return response.json();\n }\n if (!contentType || /^text\\/|charset=utf-8$/.test(contentType)) {\n return response.text();\n }\n return getBuffer(response);\n}\nfunction toErrorMessage(data) {\n if (typeof data === \"string\")\n return data;\n // istanbul ignore else - just in case\n if (\"message\" in data) {\n if (Array.isArray(data.errors)) {\n return `${data.message}: ${data.errors.map(JSON.stringify).join(\", \")}`;\n }\n return data.message;\n }\n // istanbul ignore next - just in case\n return `Unknown error: ${JSON.stringify(data)}`;\n}\n","import fetchWrapper from \"./fetch-wrapper\";\nexport default function withDefaults(oldEndpoint, newDefaults) {\n const endpoint = oldEndpoint.defaults(newDefaults);\n const newApi = function (route, parameters) {\n const endpointOptions = endpoint.merge(route, parameters);\n if (!endpointOptions.request || !endpointOptions.request.hook) {\n return 
fetchWrapper(endpoint.parse(endpointOptions));\n }\n const request = (route, parameters) => {\n return fetchWrapper(endpoint.parse(endpoint.merge(route, parameters)));\n };\n Object.assign(request, {\n endpoint,\n defaults: withDefaults.bind(null, endpoint),\n });\n return endpointOptions.request.hook(request, endpointOptions);\n };\n return Object.assign(newApi, {\n endpoint,\n defaults: withDefaults.bind(null, endpoint),\n });\n}\n","import { endpoint } from \"@octokit/endpoint\";\nimport { getUserAgent } from \"universal-user-agent\";\nimport { VERSION } from \"./version\";\nimport withDefaults from \"./with-defaults\";\nexport const request = withDefaults(endpoint, {\n headers: {\n \"user-agent\": `octokit-request.js/${VERSION} ${getUserAgent()}`,\n },\n});\n"],"names":["VERSION","getBufferResponse","response","arrayBuffer","fetchWrapper","requestOptions","log","request","console","isPlainObject","body","Array","isArray","JSON","stringify","headers","status","url","fetch","nodeFetch","Object","assign","method","redirect","then","keyAndValue","matches","link","match","deprecationLink","pop","warn","sunset","RequestError","statusText","data","undefined","getResponseData","error","toErrorMessage","catch","message","contentType","get","test","json","text","getBuffer","errors","map","join","withDefaults","oldEndpoint","newDefaults","endpoint","defaults","newApi","route","parameters","endpointOptions","merge","hook","parse","bind","getUserAgent"],"mappings":";;;;;;;;;;;;AAAO,MAAMA,OAAO,GAAG,mBAAhB;;ACAQ,SAASC,iBAAT,CAA2BC,QAA3B,EAAqC;AAChD,SAAOA,QAAQ,CAACC,WAAT,EAAP;AACH;;ACEc,SAASC,YAAT,CAAsBC,cAAtB,EAAsC;AACjD,QAAMC,GAAG,GAAGD,cAAc,CAACE,OAAf,IAA0BF,cAAc,CAACE,OAAf,CAAuBD,GAAjD,GACND,cAAc,CAACE,OAAf,CAAuBD,GADjB,GAENE,OAFN;;AAGA,MAAIC,2BAAa,CAACJ,cAAc,CAACK,IAAhB,CAAb,IACAC,KAAK,CAACC,OAAN,CAAcP,cAAc,CAACK,IAA7B,CADJ,EACwC;AACpCL,IAAAA,cAAc,CAACK,IAAf,GAAsBG,IAAI,CAACC,SAAL,CAAeT,cAAc,CAACK,IAA9B,CAAtB;AACH;;AACD,MAAIK,OAAO,GAAG,EAAd;AACA,MAAIC,MAAJ;AACA,MAAIC,GAAJ;AACA,QAAMC,KAAK,GAAIb,cAAc,CAACE,OAAf,IAA0BF,cAAc,CAACE,OAAf,CAAuBW,KAAlD,IAA4DC,SAA1E;AACA,SAAOD,KAAK,CAACb,cAAc,CAACY,GAAhB,EAAqBG,MAAM,CAACC,MAAP,CAAc;AAC3CC,IAAAA,MAAM,EAAEjB,cAAc,CAACiB,MADoB;AAE3CZ,IAAAA,IAAI,EAAEL,cAAc,CAACK,IAFsB;AAG3CK,IAAAA,OAAO,EAAEV,cAAc,CAACU,OAHmB;AAI3CQ,IAAAA,QAAQ,EAAElB,cAAc,CAACkB;AAJkB,GAAd;AAOjC;AACAlB,EAAAA,cAAc,CAACE,OARkB,CAArB,CAAL,CASFiB,IATE,CASG,MAAOtB,QAAP,IAAoB;AAC1Be,IAAAA,GAAG,GAAGf,QAAQ,CAACe,GAAf;AACAD,IAAAA,MAAM,GAAGd,QAAQ,CAACc,MAAlB;;AACA,SAAK,MAAMS,WAAX,IAA0BvB,QAAQ,CAACa,OAAnC,EAA4C;AACxCA,MAAAA,OAAO,CAACU,WAAW,CAAC,CAAD,CAAZ,CAAP,GAA0BA,WAAW,CAAC,CAAD,CAArC;AACH;;AACD,QAAI,iBAAiBV,OAArB,EAA8B;AAC1B,YAAMW,OAAO,GAAGX,OAAO,CAACY,IAAR,IAAgBZ,OAAO,CAACY,IAAR,CAAaC,KAAb,CAAmB,8BAAnB,CAAhC;AACA,YAAMC,eAAe,GAAGH,OAAO,IAAIA,OAAO,CAACI,GAAR,EAAnC;AACAxB,MAAAA,GAAG,CAACyB,IAAJ,CAAU,uBAAsB1B,cAAc,CAACiB,MAAO,IAAGjB,cAAc,CAACY,GAAI,qDAAoDF,OAAO,CAACiB,MAAO,GAAEH,eAAe,GAAI,SAAQA,eAAgB,EAA5B,GAAgC,EAAG,EAAnM;AACH;;AACD,QAAIb,MAAM,KAAK,GAAX,IAAkBA,MAAM,KAAK,GAAjC,EAAsC;AAClC;AACH,KAbyB;;;AAe1B,QAAIX,cAAc,CAACiB,MAAf,KAA0B,MAA9B,EAAsC;AAClC,UAAIN,MAAM,GAAG,GAAb,EAAkB;AACd;AACH;;AACD,YAAM,IAAIiB,yBAAJ,CAAiB/B,QAAQ,CAACgC,UAA1B,EAAsClB,MAAtC,EAA8C;AAChDd,QAAAA,QAAQ,EAAE;AACNe,UAAAA,GADM;AAEND,UAAAA,MAFM;AAGND,UAAAA,OAHM;AAINoB,UAAAA,IAAI,EAAEC;AAJA,SADsC;AAOhD7B,QAAAA,OAAO,EAAEF;AAPuC,OAA9C,CAAN;AASH;;AACD,QAAIW,MAAM,KAAK,GAAf,EAAoB;AAChB,YAAM,IAAIiB,yBAAJ,CAAiB,cAAjB,EAAiCjB,MAAjC,EAAyC;AAC3Cd,QAAAA,QAAQ,EAAE;AACNe,UAAAA,GADM;AAEND,UAAAA,MAFM;AAGND,UAAAA,OAHM;AAINoB,UAAAA,IAAI,EAAE,MAAME,eAAe,
CAACnC,QAAD;AAJrB,SADiC;AAO3CK,QAAAA,OAAO,EAAEF;AAPkC,OAAzC,CAAN;AASH;;AACD,QAAIW,MAAM,IAAI,GAAd,EAAmB;AACf,YAAMmB,IAAI,GAAG,MAAME,eAAe,CAACnC,QAAD,CAAlC;AACA,YAAMoC,KAAK,GAAG,IAAIL,yBAAJ,CAAiBM,cAAc,CAACJ,IAAD,CAA/B,EAAuCnB,MAAvC,EAA+C;AACzDd,QAAAA,QAAQ,EAAE;AACNe,UAAAA,GADM;AAEND,UAAAA,MAFM;AAGND,UAAAA,OAHM;AAINoB,UAAAA;AAJM,SAD+C;AAOzD5B,QAAAA,OAAO,EAAEF;AAPgD,OAA/C,CAAd;AASA,YAAMiC,KAAN;AACH;;AACD,WAAOD,eAAe,CAACnC,QAAD,CAAtB;AACH,GA/DM,EAgEFsB,IAhEE,CAgEIW,IAAD,IAAU;AAChB,WAAO;AACHnB,MAAAA,MADG;AAEHC,MAAAA,GAFG;AAGHF,MAAAA,OAHG;AAIHoB,MAAAA;AAJG,KAAP;AAMH,GAvEM,EAwEFK,KAxEE,CAwEKF,KAAD,IAAW;AAClB,QAAIA,KAAK,YAAYL,yBAArB,EACI,MAAMK,KAAN;AACJ,UAAM,IAAIL,yBAAJ,CAAiBK,KAAK,CAACG,OAAvB,EAAgC,GAAhC,EAAqC;AACvClC,MAAAA,OAAO,EAAEF;AAD8B,KAArC,CAAN;AAGH,GA9EM,CAAP;AA+EH;;AACD,eAAegC,eAAf,CAA+BnC,QAA/B,EAAyC;AACrC,QAAMwC,WAAW,GAAGxC,QAAQ,CAACa,OAAT,CAAiB4B,GAAjB,CAAqB,cAArB,CAApB;;AACA,MAAI,oBAAoBC,IAApB,CAAyBF,WAAzB,CAAJ,EAA2C;AACvC,WAAOxC,QAAQ,CAAC2C,IAAT,EAAP;AACH;;AACD,MAAI,CAACH,WAAD,IAAgB,yBAAyBE,IAAzB,CAA8BF,WAA9B,CAApB,EAAgE;AAC5D,WAAOxC,QAAQ,CAAC4C,IAAT,EAAP;AACH;;AACD,SAAOC,iBAAS,CAAC7C,QAAD,CAAhB;AACH;;AACD,SAASqC,cAAT,CAAwBJ,IAAxB,EAA8B;AAC1B,MAAI,OAAOA,IAAP,KAAgB,QAApB,EACI,OAAOA,IAAP,CAFsB;;AAI1B,MAAI,aAAaA,IAAjB,EAAuB;AACnB,QAAIxB,KAAK,CAACC,OAAN,CAAcuB,IAAI,CAACa,MAAnB,CAAJ,EAAgC;AAC5B,aAAQ,GAAEb,IAAI,CAACM,OAAQ,KAAIN,IAAI,CAACa,MAAL,CAAYC,GAAZ,CAAgBpC,IAAI,CAACC,SAArB,EAAgCoC,IAAhC,CAAqC,IAArC,CAA2C,EAAtE;AACH;;AACD,WAAOf,IAAI,CAACM,OAAZ;AACH,GATyB;;;AAW1B,SAAQ,kBAAiB5B,IAAI,CAACC,SAAL,CAAeqB,IAAf,CAAqB,EAA9C;AACH;;ACrHc,SAASgB,YAAT,CAAsBC,WAAtB,EAAmCC,WAAnC,EAAgD;AAC3D,QAAMC,QAAQ,GAAGF,WAAW,CAACG,QAAZ,CAAqBF,WAArB,CAAjB;;AACA,QAAMG,MAAM,GAAG,UAAUC,KAAV,EAAiBC,UAAjB,EAA6B;AACxC,UAAMC,eAAe,GAAGL,QAAQ,CAACM,KAAT,CAAeH,KAAf,EAAsBC,UAAtB,CAAxB;;AACA,QAAI,CAACC,eAAe,CAACpD,OAAjB,IAA4B,CAACoD,eAAe,CAACpD,OAAhB,CAAwBsD,IAAzD,EAA+D;AAC3D,aAAOzD,YAAY,CAACkD,QAAQ,CAACQ,KAAT,CAAeH,eAAf,CAAD,CAAnB;AACH;;AACD,UAAMpD,OAAO,GAAG,CAACkD,KAAD,EAAQC,UAAR,KAAuB;AACnC,aAAOtD,YAAY,CAACkD,QAAQ,CAACQ,KAAT,CAAeR,QAAQ,CAACM,KAAT,CAAeH,KAAf,EAAsBC,UAAtB,CAAf,CAAD,CAAnB;AACH,KAFD;;AAGAtC,IAAAA,MAAM,CAACC,MAAP,CAAcd,OAAd,EAAuB;AACnB+C,MAAAA,QADmB;AAEnBC,MAAAA,QAAQ,EAAEJ,YAAY,CAACY,IAAb,CAAkB,IAAlB,EAAwBT,QAAxB;AAFS,KAAvB;AAIA,WAAOK,eAAe,CAACpD,OAAhB,CAAwBsD,IAAxB,CAA6BtD,OAA7B,EAAsCoD,eAAtC,CAAP;AACH,GAbD;;AAcA,SAAOvC,MAAM,CAACC,MAAP,CAAcmC,MAAd,EAAsB;AACzBF,IAAAA,QADyB;AAEzBC,IAAAA,QAAQ,EAAEJ,YAAY,CAACY,IAAb,CAAkB,IAAlB,EAAwBT,QAAxB;AAFe,GAAtB,CAAP;AAIH;;MCjBY/C,OAAO,GAAG4C,YAAY,CAACG,iBAAD,EAAW;AAC1CvC,EAAAA,OAAO,EAAE;AACL,kBAAe,sBAAqBf,OAAQ,IAAGgE,+BAAY,EAAG;AADzD;AADiC,CAAX,CAA5B;;;;"} \ No newline at end of file diff --git a/node_modules/@octokit/request/dist-src/fetch-wrapper.js b/node_modules/@octokit/request/dist-src/fetch-wrapper.js new file mode 100644 index 0000000..79653c4 --- /dev/null +++ b/node_modules/@octokit/request/dist-src/fetch-wrapper.js @@ -0,0 +1,119 @@ +import { isPlainObject } from "is-plain-object"; +import nodeFetch from "node-fetch"; +import { RequestError } from "@octokit/request-error"; +import getBuffer from "./get-buffer-response"; +export default function fetchWrapper(requestOptions) { + const log = requestOptions.request && requestOptions.request.log + ? 
requestOptions.request.log + : console; + if (isPlainObject(requestOptions.body) || + Array.isArray(requestOptions.body)) { + requestOptions.body = JSON.stringify(requestOptions.body); + } + let headers = {}; + let status; + let url; + const fetch = (requestOptions.request && requestOptions.request.fetch) || nodeFetch; + return fetch(requestOptions.url, Object.assign({ + method: requestOptions.method, + body: requestOptions.body, + headers: requestOptions.headers, + redirect: requestOptions.redirect, + }, + // `requestOptions.request.agent` type is incompatible + // see https://github.com/octokit/types.ts/pull/264 + requestOptions.request)) + .then(async (response) => { + url = response.url; + status = response.status; + for (const keyAndValue of response.headers) { + headers[keyAndValue[0]] = keyAndValue[1]; + } + if ("deprecation" in headers) { + const matches = headers.link && headers.link.match(/<([^>]+)>; rel="deprecation"/); + const deprecationLink = matches && matches.pop(); + log.warn(`[@octokit/request] "${requestOptions.method} ${requestOptions.url}" is deprecated. It is scheduled to be removed on ${headers.sunset}${deprecationLink ? `. See ${deprecationLink}` : ""}`); + } + if (status === 204 || status === 205) { + return; + } + // GitHub API returns 200 for HEAD requests + if (requestOptions.method === "HEAD") { + if (status < 400) { + return; + } + throw new RequestError(response.statusText, status, { + response: { + url, + status, + headers, + data: undefined, + }, + request: requestOptions, + }); + } + if (status === 304) { + throw new RequestError("Not modified", status, { + response: { + url, + status, + headers, + data: await getResponseData(response), + }, + request: requestOptions, + }); + } + if (status >= 400) { + const data = await getResponseData(response); + const error = new RequestError(toErrorMessage(data), status, { + response: { + url, + status, + headers, + data, + }, + request: requestOptions, + }); + throw error; + } + return getResponseData(response); + }) + .then((data) => { + return { + status, + url, + headers, + data, + }; + }) + .catch((error) => { + if (error instanceof RequestError) + throw error; + throw new RequestError(error.message, 500, { + request: requestOptions, + }); + }); +} +async function getResponseData(response) { + const contentType = response.headers.get("content-type"); + if (/application\/json/.test(contentType)) { + return response.json(); + } + if (!contentType || /^text\/|charset=utf-8$/.test(contentType)) { + return response.text(); + } + return getBuffer(response); +} +function toErrorMessage(data) { + if (typeof data === "string") + return data; + // istanbul ignore else - just in case + if ("message" in data) { + if (Array.isArray(data.errors)) { + return `${data.message}: ${data.errors.map(JSON.stringify).join(", ")}`; + } + return data.message; + } + // istanbul ignore next - just in case + return `Unknown error: ${JSON.stringify(data)}`; +} diff --git a/node_modules/@octokit/request/dist-src/get-buffer-response.js b/node_modules/@octokit/request/dist-src/get-buffer-response.js new file mode 100644 index 0000000..845a394 --- /dev/null +++ b/node_modules/@octokit/request/dist-src/get-buffer-response.js @@ -0,0 +1,3 @@ +export default function getBufferResponse(response) { + return response.arrayBuffer(); +} diff --git a/node_modules/@octokit/request/dist-src/index.js b/node_modules/@octokit/request/dist-src/index.js new file mode 100644 index 0000000..2460e99 --- /dev/null +++ 
b/node_modules/@octokit/request/dist-src/index.js @@ -0,0 +1,9 @@ +import { endpoint } from "@octokit/endpoint"; +import { getUserAgent } from "universal-user-agent"; +import { VERSION } from "./version"; +import withDefaults from "./with-defaults"; +export const request = withDefaults(endpoint, { + headers: { + "user-agent": `octokit-request.js/${VERSION} ${getUserAgent()}`, + }, +}); diff --git a/node_modules/@octokit/request/dist-src/version.js b/node_modules/@octokit/request/dist-src/version.js new file mode 100644 index 0000000..a068c68 --- /dev/null +++ b/node_modules/@octokit/request/dist-src/version.js @@ -0,0 +1 @@ +export const VERSION = "5.6.3"; diff --git a/node_modules/@octokit/request/dist-src/with-defaults.js b/node_modules/@octokit/request/dist-src/with-defaults.js new file mode 100644 index 0000000..e206429 --- /dev/null +++ b/node_modules/@octokit/request/dist-src/with-defaults.js @@ -0,0 +1,22 @@ +import fetchWrapper from "./fetch-wrapper"; +export default function withDefaults(oldEndpoint, newDefaults) { + const endpoint = oldEndpoint.defaults(newDefaults); + const newApi = function (route, parameters) { + const endpointOptions = endpoint.merge(route, parameters); + if (!endpointOptions.request || !endpointOptions.request.hook) { + return fetchWrapper(endpoint.parse(endpointOptions)); + } + const request = (route, parameters) => { + return fetchWrapper(endpoint.parse(endpoint.merge(route, parameters))); + }; + Object.assign(request, { + endpoint, + defaults: withDefaults.bind(null, endpoint), + }); + return endpointOptions.request.hook(request, endpointOptions); + }; + return Object.assign(newApi, { + endpoint, + defaults: withDefaults.bind(null, endpoint), + }); +} diff --git a/node_modules/@octokit/request/dist-types/fetch-wrapper.d.ts b/node_modules/@octokit/request/dist-types/fetch-wrapper.d.ts new file mode 100644 index 0000000..4901c79 --- /dev/null +++ b/node_modules/@octokit/request/dist-types/fetch-wrapper.d.ts @@ -0,0 +1,11 @@ +import { EndpointInterface } from "@octokit/types"; +export default function fetchWrapper(requestOptions: ReturnType & { + redirect?: "error" | "follow" | "manual"; +}): Promise<{ + status: number; + url: string; + headers: { + [header: string]: string; + }; + data: any; +}>; diff --git a/node_modules/@octokit/request/dist-types/get-buffer-response.d.ts b/node_modules/@octokit/request/dist-types/get-buffer-response.d.ts new file mode 100644 index 0000000..915b705 --- /dev/null +++ b/node_modules/@octokit/request/dist-types/get-buffer-response.d.ts @@ -0,0 +1,2 @@ +import { Response } from "node-fetch"; +export default function getBufferResponse(response: Response): Promise; diff --git a/node_modules/@octokit/request/dist-types/index.d.ts b/node_modules/@octokit/request/dist-types/index.d.ts new file mode 100644 index 0000000..1030809 --- /dev/null +++ b/node_modules/@octokit/request/dist-types/index.d.ts @@ -0,0 +1 @@ +export declare const request: import("@octokit/types").RequestInterface; diff --git a/node_modules/@octokit/request/dist-types/version.d.ts b/node_modules/@octokit/request/dist-types/version.d.ts new file mode 100644 index 0000000..5629807 --- /dev/null +++ b/node_modules/@octokit/request/dist-types/version.d.ts @@ -0,0 +1 @@ +export declare const VERSION = "5.6.3"; diff --git a/node_modules/@octokit/request/dist-types/with-defaults.d.ts b/node_modules/@octokit/request/dist-types/with-defaults.d.ts new file mode 100644 index 0000000..0080469 --- /dev/null +++ b/node_modules/@octokit/request/dist-types/with-defaults.d.ts 
@@ -0,0 +1,2 @@ +import { EndpointInterface, RequestInterface, RequestParameters } from "@octokit/types"; +export default function withDefaults(oldEndpoint: EndpointInterface, newDefaults: RequestParameters): RequestInterface; diff --git a/node_modules/@octokit/request/dist-web/index.js b/node_modules/@octokit/request/dist-web/index.js new file mode 100644 index 0000000..44359f8 --- /dev/null +++ b/node_modules/@octokit/request/dist-web/index.js @@ -0,0 +1,158 @@ +import { endpoint } from '@octokit/endpoint'; +import { getUserAgent } from 'universal-user-agent'; +import { isPlainObject } from 'is-plain-object'; +import nodeFetch from 'node-fetch'; +import { RequestError } from '@octokit/request-error'; + +const VERSION = "5.6.3"; + +function getBufferResponse(response) { + return response.arrayBuffer(); +} + +function fetchWrapper(requestOptions) { + const log = requestOptions.request && requestOptions.request.log + ? requestOptions.request.log + : console; + if (isPlainObject(requestOptions.body) || + Array.isArray(requestOptions.body)) { + requestOptions.body = JSON.stringify(requestOptions.body); + } + let headers = {}; + let status; + let url; + const fetch = (requestOptions.request && requestOptions.request.fetch) || nodeFetch; + return fetch(requestOptions.url, Object.assign({ + method: requestOptions.method, + body: requestOptions.body, + headers: requestOptions.headers, + redirect: requestOptions.redirect, + }, + // `requestOptions.request.agent` type is incompatible + // see https://github.com/octokit/types.ts/pull/264 + requestOptions.request)) + .then(async (response) => { + url = response.url; + status = response.status; + for (const keyAndValue of response.headers) { + headers[keyAndValue[0]] = keyAndValue[1]; + } + if ("deprecation" in headers) { + const matches = headers.link && headers.link.match(/<([^>]+)>; rel="deprecation"/); + const deprecationLink = matches && matches.pop(); + log.warn(`[@octokit/request] "${requestOptions.method} ${requestOptions.url}" is deprecated. It is scheduled to be removed on ${headers.sunset}${deprecationLink ? `. 
See ${deprecationLink}` : ""}`); + } + if (status === 204 || status === 205) { + return; + } + // GitHub API returns 200 for HEAD requests + if (requestOptions.method === "HEAD") { + if (status < 400) { + return; + } + throw new RequestError(response.statusText, status, { + response: { + url, + status, + headers, + data: undefined, + }, + request: requestOptions, + }); + } + if (status === 304) { + throw new RequestError("Not modified", status, { + response: { + url, + status, + headers, + data: await getResponseData(response), + }, + request: requestOptions, + }); + } + if (status >= 400) { + const data = await getResponseData(response); + const error = new RequestError(toErrorMessage(data), status, { + response: { + url, + status, + headers, + data, + }, + request: requestOptions, + }); + throw error; + } + return getResponseData(response); + }) + .then((data) => { + return { + status, + url, + headers, + data, + }; + }) + .catch((error) => { + if (error instanceof RequestError) + throw error; + throw new RequestError(error.message, 500, { + request: requestOptions, + }); + }); +} +async function getResponseData(response) { + const contentType = response.headers.get("content-type"); + if (/application\/json/.test(contentType)) { + return response.json(); + } + if (!contentType || /^text\/|charset=utf-8$/.test(contentType)) { + return response.text(); + } + return getBufferResponse(response); +} +function toErrorMessage(data) { + if (typeof data === "string") + return data; + // istanbul ignore else - just in case + if ("message" in data) { + if (Array.isArray(data.errors)) { + return `${data.message}: ${data.errors.map(JSON.stringify).join(", ")}`; + } + return data.message; + } + // istanbul ignore next - just in case + return `Unknown error: ${JSON.stringify(data)}`; +} + +function withDefaults(oldEndpoint, newDefaults) { + const endpoint = oldEndpoint.defaults(newDefaults); + const newApi = function (route, parameters) { + const endpointOptions = endpoint.merge(route, parameters); + if (!endpointOptions.request || !endpointOptions.request.hook) { + return fetchWrapper(endpoint.parse(endpointOptions)); + } + const request = (route, parameters) => { + return fetchWrapper(endpoint.parse(endpoint.merge(route, parameters))); + }; + Object.assign(request, { + endpoint, + defaults: withDefaults.bind(null, endpoint), + }); + return endpointOptions.request.hook(request, endpointOptions); + }; + return Object.assign(newApi, { + endpoint, + defaults: withDefaults.bind(null, endpoint), + }); +} + +const request = withDefaults(endpoint, { + headers: { + "user-agent": `octokit-request.js/${VERSION} ${getUserAgent()}`, + }, +}); + +export { request }; +//# sourceMappingURL=index.js.map diff --git a/node_modules/@octokit/request/dist-web/index.js.map b/node_modules/@octokit/request/dist-web/index.js.map new file mode 100644 index 0000000..b3cda51 --- /dev/null +++ b/node_modules/@octokit/request/dist-web/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sources":["../dist-src/version.js","../dist-src/get-buffer-response.js","../dist-src/fetch-wrapper.js","../dist-src/with-defaults.js","../dist-src/index.js"],"sourcesContent":["export const VERSION = \"5.6.3\";\n","export default function getBufferResponse(response) {\n return response.arrayBuffer();\n}\n","import { isPlainObject } from \"is-plain-object\";\nimport nodeFetch from \"node-fetch\";\nimport { RequestError } from \"@octokit/request-error\";\nimport getBuffer from \"./get-buffer-response\";\nexport default function 
fetchWrapper(requestOptions) {\n const log = requestOptions.request && requestOptions.request.log\n ? requestOptions.request.log\n : console;\n if (isPlainObject(requestOptions.body) ||\n Array.isArray(requestOptions.body)) {\n requestOptions.body = JSON.stringify(requestOptions.body);\n }\n let headers = {};\n let status;\n let url;\n const fetch = (requestOptions.request && requestOptions.request.fetch) || nodeFetch;\n return fetch(requestOptions.url, Object.assign({\n method: requestOptions.method,\n body: requestOptions.body,\n headers: requestOptions.headers,\n redirect: requestOptions.redirect,\n }, \n // `requestOptions.request.agent` type is incompatible\n // see https://github.com/octokit/types.ts/pull/264\n requestOptions.request))\n .then(async (response) => {\n url = response.url;\n status = response.status;\n for (const keyAndValue of response.headers) {\n headers[keyAndValue[0]] = keyAndValue[1];\n }\n if (\"deprecation\" in headers) {\n const matches = headers.link && headers.link.match(/<([^>]+)>; rel=\"deprecation\"/);\n const deprecationLink = matches && matches.pop();\n log.warn(`[@octokit/request] \"${requestOptions.method} ${requestOptions.url}\" is deprecated. It is scheduled to be removed on ${headers.sunset}${deprecationLink ? `. See ${deprecationLink}` : \"\"}`);\n }\n if (status === 204 || status === 205) {\n return;\n }\n // GitHub API returns 200 for HEAD requests\n if (requestOptions.method === \"HEAD\") {\n if (status < 400) {\n return;\n }\n throw new RequestError(response.statusText, status, {\n response: {\n url,\n status,\n headers,\n data: undefined,\n },\n request: requestOptions,\n });\n }\n if (status === 304) {\n throw new RequestError(\"Not modified\", status, {\n response: {\n url,\n status,\n headers,\n data: await getResponseData(response),\n },\n request: requestOptions,\n });\n }\n if (status >= 400) {\n const data = await getResponseData(response);\n const error = new RequestError(toErrorMessage(data), status, {\n response: {\n url,\n status,\n headers,\n data,\n },\n request: requestOptions,\n });\n throw error;\n }\n return getResponseData(response);\n })\n .then((data) => {\n return {\n status,\n url,\n headers,\n data,\n };\n })\n .catch((error) => {\n if (error instanceof RequestError)\n throw error;\n throw new RequestError(error.message, 500, {\n request: requestOptions,\n });\n });\n}\nasync function getResponseData(response) {\n const contentType = response.headers.get(\"content-type\");\n if (/application\\/json/.test(contentType)) {\n return response.json();\n }\n if (!contentType || /^text\\/|charset=utf-8$/.test(contentType)) {\n return response.text();\n }\n return getBuffer(response);\n}\nfunction toErrorMessage(data) {\n if (typeof data === \"string\")\n return data;\n // istanbul ignore else - just in case\n if (\"message\" in data) {\n if (Array.isArray(data.errors)) {\n return `${data.message}: ${data.errors.map(JSON.stringify).join(\", \")}`;\n }\n return data.message;\n }\n // istanbul ignore next - just in case\n return `Unknown error: ${JSON.stringify(data)}`;\n}\n","import fetchWrapper from \"./fetch-wrapper\";\nexport default function withDefaults(oldEndpoint, newDefaults) {\n const endpoint = oldEndpoint.defaults(newDefaults);\n const newApi = function (route, parameters) {\n const endpointOptions = endpoint.merge(route, parameters);\n if (!endpointOptions.request || !endpointOptions.request.hook) {\n return fetchWrapper(endpoint.parse(endpointOptions));\n }\n const request = (route, parameters) => {\n return 
fetchWrapper(endpoint.parse(endpoint.merge(route, parameters)));\n };\n Object.assign(request, {\n endpoint,\n defaults: withDefaults.bind(null, endpoint),\n });\n return endpointOptions.request.hook(request, endpointOptions);\n };\n return Object.assign(newApi, {\n endpoint,\n defaults: withDefaults.bind(null, endpoint),\n });\n}\n","import { endpoint } from \"@octokit/endpoint\";\nimport { getUserAgent } from \"universal-user-agent\";\nimport { VERSION } from \"./version\";\nimport withDefaults from \"./with-defaults\";\nexport const request = withDefaults(endpoint, {\n headers: {\n \"user-agent\": `octokit-request.js/${VERSION} ${getUserAgent()}`,\n },\n});\n"],"names":["getBuffer"],"mappings":";;;;;;AAAO,MAAM,OAAO,GAAG,mBAAmB;;ACA3B,SAAS,iBAAiB,CAAC,QAAQ,EAAE;AACpD,IAAI,OAAO,QAAQ,CAAC,WAAW,EAAE,CAAC;AAClC,CAAC;;ACEc,SAAS,YAAY,CAAC,cAAc,EAAE;AACrD,IAAI,MAAM,GAAG,GAAG,cAAc,CAAC,OAAO,IAAI,cAAc,CAAC,OAAO,CAAC,GAAG;AACpE,UAAU,cAAc,CAAC,OAAO,CAAC,GAAG;AACpC,UAAU,OAAO,CAAC;AAClB,IAAI,IAAI,aAAa,CAAC,cAAc,CAAC,IAAI,CAAC;AAC1C,QAAQ,KAAK,CAAC,OAAO,CAAC,cAAc,CAAC,IAAI,CAAC,EAAE;AAC5C,QAAQ,cAAc,CAAC,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,cAAc,CAAC,IAAI,CAAC,CAAC;AAClE,KAAK;AACL,IAAI,IAAI,OAAO,GAAG,EAAE,CAAC;AACrB,IAAI,IAAI,MAAM,CAAC;AACf,IAAI,IAAI,GAAG,CAAC;AACZ,IAAI,MAAM,KAAK,GAAG,CAAC,cAAc,CAAC,OAAO,IAAI,cAAc,CAAC,OAAO,CAAC,KAAK,KAAK,SAAS,CAAC;AACxF,IAAI,OAAO,KAAK,CAAC,cAAc,CAAC,GAAG,EAAE,MAAM,CAAC,MAAM,CAAC;AACnD,QAAQ,MAAM,EAAE,cAAc,CAAC,MAAM;AACrC,QAAQ,IAAI,EAAE,cAAc,CAAC,IAAI;AACjC,QAAQ,OAAO,EAAE,cAAc,CAAC,OAAO;AACvC,QAAQ,QAAQ,EAAE,cAAc,CAAC,QAAQ;AACzC,KAAK;AACL;AACA;AACA,IAAI,cAAc,CAAC,OAAO,CAAC,CAAC;AAC5B,SAAS,IAAI,CAAC,OAAO,QAAQ,KAAK;AAClC,QAAQ,GAAG,GAAG,QAAQ,CAAC,GAAG,CAAC;AAC3B,QAAQ,MAAM,GAAG,QAAQ,CAAC,MAAM,CAAC;AACjC,QAAQ,KAAK,MAAM,WAAW,IAAI,QAAQ,CAAC,OAAO,EAAE;AACpD,YAAY,OAAO,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,GAAG,WAAW,CAAC,CAAC,CAAC,CAAC;AACrD,SAAS;AACT,QAAQ,IAAI,aAAa,IAAI,OAAO,EAAE;AACtC,YAAY,MAAM,OAAO,GAAG,OAAO,CAAC,IAAI,IAAI,OAAO,CAAC,IAAI,CAAC,KAAK,CAAC,8BAA8B,CAAC,CAAC;AAC/F,YAAY,MAAM,eAAe,GAAG,OAAO,IAAI,OAAO,CAAC,GAAG,EAAE,CAAC;AAC7D,YAAY,GAAG,CAAC,IAAI,CAAC,CAAC,oBAAoB,EAAE,cAAc,CAAC,MAAM,CAAC,CAAC,EAAE,cAAc,CAAC,GAAG,CAAC,kDAAkD,EAAE,OAAO,CAAC,MAAM,CAAC,EAAE,eAAe,GAAG,CAAC,MAAM,EAAE,eAAe,CAAC,CAAC,GAAG,EAAE,CAAC,CAAC,CAAC,CAAC;AAClN,SAAS;AACT,QAAQ,IAAI,MAAM,KAAK,GAAG,IAAI,MAAM,KAAK,GAAG,EAAE;AAC9C,YAAY,OAAO;AACnB,SAAS;AACT;AACA,QAAQ,IAAI,cAAc,CAAC,MAAM,KAAK,MAAM,EAAE;AAC9C,YAAY,IAAI,MAAM,GAAG,GAAG,EAAE;AAC9B,gBAAgB,OAAO;AACvB,aAAa;AACb,YAAY,MAAM,IAAI,YAAY,CAAC,QAAQ,CAAC,UAAU,EAAE,MAAM,EAAE;AAChE,gBAAgB,QAAQ,EAAE;AAC1B,oBAAoB,GAAG;AACvB,oBAAoB,MAAM;AAC1B,oBAAoB,OAAO;AAC3B,oBAAoB,IAAI,EAAE,SAAS;AACnC,iBAAiB;AACjB,gBAAgB,OAAO,EAAE,cAAc;AACvC,aAAa,CAAC,CAAC;AACf,SAAS;AACT,QAAQ,IAAI,MAAM,KAAK,GAAG,EAAE;AAC5B,YAAY,MAAM,IAAI,YAAY,CAAC,cAAc,EAAE,MAAM,EAAE;AAC3D,gBAAgB,QAAQ,EAAE;AAC1B,oBAAoB,GAAG;AACvB,oBAAoB,MAAM;AAC1B,oBAAoB,OAAO;AAC3B,oBAAoB,IAAI,EAAE,MAAM,eAAe,CAAC,QAAQ,CAAC;AACzD,iBAAiB;AACjB,gBAAgB,OAAO,EAAE,cAAc;AACvC,aAAa,CAAC,CAAC;AACf,SAAS;AACT,QAAQ,IAAI,MAAM,IAAI,GAAG,EAAE;AAC3B,YAAY,MAAM,IAAI,GAAG,MAAM,eAAe,CAAC,QAAQ,CAAC,CAAC;AACzD,YAAY,MAAM,KAAK,GAAG,IAAI,YAAY,CAAC,cAAc,CAAC,IAAI,CAAC,EAAE,MAAM,EAAE;AACzE,gBAAgB,QAAQ,EAAE;AAC1B,oBAAoB,GAAG;AACvB,oBAAoB,MAAM;AAC1B,oBAAoB,OAAO;AAC3B,oBAAoB,IAAI;AACxB,iBAAiB;AACjB,gBAAgB,OAAO,EAAE,cAAc;AACvC,aAAa,CAAC,CAAC;AACf,YAAY,MAAM,KAAK,CAAC;AACxB,SAAS;AACT,QAAQ,OAAO,eAAe,CAAC,QAAQ,CAAC,CAAC;AACzC,KAAK,CAAC;AACN,SAAS,IAAI,CAAC,CAAC,IAAI,KAAK;AACxB,QAAQ,OAAO;AACf,YAAY,MAAM;AAClB,YAAY,GAAG;AACf,YAAY,OAAO;AACnB,YAAY,IAAI;AAChB,SAAS,CAAC;AACV,KAAK,CAAC;A
ACN,SAAS,KAAK,CAAC,CAAC,KAAK,KAAK;AAC1B,QAAQ,IAAI,KAAK,YAAY,YAAY;AACzC,YAAY,MAAM,KAAK,CAAC;AACxB,QAAQ,MAAM,IAAI,YAAY,CAAC,KAAK,CAAC,OAAO,EAAE,GAAG,EAAE;AACnD,YAAY,OAAO,EAAE,cAAc;AACnC,SAAS,CAAC,CAAC;AACX,KAAK,CAAC,CAAC;AACP,CAAC;AACD,eAAe,eAAe,CAAC,QAAQ,EAAE;AACzC,IAAI,MAAM,WAAW,GAAG,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,CAAC;AAC7D,IAAI,IAAI,mBAAmB,CAAC,IAAI,CAAC,WAAW,CAAC,EAAE;AAC/C,QAAQ,OAAO,QAAQ,CAAC,IAAI,EAAE,CAAC;AAC/B,KAAK;AACL,IAAI,IAAI,CAAC,WAAW,IAAI,wBAAwB,CAAC,IAAI,CAAC,WAAW,CAAC,EAAE;AACpE,QAAQ,OAAO,QAAQ,CAAC,IAAI,EAAE,CAAC;AAC/B,KAAK;AACL,IAAI,OAAOA,iBAAS,CAAC,QAAQ,CAAC,CAAC;AAC/B,CAAC;AACD,SAAS,cAAc,CAAC,IAAI,EAAE;AAC9B,IAAI,IAAI,OAAO,IAAI,KAAK,QAAQ;AAChC,QAAQ,OAAO,IAAI,CAAC;AACpB;AACA,IAAI,IAAI,SAAS,IAAI,IAAI,EAAE;AAC3B,QAAQ,IAAI,KAAK,CAAC,OAAO,CAAC,IAAI,CAAC,MAAM,CAAC,EAAE;AACxC,YAAY,OAAO,CAAC,EAAE,IAAI,CAAC,OAAO,CAAC,EAAE,EAAE,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;AACpF,SAAS;AACT,QAAQ,OAAO,IAAI,CAAC,OAAO,CAAC;AAC5B,KAAK;AACL;AACA,IAAI,OAAO,CAAC,eAAe,EAAE,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;AACpD,CAAC;;ACrHc,SAAS,YAAY,CAAC,WAAW,EAAE,WAAW,EAAE;AAC/D,IAAI,MAAM,QAAQ,GAAG,WAAW,CAAC,QAAQ,CAAC,WAAW,CAAC,CAAC;AACvD,IAAI,MAAM,MAAM,GAAG,UAAU,KAAK,EAAE,UAAU,EAAE;AAChD,QAAQ,MAAM,eAAe,GAAG,QAAQ,CAAC,KAAK,CAAC,KAAK,EAAE,UAAU,CAAC,CAAC;AAClE,QAAQ,IAAI,CAAC,eAAe,CAAC,OAAO,IAAI,CAAC,eAAe,CAAC,OAAO,CAAC,IAAI,EAAE;AACvE,YAAY,OAAO,YAAY,CAAC,QAAQ,CAAC,KAAK,CAAC,eAAe,CAAC,CAAC,CAAC;AACjE,SAAS;AACT,QAAQ,MAAM,OAAO,GAAG,CAAC,KAAK,EAAE,UAAU,KAAK;AAC/C,YAAY,OAAO,YAAY,CAAC,QAAQ,CAAC,KAAK,CAAC,QAAQ,CAAC,KAAK,CAAC,KAAK,EAAE,UAAU,CAAC,CAAC,CAAC,CAAC;AACnF,SAAS,CAAC;AACV,QAAQ,MAAM,CAAC,MAAM,CAAC,OAAO,EAAE;AAC/B,YAAY,QAAQ;AACpB,YAAY,QAAQ,EAAE,YAAY,CAAC,IAAI,CAAC,IAAI,EAAE,QAAQ,CAAC;AACvD,SAAS,CAAC,CAAC;AACX,QAAQ,OAAO,eAAe,CAAC,OAAO,CAAC,IAAI,CAAC,OAAO,EAAE,eAAe,CAAC,CAAC;AACtE,KAAK,CAAC;AACN,IAAI,OAAO,MAAM,CAAC,MAAM,CAAC,MAAM,EAAE;AACjC,QAAQ,QAAQ;AAChB,QAAQ,QAAQ,EAAE,YAAY,CAAC,IAAI,CAAC,IAAI,EAAE,QAAQ,CAAC;AACnD,KAAK,CAAC,CAAC;AACP,CAAC;;ACjBW,MAAC,OAAO,GAAG,YAAY,CAAC,QAAQ,EAAE;AAC9C,IAAI,OAAO,EAAE;AACb,QAAQ,YAAY,EAAE,CAAC,mBAAmB,EAAE,OAAO,CAAC,CAAC,EAAE,YAAY,EAAE,CAAC,CAAC;AACvE,KAAK;AACL,CAAC,CAAC;;;;"} \ No newline at end of file diff --git a/node_modules/@octokit/request/package.json b/node_modules/@octokit/request/package.json new file mode 100644 index 0000000..e38c955 --- /dev/null +++ b/node_modules/@octokit/request/package.json @@ -0,0 +1,56 @@ +{ + "name": "@octokit/request", + "description": "Send parameterized requests to GitHub's APIs with sensible defaults in browsers and Node", + "version": "5.6.3", + "license": "MIT", + "files": [ + "dist-*/", + "bin/" + ], + "pika": true, + "sideEffects": false, + "keywords": [ + "octokit", + "github", + "api", + "request" + ], + "repository": "github:octokit/request.js", + "dependencies": { + "@octokit/endpoint": "^6.0.1", + "@octokit/request-error": "^2.1.0", + "@octokit/types": "^6.16.1", + "is-plain-object": "^5.0.0", + "node-fetch": "^2.6.7", + "universal-user-agent": "^6.0.0" + }, + "devDependencies": { + "@octokit/auth-app": "^3.0.0", + "@pika/pack": "^0.5.0", + "@pika/plugin-build-node": "^0.9.0", + "@pika/plugin-build-web": "^0.9.0", + "@pika/plugin-ts-standard-pkg": "^0.9.0", + "@types/fetch-mock": "^7.2.4", + "@types/jest": "^27.0.0", + "@types/lolex": "^5.1.0", + "@types/node": "^14.0.0", + "@types/node-fetch": "^2.3.3", + "@types/once": "^1.4.0", + "fetch-mock": "^9.3.1", + "jest": "^27.0.0", + "lolex": "^6.0.0", + "prettier": "2.4.1", + "semantic-release": "^18.0.0", + 
"semantic-release-plugin-update-version-in-files": "^1.0.0", + "string-to-arraybuffer": "^1.0.2", + "ts-jest": "^27.0.0", + "typescript": "^4.0.2" + }, + "publishConfig": { + "access": "public" + }, + "source": "dist-src/index.js", + "types": "dist-types/index.d.ts", + "main": "dist-node/index.js", + "module": "dist-web/index.js" +} diff --git a/node_modules/@octokit/types/LICENSE b/node_modules/@octokit/types/LICENSE new file mode 100644 index 0000000..57bee5f --- /dev/null +++ b/node_modules/@octokit/types/LICENSE @@ -0,0 +1,7 @@ +MIT License Copyright (c) 2019 Octokit contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice (including the next paragraph) shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/@octokit/types/README.md b/node_modules/@octokit/types/README.md new file mode 100644 index 0000000..c48ce42 --- /dev/null +++ b/node_modules/@octokit/types/README.md @@ -0,0 +1,65 @@ +# types.ts + +> Shared TypeScript definitions for Octokit projects + +[![@latest](https://img.shields.io/npm/v/@octokit/types.svg)](https://www.npmjs.com/package/@octokit/types) +[![Build Status](https://github.com/octokit/types.ts/workflows/Test/badge.svg)](https://github.com/octokit/types.ts/actions?workflow=Test) + + + +- [Usage](#usage) +- [Examples](#examples) + - [Get parameter and response data types for a REST API endpoint](#get-parameter-and-response-data-types-for-a-rest-api-endpoint) + - [Get response types from endpoint methods](#get-response-types-from-endpoint-methods) +- [Contributing](#contributing) +- [License](#license) + + + +## Usage + +See all exported types at https://octokit.github.io/types.ts + +## Examples + +### Get parameter and response data types for a REST API endpoint + +```ts +import { Endpoints } from "@octokit/types"; + +type listUserReposParameters = + Endpoints["GET /repos/{owner}/{repo}"]["parameters"]; +type listUserReposResponse = Endpoints["GET /repos/{owner}/{repo}"]["response"]; + +async function listRepos( + options: listUserReposParameters +): listUserReposResponse["data"] { + // ... 
+} +``` + +### Get response types from endpoint methods + +```ts +import { + GetResponseTypeFromEndpointMethod, + GetResponseDataTypeFromEndpointMethod, +} from "@octokit/types"; +import { Octokit } from "@octokit/rest"; + +const octokit = new Octokit(); +type CreateLabelResponseType = GetResponseTypeFromEndpointMethod< + typeof octokit.issues.createLabel +>; +type CreateLabelResponseDataType = GetResponseDataTypeFromEndpointMethod< + typeof octokit.issues.createLabel +>; +``` + +## Contributing + +See [CONTRIBUTING.md](CONTRIBUTING.md) + +## License + +[MIT](LICENSE) diff --git a/node_modules/@octokit/types/dist-node/index.js b/node_modules/@octokit/types/dist-node/index.js new file mode 100644 index 0000000..b3c252b --- /dev/null +++ b/node_modules/@octokit/types/dist-node/index.js @@ -0,0 +1,8 @@ +'use strict'; + +Object.defineProperty(exports, '__esModule', { value: true }); + +const VERSION = "6.41.0"; + +exports.VERSION = VERSION; +//# sourceMappingURL=index.js.map diff --git a/node_modules/@octokit/types/dist-node/index.js.map b/node_modules/@octokit/types/dist-node/index.js.map new file mode 100644 index 0000000..2d148d3 --- /dev/null +++ b/node_modules/@octokit/types/dist-node/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sources":["../dist-src/VERSION.js"],"sourcesContent":["export const VERSION = \"0.0.0-development\";\n"],"names":["VERSION"],"mappings":";;;;MAAaA,OAAO,GAAG;;;;"} \ No newline at end of file diff --git a/node_modules/@octokit/types/dist-src/AuthInterface.js b/node_modules/@octokit/types/dist-src/AuthInterface.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/node_modules/@octokit/types/dist-src/AuthInterface.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@octokit/types/dist-src/EndpointDefaults.js b/node_modules/@octokit/types/dist-src/EndpointDefaults.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/node_modules/@octokit/types/dist-src/EndpointDefaults.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@octokit/types/dist-src/EndpointInterface.js b/node_modules/@octokit/types/dist-src/EndpointInterface.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/node_modules/@octokit/types/dist-src/EndpointInterface.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@octokit/types/dist-src/EndpointOptions.js b/node_modules/@octokit/types/dist-src/EndpointOptions.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/node_modules/@octokit/types/dist-src/EndpointOptions.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@octokit/types/dist-src/Fetch.js b/node_modules/@octokit/types/dist-src/Fetch.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/node_modules/@octokit/types/dist-src/Fetch.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@octokit/types/dist-src/GetResponseTypeFromEndpointMethod.js b/node_modules/@octokit/types/dist-src/GetResponseTypeFromEndpointMethod.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/node_modules/@octokit/types/dist-src/GetResponseTypeFromEndpointMethod.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@octokit/types/dist-src/OctokitResponse.js b/node_modules/@octokit/types/dist-src/OctokitResponse.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/node_modules/@octokit/types/dist-src/OctokitResponse.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@octokit/types/dist-src/RequestError.js b/node_modules/@octokit/types/dist-src/RequestError.js new file mode 100644 index 
0000000..cb0ff5c --- /dev/null +++ b/node_modules/@octokit/types/dist-src/RequestError.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@octokit/types/dist-src/RequestHeaders.js b/node_modules/@octokit/types/dist-src/RequestHeaders.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/node_modules/@octokit/types/dist-src/RequestHeaders.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@octokit/types/dist-src/RequestInterface.js b/node_modules/@octokit/types/dist-src/RequestInterface.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/node_modules/@octokit/types/dist-src/RequestInterface.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@octokit/types/dist-src/RequestMethod.js b/node_modules/@octokit/types/dist-src/RequestMethod.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/node_modules/@octokit/types/dist-src/RequestMethod.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@octokit/types/dist-src/RequestOptions.js b/node_modules/@octokit/types/dist-src/RequestOptions.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/node_modules/@octokit/types/dist-src/RequestOptions.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@octokit/types/dist-src/RequestParameters.js b/node_modules/@octokit/types/dist-src/RequestParameters.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/node_modules/@octokit/types/dist-src/RequestParameters.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@octokit/types/dist-src/RequestRequestOptions.js b/node_modules/@octokit/types/dist-src/RequestRequestOptions.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/node_modules/@octokit/types/dist-src/RequestRequestOptions.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@octokit/types/dist-src/ResponseHeaders.js b/node_modules/@octokit/types/dist-src/ResponseHeaders.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/node_modules/@octokit/types/dist-src/ResponseHeaders.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@octokit/types/dist-src/Route.js b/node_modules/@octokit/types/dist-src/Route.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/node_modules/@octokit/types/dist-src/Route.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@octokit/types/dist-src/Signal.js b/node_modules/@octokit/types/dist-src/Signal.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/node_modules/@octokit/types/dist-src/Signal.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@octokit/types/dist-src/StrategyInterface.js b/node_modules/@octokit/types/dist-src/StrategyInterface.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/node_modules/@octokit/types/dist-src/StrategyInterface.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@octokit/types/dist-src/Url.js b/node_modules/@octokit/types/dist-src/Url.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/node_modules/@octokit/types/dist-src/Url.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@octokit/types/dist-src/VERSION.js b/node_modules/@octokit/types/dist-src/VERSION.js new file mode 100644 index 0000000..45a11b8 --- /dev/null +++ b/node_modules/@octokit/types/dist-src/VERSION.js @@ -0,0 +1 @@ +export const VERSION = "6.41.0"; diff --git a/node_modules/@octokit/types/dist-src/generated/Endpoints.js b/node_modules/@octokit/types/dist-src/generated/Endpoints.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ 
b/node_modules/@octokit/types/dist-src/generated/Endpoints.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@octokit/types/dist-src/index.js b/node_modules/@octokit/types/dist-src/index.js new file mode 100644 index 0000000..004ae9b --- /dev/null +++ b/node_modules/@octokit/types/dist-src/index.js @@ -0,0 +1,21 @@ +export * from "./AuthInterface"; +export * from "./EndpointDefaults"; +export * from "./EndpointInterface"; +export * from "./EndpointOptions"; +export * from "./Fetch"; +export * from "./OctokitResponse"; +export * from "./RequestError"; +export * from "./RequestHeaders"; +export * from "./RequestInterface"; +export * from "./RequestMethod"; +export * from "./RequestOptions"; +export * from "./RequestParameters"; +export * from "./RequestRequestOptions"; +export * from "./ResponseHeaders"; +export * from "./Route"; +export * from "./Signal"; +export * from "./StrategyInterface"; +export * from "./Url"; +export * from "./VERSION"; +export * from "./GetResponseTypeFromEndpointMethod"; +export * from "./generated/Endpoints"; diff --git a/node_modules/@octokit/types/dist-types/AuthInterface.d.ts b/node_modules/@octokit/types/dist-types/AuthInterface.d.ts new file mode 100644 index 0000000..8b39d61 --- /dev/null +++ b/node_modules/@octokit/types/dist-types/AuthInterface.d.ts @@ -0,0 +1,31 @@ +import { EndpointOptions } from "./EndpointOptions"; +import { OctokitResponse } from "./OctokitResponse"; +import { RequestInterface } from "./RequestInterface"; +import { RequestParameters } from "./RequestParameters"; +import { Route } from "./Route"; +/** + * Interface to implement complex authentication strategies for Octokit. + * An object Implementing the AuthInterface can directly be passed as the + * `auth` option in the Octokit constructor. + * + * For the official implementations of the most common authentication + * strategies, see https://github.com/octokit/auth.js + */ +export interface AuthInterface { + (...args: AuthOptions): Promise; + hook: { + /** + * Sends a request using the passed `request` instance + * + * @param {object} endpoint Must set `method` and `url`. Plus URL, query or body parameters, as well as `headers`, `mediaType.{format|previews}`, `request`, or `baseUrl`. + */ + (request: RequestInterface, options: EndpointOptions): Promise>; + /** + * Sends a request using the passed `request` instance + * + * @param {string} route Request method + URL. Example: `'GET /orgs/{org}'` + * @param {object} [parameters] URL, query or body parameters, as well as `headers`, `mediaType.{format|previews}`, `request`, or `baseUrl`. + */ + (request: RequestInterface, route: Route, parameters?: RequestParameters): Promise>; + }; +} diff --git a/node_modules/@octokit/types/dist-types/EndpointDefaults.d.ts b/node_modules/@octokit/types/dist-types/EndpointDefaults.d.ts new file mode 100644 index 0000000..a2c2307 --- /dev/null +++ b/node_modules/@octokit/types/dist-types/EndpointDefaults.d.ts @@ -0,0 +1,21 @@ +import { RequestHeaders } from "./RequestHeaders"; +import { RequestMethod } from "./RequestMethod"; +import { RequestParameters } from "./RequestParameters"; +import { Url } from "./Url"; +/** + * The `.endpoint()` method is guaranteed to set all keys defined by RequestParameters + * as well as the method property. 
+ */ +export declare type EndpointDefaults = RequestParameters & { + baseUrl: Url; + method: RequestMethod; + url?: Url; + headers: RequestHeaders & { + accept: string; + "user-agent": string; + }; + mediaType: { + format: string; + previews: string[]; + }; +}; diff --git a/node_modules/@octokit/types/dist-types/EndpointInterface.d.ts b/node_modules/@octokit/types/dist-types/EndpointInterface.d.ts new file mode 100644 index 0000000..d7b4009 --- /dev/null +++ b/node_modules/@octokit/types/dist-types/EndpointInterface.d.ts @@ -0,0 +1,65 @@ +import { EndpointDefaults } from "./EndpointDefaults"; +import { RequestOptions } from "./RequestOptions"; +import { RequestParameters } from "./RequestParameters"; +import { Route } from "./Route"; +import { Endpoints } from "./generated/Endpoints"; +export interface EndpointInterface { + /** + * Transforms a GitHub REST API endpoint into generic request options + * + * @param {object} endpoint Must set `url` unless it's set defaults. Plus URL, query or body parameters, as well as `headers`, `mediaType.{format|previews}`, `request`, or `baseUrl`. + */ + (options: O & { + method?: string; + } & ("url" extends keyof D ? { + url?: string; + } : { + url: string; + })): RequestOptions & Pick; + /** + * Transforms a GitHub REST API endpoint into generic request options + * + * @param {string} route Request method + URL. Example: `'GET /orgs/{org}'` + * @param {object} [parameters] URL, query or body parameters, as well as `headers`, `mediaType.{format|previews}`, `request`, or `baseUrl`. + */ + (route: keyof Endpoints | R, parameters?: P): (R extends keyof Endpoints ? Endpoints[R]["request"] : RequestOptions) & Pick; + /** + * Object with current default route and parameters + */ + DEFAULTS: D & EndpointDefaults; + /** + * Returns a new `endpoint` interface with new defaults + */ + defaults: (newDefaults: O) => EndpointInterface; + merge: { + /** + * Merges current endpoint defaults with passed route and parameters, + * without transforming them into request options. + * + * @param {string} route Request method + URL. Example: `'GET /orgs/{org}'` + * @param {object} [parameters] URL, query or body parameters, as well as `headers`, `mediaType.{format|previews}`, `request`, or `baseUrl`. + * + */ + (route: keyof Endpoints | R, parameters?: P): D & (R extends keyof Endpoints ? Endpoints[R]["request"] & Endpoints[R]["parameters"] : EndpointDefaults) & P; + /** + * Merges current endpoint defaults with passed route and parameters, + * without transforming them into request options. + * + * @param {object} endpoint Must set `method` and `url`. Plus URL, query or body parameters, as well as `headers`, `mediaType.{format|previews}`, `request`, or `baseUrl`. + */ +
(options: P): EndpointDefaults & D & P; + /** + * Returns current default options. + * + * @deprecated use endpoint.DEFAULTS instead + */ + (): D & EndpointDefaults; + }; + /** + * Stateless method to turn endpoint options into request options. + * Calling `endpoint(options)` is the same as calling `endpoint.parse(endpoint.merge(options))`. + * + * @param {object} options `method`, `url`. Plus URL, query or body parameters, as well as `headers`, `mediaType.{format|previews}`, `request`, or `baseUrl`. + */ + parse: (options: O) => RequestOptions & Pick; +} diff --git a/node_modules/@octokit/types/dist-types/EndpointOptions.d.ts b/node_modules/@octokit/types/dist-types/EndpointOptions.d.ts new file mode 100644 index 0000000..b1b91f1 --- /dev/null +++ b/node_modules/@octokit/types/dist-types/EndpointOptions.d.ts @@ -0,0 +1,7 @@ +import { RequestMethod } from "./RequestMethod"; +import { Url } from "./Url"; +import { RequestParameters } from "./RequestParameters"; +export declare type EndpointOptions = RequestParameters & { + method: RequestMethod; + url: Url; +}; diff --git a/node_modules/@octokit/types/dist-types/Fetch.d.ts b/node_modules/@octokit/types/dist-types/Fetch.d.ts new file mode 100644 index 0000000..cbbd5e8 --- /dev/null +++ b/node_modules/@octokit/types/dist-types/Fetch.d.ts @@ -0,0 +1,4 @@ +/** + * Browser's fetch method (or compatible such as fetch-mock) + */ +export declare type Fetch = any; diff --git a/node_modules/@octokit/types/dist-types/GetResponseTypeFromEndpointMethod.d.ts b/node_modules/@octokit/types/dist-types/GetResponseTypeFromEndpointMethod.d.ts new file mode 100644 index 0000000..70e1a8d --- /dev/null +++ b/node_modules/@octokit/types/dist-types/GetResponseTypeFromEndpointMethod.d.ts @@ -0,0 +1,5 @@ +declare type Unwrap = T extends Promise ? 
U : T; +declare type AnyFunction = (...args: any[]) => any; +export declare type GetResponseTypeFromEndpointMethod = Unwrap>; +export declare type GetResponseDataTypeFromEndpointMethod = Unwrap>["data"]; +export {}; diff --git a/node_modules/@octokit/types/dist-types/OctokitResponse.d.ts b/node_modules/@octokit/types/dist-types/OctokitResponse.d.ts new file mode 100644 index 0000000..28fdfb8 --- /dev/null +++ b/node_modules/@octokit/types/dist-types/OctokitResponse.d.ts @@ -0,0 +1,17 @@ +import { ResponseHeaders } from "./ResponseHeaders"; +import { Url } from "./Url"; +export declare type OctokitResponse = { + headers: ResponseHeaders; + /** + * http response code + */ + status: S; + /** + * URL of response after all redirects + */ + url: Url; + /** + * Response data as documented in the REST API reference documentation at https://docs.github.com/rest/reference + */ + data: T; +}; diff --git a/node_modules/@octokit/types/dist-types/RequestError.d.ts b/node_modules/@octokit/types/dist-types/RequestError.d.ts new file mode 100644 index 0000000..89174e6 --- /dev/null +++ b/node_modules/@octokit/types/dist-types/RequestError.d.ts @@ -0,0 +1,11 @@ +export declare type RequestError = { + name: string; + status: number; + documentation_url: string; + errors?: Array<{ + resource: string; + code: string; + field: string; + message?: string; + }>; +}; diff --git a/node_modules/@octokit/types/dist-types/RequestHeaders.d.ts b/node_modules/@octokit/types/dist-types/RequestHeaders.d.ts new file mode 100644 index 0000000..ac5aae0 --- /dev/null +++ b/node_modules/@octokit/types/dist-types/RequestHeaders.d.ts @@ -0,0 +1,15 @@ +export declare type RequestHeaders = { + /** + * Avoid setting `headers.accept`, use `mediaType.{format|previews}` option instead. + */ + accept?: string; + /** + * Use `authorization` to send authenticated request, remember `token ` / `bearer ` prefixes. Example: `token 1234567890abcdef1234567890abcdef12345678` + */ + authorization?: string; + /** + * `user-agent` is set do a default and can be overwritten as needed. + */ + "user-agent"?: string; + [header: string]: string | number | undefined; +}; diff --git a/node_modules/@octokit/types/dist-types/RequestInterface.d.ts b/node_modules/@octokit/types/dist-types/RequestInterface.d.ts new file mode 100644 index 0000000..851811f --- /dev/null +++ b/node_modules/@octokit/types/dist-types/RequestInterface.d.ts @@ -0,0 +1,34 @@ +import { EndpointInterface } from "./EndpointInterface"; +import { OctokitResponse } from "./OctokitResponse"; +import { RequestParameters } from "./RequestParameters"; +import { Route } from "./Route"; +import { Endpoints } from "./generated/Endpoints"; +export interface RequestInterface { + /** + * Sends a request based on endpoint options + * + * @param {object} endpoint Must set `method` and `url`. Plus URL, query or body parameters, as well as `headers`, `mediaType.{format|previews}`, `request`, or `baseUrl`. + */ + (options: O & { + method?: string; + } & ("url" extends keyof D ? { + url?: string; + } : { + url: string; + })): Promise>; + /** + * Sends a request based on endpoint options + * + * @param {string} route Request method + URL. Example: `'GET /orgs/{org}'` + * @param {object} [parameters] URL, query or body parameters, as well as `headers`, `mediaType.{format|previews}`, `request`, or `baseUrl`. + */ + (route: keyof Endpoints | R, options?: R extends keyof Endpoints ? Endpoints[R]["parameters"] & RequestParameters : RequestParameters): R extends keyof Endpoints ? 
Promise : Promise>; + /** + * Returns a new `request` with updated route and parameters + */ + defaults: (newDefaults: O) => RequestInterface; + /** + * Octokit endpoint API, see {@link https://github.com/octokit/endpoint.js|@octokit/endpoint} + */ + endpoint: EndpointInterface; +} diff --git a/node_modules/@octokit/types/dist-types/RequestMethod.d.ts b/node_modules/@octokit/types/dist-types/RequestMethod.d.ts new file mode 100644 index 0000000..e999c8d --- /dev/null +++ b/node_modules/@octokit/types/dist-types/RequestMethod.d.ts @@ -0,0 +1,4 @@ +/** + * HTTP Verb supported by GitHub's REST API + */ +export declare type RequestMethod = "DELETE" | "GET" | "HEAD" | "PATCH" | "POST" | "PUT"; diff --git a/node_modules/@octokit/types/dist-types/RequestOptions.d.ts b/node_modules/@octokit/types/dist-types/RequestOptions.d.ts new file mode 100644 index 0000000..97e2181 --- /dev/null +++ b/node_modules/@octokit/types/dist-types/RequestOptions.d.ts @@ -0,0 +1,14 @@ +import { RequestHeaders } from "./RequestHeaders"; +import { RequestMethod } from "./RequestMethod"; +import { RequestRequestOptions } from "./RequestRequestOptions"; +import { Url } from "./Url"; +/** + * Generic request options as they are returned by the `endpoint()` method + */ +export declare type RequestOptions = { + method: RequestMethod; + url: Url; + headers: RequestHeaders; + body?: any; + request?: RequestRequestOptions; +}; diff --git a/node_modules/@octokit/types/dist-types/RequestParameters.d.ts b/node_modules/@octokit/types/dist-types/RequestParameters.d.ts new file mode 100644 index 0000000..b056a0e --- /dev/null +++ b/node_modules/@octokit/types/dist-types/RequestParameters.d.ts @@ -0,0 +1,45 @@ +import { RequestRequestOptions } from "./RequestRequestOptions"; +import { RequestHeaders } from "./RequestHeaders"; +import { Url } from "./Url"; +/** + * Parameters that can be passed into `request(route, parameters)` or `endpoint(route, parameters)` methods + */ +export declare type RequestParameters = { + /** + * Base URL to be used when a relative URL is passed, such as `/orgs/{org}`. + * If `baseUrl` is `https://enterprise.acme-inc.com/api/v3`, then the request + * will be sent to `https://enterprise.acme-inc.com/api/v3/orgs/{org}`. + */ + baseUrl?: Url; + /** + * HTTP headers. Use lowercase keys. + */ + headers?: RequestHeaders; + /** + * Media type options, see {@link https://developer.github.com/v3/media/|GitHub Developer Guide} + */ + mediaType?: { + /** + * `json` by default. Can be `raw`, `text`, `html`, `full`, `diff`, `patch`, `sha`, `base64`. Depending on endpoint + */ + format?: string; + /** + * Custom media type names of {@link https://developer.github.com/v3/media/|API Previews} without the `-preview` suffix. + * Example for single preview: `['squirrel-girl']`. + * Example for multiple previews: `['squirrel-girl', 'mister-fantastic']`. + */ + previews?: string[]; + }; + /** + * Pass custom meta information for the request. The `request` object will be returned as is. + */ + request?: RequestRequestOptions; + /** + * Any additional parameter will be passed as follows + * 1. URL parameter if `':parameter'` or `{parameter}` is part of `url` + * 2. Query parameter if `method` is `'GET'` or `'HEAD'` + * 3. Request body if `parameter` is `'data'` + * 4. 
JSON in the request body in the form of `body[parameter]` unless `parameter` key is `'data'` + */ + [parameter: string]: unknown; +}; diff --git a/node_modules/@octokit/types/dist-types/RequestRequestOptions.d.ts b/node_modules/@octokit/types/dist-types/RequestRequestOptions.d.ts new file mode 100644 index 0000000..8f5c43a --- /dev/null +++ b/node_modules/@octokit/types/dist-types/RequestRequestOptions.d.ts @@ -0,0 +1,26 @@ +import { Fetch } from "./Fetch"; +import { Signal } from "./Signal"; +/** + * Octokit-specific request options which are ignored for the actual request, but can be used by Octokit or plugins to manipulate how the request is sent or how a response is handled + */ +export declare type RequestRequestOptions = { + /** + * Node only. Useful for custom proxy, certificate, or dns lookup. + * + * @see https://nodejs.org/api/http.html#http_class_http_agent + */ + agent?: unknown; + /** + * Custom replacement for built-in fetch method. Useful for testing or request hooks. + */ + fetch?: Fetch; + /** + * Use an `AbortController` instance to cancel a request. In node you can only cancel streamed requests. + */ + signal?: Signal; + /** + * Node only. Request/response timeout in ms, it resets on redirect. 0 to disable (OS limit applies). `options.request.signal` is recommended instead. + */ + timeout?: number; + [option: string]: any; +}; diff --git a/node_modules/@octokit/types/dist-types/ResponseHeaders.d.ts b/node_modules/@octokit/types/dist-types/ResponseHeaders.d.ts new file mode 100644 index 0000000..c8fbe43 --- /dev/null +++ b/node_modules/@octokit/types/dist-types/ResponseHeaders.d.ts @@ -0,0 +1,20 @@ +export declare type ResponseHeaders = { + "cache-control"?: string; + "content-length"?: number; + "content-type"?: string; + date?: string; + etag?: string; + "last-modified"?: string; + link?: string; + location?: string; + server?: string; + status?: string; + vary?: string; + "x-github-mediatype"?: string; + "x-github-request-id"?: string; + "x-oauth-scopes"?: string; + "x-ratelimit-limit"?: string; + "x-ratelimit-remaining"?: string; + "x-ratelimit-reset"?: string; + [header: string]: string | number | undefined; +}; diff --git a/node_modules/@octokit/types/dist-types/Route.d.ts b/node_modules/@octokit/types/dist-types/Route.d.ts new file mode 100644 index 0000000..dcaac75 --- /dev/null +++ b/node_modules/@octokit/types/dist-types/Route.d.ts @@ -0,0 +1,4 @@ +/** + * String consisting of an optional HTTP method and relative path or absolute URL. 
Examples: `'/orgs/{org}'`, `'PUT /orgs/{org}'`, `GET https://example.com/foo/bar` + */ +export declare type Route = string; diff --git a/node_modules/@octokit/types/dist-types/Signal.d.ts b/node_modules/@octokit/types/dist-types/Signal.d.ts new file mode 100644 index 0000000..4ebcf24 --- /dev/null +++ b/node_modules/@octokit/types/dist-types/Signal.d.ts @@ -0,0 +1,6 @@ +/** + * Abort signal + * + * @see https://developer.mozilla.org/en-US/docs/Web/API/AbortSignal + */ +export declare type Signal = any; diff --git a/node_modules/@octokit/types/dist-types/StrategyInterface.d.ts b/node_modules/@octokit/types/dist-types/StrategyInterface.d.ts new file mode 100644 index 0000000..405cbd2 --- /dev/null +++ b/node_modules/@octokit/types/dist-types/StrategyInterface.d.ts @@ -0,0 +1,4 @@ +import { AuthInterface } from "./AuthInterface"; +export interface StrategyInterface { + (...args: StrategyOptions): AuthInterface; +} diff --git a/node_modules/@octokit/types/dist-types/Url.d.ts b/node_modules/@octokit/types/dist-types/Url.d.ts new file mode 100644 index 0000000..3e69916 --- /dev/null +++ b/node_modules/@octokit/types/dist-types/Url.d.ts @@ -0,0 +1,4 @@ +/** + * Relative or absolute URL. Examples: `'/orgs/{org}'`, `https://example.com/foo/bar` + */ +export declare type Url = string; diff --git a/node_modules/@octokit/types/dist-types/VERSION.d.ts b/node_modules/@octokit/types/dist-types/VERSION.d.ts new file mode 100644 index 0000000..dc24575 --- /dev/null +++ b/node_modules/@octokit/types/dist-types/VERSION.d.ts @@ -0,0 +1 @@ +export declare const VERSION = "6.41.0"; diff --git a/node_modules/@octokit/types/dist-types/generated/Endpoints.d.ts b/node_modules/@octokit/types/dist-types/generated/Endpoints.d.ts new file mode 100644 index 0000000..3fe7cf8 --- /dev/null +++ b/node_modules/@octokit/types/dist-types/generated/Endpoints.d.ts @@ -0,0 +1,3571 @@ +import { paths } from "@octokit/openapi-types"; +import { OctokitResponse } from "../OctokitResponse"; +import { RequestHeaders } from "../RequestHeaders"; +import { RequestRequestOptions } from "../RequestRequestOptions"; +declare type UnionToIntersection = (U extends any ? (k: U) => void : never) extends (k: infer I) => void ? I : never; +declare type ExtractParameters = "parameters" extends keyof T ? UnionToIntersection<{ + [K in keyof T["parameters"]]: T["parameters"][K]; +}[keyof T["parameters"]]> : {}; +declare type ExtractRequestBody = "requestBody" extends keyof T ? "content" extends keyof T["requestBody"] ? "application/json" extends keyof T["requestBody"]["content"] ? T["requestBody"]["content"]["application/json"] : { + data: { + [K in keyof T["requestBody"]["content"]]: T["requestBody"]["content"][K]; + }[keyof T["requestBody"]["content"]]; +} : "application/json" extends keyof T["requestBody"] ? T["requestBody"]["application/json"] : { + data: { + [K in keyof T["requestBody"]]: T["requestBody"][K]; + }[keyof T["requestBody"]]; +} : {}; +declare type ToOctokitParameters = ExtractParameters & ExtractRequestBody; +declare type RequiredPreview = T extends string ? { + mediaType: { + previews: [T, ...string[]]; + }; +} : {}; +declare type Operation = { + parameters: ToOctokitParameters & RequiredPreview; + request: { + method: Method extends keyof MethodsMap ? 
MethodsMap[Method] : never; + url: Url; + headers: RequestHeaders; + request: RequestRequestOptions; + }; + response: ExtractOctokitResponse; +}; +declare type MethodsMap = { + delete: "DELETE"; + get: "GET"; + patch: "PATCH"; + post: "POST"; + put: "PUT"; +}; +declare type SuccessStatuses = 200 | 201 | 202 | 204; +declare type RedirectStatuses = 301 | 302; +declare type EmptyResponseStatuses = 201 | 204; +declare type KnownJsonResponseTypes = "application/json" | "application/scim+json" | "text/html"; +declare type SuccessResponseDataType = { + [K in SuccessStatuses & keyof Responses]: GetContentKeyIfPresent extends never ? never : OctokitResponse, K>; +}[SuccessStatuses & keyof Responses]; +declare type RedirectResponseDataType = { + [K in RedirectStatuses & keyof Responses]: OctokitResponse; +}[RedirectStatuses & keyof Responses]; +declare type EmptyResponseDataType = { + [K in EmptyResponseStatuses & keyof Responses]: OctokitResponse; +}[EmptyResponseStatuses & keyof Responses]; +declare type GetContentKeyIfPresent = "content" extends keyof T ? DataType : DataType; +declare type DataType = { + [K in KnownJsonResponseTypes & keyof T]: T[K]; +}[KnownJsonResponseTypes & keyof T]; +declare type ExtractOctokitResponse = "responses" extends keyof R ? SuccessResponseDataType extends never ? RedirectResponseDataType extends never ? EmptyResponseDataType : RedirectResponseDataType : SuccessResponseDataType : unknown; +export interface Endpoints { + /** + * @see https://docs.github.com/rest/reference/apps#delete-an-installation-for-the-authenticated-app + */ + "DELETE /app/installations/{installation_id}": Operation<"/app/installations/{installation_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/apps#unsuspend-an-app-installation + */ + "DELETE /app/installations/{installation_id}/suspended": Operation<"/app/installations/{installation_id}/suspended", "delete">; + /** + * @see https://docs.github.com/rest/reference/oauth-authorizations#delete-a-grant + */ + "DELETE /applications/grants/{grant_id}": Operation<"/applications/grants/{grant_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/apps#delete-an-app-authorization + */ + "DELETE /applications/{client_id}/grant": Operation<"/applications/{client_id}/grant", "delete">; + /** + * @see https://docs.github.com/rest/reference/apps#delete-an-app-token + */ + "DELETE /applications/{client_id}/token": Operation<"/applications/{client_id}/token", "delete">; + /** + * @see https://docs.github.com/rest/reference/oauth-authorizations#delete-an-authorization + */ + "DELETE /authorizations/{authorization_id}": Operation<"/authorizations/{authorization_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/actions#disable-a-selected-organization-for-github-actions-in-an-enterprise + */ + "DELETE /enterprises/{enterprise}/actions/permissions/organizations/{org_id}": Operation<"/enterprises/{enterprise}/actions/permissions/organizations/{org_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/actions#delete-a-self-hosted-runner-group-from-an-enterprise + */ + "DELETE /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}": Operation<"/enterprises/{enterprise}/actions/runner-groups/{runner_group_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/actions#remove-organization-access-to-a-self-hosted-runner-group-in-an-enterprise + */ + "DELETE /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/organizations/{org_id}": 
Operation<"/enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/organizations/{org_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/actions#remove-a-self-hosted-runner-from-a-group-for-an-enterprise + */ + "DELETE /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/runners/{runner_id}": Operation<"/enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/runners/{runner_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/actions#delete-self-hosted-runner-from-an-enterprise + */ + "DELETE /enterprises/{enterprise}/actions/runners/{runner_id}": Operation<"/enterprises/{enterprise}/actions/runners/{runner_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/actions#remove-all-custom-labels-from-a-self-hosted-runner-for-an-enterprise + */ + "DELETE /enterprises/{enterprise}/actions/runners/{runner_id}/labels": Operation<"/enterprises/{enterprise}/actions/runners/{runner_id}/labels", "delete">; + /** + * @see https://docs.github.com/rest/reference/actions#remove-a-custom-label-from-a-self-hosted-runner-for-an-enterprise + */ + "DELETE /enterprises/{enterprise}/actions/runners/{runner_id}/labels/{name}": Operation<"/enterprises/{enterprise}/actions/runners/{runner_id}/labels/{name}", "delete">; + /** + * @see https://docs.github.com/rest/reference/gists#delete-a-gist + */ + "DELETE /gists/{gist_id}": Operation<"/gists/{gist_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/gists#delete-a-gist-comment + */ + "DELETE /gists/{gist_id}/comments/{comment_id}": Operation<"/gists/{gist_id}/comments/{comment_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/gists#unstar-a-gist + */ + "DELETE /gists/{gist_id}/star": Operation<"/gists/{gist_id}/star", "delete">; + /** + * @see https://docs.github.com/rest/reference/apps#revoke-an-installation-access-token + */ + "DELETE /installation/token": Operation<"/installation/token", "delete">; + /** + * @see https://docs.github.com/rest/reference/activity#delete-a-thread-subscription + */ + "DELETE /notifications/threads/{thread_id}/subscription": Operation<"/notifications/threads/{thread_id}/subscription", "delete">; + /** + * @see https://docs.github.com/rest/reference/actions#disable-a-selected-repository-for-github-actions-in-an-organization + */ + "DELETE /orgs/{org}/actions/permissions/repositories/{repository_id}": Operation<"/orgs/{org}/actions/permissions/repositories/{repository_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/actions#delete-a-self-hosted-runner-group-from-an-organization + */ + "DELETE /orgs/{org}/actions/runner-groups/{runner_group_id}": Operation<"/orgs/{org}/actions/runner-groups/{runner_group_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/actions#remove-repository-access-to-a-self-hosted-runner-group-in-an-organization + */ + "DELETE /orgs/{org}/actions/runner-groups/{runner_group_id}/repositories/{repository_id}": Operation<"/orgs/{org}/actions/runner-groups/{runner_group_id}/repositories/{repository_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/actions#remove-a-self-hosted-runner-from-a-group-for-an-organization + */ + "DELETE /orgs/{org}/actions/runner-groups/{runner_group_id}/runners/{runner_id}": Operation<"/orgs/{org}/actions/runner-groups/{runner_group_id}/runners/{runner_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/actions#delete-a-self-hosted-runner-from-an-organization + */ + "DELETE 
/orgs/{org}/actions/runners/{runner_id}": Operation<"/orgs/{org}/actions/runners/{runner_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/actions#remove-all-custom-labels-from-a-self-hosted-runner-for-an-organization + */ + "DELETE /orgs/{org}/actions/runners/{runner_id}/labels": Operation<"/orgs/{org}/actions/runners/{runner_id}/labels", "delete">; + /** + * @see https://docs.github.com/rest/reference/actions#remove-a-custom-label-from-a-self-hosted-runner-for-an-organization + */ + "DELETE /orgs/{org}/actions/runners/{runner_id}/labels/{name}": Operation<"/orgs/{org}/actions/runners/{runner_id}/labels/{name}", "delete">; + /** + * @see https://docs.github.com/rest/reference/actions#delete-an-organization-secret + */ + "DELETE /orgs/{org}/actions/secrets/{secret_name}": Operation<"/orgs/{org}/actions/secrets/{secret_name}", "delete">; + /** + * @see https://docs.github.com/rest/reference/actions#remove-selected-repository-from-an-organization-secret + */ + "DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}": Operation<"/orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/orgs#unblock-a-user-from-an-organization + */ + "DELETE /orgs/{org}/blocks/{username}": Operation<"/orgs/{org}/blocks/{username}", "delete">; + /** + * @see https://docs.github.com/rest/reference/orgs#remove-a-saml-sso-authorization-for-an-organization + */ + "DELETE /orgs/{org}/credential-authorizations/{credential_id}": Operation<"/orgs/{org}/credential-authorizations/{credential_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/dependabot#delete-an-organization-secret + */ + "DELETE /orgs/{org}/dependabot/secrets/{secret_name}": Operation<"/orgs/{org}/dependabot/secrets/{secret_name}", "delete">; + /** + * @see https://docs.github.com/rest/reference/dependabot#remove-selected-repository-from-an-organization-secret + */ + "DELETE /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}": Operation<"/orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/orgs#delete-an-organization-webhook + */ + "DELETE /orgs/{org}/hooks/{hook_id}": Operation<"/orgs/{org}/hooks/{hook_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/interactions#remove-interaction-restrictions-for-an-organization + */ + "DELETE /orgs/{org}/interaction-limits": Operation<"/orgs/{org}/interaction-limits", "delete">; + /** + * @see https://docs.github.com/rest/reference/orgs#cancel-an-organization-invitation + */ + "DELETE /orgs/{org}/invitations/{invitation_id}": Operation<"/orgs/{org}/invitations/{invitation_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/orgs#remove-an-organization-member + */ + "DELETE /orgs/{org}/members/{username}": Operation<"/orgs/{org}/members/{username}", "delete">; + /** + * @see https://docs.github.com/rest/reference/codespaces + */ + "DELETE /orgs/{org}/members/{username}/codespaces/{codespace_name}": Operation<"/orgs/{org}/members/{username}/codespaces/{codespace_name}", "delete">; + /** + * @see https://docs.github.com/rest/reference/orgs#remove-organization-membership-for-a-user + */ + "DELETE /orgs/{org}/memberships/{username}": Operation<"/orgs/{org}/memberships/{username}", "delete">; + /** + * @see https://docs.github.com/rest/reference/migrations#delete-an-organization-migration-archive + */ + "DELETE 
/orgs/{org}/migrations/{migration_id}/archive": Operation<"/orgs/{org}/migrations/{migration_id}/archive", "delete">; + /** + * @see https://docs.github.com/rest/reference/migrations#unlock-an-organization-repository + */ + "DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock": Operation<"/orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock", "delete">; + /** + * @see https://docs.github.com/rest/reference/orgs#remove-outside-collaborator-from-an-organization + */ + "DELETE /orgs/{org}/outside_collaborators/{username}": Operation<"/orgs/{org}/outside_collaborators/{username}", "delete">; + /** + * @see https://docs.github.com/rest/reference/packages#delete-a-package-for-an-organization + */ + "DELETE /orgs/{org}/packages/{package_type}/{package_name}": Operation<"/orgs/{org}/packages/{package_type}/{package_name}", "delete">; + /** + * @see https://docs.github.com/rest/reference/packages#delete-a-package-version-for-an-organization + */ + "DELETE /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}": Operation<"/orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/orgs#remove-public-organization-membership-for-the-authenticated-user + */ + "DELETE /orgs/{org}/public_members/{username}": Operation<"/orgs/{org}/public_members/{username}", "delete">; + /** + * @see https://docs.github.com/rest/reference/teams#delete-a-team + */ + "DELETE /orgs/{org}/teams/{team_slug}": Operation<"/orgs/{org}/teams/{team_slug}", "delete">; + /** + * @see https://docs.github.com/rest/reference/teams#delete-a-discussion + */ + "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}": Operation<"/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}", "delete">; + /** + * @see https://docs.github.com/rest/reference/teams#delete-a-discussion-comment + */ + "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}": Operation<"/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}", "delete">; + /** + * @see https://docs.github.com/rest/reference/reactions#delete-team-discussion-comment-reaction + */ + "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}": Operation<"/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/reactions#delete-team-discussion-reaction + */ + "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}": Operation<"/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/teams#unlink-external-idp-group-team-connection + */ + "DELETE /orgs/{org}/teams/{team_slug}/external-groups": Operation<"/orgs/{org}/teams/{team_slug}/external-groups", "delete">; + /** + * @see https://docs.github.com/rest/reference/teams#remove-team-membership-for-a-user + */ + "DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}": Operation<"/orgs/{org}/teams/{team_slug}/memberships/{username}", "delete">; + /** + * @see https://docs.github.com/rest/reference/teams#remove-a-project-from-a-team + */ + "DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}": Operation<"/orgs/{org}/teams/{team_slug}/projects/{project_id}", "delete">; + /** + * @see 
https://docs.github.com/rest/reference/teams/#remove-a-repository-from-a-team + */ + "DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}": Operation<"/orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}", "delete">; + /** + * @see https://docs.github.com/rest/reference/projects#delete-a-project-card + */ + "DELETE /projects/columns/cards/{card_id}": Operation<"/projects/columns/cards/{card_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/projects#delete-a-project-column + */ + "DELETE /projects/columns/{column_id}": Operation<"/projects/columns/{column_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/projects#delete-a-project + */ + "DELETE /projects/{project_id}": Operation<"/projects/{project_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/projects#remove-project-collaborator + */ + "DELETE /projects/{project_id}/collaborators/{username}": Operation<"/projects/{project_id}/collaborators/{username}", "delete">; + /** + * @see https://docs.github.com/rest/reference/repos#delete-a-repository + */ + "DELETE /repos/{owner}/{repo}": Operation<"/repos/{owner}/{repo}", "delete">; + /** + * @see https://docs.github.com/rest/reference/actions#delete-an-artifact + */ + "DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}": Operation<"/repos/{owner}/{repo}/actions/artifacts/{artifact_id}", "delete">; + /** + * @see https://docs.github.com/rest/actions/cache#delete-a-github-actions-cache-for-a-repository-using-a-cache-id + */ + "DELETE /repos/{owner}/{repo}/actions/caches/{cache_id}": Operation<"/repos/{owner}/{repo}/actions/caches/{cache_id}", "delete">; + /** + * @see https://docs.github.com/rest/actions/cache#delete-github-actions-caches-for-a-repository-using-a-cache-key + */ + "DELETE /repos/{owner}/{repo}/actions/caches{?key,ref}": Operation<"/repos/{owner}/{repo}/actions/caches", "delete">; + /** + * @see https://docs.github.com/rest/reference/actions#delete-a-self-hosted-runner-from-a-repository + */ + "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}": Operation<"/repos/{owner}/{repo}/actions/runners/{runner_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/actions#remove-all-custom-labels-from-a-self-hosted-runner-for-a-repository + */ + "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels": Operation<"/repos/{owner}/{repo}/actions/runners/{runner_id}/labels", "delete">; + /** + * @see https://docs.github.com/rest/reference/actions#remove-a-custom-label-from-a-self-hosted-runner-for-a-repository + */ + "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels/{name}": Operation<"/repos/{owner}/{repo}/actions/runners/{runner_id}/labels/{name}", "delete">; + /** + * @see https://docs.github.com/rest/reference/actions#delete-a-workflow-run + */ + "DELETE /repos/{owner}/{repo}/actions/runs/{run_id}": Operation<"/repos/{owner}/{repo}/actions/runs/{run_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/actions#delete-workflow-run-logs + */ + "DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs": Operation<"/repos/{owner}/{repo}/actions/runs/{run_id}/logs", "delete">; + /** + * @see https://docs.github.com/rest/reference/actions#delete-a-repository-secret + */ + "DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}": Operation<"/repos/{owner}/{repo}/actions/secrets/{secret_name}", "delete">; + /** + * @see https://docs.github.com/v3/repos#delete-autolink + */ + "DELETE /repos/{owner}/{repo}/autolinks/{autolink_id}": 
Operation<"/repos/{owner}/{repo}/autolinks/{autolink_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/repos#disable-automated-security-fixes + */ + "DELETE /repos/{owner}/{repo}/automated-security-fixes": Operation<"/repos/{owner}/{repo}/automated-security-fixes", "delete">; + /** + * @see https://docs.github.com/rest/reference/repos#delete-branch-protection + */ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection", "delete">; + /** + * @see https://docs.github.com/rest/reference/repos#delete-admin-branch-protection + */ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins", "delete">; + /** + * @see https://docs.github.com/rest/reference/repos#delete-pull-request-review-protection + */ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews", "delete">; + /** + * @see https://docs.github.com/rest/reference/repos#delete-commit-signature-protection + */ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/required_signatures", "delete">; + /** + * @see https://docs.github.com/rest/reference/repos#remove-status-check-protection + */ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks", "delete">; + /** + * @see https://docs.github.com/rest/reference/repos#remove-status-check-contexts + */ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", "delete">; + /** + * @see https://docs.github.com/rest/reference/repos#delete-access-restrictions + */ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/restrictions", "delete">; + /** + * @see https://docs.github.com/rest/reference/repos#remove-app-access-restrictions + */ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", "delete">; + /** + * @see https://docs.github.com/rest/reference/repos#remove-team-access-restrictions + */ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", "delete">; + /** + * @see https://docs.github.com/rest/reference/repos#remove-user-access-restrictions + */ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", "delete">; + /** + * @see https://docs.github.com/rest/reference/code-scanning#delete-a-code-scanning-analysis-from-a-repository + */ + "DELETE /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}{?confirm_delete}": Operation<"/repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/codespaces#delete-a-repository-secret + */ + "DELETE /repos/{owner}/{repo}/codespaces/secrets/{secret_name}": Operation<"/repos/{owner}/{repo}/codespaces/secrets/{secret_name}", "delete">; 
+ /** + * @see https://docs.github.com/rest/reference/repos#remove-a-repository-collaborator + */ + "DELETE /repos/{owner}/{repo}/collaborators/{username}": Operation<"/repos/{owner}/{repo}/collaborators/{username}", "delete">; + /** + * @see https://docs.github.com/rest/reference/repos#delete-a-commit-comment + */ + "DELETE /repos/{owner}/{repo}/comments/{comment_id}": Operation<"/repos/{owner}/{repo}/comments/{comment_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/reactions#delete-a-commit-comment-reaction + */ + "DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}": Operation<"/repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/repos#delete-a-file + */ + "DELETE /repos/{owner}/{repo}/contents/{path}": Operation<"/repos/{owner}/{repo}/contents/{path}", "delete">; + /** + * @see https://docs.github.com/rest/reference/dependabot#delete-a-repository-secret + */ + "DELETE /repos/{owner}/{repo}/dependabot/secrets/{secret_name}": Operation<"/repos/{owner}/{repo}/dependabot/secrets/{secret_name}", "delete">; + /** + * @see https://docs.github.com/rest/reference/repos#delete-a-deployment + */ + "DELETE /repos/{owner}/{repo}/deployments/{deployment_id}": Operation<"/repos/{owner}/{repo}/deployments/{deployment_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/repos#delete-an-environment + */ + "DELETE /repos/{owner}/{repo}/environments/{environment_name}": Operation<"/repos/{owner}/{repo}/environments/{environment_name}", "delete">; + /** + * @see https://docs.github.com/rest/reference/git#delete-a-reference + */ + "DELETE /repos/{owner}/{repo}/git/refs/{ref}": Operation<"/repos/{owner}/{repo}/git/refs/{ref}", "delete">; + /** + * @see https://docs.github.com/rest/reference/repos#delete-a-repository-webhook + */ + "DELETE /repos/{owner}/{repo}/hooks/{hook_id}": Operation<"/repos/{owner}/{repo}/hooks/{hook_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/migrations#cancel-an-import + */ + "DELETE /repos/{owner}/{repo}/import": Operation<"/repos/{owner}/{repo}/import", "delete">; + /** + * @see https://docs.github.com/rest/reference/interactions#remove-interaction-restrictions-for-a-repository + */ + "DELETE /repos/{owner}/{repo}/interaction-limits": Operation<"/repos/{owner}/{repo}/interaction-limits", "delete">; + /** + * @see https://docs.github.com/rest/reference/repos#delete-a-repository-invitation + */ + "DELETE /repos/{owner}/{repo}/invitations/{invitation_id}": Operation<"/repos/{owner}/{repo}/invitations/{invitation_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/issues#delete-an-issue-comment + */ + "DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}": Operation<"/repos/{owner}/{repo}/issues/comments/{comment_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/reactions#delete-an-issue-comment-reaction + */ + "DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}": Operation<"/repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/issues#remove-assignees-from-an-issue + */ + "DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees": Operation<"/repos/{owner}/{repo}/issues/{issue_number}/assignees", "delete">; + /** + * @see https://docs.github.com/rest/reference/issues#remove-all-labels-from-an-issue + */ + "DELETE 
/repos/{owner}/{repo}/issues/{issue_number}/labels": Operation<"/repos/{owner}/{repo}/issues/{issue_number}/labels", "delete">; + /** + * @see https://docs.github.com/rest/reference/issues#remove-a-label-from-an-issue + */ + "DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}": Operation<"/repos/{owner}/{repo}/issues/{issue_number}/labels/{name}", "delete">; + /** + * @see https://docs.github.com/rest/reference/issues#unlock-an-issue + */ + "DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock": Operation<"/repos/{owner}/{repo}/issues/{issue_number}/lock", "delete">; + /** + * @see https://docs.github.com/rest/reference/reactions#delete-an-issue-reaction + */ + "DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}": Operation<"/repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/repos#delete-a-deploy-key + */ + "DELETE /repos/{owner}/{repo}/keys/{key_id}": Operation<"/repos/{owner}/{repo}/keys/{key_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/issues#delete-a-label + */ + "DELETE /repos/{owner}/{repo}/labels/{name}": Operation<"/repos/{owner}/{repo}/labels/{name}", "delete">; + /** + * @see https://docs.github.com/rest/reference/repos#disable-git-lfs-for-a-repository + */ + "DELETE /repos/{owner}/{repo}/lfs": Operation<"/repos/{owner}/{repo}/lfs", "delete">; + /** + * @see https://docs.github.com/rest/reference/issues#delete-a-milestone + */ + "DELETE /repos/{owner}/{repo}/milestones/{milestone_number}": Operation<"/repos/{owner}/{repo}/milestones/{milestone_number}", "delete">; + /** + * @see https://docs.github.com/rest/reference/repos#delete-a-github-pages-site + */ + "DELETE /repos/{owner}/{repo}/pages": Operation<"/repos/{owner}/{repo}/pages", "delete">; + /** + * @see https://docs.github.com/rest/reference/pulls#delete-a-review-comment-for-a-pull-request + */ + "DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}": Operation<"/repos/{owner}/{repo}/pulls/comments/{comment_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/reactions#delete-a-pull-request-comment-reaction + */ + "DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}": Operation<"/repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/pulls#remove-requested-reviewers-from-a-pull-request + */ + "DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers": Operation<"/repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers", "delete">; + /** + * @see https://docs.github.com/rest/reference/pulls#delete-a-pending-review-for-a-pull-request + */ + "DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}": Operation<"/repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/repos#delete-a-release-asset + */ + "DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}": Operation<"/repos/{owner}/{repo}/releases/assets/{asset_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/repos#delete-a-release + */ + "DELETE /repos/{owner}/{repo}/releases/{release_id}": Operation<"/repos/{owner}/{repo}/releases/{release_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/reactions/#delete-a-release-reaction + */ + "DELETE /repos/{owner}/{repo}/releases/{release_id}/reactions/{reaction_id}": 
Operation<"/repos/{owner}/{repo}/releases/{release_id}/reactions/{reaction_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/activity#delete-a-repository-subscription + */ + "DELETE /repos/{owner}/{repo}/subscription": Operation<"/repos/{owner}/{repo}/subscription", "delete">; + /** + * @see https://docs.github.com/rest/reference/repos#delete-tag-protection-state-for-a-repository + */ + "DELETE /repos/{owner}/{repo}/tags/protection/{tag_protection_id}": Operation<"/repos/{owner}/{repo}/tags/protection/{tag_protection_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/repos#disable-vulnerability-alerts + */ + "DELETE /repos/{owner}/{repo}/vulnerability-alerts": Operation<"/repos/{owner}/{repo}/vulnerability-alerts", "delete">; + /** + * @see https://docs.github.com/rest/reference/actions#delete-an-environment-secret + */ + "DELETE /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}": Operation<"/repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}", "delete">; + /** + * @see https://docs.github.com/rest/reference/enterprise-admin#delete-a-scim-group-from-an-enterprise + */ + "DELETE /scim/v2/enterprises/{enterprise}/Groups/{scim_group_id}": Operation<"/scim/v2/enterprises/{enterprise}/Groups/{scim_group_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/enterprise-admin#delete-a-scim-user-from-an-enterprise + */ + "DELETE /scim/v2/enterprises/{enterprise}/Users/{scim_user_id}": Operation<"/scim/v2/enterprises/{enterprise}/Users/{scim_user_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/scim#delete-a-scim-user-from-an-organization + */ + "DELETE /scim/v2/organizations/{org}/Users/{scim_user_id}": Operation<"/scim/v2/organizations/{org}/Users/{scim_user_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/teams/#delete-a-team-legacy + */ + "DELETE /teams/{team_id}": Operation<"/teams/{team_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/teams#delete-a-discussion-legacy + */ + "DELETE /teams/{team_id}/discussions/{discussion_number}": Operation<"/teams/{team_id}/discussions/{discussion_number}", "delete">; + /** + * @see https://docs.github.com/rest/reference/teams#delete-a-discussion-comment-legacy + */ + "DELETE /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}": Operation<"/teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}", "delete">; + /** + * @see https://docs.github.com/rest/reference/teams#remove-team-member-legacy + */ + "DELETE /teams/{team_id}/members/{username}": Operation<"/teams/{team_id}/members/{username}", "delete">; + /** + * @see https://docs.github.com/rest/reference/teams#remove-team-membership-for-a-user-legacy + */ + "DELETE /teams/{team_id}/memberships/{username}": Operation<"/teams/{team_id}/memberships/{username}", "delete">; + /** + * @see https://docs.github.com/rest/reference/teams/#remove-a-project-from-a-team-legacy + */ + "DELETE /teams/{team_id}/projects/{project_id}": Operation<"/teams/{team_id}/projects/{project_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/teams/#remove-a-repository-from-a-team-legacy + */ + "DELETE /teams/{team_id}/repos/{owner}/{repo}": Operation<"/teams/{team_id}/repos/{owner}/{repo}", "delete">; + /** + * @see https://docs.github.com/rest/reference/users#unblock-a-user + */ + "DELETE /user/blocks/{username}": Operation<"/user/blocks/{username}", "delete">; + /** + * @see 
https://docs.github.com/rest/reference/codespaces#delete-a-secret-for-the-authenticated-user + */ + "DELETE /user/codespaces/secrets/{secret_name}": Operation<"/user/codespaces/secrets/{secret_name}", "delete">; + /** + * @see https://docs.github.com/rest/reference/codespaces#remove-a-selected-repository-from-a-user-secret + */ + "DELETE /user/codespaces/secrets/{secret_name}/repositories/{repository_id}": Operation<"/user/codespaces/secrets/{secret_name}/repositories/{repository_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/codespaces#delete-a-codespace-for-the-authenticated-user + */ + "DELETE /user/codespaces/{codespace_name}": Operation<"/user/codespaces/{codespace_name}", "delete">; + /** + * @see https://docs.github.com/rest/reference/users#delete-an-email-address-for-the-authenticated-user + */ + "DELETE /user/emails": Operation<"/user/emails", "delete">; + /** + * @see https://docs.github.com/rest/reference/users#unfollow-a-user + */ + "DELETE /user/following/{username}": Operation<"/user/following/{username}", "delete">; + /** + * @see https://docs.github.com/rest/reference/users#delete-a-gpg-key-for-the-authenticated-user + */ + "DELETE /user/gpg_keys/{gpg_key_id}": Operation<"/user/gpg_keys/{gpg_key_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/apps#remove-a-repository-from-an-app-installation + */ + "DELETE /user/installations/{installation_id}/repositories/{repository_id}": Operation<"/user/installations/{installation_id}/repositories/{repository_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/interactions#remove-interaction-restrictions-from-your-public-repositories + */ + "DELETE /user/interaction-limits": Operation<"/user/interaction-limits", "delete">; + /** + * @see https://docs.github.com/rest/reference/users#delete-a-public-ssh-key-for-the-authenticated-user + */ + "DELETE /user/keys/{key_id}": Operation<"/user/keys/{key_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/migrations#delete-a-user-migration-archive + */ + "DELETE /user/migrations/{migration_id}/archive": Operation<"/user/migrations/{migration_id}/archive", "delete">; + /** + * @see https://docs.github.com/rest/reference/migrations#unlock-a-user-repository + */ + "DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock": Operation<"/user/migrations/{migration_id}/repos/{repo_name}/lock", "delete">; + /** + * @see https://docs.github.com/rest/reference/packages#delete-a-package-for-the-authenticated-user + */ + "DELETE /user/packages/{package_type}/{package_name}": Operation<"/user/packages/{package_type}/{package_name}", "delete">; + /** + * @see https://docs.github.com/rest/reference/packages#delete-a-package-version-for-the-authenticated-user + */ + "DELETE /user/packages/{package_type}/{package_name}/versions/{package_version_id}": Operation<"/user/packages/{package_type}/{package_name}/versions/{package_version_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/repos#decline-a-repository-invitation + */ + "DELETE /user/repository_invitations/{invitation_id}": Operation<"/user/repository_invitations/{invitation_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/activity#unstar-a-repository-for-the-authenticated-user + */ + "DELETE /user/starred/{owner}/{repo}": Operation<"/user/starred/{owner}/{repo}", "delete">; + /** + * @see https://docs.github.com/rest/reference/packages#delete-a-package-for-a-user + */ + "DELETE 
/users/{username}/packages/{package_type}/{package_name}": Operation<"/users/{username}/packages/{package_type}/{package_name}", "delete">; + /** + * @see https://docs.github.com/rest/reference/packages#delete-a-package-version-for-a-user + */ + "DELETE /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}": Operation<"/users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}", "delete">; + /** + * @see https://docs.github.com/rest/overview/resources-in-the-rest-api#root-endpoint + */ + "GET /": Operation<"/", "get">; + /** + * @see https://docs.github.com/rest/reference/apps#get-the-authenticated-app + */ + "GET /app": Operation<"/app", "get">; + /** + * @see https://docs.github.com/rest/reference/apps#get-a-webhook-configuration-for-an-app + */ + "GET /app/hook/config": Operation<"/app/hook/config", "get">; + /** + * @see https://docs.github.com/rest/reference/apps#list-deliveries-for-an-app-webhook + */ + "GET /app/hook/deliveries": Operation<"/app/hook/deliveries", "get">; + /** + * @see https://docs.github.com/rest/reference/apps#get-a-delivery-for-an-app-webhook + */ + "GET /app/hook/deliveries/{delivery_id}": Operation<"/app/hook/deliveries/{delivery_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/apps#list-installations-for-the-authenticated-app + */ + "GET /app/installations": Operation<"/app/installations", "get">; + /** + * @see https://docs.github.com/rest/reference/apps#get-an-installation-for-the-authenticated-app + */ + "GET /app/installations/{installation_id}": Operation<"/app/installations/{installation_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/oauth-authorizations#list-your-grants + */ + "GET /applications/grants": Operation<"/applications/grants", "get">; + /** + * @see https://docs.github.com/rest/reference/oauth-authorizations#get-a-single-grant + */ + "GET /applications/grants/{grant_id}": Operation<"/applications/grants/{grant_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/apps/#get-an-app + */ + "GET /apps/{app_slug}": Operation<"/apps/{app_slug}", "get">; + /** + * @see https://docs.github.com/rest/reference/oauth-authorizations#list-your-authorizations + */ + "GET /authorizations": Operation<"/authorizations", "get">; + /** + * @see https://docs.github.com/rest/reference/oauth-authorizations#get-a-single-authorization + */ + "GET /authorizations/{authorization_id}": Operation<"/authorizations/{authorization_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/codes-of-conduct#get-all-codes-of-conduct + */ + "GET /codes_of_conduct": Operation<"/codes_of_conduct", "get">; + /** + * @see https://docs.github.com/rest/reference/codes-of-conduct#get-a-code-of-conduct + */ + "GET /codes_of_conduct/{key}": Operation<"/codes_of_conduct/{key}", "get">; + /** + * @see https://docs.github.com/rest/reference/emojis#get-emojis + */ + "GET /emojis": Operation<"/emojis", "get">; + /** + * @see https://docs.github.com/rest/reference/enterprise-admin#get-github-enterprise-server-statistics + */ + "GET /enterprise-installation/{enterprise_or_org}/server-statistics": Operation<"/enterprise-installation/{enterprise_or_org}/server-statistics", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-github-actions-cache-usage-for-an-enterprise + */ + "GET /enterprises/{enterprise}/actions/cache/usage": Operation<"/enterprises/{enterprise}/actions/cache/usage", "get">; + /** + * @see 
https://docs.github.com/rest/reference/actions#get-github-actions-permissions-for-an-enterprise + */ + "GET /enterprises/{enterprise}/actions/permissions": Operation<"/enterprises/{enterprise}/actions/permissions", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-selected-organizations-enabled-for-github-actions-in-an-enterprise + */ + "GET /enterprises/{enterprise}/actions/permissions/organizations": Operation<"/enterprises/{enterprise}/actions/permissions/organizations", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-allowed-actions-for-an-enterprise + */ + "GET /enterprises/{enterprise}/actions/permissions/selected-actions": Operation<"/enterprises/{enterprise}/actions/permissions/selected-actions", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-default-workflow-permissions-for-an-enterprise + */ + "GET /enterprises/{enterprise}/actions/permissions/workflow": Operation<"/enterprises/{enterprise}/actions/permissions/workflow", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-self-hosted-runner-groups-for-an-enterprise + */ + "GET /enterprises/{enterprise}/actions/runner-groups": Operation<"/enterprises/{enterprise}/actions/runner-groups", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-a-self-hosted-runner-group-for-an-enterprise + */ + "GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}": Operation<"/enterprises/{enterprise}/actions/runner-groups/{runner_group_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-organization-access-to-a-self-hosted-runner-group-in-a-enterprise + */ + "GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/organizations": Operation<"/enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/organizations", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-self-hosted-runners-in-a-group-for-an-enterprise + */ + "GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/runners": Operation<"/enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/runners", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-self-hosted-runners-for-an-enterprise + */ + "GET /enterprises/{enterprise}/actions/runners": Operation<"/enterprises/{enterprise}/actions/runners", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-runner-applications-for-an-enterprise + */ + "GET /enterprises/{enterprise}/actions/runners/downloads": Operation<"/enterprises/{enterprise}/actions/runners/downloads", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-a-self-hosted-runner-for-an-enterprise + */ + "GET /enterprises/{enterprise}/actions/runners/{runner_id}": Operation<"/enterprises/{enterprise}/actions/runners/{runner_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-labels-for-a-self-hosted-runner-for-an-enterprise + */ + "GET /enterprises/{enterprise}/actions/runners/{runner_id}/labels": Operation<"/enterprises/{enterprise}/actions/runners/{runner_id}/labels", "get">; + /** + * @see https://docs.github.com/rest/reference/enterprise-admin#get-the-audit-log-for-an-enterprise + */ + "GET /enterprises/{enterprise}/audit-log": Operation<"/enterprises/{enterprise}/audit-log", "get">; + /** + * @see https://docs.github.com/rest/reference/code-scanning#list-code-scanning-alerts-for-an-enterprise + */ + "GET /enterprises/{enterprise}/code-scanning/alerts": 
Operation<"/enterprises/{enterprise}/code-scanning/alerts", "get">; + /** + * @see https://docs.github.com/rest/reference/secret-scanning#list-secret-scanning-alerts-for-an-enterprise + */ + "GET /enterprises/{enterprise}/secret-scanning/alerts": Operation<"/enterprises/{enterprise}/secret-scanning/alerts", "get">; + /** + * @see https://docs.github.com/rest/reference/billing#get-github-actions-billing-for-an-enterprise + */ + "GET /enterprises/{enterprise}/settings/billing/actions": Operation<"/enterprises/{enterprise}/settings/billing/actions", "get">; + /** + * @see https://docs.github.com/rest/reference/billing#export-advanced-security-active-committers-data-for-enterprise + */ + "GET /enterprises/{enterprise}/settings/billing/advanced-security": Operation<"/enterprises/{enterprise}/settings/billing/advanced-security", "get">; + /** + * @see https://docs.github.com/rest/reference/billing#get-github-packages-billing-for-an-enterprise + */ + "GET /enterprises/{enterprise}/settings/billing/packages": Operation<"/enterprises/{enterprise}/settings/billing/packages", "get">; + /** + * @see https://docs.github.com/rest/reference/billing#get-shared-storage-billing-for-an-enterprise + */ + "GET /enterprises/{enterprise}/settings/billing/shared-storage": Operation<"/enterprises/{enterprise}/settings/billing/shared-storage", "get">; + /** + * @see https://docs.github.com/rest/reference/activity#list-public-events + */ + "GET /events": Operation<"/events", "get">; + /** + * @see https://docs.github.com/rest/reference/activity#get-feeds + */ + "GET /feeds": Operation<"/feeds", "get">; + /** + * @see https://docs.github.com/rest/reference/gists#list-gists-for-the-authenticated-user + */ + "GET /gists": Operation<"/gists", "get">; + /** + * @see https://docs.github.com/rest/reference/gists#list-public-gists + */ + "GET /gists/public": Operation<"/gists/public", "get">; + /** + * @see https://docs.github.com/rest/reference/gists#list-starred-gists + */ + "GET /gists/starred": Operation<"/gists/starred", "get">; + /** + * @see https://docs.github.com/rest/reference/gists#get-a-gist + */ + "GET /gists/{gist_id}": Operation<"/gists/{gist_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/gists#list-gist-comments + */ + "GET /gists/{gist_id}/comments": Operation<"/gists/{gist_id}/comments", "get">; + /** + * @see https://docs.github.com/rest/reference/gists#get-a-gist-comment + */ + "GET /gists/{gist_id}/comments/{comment_id}": Operation<"/gists/{gist_id}/comments/{comment_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/gists#list-gist-commits + */ + "GET /gists/{gist_id}/commits": Operation<"/gists/{gist_id}/commits", "get">; + /** + * @see https://docs.github.com/rest/reference/gists#list-gist-forks + */ + "GET /gists/{gist_id}/forks": Operation<"/gists/{gist_id}/forks", "get">; + /** + * @see https://docs.github.com/rest/reference/gists#check-if-a-gist-is-starred + */ + "GET /gists/{gist_id}/star": Operation<"/gists/{gist_id}/star", "get">; + /** + * @see https://docs.github.com/rest/reference/gists#get-a-gist-revision + */ + "GET /gists/{gist_id}/{sha}": Operation<"/gists/{gist_id}/{sha}", "get">; + /** + * @see https://docs.github.com/rest/reference/gitignore#get-all-gitignore-templates + */ + "GET /gitignore/templates": Operation<"/gitignore/templates", "get">; + /** + * @see https://docs.github.com/rest/reference/gitignore#get-a-gitignore-template + */ + "GET /gitignore/templates/{name}": Operation<"/gitignore/templates/{name}", "get">; + /** + * @see 
https://docs.github.com/rest/reference/apps#list-repositories-accessible-to-the-app-installation + */ + "GET /installation/repositories": Operation<"/installation/repositories", "get">; + /** + * @see https://docs.github.com/rest/reference/issues#list-issues-assigned-to-the-authenticated-user + */ + "GET /issues": Operation<"/issues", "get">; + /** + * @see https://docs.github.com/rest/reference/licenses#get-all-commonly-used-licenses + */ + "GET /licenses": Operation<"/licenses", "get">; + /** + * @see https://docs.github.com/rest/reference/licenses#get-a-license + */ + "GET /licenses/{license}": Operation<"/licenses/{license}", "get">; + /** + * @see https://docs.github.com/rest/reference/apps#get-a-subscription-plan-for-an-account + */ + "GET /marketplace_listing/accounts/{account_id}": Operation<"/marketplace_listing/accounts/{account_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/apps#list-plans + */ + "GET /marketplace_listing/plans": Operation<"/marketplace_listing/plans", "get">; + /** + * @see https://docs.github.com/rest/reference/apps#list-accounts-for-a-plan + */ + "GET /marketplace_listing/plans/{plan_id}/accounts": Operation<"/marketplace_listing/plans/{plan_id}/accounts", "get">; + /** + * @see https://docs.github.com/rest/reference/apps#get-a-subscription-plan-for-an-account-stubbed + */ + "GET /marketplace_listing/stubbed/accounts/{account_id}": Operation<"/marketplace_listing/stubbed/accounts/{account_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/apps#list-plans-stubbed + */ + "GET /marketplace_listing/stubbed/plans": Operation<"/marketplace_listing/stubbed/plans", "get">; + /** + * @see https://docs.github.com/rest/reference/apps#list-accounts-for-a-plan-stubbed + */ + "GET /marketplace_listing/stubbed/plans/{plan_id}/accounts": Operation<"/marketplace_listing/stubbed/plans/{plan_id}/accounts", "get">; + /** + * @see https://docs.github.com/rest/reference/meta#get-github-meta-information + */ + "GET /meta": Operation<"/meta", "get">; + /** + * @see https://docs.github.com/rest/reference/activity#list-public-events-for-a-network-of-repositories + */ + "GET /networks/{owner}/{repo}/events": Operation<"/networks/{owner}/{repo}/events", "get">; + /** + * @see https://docs.github.com/rest/reference/activity#list-notifications-for-the-authenticated-user + */ + "GET /notifications": Operation<"/notifications", "get">; + /** + * @see https://docs.github.com/rest/reference/activity#get-a-thread + */ + "GET /notifications/threads/{thread_id}": Operation<"/notifications/threads/{thread_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/activity#get-a-thread-subscription-for-the-authenticated-user + */ + "GET /notifications/threads/{thread_id}/subscription": Operation<"/notifications/threads/{thread_id}/subscription", "get">; + /** + * @see https://docs.github.com/rest/reference/meta#get-octocat + */ + "GET /octocat": Operation<"/octocat", "get">; + /** + * @see https://docs.github.com/rest/reference/orgs#list-organizations + */ + "GET /organizations": Operation<"/organizations", "get">; + /** + * @see https://docs.github.com/rest/reference/orgs#list-custom-repository-roles-in-an-organization + */ + "GET /organizations/{organization_id}/custom_roles": Operation<"/organizations/{organization_id}/custom_roles", "get">; + /** + * @see https://docs.github.com/rest/reference/codespaces#list-in-organization + * @deprecated "org_id" is now "org" + */ + "GET /orgs/{org_id}/codespaces": Operation<"/orgs/{org}/codespaces", "get">; + 
/** + * @see https://docs.github.com/rest/reference/orgs#get-an-organization + */ + "GET /orgs/{org}": Operation<"/orgs/{org}", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-github-actions-cache-usage-for-an-organization + */ + "GET /orgs/{org}/actions/cache/usage": Operation<"/orgs/{org}/actions/cache/usage", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-repositories-with-github-actions-cache-usage-for-an-organization + */ + "GET /orgs/{org}/actions/cache/usage-by-repository": Operation<"/orgs/{org}/actions/cache/usage-by-repository", "get">; + /** + * @see https://docs.github.com/rest/actions/oidc#get-the-customization-template-for-an-oidc-subject-claim-for-an-organization + */ + "GET /orgs/{org}/actions/oidc/customization/sub": Operation<"/orgs/{org}/actions/oidc/customization/sub", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-github-actions-permissions-for-an-organization + */ + "GET /orgs/{org}/actions/permissions": Operation<"/orgs/{org}/actions/permissions", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-selected-repositories-enabled-for-github-actions-in-an-organization + */ + "GET /orgs/{org}/actions/permissions/repositories": Operation<"/orgs/{org}/actions/permissions/repositories", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-allowed-actions-for-an-organization + */ + "GET /orgs/{org}/actions/permissions/selected-actions": Operation<"/orgs/{org}/actions/permissions/selected-actions", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-default-workflow-permissions + */ + "GET /orgs/{org}/actions/permissions/workflow": Operation<"/orgs/{org}/actions/permissions/workflow", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-self-hosted-runner-groups-for-an-organization + */ + "GET /orgs/{org}/actions/runner-groups": Operation<"/orgs/{org}/actions/runner-groups", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-a-self-hosted-runner-group-for-an-organization + */ + "GET /orgs/{org}/actions/runner-groups/{runner_group_id}": Operation<"/orgs/{org}/actions/runner-groups/{runner_group_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-repository-access-to-a-self-hosted-runner-group-in-an-organization + */ + "GET /orgs/{org}/actions/runner-groups/{runner_group_id}/repositories": Operation<"/orgs/{org}/actions/runner-groups/{runner_group_id}/repositories", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-self-hosted-runners-in-a-group-for-an-organization + */ + "GET /orgs/{org}/actions/runner-groups/{runner_group_id}/runners": Operation<"/orgs/{org}/actions/runner-groups/{runner_group_id}/runners", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-self-hosted-runners-for-an-organization + */ + "GET /orgs/{org}/actions/runners": Operation<"/orgs/{org}/actions/runners", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-runner-applications-for-an-organization + */ + "GET /orgs/{org}/actions/runners/downloads": Operation<"/orgs/{org}/actions/runners/downloads", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-a-self-hosted-runner-for-an-organization + */ + "GET /orgs/{org}/actions/runners/{runner_id}": Operation<"/orgs/{org}/actions/runners/{runner_id}", "get">; + /** + * @see 
https://docs.github.com/rest/reference/actions#list-labels-for-a-self-hosted-runner-for-an-organization + */ + "GET /orgs/{org}/actions/runners/{runner_id}/labels": Operation<"/orgs/{org}/actions/runners/{runner_id}/labels", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-organization-secrets + */ + "GET /orgs/{org}/actions/secrets": Operation<"/orgs/{org}/actions/secrets", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-an-organization-public-key + */ + "GET /orgs/{org}/actions/secrets/public-key": Operation<"/orgs/{org}/actions/secrets/public-key", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-an-organization-secret + */ + "GET /orgs/{org}/actions/secrets/{secret_name}": Operation<"/orgs/{org}/actions/secrets/{secret_name}", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-selected-repositories-for-an-organization-secret + */ + "GET /orgs/{org}/actions/secrets/{secret_name}/repositories": Operation<"/orgs/{org}/actions/secrets/{secret_name}/repositories", "get">; + /** + * @see https://docs.github.com/rest/reference/orgs#get-audit-log + */ + "GET /orgs/{org}/audit-log": Operation<"/orgs/{org}/audit-log", "get">; + /** + * @see https://docs.github.com/rest/reference/orgs#list-users-blocked-by-an-organization + */ + "GET /orgs/{org}/blocks": Operation<"/orgs/{org}/blocks", "get">; + /** + * @see https://docs.github.com/rest/reference/orgs#check-if-a-user-is-blocked-by-an-organization + */ + "GET /orgs/{org}/blocks/{username}": Operation<"/orgs/{org}/blocks/{username}", "get">; + /** + * @see https://docs.github.com/rest/reference/code-scanning#list-code-scanning-alerts-by-organization + */ + "GET /orgs/{org}/code-scanning/alerts": Operation<"/orgs/{org}/code-scanning/alerts", "get">; + /** + * @see https://docs.github.com/rest/reference/codespaces#list-in-organization + */ + "GET /orgs/{org}/codespaces": Operation<"/orgs/{org}/codespaces", "get">; + /** + * @see https://docs.github.com/rest/reference/orgs#list-saml-sso-authorizations-for-an-organization + */ + "GET /orgs/{org}/credential-authorizations": Operation<"/orgs/{org}/credential-authorizations", "get">; + /** + * @see https://docs.github.com/rest/reference/dependabot#list-organization-secrets + */ + "GET /orgs/{org}/dependabot/secrets": Operation<"/orgs/{org}/dependabot/secrets", "get">; + /** + * @see https://docs.github.com/rest/reference/dependabot#get-an-organization-public-key + */ + "GET /orgs/{org}/dependabot/secrets/public-key": Operation<"/orgs/{org}/dependabot/secrets/public-key", "get">; + /** + * @see https://docs.github.com/rest/reference/dependabot#get-an-organization-secret + */ + "GET /orgs/{org}/dependabot/secrets/{secret_name}": Operation<"/orgs/{org}/dependabot/secrets/{secret_name}", "get">; + /** + * @see https://docs.github.com/rest/reference/dependabot#list-selected-repositories-for-an-organization-secret + */ + "GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories": Operation<"/orgs/{org}/dependabot/secrets/{secret_name}/repositories", "get">; + /** + * @see https://docs.github.com/rest/reference/activity#list-public-organization-events + */ + "GET /orgs/{org}/events": Operation<"/orgs/{org}/events", "get">; + /** + * @see https://docs.github.com/rest/reference/teams#external-idp-group-info-for-an-organization + */ + "GET /orgs/{org}/external-group/{group_id}": Operation<"/orgs/{org}/external-group/{group_id}", "get">; + /** + * @see 
https://docs.github.com/rest/reference/teams#list-external-idp-groups-for-an-organization + */ + "GET /orgs/{org}/external-groups": Operation<"/orgs/{org}/external-groups", "get">; + /** + * @see https://docs.github.com/rest/reference/orgs#list-failed-organization-invitations + */ + "GET /orgs/{org}/failed_invitations": Operation<"/orgs/{org}/failed_invitations", "get">; + /** + * @see https://docs.github.com/rest/reference/orgs#list-organization-webhooks + */ + "GET /orgs/{org}/hooks": Operation<"/orgs/{org}/hooks", "get">; + /** + * @see https://docs.github.com/rest/reference/orgs#get-an-organization-webhook + */ + "GET /orgs/{org}/hooks/{hook_id}": Operation<"/orgs/{org}/hooks/{hook_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/orgs#get-a-webhook-configuration-for-an-organization + */ + "GET /orgs/{org}/hooks/{hook_id}/config": Operation<"/orgs/{org}/hooks/{hook_id}/config", "get">; + /** + * @see https://docs.github.com/rest/reference/orgs#list-deliveries-for-an-organization-webhook + */ + "GET /orgs/{org}/hooks/{hook_id}/deliveries": Operation<"/orgs/{org}/hooks/{hook_id}/deliveries", "get">; + /** + * @see https://docs.github.com/rest/reference/orgs#get-a-webhook-delivery-for-an-organization-webhook + */ + "GET /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}": Operation<"/orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/apps#get-an-organization-installation-for-the-authenticated-app + */ + "GET /orgs/{org}/installation": Operation<"/orgs/{org}/installation", "get">; + /** + * @see https://docs.github.com/rest/reference/orgs#list-app-installations-for-an-organization + */ + "GET /orgs/{org}/installations": Operation<"/orgs/{org}/installations", "get">; + /** + * @see https://docs.github.com/rest/reference/interactions#get-interaction-restrictions-for-an-organization + */ + "GET /orgs/{org}/interaction-limits": Operation<"/orgs/{org}/interaction-limits", "get">; + /** + * @see https://docs.github.com/rest/reference/orgs#list-pending-organization-invitations + */ + "GET /orgs/{org}/invitations": Operation<"/orgs/{org}/invitations", "get">; + /** + * @see https://docs.github.com/rest/reference/orgs#list-organization-invitation-teams + */ + "GET /orgs/{org}/invitations/{invitation_id}/teams": Operation<"/orgs/{org}/invitations/{invitation_id}/teams", "get">; + /** + * @see https://docs.github.com/rest/reference/issues#list-organization-issues-assigned-to-the-authenticated-user + */ + "GET /orgs/{org}/issues": Operation<"/orgs/{org}/issues", "get">; + /** + * @see https://docs.github.com/rest/reference/orgs#list-organization-members + */ + "GET /orgs/{org}/members": Operation<"/orgs/{org}/members", "get">; + /** + * @see https://docs.github.com/rest/reference/orgs#check-organization-membership-for-a-user + */ + "GET /orgs/{org}/members/{username}": Operation<"/orgs/{org}/members/{username}", "get">; + /** + * @see https://docs.github.com/rest/reference/orgs#get-organization-membership-for-a-user + */ + "GET /orgs/{org}/memberships/{username}": Operation<"/orgs/{org}/memberships/{username}", "get">; + /** + * @see https://docs.github.com/rest/reference/migrations#list-organization-migrations + */ + "GET /orgs/{org}/migrations": Operation<"/orgs/{org}/migrations", "get">; + /** + * @see https://docs.github.com/rest/reference/migrations#get-an-organization-migration-status + */ + "GET /orgs/{org}/migrations/{migration_id}": Operation<"/orgs/{org}/migrations/{migration_id}", "get">; + /** + * @see 
https://docs.github.com/rest/reference/migrations#download-an-organization-migration-archive + */ + "GET /orgs/{org}/migrations/{migration_id}/archive": Operation<"/orgs/{org}/migrations/{migration_id}/archive", "get">; + /** + * @see https://docs.github.com/rest/reference/migrations#list-repositories-in-an-organization-migration + */ + "GET /orgs/{org}/migrations/{migration_id}/repositories": Operation<"/orgs/{org}/migrations/{migration_id}/repositories", "get">; + /** + * @see https://docs.github.com/rest/reference/orgs#list-outside-collaborators-for-an-organization + */ + "GET /orgs/{org}/outside_collaborators": Operation<"/orgs/{org}/outside_collaborators", "get">; + /** + * @see https://docs.github.com/rest/reference/packages#list-packages-for-an-organization + */ + "GET /orgs/{org}/packages": Operation<"/orgs/{org}/packages", "get">; + /** + * @see https://docs.github.com/rest/reference/packages#get-a-package-for-an-organization + */ + "GET /orgs/{org}/packages/{package_type}/{package_name}": Operation<"/orgs/{org}/packages/{package_type}/{package_name}", "get">; + /** + * @see https://docs.github.com/rest/reference/packages#get-all-package-versions-for-a-package-owned-by-an-organization + */ + "GET /orgs/{org}/packages/{package_type}/{package_name}/versions": Operation<"/orgs/{org}/packages/{package_type}/{package_name}/versions", "get">; + /** + * @see https://docs.github.com/rest/reference/packages#get-a-package-version-for-an-organization + */ + "GET /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}": Operation<"/orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/projects#list-organization-projects + */ + "GET /orgs/{org}/projects": Operation<"/orgs/{org}/projects", "get">; + /** + * @see https://docs.github.com/rest/reference/orgs#list-public-organization-members + */ + "GET /orgs/{org}/public_members": Operation<"/orgs/{org}/public_members", "get">; + /** + * @see https://docs.github.com/rest/reference/orgs#check-public-organization-membership-for-a-user + */ + "GET /orgs/{org}/public_members/{username}": Operation<"/orgs/{org}/public_members/{username}", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#list-organization-repositories + */ + "GET /orgs/{org}/repos": Operation<"/orgs/{org}/repos", "get">; + /** + * @see https://docs.github.com/rest/reference/secret-scanning#list-secret-scanning-alerts-for-an-organization + */ + "GET /orgs/{org}/secret-scanning/alerts": Operation<"/orgs/{org}/secret-scanning/alerts", "get">; + /** + * @see https://docs.github.com/rest/reference/billing#get-github-actions-billing-for-an-organization + */ + "GET /orgs/{org}/settings/billing/actions": Operation<"/orgs/{org}/settings/billing/actions", "get">; + /** + * @see https://docs.github.com/rest/reference/billing#get-github-advanced-security-active-committers-for-an-organization + */ + "GET /orgs/{org}/settings/billing/advanced-security": Operation<"/orgs/{org}/settings/billing/advanced-security", "get">; + /** + * @see https://docs.github.com/rest/reference/billing#get-github-packages-billing-for-an-organization + */ + "GET /orgs/{org}/settings/billing/packages": Operation<"/orgs/{org}/settings/billing/packages", "get">; + /** + * @see https://docs.github.com/rest/reference/billing#get-shared-storage-billing-for-an-organization + */ + "GET /orgs/{org}/settings/billing/shared-storage": Operation<"/orgs/{org}/settings/billing/shared-storage", "get">; + 
/** + * @see https://docs.github.com/rest/reference/teams#list-idp-groups-for-an-organization + */ + "GET /orgs/{org}/team-sync/groups": Operation<"/orgs/{org}/team-sync/groups", "get">; + /** + * @see https://docs.github.com/rest/reference/teams#list-teams + */ + "GET /orgs/{org}/teams": Operation<"/orgs/{org}/teams", "get">; + /** + * @see https://docs.github.com/rest/reference/teams#get-a-team-by-name + */ + "GET /orgs/{org}/teams/{team_slug}": Operation<"/orgs/{org}/teams/{team_slug}", "get">; + /** + * @see https://docs.github.com/rest/reference/teams#list-discussions + */ + "GET /orgs/{org}/teams/{team_slug}/discussions": Operation<"/orgs/{org}/teams/{team_slug}/discussions", "get">; + /** + * @see https://docs.github.com/rest/reference/teams#get-a-discussion + */ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}": Operation<"/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}", "get">; + /** + * @see https://docs.github.com/rest/reference/teams#list-discussion-comments + */ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments": Operation<"/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments", "get">; + /** + * @see https://docs.github.com/rest/reference/teams#get-a-discussion-comment + */ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}": Operation<"/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}", "get">; + /** + * @see https://docs.github.com/rest/reference/reactions#list-reactions-for-a-team-discussion-comment + */ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions": Operation<"/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", "get">; + /** + * @see https://docs.github.com/rest/reference/reactions#list-reactions-for-a-team-discussion + */ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions": Operation<"/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", "get">; + /** + * @see https://docs.github.com/rest/reference/teams#list-external-idp-group-team-connection + */ + "GET /orgs/{org}/teams/{team_slug}/external-groups": Operation<"/orgs/{org}/teams/{team_slug}/external-groups", "get">; + /** + * @see https://docs.github.com/rest/reference/teams#list-pending-team-invitations + */ + "GET /orgs/{org}/teams/{team_slug}/invitations": Operation<"/orgs/{org}/teams/{team_slug}/invitations", "get">; + /** + * @see https://docs.github.com/rest/reference/teams#list-team-members + */ + "GET /orgs/{org}/teams/{team_slug}/members": Operation<"/orgs/{org}/teams/{team_slug}/members", "get">; + /** + * @see https://docs.github.com/rest/reference/teams#get-team-membership-for-a-user + */ + "GET /orgs/{org}/teams/{team_slug}/memberships/{username}": Operation<"/orgs/{org}/teams/{team_slug}/memberships/{username}", "get">; + /** + * @see https://docs.github.com/rest/reference/teams#list-team-projects + */ + "GET /orgs/{org}/teams/{team_slug}/projects": Operation<"/orgs/{org}/teams/{team_slug}/projects", "get">; + /** + * @see https://docs.github.com/rest/reference/teams#check-team-permissions-for-a-project + */ + "GET /orgs/{org}/teams/{team_slug}/projects/{project_id}": Operation<"/orgs/{org}/teams/{team_slug}/projects/{project_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/teams#list-team-repositories + */ + "GET /orgs/{org}/teams/{team_slug}/repos": 
Operation<"/orgs/{org}/teams/{team_slug}/repos", "get">; + /** + * @see https://docs.github.com/rest/reference/teams/#check-team-permissions-for-a-repository + */ + "GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}": Operation<"/orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}", "get">; + /** + * @see https://docs.github.com/rest/reference/teams#list-idp-groups-for-a-team + */ + "GET /orgs/{org}/teams/{team_slug}/team-sync/group-mappings": Operation<"/orgs/{org}/teams/{team_slug}/team-sync/group-mappings", "get">; + /** + * @see https://docs.github.com/rest/reference/teams#list-child-teams + */ + "GET /orgs/{org}/teams/{team_slug}/teams": Operation<"/orgs/{org}/teams/{team_slug}/teams", "get">; + /** + * @see https://docs.github.com/rest/reference/projects#get-a-project-card + */ + "GET /projects/columns/cards/{card_id}": Operation<"/projects/columns/cards/{card_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/projects#get-a-project-column + */ + "GET /projects/columns/{column_id}": Operation<"/projects/columns/{column_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/projects#list-project-cards + */ + "GET /projects/columns/{column_id}/cards": Operation<"/projects/columns/{column_id}/cards", "get">; + /** + * @see https://docs.github.com/rest/reference/projects#get-a-project + */ + "GET /projects/{project_id}": Operation<"/projects/{project_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/projects#list-project-collaborators + */ + "GET /projects/{project_id}/collaborators": Operation<"/projects/{project_id}/collaborators", "get">; + /** + * @see https://docs.github.com/rest/reference/projects#get-project-permission-for-a-user + */ + "GET /projects/{project_id}/collaborators/{username}/permission": Operation<"/projects/{project_id}/collaborators/{username}/permission", "get">; + /** + * @see https://docs.github.com/rest/reference/projects#list-project-columns + */ + "GET /projects/{project_id}/columns": Operation<"/projects/{project_id}/columns", "get">; + /** + * @see https://docs.github.com/rest/reference/rate-limit#get-rate-limit-status-for-the-authenticated-user + */ + "GET /rate_limit": Operation<"/rate_limit", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#get-a-repository + */ + "GET /repos/{owner}/{repo}": Operation<"/repos/{owner}/{repo}", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-artifacts-for-a-repository + */ + "GET /repos/{owner}/{repo}/actions/artifacts": Operation<"/repos/{owner}/{repo}/actions/artifacts", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-an-artifact + */ + "GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}": Operation<"/repos/{owner}/{repo}/actions/artifacts/{artifact_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#download-an-artifact + */ + "GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}": Operation<"/repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-github-actions-cache-usage-for-a-repository + */ + "GET /repos/{owner}/{repo}/actions/cache/usage": Operation<"/repos/{owner}/{repo}/actions/cache/usage", "get">; + /** + * @see https://docs.github.com/rest/actions/cache#list-github-actions-caches-for-a-repository + */ + "GET /repos/{owner}/{repo}/actions/caches": Operation<"/repos/{owner}/{repo}/actions/caches", "get">; + /** + * @see 
https://docs.github.com/rest/reference/actions#get-a-job-for-a-workflow-run + */ + "GET /repos/{owner}/{repo}/actions/jobs/{job_id}": Operation<"/repos/{owner}/{repo}/actions/jobs/{job_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#download-job-logs-for-a-workflow-run + */ + "GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs": Operation<"/repos/{owner}/{repo}/actions/jobs/{job_id}/logs", "get">; + /** + * @see https://docs.github.com/rest/actions/oidc#get-the-opt-out-flag-of-an-oidc-subject-claim-customization-for-a-repository + */ + "GET /repos/{owner}/{repo}/actions/oidc/customization/sub": Operation<"/repos/{owner}/{repo}/actions/oidc/customization/sub", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-github-actions-permissions-for-a-repository + */ + "GET /repos/{owner}/{repo}/actions/permissions": Operation<"/repos/{owner}/{repo}/actions/permissions", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-workflow-access-level-to-a-repository + */ + "GET /repos/{owner}/{repo}/actions/permissions/access": Operation<"/repos/{owner}/{repo}/actions/permissions/access", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-allowed-actions-for-a-repository + */ + "GET /repos/{owner}/{repo}/actions/permissions/selected-actions": Operation<"/repos/{owner}/{repo}/actions/permissions/selected-actions", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-default-workflow-permissions-for-a-repository + */ + "GET /repos/{owner}/{repo}/actions/permissions/workflow": Operation<"/repos/{owner}/{repo}/actions/permissions/workflow", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-self-hosted-runners-for-a-repository + */ + "GET /repos/{owner}/{repo}/actions/runners": Operation<"/repos/{owner}/{repo}/actions/runners", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-runner-applications-for-a-repository + */ + "GET /repos/{owner}/{repo}/actions/runners/downloads": Operation<"/repos/{owner}/{repo}/actions/runners/downloads", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-a-self-hosted-runner-for-a-repository + */ + "GET /repos/{owner}/{repo}/actions/runners/{runner_id}": Operation<"/repos/{owner}/{repo}/actions/runners/{runner_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-labels-for-a-self-hosted-runner-for-a-repository + */ + "GET /repos/{owner}/{repo}/actions/runners/{runner_id}/labels": Operation<"/repos/{owner}/{repo}/actions/runners/{runner_id}/labels", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-workflow-runs-for-a-repository + */ + "GET /repos/{owner}/{repo}/actions/runs": Operation<"/repos/{owner}/{repo}/actions/runs", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-a-workflow-run + */ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}": Operation<"/repos/{owner}/{repo}/actions/runs/{run_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-the-review-history-for-a-workflow-run + */ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/approvals": Operation<"/repos/{owner}/{repo}/actions/runs/{run_id}/approvals", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-workflow-run-artifacts + */ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts": Operation<"/repos/{owner}/{repo}/actions/runs/{run_id}/artifacts", "get">; + /** + * @see 
https://docs.github.com/rest/reference/actions#get-a-workflow-run-attempt + */ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}": Operation<"/repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-jobs-for-a-workflow-run-attempt + */ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs": Operation<"/repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#download-workflow-run-attempt-logs + */ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/logs": Operation<"/repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/logs", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-jobs-for-a-workflow-run + */ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs": Operation<"/repos/{owner}/{repo}/actions/runs/{run_id}/jobs", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#download-workflow-run-logs + */ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs": Operation<"/repos/{owner}/{repo}/actions/runs/{run_id}/logs", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-pending-deployments-for-a-workflow-run + */ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments": Operation<"/repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-workflow-run-usage + */ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing": Operation<"/repos/{owner}/{repo}/actions/runs/{run_id}/timing", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-repository-secrets + */ + "GET /repos/{owner}/{repo}/actions/secrets": Operation<"/repos/{owner}/{repo}/actions/secrets", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-a-repository-public-key + */ + "GET /repos/{owner}/{repo}/actions/secrets/public-key": Operation<"/repos/{owner}/{repo}/actions/secrets/public-key", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-a-repository-secret + */ + "GET /repos/{owner}/{repo}/actions/secrets/{secret_name}": Operation<"/repos/{owner}/{repo}/actions/secrets/{secret_name}", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-repository-workflows + */ + "GET /repos/{owner}/{repo}/actions/workflows": Operation<"/repos/{owner}/{repo}/actions/workflows", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-a-workflow + */ + "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}": Operation<"/repos/{owner}/{repo}/actions/workflows/{workflow_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-workflow-runs + */ + "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs": Operation<"/repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-workflow-usage + */ + "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing": Operation<"/repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing", "get">; + /** + * @see https://docs.github.com/rest/reference/issues#list-assignees + */ + "GET /repos/{owner}/{repo}/assignees": Operation<"/repos/{owner}/{repo}/assignees", "get">; + /** + * @see 
https://docs.github.com/rest/reference/issues#check-if-a-user-can-be-assigned + */ + "GET /repos/{owner}/{repo}/assignees/{assignee}": Operation<"/repos/{owner}/{repo}/assignees/{assignee}", "get">; + /** + * @see https://docs.github.com/v3/repos#list-autolinks + */ + "GET /repos/{owner}/{repo}/autolinks": Operation<"/repos/{owner}/{repo}/autolinks", "get">; + /** + * @see https://docs.github.com/v3/repos#get-autolink + */ + "GET /repos/{owner}/{repo}/autolinks/{autolink_id}": Operation<"/repos/{owner}/{repo}/autolinks/{autolink_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#list-branches + */ + "GET /repos/{owner}/{repo}/branches": Operation<"/repos/{owner}/{repo}/branches", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#get-a-branch + */ + "GET /repos/{owner}/{repo}/branches/{branch}": Operation<"/repos/{owner}/{repo}/branches/{branch}", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#get-branch-protection + */ + "GET /repos/{owner}/{repo}/branches/{branch}/protection": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#get-admin-branch-protection + */ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#get-pull-request-review-protection + */ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#get-commit-signature-protection + */ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/required_signatures", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#get-status-checks-protection + */ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#get-all-status-check-contexts + */ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#get-access-restrictions + */ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/restrictions", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#list-apps-with-access-to-the-protected-branch + */ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#list-teams-with-access-to-the-protected-branch + */ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#list-users-with-access-to-the-protected-branch + */ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users": 
Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", "get">; + /** + * @see https://docs.github.com/rest/reference/checks#get-a-check-run + */ + "GET /repos/{owner}/{repo}/check-runs/{check_run_id}": Operation<"/repos/{owner}/{repo}/check-runs/{check_run_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/checks#list-check-run-annotations + */ + "GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations": Operation<"/repos/{owner}/{repo}/check-runs/{check_run_id}/annotations", "get">; + /** + * @see https://docs.github.com/rest/reference/checks#get-a-check-suite + */ + "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}": Operation<"/repos/{owner}/{repo}/check-suites/{check_suite_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/checks#list-check-runs-in-a-check-suite + */ + "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs": Operation<"/repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs", "get">; + /** + * @see https://docs.github.com/rest/reference/code-scanning#list-code-scanning-alerts-for-a-repository + */ + "GET /repos/{owner}/{repo}/code-scanning/alerts": Operation<"/repos/{owner}/{repo}/code-scanning/alerts", "get">; + /** + * @see https://docs.github.com/rest/reference/code-scanning#get-a-code-scanning-alert + * @deprecated "alert_id" is now "alert_number" + */ + "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_id}": Operation<"/repos/{owner}/{repo}/code-scanning/alerts/{alert_number}", "get">; + /** + * @see https://docs.github.com/rest/reference/code-scanning#get-a-code-scanning-alert + */ + "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}": Operation<"/repos/{owner}/{repo}/code-scanning/alerts/{alert_number}", "get">; + /** + * @see https://docs.github.com/rest/reference/code-scanning#list-instances-of-a-code-scanning-alert + */ + "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances": Operation<"/repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", "get">; + /** + * @see https://docs.github.com/rest/reference/code-scanning#list-code-scanning-analyses-for-a-repository + */ + "GET /repos/{owner}/{repo}/code-scanning/analyses": Operation<"/repos/{owner}/{repo}/code-scanning/analyses", "get">; + /** + * @see https://docs.github.com/rest/reference/code-scanning#get-a-code-scanning-analysis-for-a-repository + */ + "GET /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}": Operation<"/repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/code-scanning#list-recent-code-scanning-analyses-for-a-repository + */ + "GET /repos/{owner}/{repo}/code-scanning/sarifs/{sarif_id}": Operation<"/repos/{owner}/{repo}/code-scanning/sarifs/{sarif_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#list-codeowners-errors + */ + "GET /repos/{owner}/{repo}/codeowners/errors": Operation<"/repos/{owner}/{repo}/codeowners/errors", "get">; + /** + * @see https://docs.github.com/rest/reference/codespaces#list-codespaces-in-a-repository-for-the-authenticated-user + */ + "GET /repos/{owner}/{repo}/codespaces": Operation<"/repos/{owner}/{repo}/codespaces", "get">; + /** + * @see https://docs.github.com/rest/reference/codespaces#list-devcontainers-in-a-repository-for-the-authenticated-user + */ + "GET /repos/{owner}/{repo}/codespaces/devcontainers": Operation<"/repos/{owner}/{repo}/codespaces/devcontainers", "get">; + /** + * @see 
https://docs.github.com/rest/reference/codespaces#list-available-machine-types-for-a-repository + */ + "GET /repos/{owner}/{repo}/codespaces/machines": Operation<"/repos/{owner}/{repo}/codespaces/machines", "get">; + /** + * @see https://docs.github.com/rest/reference/codespaces#preview-attributes-for-a-new-codespace + */ + "GET /repos/{owner}/{repo}/codespaces/new": Operation<"/repos/{owner}/{repo}/codespaces/new", "get">; + /** + * @see https://docs.github.com/rest/reference/codespaces#list-repository-secrets + */ + "GET /repos/{owner}/{repo}/codespaces/secrets": Operation<"/repos/{owner}/{repo}/codespaces/secrets", "get">; + /** + * @see https://docs.github.com/rest/reference/codespaces#get-a-repository-public-key + */ + "GET /repos/{owner}/{repo}/codespaces/secrets/public-key": Operation<"/repos/{owner}/{repo}/codespaces/secrets/public-key", "get">; + /** + * @see https://docs.github.com/rest/reference/codespaces#get-a-repository-secret + */ + "GET /repos/{owner}/{repo}/codespaces/secrets/{secret_name}": Operation<"/repos/{owner}/{repo}/codespaces/secrets/{secret_name}", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#list-repository-collaborators + */ + "GET /repos/{owner}/{repo}/collaborators": Operation<"/repos/{owner}/{repo}/collaborators", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#check-if-a-user-is-a-repository-collaborator + */ + "GET /repos/{owner}/{repo}/collaborators/{username}": Operation<"/repos/{owner}/{repo}/collaborators/{username}", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#get-repository-permissions-for-a-user + */ + "GET /repos/{owner}/{repo}/collaborators/{username}/permission": Operation<"/repos/{owner}/{repo}/collaborators/{username}/permission", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#list-commit-comments-for-a-repository + */ + "GET /repos/{owner}/{repo}/comments": Operation<"/repos/{owner}/{repo}/comments", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#get-a-commit-comment + */ + "GET /repos/{owner}/{repo}/comments/{comment_id}": Operation<"/repos/{owner}/{repo}/comments/{comment_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/reactions#list-reactions-for-a-commit-comment + */ + "GET /repos/{owner}/{repo}/comments/{comment_id}/reactions": Operation<"/repos/{owner}/{repo}/comments/{comment_id}/reactions", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#list-commits + */ + "GET /repos/{owner}/{repo}/commits": Operation<"/repos/{owner}/{repo}/commits", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#list-branches-for-head-commit + */ + "GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head": Operation<"/repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#list-commit-comments + */ + "GET /repos/{owner}/{repo}/commits/{commit_sha}/comments": Operation<"/repos/{owner}/{repo}/commits/{commit_sha}/comments", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#list-pull-requests-associated-with-a-commit + */ + "GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls": Operation<"/repos/{owner}/{repo}/commits/{commit_sha}/pulls", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#get-a-commit + */ + "GET /repos/{owner}/{repo}/commits/{ref}": Operation<"/repos/{owner}/{repo}/commits/{ref}", "get">; + /** + * @see 
https://docs.github.com/rest/reference/checks#list-check-runs-for-a-git-reference + */ + "GET /repos/{owner}/{repo}/commits/{ref}/check-runs": Operation<"/repos/{owner}/{repo}/commits/{ref}/check-runs", "get">; + /** + * @see https://docs.github.com/rest/reference/checks#list-check-suites-for-a-git-reference + */ + "GET /repos/{owner}/{repo}/commits/{ref}/check-suites": Operation<"/repos/{owner}/{repo}/commits/{ref}/check-suites", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#get-the-combined-status-for-a-specific-reference + */ + "GET /repos/{owner}/{repo}/commits/{ref}/status": Operation<"/repos/{owner}/{repo}/commits/{ref}/status", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#list-commit-statuses-for-a-reference + */ + "GET /repos/{owner}/{repo}/commits/{ref}/statuses": Operation<"/repos/{owner}/{repo}/commits/{ref}/statuses", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#get-community-profile-metrics + */ + "GET /repos/{owner}/{repo}/community/profile": Operation<"/repos/{owner}/{repo}/community/profile", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#compare-two-commits + */ + "GET /repos/{owner}/{repo}/compare/{basehead}": Operation<"/repos/{owner}/{repo}/compare/{basehead}", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#compare-two-commits + */ + "GET /repos/{owner}/{repo}/compare/{base}...{head}": Operation<"/repos/{owner}/{repo}/compare/{base}...{head}", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#get-repository-content + */ + "GET /repos/{owner}/{repo}/contents/{path}": Operation<"/repos/{owner}/{repo}/contents/{path}", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#list-repository-contributors + */ + "GET /repos/{owner}/{repo}/contributors": Operation<"/repos/{owner}/{repo}/contributors", "get">; + /** + * @see https://docs.github.com/rest/reference/dependabot#list-repository-secrets + */ + "GET /repos/{owner}/{repo}/dependabot/secrets": Operation<"/repos/{owner}/{repo}/dependabot/secrets", "get">; + /** + * @see https://docs.github.com/rest/reference/dependabot#get-a-repository-public-key + */ + "GET /repos/{owner}/{repo}/dependabot/secrets/public-key": Operation<"/repos/{owner}/{repo}/dependabot/secrets/public-key", "get">; + /** + * @see https://docs.github.com/rest/reference/dependabot#get-a-repository-secret + */ + "GET /repos/{owner}/{repo}/dependabot/secrets/{secret_name}": Operation<"/repos/{owner}/{repo}/dependabot/secrets/{secret_name}", "get">; + /** + * @see https://docs.github.com/rest/reference/dependency-graph#get-a-diff-of-the-dependencies-between-commits + */ + "GET /repos/{owner}/{repo}/dependency-graph/compare/{basehead}": Operation<"/repos/{owner}/{repo}/dependency-graph/compare/{basehead}", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#list-deployments + */ + "GET /repos/{owner}/{repo}/deployments": Operation<"/repos/{owner}/{repo}/deployments", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#get-a-deployment + */ + "GET /repos/{owner}/{repo}/deployments/{deployment_id}": Operation<"/repos/{owner}/{repo}/deployments/{deployment_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#list-deployment-statuses + */ + "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses": Operation<"/repos/{owner}/{repo}/deployments/{deployment_id}/statuses", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#get-a-deployment-status + */ + 
"GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}": Operation<"/repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#get-all-environments + */ + "GET /repos/{owner}/{repo}/environments": Operation<"/repos/{owner}/{repo}/environments", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#get-an-environment + */ + "GET /repos/{owner}/{repo}/environments/{environment_name}": Operation<"/repos/{owner}/{repo}/environments/{environment_name}", "get">; + /** + * @see https://docs.github.com/rest/reference/activity#list-repository-events + */ + "GET /repos/{owner}/{repo}/events": Operation<"/repos/{owner}/{repo}/events", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#list-forks + */ + "GET /repos/{owner}/{repo}/forks": Operation<"/repos/{owner}/{repo}/forks", "get">; + /** + * @see https://docs.github.com/rest/reference/git#get-a-blob + */ + "GET /repos/{owner}/{repo}/git/blobs/{file_sha}": Operation<"/repos/{owner}/{repo}/git/blobs/{file_sha}", "get">; + /** + * @see https://docs.github.com/rest/reference/git#get-a-commit + */ + "GET /repos/{owner}/{repo}/git/commits/{commit_sha}": Operation<"/repos/{owner}/{repo}/git/commits/{commit_sha}", "get">; + /** + * @see https://docs.github.com/rest/reference/git#list-matching-references + */ + "GET /repos/{owner}/{repo}/git/matching-refs/{ref}": Operation<"/repos/{owner}/{repo}/git/matching-refs/{ref}", "get">; + /** + * @see https://docs.github.com/rest/reference/git#get-a-reference + */ + "GET /repos/{owner}/{repo}/git/ref/{ref}": Operation<"/repos/{owner}/{repo}/git/ref/{ref}", "get">; + /** + * @see https://docs.github.com/rest/reference/git#get-a-tag + */ + "GET /repos/{owner}/{repo}/git/tags/{tag_sha}": Operation<"/repos/{owner}/{repo}/git/tags/{tag_sha}", "get">; + /** + * @see https://docs.github.com/rest/reference/git#get-a-tree + */ + "GET /repos/{owner}/{repo}/git/trees/{tree_sha}": Operation<"/repos/{owner}/{repo}/git/trees/{tree_sha}", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#list-repository-webhooks + */ + "GET /repos/{owner}/{repo}/hooks": Operation<"/repos/{owner}/{repo}/hooks", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#get-a-repository-webhook + */ + "GET /repos/{owner}/{repo}/hooks/{hook_id}": Operation<"/repos/{owner}/{repo}/hooks/{hook_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#get-a-webhook-configuration-for-a-repository + */ + "GET /repos/{owner}/{repo}/hooks/{hook_id}/config": Operation<"/repos/{owner}/{repo}/hooks/{hook_id}/config", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#list-deliveries-for-a-repository-webhook + */ + "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries": Operation<"/repos/{owner}/{repo}/hooks/{hook_id}/deliveries", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#get-a-delivery-for-a-repository-webhook + */ + "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}": Operation<"/repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/migrations#get-an-import-status + */ + "GET /repos/{owner}/{repo}/import": Operation<"/repos/{owner}/{repo}/import", "get">; + /** + * @see https://docs.github.com/rest/reference/migrations#get-commit-authors + */ + "GET /repos/{owner}/{repo}/import/authors": Operation<"/repos/{owner}/{repo}/import/authors", "get">; + /** + * 
@see https://docs.github.com/rest/reference/migrations#get-large-files + */ + "GET /repos/{owner}/{repo}/import/large_files": Operation<"/repos/{owner}/{repo}/import/large_files", "get">; + /** + * @see https://docs.github.com/rest/reference/apps#get-a-repository-installation-for-the-authenticated-app + */ + "GET /repos/{owner}/{repo}/installation": Operation<"/repos/{owner}/{repo}/installation", "get">; + /** + * @see https://docs.github.com/rest/reference/interactions#get-interaction-restrictions-for-a-repository + */ + "GET /repos/{owner}/{repo}/interaction-limits": Operation<"/repos/{owner}/{repo}/interaction-limits", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#list-repository-invitations + */ + "GET /repos/{owner}/{repo}/invitations": Operation<"/repos/{owner}/{repo}/invitations", "get">; + /** + * @see https://docs.github.com/rest/reference/issues#list-repository-issues + */ + "GET /repos/{owner}/{repo}/issues": Operation<"/repos/{owner}/{repo}/issues", "get">; + /** + * @see https://docs.github.com/rest/reference/issues#list-issue-comments-for-a-repository + */ + "GET /repos/{owner}/{repo}/issues/comments": Operation<"/repos/{owner}/{repo}/issues/comments", "get">; + /** + * @see https://docs.github.com/rest/reference/issues#get-an-issue-comment + */ + "GET /repos/{owner}/{repo}/issues/comments/{comment_id}": Operation<"/repos/{owner}/{repo}/issues/comments/{comment_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/reactions#list-reactions-for-an-issue-comment + */ + "GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions": Operation<"/repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", "get">; + /** + * @see https://docs.github.com/rest/reference/issues#list-issue-events-for-a-repository + */ + "GET /repos/{owner}/{repo}/issues/events": Operation<"/repos/{owner}/{repo}/issues/events", "get">; + /** + * @see https://docs.github.com/rest/reference/issues#get-an-issue-event + */ + "GET /repos/{owner}/{repo}/issues/events/{event_id}": Operation<"/repos/{owner}/{repo}/issues/events/{event_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/issues#get-an-issue + */ + "GET /repos/{owner}/{repo}/issues/{issue_number}": Operation<"/repos/{owner}/{repo}/issues/{issue_number}", "get">; + /** + * @see https://docs.github.com/rest/reference/issues#list-issue-comments + */ + "GET /repos/{owner}/{repo}/issues/{issue_number}/comments": Operation<"/repos/{owner}/{repo}/issues/{issue_number}/comments", "get">; + /** + * @see https://docs.github.com/rest/reference/issues#list-issue-events + */ + "GET /repos/{owner}/{repo}/issues/{issue_number}/events": Operation<"/repos/{owner}/{repo}/issues/{issue_number}/events", "get">; + /** + * @see https://docs.github.com/rest/reference/issues#list-labels-for-an-issue + */ + "GET /repos/{owner}/{repo}/issues/{issue_number}/labels": Operation<"/repos/{owner}/{repo}/issues/{issue_number}/labels", "get">; + /** + * @see https://docs.github.com/rest/reference/reactions#list-reactions-for-an-issue + */ + "GET /repos/{owner}/{repo}/issues/{issue_number}/reactions": Operation<"/repos/{owner}/{repo}/issues/{issue_number}/reactions", "get">; + /** + * @see https://docs.github.com/rest/reference/issues#list-timeline-events-for-an-issue + */ + "GET /repos/{owner}/{repo}/issues/{issue_number}/timeline": Operation<"/repos/{owner}/{repo}/issues/{issue_number}/timeline", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#list-deploy-keys + */ + "GET /repos/{owner}/{repo}/keys": 
Operation<"/repos/{owner}/{repo}/keys", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#get-a-deploy-key + */ + "GET /repos/{owner}/{repo}/keys/{key_id}": Operation<"/repos/{owner}/{repo}/keys/{key_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/issues#list-labels-for-a-repository + */ + "GET /repos/{owner}/{repo}/labels": Operation<"/repos/{owner}/{repo}/labels", "get">; + /** + * @see https://docs.github.com/rest/reference/issues#get-a-label + */ + "GET /repos/{owner}/{repo}/labels/{name}": Operation<"/repos/{owner}/{repo}/labels/{name}", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#list-repository-languages + */ + "GET /repos/{owner}/{repo}/languages": Operation<"/repos/{owner}/{repo}/languages", "get">; + /** + * @see https://docs.github.com/rest/reference/licenses/#get-the-license-for-a-repository + */ + "GET /repos/{owner}/{repo}/license": Operation<"/repos/{owner}/{repo}/license", "get">; + /** + * @see https://docs.github.com/rest/reference/issues#list-milestones + */ + "GET /repos/{owner}/{repo}/milestones": Operation<"/repos/{owner}/{repo}/milestones", "get">; + /** + * @see https://docs.github.com/rest/reference/issues#get-a-milestone + */ + "GET /repos/{owner}/{repo}/milestones/{milestone_number}": Operation<"/repos/{owner}/{repo}/milestones/{milestone_number}", "get">; + /** + * @see https://docs.github.com/rest/reference/issues#list-labels-for-issues-in-a-milestone + */ + "GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels": Operation<"/repos/{owner}/{repo}/milestones/{milestone_number}/labels", "get">; + /** + * @see https://docs.github.com/rest/reference/activity#list-repository-notifications-for-the-authenticated-user + */ + "GET /repos/{owner}/{repo}/notifications": Operation<"/repos/{owner}/{repo}/notifications", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#get-a-github-pages-site + */ + "GET /repos/{owner}/{repo}/pages": Operation<"/repos/{owner}/{repo}/pages", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#list-github-pages-builds + */ + "GET /repos/{owner}/{repo}/pages/builds": Operation<"/repos/{owner}/{repo}/pages/builds", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#get-latest-pages-build + */ + "GET /repos/{owner}/{repo}/pages/builds/latest": Operation<"/repos/{owner}/{repo}/pages/builds/latest", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#get-github-pages-build + */ + "GET /repos/{owner}/{repo}/pages/builds/{build_id}": Operation<"/repos/{owner}/{repo}/pages/builds/{build_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#get-a-dns-health-check-for-github-pages + */ + "GET /repos/{owner}/{repo}/pages/health": Operation<"/repos/{owner}/{repo}/pages/health", "get">; + /** + * @see https://docs.github.com/rest/reference/projects#list-repository-projects + */ + "GET /repos/{owner}/{repo}/projects": Operation<"/repos/{owner}/{repo}/projects", "get">; + /** + * @see https://docs.github.com/rest/reference/pulls#list-pull-requests + */ + "GET /repos/{owner}/{repo}/pulls": Operation<"/repos/{owner}/{repo}/pulls", "get">; + /** + * @see https://docs.github.com/rest/reference/pulls#list-review-comments-in-a-repository + */ + "GET /repos/{owner}/{repo}/pulls/comments": Operation<"/repos/{owner}/{repo}/pulls/comments", "get">; + /** + * @see https://docs.github.com/rest/reference/pulls#get-a-review-comment-for-a-pull-request + */ + "GET /repos/{owner}/{repo}/pulls/comments/{comment_id}": 
Operation<"/repos/{owner}/{repo}/pulls/comments/{comment_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/reactions#list-reactions-for-a-pull-request-review-comment + */ + "GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions": Operation<"/repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", "get">; + /** + * @see https://docs.github.com/rest/reference/pulls#get-a-pull-request + */ + "GET /repos/{owner}/{repo}/pulls/{pull_number}": Operation<"/repos/{owner}/{repo}/pulls/{pull_number}", "get">; + /** + * @see https://docs.github.com/rest/reference/pulls#list-review-comments-on-a-pull-request + */ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/comments": Operation<"/repos/{owner}/{repo}/pulls/{pull_number}/comments", "get">; + /** + * @see https://docs.github.com/rest/reference/pulls#list-commits-on-a-pull-request + */ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/commits": Operation<"/repos/{owner}/{repo}/pulls/{pull_number}/commits", "get">; + /** + * @see https://docs.github.com/rest/reference/pulls#list-pull-requests-files + */ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/files": Operation<"/repos/{owner}/{repo}/pulls/{pull_number}/files", "get">; + /** + * @see https://docs.github.com/rest/reference/pulls#check-if-a-pull-request-has-been-merged + */ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/merge": Operation<"/repos/{owner}/{repo}/pulls/{pull_number}/merge", "get">; + /** + * @see https://docs.github.com/rest/reference/pulls#list-requested-reviewers-for-a-pull-request + */ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers": Operation<"/repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers", "get">; + /** + * @see https://docs.github.com/rest/reference/pulls#list-reviews-for-a-pull-request + */ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews": Operation<"/repos/{owner}/{repo}/pulls/{pull_number}/reviews", "get">; + /** + * @see https://docs.github.com/rest/reference/pulls#get-a-review-for-a-pull-request + */ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}": Operation<"/repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/pulls#list-comments-for-a-pull-request-review + */ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments": Operation<"/repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#get-a-repository-readme + */ + "GET /repos/{owner}/{repo}/readme": Operation<"/repos/{owner}/{repo}/readme", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#get-a-repository-directory-readme + */ + "GET /repos/{owner}/{repo}/readme/{dir}": Operation<"/repos/{owner}/{repo}/readme/{dir}", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#list-releases + */ + "GET /repos/{owner}/{repo}/releases": Operation<"/repos/{owner}/{repo}/releases", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#get-a-release-asset + */ + "GET /repos/{owner}/{repo}/releases/assets/{asset_id}": Operation<"/repos/{owner}/{repo}/releases/assets/{asset_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#get-the-latest-release + */ + "GET /repos/{owner}/{repo}/releases/latest": Operation<"/repos/{owner}/{repo}/releases/latest", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#get-a-release-by-tag-name + */ + "GET 
/repos/{owner}/{repo}/releases/tags/{tag}": Operation<"/repos/{owner}/{repo}/releases/tags/{tag}", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#get-a-release + */ + "GET /repos/{owner}/{repo}/releases/{release_id}": Operation<"/repos/{owner}/{repo}/releases/{release_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#list-release-assets + */ + "GET /repos/{owner}/{repo}/releases/{release_id}/assets": Operation<"/repos/{owner}/{repo}/releases/{release_id}/assets", "get">; + /** + * @see https://docs.github.com/rest/reference/reactions/#list-reactions-for-a-release + */ + "GET /repos/{owner}/{repo}/releases/{release_id}/reactions": Operation<"/repos/{owner}/{repo}/releases/{release_id}/reactions", "get">; + /** + * @see https://docs.github.com/rest/reference/secret-scanning#list-secret-scanning-alerts-for-a-repository + */ + "GET /repos/{owner}/{repo}/secret-scanning/alerts": Operation<"/repos/{owner}/{repo}/secret-scanning/alerts", "get">; + /** + * @see https://docs.github.com/rest/reference/secret-scanning#get-a-secret-scanning-alert + */ + "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}": Operation<"/repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}", "get">; + /** + * @see https://docs.github.com/rest/reference/secret-scanning#list-locations-for-a-secret-scanning-alert + */ + "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations": Operation<"/repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations", "get">; + /** + * @see https://docs.github.com/rest/reference/activity#list-stargazers + */ + "GET /repos/{owner}/{repo}/stargazers": Operation<"/repos/{owner}/{repo}/stargazers", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#get-the-weekly-commit-activity + */ + "GET /repos/{owner}/{repo}/stats/code_frequency": Operation<"/repos/{owner}/{repo}/stats/code_frequency", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#get-the-last-year-of-commit-activity + */ + "GET /repos/{owner}/{repo}/stats/commit_activity": Operation<"/repos/{owner}/{repo}/stats/commit_activity", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#get-all-contributor-commit-activity + */ + "GET /repos/{owner}/{repo}/stats/contributors": Operation<"/repos/{owner}/{repo}/stats/contributors", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#get-the-weekly-commit-count + */ + "GET /repos/{owner}/{repo}/stats/participation": Operation<"/repos/{owner}/{repo}/stats/participation", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#get-the-hourly-commit-count-for-each-day + */ + "GET /repos/{owner}/{repo}/stats/punch_card": Operation<"/repos/{owner}/{repo}/stats/punch_card", "get">; + /** + * @see https://docs.github.com/rest/reference/activity#list-watchers + */ + "GET /repos/{owner}/{repo}/subscribers": Operation<"/repos/{owner}/{repo}/subscribers", "get">; + /** + * @see https://docs.github.com/rest/reference/activity#get-a-repository-subscription + */ + "GET /repos/{owner}/{repo}/subscription": Operation<"/repos/{owner}/{repo}/subscription", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#list-repository-tags + */ + "GET /repos/{owner}/{repo}/tags": Operation<"/repos/{owner}/{repo}/tags", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#list-tag-protection-state-of-a-repository + */ + "GET /repos/{owner}/{repo}/tags/protection": Operation<"/repos/{owner}/{repo}/tags/protection", "get">; + /** + 
* @see https://docs.github.com/rest/reference/repos#download-a-repository-archive + */ + "GET /repos/{owner}/{repo}/tarball/{ref}": Operation<"/repos/{owner}/{repo}/tarball/{ref}", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#list-repository-teams + */ + "GET /repos/{owner}/{repo}/teams": Operation<"/repos/{owner}/{repo}/teams", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#get-all-repository-topics + */ + "GET /repos/{owner}/{repo}/topics": Operation<"/repos/{owner}/{repo}/topics", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#get-repository-clones + */ + "GET /repos/{owner}/{repo}/traffic/clones": Operation<"/repos/{owner}/{repo}/traffic/clones", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#get-top-referral-paths + */ + "GET /repos/{owner}/{repo}/traffic/popular/paths": Operation<"/repos/{owner}/{repo}/traffic/popular/paths", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#get-top-referral-sources + */ + "GET /repos/{owner}/{repo}/traffic/popular/referrers": Operation<"/repos/{owner}/{repo}/traffic/popular/referrers", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#get-page-views + */ + "GET /repos/{owner}/{repo}/traffic/views": Operation<"/repos/{owner}/{repo}/traffic/views", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#check-if-vulnerability-alerts-are-enabled-for-a-repository + */ + "GET /repos/{owner}/{repo}/vulnerability-alerts": Operation<"/repos/{owner}/{repo}/vulnerability-alerts", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#download-a-repository-archive + */ + "GET /repos/{owner}/{repo}/zipball/{ref}": Operation<"/repos/{owner}/{repo}/zipball/{ref}", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#list-public-repositories + */ + "GET /repositories": Operation<"/repositories", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-environment-secrets + */ + "GET /repositories/{repository_id}/environments/{environment_name}/secrets": Operation<"/repositories/{repository_id}/environments/{environment_name}/secrets", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-an-environment-public-key + */ + "GET /repositories/{repository_id}/environments/{environment_name}/secrets/public-key": Operation<"/repositories/{repository_id}/environments/{environment_name}/secrets/public-key", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-an-environment-secret + */ + "GET /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}": Operation<"/repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}", "get">; + /** + * @see https://docs.github.com/rest/reference/enterprise-admin#list-provisioned-scim-groups-for-an-enterprise + */ + "GET /scim/v2/enterprises/{enterprise}/Groups": Operation<"/scim/v2/enterprises/{enterprise}/Groups", "get">; + /** + * @see https://docs.github.com/rest/reference/enterprise-admin#get-scim-provisioning-information-for-an-enterprise-group + */ + "GET /scim/v2/enterprises/{enterprise}/Groups/{scim_group_id}": Operation<"/scim/v2/enterprises/{enterprise}/Groups/{scim_group_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/enterprise-admin#list-scim-provisioned-identities-for-an-enterprise + */ + "GET /scim/v2/enterprises/{enterprise}/Users": Operation<"/scim/v2/enterprises/{enterprise}/Users", "get">; + /** + * @see 
https://docs.github.com/rest/reference/enterprise-admin#get-scim-provisioning-information-for-an-enterprise-user + */ + "GET /scim/v2/enterprises/{enterprise}/Users/{scim_user_id}": Operation<"/scim/v2/enterprises/{enterprise}/Users/{scim_user_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/scim#list-scim-provisioned-identities + */ + "GET /scim/v2/organizations/{org}/Users": Operation<"/scim/v2/organizations/{org}/Users", "get">; + /** + * @see https://docs.github.com/rest/reference/scim#get-scim-provisioning-information-for-a-user + */ + "GET /scim/v2/organizations/{org}/Users/{scim_user_id}": Operation<"/scim/v2/organizations/{org}/Users/{scim_user_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/search#search-code + */ + "GET /search/code": Operation<"/search/code", "get">; + /** + * @see https://docs.github.com/rest/reference/search#search-commits + */ + "GET /search/commits": Operation<"/search/commits", "get">; + /** + * @see https://docs.github.com/rest/reference/search#search-issues-and-pull-requests + */ + "GET /search/issues": Operation<"/search/issues", "get">; + /** + * @see https://docs.github.com/rest/reference/search#search-labels + */ + "GET /search/labels": Operation<"/search/labels", "get">; + /** + * @see https://docs.github.com/rest/reference/search#search-repositories + */ + "GET /search/repositories": Operation<"/search/repositories", "get">; + /** + * @see https://docs.github.com/rest/reference/search#search-topics + */ + "GET /search/topics": Operation<"/search/topics", "get">; + /** + * @see https://docs.github.com/rest/reference/search#search-users + */ + "GET /search/users": Operation<"/search/users", "get">; + /** + * @see https://docs.github.com/rest/reference/teams/#get-a-team-legacy + */ + "GET /teams/{team_id}": Operation<"/teams/{team_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/teams#list-discussions-legacy + */ + "GET /teams/{team_id}/discussions": Operation<"/teams/{team_id}/discussions", "get">; + /** + * @see https://docs.github.com/rest/reference/teams#get-a-discussion-legacy + */ + "GET /teams/{team_id}/discussions/{discussion_number}": Operation<"/teams/{team_id}/discussions/{discussion_number}", "get">; + /** + * @see https://docs.github.com/rest/reference/teams#list-discussion-comments-legacy + */ + "GET /teams/{team_id}/discussions/{discussion_number}/comments": Operation<"/teams/{team_id}/discussions/{discussion_number}/comments", "get">; + /** + * @see https://docs.github.com/rest/reference/teams#get-a-discussion-comment-legacy + */ + "GET /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}": Operation<"/teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}", "get">; + /** + * @see https://docs.github.com/rest/reference/reactions/#list-reactions-for-a-team-discussion-comment-legacy + */ + "GET /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions": Operation<"/teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions", "get">; + /** + * @see https://docs.github.com/rest/reference/reactions/#list-reactions-for-a-team-discussion-legacy + */ + "GET /teams/{team_id}/discussions/{discussion_number}/reactions": Operation<"/teams/{team_id}/discussions/{discussion_number}/reactions", "get">; + /** + * @see https://docs.github.com/rest/reference/teams#list-pending-team-invitations-legacy + */ + "GET /teams/{team_id}/invitations": Operation<"/teams/{team_id}/invitations", "get">; + /** + * @see 
https://docs.github.com/rest/reference/teams#list-team-members-legacy + */ + "GET /teams/{team_id}/members": Operation<"/teams/{team_id}/members", "get">; + /** + * @see https://docs.github.com/rest/reference/teams#get-team-member-legacy + */ + "GET /teams/{team_id}/members/{username}": Operation<"/teams/{team_id}/members/{username}", "get">; + /** + * @see https://docs.github.com/rest/reference/teams#get-team-membership-for-a-user-legacy + */ + "GET /teams/{team_id}/memberships/{username}": Operation<"/teams/{team_id}/memberships/{username}", "get">; + /** + * @see https://docs.github.com/rest/reference/teams/#list-team-projects-legacy + */ + "GET /teams/{team_id}/projects": Operation<"/teams/{team_id}/projects", "get">; + /** + * @see https://docs.github.com/rest/reference/teams/#check-team-permissions-for-a-project-legacy + */ + "GET /teams/{team_id}/projects/{project_id}": Operation<"/teams/{team_id}/projects/{project_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/teams/#list-team-repositories-legacy + */ + "GET /teams/{team_id}/repos": Operation<"/teams/{team_id}/repos", "get">; + /** + * @see https://docs.github.com/rest/reference/teams/#check-team-permissions-for-a-repository-legacy + */ + "GET /teams/{team_id}/repos/{owner}/{repo}": Operation<"/teams/{team_id}/repos/{owner}/{repo}", "get">; + /** + * @see https://docs.github.com/rest/reference/teams#list-idp-groups-for-a-team-legacy + */ + "GET /teams/{team_id}/team-sync/group-mappings": Operation<"/teams/{team_id}/team-sync/group-mappings", "get">; + /** + * @see https://docs.github.com/rest/reference/teams/#list-child-teams-legacy + */ + "GET /teams/{team_id}/teams": Operation<"/teams/{team_id}/teams", "get">; + /** + * @see https://docs.github.com/rest/reference/users#get-the-authenticated-user + */ + "GET /user": Operation<"/user", "get">; + /** + * @see https://docs.github.com/rest/reference/users#list-users-blocked-by-the-authenticated-user + */ + "GET /user/blocks": Operation<"/user/blocks", "get">; + /** + * @see https://docs.github.com/rest/reference/users#check-if-a-user-is-blocked-by-the-authenticated-user + */ + "GET /user/blocks/{username}": Operation<"/user/blocks/{username}", "get">; + /** + * @see https://docs.github.com/rest/reference/codespaces#list-codespaces-for-the-authenticated-user + */ + "GET /user/codespaces": Operation<"/user/codespaces", "get">; + /** + * @see https://docs.github.com/rest/reference/codespaces#list-secrets-for-the-authenticated-user + */ + "GET /user/codespaces/secrets": Operation<"/user/codespaces/secrets", "get">; + /** + * @see https://docs.github.com/rest/reference/codespaces#get-public-key-for-the-authenticated-user + */ + "GET /user/codespaces/secrets/public-key": Operation<"/user/codespaces/secrets/public-key", "get">; + /** + * @see https://docs.github.com/rest/reference/codespaces#get-a-secret-for-the-authenticated-user + */ + "GET /user/codespaces/secrets/{secret_name}": Operation<"/user/codespaces/secrets/{secret_name}", "get">; + /** + * @see https://docs.github.com/rest/reference/codespaces#list-selected-repositories-for-a-user-secret + */ + "GET /user/codespaces/secrets/{secret_name}/repositories": Operation<"/user/codespaces/secrets/{secret_name}/repositories", "get">; + /** + * @see https://docs.github.com/rest/reference/codespaces#get-a-codespace-for-the-authenticated-user + */ + "GET /user/codespaces/{codespace_name}": Operation<"/user/codespaces/{codespace_name}", "get">; + /** + * @see + */ + "GET 
/user/codespaces/{codespace_name}/exports/{export_id}": Operation<"/user/codespaces/{codespace_name}/exports/{export_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/codespaces#list-machine-types-for-a-codespace + */ + "GET /user/codespaces/{codespace_name}/machines": Operation<"/user/codespaces/{codespace_name}/machines", "get">; + /** + * @see https://docs.github.com/rest/reference/users#list-email-addresses-for-the-authenticated-user + */ + "GET /user/emails": Operation<"/user/emails", "get">; + /** + * @see https://docs.github.com/rest/reference/users#list-followers-of-the-authenticated-user + */ + "GET /user/followers": Operation<"/user/followers", "get">; + /** + * @see https://docs.github.com/rest/reference/users#list-the-people-the-authenticated-user-follows + */ + "GET /user/following": Operation<"/user/following", "get">; + /** + * @see https://docs.github.com/rest/reference/users#check-if-a-person-is-followed-by-the-authenticated-user + */ + "GET /user/following/{username}": Operation<"/user/following/{username}", "get">; + /** + * @see https://docs.github.com/rest/reference/users#list-gpg-keys-for-the-authenticated-user + */ + "GET /user/gpg_keys": Operation<"/user/gpg_keys", "get">; + /** + * @see https://docs.github.com/rest/reference/users#get-a-gpg-key-for-the-authenticated-user + */ + "GET /user/gpg_keys/{gpg_key_id}": Operation<"/user/gpg_keys/{gpg_key_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/apps#list-app-installations-accessible-to-the-user-access-token + */ + "GET /user/installations": Operation<"/user/installations", "get">; + /** + * @see https://docs.github.com/rest/reference/apps#list-repositories-accessible-to-the-user-access-token + */ + "GET /user/installations/{installation_id}/repositories": Operation<"/user/installations/{installation_id}/repositories", "get">; + /** + * @see https://docs.github.com/rest/reference/interactions#get-interaction-restrictions-for-your-public-repositories + */ + "GET /user/interaction-limits": Operation<"/user/interaction-limits", "get">; + /** + * @see https://docs.github.com/rest/reference/issues#list-user-account-issues-assigned-to-the-authenticated-user + */ + "GET /user/issues": Operation<"/user/issues", "get">; + /** + * @see https://docs.github.com/rest/reference/users#list-public-ssh-keys-for-the-authenticated-user + */ + "GET /user/keys": Operation<"/user/keys", "get">; + /** + * @see https://docs.github.com/rest/reference/users#get-a-public-ssh-key-for-the-authenticated-user + */ + "GET /user/keys/{key_id}": Operation<"/user/keys/{key_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/apps#list-subscriptions-for-the-authenticated-user + */ + "GET /user/marketplace_purchases": Operation<"/user/marketplace_purchases", "get">; + /** + * @see https://docs.github.com/rest/reference/apps#list-subscriptions-for-the-authenticated-user-stubbed + */ + "GET /user/marketplace_purchases/stubbed": Operation<"/user/marketplace_purchases/stubbed", "get">; + /** + * @see https://docs.github.com/rest/reference/orgs#list-organization-memberships-for-the-authenticated-user + */ + "GET /user/memberships/orgs": Operation<"/user/memberships/orgs", "get">; + /** + * @see https://docs.github.com/rest/reference/orgs#get-an-organization-membership-for-the-authenticated-user + */ + "GET /user/memberships/orgs/{org}": Operation<"/user/memberships/orgs/{org}", "get">; + /** + * @see https://docs.github.com/rest/reference/migrations#list-user-migrations + */ + "GET /user/migrations": 
Operation<"/user/migrations", "get">; + /** + * @see https://docs.github.com/rest/reference/migrations#get-a-user-migration-status + */ + "GET /user/migrations/{migration_id}": Operation<"/user/migrations/{migration_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/migrations#download-a-user-migration-archive + */ + "GET /user/migrations/{migration_id}/archive": Operation<"/user/migrations/{migration_id}/archive", "get">; + /** + * @see https://docs.github.com/rest/reference/migrations#list-repositories-for-a-user-migration + */ + "GET /user/migrations/{migration_id}/repositories": Operation<"/user/migrations/{migration_id}/repositories", "get">; + /** + * @see https://docs.github.com/rest/reference/orgs#list-organizations-for-the-authenticated-user + */ + "GET /user/orgs": Operation<"/user/orgs", "get">; + /** + * @see https://docs.github.com/rest/reference/packages#list-packages-for-the-authenticated-user + */ + "GET /user/packages": Operation<"/user/packages", "get">; + /** + * @see https://docs.github.com/rest/reference/packages#get-a-package-for-the-authenticated-user + */ + "GET /user/packages/{package_type}/{package_name}": Operation<"/user/packages/{package_type}/{package_name}", "get">; + /** + * @see https://docs.github.com/rest/reference/packages#get-all-package-versions-for-a-package-owned-by-the-authenticated-user + */ + "GET /user/packages/{package_type}/{package_name}/versions": Operation<"/user/packages/{package_type}/{package_name}/versions", "get">; + /** + * @see https://docs.github.com/rest/reference/packages#get-a-package-version-for-the-authenticated-user + */ + "GET /user/packages/{package_type}/{package_name}/versions/{package_version_id}": Operation<"/user/packages/{package_type}/{package_name}/versions/{package_version_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/users#list-public-email-addresses-for-the-authenticated-user + */ + "GET /user/public_emails": Operation<"/user/public_emails", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#list-repositories-for-the-authenticated-user + */ + "GET /user/repos": Operation<"/user/repos", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#list-repository-invitations-for-the-authenticated-user + */ + "GET /user/repository_invitations": Operation<"/user/repository_invitations", "get">; + /** + * @see https://docs.github.com/rest/reference/activity#list-repositories-starred-by-the-authenticated-user + */ + "GET /user/starred": Operation<"/user/starred", "get">; + /** + * @see https://docs.github.com/rest/reference/activity#check-if-a-repository-is-starred-by-the-authenticated-user + */ + "GET /user/starred/{owner}/{repo}": Operation<"/user/starred/{owner}/{repo}", "get">; + /** + * @see https://docs.github.com/rest/reference/activity#list-repositories-watched-by-the-authenticated-user + */ + "GET /user/subscriptions": Operation<"/user/subscriptions", "get">; + /** + * @see https://docs.github.com/rest/reference/teams#list-teams-for-the-authenticated-user + */ + "GET /user/teams": Operation<"/user/teams", "get">; + /** + * @see https://docs.github.com/rest/reference/users#list-users + */ + "GET /users": Operation<"/users", "get">; + /** + * @see https://docs.github.com/rest/reference/users#get-a-user + */ + "GET /users/{username}": Operation<"/users/{username}", "get">; + /** + * @see https://docs.github.com/rest/reference/activity#list-events-for-the-authenticated-user + */ + "GET /users/{username}/events": Operation<"/users/{username}/events", 
"get">; + /** + * @see https://docs.github.com/rest/reference/activity#list-organization-events-for-the-authenticated-user + */ + "GET /users/{username}/events/orgs/{org}": Operation<"/users/{username}/events/orgs/{org}", "get">; + /** + * @see https://docs.github.com/rest/reference/activity#list-public-events-for-a-user + */ + "GET /users/{username}/events/public": Operation<"/users/{username}/events/public", "get">; + /** + * @see https://docs.github.com/rest/reference/users#list-followers-of-a-user + */ + "GET /users/{username}/followers": Operation<"/users/{username}/followers", "get">; + /** + * @see https://docs.github.com/rest/reference/users#list-the-people-a-user-follows + */ + "GET /users/{username}/following": Operation<"/users/{username}/following", "get">; + /** + * @see https://docs.github.com/rest/reference/users#check-if-a-user-follows-another-user + */ + "GET /users/{username}/following/{target_user}": Operation<"/users/{username}/following/{target_user}", "get">; + /** + * @see https://docs.github.com/rest/reference/gists#list-gists-for-a-user + */ + "GET /users/{username}/gists": Operation<"/users/{username}/gists", "get">; + /** + * @see https://docs.github.com/rest/reference/users#list-gpg-keys-for-a-user + */ + "GET /users/{username}/gpg_keys": Operation<"/users/{username}/gpg_keys", "get">; + /** + * @see https://docs.github.com/rest/reference/users#get-contextual-information-for-a-user + */ + "GET /users/{username}/hovercard": Operation<"/users/{username}/hovercard", "get">; + /** + * @see https://docs.github.com/rest/reference/apps#get-a-user-installation-for-the-authenticated-app + */ + "GET /users/{username}/installation": Operation<"/users/{username}/installation", "get">; + /** + * @see https://docs.github.com/rest/reference/users#list-public-keys-for-a-user + */ + "GET /users/{username}/keys": Operation<"/users/{username}/keys", "get">; + /** + * @see https://docs.github.com/rest/reference/orgs#list-organizations-for-a-user + */ + "GET /users/{username}/orgs": Operation<"/users/{username}/orgs", "get">; + /** + * @see https://docs.github.com/rest/reference/packages#list-packages-for-user + */ + "GET /users/{username}/packages": Operation<"/users/{username}/packages", "get">; + /** + * @see https://docs.github.com/rest/reference/packages#get-a-package-for-a-user + */ + "GET /users/{username}/packages/{package_type}/{package_name}": Operation<"/users/{username}/packages/{package_type}/{package_name}", "get">; + /** + * @see https://docs.github.com/rest/reference/packages#get-all-package-versions-for-a-package-owned-by-a-user + */ + "GET /users/{username}/packages/{package_type}/{package_name}/versions": Operation<"/users/{username}/packages/{package_type}/{package_name}/versions", "get">; + /** + * @see https://docs.github.com/rest/reference/packages#get-a-package-version-for-a-user + */ + "GET /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}": Operation<"/users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/projects#list-user-projects + */ + "GET /users/{username}/projects": Operation<"/users/{username}/projects", "get">; + /** + * @see https://docs.github.com/rest/reference/activity#list-events-received-by-the-authenticated-user + */ + "GET /users/{username}/received_events": Operation<"/users/{username}/received_events", "get">; + /** + * @see 
https://docs.github.com/rest/reference/activity#list-public-events-received-by-a-user + */ + "GET /users/{username}/received_events/public": Operation<"/users/{username}/received_events/public", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#list-repositories-for-a-user + */ + "GET /users/{username}/repos": Operation<"/users/{username}/repos", "get">; + /** + * @see https://docs.github.com/rest/reference/billing#get-github-actions-billing-for-a-user + */ + "GET /users/{username}/settings/billing/actions": Operation<"/users/{username}/settings/billing/actions", "get">; + /** + * @see https://docs.github.com/rest/reference/billing#get-github-packages-billing-for-a-user + */ + "GET /users/{username}/settings/billing/packages": Operation<"/users/{username}/settings/billing/packages", "get">; + /** + * @see https://docs.github.com/rest/reference/billing#get-shared-storage-billing-for-a-user + */ + "GET /users/{username}/settings/billing/shared-storage": Operation<"/users/{username}/settings/billing/shared-storage", "get">; + /** + * @see https://docs.github.com/rest/reference/activity#list-repositories-starred-by-a-user + */ + "GET /users/{username}/starred": Operation<"/users/{username}/starred", "get">; + /** + * @see https://docs.github.com/rest/reference/activity#list-repositories-watched-by-a-user + */ + "GET /users/{username}/subscriptions": Operation<"/users/{username}/subscriptions", "get">; + /** + * @see + */ + "GET /zen": Operation<"/zen", "get">; + /** + * @see https://docs.github.com/rest/reference/apps#update-a-webhook-configuration-for-an-app + */ + "PATCH /app/hook/config": Operation<"/app/hook/config", "patch">; + /** + * @see https://docs.github.com/rest/reference/apps#reset-a-token + */ + "PATCH /applications/{client_id}/token": Operation<"/applications/{client_id}/token", "patch">; + /** + * @see https://docs.github.com/rest/reference/oauth-authorizations#update-an-existing-authorization + */ + "PATCH /authorizations/{authorization_id}": Operation<"/authorizations/{authorization_id}", "patch">; + /** + * @see https://docs.github.com/rest/reference/actions#update-a-self-hosted-runner-group-for-an-enterprise + */ + "PATCH /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}": Operation<"/enterprises/{enterprise}/actions/runner-groups/{runner_group_id}", "patch">; + /** + * @see https://docs.github.com/rest/reference/gists/#update-a-gist + */ + "PATCH /gists/{gist_id}": Operation<"/gists/{gist_id}", "patch">; + /** + * @see https://docs.github.com/rest/reference/gists#update-a-gist-comment + */ + "PATCH /gists/{gist_id}/comments/{comment_id}": Operation<"/gists/{gist_id}/comments/{comment_id}", "patch">; + /** + * @see https://docs.github.com/rest/reference/activity#mark-a-thread-as-read + */ + "PATCH /notifications/threads/{thread_id}": Operation<"/notifications/threads/{thread_id}", "patch">; + /** + * @see https://docs.github.com/rest/reference/orgs/#update-an-organization + */ + "PATCH /orgs/{org}": Operation<"/orgs/{org}", "patch">; + /** + * @see https://docs.github.com/rest/reference/actions#update-a-self-hosted-runner-group-for-an-organization + */ + "PATCH /orgs/{org}/actions/runner-groups/{runner_group_id}": Operation<"/orgs/{org}/actions/runner-groups/{runner_group_id}", "patch">; + /** + * @see https://docs.github.com/rest/reference/orgs#update-an-organization-webhook + */ + "PATCH /orgs/{org}/hooks/{hook_id}": Operation<"/orgs/{org}/hooks/{hook_id}", "patch">; + /** + * @see 
https://docs.github.com/rest/reference/orgs#update-a-webhook-configuration-for-an-organization + */ + "PATCH /orgs/{org}/hooks/{hook_id}/config": Operation<"/orgs/{org}/hooks/{hook_id}/config", "patch">; + /** + * @see https://docs.github.com/rest/reference/teams#update-a-team + */ + "PATCH /orgs/{org}/teams/{team_slug}": Operation<"/orgs/{org}/teams/{team_slug}", "patch">; + /** + * @see https://docs.github.com/rest/reference/teams#update-a-discussion + */ + "PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}": Operation<"/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}", "patch">; + /** + * @see https://docs.github.com/rest/reference/teams#update-a-discussion-comment + */ + "PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}": Operation<"/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}", "patch">; + /** + * @see https://docs.github.com/rest/reference/teams#link-external-idp-group-team-connection + */ + "PATCH /orgs/{org}/teams/{team_slug}/external-groups": Operation<"/orgs/{org}/teams/{team_slug}/external-groups", "patch">; + /** + * @see https://docs.github.com/rest/reference/teams#create-or-update-idp-group-connections + */ + "PATCH /orgs/{org}/teams/{team_slug}/team-sync/group-mappings": Operation<"/orgs/{org}/teams/{team_slug}/team-sync/group-mappings", "patch">; + /** + * @see https://docs.github.com/rest/reference/projects#update-a-project-card + */ + "PATCH /projects/columns/cards/{card_id}": Operation<"/projects/columns/cards/{card_id}", "patch">; + /** + * @see https://docs.github.com/rest/reference/projects#update-a-project-column + */ + "PATCH /projects/columns/{column_id}": Operation<"/projects/columns/{column_id}", "patch">; + /** + * @see https://docs.github.com/rest/reference/projects#update-a-project + */ + "PATCH /projects/{project_id}": Operation<"/projects/{project_id}", "patch">; + /** + * @see https://docs.github.com/rest/reference/repos/#update-a-repository + */ + "PATCH /repos/{owner}/{repo}": Operation<"/repos/{owner}/{repo}", "patch">; + /** + * @see https://docs.github.com/rest/reference/repos#update-pull-request-review-protection + */ + "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews", "patch">; + /** + * @see https://docs.github.com/rest/reference/repos#update-status-check-protection + */ + "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks", "patch">; + /** + * @see https://docs.github.com/rest/reference/checks#update-a-check-run + */ + "PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}": Operation<"/repos/{owner}/{repo}/check-runs/{check_run_id}", "patch">; + /** + * @see https://docs.github.com/rest/reference/checks#update-repository-preferences-for-check-suites + */ + "PATCH /repos/{owner}/{repo}/check-suites/preferences": Operation<"/repos/{owner}/{repo}/check-suites/preferences", "patch">; + /** + * @see https://docs.github.com/rest/reference/code-scanning#update-a-code-scanning-alert + */ + "PATCH /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}": Operation<"/repos/{owner}/{repo}/code-scanning/alerts/{alert_number}", "patch">; + /** + * @see https://docs.github.com/rest/reference/repos#update-a-commit-comment + */ + "PATCH /repos/{owner}/{repo}/comments/{comment_id}": 
Operation<"/repos/{owner}/{repo}/comments/{comment_id}", "patch">; + /** + * @see https://docs.github.com/rest/reference/git#update-a-reference + */ + "PATCH /repos/{owner}/{repo}/git/refs/{ref}": Operation<"/repos/{owner}/{repo}/git/refs/{ref}", "patch">; + /** + * @see https://docs.github.com/rest/reference/repos#update-a-repository-webhook + */ + "PATCH /repos/{owner}/{repo}/hooks/{hook_id}": Operation<"/repos/{owner}/{repo}/hooks/{hook_id}", "patch">; + /** + * @see https://docs.github.com/rest/reference/repos#update-a-webhook-configuration-for-a-repository + */ + "PATCH /repos/{owner}/{repo}/hooks/{hook_id}/config": Operation<"/repos/{owner}/{repo}/hooks/{hook_id}/config", "patch">; + /** + * @see https://docs.github.com/rest/reference/migrations#update-an-import + */ + "PATCH /repos/{owner}/{repo}/import": Operation<"/repos/{owner}/{repo}/import", "patch">; + /** + * @see https://docs.github.com/rest/reference/migrations#map-a-commit-author + */ + "PATCH /repos/{owner}/{repo}/import/authors/{author_id}": Operation<"/repos/{owner}/{repo}/import/authors/{author_id}", "patch">; + /** + * @see https://docs.github.com/rest/reference/migrations#update-git-lfs-preference + */ + "PATCH /repos/{owner}/{repo}/import/lfs": Operation<"/repos/{owner}/{repo}/import/lfs", "patch">; + /** + * @see https://docs.github.com/rest/reference/repos#update-a-repository-invitation + */ + "PATCH /repos/{owner}/{repo}/invitations/{invitation_id}": Operation<"/repos/{owner}/{repo}/invitations/{invitation_id}", "patch">; + /** + * @see https://docs.github.com/rest/reference/issues#update-an-issue-comment + */ + "PATCH /repos/{owner}/{repo}/issues/comments/{comment_id}": Operation<"/repos/{owner}/{repo}/issues/comments/{comment_id}", "patch">; + /** + * @see https://docs.github.com/rest/reference/issues/#update-an-issue + */ + "PATCH /repos/{owner}/{repo}/issues/{issue_number}": Operation<"/repos/{owner}/{repo}/issues/{issue_number}", "patch">; + /** + * @see https://docs.github.com/rest/reference/issues#update-a-label + */ + "PATCH /repos/{owner}/{repo}/labels/{name}": Operation<"/repos/{owner}/{repo}/labels/{name}", "patch">; + /** + * @see https://docs.github.com/rest/reference/issues#update-a-milestone + */ + "PATCH /repos/{owner}/{repo}/milestones/{milestone_number}": Operation<"/repos/{owner}/{repo}/milestones/{milestone_number}", "patch">; + /** + * @see https://docs.github.com/rest/reference/pulls#update-a-review-comment-for-a-pull-request + */ + "PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}": Operation<"/repos/{owner}/{repo}/pulls/comments/{comment_id}", "patch">; + /** + * @see https://docs.github.com/rest/reference/pulls/#update-a-pull-request + */ + "PATCH /repos/{owner}/{repo}/pulls/{pull_number}": Operation<"/repos/{owner}/{repo}/pulls/{pull_number}", "patch">; + /** + * @see https://docs.github.com/rest/reference/repos#update-a-release-asset + */ + "PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}": Operation<"/repos/{owner}/{repo}/releases/assets/{asset_id}", "patch">; + /** + * @see https://docs.github.com/rest/reference/repos#update-a-release + */ + "PATCH /repos/{owner}/{repo}/releases/{release_id}": Operation<"/repos/{owner}/{repo}/releases/{release_id}", "patch">; + /** + * @see https://docs.github.com/rest/reference/secret-scanning#update-a-secret-scanning-alert + */ + "PATCH /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}": Operation<"/repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}", "patch">; + /** + * @see 
https://docs.github.com/rest/reference/enterprise-admin#update-an-attribute-for-a-scim-enterprise-group + */ + "PATCH /scim/v2/enterprises/{enterprise}/Groups/{scim_group_id}": Operation<"/scim/v2/enterprises/{enterprise}/Groups/{scim_group_id}", "patch">; + /** + * @see https://docs.github.com/rest/reference/enterprise-admin#update-an-attribute-for-a-scim-enterprise-user + */ + "PATCH /scim/v2/enterprises/{enterprise}/Users/{scim_user_id}": Operation<"/scim/v2/enterprises/{enterprise}/Users/{scim_user_id}", "patch">; + /** + * @see https://docs.github.com/rest/reference/scim#update-an-attribute-for-a-scim-user + */ + "PATCH /scim/v2/organizations/{org}/Users/{scim_user_id}": Operation<"/scim/v2/organizations/{org}/Users/{scim_user_id}", "patch">; + /** + * @see https://docs.github.com/rest/reference/teams/#update-a-team-legacy + */ + "PATCH /teams/{team_id}": Operation<"/teams/{team_id}", "patch">; + /** + * @see https://docs.github.com/rest/reference/teams#update-a-discussion-legacy + */ + "PATCH /teams/{team_id}/discussions/{discussion_number}": Operation<"/teams/{team_id}/discussions/{discussion_number}", "patch">; + /** + * @see https://docs.github.com/rest/reference/teams#update-a-discussion-comment-legacy + */ + "PATCH /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}": Operation<"/teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}", "patch">; + /** + * @see https://docs.github.com/rest/reference/teams#create-or-update-idp-group-connections-legacy + */ + "PATCH /teams/{team_id}/team-sync/group-mappings": Operation<"/teams/{team_id}/team-sync/group-mappings", "patch">; + /** + * @see https://docs.github.com/rest/reference/users/#update-the-authenticated-user + */ + "PATCH /user": Operation<"/user", "patch">; + /** + * @see https://docs.github.com/rest/reference/codespaces#update-a-codespace-for-the-authenticated-user + */ + "PATCH /user/codespaces/{codespace_name}": Operation<"/user/codespaces/{codespace_name}", "patch">; + /** + * @see https://docs.github.com/rest/reference/users#set-primary-email-visibility-for-the-authenticated-user + */ + "PATCH /user/email/visibility": Operation<"/user/email/visibility", "patch">; + /** + * @see https://docs.github.com/rest/reference/orgs#update-an-organization-membership-for-the-authenticated-user + */ + "PATCH /user/memberships/orgs/{org}": Operation<"/user/memberships/orgs/{org}", "patch">; + /** + * @see https://docs.github.com/rest/reference/repos#accept-a-repository-invitation + */ + "PATCH /user/repository_invitations/{invitation_id}": Operation<"/user/repository_invitations/{invitation_id}", "patch">; + /** + * @see https://docs.github.com/rest/reference/apps#create-a-github-app-from-a-manifest + */ + "POST /app-manifests/{code}/conversions": Operation<"/app-manifests/{code}/conversions", "post">; + /** + * @see https://docs.github.com/rest/reference/apps#redeliver-a-delivery-for-an-app-webhook + */ + "POST /app/hook/deliveries/{delivery_id}/attempts": Operation<"/app/hook/deliveries/{delivery_id}/attempts", "post">; + /** + * @see https://docs.github.com/rest/reference/apps/#create-an-installation-access-token-for-an-app + */ + "POST /app/installations/{installation_id}/access_tokens": Operation<"/app/installations/{installation_id}/access_tokens", "post">; + /** + * @see https://docs.github.com/rest/reference/apps#check-a-token + */ + "POST /applications/{client_id}/token": Operation<"/applications/{client_id}/token", "post">; + /** + * @see 
https://docs.github.com/rest/reference/apps#create-a-scoped-access-token + */ + "POST /applications/{client_id}/token/scoped": Operation<"/applications/{client_id}/token/scoped", "post">; + /** + * @see https://docs.github.com/rest/reference/oauth-authorizations#create-a-new-authorization + */ + "POST /authorizations": Operation<"/authorizations", "post">; + /** + * @see https://docs.github.com/rest/reference/actions#create-self-hosted-runner-group-for-an-enterprise + */ + "POST /enterprises/{enterprise}/actions/runner-groups": Operation<"/enterprises/{enterprise}/actions/runner-groups", "post">; + /** + * @see https://docs.github.com/rest/reference/actions#create-a-registration-token-for-an-enterprise + */ + "POST /enterprises/{enterprise}/actions/runners/registration-token": Operation<"/enterprises/{enterprise}/actions/runners/registration-token", "post">; + /** + * @see https://docs.github.com/rest/reference/actions#create-a-remove-token-for-an-enterprise + */ + "POST /enterprises/{enterprise}/actions/runners/remove-token": Operation<"/enterprises/{enterprise}/actions/runners/remove-token", "post">; + /** + * @see https://docs.github.com/rest/reference/actions#add-custom-labels-to-a-self-hosted-runner-for-an-enterprise + */ + "POST /enterprises/{enterprise}/actions/runners/{runner_id}/labels": Operation<"/enterprises/{enterprise}/actions/runners/{runner_id}/labels", "post">; + /** + * @see https://docs.github.com/rest/reference/gists#create-a-gist + */ + "POST /gists": Operation<"/gists", "post">; + /** + * @see https://docs.github.com/rest/reference/gists#create-a-gist-comment + */ + "POST /gists/{gist_id}/comments": Operation<"/gists/{gist_id}/comments", "post">; + /** + * @see https://docs.github.com/rest/reference/gists#fork-a-gist + */ + "POST /gists/{gist_id}/forks": Operation<"/gists/{gist_id}/forks", "post">; + /** + * @see https://docs.github.com/rest/reference/markdown#render-a-markdown-document + */ + "POST /markdown": Operation<"/markdown", "post">; + /** + * @see https://docs.github.com/rest/reference/markdown#render-a-markdown-document-in-raw-mode + */ + "POST /markdown/raw": Operation<"/markdown/raw", "post">; + /** + * @see https://docs.github.com/rest/reference/actions#create-a-self-hosted-runner-group-for-an-organization + */ + "POST /orgs/{org}/actions/runner-groups": Operation<"/orgs/{org}/actions/runner-groups", "post">; + /** + * @see https://docs.github.com/rest/reference/actions#create-a-registration-token-for-an-organization + */ + "POST /orgs/{org}/actions/runners/registration-token": Operation<"/orgs/{org}/actions/runners/registration-token", "post">; + /** + * @see https://docs.github.com/rest/reference/actions#create-a-remove-token-for-an-organization + */ + "POST /orgs/{org}/actions/runners/remove-token": Operation<"/orgs/{org}/actions/runners/remove-token", "post">; + /** + * @see https://docs.github.com/rest/reference/actions#add-custom-labels-to-a-self-hosted-runner-for-an-organization + */ + "POST /orgs/{org}/actions/runners/{runner_id}/labels": Operation<"/orgs/{org}/actions/runners/{runner_id}/labels", "post">; + /** + * @see https://docs.github.com/rest/reference/orgs#create-an-organization-webhook + */ + "POST /orgs/{org}/hooks": Operation<"/orgs/{org}/hooks", "post">; + /** + * @see https://docs.github.com/rest/reference/orgs#redeliver-a-delivery-for-an-organization-webhook + */ + "POST /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}/attempts": Operation<"/orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}/attempts", "post">; + /** + * @see 
https://docs.github.com/rest/reference/orgs#ping-an-organization-webhook + */ + "POST /orgs/{org}/hooks/{hook_id}/pings": Operation<"/orgs/{org}/hooks/{hook_id}/pings", "post">; + /** + * @see https://docs.github.com/rest/reference/orgs#create-an-organization-invitation + */ + "POST /orgs/{org}/invitations": Operation<"/orgs/{org}/invitations", "post">; + /** + * @see https://docs.github.com/rest/reference/codespaces + */ + "POST /orgs/{org}/members/{username}/codespaces/{codespace_name}/stop": Operation<"/orgs/{org}/members/{username}/codespaces/{codespace_name}/stop", "post">; + /** + * @see https://docs.github.com/rest/reference/migrations#start-an-organization-migration + */ + "POST /orgs/{org}/migrations": Operation<"/orgs/{org}/migrations", "post">; + /** + * @see https://docs.github.com/rest/reference/packages#restore-a-package-for-an-organization + */ + "POST /orgs/{org}/packages/{package_type}/{package_name}/restore{?token}": Operation<"/orgs/{org}/packages/{package_type}/{package_name}/restore", "post">; + /** + * @see https://docs.github.com/rest/reference/packages#restore-a-package-version-for-an-organization + */ + "POST /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore": Operation<"/orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore", "post">; + /** + * @see https://docs.github.com/rest/reference/projects#create-an-organization-project + */ + "POST /orgs/{org}/projects": Operation<"/orgs/{org}/projects", "post">; + /** + * @see https://docs.github.com/rest/reference/repos#create-an-organization-repository + */ + "POST /orgs/{org}/repos": Operation<"/orgs/{org}/repos", "post">; + /** + * @see https://docs.github.com/rest/reference/teams#create-a-team + */ + "POST /orgs/{org}/teams": Operation<"/orgs/{org}/teams", "post">; + /** + * @see https://docs.github.com/rest/reference/teams#create-a-discussion + */ + "POST /orgs/{org}/teams/{team_slug}/discussions": Operation<"/orgs/{org}/teams/{team_slug}/discussions", "post">; + /** + * @see https://docs.github.com/rest/reference/teams#create-a-discussion-comment + */ + "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments": Operation<"/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments", "post">; + /** + * @see https://docs.github.com/rest/reference/reactions#create-reaction-for-a-team-discussion-comment + */ + "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions": Operation<"/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", "post">; + /** + * @see https://docs.github.com/rest/reference/reactions#create-reaction-for-a-team-discussion + */ + "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions": Operation<"/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", "post">; + /** + * @see https://docs.github.com/rest/reference/projects#move-a-project-card + */ + "POST /projects/columns/cards/{card_id}/moves": Operation<"/projects/columns/cards/{card_id}/moves", "post">; + /** + * @see https://docs.github.com/rest/reference/projects#create-a-project-card + */ + "POST /projects/columns/{column_id}/cards": Operation<"/projects/columns/{column_id}/cards", "post">; + /** + * @see https://docs.github.com/rest/reference/projects#move-a-project-column + */ + "POST /projects/columns/{column_id}/moves": Operation<"/projects/columns/{column_id}/moves", "post">; + /** + * @see 
https://docs.github.com/rest/reference/projects#create-a-project-column + */ + "POST /projects/{project_id}/columns": Operation<"/projects/{project_id}/columns", "post">; + /** + * @see https://docs.github.com/rest/reference/actions#re-run-job-for-workflow-run + */ + "POST /repos/{owner}/{repo}/actions/jobs/{job_id}/rerun": Operation<"/repos/{owner}/{repo}/actions/jobs/{job_id}/rerun", "post">; + /** + * @see https://docs.github.com/rest/reference/actions#create-a-registration-token-for-a-repository + */ + "POST /repos/{owner}/{repo}/actions/runners/registration-token": Operation<"/repos/{owner}/{repo}/actions/runners/registration-token", "post">; + /** + * @see https://docs.github.com/rest/reference/actions#create-a-remove-token-for-a-repository + */ + "POST /repos/{owner}/{repo}/actions/runners/remove-token": Operation<"/repos/{owner}/{repo}/actions/runners/remove-token", "post">; + /** + * @see https://docs.github.com/rest/reference/actions#add-custom-labels-to-a-self-hosted-runner-for-a-repository + */ + "POST /repos/{owner}/{repo}/actions/runners/{runner_id}/labels": Operation<"/repos/{owner}/{repo}/actions/runners/{runner_id}/labels", "post">; + /** + * @see https://docs.github.com/rest/reference/actions#approve-a-workflow-run-for-a-fork-pull-request + */ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/approve": Operation<"/repos/{owner}/{repo}/actions/runs/{run_id}/approve", "post">; + /** + * @see https://docs.github.com/rest/reference/actions#cancel-a-workflow-run + */ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel": Operation<"/repos/{owner}/{repo}/actions/runs/{run_id}/cancel", "post">; + /** + * @see https://docs.github.com/rest/reference/actions#review-pending-deployments-for-a-workflow-run + */ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments": Operation<"/repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments", "post">; + /** + * @see https://docs.github.com/rest/reference/actions#re-run-a-workflow + */ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun": Operation<"/repos/{owner}/{repo}/actions/runs/{run_id}/rerun", "post">; + /** + * @see https://docs.github.com/rest/reference/actions#re-run-workflow-failed-jobs + */ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun-failed-jobs": Operation<"/repos/{owner}/{repo}/actions/runs/{run_id}/rerun-failed-jobs", "post">; + /** + * @see https://docs.github.com/rest/reference/actions#create-a-workflow-dispatch-event + */ + "POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches": Operation<"/repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches", "post">; + /** + * @see https://docs.github.com/v3/repos#create-an-autolink + */ + "POST /repos/{owner}/{repo}/autolinks": Operation<"/repos/{owner}/{repo}/autolinks", "post">; + /** + * @see https://docs.github.com/rest/reference/repos#set-admin-branch-protection + */ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins", "post">; + /** + * @see https://docs.github.com/rest/reference/repos#create-commit-signature-protection + */ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/required_signatures", "post">; + /** + * @see https://docs.github.com/rest/reference/repos#add-status-check-contexts + */ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts": 
Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", "post">; + /** + * @see https://docs.github.com/rest/reference/repos#add-app-access-restrictions + */ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", "post">; + /** + * @see https://docs.github.com/rest/reference/repos#add-team-access-restrictions + */ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", "post">; + /** + * @see https://docs.github.com/rest/reference/repos#add-user-access-restrictions + */ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", "post">; + /** + * @see https://docs.github.com/rest/reference/repos#rename-a-branch + */ + "POST /repos/{owner}/{repo}/branches/{branch}/rename": Operation<"/repos/{owner}/{repo}/branches/{branch}/rename", "post">; + /** + * @see https://docs.github.com/rest/reference/checks#create-a-check-run + */ + "POST /repos/{owner}/{repo}/check-runs": Operation<"/repos/{owner}/{repo}/check-runs", "post">; + /** + * @see https://docs.github.com/rest/reference/checks#rerequest-a-check-run + */ + "POST /repos/{owner}/{repo}/check-runs/{check_run_id}/rerequest": Operation<"/repos/{owner}/{repo}/check-runs/{check_run_id}/rerequest", "post">; + /** + * @see https://docs.github.com/rest/reference/checks#create-a-check-suite + */ + "POST /repos/{owner}/{repo}/check-suites": Operation<"/repos/{owner}/{repo}/check-suites", "post">; + /** + * @see https://docs.github.com/rest/reference/checks#rerequest-a-check-suite + */ + "POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest": Operation<"/repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest", "post">; + /** + * @see https://docs.github.com/rest/reference/code-scanning#upload-a-sarif-file + */ + "POST /repos/{owner}/{repo}/code-scanning/sarifs": Operation<"/repos/{owner}/{repo}/code-scanning/sarifs", "post">; + /** + * @see https://docs.github.com/rest/reference/codespaces#create-a-codespace-in-a-repository + */ + "POST /repos/{owner}/{repo}/codespaces": Operation<"/repos/{owner}/{repo}/codespaces", "post">; + /** + * @see https://docs.github.com/rest/reference/reactions#create-reaction-for-a-commit-comment + */ + "POST /repos/{owner}/{repo}/comments/{comment_id}/reactions": Operation<"/repos/{owner}/{repo}/comments/{comment_id}/reactions", "post">; + /** + * @see https://docs.github.com/rest/reference/repos#create-a-commit-comment + */ + "POST /repos/{owner}/{repo}/commits/{commit_sha}/comments": Operation<"/repos/{owner}/{repo}/commits/{commit_sha}/comments", "post">; + /** + * @see https://docs.github.com/rest/reference/dependency-graph#create-a-snapshot-of-dependencies-for-a-repository + */ + "POST /repos/{owner}/{repo}/dependency-graph/snapshots": Operation<"/repos/{owner}/{repo}/dependency-graph/snapshots", "post">; + /** + * @see https://docs.github.com/rest/reference/repos#create-a-deployment + */ + "POST /repos/{owner}/{repo}/deployments": Operation<"/repos/{owner}/{repo}/deployments", "post">; + /** + * @see https://docs.github.com/rest/reference/repos#create-a-deployment-status + */ + "POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses": Operation<"/repos/{owner}/{repo}/deployments/{deployment_id}/statuses", "post">; + /** + * @see 
https://docs.github.com/rest/reference/repos#create-a-repository-dispatch-event + */ + "POST /repos/{owner}/{repo}/dispatches": Operation<"/repos/{owner}/{repo}/dispatches", "post">; + /** + * @see https://docs.github.com/rest/reference/repos#create-a-fork + */ + "POST /repos/{owner}/{repo}/forks": Operation<"/repos/{owner}/{repo}/forks", "post">; + /** + * @see https://docs.github.com/rest/reference/git#create-a-blob + */ + "POST /repos/{owner}/{repo}/git/blobs": Operation<"/repos/{owner}/{repo}/git/blobs", "post">; + /** + * @see https://docs.github.com/rest/reference/git#create-a-commit + */ + "POST /repos/{owner}/{repo}/git/commits": Operation<"/repos/{owner}/{repo}/git/commits", "post">; + /** + * @see https://docs.github.com/rest/reference/git#create-a-reference + */ + "POST /repos/{owner}/{repo}/git/refs": Operation<"/repos/{owner}/{repo}/git/refs", "post">; + /** + * @see https://docs.github.com/rest/reference/git#create-a-tag-object + */ + "POST /repos/{owner}/{repo}/git/tags": Operation<"/repos/{owner}/{repo}/git/tags", "post">; + /** + * @see https://docs.github.com/rest/reference/git#create-a-tree + */ + "POST /repos/{owner}/{repo}/git/trees": Operation<"/repos/{owner}/{repo}/git/trees", "post">; + /** + * @see https://docs.github.com/rest/reference/repos#create-a-repository-webhook + */ + "POST /repos/{owner}/{repo}/hooks": Operation<"/repos/{owner}/{repo}/hooks", "post">; + /** + * @see https://docs.github.com/rest/reference/repos#redeliver-a-delivery-for-a-repository-webhook + */ + "POST /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}/attempts": Operation<"/repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}/attempts", "post">; + /** + * @see https://docs.github.com/rest/reference/repos#ping-a-repository-webhook + */ + "POST /repos/{owner}/{repo}/hooks/{hook_id}/pings": Operation<"/repos/{owner}/{repo}/hooks/{hook_id}/pings", "post">; + /** + * @see https://docs.github.com/rest/reference/repos#test-the-push-repository-webhook + */ + "POST /repos/{owner}/{repo}/hooks/{hook_id}/tests": Operation<"/repos/{owner}/{repo}/hooks/{hook_id}/tests", "post">; + /** + * @see https://docs.github.com/rest/reference/issues#create-an-issue + */ + "POST /repos/{owner}/{repo}/issues": Operation<"/repos/{owner}/{repo}/issues", "post">; + /** + * @see https://docs.github.com/rest/reference/reactions#create-reaction-for-an-issue-comment + */ + "POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions": Operation<"/repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", "post">; + /** + * @see https://docs.github.com/rest/reference/issues#add-assignees-to-an-issue + */ + "POST /repos/{owner}/{repo}/issues/{issue_number}/assignees": Operation<"/repos/{owner}/{repo}/issues/{issue_number}/assignees", "post">; + /** + * @see https://docs.github.com/rest/reference/issues#create-an-issue-comment + */ + "POST /repos/{owner}/{repo}/issues/{issue_number}/comments": Operation<"/repos/{owner}/{repo}/issues/{issue_number}/comments", "post">; + /** + * @see https://docs.github.com/rest/reference/issues#add-labels-to-an-issue + */ + "POST /repos/{owner}/{repo}/issues/{issue_number}/labels": Operation<"/repos/{owner}/{repo}/issues/{issue_number}/labels", "post">; + /** + * @see https://docs.github.com/rest/reference/reactions#create-reaction-for-an-issue + */ + "POST /repos/{owner}/{repo}/issues/{issue_number}/reactions": Operation<"/repos/{owner}/{repo}/issues/{issue_number}/reactions", "post">; + /** + * @see 
https://docs.github.com/rest/reference/repos#create-a-deploy-key + */ + "POST /repos/{owner}/{repo}/keys": Operation<"/repos/{owner}/{repo}/keys", "post">; + /** + * @see https://docs.github.com/rest/reference/issues#create-a-label + */ + "POST /repos/{owner}/{repo}/labels": Operation<"/repos/{owner}/{repo}/labels", "post">; + /** + * @see https://docs.github.com/rest/reference/repos#sync-a-fork-branch-with-the-upstream-repository + */ + "POST /repos/{owner}/{repo}/merge-upstream": Operation<"/repos/{owner}/{repo}/merge-upstream", "post">; + /** + * @see https://docs.github.com/rest/reference/repos#merge-a-branch + */ + "POST /repos/{owner}/{repo}/merges": Operation<"/repos/{owner}/{repo}/merges", "post">; + /** + * @see https://docs.github.com/rest/reference/issues#create-a-milestone + */ + "POST /repos/{owner}/{repo}/milestones": Operation<"/repos/{owner}/{repo}/milestones", "post">; + /** + * @see https://docs.github.com/rest/reference/repos#create-a-github-pages-site + */ + "POST /repos/{owner}/{repo}/pages": Operation<"/repos/{owner}/{repo}/pages", "post">; + /** + * @see https://docs.github.com/rest/reference/repos#request-a-github-pages-build + */ + "POST /repos/{owner}/{repo}/pages/builds": Operation<"/repos/{owner}/{repo}/pages/builds", "post">; + /** + * @see https://docs.github.com/rest/reference/projects#create-a-repository-project + */ + "POST /repos/{owner}/{repo}/projects": Operation<"/repos/{owner}/{repo}/projects", "post">; + /** + * @see https://docs.github.com/rest/reference/pulls#create-a-pull-request + */ + "POST /repos/{owner}/{repo}/pulls": Operation<"/repos/{owner}/{repo}/pulls", "post">; + /** + * @see https://docs.github.com/rest/reference/reactions#create-reaction-for-a-pull-request-review-comment + */ + "POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions": Operation<"/repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", "post">; + /** + * @see https://docs.github.com/rest/reference/codespaces#create-a-codespace-from-a-pull-request + */ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/codespaces": Operation<"/repos/{owner}/{repo}/pulls/{pull_number}/codespaces", "post">; + /** + * @see https://docs.github.com/rest/reference/pulls#create-a-review-comment-for-a-pull-request + */ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/comments": Operation<"/repos/{owner}/{repo}/pulls/{pull_number}/comments", "post">; + /** + * @see https://docs.github.com/rest/reference/pulls#create-a-reply-for-a-review-comment + */ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies": Operation<"/repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies", "post">; + /** + * @see https://docs.github.com/rest/reference/pulls#request-reviewers-for-a-pull-request + */ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers": Operation<"/repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers", "post">; + /** + * @see https://docs.github.com/rest/reference/pulls#create-a-review-for-a-pull-request + */ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews": Operation<"/repos/{owner}/{repo}/pulls/{pull_number}/reviews", "post">; + /** + * @see https://docs.github.com/rest/reference/pulls#submit-a-review-for-a-pull-request + */ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events": Operation<"/repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events", "post">; + /** + * @see https://docs.github.com/rest/reference/repos#create-a-release + */ + "POST 
/repos/{owner}/{repo}/releases": Operation<"/repos/{owner}/{repo}/releases", "post">; + /** + * @see https://docs.github.com/rest/reference/repos#generate-release-notes + */ + "POST /repos/{owner}/{repo}/releases/generate-notes": Operation<"/repos/{owner}/{repo}/releases/generate-notes", "post">; + /** + * @see https://docs.github.com/rest/reference/reactions/#create-reaction-for-a-release + */ + "POST /repos/{owner}/{repo}/releases/{release_id}/reactions": Operation<"/repos/{owner}/{repo}/releases/{release_id}/reactions", "post">; + /** + * @see https://docs.github.com/rest/reference/repos#create-a-commit-status + */ + "POST /repos/{owner}/{repo}/statuses/{sha}": Operation<"/repos/{owner}/{repo}/statuses/{sha}", "post">; + /** + * @see https://docs.github.com/rest/reference/repos#create-tag-protection-state-for-a-repository + */ + "POST /repos/{owner}/{repo}/tags/protection": Operation<"/repos/{owner}/{repo}/tags/protection", "post">; + /** + * @see https://docs.github.com/rest/reference/repos#transfer-a-repository + */ + "POST /repos/{owner}/{repo}/transfer": Operation<"/repos/{owner}/{repo}/transfer", "post">; + /** + * @see https://docs.github.com/rest/reference/repos#create-a-repository-using-a-template + */ + "POST /repos/{template_owner}/{template_repo}/generate": Operation<"/repos/{template_owner}/{template_repo}/generate", "post">; + /** + * @see https://docs.github.com/rest/reference/enterprise-admin#provision-a-scim-enterprise-group-and-invite-users + */ + "POST /scim/v2/enterprises/{enterprise}/Groups": Operation<"/scim/v2/enterprises/{enterprise}/Groups", "post">; + /** + * @see https://docs.github.com/rest/reference/enterprise-admin#provision-and-invite-a-scim-enterprise-user + */ + "POST /scim/v2/enterprises/{enterprise}/Users": Operation<"/scim/v2/enterprises/{enterprise}/Users", "post">; + /** + * @see https://docs.github.com/rest/reference/scim#provision-and-invite-a-scim-user + */ + "POST /scim/v2/organizations/{org}/Users": Operation<"/scim/v2/organizations/{org}/Users", "post">; + /** + * @see https://docs.github.com/rest/reference/teams#create-a-discussion-legacy + */ + "POST /teams/{team_id}/discussions": Operation<"/teams/{team_id}/discussions", "post">; + /** + * @see https://docs.github.com/rest/reference/teams#create-a-discussion-comment-legacy + */ + "POST /teams/{team_id}/discussions/{discussion_number}/comments": Operation<"/teams/{team_id}/discussions/{discussion_number}/comments", "post">; + /** + * @see https://docs.github.com/rest/reference/reactions/#create-reaction-for-a-team-discussion-comment-legacy + */ + "POST /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions": Operation<"/teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions", "post">; + /** + * @see https://docs.github.com/rest/reference/reactions/#create-reaction-for-a-team-discussion-legacy + */ + "POST /teams/{team_id}/discussions/{discussion_number}/reactions": Operation<"/teams/{team_id}/discussions/{discussion_number}/reactions", "post">; + /** + * @see https://docs.github.com/rest/reference/codespaces#create-a-codespace-for-the-authenticated-user + */ + "POST /user/codespaces": Operation<"/user/codespaces", "post">; + /** + * @see + */ + "POST /user/codespaces/{codespace_name}/exports": Operation<"/user/codespaces/{codespace_name}/exports", "post">; + /** + * @see https://docs.github.com/rest/reference/codespaces#start-a-codespace-for-the-authenticated-user + */ + "POST /user/codespaces/{codespace_name}/start": 
Operation<"/user/codespaces/{codespace_name}/start", "post">; + /** + * @see https://docs.github.com/rest/reference/codespaces#stop-a-codespace-for-the-authenticated-user + */ + "POST /user/codespaces/{codespace_name}/stop": Operation<"/user/codespaces/{codespace_name}/stop", "post">; + /** + * @see https://docs.github.com/rest/reference/users#add-an-email-address-for-the-authenticated-user + */ + "POST /user/emails": Operation<"/user/emails", "post">; + /** + * @see https://docs.github.com/rest/reference/users#create-a-gpg-key-for-the-authenticated-user + */ + "POST /user/gpg_keys": Operation<"/user/gpg_keys", "post">; + /** + * @see https://docs.github.com/rest/reference/users#create-a-public-ssh-key-for-the-authenticated-user + */ + "POST /user/keys": Operation<"/user/keys", "post">; + /** + * @see https://docs.github.com/rest/reference/migrations#start-a-user-migration + */ + "POST /user/migrations": Operation<"/user/migrations", "post">; + /** + * @see https://docs.github.com/rest/reference/packages#restore-a-package-for-the-authenticated-user + */ + "POST /user/packages/{package_type}/{package_name}/restore{?token}": Operation<"/user/packages/{package_type}/{package_name}/restore", "post">; + /** + * @see https://docs.github.com/rest/reference/packages#restore-a-package-version-for-the-authenticated-user + */ + "POST /user/packages/{package_type}/{package_name}/versions/{package_version_id}/restore": Operation<"/user/packages/{package_type}/{package_name}/versions/{package_version_id}/restore", "post">; + /** + * @see https://docs.github.com/rest/reference/projects#create-a-user-project + */ + "POST /user/projects": Operation<"/user/projects", "post">; + /** + * @see https://docs.github.com/rest/reference/repos#create-a-repository-for-the-authenticated-user + */ + "POST /user/repos": Operation<"/user/repos", "post">; + /** + * @see https://docs.github.com/rest/reference/packages#restore-a-package-for-a-user + */ + "POST /users/{username}/packages/{package_type}/{package_name}/restore{?token}": Operation<"/users/{username}/packages/{package_type}/{package_name}/restore", "post">; + /** + * @see https://docs.github.com/rest/reference/packages#restore-a-package-version-for-a-user + */ + "POST /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore": Operation<"/users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore", "post">; + /** + * @see https://docs.github.com/rest/reference/repos#upload-a-release-asset + */ + "POST {origin}/repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}": Operation<"/repos/{owner}/{repo}/releases/{release_id}/assets", "post">; + /** + * @see https://docs.github.com/rest/reference/apps#suspend-an-app-installation + */ + "PUT /app/installations/{installation_id}/suspended": Operation<"/app/installations/{installation_id}/suspended", "put">; + /** + * @see https://docs.github.com/rest/reference/oauth-authorizations#get-or-create-an-authorization-for-a-specific-app + */ + "PUT /authorizations/clients/{client_id}": Operation<"/authorizations/clients/{client_id}", "put">; + /** + * @see https://docs.github.com/rest/reference/oauth-authorizations#get-or-create-an-authorization-for-a-specific-app-and-fingerprint + */ + "PUT /authorizations/clients/{client_id}/{fingerprint}": Operation<"/authorizations/clients/{client_id}/{fingerprint}", "put">; + /** + * @see https://docs.github.com/rest/reference/actions/oidc#set-actions-oidc-custom-issuer-policy-for-enterprise + */ + "PUT 
/enterprises/{enterprise}/actions/oidc/customization/issuer": Operation<"/enterprises/{enterprise}/actions/oidc/customization/issuer", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#set-github-actions-permissions-for-an-enterprise + */ + "PUT /enterprises/{enterprise}/actions/permissions": Operation<"/enterprises/{enterprise}/actions/permissions", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#set-selected-organizations-enabled-for-github-actions-in-an-enterprise + */ + "PUT /enterprises/{enterprise}/actions/permissions/organizations": Operation<"/enterprises/{enterprise}/actions/permissions/organizations", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#enable-a-selected-organization-for-github-actions-in-an-enterprise + */ + "PUT /enterprises/{enterprise}/actions/permissions/organizations/{org_id}": Operation<"/enterprises/{enterprise}/actions/permissions/organizations/{org_id}", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#set-allowed-actions-for-an-enterprise + */ + "PUT /enterprises/{enterprise}/actions/permissions/selected-actions": Operation<"/enterprises/{enterprise}/actions/permissions/selected-actions", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#set-default-workflow-permissions-for-an-enterprise + */ + "PUT /enterprises/{enterprise}/actions/permissions/workflow": Operation<"/enterprises/{enterprise}/actions/permissions/workflow", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#set-organization-access-to-a-self-hosted-runner-group-in-an-enterprise + */ + "PUT /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/organizations": Operation<"/enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/organizations", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#add-organization-access-to-a-self-hosted-runner-group-in-an-enterprise + */ + "PUT /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/organizations/{org_id}": Operation<"/enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/organizations/{org_id}", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#set-self-hosted-runners-in-a-group-for-an-enterprise + */ + "PUT /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/runners": Operation<"/enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/runners", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#add-a-self-hosted-runner-to-a-group-for-an-enterprise + */ + "PUT /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/runners/{runner_id}": Operation<"/enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/runners/{runner_id}", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#set-custom-labels-for-a-self-hosted-runner-for-an-enterprise + */ + "PUT /enterprises/{enterprise}/actions/runners/{runner_id}/labels": Operation<"/enterprises/{enterprise}/actions/runners/{runner_id}/labels", "put">; + /** + * @see https://docs.github.com/rest/reference/gists#star-a-gist + */ + "PUT /gists/{gist_id}/star": Operation<"/gists/{gist_id}/star", "put">; + /** + * @see https://docs.github.com/rest/reference/activity#mark-notifications-as-read + */ + "PUT /notifications": Operation<"/notifications", "put">; + /** + * @see https://docs.github.com/rest/reference/activity#set-a-thread-subscription + */ + "PUT /notifications/threads/{thread_id}/subscription": 
Operation<"/notifications/threads/{thread_id}/subscription", "put">; + /** + * @see https://docs.github.com/rest/actions/oidc#set-the-customization-template-for-an-oidc-subject-claim-for-an-organization + */ + "PUT /orgs/{org}/actions/oidc/customization/sub": Operation<"/orgs/{org}/actions/oidc/customization/sub", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#set-github-actions-permissions-for-an-organization + */ + "PUT /orgs/{org}/actions/permissions": Operation<"/orgs/{org}/actions/permissions", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#set-selected-repositories-enabled-for-github-actions-in-an-organization + */ + "PUT /orgs/{org}/actions/permissions/repositories": Operation<"/orgs/{org}/actions/permissions/repositories", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#enable-a-selected-repository-for-github-actions-in-an-organization + */ + "PUT /orgs/{org}/actions/permissions/repositories/{repository_id}": Operation<"/orgs/{org}/actions/permissions/repositories/{repository_id}", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#set-allowed-actions-for-an-organization + */ + "PUT /orgs/{org}/actions/permissions/selected-actions": Operation<"/orgs/{org}/actions/permissions/selected-actions", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#set-default-workflow-permissions + */ + "PUT /orgs/{org}/actions/permissions/workflow": Operation<"/orgs/{org}/actions/permissions/workflow", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#set-repository-access-to-a-self-hosted-runner-group-in-an-organization + */ + "PUT /orgs/{org}/actions/runner-groups/{runner_group_id}/repositories": Operation<"/orgs/{org}/actions/runner-groups/{runner_group_id}/repositories", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#add-repository-acess-to-a-self-hosted-runner-group-in-an-organization + */ + "PUT /orgs/{org}/actions/runner-groups/{runner_group_id}/repositories/{repository_id}": Operation<"/orgs/{org}/actions/runner-groups/{runner_group_id}/repositories/{repository_id}", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#set-self-hosted-runners-in-a-group-for-an-organization + */ + "PUT /orgs/{org}/actions/runner-groups/{runner_group_id}/runners": Operation<"/orgs/{org}/actions/runner-groups/{runner_group_id}/runners", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#add-a-self-hosted-runner-to-a-group-for-an-organization + */ + "PUT /orgs/{org}/actions/runner-groups/{runner_group_id}/runners/{runner_id}": Operation<"/orgs/{org}/actions/runner-groups/{runner_group_id}/runners/{runner_id}", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#set-custom-labels-for-a-self-hosted-runner-for-an-organization + */ + "PUT /orgs/{org}/actions/runners/{runner_id}/labels": Operation<"/orgs/{org}/actions/runners/{runner_id}/labels", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#create-or-update-an-organization-secret + */ + "PUT /orgs/{org}/actions/secrets/{secret_name}": Operation<"/orgs/{org}/actions/secrets/{secret_name}", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#set-selected-repositories-for-an-organization-secret + */ + "PUT /orgs/{org}/actions/secrets/{secret_name}/repositories": Operation<"/orgs/{org}/actions/secrets/{secret_name}/repositories", "put">; + /** + * @see 
https://docs.github.com/rest/reference/actions#add-selected-repository-to-an-organization-secret + */ + "PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}": Operation<"/orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}", "put">; + /** + * @see https://docs.github.com/rest/reference/orgs#block-a-user-from-an-organization + */ + "PUT /orgs/{org}/blocks/{username}": Operation<"/orgs/{org}/blocks/{username}", "put">; + /** + * @see https://docs.github.com/rest/reference/dependabot#create-or-update-an-organization-secret + */ + "PUT /orgs/{org}/dependabot/secrets/{secret_name}": Operation<"/orgs/{org}/dependabot/secrets/{secret_name}", "put">; + /** + * @see https://docs.github.com/rest/reference/dependabot#set-selected-repositories-for-an-organization-secret + */ + "PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories": Operation<"/orgs/{org}/dependabot/secrets/{secret_name}/repositories", "put">; + /** + * @see https://docs.github.com/rest/reference/dependabot#add-selected-repository-to-an-organization-secret + */ + "PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}": Operation<"/orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}", "put">; + /** + * @see https://docs.github.com/rest/reference/interactions#set-interaction-restrictions-for-an-organization + */ + "PUT /orgs/{org}/interaction-limits": Operation<"/orgs/{org}/interaction-limits", "put">; + /** + * @see https://docs.github.com/rest/reference/orgs#set-organization-membership-for-a-user + */ + "PUT /orgs/{org}/memberships/{username}": Operation<"/orgs/{org}/memberships/{username}", "put">; + /** + * @see https://docs.github.com/rest/reference/orgs#convert-an-organization-member-to-outside-collaborator + */ + "PUT /orgs/{org}/outside_collaborators/{username}": Operation<"/orgs/{org}/outside_collaborators/{username}", "put">; + /** + * @see https://docs.github.com/rest/reference/orgs#set-public-organization-membership-for-the-authenticated-user + */ + "PUT /orgs/{org}/public_members/{username}": Operation<"/orgs/{org}/public_members/{username}", "put">; + /** + * @see https://docs.github.com/rest/reference/teams#add-or-update-team-membership-for-a-user + */ + "PUT /orgs/{org}/teams/{team_slug}/memberships/{username}": Operation<"/orgs/{org}/teams/{team_slug}/memberships/{username}", "put">; + /** + * @see https://docs.github.com/rest/reference/teams#add-or-update-team-project-permissions + */ + "PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}": Operation<"/orgs/{org}/teams/{team_slug}/projects/{project_id}", "put">; + /** + * @see https://docs.github.com/rest/reference/teams/#add-or-update-team-repository-permissions + */ + "PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}": Operation<"/orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}", "put">; + /** + * @see https://docs.github.com/rest/reference/projects#add-project-collaborator + */ + "PUT /projects/{project_id}/collaborators/{username}": Operation<"/projects/{project_id}/collaborators/{username}", "put">; + /** + * @see https://docs.github.com/rest/actions/oidc#set-the-opt-out-flag-of-an-oidc-subject-claim-customization-for-a-repository + */ + "PUT /repos/{owner}/{repo}/actions/oidc/customization/sub": Operation<"/repos/{owner}/{repo}/actions/oidc/customization/sub", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#set-github-actions-permissions-for-a-repository + */ + "PUT /repos/{owner}/{repo}/actions/permissions": 
Operation<"/repos/{owner}/{repo}/actions/permissions", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#set-workflow-access-to-a-repository + */ + "PUT /repos/{owner}/{repo}/actions/permissions/access": Operation<"/repos/{owner}/{repo}/actions/permissions/access", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#set-allowed-actions-for-a-repository + */ + "PUT /repos/{owner}/{repo}/actions/permissions/selected-actions": Operation<"/repos/{owner}/{repo}/actions/permissions/selected-actions", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#set-default-workflow-permissions-for-a-repository + */ + "PUT /repos/{owner}/{repo}/actions/permissions/workflow": Operation<"/repos/{owner}/{repo}/actions/permissions/workflow", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#set-custom-labels-for-a-self-hosted-runner-for-a-repository + */ + "PUT /repos/{owner}/{repo}/actions/runners/{runner_id}/labels": Operation<"/repos/{owner}/{repo}/actions/runners/{runner_id}/labels", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#create-or-update-a-repository-secret + */ + "PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}": Operation<"/repos/{owner}/{repo}/actions/secrets/{secret_name}", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#disable-a-workflow + */ + "PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable": Operation<"/repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#enable-a-workflow + */ + "PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable": Operation<"/repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable", "put">; + /** + * @see https://docs.github.com/rest/reference/repos#enable-automated-security-fixes + */ + "PUT /repos/{owner}/{repo}/automated-security-fixes": Operation<"/repos/{owner}/{repo}/automated-security-fixes", "put">; + /** + * @see https://docs.github.com/rest/reference/repos#update-branch-protection + */ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection", "put">; + /** + * @see https://docs.github.com/rest/reference/repos#set-status-check-contexts + */ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", "put">; + /** + * @see https://docs.github.com/rest/reference/repos#set-app-access-restrictions + */ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", "put">; + /** + * @see https://docs.github.com/rest/reference/repos#set-team-access-restrictions + */ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", "put">; + /** + * @see https://docs.github.com/rest/reference/repos#set-user-access-restrictions + */ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", "put">; + /** + * @see https://docs.github.com/rest/reference/codespaces#create-or-update-a-repository-secret + */ + "PUT /repos/{owner}/{repo}/codespaces/secrets/{secret_name}": Operation<"/repos/{owner}/{repo}/codespaces/secrets/{secret_name}", "put">; + /** + 
* @see https://docs.github.com/rest/reference/repos#add-a-repository-collaborator + */ + "PUT /repos/{owner}/{repo}/collaborators/{username}": Operation<"/repos/{owner}/{repo}/collaborators/{username}", "put">; + /** + * @see https://docs.github.com/rest/reference/repos#create-or-update-file-contents + */ + "PUT /repos/{owner}/{repo}/contents/{path}": Operation<"/repos/{owner}/{repo}/contents/{path}", "put">; + /** + * @see https://docs.github.com/rest/reference/dependabot#create-or-update-a-repository-secret + */ + "PUT /repos/{owner}/{repo}/dependabot/secrets/{secret_name}": Operation<"/repos/{owner}/{repo}/dependabot/secrets/{secret_name}", "put">; + /** + * @see https://docs.github.com/rest/reference/repos#create-or-update-an-environment + */ + "PUT /repos/{owner}/{repo}/environments/{environment_name}": Operation<"/repos/{owner}/{repo}/environments/{environment_name}", "put">; + /** + * @see https://docs.github.com/rest/reference/migrations#start-an-import + */ + "PUT /repos/{owner}/{repo}/import": Operation<"/repos/{owner}/{repo}/import", "put">; + /** + * @see https://docs.github.com/rest/reference/interactions#set-interaction-restrictions-for-a-repository + */ + "PUT /repos/{owner}/{repo}/interaction-limits": Operation<"/repos/{owner}/{repo}/interaction-limits", "put">; + /** + * @see https://docs.github.com/rest/reference/issues#set-labels-for-an-issue + */ + "PUT /repos/{owner}/{repo}/issues/{issue_number}/labels": Operation<"/repos/{owner}/{repo}/issues/{issue_number}/labels", "put">; + /** + * @see https://docs.github.com/rest/reference/issues#lock-an-issue + */ + "PUT /repos/{owner}/{repo}/issues/{issue_number}/lock": Operation<"/repos/{owner}/{repo}/issues/{issue_number}/lock", "put">; + /** + * @see https://docs.github.com/rest/reference/repos#enable-git-lfs-for-a-repository + */ + "PUT /repos/{owner}/{repo}/lfs": Operation<"/repos/{owner}/{repo}/lfs", "put">; + /** + * @see https://docs.github.com/rest/reference/activity#mark-repository-notifications-as-read + */ + "PUT /repos/{owner}/{repo}/notifications": Operation<"/repos/{owner}/{repo}/notifications", "put">; + /** + * @see https://docs.github.com/rest/reference/repos#update-information-about-a-github-pages-site + */ + "PUT /repos/{owner}/{repo}/pages": Operation<"/repos/{owner}/{repo}/pages", "put">; + /** + * @see https://docs.github.com/rest/reference/pulls#merge-a-pull-request + */ + "PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge": Operation<"/repos/{owner}/{repo}/pulls/{pull_number}/merge", "put">; + /** + * @see https://docs.github.com/rest/reference/pulls#update-a-review-for-a-pull-request + */ + "PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}": Operation<"/repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}", "put">; + /** + * @see https://docs.github.com/rest/reference/pulls#dismiss-a-review-for-a-pull-request + */ + "PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals": Operation<"/repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals", "put">; + /** + * @see https://docs.github.com/rest/reference/pulls#update-a-pull-request-branch + */ + "PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch": Operation<"/repos/{owner}/{repo}/pulls/{pull_number}/update-branch", "put">; + /** + * @see https://docs.github.com/rest/reference/activity#set-a-repository-subscription + */ + "PUT /repos/{owner}/{repo}/subscription": Operation<"/repos/{owner}/{repo}/subscription", "put">; + /** + * @see 
https://docs.github.com/rest/reference/repos#replace-all-repository-topics + */ + "PUT /repos/{owner}/{repo}/topics": Operation<"/repos/{owner}/{repo}/topics", "put">; + /** + * @see https://docs.github.com/rest/reference/repos#enable-vulnerability-alerts + */ + "PUT /repos/{owner}/{repo}/vulnerability-alerts": Operation<"/repos/{owner}/{repo}/vulnerability-alerts", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#create-or-update-an-environment-secret + */ + "PUT /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}": Operation<"/repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}", "put">; + /** + * @see https://docs.github.com/rest/reference/enterprise-admin#set-scim-information-for-a-provisioned-enterprise-group + */ + "PUT /scim/v2/enterprises/{enterprise}/Groups/{scim_group_id}": Operation<"/scim/v2/enterprises/{enterprise}/Groups/{scim_group_id}", "put">; + /** + * @see https://docs.github.com/rest/reference/enterprise-admin#set-scim-information-for-a-provisioned-enterprise-user + */ + "PUT /scim/v2/enterprises/{enterprise}/Users/{scim_user_id}": Operation<"/scim/v2/enterprises/{enterprise}/Users/{scim_user_id}", "put">; + /** + * @see https://docs.github.com/rest/reference/scim#set-scim-information-for-a-provisioned-user + */ + "PUT /scim/v2/organizations/{org}/Users/{scim_user_id}": Operation<"/scim/v2/organizations/{org}/Users/{scim_user_id}", "put">; + /** + * @see https://docs.github.com/rest/reference/teams#add-team-member-legacy + */ + "PUT /teams/{team_id}/members/{username}": Operation<"/teams/{team_id}/members/{username}", "put">; + /** + * @see https://docs.github.com/rest/reference/teams#add-or-update-team-membership-for-a-user-legacy + */ + "PUT /teams/{team_id}/memberships/{username}": Operation<"/teams/{team_id}/memberships/{username}", "put">; + /** + * @see https://docs.github.com/rest/reference/teams/#add-or-update-team-project-permissions-legacy + */ + "PUT /teams/{team_id}/projects/{project_id}": Operation<"/teams/{team_id}/projects/{project_id}", "put">; + /** + * @see https://docs.github.com/rest/reference/teams/#add-or-update-team-repository-permissions-legacy + */ + "PUT /teams/{team_id}/repos/{owner}/{repo}": Operation<"/teams/{team_id}/repos/{owner}/{repo}", "put">; + /** + * @see https://docs.github.com/rest/reference/users#block-a-user + */ + "PUT /user/blocks/{username}": Operation<"/user/blocks/{username}", "put">; + /** + * @see https://docs.github.com/rest/reference/codespaces#create-or-update-a-secret-for-the-authenticated-user + */ + "PUT /user/codespaces/secrets/{secret_name}": Operation<"/user/codespaces/secrets/{secret_name}", "put">; + /** + * @see https://docs.github.com/rest/reference/codespaces#set-selected-repositories-for-a-user-secret + */ + "PUT /user/codespaces/secrets/{secret_name}/repositories": Operation<"/user/codespaces/secrets/{secret_name}/repositories", "put">; + /** + * @see https://docs.github.com/rest/reference/codespaces#add-a-selected-repository-to-a-user-secret + */ + "PUT /user/codespaces/secrets/{secret_name}/repositories/{repository_id}": Operation<"/user/codespaces/secrets/{secret_name}/repositories/{repository_id}", "put">; + /** + * @see https://docs.github.com/rest/reference/users#follow-a-user + */ + "PUT /user/following/{username}": Operation<"/user/following/{username}", "put">; + /** + * @see https://docs.github.com/rest/reference/apps#add-a-repository-to-an-app-installation + */ + "PUT 
/user/installations/{installation_id}/repositories/{repository_id}": Operation<"/user/installations/{installation_id}/repositories/{repository_id}", "put">; + /** + * @see https://docs.github.com/rest/reference/interactions#set-interaction-restrictions-for-your-public-repositories + */ + "PUT /user/interaction-limits": Operation<"/user/interaction-limits", "put">; + /** + * @see https://docs.github.com/rest/reference/activity#star-a-repository-for-the-authenticated-user + */ + "PUT /user/starred/{owner}/{repo}": Operation<"/user/starred/{owner}/{repo}", "put">; +} +export {}; diff --git a/node_modules/@octokit/types/dist-types/index.d.ts b/node_modules/@octokit/types/dist-types/index.d.ts new file mode 100644 index 0000000..004ae9b --- /dev/null +++ b/node_modules/@octokit/types/dist-types/index.d.ts @@ -0,0 +1,21 @@ +export * from "./AuthInterface"; +export * from "./EndpointDefaults"; +export * from "./EndpointInterface"; +export * from "./EndpointOptions"; +export * from "./Fetch"; +export * from "./OctokitResponse"; +export * from "./RequestError"; +export * from "./RequestHeaders"; +export * from "./RequestInterface"; +export * from "./RequestMethod"; +export * from "./RequestOptions"; +export * from "./RequestParameters"; +export * from "./RequestRequestOptions"; +export * from "./ResponseHeaders"; +export * from "./Route"; +export * from "./Signal"; +export * from "./StrategyInterface"; +export * from "./Url"; +export * from "./VERSION"; +export * from "./GetResponseTypeFromEndpointMethod"; +export * from "./generated/Endpoints"; diff --git a/node_modules/@octokit/types/dist-web/index.js b/node_modules/@octokit/types/dist-web/index.js new file mode 100644 index 0000000..b1b47d5 --- /dev/null +++ b/node_modules/@octokit/types/dist-web/index.js @@ -0,0 +1,4 @@ +const VERSION = "6.41.0"; + +export { VERSION }; +//# sourceMappingURL=index.js.map diff --git a/node_modules/@octokit/types/dist-web/index.js.map b/node_modules/@octokit/types/dist-web/index.js.map new file mode 100644 index 0000000..cd0e254 --- /dev/null +++ b/node_modules/@octokit/types/dist-web/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sources":["../dist-src/VERSION.js"],"sourcesContent":["export const VERSION = \"0.0.0-development\";\n"],"names":[],"mappings":"AAAY,MAAC,OAAO,GAAG;;;;"} \ No newline at end of file diff --git a/node_modules/@octokit/types/package.json b/node_modules/@octokit/types/package.json new file mode 100644 index 0000000..960747f --- /dev/null +++ b/node_modules/@octokit/types/package.json @@ -0,0 +1,54 @@ +{ + "name": "@octokit/types", + "description": "Shared TypeScript definitions for Octokit projects", + "version": "6.41.0", + "license": "MIT", + "files": [ + "dist-*/", + "bin/" + ], + "source": "dist-src/index.js", + "types": "dist-types/index.d.ts", + "octokit": { + "openapi-version": "6.8.0" + }, + "main": "dist-node/index.js", + "module": "dist-web/index.js", + "pika": true, + "sideEffects": false, + "keywords": [ + "github", + "api", + "sdk", + "toolkit", + "typescript" + ], + "repository": "github:octokit/types.ts", + "dependencies": { + "@octokit/openapi-types": "^12.11.0" + }, + "devDependencies": { + "@pika/pack": "^0.3.7", + "@pika/plugin-build-node": "^0.9.0", + "@pika/plugin-build-web": "^0.9.0", + "@pika/plugin-ts-standard-pkg": "^0.9.0", + "@types/node": ">= 8", + "github-openapi-graphql-query": "^2.0.0", + "handlebars": "^4.7.6", + "json-schema-to-typescript": "^11.0.0", + "lodash.set": "^4.3.2", + "npm-run-all": "^4.1.5", + "pascal-case": "^3.1.1", + 
"pika-plugin-merge-properties": "^1.0.6", + "prettier": "^2.0.0", + "semantic-release": "^19.0.3", + "semantic-release-plugin-update-version-in-files": "^1.0.0", + "sort-keys": "^4.2.0", + "string-to-jsdoc-comment": "^1.0.0", + "typedoc": "^0.23.0", + "typescript": "^4.0.2" + }, + "publishConfig": { + "access": "public" + } +} diff --git a/node_modules/@types/node-fetch/LICENSE b/node_modules/@types/node-fetch/LICENSE new file mode 100644 index 0000000..9e841e7 --- /dev/null +++ b/node_modules/@types/node-fetch/LICENSE @@ -0,0 +1,21 @@ + MIT License + + Copyright (c) Microsoft Corporation. + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all + copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE diff --git a/node_modules/@types/node-fetch/README.md b/node_modules/@types/node-fetch/README.md new file mode 100644 index 0000000..7fda7e4 --- /dev/null +++ b/node_modules/@types/node-fetch/README.md @@ -0,0 +1,15 @@ +# Installation +> `npm install --save @types/node-fetch` + +# Summary +This package contains type definitions for node-fetch (https://github.com/bitinn/node-fetch). + +# Details +Files were exported from https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/node-fetch. + +### Additional Details + * Last updated: Mon, 11 Nov 2024 18:36:31 GMT + * Dependencies: [@types/node](https://npmjs.com/package/@types/node), [form-data](https://npmjs.com/package/form-data) + +# Credits +These definitions were written by [Torsten Werner](https://github.com/torstenwerner), [Niklas Lindgren](https://github.com/nikcorg), [Vinay Bedre](https://github.com/vinaybedre), [Antonio Román](https://github.com/kyranet), [Andrew Leedham](https://github.com/AndrewLeedham), [Jason Li](https://github.com/JasonLi914), [Steve Faulkner](https://github.com/southpolesteve), [ExE Boss](https://github.com/ExE-Boss), [Alex Savin](https://github.com/alexandrusavin), [Alexis Tyler](https://github.com/OmgImAlexis), [Jakub Kisielewski](https://github.com/kbkk), and [David Glasser](https://github.com/glasser). diff --git a/node_modules/@types/node-fetch/externals.d.ts b/node_modules/@types/node-fetch/externals.d.ts new file mode 100644 index 0000000..20f1467 --- /dev/null +++ b/node_modules/@types/node-fetch/externals.d.ts @@ -0,0 +1,32 @@ +// `AbortSignal` is defined here to prevent a dependency on a particular +// implementation like the `abort-controller` package, and to avoid requiring +// the `dom` library in `tsconfig.json`. 
+ +export interface AbortSignal { + aborted: boolean; + reason: any; + + addEventListener: ( + type: "abort", + listener: (this: AbortSignal, event: any) => any, + options?: boolean | { + capture?: boolean | undefined; + once?: boolean | undefined; + passive?: boolean | undefined; + }, + ) => void; + + removeEventListener: ( + type: "abort", + listener: (this: AbortSignal, event: any) => any, + options?: boolean | { + capture?: boolean | undefined; + }, + ) => void; + + dispatchEvent: (event: any) => boolean; + + onabort: null | ((this: AbortSignal, event: any) => any); + + throwIfAborted(): void; +} diff --git a/node_modules/@types/node-fetch/index.d.ts b/node_modules/@types/node-fetch/index.d.ts new file mode 100644 index 0000000..f95f9c1 --- /dev/null +++ b/node_modules/@types/node-fetch/index.d.ts @@ -0,0 +1,239 @@ +/// + +import FormData = require("form-data"); +import { RequestOptions } from "http"; +import { URL, URLSearchParams } from "url"; +import { AbortSignal } from "./externals"; + +declare class Request extends Body { + constructor(input: RequestInfo, init?: RequestInit); + clone(): Request; + context: RequestContext; + headers: Headers; + method: string; + redirect: RequestRedirect; + referrer: string; + url: string; + + // node-fetch extensions to the whatwg/fetch spec + agent?: RequestOptions["agent"] | ((parsedUrl: URL) => RequestOptions["agent"]); + compress: boolean; + counter: number; + follow: number; + hostname: string; + port?: number | undefined; + protocol: string; + size: number; + timeout: number; +} + +interface RequestInit { + // whatwg/fetch standard options + body?: BodyInit | undefined; + headers?: HeadersInit | undefined; + method?: string | undefined; + redirect?: RequestRedirect | undefined; + signal?: AbortSignal | null | undefined; + + // node-fetch extensions + agent?: RequestOptions["agent"] | ((parsedUrl: URL) => RequestOptions["agent"]); // =null http.Agent instance, allows custom proxy, certificate etc. + compress?: boolean | undefined; // =true support gzip/deflate content encoding. false to disable + follow?: number | undefined; // =20 maximum redirect count. 0 to not follow redirect + size?: number | undefined; // =0 maximum response body size in bytes. 0 to disable + timeout?: number | undefined; // =0 req/res timeout in ms, it resets on redirect. 
0 to disable (OS limit applies) + + // node-fetch does not support mode, cache or credentials options +} + +type RequestContext = + | "audio" + | "beacon" + | "cspreport" + | "download" + | "embed" + | "eventsource" + | "favicon" + | "fetch" + | "font" + | "form" + | "frame" + | "hyperlink" + | "iframe" + | "image" + | "imageset" + | "import" + | "internal" + | "location" + | "manifest" + | "object" + | "ping" + | "plugin" + | "prefetch" + | "script" + | "serviceworker" + | "sharedworker" + | "style" + | "subresource" + | "track" + | "video" + | "worker" + | "xmlhttprequest" + | "xslt"; +type RequestMode = "cors" | "no-cors" | "same-origin"; +type RequestRedirect = "error" | "follow" | "manual"; +type RequestCredentials = "omit" | "include" | "same-origin"; + +type RequestCache = + | "default" + | "force-cache" + | "no-cache" + | "no-store" + | "only-if-cached" + | "reload"; + +declare class Headers implements Iterable<[string, string]> { + constructor(init?: HeadersInit); + forEach(callback: (value: string, name: string) => void): void; + append(name: string, value: string): void; + delete(name: string): void; + get(name: string): string | null; + has(name: string): boolean; + raw(): { [k: string]: string[] }; + set(name: string, value: string): void; + + // Iterable methods + entries(): IterableIterator<[string, string]>; + keys(): IterableIterator; + values(): IterableIterator; + [Symbol.iterator](): Iterator<[string, string]>; +} + +type BlobPart = ArrayBuffer | ArrayBufferView | Blob | string; + +interface BlobOptions { + type?: string | undefined; + endings?: "transparent" | "native" | undefined; +} + +declare class Blob { + constructor(blobParts?: BlobPart[], options?: BlobOptions); + readonly type: string; + readonly size: number; + slice(start?: number, end?: number): Blob; + text(): Promise; + arrayBuffer(): Promise; +} + +declare class Body { + constructor(body?: any, opts?: { size?: number | undefined; timeout?: number | undefined }); + arrayBuffer(): Promise; + blob(): Promise; + body: NodeJS.ReadableStream; + bodyUsed: boolean; + buffer(): Promise; + json(): Promise; + size: number; + text(): Promise; + textConverted(): Promise; + timeout: number; +} + +interface SystemError extends Error { + code?: string | undefined; +} + +declare class AbortError extends Error { + readonly name: "AbortError"; + constructor(message: string); + readonly type: "aborted"; +} + +declare class FetchError extends Error { + name: "FetchError"; + constructor(message: string, type: string, systemError?: SystemError); + type: string; + code?: string | undefined; + errno?: string | undefined; +} + +declare class Response extends Body { + constructor(body?: BodyInit, init?: ResponseInit); + static error(): Response; + static redirect(url: string, status: number): Response; + clone(): Response; + headers: Headers; + ok: boolean; + redirected: boolean; + status: number; + statusText: string; + type: ResponseType; + url: string; +} + +type ResponseType = + | "basic" + | "cors" + | "default" + | "error" + | "opaque" + | "opaqueredirect"; + +interface ResponseInit { + headers?: HeadersInit | undefined; + size?: number | undefined; + status?: number | undefined; + statusText?: string | undefined; + timeout?: number | undefined; + url?: string | undefined; + counter?: number | undefined; +} + +interface URLLike { + href: string; +} + +type HeadersInit = Headers | string[][] | { [key: string]: string | string[] }; +type BodyInit = + | ArrayBuffer + | ArrayBufferView + | NodeJS.ReadableStream + | string + | 
URLSearchParams + | FormData; +type RequestInfo = string | URLLike | Request; + +declare function fetch( + url: RequestInfo, + init?: RequestInit, +): Promise; + +declare namespace fetch { + export { + AbortError, + Blob, + Body, + BodyInit, + FetchError, + Headers, + HeadersInit, + // HeaderInit is exported to support backwards compatibility. See PR #34382 + HeadersInit as HeaderInit, + Request, + RequestCache, + RequestContext, + RequestCredentials, + RequestInfo, + RequestInit, + RequestMode, + RequestRedirect, + Response, + ResponseInit, + ResponseType, + }; + export function isRedirect(code: number): boolean; + + import _default = fetch; + export { _default as default }; +} + +export = fetch; diff --git a/node_modules/@types/node-fetch/node_modules/@types/node/LICENSE b/node_modules/@types/node-fetch/node_modules/@types/node/LICENSE new file mode 100644 index 0000000..9e841e7 --- /dev/null +++ b/node_modules/@types/node-fetch/node_modules/@types/node/LICENSE @@ -0,0 +1,21 @@ + MIT License + + Copyright (c) Microsoft Corporation. + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all + copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE diff --git a/node_modules/@types/node-fetch/node_modules/@types/node/README.md b/node_modules/@types/node-fetch/node_modules/@types/node/README.md new file mode 100644 index 0000000..5963604 --- /dev/null +++ b/node_modules/@types/node-fetch/node_modules/@types/node/README.md @@ -0,0 +1,15 @@ +# Installation +> `npm install --save @types/node` + +# Summary +This package contains type definitions for node (https://nodejs.org/). + +# Details +Files were exported from https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/node. + +### Additional Details + * Last updated: Wed, 11 Dec 2024 09:35:14 GMT + * Dependencies: [undici-types](https://npmjs.com/package/undici-types) + +# Credits +These definitions were written by [Microsoft TypeScript](https://github.com/Microsoft), [Alberto Schiabel](https://github.com/jkomyno), [Alvis HT Tang](https://github.com/alvis), [Andrew Makarov](https://github.com/r3nya), [Benjamin Toueg](https://github.com/btoueg), [Chigozirim C.](https://github.com/smac89), [David Junger](https://github.com/touffy), [Deividas Bakanas](https://github.com/DeividasBakanas), [Eugene Y. Q. 
Shen](https://github.com/eyqs), [Hannes Magnusson](https://github.com/Hannes-Magnusson-CK), [Huw](https://github.com/hoo29), [Kelvin Jin](https://github.com/kjin), [Klaus Meinhardt](https://github.com/ajafff), [Lishude](https://github.com/islishude), [Mariusz Wiktorczyk](https://github.com/mwiktorczyk), [Mohsen Azimi](https://github.com/mohsen1), [Nikita Galkin](https://github.com/galkin), [Parambir Singh](https://github.com/parambirs), [Sebastian Silbermann](https://github.com/eps1lon), [Thomas den Hollander](https://github.com/ThomasdenH), [Wilco Bakker](https://github.com/WilcoBakker), [wwwy3y3](https://github.com/wwwy3y3), [Samuel Ainsworth](https://github.com/samuela), [Kyle Uehlein](https://github.com/kuehlein), [Thanik Bhongbhibhat](https://github.com/bhongy), [Marcin Kopacz](https://github.com/chyzwar), [Trivikram Kamat](https://github.com/trivikr), [Junxiao Shi](https://github.com/yoursunny), [Ilia Baryshnikov](https://github.com/qwelias), [ExE Boss](https://github.com/ExE-Boss), [Piotr Błażejewicz](https://github.com/peterblazejewicz), [Anna Henningsen](https://github.com/addaleax), [Victor Perin](https://github.com/victorperin), [NodeJS Contributors](https://github.com/NodeJS), [Linus Unnebäck](https://github.com/LinusU), [wafuwafu13](https://github.com/wafuwafu13), [Matteo Collina](https://github.com/mcollina), and [Dmitry Semigradsky](https://github.com/Semigradsky). diff --git a/node_modules/@types/node-fetch/node_modules/@types/node/assert.d.ts b/node_modules/@types/node-fetch/node_modules/@types/node/assert.d.ts new file mode 100644 index 0000000..d7e3719 --- /dev/null +++ b/node_modules/@types/node-fetch/node_modules/@types/node/assert.d.ts @@ -0,0 +1,1040 @@ +/** + * The `node:assert` module provides a set of assertion functions for verifying + * invariants. + * @see [source](https://github.com/nodejs/node/blob/v22.x/lib/assert.js) + */ +declare module "assert" { + /** + * An alias of {@link ok}. + * @since v0.5.9 + * @param value The input that is checked for being truthy. + */ + function assert(value: unknown, message?: string | Error): asserts value; + namespace assert { + /** + * Indicates the failure of an assertion. All errors thrown by the `node:assert` module will be instances of the `AssertionError` class. + */ + class AssertionError extends Error { + /** + * Set to the `actual` argument for methods such as {@link assert.strictEqual()}. + */ + actual: unknown; + /** + * Set to the `expected` argument for methods such as {@link assert.strictEqual()}. + */ + expected: unknown; + /** + * Set to the passed in operator value. + */ + operator: string; + /** + * Indicates if the message was auto-generated (`true`) or not. + */ + generatedMessage: boolean; + /** + * Value is always `ERR_ASSERTION` to show that the error is an assertion error. + */ + code: "ERR_ASSERTION"; + constructor(options?: { + /** If provided, the error message is set to this value. */ + message?: string | undefined; + /** The `actual` property on the error instance. */ + actual?: unknown | undefined; + /** The `expected` property on the error instance. */ + expected?: unknown | undefined; + /** The `operator` property on the error instance. */ + operator?: string | undefined; + /** If provided, the generated stack trace omits frames before this function. */ + // eslint-disable-next-line @typescript-eslint/no-unsafe-function-type + stackStartFn?: Function | undefined; + }); + } + /** + * This feature is deprecated and will be removed in a future version. 
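Looking back at the `@types/node-fetch` declarations vendored above (before the nested `@types/node` copy begins): `RequestInit` there includes node-fetch-specific extensions such as `timeout`, `compress`, and `follow` on top of the WHATWG options. A minimal usage sketch follows; the URL, header, and option values are placeholders and not taken from this patch.

```ts
// Illustrative sketch of the node-fetch API as typed above.
import fetch, { Headers, Response } from "node-fetch";
import type { RequestInit } from "node-fetch";

async function getJson(url: string): Promise<unknown> {
  const headers = new Headers({ Accept: "application/json" });

  // node-fetch's RequestInit accepts node-only extensions alongside the
  // standard options (see the inline comments in the declaration above).
  const init: RequestInit = {
    method: "GET",
    headers,
    timeout: 10_000, // req/res timeout in ms (node-fetch extension)
    compress: true,  // allow gzip/deflate content encoding
    follow: 5,       // maximum redirect count
  };

  const res: Response = await fetch(url, init);
  if (!res.ok) {
    throw new Error(`Request failed: ${res.status} ${res.statusText}`);
  }
  return res.json();
}
```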
+ * Please consider using alternatives such as the `mock` helper function. + * @since v14.2.0, v12.19.0 + * @deprecated Deprecated + */ + class CallTracker { + /** + * The wrapper function is expected to be called exactly `exact` times. If the + * function has not been called exactly `exact` times when `tracker.verify()` is called, then `tracker.verify()` will throw an + * error. + * + * ```js + * import assert from 'node:assert'; + * + * // Creates call tracker. + * const tracker = new assert.CallTracker(); + * + * function func() {} + * + * // Returns a function that wraps func() that must be called exact times + * // before tracker.verify(). + * const callsfunc = tracker.calls(func); + * ``` + * @since v14.2.0, v12.19.0 + * @param [fn='A no-op function'] + * @param [exact=1] + * @return A function that wraps `fn`. + */ + calls(exact?: number): () => void; + calls any>(fn?: Func, exact?: number): Func; + /** + * Example: + * + * ```js + * import assert from 'node:assert'; + * + * const tracker = new assert.CallTracker(); + * + * function func() {} + * const callsfunc = tracker.calls(func); + * callsfunc(1, 2, 3); + * + * assert.deepStrictEqual(tracker.getCalls(callsfunc), + * [{ thisArg: undefined, arguments: [1, 2, 3] }]); + * ``` + * @since v18.8.0, v16.18.0 + * @return An array with all the calls to a tracked function. + */ + getCalls(fn: Function): CallTrackerCall[]; + /** + * The arrays contains information about the expected and actual number of calls of + * the functions that have not been called the expected number of times. + * + * ```js + * import assert from 'node:assert'; + * + * // Creates call tracker. + * const tracker = new assert.CallTracker(); + * + * function func() {} + * + * // Returns a function that wraps func() that must be called exact times + * // before tracker.verify(). + * const callsfunc = tracker.calls(func, 2); + * + * // Returns an array containing information on callsfunc() + * console.log(tracker.report()); + * // [ + * // { + * // message: 'Expected the func function to be executed 2 time(s) but was + * // executed 0 time(s).', + * // actual: 0, + * // expected: 2, + * // operator: 'func', + * // stack: stack trace + * // } + * // ] + * ``` + * @since v14.2.0, v12.19.0 + * @return An array of objects containing information about the wrapper functions returned by {@link tracker.calls()}. + */ + report(): CallTrackerReportInformation[]; + /** + * Reset calls of the call tracker. If a tracked function is passed as an argument, the calls will be reset for it. + * If no arguments are passed, all tracked functions will be reset. + * + * ```js + * import assert from 'node:assert'; + * + * const tracker = new assert.CallTracker(); + * + * function func() {} + * const callsfunc = tracker.calls(func); + * + * callsfunc(); + * // Tracker was called once + * assert.strictEqual(tracker.getCalls(callsfunc).length, 1); + * + * tracker.reset(callsfunc); + * assert.strictEqual(tracker.getCalls(callsfunc).length, 0); + * ``` + * @since v18.8.0, v16.18.0 + * @param fn a tracked function to reset. + */ + reset(fn?: Function): void; + /** + * Iterates through the list of functions passed to {@link tracker.calls()} and will throw an error for functions that + * have not been called the expected number of times. + * + * ```js + * import assert from 'node:assert'; + * + * // Creates call tracker. 
+ * const tracker = new assert.CallTracker(); + * + * function func() {} + * + * // Returns a function that wraps func() that must be called exact times + * // before tracker.verify(). + * const callsfunc = tracker.calls(func, 2); + * + * callsfunc(); + * + * // Will throw an error since callsfunc() was only called once. + * tracker.verify(); + * ``` + * @since v14.2.0, v12.19.0 + */ + verify(): void; + } + interface CallTrackerCall { + thisArg: object; + arguments: unknown[]; + } + interface CallTrackerReportInformation { + message: string; + /** The actual number of times the function was called. */ + actual: number; + /** The number of times the function was expected to be called. */ + expected: number; + /** The name of the function that is wrapped. */ + operator: string; + /** A stack trace of the function. */ + stack: object; + } + type AssertPredicate = RegExp | (new() => object) | ((thrown: unknown) => boolean) | object | Error; + /** + * Throws an `AssertionError` with the provided error message or a default + * error message. If the `message` parameter is an instance of an `Error` then + * it will be thrown instead of the `AssertionError`. + * + * ```js + * import assert from 'node:assert/strict'; + * + * assert.fail(); + * // AssertionError [ERR_ASSERTION]: Failed + * + * assert.fail('boom'); + * // AssertionError [ERR_ASSERTION]: boom + * + * assert.fail(new TypeError('need array')); + * // TypeError: need array + * ``` + * + * Using `assert.fail()` with more than two arguments is possible but deprecated. + * See below for further details. + * @since v0.1.21 + * @param [message='Failed'] + */ + function fail(message?: string | Error): never; + /** @deprecated since v10.0.0 - use fail([message]) or other assert functions instead. */ + function fail( + actual: unknown, + expected: unknown, + message?: string | Error, + operator?: string, + // eslint-disable-next-line @typescript-eslint/no-unsafe-function-type + stackStartFn?: Function, + ): never; + /** + * Tests if `value` is truthy. It is equivalent to `assert.equal(!!value, true, message)`. + * + * If `value` is not truthy, an `AssertionError` is thrown with a `message` property set equal to the value of the `message` parameter. If the `message` parameter is `undefined`, a default + * error message is assigned. If the `message` parameter is an instance of an `Error` then it will be thrown instead of the `AssertionError`. + * If no arguments are passed in at all `message` will be set to the string:`` 'No value argument passed to `assert.ok()`' ``. + * + * Be aware that in the `repl` the error message will be different to the one + * thrown in a file! See below for further details. + * + * ```js + * import assert from 'node:assert/strict'; + * + * assert.ok(true); + * // OK + * assert.ok(1); + * // OK + * + * assert.ok(); + * // AssertionError: No value argument passed to `assert.ok()` + * + * assert.ok(false, 'it\'s false'); + * // AssertionError: it's false + * + * // In the repl: + * assert.ok(typeof 123 === 'string'); + * // AssertionError: false == true + * + * // In a file (e.g. 
test.js): + * assert.ok(typeof 123 === 'string'); + * // AssertionError: The expression evaluated to a falsy value: + * // + * // assert.ok(typeof 123 === 'string') + * + * assert.ok(false); + * // AssertionError: The expression evaluated to a falsy value: + * // + * // assert.ok(false) + * + * assert.ok(0); + * // AssertionError: The expression evaluated to a falsy value: + * // + * // assert.ok(0) + * ``` + * + * ```js + * import assert from 'node:assert/strict'; + * + * // Using `assert()` works the same: + * assert(0); + * // AssertionError: The expression evaluated to a falsy value: + * // + * // assert(0) + * ``` + * @since v0.1.21 + */ + function ok(value: unknown, message?: string | Error): asserts value; + /** + * **Strict assertion mode** + * + * An alias of {@link strictEqual}. + * + * **Legacy assertion mode** + * + * > Stability: 3 - Legacy: Use {@link strictEqual} instead. + * + * Tests shallow, coercive equality between the `actual` and `expected` parameters + * using the [`==` operator](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/Equality). `NaN` is specially handled + * and treated as being identical if both sides are `NaN`. + * + * ```js + * import assert from 'node:assert'; + * + * assert.equal(1, 1); + * // OK, 1 == 1 + * assert.equal(1, '1'); + * // OK, 1 == '1' + * assert.equal(NaN, NaN); + * // OK + * + * assert.equal(1, 2); + * // AssertionError: 1 == 2 + * assert.equal({ a: { b: 1 } }, { a: { b: 1 } }); + * // AssertionError: { a: { b: 1 } } == { a: { b: 1 } } + * ``` + * + * If the values are not equal, an `AssertionError` is thrown with a `message` property set equal to the value of the `message` parameter. If the `message` parameter is undefined, a default + * error message is assigned. If the `message` parameter is an instance of an `Error` then it will be thrown instead of the `AssertionError`. + * @since v0.1.21 + */ + function equal(actual: unknown, expected: unknown, message?: string | Error): void; + /** + * **Strict assertion mode** + * + * An alias of {@link notStrictEqual}. + * + * **Legacy assertion mode** + * + * > Stability: 3 - Legacy: Use {@link notStrictEqual} instead. + * + * Tests shallow, coercive inequality with the [`!=` operator](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/Inequality). `NaN` is + * specially handled and treated as being identical if both sides are `NaN`. + * + * ```js + * import assert from 'node:assert'; + * + * assert.notEqual(1, 2); + * // OK + * + * assert.notEqual(1, 1); + * // AssertionError: 1 != 1 + * + * assert.notEqual(1, '1'); + * // AssertionError: 1 != '1' + * ``` + * + * If the values are equal, an `AssertionError` is thrown with a `message` property set equal to the value of the `message` parameter. If the `message` parameter is undefined, a default error + * message is assigned. If the `message` parameter is an instance of an `Error` then it will be thrown instead of the `AssertionError`. + * @since v0.1.21 + */ + function notEqual(actual: unknown, expected: unknown, message?: string | Error): void; + /** + * **Strict assertion mode** + * + * An alias of {@link deepStrictEqual}. + * + * **Legacy assertion mode** + * + * > Stability: 3 - Legacy: Use {@link deepStrictEqual} instead. + * + * Tests for deep equality between the `actual` and `expected` parameters. Consider + * using {@link deepStrictEqual} instead. {@link deepEqual} can have + * surprising results. 
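The "surprising results" mentioned above for the legacy `deepEqual` come from its coercive `==` comparison of primitive leaves. A short illustration of standard Node.js behaviour (the object shapes are made up for the example):

```ts
import assert from "node:assert";

// Legacy deepEqual compares primitive leaves with ==, so a number and a
// numeric string are treated as equal:
assert.deepEqual({ id: 1 }, { id: "1" }); // passes (coercive comparison)

// The strict variant catches the type mismatch and throws an AssertionError:
assert.throws(() => {
  assert.deepStrictEqual({ id: 1 }, { id: "1" });
}, assert.AssertionError);
```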
+ * + * _Deep equality_ means that the enumerable "own" properties of child objects + * are also recursively evaluated by the following rules. + * @since v0.1.21 + */ + function deepEqual(actual: unknown, expected: unknown, message?: string | Error): void; + /** + * **Strict assertion mode** + * + * An alias of {@link notDeepStrictEqual}. + * + * **Legacy assertion mode** + * + * > Stability: 3 - Legacy: Use {@link notDeepStrictEqual} instead. + * + * Tests for any deep inequality. Opposite of {@link deepEqual}. + * + * ```js + * import assert from 'node:assert'; + * + * const obj1 = { + * a: { + * b: 1, + * }, + * }; + * const obj2 = { + * a: { + * b: 2, + * }, + * }; + * const obj3 = { + * a: { + * b: 1, + * }, + * }; + * const obj4 = { __proto__: obj1 }; + * + * assert.notDeepEqual(obj1, obj1); + * // AssertionError: { a: { b: 1 } } notDeepEqual { a: { b: 1 } } + * + * assert.notDeepEqual(obj1, obj2); + * // OK + * + * assert.notDeepEqual(obj1, obj3); + * // AssertionError: { a: { b: 1 } } notDeepEqual { a: { b: 1 } } + * + * assert.notDeepEqual(obj1, obj4); + * // OK + * ``` + * + * If the values are deeply equal, an `AssertionError` is thrown with a `message` property set equal to the value of the `message` parameter. If the `message` parameter is undefined, a default + * error message is assigned. If the `message` parameter is an instance of an `Error` then it will be thrown + * instead of the `AssertionError`. + * @since v0.1.21 + */ + function notDeepEqual(actual: unknown, expected: unknown, message?: string | Error): void; + /** + * Tests strict equality between the `actual` and `expected` parameters as + * determined by [`Object.is()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/is). + * + * ```js + * import assert from 'node:assert/strict'; + * + * assert.strictEqual(1, 2); + * // AssertionError [ERR_ASSERTION]: Expected inputs to be strictly equal: + * // + * // 1 !== 2 + * + * assert.strictEqual(1, 1); + * // OK + * + * assert.strictEqual('Hello foobar', 'Hello World!'); + * // AssertionError [ERR_ASSERTION]: Expected inputs to be strictly equal: + * // + actual - expected + * // + * // + 'Hello foobar' + * // - 'Hello World!' + * // ^ + * + * const apples = 1; + * const oranges = 2; + * assert.strictEqual(apples, oranges, `apples ${apples} !== oranges ${oranges}`); + * // AssertionError [ERR_ASSERTION]: apples 1 !== oranges 2 + * + * assert.strictEqual(1, '1', new TypeError('Inputs are not identical')); + * // TypeError: Inputs are not identical + * ``` + * + * If the values are not strictly equal, an `AssertionError` is thrown with a `message` property set equal to the value of the `message` parameter. If the `message` parameter is undefined, a + * default error message is assigned. If the `message` parameter is an instance of an `Error` then it will be thrown + * instead of the `AssertionError`. + * @since v0.1.21 + */ + function strictEqual(actual: unknown, expected: T, message?: string | Error): asserts actual is T; + /** + * Tests strict inequality between the `actual` and `expected` parameters as + * determined by [`Object.is()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/is). 
+ * + * ```js + * import assert from 'node:assert/strict'; + * + * assert.notStrictEqual(1, 2); + * // OK + * + * assert.notStrictEqual(1, 1); + * // AssertionError [ERR_ASSERTION]: Expected "actual" to be strictly unequal to: + * // + * // 1 + * + * assert.notStrictEqual(1, '1'); + * // OK + * ``` + * + * If the values are strictly equal, an `AssertionError` is thrown with a `message` property set equal to the value of the `message` parameter. If the `message` parameter is undefined, a + * default error message is assigned. If the `message` parameter is an instance of an `Error` then it will be thrown + * instead of the `AssertionError`. + * @since v0.1.21 + */ + function notStrictEqual(actual: unknown, expected: unknown, message?: string | Error): void; + /** + * Tests for deep equality between the `actual` and `expected` parameters. + * "Deep" equality means that the enumerable "own" properties of child objects + * are recursively evaluated also by the following rules. + * @since v1.2.0 + */ + function deepStrictEqual(actual: unknown, expected: T, message?: string | Error): asserts actual is T; + /** + * Tests for deep strict inequality. Opposite of {@link deepStrictEqual}. + * + * ```js + * import assert from 'node:assert/strict'; + * + * assert.notDeepStrictEqual({ a: 1 }, { a: '1' }); + * // OK + * ``` + * + * If the values are deeply and strictly equal, an `AssertionError` is thrown + * with a `message` property set equal to the value of the `message` parameter. If + * the `message` parameter is undefined, a default error message is assigned. If + * the `message` parameter is an instance of an `Error` then it will be thrown + * instead of the `AssertionError`. + * @since v1.2.0 + */ + function notDeepStrictEqual(actual: unknown, expected: unknown, message?: string | Error): void; + /** + * Expects the function `fn` to throw an error. + * + * If specified, `error` can be a [`Class`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Classes), + * [`RegExp`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions), a validation function, + * a validation object where each property will be tested for strict deep equality, + * or an instance of error where each property will be tested for strict deep + * equality including the non-enumerable `message` and `name` properties. When + * using an object, it is also possible to use a regular expression, when + * validating against a string property. See below for examples. + * + * If specified, `message` will be appended to the message provided by the `AssertionError` if the `fn` call fails to throw or in case the error validation + * fails. + * + * Custom validation object/error instance: + * + * ```js + * import assert from 'node:assert/strict'; + * + * const err = new TypeError('Wrong value'); + * err.code = 404; + * err.foo = 'bar'; + * err.info = { + * nested: true, + * baz: 'text', + * }; + * err.reg = /abc/i; + * + * assert.throws( + * () => { + * throw err; + * }, + * { + * name: 'TypeError', + * message: 'Wrong value', + * info: { + * nested: true, + * baz: 'text', + * }, + * // Only properties on the validation object will be tested for. + * // Using nested objects requires all properties to be present. Otherwise + * // the validation is going to fail. 
+ * }, + * ); + * + * // Using regular expressions to validate error properties: + * assert.throws( + * () => { + * throw err; + * }, + * { + * // The `name` and `message` properties are strings and using regular + * // expressions on those will match against the string. If they fail, an + * // error is thrown. + * name: /^TypeError$/, + * message: /Wrong/, + * foo: 'bar', + * info: { + * nested: true, + * // It is not possible to use regular expressions for nested properties! + * baz: 'text', + * }, + * // The `reg` property contains a regular expression and only if the + * // validation object contains an identical regular expression, it is going + * // to pass. + * reg: /abc/i, + * }, + * ); + * + * // Fails due to the different `message` and `name` properties: + * assert.throws( + * () => { + * const otherErr = new Error('Not found'); + * // Copy all enumerable properties from `err` to `otherErr`. + * for (const [key, value] of Object.entries(err)) { + * otherErr[key] = value; + * } + * throw otherErr; + * }, + * // The error's `message` and `name` properties will also be checked when using + * // an error as validation object. + * err, + * ); + * ``` + * + * Validate instanceof using constructor: + * + * ```js + * import assert from 'node:assert/strict'; + * + * assert.throws( + * () => { + * throw new Error('Wrong value'); + * }, + * Error, + * ); + * ``` + * + * Validate error message using [`RegExp`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions): + * + * Using a regular expression runs `.toString` on the error object, and will + * therefore also include the error name. + * + * ```js + * import assert from 'node:assert/strict'; + * + * assert.throws( + * () => { + * throw new Error('Wrong value'); + * }, + * /^Error: Wrong value$/, + * ); + * ``` + * + * Custom error validation: + * + * The function must return `true` to indicate all internal validations passed. + * It will otherwise fail with an `AssertionError`. + * + * ```js + * import assert from 'node:assert/strict'; + * + * assert.throws( + * () => { + * throw new Error('Wrong value'); + * }, + * (err) => { + * assert(err instanceof Error); + * assert(/value/.test(err)); + * // Avoid returning anything from validation functions besides `true`. + * // Otherwise, it's not clear what part of the validation failed. Instead, + * // throw an error about the specific validation that failed (as done in this + * // example) and add as much helpful debugging information to that error as + * // possible. + * return true; + * }, + * 'unexpected error', + * ); + * ``` + * + * `error` cannot be a string. If a string is provided as the second + * argument, then `error` is assumed to be omitted and the string will be used for `message` instead. This can lead to easy-to-miss mistakes. Using the same + * message as the thrown error message is going to result in an `ERR_AMBIGUOUS_ARGUMENT` error. Please read the example below carefully if using + * a string as the second argument gets considered: + * + * ```js + * import assert from 'node:assert/strict'; + * + * function throwingFirst() { + * throw new Error('First'); + * } + * + * function throwingSecond() { + * throw new Error('Second'); + * } + * + * function notThrowing() {} + * + * // The second argument is a string and the input function threw an Error. + * // The first case will not throw as it does not match for the error message + * // thrown by the input function! 
+ * assert.throws(throwingFirst, 'Second'); + * // In the next example the message has no benefit over the message from the + * // error and since it is not clear if the user intended to actually match + * // against the error message, Node.js throws an `ERR_AMBIGUOUS_ARGUMENT` error. + * assert.throws(throwingSecond, 'Second'); + * // TypeError [ERR_AMBIGUOUS_ARGUMENT] + * + * // The string is only used (as message) in case the function does not throw: + * assert.throws(notThrowing, 'Second'); + * // AssertionError [ERR_ASSERTION]: Missing expected exception: Second + * + * // If it was intended to match for the error message do this instead: + * // It does not throw because the error messages match. + * assert.throws(throwingSecond, /Second$/); + * + * // If the error message does not match, an AssertionError is thrown. + * assert.throws(throwingFirst, /Second$/); + * // AssertionError [ERR_ASSERTION] + * ``` + * + * Due to the confusing error-prone notation, avoid a string as the second + * argument. + * @since v0.1.21 + */ + function throws(block: () => unknown, message?: string | Error): void; + function throws(block: () => unknown, error: AssertPredicate, message?: string | Error): void; + /** + * Asserts that the function `fn` does not throw an error. + * + * Using `assert.doesNotThrow()` is actually not useful because there + * is no benefit in catching an error and then rethrowing it. Instead, consider + * adding a comment next to the specific code path that should not throw and keep + * error messages as expressive as possible. + * + * When `assert.doesNotThrow()` is called, it will immediately call the `fn` function. + * + * If an error is thrown and it is the same type as that specified by the `error` parameter, then an `AssertionError` is thrown. If the error is of a + * different type, or if the `error` parameter is undefined, the error is + * propagated back to the caller. + * + * If specified, `error` can be a [`Class`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Classes), + * [`RegExp`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions), or a validation + * function. See {@link throws} for more details. + * + * The following, for instance, will throw the `TypeError` because there is no + * matching error type in the assertion: + * + * ```js + * import assert from 'node:assert/strict'; + * + * assert.doesNotThrow( + * () => { + * throw new TypeError('Wrong value'); + * }, + * SyntaxError, + * ); + * ``` + * + * However, the following will result in an `AssertionError` with the message + * 'Got unwanted exception...': + * + * ```js + * import assert from 'node:assert/strict'; + * + * assert.doesNotThrow( + * () => { + * throw new TypeError('Wrong value'); + * }, + * TypeError, + * ); + * ``` + * + * If an `AssertionError` is thrown and a value is provided for the `message` parameter, the value of `message` will be appended to the `AssertionError` message: + * + * ```js + * import assert from 'node:assert/strict'; + * + * assert.doesNotThrow( + * () => { + * throw new TypeError('Wrong value'); + * }, + * /Wrong value/, + * 'Whoops', + * ); + * // Throws: AssertionError: Got unwanted exception: Whoops + * ``` + * @since v0.1.21 + */ + function doesNotThrow(block: () => unknown, message?: string | Error): void; + function doesNotThrow(block: () => unknown, error: AssertPredicate, message?: string | Error): void; + /** + * Throws `value` if `value` is not `undefined` or `null`. 
This is useful when + * testing the `error` argument in callbacks. The stack trace contains all frames + * from the error passed to `ifError()` including the potential new frames for `ifError()` itself. + * + * ```js + * import assert from 'node:assert/strict'; + * + * assert.ifError(null); + * // OK + * assert.ifError(0); + * // AssertionError [ERR_ASSERTION]: ifError got unwanted exception: 0 + * assert.ifError('error'); + * // AssertionError [ERR_ASSERTION]: ifError got unwanted exception: 'error' + * assert.ifError(new Error()); + * // AssertionError [ERR_ASSERTION]: ifError got unwanted exception: Error + * + * // Create some random error frames. + * let err; + * (function errorFrame() { + * err = new Error('test error'); + * })(); + * + * (function ifErrorFrame() { + * assert.ifError(err); + * })(); + * // AssertionError [ERR_ASSERTION]: ifError got unwanted exception: test error + * // at ifErrorFrame + * // at errorFrame + * ``` + * @since v0.1.97 + */ + function ifError(value: unknown): asserts value is null | undefined; + /** + * Awaits the `asyncFn` promise or, if `asyncFn` is a function, immediately + * calls the function and awaits the returned promise to complete. It will then + * check that the promise is rejected. + * + * If `asyncFn` is a function and it throws an error synchronously, `assert.rejects()` will return a rejected `Promise` with that error. If the + * function does not return a promise, `assert.rejects()` will return a rejected `Promise` with an [ERR_INVALID_RETURN_VALUE](https://nodejs.org/docs/latest-v22.x/api/errors.html#err_invalid_return_value) + * error. In both cases the error handler is skipped. + * + * Besides the async nature to await the completion behaves identically to {@link throws}. + * + * If specified, `error` can be a [`Class`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Classes), + * [`RegExp`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions), a validation function, + * an object where each property will be tested for, or an instance of error where + * each property will be tested for including the non-enumerable `message` and `name` properties. + * + * If specified, `message` will be the message provided by the `{@link AssertionError}` if the `asyncFn` fails to reject. + * + * ```js + * import assert from 'node:assert/strict'; + * + * await assert.rejects( + * async () => { + * throw new TypeError('Wrong value'); + * }, + * { + * name: 'TypeError', + * message: 'Wrong value', + * }, + * ); + * ``` + * + * ```js + * import assert from 'node:assert/strict'; + * + * await assert.rejects( + * async () => { + * throw new TypeError('Wrong value'); + * }, + * (err) => { + * assert.strictEqual(err.name, 'TypeError'); + * assert.strictEqual(err.message, 'Wrong value'); + * return true; + * }, + * ); + * ``` + * + * ```js + * import assert from 'node:assert/strict'; + * + * assert.rejects( + * Promise.reject(new Error('Wrong value')), + * Error, + * ).then(() => { + * // ... + * }); + * ``` + * + * `error` cannot be a string. If a string is provided as the second argument, then `error` is assumed to + * be omitted and the string will be used for `message` instead. This can lead to easy-to-miss mistakes. Please read the + * example in {@link throws} carefully if using a string as the second argument gets considered. 
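For completeness, a compact `await`-based variant of the `assert.rejects` pattern documented above, written against the strict mode entry point. The `loadConfig` function and the file name are hypothetical and exist only for the example.

```ts
// Hedged sketch: exercising assert.rejects as documented above.
import assert from "node:assert/strict";

// Hypothetical async function that always rejects.
async function loadConfig(path: string): Promise<never> {
  throw new TypeError(`Unsupported config path: ${path}`);
}

export async function testLoadConfigRejects(): Promise<void> {
  // Validate both the error name and message via a validation object.
  await assert.rejects(
    () => loadConfig("settings.yaml"),
    {
      name: "TypeError",
      message: "Unsupported config path: settings.yaml",
    },
  );
}
```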
+ * @since v10.0.0 + */ + function rejects(block: (() => Promise) | Promise, message?: string | Error): Promise; + function rejects( + block: (() => Promise) | Promise, + error: AssertPredicate, + message?: string | Error, + ): Promise; + /** + * Awaits the `asyncFn` promise or, if `asyncFn` is a function, immediately + * calls the function and awaits the returned promise to complete. It will then + * check that the promise is not rejected. + * + * If `asyncFn` is a function and it throws an error synchronously, `assert.doesNotReject()` will return a rejected `Promise` with that error. If + * the function does not return a promise, `assert.doesNotReject()` will return a + * rejected `Promise` with an [ERR_INVALID_RETURN_VALUE](https://nodejs.org/docs/latest-v22.x/api/errors.html#err_invalid_return_value) error. In both cases + * the error handler is skipped. + * + * Using `assert.doesNotReject()` is actually not useful because there is little + * benefit in catching a rejection and then rejecting it again. Instead, consider + * adding a comment next to the specific code path that should not reject and keep + * error messages as expressive as possible. + * + * If specified, `error` can be a [`Class`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Classes), + * [`RegExp`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions), or a validation + * function. See {@link throws} for more details. + * + * Besides the async nature to await the completion behaves identically to {@link doesNotThrow}. + * + * ```js + * import assert from 'node:assert/strict'; + * + * await assert.doesNotReject( + * async () => { + * throw new TypeError('Wrong value'); + * }, + * SyntaxError, + * ); + * ``` + * + * ```js + * import assert from 'node:assert/strict'; + * + * assert.doesNotReject(Promise.reject(new TypeError('Wrong value'))) + * .then(() => { + * // ... + * }); + * ``` + * @since v10.0.0 + */ + function doesNotReject( + block: (() => Promise) | Promise, + message?: string | Error, + ): Promise; + function doesNotReject( + block: (() => Promise) | Promise, + error: AssertPredicate, + message?: string | Error, + ): Promise; + /** + * Expects the `string` input to match the regular expression. + * + * ```js + * import assert from 'node:assert/strict'; + * + * assert.match('I will fail', /pass/); + * // AssertionError [ERR_ASSERTION]: The input did not match the regular ... + * + * assert.match(123, /pass/); + * // AssertionError [ERR_ASSERTION]: The "string" argument must be of type string. + * + * assert.match('I will pass', /pass/); + * // OK + * ``` + * + * If the values do not match, or if the `string` argument is of another type than `string`, an `{@link AssertionError}` is thrown with a `message` property set equal + * to the value of the `message` parameter. If the `message` parameter is + * undefined, a default error message is assigned. If the `message` parameter is an + * instance of an [Error](https://nodejs.org/docs/latest-v22.x/api/errors.html#class-error) then it will be thrown instead of the `{@link AssertionError}`. + * @since v13.6.0, v12.16.0 + */ + function match(value: string, regExp: RegExp, message?: string | Error): void; + /** + * Expects the `string` input not to match the regular expression. + * + * ```js + * import assert from 'node:assert/strict'; + * + * assert.doesNotMatch('I will fail', /fail/); + * // AssertionError [ERR_ASSERTION]: The input was expected to not match the ... 
+ * + * assert.doesNotMatch(123, /pass/); + * // AssertionError [ERR_ASSERTION]: The "string" argument must be of type string. + * + * assert.doesNotMatch('I will pass', /different/); + * // OK + * ``` + * + * If the values do match, or if the `string` argument is of another type than `string`, an `{@link AssertionError}` is thrown with a `message` property set equal + * to the value of the `message` parameter. If the `message` parameter is + * undefined, a default error message is assigned. If the `message` parameter is an + * instance of an [Error](https://nodejs.org/docs/latest-v22.x/api/errors.html#class-error) then it will be thrown instead of the `{@link AssertionError}`. + * @since v13.6.0, v12.16.0 + */ + function doesNotMatch(value: string, regExp: RegExp, message?: string | Error): void; + /** + * In strict assertion mode, non-strict methods behave like their corresponding strict methods. For example, + * {@link deepEqual} will behave like {@link deepStrictEqual}. + * + * In strict assertion mode, error messages for objects display a diff. In legacy assertion mode, error + * messages for objects display the objects, often truncated. + * + * To use strict assertion mode: + * + * ```js + * import { strict as assert } from 'node:assert'; + * import assert from 'node:assert/strict'; + * ``` + * + * Example error diff: + * + * ```js + * import { strict as assert } from 'node:assert'; + * + * assert.deepEqual([[[1, 2, 3]], 4, 5], [[[1, 2, '3']], 4, 5]); + * // AssertionError: Expected inputs to be strictly deep-equal: + * // + actual - expected ... Lines skipped + * // + * // [ + * // [ + * // ... + * // 2, + * // + 3 + * // - '3' + * // ], + * // ... + * // 5 + * // ] + * ``` + * + * To deactivate the colors, use the `NO_COLOR` or `NODE_DISABLE_COLORS` environment variables. This will also + * deactivate the colors in the REPL. For more on color support in terminal environments, read the tty + * `getColorDepth()` documentation. + * + * @since v15.0.0, v13.9.0, v12.16.2, v9.9.0 + */ + namespace strict { + type AssertionError = assert.AssertionError; + type AssertPredicate = assert.AssertPredicate; + type CallTrackerCall = assert.CallTrackerCall; + type CallTrackerReportInformation = assert.CallTrackerReportInformation; + } + const strict: + & Omit< + typeof assert, + | "equal" + | "notEqual" + | "deepEqual" + | "notDeepEqual" + | "ok" + | "strictEqual" + | "deepStrictEqual" + | "ifError" + | "strict" + > + & { + (value: unknown, message?: string | Error): asserts value; + equal: typeof strictEqual; + notEqual: typeof notStrictEqual; + deepEqual: typeof deepStrictEqual; + notDeepEqual: typeof notDeepStrictEqual; + // Mapped types and assertion functions are incompatible? + // TS2775: Assertions require every name in the call target + // to be declared with an explicit type annotation. 
+ ok: typeof ok; + strictEqual: typeof strictEqual; + deepStrictEqual: typeof deepStrictEqual; + ifError: typeof ifError; + strict: typeof strict; + }; + } + export = assert; +} +declare module "node:assert" { + import assert = require("assert"); + export = assert; +} diff --git a/node_modules/@types/node-fetch/node_modules/@types/node/assert/strict.d.ts b/node_modules/@types/node-fetch/node_modules/@types/node/assert/strict.d.ts new file mode 100644 index 0000000..f333913 --- /dev/null +++ b/node_modules/@types/node-fetch/node_modules/@types/node/assert/strict.d.ts @@ -0,0 +1,8 @@ +declare module "assert/strict" { + import { strict } from "node:assert"; + export = strict; +} +declare module "node:assert/strict" { + import { strict } from "node:assert"; + export = strict; +} diff --git a/node_modules/@types/node-fetch/node_modules/@types/node/async_hooks.d.ts b/node_modules/@types/node-fetch/node_modules/@types/node/async_hooks.d.ts new file mode 100644 index 0000000..4d6df81 --- /dev/null +++ b/node_modules/@types/node-fetch/node_modules/@types/node/async_hooks.d.ts @@ -0,0 +1,541 @@ +/** + * We strongly discourage the use of the `async_hooks` API. + * Other APIs that can cover most of its use cases include: + * + * * [`AsyncLocalStorage`](https://nodejs.org/docs/latest-v22.x/api/async_context.html#class-asynclocalstorage) tracks async context + * * [`process.getActiveResourcesInfo()`](https://nodejs.org/docs/latest-v22.x/api/process.html#processgetactiveresourcesinfo) tracks active resources + * + * The `node:async_hooks` module provides an API to track asynchronous resources. + * It can be accessed using: + * + * ```js + * import async_hooks from 'node:async_hooks'; + * ``` + * @experimental + * @see [source](https://github.com/nodejs/node/blob/v22.x/lib/async_hooks.js) + */ +declare module "async_hooks" { + /** + * ```js + * import { executionAsyncId } from 'node:async_hooks'; + * import fs from 'node:fs'; + * + * console.log(executionAsyncId()); // 1 - bootstrap + * const path = '.'; + * fs.open(path, 'r', (err, fd) => { + * console.log(executionAsyncId()); // 6 - open() + * }); + * ``` + * + * The ID returned from `executionAsyncId()` is related to execution timing, not + * causality (which is covered by `triggerAsyncId()`): + * + * ```js + * const server = net.createServer((conn) => { + * // Returns the ID of the server, not of the new connection, because the + * // callback runs in the execution scope of the server's MakeCallback(). + * async_hooks.executionAsyncId(); + * + * }).listen(port, () => { + * // Returns the ID of a TickObject (process.nextTick()) because all + * // callbacks passed to .listen() are wrapped in a nextTick(). + * async_hooks.executionAsyncId(); + * }); + * ``` + * + * Promise contexts may not get precise `executionAsyncIds` by default. + * See the section on [promise execution tracking](https://nodejs.org/docs/latest-v22.x/api/async_hooks.html#promise-execution-tracking). + * @since v8.1.0 + * @return The `asyncId` of the current execution context. Useful to track when something calls. + */ + function executionAsyncId(): number; + /** + * Resource objects returned by `executionAsyncResource()` are most often internal + * Node.js handle objects with undocumented APIs. Using any functions or properties + * on the object is likely to crash your application and should be avoided. 
+ * + * Using `executionAsyncResource()` in the top-level execution context will + * return an empty object as there is no handle or request object to use, + * but having an object representing the top-level can be helpful. + * + * ```js + * import { open } from 'node:fs'; + * import { executionAsyncId, executionAsyncResource } from 'node:async_hooks'; + * + * console.log(executionAsyncId(), executionAsyncResource()); // 1 {} + * open(new URL(import.meta.url), 'r', (err, fd) => { + * console.log(executionAsyncId(), executionAsyncResource()); // 7 FSReqWrap + * }); + * ``` + * + * This can be used to implement continuation local storage without the + * use of a tracking `Map` to store the metadata: + * + * ```js + * import { createServer } from 'node:http'; + * import { + * executionAsyncId, + * executionAsyncResource, + * createHook, + * } from 'node:async_hooks'; + * const sym = Symbol('state'); // Private symbol to avoid pollution + * + * createHook({ + * init(asyncId, type, triggerAsyncId, resource) { + * const cr = executionAsyncResource(); + * if (cr) { + * resource[sym] = cr[sym]; + * } + * }, + * }).enable(); + * + * const server = createServer((req, res) => { + * executionAsyncResource()[sym] = { state: req.url }; + * setTimeout(function() { + * res.end(JSON.stringify(executionAsyncResource()[sym])); + * }, 100); + * }).listen(3000); + * ``` + * @since v13.9.0, v12.17.0 + * @return The resource representing the current execution. Useful to store data within the resource. + */ + function executionAsyncResource(): object; + /** + * ```js + * const server = net.createServer((conn) => { + * // The resource that caused (or triggered) this callback to be called + * // was that of the new connection. Thus the return value of triggerAsyncId() + * // is the asyncId of "conn". + * async_hooks.triggerAsyncId(); + * + * }).listen(port, () => { + * // Even though all callbacks passed to .listen() are wrapped in a nextTick() + * // the callback itself exists because the call to the server's .listen() + * // was made. So the return value would be the ID of the server. + * async_hooks.triggerAsyncId(); + * }); + * ``` + * + * Promise contexts may not get valid `triggerAsyncId`s by default. See + * the section on [promise execution tracking](https://nodejs.org/docs/latest-v22.x/api/async_hooks.html#promise-execution-tracking). + * @return The ID of the resource responsible for calling the callback that is currently being executed. + */ + function triggerAsyncId(): number; + interface HookCallbacks { + /** + * Called when a class is constructed that has the possibility to emit an asynchronous event. + * @param asyncId A unique ID for the async resource + * @param type The type of the async resource + * @param triggerAsyncId The unique ID of the async resource in whose execution context this async resource was created + * @param resource Reference to the resource representing the async operation, needs to be released during destroy + */ + init?(asyncId: number, type: string, triggerAsyncId: number, resource: object): void; + /** + * When an asynchronous operation is initiated or completes a callback is called to notify the user. + * The before callback is called just before said callback is executed. + * @param asyncId the unique identifier assigned to the resource about to execute the callback. + */ + before?(asyncId: number): void; + /** + * Called immediately after the callback specified in `before` is completed. 
+ * + * If an uncaught exception occurs during execution of the callback, then `after` will run after the `'uncaughtException'` event is emitted or a `domain`'s handler runs. + * @param asyncId the unique identifier assigned to the resource which has executed the callback. + */ + after?(asyncId: number): void; + /** + * Called when a promise has resolve() called. This may not be in the same execution id + * as the promise itself. + * @param asyncId the unique id for the promise that was resolve()d. + */ + promiseResolve?(asyncId: number): void; + /** + * Called after the resource corresponding to asyncId is destroyed + * @param asyncId a unique ID for the async resource + */ + destroy?(asyncId: number): void; + } + interface AsyncHook { + /** + * Enable the callbacks for a given AsyncHook instance. If no callbacks are provided enabling is a noop. + */ + enable(): this; + /** + * Disable the callbacks for a given AsyncHook instance from the global pool of AsyncHook callbacks to be executed. Once a hook has been disabled it will not be called again until enabled. + */ + disable(): this; + } + /** + * Registers functions to be called for different lifetime events of each async + * operation. + * + * The callbacks `init()`/`before()`/`after()`/`destroy()` are called for the + * respective asynchronous event during a resource's lifetime. + * + * All callbacks are optional. For example, if only resource cleanup needs to + * be tracked, then only the `destroy` callback needs to be passed. The + * specifics of all functions that can be passed to `callbacks` is in the `Hook Callbacks` section. + * + * ```js + * import { createHook } from 'node:async_hooks'; + * + * const asyncHook = createHook({ + * init(asyncId, type, triggerAsyncId, resource) { }, + * destroy(asyncId) { }, + * }); + * ``` + * + * The callbacks will be inherited via the prototype chain: + * + * ```js + * class MyAsyncCallbacks { + * init(asyncId, type, triggerAsyncId, resource) { } + * destroy(asyncId) {} + * } + * + * class MyAddedCallbacks extends MyAsyncCallbacks { + * before(asyncId) { } + * after(asyncId) { } + * } + * + * const asyncHook = async_hooks.createHook(new MyAddedCallbacks()); + * ``` + * + * Because promises are asynchronous resources whose lifecycle is tracked + * via the async hooks mechanism, the `init()`, `before()`, `after()`, and`destroy()` callbacks _must not_ be async functions that return promises. + * @since v8.1.0 + * @param callbacks The `Hook Callbacks` to register + * @return Instance used for disabling and enabling hooks + */ + function createHook(callbacks: HookCallbacks): AsyncHook; + interface AsyncResourceOptions { + /** + * The ID of the execution context that created this async event. + * @default executionAsyncId() + */ + triggerAsyncId?: number | undefined; + /** + * Disables automatic `emitDestroy` when the object is garbage collected. + * This usually does not need to be set (even if `emitDestroy` is called + * manually), unless the resource's `asyncId` is retrieved and the + * sensitive API's `emitDestroy` is called with it. + * @default false + */ + requireManualDestroy?: boolean | undefined; + } + /** + * The class `AsyncResource` is designed to be extended by the embedder's async + * resources. Using this, users can easily trigger the lifetime events of their + * own resources. + * + * The `init` hook will trigger when an `AsyncResource` is instantiated. + * + * The following is an overview of the `AsyncResource` API. 
+ * + * ```js + * import { AsyncResource, executionAsyncId } from 'node:async_hooks'; + * + * // AsyncResource() is meant to be extended. Instantiating a + * // new AsyncResource() also triggers init. If triggerAsyncId is omitted then + * // async_hook.executionAsyncId() is used. + * const asyncResource = new AsyncResource( + * type, { triggerAsyncId: executionAsyncId(), requireManualDestroy: false }, + * ); + * + * // Run a function in the execution context of the resource. This will + * // * establish the context of the resource + * // * trigger the AsyncHooks before callbacks + * // * call the provided function `fn` with the supplied arguments + * // * trigger the AsyncHooks after callbacks + * // * restore the original execution context + * asyncResource.runInAsyncScope(fn, thisArg, ...args); + * + * // Call AsyncHooks destroy callbacks. + * asyncResource.emitDestroy(); + * + * // Return the unique ID assigned to the AsyncResource instance. + * asyncResource.asyncId(); + * + * // Return the trigger ID for the AsyncResource instance. + * asyncResource.triggerAsyncId(); + * ``` + */ + class AsyncResource { + /** + * AsyncResource() is meant to be extended. Instantiating a + * new AsyncResource() also triggers init. If triggerAsyncId is omitted then + * async_hook.executionAsyncId() is used. + * @param type The type of async event. + * @param triggerAsyncId The ID of the execution context that created + * this async event (default: `executionAsyncId()`), or an + * AsyncResourceOptions object (since v9.3.0) + */ + constructor(type: string, triggerAsyncId?: number | AsyncResourceOptions); + /** + * Binds the given function to the current execution context. + * @since v14.8.0, v12.19.0 + * @param fn The function to bind to the current execution context. + * @param type An optional name to associate with the underlying `AsyncResource`. + */ + static bind any, ThisArg>( + fn: Func, + type?: string, + thisArg?: ThisArg, + ): Func; + /** + * Binds the given function to execute to this `AsyncResource`'s scope. + * @since v14.8.0, v12.19.0 + * @param fn The function to bind to the current `AsyncResource`. + */ + bind any>(fn: Func): Func; + /** + * Call the provided function with the provided arguments in the execution context + * of the async resource. This will establish the context, trigger the AsyncHooks + * before callbacks, call the function, trigger the AsyncHooks after callbacks, and + * then restore the original execution context. + * @since v9.6.0 + * @param fn The function to call in the execution context of this async resource. + * @param thisArg The receiver to be used for the function call. + * @param args Optional arguments to pass to the function. + */ + runInAsyncScope( + fn: (this: This, ...args: any[]) => Result, + thisArg?: This, + ...args: any[] + ): Result; + /** + * Call all `destroy` hooks. This should only ever be called once. An error will + * be thrown if it is called more than once. This **must** be manually called. If + * the resource is left to be collected by the GC then the `destroy` hooks will + * never be called. + * @return A reference to `asyncResource`. + */ + emitDestroy(): this; + /** + * @return The unique `asyncId` assigned to the resource. + */ + asyncId(): number; + /** + * @return The same `triggerAsyncId` that is passed to the `AsyncResource` constructor. + */ + triggerAsyncId(): number; + } + /** + * This class creates stores that stay coherent through asynchronous operations. 
+ * + * While you can create your own implementation on top of the `node:async_hooks` module, `AsyncLocalStorage` should be preferred as it is a performant and memory + * safe implementation that involves significant optimizations that are non-obvious + * to implement. + * + * The following example uses `AsyncLocalStorage` to build a simple logger + * that assigns IDs to incoming HTTP requests and includes them in messages + * logged within each request. + * + * ```js + * import http from 'node:http'; + * import { AsyncLocalStorage } from 'node:async_hooks'; + * + * const asyncLocalStorage = new AsyncLocalStorage(); + * + * function logWithId(msg) { + * const id = asyncLocalStorage.getStore(); + * console.log(`${id !== undefined ? id : '-'}:`, msg); + * } + * + * let idSeq = 0; + * http.createServer((req, res) => { + * asyncLocalStorage.run(idSeq++, () => { + * logWithId('start'); + * // Imagine any chain of async operations here + * setImmediate(() => { + * logWithId('finish'); + * res.end(); + * }); + * }); + * }).listen(8080); + * + * http.get('http://localhost:8080'); + * http.get('http://localhost:8080'); + * // Prints: + * // 0: start + * // 1: start + * // 0: finish + * // 1: finish + * ``` + * + * Each instance of `AsyncLocalStorage` maintains an independent storage context. + * Multiple instances can safely exist simultaneously without risk of interfering + * with each other's data. + * @since v13.10.0, v12.17.0 + */ + class AsyncLocalStorage { + /** + * Binds the given function to the current execution context. + * @since v19.8.0 + * @experimental + * @param fn The function to bind to the current execution context. + * @return A new function that calls `fn` within the captured execution context. + */ + static bind any>(fn: Func): Func; + /** + * Captures the current execution context and returns a function that accepts a + * function as an argument. Whenever the returned function is called, it + * calls the function passed to it within the captured context. + * + * ```js + * const asyncLocalStorage = new AsyncLocalStorage(); + * const runInAsyncScope = asyncLocalStorage.run(123, () => AsyncLocalStorage.snapshot()); + * const result = asyncLocalStorage.run(321, () => runInAsyncScope(() => asyncLocalStorage.getStore())); + * console.log(result); // returns 123 + * ``` + * + * AsyncLocalStorage.snapshot() can replace the use of AsyncResource for simple + * async context tracking purposes, for example: + * + * ```js + * class Foo { + * #runInAsyncScope = AsyncLocalStorage.snapshot(); + * + * get() { return this.#runInAsyncScope(() => asyncLocalStorage.getStore()); } + * } + * + * const foo = asyncLocalStorage.run(123, () => new Foo()); + * console.log(asyncLocalStorage.run(321, () => foo.get())); // returns 123 + * ``` + * @since v19.8.0 + * @experimental + * @return A new function with the signature `(fn: (...args) : R, ...args) : R`. + */ + static snapshot(): (fn: (...args: TArgs) => R, ...args: TArgs) => R; + /** + * Disables the instance of `AsyncLocalStorage`. All subsequent calls + * to `asyncLocalStorage.getStore()` will return `undefined` until `asyncLocalStorage.run()` or `asyncLocalStorage.enterWith()` is called again. + * + * When calling `asyncLocalStorage.disable()`, all current contexts linked to the + * instance will be exited. + * + * Calling `asyncLocalStorage.disable()` is required before the `asyncLocalStorage` can be garbage collected. 
This does not apply to stores + * provided by the `asyncLocalStorage`, as those objects are garbage collected + * along with the corresponding async resources. + * + * Use this method when the `asyncLocalStorage` is not in use anymore + * in the current process. + * @since v13.10.0, v12.17.0 + * @experimental + */ + disable(): void; + /** + * Returns the current store. + * If called outside of an asynchronous context initialized by + * calling `asyncLocalStorage.run()` or `asyncLocalStorage.enterWith()`, it + * returns `undefined`. + * @since v13.10.0, v12.17.0 + */ + getStore(): T | undefined; + /** + * Runs a function synchronously within a context and returns its + * return value. The store is not accessible outside of the callback function. + * The store is accessible to any asynchronous operations created within the + * callback. + * + * The optional `args` are passed to the callback function. + * + * If the callback function throws an error, the error is thrown by `run()` too. + * The stacktrace is not impacted by this call and the context is exited. + * + * Example: + * + * ```js + * const store = { id: 2 }; + * try { + * asyncLocalStorage.run(store, () => { + * asyncLocalStorage.getStore(); // Returns the store object + * setTimeout(() => { + * asyncLocalStorage.getStore(); // Returns the store object + * }, 200); + * throw new Error(); + * }); + * } catch (e) { + * asyncLocalStorage.getStore(); // Returns undefined + * // The error will be caught here + * } + * ``` + * @since v13.10.0, v12.17.0 + */ + run(store: T, callback: () => R): R; + run(store: T, callback: (...args: TArgs) => R, ...args: TArgs): R; + /** + * Runs a function synchronously outside of a context and returns its + * return value. The store is not accessible within the callback function or + * the asynchronous operations created within the callback. Any `getStore()` call done within the callback function will always return `undefined`. + * + * The optional `args` are passed to the callback function. + * + * If the callback function throws an error, the error is thrown by `exit()` too. + * The stacktrace is not impacted by this call and the context is re-entered. + * + * Example: + * + * ```js + * // Within a call to run + * try { + * asyncLocalStorage.getStore(); // Returns the store object or value + * asyncLocalStorage.exit(() => { + * asyncLocalStorage.getStore(); // Returns undefined + * throw new Error(); + * }); + * } catch (e) { + * asyncLocalStorage.getStore(); // Returns the same object or value + * // The error will be caught here + * } + * ``` + * @since v13.10.0, v12.17.0 + * @experimental + */ + exit(callback: (...args: TArgs) => R, ...args: TArgs): R; + /** + * Transitions into the context for the remainder of the current + * synchronous execution and then persists the store through any following + * asynchronous calls. + * + * Example: + * + * ```js + * const store = { id: 1 }; + * // Replaces previous store with the given store object + * asyncLocalStorage.enterWith(store); + * asyncLocalStorage.getStore(); // Returns the store object + * someAsyncOperation(() => { + * asyncLocalStorage.getStore(); // Returns the same object + * }); + * ``` + * + * This transition will continue for the _entire_ synchronous execution. + * This means that if, for example, the context is entered within an event + * handler subsequent event handlers will also run within that context unless + * specifically bound to another context with an `AsyncResource`. 
That is why `run()` should be preferred over `enterWith()` unless there are strong reasons + * to use the latter method. + * + * ```js + * const store = { id: 1 }; + * + * emitter.on('my-event', () => { + * asyncLocalStorage.enterWith(store); + * }); + * emitter.on('my-event', () => { + * asyncLocalStorage.getStore(); // Returns the same object + * }); + * + * asyncLocalStorage.getStore(); // Returns undefined + * emitter.emit('my-event'); + * asyncLocalStorage.getStore(); // Returns the same object + * ``` + * @since v13.11.0, v12.17.0 + * @experimental + */ + enterWith(store: T): void; + } +} +declare module "node:async_hooks" { + export * from "async_hooks"; +} diff --git a/node_modules/@types/node-fetch/node_modules/@types/node/buffer.buffer.d.ts b/node_modules/@types/node-fetch/node_modules/@types/node/buffer.buffer.d.ts new file mode 100644 index 0000000..0b6c49a --- /dev/null +++ b/node_modules/@types/node-fetch/node_modules/@types/node/buffer.buffer.d.ts @@ -0,0 +1,385 @@ +declare module "buffer" { + global { + interface BufferConstructor { + // see buffer.d.ts for implementation shared with all TypeScript versions + + /** + * Allocates a new buffer containing the given {str}. + * + * @param str String to store in buffer. + * @param encoding encoding to use, optional. Default is 'utf8' + * @deprecated since v10.0.0 - Use `Buffer.from(string[, encoding])` instead. + */ + new(str: string, encoding?: BufferEncoding): Buffer; + /** + * Allocates a new buffer of {size} octets. + * + * @param size count of octets to allocate. + * @deprecated since v10.0.0 - Use `Buffer.alloc()` instead (also see `Buffer.allocUnsafe()`). + */ + new(size: number): Buffer; + /** + * Allocates a new buffer containing the given {array} of octets. + * + * @param array The octets to store. + * @deprecated since v10.0.0 - Use `Buffer.from(array)` instead. + */ + new(array: Uint8Array): Buffer; + /** + * Produces a Buffer backed by the same allocated memory as + * the given {ArrayBuffer}/{SharedArrayBuffer}. + * + * @param arrayBuffer The ArrayBuffer with which to share memory. + * @deprecated since v10.0.0 - Use `Buffer.from(arrayBuffer[, byteOffset[, length]])` instead. + */ + new(arrayBuffer: TArrayBuffer): Buffer; + /** + * Allocates a new buffer containing the given {array} of octets. + * + * @param array The octets to store. + * @deprecated since v10.0.0 - Use `Buffer.from(array)` instead. + */ + new(array: readonly any[]): Buffer; + /** + * Copies the passed {buffer} data onto a new {Buffer} instance. + * + * @param buffer The buffer to copy. + * @deprecated since v10.0.0 - Use `Buffer.from(buffer)` instead. + */ + new(buffer: Buffer): Buffer; + /** + * Allocates a new `Buffer` using an `array` of bytes in the range `0` – `255`. + * Array entries outside that range will be truncated to fit into it. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * // Creates a new Buffer containing the UTF-8 bytes of the string 'buffer'. + * const buf = Buffer.from([0x62, 0x75, 0x66, 0x66, 0x65, 0x72]); + * ``` + * + * If `array` is an `Array`\-like object (that is, one with a `length` property of + * type `number`), it is treated as if it is an array, unless it is a `Buffer` or + * a `Uint8Array`. This means all other `TypedArray` variants get treated as an `Array`. To create a `Buffer` from the bytes backing a `TypedArray`, use `Buffer.copyBytesFrom()`. + * + * A `TypeError` will be thrown if `array` is not an `Array` or another type + * appropriate for `Buffer.from()` variants. 
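+ *
+ * As an illustrative sketch added here (not upstream Node.js documentation): a non-`Uint8Array`
+ * `TypedArray` is read as an array of element values, while `Buffer.copyBytesFrom()` copies the
+ * bytes backing it.
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const u16 = new Uint16Array([0x1234]);
+ *
+ * // Each element is truncated to a byte (0x1234 & 255 === 0x34).
+ * console.log(Buffer.from(u16)); // <Buffer 34>
+ *
+ * // Copies the two bytes backing the Uint16Array
+ * // (byte order follows the platform; little-endian shown here).
+ * console.log(Buffer.copyBytesFrom(u16)); // <Buffer 34 12>
+ * ```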
+ * + * `Buffer.from(array)` and `Buffer.from(string)` may also use the internal `Buffer` pool like `Buffer.allocUnsafe()` does. + * @since v5.10.0 + */ + from( + arrayBuffer: WithImplicitCoercion, + byteOffset?: number, + length?: number, + ): Buffer; + /** + * Creates a new Buffer using the passed {data} + * @param data data to create a new Buffer + */ + from(data: Uint8Array | readonly number[]): Buffer; + from(data: WithImplicitCoercion): Buffer; + /** + * Creates a new Buffer containing the given JavaScript string {str}. + * If provided, the {encoding} parameter identifies the character encoding. + * If not provided, {encoding} defaults to 'utf8'. + */ + from( + str: + | WithImplicitCoercion + | { + [Symbol.toPrimitive](hint: "string"): string; + }, + encoding?: BufferEncoding, + ): Buffer; + /** + * Creates a new Buffer using the passed {data} + * @param values to create a new Buffer + */ + of(...items: number[]): Buffer; + /** + * Returns a new `Buffer` which is the result of concatenating all the `Buffer` instances in the `list` together. + * + * If the list has no items, or if the `totalLength` is 0, then a new zero-length `Buffer` is returned. + * + * If `totalLength` is not provided, it is calculated from the `Buffer` instances + * in `list` by adding their lengths. + * + * If `totalLength` is provided, it is coerced to an unsigned integer. If the + * combined length of the `Buffer`s in `list` exceeds `totalLength`, the result is + * truncated to `totalLength`. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * // Create a single `Buffer` from a list of three `Buffer` instances. + * + * const buf1 = Buffer.alloc(10); + * const buf2 = Buffer.alloc(14); + * const buf3 = Buffer.alloc(18); + * const totalLength = buf1.length + buf2.length + buf3.length; + * + * console.log(totalLength); + * // Prints: 42 + * + * const bufA = Buffer.concat([buf1, buf2, buf3], totalLength); + * + * console.log(bufA); + * // Prints: + * console.log(bufA.length); + * // Prints: 42 + * ``` + * + * `Buffer.concat()` may also use the internal `Buffer` pool like `Buffer.allocUnsafe()` does. + * @since v0.7.11 + * @param list List of `Buffer` or {@link Uint8Array} instances to concatenate. + * @param totalLength Total length of the `Buffer` instances in `list` when concatenated. + */ + concat(list: readonly Uint8Array[], totalLength?: number): Buffer; + /** + * Copies the underlying memory of `view` into a new `Buffer`. + * + * ```js + * const u16 = new Uint16Array([0, 0xffff]); + * const buf = Buffer.copyBytesFrom(u16, 1, 1); + * u16[1] = 0; + * console.log(buf.length); // 2 + * console.log(buf[0]); // 255 + * console.log(buf[1]); // 255 + * ``` + * @since v19.8.0 + * @param view The {TypedArray} to copy. + * @param [offset=0] The starting offset within `view`. + * @param [length=view.length - offset] The number of elements from `view` to copy. + */ + copyBytesFrom(view: NodeJS.TypedArray, offset?: number, length?: number): Buffer; + /** + * Allocates a new `Buffer` of `size` bytes. If `fill` is `undefined`, the`Buffer` will be zero-filled. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.alloc(5); + * + * console.log(buf); + * // Prints: + * ``` + * + * If `size` is larger than {@link constants.MAX_LENGTH} or smaller than 0, `ERR_OUT_OF_RANGE` is thrown. + * + * If `fill` is specified, the allocated `Buffer` will be initialized by calling `buf.fill(fill)`. 
+ * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.alloc(5, 'a'); + * + * console.log(buf); + * // Prints: + * ``` + * + * If both `fill` and `encoding` are specified, the allocated `Buffer` will be + * initialized by calling `buf.fill(fill, encoding)`. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.alloc(11, 'aGVsbG8gd29ybGQ=', 'base64'); + * + * console.log(buf); + * // Prints: + * ``` + * + * Calling `Buffer.alloc()` can be measurably slower than the alternative `Buffer.allocUnsafe()` but ensures that the newly created `Buffer` instance + * contents will never contain sensitive data from previous allocations, including + * data that might not have been allocated for `Buffer`s. + * + * A `TypeError` will be thrown if `size` is not a number. + * @since v5.10.0 + * @param size The desired length of the new `Buffer`. + * @param [fill=0] A value to pre-fill the new `Buffer` with. + * @param [encoding='utf8'] If `fill` is a string, this is its encoding. + */ + alloc(size: number, fill?: string | Uint8Array | number, encoding?: BufferEncoding): Buffer; + /** + * Allocates a new `Buffer` of `size` bytes. If `size` is larger than {@link constants.MAX_LENGTH} or smaller than 0, `ERR_OUT_OF_RANGE` is thrown. + * + * The underlying memory for `Buffer` instances created in this way is _not_ + * _initialized_. The contents of the newly created `Buffer` are unknown and _may contain sensitive data_. Use `Buffer.alloc()` instead to initialize`Buffer` instances with zeroes. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.allocUnsafe(10); + * + * console.log(buf); + * // Prints (contents may vary): + * + * buf.fill(0); + * + * console.log(buf); + * // Prints: + * ``` + * + * A `TypeError` will be thrown if `size` is not a number. + * + * The `Buffer` module pre-allocates an internal `Buffer` instance of + * size `Buffer.poolSize` that is used as a pool for the fast allocation of new `Buffer` instances created using `Buffer.allocUnsafe()`, `Buffer.from(array)`, + * and `Buffer.concat()` only when `size` is less than `Buffer.poolSize >>> 1` (floor of `Buffer.poolSize` divided by two). + * + * Use of this pre-allocated internal memory pool is a key difference between + * calling `Buffer.alloc(size, fill)` vs. `Buffer.allocUnsafe(size).fill(fill)`. + * Specifically, `Buffer.alloc(size, fill)` will _never_ use the internal `Buffer`pool, while `Buffer.allocUnsafe(size).fill(fill)`_will_ use the internal`Buffer` pool if `size` is less + * than or equal to half `Buffer.poolSize`. The + * difference is subtle but can be important when an application requires the + * additional performance that `Buffer.allocUnsafe()` provides. + * @since v5.10.0 + * @param size The desired length of the new `Buffer`. + */ + allocUnsafe(size: number): Buffer; + /** + * Allocates a new `Buffer` of `size` bytes. If `size` is larger than {@link constants.MAX_LENGTH} or smaller than 0, `ERR_OUT_OF_RANGE` is thrown. A zero-length `Buffer` is created if + * `size` is 0. + * + * The underlying memory for `Buffer` instances created in this way is _not_ + * _initialized_. The contents of the newly created `Buffer` are unknown and _may contain sensitive data_. Use `buf.fill(0)` to initialize + * such `Buffer` instances with zeroes. + * + * When using `Buffer.allocUnsafe()` to allocate new `Buffer` instances, + * allocations under 4 KiB are sliced from a single pre-allocated `Buffer`. 
This + * allows applications to avoid the garbage collection overhead of creating many + * individually allocated `Buffer` instances. This approach improves both + * performance and memory usage by eliminating the need to track and clean up as + * many individual `ArrayBuffer` objects. + * + * However, in the case where a developer may need to retain a small chunk of + * memory from a pool for an indeterminate amount of time, it may be appropriate + * to create an un-pooled `Buffer` instance using `Buffer.allocUnsafeSlow()` and + * then copying out the relevant bits. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * // Need to keep around a few small chunks of memory. + * const store = []; + * + * socket.on('readable', () => { + * let data; + * while (null !== (data = readable.read())) { + * // Allocate for retained data. + * const sb = Buffer.allocUnsafeSlow(10); + * + * // Copy the data into the new allocation. + * data.copy(sb, 0, 0, 10); + * + * store.push(sb); + * } + * }); + * ``` + * + * A `TypeError` will be thrown if `size` is not a number. + * @since v5.12.0 + * @param size The desired length of the new `Buffer`. + */ + allocUnsafeSlow(size: number): Buffer; + } + interface Buffer extends Uint8Array { + // see buffer.d.ts for implementation shared with all TypeScript versions + + /** + * Returns a new `Buffer` that references the same memory as the original, but + * offset and cropped by the `start` and `end` indices. + * + * This method is not compatible with the `Uint8Array.prototype.slice()`, + * which is a superclass of `Buffer`. To copy the slice, use`Uint8Array.prototype.slice()`. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from('buffer'); + * + * const copiedBuf = Uint8Array.prototype.slice.call(buf); + * copiedBuf[0]++; + * console.log(copiedBuf.toString()); + * // Prints: cuffer + * + * console.log(buf.toString()); + * // Prints: buffer + * + * // With buf.slice(), the original buffer is modified. + * const notReallyCopiedBuf = buf.slice(); + * notReallyCopiedBuf[0]++; + * console.log(notReallyCopiedBuf.toString()); + * // Prints: cuffer + * console.log(buf.toString()); + * // Also prints: cuffer (!) + * ``` + * @since v0.3.0 + * @deprecated Use `subarray` instead. + * @param [start=0] Where the new `Buffer` will start. + * @param [end=buf.length] Where the new `Buffer` will end (not inclusive). + */ + slice(start?: number, end?: number): Buffer; + /** + * Returns a new `Buffer` that references the same memory as the original, but + * offset and cropped by the `start` and `end` indices. + * + * Specifying `end` greater than `buf.length` will return the same result as + * that of `end` equal to `buf.length`. + * + * This method is inherited from [`TypedArray.prototype.subarray()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/TypedArray/subarray). + * + * Modifying the new `Buffer` slice will modify the memory in the original `Buffer`because the allocated memory of the two objects overlap. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * // Create a `Buffer` with the ASCII alphabet, take a slice, and modify one byte + * // from the original `Buffer`. + * + * const buf1 = Buffer.allocUnsafe(26); + * + * for (let i = 0; i < 26; i++) { + * // 97 is the decimal ASCII value for 'a'. 
+ * buf1[i] = i + 97; + * } + * + * const buf2 = buf1.subarray(0, 3); + * + * console.log(buf2.toString('ascii', 0, buf2.length)); + * // Prints: abc + * + * buf1[0] = 33; + * + * console.log(buf2.toString('ascii', 0, buf2.length)); + * // Prints: !bc + * ``` + * + * Specifying negative indexes causes the slice to be generated relative to the + * end of `buf` rather than the beginning. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from('buffer'); + * + * console.log(buf.subarray(-6, -1).toString()); + * // Prints: buffe + * // (Equivalent to buf.subarray(0, 5).) + * + * console.log(buf.subarray(-6, -2).toString()); + * // Prints: buff + * // (Equivalent to buf.subarray(0, 4).) + * + * console.log(buf.subarray(-5, -2).toString()); + * // Prints: uff + * // (Equivalent to buf.subarray(1, 4).) + * ``` + * @since v3.0.0 + * @param [start=0] Where the new `Buffer` will start. + * @param [end=buf.length] Where the new `Buffer` will end (not inclusive). + */ + subarray(start?: number, end?: number): Buffer; + } + } +} diff --git a/node_modules/@types/node-fetch/node_modules/@types/node/buffer.d.ts b/node_modules/@types/node-fetch/node_modules/@types/node/buffer.d.ts new file mode 100644 index 0000000..792bf4d --- /dev/null +++ b/node_modules/@types/node-fetch/node_modules/@types/node/buffer.d.ts @@ -0,0 +1,1933 @@ +// If lib.dom.d.ts or lib.webworker.d.ts is loaded, then use the global types. +// Otherwise, use the types from node. +type _Blob = typeof globalThis extends { onmessage: any; Blob: any } ? {} : import("buffer").Blob; +type _File = typeof globalThis extends { onmessage: any; File: any } ? {} : import("buffer").File; + +/** + * `Buffer` objects are used to represent a fixed-length sequence of bytes. Many + * Node.js APIs support `Buffer`s. + * + * The `Buffer` class is a subclass of JavaScript's [`Uint8Array`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Uint8Array) class and + * extends it with methods that cover additional use cases. Node.js APIs accept + * plain [`Uint8Array`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Uint8Array) s wherever `Buffer`s are supported as well. + * + * While the `Buffer` class is available within the global scope, it is still + * recommended to explicitly reference it via an import or require statement. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * // Creates a zero-filled Buffer of length 10. + * const buf1 = Buffer.alloc(10); + * + * // Creates a Buffer of length 10, + * // filled with bytes which all have the value `1`. + * const buf2 = Buffer.alloc(10, 1); + * + * // Creates an uninitialized buffer of length 10. + * // This is faster than calling Buffer.alloc() but the returned + * // Buffer instance might contain old data that needs to be + * // overwritten using fill(), write(), or other functions that fill the Buffer's + * // contents. + * const buf3 = Buffer.allocUnsafe(10); + * + * // Creates a Buffer containing the bytes [1, 2, 3]. + * const buf4 = Buffer.from([1, 2, 3]); + * + * // Creates a Buffer containing the bytes [1, 1, 1, 1] – the entries + * // are all truncated using `(value & 255)` to fit into the range 0–255. 
+ * const buf5 = Buffer.from([257, 257.5, -255, '1']); + * + * // Creates a Buffer containing the UTF-8-encoded bytes for the string 'tést': + * // [0x74, 0xc3, 0xa9, 0x73, 0x74] (in hexadecimal notation) + * // [116, 195, 169, 115, 116] (in decimal notation) + * const buf6 = Buffer.from('tést'); + * + * // Creates a Buffer containing the Latin-1 bytes [0x74, 0xe9, 0x73, 0x74]. + * const buf7 = Buffer.from('tést', 'latin1'); + * ``` + * @see [source](https://github.com/nodejs/node/blob/v22.x/lib/buffer.js) + */ +declare module "buffer" { + import { BinaryLike } from "node:crypto"; + import { ReadableStream as WebReadableStream } from "node:stream/web"; + /** + * This function returns `true` if `input` contains only valid UTF-8-encoded data, + * including the case in which `input` is empty. + * + * Throws if the `input` is a detached array buffer. + * @since v19.4.0, v18.14.0 + * @param input The input to validate. + */ + export function isUtf8(input: Buffer | ArrayBuffer | NodeJS.TypedArray): boolean; + /** + * This function returns `true` if `input` contains only valid ASCII-encoded data, + * including the case in which `input` is empty. + * + * Throws if the `input` is a detached array buffer. + * @since v19.6.0, v18.15.0 + * @param input The input to validate. + */ + export function isAscii(input: Buffer | ArrayBuffer | NodeJS.TypedArray): boolean; + export const INSPECT_MAX_BYTES: number; + export const kMaxLength: number; + export const kStringMaxLength: number; + export const constants: { + MAX_LENGTH: number; + MAX_STRING_LENGTH: number; + }; + export type TranscodeEncoding = + | "ascii" + | "utf8" + | "utf-8" + | "utf16le" + | "utf-16le" + | "ucs2" + | "ucs-2" + | "latin1" + | "binary"; + /** + * Re-encodes the given `Buffer` or `Uint8Array` instance from one character + * encoding to another. Returns a new `Buffer` instance. + * + * Throws if the `fromEnc` or `toEnc` specify invalid character encodings or if + * conversion from `fromEnc` to `toEnc` is not permitted. + * + * Encodings supported by `buffer.transcode()` are: `'ascii'`, `'utf8'`, `'utf16le'`, `'ucs2'`, `'latin1'`, and `'binary'`. + * + * The transcoding process will use substitution characters if a given byte + * sequence cannot be adequately represented in the target encoding. For instance: + * + * ```js + * import { Buffer, transcode } from 'node:buffer'; + * + * const newBuf = transcode(Buffer.from('€'), 'utf8', 'ascii'); + * console.log(newBuf.toString('ascii')); + * // Prints: '?' + * ``` + * + * Because the Euro (`€`) sign is not representable in US-ASCII, it is replaced + * with `?` in the transcoded `Buffer`. + * @since v7.1.0 + * @param source A `Buffer` or `Uint8Array` instance. + * @param fromEnc The current encoding. + * @param toEnc To target encoding. + */ + export function transcode(source: Uint8Array, fromEnc: TranscodeEncoding, toEnc: TranscodeEncoding): Buffer; + export const SlowBuffer: { + /** @deprecated since v6.0.0, use `Buffer.allocUnsafeSlow()` */ + new(size: number): Buffer; + prototype: Buffer; + }; + /** + * Resolves a `'blob:nodedata:...'` an associated `Blob` object registered using + * a prior call to `URL.createObjectURL()`. + * @since v16.7.0 + * @experimental + * @param id A `'blob:nodedata:...` URL string returned by a prior call to `URL.createObjectURL()`. + */ + export function resolveObjectURL(id: string): Blob | undefined; + export { Buffer }; + /** + * @experimental + */ + export interface BlobOptions { + /** + * One of either `'transparent'` or `'native'`. 
When set to `'native'`, line endings in string source parts + * will be converted to the platform native line-ending as specified by `import { EOL } from 'node:os'`. + */ + endings?: "transparent" | "native"; + /** + * The Blob content-type. The intent is for `type` to convey + * the MIME media type of the data, however no validation of the type format + * is performed. + */ + type?: string | undefined; + } + /** + * A [`Blob`](https://developer.mozilla.org/en-US/docs/Web/API/Blob) encapsulates immutable, raw data that can be safely shared across + * multiple worker threads. + * @since v15.7.0, v14.18.0 + */ + export class Blob { + /** + * The total size of the `Blob` in bytes. + * @since v15.7.0, v14.18.0 + */ + readonly size: number; + /** + * The content-type of the `Blob`. + * @since v15.7.0, v14.18.0 + */ + readonly type: string; + /** + * Creates a new `Blob` object containing a concatenation of the given sources. + * + * {ArrayBuffer}, {TypedArray}, {DataView}, and {Buffer} sources are copied into + * the 'Blob' and can therefore be safely modified after the 'Blob' is created. + * + * String sources are also copied into the `Blob`. + */ + constructor(sources: Array, options?: BlobOptions); + /** + * Returns a promise that fulfills with an [ArrayBuffer](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer) containing a copy of + * the `Blob` data. + * @since v15.7.0, v14.18.0 + */ + arrayBuffer(): Promise; + /** + * The `blob.bytes()` method returns the byte of the `Blob` object as a `Promise`. + * + * ```js + * const blob = new Blob(['hello']); + * blob.bytes().then((bytes) => { + * console.log(bytes); // Outputs: Uint8Array(5) [ 104, 101, 108, 108, 111 ] + * }); + * ``` + */ + bytes(): Promise; + /** + * Creates and returns a new `Blob` containing a subset of this `Blob` objects + * data. The original `Blob` is not altered. + * @since v15.7.0, v14.18.0 + * @param start The starting index. + * @param end The ending index. + * @param type The content-type for the new `Blob` + */ + slice(start?: number, end?: number, type?: string): Blob; + /** + * Returns a promise that fulfills with the contents of the `Blob` decoded as a + * UTF-8 string. + * @since v15.7.0, v14.18.0 + */ + text(): Promise; + /** + * Returns a new `ReadableStream` that allows the content of the `Blob` to be read. + * @since v16.7.0 + */ + stream(): WebReadableStream; + } + export interface FileOptions { + /** + * One of either `'transparent'` or `'native'`. When set to `'native'`, line endings in string source parts will be + * converted to the platform native line-ending as specified by `import { EOL } from 'node:os'`. + */ + endings?: "native" | "transparent"; + /** The File content-type. */ + type?: string; + /** The last modified date of the file. `Default`: Date.now(). */ + lastModified?: number; + } + /** + * A [`File`](https://developer.mozilla.org/en-US/docs/Web/API/File) provides information about files. + * @since v19.2.0, v18.13.0 + */ + export class File extends Blob { + constructor(sources: Array, fileName: string, options?: FileOptions); + /** + * The name of the `File`. + * @since v19.2.0, v18.13.0 + */ + readonly name: string; + /** + * The last modified date of the `File`. 
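+ *
+ * A minimal usage sketch added here (not upstream Node.js documentation), showing that
+ * `lastModified` defaults to the construction time when it is not supplied in the options:
+ *
+ * ```js
+ * import { File } from 'node:buffer';
+ *
+ * const file = new File(['hello'], 'hello.txt', { type: 'text/plain' });
+ * console.log(file.name);         // 'hello.txt'
+ * console.log(file.lastModified); // a timestamp close to Date.now() at construction
+ * ```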
+ * @since v19.2.0, v18.13.0 + */ + readonly lastModified: number; + } + export import atob = globalThis.atob; + export import btoa = globalThis.btoa; + + global { + namespace NodeJS { + export { BufferEncoding }; + } + // Buffer class + type BufferEncoding = + | "ascii" + | "utf8" + | "utf-8" + | "utf16le" + | "utf-16le" + | "ucs2" + | "ucs-2" + | "base64" + | "base64url" + | "latin1" + | "binary" + | "hex"; + type WithImplicitCoercion = + | T + | { + valueOf(): T; + }; + /** + * Raw data is stored in instances of the Buffer class. + * A Buffer is similar to an array of integers but corresponds to a raw memory allocation outside the V8 heap. A Buffer cannot be resized. + * Valid string encodings: 'ascii'|'utf8'|'utf16le'|'ucs2'(alias of 'utf16le')|'base64'|'base64url'|'binary'(deprecated)|'hex' + */ + interface BufferConstructor { + // see buffer.buffer.d.ts for implementation specific to TypeScript 5.7 and later + // see ts5.6/buffer.buffer.d.ts for implementation specific to TypeScript 5.6 and earlier + + /** + * Returns `true` if `obj` is a `Buffer`, `false` otherwise. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * Buffer.isBuffer(Buffer.alloc(10)); // true + * Buffer.isBuffer(Buffer.from('foo')); // true + * Buffer.isBuffer('a string'); // false + * Buffer.isBuffer([]); // false + * Buffer.isBuffer(new Uint8Array(1024)); // false + * ``` + * @since v0.1.101 + */ + isBuffer(obj: any): obj is Buffer; + /** + * Returns `true` if `encoding` is the name of a supported character encoding, + * or `false` otherwise. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * console.log(Buffer.isEncoding('utf8')); + * // Prints: true + * + * console.log(Buffer.isEncoding('hex')); + * // Prints: true + * + * console.log(Buffer.isEncoding('utf/8')); + * // Prints: false + * + * console.log(Buffer.isEncoding('')); + * // Prints: false + * ``` + * @since v0.9.1 + * @param encoding A character encoding name to check. + */ + isEncoding(encoding: string): encoding is BufferEncoding; + /** + * Returns the byte length of a string when encoded using `encoding`. + * This is not the same as [`String.prototype.length`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/length), which does not account + * for the encoding that is used to convert the string into bytes. + * + * For `'base64'`, `'base64url'`, and `'hex'`, this function assumes valid input. + * For strings that contain non-base64/hex-encoded data (e.g. whitespace), the + * return value might be greater than the length of a `Buffer` created from the + * string. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const str = '\u00bd + \u00bc = \u00be'; + * + * console.log(`${str}: ${str.length} characters, ` + + * `${Buffer.byteLength(str, 'utf8')} bytes`); + * // Prints: ½ + ¼ = ¾: 9 characters, 12 bytes + * ``` + * + * When `string` is a + * `Buffer`/[`DataView`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/DataView)/[`TypedArray`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/- + * Reference/Global_Objects/TypedArray)/[`ArrayBuffer`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer)/[`SharedArrayBuffer`](https://develop- + * er.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/SharedArrayBuffer), the byte length as reported by `.byteLength`is returned. + * @since v0.1.90 + * @param string A value to calculate the length of. 
+ * @param [encoding='utf8'] If `string` is a string, this is its encoding. + * @return The number of bytes contained within `string`. + */ + byteLength( + string: string | Buffer | NodeJS.ArrayBufferView | ArrayBuffer | SharedArrayBuffer, + encoding?: BufferEncoding, + ): number; + /** + * Compares `buf1` to `buf2`, typically for the purpose of sorting arrays of `Buffer` instances. This is equivalent to calling `buf1.compare(buf2)`. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf1 = Buffer.from('1234'); + * const buf2 = Buffer.from('0123'); + * const arr = [buf1, buf2]; + * + * console.log(arr.sort(Buffer.compare)); + * // Prints: [ , ] + * // (This result is equal to: [buf2, buf1].) + * ``` + * @since v0.11.13 + * @return Either `-1`, `0`, or `1`, depending on the result of the comparison. See `compare` for details. + */ + compare(buf1: Uint8Array, buf2: Uint8Array): -1 | 0 | 1; + /** + * This is the size (in bytes) of pre-allocated internal `Buffer` instances used + * for pooling. This value may be modified. + * @since v0.11.3 + */ + poolSize: number; + } + interface Buffer { + // see buffer.buffer.d.ts for implementation specific to TypeScript 5.7 and later + // see ts5.6/buffer.buffer.d.ts for implementation specific to TypeScript 5.6 and earlier + + /** + * Writes `string` to `buf` at `offset` according to the character encoding in`encoding`. The `length` parameter is the number of bytes to write. If `buf` did + * not contain enough space to fit the entire string, only part of `string` will be + * written. However, partially encoded characters will not be written. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.alloc(256); + * + * const len = buf.write('\u00bd + \u00bc = \u00be', 0); + * + * console.log(`${len} bytes: ${buf.toString('utf8', 0, len)}`); + * // Prints: 12 bytes: ½ + ¼ = ¾ + * + * const buffer = Buffer.alloc(10); + * + * const length = buffer.write('abcd', 8); + * + * console.log(`${length} bytes: ${buffer.toString('utf8', 8, 10)}`); + * // Prints: 2 bytes : ab + * ``` + * @since v0.1.90 + * @param string String to write to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write `string`. + * @param [length=buf.length - offset] Maximum number of bytes to write (written bytes will not exceed `buf.length - offset`). + * @param [encoding='utf8'] The character encoding of `string`. + * @return Number of bytes written. + */ + write(string: string, encoding?: BufferEncoding): number; + write(string: string, offset: number, encoding?: BufferEncoding): number; + write(string: string, offset: number, length: number, encoding?: BufferEncoding): number; + /** + * Decodes `buf` to a string according to the specified character encoding in`encoding`. `start` and `end` may be passed to decode only a subset of `buf`. + * + * If `encoding` is `'utf8'` and a byte sequence in the input is not valid UTF-8, + * then each invalid byte is replaced with the replacement character `U+FFFD`. + * + * The maximum length of a string instance (in UTF-16 code units) is available + * as {@link constants.MAX_STRING_LENGTH}. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf1 = Buffer.allocUnsafe(26); + * + * for (let i = 0; i < 26; i++) { + * // 97 is the decimal ASCII value for 'a'. 
+ * buf1[i] = i + 97; + * } + * + * console.log(buf1.toString('utf8')); + * // Prints: abcdefghijklmnopqrstuvwxyz + * console.log(buf1.toString('utf8', 0, 5)); + * // Prints: abcde + * + * const buf2 = Buffer.from('tést'); + * + * console.log(buf2.toString('hex')); + * // Prints: 74c3a97374 + * console.log(buf2.toString('utf8', 0, 3)); + * // Prints: té + * console.log(buf2.toString(undefined, 0, 3)); + * // Prints: té + * ``` + * @since v0.1.90 + * @param [encoding='utf8'] The character encoding to use. + * @param [start=0] The byte offset to start decoding at. + * @param [end=buf.length] The byte offset to stop decoding at (not inclusive). + */ + toString(encoding?: BufferEncoding, start?: number, end?: number): string; + /** + * Returns a JSON representation of `buf`. [`JSON.stringify()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify) implicitly calls + * this function when stringifying a `Buffer` instance. + * + * `Buffer.from()` accepts objects in the format returned from this method. + * In particular, `Buffer.from(buf.toJSON())` works like `Buffer.from(buf)`. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from([0x1, 0x2, 0x3, 0x4, 0x5]); + * const json = JSON.stringify(buf); + * + * console.log(json); + * // Prints: {"type":"Buffer","data":[1,2,3,4,5]} + * + * const copy = JSON.parse(json, (key, value) => { + * return value && value.type === 'Buffer' ? + * Buffer.from(value) : + * value; + * }); + * + * console.log(copy); + * // Prints: + * ``` + * @since v0.9.2 + */ + toJSON(): { + type: "Buffer"; + data: number[]; + }; + /** + * Returns `true` if both `buf` and `otherBuffer` have exactly the same bytes,`false` otherwise. Equivalent to `buf.compare(otherBuffer) === 0`. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf1 = Buffer.from('ABC'); + * const buf2 = Buffer.from('414243', 'hex'); + * const buf3 = Buffer.from('ABCD'); + * + * console.log(buf1.equals(buf2)); + * // Prints: true + * console.log(buf1.equals(buf3)); + * // Prints: false + * ``` + * @since v0.11.13 + * @param otherBuffer A `Buffer` or {@link Uint8Array} with which to compare `buf`. + */ + equals(otherBuffer: Uint8Array): boolean; + /** + * Compares `buf` with `target` and returns a number indicating whether `buf`comes before, after, or is the same as `target` in sort order. + * Comparison is based on the actual sequence of bytes in each `Buffer`. + * + * * `0` is returned if `target` is the same as `buf` + * * `1` is returned if `target` should come _before_`buf` when sorted. + * * `-1` is returned if `target` should come _after_`buf` when sorted. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf1 = Buffer.from('ABC'); + * const buf2 = Buffer.from('BCD'); + * const buf3 = Buffer.from('ABCD'); + * + * console.log(buf1.compare(buf1)); + * // Prints: 0 + * console.log(buf1.compare(buf2)); + * // Prints: -1 + * console.log(buf1.compare(buf3)); + * // Prints: -1 + * console.log(buf2.compare(buf1)); + * // Prints: 1 + * console.log(buf2.compare(buf3)); + * // Prints: 1 + * console.log([buf1, buf2, buf3].sort(Buffer.compare)); + * // Prints: [ , , ] + * // (This result is equal to: [buf1, buf3, buf2].) + * ``` + * + * The optional `targetStart`, `targetEnd`, `sourceStart`, and `sourceEnd` arguments can be used to limit the comparison to specific ranges within `target` and `buf` respectively. 
+ * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf1 = Buffer.from([1, 2, 3, 4, 5, 6, 7, 8, 9]); + * const buf2 = Buffer.from([5, 6, 7, 8, 9, 1, 2, 3, 4]); + * + * console.log(buf1.compare(buf2, 5, 9, 0, 4)); + * // Prints: 0 + * console.log(buf1.compare(buf2, 0, 6, 4)); + * // Prints: -1 + * console.log(buf1.compare(buf2, 5, 6, 5)); + * // Prints: 1 + * ``` + * + * `ERR_OUT_OF_RANGE` is thrown if `targetStart < 0`, `sourceStart < 0`, `targetEnd > target.byteLength`, or `sourceEnd > source.byteLength`. + * @since v0.11.13 + * @param target A `Buffer` or {@link Uint8Array} with which to compare `buf`. + * @param [targetStart=0] The offset within `target` at which to begin comparison. + * @param [targetEnd=target.length] The offset within `target` at which to end comparison (not inclusive). + * @param [sourceStart=0] The offset within `buf` at which to begin comparison. + * @param [sourceEnd=buf.length] The offset within `buf` at which to end comparison (not inclusive). + */ + compare( + target: Uint8Array, + targetStart?: number, + targetEnd?: number, + sourceStart?: number, + sourceEnd?: number, + ): -1 | 0 | 1; + /** + * Copies data from a region of `buf` to a region in `target`, even if the `target`memory region overlaps with `buf`. + * + * [`TypedArray.prototype.set()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/TypedArray/set) performs the same operation, and is available + * for all TypedArrays, including Node.js `Buffer`s, although it takes + * different function arguments. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * // Create two `Buffer` instances. + * const buf1 = Buffer.allocUnsafe(26); + * const buf2 = Buffer.allocUnsafe(26).fill('!'); + * + * for (let i = 0; i < 26; i++) { + * // 97 is the decimal ASCII value for 'a'. + * buf1[i] = i + 97; + * } + * + * // Copy `buf1` bytes 16 through 19 into `buf2` starting at byte 8 of `buf2`. + * buf1.copy(buf2, 8, 16, 20); + * // This is equivalent to: + * // buf2.set(buf1.subarray(16, 20), 8); + * + * console.log(buf2.toString('ascii', 0, 25)); + * // Prints: !!!!!!!!qrst!!!!!!!!!!!!! + * ``` + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * // Create a `Buffer` and copy data from one region to an overlapping region + * // within the same `Buffer`. + * + * const buf = Buffer.allocUnsafe(26); + * + * for (let i = 0; i < 26; i++) { + * // 97 is the decimal ASCII value for 'a'. + * buf[i] = i + 97; + * } + * + * buf.copy(buf, 0, 4, 10); + * + * console.log(buf.toString()); + * // Prints: efghijghijklmnopqrstuvwxyz + * ``` + * @since v0.1.90 + * @param target A `Buffer` or {@link Uint8Array} to copy into. + * @param [targetStart=0] The offset within `target` at which to begin writing. + * @param [sourceStart=0] The offset within `buf` from which to begin copying. + * @param [sourceEnd=buf.length] The offset within `buf` at which to stop copying (not inclusive). + * @return The number of bytes copied. + */ + copy(target: Uint8Array, targetStart?: number, sourceStart?: number, sourceEnd?: number): number; + /** + * Writes `value` to `buf` at the specified `offset` as big-endian. + * + * `value` is interpreted and written as a two's complement signed integer. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.allocUnsafe(8); + * + * buf.writeBigInt64BE(0x0102030405060708n, 0); + * + * console.log(buf); + * // Prints: + * ``` + * @since v12.0.0, v10.20.0 + * @param value Number to be written to `buf`. 
+ * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy: `0 <= offset <= buf.length - 8`.
+ * @return `offset` plus the number of bytes written.
+ */
+ writeBigInt64BE(value: bigint, offset?: number): number;
+ /**
+ * Writes `value` to `buf` at the specified `offset` as little-endian.
+ *
+ * `value` is interpreted and written as a two's complement signed integer.
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const buf = Buffer.allocUnsafe(8);
+ *
+ * buf.writeBigInt64LE(0x0102030405060708n, 0);
+ *
+ * console.log(buf);
+ * // Prints: <Buffer 08 07 06 05 04 03 02 01>
+ * ```
+ * @since v12.0.0, v10.20.0
+ * @param value Number to be written to `buf`.
+ * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy: `0 <= offset <= buf.length - 8`.
+ * @return `offset` plus the number of bytes written.
+ */
+ writeBigInt64LE(value: bigint, offset?: number): number;
+ /**
+ * Writes `value` to `buf` at the specified `offset` as big-endian.
+ *
+ * This function is also available under the `writeBigUint64BE` alias.
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const buf = Buffer.allocUnsafe(8);
+ *
+ * buf.writeBigUInt64BE(0xdecafafecacefaden, 0);
+ *
+ * console.log(buf);
+ * // Prints: <Buffer de ca fa fe ca ce fa de>
+ * ```
+ * @since v12.0.0, v10.20.0
+ * @param value Number to be written to `buf`.
+ * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy: `0 <= offset <= buf.length - 8`.
+ * @return `offset` plus the number of bytes written.
+ */
+ writeBigUInt64BE(value: bigint, offset?: number): number;
+ /**
+ * @alias Buffer.writeBigUInt64BE
+ * @since v14.10.0, v12.19.0
+ */
+ writeBigUint64BE(value: bigint, offset?: number): number;
+ /**
+ * Writes `value` to `buf` at the specified `offset` as little-endian
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const buf = Buffer.allocUnsafe(8);
+ *
+ * buf.writeBigUInt64LE(0xdecafafecacefaden, 0);
+ *
+ * console.log(buf);
+ * // Prints: <Buffer de fa ce ca fe fa ca de>
+ * ```
+ *
+ * This function is also available under the `writeBigUint64LE` alias.
+ * @since v12.0.0, v10.20.0
+ * @param value Number to be written to `buf`.
+ * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy: `0 <= offset <= buf.length - 8`.
+ * @return `offset` plus the number of bytes written.
+ */
+ writeBigUInt64LE(value: bigint, offset?: number): number;
+ /**
+ * @alias Buffer.writeBigUInt64LE
+ * @since v14.10.0, v12.19.0
+ */
+ writeBigUint64LE(value: bigint, offset?: number): number;
+ /**
+ * Writes `byteLength` bytes of `value` to `buf` at the specified `offset`as little-endian. Supports up to 48 bits of accuracy. Behavior is undefined
+ * when `value` is anything other than an unsigned integer.
+ *
+ * This function is also available under the `writeUintLE` alias.
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const buf = Buffer.allocUnsafe(6);
+ *
+ * buf.writeUIntLE(0x1234567890ab, 0, 6);
+ *
+ * console.log(buf);
+ * // Prints: <Buffer ab 90 78 56 34 12>
+ * ```
+ * @since v0.5.5
+ * @param value Number to be written to `buf`.
+ * @param offset Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - byteLength`.
+ * @param byteLength Number of bytes to write. Must satisfy `0 < byteLength <= 6`.
+ * @return `offset` plus the number of bytes written.
+ */ + writeUIntLE(value: number, offset: number, byteLength: number): number; + /** + * @alias Buffer.writeUIntLE + * @since v14.9.0, v12.19.0 + */ + writeUintLE(value: number, offset: number, byteLength: number): number; + /** + * Writes `byteLength` bytes of `value` to `buf` at the specified `offset`as big-endian. Supports up to 48 bits of accuracy. Behavior is undefined + * when `value` is anything other than an unsigned integer. + * + * This function is also available under the `writeUintBE` alias. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.allocUnsafe(6); + * + * buf.writeUIntBE(0x1234567890ab, 0, 6); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.5.5 + * @param value Number to be written to `buf`. + * @param offset Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - byteLength`. + * @param byteLength Number of bytes to write. Must satisfy `0 < byteLength <= 6`. + * @return `offset` plus the number of bytes written. + */ + writeUIntBE(value: number, offset: number, byteLength: number): number; + /** + * @alias Buffer.writeUIntBE + * @since v14.9.0, v12.19.0 + */ + writeUintBE(value: number, offset: number, byteLength: number): number; + /** + * Writes `byteLength` bytes of `value` to `buf` at the specified `offset`as little-endian. Supports up to 48 bits of accuracy. Behavior is undefined + * when `value` is anything other than a signed integer. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.allocUnsafe(6); + * + * buf.writeIntLE(0x1234567890ab, 0, 6); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.11.15 + * @param value Number to be written to `buf`. + * @param offset Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - byteLength`. + * @param byteLength Number of bytes to write. Must satisfy `0 < byteLength <= 6`. + * @return `offset` plus the number of bytes written. + */ + writeIntLE(value: number, offset: number, byteLength: number): number; + /** + * Writes `byteLength` bytes of `value` to `buf` at the specified `offset`as big-endian. Supports up to 48 bits of accuracy. Behavior is undefined when`value` is anything other than a + * signed integer. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.allocUnsafe(6); + * + * buf.writeIntBE(0x1234567890ab, 0, 6); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.11.15 + * @param value Number to be written to `buf`. + * @param offset Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - byteLength`. + * @param byteLength Number of bytes to write. Must satisfy `0 < byteLength <= 6`. + * @return `offset` plus the number of bytes written. + */ + writeIntBE(value: number, offset: number, byteLength: number): number; + /** + * Reads an unsigned, big-endian 64-bit integer from `buf` at the specified`offset`. + * + * This function is also available under the `readBigUint64BE` alias. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from([0x00, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff]); + * + * console.log(buf.readBigUInt64BE(0)); + * // Prints: 4294967295n + * ``` + * @since v12.0.0, v10.20.0 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy: `0 <= offset <= buf.length - 8`. 
+ */ + readBigUInt64BE(offset?: number): bigint; + /** + * @alias Buffer.readBigUInt64BE + * @since v14.10.0, v12.19.0 + */ + readBigUint64BE(offset?: number): bigint; + /** + * Reads an unsigned, little-endian 64-bit integer from `buf` at the specified`offset`. + * + * This function is also available under the `readBigUint64LE` alias. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from([0x00, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff]); + * + * console.log(buf.readBigUInt64LE(0)); + * // Prints: 18446744069414584320n + * ``` + * @since v12.0.0, v10.20.0 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy: `0 <= offset <= buf.length - 8`. + */ + readBigUInt64LE(offset?: number): bigint; + /** + * @alias Buffer.readBigUInt64LE + * @since v14.10.0, v12.19.0 + */ + readBigUint64LE(offset?: number): bigint; + /** + * Reads a signed, big-endian 64-bit integer from `buf` at the specified `offset`. + * + * Integers read from a `Buffer` are interpreted as two's complement signed + * values. + * @since v12.0.0, v10.20.0 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy: `0 <= offset <= buf.length - 8`. + */ + readBigInt64BE(offset?: number): bigint; + /** + * Reads a signed, little-endian 64-bit integer from `buf` at the specified`offset`. + * + * Integers read from a `Buffer` are interpreted as two's complement signed + * values. + * @since v12.0.0, v10.20.0 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy: `0 <= offset <= buf.length - 8`. + */ + readBigInt64LE(offset?: number): bigint; + /** + * Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as an unsigned, little-endian integer supporting + * up to 48 bits of accuracy. + * + * This function is also available under the `readUintLE` alias. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from([0x12, 0x34, 0x56, 0x78, 0x90, 0xab]); + * + * console.log(buf.readUIntLE(0, 6).toString(16)); + * // Prints: ab9078563412 + * ``` + * @since v0.11.15 + * @param offset Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - byteLength`. + * @param byteLength Number of bytes to read. Must satisfy `0 < byteLength <= 6`. + */ + readUIntLE(offset: number, byteLength: number): number; + /** + * @alias Buffer.readUIntLE + * @since v14.9.0, v12.19.0 + */ + readUintLE(offset: number, byteLength: number): number; + /** + * Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as an unsigned big-endian integer supporting + * up to 48 bits of accuracy. + * + * This function is also available under the `readUintBE` alias. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from([0x12, 0x34, 0x56, 0x78, 0x90, 0xab]); + * + * console.log(buf.readUIntBE(0, 6).toString(16)); + * // Prints: 1234567890ab + * console.log(buf.readUIntBE(1, 6).toString(16)); + * // Throws ERR_OUT_OF_RANGE. + * ``` + * @since v0.11.15 + * @param offset Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - byteLength`. + * @param byteLength Number of bytes to read. Must satisfy `0 < byteLength <= 6`. 
+ */ + readUIntBE(offset: number, byteLength: number): number; + /** + * @alias Buffer.readUIntBE + * @since v14.9.0, v12.19.0 + */ + readUintBE(offset: number, byteLength: number): number; + /** + * Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as a little-endian, two's complement signed value + * supporting up to 48 bits of accuracy. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from([0x12, 0x34, 0x56, 0x78, 0x90, 0xab]); + * + * console.log(buf.readIntLE(0, 6).toString(16)); + * // Prints: -546f87a9cbee + * ``` + * @since v0.11.15 + * @param offset Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - byteLength`. + * @param byteLength Number of bytes to read. Must satisfy `0 < byteLength <= 6`. + */ + readIntLE(offset: number, byteLength: number): number; + /** + * Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as a big-endian, two's complement signed value + * supporting up to 48 bits of accuracy. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from([0x12, 0x34, 0x56, 0x78, 0x90, 0xab]); + * + * console.log(buf.readIntBE(0, 6).toString(16)); + * // Prints: 1234567890ab + * console.log(buf.readIntBE(1, 6).toString(16)); + * // Throws ERR_OUT_OF_RANGE. + * console.log(buf.readIntBE(1, 0).toString(16)); + * // Throws ERR_OUT_OF_RANGE. + * ``` + * @since v0.11.15 + * @param offset Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - byteLength`. + * @param byteLength Number of bytes to read. Must satisfy `0 < byteLength <= 6`. + */ + readIntBE(offset: number, byteLength: number): number; + /** + * Reads an unsigned 8-bit integer from `buf` at the specified `offset`. + * + * This function is also available under the `readUint8` alias. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from([1, -2]); + * + * console.log(buf.readUInt8(0)); + * // Prints: 1 + * console.log(buf.readUInt8(1)); + * // Prints: 254 + * console.log(buf.readUInt8(2)); + * // Throws ERR_OUT_OF_RANGE. + * ``` + * @since v0.5.0 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 1`. + */ + readUInt8(offset?: number): number; + /** + * @alias Buffer.readUInt8 + * @since v14.9.0, v12.19.0 + */ + readUint8(offset?: number): number; + /** + * Reads an unsigned, little-endian 16-bit integer from `buf` at the specified `offset`. + * + * This function is also available under the `readUint16LE` alias. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from([0x12, 0x34, 0x56]); + * + * console.log(buf.readUInt16LE(0).toString(16)); + * // Prints: 3412 + * console.log(buf.readUInt16LE(1).toString(16)); + * // Prints: 5634 + * console.log(buf.readUInt16LE(2).toString(16)); + * // Throws ERR_OUT_OF_RANGE. + * ``` + * @since v0.5.5 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 2`. + */ + readUInt16LE(offset?: number): number; + /** + * @alias Buffer.readUInt16LE + * @since v14.9.0, v12.19.0 + */ + readUint16LE(offset?: number): number; + /** + * Reads an unsigned, big-endian 16-bit integer from `buf` at the specified`offset`. + * + * This function is also available under the `readUint16BE` alias. 
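+ *
+ * As an illustrative sketch (not part of the upstream Node.js documentation), both
+ * spellings return the same value for the same bytes:
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const buf = Buffer.from([0x12, 0x34]);
+ *
+ * console.log(buf.readUInt16BE(0).toString(16));
+ * // Prints: 1234
+ * console.log(buf.readUInt16BE(0) === buf.readUint16BE(0));
+ * // Prints: true
+ * ```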
+ * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from([0x12, 0x34, 0x56]); + * + * console.log(buf.readUInt16BE(0).toString(16)); + * // Prints: 1234 + * console.log(buf.readUInt16BE(1).toString(16)); + * // Prints: 3456 + * ``` + * @since v0.5.5 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 2`. + */ + readUInt16BE(offset?: number): number; + /** + * @alias Buffer.readUInt16BE + * @since v14.9.0, v12.19.0 + */ + readUint16BE(offset?: number): number; + /** + * Reads an unsigned, little-endian 32-bit integer from `buf` at the specified`offset`. + * + * This function is also available under the `readUint32LE` alias. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from([0x12, 0x34, 0x56, 0x78]); + * + * console.log(buf.readUInt32LE(0).toString(16)); + * // Prints: 78563412 + * console.log(buf.readUInt32LE(1).toString(16)); + * // Throws ERR_OUT_OF_RANGE. + * ``` + * @since v0.5.5 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 4`. + */ + readUInt32LE(offset?: number): number; + /** + * @alias Buffer.readUInt32LE + * @since v14.9.0, v12.19.0 + */ + readUint32LE(offset?: number): number; + /** + * Reads an unsigned, big-endian 32-bit integer from `buf` at the specified`offset`. + * + * This function is also available under the `readUint32BE` alias. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from([0x12, 0x34, 0x56, 0x78]); + * + * console.log(buf.readUInt32BE(0).toString(16)); + * // Prints: 12345678 + * ``` + * @since v0.5.5 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 4`. + */ + readUInt32BE(offset?: number): number; + /** + * @alias Buffer.readUInt32BE + * @since v14.9.0, v12.19.0 + */ + readUint32BE(offset?: number): number; + /** + * Reads a signed 8-bit integer from `buf` at the specified `offset`. + * + * Integers read from a `Buffer` are interpreted as two's complement signed values. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from([-1, 5]); + * + * console.log(buf.readInt8(0)); + * // Prints: -1 + * console.log(buf.readInt8(1)); + * // Prints: 5 + * console.log(buf.readInt8(2)); + * // Throws ERR_OUT_OF_RANGE. + * ``` + * @since v0.5.0 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 1`. + */ + readInt8(offset?: number): number; + /** + * Reads a signed, little-endian 16-bit integer from `buf` at the specified`offset`. + * + * Integers read from a `Buffer` are interpreted as two's complement signed values. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from([0, 5]); + * + * console.log(buf.readInt16LE(0)); + * // Prints: 1280 + * console.log(buf.readInt16LE(1)); + * // Throws ERR_OUT_OF_RANGE. + * ``` + * @since v0.5.5 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 2`. + */ + readInt16LE(offset?: number): number; + /** + * Reads a signed, big-endian 16-bit integer from `buf` at the specified `offset`. + * + * Integers read from a `Buffer` are interpreted as two's complement signed values. 
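+ *
+ * For example, the same two bytes produce different results from the signed and
+ * unsigned readers (an illustrative sketch, not from the upstream documentation):
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * // 0xffff is the two's complement encoding of -1 in 16 bits.
+ * const buf = Buffer.from([0xff, 0xff]);
+ *
+ * console.log(buf.readInt16BE(0));
+ * // Prints: -1
+ * console.log(buf.readUInt16BE(0));
+ * // Prints: 65535
+ * ```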
+ * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from([0, 5]); + * + * console.log(buf.readInt16BE(0)); + * // Prints: 5 + * ``` + * @since v0.5.5 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 2`. + */ + readInt16BE(offset?: number): number; + /** + * Reads a signed, little-endian 32-bit integer from `buf` at the specified`offset`. + * + * Integers read from a `Buffer` are interpreted as two's complement signed values. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from([0, 0, 0, 5]); + * + * console.log(buf.readInt32LE(0)); + * // Prints: 83886080 + * console.log(buf.readInt32LE(1)); + * // Throws ERR_OUT_OF_RANGE. + * ``` + * @since v0.5.5 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 4`. + */ + readInt32LE(offset?: number): number; + /** + * Reads a signed, big-endian 32-bit integer from `buf` at the specified `offset`. + * + * Integers read from a `Buffer` are interpreted as two's complement signed values. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from([0, 0, 0, 5]); + * + * console.log(buf.readInt32BE(0)); + * // Prints: 5 + * ``` + * @since v0.5.5 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 4`. + */ + readInt32BE(offset?: number): number; + /** + * Reads a 32-bit, little-endian float from `buf` at the specified `offset`. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from([1, 2, 3, 4]); + * + * console.log(buf.readFloatLE(0)); + * // Prints: 1.539989614439558e-36 + * console.log(buf.readFloatLE(1)); + * // Throws ERR_OUT_OF_RANGE. + * ``` + * @since v0.11.15 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 4`. + */ + readFloatLE(offset?: number): number; + /** + * Reads a 32-bit, big-endian float from `buf` at the specified `offset`. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from([1, 2, 3, 4]); + * + * console.log(buf.readFloatBE(0)); + * // Prints: 2.387939260590663e-38 + * ``` + * @since v0.11.15 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 4`. + */ + readFloatBE(offset?: number): number; + /** + * Reads a 64-bit, little-endian double from `buf` at the specified `offset`. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from([1, 2, 3, 4, 5, 6, 7, 8]); + * + * console.log(buf.readDoubleLE(0)); + * // Prints: 5.447603722011605e-270 + * console.log(buf.readDoubleLE(1)); + * // Throws ERR_OUT_OF_RANGE. + * ``` + * @since v0.11.15 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 8`. + */ + readDoubleLE(offset?: number): number; + /** + * Reads a 64-bit, big-endian double from `buf` at the specified `offset`. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from([1, 2, 3, 4, 5, 6, 7, 8]); + * + * console.log(buf.readDoubleBE(0)); + * // Prints: 8.20788039913184e-304 + * ``` + * @since v0.11.15 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 8`. 
+ */ + readDoubleBE(offset?: number): number; + reverse(): this; + /** + * Interprets `buf` as an array of unsigned 16-bit integers and swaps the + * byte order _in-place_. Throws `ERR_INVALID_BUFFER_SIZE` if `buf.length` is not a multiple of 2. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf1 = Buffer.from([0x1, 0x2, 0x3, 0x4, 0x5, 0x6, 0x7, 0x8]); + * + * console.log(buf1); + * // Prints: + * + * buf1.swap16(); + * + * console.log(buf1); + * // Prints: + * + * const buf2 = Buffer.from([0x1, 0x2, 0x3]); + * + * buf2.swap16(); + * // Throws ERR_INVALID_BUFFER_SIZE. + * ``` + * + * One convenient use of `buf.swap16()` is to perform a fast in-place conversion + * between UTF-16 little-endian and UTF-16 big-endian: + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from('This is little-endian UTF-16', 'utf16le'); + * buf.swap16(); // Convert to big-endian UTF-16 text. + * ``` + * @since v5.10.0 + * @return A reference to `buf`. + */ + swap16(): this; + /** + * Interprets `buf` as an array of unsigned 32-bit integers and swaps the + * byte order _in-place_. Throws `ERR_INVALID_BUFFER_SIZE` if `buf.length` is not a multiple of 4. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf1 = Buffer.from([0x1, 0x2, 0x3, 0x4, 0x5, 0x6, 0x7, 0x8]); + * + * console.log(buf1); + * // Prints: + * + * buf1.swap32(); + * + * console.log(buf1); + * // Prints: + * + * const buf2 = Buffer.from([0x1, 0x2, 0x3]); + * + * buf2.swap32(); + * // Throws ERR_INVALID_BUFFER_SIZE. + * ``` + * @since v5.10.0 + * @return A reference to `buf`. + */ + swap32(): this; + /** + * Interprets `buf` as an array of 64-bit numbers and swaps byte order _in-place_. + * Throws `ERR_INVALID_BUFFER_SIZE` if `buf.length` is not a multiple of 8. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf1 = Buffer.from([0x1, 0x2, 0x3, 0x4, 0x5, 0x6, 0x7, 0x8]); + * + * console.log(buf1); + * // Prints: + * + * buf1.swap64(); + * + * console.log(buf1); + * // Prints: + * + * const buf2 = Buffer.from([0x1, 0x2, 0x3]); + * + * buf2.swap64(); + * // Throws ERR_INVALID_BUFFER_SIZE. + * ``` + * @since v6.3.0 + * @return A reference to `buf`. + */ + swap64(): this; + /** + * Writes `value` to `buf` at the specified `offset`. `value` must be a + * valid unsigned 8-bit integer. Behavior is undefined when `value` is anything + * other than an unsigned 8-bit integer. + * + * This function is also available under the `writeUint8` alias. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.allocUnsafe(4); + * + * buf.writeUInt8(0x3, 0); + * buf.writeUInt8(0x4, 1); + * buf.writeUInt8(0x23, 2); + * buf.writeUInt8(0x42, 3); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.5.0 + * @param value Number to be written to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 1`. + * @return `offset` plus the number of bytes written. + */ + writeUInt8(value: number, offset?: number): number; + /** + * @alias Buffer.writeUInt8 + * @since v14.9.0, v12.19.0 + */ + writeUint8(value: number, offset?: number): number; + /** + * Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid unsigned 16-bit integer. Behavior is undefined when `value` is + * anything other than an unsigned 16-bit integer. + * + * This function is also available under the `writeUint16LE` alias. 
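+ *
+ * Since behavior is undefined for out-of-range values, one defensive pattern is to
+ * validate the range before writing. The helper below is a hypothetical sketch and
+ * is not part of the `Buffer` API:
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * // Hypothetical helper: reject values outside the unsigned 16-bit range.
+ * function writeChecked16LE(buf, value, offset) {
+ *   if (!Number.isInteger(value) || value < 0 || value > 0xffff) {
+ *     throw new RangeError(`${value} does not fit in an unsigned 16-bit integer`);
+ *   }
+ *   return buf.writeUInt16LE(value, offset);
+ * }
+ *
+ * const buf = Buffer.allocUnsafe(2);
+ * writeChecked16LE(buf, 0xbeef, 0);
+ *
+ * console.log(buf.readUInt16LE(0).toString(16));
+ * // Prints: beef
+ * ```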
+ * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.allocUnsafe(4); + * + * buf.writeUInt16LE(0xdead, 0); + * buf.writeUInt16LE(0xbeef, 2); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.5.5 + * @param value Number to be written to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 2`. + * @return `offset` plus the number of bytes written. + */ + writeUInt16LE(value: number, offset?: number): number; + /** + * @alias Buffer.writeUInt16LE + * @since v14.9.0, v12.19.0 + */ + writeUint16LE(value: number, offset?: number): number; + /** + * Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid unsigned 16-bit integer. Behavior is undefined when `value`is anything other than an + * unsigned 16-bit integer. + * + * This function is also available under the `writeUint16BE` alias. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.allocUnsafe(4); + * + * buf.writeUInt16BE(0xdead, 0); + * buf.writeUInt16BE(0xbeef, 2); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.5.5 + * @param value Number to be written to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 2`. + * @return `offset` plus the number of bytes written. + */ + writeUInt16BE(value: number, offset?: number): number; + /** + * @alias Buffer.writeUInt16BE + * @since v14.9.0, v12.19.0 + */ + writeUint16BE(value: number, offset?: number): number; + /** + * Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid unsigned 32-bit integer. Behavior is undefined when `value` is + * anything other than an unsigned 32-bit integer. + * + * This function is also available under the `writeUint32LE` alias. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.allocUnsafe(4); + * + * buf.writeUInt32LE(0xfeedface, 0); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.5.5 + * @param value Number to be written to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 4`. + * @return `offset` plus the number of bytes written. + */ + writeUInt32LE(value: number, offset?: number): number; + /** + * @alias Buffer.writeUInt32LE + * @since v14.9.0, v12.19.0 + */ + writeUint32LE(value: number, offset?: number): number; + /** + * Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid unsigned 32-bit integer. Behavior is undefined when `value`is anything other than an + * unsigned 32-bit integer. + * + * This function is also available under the `writeUint32BE` alias. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.allocUnsafe(4); + * + * buf.writeUInt32BE(0xfeedface, 0); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.5.5 + * @param value Number to be written to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 4`. + * @return `offset` plus the number of bytes written. + */ + writeUInt32BE(value: number, offset?: number): number; + /** + * @alias Buffer.writeUInt32BE + * @since v14.9.0, v12.19.0 + */ + writeUint32BE(value: number, offset?: number): number; + /** + * Writes `value` to `buf` at the specified `offset`. `value` must be a valid + * signed 8-bit integer. 
Behavior is undefined when `value` is anything other than + * a signed 8-bit integer. + * + * `value` is interpreted and written as a two's complement signed integer. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.allocUnsafe(2); + * + * buf.writeInt8(2, 0); + * buf.writeInt8(-2, 1); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.5.0 + * @param value Number to be written to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 1`. + * @return `offset` plus the number of bytes written. + */ + writeInt8(value: number, offset?: number): number; + /** + * Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid signed 16-bit integer. Behavior is undefined when `value` is + * anything other than a signed 16-bit integer. + * + * The `value` is interpreted and written as a two's complement signed integer. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.allocUnsafe(2); + * + * buf.writeInt16LE(0x0304, 0); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.5.5 + * @param value Number to be written to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 2`. + * @return `offset` plus the number of bytes written. + */ + writeInt16LE(value: number, offset?: number): number; + /** + * Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid signed 16-bit integer. Behavior is undefined when `value` is + * anything other than a signed 16-bit integer. + * + * The `value` is interpreted and written as a two's complement signed integer. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.allocUnsafe(2); + * + * buf.writeInt16BE(0x0102, 0); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.5.5 + * @param value Number to be written to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 2`. + * @return `offset` plus the number of bytes written. + */ + writeInt16BE(value: number, offset?: number): number; + /** + * Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid signed 32-bit integer. Behavior is undefined when `value` is + * anything other than a signed 32-bit integer. + * + * The `value` is interpreted and written as a two's complement signed integer. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.allocUnsafe(4); + * + * buf.writeInt32LE(0x05060708, 0); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.5.5 + * @param value Number to be written to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 4`. + * @return `offset` plus the number of bytes written. + */ + writeInt32LE(value: number, offset?: number): number; + /** + * Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid signed 32-bit integer. Behavior is undefined when `value` is + * anything other than a signed 32-bit integer. + * + * The `value` is interpreted and written as a two's complement signed integer. 
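+ *
+ * Writing a negative value makes the two's complement encoding visible (an
+ * illustrative sketch, not from the upstream documentation):
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const buf = Buffer.allocUnsafe(4);
+ *
+ * // -1 is stored as the bit pattern 0xffffffff.
+ * buf.writeInt32BE(-1, 0);
+ *
+ * console.log(buf.toString('hex'));
+ * // Prints: ffffffff
+ * ```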
+ * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.allocUnsafe(4); + * + * buf.writeInt32BE(0x01020304, 0); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.5.5 + * @param value Number to be written to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 4`. + * @return `offset` plus the number of bytes written. + */ + writeInt32BE(value: number, offset?: number): number; + /** + * Writes `value` to `buf` at the specified `offset` as little-endian. Behavior is + * undefined when `value` is anything other than a JavaScript number. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.allocUnsafe(4); + * + * buf.writeFloatLE(0xcafebabe, 0); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.11.15 + * @param value Number to be written to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 4`. + * @return `offset` plus the number of bytes written. + */ + writeFloatLE(value: number, offset?: number): number; + /** + * Writes `value` to `buf` at the specified `offset` as big-endian. Behavior is + * undefined when `value` is anything other than a JavaScript number. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.allocUnsafe(4); + * + * buf.writeFloatBE(0xcafebabe, 0); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.11.15 + * @param value Number to be written to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 4`. + * @return `offset` plus the number of bytes written. + */ + writeFloatBE(value: number, offset?: number): number; + /** + * Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a JavaScript number. Behavior is undefined when `value` is anything + * other than a JavaScript number. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.allocUnsafe(8); + * + * buf.writeDoubleLE(123.456, 0); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.11.15 + * @param value Number to be written to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 8`. + * @return `offset` plus the number of bytes written. + */ + writeDoubleLE(value: number, offset?: number): number; + /** + * Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a JavaScript number. Behavior is undefined when `value` is anything + * other than a JavaScript number. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.allocUnsafe(8); + * + * buf.writeDoubleBE(123.456, 0); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.11.15 + * @param value Number to be written to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 8`. + * @return `offset` plus the number of bytes written. + */ + writeDoubleBE(value: number, offset?: number): number; + /** + * Fills `buf` with the specified `value`. If the `offset` and `end` are not given, + * the entire `buf` will be filled: + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * // Fill a `Buffer` with the ASCII character 'h'. 
+ * + * const b = Buffer.allocUnsafe(50).fill('h'); + * + * console.log(b.toString()); + * // Prints: hhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhh + * + * // Fill a buffer with empty string + * const c = Buffer.allocUnsafe(5).fill(''); + * + * console.log(c.fill('')); + * // Prints: + * ``` + * + * `value` is coerced to a `uint32` value if it is not a string, `Buffer`, or + * integer. If the resulting integer is greater than `255` (decimal), `buf` will be + * filled with `value & 255`. + * + * If the final write of a `fill()` operation falls on a multi-byte character, + * then only the bytes of that character that fit into `buf` are written: + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * // Fill a `Buffer` with character that takes up two bytes in UTF-8. + * + * console.log(Buffer.allocUnsafe(5).fill('\u0222')); + * // Prints: + * ``` + * + * If `value` contains invalid characters, it is truncated; if no valid + * fill data remains, an exception is thrown: + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.allocUnsafe(5); + * + * console.log(buf.fill('a')); + * // Prints: + * console.log(buf.fill('aazz', 'hex')); + * // Prints: + * console.log(buf.fill('zz', 'hex')); + * // Throws an exception. + * ``` + * @since v0.5.0 + * @param value The value with which to fill `buf`. Empty value (string, Uint8Array, Buffer) is coerced to `0`. + * @param [offset=0] Number of bytes to skip before starting to fill `buf`. + * @param [end=buf.length] Where to stop filling `buf` (not inclusive). + * @param [encoding='utf8'] The encoding for `value` if `value` is a string. + * @return A reference to `buf`. + */ + fill(value: string | Uint8Array | number, offset?: number, end?: number, encoding?: BufferEncoding): this; + /** + * If `value` is: + * + * * a string, `value` is interpreted according to the character encoding in `encoding`. + * * a `Buffer` or [`Uint8Array`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Uint8Array), `value` will be used in its entirety. + * To compare a partial `Buffer`, use `buf.subarray`. + * * a number, `value` will be interpreted as an unsigned 8-bit integer + * value between `0` and `255`. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from('this is a buffer'); + * + * console.log(buf.indexOf('this')); + * // Prints: 0 + * console.log(buf.indexOf('is')); + * // Prints: 2 + * console.log(buf.indexOf(Buffer.from('a buffer'))); + * // Prints: 8 + * console.log(buf.indexOf(97)); + * // Prints: 8 (97 is the decimal ASCII value for 'a') + * console.log(buf.indexOf(Buffer.from('a buffer example'))); + * // Prints: -1 + * console.log(buf.indexOf(Buffer.from('a buffer example').slice(0, 8))); + * // Prints: 8 + * + * const utf16Buffer = Buffer.from('\u039a\u0391\u03a3\u03a3\u0395', 'utf16le'); + * + * console.log(utf16Buffer.indexOf('\u03a3', 0, 'utf16le')); + * // Prints: 4 + * console.log(utf16Buffer.indexOf('\u03a3', -4, 'utf16le')); + * // Prints: 6 + * ``` + * + * If `value` is not a string, number, or `Buffer`, this method will throw a `TypeError`. If `value` is a number, it will be coerced to a valid byte value, + * an integer between 0 and 255. + * + * If `byteOffset` is not a number, it will be coerced to a number. If the result + * of coercion is `NaN` or `0`, then the entire buffer will be searched. 
This + * behavior matches [`String.prototype.indexOf()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/indexOf). + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const b = Buffer.from('abcdef'); + * + * // Passing a value that's a number, but not a valid byte. + * // Prints: 2, equivalent to searching for 99 or 'c'. + * console.log(b.indexOf(99.9)); + * console.log(b.indexOf(256 + 99)); + * + * // Passing a byteOffset that coerces to NaN or 0. + * // Prints: 1, searching the whole buffer. + * console.log(b.indexOf('b', undefined)); + * console.log(b.indexOf('b', {})); + * console.log(b.indexOf('b', null)); + * console.log(b.indexOf('b', [])); + * ``` + * + * If `value` is an empty string or empty `Buffer` and `byteOffset` is less + * than `buf.length`, `byteOffset` will be returned. If `value` is empty and`byteOffset` is at least `buf.length`, `buf.length` will be returned. + * @since v1.5.0 + * @param value What to search for. + * @param [byteOffset=0] Where to begin searching in `buf`. If negative, then offset is calculated from the end of `buf`. + * @param [encoding='utf8'] If `value` is a string, this is the encoding used to determine the binary representation of the string that will be searched for in `buf`. + * @return The index of the first occurrence of `value` in `buf`, or `-1` if `buf` does not contain `value`. + */ + indexOf(value: string | number | Uint8Array, byteOffset?: number, encoding?: BufferEncoding): number; + /** + * Identical to `buf.indexOf()`, except the last occurrence of `value` is found + * rather than the first occurrence. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from('this buffer is a buffer'); + * + * console.log(buf.lastIndexOf('this')); + * // Prints: 0 + * console.log(buf.lastIndexOf('buffer')); + * // Prints: 17 + * console.log(buf.lastIndexOf(Buffer.from('buffer'))); + * // Prints: 17 + * console.log(buf.lastIndexOf(97)); + * // Prints: 15 (97 is the decimal ASCII value for 'a') + * console.log(buf.lastIndexOf(Buffer.from('yolo'))); + * // Prints: -1 + * console.log(buf.lastIndexOf('buffer', 5)); + * // Prints: 5 + * console.log(buf.lastIndexOf('buffer', 4)); + * // Prints: -1 + * + * const utf16Buffer = Buffer.from('\u039a\u0391\u03a3\u03a3\u0395', 'utf16le'); + * + * console.log(utf16Buffer.lastIndexOf('\u03a3', undefined, 'utf16le')); + * // Prints: 6 + * console.log(utf16Buffer.lastIndexOf('\u03a3', -5, 'utf16le')); + * // Prints: 4 + * ``` + * + * If `value` is not a string, number, or `Buffer`, this method will throw a `TypeError`. If `value` is a number, it will be coerced to a valid byte value, + * an integer between 0 and 255. + * + * If `byteOffset` is not a number, it will be coerced to a number. Any arguments + * that coerce to `NaN`, like `{}` or `undefined`, will search the whole buffer. + * This behavior matches [`String.prototype.lastIndexOf()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/lastIndexOf). + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const b = Buffer.from('abcdef'); + * + * // Passing a value that's a number, but not a valid byte. + * // Prints: 2, equivalent to searching for 99 or 'c'. + * console.log(b.lastIndexOf(99.9)); + * console.log(b.lastIndexOf(256 + 99)); + * + * // Passing a byteOffset that coerces to NaN. + * // Prints: 1, searching the whole buffer. 
+ * console.log(b.lastIndexOf('b', undefined)); + * console.log(b.lastIndexOf('b', {})); + * + * // Passing a byteOffset that coerces to 0. + * // Prints: -1, equivalent to passing 0. + * console.log(b.lastIndexOf('b', null)); + * console.log(b.lastIndexOf('b', [])); + * ``` + * + * If `value` is an empty string or empty `Buffer`, `byteOffset` will be returned. + * @since v6.0.0 + * @param value What to search for. + * @param [byteOffset=buf.length - 1] Where to begin searching in `buf`. If negative, then offset is calculated from the end of `buf`. + * @param [encoding='utf8'] If `value` is a string, this is the encoding used to determine the binary representation of the string that will be searched for in `buf`. + * @return The index of the last occurrence of `value` in `buf`, or `-1` if `buf` does not contain `value`. + */ + lastIndexOf(value: string | number | Uint8Array, byteOffset?: number, encoding?: BufferEncoding): number; + /** + * Equivalent to `buf.indexOf() !== -1`. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from('this is a buffer'); + * + * console.log(buf.includes('this')); + * // Prints: true + * console.log(buf.includes('is')); + * // Prints: true + * console.log(buf.includes(Buffer.from('a buffer'))); + * // Prints: true + * console.log(buf.includes(97)); + * // Prints: true (97 is the decimal ASCII value for 'a') + * console.log(buf.includes(Buffer.from('a buffer example'))); + * // Prints: false + * console.log(buf.includes(Buffer.from('a buffer example').slice(0, 8))); + * // Prints: true + * console.log(buf.includes('this', 4)); + * // Prints: false + * ``` + * @since v5.3.0 + * @param value What to search for. + * @param [byteOffset=0] Where to begin searching in `buf`. If negative, then offset is calculated from the end of `buf`. + * @param [encoding='utf8'] If `value` is a string, this is its encoding. + * @return `true` if `value` was found in `buf`, `false` otherwise. + */ + includes(value: string | number | Buffer, byteOffset?: number, encoding?: BufferEncoding): boolean; + } + var Buffer: BufferConstructor; + /** + * Decodes a string of Base64-encoded data into bytes, and encodes those bytes + * into a string using Latin-1 (ISO-8859-1). + * + * The `data` may be any JavaScript-value that can be coerced into a string. + * + * **This function is only provided for compatibility with legacy web platform APIs** + * **and should never be used in new code, because they use strings to represent** + * **binary data and predate the introduction of typed arrays in JavaScript.** + * **For code running using Node.js APIs, converting between base64-encoded strings** + * **and binary data should be performed using `Buffer.from(str, 'base64')` and `buf.toString('base64')`.** + * @since v15.13.0, v14.17.0 + * @legacy Use `Buffer.from(data, 'base64')` instead. + * @param data The Base64-encoded input string. + */ + function atob(data: string): string; + /** + * Decodes a string into bytes using Latin-1 (ISO-8859), and encodes those bytes + * into a string using Base64. + * + * The `data` may be any JavaScript-value that can be coerced into a string. 
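+ *
+ * A minimal round trip, together with the `Buffer`-based form recommended for
+ * Node.js code (an illustrative sketch, not from the upstream documentation):
+ *
+ * ```js
+ * import { atob, btoa, Buffer } from 'node:buffer';
+ *
+ * console.log(btoa('hello'));
+ * // Prints: aGVsbG8=
+ * console.log(atob('aGVsbG8='));
+ * // Prints: hello
+ *
+ * // Preferred in Node.js code:
+ * console.log(Buffer.from('hello', 'latin1').toString('base64'));
+ * // Prints: aGVsbG8=
+ * ```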
+ * + * **This function is only provided for compatibility with legacy web platform APIs** + * **and should never be used in new code, because they use strings to represent** + * **binary data and predate the introduction of typed arrays in JavaScript.** + * **For code running using Node.js APIs, converting between base64-encoded strings** + * **and binary data should be performed using `Buffer.from(str, 'base64')` and `buf.toString('base64')`.** + * @since v15.13.0, v14.17.0 + * @legacy Use `buf.toString('base64')` instead. + * @param data An ASCII (Latin1) string. + */ + function btoa(data: string): string; + interface Blob extends _Blob {} + /** + * `Blob` class is a global reference for `import { Blob } from 'node:buffer'` + * https://nodejs.org/api/buffer.html#class-blob + * @since v18.0.0 + */ + var Blob: typeof globalThis extends { onmessage: any; Blob: infer T } ? T + : typeof import("buffer").Blob; + interface File extends _File {} + /** + * `File` class is a global reference for `import { File } from 'node:buffer'` + * https://nodejs.org/api/buffer.html#class-file + * @since v20.0.0 + */ + var File: typeof globalThis extends { onmessage: any; File: infer T } ? T + : typeof import("buffer").File; + } +} +declare module "node:buffer" { + export * from "buffer"; +} diff --git a/node_modules/@types/node-fetch/node_modules/@types/node/child_process.d.ts b/node_modules/@types/node-fetch/node_modules/@types/node/child_process.d.ts new file mode 100644 index 0000000..09f181f --- /dev/null +++ b/node_modules/@types/node-fetch/node_modules/@types/node/child_process.d.ts @@ -0,0 +1,1549 @@ +/** + * The `node:child_process` module provides the ability to spawn subprocesses in + * a manner that is similar, but not identical, to [`popen(3)`](http://man7.org/linux/man-pages/man3/popen.3.html). This capability + * is primarily provided by the {@link spawn} function: + * + * ```js + * import { spawn } from 'node:child_process'; + * const ls = spawn('ls', ['-lh', '/usr']); + * + * ls.stdout.on('data', (data) => { + * console.log(`stdout: ${data}`); + * }); + * + * ls.stderr.on('data', (data) => { + * console.error(`stderr: ${data}`); + * }); + * + * ls.on('close', (code) => { + * console.log(`child process exited with code ${code}`); + * }); + * ``` + * + * By default, pipes for `stdin`, `stdout`, and `stderr` are established between + * the parent Node.js process and the spawned subprocess. These pipes have + * limited (and platform-specific) capacity. If the subprocess writes to + * stdout in excess of that limit without the output being captured, the + * subprocess blocks waiting for the pipe buffer to accept more data. This is + * identical to the behavior of pipes in the shell. Use the `{ stdio: 'ignore' }` option if the output will not be consumed. + * + * The command lookup is performed using the `options.env.PATH` environment + * variable if `env` is in the `options` object. Otherwise, `process.env.PATH` is + * used. If `options.env` is set without `PATH`, lookup on Unix is performed + * on a default search path search of `/usr/bin:/bin` (see your operating system's + * manual for execvpe/execvp), on Windows the current processes environment + * variable `PATH` is used. + * + * On Windows, environment variables are case-insensitive. Node.js + * lexicographically sorts the `env` keys and uses the first one that + * case-insensitively matches. Only first (in lexicographic order) entry will be + * passed to the subprocess. 
This might lead to issues on Windows when passing + * objects to the `env` option that have multiple variants of the same key, such as `PATH` and `Path`. + * + * The {@link spawn} method spawns the child process asynchronously, + * without blocking the Node.js event loop. The {@link spawnSync} function provides equivalent functionality in a synchronous manner that blocks + * the event loop until the spawned process either exits or is terminated. + * + * For convenience, the `node:child_process` module provides a handful of + * synchronous and asynchronous alternatives to {@link spawn} and {@link spawnSync}. Each of these alternatives are implemented on + * top of {@link spawn} or {@link spawnSync}. + * + * * {@link exec}: spawns a shell and runs a command within that + * shell, passing the `stdout` and `stderr` to a callback function when + * complete. + * * {@link execFile}: similar to {@link exec} except + * that it spawns the command directly without first spawning a shell by + * default. + * * {@link fork}: spawns a new Node.js process and invokes a + * specified module with an IPC communication channel established that allows + * sending messages between parent and child. + * * {@link execSync}: a synchronous version of {@link exec} that will block the Node.js event loop. + * * {@link execFileSync}: a synchronous version of {@link execFile} that will block the Node.js event loop. + * + * For certain use cases, such as automating shell scripts, the `synchronous counterparts` may be more convenient. In many cases, however, + * the synchronous methods can have significant impact on performance due to + * stalling the event loop while spawned processes complete. + * @see [source](https://github.com/nodejs/node/blob/v22.x/lib/child_process.js) + */ +declare module "child_process" { + import { ObjectEncodingOptions } from "node:fs"; + import { Abortable, EventEmitter } from "node:events"; + import * as dgram from "node:dgram"; + import * as net from "node:net"; + import { Pipe, Readable, Stream, Writable } from "node:stream"; + import { URL } from "node:url"; + type Serializable = string | object | number | boolean | bigint; + type SendHandle = net.Socket | net.Server | dgram.Socket | undefined; + /** + * Instances of the `ChildProcess` represent spawned child processes. + * + * Instances of `ChildProcess` are not intended to be created directly. Rather, + * use the {@link spawn}, {@link exec},{@link execFile}, or {@link fork} methods to create + * instances of `ChildProcess`. + * @since v2.2.0 + */ + class ChildProcess extends EventEmitter { + /** + * A `Writable Stream` that represents the child process's `stdin`. + * + * If a child process waits to read all of its input, the child will not continue + * until this stream has been closed via `end()`. + * + * If the child was spawned with `stdio[0]` set to anything other than `'pipe'`, + * then this will be `null`. + * + * `subprocess.stdin` is an alias for `subprocess.stdio[0]`. Both properties will + * refer to the same value. + * + * The `subprocess.stdin` property can be `null` or `undefined` if the child process could not be successfully spawned. + * @since v0.1.90 + */ + stdin: Writable | null; + /** + * A `Readable Stream` that represents the child process's `stdout`. + * + * If the child was spawned with `stdio[1]` set to anything other than `'pipe'`, + * then this will be `null`. + * + * `subprocess.stdout` is an alias for `subprocess.stdio[1]`. Both properties will + * refer to the same value. 
+ * + * ```js + * import { spawn } from 'node:child_process'; + * + * const subprocess = spawn('ls'); + * + * subprocess.stdout.on('data', (data) => { + * console.log(`Received chunk ${data}`); + * }); + * ``` + * + * The `subprocess.stdout` property can be `null` or `undefined` if the child process could not be successfully spawned. + * @since v0.1.90 + */ + stdout: Readable | null; + /** + * A `Readable Stream` that represents the child process's `stderr`. + * + * If the child was spawned with `stdio[2]` set to anything other than `'pipe'`, + * then this will be `null`. + * + * `subprocess.stderr` is an alias for `subprocess.stdio[2]`. Both properties will + * refer to the same value. + * + * The `subprocess.stderr` property can be `null` or `undefined` if the child process could not be successfully spawned. + * @since v0.1.90 + */ + stderr: Readable | null; + /** + * The `subprocess.channel` property is a reference to the child's IPC channel. If + * no IPC channel exists, this property is `undefined`. + * @since v7.1.0 + */ + readonly channel?: Pipe | null | undefined; + /** + * A sparse array of pipes to the child process, corresponding with positions in + * the `stdio` option passed to {@link spawn} that have been set + * to the value `'pipe'`. `subprocess.stdio[0]`, `subprocess.stdio[1]`, and `subprocess.stdio[2]` are also available as `subprocess.stdin`, `subprocess.stdout`, and `subprocess.stderr`, + * respectively. + * + * In the following example, only the child's fd `1` (stdout) is configured as a + * pipe, so only the parent's `subprocess.stdio[1]` is a stream, all other values + * in the array are `null`. + * + * ```js + * import assert from 'node:assert'; + * import fs from 'node:fs'; + * import child_process from 'node:child_process'; + * + * const subprocess = child_process.spawn('ls', { + * stdio: [ + * 0, // Use parent's stdin for child. + * 'pipe', // Pipe child's stdout to parent. + * fs.openSync('err.out', 'w'), // Direct child's stderr to a file. + * ], + * }); + * + * assert.strictEqual(subprocess.stdio[0], null); + * assert.strictEqual(subprocess.stdio[0], subprocess.stdin); + * + * assert(subprocess.stdout); + * assert.strictEqual(subprocess.stdio[1], subprocess.stdout); + * + * assert.strictEqual(subprocess.stdio[2], null); + * assert.strictEqual(subprocess.stdio[2], subprocess.stderr); + * ``` + * + * The `subprocess.stdio` property can be `undefined` if the child process could + * not be successfully spawned. + * @since v0.7.10 + */ + readonly stdio: [ + Writable | null, + // stdin + Readable | null, + // stdout + Readable | null, + // stderr + Readable | Writable | null | undefined, + // extra + Readable | Writable | null | undefined, // extra + ]; + /** + * The `subprocess.killed` property indicates whether the child process + * successfully received a signal from `subprocess.kill()`. The `killed` property + * does not indicate that the child process has been terminated. + * @since v0.5.10 + */ + readonly killed: boolean; + /** + * Returns the process identifier (PID) of the child process. If the child process + * fails to spawn due to errors, then the value is `undefined` and `error` is + * emitted. 
+ * + * ```js + * import { spawn } from 'node:child_process'; + * const grep = spawn('grep', ['ssh']); + * + * console.log(`Spawned child pid: ${grep.pid}`); + * grep.stdin.end(); + * ``` + * @since v0.1.90 + */ + readonly pid?: number | undefined; + /** + * The `subprocess.connected` property indicates whether it is still possible to + * send and receive messages from a child process. When `subprocess.connected` is `false`, it is no longer possible to send or receive messages. + * @since v0.7.2 + */ + readonly connected: boolean; + /** + * The `subprocess.exitCode` property indicates the exit code of the child process. + * If the child process is still running, the field will be `null`. + */ + readonly exitCode: number | null; + /** + * The `subprocess.signalCode` property indicates the signal received by + * the child process if any, else `null`. + */ + readonly signalCode: NodeJS.Signals | null; + /** + * The `subprocess.spawnargs` property represents the full list of command-line + * arguments the child process was launched with. + */ + readonly spawnargs: string[]; + /** + * The `subprocess.spawnfile` property indicates the executable file name of + * the child process that is launched. + * + * For {@link fork}, its value will be equal to `process.execPath`. + * For {@link spawn}, its value will be the name of + * the executable file. + * For {@link exec}, its value will be the name of the shell + * in which the child process is launched. + */ + readonly spawnfile: string; + /** + * The `subprocess.kill()` method sends a signal to the child process. If no + * argument is given, the process will be sent the `'SIGTERM'` signal. See [`signal(7)`](http://man7.org/linux/man-pages/man7/signal.7.html) for a list of available signals. This function + * returns `true` if [`kill(2)`](http://man7.org/linux/man-pages/man2/kill.2.html) succeeds, and `false` otherwise. + * + * ```js + * import { spawn } from 'node:child_process'; + * const grep = spawn('grep', ['ssh']); + * + * grep.on('close', (code, signal) => { + * console.log( + * `child process terminated due to receipt of signal ${signal}`); + * }); + * + * // Send SIGHUP to process. + * grep.kill('SIGHUP'); + * ``` + * + * The `ChildProcess` object may emit an `'error'` event if the signal + * cannot be delivered. Sending a signal to a child process that has already exited + * is not an error but may have unforeseen consequences. Specifically, if the + * process identifier (PID) has been reassigned to another process, the signal will + * be delivered to that process instead which can have unexpected results. + * + * While the function is called `kill`, the signal delivered to the child process + * may not actually terminate the process. + * + * See [`kill(2)`](http://man7.org/linux/man-pages/man2/kill.2.html) for reference. + * + * On Windows, where POSIX signals do not exist, the `signal` argument will be + * ignored, and the process will be killed forcefully and abruptly (similar to `'SIGKILL'`). + * See `Signal Events` for more details. + * + * On Linux, child processes of child processes will not be terminated + * when attempting to kill their parent. 
This is likely to happen when running a + * new process in a shell or with the use of the `shell` option of `ChildProcess`: + * + * ```js + * 'use strict'; + * import { spawn } from 'node:child_process'; + * + * const subprocess = spawn( + * 'sh', + * [ + * '-c', + * `node -e "setInterval(() => { + * console.log(process.pid, 'is alive') + * }, 500);"`, + * ], { + * stdio: ['inherit', 'inherit', 'inherit'], + * }, + * ); + * + * setTimeout(() => { + * subprocess.kill(); // Does not terminate the Node.js process in the shell. + * }, 2000); + * ``` + * @since v0.1.90 + */ + kill(signal?: NodeJS.Signals | number): boolean; + /** + * Calls {@link ChildProcess.kill} with `'SIGTERM'`. + * @since v20.5.0 + */ + [Symbol.dispose](): void; + /** + * When an IPC channel has been established between the parent and child ( + * i.e. when using {@link fork}), the `subprocess.send()` method can + * be used to send messages to the child process. When the child process is a + * Node.js instance, these messages can be received via the `'message'` event. + * + * The message goes through serialization and parsing. The resulting + * message might not be the same as what is originally sent. + * + * For example, in the parent script: + * + * ```js + * import cp from 'node:child_process'; + * const n = cp.fork(`${__dirname}/sub.js`); + * + * n.on('message', (m) => { + * console.log('PARENT got message:', m); + * }); + * + * // Causes the child to print: CHILD got message: { hello: 'world' } + * n.send({ hello: 'world' }); + * ``` + * + * And then the child script, `'sub.js'` might look like this: + * + * ```js + * process.on('message', (m) => { + * console.log('CHILD got message:', m); + * }); + * + * // Causes the parent to print: PARENT got message: { foo: 'bar', baz: null } + * process.send({ foo: 'bar', baz: NaN }); + * ``` + * + * Child Node.js processes will have a `process.send()` method of their own + * that allows the child to send messages back to the parent. + * + * There is a special case when sending a `{cmd: 'NODE_foo'}` message. Messages + * containing a `NODE_` prefix in the `cmd` property are reserved for use within + * Node.js core and will not be emitted in the child's `'message'` event. Rather, such messages are emitted using the `'internalMessage'` event and are consumed internally by Node.js. + * Applications should avoid using such messages or listening for `'internalMessage'` events as it is subject to change without notice. + * + * The optional `sendHandle` argument that may be passed to `subprocess.send()` is + * for passing a TCP server or socket object to the child process. The child will + * receive the object as the second argument passed to the callback function + * registered on the `'message'` event. Any data that is received and buffered in + * the socket will not be sent to the child. Sending IPC sockets is not supported on Windows. + * + * The optional `callback` is a function that is invoked after the message is + * sent but before the child may have received it. The function is called with a + * single argument: `null` on success, or an `Error` object on failure. + * + * If no `callback` function is provided and the message cannot be sent, an `'error'` event will be emitted by the `ChildProcess` object. This can + * happen, for instance, when the child process has already exited. + * + * `subprocess.send()` will return `false` if the channel has closed or when the + * backlog of unsent messages exceeds a threshold that makes it unwise to send + * more. 
Otherwise, the method returns `true`. The `callback` function can be + * used to implement flow control. + * + * #### Example: sending a server object + * + * The `sendHandle` argument can be used, for instance, to pass the handle of + * a TCP server object to the child process as illustrated in the example below: + * + * ```js + * import { createServer } from 'node:net'; + * import { fork } from 'node:child_process'; + * const subprocess = fork('subprocess.js'); + * + * // Open up the server object and send the handle. + * const server = createServer(); + * server.on('connection', (socket) => { + * socket.end('handled by parent'); + * }); + * server.listen(1337, () => { + * subprocess.send('server', server); + * }); + * ``` + * + * The child would then receive the server object as: + * + * ```js + * process.on('message', (m, server) => { + * if (m === 'server') { + * server.on('connection', (socket) => { + * socket.end('handled by child'); + * }); + * } + * }); + * ``` + * + * Once the server is now shared between the parent and child, some connections + * can be handled by the parent and some by the child. + * + * While the example above uses a server created using the `node:net` module, `node:dgram` module servers use exactly the same workflow with the exceptions of + * listening on a `'message'` event instead of `'connection'` and using `server.bind()` instead of `server.listen()`. This is, however, only + * supported on Unix platforms. + * + * #### Example: sending a socket object + * + * Similarly, the `sendHandler` argument can be used to pass the handle of a + * socket to the child process. The example below spawns two children that each + * handle connections with "normal" or "special" priority: + * + * ```js + * import { createServer } from 'node:net'; + * import { fork } from 'node:child_process'; + * const normal = fork('subprocess.js', ['normal']); + * const special = fork('subprocess.js', ['special']); + * + * // Open up the server and send sockets to child. Use pauseOnConnect to prevent + * // the sockets from being read before they are sent to the child process. + * const server = createServer({ pauseOnConnect: true }); + * server.on('connection', (socket) => { + * + * // If this is special priority... + * if (socket.remoteAddress === '74.125.127.100') { + * special.send('socket', socket); + * return; + * } + * // This is normal priority. + * normal.send('socket', socket); + * }); + * server.listen(1337); + * ``` + * + * The `subprocess.js` would receive the socket handle as the second argument + * passed to the event callback function: + * + * ```js + * process.on('message', (m, socket) => { + * if (m === 'socket') { + * if (socket) { + * // Check that the client socket exists. + * // It is possible for the socket to be closed between the time it is + * // sent and the time it is received in the child process. + * socket.end(`Request handled with ${process.argv[2]} priority`); + * } + * } + * }); + * ``` + * + * Do not use `.maxConnections` on a socket that has been passed to a subprocess. + * The parent cannot track when the socket is destroyed. + * + * Any `'message'` handlers in the subprocess should verify that `socket` exists, + * as the connection may have been closed during the time it takes to send the + * connection to the child. 
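+ *
+ * A rough sketch of using the `callback` for simple flow control, sending the next message
+ * only after the previous one has been handed off (`consumer.js` is a hypothetical child
+ * script used only for illustration):
+ *
+ * ```js
+ * import { fork } from 'node:child_process';
+ * const subprocess = fork('consumer.js'); // hypothetical child script
+ * let seq = 0;
+ *
+ * function sendNext() {
+ *   if (seq >= 1000) return subprocess.disconnect();
+ *   // The callback fires once the message has been handed off (or has failed),
+ *   // so recursing from it naturally paces the producer.
+ *   subprocess.send({ seq: seq++ }, (err) => {
+ *     if (err) return console.error('send failed:', err);
+ *     sendNext();
+ *   });
+ * }
+ * sendNext();
+ * ```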
+ * @since v0.5.9 + * @param sendHandle `undefined`, or a [`net.Socket`](https://nodejs.org/docs/latest-v22.x/api/net.html#class-netsocket), [`net.Server`](https://nodejs.org/docs/latest-v22.x/api/net.html#class-netserver), or [`dgram.Socket`](https://nodejs.org/docs/latest-v22.x/api/dgram.html#class-dgramsocket) object. + * @param options The `options` argument, if present, is an object used to parameterize the sending of certain types of handles. `options` supports the following properties: + */ + send(message: Serializable, callback?: (error: Error | null) => void): boolean; + send(message: Serializable, sendHandle?: SendHandle, callback?: (error: Error | null) => void): boolean; + send( + message: Serializable, + sendHandle?: SendHandle, + options?: MessageOptions, + callback?: (error: Error | null) => void, + ): boolean; + /** + * Closes the IPC channel between parent and child, allowing the child to exit + * gracefully once there are no other connections keeping it alive. After calling + * this method the `subprocess.connected` and `process.connected` properties in + * both the parent and child (respectively) will be set to `false`, and it will be + * no longer possible to pass messages between the processes. + * + * The `'disconnect'` event will be emitted when there are no messages in the + * process of being received. This will most often be triggered immediately after + * calling `subprocess.disconnect()`. + * + * When the child process is a Node.js instance (e.g. spawned using {@link fork}), the `process.disconnect()` method can be invoked + * within the child process to close the IPC channel as well. + * @since v0.7.2 + */ + disconnect(): void; + /** + * By default, the parent will wait for the detached child to exit. To prevent the + * parent from waiting for a given `subprocess` to exit, use the `subprocess.unref()` method. Doing so will cause the parent's event loop to not + * include the child in its reference count, allowing the parent to exit + * independently of the child, unless there is an established IPC channel between + * the child and the parent. + * + * ```js + * import { spawn } from 'node:child_process'; + * + * const subprocess = spawn(process.argv[0], ['child_program.js'], { + * detached: true, + * stdio: 'ignore', + * }); + * + * subprocess.unref(); + * ``` + * @since v0.7.10 + */ + unref(): void; + /** + * Calling `subprocess.ref()` after making a call to `subprocess.unref()` will + * restore the removed reference count for the child process, forcing the parent + * to wait for the child to exit before exiting itself. + * + * ```js + * import { spawn } from 'node:child_process'; + * + * const subprocess = spawn(process.argv[0], ['child_program.js'], { + * detached: true, + * stdio: 'ignore', + * }); + * + * subprocess.unref(); + * subprocess.ref(); + * ``` + * @since v0.7.10 + */ + ref(): void; + /** + * events.EventEmitter + * 1. close + * 2. disconnect + * 3. error + * 4. exit + * 5. message + * 6. 
spawn + */ + addListener(event: string, listener: (...args: any[]) => void): this; + addListener(event: "close", listener: (code: number | null, signal: NodeJS.Signals | null) => void): this; + addListener(event: "disconnect", listener: () => void): this; + addListener(event: "error", listener: (err: Error) => void): this; + addListener(event: "exit", listener: (code: number | null, signal: NodeJS.Signals | null) => void): this; + addListener(event: "message", listener: (message: Serializable, sendHandle: SendHandle) => void): this; + addListener(event: "spawn", listener: () => void): this; + emit(event: string | symbol, ...args: any[]): boolean; + emit(event: "close", code: number | null, signal: NodeJS.Signals | null): boolean; + emit(event: "disconnect"): boolean; + emit(event: "error", err: Error): boolean; + emit(event: "exit", code: number | null, signal: NodeJS.Signals | null): boolean; + emit(event: "message", message: Serializable, sendHandle: SendHandle): boolean; + emit(event: "spawn", listener: () => void): boolean; + on(event: string, listener: (...args: any[]) => void): this; + on(event: "close", listener: (code: number | null, signal: NodeJS.Signals | null) => void): this; + on(event: "disconnect", listener: () => void): this; + on(event: "error", listener: (err: Error) => void): this; + on(event: "exit", listener: (code: number | null, signal: NodeJS.Signals | null) => void): this; + on(event: "message", listener: (message: Serializable, sendHandle: SendHandle) => void): this; + on(event: "spawn", listener: () => void): this; + once(event: string, listener: (...args: any[]) => void): this; + once(event: "close", listener: (code: number | null, signal: NodeJS.Signals | null) => void): this; + once(event: "disconnect", listener: () => void): this; + once(event: "error", listener: (err: Error) => void): this; + once(event: "exit", listener: (code: number | null, signal: NodeJS.Signals | null) => void): this; + once(event: "message", listener: (message: Serializable, sendHandle: SendHandle) => void): this; + once(event: "spawn", listener: () => void): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + prependListener(event: "close", listener: (code: number | null, signal: NodeJS.Signals | null) => void): this; + prependListener(event: "disconnect", listener: () => void): this; + prependListener(event: "error", listener: (err: Error) => void): this; + prependListener(event: "exit", listener: (code: number | null, signal: NodeJS.Signals | null) => void): this; + prependListener(event: "message", listener: (message: Serializable, sendHandle: SendHandle) => void): this; + prependListener(event: "spawn", listener: () => void): this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + prependOnceListener( + event: "close", + listener: (code: number | null, signal: NodeJS.Signals | null) => void, + ): this; + prependOnceListener(event: "disconnect", listener: () => void): this; + prependOnceListener(event: "error", listener: (err: Error) => void): this; + prependOnceListener( + event: "exit", + listener: (code: number | null, signal: NodeJS.Signals | null) => void, + ): this; + prependOnceListener(event: "message", listener: (message: Serializable, sendHandle: SendHandle) => void): this; + prependOnceListener(event: "spawn", listener: () => void): this; + } + // return this object when stdio option is undefined or not specified + interface ChildProcessWithoutNullStreams extends ChildProcess { + stdin: Writable; + stdout: 
Readable; + stderr: Readable; + readonly stdio: [ + Writable, + Readable, + Readable, + // stderr + Readable | Writable | null | undefined, + // extra, no modification + Readable | Writable | null | undefined, // extra, no modification + ]; + } + // return this object when stdio option is a tuple of 3 + interface ChildProcessByStdio + extends ChildProcess + { + stdin: I; + stdout: O; + stderr: E; + readonly stdio: [ + I, + O, + E, + Readable | Writable | null | undefined, + // extra, no modification + Readable | Writable | null | undefined, // extra, no modification + ]; + } + interface MessageOptions { + keepOpen?: boolean | undefined; + } + type IOType = "overlapped" | "pipe" | "ignore" | "inherit"; + type StdioOptions = IOType | Array; + type SerializationType = "json" | "advanced"; + interface MessagingOptions extends Abortable { + /** + * Specify the kind of serialization used for sending messages between processes. + * @default 'json' + */ + serialization?: SerializationType | undefined; + /** + * The signal value to be used when the spawned process will be killed by the abort signal. + * @default 'SIGTERM' + */ + killSignal?: NodeJS.Signals | number | undefined; + /** + * In milliseconds the maximum amount of time the process is allowed to run. + */ + timeout?: number | undefined; + } + interface ProcessEnvOptions { + uid?: number | undefined; + gid?: number | undefined; + cwd?: string | URL | undefined; + env?: NodeJS.ProcessEnv | undefined; + } + interface CommonOptions extends ProcessEnvOptions { + /** + * @default false + */ + windowsHide?: boolean | undefined; + /** + * @default 0 + */ + timeout?: number | undefined; + } + interface CommonSpawnOptions extends CommonOptions, MessagingOptions, Abortable { + argv0?: string | undefined; + /** + * Can be set to 'pipe', 'inherit', 'overlapped', or 'ignore', or an array of these strings. + * If passed as an array, the first element is used for `stdin`, the second for + * `stdout`, and the third for `stderr`. A fourth element can be used to + * specify the `stdio` behavior beyond the standard streams. See + * {@link ChildProcess.stdio} for more information. + * + * @default 'pipe' + */ + stdio?: StdioOptions | undefined; + shell?: boolean | string | undefined; + windowsVerbatimArguments?: boolean | undefined; + } + interface SpawnOptions extends CommonSpawnOptions { + detached?: boolean | undefined; + } + interface SpawnOptionsWithoutStdio extends SpawnOptions { + stdio?: StdioPipeNamed | StdioPipe[] | undefined; + } + type StdioNull = "inherit" | "ignore" | Stream; + type StdioPipeNamed = "pipe" | "overlapped"; + type StdioPipe = undefined | null | StdioPipeNamed; + interface SpawnOptionsWithStdioTuple< + Stdin extends StdioNull | StdioPipe, + Stdout extends StdioNull | StdioPipe, + Stderr extends StdioNull | StdioPipe, + > extends SpawnOptions { + stdio: [Stdin, Stdout, Stderr]; + } + /** + * The `child_process.spawn()` method spawns a new process using the given `command`, with command-line arguments in `args`. If omitted, `args` defaults + * to an empty array. + * + * **If the `shell` option is enabled, do not pass unsanitized user input to this** + * **function. Any input containing shell metacharacters may be used to trigger** + * **arbitrary command execution.** + * + * A third argument may be used to specify additional options, with these defaults: + * + * ```js + * const defaults = { + * cwd: undefined, + * env: process.env, + * }; + * ``` + * + * Use `cwd` to specify the working directory from which the process is spawned. 
+ * If not given, the default is to inherit the current working directory. If given, + * but the path does not exist, the child process emits an `ENOENT` error + * and exits immediately. `ENOENT` is also emitted when the command + * does not exist. + * + * Use `env` to specify environment variables that will be visible to the new + * process, the default is `process.env`. + * + * `undefined` values in `env` will be ignored. + * + * Example of running `ls -lh /usr`, capturing `stdout`, `stderr`, and the + * exit code: + * + * ```js + * import { spawn } from 'node:child_process'; + * const ls = spawn('ls', ['-lh', '/usr']); + * + * ls.stdout.on('data', (data) => { + * console.log(`stdout: ${data}`); + * }); + * + * ls.stderr.on('data', (data) => { + * console.error(`stderr: ${data}`); + * }); + * + * ls.on('close', (code) => { + * console.log(`child process exited with code ${code}`); + * }); + * ``` + * + * Example: A very elaborate way to run `ps ax | grep ssh` + * + * ```js + * import { spawn } from 'node:child_process'; + * const ps = spawn('ps', ['ax']); + * const grep = spawn('grep', ['ssh']); + * + * ps.stdout.on('data', (data) => { + * grep.stdin.write(data); + * }); + * + * ps.stderr.on('data', (data) => { + * console.error(`ps stderr: ${data}`); + * }); + * + * ps.on('close', (code) => { + * if (code !== 0) { + * console.log(`ps process exited with code ${code}`); + * } + * grep.stdin.end(); + * }); + * + * grep.stdout.on('data', (data) => { + * console.log(data.toString()); + * }); + * + * grep.stderr.on('data', (data) => { + * console.error(`grep stderr: ${data}`); + * }); + * + * grep.on('close', (code) => { + * if (code !== 0) { + * console.log(`grep process exited with code ${code}`); + * } + * }); + * ``` + * + * Example of checking for failed `spawn`: + * + * ```js + * import { spawn } from 'node:child_process'; + * const subprocess = spawn('bad_command'); + * + * subprocess.on('error', (err) => { + * console.error('Failed to start subprocess.'); + * }); + * ``` + * + * Certain platforms (macOS, Linux) will use the value of `argv[0]` for the process + * title while others (Windows, SunOS) will use `command`. + * + * Node.js overwrites `argv[0]` with `process.execPath` on startup, so `process.argv[0]` in a Node.js child process will not match the `argv0` parameter passed to `spawn` from the parent. Retrieve + * it with the `process.argv0` property instead. + * + * If the `signal` option is enabled, calling `.abort()` on the corresponding `AbortController` is similar to calling `.kill()` on the child process except + * the error passed to the callback will be an `AbortError`: + * + * ```js + * import { spawn } from 'node:child_process'; + * const controller = new AbortController(); + * const { signal } = controller; + * const grep = spawn('grep', ['ssh'], { signal }); + * grep.on('error', (err) => { + * // This will be called with err being an AbortError if the controller aborts + * }); + * controller.abort(); // Stops the child process + * ``` + * @since v0.1.90 + * @param command The command to run. + * @param args List of string arguments. 
+ */ + function spawn(command: string, options?: SpawnOptionsWithoutStdio): ChildProcessWithoutNullStreams; + function spawn( + command: string, + options: SpawnOptionsWithStdioTuple, + ): ChildProcessByStdio; + function spawn( + command: string, + options: SpawnOptionsWithStdioTuple, + ): ChildProcessByStdio; + function spawn( + command: string, + options: SpawnOptionsWithStdioTuple, + ): ChildProcessByStdio; + function spawn( + command: string, + options: SpawnOptionsWithStdioTuple, + ): ChildProcessByStdio; + function spawn( + command: string, + options: SpawnOptionsWithStdioTuple, + ): ChildProcessByStdio; + function spawn( + command: string, + options: SpawnOptionsWithStdioTuple, + ): ChildProcessByStdio; + function spawn( + command: string, + options: SpawnOptionsWithStdioTuple, + ): ChildProcessByStdio; + function spawn( + command: string, + options: SpawnOptionsWithStdioTuple, + ): ChildProcessByStdio; + function spawn(command: string, options: SpawnOptions): ChildProcess; + // overloads of spawn with 'args' + function spawn( + command: string, + args?: readonly string[], + options?: SpawnOptionsWithoutStdio, + ): ChildProcessWithoutNullStreams; + function spawn( + command: string, + args: readonly string[], + options: SpawnOptionsWithStdioTuple, + ): ChildProcessByStdio; + function spawn( + command: string, + args: readonly string[], + options: SpawnOptionsWithStdioTuple, + ): ChildProcessByStdio; + function spawn( + command: string, + args: readonly string[], + options: SpawnOptionsWithStdioTuple, + ): ChildProcessByStdio; + function spawn( + command: string, + args: readonly string[], + options: SpawnOptionsWithStdioTuple, + ): ChildProcessByStdio; + function spawn( + command: string, + args: readonly string[], + options: SpawnOptionsWithStdioTuple, + ): ChildProcessByStdio; + function spawn( + command: string, + args: readonly string[], + options: SpawnOptionsWithStdioTuple, + ): ChildProcessByStdio; + function spawn( + command: string, + args: readonly string[], + options: SpawnOptionsWithStdioTuple, + ): ChildProcessByStdio; + function spawn( + command: string, + args: readonly string[], + options: SpawnOptionsWithStdioTuple, + ): ChildProcessByStdio; + function spawn(command: string, args: readonly string[], options: SpawnOptions): ChildProcess; + interface ExecOptions extends CommonOptions { + shell?: string | undefined; + signal?: AbortSignal | undefined; + maxBuffer?: number | undefined; + killSignal?: NodeJS.Signals | number | undefined; + } + interface ExecOptionsWithStringEncoding extends ExecOptions { + encoding: BufferEncoding; + } + interface ExecOptionsWithBufferEncoding extends ExecOptions { + encoding: BufferEncoding | null; // specify `null`. + } + interface ExecException extends Error { + cmd?: string | undefined; + killed?: boolean | undefined; + code?: number | undefined; + signal?: NodeJS.Signals | undefined; + stdout?: string; + stderr?: string; + } + /** + * Spawns a shell then executes the `command` within that shell, buffering any + * generated output. The `command` string passed to the exec function is processed + * directly by the shell and special characters (vary based on [shell](https://en.wikipedia.org/wiki/List_of_command-line_interpreters)) + * need to be dealt with accordingly: + * + * ```js + * import { exec } from 'node:child_process'; + * + * exec('"/path/to/test file/test.sh" arg1 arg2'); + * // Double quotes are used so that the space in the path is not interpreted as + * // a delimiter of multiple arguments. 
+ * + * exec('echo "The \\$HOME variable is $HOME"'); + * // The $HOME variable is escaped in the first instance, but not in the second. + * ``` + * + * **Never pass unsanitized user input to this function. Any input containing shell** + * **metacharacters may be used to trigger arbitrary command execution.** + * + * If a `callback` function is provided, it is called with the arguments `(error, stdout, stderr)`. On success, `error` will be `null`. On error, `error` will be an instance of `Error`. The + * `error.code` property will be + * the exit code of the process. By convention, any exit code other than `0` indicates an error. `error.signal` will be the signal that terminated the + * process. + * + * The `stdout` and `stderr` arguments passed to the callback will contain the + * stdout and stderr output of the child process. By default, Node.js will decode + * the output as UTF-8 and pass strings to the callback. The `encoding` option + * can be used to specify the character encoding used to decode the stdout and + * stderr output. If `encoding` is `'buffer'`, or an unrecognized character + * encoding, `Buffer` objects will be passed to the callback instead. + * + * ```js + * import { exec } from 'node:child_process'; + * exec('cat *.js missing_file | wc -l', (error, stdout, stderr) => { + * if (error) { + * console.error(`exec error: ${error}`); + * return; + * } + * console.log(`stdout: ${stdout}`); + * console.error(`stderr: ${stderr}`); + * }); + * ``` + * + * If `timeout` is greater than `0`, the parent will send the signal + * identified by the `killSignal` property (the default is `'SIGTERM'`) if the + * child runs longer than `timeout` milliseconds. + * + * Unlike the [`exec(3)`](http://man7.org/linux/man-pages/man3/exec.3.html) POSIX system call, `child_process.exec()` does not replace + * the existing process and uses a shell to execute the command. + * + * If this method is invoked as its `util.promisify()` ed version, it returns + * a `Promise` for an `Object` with `stdout` and `stderr` properties. The returned `ChildProcess` instance is attached to the `Promise` as a `child` property. In + * case of an error (including any error resulting in an exit code other than 0), a + * rejected promise is returned, with the same `error` object given in the + * callback, but with two additional properties `stdout` and `stderr`. + * + * ```js + * import util from 'node:util'; + * import child_process from 'node:child_process'; + * const exec = util.promisify(child_process.exec); + * + * async function lsExample() { + * const { stdout, stderr } = await exec('ls'); + * console.log('stdout:', stdout); + * console.error('stderr:', stderr); + * } + * lsExample(); + * ``` + * + * If the `signal` option is enabled, calling `.abort()` on the corresponding `AbortController` is similar to calling `.kill()` on the child process except + * the error passed to the callback will be an `AbortError`: + * + * ```js + * import { exec } from 'node:child_process'; + * const controller = new AbortController(); + * const { signal } = controller; + * const child = exec('grep ssh', { signal }, (error) => { + * console.error(error); // an AbortError + * }); + * controller.abort(); + * ``` + * @since v0.1.90 + * @param command The command to run, with space-separated arguments. + * @param callback called with the output when process terminates. 
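+ *
+ * A small illustrative sketch of the `encoding` and `maxBuffer` options described above
+ * (the command itself is just an example and assumes `git` is installed):
+ *
+ * ```js
+ * import { exec } from 'node:child_process';
+ *
+ * // With encoding 'buffer' the callback receives Buffer objects; maxBuffer raises
+ * // the cap on buffered output before the child process is terminated.
+ * exec('git log', { encoding: 'buffer', maxBuffer: 10 * 1024 * 1024 }, (error, stdout) => {
+ *   if (error) throw error;
+ *   console.log(`captured ${stdout.length} bytes`);
+ * });
+ * ```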
+ */
+ function exec(
+ command: string,
+ callback?: (error: ExecException | null, stdout: string, stderr: string) => void,
+ ): ChildProcess;
+ // `options` with `"buffer"` or `null` for `encoding` means stdout/stderr are definitely `Buffer`.
+ function exec(
+ command: string,
+ options: {
+ encoding: "buffer" | null;
+ } & ExecOptions,
+ callback?: (error: ExecException | null, stdout: Buffer, stderr: Buffer) => void,
+ ): ChildProcess;
+ // `options` with well known `encoding` means stdout/stderr are definitely `string`.
+ function exec(
+ command: string,
+ options: {
+ encoding: BufferEncoding;
+ } & ExecOptions,
+ callback?: (error: ExecException | null, stdout: string, stderr: string) => void,
+ ): ChildProcess;
+ // `options` with an `encoding` whose type is `string` means stdout/stderr could either be `Buffer` or `string`.
+ // There is no guarantee the `encoding` is unknown as `string` is a superset of `BufferEncoding`.
+ function exec(
+ command: string,
+ options: {
+ encoding: BufferEncoding;
+ } & ExecOptions,
+ callback?: (error: ExecException | null, stdout: string | Buffer, stderr: string | Buffer) => void,
+ ): ChildProcess;
+ // `options` without an `encoding` means stdout/stderr are definitely `string`.
+ function exec(
+ command: string,
+ options: ExecOptions,
+ callback?: (error: ExecException | null, stdout: string, stderr: string) => void,
+ ): ChildProcess;
+ // fallback if nothing else matches. Worst case is always `string | Buffer`.
+ function exec(
+ command: string,
+ options: (ObjectEncodingOptions & ExecOptions) | undefined | null,
+ callback?: (error: ExecException | null, stdout: string | Buffer, stderr: string | Buffer) => void,
+ ): ChildProcess;
+ interface PromiseWithChild<T> extends Promise<T> {
+ child: ChildProcess;
+ }
+ namespace exec {
+ function __promisify__(command: string): PromiseWithChild<{
+ stdout: string;
+ stderr: string;
+ }>;
+ function __promisify__(
+ command: string,
+ options: {
+ encoding: "buffer" | null;
+ } & ExecOptions,
+ ): PromiseWithChild<{
+ stdout: Buffer;
+ stderr: Buffer;
+ }>;
+ function __promisify__(
+ command: string,
+ options: {
+ encoding: BufferEncoding;
+ } & ExecOptions,
+ ): PromiseWithChild<{
+ stdout: string;
+ stderr: string;
+ }>;
+ function __promisify__(
+ command: string,
+ options: ExecOptions,
+ ): PromiseWithChild<{
+ stdout: string;
+ stderr: string;
+ }>;
+ function __promisify__(
+ command: string,
+ options?: (ObjectEncodingOptions & ExecOptions) | null,
+ ): PromiseWithChild<{
+ stdout: string | Buffer;
+ stderr: string | Buffer;
+ }>;
+ }
+ interface ExecFileOptions extends CommonOptions, Abortable {
+ maxBuffer?: number | undefined;
+ killSignal?: NodeJS.Signals | number | undefined;
+ windowsVerbatimArguments?: boolean | undefined;
+ shell?: boolean | string | undefined;
+ signal?: AbortSignal | undefined;
+ }
+ interface ExecFileOptionsWithStringEncoding extends ExecFileOptions {
+ encoding: BufferEncoding;
+ }
+ interface ExecFileOptionsWithBufferEncoding extends ExecFileOptions {
+ encoding: "buffer" | null;
+ }
+ interface ExecFileOptionsWithOtherEncoding extends ExecFileOptions {
+ encoding: BufferEncoding;
+ }
+ type ExecFileException =
+ & Omit<ExecException, "code">
+ & Omit<NodeJS.ErrnoException, "code">
+ & { code?: string | number | undefined | null };
+ /**
+ * The `child_process.execFile()` function is similar to {@link exec} except that it does not spawn a shell by default. Rather, the specified
+ * executable `file` is spawned directly as a new process making it slightly more
+ * efficient than {@link exec}.
+ * + * The same options as {@link exec} are supported. Since a shell is + * not spawned, behaviors such as I/O redirection and file globbing are not + * supported. + * + * ```js + * import { execFile } from 'node:child_process'; + * const child = execFile('node', ['--version'], (error, stdout, stderr) => { + * if (error) { + * throw error; + * } + * console.log(stdout); + * }); + * ``` + * + * The `stdout` and `stderr` arguments passed to the callback will contain the + * stdout and stderr output of the child process. By default, Node.js will decode + * the output as UTF-8 and pass strings to the callback. The `encoding` option + * can be used to specify the character encoding used to decode the stdout and + * stderr output. If `encoding` is `'buffer'`, or an unrecognized character + * encoding, `Buffer` objects will be passed to the callback instead. + * + * If this method is invoked as its `util.promisify()` ed version, it returns + * a `Promise` for an `Object` with `stdout` and `stderr` properties. The returned `ChildProcess` instance is attached to the `Promise` as a `child` property. In + * case of an error (including any error resulting in an exit code other than 0), a + * rejected promise is returned, with the same `error` object given in the + * callback, but with two additional properties `stdout` and `stderr`. + * + * ```js + * import util from 'node:util'; + * import child_process from 'node:child_process'; + * const execFile = util.promisify(child_process.execFile); + * async function getVersion() { + * const { stdout } = await execFile('node', ['--version']); + * console.log(stdout); + * } + * getVersion(); + * ``` + * + * **If the `shell` option is enabled, do not pass unsanitized user input to this** + * **function. Any input containing shell metacharacters may be used to trigger** + * **arbitrary command execution.** + * + * If the `signal` option is enabled, calling `.abort()` on the corresponding `AbortController` is similar to calling `.kill()` on the child process except + * the error passed to the callback will be an `AbortError`: + * + * ```js + * import { execFile } from 'node:child_process'; + * const controller = new AbortController(); + * const { signal } = controller; + * const child = execFile('node', ['--version'], { signal }, (error) => { + * console.error(error); // an AbortError + * }); + * controller.abort(); + * ``` + * @since v0.1.91 + * @param file The name or path of the executable file to run. + * @param args List of string arguments. + * @param callback Called with the output when process terminates. + */ + function execFile(file: string): ChildProcess; + function execFile( + file: string, + options: (ObjectEncodingOptions & ExecFileOptions) | undefined | null, + ): ChildProcess; + function execFile(file: string, args?: readonly string[] | null): ChildProcess; + function execFile( + file: string, + args: readonly string[] | undefined | null, + options: (ObjectEncodingOptions & ExecFileOptions) | undefined | null, + ): ChildProcess; + // no `options` definitely means stdout/stderr are `string`. + function execFile( + file: string, + callback: (error: ExecFileException | null, stdout: string, stderr: string) => void, + ): ChildProcess; + function execFile( + file: string, + args: readonly string[] | undefined | null, + callback: (error: ExecFileException | null, stdout: string, stderr: string) => void, + ): ChildProcess; + // `options` with `"buffer"` or `null` for `encoding` means stdout/stderr are definitely `Buffer`. 
+ function execFile( + file: string, + options: ExecFileOptionsWithBufferEncoding, + callback: (error: ExecFileException | null, stdout: Buffer, stderr: Buffer) => void, + ): ChildProcess; + function execFile( + file: string, + args: readonly string[] | undefined | null, + options: ExecFileOptionsWithBufferEncoding, + callback: (error: ExecFileException | null, stdout: Buffer, stderr: Buffer) => void, + ): ChildProcess; + // `options` with well known `encoding` means stdout/stderr are definitely `string`. + function execFile( + file: string, + options: ExecFileOptionsWithStringEncoding, + callback: (error: ExecFileException | null, stdout: string, stderr: string) => void, + ): ChildProcess; + function execFile( + file: string, + args: readonly string[] | undefined | null, + options: ExecFileOptionsWithStringEncoding, + callback: (error: ExecFileException | null, stdout: string, stderr: string) => void, + ): ChildProcess; + // `options` with an `encoding` whose type is `string` means stdout/stderr could either be `Buffer` or `string`. + // There is no guarantee the `encoding` is unknown as `string` is a superset of `BufferEncoding`. + function execFile( + file: string, + options: ExecFileOptionsWithOtherEncoding, + callback: (error: ExecFileException | null, stdout: string | Buffer, stderr: string | Buffer) => void, + ): ChildProcess; + function execFile( + file: string, + args: readonly string[] | undefined | null, + options: ExecFileOptionsWithOtherEncoding, + callback: (error: ExecFileException | null, stdout: string | Buffer, stderr: string | Buffer) => void, + ): ChildProcess; + // `options` without an `encoding` means stdout/stderr are definitely `string`. + function execFile( + file: string, + options: ExecFileOptions, + callback: (error: ExecFileException | null, stdout: string, stderr: string) => void, + ): ChildProcess; + function execFile( + file: string, + args: readonly string[] | undefined | null, + options: ExecFileOptions, + callback: (error: ExecFileException | null, stdout: string, stderr: string) => void, + ): ChildProcess; + // fallback if nothing else matches. Worst case is always `string | Buffer`. 
+ function execFile( + file: string, + options: (ObjectEncodingOptions & ExecFileOptions) | undefined | null, + callback: + | ((error: ExecFileException | null, stdout: string | Buffer, stderr: string | Buffer) => void) + | undefined + | null, + ): ChildProcess; + function execFile( + file: string, + args: readonly string[] | undefined | null, + options: (ObjectEncodingOptions & ExecFileOptions) | undefined | null, + callback: + | ((error: ExecFileException | null, stdout: string | Buffer, stderr: string | Buffer) => void) + | undefined + | null, + ): ChildProcess; + namespace execFile { + function __promisify__(file: string): PromiseWithChild<{ + stdout: string; + stderr: string; + }>; + function __promisify__( + file: string, + args: readonly string[] | undefined | null, + ): PromiseWithChild<{ + stdout: string; + stderr: string; + }>; + function __promisify__( + file: string, + options: ExecFileOptionsWithBufferEncoding, + ): PromiseWithChild<{ + stdout: Buffer; + stderr: Buffer; + }>; + function __promisify__( + file: string, + args: readonly string[] | undefined | null, + options: ExecFileOptionsWithBufferEncoding, + ): PromiseWithChild<{ + stdout: Buffer; + stderr: Buffer; + }>; + function __promisify__( + file: string, + options: ExecFileOptionsWithStringEncoding, + ): PromiseWithChild<{ + stdout: string; + stderr: string; + }>; + function __promisify__( + file: string, + args: readonly string[] | undefined | null, + options: ExecFileOptionsWithStringEncoding, + ): PromiseWithChild<{ + stdout: string; + stderr: string; + }>; + function __promisify__( + file: string, + options: ExecFileOptionsWithOtherEncoding, + ): PromiseWithChild<{ + stdout: string | Buffer; + stderr: string | Buffer; + }>; + function __promisify__( + file: string, + args: readonly string[] | undefined | null, + options: ExecFileOptionsWithOtherEncoding, + ): PromiseWithChild<{ + stdout: string | Buffer; + stderr: string | Buffer; + }>; + function __promisify__( + file: string, + options: ExecFileOptions, + ): PromiseWithChild<{ + stdout: string; + stderr: string; + }>; + function __promisify__( + file: string, + args: readonly string[] | undefined | null, + options: ExecFileOptions, + ): PromiseWithChild<{ + stdout: string; + stderr: string; + }>; + function __promisify__( + file: string, + options: (ObjectEncodingOptions & ExecFileOptions) | undefined | null, + ): PromiseWithChild<{ + stdout: string | Buffer; + stderr: string | Buffer; + }>; + function __promisify__( + file: string, + args: readonly string[] | undefined | null, + options: (ObjectEncodingOptions & ExecFileOptions) | undefined | null, + ): PromiseWithChild<{ + stdout: string | Buffer; + stderr: string | Buffer; + }>; + } + interface ForkOptions extends ProcessEnvOptions, MessagingOptions, Abortable { + execPath?: string | undefined; + execArgv?: string[] | undefined; + silent?: boolean | undefined; + /** + * Can be set to 'pipe', 'inherit', 'overlapped', or 'ignore', or an array of these strings. + * If passed as an array, the first element is used for `stdin`, the second for + * `stdout`, and the third for `stderr`. A fourth element can be used to + * specify the `stdio` behavior beyond the standard streams. See + * {@link ChildProcess.stdio} for more information. + * + * @default 'pipe' + */ + stdio?: StdioOptions | undefined; + detached?: boolean | undefined; + windowsVerbatimArguments?: boolean | undefined; + } + /** + * The `child_process.fork()` method is a special case of {@link spawn} used specifically to spawn new Node.js processes. 
+ * Like {@link spawn}, a `ChildProcess` object is returned. The
+ * returned `ChildProcess` will have an additional communication channel
+ * built-in that allows messages to be passed back and forth between the parent and
+ * child. See `subprocess.send()` for details.
+ *
+ * Keep in mind that spawned Node.js child processes are
+ * independent of the parent with exception of the IPC communication channel
+ * that is established between the two. Each process has its own memory, with
+ * their own V8 instances. Because of the additional resource allocations
+ * required, spawning a large number of child Node.js processes is not
+ * recommended.
+ *
+ * By default, `child_process.fork()` will spawn new Node.js instances using the `process.execPath` of the parent process. The `execPath` property in the `options` object allows for an alternative
+ * execution path to be used.
+ *
+ * Node.js processes launched with a custom `execPath` will communicate with the
+ * parent process using the file descriptor (fd) identified using the
+ * environment variable `NODE_CHANNEL_FD` on the child process.
+ *
+ * Unlike the [`fork(2)`](http://man7.org/linux/man-pages/man2/fork.2.html) POSIX system call, `child_process.fork()` does not clone the
+ * current process.
+ *
+ * The `shell` option available in {@link spawn} is not supported by `child_process.fork()` and will be ignored if set.
+ *
+ * If the `signal` option is enabled, calling `.abort()` on the corresponding `AbortController` is similar to calling `.kill()` on the child process except
+ * the error passed to the callback will be an `AbortError`:
+ *
+ * ```js
+ * if (process.argv[2] === 'child') {
+ * setTimeout(() => {
+ * console.log(`Hello from ${process.argv[2]}!`);
+ * }, 1_000);
+ * } else {
+ * import { fork } from 'node:child_process';
+ * const controller = new AbortController();
+ * const { signal } = controller;
+ * const child = fork(__filename, ['child'], { signal });
+ * child.on('error', (err) => {
+ * // This will be called with err being an AbortError if the controller aborts
+ * });
+ * controller.abort(); // Stops the child process
+ * }
+ * ```
+ * @since v0.5.0
+ * @param modulePath The module to run in the child.
+ * @param args List of string arguments.
+ */
+ function fork(modulePath: string | URL, options?: ForkOptions): ChildProcess;
+ function fork(modulePath: string | URL, args?: readonly string[], options?: ForkOptions): ChildProcess;
+ interface SpawnSyncOptions extends CommonSpawnOptions {
+ input?: string | NodeJS.ArrayBufferView | undefined;
+ maxBuffer?: number | undefined;
+ encoding?: BufferEncoding | "buffer" | null | undefined;
+ }
+ interface SpawnSyncOptionsWithStringEncoding extends SpawnSyncOptions {
+ encoding: BufferEncoding;
+ }
+ interface SpawnSyncOptionsWithBufferEncoding extends SpawnSyncOptions {
+ encoding?: "buffer" | null | undefined;
+ }
+ interface SpawnSyncReturns<T> {
+ pid: number;
+ output: Array<T | null>;
+ stdout: T;
+ stderr: T;
+ status: number | null;
+ signal: NodeJS.Signals | null;
+ error?: Error | undefined;
+ }
+ /**
+ * The `child_process.spawnSync()` method is generally identical to {@link spawn} with the exception that the function will not return
+ * until the child process has fully closed. When a timeout has been encountered
+ * and `killSignal` is sent, the method won't return until the process has
+ * completely exited. If the process intercepts and handles the `SIGTERM` signal
+ * and doesn't exit, the parent process will wait until the child process has
+ * exited.
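+ *
+ * A minimal illustrative sketch of collecting a command's output synchronously:
+ *
+ * ```js
+ * import { spawnSync } from 'node:child_process';
+ *
+ * // Blocks until `ls` exits; with an encoding set, stdout/stderr are strings.
+ * const result = spawnSync('ls', ['-lh', '/usr'], { encoding: 'utf8' });
+ *
+ * if (result.error) {
+ *   console.error('failed to spawn:', result.error); // e.g. ENOENT
+ * } else {
+ *   console.log(`exited with status ${result.status}`);
+ *   console.log(result.stdout);
+ * }
+ * ```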
+ *
+ * **If the `shell` option is enabled, do not pass unsanitized user input to this**
+ * **function. Any input containing shell metacharacters may be used to trigger**
+ * **arbitrary command execution.**
+ * @since v0.11.12
+ * @param command The command to run.
+ * @param args List of string arguments.
+ */
+ function spawnSync(command: string): SpawnSyncReturns<Buffer>;
+ function spawnSync(command: string, options: SpawnSyncOptionsWithStringEncoding): SpawnSyncReturns<string>;
+ function spawnSync(command: string, options: SpawnSyncOptionsWithBufferEncoding): SpawnSyncReturns<Buffer>;
+ function spawnSync(command: string, options?: SpawnSyncOptions): SpawnSyncReturns<string | Buffer>;
+ function spawnSync(command: string, args: readonly string[]): SpawnSyncReturns<Buffer>;
+ function spawnSync(
+ command: string,
+ args: readonly string[],
+ options: SpawnSyncOptionsWithStringEncoding,
+ ): SpawnSyncReturns<string>;
+ function spawnSync(
+ command: string,
+ args: readonly string[],
+ options: SpawnSyncOptionsWithBufferEncoding,
+ ): SpawnSyncReturns<Buffer>;
+ function spawnSync(
+ command: string,
+ args?: readonly string[],
+ options?: SpawnSyncOptions,
+ ): SpawnSyncReturns<string | Buffer>;
+ interface CommonExecOptions extends CommonOptions {
+ input?: string | NodeJS.ArrayBufferView | undefined;
+ /**
+ * Can be set to 'pipe', 'inherit', or 'ignore', or an array of these strings.
+ * If passed as an array, the first element is used for `stdin`, the second for
+ * `stdout`, and the third for `stderr`. A fourth element can be used to
+ * specify the `stdio` behavior beyond the standard streams. See
+ * {@link ChildProcess.stdio} for more information.
+ *
+ * @default 'pipe'
+ */
+ stdio?: StdioOptions | undefined;
+ killSignal?: NodeJS.Signals | number | undefined;
+ maxBuffer?: number | undefined;
+ encoding?: BufferEncoding | "buffer" | null | undefined;
+ }
+ interface ExecSyncOptions extends CommonExecOptions {
+ shell?: string | undefined;
+ }
+ interface ExecSyncOptionsWithStringEncoding extends ExecSyncOptions {
+ encoding: BufferEncoding;
+ }
+ interface ExecSyncOptionsWithBufferEncoding extends ExecSyncOptions {
+ encoding?: "buffer" | null | undefined;
+ }
+ /**
+ * The `child_process.execSync()` method is generally identical to {@link exec} with the exception that the method will not return
+ * until the child process has fully closed. When a timeout has been encountered
+ * and `killSignal` is sent, the method won't return until the process has
+ * completely exited. If the child process intercepts and handles the `SIGTERM` signal and doesn't exit, the parent process will wait until the child process
+ * has exited.
+ *
+ * If the process times out or has a non-zero exit code, this method will throw.
+ * The `Error` object will contain the entire result from {@link spawnSync}.
+ *
+ * **Never pass unsanitized user input to this function. Any input containing shell**
+ * **metacharacters may be used to trigger arbitrary command execution.**
+ * @since v0.11.12
+ * @param command The command to run.
+ * @return The stdout from the command.
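+ *
+ * For example (an illustrative sketch; assumes `git` is installed):
+ *
+ * ```js
+ * import { execSync } from 'node:child_process';
+ *
+ * // Runs in a shell and blocks until completion; throws on a non-zero exit code.
+ * const branch = execSync('git rev-parse --abbrev-ref HEAD', { encoding: 'utf8' }).trim();
+ * console.log(`current branch: ${branch}`);
+ * ```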
+ */ + function execSync(command: string): Buffer; + function execSync(command: string, options: ExecSyncOptionsWithStringEncoding): string; + function execSync(command: string, options: ExecSyncOptionsWithBufferEncoding): Buffer; + function execSync(command: string, options?: ExecSyncOptions): string | Buffer; + interface ExecFileSyncOptions extends CommonExecOptions { + shell?: boolean | string | undefined; + } + interface ExecFileSyncOptionsWithStringEncoding extends ExecFileSyncOptions { + encoding: BufferEncoding; + } + interface ExecFileSyncOptionsWithBufferEncoding extends ExecFileSyncOptions { + encoding?: "buffer" | null; // specify `null`. + } + /** + * The `child_process.execFileSync()` method is generally identical to {@link execFile} with the exception that the method will not + * return until the child process has fully closed. When a timeout has been + * encountered and `killSignal` is sent, the method won't return until the process + * has completely exited. + * + * If the child process intercepts and handles the `SIGTERM` signal and + * does not exit, the parent process will still wait until the child process has + * exited. + * + * If the process times out or has a non-zero exit code, this method will throw an `Error` that will include the full result of the underlying {@link spawnSync}. + * + * **If the `shell` option is enabled, do not pass unsanitized user input to this** + * **function. Any input containing shell metacharacters may be used to trigger** + * **arbitrary command execution.** + * @since v0.11.12 + * @param file The name or path of the executable file to run. + * @param args List of string arguments. + * @return The stdout from the command. + */ + function execFileSync(file: string): Buffer; + function execFileSync(file: string, options: ExecFileSyncOptionsWithStringEncoding): string; + function execFileSync(file: string, options: ExecFileSyncOptionsWithBufferEncoding): Buffer; + function execFileSync(file: string, options?: ExecFileSyncOptions): string | Buffer; + function execFileSync(file: string, args: readonly string[]): Buffer; + function execFileSync( + file: string, + args: readonly string[], + options: ExecFileSyncOptionsWithStringEncoding, + ): string; + function execFileSync( + file: string, + args: readonly string[], + options: ExecFileSyncOptionsWithBufferEncoding, + ): Buffer; + function execFileSync(file: string, args?: readonly string[], options?: ExecFileSyncOptions): string | Buffer; +} +declare module "node:child_process" { + export * from "child_process"; +} diff --git a/node_modules/@types/node-fetch/node_modules/@types/node/cluster.d.ts b/node_modules/@types/node-fetch/node_modules/@types/node/cluster.d.ts new file mode 100644 index 0000000..86b48d9 --- /dev/null +++ b/node_modules/@types/node-fetch/node_modules/@types/node/cluster.d.ts @@ -0,0 +1,579 @@ +/** + * Clusters of Node.js processes can be used to run multiple instances of Node.js + * that can distribute workloads among their application threads. When process isolation + * is not needed, use the [`worker_threads`](https://nodejs.org/docs/latest-v22.x/api/worker_threads.html) + * module instead, which allows running multiple application threads within a single Node.js instance. + * + * The cluster module allows easy creation of child processes that all share + * server ports. 
+ * + * ```js + * import cluster from 'node:cluster'; + * import http from 'node:http'; + * import { availableParallelism } from 'node:os'; + * import process from 'node:process'; + * + * const numCPUs = availableParallelism(); + * + * if (cluster.isPrimary) { + * console.log(`Primary ${process.pid} is running`); + * + * // Fork workers. + * for (let i = 0; i < numCPUs; i++) { + * cluster.fork(); + * } + * + * cluster.on('exit', (worker, code, signal) => { + * console.log(`worker ${worker.process.pid} died`); + * }); + * } else { + * // Workers can share any TCP connection + * // In this case it is an HTTP server + * http.createServer((req, res) => { + * res.writeHead(200); + * res.end('hello world\n'); + * }).listen(8000); + * + * console.log(`Worker ${process.pid} started`); + * } + * ``` + * + * Running Node.js will now share port 8000 between the workers: + * + * ```console + * $ node server.js + * Primary 3596 is running + * Worker 4324 started + * Worker 4520 started + * Worker 6056 started + * Worker 5644 started + * ``` + * + * On Windows, it is not yet possible to set up a named pipe server in a worker. + * @see [source](https://github.com/nodejs/node/blob/v22.x/lib/cluster.js) + */ +declare module "cluster" { + import * as child from "node:child_process"; + import EventEmitter = require("node:events"); + import * as net from "node:net"; + type SerializationType = "json" | "advanced"; + export interface ClusterSettings { + /** + * List of string arguments passed to the Node.js executable. + * @default process.execArgv + */ + execArgv?: string[] | undefined; + /** + * File path to worker file. + * @default process.argv[1] + */ + exec?: string | undefined; + /** + * String arguments passed to worker. + * @default process.argv.slice(2) + */ + args?: string[] | undefined; + /** + * Whether or not to send output to parent's stdio. + * @default false + */ + silent?: boolean | undefined; + /** + * Configures the stdio of forked processes. Because the cluster module relies on IPC to function, this configuration must + * contain an `'ipc'` entry. When this option is provided, it overrides `silent`. See [`child_prcess.spawn()`](https://nodejs.org/docs/latest-v22.x/api/child_process.html#child_processspawncommand-args-options)'s + * [`stdio`](https://nodejs.org/docs/latest-v22.x/api/child_process.html#optionsstdio). + */ + stdio?: any[] | undefined; + /** + * Sets the user identity of the process. (See [`setuid(2)`](https://man7.org/linux/man-pages/man2/setuid.2.html).) + */ + uid?: number | undefined; + /** + * Sets the group identity of the process. (See [`setgid(2)`](https://man7.org/linux/man-pages/man2/setgid.2.html).) + */ + gid?: number | undefined; + /** + * Sets inspector port of worker. This can be a number, or a function that takes no arguments and returns a number. + * By default each worker gets its own port, incremented from the primary's `process.debugPort`. + */ + inspectPort?: number | (() => number) | undefined; + /** + * Specify the kind of serialization used for sending messages between processes. Possible values are `'json'` and `'advanced'`. + * See [Advanced serialization for `child_process`](https://nodejs.org/docs/latest-v22.x/api/child_process.html#advanced-serialization) for more details. + * @default false + */ + serialization?: SerializationType | undefined; + /** + * Current working directory of the worker process. 
+ * @default undefined (inherits from parent process) + */ + cwd?: string | undefined; + /** + * Hide the forked processes console window that would normally be created on Windows systems. + * @default false + */ + windowsHide?: boolean | undefined; + } + export interface Address { + address: string; + port: number; + /** + * The `addressType` is one of: + * + * * `4` (TCPv4) + * * `6` (TCPv6) + * * `-1` (Unix domain socket) + * * `'udp4'` or `'udp6'` (UDPv4 or UDPv6) + */ + addressType: 4 | 6 | -1 | "udp4" | "udp6"; + } + /** + * A `Worker` object contains all public information and method about a worker. + * In the primary it can be obtained using `cluster.workers`. In a worker + * it can be obtained using `cluster.worker`. + * @since v0.7.0 + */ + export class Worker extends EventEmitter { + /** + * Each new worker is given its own unique id, this id is stored in the `id`. + * + * While a worker is alive, this is the key that indexes it in `cluster.workers`. + * @since v0.8.0 + */ + id: number; + /** + * All workers are created using [`child_process.fork()`](https://nodejs.org/docs/latest-v22.x/api/child_process.html#child_processforkmodulepath-args-options), the returned object + * from this function is stored as `.process`. In a worker, the global `process` is stored. + * + * See: [Child Process module](https://nodejs.org/docs/latest-v22.x/api/child_process.html#child_processforkmodulepath-args-options). + * + * Workers will call `process.exit(0)` if the `'disconnect'` event occurs + * on `process` and `.exitedAfterDisconnect` is not `true`. This protects against + * accidental disconnection. + * @since v0.7.0 + */ + process: child.ChildProcess; + /** + * Send a message to a worker or primary, optionally with a handle. + * + * In the primary, this sends a message to a specific worker. It is identical to [`ChildProcess.send()`](https://nodejs.org/docs/latest-v22.x/api/child_process.html#subprocesssendmessage-sendhandle-options-callback). + * + * In a worker, this sends a message to the primary. It is identical to `process.send()`. + * + * This example will echo back all messages from the primary: + * + * ```js + * if (cluster.isPrimary) { + * const worker = cluster.fork(); + * worker.send('hi there'); + * + * } else if (cluster.isWorker) { + * process.on('message', (msg) => { + * process.send(msg); + * }); + * } + * ``` + * @since v0.7.0 + * @param options The `options` argument, if present, is an object used to parameterize the sending of certain types of handles. + */ + send(message: child.Serializable, callback?: (error: Error | null) => void): boolean; + send( + message: child.Serializable, + sendHandle: child.SendHandle, + callback?: (error: Error | null) => void, + ): boolean; + send( + message: child.Serializable, + sendHandle: child.SendHandle, + options?: child.MessageOptions, + callback?: (error: Error | null) => void, + ): boolean; + /** + * This function will kill the worker. In the primary worker, it does this by + * disconnecting the `worker.process`, and once disconnected, killing with `signal`. In the worker, it does it by killing the process with `signal`. + * + * The `kill()` function kills the worker process without waiting for a graceful + * disconnect, it has the same behavior as `worker.process.kill()`. + * + * This method is aliased as `worker.destroy()` for backwards compatibility. + * + * In a worker, `process.kill()` exists, but it is not this function; + * it is [`kill()`](https://nodejs.org/docs/latest-v22.x/api/process.html#processkillpid-signal). 
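+         *
+         * For example (the explicit `'SIGTERM'` argument below is the default signal
+         * and is shown only for illustration):
+         *
+         * ```js
+         * if (cluster.isPrimary) {
+         *   const worker = cluster.fork();
+         *   worker.on('exit', (code, signal) => {
+         *     console.log(`worker terminated by signal: ${signal}`);
+         *   });
+         *   worker.kill('SIGTERM');
+         * }
+         * ```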
+ * @since v0.9.12 + * @param [signal='SIGTERM'] Name of the kill signal to send to the worker process. + */ + kill(signal?: string): void; + destroy(signal?: string): void; + /** + * In a worker, this function will close all servers, wait for the `'close'` event + * on those servers, and then disconnect the IPC channel. + * + * In the primary, an internal message is sent to the worker causing it to call `.disconnect()` on itself. + * + * Causes `.exitedAfterDisconnect` to be set. + * + * After a server is closed, it will no longer accept new connections, + * but connections may be accepted by any other listening worker. Existing + * connections will be allowed to close as usual. When no more connections exist, + * see `server.close()`, the IPC channel to the worker will close allowing it + * to die gracefully. + * + * The above applies _only_ to server connections, client connections are not + * automatically closed by workers, and disconnect does not wait for them to close + * before exiting. + * + * In a worker, `process.disconnect` exists, but it is not this function; + * it is `disconnect()`. + * + * Because long living server connections may block workers from disconnecting, it + * may be useful to send a message, so application specific actions may be taken to + * close them. It also may be useful to implement a timeout, killing a worker if + * the `'disconnect'` event has not been emitted after some time. + * + * ```js + * import net from 'node:net'; + * + * if (cluster.isPrimary) { + * const worker = cluster.fork(); + * let timeout; + * + * worker.on('listening', (address) => { + * worker.send('shutdown'); + * worker.disconnect(); + * timeout = setTimeout(() => { + * worker.kill(); + * }, 2000); + * }); + * + * worker.on('disconnect', () => { + * clearTimeout(timeout); + * }); + * + * } else if (cluster.isWorker) { + * const server = net.createServer((socket) => { + * // Connections never end + * }); + * + * server.listen(8000); + * + * process.on('message', (msg) => { + * if (msg === 'shutdown') { + * // Initiate graceful close of any connections to server + * } + * }); + * } + * ``` + * @since v0.7.7 + * @return A reference to `worker`. + */ + disconnect(): void; + /** + * This function returns `true` if the worker is connected to its primary via its + * IPC channel, `false` otherwise. A worker is connected to its primary after it + * has been created. It is disconnected after the `'disconnect'` event is emitted. + * @since v0.11.14 + */ + isConnected(): boolean; + /** + * This function returns `true` if the worker's process has terminated (either + * because of exiting or being signaled). Otherwise, it returns `false`. + * + * ```js + * import cluster from 'node:cluster'; + * import http from 'node:http'; + * import { availableParallelism } from 'node:os'; + * import process from 'node:process'; + * + * const numCPUs = availableParallelism(); + * + * if (cluster.isPrimary) { + * console.log(`Primary ${process.pid} is running`); + * + * // Fork workers. + * for (let i = 0; i < numCPUs; i++) { + * cluster.fork(); + * } + * + * cluster.on('fork', (worker) => { + * console.log('worker is dead:', worker.isDead()); + * }); + * + * cluster.on('exit', (worker, code, signal) => { + * console.log('worker is dead:', worker.isDead()); + * }); + * } else { + * // Workers can share any TCP connection. In this case, it is an HTTP server. 
+ * http.createServer((req, res) => { + * res.writeHead(200); + * res.end(`Current process\n ${process.pid}`); + * process.kill(process.pid); + * }).listen(8000); + * } + * ``` + * @since v0.11.14 + */ + isDead(): boolean; + /** + * This property is `true` if the worker exited due to `.disconnect()`. + * If the worker exited any other way, it is `false`. If the + * worker has not exited, it is `undefined`. + * + * The boolean `worker.exitedAfterDisconnect` allows distinguishing between + * voluntary and accidental exit, the primary may choose not to respawn a worker + * based on this value. + * + * ```js + * cluster.on('exit', (worker, code, signal) => { + * if (worker.exitedAfterDisconnect === true) { + * console.log('Oh, it was just voluntary – no need to worry'); + * } + * }); + * + * // kill worker + * worker.kill(); + * ``` + * @since v6.0.0 + */ + exitedAfterDisconnect: boolean; + /** + * events.EventEmitter + * 1. disconnect + * 2. error + * 3. exit + * 4. listening + * 5. message + * 6. online + */ + addListener(event: string, listener: (...args: any[]) => void): this; + addListener(event: "disconnect", listener: () => void): this; + addListener(event: "error", listener: (error: Error) => void): this; + addListener(event: "exit", listener: (code: number, signal: string) => void): this; + addListener(event: "listening", listener: (address: Address) => void): this; + addListener(event: "message", listener: (message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined. + addListener(event: "online", listener: () => void): this; + emit(event: string | symbol, ...args: any[]): boolean; + emit(event: "disconnect"): boolean; + emit(event: "error", error: Error): boolean; + emit(event: "exit", code: number, signal: string): boolean; + emit(event: "listening", address: Address): boolean; + emit(event: "message", message: any, handle: net.Socket | net.Server): boolean; + emit(event: "online"): boolean; + on(event: string, listener: (...args: any[]) => void): this; + on(event: "disconnect", listener: () => void): this; + on(event: "error", listener: (error: Error) => void): this; + on(event: "exit", listener: (code: number, signal: string) => void): this; + on(event: "listening", listener: (address: Address) => void): this; + on(event: "message", listener: (message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined. + on(event: "online", listener: () => void): this; + once(event: string, listener: (...args: any[]) => void): this; + once(event: "disconnect", listener: () => void): this; + once(event: "error", listener: (error: Error) => void): this; + once(event: "exit", listener: (code: number, signal: string) => void): this; + once(event: "listening", listener: (address: Address) => void): this; + once(event: "message", listener: (message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined. 
+ once(event: "online", listener: () => void): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + prependListener(event: "disconnect", listener: () => void): this; + prependListener(event: "error", listener: (error: Error) => void): this; + prependListener(event: "exit", listener: (code: number, signal: string) => void): this; + prependListener(event: "listening", listener: (address: Address) => void): this; + prependListener(event: "message", listener: (message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined. + prependListener(event: "online", listener: () => void): this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + prependOnceListener(event: "disconnect", listener: () => void): this; + prependOnceListener(event: "error", listener: (error: Error) => void): this; + prependOnceListener(event: "exit", listener: (code: number, signal: string) => void): this; + prependOnceListener(event: "listening", listener: (address: Address) => void): this; + prependOnceListener(event: "message", listener: (message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined. + prependOnceListener(event: "online", listener: () => void): this; + } + export interface Cluster extends EventEmitter { + disconnect(callback?: () => void): void; + /** + * Spawn a new worker process. + * + * This can only be called from the primary process. + * @param env Key/value pairs to add to worker process environment. + * @since v0.6.0 + */ + fork(env?: any): Worker; + /** @deprecated since v16.0.0 - use isPrimary. */ + readonly isMaster: boolean; + /** + * True if the process is a primary. This is determined by the `process.env.NODE_UNIQUE_ID`. If `process.env.NODE_UNIQUE_ID` + * is undefined, then `isPrimary` is `true`. + * @since v16.0.0 + */ + readonly isPrimary: boolean; + /** + * True if the process is not a primary (it is the negation of `cluster.isPrimary`). + * @since v0.6.0 + */ + readonly isWorker: boolean; + /** + * The scheduling policy, either `cluster.SCHED_RR` for round-robin or `cluster.SCHED_NONE` to leave it to the operating system. This is a + * global setting and effectively frozen once either the first worker is spawned, or [`.setupPrimary()`](https://nodejs.org/docs/latest-v22.x/api/cluster.html#clustersetupprimarysettings) + * is called, whichever comes first. + * + * `SCHED_RR` is the default on all operating systems except Windows. Windows will change to `SCHED_RR` once libuv is able to effectively distribute + * IOCP handles without incurring a large performance hit. + * + * `cluster.schedulingPolicy` can also be set through the `NODE_CLUSTER_SCHED_POLICY` environment variable. Valid values are `'rr'` and `'none'`. + * @since v0.11.2 + */ + schedulingPolicy: number; + /** + * After calling [`.setupPrimary()`](https://nodejs.org/docs/latest-v22.x/api/cluster.html#clustersetupprimarysettings) + * (or [`.fork()`](https://nodejs.org/docs/latest-v22.x/api/cluster.html#clusterforkenv)) this settings object will contain + * the settings, including the default values. + * + * This object is not intended to be changed or set manually. + * @since v0.7.1 + */ + readonly settings: ClusterSettings; + /** @deprecated since v16.0.0 - use [`.setupPrimary()`](https://nodejs.org/docs/latest-v22.x/api/cluster.html#clustersetupprimarysettings) instead. 
*/
+        setupMaster(settings?: ClusterSettings): void;
+        /**
+         * `setupPrimary` is used to change the default 'fork' behavior. Once called, the settings will be present in `cluster.settings`.
+         *
+         * Any settings changes only affect future calls to [`.fork()`](https://nodejs.org/docs/latest-v22.x/api/cluster.html#clusterforkenv)
+         * and have no effect on workers that are already running.
+         *
+         * The only attribute of a worker that cannot be set via `.setupPrimary()` is the `env` passed to
+         * [`.fork()`](https://nodejs.org/docs/latest-v22.x/api/cluster.html#clusterforkenv).
+         *
+         * The defaults above apply to the first call only; the defaults for later calls are the current values at the time of
+         * `cluster.setupPrimary()` is called.
+         *
+         * ```js
+         * import cluster from 'node:cluster';
+         *
+         * cluster.setupPrimary({
+         *   exec: 'worker.js',
+         *   args: ['--use', 'https'],
+         *   silent: true,
+         * });
+         * cluster.fork(); // https worker
+         * cluster.setupPrimary({
+         *   exec: 'worker.js',
+         *   args: ['--use', 'http'],
+         * });
+         * cluster.fork(); // http worker
+         * ```
+         *
+         * This can only be called from the primary process.
+         * @since v16.0.0
+         */
+        setupPrimary(settings?: ClusterSettings): void;
+        /**
+         * A reference to the current worker object. Not available in the primary process.
+         *
+         * ```js
+         * import cluster from 'node:cluster';
+         *
+         * if (cluster.isPrimary) {
+         *   console.log('I am primary');
+         *   cluster.fork();
+         *   cluster.fork();
+         * } else if (cluster.isWorker) {
+         *   console.log(`I am worker #${cluster.worker.id}`);
+         * }
+         * ```
+         * @since v0.7.0
+         */
+        readonly worker?: Worker | undefined;
+        /**
+         * A hash that stores the active worker objects, keyed by `id` field. This makes it easy to loop through all the workers. It is only available in the primary process.
+         *
+         * A worker is removed from `cluster.workers` after the worker has disconnected _and_ exited. The order between these two events cannot be determined in advance. However, it
+         * is guaranteed that the removal from the `cluster.workers` list happens before the last `'disconnect'` or `'exit'` event is emitted.
+         *
+         * ```js
+         * import cluster from 'node:cluster';
+         *
+         * for (const worker of Object.values(cluster.workers)) {
+         *   worker.send('big announcement to all workers');
+         * }
+         * ```
+         * @since v0.7.0
+         */
+        readonly workers?: NodeJS.Dict<Worker> | undefined;
+        readonly SCHED_NONE: number;
+        readonly SCHED_RR: number;
+        /**
+         * events.EventEmitter
+         *   1. disconnect
+         *   2. exit
+         *   3. fork
+         *   4. listening
+         *   5. message
+         *   6. online
+         *   7. setup
+         */
+        addListener(event: string, listener: (...args: any[]) => void): this;
+        addListener(event: "disconnect", listener: (worker: Worker) => void): this;
+        addListener(event: "exit", listener: (worker: Worker, code: number, signal: string) => void): this;
+        addListener(event: "fork", listener: (worker: Worker) => void): this;
+        addListener(event: "listening", listener: (worker: Worker, address: Address) => void): this;
+        addListener(
+            event: "message",
+            listener: (worker: Worker, message: any, handle: net.Socket | net.Server) => void,
+        ): this; // the handle is a net.Socket or net.Server object, or undefined.
+ addListener(event: "online", listener: (worker: Worker) => void): this; + addListener(event: "setup", listener: (settings: ClusterSettings) => void): this; + emit(event: string | symbol, ...args: any[]): boolean; + emit(event: "disconnect", worker: Worker): boolean; + emit(event: "exit", worker: Worker, code: number, signal: string): boolean; + emit(event: "fork", worker: Worker): boolean; + emit(event: "listening", worker: Worker, address: Address): boolean; + emit(event: "message", worker: Worker, message: any, handle: net.Socket | net.Server): boolean; + emit(event: "online", worker: Worker): boolean; + emit(event: "setup", settings: ClusterSettings): boolean; + on(event: string, listener: (...args: any[]) => void): this; + on(event: "disconnect", listener: (worker: Worker) => void): this; + on(event: "exit", listener: (worker: Worker, code: number, signal: string) => void): this; + on(event: "fork", listener: (worker: Worker) => void): this; + on(event: "listening", listener: (worker: Worker, address: Address) => void): this; + on(event: "message", listener: (worker: Worker, message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined. + on(event: "online", listener: (worker: Worker) => void): this; + on(event: "setup", listener: (settings: ClusterSettings) => void): this; + once(event: string, listener: (...args: any[]) => void): this; + once(event: "disconnect", listener: (worker: Worker) => void): this; + once(event: "exit", listener: (worker: Worker, code: number, signal: string) => void): this; + once(event: "fork", listener: (worker: Worker) => void): this; + once(event: "listening", listener: (worker: Worker, address: Address) => void): this; + once(event: "message", listener: (worker: Worker, message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined. + once(event: "online", listener: (worker: Worker) => void): this; + once(event: "setup", listener: (settings: ClusterSettings) => void): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + prependListener(event: "disconnect", listener: (worker: Worker) => void): this; + prependListener(event: "exit", listener: (worker: Worker, code: number, signal: string) => void): this; + prependListener(event: "fork", listener: (worker: Worker) => void): this; + prependListener(event: "listening", listener: (worker: Worker, address: Address) => void): this; + // the handle is a net.Socket or net.Server object, or undefined. + prependListener( + event: "message", + listener: (worker: Worker, message: any, handle?: net.Socket | net.Server) => void, + ): this; + prependListener(event: "online", listener: (worker: Worker) => void): this; + prependListener(event: "setup", listener: (settings: ClusterSettings) => void): this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + prependOnceListener(event: "disconnect", listener: (worker: Worker) => void): this; + prependOnceListener(event: "exit", listener: (worker: Worker, code: number, signal: string) => void): this; + prependOnceListener(event: "fork", listener: (worker: Worker) => void): this; + prependOnceListener(event: "listening", listener: (worker: Worker, address: Address) => void): this; + // the handle is a net.Socket or net.Server object, or undefined. 
+        prependOnceListener(
+            event: "message",
+            listener: (worker: Worker, message: any, handle: net.Socket | net.Server) => void,
+        ): this;
+        prependOnceListener(event: "online", listener: (worker: Worker) => void): this;
+        prependOnceListener(event: "setup", listener: (settings: ClusterSettings) => void): this;
+    }
+    const cluster: Cluster;
+    export default cluster;
+}
+declare module "node:cluster" {
+    export * from "cluster";
+    export { default as default } from "cluster";
+}
diff --git a/node_modules/@types/node-fetch/node_modules/@types/node/compatibility/disposable.d.ts b/node_modules/@types/node-fetch/node_modules/@types/node/compatibility/disposable.d.ts
new file mode 100644
index 0000000..5fff612
--- /dev/null
+++ b/node_modules/@types/node-fetch/node_modules/@types/node/compatibility/disposable.d.ts
@@ -0,0 +1,16 @@
+// Polyfills for the explicit resource management types added in TypeScript 5.2.
+// TODO: remove once this package no longer supports TS 5.1, and replace with a
+// <reference> to TypeScript's disposable library in index.d.ts.
+
+interface SymbolConstructor {
+    readonly dispose: unique symbol;
+    readonly asyncDispose: unique symbol;
+}
+
+interface Disposable {
+    [Symbol.dispose](): void;
+}
+
+interface AsyncDisposable {
+    [Symbol.asyncDispose](): PromiseLike<void>;
+}
diff --git a/node_modules/@types/node-fetch/node_modules/@types/node/compatibility/index.d.ts b/node_modules/@types/node-fetch/node_modules/@types/node/compatibility/index.d.ts
new file mode 100644
index 0000000..5c41e37
--- /dev/null
+++ b/node_modules/@types/node-fetch/node_modules/@types/node/compatibility/index.d.ts
@@ -0,0 +1,9 @@
+// Declaration files in this directory contain types relating to TypeScript library features
+// that are not included in all TypeScript versions supported by DefinitelyTyped, but
+// which can be made backwards-compatible without needing `typesVersions`.
+// If adding declarations to this directory, please specify which versions of TypeScript require them,
+// so that they can be removed when no longer needed.
+
+/// <reference path="disposable.d.ts" />
+/// <reference path="indexable.d.ts" />
+/// <reference path="iterators.d.ts" />
diff --git a/node_modules/@types/node-fetch/node_modules/@types/node/compatibility/indexable.d.ts b/node_modules/@types/node-fetch/node_modules/@types/node/compatibility/indexable.d.ts
new file mode 100644
index 0000000..9919702
--- /dev/null
+++ b/node_modules/@types/node-fetch/node_modules/@types/node/compatibility/indexable.d.ts
@@ -0,0 +1,23 @@
+// Polyfill for ES2022's .at() method on string/array prototypes, added to TypeScript in 4.6.
+// TODO: these methods are not used within @types/node, and should be removed at the next
+// major @types/node version; users should include the es2022 TypeScript libraries
+// if they need these features.
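+
+// Illustrative usage (assuming either these shims or the es2022 libraries are
+// loaded): `.at()` accepts negative indices, counting back from the end.
+//
+//     [10, 20, 30].at(-1); // 30
+//     'node'.at(0);        // 'n'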
+
+interface RelativeIndexable<T> {
+    at(index: number): T | undefined;
+}
+
+interface String extends RelativeIndexable<string> {}
+interface Array<T> extends RelativeIndexable<T> {}
+interface ReadonlyArray<T> extends RelativeIndexable<T> {}
+interface Int8Array extends RelativeIndexable<number> {}
+interface Uint8Array extends RelativeIndexable<number> {}
+interface Uint8ClampedArray extends RelativeIndexable<number> {}
+interface Int16Array extends RelativeIndexable<number> {}
+interface Uint16Array extends RelativeIndexable<number> {}
+interface Int32Array extends RelativeIndexable<number> {}
+interface Uint32Array extends RelativeIndexable<number> {}
+interface Float32Array extends RelativeIndexable<number> {}
+interface Float64Array extends RelativeIndexable<number> {}
+interface BigInt64Array extends RelativeIndexable<bigint> {}
+interface BigUint64Array extends RelativeIndexable<bigint> {}
diff --git a/node_modules/@types/node-fetch/node_modules/@types/node/compatibility/iterators.d.ts b/node_modules/@types/node-fetch/node_modules/@types/node/compatibility/iterators.d.ts
new file mode 100644
index 0000000..156e785
--- /dev/null
+++ b/node_modules/@types/node-fetch/node_modules/@types/node/compatibility/iterators.d.ts
@@ -0,0 +1,21 @@
+// Backwards-compatible iterator interfaces, augmented with iterator helper methods by lib.esnext.iterator in TypeScript 5.6.
+// The IterableIterator interface does not contain these methods, which creates assignability issues in places where IteratorObjects
+// are expected (eg. DOM-compatible APIs) if lib.esnext.iterator is loaded.
+// Also ensures that iterators returned by the Node API, which inherit from Iterator.prototype, correctly expose the iterator helper methods
+// if lib.esnext.iterator is loaded.
+// TODO: remove once this package no longer supports TS 5.5, and replace NodeJS.BuiltinIteratorReturn with BuiltinIteratorReturn.
+
+// Placeholders for TS <5.6
+interface IteratorObject<T, TReturn, TNext> {}
+interface AsyncIteratorObject<T, TReturn, TNext> {}
+
+declare namespace NodeJS {
+    // Populate iterator methods for TS <5.6
+    interface Iterator<T, TReturn, TNext> extends globalThis.Iterator<T, TReturn, TNext> {}
+    interface AsyncIterator<T, TReturn, TNext> extends globalThis.AsyncIterator<T, TReturn, TNext> {}
+
+    // Polyfill for TS 5.6's intrinsic BuiltinIteratorReturn type, required for DOM-compatible iterators
+    type BuiltinIteratorReturn = ReturnType<any[][typeof Symbol.iterator]> extends
+        globalThis.Iterator<any, infer TReturn> ? TReturn
+        : any;
+}
diff --git a/node_modules/@types/node-fetch/node_modules/@types/node/console.d.ts b/node_modules/@types/node-fetch/node_modules/@types/node/console.d.ts
new file mode 100644
index 0000000..3e4c2d9
--- /dev/null
+++ b/node_modules/@types/node-fetch/node_modules/@types/node/console.d.ts
@@ -0,0 +1,452 @@
+/**
+ * The `node:console` module provides a simple debugging console that is similar to
+ * the JavaScript console mechanism provided by web browsers.
+ *
+ * The module exports two specific components:
+ *
+ * * A `Console` class with methods such as `console.log()`, `console.error()`, and `console.warn()` that can be used to write to any Node.js stream.
+ * * A global `console` instance configured to write to [`process.stdout`](https://nodejs.org/docs/latest-v22.x/api/process.html#processstdout) and
+ * [`process.stderr`](https://nodejs.org/docs/latest-v22.x/api/process.html#processstderr). The global `console` can be used without importing the `node:console` module.
+ *
+ * _**Warning**_: The global console object's methods are neither consistently
+ * synchronous like the browser APIs they resemble, nor are they consistently
+ * asynchronous like all other Node.js streams.
See the [`note on process I/O`](https://nodejs.org/docs/latest-v22.x/api/process.html#a-note-on-process-io) for + * more information. + * + * Example using the global `console`: + * + * ```js + * console.log('hello world'); + * // Prints: hello world, to stdout + * console.log('hello %s', 'world'); + * // Prints: hello world, to stdout + * console.error(new Error('Whoops, something bad happened')); + * // Prints error message and stack trace to stderr: + * // Error: Whoops, something bad happened + * // at [eval]:5:15 + * // at Script.runInThisContext (node:vm:132:18) + * // at Object.runInThisContext (node:vm:309:38) + * // at node:internal/process/execution:77:19 + * // at [eval]-wrapper:6:22 + * // at evalScript (node:internal/process/execution:76:60) + * // at node:internal/main/eval_string:23:3 + * + * const name = 'Will Robinson'; + * console.warn(`Danger ${name}! Danger!`); + * // Prints: Danger Will Robinson! Danger!, to stderr + * ``` + * + * Example using the `Console` class: + * + * ```js + * const out = getStreamSomehow(); + * const err = getStreamSomehow(); + * const myConsole = new console.Console(out, err); + * + * myConsole.log('hello world'); + * // Prints: hello world, to out + * myConsole.log('hello %s', 'world'); + * // Prints: hello world, to out + * myConsole.error(new Error('Whoops, something bad happened')); + * // Prints: [Error: Whoops, something bad happened], to err + * + * const name = 'Will Robinson'; + * myConsole.warn(`Danger ${name}! Danger!`); + * // Prints: Danger Will Robinson! Danger!, to err + * ``` + * @see [source](https://github.com/nodejs/node/blob/v22.x/lib/console.js) + */ +declare module "console" { + import console = require("node:console"); + export = console; +} +declare module "node:console" { + import { InspectOptions } from "node:util"; + global { + // This needs to be global to avoid TS2403 in case lib.dom.d.ts is present in the same build + interface Console { + Console: console.ConsoleConstructor; + /** + * `console.assert()` writes a message if `value` is [falsy](https://developer.mozilla.org/en-US/docs/Glossary/Falsy) or omitted. It only + * writes a message and does not otherwise affect execution. The output always + * starts with `"Assertion failed"`. If provided, `message` is formatted using + * [`util.format()`](https://nodejs.org/docs/latest-v22.x/api/util.html#utilformatformat-args). + * + * If `value` is [truthy](https://developer.mozilla.org/en-US/docs/Glossary/Truthy), nothing happens. + * + * ```js + * console.assert(true, 'does nothing'); + * + * console.assert(false, 'Whoops %s work', 'didn\'t'); + * // Assertion failed: Whoops didn't work + * + * console.assert(); + * // Assertion failed + * ``` + * @since v0.1.101 + * @param value The value tested for being truthy. + * @param message All arguments besides `value` are used as error message. + */ + assert(value: any, message?: string, ...optionalParams: any[]): void; + /** + * When `stdout` is a TTY, calling `console.clear()` will attempt to clear the + * TTY. When `stdout` is not a TTY, this method does nothing. + * + * The specific operation of `console.clear()` can vary across operating systems + * and terminal types. For most Linux operating systems, `console.clear()` operates similarly to the `clear` shell command. On Windows, `console.clear()` will clear only the output in the + * current terminal viewport for the Node.js + * binary. 
+ * @since v8.3.0 + */ + clear(): void; + /** + * Maintains an internal counter specific to `label` and outputs to `stdout` the + * number of times `console.count()` has been called with the given `label`. + * + * ```js + * > console.count() + * default: 1 + * undefined + * > console.count('default') + * default: 2 + * undefined + * > console.count('abc') + * abc: 1 + * undefined + * > console.count('xyz') + * xyz: 1 + * undefined + * > console.count('abc') + * abc: 2 + * undefined + * > console.count() + * default: 3 + * undefined + * > + * ``` + * @since v8.3.0 + * @param [label='default'] The display label for the counter. + */ + count(label?: string): void; + /** + * Resets the internal counter specific to `label`. + * + * ```js + * > console.count('abc'); + * abc: 1 + * undefined + * > console.countReset('abc'); + * undefined + * > console.count('abc'); + * abc: 1 + * undefined + * > + * ``` + * @since v8.3.0 + * @param [label='default'] The display label for the counter. + */ + countReset(label?: string): void; + /** + * The `console.debug()` function is an alias for {@link log}. + * @since v8.0.0 + */ + debug(message?: any, ...optionalParams: any[]): void; + /** + * Uses [`util.inspect()`](https://nodejs.org/docs/latest-v22.x/api/util.html#utilinspectobject-options) on `obj` and prints the resulting string to `stdout`. + * This function bypasses any custom `inspect()` function defined on `obj`. + * @since v0.1.101 + */ + dir(obj: any, options?: InspectOptions): void; + /** + * This method calls `console.log()` passing it the arguments received. + * This method does not produce any XML formatting. + * @since v8.0.0 + */ + dirxml(...data: any[]): void; + /** + * Prints to `stderr` with newline. Multiple arguments can be passed, with the + * first used as the primary message and all additional used as substitution + * values similar to [`printf(3)`](http://man7.org/linux/man-pages/man3/printf.3.html) + * (the arguments are all passed to [`util.format()`](https://nodejs.org/docs/latest-v22.x/api/util.html#utilformatformat-args)). + * + * ```js + * const code = 5; + * console.error('error #%d', code); + * // Prints: error #5, to stderr + * console.error('error', code); + * // Prints: error 5, to stderr + * ``` + * + * If formatting elements (e.g. `%d`) are not found in the first string then + * [`util.inspect()`](https://nodejs.org/docs/latest-v22.x/api/util.html#utilinspectobject-options) is called on each argument and the + * resulting string values are concatenated. See [`util.format()`](https://nodejs.org/docs/latest-v22.x/api/util.html#utilformatformat-args) + * for more information. + * @since v0.1.100 + */ + error(message?: any, ...optionalParams: any[]): void; + /** + * Increases indentation of subsequent lines by spaces for `groupIndentation` length. + * + * If one or more `label`s are provided, those are printed first without the + * additional indentation. + * @since v8.5.0 + */ + group(...label: any[]): void; + /** + * An alias for {@link group}. + * @since v8.5.0 + */ + groupCollapsed(...label: any[]): void; + /** + * Decreases indentation of subsequent lines by spaces for `groupIndentation` length. + * @since v8.5.0 + */ + groupEnd(): void; + /** + * The `console.info()` function is an alias for {@link log}. + * @since v0.1.100 + */ + info(message?: any, ...optionalParams: any[]): void; + /** + * Prints to `stdout` with newline. 
Multiple arguments can be passed, with the + * first used as the primary message and all additional used as substitution + * values similar to [`printf(3)`](http://man7.org/linux/man-pages/man3/printf.3.html) + * (the arguments are all passed to [`util.format()`](https://nodejs.org/docs/latest-v22.x/api/util.html#utilformatformat-args)). + * + * ```js + * const count = 5; + * console.log('count: %d', count); + * // Prints: count: 5, to stdout + * console.log('count:', count); + * // Prints: count: 5, to stdout + * ``` + * + * See [`util.format()`](https://nodejs.org/docs/latest-v22.x/api/util.html#utilformatformat-args) for more information. + * @since v0.1.100 + */ + log(message?: any, ...optionalParams: any[]): void; + /** + * Try to construct a table with the columns of the properties of `tabularData` (or use `properties`) and rows of `tabularData` and log it. Falls back to just + * logging the argument if it can't be parsed as tabular. + * + * ```js + * // These can't be parsed as tabular data + * console.table(Symbol()); + * // Symbol() + * + * console.table(undefined); + * // undefined + * + * console.table([{ a: 1, b: 'Y' }, { a: 'Z', b: 2 }]); + * // ┌─────────┬─────┬─────┐ + * // │ (index) │ a │ b │ + * // ├─────────┼─────┼─────┤ + * // │ 0 │ 1 │ 'Y' │ + * // │ 1 │ 'Z' │ 2 │ + * // └─────────┴─────┴─────┘ + * + * console.table([{ a: 1, b: 'Y' }, { a: 'Z', b: 2 }], ['a']); + * // ┌─────────┬─────┐ + * // │ (index) │ a │ + * // ├─────────┼─────┤ + * // │ 0 │ 1 │ + * // │ 1 │ 'Z' │ + * // └─────────┴─────┘ + * ``` + * @since v10.0.0 + * @param properties Alternate properties for constructing the table. + */ + table(tabularData: any, properties?: readonly string[]): void; + /** + * Starts a timer that can be used to compute the duration of an operation. Timers + * are identified by a unique `label`. Use the same `label` when calling {@link timeEnd} to stop the timer and output the elapsed time in + * suitable time units to `stdout`. For example, if the elapsed + * time is 3869ms, `console.timeEnd()` displays "3.869s". + * @since v0.1.104 + * @param [label='default'] + */ + time(label?: string): void; + /** + * Stops a timer that was previously started by calling {@link time} and + * prints the result to `stdout`: + * + * ```js + * console.time('bunch-of-stuff'); + * // Do a bunch of stuff. + * console.timeEnd('bunch-of-stuff'); + * // Prints: bunch-of-stuff: 225.438ms + * ``` + * @since v0.1.104 + * @param [label='default'] + */ + timeEnd(label?: string): void; + /** + * For a timer that was previously started by calling {@link time}, prints + * the elapsed time and other `data` arguments to `stdout`: + * + * ```js + * console.time('process'); + * const value = expensiveProcess1(); // Returns 42 + * console.timeLog('process', value); + * // Prints "process: 365.227ms 42". + * doExpensiveProcess2(value); + * console.timeEnd('process'); + * ``` + * @since v10.7.0 + * @param [label='default'] + */ + timeLog(label?: string, ...data: any[]): void; + /** + * Prints to `stderr` the string `'Trace: '`, followed by the [`util.format()`](https://nodejs.org/docs/latest-v22.x/api/util.html#utilformatformat-args) + * formatted message and stack trace to the current position in the code. 
+ * + * ```js + * console.trace('Show me'); + * // Prints: (stack trace will vary based on where trace is called) + * // Trace: Show me + * // at repl:2:9 + * // at REPLServer.defaultEval (repl.js:248:27) + * // at bound (domain.js:287:14) + * // at REPLServer.runBound [as eval] (domain.js:300:12) + * // at REPLServer. (repl.js:412:12) + * // at emitOne (events.js:82:20) + * // at REPLServer.emit (events.js:169:7) + * // at REPLServer.Interface._onLine (readline.js:210:10) + * // at REPLServer.Interface._line (readline.js:549:8) + * // at REPLServer.Interface._ttyWrite (readline.js:826:14) + * ``` + * @since v0.1.104 + */ + trace(message?: any, ...optionalParams: any[]): void; + /** + * The `console.warn()` function is an alias for {@link error}. + * @since v0.1.100 + */ + warn(message?: any, ...optionalParams: any[]): void; + // --- Inspector mode only --- + /** + * This method does not display anything unless used in the inspector. The `console.profile()` + * method starts a JavaScript CPU profile with an optional label until {@link profileEnd} + * is called. The profile is then added to the Profile panel of the inspector. + * + * ```js + * console.profile('MyLabel'); + * // Some code + * console.profileEnd('MyLabel'); + * // Adds the profile 'MyLabel' to the Profiles panel of the inspector. + * ``` + * @since v8.0.0 + */ + profile(label?: string): void; + /** + * This method does not display anything unless used in the inspector. Stops the current + * JavaScript CPU profiling session if one has been started and prints the report to the + * Profiles panel of the inspector. See {@link profile} for an example. + * + * If this method is called without a label, the most recently started profile is stopped. + * @since v8.0.0 + */ + profileEnd(label?: string): void; + /** + * This method does not display anything unless used in the inspector. The `console.timeStamp()` + * method adds an event with the label `'label'` to the Timeline panel of the inspector. + * @since v8.0.0 + */ + timeStamp(label?: string): void; + } + /** + * The `console` module provides a simple debugging console that is similar to the + * JavaScript console mechanism provided by web browsers. + * + * The module exports two specific components: + * + * * A `Console` class with methods such as `console.log()`, `console.error()` and `console.warn()` that can be used to write to any Node.js stream. + * * A global `console` instance configured to write to [`process.stdout`](https://nodejs.org/docs/latest-v22.x/api/process.html#processstdout) and + * [`process.stderr`](https://nodejs.org/docs/latest-v22.x/api/process.html#processstderr). The global `console` can be used without importing the `node:console` module. + * + * _**Warning**_: The global console object's methods are neither consistently + * synchronous like the browser APIs they resemble, nor are they consistently + * asynchronous like all other Node.js streams. See the [`note on process I/O`](https://nodejs.org/docs/latest-v22.x/api/process.html#a-note-on-process-io) for + * more information. 
+ * + * Example using the global `console`: + * + * ```js + * console.log('hello world'); + * // Prints: hello world, to stdout + * console.log('hello %s', 'world'); + * // Prints: hello world, to stdout + * console.error(new Error('Whoops, something bad happened')); + * // Prints error message and stack trace to stderr: + * // Error: Whoops, something bad happened + * // at [eval]:5:15 + * // at Script.runInThisContext (node:vm:132:18) + * // at Object.runInThisContext (node:vm:309:38) + * // at node:internal/process/execution:77:19 + * // at [eval]-wrapper:6:22 + * // at evalScript (node:internal/process/execution:76:60) + * // at node:internal/main/eval_string:23:3 + * + * const name = 'Will Robinson'; + * console.warn(`Danger ${name}! Danger!`); + * // Prints: Danger Will Robinson! Danger!, to stderr + * ``` + * + * Example using the `Console` class: + * + * ```js + * const out = getStreamSomehow(); + * const err = getStreamSomehow(); + * const myConsole = new console.Console(out, err); + * + * myConsole.log('hello world'); + * // Prints: hello world, to out + * myConsole.log('hello %s', 'world'); + * // Prints: hello world, to out + * myConsole.error(new Error('Whoops, something bad happened')); + * // Prints: [Error: Whoops, something bad happened], to err + * + * const name = 'Will Robinson'; + * myConsole.warn(`Danger ${name}! Danger!`); + * // Prints: Danger Will Robinson! Danger!, to err + * ``` + * @see [source](https://github.com/nodejs/node/blob/v22.x/lib/console.js) + */ + namespace console { + interface ConsoleConstructorOptions { + stdout: NodeJS.WritableStream; + stderr?: NodeJS.WritableStream | undefined; + /** + * Ignore errors when writing to the underlying streams. + * @default true + */ + ignoreErrors?: boolean | undefined; + /** + * Set color support for this `Console` instance. Setting to true enables coloring while inspecting + * values. Setting to `false` disables coloring while inspecting values. Setting to `'auto'` makes color + * support depend on the value of the `isTTY` property and the value returned by `getColorDepth()` on the + * respective stream. This option can not be used, if `inspectOptions.colors` is set as well. + * @default auto + */ + colorMode?: boolean | "auto" | undefined; + /** + * Specifies options that are passed along to + * [`util.inspect()`](https://nodejs.org/docs/latest-v22.x/api/util.html#utilinspectobject-options). + */ + inspectOptions?: InspectOptions | undefined; + /** + * Set group indentation. + * @default 2 + */ + groupIndentation?: number | undefined; + } + interface ConsoleConstructor { + prototype: Console; + new(stdout: NodeJS.WritableStream, stderr?: NodeJS.WritableStream, ignoreErrors?: boolean): Console; + new(options: ConsoleConstructorOptions): Console; + } + } + var console: Console; + } + export = globalThis.console; +} diff --git a/node_modules/@types/node-fetch/node_modules/@types/node/constants.d.ts b/node_modules/@types/node-fetch/node_modules/@types/node/constants.d.ts new file mode 100644 index 0000000..c3ac2b8 --- /dev/null +++ b/node_modules/@types/node-fetch/node_modules/@types/node/constants.d.ts @@ -0,0 +1,19 @@ +/** @deprecated since v6.3.0 - use constants property exposed by the relevant module instead. 
*/ +declare module "constants" { + import { constants as osConstants, SignalConstants } from "node:os"; + import { constants as cryptoConstants } from "node:crypto"; + import { constants as fsConstants } from "node:fs"; + + const exp: + & typeof osConstants.errno + & typeof osConstants.priority + & SignalConstants + & typeof cryptoConstants + & typeof fsConstants; + export = exp; +} + +declare module "node:constants" { + import constants = require("constants"); + export = constants; +} diff --git a/node_modules/@types/node-fetch/node_modules/@types/node/crypto.d.ts b/node_modules/@types/node-fetch/node_modules/@types/node/crypto.d.ts new file mode 100644 index 0000000..f7ee5a0 --- /dev/null +++ b/node_modules/@types/node-fetch/node_modules/@types/node/crypto.d.ts @@ -0,0 +1,4475 @@ +/** + * The `node:crypto` module provides cryptographic functionality that includes a + * set of wrappers for OpenSSL's hash, HMAC, cipher, decipher, sign, and verify + * functions. + * + * ```js + * const { createHmac } = await import('node:crypto'); + * + * const secret = 'abcdefg'; + * const hash = createHmac('sha256', secret) + * .update('I love cupcakes') + * .digest('hex'); + * console.log(hash); + * // Prints: + * // c0fa1bc00531bd78ef38c628449c5102aeabd49b5dc3a2a516ea6ea959d6658e + * ``` + * @see [source](https://github.com/nodejs/node/blob/v22.x/lib/crypto.js) + */ +declare module "crypto" { + import * as stream from "node:stream"; + import { PeerCertificate } from "node:tls"; + /** + * SPKAC is a Certificate Signing Request mechanism originally implemented by + * Netscape and was specified formally as part of HTML5's `keygen` element. + * + * `` is deprecated since [HTML 5.2](https://www.w3.org/TR/html52/changes.html#features-removed) and new projects + * should not use this element anymore. + * + * The `node:crypto` module provides the `Certificate` class for working with SPKAC + * data. The most common usage is handling output generated by the HTML5 `` element. Node.js uses [OpenSSL's SPKAC + * implementation](https://www.openssl.org/docs/man3.0/man1/openssl-spkac.html) internally. + * @since v0.11.8 + */ + class Certificate { + /** + * ```js + * const { Certificate } = await import('node:crypto'); + * const spkac = getSpkacSomehow(); + * const challenge = Certificate.exportChallenge(spkac); + * console.log(challenge.toString('utf8')); + * // Prints: the challenge as a UTF8 string + * ``` + * @since v9.0.0 + * @param encoding The `encoding` of the `spkac` string. + * @return The challenge component of the `spkac` data structure, which includes a public key and a challenge. + */ + static exportChallenge(spkac: BinaryLike): Buffer; + /** + * ```js + * const { Certificate } = await import('node:crypto'); + * const spkac = getSpkacSomehow(); + * const publicKey = Certificate.exportPublicKey(spkac); + * console.log(publicKey); + * // Prints: the public key as + * ``` + * @since v9.0.0 + * @param encoding The `encoding` of the `spkac` string. + * @return The public key component of the `spkac` data structure, which includes a public key and a challenge. + */ + static exportPublicKey(spkac: BinaryLike, encoding?: string): Buffer; + /** + * ```js + * import { Buffer } from 'node:buffer'; + * const { Certificate } = await import('node:crypto'); + * + * const spkac = getSpkacSomehow(); + * console.log(Certificate.verifySpkac(Buffer.from(spkac))); + * // Prints: true or false + * ``` + * @since v9.0.0 + * @param encoding The `encoding` of the `spkac` string. 
+ * @return `true` if the given `spkac` data structure is valid, `false` otherwise. + */ + static verifySpkac(spkac: NodeJS.ArrayBufferView): boolean; + /** + * @deprecated + * @param spkac + * @returns The challenge component of the `spkac` data structure, + * which includes a public key and a challenge. + */ + exportChallenge(spkac: BinaryLike): Buffer; + /** + * @deprecated + * @param spkac + * @param encoding The encoding of the spkac string. + * @returns The public key component of the `spkac` data structure, + * which includes a public key and a challenge. + */ + exportPublicKey(spkac: BinaryLike, encoding?: string): Buffer; + /** + * @deprecated + * @param spkac + * @returns `true` if the given `spkac` data structure is valid, + * `false` otherwise. + */ + verifySpkac(spkac: NodeJS.ArrayBufferView): boolean; + } + namespace constants { + // https://nodejs.org/dist/latest-v22.x/docs/api/crypto.html#crypto-constants + const OPENSSL_VERSION_NUMBER: number; + /** Applies multiple bug workarounds within OpenSSL. See https://www.openssl.org/docs/man1.0.2/ssl/SSL_CTX_set_options.html for detail. */ + const SSL_OP_ALL: number; + /** Instructs OpenSSL to allow a non-[EC]DHE-based key exchange mode for TLS v1.3 */ + const SSL_OP_ALLOW_NO_DHE_KEX: number; + /** Allows legacy insecure renegotiation between OpenSSL and unpatched clients or servers. See https://www.openssl.org/docs/man1.0.2/ssl/SSL_CTX_set_options.html. */ + const SSL_OP_ALLOW_UNSAFE_LEGACY_RENEGOTIATION: number; + /** Attempts to use the server's preferences instead of the client's when selecting a cipher. See https://www.openssl.org/docs/man1.0.2/ssl/SSL_CTX_set_options.html. */ + const SSL_OP_CIPHER_SERVER_PREFERENCE: number; + /** Instructs OpenSSL to use Cisco's version identifier of DTLS_BAD_VER. */ + const SSL_OP_CISCO_ANYCONNECT: number; + /** Instructs OpenSSL to turn on cookie exchange. */ + const SSL_OP_COOKIE_EXCHANGE: number; + /** Instructs OpenSSL to add server-hello extension from an early version of the cryptopro draft. */ + const SSL_OP_CRYPTOPRO_TLSEXT_BUG: number; + /** Instructs OpenSSL to disable a SSL 3.0/TLS 1.0 vulnerability workaround added in OpenSSL 0.9.6d. */ + const SSL_OP_DONT_INSERT_EMPTY_FRAGMENTS: number; + /** Allows initial connection to servers that do not support RI. */ + const SSL_OP_LEGACY_SERVER_CONNECT: number; + /** Instructs OpenSSL to disable support for SSL/TLS compression. */ + const SSL_OP_NO_COMPRESSION: number; + /** Instructs OpenSSL to disable encrypt-then-MAC. */ + const SSL_OP_NO_ENCRYPT_THEN_MAC: number; + const SSL_OP_NO_QUERY_MTU: number; + /** Instructs OpenSSL to disable renegotiation. */ + const SSL_OP_NO_RENEGOTIATION: number; + /** Instructs OpenSSL to always start a new session when performing renegotiation. */ + const SSL_OP_NO_SESSION_RESUMPTION_ON_RENEGOTIATION: number; + /** Instructs OpenSSL to turn off SSL v2 */ + const SSL_OP_NO_SSLv2: number; + /** Instructs OpenSSL to turn off SSL v3 */ + const SSL_OP_NO_SSLv3: number; + /** Instructs OpenSSL to disable use of RFC4507bis tickets. */ + const SSL_OP_NO_TICKET: number; + /** Instructs OpenSSL to turn off TLS v1 */ + const SSL_OP_NO_TLSv1: number; + /** Instructs OpenSSL to turn off TLS v1.1 */ + const SSL_OP_NO_TLSv1_1: number; + /** Instructs OpenSSL to turn off TLS v1.2 */ + const SSL_OP_NO_TLSv1_2: number; + /** Instructs OpenSSL to turn off TLS v1.3 */ + const SSL_OP_NO_TLSv1_3: number; + /** Instructs OpenSSL server to prioritize ChaCha20-Poly1305 when the client does. 
This option has no effect if `SSL_OP_CIPHER_SERVER_PREFERENCE` is not enabled. */ + const SSL_OP_PRIORITIZE_CHACHA: number; + /** Instructs OpenSSL to disable version rollback attack detection. */ + const SSL_OP_TLS_ROLLBACK_BUG: number; + const ENGINE_METHOD_RSA: number; + const ENGINE_METHOD_DSA: number; + const ENGINE_METHOD_DH: number; + const ENGINE_METHOD_RAND: number; + const ENGINE_METHOD_EC: number; + const ENGINE_METHOD_CIPHERS: number; + const ENGINE_METHOD_DIGESTS: number; + const ENGINE_METHOD_PKEY_METHS: number; + const ENGINE_METHOD_PKEY_ASN1_METHS: number; + const ENGINE_METHOD_ALL: number; + const ENGINE_METHOD_NONE: number; + const DH_CHECK_P_NOT_SAFE_PRIME: number; + const DH_CHECK_P_NOT_PRIME: number; + const DH_UNABLE_TO_CHECK_GENERATOR: number; + const DH_NOT_SUITABLE_GENERATOR: number; + const RSA_PKCS1_PADDING: number; + const RSA_SSLV23_PADDING: number; + const RSA_NO_PADDING: number; + const RSA_PKCS1_OAEP_PADDING: number; + const RSA_X931_PADDING: number; + const RSA_PKCS1_PSS_PADDING: number; + /** Sets the salt length for RSA_PKCS1_PSS_PADDING to the digest size when signing or verifying. */ + const RSA_PSS_SALTLEN_DIGEST: number; + /** Sets the salt length for RSA_PKCS1_PSS_PADDING to the maximum permissible value when signing data. */ + const RSA_PSS_SALTLEN_MAX_SIGN: number; + /** Causes the salt length for RSA_PKCS1_PSS_PADDING to be determined automatically when verifying a signature. */ + const RSA_PSS_SALTLEN_AUTO: number; + const POINT_CONVERSION_COMPRESSED: number; + const POINT_CONVERSION_UNCOMPRESSED: number; + const POINT_CONVERSION_HYBRID: number; + /** Specifies the built-in default cipher list used by Node.js (colon-separated values). */ + const defaultCoreCipherList: string; + /** Specifies the active default cipher list used by the current Node.js process (colon-separated values). */ + const defaultCipherList: string; + } + interface HashOptions extends stream.TransformOptions { + /** + * For XOF hash functions such as `shake256`, the + * outputLength option can be used to specify the desired output length in bytes. + */ + outputLength?: number | undefined; + } + /** @deprecated since v10.0.0 */ + const fips: boolean; + /** + * Creates and returns a `Hash` object that can be used to generate hash digests + * using the given `algorithm`. Optional `options` argument controls stream + * behavior. For XOF hash functions such as `'shake256'`, the `outputLength` option + * can be used to specify the desired output length in bytes. + * + * The `algorithm` is dependent on the available algorithms supported by the + * version of OpenSSL on the platform. Examples are `'sha256'`, `'sha512'`, etc. + * On recent releases of OpenSSL, `openssl list -digest-algorithms` will + * display the available digest algorithms. + * + * Example: generating the sha256 sum of a file + * + * ```js + * import { + * createReadStream, + * } from 'node:fs'; + * import { argv } from 'node:process'; + * const { + * createHash, + * } = await import('node:crypto'); + * + * const filename = argv[2]; + * + * const hash = createHash('sha256'); + * + * const input = createReadStream(filename); + * input.on('readable', () => { + * // Only one element is going to be produced by the + * // hash stream. 
+ * const data = input.read(); + * if (data) + * hash.update(data); + * else { + * console.log(`${hash.digest('hex')} ${filename}`); + * } + * }); + * ``` + * @since v0.1.92 + * @param options `stream.transform` options + */ + function createHash(algorithm: string, options?: HashOptions): Hash; + /** + * Creates and returns an `Hmac` object that uses the given `algorithm` and `key`. + * Optional `options` argument controls stream behavior. + * + * The `algorithm` is dependent on the available algorithms supported by the + * version of OpenSSL on the platform. Examples are `'sha256'`, `'sha512'`, etc. + * On recent releases of OpenSSL, `openssl list -digest-algorithms` will + * display the available digest algorithms. + * + * The `key` is the HMAC key used to generate the cryptographic HMAC hash. If it is + * a `KeyObject`, its type must be `secret`. If it is a string, please consider `caveats when using strings as inputs to cryptographic APIs`. If it was + * obtained from a cryptographically secure source of entropy, such as {@link randomBytes} or {@link generateKey}, its length should not + * exceed the block size of `algorithm` (e.g., 512 bits for SHA-256). + * + * Example: generating the sha256 HMAC of a file + * + * ```js + * import { + * createReadStream, + * } from 'node:fs'; + * import { argv } from 'node:process'; + * const { + * createHmac, + * } = await import('node:crypto'); + * + * const filename = argv[2]; + * + * const hmac = createHmac('sha256', 'a secret'); + * + * const input = createReadStream(filename); + * input.on('readable', () => { + * // Only one element is going to be produced by the + * // hash stream. + * const data = input.read(); + * if (data) + * hmac.update(data); + * else { + * console.log(`${hmac.digest('hex')} ${filename}`); + * } + * }); + * ``` + * @since v0.1.94 + * @param options `stream.transform` options + */ + function createHmac(algorithm: string, key: BinaryLike | KeyObject, options?: stream.TransformOptions): Hmac; + // https://nodejs.org/api/buffer.html#buffer_buffers_and_character_encodings + type BinaryToTextEncoding = "base64" | "base64url" | "hex" | "binary"; + type CharacterEncoding = "utf8" | "utf-8" | "utf16le" | "utf-16le" | "latin1"; + type LegacyCharacterEncoding = "ascii" | "binary" | "ucs2" | "ucs-2"; + type Encoding = BinaryToTextEncoding | CharacterEncoding | LegacyCharacterEncoding; + type ECDHKeyFormat = "compressed" | "uncompressed" | "hybrid"; + /** + * The `Hash` class is a utility for creating hash digests of data. It can be + * used in one of two ways: + * + * * As a `stream` that is both readable and writable, where data is written + * to produce a computed hash digest on the readable side, or + * * Using the `hash.update()` and `hash.digest()` methods to produce the + * computed hash. + * + * The {@link createHash} method is used to create `Hash` instances. `Hash`objects are not to be created directly using the `new` keyword. + * + * Example: Using `Hash` objects as streams: + * + * ```js + * const { + * createHash, + * } = await import('node:crypto'); + * + * const hash = createHash('sha256'); + * + * hash.on('readable', () => { + * // Only one element is going to be produced by the + * // hash stream. 
+ * const data = hash.read(); + * if (data) { + * console.log(data.toString('hex')); + * // Prints: + * // 6a2da20943931e9834fc12cfe5bb47bbd9ae43489a30726962b576f4e3993e50 + * } + * }); + * + * hash.write('some data to hash'); + * hash.end(); + * ``` + * + * Example: Using `Hash` and piped streams: + * + * ```js + * import { createReadStream } from 'node:fs'; + * import { stdout } from 'node:process'; + * const { createHash } = await import('node:crypto'); + * + * const hash = createHash('sha256'); + * + * const input = createReadStream('test.js'); + * input.pipe(hash).setEncoding('hex').pipe(stdout); + * ``` + * + * Example: Using the `hash.update()` and `hash.digest()` methods: + * + * ```js + * const { + * createHash, + * } = await import('node:crypto'); + * + * const hash = createHash('sha256'); + * + * hash.update('some data to hash'); + * console.log(hash.digest('hex')); + * // Prints: + * // 6a2da20943931e9834fc12cfe5bb47bbd9ae43489a30726962b576f4e3993e50 + * ``` + * @since v0.1.92 + */ + class Hash extends stream.Transform { + private constructor(); + /** + * Creates a new `Hash` object that contains a deep copy of the internal state + * of the current `Hash` object. + * + * The optional `options` argument controls stream behavior. For XOF hash + * functions such as `'shake256'`, the `outputLength` option can be used to + * specify the desired output length in bytes. + * + * An error is thrown when an attempt is made to copy the `Hash` object after + * its `hash.digest()` method has been called. + * + * ```js + * // Calculate a rolling hash. + * const { + * createHash, + * } = await import('node:crypto'); + * + * const hash = createHash('sha256'); + * + * hash.update('one'); + * console.log(hash.copy().digest('hex')); + * + * hash.update('two'); + * console.log(hash.copy().digest('hex')); + * + * hash.update('three'); + * console.log(hash.copy().digest('hex')); + * + * // Etc. + * ``` + * @since v13.1.0 + * @param options `stream.transform` options + */ + copy(options?: HashOptions): Hash; + /** + * Updates the hash content with the given `data`, the encoding of which + * is given in `inputEncoding`. + * If `encoding` is not provided, and the `data` is a string, an + * encoding of `'utf8'` is enforced. If `data` is a `Buffer`, `TypedArray`, or`DataView`, then `inputEncoding` is ignored. + * + * This can be called many times with new data as it is streamed. + * @since v0.1.92 + * @param inputEncoding The `encoding` of the `data` string. + */ + update(data: BinaryLike): Hash; + update(data: string, inputEncoding: Encoding): Hash; + /** + * Calculates the digest of all of the data passed to be hashed (using the `hash.update()` method). + * If `encoding` is provided a string will be returned; otherwise + * a `Buffer` is returned. + * + * The `Hash` object can not be used again after `hash.digest()` method has been + * called. Multiple calls will cause an error to be thrown. + * @since v0.1.92 + * @param encoding The `encoding` of the return value. + */ + digest(): Buffer; + digest(encoding: BinaryToTextEncoding): string; + } + /** + * The `Hmac` class is a utility for creating cryptographic HMAC digests. It can + * be used in one of two ways: + * + * * As a `stream` that is both readable and writable, where data is written + * to produce a computed HMAC digest on the readable side, or + * * Using the `hmac.update()` and `hmac.digest()` methods to produce the + * computed HMAC digest. + * + * The {@link createHmac} method is used to create `Hmac` instances. 
`Hmac`objects are not to be created directly using the `new` keyword. + * + * Example: Using `Hmac` objects as streams: + * + * ```js + * const { + * createHmac, + * } = await import('node:crypto'); + * + * const hmac = createHmac('sha256', 'a secret'); + * + * hmac.on('readable', () => { + * // Only one element is going to be produced by the + * // hash stream. + * const data = hmac.read(); + * if (data) { + * console.log(data.toString('hex')); + * // Prints: + * // 7fd04df92f636fd450bc841c9418e5825c17f33ad9c87c518115a45971f7f77e + * } + * }); + * + * hmac.write('some data to hash'); + * hmac.end(); + * ``` + * + * Example: Using `Hmac` and piped streams: + * + * ```js + * import { createReadStream } from 'node:fs'; + * import { stdout } from 'node:process'; + * const { + * createHmac, + * } = await import('node:crypto'); + * + * const hmac = createHmac('sha256', 'a secret'); + * + * const input = createReadStream('test.js'); + * input.pipe(hmac).pipe(stdout); + * ``` + * + * Example: Using the `hmac.update()` and `hmac.digest()` methods: + * + * ```js + * const { + * createHmac, + * } = await import('node:crypto'); + * + * const hmac = createHmac('sha256', 'a secret'); + * + * hmac.update('some data to hash'); + * console.log(hmac.digest('hex')); + * // Prints: + * // 7fd04df92f636fd450bc841c9418e5825c17f33ad9c87c518115a45971f7f77e + * ``` + * @since v0.1.94 + * @deprecated Since v20.13.0 Calling `Hmac` class directly with `Hmac()` or `new Hmac()` is deprecated due to being internals, not intended for public use. Please use the {@link createHmac} method to create Hmac instances. + */ + class Hmac extends stream.Transform { + private constructor(); + /** + * Updates the `Hmac` content with the given `data`, the encoding of which + * is given in `inputEncoding`. + * If `encoding` is not provided, and the `data` is a string, an + * encoding of `'utf8'` is enforced. If `data` is a `Buffer`, `TypedArray`, or`DataView`, then `inputEncoding` is ignored. + * + * This can be called many times with new data as it is streamed. + * @since v0.1.94 + * @param inputEncoding The `encoding` of the `data` string. + */ + update(data: BinaryLike): Hmac; + update(data: string, inputEncoding: Encoding): Hmac; + /** + * Calculates the HMAC digest of all of the data passed using `hmac.update()`. + * If `encoding` is + * provided a string is returned; otherwise a `Buffer` is returned; + * + * The `Hmac` object can not be used again after `hmac.digest()` has been + * called. Multiple calls to `hmac.digest()` will result in an error being thrown. + * @since v0.1.94 + * @param encoding The `encoding` of the return value. + */ + digest(): Buffer; + digest(encoding: BinaryToTextEncoding): string; + } + type KeyObjectType = "secret" | "public" | "private"; + interface KeyExportOptions { + type: "pkcs1" | "spki" | "pkcs8" | "sec1"; + format: T; + cipher?: string | undefined; + passphrase?: string | Buffer | undefined; + } + interface JwkKeyExportOptions { + format: "jwk"; + } + interface JsonWebKey { + crv?: string | undefined; + d?: string | undefined; + dp?: string | undefined; + dq?: string | undefined; + e?: string | undefined; + k?: string | undefined; + kty?: string | undefined; + n?: string | undefined; + p?: string | undefined; + q?: string | undefined; + qi?: string | undefined; + x?: string | undefined; + y?: string | undefined; + [key: string]: unknown; + } + interface AsymmetricKeyDetails { + /** + * Key size in bits (RSA, DSA). + */ + modulusLength?: number | undefined; + /** + * Public exponent (RSA). 
+ */ + publicExponent?: bigint | undefined; + /** + * Name of the message digest (RSA-PSS). + */ + hashAlgorithm?: string | undefined; + /** + * Name of the message digest used by MGF1 (RSA-PSS). + */ + mgf1HashAlgorithm?: string | undefined; + /** + * Minimal salt length in bytes (RSA-PSS). + */ + saltLength?: number | undefined; + /** + * Size of q in bits (DSA). + */ + divisorLength?: number | undefined; + /** + * Name of the curve (EC). + */ + namedCurve?: string | undefined; + } + /** + * Node.js uses a `KeyObject` class to represent a symmetric or asymmetric key, + * and each kind of key exposes different functions. The {@link createSecretKey}, {@link createPublicKey} and {@link createPrivateKey} methods are used to create `KeyObject`instances. `KeyObject` + * objects are not to be created directly using the `new`keyword. + * + * Most applications should consider using the new `KeyObject` API instead of + * passing keys as strings or `Buffer`s due to improved security features. + * + * `KeyObject` instances can be passed to other threads via `postMessage()`. + * The receiver obtains a cloned `KeyObject`, and the `KeyObject` does not need to + * be listed in the `transferList` argument. + * @since v11.6.0 + */ + class KeyObject { + private constructor(); + /** + * Example: Converting a `CryptoKey` instance to a `KeyObject`: + * + * ```js + * const { KeyObject } = await import('node:crypto'); + * const { subtle } = globalThis.crypto; + * + * const key = await subtle.generateKey({ + * name: 'HMAC', + * hash: 'SHA-256', + * length: 256, + * }, true, ['sign', 'verify']); + * + * const keyObject = KeyObject.from(key); + * console.log(keyObject.symmetricKeySize); + * // Prints: 32 (symmetric key size in bytes) + * ``` + * @since v15.0.0 + */ + static from(key: webcrypto.CryptoKey): KeyObject; + /** + * For asymmetric keys, this property represents the type of the key. Supported key + * types are: + * + * * `'rsa'` (OID 1.2.840.113549.1.1.1) + * * `'rsa-pss'` (OID 1.2.840.113549.1.1.10) + * * `'dsa'` (OID 1.2.840.10040.4.1) + * * `'ec'` (OID 1.2.840.10045.2.1) + * * `'x25519'` (OID 1.3.101.110) + * * `'x448'` (OID 1.3.101.111) + * * `'ed25519'` (OID 1.3.101.112) + * * `'ed448'` (OID 1.3.101.113) + * * `'dh'` (OID 1.2.840.113549.1.3.1) + * + * This property is `undefined` for unrecognized `KeyObject` types and symmetric + * keys. + * @since v11.6.0 + */ + asymmetricKeyType?: KeyType | undefined; + /** + * This property exists only on asymmetric keys. Depending on the type of the key, + * this object contains information about the key. None of the information obtained + * through this property can be used to uniquely identify a key or to compromise + * the security of the key. + * + * For RSA-PSS keys, if the key material contains a `RSASSA-PSS-params` sequence, + * the `hashAlgorithm`, `mgf1HashAlgorithm`, and `saltLength` properties will be + * set. + * + * Other key details might be exposed via this API using additional attributes. + * @since v15.7.0 + */ + asymmetricKeyDetails?: AsymmetricKeyDetails | undefined; + /** + * For symmetric keys, the following encoding options can be used: + * + * For public keys, the following encoding options can be used: + * + * For private keys, the following encoding options can be used: + * + * The result type depends on the selected encoding format, when PEM the + * result is a string, when DER it will be a buffer containing the data + * encoded as DER, when [JWK](https://tools.ietf.org/html/rfc7517) it will be an object. 
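+ *
+ * As an illustrative sketch (not part of the upstream documentation; it assumes
+ * an RSA key pair generated in memory with `generateKeyPairSync`), the public key
+ * can be exported in two of the formats described above:
+ *
+ * ```js
+ * const { generateKeyPairSync } = await import('node:crypto');
+ *
+ * const { publicKey } = generateKeyPairSync('rsa', { modulusLength: 2048 });
+ *
+ * // PEM produces a string.
+ * console.log(publicKey.export({ type: 'spki', format: 'pem' }));
+ * // JWK produces a plain JavaScript object.
+ * console.log(publicKey.export({ format: 'jwk' }));
+ * ```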
+ * + * When [JWK](https://tools.ietf.org/html/rfc7517) encoding format was selected, all other encoding options are + * ignored. + * + * PKCS#1, SEC1, and PKCS#8 type keys can be encrypted by using a combination of + * the `cipher` and `format` options. The PKCS#8 `type` can be used with any`format` to encrypt any key algorithm (RSA, EC, or DH) by specifying a`cipher`. PKCS#1 and SEC1 can only be + * encrypted by specifying a `cipher`when the PEM `format` is used. For maximum compatibility, use PKCS#8 for + * encrypted private keys. Since PKCS#8 defines its own + * encryption mechanism, PEM-level encryption is not supported when encrypting + * a PKCS#8 key. See [RFC 5208](https://www.rfc-editor.org/rfc/rfc5208.txt) for PKCS#8 encryption and [RFC 1421](https://www.rfc-editor.org/rfc/rfc1421.txt) for + * PKCS#1 and SEC1 encryption. + * @since v11.6.0 + */ + export(options: KeyExportOptions<"pem">): string | Buffer; + export(options?: KeyExportOptions<"der">): Buffer; + export(options?: JwkKeyExportOptions): JsonWebKey; + /** + * Returns `true` or `false` depending on whether the keys have exactly the same + * type, value, and parameters. This method is not [constant time](https://en.wikipedia.org/wiki/Timing_attack). + * @since v17.7.0, v16.15.0 + * @param otherKeyObject A `KeyObject` with which to compare `keyObject`. + */ + equals(otherKeyObject: KeyObject): boolean; + /** + * For secret keys, this property represents the size of the key in bytes. This + * property is `undefined` for asymmetric keys. + * @since v11.6.0 + */ + symmetricKeySize?: number | undefined; + /** + * Converts a `KeyObject` instance to a `CryptoKey`. + * @since 22.10.0 + */ + toCryptoKey( + algorithm: + | webcrypto.AlgorithmIdentifier + | webcrypto.RsaHashedImportParams + | webcrypto.EcKeyImportParams + | webcrypto.HmacImportParams, + extractable: boolean, + keyUsages: readonly webcrypto.KeyUsage[], + ): webcrypto.CryptoKey; + /** + * Depending on the type of this `KeyObject`, this property is either`'secret'` for secret (symmetric) keys, `'public'` for public (asymmetric) keys + * or `'private'` for private (asymmetric) keys. + * @since v11.6.0 + */ + type: KeyObjectType; + } + type CipherCCMTypes = "aes-128-ccm" | "aes-192-ccm" | "aes-256-ccm" | "chacha20-poly1305"; + type CipherGCMTypes = "aes-128-gcm" | "aes-192-gcm" | "aes-256-gcm"; + type CipherOCBTypes = "aes-128-ocb" | "aes-192-ocb" | "aes-256-ocb"; + type BinaryLike = string | NodeJS.ArrayBufferView; + type CipherKey = BinaryLike | KeyObject; + interface CipherCCMOptions extends stream.TransformOptions { + authTagLength: number; + } + interface CipherGCMOptions extends stream.TransformOptions { + authTagLength?: number | undefined; + } + interface CipherOCBOptions extends stream.TransformOptions { + authTagLength: number; + } + /** + * Creates and returns a `Cipher` object, with the given `algorithm`, `key` and + * initialization vector (`iv`). + * + * The `options` argument controls stream behavior and is optional except when a + * cipher in CCM or OCB mode (e.g. `'aes-128-ccm'`) is used. In that case, the`authTagLength` option is required and specifies the length of the + * authentication tag in bytes, see `CCM mode`. In GCM mode, the `authTagLength`option is not required but can be used to set the length of the authentication + * tag that will be returned by `getAuthTag()` and defaults to 16 bytes. + * For `chacha20-poly1305`, the `authTagLength` option defaults to 16 bytes. + * + * The `algorithm` is dependent on OpenSSL, examples are `'aes192'`, etc. 
On + * recent OpenSSL releases, `openssl list -cipher-algorithms` will + * display the available cipher algorithms. + * + * The `key` is the raw key used by the `algorithm` and `iv` is an [initialization vector](https://en.wikipedia.org/wiki/Initialization_vector). Both arguments must be `'utf8'` encoded + * strings,`Buffers`, `TypedArray`, or `DataView`s. The `key` may optionally be + * a `KeyObject` of type `secret`. If the cipher does not need + * an initialization vector, `iv` may be `null`. + * + * When passing strings for `key` or `iv`, please consider `caveats when using strings as inputs to cryptographic APIs`. + * + * Initialization vectors should be unpredictable and unique; ideally, they will be + * cryptographically random. They do not have to be secret: IVs are typically just + * added to ciphertext messages unencrypted. It may sound contradictory that + * something has to be unpredictable and unique, but does not have to be secret; + * remember that an attacker must not be able to predict ahead of time what a + * given IV will be. + * @since v0.1.94 + * @param options `stream.transform` options + */ + function createCipheriv( + algorithm: CipherCCMTypes, + key: CipherKey, + iv: BinaryLike, + options: CipherCCMOptions, + ): CipherCCM; + function createCipheriv( + algorithm: CipherOCBTypes, + key: CipherKey, + iv: BinaryLike, + options: CipherOCBOptions, + ): CipherOCB; + function createCipheriv( + algorithm: CipherGCMTypes, + key: CipherKey, + iv: BinaryLike, + options?: CipherGCMOptions, + ): CipherGCM; + function createCipheriv( + algorithm: string, + key: CipherKey, + iv: BinaryLike | null, + options?: stream.TransformOptions, + ): Cipher; + /** + * Instances of the `Cipher` class are used to encrypt data. The class can be + * used in one of two ways: + * + * * As a `stream` that is both readable and writable, where plain unencrypted + * data is written to produce encrypted data on the readable side, or + * * Using the `cipher.update()` and `cipher.final()` methods to produce + * the encrypted data. + * + * The {@link createCipheriv} method is + * used to create `Cipher` instances. `Cipher` objects are not to be created + * directly using the `new` keyword. + * + * Example: Using `Cipher` objects as streams: + * + * ```js + * const { + * scrypt, + * randomFill, + * createCipheriv, + * } = await import('node:crypto'); + * + * const algorithm = 'aes-192-cbc'; + * const password = 'Password used to generate key'; + * + * // First, we'll generate the key. The key length is dependent on the algorithm. + * // In this case for aes192, it is 24 bytes (192 bits). + * scrypt(password, 'salt', 24, (err, key) => { + * if (err) throw err; + * // Then, we'll generate a random initialization vector + * randomFill(new Uint8Array(16), (err, iv) => { + * if (err) throw err; + * + * // Once we have the key and iv, we can create and use the cipher... 
+ * const cipher = createCipheriv(algorithm, key, iv); + * + * let encrypted = ''; + * cipher.setEncoding('hex'); + * + * cipher.on('data', (chunk) => encrypted += chunk); + * cipher.on('end', () => console.log(encrypted)); + * + * cipher.write('some clear text data'); + * cipher.end(); + * }); + * }); + * ``` + * + * Example: Using `Cipher` and piped streams: + * + * ```js + * import { + * createReadStream, + * createWriteStream, + * } from 'node:fs'; + * + * import { + * pipeline, + * } from 'node:stream'; + * + * const { + * scrypt, + * randomFill, + * createCipheriv, + * } = await import('node:crypto'); + * + * const algorithm = 'aes-192-cbc'; + * const password = 'Password used to generate key'; + * + * // First, we'll generate the key. The key length is dependent on the algorithm. + * // In this case for aes192, it is 24 bytes (192 bits). + * scrypt(password, 'salt', 24, (err, key) => { + * if (err) throw err; + * // Then, we'll generate a random initialization vector + * randomFill(new Uint8Array(16), (err, iv) => { + * if (err) throw err; + * + * const cipher = createCipheriv(algorithm, key, iv); + * + * const input = createReadStream('test.js'); + * const output = createWriteStream('test.enc'); + * + * pipeline(input, cipher, output, (err) => { + * if (err) throw err; + * }); + * }); + * }); + * ``` + * + * Example: Using the `cipher.update()` and `cipher.final()` methods: + * + * ```js + * const { + * scrypt, + * randomFill, + * createCipheriv, + * } = await import('node:crypto'); + * + * const algorithm = 'aes-192-cbc'; + * const password = 'Password used to generate key'; + * + * // First, we'll generate the key. The key length is dependent on the algorithm. + * // In this case for aes192, it is 24 bytes (192 bits). + * scrypt(password, 'salt', 24, (err, key) => { + * if (err) throw err; + * // Then, we'll generate a random initialization vector + * randomFill(new Uint8Array(16), (err, iv) => { + * if (err) throw err; + * + * const cipher = createCipheriv(algorithm, key, iv); + * + * let encrypted = cipher.update('some clear text data', 'utf8', 'hex'); + * encrypted += cipher.final('hex'); + * console.log(encrypted); + * }); + * }); + * ``` + * @since v0.1.94 + */ + class Cipher extends stream.Transform { + private constructor(); + /** + * Updates the cipher with `data`. If the `inputEncoding` argument is given, + * the `data`argument is a string using the specified encoding. If the `inputEncoding`argument is not given, `data` must be a `Buffer`, `TypedArray`, or `DataView`. If `data` is a `Buffer`, + * `TypedArray`, or `DataView`, then `inputEncoding` is ignored. + * + * The `outputEncoding` specifies the output format of the enciphered + * data. If the `outputEncoding`is specified, a string using the specified encoding is returned. If no`outputEncoding` is provided, a `Buffer` is returned. + * + * The `cipher.update()` method can be called multiple times with new data until `cipher.final()` is called. Calling `cipher.update()` after `cipher.final()` will result in an error being + * thrown. + * @since v0.1.94 + * @param inputEncoding The `encoding` of the data. + * @param outputEncoding The `encoding` of the return value. 
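+ *
+ * A minimal sketch (illustrative, not from the upstream docs) of calling
+ * `cipher.update()` several times and collecting the `Buffer` output:
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ * const { createCipheriv, randomBytes, scryptSync } = await import('node:crypto');
+ *
+ * const key = scryptSync('a password', 'salt', 24); // 24 bytes for aes-192-cbc.
+ * const iv = randomBytes(16);
+ * const cipher = createCipheriv('aes-192-cbc', key, iv);
+ *
+ * // update() may be called repeatedly until final() is called.
+ * const ciphertext = Buffer.concat([
+ *   cipher.update('first chunk', 'utf8'),
+ *   cipher.update('second chunk', 'utf8'),
+ *   cipher.final(),
+ * ]);
+ * console.log(ciphertext.toString('hex'));
+ * ```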
+ */ + update(data: BinaryLike): Buffer; + update(data: string, inputEncoding: Encoding): Buffer; + update(data: NodeJS.ArrayBufferView, inputEncoding: undefined, outputEncoding: Encoding): string; + update(data: string, inputEncoding: Encoding | undefined, outputEncoding: Encoding): string; + /** + * Once the `cipher.final()` method has been called, the `Cipher` object can no + * longer be used to encrypt data. Attempts to call `cipher.final()` more than + * once will result in an error being thrown. + * @since v0.1.94 + * @param outputEncoding The `encoding` of the return value. + * @return Any remaining enciphered contents. If `outputEncoding` is specified, a string is returned. If an `outputEncoding` is not provided, a {@link Buffer} is returned. + */ + final(): Buffer; + final(outputEncoding: BufferEncoding): string; + /** + * When using block encryption algorithms, the `Cipher` class will automatically + * add padding to the input data to the appropriate block size. To disable the + * default padding call `cipher.setAutoPadding(false)`. + * + * When `autoPadding` is `false`, the length of the entire input data must be a + * multiple of the cipher's block size or `cipher.final()` will throw an error. + * Disabling automatic padding is useful for non-standard padding, for instance + * using `0x0` instead of PKCS padding. + * + * The `cipher.setAutoPadding()` method must be called before `cipher.final()`. + * @since v0.7.1 + * @param [autoPadding=true] + * @return for method chaining. + */ + setAutoPadding(autoPadding?: boolean): this; + } + interface CipherCCM extends Cipher { + setAAD( + buffer: NodeJS.ArrayBufferView, + options: { + plaintextLength: number; + }, + ): this; + getAuthTag(): Buffer; + } + interface CipherGCM extends Cipher { + setAAD( + buffer: NodeJS.ArrayBufferView, + options?: { + plaintextLength: number; + }, + ): this; + getAuthTag(): Buffer; + } + interface CipherOCB extends Cipher { + setAAD( + buffer: NodeJS.ArrayBufferView, + options?: { + plaintextLength: number; + }, + ): this; + getAuthTag(): Buffer; + } + /** + * Creates and returns a `Decipher` object that uses the given `algorithm`, `key` and initialization vector (`iv`). + * + * The `options` argument controls stream behavior and is optional except when a + * cipher in CCM or OCB mode (e.g. `'aes-128-ccm'`) is used. In that case, the `authTagLength` option is required and specifies the length of the + * authentication tag in bytes, see `CCM mode`. In GCM mode, the `authTagLength` option is not required but can be used to restrict accepted authentication tags + * to those with the specified length. + * For `chacha20-poly1305`, the `authTagLength` option defaults to 16 bytes. + * + * The `algorithm` is dependent on OpenSSL, examples are `'aes192'`, etc. On + * recent OpenSSL releases, `openssl list -cipher-algorithms` will + * display the available cipher algorithms. + * + * The `key` is the raw key used by the `algorithm` and `iv` is an [initialization vector](https://en.wikipedia.org/wiki/Initialization_vector). Both arguments must be `'utf8'` encoded + * strings,`Buffers`, `TypedArray`, or `DataView`s. The `key` may optionally be + * a `KeyObject` of type `secret`. If the cipher does not need + * an initialization vector, `iv` may be `null`. + * + * When passing strings for `key` or `iv`, please consider `caveats when using strings as inputs to cryptographic APIs`. + * + * Initialization vectors should be unpredictable and unique; ideally, they will be + * cryptographically random. 
They do not have to be secret: IVs are typically just + * added to ciphertext messages unencrypted. It may sound contradictory that + * something has to be unpredictable and unique, but does not have to be secret; + * remember that an attacker must not be able to predict ahead of time what a given + * IV will be. + * @since v0.1.94 + * @param options `stream.transform` options + */ + function createDecipheriv( + algorithm: CipherCCMTypes, + key: CipherKey, + iv: BinaryLike, + options: CipherCCMOptions, + ): DecipherCCM; + function createDecipheriv( + algorithm: CipherOCBTypes, + key: CipherKey, + iv: BinaryLike, + options: CipherOCBOptions, + ): DecipherOCB; + function createDecipheriv( + algorithm: CipherGCMTypes, + key: CipherKey, + iv: BinaryLike, + options?: CipherGCMOptions, + ): DecipherGCM; + function createDecipheriv( + algorithm: string, + key: CipherKey, + iv: BinaryLike | null, + options?: stream.TransformOptions, + ): Decipher; + /** + * Instances of the `Decipher` class are used to decrypt data. The class can be + * used in one of two ways: + * + * * As a `stream` that is both readable and writable, where plain encrypted + * data is written to produce unencrypted data on the readable side, or + * * Using the `decipher.update()` and `decipher.final()` methods to + * produce the unencrypted data. + * + * The {@link createDecipheriv} method is + * used to create `Decipher` instances. `Decipher` objects are not to be created + * directly using the `new` keyword. + * + * Example: Using `Decipher` objects as streams: + * + * ```js + * import { Buffer } from 'node:buffer'; + * const { + * scryptSync, + * createDecipheriv, + * } = await import('node:crypto'); + * + * const algorithm = 'aes-192-cbc'; + * const password = 'Password used to generate key'; + * // Key length is dependent on the algorithm. In this case for aes192, it is + * // 24 bytes (192 bits). + * // Use the async `crypto.scrypt()` instead. + * const key = scryptSync(password, 'salt', 24); + * // The IV is usually passed along with the ciphertext. + * const iv = Buffer.alloc(16, 0); // Initialization vector. + * + * const decipher = createDecipheriv(algorithm, key, iv); + * + * let decrypted = ''; + * decipher.on('readable', () => { + * let chunk; + * while (null !== (chunk = decipher.read())) { + * decrypted += chunk.toString('utf8'); + * } + * }); + * decipher.on('end', () => { + * console.log(decrypted); + * // Prints: some clear text data + * }); + * + * // Encrypted with same algorithm, key and iv. + * const encrypted = + * 'e5f79c5915c02171eec6b212d5520d44480993d7d622a7c4c2da32f6efda0ffa'; + * decipher.write(encrypted, 'hex'); + * decipher.end(); + * ``` + * + * Example: Using `Decipher` and piped streams: + * + * ```js + * import { + * createReadStream, + * createWriteStream, + * } from 'node:fs'; + * import { Buffer } from 'node:buffer'; + * const { + * scryptSync, + * createDecipheriv, + * } = await import('node:crypto'); + * + * const algorithm = 'aes-192-cbc'; + * const password = 'Password used to generate key'; + * // Use the async `crypto.scrypt()` instead. + * const key = scryptSync(password, 'salt', 24); + * // The IV is usually passed along with the ciphertext. + * const iv = Buffer.alloc(16, 0); // Initialization vector. 
+ * + * const decipher = createDecipheriv(algorithm, key, iv); + * + * const input = createReadStream('test.enc'); + * const output = createWriteStream('test.js'); + * + * input.pipe(decipher).pipe(output); + * ``` + * + * Example: Using the `decipher.update()` and `decipher.final()` methods: + * + * ```js + * import { Buffer } from 'node:buffer'; + * const { + * scryptSync, + * createDecipheriv, + * } = await import('node:crypto'); + * + * const algorithm = 'aes-192-cbc'; + * const password = 'Password used to generate key'; + * // Use the async `crypto.scrypt()` instead. + * const key = scryptSync(password, 'salt', 24); + * // The IV is usually passed along with the ciphertext. + * const iv = Buffer.alloc(16, 0); // Initialization vector. + * + * const decipher = createDecipheriv(algorithm, key, iv); + * + * // Encrypted using same algorithm, key and iv. + * const encrypted = + * 'e5f79c5915c02171eec6b212d5520d44480993d7d622a7c4c2da32f6efda0ffa'; + * let decrypted = decipher.update(encrypted, 'hex', 'utf8'); + * decrypted += decipher.final('utf8'); + * console.log(decrypted); + * // Prints: some clear text data + * ``` + * @since v0.1.94 + */ + class Decipher extends stream.Transform { + private constructor(); + /** + * Updates the decipher with `data`. If the `inputEncoding` argument is given, + * the `data` argument is a string using the specified encoding. If the `inputEncoding` argument is not given, `data` must be a `Buffer`. If `data` is a `Buffer` then `inputEncoding` is + * ignored. + * + * The `outputEncoding` specifies the output format of the enciphered + * data. If the `outputEncoding` is specified, a string using the specified encoding is returned. If no `outputEncoding` is provided, a `Buffer` is returned. + * + * The `decipher.update()` method can be called multiple times with new data until `decipher.final()` is called. Calling `decipher.update()` after `decipher.final()` will result in an error + * being thrown. + * @since v0.1.94 + * @param inputEncoding The `encoding` of the `data` string. + * @param outputEncoding The `encoding` of the return value. + */ + update(data: NodeJS.ArrayBufferView): Buffer; + update(data: string, inputEncoding: Encoding): Buffer; + update(data: NodeJS.ArrayBufferView, inputEncoding: undefined, outputEncoding: Encoding): string; + update(data: string, inputEncoding: Encoding | undefined, outputEncoding: Encoding): string; + /** + * Once the `decipher.final()` method has been called, the `Decipher` object can + * no longer be used to decrypt data. Attempts to call `decipher.final()` more + * than once will result in an error being thrown. + * @since v0.1.94 + * @param outputEncoding The `encoding` of the return value. + * @return Any remaining deciphered contents. If `outputEncoding` is specified, a string is returned. If an `outputEncoding` is not provided, a {@link Buffer} is returned. + */ + final(): Buffer; + final(outputEncoding: BufferEncoding): string; + /** + * When data has been encrypted without standard block padding, calling `decipher.setAutoPadding(false)` will disable automatic padding to prevent `decipher.final()` from checking for and + * removing padding. + * + * Turning auto padding off will only work if the input data's length is a + * multiple of the ciphers block size. + * + * The `decipher.setAutoPadding()` method must be called before `decipher.final()`. + * @since v0.7.1 + * @param [autoPadding=true] + * @return for method chaining. 
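+ *
+ * For the authenticated modes covered by the `CipherGCM`/`DecipherGCM`
+ * interfaces in this file, a minimal round-trip sketch (illustrative only,
+ * not taken from the upstream docs) could look like this:
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ * const { createCipheriv, createDecipheriv, randomBytes } = await import('node:crypto');
+ *
+ * const key = randomBytes(32);
+ * const iv = randomBytes(12); // A 96-bit IV is the common choice for GCM.
+ * const aad = Buffer.from('additional authenticated data');
+ *
+ * const cipher = createCipheriv('aes-256-gcm', key, iv);
+ * cipher.setAAD(aad); // CCM mode would additionally require `plaintextLength` here.
+ * const ciphertext = Buffer.concat([cipher.update('secret text', 'utf8'), cipher.final()]);
+ * const tag = cipher.getAuthTag();
+ *
+ * const decipher = createDecipheriv('aes-256-gcm', key, iv);
+ * decipher.setAAD(aad);
+ * decipher.setAuthTag(tag);
+ * const plaintext = Buffer.concat([decipher.update(ciphertext), decipher.final()]);
+ * console.log(plaintext.toString('utf8')); // Prints: secret text
+ * ```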
+ */ + setAutoPadding(auto_padding?: boolean): this; + } + interface DecipherCCM extends Decipher { + setAuthTag(buffer: NodeJS.ArrayBufferView): this; + setAAD( + buffer: NodeJS.ArrayBufferView, + options: { + plaintextLength: number; + }, + ): this; + } + interface DecipherGCM extends Decipher { + setAuthTag(buffer: NodeJS.ArrayBufferView): this; + setAAD( + buffer: NodeJS.ArrayBufferView, + options?: { + plaintextLength: number; + }, + ): this; + } + interface DecipherOCB extends Decipher { + setAuthTag(buffer: NodeJS.ArrayBufferView): this; + setAAD( + buffer: NodeJS.ArrayBufferView, + options?: { + plaintextLength: number; + }, + ): this; + } + interface PrivateKeyInput { + key: string | Buffer; + format?: KeyFormat | undefined; + type?: "pkcs1" | "pkcs8" | "sec1" | undefined; + passphrase?: string | Buffer | undefined; + encoding?: string | undefined; + } + interface PublicKeyInput { + key: string | Buffer; + format?: KeyFormat | undefined; + type?: "pkcs1" | "spki" | undefined; + encoding?: string | undefined; + } + /** + * Asynchronously generates a new random secret key of the given `length`. The `type` will determine which validations will be performed on the `length`. + * + * ```js + * const { + * generateKey, + * } = await import('node:crypto'); + * + * generateKey('hmac', { length: 512 }, (err, key) => { + * if (err) throw err; + * console.log(key.export().toString('hex')); // 46e..........620 + * }); + * ``` + * + * The size of a generated HMAC key should not exceed the block size of the + * underlying hash function. See {@link createHmac} for more information. + * @since v15.0.0 + * @param type The intended use of the generated secret key. Currently accepted values are `'hmac'` and `'aes'`. + */ + function generateKey( + type: "hmac" | "aes", + options: { + length: number; + }, + callback: (err: Error | null, key: KeyObject) => void, + ): void; + /** + * Synchronously generates a new random secret key of the given `length`. The `type` will determine which validations will be performed on the `length`. + * + * ```js + * const { + * generateKeySync, + * } = await import('node:crypto'); + * + * const key = generateKeySync('hmac', { length: 512 }); + * console.log(key.export().toString('hex')); // e89..........41e + * ``` + * + * The size of a generated HMAC key should not exceed the block size of the + * underlying hash function. See {@link createHmac} for more information. + * @since v15.0.0 + * @param type The intended use of the generated secret key. Currently accepted values are `'hmac'` and `'aes'`. + */ + function generateKeySync( + type: "hmac" | "aes", + options: { + length: number; + }, + ): KeyObject; + interface JsonWebKeyInput { + key: JsonWebKey; + format: "jwk"; + } + /** + * Creates and returns a new key object containing a private key. If `key` is a + * string or `Buffer`, `format` is assumed to be `'pem'`; otherwise, `key` must be an object with the properties described above. + * + * If the private key is encrypted, a `passphrase` must be specified. The length + * of the passphrase is limited to 1024 bytes. + * @since v11.6.0 + */ + function createPrivateKey(key: PrivateKeyInput | string | Buffer | JsonWebKeyInput): KeyObject; + /** + * Creates and returns a new key object containing a public key. If `key` is a + * string or `Buffer`, `format` is assumed to be `'pem'`; if `key` is a `KeyObject` with type `'private'`, the public key is derived from the given private key; + * otherwise, `key` must be an object with the properties described above. 
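+ *
+ * A brief sketch of `createPrivateKey()` (described above); it assumes a
+ * PKCS#8 PEM string generated here purely for illustration:
+ *
+ * ```js
+ * const { generateKeyPairSync, createPrivateKey } = await import('node:crypto');
+ *
+ * // Normally the PEM would be read from configuration or storage.
+ * const { privateKey: privatePem } = generateKeyPairSync('ec', {
+ *   namedCurve: 'prime256v1',
+ *   privateKeyEncoding: { type: 'pkcs8', format: 'pem' },
+ * });
+ *
+ * const keyObject = createPrivateKey(privatePem);
+ * console.log(keyObject.type); // Prints: private
+ * console.log(keyObject.asymmetricKeyType); // Prints: ec
+ * ```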
+ * + * If the format is `'pem'`, the `'key'` may also be an X.509 certificate. + * + * Because public keys can be derived from private keys, a private key may be + * passed instead of a public key. In that case, this function behaves as if {@link createPrivateKey} had been called, except that the type of the + * returned `KeyObject` will be `'public'` and that the private key cannot be + * extracted from the returned `KeyObject`. Similarly, if a `KeyObject` with type `'private'` is given, a new `KeyObject` with type `'public'` will be returned + * and it will be impossible to extract the private key from the returned object. + * @since v11.6.0 + */ + function createPublicKey(key: PublicKeyInput | string | Buffer | KeyObject | JsonWebKeyInput): KeyObject; + /** + * Creates and returns a new key object containing a secret key for symmetric + * encryption or `Hmac`. + * @since v11.6.0 + * @param encoding The string encoding when `key` is a string. + */ + function createSecretKey(key: NodeJS.ArrayBufferView): KeyObject; + function createSecretKey(key: string, encoding: BufferEncoding): KeyObject; + /** + * Creates and returns a `Sign` object that uses the given `algorithm`. Use {@link getHashes} to obtain the names of the available digest algorithms. + * Optional `options` argument controls the `stream.Writable` behavior. + * + * In some cases, a `Sign` instance can be created using the name of a signature + * algorithm, such as `'RSA-SHA256'`, instead of a digest algorithm. This will use + * the corresponding digest algorithm. This does not work for all signature + * algorithms, such as `'ecdsa-with-SHA256'`, so it is best to always use digest + * algorithm names. + * @since v0.1.92 + * @param options `stream.Writable` options + */ + function createSign(algorithm: string, options?: stream.WritableOptions): Sign; + type DSAEncoding = "der" | "ieee-p1363"; + interface SigningOptions { + /** + * @see crypto.constants.RSA_PKCS1_PADDING + */ + padding?: number | undefined; + saltLength?: number | undefined; + dsaEncoding?: DSAEncoding | undefined; + } + interface SignPrivateKeyInput extends PrivateKeyInput, SigningOptions {} + interface SignKeyObjectInput extends SigningOptions { + key: KeyObject; + } + interface SignJsonWebKeyInput extends JsonWebKeyInput, SigningOptions {} + interface VerifyPublicKeyInput extends PublicKeyInput, SigningOptions {} + interface VerifyKeyObjectInput extends SigningOptions { + key: KeyObject; + } + interface VerifyJsonWebKeyInput extends JsonWebKeyInput, SigningOptions {} + type KeyLike = string | Buffer | KeyObject; + /** + * The `Sign` class is a utility for generating signatures. It can be used in one + * of two ways: + * + * * As a writable `stream`, where data to be signed is written and the `sign.sign()` method is used to generate and return the signature, or + * * Using the `sign.update()` and `sign.sign()` methods to produce the + * signature. + * + * The {@link createSign} method is used to create `Sign` instances. The + * argument is the string name of the hash function to use. `Sign` objects are not + * to be created directly using the `new` keyword. 
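+ *
+ * Before the `Sign` examples below, a short sketch of `createSecretKey()`
+ * (documented above) combined with `createHmac()`; this is illustrative only:
+ *
+ * ```js
+ * const { createSecretKey, createHmac, randomBytes } = await import('node:crypto');
+ *
+ * const key = createSecretKey(randomBytes(32));
+ * console.log(key.type); // Prints: secret
+ *
+ * const mac = createHmac('sha256', key).update('some message').digest('hex');
+ * console.log(mac);
+ * ```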
+ * + * Example: Using `Sign` and `Verify` objects as streams: + * + * ```js + * const { + * generateKeyPairSync, + * createSign, + * createVerify, + * } = await import('node:crypto'); + * + * const { privateKey, publicKey } = generateKeyPairSync('ec', { + * namedCurve: 'sect239k1', + * }); + * + * const sign = createSign('SHA256'); + * sign.write('some data to sign'); + * sign.end(); + * const signature = sign.sign(privateKey, 'hex'); + * + * const verify = createVerify('SHA256'); + * verify.write('some data to sign'); + * verify.end(); + * console.log(verify.verify(publicKey, signature, 'hex')); + * // Prints: true + * ``` + * + * Example: Using the `sign.update()` and `verify.update()` methods: + * + * ```js + * const { + * generateKeyPairSync, + * createSign, + * createVerify, + * } = await import('node:crypto'); + * + * const { privateKey, publicKey } = generateKeyPairSync('rsa', { + * modulusLength: 2048, + * }); + * + * const sign = createSign('SHA256'); + * sign.update('some data to sign'); + * sign.end(); + * const signature = sign.sign(privateKey); + * + * const verify = createVerify('SHA256'); + * verify.update('some data to sign'); + * verify.end(); + * console.log(verify.verify(publicKey, signature)); + * // Prints: true + * ``` + * @since v0.1.92 + */ + class Sign extends stream.Writable { + private constructor(); + /** + * Updates the `Sign` content with the given `data`, the encoding of which + * is given in `inputEncoding`. + * If `encoding` is not provided, and the `data` is a string, an + * encoding of `'utf8'` is enforced. If `data` is a `Buffer`, `TypedArray`, or`DataView`, then `inputEncoding` is ignored. + * + * This can be called many times with new data as it is streamed. + * @since v0.1.92 + * @param inputEncoding The `encoding` of the `data` string. + */ + update(data: BinaryLike): this; + update(data: string, inputEncoding: Encoding): this; + /** + * Calculates the signature on all the data passed through using either `sign.update()` or `sign.write()`. + * + * If `privateKey` is not a `KeyObject`, this function behaves as if `privateKey` had been passed to {@link createPrivateKey}. If it is an + * object, the following additional properties can be passed: + * + * If `outputEncoding` is provided a string is returned; otherwise a `Buffer` is returned. + * + * The `Sign` object can not be again used after `sign.sign()` method has been + * called. Multiple calls to `sign.sign()` will result in an error being thrown. + * @since v0.1.92 + */ + sign(privateKey: KeyLike | SignKeyObjectInput | SignPrivateKeyInput | SignJsonWebKeyInput): Buffer; + sign( + privateKey: KeyLike | SignKeyObjectInput | SignPrivateKeyInput | SignJsonWebKeyInput, + outputFormat: BinaryToTextEncoding, + ): string; + } + /** + * Creates and returns a `Verify` object that uses the given algorithm. + * Use {@link getHashes} to obtain an array of names of the available + * signing algorithms. Optional `options` argument controls the `stream.Writable` behavior. + * + * In some cases, a `Verify` instance can be created using the name of a signature + * algorithm, such as `'RSA-SHA256'`, instead of a digest algorithm. This will use + * the corresponding digest algorithm. This does not work for all signature + * algorithms, such as `'ecdsa-with-SHA256'`, so it is best to always use digest + * algorithm names. 
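+ *
+ * An illustrative sketch (not from the upstream docs) combining `Sign`/`Verify`
+ * with the `SigningOptions` described earlier, here using RSA-PSS padding:
+ *
+ * ```js
+ * const {
+ *   generateKeyPairSync,
+ *   createSign,
+ *   createVerify,
+ *   constants,
+ * } = await import('node:crypto');
+ *
+ * const { privateKey, publicKey } = generateKeyPairSync('rsa', { modulusLength: 2048 });
+ *
+ * const sign = createSign('sha256');
+ * sign.update('some data to sign');
+ * const signature = sign.sign({
+ *   key: privateKey,
+ *   padding: constants.RSA_PKCS1_PSS_PADDING,
+ *   saltLength: 32,
+ * });
+ *
+ * const verify = createVerify('sha256');
+ * verify.update('some data to sign');
+ * console.log(verify.verify({
+ *   key: publicKey,
+ *   padding: constants.RSA_PKCS1_PSS_PADDING,
+ *   saltLength: 32,
+ * }, signature));
+ * // Prints: true
+ * ```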
+ * @since v0.1.92 + * @param options `stream.Writable` options + */ + function createVerify(algorithm: string, options?: stream.WritableOptions): Verify; + /** + * The `Verify` class is a utility for verifying signatures. It can be used in one + * of two ways: + * + * * As a writable `stream` where written data is used to validate against the + * supplied signature, or + * * Using the `verify.update()` and `verify.verify()` methods to verify + * the signature. + * + * The {@link createVerify} method is used to create `Verify` instances. `Verify` objects are not to be created directly using the `new` keyword. + * + * See `Sign` for examples. + * @since v0.1.92 + */ + class Verify extends stream.Writable { + private constructor(); + /** + * Updates the `Verify` content with the given `data`, the encoding of which + * is given in `inputEncoding`. + * If `inputEncoding` is not provided, and the `data` is a string, an + * encoding of `'utf8'` is enforced. If `data` is a `Buffer`, `TypedArray`, or `DataView`, then `inputEncoding` is ignored. + * + * This can be called many times with new data as it is streamed. + * @since v0.1.92 + * @param inputEncoding The `encoding` of the `data` string. + */ + update(data: BinaryLike): Verify; + update(data: string, inputEncoding: Encoding): Verify; + /** + * Verifies the provided data using the given `object` and `signature`. + * + * If `object` is not a `KeyObject`, this function behaves as if `object` had been passed to {@link createPublicKey}. If it is an + * object, the following additional properties can be passed: + * + * The `signature` argument is the previously calculated signature for the data, in + * the `signatureEncoding`. + * If a `signatureEncoding` is specified, the `signature` is expected to be a + * string; otherwise `signature` is expected to be a `Buffer`, `TypedArray`, or `DataView`. + * + * The `verify` object can not be used again after `verify.verify()` has been + * called. Multiple calls to `verify.verify()` will result in an error being + * thrown. + * + * Because public keys can be derived from private keys, a private key may + * be passed instead of a public key. + * @since v0.1.92 + */ + verify( + object: KeyLike | VerifyKeyObjectInput | VerifyPublicKeyInput | VerifyJsonWebKeyInput, + signature: NodeJS.ArrayBufferView, + ): boolean; + verify( + object: KeyLike | VerifyKeyObjectInput | VerifyPublicKeyInput | VerifyJsonWebKeyInput, + signature: string, + signature_format?: BinaryToTextEncoding, + ): boolean; + } + /** + * Creates a `DiffieHellman` key exchange object using the supplied `prime` and an + * optional specific `generator`. + * + * The `generator` argument can be a number, string, or `Buffer`. If `generator` is not specified, the value `2` is used. + * + * If `primeEncoding` is specified, `prime` is expected to be a string; otherwise + * a `Buffer`, `TypedArray`, or `DataView` is expected. + * + * If `generatorEncoding` is specified, `generator` is expected to be a string; + * otherwise a number, `Buffer`, `TypedArray`, or `DataView` is expected. + * @since v0.11.12 + * @param primeEncoding The `encoding` of the `prime` string. + * @param [generator=2] + * @param generatorEncoding The `encoding` of the `generator` string. 
+ */ + function createDiffieHellman(primeLength: number, generator?: number): DiffieHellman; + function createDiffieHellman( + prime: ArrayBuffer | NodeJS.ArrayBufferView, + generator?: number | ArrayBuffer | NodeJS.ArrayBufferView, + ): DiffieHellman; + function createDiffieHellman( + prime: ArrayBuffer | NodeJS.ArrayBufferView, + generator: string, + generatorEncoding: BinaryToTextEncoding, + ): DiffieHellman; + function createDiffieHellman( + prime: string, + primeEncoding: BinaryToTextEncoding, + generator?: number | ArrayBuffer | NodeJS.ArrayBufferView, + ): DiffieHellman; + function createDiffieHellman( + prime: string, + primeEncoding: BinaryToTextEncoding, + generator: string, + generatorEncoding: BinaryToTextEncoding, + ): DiffieHellman; + /** + * The `DiffieHellman` class is a utility for creating Diffie-Hellman key + * exchanges. + * + * Instances of the `DiffieHellman` class can be created using the {@link createDiffieHellman} function. + * + * ```js + * import assert from 'node:assert'; + * + * const { + * createDiffieHellman, + * } = await import('node:crypto'); + * + * // Generate Alice's keys... + * const alice = createDiffieHellman(2048); + * const aliceKey = alice.generateKeys(); + * + * // Generate Bob's keys... + * const bob = createDiffieHellman(alice.getPrime(), alice.getGenerator()); + * const bobKey = bob.generateKeys(); + * + * // Exchange and generate the secret... + * const aliceSecret = alice.computeSecret(bobKey); + * const bobSecret = bob.computeSecret(aliceKey); + * + * // OK + * assert.strictEqual(aliceSecret.toString('hex'), bobSecret.toString('hex')); + * ``` + * @since v0.5.0 + */ + class DiffieHellman { + private constructor(); + /** + * Generates private and public Diffie-Hellman key values unless they have been + * generated or computed already, and returns + * the public key in the specified `encoding`. This key should be + * transferred to the other party. + * If `encoding` is provided a string is returned; otherwise a `Buffer` is returned. + * + * This function is a thin wrapper around [`DH_generate_key()`](https://www.openssl.org/docs/man3.0/man3/DH_generate_key.html). In particular, + * once a private key has been generated or set, calling this function only updates + * the public key but does not generate a new private key. + * @since v0.5.0 + * @param encoding The `encoding` of the return value. + */ + generateKeys(): Buffer; + generateKeys(encoding: BinaryToTextEncoding): string; + /** + * Computes the shared secret using `otherPublicKey` as the other + * party's public key and returns the computed shared secret. The supplied + * key is interpreted using the specified `inputEncoding`, and secret is + * encoded using specified `outputEncoding`. + * If the `inputEncoding` is not + * provided, `otherPublicKey` is expected to be a `Buffer`, `TypedArray`, or `DataView`. + * + * If `outputEncoding` is given a string is returned; otherwise, a `Buffer` is returned. + * @since v0.5.0 + * @param inputEncoding The `encoding` of an `otherPublicKey` string. + * @param outputEncoding The `encoding` of the return value. 
+ */ + computeSecret(otherPublicKey: NodeJS.ArrayBufferView, inputEncoding?: null, outputEncoding?: null): Buffer; + computeSecret(otherPublicKey: string, inputEncoding: BinaryToTextEncoding, outputEncoding?: null): Buffer; + computeSecret( + otherPublicKey: NodeJS.ArrayBufferView, + inputEncoding: null, + outputEncoding: BinaryToTextEncoding, + ): string; + computeSecret( + otherPublicKey: string, + inputEncoding: BinaryToTextEncoding, + outputEncoding: BinaryToTextEncoding, + ): string; + /** + * Returns the Diffie-Hellman prime in the specified `encoding`. + * If `encoding` is provided a string is + * returned; otherwise a `Buffer` is returned. + * @since v0.5.0 + * @param encoding The `encoding` of the return value. + */ + getPrime(): Buffer; + getPrime(encoding: BinaryToTextEncoding): string; + /** + * Returns the Diffie-Hellman generator in the specified `encoding`. + * If `encoding` is provided a string is + * returned; otherwise a `Buffer` is returned. + * @since v0.5.0 + * @param encoding The `encoding` of the return value. + */ + getGenerator(): Buffer; + getGenerator(encoding: BinaryToTextEncoding): string; + /** + * Returns the Diffie-Hellman public key in the specified `encoding`. + * If `encoding` is provided a + * string is returned; otherwise a `Buffer` is returned. + * @since v0.5.0 + * @param encoding The `encoding` of the return value. + */ + getPublicKey(): Buffer; + getPublicKey(encoding: BinaryToTextEncoding): string; + /** + * Returns the Diffie-Hellman private key in the specified `encoding`. + * If `encoding` is provided a + * string is returned; otherwise a `Buffer` is returned. + * @since v0.5.0 + * @param encoding The `encoding` of the return value. + */ + getPrivateKey(): Buffer; + getPrivateKey(encoding: BinaryToTextEncoding): string; + /** + * Sets the Diffie-Hellman public key. If the `encoding` argument is provided, `publicKey` is expected + * to be a string. If no `encoding` is provided, `publicKey` is expected + * to be a `Buffer`, `TypedArray`, or `DataView`. + * @since v0.5.0 + * @param encoding The `encoding` of the `publicKey` string. + */ + setPublicKey(publicKey: NodeJS.ArrayBufferView): void; + setPublicKey(publicKey: string, encoding: BufferEncoding): void; + /** + * Sets the Diffie-Hellman private key. If the `encoding` argument is provided,`privateKey` is expected + * to be a string. If no `encoding` is provided, `privateKey` is expected + * to be a `Buffer`, `TypedArray`, or `DataView`. + * + * This function does not automatically compute the associated public key. Either `diffieHellman.setPublicKey()` or `diffieHellman.generateKeys()` can be + * used to manually provide the public key or to automatically derive it. + * @since v0.5.0 + * @param encoding The `encoding` of the `privateKey` string. + */ + setPrivateKey(privateKey: NodeJS.ArrayBufferView): void; + setPrivateKey(privateKey: string, encoding: BufferEncoding): void; + /** + * A bit field containing any warnings and/or errors resulting from a check + * performed during initialization of the `DiffieHellman` object. + * + * The following values are valid for this property (as defined in `node:constants` module): + * + * * `DH_CHECK_P_NOT_SAFE_PRIME` + * * `DH_CHECK_P_NOT_PRIME` + * * `DH_UNABLE_TO_CHECK_GENERATOR` + * * `DH_NOT_SUITABLE_GENERATOR` + * @since v0.11.12 + */ + verifyError: number; + } + /** + * The `DiffieHellmanGroup` class takes a well-known modp group as its argument. 
+ * It works the same as `DiffieHellman`, except that it does not allow changing its keys after creation.
+ * In other words, it does not implement `setPublicKey()` or `setPrivateKey()` methods.
+ *
+ * ```js
+ * const { createDiffieHellmanGroup } = await import('node:crypto');
+ * const dh = createDiffieHellmanGroup('modp1');
+ * ```
+ * The name (e.g. `'modp1'`) is taken from [RFC 2412](https://www.rfc-editor.org/rfc/rfc2412.txt) (modp1 and 2) and [RFC 3526](https://www.rfc-editor.org/rfc/rfc3526.txt):
+ * ```bash
+ * $ perl -ne 'print "$1\n" if /"(modp\d+)"/' src/node_crypto_groups.h
+ * modp1 # 768 bits
+ * modp2 # 1024 bits
+ * modp5 # 1536 bits
+ * modp14 # 2048 bits
+ * modp15 # etc.
+ * modp16
+ * modp17
+ * modp18
+ * ```
+ * @since v0.7.5
+ */
+ const DiffieHellmanGroup: DiffieHellmanGroupConstructor;
+ interface DiffieHellmanGroupConstructor {
+ new(name: string): DiffieHellmanGroup;
+ (name: string): DiffieHellmanGroup;
+ readonly prototype: DiffieHellmanGroup;
+ }
+ type DiffieHellmanGroup = Omit<DiffieHellman, "setPublicKey" | "setPrivateKey">;
+ /**
+ * Creates a predefined `DiffieHellmanGroup` key exchange object. The
+ * supported groups are listed in the documentation for `DiffieHellmanGroup`.
+ *
+ * The returned object mimics the interface of objects created by {@link createDiffieHellman}, but will not allow changing
+ * the keys (with `diffieHellman.setPublicKey()`, for example). The
+ * advantage of using this method is that the parties do not have to
+ * generate nor exchange a group modulus beforehand, saving both processor
+ * and communication time.
+ *
+ * Example (obtaining a shared secret):
+ *
+ * ```js
+ * const {
+ * getDiffieHellman,
+ * } = await import('node:crypto');
+ * const alice = getDiffieHellman('modp14');
+ * const bob = getDiffieHellman('modp14');
+ *
+ * alice.generateKeys();
+ * bob.generateKeys();
+ *
+ * const aliceSecret = alice.computeSecret(bob.getPublicKey(), null, 'hex');
+ * const bobSecret = bob.computeSecret(alice.getPublicKey(), null, 'hex');
+ *
+ * // aliceSecret and bobSecret should be the same
+ * console.log(aliceSecret === bobSecret);
+ * ```
+ * @since v0.7.5
+ */
+ function getDiffieHellman(groupName: string): DiffieHellmanGroup;
+ /**
+ * An alias for {@link getDiffieHellman}
+ * @since v0.9.3
+ */
+ function createDiffieHellmanGroup(name: string): DiffieHellmanGroup;
+ /**
+ * Provides an asynchronous Password-Based Key Derivation Function 2 (PBKDF2)
+ * implementation. A selected HMAC digest algorithm specified by `digest` is
+ * applied to derive a key of the requested byte length (`keylen`) from the `password`, `salt` and `iterations`.
+ *
+ * The supplied `callback` function is called with two arguments: `err` and `derivedKey`. If an error occurs while deriving the key, `err` will be set;
+ * otherwise `err` will be `null`. By default, the successfully generated `derivedKey` will be passed to the callback as a `Buffer`. An error will be
+ * thrown if any of the input arguments specify invalid values or types.
+ *
+ * The `iterations` argument must be a number set as high as possible. The
+ * higher the number of iterations, the more secure the derived key will be,
+ * but will take a longer amount of time to complete.
+ *
+ * The `salt` should be as unique as possible. It is recommended that a salt is
+ * random and at least 16 bytes long. See [NIST SP 800-132](https://nvlpubs.nist.gov/nistpubs/Legacy/SP/nistspecialpublication800-132.pdf) for details.
+ * + * When passing strings for `password` or `salt`, please consider `caveats when using strings as inputs to cryptographic APIs`. + * + * ```js + * const { + * pbkdf2, + * } = await import('node:crypto'); + * + * pbkdf2('secret', 'salt', 100000, 64, 'sha512', (err, derivedKey) => { + * if (err) throw err; + * console.log(derivedKey.toString('hex')); // '3745e48...08d59ae' + * }); + * ``` + * + * An array of supported digest functions can be retrieved using {@link getHashes}. + * + * This API uses libuv's threadpool, which can have surprising and + * negative performance implications for some applications; see the `UV_THREADPOOL_SIZE` documentation for more information. + * @since v0.5.5 + */ + function pbkdf2( + password: BinaryLike, + salt: BinaryLike, + iterations: number, + keylen: number, + digest: string, + callback: (err: Error | null, derivedKey: Buffer) => void, + ): void; + /** + * Provides a synchronous Password-Based Key Derivation Function 2 (PBKDF2) + * implementation. A selected HMAC digest algorithm specified by `digest` is + * applied to derive a key of the requested byte length (`keylen`) from the `password`, `salt` and `iterations`. + * + * If an error occurs an `Error` will be thrown, otherwise the derived key will be + * returned as a `Buffer`. + * + * The `iterations` argument must be a number set as high as possible. The + * higher the number of iterations, the more secure the derived key will be, + * but will take a longer amount of time to complete. + * + * The `salt` should be as unique as possible. It is recommended that a salt is + * random and at least 16 bytes long. See [NIST SP 800-132](https://nvlpubs.nist.gov/nistpubs/Legacy/SP/nistspecialpublication800-132.pdf) for details. + * + * When passing strings for `password` or `salt`, please consider `caveats when using strings as inputs to cryptographic APIs`. + * + * ```js + * const { + * pbkdf2Sync, + * } = await import('node:crypto'); + * + * const key = pbkdf2Sync('secret', 'salt', 100000, 64, 'sha512'); + * console.log(key.toString('hex')); // '3745e48...08d59ae' + * ``` + * + * An array of supported digest functions can be retrieved using {@link getHashes}. + * @since v0.9.3 + */ + function pbkdf2Sync( + password: BinaryLike, + salt: BinaryLike, + iterations: number, + keylen: number, + digest: string, + ): Buffer; + /** + * Generates cryptographically strong pseudorandom data. The `size` argument + * is a number indicating the number of bytes to generate. + * + * If a `callback` function is provided, the bytes are generated asynchronously + * and the `callback` function is invoked with two arguments: `err` and `buf`. + * If an error occurs, `err` will be an `Error` object; otherwise it is `null`. The `buf` argument is a `Buffer` containing the generated bytes. + * + * ```js + * // Asynchronous + * const { + * randomBytes, + * } = await import('node:crypto'); + * + * randomBytes(256, (err, buf) => { + * if (err) throw err; + * console.log(`${buf.length} bytes of random data: ${buf.toString('hex')}`); + * }); + * ``` + * + * If the `callback` function is not provided, the random bytes are generated + * synchronously and returned as a `Buffer`. An error will be thrown if + * there is a problem generating the bytes. 
+ * + * ```js + * // Synchronous + * const { + * randomBytes, + * } = await import('node:crypto'); + * + * const buf = randomBytes(256); + * console.log( + * `${buf.length} bytes of random data: ${buf.toString('hex')}`); + * ``` + * + * The `crypto.randomBytes()` method will not complete until there is + * sufficient entropy available. + * This should normally never take longer than a few milliseconds. The only time + * when generating the random bytes may conceivably block for a longer period of + * time is right after boot, when the whole system is still low on entropy. + * + * This API uses libuv's threadpool, which can have surprising and + * negative performance implications for some applications; see the `UV_THREADPOOL_SIZE` documentation for more information. + * + * The asynchronous version of `crypto.randomBytes()` is carried out in a single + * threadpool request. To minimize threadpool task length variation, partition + * large `randomBytes` requests when doing so as part of fulfilling a client + * request. + * @since v0.5.8 + * @param size The number of bytes to generate. The `size` must not be larger than `2**31 - 1`. + * @return if the `callback` function is not provided. + */ + function randomBytes(size: number): Buffer; + function randomBytes(size: number, callback: (err: Error | null, buf: Buffer) => void): void; + function pseudoRandomBytes(size: number): Buffer; + function pseudoRandomBytes(size: number, callback: (err: Error | null, buf: Buffer) => void): void; + /** + * Return a random integer `n` such that `min <= n < max`. This + * implementation avoids [modulo bias](https://en.wikipedia.org/wiki/Fisher%E2%80%93Yates_shuffle#Modulo_bias). + * + * The range (`max - min`) must be less than 2**48. `min` and `max` must + * be [safe integers](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Number/isSafeInteger). + * + * If the `callback` function is not provided, the random integer is + * generated synchronously. + * + * ```js + * // Asynchronous + * const { + * randomInt, + * } = await import('node:crypto'); + * + * randomInt(3, (err, n) => { + * if (err) throw err; + * console.log(`Random number chosen from (0, 1, 2): ${n}`); + * }); + * ``` + * + * ```js + * // Synchronous + * const { + * randomInt, + * } = await import('node:crypto'); + * + * const n = randomInt(3); + * console.log(`Random number chosen from (0, 1, 2): ${n}`); + * ``` + * + * ```js + * // With `min` argument + * const { + * randomInt, + * } = await import('node:crypto'); + * + * const n = randomInt(1, 7); + * console.log(`The dice rolled: ${n}`); + * ``` + * @since v14.10.0, v12.19.0 + * @param [min=0] Start of random range (inclusive). + * @param max End of random range (exclusive). + * @param callback `function(err, n) {}`. + */ + function randomInt(max: number): number; + function randomInt(min: number, max: number): number; + function randomInt(max: number, callback: (err: Error | null, value: number) => void): void; + function randomInt(min: number, max: number, callback: (err: Error | null, value: number) => void): void; + /** + * Synchronous version of {@link randomFill}. 
+ * + * ```js + * import { Buffer } from 'node:buffer'; + * const { randomFillSync } = await import('node:crypto'); + * + * const buf = Buffer.alloc(10); + * console.log(randomFillSync(buf).toString('hex')); + * + * randomFillSync(buf, 5); + * console.log(buf.toString('hex')); + * + * // The above is equivalent to the following: + * randomFillSync(buf, 5, 5); + * console.log(buf.toString('hex')); + * ``` + * + * Any `ArrayBuffer`, `TypedArray` or `DataView` instance may be passed as`buffer`. + * + * ```js + * import { Buffer } from 'node:buffer'; + * const { randomFillSync } = await import('node:crypto'); + * + * const a = new Uint32Array(10); + * console.log(Buffer.from(randomFillSync(a).buffer, + * a.byteOffset, a.byteLength).toString('hex')); + * + * const b = new DataView(new ArrayBuffer(10)); + * console.log(Buffer.from(randomFillSync(b).buffer, + * b.byteOffset, b.byteLength).toString('hex')); + * + * const c = new ArrayBuffer(10); + * console.log(Buffer.from(randomFillSync(c)).toString('hex')); + * ``` + * @since v7.10.0, v6.13.0 + * @param buffer Must be supplied. The size of the provided `buffer` must not be larger than `2**31 - 1`. + * @param [offset=0] + * @param [size=buffer.length - offset] + * @return The object passed as `buffer` argument. + */ + function randomFillSync(buffer: T, offset?: number, size?: number): T; + /** + * This function is similar to {@link randomBytes} but requires the first + * argument to be a `Buffer` that will be filled. It also + * requires that a callback is passed in. + * + * If the `callback` function is not provided, an error will be thrown. + * + * ```js + * import { Buffer } from 'node:buffer'; + * const { randomFill } = await import('node:crypto'); + * + * const buf = Buffer.alloc(10); + * randomFill(buf, (err, buf) => { + * if (err) throw err; + * console.log(buf.toString('hex')); + * }); + * + * randomFill(buf, 5, (err, buf) => { + * if (err) throw err; + * console.log(buf.toString('hex')); + * }); + * + * // The above is equivalent to the following: + * randomFill(buf, 5, 5, (err, buf) => { + * if (err) throw err; + * console.log(buf.toString('hex')); + * }); + * ``` + * + * Any `ArrayBuffer`, `TypedArray`, or `DataView` instance may be passed as `buffer`. + * + * While this includes instances of `Float32Array` and `Float64Array`, this + * function should not be used to generate random floating-point numbers. The + * result may contain `+Infinity`, `-Infinity`, and `NaN`, and even if the array + * contains finite numbers only, they are not drawn from a uniform random + * distribution and have no meaningful lower or upper bounds. + * + * ```js + * import { Buffer } from 'node:buffer'; + * const { randomFill } = await import('node:crypto'); + * + * const a = new Uint32Array(10); + * randomFill(a, (err, buf) => { + * if (err) throw err; + * console.log(Buffer.from(buf.buffer, buf.byteOffset, buf.byteLength) + * .toString('hex')); + * }); + * + * const b = new DataView(new ArrayBuffer(10)); + * randomFill(b, (err, buf) => { + * if (err) throw err; + * console.log(Buffer.from(buf.buffer, buf.byteOffset, buf.byteLength) + * .toString('hex')); + * }); + * + * const c = new ArrayBuffer(10); + * randomFill(c, (err, buf) => { + * if (err) throw err; + * console.log(Buffer.from(buf).toString('hex')); + * }); + * ``` + * + * This API uses libuv's threadpool, which can have surprising and + * negative performance implications for some applications; see the `UV_THREADPOOL_SIZE` documentation for more information. 
+ *
+ * The asynchronous version of `crypto.randomFill()` is carried out in a single
+ * threadpool request. To minimize threadpool task length variation, partition
+ * large `randomFill` requests when doing so as part of fulfilling a client
+ * request.
+ * @since v7.10.0, v6.13.0
+ * @param buffer Must be supplied. The size of the provided `buffer` must not be larger than `2**31 - 1`.
+ * @param [offset=0]
+ * @param [size=buffer.length - offset]
+ * @param callback `function(err, buf) {}`.
+ */
+ function randomFill<T extends NodeJS.ArrayBufferView>(
+     buffer: T,
+     callback: (err: Error | null, buf: T) => void,
+ ): void;
+ function randomFill<T extends NodeJS.ArrayBufferView>(
+     buffer: T,
+     offset: number,
+     callback: (err: Error | null, buf: T) => void,
+ ): void;
+ function randomFill<T extends NodeJS.ArrayBufferView>(
+     buffer: T,
+     offset: number,
+     size: number,
+     callback: (err: Error | null, buf: T) => void,
+ ): void;
+ interface ScryptOptions {
+     cost?: number | undefined;
+     blockSize?: number | undefined;
+     parallelization?: number | undefined;
+     N?: number | undefined;
+     r?: number | undefined;
+     p?: number | undefined;
+     maxmem?: number | undefined;
+ }
+ /**
+ * Provides an asynchronous [scrypt](https://en.wikipedia.org/wiki/Scrypt) implementation. Scrypt is a password-based
+ * key derivation function that is designed to be expensive computationally and
+ * memory-wise in order to make brute-force attacks unrewarding.
+ *
+ * The `salt` should be as unique as possible. It is recommended that a salt is
+ * random and at least 16 bytes long. See [NIST SP 800-132](https://nvlpubs.nist.gov/nistpubs/Legacy/SP/nistspecialpublication800-132.pdf) for details.
+ *
+ * When passing strings for `password` or `salt`, please consider `caveats when using strings as inputs to cryptographic APIs`.
+ *
+ * The `callback` function is called with two arguments: `err` and `derivedKey`. `err` is an exception object when key derivation fails, otherwise `err` is `null`. `derivedKey` is passed to the
+ * callback as a `Buffer`.
+ *
+ * An exception is thrown when any of the input arguments specify invalid values
+ * or types.
+ *
+ * ```js
+ * const {
+ *   scrypt,
+ * } = await import('node:crypto');
+ *
+ * // Using the factory defaults.
+ * scrypt('password', 'salt', 64, (err, derivedKey) => {
+ *   if (err) throw err;
+ *   console.log(derivedKey.toString('hex')); // '3745e48...08d59ae'
+ * });
+ * // Using a custom N parameter. Must be a power of two.
+ * scrypt('password', 'salt', 64, { N: 1024 }, (err, derivedKey) => {
+ *   if (err) throw err;
+ *   console.log(derivedKey.toString('hex')); // '3745e48...aa39b34'
+ * });
+ * ```
+ * @since v10.5.0
+ */
+ function scrypt(
+     password: BinaryLike,
+     salt: BinaryLike,
+     keylen: number,
+     callback: (err: Error | null, derivedKey: Buffer) => void,
+ ): void;
+ function scrypt(
+     password: BinaryLike,
+     salt: BinaryLike,
+     keylen: number,
+     options: ScryptOptions,
+     callback: (err: Error | null, derivedKey: Buffer) => void,
+ ): void;
+ /**
+ * Provides a synchronous [scrypt](https://en.wikipedia.org/wiki/Scrypt) implementation. Scrypt is a password-based
+ * key derivation function that is designed to be expensive computationally and
+ * memory-wise in order to make brute-force attacks unrewarding.
+ *
+ * The `salt` should be as unique as possible. It is recommended that a salt is
+ * random and at least 16 bytes long. See [NIST SP 800-132](https://nvlpubs.nist.gov/nistpubs/Legacy/SP/nistspecialpublication800-132.pdf) for details.
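+ *
+ * The `N`, `r` and `p` properties of `ScryptOptions` above are aliases for
+ * `cost`, `blockSize` and `parallelization`; only one name of each pair may be
+ * supplied. As an illustrative sketch (the concrete numbers are arbitrary),
+ * raising the cost usually also means raising `maxmem`, which must exceed
+ * roughly `128 * N * r` bytes:
+ *
+ * ```js
+ * const { scryptSync } = await import('node:crypto');
+ *
+ * const key = scryptSync('password', 'salt', 64, {
+ *   cost: 2 ** 17,             // alias: N, must be a power of two
+ *   blockSize: 8,              // alias: r
+ *   parallelization: 1,        // alias: p
+ *   maxmem: 256 * 1024 * 1024, // 128 * 2**17 * 8 = 128 MiB < 256 MiB
+ * });
+ * console.log(key.toString('hex'));
+ * ```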
+ * + * When passing strings for `password` or `salt`, please consider `caveats when using strings as inputs to cryptographic APIs`. + * + * An exception is thrown when key derivation fails, otherwise the derived key is + * returned as a `Buffer`. + * + * An exception is thrown when any of the input arguments specify invalid values + * or types. + * + * ```js + * const { + * scryptSync, + * } = await import('node:crypto'); + * // Using the factory defaults. + * + * const key1 = scryptSync('password', 'salt', 64); + * console.log(key1.toString('hex')); // '3745e48...08d59ae' + * // Using a custom N parameter. Must be a power of two. + * const key2 = scryptSync('password', 'salt', 64, { N: 1024 }); + * console.log(key2.toString('hex')); // '3745e48...aa39b34' + * ``` + * @since v10.5.0 + */ + function scryptSync(password: BinaryLike, salt: BinaryLike, keylen: number, options?: ScryptOptions): Buffer; + interface RsaPublicKey { + key: KeyLike; + padding?: number | undefined; + } + interface RsaPrivateKey { + key: KeyLike; + passphrase?: string | undefined; + /** + * @default 'sha1' + */ + oaepHash?: string | undefined; + oaepLabel?: NodeJS.TypedArray | undefined; + padding?: number | undefined; + } + /** + * Encrypts the content of `buffer` with `key` and returns a new `Buffer` with encrypted content. The returned data can be decrypted using + * the corresponding private key, for example using {@link privateDecrypt}. + * + * If `key` is not a `KeyObject`, this function behaves as if `key` had been passed to {@link createPublicKey}. If it is an + * object, the `padding` property can be passed. Otherwise, this function uses `RSA_PKCS1_OAEP_PADDING`. + * + * Because RSA public keys can be derived from private keys, a private key may + * be passed instead of a public key. + * @since v0.11.14 + */ + function publicEncrypt( + key: RsaPublicKey | RsaPrivateKey | KeyLike, + buffer: NodeJS.ArrayBufferView | string, + ): Buffer; + /** + * Decrypts `buffer` with `key`.`buffer` was previously encrypted using + * the corresponding private key, for example using {@link privateEncrypt}. + * + * If `key` is not a `KeyObject`, this function behaves as if `key` had been passed to {@link createPublicKey}. If it is an + * object, the `padding` property can be passed. Otherwise, this function uses `RSA_PKCS1_PADDING`. + * + * Because RSA public keys can be derived from private keys, a private key may + * be passed instead of a public key. + * @since v1.1.0 + */ + function publicDecrypt( + key: RsaPublicKey | RsaPrivateKey | KeyLike, + buffer: NodeJS.ArrayBufferView | string, + ): Buffer; + /** + * Decrypts `buffer` with `privateKey`. `buffer` was previously encrypted using + * the corresponding public key, for example using {@link publicEncrypt}. + * + * If `privateKey` is not a `KeyObject`, this function behaves as if `privateKey` had been passed to {@link createPrivateKey}. If it is an + * object, the `padding` property can be passed. Otherwise, this function uses `RSA_PKCS1_OAEP_PADDING`. + * @since v0.11.14 + */ + function privateDecrypt(privateKey: RsaPrivateKey | KeyLike, buffer: NodeJS.ArrayBufferView | string): Buffer; + /** + * Encrypts `buffer` with `privateKey`. The returned data can be decrypted using + * the corresponding public key, for example using {@link publicDecrypt}. + * + * If `privateKey` is not a `KeyObject`, this function behaves as if `privateKey` had been passed to {@link createPrivateKey}. If it is an + * object, the `padding` property can be passed. 
Otherwise, this function uses `RSA_PKCS1_PADDING`. + * @since v1.1.0 + */ + function privateEncrypt(privateKey: RsaPrivateKey | KeyLike, buffer: NodeJS.ArrayBufferView | string): Buffer; + /** + * ```js + * const { + * getCiphers, + * } = await import('node:crypto'); + * + * console.log(getCiphers()); // ['aes-128-cbc', 'aes-128-ccm', ...] + * ``` + * @since v0.9.3 + * @return An array with the names of the supported cipher algorithms. + */ + function getCiphers(): string[]; + /** + * ```js + * const { + * getCurves, + * } = await import('node:crypto'); + * + * console.log(getCurves()); // ['Oakley-EC2N-3', 'Oakley-EC2N-4', ...] + * ``` + * @since v2.3.0 + * @return An array with the names of the supported elliptic curves. + */ + function getCurves(): string[]; + /** + * @since v10.0.0 + * @return `1` if and only if a FIPS compliant crypto provider is currently in use, `0` otherwise. A future semver-major release may change the return type of this API to a {boolean}. + */ + function getFips(): 1 | 0; + /** + * Enables the FIPS compliant crypto provider in a FIPS-enabled Node.js build. + * Throws an error if FIPS mode is not available. + * @since v10.0.0 + * @param bool `true` to enable FIPS mode. + */ + function setFips(bool: boolean): void; + /** + * ```js + * const { + * getHashes, + * } = await import('node:crypto'); + * + * console.log(getHashes()); // ['DSA', 'DSA-SHA', 'DSA-SHA1', ...] + * ``` + * @since v0.9.3 + * @return An array of the names of the supported hash algorithms, such as `'RSA-SHA256'`. Hash algorithms are also called "digest" algorithms. + */ + function getHashes(): string[]; + /** + * The `ECDH` class is a utility for creating Elliptic Curve Diffie-Hellman (ECDH) + * key exchanges. + * + * Instances of the `ECDH` class can be created using the {@link createECDH} function. + * + * ```js + * import assert from 'node:assert'; + * + * const { + * createECDH, + * } = await import('node:crypto'); + * + * // Generate Alice's keys... + * const alice = createECDH('secp521r1'); + * const aliceKey = alice.generateKeys(); + * + * // Generate Bob's keys... + * const bob = createECDH('secp521r1'); + * const bobKey = bob.generateKeys(); + * + * // Exchange and generate the secret... + * const aliceSecret = alice.computeSecret(bobKey); + * const bobSecret = bob.computeSecret(aliceKey); + * + * assert.strictEqual(aliceSecret.toString('hex'), bobSecret.toString('hex')); + * // OK + * ``` + * @since v0.11.14 + */ + class ECDH { + private constructor(); + /** + * Converts the EC Diffie-Hellman public key specified by `key` and `curve` to the + * format specified by `format`. The `format` argument specifies point encoding + * and can be `'compressed'`, `'uncompressed'` or `'hybrid'`. The supplied key is + * interpreted using the specified `inputEncoding`, and the returned key is encoded + * using the specified `outputEncoding`. + * + * Use {@link getCurves} to obtain a list of available curve names. + * On recent OpenSSL releases, `openssl ecparam -list_curves` will also display + * the name and description of each available elliptic curve. + * + * If `format` is not specified the point will be returned in `'uncompressed'` format. + * + * If the `inputEncoding` is not provided, `key` is expected to be a `Buffer`, `TypedArray`, or `DataView`. 
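+ *
+ * A minimal sketch of the usual round trip for the RSA helpers documented
+ * above: encrypt with {@link publicEncrypt} and decrypt with the matching
+ * private key via {@link privateDecrypt}, both on their default
+ * `RSA_PKCS1_OAEP_PADDING` (the message text is arbitrary):
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ * const {
+ *   generateKeyPairSync,
+ *   publicEncrypt,
+ *   privateDecrypt,
+ * } = await import('node:crypto');
+ *
+ * const { publicKey, privateKey } = generateKeyPairSync('rsa', { modulusLength: 2048 });
+ *
+ * const ciphertext = publicEncrypt(publicKey, Buffer.from('my secret data'));
+ * const plaintext = privateDecrypt(privateKey, ciphertext);
+ * console.log(plaintext.toString('utf8')); // 'my secret data'
+ * ```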
+ * + * Example (uncompressing a key): + * + * ```js + * const { + * createECDH, + * ECDH, + * } = await import('node:crypto'); + * + * const ecdh = createECDH('secp256k1'); + * ecdh.generateKeys(); + * + * const compressedKey = ecdh.getPublicKey('hex', 'compressed'); + * + * const uncompressedKey = ECDH.convertKey(compressedKey, + * 'secp256k1', + * 'hex', + * 'hex', + * 'uncompressed'); + * + * // The converted key and the uncompressed public key should be the same + * console.log(uncompressedKey === ecdh.getPublicKey('hex')); + * ``` + * @since v10.0.0 + * @param inputEncoding The `encoding` of the `key` string. + * @param outputEncoding The `encoding` of the return value. + * @param [format='uncompressed'] + */ + static convertKey( + key: BinaryLike, + curve: string, + inputEncoding?: BinaryToTextEncoding, + outputEncoding?: "latin1" | "hex" | "base64" | "base64url", + format?: "uncompressed" | "compressed" | "hybrid", + ): Buffer | string; + /** + * Generates private and public EC Diffie-Hellman key values, and returns + * the public key in the specified `format` and `encoding`. This key should be + * transferred to the other party. + * + * The `format` argument specifies point encoding and can be `'compressed'` or `'uncompressed'`. If `format` is not specified, the point will be returned in`'uncompressed'` format. + * + * If `encoding` is provided a string is returned; otherwise a `Buffer` is returned. + * @since v0.11.14 + * @param encoding The `encoding` of the return value. + * @param [format='uncompressed'] + */ + generateKeys(): Buffer; + generateKeys(encoding: BinaryToTextEncoding, format?: ECDHKeyFormat): string; + /** + * Computes the shared secret using `otherPublicKey` as the other + * party's public key and returns the computed shared secret. The supplied + * key is interpreted using specified `inputEncoding`, and the returned secret + * is encoded using the specified `outputEncoding`. + * If the `inputEncoding` is not + * provided, `otherPublicKey` is expected to be a `Buffer`, `TypedArray`, or `DataView`. + * + * If `outputEncoding` is given a string will be returned; otherwise a `Buffer` is returned. + * + * `ecdh.computeSecret` will throw an`ERR_CRYPTO_ECDH_INVALID_PUBLIC_KEY` error when `otherPublicKey` lies outside of the elliptic curve. Since `otherPublicKey` is + * usually supplied from a remote user over an insecure network, + * be sure to handle this exception accordingly. + * @since v0.11.14 + * @param inputEncoding The `encoding` of the `otherPublicKey` string. + * @param outputEncoding The `encoding` of the return value. + */ + computeSecret(otherPublicKey: NodeJS.ArrayBufferView): Buffer; + computeSecret(otherPublicKey: string, inputEncoding: BinaryToTextEncoding): Buffer; + computeSecret(otherPublicKey: NodeJS.ArrayBufferView, outputEncoding: BinaryToTextEncoding): string; + computeSecret( + otherPublicKey: string, + inputEncoding: BinaryToTextEncoding, + outputEncoding: BinaryToTextEncoding, + ): string; + /** + * If `encoding` is specified, a string is returned; otherwise a `Buffer` is + * returned. + * @since v0.11.14 + * @param encoding The `encoding` of the return value. + * @return The EC Diffie-Hellman in the specified `encoding`. + */ + getPrivateKey(): Buffer; + getPrivateKey(encoding: BinaryToTextEncoding): string; + /** + * The `format` argument specifies point encoding and can be `'compressed'` or `'uncompressed'`. If `format` is not specified the point will be returned in`'uncompressed'` format. 
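+ *
+ * A sketch of the error handling suggested for `computeSecret()` above, using
+ * a hypothetical `deriveSharedSecret()` helper around a peer-supplied key:
+ *
+ * ```js
+ * const { createECDH } = await import('node:crypto');
+ *
+ * const ecdh = createECDH('secp256k1');
+ * ecdh.generateKeys();
+ *
+ * // `otherPublicKey` is a hex-encoded public key as it might arrive from a
+ * // remote peer over the network.
+ * function deriveSharedSecret(otherPublicKey) {
+ *   try {
+ *     return ecdh.computeSecret(otherPublicKey, 'hex');
+ *   } catch (err) {
+ *     if (err.code === 'ERR_CRYPTO_ECDH_INVALID_PUBLIC_KEY') {
+ *       return null; // Reject the peer instead of crashing.
+ *     }
+ *     throw err;
+ *   }
+ * }
+ * ```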
+ * + * If `encoding` is specified, a string is returned; otherwise a `Buffer` is + * returned. + * @since v0.11.14 + * @param encoding The `encoding` of the return value. + * @param [format='uncompressed'] + * @return The EC Diffie-Hellman public key in the specified `encoding` and `format`. + */ + getPublicKey(encoding?: null, format?: ECDHKeyFormat): Buffer; + getPublicKey(encoding: BinaryToTextEncoding, format?: ECDHKeyFormat): string; + /** + * Sets the EC Diffie-Hellman private key. + * If `encoding` is provided, `privateKey` is expected + * to be a string; otherwise `privateKey` is expected to be a `Buffer`, `TypedArray`, or `DataView`. + * + * If `privateKey` is not valid for the curve specified when the `ECDH` object was + * created, an error is thrown. Upon setting the private key, the associated + * public point (key) is also generated and set in the `ECDH` object. + * @since v0.11.14 + * @param encoding The `encoding` of the `privateKey` string. + */ + setPrivateKey(privateKey: NodeJS.ArrayBufferView): void; + setPrivateKey(privateKey: string, encoding: BinaryToTextEncoding): void; + } + /** + * Creates an Elliptic Curve Diffie-Hellman (`ECDH`) key exchange object using a + * predefined curve specified by the `curveName` string. Use {@link getCurves} to obtain a list of available curve names. On recent + * OpenSSL releases, `openssl ecparam -list_curves` will also display the name + * and description of each available elliptic curve. + * @since v0.11.14 + */ + function createECDH(curveName: string): ECDH; + /** + * This function compares the underlying bytes that represent the given `ArrayBuffer`, `TypedArray`, or `DataView` instances using a constant-time + * algorithm. + * + * This function does not leak timing information that + * would allow an attacker to guess one of the values. This is suitable for + * comparing HMAC digests or secret values like authentication cookies or [capability urls](https://www.w3.org/TR/capability-urls/). + * + * `a` and `b` must both be `Buffer`s, `TypedArray`s, or `DataView`s, and they + * must have the same byte length. An error is thrown if `a` and `b` have + * different byte lengths. + * + * If at least one of `a` and `b` is a `TypedArray` with more than one byte per + * entry, such as `Uint16Array`, the result will be computed using the platform + * byte order. + * + * **When both of the inputs are `Float32Array`s or `Float64Array`s, this function might return unexpected results due to IEEE 754** + * **encoding of floating-point numbers. In particular, neither `x === y` nor `Object.is(x, y)` implies that the byte representations of two floating-point** + * **numbers `x` and `y` are equal.** + * + * Use of `crypto.timingSafeEqual` does not guarantee that the _surrounding_ code + * is timing-safe. Care should be taken to ensure that the surrounding code does + * not introduce timing vulnerabilities. 
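+ *
+ * A sketch of the HMAC comparison mentioned above (for example, validating a
+ * webhook signature), assuming `createHmac` from this module; the secret and
+ * payload values are arbitrary:
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ * const { createHmac, timingSafeEqual } = await import('node:crypto');
+ *
+ * const secret = 'shared-secret';
+ * const payload = '{"ok":true}';
+ *
+ * // Hex signature as it might arrive alongside a request.
+ * const received = createHmac('sha256', secret).update(payload).digest('hex');
+ *
+ * const expected = createHmac('sha256', secret).update(payload).digest();
+ * const candidate = Buffer.from(received, 'hex');
+ *
+ * // timingSafeEqual() throws if the byte lengths differ, so check that first.
+ * const valid = candidate.length === expected.length &&
+ *   timingSafeEqual(candidate, expected);
+ * console.log(valid); // true
+ * ```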
+ * @since v6.6.0 + */ + function timingSafeEqual(a: NodeJS.ArrayBufferView, b: NodeJS.ArrayBufferView): boolean; + type KeyType = "rsa" | "rsa-pss" | "dsa" | "ec" | "ed25519" | "ed448" | "x25519" | "x448"; + type KeyFormat = "pem" | "der" | "jwk"; + interface BasePrivateKeyEncodingOptions { + format: T; + cipher?: string | undefined; + passphrase?: string | undefined; + } + interface KeyPairKeyObjectResult { + publicKey: KeyObject; + privateKey: KeyObject; + } + interface ED25519KeyPairKeyObjectOptions {} + interface ED448KeyPairKeyObjectOptions {} + interface X25519KeyPairKeyObjectOptions {} + interface X448KeyPairKeyObjectOptions {} + interface ECKeyPairKeyObjectOptions { + /** + * Name of the curve to use + */ + namedCurve: string; + /** + * Must be `'named'` or `'explicit'`. Default: `'named'`. + */ + paramEncoding?: "explicit" | "named" | undefined; + } + interface RSAKeyPairKeyObjectOptions { + /** + * Key size in bits + */ + modulusLength: number; + /** + * Public exponent + * @default 0x10001 + */ + publicExponent?: number | undefined; + } + interface RSAPSSKeyPairKeyObjectOptions { + /** + * Key size in bits + */ + modulusLength: number; + /** + * Public exponent + * @default 0x10001 + */ + publicExponent?: number | undefined; + /** + * Name of the message digest + */ + hashAlgorithm?: string; + /** + * Name of the message digest used by MGF1 + */ + mgf1HashAlgorithm?: string; + /** + * Minimal salt length in bytes + */ + saltLength?: string; + } + interface DSAKeyPairKeyObjectOptions { + /** + * Key size in bits + */ + modulusLength: number; + /** + * Size of q in bits + */ + divisorLength: number; + } + interface RSAKeyPairOptions { + /** + * Key size in bits + */ + modulusLength: number; + /** + * Public exponent + * @default 0x10001 + */ + publicExponent?: number | undefined; + publicKeyEncoding: { + type: "pkcs1" | "spki"; + format: PubF; + }; + privateKeyEncoding: BasePrivateKeyEncodingOptions & { + type: "pkcs1" | "pkcs8"; + }; + } + interface RSAPSSKeyPairOptions { + /** + * Key size in bits + */ + modulusLength: number; + /** + * Public exponent + * @default 0x10001 + */ + publicExponent?: number | undefined; + /** + * Name of the message digest + */ + hashAlgorithm?: string; + /** + * Name of the message digest used by MGF1 + */ + mgf1HashAlgorithm?: string; + /** + * Minimal salt length in bytes + */ + saltLength?: string; + publicKeyEncoding: { + type: "spki"; + format: PubF; + }; + privateKeyEncoding: BasePrivateKeyEncodingOptions & { + type: "pkcs8"; + }; + } + interface DSAKeyPairOptions { + /** + * Key size in bits + */ + modulusLength: number; + /** + * Size of q in bits + */ + divisorLength: number; + publicKeyEncoding: { + type: "spki"; + format: PubF; + }; + privateKeyEncoding: BasePrivateKeyEncodingOptions & { + type: "pkcs8"; + }; + } + interface ECKeyPairOptions extends ECKeyPairKeyObjectOptions { + publicKeyEncoding: { + type: "pkcs1" | "spki"; + format: PubF; + }; + privateKeyEncoding: BasePrivateKeyEncodingOptions & { + type: "sec1" | "pkcs8"; + }; + } + interface ED25519KeyPairOptions { + publicKeyEncoding: { + type: "spki"; + format: PubF; + }; + privateKeyEncoding: BasePrivateKeyEncodingOptions & { + type: "pkcs8"; + }; + } + interface ED448KeyPairOptions { + publicKeyEncoding: { + type: "spki"; + format: PubF; + }; + privateKeyEncoding: BasePrivateKeyEncodingOptions & { + type: "pkcs8"; + }; + } + interface X25519KeyPairOptions { + publicKeyEncoding: { + type: "spki"; + format: PubF; + }; + privateKeyEncoding: BasePrivateKeyEncodingOptions & { + 
type: "pkcs8"; + }; + } + interface X448KeyPairOptions { + publicKeyEncoding: { + type: "spki"; + format: PubF; + }; + privateKeyEncoding: BasePrivateKeyEncodingOptions & { + type: "pkcs8"; + }; + } + interface KeyPairSyncResult { + publicKey: T1; + privateKey: T2; + } + /** + * Generates a new asymmetric key pair of the given `type`. RSA, RSA-PSS, DSA, EC, + * Ed25519, Ed448, X25519, X448, and DH are currently supported. + * + * If a `publicKeyEncoding` or `privateKeyEncoding` was specified, this function + * behaves as if `keyObject.export()` had been called on its result. Otherwise, + * the respective part of the key is returned as a `KeyObject`. + * + * When encoding public keys, it is recommended to use `'spki'`. When encoding + * private keys, it is recommended to use `'pkcs8'` with a strong passphrase, + * and to keep the passphrase confidential. + * + * ```js + * const { + * generateKeyPairSync, + * } = await import('node:crypto'); + * + * const { + * publicKey, + * privateKey, + * } = generateKeyPairSync('rsa', { + * modulusLength: 4096, + * publicKeyEncoding: { + * type: 'spki', + * format: 'pem', + * }, + * privateKeyEncoding: { + * type: 'pkcs8', + * format: 'pem', + * cipher: 'aes-256-cbc', + * passphrase: 'top secret', + * }, + * }); + * ``` + * + * The return value `{ publicKey, privateKey }` represents the generated key pair. + * When PEM encoding was selected, the respective key will be a string, otherwise + * it will be a buffer containing the data encoded as DER. + * @since v10.12.0 + * @param type Must be `'rsa'`, `'rsa-pss'`, `'dsa'`, `'ec'`, `'ed25519'`, `'ed448'`, `'x25519'`, `'x448'`, or `'dh'`. + */ + function generateKeyPairSync( + type: "rsa", + options: RSAKeyPairOptions<"pem", "pem">, + ): KeyPairSyncResult; + function generateKeyPairSync( + type: "rsa", + options: RSAKeyPairOptions<"pem", "der">, + ): KeyPairSyncResult; + function generateKeyPairSync( + type: "rsa", + options: RSAKeyPairOptions<"der", "pem">, + ): KeyPairSyncResult; + function generateKeyPairSync( + type: "rsa", + options: RSAKeyPairOptions<"der", "der">, + ): KeyPairSyncResult; + function generateKeyPairSync(type: "rsa", options: RSAKeyPairKeyObjectOptions): KeyPairKeyObjectResult; + function generateKeyPairSync( + type: "rsa-pss", + options: RSAPSSKeyPairOptions<"pem", "pem">, + ): KeyPairSyncResult; + function generateKeyPairSync( + type: "rsa-pss", + options: RSAPSSKeyPairOptions<"pem", "der">, + ): KeyPairSyncResult; + function generateKeyPairSync( + type: "rsa-pss", + options: RSAPSSKeyPairOptions<"der", "pem">, + ): KeyPairSyncResult; + function generateKeyPairSync( + type: "rsa-pss", + options: RSAPSSKeyPairOptions<"der", "der">, + ): KeyPairSyncResult; + function generateKeyPairSync(type: "rsa-pss", options: RSAPSSKeyPairKeyObjectOptions): KeyPairKeyObjectResult; + function generateKeyPairSync( + type: "dsa", + options: DSAKeyPairOptions<"pem", "pem">, + ): KeyPairSyncResult; + function generateKeyPairSync( + type: "dsa", + options: DSAKeyPairOptions<"pem", "der">, + ): KeyPairSyncResult; + function generateKeyPairSync( + type: "dsa", + options: DSAKeyPairOptions<"der", "pem">, + ): KeyPairSyncResult; + function generateKeyPairSync( + type: "dsa", + options: DSAKeyPairOptions<"der", "der">, + ): KeyPairSyncResult; + function generateKeyPairSync(type: "dsa", options: DSAKeyPairKeyObjectOptions): KeyPairKeyObjectResult; + function generateKeyPairSync( + type: "ec", + options: ECKeyPairOptions<"pem", "pem">, + ): KeyPairSyncResult; + function generateKeyPairSync( + type: "ec", + options: 
ECKeyPairOptions<"pem", "der">, + ): KeyPairSyncResult; + function generateKeyPairSync( + type: "ec", + options: ECKeyPairOptions<"der", "pem">, + ): KeyPairSyncResult; + function generateKeyPairSync( + type: "ec", + options: ECKeyPairOptions<"der", "der">, + ): KeyPairSyncResult; + function generateKeyPairSync(type: "ec", options: ECKeyPairKeyObjectOptions): KeyPairKeyObjectResult; + function generateKeyPairSync( + type: "ed25519", + options: ED25519KeyPairOptions<"pem", "pem">, + ): KeyPairSyncResult; + function generateKeyPairSync( + type: "ed25519", + options: ED25519KeyPairOptions<"pem", "der">, + ): KeyPairSyncResult; + function generateKeyPairSync( + type: "ed25519", + options: ED25519KeyPairOptions<"der", "pem">, + ): KeyPairSyncResult; + function generateKeyPairSync( + type: "ed25519", + options: ED25519KeyPairOptions<"der", "der">, + ): KeyPairSyncResult; + function generateKeyPairSync(type: "ed25519", options?: ED25519KeyPairKeyObjectOptions): KeyPairKeyObjectResult; + function generateKeyPairSync( + type: "ed448", + options: ED448KeyPairOptions<"pem", "pem">, + ): KeyPairSyncResult; + function generateKeyPairSync( + type: "ed448", + options: ED448KeyPairOptions<"pem", "der">, + ): KeyPairSyncResult; + function generateKeyPairSync( + type: "ed448", + options: ED448KeyPairOptions<"der", "pem">, + ): KeyPairSyncResult; + function generateKeyPairSync( + type: "ed448", + options: ED448KeyPairOptions<"der", "der">, + ): KeyPairSyncResult; + function generateKeyPairSync(type: "ed448", options?: ED448KeyPairKeyObjectOptions): KeyPairKeyObjectResult; + function generateKeyPairSync( + type: "x25519", + options: X25519KeyPairOptions<"pem", "pem">, + ): KeyPairSyncResult; + function generateKeyPairSync( + type: "x25519", + options: X25519KeyPairOptions<"pem", "der">, + ): KeyPairSyncResult; + function generateKeyPairSync( + type: "x25519", + options: X25519KeyPairOptions<"der", "pem">, + ): KeyPairSyncResult; + function generateKeyPairSync( + type: "x25519", + options: X25519KeyPairOptions<"der", "der">, + ): KeyPairSyncResult; + function generateKeyPairSync(type: "x25519", options?: X25519KeyPairKeyObjectOptions): KeyPairKeyObjectResult; + function generateKeyPairSync( + type: "x448", + options: X448KeyPairOptions<"pem", "pem">, + ): KeyPairSyncResult; + function generateKeyPairSync( + type: "x448", + options: X448KeyPairOptions<"pem", "der">, + ): KeyPairSyncResult; + function generateKeyPairSync( + type: "x448", + options: X448KeyPairOptions<"der", "pem">, + ): KeyPairSyncResult; + function generateKeyPairSync( + type: "x448", + options: X448KeyPairOptions<"der", "der">, + ): KeyPairSyncResult; + function generateKeyPairSync(type: "x448", options?: X448KeyPairKeyObjectOptions): KeyPairKeyObjectResult; + /** + * Generates a new asymmetric key pair of the given `type`. RSA, RSA-PSS, DSA, EC, + * Ed25519, Ed448, X25519, X448, and DH are currently supported. + * + * If a `publicKeyEncoding` or `privateKeyEncoding` was specified, this function + * behaves as if `keyObject.export()` had been called on its result. Otherwise, + * the respective part of the key is returned as a `KeyObject`. 
+ * + * It is recommended to encode public keys as `'spki'` and private keys as `'pkcs8'` with encryption for long-term storage: + * + * ```js + * const { + * generateKeyPair, + * } = await import('node:crypto'); + * + * generateKeyPair('rsa', { + * modulusLength: 4096, + * publicKeyEncoding: { + * type: 'spki', + * format: 'pem', + * }, + * privateKeyEncoding: { + * type: 'pkcs8', + * format: 'pem', + * cipher: 'aes-256-cbc', + * passphrase: 'top secret', + * }, + * }, (err, publicKey, privateKey) => { + * // Handle errors and use the generated key pair. + * }); + * ``` + * + * On completion, `callback` will be called with `err` set to `undefined` and `publicKey` / `privateKey` representing the generated key pair. + * + * If this method is invoked as its `util.promisify()` ed version, it returns + * a `Promise` for an `Object` with `publicKey` and `privateKey` properties. + * @since v10.12.0 + * @param type Must be `'rsa'`, `'rsa-pss'`, `'dsa'`, `'ec'`, `'ed25519'`, `'ed448'`, `'x25519'`, `'x448'`, or `'dh'`. + */ + function generateKeyPair( + type: "rsa", + options: RSAKeyPairOptions<"pem", "pem">, + callback: (err: Error | null, publicKey: string, privateKey: string) => void, + ): void; + function generateKeyPair( + type: "rsa", + options: RSAKeyPairOptions<"pem", "der">, + callback: (err: Error | null, publicKey: string, privateKey: Buffer) => void, + ): void; + function generateKeyPair( + type: "rsa", + options: RSAKeyPairOptions<"der", "pem">, + callback: (err: Error | null, publicKey: Buffer, privateKey: string) => void, + ): void; + function generateKeyPair( + type: "rsa", + options: RSAKeyPairOptions<"der", "der">, + callback: (err: Error | null, publicKey: Buffer, privateKey: Buffer) => void, + ): void; + function generateKeyPair( + type: "rsa", + options: RSAKeyPairKeyObjectOptions, + callback: (err: Error | null, publicKey: KeyObject, privateKey: KeyObject) => void, + ): void; + function generateKeyPair( + type: "rsa-pss", + options: RSAPSSKeyPairOptions<"pem", "pem">, + callback: (err: Error | null, publicKey: string, privateKey: string) => void, + ): void; + function generateKeyPair( + type: "rsa-pss", + options: RSAPSSKeyPairOptions<"pem", "der">, + callback: (err: Error | null, publicKey: string, privateKey: Buffer) => void, + ): void; + function generateKeyPair( + type: "rsa-pss", + options: RSAPSSKeyPairOptions<"der", "pem">, + callback: (err: Error | null, publicKey: Buffer, privateKey: string) => void, + ): void; + function generateKeyPair( + type: "rsa-pss", + options: RSAPSSKeyPairOptions<"der", "der">, + callback: (err: Error | null, publicKey: Buffer, privateKey: Buffer) => void, + ): void; + function generateKeyPair( + type: "rsa-pss", + options: RSAPSSKeyPairKeyObjectOptions, + callback: (err: Error | null, publicKey: KeyObject, privateKey: KeyObject) => void, + ): void; + function generateKeyPair( + type: "dsa", + options: DSAKeyPairOptions<"pem", "pem">, + callback: (err: Error | null, publicKey: string, privateKey: string) => void, + ): void; + function generateKeyPair( + type: "dsa", + options: DSAKeyPairOptions<"pem", "der">, + callback: (err: Error | null, publicKey: string, privateKey: Buffer) => void, + ): void; + function generateKeyPair( + type: "dsa", + options: DSAKeyPairOptions<"der", "pem">, + callback: (err: Error | null, publicKey: Buffer, privateKey: string) => void, + ): void; + function generateKeyPair( + type: "dsa", + options: DSAKeyPairOptions<"der", "der">, + callback: (err: Error | null, publicKey: Buffer, privateKey: Buffer) => void, + ): 
void; + function generateKeyPair( + type: "dsa", + options: DSAKeyPairKeyObjectOptions, + callback: (err: Error | null, publicKey: KeyObject, privateKey: KeyObject) => void, + ): void; + function generateKeyPair( + type: "ec", + options: ECKeyPairOptions<"pem", "pem">, + callback: (err: Error | null, publicKey: string, privateKey: string) => void, + ): void; + function generateKeyPair( + type: "ec", + options: ECKeyPairOptions<"pem", "der">, + callback: (err: Error | null, publicKey: string, privateKey: Buffer) => void, + ): void; + function generateKeyPair( + type: "ec", + options: ECKeyPairOptions<"der", "pem">, + callback: (err: Error | null, publicKey: Buffer, privateKey: string) => void, + ): void; + function generateKeyPair( + type: "ec", + options: ECKeyPairOptions<"der", "der">, + callback: (err: Error | null, publicKey: Buffer, privateKey: Buffer) => void, + ): void; + function generateKeyPair( + type: "ec", + options: ECKeyPairKeyObjectOptions, + callback: (err: Error | null, publicKey: KeyObject, privateKey: KeyObject) => void, + ): void; + function generateKeyPair( + type: "ed25519", + options: ED25519KeyPairOptions<"pem", "pem">, + callback: (err: Error | null, publicKey: string, privateKey: string) => void, + ): void; + function generateKeyPair( + type: "ed25519", + options: ED25519KeyPairOptions<"pem", "der">, + callback: (err: Error | null, publicKey: string, privateKey: Buffer) => void, + ): void; + function generateKeyPair( + type: "ed25519", + options: ED25519KeyPairOptions<"der", "pem">, + callback: (err: Error | null, publicKey: Buffer, privateKey: string) => void, + ): void; + function generateKeyPair( + type: "ed25519", + options: ED25519KeyPairOptions<"der", "der">, + callback: (err: Error | null, publicKey: Buffer, privateKey: Buffer) => void, + ): void; + function generateKeyPair( + type: "ed25519", + options: ED25519KeyPairKeyObjectOptions | undefined, + callback: (err: Error | null, publicKey: KeyObject, privateKey: KeyObject) => void, + ): void; + function generateKeyPair( + type: "ed448", + options: ED448KeyPairOptions<"pem", "pem">, + callback: (err: Error | null, publicKey: string, privateKey: string) => void, + ): void; + function generateKeyPair( + type: "ed448", + options: ED448KeyPairOptions<"pem", "der">, + callback: (err: Error | null, publicKey: string, privateKey: Buffer) => void, + ): void; + function generateKeyPair( + type: "ed448", + options: ED448KeyPairOptions<"der", "pem">, + callback: (err: Error | null, publicKey: Buffer, privateKey: string) => void, + ): void; + function generateKeyPair( + type: "ed448", + options: ED448KeyPairOptions<"der", "der">, + callback: (err: Error | null, publicKey: Buffer, privateKey: Buffer) => void, + ): void; + function generateKeyPair( + type: "ed448", + options: ED448KeyPairKeyObjectOptions | undefined, + callback: (err: Error | null, publicKey: KeyObject, privateKey: KeyObject) => void, + ): void; + function generateKeyPair( + type: "x25519", + options: X25519KeyPairOptions<"pem", "pem">, + callback: (err: Error | null, publicKey: string, privateKey: string) => void, + ): void; + function generateKeyPair( + type: "x25519", + options: X25519KeyPairOptions<"pem", "der">, + callback: (err: Error | null, publicKey: string, privateKey: Buffer) => void, + ): void; + function generateKeyPair( + type: "x25519", + options: X25519KeyPairOptions<"der", "pem">, + callback: (err: Error | null, publicKey: Buffer, privateKey: string) => void, + ): void; + function generateKeyPair( + type: "x25519", + options: 
X25519KeyPairOptions<"der", "der">, + callback: (err: Error | null, publicKey: Buffer, privateKey: Buffer) => void, + ): void; + function generateKeyPair( + type: "x25519", + options: X25519KeyPairKeyObjectOptions | undefined, + callback: (err: Error | null, publicKey: KeyObject, privateKey: KeyObject) => void, + ): void; + function generateKeyPair( + type: "x448", + options: X448KeyPairOptions<"pem", "pem">, + callback: (err: Error | null, publicKey: string, privateKey: string) => void, + ): void; + function generateKeyPair( + type: "x448", + options: X448KeyPairOptions<"pem", "der">, + callback: (err: Error | null, publicKey: string, privateKey: Buffer) => void, + ): void; + function generateKeyPair( + type: "x448", + options: X448KeyPairOptions<"der", "pem">, + callback: (err: Error | null, publicKey: Buffer, privateKey: string) => void, + ): void; + function generateKeyPair( + type: "x448", + options: X448KeyPairOptions<"der", "der">, + callback: (err: Error | null, publicKey: Buffer, privateKey: Buffer) => void, + ): void; + function generateKeyPair( + type: "x448", + options: X448KeyPairKeyObjectOptions | undefined, + callback: (err: Error | null, publicKey: KeyObject, privateKey: KeyObject) => void, + ): void; + namespace generateKeyPair { + function __promisify__( + type: "rsa", + options: RSAKeyPairOptions<"pem", "pem">, + ): Promise<{ + publicKey: string; + privateKey: string; + }>; + function __promisify__( + type: "rsa", + options: RSAKeyPairOptions<"pem", "der">, + ): Promise<{ + publicKey: string; + privateKey: Buffer; + }>; + function __promisify__( + type: "rsa", + options: RSAKeyPairOptions<"der", "pem">, + ): Promise<{ + publicKey: Buffer; + privateKey: string; + }>; + function __promisify__( + type: "rsa", + options: RSAKeyPairOptions<"der", "der">, + ): Promise<{ + publicKey: Buffer; + privateKey: Buffer; + }>; + function __promisify__(type: "rsa", options: RSAKeyPairKeyObjectOptions): Promise; + function __promisify__( + type: "rsa-pss", + options: RSAPSSKeyPairOptions<"pem", "pem">, + ): Promise<{ + publicKey: string; + privateKey: string; + }>; + function __promisify__( + type: "rsa-pss", + options: RSAPSSKeyPairOptions<"pem", "der">, + ): Promise<{ + publicKey: string; + privateKey: Buffer; + }>; + function __promisify__( + type: "rsa-pss", + options: RSAPSSKeyPairOptions<"der", "pem">, + ): Promise<{ + publicKey: Buffer; + privateKey: string; + }>; + function __promisify__( + type: "rsa-pss", + options: RSAPSSKeyPairOptions<"der", "der">, + ): Promise<{ + publicKey: Buffer; + privateKey: Buffer; + }>; + function __promisify__( + type: "rsa-pss", + options: RSAPSSKeyPairKeyObjectOptions, + ): Promise; + function __promisify__( + type: "dsa", + options: DSAKeyPairOptions<"pem", "pem">, + ): Promise<{ + publicKey: string; + privateKey: string; + }>; + function __promisify__( + type: "dsa", + options: DSAKeyPairOptions<"pem", "der">, + ): Promise<{ + publicKey: string; + privateKey: Buffer; + }>; + function __promisify__( + type: "dsa", + options: DSAKeyPairOptions<"der", "pem">, + ): Promise<{ + publicKey: Buffer; + privateKey: string; + }>; + function __promisify__( + type: "dsa", + options: DSAKeyPairOptions<"der", "der">, + ): Promise<{ + publicKey: Buffer; + privateKey: Buffer; + }>; + function __promisify__(type: "dsa", options: DSAKeyPairKeyObjectOptions): Promise; + function __promisify__( + type: "ec", + options: ECKeyPairOptions<"pem", "pem">, + ): Promise<{ + publicKey: string; + privateKey: string; + }>; + function __promisify__( + type: "ec", + options: 
ECKeyPairOptions<"pem", "der">, + ): Promise<{ + publicKey: string; + privateKey: Buffer; + }>; + function __promisify__( + type: "ec", + options: ECKeyPairOptions<"der", "pem">, + ): Promise<{ + publicKey: Buffer; + privateKey: string; + }>; + function __promisify__( + type: "ec", + options: ECKeyPairOptions<"der", "der">, + ): Promise<{ + publicKey: Buffer; + privateKey: Buffer; + }>; + function __promisify__(type: "ec", options: ECKeyPairKeyObjectOptions): Promise; + function __promisify__( + type: "ed25519", + options: ED25519KeyPairOptions<"pem", "pem">, + ): Promise<{ + publicKey: string; + privateKey: string; + }>; + function __promisify__( + type: "ed25519", + options: ED25519KeyPairOptions<"pem", "der">, + ): Promise<{ + publicKey: string; + privateKey: Buffer; + }>; + function __promisify__( + type: "ed25519", + options: ED25519KeyPairOptions<"der", "pem">, + ): Promise<{ + publicKey: Buffer; + privateKey: string; + }>; + function __promisify__( + type: "ed25519", + options: ED25519KeyPairOptions<"der", "der">, + ): Promise<{ + publicKey: Buffer; + privateKey: Buffer; + }>; + function __promisify__( + type: "ed25519", + options?: ED25519KeyPairKeyObjectOptions, + ): Promise; + function __promisify__( + type: "ed448", + options: ED448KeyPairOptions<"pem", "pem">, + ): Promise<{ + publicKey: string; + privateKey: string; + }>; + function __promisify__( + type: "ed448", + options: ED448KeyPairOptions<"pem", "der">, + ): Promise<{ + publicKey: string; + privateKey: Buffer; + }>; + function __promisify__( + type: "ed448", + options: ED448KeyPairOptions<"der", "pem">, + ): Promise<{ + publicKey: Buffer; + privateKey: string; + }>; + function __promisify__( + type: "ed448", + options: ED448KeyPairOptions<"der", "der">, + ): Promise<{ + publicKey: Buffer; + privateKey: Buffer; + }>; + function __promisify__(type: "ed448", options?: ED448KeyPairKeyObjectOptions): Promise; + function __promisify__( + type: "x25519", + options: X25519KeyPairOptions<"pem", "pem">, + ): Promise<{ + publicKey: string; + privateKey: string; + }>; + function __promisify__( + type: "x25519", + options: X25519KeyPairOptions<"pem", "der">, + ): Promise<{ + publicKey: string; + privateKey: Buffer; + }>; + function __promisify__( + type: "x25519", + options: X25519KeyPairOptions<"der", "pem">, + ): Promise<{ + publicKey: Buffer; + privateKey: string; + }>; + function __promisify__( + type: "x25519", + options: X25519KeyPairOptions<"der", "der">, + ): Promise<{ + publicKey: Buffer; + privateKey: Buffer; + }>; + function __promisify__( + type: "x25519", + options?: X25519KeyPairKeyObjectOptions, + ): Promise; + function __promisify__( + type: "x448", + options: X448KeyPairOptions<"pem", "pem">, + ): Promise<{ + publicKey: string; + privateKey: string; + }>; + function __promisify__( + type: "x448", + options: X448KeyPairOptions<"pem", "der">, + ): Promise<{ + publicKey: string; + privateKey: Buffer; + }>; + function __promisify__( + type: "x448", + options: X448KeyPairOptions<"der", "pem">, + ): Promise<{ + publicKey: Buffer; + privateKey: string; + }>; + function __promisify__( + type: "x448", + options: X448KeyPairOptions<"der", "der">, + ): Promise<{ + publicKey: Buffer; + privateKey: Buffer; + }>; + function __promisify__(type: "x448", options?: X448KeyPairKeyObjectOptions): Promise; + } + /** + * Calculates and returns the signature for `data` using the given private key and + * algorithm. If `algorithm` is `null` or `undefined`, then the algorithm is + * dependent upon the key type (especially Ed25519 and Ed448). 
+ * + * If `key` is not a `KeyObject`, this function behaves as if `key` had been + * passed to {@link createPrivateKey}. If it is an object, the following + * additional properties can be passed: + * + * If the `callback` function is provided this function uses libuv's threadpool. + * @since v12.0.0 + */ + function sign( + algorithm: string | null | undefined, + data: NodeJS.ArrayBufferView, + key: KeyLike | SignKeyObjectInput | SignPrivateKeyInput | SignJsonWebKeyInput, + ): Buffer; + function sign( + algorithm: string | null | undefined, + data: NodeJS.ArrayBufferView, + key: KeyLike | SignKeyObjectInput | SignPrivateKeyInput | SignJsonWebKeyInput, + callback: (error: Error | null, data: Buffer) => void, + ): void; + /** + * Verifies the given signature for `data` using the given key and algorithm. If `algorithm` is `null` or `undefined`, then the algorithm is dependent upon the + * key type (especially Ed25519 and Ed448). + * + * If `key` is not a `KeyObject`, this function behaves as if `key` had been + * passed to {@link createPublicKey}. If it is an object, the following + * additional properties can be passed: + * + * The `signature` argument is the previously calculated signature for the `data`. + * + * Because public keys can be derived from private keys, a private key or a public + * key may be passed for `key`. + * + * If the `callback` function is provided this function uses libuv's threadpool. + * @since v12.0.0 + */ + function verify( + algorithm: string | null | undefined, + data: NodeJS.ArrayBufferView, + key: KeyLike | VerifyKeyObjectInput | VerifyPublicKeyInput | VerifyJsonWebKeyInput, + signature: NodeJS.ArrayBufferView, + ): boolean; + function verify( + algorithm: string | null | undefined, + data: NodeJS.ArrayBufferView, + key: KeyLike | VerifyKeyObjectInput | VerifyPublicKeyInput | VerifyJsonWebKeyInput, + signature: NodeJS.ArrayBufferView, + callback: (error: Error | null, result: boolean) => void, + ): void; + /** + * Computes the Diffie-Hellman secret based on a `privateKey` and a `publicKey`. + * Both keys must have the same `asymmetricKeyType`, which must be one of `'dh'` (for Diffie-Hellman), `'ec'` (for ECDH), `'x448'`, or `'x25519'` (for ECDH-ES). + * @since v13.9.0, v12.17.0 + */ + function diffieHellman(options: { privateKey: KeyObject; publicKey: KeyObject }): Buffer; + /** + * A utility for creating one-shot hash digests of data. It can be faster than the object-based `crypto.createHash()` when hashing a smaller amount of data + * (<= 5MB) that's readily available. If the data can be big or if it is streamed, it's still recommended to use `crypto.createHash()` instead. The `algorithm` + * is dependent on the available algorithms supported by the version of OpenSSL on the platform. Examples are `'sha256'`, `'sha512'`, etc. On recent releases + * of OpenSSL, `openssl list -digest-algorithms` will display the available digest algorithms. + * + * Example: + * + * ```js + * import crypto from 'node:crypto'; + * import { Buffer } from 'node:buffer'; + * + * // Hashing a string and return the result as a hex-encoded string. + * const string = 'Node.js'; + * // 10b3493287f831e81a438811a1ffba01f8cec4b7 + * console.log(crypto.hash('sha1', string)); + * + * // Encode a base64-encoded string into a Buffer, hash it and return + * // the result as a buffer. 
+ * const base64 = 'Tm9kZS5qcw=='; + * // + * console.log(crypto.hash('sha1', Buffer.from(base64, 'base64'), 'buffer')); + * ``` + * @since v21.7.0, v20.12.0 + * @param data When `data` is a string, it will be encoded as UTF-8 before being hashed. If a different input encoding is desired for a string input, user + * could encode the string into a `TypedArray` using either `TextEncoder` or `Buffer.from()` and passing the encoded `TypedArray` into this API instead. + * @param [outputEncoding='hex'] [Encoding](https://nodejs.org/docs/latest-v22.x/api/buffer.html#buffers-and-character-encodings) used to encode the returned digest. + */ + function hash(algorithm: string, data: BinaryLike, outputEncoding?: BinaryToTextEncoding): string; + function hash(algorithm: string, data: BinaryLike, outputEncoding: "buffer"): Buffer; + function hash( + algorithm: string, + data: BinaryLike, + outputEncoding?: BinaryToTextEncoding | "buffer", + ): string | Buffer; + type CipherMode = "cbc" | "ccm" | "cfb" | "ctr" | "ecb" | "gcm" | "ocb" | "ofb" | "stream" | "wrap" | "xts"; + interface CipherInfoOptions { + /** + * A test key length. + */ + keyLength?: number | undefined; + /** + * A test IV length. + */ + ivLength?: number | undefined; + } + interface CipherInfo { + /** + * The name of the cipher. + */ + name: string; + /** + * The nid of the cipher. + */ + nid: number; + /** + * The block size of the cipher in bytes. + * This property is omitted when mode is 'stream'. + */ + blockSize?: number | undefined; + /** + * The expected or default initialization vector length in bytes. + * This property is omitted if the cipher does not use an initialization vector. + */ + ivLength?: number | undefined; + /** + * The expected or default key length in bytes. + */ + keyLength: number; + /** + * The cipher mode. + */ + mode: CipherMode; + } + /** + * Returns information about a given cipher. + * + * Some ciphers accept variable length keys and initialization vectors. By default, + * the `crypto.getCipherInfo()` method will return the default values for these + * ciphers. To test if a given key length or iv length is acceptable for given + * cipher, use the `keyLength` and `ivLength` options. If the given values are + * unacceptable, `undefined` will be returned. + * @since v15.0.0 + * @param nameOrNid The name or nid of the cipher to query. + */ + function getCipherInfo(nameOrNid: string | number, options?: CipherInfoOptions): CipherInfo | undefined; + /** + * HKDF is a simple key derivation function defined in RFC 5869\. The given `ikm`, `salt` and `info` are used with the `digest` to derive a key of `keylen` bytes. + * + * The supplied `callback` function is called with two arguments: `err` and `derivedKey`. If an errors occurs while deriving the key, `err` will be set; + * otherwise `err` will be `null`. The successfully generated `derivedKey` will + * be passed to the callback as an [ArrayBuffer](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer). An error will be thrown if any + * of the input arguments specify invalid values or types. + * + * ```js + * import { Buffer } from 'node:buffer'; + * const { + * hkdf, + * } = await import('node:crypto'); + * + * hkdf('sha512', 'key', 'salt', 'info', 64, (err, derivedKey) => { + * if (err) throw err; + * console.log(Buffer.from(derivedKey).toString('hex')); // '24156e2...5391653' + * }); + * ``` + * @since v15.0.0 + * @param digest The digest algorithm to use. + * @param ikm The input keying material. 
Must be provided but can be zero-length. + * @param salt The salt value. Must be provided but can be zero-length. + * @param info Additional info value. Must be provided but can be zero-length, and cannot be more than 1024 bytes. + * @param keylen The length of the key to generate. Must be greater than 0. The maximum allowable value is `255` times the number of bytes produced by the selected digest function (e.g. `sha512` + * generates 64-byte hashes, making the maximum HKDF output 16320 bytes). + */ + function hkdf( + digest: string, + irm: BinaryLike | KeyObject, + salt: BinaryLike, + info: BinaryLike, + keylen: number, + callback: (err: Error | null, derivedKey: ArrayBuffer) => void, + ): void; + /** + * Provides a synchronous HKDF key derivation function as defined in RFC 5869\. The + * given `ikm`, `salt` and `info` are used with the `digest` to derive a key of `keylen` bytes. + * + * The successfully generated `derivedKey` will be returned as an [ArrayBuffer](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer). + * + * An error will be thrown if any of the input arguments specify invalid values or + * types, or if the derived key cannot be generated. + * + * ```js + * import { Buffer } from 'node:buffer'; + * const { + * hkdfSync, + * } = await import('node:crypto'); + * + * const derivedKey = hkdfSync('sha512', 'key', 'salt', 'info', 64); + * console.log(Buffer.from(derivedKey).toString('hex')); // '24156e2...5391653' + * ``` + * @since v15.0.0 + * @param digest The digest algorithm to use. + * @param ikm The input keying material. Must be provided but can be zero-length. + * @param salt The salt value. Must be provided but can be zero-length. + * @param info Additional info value. Must be provided but can be zero-length, and cannot be more than 1024 bytes. + * @param keylen The length of the key to generate. Must be greater than 0. The maximum allowable value is `255` times the number of bytes produced by the selected digest function (e.g. `sha512` + * generates 64-byte hashes, making the maximum HKDF output 16320 bytes). + */ + function hkdfSync( + digest: string, + ikm: BinaryLike | KeyObject, + salt: BinaryLike, + info: BinaryLike, + keylen: number, + ): ArrayBuffer; + interface SecureHeapUsage { + /** + * The total allocated secure heap size as specified using the `--secure-heap=n` command-line flag. + */ + total: number; + /** + * The minimum allocation from the secure heap as specified using the `--secure-heap-min` command-line flag. + */ + min: number; + /** + * The total number of bytes currently allocated from the secure heap. + */ + used: number; + /** + * The calculated ratio of `used` to `total` allocated bytes. + */ + utilization: number; + } + /** + * @since v15.6.0 + */ + function secureHeapUsed(): SecureHeapUsage; + interface RandomUUIDOptions { + /** + * By default, to improve performance, + * Node.js will pre-emptively generate and persistently cache enough + * random data to generate up to 128 random UUIDs. To generate a UUID + * without using the cache, set `disableEntropyCache` to `true`. + * + * @default `false` + */ + disableEntropyCache?: boolean | undefined; + } + type UUID = `${string}-${string}-${string}-${string}-${string}`; + /** + * Generates a random [RFC 4122](https://www.rfc-editor.org/rfc/rfc4122.txt) version 4 UUID. The UUID is generated using a + * cryptographic pseudorandom number generator. 
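+ *
+ * A brief usage sketch (the printed UUID is illustrative):
+ *
+ * ```js
+ * const { randomUUID } = await import('node:crypto');
+ *
+ * console.log(randomUUID()); // e.g. '36b8f84d-df4e-4d49-b662-bcde71a8764f'
+ *
+ * // Bypass the internal entropy cache for this one call.
+ * console.log(randomUUID({ disableEntropyCache: true }));
+ * ```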
+ * @since v15.6.0, v14.17.0 + */ + function randomUUID(options?: RandomUUIDOptions): UUID; + interface X509CheckOptions { + /** + * @default 'always' + */ + subject?: "always" | "default" | "never"; + /** + * @default true + */ + wildcards?: boolean; + /** + * @default true + */ + partialWildcards?: boolean; + /** + * @default false + */ + multiLabelWildcards?: boolean; + /** + * @default false + */ + singleLabelSubdomains?: boolean; + } + /** + * Encapsulates an X509 certificate and provides read-only access to + * its information. + * + * ```js + * const { X509Certificate } = await import('node:crypto'); + * + * const x509 = new X509Certificate('{... pem encoded cert ...}'); + * + * console.log(x509.subject); + * ``` + * @since v15.6.0 + */ + class X509Certificate { + /** + * Will be \`true\` if this is a Certificate Authority (CA) certificate. + * @since v15.6.0 + */ + readonly ca: boolean; + /** + * The SHA-1 fingerprint of this certificate. + * + * Because SHA-1 is cryptographically broken and because the security of SHA-1 is + * significantly worse than that of algorithms that are commonly used to sign + * certificates, consider using `x509.fingerprint256` instead. + * @since v15.6.0 + */ + readonly fingerprint: string; + /** + * The SHA-256 fingerprint of this certificate. + * @since v15.6.0 + */ + readonly fingerprint256: string; + /** + * The SHA-512 fingerprint of this certificate. + * + * Because computing the SHA-256 fingerprint is usually faster and because it is + * only half the size of the SHA-512 fingerprint, `x509.fingerprint256` may be + * a better choice. While SHA-512 presumably provides a higher level of security in + * general, the security of SHA-256 matches that of most algorithms that are + * commonly used to sign certificates. + * @since v17.2.0, v16.14.0 + */ + readonly fingerprint512: string; + /** + * The complete subject of this certificate. + * @since v15.6.0 + */ + readonly subject: string; + /** + * The subject alternative name specified for this certificate. + * + * This is a comma-separated list of subject alternative names. Each entry begins + * with a string identifying the kind of the subject alternative name followed by + * a colon and the value associated with the entry. + * + * Earlier versions of Node.js incorrectly assumed that it is safe to split this + * property at the two-character sequence `', '` (see [CVE-2021-44532](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2021-44532)). However, + * both malicious and legitimate certificates can contain subject alternative names + * that include this sequence when represented as a string. + * + * After the prefix denoting the type of the entry, the remainder of each entry + * might be enclosed in quotes to indicate that the value is a JSON string literal. + * For backward compatibility, Node.js only uses JSON string literals within this + * property when necessary to avoid ambiguity. Third-party code should be prepared + * to handle both possible entry formats. + * @since v15.6.0 + */ + readonly subjectAltName: string | undefined; + /** + * A textual representation of the certificate's authority information access + * extension. + * + * This is a line feed separated list of access descriptions. Each line begins with + * the access method and the kind of the access location, followed by a colon and + * the value associated with the access location. 
+ * + * After the prefix denoting the access method and the kind of the access location, + * the remainder of each line might be enclosed in quotes to indicate that the + * value is a JSON string literal. For backward compatibility, Node.js only uses + * JSON string literals within this property when necessary to avoid ambiguity. + * Third-party code should be prepared to handle both possible entry formats. + * @since v15.6.0 + */ + readonly infoAccess: string | undefined; + /** + * An array detailing the key usages for this certificate. + * @since v15.6.0 + */ + readonly keyUsage: string[]; + /** + * The issuer identification included in this certificate. + * @since v15.6.0 + */ + readonly issuer: string; + /** + * The issuer certificate or `undefined` if the issuer certificate is not + * available. + * @since v15.9.0 + */ + readonly issuerCertificate?: X509Certificate | undefined; + /** + * The public key `KeyObject` for this certificate. + * @since v15.6.0 + */ + readonly publicKey: KeyObject; + /** + * A `Buffer` containing the DER encoding of this certificate. + * @since v15.6.0 + */ + readonly raw: Buffer; + /** + * The serial number of this certificate. + * + * Serial numbers are assigned by certificate authorities and do not uniquely + * identify certificates. Consider using `x509.fingerprint256` as a unique + * identifier instead. + * @since v15.6.0 + */ + readonly serialNumber: string; + /** + * The date/time from which this certificate is considered valid. + * @since v15.6.0 + */ + readonly validFrom: string; + /** + * The date/time from which this certificate is valid, encapsulated in a `Date` object. + * @since v22.10.0 + */ + readonly validFromDate: Date; + /** + * The date/time until which this certificate is considered valid. + * @since v15.6.0 + */ + readonly validTo: string; + /** + * The date/time until which this certificate is valid, encapsulated in a `Date` object. + * @since v22.10.0 + */ + readonly validToDate: Date; + constructor(buffer: BinaryLike); + /** + * Checks whether the certificate matches the given email address. + * + * If the `'subject'` option is undefined or set to `'default'`, the certificate + * subject is only considered if the subject alternative name extension either does + * not exist or does not contain any email addresses. + * + * If the `'subject'` option is set to `'always'` and if the subject alternative + * name extension either does not exist or does not contain a matching email + * address, the certificate subject is considered. + * + * If the `'subject'` option is set to `'never'`, the certificate subject is never + * considered, even if the certificate contains no subject alternative names. + * @since v15.6.0 + * @return Returns `email` if the certificate matches, `undefined` if it does not. + */ + checkEmail(email: string, options?: Pick): string | undefined; + /** + * Checks whether the certificate matches the given host name. + * + * If the certificate matches the given host name, the matching subject name is + * returned. The returned name might be an exact match (e.g., `foo.example.com`) + * or it might contain wildcards (e.g., `*.example.com`). Because host name + * comparisons are case-insensitive, the returned subject name might also differ + * from the given `name` in capitalization. + * + * If the `'subject'` option is undefined or set to `'default'`, the certificate + * subject is only considered if the subject alternative name extension either does + * not exist or does not contain any DNS names. 
This behavior is consistent with [RFC 2818](https://www.rfc-editor.org/rfc/rfc2818.txt) ("HTTP Over TLS"). + * + * If the `'subject'` option is set to `'always'` and if the subject alternative + * name extension either does not exist or does not contain a matching DNS name, + * the certificate subject is considered. + * + * If the `'subject'` option is set to `'never'`, the certificate subject is never + * considered, even if the certificate contains no subject alternative names. + * @since v15.6.0 + * @return Returns a subject name that matches `name`, or `undefined` if no subject name matches `name`. + */ + checkHost(name: string, options?: X509CheckOptions): string | undefined; + /** + * Checks whether the certificate matches the given IP address (IPv4 or IPv6). + * + * Only [RFC 5280](https://www.rfc-editor.org/rfc/rfc5280.txt) `iPAddress` subject alternative names are considered, and they + * must match the given `ip` address exactly. Other subject alternative names as + * well as the subject field of the certificate are ignored. + * @since v15.6.0 + * @return Returns `ip` if the certificate matches, `undefined` if it does not. + */ + checkIP(ip: string): string | undefined; + /** + * Checks whether this certificate was issued by the given `otherCert`. + * @since v15.6.0 + */ + checkIssued(otherCert: X509Certificate): boolean; + /** + * Checks whether the public key for this certificate is consistent with + * the given private key. + * @since v15.6.0 + * @param privateKey A private key. + */ + checkPrivateKey(privateKey: KeyObject): boolean; + /** + * There is no standard JSON encoding for X509 certificates. The`toJSON()` method returns a string containing the PEM encoded + * certificate. + * @since v15.6.0 + */ + toJSON(): string; + /** + * Returns information about this certificate using the legacy `certificate object` encoding. + * @since v15.6.0 + */ + toLegacyObject(): PeerCertificate; + /** + * Returns the PEM-encoded certificate. + * @since v15.6.0 + */ + toString(): string; + /** + * Verifies that this certificate was signed by the given public key. + * Does not perform any other validation checks on the certificate. + * @since v15.6.0 + * @param publicKey A public key. + */ + verify(publicKey: KeyObject): boolean; + } + type LargeNumberLike = NodeJS.ArrayBufferView | SharedArrayBuffer | ArrayBuffer | bigint; + interface GeneratePrimeOptions { + add?: LargeNumberLike | undefined; + rem?: LargeNumberLike | undefined; + /** + * @default false + */ + safe?: boolean | undefined; + bigint?: boolean | undefined; + } + interface GeneratePrimeOptionsBigInt extends GeneratePrimeOptions { + bigint: true; + } + interface GeneratePrimeOptionsArrayBuffer extends GeneratePrimeOptions { + bigint?: false | undefined; + } + /** + * Generates a pseudorandom prime of `size` bits. + * + * If `options.safe` is `true`, the prime will be a safe prime -- that is, `(prime - 1) / 2` will also be a prime. + * + * The `options.add` and `options.rem` parameters can be used to enforce additional + * requirements, e.g., for Diffie-Hellman: + * + * * If `options.add` and `options.rem` are both set, the prime will satisfy the + * condition that `prime % add = rem`. + * * If only `options.add` is set and `options.safe` is not `true`, the prime will + * satisfy the condition that `prime % add = 1`. + * * If only `options.add` is set and `options.safe` is set to `true`, the prime + * will instead satisfy the condition that `prime % add = 3`. 
This is necessary + * because `prime % add = 1` for `options.add > 2` would contradict the condition + * enforced by `options.safe`. + * * `options.rem` is ignored if `options.add` is not given. + * + * Both `options.add` and `options.rem` must be encoded as big-endian sequences + * if given as an `ArrayBuffer`, `SharedArrayBuffer`, `TypedArray`, `Buffer`, or `DataView`. + * + * By default, the prime is encoded as a big-endian sequence of octets + * in an [ArrayBuffer](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer). If the `bigint` option is `true`, then a + * [bigint](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/BigInt) is provided. + * @since v15.8.0 + * @param size The size (in bits) of the prime to generate. + */ + function generatePrime(size: number, callback: (err: Error | null, prime: ArrayBuffer) => void): void; + function generatePrime( + size: number, + options: GeneratePrimeOptionsBigInt, + callback: (err: Error | null, prime: bigint) => void, + ): void; + function generatePrime( + size: number, + options: GeneratePrimeOptionsArrayBuffer, + callback: (err: Error | null, prime: ArrayBuffer) => void, + ): void; + function generatePrime( + size: number, + options: GeneratePrimeOptions, + callback: (err: Error | null, prime: ArrayBuffer | bigint) => void, + ): void; + /** + * Generates a pseudorandom prime of `size` bits. + * + * If `options.safe` is `true`, the prime will be a safe prime -- that is, `(prime - 1) / 2` will also be a prime. + * + * The `options.add` and `options.rem` parameters can be used to enforce additional + * requirements, e.g., for Diffie-Hellman: + * + * * If `options.add` and `options.rem` are both set, the prime will satisfy the + * condition that `prime % add = rem`. + * * If only `options.add` is set and `options.safe` is not `true`, the prime will + * satisfy the condition that `prime % add = 1`. + * * If only `options.add` is set and `options.safe` is set to `true`, the prime + * will instead satisfy the condition that `prime % add = 3`. This is necessary + * because `prime % add = 1` for `options.add > 2` would contradict the condition + * enforced by `options.safe`. + * * `options.rem` is ignored if `options.add` is not given. + * + * Both `options.add` and `options.rem` must be encoded as big-endian sequences + * if given as an `ArrayBuffer`, `SharedArrayBuffer`, `TypedArray`, `Buffer`, or `DataView`. + * + * By default, the prime is encoded as a big-endian sequence of octets + * in an [ArrayBuffer](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer). If the `bigint` option is `true`, then a + * [bigint](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/BigInt) is provided. + * @since v15.8.0 + * @param size The size (in bits) of the prime to generate. + */ + function generatePrimeSync(size: number): ArrayBuffer; + function generatePrimeSync(size: number, options: GeneratePrimeOptionsBigInt): bigint; + function generatePrimeSync(size: number, options: GeneratePrimeOptionsArrayBuffer): ArrayBuffer; + function generatePrimeSync(size: number, options: GeneratePrimeOptions): ArrayBuffer | bigint; + interface CheckPrimeOptions { + /** + * The number of Miller-Rabin probabilistic primality iterations to perform. + * When the value is 0 (zero), a number of checks is used that yields a false positive rate of at most `2**-64` for random input. + * Care must be used when selecting a number of checks. 
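A short sketch of the prime-generation options described above; the bit sizes are arbitrary, and `bigint: true` is used only so the results are easy to print.

```js
import { generatePrime, generatePrimeSync } from 'node:crypto';

// Synchronous: a 256-bit prime returned as a bigint.
const p = generatePrimeSync(256, { bigint: true });
console.log(typeof p); // 'bigint'

// Asynchronous: a 512-bit safe prime, i.e. (prime - 1) / 2 is also prime.
generatePrime(512, { safe: true, bigint: true }, (err, prime) => {
  if (err) throw err;
  console.log(prime);
});
```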
+ * Refer to the OpenSSL documentation for the BN_is_prime_ex function nchecks options for more details. + * + * @default 0 + */ + checks?: number | undefined; + } + /** + * Checks the primality of the `candidate`. + * @since v15.8.0 + * @param candidate A possible prime encoded as a sequence of big endian octets of arbitrary length. + */ + function checkPrime(value: LargeNumberLike, callback: (err: Error | null, result: boolean) => void): void; + function checkPrime( + value: LargeNumberLike, + options: CheckPrimeOptions, + callback: (err: Error | null, result: boolean) => void, + ): void; + /** + * Checks the primality of the `candidate`. + * @since v15.8.0 + * @param candidate A possible prime encoded as a sequence of big endian octets of arbitrary length. + * @return `true` if the candidate is a prime with an error probability less than `0.25 ** options.checks`. + */ + function checkPrimeSync(candidate: LargeNumberLike, options?: CheckPrimeOptions): boolean; + /** + * Load and set the `engine` for some or all OpenSSL functions (selected by flags). + * + * `engine` could be either an id or a path to the engine's shared library. + * + * The optional `flags` argument uses `ENGINE_METHOD_ALL` by default. The `flags` is a bit field taking one of or a mix of the following flags (defined in `crypto.constants`): + * + * * `crypto.constants.ENGINE_METHOD_RSA` + * * `crypto.constants.ENGINE_METHOD_DSA` + * * `crypto.constants.ENGINE_METHOD_DH` + * * `crypto.constants.ENGINE_METHOD_RAND` + * * `crypto.constants.ENGINE_METHOD_EC` + * * `crypto.constants.ENGINE_METHOD_CIPHERS` + * * `crypto.constants.ENGINE_METHOD_DIGESTS` + * * `crypto.constants.ENGINE_METHOD_PKEY_METHS` + * * `crypto.constants.ENGINE_METHOD_PKEY_ASN1_METHS` + * * `crypto.constants.ENGINE_METHOD_ALL` + * * `crypto.constants.ENGINE_METHOD_NONE` + * @since v0.11.11 + * @param flags + */ + function setEngine(engine: string, flags?: number): void; + /** + * A convenient alias for {@link webcrypto.getRandomValues}. This + * implementation is not compliant with the Web Crypto spec, to write + * web-compatible code use {@link webcrypto.getRandomValues} instead. + * @since v17.4.0 + * @return Returns `typedArray`. + */ + function getRandomValues(typedArray: T): T; + /** + * A convenient alias for `crypto.webcrypto.subtle`. + * @since v17.4.0 + */ + const subtle: webcrypto.SubtleCrypto; + /** + * An implementation of the Web Crypto API standard. + * + * See the {@link https://nodejs.org/docs/latest/api/webcrypto.html Web Crypto API documentation} for details. 
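A brief sketch combining `checkPrime()`/`checkPrimeSync()` with `getRandomValues()`; the candidate comes from `generatePrimeSync()` so the checks are expected to succeed, and the `checks: 10` value is arbitrary.

```js
import {
  checkPrime,
  checkPrimeSync,
  generatePrimeSync,
  getRandomValues,
} from 'node:crypto';

const candidate = generatePrimeSync(128, { bigint: true });

// Synchronous primality test with extra Miller-Rabin iterations.
console.log(checkPrimeSync(candidate, { checks: 10 })); // true

// Asynchronous variant.
checkPrime(candidate, (err, isPrime) => {
  if (err) throw err;
  console.log(isPrime); // true
});

// getRandomValues() fills the given typed array in place and returns it.
console.log(getRandomValues(new Uint32Array(4)));
```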
+ * @since v15.0.0 + */ + const webcrypto: webcrypto.Crypto; + namespace webcrypto { + type BufferSource = ArrayBufferView | ArrayBuffer; + type KeyFormat = "jwk" | "pkcs8" | "raw" | "spki"; + type KeyType = "private" | "public" | "secret"; + type KeyUsage = + | "decrypt" + | "deriveBits" + | "deriveKey" + | "encrypt" + | "sign" + | "unwrapKey" + | "verify" + | "wrapKey"; + type AlgorithmIdentifier = Algorithm | string; + type HashAlgorithmIdentifier = AlgorithmIdentifier; + type NamedCurve = string; + type BigInteger = Uint8Array; + interface AesCbcParams extends Algorithm { + iv: BufferSource; + } + interface AesCtrParams extends Algorithm { + counter: BufferSource; + length: number; + } + interface AesDerivedKeyParams extends Algorithm { + length: number; + } + interface AesGcmParams extends Algorithm { + additionalData?: BufferSource; + iv: BufferSource; + tagLength?: number; + } + interface AesKeyAlgorithm extends KeyAlgorithm { + length: number; + } + interface AesKeyGenParams extends Algorithm { + length: number; + } + interface Algorithm { + name: string; + } + interface EcKeyAlgorithm extends KeyAlgorithm { + namedCurve: NamedCurve; + } + interface EcKeyGenParams extends Algorithm { + namedCurve: NamedCurve; + } + interface EcKeyImportParams extends Algorithm { + namedCurve: NamedCurve; + } + interface EcdhKeyDeriveParams extends Algorithm { + public: CryptoKey; + } + interface EcdsaParams extends Algorithm { + hash: HashAlgorithmIdentifier; + } + interface Ed448Params extends Algorithm { + context?: BufferSource; + } + interface HkdfParams extends Algorithm { + hash: HashAlgorithmIdentifier; + info: BufferSource; + salt: BufferSource; + } + interface HmacImportParams extends Algorithm { + hash: HashAlgorithmIdentifier; + length?: number; + } + interface HmacKeyAlgorithm extends KeyAlgorithm { + hash: KeyAlgorithm; + length: number; + } + interface HmacKeyGenParams extends Algorithm { + hash: HashAlgorithmIdentifier; + length?: number; + } + interface JsonWebKey { + alg?: string; + crv?: string; + d?: string; + dp?: string; + dq?: string; + e?: string; + ext?: boolean; + k?: string; + key_ops?: string[]; + kty?: string; + n?: string; + oth?: RsaOtherPrimesInfo[]; + p?: string; + q?: string; + qi?: string; + use?: string; + x?: string; + y?: string; + } + interface KeyAlgorithm { + name: string; + } + interface Pbkdf2Params extends Algorithm { + hash: HashAlgorithmIdentifier; + iterations: number; + salt: BufferSource; + } + interface RsaHashedImportParams extends Algorithm { + hash: HashAlgorithmIdentifier; + } + interface RsaHashedKeyAlgorithm extends RsaKeyAlgorithm { + hash: KeyAlgorithm; + } + interface RsaHashedKeyGenParams extends RsaKeyGenParams { + hash: HashAlgorithmIdentifier; + } + interface RsaKeyAlgorithm extends KeyAlgorithm { + modulusLength: number; + publicExponent: BigInteger; + } + interface RsaKeyGenParams extends Algorithm { + modulusLength: number; + publicExponent: BigInteger; + } + interface RsaOaepParams extends Algorithm { + label?: BufferSource; + } + interface RsaOtherPrimesInfo { + d?: string; + r?: string; + t?: string; + } + interface RsaPssParams extends Algorithm { + saltLength: number; + } + /** + * Importing the `webcrypto` object (`import { webcrypto } from 'node:crypto'`) gives an instance of the `Crypto` class. + * `Crypto` is a singleton that provides access to the remainder of the crypto API. + * @since v15.0.0 + */ + interface Crypto { + /** + * Provides access to the `SubtleCrypto` API. 
+ * @since v15.0.0 + */ + readonly subtle: SubtleCrypto; + /** + * Generates cryptographically strong random values. + * The given `typedArray` is filled with random values, and a reference to `typedArray` is returned. + * + * The given `typedArray` must be an integer-based instance of {@link NodeJS.TypedArray}, i.e. `Float32Array` and `Float64Array` are not accepted. + * + * An error will be thrown if the given `typedArray` is larger than 65,536 bytes. + * @since v15.0.0 + */ + getRandomValues>(typedArray: T): T; + /** + * Generates a random {@link https://www.rfc-editor.org/rfc/rfc4122.txt RFC 4122} version 4 UUID. + * The UUID is generated using a cryptographic pseudorandom number generator. + * @since v16.7.0 + */ + randomUUID(): UUID; + CryptoKey: CryptoKeyConstructor; + } + // This constructor throws ILLEGAL_CONSTRUCTOR so it should not be newable. + interface CryptoKeyConstructor { + /** Illegal constructor */ + (_: { readonly _: unique symbol }): never; // Allows instanceof to work but not be callable by the user. + readonly length: 0; + readonly name: "CryptoKey"; + readonly prototype: CryptoKey; + } + /** + * @since v15.0.0 + */ + interface CryptoKey { + /** + * An object detailing the algorithm for which the key can be used along with additional algorithm-specific parameters. + * @since v15.0.0 + */ + readonly algorithm: KeyAlgorithm; + /** + * When `true`, the {@link CryptoKey} can be extracted using either `subtleCrypto.exportKey()` or `subtleCrypto.wrapKey()`. + * @since v15.0.0 + */ + readonly extractable: boolean; + /** + * A string identifying whether the key is a symmetric (`'secret'`) or asymmetric (`'private'` or `'public'`) key. + * @since v15.0.0 + */ + readonly type: KeyType; + /** + * An array of strings identifying the operations for which the key may be used. + * + * The possible usages are: + * - `'encrypt'` - The key may be used to encrypt data. + * - `'decrypt'` - The key may be used to decrypt data. + * - `'sign'` - The key may be used to generate digital signatures. + * - `'verify'` - The key may be used to verify digital signatures. + * - `'deriveKey'` - The key may be used to derive a new key. + * - `'deriveBits'` - The key may be used to derive bits. + * - `'wrapKey'` - The key may be used to wrap another key. + * - `'unwrapKey'` - The key may be used to unwrap another key. + * + * Valid key usages depend on the key algorithm (identified by `cryptokey.algorithm.name`). + * @since v15.0.0 + */ + readonly usages: KeyUsage[]; + } + /** + * The `CryptoKeyPair` is a simple dictionary object with `publicKey` and `privateKey` properties, representing an asymmetric key pair. + * @since v15.0.0 + */ + interface CryptoKeyPair { + /** + * A {@link CryptoKey} whose type will be `'private'`. + * @since v15.0.0 + */ + privateKey: CryptoKey; + /** + * A {@link CryptoKey} whose type will be `'public'`. + * @since v15.0.0 + */ + publicKey: CryptoKey; + } + /** + * @since v15.0.0 + */ + interface SubtleCrypto { + /** + * Using the method and parameters specified in `algorithm` and the keying material provided by `key`, + * `subtle.decrypt()` attempts to decipher the provided `data`. If successful, + * the returned promise will be resolved with an `` containing the plaintext result. 
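A tiny sketch of the `Crypto` members described above, reached through the `webcrypto` export; the 16-byte buffer size is arbitrary and stays well under the 65,536-byte limit.

```js
import { webcrypto } from 'node:crypto';

// Version 4 UUID from the Web Crypto implementation.
console.log(webcrypto.randomUUID());

// Integer TypedArrays only; the array is filled in place and returned.
console.log(webcrypto.getRandomValues(new Uint8Array(16)));
```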
+ * + * The algorithms currently supported include: + * + * - `'RSA-OAEP'` + * - `'AES-CTR'` + * - `'AES-CBC'` + * - `'AES-GCM'` + * @since v15.0.0 + */ + decrypt( + algorithm: AlgorithmIdentifier | RsaOaepParams | AesCtrParams | AesCbcParams | AesGcmParams, + key: CryptoKey, + data: BufferSource, + ): Promise; + /** + * Using the method and parameters specified in `algorithm` and the keying material provided by `baseKey`, + * `subtle.deriveBits()` attempts to generate `length` bits. + * The Node.js implementation requires that when `length` is a number it must be multiple of `8`. + * When `length` is `null` the maximum number of bits for a given algorithm is generated. This is allowed + * for the `'ECDH'`, `'X25519'`, and `'X448'` algorithms. + * If successful, the returned promise will be resolved with an `` containing the generated data. + * + * The algorithms currently supported include: + * + * - `'ECDH'` + * - `'X25519'` + * - `'X448'` + * - `'HKDF'` + * - `'PBKDF2'` + * @since v15.0.0 + */ + deriveBits(algorithm: EcdhKeyDeriveParams, baseKey: CryptoKey, length: number | null): Promise; + deriveBits( + algorithm: AlgorithmIdentifier | HkdfParams | Pbkdf2Params, + baseKey: CryptoKey, + length: number, + ): Promise; + /** + * Using the method and parameters specified in `algorithm`, and the keying material provided by `baseKey`, + * `subtle.deriveKey()` attempts to generate a new ` based on the method and parameters in `derivedKeyAlgorithm`. + * + * Calling `subtle.deriveKey()` is equivalent to calling `subtle.deriveBits()` to generate raw keying material, + * then passing the result into the `subtle.importKey()` method using the `deriveKeyAlgorithm`, `extractable`, and `keyUsages` parameters as input. + * + * The algorithms currently supported include: + * + * - `'ECDH'` + * - `'X25519'` + * - `'X448'` + * - `'HKDF'` + * - `'PBKDF2'` + * @param keyUsages See {@link https://nodejs.org/docs/latest/api/webcrypto.html#cryptokeyusages Key usages}. + * @since v15.0.0 + */ + deriveKey( + algorithm: AlgorithmIdentifier | EcdhKeyDeriveParams | HkdfParams | Pbkdf2Params, + baseKey: CryptoKey, + derivedKeyAlgorithm: + | AlgorithmIdentifier + | AesDerivedKeyParams + | HmacImportParams + | HkdfParams + | Pbkdf2Params, + extractable: boolean, + keyUsages: readonly KeyUsage[], + ): Promise; + /** + * Using the method identified by `algorithm`, `subtle.digest()` attempts to generate a digest of `data`. + * If successful, the returned promise is resolved with an `` containing the computed digest. + * + * If `algorithm` is provided as a ``, it must be one of: + * + * - `'SHA-1'` + * - `'SHA-256'` + * - `'SHA-384'` + * - `'SHA-512'` + * + * If `algorithm` is provided as an ``, it must have a `name` property whose value is one of the above. + * @since v15.0.0 + */ + digest(algorithm: AlgorithmIdentifier, data: BufferSource): Promise; + /** + * Using the method and parameters specified by `algorithm` and the keying material provided by `key`, + * `subtle.encrypt()` attempts to encipher `data`. If successful, + * the returned promise is resolved with an `` containing the encrypted result. + * + * The algorithms currently supported include: + * + * - `'RSA-OAEP'` + * - `'AES-CTR'` + * - `'AES-CBC'` + * - `'AES-GCM'` + * @since v15.0.0 + */ + encrypt( + algorithm: AlgorithmIdentifier | RsaOaepParams | AesCtrParams | AesCbcParams | AesGcmParams, + key: CryptoKey, + data: BufferSource, + ): Promise; + /** + * Exports the given key into the specified format, if supported. 
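A minimal round-trip sketch for `subtle.encrypt()`, `subtle.decrypt()`, and `subtle.digest()`, assuming an AES-GCM key generated on the spot; the key length, IV size, and sample plaintext are illustrative.

```js
import { webcrypto } from 'node:crypto';
import { Buffer } from 'node:buffer';
const { subtle } = webcrypto;

const key = await subtle.generateKey({ name: 'AES-GCM', length: 256 }, true, [
  'encrypt',
  'decrypt',
]);
const iv = webcrypto.getRandomValues(new Uint8Array(12));
const plaintext = new TextEncoder().encode('hello');

const ciphertext = await subtle.encrypt({ name: 'AES-GCM', iv }, key, plaintext);
const roundTrip = await subtle.decrypt({ name: 'AES-GCM', iv }, key, ciphertext);
console.log(new TextDecoder().decode(roundTrip)); // 'hello'

// digest() resolves with an ArrayBuffer containing the SHA-256 hash.
const hash = await subtle.digest('SHA-256', plaintext);
console.log(Buffer.from(hash).toString('hex'));
```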
+ * + * If the `` is not extractable, the returned promise will reject. + * + * When `format` is either `'pkcs8'` or `'spki'` and the export is successful, + * the returned promise will be resolved with an `` containing the exported key data. + * + * When `format` is `'jwk'` and the export is successful, the returned promise will be resolved with a + * JavaScript object conforming to the {@link https://tools.ietf.org/html/rfc7517 JSON Web Key} specification. + * @param format Must be one of `'raw'`, `'pkcs8'`, `'spki'`, or `'jwk'`. + * @returns `` containing ``. + * @since v15.0.0 + */ + exportKey(format: "jwk", key: CryptoKey): Promise; + exportKey(format: Exclude, key: CryptoKey): Promise; + /** + * Using the method and parameters provided in `algorithm`, + * `subtle.generateKey()` attempts to generate new keying material. + * Depending the method used, the method may generate either a single `` or a ``. + * + * The `` (public and private key) generating algorithms supported include: + * + * - `'RSASSA-PKCS1-v1_5'` + * - `'RSA-PSS'` + * - `'RSA-OAEP'` + * - `'ECDSA'` + * - `'Ed25519'` + * - `'Ed448'` + * - `'ECDH'` + * - `'X25519'` + * - `'X448'` + * The `` (secret key) generating algorithms supported include: + * + * - `'HMAC'` + * - `'AES-CTR'` + * - `'AES-CBC'` + * - `'AES-GCM'` + * - `'AES-KW'` + * @param keyUsages See {@link https://nodejs.org/docs/latest/api/webcrypto.html#cryptokeyusages Key usages}. + * @since v15.0.0 + */ + generateKey( + algorithm: RsaHashedKeyGenParams | EcKeyGenParams, + extractable: boolean, + keyUsages: readonly KeyUsage[], + ): Promise; + generateKey( + algorithm: AesKeyGenParams | HmacKeyGenParams | Pbkdf2Params, + extractable: boolean, + keyUsages: readonly KeyUsage[], + ): Promise; + generateKey( + algorithm: AlgorithmIdentifier, + extractable: boolean, + keyUsages: KeyUsage[], + ): Promise; + /** + * The `subtle.importKey()` method attempts to interpret the provided `keyData` as the given `format` + * to create a `` instance using the provided `algorithm`, `extractable`, and `keyUsages` arguments. + * If the import is successful, the returned promise will be resolved with the created ``. + * + * If importing a `'PBKDF2'` key, `extractable` must be `false`. + * @param format Must be one of `'raw'`, `'pkcs8'`, `'spki'`, or `'jwk'`. + * @param keyUsages See {@link https://nodejs.org/docs/latest/api/webcrypto.html#cryptokeyusages Key usages}. + * @since v15.0.0 + */ + importKey( + format: "jwk", + keyData: JsonWebKey, + algorithm: + | AlgorithmIdentifier + | RsaHashedImportParams + | EcKeyImportParams + | HmacImportParams + | AesKeyAlgorithm, + extractable: boolean, + keyUsages: readonly KeyUsage[], + ): Promise; + importKey( + format: Exclude, + keyData: BufferSource, + algorithm: + | AlgorithmIdentifier + | RsaHashedImportParams + | EcKeyImportParams + | HmacImportParams + | AesKeyAlgorithm, + extractable: boolean, + keyUsages: KeyUsage[], + ): Promise; + /** + * Using the method and parameters given by `algorithm` and the keying material provided by `key`, + * `subtle.sign()` attempts to generate a cryptographic signature of `data`. If successful, + * the returned promise is resolved with an `` containing the generated signature. 
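A small sketch of `subtle.importKey()` and `subtle.exportKey()`, assuming a raw 32-byte HMAC-SHA-256 secret; the key material here is random and purely illustrative.

```js
import { webcrypto } from 'node:crypto';
const { subtle } = webcrypto;

// Import 32 raw bytes as an extractable HMAC-SHA-256 key.
const raw = webcrypto.getRandomValues(new Uint8Array(32));
const key = await subtle.importKey(
  'raw',
  raw,
  { name: 'HMAC', hash: 'SHA-256' },
  true,
  ['sign', 'verify'],
);

// Export it back out as a JSON Web Key.
const jwk = await subtle.exportKey('jwk', key);
console.log(jwk.kty, jwk.alg); // 'oct' 'HS256'
```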
+ * + * The algorithms currently supported include: + * + * - `'RSASSA-PKCS1-v1_5'` + * - `'RSA-PSS'` + * - `'ECDSA'` + * - `'Ed25519'` + * - `'Ed448'` + * - `'HMAC'` + * @since v15.0.0 + */ + sign( + algorithm: AlgorithmIdentifier | RsaPssParams | EcdsaParams | Ed448Params, + key: CryptoKey, + data: BufferSource, + ): Promise; + /** + * In cryptography, "wrapping a key" refers to exporting and then encrypting the keying material. + * The `subtle.unwrapKey()` method attempts to decrypt a wrapped key and create a `` instance. + * It is equivalent to calling `subtle.decrypt()` first on the encrypted key data (using the `wrappedKey`, `unwrapAlgo`, and `unwrappingKey` arguments as input) + * then passing the results in to the `subtle.importKey()` method using the `unwrappedKeyAlgo`, `extractable`, and `keyUsages` arguments as inputs. + * If successful, the returned promise is resolved with a `` object. + * + * The wrapping algorithms currently supported include: + * + * - `'RSA-OAEP'` + * - `'AES-CTR'` + * - `'AES-CBC'` + * - `'AES-GCM'` + * - `'AES-KW'` + * + * The unwrapped key algorithms supported include: + * + * - `'RSASSA-PKCS1-v1_5'` + * - `'RSA-PSS'` + * - `'RSA-OAEP'` + * - `'ECDSA'` + * - `'Ed25519'` + * - `'Ed448'` + * - `'ECDH'` + * - `'X25519'` + * - `'X448'` + * - `'HMAC'` + * - `'AES-CTR'` + * - `'AES-CBC'` + * - `'AES-GCM'` + * - `'AES-KW'` + * @param format Must be one of `'raw'`, `'pkcs8'`, `'spki'`, or `'jwk'`. + * @param keyUsages See {@link https://nodejs.org/docs/latest/api/webcrypto.html#cryptokeyusages Key usages}. + * @since v15.0.0 + */ + unwrapKey( + format: KeyFormat, + wrappedKey: BufferSource, + unwrappingKey: CryptoKey, + unwrapAlgorithm: AlgorithmIdentifier | RsaOaepParams | AesCtrParams | AesCbcParams | AesGcmParams, + unwrappedKeyAlgorithm: + | AlgorithmIdentifier + | RsaHashedImportParams + | EcKeyImportParams + | HmacImportParams + | AesKeyAlgorithm, + extractable: boolean, + keyUsages: KeyUsage[], + ): Promise; + /** + * Using the method and parameters given in `algorithm` and the keying material provided by `key`, + * `subtle.verify()` attempts to verify that `signature` is a valid cryptographic signature of `data`. + * The returned promise is resolved with either `true` or `false`. + * + * The algorithms currently supported include: + * + * - `'RSASSA-PKCS1-v1_5'` + * - `'RSA-PSS'` + * - `'ECDSA'` + * - `'Ed25519'` + * - `'Ed448'` + * - `'HMAC'` + * @since v15.0.0 + */ + verify( + algorithm: AlgorithmIdentifier | RsaPssParams | EcdsaParams | Ed448Params, + key: CryptoKey, + signature: BufferSource, + data: BufferSource, + ): Promise; + /** + * In cryptography, "wrapping a key" refers to exporting and then encrypting the keying material. + * The `subtle.wrapKey()` method exports the keying material into the format identified by `format`, + * then encrypts it using the method and parameters specified by `wrapAlgo` and the keying material provided by `wrappingKey`. + * It is the equivalent to calling `subtle.exportKey()` using `format` and `key` as the arguments, + * then passing the result to the `subtle.encrypt()` method using `wrappingKey` and `wrapAlgo` as inputs. + * If successful, the returned promise will be resolved with an `` containing the encrypted key data. + * + * The wrapping algorithms currently supported include: + * + * - `'RSA-OAEP'` + * - `'AES-CTR'` + * - `'AES-CBC'` + * - `'AES-GCM'` + * - `'AES-KW'` + * @param format Must be one of `'raw'`, `'pkcs8'`, `'spki'`, or `'jwk'`. 
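A short sketch of `subtle.sign()` and `subtle.verify()`, assuming an ECDSA P-256 key pair generated for the occasion; the message text is a placeholder.

```js
import { webcrypto } from 'node:crypto';
const { subtle } = webcrypto;

const { privateKey, publicKey } = await subtle.generateKey(
  { name: 'ECDSA', namedCurve: 'P-256' },
  false,
  ['sign', 'verify'],
);

const data = new TextEncoder().encode('message to sign');

// sign() uses the private key; verify() checks the signature with the public key.
const signature = await subtle.sign({ name: 'ECDSA', hash: 'SHA-256' }, privateKey, data);
console.log(await subtle.verify({ name: 'ECDSA', hash: 'SHA-256' }, publicKey, signature, data)); // true
```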
+ * @since v15.0.0 + */ + wrapKey( + format: KeyFormat, + key: CryptoKey, + wrappingKey: CryptoKey, + wrapAlgorithm: AlgorithmIdentifier | RsaOaepParams | AesCtrParams | AesCbcParams | AesGcmParams, + ): Promise; + } + } + + global { + var crypto: typeof globalThis extends { + crypto: infer T; + onmessage: any; + } ? T + : webcrypto.Crypto; + } +} +declare module "node:crypto" { + export * from "crypto"; +} diff --git a/node_modules/@types/node-fetch/node_modules/@types/node/dgram.d.ts b/node_modules/@types/node-fetch/node_modules/@types/node/dgram.d.ts new file mode 100644 index 0000000..8c2ac9b --- /dev/null +++ b/node_modules/@types/node-fetch/node_modules/@types/node/dgram.d.ts @@ -0,0 +1,596 @@ +/** + * The `node:dgram` module provides an implementation of UDP datagram sockets. + * + * ```js + * import dgram from 'node:dgram'; + * + * const server = dgram.createSocket('udp4'); + * + * server.on('error', (err) => { + * console.error(`server error:\n${err.stack}`); + * server.close(); + * }); + * + * server.on('message', (msg, rinfo) => { + * console.log(`server got: ${msg} from ${rinfo.address}:${rinfo.port}`); + * }); + * + * server.on('listening', () => { + * const address = server.address(); + * console.log(`server listening ${address.address}:${address.port}`); + * }); + * + * server.bind(41234); + * // Prints: server listening 0.0.0.0:41234 + * ``` + * @see [source](https://github.com/nodejs/node/blob/v22.x/lib/dgram.js) + */ +declare module "dgram" { + import { AddressInfo } from "node:net"; + import * as dns from "node:dns"; + import { Abortable, EventEmitter } from "node:events"; + interface RemoteInfo { + address: string; + family: "IPv4" | "IPv6"; + port: number; + size: number; + } + interface BindOptions { + port?: number | undefined; + address?: string | undefined; + exclusive?: boolean | undefined; + fd?: number | undefined; + } + type SocketType = "udp4" | "udp6"; + interface SocketOptions extends Abortable { + type: SocketType; + reuseAddr?: boolean | undefined; + /** + * @default false + */ + ipv6Only?: boolean | undefined; + recvBufferSize?: number | undefined; + sendBufferSize?: number | undefined; + lookup?: + | (( + hostname: string, + options: dns.LookupOneOptions, + callback: (err: NodeJS.ErrnoException | null, address: string, family: number) => void, + ) => void) + | undefined; + } + /** + * Creates a `dgram.Socket` object. Once the socket is created, calling `socket.bind()` will instruct the socket to begin listening for datagram + * messages. When `address` and `port` are not passed to `socket.bind()` the + * method will bind the socket to the "all interfaces" address on a random port + * (it does the right thing for both `udp4` and `udp6` sockets). The bound address + * and port can be retrieved using `socket.address().address` and `socket.address().port`. + * + * If the `signal` option is enabled, calling `.abort()` on the corresponding `AbortController` is similar to calling `.close()` on the socket: + * + * ```js + * const controller = new AbortController(); + * const { signal } = controller; + * const server = dgram.createSocket({ type: 'udp4', signal }); + * server.on('message', (msg, rinfo) => { + * console.log(`server got: ${msg} from ${rinfo.address}:${rinfo.port}`); + * }); + * // Later, when you want to close the server. + * controller.abort(); + * ``` + * @since v0.11.13 + * @param options Available options are: + * @param callback Attached as a listener for `'message'` events. Optional. 
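A brief sketch of passing a `SocketOptions` object to `createSocket()` and a `BindOptions` object to `bind()`; the port and buffer size are placeholders.

```js
import dgram from 'node:dgram';

const socket = dgram.createSocket({
  type: 'udp4',
  reuseAddr: true,          // let other sockets bind the same address/port
  recvBufferSize: 1 << 20,  // request a 1 MiB SO_RCVBUF
});

socket.bind({ port: 41234, exclusive: true }, () => {
  console.log(socket.address()); // { address, family, port }
  socket.close();
});
```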
+ */ + function createSocket(type: SocketType, callback?: (msg: Buffer, rinfo: RemoteInfo) => void): Socket; + function createSocket(options: SocketOptions, callback?: (msg: Buffer, rinfo: RemoteInfo) => void): Socket; + /** + * Encapsulates the datagram functionality. + * + * New instances of `dgram.Socket` are created using {@link createSocket}. + * The `new` keyword is not to be used to create `dgram.Socket` instances. + * @since v0.1.99 + */ + class Socket extends EventEmitter { + /** + * Tells the kernel to join a multicast group at the given `multicastAddress` and `multicastInterface` using the `IP_ADD_MEMBERSHIP` socket option. If the `multicastInterface` argument is not + * specified, the operating system will choose + * one interface and will add membership to it. To add membership to every + * available interface, call `addMembership` multiple times, once per interface. + * + * When called on an unbound socket, this method will implicitly bind to a random + * port, listening on all interfaces. + * + * When sharing a UDP socket across multiple `cluster` workers, the`socket.addMembership()` function must be called only once or an`EADDRINUSE` error will occur: + * + * ```js + * import cluster from 'node:cluster'; + * import dgram from 'node:dgram'; + * + * if (cluster.isPrimary) { + * cluster.fork(); // Works ok. + * cluster.fork(); // Fails with EADDRINUSE. + * } else { + * const s = dgram.createSocket('udp4'); + * s.bind(1234, () => { + * s.addMembership('224.0.0.114'); + * }); + * } + * ``` + * @since v0.6.9 + */ + addMembership(multicastAddress: string, multicastInterface?: string): void; + /** + * Returns an object containing the address information for a socket. + * For UDP sockets, this object will contain `address`, `family`, and `port` properties. + * + * This method throws `EBADF` if called on an unbound socket. + * @since v0.1.99 + */ + address(): AddressInfo; + /** + * For UDP sockets, causes the `dgram.Socket` to listen for datagram + * messages on a named `port` and optional `address`. If `port` is not + * specified or is `0`, the operating system will attempt to bind to a + * random port. If `address` is not specified, the operating system will + * attempt to listen on all addresses. Once binding is complete, a `'listening'` event is emitted and the optional `callback` function is + * called. + * + * Specifying both a `'listening'` event listener and passing a `callback` to the `socket.bind()` method is not harmful but not very + * useful. + * + * A bound datagram socket keeps the Node.js process running to receive + * datagram messages. + * + * If binding fails, an `'error'` event is generated. In rare case (e.g. + * attempting to bind with a closed socket), an `Error` may be thrown. + * + * Example of a UDP server listening on port 41234: + * + * ```js + * import dgram from 'node:dgram'; + * + * const server = dgram.createSocket('udp4'); + * + * server.on('error', (err) => { + * console.error(`server error:\n${err.stack}`); + * server.close(); + * }); + * + * server.on('message', (msg, rinfo) => { + * console.log(`server got: ${msg} from ${rinfo.address}:${rinfo.port}`); + * }); + * + * server.on('listening', () => { + * const address = server.address(); + * console.log(`server listening ${address.address}:${address.port}`); + * }); + * + * server.bind(41234); + * // Prints: server listening 0.0.0.0:41234 + * ``` + * @since v0.1.99 + * @param callback with no parameters. Called when binding is complete. 
+ */ + bind(port?: number, address?: string, callback?: () => void): this; + bind(port?: number, callback?: () => void): this; + bind(callback?: () => void): this; + bind(options: BindOptions, callback?: () => void): this; + /** + * Close the underlying socket and stop listening for data on it. If a callback is + * provided, it is added as a listener for the `'close'` event. + * @since v0.1.99 + * @param callback Called when the socket has been closed. + */ + close(callback?: () => void): this; + /** + * Associates the `dgram.Socket` to a remote address and port. Every + * message sent by this handle is automatically sent to that destination. Also, + * the socket will only receive messages from that remote peer. + * Trying to call `connect()` on an already connected socket will result + * in an `ERR_SOCKET_DGRAM_IS_CONNECTED` exception. If `address` is not + * provided, `'127.0.0.1'` (for `udp4` sockets) or `'::1'` (for `udp6` sockets) + * will be used by default. Once the connection is complete, a `'connect'` event + * is emitted and the optional `callback` function is called. In case of failure, + * the `callback` is called or, failing this, an `'error'` event is emitted. + * @since v12.0.0 + * @param callback Called when the connection is completed or on error. + */ + connect(port: number, address?: string, callback?: () => void): void; + connect(port: number, callback: () => void): void; + /** + * A synchronous function that disassociates a connected `dgram.Socket` from + * its remote address. Trying to call `disconnect()` on an unbound or already + * disconnected socket will result in an `ERR_SOCKET_DGRAM_NOT_CONNECTED` exception. + * @since v12.0.0 + */ + disconnect(): void; + /** + * Instructs the kernel to leave a multicast group at `multicastAddress` using the `IP_DROP_MEMBERSHIP` socket option. This method is automatically called by the + * kernel when the socket is closed or the process terminates, so most apps will + * never have reason to call this. + * + * If `multicastInterface` is not specified, the operating system will attempt to + * drop membership on all valid interfaces. + * @since v0.6.9 + */ + dropMembership(multicastAddress: string, multicastInterface?: string): void; + /** + * This method throws `ERR_SOCKET_BUFFER_SIZE` if called on an unbound socket. + * @since v8.7.0 + * @return the `SO_RCVBUF` socket receive buffer size in bytes. + */ + getRecvBufferSize(): number; + /** + * This method throws `ERR_SOCKET_BUFFER_SIZE` if called on an unbound socket. + * @since v8.7.0 + * @return the `SO_SNDBUF` socket send buffer size in bytes. + */ + getSendBufferSize(): number; + /** + * @since v18.8.0, v16.19.0 + * @return Number of bytes queued for sending. + */ + getSendQueueSize(): number; + /** + * @since v18.8.0, v16.19.0 + * @return Number of send requests currently in the queue awaiting to be processed. + */ + getSendQueueCount(): number; + /** + * By default, binding a socket will cause it to block the Node.js process from + * exiting as long as the socket is open. The `socket.unref()` method can be used + * to exclude the socket from the reference counting that keeps the Node.js + * process active. The `socket.ref()` method adds the socket back to the reference + * counting and restores the default behavior. + * + * Calling `socket.ref()` multiples times will have no additional effect. + * + * The `socket.ref()` method returns a reference to the socket so calls can be + * chained. 
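A small sketch of a connected UDP socket using `connect()`, `remoteAddress()`, the send-queue getters, and `disconnect()`; the port and host mirror the placeholders used elsewhere in these comments.

```js
import dgram from 'node:dgram';
import { Buffer } from 'node:buffer';

const client = dgram.createSocket('udp4');

// A connected socket remembers its peer, so send() needs no port/address.
client.connect(41234, 'localhost', () => {
  console.log(client.remoteAddress()); // { address, family, port }
  client.send(Buffer.from('ping'), (err) => {
    if (err) console.error(err);
    console.log(client.getSendQueueSize()); // bytes still queued (0 here)
    client.disconnect();
    client.close();
  });
});
```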
+ * @since v0.9.1 + */ + ref(): this; + /** + * Returns an object containing the `address`, `family`, and `port` of the remote + * endpoint. This method throws an `ERR_SOCKET_DGRAM_NOT_CONNECTED` exception + * if the socket is not connected. + * @since v12.0.0 + */ + remoteAddress(): AddressInfo; + /** + * Broadcasts a datagram on the socket. + * For connectionless sockets, the destination `port` and `address` must be + * specified. Connected sockets, on the other hand, will use their associated + * remote endpoint, so the `port` and `address` arguments must not be set. + * + * The `msg` argument contains the message to be sent. + * Depending on its type, different behavior can apply. If `msg` is a `Buffer`, + * any `TypedArray` or a `DataView`, + * the `offset` and `length` specify the offset within the `Buffer` where the + * message begins and the number of bytes in the message, respectively. + * If `msg` is a `String`, then it is automatically converted to a `Buffer` with `'utf8'` encoding. With messages that + * contain multi-byte characters, `offset` and `length` will be calculated with + * respect to `byte length` and not the character position. + * If `msg` is an array, `offset` and `length` must not be specified. + * + * The `address` argument is a string. If the value of `address` is a host name, + * DNS will be used to resolve the address of the host. If `address` is not + * provided or otherwise nullish, `'127.0.0.1'` (for `udp4` sockets) or `'::1'` (for `udp6` sockets) will be used by default. + * + * If the socket has not been previously bound with a call to `bind`, the socket + * is assigned a random port number and is bound to the "all interfaces" address + * (`'0.0.0.0'` for `udp4` sockets, `'::0'` for `udp6` sockets.) + * + * An optional `callback` function may be specified to as a way of reporting + * DNS errors or for determining when it is safe to reuse the `buf` object. + * DNS lookups delay the time to send for at least one tick of the + * Node.js event loop. + * + * The only way to know for sure that the datagram has been sent is by using a `callback`. If an error occurs and a `callback` is given, the error will be + * passed as the first argument to the `callback`. If a `callback` is not given, + * the error is emitted as an `'error'` event on the `socket` object. + * + * Offset and length are optional but both _must_ be set if either are used. + * They are supported only when the first argument is a `Buffer`, a `TypedArray`, + * or a `DataView`. + * + * This method throws `ERR_SOCKET_BAD_PORT` if called on an unbound socket. + * + * Example of sending a UDP packet to a port on `localhost`; + * + * ```js + * import dgram from 'node:dgram'; + * import { Buffer } from 'node:buffer'; + * + * const message = Buffer.from('Some bytes'); + * const client = dgram.createSocket('udp4'); + * client.send(message, 41234, 'localhost', (err) => { + * client.close(); + * }); + * ``` + * + * Example of sending a UDP packet composed of multiple buffers to a port on`127.0.0.1`; + * + * ```js + * import dgram from 'node:dgram'; + * import { Buffer } from 'node:buffer'; + * + * const buf1 = Buffer.from('Some '); + * const buf2 = Buffer.from('bytes'); + * const client = dgram.createSocket('udp4'); + * client.send([buf1, buf2], 41234, (err) => { + * client.close(); + * }); + * ``` + * + * Sending multiple buffers might be faster or slower depending on the + * application and operating system. Run benchmarks to + * determine the optimal strategy on a case-by-case basis. 
Generally speaking, + * however, sending multiple buffers is faster. + * + * Example of sending a UDP packet using a socket connected to a port on `localhost`: + * + * ```js + * import dgram from 'node:dgram'; + * import { Buffer } from 'node:buffer'; + * + * const message = Buffer.from('Some bytes'); + * const client = dgram.createSocket('udp4'); + * client.connect(41234, 'localhost', (err) => { + * client.send(message, (err) => { + * client.close(); + * }); + * }); + * ``` + * @since v0.1.99 + * @param msg Message to be sent. + * @param offset Offset in the buffer where the message starts. + * @param length Number of bytes in the message. + * @param port Destination port. + * @param address Destination host name or IP address. + * @param callback Called when the message has been sent. + */ + send( + msg: string | Uint8Array | readonly any[], + port?: number, + address?: string, + callback?: (error: Error | null, bytes: number) => void, + ): void; + send( + msg: string | Uint8Array | readonly any[], + port?: number, + callback?: (error: Error | null, bytes: number) => void, + ): void; + send( + msg: string | Uint8Array | readonly any[], + callback?: (error: Error | null, bytes: number) => void, + ): void; + send( + msg: string | Uint8Array, + offset: number, + length: number, + port?: number, + address?: string, + callback?: (error: Error | null, bytes: number) => void, + ): void; + send( + msg: string | Uint8Array, + offset: number, + length: number, + port?: number, + callback?: (error: Error | null, bytes: number) => void, + ): void; + send( + msg: string | Uint8Array, + offset: number, + length: number, + callback?: (error: Error | null, bytes: number) => void, + ): void; + /** + * Sets or clears the `SO_BROADCAST` socket option. When set to `true`, UDP + * packets may be sent to a local interface's broadcast address. + * + * This method throws `EBADF` if called on an unbound socket. + * @since v0.6.9 + */ + setBroadcast(flag: boolean): void; + /** + * _All references to scope in this section are referring to [IPv6 Zone Indices](https://en.wikipedia.org/wiki/IPv6_address#Scoped_literal_IPv6_addresses), which are defined by [RFC + * 4007](https://tools.ietf.org/html/rfc4007). In string form, an IP_ + * _with a scope index is written as `'IP%scope'` where scope is an interface name_ + * _or interface number._ + * + * Sets the default outgoing multicast interface of the socket to a chosen + * interface or back to system interface selection. The `multicastInterface` must + * be a valid string representation of an IP from the socket's family. + * + * For IPv4 sockets, this should be the IP configured for the desired physical + * interface. All packets sent to multicast on the socket will be sent on the + * interface determined by the most recent successful use of this call. + * + * For IPv6 sockets, `multicastInterface` should include a scope to indicate the + * interface as in the examples that follow. In IPv6, individual `send` calls can + * also use explicit scope in addresses, so only packets sent to a multicast + * address without specifying an explicit scope are affected by the most recent + * successful use of this call. + * + * This method throws `EBADF` if called on an unbound socket. 
+ * + * #### Example: IPv6 outgoing multicast interface + * + * On most systems, where scope format uses the interface name: + * + * ```js + * const socket = dgram.createSocket('udp6'); + * + * socket.bind(1234, () => { + * socket.setMulticastInterface('::%eth1'); + * }); + * ``` + * + * On Windows, where scope format uses an interface number: + * + * ```js + * const socket = dgram.createSocket('udp6'); + * + * socket.bind(1234, () => { + * socket.setMulticastInterface('::%2'); + * }); + * ``` + * + * #### Example: IPv4 outgoing multicast interface + * + * All systems use an IP of the host on the desired physical interface: + * + * ```js + * const socket = dgram.createSocket('udp4'); + * + * socket.bind(1234, () => { + * socket.setMulticastInterface('10.0.0.2'); + * }); + * ``` + * @since v8.6.0 + */ + setMulticastInterface(multicastInterface: string): void; + /** + * Sets or clears the `IP_MULTICAST_LOOP` socket option. When set to `true`, + * multicast packets will also be received on the local interface. + * + * This method throws `EBADF` if called on an unbound socket. + * @since v0.3.8 + */ + setMulticastLoopback(flag: boolean): boolean; + /** + * Sets the `IP_MULTICAST_TTL` socket option. While TTL generally stands for + * "Time to Live", in this context it specifies the number of IP hops that a + * packet is allowed to travel through, specifically for multicast traffic. Each + * router or gateway that forwards a packet decrements the TTL. If the TTL is + * decremented to 0 by a router, it will not be forwarded. + * + * The `ttl` argument may be between 0 and 255\. The default on most systems is `1`. + * + * This method throws `EBADF` if called on an unbound socket. + * @since v0.3.8 + */ + setMulticastTTL(ttl: number): number; + /** + * Sets the `SO_RCVBUF` socket option. Sets the maximum socket receive buffer + * in bytes. + * + * This method throws `ERR_SOCKET_BUFFER_SIZE` if called on an unbound socket. + * @since v8.7.0 + */ + setRecvBufferSize(size: number): void; + /** + * Sets the `SO_SNDBUF` socket option. Sets the maximum socket send buffer + * in bytes. + * + * This method throws `ERR_SOCKET_BUFFER_SIZE` if called on an unbound socket. + * @since v8.7.0 + */ + setSendBufferSize(size: number): void; + /** + * Sets the `IP_TTL` socket option. While TTL generally stands for "Time to Live", + * in this context it specifies the number of IP hops that a packet is allowed to + * travel through. Each router or gateway that forwards a packet decrements the + * TTL. If the TTL is decremented to 0 by a router, it will not be forwarded. + * Changing TTL values is typically done for network probes or when multicasting. + * + * The `ttl` argument may be between 1 and 255\. The default on most systems + * is 64. + * + * This method throws `EBADF` if called on an unbound socket. + * @since v0.1.101 + */ + setTTL(ttl: number): number; + /** + * By default, binding a socket will cause it to block the Node.js process from + * exiting as long as the socket is open. The `socket.unref()` method can be used + * to exclude the socket from the reference counting that keeps the Node.js + * process active, allowing the process to exit even if the socket is still + * listening. + * + * Calling `socket.unref()` multiple times will have no additional effect. + * + * The `socket.unref()` method returns a reference to the socket so calls can be + * chained. 
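A compact sketch tying together the multicast and buffer-tuning setters described above; the group address matches the one used in the `addMembership()` example, while the port, TTL, and buffer size are arbitrary.

```js
import dgram from 'node:dgram';

const socket = dgram.createSocket({ type: 'udp4', reuseAddr: true });

socket.bind(5007, () => {
  socket.addMembership('224.0.0.114'); // join the multicast group
  socket.setMulticastTTL(2);           // allow up to two IP hops
  socket.setMulticastLoopback(false);  // do not receive our own packets locally
  socket.setRecvBufferSize(1 << 20);   // grow SO_RCVBUF to 1 MiB
});

socket.on('message', (msg, rinfo) => {
  console.log(`got ${msg} from ${rinfo.address}:${rinfo.port}`);
});
```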
+ * @since v0.9.1 + */ + unref(): this; + /** + * Tells the kernel to join a source-specific multicast channel at the given `sourceAddress` and `groupAddress`, using the `multicastInterface` with the `IP_ADD_SOURCE_MEMBERSHIP` socket + * option. If the `multicastInterface` argument + * is not specified, the operating system will choose one interface and will add + * membership to it. To add membership to every available interface, call `socket.addSourceSpecificMembership()` multiple times, once per interface. + * + * When called on an unbound socket, this method will implicitly bind to a random + * port, listening on all interfaces. + * @since v13.1.0, v12.16.0 + */ + addSourceSpecificMembership(sourceAddress: string, groupAddress: string, multicastInterface?: string): void; + /** + * Instructs the kernel to leave a source-specific multicast channel at the given `sourceAddress` and `groupAddress` using the `IP_DROP_SOURCE_MEMBERSHIP` socket option. This method is + * automatically called by the kernel when the + * socket is closed or the process terminates, so most apps will never have + * reason to call this. + * + * If `multicastInterface` is not specified, the operating system will attempt to + * drop membership on all valid interfaces. + * @since v13.1.0, v12.16.0 + */ + dropSourceSpecificMembership(sourceAddress: string, groupAddress: string, multicastInterface?: string): void; + /** + * events.EventEmitter + * 1. close + * 2. connect + * 3. error + * 4. listening + * 5. message + */ + addListener(event: string, listener: (...args: any[]) => void): this; + addListener(event: "close", listener: () => void): this; + addListener(event: "connect", listener: () => void): this; + addListener(event: "error", listener: (err: Error) => void): this; + addListener(event: "listening", listener: () => void): this; + addListener(event: "message", listener: (msg: Buffer, rinfo: RemoteInfo) => void): this; + emit(event: string | symbol, ...args: any[]): boolean; + emit(event: "close"): boolean; + emit(event: "connect"): boolean; + emit(event: "error", err: Error): boolean; + emit(event: "listening"): boolean; + emit(event: "message", msg: Buffer, rinfo: RemoteInfo): boolean; + on(event: string, listener: (...args: any[]) => void): this; + on(event: "close", listener: () => void): this; + on(event: "connect", listener: () => void): this; + on(event: "error", listener: (err: Error) => void): this; + on(event: "listening", listener: () => void): this; + on(event: "message", listener: (msg: Buffer, rinfo: RemoteInfo) => void): this; + once(event: string, listener: (...args: any[]) => void): this; + once(event: "close", listener: () => void): this; + once(event: "connect", listener: () => void): this; + once(event: "error", listener: (err: Error) => void): this; + once(event: "listening", listener: () => void): this; + once(event: "message", listener: (msg: Buffer, rinfo: RemoteInfo) => void): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + prependListener(event: "close", listener: () => void): this; + prependListener(event: "connect", listener: () => void): this; + prependListener(event: "error", listener: (err: Error) => void): this; + prependListener(event: "listening", listener: () => void): this; + prependListener(event: "message", listener: (msg: Buffer, rinfo: RemoteInfo) => void): this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + prependOnceListener(event: "close", listener: () => void): this; + prependOnceListener(event: 
"connect", listener: () => void): this; + prependOnceListener(event: "error", listener: (err: Error) => void): this; + prependOnceListener(event: "listening", listener: () => void): this; + prependOnceListener(event: "message", listener: (msg: Buffer, rinfo: RemoteInfo) => void): this; + /** + * Calls `socket.close()` and returns a promise that fulfills when the socket has closed. + * @since v20.5.0 + */ + [Symbol.asyncDispose](): Promise; + } +} +declare module "node:dgram" { + export * from "dgram"; +} diff --git a/node_modules/@types/node-fetch/node_modules/@types/node/diagnostics_channel.d.ts b/node_modules/@types/node-fetch/node_modules/@types/node/diagnostics_channel.d.ts new file mode 100644 index 0000000..1cc7486 --- /dev/null +++ b/node_modules/@types/node-fetch/node_modules/@types/node/diagnostics_channel.d.ts @@ -0,0 +1,554 @@ +/** + * The `node:diagnostics_channel` module provides an API to create named channels + * to report arbitrary message data for diagnostics purposes. + * + * It can be accessed using: + * + * ```js + * import diagnostics_channel from 'node:diagnostics_channel'; + * ``` + * + * It is intended that a module writer wanting to report diagnostics messages + * will create one or many top-level channels to report messages through. + * Channels may also be acquired at runtime but it is not encouraged + * due to the additional overhead of doing so. Channels may be exported for + * convenience, but as long as the name is known it can be acquired anywhere. + * + * If you intend for your module to produce diagnostics data for others to + * consume it is recommended that you include documentation of what named + * channels are used along with the shape of the message data. Channel names + * should generally include the module name to avoid collisions with data from + * other modules. + * @since v15.1.0, v14.17.0 + * @see [source](https://github.com/nodejs/node/blob/v22.x/lib/diagnostics_channel.js) + */ +declare module "diagnostics_channel" { + import { AsyncLocalStorage } from "node:async_hooks"; + /** + * Check if there are active subscribers to the named channel. This is helpful if + * the message you want to send might be expensive to prepare. + * + * This API is optional but helpful when trying to publish messages from very + * performance-sensitive code. + * + * ```js + * import diagnostics_channel from 'node:diagnostics_channel'; + * + * if (diagnostics_channel.hasSubscribers('my-channel')) { + * // There are subscribers, prepare and publish message + * } + * ``` + * @since v15.1.0, v14.17.0 + * @param name The channel name + * @return If there are active subscribers + */ + function hasSubscribers(name: string | symbol): boolean; + /** + * This is the primary entry-point for anyone wanting to publish to a named + * channel. It produces a channel object which is optimized to reduce overhead at + * publish time as much as possible. + * + * ```js + * import diagnostics_channel from 'node:diagnostics_channel'; + * + * const channel = diagnostics_channel.channel('my-channel'); + * ``` + * @since v15.1.0, v14.17.0 + * @param name The channel name + * @return The named channel object + */ + function channel(name: string | symbol): Channel; + type ChannelListener = (message: unknown, name: string | symbol) => void; + /** + * Register a message handler to subscribe to this channel. This message handler + * will be run synchronously whenever a message is published to the channel. Any + * errors thrown in the message handler will trigger an `'uncaughtException'`. 
+ * + * ```js + * import diagnostics_channel from 'node:diagnostics_channel'; + * + * diagnostics_channel.subscribe('my-channel', (message, name) => { + * // Received data + * }); + * ``` + * @since v18.7.0, v16.17.0 + * @param name The channel name + * @param onMessage The handler to receive channel messages + */ + function subscribe(name: string | symbol, onMessage: ChannelListener): void; + /** + * Remove a message handler previously registered to this channel with {@link subscribe}. + * + * ```js + * import diagnostics_channel from 'node:diagnostics_channel'; + * + * function onMessage(message, name) { + * // Received data + * } + * + * diagnostics_channel.subscribe('my-channel', onMessage); + * + * diagnostics_channel.unsubscribe('my-channel', onMessage); + * ``` + * @since v18.7.0, v16.17.0 + * @param name The channel name + * @param onMessage The previous subscribed handler to remove + * @return `true` if the handler was found, `false` otherwise. + */ + function unsubscribe(name: string | symbol, onMessage: ChannelListener): boolean; + /** + * Creates a `TracingChannel` wrapper for the given `TracingChannel Channels`. If a name is given, the corresponding tracing + * channels will be created in the form of `tracing:${name}:${eventType}` where `eventType` corresponds to the types of `TracingChannel Channels`. + * + * ```js + * import diagnostics_channel from 'node:diagnostics_channel'; + * + * const channelsByName = diagnostics_channel.tracingChannel('my-channel'); + * + * // or... + * + * const channelsByCollection = diagnostics_channel.tracingChannel({ + * start: diagnostics_channel.channel('tracing:my-channel:start'), + * end: diagnostics_channel.channel('tracing:my-channel:end'), + * asyncStart: diagnostics_channel.channel('tracing:my-channel:asyncStart'), + * asyncEnd: diagnostics_channel.channel('tracing:my-channel:asyncEnd'), + * error: diagnostics_channel.channel('tracing:my-channel:error'), + * }); + * ``` + * @since v19.9.0 + * @experimental + * @param nameOrChannels Channel name or object containing all the `TracingChannel Channels` + * @return Collection of channels to trace with + */ + function tracingChannel< + StoreType = unknown, + ContextType extends object = StoreType extends object ? StoreType : object, + >( + nameOrChannels: string | TracingChannelCollection, + ): TracingChannel; + /** + * The class `Channel` represents an individual named channel within the data + * pipeline. It is used to track subscribers and to publish messages when there + * are subscribers present. It exists as a separate object to avoid channel + * lookups at publish time, enabling very fast publish speeds and allowing + * for heavy use while incurring very minimal cost. Channels are created with {@link channel}, constructing a channel directly + * with `new Channel(name)` is not supported. + * @since v15.1.0, v14.17.0 + */ + class Channel { + readonly name: string | symbol; + /** + * Check if there are active subscribers to this channel. This is helpful if + * the message you want to send might be expensive to prepare. + * + * This API is optional but helpful when trying to publish messages from very + * performance-sensitive code. 
+ * + * ```js + * import diagnostics_channel from 'node:diagnostics_channel'; + * + * const channel = diagnostics_channel.channel('my-channel'); + * + * if (channel.hasSubscribers) { + * // There are subscribers, prepare and publish message + * } + * ``` + * @since v15.1.0, v14.17.0 + */ + readonly hasSubscribers: boolean; + private constructor(name: string | symbol); + /** + * Publish a message to any subscribers to the channel. This will trigger + * message handlers synchronously so they will execute within the same context. + * + * ```js + * import diagnostics_channel from 'node:diagnostics_channel'; + * + * const channel = diagnostics_channel.channel('my-channel'); + * + * channel.publish({ + * some: 'message', + * }); + * ``` + * @since v15.1.0, v14.17.0 + * @param message The message to send to the channel subscribers + */ + publish(message: unknown): void; + /** + * Register a message handler to subscribe to this channel. This message handler + * will be run synchronously whenever a message is published to the channel. Any + * errors thrown in the message handler will trigger an `'uncaughtException'`. + * + * ```js + * import diagnostics_channel from 'node:diagnostics_channel'; + * + * const channel = diagnostics_channel.channel('my-channel'); + * + * channel.subscribe((message, name) => { + * // Received data + * }); + * ``` + * @since v15.1.0, v14.17.0 + * @deprecated Since v18.7.0,v16.17.0 - Use {@link subscribe(name, onMessage)} + * @param onMessage The handler to receive channel messages + */ + subscribe(onMessage: ChannelListener): void; + /** + * Remove a message handler previously registered to this channel with `channel.subscribe(onMessage)`. + * + * ```js + * import diagnostics_channel from 'node:diagnostics_channel'; + * + * const channel = diagnostics_channel.channel('my-channel'); + * + * function onMessage(message, name) { + * // Received data + * } + * + * channel.subscribe(onMessage); + * + * channel.unsubscribe(onMessage); + * ``` + * @since v15.1.0, v14.17.0 + * @deprecated Since v18.7.0,v16.17.0 - Use {@link unsubscribe(name, onMessage)} + * @param onMessage The previous subscribed handler to remove + * @return `true` if the handler was found, `false` otherwise. + */ + unsubscribe(onMessage: ChannelListener): void; + /** + * When `channel.runStores(context, ...)` is called, the given context data + * will be applied to any store bound to the channel. If the store has already been + * bound the previous `transform` function will be replaced with the new one. + * The `transform` function may be omitted to set the given context data as the + * context directly. + * + * ```js + * import diagnostics_channel from 'node:diagnostics_channel'; + * import { AsyncLocalStorage } from 'node:async_hooks'; + * + * const store = new AsyncLocalStorage(); + * + * const channel = diagnostics_channel.channel('my-channel'); + * + * channel.bindStore(store, (data) => { + * return { data }; + * }); + * ``` + * @since v19.9.0 + * @experimental + * @param store The store to which to bind the context data + * @param transform Transform context data before setting the store context + */ + bindStore(store: AsyncLocalStorage, transform?: (context: ContextType) => StoreType): void; + /** + * Remove a message handler previously registered to this channel with `channel.bindStore(store)`. 
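Purely as an illustrative aside to the declarations above (not part of the vendored typings): a minimal sketch of the producer/consumer flow that `channel()`, `hasSubscribers`, `publish()` and `subscribe()` describe. The channel name `'my-module:request'` and the message shape are hypothetical.

```js
import diagnostics_channel from 'node:diagnostics_channel';

// Acquire the named channel once, at module load time (hypothetical name).
const channel = diagnostics_channel.channel('my-module:request');

// A consumer registers a handler; it runs synchronously on every publish.
diagnostics_channel.subscribe('my-module:request', (message, name) => {
  console.log(`received on ${String(name)}:`, message);
});

// The producer can skip building messages when nobody is listening.
if (channel.hasSubscribers) {
  channel.publish({ url: '/example', method: 'GET' });
}
```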
+ * + * ```js + * import diagnostics_channel from 'node:diagnostics_channel'; + * import { AsyncLocalStorage } from 'node:async_hooks'; + * + * const store = new AsyncLocalStorage(); + * + * const channel = diagnostics_channel.channel('my-channel'); + * + * channel.bindStore(store); + * channel.unbindStore(store); + * ``` + * @since v19.9.0 + * @experimental + * @param store The store to unbind from the channel. + * @return `true` if the store was found, `false` otherwise. + */ + unbindStore(store: any): void; + /** + * Applies the given data to any AsyncLocalStorage instances bound to the channel + * for the duration of the given function, then publishes to the channel within + * the scope of that data is applied to the stores. + * + * If a transform function was given to `channel.bindStore(store)` it will be + * applied to transform the message data before it becomes the context value for + * the store. The prior storage context is accessible from within the transform + * function in cases where context linking is required. + * + * The context applied to the store should be accessible in any async code which + * continues from execution which began during the given function, however + * there are some situations in which `context loss` may occur. + * + * ```js + * import diagnostics_channel from 'node:diagnostics_channel'; + * import { AsyncLocalStorage } from 'node:async_hooks'; + * + * const store = new AsyncLocalStorage(); + * + * const channel = diagnostics_channel.channel('my-channel'); + * + * channel.bindStore(store, (message) => { + * const parent = store.getStore(); + * return new Span(message, parent); + * }); + * channel.runStores({ some: 'message' }, () => { + * store.getStore(); // Span({ some: 'message' }) + * }); + * ``` + * @since v19.9.0 + * @experimental + * @param context Message to send to subscribers and bind to stores + * @param fn Handler to run within the entered storage context + * @param thisArg The receiver to be used for the function call. + * @param args Optional arguments to pass to the function. + */ + runStores(): void; + } + interface TracingChannelSubscribers { + start: (message: ContextType) => void; + end: ( + message: ContextType & { + error?: unknown; + result?: unknown; + }, + ) => void; + asyncStart: ( + message: ContextType & { + error?: unknown; + result?: unknown; + }, + ) => void; + asyncEnd: ( + message: ContextType & { + error?: unknown; + result?: unknown; + }, + ) => void; + error: ( + message: ContextType & { + error: unknown; + }, + ) => void; + } + interface TracingChannelCollection { + start: Channel; + end: Channel; + asyncStart: Channel; + asyncEnd: Channel; + error: Channel; + } + /** + * The class `TracingChannel` is a collection of `TracingChannel Channels` which + * together express a single traceable action. It is used to formalize and + * simplify the process of producing events for tracing application flow. {@link tracingChannel} is used to construct a `TracingChannel`. As with `Channel` it is recommended to create and reuse a + * single `TracingChannel` at the top-level of the file rather than creating them + * dynamically. + * @since v19.9.0 + * @experimental + */ + class TracingChannel implements TracingChannelCollection { + start: Channel; + end: Channel; + asyncStart: Channel; + asyncEnd: Channel; + error: Channel; + /** + * Helper to subscribe a collection of functions to the corresponding channels. + * This is the same as calling `channel.subscribe(onMessage)` on each channel + * individually. 
+ * + * ```js + * import diagnostics_channel from 'node:diagnostics_channel'; + * + * const channels = diagnostics_channel.tracingChannel('my-channel'); + * + * channels.subscribe({ + * start(message) { + * // Handle start message + * }, + * end(message) { + * // Handle end message + * }, + * asyncStart(message) { + * // Handle asyncStart message + * }, + * asyncEnd(message) { + * // Handle asyncEnd message + * }, + * error(message) { + * // Handle error message + * }, + * }); + * ``` + * @since v19.9.0 + * @experimental + * @param subscribers Set of `TracingChannel Channels` subscribers + */ + subscribe(subscribers: TracingChannelSubscribers): void; + /** + * Helper to unsubscribe a collection of functions from the corresponding channels. + * This is the same as calling `channel.unsubscribe(onMessage)` on each channel + * individually. + * + * ```js + * import diagnostics_channel from 'node:diagnostics_channel'; + * + * const channels = diagnostics_channel.tracingChannel('my-channel'); + * + * channels.unsubscribe({ + * start(message) { + * // Handle start message + * }, + * end(message) { + * // Handle end message + * }, + * asyncStart(message) { + * // Handle asyncStart message + * }, + * asyncEnd(message) { + * // Handle asyncEnd message + * }, + * error(message) { + * // Handle error message + * }, + * }); + * ``` + * @since v19.9.0 + * @experimental + * @param subscribers Set of `TracingChannel Channels` subscribers + * @return `true` if all handlers were successfully unsubscribed, and `false` otherwise. + */ + unsubscribe(subscribers: TracingChannelSubscribers): void; + /** + * Trace a synchronous function call. This will always produce a `start event` and `end event` around the execution and may produce an `error event` if the given function throws an error. + * This will run the given function using `channel.runStores(context, ...)` on the `start` channel which ensures all + * events should have any bound stores set to match this trace context. + * + * To ensure only correct trace graphs are formed, events will only be published if subscribers are present prior to starting the trace. Subscriptions + * which are added after the trace begins will not receive future events from that trace, only future traces will be seen. + * + * ```js + * import diagnostics_channel from 'node:diagnostics_channel'; + * + * const channels = diagnostics_channel.tracingChannel('my-channel'); + * + * channels.traceSync(() => { + * // Do something + * }, { + * some: 'thing', + * }); + * ``` + * @since v19.9.0 + * @experimental + * @param fn Function to wrap a trace around + * @param context Shared object to correlate events through + * @param thisArg The receiver to be used for the function call + * @param args Optional arguments to pass to the function + * @return The return value of the given function + */ + traceSync( + fn: (this: ThisArg, ...args: Args) => any, + context?: ContextType, + thisArg?: ThisArg, + ...args: Args + ): void; + /** + * Trace a promise-returning function call. This will always produce a `start event` and `end event` around the synchronous portion of the + * function execution, and will produce an `asyncStart event` and `asyncEnd event` when a promise continuation is reached. It may also + * produce an `error event` if the given function throws an error or the + * returned promise rejects. This will run the given function using `channel.runStores(context, ...)` on the `start` channel which ensures all + * events should have any bound stores set to match this trace context. 
+ * + * To ensure only correct trace graphs are formed, events will only be published if subscribers are present prior to starting the trace. Subscriptions + * which are added after the trace begins will not receive future events from that trace, only future traces will be seen. + * + * ```js + * import diagnostics_channel from 'node:diagnostics_channel'; + * + * const channels = diagnostics_channel.tracingChannel('my-channel'); + * + * channels.tracePromise(async () => { + * // Do something + * }, { + * some: 'thing', + * }); + * ``` + * @since v19.9.0 + * @experimental + * @param fn Promise-returning function to wrap a trace around + * @param context Shared object to correlate trace events through + * @param thisArg The receiver to be used for the function call + * @param args Optional arguments to pass to the function + * @return Chained from promise returned by the given function + */ + tracePromise( + fn: (this: ThisArg, ...args: Args) => Promise, + context?: ContextType, + thisArg?: ThisArg, + ...args: Args + ): void; + /** + * Trace a callback-receiving function call. This will always produce a `start event` and `end event` around the synchronous portion of the + * function execution, and will produce a `asyncStart event` and `asyncEnd event` around the callback execution. It may also produce an `error event` if the given function throws an error or + * the returned + * promise rejects. This will run the given function using `channel.runStores(context, ...)` on the `start` channel which ensures all + * events should have any bound stores set to match this trace context. + * + * The `position` will be -1 by default to indicate the final argument should + * be used as the callback. + * + * ```js + * import diagnostics_channel from 'node:diagnostics_channel'; + * + * const channels = diagnostics_channel.tracingChannel('my-channel'); + * + * channels.traceCallback((arg1, callback) => { + * // Do something + * callback(null, 'result'); + * }, 1, { + * some: 'thing', + * }, thisArg, arg1, callback); + * ``` + * + * The callback will also be run with `channel.runStores(context, ...)` which + * enables context loss recovery in some cases. + * + * To ensure only correct trace graphs are formed, events will only be published if subscribers are present prior to starting the trace. Subscriptions + * which are added after the trace begins will not receive future events from that trace, only future traces will be seen. 
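As a hedged usage sketch of the `TracingChannel` helpers documented above (not part of the vendored typings; the channel name and context object are made up): subscribers are attached before any trace starts so the `start`/`end`/`asyncStart`/`asyncEnd` events are observed, and `tracePromise()` returns the promise chained from the wrapped function.

```js
import diagnostics_channel from 'node:diagnostics_channel';

const channels = diagnostics_channel.tracingChannel('my-channel');

// Subscribe before starting any trace; later subscriptions only see future traces.
channels.subscribe({
  start(message) { /* trace started */ },
  end(message) { /* synchronous portion finished */ },
  asyncStart(message) { /* promise continuation reached */ },
  asyncEnd(message) { /* promise settled */ },
  error(message) { console.error(message.error); },
});

// Wrap an async call; the resolved value comes from the wrapped function.
const value = await channels.tracePromise(async () => {
  return 42;
}, { some: 'context' });
```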
+ * + * ```js + * import diagnostics_channel from 'node:diagnostics_channel'; + * import { AsyncLocalStorage } from 'node:async_hooks'; + * + * const channels = diagnostics_channel.tracingChannel('my-channel'); + * const myStore = new AsyncLocalStorage(); + * + * // The start channel sets the initial store data to something + * // and stores that store data value on the trace context object + * channels.start.bindStore(myStore, (data) => { + * const span = new Span(data); + * data.span = span; + * return span; + * }); + * + * // Then asyncStart can restore from that data it stored previously + * channels.asyncStart.bindStore(myStore, (data) => { + * return data.span; + * }); + * ``` + * @since v19.9.0 + * @experimental + * @param fn callback using function to wrap a trace around + * @param position Zero-indexed argument position of expected callback + * @param context Shared object to correlate trace events through + * @param thisArg The receiver to be used for the function call + * @param args Optional arguments to pass to the function + * @return The return value of the given function + */ + traceCallback any>( + fn: Fn, + position?: number, + context?: ContextType, + thisArg?: any, + ...args: Parameters + ): void; + } +} +declare module "node:diagnostics_channel" { + export * from "diagnostics_channel"; +} diff --git a/node_modules/@types/node-fetch/node_modules/@types/node/dns.d.ts b/node_modules/@types/node-fetch/node_modules/@types/node/dns.d.ts new file mode 100644 index 0000000..af10fd9 --- /dev/null +++ b/node_modules/@types/node-fetch/node_modules/@types/node/dns.d.ts @@ -0,0 +1,865 @@ +/** + * The `node:dns` module enables name resolution. For example, use it to look up IP + * addresses of host names. + * + * Although named for the [Domain Name System (DNS)](https://en.wikipedia.org/wiki/Domain_Name_System), it does not always use the + * DNS protocol for lookups. {@link lookup} uses the operating system + * facilities to perform name resolution. It may not need to perform any network + * communication. To perform name resolution the way other applications on the same + * system do, use {@link lookup}. + * + * ```js + * import dns from 'node:dns'; + * + * dns.lookup('example.org', (err, address, family) => { + * console.log('address: %j family: IPv%s', address, family); + * }); + * // address: "93.184.216.34" family: IPv4 + * ``` + * + * All other functions in the `node:dns` module connect to an actual DNS server to + * perform name resolution. They will always use the network to perform DNS + * queries. These functions do not use the same set of configuration files used by {@link lookup} (e.g. `/etc/hosts`). Use these functions to always perform + * DNS queries, bypassing other name-resolution facilities. + * + * ```js + * import dns from 'node:dns'; + * + * dns.resolve4('archive.org', (err, addresses) => { + * if (err) throw err; + * + * console.log(`addresses: ${JSON.stringify(addresses)}`); + * + * addresses.forEach((a) => { + * dns.reverse(a, (err, hostnames) => { + * if (err) { + * throw err; + * } + * console.log(`reverse for ${a}: ${JSON.stringify(hostnames)}`); + * }); + * }); + * }); + * ``` + * + * See the [Implementation considerations section](https://nodejs.org/docs/latest-v22.x/api/dns.html#implementation-considerations) for more information. + * @see [source](https://github.com/nodejs/node/blob/v22.x/lib/dns.js) + */ +declare module "dns" { + import * as dnsPromises from "node:dns/promises"; + // Supported getaddrinfo flags. 
+ /** + * Limits returned address types to the types of non-loopback addresses configured on the system. For example, IPv4 addresses are + * only returned if the current system has at least one IPv4 address configured. + */ + export const ADDRCONFIG: number; + /** + * If the IPv6 family was specified, but no IPv6 addresses were found, then return IPv4 mapped IPv6 addresses. It is not supported + * on some operating systems (e.g. FreeBSD 10.1). + */ + export const V4MAPPED: number; + /** + * If `dns.V4MAPPED` is specified, return resolved IPv6 addresses as + * well as IPv4 mapped IPv6 addresses. + */ + export const ALL: number; + export interface LookupOptions { + /** + * The record family. Must be `4`, `6`, or `0`. For backward compatibility reasons, `'IPv4'` and `'IPv6'` are interpreted + * as `4` and `6` respectively. The value 0 indicates that either an IPv4 or IPv6 address is returned. If the value `0` is used + * with `{ all: true } (see below)`, both IPv4 and IPv6 addresses are returned. + * @default 0 + */ + family?: number | "IPv4" | "IPv6" | undefined; + /** + * One or more [supported `getaddrinfo`](https://nodejs.org/docs/latest-v22.x/api/dns.html#supported-getaddrinfo-flags) flags. Multiple flags may be + * passed by bitwise `OR`ing their values. + */ + hints?: number | undefined; + /** + * When `true`, the callback returns all resolved addresses in an array. Otherwise, returns a single address. + * @default false + */ + all?: boolean | undefined; + /** + * When `verbatim`, the resolved addresses are return unsorted. When `ipv4first`, the resolved addresses are sorted + * by placing IPv4 addresses before IPv6 addresses. When `ipv6first`, the resolved addresses are sorted by placing IPv6 + * addresses before IPv4 addresses. Default value is configurable using + * {@link setDefaultResultOrder} or [`--dns-result-order`](https://nodejs.org/docs/latest-v22.x/api/cli.html#--dns-result-orderorder). + * @default `verbatim` (addresses are not reordered) + * @since v22.1.0 + */ + order?: "ipv4first" | "ipv6first" | "verbatim" | undefined; + /** + * When `true`, the callback receives IPv4 and IPv6 addresses in the order the DNS resolver returned them. When `false`, IPv4 + * addresses are placed before IPv6 addresses. This option will be deprecated in favor of `order`. When both are specified, + * `order` has higher precedence. New code should only use `order`. Default value is configurable using {@link setDefaultResultOrder} + * @default true (addresses are not reordered) + * @deprecated Please use `order` option + */ + verbatim?: boolean | undefined; + } + export interface LookupOneOptions extends LookupOptions { + all?: false | undefined; + } + export interface LookupAllOptions extends LookupOptions { + all: true; + } + export interface LookupAddress { + /** + * A string representation of an IPv4 or IPv6 address. + */ + address: string; + /** + * `4` or `6`, denoting the family of `address`, or `0` if the address is not an IPv4 or IPv6 address. `0` is a likely indicator of a + * bug in the name resolution service used by the operating system. + */ + family: number; + } + /** + * Resolves a host name (e.g. `'nodejs.org'`) into the first found A (IPv4) or + * AAAA (IPv6) record. All `option` properties are optional. If `options` is an + * integer, then it must be `4` or `6` – if `options` is `0` or not provided, then + * IPv4 and IPv6 addresses are both returned if found. 
+ * + * With the `all` option set to `true`, the arguments for `callback` change to `(err, addresses)`, with `addresses` being an array of objects with the + * properties `address` and `family`. + * + * On error, `err` is an `Error` object, where `err.code` is the error code. + * Keep in mind that `err.code` will be set to `'ENOTFOUND'` not only when + * the host name does not exist but also when the lookup fails in other ways + * such as no available file descriptors. + * + * `dns.lookup()` does not necessarily have anything to do with the DNS protocol. + * The implementation uses an operating system facility that can associate names + * with addresses and vice versa. This implementation can have subtle but + * important consequences on the behavior of any Node.js program. Please take some + * time to consult the [Implementation considerations section](https://nodejs.org/docs/latest-v22.x/api/dns.html#implementation-considerations) + * before using `dns.lookup()`. + * + * Example usage: + * + * ```js + * import dns from 'node:dns'; + * const options = { + * family: 6, + * hints: dns.ADDRCONFIG | dns.V4MAPPED, + * }; + * dns.lookup('example.com', options, (err, address, family) => + * console.log('address: %j family: IPv%s', address, family)); + * // address: "2606:2800:220:1:248:1893:25c8:1946" family: IPv6 + * + * // When options.all is true, the result will be an Array. + * options.all = true; + * dns.lookup('example.com', options, (err, addresses) => + * console.log('addresses: %j', addresses)); + * // addresses: [{"address":"2606:2800:220:1:248:1893:25c8:1946","family":6}] + * ``` + * + * If this method is invoked as its [util.promisify()](https://nodejs.org/docs/latest-v22.x/api/util.html#utilpromisifyoriginal) ed + * version, and `all` is not set to `true`, it returns a `Promise` for an `Object` with `address` and `family` properties. + * @since v0.1.90 + */ + export function lookup( + hostname: string, + family: number, + callback: (err: NodeJS.ErrnoException | null, address: string, family: number) => void, + ): void; + export function lookup( + hostname: string, + options: LookupOneOptions, + callback: (err: NodeJS.ErrnoException | null, address: string, family: number) => void, + ): void; + export function lookup( + hostname: string, + options: LookupAllOptions, + callback: (err: NodeJS.ErrnoException | null, addresses: LookupAddress[]) => void, + ): void; + export function lookup( + hostname: string, + options: LookupOptions, + callback: (err: NodeJS.ErrnoException | null, address: string | LookupAddress[], family: number) => void, + ): void; + export function lookup( + hostname: string, + callback: (err: NodeJS.ErrnoException | null, address: string, family: number) => void, + ): void; + export namespace lookup { + function __promisify__(hostname: string, options: LookupAllOptions): Promise; + function __promisify__(hostname: string, options?: LookupOneOptions | number): Promise; + function __promisify__(hostname: string, options: LookupOptions): Promise; + } + /** + * Resolves the given `address` and `port` into a host name and service using + * the operating system's underlying `getnameinfo` implementation. + * + * If `address` is not a valid IP address, a `TypeError` will be thrown. + * The `port` will be coerced to a number. If it is not a legal port, a `TypeError` will be thrown. + * + * On an error, `err` is an [`Error`](https://nodejs.org/docs/latest-v22.x/api/errors.html#class-error) object, + * where `err.code` is the error code. 
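The `dns.lookup()` documentation above mentions its `util.promisify()`-ed form without showing it; a minimal sketch, assuming an ESM context and a placeholder hostname:

```js
import dns from 'node:dns';
import { promisify } from 'node:util';

// dns.lookup has a custom promisified form that resolves with { address, family }
// when `all` is not set to true.
const lookup = promisify(dns.lookup);

const { address, family } = await lookup('example.org', { family: 4 });
console.log(`address: ${address} family: IPv${family}`);
```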
+ * + * ```js + * import dns from 'node:dns'; + * dns.lookupService('127.0.0.1', 22, (err, hostname, service) => { + * console.log(hostname, service); + * // Prints: localhost ssh + * }); + * ``` + * + * If this method is invoked as its [util.promisify()](https://nodejs.org/docs/latest-v22.x/api/util.html#utilpromisifyoriginal) ed + * version, it returns a `Promise` for an `Object` with `hostname` and `service` properties. + * @since v0.11.14 + */ + export function lookupService( + address: string, + port: number, + callback: (err: NodeJS.ErrnoException | null, hostname: string, service: string) => void, + ): void; + export namespace lookupService { + function __promisify__( + address: string, + port: number, + ): Promise<{ + hostname: string; + service: string; + }>; + } + export interface ResolveOptions { + ttl: boolean; + } + export interface ResolveWithTtlOptions extends ResolveOptions { + ttl: true; + } + export interface RecordWithTtl { + address: string; + ttl: number; + } + /** @deprecated Use `AnyARecord` or `AnyAaaaRecord` instead. */ + export type AnyRecordWithTtl = AnyARecord | AnyAaaaRecord; + export interface AnyARecord extends RecordWithTtl { + type: "A"; + } + export interface AnyAaaaRecord extends RecordWithTtl { + type: "AAAA"; + } + export interface CaaRecord { + critical: number; + issue?: string | undefined; + issuewild?: string | undefined; + iodef?: string | undefined; + contactemail?: string | undefined; + contactphone?: string | undefined; + } + export interface MxRecord { + priority: number; + exchange: string; + } + export interface AnyMxRecord extends MxRecord { + type: "MX"; + } + export interface NaptrRecord { + flags: string; + service: string; + regexp: string; + replacement: string; + order: number; + preference: number; + } + export interface AnyNaptrRecord extends NaptrRecord { + type: "NAPTR"; + } + export interface SoaRecord { + nsname: string; + hostmaster: string; + serial: number; + refresh: number; + retry: number; + expire: number; + minttl: number; + } + export interface AnySoaRecord extends SoaRecord { + type: "SOA"; + } + export interface SrvRecord { + priority: number; + weight: number; + port: number; + name: string; + } + export interface AnySrvRecord extends SrvRecord { + type: "SRV"; + } + export interface AnyTxtRecord { + type: "TXT"; + entries: string[]; + } + export interface AnyNsRecord { + type: "NS"; + value: string; + } + export interface AnyPtrRecord { + type: "PTR"; + value: string; + } + export interface AnyCnameRecord { + type: "CNAME"; + value: string; + } + export type AnyRecord = + | AnyARecord + | AnyAaaaRecord + | AnyCnameRecord + | AnyMxRecord + | AnyNaptrRecord + | AnyNsRecord + | AnyPtrRecord + | AnySoaRecord + | AnySrvRecord + | AnyTxtRecord; + /** + * Uses the DNS protocol to resolve a host name (e.g. `'nodejs.org'`) into an array + * of the resource records. The `callback` function has arguments `(err, records)`. When successful, `records` will be an array of resource + * records. The type and structure of individual results varies based on `rrtype`: + * + * + * + * On error, `err` is an [`Error`](https://nodejs.org/docs/latest-v22.x/api/errors.html#class-error) object, + * where `err.code` is one of the `DNS error codes`. + * @since v0.1.27 + * @param hostname Host name to resolve. + * @param [rrtype='A'] Resource record type. 
+ */ + export function resolve( + hostname: string, + callback: (err: NodeJS.ErrnoException | null, addresses: string[]) => void, + ): void; + export function resolve( + hostname: string, + rrtype: "A", + callback: (err: NodeJS.ErrnoException | null, addresses: string[]) => void, + ): void; + export function resolve( + hostname: string, + rrtype: "AAAA", + callback: (err: NodeJS.ErrnoException | null, addresses: string[]) => void, + ): void; + export function resolve( + hostname: string, + rrtype: "ANY", + callback: (err: NodeJS.ErrnoException | null, addresses: AnyRecord[]) => void, + ): void; + export function resolve( + hostname: string, + rrtype: "CNAME", + callback: (err: NodeJS.ErrnoException | null, addresses: string[]) => void, + ): void; + export function resolve( + hostname: string, + rrtype: "MX", + callback: (err: NodeJS.ErrnoException | null, addresses: MxRecord[]) => void, + ): void; + export function resolve( + hostname: string, + rrtype: "NAPTR", + callback: (err: NodeJS.ErrnoException | null, addresses: NaptrRecord[]) => void, + ): void; + export function resolve( + hostname: string, + rrtype: "NS", + callback: (err: NodeJS.ErrnoException | null, addresses: string[]) => void, + ): void; + export function resolve( + hostname: string, + rrtype: "PTR", + callback: (err: NodeJS.ErrnoException | null, addresses: string[]) => void, + ): void; + export function resolve( + hostname: string, + rrtype: "SOA", + callback: (err: NodeJS.ErrnoException | null, addresses: SoaRecord) => void, + ): void; + export function resolve( + hostname: string, + rrtype: "SRV", + callback: (err: NodeJS.ErrnoException | null, addresses: SrvRecord[]) => void, + ): void; + export function resolve( + hostname: string, + rrtype: "TXT", + callback: (err: NodeJS.ErrnoException | null, addresses: string[][]) => void, + ): void; + export function resolve( + hostname: string, + rrtype: string, + callback: ( + err: NodeJS.ErrnoException | null, + addresses: string[] | MxRecord[] | NaptrRecord[] | SoaRecord | SrvRecord[] | string[][] | AnyRecord[], + ) => void, + ): void; + export namespace resolve { + function __promisify__(hostname: string, rrtype?: "A" | "AAAA" | "CNAME" | "NS" | "PTR"): Promise; + function __promisify__(hostname: string, rrtype: "ANY"): Promise; + function __promisify__(hostname: string, rrtype: "MX"): Promise; + function __promisify__(hostname: string, rrtype: "NAPTR"): Promise; + function __promisify__(hostname: string, rrtype: "SOA"): Promise; + function __promisify__(hostname: string, rrtype: "SRV"): Promise; + function __promisify__(hostname: string, rrtype: "TXT"): Promise; + function __promisify__( + hostname: string, + rrtype: string, + ): Promise; + } + /** + * Uses the DNS protocol to resolve a IPv4 addresses (`A` records) for the `hostname`. The `addresses` argument passed to the `callback` function + * will contain an array of IPv4 addresses (e.g.`['74.125.79.104', '74.125.79.105', '74.125.79.106']`). + * @since v0.1.16 + * @param hostname Host name to resolve. 
+ */ + export function resolve4( + hostname: string, + callback: (err: NodeJS.ErrnoException | null, addresses: string[]) => void, + ): void; + export function resolve4( + hostname: string, + options: ResolveWithTtlOptions, + callback: (err: NodeJS.ErrnoException | null, addresses: RecordWithTtl[]) => void, + ): void; + export function resolve4( + hostname: string, + options: ResolveOptions, + callback: (err: NodeJS.ErrnoException | null, addresses: string[] | RecordWithTtl[]) => void, + ): void; + export namespace resolve4 { + function __promisify__(hostname: string): Promise; + function __promisify__(hostname: string, options: ResolveWithTtlOptions): Promise; + function __promisify__(hostname: string, options?: ResolveOptions): Promise; + } + /** + * Uses the DNS protocol to resolve IPv6 addresses (`AAAA` records) for the `hostname`. The `addresses` argument passed to the `callback` function + * will contain an array of IPv6 addresses. + * @since v0.1.16 + * @param hostname Host name to resolve. + */ + export function resolve6( + hostname: string, + callback: (err: NodeJS.ErrnoException | null, addresses: string[]) => void, + ): void; + export function resolve6( + hostname: string, + options: ResolveWithTtlOptions, + callback: (err: NodeJS.ErrnoException | null, addresses: RecordWithTtl[]) => void, + ): void; + export function resolve6( + hostname: string, + options: ResolveOptions, + callback: (err: NodeJS.ErrnoException | null, addresses: string[] | RecordWithTtl[]) => void, + ): void; + export namespace resolve6 { + function __promisify__(hostname: string): Promise; + function __promisify__(hostname: string, options: ResolveWithTtlOptions): Promise; + function __promisify__(hostname: string, options?: ResolveOptions): Promise; + } + /** + * Uses the DNS protocol to resolve `CNAME` records for the `hostname`. The `addresses` argument passed to the `callback` function + * will contain an array of canonical name records available for the `hostname` (e.g. `['bar.example.com']`). + * @since v0.3.2 + */ + export function resolveCname( + hostname: string, + callback: (err: NodeJS.ErrnoException | null, addresses: string[]) => void, + ): void; + export namespace resolveCname { + function __promisify__(hostname: string): Promise; + } + /** + * Uses the DNS protocol to resolve `CAA` records for the `hostname`. The `addresses` argument passed to the `callback` function + * will contain an array of certification authority authorization records + * available for the `hostname` (e.g. `[{critical: 0, iodef: 'mailto:pki@example.com'}, {critical: 128, issue: 'pki.example.com'}]`). + * @since v15.0.0, v14.17.0 + */ + export function resolveCaa( + hostname: string, + callback: (err: NodeJS.ErrnoException | null, records: CaaRecord[]) => void, + ): void; + export namespace resolveCaa { + function __promisify__(hostname: string): Promise; + } + /** + * Uses the DNS protocol to resolve mail exchange records (`MX` records) for the `hostname`. The `addresses` argument passed to the `callback` function will + * contain an array of objects containing both a `priority` and `exchange` property (e.g. `[{priority: 10, exchange: 'mx.example.com'}, ...]`). + * @since v0.1.27 + */ + export function resolveMx( + hostname: string, + callback: (err: NodeJS.ErrnoException | null, addresses: MxRecord[]) => void, + ): void; + export namespace resolveMx { + function __promisify__(hostname: string): Promise; + } + /** + * Uses the DNS protocol to resolve regular expression-based records (`NAPTR` records) for the `hostname`. 
The `addresses` argument passed to the `callback` function will contain an array of + * objects with the following properties: + * + * * `flags` + * * `service` + * * `regexp` + * * `replacement` + * * `order` + * * `preference` + * + * ```js + * { + * flags: 's', + * service: 'SIP+D2U', + * regexp: '', + * replacement: '_sip._udp.example.com', + * order: 30, + * preference: 100 + * } + * ``` + * @since v0.9.12 + */ + export function resolveNaptr( + hostname: string, + callback: (err: NodeJS.ErrnoException | null, addresses: NaptrRecord[]) => void, + ): void; + export namespace resolveNaptr { + function __promisify__(hostname: string): Promise; + } + /** + * Uses the DNS protocol to resolve name server records (`NS` records) for the `hostname`. The `addresses` argument passed to the `callback` function will + * contain an array of name server records available for `hostname` (e.g. `['ns1.example.com', 'ns2.example.com']`). + * @since v0.1.90 + */ + export function resolveNs( + hostname: string, + callback: (err: NodeJS.ErrnoException | null, addresses: string[]) => void, + ): void; + export namespace resolveNs { + function __promisify__(hostname: string): Promise; + } + /** + * Uses the DNS protocol to resolve pointer records (`PTR` records) for the `hostname`. The `addresses` argument passed to the `callback` function will + * be an array of strings containing the reply records. + * @since v6.0.0 + */ + export function resolvePtr( + hostname: string, + callback: (err: NodeJS.ErrnoException | null, addresses: string[]) => void, + ): void; + export namespace resolvePtr { + function __promisify__(hostname: string): Promise; + } + /** + * Uses the DNS protocol to resolve a start of authority record (`SOA` record) for + * the `hostname`. The `address` argument passed to the `callback` function will + * be an object with the following properties: + * + * * `nsname` + * * `hostmaster` + * * `serial` + * * `refresh` + * * `retry` + * * `expire` + * * `minttl` + * + * ```js + * { + * nsname: 'ns.example.com', + * hostmaster: 'root.example.com', + * serial: 2013101809, + * refresh: 10000, + * retry: 2400, + * expire: 604800, + * minttl: 3600 + * } + * ``` + * @since v0.11.10 + */ + export function resolveSoa( + hostname: string, + callback: (err: NodeJS.ErrnoException | null, address: SoaRecord) => void, + ): void; + export namespace resolveSoa { + function __promisify__(hostname: string): Promise; + } + /** + * Uses the DNS protocol to resolve service records (`SRV` records) for the `hostname`. The `addresses` argument passed to the `callback` function will + * be an array of objects with the following properties: + * + * * `priority` + * * `weight` + * * `port` + * * `name` + * + * ```js + * { + * priority: 10, + * weight: 5, + * port: 21223, + * name: 'service.example.com' + * } + * ``` + * @since v0.1.27 + */ + export function resolveSrv( + hostname: string, + callback: (err: NodeJS.ErrnoException | null, addresses: SrvRecord[]) => void, + ): void; + export namespace resolveSrv { + function __promisify__(hostname: string): Promise; + } + /** + * Uses the DNS protocol to resolve text queries (`TXT` records) for the `hostname`. The `records` argument passed to the `callback` function is a + * two-dimensional array of the text records available for `hostname` (e.g.`[ ['v=spf1 ip4:0.0.0.0 ', '~all' ] ]`). Each sub-array contains TXT chunks of + * one record. Depending on the use case, these could be either joined together or + * treated separately. 
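To make the "joined together or treated separately" remark above concrete, a small sketch (not part of the vendored typings; the hostname is a placeholder) that flattens each TXT record's chunks back into a single string:

```js
import dns from 'node:dns';

dns.resolveTxt('example.org', (err, records) => {
  if (err) throw err;
  // Each entry is an array of chunks belonging to one TXT record; join them per record.
  const joined = records.map((chunks) => chunks.join(''));
  console.log(joined);
});
```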
+ * @since v0.1.27 + */ + export function resolveTxt( + hostname: string, + callback: (err: NodeJS.ErrnoException | null, addresses: string[][]) => void, + ): void; + export namespace resolveTxt { + function __promisify__(hostname: string): Promise; + } + /** + * Uses the DNS protocol to resolve all records (also known as `ANY` or `*` query). + * The `ret` argument passed to the `callback` function will be an array containing + * various types of records. Each object has a property `type` that indicates the + * type of the current record. And depending on the `type`, additional properties + * will be present on the object: + * + * + * + * Here is an example of the `ret` object passed to the callback: + * + * ```js + * [ { type: 'A', address: '127.0.0.1', ttl: 299 }, + * { type: 'CNAME', value: 'example.com' }, + * { type: 'MX', exchange: 'alt4.aspmx.l.example.com', priority: 50 }, + * { type: 'NS', value: 'ns1.example.com' }, + * { type: 'TXT', entries: [ 'v=spf1 include:_spf.example.com ~all' ] }, + * { type: 'SOA', + * nsname: 'ns1.example.com', + * hostmaster: 'admin.example.com', + * serial: 156696742, + * refresh: 900, + * retry: 900, + * expire: 1800, + * minttl: 60 } ] + * ``` + * + * DNS server operators may choose not to respond to `ANY` queries. It may be better to call individual methods like {@link resolve4}, {@link resolveMx}, and so on. For more details, see + * [RFC 8482](https://tools.ietf.org/html/rfc8482). + */ + export function resolveAny( + hostname: string, + callback: (err: NodeJS.ErrnoException | null, addresses: AnyRecord[]) => void, + ): void; + export namespace resolveAny { + function __promisify__(hostname: string): Promise; + } + /** + * Performs a reverse DNS query that resolves an IPv4 or IPv6 address to an + * array of host names. + * + * On error, `err` is an [`Error`](https://nodejs.org/docs/latest-v22.x/api/errors.html#class-error) object, where `err.code` is + * one of the [DNS error codes](https://nodejs.org/docs/latest-v22.x/api/dns.html#error-codes). + * @since v0.1.16 + */ + export function reverse( + ip: string, + callback: (err: NodeJS.ErrnoException | null, hostnames: string[]) => void, + ): void; + /** + * Get the default value for `order` in {@link lookup} and [`dnsPromises.lookup()`](https://nodejs.org/docs/latest-v22.x/api/dns.html#dnspromiseslookuphostname-options). + * The value could be: + * + * * `ipv4first`: for `order` defaulting to `ipv4first`. + * * `ipv6first`: for `order` defaulting to `ipv6first`. + * * `verbatim`: for `order` defaulting to `verbatim`. + * @since v18.17.0 + */ + export function getDefaultResultOrder(): "ipv4first" | "ipv6first" | "verbatim"; + /** + * Sets the IP address and port of servers to be used when performing DNS + * resolution. The `servers` argument is an array of [RFC 5952](https://tools.ietf.org/html/rfc5952#section-6) formatted + * addresses. If the port is the IANA default DNS port (53) it can be omitted. + * + * ```js + * dns.setServers([ + * '4.4.4.4', + * '[2001:4860:4860::8888]', + * '4.4.4.4:1053', + * '[2001:4860:4860::8888]:1053', + * ]); + * ``` + * + * An error will be thrown if an invalid address is provided. + * + * The `dns.setServers()` method must not be called while a DNS query is in + * progress. + * + * The {@link setServers} method affects only {@link resolve}, `dns.resolve*()` and {@link reverse} (and specifically _not_ {@link lookup}). + * + * This method works much like [resolve.conf](https://man7.org/linux/man-pages/man5/resolv.conf.5.html). 
+ * That is, if attempting to resolve with the first server provided results in a `NOTFOUND` error, the `resolve()` method will _not_ attempt to resolve with + * subsequent servers provided. Fallback DNS servers will only be used if the + * earlier ones time out or result in some other error. + * @since v0.11.3 + * @param servers array of [RFC 5952](https://datatracker.ietf.org/doc/html/rfc5952#section-6) formatted addresses + */ + export function setServers(servers: readonly string[]): void; + /** + * Returns an array of IP address strings, formatted according to [RFC 5952](https://tools.ietf.org/html/rfc5952#section-6), + * that are currently configured for DNS resolution. A string will include a port + * section if a custom port is used. + * + * ```js + * [ + * '4.4.4.4', + * '2001:4860:4860::8888', + * '4.4.4.4:1053', + * '[2001:4860:4860::8888]:1053', + * ] + * ``` + * @since v0.11.3 + */ + export function getServers(): string[]; + /** + * Set the default value of `order` in {@link lookup} and [`dnsPromises.lookup()`](https://nodejs.org/docs/latest-v22.x/api/dns.html#dnspromiseslookuphostname-options). + * The value could be: + * + * * `ipv4first`: sets default `order` to `ipv4first`. + * * `ipv6first`: sets default `order` to `ipv6first`. + * * `verbatim`: sets default `order` to `verbatim`. + * + * The default is `verbatim` and {@link setDefaultResultOrder} have higher + * priority than [`--dns-result-order`](https://nodejs.org/docs/latest-v22.x/api/cli.html#--dns-result-orderorder). When using + * [worker threads](https://nodejs.org/docs/latest-v22.x/api/worker_threads.html), {@link setDefaultResultOrder} from the main + * thread won't affect the default dns orders in workers. + * @since v16.4.0, v14.18.0 + * @param order must be `'ipv4first'`, `'ipv6first'` or `'verbatim'`. + */ + export function setDefaultResultOrder(order: "ipv4first" | "ipv6first" | "verbatim"): void; + // Error codes + export const NODATA: "ENODATA"; + export const FORMERR: "EFORMERR"; + export const SERVFAIL: "ESERVFAIL"; + export const NOTFOUND: "ENOTFOUND"; + export const NOTIMP: "ENOTIMP"; + export const REFUSED: "EREFUSED"; + export const BADQUERY: "EBADQUERY"; + export const BADNAME: "EBADNAME"; + export const BADFAMILY: "EBADFAMILY"; + export const BADRESP: "EBADRESP"; + export const CONNREFUSED: "ECONNREFUSED"; + export const TIMEOUT: "ETIMEOUT"; + export const EOF: "EOF"; + export const FILE: "EFILE"; + export const NOMEM: "ENOMEM"; + export const DESTRUCTION: "EDESTRUCTION"; + export const BADSTR: "EBADSTR"; + export const BADFLAGS: "EBADFLAGS"; + export const NONAME: "ENONAME"; + export const BADHINTS: "EBADHINTS"; + export const NOTINITIALIZED: "ENOTINITIALIZED"; + export const LOADIPHLPAPI: "ELOADIPHLPAPI"; + export const ADDRGETNETWORKPARAMS: "EADDRGETNETWORKPARAMS"; + export const CANCELLED: "ECANCELLED"; + export interface ResolverOptions { + /** + * Query timeout in milliseconds, or `-1` to use the default timeout. + */ + timeout?: number | undefined; + /** + * The number of tries the resolver will try contacting each name server before giving up. + * @default 4 + */ + tries?: number; + } + /** + * An independent resolver for DNS requests. + * + * Creating a new resolver uses the default server settings. 
Setting + * the servers used for a resolver using [`resolver.setServers()`](https://nodejs.org/docs/latest-v22.x/api/dns.html#dnssetserversservers) does not affect + * other resolvers: + * + * ```js + * import { Resolver } from 'node:dns'; + * const resolver = new Resolver(); + * resolver.setServers(['4.4.4.4']); + * + * // This request will use the server at 4.4.4.4, independent of global settings. + * resolver.resolve4('example.org', (err, addresses) => { + * // ... + * }); + * ``` + * + * The following methods from the `node:dns` module are available: + * + * * `resolver.getServers()` + * * `resolver.resolve()` + * * `resolver.resolve4()` + * * `resolver.resolve6()` + * * `resolver.resolveAny()` + * * `resolver.resolveCaa()` + * * `resolver.resolveCname()` + * * `resolver.resolveMx()` + * * `resolver.resolveNaptr()` + * * `resolver.resolveNs()` + * * `resolver.resolvePtr()` + * * `resolver.resolveSoa()` + * * `resolver.resolveSrv()` + * * `resolver.resolveTxt()` + * * `resolver.reverse()` + * * `resolver.setServers()` + * @since v8.3.0 + */ + export class Resolver { + constructor(options?: ResolverOptions); + /** + * Cancel all outstanding DNS queries made by this resolver. The corresponding + * callbacks will be called with an error with code `ECANCELLED`. + * @since v8.3.0 + */ + cancel(): void; + getServers: typeof getServers; + resolve: typeof resolve; + resolve4: typeof resolve4; + resolve6: typeof resolve6; + resolveAny: typeof resolveAny; + resolveCaa: typeof resolveCaa; + resolveCname: typeof resolveCname; + resolveMx: typeof resolveMx; + resolveNaptr: typeof resolveNaptr; + resolveNs: typeof resolveNs; + resolvePtr: typeof resolvePtr; + resolveSoa: typeof resolveSoa; + resolveSrv: typeof resolveSrv; + resolveTxt: typeof resolveTxt; + reverse: typeof reverse; + /** + * The resolver instance will send its requests from the specified IP address. + * This allows programs to specify outbound interfaces when used on multi-homed + * systems. + * + * If a v4 or v6 address is not specified, it is set to the default and the + * operating system will choose a local address automatically. + * + * The resolver will use the v4 local address when making requests to IPv4 DNS + * servers, and the v6 local address when making requests to IPv6 DNS servers. + * The `rrtype` of resolution requests has no impact on the local address used. + * @since v15.1.0, v14.17.0 + * @param [ipv4='0.0.0.0'] A string representation of an IPv4 address. + * @param [ipv6='::0'] A string representation of an IPv6 address. + */ + setLocalAddress(ipv4?: string, ipv6?: string): void; + setServers: typeof setServers; + } + export { dnsPromises as promises }; +} +declare module "node:dns" { + export * from "dns"; +} diff --git a/node_modules/@types/node-fetch/node_modules/@types/node/dns/promises.d.ts b/node_modules/@types/node-fetch/node_modules/@types/node/dns/promises.d.ts new file mode 100644 index 0000000..2b5dff0 --- /dev/null +++ b/node_modules/@types/node-fetch/node_modules/@types/node/dns/promises.d.ts @@ -0,0 +1,476 @@ +/** + * The `dns.promises` API provides an alternative set of asynchronous DNS methods + * that return `Promise` objects rather than using callbacks. The API is accessible + * via `import { promises as dnsPromises } from 'node:dns'` or `import dnsPromises from 'node:dns/promises'`. 
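A short, hedged sketch of the promise-based access path described above (placeholder hostname; not part of the vendored typings):

```js
import { promises as dnsPromises } from 'node:dns';

const { address, family } = await dnsPromises.lookup('example.org');
console.log(`address: ${address} family: IPv${family}`);

// With { all: true } the promise resolves with an array of { address, family } objects.
const all = await dnsPromises.lookup('example.org', { all: true });
console.log(all);
```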
+ * @since v10.6.0 + */ +declare module "dns/promises" { + import { + AnyRecord, + CaaRecord, + LookupAddress, + LookupAllOptions, + LookupOneOptions, + LookupOptions, + MxRecord, + NaptrRecord, + RecordWithTtl, + ResolveOptions, + ResolverOptions, + ResolveWithTtlOptions, + SoaRecord, + SrvRecord, + } from "node:dns"; + /** + * Returns an array of IP address strings, formatted according to [RFC 5952](https://tools.ietf.org/html/rfc5952#section-6), + * that are currently configured for DNS resolution. A string will include a port + * section if a custom port is used. + * + * ```js + * [ + * '4.4.4.4', + * '2001:4860:4860::8888', + * '4.4.4.4:1053', + * '[2001:4860:4860::8888]:1053', + * ] + * ``` + * @since v10.6.0 + */ + function getServers(): string[]; + /** + * Resolves a host name (e.g. `'nodejs.org'`) into the first found A (IPv4) or + * AAAA (IPv6) record. All `option` properties are optional. If `options` is an + * integer, then it must be `4` or `6` – if `options` is not provided, then IPv4 + * and IPv6 addresses are both returned if found. + * + * With the `all` option set to `true`, the `Promise` is resolved with `addresses` being an array of objects with the properties `address` and `family`. + * + * On error, the `Promise` is rejected with an [`Error`](https://nodejs.org/docs/latest-v20.x/api/errors.html#class-error) object, where `err.code` is the error code. + * Keep in mind that `err.code` will be set to `'ENOTFOUND'` not only when + * the host name does not exist but also when the lookup fails in other ways + * such as no available file descriptors. + * + * [`dnsPromises.lookup()`](https://nodejs.org/docs/latest-v20.x/api/dns.html#dnspromiseslookuphostname-options) does not necessarily have anything to do with the DNS + * protocol. The implementation uses an operating system facility that can + * associate names with addresses and vice versa. This implementation can have + * subtle but important consequences on the behavior of any Node.js program. Please + * take some time to consult the [Implementation considerations section](https://nodejs.org/docs/latest-v20.x/api/dns.html#implementation-considerations) before + * using `dnsPromises.lookup()`. + * + * Example usage: + * + * ```js + * import dns from 'node:dns'; + * const dnsPromises = dns.promises; + * const options = { + * family: 6, + * hints: dns.ADDRCONFIG | dns.V4MAPPED, + * }; + * + * dnsPromises.lookup('example.com', options).then((result) => { + * console.log('address: %j family: IPv%s', result.address, result.family); + * // address: "2606:2800:220:1:248:1893:25c8:1946" family: IPv6 + * }); + * + * // When options.all is true, the result will be an Array. + * options.all = true; + * dnsPromises.lookup('example.com', options).then((result) => { + * console.log('addresses: %j', result); + * // addresses: [{"address":"2606:2800:220:1:248:1893:25c8:1946","family":6}] + * }); + * ``` + * @since v10.6.0 + */ + function lookup(hostname: string, family: number): Promise; + function lookup(hostname: string, options: LookupOneOptions): Promise; + function lookup(hostname: string, options: LookupAllOptions): Promise; + function lookup(hostname: string, options: LookupOptions): Promise; + function lookup(hostname: string): Promise; + /** + * Resolves the given `address` and `port` into a host name and service using + * the operating system's underlying `getnameinfo` implementation. + * + * If `address` is not a valid IP address, a `TypeError` will be thrown. + * The `port` will be coerced to a number. 
If it is not a legal port, a `TypeError` will be thrown. + * + * On error, the `Promise` is rejected with an [`Error`](https://nodejs.org/docs/latest-v20.x/api/errors.html#class-error) object, where `err.code` is the error code. + * + * ```js + * import dnsPromises from 'node:dns'; + * dnsPromises.lookupService('127.0.0.1', 22).then((result) => { + * console.log(result.hostname, result.service); + * // Prints: localhost ssh + * }); + * ``` + * @since v10.6.0 + */ + function lookupService( + address: string, + port: number, + ): Promise<{ + hostname: string; + service: string; + }>; + /** + * Uses the DNS protocol to resolve a host name (e.g. `'nodejs.org'`) into an array + * of the resource records. When successful, the `Promise` is resolved with an + * array of resource records. The type and structure of individual results vary + * based on `rrtype`: + * + * + * + * On error, the `Promise` is rejected with an [`Error`](https://nodejs.org/docs/latest-v20.x/api/errors.html#class-error) object, where `err.code` + * is one of the [DNS error codes](https://nodejs.org/docs/latest-v20.x/api/dns.html#error-codes). + * @since v10.6.0 + * @param hostname Host name to resolve. + * @param [rrtype='A'] Resource record type. + */ + function resolve(hostname: string): Promise; + function resolve(hostname: string, rrtype: "A"): Promise; + function resolve(hostname: string, rrtype: "AAAA"): Promise; + function resolve(hostname: string, rrtype: "ANY"): Promise; + function resolve(hostname: string, rrtype: "CAA"): Promise; + function resolve(hostname: string, rrtype: "CNAME"): Promise; + function resolve(hostname: string, rrtype: "MX"): Promise; + function resolve(hostname: string, rrtype: "NAPTR"): Promise; + function resolve(hostname: string, rrtype: "NS"): Promise; + function resolve(hostname: string, rrtype: "PTR"): Promise; + function resolve(hostname: string, rrtype: "SOA"): Promise; + function resolve(hostname: string, rrtype: "SRV"): Promise; + function resolve(hostname: string, rrtype: "TXT"): Promise; + function resolve( + hostname: string, + rrtype: string, + ): Promise; + /** + * Uses the DNS protocol to resolve IPv4 addresses (`A` records) for the `hostname`. On success, the `Promise` is resolved with an array of IPv4 + * addresses (e.g. `['74.125.79.104', '74.125.79.105', '74.125.79.106']`). + * @since v10.6.0 + * @param hostname Host name to resolve. + */ + function resolve4(hostname: string): Promise; + function resolve4(hostname: string, options: ResolveWithTtlOptions): Promise; + function resolve4(hostname: string, options: ResolveOptions): Promise; + /** + * Uses the DNS protocol to resolve IPv6 addresses (`AAAA` records) for the `hostname`. On success, the `Promise` is resolved with an array of IPv6 + * addresses. + * @since v10.6.0 + * @param hostname Host name to resolve. + */ + function resolve6(hostname: string): Promise; + function resolve6(hostname: string, options: ResolveWithTtlOptions): Promise; + function resolve6(hostname: string, options: ResolveOptions): Promise; + /** + * Uses the DNS protocol to resolve all records (also known as `ANY` or `*` query). + * On success, the `Promise` is resolved with an array containing various types of + * records. Each object has a property `type` that indicates the type of the + * current record. 
And depending on the `type`, additional properties will be + * present on the object: + * + * + * + * Here is an example of the result object: + * + * ```js + * [ { type: 'A', address: '127.0.0.1', ttl: 299 }, + * { type: 'CNAME', value: 'example.com' }, + * { type: 'MX', exchange: 'alt4.aspmx.l.example.com', priority: 50 }, + * { type: 'NS', value: 'ns1.example.com' }, + * { type: 'TXT', entries: [ 'v=spf1 include:_spf.example.com ~all' ] }, + * { type: 'SOA', + * nsname: 'ns1.example.com', + * hostmaster: 'admin.example.com', + * serial: 156696742, + * refresh: 900, + * retry: 900, + * expire: 1800, + * minttl: 60 } ] + * ``` + * @since v10.6.0 + */ + function resolveAny(hostname: string): Promise; + /** + * Uses the DNS protocol to resolve `CAA` records for the `hostname`. On success, + * the `Promise` is resolved with an array of objects containing available + * certification authority authorization records available for the `hostname` (e.g. `[{critical: 0, iodef: 'mailto:pki@example.com'},{critical: 128, issue: 'pki.example.com'}]`). + * @since v15.0.0, v14.17.0 + */ + function resolveCaa(hostname: string): Promise; + /** + * Uses the DNS protocol to resolve `CNAME` records for the `hostname`. On success, + * the `Promise` is resolved with an array of canonical name records available for + * the `hostname` (e.g. `['bar.example.com']`). + * @since v10.6.0 + */ + function resolveCname(hostname: string): Promise; + /** + * Uses the DNS protocol to resolve mail exchange records (`MX` records) for the `hostname`. On success, the `Promise` is resolved with an array of objects + * containing both a `priority` and `exchange` property (e.g.`[{priority: 10, exchange: 'mx.example.com'}, ...]`). + * @since v10.6.0 + */ + function resolveMx(hostname: string): Promise; + /** + * Uses the DNS protocol to resolve regular expression-based records (`NAPTR` records) for the `hostname`. On success, the `Promise` is resolved with an array + * of objects with the following properties: + * + * * `flags` + * * `service` + * * `regexp` + * * `replacement` + * * `order` + * * `preference` + * + * ```js + * { + * flags: 's', + * service: 'SIP+D2U', + * regexp: '', + * replacement: '_sip._udp.example.com', + * order: 30, + * preference: 100 + * } + * ``` + * @since v10.6.0 + */ + function resolveNaptr(hostname: string): Promise; + /** + * Uses the DNS protocol to resolve name server records (`NS` records) for the `hostname`. On success, the `Promise` is resolved with an array of name server + * records available for `hostname` (e.g.`['ns1.example.com', 'ns2.example.com']`). + * @since v10.6.0 + */ + function resolveNs(hostname: string): Promise; + /** + * Uses the DNS protocol to resolve pointer records (`PTR` records) for the `hostname`. On success, the `Promise` is resolved with an array of strings + * containing the reply records. + * @since v10.6.0 + */ + function resolvePtr(hostname: string): Promise; + /** + * Uses the DNS protocol to resolve a start of authority record (`SOA` record) for + * the `hostname`. 
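As an illustrative sketch of the promise-returning resolvers documented above (hostname is a placeholder; not part of the vendored typings): `resolveMx()` yields `{ priority, exchange }` objects, so sorting by `priority` selects the preferred mail host.

```js
import dnsPromises from 'node:dns/promises';

const mx = await dnsPromises.resolveMx('example.org');
mx.sort((a, b) => a.priority - b.priority);
console.log(mx[0]?.exchange); // lowest-priority (preferred) exchange, if any
```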
On success, the `Promise` is resolved with an object with the + * following properties: + * + * * `nsname` + * * `hostmaster` + * * `serial` + * * `refresh` + * * `retry` + * * `expire` + * * `minttl` + * + * ```js + * { + * nsname: 'ns.example.com', + * hostmaster: 'root.example.com', + * serial: 2013101809, + * refresh: 10000, + * retry: 2400, + * expire: 604800, + * minttl: 3600 + * } + * ``` + * @since v10.6.0 + */ + function resolveSoa(hostname: string): Promise; + /** + * Uses the DNS protocol to resolve service records (`SRV` records) for the `hostname`. On success, the `Promise` is resolved with an array of objects with + * the following properties: + * + * * `priority` + * * `weight` + * * `port` + * * `name` + * + * ```js + * { + * priority: 10, + * weight: 5, + * port: 21223, + * name: 'service.example.com' + * } + * ``` + * @since v10.6.0 + */ + function resolveSrv(hostname: string): Promise; + /** + * Uses the DNS protocol to resolve text queries (`TXT` records) for the `hostname`. On success, the `Promise` is resolved with a two-dimensional array + * of the text records available for `hostname` (e.g.`[ ['v=spf1 ip4:0.0.0.0 ', '~all' ] ]`). Each sub-array contains TXT chunks of + * one record. Depending on the use case, these could be either joined together or + * treated separately. + * @since v10.6.0 + */ + function resolveTxt(hostname: string): Promise; + /** + * Performs a reverse DNS query that resolves an IPv4 or IPv6 address to an + * array of host names. + * + * On error, the `Promise` is rejected with an [`Error`](https://nodejs.org/docs/latest-v20.x/api/errors.html#class-error) object, where `err.code` + * is one of the [DNS error codes](https://nodejs.org/docs/latest-v20.x/api/dns.html#error-codes). + * @since v10.6.0 + */ + function reverse(ip: string): Promise; + /** + * Get the default value for `verbatim` in {@link lookup} and [dnsPromises.lookup()](https://nodejs.org/docs/latest-v20.x/api/dns.html#dnspromiseslookuphostname-options). + * The value could be: + * + * * `ipv4first`: for `verbatim` defaulting to `false`. + * * `verbatim`: for `verbatim` defaulting to `true`. + * @since v20.1.0 + */ + function getDefaultResultOrder(): "ipv4first" | "verbatim"; + /** + * Sets the IP address and port of servers to be used when performing DNS + * resolution. The `servers` argument is an array of [RFC 5952](https://tools.ietf.org/html/rfc5952#section-6) formatted + * addresses. If the port is the IANA default DNS port (53) it can be omitted. + * + * ```js + * dnsPromises.setServers([ + * '4.4.4.4', + * '[2001:4860:4860::8888]', + * '4.4.4.4:1053', + * '[2001:4860:4860::8888]:1053', + * ]); + * ``` + * + * An error will be thrown if an invalid address is provided. + * + * The `dnsPromises.setServers()` method must not be called while a DNS query is in + * progress. + * + * This method works much like [resolve.conf](https://man7.org/linux/man-pages/man5/resolv.conf.5.html). + * That is, if attempting to resolve with the first server provided results in a `NOTFOUND` error, the `resolve()` method will _not_ attempt to resolve with + * subsequent servers provided. Fallback DNS servers will only be used if the + * earlier ones time out or result in some other error. + * @since v10.6.0 + * @param servers array of `RFC 5952` formatted addresses + */ + function setServers(servers: readonly string[]): void; + /** + * Set the default value of `order` in `dns.lookup()` and `{@link lookup}`. The value could be: + * + * * `ipv4first`: sets default `order` to `ipv4first`. 
+ * * `ipv6first`: sets default `order` to `ipv6first`. + * * `verbatim`: sets default `order` to `verbatim`. + * + * The default is `verbatim` and [dnsPromises.setDefaultResultOrder()](https://nodejs.org/docs/latest-v20.x/api/dns.html#dnspromisessetdefaultresultorderorder) + * have higher priority than [`--dns-result-order`](https://nodejs.org/docs/latest-v20.x/api/cli.html#--dns-result-orderorder). + * When using [worker threads](https://nodejs.org/docs/latest-v20.x/api/worker_threads.html), [`dnsPromises.setDefaultResultOrder()`](https://nodejs.org/docs/latest-v20.x/api/dns.html#dnspromisessetdefaultresultorderorder) + * from the main thread won't affect the default dns orders in workers. + * @since v16.4.0, v14.18.0 + * @param order must be `'ipv4first'`, `'ipv6first'` or `'verbatim'`. + */ + function setDefaultResultOrder(order: "ipv4first" | "ipv6first" | "verbatim"): void; + // Error codes + const NODATA: "ENODATA"; + const FORMERR: "EFORMERR"; + const SERVFAIL: "ESERVFAIL"; + const NOTFOUND: "ENOTFOUND"; + const NOTIMP: "ENOTIMP"; + const REFUSED: "EREFUSED"; + const BADQUERY: "EBADQUERY"; + const BADNAME: "EBADNAME"; + const BADFAMILY: "EBADFAMILY"; + const BADRESP: "EBADRESP"; + const CONNREFUSED: "ECONNREFUSED"; + const TIMEOUT: "ETIMEOUT"; + const EOF: "EOF"; + const FILE: "EFILE"; + const NOMEM: "ENOMEM"; + const DESTRUCTION: "EDESTRUCTION"; + const BADSTR: "EBADSTR"; + const BADFLAGS: "EBADFLAGS"; + const NONAME: "ENONAME"; + const BADHINTS: "EBADHINTS"; + const NOTINITIALIZED: "ENOTINITIALIZED"; + const LOADIPHLPAPI: "ELOADIPHLPAPI"; + const ADDRGETNETWORKPARAMS: "EADDRGETNETWORKPARAMS"; + const CANCELLED: "ECANCELLED"; + + /** + * An independent resolver for DNS requests. + * + * Creating a new resolver uses the default server settings. Setting + * the servers used for a resolver using [`resolver.setServers()`](https://nodejs.org/docs/latest-v20.x/api/dns.html#dnspromisessetserversservers) does not affect + * other resolvers: + * + * ```js + * import { promises } from 'node:dns'; + * const resolver = new promises.Resolver(); + * resolver.setServers(['4.4.4.4']); + * + * // This request will use the server at 4.4.4.4, independent of global settings. + * resolver.resolve4('example.org').then((addresses) => { + * // ... + * }); + * + * // Alternatively, the same code can be written using async-await style. + * (async function() { + * const addresses = await resolver.resolve4('example.org'); + * })(); + * ``` + * + * The following methods from the `dnsPromises` API are available: + * + * * `resolver.getServers()` + * * `resolver.resolve()` + * * `resolver.resolve4()` + * * `resolver.resolve6()` + * * `resolver.resolveAny()` + * * `resolver.resolveCaa()` + * * `resolver.resolveCname()` + * * `resolver.resolveMx()` + * * `resolver.resolveNaptr()` + * * `resolver.resolveNs()` + * * `resolver.resolvePtr()` + * * `resolver.resolveSoa()` + * * `resolver.resolveSrv()` + * * `resolver.resolveTxt()` + * * `resolver.reverse()` + * * `resolver.setServers()` + * @since v10.6.0 + */ + class Resolver { + constructor(options?: ResolverOptions); + /** + * Cancel all outstanding DNS queries made by this resolver. The corresponding + * callbacks will be called with an error with code `ECANCELLED`. 
+ * @since v8.3.0 + */ + cancel(): void; + getServers: typeof getServers; + resolve: typeof resolve; + resolve4: typeof resolve4; + resolve6: typeof resolve6; + resolveAny: typeof resolveAny; + resolveCaa: typeof resolveCaa; + resolveCname: typeof resolveCname; + resolveMx: typeof resolveMx; + resolveNaptr: typeof resolveNaptr; + resolveNs: typeof resolveNs; + resolvePtr: typeof resolvePtr; + resolveSoa: typeof resolveSoa; + resolveSrv: typeof resolveSrv; + resolveTxt: typeof resolveTxt; + reverse: typeof reverse; + /** + * The resolver instance will send its requests from the specified IP address. + * This allows programs to specify outbound interfaces when used on multi-homed + * systems. + * + * If a v4 or v6 address is not specified, it is set to the default and the + * operating system will choose a local address automatically. + * + * The resolver will use the v4 local address when making requests to IPv4 DNS + * servers, and the v6 local address when making requests to IPv6 DNS servers. + * The `rrtype` of resolution requests has no impact on the local address used. + * @since v15.1.0, v14.17.0 + * @param [ipv4='0.0.0.0'] A string representation of an IPv4 address. + * @param [ipv6='::0'] A string representation of an IPv6 address. + */ + setLocalAddress(ipv4?: string, ipv6?: string): void; + setServers: typeof setServers; + } +} +declare module "node:dns/promises" { + export * from "dns/promises"; +} diff --git a/node_modules/@types/node-fetch/node_modules/@types/node/dom-events.d.ts b/node_modules/@types/node-fetch/node_modules/@types/node/dom-events.d.ts new file mode 100644 index 0000000..f47f71d --- /dev/null +++ b/node_modules/@types/node-fetch/node_modules/@types/node/dom-events.d.ts @@ -0,0 +1,124 @@ +export {}; // Don't export anything! + +//// DOM-like Events +// NB: The Event / EventTarget / EventListener implementations below were copied +// from lib.dom.d.ts, then edited to reflect Node's documentation at +// https://nodejs.org/api/events.html#class-eventtarget. +// Please read that link to understand important implementation differences. + +// This conditional type will be the existing global Event in a browser, or +// the copy below in a Node environment. +type __Event = typeof globalThis extends { onmessage: any; Event: any } ? {} + : { + /** This is not used in Node.js and is provided purely for completeness. */ + readonly bubbles: boolean; + /** Alias for event.stopPropagation(). This is not used in Node.js and is provided purely for completeness. */ + cancelBubble: () => void; + /** True if the event was created with the cancelable option */ + readonly cancelable: boolean; + /** This is not used in Node.js and is provided purely for completeness. */ + readonly composed: boolean; + /** Returns an array containing the current EventTarget as the only entry or empty if the event is not being dispatched. This is not used in Node.js and is provided purely for completeness. */ + composedPath(): [EventTarget?]; + /** Alias for event.target. */ + readonly currentTarget: EventTarget | null; + /** Is true if cancelable is true and event.preventDefault() has been called. */ + readonly defaultPrevented: boolean; + /** This is not used in Node.js and is provided purely for completeness. */ + readonly eventPhase: 0 | 2; + /** The `AbortSignal` "abort" event is emitted with `isTrusted` set to `true`. The value is `false` in all other cases. */ + readonly isTrusted: boolean; + /** Sets the `defaultPrevented` property to `true` if `cancelable` is `true`. 
*/ + preventDefault(): void; + /** This is not used in Node.js and is provided purely for completeness. */ + returnValue: boolean; + /** Alias for event.target. */ + readonly srcElement: EventTarget | null; + /** Stops the invocation of event listeners after the current one completes. */ + stopImmediatePropagation(): void; + /** This is not used in Node.js and is provided purely for completeness. */ + stopPropagation(): void; + /** The `EventTarget` dispatching the event */ + readonly target: EventTarget | null; + /** The millisecond timestamp when the Event object was created. */ + readonly timeStamp: number; + /** Returns the type of event, e.g. "click", "hashchange", or "submit". */ + readonly type: string; + }; + +// See comment above explaining conditional type +type __EventTarget = typeof globalThis extends { onmessage: any; EventTarget: any } ? {} + : { + /** + * Adds a new handler for the `type` event. Any given `listener` is added only once per `type` and per `capture` option value. + * + * If the `once` option is true, the `listener` is removed after the next time a `type` event is dispatched. + * + * The `capture` option is not used by Node.js in any functional way other than tracking registered event listeners per the `EventTarget` specification. + * Specifically, the `capture` option is used as part of the key when registering a `listener`. + * Any individual `listener` may be added once with `capture = false`, and once with `capture = true`. + */ + addEventListener( + type: string, + listener: EventListener | EventListenerObject, + options?: AddEventListenerOptions | boolean, + ): void; + /** Dispatches a synthetic event event to target and returns true if either event's cancelable attribute value is false or its preventDefault() method was not invoked, and false otherwise. */ + dispatchEvent(event: Event): boolean; + /** Removes the event listener in target's event listener list with the same type, callback, and options. */ + removeEventListener( + type: string, + listener: EventListener | EventListenerObject, + options?: EventListenerOptions | boolean, + ): void; + }; + +interface EventInit { + bubbles?: boolean; + cancelable?: boolean; + composed?: boolean; +} + +interface EventListenerOptions { + /** Not directly used by Node.js. Added for API completeness. Default: `false`. */ + capture?: boolean; +} + +interface AddEventListenerOptions extends EventListenerOptions { + /** When `true`, the listener is automatically removed when it is first invoked. Default: `false`. */ + once?: boolean; + /** When `true`, serves as a hint that the listener will not call the `Event` object's `preventDefault()` method. Default: false. */ + passive?: boolean; + /** The listener will be removed when the given AbortSignal object's `abort()` method is called. */ + signal?: AbortSignal; +} + +interface EventListener { + (evt: Event): void; +} + +interface EventListenerObject { + handleEvent(object: Event): void; +} + +import {} from "events"; // Make this an ambient declaration +declare global { + /** An event which takes place in the DOM. */ + interface Event extends __Event {} + var Event: typeof globalThis extends { onmessage: any; Event: infer T } ? T + : { + prototype: __Event; + new(type: string, eventInitDict?: EventInit): __Event; + }; + + /** + * EventTarget is a DOM interface implemented by objects that can + * receive events and may have listeners for them. 
+ */ + interface EventTarget extends __EventTarget {} + var EventTarget: typeof globalThis extends { onmessage: any; EventTarget: infer T } ? T + : { + prototype: __EventTarget; + new(): __EventTarget; + }; +} diff --git a/node_modules/@types/node-fetch/node_modules/@types/node/domain.d.ts b/node_modules/@types/node-fetch/node_modules/@types/node/domain.d.ts new file mode 100644 index 0000000..ba8a02c --- /dev/null +++ b/node_modules/@types/node-fetch/node_modules/@types/node/domain.d.ts @@ -0,0 +1,170 @@ +/** + * **This module is pending deprecation.** Once a replacement API has been + * finalized, this module will be fully deprecated. Most developers should + * **not** have cause to use this module. Users who absolutely must have + * the functionality that domains provide may rely on it for the time being + * but should expect to have to migrate to a different solution + * in the future. + * + * Domains provide a way to handle multiple different IO operations as a + * single group. If any of the event emitters or callbacks registered to a + * domain emit an `'error'` event, or throw an error, then the domain object + * will be notified, rather than losing the context of the error in the `process.on('uncaughtException')` handler, or causing the program to + * exit immediately with an error code. + * @deprecated Since v1.4.2 - Deprecated + * @see [source](https://github.com/nodejs/node/blob/v22.x/lib/domain.js) + */ +declare module "domain" { + import EventEmitter = require("node:events"); + /** + * The `Domain` class encapsulates the functionality of routing errors and + * uncaught exceptions to the active `Domain` object. + * + * To handle the errors that it catches, listen to its `'error'` event. + */ + class Domain extends EventEmitter { + /** + * An array of timers and event emitters that have been explicitly added + * to the domain. + */ + members: Array; + /** + * The `enter()` method is plumbing used by the `run()`, `bind()`, and `intercept()` methods to set the active domain. It sets `domain.active` and `process.domain` to the domain, and implicitly + * pushes the domain onto the domain + * stack managed by the domain module (see {@link exit} for details on the + * domain stack). The call to `enter()` delimits the beginning of a chain of + * asynchronous calls and I/O operations bound to a domain. + * + * Calling `enter()` changes only the active domain, and does not alter the domain + * itself. `enter()` and `exit()` can be called an arbitrary number of times on a + * single domain. + */ + enter(): void; + /** + * The `exit()` method exits the current domain, popping it off the domain stack. + * Any time execution is going to switch to the context of a different chain of + * asynchronous calls, it's important to ensure that the current domain is exited. + * The call to `exit()` delimits either the end of or an interruption to the chain + * of asynchronous calls and I/O operations bound to a domain. + * + * If there are multiple, nested domains bound to the current execution context, `exit()` will exit any domains nested within this domain. + * + * Calling `exit()` changes only the active domain, and does not alter the domain + * itself. `enter()` and `exit()` can be called an arbitrary number of times on a + * single domain. + */ + exit(): void; + /** + * Run the supplied function in the context of the domain, implicitly + * binding all event emitters, timers, and low-level requests that are + * created in that context. Optionally, arguments can be passed to + * the function. 
+ * + * This is the most basic way to use a domain. + * + * ```js + * import domain from 'node:domain'; + * import fs from 'node:fs'; + * const d = domain.create(); + * d.on('error', (er) => { + * console.error('Caught error!', er); + * }); + * d.run(() => { + * process.nextTick(() => { + * setTimeout(() => { // Simulating some various async stuff + * fs.open('non-existent file', 'r', (er, fd) => { + * if (er) throw er; + * // proceed... + * }); + * }, 100); + * }); + * }); + * ``` + * + * In this example, the `d.on('error')` handler will be triggered, rather + * than crashing the program. + */ + run(fn: (...args: any[]) => T, ...args: any[]): T; + /** + * Explicitly adds an emitter to the domain. If any event handlers called by + * the emitter throw an error, or if the emitter emits an `'error'` event, it + * will be routed to the domain's `'error'` event, just like with implicit + * binding. + * + * This also works with timers that are returned from `setInterval()` and `setTimeout()`. If their callback function throws, it will be caught by + * the domain `'error'` handler. + * + * If the Timer or `EventEmitter` was already bound to a domain, it is removed + * from that one, and bound to this one instead. + * @param emitter emitter or timer to be added to the domain + */ + add(emitter: EventEmitter | NodeJS.Timer): void; + /** + * The opposite of {@link add}. Removes domain handling from the + * specified emitter. + * @param emitter emitter or timer to be removed from the domain + */ + remove(emitter: EventEmitter | NodeJS.Timer): void; + /** + * The returned function will be a wrapper around the supplied callback + * function. When the returned function is called, any errors that are + * thrown will be routed to the domain's `'error'` event. + * + * ```js + * const d = domain.create(); + * + * function readSomeFile(filename, cb) { + * fs.readFile(filename, 'utf8', d.bind((er, data) => { + * // If this throws, it will also be passed to the domain. + * return cb(er, data ? JSON.parse(data) : null); + * })); + * } + * + * d.on('error', (er) => { + * // An error occurred somewhere. If we throw it now, it will crash the program + * // with the normal line number and stack message. + * }); + * ``` + * @param callback The callback function + * @return The bound function + */ + bind(callback: T): T; + /** + * This method is almost identical to {@link bind}. However, in + * addition to catching thrown errors, it will also intercept `Error` objects sent as the first argument to the function. + * + * In this way, the common `if (err) return callback(err);` pattern can be replaced + * with a single error handler in a single place. + * + * ```js + * const d = domain.create(); + * + * function readSomeFile(filename, cb) { + * fs.readFile(filename, 'utf8', d.intercept((data) => { + * // Note, the first argument is never passed to the + * // callback since it is assumed to be the 'Error' argument + * // and thus intercepted by the domain. + * + * // If this throws, it will also be passed to the domain + * // so the error-handling logic can be moved to the 'error' + * // event on the domain instead of being repeated throughout + * // the program. + * return cb(null, JSON.parse(data)); + * })); + * } + * + * d.on('error', (er) => { + * // An error occurred somewhere. If we throw it now, it will crash the program + * // with the normal line number and stack message. 
+ * }); + * ``` + * @param callback The callback function + * @return The intercepted function + */ + intercept(callback: T): T; + } + function create(): Domain; +} +declare module "node:domain" { + export * from "domain"; +} diff --git a/node_modules/@types/node-fetch/node_modules/@types/node/events.d.ts b/node_modules/@types/node-fetch/node_modules/@types/node/events.d.ts new file mode 100644 index 0000000..63b9433 --- /dev/null +++ b/node_modules/@types/node-fetch/node_modules/@types/node/events.d.ts @@ -0,0 +1,931 @@ +/** + * Much of the Node.js core API is built around an idiomatic asynchronous + * event-driven architecture in which certain kinds of objects (called "emitters") + * emit named events that cause `Function` objects ("listeners") to be called. + * + * For instance: a `net.Server` object emits an event each time a peer + * connects to it; a `fs.ReadStream` emits an event when the file is opened; + * a `stream` emits an event whenever data is available to be read. + * + * All objects that emit events are instances of the `EventEmitter` class. These + * objects expose an `eventEmitter.on()` function that allows one or more + * functions to be attached to named events emitted by the object. Typically, + * event names are camel-cased strings but any valid JavaScript property key + * can be used. + * + * When the `EventEmitter` object emits an event, all of the functions attached + * to that specific event are called _synchronously_. Any values returned by the + * called listeners are _ignored_ and discarded. + * + * The following example shows a simple `EventEmitter` instance with a single + * listener. The `eventEmitter.on()` method is used to register listeners, while + * the `eventEmitter.emit()` method is used to trigger the event. + * + * ```js + * import { EventEmitter } from 'node:events'; + * + * class MyEmitter extends EventEmitter {} + * + * const myEmitter = new MyEmitter(); + * myEmitter.on('event', () => { + * console.log('an event occurred!'); + * }); + * myEmitter.emit('event'); + * ``` + * @see [source](https://github.com/nodejs/node/blob/v22.x/lib/events.js) + */ +declare module "events" { + import { AsyncResource, AsyncResourceOptions } from "node:async_hooks"; + // NOTE: This class is in the docs but is **not actually exported** by Node. + // If https://github.com/nodejs/node/issues/39903 gets resolved and Node + // actually starts exporting the class, uncomment below. + // import { EventListener, EventListenerObject } from '__dom-events'; + // /** The NodeEventTarget is a Node.js-specific extension to EventTarget that emulates a subset of the EventEmitter API. */ + // interface NodeEventTarget extends EventTarget { + // /** + // * Node.js-specific extension to the `EventTarget` class that emulates the equivalent `EventEmitter` API. + // * The only difference between `addListener()` and `addEventListener()` is that addListener() will return a reference to the EventTarget. + // */ + // addListener(type: string, listener: EventListener | EventListenerObject, options?: { once: boolean }): this; + // /** Node.js-specific extension to the `EventTarget` class that returns an array of event `type` names for which event listeners are registered. */ + // eventNames(): string[]; + // /** Node.js-specific extension to the `EventTarget` class that returns the number of event listeners registered for the `type`. */ + // listenerCount(type: string): number; + // /** Node.js-specific alias for `eventTarget.removeListener()`. 
*/ + // off(type: string, listener: EventListener | EventListenerObject): this; + // /** Node.js-specific alias for `eventTarget.addListener()`. */ + // on(type: string, listener: EventListener | EventListenerObject, options?: { once: boolean }): this; + // /** Node.js-specific extension to the `EventTarget` class that adds a `once` listener for the given event `type`. This is equivalent to calling `on` with the `once` option set to `true`. */ + // once(type: string, listener: EventListener | EventListenerObject): this; + // /** + // * Node.js-specific extension to the `EventTarget` class. + // * If `type` is specified, removes all registered listeners for `type`, + // * otherwise removes all registered listeners. + // */ + // removeAllListeners(type: string): this; + // /** + // * Node.js-specific extension to the `EventTarget` class that removes the listener for the given `type`. + // * The only difference between `removeListener()` and `removeEventListener()` is that `removeListener()` will return a reference to the `EventTarget`. + // */ + // removeListener(type: string, listener: EventListener | EventListenerObject): this; + // } + interface EventEmitterOptions { + /** + * Enables automatic capturing of promise rejection. + */ + captureRejections?: boolean | undefined; + } + interface StaticEventEmitterOptions { + /** + * Can be used to cancel awaiting events. + */ + signal?: AbortSignal | undefined; + } + interface StaticEventEmitterIteratorOptions extends StaticEventEmitterOptions { + /** + * Names of events that will end the iteration. + */ + close?: string[] | undefined; + /** + * The high watermark. The emitter is paused every time the size of events being buffered is higher than it. + * Supported only on emitters implementing `pause()` and `resume()` methods. + * @default Number.MAX_SAFE_INTEGER + */ + highWaterMark?: number | undefined; + /** + * The low watermark. The emitter is resumed every time the size of events being buffered is lower than it. + * Supported only on emitters implementing `pause()` and `resume()` methods. + * @default 1 + */ + lowWaterMark?: number | undefined; + } + interface EventEmitter = DefaultEventMap> extends NodeJS.EventEmitter {} + type EventMap = Record | DefaultEventMap; + type DefaultEventMap = [never]; + type AnyRest = [...args: any[]]; + type Args = T extends DefaultEventMap ? AnyRest : ( + K extends keyof T ? T[K] : never + ); + type Key = T extends DefaultEventMap ? string | symbol : K | keyof T; + type Key2 = T extends DefaultEventMap ? string | symbol : K & keyof T; + type Listener = T extends DefaultEventMap ? F : ( + K extends keyof T ? ( + T[K] extends unknown[] ? (...args: T[K]) => void : never + ) + : never + ); + type Listener1 = Listener void>; + type Listener2 = Listener; + + /** + * The `EventEmitter` class is defined and exposed by the `node:events` module: + * + * ```js + * import { EventEmitter } from 'node:events'; + * ``` + * + * All `EventEmitter`s emit the event `'newListener'` when new listeners are + * added and `'removeListener'` when existing listeners are removed. + * + * It supports the following option: + * @since v0.1.26 + */ + class EventEmitter = DefaultEventMap> { + constructor(options?: EventEmitterOptions); + + [EventEmitter.captureRejectionSymbol]?(error: Error, event: Key, ...args: Args): void; + + /** + * Creates a `Promise` that is fulfilled when the `EventEmitter` emits the given + * event or that is rejected if the `EventEmitter` emits `'error'` while waiting. 
+ * The `Promise` will resolve with an array of all the arguments emitted to the + * given event. + * + * This method is intentionally generic and works with the web platform [EventTarget](https://dom.spec.whatwg.org/#interface-eventtarget) interface, which has no special`'error'` event + * semantics and does not listen to the `'error'` event. + * + * ```js + * import { once, EventEmitter } from 'node:events'; + * import process from 'node:process'; + * + * const ee = new EventEmitter(); + * + * process.nextTick(() => { + * ee.emit('myevent', 42); + * }); + * + * const [value] = await once(ee, 'myevent'); + * console.log(value); + * + * const err = new Error('kaboom'); + * process.nextTick(() => { + * ee.emit('error', err); + * }); + * + * try { + * await once(ee, 'myevent'); + * } catch (err) { + * console.error('error happened', err); + * } + * ``` + * + * The special handling of the `'error'` event is only used when `events.once()` is used to wait for another event. If `events.once()` is used to wait for the + * '`error'` event itself, then it is treated as any other kind of event without + * special handling: + * + * ```js + * import { EventEmitter, once } from 'node:events'; + * + * const ee = new EventEmitter(); + * + * once(ee, 'error') + * .then(([err]) => console.log('ok', err.message)) + * .catch((err) => console.error('error', err.message)); + * + * ee.emit('error', new Error('boom')); + * + * // Prints: ok boom + * ``` + * + * An `AbortSignal` can be used to cancel waiting for the event: + * + * ```js + * import { EventEmitter, once } from 'node:events'; + * + * const ee = new EventEmitter(); + * const ac = new AbortController(); + * + * async function foo(emitter, event, signal) { + * try { + * await once(emitter, event, { signal }); + * console.log('event emitted!'); + * } catch (error) { + * if (error.name === 'AbortError') { + * console.error('Waiting for the event was canceled!'); + * } else { + * console.error('There was an error', error.message); + * } + * } + * } + * + * foo(ee, 'foo', ac.signal); + * ac.abort(); // Abort waiting for the event + * ee.emit('foo'); // Prints: Waiting for the event was canceled! + * ``` + * @since v11.13.0, v10.16.0 + */ + static once( + emitter: NodeJS.EventEmitter, + eventName: string | symbol, + options?: StaticEventEmitterOptions, + ): Promise; + static once(emitter: EventTarget, eventName: string, options?: StaticEventEmitterOptions): Promise; + /** + * ```js + * import { on, EventEmitter } from 'node:events'; + * import process from 'node:process'; + * + * const ee = new EventEmitter(); + * + * // Emit later on + * process.nextTick(() => { + * ee.emit('foo', 'bar'); + * ee.emit('foo', 42); + * }); + * + * for await (const event of on(ee, 'foo')) { + * // The execution of this inner block is synchronous and it + * // processes one event at a time (even with await). Do not use + * // if concurrent execution is required. + * console.log(event); // prints ['bar'] [42] + * } + * // Unreachable here + * ``` + * + * Returns an `AsyncIterator` that iterates `eventName` events. It will throw + * if the `EventEmitter` emits `'error'`. It removes all listeners when + * exiting the loop. The `value` returned by each iteration is an array + * composed of the emitted event arguments. 
+ * + * An `AbortSignal` can be used to cancel waiting on events: + * + * ```js + * import { on, EventEmitter } from 'node:events'; + * import process from 'node:process'; + * + * const ac = new AbortController(); + * + * (async () => { + * const ee = new EventEmitter(); + * + * // Emit later on + * process.nextTick(() => { + * ee.emit('foo', 'bar'); + * ee.emit('foo', 42); + * }); + * + * for await (const event of on(ee, 'foo', { signal: ac.signal })) { + * // The execution of this inner block is synchronous and it + * // processes one event at a time (even with await). Do not use + * // if concurrent execution is required. + * console.log(event); // prints ['bar'] [42] + * } + * // Unreachable here + * })(); + * + * process.nextTick(() => ac.abort()); + * ``` + * + * Use the `close` option to specify an array of event names that will end the iteration: + * + * ```js + * import { on, EventEmitter } from 'node:events'; + * import process from 'node:process'; + * + * const ee = new EventEmitter(); + * + * // Emit later on + * process.nextTick(() => { + * ee.emit('foo', 'bar'); + * ee.emit('foo', 42); + * ee.emit('close'); + * }); + * + * for await (const event of on(ee, 'foo', { close: ['close'] })) { + * console.log(event); // prints ['bar'] [42] + * } + * // the loop will exit after 'close' is emitted + * console.log('done'); // prints 'done' + * ``` + * @since v13.6.0, v12.16.0 + * @return An `AsyncIterator` that iterates `eventName` events emitted by the `emitter` + */ + static on( + emitter: NodeJS.EventEmitter, + eventName: string | symbol, + options?: StaticEventEmitterIteratorOptions, + ): NodeJS.AsyncIterator; + static on( + emitter: EventTarget, + eventName: string, + options?: StaticEventEmitterIteratorOptions, + ): NodeJS.AsyncIterator; + /** + * A class method that returns the number of listeners for the given `eventName` registered on the given `emitter`. + * + * ```js + * import { EventEmitter, listenerCount } from 'node:events'; + * + * const myEmitter = new EventEmitter(); + * myEmitter.on('event', () => {}); + * myEmitter.on('event', () => {}); + * console.log(listenerCount(myEmitter, 'event')); + * // Prints: 2 + * ``` + * @since v0.9.12 + * @deprecated Since v3.2.0 - Use `listenerCount` instead. + * @param emitter The emitter to query + * @param eventName The event name + */ + static listenerCount(emitter: NodeJS.EventEmitter, eventName: string | symbol): number; + /** + * Returns a copy of the array of listeners for the event named `eventName`. + * + * For `EventEmitter`s this behaves exactly the same as calling `.listeners` on + * the emitter. + * + * For `EventTarget`s this is the only way to get the event listeners for the + * event target. This is useful for debugging and diagnostic purposes. + * + * ```js + * import { getEventListeners, EventEmitter } from 'node:events'; + * + * { + * const ee = new EventEmitter(); + * const listener = () => console.log('Events are fun'); + * ee.on('foo', listener); + * console.log(getEventListeners(ee, 'foo')); // [ [Function: listener] ] + * } + * { + * const et = new EventTarget(); + * const listener = () => console.log('Events are fun'); + * et.addEventListener('foo', listener); + * console.log(getEventListeners(et, 'foo')); // [ [Function: listener] ] + * } + * ``` + * @since v15.2.0, v14.17.0 + */ + static getEventListeners(emitter: EventTarget | NodeJS.EventEmitter, name: string | symbol): Function[]; + /** + * Returns the currently set max amount of listeners. 
+ * + * For `EventEmitter`s this behaves exactly the same as calling `.getMaxListeners` on + * the emitter. + * + * For `EventTarget`s this is the only way to get the max event listeners for the + * event target. If the number of event handlers on a single EventTarget exceeds + * the max set, the EventTarget will print a warning. + * + * ```js + * import { getMaxListeners, setMaxListeners, EventEmitter } from 'node:events'; + * + * { + * const ee = new EventEmitter(); + * console.log(getMaxListeners(ee)); // 10 + * setMaxListeners(11, ee); + * console.log(getMaxListeners(ee)); // 11 + * } + * { + * const et = new EventTarget(); + * console.log(getMaxListeners(et)); // 10 + * setMaxListeners(11, et); + * console.log(getMaxListeners(et)); // 11 + * } + * ``` + * @since v19.9.0 + */ + static getMaxListeners(emitter: EventTarget | NodeJS.EventEmitter): number; + /** + * ```js + * import { setMaxListeners, EventEmitter } from 'node:events'; + * + * const target = new EventTarget(); + * const emitter = new EventEmitter(); + * + * setMaxListeners(5, target, emitter); + * ``` + * @since v15.4.0 + * @param n A non-negative number. The maximum number of listeners per `EventTarget` event. + * @param eventTargets Zero or more {EventTarget} or {EventEmitter} instances. If none are specified, `n` is set as the default max for all newly created {EventTarget} and {EventEmitter} + * objects. + */ + static setMaxListeners(n?: number, ...eventTargets: Array): void; + /** + * Listens once to the `abort` event on the provided `signal`. + * + * Listening to the `abort` event on abort signals is unsafe and may + * lead to resource leaks since another third party with the signal can + * call `e.stopImmediatePropagation()`. Unfortunately Node.js cannot change + * this since it would violate the web standard. Additionally, the original + * API makes it easy to forget to remove listeners. + * + * This API allows safely using `AbortSignal`s in Node.js APIs by solving these + * two issues by listening to the event such that `stopImmediatePropagation` does + * not prevent the listener from running. + * + * Returns a disposable so that it may be unsubscribed from more easily. + * + * ```js + * import { addAbortListener } from 'node:events'; + * + * function example(signal) { + * let disposable; + * try { + * signal.addEventListener('abort', (e) => e.stopImmediatePropagation()); + * disposable = addAbortListener(signal, (e) => { + * // Do something when signal is aborted. + * }); + * } finally { + * disposable?.[Symbol.dispose](); + * } + * } + * ``` + * @since v20.5.0 + * @experimental + * @return Disposable that removes the `abort` listener. + */ + static addAbortListener(signal: AbortSignal, resource: (event: Event) => void): Disposable; + /** + * This symbol shall be used to install a listener for only monitoring `'error'` events. Listeners installed using this symbol are called before the regular `'error'` listeners are called. + * + * Installing a listener using this symbol does not change the behavior once an `'error'` event is emitted. Therefore, the process will still crash if no + * regular `'error'` listener is installed. + * @since v13.6.0, v12.17.0 + */ + static readonly errorMonitor: unique symbol; + /** + * Value: `Symbol.for('nodejs.rejection')` + * + * See how to write a custom `rejection handler`. 
+ * @since v13.4.0, v12.16.0 + */ + static readonly captureRejectionSymbol: unique symbol; + /** + * Value: [boolean](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Data_structures#Boolean_type) + * + * Change the default `captureRejections` option on all new `EventEmitter` objects. + * @since v13.4.0, v12.16.0 + */ + static captureRejections: boolean; + /** + * By default, a maximum of `10` listeners can be registered for any single + * event. This limit can be changed for individual `EventEmitter` instances + * using the `emitter.setMaxListeners(n)` method. To change the default + * for _all_`EventEmitter` instances, the `events.defaultMaxListeners` property + * can be used. If this value is not a positive number, a `RangeError` is thrown. + * + * Take caution when setting the `events.defaultMaxListeners` because the + * change affects _all_ `EventEmitter` instances, including those created before + * the change is made. However, calling `emitter.setMaxListeners(n)` still has + * precedence over `events.defaultMaxListeners`. + * + * This is not a hard limit. The `EventEmitter` instance will allow + * more listeners to be added but will output a trace warning to stderr indicating + * that a "possible EventEmitter memory leak" has been detected. For any single + * `EventEmitter`, the `emitter.getMaxListeners()` and `emitter.setMaxListeners()` methods can be used to + * temporarily avoid this warning: + * + * ```js + * import { EventEmitter } from 'node:events'; + * const emitter = new EventEmitter(); + * emitter.setMaxListeners(emitter.getMaxListeners() + 1); + * emitter.once('event', () => { + * // do stuff + * emitter.setMaxListeners(Math.max(emitter.getMaxListeners() - 1, 0)); + * }); + * ``` + * + * The `--trace-warnings` command-line flag can be used to display the + * stack trace for such warnings. + * + * The emitted warning can be inspected with `process.on('warning')` and will + * have the additional `emitter`, `type`, and `count` properties, referring to + * the event emitter instance, the event's name and the number of attached + * listeners, respectively. + * Its `name` property is set to `'MaxListenersExceededWarning'`. + * @since v0.11.2 + */ + static defaultMaxListeners: number; + } + import internal = require("node:events"); + namespace EventEmitter { + // Should just be `export { EventEmitter }`, but that doesn't work in TypeScript 3.4 + export { internal as EventEmitter }; + export interface Abortable { + /** + * When provided the corresponding `AbortController` can be used to cancel an asynchronous action. + */ + signal?: AbortSignal | undefined; + } + + export interface EventEmitterReferencingAsyncResource extends AsyncResource { + readonly eventEmitter: EventEmitterAsyncResource; + } + + export interface EventEmitterAsyncResourceOptions extends AsyncResourceOptions, EventEmitterOptions { + /** + * The type of async event, this is required when instantiating `EventEmitterAsyncResource` + * directly rather than as a child class. + * @default new.target.name if instantiated as a child class. + */ + name?: string; + } + + /** + * Integrates `EventEmitter` with `AsyncResource` for `EventEmitter`s that + * require manual async tracking. Specifically, all events emitted by instances + * of `events.EventEmitterAsyncResource` will run within its `async context`. 
+ * + * ```js + * import { EventEmitterAsyncResource, EventEmitter } from 'node:events'; + * import { notStrictEqual, strictEqual } from 'node:assert'; + * import { executionAsyncId, triggerAsyncId } from 'node:async_hooks'; + * + * // Async tracking tooling will identify this as 'Q'. + * const ee1 = new EventEmitterAsyncResource({ name: 'Q' }); + * + * // 'foo' listeners will run in the EventEmitters async context. + * ee1.on('foo', () => { + * strictEqual(executionAsyncId(), ee1.asyncId); + * strictEqual(triggerAsyncId(), ee1.triggerAsyncId); + * }); + * + * const ee2 = new EventEmitter(); + * + * // 'foo' listeners on ordinary EventEmitters that do not track async + * // context, however, run in the same async context as the emit(). + * ee2.on('foo', () => { + * notStrictEqual(executionAsyncId(), ee2.asyncId); + * notStrictEqual(triggerAsyncId(), ee2.triggerAsyncId); + * }); + * + * Promise.resolve().then(() => { + * ee1.emit('foo'); + * ee2.emit('foo'); + * }); + * ``` + * + * The `EventEmitterAsyncResource` class has the same methods and takes the + * same options as `EventEmitter` and `AsyncResource` themselves. + * @since v17.4.0, v16.14.0 + */ + export class EventEmitterAsyncResource extends EventEmitter { + /** + * @param options Only optional in child class. + */ + constructor(options?: EventEmitterAsyncResourceOptions); + /** + * Call all `destroy` hooks. This should only ever be called once. An error will + * be thrown if it is called more than once. This **must** be manually called. If + * the resource is left to be collected by the GC then the `destroy` hooks will + * never be called. + */ + emitDestroy(): void; + /** + * The unique `asyncId` assigned to the resource. + */ + readonly asyncId: number; + /** + * The same triggerAsyncId that is passed to the AsyncResource constructor. + */ + readonly triggerAsyncId: number; + /** + * The returned `AsyncResource` object has an additional `eventEmitter` property + * that provides a reference to this `EventEmitterAsyncResource`. + */ + readonly asyncResource: EventEmitterReferencingAsyncResource; + } + } + global { + namespace NodeJS { + interface EventEmitter = DefaultEventMap> { + [EventEmitter.captureRejectionSymbol]?(error: Error, event: Key, ...args: Args): void; + /** + * Alias for `emitter.on(eventName, listener)`. + * @since v0.1.26 + */ + addListener(eventName: Key, listener: Listener1): this; + /** + * Adds the `listener` function to the end of the listeners array for the event + * named `eventName`. No checks are made to see if the `listener` has already + * been added. Multiple calls passing the same combination of `eventName` and + * `listener` will result in the `listener` being added, and called, multiple times. + * + * ```js + * server.on('connection', (stream) => { + * console.log('someone connected!'); + * }); + * ``` + * + * Returns a reference to the `EventEmitter`, so that calls can be chained. + * + * By default, event listeners are invoked in the order they are added. The `emitter.prependListener()` method can be used as an alternative to add the + * event listener to the beginning of the listeners array. + * + * ```js + * import { EventEmitter } from 'node:events'; + * const myEE = new EventEmitter(); + * myEE.on('foo', () => console.log('a')); + * myEE.prependListener('foo', () => console.log('b')); + * myEE.emit('foo'); + * // Prints: + * // b + * // a + * ``` + * @since v0.1.101 + * @param eventName The name of the event. 
+ * @param listener The callback function + */ + on(eventName: Key, listener: Listener1): this; + /** + * Adds a **one-time** `listener` function for the event named `eventName`. The + * next time `eventName` is triggered, this listener is removed and then invoked. + * + * ```js + * server.once('connection', (stream) => { + * console.log('Ah, we have our first user!'); + * }); + * ``` + * + * Returns a reference to the `EventEmitter`, so that calls can be chained. + * + * By default, event listeners are invoked in the order they are added. The `emitter.prependOnceListener()` method can be used as an alternative to add the + * event listener to the beginning of the listeners array. + * + * ```js + * import { EventEmitter } from 'node:events'; + * const myEE = new EventEmitter(); + * myEE.once('foo', () => console.log('a')); + * myEE.prependOnceListener('foo', () => console.log('b')); + * myEE.emit('foo'); + * // Prints: + * // b + * // a + * ``` + * @since v0.3.0 + * @param eventName The name of the event. + * @param listener The callback function + */ + once(eventName: Key, listener: Listener1): this; + /** + * Removes the specified `listener` from the listener array for the event named `eventName`. + * + * ```js + * const callback = (stream) => { + * console.log('someone connected!'); + * }; + * server.on('connection', callback); + * // ... + * server.removeListener('connection', callback); + * ``` + * + * `removeListener()` will remove, at most, one instance of a listener from the + * listener array. If any single listener has been added multiple times to the + * listener array for the specified `eventName`, then `removeListener()` must be + * called multiple times to remove each instance. + * + * Once an event is emitted, all listeners attached to it at the + * time of emitting are called in order. This implies that any `removeListener()` or `removeAllListeners()` calls _after_ emitting and _before_ the last listener finishes execution + * will not remove them from`emit()` in progress. Subsequent events behave as expected. + * + * ```js + * import { EventEmitter } from 'node:events'; + * class MyEmitter extends EventEmitter {} + * const myEmitter = new MyEmitter(); + * + * const callbackA = () => { + * console.log('A'); + * myEmitter.removeListener('event', callbackB); + * }; + * + * const callbackB = () => { + * console.log('B'); + * }; + * + * myEmitter.on('event', callbackA); + * + * myEmitter.on('event', callbackB); + * + * // callbackA removes listener callbackB but it will still be called. + * // Internal listener array at time of emit [callbackA, callbackB] + * myEmitter.emit('event'); + * // Prints: + * // A + * // B + * + * // callbackB is now removed. + * // Internal listener array [callbackA] + * myEmitter.emit('event'); + * // Prints: + * // A + * ``` + * + * Because listeners are managed using an internal array, calling this will + * change the position indices of any listener registered _after_ the listener + * being removed. This will not impact the order in which listeners are called, + * but it means that any copies of the listener array as returned by + * the `emitter.listeners()` method will need to be recreated. + * + * When a single function has been added as a handler multiple times for a single + * event (as in the example below), `removeListener()` will remove the most + * recently added instance. 
In the example the `once('ping')` listener is removed: + * + * ```js + * import { EventEmitter } from 'node:events'; + * const ee = new EventEmitter(); + * + * function pong() { + * console.log('pong'); + * } + * + * ee.on('ping', pong); + * ee.once('ping', pong); + * ee.removeListener('ping', pong); + * + * ee.emit('ping'); + * ee.emit('ping'); + * ``` + * + * Returns a reference to the `EventEmitter`, so that calls can be chained. + * @since v0.1.26 + */ + removeListener(eventName: Key, listener: Listener1): this; + /** + * Alias for `emitter.removeListener()`. + * @since v10.0.0 + */ + off(eventName: Key, listener: Listener1): this; + /** + * Removes all listeners, or those of the specified `eventName`. + * + * It is bad practice to remove listeners added elsewhere in the code, + * particularly when the `EventEmitter` instance was created by some other + * component or module (e.g. sockets or file streams). + * + * Returns a reference to the `EventEmitter`, so that calls can be chained. + * @since v0.1.26 + */ + removeAllListeners(eventName?: Key): this; + /** + * By default `EventEmitter`s will print a warning if more than `10` listeners are + * added for a particular event. This is a useful default that helps finding + * memory leaks. The `emitter.setMaxListeners()` method allows the limit to be + * modified for this specific `EventEmitter` instance. The value can be set to `Infinity` (or `0`) to indicate an unlimited number of listeners. + * + * Returns a reference to the `EventEmitter`, so that calls can be chained. + * @since v0.3.5 + */ + setMaxListeners(n: number): this; + /** + * Returns the current max listener value for the `EventEmitter` which is either + * set by `emitter.setMaxListeners(n)` or defaults to {@link defaultMaxListeners}. + * @since v1.0.0 + */ + getMaxListeners(): number; + /** + * Returns a copy of the array of listeners for the event named `eventName`. + * + * ```js + * server.on('connection', (stream) => { + * console.log('someone connected!'); + * }); + * console.log(util.inspect(server.listeners('connection'))); + * // Prints: [ [Function] ] + * ``` + * @since v0.1.26 + */ + listeners(eventName: Key): Array>; + /** + * Returns a copy of the array of listeners for the event named `eventName`, + * including any wrappers (such as those created by `.once()`). + * + * ```js + * import { EventEmitter } from 'node:events'; + * const emitter = new EventEmitter(); + * emitter.once('log', () => console.log('log once')); + * + * // Returns a new Array with a function `onceWrapper` which has a property + * // `listener` which contains the original listener bound above + * const listeners = emitter.rawListeners('log'); + * const logFnWrapper = listeners[0]; + * + * // Logs "log once" to the console and does not unbind the `once` event + * logFnWrapper.listener(); + * + * // Logs "log once" to the console and removes the listener + * logFnWrapper(); + * + * emitter.on('log', () => console.log('log persistently')); + * // Will return a new Array with a single function bound by `.on()` above + * const newListeners = emitter.rawListeners('log'); + * + * // Logs "log persistently" twice + * newListeners[0](); + * emitter.emit('log'); + * ``` + * @since v9.4.0 + */ + rawListeners(eventName: Key): Array>; + /** + * Synchronously calls each of the listeners registered for the event named `eventName`, in the order they were registered, passing the supplied arguments + * to each. + * + * Returns `true` if the event had listeners, `false` otherwise. 
+ * + * ```js + * import { EventEmitter } from 'node:events'; + * const myEmitter = new EventEmitter(); + * + * // First listener + * myEmitter.on('event', function firstListener() { + * console.log('Helloooo! first listener'); + * }); + * // Second listener + * myEmitter.on('event', function secondListener(arg1, arg2) { + * console.log(`event with parameters ${arg1}, ${arg2} in second listener`); + * }); + * // Third listener + * myEmitter.on('event', function thirdListener(...args) { + * const parameters = args.join(', '); + * console.log(`event with parameters ${parameters} in third listener`); + * }); + * + * console.log(myEmitter.listeners('event')); + * + * myEmitter.emit('event', 1, 2, 3, 4, 5); + * + * // Prints: + * // [ + * // [Function: firstListener], + * // [Function: secondListener], + * // [Function: thirdListener] + * // ] + * // Helloooo! first listener + * // event with parameters 1, 2 in second listener + * // event with parameters 1, 2, 3, 4, 5 in third listener + * ``` + * @since v0.1.26 + */ + emit(eventName: Key, ...args: Args): boolean; + /** + * Returns the number of listeners listening for the event named `eventName`. + * If `listener` is provided, it will return how many times the listener is found + * in the list of the listeners of the event. + * @since v3.2.0 + * @param eventName The name of the event being listened for + * @param listener The event handler function + */ + listenerCount(eventName: Key, listener?: Listener2): number; + /** + * Adds the `listener` function to the _beginning_ of the listeners array for the + * event named `eventName`. No checks are made to see if the `listener` has + * already been added. Multiple calls passing the same combination of `eventName` + * and `listener` will result in the `listener` being added, and called, multiple times. + * + * ```js + * server.prependListener('connection', (stream) => { + * console.log('someone connected!'); + * }); + * ``` + * + * Returns a reference to the `EventEmitter`, so that calls can be chained. + * @since v6.0.0 + * @param eventName The name of the event. + * @param listener The callback function + */ + prependListener(eventName: Key, listener: Listener1): this; + /** + * Adds a **one-time**`listener` function for the event named `eventName` to the _beginning_ of the listeners array. The next time `eventName` is triggered, this + * listener is removed, and then invoked. + * + * ```js + * server.prependOnceListener('connection', (stream) => { + * console.log('Ah, we have our first user!'); + * }); + * ``` + * + * Returns a reference to the `EventEmitter`, so that calls can be chained. + * @since v6.0.0 + * @param eventName The name of the event. + * @param listener The callback function + */ + prependOnceListener(eventName: Key, listener: Listener1): this; + /** + * Returns an array listing the events for which the emitter has registered + * listeners. The values in the array are strings or `Symbol`s. 
+ * + * ```js + * import { EventEmitter } from 'node:events'; + * + * const myEE = new EventEmitter(); + * myEE.on('foo', () => {}); + * myEE.on('bar', () => {}); + * + * const sym = Symbol('symbol'); + * myEE.on(sym, () => {}); + * + * console.log(myEE.eventNames()); + * // Prints: [ 'foo', 'bar', Symbol(symbol) ] + * ``` + * @since v6.0.0 + */ + eventNames(): Array<(string | symbol) & Key2>; + } + } + } + export = EventEmitter; +} +declare module "node:events" { + import events = require("events"); + export = events; +} diff --git a/node_modules/@types/node-fetch/node_modules/@types/node/fs.d.ts b/node_modules/@types/node-fetch/node_modules/@types/node/fs.d.ts new file mode 100644 index 0000000..9ae12cd --- /dev/null +++ b/node_modules/@types/node-fetch/node_modules/@types/node/fs.d.ts @@ -0,0 +1,4396 @@ +/** + * The `node:fs` module enables interacting with the file system in a + * way modeled on standard POSIX functions. + * + * To use the promise-based APIs: + * + * ```js + * import * as fs from 'node:fs/promises'; + * ``` + * + * To use the callback and sync APIs: + * + * ```js + * import * as fs from 'node:fs'; + * ``` + * + * All file system operations have synchronous, callback, and promise-based + * forms, and are accessible using both CommonJS syntax and ES6 Modules (ESM). + * @see [source](https://github.com/nodejs/node/blob/v22.x/lib/fs.js) + */ +declare module "fs" { + import * as stream from "node:stream"; + import { Abortable, EventEmitter } from "node:events"; + import { URL } from "node:url"; + import * as promises from "node:fs/promises"; + export { promises }; + /** + * Valid types for path values in "fs". + */ + export type PathLike = string | Buffer | URL; + export type PathOrFileDescriptor = PathLike | number; + export type TimeLike = string | number | Date; + export type NoParamCallback = (err: NodeJS.ErrnoException | null) => void; + export type BufferEncodingOption = + | "buffer" + | { + encoding: "buffer"; + }; + export interface ObjectEncodingOptions { + encoding?: BufferEncoding | null | undefined; + } + export type EncodingOption = ObjectEncodingOptions | BufferEncoding | undefined | null; + export type OpenMode = number | string; + export type Mode = number | string; + export interface StatsBase { + isFile(): boolean; + isDirectory(): boolean; + isBlockDevice(): boolean; + isCharacterDevice(): boolean; + isSymbolicLink(): boolean; + isFIFO(): boolean; + isSocket(): boolean; + dev: T; + ino: T; + mode: T; + nlink: T; + uid: T; + gid: T; + rdev: T; + size: T; + blksize: T; + blocks: T; + atimeMs: T; + mtimeMs: T; + ctimeMs: T; + birthtimeMs: T; + atime: Date; + mtime: Date; + ctime: Date; + birthtime: Date; + } + export interface Stats extends StatsBase {} + /** + * A `fs.Stats` object provides information about a file. + * + * Objects returned from {@link stat}, {@link lstat}, {@link fstat}, and + * their synchronous counterparts are of this type. + * If `bigint` in the `options` passed to those methods is true, the numeric values + * will be `bigint` instead of `number`, and the object will contain additional + * nanosecond-precision properties suffixed with `Ns`. `Stat` objects are not to be created directly using the `new` keyword. 
+ * + * ```console + * Stats { + * dev: 2114, + * ino: 48064969, + * mode: 33188, + * nlink: 1, + * uid: 85, + * gid: 100, + * rdev: 0, + * size: 527, + * blksize: 4096, + * blocks: 8, + * atimeMs: 1318289051000.1, + * mtimeMs: 1318289051000.1, + * ctimeMs: 1318289051000.1, + * birthtimeMs: 1318289051000.1, + * atime: Mon, 10 Oct 2011 23:24:11 GMT, + * mtime: Mon, 10 Oct 2011 23:24:11 GMT, + * ctime: Mon, 10 Oct 2011 23:24:11 GMT, + * birthtime: Mon, 10 Oct 2011 23:24:11 GMT } + * ``` + * + * `bigint` version: + * + * ```console + * BigIntStats { + * dev: 2114n, + * ino: 48064969n, + * mode: 33188n, + * nlink: 1n, + * uid: 85n, + * gid: 100n, + * rdev: 0n, + * size: 527n, + * blksize: 4096n, + * blocks: 8n, + * atimeMs: 1318289051000n, + * mtimeMs: 1318289051000n, + * ctimeMs: 1318289051000n, + * birthtimeMs: 1318289051000n, + * atimeNs: 1318289051000000000n, + * mtimeNs: 1318289051000000000n, + * ctimeNs: 1318289051000000000n, + * birthtimeNs: 1318289051000000000n, + * atime: Mon, 10 Oct 2011 23:24:11 GMT, + * mtime: Mon, 10 Oct 2011 23:24:11 GMT, + * ctime: Mon, 10 Oct 2011 23:24:11 GMT, + * birthtime: Mon, 10 Oct 2011 23:24:11 GMT } + * ``` + * @since v0.1.21 + */ + export class Stats { + private constructor(); + } + export interface StatsFsBase { + /** Type of file system. */ + type: T; + /** Optimal transfer block size. */ + bsize: T; + /** Total data blocks in file system. */ + blocks: T; + /** Free blocks in file system. */ + bfree: T; + /** Available blocks for unprivileged users */ + bavail: T; + /** Total file nodes in file system. */ + files: T; + /** Free file nodes in file system. */ + ffree: T; + } + export interface StatsFs extends StatsFsBase {} + /** + * Provides information about a mounted file system. + * + * Objects returned from {@link statfs} and its synchronous counterpart are of + * this type. If `bigint` in the `options` passed to those methods is `true`, the + * numeric values will be `bigint` instead of `number`. + * + * ```console + * StatFs { + * type: 1397114950, + * bsize: 4096, + * blocks: 121938943, + * bfree: 61058895, + * bavail: 61058895, + * files: 999, + * ffree: 1000000 + * } + * ``` + * + * `bigint` version: + * + * ```console + * StatFs { + * type: 1397114950n, + * bsize: 4096n, + * blocks: 121938943n, + * bfree: 61058895n, + * bavail: 61058895n, + * files: 999n, + * ffree: 1000000n + * } + * ``` + * @since v19.6.0, v18.15.0 + */ + export class StatsFs {} + export interface BigIntStatsFs extends StatsFsBase {} + export interface StatFsOptions { + bigint?: boolean | undefined; + } + /** + * A representation of a directory entry, which can be a file or a subdirectory + * within the directory, as returned by reading from an `fs.Dir`. The + * directory entry is a combination of the file name and file type pairs. + * + * Additionally, when {@link readdir} or {@link readdirSync} is called with + * the `withFileTypes` option set to `true`, the resulting array is filled with `fs.Dirent` objects, rather than strings or `Buffer` s. + * @since v10.10.0 + */ + export class Dirent { + /** + * Returns `true` if the `fs.Dirent` object describes a regular file. + * @since v10.10.0 + */ + isFile(): boolean; + /** + * Returns `true` if the `fs.Dirent` object describes a file system + * directory. + * @since v10.10.0 + */ + isDirectory(): boolean; + /** + * Returns `true` if the `fs.Dirent` object describes a block device. + * @since v10.10.0 + */ + isBlockDevice(): boolean; + /** + * Returns `true` if the `fs.Dirent` object describes a character device. 
+ * @since v10.10.0 + */ + isCharacterDevice(): boolean; + /** + * Returns `true` if the `fs.Dirent` object describes a symbolic link. + * @since v10.10.0 + */ + isSymbolicLink(): boolean; + /** + * Returns `true` if the `fs.Dirent` object describes a first-in-first-out + * (FIFO) pipe. + * @since v10.10.0 + */ + isFIFO(): boolean; + /** + * Returns `true` if the `fs.Dirent` object describes a socket. + * @since v10.10.0 + */ + isSocket(): boolean; + /** + * The file name that this `fs.Dirent` object refers to. The type of this + * value is determined by the `options.encoding` passed to {@link readdir} or {@link readdirSync}. + * @since v10.10.0 + */ + name: string; + /** + * The base path that this `fs.Dirent` object refers to. + * @since v20.12.0 + */ + parentPath: string; + /** + * Alias for `dirent.parentPath`. + * @since v20.1.0 + * @deprecated Since v20.12.0 + */ + path: string; + } + /** + * A class representing a directory stream. + * + * Created by {@link opendir}, {@link opendirSync}, or `fsPromises.opendir()`. + * + * ```js + * import { opendir } from 'node:fs/promises'; + * + * try { + * const dir = await opendir('./'); + * for await (const dirent of dir) + * console.log(dirent.name); + * } catch (err) { + * console.error(err); + * } + * ``` + * + * When using the async iterator, the `fs.Dir` object will be automatically + * closed after the iterator exits. + * @since v12.12.0 + */ + export class Dir implements AsyncIterable { + /** + * The read-only path of this directory as was provided to {@link opendir},{@link opendirSync}, or `fsPromises.opendir()`. + * @since v12.12.0 + */ + readonly path: string; + /** + * Asynchronously iterates over the directory via `readdir(3)` until all entries have been read. + */ + [Symbol.asyncIterator](): NodeJS.AsyncIterator; + /** + * Asynchronously close the directory's underlying resource handle. + * Subsequent reads will result in errors. + * + * A promise is returned that will be fulfilled after the resource has been + * closed. + * @since v12.12.0 + */ + close(): Promise; + close(cb: NoParamCallback): void; + /** + * Synchronously close the directory's underlying resource handle. + * Subsequent reads will result in errors. + * @since v12.12.0 + */ + closeSync(): void; + /** + * Asynchronously read the next directory entry via [`readdir(3)`](http://man7.org/linux/man-pages/man3/readdir.3.html) as an `fs.Dirent`. + * + * A promise is returned that will be fulfilled with an `fs.Dirent`, or `null` if there are no more directory entries to read. + * + * Directory entries returned by this function are in no particular order as + * provided by the operating system's underlying directory mechanisms. + * Entries added or removed while iterating over the directory might not be + * included in the iteration results. + * @since v12.12.0 + * @return containing {fs.Dirent|null} + */ + read(): Promise; + read(cb: (err: NodeJS.ErrnoException | null, dirEnt: Dirent | null) => void): void; + /** + * Synchronously read the next directory entry as an `fs.Dirent`. See the + * POSIX [`readdir(3)`](http://man7.org/linux/man-pages/man3/readdir.3.html) documentation for more detail. + * + * If there are no more directory entries to read, `null` will be returned. + * + * Directory entries returned by this function are in no particular order as + * provided by the operating system's underlying directory mechanisms. + * Entries added or removed while iterating over the directory might not be + * included in the iteration results. 
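As a small illustration of the `fs.Dirent` entries described above, a sketch using `readdir` with `withFileTypes: true`, which avoids a separate `stat` call per entry; the current directory is used only as a convenient example path.

```js
import { readdir } from 'node:fs';

// With `withFileTypes: true`, entries arrive as fs.Dirent objects
// instead of plain names, so type checks need no extra stat call.
readdir('./', { withFileTypes: true }, (err, entries) => {
  if (err) throw err;
  for (const dirent of entries) {
    const kind = dirent.isDirectory() ? 'dir' : dirent.isFile() ? 'file' : 'other';
    console.log(`${kind}\t${dirent.name}`);
  }
});
```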
+ * @since v12.12.0 + */ + readSync(): Dirent | null; + } + /** + * Class: fs.StatWatcher + * @since v14.3.0, v12.20.0 + * Extends `EventEmitter` + * A successful call to {@link watchFile} method will return a new fs.StatWatcher object. + */ + export interface StatWatcher extends EventEmitter { + /** + * When called, requests that the Node.js event loop _not_ exit so long as the `fs.StatWatcher` is active. Calling `watcher.ref()` multiple times will have + * no effect. + * + * By default, all `fs.StatWatcher` objects are "ref'ed", making it normally + * unnecessary to call `watcher.ref()` unless `watcher.unref()` had been + * called previously. + * @since v14.3.0, v12.20.0 + */ + ref(): this; + /** + * When called, the active `fs.StatWatcher` object will not require the Node.js + * event loop to remain active. If there is no other activity keeping the + * event loop running, the process may exit before the `fs.StatWatcher` object's + * callback is invoked. Calling `watcher.unref()` multiple times will have + * no effect. + * @since v14.3.0, v12.20.0 + */ + unref(): this; + } + export interface FSWatcher extends EventEmitter { + /** + * Stop watching for changes on the given `fs.FSWatcher`. Once stopped, the `fs.FSWatcher` object is no longer usable. + * @since v0.5.8 + */ + close(): void; + /** + * When called, requests that the Node.js event loop _not_ exit so long as the `fs.FSWatcher` is active. Calling `watcher.ref()` multiple times will have + * no effect. + * + * By default, all `fs.FSWatcher` objects are "ref'ed", making it normally + * unnecessary to call `watcher.ref()` unless `watcher.unref()` had been + * called previously. + * @since v14.3.0, v12.20.0 + */ + ref(): this; + /** + * When called, the active `fs.FSWatcher` object will not require the Node.js + * event loop to remain active. If there is no other activity keeping the + * event loop running, the process may exit before the `fs.FSWatcher` object's + * callback is invoked. Calling `watcher.unref()` multiple times will have + * no effect. + * @since v14.3.0, v12.20.0 + */ + unref(): this; + /** + * events.EventEmitter + * 1. change + * 2. close + * 3. 
error + */ + addListener(event: string, listener: (...args: any[]) => void): this; + addListener(event: "change", listener: (eventType: string, filename: string | Buffer) => void): this; + addListener(event: "close", listener: () => void): this; + addListener(event: "error", listener: (error: Error) => void): this; + on(event: string, listener: (...args: any[]) => void): this; + on(event: "change", listener: (eventType: string, filename: string | Buffer) => void): this; + on(event: "close", listener: () => void): this; + on(event: "error", listener: (error: Error) => void): this; + once(event: string, listener: (...args: any[]) => void): this; + once(event: "change", listener: (eventType: string, filename: string | Buffer) => void): this; + once(event: "close", listener: () => void): this; + once(event: "error", listener: (error: Error) => void): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + prependListener(event: "change", listener: (eventType: string, filename: string | Buffer) => void): this; + prependListener(event: "close", listener: () => void): this; + prependListener(event: "error", listener: (error: Error) => void): this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + prependOnceListener(event: "change", listener: (eventType: string, filename: string | Buffer) => void): this; + prependOnceListener(event: "close", listener: () => void): this; + prependOnceListener(event: "error", listener: (error: Error) => void): this; + } + /** + * Instances of `fs.ReadStream` are created and returned using the {@link createReadStream} function. + * @since v0.1.93 + */ + export class ReadStream extends stream.Readable { + close(callback?: (err?: NodeJS.ErrnoException | null) => void): void; + /** + * The number of bytes that have been read so far. + * @since v6.4.0 + */ + bytesRead: number; + /** + * The path to the file the stream is reading from as specified in the first + * argument to `fs.createReadStream()`. If `path` is passed as a string, then`readStream.path` will be a string. If `path` is passed as a `Buffer`, then`readStream.path` will be a + * `Buffer`. If `fd` is specified, then`readStream.path` will be `undefined`. + * @since v0.1.93 + */ + path: string | Buffer; + /** + * This property is `true` if the underlying file has not been opened yet, + * i.e. before the `'ready'` event is emitted. + * @since v11.2.0, v10.16.0 + */ + pending: boolean; + /** + * events.EventEmitter + * 1. open + * 2. close + * 3. 
ready + */ + addListener(event: "close", listener: () => void): this; + addListener(event: "data", listener: (chunk: Buffer | string) => void): this; + addListener(event: "end", listener: () => void): this; + addListener(event: "error", listener: (err: Error) => void): this; + addListener(event: "open", listener: (fd: number) => void): this; + addListener(event: "pause", listener: () => void): this; + addListener(event: "readable", listener: () => void): this; + addListener(event: "ready", listener: () => void): this; + addListener(event: "resume", listener: () => void): this; + addListener(event: string | symbol, listener: (...args: any[]) => void): this; + on(event: "close", listener: () => void): this; + on(event: "data", listener: (chunk: Buffer | string) => void): this; + on(event: "end", listener: () => void): this; + on(event: "error", listener: (err: Error) => void): this; + on(event: "open", listener: (fd: number) => void): this; + on(event: "pause", listener: () => void): this; + on(event: "readable", listener: () => void): this; + on(event: "ready", listener: () => void): this; + on(event: "resume", listener: () => void): this; + on(event: string | symbol, listener: (...args: any[]) => void): this; + once(event: "close", listener: () => void): this; + once(event: "data", listener: (chunk: Buffer | string) => void): this; + once(event: "end", listener: () => void): this; + once(event: "error", listener: (err: Error) => void): this; + once(event: "open", listener: (fd: number) => void): this; + once(event: "pause", listener: () => void): this; + once(event: "readable", listener: () => void): this; + once(event: "ready", listener: () => void): this; + once(event: "resume", listener: () => void): this; + once(event: string | symbol, listener: (...args: any[]) => void): this; + prependListener(event: "close", listener: () => void): this; + prependListener(event: "data", listener: (chunk: Buffer | string) => void): this; + prependListener(event: "end", listener: () => void): this; + prependListener(event: "error", listener: (err: Error) => void): this; + prependListener(event: "open", listener: (fd: number) => void): this; + prependListener(event: "pause", listener: () => void): this; + prependListener(event: "readable", listener: () => void): this; + prependListener(event: "ready", listener: () => void): this; + prependListener(event: "resume", listener: () => void): this; + prependListener(event: string | symbol, listener: (...args: any[]) => void): this; + prependOnceListener(event: "close", listener: () => void): this; + prependOnceListener(event: "data", listener: (chunk: Buffer | string) => void): this; + prependOnceListener(event: "end", listener: () => void): this; + prependOnceListener(event: "error", listener: (err: Error) => void): this; + prependOnceListener(event: "open", listener: (fd: number) => void): this; + prependOnceListener(event: "pause", listener: () => void): this; + prependOnceListener(event: "readable", listener: () => void): this; + prependOnceListener(event: "ready", listener: () => void): this; + prependOnceListener(event: "resume", listener: () => void): this; + prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; + } + /** + * * Extends `stream.Writable` + * + * Instances of `fs.WriteStream` are created and returned using the {@link createWriteStream} function. + * @since v0.1.93 + */ + export class WriteStream extends stream.Writable { + /** + * Closes `writeStream`. 
Optionally accepts a + * callback that will be executed once the `writeStream`is closed. + * @since v0.9.4 + */ + close(callback?: (err?: NodeJS.ErrnoException | null) => void): void; + /** + * The number of bytes written so far. Does not include data that is still queued + * for writing. + * @since v0.4.7 + */ + bytesWritten: number; + /** + * The path to the file the stream is writing to as specified in the first + * argument to {@link createWriteStream}. If `path` is passed as a string, then`writeStream.path` will be a string. If `path` is passed as a `Buffer`, then`writeStream.path` will be a + * `Buffer`. + * @since v0.1.93 + */ + path: string | Buffer; + /** + * This property is `true` if the underlying file has not been opened yet, + * i.e. before the `'ready'` event is emitted. + * @since v11.2.0 + */ + pending: boolean; + /** + * events.EventEmitter + * 1. open + * 2. close + * 3. ready + */ + addListener(event: "close", listener: () => void): this; + addListener(event: "drain", listener: () => void): this; + addListener(event: "error", listener: (err: Error) => void): this; + addListener(event: "finish", listener: () => void): this; + addListener(event: "open", listener: (fd: number) => void): this; + addListener(event: "pipe", listener: (src: stream.Readable) => void): this; + addListener(event: "ready", listener: () => void): this; + addListener(event: "unpipe", listener: (src: stream.Readable) => void): this; + addListener(event: string | symbol, listener: (...args: any[]) => void): this; + on(event: "close", listener: () => void): this; + on(event: "drain", listener: () => void): this; + on(event: "error", listener: (err: Error) => void): this; + on(event: "finish", listener: () => void): this; + on(event: "open", listener: (fd: number) => void): this; + on(event: "pipe", listener: (src: stream.Readable) => void): this; + on(event: "ready", listener: () => void): this; + on(event: "unpipe", listener: (src: stream.Readable) => void): this; + on(event: string | symbol, listener: (...args: any[]) => void): this; + once(event: "close", listener: () => void): this; + once(event: "drain", listener: () => void): this; + once(event: "error", listener: (err: Error) => void): this; + once(event: "finish", listener: () => void): this; + once(event: "open", listener: (fd: number) => void): this; + once(event: "pipe", listener: (src: stream.Readable) => void): this; + once(event: "ready", listener: () => void): this; + once(event: "unpipe", listener: (src: stream.Readable) => void): this; + once(event: string | symbol, listener: (...args: any[]) => void): this; + prependListener(event: "close", listener: () => void): this; + prependListener(event: "drain", listener: () => void): this; + prependListener(event: "error", listener: (err: Error) => void): this; + prependListener(event: "finish", listener: () => void): this; + prependListener(event: "open", listener: (fd: number) => void): this; + prependListener(event: "pipe", listener: (src: stream.Readable) => void): this; + prependListener(event: "ready", listener: () => void): this; + prependListener(event: "unpipe", listener: (src: stream.Readable) => void): this; + prependListener(event: string | symbol, listener: (...args: any[]) => void): this; + prependOnceListener(event: "close", listener: () => void): this; + prependOnceListener(event: "drain", listener: () => void): this; + prependOnceListener(event: "error", listener: (err: Error) => void): this; + prependOnceListener(event: "finish", listener: () => void): this; + 
prependOnceListener(event: "open", listener: (fd: number) => void): this; + prependOnceListener(event: "pipe", listener: (src: stream.Readable) => void): this; + prependOnceListener(event: "ready", listener: () => void): this; + prependOnceListener(event: "unpipe", listener: (src: stream.Readable) => void): this; + prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; + } + /** + * Asynchronously rename file at `oldPath` to the pathname provided + * as `newPath`. In the case that `newPath` already exists, it will + * be overwritten. If there is a directory at `newPath`, an error will + * be raised instead. No arguments other than a possible exception are + * given to the completion callback. + * + * See also: [`rename(2)`](http://man7.org/linux/man-pages/man2/rename.2.html). + * + * ```js + * import { rename } from 'node:fs'; + * + * rename('oldFile.txt', 'newFile.txt', (err) => { + * if (err) throw err; + * console.log('Rename complete!'); + * }); + * ``` + * @since v0.0.2 + */ + export function rename(oldPath: PathLike, newPath: PathLike, callback: NoParamCallback): void; + export namespace rename { + /** + * Asynchronous rename(2) - Change the name or location of a file or directory. + * @param oldPath A path to a file. If a URL is provided, it must use the `file:` protocol. + * URL support is _experimental_. + * @param newPath A path to a file. If a URL is provided, it must use the `file:` protocol. + * URL support is _experimental_. + */ + function __promisify__(oldPath: PathLike, newPath: PathLike): Promise; + } + /** + * Renames the file from `oldPath` to `newPath`. Returns `undefined`. + * + * See the POSIX [`rename(2)`](http://man7.org/linux/man-pages/man2/rename.2.html) documentation for more details. + * @since v0.1.21 + */ + export function renameSync(oldPath: PathLike, newPath: PathLike): void; + /** + * Truncates the file. No arguments other than a possible exception are + * given to the completion callback. A file descriptor can also be passed as the + * first argument. In this case, `fs.ftruncate()` is called. + * + * ```js + * import { truncate } from 'node:fs'; + * // Assuming that 'path/file.txt' is a regular file. + * truncate('path/file.txt', (err) => { + * if (err) throw err; + * console.log('path/file.txt was truncated'); + * }); + * ``` + * + * Passing a file descriptor is deprecated and may result in an error being thrown + * in the future. + * + * See the POSIX [`truncate(2)`](http://man7.org/linux/man-pages/man2/truncate.2.html) documentation for more details. + * @since v0.8.6 + * @param [len=0] + */ + export function truncate(path: PathLike, len: number | undefined | null, callback: NoParamCallback): void; + /** + * Asynchronous truncate(2) - Truncate a file to a specified length. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + */ + export function truncate(path: PathLike, callback: NoParamCallback): void; + export namespace truncate { + /** + * Asynchronous truncate(2) - Truncate a file to a specified length. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param len If not specified, defaults to `0`. + */ + function __promisify__(path: PathLike, len?: number | null): Promise; + } + /** + * Truncates the file. Returns `undefined`. A file descriptor can also be + * passed as the first argument. In this case, `fs.ftruncateSync()` is called. 
+ * + * Passing a file descriptor is deprecated and may result in an error being thrown + * in the future. + * @since v0.8.6 + * @param [len=0] + */ + export function truncateSync(path: PathLike, len?: number | null): void; + /** + * Truncates the file descriptor. No arguments other than a possible exception are + * given to the completion callback. + * + * See the POSIX [`ftruncate(2)`](http://man7.org/linux/man-pages/man2/ftruncate.2.html) documentation for more detail. + * + * If the file referred to by the file descriptor was larger than `len` bytes, only + * the first `len` bytes will be retained in the file. + * + * For example, the following program retains only the first four bytes of the + * file: + * + * ```js + * import { open, close, ftruncate } from 'node:fs'; + * + * function closeFd(fd) { + * close(fd, (err) => { + * if (err) throw err; + * }); + * } + * + * open('temp.txt', 'r+', (err, fd) => { + * if (err) throw err; + * + * try { + * ftruncate(fd, 4, (err) => { + * closeFd(fd); + * if (err) throw err; + * }); + * } catch (err) { + * closeFd(fd); + * if (err) throw err; + * } + * }); + * ``` + * + * If the file previously was shorter than `len` bytes, it is extended, and the + * extended part is filled with null bytes (`'\0'`): + * + * If `len` is negative then `0` will be used. + * @since v0.8.6 + * @param [len=0] + */ + export function ftruncate(fd: number, len: number | undefined | null, callback: NoParamCallback): void; + /** + * Asynchronous ftruncate(2) - Truncate a file to a specified length. + * @param fd A file descriptor. + */ + export function ftruncate(fd: number, callback: NoParamCallback): void; + export namespace ftruncate { + /** + * Asynchronous ftruncate(2) - Truncate a file to a specified length. + * @param fd A file descriptor. + * @param len If not specified, defaults to `0`. + */ + function __promisify__(fd: number, len?: number | null): Promise; + } + /** + * Truncates the file descriptor. Returns `undefined`. + * + * For detailed information, see the documentation of the asynchronous version of + * this API: {@link ftruncate}. + * @since v0.8.6 + * @param [len=0] + */ + export function ftruncateSync(fd: number, len?: number | null): void; + /** + * Asynchronously changes owner and group of a file. No arguments other than a + * possible exception are given to the completion callback. + * + * See the POSIX [`chown(2)`](http://man7.org/linux/man-pages/man2/chown.2.html) documentation for more detail. + * @since v0.1.97 + */ + export function chown(path: PathLike, uid: number, gid: number, callback: NoParamCallback): void; + export namespace chown { + /** + * Asynchronous chown(2) - Change ownership of a file. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + */ + function __promisify__(path: PathLike, uid: number, gid: number): Promise; + } + /** + * Synchronously changes owner and group of a file. Returns `undefined`. + * This is the synchronous version of {@link chown}. + * + * See the POSIX [`chown(2)`](http://man7.org/linux/man-pages/man2/chown.2.html) documentation for more detail. + * @since v0.1.97 + */ + export function chownSync(path: PathLike, uid: number, gid: number): void; + /** + * Sets the owner of the file. No arguments other than a possible exception are + * given to the completion callback. + * + * See the POSIX [`fchown(2)`](http://man7.org/linux/man-pages/man2/fchown.2.html) documentation for more detail. 
+ * @since v0.4.7 + */ + export function fchown(fd: number, uid: number, gid: number, callback: NoParamCallback): void; + export namespace fchown { + /** + * Asynchronous fchown(2) - Change ownership of a file. + * @param fd A file descriptor. + */ + function __promisify__(fd: number, uid: number, gid: number): Promise; + } + /** + * Sets the owner of the file. Returns `undefined`. + * + * See the POSIX [`fchown(2)`](http://man7.org/linux/man-pages/man2/fchown.2.html) documentation for more detail. + * @since v0.4.7 + * @param uid The file's new owner's user id. + * @param gid The file's new group's group id. + */ + export function fchownSync(fd: number, uid: number, gid: number): void; + /** + * Set the owner of the symbolic link. No arguments other than a possible + * exception are given to the completion callback. + * + * See the POSIX [`lchown(2)`](http://man7.org/linux/man-pages/man2/lchown.2.html) documentation for more detail. + */ + export function lchown(path: PathLike, uid: number, gid: number, callback: NoParamCallback): void; + export namespace lchown { + /** + * Asynchronous lchown(2) - Change ownership of a file. Does not dereference symbolic links. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + */ + function __promisify__(path: PathLike, uid: number, gid: number): Promise; + } + /** + * Set the owner for the path. Returns `undefined`. + * + * See the POSIX [`lchown(2)`](http://man7.org/linux/man-pages/man2/lchown.2.html) documentation for more details. + * @param uid The file's new owner's user id. + * @param gid The file's new group's group id. + */ + export function lchownSync(path: PathLike, uid: number, gid: number): void; + /** + * Changes the access and modification times of a file in the same way as {@link utimes}, with the difference that if the path refers to a symbolic + * link, then the link is not dereferenced: instead, the timestamps of the + * symbolic link itself are changed. + * + * No arguments other than a possible exception are given to the completion + * callback. + * @since v14.5.0, v12.19.0 + */ + export function lutimes(path: PathLike, atime: TimeLike, mtime: TimeLike, callback: NoParamCallback): void; + export namespace lutimes { + /** + * Changes the access and modification times of a file in the same way as `fsPromises.utimes()`, + * with the difference that if the path refers to a symbolic link, then the link is not + * dereferenced: instead, the timestamps of the symbolic link itself are changed. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param atime The last access time. If a string is provided, it will be coerced to number. + * @param mtime The last modified time. If a string is provided, it will be coerced to number. + */ + function __promisify__(path: PathLike, atime: TimeLike, mtime: TimeLike): Promise; + } + /** + * Change the file system timestamps of the symbolic link referenced by `path`. + * Returns `undefined`, or throws an exception when parameters are incorrect or + * the operation fails. This is the synchronous version of {@link lutimes}. + * @since v14.5.0, v12.19.0 + */ + export function lutimesSync(path: PathLike, atime: TimeLike, mtime: TimeLike): void; + /** + * Asynchronously changes the permissions of a file. No arguments other than a + * possible exception are given to the completion callback. + * + * See the POSIX [`chmod(2)`](http://man7.org/linux/man-pages/man2/chmod.2.html) documentation for more detail. 
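A brief sketch of the ownership and symlink-timestamp calls documented above. The paths and numeric ids are hypothetical; changing a file's owner generally requires elevated privileges, and the example assumes `app.log` exists while `current.log` does not.

```js
import { chownSync, lutimesSync, symlinkSync } from 'node:fs';

// Hypothetical uid/gid; will typically fail without sufficient privileges.
chownSync('app.log', 1000, 1000);

// Touch only the symlink itself, leaving the target's timestamps alone.
symlinkSync('app.log', 'current.log');
lutimesSync('current.log', new Date(), new Date());
```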
+ * + * ```js + * import { chmod } from 'node:fs'; + * + * chmod('my_file.txt', 0o775, (err) => { + * if (err) throw err; + * console.log('The permissions for file "my_file.txt" have been changed!'); + * }); + * ``` + * @since v0.1.30 + */ + export function chmod(path: PathLike, mode: Mode, callback: NoParamCallback): void; + export namespace chmod { + /** + * Asynchronous chmod(2) - Change permissions of a file. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param mode A file mode. If a string is passed, it is parsed as an octal integer. + */ + function __promisify__(path: PathLike, mode: Mode): Promise; + } + /** + * For detailed information, see the documentation of the asynchronous version of + * this API: {@link chmod}. + * + * See the POSIX [`chmod(2)`](http://man7.org/linux/man-pages/man2/chmod.2.html) documentation for more detail. + * @since v0.6.7 + */ + export function chmodSync(path: PathLike, mode: Mode): void; + /** + * Sets the permissions on the file. No arguments other than a possible exception + * are given to the completion callback. + * + * See the POSIX [`fchmod(2)`](http://man7.org/linux/man-pages/man2/fchmod.2.html) documentation for more detail. + * @since v0.4.7 + */ + export function fchmod(fd: number, mode: Mode, callback: NoParamCallback): void; + export namespace fchmod { + /** + * Asynchronous fchmod(2) - Change permissions of a file. + * @param fd A file descriptor. + * @param mode A file mode. If a string is passed, it is parsed as an octal integer. + */ + function __promisify__(fd: number, mode: Mode): Promise; + } + /** + * Sets the permissions on the file. Returns `undefined`. + * + * See the POSIX [`fchmod(2)`](http://man7.org/linux/man-pages/man2/fchmod.2.html) documentation for more detail. + * @since v0.4.7 + */ + export function fchmodSync(fd: number, mode: Mode): void; + /** + * Changes the permissions on a symbolic link. No arguments other than a possible + * exception are given to the completion callback. + * + * This method is only implemented on macOS. + * + * See the POSIX [`lchmod(2)`](https://www.freebsd.org/cgi/man.cgi?query=lchmod&sektion=2) documentation for more detail. + * @deprecated Since v0.4.7 + */ + export function lchmod(path: PathLike, mode: Mode, callback: NoParamCallback): void; + /** @deprecated */ + export namespace lchmod { + /** + * Asynchronous lchmod(2) - Change permissions of a file. Does not dereference symbolic links. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param mode A file mode. If a string is passed, it is parsed as an octal integer. + */ + function __promisify__(path: PathLike, mode: Mode): Promise; + } + /** + * Changes the permissions on a symbolic link. Returns `undefined`. + * + * This method is only implemented on macOS. + * + * See the POSIX [`lchmod(2)`](https://www.freebsd.org/cgi/man.cgi?query=lchmod&sektion=2) documentation for more detail. + * @deprecated Since v0.4.7 + */ + export function lchmodSync(path: PathLike, mode: Mode): void; + /** + * Asynchronous [`stat(2)`](http://man7.org/linux/man-pages/man2/stat.2.html). The callback gets two arguments `(err, stats)` where`stats` is an `fs.Stats` object. + * + * In case of an error, the `err.code` will be one of `Common System Errors`. + * + * {@link stat} follows symbolic links. Use {@link lstat} to look at the + * links themselves. 
+ * + * Using `fs.stat()` to check for the existence of a file before calling`fs.open()`, `fs.readFile()`, or `fs.writeFile()` is not recommended. + * Instead, user code should open/read/write the file directly and handle the + * error raised if the file is not available. + * + * To check if a file exists without manipulating it afterwards, {@link access} is recommended. + * + * For example, given the following directory structure: + * + * ```text + * - txtDir + * -- file.txt + * - app.js + * ``` + * + * The next program will check for the stats of the given paths: + * + * ```js + * import { stat } from 'node:fs'; + * + * const pathsToCheck = ['./txtDir', './txtDir/file.txt']; + * + * for (let i = 0; i < pathsToCheck.length; i++) { + * stat(pathsToCheck[i], (err, stats) => { + * console.log(stats.isDirectory()); + * console.log(stats); + * }); + * } + * ``` + * + * The resulting output will resemble: + * + * ```console + * true + * Stats { + * dev: 16777220, + * mode: 16877, + * nlink: 3, + * uid: 501, + * gid: 20, + * rdev: 0, + * blksize: 4096, + * ino: 14214262, + * size: 96, + * blocks: 0, + * atimeMs: 1561174653071.963, + * mtimeMs: 1561174614583.3518, + * ctimeMs: 1561174626623.5366, + * birthtimeMs: 1561174126937.2893, + * atime: 2019-06-22T03:37:33.072Z, + * mtime: 2019-06-22T03:36:54.583Z, + * ctime: 2019-06-22T03:37:06.624Z, + * birthtime: 2019-06-22T03:28:46.937Z + * } + * false + * Stats { + * dev: 16777220, + * mode: 33188, + * nlink: 1, + * uid: 501, + * gid: 20, + * rdev: 0, + * blksize: 4096, + * ino: 14214074, + * size: 8, + * blocks: 8, + * atimeMs: 1561174616618.8555, + * mtimeMs: 1561174614584, + * ctimeMs: 1561174614583.8145, + * birthtimeMs: 1561174007710.7478, + * atime: 2019-06-22T03:36:56.619Z, + * mtime: 2019-06-22T03:36:54.584Z, + * ctime: 2019-06-22T03:36:54.584Z, + * birthtime: 2019-06-22T03:26:47.711Z + * } + * ``` + * @since v0.0.2 + */ + export function stat(path: PathLike, callback: (err: NodeJS.ErrnoException | null, stats: Stats) => void): void; + export function stat( + path: PathLike, + options: + | (StatOptions & { + bigint?: false | undefined; + }) + | undefined, + callback: (err: NodeJS.ErrnoException | null, stats: Stats) => void, + ): void; + export function stat( + path: PathLike, + options: StatOptions & { + bigint: true; + }, + callback: (err: NodeJS.ErrnoException | null, stats: BigIntStats) => void, + ): void; + export function stat( + path: PathLike, + options: StatOptions | undefined, + callback: (err: NodeJS.ErrnoException | null, stats: Stats | BigIntStats) => void, + ): void; + export namespace stat { + /** + * Asynchronous stat(2) - Get file status. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. 
+ */ + function __promisify__( + path: PathLike, + options?: StatOptions & { + bigint?: false | undefined; + }, + ): Promise; + function __promisify__( + path: PathLike, + options: StatOptions & { + bigint: true; + }, + ): Promise; + function __promisify__(path: PathLike, options?: StatOptions): Promise; + } + export interface StatSyncFn extends Function { + (path: PathLike, options?: undefined): Stats; + ( + path: PathLike, + options?: StatSyncOptions & { + bigint?: false | undefined; + throwIfNoEntry: false; + }, + ): Stats | undefined; + ( + path: PathLike, + options: StatSyncOptions & { + bigint: true; + throwIfNoEntry: false; + }, + ): BigIntStats | undefined; + ( + path: PathLike, + options?: StatSyncOptions & { + bigint?: false | undefined; + }, + ): Stats; + ( + path: PathLike, + options: StatSyncOptions & { + bigint: true; + }, + ): BigIntStats; + ( + path: PathLike, + options: StatSyncOptions & { + bigint: boolean; + throwIfNoEntry?: false | undefined; + }, + ): Stats | BigIntStats; + (path: PathLike, options?: StatSyncOptions): Stats | BigIntStats | undefined; + } + /** + * Synchronous stat(2) - Get file status. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + */ + export const statSync: StatSyncFn; + /** + * Invokes the callback with the `fs.Stats` for the file descriptor. + * + * See the POSIX [`fstat(2)`](http://man7.org/linux/man-pages/man2/fstat.2.html) documentation for more detail. + * @since v0.1.95 + */ + export function fstat(fd: number, callback: (err: NodeJS.ErrnoException | null, stats: Stats) => void): void; + export function fstat( + fd: number, + options: + | (StatOptions & { + bigint?: false | undefined; + }) + | undefined, + callback: (err: NodeJS.ErrnoException | null, stats: Stats) => void, + ): void; + export function fstat( + fd: number, + options: StatOptions & { + bigint: true; + }, + callback: (err: NodeJS.ErrnoException | null, stats: BigIntStats) => void, + ): void; + export function fstat( + fd: number, + options: StatOptions | undefined, + callback: (err: NodeJS.ErrnoException | null, stats: Stats | BigIntStats) => void, + ): void; + export namespace fstat { + /** + * Asynchronous fstat(2) - Get file status. + * @param fd A file descriptor. + */ + function __promisify__( + fd: number, + options?: StatOptions & { + bigint?: false | undefined; + }, + ): Promise; + function __promisify__( + fd: number, + options: StatOptions & { + bigint: true; + }, + ): Promise; + function __promisify__(fd: number, options?: StatOptions): Promise; + } + /** + * Retrieves the `fs.Stats` for the file descriptor. + * + * See the POSIX [`fstat(2)`](http://man7.org/linux/man-pages/man2/fstat.2.html) documentation for more detail. + * @since v0.1.95 + */ + export function fstatSync( + fd: number, + options?: StatOptions & { + bigint?: false | undefined; + }, + ): Stats; + export function fstatSync( + fd: number, + options: StatOptions & { + bigint: true; + }, + ): BigIntStats; + export function fstatSync(fd: number, options?: StatOptions): Stats | BigIntStats; + /** + * Retrieves the `fs.Stats` for the symbolic link referred to by the path. + * The callback gets two arguments `(err, stats)` where `stats` is a `fs.Stats` object. `lstat()` is identical to `stat()`, except that if `path` is a symbolic + * link, then the link itself is stat-ed, not the file that it refers to. + * + * See the POSIX [`lstat(2)`](http://man7.org/linux/man-pages/man2/lstat.2.html) documentation for more details. 
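To illustrate the `stat`/`statSync` overloads typed above, a sketch of the `bigint` and `throwIfNoEntry` options; `package.json` and `might-not-exist.txt` are placeholder paths.

```js
import { stat, statSync } from 'node:fs';

// With `bigint: true` the numeric fields come back as BigInt values,
// including the nanosecond-precision *Ns timestamps.
stat('package.json', { bigint: true }, (err, stats) => {
  if (err) throw err;
  console.log(stats.size, stats.mtimeNs);
});

// With `throwIfNoEntry: false`, a missing path yields `undefined`
// instead of throwing ENOENT.
const maybeStats = statSync('might-not-exist.txt', { throwIfNoEntry: false });
console.log(maybeStats?.isFile() ?? 'no such file');
```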
+ * @since v0.1.30 + */ + export function lstat(path: PathLike, callback: (err: NodeJS.ErrnoException | null, stats: Stats) => void): void; + export function lstat( + path: PathLike, + options: + | (StatOptions & { + bigint?: false | undefined; + }) + | undefined, + callback: (err: NodeJS.ErrnoException | null, stats: Stats) => void, + ): void; + export function lstat( + path: PathLike, + options: StatOptions & { + bigint: true; + }, + callback: (err: NodeJS.ErrnoException | null, stats: BigIntStats) => void, + ): void; + export function lstat( + path: PathLike, + options: StatOptions | undefined, + callback: (err: NodeJS.ErrnoException | null, stats: Stats | BigIntStats) => void, + ): void; + export namespace lstat { + /** + * Asynchronous lstat(2) - Get file status. Does not dereference symbolic links. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + */ + function __promisify__( + path: PathLike, + options?: StatOptions & { + bigint?: false | undefined; + }, + ): Promise; + function __promisify__( + path: PathLike, + options: StatOptions & { + bigint: true; + }, + ): Promise; + function __promisify__(path: PathLike, options?: StatOptions): Promise; + } + /** + * Asynchronous [`statfs(2)`](http://man7.org/linux/man-pages/man2/statfs.2.html). Returns information about the mounted file system which + * contains `path`. The callback gets two arguments `(err, stats)` where `stats`is an `fs.StatFs` object. + * + * In case of an error, the `err.code` will be one of `Common System Errors`. + * @since v19.6.0, v18.15.0 + * @param path A path to an existing file or directory on the file system to be queried. + */ + export function statfs(path: PathLike, callback: (err: NodeJS.ErrnoException | null, stats: StatsFs) => void): void; + export function statfs( + path: PathLike, + options: + | (StatFsOptions & { + bigint?: false | undefined; + }) + | undefined, + callback: (err: NodeJS.ErrnoException | null, stats: StatsFs) => void, + ): void; + export function statfs( + path: PathLike, + options: StatFsOptions & { + bigint: true; + }, + callback: (err: NodeJS.ErrnoException | null, stats: BigIntStatsFs) => void, + ): void; + export function statfs( + path: PathLike, + options: StatFsOptions | undefined, + callback: (err: NodeJS.ErrnoException | null, stats: StatsFs | BigIntStatsFs) => void, + ): void; + export namespace statfs { + /** + * Asynchronous statfs(2) - Returns information about the mounted file system which contains path. The callback gets two arguments (err, stats) where stats is an object. + * @param path A path to an existing file or directory on the file system to be queried. + */ + function __promisify__( + path: PathLike, + options?: StatFsOptions & { + bigint?: false | undefined; + }, + ): Promise; + function __promisify__( + path: PathLike, + options: StatFsOptions & { + bigint: true; + }, + ): Promise; + function __promisify__(path: PathLike, options?: StatFsOptions): Promise; + } + /** + * Synchronous [`statfs(2)`](http://man7.org/linux/man-pages/man2/statfs.2.html). Returns information about the mounted file system which + * contains `path`. + * + * In case of an error, the `err.code` will be one of `Common System Errors`. + * @since v19.6.0, v18.15.0 + * @param path A path to an existing file or directory on the file system to be queried. 
+ */ + export function statfsSync( + path: PathLike, + options?: StatFsOptions & { + bigint?: false | undefined; + }, + ): StatsFs; + export function statfsSync( + path: PathLike, + options: StatFsOptions & { + bigint: true; + }, + ): BigIntStatsFs; + export function statfsSync(path: PathLike, options?: StatFsOptions): StatsFs | BigIntStatsFs; + /** + * Synchronous lstat(2) - Get file status. Does not dereference symbolic links. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + */ + export const lstatSync: StatSyncFn; + /** + * Creates a new link from the `existingPath` to the `newPath`. See the POSIX [`link(2)`](http://man7.org/linux/man-pages/man2/link.2.html) documentation for more detail. No arguments other than + * a possible + * exception are given to the completion callback. + * @since v0.1.31 + */ + export function link(existingPath: PathLike, newPath: PathLike, callback: NoParamCallback): void; + export namespace link { + /** + * Asynchronous link(2) - Create a new link (also known as a hard link) to an existing file. + * @param existingPath A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param newPath A path to a file. If a URL is provided, it must use the `file:` protocol. + */ + function __promisify__(existingPath: PathLike, newPath: PathLike): Promise; + } + /** + * Creates a new link from the `existingPath` to the `newPath`. See the POSIX [`link(2)`](http://man7.org/linux/man-pages/man2/link.2.html) documentation for more detail. Returns `undefined`. + * @since v0.1.31 + */ + export function linkSync(existingPath: PathLike, newPath: PathLike): void; + /** + * Creates the link called `path` pointing to `target`. No arguments other than a + * possible exception are given to the completion callback. + * + * See the POSIX [`symlink(2)`](http://man7.org/linux/man-pages/man2/symlink.2.html) documentation for more details. + * + * The `type` argument is only available on Windows and ignored on other platforms. + * It can be set to `'dir'`, `'file'`, or `'junction'`. If the `type` argument is + * not a string, Node.js will autodetect `target` type and use `'file'` or `'dir'`. + * If the `target` does not exist, `'file'` will be used. Windows junction points + * require the destination path to be absolute. When using `'junction'`, the`target` argument will automatically be normalized to absolute path. Junction + * points on NTFS volumes can only point to directories. + * + * Relative targets are relative to the link's parent directory. + * + * ```js + * import { symlink } from 'node:fs'; + * + * symlink('./mew', './mewtwo', callback); + * ``` + * + * The above example creates a symbolic link `mewtwo` which points to `mew` in the + * same directory: + * + * ```bash + * $ tree . + * . + * ├── mew + * └── mewtwo -> ./mew + * ``` + * @since v0.1.31 + * @param [type='null'] + */ + export function symlink( + target: PathLike, + path: PathLike, + type: symlink.Type | undefined | null, + callback: NoParamCallback, + ): void; + /** + * Asynchronous symlink(2) - Create a new symbolic link to an existing file. + * @param target A path to an existing file. If a URL is provided, it must use the `file:` protocol. + * @param path A path to the new symlink. If a URL is provided, it must use the `file:` protocol. + */ + export function symlink(target: PathLike, path: PathLike, callback: NoParamCallback): void; + export namespace symlink { + /** + * Asynchronous symlink(2) - Create a new symbolic link to an existing file. 
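A small sketch of the `statfs` callback API documented above, assuming Node.js 19.6.0 / 18.15.0 or later (per the `@since` tags) and using the root path only as an example.

```js
import { statfs } from 'node:fs';

// Reports totals for the file system containing the given path.
statfs('/', (err, stats) => {
  if (err) throw err;
  const freeBytes = stats.bavail * stats.bsize;
  console.log(`block size: ${stats.bsize}, free for unprivileged users: ${freeBytes} bytes`);
});
```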
+ * @param target A path to an existing file. If a URL is provided, it must use the `file:` protocol. + * @param path A path to the new symlink. If a URL is provided, it must use the `file:` protocol. + * @param type May be set to `'dir'`, `'file'`, or `'junction'` (default is `'file'`) and is only available on Windows (ignored on other platforms). + * When using `'junction'`, the `target` argument will automatically be normalized to an absolute path. + */ + function __promisify__(target: PathLike, path: PathLike, type?: string | null): Promise; + type Type = "dir" | "file" | "junction"; + } + /** + * Returns `undefined`. + * + * For detailed information, see the documentation of the asynchronous version of + * this API: {@link symlink}. + * @since v0.1.31 + * @param [type='null'] + */ + export function symlinkSync(target: PathLike, path: PathLike, type?: symlink.Type | null): void; + /** + * Reads the contents of the symbolic link referred to by `path`. The callback gets + * two arguments `(err, linkString)`. + * + * See the POSIX [`readlink(2)`](http://man7.org/linux/man-pages/man2/readlink.2.html) documentation for more details. + * + * The optional `options` argument can be a string specifying an encoding, or an + * object with an `encoding` property specifying the character encoding to use for + * the link path passed to the callback. If the `encoding` is set to `'buffer'`, + * the link path returned will be passed as a `Buffer` object. + * @since v0.1.31 + */ + export function readlink( + path: PathLike, + options: EncodingOption, + callback: (err: NodeJS.ErrnoException | null, linkString: string) => void, + ): void; + /** + * Asynchronous readlink(2) - read value of a symbolic link. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + export function readlink( + path: PathLike, + options: BufferEncodingOption, + callback: (err: NodeJS.ErrnoException | null, linkString: Buffer) => void, + ): void; + /** + * Asynchronous readlink(2) - read value of a symbolic link. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + export function readlink( + path: PathLike, + options: EncodingOption, + callback: (err: NodeJS.ErrnoException | null, linkString: string | Buffer) => void, + ): void; + /** + * Asynchronous readlink(2) - read value of a symbolic link. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + */ + export function readlink( + path: PathLike, + callback: (err: NodeJS.ErrnoException | null, linkString: string) => void, + ): void; + export namespace readlink { + /** + * Asynchronous readlink(2) - read value of a symbolic link. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function __promisify__(path: PathLike, options?: EncodingOption): Promise; + /** + * Asynchronous readlink(2) - read value of a symbolic link. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. 
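A sketch combining `symlinkSync` and `readlinkSync` as documented above, reusing the `mew`/`mewtwo` names from the example in the comments; it assumes a POSIX system where `./mew` exists and `./mewtwo` does not.

```js
import { symlinkSync, readlinkSync } from 'node:fs';

// Create ./mewtwo pointing at ./mew, then read the link text back.
symlinkSync('./mew', './mewtwo');
console.log(readlinkSync('./mewtwo'));           // './mew' (the stored link text)
console.log(readlinkSync('./mewtwo', 'buffer')); // the same value as a Buffer
```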
+ * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function __promisify__(path: PathLike, options: BufferEncodingOption): Promise; + /** + * Asynchronous readlink(2) - read value of a symbolic link. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function __promisify__(path: PathLike, options?: EncodingOption): Promise; + } + /** + * Returns the symbolic link's string value. + * + * See the POSIX [`readlink(2)`](http://man7.org/linux/man-pages/man2/readlink.2.html) documentation for more details. + * + * The optional `options` argument can be a string specifying an encoding, or an + * object with an `encoding` property specifying the character encoding to use for + * the link path returned. If the `encoding` is set to `'buffer'`, + * the link path returned will be passed as a `Buffer` object. + * @since v0.1.31 + */ + export function readlinkSync(path: PathLike, options?: EncodingOption): string; + /** + * Synchronous readlink(2) - read value of a symbolic link. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + export function readlinkSync(path: PathLike, options: BufferEncodingOption): Buffer; + /** + * Synchronous readlink(2) - read value of a symbolic link. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + export function readlinkSync(path: PathLike, options?: EncodingOption): string | Buffer; + /** + * Asynchronously computes the canonical pathname by resolving `.`, `..`, and + * symbolic links. + * + * A canonical pathname is not necessarily unique. Hard links and bind mounts can + * expose a file system entity through many pathnames. + * + * This function behaves like [`realpath(3)`](http://man7.org/linux/man-pages/man3/realpath.3.html), with some exceptions: + * + * 1. No case conversion is performed on case-insensitive file systems. + * 2. The maximum number of symbolic links is platform-independent and generally + * (much) higher than what the native [`realpath(3)`](http://man7.org/linux/man-pages/man3/realpath.3.html) implementation supports. + * + * The `callback` gets two arguments `(err, resolvedPath)`. May use `process.cwd` to resolve relative paths. + * + * Only paths that can be converted to UTF8 strings are supported. + * + * The optional `options` argument can be a string specifying an encoding, or an + * object with an `encoding` property specifying the character encoding to use for + * the path passed to the callback. If the `encoding` is set to `'buffer'`, + * the path returned will be passed as a `Buffer` object. + * + * If `path` resolves to a socket or a pipe, the function will return a system + * dependent name for that object. + * @since v0.1.31 + */ + export function realpath( + path: PathLike, + options: EncodingOption, + callback: (err: NodeJS.ErrnoException | null, resolvedPath: string) => void, + ): void; + /** + * Asynchronous realpath(3) - return the canonicalized absolute pathname. 
+ * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + export function realpath( + path: PathLike, + options: BufferEncodingOption, + callback: (err: NodeJS.ErrnoException | null, resolvedPath: Buffer) => void, + ): void; + /** + * Asynchronous realpath(3) - return the canonicalized absolute pathname. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + export function realpath( + path: PathLike, + options: EncodingOption, + callback: (err: NodeJS.ErrnoException | null, resolvedPath: string | Buffer) => void, + ): void; + /** + * Asynchronous realpath(3) - return the canonicalized absolute pathname. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + */ + export function realpath( + path: PathLike, + callback: (err: NodeJS.ErrnoException | null, resolvedPath: string) => void, + ): void; + export namespace realpath { + /** + * Asynchronous realpath(3) - return the canonicalized absolute pathname. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function __promisify__(path: PathLike, options?: EncodingOption): Promise; + /** + * Asynchronous realpath(3) - return the canonicalized absolute pathname. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function __promisify__(path: PathLike, options: BufferEncodingOption): Promise; + /** + * Asynchronous realpath(3) - return the canonicalized absolute pathname. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function __promisify__(path: PathLike, options?: EncodingOption): Promise; + /** + * Asynchronous [`realpath(3)`](http://man7.org/linux/man-pages/man3/realpath.3.html). + * + * The `callback` gets two arguments `(err, resolvedPath)`. + * + * Only paths that can be converted to UTF8 strings are supported. + * + * The optional `options` argument can be a string specifying an encoding, or an + * object with an `encoding` property specifying the character encoding to use for + * the path passed to the callback. If the `encoding` is set to `'buffer'`, + * the path returned will be passed as a `Buffer` object. + * + * On Linux, when Node.js is linked against musl libc, the procfs file system must + * be mounted on `/proc` in order for this function to work. Glibc does not have + * this restriction. 
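To show the `realpath` overloads above, a sketch resolving the current directory both as a UTF-8 string and, via the `'buffer'` encoding, as a `Buffer`; the printed path is of course machine-dependent.

```js
import { realpath } from 'node:fs';

// Resolve '.', '..', and symlinks to a canonical absolute path.
realpath('.', (err, resolved) => {
  if (err) throw err;
  console.log(resolved); // e.g. '/home/user/project'
});

// With `encoding: 'buffer'` the resolved path is delivered as a Buffer.
realpath('.', { encoding: 'buffer' }, (err, resolved) => {
  if (err) throw err;
  console.log(Buffer.isBuffer(resolved)); // true
});
```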
+ * @since v9.2.0 + */ + function native( + path: PathLike, + options: EncodingOption, + callback: (err: NodeJS.ErrnoException | null, resolvedPath: string) => void, + ): void; + function native( + path: PathLike, + options: BufferEncodingOption, + callback: (err: NodeJS.ErrnoException | null, resolvedPath: Buffer) => void, + ): void; + function native( + path: PathLike, + options: EncodingOption, + callback: (err: NodeJS.ErrnoException | null, resolvedPath: string | Buffer) => void, + ): void; + function native( + path: PathLike, + callback: (err: NodeJS.ErrnoException | null, resolvedPath: string) => void, + ): void; + } + /** + * Returns the resolved pathname. + * + * For detailed information, see the documentation of the asynchronous version of + * this API: {@link realpath}. + * @since v0.1.31 + */ + export function realpathSync(path: PathLike, options?: EncodingOption): string; + /** + * Synchronous realpath(3) - return the canonicalized absolute pathname. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + export function realpathSync(path: PathLike, options: BufferEncodingOption): Buffer; + /** + * Synchronous realpath(3) - return the canonicalized absolute pathname. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + export function realpathSync(path: PathLike, options?: EncodingOption): string | Buffer; + export namespace realpathSync { + function native(path: PathLike, options?: EncodingOption): string; + function native(path: PathLike, options: BufferEncodingOption): Buffer; + function native(path: PathLike, options?: EncodingOption): string | Buffer; + } + /** + * Asynchronously removes a file or symbolic link. No arguments other than a + * possible exception are given to the completion callback. + * + * ```js + * import { unlink } from 'node:fs'; + * // Assuming that 'path/file.txt' is a regular file. + * unlink('path/file.txt', (err) => { + * if (err) throw err; + * console.log('path/file.txt was deleted'); + * }); + * ``` + * + * `fs.unlink()` will not work on a directory, empty or otherwise. To remove a + * directory, use {@link rmdir}. + * + * See the POSIX [`unlink(2)`](http://man7.org/linux/man-pages/man2/unlink.2.html) documentation for more details. + * @since v0.0.2 + */ + export function unlink(path: PathLike, callback: NoParamCallback): void; + export namespace unlink { + /** + * Asynchronous unlink(2) - delete a name and possibly the file it refers to. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + */ + function __promisify__(path: PathLike): Promise; + } + /** + * Synchronous [`unlink(2)`](http://man7.org/linux/man-pages/man2/unlink.2.html). Returns `undefined`. + * @since v0.1.21 + */ + export function unlinkSync(path: PathLike): void; + export interface RmDirOptions { + /** + * If an `EBUSY`, `EMFILE`, `ENFILE`, `ENOTEMPTY`, or + * `EPERM` error is encountered, Node.js will retry the operation with a linear + * backoff wait of `retryDelay` ms longer on each try. This option represents the + * number of retries. This option is ignored if the `recursive` option is not + * `true`. 
+ * @default 0 + */ + maxRetries?: number | undefined; + /** + * @deprecated since v14.14.0 In future versions of Node.js and will trigger a warning + * `fs.rmdir(path, { recursive: true })` will throw if `path` does not exist or is a file. + * Use `fs.rm(path, { recursive: true, force: true })` instead. + * + * If `true`, perform a recursive directory removal. In + * recursive mode, operations are retried on failure. + * @default false + */ + recursive?: boolean | undefined; + /** + * The amount of time in milliseconds to wait between retries. + * This option is ignored if the `recursive` option is not `true`. + * @default 100 + */ + retryDelay?: number | undefined; + } + /** + * Asynchronous [`rmdir(2)`](http://man7.org/linux/man-pages/man2/rmdir.2.html). No arguments other than a possible exception are given + * to the completion callback. + * + * Using `fs.rmdir()` on a file (not a directory) results in an `ENOENT` error on + * Windows and an `ENOTDIR` error on POSIX. + * + * To get a behavior similar to the `rm -rf` Unix command, use {@link rm} with options `{ recursive: true, force: true }`. + * @since v0.0.2 + */ + export function rmdir(path: PathLike, callback: NoParamCallback): void; + export function rmdir(path: PathLike, options: RmDirOptions, callback: NoParamCallback): void; + export namespace rmdir { + /** + * Asynchronous rmdir(2) - delete a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + */ + function __promisify__(path: PathLike, options?: RmDirOptions): Promise; + } + /** + * Synchronous [`rmdir(2)`](http://man7.org/linux/man-pages/man2/rmdir.2.html). Returns `undefined`. + * + * Using `fs.rmdirSync()` on a file (not a directory) results in an `ENOENT` error + * on Windows and an `ENOTDIR` error on POSIX. + * + * To get a behavior similar to the `rm -rf` Unix command, use {@link rmSync} with options `{ recursive: true, force: true }`. + * @since v0.1.21 + */ + export function rmdirSync(path: PathLike, options?: RmDirOptions): void; + export interface RmOptions { + /** + * When `true`, exceptions will be ignored if `path` does not exist. + * @default false + */ + force?: boolean | undefined; + /** + * If an `EBUSY`, `EMFILE`, `ENFILE`, `ENOTEMPTY`, or + * `EPERM` error is encountered, Node.js will retry the operation with a linear + * backoff wait of `retryDelay` ms longer on each try. This option represents the + * number of retries. This option is ignored if the `recursive` option is not + * `true`. + * @default 0 + */ + maxRetries?: number | undefined; + /** + * If `true`, perform a recursive directory removal. In + * recursive mode, operations are retried on failure. + * @default false + */ + recursive?: boolean | undefined; + /** + * The amount of time in milliseconds to wait between retries. + * This option is ignored if the `recursive` option is not `true`. + * @default 100 + */ + retryDelay?: number | undefined; + } + /** + * Asynchronously removes files and directories (modeled on the standard POSIX `rm` utility). No arguments other than a possible exception are given to the + * completion callback. + * @since v14.14.0 + */ + export function rm(path: PathLike, callback: NoParamCallback): void; + export function rm(path: PathLike, options: RmOptions, callback: NoParamCallback): void; + export namespace rm { + /** + * Asynchronously removes files and directories (modeled on the standard POSIX `rm` utility). 
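A short sketch of `rm` with the `RmOptions` documented above, roughly equivalent to `rm -rf`; the `./build` directory is a placeholder.

```js
import { rm } from 'node:fs';

// Recursive removal, no error if the path is absent, and a few retries
// on transient EBUSY/EPERM errors.
rm('./build', { recursive: true, force: true, maxRetries: 3 }, (err) => {
  if (err) throw err;
  console.log('./build removed (or was already gone)');
});
```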
+ */ + function __promisify__(path: PathLike, options?: RmOptions): Promise; + } + /** + * Synchronously removes files and directories (modeled on the standard POSIX `rm` utility). Returns `undefined`. + * @since v14.14.0 + */ + export function rmSync(path: PathLike, options?: RmOptions): void; + export interface MakeDirectoryOptions { + /** + * Indicates whether parent folders should be created. + * If a folder was created, the path to the first created folder will be returned. + * @default false + */ + recursive?: boolean | undefined; + /** + * A file mode. If a string is passed, it is parsed as an octal integer. If not specified + * @default 0o777 + */ + mode?: Mode | undefined; + } + /** + * Asynchronously creates a directory. + * + * The callback is given a possible exception and, if `recursive` is `true`, the + * first directory path created, `(err[, path])`.`path` can still be `undefined` when `recursive` is `true`, if no directory was + * created (for instance, if it was previously created). + * + * The optional `options` argument can be an integer specifying `mode` (permission + * and sticky bits), or an object with a `mode` property and a `recursive` property indicating whether parent directories should be created. Calling `fs.mkdir()` when `path` is a directory that + * exists results in an error only + * when `recursive` is false. If `recursive` is false and the directory exists, + * an `EEXIST` error occurs. + * + * ```js + * import { mkdir } from 'node:fs'; + * + * // Create ./tmp/a/apple, regardless of whether ./tmp and ./tmp/a exist. + * mkdir('./tmp/a/apple', { recursive: true }, (err) => { + * if (err) throw err; + * }); + * ``` + * + * On Windows, using `fs.mkdir()` on the root directory even with recursion will + * result in an error: + * + * ```js + * import { mkdir } from 'node:fs'; + * + * mkdir('/', { recursive: true }, (err) => { + * // => [Error: EPERM: operation not permitted, mkdir 'C:\'] + * }); + * ``` + * + * See the POSIX [`mkdir(2)`](http://man7.org/linux/man-pages/man2/mkdir.2.html) documentation for more details. + * @since v0.1.8 + */ + export function mkdir( + path: PathLike, + options: MakeDirectoryOptions & { + recursive: true; + }, + callback: (err: NodeJS.ErrnoException | null, path?: string) => void, + ): void; + /** + * Asynchronous mkdir(2) - create a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options Either the file mode, or an object optionally specifying the file mode and whether parent folders + * should be created. If a string is passed, it is parsed as an octal integer. If not specified, defaults to `0o777`. + */ + export function mkdir( + path: PathLike, + options: + | Mode + | (MakeDirectoryOptions & { + recursive?: false | undefined; + }) + | null + | undefined, + callback: NoParamCallback, + ): void; + /** + * Asynchronous mkdir(2) - create a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options Either the file mode, or an object optionally specifying the file mode and whether parent folders + * should be created. If a string is passed, it is parsed as an octal integer. If not specified, defaults to `0o777`. + */ + export function mkdir( + path: PathLike, + options: Mode | MakeDirectoryOptions | null | undefined, + callback: (err: NodeJS.ErrnoException | null, path?: string) => void, + ): void; + /** + * Asynchronous mkdir(2) - create a directory with a mode of `0o777`. + * @param path A path to a file. 
If a URL is provided, it must use the `file:` protocol.
+     */
+    export function mkdir(path: PathLike, callback: NoParamCallback): void;
+    export namespace mkdir {
+        /**
+         * Asynchronous mkdir(2) - create a directory.
+         * @param path A path to a file. If a URL is provided, it must use the `file:` protocol.
+         * @param options Either the file mode, or an object optionally specifying the file mode and whether parent folders
+         * should be created. If a string is passed, it is parsed as an octal integer. If not specified, defaults to `0o777`.
+         */
+        function __promisify__(
+            path: PathLike,
+            options: MakeDirectoryOptions & {
+                recursive: true;
+            },
+        ): Promise<string | undefined>;
+        /**
+         * Asynchronous mkdir(2) - create a directory.
+         * @param path A path to a file. If a URL is provided, it must use the `file:` protocol.
+         * @param options Either the file mode, or an object optionally specifying the file mode and whether parent folders
+         * should be created. If a string is passed, it is parsed as an octal integer. If not specified, defaults to `0o777`.
+         */
+        function __promisify__(
+            path: PathLike,
+            options?:
+                | Mode
+                | (MakeDirectoryOptions & {
+                    recursive?: false | undefined;
+                })
+                | null,
+        ): Promise<void>;
+        /**
+         * Asynchronous mkdir(2) - create a directory.
+         * @param path A path to a file. If a URL is provided, it must use the `file:` protocol.
+         * @param options Either the file mode, or an object optionally specifying the file mode and whether parent folders
+         * should be created. If a string is passed, it is parsed as an octal integer. If not specified, defaults to `0o777`.
+         */
+        function __promisify__(
+            path: PathLike,
+            options?: Mode | MakeDirectoryOptions | null,
+        ): Promise<string | undefined>;
+    }
+    /**
+     * Synchronously creates a directory. Returns `undefined`, or if `recursive` is `true`, the first directory path created.
+     * This is the synchronous version of {@link mkdir}.
+     *
+     * See the POSIX [`mkdir(2)`](http://man7.org/linux/man-pages/man2/mkdir.2.html) documentation for more details.
+     * @since v0.1.21
+     */
+    export function mkdirSync(
+        path: PathLike,
+        options: MakeDirectoryOptions & {
+            recursive: true;
+        },
+    ): string | undefined;
+    /**
+     * Synchronous mkdir(2) - create a directory.
+     * @param path A path to a file. If a URL is provided, it must use the `file:` protocol.
+     * @param options Either the file mode, or an object optionally specifying the file mode and whether parent folders
+     * should be created. If a string is passed, it is parsed as an octal integer. If not specified, defaults to `0o777`.
+     */
+    export function mkdirSync(
+        path: PathLike,
+        options?:
+            | Mode
+            | (MakeDirectoryOptions & {
+                recursive?: false | undefined;
+            })
+            | null,
+    ): void;
+    /**
+     * Synchronous mkdir(2) - create a directory.
+     * @param path A path to a file. If a URL is provided, it must use the `file:` protocol.
+     * @param options Either the file mode, or an object optionally specifying the file mode and whether parent folders
+     * should be created. If a string is passed, it is parsed as an octal integer. If not specified, defaults to `0o777`.
+     */
+    export function mkdirSync(path: PathLike, options?: Mode | MakeDirectoryOptions | null): string | undefined;
+    /**
+     * Creates a unique temporary directory.
+     *
+     * Generates six random characters to be appended behind a required `prefix` to create a unique temporary directory. Due to platform
+     * inconsistencies, avoid trailing `X` characters in `prefix`.
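A sketch of the recursive behaviour described above: with `recursive: true` the first directory actually created is reported, or `undefined` when everything already existed (the paths are hypothetical):

```ts
import { mkdir, mkdirSync } from "node:fs";

// Synchronous form: returns e.g. "/tmp/a" on the first run, undefined afterwards.
const firstCreated = mkdirSync("/tmp/a/b/c", { recursive: true });
console.log(firstCreated);

// Asynchronous form with the (err, path?) callback shape declared above.
mkdir("/tmp/a/b/c", { recursive: true }, (err, path) => {
  if (err) throw err;
  console.log(path); // undefined here, since the directories now exist
});
```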
Some platforms, + * notably the BSDs, can return more than six random characters, and replace + * trailing `X` characters in `prefix` with random characters. + * + * The created directory path is passed as a string to the callback's second + * parameter. + * + * The optional `options` argument can be a string specifying an encoding, or an + * object with an `encoding` property specifying the character encoding to use. + * + * ```js + * import { mkdtemp } from 'node:fs'; + * import { join } from 'node:path'; + * import { tmpdir } from 'node:os'; + * + * mkdtemp(join(tmpdir(), 'foo-'), (err, directory) => { + * if (err) throw err; + * console.log(directory); + * // Prints: /tmp/foo-itXde2 or C:\Users\...\AppData\Local\Temp\foo-itXde2 + * }); + * ``` + * + * The `fs.mkdtemp()` method will append the six randomly selected characters + * directly to the `prefix` string. For instance, given a directory `/tmp`, if the + * intention is to create a temporary directory _within_`/tmp`, the `prefix`must end with a trailing platform-specific path separator + * (`import { sep } from 'node:path'`). + * + * ```js + * import { tmpdir } from 'node:os'; + * import { mkdtemp } from 'node:fs'; + * + * // The parent directory for the new temporary directory + * const tmpDir = tmpdir(); + * + * // This method is *INCORRECT*: + * mkdtemp(tmpDir, (err, directory) => { + * if (err) throw err; + * console.log(directory); + * // Will print something similar to `/tmpabc123`. + * // A new temporary directory is created at the file system root + * // rather than *within* the /tmp directory. + * }); + * + * // This method is *CORRECT*: + * import { sep } from 'node:path'; + * mkdtemp(`${tmpDir}${sep}`, (err, directory) => { + * if (err) throw err; + * console.log(directory); + * // Will print something similar to `/tmp/abc123`. + * // A new temporary directory is created within + * // the /tmp directory. + * }); + * ``` + * @since v5.10.0 + */ + export function mkdtemp( + prefix: string, + options: EncodingOption, + callback: (err: NodeJS.ErrnoException | null, folder: string) => void, + ): void; + /** + * Asynchronously creates a unique temporary directory. + * Generates six random characters to be appended behind a required prefix to create a unique temporary directory. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + export function mkdtemp( + prefix: string, + options: + | "buffer" + | { + encoding: "buffer"; + }, + callback: (err: NodeJS.ErrnoException | null, folder: Buffer) => void, + ): void; + /** + * Asynchronously creates a unique temporary directory. + * Generates six random characters to be appended behind a required prefix to create a unique temporary directory. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + export function mkdtemp( + prefix: string, + options: EncodingOption, + callback: (err: NodeJS.ErrnoException | null, folder: string | Buffer) => void, + ): void; + /** + * Asynchronously creates a unique temporary directory. + * Generates six random characters to be appended behind a required prefix to create a unique temporary directory. + */ + export function mkdtemp( + prefix: string, + callback: (err: NodeJS.ErrnoException | null, folder: string) => void, + ): void; + export namespace mkdtemp { + /** + * Asynchronously creates a unique temporary directory. 
+ * Generates six random characters to be appended behind a required prefix to create a unique temporary directory. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function __promisify__(prefix: string, options?: EncodingOption): Promise; + /** + * Asynchronously creates a unique temporary directory. + * Generates six random characters to be appended behind a required prefix to create a unique temporary directory. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function __promisify__(prefix: string, options: BufferEncodingOption): Promise; + /** + * Asynchronously creates a unique temporary directory. + * Generates six random characters to be appended behind a required prefix to create a unique temporary directory. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function __promisify__(prefix: string, options?: EncodingOption): Promise; + } + /** + * Returns the created directory path. + * + * For detailed information, see the documentation of the asynchronous version of + * this API: {@link mkdtemp}. + * + * The optional `options` argument can be a string specifying an encoding, or an + * object with an `encoding` property specifying the character encoding to use. + * @since v5.10.0 + */ + export function mkdtempSync(prefix: string, options?: EncodingOption): string; + /** + * Synchronously creates a unique temporary directory. + * Generates six random characters to be appended behind a required prefix to create a unique temporary directory. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + export function mkdtempSync(prefix: string, options: BufferEncodingOption): Buffer; + /** + * Synchronously creates a unique temporary directory. + * Generates six random characters to be appended behind a required prefix to create a unique temporary directory. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + export function mkdtempSync(prefix: string, options?: EncodingOption): string | Buffer; + /** + * Reads the contents of a directory. The callback gets two arguments `(err, files)` where `files` is an array of the names of the files in the directory excluding `'.'` and `'..'`. + * + * See the POSIX [`readdir(3)`](http://man7.org/linux/man-pages/man3/readdir.3.html) documentation for more details. + * + * The optional `options` argument can be a string specifying an encoding, or an + * object with an `encoding` property specifying the character encoding to use for + * the filenames passed to the callback. If the `encoding` is set to `'buffer'`, + * the filenames returned will be passed as `Buffer` objects. + * + * If `options.withFileTypes` is set to `true`, the `files` array will contain `fs.Dirent` objects. + * @since v0.1.8 + */ + export function readdir( + path: PathLike, + options: + | { + encoding: BufferEncoding | null; + withFileTypes?: false | undefined; + recursive?: boolean | undefined; + } + | BufferEncoding + | undefined + | null, + callback: (err: NodeJS.ErrnoException | null, files: string[]) => void, + ): void; + /** + * Asynchronous readdir(3) - read a directory. 
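The synchronous counterpart of the embedded `mkdtemp` example, using `path.join` to supply the trailing separator so the directory is created inside `os.tmpdir()` rather than beside it:

```ts
import { mkdtempSync, rmSync } from "node:fs";
import { tmpdir } from "node:os";
import { join } from "node:path";

const dir = mkdtempSync(join(tmpdir(), "foo-"));
console.log(dir); // e.g. /tmp/foo-itXde2

// Remove the temporary directory when finished with it.
rmSync(dir, { recursive: true, force: true });
```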
+ * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + export function readdir( + path: PathLike, + options: + | { + encoding: "buffer"; + withFileTypes?: false | undefined; + recursive?: boolean | undefined; + } + | "buffer", + callback: (err: NodeJS.ErrnoException | null, files: Buffer[]) => void, + ): void; + /** + * Asynchronous readdir(3) - read a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + export function readdir( + path: PathLike, + options: + | (ObjectEncodingOptions & { + withFileTypes?: false | undefined; + recursive?: boolean | undefined; + }) + | BufferEncoding + | undefined + | null, + callback: (err: NodeJS.ErrnoException | null, files: string[] | Buffer[]) => void, + ): void; + /** + * Asynchronous readdir(3) - read a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + */ + export function readdir( + path: PathLike, + callback: (err: NodeJS.ErrnoException | null, files: string[]) => void, + ): void; + /** + * Asynchronous readdir(3) - read a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options If called with `withFileTypes: true` the result data will be an array of Dirent. + */ + export function readdir( + path: PathLike, + options: ObjectEncodingOptions & { + withFileTypes: true; + recursive?: boolean | undefined; + }, + callback: (err: NodeJS.ErrnoException | null, files: Dirent[]) => void, + ): void; + export namespace readdir { + /** + * Asynchronous readdir(3) - read a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function __promisify__( + path: PathLike, + options?: + | { + encoding: BufferEncoding | null; + withFileTypes?: false | undefined; + recursive?: boolean | undefined; + } + | BufferEncoding + | null, + ): Promise; + /** + * Asynchronous readdir(3) - read a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function __promisify__( + path: PathLike, + options: + | "buffer" + | { + encoding: "buffer"; + withFileTypes?: false | undefined; + recursive?: boolean | undefined; + }, + ): Promise; + /** + * Asynchronous readdir(3) - read a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function __promisify__( + path: PathLike, + options?: + | (ObjectEncodingOptions & { + withFileTypes?: false | undefined; + recursive?: boolean | undefined; + }) + | BufferEncoding + | null, + ): Promise; + /** + * Asynchronous readdir(3) - read a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. 
+ * @param options If called with `withFileTypes: true` the result data will be an array of Dirent + */ + function __promisify__( + path: PathLike, + options: ObjectEncodingOptions & { + withFileTypes: true; + recursive?: boolean | undefined; + }, + ): Promise; + } + /** + * Reads the contents of the directory. + * + * See the POSIX [`readdir(3)`](http://man7.org/linux/man-pages/man3/readdir.3.html) documentation for more details. + * + * The optional `options` argument can be a string specifying an encoding, or an + * object with an `encoding` property specifying the character encoding to use for + * the filenames returned. If the `encoding` is set to `'buffer'`, + * the filenames returned will be passed as `Buffer` objects. + * + * If `options.withFileTypes` is set to `true`, the result will contain `fs.Dirent` objects. + * @since v0.1.21 + */ + export function readdirSync( + path: PathLike, + options?: + | { + encoding: BufferEncoding | null; + withFileTypes?: false | undefined; + recursive?: boolean | undefined; + } + | BufferEncoding + | null, + ): string[]; + /** + * Synchronous readdir(3) - read a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + export function readdirSync( + path: PathLike, + options: + | { + encoding: "buffer"; + withFileTypes?: false | undefined; + recursive?: boolean | undefined; + } + | "buffer", + ): Buffer[]; + /** + * Synchronous readdir(3) - read a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + export function readdirSync( + path: PathLike, + options?: + | (ObjectEncodingOptions & { + withFileTypes?: false | undefined; + recursive?: boolean | undefined; + }) + | BufferEncoding + | null, + ): string[] | Buffer[]; + /** + * Synchronous readdir(3) - read a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options If called with `withFileTypes: true` the result data will be an array of Dirent. + */ + export function readdirSync( + path: PathLike, + options: ObjectEncodingOptions & { + withFileTypes: true; + recursive?: boolean | undefined; + }, + ): Dirent[]; + /** + * Closes the file descriptor. No arguments other than a possible exception are + * given to the completion callback. + * + * Calling `fs.close()` on any file descriptor (`fd`) that is currently in use + * through any other `fs` operation may lead to undefined behavior. + * + * See the POSIX [`close(2)`](http://man7.org/linux/man-pages/man2/close.2.html) documentation for more detail. + * @since v0.0.2 + */ + export function close(fd: number, callback?: NoParamCallback): void; + export namespace close { + /** + * Asynchronous close(2) - close a file descriptor. + * @param fd A file descriptor. + */ + function __promisify__(fd: number): Promise; + } + /** + * Closes the file descriptor. Returns `undefined`. + * + * Calling `fs.closeSync()` on any file descriptor (`fd`) that is currently in use + * through any other `fs` operation may lead to undefined behavior. + * + * See the POSIX [`close(2)`](http://man7.org/linux/man-pages/man2/close.2.html) documentation for more detail. 
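A sketch of the `withFileTypes: true` form declared above, where the callback receives `fs.Dirent` objects instead of plain names (the directory path is hypothetical):

```ts
import { readdir } from "node:fs";

readdir("./src", { withFileTypes: true }, (err, entries) => {
  if (err) throw err;
  for (const entry of entries) {
    // Dirent carries the name plus type predicates such as isDirectory().
    console.log(`${entry.name}${entry.isDirectory() ? "/" : ""}`);
  }
});
```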
+ * @since v0.1.21 + */ + export function closeSync(fd: number): void; + /** + * Asynchronous file open. See the POSIX [`open(2)`](http://man7.org/linux/man-pages/man2/open.2.html) documentation for more details. + * + * `mode` sets the file mode (permission and sticky bits), but only if the file was + * created. On Windows, only the write permission can be manipulated; see {@link chmod}. + * + * The callback gets two arguments `(err, fd)`. + * + * Some characters (`< > : " / \ | ? *`) are reserved under Windows as documented + * by [Naming Files, Paths, and Namespaces](https://docs.microsoft.com/en-us/windows/desktop/FileIO/naming-a-file). Under NTFS, if the filename contains + * a colon, Node.js will open a file system stream, as described by [this MSDN page](https://docs.microsoft.com/en-us/windows/desktop/FileIO/using-streams). + * + * Functions based on `fs.open()` exhibit this behavior as well:`fs.writeFile()`, `fs.readFile()`, etc. + * @since v0.0.2 + * @param [flags='r'] See `support of file system `flags``. + * @param [mode=0o666] + */ + export function open( + path: PathLike, + flags: OpenMode | undefined, + mode: Mode | undefined | null, + callback: (err: NodeJS.ErrnoException | null, fd: number) => void, + ): void; + /** + * Asynchronous open(2) - open and possibly create a file. If the file is created, its mode will be `0o666`. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param [flags='r'] See `support of file system `flags``. + */ + export function open( + path: PathLike, + flags: OpenMode | undefined, + callback: (err: NodeJS.ErrnoException | null, fd: number) => void, + ): void; + /** + * Asynchronous open(2) - open and possibly create a file. If the file is created, its mode will be `0o666`. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + */ + export function open(path: PathLike, callback: (err: NodeJS.ErrnoException | null, fd: number) => void): void; + export namespace open { + /** + * Asynchronous open(2) - open and possibly create a file. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param mode A file mode. If a string is passed, it is parsed as an octal integer. If not supplied, defaults to `0o666`. + */ + function __promisify__(path: PathLike, flags: OpenMode, mode?: Mode | null): Promise; + } + /** + * Returns an integer representing the file descriptor. + * + * For detailed information, see the documentation of the asynchronous version of + * this API: {@link open}. + * @since v0.1.21 + * @param [flags='r'] + * @param [mode=0o666] + */ + export function openSync(path: PathLike, flags: OpenMode, mode?: Mode | null): number; + /** + * Change the file system timestamps of the object referenced by `path`. + * + * The `atime` and `mtime` arguments follow these rules: + * + * * Values can be either numbers representing Unix epoch time in seconds, `Date`s, or a numeric string like `'123456789.0'`. + * * If the value can not be converted to a number, or is `NaN`, `Infinity`, or `-Infinity`, an `Error` will be thrown. + * @since v0.4.2 + */ + export function utimes(path: PathLike, atime: TimeLike, mtime: TimeLike, callback: NoParamCallback): void; + export namespace utimes { + /** + * Asynchronously change file timestamps of the file referenced by the supplied path. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param atime The last access time. 
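A minimal open/close round trip using the overloads above; the `'wx'` flag makes the call fail with `EEXIST` if the hypothetical file already exists:

```ts
import { close, open } from "node:fs";

open("./scratch.txt", "wx", 0o600, (err, fd) => {
  if (err) throw err; // e.g. EEXIST when ./scratch.txt is already present
  // ... write through the descriptor here ...
  close(fd, (closeErr) => {
    if (closeErr) throw closeErr;
  });
});
```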
If a string is provided, it will be coerced to number. + * @param mtime The last modified time. If a string is provided, it will be coerced to number. + */ + function __promisify__(path: PathLike, atime: TimeLike, mtime: TimeLike): Promise; + } + /** + * Returns `undefined`. + * + * For detailed information, see the documentation of the asynchronous version of + * this API: {@link utimes}. + * @since v0.4.2 + */ + export function utimesSync(path: PathLike, atime: TimeLike, mtime: TimeLike): void; + /** + * Change the file system timestamps of the object referenced by the supplied file + * descriptor. See {@link utimes}. + * @since v0.4.2 + */ + export function futimes(fd: number, atime: TimeLike, mtime: TimeLike, callback: NoParamCallback): void; + export namespace futimes { + /** + * Asynchronously change file timestamps of the file referenced by the supplied file descriptor. + * @param fd A file descriptor. + * @param atime The last access time. If a string is provided, it will be coerced to number. + * @param mtime The last modified time. If a string is provided, it will be coerced to number. + */ + function __promisify__(fd: number, atime: TimeLike, mtime: TimeLike): Promise; + } + /** + * Synchronous version of {@link futimes}. Returns `undefined`. + * @since v0.4.2 + */ + export function futimesSync(fd: number, atime: TimeLike, mtime: TimeLike): void; + /** + * Request that all data for the open file descriptor is flushed to the storage + * device. The specific implementation is operating system and device specific. + * Refer to the POSIX [`fsync(2)`](http://man7.org/linux/man-pages/man2/fsync.2.html) documentation for more detail. No arguments other + * than a possible exception are given to the completion callback. + * @since v0.1.96 + */ + export function fsync(fd: number, callback: NoParamCallback): void; + export namespace fsync { + /** + * Asynchronous fsync(2) - synchronize a file's in-core state with the underlying storage device. + * @param fd A file descriptor. + */ + function __promisify__(fd: number): Promise; + } + /** + * Request that all data for the open file descriptor is flushed to the storage + * device. The specific implementation is operating system and device specific. + * Refer to the POSIX [`fsync(2)`](http://man7.org/linux/man-pages/man2/fsync.2.html) documentation for more detail. Returns `undefined`. + * @since v0.1.96 + */ + export function fsyncSync(fd: number): void; + /** + * Write `buffer` to the file specified by `fd`. + * + * `offset` determines the part of the buffer to be written, and `length` is + * an integer specifying the number of bytes to write. + * + * `position` refers to the offset from the beginning of the file where this data + * should be written. If `typeof position !== 'number'`, the data will be written + * at the current position. See [`pwrite(2)`](http://man7.org/linux/man-pages/man2/pwrite.2.html). + * + * The callback will be given three arguments `(err, bytesWritten, buffer)` where `bytesWritten` specifies how many _bytes_ were written from `buffer`. + * + * If this method is invoked as its `util.promisify()` ed version, it returns + * a promise for an `Object` with `bytesWritten` and `buffer` properties. + * + * It is unsafe to use `fs.write()` multiple times on the same file without waiting + * for the callback. For this scenario, {@link createWriteStream} is + * recommended. + * + * On Linux, positional writes don't work when the file is opened in append mode. 
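A small sketch of the timestamp and flush helpers declared above, assuming a hypothetical `./scratch.txt` that already exists:

```ts
import { closeSync, fsyncSync, futimesSync, openSync } from "node:fs";

const fd = openSync("./scratch.txt", "r+");
try {
  const now = new Date();
  futimesSync(fd, now, now); // update atime and mtime on the open descriptor
  fsyncSync(fd);             // ask the OS to flush the file's data to storage
} finally {
  closeSync(fd);
}
```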
+ * The kernel ignores the position argument and always appends the data to + * the end of the file. + * @since v0.0.2 + * @param [offset=0] + * @param [length=buffer.byteLength - offset] + * @param [position='null'] + */ + export function write( + fd: number, + buffer: TBuffer, + offset: number | undefined | null, + length: number | undefined | null, + position: number | undefined | null, + callback: (err: NodeJS.ErrnoException | null, written: number, buffer: TBuffer) => void, + ): void; + /** + * Asynchronously writes `buffer` to the file referenced by the supplied file descriptor. + * @param fd A file descriptor. + * @param offset The part of the buffer to be written. If not supplied, defaults to `0`. + * @param length The number of bytes to write. If not supplied, defaults to `buffer.length - offset`. + */ + export function write( + fd: number, + buffer: TBuffer, + offset: number | undefined | null, + length: number | undefined | null, + callback: (err: NodeJS.ErrnoException | null, written: number, buffer: TBuffer) => void, + ): void; + /** + * Asynchronously writes `buffer` to the file referenced by the supplied file descriptor. + * @param fd A file descriptor. + * @param offset The part of the buffer to be written. If not supplied, defaults to `0`. + */ + export function write( + fd: number, + buffer: TBuffer, + offset: number | undefined | null, + callback: (err: NodeJS.ErrnoException | null, written: number, buffer: TBuffer) => void, + ): void; + /** + * Asynchronously writes `buffer` to the file referenced by the supplied file descriptor. + * @param fd A file descriptor. + */ + export function write( + fd: number, + buffer: TBuffer, + callback: (err: NodeJS.ErrnoException | null, written: number, buffer: TBuffer) => void, + ): void; + /** + * Asynchronously writes `string` to the file referenced by the supplied file descriptor. + * @param fd A file descriptor. + * @param string A string to write. + * @param position The offset from the beginning of the file where this data should be written. If not supplied, defaults to the current position. + * @param encoding The expected string encoding. + */ + export function write( + fd: number, + string: string, + position: number | undefined | null, + encoding: BufferEncoding | undefined | null, + callback: (err: NodeJS.ErrnoException | null, written: number, str: string) => void, + ): void; + /** + * Asynchronously writes `string` to the file referenced by the supplied file descriptor. + * @param fd A file descriptor. + * @param string A string to write. + * @param position The offset from the beginning of the file where this data should be written. If not supplied, defaults to the current position. + */ + export function write( + fd: number, + string: string, + position: number | undefined | null, + callback: (err: NodeJS.ErrnoException | null, written: number, str: string) => void, + ): void; + /** + * Asynchronously writes `string` to the file referenced by the supplied file descriptor. + * @param fd A file descriptor. + * @param string A string to write. + */ + export function write( + fd: number, + string: string, + callback: (err: NodeJS.ErrnoException | null, written: number, str: string) => void, + ): void; + export namespace write { + /** + * Asynchronously writes `buffer` to the file referenced by the supplied file descriptor. + * @param fd A file descriptor. + * @param offset The part of the buffer to be written. If not supplied, defaults to `0`. + * @param length The number of bytes to write. 
If not supplied, defaults to `buffer.length - offset`. + * @param position The offset from the beginning of the file where this data should be written. If not supplied, defaults to the current position. + */ + function __promisify__( + fd: number, + buffer?: TBuffer, + offset?: number, + length?: number, + position?: number | null, + ): Promise<{ + bytesWritten: number; + buffer: TBuffer; + }>; + /** + * Asynchronously writes `string` to the file referenced by the supplied file descriptor. + * @param fd A file descriptor. + * @param string A string to write. + * @param position The offset from the beginning of the file where this data should be written. If not supplied, defaults to the current position. + * @param encoding The expected string encoding. + */ + function __promisify__( + fd: number, + string: string, + position?: number | null, + encoding?: BufferEncoding | null, + ): Promise<{ + bytesWritten: number; + buffer: string; + }>; + } + /** + * For detailed information, see the documentation of the asynchronous version of + * this API: {@link write}. + * @since v0.1.21 + * @param [offset=0] + * @param [length=buffer.byteLength - offset] + * @param [position='null'] + * @return The number of bytes written. + */ + export function writeSync( + fd: number, + buffer: NodeJS.ArrayBufferView, + offset?: number | null, + length?: number | null, + position?: number | null, + ): number; + /** + * Synchronously writes `string` to the file referenced by the supplied file descriptor, returning the number of bytes written. + * @param fd A file descriptor. + * @param string A string to write. + * @param position The offset from the beginning of the file where this data should be written. If not supplied, defaults to the current position. + * @param encoding The expected string encoding. + */ + export function writeSync( + fd: number, + string: string, + position?: number | null, + encoding?: BufferEncoding | null, + ): number; + export type ReadPosition = number | bigint; + export interface ReadSyncOptions { + /** + * @default 0 + */ + offset?: number | undefined; + /** + * @default `length of buffer` + */ + length?: number | undefined; + /** + * @default null + */ + position?: ReadPosition | null | undefined; + } + export interface ReadAsyncOptions extends ReadSyncOptions { + buffer?: TBuffer; + } + /** + * Read data from the file specified by `fd`. + * + * The callback is given the three arguments, `(err, bytesRead, buffer)`. + * + * If the file is not modified concurrently, the end-of-file is reached when the + * number of bytes read is zero. + * + * If this method is invoked as its `util.promisify()` ed version, it returns + * a promise for an `Object` with `bytesRead` and `buffer` properties. + * @since v0.0.2 + * @param buffer The buffer that the data will be written to. + * @param offset The position in `buffer` to write the data to. + * @param length The number of bytes to read. + * @param position Specifies where to begin reading from in the file. If `position` is `null` or `-1 `, data will be read from the current file position, and the file position will be updated. If + * `position` is an integer, the file position will be unchanged. + */ + export function read( + fd: number, + buffer: TBuffer, + offset: number, + length: number, + position: ReadPosition | null, + callback: (err: NodeJS.ErrnoException | null, bytesRead: number, buffer: TBuffer) => void, + ): void; + /** + * Similar to the above `fs.read` function, this version takes an optional `options` object. 
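An illustrative use of the two `writeSync` overloads above, one taking a buffer and one taking a string (the output path is hypothetical):

```ts
import { closeSync, openSync, writeSync } from "node:fs";
import { Buffer } from "node:buffer";

const fd = openSync("./out.bin", "w");
try {
  // Buffer overload: (fd, buffer, offset, length, position) -> bytes written.
  const buf = Buffer.from("hello world");
  const written = writeSync(fd, buf, 0, buf.length, 0);

  // String overload: (fd, string, position, encoding).
  writeSync(fd, "\n-- trailer --\n", written, "utf8");
} finally {
  closeSync(fd);
}
```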
+ * If not otherwise specified in an `options` object, + * `buffer` defaults to `Buffer.alloc(16384)`, + * `offset` defaults to `0`, + * `length` defaults to `buffer.byteLength`, `- offset` as of Node 17.6.0 + * `position` defaults to `null` + * @since v12.17.0, 13.11.0 + */ + export function read( + fd: number, + options: ReadAsyncOptions, + callback: (err: NodeJS.ErrnoException | null, bytesRead: number, buffer: TBuffer) => void, + ): void; + export function read( + fd: number, + callback: (err: NodeJS.ErrnoException | null, bytesRead: number, buffer: NodeJS.ArrayBufferView) => void, + ): void; + export namespace read { + /** + * @param fd A file descriptor. + * @param buffer The buffer that the data will be written to. + * @param offset The offset in the buffer at which to start writing. + * @param length The number of bytes to read. + * @param position The offset from the beginning of the file from which data should be read. If `null`, data will be read from the current position. + */ + function __promisify__( + fd: number, + buffer: TBuffer, + offset: number, + length: number, + position: number | null, + ): Promise<{ + bytesRead: number; + buffer: TBuffer; + }>; + function __promisify__( + fd: number, + options: ReadAsyncOptions, + ): Promise<{ + bytesRead: number; + buffer: TBuffer; + }>; + function __promisify__(fd: number): Promise<{ + bytesRead: number; + buffer: NodeJS.ArrayBufferView; + }>; + } + /** + * Returns the number of `bytesRead`. + * + * For detailed information, see the documentation of the asynchronous version of + * this API: {@link read}. + * @since v0.1.21 + * @param [position='null'] + */ + export function readSync( + fd: number, + buffer: NodeJS.ArrayBufferView, + offset: number, + length: number, + position: ReadPosition | null, + ): number; + /** + * Similar to the above `fs.readSync` function, this version takes an optional `options` object. + * If no `options` object is specified, it will default with the above values. + */ + export function readSync(fd: number, buffer: NodeJS.ArrayBufferView, opts?: ReadSyncOptions): number; + /** + * Asynchronously reads the entire contents of a file. + * + * ```js + * import { readFile } from 'node:fs'; + * + * readFile('/etc/passwd', (err, data) => { + * if (err) throw err; + * console.log(data); + * }); + * ``` + * + * The callback is passed two arguments `(err, data)`, where `data` is the + * contents of the file. + * + * If no encoding is specified, then the raw buffer is returned. + * + * If `options` is a string, then it specifies the encoding: + * + * ```js + * import { readFile } from 'node:fs'; + * + * readFile('/etc/passwd', 'utf8', callback); + * ``` + * + * When the path is a directory, the behavior of `fs.readFile()` and {@link readFileSync} is platform-specific. On macOS, Linux, and Windows, an + * error will be returned. On FreeBSD, a representation of the directory's contents + * will be returned. + * + * ```js + * import { readFile } from 'node:fs'; + * + * // macOS, Linux, and Windows + * readFile('', (err, data) => { + * // => [Error: EISDIR: illegal operation on a directory, read ] + * }); + * + * // FreeBSD + * readFile('', (err, data) => { + * // => null, + * }); + * ``` + * + * It is possible to abort an ongoing request using an `AbortSignal`. 
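A sketch of the options-object form of `read` described above, filling a caller-supplied buffer from position 0 (the path is hypothetical):

```ts
import { closeSync, openSync, read } from "node:fs";
import { Buffer } from "node:buffer";

const fd = openSync("./out.bin", "r");
const buffer = Buffer.alloc(64);

read(fd, { buffer, offset: 0, length: buffer.length, position: 0 }, (err, bytesRead, buf) => {
  closeSync(fd);
  if (err) throw err;
  console.log(`${bytesRead} bytes:`, buf.subarray(0, bytesRead).toString("utf8"));
});
```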
If a + * request is aborted the callback is called with an `AbortError`: + * + * ```js + * import { readFile } from 'node:fs'; + * + * const controller = new AbortController(); + * const signal = controller.signal; + * readFile(fileInfo[0].name, { signal }, (err, buf) => { + * // ... + * }); + * // When you want to abort the request + * controller.abort(); + * ``` + * + * The `fs.readFile()` function buffers the entire file. To minimize memory costs, + * when possible prefer streaming via `fs.createReadStream()`. + * + * Aborting an ongoing request does not abort individual operating + * system requests but rather the internal buffering `fs.readFile` performs. + * @since v0.1.29 + * @param path filename or file descriptor + */ + export function readFile( + path: PathOrFileDescriptor, + options: + | ({ + encoding?: null | undefined; + flag?: string | undefined; + } & Abortable) + | undefined + | null, + callback: (err: NodeJS.ErrnoException | null, data: Buffer) => void, + ): void; + /** + * Asynchronously reads the entire contents of a file. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * If a file descriptor is provided, the underlying file will _not_ be closed automatically. + * @param options Either the encoding for the result, or an object that contains the encoding and an optional flag. + * If a flag is not provided, it defaults to `'r'`. + */ + export function readFile( + path: PathOrFileDescriptor, + options: + | ({ + encoding: BufferEncoding; + flag?: string | undefined; + } & Abortable) + | BufferEncoding, + callback: (err: NodeJS.ErrnoException | null, data: string) => void, + ): void; + /** + * Asynchronously reads the entire contents of a file. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * If a file descriptor is provided, the underlying file will _not_ be closed automatically. + * @param options Either the encoding for the result, or an object that contains the encoding and an optional flag. + * If a flag is not provided, it defaults to `'r'`. + */ + export function readFile( + path: PathOrFileDescriptor, + options: + | (ObjectEncodingOptions & { + flag?: string | undefined; + } & Abortable) + | BufferEncoding + | undefined + | null, + callback: (err: NodeJS.ErrnoException | null, data: string | Buffer) => void, + ): void; + /** + * Asynchronously reads the entire contents of a file. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * If a file descriptor is provided, the underlying file will _not_ be closed automatically. + */ + export function readFile( + path: PathOrFileDescriptor, + callback: (err: NodeJS.ErrnoException | null, data: Buffer) => void, + ): void; + export namespace readFile { + /** + * Asynchronously reads the entire contents of a file. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * If a file descriptor is provided, the underlying file will _not_ be closed automatically. + * @param options An object that may contain an optional flag. + * If a flag is not provided, it defaults to `'r'`. + */ + function __promisify__( + path: PathOrFileDescriptor, + options?: { + encoding?: null | undefined; + flag?: string | undefined; + } | null, + ): Promise; + /** + * Asynchronously reads the entire contents of a file. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * URL support is _experimental_. 
+ * If a file descriptor is provided, the underlying file will _not_ be closed automatically. + * @param options Either the encoding for the result, or an object that contains the encoding and an optional flag. + * If a flag is not provided, it defaults to `'r'`. + */ + function __promisify__( + path: PathOrFileDescriptor, + options: + | { + encoding: BufferEncoding; + flag?: string | undefined; + } + | BufferEncoding, + ): Promise; + /** + * Asynchronously reads the entire contents of a file. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * URL support is _experimental_. + * If a file descriptor is provided, the underlying file will _not_ be closed automatically. + * @param options Either the encoding for the result, or an object that contains the encoding and an optional flag. + * If a flag is not provided, it defaults to `'r'`. + */ + function __promisify__( + path: PathOrFileDescriptor, + options?: + | (ObjectEncodingOptions & { + flag?: string | undefined; + }) + | BufferEncoding + | null, + ): Promise; + } + /** + * Returns the contents of the `path`. + * + * For detailed information, see the documentation of the asynchronous version of + * this API: {@link readFile}. + * + * If the `encoding` option is specified then this function returns a + * string. Otherwise it returns a buffer. + * + * Similar to {@link readFile}, when the path is a directory, the behavior of `fs.readFileSync()` is platform-specific. + * + * ```js + * import { readFileSync } from 'node:fs'; + * + * // macOS, Linux, and Windows + * readFileSync(''); + * // => [Error: EISDIR: illegal operation on a directory, read ] + * + * // FreeBSD + * readFileSync(''); // => + * ``` + * @since v0.1.8 + * @param path filename or file descriptor + */ + export function readFileSync( + path: PathOrFileDescriptor, + options?: { + encoding?: null | undefined; + flag?: string | undefined; + } | null, + ): Buffer; + /** + * Synchronously reads the entire contents of a file. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * If a file descriptor is provided, the underlying file will _not_ be closed automatically. + * @param options Either the encoding for the result, or an object that contains the encoding and an optional flag. + * If a flag is not provided, it defaults to `'r'`. + */ + export function readFileSync( + path: PathOrFileDescriptor, + options: + | { + encoding: BufferEncoding; + flag?: string | undefined; + } + | BufferEncoding, + ): string; + /** + * Synchronously reads the entire contents of a file. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * If a file descriptor is provided, the underlying file will _not_ be closed automatically. + * @param options Either the encoding for the result, or an object that contains the encoding and an optional flag. + * If a flag is not provided, it defaults to `'r'`. + */ + export function readFileSync( + path: PathOrFileDescriptor, + options?: + | (ObjectEncodingOptions & { + flag?: string | undefined; + }) + | BufferEncoding + | null, + ): string | Buffer; + export type WriteFileOptions = + | ( + & ObjectEncodingOptions + & Abortable + & { + mode?: Mode | undefined; + flag?: string | undefined; + flush?: boolean | undefined; + } + ) + | BufferEncoding + | null; + /** + * When `file` is a filename, asynchronously writes data to the file, replacing the + * file if it already exists. `data` can be a string or a buffer. 
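The encoding-dependent return types spelled out above, in one sketch: with an encoding `readFileSync` returns a string, without one it returns a `Buffer` (the path is hypothetical):

```ts
import { readFileSync } from "node:fs";

const asBuffer = readFileSync("./config.json");          // Buffer
const asString = readFileSync("./config.json", "utf8");  // string

console.log(asBuffer.byteLength, asString.length);
```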
+ * + * When `file` is a file descriptor, the behavior is similar to calling `fs.write()` directly (which is recommended). See the notes below on using + * a file descriptor. + * + * The `encoding` option is ignored if `data` is a buffer. + * + * The `mode` option only affects the newly created file. See {@link open} for more details. + * + * ```js + * import { writeFile } from 'node:fs'; + * import { Buffer } from 'node:buffer'; + * + * const data = new Uint8Array(Buffer.from('Hello Node.js')); + * writeFile('message.txt', data, (err) => { + * if (err) throw err; + * console.log('The file has been saved!'); + * }); + * ``` + * + * If `options` is a string, then it specifies the encoding: + * + * ```js + * import { writeFile } from 'node:fs'; + * + * writeFile('message.txt', 'Hello Node.js', 'utf8', callback); + * ``` + * + * It is unsafe to use `fs.writeFile()` multiple times on the same file without + * waiting for the callback. For this scenario, {@link createWriteStream} is + * recommended. + * + * Similarly to `fs.readFile` \- `fs.writeFile` is a convenience method that + * performs multiple `write` calls internally to write the buffer passed to it. + * For performance sensitive code consider using {@link createWriteStream}. + * + * It is possible to use an `AbortSignal` to cancel an `fs.writeFile()`. + * Cancelation is "best effort", and some amount of data is likely still + * to be written. + * + * ```js + * import { writeFile } from 'node:fs'; + * import { Buffer } from 'node:buffer'; + * + * const controller = new AbortController(); + * const { signal } = controller; + * const data = new Uint8Array(Buffer.from('Hello Node.js')); + * writeFile('message.txt', data, { signal }, (err) => { + * // When a request is aborted - the callback is called with an AbortError + * }); + * // When the request should be aborted + * controller.abort(); + * ``` + * + * Aborting an ongoing request does not abort individual operating + * system requests but rather the internal buffering `fs.writeFile` performs. + * @since v0.1.29 + * @param file filename or file descriptor + */ + export function writeFile( + file: PathOrFileDescriptor, + data: string | NodeJS.ArrayBufferView, + options: WriteFileOptions, + callback: NoParamCallback, + ): void; + /** + * Asynchronously writes data to a file, replacing the file if it already exists. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * If a file descriptor is provided, the underlying file will _not_ be closed automatically. + * @param data The data to write. If something other than a Buffer or Uint8Array is provided, the value is coerced to a string. + */ + export function writeFile( + path: PathOrFileDescriptor, + data: string | NodeJS.ArrayBufferView, + callback: NoParamCallback, + ): void; + export namespace writeFile { + /** + * Asynchronously writes data to a file, replacing the file if it already exists. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * URL support is _experimental_. + * If a file descriptor is provided, the underlying file will _not_ be closed automatically. + * @param data The data to write. If something other than a Buffer or Uint8Array is provided, the value is coerced to a string. + * @param options Either the encoding for the file, or an object optionally specifying the encoding, file mode, and flag. + * If `encoding` is not supplied, the default of `'utf8'` is used. + * If `mode` is not supplied, the default of `0o666` is used. 
+ * If `mode` is a string, it is parsed as an octal integer. + * If `flag` is not supplied, the default of `'w'` is used. + */ + function __promisify__( + path: PathOrFileDescriptor, + data: string | NodeJS.ArrayBufferView, + options?: WriteFileOptions, + ): Promise; + } + /** + * Returns `undefined`. + * + * The `mode` option only affects the newly created file. See {@link open} for more details. + * + * For detailed information, see the documentation of the asynchronous version of + * this API: {@link writeFile}. + * @since v0.1.29 + * @param file filename or file descriptor + */ + export function writeFileSync( + file: PathOrFileDescriptor, + data: string | NodeJS.ArrayBufferView, + options?: WriteFileOptions, + ): void; + /** + * Asynchronously append data to a file, creating the file if it does not yet + * exist. `data` can be a string or a `Buffer`. + * + * The `mode` option only affects the newly created file. See {@link open} for more details. + * + * ```js + * import { appendFile } from 'node:fs'; + * + * appendFile('message.txt', 'data to append', (err) => { + * if (err) throw err; + * console.log('The "data to append" was appended to file!'); + * }); + * ``` + * + * If `options` is a string, then it specifies the encoding: + * + * ```js + * import { appendFile } from 'node:fs'; + * + * appendFile('message.txt', 'data to append', 'utf8', callback); + * ``` + * + * The `path` may be specified as a numeric file descriptor that has been opened + * for appending (using `fs.open()` or `fs.openSync()`). The file descriptor will + * not be closed automatically. + * + * ```js + * import { open, close, appendFile } from 'node:fs'; + * + * function closeFd(fd) { + * close(fd, (err) => { + * if (err) throw err; + * }); + * } + * + * open('message.txt', 'a', (err, fd) => { + * if (err) throw err; + * + * try { + * appendFile(fd, 'data to append', 'utf8', (err) => { + * closeFd(fd); + * if (err) throw err; + * }); + * } catch (err) { + * closeFd(fd); + * throw err; + * } + * }); + * ``` + * @since v0.6.7 + * @param path filename or file descriptor + */ + export function appendFile( + path: PathOrFileDescriptor, + data: string | Uint8Array, + options: WriteFileOptions, + callback: NoParamCallback, + ): void; + /** + * Asynchronously append data to a file, creating the file if it does not exist. + * @param file A path to a file. If a URL is provided, it must use the `file:` protocol. + * If a file descriptor is provided, the underlying file will _not_ be closed automatically. + * @param data The data to write. If something other than a Buffer or Uint8Array is provided, the value is coerced to a string. + */ + export function appendFile(file: PathOrFileDescriptor, data: string | Uint8Array, callback: NoParamCallback): void; + export namespace appendFile { + /** + * Asynchronously append data to a file, creating the file if it does not exist. + * @param file A path to a file. If a URL is provided, it must use the `file:` protocol. + * URL support is _experimental_. + * If a file descriptor is provided, the underlying file will _not_ be closed automatically. + * @param data The data to write. If something other than a Buffer or Uint8Array is provided, the value is coerced to a string. + * @param options Either the encoding for the file, or an object optionally specifying the encoding, file mode, and flag. + * If `encoding` is not supplied, the default of `'utf8'` is used. + * If `mode` is not supplied, the default of `0o666` is used. + * If `mode` is a string, it is parsed as an octal integer. 
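A compact sketch of `writeFile`/`writeFileSync` with the `WriteFileOptions` shape declared above; the path is hypothetical, and the `flush` flag is only honoured on Node.js releases new enough to declare it:

```ts
import { writeFile, writeFileSync } from "node:fs";

writeFileSync("./notes.txt", "first line\n", { encoding: "utf8", mode: 0o644, flag: "w" });

writeFile("./notes.txt", "replaced contents\n", { encoding: "utf8", flush: true }, (err) => {
  if (err) throw err;
});
```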
+ * If `flag` is not supplied, the default of `'a'` is used. + */ + function __promisify__( + file: PathOrFileDescriptor, + data: string | Uint8Array, + options?: WriteFileOptions, + ): Promise; + } + /** + * Synchronously append data to a file, creating the file if it does not yet + * exist. `data` can be a string or a `Buffer`. + * + * The `mode` option only affects the newly created file. See {@link open} for more details. + * + * ```js + * import { appendFileSync } from 'node:fs'; + * + * try { + * appendFileSync('message.txt', 'data to append'); + * console.log('The "data to append" was appended to file!'); + * } catch (err) { + * // Handle the error + * } + * ``` + * + * If `options` is a string, then it specifies the encoding: + * + * ```js + * import { appendFileSync } from 'node:fs'; + * + * appendFileSync('message.txt', 'data to append', 'utf8'); + * ``` + * + * The `path` may be specified as a numeric file descriptor that has been opened + * for appending (using `fs.open()` or `fs.openSync()`). The file descriptor will + * not be closed automatically. + * + * ```js + * import { openSync, closeSync, appendFileSync } from 'node:fs'; + * + * let fd; + * + * try { + * fd = openSync('message.txt', 'a'); + * appendFileSync(fd, 'data to append', 'utf8'); + * } catch (err) { + * // Handle the error + * } finally { + * if (fd !== undefined) + * closeSync(fd); + * } + * ``` + * @since v0.6.7 + * @param path filename or file descriptor + */ + export function appendFileSync( + path: PathOrFileDescriptor, + data: string | Uint8Array, + options?: WriteFileOptions, + ): void; + /** + * Watch for changes on `filename`. The callback `listener` will be called each + * time the file is accessed. + * + * The `options` argument may be omitted. If provided, it should be an object. The `options` object may contain a boolean named `persistent` that indicates + * whether the process should continue to run as long as files are being watched. + * The `options` object may specify an `interval` property indicating how often the + * target should be polled in milliseconds. + * + * The `listener` gets two arguments the current stat object and the previous + * stat object: + * + * ```js + * import { watchFile } from 'node:fs'; + * + * watchFile('message.text', (curr, prev) => { + * console.log(`the current mtime is: ${curr.mtime}`); + * console.log(`the previous mtime was: ${prev.mtime}`); + * }); + * ``` + * + * These stat objects are instances of `fs.Stat`. If the `bigint` option is `true`, + * the numeric values in these objects are specified as `BigInt`s. + * + * To be notified when the file was modified, not just accessed, it is necessary + * to compare `curr.mtimeMs` and `prev.mtimeMs`. + * + * When an `fs.watchFile` operation results in an `ENOENT` error, it + * will invoke the listener once, with all the fields zeroed (or, for dates, the + * Unix Epoch). If the file is created later on, the listener will be called + * again, with the latest stat objects. This is a change in functionality since + * v0.10. + * + * Using {@link watch} is more efficient than `fs.watchFile` and `fs.unwatchFile`. `fs.watch` should be used instead of `fs.watchFile` and `fs.unwatchFile` when possible. + * + * When a file being watched by `fs.watchFile()` disappears and reappears, + * then the contents of `previous` in the second callback event (the file's + * reappearance) will be the same as the contents of `previous` in the first + * callback event (its disappearance). 
+ * + * This happens when: + * + * * the file is deleted, followed by a restore + * * the file is renamed and then renamed a second time back to its original name + * @since v0.1.31 + */ + export interface WatchFileOptions { + bigint?: boolean | undefined; + persistent?: boolean | undefined; + interval?: number | undefined; + } + /** + * Watch for changes on `filename`. The callback `listener` will be called each + * time the file is accessed. + * + * The `options` argument may be omitted. If provided, it should be an object. The `options` object may contain a boolean named `persistent` that indicates + * whether the process should continue to run as long as files are being watched. + * The `options` object may specify an `interval` property indicating how often the + * target should be polled in milliseconds. + * + * The `listener` gets two arguments the current stat object and the previous + * stat object: + * + * ```js + * import { watchFile } from 'node:fs'; + * + * watchFile('message.text', (curr, prev) => { + * console.log(`the current mtime is: ${curr.mtime}`); + * console.log(`the previous mtime was: ${prev.mtime}`); + * }); + * ``` + * + * These stat objects are instances of `fs.Stat`. If the `bigint` option is `true`, + * the numeric values in these objects are specified as `BigInt`s. + * + * To be notified when the file was modified, not just accessed, it is necessary + * to compare `curr.mtimeMs` and `prev.mtimeMs`. + * + * When an `fs.watchFile` operation results in an `ENOENT` error, it + * will invoke the listener once, with all the fields zeroed (or, for dates, the + * Unix Epoch). If the file is created later on, the listener will be called + * again, with the latest stat objects. This is a change in functionality since + * v0.10. + * + * Using {@link watch} is more efficient than `fs.watchFile` and `fs.unwatchFile`. `fs.watch` should be used instead of `fs.watchFile` and `fs.unwatchFile` when possible. + * + * When a file being watched by `fs.watchFile()` disappears and reappears, + * then the contents of `previous` in the second callback event (the file's + * reappearance) will be the same as the contents of `previous` in the first + * callback event (its disappearance). + * + * This happens when: + * + * * the file is deleted, followed by a restore + * * the file is renamed and then renamed a second time back to its original name + * @since v0.1.31 + */ + export function watchFile( + filename: PathLike, + options: + | (WatchFileOptions & { + bigint?: false | undefined; + }) + | undefined, + listener: StatsListener, + ): StatWatcher; + export function watchFile( + filename: PathLike, + options: + | (WatchFileOptions & { + bigint: true; + }) + | undefined, + listener: BigIntStatsListener, + ): StatWatcher; + /** + * Watch for changes on `filename`. The callback `listener` will be called each time the file is accessed. + * @param filename A path to a file or directory. If a URL is provided, it must use the `file:` protocol. + */ + export function watchFile(filename: PathLike, listener: StatsListener): StatWatcher; + /** + * Stop watching for changes on `filename`. If `listener` is specified, only that + * particular listener is removed. Otherwise, _all_ listeners are removed, + * effectively stopping watching of `filename`. + * + * Calling `fs.unwatchFile()` with a filename that is not being watched is a + * no-op, not an error. + * + * Using {@link watch} is more efficient than `fs.watchFile()` and `fs.unwatchFile()`. 
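A sketch of the polling watcher described above, comparing `mtimeMs` to detect modification and detaching the listener afterwards (the file name and interval are arbitrary):

```ts
import { Stats, unwatchFile, watchFile } from "node:fs";

const onChange = (curr: Stats, prev: Stats): void => {
  if (curr.mtimeMs !== prev.mtimeMs) {
    console.log("message.text was modified");
  }
};

watchFile("message.text", { interval: 1000 }, onChange);

// Later: detach just this listener.
setTimeout(() => unwatchFile("message.text", onChange), 60_000);
```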
`fs.watch()` should be used instead of `fs.watchFile()` and `fs.unwatchFile()` when possible. + * @since v0.1.31 + * @param listener Optional, a listener previously attached using `fs.watchFile()` + */ + export function unwatchFile(filename: PathLike, listener?: StatsListener): void; + export function unwatchFile(filename: PathLike, listener?: BigIntStatsListener): void; + export interface WatchOptions extends Abortable { + encoding?: BufferEncoding | "buffer" | undefined; + persistent?: boolean | undefined; + recursive?: boolean | undefined; + } + export type WatchEventType = "rename" | "change"; + export type WatchListener = (event: WatchEventType, filename: T | null) => void; + export type StatsListener = (curr: Stats, prev: Stats) => void; + export type BigIntStatsListener = (curr: BigIntStats, prev: BigIntStats) => void; + /** + * Watch for changes on `filename`, where `filename` is either a file or a + * directory. + * + * The second argument is optional. If `options` is provided as a string, it + * specifies the `encoding`. Otherwise `options` should be passed as an object. + * + * The listener callback gets two arguments `(eventType, filename)`. `eventType`is either `'rename'` or `'change'`, and `filename` is the name of the file + * which triggered the event. + * + * On most platforms, `'rename'` is emitted whenever a filename appears or + * disappears in the directory. + * + * The listener callback is attached to the `'change'` event fired by `fs.FSWatcher`, but it is not the same thing as the `'change'` value of `eventType`. + * + * If a `signal` is passed, aborting the corresponding AbortController will close + * the returned `fs.FSWatcher`. + * @since v0.5.10 + * @param listener + */ + export function watch( + filename: PathLike, + options: + | (WatchOptions & { + encoding: "buffer"; + }) + | "buffer", + listener?: WatchListener, + ): FSWatcher; + /** + * Watch for changes on `filename`, where `filename` is either a file or a directory, returning an `FSWatcher`. + * @param filename A path to a file or directory. If a URL is provided, it must use the `file:` protocol. + * @param options Either the encoding for the filename provided to the listener, or an object optionally specifying encoding, persistent, and recursive options. + * If `encoding` is not supplied, the default of `'utf8'` is used. + * If `persistent` is not supplied, the default of `true` is used. + * If `recursive` is not supplied, the default of `false` is used. + */ + export function watch( + filename: PathLike, + options?: WatchOptions | BufferEncoding | null, + listener?: WatchListener, + ): FSWatcher; + /** + * Watch for changes on `filename`, where `filename` is either a file or a directory, returning an `FSWatcher`. + * @param filename A path to a file or directory. If a URL is provided, it must use the `file:` protocol. + * @param options Either the encoding for the filename provided to the listener, or an object optionally specifying encoding, persistent, and recursive options. + * If `encoding` is not supplied, the default of `'utf8'` is used. + * If `persistent` is not supplied, the default of `true` is used. + * If `recursive` is not supplied, the default of `false` is used. + */ + export function watch( + filename: PathLike, + options: WatchOptions | string, + listener?: WatchListener, + ): FSWatcher; + /** + * Watch for changes on `filename`, where `filename` is either a file or a directory, returning an `FSWatcher`. + * @param filename A path to a file or directory. 
If a URL is provided, it must use the `file:` protocol. + */ + export function watch(filename: PathLike, listener?: WatchListener): FSWatcher; + /** + * Test whether or not the given path exists by checking with the file system. + * Then call the `callback` argument with either true or false: + * + * ```js + * import { exists } from 'node:fs'; + * + * exists('/etc/passwd', (e) => { + * console.log(e ? 'it exists' : 'no passwd!'); + * }); + * ``` + * + * **The parameters for this callback are not consistent with other Node.js** + * **callbacks.** Normally, the first parameter to a Node.js callback is an `err` parameter, optionally followed by other parameters. The `fs.exists()` callback + * has only one boolean parameter. This is one reason `fs.access()` is recommended + * instead of `fs.exists()`. + * + * Using `fs.exists()` to check for the existence of a file before calling `fs.open()`, `fs.readFile()`, or `fs.writeFile()` is not recommended. Doing + * so introduces a race condition, since other processes may change the file's + * state between the two calls. Instead, user code should open/read/write the + * file directly and handle the error raised if the file does not exist. + * + * **write (NOT RECOMMENDED)** + * + * ```js + * import { exists, open, close } from 'node:fs'; + * + * exists('myfile', (e) => { + * if (e) { + * console.error('myfile already exists'); + * } else { + * open('myfile', 'wx', (err, fd) => { + * if (err) throw err; + * + * try { + * writeMyData(fd); + * } finally { + * close(fd, (err) => { + * if (err) throw err; + * }); + * } + * }); + * } + * }); + * ``` + * + * **write (RECOMMENDED)** + * + * ```js + * import { open, close } from 'node:fs'; + * open('myfile', 'wx', (err, fd) => { + * if (err) { + * if (err.code === 'EEXIST') { + * console.error('myfile already exists'); + * return; + * } + * + * throw err; + * } + * + * try { + * writeMyData(fd); + * } finally { + * close(fd, (err) => { + * if (err) throw err; + * }); + * } + * }); + * ``` + * + * **read (NOT RECOMMENDED)** + * + * ```js + * import { open, close, exists } from 'node:fs'; + * + * exists('myfile', (e) => { + * if (e) { + * open('myfile', 'r', (err, fd) => { + * if (err) throw err; + * + * try { + * readMyData(fd); + * } finally { + * close(fd, (err) => { + * if (err) throw err; + * }); + * } + * }); + * } else { + * console.error('myfile does not exist'); + * } + * }); + * ``` + * + * **read (RECOMMENDED)** + * + * ```js + * import { open, close } from 'node:fs'; + * + * open('myfile', 'r', (err, fd) => { + * if (err) { + * if (err.code === 'ENOENT') { + * console.error('myfile does not exist'); + * return; + * } + * + * throw err; + * } + * + * try { + * readMyData(fd); + * } finally { + * close(fd, (err) => { + * if (err) throw err; + * }); + * } + * }); + * ``` + * + * The "not recommended" examples above check for existence and then use the + * file; the "recommended" examples are better because they use the file directly + * and handle the error, if any. + * + * In general, check for the existence of a file only if the file won't be + * used directly, for example when its existence is a signal from another + * process. + * @since v0.0.2 + * @deprecated Since v1.0.0 - Use {@link stat} or {@link access} instead. + */ + export function exists(path: PathLike, callback: (exists: boolean) => void): void; + /** @deprecated */ + export namespace exists { + /** + * @param path A path to a file or directory. If a URL is provided, it must use the `file:` protocol. + * URL support is _experimental_. 
+ */ + function __promisify__(path: PathLike): Promise; + } + /** + * Returns `true` if the path exists, `false` otherwise. + * + * For detailed information, see the documentation of the asynchronous version of + * this API: {@link exists}. + * + * `fs.exists()` is deprecated, but `fs.existsSync()` is not. The `callback` parameter to `fs.exists()` accepts parameters that are inconsistent with other + * Node.js callbacks. `fs.existsSync()` does not use a callback. + * + * ```js + * import { existsSync } from 'node:fs'; + * + * if (existsSync('/etc/passwd')) + * console.log('The path exists.'); + * ``` + * @since v0.1.21 + */ + export function existsSync(path: PathLike): boolean; + export namespace constants { + // File Access Constants + /** Constant for fs.access(). File is visible to the calling process. */ + const F_OK: number; + /** Constant for fs.access(). File can be read by the calling process. */ + const R_OK: number; + /** Constant for fs.access(). File can be written by the calling process. */ + const W_OK: number; + /** Constant for fs.access(). File can be executed by the calling process. */ + const X_OK: number; + // File Copy Constants + /** Constant for fs.copyFile. Flag indicating the destination file should not be overwritten if it already exists. */ + const COPYFILE_EXCL: number; + /** + * Constant for fs.copyFile. copy operation will attempt to create a copy-on-write reflink. + * If the underlying platform does not support copy-on-write, then a fallback copy mechanism is used. + */ + const COPYFILE_FICLONE: number; + /** + * Constant for fs.copyFile. Copy operation will attempt to create a copy-on-write reflink. + * If the underlying platform does not support copy-on-write, then the operation will fail with an error. + */ + const COPYFILE_FICLONE_FORCE: number; + // File Open Constants + /** Constant for fs.open(). Flag indicating to open a file for read-only access. */ + const O_RDONLY: number; + /** Constant for fs.open(). Flag indicating to open a file for write-only access. */ + const O_WRONLY: number; + /** Constant for fs.open(). Flag indicating to open a file for read-write access. */ + const O_RDWR: number; + /** Constant for fs.open(). Flag indicating to create the file if it does not already exist. */ + const O_CREAT: number; + /** Constant for fs.open(). Flag indicating that opening a file should fail if the O_CREAT flag is set and the file already exists. */ + const O_EXCL: number; + /** + * Constant for fs.open(). Flag indicating that if path identifies a terminal device, + * opening the path shall not cause that terminal to become the controlling terminal for the process + * (if the process does not already have one). + */ + const O_NOCTTY: number; + /** Constant for fs.open(). Flag indicating that if the file exists and is a regular file, and the file is opened successfully for write access, its length shall be truncated to zero. */ + const O_TRUNC: number; + /** Constant for fs.open(). Flag indicating that data will be appended to the end of the file. */ + const O_APPEND: number; + /** Constant for fs.open(). Flag indicating that the open should fail if the path is not a directory. */ + const O_DIRECTORY: number; + /** + * constant for fs.open(). + * Flag indicating reading accesses to the file system will no longer result in + * an update to the atime information associated with the file. + * This flag is available on Linux operating systems only. + */ + const O_NOATIME: number; + /** Constant for fs.open(). 
Flag indicating that the open should fail if the path is a symbolic link. */ + const O_NOFOLLOW: number; + /** Constant for fs.open(). Flag indicating that the file is opened for synchronous I/O. */ + const O_SYNC: number; + /** Constant for fs.open(). Flag indicating that the file is opened for synchronous I/O with write operations waiting for data integrity. */ + const O_DSYNC: number; + /** Constant for fs.open(). Flag indicating to open the symbolic link itself rather than the resource it is pointing to. */ + const O_SYMLINK: number; + /** Constant for fs.open(). When set, an attempt will be made to minimize caching effects of file I/O. */ + const O_DIRECT: number; + /** Constant for fs.open(). Flag indicating to open the file in nonblocking mode when possible. */ + const O_NONBLOCK: number; + // File Type Constants + /** Constant for fs.Stats mode property for determining a file's type. Bit mask used to extract the file type code. */ + const S_IFMT: number; + /** Constant for fs.Stats mode property for determining a file's type. File type constant for a regular file. */ + const S_IFREG: number; + /** Constant for fs.Stats mode property for determining a file's type. File type constant for a directory. */ + const S_IFDIR: number; + /** Constant for fs.Stats mode property for determining a file's type. File type constant for a character-oriented device file. */ + const S_IFCHR: number; + /** Constant for fs.Stats mode property for determining a file's type. File type constant for a block-oriented device file. */ + const S_IFBLK: number; + /** Constant for fs.Stats mode property for determining a file's type. File type constant for a FIFO/pipe. */ + const S_IFIFO: number; + /** Constant for fs.Stats mode property for determining a file's type. File type constant for a symbolic link. */ + const S_IFLNK: number; + /** Constant for fs.Stats mode property for determining a file's type. File type constant for a socket. */ + const S_IFSOCK: number; + // File Mode Constants + /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating readable, writable and executable by owner. */ + const S_IRWXU: number; + /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating readable by owner. */ + const S_IRUSR: number; + /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating writable by owner. */ + const S_IWUSR: number; + /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating executable by owner. */ + const S_IXUSR: number; + /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating readable, writable and executable by group. */ + const S_IRWXG: number; + /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating readable by group. */ + const S_IRGRP: number; + /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating writable by group. */ + const S_IWGRP: number; + /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating executable by group. */ + const S_IXGRP: number; + /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating readable, writable and executable by others. 
*/ + const S_IRWXO: number; + /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating readable by others. */ + const S_IROTH: number; + /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating writable by others. */ + const S_IWOTH: number; + /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating executable by others. */ + const S_IXOTH: number; + /** + * When set, a memory file mapping is used to access the file. This flag + * is available on Windows operating systems only. On other operating systems, + * this flag is ignored. + */ + const UV_FS_O_FILEMAP: number; + } + /** + * Tests a user's permissions for the file or directory specified by `path`. + * The `mode` argument is an optional integer that specifies the accessibility + * checks to be performed. `mode` should be either the value `fs.constants.F_OK` or a mask consisting of the bitwise OR of any of `fs.constants.R_OK`, `fs.constants.W_OK`, and `fs.constants.X_OK` + * (e.g.`fs.constants.W_OK | fs.constants.R_OK`). Check `File access constants` for + * possible values of `mode`. + * + * The final argument, `callback`, is a callback function that is invoked with + * a possible error argument. If any of the accessibility checks fail, the error + * argument will be an `Error` object. The following examples check if `package.json` exists, and if it is readable or writable. + * + * ```js + * import { access, constants } from 'node:fs'; + * + * const file = 'package.json'; + * + * // Check if the file exists in the current directory. + * access(file, constants.F_OK, (err) => { + * console.log(`${file} ${err ? 'does not exist' : 'exists'}`); + * }); + * + * // Check if the file is readable. + * access(file, constants.R_OK, (err) => { + * console.log(`${file} ${err ? 'is not readable' : 'is readable'}`); + * }); + * + * // Check if the file is writable. + * access(file, constants.W_OK, (err) => { + * console.log(`${file} ${err ? 'is not writable' : 'is writable'}`); + * }); + * + * // Check if the file is readable and writable. + * access(file, constants.R_OK | constants.W_OK, (err) => { + * console.log(`${file} ${err ? 'is not' : 'is'} readable and writable`); + * }); + * ``` + * + * Do not use `fs.access()` to check for the accessibility of a file before calling `fs.open()`, `fs.readFile()`, or `fs.writeFile()`. Doing + * so introduces a race condition, since other processes may change the file's + * state between the two calls. Instead, user code should open/read/write the + * file directly and handle the error raised if the file is not accessible. 
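The file type and permission constants above are usually masked against `fs.Stats.mode`. A minimal sketch on a POSIX system, assuming a hypothetical `./some-path`:

```ts
// Sketch: classifying an entry and checking owner permissions with the
// constants declared above. './some-path' is a placeholder.
import { statSync, constants } from "node:fs";

const { mode } = statSync("./some-path");

// S_IFMT extracts the file type bits; S_IWUSR tests owner write permission.
const isRegularFile = (mode & constants.S_IFMT) === constants.S_IFREG;
const ownerCanWrite = (mode & constants.S_IWUSR) !== 0;

console.log({ isRegularFile, ownerCanWrite });
```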
+ * + * **write (NOT RECOMMENDED)** + * + * ```js + * import { access, open, close } from 'node:fs'; + * + * access('myfile', (err) => { + * if (!err) { + * console.error('myfile already exists'); + * return; + * } + * + * open('myfile', 'wx', (err, fd) => { + * if (err) throw err; + * + * try { + * writeMyData(fd); + * } finally { + * close(fd, (err) => { + * if (err) throw err; + * }); + * } + * }); + * }); + * ``` + * + * **write (RECOMMENDED)** + * + * ```js + * import { open, close } from 'node:fs'; + * + * open('myfile', 'wx', (err, fd) => { + * if (err) { + * if (err.code === 'EEXIST') { + * console.error('myfile already exists'); + * return; + * } + * + * throw err; + * } + * + * try { + * writeMyData(fd); + * } finally { + * close(fd, (err) => { + * if (err) throw err; + * }); + * } + * }); + * ``` + * + * **read (NOT RECOMMENDED)** + * + * ```js + * import { access, open, close } from 'node:fs'; + * access('myfile', (err) => { + * if (err) { + * if (err.code === 'ENOENT') { + * console.error('myfile does not exist'); + * return; + * } + * + * throw err; + * } + * + * open('myfile', 'r', (err, fd) => { + * if (err) throw err; + * + * try { + * readMyData(fd); + * } finally { + * close(fd, (err) => { + * if (err) throw err; + * }); + * } + * }); + * }); + * ``` + * + * **read (RECOMMENDED)** + * + * ```js + * import { open, close } from 'node:fs'; + * + * open('myfile', 'r', (err, fd) => { + * if (err) { + * if (err.code === 'ENOENT') { + * console.error('myfile does not exist'); + * return; + * } + * + * throw err; + * } + * + * try { + * readMyData(fd); + * } finally { + * close(fd, (err) => { + * if (err) throw err; + * }); + * } + * }); + * ``` + * + * The "not recommended" examples above check for accessibility and then use the + * file; the "recommended" examples are better because they use the file directly + * and handle the error, if any. + * + * In general, check for the accessibility of a file only if the file will not be + * used directly, for example when its accessibility is a signal from another + * process. + * + * On Windows, access-control policies (ACLs) on a directory may limit access to + * a file or directory. The `fs.access()` function, however, does not check the + * ACL and therefore may report that a path is accessible even if the ACL restricts + * the user from reading or writing to it. + * @since v0.11.15 + * @param [mode=fs.constants.F_OK] + */ + export function access(path: PathLike, mode: number | undefined, callback: NoParamCallback): void; + /** + * Asynchronously tests a user's permissions for the file specified by path. + * @param path A path to a file or directory. If a URL is provided, it must use the `file:` protocol. + */ + export function access(path: PathLike, callback: NoParamCallback): void; + export namespace access { + /** + * Asynchronously tests a user's permissions for the file specified by path. + * @param path A path to a file or directory. If a URL is provided, it must use the `file:` protocol. + * URL support is _experimental_. + */ + function __promisify__(path: PathLike, mode?: number): Promise; + } + /** + * Synchronously tests a user's permissions for the file or directory specified + * by `path`. The `mode` argument is an optional integer that specifies the + * accessibility checks to be performed. `mode` should be either the value `fs.constants.F_OK` or a mask consisting of the bitwise OR of any of `fs.constants.R_OK`, `fs.constants.W_OK`, and + * `fs.constants.X_OK` (e.g.`fs.constants.W_OK | fs.constants.R_OK`). 
Check `File access constants` for + * possible values of `mode`. + * + * If any of the accessibility checks fail, an `Error` will be thrown. Otherwise, + * the method will return `undefined`. + * + * ```js + * import { accessSync, constants } from 'node:fs'; + * + * try { + * accessSync('etc/passwd', constants.R_OK | constants.W_OK); + * console.log('can read/write'); + * } catch (err) { + * console.error('no access!'); + * } + * ``` + * @since v0.11.15 + * @param [mode=fs.constants.F_OK] + */ + export function accessSync(path: PathLike, mode?: number): void; + interface StreamOptions { + flags?: string | undefined; + encoding?: BufferEncoding | undefined; + fd?: number | promises.FileHandle | undefined; + mode?: number | undefined; + autoClose?: boolean | undefined; + emitClose?: boolean | undefined; + start?: number | undefined; + signal?: AbortSignal | null | undefined; + highWaterMark?: number | undefined; + } + interface FSImplementation { + open?: (...args: any[]) => any; + close?: (...args: any[]) => any; + } + interface CreateReadStreamFSImplementation extends FSImplementation { + read: (...args: any[]) => any; + } + interface CreateWriteStreamFSImplementation extends FSImplementation { + write: (...args: any[]) => any; + writev?: (...args: any[]) => any; + } + interface ReadStreamOptions extends StreamOptions { + fs?: CreateReadStreamFSImplementation | null | undefined; + end?: number | undefined; + } + interface WriteStreamOptions extends StreamOptions { + fs?: CreateWriteStreamFSImplementation | null | undefined; + flush?: boolean | undefined; + } + /** + * Unlike the 16 KiB default `highWaterMark` for a `stream.Readable`, the stream + * returned by this method has a default `highWaterMark` of 64 KiB. + * + * `options` can include `start` and `end` values to read a range of bytes from + * the file instead of the entire file. Both `start` and `end` are inclusive and + * start counting at 0, allowed values are in the + * \[0, [`Number.MAX_SAFE_INTEGER`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Number/MAX_SAFE_INTEGER)\] range. If `fd` is specified and `start` is + * omitted or `undefined`, `fs.createReadStream()` reads sequentially from the + * current file position. The `encoding` can be any one of those accepted by `Buffer`. + * + * If `fd` is specified, `ReadStream` will ignore the `path` argument and will use + * the specified file descriptor. This means that no `'open'` event will be + * emitted. `fd` should be blocking; non-blocking `fd`s should be passed to `net.Socket`. + * + * If `fd` points to a character device that only supports blocking reads + * (such as keyboard or sound card), read operations do not finish until data is + * available. This can prevent the process from exiting and the stream from + * closing naturally. + * + * By default, the stream will emit a `'close'` event after it has been + * destroyed. Set the `emitClose` option to `false` to change this behavior. + * + * By providing the `fs` option, it is possible to override the corresponding `fs` implementations for `open`, `read`, and `close`. When providing the `fs` option, + * an override for `read` is required. If no `fd` is provided, an override for `open` is also required. If `autoClose` is `true`, an override for `close` is + * also required. + * + * ```js + * import { createReadStream } from 'node:fs'; + * + * // Create a stream from some character device. 
+ * const stream = createReadStream('/dev/input/event0'); + * setTimeout(() => { + * stream.close(); // This may not close the stream. + * // Artificially marking end-of-stream, as if the underlying resource had + * // indicated end-of-file by itself, allows the stream to close. + * // This does not cancel pending read operations, and if there is such an + * // operation, the process may still not be able to exit successfully + * // until it finishes. + * stream.push(null); + * stream.read(0); + * }, 100); + * ``` + * + * If `autoClose` is false, then the file descriptor won't be closed, even if + * there's an error. It is the application's responsibility to close it and make + * sure there's no file descriptor leak. If `autoClose` is set to true (default + * behavior), on `'error'` or `'end'` the file descriptor will be closed + * automatically. + * + * `mode` sets the file mode (permission and sticky bits), but only if the + * file was created. + * + * An example to read the last 10 bytes of a file which is 100 bytes long: + * + * ```js + * import { createReadStream } from 'node:fs'; + * + * createReadStream('sample.txt', { start: 90, end: 99 }); + * ``` + * + * If `options` is a string, then it specifies the encoding. + * @since v0.1.31 + */ + export function createReadStream(path: PathLike, options?: BufferEncoding | ReadStreamOptions): ReadStream; + /** + * `options` may also include a `start` option to allow writing data at some + * position past the beginning of the file, allowed values are in the + * \[0, [`Number.MAX_SAFE_INTEGER`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Number/MAX_SAFE_INTEGER)\] range. Modifying a file rather than + * replacing it may require the `flags` option to be set to `r+` rather than the + * default `w`. The `encoding` can be any one of those accepted by `Buffer`. + * + * If `autoClose` is set to true (default behavior) on `'error'` or `'finish'` the file descriptor will be closed automatically. If `autoClose` is false, + * then the file descriptor won't be closed, even if there's an error. + * It is the application's responsibility to close it and make sure there's no + * file descriptor leak. + * + * By default, the stream will emit a `'close'` event after it has been + * destroyed. Set the `emitClose` option to `false` to change this behavior. + * + * By providing the `fs` option it is possible to override the corresponding `fs` implementations for `open`, `write`, `writev`, and `close`. Overriding `write()` without `writev()` can reduce + * performance as some optimizations (`_writev()`) + * will be disabled. When providing the `fs` option, overrides for at least one of `write` and `writev` are required. If no `fd` option is supplied, an override + * for `open` is also required. If `autoClose` is `true`, an override for `close` is also required. + * + * Like `fs.ReadStream`, if `fd` is specified, `fs.WriteStream` will ignore the `path` argument and will use the specified file descriptor. This means that no `'open'` event will be + * emitted. `fd` should be blocking; non-blocking `fd`s + * should be passed to `net.Socket`. + * + * If `options` is a string, then it specifies the encoding. + * @since v0.1.31 + */ + export function createWriteStream(path: PathLike, options?: BufferEncoding | WriteStreamOptions): WriteStream; + /** + * Forces all currently queued I/O operations associated with the file to the + * operating system's synchronized I/O completion state. 
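For the write side, a minimal sketch of `createWriteStream()` with the `flags` and `encoding` options described above; `output.log` is a hypothetical path:

```ts
// Sketch: appending to a log file with createWriteStream(). The 'a' flag keeps
// existing contents; end() flushes pending writes and closes the descriptor.
import { createWriteStream } from "node:fs";

const out = createWriteStream("output.log", { flags: "a", encoding: "utf8" });

out.write("first line\n");
out.write("second line\n");
out.end(() => {
  console.log("all writes flushed and file descriptor closed");
});
```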
Refer to the POSIX [`fdatasync(2)`](http://man7.org/linux/man-pages/man2/fdatasync.2.html) documentation for details. No arguments other + * than a possible + * exception are given to the completion callback. + * @since v0.1.96 + */ + export function fdatasync(fd: number, callback: NoParamCallback): void; + export namespace fdatasync { + /** + * Asynchronous fdatasync(2) - synchronize a file's in-core state with storage device. + * @param fd A file descriptor. + */ + function __promisify__(fd: number): Promise; + } + /** + * Forces all currently queued I/O operations associated with the file to the + * operating system's synchronized I/O completion state. Refer to the POSIX [`fdatasync(2)`](http://man7.org/linux/man-pages/man2/fdatasync.2.html) documentation for details. Returns `undefined`. + * @since v0.1.96 + */ + export function fdatasyncSync(fd: number): void; + /** + * Asynchronously copies `src` to `dest`. By default, `dest` is overwritten if it + * already exists. No arguments other than a possible exception are given to the + * callback function. Node.js makes no guarantees about the atomicity of the copy + * operation. If an error occurs after the destination file has been opened for + * writing, Node.js will attempt to remove the destination. + * + * `mode` is an optional integer that specifies the behavior + * of the copy operation. It is possible to create a mask consisting of the bitwise + * OR of two or more values (e.g.`fs.constants.COPYFILE_EXCL | fs.constants.COPYFILE_FICLONE`). + * + * * `fs.constants.COPYFILE_EXCL`: The copy operation will fail if `dest` already + * exists. + * * `fs.constants.COPYFILE_FICLONE`: The copy operation will attempt to create a + * copy-on-write reflink. If the platform does not support copy-on-write, then a + * fallback copy mechanism is used. + * * `fs.constants.COPYFILE_FICLONE_FORCE`: The copy operation will attempt to + * create a copy-on-write reflink. If the platform does not support + * copy-on-write, then the operation will fail. + * + * ```js + * import { copyFile, constants } from 'node:fs'; + * + * function callback(err) { + * if (err) throw err; + * console.log('source.txt was copied to destination.txt'); + * } + * + * // destination.txt will be created or overwritten by default. + * copyFile('source.txt', 'destination.txt', callback); + * + * // By using COPYFILE_EXCL, the operation will fail if destination.txt exists. + * copyFile('source.txt', 'destination.txt', constants.COPYFILE_EXCL, callback); + * ``` + * @since v8.5.0 + * @param src source filename to copy + * @param dest destination filename of the copy operation + * @param [mode=0] modifiers for copy operation. + */ + export function copyFile(src: PathLike, dest: PathLike, callback: NoParamCallback): void; + export function copyFile(src: PathLike, dest: PathLike, mode: number, callback: NoParamCallback): void; + export namespace copyFile { + function __promisify__(src: PathLike, dst: PathLike, mode?: number): Promise; + } + /** + * Synchronously copies `src` to `dest`. By default, `dest` is overwritten if it + * already exists. Returns `undefined`. Node.js makes no guarantees about the + * atomicity of the copy operation. If an error occurs after the destination file + * has been opened for writing, Node.js will attempt to remove the destination. + * + * `mode` is an optional integer that specifies the behavior + * of the copy operation. 
It is possible to create a mask consisting of the bitwise + * OR of two or more values (e.g.`fs.constants.COPYFILE_EXCL | fs.constants.COPYFILE_FICLONE`). + * + * * `fs.constants.COPYFILE_EXCL`: The copy operation will fail if `dest` already + * exists. + * * `fs.constants.COPYFILE_FICLONE`: The copy operation will attempt to create a + * copy-on-write reflink. If the platform does not support copy-on-write, then a + * fallback copy mechanism is used. + * * `fs.constants.COPYFILE_FICLONE_FORCE`: The copy operation will attempt to + * create a copy-on-write reflink. If the platform does not support + * copy-on-write, then the operation will fail. + * + * ```js + * import { copyFileSync, constants } from 'node:fs'; + * + * // destination.txt will be created or overwritten by default. + * copyFileSync('source.txt', 'destination.txt'); + * console.log('source.txt was copied to destination.txt'); + * + * // By using COPYFILE_EXCL, the operation will fail if destination.txt exists. + * copyFileSync('source.txt', 'destination.txt', constants.COPYFILE_EXCL); + * ``` + * @since v8.5.0 + * @param src source filename to copy + * @param dest destination filename of the copy operation + * @param [mode=0] modifiers for copy operation. + */ + export function copyFileSync(src: PathLike, dest: PathLike, mode?: number): void; + /** + * Write an array of `ArrayBufferView`s to the file specified by `fd` using `writev()`. + * + * `position` is the offset from the beginning of the file where this data + * should be written. If `typeof position !== 'number'`, the data will be written + * at the current position. + * + * The callback will be given three arguments: `err`, `bytesWritten`, and `buffers`. `bytesWritten` is how many bytes were written from `buffers`. + * + * If this method is `util.promisify()` ed, it returns a promise for an `Object` with `bytesWritten` and `buffers` properties. + * + * It is unsafe to use `fs.writev()` multiple times on the same file without + * waiting for the callback. For this scenario, use {@link createWriteStream}. + * + * On Linux, positional writes don't work when the file is opened in append mode. + * The kernel ignores the position argument and always appends the data to + * the end of the file. + * @since v12.9.0 + * @param [position='null'] + */ + export function writev( + fd: number, + buffers: readonly NodeJS.ArrayBufferView[], + cb: (err: NodeJS.ErrnoException | null, bytesWritten: number, buffers: NodeJS.ArrayBufferView[]) => void, + ): void; + export function writev( + fd: number, + buffers: readonly NodeJS.ArrayBufferView[], + position: number, + cb: (err: NodeJS.ErrnoException | null, bytesWritten: number, buffers: NodeJS.ArrayBufferView[]) => void, + ): void; + export interface WriteVResult { + bytesWritten: number; + buffers: NodeJS.ArrayBufferView[]; + } + export namespace writev { + function __promisify__( + fd: number, + buffers: readonly NodeJS.ArrayBufferView[], + position?: number, + ): Promise; + } + /** + * For detailed information, see the documentation of the asynchronous version of + * this API: {@link writev}. + * @since v12.9.0 + * @param [position='null'] + * @return The number of bytes written. + */ + export function writevSync(fd: number, buffers: readonly NodeJS.ArrayBufferView[], position?: number): number; + /** + * Read from a file specified by `fd` and write to an array of `ArrayBufferView`s + * using `readv()`. + * + * `position` is the offset from the beginning of the file from where data + * should be read. 
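A minimal sketch of a gather write with `writev()` as documented above; `vectors.bin` is a hypothetical file name:

```ts
// Sketch: a gather write with writev(); the two buffers land back to back.
import { open, writev, close } from "node:fs";

open("vectors.bin", "w", (err, fd) => {
  if (err) throw err;
  const buffers = [Buffer.from("header"), Buffer.from("body")];
  writev(fd, buffers, (writeErr, bytesWritten) => {
    if (writeErr) throw writeErr;
    console.log(`wrote ${bytesWritten} bytes`);
    close(fd, (closeErr) => {
      if (closeErr) throw closeErr;
    });
  });
});
```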
If `typeof position !== 'number'`, the data will be read + * from the current position. + * + * The callback will be given three arguments: `err`, `bytesRead`, and `buffers`. `bytesRead` is how many bytes were read from the file. + * + * If this method is invoked as its `util.promisify()` ed version, it returns + * a promise for an `Object` with `bytesRead` and `buffers` properties. + * @since v13.13.0, v12.17.0 + * @param [position='null'] + */ + export function readv( + fd: number, + buffers: readonly NodeJS.ArrayBufferView[], + cb: (err: NodeJS.ErrnoException | null, bytesRead: number, buffers: NodeJS.ArrayBufferView[]) => void, + ): void; + export function readv( + fd: number, + buffers: readonly NodeJS.ArrayBufferView[], + position: number, + cb: (err: NodeJS.ErrnoException | null, bytesRead: number, buffers: NodeJS.ArrayBufferView[]) => void, + ): void; + export interface ReadVResult { + bytesRead: number; + buffers: NodeJS.ArrayBufferView[]; + } + export namespace readv { + function __promisify__( + fd: number, + buffers: readonly NodeJS.ArrayBufferView[], + position?: number, + ): Promise; + } + /** + * For detailed information, see the documentation of the asynchronous version of + * this API: {@link readv}. + * @since v13.13.0, v12.17.0 + * @param [position='null'] + * @return The number of bytes read. + */ + export function readvSync(fd: number, buffers: readonly NodeJS.ArrayBufferView[], position?: number): number; + + export interface OpenAsBlobOptions { + /** + * An optional mime type for the blob. + * + * @default 'undefined' + */ + type?: string | undefined; + } + + /** + * Returns a `Blob` whose data is backed by the given file. + * + * The file must not be modified after the `Blob` is created. Any modifications + * will cause reading the `Blob` data to fail with a `DOMException` error. + * Synchronous stat operations on the file when the `Blob` is created, and before + * each read in order to detect whether the file data has been modified on disk. + * + * ```js + * import { openAsBlob } from 'node:fs'; + * + * const blob = await openAsBlob('the.file.txt'); + * const ab = await blob.arrayBuffer(); + * blob.stream(); + * ``` + * @since v19.8.0 + * @experimental + */ + export function openAsBlob(path: PathLike, options?: OpenAsBlobOptions): Promise; + + export interface OpenDirOptions { + /** + * @default 'utf8' + */ + encoding?: BufferEncoding | undefined; + /** + * Number of directory entries that are buffered + * internally when reading from the directory. Higher values lead to better + * performance but higher memory usage. + * @default 32 + */ + bufferSize?: number | undefined; + /** + * @default false + */ + recursive?: boolean; + } + /** + * Synchronously open a directory. See [`opendir(3)`](http://man7.org/linux/man-pages/man3/opendir.3.html). + * + * Creates an `fs.Dir`, which contains all further functions for reading from + * and cleaning up the directory. + * + * The `encoding` option sets the encoding for the `path` while opening the + * directory and subsequent read operations. + * @since v12.12.0 + */ + export function opendirSync(path: PathLike, options?: OpenDirOptions): Dir; + /** + * Asynchronously open a directory. See the POSIX [`opendir(3)`](http://man7.org/linux/man-pages/man3/opendir.3.html) documentation for + * more details. + * + * Creates an `fs.Dir`, which contains all further functions for reading from + * and cleaning up the directory. 
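A minimal sketch of the callback form of `opendir()` declared above, async-iterating the resulting `fs.Dir`; `./some-dir` and the `bufferSize` value are assumptions:

```ts
// Sketch: listing a directory via opendir() and iterating the Dir it yields.
import { opendir } from "node:fs";

opendir("./some-dir", { bufferSize: 64 }, async (err, dir) => {
  if (err) throw err;
  for await (const entry of dir) {
    // Each entry is a Dirent; name and type checks are available directly.
    console.log(entry.isDirectory() ? `${entry.name}/` : entry.name);
  }
  // Iterating to completion closes the directory handle automatically.
});
```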
+ * + * The `encoding` option sets the encoding for the `path` while opening the + * directory and subsequent read operations. + * @since v12.12.0 + */ + export function opendir(path: PathLike, cb: (err: NodeJS.ErrnoException | null, dir: Dir) => void): void; + export function opendir( + path: PathLike, + options: OpenDirOptions, + cb: (err: NodeJS.ErrnoException | null, dir: Dir) => void, + ): void; + export namespace opendir { + function __promisify__(path: PathLike, options?: OpenDirOptions): Promise; + } + export interface BigIntStats extends StatsBase { + atimeNs: bigint; + mtimeNs: bigint; + ctimeNs: bigint; + birthtimeNs: bigint; + } + export interface BigIntOptions { + bigint: true; + } + export interface StatOptions { + bigint?: boolean | undefined; + } + export interface StatSyncOptions extends StatOptions { + throwIfNoEntry?: boolean | undefined; + } + interface CopyOptionsBase { + /** + * Dereference symlinks + * @default false + */ + dereference?: boolean; + /** + * When `force` is `false`, and the destination + * exists, throw an error. + * @default false + */ + errorOnExist?: boolean; + /** + * Overwrite existing file or directory. _The copy + * operation will ignore errors if you set this to false and the destination + * exists. Use the `errorOnExist` option to change this behavior. + * @default true + */ + force?: boolean; + /** + * Modifiers for copy operation. See `mode` flag of {@link copyFileSync()} + */ + mode?: number; + /** + * When `true` timestamps from `src` will + * be preserved. + * @default false + */ + preserveTimestamps?: boolean; + /** + * Copy directories recursively. + * @default false + */ + recursive?: boolean; + /** + * When true, path resolution for symlinks will be skipped + * @default false + */ + verbatimSymlinks?: boolean; + } + export interface CopyOptions extends CopyOptionsBase { + /** + * Function to filter copied files/directories. Return + * `true` to copy the item, `false` to ignore it. + */ + filter?(source: string, destination: string): boolean | Promise; + } + export interface CopySyncOptions extends CopyOptionsBase { + /** + * Function to filter copied files/directories. Return + * `true` to copy the item, `false` to ignore it. + */ + filter?(source: string, destination: string): boolean; + } + /** + * Asynchronously copies the entire directory structure from `src` to `dest`, + * including subdirectories and files. + * + * When copying a directory to another directory, globs are not supported and + * behavior is similar to `cp dir1/ dir2/`. + * @since v16.7.0 + * @experimental + * @param src source path to copy. + * @param dest destination path to copy to. + */ + export function cp( + source: string | URL, + destination: string | URL, + callback: (err: NodeJS.ErrnoException | null) => void, + ): void; + export function cp( + source: string | URL, + destination: string | URL, + opts: CopyOptions, + callback: (err: NodeJS.ErrnoException | null) => void, + ): void; + /** + * Synchronously copies the entire directory structure from `src` to `dest`, + * including subdirectories and files. + * + * When copying a directory to another directory, globs are not supported and + * behavior is similar to `cp dir1/ dir2/`. + * @since v16.7.0 + * @experimental + * @param src source path to copy. + * @param dest destination path to copy to. + */ + export function cpSync(source: string | URL, destination: string | URL, opts?: CopySyncOptions): void; + + interface GlobOptionsBase { + /** + * Current working directory. 
+ * @default process.cwd() + */ + cwd?: string | undefined; + /** + * `true` if the glob should return paths as `Dirent`s, `false` otherwise. + * @default false + * @since v22.2.0 + */ + withFileTypes?: boolean | undefined; + /** + * Function to filter out files/directories. Return true to exclude the item, false to include it. + */ + exclude?: ((fileName: any) => boolean) | undefined; + } + export interface GlobOptionsWithFileTypes extends GlobOptionsBase { + exclude?: ((fileName: Dirent) => boolean) | undefined; + withFileTypes: true; + } + export interface GlobOptionsWithoutFileTypes extends GlobOptionsBase { + exclude?: ((fileName: string) => boolean) | undefined; + withFileTypes?: false | undefined; + } + export interface GlobOptions extends GlobOptionsBase { + exclude?: ((fileName: Dirent | string) => boolean) | undefined; + } + + /** + * Retrieves the files matching the specified pattern. + */ + export function glob( + pattern: string | string[], + callback: (err: NodeJS.ErrnoException | null, matches: string[]) => void, + ): void; + export function glob( + pattern: string | string[], + options: GlobOptionsWithFileTypes, + callback: ( + err: NodeJS.ErrnoException | null, + matches: Dirent[], + ) => void, + ): void; + export function glob( + pattern: string | string[], + options: GlobOptionsWithoutFileTypes, + callback: ( + err: NodeJS.ErrnoException | null, + matches: string[], + ) => void, + ): void; + export function glob( + pattern: string | string[], + options: GlobOptions, + callback: ( + err: NodeJS.ErrnoException | null, + matches: Dirent[] | string[], + ) => void, + ): void; + /** + * Retrieves the files matching the specified pattern. + */ + export function globSync(pattern: string | string[]): string[]; + export function globSync( + pattern: string | string[], + options: GlobOptionsWithFileTypes, + ): Dirent[]; + export function globSync( + pattern: string | string[], + options: GlobOptionsWithoutFileTypes, + ): string[]; + export function globSync( + pattern: string | string[], + options: GlobOptions, + ): Dirent[] | string[]; +} +declare module "node:fs" { + export * from "fs"; +} diff --git a/node_modules/@types/node-fetch/node_modules/@types/node/fs/promises.d.ts b/node_modules/@types/node-fetch/node_modules/@types/node/fs/promises.d.ts new file mode 100644 index 0000000..6dc864a --- /dev/null +++ b/node_modules/@types/node-fetch/node_modules/@types/node/fs/promises.d.ts @@ -0,0 +1,1275 @@ +/** + * The `fs/promises` API provides asynchronous file system methods that return + * promises. + * + * The promise APIs use the underlying Node.js threadpool to perform file + * system operations off the event loop thread. These operations are not + * synchronized or threadsafe. Care must be taken when performing multiple + * concurrent modifications on the same file or data corruption may occur. 
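A minimal sketch of the caveat above: keep operations on the same file ordered by awaiting each one rather than issuing them concurrently; `counter.txt` is a hypothetical path:

```ts
// Sketch: fs/promises operations on the same file are not synchronized, so
// serialize them by awaiting each call instead of firing them concurrently.
import { appendFile } from "node:fs/promises";

async function appendLines(lines: string[]): Promise<void> {
  for (const line of lines) {
    // Awaiting here keeps the writes ordered; Promise.all() over appendFile
    // calls to the same path could interleave and corrupt the data.
    await appendFile("counter.txt", line + "\n");
  }
}

await appendLines(["one", "two", "three"]);
```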
+ * @since v10.0.0 + */ +declare module "fs/promises" { + import { Abortable } from "node:events"; + import { Stream } from "node:stream"; + import { ReadableStream } from "node:stream/web"; + import { + BigIntStats, + BigIntStatsFs, + BufferEncodingOption, + constants as fsConstants, + CopyOptions, + Dir, + Dirent, + GlobOptions, + GlobOptionsWithFileTypes, + GlobOptionsWithoutFileTypes, + MakeDirectoryOptions, + Mode, + ObjectEncodingOptions, + OpenDirOptions, + OpenMode, + PathLike, + ReadStream, + ReadVResult, + RmDirOptions, + RmOptions, + StatFsOptions, + StatOptions, + Stats, + StatsFs, + TimeLike, + WatchEventType, + WatchOptions, + WriteStream, + WriteVResult, + } from "node:fs"; + import { Interface as ReadlineInterface } from "node:readline"; + interface FileChangeInfo { + eventType: WatchEventType; + filename: T | null; + } + interface FlagAndOpenMode { + mode?: Mode | undefined; + flag?: OpenMode | undefined; + } + interface FileReadResult { + bytesRead: number; + buffer: T; + } + interface FileReadOptions { + /** + * @default `Buffer.alloc(0xffff)` + */ + buffer?: T; + /** + * @default 0 + */ + offset?: number | null; + /** + * @default `buffer.byteLength` + */ + length?: number | null; + position?: number | null; + } + interface CreateReadStreamOptions extends Abortable { + encoding?: BufferEncoding | null | undefined; + autoClose?: boolean | undefined; + emitClose?: boolean | undefined; + start?: number | undefined; + end?: number | undefined; + highWaterMark?: number | undefined; + } + interface CreateWriteStreamOptions { + encoding?: BufferEncoding | null | undefined; + autoClose?: boolean | undefined; + emitClose?: boolean | undefined; + start?: number | undefined; + highWaterMark?: number | undefined; + flush?: boolean | undefined; + } + interface ReadableWebStreamOptions { + /** + * Whether to open a normal or a `'bytes'` stream. + * @since v20.0.0 + */ + type?: "bytes" | undefined; + } + // TODO: Add `EventEmitter` close + interface FileHandle { + /** + * The numeric file descriptor managed by the {FileHandle} object. + * @since v10.0.0 + */ + readonly fd: number; + /** + * Alias of `filehandle.writeFile()`. + * + * When operating on file handles, the mode cannot be changed from what it was set + * to with `fsPromises.open()`. Therefore, this is equivalent to `filehandle.writeFile()`. + * @since v10.0.0 + * @return Fulfills with `undefined` upon success. + */ + appendFile( + data: string | Uint8Array, + options?: + | (ObjectEncodingOptions & FlagAndOpenMode & { flush?: boolean | undefined }) + | BufferEncoding + | null, + ): Promise; + /** + * Changes the ownership of the file. A wrapper for [`chown(2)`](http://man7.org/linux/man-pages/man2/chown.2.html). + * @since v10.0.0 + * @param uid The file's new owner's user id. + * @param gid The file's new group's group id. + * @return Fulfills with `undefined` upon success. + */ + chown(uid: number, gid: number): Promise; + /** + * Modifies the permissions on the file. See [`chmod(2)`](http://man7.org/linux/man-pages/man2/chmod.2.html). + * @since v10.0.0 + * @param mode the file mode bit mask. + * @return Fulfills with `undefined` upon success. + */ + chmod(mode: Mode): Promise; + /** + * Unlike the 16 KiB default `highWaterMark` for a `stream.Readable`, the stream + * returned by this method has a default `highWaterMark` of 64 KiB. + * + * `options` can include `start` and `end` values to read a range of bytes from + * the file instead of the entire file. 
Both `start` and `end` are inclusive and + * start counting at 0, allowed values are in the + * \[0, [`Number.MAX_SAFE_INTEGER`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Number/MAX_SAFE_INTEGER)\] range. If `start` is + * omitted or `undefined`, `filehandle.createReadStream()` reads sequentially from + * the current file position. The `encoding` can be any one of those accepted by `Buffer`. + * + * If the `FileHandle` points to a character device that only supports blocking + * reads (such as keyboard or sound card), read operations do not finish until data + * is available. This can prevent the process from exiting and the stream from + * closing naturally. + * + * By default, the stream will emit a `'close'` event after it has been + * destroyed. Set the `emitClose` option to `false` to change this behavior. + * + * ```js + * import { open } from 'node:fs/promises'; + * + * const fd = await open('/dev/input/event0'); + * // Create a stream from some character device. + * const stream = fd.createReadStream(); + * setTimeout(() => { + * stream.close(); // This may not close the stream. + * // Artificially marking end-of-stream, as if the underlying resource had + * // indicated end-of-file by itself, allows the stream to close. + * // This does not cancel pending read operations, and if there is such an + * // operation, the process may still not be able to exit successfully + * // until it finishes. + * stream.push(null); + * stream.read(0); + * }, 100); + * ``` + * + * If `autoClose` is false, then the file descriptor won't be closed, even if + * there's an error. It is the application's responsibility to close it and make + * sure there's no file descriptor leak. If `autoClose` is set to true (default + * behavior), on `'error'` or `'end'` the file descriptor will be closed + * automatically. + * + * An example to read the last 10 bytes of a file which is 100 bytes long: + * + * ```js + * import { open } from 'node:fs/promises'; + * + * const fd = await open('sample.txt'); + * fd.createReadStream({ start: 90, end: 99 }); + * ``` + * @since v16.11.0 + */ + createReadStream(options?: CreateReadStreamOptions): ReadStream; + /** + * `options` may also include a `start` option to allow writing data at some + * position past the beginning of the file, allowed values are in the + * \[0, [`Number.MAX_SAFE_INTEGER`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Number/MAX_SAFE_INTEGER)\] range. Modifying a file rather than + * replacing it may require the `flags` `open` option to be set to `r+` rather than + * the default `r`. The `encoding` can be any one of those accepted by `Buffer`. + * + * If `autoClose` is set to true (default behavior) on `'error'` or `'finish'` the file descriptor will be closed automatically. If `autoClose` is false, + * then the file descriptor won't be closed, even if there's an error. + * It is the application's responsibility to close it and make sure there's no + * file descriptor leak. + * + * By default, the stream will emit a `'close'` event after it has been + * destroyed. Set the `emitClose` option to `false` to change this behavior. + * @since v16.11.0 + */ + createWriteStream(options?: CreateWriteStreamOptions): WriteStream; + /** + * Forces all currently queued I/O operations associated with the file to the + * operating system's synchronized I/O completion state. Refer to the POSIX [`fdatasync(2)`](http://man7.org/linux/man-pages/man2/fdatasync.2.html) documentation for details. 
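A minimal sketch of `datasync()` after a write, as introduced above; `journal.log` is a hypothetical path:

```ts
// Sketch: flush file data (not metadata) to disk with datasync() after a write.
import { open } from "node:fs/promises";

const handle = await open("journal.log", "a");
try {
  await handle.write("checkpoint\n");
  await handle.datasync(); // the data is durable even if metadata is not flushed
} finally {
  await handle.close();
}
```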
+ * + * Unlike `filehandle.sync` this method does not flush modified metadata. + * @since v10.0.0 + * @return Fulfills with `undefined` upon success. + */ + datasync(): Promise; + /** + * Request that all data for the open file descriptor is flushed to the storage + * device. The specific implementation is operating system and device specific. + * Refer to the POSIX [`fsync(2)`](http://man7.org/linux/man-pages/man2/fsync.2.html) documentation for more detail. + * @since v10.0.0 + * @return Fulfills with `undefined` upon success. + */ + sync(): Promise; + /** + * Reads data from the file and stores that in the given buffer. + * + * If the file is not modified concurrently, the end-of-file is reached when the + * number of bytes read is zero. + * @since v10.0.0 + * @param buffer A buffer that will be filled with the file data read. + * @param offset The location in the buffer at which to start filling. + * @param length The number of bytes to read. + * @param position The location where to begin reading data from the file. If `null`, data will be read from the current file position, and the position will be updated. If `position` is an + * integer, the current file position will remain unchanged. + * @return Fulfills upon success with an object with two properties: + */ + read( + buffer: T, + offset?: number | null, + length?: number | null, + position?: number | null, + ): Promise>; + read( + buffer: T, + options?: FileReadOptions, + ): Promise>; + read(options?: FileReadOptions): Promise>; + /** + * Returns a `ReadableStream` that may be used to read the files data. + * + * An error will be thrown if this method is called more than once or is called + * after the `FileHandle` is closed or closing. + * + * ```js + * import { + * open, + * } from 'node:fs/promises'; + * + * const file = await open('./some/file/to/read'); + * + * for await (const chunk of file.readableWebStream()) + * console.log(chunk); + * + * await file.close(); + * ``` + * + * While the `ReadableStream` will read the file to completion, it will not + * close the `FileHandle` automatically. User code must still call the`fileHandle.close()` method. + * @since v17.0.0 + * @experimental + */ + readableWebStream(options?: ReadableWebStreamOptions): ReadableStream; + /** + * Asynchronously reads the entire contents of a file. + * + * If `options` is a string, then it specifies the `encoding`. + * + * The `FileHandle` has to support reading. + * + * If one or more `filehandle.read()` calls are made on a file handle and then a `filehandle.readFile()` call is made, the data will be read from the current + * position till the end of the file. It doesn't always read from the beginning + * of the file. + * @since v10.0.0 + * @return Fulfills upon a successful read with the contents of the file. If no encoding is specified (using `options.encoding`), the data is returned as a {Buffer} object. Otherwise, the + * data will be a string. + */ + readFile( + options?: { + encoding?: null | undefined; + flag?: OpenMode | undefined; + } | null, + ): Promise; + /** + * Asynchronously reads the entire contents of a file. The underlying file will _not_ be closed automatically. + * The `FileHandle` must have been opened for reading. + * @param options An object that may contain an optional flag. + * If a flag is not provided, it defaults to `'r'`. + */ + readFile( + options: + | { + encoding: BufferEncoding; + flag?: OpenMode | undefined; + } + | BufferEncoding, + ): Promise; + /** + * Asynchronously reads the entire contents of a file. 
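A minimal sketch of the `read()` overloads above, filling a preallocated buffer; `data.bin` and the 64-byte length are assumptions:

```ts
// Sketch: reading the first 64 bytes of a file into a preallocated buffer.
import { open } from "node:fs/promises";

const handle = await open("data.bin", "r");
try {
  const buffer = Buffer.alloc(64);
  // read(buffer, offset, length, position) resolves with { bytesRead, buffer }.
  const { bytesRead } = await handle.read(buffer, 0, buffer.length, 0);
  console.log(`read ${bytesRead} bytes:`, buffer.subarray(0, bytesRead));
} finally {
  await handle.close();
}
```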
The underlying file will _not_ be closed automatically. + * The `FileHandle` must have been opened for reading. + * @param options An object that may contain an optional flag. + * If a flag is not provided, it defaults to `'r'`. + */ + readFile( + options?: + | (ObjectEncodingOptions & { + flag?: OpenMode | undefined; + }) + | BufferEncoding + | null, + ): Promise; + /** + * Convenience method to create a `readline` interface and stream over the file. + * See `filehandle.createReadStream()` for the options. + * + * ```js + * import { open } from 'node:fs/promises'; + * + * const file = await open('./some/file/to/read'); + * + * for await (const line of file.readLines()) { + * console.log(line); + * } + * ``` + * @since v18.11.0 + */ + readLines(options?: CreateReadStreamOptions): ReadlineInterface; + /** + * @since v10.0.0 + * @return Fulfills with an {fs.Stats} for the file. + */ + stat( + opts?: StatOptions & { + bigint?: false | undefined; + }, + ): Promise; + stat( + opts: StatOptions & { + bigint: true; + }, + ): Promise; + stat(opts?: StatOptions): Promise; + /** + * Truncates the file. + * + * If the file was larger than `len` bytes, only the first `len` bytes will be + * retained in the file. + * + * The following example retains only the first four bytes of the file: + * + * ```js + * import { open } from 'node:fs/promises'; + * + * let filehandle = null; + * try { + * filehandle = await open('temp.txt', 'r+'); + * await filehandle.truncate(4); + * } finally { + * await filehandle?.close(); + * } + * ``` + * + * If the file previously was shorter than `len` bytes, it is extended, and the + * extended part is filled with null bytes (`'\0'`): + * + * If `len` is negative then `0` will be used. + * @since v10.0.0 + * @param [len=0] + * @return Fulfills with `undefined` upon success. + */ + truncate(len?: number): Promise; + /** + * Change the file system timestamps of the object referenced by the `FileHandle` then fulfills the promise with no arguments upon success. + * @since v10.0.0 + */ + utimes(atime: TimeLike, mtime: TimeLike): Promise; + /** + * Asynchronously writes data to a file, replacing the file if it already exists. `data` can be a string, a buffer, an + * [AsyncIterable](https://tc39.github.io/ecma262/#sec-asynciterable-interface), or an + * [Iterable](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols#The_iterable_protocol) object. + * The promise is fulfilled with no arguments upon success. + * + * If `options` is a string, then it specifies the `encoding`. + * + * The `FileHandle` has to support writing. + * + * It is unsafe to use `filehandle.writeFile()` multiple times on the same file + * without waiting for the promise to be fulfilled (or rejected). + * + * If one or more `filehandle.write()` calls are made on a file handle and then a`filehandle.writeFile()` call is made, the data will be written from the + * current position till the end of the file. It doesn't always write from the + * beginning of the file. + * @since v10.0.0 + */ + writeFile( + data: string | Uint8Array, + options?: + | (ObjectEncodingOptions & FlagAndOpenMode & Abortable & { flush?: boolean | undefined }) + | BufferEncoding + | null, + ): Promise; + /** + * Write `buffer` to the file. + * + * The promise is fulfilled with an object containing two properties: + * + * It is unsafe to use `filehandle.write()` multiple times on the same file + * without waiting for the promise to be fulfilled (or rejected). 
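A minimal sketch of the `writeFile()` and `stat()` members documented above; `settings.json` and its contents are hypothetical:

```ts
// Sketch: replace a file's contents through a FileHandle, then read back its size.
import { open } from "node:fs/promises";

const handle = await open("settings.json", "w");
try {
  await handle.writeFile(JSON.stringify({ retries: 3 }), "utf8");
  const { size } = await handle.stat();
  console.log(`wrote ${size} bytes`);
} finally {
  await handle.close();
}
```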
For this + * scenario, use `filehandle.createWriteStream()`. + * + * On Linux, positional writes do not work when the file is opened in append mode. + * The kernel ignores the position argument and always appends the data to + * the end of the file. + * @since v10.0.0 + * @param offset The start position from within `buffer` where the data to write begins. + * @param [length=buffer.byteLength - offset] The number of bytes from `buffer` to write. + * @param [position='null'] The offset from the beginning of the file where the data from `buffer` should be written. If `position` is not a `number`, the data will be written at the current + * position. See the POSIX pwrite(2) documentation for more detail. + */ + write( + buffer: TBuffer, + offset?: number | null, + length?: number | null, + position?: number | null, + ): Promise<{ + bytesWritten: number; + buffer: TBuffer; + }>; + write( + buffer: TBuffer, + options?: { offset?: number; length?: number; position?: number }, + ): Promise<{ + bytesWritten: number; + buffer: TBuffer; + }>; + write( + data: string, + position?: number | null, + encoding?: BufferEncoding | null, + ): Promise<{ + bytesWritten: number; + buffer: string; + }>; + /** + * Write an array of [ArrayBufferView](https://developer.mozilla.org/en-US/docs/Web/API/ArrayBufferView) s to the file. + * + * The promise is fulfilled with an object containing a two properties: + * + * It is unsafe to call `writev()` multiple times on the same file without waiting + * for the promise to be fulfilled (or rejected). + * + * On Linux, positional writes don't work when the file is opened in append mode. + * The kernel ignores the position argument and always appends the data to + * the end of the file. + * @since v12.9.0 + * @param [position='null'] The offset from the beginning of the file where the data from `buffers` should be written. If `position` is not a `number`, the data will be written at the current + * position. + */ + writev(buffers: readonly NodeJS.ArrayBufferView[], position?: number): Promise; + /** + * Read from a file and write to an array of [ArrayBufferView](https://developer.mozilla.org/en-US/docs/Web/API/ArrayBufferView) s + * @since v13.13.0, v12.17.0 + * @param [position='null'] The offset from the beginning of the file where the data should be read from. If `position` is not a `number`, the data will be read from the current position. + * @return Fulfills upon success an object containing two properties: + */ + readv(buffers: readonly NodeJS.ArrayBufferView[], position?: number): Promise; + /** + * Closes the file handle after waiting for any pending operation on the handle to + * complete. + * + * ```js + * import { open } from 'node:fs/promises'; + * + * let filehandle; + * try { + * filehandle = await open('thefile.txt', 'r'); + * } finally { + * await filehandle?.close(); + * } + * ``` + * @since v10.0.0 + * @return Fulfills with `undefined` upon success. + */ + close(): Promise; + /** + * An alias for {@link FileHandle.close()}. + * @since v20.4.0 + */ + [Symbol.asyncDispose](): Promise; + } + const constants: typeof fsConstants; + /** + * Tests a user's permissions for the file or directory specified by `path`. + * The `mode` argument is an optional integer that specifies the accessibility + * checks to be performed. `mode` should be either the value `fs.constants.F_OK` or a mask consisting of the bitwise OR of any of `fs.constants.R_OK`, `fs.constants.W_OK`, and `fs.constants.X_OK` + * (e.g.`fs.constants.W_OK | fs.constants.R_OK`). 
Check `File access constants` for + * possible values of `mode`. + * + * If the accessibility check is successful, the promise is fulfilled with no + * value. If any of the accessibility checks fail, the promise is rejected + * with an [Error](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Error) object. The following example checks if the file`/etc/passwd` can be read and + * written by the current process. + * + * ```js + * import { access, constants } from 'node:fs/promises'; + * + * try { + * await access('/etc/passwd', constants.R_OK | constants.W_OK); + * console.log('can access'); + * } catch { + * console.error('cannot access'); + * } + * ``` + * + * Using `fsPromises.access()` to check for the accessibility of a file before + * calling `fsPromises.open()` is not recommended. Doing so introduces a race + * condition, since other processes may change the file's state between the two + * calls. Instead, user code should open/read/write the file directly and handle + * the error raised if the file is not accessible. + * @since v10.0.0 + * @param [mode=fs.constants.F_OK] + * @return Fulfills with `undefined` upon success. + */ + function access(path: PathLike, mode?: number): Promise; + /** + * Asynchronously copies `src` to `dest`. By default, `dest` is overwritten if it + * already exists. + * + * No guarantees are made about the atomicity of the copy operation. If an + * error occurs after the destination file has been opened for writing, an attempt + * will be made to remove the destination. + * + * ```js + * import { copyFile, constants } from 'node:fs/promises'; + * + * try { + * await copyFile('source.txt', 'destination.txt'); + * console.log('source.txt was copied to destination.txt'); + * } catch { + * console.error('The file could not be copied'); + * } + * + * // By using COPYFILE_EXCL, the operation will fail if destination.txt exists. + * try { + * await copyFile('source.txt', 'destination.txt', constants.COPYFILE_EXCL); + * console.log('source.txt was copied to destination.txt'); + * } catch { + * console.error('The file could not be copied'); + * } + * ``` + * @since v10.0.0 + * @param src source filename to copy + * @param dest destination filename of the copy operation + * @param [mode=0] Optional modifiers that specify the behavior of the copy operation. It is possible to create a mask consisting of the bitwise OR of two or more values (e.g. + * `fs.constants.COPYFILE_EXCL | fs.constants.COPYFILE_FICLONE`) + * @return Fulfills with `undefined` upon success. + */ + function copyFile(src: PathLike, dest: PathLike, mode?: number): Promise; + /** + * Opens a `FileHandle`. + * + * Refer to the POSIX [`open(2)`](http://man7.org/linux/man-pages/man2/open.2.html) documentation for more detail. + * + * Some characters (`< > : " / \ | ? *`) are reserved under Windows as documented + * by [Naming Files, Paths, and Namespaces](https://docs.microsoft.com/en-us/windows/desktop/FileIO/naming-a-file). Under NTFS, if the filename contains + * a colon, Node.js will open a file system stream, as described by [this MSDN page](https://docs.microsoft.com/en-us/windows/desktop/FileIO/using-streams). + * @since v10.0.0 + * @param [flags='r'] See `support of file system `flags``. + * @param [mode=0o666] Sets the file mode (permission and sticky bits) if the file is created. + * @return Fulfills with a {FileHandle} object. + */ + function open(path: PathLike, flags?: string | number, mode?: Mode): Promise; + /** + * Renames `oldPath` to `newPath`. 
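+ *
+ * A minimal usage sketch (illustrative only; the file names below are assumptions,
+ * not part of the original documentation):
+ *
+ * ```js
+ * import { rename } from 'node:fs/promises';
+ *
+ * try {
+ *   // Assumes 'old-name.txt' exists; 'new-name.txt' is created or overwritten.
+ *   await rename('old-name.txt', 'new-name.txt');
+ * } catch (err) {
+ *   console.error(err.message);
+ * }
+ * ```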
+ * @since v10.0.0 + * @return Fulfills with `undefined` upon success. + */ + function rename(oldPath: PathLike, newPath: PathLike): Promise; + /** + * Truncates (shortens or extends the length) of the content at `path` to `len` bytes. + * @since v10.0.0 + * @param [len=0] + * @return Fulfills with `undefined` upon success. + */ + function truncate(path: PathLike, len?: number): Promise; + /** + * Removes the directory identified by `path`. + * + * Using `fsPromises.rmdir()` on a file (not a directory) results in the + * promise being rejected with an `ENOENT` error on Windows and an `ENOTDIR` error on POSIX. + * + * To get a behavior similar to the `rm -rf` Unix command, use `fsPromises.rm()` with options `{ recursive: true, force: true }`. + * @since v10.0.0 + * @return Fulfills with `undefined` upon success. + */ + function rmdir(path: PathLike, options?: RmDirOptions): Promise; + /** + * Removes files and directories (modeled on the standard POSIX `rm` utility). + * @since v14.14.0 + * @return Fulfills with `undefined` upon success. + */ + function rm(path: PathLike, options?: RmOptions): Promise; + /** + * Asynchronously creates a directory. + * + * The optional `options` argument can be an integer specifying `mode` (permission + * and sticky bits), or an object with a `mode` property and a `recursive` property indicating whether parent directories should be created. Calling `fsPromises.mkdir()` when `path` is a directory + * that exists results in a + * rejection only when `recursive` is false. + * + * ```js + * import { mkdir } from 'node:fs/promises'; + * + * try { + * const projectFolder = new URL('./test/project/', import.meta.url); + * const createDir = await mkdir(projectFolder, { recursive: true }); + * + * console.log(`created ${createDir}`); + * } catch (err) { + * console.error(err.message); + * } + * ``` + * @since v10.0.0 + * @return Upon success, fulfills with `undefined` if `recursive` is `false`, or the first directory path created if `recursive` is `true`. + */ + function mkdir( + path: PathLike, + options: MakeDirectoryOptions & { + recursive: true; + }, + ): Promise; + /** + * Asynchronous mkdir(2) - create a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options Either the file mode, or an object optionally specifying the file mode and whether parent folders + * should be created. If a string is passed, it is parsed as an octal integer. If not specified, defaults to `0o777`. + */ + function mkdir( + path: PathLike, + options?: + | Mode + | (MakeDirectoryOptions & { + recursive?: false | undefined; + }) + | null, + ): Promise; + /** + * Asynchronous mkdir(2) - create a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options Either the file mode, or an object optionally specifying the file mode and whether parent folders + * should be created. If a string is passed, it is parsed as an octal integer. If not specified, defaults to `0o777`. + */ + function mkdir(path: PathLike, options?: Mode | MakeDirectoryOptions | null): Promise; + /** + * Reads the contents of a directory. + * + * The optional `options` argument can be a string specifying an encoding, or an + * object with an `encoding` property specifying the character encoding to use for + * the filenames. If the `encoding` is set to `'buffer'`, the filenames returned + * will be passed as `Buffer` objects. 
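+ *
+ * A small illustrative sketch (the './src' path and the use of the `recursive`
+ * option are assumptions, not prescribed by this documentation):
+ *
+ * ```js
+ * import { readdir } from 'node:fs/promises';
+ *
+ * // Lists entries under './src', descending into subdirectories.
+ * const entries = await readdir('./src', { recursive: true });
+ * console.log(entries);
+ * ```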
+ * + * If `options.withFileTypes` is set to `true`, the returned array will contain `fs.Dirent` objects. + * + * ```js + * import { readdir } from 'node:fs/promises'; + * + * try { + * const files = await readdir(path); + * for (const file of files) + * console.log(file); + * } catch (err) { + * console.error(err); + * } + * ``` + * @since v10.0.0 + * @return Fulfills with an array of the names of the files in the directory excluding `'.'` and `'..'`. + */ + function readdir( + path: PathLike, + options?: + | (ObjectEncodingOptions & { + withFileTypes?: false | undefined; + recursive?: boolean | undefined; + }) + | BufferEncoding + | null, + ): Promise; + /** + * Asynchronous readdir(3) - read a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function readdir( + path: PathLike, + options: + | { + encoding: "buffer"; + withFileTypes?: false | undefined; + recursive?: boolean | undefined; + } + | "buffer", + ): Promise; + /** + * Asynchronous readdir(3) - read a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function readdir( + path: PathLike, + options?: + | (ObjectEncodingOptions & { + withFileTypes?: false | undefined; + recursive?: boolean | undefined; + }) + | BufferEncoding + | null, + ): Promise; + /** + * Asynchronous readdir(3) - read a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options If called with `withFileTypes: true` the result data will be an array of Dirent. + */ + function readdir( + path: PathLike, + options: ObjectEncodingOptions & { + withFileTypes: true; + recursive?: boolean | undefined; + }, + ): Promise; + /** + * Reads the contents of the symbolic link referred to by `path`. See the POSIX [`readlink(2)`](http://man7.org/linux/man-pages/man2/readlink.2.html) documentation for more detail. The promise is + * fulfilled with the`linkString` upon success. + * + * The optional `options` argument can be a string specifying an encoding, or an + * object with an `encoding` property specifying the character encoding to use for + * the link path returned. If the `encoding` is set to `'buffer'`, the link path + * returned will be passed as a `Buffer` object. + * @since v10.0.0 + * @return Fulfills with the `linkString` upon success. + */ + function readlink(path: PathLike, options?: ObjectEncodingOptions | BufferEncoding | null): Promise; + /** + * Asynchronous readlink(2) - read value of a symbolic link. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function readlink(path: PathLike, options: BufferEncodingOption): Promise; + /** + * Asynchronous readlink(2) - read value of a symbolic link. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. 
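+ *
+ * A brief illustrative sketch (the link and target names are assumptions; creating
+ * symbolic links may require elevated privileges on Windows):
+ *
+ * ```js
+ * import { readlink, symlink } from 'node:fs/promises';
+ *
+ * await symlink('target.txt', 'link-to-target');
+ * console.log(await readlink('link-to-target')); // 'target.txt'
+ * ```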
+ */ + function readlink(path: PathLike, options?: ObjectEncodingOptions | string | null): Promise; + /** + * Creates a symbolic link. + * + * The `type` argument is only used on Windows platforms and can be one of `'dir'`, `'file'`, or `'junction'`. If the `type` argument is not a string, Node.js will + * autodetect `target` type and use `'file'` or `'dir'`. If the `target` does not + * exist, `'file'` will be used. Windows junction points require the destination + * path to be absolute. When using `'junction'`, the `target` argument will + * automatically be normalized to absolute path. Junction points on NTFS volumes + * can only point to directories. + * @since v10.0.0 + * @param [type='null'] + * @return Fulfills with `undefined` upon success. + */ + function symlink(target: PathLike, path: PathLike, type?: string | null): Promise; + /** + * Equivalent to `fsPromises.stat()` unless `path` refers to a symbolic link, + * in which case the link itself is stat-ed, not the file that it refers to. + * Refer to the POSIX [`lstat(2)`](http://man7.org/linux/man-pages/man2/lstat.2.html) document for more detail. + * @since v10.0.0 + * @return Fulfills with the {fs.Stats} object for the given symbolic link `path`. + */ + function lstat( + path: PathLike, + opts?: StatOptions & { + bigint?: false | undefined; + }, + ): Promise; + function lstat( + path: PathLike, + opts: StatOptions & { + bigint: true; + }, + ): Promise; + function lstat(path: PathLike, opts?: StatOptions): Promise; + /** + * @since v10.0.0 + * @return Fulfills with the {fs.Stats} object for the given `path`. + */ + function stat( + path: PathLike, + opts?: StatOptions & { + bigint?: false | undefined; + }, + ): Promise; + function stat( + path: PathLike, + opts: StatOptions & { + bigint: true; + }, + ): Promise; + function stat(path: PathLike, opts?: StatOptions): Promise; + /** + * @since v19.6.0, v18.15.0 + * @return Fulfills with the {fs.StatFs} object for the given `path`. + */ + function statfs( + path: PathLike, + opts?: StatFsOptions & { + bigint?: false | undefined; + }, + ): Promise; + function statfs( + path: PathLike, + opts: StatFsOptions & { + bigint: true; + }, + ): Promise; + function statfs(path: PathLike, opts?: StatFsOptions): Promise; + /** + * Creates a new link from the `existingPath` to the `newPath`. See the POSIX [`link(2)`](http://man7.org/linux/man-pages/man2/link.2.html) documentation for more detail. + * @since v10.0.0 + * @return Fulfills with `undefined` upon success. + */ + function link(existingPath: PathLike, newPath: PathLike): Promise; + /** + * If `path` refers to a symbolic link, then the link is removed without affecting + * the file or directory to which that link refers. If the `path` refers to a file + * path that is not a symbolic link, the file is deleted. See the POSIX [`unlink(2)`](http://man7.org/linux/man-pages/man2/unlink.2.html) documentation for more detail. + * @since v10.0.0 + * @return Fulfills with `undefined` upon success. + */ + function unlink(path: PathLike): Promise; + /** + * Changes the permissions of a file. + * @since v10.0.0 + * @return Fulfills with `undefined` upon success. + */ + function chmod(path: PathLike, mode: Mode): Promise; + /** + * Changes the permissions on a symbolic link. + * + * This method is only implemented on macOS. + * @deprecated Since v10.0.0 + * @return Fulfills with `undefined` upon success. + */ + function lchmod(path: PathLike, mode: Mode): Promise; + /** + * Changes the ownership on a symbolic link. 
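+ *
+ * A minimal sketch (the path, uid, and gid values are placeholders; changing
+ * ownership normally requires appropriate privileges):
+ *
+ * ```js
+ * import { lchown } from 'node:fs/promises';
+ *
+ * // Changes the owner of the link itself, not of the file it points to.
+ * await lchown('./link-to-data', 1000, 1000);
+ * ```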
+ * @since v10.0.0 + * @return Fulfills with `undefined` upon success. + */ + function lchown(path: PathLike, uid: number, gid: number): Promise; + /** + * Changes the access and modification times of a file in the same way as `fsPromises.utimes()`, with the difference that if the path refers to a + * symbolic link, then the link is not dereferenced: instead, the timestamps of + * the symbolic link itself are changed. + * @since v14.5.0, v12.19.0 + * @return Fulfills with `undefined` upon success. + */ + function lutimes(path: PathLike, atime: TimeLike, mtime: TimeLike): Promise; + /** + * Changes the ownership of a file. + * @since v10.0.0 + * @return Fulfills with `undefined` upon success. + */ + function chown(path: PathLike, uid: number, gid: number): Promise; + /** + * Change the file system timestamps of the object referenced by `path`. + * + * The `atime` and `mtime` arguments follow these rules: + * + * * Values can be either numbers representing Unix epoch time, `Date`s, or a + * numeric string like `'123456789.0'`. + * * If the value can not be converted to a number, or is `NaN`, `Infinity`, or `-Infinity`, an `Error` will be thrown. + * @since v10.0.0 + * @return Fulfills with `undefined` upon success. + */ + function utimes(path: PathLike, atime: TimeLike, mtime: TimeLike): Promise; + /** + * Determines the actual location of `path` using the same semantics as the `fs.realpath.native()` function. + * + * Only paths that can be converted to UTF8 strings are supported. + * + * The optional `options` argument can be a string specifying an encoding, or an + * object with an `encoding` property specifying the character encoding to use for + * the path. If the `encoding` is set to `'buffer'`, the path returned will be + * passed as a `Buffer` object. + * + * On Linux, when Node.js is linked against musl libc, the procfs file system must + * be mounted on `/proc` in order for this function to work. Glibc does not have + * this restriction. + * @since v10.0.0 + * @return Fulfills with the resolved path upon success. + */ + function realpath(path: PathLike, options?: ObjectEncodingOptions | BufferEncoding | null): Promise; + /** + * Asynchronous realpath(3) - return the canonicalized absolute pathname. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function realpath(path: PathLike, options: BufferEncodingOption): Promise; + /** + * Asynchronous realpath(3) - return the canonicalized absolute pathname. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function realpath( + path: PathLike, + options?: ObjectEncodingOptions | BufferEncoding | null, + ): Promise; + /** + * Creates a unique temporary directory. A unique directory name is generated by + * appending six random characters to the end of the provided `prefix`. Due to + * platform inconsistencies, avoid trailing `X` characters in `prefix`. Some + * platforms, notably the BSDs, can return more than six random characters, and + * replace trailing `X` characters in `prefix` with random characters. 
+ * + * The optional `options` argument can be a string specifying an encoding, or an + * object with an `encoding` property specifying the character encoding to use. + * + * ```js + * import { mkdtemp } from 'node:fs/promises'; + * import { join } from 'node:path'; + * import { tmpdir } from 'node:os'; + * + * try { + * await mkdtemp(join(tmpdir(), 'foo-')); + * } catch (err) { + * console.error(err); + * } + * ``` + * + * The `fsPromises.mkdtemp()` method will append the six randomly selected + * characters directly to the `prefix` string. For instance, given a directory `/tmp`, if the intention is to create a temporary directory _within_ `/tmp`, the `prefix` must end with a trailing + * platform-specific path separator + * (`import { sep } from 'node:path'`). + * @since v10.0.0 + * @return Fulfills with a string containing the file system path of the newly created temporary directory. + */ + function mkdtemp(prefix: string, options?: ObjectEncodingOptions | BufferEncoding | null): Promise; + /** + * Asynchronously creates a unique temporary directory. + * Generates six random characters to be appended behind a required `prefix` to create a unique temporary directory. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function mkdtemp(prefix: string, options: BufferEncodingOption): Promise; + /** + * Asynchronously creates a unique temporary directory. + * Generates six random characters to be appended behind a required `prefix` to create a unique temporary directory. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function mkdtemp(prefix: string, options?: ObjectEncodingOptions | BufferEncoding | null): Promise; + /** + * Asynchronously writes data to a file, replacing the file if it already exists. `data` can be a string, a buffer, an + * [AsyncIterable](https://tc39.github.io/ecma262/#sec-asynciterable-interface), or an + * [Iterable](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols#The_iterable_protocol) object. + * + * The `encoding` option is ignored if `data` is a buffer. + * + * If `options` is a string, then it specifies the encoding. + * + * The `mode` option only affects the newly created file. See `fs.open()` for more details. + * + * Any specified `FileHandle` has to support writing. + * + * It is unsafe to use `fsPromises.writeFile()` multiple times on the same file + * without waiting for the promise to be settled. + * + * Similarly to `fsPromises.readFile` \- `fsPromises.writeFile` is a convenience + * method that performs multiple `write` calls internally to write the buffer + * passed to it. For performance sensitive code consider using `fs.createWriteStream()` or `filehandle.createWriteStream()`. + * + * It is possible to use an `AbortSignal` to cancel an `fsPromises.writeFile()`. + * Cancelation is "best effort", and some amount of data is likely still + * to be written. + * + * ```js + * import { writeFile } from 'node:fs/promises'; + * import { Buffer } from 'node:buffer'; + * + * try { + * const controller = new AbortController(); + * const { signal } = controller; + * const data = new Uint8Array(Buffer.from('Hello Node.js')); + * const promise = writeFile('message.txt', data, { signal }); + * + * // Abort the request before the promise settles. 
+ * controller.abort(); + * + * await promise; + * } catch (err) { + * // When a request is aborted - err is an AbortError + * console.error(err); + * } + * ``` + * + * Aborting an ongoing request does not abort individual operating + * system requests but rather the internal buffering `fs.writeFile` performs. + * @since v10.0.0 + * @param file filename or `FileHandle` + * @return Fulfills with `undefined` upon success. + */ + function writeFile( + file: PathLike | FileHandle, + data: + | string + | NodeJS.ArrayBufferView + | Iterable + | AsyncIterable + | Stream, + options?: + | (ObjectEncodingOptions & { + mode?: Mode | undefined; + flag?: OpenMode | undefined; + /** + * If all data is successfully written to the file, and `flush` + * is `true`, `filehandle.sync()` is used to flush the data. + * @default false + */ + flush?: boolean | undefined; + } & Abortable) + | BufferEncoding + | null, + ): Promise; + /** + * Asynchronously append data to a file, creating the file if it does not yet + * exist. `data` can be a string or a `Buffer`. + * + * If `options` is a string, then it specifies the `encoding`. + * + * The `mode` option only affects the newly created file. See `fs.open()` for more details. + * + * The `path` may be specified as a `FileHandle` that has been opened + * for appending (using `fsPromises.open()`). + * @since v10.0.0 + * @param path filename or {FileHandle} + * @return Fulfills with `undefined` upon success. + */ + function appendFile( + path: PathLike | FileHandle, + data: string | Uint8Array, + options?: (ObjectEncodingOptions & FlagAndOpenMode & { flush?: boolean | undefined }) | BufferEncoding | null, + ): Promise; + /** + * Asynchronously reads the entire contents of a file. + * + * If no encoding is specified (using `options.encoding`), the data is returned + * as a `Buffer` object. Otherwise, the data will be a string. + * + * If `options` is a string, then it specifies the encoding. + * + * When the `path` is a directory, the behavior of `fsPromises.readFile()` is + * platform-specific. On macOS, Linux, and Windows, the promise will be rejected + * with an error. On FreeBSD, a representation of the directory's contents will be + * returned. + * + * An example of reading a `package.json` file located in the same directory of the + * running code: + * + * ```js + * import { readFile } from 'node:fs/promises'; + * try { + * const filePath = new URL('./package.json', import.meta.url); + * const contents = await readFile(filePath, { encoding: 'utf8' }); + * console.log(contents); + * } catch (err) { + * console.error(err.message); + * } + * ``` + * + * It is possible to abort an ongoing `readFile` using an `AbortSignal`. If a + * request is aborted the promise returned is rejected with an `AbortError`: + * + * ```js + * import { readFile } from 'node:fs/promises'; + * + * try { + * const controller = new AbortController(); + * const { signal } = controller; + * const promise = readFile(fileName, { signal }); + * + * // Abort the request before the promise settles. + * controller.abort(); + * + * await promise; + * } catch (err) { + * // When a request is aborted - err is an AbortError + * console.error(err); + * } + * ``` + * + * Aborting an ongoing request does not abort individual operating + * system requests but rather the internal buffering `fs.readFile` performs. + * + * Any specified `FileHandle` has to support reading. + * @since v10.0.0 + * @param path filename or `FileHandle` + * @return Fulfills with the contents of the file. 
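+ *
+ * A short additional sketch, reading through an already-open `FileHandle` (the file
+ * name is an assumption; note that the handle is not closed automatically):
+ *
+ * ```js
+ * import { open, readFile } from 'node:fs/promises';
+ *
+ * const fd = await open('./notes.txt', 'r');
+ * try {
+ *   const contents = await readFile(fd, { encoding: 'utf8' });
+ *   console.log(contents);
+ * } finally {
+ *   await fd.close();
+ * }
+ * ```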
+ */ + function readFile( + path: PathLike | FileHandle, + options?: + | ({ + encoding?: null | undefined; + flag?: OpenMode | undefined; + } & Abortable) + | null, + ): Promise; + /** + * Asynchronously reads the entire contents of a file. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * If a `FileHandle` is provided, the underlying file will _not_ be closed automatically. + * @param options An object that may contain an optional flag. + * If a flag is not provided, it defaults to `'r'`. + */ + function readFile( + path: PathLike | FileHandle, + options: + | ({ + encoding: BufferEncoding; + flag?: OpenMode | undefined; + } & Abortable) + | BufferEncoding, + ): Promise; + /** + * Asynchronously reads the entire contents of a file. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * If a `FileHandle` is provided, the underlying file will _not_ be closed automatically. + * @param options An object that may contain an optional flag. + * If a flag is not provided, it defaults to `'r'`. + */ + function readFile( + path: PathLike | FileHandle, + options?: + | ( + & ObjectEncodingOptions + & Abortable + & { + flag?: OpenMode | undefined; + } + ) + | BufferEncoding + | null, + ): Promise; + /** + * Asynchronously open a directory for iterative scanning. See the POSIX [`opendir(3)`](http://man7.org/linux/man-pages/man3/opendir.3.html) documentation for more detail. + * + * Creates an `fs.Dir`, which contains all further functions for reading from + * and cleaning up the directory. + * + * The `encoding` option sets the encoding for the `path` while opening the + * directory and subsequent read operations. + * + * Example using async iteration: + * + * ```js + * import { opendir } from 'node:fs/promises'; + * + * try { + * const dir = await opendir('./'); + * for await (const dirent of dir) + * console.log(dirent.name); + * } catch (err) { + * console.error(err); + * } + * ``` + * + * When using the async iterator, the `fs.Dir` object will be automatically + * closed after the iterator exits. + * @since v12.12.0 + * @return Fulfills with an {fs.Dir}. + */ + function opendir(path: PathLike, options?: OpenDirOptions): Promise; + /** + * Returns an async iterator that watches for changes on `filename`, where `filename`is either a file or a directory. + * + * ```js + * import { watch } from 'node:fs/promises'; + * + * const ac = new AbortController(); + * const { signal } = ac; + * setTimeout(() => ac.abort(), 10000); + * + * (async () => { + * try { + * const watcher = watch(__filename, { signal }); + * for await (const event of watcher) + * console.log(event); + * } catch (err) { + * if (err.name === 'AbortError') + * return; + * throw err; + * } + * })(); + * ``` + * + * On most platforms, `'rename'` is emitted whenever a filename appears or + * disappears in the directory. + * + * All the `caveats` for `fs.watch()` also apply to `fsPromises.watch()`. + * @since v15.9.0, v14.18.0 + * @return of objects with the properties: + */ + function watch( + filename: PathLike, + options: + | (WatchOptions & { + encoding: "buffer"; + }) + | "buffer", + ): AsyncIterable>; + /** + * Watch for changes on `filename`, where `filename` is either a file or a directory, returning an `FSWatcher`. + * @param filename A path to a file or directory. If a URL is provided, it must use the `file:` protocol. 
+ * @param options Either the encoding for the filename provided to the listener, or an object optionally specifying encoding, persistent, and recursive options. + * If `encoding` is not supplied, the default of `'utf8'` is used. + * If `persistent` is not supplied, the default of `true` is used. + * If `recursive` is not supplied, the default of `false` is used. + */ + function watch(filename: PathLike, options?: WatchOptions | BufferEncoding): AsyncIterable>; + /** + * Watch for changes on `filename`, where `filename` is either a file or a directory, returning an `FSWatcher`. + * @param filename A path to a file or directory. If a URL is provided, it must use the `file:` protocol. + * @param options Either the encoding for the filename provided to the listener, or an object optionally specifying encoding, persistent, and recursive options. + * If `encoding` is not supplied, the default of `'utf8'` is used. + * If `persistent` is not supplied, the default of `true` is used. + * If `recursive` is not supplied, the default of `false` is used. + */ + function watch( + filename: PathLike, + options: WatchOptions | string, + ): AsyncIterable> | AsyncIterable>; + /** + * Asynchronously copies the entire directory structure from `src` to `dest`, + * including subdirectories and files. + * + * When copying a directory to another directory, globs are not supported and + * behavior is similar to `cp dir1/ dir2/`. + * @since v16.7.0 + * @experimental + * @param src source path to copy. + * @param dest destination path to copy to. + * @return Fulfills with `undefined` upon success. + */ + function cp(source: string | URL, destination: string | URL, opts?: CopyOptions): Promise; + /** + * Retrieves the files matching the specified pattern. + */ + function glob(pattern: string | string[]): NodeJS.AsyncIterator; + function glob( + pattern: string | string[], + opt: GlobOptionsWithFileTypes, + ): NodeJS.AsyncIterator; + function glob( + pattern: string | string[], + opt: GlobOptionsWithoutFileTypes, + ): NodeJS.AsyncIterator; + function glob( + pattern: string | string[], + opt: GlobOptions, + ): NodeJS.AsyncIterator; +} +declare module "node:fs/promises" { + export * from "fs/promises"; +} diff --git a/node_modules/@types/node-fetch/node_modules/@types/node/globals.d.ts b/node_modules/@types/node-fetch/node_modules/@types/node/globals.d.ts new file mode 100644 index 0000000..e223aed --- /dev/null +++ b/node_modules/@types/node-fetch/node_modules/@types/node/globals.d.ts @@ -0,0 +1,566 @@ +export {}; // Make this a module + +// #region Fetch and friends +// Conditional type aliases, used at the end of this file. +// Will either be empty if lib.dom (or lib.webworker) is included, or the undici version otherwise. +type _Request = typeof globalThis extends { onmessage: any } ? {} : import("undici-types").Request; +type _Response = typeof globalThis extends { onmessage: any } ? {} : import("undici-types").Response; +type _FormData = typeof globalThis extends { onmessage: any } ? {} : import("undici-types").FormData; +type _Headers = typeof globalThis extends { onmessage: any } ? {} : import("undici-types").Headers; +type _MessageEvent = typeof globalThis extends { onmessage: any } ? {} : import("undici-types").MessageEvent; +type _RequestInit = typeof globalThis extends { onmessage: any } ? {} + : import("undici-types").RequestInit; +type _ResponseInit = typeof globalThis extends { onmessage: any } ? {} + : import("undici-types").ResponseInit; +type _WebSocket = typeof globalThis extends { onmessage: any } ? 
{} : import("undici-types").WebSocket; +type _EventSource = typeof globalThis extends { onmessage: any } ? {} : import("undici-types").EventSource; +// #endregion Fetch and friends + +// Conditional type definitions for webstorage interface, which conflicts with lib.dom otherwise. +type _Storage = typeof globalThis extends { onabort: any } ? {} : { + /** + * Returns the number of key/value pairs. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Storage/length) + */ + readonly length: number; + /** + * Removes all key/value pairs, if there are any. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Storage/clear) + */ + clear(): void; + /** + * Returns the current value associated with the given key, or null if the given key does not exist. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Storage/getItem) + */ + getItem(key: string): string | null; + /** + * Returns the name of the nth key, or null if n is greater than or equal to the number of key/value pairs. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Storage/key) + */ + key(index: number): string | null; + /** + * Removes the key/value pair with the given key, if a key/value pair with the given key exists. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Storage/removeItem) + */ + removeItem(key: string): void; + /** + * Sets the value of the pair identified by key to value, creating a new key/value pair if none existed for key previously. + * + * Throws a "QuotaExceededError" DOMException exception if the new value couldn't be set. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Storage/setItem) + */ + setItem(key: string, value: string): void; + [key: string]: any; +}; + +// #region DOMException +type _DOMException = typeof globalThis extends { onmessage: any } ? 
{} : NodeDOMException; +interface NodeDOMException extends Error { + /** + * @deprecated + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/DOMException/code) + */ + readonly code: number; + /** [MDN Reference](https://developer.mozilla.org/docs/Web/API/DOMException/message) */ + readonly message: string; + /** [MDN Reference](https://developer.mozilla.org/docs/Web/API/DOMException/name) */ + readonly name: string; + readonly INDEX_SIZE_ERR: 1; + readonly DOMSTRING_SIZE_ERR: 2; + readonly HIERARCHY_REQUEST_ERR: 3; + readonly WRONG_DOCUMENT_ERR: 4; + readonly INVALID_CHARACTER_ERR: 5; + readonly NO_DATA_ALLOWED_ERR: 6; + readonly NO_MODIFICATION_ALLOWED_ERR: 7; + readonly NOT_FOUND_ERR: 8; + readonly NOT_SUPPORTED_ERR: 9; + readonly INUSE_ATTRIBUTE_ERR: 10; + readonly INVALID_STATE_ERR: 11; + readonly SYNTAX_ERR: 12; + readonly INVALID_MODIFICATION_ERR: 13; + readonly NAMESPACE_ERR: 14; + readonly INVALID_ACCESS_ERR: 15; + readonly VALIDATION_ERR: 16; + readonly TYPE_MISMATCH_ERR: 17; + readonly SECURITY_ERR: 18; + readonly NETWORK_ERR: 19; + readonly ABORT_ERR: 20; + readonly URL_MISMATCH_ERR: 21; + readonly QUOTA_EXCEEDED_ERR: 22; + readonly TIMEOUT_ERR: 23; + readonly INVALID_NODE_TYPE_ERR: 24; + readonly DATA_CLONE_ERR: 25; +} +interface NodeDOMExceptionConstructor { + prototype: DOMException; + new(message?: string, nameOrOptions?: string | { name?: string; cause?: unknown }): DOMException; + readonly INDEX_SIZE_ERR: 1; + readonly DOMSTRING_SIZE_ERR: 2; + readonly HIERARCHY_REQUEST_ERR: 3; + readonly WRONG_DOCUMENT_ERR: 4; + readonly INVALID_CHARACTER_ERR: 5; + readonly NO_DATA_ALLOWED_ERR: 6; + readonly NO_MODIFICATION_ALLOWED_ERR: 7; + readonly NOT_FOUND_ERR: 8; + readonly NOT_SUPPORTED_ERR: 9; + readonly INUSE_ATTRIBUTE_ERR: 10; + readonly INVALID_STATE_ERR: 11; + readonly SYNTAX_ERR: 12; + readonly INVALID_MODIFICATION_ERR: 13; + readonly NAMESPACE_ERR: 14; + readonly INVALID_ACCESS_ERR: 15; + readonly VALIDATION_ERR: 16; + readonly TYPE_MISMATCH_ERR: 17; + readonly SECURITY_ERR: 18; + readonly NETWORK_ERR: 19; + readonly ABORT_ERR: 20; + readonly URL_MISMATCH_ERR: 21; + readonly QUOTA_EXCEEDED_ERR: 22; + readonly TIMEOUT_ERR: 23; + readonly INVALID_NODE_TYPE_ERR: 24; + readonly DATA_CLONE_ERR: 25; +} +// #endregion DOMException + +declare global { + // Declare "static" methods in Error + interface ErrorConstructor { + /** Create .stack property on a target object */ + captureStackTrace(targetObject: object, constructorOpt?: Function): void; + + /** + * Optional override for formatting stack traces + * + * @see https://v8.dev/docs/stack-trace-api#customizing-stack-traces + */ + prepareStackTrace?: ((err: Error, stackTraces: NodeJS.CallSite[]) => any) | undefined; + + stackTraceLimit: number; + } + + /*-----------------------------------------------* + * * + * GLOBAL * + * * + ------------------------------------------------*/ + + // For backwards compability + interface NodeRequire extends NodeJS.Require {} + interface RequireResolve extends NodeJS.RequireResolve {} + interface NodeModule extends NodeJS.Module {} + + var global: typeof globalThis; + + var process: NodeJS.Process; + var console: Console; + + var __filename: string; + var __dirname: string; + + var require: NodeRequire; + var module: NodeModule; + + // Same as module.exports + var exports: any; + + interface GCFunction { + (options: { + execution?: "sync"; + flavor?: "regular" | "last-resort"; + type?: "major-snapshot" | "major" | "minor"; + filename?: string; + }): void; + (options: { + execution: 
"async"; + flavor?: "regular" | "last-resort"; + type?: "major-snapshot" | "major" | "minor"; + filename?: string; + }): Promise; + (options?: boolean): void; + } + + /** + * Only available if `--expose-gc` is passed to the process. + */ + var gc: undefined | GCFunction; + + // #region borrowed + // from https://github.com/microsoft/TypeScript/blob/38da7c600c83e7b31193a62495239a0fe478cb67/lib/lib.webworker.d.ts#L633 until moved to separate lib + /** A controller object that allows you to abort one or more DOM requests as and when desired. */ + interface AbortController { + /** + * Returns the AbortSignal object associated with this object. + */ + + readonly signal: AbortSignal; + /** + * Invoking this method will set this object's AbortSignal's aborted flag and signal to any observers that the associated activity is to be aborted. + */ + abort(reason?: any): void; + } + + /** A signal object that allows you to communicate with a DOM request (such as a Fetch) and abort it if required via an AbortController object. */ + interface AbortSignal extends EventTarget { + /** + * Returns true if this AbortSignal's AbortController has signaled to abort, and false otherwise. + */ + readonly aborted: boolean; + readonly reason: any; + onabort: null | ((this: AbortSignal, event: Event) => any); + throwIfAborted(): void; + } + + var AbortController: typeof globalThis extends { onmessage: any; AbortController: infer T } ? T + : { + prototype: AbortController; + new(): AbortController; + }; + + var AbortSignal: typeof globalThis extends { onmessage: any; AbortSignal: infer T } ? T + : { + prototype: AbortSignal; + new(): AbortSignal; + abort(reason?: any): AbortSignal; + timeout(milliseconds: number): AbortSignal; + any(signals: AbortSignal[]): AbortSignal; + }; + // #endregion borrowed + + // #region Storage + /** + * This Web Storage API interface provides access to a particular domain's session or local storage. It allows, for example, the addition, modification, or deletion of stored data items. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Storage) + */ + interface Storage extends _Storage {} + + // Conditional on `onabort` rather than `onmessage`, in order to exclude lib.webworker + var Storage: typeof globalThis extends { onabort: any; Storage: infer T } ? T + : { + prototype: Storage; + new(): Storage; + }; + + /** + * A browser-compatible implementation of [`localStorage`](https://developer.mozilla.org/en-US/docs/Web/API/Window/localStorage). + * Data is stored unencrypted in the file specified by the `--localstorage-file` CLI flag. + * Any modification of this data outside of the Web Storage API is not supported. + * Enable this API with the `--experimental-webstorage` CLI flag. + * @since v22.4.0 + */ + var localStorage: Storage; + + /** + * A browser-compatible implementation of [`sessionStorage`](https://developer.mozilla.org/en-US/docs/Web/API/Window/sessionStorage). + * Data is stored in memory, with a storage quota of 10 MB. + * Any modification of this data outside of the Web Storage API is not supported. + * Enable this API with the `--experimental-webstorage` CLI flag. + * @since v22.4.0 + */ + var sessionStorage: Storage; + // #endregion Storage + + /** + * @since v17.0.0 + * + * Creates a deep clone of an object. 
+ */ + function structuredClone( + value: T, + transfer?: { transfer: ReadonlyArray }, + ): T; + + // #region DOMException + /** + * @since v17.0.0 + * An abnormal event (called an exception) which occurs as a result of calling a method or accessing a property of a web API. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/DOMException) + */ + interface DOMException extends _DOMException {} + + /** + * @since v17.0.0 + * + * The WHATWG `DOMException` class. See [DOMException](https://developer.mozilla.org/docs/Web/API/DOMException) for more details. + */ + var DOMException: typeof globalThis extends { onmessage: any; DOMException: infer T } ? T + : NodeDOMExceptionConstructor; + // #endregion DOMException + + /*----------------------------------------------* + * * + * GLOBAL INTERFACES * + * * + *-----------------------------------------------*/ + namespace NodeJS { + interface CallSite { + /** + * Value of "this" + */ + getThis(): unknown; + + /** + * Type of "this" as a string. + * This is the name of the function stored in the constructor field of + * "this", if available. Otherwise the object's [[Class]] internal + * property. + */ + getTypeName(): string | null; + + /** + * Current function + */ + getFunction(): Function | undefined; + + /** + * Name of the current function, typically its name property. + * If a name property is not available an attempt will be made to try + * to infer a name from the function's context. + */ + getFunctionName(): string | null; + + /** + * Name of the property [of "this" or one of its prototypes] that holds + * the current function + */ + getMethodName(): string | null; + + /** + * Name of the script [if this function was defined in a script] + */ + getFileName(): string | undefined; + + /** + * Current line number [if this function was defined in a script] + */ + getLineNumber(): number | null; + + /** + * Current column number [if this function was defined in a script] + */ + getColumnNumber(): number | null; + + /** + * A call site object representing the location where eval was called + * [if this function was created using a call to eval] + */ + getEvalOrigin(): string | undefined; + + /** + * Is this a toplevel invocation, that is, is "this" the global object? + */ + isToplevel(): boolean; + + /** + * Does this call take place in code defined by a call to eval? + */ + isEval(): boolean; + + /** + * Is this call in native V8 code? + */ + isNative(): boolean; + + /** + * Is this a constructor call? + */ + isConstructor(): boolean; + + /** + * is this an async call (i.e. await, Promise.all(), or Promise.any())? + */ + isAsync(): boolean; + + /** + * is this an async call to Promise.all()? 
+ */ + isPromiseAll(): boolean; + + /** + * returns the index of the promise element that was followed in + * Promise.all() or Promise.any() for async stack traces, or null + * if the CallSite is not an async + */ + getPromiseIndex(): number | null; + + getScriptNameOrSourceURL(): string; + getScriptHash(): string; + + getEnclosingColumnNumber(): number; + getEnclosingLineNumber(): number; + getPosition(): number; + + toString(): string; + } + + interface ErrnoException extends Error { + errno?: number | undefined; + code?: string | undefined; + path?: string | undefined; + syscall?: string | undefined; + } + + interface ReadableStream extends EventEmitter { + readable: boolean; + read(size?: number): string | Buffer; + setEncoding(encoding: BufferEncoding): this; + pause(): this; + resume(): this; + isPaused(): boolean; + pipe(destination: T, options?: { end?: boolean | undefined }): T; + unpipe(destination?: WritableStream): this; + unshift(chunk: string | Uint8Array, encoding?: BufferEncoding): void; + wrap(oldStream: ReadableStream): this; + [Symbol.asyncIterator](): NodeJS.AsyncIterator; + } + + interface WritableStream extends EventEmitter { + writable: boolean; + write(buffer: Uint8Array | string, cb?: (err?: Error | null) => void): boolean; + write(str: string, encoding?: BufferEncoding, cb?: (err?: Error | null) => void): boolean; + end(cb?: () => void): this; + end(data: string | Uint8Array, cb?: () => void): this; + end(str: string, encoding?: BufferEncoding, cb?: () => void): this; + } + + interface ReadWriteStream extends ReadableStream, WritableStream {} + + interface RefCounted { + ref(): this; + unref(): this; + } + + interface Require { + (id: string): any; + resolve: RequireResolve; + cache: Dict; + /** + * @deprecated + */ + extensions: RequireExtensions; + main: Module | undefined; + } + + interface RequireResolve { + (id: string, options?: { paths?: string[] | undefined }): string; + paths(request: string): string[] | null; + } + + interface RequireExtensions extends Dict<(m: Module, filename: string) => any> { + ".js": (m: Module, filename: string) => any; + ".json": (m: Module, filename: string) => any; + ".node": (m: Module, filename: string) => any; + } + interface Module { + /** + * `true` if the module is running during the Node.js preload + */ + isPreloading: boolean; + exports: any; + require: Require; + id: string; + filename: string; + loaded: boolean; + /** @deprecated since v14.6.0 Please use `require.main` and `module.children` instead. */ + parent: Module | null | undefined; + children: Module[]; + /** + * @since v11.14.0 + * + * The directory name of the module. This is usually the same as the path.dirname() of the module.id. + */ + path: string; + paths: string[]; + } + + interface Dict { + [key: string]: T | undefined; + } + + interface ReadOnlyDict { + readonly [key: string]: T | undefined; + } + + /** An iterable iterator returned by the Node.js API. */ + // Default TReturn/TNext in v22 is `any`, for compatibility with the previously-used IterableIterator. + // TODO: In next major @types/node version, change default TReturn to undefined. + interface Iterator extends IteratorObject { + [Symbol.iterator](): NodeJS.Iterator; + } + + /** An async iterable iterator returned by the Node.js API. */ + // Default TReturn/TNext in v22 is `any`, for compatibility with the previously-used AsyncIterableIterator. + // TODO: In next major @types/node version, change default TReturn to undefined. 
+ interface AsyncIterator extends AsyncIteratorObject { + [Symbol.asyncIterator](): NodeJS.AsyncIterator; + } + } + + interface RequestInit extends _RequestInit {} + + function fetch( + input: string | URL | globalThis.Request, + init?: RequestInit, + ): Promise; + + interface Request extends _Request {} + var Request: typeof globalThis extends { + onmessage: any; + Request: infer T; + } ? T + : typeof import("undici-types").Request; + + interface ResponseInit extends _ResponseInit {} + + interface Response extends _Response {} + var Response: typeof globalThis extends { + onmessage: any; + Response: infer T; + } ? T + : typeof import("undici-types").Response; + + interface FormData extends _FormData {} + var FormData: typeof globalThis extends { + onmessage: any; + FormData: infer T; + } ? T + : typeof import("undici-types").FormData; + + interface Headers extends _Headers {} + var Headers: typeof globalThis extends { + onmessage: any; + Headers: infer T; + } ? T + : typeof import("undici-types").Headers; + + interface MessageEvent extends _MessageEvent {} + /** + * @since v15.0.0 + */ + var MessageEvent: typeof globalThis extends { + onmessage: any; + MessageEvent: infer T; + } ? T + : typeof import("undici-types").MessageEvent; + + interface WebSocket extends _WebSocket {} + var WebSocket: typeof globalThis extends { onmessage: any; WebSocket: infer T } ? T + : typeof import("undici-types").WebSocket; + + interface EventSource extends _EventSource {} + /** + * Only available through the [--experimental-eventsource](https://nodejs.org/api/cli.html#--experimental-eventsource) flag. + * + * @since v22.3.0 + */ + var EventSource: typeof globalThis extends { onmessage: any; EventSource: infer T } ? T + : typeof import("undici-types").EventSource; +} diff --git a/node_modules/@types/node-fetch/node_modules/@types/node/globals.typedarray.d.ts b/node_modules/@types/node-fetch/node_modules/@types/node/globals.typedarray.d.ts new file mode 100644 index 0000000..0c7280c --- /dev/null +++ b/node_modules/@types/node-fetch/node_modules/@types/node/globals.typedarray.d.ts @@ -0,0 +1,21 @@ +export {}; // Make this a module + +declare global { + namespace NodeJS { + type TypedArray = + | Uint8Array + | Uint8ClampedArray + | Uint16Array + | Uint32Array + | Int8Array + | Int16Array + | Int32Array + | BigUint64Array + | BigInt64Array + | Float32Array + | Float64Array; + type ArrayBufferView = + | TypedArray + | DataView; + } +} diff --git a/node_modules/@types/node-fetch/node_modules/@types/node/http.d.ts b/node_modules/@types/node-fetch/node_modules/@types/node/http.d.ts new file mode 100644 index 0000000..3fff62d --- /dev/null +++ b/node_modules/@types/node-fetch/node_modules/@types/node/http.d.ts @@ -0,0 +1,1958 @@ +/** + * To use the HTTP server and client one must import the `node:http` module. + * + * The HTTP interfaces in Node.js are designed to support many features + * of the protocol which have been traditionally difficult to use. + * In particular, large, possibly chunk-encoded, messages. The interface is + * careful to never buffer entire requests or responses, so the + * user is able to stream data. + * + * HTTP message headers are represented by an object like this: + * + * ```json + * { "content-length": "123", + * "content-type": "text/plain", + * "connection": "keep-alive", + * "host": "example.com", + * "accept": "*" } + * ``` + * + * Keys are lowercased. Values are not modified. 
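+ *
+ * A minimal illustrative server showing the lowercased header keys (the port number
+ * is an assumption):
+ *
+ * ```js
+ * import { createServer } from 'node:http';
+ *
+ * const server = createServer((req, res) => {
+ *   // Header names arrive lowercased, e.g. 'content-type' rather than 'Content-Type'.
+ *   res.end(`content-type: ${req.headers['content-type'] ?? 'not set'}`);
+ * });
+ *
+ * server.listen(3000);
+ * ```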
+ * + * In order to support the full spectrum of possible HTTP applications, the Node.js + * HTTP API is very low-level. It deals with stream handling and message + * parsing only. It parses a message into headers and body but it does not + * parse the actual headers or the body. + * + * See `message.headers` for details on how duplicate headers are handled. + * + * The raw headers as they were received are retained in the `rawHeaders` property, which is an array of `[key, value, key2, value2, ...]`. For + * example, the previous message header object might have a `rawHeaders` list like the following: + * + * ```js + * [ 'ConTent-Length', '123456', + * 'content-LENGTH', '123', + * 'content-type', 'text/plain', + * 'CONNECTION', 'keep-alive', + * 'Host', 'example.com', + * 'accepT', '*' ] + * ``` + * @see [source](https://github.com/nodejs/node/blob/v22.x/lib/http.js) + */ +declare module "http" { + import * as stream from "node:stream"; + import { URL } from "node:url"; + import { LookupOptions } from "node:dns"; + import { EventEmitter } from "node:events"; + import { LookupFunction, Server as NetServer, Socket, TcpSocketConnectOpts } from "node:net"; + // incoming headers will never contain number + interface IncomingHttpHeaders extends NodeJS.Dict { + accept?: string | undefined; + "accept-language"?: string | undefined; + "accept-patch"?: string | undefined; + "accept-ranges"?: string | undefined; + "access-control-allow-credentials"?: string | undefined; + "access-control-allow-headers"?: string | undefined; + "access-control-allow-methods"?: string | undefined; + "access-control-allow-origin"?: string | undefined; + "access-control-expose-headers"?: string | undefined; + "access-control-max-age"?: string | undefined; + "access-control-request-headers"?: string | undefined; + "access-control-request-method"?: string | undefined; + age?: string | undefined; + allow?: string | undefined; + "alt-svc"?: string | undefined; + authorization?: string | undefined; + "cache-control"?: string | undefined; + connection?: string | undefined; + "content-disposition"?: string | undefined; + "content-encoding"?: string | undefined; + "content-language"?: string | undefined; + "content-length"?: string | undefined; + "content-location"?: string | undefined; + "content-range"?: string | undefined; + "content-type"?: string | undefined; + cookie?: string | undefined; + date?: string | undefined; + etag?: string | undefined; + expect?: string | undefined; + expires?: string | undefined; + forwarded?: string | undefined; + from?: string | undefined; + host?: string | undefined; + "if-match"?: string | undefined; + "if-modified-since"?: string | undefined; + "if-none-match"?: string | undefined; + "if-unmodified-since"?: string | undefined; + "last-modified"?: string | undefined; + location?: string | undefined; + origin?: string | undefined; + pragma?: string | undefined; + "proxy-authenticate"?: string | undefined; + "proxy-authorization"?: string | undefined; + "public-key-pins"?: string | undefined; + range?: string | undefined; + referer?: string | undefined; + "retry-after"?: string | undefined; + "sec-websocket-accept"?: string | undefined; + "sec-websocket-extensions"?: string | undefined; + "sec-websocket-key"?: string | undefined; + "sec-websocket-protocol"?: string | undefined; + "sec-websocket-version"?: string | undefined; + "set-cookie"?: string[] | undefined; + "strict-transport-security"?: string | undefined; + tk?: string | undefined; + trailer?: string | undefined; + 
"transfer-encoding"?: string | undefined; + upgrade?: string | undefined; + "user-agent"?: string | undefined; + vary?: string | undefined; + via?: string | undefined; + warning?: string | undefined; + "www-authenticate"?: string | undefined; + } + // outgoing headers allows numbers (as they are converted internally to strings) + type OutgoingHttpHeader = number | string | string[]; + interface OutgoingHttpHeaders extends NodeJS.Dict { + accept?: string | string[] | undefined; + "accept-charset"?: string | string[] | undefined; + "accept-encoding"?: string | string[] | undefined; + "accept-language"?: string | string[] | undefined; + "accept-ranges"?: string | undefined; + "access-control-allow-credentials"?: string | undefined; + "access-control-allow-headers"?: string | undefined; + "access-control-allow-methods"?: string | undefined; + "access-control-allow-origin"?: string | undefined; + "access-control-expose-headers"?: string | undefined; + "access-control-max-age"?: string | undefined; + "access-control-request-headers"?: string | undefined; + "access-control-request-method"?: string | undefined; + age?: string | undefined; + allow?: string | undefined; + authorization?: string | undefined; + "cache-control"?: string | undefined; + "cdn-cache-control"?: string | undefined; + connection?: string | string[] | undefined; + "content-disposition"?: string | undefined; + "content-encoding"?: string | undefined; + "content-language"?: string | undefined; + "content-length"?: string | number | undefined; + "content-location"?: string | undefined; + "content-range"?: string | undefined; + "content-security-policy"?: string | undefined; + "content-security-policy-report-only"?: string | undefined; + "content-type"?: string | undefined; + cookie?: string | string[] | undefined; + dav?: string | string[] | undefined; + dnt?: string | undefined; + date?: string | undefined; + etag?: string | undefined; + expect?: string | undefined; + expires?: string | undefined; + forwarded?: string | undefined; + from?: string | undefined; + host?: string | undefined; + "if-match"?: string | undefined; + "if-modified-since"?: string | undefined; + "if-none-match"?: string | undefined; + "if-range"?: string | undefined; + "if-unmodified-since"?: string | undefined; + "last-modified"?: string | undefined; + link?: string | string[] | undefined; + location?: string | undefined; + "max-forwards"?: string | undefined; + origin?: string | undefined; + pragma?: string | string[] | undefined; + "proxy-authenticate"?: string | string[] | undefined; + "proxy-authorization"?: string | undefined; + "public-key-pins"?: string | undefined; + "public-key-pins-report-only"?: string | undefined; + range?: string | undefined; + referer?: string | undefined; + "referrer-policy"?: string | undefined; + refresh?: string | undefined; + "retry-after"?: string | undefined; + "sec-websocket-accept"?: string | undefined; + "sec-websocket-extensions"?: string | string[] | undefined; + "sec-websocket-key"?: string | undefined; + "sec-websocket-protocol"?: string | string[] | undefined; + "sec-websocket-version"?: string | undefined; + server?: string | undefined; + "set-cookie"?: string | string[] | undefined; + "strict-transport-security"?: string | undefined; + te?: string | undefined; + trailer?: string | undefined; + "transfer-encoding"?: string | undefined; + "user-agent"?: string | undefined; + upgrade?: string | undefined; + "upgrade-insecure-requests"?: string | undefined; + vary?: string | undefined; + via?: string | string[] | 
undefined; + warning?: string | undefined; + "www-authenticate"?: string | string[] | undefined; + "x-content-type-options"?: string | undefined; + "x-dns-prefetch-control"?: string | undefined; + "x-frame-options"?: string | undefined; + "x-xss-protection"?: string | undefined; + } + interface ClientRequestArgs { + _defaultAgent?: Agent | undefined; + agent?: Agent | boolean | undefined; + auth?: string | null | undefined; + createConnection?: + | (( + options: ClientRequestArgs, + oncreate: (err: Error | null, socket: stream.Duplex) => void, + ) => stream.Duplex | null | undefined) + | undefined; + defaultPort?: number | string | undefined; + family?: number | undefined; + headers?: OutgoingHttpHeaders | undefined; + hints?: LookupOptions["hints"]; + host?: string | null | undefined; + hostname?: string | null | undefined; + insecureHTTPParser?: boolean | undefined; + localAddress?: string | undefined; + localPort?: number | undefined; + lookup?: LookupFunction | undefined; + /** + * @default 16384 + */ + maxHeaderSize?: number | undefined; + method?: string | undefined; + path?: string | null | undefined; + port?: number | string | null | undefined; + protocol?: string | null | undefined; + setHost?: boolean | undefined; + signal?: AbortSignal | undefined; + socketPath?: string | undefined; + timeout?: number | undefined; + uniqueHeaders?: Array | undefined; + joinDuplicateHeaders?: boolean; + } + interface ServerOptions< + Request extends typeof IncomingMessage = typeof IncomingMessage, + Response extends typeof ServerResponse> = typeof ServerResponse, + > { + /** + * Specifies the `IncomingMessage` class to be used. Useful for extending the original `IncomingMessage`. + */ + IncomingMessage?: Request | undefined; + /** + * Specifies the `ServerResponse` class to be used. Useful for extending the original `ServerResponse`. + */ + ServerResponse?: Response | undefined; + /** + * Sets the timeout value in milliseconds for receiving the entire request from the client. + * @see Server.requestTimeout for more information. + * @default 300000 + * @since v18.0.0 + */ + requestTimeout?: number | undefined; + /** + * It joins the field line values of multiple headers in a request with `, ` instead of discarding the duplicates. + * @default false + * @since v18.14.0 + */ + joinDuplicateHeaders?: boolean; + /** + * The number of milliseconds of inactivity a server needs to wait for additional incoming data, + * after it has finished writing the last response, before a socket will be destroyed. + * @see Server.keepAliveTimeout for more information. + * @default 5000 + * @since v18.0.0 + */ + keepAliveTimeout?: number | undefined; + /** + * Sets the interval value in milliseconds to check for request and headers timeout in incomplete requests. + * @default 30000 + */ + connectionsCheckingInterval?: number | undefined; + /** + * Optionally overrides all `socket`s' `readableHighWaterMark` and `writableHighWaterMark`. + * This affects `highWaterMark` property of both `IncomingMessage` and `ServerResponse`. + * Default: @see stream.getDefaultHighWaterMark(). + * @since v20.1.0 + */ + highWaterMark?: number | undefined; + /** + * Use an insecure HTTP parser that accepts invalid HTTP headers when `true`. + * Using the insecure parser should be avoided. + * See --insecure-http-parser for more information. + * @default false + */ + insecureHTTPParser?: boolean | undefined; + /** + * Optionally overrides the value of `--max-http-header-size` for requests received by + * this server, i.e. 
the maximum length of request headers in bytes. + * @default 16384 + * @since v13.3.0 + */ + maxHeaderSize?: number | undefined; + /** + * If set to `true`, it disables the use of Nagle's algorithm immediately after a new incoming connection is received. + * @default true + * @since v16.5.0 + */ + noDelay?: boolean | undefined; + /** + * If set to `true`, it enables keep-alive functionality on the socket immediately after a new incoming connection is received, + * similarly on what is done in `socket.setKeepAlive([enable][, initialDelay])`. + * @default false + * @since v16.5.0 + */ + keepAlive?: boolean | undefined; + /** + * If set to a positive number, it sets the initial delay before the first keepalive probe is sent on an idle socket. + * @default 0 + * @since v16.5.0 + */ + keepAliveInitialDelay?: number | undefined; + /** + * A list of response headers that should be sent only once. + * If the header's value is an array, the items will be joined using `; `. + */ + uniqueHeaders?: Array | undefined; + } + type RequestListener< + Request extends typeof IncomingMessage = typeof IncomingMessage, + Response extends typeof ServerResponse> = typeof ServerResponse, + > = (req: InstanceType, res: InstanceType & { req: InstanceType }) => void; + /** + * @since v0.1.17 + */ + class Server< + Request extends typeof IncomingMessage = typeof IncomingMessage, + Response extends typeof ServerResponse> = typeof ServerResponse, + > extends NetServer { + constructor(requestListener?: RequestListener); + constructor(options: ServerOptions, requestListener?: RequestListener); + /** + * Sets the timeout value for sockets, and emits a `'timeout'` event on + * the Server object, passing the socket as an argument, if a timeout + * occurs. + * + * If there is a `'timeout'` event listener on the Server object, then it + * will be called with the timed-out socket as an argument. + * + * By default, the Server does not timeout sockets. However, if a callback + * is assigned to the Server's `'timeout'` event, timeouts must be handled + * explicitly. + * @since v0.9.12 + * @param [msecs=0 (no timeout)] + */ + setTimeout(msecs?: number, callback?: () => void): this; + setTimeout(callback: () => void): this; + /** + * Limits maximum incoming headers count. If set to 0, no limit will be applied. + * @since v0.7.0 + */ + maxHeadersCount: number | null; + /** + * The maximum number of requests socket can handle + * before closing keep alive connection. + * + * A value of `0` will disable the limit. + * + * When the limit is reached it will set the `Connection` header value to `close`, + * but will not actually close the connection, subsequent requests sent + * after the limit is reached will get `503 Service Unavailable` as a response. + * @since v16.10.0 + */ + maxRequestsPerSocket: number | null; + /** + * The number of milliseconds of inactivity before a socket is presumed + * to have timed out. + * + * A value of `0` will disable the timeout behavior on incoming connections. + * + * The socket timeout logic is set up on connection, so changing this + * value only affects new connections to the server, not any existing connections. + * @since v0.9.12 + */ + timeout: number; + /** + * Limit the amount of time the parser will wait to receive the complete HTTP + * headers. + * + * If the timeout expires, the server responds with status 408 without + * forwarding the request to the request listener and then closes the connection. + * + * It must be set to a non-zero value (e.g. 
120 seconds) to protect against + * potential Denial-of-Service attacks in case the server is deployed without a + * reverse proxy in front. + * @since v11.3.0, v10.14.0 + */ + headersTimeout: number; + /** + * The number of milliseconds of inactivity a server needs to wait for additional + * incoming data, after it has finished writing the last response, before a socket + * will be destroyed. If the server receives new data before the keep-alive + * timeout has fired, it will reset the regular inactivity timeout, i.e., `server.timeout`. + * + * A value of `0` will disable the keep-alive timeout behavior on incoming + * connections. + * A value of `0` makes the http server behave similarly to Node.js versions prior + * to 8.0.0, which did not have a keep-alive timeout. + * + * The socket timeout logic is set up on connection, so changing this value only + * affects new connections to the server, not any existing connections. + * @since v8.0.0 + */ + keepAliveTimeout: number; + /** + * Sets the timeout value in milliseconds for receiving the entire request from + * the client. + * + * If the timeout expires, the server responds with status 408 without + * forwarding the request to the request listener and then closes the connection. + * + * It must be set to a non-zero value (e.g. 120 seconds) to protect against + * potential Denial-of-Service attacks in case the server is deployed without a + * reverse proxy in front. + * @since v14.11.0 + */ + requestTimeout: number; + /** + * Closes all connections connected to this server. + * @since v18.2.0 + */ + closeAllConnections(): void; + /** + * Closes all connections connected to this server which are not sending a request + * or waiting for a response. + * @since v18.2.0 + */ + closeIdleConnections(): void; + addListener(event: string, listener: (...args: any[]) => void): this; + addListener(event: "close", listener: () => void): this; + addListener(event: "connection", listener: (socket: Socket) => void): this; + addListener(event: "error", listener: (err: Error) => void): this; + addListener(event: "listening", listener: () => void): this; + addListener(event: "checkContinue", listener: RequestListener): this; + addListener(event: "checkExpectation", listener: RequestListener): this; + addListener(event: "clientError", listener: (err: Error, socket: stream.Duplex) => void): this; + addListener( + event: "connect", + listener: (req: InstanceType, socket: stream.Duplex, head: Buffer) => void, + ): this; + addListener(event: "dropRequest", listener: (req: InstanceType, socket: stream.Duplex) => void): this; + addListener(event: "request", listener: RequestListener): this; + addListener( + event: "upgrade", + listener: (req: InstanceType, socket: stream.Duplex, head: Buffer) => void, + ): this; + emit(event: string, ...args: any[]): boolean; + emit(event: "close"): boolean; + emit(event: "connection", socket: Socket): boolean; + emit(event: "error", err: Error): boolean; + emit(event: "listening"): boolean; + emit( + event: "checkContinue", + req: InstanceType, + res: InstanceType & { req: InstanceType }, + ): boolean; + emit( + event: "checkExpectation", + req: InstanceType, + res: InstanceType & { req: InstanceType }, + ): boolean; + emit(event: "clientError", err: Error, socket: stream.Duplex): boolean; + emit(event: "connect", req: InstanceType, socket: stream.Duplex, head: Buffer): boolean; + emit(event: "dropRequest", req: InstanceType, socket: stream.Duplex): boolean; + emit( + event: "request", + req: InstanceType, + res: InstanceType & 
{ req: InstanceType }, + ): boolean; + emit(event: "upgrade", req: InstanceType, socket: stream.Duplex, head: Buffer): boolean; + on(event: string, listener: (...args: any[]) => void): this; + on(event: "close", listener: () => void): this; + on(event: "connection", listener: (socket: Socket) => void): this; + on(event: "error", listener: (err: Error) => void): this; + on(event: "listening", listener: () => void): this; + on(event: "checkContinue", listener: RequestListener): this; + on(event: "checkExpectation", listener: RequestListener): this; + on(event: "clientError", listener: (err: Error, socket: stream.Duplex) => void): this; + on(event: "connect", listener: (req: InstanceType, socket: stream.Duplex, head: Buffer) => void): this; + on(event: "dropRequest", listener: (req: InstanceType, socket: stream.Duplex) => void): this; + on(event: "request", listener: RequestListener): this; + on(event: "upgrade", listener: (req: InstanceType, socket: stream.Duplex, head: Buffer) => void): this; + once(event: string, listener: (...args: any[]) => void): this; + once(event: "close", listener: () => void): this; + once(event: "connection", listener: (socket: Socket) => void): this; + once(event: "error", listener: (err: Error) => void): this; + once(event: "listening", listener: () => void): this; + once(event: "checkContinue", listener: RequestListener): this; + once(event: "checkExpectation", listener: RequestListener): this; + once(event: "clientError", listener: (err: Error, socket: stream.Duplex) => void): this; + once( + event: "connect", + listener: (req: InstanceType, socket: stream.Duplex, head: Buffer) => void, + ): this; + once(event: "dropRequest", listener: (req: InstanceType, socket: stream.Duplex) => void): this; + once(event: "request", listener: RequestListener): this; + once( + event: "upgrade", + listener: (req: InstanceType, socket: stream.Duplex, head: Buffer) => void, + ): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + prependListener(event: "close", listener: () => void): this; + prependListener(event: "connection", listener: (socket: Socket) => void): this; + prependListener(event: "error", listener: (err: Error) => void): this; + prependListener(event: "listening", listener: () => void): this; + prependListener(event: "checkContinue", listener: RequestListener): this; + prependListener(event: "checkExpectation", listener: RequestListener): this; + prependListener(event: "clientError", listener: (err: Error, socket: stream.Duplex) => void): this; + prependListener( + event: "connect", + listener: (req: InstanceType, socket: stream.Duplex, head: Buffer) => void, + ): this; + prependListener( + event: "dropRequest", + listener: (req: InstanceType, socket: stream.Duplex) => void, + ): this; + prependListener(event: "request", listener: RequestListener): this; + prependListener( + event: "upgrade", + listener: (req: InstanceType, socket: stream.Duplex, head: Buffer) => void, + ): this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + prependOnceListener(event: "close", listener: () => void): this; + prependOnceListener(event: "connection", listener: (socket: Socket) => void): this; + prependOnceListener(event: "error", listener: (err: Error) => void): this; + prependOnceListener(event: "listening", listener: () => void): this; + prependOnceListener(event: "checkContinue", listener: RequestListener): this; + prependOnceListener(event: "checkExpectation", listener: RequestListener): this; + 
prependOnceListener(event: "clientError", listener: (err: Error, socket: stream.Duplex) => void): this; + prependOnceListener( + event: "connect", + listener: (req: InstanceType, socket: stream.Duplex, head: Buffer) => void, + ): this; + prependOnceListener( + event: "dropRequest", + listener: (req: InstanceType, socket: stream.Duplex) => void, + ): this; + prependOnceListener(event: "request", listener: RequestListener): this; + prependOnceListener( + event: "upgrade", + listener: (req: InstanceType, socket: stream.Duplex, head: Buffer) => void, + ): this; + } + /** + * This class serves as the parent class of {@link ClientRequest} and {@link ServerResponse}. It is an abstract outgoing message from + * the perspective of the participants of an HTTP transaction. + * @since v0.1.17 + */ + class OutgoingMessage extends stream.Writable { + readonly req: Request; + chunkedEncoding: boolean; + shouldKeepAlive: boolean; + useChunkedEncodingByDefault: boolean; + sendDate: boolean; + /** + * @deprecated Use `writableEnded` instead. + */ + finished: boolean; + /** + * Read-only. `true` if the headers were sent, otherwise `false`. + * @since v0.9.3 + */ + readonly headersSent: boolean; + /** + * Alias of `outgoingMessage.socket`. + * @since v0.3.0 + * @deprecated Since v15.12.0,v14.17.1 - Use `socket` instead. + */ + readonly connection: Socket | null; + /** + * Reference to the underlying socket. Usually, users will not want to access + * this property. + * + * After calling `outgoingMessage.end()`, this property will be nulled. + * @since v0.3.0 + */ + readonly socket: Socket | null; + constructor(); + /** + * Once a socket is associated with the message and is connected, `socket.setTimeout()` will be called with `msecs` as the first parameter. + * @since v0.9.12 + * @param callback Optional function to be called when a timeout occurs. Same as binding to the `timeout` event. + */ + setTimeout(msecs: number, callback?: () => void): this; + /** + * Sets a single header value. If the header already exists in the to-be-sent + * headers, its value will be replaced. Use an array of strings to send multiple + * headers with the same name. + * @since v0.4.0 + * @param name Header name + * @param value Header value + */ + setHeader(name: string, value: number | string | readonly string[]): this; + /** + * Sets multiple header values for implicit headers. headers must be an instance of + * `Headers` or `Map`, if a header already exists in the to-be-sent headers, its + * value will be replaced. + * + * ```js + * const headers = new Headers({ foo: 'bar' }); + * outgoingMessage.setHeaders(headers); + * ``` + * + * or + * + * ```js + * const headers = new Map([['foo', 'bar']]); + * outgoingMessage.setHeaders(headers); + * ``` + * + * When headers have been set with `outgoingMessage.setHeaders()`, they will be + * merged with any headers passed to `response.writeHead()`, with the headers passed + * to `response.writeHead()` given precedence. + * + * ```js + * // Returns content-type = text/plain + * const server = http.createServer((req, res) => { + * const headers = new Headers({ 'Content-Type': 'text/html' }); + * res.setHeaders(headers); + * res.writeHead(200, { 'Content-Type': 'text/plain' }); + * res.end('ok'); + * }); + * ``` + * + * @since v19.6.0, v18.15.0 + * @param name Header name + * @param value Header value + */ + setHeaders(headers: Headers | Map): this; + /** + * Append a single header value to the header object. 
+ * + * If the value is an array, this is equivalent to calling this method multiple + * times. + * + * If there were no previous values for the header, this is equivalent to calling `outgoingMessage.setHeader(name, value)`. + * + * Depending of the value of `options.uniqueHeaders` when the client request or the + * server were created, this will end up in the header being sent multiple times or + * a single time with values joined using `; `. + * @since v18.3.0, v16.17.0 + * @param name Header name + * @param value Header value + */ + appendHeader(name: string, value: string | readonly string[]): this; + /** + * Gets the value of the HTTP header with the given name. If that header is not + * set, the returned value will be `undefined`. + * @since v0.4.0 + * @param name Name of header + */ + getHeader(name: string): number | string | string[] | undefined; + /** + * Returns a shallow copy of the current outgoing headers. Since a shallow + * copy is used, array values may be mutated without additional calls to + * various header-related HTTP module methods. The keys of the returned + * object are the header names and the values are the respective header + * values. All header names are lowercase. + * + * The object returned by the `outgoingMessage.getHeaders()` method does + * not prototypically inherit from the JavaScript `Object`. This means that + * typical `Object` methods such as `obj.toString()`, `obj.hasOwnProperty()`, + * and others are not defined and will not work. + * + * ```js + * outgoingMessage.setHeader('Foo', 'bar'); + * outgoingMessage.setHeader('Set-Cookie', ['foo=bar', 'bar=baz']); + * + * const headers = outgoingMessage.getHeaders(); + * // headers === { foo: 'bar', 'set-cookie': ['foo=bar', 'bar=baz'] } + * ``` + * @since v7.7.0 + */ + getHeaders(): OutgoingHttpHeaders; + /** + * Returns an array containing the unique names of the current outgoing headers. + * All names are lowercase. + * @since v7.7.0 + */ + getHeaderNames(): string[]; + /** + * Returns `true` if the header identified by `name` is currently set in the + * outgoing headers. The header name is case-insensitive. + * + * ```js + * const hasContentType = outgoingMessage.hasHeader('content-type'); + * ``` + * @since v7.7.0 + */ + hasHeader(name: string): boolean; + /** + * Removes a header that is queued for implicit sending. + * + * ```js + * outgoingMessage.removeHeader('Content-Encoding'); + * ``` + * @since v0.4.0 + * @param name Header name + */ + removeHeader(name: string): void; + /** + * Adds HTTP trailers (headers but at the end of the message) to the message. + * + * Trailers will **only** be emitted if the message is chunked encoded. If not, + * the trailers will be silently discarded. + * + * HTTP requires the `Trailer` header to be sent to emit trailers, + * with a list of header field names in its value, e.g. + * + * ```js + * message.writeHead(200, { 'Content-Type': 'text/plain', + * 'Trailer': 'Content-MD5' }); + * message.write(fileData); + * message.addTrailers({ 'Content-MD5': '7895bf4b8828b55ceaf47747b4bca667' }); + * message.end(); + * ``` + * + * Attempting to set a header field name or value that contains invalid characters + * will result in a `TypeError` being thrown. + * @since v0.3.0 + */ + addTrailers(headers: OutgoingHttpHeaders | ReadonlyArray<[string, string]>): void; + /** + * Flushes the message headers. + * + * For efficiency reason, Node.js normally buffers the message headers + * until `outgoingMessage.end()` is called or the first chunk of message data + * is written. 
It then tries to pack the headers and data into a single TCP + * packet. + * + * It is usually desired (it saves a TCP round-trip), but not when the first + * data is not sent until possibly much later. `outgoingMessage.flushHeaders()` bypasses the optimization and kickstarts the message. + * @since v1.6.0 + */ + flushHeaders(): void; + } + /** + * This object is created internally by an HTTP server, not by the user. It is + * passed as the second parameter to the `'request'` event. + * @since v0.1.17 + */ + class ServerResponse extends OutgoingMessage { + /** + * When using implicit headers (not calling `response.writeHead()` explicitly), + * this property controls the status code that will be sent to the client when + * the headers get flushed. + * + * ```js + * response.statusCode = 404; + * ``` + * + * After response header was sent to the client, this property indicates the + * status code which was sent out. + * @since v0.4.0 + */ + statusCode: number; + /** + * When using implicit headers (not calling `response.writeHead()` explicitly), + * this property controls the status message that will be sent to the client when + * the headers get flushed. If this is left as `undefined` then the standard + * message for the status code will be used. + * + * ```js + * response.statusMessage = 'Not found'; + * ``` + * + * After response header was sent to the client, this property indicates the + * status message which was sent out. + * @since v0.11.8 + */ + statusMessage: string; + /** + * If set to `true`, Node.js will check whether the `Content-Length` header value and the size of the body, in bytes, are equal. + * Mismatching the `Content-Length` header value will result + * in an `Error` being thrown, identified by `code:``'ERR_HTTP_CONTENT_LENGTH_MISMATCH'`. + * @since v18.10.0, v16.18.0 + */ + strictContentLength: boolean; + constructor(req: Request); + assignSocket(socket: Socket): void; + detachSocket(socket: Socket): void; + /** + * Sends an HTTP/1.1 100 Continue message to the client, indicating that + * the request body should be sent. See the `'checkContinue'` event on `Server`. + * @since v0.3.0 + */ + writeContinue(callback?: () => void): void; + /** + * Sends an HTTP/1.1 103 Early Hints message to the client with a Link header, + * indicating that the user agent can preload/preconnect the linked resources. + * The `hints` is an object containing the values of headers to be sent with + * early hints message. The optional `callback` argument will be called when + * the response message has been written. + * + * **Example** + * + * ```js + * const earlyHintsLink = '; rel=preload; as=style'; + * response.writeEarlyHints({ + * 'link': earlyHintsLink, + * }); + * + * const earlyHintsLinks = [ + * '; rel=preload; as=style', + * '; rel=preload; as=script', + * ]; + * response.writeEarlyHints({ + * 'link': earlyHintsLinks, + * 'x-trace-id': 'id for diagnostics', + * }); + * + * const earlyHintsCallback = () => console.log('early hints message sent'); + * response.writeEarlyHints({ + * 'link': earlyHintsLinks, + * }, earlyHintsCallback); + * ``` + * @since v18.11.0 + * @param hints An object containing the values of headers + * @param callback Will be called when the response message has been written + */ + writeEarlyHints(hints: Record, callback?: () => void): void; + /** + * Sends a response header to the request. The status code is a 3-digit HTTP + * status code, like `404`. The last argument, `headers`, are the response headers. 
+ * Optionally one can give a human-readable `statusMessage` as the second + * argument. + * + * `headers` may be an `Array` where the keys and values are in the same list. + * It is _not_ a list of tuples. So, the even-numbered offsets are key values, + * and the odd-numbered offsets are the associated values. The array is in the same + * format as `request.rawHeaders`. + * + * Returns a reference to the `ServerResponse`, so that calls can be chained. + * + * ```js + * const body = 'hello world'; + * response + * .writeHead(200, { + * 'Content-Length': Buffer.byteLength(body), + * 'Content-Type': 'text/plain', + * }) + * .end(body); + * ``` + * + * This method must only be called once on a message and it must + * be called before `response.end()` is called. + * + * If `response.write()` or `response.end()` are called before calling + * this, the implicit/mutable headers will be calculated and call this function. + * + * When headers have been set with `response.setHeader()`, they will be merged + * with any headers passed to `response.writeHead()`, with the headers passed + * to `response.writeHead()` given precedence. + * + * If this method is called and `response.setHeader()` has not been called, + * it will directly write the supplied header values onto the network channel + * without caching internally, and the `response.getHeader()` on the header + * will not yield the expected result. If progressive population of headers is + * desired with potential future retrieval and modification, use `response.setHeader()` instead. + * + * ```js + * // Returns content-type = text/plain + * const server = http.createServer((req, res) => { + * res.setHeader('Content-Type', 'text/html'); + * res.setHeader('X-Foo', 'bar'); + * res.writeHead(200, { 'Content-Type': 'text/plain' }); + * res.end('ok'); + * }); + * ``` + * + * `Content-Length` is read in bytes, not characters. Use `Buffer.byteLength()` to determine the length of the body in bytes. Node.js + * will check whether `Content-Length` and the length of the body which has + * been transmitted are equal or not. + * + * Attempting to set a header field name or value that contains invalid characters + * will result in a \[`Error`\]\[\] being thrown. + * @since v0.1.30 + */ + writeHead( + statusCode: number, + statusMessage?: string, + headers?: OutgoingHttpHeaders | OutgoingHttpHeader[], + ): this; + writeHead(statusCode: number, headers?: OutgoingHttpHeaders | OutgoingHttpHeader[]): this; + /** + * Sends a HTTP/1.1 102 Processing message to the client, indicating that + * the request body should be sent. + * @since v10.0.0 + */ + writeProcessing(): void; + } + interface InformationEvent { + statusCode: number; + statusMessage: string; + httpVersion: string; + httpVersionMajor: number; + httpVersionMinor: number; + headers: IncomingHttpHeaders; + rawHeaders: string[]; + } + /** + * This object is created internally and returned from {@link request}. It + * represents an _in-progress_ request whose header has already been queued. The + * header is still mutable using the `setHeader(name, value)`, `getHeader(name)`, `removeHeader(name)` API. The actual header will + * be sent along with the first data chunk or when calling `request.end()`. + * + * To get the response, add a listener for `'response'` to the request object. `'response'` will be emitted from the request object when the response + * headers have been received. The `'response'` event is executed with one + * argument which is an instance of {@link IncomingMessage}. 
+ * + * During the `'response'` event, one can add listeners to the + * response object; particularly to listen for the `'data'` event. + * + * If no `'response'` handler is added, then the response will be + * entirely discarded. However, if a `'response'` event handler is added, + * then the data from the response object **must** be consumed, either by + * calling `response.read()` whenever there is a `'readable'` event, or + * by adding a `'data'` handler, or by calling the `.resume()` method. + * Until the data is consumed, the `'end'` event will not fire. Also, until + * the data is read it will consume memory that can eventually lead to a + * 'process out of memory' error. + * + * For backward compatibility, `res` will only emit `'error'` if there is an `'error'` listener registered. + * + * Set `Content-Length` header to limit the response body size. + * If `response.strictContentLength` is set to `true`, mismatching the `Content-Length` header value will result in an `Error` being thrown, + * identified by `code:``'ERR_HTTP_CONTENT_LENGTH_MISMATCH'`. + * + * `Content-Length` value should be in bytes, not characters. Use `Buffer.byteLength()` to determine the length of the body in bytes. + * @since v0.1.17 + */ + class ClientRequest extends OutgoingMessage { + /** + * The `request.aborted` property will be `true` if the request has + * been aborted. + * @since v0.11.14 + * @deprecated Since v17.0.0, v16.12.0 - Check `destroyed` instead. + */ + aborted: boolean; + /** + * The request host. + * @since v14.5.0, v12.19.0 + */ + host: string; + /** + * The request protocol. + * @since v14.5.0, v12.19.0 + */ + protocol: string; + /** + * When sending request through a keep-alive enabled agent, the underlying socket + * might be reused. But if server closes connection at unfortunate time, client + * may run into a 'ECONNRESET' error. + * + * ```js + * import http from 'node:http'; + * + * // Server has a 5 seconds keep-alive timeout by default + * http + * .createServer((req, res) => { + * res.write('hello\n'); + * res.end(); + * }) + * .listen(3000); + * + * setInterval(() => { + * // Adapting a keep-alive agent + * http.get('http://localhost:3000', { agent }, (res) => { + * res.on('data', (data) => { + * // Do nothing + * }); + * }); + * }, 5000); // Sending request on 5s interval so it's easy to hit idle timeout + * ``` + * + * By marking a request whether it reused socket or not, we can do + * automatic error retry base on it. + * + * ```js + * import http from 'node:http'; + * const agent = new http.Agent({ keepAlive: true }); + * + * function retriableRequest() { + * const req = http + * .get('http://localhost:3000', { agent }, (res) => { + * // ... + * }) + * .on('error', (err) => { + * // Check if retry is needed + * if (req.reusedSocket && err.code === 'ECONNRESET') { + * retriableRequest(); + * } + * }); + * } + * + * retriableRequest(); + * ``` + * @since v13.0.0, v12.16.0 + */ + reusedSocket: boolean; + /** + * Limits maximum response headers count. If set to 0, no limit will be applied. + */ + maxHeadersCount: number; + constructor(url: string | URL | ClientRequestArgs, cb?: (res: IncomingMessage) => void); + /** + * The request method. + * @since v0.1.97 + */ + method: string; + /** + * The request path. + * @since v0.4.0 + */ + path: string; + /** + * Marks the request as aborting. Calling this will cause remaining data + * in the response to be dropped and the socket to be destroyed. + * @since v0.3.8 + * @deprecated Since v14.1.0,v13.14.0 - Use `destroy` instead. 
+ */ + abort(): void; + onSocket(socket: Socket): void; + /** + * Once a socket is assigned to this request and is connected `socket.setTimeout()` will be called. + * @since v0.5.9 + * @param timeout Milliseconds before a request times out. + * @param callback Optional function to be called when a timeout occurs. Same as binding to the `'timeout'` event. + */ + setTimeout(timeout: number, callback?: () => void): this; + /** + * Once a socket is assigned to this request and is connected `socket.setNoDelay()` will be called. + * @since v0.5.9 + */ + setNoDelay(noDelay?: boolean): void; + /** + * Once a socket is assigned to this request and is connected `socket.setKeepAlive()` will be called. + * @since v0.5.9 + */ + setSocketKeepAlive(enable?: boolean, initialDelay?: number): void; + /** + * Returns an array containing the unique names of the current outgoing raw + * headers. Header names are returned with their exact casing being set. + * + * ```js + * request.setHeader('Foo', 'bar'); + * request.setHeader('Set-Cookie', ['foo=bar', 'bar=baz']); + * + * const headerNames = request.getRawHeaderNames(); + * // headerNames === ['Foo', 'Set-Cookie'] + * ``` + * @since v15.13.0, v14.17.0 + */ + getRawHeaderNames(): string[]; + /** + * @deprecated + */ + addListener(event: "abort", listener: () => void): this; + addListener( + event: "connect", + listener: (response: IncomingMessage, socket: Socket, head: Buffer) => void, + ): this; + addListener(event: "continue", listener: () => void): this; + addListener(event: "information", listener: (info: InformationEvent) => void): this; + addListener(event: "response", listener: (response: IncomingMessage) => void): this; + addListener(event: "socket", listener: (socket: Socket) => void): this; + addListener(event: "timeout", listener: () => void): this; + addListener( + event: "upgrade", + listener: (response: IncomingMessage, socket: Socket, head: Buffer) => void, + ): this; + addListener(event: "close", listener: () => void): this; + addListener(event: "drain", listener: () => void): this; + addListener(event: "error", listener: (err: Error) => void): this; + addListener(event: "finish", listener: () => void): this; + addListener(event: "pipe", listener: (src: stream.Readable) => void): this; + addListener(event: "unpipe", listener: (src: stream.Readable) => void): this; + addListener(event: string | symbol, listener: (...args: any[]) => void): this; + /** + * @deprecated + */ + on(event: "abort", listener: () => void): this; + on(event: "connect", listener: (response: IncomingMessage, socket: Socket, head: Buffer) => void): this; + on(event: "continue", listener: () => void): this; + on(event: "information", listener: (info: InformationEvent) => void): this; + on(event: "response", listener: (response: IncomingMessage) => void): this; + on(event: "socket", listener: (socket: Socket) => void): this; + on(event: "timeout", listener: () => void): this; + on(event: "upgrade", listener: (response: IncomingMessage, socket: Socket, head: Buffer) => void): this; + on(event: "close", listener: () => void): this; + on(event: "drain", listener: () => void): this; + on(event: "error", listener: (err: Error) => void): this; + on(event: "finish", listener: () => void): this; + on(event: "pipe", listener: (src: stream.Readable) => void): this; + on(event: "unpipe", listener: (src: stream.Readable) => void): this; + on(event: string | symbol, listener: (...args: any[]) => void): this; + /** + * @deprecated + */ + once(event: "abort", listener: () => void): this; + 
once(event: "connect", listener: (response: IncomingMessage, socket: Socket, head: Buffer) => void): this; + once(event: "continue", listener: () => void): this; + once(event: "information", listener: (info: InformationEvent) => void): this; + once(event: "response", listener: (response: IncomingMessage) => void): this; + once(event: "socket", listener: (socket: Socket) => void): this; + once(event: "timeout", listener: () => void): this; + once(event: "upgrade", listener: (response: IncomingMessage, socket: Socket, head: Buffer) => void): this; + once(event: "close", listener: () => void): this; + once(event: "drain", listener: () => void): this; + once(event: "error", listener: (err: Error) => void): this; + once(event: "finish", listener: () => void): this; + once(event: "pipe", listener: (src: stream.Readable) => void): this; + once(event: "unpipe", listener: (src: stream.Readable) => void): this; + once(event: string | symbol, listener: (...args: any[]) => void): this; + /** + * @deprecated + */ + prependListener(event: "abort", listener: () => void): this; + prependListener( + event: "connect", + listener: (response: IncomingMessage, socket: Socket, head: Buffer) => void, + ): this; + prependListener(event: "continue", listener: () => void): this; + prependListener(event: "information", listener: (info: InformationEvent) => void): this; + prependListener(event: "response", listener: (response: IncomingMessage) => void): this; + prependListener(event: "socket", listener: (socket: Socket) => void): this; + prependListener(event: "timeout", listener: () => void): this; + prependListener( + event: "upgrade", + listener: (response: IncomingMessage, socket: Socket, head: Buffer) => void, + ): this; + prependListener(event: "close", listener: () => void): this; + prependListener(event: "drain", listener: () => void): this; + prependListener(event: "error", listener: (err: Error) => void): this; + prependListener(event: "finish", listener: () => void): this; + prependListener(event: "pipe", listener: (src: stream.Readable) => void): this; + prependListener(event: "unpipe", listener: (src: stream.Readable) => void): this; + prependListener(event: string | symbol, listener: (...args: any[]) => void): this; + /** + * @deprecated + */ + prependOnceListener(event: "abort", listener: () => void): this; + prependOnceListener( + event: "connect", + listener: (response: IncomingMessage, socket: Socket, head: Buffer) => void, + ): this; + prependOnceListener(event: "continue", listener: () => void): this; + prependOnceListener(event: "information", listener: (info: InformationEvent) => void): this; + prependOnceListener(event: "response", listener: (response: IncomingMessage) => void): this; + prependOnceListener(event: "socket", listener: (socket: Socket) => void): this; + prependOnceListener(event: "timeout", listener: () => void): this; + prependOnceListener( + event: "upgrade", + listener: (response: IncomingMessage, socket: Socket, head: Buffer) => void, + ): this; + prependOnceListener(event: "close", listener: () => void): this; + prependOnceListener(event: "drain", listener: () => void): this; + prependOnceListener(event: "error", listener: (err: Error) => void): this; + prependOnceListener(event: "finish", listener: () => void): this; + prependOnceListener(event: "pipe", listener: (src: stream.Readable) => void): this; + prependOnceListener(event: "unpipe", listener: (src: stream.Readable) => void): this; + prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; 
+ } + /** + * An `IncomingMessage` object is created by {@link Server} or {@link ClientRequest} and passed as the first argument to the `'request'` and `'response'` event respectively. It may be used to + * access response + * status, headers, and data. + * + * Different from its `socket` value which is a subclass of `stream.Duplex`, the `IncomingMessage` itself extends `stream.Readable` and is created separately to + * parse and emit the incoming HTTP headers and payload, as the underlying socket + * may be reused multiple times in case of keep-alive. + * @since v0.1.17 + */ + class IncomingMessage extends stream.Readable { + constructor(socket: Socket); + /** + * The `message.aborted` property will be `true` if the request has + * been aborted. + * @since v10.1.0 + * @deprecated Since v17.0.0,v16.12.0 - Check `message.destroyed` from stream.Readable. + */ + aborted: boolean; + /** + * In case of server request, the HTTP version sent by the client. In the case of + * client response, the HTTP version of the connected-to server. + * Probably either `'1.1'` or `'1.0'`. + * + * Also `message.httpVersionMajor` is the first integer and `message.httpVersionMinor` is the second. + * @since v0.1.1 + */ + httpVersion: string; + httpVersionMajor: number; + httpVersionMinor: number; + /** + * The `message.complete` property will be `true` if a complete HTTP message has + * been received and successfully parsed. + * + * This property is particularly useful as a means of determining if a client or + * server fully transmitted a message before a connection was terminated: + * + * ```js + * const req = http.request({ + * host: '127.0.0.1', + * port: 8080, + * method: 'POST', + * }, (res) => { + * res.resume(); + * res.on('end', () => { + * if (!res.complete) + * console.error( + * 'The connection was terminated while the message was still being sent'); + * }); + * }); + * ``` + * @since v0.3.0 + */ + complete: boolean; + /** + * Alias for `message.socket`. + * @since v0.1.90 + * @deprecated Since v16.0.0 - Use `socket`. + */ + connection: Socket; + /** + * The `net.Socket` object associated with the connection. + * + * With HTTPS support, use `request.socket.getPeerCertificate()` to obtain the + * client's authentication details. + * + * This property is guaranteed to be an instance of the `net.Socket` class, + * a subclass of `stream.Duplex`, unless the user specified a socket + * type other than `net.Socket` or internally nulled. + * @since v0.3.0 + */ + socket: Socket; + /** + * The request/response headers object. + * + * Key-value pairs of header names and values. Header names are lower-cased. + * + * ```js + * // Prints something like: + * // + * // { 'user-agent': 'curl/7.22.0', + * // host: '127.0.0.1:8000', + * // accept: '*' } + * console.log(request.headers); + * ``` + * + * Duplicates in raw headers are handled in the following ways, depending on the + * header name: + * + * * Duplicates of `age`, `authorization`, `content-length`, `content-type`, `etag`, `expires`, `from`, `host`, `if-modified-since`, `if-unmodified-since`, `last-modified`, `location`, + * `max-forwards`, `proxy-authorization`, `referer`, `retry-after`, `server`, or `user-agent` are discarded. + * To allow duplicate values of the headers listed above to be joined, + * use the option `joinDuplicateHeaders` in {@link request} and {@link createServer}. See RFC 9110 Section 5.3 for more + * information. + * * `set-cookie` is always an array. Duplicates are added to the array. 
+ * * For duplicate `cookie` headers, the values are joined together with `; `. + * * For all other headers, the values are joined together with `, `. + * @since v0.1.5 + */ + headers: IncomingHttpHeaders; + /** + * Similar to `message.headers`, but there is no join logic and the values are + * always arrays of strings, even for headers received just once. + * + * ```js + * // Prints something like: + * // + * // { 'user-agent': ['curl/7.22.0'], + * // host: ['127.0.0.1:8000'], + * // accept: ['*'] } + * console.log(request.headersDistinct); + * ``` + * @since v18.3.0, v16.17.0 + */ + headersDistinct: NodeJS.Dict; + /** + * The raw request/response headers list exactly as they were received. + * + * The keys and values are in the same list. It is _not_ a + * list of tuples. So, the even-numbered offsets are key values, and the + * odd-numbered offsets are the associated values. + * + * Header names are not lowercased, and duplicates are not merged. + * + * ```js + * // Prints something like: + * // + * // [ 'user-agent', + * // 'this is invalid because there can be only one', + * // 'User-Agent', + * // 'curl/7.22.0', + * // 'Host', + * // '127.0.0.1:8000', + * // 'ACCEPT', + * // '*' ] + * console.log(request.rawHeaders); + * ``` + * @since v0.11.6 + */ + rawHeaders: string[]; + /** + * The request/response trailers object. Only populated at the `'end'` event. + * @since v0.3.0 + */ + trailers: NodeJS.Dict; + /** + * Similar to `message.trailers`, but there is no join logic and the values are + * always arrays of strings, even for headers received just once. + * Only populated at the `'end'` event. + * @since v18.3.0, v16.17.0 + */ + trailersDistinct: NodeJS.Dict; + /** + * The raw request/response trailer keys and values exactly as they were + * received. Only populated at the `'end'` event. + * @since v0.11.6 + */ + rawTrailers: string[]; + /** + * Calls `message.socket.setTimeout(msecs, callback)`. + * @since v0.5.9 + */ + setTimeout(msecs: number, callback?: () => void): this; + /** + * **Only valid for request obtained from {@link Server}.** + * + * The request method as a string. Read only. Examples: `'GET'`, `'DELETE'`. + * @since v0.1.1 + */ + method?: string | undefined; + /** + * **Only valid for request obtained from {@link Server}.** + * + * Request URL string. This contains only the URL that is present in the actual + * HTTP request. Take the following request: + * + * ```http + * GET /status?name=ryan HTTP/1.1 + * Accept: text/plain + * ``` + * + * To parse the URL into its parts: + * + * ```js + * new URL(`http://${process.env.HOST ?? 'localhost'}${request.url}`); + * ``` + * + * When `request.url` is `'/status?name=ryan'` and `process.env.HOST` is undefined: + * + * ```console + * $ node + * > new URL(`http://${process.env.HOST ?? 'localhost'}${request.url}`); + * URL { + * href: 'http://localhost/status?name=ryan', + * origin: 'http://localhost', + * protocol: 'http:', + * username: '', + * password: '', + * host: 'localhost', + * hostname: 'localhost', + * port: '', + * pathname: '/status', + * search: '?name=ryan', + * searchParams: URLSearchParams { 'name' => 'ryan' }, + * hash: '' + * } + * ``` + * + * Ensure that you set `process.env.HOST` to the server's host name, or consider replacing this part entirely. If using `req.headers.host`, ensure proper + * validation is used, as clients may specify a custom `Host` header. 
+ * @since v0.1.90 + */ + url?: string | undefined; + /** + * **Only valid for response obtained from {@link ClientRequest}.** + * + * The 3-digit HTTP response status code. E.G. `404`. + * @since v0.1.1 + */ + statusCode?: number | undefined; + /** + * **Only valid for response obtained from {@link ClientRequest}.** + * + * The HTTP response status message (reason phrase). E.G. `OK` or `Internal Server Error`. + * @since v0.11.10 + */ + statusMessage?: string | undefined; + /** + * Calls `destroy()` on the socket that received the `IncomingMessage`. If `error` is provided, an `'error'` event is emitted on the socket and `error` is passed + * as an argument to any listeners on the event. + * @since v0.3.0 + */ + destroy(error?: Error): this; + } + interface AgentOptions extends Partial { + /** + * Keep sockets around in a pool to be used by other requests in the future. Default = false + */ + keepAlive?: boolean | undefined; + /** + * When using HTTP KeepAlive, how often to send TCP KeepAlive packets over sockets being kept alive. Default = 1000. + * Only relevant if keepAlive is set to true. + */ + keepAliveMsecs?: number | undefined; + /** + * Maximum number of sockets to allow per host. Default for Node 0.10 is 5, default for Node 0.12 is Infinity + */ + maxSockets?: number | undefined; + /** + * Maximum number of sockets allowed for all hosts in total. Each request will use a new socket until the maximum is reached. Default: Infinity. + */ + maxTotalSockets?: number | undefined; + /** + * Maximum number of sockets to leave open in a free state. Only relevant if keepAlive is set to true. Default = 256. + */ + maxFreeSockets?: number | undefined; + /** + * Socket timeout in milliseconds. This will set the timeout after the socket is connected. + */ + timeout?: number | undefined; + /** + * Scheduling strategy to apply when picking the next free socket to use. + * @default `lifo` + */ + scheduling?: "fifo" | "lifo" | undefined; + } + /** + * An `Agent` is responsible for managing connection persistence + * and reuse for HTTP clients. It maintains a queue of pending requests + * for a given host and port, reusing a single socket connection for each + * until the queue is empty, at which time the socket is either destroyed + * or put into a pool where it is kept to be used again for requests to the + * same host and port. Whether it is destroyed or pooled depends on the `keepAlive` `option`. + * + * Pooled connections have TCP Keep-Alive enabled for them, but servers may + * still close idle connections, in which case they will be removed from the + * pool and a new connection will be made when a new HTTP request is made for + * that host and port. Servers may also refuse to allow multiple requests + * over the same connection, in which case the connection will have to be + * remade for every request and cannot be pooled. The `Agent` will still make + * the requests to that server, but each one will occur over a new connection. + * + * When a connection is closed by the client or the server, it is removed + * from the pool. Any unused sockets in the pool will be unrefed so as not + * to keep the Node.js process running when there are no outstanding requests. + * (see `socket.unref()`). + * + * It is good practice, to `destroy()` an `Agent` instance when it is no + * longer in use, because unused sockets consume OS resources. + * + * Sockets are removed from an agent when the socket emits either + * a `'close'` event or an `'agentRemove'` event. 
When intending to keep one + * HTTP request open for a long time without keeping it in the agent, something + * like the following may be done: + * + * ```js + * http.get(options, (res) => { + * // Do stuff + * }).on('socket', (socket) => { + * socket.emit('agentRemove'); + * }); + * ``` + * + * An agent may also be used for an individual request. By providing `{agent: false}` as an option to the `http.get()` or `http.request()` functions, a one-time use `Agent` with default options + * will be used + * for the client connection. + * + * `agent:false`: + * + * ```js + * http.get({ + * hostname: 'localhost', + * port: 80, + * path: '/', + * agent: false, // Create a new agent just for this one request + * }, (res) => { + * // Do stuff with response + * }); + * ``` + * + * `options` in [`socket.connect()`](https://nodejs.org/docs/latest-v22.x/api/net.html#socketconnectoptions-connectlistener) are also supported. + * + * To configure any of them, a custom {@link Agent} instance must be created. + * + * ```js + * import http from 'node:http'; + * const keepAliveAgent = new http.Agent({ keepAlive: true }); + * options.agent = keepAliveAgent; + * http.request(options, onResponseCallback) + * ``` + * @since v0.3.4 + */ + class Agent extends EventEmitter { + /** + * By default set to 256. For agents with `keepAlive` enabled, this + * sets the maximum number of sockets that will be left open in the free + * state. + * @since v0.11.7 + */ + maxFreeSockets: number; + /** + * By default set to `Infinity`. Determines how many concurrent sockets the agent + * can have open per origin. Origin is the returned value of `agent.getName()`. + * @since v0.3.6 + */ + maxSockets: number; + /** + * By default set to `Infinity`. Determines how many concurrent sockets the agent + * can have open. Unlike `maxSockets`, this parameter applies across all origins. + * @since v14.5.0, v12.19.0 + */ + maxTotalSockets: number; + /** + * An object which contains arrays of sockets currently awaiting use by + * the agent when `keepAlive` is enabled. Do not modify. + * + * Sockets in the `freeSockets` list will be automatically destroyed and + * removed from the array on `'timeout'`. + * @since v0.11.4 + */ + readonly freeSockets: NodeJS.ReadOnlyDict; + /** + * An object which contains arrays of sockets currently in use by the + * agent. Do not modify. + * @since v0.3.6 + */ + readonly sockets: NodeJS.ReadOnlyDict; + /** + * An object which contains queues of requests that have not yet been assigned to + * sockets. Do not modify. + * @since v0.5.9 + */ + readonly requests: NodeJS.ReadOnlyDict; + constructor(opts?: AgentOptions); + /** + * Destroy any sockets that are currently in use by the agent. + * + * It is usually not necessary to do this. However, if using an + * agent with `keepAlive` enabled, then it is best to explicitly shut down + * the agent when it is no longer needed. Otherwise, + * sockets might stay open for quite a long time before the server + * terminates them. + * @since v0.11.4 + */ + destroy(): void; + } + const METHODS: string[]; + const STATUS_CODES: { + [errorCode: number]: string | undefined; + [errorCode: string]: string | undefined; + }; + /** + * Returns a new instance of {@link Server}. + * + * The `requestListener` is a function which is automatically + * added to the `'request'` event. 
+ * + * ```js + * import http from 'node:http'; + * + * // Create a local server to receive data from + * const server = http.createServer((req, res) => { + * res.writeHead(200, { 'Content-Type': 'application/json' }); + * res.end(JSON.stringify({ + * data: 'Hello World!', + * })); + * }); + * + * server.listen(8000); + * ``` + * + * ```js + * import http from 'node:http'; + * + * // Create a local server to receive data from + * const server = http.createServer(); + * + * // Listen to the request event + * server.on('request', (request, res) => { + * res.writeHead(200, { 'Content-Type': 'application/json' }); + * res.end(JSON.stringify({ + * data: 'Hello World!', + * })); + * }); + * + * server.listen(8000); + * ``` + * @since v0.1.13 + */ + function createServer< + Request extends typeof IncomingMessage = typeof IncomingMessage, + Response extends typeof ServerResponse> = typeof ServerResponse, + >(requestListener?: RequestListener): Server; + function createServer< + Request extends typeof IncomingMessage = typeof IncomingMessage, + Response extends typeof ServerResponse> = typeof ServerResponse, + >( + options: ServerOptions, + requestListener?: RequestListener, + ): Server; + // although RequestOptions are passed as ClientRequestArgs to ClientRequest directly, + // create interface RequestOptions would make the naming more clear to developers + interface RequestOptions extends ClientRequestArgs {} + /** + * `options` in `socket.connect()` are also supported. + * + * Node.js maintains several connections per server to make HTTP requests. + * This function allows one to transparently issue requests. + * + * `url` can be a string or a `URL` object. If `url` is a + * string, it is automatically parsed with `new URL()`. If it is a `URL` object, it will be automatically converted to an ordinary `options` object. + * + * If both `url` and `options` are specified, the objects are merged, with the `options` properties taking precedence. + * + * The optional `callback` parameter will be added as a one-time listener for + * the `'response'` event. + * + * `http.request()` returns an instance of the {@link ClientRequest} class. The `ClientRequest` instance is a writable stream. If one needs to + * upload a file with a POST request, then write to the `ClientRequest` object. + * + * ```js + * import http from 'node:http'; + * import { Buffer } from 'node:buffer'; + * + * const postData = JSON.stringify({ + * 'msg': 'Hello World!', + * }); + * + * const options = { + * hostname: 'www.google.com', + * port: 80, + * path: '/upload', + * method: 'POST', + * headers: { + * 'Content-Type': 'application/json', + * 'Content-Length': Buffer.byteLength(postData), + * }, + * }; + * + * const req = http.request(options, (res) => { + * console.log(`STATUS: ${res.statusCode}`); + * console.log(`HEADERS: ${JSON.stringify(res.headers)}`); + * res.setEncoding('utf8'); + * res.on('data', (chunk) => { + * console.log(`BODY: ${chunk}`); + * }); + * res.on('end', () => { + * console.log('No more data in response.'); + * }); + * }); + * + * req.on('error', (e) => { + * console.error(`problem with request: ${e.message}`); + * }); + * + * // Write data to request body + * req.write(postData); + * req.end(); + * ``` + * + * In the example `req.end()` was called. With `http.request()` one + * must always call `req.end()` to signify the end of the request - + * even if there is no data being written to the request body. 
+ * + * If any error is encountered during the request (be that with DNS resolution, + * TCP level errors, or actual HTTP parse errors) an `'error'` event is emitted + * on the returned request object. As with all `'error'` events, if no listeners + * are registered the error will be thrown. + * + * There are a few special headers that should be noted. + * + * * Sending a 'Connection: keep-alive' will notify Node.js that the connection to + * the server should be persisted until the next request. + * * Sending a 'Content-Length' header will disable the default chunked encoding. + * * Sending an 'Expect' header will immediately send the request headers. + * Usually, when sending 'Expect: 100-continue', both a timeout and a listener + * for the `'continue'` event should be set. See RFC 2616 Section 8.2.3 for more + * information. + * * Sending an Authorization header will override using the `auth` option + * to compute basic authentication. + * + * Example using a `URL` as `options`: + * + * ```js + * const options = new URL('http://abc:xyz@example.com'); + * + * const req = http.request(options, (res) => { + * // ... + * }); + * ``` + * + * In a successful request, the following events will be emitted in the following + * order: + * + * * `'socket'` + * * `'response'` + * * `'data'` any number of times, on the `res` object + * (`'data'` will not be emitted at all if the response body is empty, for + * instance, in most redirects) + * * `'end'` on the `res` object + * * `'close'` + * + * In the case of a connection error, the following events will be emitted: + * + * * `'socket'` + * * `'error'` + * * `'close'` + * + * In the case of a premature connection close before the response is received, + * the following events will be emitted in the following order: + * + * * `'socket'` + * * `'error'` with an error with message `'Error: socket hang up'` and code `'ECONNRESET'` + * * `'close'` + * + * In the case of a premature connection close after the response is received, + * the following events will be emitted in the following order: + * + * * `'socket'` + * * `'response'` + * * `'data'` any number of times, on the `res` object + * * (connection closed here) + * * `'aborted'` on the `res` object + * * `'close'` + * * `'error'` on the `res` object with an error with message `'Error: aborted'` and code `'ECONNRESET'` + * * `'close'` on the `res` object + * + * If `req.destroy()` is called before a socket is assigned, the following + * events will be emitted in the following order: + * + * * (`req.destroy()` called here) + * * `'error'` with an error with message `'Error: socket hang up'` and code `'ECONNRESET'`, or the error with which `req.destroy()` was called + * * `'close'` + * + * If `req.destroy()` is called before the connection succeeds, the following + * events will be emitted in the following order: + * + * * `'socket'` + * * (`req.destroy()` called here) + * * `'error'` with an error with message `'Error: socket hang up'` and code `'ECONNRESET'`, or the error with which `req.destroy()` was called + * * `'close'` + * + * If `req.destroy()` is called after the response is received, the following + * events will be emitted in the following order: + * + * * `'socket'` + * * `'response'` + * * `'data'` any number of times, on the `res` object + * * (`req.destroy()` called here) + * * `'aborted'` on the `res` object + * * `'close'` + * * `'error'` on the `res` object with an error with message `'Error: aborted'` and code `'ECONNRESET'`, or the error with which `req.destroy()` was called + * 
* `'close'` on the `res` object + * + * If `req.abort()` is called before a socket is assigned, the following + * events will be emitted in the following order: + * + * * (`req.abort()` called here) + * * `'abort'` + * * `'close'` + * + * If `req.abort()` is called before the connection succeeds, the following + * events will be emitted in the following order: + * + * * `'socket'` + * * (`req.abort()` called here) + * * `'abort'` + * * `'error'` with an error with message `'Error: socket hang up'` and code `'ECONNRESET'` + * * `'close'` + * + * If `req.abort()` is called after the response is received, the following + * events will be emitted in the following order: + * + * * `'socket'` + * * `'response'` + * * `'data'` any number of times, on the `res` object + * * (`req.abort()` called here) + * * `'abort'` + * * `'aborted'` on the `res` object + * * `'error'` on the `res` object with an error with message `'Error: aborted'` and code `'ECONNRESET'`. + * * `'close'` + * * `'close'` on the `res` object + * + * Setting the `timeout` option or using the `setTimeout()` function will + * not abort the request or do anything besides add a `'timeout'` event. + * + * Passing an `AbortSignal` and then calling `abort()` on the corresponding `AbortController` will behave the same way as calling `.destroy()` on the + * request. Specifically, the `'error'` event will be emitted with an error with + * the message `'AbortError: The operation was aborted'`, the code `'ABORT_ERR'` and the `cause`, if one was provided. + * @since v0.3.6 + */ + function request(options: RequestOptions | string | URL, callback?: (res: IncomingMessage) => void): ClientRequest; + function request( + url: string | URL, + options: RequestOptions, + callback?: (res: IncomingMessage) => void, + ): ClientRequest; + /** + * Since most requests are GET requests without bodies, Node.js provides this + * convenience method. The only difference between this method and {@link request} is that it sets the method to GET by default and calls `req.end()` automatically. The callback must take care to + * consume the response + * data for reasons stated in {@link ClientRequest} section. + * + * The `callback` is invoked with a single argument that is an instance of {@link IncomingMessage}. + * + * JSON fetching example: + * + * ```js + * http.get('http://localhost:8000/', (res) => { + * const { statusCode } = res; + * const contentType = res.headers['content-type']; + * + * let error; + * // Any 2xx status code signals a successful response but + * // here we're only checking for 200. 
+ * if (statusCode !== 200) { + * error = new Error('Request Failed.\n' + + * `Status Code: ${statusCode}`); + * } else if (!/^application\/json/.test(contentType)) { + * error = new Error('Invalid content-type.\n' + + * `Expected application/json but received ${contentType}`); + * } + * if (error) { + * console.error(error.message); + * // Consume response data to free up memory + * res.resume(); + * return; + * } + * + * res.setEncoding('utf8'); + * let rawData = ''; + * res.on('data', (chunk) => { rawData += chunk; }); + * res.on('end', () => { + * try { + * const parsedData = JSON.parse(rawData); + * console.log(parsedData); + * } catch (e) { + * console.error(e.message); + * } + * }); + * }).on('error', (e) => { + * console.error(`Got error: ${e.message}`); + * }); + * + * // Create a local server to receive data from + * const server = http.createServer((req, res) => { + * res.writeHead(200, { 'Content-Type': 'application/json' }); + * res.end(JSON.stringify({ + * data: 'Hello World!', + * })); + * }); + * + * server.listen(8000); + * ``` + * @since v0.3.6 + * @param options Accepts the same `options` as {@link request}, with the method set to GET by default. + */ + function get(options: RequestOptions | string | URL, callback?: (res: IncomingMessage) => void): ClientRequest; + function get(url: string | URL, options: RequestOptions, callback?: (res: IncomingMessage) => void): ClientRequest; + /** + * Performs the low-level validations on the provided `name` that are done when `res.setHeader(name, value)` is called. + * + * Passing illegal value as `name` will result in a `TypeError` being thrown, + * identified by `code: 'ERR_INVALID_HTTP_TOKEN'`. + * + * It is not necessary to use this method before passing headers to an HTTP request + * or response. The HTTP module will automatically validate such headers. + * + * Example: + * + * ```js + * import { validateHeaderName } from 'node:http'; + * + * try { + * validateHeaderName(''); + * } catch (err) { + * console.error(err instanceof TypeError); // --> true + * console.error(err.code); // --> 'ERR_INVALID_HTTP_TOKEN' + * console.error(err.message); // --> 'Header name must be a valid HTTP token [""]' + * } + * ``` + * @since v14.3.0 + * @param [label='Header name'] Label for error message. + */ + function validateHeaderName(name: string): void; + /** + * Performs the low-level validations on the provided `value` that are done when `res.setHeader(name, value)` is called. + * + * Passing illegal value as `value` will result in a `TypeError` being thrown. + * + * * Undefined value error is identified by `code: 'ERR_HTTP_INVALID_HEADER_VALUE'`. + * * Invalid value character error is identified by `code: 'ERR_INVALID_CHAR'`. + * + * It is not necessary to use this method before passing headers to an HTTP request + * or response. The HTTP module will automatically validate such headers. 
+ * + * Examples: + * + * ```js + * import { validateHeaderValue } from 'node:http'; + * + * try { + * validateHeaderValue('x-my-header', undefined); + * } catch (err) { + * console.error(err instanceof TypeError); // --> true + * console.error(err.code === 'ERR_HTTP_INVALID_HEADER_VALUE'); // --> true + * console.error(err.message); // --> 'Invalid value "undefined" for header "x-my-header"' + * } + * + * try { + * validateHeaderValue('x-my-header', 'oʊmɪɡə'); + * } catch (err) { + * console.error(err instanceof TypeError); // --> true + * console.error(err.code === 'ERR_INVALID_CHAR'); // --> true + * console.error(err.message); // --> 'Invalid character in header content ["x-my-header"]' + * } + * ``` + * @since v14.3.0 + * @param name Header name + * @param value Header value + */ + function validateHeaderValue(name: string, value: string): void; + /** + * Set the maximum number of idle HTTP parsers. + * @since v18.8.0, v16.18.0 + * @param [max=1000] + */ + function setMaxIdleHTTPParsers(max: number): void; + /** + * Global instance of `Agent` which is used as the default for all HTTP client + * requests. Diverges from a default `Agent` configuration by having `keepAlive` + * enabled and a `timeout` of 5 seconds. + * @since v0.5.9 + */ + let globalAgent: Agent; + /** + * Read-only property specifying the maximum allowed size of HTTP headers in bytes. + * Defaults to 16KB. Configurable using the `--max-http-header-size` CLI option. + */ + const maxHeaderSize: number; + /** + * A browser-compatible implementation of [WebSocket](https://nodejs.org/docs/latest/api/http.html#websocket). + * @since v22.5.0 + */ + const WebSocket: import("undici-types").WebSocket; + /** + * @since v22.5.0 + */ + const CloseEvent: import("undici-types").CloseEvent; + /** + * @since v22.5.0 + */ + const MessageEvent: import("undici-types").MessageEvent; +} +declare module "node:http" { + export * from "http"; +} diff --git a/node_modules/@types/node-fetch/node_modules/@types/node/http2.d.ts b/node_modules/@types/node-fetch/node_modules/@types/node/http2.d.ts new file mode 100644 index 0000000..b74e1a6 --- /dev/null +++ b/node_modules/@types/node-fetch/node_modules/@types/node/http2.d.ts @@ -0,0 +1,2558 @@ +/** + * The `node:http2` module provides an implementation of the [HTTP/2](https://tools.ietf.org/html/rfc7540) protocol. 
+ * It can be accessed using: + * + * ```js + * import http2 from 'node:http2'; + * ``` + * @since v8.4.0 + * @see [source](https://github.com/nodejs/node/blob/v22.x/lib/http2.js) + */ +declare module "http2" { + import EventEmitter = require("node:events"); + import * as fs from "node:fs"; + import * as net from "node:net"; + import * as stream from "node:stream"; + import * as tls from "node:tls"; + import * as url from "node:url"; + import { + IncomingHttpHeaders as Http1IncomingHttpHeaders, + IncomingMessage, + OutgoingHttpHeaders, + ServerResponse, + } from "node:http"; + export { OutgoingHttpHeaders } from "node:http"; + export interface IncomingHttpStatusHeader { + ":status"?: number | undefined; + } + export interface IncomingHttpHeaders extends Http1IncomingHttpHeaders { + ":path"?: string | undefined; + ":method"?: string | undefined; + ":authority"?: string | undefined; + ":scheme"?: string | undefined; + } + // Http2Stream + export interface StreamPriorityOptions { + exclusive?: boolean | undefined; + parent?: number | undefined; + weight?: number | undefined; + silent?: boolean | undefined; + } + export interface StreamState { + localWindowSize?: number | undefined; + state?: number | undefined; + localClose?: number | undefined; + remoteClose?: number | undefined; + sumDependencyWeight?: number | undefined; + weight?: number | undefined; + } + export interface ServerStreamResponseOptions { + endStream?: boolean | undefined; + waitForTrailers?: boolean | undefined; + } + export interface StatOptions { + offset: number; + length: number; + } + export interface ServerStreamFileResponseOptions { + // eslint-disable-next-line @typescript-eslint/no-invalid-void-type + statCheck?(stats: fs.Stats, headers: OutgoingHttpHeaders, statOptions: StatOptions): void | boolean; + waitForTrailers?: boolean | undefined; + offset?: number | undefined; + length?: number | undefined; + } + export interface ServerStreamFileResponseOptionsWithError extends ServerStreamFileResponseOptions { + onError?(err: NodeJS.ErrnoException): void; + } + export interface Http2Stream extends stream.Duplex { + /** + * Set to `true` if the `Http2Stream` instance was aborted abnormally. When set, + * the `'aborted'` event will have been emitted. + * @since v8.4.0 + */ + readonly aborted: boolean; + /** + * This property shows the number of characters currently buffered to be written. + * See `net.Socket.bufferSize` for details. + * @since v11.2.0, v10.16.0 + */ + readonly bufferSize: number; + /** + * Set to `true` if the `Http2Stream` instance has been closed. + * @since v9.4.0 + */ + readonly closed: boolean; + /** + * Set to `true` if the `Http2Stream` instance has been destroyed and is no longer + * usable. + * @since v8.4.0 + */ + readonly destroyed: boolean; + /** + * Set to `true` if the `END_STREAM` flag was set in the request or response + * HEADERS frame received, indicating that no additional data should be received + * and the readable side of the `Http2Stream` will be closed. + * @since v10.11.0 + */ + readonly endAfterHeaders: boolean; + /** + * The numeric stream identifier of this `Http2Stream` instance. Set to `undefined` if the stream identifier has not yet been assigned. + * @since v8.4.0 + */ + readonly id?: number | undefined; + /** + * Set to `true` if the `Http2Stream` instance has not yet been assigned a + * numeric stream identifier. 
+ * @since v9.4.0 + */ + readonly pending: boolean; + /** + * Set to the `RST_STREAM` `error code` reported when the `Http2Stream` is + * destroyed after either receiving an `RST_STREAM` frame from the connected peer, + * calling `http2stream.close()`, or `http2stream.destroy()`. Will be `undefined` if the `Http2Stream` has not been closed. + * @since v8.4.0 + */ + readonly rstCode: number; + /** + * An object containing the outbound headers sent for this `Http2Stream`. + * @since v9.5.0 + */ + readonly sentHeaders: OutgoingHttpHeaders; + /** + * An array of objects containing the outbound informational (additional) headers + * sent for this `Http2Stream`. + * @since v9.5.0 + */ + readonly sentInfoHeaders?: OutgoingHttpHeaders[] | undefined; + /** + * An object containing the outbound trailers sent for this `HttpStream`. + * @since v9.5.0 + */ + readonly sentTrailers?: OutgoingHttpHeaders | undefined; + /** + * A reference to the `Http2Session` instance that owns this `Http2Stream`. The + * value will be `undefined` after the `Http2Stream` instance is destroyed. + * @since v8.4.0 + */ + readonly session: Http2Session | undefined; + /** + * Provides miscellaneous information about the current state of the `Http2Stream`. + * + * A current state of this `Http2Stream`. + * @since v8.4.0 + */ + readonly state: StreamState; + /** + * Closes the `Http2Stream` instance by sending an `RST_STREAM` frame to the + * connected HTTP/2 peer. + * @since v8.4.0 + * @param [code=http2.constants.NGHTTP2_NO_ERROR] Unsigned 32-bit integer identifying the error code. + * @param callback An optional function registered to listen for the `'close'` event. + */ + close(code?: number, callback?: () => void): void; + /** + * Updates the priority for this `Http2Stream` instance. + * @since v8.4.0 + */ + priority(options: StreamPriorityOptions): void; + /** + * ```js + * import http2 from 'node:http2'; + * const client = http2.connect('http://example.org:8000'); + * const { NGHTTP2_CANCEL } = http2.constants; + * const req = client.request({ ':path': '/' }); + * + * // Cancel the stream if there's no activity after 5 seconds + * req.setTimeout(5000, () => req.close(NGHTTP2_CANCEL)); + * ``` + * @since v8.4.0 + */ + setTimeout(msecs: number, callback?: () => void): void; + /** + * Sends a trailing `HEADERS` frame to the connected HTTP/2 peer. This method + * will cause the `Http2Stream` to be immediately closed and must only be + * called after the `'wantTrailers'` event has been emitted. When sending a + * request or sending a response, the `options.waitForTrailers` option must be set + * in order to keep the `Http2Stream` open after the final `DATA` frame so that + * trailers can be sent. + * + * ```js + * import http2 from 'node:http2'; + * const server = http2.createServer(); + * server.on('stream', (stream) => { + * stream.respond(undefined, { waitForTrailers: true }); + * stream.on('wantTrailers', () => { + * stream.sendTrailers({ xyz: 'abc' }); + * }); + * stream.end('Hello World'); + * }); + * ``` + * + * The HTTP/1 specification forbids trailers from containing HTTP/2 pseudo-header + * fields (e.g. `':method'`, `':path'`, etc). 
+ * @since v10.0.0 + */ + sendTrailers(headers: OutgoingHttpHeaders): void; + addListener(event: "aborted", listener: () => void): this; + addListener(event: "close", listener: () => void): this; + addListener(event: "data", listener: (chunk: Buffer | string) => void): this; + addListener(event: "drain", listener: () => void): this; + addListener(event: "end", listener: () => void): this; + addListener(event: "error", listener: (err: Error) => void): this; + addListener(event: "finish", listener: () => void): this; + addListener(event: "frameError", listener: (frameType: number, errorCode: number) => void): this; + addListener(event: "pipe", listener: (src: stream.Readable) => void): this; + addListener(event: "unpipe", listener: (src: stream.Readable) => void): this; + addListener(event: "streamClosed", listener: (code: number) => void): this; + addListener(event: "timeout", listener: () => void): this; + addListener(event: "trailers", listener: (trailers: IncomingHttpHeaders, flags: number) => void): this; + addListener(event: "wantTrailers", listener: () => void): this; + addListener(event: string | symbol, listener: (...args: any[]) => void): this; + emit(event: "aborted"): boolean; + emit(event: "close"): boolean; + emit(event: "data", chunk: Buffer | string): boolean; + emit(event: "drain"): boolean; + emit(event: "end"): boolean; + emit(event: "error", err: Error): boolean; + emit(event: "finish"): boolean; + emit(event: "frameError", frameType: number, errorCode: number): boolean; + emit(event: "pipe", src: stream.Readable): boolean; + emit(event: "unpipe", src: stream.Readable): boolean; + emit(event: "streamClosed", code: number): boolean; + emit(event: "timeout"): boolean; + emit(event: "trailers", trailers: IncomingHttpHeaders, flags: number): boolean; + emit(event: "wantTrailers"): boolean; + emit(event: string | symbol, ...args: any[]): boolean; + on(event: "aborted", listener: () => void): this; + on(event: "close", listener: () => void): this; + on(event: "data", listener: (chunk: Buffer | string) => void): this; + on(event: "drain", listener: () => void): this; + on(event: "end", listener: () => void): this; + on(event: "error", listener: (err: Error) => void): this; + on(event: "finish", listener: () => void): this; + on(event: "frameError", listener: (frameType: number, errorCode: number) => void): this; + on(event: "pipe", listener: (src: stream.Readable) => void): this; + on(event: "unpipe", listener: (src: stream.Readable) => void): this; + on(event: "streamClosed", listener: (code: number) => void): this; + on(event: "timeout", listener: () => void): this; + on(event: "trailers", listener: (trailers: IncomingHttpHeaders, flags: number) => void): this; + on(event: "wantTrailers", listener: () => void): this; + on(event: string | symbol, listener: (...args: any[]) => void): this; + once(event: "aborted", listener: () => void): this; + once(event: "close", listener: () => void): this; + once(event: "data", listener: (chunk: Buffer | string) => void): this; + once(event: "drain", listener: () => void): this; + once(event: "end", listener: () => void): this; + once(event: "error", listener: (err: Error) => void): this; + once(event: "finish", listener: () => void): this; + once(event: "frameError", listener: (frameType: number, errorCode: number) => void): this; + once(event: "pipe", listener: (src: stream.Readable) => void): this; + once(event: "unpipe", listener: (src: stream.Readable) => void): this; + once(event: "streamClosed", listener: (code: number) => void): 
this; + once(event: "timeout", listener: () => void): this; + once(event: "trailers", listener: (trailers: IncomingHttpHeaders, flags: number) => void): this; + once(event: "wantTrailers", listener: () => void): this; + once(event: string | symbol, listener: (...args: any[]) => void): this; + prependListener(event: "aborted", listener: () => void): this; + prependListener(event: "close", listener: () => void): this; + prependListener(event: "data", listener: (chunk: Buffer | string) => void): this; + prependListener(event: "drain", listener: () => void): this; + prependListener(event: "end", listener: () => void): this; + prependListener(event: "error", listener: (err: Error) => void): this; + prependListener(event: "finish", listener: () => void): this; + prependListener(event: "frameError", listener: (frameType: number, errorCode: number) => void): this; + prependListener(event: "pipe", listener: (src: stream.Readable) => void): this; + prependListener(event: "unpipe", listener: (src: stream.Readable) => void): this; + prependListener(event: "streamClosed", listener: (code: number) => void): this; + prependListener(event: "timeout", listener: () => void): this; + prependListener(event: "trailers", listener: (trailers: IncomingHttpHeaders, flags: number) => void): this; + prependListener(event: "wantTrailers", listener: () => void): this; + prependListener(event: string | symbol, listener: (...args: any[]) => void): this; + prependOnceListener(event: "aborted", listener: () => void): this; + prependOnceListener(event: "close", listener: () => void): this; + prependOnceListener(event: "data", listener: (chunk: Buffer | string) => void): this; + prependOnceListener(event: "drain", listener: () => void): this; + prependOnceListener(event: "end", listener: () => void): this; + prependOnceListener(event: "error", listener: (err: Error) => void): this; + prependOnceListener(event: "finish", listener: () => void): this; + prependOnceListener(event: "frameError", listener: (frameType: number, errorCode: number) => void): this; + prependOnceListener(event: "pipe", listener: (src: stream.Readable) => void): this; + prependOnceListener(event: "unpipe", listener: (src: stream.Readable) => void): this; + prependOnceListener(event: "streamClosed", listener: (code: number) => void): this; + prependOnceListener(event: "timeout", listener: () => void): this; + prependOnceListener(event: "trailers", listener: (trailers: IncomingHttpHeaders, flags: number) => void): this; + prependOnceListener(event: "wantTrailers", listener: () => void): this; + prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; + } + export interface ClientHttp2Stream extends Http2Stream { + addListener(event: "continue", listener: () => {}): this; + addListener( + event: "headers", + listener: (headers: IncomingHttpHeaders & IncomingHttpStatusHeader, flags: number) => void, + ): this; + addListener(event: "push", listener: (headers: IncomingHttpHeaders, flags: number) => void): this; + addListener( + event: "response", + listener: (headers: IncomingHttpHeaders & IncomingHttpStatusHeader, flags: number) => void, + ): this; + addListener(event: string | symbol, listener: (...args: any[]) => void): this; + emit(event: "continue"): boolean; + emit(event: "headers", headers: IncomingHttpHeaders & IncomingHttpStatusHeader, flags: number): boolean; + emit(event: "push", headers: IncomingHttpHeaders, flags: number): boolean; + emit(event: "response", headers: IncomingHttpHeaders & IncomingHttpStatusHeader, 
flags: number): boolean; + emit(event: string | symbol, ...args: any[]): boolean; + on(event: "continue", listener: () => {}): this; + on( + event: "headers", + listener: (headers: IncomingHttpHeaders & IncomingHttpStatusHeader, flags: number) => void, + ): this; + on(event: "push", listener: (headers: IncomingHttpHeaders, flags: number) => void): this; + on( + event: "response", + listener: (headers: IncomingHttpHeaders & IncomingHttpStatusHeader, flags: number) => void, + ): this; + on(event: string | symbol, listener: (...args: any[]) => void): this; + once(event: "continue", listener: () => {}): this; + once( + event: "headers", + listener: (headers: IncomingHttpHeaders & IncomingHttpStatusHeader, flags: number) => void, + ): this; + once(event: "push", listener: (headers: IncomingHttpHeaders, flags: number) => void): this; + once( + event: "response", + listener: (headers: IncomingHttpHeaders & IncomingHttpStatusHeader, flags: number) => void, + ): this; + once(event: string | symbol, listener: (...args: any[]) => void): this; + prependListener(event: "continue", listener: () => {}): this; + prependListener( + event: "headers", + listener: (headers: IncomingHttpHeaders & IncomingHttpStatusHeader, flags: number) => void, + ): this; + prependListener(event: "push", listener: (headers: IncomingHttpHeaders, flags: number) => void): this; + prependListener( + event: "response", + listener: (headers: IncomingHttpHeaders & IncomingHttpStatusHeader, flags: number) => void, + ): this; + prependListener(event: string | symbol, listener: (...args: any[]) => void): this; + prependOnceListener(event: "continue", listener: () => {}): this; + prependOnceListener( + event: "headers", + listener: (headers: IncomingHttpHeaders & IncomingHttpStatusHeader, flags: number) => void, + ): this; + prependOnceListener(event: "push", listener: (headers: IncomingHttpHeaders, flags: number) => void): this; + prependOnceListener( + event: "response", + listener: (headers: IncomingHttpHeaders & IncomingHttpStatusHeader, flags: number) => void, + ): this; + prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; + } + export interface ServerHttp2Stream extends Http2Stream { + /** + * True if headers were sent, false otherwise (read-only). + * @since v8.4.0 + */ + readonly headersSent: boolean; + /** + * Read-only property mapped to the `SETTINGS_ENABLE_PUSH` flag of the remote + * client's most recent `SETTINGS` frame. Will be `true` if the remote peer + * accepts push streams, `false` otherwise. Settings are the same for every `Http2Stream` in the same `Http2Session`. + * @since v8.4.0 + */ + readonly pushAllowed: boolean; + /** + * Sends an additional informational `HEADERS` frame to the connected HTTP/2 peer. + * @since v8.4.0 + */ + additionalHeaders(headers: OutgoingHttpHeaders): void; + /** + * Initiates a push stream. The callback is invoked with the new `Http2Stream` instance created for the push stream passed as the second argument, or an `Error` passed as the first argument. + * + * ```js + * import http2 from 'node:http2'; + * const server = http2.createServer(); + * server.on('stream', (stream) => { + * stream.respond({ ':status': 200 }); + * stream.pushStream({ ':path': '/' }, (err, pushStream, headers) => { + * if (err) throw err; + * pushStream.respond({ ':status': 200 }); + * pushStream.end('some pushed data'); + * }); + * stream.end('some data'); + * }); + * ``` + * + * Setting the weight of a push stream is not allowed in the `HEADERS` frame. 
Pass + * a `weight` value to `http2stream.priority` with the `silent` option set to `true` to enable server-side bandwidth balancing between concurrent streams. + * + * Calling `http2stream.pushStream()` from within a pushed stream is not permitted + * and will throw an error. + * @since v8.4.0 + * @param callback Callback that is called once the push stream has been initiated. + */ + pushStream( + headers: OutgoingHttpHeaders, + callback?: (err: Error | null, pushStream: ServerHttp2Stream, headers: OutgoingHttpHeaders) => void, + ): void; + pushStream( + headers: OutgoingHttpHeaders, + options?: StreamPriorityOptions, + callback?: (err: Error | null, pushStream: ServerHttp2Stream, headers: OutgoingHttpHeaders) => void, + ): void; + /** + * ```js + * import http2 from 'node:http2'; + * const server = http2.createServer(); + * server.on('stream', (stream) => { + * stream.respond({ ':status': 200 }); + * stream.end('some data'); + * }); + * ``` + * + * Initiates a response. When the `options.waitForTrailers` option is set, the `'wantTrailers'` event + * will be emitted immediately after queuing the last chunk of payload data to be sent. + * The `http2stream.sendTrailers()` method can then be used to send trailing header fields to the peer. + * + * When `options.waitForTrailers` is set, the `Http2Stream` will not automatically + * close when the final `DATA` frame is transmitted. User code must call either `http2stream.sendTrailers()` or `http2stream.close()` to close the `Http2Stream`. + * + * ```js + * import http2 from 'node:http2'; + * const server = http2.createServer(); + * server.on('stream', (stream) => { + * stream.respond({ ':status': 200 }, { waitForTrailers: true }); + * stream.on('wantTrailers', () => { + * stream.sendTrailers({ ABC: 'some value to send' }); + * }); + * stream.end('some data'); + * }); + * ``` + * @since v8.4.0 + */ + respond(headers?: OutgoingHttpHeaders, options?: ServerStreamResponseOptions): void; + /** + * Initiates a response whose data is read from the given file descriptor. No + * validation is performed on the given file descriptor. If an error occurs while + * attempting to read data using the file descriptor, the `Http2Stream` will be + * closed using an `RST_STREAM` frame using the standard `INTERNAL_ERROR` code. + * + * When used, the `Http2Stream` object's `Duplex` interface will be closed + * automatically. + * + * ```js + * import http2 from 'node:http2'; + * import fs from 'node:fs'; + * + * const server = http2.createServer(); + * server.on('stream', (stream) => { + * const fd = fs.openSync('/some/file', 'r'); + * + * const stat = fs.fstatSync(fd); + * const headers = { + * 'content-length': stat.size, + * 'last-modified': stat.mtime.toUTCString(), + * 'content-type': 'text/plain; charset=utf-8', + * }; + * stream.respondWithFD(fd, headers); + * stream.on('close', () => fs.closeSync(fd)); + * }); + * ``` + * + * The optional `options.statCheck` function may be specified to give user code + * an opportunity to set additional content headers based on the `fs.Stat` details + * of the given fd. If the `statCheck` function is provided, the `http2stream.respondWithFD()` method will + * perform an `fs.fstat()` call to collect details on the provided file descriptor. + * + * The `offset` and `length` options may be used to limit the response to a + * specific range subset. This can be used, for instance, to support HTTP Range + * requests. 
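+ *
+ * Illustrative sketch (added for clarity; not part of the upstream Node.js docs): serving a
+ * byte range with the `offset` and `length` options, reusing the `server`/`fs` setup from the
+ * example above. The `206` status and the particular range are assumptions for illustration only.
+ *
+ * ```js
+ * server.on('stream', (stream) => {
+ *   const fd = fs.openSync('/some/file', 'r');
+ *   // Respond with only the first 100 bytes of the file.
+ *   stream.respondWithFD(fd, { ':status': 206, 'content-type': 'text/plain; charset=utf-8' }, { offset: 0, length: 100 });
+ *   stream.on('close', () => fs.closeSync(fd));
+ * });
+ * ```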
+ * + * The file descriptor or `FileHandle` is not closed when the stream is closed, + * so it will need to be closed manually once it is no longer needed. + * Using the same file descriptor concurrently for multiple streams + * is not supported and may result in data loss. Re-using a file descriptor + * after a stream has finished is supported. + * + * When the `options.waitForTrailers` option is set, the `'wantTrailers'` event + * will be emitted immediately after queuing the last chunk of payload data to be + * sent. The `http2stream.sendTrailers()` method can then be used to sent trailing + * header fields to the peer. + * + * When `options.waitForTrailers` is set, the `Http2Stream` will not automatically + * close when the final `DATA` frame is transmitted. User code _must_ call either `http2stream.sendTrailers()` + * or `http2stream.close()` to close the `Http2Stream`. + * + * ```js + * import http2 from 'node:http2'; + * import fs from 'node:fs'; + * + * const server = http2.createServer(); + * server.on('stream', (stream) => { + * const fd = fs.openSync('/some/file', 'r'); + * + * const stat = fs.fstatSync(fd); + * const headers = { + * 'content-length': stat.size, + * 'last-modified': stat.mtime.toUTCString(), + * 'content-type': 'text/plain; charset=utf-8', + * }; + * stream.respondWithFD(fd, headers, { waitForTrailers: true }); + * stream.on('wantTrailers', () => { + * stream.sendTrailers({ ABC: 'some value to send' }); + * }); + * + * stream.on('close', () => fs.closeSync(fd)); + * }); + * ``` + * @since v8.4.0 + * @param fd A readable file descriptor. + */ + respondWithFD( + fd: number | fs.promises.FileHandle, + headers?: OutgoingHttpHeaders, + options?: ServerStreamFileResponseOptions, + ): void; + /** + * Sends a regular file as the response. The `path` must specify a regular file + * or an `'error'` event will be emitted on the `Http2Stream` object. + * + * When used, the `Http2Stream` object's `Duplex` interface will be closed + * automatically. + * + * The optional `options.statCheck` function may be specified to give user code + * an opportunity to set additional content headers based on the `fs.Stat` details + * of the given file: + * + * If an error occurs while attempting to read the file data, the `Http2Stream` will be closed using an + * `RST_STREAM` frame using the standard `INTERNAL_ERROR` code. + * If the `onError` callback is defined, then it will be called. Otherwise, the stream will be destroyed. + * + * Example using a file path: + * + * ```js + * import http2 from 'node:http2'; + * const server = http2.createServer(); + * server.on('stream', (stream) => { + * function statCheck(stat, headers) { + * headers['last-modified'] = stat.mtime.toUTCString(); + * } + * + * function onError(err) { + * // stream.respond() can throw if the stream has been destroyed by + * // the other side. + * try { + * if (err.code === 'ENOENT') { + * stream.respond({ ':status': 404 }); + * } else { + * stream.respond({ ':status': 500 }); + * } + * } catch (err) { + * // Perform actual error handling. + * console.error(err); + * } + * stream.end(); + * } + * + * stream.respondWithFile('/some/file', + * { 'content-type': 'text/plain; charset=utf-8' }, + * { statCheck, onError }); + * }); + * ``` + * + * The `options.statCheck` function may also be used to cancel the send operation + * by returning `false`. 
For instance, a conditional request may check the stat + * results to determine if the file has been modified to return an appropriate `304` response: + * + * ```js + * import http2 from 'node:http2'; + * const server = http2.createServer(); + * server.on('stream', (stream) => { + * function statCheck(stat, headers) { + * // Check the stat here... + * stream.respond({ ':status': 304 }); + * return false; // Cancel the send operation + * } + * stream.respondWithFile('/some/file', + * { 'content-type': 'text/plain; charset=utf-8' }, + * { statCheck }); + * }); + * ``` + * + * The `content-length` header field will be automatically set. + * + * The `offset` and `length` options may be used to limit the response to a + * specific range subset. This can be used, for instance, to support HTTP Range + * requests. + * + * The `options.onError` function may also be used to handle all the errors + * that could happen before the delivery of the file is initiated. The + * default behavior is to destroy the stream. + * + * When the `options.waitForTrailers` option is set, the `'wantTrailers'` event + * will be emitted immediately after queuing the last chunk of payload data to be + * sent. The `http2stream.sendTrailers()` method can then be used to sent trailing + * header fields to the peer. + * + * When `options.waitForTrailers` is set, the `Http2Stream` will not automatically + * close when the final `DATA` frame is transmitted. User code must call either`http2stream.sendTrailers()` or `http2stream.close()` to close the`Http2Stream`. + * + * ```js + * import http2 from 'node:http2'; + * const server = http2.createServer(); + * server.on('stream', (stream) => { + * stream.respondWithFile('/some/file', + * { 'content-type': 'text/plain; charset=utf-8' }, + * { waitForTrailers: true }); + * stream.on('wantTrailers', () => { + * stream.sendTrailers({ ABC: 'some value to send' }); + * }); + * }); + * ``` + * @since v8.4.0 + */ + respondWithFile( + path: string, + headers?: OutgoingHttpHeaders, + options?: ServerStreamFileResponseOptionsWithError, + ): void; + } + // Http2Session + export interface Settings { + headerTableSize?: number | undefined; + enablePush?: boolean | undefined; + initialWindowSize?: number | undefined; + maxFrameSize?: number | undefined; + maxConcurrentStreams?: number | undefined; + maxHeaderListSize?: number | undefined; + enableConnectProtocol?: boolean | undefined; + } + export interface ClientSessionRequestOptions { + endStream?: boolean | undefined; + exclusive?: boolean | undefined; + parent?: number | undefined; + weight?: number | undefined; + waitForTrailers?: boolean | undefined; + signal?: AbortSignal | undefined; + } + export interface SessionState { + effectiveLocalWindowSize?: number | undefined; + effectiveRecvDataLength?: number | undefined; + nextStreamID?: number | undefined; + localWindowSize?: number | undefined; + lastProcStreamID?: number | undefined; + remoteWindowSize?: number | undefined; + outboundQueueSize?: number | undefined; + deflateDynamicTableSize?: number | undefined; + inflateDynamicTableSize?: number | undefined; + } + export interface Http2Session extends EventEmitter { + /** + * Value will be `undefined` if the `Http2Session` is not yet connected to a + * socket, `h2c` if the `Http2Session` is not connected to a `TLSSocket`, or + * will return the value of the connected `TLSSocket`'s own `alpnProtocol` property. 
+ * @since v9.4.0 + */ + readonly alpnProtocol?: string | undefined; + /** + * Will be `true` if this `Http2Session` instance has been closed, otherwise `false`. + * @since v9.4.0 + */ + readonly closed: boolean; + /** + * Will be `true` if this `Http2Session` instance is still connecting, will be set + * to `false` before emitting `connect` event and/or calling the `http2.connect` callback. + * @since v10.0.0 + */ + readonly connecting: boolean; + /** + * Will be `true` if this `Http2Session` instance has been destroyed and must no + * longer be used, otherwise `false`. + * @since v8.4.0 + */ + readonly destroyed: boolean; + /** + * Value is `undefined` if the `Http2Session` session socket has not yet been + * connected, `true` if the `Http2Session` is connected with a `TLSSocket`, + * and `false` if the `Http2Session` is connected to any other kind of socket + * or stream. + * @since v9.4.0 + */ + readonly encrypted?: boolean | undefined; + /** + * A prototype-less object describing the current local settings of this `Http2Session`. + * The local settings are local to _this_`Http2Session` instance. + * @since v8.4.0 + */ + readonly localSettings: Settings; + /** + * If the `Http2Session` is connected to a `TLSSocket`, the `originSet` property + * will return an `Array` of origins for which the `Http2Session` may be + * considered authoritative. + * + * The `originSet` property is only available when using a secure TLS connection. + * @since v9.4.0 + */ + readonly originSet?: string[] | undefined; + /** + * Indicates whether the `Http2Session` is currently waiting for acknowledgment of + * a sent `SETTINGS` frame. Will be `true` after calling the `http2session.settings()` method. + * Will be `false` once all sent `SETTINGS` frames have been acknowledged. + * @since v8.4.0 + */ + readonly pendingSettingsAck: boolean; + /** + * A prototype-less object describing the current remote settings of this`Http2Session`. + * The remote settings are set by the _connected_ HTTP/2 peer. + * @since v8.4.0 + */ + readonly remoteSettings: Settings; + /** + * Returns a `Proxy` object that acts as a `net.Socket` (or `tls.TLSSocket`) but + * limits available methods to ones safe to use with HTTP/2. + * + * `destroy`, `emit`, `end`, `pause`, `read`, `resume`, and `write` will throw + * an error with code `ERR_HTTP2_NO_SOCKET_MANIPULATION`. See `Http2Session and Sockets` for more information. + * + * `setTimeout` method will be called on this `Http2Session`. + * + * All other interactions will be routed directly to the socket. + * @since v8.4.0 + */ + readonly socket: net.Socket | tls.TLSSocket; + /** + * Provides miscellaneous information about the current state of the`Http2Session`. + * + * An object describing the current status of this `Http2Session`. + * @since v8.4.0 + */ + readonly state: SessionState; + /** + * The `http2session.type` will be equal to `http2.constants.NGHTTP2_SESSION_SERVER` if this `Http2Session` instance is a + * server, and `http2.constants.NGHTTP2_SESSION_CLIENT` if the instance is a + * client. + * @since v8.4.0 + */ + readonly type: number; + /** + * Gracefully closes the `Http2Session`, allowing any existing streams to + * complete on their own and preventing new `Http2Stream` instances from being + * created. Once closed, `http2session.destroy()`_might_ be called if there + * are no open `Http2Stream` instances. + * + * If specified, the `callback` function is registered as a handler for the`'close'` event. 
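+ *
+ * A minimal illustrative sketch (added here; not from the upstream Node.js docs), assuming a
+ * server-side `session` delivered by a `'session'` event on an existing `server`:
+ *
+ * ```js
+ * server.on('session', (session) => {
+ *   // Stop accepting new streams; the callback runs once the session has closed.
+ *   session.close(() => console.log('session closed'));
+ * });
+ * ```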
+ * @since v9.4.0 + */ + close(callback?: () => void): void; + /** + * Immediately terminates the `Http2Session` and the associated `net.Socket` or `tls.TLSSocket`. + * + * Once destroyed, the `Http2Session` will emit the `'close'` event. If `error` is not undefined, an `'error'` event will be emitted immediately before the `'close'` event. + * + * If there are any remaining open `Http2Streams` associated with the `Http2Session`, those will also be destroyed. + * @since v8.4.0 + * @param error An `Error` object if the `Http2Session` is being destroyed due to an error. + * @param code The HTTP/2 error code to send in the final `GOAWAY` frame. If unspecified, and `error` is not undefined, the default is `INTERNAL_ERROR`, otherwise defaults to `NO_ERROR`. + */ + destroy(error?: Error, code?: number): void; + /** + * Transmits a `GOAWAY` frame to the connected peer _without_ shutting down the`Http2Session`. + * @since v9.4.0 + * @param code An HTTP/2 error code + * @param lastStreamID The numeric ID of the last processed `Http2Stream` + * @param opaqueData A `TypedArray` or `DataView` instance containing additional data to be carried within the `GOAWAY` frame. + */ + goaway(code?: number, lastStreamID?: number, opaqueData?: NodeJS.ArrayBufferView): void; + /** + * Sends a `PING` frame to the connected HTTP/2 peer. A `callback` function must + * be provided. The method will return `true` if the `PING` was sent, `false` otherwise. + * + * The maximum number of outstanding (unacknowledged) pings is determined by the `maxOutstandingPings` configuration option. The default maximum is 10. + * + * If provided, the `payload` must be a `Buffer`, `TypedArray`, or `DataView` containing 8 bytes of data that will be transmitted with the `PING` and + * returned with the ping acknowledgment. + * + * The callback will be invoked with three arguments: an error argument that will + * be `null` if the `PING` was successfully acknowledged, a `duration` argument + * that reports the number of milliseconds elapsed since the ping was sent and the + * acknowledgment was received, and a `Buffer` containing the 8-byte `PING` payload. + * + * ```js + * session.ping(Buffer.from('abcdefgh'), (err, duration, payload) => { + * if (!err) { + * console.log(`Ping acknowledged in ${duration} milliseconds`); + * console.log(`With payload '${payload.toString()}'`); + * } + * }); + * ``` + * + * If the `payload` argument is not specified, the default payload will be the + * 64-bit timestamp (little endian) marking the start of the `PING` duration. + * @since v8.9.3 + * @param payload Optional ping payload. + */ + ping(callback: (err: Error | null, duration: number, payload: Buffer) => void): boolean; + ping( + payload: NodeJS.ArrayBufferView, + callback: (err: Error | null, duration: number, payload: Buffer) => void, + ): boolean; + /** + * Calls `ref()` on this `Http2Session` instance's underlying `net.Socket`. + * @since v9.4.0 + */ + ref(): void; + /** + * Sets the local endpoint's window size. + * The `windowSize` is the total window size to set, not + * the delta. 
+ * + * ```js + * import http2 from 'node:http2'; + * + * const server = http2.createServer(); + * const expectedWindowSize = 2 ** 20; + * server.on('connect', (session) => { + * + * // Set local window size to be 2 ** 20 + * session.setLocalWindowSize(expectedWindowSize); + * }); + * ``` + * @since v15.3.0, v14.18.0 + */ + setLocalWindowSize(windowSize: number): void; + /** + * Used to set a callback function that is called when there is no activity on + * the `Http2Session` after `msecs` milliseconds. The given `callback` is + * registered as a listener on the `'timeout'` event. + * @since v8.4.0 + */ + setTimeout(msecs: number, callback?: () => void): void; + /** + * Updates the current local settings for this `Http2Session` and sends a new `SETTINGS` frame to the connected HTTP/2 peer. + * + * Once called, the `http2session.pendingSettingsAck` property will be `true` while the session is waiting for the remote peer to acknowledge the new + * settings. + * + * The new settings will not become effective until the `SETTINGS` acknowledgment + * is received and the `'localSettings'` event is emitted. It is possible to send + * multiple `SETTINGS` frames while acknowledgment is still pending. + * @since v8.4.0 + * @param callback Callback that is called once the session is connected or right away if the session is already connected. + */ + settings( + settings: Settings, + callback?: (err: Error | null, settings: Settings, duration: number) => void, + ): void; + /** + * Calls `unref()` on this `Http2Session`instance's underlying `net.Socket`. + * @since v9.4.0 + */ + unref(): void; + addListener(event: "close", listener: () => void): this; + addListener(event: "error", listener: (err: Error) => void): this; + addListener( + event: "frameError", + listener: (frameType: number, errorCode: number, streamID: number) => void, + ): this; + addListener( + event: "goaway", + listener: (errorCode: number, lastStreamID: number, opaqueData?: Buffer) => void, + ): this; + addListener(event: "localSettings", listener: (settings: Settings) => void): this; + addListener(event: "ping", listener: () => void): this; + addListener(event: "remoteSettings", listener: (settings: Settings) => void): this; + addListener(event: "timeout", listener: () => void): this; + addListener(event: string | symbol, listener: (...args: any[]) => void): this; + emit(event: "close"): boolean; + emit(event: "error", err: Error): boolean; + emit(event: "frameError", frameType: number, errorCode: number, streamID: number): boolean; + emit(event: "goaway", errorCode: number, lastStreamID: number, opaqueData?: Buffer): boolean; + emit(event: "localSettings", settings: Settings): boolean; + emit(event: "ping"): boolean; + emit(event: "remoteSettings", settings: Settings): boolean; + emit(event: "timeout"): boolean; + emit(event: string | symbol, ...args: any[]): boolean; + on(event: "close", listener: () => void): this; + on(event: "error", listener: (err: Error) => void): this; + on(event: "frameError", listener: (frameType: number, errorCode: number, streamID: number) => void): this; + on(event: "goaway", listener: (errorCode: number, lastStreamID: number, opaqueData?: Buffer) => void): this; + on(event: "localSettings", listener: (settings: Settings) => void): this; + on(event: "ping", listener: () => void): this; + on(event: "remoteSettings", listener: (settings: Settings) => void): this; + on(event: "timeout", listener: () => void): this; + on(event: string | symbol, listener: (...args: any[]) => void): this; + once(event: 
"close", listener: () => void): this; + once(event: "error", listener: (err: Error) => void): this; + once(event: "frameError", listener: (frameType: number, errorCode: number, streamID: number) => void): this; + once(event: "goaway", listener: (errorCode: number, lastStreamID: number, opaqueData?: Buffer) => void): this; + once(event: "localSettings", listener: (settings: Settings) => void): this; + once(event: "ping", listener: () => void): this; + once(event: "remoteSettings", listener: (settings: Settings) => void): this; + once(event: "timeout", listener: () => void): this; + once(event: string | symbol, listener: (...args: any[]) => void): this; + prependListener(event: "close", listener: () => void): this; + prependListener(event: "error", listener: (err: Error) => void): this; + prependListener( + event: "frameError", + listener: (frameType: number, errorCode: number, streamID: number) => void, + ): this; + prependListener( + event: "goaway", + listener: (errorCode: number, lastStreamID: number, opaqueData?: Buffer) => void, + ): this; + prependListener(event: "localSettings", listener: (settings: Settings) => void): this; + prependListener(event: "ping", listener: () => void): this; + prependListener(event: "remoteSettings", listener: (settings: Settings) => void): this; + prependListener(event: "timeout", listener: () => void): this; + prependListener(event: string | symbol, listener: (...args: any[]) => void): this; + prependOnceListener(event: "close", listener: () => void): this; + prependOnceListener(event: "error", listener: (err: Error) => void): this; + prependOnceListener( + event: "frameError", + listener: (frameType: number, errorCode: number, streamID: number) => void, + ): this; + prependOnceListener( + event: "goaway", + listener: (errorCode: number, lastStreamID: number, opaqueData?: Buffer) => void, + ): this; + prependOnceListener(event: "localSettings", listener: (settings: Settings) => void): this; + prependOnceListener(event: "ping", listener: () => void): this; + prependOnceListener(event: "remoteSettings", listener: (settings: Settings) => void): this; + prependOnceListener(event: "timeout", listener: () => void): this; + prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; + } + export interface ClientHttp2Session extends Http2Session { + /** + * For HTTP/2 Client `Http2Session` instances only, the `http2session.request()` creates and returns an `Http2Stream` instance that can be used to send an + * HTTP/2 request to the connected server. + * + * When a `ClientHttp2Session` is first created, the socket may not yet be + * connected. if `clienthttp2session.request()` is called during this time, the + * actual request will be deferred until the socket is ready to go. + * If the `session` is closed before the actual request be executed, an `ERR_HTTP2_GOAWAY_SESSION` is thrown. + * + * This method is only available if `http2session.type` is equal to `http2.constants.NGHTTP2_SESSION_CLIENT`. + * + * ```js + * import http2 from 'node:http2'; + * const clientSession = http2.connect('https://localhost:1234'); + * const { + * HTTP2_HEADER_PATH, + * HTTP2_HEADER_STATUS, + * } = http2.constants; + * + * const req = clientSession.request({ [HTTP2_HEADER_PATH]: '/' }); + * req.on('response', (headers) => { + * console.log(headers[HTTP2_HEADER_STATUS]); + * req.on('data', (chunk) => { // .. }); + * req.on('end', () => { // .. 
}); + * }); + * ``` + * + * When the `options.waitForTrailers` option is set, the `'wantTrailers'` event + * is emitted immediately after queuing the last chunk of payload data to be sent. + * The `http2stream.sendTrailers()` method can then be called to send trailing + * headers to the peer. + * + * When `options.waitForTrailers` is set, the `Http2Stream` will not automatically + * close when the final `DATA` frame is transmitted. User code must call either`http2stream.sendTrailers()` or `http2stream.close()` to close the`Http2Stream`. + * + * When `options.signal` is set with an `AbortSignal` and then `abort` on the + * corresponding `AbortController` is called, the request will emit an `'error'`event with an `AbortError` error. + * + * The `:method` and `:path` pseudo-headers are not specified within `headers`, + * they respectively default to: + * + * * `:method` \= `'GET'` + * * `:path` \= `/` + * @since v8.4.0 + */ + request(headers?: OutgoingHttpHeaders, options?: ClientSessionRequestOptions): ClientHttp2Stream; + addListener(event: "altsvc", listener: (alt: string, origin: string, stream: number) => void): this; + addListener(event: "origin", listener: (origins: string[]) => void): this; + addListener( + event: "connect", + listener: (session: ClientHttp2Session, socket: net.Socket | tls.TLSSocket) => void, + ): this; + addListener( + event: "stream", + listener: ( + stream: ClientHttp2Stream, + headers: IncomingHttpHeaders & IncomingHttpStatusHeader, + flags: number, + ) => void, + ): this; + addListener(event: string | symbol, listener: (...args: any[]) => void): this; + emit(event: "altsvc", alt: string, origin: string, stream: number): boolean; + emit(event: "origin", origins: readonly string[]): boolean; + emit(event: "connect", session: ClientHttp2Session, socket: net.Socket | tls.TLSSocket): boolean; + emit( + event: "stream", + stream: ClientHttp2Stream, + headers: IncomingHttpHeaders & IncomingHttpStatusHeader, + flags: number, + ): boolean; + emit(event: string | symbol, ...args: any[]): boolean; + on(event: "altsvc", listener: (alt: string, origin: string, stream: number) => void): this; + on(event: "origin", listener: (origins: string[]) => void): this; + on(event: "connect", listener: (session: ClientHttp2Session, socket: net.Socket | tls.TLSSocket) => void): this; + on( + event: "stream", + listener: ( + stream: ClientHttp2Stream, + headers: IncomingHttpHeaders & IncomingHttpStatusHeader, + flags: number, + ) => void, + ): this; + on(event: string | symbol, listener: (...args: any[]) => void): this; + once(event: "altsvc", listener: (alt: string, origin: string, stream: number) => void): this; + once(event: "origin", listener: (origins: string[]) => void): this; + once( + event: "connect", + listener: (session: ClientHttp2Session, socket: net.Socket | tls.TLSSocket) => void, + ): this; + once( + event: "stream", + listener: ( + stream: ClientHttp2Stream, + headers: IncomingHttpHeaders & IncomingHttpStatusHeader, + flags: number, + ) => void, + ): this; + once(event: string | symbol, listener: (...args: any[]) => void): this; + prependListener(event: "altsvc", listener: (alt: string, origin: string, stream: number) => void): this; + prependListener(event: "origin", listener: (origins: string[]) => void): this; + prependListener( + event: "connect", + listener: (session: ClientHttp2Session, socket: net.Socket | tls.TLSSocket) => void, + ): this; + prependListener( + event: "stream", + listener: ( + stream: ClientHttp2Stream, + headers: IncomingHttpHeaders & 
IncomingHttpStatusHeader,
+ flags: number,
+ ) => void,
+ ): this;
+ prependListener(event: string | symbol, listener: (...args: any[]) => void): this;
+ prependOnceListener(event: "altsvc", listener: (alt: string, origin: string, stream: number) => void): this;
+ prependOnceListener(event: "origin", listener: (origins: string[]) => void): this;
+ prependOnceListener(
+ event: "connect",
+ listener: (session: ClientHttp2Session, socket: net.Socket | tls.TLSSocket) => void,
+ ): this;
+ prependOnceListener(
+ event: "stream",
+ listener: (
+ stream: ClientHttp2Stream,
+ headers: IncomingHttpHeaders & IncomingHttpStatusHeader,
+ flags: number,
+ ) => void,
+ ): this;
+ prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this;
+ }
+ export interface AlternativeServiceOptions {
+ origin: number | string | url.URL;
+ }
+ export interface ServerHttp2Session<
+ Http1Request extends typeof IncomingMessage = typeof IncomingMessage,
+ Http1Response extends typeof ServerResponse<InstanceType<Http1Request>> = typeof ServerResponse,
+ Http2Request extends typeof Http2ServerRequest = typeof Http2ServerRequest,
+ Http2Response extends typeof Http2ServerResponse<InstanceType<Http2Request>> = typeof Http2ServerResponse,
+ > extends Http2Session {
+ readonly server:
+ | Http2Server<Http1Request, Http1Response, Http2Request, Http2Response>
+ | Http2SecureServer<Http1Request, Http1Response, Http2Request, Http2Response>;
+ /**
+ * Submits an `ALTSVC` frame (as defined by [RFC 7838](https://tools.ietf.org/html/rfc7838)) to the connected client.
+ *
+ * ```js
+ * import http2 from 'node:http2';
+ *
+ * const server = http2.createServer();
+ * server.on('session', (session) => {
+ * // Set altsvc for origin https://example.org:80
+ * session.altsvc('h2=":8000"', 'https://example.org:80');
+ * });
+ *
+ * server.on('stream', (stream) => {
+ * // Set altsvc for a specific stream
+ * stream.session.altsvc('h2=":8000"', stream.id);
+ * });
+ * ```
+ *
+ * Sending an `ALTSVC` frame with a specific stream ID indicates that the alternate
+ * service is associated with the origin of the given `Http2Stream`.
+ *
+ * The `alt` and origin string _must_ contain only ASCII bytes and are
+ * strictly interpreted as a sequence of ASCII bytes. The special value `'clear'` may be passed to clear any previously set alternative service for a given
+ * domain.
+ *
+ * When a string is passed for the `originOrStream` argument, it will be parsed as
+ * a URL and the origin will be derived. For instance, the origin for the
+ * HTTP URL `'https://example.org/foo/bar'` is the ASCII string `'https://example.org'`. An error will be thrown if either the given string
+ * cannot be parsed as a URL or if a valid origin cannot be derived.
+ *
+ * A `URL` object, or any object with an `origin` property, may be passed as `originOrStream`, in which case the value of the `origin` property will be
+ * used. The value of the `origin` property _must_ be a properly serialized
+ * ASCII origin.
+ * @since v9.4.0
+ * @param alt A description of the alternative service configuration as defined by `RFC 7838`.
+ * @param originOrStream Either a URL string specifying the origin (or an `Object` with an `origin` property) or the numeric identifier of an active `Http2Stream` as given by the
+ * `http2stream.id` property.
+ */
+ altsvc(alt: string, originOrStream: number | string | url.URL | AlternativeServiceOptions): void;
+ /**
+ * Submits an `ORIGIN` frame (as defined by [RFC 8336](https://tools.ietf.org/html/rfc8336)) to the connected client
+ * to advertise the set of origins for which the server is capable of providing
+ * authoritative responses.
+ * + * ```js + * import http2 from 'node:http2'; + * const options = getSecureOptionsSomehow(); + * const server = http2.createSecureServer(options); + * server.on('stream', (stream) => { + * stream.respond(); + * stream.end('ok'); + * }); + * server.on('session', (session) => { + * session.origin('https://example.com', 'https://example.org'); + * }); + * ``` + * + * When a string is passed as an `origin`, it will be parsed as a URL and the + * origin will be derived. For instance, the origin for the HTTP URL `'https://example.org/foo/bar'` is the ASCII string` 'https://example.org'`. An error will be thrown if either the given + * string + * cannot be parsed as a URL or if a valid origin cannot be derived. + * + * A `URL` object, or any object with an `origin` property, may be passed as + * an `origin`, in which case the value of the `origin` property will be + * used. The value of the `origin` property _must_ be a properly serialized + * ASCII origin. + * + * Alternatively, the `origins` option may be used when creating a new HTTP/2 + * server using the `http2.createSecureServer()` method: + * + * ```js + * import http2 from 'node:http2'; + * const options = getSecureOptionsSomehow(); + * options.origins = ['https://example.com', 'https://example.org']; + * const server = http2.createSecureServer(options); + * server.on('stream', (stream) => { + * stream.respond(); + * stream.end('ok'); + * }); + * ``` + * @since v10.12.0 + * @param origins One or more URL Strings passed as separate arguments. + */ + origin( + ...origins: Array< + | string + | url.URL + | { + origin: string; + } + > + ): void; + addListener( + event: "connect", + listener: ( + session: ServerHttp2Session, + socket: net.Socket | tls.TLSSocket, + ) => void, + ): this; + addListener( + event: "stream", + listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void, + ): this; + addListener(event: string | symbol, listener: (...args: any[]) => void): this; + emit( + event: "connect", + session: ServerHttp2Session, + socket: net.Socket | tls.TLSSocket, + ): boolean; + emit(event: "stream", stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number): boolean; + emit(event: string | symbol, ...args: any[]): boolean; + on( + event: "connect", + listener: ( + session: ServerHttp2Session, + socket: net.Socket | tls.TLSSocket, + ) => void, + ): this; + on( + event: "stream", + listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void, + ): this; + on(event: string | symbol, listener: (...args: any[]) => void): this; + once( + event: "connect", + listener: ( + session: ServerHttp2Session, + socket: net.Socket | tls.TLSSocket, + ) => void, + ): this; + once( + event: "stream", + listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void, + ): this; + once(event: string | symbol, listener: (...args: any[]) => void): this; + prependListener( + event: "connect", + listener: ( + session: ServerHttp2Session, + socket: net.Socket | tls.TLSSocket, + ) => void, + ): this; + prependListener( + event: "stream", + listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void, + ): this; + prependListener(event: string | symbol, listener: (...args: any[]) => void): this; + prependOnceListener( + event: "connect", + listener: ( + session: ServerHttp2Session, + socket: net.Socket | tls.TLSSocket, + ) => void, + ): this; + prependOnceListener( + event: "stream", + listener: (stream: ServerHttp2Stream, headers: 
IncomingHttpHeaders, flags: number) => void, + ): this; + prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; + } + // Http2Server + export interface SessionOptions { + maxDeflateDynamicTableSize?: number | undefined; + maxSessionMemory?: number | undefined; + maxHeaderListPairs?: number | undefined; + maxOutstandingPings?: number | undefined; + maxSendHeaderBlockLength?: number | undefined; + paddingStrategy?: number | undefined; + peerMaxConcurrentStreams?: number | undefined; + settings?: Settings | undefined; + remoteCustomSettings?: number[] | undefined; + /** + * Specifies a timeout in milliseconds that + * a server should wait when an [`'unknownProtocol'`][] is emitted. If the + * socket has not been destroyed by that time the server will destroy it. + * @default 100000 + */ + unknownProtocolTimeout?: number | undefined; + selectPadding?(frameLen: number, maxFrameLen: number): number; + } + export interface ClientSessionOptions extends SessionOptions { + maxReservedRemoteStreams?: number | undefined; + createConnection?: ((authority: url.URL, option: SessionOptions) => stream.Duplex) | undefined; + protocol?: "http:" | "https:" | undefined; + } + export interface ServerSessionOptions< + Http1Request extends typeof IncomingMessage = typeof IncomingMessage, + Http1Response extends typeof ServerResponse> = typeof ServerResponse, + Http2Request extends typeof Http2ServerRequest = typeof Http2ServerRequest, + Http2Response extends typeof Http2ServerResponse> = typeof Http2ServerResponse, + > extends SessionOptions { + Http1IncomingMessage?: Http1Request | undefined; + Http1ServerResponse?: Http1Response | undefined; + Http2ServerRequest?: Http2Request | undefined; + Http2ServerResponse?: Http2Response | undefined; + } + export interface SecureClientSessionOptions extends ClientSessionOptions, tls.ConnectionOptions {} + export interface SecureServerSessionOptions< + Http1Request extends typeof IncomingMessage = typeof IncomingMessage, + Http1Response extends typeof ServerResponse> = typeof ServerResponse, + Http2Request extends typeof Http2ServerRequest = typeof Http2ServerRequest, + Http2Response extends typeof Http2ServerResponse> = typeof Http2ServerResponse, + > extends ServerSessionOptions, tls.TlsOptions {} + export interface ServerOptions< + Http1Request extends typeof IncomingMessage = typeof IncomingMessage, + Http1Response extends typeof ServerResponse> = typeof ServerResponse, + Http2Request extends typeof Http2ServerRequest = typeof Http2ServerRequest, + Http2Response extends typeof Http2ServerResponse> = typeof Http2ServerResponse, + > extends ServerSessionOptions { + streamResetBurst?: number | undefined; + streamResetRate?: number | undefined; + } + export interface SecureServerOptions< + Http1Request extends typeof IncomingMessage = typeof IncomingMessage, + Http1Response extends typeof ServerResponse> = typeof ServerResponse, + Http2Request extends typeof Http2ServerRequest = typeof Http2ServerRequest, + Http2Response extends typeof Http2ServerResponse> = typeof Http2ServerResponse, + > extends SecureServerSessionOptions { + allowHTTP1?: boolean | undefined; + origins?: string[] | undefined; + } + interface HTTP2ServerCommon { + setTimeout(msec?: number, callback?: () => void): this; + /** + * Throws ERR_HTTP2_INVALID_SETTING_VALUE for invalid settings values. + * Throws ERR_INVALID_ARG_TYPE for invalid settings argument. 
+ */ + updateSettings(settings: Settings): void; + } + export interface Http2Server< + Http1Request extends typeof IncomingMessage = typeof IncomingMessage, + Http1Response extends typeof ServerResponse> = typeof ServerResponse, + Http2Request extends typeof Http2ServerRequest = typeof Http2ServerRequest, + Http2Response extends typeof Http2ServerResponse> = typeof Http2ServerResponse, + > extends net.Server, HTTP2ServerCommon { + addListener( + event: "checkContinue", + listener: (request: InstanceType, response: InstanceType) => void, + ): this; + addListener( + event: "request", + listener: (request: InstanceType, response: InstanceType) => void, + ): this; + addListener( + event: "session", + listener: (session: ServerHttp2Session) => void, + ): this; + addListener(event: "sessionError", listener: (err: Error) => void): this; + addListener( + event: "stream", + listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void, + ): this; + addListener(event: "timeout", listener: () => void): this; + addListener(event: string | symbol, listener: (...args: any[]) => void): this; + emit( + event: "checkContinue", + request: InstanceType, + response: InstanceType, + ): boolean; + emit(event: "request", request: InstanceType, response: InstanceType): boolean; + emit( + event: "session", + session: ServerHttp2Session, + ): boolean; + emit(event: "sessionError", err: Error): boolean; + emit(event: "stream", stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number): boolean; + emit(event: "timeout"): boolean; + emit(event: string | symbol, ...args: any[]): boolean; + on( + event: "checkContinue", + listener: (request: InstanceType, response: InstanceType) => void, + ): this; + on( + event: "request", + listener: (request: InstanceType, response: InstanceType) => void, + ): this; + on( + event: "session", + listener: (session: ServerHttp2Session) => void, + ): this; + on(event: "sessionError", listener: (err: Error) => void): this; + on( + event: "stream", + listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void, + ): this; + on(event: "timeout", listener: () => void): this; + on(event: string | symbol, listener: (...args: any[]) => void): this; + once( + event: "checkContinue", + listener: (request: InstanceType, response: InstanceType) => void, + ): this; + once( + event: "request", + listener: (request: InstanceType, response: InstanceType) => void, + ): this; + once( + event: "session", + listener: (session: ServerHttp2Session) => void, + ): this; + once(event: "sessionError", listener: (err: Error) => void): this; + once( + event: "stream", + listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void, + ): this; + once(event: "timeout", listener: () => void): this; + once(event: string | symbol, listener: (...args: any[]) => void): this; + prependListener( + event: "checkContinue", + listener: (request: InstanceType, response: InstanceType) => void, + ): this; + prependListener( + event: "request", + listener: (request: InstanceType, response: InstanceType) => void, + ): this; + prependListener( + event: "session", + listener: (session: ServerHttp2Session) => void, + ): this; + prependListener(event: "sessionError", listener: (err: Error) => void): this; + prependListener( + event: "stream", + listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void, + ): this; + prependListener(event: "timeout", listener: () => void): this; + prependListener(event: 
string | symbol, listener: (...args: any[]) => void): this; + prependOnceListener( + event: "checkContinue", + listener: (request: InstanceType, response: InstanceType) => void, + ): this; + prependOnceListener( + event: "request", + listener: (request: InstanceType, response: InstanceType) => void, + ): this; + prependOnceListener( + event: "session", + listener: (session: ServerHttp2Session) => void, + ): this; + prependOnceListener(event: "sessionError", listener: (err: Error) => void): this; + prependOnceListener( + event: "stream", + listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void, + ): this; + prependOnceListener(event: "timeout", listener: () => void): this; + prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; + } + export interface Http2SecureServer< + Http1Request extends typeof IncomingMessage = typeof IncomingMessage, + Http1Response extends typeof ServerResponse> = typeof ServerResponse, + Http2Request extends typeof Http2ServerRequest = typeof Http2ServerRequest, + Http2Response extends typeof Http2ServerResponse> = typeof Http2ServerResponse, + > extends tls.Server, HTTP2ServerCommon { + addListener( + event: "checkContinue", + listener: (request: InstanceType, response: InstanceType) => void, + ): this; + addListener( + event: "request", + listener: (request: InstanceType, response: InstanceType) => void, + ): this; + addListener( + event: "session", + listener: (session: ServerHttp2Session) => void, + ): this; + addListener(event: "sessionError", listener: (err: Error) => void): this; + addListener( + event: "stream", + listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void, + ): this; + addListener(event: "timeout", listener: () => void): this; + addListener(event: "unknownProtocol", listener: (socket: tls.TLSSocket) => void): this; + addListener(event: string | symbol, listener: (...args: any[]) => void): this; + emit( + event: "checkContinue", + request: InstanceType, + response: InstanceType, + ): boolean; + emit(event: "request", request: InstanceType, response: InstanceType): boolean; + emit( + event: "session", + session: ServerHttp2Session, + ): boolean; + emit(event: "sessionError", err: Error): boolean; + emit(event: "stream", stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number): boolean; + emit(event: "timeout"): boolean; + emit(event: "unknownProtocol", socket: tls.TLSSocket): boolean; + emit(event: string | symbol, ...args: any[]): boolean; + on( + event: "checkContinue", + listener: (request: InstanceType, response: InstanceType) => void, + ): this; + on( + event: "request", + listener: (request: InstanceType, response: InstanceType) => void, + ): this; + on( + event: "session", + listener: (session: ServerHttp2Session) => void, + ): this; + on(event: "sessionError", listener: (err: Error) => void): this; + on( + event: "stream", + listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void, + ): this; + on(event: "timeout", listener: () => void): this; + on(event: "unknownProtocol", listener: (socket: tls.TLSSocket) => void): this; + on(event: string | symbol, listener: (...args: any[]) => void): this; + once( + event: "checkContinue", + listener: (request: InstanceType, response: InstanceType) => void, + ): this; + once( + event: "request", + listener: (request: InstanceType, response: InstanceType) => void, + ): this; + once( + event: "session", + listener: (session: ServerHttp2Session) => void, 
+ ): this; + once(event: "sessionError", listener: (err: Error) => void): this; + once( + event: "stream", + listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void, + ): this; + once(event: "timeout", listener: () => void): this; + once(event: "unknownProtocol", listener: (socket: tls.TLSSocket) => void): this; + once(event: string | symbol, listener: (...args: any[]) => void): this; + prependListener( + event: "checkContinue", + listener: (request: InstanceType, response: InstanceType) => void, + ): this; + prependListener( + event: "request", + listener: (request: InstanceType, response: InstanceType) => void, + ): this; + prependListener( + event: "session", + listener: (session: ServerHttp2Session) => void, + ): this; + prependListener(event: "sessionError", listener: (err: Error) => void): this; + prependListener( + event: "stream", + listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void, + ): this; + prependListener(event: "timeout", listener: () => void): this; + prependListener(event: "unknownProtocol", listener: (socket: tls.TLSSocket) => void): this; + prependListener(event: string | symbol, listener: (...args: any[]) => void): this; + prependOnceListener( + event: "checkContinue", + listener: (request: InstanceType, response: InstanceType) => void, + ): this; + prependOnceListener( + event: "request", + listener: (request: InstanceType, response: InstanceType) => void, + ): this; + prependOnceListener( + event: "session", + listener: (session: ServerHttp2Session) => void, + ): this; + prependOnceListener(event: "sessionError", listener: (err: Error) => void): this; + prependOnceListener( + event: "stream", + listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void, + ): this; + prependOnceListener(event: "timeout", listener: () => void): this; + prependOnceListener(event: "unknownProtocol", listener: (socket: tls.TLSSocket) => void): this; + prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; + } + /** + * A `Http2ServerRequest` object is created by {@link Server} or {@link SecureServer} and passed as the first argument to the `'request'` event. It may be used to access a request status, + * headers, and + * data. + * @since v8.4.0 + */ + export class Http2ServerRequest extends stream.Readable { + constructor( + stream: ServerHttp2Stream, + headers: IncomingHttpHeaders, + options: stream.ReadableOptions, + rawHeaders: readonly string[], + ); + /** + * The `request.aborted` property will be `true` if the request has + * been aborted. + * @since v10.1.0 + */ + readonly aborted: boolean; + /** + * The request authority pseudo header field. Because HTTP/2 allows requests + * to set either `:authority` or `host`, this value is derived from `req.headers[':authority']` if present. Otherwise, it is derived from `req.headers['host']`. + * @since v8.4.0 + */ + readonly authority: string; + /** + * See `request.socket`. + * @since v8.4.0 + * @deprecated Since v13.0.0 - Use `socket`. + */ + readonly connection: net.Socket | tls.TLSSocket; + /** + * The `request.complete` property will be `true` if the request has + * been completed, aborted, or destroyed. + * @since v12.10.0 + */ + readonly complete: boolean; + /** + * The request/response headers object. + * + * Key-value pairs of header names and values. Header names are lower-cased. 
+ * + * ```js + * // Prints something like: + * // + * // { 'user-agent': 'curl/7.22.0', + * // host: '127.0.0.1:8000', + * // accept: '*' } + * console.log(request.headers); + * ``` + * + * See `HTTP/2 Headers Object`. + * + * In HTTP/2, the request path, host name, protocol, and method are represented as + * special headers prefixed with the `:` character (e.g. `':path'`). These special + * headers will be included in the `request.headers` object. Care must be taken not + * to inadvertently modify these special headers or errors may occur. For instance, + * removing all headers from the request will cause errors to occur: + * + * ```js + * removeAllHeaders(request.headers); + * assert(request.url); // Fails because the :path header has been removed + * ``` + * @since v8.4.0 + */ + readonly headers: IncomingHttpHeaders; + /** + * In case of server request, the HTTP version sent by the client. In the case of + * client response, the HTTP version of the connected-to server. Returns `'2.0'`. + * + * Also `message.httpVersionMajor` is the first integer and `message.httpVersionMinor` is the second. + * @since v8.4.0 + */ + readonly httpVersion: string; + readonly httpVersionMinor: number; + readonly httpVersionMajor: number; + /** + * The request method as a string. Read-only. Examples: `'GET'`, `'DELETE'`. + * @since v8.4.0 + */ + readonly method: string; + /** + * The raw request/response headers list exactly as they were received. + * + * The keys and values are in the same list. It is _not_ a + * list of tuples. So, the even-numbered offsets are key values, and the + * odd-numbered offsets are the associated values. + * + * Header names are not lowercased, and duplicates are not merged. + * + * ```js + * // Prints something like: + * // + * // [ 'user-agent', + * // 'this is invalid because there can be only one', + * // 'User-Agent', + * // 'curl/7.22.0', + * // 'Host', + * // '127.0.0.1:8000', + * // 'ACCEPT', + * // '*' ] + * console.log(request.rawHeaders); + * ``` + * @since v8.4.0 + */ + readonly rawHeaders: string[]; + /** + * The raw request/response trailer keys and values exactly as they were + * received. Only populated at the `'end'` event. + * @since v8.4.0 + */ + readonly rawTrailers: string[]; + /** + * The request scheme pseudo header field indicating the scheme + * portion of the target URL. + * @since v8.4.0 + */ + readonly scheme: string; + /** + * Returns a `Proxy` object that acts as a `net.Socket` (or `tls.TLSSocket`) but + * applies getters, setters, and methods based on HTTP/2 logic. + * + * `destroyed`, `readable`, and `writable` properties will be retrieved from and + * set on `request.stream`. + * + * `destroy`, `emit`, `end`, `on` and `once` methods will be called on `request.stream`. + * + * `setTimeout` method will be called on `request.stream.session`. + * + * `pause`, `read`, `resume`, and `write` will throw an error with code `ERR_HTTP2_NO_SOCKET_MANIPULATION`. See `Http2Session and Sockets` for + * more information. + * + * All other interactions will be routed directly to the socket. With TLS support, + * use `request.socket.getPeerCertificate()` to obtain the client's + * authentication details. + * @since v8.4.0 + */ + readonly socket: net.Socket | tls.TLSSocket; + /** + * The `Http2Stream` object backing the request. + * @since v8.4.0 + */ + readonly stream: ServerHttp2Stream; + /** + * The request/response trailers object. Only populated at the `'end'` event. 
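As a quick illustration of the request properties described above (`method`, `url`, `headers`, and the pseudo-header-derived `scheme`/`authority`), a minimal compatibility-API handler might look like the sketch below; the port and response body are arbitrary assumptions, not part of these type definitions.

```js
import http2 from 'node:http2';

// Compatibility API: `req` is an Http2ServerRequest, `res` an Http2ServerResponse.
const server = http2.createServer((req, res) => {
  // Fields derived from the HTTP/2 pseudo-headers, as documented above.
  console.log(req.method, req.scheme, req.authority, req.url);
  // Header names in `req.headers` are lower-cased.
  const accept = req.headers['accept'];
  res.writeHead(200, { 'content-type': 'text/plain; charset=utf-8' });
  res.end(`You asked for ${req.url} (accept: ${accept})\n`);
});

server.listen(8000); // arbitrary port
```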
+ * @since v8.4.0 + */ + readonly trailers: IncomingHttpHeaders; + /** + * Request URL string. This contains only the URL that is present in the actual + * HTTP request. If the request is: + * + * ```http + * GET /status?name=ryan HTTP/1.1 + * Accept: text/plain + * ``` + * + * Then `request.url` will be: + * + * ```js + * '/status?name=ryan' + * ``` + * + * To parse the url into its parts, `new URL()` can be used: + * + * ```console + * $ node + * > new URL('/status?name=ryan', 'http://example.com') + * URL { + * href: 'http://example.com/status?name=ryan', + * origin: 'http://example.com', + * protocol: 'http:', + * username: '', + * password: '', + * host: 'example.com', + * hostname: 'example.com', + * port: '', + * pathname: '/status', + * search: '?name=ryan', + * searchParams: URLSearchParams { 'name' => 'ryan' }, + * hash: '' + * } + * ``` + * @since v8.4.0 + */ + url: string; + /** + * Sets the `Http2Stream`'s timeout value to `msecs`. If a callback is + * provided, then it is added as a listener on the `'timeout'` event on + * the response object. + * + * If no `'timeout'` listener is added to the request, the response, or + * the server, then `Http2Stream`s are destroyed when they time out. If a + * handler is assigned to the request, the response, or the server's `'timeout'`events, timed out sockets must be handled explicitly. + * @since v8.4.0 + */ + setTimeout(msecs: number, callback?: () => void): void; + read(size?: number): Buffer | string | null; + addListener(event: "aborted", listener: (hadError: boolean, code: number) => void): this; + addListener(event: "close", listener: () => void): this; + addListener(event: "data", listener: (chunk: Buffer | string) => void): this; + addListener(event: "end", listener: () => void): this; + addListener(event: "readable", listener: () => void): this; + addListener(event: "error", listener: (err: Error) => void): this; + addListener(event: string | symbol, listener: (...args: any[]) => void): this; + emit(event: "aborted", hadError: boolean, code: number): boolean; + emit(event: "close"): boolean; + emit(event: "data", chunk: Buffer | string): boolean; + emit(event: "end"): boolean; + emit(event: "readable"): boolean; + emit(event: "error", err: Error): boolean; + emit(event: string | symbol, ...args: any[]): boolean; + on(event: "aborted", listener: (hadError: boolean, code: number) => void): this; + on(event: "close", listener: () => void): this; + on(event: "data", listener: (chunk: Buffer | string) => void): this; + on(event: "end", listener: () => void): this; + on(event: "readable", listener: () => void): this; + on(event: "error", listener: (err: Error) => void): this; + on(event: string | symbol, listener: (...args: any[]) => void): this; + once(event: "aborted", listener: (hadError: boolean, code: number) => void): this; + once(event: "close", listener: () => void): this; + once(event: "data", listener: (chunk: Buffer | string) => void): this; + once(event: "end", listener: () => void): this; + once(event: "readable", listener: () => void): this; + once(event: "error", listener: (err: Error) => void): this; + once(event: string | symbol, listener: (...args: any[]) => void): this; + prependListener(event: "aborted", listener: (hadError: boolean, code: number) => void): this; + prependListener(event: "close", listener: () => void): this; + prependListener(event: "data", listener: (chunk: Buffer | string) => void): this; + prependListener(event: "end", listener: () => void): this; + prependListener(event: "readable", listener: 
() => void): this; + prependListener(event: "error", listener: (err: Error) => void): this; + prependListener(event: string | symbol, listener: (...args: any[]) => void): this; + prependOnceListener(event: "aborted", listener: (hadError: boolean, code: number) => void): this; + prependOnceListener(event: "close", listener: () => void): this; + prependOnceListener(event: "data", listener: (chunk: Buffer | string) => void): this; + prependOnceListener(event: "end", listener: () => void): this; + prependOnceListener(event: "readable", listener: () => void): this; + prependOnceListener(event: "error", listener: (err: Error) => void): this; + prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; + } + /** + * This object is created internally by an HTTP server, not by the user. It is + * passed as the second parameter to the `'request'` event. + * @since v8.4.0 + */ + export class Http2ServerResponse extends stream.Writable { + constructor(stream: ServerHttp2Stream); + /** + * See `response.socket`. + * @since v8.4.0 + * @deprecated Since v13.0.0 - Use `socket`. + */ + readonly connection: net.Socket | tls.TLSSocket; + /** + * Append a single header value to the header object. + * + * If the value is an array, this is equivalent to calling this method multiple times. + * + * If there were no previous values for the header, this is equivalent to calling {@link setHeader}. + * + * Attempting to set a header field name or value that contains invalid characters will result in a + * [TypeError](https://nodejs.org/docs/latest-v22.x/api/errors.html#class-typeerror) being thrown. + * + * ```js + * // Returns headers including "set-cookie: a" and "set-cookie: b" + * const server = http2.createServer((req, res) => { + * res.setHeader('set-cookie', 'a'); + * res.appendHeader('set-cookie', 'b'); + * res.writeHead(200); + * res.end('ok'); + * }); + * ``` + * @since v20.12.0 + */ + appendHeader(name: string, value: string | string[]): void; + /** + * Boolean value that indicates whether the response has completed. Starts + * as `false`. After `response.end()` executes, the value will be `true`. + * @since v8.4.0 + * @deprecated Since v13.4.0,v12.16.0 - Use `writableEnded`. + */ + readonly finished: boolean; + /** + * True if headers were sent, false otherwise (read-only). + * @since v8.4.0 + */ + readonly headersSent: boolean; + /** + * A reference to the original HTTP2 `request` object. + * @since v15.7.0 + */ + readonly req: Request; + /** + * Returns a `Proxy` object that acts as a `net.Socket` (or `tls.TLSSocket`) but + * applies getters, setters, and methods based on HTTP/2 logic. + * + * `destroyed`, `readable`, and `writable` properties will be retrieved from and + * set on `response.stream`. + * + * `destroy`, `emit`, `end`, `on` and `once` methods will be called on `response.stream`. + * + * `setTimeout` method will be called on `response.stream.session`. + * + * `pause`, `read`, `resume`, and `write` will throw an error with code `ERR_HTTP2_NO_SOCKET_MANIPULATION`. See `Http2Session and Sockets` for + * more information. + * + * All other interactions will be routed directly to the socket. 
+ * + * ```js + * import http2 from 'node:http2'; + * const server = http2.createServer((req, res) => { + * const ip = req.socket.remoteAddress; + * const port = req.socket.remotePort; + * res.end(`Your IP address is ${ip} and your source port is ${port}.`); + * }).listen(3000); + * ``` + * @since v8.4.0 + */ + readonly socket: net.Socket | tls.TLSSocket; + /** + * The `Http2Stream` object backing the response. + * @since v8.4.0 + */ + readonly stream: ServerHttp2Stream; + /** + * When true, the Date header will be automatically generated and sent in + * the response if it is not already present in the headers. Defaults to true. + * + * This should only be disabled for testing; HTTP requires the Date header + * in responses. + * @since v8.4.0 + */ + sendDate: boolean; + /** + * When using implicit headers (not calling `response.writeHead()` explicitly), + * this property controls the status code that will be sent to the client when + * the headers get flushed. + * + * ```js + * response.statusCode = 404; + * ``` + * + * After response header was sent to the client, this property indicates the + * status code which was sent out. + * @since v8.4.0 + */ + statusCode: number; + /** + * Status message is not supported by HTTP/2 (RFC 7540 8.1.2.4). It returns + * an empty string. + * @since v8.4.0 + */ + statusMessage: ""; + /** + * This method adds HTTP trailing headers (a header but at the end of the + * message) to the response. + * + * Attempting to set a header field name or value that contains invalid characters + * will result in a `TypeError` being thrown. + * @since v8.4.0 + */ + addTrailers(trailers: OutgoingHttpHeaders): void; + /** + * This method signals to the server that all of the response headers and body + * have been sent; that server should consider this message complete. + * The method, `response.end()`, MUST be called on each response. + * + * If `data` is specified, it is equivalent to calling `response.write(data, encoding)` followed by `response.end(callback)`. + * + * If `callback` is specified, it will be called when the response stream + * is finished. + * @since v8.4.0 + */ + end(callback?: () => void): this; + end(data: string | Uint8Array, callback?: () => void): this; + end(data: string | Uint8Array, encoding: BufferEncoding, callback?: () => void): this; + /** + * Reads out a header that has already been queued but not sent to the client. + * The name is case-insensitive. + * + * ```js + * const contentType = response.getHeader('content-type'); + * ``` + * @since v8.4.0 + */ + getHeader(name: string): string; + /** + * Returns an array containing the unique names of the current outgoing headers. + * All header names are lowercase. + * + * ```js + * response.setHeader('Foo', 'bar'); + * response.setHeader('Set-Cookie', ['foo=bar', 'bar=baz']); + * + * const headerNames = response.getHeaderNames(); + * // headerNames === ['foo', 'set-cookie'] + * ``` + * @since v8.4.0 + */ + getHeaderNames(): string[]; + /** + * Returns a shallow copy of the current outgoing headers. Since a shallow copy + * is used, array values may be mutated without additional calls to various + * header-related http module methods. The keys of the returned object are the + * header names and the values are the respective header values. All header names + * are lowercase. + * + * The object returned by the `response.getHeaders()` method _does not_ prototypically inherit from the JavaScript `Object`. 
This means that typical `Object` methods such as `obj.toString()`, + * `obj.hasOwnProperty()`, and others + * are not defined and _will not work_. + * + * ```js + * response.setHeader('Foo', 'bar'); + * response.setHeader('Set-Cookie', ['foo=bar', 'bar=baz']); + * + * const headers = response.getHeaders(); + * // headers === { foo: 'bar', 'set-cookie': ['foo=bar', 'bar=baz'] } + * ``` + * @since v8.4.0 + */ + getHeaders(): OutgoingHttpHeaders; + /** + * Returns `true` if the header identified by `name` is currently set in the + * outgoing headers. The header name matching is case-insensitive. + * + * ```js + * const hasContentType = response.hasHeader('content-type'); + * ``` + * @since v8.4.0 + */ + hasHeader(name: string): boolean; + /** + * Removes a header that has been queued for implicit sending. + * + * ```js + * response.removeHeader('Content-Encoding'); + * ``` + * @since v8.4.0 + */ + removeHeader(name: string): void; + /** + * Sets a single header value for implicit headers. If this header already exists + * in the to-be-sent headers, its value will be replaced. Use an array of strings + * here to send multiple headers with the same name. + * + * ```js + * response.setHeader('Content-Type', 'text/html; charset=utf-8'); + * ``` + * + * or + * + * ```js + * response.setHeader('Set-Cookie', ['type=ninja', 'language=javascript']); + * ``` + * + * Attempting to set a header field name or value that contains invalid characters + * will result in a `TypeError` being thrown. + * + * When headers have been set with `response.setHeader()`, they will be merged + * with any headers passed to `response.writeHead()`, with the headers passed + * to `response.writeHead()` given precedence. + * + * ```js + * // Returns content-type = text/plain + * const server = http2.createServer((req, res) => { + * res.setHeader('Content-Type', 'text/html; charset=utf-8'); + * res.setHeader('X-Foo', 'bar'); + * res.writeHead(200, { 'Content-Type': 'text/plain; charset=utf-8' }); + * res.end('ok'); + * }); + * ``` + * @since v8.4.0 + */ + setHeader(name: string, value: number | string | readonly string[]): void; + /** + * Sets the `Http2Stream`'s timeout value to `msecs`. If a callback is + * provided, then it is added as a listener on the `'timeout'` event on + * the response object. + * + * If no `'timeout'` listener is added to the request, the response, or + * the server, then `Http2Stream` s are destroyed when they time out. If a + * handler is assigned to the request, the response, or the server's `'timeout'` events, timed out sockets must be handled explicitly. + * @since v8.4.0 + */ + setTimeout(msecs: number, callback?: () => void): void; + /** + * If this method is called and `response.writeHead()` has not been called, + * it will switch to implicit header mode and flush the implicit headers. + * + * This sends a chunk of the response body. This method may + * be called multiple times to provide successive parts of the body. + * + * In the `node:http` module, the response body is omitted when the + * request is a HEAD request. Similarly, the `204` and `304` responses _must not_ include a message body. + * + * `chunk` can be a string or a buffer. If `chunk` is a string, + * the second parameter specifies how to encode it into a byte stream. + * By default the `encoding` is `'utf8'`. `callback` will be called when this chunk + * of data is flushed. + * + * This is the raw HTTP body and has nothing to do with higher-level multi-part + * body encodings that may be used. 
+ * + * The first time `response.write()` is called, it will send the buffered + * header information and the first chunk of the body to the client. The second + * time `response.write()` is called, Node.js assumes data will be streamed, + * and sends the new data separately. That is, the response is buffered up to the + * first chunk of the body. + * + * Returns `true` if the entire data was flushed successfully to the kernel + * buffer. Returns `false` if all or part of the data was queued in user memory.`'drain'` will be emitted when the buffer is free again. + * @since v8.4.0 + */ + write(chunk: string | Uint8Array, callback?: (err: Error) => void): boolean; + write(chunk: string | Uint8Array, encoding: BufferEncoding, callback?: (err: Error) => void): boolean; + /** + * Sends a status `100 Continue` to the client, indicating that the request body + * should be sent. See the `'checkContinue'` event on `Http2Server` and `Http2SecureServer`. + * @since v8.4.0 + */ + writeContinue(): void; + /** + * Sends a status `103 Early Hints` to the client with a Link header, + * indicating that the user agent can preload/preconnect the linked resources. + * The `hints` is an object containing the values of headers to be sent with + * early hints message. + * + * **Example** + * + * ```js + * const earlyHintsLink = '; rel=preload; as=style'; + * response.writeEarlyHints({ + * 'link': earlyHintsLink, + * }); + * + * const earlyHintsLinks = [ + * '; rel=preload; as=style', + * '; rel=preload; as=script', + * ]; + * response.writeEarlyHints({ + * 'link': earlyHintsLinks, + * }); + * ``` + * @since v18.11.0 + */ + writeEarlyHints(hints: Record): void; + /** + * Sends a response header to the request. The status code is a 3-digit HTTP + * status code, like `404`. The last argument, `headers`, are the response headers. + * + * Returns a reference to the `Http2ServerResponse`, so that calls can be chained. + * + * For compatibility with `HTTP/1`, a human-readable `statusMessage` may be + * passed as the second argument. However, because the `statusMessage` has no + * meaning within HTTP/2, the argument will have no effect and a process warning + * will be emitted. + * + * ```js + * const body = 'hello world'; + * response.writeHead(200, { + * 'Content-Length': Buffer.byteLength(body), + * 'Content-Type': 'text/plain; charset=utf-8', + * }); + * ``` + * + * `Content-Length` is given in bytes not characters. The`Buffer.byteLength()` API may be used to determine the number of bytes in a + * given encoding. On outbound messages, Node.js does not check if Content-Length + * and the length of the body being transmitted are equal or not. However, when + * receiving messages, Node.js will automatically reject messages when the `Content-Length` does not match the actual payload size. + * + * This method may be called at most one time on a message before `response.end()` is called. + * + * If `response.write()` or `response.end()` are called before calling + * this, the implicit/mutable headers will be calculated and call this function. + * + * When headers have been set with `response.setHeader()`, they will be merged + * with any headers passed to `response.writeHead()`, with the headers passed + * to `response.writeHead()` given precedence. 
+ * + * ```js + * // Returns content-type = text/plain + * const server = http2.createServer((req, res) => { + * res.setHeader('Content-Type', 'text/html; charset=utf-8'); + * res.setHeader('X-Foo', 'bar'); + * res.writeHead(200, { 'Content-Type': 'text/plain; charset=utf-8' }); + * res.end('ok'); + * }); + * ``` + * + * Attempting to set a header field name or value that contains invalid characters + * will result in a `TypeError` being thrown. + * @since v8.4.0 + */ + writeHead(statusCode: number, headers?: OutgoingHttpHeaders): this; + writeHead(statusCode: number, statusMessage: string, headers?: OutgoingHttpHeaders): this; + /** + * Call `http2stream.pushStream()` with the given headers, and wrap the + * given `Http2Stream` on a newly created `Http2ServerResponse` as the callback + * parameter if successful. When `Http2ServerRequest` is closed, the callback is + * called with an error `ERR_HTTP2_INVALID_STREAM`. + * @since v8.4.0 + * @param headers An object describing the headers + * @param callback Called once `http2stream.pushStream()` is finished, or either when the attempt to create the pushed `Http2Stream` has failed or has been rejected, or the state of + * `Http2ServerRequest` is closed prior to calling the `http2stream.pushStream()` method + */ + createPushResponse( + headers: OutgoingHttpHeaders, + callback: (err: Error | null, res: Http2ServerResponse) => void, + ): void; + addListener(event: "close", listener: () => void): this; + addListener(event: "drain", listener: () => void): this; + addListener(event: "error", listener: (error: Error) => void): this; + addListener(event: "finish", listener: () => void): this; + addListener(event: "pipe", listener: (src: stream.Readable) => void): this; + addListener(event: "unpipe", listener: (src: stream.Readable) => void): this; + addListener(event: string | symbol, listener: (...args: any[]) => void): this; + emit(event: "close"): boolean; + emit(event: "drain"): boolean; + emit(event: "error", error: Error): boolean; + emit(event: "finish"): boolean; + emit(event: "pipe", src: stream.Readable): boolean; + emit(event: "unpipe", src: stream.Readable): boolean; + emit(event: string | symbol, ...args: any[]): boolean; + on(event: "close", listener: () => void): this; + on(event: "drain", listener: () => void): this; + on(event: "error", listener: (error: Error) => void): this; + on(event: "finish", listener: () => void): this; + on(event: "pipe", listener: (src: stream.Readable) => void): this; + on(event: "unpipe", listener: (src: stream.Readable) => void): this; + on(event: string | symbol, listener: (...args: any[]) => void): this; + once(event: "close", listener: () => void): this; + once(event: "drain", listener: () => void): this; + once(event: "error", listener: (error: Error) => void): this; + once(event: "finish", listener: () => void): this; + once(event: "pipe", listener: (src: stream.Readable) => void): this; + once(event: "unpipe", listener: (src: stream.Readable) => void): this; + once(event: string | symbol, listener: (...args: any[]) => void): this; + prependListener(event: "close", listener: () => void): this; + prependListener(event: "drain", listener: () => void): this; + prependListener(event: "error", listener: (error: Error) => void): this; + prependListener(event: "finish", listener: () => void): this; + prependListener(event: "pipe", listener: (src: stream.Readable) => void): this; + prependListener(event: "unpipe", listener: (src: stream.Readable) => void): this; + prependListener(event: string | symbol, 
listener: (...args: any[]) => void): this; + prependOnceListener(event: "close", listener: () => void): this; + prependOnceListener(event: "drain", listener: () => void): this; + prependOnceListener(event: "error", listener: (error: Error) => void): this; + prependOnceListener(event: "finish", listener: () => void): this; + prependOnceListener(event: "pipe", listener: (src: stream.Readable) => void): this; + prependOnceListener(event: "unpipe", listener: (src: stream.Readable) => void): this; + prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; + } + export namespace constants { + const NGHTTP2_SESSION_SERVER: number; + const NGHTTP2_SESSION_CLIENT: number; + const NGHTTP2_STREAM_STATE_IDLE: number; + const NGHTTP2_STREAM_STATE_OPEN: number; + const NGHTTP2_STREAM_STATE_RESERVED_LOCAL: number; + const NGHTTP2_STREAM_STATE_RESERVED_REMOTE: number; + const NGHTTP2_STREAM_STATE_HALF_CLOSED_LOCAL: number; + const NGHTTP2_STREAM_STATE_HALF_CLOSED_REMOTE: number; + const NGHTTP2_STREAM_STATE_CLOSED: number; + const NGHTTP2_NO_ERROR: number; + const NGHTTP2_PROTOCOL_ERROR: number; + const NGHTTP2_INTERNAL_ERROR: number; + const NGHTTP2_FLOW_CONTROL_ERROR: number; + const NGHTTP2_SETTINGS_TIMEOUT: number; + const NGHTTP2_STREAM_CLOSED: number; + const NGHTTP2_FRAME_SIZE_ERROR: number; + const NGHTTP2_REFUSED_STREAM: number; + const NGHTTP2_CANCEL: number; + const NGHTTP2_COMPRESSION_ERROR: number; + const NGHTTP2_CONNECT_ERROR: number; + const NGHTTP2_ENHANCE_YOUR_CALM: number; + const NGHTTP2_INADEQUATE_SECURITY: number; + const NGHTTP2_HTTP_1_1_REQUIRED: number; + const NGHTTP2_ERR_FRAME_SIZE_ERROR: number; + const NGHTTP2_FLAG_NONE: number; + const NGHTTP2_FLAG_END_STREAM: number; + const NGHTTP2_FLAG_END_HEADERS: number; + const NGHTTP2_FLAG_ACK: number; + const NGHTTP2_FLAG_PADDED: number; + const NGHTTP2_FLAG_PRIORITY: number; + const DEFAULT_SETTINGS_HEADER_TABLE_SIZE: number; + const DEFAULT_SETTINGS_ENABLE_PUSH: number; + const DEFAULT_SETTINGS_INITIAL_WINDOW_SIZE: number; + const DEFAULT_SETTINGS_MAX_FRAME_SIZE: number; + const MAX_MAX_FRAME_SIZE: number; + const MIN_MAX_FRAME_SIZE: number; + const MAX_INITIAL_WINDOW_SIZE: number; + const NGHTTP2_DEFAULT_WEIGHT: number; + const NGHTTP2_SETTINGS_HEADER_TABLE_SIZE: number; + const NGHTTP2_SETTINGS_ENABLE_PUSH: number; + const NGHTTP2_SETTINGS_MAX_CONCURRENT_STREAMS: number; + const NGHTTP2_SETTINGS_INITIAL_WINDOW_SIZE: number; + const NGHTTP2_SETTINGS_MAX_FRAME_SIZE: number; + const NGHTTP2_SETTINGS_MAX_HEADER_LIST_SIZE: number; + const PADDING_STRATEGY_NONE: number; + const PADDING_STRATEGY_MAX: number; + const PADDING_STRATEGY_CALLBACK: number; + const HTTP2_HEADER_STATUS: string; + const HTTP2_HEADER_METHOD: string; + const HTTP2_HEADER_AUTHORITY: string; + const HTTP2_HEADER_SCHEME: string; + const HTTP2_HEADER_PATH: string; + const HTTP2_HEADER_ACCEPT_CHARSET: string; + const HTTP2_HEADER_ACCEPT_ENCODING: string; + const HTTP2_HEADER_ACCEPT_LANGUAGE: string; + const HTTP2_HEADER_ACCEPT_RANGES: string; + const HTTP2_HEADER_ACCEPT: string; + const HTTP2_HEADER_ACCESS_CONTROL_ALLOW_CREDENTIALS: string; + const HTTP2_HEADER_ACCESS_CONTROL_ALLOW_HEADERS: string; + const HTTP2_HEADER_ACCESS_CONTROL_ALLOW_METHODS: string; + const HTTP2_HEADER_ACCESS_CONTROL_ALLOW_ORIGIN: string; + const HTTP2_HEADER_ACCESS_CONTROL_EXPOSE_HEADERS: string; + const HTTP2_HEADER_ACCESS_CONTROL_REQUEST_HEADERS: string; + const HTTP2_HEADER_ACCESS_CONTROL_REQUEST_METHOD: string; + const HTTP2_HEADER_AGE: string; + const 
HTTP2_HEADER_ALLOW: string; + const HTTP2_HEADER_AUTHORIZATION: string; + const HTTP2_HEADER_CACHE_CONTROL: string; + const HTTP2_HEADER_CONNECTION: string; + const HTTP2_HEADER_CONTENT_DISPOSITION: string; + const HTTP2_HEADER_CONTENT_ENCODING: string; + const HTTP2_HEADER_CONTENT_LANGUAGE: string; + const HTTP2_HEADER_CONTENT_LENGTH: string; + const HTTP2_HEADER_CONTENT_LOCATION: string; + const HTTP2_HEADER_CONTENT_MD5: string; + const HTTP2_HEADER_CONTENT_RANGE: string; + const HTTP2_HEADER_CONTENT_TYPE: string; + const HTTP2_HEADER_COOKIE: string; + const HTTP2_HEADER_DATE: string; + const HTTP2_HEADER_ETAG: string; + const HTTP2_HEADER_EXPECT: string; + const HTTP2_HEADER_EXPIRES: string; + const HTTP2_HEADER_FROM: string; + const HTTP2_HEADER_HOST: string; + const HTTP2_HEADER_IF_MATCH: string; + const HTTP2_HEADER_IF_MODIFIED_SINCE: string; + const HTTP2_HEADER_IF_NONE_MATCH: string; + const HTTP2_HEADER_IF_RANGE: string; + const HTTP2_HEADER_IF_UNMODIFIED_SINCE: string; + const HTTP2_HEADER_LAST_MODIFIED: string; + const HTTP2_HEADER_LINK: string; + const HTTP2_HEADER_LOCATION: string; + const HTTP2_HEADER_MAX_FORWARDS: string; + const HTTP2_HEADER_PREFER: string; + const HTTP2_HEADER_PROXY_AUTHENTICATE: string; + const HTTP2_HEADER_PROXY_AUTHORIZATION: string; + const HTTP2_HEADER_RANGE: string; + const HTTP2_HEADER_REFERER: string; + const HTTP2_HEADER_REFRESH: string; + const HTTP2_HEADER_RETRY_AFTER: string; + const HTTP2_HEADER_SERVER: string; + const HTTP2_HEADER_SET_COOKIE: string; + const HTTP2_HEADER_STRICT_TRANSPORT_SECURITY: string; + const HTTP2_HEADER_TRANSFER_ENCODING: string; + const HTTP2_HEADER_TE: string; + const HTTP2_HEADER_UPGRADE: string; + const HTTP2_HEADER_USER_AGENT: string; + const HTTP2_HEADER_VARY: string; + const HTTP2_HEADER_VIA: string; + const HTTP2_HEADER_WWW_AUTHENTICATE: string; + const HTTP2_HEADER_HTTP2_SETTINGS: string; + const HTTP2_HEADER_KEEP_ALIVE: string; + const HTTP2_HEADER_PROXY_CONNECTION: string; + const HTTP2_METHOD_ACL: string; + const HTTP2_METHOD_BASELINE_CONTROL: string; + const HTTP2_METHOD_BIND: string; + const HTTP2_METHOD_CHECKIN: string; + const HTTP2_METHOD_CHECKOUT: string; + const HTTP2_METHOD_CONNECT: string; + const HTTP2_METHOD_COPY: string; + const HTTP2_METHOD_DELETE: string; + const HTTP2_METHOD_GET: string; + const HTTP2_METHOD_HEAD: string; + const HTTP2_METHOD_LABEL: string; + const HTTP2_METHOD_LINK: string; + const HTTP2_METHOD_LOCK: string; + const HTTP2_METHOD_MERGE: string; + const HTTP2_METHOD_MKACTIVITY: string; + const HTTP2_METHOD_MKCALENDAR: string; + const HTTP2_METHOD_MKCOL: string; + const HTTP2_METHOD_MKREDIRECTREF: string; + const HTTP2_METHOD_MKWORKSPACE: string; + const HTTP2_METHOD_MOVE: string; + const HTTP2_METHOD_OPTIONS: string; + const HTTP2_METHOD_ORDERPATCH: string; + const HTTP2_METHOD_PATCH: string; + const HTTP2_METHOD_POST: string; + const HTTP2_METHOD_PRI: string; + const HTTP2_METHOD_PROPFIND: string; + const HTTP2_METHOD_PROPPATCH: string; + const HTTP2_METHOD_PUT: string; + const HTTP2_METHOD_REBIND: string; + const HTTP2_METHOD_REPORT: string; + const HTTP2_METHOD_SEARCH: string; + const HTTP2_METHOD_TRACE: string; + const HTTP2_METHOD_UNBIND: string; + const HTTP2_METHOD_UNCHECKOUT: string; + const HTTP2_METHOD_UNLINK: string; + const HTTP2_METHOD_UNLOCK: string; + const HTTP2_METHOD_UPDATE: string; + const HTTP2_METHOD_UPDATEREDIRECTREF: string; + const HTTP2_METHOD_VERSION_CONTROL: string; + const HTTP_STATUS_CONTINUE: number; + const HTTP_STATUS_SWITCHING_PROTOCOLS: number; 
+ const HTTP_STATUS_PROCESSING: number; + const HTTP_STATUS_OK: number; + const HTTP_STATUS_CREATED: number; + const HTTP_STATUS_ACCEPTED: number; + const HTTP_STATUS_NON_AUTHORITATIVE_INFORMATION: number; + const HTTP_STATUS_NO_CONTENT: number; + const HTTP_STATUS_RESET_CONTENT: number; + const HTTP_STATUS_PARTIAL_CONTENT: number; + const HTTP_STATUS_MULTI_STATUS: number; + const HTTP_STATUS_ALREADY_REPORTED: number; + const HTTP_STATUS_IM_USED: number; + const HTTP_STATUS_MULTIPLE_CHOICES: number; + const HTTP_STATUS_MOVED_PERMANENTLY: number; + const HTTP_STATUS_FOUND: number; + const HTTP_STATUS_SEE_OTHER: number; + const HTTP_STATUS_NOT_MODIFIED: number; + const HTTP_STATUS_USE_PROXY: number; + const HTTP_STATUS_TEMPORARY_REDIRECT: number; + const HTTP_STATUS_PERMANENT_REDIRECT: number; + const HTTP_STATUS_BAD_REQUEST: number; + const HTTP_STATUS_UNAUTHORIZED: number; + const HTTP_STATUS_PAYMENT_REQUIRED: number; + const HTTP_STATUS_FORBIDDEN: number; + const HTTP_STATUS_NOT_FOUND: number; + const HTTP_STATUS_METHOD_NOT_ALLOWED: number; + const HTTP_STATUS_NOT_ACCEPTABLE: number; + const HTTP_STATUS_PROXY_AUTHENTICATION_REQUIRED: number; + const HTTP_STATUS_REQUEST_TIMEOUT: number; + const HTTP_STATUS_CONFLICT: number; + const HTTP_STATUS_GONE: number; + const HTTP_STATUS_LENGTH_REQUIRED: number; + const HTTP_STATUS_PRECONDITION_FAILED: number; + const HTTP_STATUS_PAYLOAD_TOO_LARGE: number; + const HTTP_STATUS_URI_TOO_LONG: number; + const HTTP_STATUS_UNSUPPORTED_MEDIA_TYPE: number; + const HTTP_STATUS_RANGE_NOT_SATISFIABLE: number; + const HTTP_STATUS_EXPECTATION_FAILED: number; + const HTTP_STATUS_TEAPOT: number; + const HTTP_STATUS_MISDIRECTED_REQUEST: number; + const HTTP_STATUS_UNPROCESSABLE_ENTITY: number; + const HTTP_STATUS_LOCKED: number; + const HTTP_STATUS_FAILED_DEPENDENCY: number; + const HTTP_STATUS_UNORDERED_COLLECTION: number; + const HTTP_STATUS_UPGRADE_REQUIRED: number; + const HTTP_STATUS_PRECONDITION_REQUIRED: number; + const HTTP_STATUS_TOO_MANY_REQUESTS: number; + const HTTP_STATUS_REQUEST_HEADER_FIELDS_TOO_LARGE: number; + const HTTP_STATUS_UNAVAILABLE_FOR_LEGAL_REASONS: number; + const HTTP_STATUS_INTERNAL_SERVER_ERROR: number; + const HTTP_STATUS_NOT_IMPLEMENTED: number; + const HTTP_STATUS_BAD_GATEWAY: number; + const HTTP_STATUS_SERVICE_UNAVAILABLE: number; + const HTTP_STATUS_GATEWAY_TIMEOUT: number; + const HTTP_STATUS_HTTP_VERSION_NOT_SUPPORTED: number; + const HTTP_STATUS_VARIANT_ALSO_NEGOTIATES: number; + const HTTP_STATUS_INSUFFICIENT_STORAGE: number; + const HTTP_STATUS_LOOP_DETECTED: number; + const HTTP_STATUS_BANDWIDTH_LIMIT_EXCEEDED: number; + const HTTP_STATUS_NOT_EXTENDED: number; + const HTTP_STATUS_NETWORK_AUTHENTICATION_REQUIRED: number; + } + /** + * This symbol can be set as a property on the HTTP/2 headers object with + * an array value in order to provide a list of headers considered sensitive. + */ + export const sensitiveHeaders: symbol; + /** + * Returns an object containing the default settings for an `Http2Session` instance. This method returns a new object instance every time it is called + * so instances returned may be safely modified for use. + * @since v8.4.0 + */ + export function getDefaultSettings(): Settings; + /** + * Returns a `Buffer` instance containing serialized representation of the given + * HTTP/2 settings as specified in the [HTTP/2](https://tools.ietf.org/html/rfc7540) specification. This is intended + * for use with the `HTTP2-Settings` header field. 
+ * + * ```js + * import http2 from 'node:http2'; + * + * const packed = http2.getPackedSettings({ enablePush: false }); + * + * console.log(packed.toString('base64')); + * // Prints: AAIAAAAA + * ``` + * @since v8.4.0 + */ + export function getPackedSettings(settings: Settings): Buffer; + /** + * Returns a `HTTP/2 Settings Object` containing the deserialized settings from + * the given `Buffer` as generated by `http2.getPackedSettings()`. + * @since v8.4.0 + * @param buf The packed settings. + */ + export function getUnpackedSettings(buf: Uint8Array): Settings; + /** + * Returns a `net.Server` instance that creates and manages `Http2Session` instances. + * + * Since there are no browsers known that support [unencrypted HTTP/2](https://http2.github.io/faq/#does-http2-require-encryption), the use of {@link createSecureServer} is necessary when + * communicating + * with browser clients. + * + * ```js + * import http2 from 'node:http2'; + * + * // Create an unencrypted HTTP/2 server. + * // Since there are no browsers known that support + * // unencrypted HTTP/2, the use of `http2.createSecureServer()` + * // is necessary when communicating with browser clients. + * const server = http2.createServer(); + * + * server.on('stream', (stream, headers) => { + * stream.respond({ + * 'content-type': 'text/html; charset=utf-8', + * ':status': 200, + * }); + * stream.end('

<h1>Hello World</h1>
'); + * }); + * + * server.listen(8000); + * ``` + * @since v8.4.0 + * @param onRequestHandler See `Compatibility API` + */ + export function createServer( + onRequestHandler?: (request: Http2ServerRequest, response: Http2ServerResponse) => void, + ): Http2Server; + export function createServer< + Http1Request extends typeof IncomingMessage = typeof IncomingMessage, + Http1Response extends typeof ServerResponse> = typeof ServerResponse, + Http2Request extends typeof Http2ServerRequest = typeof Http2ServerRequest, + Http2Response extends typeof Http2ServerResponse> = typeof Http2ServerResponse, + >( + options: ServerOptions, + onRequestHandler?: (request: InstanceType, response: InstanceType) => void, + ): Http2Server; + /** + * Returns a `tls.Server` instance that creates and manages `Http2Session` instances. + * + * ```js + * import http2 from 'node:http2'; + * import fs from 'node:fs'; + * + * const options = { + * key: fs.readFileSync('server-key.pem'), + * cert: fs.readFileSync('server-cert.pem'), + * }; + * + * // Create a secure HTTP/2 server + * const server = http2.createSecureServer(options); + * + * server.on('stream', (stream, headers) => { + * stream.respond({ + * 'content-type': 'text/html; charset=utf-8', + * ':status': 200, + * }); + * stream.end('

<h1>Hello World</h1>
'); + * }); + * + * server.listen(8443); + * ``` + * @since v8.4.0 + * @param onRequestHandler See `Compatibility API` + */ + export function createSecureServer( + onRequestHandler?: (request: Http2ServerRequest, response: Http2ServerResponse) => void, + ): Http2SecureServer; + export function createSecureServer< + Http1Request extends typeof IncomingMessage = typeof IncomingMessage, + Http1Response extends typeof ServerResponse> = typeof ServerResponse, + Http2Request extends typeof Http2ServerRequest = typeof Http2ServerRequest, + Http2Response extends typeof Http2ServerResponse> = typeof Http2ServerResponse, + >( + options: SecureServerOptions, + onRequestHandler?: (request: InstanceType, response: InstanceType) => void, + ): Http2SecureServer; + /** + * Returns a `ClientHttp2Session` instance. + * + * ```js + * import http2 from 'node:http2'; + * const client = http2.connect('https://localhost:1234'); + * + * // Use the client + * + * client.close(); + * ``` + * @since v8.4.0 + * @param authority The remote HTTP/2 server to connect to. This must be in the form of a minimal, valid URL with the `http://` or `https://` prefix, host name, and IP port (if a non-default port + * is used). Userinfo (user ID and password), path, querystring, and fragment details in the URL will be ignored. + * @param listener Will be registered as a one-time listener of the {@link 'connect'} event. + */ + export function connect( + authority: string | url.URL, + listener: (session: ClientHttp2Session, socket: net.Socket | tls.TLSSocket) => void, + ): ClientHttp2Session; + export function connect( + authority: string | url.URL, + options?: ClientSessionOptions | SecureClientSessionOptions, + listener?: (session: ClientHttp2Session, socket: net.Socket | tls.TLSSocket) => void, + ): ClientHttp2Session; + /** + * Create an HTTP/2 server session from an existing socket. + * @param socket A Duplex Stream + * @param options Any `{@link createServer}` options can be provided. + * @since v20.12.0 + */ + export function performServerHandshake< + Http1Request extends typeof IncomingMessage = typeof IncomingMessage, + Http1Response extends typeof ServerResponse> = typeof ServerResponse, + Http2Request extends typeof Http2ServerRequest = typeof Http2ServerRequest, + Http2Response extends typeof Http2ServerResponse> = typeof Http2ServerResponse, + >( + socket: stream.Duplex, + options?: ServerOptions, + ): ServerHttp2Session; +} +declare module "node:http2" { + export * from "http2"; +} diff --git a/node_modules/@types/node-fetch/node_modules/@types/node/https.d.ts b/node_modules/@types/node-fetch/node_modules/@types/node/https.d.ts new file mode 100644 index 0000000..c17a316 --- /dev/null +++ b/node_modules/@types/node-fetch/node_modules/@types/node/https.d.ts @@ -0,0 +1,543 @@ +/** + * HTTPS is the HTTP protocol over TLS/SSL. In Node.js this is implemented as a + * separate module. 
+ * @see [source](https://github.com/nodejs/node/blob/v22.x/lib/https.js) + */ +declare module "https" { + import { Duplex } from "node:stream"; + import * as tls from "node:tls"; + import * as http from "node:http"; + import { URL } from "node:url"; + type ServerOptions< + Request extends typeof http.IncomingMessage = typeof http.IncomingMessage, + Response extends typeof http.ServerResponse> = typeof http.ServerResponse, + > = tls.SecureContextOptions & tls.TlsOptions & http.ServerOptions; + type RequestOptions = + & http.RequestOptions + & tls.SecureContextOptions + & { + checkServerIdentity?: typeof tls.checkServerIdentity | undefined; + rejectUnauthorized?: boolean | undefined; // Defaults to true + servername?: string | undefined; // SNI TLS Extension + }; + interface AgentOptions extends http.AgentOptions, tls.ConnectionOptions { + maxCachedSessions?: number | undefined; + } + /** + * An `Agent` object for HTTPS similar to `http.Agent`. See {@link request} for more information. + * @since v0.4.5 + */ + class Agent extends http.Agent { + constructor(options?: AgentOptions); + options: AgentOptions; + } + interface Server< + Request extends typeof http.IncomingMessage = typeof http.IncomingMessage, + Response extends typeof http.ServerResponse> = typeof http.ServerResponse, + > extends http.Server {} + /** + * See `http.Server` for more information. + * @since v0.3.4 + */ + class Server< + Request extends typeof http.IncomingMessage = typeof http.IncomingMessage, + Response extends typeof http.ServerResponse> = typeof http.ServerResponse, + > extends tls.Server { + constructor(requestListener?: http.RequestListener); + constructor( + options: ServerOptions, + requestListener?: http.RequestListener, + ); + /** + * Closes all connections connected to this server. + * @since v18.2.0 + */ + closeAllConnections(): void; + /** + * Closes all connections connected to this server which are not sending a request or waiting for a response. 
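To make the two close helpers above concrete, here is a small graceful-shutdown sketch; the key/cert paths, port, and signal handling are illustrative assumptions rather than anything prescribed by these type definitions.

```js
import https from 'node:https';
import fs from 'node:fs';

const server = https.createServer({
  key: fs.readFileSync('server-key.pem'),   // hypothetical paths
  cert: fs.readFileSync('server-cert.pem'),
}, (req, res) => {
  res.writeHead(200);
  res.end('ok\n');
}).listen(8443); // arbitrary port

process.on('SIGTERM', () => {
  // Stop accepting new connections; in-flight requests may still finish.
  server.close(() => process.exit(0));
  // Drop keep-alive sockets that are not currently serving a request.
  server.closeIdleConnections();
});
```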
+ * @since v18.2.0 + */ + closeIdleConnections(): void; + addListener(event: string, listener: (...args: any[]) => void): this; + addListener(event: "keylog", listener: (line: Buffer, tlsSocket: tls.TLSSocket) => void): this; + addListener( + event: "newSession", + listener: (sessionId: Buffer, sessionData: Buffer, callback: (err: Error, resp: Buffer) => void) => void, + ): this; + addListener( + event: "OCSPRequest", + listener: ( + certificate: Buffer, + issuer: Buffer, + callback: (err: Error | null, resp: Buffer) => void, + ) => void, + ): this; + addListener( + event: "resumeSession", + listener: (sessionId: Buffer, callback: (err: Error, sessionData: Buffer) => void) => void, + ): this; + addListener(event: "secureConnection", listener: (tlsSocket: tls.TLSSocket) => void): this; + addListener(event: "tlsClientError", listener: (err: Error, tlsSocket: tls.TLSSocket) => void): this; + addListener(event: "close", listener: () => void): this; + addListener(event: "connection", listener: (socket: Duplex) => void): this; + addListener(event: "error", listener: (err: Error) => void): this; + addListener(event: "listening", listener: () => void): this; + addListener(event: "checkContinue", listener: http.RequestListener): this; + addListener(event: "checkExpectation", listener: http.RequestListener): this; + addListener(event: "clientError", listener: (err: Error, socket: Duplex) => void): this; + addListener( + event: "connect", + listener: (req: InstanceType, socket: Duplex, head: Buffer) => void, + ): this; + addListener(event: "request", listener: http.RequestListener): this; + addListener( + event: "upgrade", + listener: (req: InstanceType, socket: Duplex, head: Buffer) => void, + ): this; + emit(event: string, ...args: any[]): boolean; + emit(event: "keylog", line: Buffer, tlsSocket: tls.TLSSocket): boolean; + emit( + event: "newSession", + sessionId: Buffer, + sessionData: Buffer, + callback: (err: Error, resp: Buffer) => void, + ): boolean; + emit( + event: "OCSPRequest", + certificate: Buffer, + issuer: Buffer, + callback: (err: Error | null, resp: Buffer) => void, + ): boolean; + emit(event: "resumeSession", sessionId: Buffer, callback: (err: Error, sessionData: Buffer) => void): boolean; + emit(event: "secureConnection", tlsSocket: tls.TLSSocket): boolean; + emit(event: "tlsClientError", err: Error, tlsSocket: tls.TLSSocket): boolean; + emit(event: "close"): boolean; + emit(event: "connection", socket: Duplex): boolean; + emit(event: "error", err: Error): boolean; + emit(event: "listening"): boolean; + emit( + event: "checkContinue", + req: InstanceType, + res: InstanceType, + ): boolean; + emit( + event: "checkExpectation", + req: InstanceType, + res: InstanceType, + ): boolean; + emit(event: "clientError", err: Error, socket: Duplex): boolean; + emit(event: "connect", req: InstanceType, socket: Duplex, head: Buffer): boolean; + emit( + event: "request", + req: InstanceType, + res: InstanceType, + ): boolean; + emit(event: "upgrade", req: InstanceType, socket: Duplex, head: Buffer): boolean; + on(event: string, listener: (...args: any[]) => void): this; + on(event: "keylog", listener: (line: Buffer, tlsSocket: tls.TLSSocket) => void): this; + on( + event: "newSession", + listener: (sessionId: Buffer, sessionData: Buffer, callback: (err: Error, resp: Buffer) => void) => void, + ): this; + on( + event: "OCSPRequest", + listener: ( + certificate: Buffer, + issuer: Buffer, + callback: (err: Error | null, resp: Buffer) => void, + ) => void, + ): this; + on( + event: "resumeSession", + 
listener: (sessionId: Buffer, callback: (err: Error, sessionData: Buffer) => void) => void, + ): this; + on(event: "secureConnection", listener: (tlsSocket: tls.TLSSocket) => void): this; + on(event: "tlsClientError", listener: (err: Error, tlsSocket: tls.TLSSocket) => void): this; + on(event: "close", listener: () => void): this; + on(event: "connection", listener: (socket: Duplex) => void): this; + on(event: "error", listener: (err: Error) => void): this; + on(event: "listening", listener: () => void): this; + on(event: "checkContinue", listener: http.RequestListener): this; + on(event: "checkExpectation", listener: http.RequestListener): this; + on(event: "clientError", listener: (err: Error, socket: Duplex) => void): this; + on(event: "connect", listener: (req: InstanceType, socket: Duplex, head: Buffer) => void): this; + on(event: "request", listener: http.RequestListener): this; + on(event: "upgrade", listener: (req: InstanceType, socket: Duplex, head: Buffer) => void): this; + once(event: string, listener: (...args: any[]) => void): this; + once(event: "keylog", listener: (line: Buffer, tlsSocket: tls.TLSSocket) => void): this; + once( + event: "newSession", + listener: (sessionId: Buffer, sessionData: Buffer, callback: (err: Error, resp: Buffer) => void) => void, + ): this; + once( + event: "OCSPRequest", + listener: ( + certificate: Buffer, + issuer: Buffer, + callback: (err: Error | null, resp: Buffer) => void, + ) => void, + ): this; + once( + event: "resumeSession", + listener: (sessionId: Buffer, callback: (err: Error, sessionData: Buffer) => void) => void, + ): this; + once(event: "secureConnection", listener: (tlsSocket: tls.TLSSocket) => void): this; + once(event: "tlsClientError", listener: (err: Error, tlsSocket: tls.TLSSocket) => void): this; + once(event: "close", listener: () => void): this; + once(event: "connection", listener: (socket: Duplex) => void): this; + once(event: "error", listener: (err: Error) => void): this; + once(event: "listening", listener: () => void): this; + once(event: "checkContinue", listener: http.RequestListener): this; + once(event: "checkExpectation", listener: http.RequestListener): this; + once(event: "clientError", listener: (err: Error, socket: Duplex) => void): this; + once(event: "connect", listener: (req: InstanceType, socket: Duplex, head: Buffer) => void): this; + once(event: "request", listener: http.RequestListener): this; + once(event: "upgrade", listener: (req: InstanceType, socket: Duplex, head: Buffer) => void): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + prependListener(event: "keylog", listener: (line: Buffer, tlsSocket: tls.TLSSocket) => void): this; + prependListener( + event: "newSession", + listener: (sessionId: Buffer, sessionData: Buffer, callback: (err: Error, resp: Buffer) => void) => void, + ): this; + prependListener( + event: "OCSPRequest", + listener: ( + certificate: Buffer, + issuer: Buffer, + callback: (err: Error | null, resp: Buffer) => void, + ) => void, + ): this; + prependListener( + event: "resumeSession", + listener: (sessionId: Buffer, callback: (err: Error, sessionData: Buffer) => void) => void, + ): this; + prependListener(event: "secureConnection", listener: (tlsSocket: tls.TLSSocket) => void): this; + prependListener(event: "tlsClientError", listener: (err: Error, tlsSocket: tls.TLSSocket) => void): this; + prependListener(event: "close", listener: () => void): this; + prependListener(event: "connection", listener: (socket: Duplex) => void): this; + 
prependListener(event: "error", listener: (err: Error) => void): this; + prependListener(event: "listening", listener: () => void): this; + prependListener(event: "checkContinue", listener: http.RequestListener): this; + prependListener(event: "checkExpectation", listener: http.RequestListener): this; + prependListener(event: "clientError", listener: (err: Error, socket: Duplex) => void): this; + prependListener( + event: "connect", + listener: (req: InstanceType, socket: Duplex, head: Buffer) => void, + ): this; + prependListener(event: "request", listener: http.RequestListener): this; + prependListener( + event: "upgrade", + listener: (req: InstanceType, socket: Duplex, head: Buffer) => void, + ): this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + prependOnceListener(event: "keylog", listener: (line: Buffer, tlsSocket: tls.TLSSocket) => void): this; + prependOnceListener( + event: "newSession", + listener: (sessionId: Buffer, sessionData: Buffer, callback: (err: Error, resp: Buffer) => void) => void, + ): this; + prependOnceListener( + event: "OCSPRequest", + listener: ( + certificate: Buffer, + issuer: Buffer, + callback: (err: Error | null, resp: Buffer) => void, + ) => void, + ): this; + prependOnceListener( + event: "resumeSession", + listener: (sessionId: Buffer, callback: (err: Error, sessionData: Buffer) => void) => void, + ): this; + prependOnceListener(event: "secureConnection", listener: (tlsSocket: tls.TLSSocket) => void): this; + prependOnceListener(event: "tlsClientError", listener: (err: Error, tlsSocket: tls.TLSSocket) => void): this; + prependOnceListener(event: "close", listener: () => void): this; + prependOnceListener(event: "connection", listener: (socket: Duplex) => void): this; + prependOnceListener(event: "error", listener: (err: Error) => void): this; + prependOnceListener(event: "listening", listener: () => void): this; + prependOnceListener(event: "checkContinue", listener: http.RequestListener): this; + prependOnceListener(event: "checkExpectation", listener: http.RequestListener): this; + prependOnceListener(event: "clientError", listener: (err: Error, socket: Duplex) => void): this; + prependOnceListener( + event: "connect", + listener: (req: InstanceType, socket: Duplex, head: Buffer) => void, + ): this; + prependOnceListener(event: "request", listener: http.RequestListener): this; + prependOnceListener( + event: "upgrade", + listener: (req: InstanceType, socket: Duplex, head: Buffer) => void, + ): this; + } + /** + * ```js + * // curl -k https://localhost:8000/ + * import https from 'node:https'; + * import fs from 'node:fs'; + * + * const options = { + * key: fs.readFileSync('test/fixtures/keys/agent2-key.pem'), + * cert: fs.readFileSync('test/fixtures/keys/agent2-cert.pem'), + * }; + * + * https.createServer(options, (req, res) => { + * res.writeHead(200); + * res.end('hello world\n'); + * }).listen(8000); + * ``` + * + * Or + * + * ```js + * import https from 'node:https'; + * import fs from 'node:fs'; + * + * const options = { + * pfx: fs.readFileSync('test/fixtures/test_cert.pfx'), + * passphrase: 'sample', + * }; + * + * https.createServer(options, (req, res) => { + * res.writeHead(200); + * res.end('hello world\n'); + * }).listen(8000); + * ``` + * @since v0.3.4 + * @param options Accepts `options` from `createServer`, `createSecureContext` and `createServer`. + * @param requestListener A listener to be added to the `'request'` event. 
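As a usage illustration for the typed event overloads above (not part of the vendored definitions), the sketch below attaches listeners whose argument types are narrowed by the `on(...)` overloads; the key/cert paths and the port are placeholders.

```ts
import https from "node:https";
import fs from "node:fs";
import type { TLSSocket } from "node:tls";

// Placeholder key/cert paths; substitute real files before running.
const server = https.createServer({
  key: fs.readFileSync("key.pem"),
  cert: fs.readFileSync("cert.pem"),
});

// The "secureConnection" overload types the listener argument as tls.TLSSocket.
server.on("secureConnection", (socket: TLSSocket) => {
  console.log("negotiated TLS protocol:", socket.getProtocol());
});

// The "request" overload uses http.RequestListener, so req/res are typed
// as http.IncomingMessage / http.ServerResponse.
server.on("request", (req, res) => {
  res.writeHead(200);
  res.end("hello\n");
});

server.listen(8443);
```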
+ */ + function createServer< + Request extends typeof http.IncomingMessage = typeof http.IncomingMessage, + Response extends typeof http.ServerResponse = typeof http.ServerResponse, + >(requestListener?: http.RequestListener): Server; + function createServer< + Request extends typeof http.IncomingMessage = typeof http.IncomingMessage, + Response extends typeof http.ServerResponse = typeof http.ServerResponse, + >( + options: ServerOptions, + requestListener?: http.RequestListener, + ): Server; + /** + * Makes a request to a secure web server. + * + * The following additional `options` from `tls.connect()` are also accepted: `ca`, `cert`, `ciphers`, `clientCertEngine`, `crl`, `dhparam`, `ecdhCurve`, `honorCipherOrder`, `key`, `passphrase`, + * `pfx`, `rejectUnauthorized`, `secureOptions`, `secureProtocol`, `servername`, `sessionIdContext`, `highWaterMark`. + * + * `options` can be an object, a string, or a `URL` object. If `options` is a + * string, it is automatically parsed with `new URL()`. If it is a `URL` object, it will be automatically converted to an ordinary `options` object. + * + * `https.request()` returns an instance of the `http.ClientRequest` class. The `ClientRequest` instance is a writable stream. If one needs to + * upload a file with a POST request, then write to the `ClientRequest` object. + * + * ```js + * import https from 'node:https'; + * + * const options = { + * hostname: 'encrypted.google.com', + * port: 443, + * path: '/', + * method: 'GET', + * }; + * + * const req = https.request(options, (res) => { + * console.log('statusCode:', res.statusCode); + * console.log('headers:', res.headers); + * + * res.on('data', (d) => { + * process.stdout.write(d); + * }); + * }); + * + * req.on('error', (e) => { + * console.error(e); + * }); + * req.end(); + * ``` + * + * Example using options from `tls.connect()`: + * + * ```js + * const options = { + * hostname: 'encrypted.google.com', + * port: 443, + * path: '/', + * method: 'GET', + * key: fs.readFileSync('test/fixtures/keys/agent2-key.pem'), + * cert: fs.readFileSync('test/fixtures/keys/agent2-cert.pem'), + * }; + * options.agent = new https.Agent(options); + * + * const req = https.request(options, (res) => { + * // ... + * }); + * ``` + * + * Alternatively, opt out of connection pooling by not using an `Agent`. + * + * ```js + * const options = { + * hostname: 'encrypted.google.com', + * port: 443, + * path: '/', + * method: 'GET', + * key: fs.readFileSync('test/fixtures/keys/agent2-key.pem'), + * cert: fs.readFileSync('test/fixtures/keys/agent2-cert.pem'), + * agent: false, + * }; + * + * const req = https.request(options, (res) => { + * // ... + * }); + * ``` + * + * Example using a `URL` as `options`: + * + * ```js + * const options = new URL('https://abc:xyz@example.com'); + * + * const req = https.request(options, (res) => { + * // ...
+ * }); + * ``` + * + * Example pinning on certificate fingerprint, or the public key (similar to`pin-sha256`): + * + * ```js + * import tls from 'node:tls'; + * import https from 'node:https'; + * import crypto from 'node:crypto'; + * + * function sha256(s) { + * return crypto.createHash('sha256').update(s).digest('base64'); + * } + * const options = { + * hostname: 'github.com', + * port: 443, + * path: '/', + * method: 'GET', + * checkServerIdentity: function(host, cert) { + * // Make sure the certificate is issued to the host we are connected to + * const err = tls.checkServerIdentity(host, cert); + * if (err) { + * return err; + * } + * + * // Pin the public key, similar to HPKP pin-sha256 pinning + * const pubkey256 = 'pL1+qb9HTMRZJmuC/bB/ZI9d302BYrrqiVuRyW+DGrU='; + * if (sha256(cert.pubkey) !== pubkey256) { + * const msg = 'Certificate verification error: ' + + * `The public key of '${cert.subject.CN}' ` + + * 'does not match our pinned fingerprint'; + * return new Error(msg); + * } + * + * // Pin the exact certificate, rather than the pub key + * const cert256 = '25:FE:39:32:D9:63:8C:8A:FC:A1:9A:29:87:' + + * 'D8:3E:4C:1D:98:DB:71:E4:1A:48:03:98:EA:22:6A:BD:8B:93:16'; + * if (cert.fingerprint256 !== cert256) { + * const msg = 'Certificate verification error: ' + + * `The certificate of '${cert.subject.CN}' ` + + * 'does not match our pinned fingerprint'; + * return new Error(msg); + * } + * + * // This loop is informational only. + * // Print the certificate and public key fingerprints of all certs in the + * // chain. Its common to pin the public key of the issuer on the public + * // internet, while pinning the public key of the service in sensitive + * // environments. + * do { + * console.log('Subject Common Name:', cert.subject.CN); + * console.log(' Certificate SHA256 fingerprint:', cert.fingerprint256); + * + * hash = crypto.createHash('sha256'); + * console.log(' Public key ping-sha256:', sha256(cert.pubkey)); + * + * lastprint256 = cert.fingerprint256; + * cert = cert.issuerCertificate; + * } while (cert.fingerprint256 !== lastprint256); + * + * }, + * }; + * + * options.agent = new https.Agent(options); + * const req = https.request(options, (res) => { + * console.log('All OK. Server matched our pinned cert or public key'); + * console.log('statusCode:', res.statusCode); + * // Print the HPKP values + * console.log('headers:', res.headers['public-key-pins']); + * + * res.on('data', (d) => {}); + * }); + * + * req.on('error', (e) => { + * console.error(e.message); + * }); + * req.end(); + * ``` + * + * Outputs for example: + * + * ```text + * Subject Common Name: github.com + * Certificate SHA256 fingerprint: 25:FE:39:32:D9:63:8C:8A:FC:A1:9A:29:87:D8:3E:4C:1D:98:DB:71:E4:1A:48:03:98:EA:22:6A:BD:8B:93:16 + * Public key ping-sha256: pL1+qb9HTMRZJmuC/bB/ZI9d302BYrrqiVuRyW+DGrU= + * Subject Common Name: DigiCert SHA2 Extended Validation Server CA + * Certificate SHA256 fingerprint: 40:3E:06:2A:26:53:05:91:13:28:5B:AF:80:A0:D4:AE:42:2C:84:8C:9F:78:FA:D0:1F:C9:4B:C5:B8:7F:EF:1A + * Public key ping-sha256: RRM1dGqnDFsCJXBTHky16vi1obOlCgFFn/yOhI/y+ho= + * Subject Common Name: DigiCert High Assurance EV Root CA + * Certificate SHA256 fingerprint: 74:31:E5:F4:C3:C1:CE:46:90:77:4F:0B:61:E0:54:40:88:3B:A9:A0:1E:D0:0B:A6:AB:D7:80:6E:D3:B1:18:CF + * Public key ping-sha256: WoiWRyIOVNa9ihaBciRSC7XHjliYS9VwUGOIud4PB18= + * All OK. 
Server matched our pinned cert or public key + * statusCode: 200 + * headers: max-age=0; pin-sha256="WoiWRyIOVNa9ihaBciRSC7XHjliYS9VwUGOIud4PB18="; pin-sha256="RRM1dGqnDFsCJXBTHky16vi1obOlCgFFn/yOhI/y+ho="; + * pin-sha256="k2v657xBsOVe1PQRwOsHsw3bsGT2VzIqz5K+59sNQws="; pin-sha256="K87oWBWM9UZfyddvDfoxL+8lpNyoUB2ptGtn0fv6G2Q="; pin-sha256="IQBnNBEiFuhj+8x6X8XLgh01V9Ic5/V3IRQLNFFc7v4="; + * pin-sha256="iie1VXtL7HzAMF+/PVPR9xzT80kQxdZeJ+zduCB3uj0="; pin-sha256="LvRiGEjRqfzurezaWuj8Wie2gyHMrW5Q06LspMnox7A="; includeSubDomains + * ``` + * @since v0.3.6 + * @param options Accepts all `options` from `request`, with some differences in default values: + */ + function request( + options: RequestOptions | string | URL, + callback?: (res: http.IncomingMessage) => void, + ): http.ClientRequest; + function request( + url: string | URL, + options: RequestOptions, + callback?: (res: http.IncomingMessage) => void, + ): http.ClientRequest; + /** + * Like `http.get()` but for HTTPS. + * + * `options` can be an object, a string, or a `URL` object. If `options` is a + * string, it is automatically parsed with `new URL()`. If it is a `URL` object, it will be automatically converted to an ordinary `options` object. + * + * ```js + * import https from 'node:https'; + * + * https.get('https://encrypted.google.com/', (res) => { + * console.log('statusCode:', res.statusCode); + * console.log('headers:', res.headers); + * + * res.on('data', (d) => { + * process.stdout.write(d); + * }); + * + * }).on('error', (e) => { + * console.error(e); + * }); + * ``` + * @since v0.3.6 + * @param options Accepts the same `options` as {@link request}, with the `method` always set to `GET`. + */ + function get( + options: RequestOptions | string | URL, + callback?: (res: http.IncomingMessage) => void, + ): http.ClientRequest; + function get( + url: string | URL, + options: RequestOptions, + callback?: (res: http.IncomingMessage) => void, + ): http.ClientRequest; + let globalAgent: Agent; +} +declare module "node:https" { + export * from "https"; +} diff --git a/node_modules/@types/node-fetch/node_modules/@types/node/index.d.ts b/node_modules/@types/node-fetch/node_modules/@types/node/index.d.ts new file mode 100644 index 0000000..e99dc83 --- /dev/null +++ b/node_modules/@types/node-fetch/node_modules/@types/node/index.d.ts @@ -0,0 +1,92 @@ +/** + * License for programmatically and manually incorporated + * documentation aka. `JSDoc` from https://github.com/nodejs/node/tree/master/doc + * + * Copyright Node.js contributors. All rights reserved. + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to + * deal in the Software without restriction, including without limitation the + * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or + * sell copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in + * all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS + * IN THE SOFTWARE. + */ + +// NOTE: These definitions support Node.js and TypeScript 5.7+. + +// Reference required TypeScript libs: +/// + +// TypeScript backwards-compatibility definitions: +/// + +// Definitions specific to TypeScript 5.7+: +/// +/// + +// Definitions for Node.js modules that are not specific to any version of TypeScript: +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// diff --git a/node_modules/@types/node-fetch/node_modules/@types/node/inspector.d.ts b/node_modules/@types/node-fetch/node_modules/@types/node/inspector.d.ts new file mode 100644 index 0000000..32d9ba4 --- /dev/null +++ b/node_modules/@types/node-fetch/node_modules/@types/node/inspector.d.ts @@ -0,0 +1,3966 @@ +// These definitions are automatically generated by the generate-inspector script. +// Do not edit this file directly. +// See scripts/generate-inspector/README.md for information on how to update the protocol definitions. +// Changes to the module itself should be added to the generator template (scripts/generate-inspector/inspector.d.ts.template). + +/** + * The `node:inspector` module provides an API for interacting with the V8 + * inspector. + * @see [source](https://github.com/nodejs/node/blob/v22.x/lib/inspector.js) + */ +declare module 'inspector' { + import EventEmitter = require('node:events'); + + interface InspectorNotification { + method: string; + params: T; + } + + namespace Schema { + /** + * Description of the protocol domain. + */ + interface Domain { + /** + * Domain name. + */ + name: string; + /** + * Domain version. + */ + version: string; + } + interface GetDomainsReturnType { + /** + * List of supported domains. + */ + domains: Domain[]; + } + } + namespace Runtime { + /** + * Unique script identifier. + */ + type ScriptId = string; + /** + * Unique object identifier. + */ + type RemoteObjectId = string; + /** + * Primitive value which cannot be JSON-stringified. + */ + type UnserializableValue = string; + /** + * Mirror object referencing original JavaScript object. + */ + interface RemoteObject { + /** + * Object type. + */ + type: string; + /** + * Object subtype hint. Specified for object type values only. + */ + subtype?: string | undefined; + /** + * Object class (constructor) name. Specified for object type values only. + */ + className?: string | undefined; + /** + * Remote object value in case of primitive values or JSON values (if it was requested). + */ + value?: any; + /** + * Primitive value which can not be JSON-stringified does not have value, but gets this property. + */ + unserializableValue?: UnserializableValue | undefined; + /** + * String representation of the object. + */ + description?: string | undefined; + /** + * Unique object identifier (for non-primitive values). + */ + objectId?: RemoteObjectId | undefined; + /** + * Preview containing abbreviated property values. Specified for object type values only. 
+ * @experimental + */ + preview?: ObjectPreview | undefined; + /** + * @experimental + */ + customPreview?: CustomPreview | undefined; + } + /** + * @experimental + */ + interface CustomPreview { + header: string; + hasBody: boolean; + formatterObjectId: RemoteObjectId; + bindRemoteObjectFunctionId: RemoteObjectId; + configObjectId?: RemoteObjectId | undefined; + } + /** + * Object containing abbreviated remote object value. + * @experimental + */ + interface ObjectPreview { + /** + * Object type. + */ + type: string; + /** + * Object subtype hint. Specified for object type values only. + */ + subtype?: string | undefined; + /** + * String representation of the object. + */ + description?: string | undefined; + /** + * True iff some of the properties or entries of the original object did not fit. + */ + overflow: boolean; + /** + * List of the properties. + */ + properties: PropertyPreview[]; + /** + * List of the entries. Specified for map and set subtype values only. + */ + entries?: EntryPreview[] | undefined; + } + /** + * @experimental + */ + interface PropertyPreview { + /** + * Property name. + */ + name: string; + /** + * Object type. Accessor means that the property itself is an accessor property. + */ + type: string; + /** + * User-friendly property value string. + */ + value?: string | undefined; + /** + * Nested value preview. + */ + valuePreview?: ObjectPreview | undefined; + /** + * Object subtype hint. Specified for object type values only. + */ + subtype?: string | undefined; + } + /** + * @experimental + */ + interface EntryPreview { + /** + * Preview of the key. Specified for map-like collection entries. + */ + key?: ObjectPreview | undefined; + /** + * Preview of the value. + */ + value: ObjectPreview; + } + /** + * Object property descriptor. + */ + interface PropertyDescriptor { + /** + * Property name or symbol description. + */ + name: string; + /** + * The value associated with the property. + */ + value?: RemoteObject | undefined; + /** + * True if the value associated with the property may be changed (data descriptors only). + */ + writable?: boolean | undefined; + /** + * A function which serves as a getter for the property, or undefined if there is no getter (accessor descriptors only). + */ + get?: RemoteObject | undefined; + /** + * A function which serves as a setter for the property, or undefined if there is no setter (accessor descriptors only). + */ + set?: RemoteObject | undefined; + /** + * True if the type of this property descriptor may be changed and if the property may be deleted from the corresponding object. + */ + configurable: boolean; + /** + * True if this property shows up during enumeration of the properties on the corresponding object. + */ + enumerable: boolean; + /** + * True if the result was thrown during the evaluation. + */ + wasThrown?: boolean | undefined; + /** + * True if the property is owned for the object. + */ + isOwn?: boolean | undefined; + /** + * Property symbol object, if the property is of the symbol type. + */ + symbol?: RemoteObject | undefined; + } + /** + * Object internal property descriptor. This property isn't normally visible in JavaScript code. + */ + interface InternalPropertyDescriptor { + /** + * Conventional property name. + */ + name: string; + /** + * The value associated with the property. + */ + value?: RemoteObject | undefined; + } + /** + * Represents function call argument. 
Either remote object id objectId, primitive value, unserializable primitive value or neither of (for undefined) them should be specified. + */ + interface CallArgument { + /** + * Primitive value or serializable javascript object. + */ + value?: any; + /** + * Primitive value which can not be JSON-stringified. + */ + unserializableValue?: UnserializableValue | undefined; + /** + * Remote object handle. + */ + objectId?: RemoteObjectId | undefined; + } + /** + * Id of an execution context. + */ + type ExecutionContextId = number; + /** + * Description of an isolated world. + */ + interface ExecutionContextDescription { + /** + * Unique id of the execution context. It can be used to specify in which execution context script evaluation should be performed. + */ + id: ExecutionContextId; + /** + * Execution context origin. + */ + origin: string; + /** + * Human readable name describing given context. + */ + name: string; + /** + * Embedder-specific auxiliary data. + */ + auxData?: {} | undefined; + } + /** + * Detailed information about exception (or error) that was thrown during script compilation or execution. + */ + interface ExceptionDetails { + /** + * Exception id. + */ + exceptionId: number; + /** + * Exception text, which should be used together with exception object when available. + */ + text: string; + /** + * Line number of the exception location (0-based). + */ + lineNumber: number; + /** + * Column number of the exception location (0-based). + */ + columnNumber: number; + /** + * Script ID of the exception location. + */ + scriptId?: ScriptId | undefined; + /** + * URL of the exception location, to be used when the script was not reported. + */ + url?: string | undefined; + /** + * JavaScript stack trace if available. + */ + stackTrace?: StackTrace | undefined; + /** + * Exception object if available. + */ + exception?: RemoteObject | undefined; + /** + * Identifier of the context where exception happened. + */ + executionContextId?: ExecutionContextId | undefined; + } + /** + * Number of milliseconds since epoch. + */ + type Timestamp = number; + /** + * Stack entry for runtime errors and assertions. + */ + interface CallFrame { + /** + * JavaScript function name. + */ + functionName: string; + /** + * JavaScript script id. + */ + scriptId: ScriptId; + /** + * JavaScript script name or url. + */ + url: string; + /** + * JavaScript script line number (0-based). + */ + lineNumber: number; + /** + * JavaScript script column number (0-based). + */ + columnNumber: number; + } + /** + * Call frames for assertions or error messages. + */ + interface StackTrace { + /** + * String label of this stack trace. For async traces this may be a name of the function that initiated the async call. + */ + description?: string | undefined; + /** + * JavaScript function name. + */ + callFrames: CallFrame[]; + /** + * Asynchronous JavaScript stack trace that preceded this stack, if available. + */ + parent?: StackTrace | undefined; + /** + * Asynchronous JavaScript stack trace that preceded this stack, if available. + * @experimental + */ + parentId?: StackTraceId | undefined; + } + /** + * Unique identifier of current debugger. + * @experimental + */ + type UniqueDebuggerId = string; + /** + * If debuggerId is set stack trace comes from another debugger and can be resolved there. This allows to track cross-debugger calls. See Runtime.StackTrace and Debugger.paused for usages. 
+ * @experimental + */ + interface StackTraceId { + id: string; + debuggerId?: UniqueDebuggerId | undefined; + } + interface EvaluateParameterType { + /** + * Expression to evaluate. + */ + expression: string; + /** + * Symbolic group name that can be used to release multiple objects. + */ + objectGroup?: string | undefined; + /** + * Determines whether Command Line API should be available during the evaluation. + */ + includeCommandLineAPI?: boolean | undefined; + /** + * In silent mode exceptions thrown during evaluation are not reported and do not pause execution. Overrides setPauseOnException state. + */ + silent?: boolean | undefined; + /** + * Specifies in which execution context to perform evaluation. If the parameter is omitted the evaluation will be performed in the context of the inspected page. + */ + contextId?: ExecutionContextId | undefined; + /** + * Whether the result is expected to be a JSON object that should be sent by value. + */ + returnByValue?: boolean | undefined; + /** + * Whether preview should be generated for the result. + * @experimental + */ + generatePreview?: boolean | undefined; + /** + * Whether execution should be treated as initiated by user in the UI. + */ + userGesture?: boolean | undefined; + /** + * Whether execution should await for resulting value and return once awaited promise is resolved. + */ + awaitPromise?: boolean | undefined; + } + interface AwaitPromiseParameterType { + /** + * Identifier of the promise. + */ + promiseObjectId: RemoteObjectId; + /** + * Whether the result is expected to be a JSON object that should be sent by value. + */ + returnByValue?: boolean | undefined; + /** + * Whether preview should be generated for the result. + */ + generatePreview?: boolean | undefined; + } + interface CallFunctionOnParameterType { + /** + * Declaration of the function to call. + */ + functionDeclaration: string; + /** + * Identifier of the object to call function on. Either objectId or executionContextId should be specified. + */ + objectId?: RemoteObjectId | undefined; + /** + * Call arguments. All call arguments must belong to the same JavaScript world as the target object. + */ + arguments?: CallArgument[] | undefined; + /** + * In silent mode exceptions thrown during evaluation are not reported and do not pause execution. Overrides setPauseOnException state. + */ + silent?: boolean | undefined; + /** + * Whether the result is expected to be a JSON object which should be sent by value. + */ + returnByValue?: boolean | undefined; + /** + * Whether preview should be generated for the result. + * @experimental + */ + generatePreview?: boolean | undefined; + /** + * Whether execution should be treated as initiated by user in the UI. + */ + userGesture?: boolean | undefined; + /** + * Whether execution should await for resulting value and return once awaited promise is resolved. + */ + awaitPromise?: boolean | undefined; + /** + * Specifies execution context which global object will be used to call function on. Either executionContextId or objectId should be specified. + */ + executionContextId?: ExecutionContextId | undefined; + /** + * Symbolic group name that can be used to release multiple objects. If objectGroup is not specified and objectId is, objectGroup will be inherited from object. + */ + objectGroup?: string | undefined; + } + interface GetPropertiesParameterType { + /** + * Identifier of the object to return properties for. 
+ */ + objectId: RemoteObjectId; + /** + * If true, returns properties belonging only to the element itself, not to its prototype chain. + */ + ownProperties?: boolean | undefined; + /** + * If true, returns accessor properties (with getter/setter) only; internal properties are not returned either. + * @experimental + */ + accessorPropertiesOnly?: boolean | undefined; + /** + * Whether preview should be generated for the results. + * @experimental + */ + generatePreview?: boolean | undefined; + } + interface ReleaseObjectParameterType { + /** + * Identifier of the object to release. + */ + objectId: RemoteObjectId; + } + interface ReleaseObjectGroupParameterType { + /** + * Symbolic object group name. + */ + objectGroup: string; + } + interface SetCustomObjectFormatterEnabledParameterType { + enabled: boolean; + } + interface CompileScriptParameterType { + /** + * Expression to compile. + */ + expression: string; + /** + * Source url to be set for the script. + */ + sourceURL: string; + /** + * Specifies whether the compiled script should be persisted. + */ + persistScript: boolean; + /** + * Specifies in which execution context to perform script run. If the parameter is omitted the evaluation will be performed in the context of the inspected page. + */ + executionContextId?: ExecutionContextId | undefined; + } + interface RunScriptParameterType { + /** + * Id of the script to run. + */ + scriptId: ScriptId; + /** + * Specifies in which execution context to perform script run. If the parameter is omitted the evaluation will be performed in the context of the inspected page. + */ + executionContextId?: ExecutionContextId | undefined; + /** + * Symbolic group name that can be used to release multiple objects. + */ + objectGroup?: string | undefined; + /** + * In silent mode exceptions thrown during evaluation are not reported and do not pause execution. Overrides setPauseOnException state. + */ + silent?: boolean | undefined; + /** + * Determines whether Command Line API should be available during the evaluation. + */ + includeCommandLineAPI?: boolean | undefined; + /** + * Whether the result is expected to be a JSON object which should be sent by value. + */ + returnByValue?: boolean | undefined; + /** + * Whether preview should be generated for the result. + */ + generatePreview?: boolean | undefined; + /** + * Whether execution should await for resulting value and return once awaited promise is resolved. + */ + awaitPromise?: boolean | undefined; + } + interface QueryObjectsParameterType { + /** + * Identifier of the prototype to return objects for. + */ + prototypeObjectId: RemoteObjectId; + } + interface GlobalLexicalScopeNamesParameterType { + /** + * Specifies in which execution context to lookup global scope variables. + */ + executionContextId?: ExecutionContextId | undefined; + } + interface EvaluateReturnType { + /** + * Evaluation result. + */ + result: RemoteObject; + /** + * Exception details. + */ + exceptionDetails?: ExceptionDetails | undefined; + } + interface AwaitPromiseReturnType { + /** + * Promise result. Will contain rejected value if promise was rejected. + */ + result: RemoteObject; + /** + * Exception details if stack strace is available. + */ + exceptionDetails?: ExceptionDetails | undefined; + } + interface CallFunctionOnReturnType { + /** + * Call result. + */ + result: RemoteObject; + /** + * Exception details. + */ + exceptionDetails?: ExceptionDetails | undefined; + } + interface GetPropertiesReturnType { + /** + * Object properties. 
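To make the `Evaluate*` shapes above concrete, here is a small illustrative sketch (not part of the generated definitions); the expression and helper name are arbitrary, and the `session.post` call mentioned in the trailing comment is only declared further down in this file.

```ts
import type { Runtime } from "node:inspector";

// Illustrative request payload for Runtime.evaluate.
const evalParams: Runtime.EvaluateParameterType = {
  expression: "1 + 1",
  returnByValue: true, // ask for a plain JSON value instead of a RemoteObject handle
};

// Illustrative handling of the corresponding return shape.
function readResult(ret: Runtime.EvaluateReturnType): unknown {
  if (ret.exceptionDetails) {
    // ExceptionDetails carries the message text plus optional location/stack trace.
    throw new Error(ret.exceptionDetails.text);
  }
  return ret.result.value; // RemoteObject.value is populated when returnByValue is true
}

// e.g. session.post('Runtime.evaluate', evalParams, (err, ret) => console.log(readResult(ret)));
```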
+ */ + result: PropertyDescriptor[]; + /** + * Internal object properties (only of the element itself). + */ + internalProperties?: InternalPropertyDescriptor[] | undefined; + /** + * Exception details. + */ + exceptionDetails?: ExceptionDetails | undefined; + } + interface CompileScriptReturnType { + /** + * Id of the script. + */ + scriptId?: ScriptId | undefined; + /** + * Exception details. + */ + exceptionDetails?: ExceptionDetails | undefined; + } + interface RunScriptReturnType { + /** + * Run result. + */ + result: RemoteObject; + /** + * Exception details. + */ + exceptionDetails?: ExceptionDetails | undefined; + } + interface QueryObjectsReturnType { + /** + * Array with objects. + */ + objects: RemoteObject; + } + interface GlobalLexicalScopeNamesReturnType { + names: string[]; + } + interface ExecutionContextCreatedEventDataType { + /** + * A newly created execution context. + */ + context: ExecutionContextDescription; + } + interface ExecutionContextDestroyedEventDataType { + /** + * Id of the destroyed context + */ + executionContextId: ExecutionContextId; + } + interface ExceptionThrownEventDataType { + /** + * Timestamp of the exception. + */ + timestamp: Timestamp; + exceptionDetails: ExceptionDetails; + } + interface ExceptionRevokedEventDataType { + /** + * Reason describing why exception was revoked. + */ + reason: string; + /** + * The id of revoked exception, as reported in exceptionThrown. + */ + exceptionId: number; + } + interface ConsoleAPICalledEventDataType { + /** + * Type of the call. + */ + type: string; + /** + * Call arguments. + */ + args: RemoteObject[]; + /** + * Identifier of the context where the call was made. + */ + executionContextId: ExecutionContextId; + /** + * Call timestamp. + */ + timestamp: Timestamp; + /** + * Stack trace captured when the call was made. + */ + stackTrace?: StackTrace | undefined; + /** + * Console context descriptor for calls on non-default console context (not console.*): 'anonymous#unique-logger-id' for call on unnamed context, 'name#unique-logger-id' for call on named context. + * @experimental + */ + context?: string | undefined; + } + interface InspectRequestedEventDataType { + object: RemoteObject; + hints: {}; + } + } + namespace Debugger { + /** + * Breakpoint identifier. + */ + type BreakpointId = string; + /** + * Call frame identifier. + */ + type CallFrameId = string; + /** + * Location in the source code. + */ + interface Location { + /** + * Script identifier as reported in the Debugger.scriptParsed. + */ + scriptId: Runtime.ScriptId; + /** + * Line number in the script (0-based). + */ + lineNumber: number; + /** + * Column number in the script (0-based). + */ + columnNumber?: number | undefined; + } + /** + * Location in the source code. + * @experimental + */ + interface ScriptPosition { + lineNumber: number; + columnNumber: number; + } + /** + * JavaScript call frame. Array of call frames form the call stack. + */ + interface CallFrame { + /** + * Call frame identifier. This identifier is only valid while the virtual machine is paused. + */ + callFrameId: CallFrameId; + /** + * Name of the JavaScript function called on this call frame. + */ + functionName: string; + /** + * Location in the source code. + */ + functionLocation?: Location | undefined; + /** + * Location in the source code. + */ + location: Location; + /** + * JavaScript script name or url. + */ + url: string; + /** + * Scope chain for this call frame. + */ + scopeChain: Scope[]; + /** + * this object for this call frame. 
+ */ + this: Runtime.RemoteObject; + /** + * The value being returned, if the function is at return point. + */ + returnValue?: Runtime.RemoteObject | undefined; + } + /** + * Scope description. + */ + interface Scope { + /** + * Scope type. + */ + type: string; + /** + * Object representing the scope. For global and with scopes it represents the actual object; for the rest of the scopes, it is artificial transient object enumerating scope variables as its properties. + */ + object: Runtime.RemoteObject; + name?: string | undefined; + /** + * Location in the source code where scope starts + */ + startLocation?: Location | undefined; + /** + * Location in the source code where scope ends + */ + endLocation?: Location | undefined; + } + /** + * Search match for resource. + */ + interface SearchMatch { + /** + * Line number in resource content. + */ + lineNumber: number; + /** + * Line with match content. + */ + lineContent: string; + } + interface BreakLocation { + /** + * Script identifier as reported in the Debugger.scriptParsed. + */ + scriptId: Runtime.ScriptId; + /** + * Line number in the script (0-based). + */ + lineNumber: number; + /** + * Column number in the script (0-based). + */ + columnNumber?: number | undefined; + type?: string | undefined; + } + interface SetBreakpointsActiveParameterType { + /** + * New value for breakpoints active state. + */ + active: boolean; + } + interface SetSkipAllPausesParameterType { + /** + * New value for skip pauses state. + */ + skip: boolean; + } + interface SetBreakpointByUrlParameterType { + /** + * Line number to set breakpoint at. + */ + lineNumber: number; + /** + * URL of the resources to set breakpoint on. + */ + url?: string | undefined; + /** + * Regex pattern for the URLs of the resources to set breakpoints on. Either url or urlRegex must be specified. + */ + urlRegex?: string | undefined; + /** + * Script hash of the resources to set breakpoint on. + */ + scriptHash?: string | undefined; + /** + * Offset in the line to set breakpoint at. + */ + columnNumber?: number | undefined; + /** + * Expression to use as a breakpoint condition. When specified, debugger will only stop on the breakpoint if this expression evaluates to true. + */ + condition?: string | undefined; + } + interface SetBreakpointParameterType { + /** + * Location to set breakpoint in. + */ + location: Location; + /** + * Expression to use as a breakpoint condition. When specified, debugger will only stop on the breakpoint if this expression evaluates to true. + */ + condition?: string | undefined; + } + interface RemoveBreakpointParameterType { + breakpointId: BreakpointId; + } + interface GetPossibleBreakpointsParameterType { + /** + * Start of range to search possible breakpoint locations in. + */ + start: Location; + /** + * End of range to search possible breakpoint locations in (excluding). When not specified, end of scripts is used as end of range. + */ + end?: Location | undefined; + /** + * Only consider locations which are in the same (non-nested) function as start. + */ + restrictToFunction?: boolean | undefined; + } + interface ContinueToLocationParameterType { + /** + * Location to continue to. + */ + location: Location; + targetCallFrames?: string | undefined; + } + interface PauseOnAsyncCallParameterType { + /** + * Debugger will pause when async call with given stack trace is started. 
+ */ + parentStackTraceId: Runtime.StackTraceId; + } + interface StepIntoParameterType { + /** + * Debugger will issue additional Debugger.paused notification if any async task is scheduled before next pause. + * @experimental + */ + breakOnAsyncCall?: boolean | undefined; + } + interface GetStackTraceParameterType { + stackTraceId: Runtime.StackTraceId; + } + interface SearchInContentParameterType { + /** + * Id of the script to search in. + */ + scriptId: Runtime.ScriptId; + /** + * String to search for. + */ + query: string; + /** + * If true, search is case sensitive. + */ + caseSensitive?: boolean | undefined; + /** + * If true, treats string parameter as regex. + */ + isRegex?: boolean | undefined; + } + interface SetScriptSourceParameterType { + /** + * Id of the script to edit. + */ + scriptId: Runtime.ScriptId; + /** + * New content of the script. + */ + scriptSource: string; + /** + * If true the change will not actually be applied. Dry run may be used to get result description without actually modifying the code. + */ + dryRun?: boolean | undefined; + } + interface RestartFrameParameterType { + /** + * Call frame identifier to evaluate on. + */ + callFrameId: CallFrameId; + } + interface GetScriptSourceParameterType { + /** + * Id of the script to get source for. + */ + scriptId: Runtime.ScriptId; + } + interface SetPauseOnExceptionsParameterType { + /** + * Pause on exceptions mode. + */ + state: string; + } + interface EvaluateOnCallFrameParameterType { + /** + * Call frame identifier to evaluate on. + */ + callFrameId: CallFrameId; + /** + * Expression to evaluate. + */ + expression: string; + /** + * String object group name to put result into (allows rapid releasing resulting object handles using releaseObjectGroup). + */ + objectGroup?: string | undefined; + /** + * Specifies whether command line API should be available to the evaluated expression, defaults to false. + */ + includeCommandLineAPI?: boolean | undefined; + /** + * In silent mode exceptions thrown during evaluation are not reported and do not pause execution. Overrides setPauseOnException state. + */ + silent?: boolean | undefined; + /** + * Whether the result is expected to be a JSON object that should be sent by value. + */ + returnByValue?: boolean | undefined; + /** + * Whether preview should be generated for the result. + * @experimental + */ + generatePreview?: boolean | undefined; + /** + * Whether to throw an exception if side effect cannot be ruled out during evaluation. + */ + throwOnSideEffect?: boolean | undefined; + } + interface SetVariableValueParameterType { + /** + * 0-based number of scope as was listed in scope chain. Only 'local', 'closure' and 'catch' scope types are allowed. Other scopes could be manipulated manually. + */ + scopeNumber: number; + /** + * Variable name. + */ + variableName: string; + /** + * New variable value. + */ + newValue: Runtime.CallArgument; + /** + * Id of callframe that holds variable. + */ + callFrameId: CallFrameId; + } + interface SetReturnValueParameterType { + /** + * New return value. + */ + newValue: Runtime.CallArgument; + } + interface SetAsyncCallStackDepthParameterType { + /** + * Maximum depth of async call stacks. Setting to 0 will effectively disable collecting async call stacks (default). + */ + maxDepth: number; + } + interface SetBlackboxPatternsParameterType { + /** + * Array of regexps that will be used to check script url for blackbox state. 
+ */ + patterns: string[]; + } + interface SetBlackboxedRangesParameterType { + /** + * Id of the script. + */ + scriptId: Runtime.ScriptId; + positions: ScriptPosition[]; + } + interface EnableReturnType { + /** + * Unique identifier of the debugger. + * @experimental + */ + debuggerId: Runtime.UniqueDebuggerId; + } + interface SetBreakpointByUrlReturnType { + /** + * Id of the created breakpoint for further reference. + */ + breakpointId: BreakpointId; + /** + * List of the locations this breakpoint resolved into upon addition. + */ + locations: Location[]; + } + interface SetBreakpointReturnType { + /** + * Id of the created breakpoint for further reference. + */ + breakpointId: BreakpointId; + /** + * Location this breakpoint resolved into. + */ + actualLocation: Location; + } + interface GetPossibleBreakpointsReturnType { + /** + * List of the possible breakpoint locations. + */ + locations: BreakLocation[]; + } + interface GetStackTraceReturnType { + stackTrace: Runtime.StackTrace; + } + interface SearchInContentReturnType { + /** + * List of search matches. + */ + result: SearchMatch[]; + } + interface SetScriptSourceReturnType { + /** + * New stack trace in case editing has happened while VM was stopped. + */ + callFrames?: CallFrame[] | undefined; + /** + * Whether current call stack was modified after applying the changes. + */ + stackChanged?: boolean | undefined; + /** + * Async stack trace, if any. + */ + asyncStackTrace?: Runtime.StackTrace | undefined; + /** + * Async stack trace, if any. + * @experimental + */ + asyncStackTraceId?: Runtime.StackTraceId | undefined; + /** + * Exception details if any. + */ + exceptionDetails?: Runtime.ExceptionDetails | undefined; + } + interface RestartFrameReturnType { + /** + * New stack trace. + */ + callFrames: CallFrame[]; + /** + * Async stack trace, if any. + */ + asyncStackTrace?: Runtime.StackTrace | undefined; + /** + * Async stack trace, if any. + * @experimental + */ + asyncStackTraceId?: Runtime.StackTraceId | undefined; + } + interface GetScriptSourceReturnType { + /** + * Script source. + */ + scriptSource: string; + } + interface EvaluateOnCallFrameReturnType { + /** + * Object wrapper for the evaluation result. + */ + result: Runtime.RemoteObject; + /** + * Exception details. + */ + exceptionDetails?: Runtime.ExceptionDetails | undefined; + } + interface ScriptParsedEventDataType { + /** + * Identifier of the script parsed. + */ + scriptId: Runtime.ScriptId; + /** + * URL or name of the script parsed (if any). + */ + url: string; + /** + * Line offset of the script within the resource with given URL (for script tags). + */ + startLine: number; + /** + * Column offset of the script within the resource with given URL. + */ + startColumn: number; + /** + * Last line of the script. + */ + endLine: number; + /** + * Length of the last line of the script. + */ + endColumn: number; + /** + * Specifies script creation context. + */ + executionContextId: Runtime.ExecutionContextId; + /** + * Content hash of the script. + */ + hash: string; + /** + * Embedder-specific auxiliary data. + */ + executionContextAuxData?: {} | undefined; + /** + * True, if this script is generated as a result of the live edit operation. + * @experimental + */ + isLiveEdit?: boolean | undefined; + /** + * URL of source map associated with script (if any). + */ + sourceMapURL?: string | undefined; + /** + * True, if this script has sourceURL. + */ + hasSourceURL?: boolean | undefined; + /** + * True, if this script is ES6 module. 
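A hedged sketch of how the `SetBreakpointByUrl*` shapes are typically used with `inspector.Session` (whose declaration appears later in this file); the line number and URL pattern are placeholders, and a matching script must already be loaded for the breakpoint to resolve.

```ts
import { Session } from "node:inspector";
import type { Debugger } from "node:inspector";

const session = new Session();
session.connect();

session.post("Debugger.enable", () => {
  // Placeholder location: 0-based line 10 of any loaded script whose URL ends in app.js.
  const params: Debugger.SetBreakpointByUrlParameterType = {
    lineNumber: 10,
    urlRegex: "app\\.js$",
  };
  session.post("Debugger.setBreakpointByUrl", params, (err, raw) => {
    if (err) throw err;
    const result = raw as Debugger.SetBreakpointByUrlReturnType;
    console.log("breakpoint", result.breakpointId, "resolved at", result.locations);
  });
});
```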
+ */ + isModule?: boolean | undefined; + /** + * This script length. + */ + length?: number | undefined; + /** + * JavaScript top stack frame of where the script parsed event was triggered if available. + * @experimental + */ + stackTrace?: Runtime.StackTrace | undefined; + } + interface ScriptFailedToParseEventDataType { + /** + * Identifier of the script parsed. + */ + scriptId: Runtime.ScriptId; + /** + * URL or name of the script parsed (if any). + */ + url: string; + /** + * Line offset of the script within the resource with given URL (for script tags). + */ + startLine: number; + /** + * Column offset of the script within the resource with given URL. + */ + startColumn: number; + /** + * Last line of the script. + */ + endLine: number; + /** + * Length of the last line of the script. + */ + endColumn: number; + /** + * Specifies script creation context. + */ + executionContextId: Runtime.ExecutionContextId; + /** + * Content hash of the script. + */ + hash: string; + /** + * Embedder-specific auxiliary data. + */ + executionContextAuxData?: {} | undefined; + /** + * URL of source map associated with script (if any). + */ + sourceMapURL?: string | undefined; + /** + * True, if this script has sourceURL. + */ + hasSourceURL?: boolean | undefined; + /** + * True, if this script is ES6 module. + */ + isModule?: boolean | undefined; + /** + * This script length. + */ + length?: number | undefined; + /** + * JavaScript top stack frame of where the script parsed event was triggered if available. + * @experimental + */ + stackTrace?: Runtime.StackTrace | undefined; + } + interface BreakpointResolvedEventDataType { + /** + * Breakpoint unique identifier. + */ + breakpointId: BreakpointId; + /** + * Actual breakpoint location. + */ + location: Location; + } + interface PausedEventDataType { + /** + * Call stack the virtual machine stopped on. + */ + callFrames: CallFrame[]; + /** + * Pause reason. + */ + reason: string; + /** + * Object containing break-specific auxiliary properties. + */ + data?: {} | undefined; + /** + * Hit breakpoints IDs + */ + hitBreakpoints?: string[] | undefined; + /** + * Async stack trace, if any. + */ + asyncStackTrace?: Runtime.StackTrace | undefined; + /** + * Async stack trace, if any. + * @experimental + */ + asyncStackTraceId?: Runtime.StackTraceId | undefined; + /** + * Just scheduled async call will have this stack trace as parent stack during async execution. This field is available only after Debugger.stepInto call with breakOnAsynCall flag. + * @experimental + */ + asyncCallStackTraceId?: Runtime.StackTraceId | undefined; + } + } + namespace Console { + /** + * Console message. + */ + interface ConsoleMessage { + /** + * Message source. + */ + source: string; + /** + * Message severity. + */ + level: string; + /** + * Message text. + */ + text: string; + /** + * URL of the message origin. + */ + url?: string | undefined; + /** + * Line number in the resource that generated this message (1-based). + */ + line?: number | undefined; + /** + * Column number in the resource that generated this message (1-based). + */ + column?: number | undefined; + } + interface MessageAddedEventDataType { + /** + * Console message that has been added. + */ + message: ConsoleMessage; + } + } + namespace Profiler { + /** + * Profile node. Holds callsite information, execution statistics and child nodes. + */ + interface ProfileNode { + /** + * Unique id of the node. + */ + id: number; + /** + * Function location. 
+ */ + callFrame: Runtime.CallFrame; + /** + * Number of samples where this node was on top of the call stack. + */ + hitCount?: number | undefined; + /** + * Child node ids. + */ + children?: number[] | undefined; + /** + * The reason of being not optimized. The function may be deoptimized or marked as don't optimize. + */ + deoptReason?: string | undefined; + /** + * An array of source position ticks. + */ + positionTicks?: PositionTickInfo[] | undefined; + } + /** + * Profile. + */ + interface Profile { + /** + * The list of profile nodes. First item is the root node. + */ + nodes: ProfileNode[]; + /** + * Profiling start timestamp in microseconds. + */ + startTime: number; + /** + * Profiling end timestamp in microseconds. + */ + endTime: number; + /** + * Ids of samples top nodes. + */ + samples?: number[] | undefined; + /** + * Time intervals between adjacent samples in microseconds. The first delta is relative to the profile startTime. + */ + timeDeltas?: number[] | undefined; + } + /** + * Specifies a number of samples attributed to a certain source position. + */ + interface PositionTickInfo { + /** + * Source line number (1-based). + */ + line: number; + /** + * Number of samples attributed to the source line. + */ + ticks: number; + } + /** + * Coverage data for a source range. + */ + interface CoverageRange { + /** + * JavaScript script source offset for the range start. + */ + startOffset: number; + /** + * JavaScript script source offset for the range end. + */ + endOffset: number; + /** + * Collected execution count of the source range. + */ + count: number; + } + /** + * Coverage data for a JavaScript function. + */ + interface FunctionCoverage { + /** + * JavaScript function name. + */ + functionName: string; + /** + * Source ranges inside the function with coverage data. + */ + ranges: CoverageRange[]; + /** + * Whether coverage data for this function has block granularity. + */ + isBlockCoverage: boolean; + } + /** + * Coverage data for a JavaScript script. + */ + interface ScriptCoverage { + /** + * JavaScript script id. + */ + scriptId: Runtime.ScriptId; + /** + * JavaScript script name or url. + */ + url: string; + /** + * Functions contained in the script that has coverage data. + */ + functions: FunctionCoverage[]; + } + interface SetSamplingIntervalParameterType { + /** + * New sampling interval in microseconds. + */ + interval: number; + } + interface StartPreciseCoverageParameterType { + /** + * Collect accurate call counts beyond simple 'covered' or 'not covered'. + */ + callCount?: boolean | undefined; + /** + * Collect block-based coverage. + */ + detailed?: boolean | undefined; + } + interface StopReturnType { + /** + * Recorded profile. + */ + profile: Profile; + } + interface TakePreciseCoverageReturnType { + /** + * Coverage data for the current isolate. + */ + result: ScriptCoverage[]; + } + interface GetBestEffortCoverageReturnType { + /** + * Coverage data for the current isolate. + */ + result: ScriptCoverage[]; + } + interface ConsoleProfileStartedEventDataType { + id: string; + /** + * Location of console.profile(). + */ + location: Debugger.Location; + /** + * Profile title passed as an argument to console.profile(). + */ + title?: string | undefined; + } + interface ConsoleProfileFinishedEventDataType { + id: string; + /** + * Location of console.profileEnd(). + */ + location: Debugger.Location; + profile: Profile; + /** + * Profile title passed as an argument to console.profile(). 
+ */ + title?: string | undefined; + } + } + namespace HeapProfiler { + /** + * Heap snapshot object id. + */ + type HeapSnapshotObjectId = string; + /** + * Sampling Heap Profile node. Holds callsite information, allocation statistics and child nodes. + */ + interface SamplingHeapProfileNode { + /** + * Function location. + */ + callFrame: Runtime.CallFrame; + /** + * Allocations size in bytes for the node excluding children. + */ + selfSize: number; + /** + * Child nodes. + */ + children: SamplingHeapProfileNode[]; + } + /** + * Profile. + */ + interface SamplingHeapProfile { + head: SamplingHeapProfileNode; + } + interface StartTrackingHeapObjectsParameterType { + trackAllocations?: boolean | undefined; + } + interface StopTrackingHeapObjectsParameterType { + /** + * If true 'reportHeapSnapshotProgress' events will be generated while snapshot is being taken when the tracking is stopped. + */ + reportProgress?: boolean | undefined; + } + interface TakeHeapSnapshotParameterType { + /** + * If true 'reportHeapSnapshotProgress' events will be generated while snapshot is being taken. + */ + reportProgress?: boolean | undefined; + } + interface GetObjectByHeapObjectIdParameterType { + objectId: HeapSnapshotObjectId; + /** + * Symbolic group name that can be used to release multiple objects. + */ + objectGroup?: string | undefined; + } + interface AddInspectedHeapObjectParameterType { + /** + * Heap snapshot object id to be accessible by means of $x command line API. + */ + heapObjectId: HeapSnapshotObjectId; + } + interface GetHeapObjectIdParameterType { + /** + * Identifier of the object to get heap object id for. + */ + objectId: Runtime.RemoteObjectId; + } + interface StartSamplingParameterType { + /** + * Average sample interval in bytes. Poisson distribution is used for the intervals. The default value is 32768 bytes. + */ + samplingInterval?: number | undefined; + } + interface GetObjectByHeapObjectIdReturnType { + /** + * Evaluation result. + */ + result: Runtime.RemoteObject; + } + interface GetHeapObjectIdReturnType { + /** + * Id of the heap snapshot object corresponding to the passed remote object id. + */ + heapSnapshotObjectId: HeapSnapshotObjectId; + } + interface StopSamplingReturnType { + /** + * Recorded sampling heap profile. + */ + profile: SamplingHeapProfile; + } + interface GetSamplingProfileReturnType { + /** + * Return the sampling profile being collected. + */ + profile: SamplingHeapProfile; + } + interface AddHeapSnapshotChunkEventDataType { + chunk: string; + } + interface ReportHeapSnapshotProgressEventDataType { + done: number; + total: number; + finished?: boolean | undefined; + } + interface LastSeenObjectIdEventDataType { + lastSeenObjectId: number; + timestamp: number; + } + interface HeapStatsUpdateEventDataType { + /** + * An array of triplets. Each triplet describes a fragment. The first integer is the fragment index, the second integer is a total count of objects for the fragment, the third integer is a total size of the objects for the fragment. + */ + statsUpdate: number[]; + } + } + namespace NodeTracing { + interface TraceConfig { + /** + * Controls how the trace buffer stores data. + */ + recordMode?: string | undefined; + /** + * Included category filters. + */ + includedCategories: string[]; + } + interface StartParameterType { + traceConfig: TraceConfig; + } + interface GetCategoriesReturnType { + /** + * A list of supported tracing categories. 
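The Profiler interfaces above are normally consumed through `inspector.Session`; the following sketch, based on the documented enable/start/stop flow, writes the collected `Profile` to disk. The output file name is a placeholder.

```ts
import { Session } from "node:inspector";
import type { Profiler } from "node:inspector";
import fs from "node:fs";

const session = new Session();
session.connect();

session.post("Profiler.enable", () => {
  session.post("Profiler.start", () => {
    // ... run the code to be profiled here ...
    session.post("Profiler.stop", (err, raw) => {
      if (err) throw err;
      const { profile } = raw as Profiler.StopReturnType;
      // The Profile shape above (nodes/startTime/endTime/samples/timeDeltas)
      // is what DevTools expects in a .cpuprofile file.
      fs.writeFileSync("out.cpuprofile", JSON.stringify(profile));
      session.disconnect();
    });
  });
});
```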
+ */ + categories: string[]; + } + interface DataCollectedEventDataType { + value: Array<{}>; + } + } + namespace NodeWorker { + type WorkerID = string; + /** + * Unique identifier of attached debugging session. + */ + type SessionID = string; + interface WorkerInfo { + workerId: WorkerID; + type: string; + title: string; + url: string; + } + interface SendMessageToWorkerParameterType { + message: string; + /** + * Identifier of the session. + */ + sessionId: SessionID; + } + interface EnableParameterType { + /** + * Whether to new workers should be paused until the frontend sends `Runtime.runIfWaitingForDebugger` + * message to run them. + */ + waitForDebuggerOnStart: boolean; + } + interface DetachParameterType { + sessionId: SessionID; + } + interface AttachedToWorkerEventDataType { + /** + * Identifier assigned to the session used to send/receive messages. + */ + sessionId: SessionID; + workerInfo: WorkerInfo; + waitingForDebugger: boolean; + } + interface DetachedFromWorkerEventDataType { + /** + * Detached session identifier. + */ + sessionId: SessionID; + } + interface ReceivedMessageFromWorkerEventDataType { + /** + * Identifier of a session which sends a message. + */ + sessionId: SessionID; + message: string; + } + } + namespace Network { + /** + * Resource type as it was perceived by the rendering engine. + */ + type ResourceType = string; + /** + * Unique request identifier. + */ + type RequestId = string; + /** + * UTC time in seconds, counted from January 1, 1970. + */ + type TimeSinceEpoch = number; + /** + * Monotonically increasing time in seconds since an arbitrary point in the past. + */ + type MonotonicTime = number; + /** + * HTTP request data. + */ + interface Request { + url: string; + method: string; + headers: Headers; + } + /** + * HTTP response data. + */ + interface Response { + url: string; + status: number; + statusText: string; + headers: Headers; + } + /** + * Request / response headers as keys / values of JSON object. + */ + interface Headers { + } + interface RequestWillBeSentEventDataType { + /** + * Request identifier. + */ + requestId: RequestId; + /** + * Request data. + */ + request: Request; + /** + * Timestamp. + */ + timestamp: MonotonicTime; + /** + * Timestamp. + */ + wallTime: TimeSinceEpoch; + } + interface ResponseReceivedEventDataType { + /** + * Request identifier. + */ + requestId: RequestId; + /** + * Timestamp. + */ + timestamp: MonotonicTime; + /** + * Resource type. + */ + type: ResourceType; + /** + * Response data. + */ + response: Response; + } + interface LoadingFailedEventDataType { + /** + * Request identifier. + */ + requestId: RequestId; + /** + * Timestamp. + */ + timestamp: MonotonicTime; + /** + * Resource type. + */ + type: ResourceType; + /** + * Error message. + */ + errorText: string; + } + interface LoadingFinishedEventDataType { + /** + * Request identifier. + */ + requestId: RequestId; + /** + * Timestamp. + */ + timestamp: MonotonicTime; + } + } + namespace NodeRuntime { + interface NotifyWhenWaitingForDisconnectParameterType { + enabled: boolean; + } + } + + /** + * The `inspector.Session` is used for dispatching messages to the V8 inspector + * back-end and receiving message responses and notifications. + */ + class Session extends EventEmitter { + /** + * Create a new instance of the inspector.Session class. + * The inspector session needs to be connected through `session.connect()` before the messages can be dispatched to the inspector backend. 
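+ *
+ * For illustration only, a minimal sketch in the style of the examples below (not upstream documentation):
+ *
+ * ```js
+ * const inspector = require('node:inspector');
+ *
+ * const session = new inspector.Session();
+ * session.connect();      // attach to the inspector back-end of this process
+ * // ... dispatch protocol messages with session.post() ...
+ * session.disconnect();   // release the session when finished
+ * ```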
+ */ + constructor(); + + /** + * Connects a session to the inspector back-end. + */ + connect(): void; + + /** + * Connects a session to the inspector back-end. + * An exception will be thrown if this API was not called on a Worker thread. + * @since v12.11.0 + */ + connectToMainThread(): void; + + /** + * Immediately close the session. All pending message callbacks will be called with an error. + * `session.connect()` will need to be called to be able to send messages again. + * Reconnected session will lose all inspector state, such as enabled agents or configured breakpoints. + */ + disconnect(): void; + + /** + * Posts a message to the inspector back-end. `callback` will be notified when + * a response is received. `callback` is a function that accepts two optional + * arguments: error and message-specific result. + * + * ```js + * session.post('Runtime.evaluate', { expression: '2 + 2' }, + * (error, { result }) => console.log(result)); + * // Output: { type: 'number', value: 4, description: '4' } + * ``` + * + * The latest version of the V8 inspector protocol is published on the + * [Chrome DevTools Protocol Viewer](https://chromedevtools.github.io/devtools-protocol/v8/). + * + * Node.js inspector supports all the Chrome DevTools Protocol domains declared + * by V8. Chrome DevTools Protocol domain provides an interface for interacting + * with one of the runtime agents used to inspect the application state and listen + * to the run-time events. + */ + post(method: string, callback?: (err: Error | null, params?: object) => void): void; + post(method: string, params?: object, callback?: (err: Error | null, params?: object) => void): void; + /** + * Returns supported domains. + */ + post(method: 'Schema.getDomains', callback?: (err: Error | null, params: Schema.GetDomainsReturnType) => void): void; + /** + * Evaluates expression on global object. + */ + post(method: 'Runtime.evaluate', params?: Runtime.EvaluateParameterType, callback?: (err: Error | null, params: Runtime.EvaluateReturnType) => void): void; + post(method: 'Runtime.evaluate', callback?: (err: Error | null, params: Runtime.EvaluateReturnType) => void): void; + /** + * Add handler to promise with given promise object id. + */ + post(method: 'Runtime.awaitPromise', params?: Runtime.AwaitPromiseParameterType, callback?: (err: Error | null, params: Runtime.AwaitPromiseReturnType) => void): void; + post(method: 'Runtime.awaitPromise', callback?: (err: Error | null, params: Runtime.AwaitPromiseReturnType) => void): void; + /** + * Calls function with given declaration on the given object. Object group of the result is inherited from the target object. + */ + post(method: 'Runtime.callFunctionOn', params?: Runtime.CallFunctionOnParameterType, callback?: (err: Error | null, params: Runtime.CallFunctionOnReturnType) => void): void; + post(method: 'Runtime.callFunctionOn', callback?: (err: Error | null, params: Runtime.CallFunctionOnReturnType) => void): void; + /** + * Returns properties of a given object. Object group of the result is inherited from the target object. + */ + post(method: 'Runtime.getProperties', params?: Runtime.GetPropertiesParameterType, callback?: (err: Error | null, params: Runtime.GetPropertiesReturnType) => void): void; + post(method: 'Runtime.getProperties', callback?: (err: Error | null, params: Runtime.GetPropertiesReturnType) => void): void; + /** + * Releases remote object with given id. 
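+ *
+ * A hedged sketch of pairing an evaluation that returns a remote-object handle with its release
+ * (assumes a connected `session`; `'process'` is just an illustrative expression):
+ *
+ * ```js
+ * session.post('Runtime.evaluate', { expression: 'process' }, (err, params) => {
+ *   if (err) throw err;
+ *   const { objectId } = params.result;                  // remote-object handle
+ *   session.post('Runtime.releaseObject', { objectId }, (releaseErr) => {
+ *     if (releaseErr) throw releaseErr;                  // handle is now released on the back-end
+ *   });
+ * });
+ * ```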
+ */ + post(method: 'Runtime.releaseObject', params?: Runtime.ReleaseObjectParameterType, callback?: (err: Error | null) => void): void; + post(method: 'Runtime.releaseObject', callback?: (err: Error | null) => void): void; + /** + * Releases all remote objects that belong to a given group. + */ + post(method: 'Runtime.releaseObjectGroup', params?: Runtime.ReleaseObjectGroupParameterType, callback?: (err: Error | null) => void): void; + post(method: 'Runtime.releaseObjectGroup', callback?: (err: Error | null) => void): void; + /** + * Tells inspected instance to run if it was waiting for debugger to attach. + */ + post(method: 'Runtime.runIfWaitingForDebugger', callback?: (err: Error | null) => void): void; + /** + * Enables reporting of execution contexts creation by means of executionContextCreated event. When the reporting gets enabled the event will be sent immediately for each existing execution context. + */ + post(method: 'Runtime.enable', callback?: (err: Error | null) => void): void; + /** + * Disables reporting of execution contexts creation. + */ + post(method: 'Runtime.disable', callback?: (err: Error | null) => void): void; + /** + * Discards collected exceptions and console API calls. + */ + post(method: 'Runtime.discardConsoleEntries', callback?: (err: Error | null) => void): void; + /** + * @experimental + */ + post(method: 'Runtime.setCustomObjectFormatterEnabled', params?: Runtime.SetCustomObjectFormatterEnabledParameterType, callback?: (err: Error | null) => void): void; + post(method: 'Runtime.setCustomObjectFormatterEnabled', callback?: (err: Error | null) => void): void; + /** + * Compiles expression. + */ + post(method: 'Runtime.compileScript', params?: Runtime.CompileScriptParameterType, callback?: (err: Error | null, params: Runtime.CompileScriptReturnType) => void): void; + post(method: 'Runtime.compileScript', callback?: (err: Error | null, params: Runtime.CompileScriptReturnType) => void): void; + /** + * Runs script with given id in a given context. + */ + post(method: 'Runtime.runScript', params?: Runtime.RunScriptParameterType, callback?: (err: Error | null, params: Runtime.RunScriptReturnType) => void): void; + post(method: 'Runtime.runScript', callback?: (err: Error | null, params: Runtime.RunScriptReturnType) => void): void; + post(method: 'Runtime.queryObjects', params?: Runtime.QueryObjectsParameterType, callback?: (err: Error | null, params: Runtime.QueryObjectsReturnType) => void): void; + post(method: 'Runtime.queryObjects', callback?: (err: Error | null, params: Runtime.QueryObjectsReturnType) => void): void; + /** + * Returns all let, const and class variables from global scope. + */ + post( + method: 'Runtime.globalLexicalScopeNames', + params?: Runtime.GlobalLexicalScopeNamesParameterType, + callback?: (err: Error | null, params: Runtime.GlobalLexicalScopeNamesReturnType) => void + ): void; + post(method: 'Runtime.globalLexicalScopeNames', callback?: (err: Error | null, params: Runtime.GlobalLexicalScopeNamesReturnType) => void): void; + /** + * Enables debugger for the given page. Clients should not assume that the debugging has been enabled until the result for this command is received. + */ + post(method: 'Debugger.enable', callback?: (err: Error | null, params: Debugger.EnableReturnType) => void): void; + /** + * Disables debugger for given page. + */ + post(method: 'Debugger.disable', callback?: (err: Error | null) => void): void; + /** + * Activates / deactivates all breakpoints on the page. 
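+ *
+ * A small illustrative sketch (assumes a connected `session`; the `active` flag follows the
+ * Chrome DevTools Protocol definition of this command):
+ *
+ * ```js
+ * session.post('Debugger.enable', (err) => {
+ *   if (err) throw err;
+ *   // Temporarily mute every registered breakpoint, then restore them.
+ *   session.post('Debugger.setBreakpointsActive', { active: false }, (muteErr) => {
+ *     if (muteErr) throw muteErr;
+ *     session.post('Debugger.setBreakpointsActive', { active: true });
+ *   });
+ * });
+ * ```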
+ */ + post(method: 'Debugger.setBreakpointsActive', params?: Debugger.SetBreakpointsActiveParameterType, callback?: (err: Error | null) => void): void; + post(method: 'Debugger.setBreakpointsActive', callback?: (err: Error | null) => void): void; + /** + * Makes page not interrupt on any pauses (breakpoint, exception, dom exception etc). + */ + post(method: 'Debugger.setSkipAllPauses', params?: Debugger.SetSkipAllPausesParameterType, callback?: (err: Error | null) => void): void; + post(method: 'Debugger.setSkipAllPauses', callback?: (err: Error | null) => void): void; + /** + * Sets JavaScript breakpoint at given location specified either by URL or URL regex. Once this command is issued, all existing parsed scripts will have breakpoints resolved and returned in locations property. Further matching script parsing will result in subsequent breakpointResolved events issued. This logical breakpoint will survive page reloads. + */ + post(method: 'Debugger.setBreakpointByUrl', params?: Debugger.SetBreakpointByUrlParameterType, callback?: (err: Error | null, params: Debugger.SetBreakpointByUrlReturnType) => void): void; + post(method: 'Debugger.setBreakpointByUrl', callback?: (err: Error | null, params: Debugger.SetBreakpointByUrlReturnType) => void): void; + /** + * Sets JavaScript breakpoint at a given location. + */ + post(method: 'Debugger.setBreakpoint', params?: Debugger.SetBreakpointParameterType, callback?: (err: Error | null, params: Debugger.SetBreakpointReturnType) => void): void; + post(method: 'Debugger.setBreakpoint', callback?: (err: Error | null, params: Debugger.SetBreakpointReturnType) => void): void; + /** + * Removes JavaScript breakpoint. + */ + post(method: 'Debugger.removeBreakpoint', params?: Debugger.RemoveBreakpointParameterType, callback?: (err: Error | null) => void): void; + post(method: 'Debugger.removeBreakpoint', callback?: (err: Error | null) => void): void; + /** + * Returns possible locations for breakpoint. scriptId in start and end range locations should be the same. + */ + post( + method: 'Debugger.getPossibleBreakpoints', + params?: Debugger.GetPossibleBreakpointsParameterType, + callback?: (err: Error | null, params: Debugger.GetPossibleBreakpointsReturnType) => void + ): void; + post(method: 'Debugger.getPossibleBreakpoints', callback?: (err: Error | null, params: Debugger.GetPossibleBreakpointsReturnType) => void): void; + /** + * Continues execution until specific location is reached. + */ + post(method: 'Debugger.continueToLocation', params?: Debugger.ContinueToLocationParameterType, callback?: (err: Error | null) => void): void; + post(method: 'Debugger.continueToLocation', callback?: (err: Error | null) => void): void; + /** + * @experimental + */ + post(method: 'Debugger.pauseOnAsyncCall', params?: Debugger.PauseOnAsyncCallParameterType, callback?: (err: Error | null) => void): void; + post(method: 'Debugger.pauseOnAsyncCall', callback?: (err: Error | null) => void): void; + /** + * Steps over the statement. + */ + post(method: 'Debugger.stepOver', callback?: (err: Error | null) => void): void; + /** + * Steps into the function call. + */ + post(method: 'Debugger.stepInto', params?: Debugger.StepIntoParameterType, callback?: (err: Error | null) => void): void; + post(method: 'Debugger.stepInto', callback?: (err: Error | null) => void): void; + /** + * Steps out of the function call. + */ + post(method: 'Debugger.stepOut', callback?: (err: Error | null) => void): void; + /** + * Stops on the next JavaScript statement. 
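+ *
+ * For context, a minimal sketch of pausing and immediately resuming (assumes a connected `session`):
+ *
+ * ```js
+ * session.post('Debugger.enable', (err) => {
+ *   if (err) throw err;
+ *   session.on('Debugger.paused', ({ params }) => {
+ *     // params.callFrames describes where execution stopped.
+ *     session.post('Debugger.resume');
+ *   });
+ *   session.post('Debugger.pause');
+ * });
+ * ```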
+ */ + post(method: 'Debugger.pause', callback?: (err: Error | null) => void): void; + /** + * This method is deprecated - use Debugger.stepInto with breakOnAsyncCall and Debugger.pauseOnAsyncTask instead. Steps into next scheduled async task if any is scheduled before next pause. Returns success when async task is actually scheduled, returns error if no task were scheduled or another scheduleStepIntoAsync was called. + * @experimental + */ + post(method: 'Debugger.scheduleStepIntoAsync', callback?: (err: Error | null) => void): void; + /** + * Resumes JavaScript execution. + */ + post(method: 'Debugger.resume', callback?: (err: Error | null) => void): void; + /** + * Returns stack trace with given stackTraceId. + * @experimental + */ + post(method: 'Debugger.getStackTrace', params?: Debugger.GetStackTraceParameterType, callback?: (err: Error | null, params: Debugger.GetStackTraceReturnType) => void): void; + post(method: 'Debugger.getStackTrace', callback?: (err: Error | null, params: Debugger.GetStackTraceReturnType) => void): void; + /** + * Searches for given string in script content. + */ + post(method: 'Debugger.searchInContent', params?: Debugger.SearchInContentParameterType, callback?: (err: Error | null, params: Debugger.SearchInContentReturnType) => void): void; + post(method: 'Debugger.searchInContent', callback?: (err: Error | null, params: Debugger.SearchInContentReturnType) => void): void; + /** + * Edits JavaScript source live. + */ + post(method: 'Debugger.setScriptSource', params?: Debugger.SetScriptSourceParameterType, callback?: (err: Error | null, params: Debugger.SetScriptSourceReturnType) => void): void; + post(method: 'Debugger.setScriptSource', callback?: (err: Error | null, params: Debugger.SetScriptSourceReturnType) => void): void; + /** + * Restarts particular call frame from the beginning. + */ + post(method: 'Debugger.restartFrame', params?: Debugger.RestartFrameParameterType, callback?: (err: Error | null, params: Debugger.RestartFrameReturnType) => void): void; + post(method: 'Debugger.restartFrame', callback?: (err: Error | null, params: Debugger.RestartFrameReturnType) => void): void; + /** + * Returns source for the script with given id. + */ + post(method: 'Debugger.getScriptSource', params?: Debugger.GetScriptSourceParameterType, callback?: (err: Error | null, params: Debugger.GetScriptSourceReturnType) => void): void; + post(method: 'Debugger.getScriptSource', callback?: (err: Error | null, params: Debugger.GetScriptSourceReturnType) => void): void; + /** + * Defines pause on exceptions state. Can be set to stop on all exceptions, uncaught exceptions or no exceptions. Initial pause on exceptions state is none. + */ + post(method: 'Debugger.setPauseOnExceptions', params?: Debugger.SetPauseOnExceptionsParameterType, callback?: (err: Error | null) => void): void; + post(method: 'Debugger.setPauseOnExceptions', callback?: (err: Error | null) => void): void; + /** + * Evaluates expression on a given call frame. + */ + post(method: 'Debugger.evaluateOnCallFrame', params?: Debugger.EvaluateOnCallFrameParameterType, callback?: (err: Error | null, params: Debugger.EvaluateOnCallFrameReturnType) => void): void; + post(method: 'Debugger.evaluateOnCallFrame', callback?: (err: Error | null, params: Debugger.EvaluateOnCallFrameReturnType) => void): void; + /** + * Changes value of variable in a callframe. Object-based scopes are not supported and must be mutated manually. 
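+ *
+ * A hedged sketch based on the Chrome DevTools Protocol shape of this command (assumes a connected
+ * `session` that is currently paused; the variable name `retries` is purely hypothetical):
+ *
+ * ```js
+ * session.on('Debugger.paused', ({ params }) => {
+ *   const { callFrameId } = params.callFrames[0];        // top call frame
+ *   session.post('Debugger.setVariableValue', {
+ *     scopeNumber: 0,                                    // innermost scope of that frame
+ *     variableName: 'retries',                           // hypothetical local variable
+ *     newValue: { value: 3 },
+ *     callFrameId,
+ *   }, (err) => {
+ *     if (err) throw err;
+ *     session.post('Debugger.resume');
+ *   });
+ * });
+ * ```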
+ */ + post(method: 'Debugger.setVariableValue', params?: Debugger.SetVariableValueParameterType, callback?: (err: Error | null) => void): void; + post(method: 'Debugger.setVariableValue', callback?: (err: Error | null) => void): void; + /** + * Changes return value in top frame. Available only at return break position. + * @experimental + */ + post(method: 'Debugger.setReturnValue', params?: Debugger.SetReturnValueParameterType, callback?: (err: Error | null) => void): void; + post(method: 'Debugger.setReturnValue', callback?: (err: Error | null) => void): void; + /** + * Enables or disables async call stacks tracking. + */ + post(method: 'Debugger.setAsyncCallStackDepth', params?: Debugger.SetAsyncCallStackDepthParameterType, callback?: (err: Error | null) => void): void; + post(method: 'Debugger.setAsyncCallStackDepth', callback?: (err: Error | null) => void): void; + /** + * Replace previous blackbox patterns with passed ones. Forces backend to skip stepping/pausing in scripts with url matching one of the patterns. VM will try to leave blackboxed script by performing 'step in' several times, finally resorting to 'step out' if unsuccessful. + * @experimental + */ + post(method: 'Debugger.setBlackboxPatterns', params?: Debugger.SetBlackboxPatternsParameterType, callback?: (err: Error | null) => void): void; + post(method: 'Debugger.setBlackboxPatterns', callback?: (err: Error | null) => void): void; + /** + * Makes backend skip steps in the script in blackboxed ranges. VM will try leave blacklisted scripts by performing 'step in' several times, finally resorting to 'step out' if unsuccessful. Positions array contains positions where blackbox state is changed. First interval isn't blackboxed. Array should be sorted. + * @experimental + */ + post(method: 'Debugger.setBlackboxedRanges', params?: Debugger.SetBlackboxedRangesParameterType, callback?: (err: Error | null) => void): void; + post(method: 'Debugger.setBlackboxedRanges', callback?: (err: Error | null) => void): void; + /** + * Enables console domain, sends the messages collected so far to the client by means of the messageAdded notification. + */ + post(method: 'Console.enable', callback?: (err: Error | null) => void): void; + /** + * Disables console domain, prevents further console messages from being reported to the client. + */ + post(method: 'Console.disable', callback?: (err: Error | null) => void): void; + /** + * Does nothing. + */ + post(method: 'Console.clearMessages', callback?: (err: Error | null) => void): void; + post(method: 'Profiler.enable', callback?: (err: Error | null) => void): void; + post(method: 'Profiler.disable', callback?: (err: Error | null) => void): void; + /** + * Changes CPU profiler sampling interval. Must be called before CPU profiles recording started. + */ + post(method: 'Profiler.setSamplingInterval', params?: Profiler.SetSamplingIntervalParameterType, callback?: (err: Error | null) => void): void; + post(method: 'Profiler.setSamplingInterval', callback?: (err: Error | null) => void): void; + post(method: 'Profiler.start', callback?: (err: Error | null) => void): void; + post(method: 'Profiler.stop', callback?: (err: Error | null, params: Profiler.StopReturnType) => void): void; + /** + * Enable precise code coverage. Coverage data for JavaScript executed before enabling precise code coverage may be incomplete. Enabling prevents running optimized code and resets execution counters. 
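+ *
+ * A minimal sketch of the precise-coverage round trip (assumes a connected `session`):
+ *
+ * ```js
+ * session.post('Profiler.enable', (err) => {
+ *   if (err) throw err;
+ *   session.post('Profiler.startPreciseCoverage', { callCount: true, detailed: true }, (startErr) => {
+ *     if (startErr) throw startErr;
+ *     // ... run the code under measurement ...
+ *     session.post('Profiler.takePreciseCoverage', (covErr, coverage) => {
+ *       if (covErr) throw covErr;
+ *       console.log(coverage.result.length, 'scripts with coverage data');
+ *       session.post('Profiler.stopPreciseCoverage');
+ *     });
+ *   });
+ * });
+ * ```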
+ */ + post(method: 'Profiler.startPreciseCoverage', params?: Profiler.StartPreciseCoverageParameterType, callback?: (err: Error | null) => void): void; + post(method: 'Profiler.startPreciseCoverage', callback?: (err: Error | null) => void): void; + /** + * Disable precise code coverage. Disabling releases unnecessary execution count records and allows executing optimized code. + */ + post(method: 'Profiler.stopPreciseCoverage', callback?: (err: Error | null) => void): void; + /** + * Collect coverage data for the current isolate, and resets execution counters. Precise code coverage needs to have started. + */ + post(method: 'Profiler.takePreciseCoverage', callback?: (err: Error | null, params: Profiler.TakePreciseCoverageReturnType) => void): void; + /** + * Collect coverage data for the current isolate. The coverage data may be incomplete due to garbage collection. + */ + post(method: 'Profiler.getBestEffortCoverage', callback?: (err: Error | null, params: Profiler.GetBestEffortCoverageReturnType) => void): void; + post(method: 'HeapProfiler.enable', callback?: (err: Error | null) => void): void; + post(method: 'HeapProfiler.disable', callback?: (err: Error | null) => void): void; + post(method: 'HeapProfiler.startTrackingHeapObjects', params?: HeapProfiler.StartTrackingHeapObjectsParameterType, callback?: (err: Error | null) => void): void; + post(method: 'HeapProfiler.startTrackingHeapObjects', callback?: (err: Error | null) => void): void; + post(method: 'HeapProfiler.stopTrackingHeapObjects', params?: HeapProfiler.StopTrackingHeapObjectsParameterType, callback?: (err: Error | null) => void): void; + post(method: 'HeapProfiler.stopTrackingHeapObjects', callback?: (err: Error | null) => void): void; + post(method: 'HeapProfiler.takeHeapSnapshot', params?: HeapProfiler.TakeHeapSnapshotParameterType, callback?: (err: Error | null) => void): void; + post(method: 'HeapProfiler.takeHeapSnapshot', callback?: (err: Error | null) => void): void; + post(method: 'HeapProfiler.collectGarbage', callback?: (err: Error | null) => void): void; + post( + method: 'HeapProfiler.getObjectByHeapObjectId', + params?: HeapProfiler.GetObjectByHeapObjectIdParameterType, + callback?: (err: Error | null, params: HeapProfiler.GetObjectByHeapObjectIdReturnType) => void + ): void; + post(method: 'HeapProfiler.getObjectByHeapObjectId', callback?: (err: Error | null, params: HeapProfiler.GetObjectByHeapObjectIdReturnType) => void): void; + /** + * Enables console to refer to the node with given id via $x (see Command Line API for more details $x functions). 
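+ *
+ * A hedged sketch tying these HeapProfiler overloads together (assumes a connected `session` and an
+ * `objectId` already obtained from an earlier `Runtime.evaluate` call):
+ *
+ * ```js
+ * session.post('HeapProfiler.getHeapObjectId', { objectId }, (err, params) => {
+ *   if (err) throw err;
+ *   // Make the object addressable via $x from an attached DevTools console.
+ *   session.post('HeapProfiler.addInspectedHeapObject', { heapObjectId: params.heapSnapshotObjectId });
+ * });
+ * ```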
+ */ + post(method: 'HeapProfiler.addInspectedHeapObject', params?: HeapProfiler.AddInspectedHeapObjectParameterType, callback?: (err: Error | null) => void): void; + post(method: 'HeapProfiler.addInspectedHeapObject', callback?: (err: Error | null) => void): void; + post(method: 'HeapProfiler.getHeapObjectId', params?: HeapProfiler.GetHeapObjectIdParameterType, callback?: (err: Error | null, params: HeapProfiler.GetHeapObjectIdReturnType) => void): void; + post(method: 'HeapProfiler.getHeapObjectId', callback?: (err: Error | null, params: HeapProfiler.GetHeapObjectIdReturnType) => void): void; + post(method: 'HeapProfiler.startSampling', params?: HeapProfiler.StartSamplingParameterType, callback?: (err: Error | null) => void): void; + post(method: 'HeapProfiler.startSampling', callback?: (err: Error | null) => void): void; + post(method: 'HeapProfiler.stopSampling', callback?: (err: Error | null, params: HeapProfiler.StopSamplingReturnType) => void): void; + post(method: 'HeapProfiler.getSamplingProfile', callback?: (err: Error | null, params: HeapProfiler.GetSamplingProfileReturnType) => void): void; + /** + * Gets supported tracing categories. + */ + post(method: 'NodeTracing.getCategories', callback?: (err: Error | null, params: NodeTracing.GetCategoriesReturnType) => void): void; + /** + * Start trace events collection. + */ + post(method: 'NodeTracing.start', params?: NodeTracing.StartParameterType, callback?: (err: Error | null) => void): void; + post(method: 'NodeTracing.start', callback?: (err: Error | null) => void): void; + /** + * Stop trace events collection. Remaining collected events will be sent as a sequence of + * dataCollected events followed by tracingComplete event. + */ + post(method: 'NodeTracing.stop', callback?: (err: Error | null) => void): void; + /** + * Sends protocol message over session with given id. + */ + post(method: 'NodeWorker.sendMessageToWorker', params?: NodeWorker.SendMessageToWorkerParameterType, callback?: (err: Error | null) => void): void; + post(method: 'NodeWorker.sendMessageToWorker', callback?: (err: Error | null) => void): void; + /** + * Instructs the inspector to attach to running workers. Will also attach to new workers + * as they start + */ + post(method: 'NodeWorker.enable', params?: NodeWorker.EnableParameterType, callback?: (err: Error | null) => void): void; + post(method: 'NodeWorker.enable', callback?: (err: Error | null) => void): void; + /** + * Detaches from all running workers and disables attaching to new workers as they are started. + */ + post(method: 'NodeWorker.disable', callback?: (err: Error | null) => void): void; + /** + * Detached from the worker with given sessionId. + */ + post(method: 'NodeWorker.detach', params?: NodeWorker.DetachParameterType, callback?: (err: Error | null) => void): void; + post(method: 'NodeWorker.detach', callback?: (err: Error | null) => void): void; + /** + * Disables network tracking, prevents network events from being sent to the client. + */ + post(method: 'Network.disable', callback?: (err: Error | null) => void): void; + /** + * Enables network tracking, network events will now be delivered to the client. + */ + post(method: 'Network.enable', callback?: (err: Error | null) => void): void; + /** + * Enable the NodeRuntime events except by `NodeRuntime.waitingForDisconnect`. 
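+ *
+ * A small illustrative sketch of the NodeRuntime overloads below (assumes a connected `session`):
+ *
+ * ```js
+ * session.post('NodeRuntime.enable', (err) => {
+ *   if (err) throw err;
+ *   session.post('NodeRuntime.notifyWhenWaitingForDisconnect', { enabled: true });
+ * });
+ * session.on('NodeRuntime.waitingForDisconnect', () => {
+ *   // The process has finished running code and is waiting for frontends to disconnect.
+ *   session.disconnect();
+ * });
+ * ```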
+ */ + post(method: 'NodeRuntime.enable', callback?: (err: Error | null) => void): void; + /** + * Disable NodeRuntime events + */ + post(method: 'NodeRuntime.disable', callback?: (err: Error | null) => void): void; + /** + * Enable the `NodeRuntime.waitingForDisconnect`. + */ + post(method: 'NodeRuntime.notifyWhenWaitingForDisconnect', params?: NodeRuntime.NotifyWhenWaitingForDisconnectParameterType, callback?: (err: Error | null) => void): void; + post(method: 'NodeRuntime.notifyWhenWaitingForDisconnect', callback?: (err: Error | null) => void): void; + + addListener(event: string, listener: (...args: any[]) => void): this; + /** + * Emitted when any notification from the V8 Inspector is received. + */ + addListener(event: 'inspectorNotification', listener: (message: InspectorNotification) => void): this; + /** + * Issued when new execution context is created. + */ + addListener(event: 'Runtime.executionContextCreated', listener: (message: InspectorNotification) => void): this; + /** + * Issued when execution context is destroyed. + */ + addListener(event: 'Runtime.executionContextDestroyed', listener: (message: InspectorNotification) => void): this; + /** + * Issued when all executionContexts were cleared in browser + */ + addListener(event: 'Runtime.executionContextsCleared', listener: () => void): this; + /** + * Issued when exception was thrown and unhandled. + */ + addListener(event: 'Runtime.exceptionThrown', listener: (message: InspectorNotification) => void): this; + /** + * Issued when unhandled exception was revoked. + */ + addListener(event: 'Runtime.exceptionRevoked', listener: (message: InspectorNotification) => void): this; + /** + * Issued when console API was called. + */ + addListener(event: 'Runtime.consoleAPICalled', listener: (message: InspectorNotification) => void): this; + /** + * Issued when object should be inspected (for example, as a result of inspect() command line API call). + */ + addListener(event: 'Runtime.inspectRequested', listener: (message: InspectorNotification) => void): this; + /** + * Fired when virtual machine parses script. This event is also fired for all known and uncollected scripts upon enabling debugger. + */ + addListener(event: 'Debugger.scriptParsed', listener: (message: InspectorNotification) => void): this; + /** + * Fired when virtual machine fails to parse the script. + */ + addListener(event: 'Debugger.scriptFailedToParse', listener: (message: InspectorNotification) => void): this; + /** + * Fired when breakpoint is resolved to an actual script and location. + */ + addListener(event: 'Debugger.breakpointResolved', listener: (message: InspectorNotification) => void): this; + /** + * Fired when the virtual machine stopped on breakpoint or exception or any other stop criteria. + */ + addListener(event: 'Debugger.paused', listener: (message: InspectorNotification) => void): this; + /** + * Fired when the virtual machine resumed execution. + */ + addListener(event: 'Debugger.resumed', listener: () => void): this; + /** + * Issued when new console message is added. + */ + addListener(event: 'Console.messageAdded', listener: (message: InspectorNotification) => void): this; + /** + * Sent when new profile recording is started using console.profile() call. 
+ */ + addListener(event: 'Profiler.consoleProfileStarted', listener: (message: InspectorNotification) => void): this; + addListener(event: 'Profiler.consoleProfileFinished', listener: (message: InspectorNotification) => void): this; + addListener(event: 'HeapProfiler.addHeapSnapshotChunk', listener: (message: InspectorNotification) => void): this; + addListener(event: 'HeapProfiler.resetProfiles', listener: () => void): this; + addListener(event: 'HeapProfiler.reportHeapSnapshotProgress', listener: (message: InspectorNotification) => void): this; + /** + * If heap objects tracking has been started then backend regularly sends a current value for last seen object id and corresponding timestamp. If the were changes in the heap since last event then one or more heapStatsUpdate events will be sent before a new lastSeenObjectId event. + */ + addListener(event: 'HeapProfiler.lastSeenObjectId', listener: (message: InspectorNotification) => void): this; + /** + * If heap objects tracking has been started then backend may send update for one or more fragments + */ + addListener(event: 'HeapProfiler.heapStatsUpdate', listener: (message: InspectorNotification) => void): this; + /** + * Contains an bucket of collected trace events. + */ + addListener(event: 'NodeTracing.dataCollected', listener: (message: InspectorNotification) => void): this; + /** + * Signals that tracing is stopped and there is no trace buffers pending flush, all data were + * delivered via dataCollected events. + */ + addListener(event: 'NodeTracing.tracingComplete', listener: () => void): this; + /** + * Issued when attached to a worker. + */ + addListener(event: 'NodeWorker.attachedToWorker', listener: (message: InspectorNotification) => void): this; + /** + * Issued when detached from the worker. + */ + addListener(event: 'NodeWorker.detachedFromWorker', listener: (message: InspectorNotification) => void): this; + /** + * Notifies about a new protocol message received from the session + * (session ID is provided in attachedToWorker notification). + */ + addListener(event: 'NodeWorker.receivedMessageFromWorker', listener: (message: InspectorNotification) => void): this; + /** + * Fired when page is about to send HTTP request. + */ + addListener(event: 'Network.requestWillBeSent', listener: (message: InspectorNotification) => void): this; + /** + * Fired when HTTP response is available. + */ + addListener(event: 'Network.responseReceived', listener: (message: InspectorNotification) => void): this; + addListener(event: 'Network.loadingFailed', listener: (message: InspectorNotification) => void): this; + addListener(event: 'Network.loadingFinished', listener: (message: InspectorNotification) => void): this; + /** + * This event is fired instead of `Runtime.executionContextDestroyed` when + * enabled. + * It is fired when the Node process finished all code execution and is + * waiting for all frontends to disconnect. + */ + addListener(event: 'NodeRuntime.waitingForDisconnect', listener: () => void): this; + /** + * This event is fired when the runtime is waiting for the debugger. 
For + * example, when inspector.waitingForDebugger is called + */ + addListener(event: 'NodeRuntime.waitingForDebugger', listener: () => void): this; + emit(event: string | symbol, ...args: any[]): boolean; + emit(event: 'inspectorNotification', message: InspectorNotification): boolean; + emit(event: 'Runtime.executionContextCreated', message: InspectorNotification): boolean; + emit(event: 'Runtime.executionContextDestroyed', message: InspectorNotification): boolean; + emit(event: 'Runtime.executionContextsCleared'): boolean; + emit(event: 'Runtime.exceptionThrown', message: InspectorNotification): boolean; + emit(event: 'Runtime.exceptionRevoked', message: InspectorNotification): boolean; + emit(event: 'Runtime.consoleAPICalled', message: InspectorNotification): boolean; + emit(event: 'Runtime.inspectRequested', message: InspectorNotification): boolean; + emit(event: 'Debugger.scriptParsed', message: InspectorNotification): boolean; + emit(event: 'Debugger.scriptFailedToParse', message: InspectorNotification): boolean; + emit(event: 'Debugger.breakpointResolved', message: InspectorNotification): boolean; + emit(event: 'Debugger.paused', message: InspectorNotification): boolean; + emit(event: 'Debugger.resumed'): boolean; + emit(event: 'Console.messageAdded', message: InspectorNotification): boolean; + emit(event: 'Profiler.consoleProfileStarted', message: InspectorNotification): boolean; + emit(event: 'Profiler.consoleProfileFinished', message: InspectorNotification): boolean; + emit(event: 'HeapProfiler.addHeapSnapshotChunk', message: InspectorNotification): boolean; + emit(event: 'HeapProfiler.resetProfiles'): boolean; + emit(event: 'HeapProfiler.reportHeapSnapshotProgress', message: InspectorNotification): boolean; + emit(event: 'HeapProfiler.lastSeenObjectId', message: InspectorNotification): boolean; + emit(event: 'HeapProfiler.heapStatsUpdate', message: InspectorNotification): boolean; + emit(event: 'NodeTracing.dataCollected', message: InspectorNotification): boolean; + emit(event: 'NodeTracing.tracingComplete'): boolean; + emit(event: 'NodeWorker.attachedToWorker', message: InspectorNotification): boolean; + emit(event: 'NodeWorker.detachedFromWorker', message: InspectorNotification): boolean; + emit(event: 'NodeWorker.receivedMessageFromWorker', message: InspectorNotification): boolean; + emit(event: 'Network.requestWillBeSent', message: InspectorNotification): boolean; + emit(event: 'Network.responseReceived', message: InspectorNotification): boolean; + emit(event: 'Network.loadingFailed', message: InspectorNotification): boolean; + emit(event: 'Network.loadingFinished', message: InspectorNotification): boolean; + emit(event: 'NodeRuntime.waitingForDisconnect'): boolean; + emit(event: 'NodeRuntime.waitingForDebugger'): boolean; + on(event: string, listener: (...args: any[]) => void): this; + /** + * Emitted when any notification from the V8 Inspector is received. + */ + on(event: 'inspectorNotification', listener: (message: InspectorNotification) => void): this; + /** + * Issued when new execution context is created. + */ + on(event: 'Runtime.executionContextCreated', listener: (message: InspectorNotification) => void): this; + /** + * Issued when execution context is destroyed. 
+ */ + on(event: 'Runtime.executionContextDestroyed', listener: (message: InspectorNotification) => void): this; + /** + * Issued when all executionContexts were cleared in browser + */ + on(event: 'Runtime.executionContextsCleared', listener: () => void): this; + /** + * Issued when exception was thrown and unhandled. + */ + on(event: 'Runtime.exceptionThrown', listener: (message: InspectorNotification) => void): this; + /** + * Issued when unhandled exception was revoked. + */ + on(event: 'Runtime.exceptionRevoked', listener: (message: InspectorNotification) => void): this; + /** + * Issued when console API was called. + */ + on(event: 'Runtime.consoleAPICalled', listener: (message: InspectorNotification) => void): this; + /** + * Issued when object should be inspected (for example, as a result of inspect() command line API call). + */ + on(event: 'Runtime.inspectRequested', listener: (message: InspectorNotification) => void): this; + /** + * Fired when virtual machine parses script. This event is also fired for all known and uncollected scripts upon enabling debugger. + */ + on(event: 'Debugger.scriptParsed', listener: (message: InspectorNotification) => void): this; + /** + * Fired when virtual machine fails to parse the script. + */ + on(event: 'Debugger.scriptFailedToParse', listener: (message: InspectorNotification) => void): this; + /** + * Fired when breakpoint is resolved to an actual script and location. + */ + on(event: 'Debugger.breakpointResolved', listener: (message: InspectorNotification) => void): this; + /** + * Fired when the virtual machine stopped on breakpoint or exception or any other stop criteria. + */ + on(event: 'Debugger.paused', listener: (message: InspectorNotification) => void): this; + /** + * Fired when the virtual machine resumed execution. + */ + on(event: 'Debugger.resumed', listener: () => void): this; + /** + * Issued when new console message is added. + */ + on(event: 'Console.messageAdded', listener: (message: InspectorNotification) => void): this; + /** + * Sent when new profile recording is started using console.profile() call. + */ + on(event: 'Profiler.consoleProfileStarted', listener: (message: InspectorNotification) => void): this; + on(event: 'Profiler.consoleProfileFinished', listener: (message: InspectorNotification) => void): this; + on(event: 'HeapProfiler.addHeapSnapshotChunk', listener: (message: InspectorNotification) => void): this; + on(event: 'HeapProfiler.resetProfiles', listener: () => void): this; + on(event: 'HeapProfiler.reportHeapSnapshotProgress', listener: (message: InspectorNotification) => void): this; + /** + * If heap objects tracking has been started then backend regularly sends a current value for last seen object id and corresponding timestamp. If the were changes in the heap since last event then one or more heapStatsUpdate events will be sent before a new lastSeenObjectId event. + */ + on(event: 'HeapProfiler.lastSeenObjectId', listener: (message: InspectorNotification) => void): this; + /** + * If heap objects tracking has been started then backend may send update for one or more fragments + */ + on(event: 'HeapProfiler.heapStatsUpdate', listener: (message: InspectorNotification) => void): this; + /** + * Contains an bucket of collected trace events. + */ + on(event: 'NodeTracing.dataCollected', listener: (message: InspectorNotification) => void): this; + /** + * Signals that tracing is stopped and there is no trace buffers pending flush, all data were + * delivered via dataCollected events. 
+ */ + on(event: 'NodeTracing.tracingComplete', listener: () => void): this; + /** + * Issued when attached to a worker. + */ + on(event: 'NodeWorker.attachedToWorker', listener: (message: InspectorNotification) => void): this; + /** + * Issued when detached from the worker. + */ + on(event: 'NodeWorker.detachedFromWorker', listener: (message: InspectorNotification) => void): this; + /** + * Notifies about a new protocol message received from the session + * (session ID is provided in attachedToWorker notification). + */ + on(event: 'NodeWorker.receivedMessageFromWorker', listener: (message: InspectorNotification) => void): this; + /** + * Fired when page is about to send HTTP request. + */ + on(event: 'Network.requestWillBeSent', listener: (message: InspectorNotification) => void): this; + /** + * Fired when HTTP response is available. + */ + on(event: 'Network.responseReceived', listener: (message: InspectorNotification) => void): this; + on(event: 'Network.loadingFailed', listener: (message: InspectorNotification) => void): this; + on(event: 'Network.loadingFinished', listener: (message: InspectorNotification) => void): this; + /** + * This event is fired instead of `Runtime.executionContextDestroyed` when + * enabled. + * It is fired when the Node process finished all code execution and is + * waiting for all frontends to disconnect. + */ + on(event: 'NodeRuntime.waitingForDisconnect', listener: () => void): this; + /** + * This event is fired when the runtime is waiting for the debugger. For + * example, when inspector.waitingForDebugger is called + */ + on(event: 'NodeRuntime.waitingForDebugger', listener: () => void): this; + once(event: string, listener: (...args: any[]) => void): this; + /** + * Emitted when any notification from the V8 Inspector is received. + */ + once(event: 'inspectorNotification', listener: (message: InspectorNotification) => void): this; + /** + * Issued when new execution context is created. + */ + once(event: 'Runtime.executionContextCreated', listener: (message: InspectorNotification) => void): this; + /** + * Issued when execution context is destroyed. + */ + once(event: 'Runtime.executionContextDestroyed', listener: (message: InspectorNotification) => void): this; + /** + * Issued when all executionContexts were cleared in browser + */ + once(event: 'Runtime.executionContextsCleared', listener: () => void): this; + /** + * Issued when exception was thrown and unhandled. + */ + once(event: 'Runtime.exceptionThrown', listener: (message: InspectorNotification) => void): this; + /** + * Issued when unhandled exception was revoked. + */ + once(event: 'Runtime.exceptionRevoked', listener: (message: InspectorNotification) => void): this; + /** + * Issued when console API was called. + */ + once(event: 'Runtime.consoleAPICalled', listener: (message: InspectorNotification) => void): this; + /** + * Issued when object should be inspected (for example, as a result of inspect() command line API call). + */ + once(event: 'Runtime.inspectRequested', listener: (message: InspectorNotification) => void): this; + /** + * Fired when virtual machine parses script. This event is also fired for all known and uncollected scripts upon enabling debugger. + */ + once(event: 'Debugger.scriptParsed', listener: (message: InspectorNotification) => void): this; + /** + * Fired when virtual machine fails to parse the script. 
+ */ + once(event: 'Debugger.scriptFailedToParse', listener: (message: InspectorNotification) => void): this; + /** + * Fired when breakpoint is resolved to an actual script and location. + */ + once(event: 'Debugger.breakpointResolved', listener: (message: InspectorNotification) => void): this; + /** + * Fired when the virtual machine stopped on breakpoint or exception or any other stop criteria. + */ + once(event: 'Debugger.paused', listener: (message: InspectorNotification) => void): this; + /** + * Fired when the virtual machine resumed execution. + */ + once(event: 'Debugger.resumed', listener: () => void): this; + /** + * Issued when new console message is added. + */ + once(event: 'Console.messageAdded', listener: (message: InspectorNotification) => void): this; + /** + * Sent when new profile recording is started using console.profile() call. + */ + once(event: 'Profiler.consoleProfileStarted', listener: (message: InspectorNotification) => void): this; + once(event: 'Profiler.consoleProfileFinished', listener: (message: InspectorNotification) => void): this; + once(event: 'HeapProfiler.addHeapSnapshotChunk', listener: (message: InspectorNotification) => void): this; + once(event: 'HeapProfiler.resetProfiles', listener: () => void): this; + once(event: 'HeapProfiler.reportHeapSnapshotProgress', listener: (message: InspectorNotification) => void): this; + /** + * If heap objects tracking has been started then backend regularly sends a current value for last seen object id and corresponding timestamp. If the were changes in the heap since last event then one or more heapStatsUpdate events will be sent before a new lastSeenObjectId event. + */ + once(event: 'HeapProfiler.lastSeenObjectId', listener: (message: InspectorNotification) => void): this; + /** + * If heap objects tracking has been started then backend may send update for one or more fragments + */ + once(event: 'HeapProfiler.heapStatsUpdate', listener: (message: InspectorNotification) => void): this; + /** + * Contains an bucket of collected trace events. + */ + once(event: 'NodeTracing.dataCollected', listener: (message: InspectorNotification) => void): this; + /** + * Signals that tracing is stopped and there is no trace buffers pending flush, all data were + * delivered via dataCollected events. + */ + once(event: 'NodeTracing.tracingComplete', listener: () => void): this; + /** + * Issued when attached to a worker. + */ + once(event: 'NodeWorker.attachedToWorker', listener: (message: InspectorNotification) => void): this; + /** + * Issued when detached from the worker. + */ + once(event: 'NodeWorker.detachedFromWorker', listener: (message: InspectorNotification) => void): this; + /** + * Notifies about a new protocol message received from the session + * (session ID is provided in attachedToWorker notification). + */ + once(event: 'NodeWorker.receivedMessageFromWorker', listener: (message: InspectorNotification) => void): this; + /** + * Fired when page is about to send HTTP request. + */ + once(event: 'Network.requestWillBeSent', listener: (message: InspectorNotification) => void): this; + /** + * Fired when HTTP response is available. 
+ */ + once(event: 'Network.responseReceived', listener: (message: InspectorNotification) => void): this; + once(event: 'Network.loadingFailed', listener: (message: InspectorNotification) => void): this; + once(event: 'Network.loadingFinished', listener: (message: InspectorNotification) => void): this; + /** + * This event is fired instead of `Runtime.executionContextDestroyed` when + * enabled. + * It is fired when the Node process finished all code execution and is + * waiting for all frontends to disconnect. + */ + once(event: 'NodeRuntime.waitingForDisconnect', listener: () => void): this; + /** + * This event is fired when the runtime is waiting for the debugger. For + * example, when inspector.waitingForDebugger is called + */ + once(event: 'NodeRuntime.waitingForDebugger', listener: () => void): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + /** + * Emitted when any notification from the V8 Inspector is received. + */ + prependListener(event: 'inspectorNotification', listener: (message: InspectorNotification) => void): this; + /** + * Issued when new execution context is created. + */ + prependListener(event: 'Runtime.executionContextCreated', listener: (message: InspectorNotification) => void): this; + /** + * Issued when execution context is destroyed. + */ + prependListener(event: 'Runtime.executionContextDestroyed', listener: (message: InspectorNotification) => void): this; + /** + * Issued when all executionContexts were cleared in browser + */ + prependListener(event: 'Runtime.executionContextsCleared', listener: () => void): this; + /** + * Issued when exception was thrown and unhandled. + */ + prependListener(event: 'Runtime.exceptionThrown', listener: (message: InspectorNotification) => void): this; + /** + * Issued when unhandled exception was revoked. + */ + prependListener(event: 'Runtime.exceptionRevoked', listener: (message: InspectorNotification) => void): this; + /** + * Issued when console API was called. + */ + prependListener(event: 'Runtime.consoleAPICalled', listener: (message: InspectorNotification) => void): this; + /** + * Issued when object should be inspected (for example, as a result of inspect() command line API call). + */ + prependListener(event: 'Runtime.inspectRequested', listener: (message: InspectorNotification) => void): this; + /** + * Fired when virtual machine parses script. This event is also fired for all known and uncollected scripts upon enabling debugger. + */ + prependListener(event: 'Debugger.scriptParsed', listener: (message: InspectorNotification) => void): this; + /** + * Fired when virtual machine fails to parse the script. + */ + prependListener(event: 'Debugger.scriptFailedToParse', listener: (message: InspectorNotification) => void): this; + /** + * Fired when breakpoint is resolved to an actual script and location. + */ + prependListener(event: 'Debugger.breakpointResolved', listener: (message: InspectorNotification) => void): this; + /** + * Fired when the virtual machine stopped on breakpoint or exception or any other stop criteria. + */ + prependListener(event: 'Debugger.paused', listener: (message: InspectorNotification) => void): this; + /** + * Fired when the virtual machine resumed execution. + */ + prependListener(event: 'Debugger.resumed', listener: () => void): this; + /** + * Issued when new console message is added. 
+ */ + prependListener(event: 'Console.messageAdded', listener: (message: InspectorNotification) => void): this; + /** + * Sent when new profile recording is started using console.profile() call. + */ + prependListener(event: 'Profiler.consoleProfileStarted', listener: (message: InspectorNotification) => void): this; + prependListener(event: 'Profiler.consoleProfileFinished', listener: (message: InspectorNotification) => void): this; + prependListener(event: 'HeapProfiler.addHeapSnapshotChunk', listener: (message: InspectorNotification) => void): this; + prependListener(event: 'HeapProfiler.resetProfiles', listener: () => void): this; + prependListener(event: 'HeapProfiler.reportHeapSnapshotProgress', listener: (message: InspectorNotification) => void): this; + /** + * If heap objects tracking has been started then backend regularly sends a current value for last seen object id and corresponding timestamp. If the were changes in the heap since last event then one or more heapStatsUpdate events will be sent before a new lastSeenObjectId event. + */ + prependListener(event: 'HeapProfiler.lastSeenObjectId', listener: (message: InspectorNotification) => void): this; + /** + * If heap objects tracking has been started then backend may send update for one or more fragments + */ + prependListener(event: 'HeapProfiler.heapStatsUpdate', listener: (message: InspectorNotification) => void): this; + /** + * Contains an bucket of collected trace events. + */ + prependListener(event: 'NodeTracing.dataCollected', listener: (message: InspectorNotification) => void): this; + /** + * Signals that tracing is stopped and there is no trace buffers pending flush, all data were + * delivered via dataCollected events. + */ + prependListener(event: 'NodeTracing.tracingComplete', listener: () => void): this; + /** + * Issued when attached to a worker. + */ + prependListener(event: 'NodeWorker.attachedToWorker', listener: (message: InspectorNotification) => void): this; + /** + * Issued when detached from the worker. + */ + prependListener(event: 'NodeWorker.detachedFromWorker', listener: (message: InspectorNotification) => void): this; + /** + * Notifies about a new protocol message received from the session + * (session ID is provided in attachedToWorker notification). + */ + prependListener(event: 'NodeWorker.receivedMessageFromWorker', listener: (message: InspectorNotification) => void): this; + /** + * Fired when page is about to send HTTP request. + */ + prependListener(event: 'Network.requestWillBeSent', listener: (message: InspectorNotification) => void): this; + /** + * Fired when HTTP response is available. + */ + prependListener(event: 'Network.responseReceived', listener: (message: InspectorNotification) => void): this; + prependListener(event: 'Network.loadingFailed', listener: (message: InspectorNotification) => void): this; + prependListener(event: 'Network.loadingFinished', listener: (message: InspectorNotification) => void): this; + /** + * This event is fired instead of `Runtime.executionContextDestroyed` when + * enabled. + * It is fired when the Node process finished all code execution and is + * waiting for all frontends to disconnect. + */ + prependListener(event: 'NodeRuntime.waitingForDisconnect', listener: () => void): this; + /** + * This event is fired when the runtime is waiting for the debugger. 
For + * example, when inspector.waitingForDebugger is called + */ + prependListener(event: 'NodeRuntime.waitingForDebugger', listener: () => void): this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + /** + * Emitted when any notification from the V8 Inspector is received. + */ + prependOnceListener(event: 'inspectorNotification', listener: (message: InspectorNotification) => void): this; + /** + * Issued when new execution context is created. + */ + prependOnceListener(event: 'Runtime.executionContextCreated', listener: (message: InspectorNotification) => void): this; + /** + * Issued when execution context is destroyed. + */ + prependOnceListener(event: 'Runtime.executionContextDestroyed', listener: (message: InspectorNotification) => void): this; + /** + * Issued when all executionContexts were cleared in browser + */ + prependOnceListener(event: 'Runtime.executionContextsCleared', listener: () => void): this; + /** + * Issued when exception was thrown and unhandled. + */ + prependOnceListener(event: 'Runtime.exceptionThrown', listener: (message: InspectorNotification) => void): this; + /** + * Issued when unhandled exception was revoked. + */ + prependOnceListener(event: 'Runtime.exceptionRevoked', listener: (message: InspectorNotification) => void): this; + /** + * Issued when console API was called. + */ + prependOnceListener(event: 'Runtime.consoleAPICalled', listener: (message: InspectorNotification) => void): this; + /** + * Issued when object should be inspected (for example, as a result of inspect() command line API call). + */ + prependOnceListener(event: 'Runtime.inspectRequested', listener: (message: InspectorNotification) => void): this; + /** + * Fired when virtual machine parses script. This event is also fired for all known and uncollected scripts upon enabling debugger. + */ + prependOnceListener(event: 'Debugger.scriptParsed', listener: (message: InspectorNotification) => void): this; + /** + * Fired when virtual machine fails to parse the script. + */ + prependOnceListener(event: 'Debugger.scriptFailedToParse', listener: (message: InspectorNotification) => void): this; + /** + * Fired when breakpoint is resolved to an actual script and location. + */ + prependOnceListener(event: 'Debugger.breakpointResolved', listener: (message: InspectorNotification) => void): this; + /** + * Fired when the virtual machine stopped on breakpoint or exception or any other stop criteria. + */ + prependOnceListener(event: 'Debugger.paused', listener: (message: InspectorNotification) => void): this; + /** + * Fired when the virtual machine resumed execution. + */ + prependOnceListener(event: 'Debugger.resumed', listener: () => void): this; + /** + * Issued when new console message is added. + */ + prependOnceListener(event: 'Console.messageAdded', listener: (message: InspectorNotification) => void): this; + /** + * Sent when new profile recording is started using console.profile() call. 
+ */ + prependOnceListener(event: 'Profiler.consoleProfileStarted', listener: (message: InspectorNotification) => void): this; + prependOnceListener(event: 'Profiler.consoleProfileFinished', listener: (message: InspectorNotification) => void): this; + prependOnceListener(event: 'HeapProfiler.addHeapSnapshotChunk', listener: (message: InspectorNotification) => void): this; + prependOnceListener(event: 'HeapProfiler.resetProfiles', listener: () => void): this; + prependOnceListener(event: 'HeapProfiler.reportHeapSnapshotProgress', listener: (message: InspectorNotification) => void): this; + /** + * If heap objects tracking has been started then backend regularly sends a current value for last seen object id and corresponding timestamp. If the were changes in the heap since last event then one or more heapStatsUpdate events will be sent before a new lastSeenObjectId event. + */ + prependOnceListener(event: 'HeapProfiler.lastSeenObjectId', listener: (message: InspectorNotification) => void): this; + /** + * If heap objects tracking has been started then backend may send update for one or more fragments + */ + prependOnceListener(event: 'HeapProfiler.heapStatsUpdate', listener: (message: InspectorNotification) => void): this; + /** + * Contains an bucket of collected trace events. + */ + prependOnceListener(event: 'NodeTracing.dataCollected', listener: (message: InspectorNotification) => void): this; + /** + * Signals that tracing is stopped and there is no trace buffers pending flush, all data were + * delivered via dataCollected events. + */ + prependOnceListener(event: 'NodeTracing.tracingComplete', listener: () => void): this; + /** + * Issued when attached to a worker. + */ + prependOnceListener(event: 'NodeWorker.attachedToWorker', listener: (message: InspectorNotification) => void): this; + /** + * Issued when detached from the worker. + */ + prependOnceListener(event: 'NodeWorker.detachedFromWorker', listener: (message: InspectorNotification) => void): this; + /** + * Notifies about a new protocol message received from the session + * (session ID is provided in attachedToWorker notification). + */ + prependOnceListener(event: 'NodeWorker.receivedMessageFromWorker', listener: (message: InspectorNotification) => void): this; + /** + * Fired when page is about to send HTTP request. + */ + prependOnceListener(event: 'Network.requestWillBeSent', listener: (message: InspectorNotification) => void): this; + /** + * Fired when HTTP response is available. + */ + prependOnceListener(event: 'Network.responseReceived', listener: (message: InspectorNotification) => void): this; + prependOnceListener(event: 'Network.loadingFailed', listener: (message: InspectorNotification) => void): this; + prependOnceListener(event: 'Network.loadingFinished', listener: (message: InspectorNotification) => void): this; + /** + * This event is fired instead of `Runtime.executionContextDestroyed` when + * enabled. + * It is fired when the Node process finished all code execution and is + * waiting for all frontends to disconnect. + */ + prependOnceListener(event: 'NodeRuntime.waitingForDisconnect', listener: () => void): this; + /** + * This event is fired when the runtime is waiting for the debugger. For + * example, when inspector.waitingForDebugger is called + */ + prependOnceListener(event: 'NodeRuntime.waitingForDebugger', listener: () => void): this; + } + + /** + * Activate inspector on host and port. Equivalent to `node --inspect=[[host:]port]`, but can be done programmatically after node has + * started. 
+ * + * If wait is `true`, will block until a client has connected to the inspect port + * and flow control has been passed to the debugger client. + * + * See the [security warning](https://nodejs.org/docs/latest-v22.x/api/cli.html#warning-binding-inspector-to-a-public-ipport-combination-is-insecure) + * regarding the `host` parameter usage. + * @param port Port to listen on for inspector connections. Defaults to what was specified on the CLI. + * @param host Host to listen on for inspector connections. Defaults to what was specified on the CLI. + * @param wait Block until a client has connected. Defaults to what was specified on the CLI. + * @returns Disposable that calls `inspector.close()`. + */ + function open(port?: number, host?: string, wait?: boolean): Disposable; + + /** + * Deactivate the inspector. Blocks until there are no active connections. + */ + function close(): void; + + /** + * Return the URL of the active inspector, or `undefined` if there is none. + * + * ```console + * $ node --inspect -p 'inspector.url()' + * Debugger listening on ws://127.0.0.1:9229/166e272e-7a30-4d09-97ce-f1c012b43c34 + * For help, see: https://nodejs.org/en/docs/inspector + * ws://127.0.0.1:9229/166e272e-7a30-4d09-97ce-f1c012b43c34 + * + * $ node --inspect=localhost:3000 -p 'inspector.url()' + * Debugger listening on ws://localhost:3000/51cf8d0e-3c36-4c59-8efd-54519839e56a + * For help, see: https://nodejs.org/en/docs/inspector + * ws://localhost:3000/51cf8d0e-3c36-4c59-8efd-54519839e56a + * + * $ node -p 'inspector.url()' + * undefined + * ``` + */ + function url(): string | undefined; + + /** + * Blocks until a client (existing or connected later) has sent `Runtime.runIfWaitingForDebugger` command. + * + * An exception will be thrown if there is no active inspector. + * @since v12.7.0 + */ + function waitForDebugger(): void; + + // These methods are exposed by the V8 inspector console API (inspector/v8-console.h). + // The method signatures differ from those of the Node.js console, and are deliberately + // typed permissively. + interface InspectorConsole { + debug(...data: any[]): void; + error(...data: any[]): void; + info(...data: any[]): void; + log(...data: any[]): void; + warn(...data: any[]): void; + dir(...data: any[]): void; + dirxml(...data: any[]): void; + table(...data: any[]): void; + trace(...data: any[]): void; + group(...data: any[]): void; + groupCollapsed(...data: any[]): void; + groupEnd(...data: any[]): void; + clear(...data: any[]): void; + count(label?: any): void; + countReset(label?: any): void; + assert(value?: any, ...data: any[]): void; + profile(label?: any): void; + profileEnd(label?: any): void; + time(label?: any): void; + timeLog(label?: any): void; + timeStamp(label?: any): void; + } + + /** + * An object to send messages to the remote inspector console. + * @since v11.0.0 + */ + const console: InspectorConsole; + + // DevTools protocol event broadcast methods + namespace Network { + /** + * This feature is only available with the `--experimental-network-inspection` flag enabled. + * + * Broadcasts the `Network.requestWillBeSent` event to connected frontends. This event indicates that + * the application is about to send an HTTP request. + * @since v22.6.0 + * @experimental + */ + function requestWillBeSent(params: RequestWillBeSentEventDataType): void; + /** + * This feature is only available with the `--experimental-network-inspection` flag enabled. + * + * Broadcasts the `Network.responseReceived` event to connected frontends. 
This event indicates that + * HTTP response is available. + * @since v22.6.0 + * @experimental + */ + function responseReceived(params: ResponseReceivedEventDataType): void; + /** + * This feature is only available with the `--experimental-network-inspection` flag enabled. + * + * Broadcasts the `Network.loadingFinished` event to connected frontends. This event indicates that + * HTTP request has finished loading. + * @since v22.6.0 + * @experimental + */ + function loadingFinished(params: LoadingFinishedEventDataType): void; + /** + * This feature is only available with the `--experimental-network-inspection` flag enabled. + * + * Broadcasts the `Network.loadingFailed` event to connected frontends. This event indicates that + * HTTP request has failed to load. + * @since v22.7.0 + * @experimental + */ + function loadingFailed(params: LoadingFailedEventDataType): void; + } +} + +/** + * The `node:inspector` module provides an API for interacting with the V8 + * inspector. + */ +declare module 'node:inspector' { + export * from 'inspector'; +} + +/** + * The `node:inspector/promises` module provides an API for interacting with the V8 + * inspector. + * @see [source](https://github.com/nodejs/node/blob/v22.x/lib/inspector/promises.js) + * @since v19.0.0 + */ +declare module 'inspector/promises' { + import EventEmitter = require('node:events'); + import { + open, + close, + url, + waitForDebugger, + console, + InspectorNotification, + Schema, + Runtime, + Debugger, + Console, + Profiler, + HeapProfiler, + NodeTracing, + NodeWorker, + Network, + NodeRuntime, + } from 'inspector'; + + /** + * The `inspector.Session` is used for dispatching messages to the V8 inspector + * back-end and receiving message responses and notifications. + * @since v19.0.0 + */ + class Session extends EventEmitter { + /** + * Create a new instance of the `inspector.Session` class. + * The inspector session needs to be connected through `session.connect()` before the messages can be dispatched to the inspector backend. + */ + constructor(); + + /** + * Connects a session to the inspector back-end. + */ + connect(): void; + + /** + * Connects a session to the inspector back-end. + * An exception will be thrown if this API was not called on a Worker thread. + */ + connectToMainThread(): void; + + /** + * Immediately close the session. All pending message callbacks will be called with an error. + * `session.connect()` will need to be called to be able to send messages again. + * Reconnected session will lose all inspector state, such as enabled agents or configured breakpoints. + */ + disconnect(): void; + + /** + * Posts a message to the inspector back-end. + * + * ```js + * import { Session } from 'node:inspector/promises'; + * try { + * const session = new Session(); + * session.connect(); + * const result = await session.post('Runtime.evaluate', { expression: '2 + 2' }); + * console.log(result); + * } catch (error) { + * console.error(error); + * } + * // Output: { result: { type: 'number', value: 4, description: '4' } } + * ``` + * + * The latest version of the V8 inspector protocol is published on the + * [Chrome DevTools Protocol Viewer](https://chromedevtools.github.io/devtools-protocol/v8/). + * + * Node.js inspector supports all the Chrome DevTools Protocol domains declared + * by V8. Chrome DevTools Protocol domain provides an interface for interacting + * with one of the runtime agents used to inspect the application state and listen + * to the run-time events. 
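Before the typed `post()` overloads that follow, a minimal sketch of the module-level functions declared earlier in `node:inspector` (`open()`, `url()`, `waitForDebugger()`, `close()`); the port and host values are illustrative:

```js
// Sketch: activate the inspector programmatically, print its WebSocket URL,
// then shut it down. Binding to anything other than localhost is insecure
// (see the security warning referenced above).
import inspector from 'node:inspector';

inspector.open(9229, '127.0.0.1');   // like --inspect=127.0.0.1:9229, but at runtime
console.log(inspector.url());        // e.g. ws://127.0.0.1:9229/<uuid>

// inspector.waitForDebugger();      // would block until a client attaches

inspector.close();                   // blocks until all connections are closed
console.log(inspector.url());        // undefined once deactivated
```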
+ */ + post(method: string, params?: object): Promise; + /** + * Returns supported domains. + */ + post(method: 'Schema.getDomains'): Promise; + /** + * Evaluates expression on global object. + */ + post(method: 'Runtime.evaluate', params?: Runtime.EvaluateParameterType): Promise; + /** + * Add handler to promise with given promise object id. + */ + post(method: 'Runtime.awaitPromise', params?: Runtime.AwaitPromiseParameterType): Promise; + /** + * Calls function with given declaration on the given object. Object group of the result is inherited from the target object. + */ + post(method: 'Runtime.callFunctionOn', params?: Runtime.CallFunctionOnParameterType): Promise; + /** + * Returns properties of a given object. Object group of the result is inherited from the target object. + */ + post(method: 'Runtime.getProperties', params?: Runtime.GetPropertiesParameterType): Promise; + /** + * Releases remote object with given id. + */ + post(method: 'Runtime.releaseObject', params?: Runtime.ReleaseObjectParameterType): Promise; + /** + * Releases all remote objects that belong to a given group. + */ + post(method: 'Runtime.releaseObjectGroup', params?: Runtime.ReleaseObjectGroupParameterType): Promise; + /** + * Tells inspected instance to run if it was waiting for debugger to attach. + */ + post(method: 'Runtime.runIfWaitingForDebugger'): Promise; + /** + * Enables reporting of execution contexts creation by means of executionContextCreated event. When the reporting gets enabled the event will be sent immediately for each existing execution context. + */ + post(method: 'Runtime.enable'): Promise; + /** + * Disables reporting of execution contexts creation. + */ + post(method: 'Runtime.disable'): Promise; + /** + * Discards collected exceptions and console API calls. + */ + post(method: 'Runtime.discardConsoleEntries'): Promise; + /** + * @experimental + */ + post(method: 'Runtime.setCustomObjectFormatterEnabled', params?: Runtime.SetCustomObjectFormatterEnabledParameterType): Promise; + /** + * Compiles expression. + */ + post(method: 'Runtime.compileScript', params?: Runtime.CompileScriptParameterType): Promise; + /** + * Runs script with given id in a given context. + */ + post(method: 'Runtime.runScript', params?: Runtime.RunScriptParameterType): Promise; + post(method: 'Runtime.queryObjects', params?: Runtime.QueryObjectsParameterType): Promise; + /** + * Returns all let, const and class variables from global scope. + */ + post(method: 'Runtime.globalLexicalScopeNames', params?: Runtime.GlobalLexicalScopeNamesParameterType): Promise; + /** + * Enables debugger for the given page. Clients should not assume that the debugging has been enabled until the result for this command is received. + */ + post(method: 'Debugger.enable'): Promise; + /** + * Disables debugger for given page. + */ + post(method: 'Debugger.disable'): Promise; + /** + * Activates / deactivates all breakpoints on the page. + */ + post(method: 'Debugger.setBreakpointsActive', params?: Debugger.SetBreakpointsActiveParameterType): Promise; + /** + * Makes page not interrupt on any pauses (breakpoint, exception, dom exception etc). + */ + post(method: 'Debugger.setSkipAllPauses', params?: Debugger.SetSkipAllPausesParameterType): Promise; + /** + * Sets JavaScript breakpoint at given location specified either by URL or URL regex. Once this command is issued, all existing parsed scripts will have breakpoints resolved and returned in locations property. 
Further matching script parsing will result in subsequent breakpointResolved events issued. This logical breakpoint will survive page reloads. + */ + post(method: 'Debugger.setBreakpointByUrl', params?: Debugger.SetBreakpointByUrlParameterType): Promise; + /** + * Sets JavaScript breakpoint at a given location. + */ + post(method: 'Debugger.setBreakpoint', params?: Debugger.SetBreakpointParameterType): Promise; + /** + * Removes JavaScript breakpoint. + */ + post(method: 'Debugger.removeBreakpoint', params?: Debugger.RemoveBreakpointParameterType): Promise; + /** + * Returns possible locations for breakpoint. scriptId in start and end range locations should be the same. + */ + post(method: 'Debugger.getPossibleBreakpoints', params?: Debugger.GetPossibleBreakpointsParameterType): Promise; + /** + * Continues execution until specific location is reached. + */ + post(method: 'Debugger.continueToLocation', params?: Debugger.ContinueToLocationParameterType): Promise; + /** + * @experimental + */ + post(method: 'Debugger.pauseOnAsyncCall', params?: Debugger.PauseOnAsyncCallParameterType): Promise; + /** + * Steps over the statement. + */ + post(method: 'Debugger.stepOver'): Promise; + /** + * Steps into the function call. + */ + post(method: 'Debugger.stepInto', params?: Debugger.StepIntoParameterType): Promise; + /** + * Steps out of the function call. + */ + post(method: 'Debugger.stepOut'): Promise; + /** + * Stops on the next JavaScript statement. + */ + post(method: 'Debugger.pause'): Promise; + /** + * This method is deprecated - use Debugger.stepInto with breakOnAsyncCall and Debugger.pauseOnAsyncTask instead. Steps into next scheduled async task if any is scheduled before next pause. Returns success when async task is actually scheduled, returns error if no task were scheduled or another scheduleStepIntoAsync was called. + * @experimental + */ + post(method: 'Debugger.scheduleStepIntoAsync'): Promise; + /** + * Resumes JavaScript execution. + */ + post(method: 'Debugger.resume'): Promise; + /** + * Returns stack trace with given stackTraceId. + * @experimental + */ + post(method: 'Debugger.getStackTrace', params?: Debugger.GetStackTraceParameterType): Promise; + /** + * Searches for given string in script content. + */ + post(method: 'Debugger.searchInContent', params?: Debugger.SearchInContentParameterType): Promise; + /** + * Edits JavaScript source live. + */ + post(method: 'Debugger.setScriptSource', params?: Debugger.SetScriptSourceParameterType): Promise; + /** + * Restarts particular call frame from the beginning. + */ + post(method: 'Debugger.restartFrame', params?: Debugger.RestartFrameParameterType): Promise; + /** + * Returns source for the script with given id. + */ + post(method: 'Debugger.getScriptSource', params?: Debugger.GetScriptSourceParameterType): Promise; + /** + * Defines pause on exceptions state. Can be set to stop on all exceptions, uncaught exceptions or no exceptions. Initial pause on exceptions state is none. + */ + post(method: 'Debugger.setPauseOnExceptions', params?: Debugger.SetPauseOnExceptionsParameterType): Promise; + /** + * Evaluates expression on a given call frame. + */ + post(method: 'Debugger.evaluateOnCallFrame', params?: Debugger.EvaluateOnCallFrameParameterType): Promise; + /** + * Changes value of variable in a callframe. Object-based scopes are not supported and must be mutated manually. + */ + post(method: 'Debugger.setVariableValue', params?: Debugger.SetVariableValueParameterType): Promise; + /** + * Changes return value in top frame. 
Available only at return break position. + * @experimental + */ + post(method: 'Debugger.setReturnValue', params?: Debugger.SetReturnValueParameterType): Promise; + /** + * Enables or disables async call stacks tracking. + */ + post(method: 'Debugger.setAsyncCallStackDepth', params?: Debugger.SetAsyncCallStackDepthParameterType): Promise; + /** + * Replace previous blackbox patterns with passed ones. Forces backend to skip stepping/pausing in scripts with url matching one of the patterns. VM will try to leave blackboxed script by performing 'step in' several times, finally resorting to 'step out' if unsuccessful. + * @experimental + */ + post(method: 'Debugger.setBlackboxPatterns', params?: Debugger.SetBlackboxPatternsParameterType): Promise; + /** + * Makes backend skip steps in the script in blackboxed ranges. VM will try leave blacklisted scripts by performing 'step in' several times, finally resorting to 'step out' if unsuccessful. Positions array contains positions where blackbox state is changed. First interval isn't blackboxed. Array should be sorted. + * @experimental + */ + post(method: 'Debugger.setBlackboxedRanges', params?: Debugger.SetBlackboxedRangesParameterType): Promise; + /** + * Enables console domain, sends the messages collected so far to the client by means of the messageAdded notification. + */ + post(method: 'Console.enable'): Promise; + /** + * Disables console domain, prevents further console messages from being reported to the client. + */ + post(method: 'Console.disable'): Promise; + /** + * Does nothing. + */ + post(method: 'Console.clearMessages'): Promise; + post(method: 'Profiler.enable'): Promise; + post(method: 'Profiler.disable'): Promise; + /** + * Changes CPU profiler sampling interval. Must be called before CPU profiles recording started. + */ + post(method: 'Profiler.setSamplingInterval', params?: Profiler.SetSamplingIntervalParameterType): Promise; + post(method: 'Profiler.start'): Promise; + post(method: 'Profiler.stop'): Promise; + /** + * Enable precise code coverage. Coverage data for JavaScript executed before enabling precise code coverage may be incomplete. Enabling prevents running optimized code and resets execution counters. + */ + post(method: 'Profiler.startPreciseCoverage', params?: Profiler.StartPreciseCoverageParameterType): Promise; + /** + * Disable precise code coverage. Disabling releases unnecessary execution count records and allows executing optimized code. + */ + post(method: 'Profiler.stopPreciseCoverage'): Promise; + /** + * Collect coverage data for the current isolate, and resets execution counters. Precise code coverage needs to have started. + */ + post(method: 'Profiler.takePreciseCoverage'): Promise; + /** + * Collect coverage data for the current isolate. The coverage data may be incomplete due to garbage collection. 
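The `Profiler` calls above compose into a short CPU-profiling routine. A sketch using the promise-based session; the workload function is illustrative:

```js
// Sketch: record a brief CPU profile through node:inspector/promises.
import { Session } from 'node:inspector/promises';

function busyWork() {
  let acc = 0;
  for (let i = 0; i < 1e7; i++) acc += Math.sqrt(i);
  return acc;
}

const session = new Session();
session.connect();

await session.post('Profiler.enable');
await session.post('Profiler.start');
busyWork();
const { profile } = await session.post('Profiler.stop');
console.log('profile nodes:', profile.nodes.length);

await session.post('Profiler.disable');
session.disconnect();
```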
+ */ + post(method: 'Profiler.getBestEffortCoverage'): Promise; + post(method: 'HeapProfiler.enable'): Promise; + post(method: 'HeapProfiler.disable'): Promise; + post(method: 'HeapProfiler.startTrackingHeapObjects', params?: HeapProfiler.StartTrackingHeapObjectsParameterType): Promise; + post(method: 'HeapProfiler.stopTrackingHeapObjects', params?: HeapProfiler.StopTrackingHeapObjectsParameterType): Promise; + post(method: 'HeapProfiler.takeHeapSnapshot', params?: HeapProfiler.TakeHeapSnapshotParameterType): Promise; + post(method: 'HeapProfiler.collectGarbage'): Promise; + post(method: 'HeapProfiler.getObjectByHeapObjectId', params?: HeapProfiler.GetObjectByHeapObjectIdParameterType): Promise; + /** + * Enables console to refer to the node with given id via $x (see Command Line API for more details $x functions). + */ + post(method: 'HeapProfiler.addInspectedHeapObject', params?: HeapProfiler.AddInspectedHeapObjectParameterType): Promise; + post(method: 'HeapProfiler.getHeapObjectId', params?: HeapProfiler.GetHeapObjectIdParameterType): Promise; + post(method: 'HeapProfiler.startSampling', params?: HeapProfiler.StartSamplingParameterType): Promise; + post(method: 'HeapProfiler.stopSampling'): Promise; + post(method: 'HeapProfiler.getSamplingProfile'): Promise; + /** + * Gets supported tracing categories. + */ + post(method: 'NodeTracing.getCategories'): Promise; + /** + * Start trace events collection. + */ + post(method: 'NodeTracing.start', params?: NodeTracing.StartParameterType): Promise; + /** + * Stop trace events collection. Remaining collected events will be sent as a sequence of + * dataCollected events followed by tracingComplete event. + */ + post(method: 'NodeTracing.stop'): Promise; + /** + * Sends protocol message over session with given id. + */ + post(method: 'NodeWorker.sendMessageToWorker', params?: NodeWorker.SendMessageToWorkerParameterType): Promise; + /** + * Instructs the inspector to attach to running workers. Will also attach to new workers + * as they start + */ + post(method: 'NodeWorker.enable', params?: NodeWorker.EnableParameterType): Promise; + /** + * Detaches from all running workers and disables attaching to new workers as they are started. + */ + post(method: 'NodeWorker.disable'): Promise; + /** + * Detached from the worker with given sessionId. + */ + post(method: 'NodeWorker.detach', params?: NodeWorker.DetachParameterType): Promise; + /** + * Disables network tracking, prevents network events from being sent to the client. + */ + post(method: 'Network.disable'): Promise; + /** + * Enables network tracking, network events will now be delivered to the client. + */ + post(method: 'Network.enable'): Promise; + /** + * Enable the NodeRuntime events except by `NodeRuntime.waitingForDisconnect`. + */ + post(method: 'NodeRuntime.enable'): Promise; + /** + * Disable NodeRuntime events + */ + post(method: 'NodeRuntime.disable'): Promise; + /** + * Enable the `NodeRuntime.waitingForDisconnect`. + */ + post(method: 'NodeRuntime.notifyWhenWaitingForDisconnect', params?: NodeRuntime.NotifyWhenWaitingForDisconnectParameterType): Promise; + + addListener(event: string, listener: (...args: any[]) => void): this; + /** + * Emitted when any notification from the V8 Inspector is received. + */ + addListener(event: 'inspectorNotification', listener: (message: InspectorNotification) => void): this; + /** + * Issued when new execution context is created. 
+ */ + addListener(event: 'Runtime.executionContextCreated', listener: (message: InspectorNotification) => void): this; + /** + * Issued when execution context is destroyed. + */ + addListener(event: 'Runtime.executionContextDestroyed', listener: (message: InspectorNotification) => void): this; + /** + * Issued when all executionContexts were cleared in browser + */ + addListener(event: 'Runtime.executionContextsCleared', listener: () => void): this; + /** + * Issued when exception was thrown and unhandled. + */ + addListener(event: 'Runtime.exceptionThrown', listener: (message: InspectorNotification) => void): this; + /** + * Issued when unhandled exception was revoked. + */ + addListener(event: 'Runtime.exceptionRevoked', listener: (message: InspectorNotification) => void): this; + /** + * Issued when console API was called. + */ + addListener(event: 'Runtime.consoleAPICalled', listener: (message: InspectorNotification) => void): this; + /** + * Issued when object should be inspected (for example, as a result of inspect() command line API call). + */ + addListener(event: 'Runtime.inspectRequested', listener: (message: InspectorNotification) => void): this; + /** + * Fired when virtual machine parses script. This event is also fired for all known and uncollected scripts upon enabling debugger. + */ + addListener(event: 'Debugger.scriptParsed', listener: (message: InspectorNotification) => void): this; + /** + * Fired when virtual machine fails to parse the script. + */ + addListener(event: 'Debugger.scriptFailedToParse', listener: (message: InspectorNotification) => void): this; + /** + * Fired when breakpoint is resolved to an actual script and location. + */ + addListener(event: 'Debugger.breakpointResolved', listener: (message: InspectorNotification) => void): this; + /** + * Fired when the virtual machine stopped on breakpoint or exception or any other stop criteria. + */ + addListener(event: 'Debugger.paused', listener: (message: InspectorNotification) => void): this; + /** + * Fired when the virtual machine resumed execution. + */ + addListener(event: 'Debugger.resumed', listener: () => void): this; + /** + * Issued when new console message is added. + */ + addListener(event: 'Console.messageAdded', listener: (message: InspectorNotification) => void): this; + /** + * Sent when new profile recording is started using console.profile() call. + */ + addListener(event: 'Profiler.consoleProfileStarted', listener: (message: InspectorNotification) => void): this; + addListener(event: 'Profiler.consoleProfileFinished', listener: (message: InspectorNotification) => void): this; + addListener(event: 'HeapProfiler.addHeapSnapshotChunk', listener: (message: InspectorNotification) => void): this; + addListener(event: 'HeapProfiler.resetProfiles', listener: () => void): this; + addListener(event: 'HeapProfiler.reportHeapSnapshotProgress', listener: (message: InspectorNotification) => void): this; + /** + * If heap objects tracking has been started then backend regularly sends a current value for last seen object id and corresponding timestamp. If the were changes in the heap since last event then one or more heapStatsUpdate events will be sent before a new lastSeenObjectId event. 
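Combining the `HeapProfiler.takeHeapSnapshot` command declared earlier with the `HeapProfiler.addHeapSnapshotChunk` notification above gives the usual way to write a snapshot to disk. A sketch; the output filename is illustrative:

```js
// Sketch: stream a heap snapshot to a file while takeHeapSnapshot runs.
import { Session } from 'node:inspector/promises';
import fs from 'node:fs';

const session = new Session();
session.connect();

const out = fs.createWriteStream('app.heapsnapshot');
session.on('HeapProfiler.addHeapSnapshotChunk', (message) => {
  out.write(message.params.chunk);
});

await session.post('HeapProfiler.takeHeapSnapshot');
out.end();
session.disconnect();
```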
+ */ + addListener(event: 'HeapProfiler.lastSeenObjectId', listener: (message: InspectorNotification) => void): this; + /** + * If heap objects tracking has been started then backend may send update for one or more fragments + */ + addListener(event: 'HeapProfiler.heapStatsUpdate', listener: (message: InspectorNotification) => void): this; + /** + * Contains an bucket of collected trace events. + */ + addListener(event: 'NodeTracing.dataCollected', listener: (message: InspectorNotification) => void): this; + /** + * Signals that tracing is stopped and there is no trace buffers pending flush, all data were + * delivered via dataCollected events. + */ + addListener(event: 'NodeTracing.tracingComplete', listener: () => void): this; + /** + * Issued when attached to a worker. + */ + addListener(event: 'NodeWorker.attachedToWorker', listener: (message: InspectorNotification) => void): this; + /** + * Issued when detached from the worker. + */ + addListener(event: 'NodeWorker.detachedFromWorker', listener: (message: InspectorNotification) => void): this; + /** + * Notifies about a new protocol message received from the session + * (session ID is provided in attachedToWorker notification). + */ + addListener(event: 'NodeWorker.receivedMessageFromWorker', listener: (message: InspectorNotification) => void): this; + /** + * Fired when page is about to send HTTP request. + */ + addListener(event: 'Network.requestWillBeSent', listener: (message: InspectorNotification) => void): this; + /** + * Fired when HTTP response is available. + */ + addListener(event: 'Network.responseReceived', listener: (message: InspectorNotification) => void): this; + addListener(event: 'Network.loadingFailed', listener: (message: InspectorNotification) => void): this; + addListener(event: 'Network.loadingFinished', listener: (message: InspectorNotification) => void): this; + /** + * This event is fired instead of `Runtime.executionContextDestroyed` when + * enabled. + * It is fired when the Node process finished all code execution and is + * waiting for all frontends to disconnect. + */ + addListener(event: 'NodeRuntime.waitingForDisconnect', listener: () => void): this; + /** + * This event is fired when the runtime is waiting for the debugger. 
For + * example, when inspector.waitingForDebugger is called + */ + addListener(event: 'NodeRuntime.waitingForDebugger', listener: () => void): this; + emit(event: string | symbol, ...args: any[]): boolean; + emit(event: 'inspectorNotification', message: InspectorNotification): boolean; + emit(event: 'Runtime.executionContextCreated', message: InspectorNotification): boolean; + emit(event: 'Runtime.executionContextDestroyed', message: InspectorNotification): boolean; + emit(event: 'Runtime.executionContextsCleared'): boolean; + emit(event: 'Runtime.exceptionThrown', message: InspectorNotification): boolean; + emit(event: 'Runtime.exceptionRevoked', message: InspectorNotification): boolean; + emit(event: 'Runtime.consoleAPICalled', message: InspectorNotification): boolean; + emit(event: 'Runtime.inspectRequested', message: InspectorNotification): boolean; + emit(event: 'Debugger.scriptParsed', message: InspectorNotification): boolean; + emit(event: 'Debugger.scriptFailedToParse', message: InspectorNotification): boolean; + emit(event: 'Debugger.breakpointResolved', message: InspectorNotification): boolean; + emit(event: 'Debugger.paused', message: InspectorNotification): boolean; + emit(event: 'Debugger.resumed'): boolean; + emit(event: 'Console.messageAdded', message: InspectorNotification): boolean; + emit(event: 'Profiler.consoleProfileStarted', message: InspectorNotification): boolean; + emit(event: 'Profiler.consoleProfileFinished', message: InspectorNotification): boolean; + emit(event: 'HeapProfiler.addHeapSnapshotChunk', message: InspectorNotification): boolean; + emit(event: 'HeapProfiler.resetProfiles'): boolean; + emit(event: 'HeapProfiler.reportHeapSnapshotProgress', message: InspectorNotification): boolean; + emit(event: 'HeapProfiler.lastSeenObjectId', message: InspectorNotification): boolean; + emit(event: 'HeapProfiler.heapStatsUpdate', message: InspectorNotification): boolean; + emit(event: 'NodeTracing.dataCollected', message: InspectorNotification): boolean; + emit(event: 'NodeTracing.tracingComplete'): boolean; + emit(event: 'NodeWorker.attachedToWorker', message: InspectorNotification): boolean; + emit(event: 'NodeWorker.detachedFromWorker', message: InspectorNotification): boolean; + emit(event: 'NodeWorker.receivedMessageFromWorker', message: InspectorNotification): boolean; + emit(event: 'Network.requestWillBeSent', message: InspectorNotification): boolean; + emit(event: 'Network.responseReceived', message: InspectorNotification): boolean; + emit(event: 'Network.loadingFailed', message: InspectorNotification): boolean; + emit(event: 'Network.loadingFinished', message: InspectorNotification): boolean; + emit(event: 'NodeRuntime.waitingForDisconnect'): boolean; + emit(event: 'NodeRuntime.waitingForDebugger'): boolean; + on(event: string, listener: (...args: any[]) => void): this; + /** + * Emitted when any notification from the V8 Inspector is received. + */ + on(event: 'inspectorNotification', listener: (message: InspectorNotification) => void): this; + /** + * Issued when new execution context is created. + */ + on(event: 'Runtime.executionContextCreated', listener: (message: InspectorNotification) => void): this; + /** + * Issued when execution context is destroyed. 
+ */ + on(event: 'Runtime.executionContextDestroyed', listener: (message: InspectorNotification) => void): this; + /** + * Issued when all executionContexts were cleared in browser + */ + on(event: 'Runtime.executionContextsCleared', listener: () => void): this; + /** + * Issued when exception was thrown and unhandled. + */ + on(event: 'Runtime.exceptionThrown', listener: (message: InspectorNotification) => void): this; + /** + * Issued when unhandled exception was revoked. + */ + on(event: 'Runtime.exceptionRevoked', listener: (message: InspectorNotification) => void): this; + /** + * Issued when console API was called. + */ + on(event: 'Runtime.consoleAPICalled', listener: (message: InspectorNotification) => void): this; + /** + * Issued when object should be inspected (for example, as a result of inspect() command line API call). + */ + on(event: 'Runtime.inspectRequested', listener: (message: InspectorNotification) => void): this; + /** + * Fired when virtual machine parses script. This event is also fired for all known and uncollected scripts upon enabling debugger. + */ + on(event: 'Debugger.scriptParsed', listener: (message: InspectorNotification) => void): this; + /** + * Fired when virtual machine fails to parse the script. + */ + on(event: 'Debugger.scriptFailedToParse', listener: (message: InspectorNotification) => void): this; + /** + * Fired when breakpoint is resolved to an actual script and location. + */ + on(event: 'Debugger.breakpointResolved', listener: (message: InspectorNotification) => void): this; + /** + * Fired when the virtual machine stopped on breakpoint or exception or any other stop criteria. + */ + on(event: 'Debugger.paused', listener: (message: InspectorNotification) => void): this; + /** + * Fired when the virtual machine resumed execution. + */ + on(event: 'Debugger.resumed', listener: () => void): this; + /** + * Issued when new console message is added. + */ + on(event: 'Console.messageAdded', listener: (message: InspectorNotification) => void): this; + /** + * Sent when new profile recording is started using console.profile() call. + */ + on(event: 'Profiler.consoleProfileStarted', listener: (message: InspectorNotification) => void): this; + on(event: 'Profiler.consoleProfileFinished', listener: (message: InspectorNotification) => void): this; + on(event: 'HeapProfiler.addHeapSnapshotChunk', listener: (message: InspectorNotification) => void): this; + on(event: 'HeapProfiler.resetProfiles', listener: () => void): this; + on(event: 'HeapProfiler.reportHeapSnapshotProgress', listener: (message: InspectorNotification) => void): this; + /** + * If heap objects tracking has been started then backend regularly sends a current value for last seen object id and corresponding timestamp. If the were changes in the heap since last event then one or more heapStatsUpdate events will be sent before a new lastSeenObjectId event. + */ + on(event: 'HeapProfiler.lastSeenObjectId', listener: (message: InspectorNotification) => void): this; + /** + * If heap objects tracking has been started then backend may send update for one or more fragments + */ + on(event: 'HeapProfiler.heapStatsUpdate', listener: (message: InspectorNotification) => void): this; + /** + * Contains an bucket of collected trace events. + */ + on(event: 'NodeTracing.dataCollected', listener: (message: InspectorNotification) => void): this; + /** + * Signals that tracing is stopped and there is no trace buffers pending flush, all data were + * delivered via dataCollected events. 
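The `NodeTracing.dataCollected` and `NodeTracing.tracingComplete` notifications pair with the `NodeTracing.start`/`NodeTracing.stop` commands declared earlier. A sketch; the traced category and the omitted workload are illustrative:

```js
// Sketch: collect trace events for the 'node' category via NodeTracing.
import { Session } from 'node:inspector/promises';

const session = new Session();
session.connect();

const events = [];
session.on('NodeTracing.dataCollected', (message) => {
  events.push(...message.params.value);
});
session.on('NodeTracing.tracingComplete', () => {
  console.log('trace events collected:', events.length);
  session.disconnect();
});

const { categories } = await session.post('NodeTracing.getCategories');
console.log('available categories:', categories);

await session.post('NodeTracing.start', {
  traceConfig: { includedCategories: ['node'] },
});
// ... code to be traced would run here ...
await session.post('NodeTracing.stop');   // remaining events arrive via dataCollected, then tracingComplete
```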
+ */ + on(event: 'NodeTracing.tracingComplete', listener: () => void): this; + /** + * Issued when attached to a worker. + */ + on(event: 'NodeWorker.attachedToWorker', listener: (message: InspectorNotification) => void): this; + /** + * Issued when detached from the worker. + */ + on(event: 'NodeWorker.detachedFromWorker', listener: (message: InspectorNotification) => void): this; + /** + * Notifies about a new protocol message received from the session + * (session ID is provided in attachedToWorker notification). + */ + on(event: 'NodeWorker.receivedMessageFromWorker', listener: (message: InspectorNotification) => void): this; + /** + * Fired when page is about to send HTTP request. + */ + on(event: 'Network.requestWillBeSent', listener: (message: InspectorNotification) => void): this; + /** + * Fired when HTTP response is available. + */ + on(event: 'Network.responseReceived', listener: (message: InspectorNotification) => void): this; + on(event: 'Network.loadingFailed', listener: (message: InspectorNotification) => void): this; + on(event: 'Network.loadingFinished', listener: (message: InspectorNotification) => void): this; + /** + * This event is fired instead of `Runtime.executionContextDestroyed` when + * enabled. + * It is fired when the Node process finished all code execution and is + * waiting for all frontends to disconnect. + */ + on(event: 'NodeRuntime.waitingForDisconnect', listener: () => void): this; + /** + * This event is fired when the runtime is waiting for the debugger. For + * example, when inspector.waitingForDebugger is called + */ + on(event: 'NodeRuntime.waitingForDebugger', listener: () => void): this; + once(event: string, listener: (...args: any[]) => void): this; + /** + * Emitted when any notification from the V8 Inspector is received. + */ + once(event: 'inspectorNotification', listener: (message: InspectorNotification) => void): this; + /** + * Issued when new execution context is created. + */ + once(event: 'Runtime.executionContextCreated', listener: (message: InspectorNotification) => void): this; + /** + * Issued when execution context is destroyed. + */ + once(event: 'Runtime.executionContextDestroyed', listener: (message: InspectorNotification) => void): this; + /** + * Issued when all executionContexts were cleared in browser + */ + once(event: 'Runtime.executionContextsCleared', listener: () => void): this; + /** + * Issued when exception was thrown and unhandled. + */ + once(event: 'Runtime.exceptionThrown', listener: (message: InspectorNotification) => void): this; + /** + * Issued when unhandled exception was revoked. + */ + once(event: 'Runtime.exceptionRevoked', listener: (message: InspectorNotification) => void): this; + /** + * Issued when console API was called. + */ + once(event: 'Runtime.consoleAPICalled', listener: (message: InspectorNotification) => void): this; + /** + * Issued when object should be inspected (for example, as a result of inspect() command line API call). + */ + once(event: 'Runtime.inspectRequested', listener: (message: InspectorNotification) => void): this; + /** + * Fired when virtual machine parses script. This event is also fired for all known and uncollected scripts upon enabling debugger. + */ + once(event: 'Debugger.scriptParsed', listener: (message: InspectorNotification) => void): this; + /** + * Fired when virtual machine fails to parse the script. 
+ */ + once(event: 'Debugger.scriptFailedToParse', listener: (message: InspectorNotification) => void): this; + /** + * Fired when breakpoint is resolved to an actual script and location. + */ + once(event: 'Debugger.breakpointResolved', listener: (message: InspectorNotification) => void): this; + /** + * Fired when the virtual machine stopped on breakpoint or exception or any other stop criteria. + */ + once(event: 'Debugger.paused', listener: (message: InspectorNotification) => void): this; + /** + * Fired when the virtual machine resumed execution. + */ + once(event: 'Debugger.resumed', listener: () => void): this; + /** + * Issued when new console message is added. + */ + once(event: 'Console.messageAdded', listener: (message: InspectorNotification) => void): this; + /** + * Sent when new profile recording is started using console.profile() call. + */ + once(event: 'Profiler.consoleProfileStarted', listener: (message: InspectorNotification) => void): this; + once(event: 'Profiler.consoleProfileFinished', listener: (message: InspectorNotification) => void): this; + once(event: 'HeapProfiler.addHeapSnapshotChunk', listener: (message: InspectorNotification) => void): this; + once(event: 'HeapProfiler.resetProfiles', listener: () => void): this; + once(event: 'HeapProfiler.reportHeapSnapshotProgress', listener: (message: InspectorNotification) => void): this; + /** + * If heap objects tracking has been started then backend regularly sends a current value for last seen object id and corresponding timestamp. If the were changes in the heap since last event then one or more heapStatsUpdate events will be sent before a new lastSeenObjectId event. + */ + once(event: 'HeapProfiler.lastSeenObjectId', listener: (message: InspectorNotification) => void): this; + /** + * If heap objects tracking has been started then backend may send update for one or more fragments + */ + once(event: 'HeapProfiler.heapStatsUpdate', listener: (message: InspectorNotification) => void): this; + /** + * Contains an bucket of collected trace events. + */ + once(event: 'NodeTracing.dataCollected', listener: (message: InspectorNotification) => void): this; + /** + * Signals that tracing is stopped and there is no trace buffers pending flush, all data were + * delivered via dataCollected events. + */ + once(event: 'NodeTracing.tracingComplete', listener: () => void): this; + /** + * Issued when attached to a worker. + */ + once(event: 'NodeWorker.attachedToWorker', listener: (message: InspectorNotification) => void): this; + /** + * Issued when detached from the worker. + */ + once(event: 'NodeWorker.detachedFromWorker', listener: (message: InspectorNotification) => void): this; + /** + * Notifies about a new protocol message received from the session + * (session ID is provided in attachedToWorker notification). + */ + once(event: 'NodeWorker.receivedMessageFromWorker', listener: (message: InspectorNotification) => void): this; + /** + * Fired when page is about to send HTTP request. + */ + once(event: 'Network.requestWillBeSent', listener: (message: InspectorNotification) => void): this; + /** + * Fired when HTTP response is available. 
+ */ + once(event: 'Network.responseReceived', listener: (message: InspectorNotification) => void): this; + once(event: 'Network.loadingFailed', listener: (message: InspectorNotification) => void): this; + once(event: 'Network.loadingFinished', listener: (message: InspectorNotification) => void): this; + /** + * This event is fired instead of `Runtime.executionContextDestroyed` when + * enabled. + * It is fired when the Node process finished all code execution and is + * waiting for all frontends to disconnect. + */ + once(event: 'NodeRuntime.waitingForDisconnect', listener: () => void): this; + /** + * This event is fired when the runtime is waiting for the debugger. For + * example, when inspector.waitingForDebugger is called + */ + once(event: 'NodeRuntime.waitingForDebugger', listener: () => void): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + /** + * Emitted when any notification from the V8 Inspector is received. + */ + prependListener(event: 'inspectorNotification', listener: (message: InspectorNotification) => void): this; + /** + * Issued when new execution context is created. + */ + prependListener(event: 'Runtime.executionContextCreated', listener: (message: InspectorNotification) => void): this; + /** + * Issued when execution context is destroyed. + */ + prependListener(event: 'Runtime.executionContextDestroyed', listener: (message: InspectorNotification) => void): this; + /** + * Issued when all executionContexts were cleared in browser + */ + prependListener(event: 'Runtime.executionContextsCleared', listener: () => void): this; + /** + * Issued when exception was thrown and unhandled. + */ + prependListener(event: 'Runtime.exceptionThrown', listener: (message: InspectorNotification) => void): this; + /** + * Issued when unhandled exception was revoked. + */ + prependListener(event: 'Runtime.exceptionRevoked', listener: (message: InspectorNotification) => void): this; + /** + * Issued when console API was called. + */ + prependListener(event: 'Runtime.consoleAPICalled', listener: (message: InspectorNotification) => void): this; + /** + * Issued when object should be inspected (for example, as a result of inspect() command line API call). + */ + prependListener(event: 'Runtime.inspectRequested', listener: (message: InspectorNotification) => void): this; + /** + * Fired when virtual machine parses script. This event is also fired for all known and uncollected scripts upon enabling debugger. + */ + prependListener(event: 'Debugger.scriptParsed', listener: (message: InspectorNotification) => void): this; + /** + * Fired when virtual machine fails to parse the script. + */ + prependListener(event: 'Debugger.scriptFailedToParse', listener: (message: InspectorNotification) => void): this; + /** + * Fired when breakpoint is resolved to an actual script and location. + */ + prependListener(event: 'Debugger.breakpointResolved', listener: (message: InspectorNotification) => void): this; + /** + * Fired when the virtual machine stopped on breakpoint or exception or any other stop criteria. + */ + prependListener(event: 'Debugger.paused', listener: (message: InspectorNotification) => void): this; + /** + * Fired when the virtual machine resumed execution. + */ + prependListener(event: 'Debugger.resumed', listener: () => void): this; + /** + * Issued when new console message is added. 
+ */ + prependListener(event: 'Console.messageAdded', listener: (message: InspectorNotification) => void): this; + /** + * Sent when new profile recording is started using console.profile() call. + */ + prependListener(event: 'Profiler.consoleProfileStarted', listener: (message: InspectorNotification) => void): this; + prependListener(event: 'Profiler.consoleProfileFinished', listener: (message: InspectorNotification) => void): this; + prependListener(event: 'HeapProfiler.addHeapSnapshotChunk', listener: (message: InspectorNotification) => void): this; + prependListener(event: 'HeapProfiler.resetProfiles', listener: () => void): this; + prependListener(event: 'HeapProfiler.reportHeapSnapshotProgress', listener: (message: InspectorNotification) => void): this; + /** + * If heap objects tracking has been started then backend regularly sends a current value for last seen object id and corresponding timestamp. If the were changes in the heap since last event then one or more heapStatsUpdate events will be sent before a new lastSeenObjectId event. + */ + prependListener(event: 'HeapProfiler.lastSeenObjectId', listener: (message: InspectorNotification) => void): this; + /** + * If heap objects tracking has been started then backend may send update for one or more fragments + */ + prependListener(event: 'HeapProfiler.heapStatsUpdate', listener: (message: InspectorNotification) => void): this; + /** + * Contains an bucket of collected trace events. + */ + prependListener(event: 'NodeTracing.dataCollected', listener: (message: InspectorNotification) => void): this; + /** + * Signals that tracing is stopped and there is no trace buffers pending flush, all data were + * delivered via dataCollected events. + */ + prependListener(event: 'NodeTracing.tracingComplete', listener: () => void): this; + /** + * Issued when attached to a worker. + */ + prependListener(event: 'NodeWorker.attachedToWorker', listener: (message: InspectorNotification) => void): this; + /** + * Issued when detached from the worker. + */ + prependListener(event: 'NodeWorker.detachedFromWorker', listener: (message: InspectorNotification) => void): this; + /** + * Notifies about a new protocol message received from the session + * (session ID is provided in attachedToWorker notification). + */ + prependListener(event: 'NodeWorker.receivedMessageFromWorker', listener: (message: InspectorNotification) => void): this; + /** + * Fired when page is about to send HTTP request. + */ + prependListener(event: 'Network.requestWillBeSent', listener: (message: InspectorNotification) => void): this; + /** + * Fired when HTTP response is available. + */ + prependListener(event: 'Network.responseReceived', listener: (message: InspectorNotification) => void): this; + prependListener(event: 'Network.loadingFailed', listener: (message: InspectorNotification) => void): this; + prependListener(event: 'Network.loadingFinished', listener: (message: InspectorNotification) => void): this; + /** + * This event is fired instead of `Runtime.executionContextDestroyed` when + * enabled. + * It is fired when the Node process finished all code execution and is + * waiting for all frontends to disconnect. + */ + prependListener(event: 'NodeRuntime.waitingForDisconnect', listener: () => void): this; + /** + * This event is fired when the runtime is waiting for the debugger. 
For + * example, when inspector.waitingForDebugger is called + */ + prependListener(event: 'NodeRuntime.waitingForDebugger', listener: () => void): this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + /** + * Emitted when any notification from the V8 Inspector is received. + */ + prependOnceListener(event: 'inspectorNotification', listener: (message: InspectorNotification) => void): this; + /** + * Issued when new execution context is created. + */ + prependOnceListener(event: 'Runtime.executionContextCreated', listener: (message: InspectorNotification) => void): this; + /** + * Issued when execution context is destroyed. + */ + prependOnceListener(event: 'Runtime.executionContextDestroyed', listener: (message: InspectorNotification) => void): this; + /** + * Issued when all executionContexts were cleared in browser + */ + prependOnceListener(event: 'Runtime.executionContextsCleared', listener: () => void): this; + /** + * Issued when exception was thrown and unhandled. + */ + prependOnceListener(event: 'Runtime.exceptionThrown', listener: (message: InspectorNotification) => void): this; + /** + * Issued when unhandled exception was revoked. + */ + prependOnceListener(event: 'Runtime.exceptionRevoked', listener: (message: InspectorNotification) => void): this; + /** + * Issued when console API was called. + */ + prependOnceListener(event: 'Runtime.consoleAPICalled', listener: (message: InspectorNotification) => void): this; + /** + * Issued when object should be inspected (for example, as a result of inspect() command line API call). + */ + prependOnceListener(event: 'Runtime.inspectRequested', listener: (message: InspectorNotification) => void): this; + /** + * Fired when virtual machine parses script. This event is also fired for all known and uncollected scripts upon enabling debugger. + */ + prependOnceListener(event: 'Debugger.scriptParsed', listener: (message: InspectorNotification) => void): this; + /** + * Fired when virtual machine fails to parse the script. + */ + prependOnceListener(event: 'Debugger.scriptFailedToParse', listener: (message: InspectorNotification) => void): this; + /** + * Fired when breakpoint is resolved to an actual script and location. + */ + prependOnceListener(event: 'Debugger.breakpointResolved', listener: (message: InspectorNotification) => void): this; + /** + * Fired when the virtual machine stopped on breakpoint or exception or any other stop criteria. + */ + prependOnceListener(event: 'Debugger.paused', listener: (message: InspectorNotification) => void): this; + /** + * Fired when the virtual machine resumed execution. + */ + prependOnceListener(event: 'Debugger.resumed', listener: () => void): this; + /** + * Issued when new console message is added. + */ + prependOnceListener(event: 'Console.messageAdded', listener: (message: InspectorNotification) => void): this; + /** + * Sent when new profile recording is started using console.profile() call. 
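`Profiler.consoleProfileStarted`/`consoleProfileFinished` correspond to the `profile()`/`profileEnd()` methods of the `inspector.console` object declared in `node:inspector`. A sketch; the profile label is illustrative, and whether in-process console-profile notifications reach a same-thread session can depend on the Node.js version:

```js
// Sketch: trigger console-profile notifications from the inspector console.
import { Session } from 'node:inspector/promises';
import { console as inspectorConsole } from 'node:inspector';

const session = new Session();
session.connect();

session.on('Profiler.consoleProfileFinished', (message) => {
  console.log('finished profile:', message.params.title);
  session.disconnect();
});

await session.post('Profiler.enable');
inspectorConsole.profile('startup');
// ... code to profile would run here ...
inspectorConsole.profileEnd('startup');
```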
+ */ + prependOnceListener(event: 'Profiler.consoleProfileStarted', listener: (message: InspectorNotification) => void): this; + prependOnceListener(event: 'Profiler.consoleProfileFinished', listener: (message: InspectorNotification) => void): this; + prependOnceListener(event: 'HeapProfiler.addHeapSnapshotChunk', listener: (message: InspectorNotification) => void): this; + prependOnceListener(event: 'HeapProfiler.resetProfiles', listener: () => void): this; + prependOnceListener(event: 'HeapProfiler.reportHeapSnapshotProgress', listener: (message: InspectorNotification) => void): this; + /** + * If heap objects tracking has been started then backend regularly sends a current value for last seen object id and corresponding timestamp. If the were changes in the heap since last event then one or more heapStatsUpdate events will be sent before a new lastSeenObjectId event. + */ + prependOnceListener(event: 'HeapProfiler.lastSeenObjectId', listener: (message: InspectorNotification) => void): this; + /** + * If heap objects tracking has been started then backend may send update for one or more fragments + */ + prependOnceListener(event: 'HeapProfiler.heapStatsUpdate', listener: (message: InspectorNotification) => void): this; + /** + * Contains an bucket of collected trace events. + */ + prependOnceListener(event: 'NodeTracing.dataCollected', listener: (message: InspectorNotification) => void): this; + /** + * Signals that tracing is stopped and there is no trace buffers pending flush, all data were + * delivered via dataCollected events. + */ + prependOnceListener(event: 'NodeTracing.tracingComplete', listener: () => void): this; + /** + * Issued when attached to a worker. + */ + prependOnceListener(event: 'NodeWorker.attachedToWorker', listener: (message: InspectorNotification) => void): this; + /** + * Issued when detached from the worker. + */ + prependOnceListener(event: 'NodeWorker.detachedFromWorker', listener: (message: InspectorNotification) => void): this; + /** + * Notifies about a new protocol message received from the session + * (session ID is provided in attachedToWorker notification). + */ + prependOnceListener(event: 'NodeWorker.receivedMessageFromWorker', listener: (message: InspectorNotification) => void): this; + /** + * Fired when page is about to send HTTP request. + */ + prependOnceListener(event: 'Network.requestWillBeSent', listener: (message: InspectorNotification) => void): this; + /** + * Fired when HTTP response is available. + */ + prependOnceListener(event: 'Network.responseReceived', listener: (message: InspectorNotification) => void): this; + prependOnceListener(event: 'Network.loadingFailed', listener: (message: InspectorNotification) => void): this; + prependOnceListener(event: 'Network.loadingFinished', listener: (message: InspectorNotification) => void): this; + /** + * This event is fired instead of `Runtime.executionContextDestroyed` when + * enabled. + * It is fired when the Node process finished all code execution and is + * waiting for all frontends to disconnect. + */ + prependOnceListener(event: 'NodeRuntime.waitingForDisconnect', listener: () => void): this; + /** + * This event is fired when the runtime is waiting for the debugger. 
For + * example, when inspector.waitingForDebugger is called + */ + prependOnceListener(event: 'NodeRuntime.waitingForDebugger', listener: () => void): this; + } + + export { + Session, + open, + close, + url, + waitForDebugger, + console, + InspectorNotification, + Schema, + Runtime, + Debugger, + Console, + Profiler, + HeapProfiler, + NodeTracing, + NodeWorker, + Network, + NodeRuntime, + }; +} + +/** + * The `node:inspector/promises` module provides an API for interacting with the V8 + * inspector. + * @since v19.0.0 + */ +declare module 'node:inspector/promises' { + export * from 'inspector/promises'; +} diff --git a/node_modules/@types/node-fetch/node_modules/@types/node/module.d.ts b/node_modules/@types/node-fetch/node_modules/@types/node/module.d.ts new file mode 100644 index 0000000..2ab0f87 --- /dev/null +++ b/node_modules/@types/node-fetch/node_modules/@types/node/module.d.ts @@ -0,0 +1,402 @@ +/** + * @since v0.3.7 + * @experimental + */ +declare module "module" { + import { URL } from "node:url"; + import { MessagePort } from "node:worker_threads"; + namespace Module { + /** + * The `module.syncBuiltinESMExports()` method updates all the live bindings for + * builtin `ES Modules` to match the properties of the `CommonJS` exports. It + * does not add or remove exported names from the `ES Modules`. + * + * ```js + * import fs from 'node:fs'; + * import assert from 'node:assert'; + * import { syncBuiltinESMExports } from 'node:module'; + * + * fs.readFile = newAPI; + * + * delete fs.readFileSync; + * + * function newAPI() { + * // ... + * } + * + * fs.newAPI = newAPI; + * + * syncBuiltinESMExports(); + * + * import('node:fs').then((esmFS) => { + * // It syncs the existing readFile property with the new value + * assert.strictEqual(esmFS.readFile, newAPI); + * // readFileSync has been deleted from the required fs + * assert.strictEqual('readFileSync' in fs, false); + * // syncBuiltinESMExports() does not remove readFileSync from esmFS + * assert.strictEqual('readFileSync' in esmFS, true); + * // syncBuiltinESMExports() does not add names + * assert.strictEqual(esmFS.newAPI, undefined); + * }); + * ``` + * @since v12.12.0 + */ + function syncBuiltinESMExports(): void; + /** + * `path` is the resolved path for the file for which a corresponding source map + * should be fetched. + * @since v13.7.0, v12.17.0 + * @return Returns `module.SourceMap` if a source map is found, `undefined` otherwise. + */ + function findSourceMap(path: string, error?: Error): SourceMap | undefined; + interface SourceMapPayload { + file: string; + version: number; + sources: string[]; + sourcesContent: string[]; + names: string[]; + mappings: string; + sourceRoot: string; + } + interface SourceMapping { + generatedLine: number; + generatedColumn: number; + originalSource: string; + originalLine: number; + originalColumn: number; + } + interface SourceOrigin { + /** + * The name of the range in the source map, if one was provided + */ + name?: string; + /** + * The file name of the original source, as reported in the SourceMap + */ + fileName: string; + /** + * The 1-indexed lineNumber of the corresponding call site in the original source + */ + lineNumber: number; + /** + * The 1-indexed columnNumber of the corresponding call site in the original source + */ + columnNumber: number; + } + /** + * @since v13.7.0, v12.17.0 + */ + class SourceMap { + /** + * Getter for the payload used to construct the `SourceMap` instance. 
+ */ + readonly payload: SourceMapPayload; + constructor(payload: SourceMapPayload); + /** + * Given a line offset and column offset in the generated source + * file, returns an object representing the SourceMap range in the + * original file if found, or an empty object if not. + * + * The object returned contains the following keys: + * + * The returned value represents the raw range as it appears in the + * SourceMap, based on zero-indexed offsets, _not_ 1-indexed line and + * column numbers as they appear in Error messages and CallSite + * objects. + * + * To get the corresponding 1-indexed line and column numbers from a + * lineNumber and columnNumber as they are reported by Error stacks + * and CallSite objects, use `sourceMap.findOrigin(lineNumber, columnNumber)` + * @param lineOffset The zero-indexed line number offset in the generated source + * @param columnOffset The zero-indexed column number offset in the generated source + */ + findEntry(lineOffset: number, columnOffset: number): SourceMapping; + /** + * Given a 1-indexed `lineNumber` and `columnNumber` from a call site in the generated source, + * find the corresponding call site location in the original source. + * + * If the `lineNumber` and `columnNumber` provided are not found in any source map, + * then an empty object is returned. + * @param lineNumber The 1-indexed line number of the call site in the generated source + * @param columnNumber The 1-indexed column number of the call site in the generated source + */ + findOrigin(lineNumber: number, columnNumber: number): SourceOrigin | {}; + } + interface ImportAttributes extends NodeJS.Dict { + type?: string | undefined; + } + type ModuleFormat = "builtin" | "commonjs" | "json" | "module" | "wasm"; + type ModuleSource = string | ArrayBuffer | NodeJS.TypedArray; + interface GlobalPreloadContext { + port: MessagePort; + } + /** + * @deprecated This hook will be removed in a future version. + * Use `initialize` instead. When a loader has an `initialize` export, `globalPreload` will be ignored. + * + * Sometimes it might be necessary to run some code inside of the same global scope that the application runs in. + * This hook allows the return of a string that is run as a sloppy-mode script on startup. + * + * @param context Information to assist the preload code + * @return Code to run before application startup + */ + type GlobalPreloadHook = (context: GlobalPreloadContext) => string; + /** + * The `initialize` hook provides a way to define a custom function that runs in the hooks thread + * when the hooks module is initialized. Initialization happens when the hooks module is registered via `register`. + * + * This hook can receive data from a `register` invocation, including ports and other transferrable objects. + * The return value of `initialize` can be a `Promise`, in which case it will be awaited before the main application thread execution resumes. 
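The `SourceMap` class and `findSourceMap()` above can be exercised directly. A sketch; `dist/app.js` and the line/column numbers are illustrative, and `findSourceMap()` returns `undefined` unless a source map is known for the resolved path:

```js
// Sketch: map a call-site position back to its original source.
import { findSourceMap } from 'node:module';
import { resolve } from 'node:path';

const sourceMap = findSourceMap(resolve('dist/app.js'));
if (sourceMap) {
  console.log(sourceMap.findEntry(0, 10));    // zero-indexed raw range from the map
  console.log(sourceMap.findOrigin(1, 11));   // 1-indexed, as in Error stacks and CallSites
  console.log('generated file:', sourceMap.payload.file);
} else {
  console.log('no source map registered for that file');
}
```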
+ */ + type InitializeHook = (data: Data) => void | Promise; + interface ResolveHookContext { + /** + * Export conditions of the relevant `package.json` + */ + conditions: string[]; + /** + * An object whose key-value pairs represent the assertions for the module to import + */ + importAttributes: ImportAttributes; + /** + * The module importing this one, or undefined if this is the Node.js entry point + */ + parentURL: string | undefined; + } + interface ResolveFnOutput { + /** + * A hint to the load hook (it might be ignored) + */ + format?: ModuleFormat | null | undefined; + /** + * The import attributes to use when caching the module (optional; if excluded the input will be used) + */ + importAttributes?: ImportAttributes | undefined; + /** + * A signal that this hook intends to terminate the chain of `resolve` hooks. + * @default false + */ + shortCircuit?: boolean | undefined; + /** + * The absolute URL to which this input resolves + */ + url: string; + } + /** + * The `resolve` hook chain is responsible for resolving file URL for a given module specifier and parent URL, and optionally its format (such as `'module'`) as a hint to the `load` hook. + * If a format is specified, the load hook is ultimately responsible for providing the final `format` value (and it is free to ignore the hint provided by `resolve`); + * if `resolve` provides a format, a custom `load` hook is required even if only to pass the value to the Node.js default `load` hook. + * + * @param specifier The specified URL path of the module to be resolved + * @param context + * @param nextResolve The subsequent `resolve` hook in the chain, or the Node.js default `resolve` hook after the last user-supplied resolve hook + */ + type ResolveHook = ( + specifier: string, + context: ResolveHookContext, + nextResolve: ( + specifier: string, + context?: ResolveHookContext, + ) => ResolveFnOutput | Promise, + ) => ResolveFnOutput | Promise; + interface LoadHookContext { + /** + * Export conditions of the relevant `package.json` + */ + conditions: string[]; + /** + * The format optionally supplied by the `resolve` hook chain + */ + format: ModuleFormat; + /** + * An object whose key-value pairs represent the assertions for the module to import + */ + importAttributes: ImportAttributes; + } + interface LoadFnOutput { + format: ModuleFormat; + /** + * A signal that this hook intends to terminate the chain of `resolve` hooks. + * @default false + */ + shortCircuit?: boolean | undefined; + /** + * The source for Node.js to evaluate + */ + source?: ModuleSource; + } + /** + * The `load` hook provides a way to define a custom method of determining how a URL should be interpreted, retrieved, and parsed. + * It is also in charge of validating the import assertion. + * + * @param url The URL/path of the module to be loaded + * @param context Metadata about the module + * @param nextLoad The subsequent `load` hook in the chain, or the Node.js default `load` hook after the last user-supplied `load` hook + */ + type LoadHook = ( + url: string, + context: LoadHookContext, + nextLoad: (url: string, context?: LoadHookContext) => LoadFnOutput | Promise, + ) => LoadFnOutput | Promise; + namespace constants { + /** + * The following constants are returned as the `status` field in the object returned by + * {@link enableCompileCache} to indicate the result of the attempt to enable the + * [module compile cache](https://nodejs.org/docs/latest-v22.x/api/module.html#module-compile-cache). 
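As a concrete counterpart to the `ResolveHook`/`LoadHook` shapes above, the sketch below is a minimal customization-hooks module that short-circuits one invented specifier (`virtual:banner`) and defers everything else to the next hook in the chain:

```js
// hooks.mjs -- registered from the application with:
//   import { register } from 'node:module';
//   register('./hooks.mjs', import.meta.url);
export async function resolve(specifier, context, nextResolve) {
  if (specifier === 'virtual:banner') {
    // shortCircuit: true stops the remaining resolve hooks (and the default) from running.
    return { url: 'virtual:banner', format: 'module', shortCircuit: true };
  }
  return nextResolve(specifier, context);
}

export async function load(url, context, nextLoad) {
  if (url === 'virtual:banner') {
    return {
      format: 'module',
      source: 'export const banner = "hello from a load hook";',
      shortCircuit: true,
    };
  }
  return nextLoad(url, context);
}
```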
+ * @since v22.8.0 + */ + namespace compileCacheStatus { + /** + * Node.js has enabled the compile cache successfully. The directory used to store the + * compile cache will be returned in the `directory` field in the + * returned object. + */ + const ENABLED: number; + /** + * The compile cache has already been enabled before, either by a previous call to + * {@link enableCompileCache}, or by the `NODE_COMPILE_CACHE=dir` + * environment variable. The directory used to store the + * compile cache will be returned in the `directory` field in the + * returned object. + */ + const ALREADY_ENABLED: number; + /** + * Node.js fails to enable the compile cache. This can be caused by the lack of + * permission to use the specified directory, or various kinds of file system errors. + * The detail of the failure will be returned in the `message` field in the + * returned object. + */ + const FAILED: number; + /** + * Node.js cannot enable the compile cache because the environment variable + * `NODE_DISABLE_COMPILE_CACHE=1` has been set. + */ + const DISABLED: number; + } + } + } + interface RegisterOptions { + parentURL: string | URL; + data?: Data | undefined; + transferList?: any[] | undefined; + } + interface EnableCompileCacheResult { + /** + * One of the {@link constants.compileCacheStatus} + */ + status: number; + /** + * If Node.js cannot enable the compile cache, this contains + * the error message. Only set if `status` is `module.constants.compileCacheStatus.FAILED`. + */ + message?: string; + /** + * If the compile cache is enabled, this contains the directory + * where the compile cache is stored. Only set if `status` is + * `module.constants.compileCacheStatus.ENABLED` or + * `module.constants.compileCacheStatus.ALREADY_ENABLED`. + */ + directory?: string; + } + interface Module extends NodeModule {} + class Module { + static runMain(): void; + static wrap(code: string): string; + static createRequire(path: string | URL): NodeRequire; + static builtinModules: string[]; + static isBuiltin(moduleName: string): boolean; + static Module: typeof Module; + static register( + specifier: string | URL, + parentURL?: string | URL, + options?: RegisterOptions, + ): void; + static register(specifier: string | URL, options?: RegisterOptions): void; + /** + * Enable [module compile cache](https://nodejs.org/docs/latest-v22.x/api/module.html#module-compile-cache) + * in the current Node.js instance. + * + * If `cacheDir` is not specified, Node.js will either use the directory specified by the + * `NODE_COMPILE_CACHE=dir` environment variable if it's set, or use + * `path.join(os.tmpdir(), 'node-compile-cache')` otherwise. For general use cases, it's + * recommended to call `module.enableCompileCache()` without specifying the `cacheDir`, + * so that the directory can be overridden by the `NODE_COMPILE_CACHE` environment + * variable when necessary. + * + * Since compile cache is supposed to be a quiet optimization that is not required for the + * application to be functional, this method is designed to not throw any exception when the + * compile cache cannot be enabled. Instead, it will return an object containing an error + * message in the `message` field to aid debugging. + * If compile cache is enabled successfully, the `directory` field in the returned object + * contains the path to the directory where the compile cache is stored. 
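A small sketch of how the `compileCacheStatus` constants above pair with `enableCompileCache()` (declared further below); as described, failures are reported through the returned object rather than thrown:

```js
import module from 'node:module';

const { compileCacheStatus } = module.constants;
const result = module.enableCompileCache();

switch (result.status) {
  case compileCacheStatus.ENABLED:
  case compileCacheStatus.ALREADY_ENABLED:
    console.log(`compile cache at ${result.directory}`);
    break;
  case compileCacheStatus.DISABLED:
    console.log('compile cache disabled via NODE_DISABLE_COMPILE_CACHE=1');
    break;
  case compileCacheStatus.FAILED:
    console.warn(`compile cache not enabled: ${result.message}`);
    break;
}
```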
The `status` + * field in the returned object would be one of the `module.constants.compileCacheStatus` + * values to indicate the result of the attempt to enable the + * [module compile cache](https://nodejs.org/docs/latest-v22.x/api/module.html#module-compile-cache). + * + * This method only affects the current Node.js instance. To enable it in child worker threads, + * either call this method in child worker threads too, or set the + * `process.env.NODE_COMPILE_CACHE` value to compile cache directory so the behavior can + * be inherited into the child workers. The directory can be obtained either from the + * `directory` field returned by this method, or with {@link getCompileCacheDir}. + * @since v22.8.0 + * @param cacheDir Optional path to specify the directory where the compile cache + * will be stored/retrieved. + */ + static enableCompileCache(cacheDir?: string): EnableCompileCacheResult; + /** + * @since v22.8.0 + * @return Path to the [module compile cache](https://nodejs.org/docs/latest-v22.x/api/module.html#module-compile-cache) + * directory if it is enabled, or `undefined` otherwise. + */ + static getCompileCacheDir(): string | undefined; + /** + * Flush the [module compile cache](https://nodejs.org/docs/latest-v22.x/api/module.html#module-compile-cache) + * accumulated from modules already loaded + * in the current Node.js instance to disk. This returns after all the flushing + * file system operations come to an end, no matter they succeed or not. If there + * are any errors, this will fail silently, since compile cache misses should not + * interfere with the actual operation of the application. + * @since v22.10.0 + */ + static flushCompileCache(): void; + constructor(id: string, parent?: Module); + } + global { + interface ImportMeta { + /** + * The directory name of the current module. This is the same as the `path.dirname()` of the `import.meta.filename`. + * **Caveat:** only present on `file:` modules. + */ + dirname: string; + /** + * The full absolute path and filename of the current module, with symlinks resolved. + * This is the same as the `url.fileURLToPath()` of the `import.meta.url`. + * **Caveat:** only local modules support this property. Modules not using the `file:` protocol will not provide it. + */ + filename: string; + /** + * The absolute `file:` URL of the module. + */ + url: string; + /** + * Provides a module-relative resolution function scoped to each module, returning + * the URL string. + * + * Second `parent` parameter is only used when the `--experimental-import-meta-resolve` + * command flag enabled. + * + * @since v20.6.0 + * + * @param specifier The module specifier to resolve relative to `parent`. + * @param parent The absolute parent module URL to resolve from. + * @returns The absolute (`file:`) URL string for the resolved module. + */ + resolve(specifier: string, parent?: string | URL | undefined): string; + } + } + export = Module; +} +declare module "node:module" { + import module = require("module"); + export = module; +} diff --git a/node_modules/@types/node-fetch/node_modules/@types/node/net.d.ts b/node_modules/@types/node-fetch/node_modules/@types/node/net.d.ts new file mode 100644 index 0000000..d113e21 --- /dev/null +++ b/node_modules/@types/node-fetch/node_modules/@types/node/net.d.ts @@ -0,0 +1,1001 @@ +/** + * > Stability: 2 - Stable + * + * The `node:net` module provides an asynchronous network API for creating stream-based + * TCP or `IPC` servers ({@link createServer}) and clients + * ({@link createConnection}). 
+ * + * It can be accessed using: + * + * ```js + * import net from 'node:net'; + * ``` + * @see [source](https://github.com/nodejs/node/blob/v22.x/lib/net.js) + */ +declare module "net" { + import * as stream from "node:stream"; + import { Abortable, EventEmitter } from "node:events"; + import * as dns from "node:dns"; + type LookupFunction = ( + hostname: string, + options: dns.LookupOptions, + callback: (err: NodeJS.ErrnoException | null, address: string | dns.LookupAddress[], family?: number) => void, + ) => void; + interface AddressInfo { + address: string; + family: string; + port: number; + } + interface SocketConstructorOpts { + fd?: number | undefined; + allowHalfOpen?: boolean | undefined; + onread?: OnReadOpts | undefined; + readable?: boolean | undefined; + writable?: boolean | undefined; + signal?: AbortSignal; + } + interface OnReadOpts { + buffer: Uint8Array | (() => Uint8Array); + /** + * This function is called for every chunk of incoming data. + * Two arguments are passed to it: the number of bytes written to `buffer` and a reference to `buffer`. + * Return `false` from this function to implicitly `pause()` the socket. + */ + callback(bytesWritten: number, buffer: Uint8Array): boolean; + } + // TODO: remove empty ConnectOpts placeholder at next major @types/node version. + /** @deprecated */ + interface ConnectOpts {} + interface TcpSocketConnectOpts { + port: number; + host?: string | undefined; + localAddress?: string | undefined; + localPort?: number | undefined; + hints?: number | undefined; + family?: number | undefined; + lookup?: LookupFunction | undefined; + noDelay?: boolean | undefined; + keepAlive?: boolean | undefined; + keepAliveInitialDelay?: number | undefined; + /** + * @since v18.13.0 + */ + autoSelectFamily?: boolean | undefined; + /** + * @since v18.13.0 + */ + autoSelectFamilyAttemptTimeout?: number | undefined; + } + interface IpcSocketConnectOpts { + path: string; + } + type SocketConnectOpts = TcpSocketConnectOpts | IpcSocketConnectOpts; + type SocketReadyState = "opening" | "open" | "readOnly" | "writeOnly" | "closed"; + /** + * This class is an abstraction of a TCP socket or a streaming `IPC` endpoint + * (uses named pipes on Windows, and Unix domain sockets otherwise). It is also + * an `EventEmitter`. + * + * A `net.Socket` can be created by the user and used directly to interact with + * a server. For example, it is returned by {@link createConnection}, + * so the user can use it to talk to the server. + * + * It can also be created by Node.js and passed to the user when a connection + * is received. For example, it is passed to the listeners of a `'connection'` event emitted on a {@link Server}, so the user can use + * it to interact with the client. + * @since v0.3.4 + */ + class Socket extends stream.Duplex { + constructor(options?: SocketConstructorOpts); + /** + * Destroys the socket after all data is written. If the `finish` event was already emitted the socket is destroyed immediately. + * If the socket is still writable it implicitly calls `socket.end()`. + * @since v0.3.4 + */ + destroySoon(): void; + /** + * Sends data on the socket. The second parameter specifies the encoding in the + * case of a string. It defaults to UTF8 encoding. + * + * Returns `true` if the entire data was flushed successfully to the kernel + * buffer. Returns `false` if all or part of the data was queued in user memory.`'drain'` will be emitted when the buffer is again free. 
+ * + * The optional `callback` parameter will be executed when the data is finally + * written out, which may not be immediately. + * + * See `Writable` stream `write()` method for more + * information. + * @since v0.1.90 + * @param [encoding='utf8'] Only used when data is `string`. + */ + write(buffer: Uint8Array | string, cb?: (err?: Error) => void): boolean; + write(str: Uint8Array | string, encoding?: BufferEncoding, cb?: (err?: Error) => void): boolean; + /** + * Initiate a connection on a given socket. + * + * Possible signatures: + * + * * `socket.connect(options[, connectListener])` + * * `socket.connect(path[, connectListener])` for `IPC` connections. + * * `socket.connect(port[, host][, connectListener])` for TCP connections. + * * Returns: `net.Socket` The socket itself. + * + * This function is asynchronous. When the connection is established, the `'connect'` event will be emitted. If there is a problem connecting, + * instead of a `'connect'` event, an `'error'` event will be emitted with + * the error passed to the `'error'` listener. + * The last parameter `connectListener`, if supplied, will be added as a listener + * for the `'connect'` event **once**. + * + * This function should only be used for reconnecting a socket after`'close'` has been emitted or otherwise it may lead to undefined + * behavior. + */ + connect(options: SocketConnectOpts, connectionListener?: () => void): this; + connect(port: number, host: string, connectionListener?: () => void): this; + connect(port: number, connectionListener?: () => void): this; + connect(path: string, connectionListener?: () => void): this; + /** + * Set the encoding for the socket as a `Readable Stream`. See `readable.setEncoding()` for more information. + * @since v0.1.90 + * @return The socket itself. + */ + setEncoding(encoding?: BufferEncoding): this; + /** + * Pauses the reading of data. That is, `'data'` events will not be emitted. + * Useful to throttle back an upload. + * @return The socket itself. + */ + pause(): this; + /** + * Close the TCP connection by sending an RST packet and destroy the stream. + * If this TCP socket is in connecting status, it will send an RST packet and destroy this TCP socket once it is connected. + * Otherwise, it will call `socket.destroy` with an `ERR_SOCKET_CLOSED` Error. + * If this is not a TCP socket (for example, a pipe), calling this method will immediately throw an `ERR_INVALID_HANDLE_TYPE` Error. + * @since v18.3.0, v16.17.0 + */ + resetAndDestroy(): this; + /** + * Resumes reading after a call to `socket.pause()`. + * @return The socket itself. + */ + resume(): this; + /** + * Sets the socket to timeout after `timeout` milliseconds of inactivity on + * the socket. By default `net.Socket` do not have a timeout. + * + * When an idle timeout is triggered the socket will receive a `'timeout'` event but the connection will not be severed. The user must manually call `socket.end()` or `socket.destroy()` to + * end the connection. + * + * ```js + * socket.setTimeout(3000); + * socket.on('timeout', () => { + * console.log('socket timeout'); + * socket.end(); + * }); + * ``` + * + * If `timeout` is 0, then the existing idle timeout is disabled. + * + * The optional `callback` parameter will be added as a one-time listener for the `'timeout'` event. + * @since v0.1.90 + * @return The socket itself. + */ + setTimeout(timeout: number, callback?: () => void): this; + /** + * Enable/disable the use of Nagle's algorithm. 
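Putting `socket.connect()`, `socket.write()`, and `socket.setTimeout()` from above together, a minimal TCP client sketch (the host and port are placeholders):

```js
import net from 'node:net';

const socket = net.connect({ port: 8124, host: '127.0.0.1' }, () => {
  // 'connect' listener: the connection is established.
  socket.write('hello\r\n', 'utf8', (err) => {
    if (err) console.error('write failed:', err);
  });
});

// Idle for 5 seconds -> 'timeout' fires, but the connection stays open
// until we end it ourselves.
socket.setTimeout(5000);
socket.on('timeout', () => {
  console.log('socket idle, closing');
  socket.end();
});

socket.on('data', (data) => console.log('received:', data.toString()));
socket.on('error', (err) => console.error(err));
socket.on('close', (hadError) => console.log('closed, hadError =', hadError));
```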
+ * + * When a TCP connection is created, it will have Nagle's algorithm enabled. + * + * Nagle's algorithm delays data before it is sent via the network. It attempts + * to optimize throughput at the expense of latency. + * + * Passing `true` for `noDelay` or not passing an argument will disable Nagle's + * algorithm for the socket. Passing `false` for `noDelay` will enable Nagle's + * algorithm. + * @since v0.1.90 + * @param [noDelay=true] + * @return The socket itself. + */ + setNoDelay(noDelay?: boolean): this; + /** + * Enable/disable keep-alive functionality, and optionally set the initial + * delay before the first keepalive probe is sent on an idle socket. + * + * Set `initialDelay` (in milliseconds) to set the delay between the last + * data packet received and the first keepalive probe. Setting `0` for`initialDelay` will leave the value unchanged from the default + * (or previous) setting. + * + * Enabling the keep-alive functionality will set the following socket options: + * + * * `SO_KEEPALIVE=1` + * * `TCP_KEEPIDLE=initialDelay` + * * `TCP_KEEPCNT=10` + * * `TCP_KEEPINTVL=1` + * @since v0.1.92 + * @param [enable=false] + * @param [initialDelay=0] + * @return The socket itself. + */ + setKeepAlive(enable?: boolean, initialDelay?: number): this; + /** + * Returns the bound `address`, the address `family` name and `port` of the + * socket as reported by the operating system:`{ port: 12346, family: 'IPv4', address: '127.0.0.1' }` + * @since v0.1.90 + */ + address(): AddressInfo | {}; + /** + * Calling `unref()` on a socket will allow the program to exit if this is the only + * active socket in the event system. If the socket is already `unref`ed calling`unref()` again will have no effect. + * @since v0.9.1 + * @return The socket itself. + */ + unref(): this; + /** + * Opposite of `unref()`, calling `ref()` on a previously `unref`ed socket will _not_ let the program exit if it's the only socket left (the default behavior). + * If the socket is `ref`ed calling `ref` again will have no effect. + * @since v0.9.1 + * @return The socket itself. + */ + ref(): this; + /** + * This property is only present if the family autoselection algorithm is enabled in `socket.connect(options)` + * and it is an array of the addresses that have been attempted. + * + * Each address is a string in the form of `$IP:$PORT`. + * If the connection was successful, then the last address is the one that the socket is currently connected to. + * @since v19.4.0 + */ + readonly autoSelectFamilyAttemptedAddresses: string[]; + /** + * This property shows the number of characters buffered for writing. The buffer + * may contain strings whose length after encoding is not yet known. So this number + * is only an approximation of the number of bytes in the buffer. + * + * `net.Socket` has the property that `socket.write()` always works. This is to + * help users get up and running quickly. The computer cannot always keep up + * with the amount of data that is written to a socket. The network connection + * simply might be too slow. Node.js will internally queue up the data written to a + * socket and send it out over the wire when it is possible. + * + * The consequence of this internal buffering is that memory may grow. + * Users who experience large or growing `bufferSize` should attempt to + * "throttle" the data flows in their program with `socket.pause()` and `socket.resume()`. + * @since v0.3.8 + * @deprecated Since v14.6.0 - Use `writableLength` instead. 
+ */ + readonly bufferSize: number; + /** + * The amount of received bytes. + * @since v0.5.3 + */ + readonly bytesRead: number; + /** + * The amount of bytes sent. + * @since v0.5.3 + */ + readonly bytesWritten: number; + /** + * If `true`, `socket.connect(options[, connectListener])` was + * called and has not yet finished. It will stay `true` until the socket becomes + * connected, then it is set to `false` and the `'connect'` event is emitted. Note + * that the `socket.connect(options[, connectListener])` callback is a listener for the `'connect'` event. + * @since v6.1.0 + */ + readonly connecting: boolean; + /** + * This is `true` if the socket is not connected yet, either because `.connect()`has not yet been called or because it is still in the process of connecting + * (see `socket.connecting`). + * @since v11.2.0, v10.16.0 + */ + readonly pending: boolean; + /** + * See `writable.destroyed` for further details. + */ + readonly destroyed: boolean; + /** + * The string representation of the local IP address the remote client is + * connecting on. For example, in a server listening on `'0.0.0.0'`, if a client + * connects on `'192.168.1.1'`, the value of `socket.localAddress` would be`'192.168.1.1'`. + * @since v0.9.6 + */ + readonly localAddress?: string; + /** + * The numeric representation of the local port. For example, `80` or `21`. + * @since v0.9.6 + */ + readonly localPort?: number; + /** + * The string representation of the local IP family. `'IPv4'` or `'IPv6'`. + * @since v18.8.0, v16.18.0 + */ + readonly localFamily?: string; + /** + * This property represents the state of the connection as a string. + * + * * If the stream is connecting `socket.readyState` is `opening`. + * * If the stream is readable and writable, it is `open`. + * * If the stream is readable and not writable, it is `readOnly`. + * * If the stream is not readable and writable, it is `writeOnly`. + * @since v0.5.0 + */ + readonly readyState: SocketReadyState; + /** + * The string representation of the remote IP address. For example,`'74.125.127.100'` or `'2001:4860:a005::68'`. Value may be `undefined` if + * the socket is destroyed (for example, if the client disconnected). + * @since v0.5.10 + */ + readonly remoteAddress?: string | undefined; + /** + * The string representation of the remote IP family. `'IPv4'` or `'IPv6'`. Value may be `undefined` if + * the socket is destroyed (for example, if the client disconnected). + * @since v0.11.14 + */ + readonly remoteFamily?: string | undefined; + /** + * The numeric representation of the remote port. For example, `80` or `21`. Value may be `undefined` if + * the socket is destroyed (for example, if the client disconnected). + * @since v0.5.10 + */ + readonly remotePort?: number | undefined; + /** + * The socket timeout in milliseconds as set by `socket.setTimeout()`. + * It is `undefined` if a timeout has not been set. + * @since v10.7.0 + */ + readonly timeout?: number | undefined; + /** + * Half-closes the socket. i.e., it sends a FIN packet. It is possible the + * server will still send some data. + * + * See `writable.end()` for further details. + * @since v0.1.90 + * @param [encoding='utf8'] Only used when data is `string`. + * @param callback Optional callback for when the socket is finished. + * @return The socket itself. + */ + end(callback?: () => void): this; + end(buffer: Uint8Array | string, callback?: () => void): this; + end(str: Uint8Array | string, encoding?: BufferEncoding, callback?: () => void): this; + /** + * events.EventEmitter + * 1. 
close + * 2. connect + * 3. connectionAttempt + * 4. connectionAttemptFailed + * 5. connectionAttemptTimeout + * 6. data + * 7. drain + * 8. end + * 9. error + * 10. lookup + * 11. ready + * 12. timeout + */ + addListener(event: string, listener: (...args: any[]) => void): this; + addListener(event: "close", listener: (hadError: boolean) => void): this; + addListener(event: "connect", listener: () => void): this; + addListener(event: "connectionAttempt", listener: (ip: string, port: number, family: number) => void): this; + addListener( + event: "connectionAttemptFailed", + listener: (ip: string, port: number, family: number) => void, + ): this; + addListener( + event: "connectionAttemptTimeout", + listener: (ip: string, port: number, family: number) => void, + ): this; + addListener(event: "data", listener: (data: Buffer) => void): this; + addListener(event: "drain", listener: () => void): this; + addListener(event: "end", listener: () => void): this; + addListener(event: "error", listener: (err: Error) => void): this; + addListener( + event: "lookup", + listener: (err: Error, address: string, family: string | number, host: string) => void, + ): this; + addListener(event: "ready", listener: () => void): this; + addListener(event: "timeout", listener: () => void): this; + emit(event: string | symbol, ...args: any[]): boolean; + emit(event: "close", hadError: boolean): boolean; + emit(event: "connect"): boolean; + emit(event: "connectionAttempt", ip: string, port: number, family: number): boolean; + emit(event: "connectionAttemptFailed", ip: string, port: number, family: number): boolean; + emit(event: "connectionAttemptTimeout", ip: string, port: number, family: number): boolean; + emit(event: "data", data: Buffer): boolean; + emit(event: "drain"): boolean; + emit(event: "end"): boolean; + emit(event: "error", err: Error): boolean; + emit(event: "lookup", err: Error, address: string, family: string | number, host: string): boolean; + emit(event: "ready"): boolean; + emit(event: "timeout"): boolean; + on(event: string, listener: (...args: any[]) => void): this; + on(event: "close", listener: (hadError: boolean) => void): this; + on(event: "connect", listener: () => void): this; + on(event: "connectionAttempt", listener: (ip: string, port: number, family: number) => void): this; + on(event: "connectionAttemptFailed", listener: (ip: string, port: number, family: number) => void): this; + on(event: "connectionAttemptTimeout", listener: (ip: string, port: number, family: number) => void): this; + on(event: "data", listener: (data: Buffer) => void): this; + on(event: "drain", listener: () => void): this; + on(event: "end", listener: () => void): this; + on(event: "error", listener: (err: Error) => void): this; + on( + event: "lookup", + listener: (err: Error, address: string, family: string | number, host: string) => void, + ): this; + on(event: "ready", listener: () => void): this; + on(event: "timeout", listener: () => void): this; + once(event: string, listener: (...args: any[]) => void): this; + once(event: "close", listener: (hadError: boolean) => void): this; + once(event: "connectionAttempt", listener: (ip: string, port: number, family: number) => void): this; + once(event: "connectionAttemptFailed", listener: (ip: string, port: number, family: number) => void): this; + once(event: "connectionAttemptTimeout", listener: (ip: string, port: number, family: number) => void): this; + once(event: "connect", listener: () => void): this; + once(event: "data", listener: (data: Buffer) => void): 
this; + once(event: "drain", listener: () => void): this; + once(event: "end", listener: () => void): this; + once(event: "error", listener: (err: Error) => void): this; + once( + event: "lookup", + listener: (err: Error, address: string, family: string | number, host: string) => void, + ): this; + once(event: "ready", listener: () => void): this; + once(event: "timeout", listener: () => void): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + prependListener(event: "close", listener: (hadError: boolean) => void): this; + prependListener(event: "connect", listener: () => void): this; + prependListener(event: "connectionAttempt", listener: (ip: string, port: number, family: number) => void): this; + prependListener( + event: "connectionAttemptFailed", + listener: (ip: string, port: number, family: number) => void, + ): this; + prependListener( + event: "connectionAttemptTimeout", + listener: (ip: string, port: number, family: number) => void, + ): this; + prependListener(event: "data", listener: (data: Buffer) => void): this; + prependListener(event: "drain", listener: () => void): this; + prependListener(event: "end", listener: () => void): this; + prependListener(event: "error", listener: (err: Error) => void): this; + prependListener( + event: "lookup", + listener: (err: Error, address: string, family: string | number, host: string) => void, + ): this; + prependListener(event: "ready", listener: () => void): this; + prependListener(event: "timeout", listener: () => void): this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + prependOnceListener(event: "close", listener: (hadError: boolean) => void): this; + prependOnceListener(event: "connect", listener: () => void): this; + prependOnceListener( + event: "connectionAttempt", + listener: (ip: string, port: number, family: number) => void, + ): this; + prependOnceListener( + event: "connectionAttemptFailed", + listener: (ip: string, port: number, family: number) => void, + ): this; + prependOnceListener( + event: "connectionAttemptTimeout", + listener: (ip: string, port: number, family: number) => void, + ): this; + prependOnceListener(event: "data", listener: (data: Buffer) => void): this; + prependOnceListener(event: "drain", listener: () => void): this; + prependOnceListener(event: "end", listener: () => void): this; + prependOnceListener(event: "error", listener: (err: Error) => void): this; + prependOnceListener( + event: "lookup", + listener: (err: Error, address: string, family: string | number, host: string) => void, + ): this; + prependOnceListener(event: "ready", listener: () => void): this; + prependOnceListener(event: "timeout", listener: () => void): this; + } + interface ListenOptions extends Abortable { + port?: number | undefined; + host?: string | undefined; + backlog?: number | undefined; + path?: string | undefined; + exclusive?: boolean | undefined; + readableAll?: boolean | undefined; + writableAll?: boolean | undefined; + /** + * @default false + */ + ipv6Only?: boolean | undefined; + } + interface ServerOpts { + /** + * Indicates whether half-opened TCP connections are allowed. + * @default false + */ + allowHalfOpen?: boolean | undefined; + /** + * Indicates whether the socket should be paused on incoming connections. + * @default false + */ + pauseOnConnect?: boolean | undefined; + /** + * If set to `true`, it disables the use of Nagle's algorithm immediately after a new incoming connection is received. 
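The overloads above give each `net.Socket` event a typed listener; for example, with family autoselection enabled the connection-attempt events report every address that is tried (a sketch with a placeholder host):

```js
import net from 'node:net';

const socket = net.connect({ port: 443, host: 'example.com', autoSelectFamily: true });

socket.on('connectionAttempt', (ip, port, family) => {
  console.log(`trying ${ip}:${port} (IPv${family})`);
});
socket.on('connectionAttemptFailed', (ip, port, family) => {
  console.log(`failed ${ip}:${port} (IPv${family})`);
});
socket.once('connect', () => {
  console.log('connected to', socket.remoteAddress);
  socket.end();
});
socket.on('error', (err) => console.error(err));
```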
+ * @default false + * @since v16.5.0 + */ + noDelay?: boolean | undefined; + /** + * If set to `true`, it enables keep-alive functionality on the socket immediately after a new incoming connection is received, + * similarly on what is done in `socket.setKeepAlive([enable][, initialDelay])`. + * @default false + * @since v16.5.0 + */ + keepAlive?: boolean | undefined; + /** + * If set to a positive number, it sets the initial delay before the first keepalive probe is sent on an idle socket. + * @default 0 + * @since v16.5.0 + */ + keepAliveInitialDelay?: number | undefined; + /** + * Optionally overrides all `net.Socket`s' `readableHighWaterMark` and `writableHighWaterMark`. + * @default See [stream.getDefaultHighWaterMark()](https://nodejs.org/docs/latest-v22.x/api/stream.html#streamgetdefaulthighwatermarkobjectmode). + * @since v18.17.0, v20.1.0 + */ + highWaterMark?: number | undefined; + } + interface DropArgument { + localAddress?: string; + localPort?: number; + localFamily?: string; + remoteAddress?: string; + remotePort?: number; + remoteFamily?: string; + } + /** + * This class is used to create a TCP or `IPC` server. + * @since v0.1.90 + */ + class Server extends EventEmitter { + constructor(connectionListener?: (socket: Socket) => void); + constructor(options?: ServerOpts, connectionListener?: (socket: Socket) => void); + /** + * Start a server listening for connections. A `net.Server` can be a TCP or + * an `IPC` server depending on what it listens to. + * + * Possible signatures: + * + * * `server.listen(handle[, backlog][, callback])` + * * `server.listen(options[, callback])` + * * `server.listen(path[, backlog][, callback])` for `IPC` servers + * * `server.listen([port[, host[, backlog]]][, callback])` for TCP servers + * + * This function is asynchronous. When the server starts listening, the `'listening'` event will be emitted. The last parameter `callback`will be added as a listener for the `'listening'` + * event. + * + * All `listen()` methods can take a `backlog` parameter to specify the maximum + * length of the queue of pending connections. The actual length will be determined + * by the OS through sysctl settings such as `tcp_max_syn_backlog` and `somaxconn` on Linux. The default value of this parameter is 511 (not 512). + * + * All {@link Socket} are set to `SO_REUSEADDR` (see [`socket(7)`](https://man7.org/linux/man-pages/man7/socket.7.html) for + * details). + * + * The `server.listen()` method can be called again if and only if there was an + * error during the first `server.listen()` call or `server.close()` has been + * called. Otherwise, an `ERR_SERVER_ALREADY_LISTEN` error will be thrown. + * + * One of the most common errors raised when listening is `EADDRINUSE`. + * This happens when another server is already listening on the requested`port`/`path`/`handle`. 
One way to handle this would be to retry + * after a certain amount of time: + * + * ```js + * server.on('error', (e) => { + * if (e.code === 'EADDRINUSE') { + * console.error('Address in use, retrying...'); + * setTimeout(() => { + * server.close(); + * server.listen(PORT, HOST); + * }, 1000); + * } + * }); + * ``` + */ + listen(port?: number, hostname?: string, backlog?: number, listeningListener?: () => void): this; + listen(port?: number, hostname?: string, listeningListener?: () => void): this; + listen(port?: number, backlog?: number, listeningListener?: () => void): this; + listen(port?: number, listeningListener?: () => void): this; + listen(path: string, backlog?: number, listeningListener?: () => void): this; + listen(path: string, listeningListener?: () => void): this; + listen(options: ListenOptions, listeningListener?: () => void): this; + listen(handle: any, backlog?: number, listeningListener?: () => void): this; + listen(handle: any, listeningListener?: () => void): this; + /** + * Stops the server from accepting new connections and keeps existing + * connections. This function is asynchronous, the server is finally closed + * when all connections are ended and the server emits a `'close'` event. + * The optional `callback` will be called once the `'close'` event occurs. Unlike + * that event, it will be called with an `Error` as its only argument if the server + * was not open when it was closed. + * @since v0.1.90 + * @param callback Called when the server is closed. + */ + close(callback?: (err?: Error) => void): this; + /** + * Returns the bound `address`, the address `family` name, and `port` of the server + * as reported by the operating system if listening on an IP socket + * (useful to find which port was assigned when getting an OS-assigned address):`{ port: 12346, family: 'IPv4', address: '127.0.0.1' }`. + * + * For a server listening on a pipe or Unix domain socket, the name is returned + * as a string. + * + * ```js + * const server = net.createServer((socket) => { + * socket.end('goodbye\n'); + * }).on('error', (err) => { + * // Handle errors here. + * throw err; + * }); + * + * // Grab an arbitrary unused port. + * server.listen(() => { + * console.log('opened server on', server.address()); + * }); + * ``` + * + * `server.address()` returns `null` before the `'listening'` event has been + * emitted or after calling `server.close()`. + * @since v0.1.90 + */ + address(): AddressInfo | string | null; + /** + * Asynchronously get the number of concurrent connections on the server. Works + * when sockets were sent to forks. + * + * Callback should take two arguments `err` and `count`. + * @since v0.9.7 + */ + getConnections(cb: (error: Error | null, count: number) => void): void; + /** + * Opposite of `unref()`, calling `ref()` on a previously `unref`ed server will _not_ let the program exit if it's the only server left (the default behavior). + * If the server is `ref`ed calling `ref()` again will have no effect. + * @since v0.9.1 + */ + ref(): this; + /** + * Calling `unref()` on a server will allow the program to exit if this is the only + * active server in the event system. If the server is already `unref`ed calling`unref()` again will have no effect. + * @since v0.9.1 + */ + unref(): this; + /** + * Set this property to reject connections when the server's connection count gets + * high. + * + * It is not recommended to use this option once a socket has been sent to a child + * with `child_process.fork()`. 
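A sketch combining several `net.Server` members declared above: listening on an OS-assigned port, reading it back with `address()`, counting connections, and closing:

```js
import net from 'node:net';

const server = net.createServer({ keepAlive: true }, (socket) => {
  socket.end('goodbye\n');
});

server.on('error', (err) => { throw err; });

// Port 0 asks the operating system for any unused port.
server.listen(0, '127.0.0.1', () => {
  const { address, port } = server.address();
  console.log(`listening on ${address}:${port}`);

  server.getConnections((err, count) => {
    if (!err) console.log(`open connections: ${count}`);
    server.close(() => console.log('server closed'));
  });
});
```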
+ * @since v0.2.0 + */ + maxConnections: number; + connections: number; + /** + * Indicates whether or not the server is listening for connections. + * @since v5.7.0 + */ + readonly listening: boolean; + /** + * events.EventEmitter + * 1. close + * 2. connection + * 3. error + * 4. listening + * 5. drop + */ + addListener(event: string, listener: (...args: any[]) => void): this; + addListener(event: "close", listener: () => void): this; + addListener(event: "connection", listener: (socket: Socket) => void): this; + addListener(event: "error", listener: (err: Error) => void): this; + addListener(event: "listening", listener: () => void): this; + addListener(event: "drop", listener: (data?: DropArgument) => void): this; + emit(event: string | symbol, ...args: any[]): boolean; + emit(event: "close"): boolean; + emit(event: "connection", socket: Socket): boolean; + emit(event: "error", err: Error): boolean; + emit(event: "listening"): boolean; + emit(event: "drop", data?: DropArgument): boolean; + on(event: string, listener: (...args: any[]) => void): this; + on(event: "close", listener: () => void): this; + on(event: "connection", listener: (socket: Socket) => void): this; + on(event: "error", listener: (err: Error) => void): this; + on(event: "listening", listener: () => void): this; + on(event: "drop", listener: (data?: DropArgument) => void): this; + once(event: string, listener: (...args: any[]) => void): this; + once(event: "close", listener: () => void): this; + once(event: "connection", listener: (socket: Socket) => void): this; + once(event: "error", listener: (err: Error) => void): this; + once(event: "listening", listener: () => void): this; + once(event: "drop", listener: (data?: DropArgument) => void): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + prependListener(event: "close", listener: () => void): this; + prependListener(event: "connection", listener: (socket: Socket) => void): this; + prependListener(event: "error", listener: (err: Error) => void): this; + prependListener(event: "listening", listener: () => void): this; + prependListener(event: "drop", listener: (data?: DropArgument) => void): this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + prependOnceListener(event: "close", listener: () => void): this; + prependOnceListener(event: "connection", listener: (socket: Socket) => void): this; + prependOnceListener(event: "error", listener: (err: Error) => void): this; + prependOnceListener(event: "listening", listener: () => void): this; + prependOnceListener(event: "drop", listener: (data?: DropArgument) => void): this; + /** + * Calls {@link Server.close()} and returns a promise that fulfills when the server has closed. + * @since v20.5.0 + */ + [Symbol.asyncDispose](): Promise; + } + type IPVersion = "ipv4" | "ipv6"; + /** + * The `BlockList` object can be used with some network APIs to specify rules for + * disabling inbound or outbound access to specific IP addresses, IP ranges, or + * IP subnets. + * @since v15.0.0, v14.18.0 + */ + class BlockList { + /** + * Adds a rule to block the given IP address. + * @since v15.0.0, v14.18.0 + * @param address An IPv4 or IPv6 address. + * @param [type='ipv4'] Either `'ipv4'` or `'ipv6'`. + */ + addAddress(address: string, type?: IPVersion): void; + addAddress(address: SocketAddress): void; + /** + * Adds a rule to block a range of IP addresses from `start` (inclusive) to`end` (inclusive). 
+ * @since v15.0.0, v14.18.0 + * @param start The starting IPv4 or IPv6 address in the range. + * @param end The ending IPv4 or IPv6 address in the range. + * @param [type='ipv4'] Either `'ipv4'` or `'ipv6'`. + */ + addRange(start: string, end: string, type?: IPVersion): void; + addRange(start: SocketAddress, end: SocketAddress): void; + /** + * Adds a rule to block a range of IP addresses specified as a subnet mask. + * @since v15.0.0, v14.18.0 + * @param net The network IPv4 or IPv6 address. + * @param prefix The number of CIDR prefix bits. For IPv4, this must be a value between `0` and `32`. For IPv6, this must be between `0` and `128`. + * @param [type='ipv4'] Either `'ipv4'` or `'ipv6'`. + */ + addSubnet(net: SocketAddress, prefix: number): void; + addSubnet(net: string, prefix: number, type?: IPVersion): void; + /** + * Returns `true` if the given IP address matches any of the rules added to the`BlockList`. + * + * ```js + * const blockList = new net.BlockList(); + * blockList.addAddress('123.123.123.123'); + * blockList.addRange('10.0.0.1', '10.0.0.10'); + * blockList.addSubnet('8592:757c:efae:4e45::', 64, 'ipv6'); + * + * console.log(blockList.check('123.123.123.123')); // Prints: true + * console.log(blockList.check('10.0.0.3')); // Prints: true + * console.log(blockList.check('222.111.111.222')); // Prints: false + * + * // IPv6 notation for IPv4 addresses works: + * console.log(blockList.check('::ffff:7b7b:7b7b', 'ipv6')); // Prints: true + * console.log(blockList.check('::ffff:123.123.123.123', 'ipv6')); // Prints: true + * ``` + * @since v15.0.0, v14.18.0 + * @param address The IP address to check + * @param [type='ipv4'] Either `'ipv4'` or `'ipv6'`. + */ + check(address: SocketAddress): boolean; + check(address: string, type?: IPVersion): boolean; + /** + * The list of rules added to the blocklist. + * @since v15.0.0, v14.18.0 + */ + rules: readonly string[]; + } + interface TcpNetConnectOpts extends TcpSocketConnectOpts, SocketConstructorOpts { + timeout?: number | undefined; + } + interface IpcNetConnectOpts extends IpcSocketConnectOpts, SocketConstructorOpts { + timeout?: number | undefined; + } + type NetConnectOpts = TcpNetConnectOpts | IpcNetConnectOpts; + /** + * Creates a new TCP or `IPC` server. + * + * If `allowHalfOpen` is set to `true`, when the other end of the socket + * signals the end of transmission, the server will only send back the end of + * transmission when `socket.end()` is explicitly called. For example, in the + * context of TCP, when a FIN packed is received, a FIN packed is sent + * back only when `socket.end()` is explicitly called. Until then the + * connection is half-closed (non-readable but still writable). See `'end'` event and [RFC 1122](https://tools.ietf.org/html/rfc1122) (section 4.2.2.13) for more information. + * + * If `pauseOnConnect` is set to `true`, then the socket associated with each + * incoming connection will be paused, and no data will be read from its handle. + * This allows connections to be passed between processes without any data being + * read by the original process. To begin reading data from a paused socket, call `socket.resume()`. + * + * The server can be a TCP server or an `IPC` server, depending on what it `listen()` to. + * + * Here is an example of a TCP echo server which listens for connections + * on port 8124: + * + * ```js + * import net from 'node:net'; + * const server = net.createServer((c) => { + * // 'connection' listener. 
+ * console.log('client connected'); + * c.on('end', () => { + * console.log('client disconnected'); + * }); + * c.write('hello\r\n'); + * c.pipe(c); + * }); + * server.on('error', (err) => { + * throw err; + * }); + * server.listen(8124, () => { + * console.log('server bound'); + * }); + * ``` + * + * Test this by using `telnet`: + * + * ```bash + * telnet localhost 8124 + * ``` + * + * To listen on the socket `/tmp/echo.sock`: + * + * ```js + * server.listen('/tmp/echo.sock', () => { + * console.log('server bound'); + * }); + * ``` + * + * Use `nc` to connect to a Unix domain socket server: + * + * ```bash + * nc -U /tmp/echo.sock + * ``` + * @since v0.5.0 + * @param connectionListener Automatically set as a listener for the {@link 'connection'} event. + */ + function createServer(connectionListener?: (socket: Socket) => void): Server; + function createServer(options?: ServerOpts, connectionListener?: (socket: Socket) => void): Server; + /** + * Aliases to {@link createConnection}. + * + * Possible signatures: + * + * * {@link connect} + * * {@link connect} for `IPC` connections. + * * {@link connect} for TCP connections. + */ + function connect(options: NetConnectOpts, connectionListener?: () => void): Socket; + function connect(port: number, host?: string, connectionListener?: () => void): Socket; + function connect(path: string, connectionListener?: () => void): Socket; + /** + * A factory function, which creates a new {@link Socket}, + * immediately initiates connection with `socket.connect()`, + * then returns the `net.Socket` that starts the connection. + * + * When the connection is established, a `'connect'` event will be emitted + * on the returned socket. The last parameter `connectListener`, if supplied, + * will be added as a listener for the `'connect'` event **once**. + * + * Possible signatures: + * + * * {@link createConnection} + * * {@link createConnection} for `IPC` connections. + * * {@link createConnection} for TCP connections. + * + * The {@link connect} function is an alias to this function. + */ + function createConnection(options: NetConnectOpts, connectionListener?: () => void): Socket; + function createConnection(port: number, host?: string, connectionListener?: () => void): Socket; + function createConnection(path: string, connectionListener?: () => void): Socket; + /** + * Gets the current default value of the `autoSelectFamily` option of `socket.connect(options)`. + * The initial default value is `true`, unless the command line option`--no-network-family-autoselection` is provided. + * @since v19.4.0 + */ + function getDefaultAutoSelectFamily(): boolean; + /** + * Sets the default value of the `autoSelectFamily` option of `socket.connect(options)`. + * @since v19.4.0 + */ + function setDefaultAutoSelectFamily(value: boolean): void; + /** + * Gets the current default value of the `autoSelectFamilyAttemptTimeout` option of `socket.connect(options)`. + * The initial default value is `250` or the value specified via the command line option `--network-family-autoselection-attempt-timeout`. + * @returns The current default value of the `autoSelectFamilyAttemptTimeout` option. + * @since v19.8.0, v18.8.0 + */ + function getDefaultAutoSelectFamilyAttemptTimeout(): number; + /** + * Sets the default value of the `autoSelectFamilyAttemptTimeout` option of `socket.connect(options)`. + * @param value The new default value, which must be a positive number. If the number is less than `10`, the value `10` is used instead. 
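These module-level getters and setters adjust the autoselection defaults applied to every subsequent `socket.connect(options)` call; a brief sketch (the host is a placeholder):

```js
import net from 'node:net';

// Inspect the current process-wide defaults.
console.log(net.getDefaultAutoSelectFamily());               // true unless disabled on the CLI
console.log(net.getDefaultAutoSelectFamilyAttemptTimeout()); // e.g. 250 (milliseconds)

// Give each address-family attempt more time before falling back.
net.setDefaultAutoSelectFamilyAttemptTimeout(500);

// Individual connections can still opt out explicitly.
const socket = net.connect({ port: 443, host: 'example.com', autoSelectFamily: false });
socket.once('connect', () => socket.end());
socket.on('error', (err) => console.error(err));
```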
The initial default value is `250` or the value specified via the command line + * option `--network-family-autoselection-attempt-timeout`. + * @since v19.8.0, v18.8.0 + */ + function setDefaultAutoSelectFamilyAttemptTimeout(value: number): void; + /** + * Returns `6` if `input` is an IPv6 address. Returns `4` if `input` is an IPv4 + * address in [dot-decimal notation](https://en.wikipedia.org/wiki/Dot-decimal_notation) with no leading zeroes. Otherwise, returns`0`. + * + * ```js + * net.isIP('::1'); // returns 6 + * net.isIP('127.0.0.1'); // returns 4 + * net.isIP('127.000.000.001'); // returns 0 + * net.isIP('127.0.0.1/24'); // returns 0 + * net.isIP('fhqwhgads'); // returns 0 + * ``` + * @since v0.3.0 + */ + function isIP(input: string): number; + /** + * Returns `true` if `input` is an IPv4 address in [dot-decimal notation](https://en.wikipedia.org/wiki/Dot-decimal_notation) with no + * leading zeroes. Otherwise, returns `false`. + * + * ```js + * net.isIPv4('127.0.0.1'); // returns true + * net.isIPv4('127.000.000.001'); // returns false + * net.isIPv4('127.0.0.1/24'); // returns false + * net.isIPv4('fhqwhgads'); // returns false + * ``` + * @since v0.3.0 + */ + function isIPv4(input: string): boolean; + /** + * Returns `true` if `input` is an IPv6 address. Otherwise, returns `false`. + * + * ```js + * net.isIPv6('::1'); // returns true + * net.isIPv6('fhqwhgads'); // returns false + * ``` + * @since v0.3.0 + */ + function isIPv6(input: string): boolean; + interface SocketAddressInitOptions { + /** + * The network address as either an IPv4 or IPv6 string. + * @default 127.0.0.1 + */ + address?: string | undefined; + /** + * @default `'ipv4'` + */ + family?: IPVersion | undefined; + /** + * An IPv6 flow-label used only if `family` is `'ipv6'`. + * @default 0 + */ + flowlabel?: number | undefined; + /** + * An IP port. + * @default 0 + */ + port?: number | undefined; + } + /** + * @since v15.14.0, v14.18.0 + */ + class SocketAddress { + constructor(options: SocketAddressInitOptions); + /** + * Either \`'ipv4'\` or \`'ipv6'\`. + * @since v15.14.0, v14.18.0 + */ + readonly address: string; + /** + * Either \`'ipv4'\` or \`'ipv6'\`. + * @since v15.14.0, v14.18.0 + */ + readonly family: IPVersion; + /** + * @since v15.14.0, v14.18.0 + */ + readonly port: number; + /** + * @since v15.14.0, v14.18.0 + */ + readonly flowlabel: number; + } +} +declare module "node:net" { + export * from "net"; +} diff --git a/node_modules/@types/node-fetch/node_modules/@types/node/os.d.ts b/node_modules/@types/node-fetch/node_modules/@types/node/os.d.ts new file mode 100644 index 0000000..7f30535 --- /dev/null +++ b/node_modules/@types/node-fetch/node_modules/@types/node/os.d.ts @@ -0,0 +1,495 @@ +/** + * The `node:os` module provides operating system-related utility methods and + * properties. It can be accessed using: + * + * ```js + * import os from 'node:os'; + * ``` + * @see [source](https://github.com/nodejs/node/blob/v22.x/lib/os.js) + */ +declare module "os" { + interface CpuInfo { + model: string; + speed: number; + times: { + /** The number of milliseconds the CPU has spent in user mode. */ + user: number; + /** The number of milliseconds the CPU has spent in nice mode. */ + nice: number; + /** The number of milliseconds the CPU has spent in sys mode. */ + sys: number; + /** The number of milliseconds the CPU has spent in idle mode. */ + idle: number; + /** The number of milliseconds the CPU has spent in irq mode. 
*/ + irq: number; + }; + } + interface NetworkInterfaceBase { + address: string; + netmask: string; + mac: string; + internal: boolean; + cidr: string | null; + } + interface NetworkInterfaceInfoIPv4 extends NetworkInterfaceBase { + family: "IPv4"; + scopeid?: undefined; + } + interface NetworkInterfaceInfoIPv6 extends NetworkInterfaceBase { + family: "IPv6"; + scopeid: number; + } + interface UserInfo { + username: T; + uid: number; + gid: number; + shell: T | null; + homedir: T; + } + type NetworkInterfaceInfo = NetworkInterfaceInfoIPv4 | NetworkInterfaceInfoIPv6; + /** + * Returns the host name of the operating system as a string. + * @since v0.3.3 + */ + function hostname(): string; + /** + * Returns an array containing the 1, 5, and 15 minute load averages. + * + * The load average is a measure of system activity calculated by the operating + * system and expressed as a fractional number. + * + * The load average is a Unix-specific concept. On Windows, the return value is + * always `[0, 0, 0]`. + * @since v0.3.3 + */ + function loadavg(): number[]; + /** + * Returns the system uptime in number of seconds. + * @since v0.3.3 + */ + function uptime(): number; + /** + * Returns the amount of free system memory in bytes as an integer. + * @since v0.3.3 + */ + function freemem(): number; + /** + * Returns the total amount of system memory in bytes as an integer. + * @since v0.3.3 + */ + function totalmem(): number; + /** + * Returns an array of objects containing information about each logical CPU core. + * The array will be empty if no CPU information is available, such as if the `/proc` file system is unavailable. + * + * The properties included on each object include: + * + * ```js + * [ + * { + * model: 'Intel(R) Core(TM) i7 CPU 860 @ 2.80GHz', + * speed: 2926, + * times: { + * user: 252020, + * nice: 0, + * sys: 30340, + * idle: 1070356870, + * irq: 0, + * }, + * }, + * { + * model: 'Intel(R) Core(TM) i7 CPU 860 @ 2.80GHz', + * speed: 2926, + * times: { + * user: 306960, + * nice: 0, + * sys: 26980, + * idle: 1071569080, + * irq: 0, + * }, + * }, + * { + * model: 'Intel(R) Core(TM) i7 CPU 860 @ 2.80GHz', + * speed: 2926, + * times: { + * user: 248450, + * nice: 0, + * sys: 21750, + * idle: 1070919370, + * irq: 0, + * }, + * }, + * { + * model: 'Intel(R) Core(TM) i7 CPU 860 @ 2.80GHz', + * speed: 2926, + * times: { + * user: 256880, + * nice: 0, + * sys: 19430, + * idle: 1070905480, + * irq: 20, + * }, + * }, + * ] + * ``` + * + * `nice` values are POSIX-only. On Windows, the `nice` values of all processors + * are always 0. + * + * `os.cpus().length` should not be used to calculate the amount of parallelism + * available to an application. Use {@link availableParallelism} for this purpose. + * @since v0.3.3 + */ + function cpus(): CpuInfo[]; + /** + * Returns an estimate of the default amount of parallelism a program should use. + * Always returns a value greater than zero. + * + * This function is a small wrapper about libuv's [`uv_available_parallelism()`](https://docs.libuv.org/en/v1.x/misc.html#c.uv_available_parallelism). + * @since v19.4.0, v18.14.0 + */ + function availableParallelism(): number; + /** + * Returns the operating system name as returned by [`uname(3)`](https://linux.die.net/man/3/uname). For example, it + * returns `'Linux'` on Linux, `'Darwin'` on macOS, and `'Windows_NT'` on Windows. 
+ * + * See [https://en.wikipedia.org/wiki/Uname#Examples](https://en.wikipedia.org/wiki/Uname#Examples) for additional information + * about the output of running [`uname(3)`](https://linux.die.net/man/3/uname) on various operating systems. + * @since v0.3.3 + */ + function type(): string; + /** + * Returns the operating system as a string. + * + * On POSIX systems, the operating system release is determined by calling [`uname(3)`](https://linux.die.net/man/3/uname). On Windows, `GetVersionExW()` is used. See + * [https://en.wikipedia.org/wiki/Uname#Examples](https://en.wikipedia.org/wiki/Uname#Examples) for more information. + * @since v0.3.3 + */ + function release(): string; + /** + * Returns an object containing network interfaces that have been assigned a + * network address. + * + * Each key on the returned object identifies a network interface. The associated + * value is an array of objects that each describe an assigned network address. + * + * The properties available on the assigned network address object include: + * + * ```js + * { + * lo: [ + * { + * address: '127.0.0.1', + * netmask: '255.0.0.0', + * family: 'IPv4', + * mac: '00:00:00:00:00:00', + * internal: true, + * cidr: '127.0.0.1/8' + * }, + * { + * address: '::1', + * netmask: 'ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff', + * family: 'IPv6', + * mac: '00:00:00:00:00:00', + * scopeid: 0, + * internal: true, + * cidr: '::1/128' + * } + * ], + * eth0: [ + * { + * address: '192.168.1.108', + * netmask: '255.255.255.0', + * family: 'IPv4', + * mac: '01:02:03:0a:0b:0c', + * internal: false, + * cidr: '192.168.1.108/24' + * }, + * { + * address: 'fe80::a00:27ff:fe4e:66a1', + * netmask: 'ffff:ffff:ffff:ffff::', + * family: 'IPv6', + * mac: '01:02:03:0a:0b:0c', + * scopeid: 1, + * internal: false, + * cidr: 'fe80::a00:27ff:fe4e:66a1/64' + * } + * ] + * } + * ``` + * @since v0.6.0 + */ + function networkInterfaces(): NodeJS.Dict; + /** + * Returns the string path of the current user's home directory. + * + * On POSIX, it uses the `$HOME` environment variable if defined. Otherwise it + * uses the [effective UID](https://en.wikipedia.org/wiki/User_identifier#Effective_user_ID) to look up the user's home directory. + * + * On Windows, it uses the `USERPROFILE` environment variable if defined. + * Otherwise it uses the path to the profile directory of the current user. + * @since v2.3.0 + */ + function homedir(): string; + /** + * Returns information about the currently effective user. On POSIX platforms, + * this is typically a subset of the password file. The returned object includes + * the `username`, `uid`, `gid`, `shell`, and `homedir`. On Windows, the `uid` and `gid` fields are `-1`, and `shell` is `null`. + * + * The value of `homedir` returned by `os.userInfo()` is provided by the operating + * system. This differs from the result of `os.homedir()`, which queries + * environment variables for the home directory before falling back to the + * operating system response. + * + * Throws a [`SystemError`](https://nodejs.org/docs/latest-v22.x/api/errors.html#class-systemerror) if a user has no `username` or `homedir`. 
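A short sketch exercising several of the `node:os` helpers documented above:

```js
import os from 'node:os';

console.log(`${os.type()} ${os.release()} on ${os.hostname()}`);
console.log(`uptime: ${Math.round(os.uptime())}s, load: ${os.loadavg().join(', ')}`);
console.log(`memory: ${os.freemem()} free of ${os.totalmem()} bytes`);

// Prefer availableParallelism() over os.cpus().length when sizing worker pools.
console.log(`recommended parallelism: ${os.availableParallelism()}`);

// List externally visible IPv4 addresses per interface.
for (const [name, addresses] of Object.entries(os.networkInterfaces())) {
  for (const addr of addresses ?? []) {
    if (addr.family === 'IPv4' && !addr.internal) {
      console.log(`${name}: ${addr.cidr}`);
    }
  }
}
```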
+ * @since v6.0.0 + */ + function userInfo(options: { encoding: "buffer" }): UserInfo; + function userInfo(options?: { encoding: BufferEncoding }): UserInfo; + type SignalConstants = { + [key in NodeJS.Signals]: number; + }; + namespace constants { + const UV_UDP_REUSEADDR: number; + namespace signals {} + const signals: SignalConstants; + namespace errno { + const E2BIG: number; + const EACCES: number; + const EADDRINUSE: number; + const EADDRNOTAVAIL: number; + const EAFNOSUPPORT: number; + const EAGAIN: number; + const EALREADY: number; + const EBADF: number; + const EBADMSG: number; + const EBUSY: number; + const ECANCELED: number; + const ECHILD: number; + const ECONNABORTED: number; + const ECONNREFUSED: number; + const ECONNRESET: number; + const EDEADLK: number; + const EDESTADDRREQ: number; + const EDOM: number; + const EDQUOT: number; + const EEXIST: number; + const EFAULT: number; + const EFBIG: number; + const EHOSTUNREACH: number; + const EIDRM: number; + const EILSEQ: number; + const EINPROGRESS: number; + const EINTR: number; + const EINVAL: number; + const EIO: number; + const EISCONN: number; + const EISDIR: number; + const ELOOP: number; + const EMFILE: number; + const EMLINK: number; + const EMSGSIZE: number; + const EMULTIHOP: number; + const ENAMETOOLONG: number; + const ENETDOWN: number; + const ENETRESET: number; + const ENETUNREACH: number; + const ENFILE: number; + const ENOBUFS: number; + const ENODATA: number; + const ENODEV: number; + const ENOENT: number; + const ENOEXEC: number; + const ENOLCK: number; + const ENOLINK: number; + const ENOMEM: number; + const ENOMSG: number; + const ENOPROTOOPT: number; + const ENOSPC: number; + const ENOSR: number; + const ENOSTR: number; + const ENOSYS: number; + const ENOTCONN: number; + const ENOTDIR: number; + const ENOTEMPTY: number; + const ENOTSOCK: number; + const ENOTSUP: number; + const ENOTTY: number; + const ENXIO: number; + const EOPNOTSUPP: number; + const EOVERFLOW: number; + const EPERM: number; + const EPIPE: number; + const EPROTO: number; + const EPROTONOSUPPORT: number; + const EPROTOTYPE: number; + const ERANGE: number; + const EROFS: number; + const ESPIPE: number; + const ESRCH: number; + const ESTALE: number; + const ETIME: number; + const ETIMEDOUT: number; + const ETXTBSY: number; + const EWOULDBLOCK: number; + const EXDEV: number; + const WSAEINTR: number; + const WSAEBADF: number; + const WSAEACCES: number; + const WSAEFAULT: number; + const WSAEINVAL: number; + const WSAEMFILE: number; + const WSAEWOULDBLOCK: number; + const WSAEINPROGRESS: number; + const WSAEALREADY: number; + const WSAENOTSOCK: number; + const WSAEDESTADDRREQ: number; + const WSAEMSGSIZE: number; + const WSAEPROTOTYPE: number; + const WSAENOPROTOOPT: number; + const WSAEPROTONOSUPPORT: number; + const WSAESOCKTNOSUPPORT: number; + const WSAEOPNOTSUPP: number; + const WSAEPFNOSUPPORT: number; + const WSAEAFNOSUPPORT: number; + const WSAEADDRINUSE: number; + const WSAEADDRNOTAVAIL: number; + const WSAENETDOWN: number; + const WSAENETUNREACH: number; + const WSAENETRESET: number; + const WSAECONNABORTED: number; + const WSAECONNRESET: number; + const WSAENOBUFS: number; + const WSAEISCONN: number; + const WSAENOTCONN: number; + const WSAESHUTDOWN: number; + const WSAETOOMANYREFS: number; + const WSAETIMEDOUT: number; + const WSAECONNREFUSED: number; + const WSAELOOP: number; + const WSAENAMETOOLONG: number; + const WSAEHOSTDOWN: number; + const WSAEHOSTUNREACH: number; + const WSAENOTEMPTY: number; + const WSAEPROCLIM: number; + 
const WSAEUSERS: number; + const WSAEDQUOT: number; + const WSAESTALE: number; + const WSAEREMOTE: number; + const WSASYSNOTREADY: number; + const WSAVERNOTSUPPORTED: number; + const WSANOTINITIALISED: number; + const WSAEDISCON: number; + const WSAENOMORE: number; + const WSAECANCELLED: number; + const WSAEINVALIDPROCTABLE: number; + const WSAEINVALIDPROVIDER: number; + const WSAEPROVIDERFAILEDINIT: number; + const WSASYSCALLFAILURE: number; + const WSASERVICE_NOT_FOUND: number; + const WSATYPE_NOT_FOUND: number; + const WSA_E_NO_MORE: number; + const WSA_E_CANCELLED: number; + const WSAEREFUSED: number; + } + namespace dlopen { + const RTLD_LAZY: number; + const RTLD_NOW: number; + const RTLD_GLOBAL: number; + const RTLD_LOCAL: number; + const RTLD_DEEPBIND: number; + } + namespace priority { + const PRIORITY_LOW: number; + const PRIORITY_BELOW_NORMAL: number; + const PRIORITY_NORMAL: number; + const PRIORITY_ABOVE_NORMAL: number; + const PRIORITY_HIGH: number; + const PRIORITY_HIGHEST: number; + } + } + const devNull: string; + /** + * The operating system-specific end-of-line marker. + * * `\n` on POSIX + * * `\r\n` on Windows + */ + const EOL: string; + /** + * Returns the operating system CPU architecture for which the Node.js binary was + * compiled. Possible values are `'arm'`, `'arm64'`, `'ia32'`, `'loong64'`, `'mips'`, `'mipsel'`, `'ppc'`, `'ppc64'`, `'riscv64'`, `'s390'`, `'s390x'`, + * and `'x64'`. + * + * The return value is equivalent to [process.arch](https://nodejs.org/docs/latest-v22.x/api/process.html#processarch). + * @since v0.5.0 + */ + function arch(): string; + /** + * Returns a string identifying the kernel version. + * + * On POSIX systems, the operating system release is determined by calling [`uname(3)`](https://linux.die.net/man/3/uname). On Windows, `RtlGetVersion()` is used, and if it is not + * available, `GetVersionExW()` will be used. See [https://en.wikipedia.org/wiki/Uname#Examples](https://en.wikipedia.org/wiki/Uname#Examples) for more information. + * @since v13.11.0, v12.17.0 + */ + function version(): string; + /** + * Returns a string identifying the operating system platform for which + * the Node.js binary was compiled. The value is set at compile time. + * Possible values are `'aix'`, `'darwin'`, `'freebsd'`, `'linux'`, `'openbsd'`, `'sunos'`, and `'win32'`. + * + * The return value is equivalent to `process.platform`. + * + * The value `'android'` may also be returned if Node.js is built on the Android + * operating system. [Android support is experimental](https://github.com/nodejs/node/blob/HEAD/BUILDING.md#androidandroid-based-devices-eg-firefox-os). + * @since v0.5.0 + */ + function platform(): NodeJS.Platform; + /** + * Returns the machine type as a string, such as `arm`, `arm64`, `aarch64`, `mips`, `mips64`, `ppc64`, `ppc64le`, `s390`, `s390x`, `i386`, `i686`, `x86_64`. + * + * On POSIX systems, the machine type is determined by calling [`uname(3)`](https://linux.die.net/man/3/uname). On Windows, `RtlGetVersion()` is used, and if it is not + * available, `GetVersionExW()` will be used. See [https://en.wikipedia.org/wiki/Uname#Examples](https://en.wikipedia.org/wiki/Uname#Examples) for more information. + * @since v18.9.0, v16.18.0 + */ + function machine(): string; + /** + * Returns the operating system's default directory for temporary files as a + * string. + * @since v0.9.9 + */ + function tmpdir(): string; + /** + * Returns a string identifying the endianness of the CPU for which the Node.js + * binary was compiled. 
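+ *
+ * A minimal sketch (the result is fixed for a given Node.js build):
+ *
+ * ```js
+ * import os from 'node:os';
+ *
+ * console.log(os.endianness()); // 'LE' on x64 and most ARM systems
+ * ```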
+ * + * Possible values are `'BE'` for big endian and `'LE'` for little endian. + * @since v0.9.4 + */ + function endianness(): "BE" | "LE"; + /** + * Returns the scheduling priority for the process specified by `pid`. If `pid` is + * not provided or is `0`, the priority of the current process is returned. + * @since v10.10.0 + * @param [pid=0] The process ID to retrieve scheduling priority for. + */ + function getPriority(pid?: number): number; + /** + * Attempts to set the scheduling priority for the process specified by `pid`. If `pid` is not provided or is `0`, the process ID of the current process is used. + * + * The `priority` input must be an integer between `-20` (high priority) and `19` (low priority). Due to differences between Unix priority levels and Windows + * priority classes, `priority` is mapped to one of six priority constants in `os.constants.priority`. When retrieving a process priority level, this range + * mapping may cause the return value to be slightly different on Windows. To avoid + * confusion, set `priority` to one of the priority constants. + * + * On Windows, setting priority to `PRIORITY_HIGHEST` requires elevated user + * privileges. Otherwise the set priority will be silently reduced to `PRIORITY_HIGH`. + * @since v10.10.0 + * @param [pid=0] The process ID to set scheduling priority for. + * @param priority The scheduling priority to assign to the process. + */ + function setPriority(priority: number): void; + function setPriority(pid: number, priority: number): void; +} +declare module "node:os" { + export * from "os"; +} diff --git a/node_modules/@types/node-fetch/node_modules/@types/node/package.json b/node_modules/@types/node-fetch/node_modules/@types/node/package.json new file mode 100644 index 0000000..92f98b2 --- /dev/null +++ b/node_modules/@types/node-fetch/node_modules/@types/node/package.json @@ -0,0 +1,220 @@ +{ + "name": "@types/node", + "version": "22.10.2", + "description": "TypeScript definitions for node", + "homepage": "https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/node", + "license": "MIT", + "contributors": [ + { + "name": "Microsoft TypeScript", + "githubUsername": "Microsoft", + "url": "https://github.com/Microsoft" + }, + { + "name": "Alberto Schiabel", + "githubUsername": "jkomyno", + "url": "https://github.com/jkomyno" + }, + { + "name": "Alvis HT Tang", + "githubUsername": "alvis", + "url": "https://github.com/alvis" + }, + { + "name": "Andrew Makarov", + "githubUsername": "r3nya", + "url": "https://github.com/r3nya" + }, + { + "name": "Benjamin Toueg", + "githubUsername": "btoueg", + "url": "https://github.com/btoueg" + }, + { + "name": "Chigozirim C.", + "githubUsername": "smac89", + "url": "https://github.com/smac89" + }, + { + "name": "David Junger", + "githubUsername": "touffy", + "url": "https://github.com/touffy" + }, + { + "name": "Deividas Bakanas", + "githubUsername": "DeividasBakanas", + "url": "https://github.com/DeividasBakanas" + }, + { + "name": "Eugene Y. Q. 
Shen", + "githubUsername": "eyqs", + "url": "https://github.com/eyqs" + }, + { + "name": "Hannes Magnusson", + "githubUsername": "Hannes-Magnusson-CK", + "url": "https://github.com/Hannes-Magnusson-CK" + }, + { + "name": "Huw", + "githubUsername": "hoo29", + "url": "https://github.com/hoo29" + }, + { + "name": "Kelvin Jin", + "githubUsername": "kjin", + "url": "https://github.com/kjin" + }, + { + "name": "Klaus Meinhardt", + "githubUsername": "ajafff", + "url": "https://github.com/ajafff" + }, + { + "name": "Lishude", + "githubUsername": "islishude", + "url": "https://github.com/islishude" + }, + { + "name": "Mariusz Wiktorczyk", + "githubUsername": "mwiktorczyk", + "url": "https://github.com/mwiktorczyk" + }, + { + "name": "Mohsen Azimi", + "githubUsername": "mohsen1", + "url": "https://github.com/mohsen1" + }, + { + "name": "Nikita Galkin", + "githubUsername": "galkin", + "url": "https://github.com/galkin" + }, + { + "name": "Parambir Singh", + "githubUsername": "parambirs", + "url": "https://github.com/parambirs" + }, + { + "name": "Sebastian Silbermann", + "githubUsername": "eps1lon", + "url": "https://github.com/eps1lon" + }, + { + "name": "Thomas den Hollander", + "githubUsername": "ThomasdenH", + "url": "https://github.com/ThomasdenH" + }, + { + "name": "Wilco Bakker", + "githubUsername": "WilcoBakker", + "url": "https://github.com/WilcoBakker" + }, + { + "name": "wwwy3y3", + "githubUsername": "wwwy3y3", + "url": "https://github.com/wwwy3y3" + }, + { + "name": "Samuel Ainsworth", + "githubUsername": "samuela", + "url": "https://github.com/samuela" + }, + { + "name": "Kyle Uehlein", + "githubUsername": "kuehlein", + "url": "https://github.com/kuehlein" + }, + { + "name": "Thanik Bhongbhibhat", + "githubUsername": "bhongy", + "url": "https://github.com/bhongy" + }, + { + "name": "Marcin Kopacz", + "githubUsername": "chyzwar", + "url": "https://github.com/chyzwar" + }, + { + "name": "Trivikram Kamat", + "githubUsername": "trivikr", + "url": "https://github.com/trivikr" + }, + { + "name": "Junxiao Shi", + "githubUsername": "yoursunny", + "url": "https://github.com/yoursunny" + }, + { + "name": "Ilia Baryshnikov", + "githubUsername": "qwelias", + "url": "https://github.com/qwelias" + }, + { + "name": "ExE Boss", + "githubUsername": "ExE-Boss", + "url": "https://github.com/ExE-Boss" + }, + { + "name": "Piotr Błażejewicz", + "githubUsername": "peterblazejewicz", + "url": "https://github.com/peterblazejewicz" + }, + { + "name": "Anna Henningsen", + "githubUsername": "addaleax", + "url": "https://github.com/addaleax" + }, + { + "name": "Victor Perin", + "githubUsername": "victorperin", + "url": "https://github.com/victorperin" + }, + { + "name": "NodeJS Contributors", + "githubUsername": "NodeJS", + "url": "https://github.com/NodeJS" + }, + { + "name": "Linus Unnebäck", + "githubUsername": "LinusU", + "url": "https://github.com/LinusU" + }, + { + "name": "wafuwafu13", + "githubUsername": "wafuwafu13", + "url": "https://github.com/wafuwafu13" + }, + { + "name": "Matteo Collina", + "githubUsername": "mcollina", + "url": "https://github.com/mcollina" + }, + { + "name": "Dmitry Semigradsky", + "githubUsername": "Semigradsky", + "url": "https://github.com/Semigradsky" + } + ], + "main": "", + "types": "index.d.ts", + "typesVersions": { + "<=5.6": { + "*": [ + "ts5.6/*" + ] + } + }, + "repository": { + "type": "git", + "url": "https://github.com/DefinitelyTyped/DefinitelyTyped.git", + "directory": "types/node" + }, + "scripts": {}, + "dependencies": { + "undici-types": "~6.20.0" + }, + 
"peerDependencies": {}, + "typesPublisherContentHash": "1c1003be2fa8d4f16936ac129ec72142249d4a14af58831bef4147ca7035833b", + "typeScriptVersion": "5.0" +} \ No newline at end of file diff --git a/node_modules/@types/node-fetch/node_modules/@types/node/path.d.ts b/node_modules/@types/node-fetch/node_modules/@types/node/path.d.ts new file mode 100644 index 0000000..25bfc80 --- /dev/null +++ b/node_modules/@types/node-fetch/node_modules/@types/node/path.d.ts @@ -0,0 +1,200 @@ +declare module "path/posix" { + import path = require("path"); + export = path; +} +declare module "path/win32" { + import path = require("path"); + export = path; +} +/** + * The `node:path` module provides utilities for working with file and directory + * paths. It can be accessed using: + * + * ```js + * import path from 'node:path'; + * ``` + * @see [source](https://github.com/nodejs/node/blob/v22.x/lib/path.js) + */ +declare module "path" { + namespace path { + /** + * A parsed path object generated by path.parse() or consumed by path.format(). + */ + interface ParsedPath { + /** + * The root of the path such as '/' or 'c:\' + */ + root: string; + /** + * The full directory path such as '/home/user/dir' or 'c:\path\dir' + */ + dir: string; + /** + * The file name including extension (if any) such as 'index.html' + */ + base: string; + /** + * The file extension (if any) such as '.html' + */ + ext: string; + /** + * The file name without extension (if any) such as 'index' + */ + name: string; + } + interface FormatInputPathObject { + /** + * The root of the path such as '/' or 'c:\' + */ + root?: string | undefined; + /** + * The full directory path such as '/home/user/dir' or 'c:\path\dir' + */ + dir?: string | undefined; + /** + * The file name including extension (if any) such as 'index.html' + */ + base?: string | undefined; + /** + * The file extension (if any) such as '.html' + */ + ext?: string | undefined; + /** + * The file name without extension (if any) such as 'index' + */ + name?: string | undefined; + } + interface PlatformPath { + /** + * Normalize a string path, reducing '..' and '.' parts. + * When multiple slashes are found, they're replaced by a single one; when the path contains a trailing slash, it is preserved. On Windows backslashes are used. + * + * @param path string path to normalize. + * @throws {TypeError} if `path` is not a string. + */ + normalize(path: string): string; + /** + * Join all arguments together and normalize the resulting path. + * + * @param paths paths to join. + * @throws {TypeError} if any of the path segments is not a string. + */ + join(...paths: string[]): string; + /** + * The right-most parameter is considered {to}. Other parameters are considered an array of {from}. + * + * Starting from leftmost {from} parameter, resolves {to} to an absolute path. + * + * If {to} isn't already absolute, {from} arguments are prepended in right to left order, + * until an absolute path is found. If after using all {from} paths still no absolute path is found, + * the current working directory is used as well. The resulting path is normalized, + * and trailing slashes are removed unless the path gets resolved to the root directory. + * + * @param paths A sequence of paths or path segments. + * @throws {TypeError} if any of the arguments is not a string. + */ + resolve(...paths: string[]): string; + /** + * The `path.matchesGlob()` method determines if `path` matches the `pattern`. + * @param path The path to glob-match against. + * @param pattern The glob to check the path against. 
+ * @returns Whether or not the `path` matched the `pattern`. + * @throws {TypeError} if `path` or `pattern` are not strings. + * @since v22.5.0 + */ + matchesGlob(path: string, pattern: string): boolean; + /** + * Determines whether {path} is an absolute path. An absolute path will always resolve to the same location, regardless of the working directory. + * + * If the given {path} is a zero-length string, `false` will be returned. + * + * @param path path to test. + * @throws {TypeError} if `path` is not a string. + */ + isAbsolute(path: string): boolean; + /** + * Solve the relative path from {from} to {to} based on the current working directory. + * At times we have two absolute paths, and we need to derive the relative path from one to the other. This is actually the reverse transform of path.resolve. + * + * @throws {TypeError} if either `from` or `to` is not a string. + */ + relative(from: string, to: string): string; + /** + * Return the directory name of a path. Similar to the Unix dirname command. + * + * @param path the path to evaluate. + * @throws {TypeError} if `path` is not a string. + */ + dirname(path: string): string; + /** + * Return the last portion of a path. Similar to the Unix basename command. + * Often used to extract the file name from a fully qualified path. + * + * @param path the path to evaluate. + * @param suffix optionally, an extension to remove from the result. + * @throws {TypeError} if `path` is not a string or if `ext` is given and is not a string. + */ + basename(path: string, suffix?: string): string; + /** + * Return the extension of the path, from the last '.' to end of string in the last portion of the path. + * If there is no '.' in the last portion of the path or the first character of it is '.', then it returns an empty string. + * + * @param path the path to evaluate. + * @throws {TypeError} if `path` is not a string. + */ + extname(path: string): string; + /** + * The platform-specific file separator. '\\' or '/'. + */ + readonly sep: "\\" | "/"; + /** + * The platform-specific file delimiter. ';' or ':'. + */ + readonly delimiter: ";" | ":"; + /** + * Returns an object from a path string - the opposite of format(). + * + * @param path path to evaluate. + * @throws {TypeError} if `path` is not a string. + */ + parse(path: string): ParsedPath; + /** + * Returns a path string from an object - the opposite of parse(). + * + * @param pathObject path to evaluate. + */ + format(pathObject: FormatInputPathObject): string; + /** + * On Windows systems only, returns an equivalent namespace-prefixed path for the given path. + * If path is not a string, path will be returned without modifications. + * This method is meaningful only on Windows system. + * On POSIX systems, the method is non-operational and always returns path without modifications. + */ + toNamespacedPath(path: string): string; + /** + * Posix specific pathing. + * Same as parent object on posix. + */ + readonly posix: PlatformPath; + /** + * Windows specific pathing. 
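+ *
+ * A minimal sketch of picking an explicit flavour regardless of the host platform:
+ *
+ * ```js
+ * import path from 'node:path';
+ *
+ * path.win32.basename('C:\\temp\\myfile.html'); // 'myfile.html'
+ * path.posix.basename('/tmp/myfile.html');      // 'myfile.html'
+ * ```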
+ * Same as parent object on windows + */ + readonly win32: PlatformPath; + } + } + const path: path.PlatformPath; + export = path; +} +declare module "node:path" { + import path = require("path"); + export = path; +} +declare module "node:path/posix" { + import path = require("path/posix"); + export = path; +} +declare module "node:path/win32" { + import path = require("path/win32"); + export = path; +} diff --git a/node_modules/@types/node-fetch/node_modules/@types/node/perf_hooks.d.ts b/node_modules/@types/node-fetch/node_modules/@types/node/perf_hooks.d.ts new file mode 100644 index 0000000..8c39ad0 --- /dev/null +++ b/node_modules/@types/node-fetch/node_modules/@types/node/perf_hooks.d.ts @@ -0,0 +1,965 @@ +/** + * This module provides an implementation of a subset of the W3C [Web Performance APIs](https://w3c.github.io/perf-timing-primer/) as well as additional APIs for + * Node.js-specific performance measurements. + * + * Node.js supports the following [Web Performance APIs](https://w3c.github.io/perf-timing-primer/): + * + * * [High Resolution Time](https://www.w3.org/TR/hr-time-2) + * * [Performance Timeline](https://w3c.github.io/performance-timeline/) + * * [User Timing](https://www.w3.org/TR/user-timing/) + * * [Resource Timing](https://www.w3.org/TR/resource-timing-2/) + * + * ```js + * import { PerformanceObserver, performance } from 'node:perf_hooks'; + * + * const obs = new PerformanceObserver((items) => { + * console.log(items.getEntries()[0].duration); + * performance.clearMarks(); + * }); + * obs.observe({ type: 'measure' }); + * performance.measure('Start to Now'); + * + * performance.mark('A'); + * doSomeLongRunningProcess(() => { + * performance.measure('A to Now', 'A'); + * + * performance.mark('B'); + * performance.measure('A to B', 'A', 'B'); + * }); + * ``` + * @see [source](https://github.com/nodejs/node/blob/v22.x/lib/perf_hooks.js) + */ +declare module "perf_hooks" { + import { AsyncResource } from "node:async_hooks"; + type EntryType = + | "dns" // Node.js only + | "function" // Node.js only + | "gc" // Node.js only + | "http2" // Node.js only + | "http" // Node.js only + | "mark" // available on the Web + | "measure" // available on the Web + | "net" // Node.js only + | "node" // Node.js only + | "resource"; // available on the Web + interface NodeGCPerformanceDetail { + /** + * When `performanceEntry.entryType` is equal to 'gc', the `performance.kind` property identifies + * the type of garbage collection operation that occurred. + * See perf_hooks.constants for valid values. + */ + readonly kind?: number | undefined; + /** + * When `performanceEntry.entryType` is equal to 'gc', the `performance.flags` + * property contains additional information about garbage collection operation. + * See perf_hooks.constants for valid values. + */ + readonly flags?: number | undefined; + } + /** + * The constructor of this class is not exposed to users directly. + * @since v8.5.0 + */ + class PerformanceEntry { + protected constructor(); + /** + * The total number of milliseconds elapsed for this entry. This value will not + * be meaningful for all Performance Entry types. + * @since v8.5.0 + */ + readonly duration: number; + /** + * The name of the performance entry. + * @since v8.5.0 + */ + readonly name: string; + /** + * The high resolution millisecond timestamp marking the starting time of the + * Performance Entry. + * @since v8.5.0 + */ + readonly startTime: number; + /** + * The type of the performance entry. 
It may be one of: + * + * * `'node'` (Node.js only) + * * `'mark'` (available on the Web) + * * `'measure'` (available on the Web) + * * `'gc'` (Node.js only) + * * `'function'` (Node.js only) + * * `'http2'` (Node.js only) + * * `'http'` (Node.js only) + * @since v8.5.0 + */ + readonly entryType: EntryType; + /** + * Additional detail specific to the `entryType`. + * @since v16.0.0 + */ + readonly detail?: NodeGCPerformanceDetail | unknown | undefined; // TODO: Narrow this based on entry type. + toJSON(): any; + } + /** + * Exposes marks created via the `Performance.mark()` method. + * @since v18.2.0, v16.17.0 + */ + class PerformanceMark extends PerformanceEntry { + readonly duration: 0; + readonly entryType: "mark"; + } + /** + * Exposes measures created via the `Performance.measure()` method. + * + * The constructor of this class is not exposed to users directly. + * @since v18.2.0, v16.17.0 + */ + class PerformanceMeasure extends PerformanceEntry { + readonly entryType: "measure"; + } + interface UVMetrics { + /** + * Number of event loop iterations. + */ + readonly loopCount: number; + /** + * Number of events that have been processed by the event handler. + */ + readonly events: number; + /** + * Number of events that were waiting to be processed when the event provider was called. + */ + readonly eventsWaiting: number; + } + /** + * _This property is an extension by Node.js. It is not available in Web browsers._ + * + * Provides timing details for Node.js itself. The constructor of this class + * is not exposed to users. + * @since v8.5.0 + */ + class PerformanceNodeTiming extends PerformanceEntry { + readonly entryType: "node"; + /** + * The high resolution millisecond timestamp at which the Node.js process + * completed bootstrapping. If bootstrapping has not yet finished, the property + * has the value of -1. + * @since v8.5.0 + */ + readonly bootstrapComplete: number; + /** + * The high resolution millisecond timestamp at which the Node.js environment was + * initialized. + * @since v8.5.0 + */ + readonly environment: number; + /** + * The high resolution millisecond timestamp of the amount of time the event loop + * has been idle within the event loop's event provider (e.g. `epoll_wait`). This + * does not take CPU usage into consideration. If the event loop has not yet + * started (e.g., in the first tick of the main script), the property has the + * value of 0. + * @since v14.10.0, v12.19.0 + */ + readonly idleTime: number; + /** + * The high resolution millisecond timestamp at which the Node.js event loop + * exited. If the event loop has not yet exited, the property has the value of -1\. + * It can only have a value of not -1 in a handler of the `'exit'` event. + * @since v8.5.0 + */ + readonly loopExit: number; + /** + * The high resolution millisecond timestamp at which the Node.js event loop + * started. If the event loop has not yet started (e.g., in the first tick of the + * main script), the property has the value of -1. + * @since v8.5.0 + */ + readonly loopStart: number; + /** + * The high resolution millisecond timestamp at which the Node.js process was initialized. + * @since v8.5.0 + */ + readonly nodeStart: number; + /** + * This is a wrapper to the `uv_metrics_info` function. + * It returns the current set of event loop metrics. + * + * It is recommended to use this property inside a function whose execution was + * scheduled using `setImmediate` to avoid collecting metrics before finishing all + * operations scheduled during the current loop iteration. 
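+ *
+ * A minimal sketch following that recommendation (on Node.js versions that expose
+ * `uvMetricsInfo`, per the `@since` tag below):
+ *
+ * ```js
+ * import { performance } from 'node:perf_hooks';
+ *
+ * setImmediate(() => {
+ *   const { loopCount, events, eventsWaiting } = performance.nodeTiming.uvMetricsInfo;
+ *   console.log({ loopCount, events, eventsWaiting });
+ * });
+ * ```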
+ * @since v22.8.0, v20.18.0 + */ + readonly uvMetricsInfo: UVMetrics; + /** + * The high resolution millisecond timestamp at which the V8 platform was + * initialized. + * @since v8.5.0 + */ + readonly v8Start: number; + } + interface EventLoopUtilization { + idle: number; + active: number; + utilization: number; + } + /** + * @param utilization1 The result of a previous call to `eventLoopUtilization()`. + * @param utilization2 The result of a previous call to `eventLoopUtilization()` prior to `utilization1`. + */ + type EventLoopUtilityFunction = ( + utilization1?: EventLoopUtilization, + utilization2?: EventLoopUtilization, + ) => EventLoopUtilization; + interface MarkOptions { + /** + * Additional optional detail to include with the mark. + */ + detail?: unknown | undefined; + /** + * An optional timestamp to be used as the mark time. + * @default `performance.now()` + */ + startTime?: number | undefined; + } + interface MeasureOptions { + /** + * Additional optional detail to include with the mark. + */ + detail?: unknown | undefined; + /** + * Duration between start and end times. + */ + duration?: number | undefined; + /** + * Timestamp to be used as the end time, or a string identifying a previously recorded mark. + */ + end?: number | string | undefined; + /** + * Timestamp to be used as the start time, or a string identifying a previously recorded mark. + */ + start?: number | string | undefined; + } + interface TimerifyOptions { + /** + * A histogram object created using `perf_hooks.createHistogram()` that will record runtime + * durations in nanoseconds. + */ + histogram?: RecordableHistogram | undefined; + } + interface Performance { + /** + * If `name` is not provided, removes all `PerformanceMark` objects from the Performance Timeline. + * If `name` is provided, removes only the named mark. + * @since v8.5.0 + */ + clearMarks(name?: string): void; + /** + * If `name` is not provided, removes all `PerformanceMeasure` objects from the Performance Timeline. + * If `name` is provided, removes only the named measure. + * @since v16.7.0 + */ + clearMeasures(name?: string): void; + /** + * If `name` is not provided, removes all `PerformanceResourceTiming` objects from the Resource Timeline. + * If `name` is provided, removes only the named resource. + * @since v18.2.0, v16.17.0 + */ + clearResourceTimings(name?: string): void; + /** + * eventLoopUtilization is similar to CPU utilization except that it is calculated using high precision wall-clock time. + * It represents the percentage of time the event loop has spent outside the event loop's event provider (e.g. epoll_wait). + * No other CPU idle time is taken into consideration. + */ + eventLoopUtilization: EventLoopUtilityFunction; + /** + * Returns a list of `PerformanceEntry` objects in chronological order with respect to `performanceEntry.startTime`. + * If you are only interested in performance entries of certain types or that have certain names, see + * `performance.getEntriesByType()` and `performance.getEntriesByName()`. + * @since v16.7.0 + */ + getEntries(): PerformanceEntry[]; + /** + * Returns a list of `PerformanceEntry` objects in chronological order with respect to `performanceEntry.startTime` + * whose `performanceEntry.name` is equal to `name`, and optionally, whose `performanceEntry.entryType` is equal to `type`. 
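+ *
+ * A minimal sketch (the returned entries reflect whatever marks exist at call time):
+ *
+ * ```js
+ * import { performance } from 'node:perf_hooks';
+ *
+ * performance.mark('A');
+ * console.log(performance.getEntriesByName('A').length);            // 1
+ * console.log(performance.getEntriesByName('A', 'measure').length); // 0
+ * performance.clearMarks();
+ * ```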
+ * @param name + * @param type + * @since v16.7.0 + */ + getEntriesByName(name: string, type?: EntryType): PerformanceEntry[]; + /** + * Returns a list of `PerformanceEntry` objects in chronological order with respect to `performanceEntry.startTime` + * whose `performanceEntry.entryType` is equal to `type`. + * @param type + * @since v16.7.0 + */ + getEntriesByType(type: EntryType): PerformanceEntry[]; + /** + * Creates a new `PerformanceMark` entry in the Performance Timeline. + * A `PerformanceMark` is a subclass of `PerformanceEntry` whose `performanceEntry.entryType` is always `'mark'`, + * and whose `performanceEntry.duration` is always `0`. + * Performance marks are used to mark specific significant moments in the Performance Timeline. + * + * The created `PerformanceMark` entry is put in the global Performance Timeline and can be queried with + * `performance.getEntries`, `performance.getEntriesByName`, and `performance.getEntriesByType`. When the observation is + * performed, the entries should be cleared from the global Performance Timeline manually with `performance.clearMarks`. + * @param name + */ + mark(name: string, options?: MarkOptions): PerformanceMark; + /** + * Creates a new `PerformanceResourceTiming` entry in the Resource Timeline. + * A `PerformanceResourceTiming` is a subclass of `PerformanceEntry` whose `performanceEntry.entryType` is always `'resource'`. + * Performance resources are used to mark moments in the Resource Timeline. + * @param timingInfo [Fetch Timing Info](https://fetch.spec.whatwg.org/#fetch-timing-info) + * @param requestedUrl The resource url + * @param initiatorType The initiator name, e.g: 'fetch' + * @param global + * @param cacheMode The cache mode must be an empty string ('') or 'local' + * @param bodyInfo [Fetch Response Body Info](https://fetch.spec.whatwg.org/#response-body-info) + * @param responseStatus The response's status code + * @param deliveryType The delivery type. Default: ''. + * @since v18.2.0, v16.17.0 + */ + markResourceTiming( + timingInfo: object, + requestedUrl: string, + initiatorType: string, + global: object, + cacheMode: "" | "local", + bodyInfo: object, + responseStatus: number, + deliveryType?: string, + ): PerformanceResourceTiming; + /** + * Creates a new PerformanceMeasure entry in the Performance Timeline. + * A PerformanceMeasure is a subclass of PerformanceEntry whose performanceEntry.entryType is always 'measure', + * and whose performanceEntry.duration measures the number of milliseconds elapsed since startMark and endMark. + * + * The startMark argument may identify any existing PerformanceMark in the the Performance Timeline, or may identify + * any of the timestamp properties provided by the PerformanceNodeTiming class. If the named startMark does not exist, + * then startMark is set to timeOrigin by default. + * + * The endMark argument must identify any existing PerformanceMark in the the Performance Timeline or any of the timestamp + * properties provided by the PerformanceNodeTiming class. If the named endMark does not exist, an error will be thrown. + * @param name + * @param startMark + * @param endMark + * @return The PerformanceMeasure entry that was created + */ + measure(name: string, startMark?: string, endMark?: string): PerformanceMeasure; + measure(name: string, options: MeasureOptions): PerformanceMeasure; + /** + * _This property is an extension by Node.js. 
It is not available in Web browsers._ + * + * An instance of the `PerformanceNodeTiming` class that provides performance metrics for specific Node.js operational milestones. + * @since v8.5.0 + */ + readonly nodeTiming: PerformanceNodeTiming; + /** + * Returns the current high resolution millisecond timestamp, where 0 represents the start of the current `node` process. + * @since v8.5.0 + */ + now(): number; + /** + * Sets the global performance resource timing buffer size to the specified number of "resource" type performance entry objects. + * + * By default the max buffer size is set to 250. + * @since v18.8.0 + */ + setResourceTimingBufferSize(maxSize: number): void; + /** + * The [`timeOrigin`](https://w3c.github.io/hr-time/#dom-performance-timeorigin) specifies the high resolution millisecond timestamp + * at which the current `node` process began, measured in Unix time. + * @since v8.5.0 + */ + readonly timeOrigin: number; + /** + * _This property is an extension by Node.js. It is not available in Web browsers._ + * + * Wraps a function within a new function that measures the running time of the wrapped function. + * A `PerformanceObserver` must be subscribed to the `'function'` event type in order for the timing details to be accessed. + * + * ```js + * import { + * performance, + * PerformanceObserver, + * } from 'node:perf_hooks'; + * + * function someFunction() { + * console.log('hello world'); + * } + * + * const wrapped = performance.timerify(someFunction); + * + * const obs = new PerformanceObserver((list) => { + * console.log(list.getEntries()[0].duration); + * + * performance.clearMarks(); + * performance.clearMeasures(); + * obs.disconnect(); + * }); + * obs.observe({ entryTypes: ['function'] }); + * + * // A performance timeline entry will be created + * wrapped(); + * ``` + * + * If the wrapped function returns a promise, a finally handler will be attached to the promise and the duration will be reported + * once the finally handler is invoked. + * @param fn + */ + timerify any>(fn: T, options?: TimerifyOptions): T; + /** + * An object which is JSON representation of the performance object. It is similar to + * [`window.performance.toJSON`](https://developer.mozilla.org/en-US/docs/Web/API/Performance/toJSON) in browsers. + * @since v16.1.0 + */ + toJSON(): any; + } + class PerformanceObserverEntryList { + /** + * Returns a list of `PerformanceEntry` objects in chronological order + * with respect to `performanceEntry.startTime`. + * + * ```js + * import { + * performance, + * PerformanceObserver, + * } from 'node:perf_hooks'; + * + * const obs = new PerformanceObserver((perfObserverList, observer) => { + * console.log(perfObserverList.getEntries()); + * + * * [ + * * PerformanceEntry { + * * name: 'test', + * * entryType: 'mark', + * * startTime: 81.465639, + * * duration: 0, + * * detail: null + * * }, + * * PerformanceEntry { + * * name: 'meow', + * * entryType: 'mark', + * * startTime: 81.860064, + * * duration: 0, + * * detail: null + * * } + * * ] + * + * performance.clearMarks(); + * performance.clearMeasures(); + * observer.disconnect(); + * }); + * obs.observe({ type: 'mark' }); + * + * performance.mark('test'); + * performance.mark('meow'); + * ``` + * @since v8.5.0 + */ + getEntries(): PerformanceEntry[]; + /** + * Returns a list of `PerformanceEntry` objects in chronological order + * with respect to `performanceEntry.startTime` whose `performanceEntry.name` is + * equal to `name`, and optionally, whose `performanceEntry.entryType` is equal to`type`. 
+ * + * ```js + * import { + * performance, + * PerformanceObserver, + * } from 'node:perf_hooks'; + * + * const obs = new PerformanceObserver((perfObserverList, observer) => { + * console.log(perfObserverList.getEntriesByName('meow')); + * + * * [ + * * PerformanceEntry { + * * name: 'meow', + * * entryType: 'mark', + * * startTime: 98.545991, + * * duration: 0, + * * detail: null + * * } + * * ] + * + * console.log(perfObserverList.getEntriesByName('nope')); // [] + * + * console.log(perfObserverList.getEntriesByName('test', 'mark')); + * + * * [ + * * PerformanceEntry { + * * name: 'test', + * * entryType: 'mark', + * * startTime: 63.518931, + * * duration: 0, + * * detail: null + * * } + * * ] + * + * console.log(perfObserverList.getEntriesByName('test', 'measure')); // [] + * + * performance.clearMarks(); + * performance.clearMeasures(); + * observer.disconnect(); + * }); + * obs.observe({ entryTypes: ['mark', 'measure'] }); + * + * performance.mark('test'); + * performance.mark('meow'); + * ``` + * @since v8.5.0 + */ + getEntriesByName(name: string, type?: EntryType): PerformanceEntry[]; + /** + * Returns a list of `PerformanceEntry` objects in chronological order + * with respect to `performanceEntry.startTime` whose `performanceEntry.entryType` is equal to `type`. + * + * ```js + * import { + * performance, + * PerformanceObserver, + * } from 'node:perf_hooks'; + * + * const obs = new PerformanceObserver((perfObserverList, observer) => { + * console.log(perfObserverList.getEntriesByType('mark')); + * + * * [ + * * PerformanceEntry { + * * name: 'test', + * * entryType: 'mark', + * * startTime: 55.897834, + * * duration: 0, + * * detail: null + * * }, + * * PerformanceEntry { + * * name: 'meow', + * * entryType: 'mark', + * * startTime: 56.350146, + * * duration: 0, + * * detail: null + * * } + * * ] + * + * performance.clearMarks(); + * performance.clearMeasures(); + * observer.disconnect(); + * }); + * obs.observe({ type: 'mark' }); + * + * performance.mark('test'); + * performance.mark('meow'); + * ``` + * @since v8.5.0 + */ + getEntriesByType(type: EntryType): PerformanceEntry[]; + } + type PerformanceObserverCallback = (list: PerformanceObserverEntryList, observer: PerformanceObserver) => void; + /** + * @since v8.5.0 + */ + class PerformanceObserver extends AsyncResource { + constructor(callback: PerformanceObserverCallback); + /** + * Disconnects the `PerformanceObserver` instance from all notifications. + * @since v8.5.0 + */ + disconnect(): void; + /** + * Subscribes the `PerformanceObserver` instance to notifications of new `PerformanceEntry` instances identified either by `options.entryTypes` or `options.type`: + * + * ```js + * import { + * performance, + * PerformanceObserver, + * } from 'node:perf_hooks'; + * + * const obs = new PerformanceObserver((list, observer) => { + * // Called once asynchronously. `list` contains three items. + * }); + * obs.observe({ type: 'mark' }); + * + * for (let n = 0; n < 3; n++) + * performance.mark(`test${n}`); + * ``` + * @since v8.5.0 + */ + observe( + options: + | { + entryTypes: readonly EntryType[]; + buffered?: boolean | undefined; + } + | { + type: EntryType; + buffered?: boolean | undefined; + }, + ): void; + } + /** + * Provides detailed network timing data regarding the loading of an application's resources. + * + * The constructor of this class is not exposed to users directly. 
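+ *
+ * A minimal sketch of observing `'resource'` entries, here assuming they are produced
+ * by the global `fetch`:
+ *
+ * ```js
+ * import { PerformanceObserver } from 'node:perf_hooks';
+ *
+ * const obs = new PerformanceObserver((list) => {
+ *   for (const entry of list.getEntriesByType('resource')) {
+ *     console.log(entry.name, entry.duration);
+ *   }
+ *   obs.disconnect();
+ * });
+ * obs.observe({ type: 'resource' });
+ *
+ * fetch('https://example.org'); // the entry is recorded once the request settles
+ * ```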
+ * @since v18.2.0, v16.17.0 + */ + class PerformanceResourceTiming extends PerformanceEntry { + readonly entryType: "resource"; + protected constructor(); + /** + * The high resolution millisecond timestamp at immediately before dispatching the `fetch` + * request. If the resource is not intercepted by a worker the property will always return 0. + * @since v18.2.0, v16.17.0 + */ + readonly workerStart: number; + /** + * The high resolution millisecond timestamp that represents the start time of the fetch which + * initiates the redirect. + * @since v18.2.0, v16.17.0 + */ + readonly redirectStart: number; + /** + * The high resolution millisecond timestamp that will be created immediately after receiving + * the last byte of the response of the last redirect. + * @since v18.2.0, v16.17.0 + */ + readonly redirectEnd: number; + /** + * The high resolution millisecond timestamp immediately before the Node.js starts to fetch the resource. + * @since v18.2.0, v16.17.0 + */ + readonly fetchStart: number; + /** + * The high resolution millisecond timestamp immediately before the Node.js starts the domain name lookup + * for the resource. + * @since v18.2.0, v16.17.0 + */ + readonly domainLookupStart: number; + /** + * The high resolution millisecond timestamp representing the time immediately after the Node.js finished + * the domain name lookup for the resource. + * @since v18.2.0, v16.17.0 + */ + readonly domainLookupEnd: number; + /** + * The high resolution millisecond timestamp representing the time immediately before Node.js starts to + * establish the connection to the server to retrieve the resource. + * @since v18.2.0, v16.17.0 + */ + readonly connectStart: number; + /** + * The high resolution millisecond timestamp representing the time immediately after Node.js finishes + * establishing the connection to the server to retrieve the resource. + * @since v18.2.0, v16.17.0 + */ + readonly connectEnd: number; + /** + * The high resolution millisecond timestamp representing the time immediately before Node.js starts the + * handshake process to secure the current connection. + * @since v18.2.0, v16.17.0 + */ + readonly secureConnectionStart: number; + /** + * The high resolution millisecond timestamp representing the time immediately before Node.js receives the + * first byte of the response from the server. + * @since v18.2.0, v16.17.0 + */ + readonly requestStart: number; + /** + * The high resolution millisecond timestamp representing the time immediately after Node.js receives the + * last byte of the resource or immediately before the transport connection is closed, whichever comes first. + * @since v18.2.0, v16.17.0 + */ + readonly responseEnd: number; + /** + * A number representing the size (in octets) of the fetched resource. The size includes the response header + * fields plus the response payload body. + * @since v18.2.0, v16.17.0 + */ + readonly transferSize: number; + /** + * A number representing the size (in octets) received from the fetch (HTTP or cache), of the payload body, before + * removing any applied content-codings. + * @since v18.2.0, v16.17.0 + */ + readonly encodedBodySize: number; + /** + * A number representing the size (in octets) received from the fetch (HTTP or cache), of the message body, after + * removing any applied content-codings. 
+ * @since v18.2.0, v16.17.0 + */ + readonly decodedBodySize: number; + /** + * Returns a `object` that is the JSON representation of the `PerformanceResourceTiming` object + * @since v18.2.0, v16.17.0 + */ + toJSON(): any; + } + namespace constants { + const NODE_PERFORMANCE_GC_MAJOR: number; + const NODE_PERFORMANCE_GC_MINOR: number; + const NODE_PERFORMANCE_GC_INCREMENTAL: number; + const NODE_PERFORMANCE_GC_WEAKCB: number; + const NODE_PERFORMANCE_GC_FLAGS_NO: number; + const NODE_PERFORMANCE_GC_FLAGS_CONSTRUCT_RETAINED: number; + const NODE_PERFORMANCE_GC_FLAGS_FORCED: number; + const NODE_PERFORMANCE_GC_FLAGS_SYNCHRONOUS_PHANTOM_PROCESSING: number; + const NODE_PERFORMANCE_GC_FLAGS_ALL_AVAILABLE_GARBAGE: number; + const NODE_PERFORMANCE_GC_FLAGS_ALL_EXTERNAL_MEMORY: number; + const NODE_PERFORMANCE_GC_FLAGS_SCHEDULE_IDLE: number; + } + const performance: Performance; + interface EventLoopMonitorOptions { + /** + * The sampling rate in milliseconds. + * Must be greater than zero. + * @default 10 + */ + resolution?: number | undefined; + } + interface Histogram { + /** + * The number of samples recorded by the histogram. + * @since v17.4.0, v16.14.0 + */ + readonly count: number; + /** + * The number of samples recorded by the histogram. + * v17.4.0, v16.14.0 + */ + readonly countBigInt: bigint; + /** + * The number of times the event loop delay exceeded the maximum 1 hour event + * loop delay threshold. + * @since v11.10.0 + */ + readonly exceeds: number; + /** + * The number of times the event loop delay exceeded the maximum 1 hour event loop delay threshold. + * @since v17.4.0, v16.14.0 + */ + readonly exceedsBigInt: bigint; + /** + * The maximum recorded event loop delay. + * @since v11.10.0 + */ + readonly max: number; + /** + * The maximum recorded event loop delay. + * v17.4.0, v16.14.0 + */ + readonly maxBigInt: number; + /** + * The mean of the recorded event loop delays. + * @since v11.10.0 + */ + readonly mean: number; + /** + * The minimum recorded event loop delay. + * @since v11.10.0 + */ + readonly min: number; + /** + * The minimum recorded event loop delay. + * v17.4.0, v16.14.0 + */ + readonly minBigInt: bigint; + /** + * Returns the value at the given percentile. + * @since v11.10.0 + * @param percentile A percentile value in the range (0, 100]. + */ + percentile(percentile: number): number; + /** + * Returns the value at the given percentile. + * @since v17.4.0, v16.14.0 + * @param percentile A percentile value in the range (0, 100]. + */ + percentileBigInt(percentile: number): bigint; + /** + * Returns a `Map` object detailing the accumulated percentile distribution. + * @since v11.10.0 + */ + readonly percentiles: Map; + /** + * Returns a `Map` object detailing the accumulated percentile distribution. + * @since v17.4.0, v16.14.0 + */ + readonly percentilesBigInt: Map; + /** + * Resets the collected histogram data. + * @since v11.10.0 + */ + reset(): void; + /** + * The standard deviation of the recorded event loop delays. + * @since v11.10.0 + */ + readonly stddev: number; + } + interface IntervalHistogram extends Histogram { + /** + * Enables the update interval timer. Returns `true` if the timer was + * started, `false` if it was already started. + * @since v11.10.0 + */ + enable(): boolean; + /** + * Disables the update interval timer. Returns `true` if the timer was + * stopped, `false` if it was already stopped. 
+ * @since v11.10.0 + */ + disable(): boolean; + } + interface RecordableHistogram extends Histogram { + /** + * @since v15.9.0, v14.18.0 + * @param val The amount to record in the histogram. + */ + record(val: number | bigint): void; + /** + * Calculates the amount of time (in nanoseconds) that has passed since the + * previous call to `recordDelta()` and records that amount in the histogram. + * @since v15.9.0, v14.18.0 + */ + recordDelta(): void; + /** + * Adds the values from `other` to this histogram. + * @since v17.4.0, v16.14.0 + */ + add(other: RecordableHistogram): void; + } + /** + * _This property is an extension by Node.js. It is not available in Web browsers._ + * + * Creates an `IntervalHistogram` object that samples and reports the event loop + * delay over time. The delays will be reported in nanoseconds. + * + * Using a timer to detect approximate event loop delay works because the + * execution of timers is tied specifically to the lifecycle of the libuv + * event loop. That is, a delay in the loop will cause a delay in the execution + * of the timer, and those delays are specifically what this API is intended to + * detect. + * + * ```js + * import { monitorEventLoopDelay } from 'node:perf_hooks'; + * const h = monitorEventLoopDelay({ resolution: 20 }); + * h.enable(); + * // Do something. + * h.disable(); + * console.log(h.min); + * console.log(h.max); + * console.log(h.mean); + * console.log(h.stddev); + * console.log(h.percentiles); + * console.log(h.percentile(50)); + * console.log(h.percentile(99)); + * ``` + * @since v11.10.0 + */ + function monitorEventLoopDelay(options?: EventLoopMonitorOptions): IntervalHistogram; + interface CreateHistogramOptions { + /** + * The minimum recordable value. Must be an integer value greater than 0. + * @default 1 + */ + min?: number | bigint | undefined; + /** + * The maximum recordable value. Must be an integer value greater than min. + * @default Number.MAX_SAFE_INTEGER + */ + max?: number | bigint | undefined; + /** + * The number of accuracy digits. Must be a number between 1 and 5. + * @default 3 + */ + figures?: number | undefined; + } + /** + * Returns a `RecordableHistogram`. + * @since v15.9.0, v14.18.0 + */ + function createHistogram(options?: CreateHistogramOptions): RecordableHistogram; + import { + performance as _performance, + PerformanceEntry as _PerformanceEntry, + PerformanceMark as _PerformanceMark, + PerformanceMeasure as _PerformanceMeasure, + PerformanceObserver as _PerformanceObserver, + PerformanceObserverEntryList as _PerformanceObserverEntryList, + PerformanceResourceTiming as _PerformanceResourceTiming, + } from "perf_hooks"; + global { + /** + * `PerformanceEntry` is a global reference for `import { PerformanceEntry } from 'node:perf_hooks'` + * @see https://nodejs.org/docs/latest-v22.x/api/globals.html#performanceentry + * @since v19.0.0 + */ + var PerformanceEntry: typeof globalThis extends { + onmessage: any; + PerformanceEntry: infer T; + } ? T + : typeof _PerformanceEntry; + /** + * `PerformanceMark` is a global reference for `import { PerformanceMark } from 'node:perf_hooks'` + * @see https://nodejs.org/docs/latest-v22.x/api/globals.html#performancemark + * @since v19.0.0 + */ + var PerformanceMark: typeof globalThis extends { + onmessage: any; + PerformanceMark: infer T; + } ? 
T + : typeof _PerformanceMark; + /** + * `PerformanceMeasure` is a global reference for `import { PerformanceMeasure } from 'node:perf_hooks'` + * @see https://nodejs.org/docs/latest-v22.x/api/globals.html#performancemeasure + * @since v19.0.0 + */ + var PerformanceMeasure: typeof globalThis extends { + onmessage: any; + PerformanceMeasure: infer T; + } ? T + : typeof _PerformanceMeasure; + /** + * `PerformanceObserver` is a global reference for `import { PerformanceObserver } from 'node:perf_hooks'` + * @see https://nodejs.org/docs/latest-v22.x/api/globals.html#performanceobserver + * @since v19.0.0 + */ + var PerformanceObserver: typeof globalThis extends { + onmessage: any; + PerformanceObserver: infer T; + } ? T + : typeof _PerformanceObserver; + /** + * `PerformanceObserverEntryList` is a global reference for `import { PerformanceObserverEntryList } from 'node:perf_hooks'` + * @see https://nodejs.org/docs/latest-v22.x/api/globals.html#performanceobserverentrylist + * @since v19.0.0 + */ + var PerformanceObserverEntryList: typeof globalThis extends { + onmessage: any; + PerformanceObserverEntryList: infer T; + } ? T + : typeof _PerformanceObserverEntryList; + /** + * `PerformanceResourceTiming` is a global reference for `import { PerformanceResourceTiming } from 'node:perf_hooks'` + * @see https://nodejs.org/docs/latest-v22.x/api/globals.html#performanceresourcetiming + * @since v19.0.0 + */ + var PerformanceResourceTiming: typeof globalThis extends { + onmessage: any; + PerformanceResourceTiming: infer T; + } ? T + : typeof _PerformanceResourceTiming; + /** + * `performance` is a global reference for `import { performance } from 'node:perf_hooks'` + * @see https://nodejs.org/docs/latest-v22.x/api/globals.html#performance + * @since v16.0.0 + */ + var performance: typeof globalThis extends { + onmessage: any; + performance: infer T; + } ? 
T + : typeof _performance; + } +} +declare module "node:perf_hooks" { + export * from "perf_hooks"; +} diff --git a/node_modules/@types/node-fetch/node_modules/@types/node/process.d.ts b/node_modules/@types/node-fetch/node_modules/@types/node/process.d.ts new file mode 100644 index 0000000..2729c72 --- /dev/null +++ b/node_modules/@types/node-fetch/node_modules/@types/node/process.d.ts @@ -0,0 +1,1963 @@ +declare module "process" { + import * as tty from "node:tty"; + import { Worker } from "node:worker_threads"; + + interface BuiltInModule { + "assert": typeof import("assert"); + "node:assert": typeof import("node:assert"); + "assert/strict": typeof import("assert/strict"); + "node:assert/strict": typeof import("node:assert/strict"); + "async_hooks": typeof import("async_hooks"); + "node:async_hooks": typeof import("node:async_hooks"); + "buffer": typeof import("buffer"); + "node:buffer": typeof import("node:buffer"); + "child_process": typeof import("child_process"); + "node:child_process": typeof import("node:child_process"); + "cluster": typeof import("cluster"); + "node:cluster": typeof import("node:cluster"); + "console": typeof import("console"); + "node:console": typeof import("node:console"); + "constants": typeof import("constants"); + "node:constants": typeof import("node:constants"); + "crypto": typeof import("crypto"); + "node:crypto": typeof import("node:crypto"); + "dgram": typeof import("dgram"); + "node:dgram": typeof import("node:dgram"); + "diagnostics_channel": typeof import("diagnostics_channel"); + "node:diagnostics_channel": typeof import("node:diagnostics_channel"); + "dns": typeof import("dns"); + "node:dns": typeof import("node:dns"); + "dns/promises": typeof import("dns/promises"); + "node:dns/promises": typeof import("node:dns/promises"); + "domain": typeof import("domain"); + "node:domain": typeof import("node:domain"); + "events": typeof import("events"); + "node:events": typeof import("node:events"); + "fs": typeof import("fs"); + "node:fs": typeof import("node:fs"); + "fs/promises": typeof import("fs/promises"); + "node:fs/promises": typeof import("node:fs/promises"); + "http": typeof import("http"); + "node:http": typeof import("node:http"); + "http2": typeof import("http2"); + "node:http2": typeof import("node:http2"); + "https": typeof import("https"); + "node:https": typeof import("node:https"); + "inspector": typeof import("inspector"); + "node:inspector": typeof import("node:inspector"); + "inspector/promises": typeof import("inspector/promises"); + "node:inspector/promises": typeof import("node:inspector/promises"); + "module": typeof import("module"); + "node:module": typeof import("node:module"); + "net": typeof import("net"); + "node:net": typeof import("node:net"); + "os": typeof import("os"); + "node:os": typeof import("node:os"); + "path": typeof import("path"); + "node:path": typeof import("node:path"); + "path/posix": typeof import("path/posix"); + "node:path/posix": typeof import("node:path/posix"); + "path/win32": typeof import("path/win32"); + "node:path/win32": typeof import("node:path/win32"); + "perf_hooks": typeof import("perf_hooks"); + "node:perf_hooks": typeof import("node:perf_hooks"); + "process": typeof import("process"); + "node:process": typeof import("node:process"); + "punycode": typeof import("punycode"); + "node:punycode": typeof import("node:punycode"); + "querystring": typeof import("querystring"); + "node:querystring": typeof import("node:querystring"); + "readline": typeof import("readline"); + "node:readline": typeof 
import("node:readline"); + "readline/promises": typeof import("readline/promises"); + "node:readline/promises": typeof import("node:readline/promises"); + "repl": typeof import("repl"); + "node:repl": typeof import("node:repl"); + "node:sea": typeof import("node:sea"); + "node:sqlite": typeof import("node:sqlite"); + "stream": typeof import("stream"); + "node:stream": typeof import("node:stream"); + "stream/consumers": typeof import("stream/consumers"); + "node:stream/consumers": typeof import("node:stream/consumers"); + "stream/promises": typeof import("stream/promises"); + "node:stream/promises": typeof import("node:stream/promises"); + "stream/web": typeof import("stream/web"); + "node:stream/web": typeof import("node:stream/web"); + "string_decoder": typeof import("string_decoder"); + "node:string_decoder": typeof import("node:string_decoder"); + "node:test": typeof import("node:test"); + "node:test/reporters": typeof import("node:test/reporters"); + "timers": typeof import("timers"); + "node:timers": typeof import("node:timers"); + "timers/promises": typeof import("timers/promises"); + "node:timers/promises": typeof import("node:timers/promises"); + "tls": typeof import("tls"); + "node:tls": typeof import("node:tls"); + "trace_events": typeof import("trace_events"); + "node:trace_events": typeof import("node:trace_events"); + "tty": typeof import("tty"); + "node:tty": typeof import("node:tty"); + "url": typeof import("url"); + "node:url": typeof import("node:url"); + "util": typeof import("util"); + "node:util": typeof import("node:util"); + "sys": typeof import("util"); + "node:sys": typeof import("node:util"); + "util/types": typeof import("util/types"); + "node:util/types": typeof import("node:util/types"); + "v8": typeof import("v8"); + "node:v8": typeof import("node:v8"); + "vm": typeof import("vm"); + "node:vm": typeof import("node:vm"); + "wasi": typeof import("wasi"); + "node:wasi": typeof import("node:wasi"); + "worker_threads": typeof import("worker_threads"); + "node:worker_threads": typeof import("node:worker_threads"); + "zlib": typeof import("zlib"); + "node:zlib": typeof import("node:zlib"); + } + global { + var process: NodeJS.Process; + namespace NodeJS { + // this namespace merge is here because these are specifically used + // as the type for process.stdin, process.stdout, and process.stderr. + // they can't live in tty.d.ts because we need to disambiguate the imported name. + interface ReadStream extends tty.ReadStream {} + interface WriteStream extends tty.WriteStream {} + interface MemoryUsageFn { + /** + * The `process.memoryUsage()` method iterate over each page to gather informations about memory + * usage which can be slow depending on the program memory allocations. + */ + (): MemoryUsage; + /** + * method returns an integer representing the Resident Set Size (RSS) in bytes. + */ + rss(): number; + } + interface MemoryUsage { + /** + * Resident Set Size, is the amount of space occupied in the main memory device (that is a subset of the total allocated memory) for the + * process, including all C++ and JavaScript objects and code. + */ + rss: number; + /** + * Refers to V8's memory usage. + */ + heapTotal: number; + /** + * Refers to V8's memory usage. + */ + heapUsed: number; + external: number; + /** + * Refers to memory allocated for `ArrayBuffer`s and `SharedArrayBuffer`s, including all Node.js Buffers. This is also included + * in the external value. 
When Node.js is used as an embedded library, this value may be `0` because allocations for `ArrayBuffer`s + * may not be tracked in that case. + */ + arrayBuffers: number; + } + interface CpuUsage { + user: number; + system: number; + } + interface ProcessRelease { + name: string; + sourceUrl?: string | undefined; + headersUrl?: string | undefined; + libUrl?: string | undefined; + lts?: string | undefined; + } + interface ProcessFeatures { + /** + * A boolean value that is `true` if the current Node.js build is caching builtin modules. + * @since v12.0.0 + */ + readonly cached_builtins: boolean; + /** + * A boolean value that is `true` if the current Node.js build is a debug build. + * @since v0.5.5 + */ + readonly debug: boolean; + /** + * A boolean value that is `true` if the current Node.js build includes the inspector. + * @since v11.10.0 + */ + readonly inspector: boolean; + /** + * A boolean value that is `true` if the current Node.js build includes support for IPv6. + * @since v0.5.3 + */ + readonly ipv6: boolean; + /** + * A boolean value that is `true` if the current Node.js build supports + * [loading ECMAScript modules using `require()`](https://nodejs.org/docs/latest-v22.x/api/modules.md#loading-ecmascript-modules-using-require). + * @since v22.10.0 + */ + readonly require_module: boolean; + /** + * A boolean value that is `true` if the current Node.js build includes support for TLS. + * @since v0.5.3 + */ + readonly tls: boolean; + /** + * A boolean value that is `true` if the current Node.js build includes support for ALPN in TLS. + * @since v4.8.0 + */ + readonly tls_alpn: boolean; + /** + * A boolean value that is `true` if the current Node.js build includes support for OCSP in TLS. + * @since v0.11.13 + */ + readonly tls_ocsp: boolean; + /** + * A boolean value that is `true` if the current Node.js build includes support for SNI in TLS. + * @since v0.5.3 + */ + readonly tls_sni: boolean; + /** + * A value that is `"strip"` if Node.js is run with `--experimental-strip-types`, + * `"transform"` if Node.js is run with `--experimental-transform-types`, and `false` otherwise. + * @since v22.10.0 + */ + readonly typescript: "strip" | "transform" | false; + /** + * A boolean value that is `true` if the current Node.js build includes support for libuv. + * Since it's currently not possible to build Node.js without libuv, this value is always `true`. 
+ * @since v0.5.3 + */ + readonly uv: boolean; + } + interface ProcessVersions extends Dict { + http_parser: string; + node: string; + v8: string; + ares: string; + uv: string; + zlib: string; + modules: string; + openssl: string; + } + type Platform = + | "aix" + | "android" + | "darwin" + | "freebsd" + | "haiku" + | "linux" + | "openbsd" + | "sunos" + | "win32" + | "cygwin" + | "netbsd"; + type Architecture = + | "arm" + | "arm64" + | "ia32" + | "loong64" + | "mips" + | "mipsel" + | "ppc" + | "ppc64" + | "riscv64" + | "s390" + | "s390x" + | "x64"; + type Signals = + | "SIGABRT" + | "SIGALRM" + | "SIGBUS" + | "SIGCHLD" + | "SIGCONT" + | "SIGFPE" + | "SIGHUP" + | "SIGILL" + | "SIGINT" + | "SIGIO" + | "SIGIOT" + | "SIGKILL" + | "SIGPIPE" + | "SIGPOLL" + | "SIGPROF" + | "SIGPWR" + | "SIGQUIT" + | "SIGSEGV" + | "SIGSTKFLT" + | "SIGSTOP" + | "SIGSYS" + | "SIGTERM" + | "SIGTRAP" + | "SIGTSTP" + | "SIGTTIN" + | "SIGTTOU" + | "SIGUNUSED" + | "SIGURG" + | "SIGUSR1" + | "SIGUSR2" + | "SIGVTALRM" + | "SIGWINCH" + | "SIGXCPU" + | "SIGXFSZ" + | "SIGBREAK" + | "SIGLOST" + | "SIGINFO"; + type UncaughtExceptionOrigin = "uncaughtException" | "unhandledRejection"; + type MultipleResolveType = "resolve" | "reject"; + type BeforeExitListener = (code: number) => void; + type DisconnectListener = () => void; + type ExitListener = (code: number) => void; + type RejectionHandledListener = (promise: Promise) => void; + type UncaughtExceptionListener = (error: Error, origin: UncaughtExceptionOrigin) => void; + /** + * Most of the time the unhandledRejection will be an Error, but this should not be relied upon + * as *anything* can be thrown/rejected, it is therefore unsafe to assume that the value is an Error. + */ + type UnhandledRejectionListener = (reason: unknown, promise: Promise) => void; + type WarningListener = (warning: Error) => void; + type MessageListener = (message: unknown, sendHandle: unknown) => void; + type SignalsListener = (signal: Signals) => void; + type MultipleResolveListener = ( + type: MultipleResolveType, + promise: Promise, + value: unknown, + ) => void; + type WorkerListener = (worker: Worker) => void; + interface Socket extends ReadWriteStream { + isTTY?: true | undefined; + } + // Alias for compatibility + interface ProcessEnv extends Dict { + /** + * Can be used to change the default timezone at runtime + */ + TZ?: string; + } + interface HRTime { + (time?: [number, number]): [number, number]; + /** + * The `bigint` version of the `{@link hrtime()}` method returning the current high-resolution real time in nanoseconds as a `bigint`. + * + * Unlike `{@link hrtime()}`, it does not support an additional time argument since the difference can just be computed directly by subtraction of the two `bigint`s. + * ```js + * import { hrtime } from 'node:process'; + * + * const start = hrtime.bigint(); + * // 191051479007711n + * + * setTimeout(() => { + * const end = hrtime.bigint(); + * // 191052633396993n + * + * console.log(`Benchmark took ${end - start} nanoseconds`); + * // Benchmark took 1154389282 nanoseconds + * }, 1000); + * ``` + */ + bigint(): bigint; + } + interface ProcessPermission { + /** + * Verifies that the process is able to access the given scope and reference. + * If no reference is provided, a global scope is assumed, for instance, `process.permission.has('fs.read')` + * will check if the process has ALL file system read permissions. + * + * The reference has a meaning based on the provided scope. 
For example, the reference when the scope is File System means files and folders. + * + * The available scopes are: + * + * * `fs` - All File System + * * `fs.read` - File System read operations + * * `fs.write` - File System write operations + * * `child` - Child process spawning operations + * * `worker` - Worker thread spawning operation + * + * ```js + * // Check if the process has permission to read the README file + * process.permission.has('fs.read', './README.md'); + * // Check if the process has read permission operations + * process.permission.has('fs.read'); + * ``` + * @since v20.0.0 + */ + has(scope: string, reference?: string): boolean; + } + interface ProcessReport { + /** + * Write reports in a compact format, single-line JSON, more easily consumable by log processing systems + * than the default multi-line format designed for human consumption. + * @since v13.12.0, v12.17.0 + */ + compact: boolean; + /** + * Directory where the report is written. + * The default value is the empty string, indicating that reports are written to the current + * working directory of the Node.js process. + */ + directory: string; + /** + * Filename where the report is written. If set to the empty string, the output filename will be comprised + * of a timestamp, PID, and sequence number. The default value is the empty string. + */ + filename: string; + /** + * Returns a JavaScript Object representation of a diagnostic report for the running process. + * The report's JavaScript stack trace is taken from `err`, if present. + */ + getReport(err?: Error): object; + /** + * If true, a diagnostic report is generated on fatal errors, + * such as out of memory errors or failed C++ assertions. + * @default false + */ + reportOnFatalError: boolean; + /** + * If true, a diagnostic report is generated when the process + * receives the signal specified by process.report.signal. + * @default false + */ + reportOnSignal: boolean; + /** + * If true, a diagnostic report is generated on uncaught exception. + * @default false + */ + reportOnUncaughtException: boolean; + /** + * The signal used to trigger the creation of a diagnostic report. + * @default 'SIGUSR2' + */ + signal: Signals; + /** + * Writes a diagnostic report to a file. If filename is not provided, the default filename + * includes the date, time, PID, and a sequence number. + * The report's JavaScript stack trace is taken from `err`, if present. + * + * If the value of filename is set to `'stdout'` or `'stderr'`, the report is written + * to the stdout or stderr of the process respectively. + * @param fileName Name of the file where the report is written. + * This should be a relative path, that will be appended to the directory specified in + * `process.report.directory`, or the current working directory of the Node.js process, + * if unspecified. + * @param err A custom error used for reporting the JavaScript stack. + * @return Filename of the generated report. 
+ */ + writeReport(fileName?: string, err?: Error): string; + writeReport(err?: Error): string; + } + interface ResourceUsage { + fsRead: number; + fsWrite: number; + involuntaryContextSwitches: number; + ipcReceived: number; + ipcSent: number; + majorPageFault: number; + maxRSS: number; + minorPageFault: number; + sharedMemorySize: number; + signalsCount: number; + swappedOut: number; + systemCPUTime: number; + unsharedDataSize: number; + unsharedStackSize: number; + userCPUTime: number; + voluntaryContextSwitches: number; + } + interface EmitWarningOptions { + /** + * When `warning` is a `string`, `type` is the name to use for the _type_ of warning being emitted. + * + * @default 'Warning' + */ + type?: string | undefined; + /** + * A unique identifier for the warning instance being emitted. + */ + code?: string | undefined; + /** + * When `warning` is a `string`, `ctor` is an optional function used to limit the generated stack trace. + * + * @default process.emitWarning + */ + ctor?: Function | undefined; + /** + * Additional text to include with the error. + */ + detail?: string | undefined; + } + interface ProcessConfig { + readonly target_defaults: { + readonly cflags: any[]; + readonly default_configuration: string; + readonly defines: string[]; + readonly include_dirs: string[]; + readonly libraries: string[]; + }; + readonly variables: { + readonly clang: number; + readonly host_arch: string; + readonly node_install_npm: boolean; + readonly node_install_waf: boolean; + readonly node_prefix: string; + readonly node_shared_openssl: boolean; + readonly node_shared_v8: boolean; + readonly node_shared_zlib: boolean; + readonly node_use_dtrace: boolean; + readonly node_use_etw: boolean; + readonly node_use_openssl: boolean; + readonly target_arch: string; + readonly v8_no_strict_aliasing: number; + readonly v8_use_snapshot: boolean; + readonly visibility: string; + }; + } + interface Process extends EventEmitter { + /** + * The `process.stdout` property returns a stream connected to`stdout` (fd `1`). It is a `net.Socket` (which is a `Duplex` stream) unless fd `1` refers to a file, in which case it is + * a `Writable` stream. + * + * For example, to copy `process.stdin` to `process.stdout`: + * + * ```js + * import { stdin, stdout } from 'node:process'; + * + * stdin.pipe(stdout); + * ``` + * + * `process.stdout` differs from other Node.js streams in important ways. See `note on process I/O` for more information. + */ + stdout: WriteStream & { + fd: 1; + }; + /** + * The `process.stderr` property returns a stream connected to`stderr` (fd `2`). It is a `net.Socket` (which is a `Duplex` stream) unless fd `2` refers to a file, in which case it is + * a `Writable` stream. + * + * `process.stderr` differs from other Node.js streams in important ways. See `note on process I/O` for more information. + */ + stderr: WriteStream & { + fd: 2; + }; + /** + * The `process.stdin` property returns a stream connected to`stdin` (fd `0`). It is a `net.Socket` (which is a `Duplex` stream) unless fd `0` refers to a file, in which case it is + * a `Readable` stream. + * + * For details of how to read from `stdin` see `readable.read()`. + * + * As a `Duplex` stream, `process.stdin` can also be used in "old" mode that + * is compatible with scripts written for Node.js prior to v0.10\. + * For more information see `Stream compatibility`. + * + * In "old" streams mode the `stdin` stream is paused by default, so one + * must call `process.stdin.resume()` to read from it. 
Note also that calling `process.stdin.resume()` itself would switch stream to "old" mode. + */ + stdin: ReadStream & { + fd: 0; + }; + /** + * The `process.argv` property returns an array containing the command-line + * arguments passed when the Node.js process was launched. The first element will + * be {@link execPath}. See `process.argv0` if access to the original value + * of `argv[0]` is needed. The second element will be the path to the JavaScript + * file being executed. The remaining elements will be any additional command-line + * arguments. + * + * For example, assuming the following script for `process-args.js`: + * + * ```js + * import { argv } from 'node:process'; + * + * // print process.argv + * argv.forEach((val, index) => { + * console.log(`${index}: ${val}`); + * }); + * ``` + * + * Launching the Node.js process as: + * + * ```bash + * node process-args.js one two=three four + * ``` + * + * Would generate the output: + * + * ```text + * 0: /usr/local/bin/node + * 1: /Users/mjr/work/node/process-args.js + * 2: one + * 3: two=three + * 4: four + * ``` + * @since v0.1.27 + */ + argv: string[]; + /** + * The `process.argv0` property stores a read-only copy of the original value of`argv[0]` passed when Node.js starts. + * + * ```console + * $ bash -c 'exec -a customArgv0 ./node' + * > process.argv[0] + * '/Volumes/code/external/node/out/Release/node' + * > process.argv0 + * 'customArgv0' + * ``` + * @since v6.4.0 + */ + argv0: string; + /** + * The `process.execArgv` property returns the set of Node.js-specific command-line + * options passed when the Node.js process was launched. These options do not + * appear in the array returned by the {@link argv} property, and do not + * include the Node.js executable, the name of the script, or any options following + * the script name. These options are useful in order to spawn child processes with + * the same execution environment as the parent. + * + * ```bash + * node --icu-data-dir=./foo --require ./bar.js script.js --version + * ``` + * + * Results in `process.execArgv`: + * + * ```js + * ["--icu-data-dir=./foo", "--require", "./bar.js"] + * ``` + * + * And `process.argv`: + * + * ```js + * ['/usr/local/bin/node', 'script.js', '--version'] + * ``` + * + * Refer to `Worker constructor` for the detailed behavior of worker + * threads with this property. + * @since v0.7.7 + */ + execArgv: string[]; + /** + * The `process.execPath` property returns the absolute pathname of the executable + * that started the Node.js process. Symbolic links, if any, are resolved. + * + * ```js + * '/usr/local/bin/node' + * ``` + * @since v0.1.100 + */ + execPath: string; + /** + * The `process.abort()` method causes the Node.js process to exit immediately and + * generate a core file. + * + * This feature is not available in `Worker` threads. + * @since v0.7.0 + */ + abort(): never; + /** + * The `process.chdir()` method changes the current working directory of the + * Node.js process or throws an exception if doing so fails (for instance, if + * the specified `directory` does not exist). + * + * ```js + * import { chdir, cwd } from 'node:process'; + * + * console.log(`Starting directory: ${cwd()}`); + * try { + * chdir('/tmp'); + * console.log(`New directory: ${cwd()}`); + * } catch (err) { + * console.error(`chdir: ${err}`); + * } + * ``` + * + * This feature is not available in `Worker` threads. 
+ * @since v0.1.17 + */ + chdir(directory: string): void; + /** + * The `process.cwd()` method returns the current working directory of the Node.js + * process. + * + * ```js + * import { cwd } from 'node:process'; + * + * console.log(`Current directory: ${cwd()}`); + * ``` + * @since v0.1.8 + */ + cwd(): string; + /** + * The port used by the Node.js debugger when enabled. + * + * ```js + * import process from 'node:process'; + * + * process.debugPort = 5858; + * ``` + * @since v0.7.2 + */ + debugPort: number; + /** + * The `process.dlopen()` method allows dynamically loading shared objects. It is primarily used by `require()` to load C++ Addons, and + * should not be used directly, except in special cases. In other words, `require()` should be preferred over `process.dlopen()` + * unless there are specific reasons such as custom dlopen flags or loading from ES modules. + * + * The `flags` argument is an integer that allows to specify dlopen behavior. See the `[os.constants.dlopen](https://nodejs.org/docs/latest-v22.x/api/os.html#dlopen-constants)` + * documentation for details. + * + * An important requirement when calling `process.dlopen()` is that the `module` instance must be passed. Functions exported by the C++ Addon + * are then accessible via `module.exports`. + * + * The example below shows how to load a C++ Addon, named `local.node`, that exports a `foo` function. All the symbols are loaded before the call returns, by passing the `RTLD_NOW` constant. + * In this example the constant is assumed to be available. + * + * ```js + * import { dlopen } from 'node:process'; + * import { constants } from 'node:os'; + * import { fileURLToPath } from 'node:url'; + * + * const module = { exports: {} }; + * dlopen(module, fileURLToPath(new URL('local.node', import.meta.url)), + * constants.dlopen.RTLD_NOW); + * module.exports.foo(); + * ``` + */ + dlopen(module: object, filename: string, flags?: number): void; + /** + * The `process.emitWarning()` method can be used to emit custom or application + * specific process warnings. These can be listened for by adding a handler to the `'warning'` event. + * + * ```js + * import { emitWarning } from 'node:process'; + * + * // Emit a warning using a string. + * emitWarning('Something happened!'); + * // Emits: (node: 56338) Warning: Something happened! + * ``` + * + * ```js + * import { emitWarning } from 'node:process'; + * + * // Emit a warning using a string and a type. + * emitWarning('Something Happened!', 'CustomWarning'); + * // Emits: (node:56338) CustomWarning: Something Happened! + * ``` + * + * ```js + * import { emitWarning } from 'node:process'; + * + * emitWarning('Something happened!', 'CustomWarning', 'WARN001'); + * // Emits: (node:56338) [WARN001] CustomWarning: Something happened! + * ```js + * + * In each of the previous examples, an `Error` object is generated internally by `process.emitWarning()` and passed through to the `'warning'` handler. + * + * ```js + * import process from 'node:process'; + * + * process.on('warning', (warning) => { + * console.warn(warning.name); // 'Warning' + * console.warn(warning.message); // 'Something happened!' 
+ * console.warn(warning.code); // 'MY_WARNING' + * console.warn(warning.stack); // Stack trace + * console.warn(warning.detail); // 'This is some additional information' + * }); + * ``` + * + * If `warning` is passed as an `Error` object, it will be passed through to the `'warning'` event handler + * unmodified (and the optional `type`, `code` and `ctor` arguments will be ignored): + * + * ```js + * import { emitWarning } from 'node:process'; + * + * // Emit a warning using an Error object. + * const myWarning = new Error('Something happened!'); + * // Use the Error name property to specify the type name + * myWarning.name = 'CustomWarning'; + * myWarning.code = 'WARN001'; + * + * emitWarning(myWarning); + * // Emits: (node:56338) [WARN001] CustomWarning: Something happened! + * ``` + * + * A `TypeError` is thrown if `warning` is anything other than a string or `Error` object. + * + * While process warnings use `Error` objects, the process warning mechanism is not a replacement for normal error handling mechanisms. + * + * The following additional handling is implemented if the warning `type` is `'DeprecationWarning'`: + * * If the `--throw-deprecation` command-line flag is used, the deprecation warning is thrown as an exception rather than being emitted as an event. + * * If the `--no-deprecation` command-line flag is used, the deprecation warning is suppressed. + * * If the `--trace-deprecation` command-line flag is used, the deprecation warning is printed to `stderr` along with the full stack trace. + * @since v8.0.0 + * @param warning The warning to emit. + */ + emitWarning(warning: string | Error, ctor?: Function): void; + emitWarning(warning: string | Error, type?: string, ctor?: Function): void; + emitWarning(warning: string | Error, type?: string, code?: string, ctor?: Function): void; + emitWarning(warning: string | Error, options?: EmitWarningOptions): void; + /** + * The `process.env` property returns an object containing the user environment. + * See [`environ(7)`](http://man7.org/linux/man-pages/man7/environ.7.html). + * + * An example of this object looks like: + * + * ```js + * { + * TERM: 'xterm-256color', + * SHELL: '/usr/local/bin/bash', + * USER: 'maciej', + * PATH: '~/.bin/:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin', + * PWD: '/Users/maciej', + * EDITOR: 'vim', + * SHLVL: '1', + * HOME: '/Users/maciej', + * LOGNAME: 'maciej', + * _: '/usr/local/bin/node' + * } + * ``` + * + * It is possible to modify this object, but such modifications will not be + * reflected outside the Node.js process, or (unless explicitly requested) + * to other `Worker` threads. + * In other words, the following example would not work: + * + * ```bash + * node -e 'process.env.foo = "bar"' && echo $foo + * ``` + * + * While the following will: + * + * ```js + * import { env } from 'node:process'; + * + * env.foo = 'bar'; + * console.log(env.foo); + * ``` + * + * Assigning a property on `process.env` will implicitly convert the value + * to a string. **This behavior is deprecated.** Future versions of Node.js may + * throw an error when the value is not a string, number, or boolean. + * + * ```js + * import { env } from 'node:process'; + * + * env.test = null; + * console.log(env.test); + * // => 'null' + * env.test = undefined; + * console.log(env.test); + * // => 'undefined' + * ``` + * + * Use `delete` to delete a property from `process.env`. 
+ * + * ```js + * import { env } from 'node:process'; + * + * env.TEST = 1; + * delete env.TEST; + * console.log(env.TEST); + * // => undefined + * ``` + * + * On Windows operating systems, environment variables are case-insensitive. + * + * ```js + * import { env } from 'node:process'; + * + * env.TEST = 1; + * console.log(env.test); + * // => 1 + * ``` + * + * Unless explicitly specified when creating a `Worker` instance, + * each `Worker` thread has its own copy of `process.env`, based on its + * parent thread's `process.env`, or whatever was specified as the `env` option + * to the `Worker` constructor. Changes to `process.env` will not be visible + * across `Worker` threads, and only the main thread can make changes that + * are visible to the operating system or to native add-ons. On Windows, a copy of `process.env` on a `Worker` instance operates in a case-sensitive manner + * unlike the main thread. + * @since v0.1.27 + */ + env: ProcessEnv; + /** + * The `process.exit()` method instructs Node.js to terminate the process + * synchronously with an exit status of `code`. If `code` is omitted, exit uses + * either the 'success' code `0` or the value of `process.exitCode` if it has been + * set. Node.js will not terminate until all the `'exit'` event listeners are + * called. + * + * To exit with a 'failure' code: + * + * ```js + * import { exit } from 'node:process'; + * + * exit(1); + * ``` + * + * The shell that executed Node.js should see the exit code as `1`. + * + * Calling `process.exit()` will force the process to exit as quickly as possible + * even if there are still asynchronous operations pending that have not yet + * completed fully, including I/O operations to `process.stdout` and `process.stderr`. + * + * In most situations, it is not actually necessary to call `process.exit()` explicitly. The Node.js process will exit on its own _if there is no additional_ + * _work pending_ in the event loop. The `process.exitCode` property can be set to + * tell the process which exit code to use when the process exits gracefully. + * + * For instance, the following example illustrates a _misuse_ of the `process.exit()` method that could lead to data printed to stdout being + * truncated and lost: + * + * ```js + * import { exit } from 'node:process'; + * + * // This is an example of what *not* to do: + * if (someConditionNotMet()) { + * printUsageToStdout(); + * exit(1); + * } + * ``` + * + * The reason this is problematic is because writes to `process.stdout` in Node.js + * are sometimes _asynchronous_ and may occur over multiple ticks of the Node.js + * event loop. Calling `process.exit()`, however, forces the process to exit _before_ those additional writes to `stdout` can be performed. + * + * Rather than calling `process.exit()` directly, the code _should_ set the `process.exitCode` and allow the process to exit naturally by avoiding + * scheduling any additional work for the event loop: + * + * ```js + * import process from 'node:process'; + * + * // How to properly set the exit code while letting + * // the process exit gracefully. + * if (someConditionNotMet()) { + * printUsageToStdout(); + * process.exitCode = 1; + * } + * ``` + * + * If it is necessary to terminate the Node.js process due to an error condition, + * throwing an _uncaught_ error and allowing the process to terminate accordingly + * is safer than calling `process.exit()`. + * + * In `Worker` threads, this function stops the current thread rather + * than the current process. 
+ * @since v0.1.13 + * @param [code=0] The exit code. For string type, only integer strings (e.g., '1') are allowed. + */ + exit(code?: number | string | null | undefined): never; + /** + * A number which will be the process exit code, when the process either + * exits gracefully, or is exited via {@link exit} without specifying + * a code. + * + * Specifying a code to {@link exit} will override any + * previous setting of `process.exitCode`. + * @default undefined + * @since v0.11.8 + */ + exitCode?: number | string | undefined; + finalization: { + /** + * This function registers a callback to be called when the process emits the `exit` event if the `ref` object was not garbage collected. + * If the object `ref` was garbage collected before the `exit` event is emitted, the callback will be removed from the finalization registry, and it will not be called on process exit. + * + * Inside the callback you can release the resources allocated by the `ref` object. + * Be aware that all limitations applied to the `beforeExit` event are also applied to the callback function, + * this means that there is a possibility that the callback will not be called under special circumstances. + * + * The idea of this function is to help you free up resources when the process starts exiting, but also to let the object be garbage collected if it is no longer being used. + * @param ref The reference to the resource that is being tracked. + * @param callback The callback function to be called when the resource is finalized. + * @since v22.5.0 + * @experimental + */ + register<T extends object>(ref: T, callback: (ref: T, event: "exit") => void): void; + /** + * This function behaves exactly like `register()`, except that the callback will be called when the process emits the `beforeExit` event if the `ref` object was not garbage collected. + * + * Be aware that all limitations applied to the `beforeExit` event are also applied to the callback function, this means that there is a possibility that the callback will not be called under special circumstances. + * @param ref The reference to the resource that is being tracked. + * @param callback The callback function to be called when the resource is finalized. + * @since v22.5.0 + * @experimental + */ + registerBeforeExit<T extends object>(ref: T, callback: (ref: T, event: "beforeExit") => void): void; + /** + * This function removes the registration of the object from the finalization registry, so the callback will not be called anymore. + * @param ref The reference to the resource that was registered previously. + * @since v22.5.0 + * @experimental + */ + unregister(ref: object): void; + }; + /** + * The `process.getActiveResourcesInfo()` method returns an array of strings containing + * the types of the active resources that are currently keeping the event loop alive. + * + * ```js + * import { getActiveResourcesInfo } from 'node:process'; + * import { setTimeout } from 'node:timers'; + * + * console.log('Before:', getActiveResourcesInfo()); + * setTimeout(() => {}, 1000); + * console.log('After:', getActiveResourcesInfo()); + * // Prints: + * // Before: [ 'TTYWrap', 'TTYWrap', 'TTYWrap' ] + * // After: [ 'TTYWrap', 'TTYWrap', 'TTYWrap', 'Timeout' ] + * ``` + * @since v17.3.0, v16.14.0 + */ + getActiveResourcesInfo(): string[]; + /** + * Provides a way to load built-in modules in a globally available function. + * @param id ID of the built-in module being requested.
+ */ + getBuiltinModule(id: ID): BuiltInModule[ID]; + getBuiltinModule(id: string): object | undefined; + /** + * The `process.getgid()` method returns the numerical group identity of the + * process. (See [`getgid(2)`](http://man7.org/linux/man-pages/man2/getgid.2.html).) + * + * ```js + * import process from 'node:process'; + * + * if (process.getgid) { + * console.log(`Current gid: ${process.getgid()}`); + * } + * ``` + * + * This function is only available on POSIX platforms (i.e. not Windows or + * Android). + * @since v0.1.31 + */ + getgid?: () => number; + /** + * The `process.setgid()` method sets the group identity of the process. (See [`setgid(2)`](http://man7.org/linux/man-pages/man2/setgid.2.html).) The `id` can be passed as either a + * numeric ID or a group name + * string. If a group name is specified, this method blocks while resolving the + * associated numeric ID. + * + * ```js + * import process from 'node:process'; + * + * if (process.getgid && process.setgid) { + * console.log(`Current gid: ${process.getgid()}`); + * try { + * process.setgid(501); + * console.log(`New gid: ${process.getgid()}`); + * } catch (err) { + * console.log(`Failed to set gid: ${err}`); + * } + * } + * ``` + * + * This function is only available on POSIX platforms (i.e. not Windows or + * Android). + * This feature is not available in `Worker` threads. + * @since v0.1.31 + * @param id The group name or ID + */ + setgid?: (id: number | string) => void; + /** + * The `process.getuid()` method returns the numeric user identity of the process. + * (See [`getuid(2)`](http://man7.org/linux/man-pages/man2/getuid.2.html).) + * + * ```js + * import process from 'node:process'; + * + * if (process.getuid) { + * console.log(`Current uid: ${process.getuid()}`); + * } + * ``` + * + * This function is only available on POSIX platforms (i.e. not Windows or + * Android). + * @since v0.1.28 + */ + getuid?: () => number; + /** + * The `process.setuid(id)` method sets the user identity of the process. (See [`setuid(2)`](http://man7.org/linux/man-pages/man2/setuid.2.html).) The `id` can be passed as either a + * numeric ID or a username string. + * If a username is specified, the method blocks while resolving the associated + * numeric ID. + * + * ```js + * import process from 'node:process'; + * + * if (process.getuid && process.setuid) { + * console.log(`Current uid: ${process.getuid()}`); + * try { + * process.setuid(501); + * console.log(`New uid: ${process.getuid()}`); + * } catch (err) { + * console.log(`Failed to set uid: ${err}`); + * } + * } + * ``` + * + * This function is only available on POSIX platforms (i.e. not Windows or + * Android). + * This feature is not available in `Worker` threads. + * @since v0.1.28 + */ + setuid?: (id: number | string) => void; + /** + * The `process.geteuid()` method returns the numerical effective user identity of + * the process. (See [`geteuid(2)`](http://man7.org/linux/man-pages/man2/geteuid.2.html).) + * + * ```js + * import process from 'node:process'; + * + * if (process.geteuid) { + * console.log(`Current uid: ${process.geteuid()}`); + * } + * ``` + * + * This function is only available on POSIX platforms (i.e. not Windows or + * Android). + * @since v2.0.0 + */ + geteuid?: () => number; + /** + * The `process.seteuid()` method sets the effective user identity of the process. + * (See [`seteuid(2)`](http://man7.org/linux/man-pages/man2/seteuid.2.html).) The `id` can be passed as either a numeric ID or a username + * string. 
If a username is specified, the method blocks while resolving the + * associated numeric ID. + * + * ```js + * import process from 'node:process'; + * + * if (process.geteuid && process.seteuid) { + * console.log(`Current uid: ${process.geteuid()}`); + * try { + * process.seteuid(501); + * console.log(`New uid: ${process.geteuid()}`); + * } catch (err) { + * console.log(`Failed to set uid: ${err}`); + * } + * } + * ``` + * + * This function is only available on POSIX platforms (i.e. not Windows or + * Android). + * This feature is not available in `Worker` threads. + * @since v2.0.0 + * @param id A user name or ID + */ + seteuid?: (id: number | string) => void; + /** + * The `process.getegid()` method returns the numerical effective group identity + * of the Node.js process. (See [`getegid(2)`](http://man7.org/linux/man-pages/man2/getegid.2.html).) + * + * ```js + * import process from 'node:process'; + * + * if (process.getegid) { + * console.log(`Current gid: ${process.getegid()}`); + * } + * ``` + * + * This function is only available on POSIX platforms (i.e. not Windows or + * Android). + * @since v2.0.0 + */ + getegid?: () => number; + /** + * The `process.setegid()` method sets the effective group identity of the process. + * (See [`setegid(2)`](http://man7.org/linux/man-pages/man2/setegid.2.html).) The `id` can be passed as either a numeric ID or a group + * name string. If a group name is specified, this method blocks while resolving + * the associated a numeric ID. + * + * ```js + * import process from 'node:process'; + * + * if (process.getegid && process.setegid) { + * console.log(`Current gid: ${process.getegid()}`); + * try { + * process.setegid(501); + * console.log(`New gid: ${process.getegid()}`); + * } catch (err) { + * console.log(`Failed to set gid: ${err}`); + * } + * } + * ``` + * + * This function is only available on POSIX platforms (i.e. not Windows or + * Android). + * This feature is not available in `Worker` threads. + * @since v2.0.0 + * @param id A group name or ID + */ + setegid?: (id: number | string) => void; + /** + * The `process.getgroups()` method returns an array with the supplementary group + * IDs. POSIX leaves it unspecified if the effective group ID is included but + * Node.js ensures it always is. + * + * ```js + * import process from 'node:process'; + * + * if (process.getgroups) { + * console.log(process.getgroups()); // [ 16, 21, 297 ] + * } + * ``` + * + * This function is only available on POSIX platforms (i.e. not Windows or + * Android). + * @since v0.9.4 + */ + getgroups?: () => number[]; + /** + * The `process.setgroups()` method sets the supplementary group IDs for the + * Node.js process. This is a privileged operation that requires the Node.js + * process to have `root` or the `CAP_SETGID` capability. + * + * The `groups` array can contain numeric group IDs, group names, or both. + * + * ```js + * import process from 'node:process'; + * + * if (process.getgroups && process.setgroups) { + * try { + * process.setgroups([501]); + * console.log(process.getgroups()); // new groups + * } catch (err) { + * console.log(`Failed to set groups: ${err}`); + * } + * } + * ``` + * + * This function is only available on POSIX platforms (i.e. not Windows or + * Android). + * This feature is not available in `Worker` threads. 
+ * @since v0.9.4 + */ + setgroups?: (groups: ReadonlyArray<number | string>) => void; + /** + * The `process.setUncaughtExceptionCaptureCallback()` function sets a function + * that will be invoked when an uncaught exception occurs, which will receive the + * exception value itself as its first argument. + * + * If such a function is set, the `'uncaughtException'` event will + * not be emitted. If `--abort-on-uncaught-exception` was passed from the + * command line or set through `v8.setFlagsFromString()`, the process will + * not abort. Actions configured to take place on exceptions such as report + * generation will be affected too. + * + * To unset the capture function, `process.setUncaughtExceptionCaptureCallback(null)` may be used. Calling this + * method with a non-`null` argument while another capture function is set will + * throw an error. + * + * Using this function is mutually exclusive with using the deprecated `domain` built-in module. + * @since v9.3.0 + */ + setUncaughtExceptionCaptureCallback(cb: ((err: Error) => void) | null): void; + /** + * Indicates whether a callback has been set using {@link setUncaughtExceptionCaptureCallback}. + * @since v9.3.0 + */ + hasUncaughtExceptionCaptureCallback(): boolean; + /** + * The `process.sourceMapsEnabled` property returns whether the [Source Map v3](https://sourcemaps.info/spec.html) support for stack traces is enabled. + * @since v20.7.0 + * @experimental + */ + readonly sourceMapsEnabled: boolean; + /** + * This function enables or disables the [Source Map v3](https://sourcemaps.info/spec.html) support for + * stack traces. + * + * It provides the same features as launching the Node.js process with the command-line option `--enable-source-maps`. + * + * Only source maps in JavaScript files that are loaded after source map support has been + * enabled will be parsed and loaded. + * @since v16.6.0, v14.18.0 + * @experimental + */ + setSourceMapsEnabled(value: boolean): void; + /** + * The `process.version` property contains the Node.js version string. + * + * ```js + * import { version } from 'node:process'; + * + * console.log(`Version: ${version}`); + * // Version: v14.8.0 + * ``` + * + * To get the version string without the prepended _v_, use `process.versions.node`. + * @since v0.1.3 + */ + readonly version: string; + /** + * The `process.versions` property returns an object listing the version strings of + * Node.js and its dependencies. `process.versions.modules` indicates the current + * ABI version, which is increased whenever a C++ API changes. Node.js will refuse + * to load modules that were compiled against a different module ABI version. + * + * ```js + * import { versions } from 'node:process'; + * + * console.log(versions); + * ``` + * + * Will generate an object similar to: + * + * ```console + * { node: '20.2.0', + * acorn: '8.8.2', + * ada: '2.4.0', + * ares: '1.19.0', + * base64: '0.5.0', + * brotli: '1.0.9', + * cjs_module_lexer: '1.2.2', + * cldr: '43.0', + * icu: '73.1', + * llhttp: '8.1.0', + * modules: '115', + * napi: '8', + * nghttp2: '1.52.0', + * nghttp3: '0.7.0', + * ngtcp2: '0.8.1', + * openssl: '3.0.8+quic', + * simdutf: '3.2.9', + * tz: '2023c', + * undici: '5.22.0', + * unicode: '15.0', + * uv: '1.44.2', + * uvwasi: '0.0.16', + * v8: '11.3.244.8-node.9', + * zlib: '1.2.13' } + * ``` + * @since v0.2.0 + */ + readonly versions: ProcessVersions; + /** + * The `process.config` property returns a frozen `Object` containing the + * JavaScript representation of the configure options used to compile the current + * Node.js executable.
This is the same as the `config.gypi` file that was produced + * when running the `./configure` script. + * + * An example of the possible output looks like: + * + * ```js + * { + * target_defaults: + * { cflags: [], + * default_configuration: 'Release', + * defines: [], + * include_dirs: [], + * libraries: [] }, + * variables: + * { + * host_arch: 'x64', + * napi_build_version: 5, + * node_install_npm: 'true', + * node_prefix: '', + * node_shared_cares: 'false', + * node_shared_http_parser: 'false', + * node_shared_libuv: 'false', + * node_shared_zlib: 'false', + * node_use_openssl: 'true', + * node_shared_openssl: 'false', + * strict_aliasing: 'true', + * target_arch: 'x64', + * v8_use_snapshot: 1 + * } + * } + * ``` + * @since v0.7.7 + */ + readonly config: ProcessConfig; + /** + * The `process.kill()` method sends the `signal` to the process identified by`pid`. + * + * Signal names are strings such as `'SIGINT'` or `'SIGHUP'`. See `Signal Events` and [`kill(2)`](http://man7.org/linux/man-pages/man2/kill.2.html) for more information. + * + * This method will throw an error if the target `pid` does not exist. As a special + * case, a signal of `0` can be used to test for the existence of a process. + * Windows platforms will throw an error if the `pid` is used to kill a process + * group. + * + * Even though the name of this function is `process.kill()`, it is really just a + * signal sender, like the `kill` system call. The signal sent may do something + * other than kill the target process. + * + * ```js + * import process, { kill } from 'node:process'; + * + * process.on('SIGHUP', () => { + * console.log('Got SIGHUP signal.'); + * }); + * + * setTimeout(() => { + * console.log('Exiting.'); + * process.exit(0); + * }, 100); + * + * kill(process.pid, 'SIGHUP'); + * ``` + * + * When `SIGUSR1` is received by a Node.js process, Node.js will start the + * debugger. See `Signal Events`. + * @since v0.0.6 + * @param pid A process ID + * @param [signal='SIGTERM'] The signal to send, either as a string or number. + */ + kill(pid: number, signal?: string | number): true; + /** + * Loads the environment configuration from a `.env` file into `process.env`. If + * the file is not found, error will be thrown. + * + * To load a specific .env file by specifying its path, use the following code: + * + * ```js + * import { loadEnvFile } from 'node:process'; + * + * loadEnvFile('./development.env') + * ``` + * @since v20.12.0 + * @param path The path to the .env file + */ + loadEnvFile(path?: string | URL | Buffer): void; + /** + * The `process.pid` property returns the PID of the process. + * + * ```js + * import { pid } from 'node:process'; + * + * console.log(`This process is pid ${pid}`); + * ``` + * @since v0.1.15 + */ + readonly pid: number; + /** + * The `process.ppid` property returns the PID of the parent of the + * current process. + * + * ```js + * import { ppid } from 'node:process'; + * + * console.log(`The parent process is pid ${ppid}`); + * ``` + * @since v9.2.0, v8.10.0, v6.13.0 + */ + readonly ppid: number; + /** + * The `process.title` property returns the current process title (i.e. returns + * the current value of `ps`). Assigning a new value to `process.title` modifies + * the current value of `ps`. + * + * When a new value is assigned, different platforms will impose different maximum + * length restrictions on the title. Usually such restrictions are quite limited. 
+ * For instance, on Linux and macOS, `process.title` is limited to the size of the + * binary name plus the length of the command-line arguments because setting the `process.title` overwrites the `argv` memory of the process. Node.js v0.8 + * allowed for longer process title strings by also overwriting the `environ` memory but that was potentially insecure and confusing in some (rather obscure) + * cases. + * + * Assigning a value to `process.title` might not result in an accurate label + * within process manager applications such as macOS Activity Monitor or Windows + * Services Manager. + * @since v0.1.104 + */ + title: string; + /** + * The operating system CPU architecture for which the Node.js binary was compiled. + * Possible values are: `'arm'`, `'arm64'`, `'ia32'`, `'loong64'`, `'mips'`, `'mipsel'`, `'ppc'`, `'ppc64'`, `'riscv64'`, `'s390'`, `'s390x'`, and `'x64'`. + * + * ```js + * import { arch } from 'node:process'; + * + * console.log(`This processor architecture is ${arch}`); + * ``` + * @since v0.5.0 + */ + readonly arch: Architecture; + /** + * The `process.platform` property returns a string identifying the operating + * system platform for which the Node.js binary was compiled. + * + * Currently possible values are: + * + * * `'aix'` + * * `'darwin'` + * * `'freebsd'` + * * `'linux'` + * * `'openbsd'` + * * `'sunos'` + * * `'win32'` + * + * ```js + * import { platform } from 'node:process'; + * + * console.log(`This platform is ${platform}`); + * ``` + * + * The value `'android'` may also be returned if the Node.js is built on the + * Android operating system. However, Android support in Node.js [is experimental](https://github.com/nodejs/node/blob/HEAD/BUILDING.md#androidandroid-based-devices-eg-firefox-os). + * @since v0.1.16 + */ + readonly platform: Platform; + /** + * The `process.mainModule` property provides an alternative way of retrieving `require.main`. The difference is that if the main module changes at + * runtime, `require.main` may still refer to the original main module in + * modules that were required before the change occurred. Generally, it's + * safe to assume that the two refer to the same module. + * + * As with `require.main`, `process.mainModule` will be `undefined` if there + * is no entry script. + * @since v0.1.17 + * @deprecated Since v14.0.0 - Use `main` instead. + */ + mainModule?: Module | undefined; + memoryUsage: MemoryUsageFn; + /** + * Gets the amount of memory available to the process (in bytes) based on + * limits imposed by the OS. If there is no such constraint, or the constraint + * is unknown, `0` is returned. + * + * See [`uv_get_constrained_memory`](https://docs.libuv.org/en/v1.x/misc.html#c.uv_get_constrained_memory) for more + * information. + * @since v19.6.0, v18.15.0 + * @experimental + */ + constrainedMemory(): number; + /** + * Gets the amount of free memory that is still available to the process (in bytes). + * See [`uv_get_available_memory`](https://nodejs.org/docs/latest-v22.x/api/process.html#processavailablememory) for more information. + * @experimental + * @since v20.13.0 + */ + availableMemory(): number; + /** + * The `process.cpuUsage()` method returns the user and system CPU time usage of + * the current process, in an object with properties `user` and `system`, whose + * values are microsecond values (millionth of a second). 
These values measure time + * spent in user and system code respectively, and may end up being greater than + * actual elapsed time if multiple CPU cores are performing work for this process. + * + * The result of a previous call to `process.cpuUsage()` can be passed as the + * argument to the function, to get a diff reading. + * + * ```js + * import { cpuUsage } from 'node:process'; + * + * const startUsage = cpuUsage(); + * // { user: 38579, system: 6986 } + * + * // spin the CPU for 500 milliseconds + * const now = Date.now(); + * while (Date.now() - now < 500); + * + * console.log(cpuUsage(startUsage)); + * // { user: 514883, system: 11226 } + * ``` + * @since v6.1.0 + * @param previousValue A previous return value from calling `process.cpuUsage()` + */ + cpuUsage(previousValue?: CpuUsage): CpuUsage; + /** + * `process.nextTick()` adds `callback` to the "next tick queue". This queue is + * fully drained after the current operation on the JavaScript stack runs to + * completion and before the event loop is allowed to continue. It's possible to + * create an infinite loop if one were to recursively call `process.nextTick()`. + * See the [Event Loop](https://nodejs.org/en/docs/guides/event-loop-timers-and-nexttick/#process-nexttick) guide for more background. + * + * ```js + * import { nextTick } from 'node:process'; + * + * console.log('start'); + * nextTick(() => { + * console.log('nextTick callback'); + * }); + * console.log('scheduled'); + * // Output: + * // start + * // scheduled + * // nextTick callback + * ``` + * + * This is important when developing APIs in order to give users the opportunity + * to assign event handlers _after_ an object has been constructed but before any + * I/O has occurred: + * + * ```js + * import { nextTick } from 'node:process'; + * + * function MyThing(options) { + * this.setupOptions(options); + * + * nextTick(() => { + * this.startDoingStuff(); + * }); + * } + * + * const thing = new MyThing(); + * thing.getReadyForStuff(); + * + * // thing.startDoingStuff() gets called now, not before. + * ``` + * + * It is very important for APIs to be either 100% synchronous or 100% + * asynchronous. Consider this example: + * + * ```js + * // WARNING! DO NOT USE! BAD UNSAFE HAZARD! + * function maybeSync(arg, cb) { + * if (arg) { + * cb(); + * return; + * } + * + * fs.stat('file', cb); + * } + * ``` + * + * This API is hazardous because in the following case: + * + * ```js + * const maybeTrue = Math.random() > 0.5; + * + * maybeSync(maybeTrue, () => { + * foo(); + * }); + * + * bar(); + * ``` + * + * It is not clear whether `foo()` or `bar()` will be called first. + * + * The following approach is much better: + * + * ```js + * import { nextTick } from 'node:process'; + * + * function definitelyAsync(arg, cb) { + * if (arg) { + * nextTick(cb); + * return; + * } + * + * fs.stat('file', cb); + * } + * ``` + * @since v0.1.26 + * @param args Additional arguments to pass when invoking the `callback` + */ + nextTick(callback: Function, ...args: any[]): void; + /** + * This API is available through the [--experimental-permission](https://nodejs.org/api/cli.html#--experimental-permission) flag. + * + * `process.permission` is an object whose methods are used to manage permissions for the current process. + * Additional documentation is available in the [Permission Model](https://nodejs.org/api/permissions.html#permission-model). 
+ * @since v20.0.0 + */ + permission: ProcessPermission; + /** + * The `process.release` property returns an `Object` containing metadata related + * to the current release, including URLs for the source tarball and headers-only + * tarball. + * + * `process.release` contains the following properties: + * + * ```js + * { + * name: 'node', + * lts: 'Hydrogen', + * sourceUrl: 'https://nodejs.org/download/release/v18.12.0/node-v18.12.0.tar.gz', + * headersUrl: 'https://nodejs.org/download/release/v18.12.0/node-v18.12.0-headers.tar.gz', + * libUrl: 'https://nodejs.org/download/release/v18.12.0/win-x64/node.lib' + * } + * ``` + * + * In custom builds from non-release versions of the source tree, only the `name` property may be present. The additional properties should not be + * relied upon to exist. + * @since v3.0.0 + */ + readonly release: ProcessRelease; + readonly features: ProcessFeatures; + /** + * `process.umask()` returns the Node.js process's file mode creation mask. Child + * processes inherit the mask from the parent process. + * @since v0.1.19 + * @deprecated Calling `process.umask()` with no argument causes the process-wide umask to be written twice. This introduces a race condition between threads, and is a potential + * security vulnerability. There is no safe, cross-platform alternative API. + */ + umask(): number; + /** + * Can only be set if not in worker thread. + */ + umask(mask: string | number): number; + /** + * The `process.uptime()` method returns the number of seconds the current Node.js + * process has been running. + * + * The return value includes fractions of a second. Use `Math.floor()` to get whole + * seconds. + * @since v0.5.0 + */ + uptime(): number; + hrtime: HRTime; + /** + * If the Node.js process was spawned with an IPC channel, the process.channel property is a reference to the IPC channel. + * If no IPC channel exists, this property is undefined. + * @since v7.1.0 + */ + channel?: { + /** + * This method makes the IPC channel keep the event loop of the process running if .unref() has been called before. + * @since v7.1.0 + */ + ref(): void; + /** + * This method makes the IPC channel not keep the event loop of the process running, and lets it finish even while the channel is open. + * @since v7.1.0 + */ + unref(): void; + }; + /** + * If Node.js is spawned with an IPC channel, the `process.send()` method can be + * used to send messages to the parent process. Messages will be received as a `'message'` event on the parent's `ChildProcess` object. + * + * If Node.js was not spawned with an IPC channel, `process.send` will be `undefined`. + * + * The message goes through serialization and parsing. The resulting message might + * not be the same as what is originally sent. + * @since v0.5.9 + * @param options used to parameterize the sending of certain types of handles. `options` supports the following properties: + */ + send?( + message: any, + sendHandle?: any, + options?: { + keepOpen?: boolean | undefined; + }, + callback?: (error: Error | null) => void, + ): boolean; + /** + * If the Node.js process is spawned with an IPC channel (see the `Child Process` and `Cluster` documentation), the `process.disconnect()` method will close the + * IPC channel to the parent process, allowing the child process to exit gracefully + * once there are no other connections keeping it alive. + * + * The effect of calling `process.disconnect()` is the same as calling `ChildProcess.disconnect()` from the parent process. 
+ * + * If the Node.js process was not spawned with an IPC channel, `process.disconnect()` will be `undefined`. + * @since v0.7.2 + */ + disconnect(): void; + /** + * If the Node.js process is spawned with an IPC channel (see the `Child Process` and `Cluster` documentation), the `process.connected` property will return `true` so long as the IPC + * channel is connected and will return `false` after `process.disconnect()` is called. + * + * Once `process.connected` is `false`, it is no longer possible to send messages + * over the IPC channel using `process.send()`. + * @since v0.7.2 + */ + connected: boolean; + /** + * The `process.allowedNodeEnvironmentFlags` property is a special, + * read-only `Set` of flags allowable within the `NODE_OPTIONS` environment variable. + * + * `process.allowedNodeEnvironmentFlags` extends `Set`, but overrides `Set.prototype.has` to recognize several different possible flag + * representations. `process.allowedNodeEnvironmentFlags.has()` will + * return `true` in the following cases: + * + * * Flags may omit leading single (`-`) or double (`--`) dashes; e.g., `inspect-brk` for `--inspect-brk`, or `r` for `-r`. + * * Flags passed through to V8 (as listed in `--v8-options`) may replace + * one or more _non-leading_ dashes for an underscore, or vice-versa; + * e.g., `--perf_basic_prof`, `--perf-basic-prof`, `--perf_basic-prof`, + * etc. + * * Flags may contain one or more equals (`=`) characters; all + * characters after and including the first equals will be ignored; + * e.g., `--stack-trace-limit=100`. + * * Flags _must_ be allowable within `NODE_OPTIONS`. + * + * When iterating over `process.allowedNodeEnvironmentFlags`, flags will + * appear only _once_; each will begin with one or more dashes. Flags + * passed through to V8 will contain underscores instead of non-leading + * dashes: + * + * ```js + * import { allowedNodeEnvironmentFlags } from 'node:process'; + * + * allowedNodeEnvironmentFlags.forEach((flag) => { + * // -r + * // --inspect-brk + * // --abort_on_uncaught_exception + * // ... + * }); + * ``` + * + * The methods `add()`, `clear()`, and `delete()` of`process.allowedNodeEnvironmentFlags` do nothing, and will fail + * silently. + * + * If Node.js was compiled _without_ `NODE_OPTIONS` support (shown in {@link config}), `process.allowedNodeEnvironmentFlags` will + * contain what _would have_ been allowable. + * @since v10.10.0 + */ + allowedNodeEnvironmentFlags: ReadonlySet; + /** + * `process.report` is an object whose methods are used to generate diagnostic reports for the current process. + * Additional documentation is available in the [report documentation](https://nodejs.org/docs/latest-v22.x/api/report.html). + * @since v11.8.0 + */ + report: ProcessReport; + /** + * ```js + * import { resourceUsage } from 'node:process'; + * + * console.log(resourceUsage()); + * /* + * Will output: + * { + * userCPUTime: 82872, + * systemCPUTime: 4143, + * maxRSS: 33164, + * sharedMemorySize: 0, + * unsharedDataSize: 0, + * unsharedStackSize: 0, + * minorPageFault: 2469, + * majorPageFault: 0, + * swappedOut: 0, + * fsRead: 0, + * fsWrite: 8, + * ipcSent: 0, + * ipcReceived: 0, + * signalsCount: 0, + * voluntaryContextSwitches: 79, + * involuntaryContextSwitches: 1 + * } + * + * ``` + * @since v12.6.0 + * @return the resource usage for the current process. All of these values come from the `uv_getrusage` call which returns a [`uv_rusage_t` struct][uv_rusage_t]. 
+ */ + resourceUsage(): ResourceUsage; + /** + * The initial value of `process.throwDeprecation` indicates whether the `--throw-deprecation` flag is set on the current Node.js process. `process.throwDeprecation` + * is mutable, so whether or not deprecation warnings result in errors may be altered at runtime. See the documentation for the 'warning' event and the emitWarning() + * method for more information. + * + * ```bash + * $ node --throw-deprecation -p "process.throwDeprecation" + * true + * $ node -p "process.throwDeprecation" + * undefined + * $ node + * > process.emitWarning('test', 'DeprecationWarning'); + * undefined + * > (node:26598) DeprecationWarning: test + * > process.throwDeprecation = true; + * true + * > process.emitWarning('test', 'DeprecationWarning'); + * Thrown: + * [DeprecationWarning: test] { name: 'DeprecationWarning' } + * ``` + * @since v0.9.12 + */ + throwDeprecation: boolean; + /** + * The `process.traceDeprecation` property indicates whether the `--trace-deprecation` flag is set on the current Node.js process. See the + * documentation for the `'warning' event` and the `emitWarning() method` for more information about this + * flag's behavior. + * @since v0.8.0 + */ + traceDeprecation: boolean; + /* EventEmitter */ + addListener(event: "beforeExit", listener: BeforeExitListener): this; + addListener(event: "disconnect", listener: DisconnectListener): this; + addListener(event: "exit", listener: ExitListener): this; + addListener(event: "rejectionHandled", listener: RejectionHandledListener): this; + addListener(event: "uncaughtException", listener: UncaughtExceptionListener): this; + addListener(event: "uncaughtExceptionMonitor", listener: UncaughtExceptionListener): this; + addListener(event: "unhandledRejection", listener: UnhandledRejectionListener): this; + addListener(event: "warning", listener: WarningListener): this; + addListener(event: "message", listener: MessageListener): this; + addListener(event: Signals, listener: SignalsListener): this; + addListener(event: "multipleResolves", listener: MultipleResolveListener): this; + addListener(event: "worker", listener: WorkerListener): this; + emit(event: "beforeExit", code: number): boolean; + emit(event: "disconnect"): boolean; + emit(event: "exit", code: number): boolean; + emit(event: "rejectionHandled", promise: Promise): boolean; + emit(event: "uncaughtException", error: Error): boolean; + emit(event: "uncaughtExceptionMonitor", error: Error): boolean; + emit(event: "unhandledRejection", reason: unknown, promise: Promise): boolean; + emit(event: "warning", warning: Error): boolean; + emit(event: "message", message: unknown, sendHandle: unknown): this; + emit(event: Signals, signal?: Signals): boolean; + emit( + event: "multipleResolves", + type: MultipleResolveType, + promise: Promise, + value: unknown, + ): this; + emit(event: "worker", listener: WorkerListener): this; + on(event: "beforeExit", listener: BeforeExitListener): this; + on(event: "disconnect", listener: DisconnectListener): this; + on(event: "exit", listener: ExitListener): this; + on(event: "rejectionHandled", listener: RejectionHandledListener): this; + on(event: "uncaughtException", listener: UncaughtExceptionListener): this; + on(event: "uncaughtExceptionMonitor", listener: UncaughtExceptionListener): this; + on(event: "unhandledRejection", listener: UnhandledRejectionListener): this; + on(event: "warning", listener: WarningListener): this; + on(event: "message", listener: MessageListener): this; + on(event: Signals, listener: 
SignalsListener): this; + on(event: "multipleResolves", listener: MultipleResolveListener): this; + on(event: "worker", listener: WorkerListener): this; + on(event: string | symbol, listener: (...args: any[]) => void): this; + once(event: "beforeExit", listener: BeforeExitListener): this; + once(event: "disconnect", listener: DisconnectListener): this; + once(event: "exit", listener: ExitListener): this; + once(event: "rejectionHandled", listener: RejectionHandledListener): this; + once(event: "uncaughtException", listener: UncaughtExceptionListener): this; + once(event: "uncaughtExceptionMonitor", listener: UncaughtExceptionListener): this; + once(event: "unhandledRejection", listener: UnhandledRejectionListener): this; + once(event: "warning", listener: WarningListener): this; + once(event: "message", listener: MessageListener): this; + once(event: Signals, listener: SignalsListener): this; + once(event: "multipleResolves", listener: MultipleResolveListener): this; + once(event: "worker", listener: WorkerListener): this; + once(event: string | symbol, listener: (...args: any[]) => void): this; + prependListener(event: "beforeExit", listener: BeforeExitListener): this; + prependListener(event: "disconnect", listener: DisconnectListener): this; + prependListener(event: "exit", listener: ExitListener): this; + prependListener(event: "rejectionHandled", listener: RejectionHandledListener): this; + prependListener(event: "uncaughtException", listener: UncaughtExceptionListener): this; + prependListener(event: "uncaughtExceptionMonitor", listener: UncaughtExceptionListener): this; + prependListener(event: "unhandledRejection", listener: UnhandledRejectionListener): this; + prependListener(event: "warning", listener: WarningListener): this; + prependListener(event: "message", listener: MessageListener): this; + prependListener(event: Signals, listener: SignalsListener): this; + prependListener(event: "multipleResolves", listener: MultipleResolveListener): this; + prependListener(event: "worker", listener: WorkerListener): this; + prependOnceListener(event: "beforeExit", listener: BeforeExitListener): this; + prependOnceListener(event: "disconnect", listener: DisconnectListener): this; + prependOnceListener(event: "exit", listener: ExitListener): this; + prependOnceListener(event: "rejectionHandled", listener: RejectionHandledListener): this; + prependOnceListener(event: "uncaughtException", listener: UncaughtExceptionListener): this; + prependOnceListener(event: "uncaughtExceptionMonitor", listener: UncaughtExceptionListener): this; + prependOnceListener(event: "unhandledRejection", listener: UnhandledRejectionListener): this; + prependOnceListener(event: "warning", listener: WarningListener): this; + prependOnceListener(event: "message", listener: MessageListener): this; + prependOnceListener(event: Signals, listener: SignalsListener): this; + prependOnceListener(event: "multipleResolves", listener: MultipleResolveListener): this; + prependOnceListener(event: "worker", listener: WorkerListener): this; + listeners(event: "beforeExit"): BeforeExitListener[]; + listeners(event: "disconnect"): DisconnectListener[]; + listeners(event: "exit"): ExitListener[]; + listeners(event: "rejectionHandled"): RejectionHandledListener[]; + listeners(event: "uncaughtException"): UncaughtExceptionListener[]; + listeners(event: "uncaughtExceptionMonitor"): UncaughtExceptionListener[]; + listeners(event: "unhandledRejection"): UnhandledRejectionListener[]; + listeners(event: "warning"): WarningListener[]; + 
listeners(event: "message"): MessageListener[]; + listeners(event: Signals): SignalsListener[]; + listeners(event: "multipleResolves"): MultipleResolveListener[]; + listeners(event: "worker"): WorkerListener[]; + } + } + } + export = process; +} +declare module "node:process" { + import process = require("process"); + export = process; +} diff --git a/node_modules/@types/node-fetch/node_modules/@types/node/punycode.d.ts b/node_modules/@types/node-fetch/node_modules/@types/node/punycode.d.ts new file mode 100644 index 0000000..655c47b --- /dev/null +++ b/node_modules/@types/node-fetch/node_modules/@types/node/punycode.d.ts @@ -0,0 +1,117 @@ +/** + * **The version of the punycode module bundled in Node.js is being deprecated. **In a future major version of Node.js this module will be removed. Users + * currently depending on the `punycode` module should switch to using the + * userland-provided [Punycode.js](https://github.com/bestiejs/punycode.js) module instead. For punycode-based URL + * encoding, see `url.domainToASCII` or, more generally, the `WHATWG URL API`. + * + * The `punycode` module is a bundled version of the [Punycode.js](https://github.com/bestiejs/punycode.js) module. It + * can be accessed using: + * + * ```js + * import punycode from 'node:punycode'; + * ``` + * + * [Punycode](https://tools.ietf.org/html/rfc3492) is a character encoding scheme defined by RFC 3492 that is + * primarily intended for use in Internationalized Domain Names. Because host + * names in URLs are limited to ASCII characters only, Domain Names that contain + * non-ASCII characters must be converted into ASCII using the Punycode scheme. + * For instance, the Japanese character that translates into the English word, `'example'` is `'例'`. The Internationalized Domain Name, `'例.com'` (equivalent + * to `'example.com'`) is represented by Punycode as the ASCII string `'xn--fsq.com'`. + * + * The `punycode` module provides a simple implementation of the Punycode standard. + * + * The `punycode` module is a third-party dependency used by Node.js and + * made available to developers as a convenience. Fixes or other modifications to + * the module must be directed to the [Punycode.js](https://github.com/bestiejs/punycode.js) project. + * @deprecated Since v7.0.0 - Deprecated + * @see [source](https://github.com/nodejs/node/blob/v22.x/lib/punycode.js) + */ +declare module "punycode" { + /** + * The `punycode.decode()` method converts a [Punycode](https://tools.ietf.org/html/rfc3492) string of ASCII-only + * characters to the equivalent string of Unicode codepoints. + * + * ```js + * punycode.decode('maana-pta'); // 'mañana' + * punycode.decode('--dqo34k'); // '☃-⌘' + * ``` + * @since v0.5.1 + */ + function decode(string: string): string; + /** + * The `punycode.encode()` method converts a string of Unicode codepoints to a [Punycode](https://tools.ietf.org/html/rfc3492) string of ASCII-only characters. + * + * ```js + * punycode.encode('mañana'); // 'maana-pta' + * punycode.encode('☃-⌘'); // '--dqo34k' + * ``` + * @since v0.5.1 + */ + function encode(string: string): string; + /** + * The `punycode.toUnicode()` method converts a string representing a domain name + * containing [Punycode](https://tools.ietf.org/html/rfc3492) encoded characters into Unicode. Only the [Punycode](https://tools.ietf.org/html/rfc3492) encoded parts of the domain name are be + * converted. 
+ * + * ```js + * // decode domain names + * punycode.toUnicode('xn--maana-pta.com'); // 'mañana.com' + * punycode.toUnicode('xn----dqo34k.com'); // '☃-⌘.com' + * punycode.toUnicode('example.com'); // 'example.com' + * ``` + * @since v0.6.1 + */ + function toUnicode(domain: string): string; + /** + * The `punycode.toASCII()` method converts a Unicode string representing an + * Internationalized Domain Name to [Punycode](https://tools.ietf.org/html/rfc3492). Only the non-ASCII parts of the + * domain name will be converted. Calling `punycode.toASCII()` on a string that + * already only contains ASCII characters will have no effect. + * + * ```js + * // encode domain names + * punycode.toASCII('mañana.com'); // 'xn--maana-pta.com' + * punycode.toASCII('☃-⌘.com'); // 'xn----dqo34k.com' + * punycode.toASCII('example.com'); // 'example.com' + * ``` + * @since v0.6.1 + */ + function toASCII(domain: string): string; + /** + * @deprecated since v7.0.0 + * The version of the punycode module bundled in Node.js is being deprecated. + * In a future major version of Node.js this module will be removed. + * Users currently depending on the punycode module should switch to using + * the userland-provided Punycode.js module instead. + */ + const ucs2: ucs2; + interface ucs2 { + /** + * @deprecated since v7.0.0 + * The version of the punycode module bundled in Node.js is being deprecated. + * In a future major version of Node.js this module will be removed. + * Users currently depending on the punycode module should switch to using + * the userland-provided Punycode.js module instead. + */ + decode(string: string): number[]; + /** + * @deprecated since v7.0.0 + * The version of the punycode module bundled in Node.js is being deprecated. + * In a future major version of Node.js this module will be removed. + * Users currently depending on the punycode module should switch to using + * the userland-provided Punycode.js module instead. + */ + encode(codePoints: readonly number[]): string; + } + /** + * @deprecated since v7.0.0 + * The version of the punycode module bundled in Node.js is being deprecated. + * In a future major version of Node.js this module will be removed. + * Users currently depending on the punycode module should switch to using + * the userland-provided Punycode.js module instead. + */ + const version: string; +} +declare module "node:punycode" { + export * from "punycode"; +} diff --git a/node_modules/@types/node-fetch/node_modules/@types/node/querystring.d.ts b/node_modules/@types/node-fetch/node_modules/@types/node/querystring.d.ts new file mode 100644 index 0000000..4d6dac1 --- /dev/null +++ b/node_modules/@types/node-fetch/node_modules/@types/node/querystring.d.ts @@ -0,0 +1,153 @@ +/** + * The `node:querystring` module provides utilities for parsing and formatting URL + * query strings. It can be accessed using: + * + * ```js + * import querystring from 'node:querystring'; + * ``` + * + * `querystring` is more performant than `URLSearchParams` but is not a + * standardized API. Use `URLSearchParams` when performance is not critical or + * when compatibility with browser code is desirable. + * @see [source](https://github.com/nodejs/node/blob/v22.x/lib/querystring.js) + */ +declare module "querystring" { + interface StringifyOptions { + /** + * The function to use when converting URL-unsafe characters to percent-encoding in the query string. 
+ * @default `querystring.escape()` + */ + encodeURIComponent?: ((str: string) => string) | undefined; + } + interface ParseOptions { + /** + * Specifies the maximum number of keys to parse. Specify `0` to remove key counting limitations. + * @default 1000 + */ + maxKeys?: number | undefined; + /** + * The function to use when decoding percent-encoded characters in the query string. + * @default `querystring.unescape()` + */ + decodeURIComponent?: ((str: string) => string) | undefined; + } + interface ParsedUrlQuery extends NodeJS.Dict {} + interface ParsedUrlQueryInput extends + NodeJS.Dict< + | string + | number + | boolean + | readonly string[] + | readonly number[] + | readonly boolean[] + | null + > + {} + /** + * The `querystring.stringify()` method produces a URL query string from a + * given `obj` by iterating through the object's "own properties". + * + * It serializes the following types of values passed in `obj`: [string](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Data_structures#String_type) | + * [number](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Data_structures#Number_type) | + * [bigint](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/BigInt) | + * [boolean](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Data_structures#Boolean_type) | + * [string\[\]](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Data_structures#String_type) | + * [number\[\]](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Data_structures#Number_type) | + * [bigint\[\]](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/BigInt) | + * [boolean\[\]](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Data_structures#Boolean_type) The numeric values must be finite. Any other input values will be coerced to + * empty strings. + * + * ```js + * querystring.stringify({ foo: 'bar', baz: ['qux', 'quux'], corge: '' }); + * // Returns 'foo=bar&baz=qux&baz=quux&corge=' + * + * querystring.stringify({ foo: 'bar', baz: 'qux' }, ';', ':'); + * // Returns 'foo:bar;baz:qux' + * ``` + * + * By default, characters requiring percent-encoding within the query string will + * be encoded as UTF-8\. If an alternative encoding is required, then an alternative `encodeURIComponent` option will need to be specified: + * + * ```js + * // Assuming gbkEncodeURIComponent function already exists, + * + * querystring.stringify({ w: '中文', foo: 'bar' }, null, null, + * { encodeURIComponent: gbkEncodeURIComponent }); + * ``` + * @since v0.1.25 + * @param obj The object to serialize into a URL query string + * @param [sep='&'] The substring used to delimit key and value pairs in the query string. + * @param [eq='='] . The substring used to delimit keys and values in the query string. + */ + function stringify(obj?: ParsedUrlQueryInput, sep?: string, eq?: string, options?: StringifyOptions): string; + /** + * The `querystring.parse()` method parses a URL query string (`str`) into a + * collection of key and value pairs. + * + * For example, the query string `'foo=bar&abc=xyz&abc=123'` is parsed into: + * + * ```json + * { + * "foo": "bar", + * "abc": ["xyz", "123"] + * } + * ``` + * + * The object returned by the `querystring.parse()` method _does not_ prototypically inherit from the JavaScript `Object`. This means that typical `Object` methods such as `obj.toString()`, + * `obj.hasOwnProperty()`, and others + * are not defined and _will not work_. 
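+ *
+ * A minimal illustrative sketch of the prototype-less result:
+ *
+ * ```js
+ * const parsed = querystring.parse('foo=bar&abc=xyz&abc=123');
+ * console.log(parsed.foo);                  // 'bar'
+ * console.log(parsed.abc);                  // [ 'xyz', '123' ]
+ * console.log('hasOwnProperty' in parsed);  // false, no Object.prototype
+ * ```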
+ * + * By default, percent-encoded characters within the query string will be assumed + * to use UTF-8 encoding. If an alternative character encoding is used, then an + * alternative `decodeURIComponent` option will need to be specified: + * + * ```js + * // Assuming gbkDecodeURIComponent function already exists... + * + * querystring.parse('w=%D6%D0%CE%C4&foo=bar', null, null, + * { decodeURIComponent: gbkDecodeURIComponent }); + * ``` + * @since v0.1.25 + * @param str The URL query string to parse + * @param [sep='&'] The substring used to delimit key and value pairs in the query string. + * @param [eq='='] The substring used to delimit keys and values in the query string. + */ + function parse(str: string, sep?: string, eq?: string, options?: ParseOptions): ParsedUrlQuery; + /** + * The querystring.encode() function is an alias for querystring.stringify(). + */ + const encode: typeof stringify; + /** + * The querystring.decode() function is an alias for querystring.parse(). + */ + const decode: typeof parse; + /** + * The `querystring.escape()` method performs URL percent-encoding on the given `str` in a manner that is optimized for the specific requirements of URL + * query strings. + * + * The `querystring.escape()` method is used by `querystring.stringify()` and is + * generally not expected to be used directly. It is exported primarily to allow + * application code to provide a replacement percent-encoding implementation if + * necessary by assigning `querystring.escape` to an alternative function. + * @since v0.1.25 + */ + function escape(str: string): string; + /** + * The `querystring.unescape()` method performs decoding of URL percent-encoded + * characters on the given `str`. + * + * The `querystring.unescape()` method is used by `querystring.parse()` and is + * generally not expected to be used directly. It is exported primarily to allow + * application code to provide a replacement decoding implementation if + * necessary by assigning `querystring.unescape` to an alternative function. + * + * By default, the `querystring.unescape()` method will attempt to use the + * JavaScript built-in `decodeURIComponent()` method to decode. If that fails, + * a safer equivalent that does not throw on malformed URLs will be used. + * @since v0.1.25 + */ + function unescape(str: string): string; +} +declare module "node:querystring" { + export * from "querystring"; +} diff --git a/node_modules/@types/node-fetch/node_modules/@types/node/readline.d.ts b/node_modules/@types/node-fetch/node_modules/@types/node/readline.d.ts new file mode 100644 index 0000000..a1f304f --- /dev/null +++ b/node_modules/@types/node-fetch/node_modules/@types/node/readline.d.ts @@ -0,0 +1,589 @@ +/** + * The `node:readline` module provides an interface for reading data from a [Readable](https://nodejs.org/docs/latest-v22.x/api/stream.html#readable-streams) stream + * (such as [`process.stdin`](https://nodejs.org/docs/latest-v22.x/api/process.html#processstdin)) one line at a time. + * + * To use the promise-based APIs: + * + * ```js + * import * as readline from 'node:readline/promises'; + * ``` + * + * To use the callback and sync APIs: + * + * ```js + * import * as readline from 'node:readline'; + * ``` + * + * The following simple example illustrates the basic use of the `node:readline` module. 
+ * + * ```js + * import * as readline from 'node:readline/promises'; + * import { stdin as input, stdout as output } from 'node:process'; + * + * const rl = readline.createInterface({ input, output }); + * + * const answer = await rl.question('What do you think of Node.js? '); + * + * console.log(`Thank you for your valuable feedback: ${answer}`); + * + * rl.close(); + * ``` + * + * Once this code is invoked, the Node.js application will not terminate until the `readline.Interface` is closed because the interface waits for data to be + * received on the `input` stream. + * @see [source](https://github.com/nodejs/node/blob/v22.x/lib/readline.js) + */ +declare module "readline" { + import { Abortable, EventEmitter } from "node:events"; + import * as promises from "node:readline/promises"; + export { promises }; + export interface Key { + sequence?: string | undefined; + name?: string | undefined; + ctrl?: boolean | undefined; + meta?: boolean | undefined; + shift?: boolean | undefined; + } + /** + * Instances of the `readline.Interface` class are constructed using the `readline.createInterface()` method. Every instance is associated with a + * single `input` [Readable](https://nodejs.org/docs/latest-v22.x/api/stream.html#readable-streams) stream and a single `output` [Writable](https://nodejs.org/docs/latest-v22.x/api/stream.html#writable-streams) stream. + * The `output` stream is used to print prompts for user input that arrives on, + * and is read from, the `input` stream. + * @since v0.1.104 + */ + export class Interface extends EventEmitter { + readonly terminal: boolean; + /** + * The current input data being processed by node. + * + * This can be used when collecting input from a TTY stream to retrieve the + * current value that has been processed thus far, prior to the `line` event + * being emitted. Once the `line` event has been emitted, this property will + * be an empty string. + * + * Be aware that modifying the value during the instance runtime may have + * unintended consequences if `rl.cursor` is not also controlled. + * + * **If not using a TTY stream for input, use the `'line'` event.** + * + * One possible use case would be as follows: + * + * ```js + * const values = ['lorem ipsum', 'dolor sit amet']; + * const rl = readline.createInterface(process.stdin); + * const showResults = debounce(() => { + * console.log( + * '\n', + * values.filter((val) => val.startsWith(rl.line)).join(' '), + * ); + * }, 300); + * process.stdin.on('keypress', (c, k) => { + * showResults(); + * }); + * ``` + * @since v0.1.98 + */ + readonly line: string; + /** + * The cursor position relative to `rl.line`. + * + * This will track where the current cursor lands in the input string, when + * reading input from a TTY stream. The position of cursor determines the + * portion of the input string that will be modified as input is processed, + * as well as the column where the terminal caret will be rendered. + * @since v0.1.98 + */ + readonly cursor: number; + /** + * NOTE: According to the documentation: + * + * > Instances of the `readline.Interface` class are constructed using the + * > `readline.createInterface()` method. 
+ * + * @see https://nodejs.org/dist/latest-v22.x/docs/api/readline.html#class-interfaceconstructor + */ + protected constructor( + input: NodeJS.ReadableStream, + output?: NodeJS.WritableStream, + completer?: Completer | AsyncCompleter, + terminal?: boolean, + ); + /** + * NOTE: According to the documentation: + * + * > Instances of the `readline.Interface` class are constructed using the + * > `readline.createInterface()` method. + * + * @see https://nodejs.org/dist/latest-v22.x/docs/api/readline.html#class-interfaceconstructor + */ + protected constructor(options: ReadLineOptions); + /** + * The `rl.getPrompt()` method returns the current prompt used by `rl.prompt()`. + * @since v15.3.0, v14.17.0 + * @return the current prompt string + */ + getPrompt(): string; + /** + * The `rl.setPrompt()` method sets the prompt that will be written to `output` whenever `rl.prompt()` is called. + * @since v0.1.98 + */ + setPrompt(prompt: string): void; + /** + * The `rl.prompt()` method writes the `Interface` instances configured`prompt` to a new line in `output` in order to provide a user with a new + * location at which to provide input. + * + * When called, `rl.prompt()` will resume the `input` stream if it has been + * paused. + * + * If the `Interface` was created with `output` set to `null` or `undefined` the prompt is not written. + * @since v0.1.98 + * @param preserveCursor If `true`, prevents the cursor placement from being reset to `0`. + */ + prompt(preserveCursor?: boolean): void; + /** + * The `rl.question()` method displays the `query` by writing it to the `output`, + * waits for user input to be provided on `input`, then invokes the `callback` function passing the provided input as the first argument. + * + * When called, `rl.question()` will resume the `input` stream if it has been + * paused. + * + * If the `Interface` was created with `output` set to `null` or `undefined` the `query` is not written. + * + * The `callback` function passed to `rl.question()` does not follow the typical + * pattern of accepting an `Error` object or `null` as the first argument. + * The `callback` is called with the provided answer as the only argument. + * + * An error will be thrown if calling `rl.question()` after `rl.close()`. + * + * Example usage: + * + * ```js + * rl.question('What is your favorite food? ', (answer) => { + * console.log(`Oh, so your favorite food is ${answer}`); + * }); + * ``` + * + * Using an `AbortController` to cancel a question. + * + * ```js + * const ac = new AbortController(); + * const signal = ac.signal; + * + * rl.question('What is your favorite food? ', { signal }, (answer) => { + * console.log(`Oh, so your favorite food is ${answer}`); + * }); + * + * signal.addEventListener('abort', () => { + * console.log('The food question timed out'); + * }, { once: true }); + * + * setTimeout(() => ac.abort(), 10000); + * ``` + * @since v0.3.3 + * @param query A statement or query to write to `output`, prepended to the prompt. + * @param callback A callback function that is invoked with the user's input in response to the `query`. + */ + question(query: string, callback: (answer: string) => void): void; + question(query: string, options: Abortable, callback: (answer: string) => void): void; + /** + * The `rl.pause()` method pauses the `input` stream, allowing it to be resumed + * later if necessary. + * + * Calling `rl.pause()` does not immediately pause other events (including `'line'`) from being emitted by the `Interface` instance. 
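+ *
+ * A minimal usage sketch (the one-second delay is illustrative only):
+ *
+ * ```js
+ * rl.pause();
+ * // Resume reading from the `input` stream after one second.
+ * setTimeout(() => rl.resume(), 1000);
+ * ```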
+ * @since v0.3.4 + */ + pause(): this; + /** + * The `rl.resume()` method resumes the `input` stream if it has been paused. + * @since v0.3.4 + */ + resume(): this; + /** + * The `rl.close()` method closes the `Interface` instance and + * relinquishes control over the `input` and `output` streams. When called, + * the `'close'` event will be emitted. + * + * Calling `rl.close()` does not immediately stop other events (including `'line'`) + * from being emitted by the `Interface` instance. + * @since v0.1.98 + */ + close(): void; + /** + * The `rl.write()` method will write either `data` or a key sequence identified + * by `key` to the `output`. The `key` argument is supported only if `output` is + * a `TTY` text terminal. See `TTY keybindings` for a list of key + * combinations. + * + * If `key` is specified, `data` is ignored. + * + * When called, `rl.write()` will resume the `input` stream if it has been + * paused. + * + * If the `Interface` was created with `output` set to `null` or `undefined` the `data` and `key` are not written. + * + * ```js + * rl.write('Delete this!'); + * // Simulate Ctrl+U to delete the line written previously + * rl.write(null, { ctrl: true, name: 'u' }); + * ``` + * + * The `rl.write()` method will write the data to the `readline` `Interface`'s `input` _as if it were provided by the user_. + * @since v0.1.98 + */ + write(data: string | Buffer, key?: Key): void; + write(data: undefined | null | string | Buffer, key: Key): void; + /** + * Returns the real position of the cursor in relation to the input + * prompt + string. Long input (wrapping) strings, as well as multiple + * line prompts are included in the calculations. + * @since v13.5.0, v12.16.0 + */ + getCursorPos(): CursorPos; + /** + * events.EventEmitter + * 1. close + * 2. line + * 3. pause + * 4. resume + * 5. SIGCONT + * 6. SIGINT + * 7. SIGTSTP + * 8. 
history + */ + addListener(event: string, listener: (...args: any[]) => void): this; + addListener(event: "close", listener: () => void): this; + addListener(event: "line", listener: (input: string) => void): this; + addListener(event: "pause", listener: () => void): this; + addListener(event: "resume", listener: () => void): this; + addListener(event: "SIGCONT", listener: () => void): this; + addListener(event: "SIGINT", listener: () => void): this; + addListener(event: "SIGTSTP", listener: () => void): this; + addListener(event: "history", listener: (history: string[]) => void): this; + emit(event: string | symbol, ...args: any[]): boolean; + emit(event: "close"): boolean; + emit(event: "line", input: string): boolean; + emit(event: "pause"): boolean; + emit(event: "resume"): boolean; + emit(event: "SIGCONT"): boolean; + emit(event: "SIGINT"): boolean; + emit(event: "SIGTSTP"): boolean; + emit(event: "history", history: string[]): boolean; + on(event: string, listener: (...args: any[]) => void): this; + on(event: "close", listener: () => void): this; + on(event: "line", listener: (input: string) => void): this; + on(event: "pause", listener: () => void): this; + on(event: "resume", listener: () => void): this; + on(event: "SIGCONT", listener: () => void): this; + on(event: "SIGINT", listener: () => void): this; + on(event: "SIGTSTP", listener: () => void): this; + on(event: "history", listener: (history: string[]) => void): this; + once(event: string, listener: (...args: any[]) => void): this; + once(event: "close", listener: () => void): this; + once(event: "line", listener: (input: string) => void): this; + once(event: "pause", listener: () => void): this; + once(event: "resume", listener: () => void): this; + once(event: "SIGCONT", listener: () => void): this; + once(event: "SIGINT", listener: () => void): this; + once(event: "SIGTSTP", listener: () => void): this; + once(event: "history", listener: (history: string[]) => void): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + prependListener(event: "close", listener: () => void): this; + prependListener(event: "line", listener: (input: string) => void): this; + prependListener(event: "pause", listener: () => void): this; + prependListener(event: "resume", listener: () => void): this; + prependListener(event: "SIGCONT", listener: () => void): this; + prependListener(event: "SIGINT", listener: () => void): this; + prependListener(event: "SIGTSTP", listener: () => void): this; + prependListener(event: "history", listener: (history: string[]) => void): this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + prependOnceListener(event: "close", listener: () => void): this; + prependOnceListener(event: "line", listener: (input: string) => void): this; + prependOnceListener(event: "pause", listener: () => void): this; + prependOnceListener(event: "resume", listener: () => void): this; + prependOnceListener(event: "SIGCONT", listener: () => void): this; + prependOnceListener(event: "SIGINT", listener: () => void): this; + prependOnceListener(event: "SIGTSTP", listener: () => void): this; + prependOnceListener(event: "history", listener: (history: string[]) => void): this; + [Symbol.asyncIterator](): NodeJS.AsyncIterator; + } + export type ReadLine = Interface; // type forwarded for backwards compatibility + export type Completer = (line: string) => CompleterResult; + export type AsyncCompleter = ( + line: string, + callback: (err?: null | Error, result?: CompleterResult) => 
void, + ) => void; + export type CompleterResult = [string[], string]; + export interface ReadLineOptions { + /** + * The [`Readable`](https://nodejs.org/docs/latest-v22.x/api/stream.html#readable-streams) stream to listen to + */ + input: NodeJS.ReadableStream; + /** + * The [`Writable`](https://nodejs.org/docs/latest-v22.x/api/stream.html#writable-streams) stream to write readline data to. + */ + output?: NodeJS.WritableStream | undefined; + /** + * An optional function used for Tab autocompletion. + */ + completer?: Completer | AsyncCompleter | undefined; + /** + * `true` if the `input` and `output` streams should be treated like a TTY, + * and have ANSI/VT100 escape codes written to it. + * Default: checking `isTTY` on the `output` stream upon instantiation. + */ + terminal?: boolean | undefined; + /** + * Initial list of history lines. + * This option makes sense only if `terminal` is set to `true` by the user or by an internal `output` check, + * otherwise the history caching mechanism is not initialized at all. + * @default [] + */ + history?: string[] | undefined; + /** + * Maximum number of history lines retained. + * To disable the history set this value to `0`. + * This option makes sense only if `terminal` is set to `true` by the user or by an internal `output` check, + * otherwise the history caching mechanism is not initialized at all. + * @default 30 + */ + historySize?: number | undefined; + /** + * If `true`, when a new input line added to the history list duplicates an older one, + * this removes the older line from the list. + * @default false + */ + removeHistoryDuplicates?: boolean | undefined; + /** + * The prompt string to use. + * @default "> " + */ + prompt?: string | undefined; + /** + * If the delay between `\r` and `\n` exceeds `crlfDelay` milliseconds, + * both `\r` and `\n` will be treated as separate end-of-line input. + * `crlfDelay` will be coerced to a number no less than `100`. + * It can be set to `Infinity`, in which case + * `\r` followed by `\n` will always be considered a single newline + * (which may be reasonable for [reading files](https://nodejs.org/docs/latest-v22.x/api/readline.html#example-read-file-stream-line-by-line) with `\r\n` line delimiter). + * @default 100 + */ + crlfDelay?: number | undefined; + /** + * The duration `readline` will wait for a character + * (when reading an ambiguous key sequence in milliseconds + * one that can both form a complete key sequence using the input read so far + * and can take additional input to complete a longer key sequence). + * @default 500 + */ + escapeCodeTimeout?: number | undefined; + /** + * The number of spaces a tab is equal to (minimum 1). + * @default 8 + */ + tabSize?: number | undefined; + /** + * Allows closing the interface using an AbortSignal. + * Aborting the signal will internally call `close` on the interface. + */ + signal?: AbortSignal | undefined; + } + /** + * The `readline.createInterface()` method creates a new `readline.Interface` instance. 
+ * + * ```js + * import readline from 'node:readline'; + * const rl = readline.createInterface({ + * input: process.stdin, + * output: process.stdout, + * }); + * ``` + * + * Once the `readline.Interface` instance is created, the most common case is to + * listen for the `'line'` event: + * + * ```js + * rl.on('line', (line) => { + * console.log(`Received: ${line}`); + * }); + * ``` + * + * If `terminal` is `true` for this instance then the `output` stream will get + * the best compatibility if it defines an `output.columns` property and emits + * a `'resize'` event on the `output` if or when the columns ever change + * (`process.stdout` does this automatically when it is a TTY). + * + * When creating a `readline.Interface` using `stdin` as input, the program + * will not terminate until it receives an [EOF character](https://en.wikipedia.org/wiki/End-of-file#EOF_character). To exit without + * waiting for user input, call `process.stdin.unref()`. + * @since v0.1.98 + */ + export function createInterface( + input: NodeJS.ReadableStream, + output?: NodeJS.WritableStream, + completer?: Completer | AsyncCompleter, + terminal?: boolean, + ): Interface; + export function createInterface(options: ReadLineOptions): Interface; + /** + * The `readline.emitKeypressEvents()` method causes the given `Readable` stream to begin emitting `'keypress'` events corresponding to received input. + * + * Optionally, `interface` specifies a `readline.Interface` instance for which + * autocompletion is disabled when copy-pasted input is detected. + * + * If the `stream` is a `TTY`, then it must be in raw mode. + * + * This is automatically called by any readline instance on its `input` if the `input` is a terminal. Closing the `readline` instance does not stop + * the `input` from emitting `'keypress'` events. + * + * ```js + * readline.emitKeypressEvents(process.stdin); + * if (process.stdin.isTTY) + * process.stdin.setRawMode(true); + * ``` + * + * ## Example: Tiny CLI + * + * The following example illustrates the use of `readline.Interface` class to + * implement a small command-line interface: + * + * ```js + * import readline from 'node:readline'; + * const rl = readline.createInterface({ + * input: process.stdin, + * output: process.stdout, + * prompt: 'OHAI> ', + * }); + * + * rl.prompt(); + * + * rl.on('line', (line) => { + * switch (line.trim()) { + * case 'hello': + * console.log('world!'); + * break; + * default: + * console.log(`Say what? I might have heard '${line.trim()}'`); + * break; + * } + * rl.prompt(); + * }).on('close', () => { + * console.log('Have a great day!'); + * process.exit(0); + * }); + * ``` + * + * ## Example: Read file stream line-by-Line + * + * A common use case for `readline` is to consume an input file one line at a + * time. The easiest way to do so is leveraging the `fs.ReadStream` API as + * well as a `for await...of` loop: + * + * ```js + * import fs from 'node:fs'; + * import readline from 'node:readline'; + * + * async function processLineByLine() { + * const fileStream = fs.createReadStream('input.txt'); + * + * const rl = readline.createInterface({ + * input: fileStream, + * crlfDelay: Infinity, + * }); + * // Note: we use the crlfDelay option to recognize all instances of CR LF + * // ('\r\n') in input.txt as a single line break. + * + * for await (const line of rl) { + * // Each line in input.txt will be successively available here as `line`. 
+ * console.log(`Line from file: ${line}`); + * } + * } + * + * processLineByLine(); + * ``` + * + * Alternatively, one could use the `'line'` event: + * + * ```js + * import fs from 'node:fs'; + * import readline from 'node:readline'; + * + * const rl = readline.createInterface({ + * input: fs.createReadStream('sample.txt'), + * crlfDelay: Infinity, + * }); + * + * rl.on('line', (line) => { + * console.log(`Line from file: ${line}`); + * }); + * ``` + * + * Currently, `for await...of` loop can be a bit slower. If `async` / `await` flow and speed are both essential, a mixed approach can be applied: + * + * ```js + * import { once } from 'node:events'; + * import { createReadStream } from 'node:fs'; + * import { createInterface } from 'node:readline'; + * + * (async function processLineByLine() { + * try { + * const rl = createInterface({ + * input: createReadStream('big-file.txt'), + * crlfDelay: Infinity, + * }); + * + * rl.on('line', (line) => { + * // Process the line. + * }); + * + * await once(rl, 'close'); + * + * console.log('File processed.'); + * } catch (err) { + * console.error(err); + * } + * })(); + * ``` + * @since v0.7.7 + */ + export function emitKeypressEvents(stream: NodeJS.ReadableStream, readlineInterface?: Interface): void; + export type Direction = -1 | 0 | 1; + export interface CursorPos { + rows: number; + cols: number; + } + /** + * The `readline.clearLine()` method clears current line of given [TTY](https://nodejs.org/docs/latest-v22.x/api/tty.html) stream + * in a specified direction identified by `dir`. + * @since v0.7.7 + * @param callback Invoked once the operation completes. + * @return `false` if `stream` wishes for the calling code to wait for the `'drain'` event to be emitted before continuing to write additional data; otherwise `true`. + */ + export function clearLine(stream: NodeJS.WritableStream, dir: Direction, callback?: () => void): boolean; + /** + * The `readline.clearScreenDown()` method clears the given [TTY](https://nodejs.org/docs/latest-v22.x/api/tty.html) stream from + * the current position of the cursor down. + * @since v0.7.7 + * @param callback Invoked once the operation completes. + * @return `false` if `stream` wishes for the calling code to wait for the `'drain'` event to be emitted before continuing to write additional data; otherwise `true`. + */ + export function clearScreenDown(stream: NodeJS.WritableStream, callback?: () => void): boolean; + /** + * The `readline.cursorTo()` method moves cursor to the specified position in a + * given [TTY](https://nodejs.org/docs/latest-v22.x/api/tty.html) `stream`. + * @since v0.7.7 + * @param callback Invoked once the operation completes. + * @return `false` if `stream` wishes for the calling code to wait for the `'drain'` event to be emitted before continuing to write additional data; otherwise `true`. + */ + export function cursorTo(stream: NodeJS.WritableStream, x: number, y?: number, callback?: () => void): boolean; + /** + * The `readline.moveCursor()` method moves the cursor _relative_ to its current + * position in a given [TTY](https://nodejs.org/docs/latest-v22.x/api/tty.html) `stream`. + * @since v0.7.7 + * @param callback Invoked once the operation completes. + * @return `false` if `stream` wishes for the calling code to wait for the `'drain'` event to be emitted before continuing to write additional data; otherwise `true`. 
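+ *
+ * A minimal usage sketch (assuming `process.stdout` is a TTY):
+ *
+ * ```js
+ * import { moveCursor } from 'node:readline';
+ * // Move the cursor two rows up without changing the column.
+ * moveCursor(process.stdout, 0, -2);
+ * ```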
+ */ + export function moveCursor(stream: NodeJS.WritableStream, dx: number, dy: number, callback?: () => void): boolean; +} +declare module "node:readline" { + export * from "readline"; +} diff --git a/node_modules/@types/node-fetch/node_modules/@types/node/readline/promises.d.ts b/node_modules/@types/node-fetch/node_modules/@types/node/readline/promises.d.ts new file mode 100644 index 0000000..86754bb --- /dev/null +++ b/node_modules/@types/node-fetch/node_modules/@types/node/readline/promises.d.ts @@ -0,0 +1,162 @@ +/** + * @since v17.0.0 + * @experimental + */ +declare module "readline/promises" { + import { Abortable } from "node:events"; + import { + CompleterResult, + Direction, + Interface as _Interface, + ReadLineOptions as _ReadLineOptions, + } from "node:readline"; + /** + * Instances of the `readlinePromises.Interface` class are constructed using the `readlinePromises.createInterface()` method. Every instance is associated with a + * single `input` `Readable` stream and a single `output` `Writable` stream. + * The `output` stream is used to print prompts for user input that arrives on, + * and is read from, the `input` stream. + * @since v17.0.0 + */ + class Interface extends _Interface { + /** + * The `rl.question()` method displays the `query` by writing it to the `output`, + * waits for user input to be provided on `input`, then invokes the `callback` function passing the provided input as the first argument. + * + * When called, `rl.question()` will resume the `input` stream if it has been + * paused. + * + * If the `Interface` was created with `output` set to `null` or `undefined` the `query` is not written. + * + * If the question is called after `rl.close()`, it returns a rejected promise. + * + * Example usage: + * + * ```js + * const answer = await rl.question('What is your favorite food? '); + * console.log(`Oh, so your favorite food is ${answer}`); + * ``` + * + * Using an `AbortSignal` to cancel a question. + * + * ```js + * const signal = AbortSignal.timeout(10_000); + * + * signal.addEventListener('abort', () => { + * console.log('The food question timed out'); + * }, { once: true }); + * + * const answer = await rl.question('What is your favorite food? ', { signal }); + * console.log(`Oh, so your favorite food is ${answer}`); + * ``` + * @since v17.0.0 + * @param query A statement or query to write to `output`, prepended to the prompt. + * @return A promise that is fulfilled with the user's input in response to the `query`. + */ + question(query: string): Promise; + question(query: string, options: Abortable): Promise; + } + /** + * @since v17.0.0 + */ + class Readline { + /** + * @param stream A TTY stream. + */ + constructor( + stream: NodeJS.WritableStream, + options?: { + autoCommit?: boolean; + }, + ); + /** + * The `rl.clearLine()` method adds to the internal list of pending action an + * action that clears current line of the associated `stream` in a specified + * direction identified by `dir`. + * Call `rl.commit()` to see the effect of this method, unless `autoCommit: true` was passed to the constructor. + * @since v17.0.0 + * @return this + */ + clearLine(dir: Direction): this; + /** + * The `rl.clearScreenDown()` method adds to the internal list of pending action an + * action that clears the associated stream from the current position of the + * cursor down. + * Call `rl.commit()` to see the effect of this method, unless `autoCommit: true` was passed to the constructor. 
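+ *
+ * A minimal usage sketch (assuming `process.stdout` is a TTY):
+ *
+ * ```js
+ * import { Readline } from 'node:readline/promises';
+ *
+ * const readlineControl = new Readline(process.stdout);
+ * // Queue a cursor move and a clear, then apply both in one commit.
+ * await readlineControl.cursorTo(0, 0).clearScreenDown().commit();
+ * ```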
+ * @since v17.0.0 + * @return this + */ + clearScreenDown(): this; + /** + * The `rl.commit()` method sends all the pending actions to the associated `stream` and clears the internal list of pending actions. + * @since v17.0.0 + */ + commit(): Promise; + /** + * The `rl.cursorTo()` method adds to the internal list of pending action an action + * that moves cursor to the specified position in the associated `stream`. + * Call `rl.commit()` to see the effect of this method, unless `autoCommit: true` was passed to the constructor. + * @since v17.0.0 + * @return this + */ + cursorTo(x: number, y?: number): this; + /** + * The `rl.moveCursor()` method adds to the internal list of pending action an + * action that moves the cursor _relative_ to its current position in the + * associated `stream`. + * Call `rl.commit()` to see the effect of this method, unless `autoCommit: true` was passed to the constructor. + * @since v17.0.0 + * @return this + */ + moveCursor(dx: number, dy: number): this; + /** + * The `rl.rollback` methods clears the internal list of pending actions without + * sending it to the associated `stream`. + * @since v17.0.0 + * @return this + */ + rollback(): this; + } + type Completer = (line: string) => CompleterResult | Promise; + interface ReadLineOptions extends Omit<_ReadLineOptions, "completer"> { + /** + * An optional function used for Tab autocompletion. + */ + completer?: Completer | undefined; + } + /** + * The `readlinePromises.createInterface()` method creates a new `readlinePromises.Interface` instance. + * + * ```js + * import readlinePromises from 'node:readline/promises'; + * const rl = readlinePromises.createInterface({ + * input: process.stdin, + * output: process.stdout, + * }); + * ``` + * + * Once the `readlinePromises.Interface` instance is created, the most common case + * is to listen for the `'line'` event: + * + * ```js + * rl.on('line', (line) => { + * console.log(`Received: ${line}`); + * }); + * ``` + * + * If `terminal` is `true` for this instance then the `output` stream will get + * the best compatibility if it defines an `output.columns` property and emits + * a `'resize'` event on the `output` if or when the columns ever change + * (`process.stdout` does this automatically when it is a TTY). + * @since v17.0.0 + */ + function createInterface( + input: NodeJS.ReadableStream, + output?: NodeJS.WritableStream, + completer?: Completer, + terminal?: boolean, + ): Interface; + function createInterface(options: ReadLineOptions): Interface; +} +declare module "node:readline/promises" { + export * from "readline/promises"; +} diff --git a/node_modules/@types/node-fetch/node_modules/@types/node/repl.d.ts b/node_modules/@types/node-fetch/node_modules/@types/node/repl.d.ts new file mode 100644 index 0000000..5ff046a --- /dev/null +++ b/node_modules/@types/node-fetch/node_modules/@types/node/repl.d.ts @@ -0,0 +1,430 @@ +/** + * The `node:repl` module provides a Read-Eval-Print-Loop (REPL) implementation + * that is available both as a standalone program or includible in other + * applications. It can be accessed using: + * + * ```js + * import repl from 'node:repl'; + * ``` + * @see [source](https://github.com/nodejs/node/blob/v22.x/lib/repl.js) + */ +declare module "repl" { + import { AsyncCompleter, Completer, Interface } from "node:readline"; + import { Context } from "node:vm"; + import { InspectOptions } from "node:util"; + interface ReplOptions { + /** + * The input prompt to display. 
+ * @default "> " + */ + prompt?: string | undefined; + /** + * The `Readable` stream from which REPL input will be read. + * @default process.stdin + */ + input?: NodeJS.ReadableStream | undefined; + /** + * The `Writable` stream to which REPL output will be written. + * @default process.stdout + */ + output?: NodeJS.WritableStream | undefined; + /** + * If `true`, specifies that the output should be treated as a TTY terminal, and have + * ANSI/VT100 escape codes written to it. + * Default: checking the value of the `isTTY` property on the output stream upon + * instantiation. + */ + terminal?: boolean | undefined; + /** + * The function to be used when evaluating each given line of input. + * Default: an async wrapper for the JavaScript `eval()` function. An `eval` function can + * error with `repl.Recoverable` to indicate the input was incomplete and prompt for + * additional lines. + * + * @see https://nodejs.org/dist/latest-v22.x/docs/api/repl.html#repl_default_evaluation + * @see https://nodejs.org/dist/latest-v22.x/docs/api/repl.html#repl_custom_evaluation_functions + */ + eval?: REPLEval | undefined; + /** + * Defines if the repl prints output previews or not. + * @default `true` Always `false` in case `terminal` is falsy. + */ + preview?: boolean | undefined; + /** + * If `true`, specifies that the default `writer` function should include ANSI color + * styling to REPL output. If a custom `writer` function is provided then this has no + * effect. + * @default the REPL instance's `terminal` value + */ + useColors?: boolean | undefined; + /** + * If `true`, specifies that the default evaluation function will use the JavaScript + * `global` as the context as opposed to creating a new separate context for the REPL + * instance. The node CLI REPL sets this value to `true`. + * @default false + */ + useGlobal?: boolean | undefined; + /** + * If `true`, specifies that the default writer will not output the return value of a + * command if it evaluates to `undefined`. + * @default false + */ + ignoreUndefined?: boolean | undefined; + /** + * The function to invoke to format the output of each command before writing to `output`. + * @default a wrapper for `util.inspect` + * + * @see https://nodejs.org/dist/latest-v22.x/docs/api/repl.html#repl_customizing_repl_output + */ + writer?: REPLWriter | undefined; + /** + * An optional function used for custom Tab auto completion. + * + * @see https://nodejs.org/dist/latest-v22.x/docs/api/readline.html#readline_use_of_the_completer_function + */ + completer?: Completer | AsyncCompleter | undefined; + /** + * A flag that specifies whether the default evaluator executes all JavaScript commands in + * strict mode or default (sloppy) mode. + * Accepted values are: + * - `repl.REPL_MODE_SLOPPY` - evaluates expressions in sloppy mode. + * - `repl.REPL_MODE_STRICT` - evaluates expressions in strict mode. This is equivalent to + * prefacing every repl statement with `'use strict'`. + */ + replMode?: typeof REPL_MODE_SLOPPY | typeof REPL_MODE_STRICT | undefined; + /** + * Stop evaluating the current piece of code when `SIGINT` is received, i.e. `Ctrl+C` is + * pressed. This cannot be used together with a custom `eval` function. 
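 "">
+ *
+ * A minimal usage sketch:
+ *
+ * ```js
+ * import repl from 'node:repl';
+ *
+ * // Pressing Ctrl+C while code is being evaluated aborts that evaluation.
+ * repl.start({ prompt: '> ', breakEvalOnSigint: true });
+ * ```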
+ * @default false + */ + breakEvalOnSigint?: boolean | undefined; + } + type REPLEval = ( + this: REPLServer, + evalCmd: string, + context: Context, + file: string, + cb: (err: Error | null, result: any) => void, + ) => void; + type REPLWriter = (this: REPLServer, obj: any) => string; + /** + * This is the default "writer" value, if none is passed in the REPL options, + * and it can be overridden by custom print functions. + */ + const writer: REPLWriter & { + options: InspectOptions; + }; + type REPLCommandAction = (this: REPLServer, text: string) => void; + interface REPLCommand { + /** + * Help text to be displayed when `.help` is entered. + */ + help?: string | undefined; + /** + * The function to execute, optionally accepting a single string argument. + */ + action: REPLCommandAction; + } + /** + * Instances of `repl.REPLServer` are created using the {@link start} method + * or directly using the JavaScript `new` keyword. + * + * ```js + * import repl from 'node:repl'; + * + * const options = { useColors: true }; + * + * const firstInstance = repl.start(options); + * const secondInstance = new repl.REPLServer(options); + * ``` + * @since v0.1.91 + */ + class REPLServer extends Interface { + /** + * The `vm.Context` provided to the `eval` function to be used for JavaScript + * evaluation. + */ + readonly context: Context; + /** + * @deprecated since v14.3.0 - Use `input` instead. + */ + readonly inputStream: NodeJS.ReadableStream; + /** + * @deprecated since v14.3.0 - Use `output` instead. + */ + readonly outputStream: NodeJS.WritableStream; + /** + * The `Readable` stream from which REPL input will be read. + */ + readonly input: NodeJS.ReadableStream; + /** + * The `Writable` stream to which REPL output will be written. + */ + readonly output: NodeJS.WritableStream; + /** + * The commands registered via `replServer.defineCommand()`. + */ + readonly commands: NodeJS.ReadOnlyDict; + /** + * A value indicating whether the REPL is currently in "editor mode". + * + * @see https://nodejs.org/dist/latest-v22.x/docs/api/repl.html#repl_commands_and_special_keys + */ + readonly editorMode: boolean; + /** + * A value indicating whether the `_` variable has been assigned. + * + * @see https://nodejs.org/dist/latest-v22.x/docs/api/repl.html#repl_assignment_of_the_underscore_variable + */ + readonly underscoreAssigned: boolean; + /** + * The last evaluation result from the REPL (assigned to the `_` variable inside of the REPL). + * + * @see https://nodejs.org/dist/latest-v22.x/docs/api/repl.html#repl_assignment_of_the_underscore_variable + */ + readonly last: any; + /** + * A value indicating whether the `_error` variable has been assigned. + * + * @since v9.8.0 + * @see https://nodejs.org/dist/latest-v22.x/docs/api/repl.html#repl_assignment_of_the_underscore_variable + */ + readonly underscoreErrAssigned: boolean; + /** + * The last error raised inside the REPL (assigned to the `_error` variable inside of the REPL). + * + * @since v9.8.0 + * @see https://nodejs.org/dist/latest-v22.x/docs/api/repl.html#repl_assignment_of_the_underscore_variable + */ + readonly lastError: any; + /** + * Specified in the REPL options, this is the function to be used when evaluating each + * given line of input. If not specified in the REPL options, this is an async wrapper + * for the JavaScript `eval()` function. + */ + readonly eval: REPLEval; + /** + * Specified in the REPL options, this is a value indicating whether the default + * `writer` function should include ANSI color styling to REPL output. 
+ */ + readonly useColors: boolean; + /** + * Specified in the REPL options, this is a value indicating whether the default `eval` + * function will use the JavaScript `global` as the context as opposed to creating a new + * separate context for the REPL instance. + */ + readonly useGlobal: boolean; + /** + * Specified in the REPL options, this is a value indicating whether the default `writer` + * function should output the result of a command if it evaluates to `undefined`. + */ + readonly ignoreUndefined: boolean; + /** + * Specified in the REPL options, this is the function to invoke to format the output of + * each command before writing to `outputStream`. If not specified in the REPL options, + * this will be a wrapper for `util.inspect`. + */ + readonly writer: REPLWriter; + /** + * Specified in the REPL options, this is the function to use for custom Tab auto-completion. + */ + readonly completer: Completer | AsyncCompleter; + /** + * Specified in the REPL options, this is a flag that specifies whether the default `eval` + * function should execute all JavaScript commands in strict mode or default (sloppy) mode. + * Possible values are: + * - `repl.REPL_MODE_SLOPPY` - evaluates expressions in sloppy mode. + * - `repl.REPL_MODE_STRICT` - evaluates expressions in strict mode. This is equivalent to + * prefacing every repl statement with `'use strict'`. + */ + readonly replMode: typeof REPL_MODE_SLOPPY | typeof REPL_MODE_STRICT; + /** + * NOTE: According to the documentation: + * + * > Instances of `repl.REPLServer` are created using the `repl.start()` method and + * > _should not_ be created directly using the JavaScript `new` keyword. + * + * `REPLServer` cannot be subclassed due to implementation specifics in NodeJS. + * + * @see https://nodejs.org/dist/latest-v22.x/docs/api/repl.html#repl_class_replserver + */ + private constructor(); + /** + * The `replServer.defineCommand()` method is used to add new `.`\-prefixed commands + * to the REPL instance. Such commands are invoked by typing a `.` followed by the `keyword`. The `cmd` is either a `Function` or an `Object` with the following + * properties: + * + * The following example shows two new commands added to the REPL instance: + * + * ```js + * import repl from 'node:repl'; + * + * const replServer = repl.start({ prompt: '> ' }); + * replServer.defineCommand('sayhello', { + * help: 'Say hello', + * action(name) { + * this.clearBufferedCommand(); + * console.log(`Hello, ${name}!`); + * this.displayPrompt(); + * }, + * }); + * replServer.defineCommand('saybye', function saybye() { + * console.log('Goodbye!'); + * this.close(); + * }); + * ``` + * + * The new commands can then be used from within the REPL instance: + * + * ```console + * > .sayhello Node.js User + * Hello, Node.js User! + * > .saybye + * Goodbye! + * ``` + * @since v0.3.0 + * @param keyword The command keyword (_without_ a leading `.` character). + * @param cmd The function to invoke when the command is processed. + */ + defineCommand(keyword: string, cmd: REPLCommandAction | REPLCommand): void; + /** + * The `replServer.displayPrompt()` method readies the REPL instance for input + * from the user, printing the configured `prompt` to a new line in the `output` and resuming the `input` to accept new input. + * + * When multi-line input is being entered, an ellipsis is printed rather than the + * 'prompt'. + * + * When `preserveCursor` is `true`, the cursor placement will not be reset to `0`. 
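+ *
+ * A minimal sketch of calling it from a custom command (assuming
+ * `replServer` was created with `repl.start()`; the `.cls` keyword is
+ * illustrative only):
+ *
+ * ```js
+ * replServer.defineCommand('cls', function cls() {
+ *   console.clear();
+ *   this.displayPrompt();
+ * });
+ * ```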
+ * + * The `replServer.displayPrompt` method is primarily intended to be called from + * within the action function for commands registered using the `replServer.defineCommand()` method. + * @since v0.1.91 + */ + displayPrompt(preserveCursor?: boolean): void; + /** + * The `replServer.clearBufferedCommand()` method clears any command that has been + * buffered but not yet executed. This method is primarily intended to be + * called from within the action function for commands registered using the `replServer.defineCommand()` method. + * @since v9.0.0 + */ + clearBufferedCommand(): void; + /** + * Initializes a history log file for the REPL instance. When executing the + * Node.js binary and using the command-line REPL, a history file is initialized + * by default. However, this is not the case when creating a REPL + * programmatically. Use this method to initialize a history log file when working + * with REPL instances programmatically. + * @since v11.10.0 + * @param historyPath the path to the history file + * @param callback called when history writes are ready or upon error + */ + setupHistory(path: string, callback: (err: Error | null, repl: this) => void): void; + /** + * events.EventEmitter + * 1. close - inherited from `readline.Interface` + * 2. line - inherited from `readline.Interface` + * 3. pause - inherited from `readline.Interface` + * 4. resume - inherited from `readline.Interface` + * 5. SIGCONT - inherited from `readline.Interface` + * 6. SIGINT - inherited from `readline.Interface` + * 7. SIGTSTP - inherited from `readline.Interface` + * 8. exit + * 9. reset + */ + addListener(event: string, listener: (...args: any[]) => void): this; + addListener(event: "close", listener: () => void): this; + addListener(event: "line", listener: (input: string) => void): this; + addListener(event: "pause", listener: () => void): this; + addListener(event: "resume", listener: () => void): this; + addListener(event: "SIGCONT", listener: () => void): this; + addListener(event: "SIGINT", listener: () => void): this; + addListener(event: "SIGTSTP", listener: () => void): this; + addListener(event: "exit", listener: () => void): this; + addListener(event: "reset", listener: (context: Context) => void): this; + emit(event: string | symbol, ...args: any[]): boolean; + emit(event: "close"): boolean; + emit(event: "line", input: string): boolean; + emit(event: "pause"): boolean; + emit(event: "resume"): boolean; + emit(event: "SIGCONT"): boolean; + emit(event: "SIGINT"): boolean; + emit(event: "SIGTSTP"): boolean; + emit(event: "exit"): boolean; + emit(event: "reset", context: Context): boolean; + on(event: string, listener: (...args: any[]) => void): this; + on(event: "close", listener: () => void): this; + on(event: "line", listener: (input: string) => void): this; + on(event: "pause", listener: () => void): this; + on(event: "resume", listener: () => void): this; + on(event: "SIGCONT", listener: () => void): this; + on(event: "SIGINT", listener: () => void): this; + on(event: "SIGTSTP", listener: () => void): this; + on(event: "exit", listener: () => void): this; + on(event: "reset", listener: (context: Context) => void): this; + once(event: string, listener: (...args: any[]) => void): this; + once(event: "close", listener: () => void): this; + once(event: "line", listener: (input: string) => void): this; + once(event: "pause", listener: () => void): this; + once(event: "resume", listener: () => void): this; + once(event: "SIGCONT", listener: () => void): this; + once(event: "SIGINT", 
listener: () => void): this; + once(event: "SIGTSTP", listener: () => void): this; + once(event: "exit", listener: () => void): this; + once(event: "reset", listener: (context: Context) => void): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + prependListener(event: "close", listener: () => void): this; + prependListener(event: "line", listener: (input: string) => void): this; + prependListener(event: "pause", listener: () => void): this; + prependListener(event: "resume", listener: () => void): this; + prependListener(event: "SIGCONT", listener: () => void): this; + prependListener(event: "SIGINT", listener: () => void): this; + prependListener(event: "SIGTSTP", listener: () => void): this; + prependListener(event: "exit", listener: () => void): this; + prependListener(event: "reset", listener: (context: Context) => void): this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + prependOnceListener(event: "close", listener: () => void): this; + prependOnceListener(event: "line", listener: (input: string) => void): this; + prependOnceListener(event: "pause", listener: () => void): this; + prependOnceListener(event: "resume", listener: () => void): this; + prependOnceListener(event: "SIGCONT", listener: () => void): this; + prependOnceListener(event: "SIGINT", listener: () => void): this; + prependOnceListener(event: "SIGTSTP", listener: () => void): this; + prependOnceListener(event: "exit", listener: () => void): this; + prependOnceListener(event: "reset", listener: (context: Context) => void): this; + } + /** + * A flag passed in the REPL options. Evaluates expressions in sloppy mode. + */ + const REPL_MODE_SLOPPY: unique symbol; + /** + * A flag passed in the REPL options. Evaluates expressions in strict mode. + * This is equivalent to prefacing every repl statement with `'use strict'`. + */ + const REPL_MODE_STRICT: unique symbol; + /** + * The `repl.start()` method creates and starts a {@link REPLServer} instance. + * + * If `options` is a string, then it specifies the input prompt: + * + * ```js + * import repl from 'node:repl'; + * + * // a Unix style prompt + * repl.start('$ '); + * ``` + * @since v0.1.91 + */ + function start(options?: string | ReplOptions): REPLServer; + /** + * Indicates a recoverable error that a `REPLServer` can use to support multi-line input. + * + * @see https://nodejs.org/dist/latest-v22.x/docs/api/repl.html#repl_recoverable_errors + */ + class Recoverable extends SyntaxError { + err: Error; + constructor(err: Error); + } +} +declare module "node:repl" { + export * from "repl"; +} diff --git a/node_modules/@types/node-fetch/node_modules/@types/node/sea.d.ts b/node_modules/@types/node-fetch/node_modules/@types/node/sea.d.ts new file mode 100644 index 0000000..0bedc62 --- /dev/null +++ b/node_modules/@types/node-fetch/node_modules/@types/node/sea.d.ts @@ -0,0 +1,153 @@ +/** + * This feature allows the distribution of a Node.js application conveniently to a + * system that does not have Node.js installed. + * + * Node.js supports the creation of [single executable applications](https://github.com/nodejs/single-executable) by allowing + * the injection of a blob prepared by Node.js, which can contain a bundled script, + * into the `node` binary. During start up, the program checks if anything has been + * injected. If the blob is found, it executes the script in the blob. Otherwise + * Node.js operates as it normally does. 
+ * + * The single executable application feature currently only supports running a + * single embedded script using the `CommonJS` module system. + * + * Users can create a single executable application from their bundled script + * with the `node` binary itself and any tool which can inject resources into the + * binary. + * + * Here are the steps for creating a single executable application using one such + * tool, [postject](https://github.com/nodejs/postject): + * + * 1. Create a JavaScript file: + * ```bash + * echo 'console.log(`Hello, ${process.argv[2]}!`);' > hello.js + * ``` + * 2. Create a configuration file building a blob that can be injected into the + * single executable application (see `Generating single executable preparation blobs` for details): + * ```bash + * echo '{ "main": "hello.js", "output": "sea-prep.blob" }' > sea-config.json + * ``` + * 3. Generate the blob to be injected: + * ```bash + * node --experimental-sea-config sea-config.json + * ``` + * 4. Create a copy of the `node` executable and name it according to your needs: + * * On systems other than Windows: + * ```bash + * cp $(command -v node) hello + * ``` + * * On Windows: + * ```text + * node -e "require('fs').copyFileSync(process.execPath, 'hello.exe')" + * ``` + * The `.exe` extension is necessary. + * 5. Remove the signature of the binary (macOS and Windows only): + * * On macOS: + * ```bash + * codesign --remove-signature hello + * ``` + * * On Windows (optional): + * [signtool](https://learn.microsoft.com/en-us/windows/win32/seccrypto/signtool) can be used from the installed [Windows SDK](https://developer.microsoft.com/en-us/windows/downloads/windows-sdk/). + * If this step is + * skipped, ignore any signature-related warning from postject. + * ```powershell + * signtool remove /s hello.exe + * ``` + * 6. Inject the blob into the copied binary by running `postject` with + * the following options: + * * `hello` / `hello.exe` \- The name of the copy of the `node` executable + * created in step 4. + * * `NODE_SEA_BLOB` \- The name of the resource / note / section in the binary + * where the contents of the blob will be stored. + * * `sea-prep.blob` \- The name of the blob created in step 1. + * * `--sentinel-fuse NODE_SEA_FUSE_fce680ab2cc467b6e072b8b5df1996b2` \- The [fuse](https://www.electronjs.org/docs/latest/tutorial/fuses) used by the Node.js project to detect if a file has been + * injected. + * * `--macho-segment-name NODE_SEA` (only needed on macOS) - The name of the + * segment in the binary where the contents of the blob will be + * stored. + * To summarize, here is the required command for each platform: + * * On Linux: + * ```bash + * npx postject hello NODE_SEA_BLOB sea-prep.blob \ + * --sentinel-fuse NODE_SEA_FUSE_fce680ab2cc467b6e072b8b5df1996b2 + * ``` + * * On Windows - PowerShell: + * ```powershell + * npx postject hello.exe NODE_SEA_BLOB sea-prep.blob ` + * --sentinel-fuse NODE_SEA_FUSE_fce680ab2cc467b6e072b8b5df1996b2 + * ``` + * * On Windows - Command Prompt: + * ```text + * npx postject hello.exe NODE_SEA_BLOB sea-prep.blob ^ + * --sentinel-fuse NODE_SEA_FUSE_fce680ab2cc467b6e072b8b5df1996b2 + * ``` + * * On macOS: + * ```bash + * npx postject hello NODE_SEA_BLOB sea-prep.blob \ + * --sentinel-fuse NODE_SEA_FUSE_fce680ab2cc467b6e072b8b5df1996b2 \ + * --macho-segment-name NODE_SEA + * ``` + * 7. 
Sign the binary (macOS and Windows only): + * * On macOS: + * ```bash + * codesign --sign - hello + * ``` + * * On Windows (optional): + * A certificate needs to be present for this to work. However, the unsigned + * binary would still be runnable. + * ```powershell + * signtool sign /fd SHA256 hello.exe + * ``` + * 8. Run the binary: + * * On systems other than Windows + * ```console + * $ ./hello world + * Hello, world! + * ``` + * * On Windows + * ```console + * $ .\hello.exe world + * Hello, world! + * ``` + * @since v19.7.0, v18.16.0 + * @experimental + * @see [source](https://github.com/nodejs/node/blob/v22.x/src/node_sea.cc) + */ +declare module "node:sea" { + type AssetKey = string; + /** + * @since v20.12.0 + * @return Whether this script is running inside a single-executable application. + */ + function isSea(): boolean; + /** + * This method can be used to retrieve the assets configured to be bundled into the + * single-executable application at build time. + * An error is thrown when no matching asset can be found. + * @since v20.12.0 + */ + function getAsset(key: AssetKey): ArrayBuffer; + function getAsset(key: AssetKey, encoding: string): string; + /** + * Similar to `sea.getAsset()`, but returns the result in a [`Blob`](https://developer.mozilla.org/en-US/docs/Web/API/Blob). + * An error is thrown when no matching asset can be found. + * @since v20.12.0 + */ + function getAssetAsBlob(key: AssetKey, options?: { + type: string; + }): Blob; + /** + * This method can be used to retrieve the assets configured to be bundled into the + * single-executable application at build time. + * An error is thrown when no matching asset can be found. + * + * Unlike `sea.getAsset()` or `sea.getAssetAsBlob()`, this method does not + * return a copy. Instead, it returns the raw asset bundled inside the executable. + * + * For now, users should avoid writing to the returned array buffer. If the + * injected section is not marked as writable or not aligned properly, + * writes to the returned array buffer are likely to result in a crash. + * @since v20.12.0 + */ + function getRawAsset(key: AssetKey): string | ArrayBuffer; +} diff --git a/node_modules/@types/node-fetch/node_modules/@types/node/sqlite.d.ts b/node_modules/@types/node-fetch/node_modules/@types/node/sqlite.d.ts new file mode 100644 index 0000000..9fa7bab --- /dev/null +++ b/node_modules/@types/node-fetch/node_modules/@types/node/sqlite.d.ts @@ -0,0 +1,213 @@ +/** + * The `node:sqlite` module facilitates working with SQLite databases. + * To access it: + * + * ```js + * import sqlite from 'node:sqlite'; + * ``` + * + * This module is only available under the `node:` scheme. The following will not + * work: + * + * ```js + * import sqlite from 'sqlite'; + * ``` + * + * The following example shows the basic usage of the `node:sqlite` module to open + * an in-memory database, write data to the database, and then read the data back. + * + * ```js + * import { DatabaseSync } from 'node:sqlite'; + * const database = new DatabaseSync(':memory:'); + * + * // Execute SQL statements from strings. + * database.exec(` + * CREATE TABLE data( + * key INTEGER PRIMARY KEY, + * value TEXT + * ) STRICT + * `); + * // Create a prepared statement to insert data into the database. + * const insert = database.prepare('INSERT INTO data (key, value) VALUES (?, ?)'); + * // Execute the prepared statement with bound values. + * insert.run(1, 'hello'); + * insert.run(2, 'world'); + * // Create a prepared statement to read data from the database. 
+ * const query = database.prepare('SELECT * FROM data ORDER BY key'); + * // Execute the prepared statement and log the result set. + * console.log(query.all()); + * // Prints: [ { key: 1, value: 'hello' }, { key: 2, value: 'world' } ] + * ``` + * @since v22.5.0 + * @experimental + * @see [source](https://github.com/nodejs/node/blob/v22.x/lib/sqlite.js) + */ +declare module "node:sqlite" { + interface DatabaseSyncOptions { + /** + * If `true`, the database is opened by the constructor. + * When this value is `false`, the database must be opened via the `open()` method. + */ + open?: boolean | undefined; + } + /** + * This class represents a single [connection](https://www.sqlite.org/c3ref/sqlite3.html) to a SQLite database. All APIs + * exposed by this class execute synchronously. + * @since v22.5.0 + */ + class DatabaseSync { + /** + * Constructs a new `DatabaseSync` instance. + * @param location The location of the database. + * A SQLite database can be stored in a file or completely [in memory](https://www.sqlite.org/inmemorydb.html). + * To use a file-backed database, the location should be a file path. + * To use an in-memory database, the location should be the special name `':memory:'`. + * @param options Configuration options for the database connection. + */ + constructor(location: string, options?: DatabaseSyncOptions); + /** + * Closes the database connection. An exception is thrown if the database is not + * open. This method is a wrapper around [`sqlite3_close_v2()`](https://www.sqlite.org/c3ref/close.html). + * @since v22.5.0 + */ + close(): void; + /** + * This method allows one or more SQL statements to be executed without returning + * any results. This method is useful when executing SQL statements read from a + * file. This method is a wrapper around [`sqlite3_exec()`](https://www.sqlite.org/c3ref/exec.html). + * @since v22.5.0 + * @param sql A SQL string to execute. + */ + exec(sql: string): void; + /** + * Opens the database specified in the `location` argument of the `DatabaseSync`constructor. This method should only be used when the database is not opened via + * the constructor. An exception is thrown if the database is already open. + * @since v22.5.0 + */ + open(): void; + /** + * Compiles a SQL statement into a [prepared statement](https://www.sqlite.org/c3ref/stmt.html). This method is a wrapper + * around [`sqlite3_prepare_v2()`](https://www.sqlite.org/c3ref/prepare.html). + * @since v22.5.0 + * @param sql A SQL string to compile to a prepared statement. + * @return The prepared statement. + */ + prepare(sql: string): StatementSync; + } + type SupportedValueType = null | number | bigint | string | Uint8Array; + interface StatementResultingChanges { + /** + * The number of rows modified, inserted, or deleted by the most recently completed `INSERT`, `UPDATE`, or `DELETE` statement. + * This field is either a number or a `BigInt` depending on the prepared statement's configuration. + * This property is the result of [`sqlite3_changes64()`](https://www.sqlite.org/c3ref/changes.html). + */ + changes: number | bigint; + /** + * The most recently inserted rowid. + * This field is either a number or a `BigInt` depending on the prepared statement's configuration. + * This property is the result of [`sqlite3_last_insert_rowid()`](https://www.sqlite.org/c3ref/last_insert_rowid.html). + */ + lastInsertRowid: number | bigint; + } + /** + * This class represents a single [prepared statement](https://www.sqlite.org/c3ref/stmt.html). 
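As a brief aside before the `StatementSync` details continue below, here is a minimal sketch of the `StatementResultingChanges` shape documented above coming back from a prepared statement's `run()` call; `node:sqlite` is experimental, so running this may require enabling Node's experimental SQLite flag (`--experimental-sqlite` in the v22 line these typings target):

```js
import { DatabaseSync } from 'node:sqlite';

const db = new DatabaseSync(':memory:');
db.exec('CREATE TABLE notes(id INTEGER PRIMARY KEY, body TEXT) STRICT');

const insert = db.prepare('INSERT INTO notes (body) VALUES (?)');
// run() reports the write via a StatementResultingChanges object.
const { changes, lastInsertRowid } = insert.run('first note');
console.log(changes, lastInsertRowid); // 1 1

db.close();
```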
This class cannot be + * instantiated via its constructor. Instead, instances are created via the`database.prepare()` method. All APIs exposed by this class execute + * synchronously. + * + * A prepared statement is an efficient binary representation of the SQL used to + * create it. Prepared statements are parameterizable, and can be invoked multiple + * times with different bound values. Parameters also offer protection against [SQL injection](https://en.wikipedia.org/wiki/SQL_injection) attacks. For these reasons, prepared statements are + * preferred + * over hand-crafted SQL strings when handling user input. + * @since v22.5.0 + */ + class StatementSync { + private constructor(); + /** + * This method executes a prepared statement and returns all results as an array of + * objects. If the prepared statement does not return any results, this method + * returns an empty array. The prepared statement [parameters are bound](https://www.sqlite.org/c3ref/bind_blob.html) using + * the values in `namedParameters` and `anonymousParameters`. + * @since v22.5.0 + * @param namedParameters An optional object used to bind named parameters. The keys of this object are used to configure the mapping. + * @param anonymousParameters Zero or more values to bind to anonymous parameters. + * @return An array of objects. Each object corresponds to a row returned by executing the prepared statement. The keys and values of each object correspond to the column names and values of + * the row. + */ + all(...anonymousParameters: SupportedValueType[]): unknown[]; + all( + namedParameters: Record, + ...anonymousParameters: SupportedValueType[] + ): unknown[]; + /** + * This method returns the source SQL of the prepared statement with parameter + * placeholders replaced by values. This method is a wrapper around [`sqlite3_expanded_sql()`](https://www.sqlite.org/c3ref/expanded_sql.html). + * @since v22.5.0 + * @return The source SQL expanded to include parameter values. + */ + expandedSQL(): string; + /** + * This method executes a prepared statement and returns the first result as an + * object. If the prepared statement does not return any results, this method + * returns `undefined`. The prepared statement [parameters are bound](https://www.sqlite.org/c3ref/bind_blob.html) using the + * values in `namedParameters` and `anonymousParameters`. + * @since v22.5.0 + * @param namedParameters An optional object used to bind named parameters. The keys of this object are used to configure the mapping. + * @param anonymousParameters Zero or more values to bind to anonymous parameters. + * @return An object corresponding to the first row returned by executing the prepared statement. The keys and values of the object correspond to the column names and values of the row. If no + * rows were returned from the database then this method returns `undefined`. + */ + get(...anonymousParameters: SupportedValueType[]): unknown; + get(namedParameters: Record, ...anonymousParameters: SupportedValueType[]): unknown; + /** + * This method executes a prepared statement and returns an object summarizing the + * resulting changes. The prepared statement [parameters are bound](https://www.sqlite.org/c3ref/bind_blob.html) using the + * values in `namedParameters` and `anonymousParameters`. + * @since v22.5.0 + * @param namedParameters An optional object used to bind named parameters. The keys of this object are used to configure the mapping. + * @param anonymousParameters Zero or more values to bind to anonymous parameters. 
+ */ + run(...anonymousParameters: SupportedValueType[]): StatementResultingChanges; + run( + namedParameters: Record, + ...anonymousParameters: SupportedValueType[] + ): StatementResultingChanges; + /** + * The names of SQLite parameters begin with a prefix character. By default,`node:sqlite` requires that this prefix character is present when binding + * parameters. However, with the exception of dollar sign character, these + * prefix characters also require extra quoting when used in object keys. + * + * To improve ergonomics, this method can be used to also allow bare named + * parameters, which do not require the prefix character in JavaScript code. There + * are several caveats to be aware of when enabling bare named parameters: + * + * * The prefix character is still required in SQL. + * * The prefix character is still allowed in JavaScript. In fact, prefixed names + * will have slightly better binding performance. + * * Using ambiguous named parameters, such as `$k` and `@k`, in the same prepared + * statement will result in an exception as it cannot be determined how to bind + * a bare name. + * @since v22.5.0 + * @param enabled Enables or disables support for binding named parameters without the prefix character. + */ + setAllowBareNamedParameters(enabled: boolean): void; + /** + * When reading from the database, SQLite `INTEGER`s are mapped to JavaScript + * numbers by default. However, SQLite `INTEGER`s can store values larger than + * JavaScript numbers are capable of representing. In such cases, this method can + * be used to read `INTEGER` data using JavaScript `BigInt`s. This method has no + * impact on database write operations where numbers and `BigInt`s are both + * supported at all times. + * @since v22.5.0 + * @param enabled Enables or disables the use of `BigInt`s when reading `INTEGER` fields from the database. + */ + setReadBigInts(enabled: boolean): void; + /** + * This method returns the source SQL of the prepared statement. This method is a + * wrapper around [`sqlite3_sql()`](https://www.sqlite.org/c3ref/expanded_sql.html). + * @since v22.5.0 + * @return The source SQL used to create this prepared statement. + */ + sourceSQL(): string; + } +} diff --git a/node_modules/@types/node-fetch/node_modules/@types/node/stream.d.ts b/node_modules/@types/node-fetch/node_modules/@types/node/stream.d.ts new file mode 100644 index 0000000..916bfcb --- /dev/null +++ b/node_modules/@types/node-fetch/node_modules/@types/node/stream.d.ts @@ -0,0 +1,1726 @@ +/** + * A stream is an abstract interface for working with streaming data in Node.js. + * The `node:stream` module provides an API for implementing the stream interface. + * + * There are many stream objects provided by Node.js. For instance, a [request to an HTTP server](https://nodejs.org/docs/latest-v22.x/api/http.html#class-httpincomingmessage) + * and [`process.stdout`](https://nodejs.org/docs/latest-v22.x/api/process.html#processstdout) are both stream instances. + * + * Streams can be readable, writable, or both. All streams are instances of [`EventEmitter`](https://nodejs.org/docs/latest-v22.x/api/events.html#class-eventemitter). + * + * To access the `node:stream` module: + * + * ```js + * import stream from 'node:stream'; + * ``` + * + * The `node:stream` module is useful for creating new types of stream instances. + * It is usually not necessary to use the `node:stream` module to consume streams. 
+ * @see [source](https://github.com/nodejs/node/blob/v22.x/lib/stream.js) + */ +declare module "stream" { + import { Abortable, EventEmitter } from "node:events"; + import { Blob as NodeBlob } from "node:buffer"; + import * as streamPromises from "node:stream/promises"; + import * as streamConsumers from "node:stream/consumers"; + import * as streamWeb from "node:stream/web"; + + type ComposeFnParam = (source: any) => void; + + class internal extends EventEmitter { + pipe( + destination: T, + options?: { + end?: boolean | undefined; + }, + ): T; + compose( + stream: T | ComposeFnParam | Iterable | AsyncIterable, + options?: { signal: AbortSignal }, + ): T; + } + import Stream = internal.Stream; + import Readable = internal.Readable; + import ReadableOptions = internal.ReadableOptions; + interface ArrayOptions { + /** + * The maximum concurrent invocations of `fn` to call on the stream at once. + * @default 1 + */ + concurrency?: number; + /** Allows destroying the stream if the signal is aborted. */ + signal?: AbortSignal; + } + class ReadableBase extends Stream implements NodeJS.ReadableStream { + /** + * A utility method for creating Readable Streams out of iterators. + * @since v12.3.0, v10.17.0 + * @param iterable Object implementing the `Symbol.asyncIterator` or `Symbol.iterator` iterable protocol. Emits an 'error' event if a null value is passed. + * @param options Options provided to `new stream.Readable([options])`. By default, `Readable.from()` will set `options.objectMode` to `true`, unless this is explicitly opted out by setting `options.objectMode` to `false`. + */ + static from(iterable: Iterable | AsyncIterable, options?: ReadableOptions): Readable; + /** + * Returns whether the stream has been read from or cancelled. + * @since v16.8.0 + */ + static isDisturbed(stream: Readable | NodeJS.ReadableStream): boolean; + /** + * Returns whether the stream was destroyed or errored before emitting `'end'`. + * @since v16.8.0 + * @experimental + */ + readonly readableAborted: boolean; + /** + * Is `true` if it is safe to call {@link read}, which means + * the stream has not been destroyed or emitted `'error'` or `'end'`. + * @since v11.4.0 + */ + readable: boolean; + /** + * Returns whether `'data'` has been emitted. + * @since v16.7.0, v14.18.0 + * @experimental + */ + readonly readableDidRead: boolean; + /** + * Getter for the property `encoding` of a given `Readable` stream. The `encoding` property can be set using the {@link setEncoding} method. + * @since v12.7.0 + */ + readonly readableEncoding: BufferEncoding | null; + /** + * Becomes `true` when [`'end'`](https://nodejs.org/docs/latest-v22.x/api/stream.html#event-end) event is emitted. + * @since v12.9.0 + */ + readonly readableEnded: boolean; + /** + * This property reflects the current state of a `Readable` stream as described + * in the [Three states](https://nodejs.org/docs/latest-v22.x/api/stream.html#three-states) section. + * @since v9.4.0 + */ + readonly readableFlowing: boolean | null; + /** + * Returns the value of `highWaterMark` passed when creating this `Readable`. + * @since v9.3.0 + */ + readonly readableHighWaterMark: number; + /** + * This property contains the number of bytes (or objects) in the queue + * ready to be read. The value provides introspection data regarding + * the status of the `highWaterMark`. + * @since v9.4.0 + */ + readonly readableLength: number; + /** + * Getter for the property `objectMode` of a given `Readable` stream. 
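To see `Readable.from()` and the read-state getters above in action, a minimal sketch (object mode is the default for `Readable.from()`, as noted in its documentation):

```js
import { Readable } from 'node:stream';

async function* generate() {
  yield 'hello';
  yield 'stream';
}

// Wrap an async iterable in an object-mode Readable.
const readable = Readable.from(generate());
console.log(readable.readableObjectMode); // true

for await (const chunk of readable) {
  console.log(chunk); // 'hello', then 'stream'
}
console.log(readable.readableEnded); // true once 'end' has been emitted
```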
+ * @since v12.3.0 + */ + readonly readableObjectMode: boolean; + /** + * Is `true` after `readable.destroy()` has been called. + * @since v8.0.0 + */ + destroyed: boolean; + /** + * Is `true` after `'close'` has been emitted. + * @since v18.0.0 + */ + readonly closed: boolean; + /** + * Returns error if the stream has been destroyed with an error. + * @since v18.0.0 + */ + readonly errored: Error | null; + constructor(opts?: ReadableOptions); + _construct?(callback: (error?: Error | null) => void): void; + _read(size: number): void; + /** + * The `readable.read()` method reads data out of the internal buffer and + * returns it. If no data is available to be read, `null` is returned. By default, + * the data is returned as a `Buffer` object unless an encoding has been + * specified using the `readable.setEncoding()` method or the stream is operating + * in object mode. + * + * The optional `size` argument specifies a specific number of bytes to read. If + * `size` bytes are not available to be read, `null` will be returned _unless_ the + * stream has ended, in which case all of the data remaining in the internal buffer + * will be returned. + * + * If the `size` argument is not specified, all of the data contained in the + * internal buffer will be returned. + * + * The `size` argument must be less than or equal to 1 GiB. + * + * The `readable.read()` method should only be called on `Readable` streams + * operating in paused mode. In flowing mode, `readable.read()` is called + * automatically until the internal buffer is fully drained. + * + * ```js + * const readable = getReadableStreamSomehow(); + * + * // 'readable' may be triggered multiple times as data is buffered in + * readable.on('readable', () => { + * let chunk; + * console.log('Stream is readable (new data received in buffer)'); + * // Use a loop to make sure we read all currently available data + * while (null !== (chunk = readable.read())) { + * console.log(`Read ${chunk.length} bytes of data...`); + * } + * }); + * + * // 'end' will be triggered once when there is no more data available + * readable.on('end', () => { + * console.log('Reached end of stream.'); + * }); + * ``` + * + * Each call to `readable.read()` returns a chunk of data, or `null`. The chunks + * are not concatenated. A `while` loop is necessary to consume all data + * currently in the buffer. When reading a large file `.read()` may return `null`, + * having consumed all buffered content so far, but there is still more data to + * come not yet buffered. In this case a new `'readable'` event will be emitted + * when there is more data in the buffer. Finally the `'end'` event will be + * emitted when there is no more data to come. + * + * Therefore to read a file's whole contents from a `readable`, it is necessary + * to collect chunks across multiple `'readable'` events: + * + * ```js + * const chunks = []; + * + * readable.on('readable', () => { + * let chunk; + * while (null !== (chunk = readable.read())) { + * chunks.push(chunk); + * } + * }); + * + * readable.on('end', () => { + * const content = chunks.join(''); + * }); + * ``` + * + * A `Readable` stream in object mode will always return a single item from + * a call to `readable.read(size)`, regardless of the value of the `size` argument. + * + * If the `readable.read()` method returns a chunk of data, a `'data'` event will + * also be emitted. + * + * Calling {@link read} after the `'end'` event has + * been emitted will return `null`. No runtime error will be raised. 
+ * @since v0.9.4 + * @param size Optional argument to specify how much data to read. + */ + read(size?: number): any; + /** + * The `readable.setEncoding()` method sets the character encoding for + * data read from the `Readable` stream. + * + * By default, no encoding is assigned and stream data will be returned as `Buffer` objects. Setting an encoding causes the stream data + * to be returned as strings of the specified encoding rather than as `Buffer` objects. For instance, calling `readable.setEncoding('utf8')` will cause the + * output data to be interpreted as UTF-8 data, and passed as strings. Calling `readable.setEncoding('hex')` will cause the data to be encoded in hexadecimal + * string format. + * + * The `Readable` stream will properly handle multi-byte characters delivered + * through the stream that would otherwise become improperly decoded if simply + * pulled from the stream as `Buffer` objects. + * + * ```js + * const readable = getReadableStreamSomehow(); + * readable.setEncoding('utf8'); + * readable.on('data', (chunk) => { + * assert.equal(typeof chunk, 'string'); + * console.log('Got %d characters of string data:', chunk.length); + * }); + * ``` + * @since v0.9.4 + * @param encoding The encoding to use. + */ + setEncoding(encoding: BufferEncoding): this; + /** + * The `readable.pause()` method will cause a stream in flowing mode to stop + * emitting `'data'` events, switching out of flowing mode. Any data that + * becomes available will remain in the internal buffer. + * + * ```js + * const readable = getReadableStreamSomehow(); + * readable.on('data', (chunk) => { + * console.log(`Received ${chunk.length} bytes of data.`); + * readable.pause(); + * console.log('There will be no additional data for 1 second.'); + * setTimeout(() => { + * console.log('Now data will start flowing again.'); + * readable.resume(); + * }, 1000); + * }); + * ``` + * + * The `readable.pause()` method has no effect if there is a `'readable'` event listener. + * @since v0.9.4 + */ + pause(): this; + /** + * The `readable.resume()` method causes an explicitly paused `Readable` stream to + * resume emitting `'data'` events, switching the stream into flowing mode. + * + * The `readable.resume()` method can be used to fully consume the data from a + * stream without actually processing any of that data: + * + * ```js + * getReadableStreamSomehow() + * .resume() + * .on('end', () => { + * console.log('Reached the end, but did not read anything.'); + * }); + * ``` + * + * The `readable.resume()` method has no effect if there is a `'readable'` event listener. + * @since v0.9.4 + */ + resume(): this; + /** + * The `readable.isPaused()` method returns the current operating state of the `Readable`. + * This is used primarily by the mechanism that underlies the `readable.pipe()` method. + * In most typical cases, there will be no reason to use this method directly. + * + * ```js + * const readable = new stream.Readable(); + * + * readable.isPaused(); // === false + * readable.pause(); + * readable.isPaused(); // === true + * readable.resume(); + * readable.isPaused(); // === false + * ``` + * @since v0.11.14 + */ + isPaused(): boolean; + /** + * The `readable.unpipe()` method detaches a `Writable` stream previously attached + * using the {@link pipe} method. + * + * If the `destination` is not specified, then _all_ pipes are detached. + * + * If the `destination` is specified, but no pipe is set up for it, then + * the method does nothing. 
+ * + * ```js + * import fs from 'node:fs'; + * const readable = getReadableStreamSomehow(); + * const writable = fs.createWriteStream('file.txt'); + * // All the data from readable goes into 'file.txt', + * // but only for the first second. + * readable.pipe(writable); + * setTimeout(() => { + * console.log('Stop writing to file.txt.'); + * readable.unpipe(writable); + * console.log('Manually close the file stream.'); + * writable.end(); + * }, 1000); + * ``` + * @since v0.9.4 + * @param destination Optional specific stream to unpipe + */ + unpipe(destination?: NodeJS.WritableStream): this; + /** + * Passing `chunk` as `null` signals the end of the stream (EOF) and behaves the + * same as `readable.push(null)`, after which no more data can be written. The EOF + * signal is put at the end of the buffer and any buffered data will still be + * flushed. + * + * The `readable.unshift()` method pushes a chunk of data back into the internal + * buffer. This is useful in certain situations where a stream is being consumed by + * code that needs to "un-consume" some amount of data that it has optimistically + * pulled out of the source, so that the data can be passed on to some other party. + * + * The `stream.unshift(chunk)` method cannot be called after the `'end'` event + * has been emitted or a runtime error will be thrown. + * + * Developers using `stream.unshift()` often should consider switching to + * use of a `Transform` stream instead. See the `API for stream implementers` section for more information. + * + * ```js + * // Pull off a header delimited by \n\n. + * // Use unshift() if we get too much. + * // Call the callback with (error, header, stream). + * import { StringDecoder } from 'node:string_decoder'; + * function parseHeader(stream, callback) { + * stream.on('error', callback); + * stream.on('readable', onReadable); + * const decoder = new StringDecoder('utf8'); + * let header = ''; + * function onReadable() { + * let chunk; + * while (null !== (chunk = stream.read())) { + * const str = decoder.write(chunk); + * if (str.includes('\n\n')) { + * // Found the header boundary. + * const split = str.split(/\n\n/); + * header += split.shift(); + * const remaining = split.join('\n\n'); + * const buf = Buffer.from(remaining, 'utf8'); + * stream.removeListener('error', callback); + * // Remove the 'readable' listener before unshifting. + * stream.removeListener('readable', onReadable); + * if (buf.length) + * stream.unshift(buf); + * // Now the body of the message can be read from the stream. + * callback(null, header, stream); + * return; + * } + * // Still reading the header. + * header += str; + * } + * } + * } + * ``` + * + * Unlike {@link push}, `stream.unshift(chunk)` will not + * end the reading process by resetting the internal reading state of the stream. + * This can cause unexpected results if `readable.unshift()` is called during a + * read (i.e. from within a {@link _read} implementation on a + * custom stream). Following the call to `readable.unshift()` with an immediate {@link push} will reset the reading state appropriately, + * however it is best to simply avoid calling `readable.unshift()` while in the + * process of performing a read. + * @since v0.9.11 + * @param chunk Chunk of data to unshift onto the read queue. For streams not operating in object mode, `chunk` must + * be a {string}, {Buffer}, {TypedArray}, {DataView} or `null`. For object mode streams, `chunk` may be any JavaScript value. + * @param encoding Encoding of string chunks. 
Must be a valid `Buffer` encoding, such as `'utf8'` or `'ascii'`. + */ + unshift(chunk: any, encoding?: BufferEncoding): void; + /** + * Prior to Node.js 0.10, streams did not implement the entire `node:stream` module API as it is currently defined. (See `Compatibility` for more + * information.) + * + * When using an older Node.js library that emits `'data'` events and has a {@link pause} method that is advisory only, the `readable.wrap()` method can be used to create a `Readable` + * stream that uses + * the old stream as its data source. + * + * It will rarely be necessary to use `readable.wrap()` but the method has been + * provided as a convenience for interacting with older Node.js applications and + * libraries. + * + * ```js + * import { OldReader } from './old-api-module.js'; + * import { Readable } from 'node:stream'; + * const oreader = new OldReader(); + * const myReader = new Readable().wrap(oreader); + * + * myReader.on('readable', () => { + * myReader.read(); // etc. + * }); + * ``` + * @since v0.9.4 + * @param stream An "old style" readable stream + */ + wrap(stream: NodeJS.ReadableStream): this; + push(chunk: any, encoding?: BufferEncoding): boolean; + /** + * The iterator created by this method gives users the option to cancel the destruction + * of the stream if the `for await...of` loop is exited by `return`, `break`, or `throw`, + * or if the iterator should destroy the stream if the stream emitted an error during iteration. + * @since v16.3.0 + * @param options.destroyOnReturn When set to `false`, calling `return` on the async iterator, + * or exiting a `for await...of` iteration using a `break`, `return`, or `throw` will not destroy the stream. + * **Default: `true`**. + */ + iterator(options?: { destroyOnReturn?: boolean }): NodeJS.AsyncIterator; + /** + * This method allows mapping over the stream. The *fn* function will be called for every chunk in the stream. + * If the *fn* function returns a promise - that promise will be `await`ed before being passed to the result stream. + * @since v17.4.0, v16.14.0 + * @param fn a function to map over every chunk in the stream. Async or not. + * @returns a stream mapped with the function *fn*. + */ + map(fn: (data: any, options?: Pick) => any, options?: ArrayOptions): Readable; + /** + * This method allows filtering the stream. For each chunk in the stream the *fn* function will be called + * and if it returns a truthy value, the chunk will be passed to the result stream. + * If the *fn* function returns a promise - that promise will be `await`ed. + * @since v17.4.0, v16.14.0 + * @param fn a function to filter chunks from the stream. Async or not. + * @returns a stream filtered with the predicate *fn*. + */ + filter( + fn: (data: any, options?: Pick) => boolean | Promise, + options?: ArrayOptions, + ): Readable; + /** + * This method allows iterating a stream. For each chunk in the stream the *fn* function will be called. + * If the *fn* function returns a promise - that promise will be `await`ed. + * + * This method is different from `for await...of` loops in that it can optionally process chunks concurrently. + * In addition, a `forEach` iteration can only be stopped by having passed a `signal` option + * and aborting the related AbortController while `for await...of` can be stopped with `break` or `return`. + * In either case the stream will be destroyed. 
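As a small, hedged illustration of these experimental iterator helpers (`map` and `filter` documented above, plus the `toArray` collector described just below):

```js
import { Readable } from 'node:stream';

const result = await Readable.from([1, 2, 3, 4])
  // Run up to two async mapper calls at a time.
  .map(async (n) => n * 2, { concurrency: 2 })
  // Keep only chunks the predicate accepts.
  .filter((n) => n > 4)
  // Collect the remaining chunks into an array (buffers the whole stream).
  .toArray();

console.log(result); // [6, 8]
```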
+ * + * This method is different from listening to the `'data'` event in that it uses the `readable` event + * in the underlying machinary and can limit the number of concurrent *fn* calls. + * @since v17.5.0 + * @param fn a function to call on each chunk of the stream. Async or not. + * @returns a promise for when the stream has finished. + */ + forEach( + fn: (data: any, options?: Pick) => void | Promise, + options?: ArrayOptions, + ): Promise; + /** + * This method allows easily obtaining the contents of a stream. + * + * As this method reads the entire stream into memory, it negates the benefits of streams. It's intended + * for interoperability and convenience, not as the primary way to consume streams. + * @since v17.5.0 + * @returns a promise containing an array with the contents of the stream. + */ + toArray(options?: Pick): Promise; + /** + * This method is similar to `Array.prototype.some` and calls *fn* on each chunk in the stream + * until the awaited return value is `true` (or any truthy value). Once an *fn* call on a chunk + * `await`ed return value is truthy, the stream is destroyed and the promise is fulfilled with `true`. + * If none of the *fn* calls on the chunks return a truthy value, the promise is fulfilled with `false`. + * @since v17.5.0 + * @param fn a function to call on each chunk of the stream. Async or not. + * @returns a promise evaluating to `true` if *fn* returned a truthy value for at least one of the chunks. + */ + some( + fn: (data: any, options?: Pick) => boolean | Promise, + options?: ArrayOptions, + ): Promise; + /** + * This method is similar to `Array.prototype.find` and calls *fn* on each chunk in the stream + * to find a chunk with a truthy value for *fn*. Once an *fn* call's awaited return value is truthy, + * the stream is destroyed and the promise is fulfilled with value for which *fn* returned a truthy value. + * If all of the *fn* calls on the chunks return a falsy value, the promise is fulfilled with `undefined`. + * @since v17.5.0 + * @param fn a function to call on each chunk of the stream. Async or not. + * @returns a promise evaluating to the first chunk for which *fn* evaluated with a truthy value, + * or `undefined` if no element was found. + */ + find( + fn: (data: any, options?: Pick) => data is T, + options?: ArrayOptions, + ): Promise; + find( + fn: (data: any, options?: Pick) => boolean | Promise, + options?: ArrayOptions, + ): Promise; + /** + * This method is similar to `Array.prototype.every` and calls *fn* on each chunk in the stream + * to check if all awaited return values are truthy value for *fn*. Once an *fn* call on a chunk + * `await`ed return value is falsy, the stream is destroyed and the promise is fulfilled with `false`. + * If all of the *fn* calls on the chunks return a truthy value, the promise is fulfilled with `true`. + * @since v17.5.0 + * @param fn a function to call on each chunk of the stream. Async or not. + * @returns a promise evaluating to `true` if *fn* returned a truthy value for every one of the chunks. + */ + every( + fn: (data: any, options?: Pick) => boolean | Promise, + options?: ArrayOptions, + ): Promise; + /** + * This method returns a new stream by applying the given callback to each chunk of the stream + * and then flattening the result. + * + * It is possible to return a stream or another iterable or async iterable from *fn* and the result streams + * will be merged (flattened) into the returned stream. + * @since v17.5.0 + * @param fn a function to map over every chunk in the stream. 
May be async. May be a stream or generator. + * @returns a stream flat-mapped with the function *fn*. + */ + flatMap(fn: (data: any, options?: Pick) => any, options?: ArrayOptions): Readable; + /** + * This method returns a new stream with the first *limit* chunks dropped from the start. + * @since v17.5.0 + * @param limit the number of chunks to drop from the readable. + * @returns a stream with *limit* chunks dropped from the start. + */ + drop(limit: number, options?: Pick): Readable; + /** + * This method returns a new stream with the first *limit* chunks. + * @since v17.5.0 + * @param limit the number of chunks to take from the readable. + * @returns a stream with *limit* chunks taken. + */ + take(limit: number, options?: Pick): Readable; + /** + * This method returns a new stream with chunks of the underlying stream paired with a counter + * in the form `[index, chunk]`. The first index value is `0` and it increases by 1 for each chunk produced. + * @since v17.5.0 + * @returns a stream of indexed pairs. + */ + asIndexedPairs(options?: Pick): Readable; + /** + * This method calls *fn* on each chunk of the stream in order, passing it the result from the calculation + * on the previous element. It returns a promise for the final value of the reduction. + * + * If no *initial* value is supplied the first chunk of the stream is used as the initial value. + * If the stream is empty, the promise is rejected with a `TypeError` with the `ERR_INVALID_ARGS` code property. + * + * The reducer function iterates the stream element-by-element which means that there is no *concurrency* parameter + * or parallelism. To perform a reduce concurrently, you can extract the async function to `readable.map` method. + * @since v17.5.0 + * @param fn a reducer function to call over every chunk in the stream. Async or not. + * @param initial the initial value to use in the reduction. + * @returns a promise for the final value of the reduction. + */ + reduce( + fn: (previous: any, data: any, options?: Pick) => T, + initial?: undefined, + options?: Pick, + ): Promise; + reduce( + fn: (previous: T, data: any, options?: Pick) => T, + initial: T, + options?: Pick, + ): Promise; + _destroy(error: Error | null, callback: (error?: Error | null) => void): void; + /** + * Destroy the stream. Optionally emit an `'error'` event, and emit a `'close'` event (unless `emitClose` is set to `false`). After this call, the readable + * stream will release any internal resources and subsequent calls to `push()` will be ignored. + * + * Once `destroy()` has been called any further calls will be a no-op and no + * further errors except from `_destroy()` may be emitted as `'error'`. + * + * Implementors should not override this method, but instead implement `readable._destroy()`. + * @since v8.0.0 + * @param error Error which will be passed as payload in `'error'` event + */ + destroy(error?: Error): this; + /** + * Event emitter + * The defined events on documents including: + * 1. close + * 2. data + * 3. end + * 4. error + * 5. pause + * 6. readable + * 7. 
resume + */ + addListener(event: "close", listener: () => void): this; + addListener(event: "data", listener: (chunk: any) => void): this; + addListener(event: "end", listener: () => void): this; + addListener(event: "error", listener: (err: Error) => void): this; + addListener(event: "pause", listener: () => void): this; + addListener(event: "readable", listener: () => void): this; + addListener(event: "resume", listener: () => void): this; + addListener(event: string | symbol, listener: (...args: any[]) => void): this; + emit(event: "close"): boolean; + emit(event: "data", chunk: any): boolean; + emit(event: "end"): boolean; + emit(event: "error", err: Error): boolean; + emit(event: "pause"): boolean; + emit(event: "readable"): boolean; + emit(event: "resume"): boolean; + emit(event: string | symbol, ...args: any[]): boolean; + on(event: "close", listener: () => void): this; + on(event: "data", listener: (chunk: any) => void): this; + on(event: "end", listener: () => void): this; + on(event: "error", listener: (err: Error) => void): this; + on(event: "pause", listener: () => void): this; + on(event: "readable", listener: () => void): this; + on(event: "resume", listener: () => void): this; + on(event: string | symbol, listener: (...args: any[]) => void): this; + once(event: "close", listener: () => void): this; + once(event: "data", listener: (chunk: any) => void): this; + once(event: "end", listener: () => void): this; + once(event: "error", listener: (err: Error) => void): this; + once(event: "pause", listener: () => void): this; + once(event: "readable", listener: () => void): this; + once(event: "resume", listener: () => void): this; + once(event: string | symbol, listener: (...args: any[]) => void): this; + prependListener(event: "close", listener: () => void): this; + prependListener(event: "data", listener: (chunk: any) => void): this; + prependListener(event: "end", listener: () => void): this; + prependListener(event: "error", listener: (err: Error) => void): this; + prependListener(event: "pause", listener: () => void): this; + prependListener(event: "readable", listener: () => void): this; + prependListener(event: "resume", listener: () => void): this; + prependListener(event: string | symbol, listener: (...args: any[]) => void): this; + prependOnceListener(event: "close", listener: () => void): this; + prependOnceListener(event: "data", listener: (chunk: any) => void): this; + prependOnceListener(event: "end", listener: () => void): this; + prependOnceListener(event: "error", listener: (err: Error) => void): this; + prependOnceListener(event: "pause", listener: () => void): this; + prependOnceListener(event: "readable", listener: () => void): this; + prependOnceListener(event: "resume", listener: () => void): this; + prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; + removeListener(event: "close", listener: () => void): this; + removeListener(event: "data", listener: (chunk: any) => void): this; + removeListener(event: "end", listener: () => void): this; + removeListener(event: "error", listener: (err: Error) => void): this; + removeListener(event: "pause", listener: () => void): this; + removeListener(event: "readable", listener: () => void): this; + removeListener(event: "resume", listener: () => void): this; + removeListener(event: string | symbol, listener: (...args: any[]) => void): this; + [Symbol.asyncIterator](): NodeJS.AsyncIterator; + /** + * Calls `readable.destroy()` with an `AbortError` and returns a promise that fulfills 
when the stream is finished. + * @since v20.4.0 + */ + [Symbol.asyncDispose](): Promise; + } + import WritableOptions = internal.WritableOptions; + class WritableBase extends Stream implements NodeJS.WritableStream { + /** + * Is `true` if it is safe to call `writable.write()`, which means + * the stream has not been destroyed, errored, or ended. + * @since v11.4.0 + */ + readonly writable: boolean; + /** + * Is `true` after `writable.end()` has been called. This property + * does not indicate whether the data has been flushed, for this use `writable.writableFinished` instead. + * @since v12.9.0 + */ + readonly writableEnded: boolean; + /** + * Is set to `true` immediately before the `'finish'` event is emitted. + * @since v12.6.0 + */ + readonly writableFinished: boolean; + /** + * Return the value of `highWaterMark` passed when creating this `Writable`. + * @since v9.3.0 + */ + readonly writableHighWaterMark: number; + /** + * This property contains the number of bytes (or objects) in the queue + * ready to be written. The value provides introspection data regarding + * the status of the `highWaterMark`. + * @since v9.4.0 + */ + readonly writableLength: number; + /** + * Getter for the property `objectMode` of a given `Writable` stream. + * @since v12.3.0 + */ + readonly writableObjectMode: boolean; + /** + * Number of times `writable.uncork()` needs to be + * called in order to fully uncork the stream. + * @since v13.2.0, v12.16.0 + */ + readonly writableCorked: number; + /** + * Is `true` after `writable.destroy()` has been called. + * @since v8.0.0 + */ + destroyed: boolean; + /** + * Is `true` after `'close'` has been emitted. + * @since v18.0.0 + */ + readonly closed: boolean; + /** + * Returns error if the stream has been destroyed with an error. + * @since v18.0.0 + */ + readonly errored: Error | null; + /** + * Is `true` if the stream's buffer has been full and stream will emit `'drain'`. + * @since v15.2.0, v14.17.0 + */ + readonly writableNeedDrain: boolean; + constructor(opts?: WritableOptions); + _write(chunk: any, encoding: BufferEncoding, callback: (error?: Error | null) => void): void; + _writev?( + chunks: Array<{ + chunk: any; + encoding: BufferEncoding; + }>, + callback: (error?: Error | null) => void, + ): void; + _construct?(callback: (error?: Error | null) => void): void; + _destroy(error: Error | null, callback: (error?: Error | null) => void): void; + _final(callback: (error?: Error | null) => void): void; + /** + * The `writable.write()` method writes some data to the stream, and calls the + * supplied `callback` once the data has been fully handled. If an error + * occurs, the `callback` will be called with the error as its + * first argument. The `callback` is called asynchronously and before `'error'` is + * emitted. + * + * The return value is `true` if the internal buffer is less than the `highWaterMark` configured when the stream was created after admitting `chunk`. + * If `false` is returned, further attempts to write data to the stream should + * stop until the `'drain'` event is emitted. + * + * While a stream is not draining, calls to `write()` will buffer `chunk`, and + * return false. Once all currently buffered chunks are drained (accepted for + * delivery by the operating system), the `'drain'` event will be emitted. + * Once `write()` returns false, do not write more chunks + * until the `'drain'` event is emitted. 
While calling `write()` on a stream that + * is not draining is allowed, Node.js will buffer all written chunks until + * maximum memory usage occurs, at which point it will abort unconditionally. + * Even before it aborts, high memory usage will cause poor garbage collector + * performance and high RSS (which is not typically released back to the system, + * even after the memory is no longer required). Since TCP sockets may never + * drain if the remote peer does not read the data, writing a socket that is + * not draining may lead to a remotely exploitable vulnerability. + * + * Writing data while the stream is not draining is particularly + * problematic for a `Transform`, because the `Transform` streams are paused + * by default until they are piped or a `'data'` or `'readable'` event handler + * is added. + * + * If the data to be written can be generated or fetched on demand, it is + * recommended to encapsulate the logic into a `Readable` and use {@link pipe}. However, if calling `write()` is preferred, it is + * possible to respect backpressure and avoid memory issues using the `'drain'` event: + * + * ```js + * function write(data, cb) { + * if (!stream.write(data)) { + * stream.once('drain', cb); + * } else { + * process.nextTick(cb); + * } + * } + * + * // Wait for cb to be called before doing any other write. + * write('hello', () => { + * console.log('Write completed, do more writes now.'); + * }); + * ``` + * + * A `Writable` stream in object mode will always ignore the `encoding` argument. + * @since v0.9.4 + * @param chunk Optional data to write. For streams not operating in object mode, `chunk` must be a {string}, {Buffer}, + * {TypedArray} or {DataView}. For object mode streams, `chunk` may be any JavaScript value other than `null`. + * @param [encoding='utf8'] The encoding, if `chunk` is a string. + * @param callback Callback for when this chunk of data is flushed. + * @return `false` if the stream wishes for the calling code to wait for the `'drain'` event to be emitted before continuing to write additional data; otherwise `true`. + */ + write(chunk: any, callback?: (error: Error | null | undefined) => void): boolean; + write(chunk: any, encoding: BufferEncoding, callback?: (error: Error | null | undefined) => void): boolean; + /** + * The `writable.setDefaultEncoding()` method sets the default `encoding` for a `Writable` stream. + * @since v0.11.15 + * @param encoding The new default encoding + */ + setDefaultEncoding(encoding: BufferEncoding): this; + /** + * Calling the `writable.end()` method signals that no more data will be written + * to the `Writable`. The optional `chunk` and `encoding` arguments allow one + * final additional chunk of data to be written immediately before closing the + * stream. + * + * Calling the {@link write} method after calling {@link end} will raise an error. + * + * ```js + * // Write 'hello, ' and then end with 'world!'. + * import fs from 'node:fs'; + * const file = fs.createWriteStream('example.txt'); + * file.write('hello, '); + * file.end('world!'); + * // Writing more now is not allowed! + * ``` + * @since v0.9.4 + * @param chunk Optional data to write. For streams not operating in object mode, `chunk` must be a {string}, {Buffer}, + * {TypedArray} or {DataView}. For object mode streams, `chunk` may be any JavaScript value other than `null`. + * @param encoding The encoding if `chunk` is a string + * @param callback Callback for when the stream is finished. 
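Pulling together the implementer hook `_write()` declared earlier with the `write()`/`end()` methods documented above, a minimal sketch of a toy in-memory `Writable` (the `MemorySink` class is purely illustrative):

```js
import { Writable } from 'node:stream';

class MemorySink extends Writable {
  constructor(options) {
    super(options);
    this.chunks = [];
  }
  // Called for each chunk; invoke callback() once the chunk is handled.
  _write(chunk, encoding, callback) {
    this.chunks.push(chunk);
    callback();
  }
}

const sink = new MemorySink();
sink.write('hello, ');
// end() accepts one final chunk and a callback run when the stream finishes.
sink.end('world!', () => {
  console.log(Buffer.concat(sink.chunks).toString()); // 'hello, world!'
});
```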
+ */ + end(cb?: () => void): this; + end(chunk: any, cb?: () => void): this; + end(chunk: any, encoding: BufferEncoding, cb?: () => void): this; + /** + * The `writable.cork()` method forces all written data to be buffered in memory. + * The buffered data will be flushed when either the {@link uncork} or {@link end} methods are called. + * + * The primary intent of `writable.cork()` is to accommodate a situation in which + * several small chunks are written to the stream in rapid succession. Instead of + * immediately forwarding them to the underlying destination, `writable.cork()` buffers all the chunks until `writable.uncork()` is called, which will pass them + * all to `writable._writev()`, if present. This prevents a head-of-line blocking + * situation where data is being buffered while waiting for the first small chunk + * to be processed. However, use of `writable.cork()` without implementing `writable._writev()` may have an adverse effect on throughput. + * + * See also: `writable.uncork()`, `writable._writev()`. + * @since v0.11.2 + */ + cork(): void; + /** + * The `writable.uncork()` method flushes all data buffered since {@link cork} was called. + * + * When using `writable.cork()` and `writable.uncork()` to manage the buffering + * of writes to a stream, defer calls to `writable.uncork()` using `process.nextTick()`. Doing so allows batching of all `writable.write()` calls that occur within a given Node.js event + * loop phase. + * + * ```js + * stream.cork(); + * stream.write('some '); + * stream.write('data '); + * process.nextTick(() => stream.uncork()); + * ``` + * + * If the `writable.cork()` method is called multiple times on a stream, the + * same number of calls to `writable.uncork()` must be called to flush the buffered + * data. + * + * ```js + * stream.cork(); + * stream.write('some '); + * stream.cork(); + * stream.write('data '); + * process.nextTick(() => { + * stream.uncork(); + * // The data will not be flushed until uncork() is called a second time. + * stream.uncork(); + * }); + * ``` + * + * See also: `writable.cork()`. + * @since v0.11.2 + */ + uncork(): void; + /** + * Destroy the stream. Optionally emit an `'error'` event, and emit a `'close'` event (unless `emitClose` is set to `false`). After this call, the writable + * stream has ended and subsequent calls to `write()` or `end()` will result in + * an `ERR_STREAM_DESTROYED` error. + * This is a destructive and immediate way to destroy a stream. Previous calls to `write()` may not have drained, and may trigger an `ERR_STREAM_DESTROYED` error. + * Use `end()` instead of destroy if data should flush before close, or wait for + * the `'drain'` event before destroying the stream. + * + * Once `destroy()` has been called any further calls will be a no-op and no + * further errors except from `_destroy()` may be emitted as `'error'`. + * + * Implementors should not override this method, + * but instead implement `writable._destroy()`. + * @since v8.0.0 + * @param error Optional, an error to emit with `'error'` event. + */ + destroy(error?: Error): this; + /** + * Event emitter + * The defined events on documents including: + * 1. close + * 2. drain + * 3. error + * 4. finish + * 5. pipe + * 6. 
unpipe + */ + addListener(event: "close", listener: () => void): this; + addListener(event: "drain", listener: () => void): this; + addListener(event: "error", listener: (err: Error) => void): this; + addListener(event: "finish", listener: () => void): this; + addListener(event: "pipe", listener: (src: Readable) => void): this; + addListener(event: "unpipe", listener: (src: Readable) => void): this; + addListener(event: string | symbol, listener: (...args: any[]) => void): this; + emit(event: "close"): boolean; + emit(event: "drain"): boolean; + emit(event: "error", err: Error): boolean; + emit(event: "finish"): boolean; + emit(event: "pipe", src: Readable): boolean; + emit(event: "unpipe", src: Readable): boolean; + emit(event: string | symbol, ...args: any[]): boolean; + on(event: "close", listener: () => void): this; + on(event: "drain", listener: () => void): this; + on(event: "error", listener: (err: Error) => void): this; + on(event: "finish", listener: () => void): this; + on(event: "pipe", listener: (src: Readable) => void): this; + on(event: "unpipe", listener: (src: Readable) => void): this; + on(event: string | symbol, listener: (...args: any[]) => void): this; + once(event: "close", listener: () => void): this; + once(event: "drain", listener: () => void): this; + once(event: "error", listener: (err: Error) => void): this; + once(event: "finish", listener: () => void): this; + once(event: "pipe", listener: (src: Readable) => void): this; + once(event: "unpipe", listener: (src: Readable) => void): this; + once(event: string | symbol, listener: (...args: any[]) => void): this; + prependListener(event: "close", listener: () => void): this; + prependListener(event: "drain", listener: () => void): this; + prependListener(event: "error", listener: (err: Error) => void): this; + prependListener(event: "finish", listener: () => void): this; + prependListener(event: "pipe", listener: (src: Readable) => void): this; + prependListener(event: "unpipe", listener: (src: Readable) => void): this; + prependListener(event: string | symbol, listener: (...args: any[]) => void): this; + prependOnceListener(event: "close", listener: () => void): this; + prependOnceListener(event: "drain", listener: () => void): this; + prependOnceListener(event: "error", listener: (err: Error) => void): this; + prependOnceListener(event: "finish", listener: () => void): this; + prependOnceListener(event: "pipe", listener: (src: Readable) => void): this; + prependOnceListener(event: "unpipe", listener: (src: Readable) => void): this; + prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; + removeListener(event: "close", listener: () => void): this; + removeListener(event: "drain", listener: () => void): this; + removeListener(event: "error", listener: (err: Error) => void): this; + removeListener(event: "finish", listener: () => void): this; + removeListener(event: "pipe", listener: (src: Readable) => void): this; + removeListener(event: "unpipe", listener: (src: Readable) => void): this; + removeListener(event: string | symbol, listener: (...args: any[]) => void): this; + } + namespace internal { + class Stream extends internal { + constructor(opts?: ReadableOptions); + } + interface StreamOptions extends Abortable { + emitClose?: boolean | undefined; + highWaterMark?: number | undefined; + objectMode?: boolean | undefined; + construct?(this: T, callback: (error?: Error | null) => void): void; + destroy?(this: T, error: Error | null, callback: (error?: Error | null) => void): void; 
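The `cork()`/`uncork()` batching described above can be sketched as follows; the `logFields()` helper and the `batched.log` destination are hypothetical names used only for illustration:

```ts
import { createWriteStream } from "node:fs";

const out = createWriteStream("batched.log"); // illustrative destination

// Buffer several small writes and flush them together: cork() holds the
// chunks, and uncork() is deferred to the next tick as recommended above.
function logFields(fields: string[]): void {
  out.cork();
  for (const field of fields) {
    out.write(field + "\n");
  }
  process.nextTick(() => out.uncork()); // flush everything written this tick
}

logFields(["id=42", "status=ok"]);
```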
+ autoDestroy?: boolean | undefined; + } + interface ReadableOptions extends StreamOptions { + encoding?: BufferEncoding | undefined; + read?(this: Readable, size: number): void; + } + /** + * @since v0.9.4 + */ + class Readable extends ReadableBase { + /** + * A utility method for creating a `Readable` from a web `ReadableStream`. + * @since v17.0.0 + * @experimental + */ + static fromWeb( + readableStream: streamWeb.ReadableStream, + options?: Pick, + ): Readable; + /** + * A utility method for creating a web `ReadableStream` from a `Readable`. + * @since v17.0.0 + * @experimental + */ + static toWeb(streamReadable: Readable): streamWeb.ReadableStream; + } + interface WritableOptions extends StreamOptions { + decodeStrings?: boolean | undefined; + defaultEncoding?: BufferEncoding | undefined; + write?( + this: Writable, + chunk: any, + encoding: BufferEncoding, + callback: (error?: Error | null) => void, + ): void; + writev?( + this: Writable, + chunks: Array<{ + chunk: any; + encoding: BufferEncoding; + }>, + callback: (error?: Error | null) => void, + ): void; + final?(this: Writable, callback: (error?: Error | null) => void): void; + } + /** + * @since v0.9.4 + */ + class Writable extends WritableBase { + /** + * A utility method for creating a `Writable` from a web `WritableStream`. + * @since v17.0.0 + * @experimental + */ + static fromWeb( + writableStream: streamWeb.WritableStream, + options?: Pick, + ): Writable; + /** + * A utility method for creating a web `WritableStream` from a `Writable`. + * @since v17.0.0 + * @experimental + */ + static toWeb(streamWritable: Writable): streamWeb.WritableStream; + } + interface DuplexOptions extends ReadableOptions, WritableOptions { + allowHalfOpen?: boolean | undefined; + readableObjectMode?: boolean | undefined; + writableObjectMode?: boolean | undefined; + readableHighWaterMark?: number | undefined; + writableHighWaterMark?: number | undefined; + writableCorked?: number | undefined; + construct?(this: Duplex, callback: (error?: Error | null) => void): void; + read?(this: Duplex, size: number): void; + write?(this: Duplex, chunk: any, encoding: BufferEncoding, callback: (error?: Error | null) => void): void; + writev?( + this: Duplex, + chunks: Array<{ + chunk: any; + encoding: BufferEncoding; + }>, + callback: (error?: Error | null) => void, + ): void; + final?(this: Duplex, callback: (error?: Error | null) => void): void; + destroy?(this: Duplex, error: Error | null, callback: (error?: Error | null) => void): void; + } + /** + * Duplex streams are streams that implement both the `Readable` and `Writable` interfaces. + * + * Examples of `Duplex` streams include: + * + * * `TCP sockets` + * * `zlib streams` + * * `crypto streams` + * @since v0.9.4 + */ + class Duplex extends ReadableBase implements WritableBase { + readonly writable: boolean; + readonly writableEnded: boolean; + readonly writableFinished: boolean; + readonly writableHighWaterMark: number; + readonly writableLength: number; + readonly writableObjectMode: boolean; + readonly writableCorked: number; + readonly writableNeedDrain: boolean; + readonly closed: boolean; + readonly errored: Error | null; + /** + * If `false` then the stream will automatically end the writable side when the + * readable side ends. Set initially by the `allowHalfOpen` constructor option, + * which defaults to `true`. + * + * This can be changed manually to change the half-open behavior of an existing + * `Duplex` stream instance, but must be changed before the `'end'` event is emitted. 
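A small illustrative round trip through the experimental `Readable.toWeb()` and `Readable.fromWeb()` utilities declared above; this is a sketch that assumes a Node.js version shipping these methods:

```ts
import { Readable } from "node:stream";

// Bridge a Node.js Readable to a web ReadableStream and back again.
const nodeReadable = Readable.from(["hello", " ", "world"]);
const webStream = Readable.toWeb(nodeReadable);   // Node -> web ReadableStream
const roundTripped = Readable.fromWeb(webStream); // web -> Node Readable

roundTripped.on("data", (chunk) => console.log(String(chunk)));
```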
+ * @since v0.9.4 + */ + allowHalfOpen: boolean; + constructor(opts?: DuplexOptions); + /** + * A utility method for creating duplex streams. + * + * - `Stream` converts writable stream into writable `Duplex` and readable stream + * to `Duplex`. + * - `Blob` converts into readable `Duplex`. + * - `string` converts into readable `Duplex`. + * - `ArrayBuffer` converts into readable `Duplex`. + * - `AsyncIterable` converts into a readable `Duplex`. Cannot yield `null`. + * - `AsyncGeneratorFunction` converts into a readable/writable transform + * `Duplex`. Must take a source `AsyncIterable` as first parameter. Cannot yield + * `null`. + * - `AsyncFunction` converts into a writable `Duplex`. Must return + * either `null` or `undefined` + * - `Object ({ writable, readable })` converts `readable` and + * `writable` into `Stream` and then combines them into `Duplex` where the + * `Duplex` will write to the `writable` and read from the `readable`. + * - `Promise` converts into readable `Duplex`. Value `null` is ignored. + * + * @since v16.8.0 + */ + static from( + src: + | Stream + | NodeBlob + | ArrayBuffer + | string + | Iterable + | AsyncIterable + | AsyncGeneratorFunction + | Promise + | Object, + ): Duplex; + _write(chunk: any, encoding: BufferEncoding, callback: (error?: Error | null) => void): void; + _writev?( + chunks: Array<{ + chunk: any; + encoding: BufferEncoding; + }>, + callback: (error?: Error | null) => void, + ): void; + _destroy(error: Error | null, callback: (error?: Error | null) => void): void; + _final(callback: (error?: Error | null) => void): void; + write(chunk: any, encoding?: BufferEncoding, cb?: (error: Error | null | undefined) => void): boolean; + write(chunk: any, cb?: (error: Error | null | undefined) => void): boolean; + setDefaultEncoding(encoding: BufferEncoding): this; + end(cb?: () => void): this; + end(chunk: any, cb?: () => void): this; + end(chunk: any, encoding?: BufferEncoding, cb?: () => void): this; + cork(): void; + uncork(): void; + /** + * A utility method for creating a web `ReadableStream` and `WritableStream` from a `Duplex`. + * @since v17.0.0 + * @experimental + */ + static toWeb(streamDuplex: Duplex): { + readable: streamWeb.ReadableStream; + writable: streamWeb.WritableStream; + }; + /** + * A utility method for creating a `Duplex` from a web `ReadableStream` and `WritableStream`. + * @since v17.0.0 + * @experimental + */ + static fromWeb( + duplexStream: { + readable: streamWeb.ReadableStream; + writable: streamWeb.WritableStream; + }, + options?: Pick< + DuplexOptions, + "allowHalfOpen" | "decodeStrings" | "encoding" | "highWaterMark" | "objectMode" | "signal" + >, + ): Duplex; + /** + * Event emitter + * The defined events on documents including: + * 1. close + * 2. data + * 3. drain + * 4. end + * 5. error + * 6. finish + * 7. pause + * 8. pipe + * 9. readable + * 10. resume + * 11. 
unpipe + */ + addListener(event: "close", listener: () => void): this; + addListener(event: "data", listener: (chunk: any) => void): this; + addListener(event: "drain", listener: () => void): this; + addListener(event: "end", listener: () => void): this; + addListener(event: "error", listener: (err: Error) => void): this; + addListener(event: "finish", listener: () => void): this; + addListener(event: "pause", listener: () => void): this; + addListener(event: "pipe", listener: (src: Readable) => void): this; + addListener(event: "readable", listener: () => void): this; + addListener(event: "resume", listener: () => void): this; + addListener(event: "unpipe", listener: (src: Readable) => void): this; + addListener(event: string | symbol, listener: (...args: any[]) => void): this; + emit(event: "close"): boolean; + emit(event: "data", chunk: any): boolean; + emit(event: "drain"): boolean; + emit(event: "end"): boolean; + emit(event: "error", err: Error): boolean; + emit(event: "finish"): boolean; + emit(event: "pause"): boolean; + emit(event: "pipe", src: Readable): boolean; + emit(event: "readable"): boolean; + emit(event: "resume"): boolean; + emit(event: "unpipe", src: Readable): boolean; + emit(event: string | symbol, ...args: any[]): boolean; + on(event: "close", listener: () => void): this; + on(event: "data", listener: (chunk: any) => void): this; + on(event: "drain", listener: () => void): this; + on(event: "end", listener: () => void): this; + on(event: "error", listener: (err: Error) => void): this; + on(event: "finish", listener: () => void): this; + on(event: "pause", listener: () => void): this; + on(event: "pipe", listener: (src: Readable) => void): this; + on(event: "readable", listener: () => void): this; + on(event: "resume", listener: () => void): this; + on(event: "unpipe", listener: (src: Readable) => void): this; + on(event: string | symbol, listener: (...args: any[]) => void): this; + once(event: "close", listener: () => void): this; + once(event: "data", listener: (chunk: any) => void): this; + once(event: "drain", listener: () => void): this; + once(event: "end", listener: () => void): this; + once(event: "error", listener: (err: Error) => void): this; + once(event: "finish", listener: () => void): this; + once(event: "pause", listener: () => void): this; + once(event: "pipe", listener: (src: Readable) => void): this; + once(event: "readable", listener: () => void): this; + once(event: "resume", listener: () => void): this; + once(event: "unpipe", listener: (src: Readable) => void): this; + once(event: string | symbol, listener: (...args: any[]) => void): this; + prependListener(event: "close", listener: () => void): this; + prependListener(event: "data", listener: (chunk: any) => void): this; + prependListener(event: "drain", listener: () => void): this; + prependListener(event: "end", listener: () => void): this; + prependListener(event: "error", listener: (err: Error) => void): this; + prependListener(event: "finish", listener: () => void): this; + prependListener(event: "pause", listener: () => void): this; + prependListener(event: "pipe", listener: (src: Readable) => void): this; + prependListener(event: "readable", listener: () => void): this; + prependListener(event: "resume", listener: () => void): this; + prependListener(event: "unpipe", listener: (src: Readable) => void): this; + prependListener(event: string | symbol, listener: (...args: any[]) => void): this; + prependOnceListener(event: "close", listener: () => void): this; + prependOnceListener(event: 
"data", listener: (chunk: any) => void): this; + prependOnceListener(event: "drain", listener: () => void): this; + prependOnceListener(event: "end", listener: () => void): this; + prependOnceListener(event: "error", listener: (err: Error) => void): this; + prependOnceListener(event: "finish", listener: () => void): this; + prependOnceListener(event: "pause", listener: () => void): this; + prependOnceListener(event: "pipe", listener: (src: Readable) => void): this; + prependOnceListener(event: "readable", listener: () => void): this; + prependOnceListener(event: "resume", listener: () => void): this; + prependOnceListener(event: "unpipe", listener: (src: Readable) => void): this; + prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; + removeListener(event: "close", listener: () => void): this; + removeListener(event: "data", listener: (chunk: any) => void): this; + removeListener(event: "drain", listener: () => void): this; + removeListener(event: "end", listener: () => void): this; + removeListener(event: "error", listener: (err: Error) => void): this; + removeListener(event: "finish", listener: () => void): this; + removeListener(event: "pause", listener: () => void): this; + removeListener(event: "pipe", listener: (src: Readable) => void): this; + removeListener(event: "readable", listener: () => void): this; + removeListener(event: "resume", listener: () => void): this; + removeListener(event: "unpipe", listener: (src: Readable) => void): this; + removeListener(event: string | symbol, listener: (...args: any[]) => void): this; + } + /** + * The utility function `duplexPair` returns an Array with two items, + * each being a `Duplex` stream connected to the other side: + * + * ```js + * const [ sideA, sideB ] = duplexPair(); + * ``` + * + * Whatever is written to one stream is made readable on the other. It provides + * behavior analogous to a network connection, where the data written by the client + * becomes readable by the server, and vice-versa. + * + * The Duplex streams are symmetrical; one or the other may be used without any + * difference in behavior. + * @param options A value to pass to both {@link Duplex} constructors, + * to set options such as buffering. + * @since v22.6.0 + */ + function duplexPair(options?: DuplexOptions): [Duplex, Duplex]; + type TransformCallback = (error?: Error | null, data?: any) => void; + interface TransformOptions extends DuplexOptions { + construct?(this: Transform, callback: (error?: Error | null) => void): void; + read?(this: Transform, size: number): void; + write?( + this: Transform, + chunk: any, + encoding: BufferEncoding, + callback: (error?: Error | null) => void, + ): void; + writev?( + this: Transform, + chunks: Array<{ + chunk: any; + encoding: BufferEncoding; + }>, + callback: (error?: Error | null) => void, + ): void; + final?(this: Transform, callback: (error?: Error | null) => void): void; + destroy?(this: Transform, error: Error | null, callback: (error?: Error | null) => void): void; + transform?(this: Transform, chunk: any, encoding: BufferEncoding, callback: TransformCallback): void; + flush?(this: Transform, callback: TransformCallback): void; + } + /** + * Transform streams are `Duplex` streams where the output is in some way + * related to the input. Like all `Duplex` streams, `Transform` streams + * implement both the `Readable` and `Writable` interfaces. 
+ * + * Examples of `Transform` streams include: + * + * * `zlib streams` + * * `crypto streams` + * @since v0.9.4 + */ + class Transform extends Duplex { + constructor(opts?: TransformOptions); + _transform(chunk: any, encoding: BufferEncoding, callback: TransformCallback): void; + _flush(callback: TransformCallback): void; + } + /** + * The `stream.PassThrough` class is a trivial implementation of a `Transform` stream that simply passes the input bytes across to the output. Its purpose is + * primarily for examples and testing, but there are some use cases where `stream.PassThrough` is useful as a building block for novel sorts of streams. + */ + class PassThrough extends Transform {} + /** + * A stream to attach a signal to. + * + * Attaches an AbortSignal to a readable or writeable stream. This lets code + * control stream destruction using an `AbortController`. + * + * Calling `abort` on the `AbortController` corresponding to the passed `AbortSignal` will behave the same way as calling `.destroy(new AbortError())` on the + * stream, and `controller.error(new AbortError())` for webstreams. + * + * ```js + * import fs from 'node:fs'; + * + * const controller = new AbortController(); + * const read = addAbortSignal( + * controller.signal, + * fs.createReadStream(('object.json')), + * ); + * // Later, abort the operation closing the stream + * controller.abort(); + * ``` + * + * Or using an `AbortSignal` with a readable stream as an async iterable: + * + * ```js + * const controller = new AbortController(); + * setTimeout(() => controller.abort(), 10_000); // set a timeout + * const stream = addAbortSignal( + * controller.signal, + * fs.createReadStream(('object.json')), + * ); + * (async () => { + * try { + * for await (const chunk of stream) { + * await process(chunk); + * } + * } catch (e) { + * if (e.name === 'AbortError') { + * // The operation was cancelled + * } else { + * throw e; + * } + * } + * })(); + * ``` + * + * Or using an `AbortSignal` with a ReadableStream: + * + * ```js + * const controller = new AbortController(); + * const rs = new ReadableStream({ + * start(controller) { + * controller.enqueue('hello'); + * controller.enqueue('world'); + * controller.close(); + * }, + * }); + * + * addAbortSignal(controller.signal, rs); + * + * finished(rs, (err) => { + * if (err) { + * if (err.name === 'AbortError') { + * // The operation was cancelled + * } + * } + * }); + * + * const reader = rs.getReader(); + * + * reader.read().then(({ value, done }) => { + * console.log(value); // hello + * console.log(done); // false + * controller.abort(); + * }); + * ``` + * @since v15.4.0 + * @param signal A signal representing possible cancellation + * @param stream A stream to attach a signal to. + */ + function addAbortSignal(signal: AbortSignal, stream: T): T; + /** + * Returns the default highWaterMark used by streams. + * Defaults to `65536` (64 KiB), or `16` for `objectMode`. + * @since v19.9.0 + */ + function getDefaultHighWaterMark(objectMode: boolean): number; + /** + * Sets the default highWaterMark used by streams. + * @since v19.9.0 + * @param value highWaterMark value + */ + function setDefaultHighWaterMark(objectMode: boolean, value: number): void; + interface FinishedOptions extends Abortable { + error?: boolean | undefined; + readable?: boolean | undefined; + writable?: boolean | undefined; + } + /** + * A readable and/or writable stream/webstream. 
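The `Transform` class above is normally used by subclassing it and implementing `_transform()`. A minimal sketch; the `UpperCase` class name is invented for illustration:

```ts
import { Transform, TransformCallback } from "node:stream";

// A Transform subclass that upper-cases every chunk passing through it.
class UpperCase extends Transform {
  _transform(chunk: any, _encoding: BufferEncoding, callback: TransformCallback): void {
    // Pass the transformed data downstream; the callback signals completion.
    callback(null, String(chunk).toUpperCase());
  }
}

process.stdin.pipe(new UpperCase()).pipe(process.stdout);
```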
+ * + * A function to get notified when a stream is no longer readable, writable + * or has experienced an error or a premature close event. + * + * ```js + * import { finished } from 'node:stream'; + * import fs from 'node:fs'; + * + * const rs = fs.createReadStream('archive.tar'); + * + * finished(rs, (err) => { + * if (err) { + * console.error('Stream failed.', err); + * } else { + * console.log('Stream is done reading.'); + * } + * }); + * + * rs.resume(); // Drain the stream. + * ``` + * + * Especially useful in error handling scenarios where a stream is destroyed + * prematurely (like an aborted HTTP request), and will not emit `'end'` or `'finish'`. + * + * The `finished` API provides [`promise version`](https://nodejs.org/docs/latest-v22.x/api/stream.html#streamfinishedstream-options). + * + * `stream.finished()` leaves dangling event listeners (in particular `'error'`, `'end'`, `'finish'` and `'close'`) after `callback` has been + * invoked. The reason for this is so that unexpected `'error'` events (due to + * incorrect stream implementations) do not cause unexpected crashes. + * If this is unwanted behavior then the returned cleanup function needs to be + * invoked in the callback: + * + * ```js + * const cleanup = finished(rs, (err) => { + * cleanup(); + * // ... + * }); + * ``` + * @since v10.0.0 + * @param stream A readable and/or writable stream. + * @param callback A callback function that takes an optional error argument. + * @returns A cleanup function which removes all registered listeners. + */ + function finished( + stream: NodeJS.ReadableStream | NodeJS.WritableStream | NodeJS.ReadWriteStream, + options: FinishedOptions, + callback: (err?: NodeJS.ErrnoException | null) => void, + ): () => void; + function finished( + stream: NodeJS.ReadableStream | NodeJS.WritableStream | NodeJS.ReadWriteStream, + callback: (err?: NodeJS.ErrnoException | null) => void, + ): () => void; + namespace finished { + function __promisify__( + stream: NodeJS.ReadableStream | NodeJS.WritableStream | NodeJS.ReadWriteStream, + options?: FinishedOptions, + ): Promise; + } + type PipelineSourceFunction = () => Iterable | AsyncIterable; + type PipelineSource = Iterable | AsyncIterable | NodeJS.ReadableStream | PipelineSourceFunction; + type PipelineTransform, U> = + | NodeJS.ReadWriteStream + | (( + source: S extends (...args: any[]) => Iterable | AsyncIterable ? AsyncIterable + : S, + ) => AsyncIterable); + type PipelineTransformSource = PipelineSource | PipelineTransform; + type PipelineDestinationIterableFunction = (source: AsyncIterable) => AsyncIterable; + type PipelineDestinationPromiseFunction = (source: AsyncIterable) => Promise
; + type PipelineDestination, P> = S extends + PipelineTransformSource ? + | NodeJS.WritableStream + | PipelineDestinationIterableFunction + | PipelineDestinationPromiseFunction + : never; + type PipelineCallback> = S extends + PipelineDestinationPromiseFunction ? (err: NodeJS.ErrnoException | null, value: P) => void + : (err: NodeJS.ErrnoException | null) => void; + type PipelinePromise> = S extends + PipelineDestinationPromiseFunction ? Promise
: Promise; + interface PipelineOptions { + signal?: AbortSignal | undefined; + end?: boolean | undefined; + } + /** + * A module method to pipe between streams and generators forwarding errors and + * properly cleaning up and provide a callback when the pipeline is complete. + * + * ```js + * import { pipeline } from 'node:stream'; + * import fs from 'node:fs'; + * import zlib from 'node:zlib'; + * + * // Use the pipeline API to easily pipe a series of streams + * // together and get notified when the pipeline is fully done. + * + * // A pipeline to gzip a potentially huge tar file efficiently: + * + * pipeline( + * fs.createReadStream('archive.tar'), + * zlib.createGzip(), + * fs.createWriteStream('archive.tar.gz'), + * (err) => { + * if (err) { + * console.error('Pipeline failed.', err); + * } else { + * console.log('Pipeline succeeded.'); + * } + * }, + * ); + * ``` + * + * The `pipeline` API provides a [`promise version`](https://nodejs.org/docs/latest-v22.x/api/stream.html#streampipelinesource-transforms-destination-options). + * + * `stream.pipeline()` will call `stream.destroy(err)` on all streams except: + * + * * `Readable` streams which have emitted `'end'` or `'close'`. + * * `Writable` streams which have emitted `'finish'` or `'close'`. + * + * `stream.pipeline()` leaves dangling event listeners on the streams + * after the `callback` has been invoked. In the case of reuse of streams after + * failure, this can cause event listener leaks and swallowed errors. If the last + * stream is readable, dangling event listeners will be removed so that the last + * stream can be consumed later. + * + * `stream.pipeline()` closes all the streams when an error is raised. + * The `IncomingRequest` usage with `pipeline` could lead to an unexpected behavior + * once it would destroy the socket without sending the expected response. + * See the example below: + * + * ```js + * import fs from 'node:fs'; + * import http from 'node:http'; + * import { pipeline } from 'node:stream'; + * + * const server = http.createServer((req, res) => { + * const fileStream = fs.createReadStream('./fileNotExist.txt'); + * pipeline(fileStream, res, (err) => { + * if (err) { + * console.log(err); // No such file + * // this message can't be sent once `pipeline` already destroyed the socket + * return res.end('error!!!'); + * } + * }); + * }); + * ``` + * @since v10.0.0 + * @param callback Called when the pipeline is fully done. + */ + function pipeline, B extends PipelineDestination>( + source: A, + destination: B, + callback: PipelineCallback, + ): B extends NodeJS.WritableStream ? B : NodeJS.WritableStream; + function pipeline< + A extends PipelineSource, + T1 extends PipelineTransform, + B extends PipelineDestination, + >( + source: A, + transform1: T1, + destination: B, + callback: PipelineCallback, + ): B extends NodeJS.WritableStream ? B : NodeJS.WritableStream; + function pipeline< + A extends PipelineSource, + T1 extends PipelineTransform, + T2 extends PipelineTransform, + B extends PipelineDestination, + >( + source: A, + transform1: T1, + transform2: T2, + destination: B, + callback: PipelineCallback, + ): B extends NodeJS.WritableStream ? B : NodeJS.WritableStream; + function pipeline< + A extends PipelineSource, + T1 extends PipelineTransform, + T2 extends PipelineTransform, + T3 extends PipelineTransform, + B extends PipelineDestination, + >( + source: A, + transform1: T1, + transform2: T2, + transform3: T3, + destination: B, + callback: PipelineCallback, + ): B extends NodeJS.WritableStream ? 
B : NodeJS.WritableStream; + function pipeline< + A extends PipelineSource, + T1 extends PipelineTransform, + T2 extends PipelineTransform, + T3 extends PipelineTransform, + T4 extends PipelineTransform, + B extends PipelineDestination, + >( + source: A, + transform1: T1, + transform2: T2, + transform3: T3, + transform4: T4, + destination: B, + callback: PipelineCallback, + ): B extends NodeJS.WritableStream ? B : NodeJS.WritableStream; + function pipeline( + streams: ReadonlyArray, + callback: (err: NodeJS.ErrnoException | null) => void, + ): NodeJS.WritableStream; + function pipeline( + stream1: NodeJS.ReadableStream, + stream2: NodeJS.ReadWriteStream | NodeJS.WritableStream, + ...streams: Array< + NodeJS.ReadWriteStream | NodeJS.WritableStream | ((err: NodeJS.ErrnoException | null) => void) + > + ): NodeJS.WritableStream; + namespace pipeline { + function __promisify__, B extends PipelineDestination>( + source: A, + destination: B, + options?: PipelineOptions, + ): PipelinePromise; + function __promisify__< + A extends PipelineSource, + T1 extends PipelineTransform, + B extends PipelineDestination, + >( + source: A, + transform1: T1, + destination: B, + options?: PipelineOptions, + ): PipelinePromise; + function __promisify__< + A extends PipelineSource, + T1 extends PipelineTransform, + T2 extends PipelineTransform, + B extends PipelineDestination, + >( + source: A, + transform1: T1, + transform2: T2, + destination: B, + options?: PipelineOptions, + ): PipelinePromise; + function __promisify__< + A extends PipelineSource, + T1 extends PipelineTransform, + T2 extends PipelineTransform, + T3 extends PipelineTransform, + B extends PipelineDestination, + >( + source: A, + transform1: T1, + transform2: T2, + transform3: T3, + destination: B, + options?: PipelineOptions, + ): PipelinePromise; + function __promisify__< + A extends PipelineSource, + T1 extends PipelineTransform, + T2 extends PipelineTransform, + T3 extends PipelineTransform, + T4 extends PipelineTransform, + B extends PipelineDestination, + >( + source: A, + transform1: T1, + transform2: T2, + transform3: T3, + transform4: T4, + destination: B, + options?: PipelineOptions, + ): PipelinePromise; + function __promisify__( + streams: ReadonlyArray, + options?: PipelineOptions, + ): Promise; + function __promisify__( + stream1: NodeJS.ReadableStream, + stream2: NodeJS.ReadWriteStream | NodeJS.WritableStream, + ...streams: Array + ): Promise; + } + interface Pipe { + close(): void; + hasRef(): boolean; + ref(): void; + unref(): void; + } + /** + * Returns whether the stream has encountered an error. + * @since v17.3.0, v16.14.0 + * @experimental + */ + function isErrored(stream: Readable | Writable | NodeJS.ReadableStream | NodeJS.WritableStream): boolean; + /** + * Returns whether the stream is readable. 
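The `pipeline()` overloads above also accept async generator functions as intermediate transform stages, with error handling and cleanup wired up for every stage. A sketch with illustrative file names:

```ts
import { pipeline } from "node:stream";
import { createReadStream, createWriteStream } from "node:fs";

// An async generator sits between the source and destination as a
// transform stage; pipeline() handles errors and teardown for all stages.
pipeline(
  createReadStream("input.txt"),              // illustrative source
  async function* (source: AsyncIterable<Buffer>) {
    for await (const chunk of source) {
      yield chunk.toString().toUpperCase();   // transform each chunk
    }
  },
  createWriteStream("output.txt"),            // illustrative destination
  (err) => {
    if (err) {
      console.error("Pipeline failed.", err);
    } else {
      console.log("Pipeline succeeded.");
    }
  },
);
```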
+ * @since v17.4.0, v16.14.0 + * @experimental + */ + function isReadable(stream: Readable | NodeJS.ReadableStream): boolean; + const promises: typeof streamPromises; + const consumers: typeof streamConsumers; + } + export = internal; +} +declare module "node:stream" { + import stream = require("stream"); + export = stream; +} diff --git a/node_modules/@types/node-fetch/node_modules/@types/node/stream/consumers.d.ts b/node_modules/@types/node-fetch/node_modules/@types/node/stream/consumers.d.ts new file mode 100644 index 0000000..5ad9cba --- /dev/null +++ b/node_modules/@types/node-fetch/node_modules/@types/node/stream/consumers.d.ts @@ -0,0 +1,12 @@ +declare module "stream/consumers" { + import { Blob as NodeBlob } from "node:buffer"; + import { Readable } from "node:stream"; + function buffer(stream: NodeJS.ReadableStream | Readable | AsyncIterable): Promise; + function text(stream: NodeJS.ReadableStream | Readable | AsyncIterable): Promise; + function arrayBuffer(stream: NodeJS.ReadableStream | Readable | AsyncIterable): Promise; + function blob(stream: NodeJS.ReadableStream | Readable | AsyncIterable): Promise; + function json(stream: NodeJS.ReadableStream | Readable | AsyncIterable): Promise; +} +declare module "node:stream/consumers" { + export * from "stream/consumers"; +} diff --git a/node_modules/@types/node-fetch/node_modules/@types/node/stream/promises.d.ts b/node_modules/@types/node-fetch/node_modules/@types/node/stream/promises.d.ts new file mode 100644 index 0000000..d54c14c --- /dev/null +++ b/node_modules/@types/node-fetch/node_modules/@types/node/stream/promises.d.ts @@ -0,0 +1,90 @@ +declare module "stream/promises" { + import { + FinishedOptions as _FinishedOptions, + PipelineDestination, + PipelineOptions, + PipelinePromise, + PipelineSource, + PipelineTransform, + } from "node:stream"; + interface FinishedOptions extends _FinishedOptions { + /** + * If true, removes the listeners registered by this function before the promise is fulfilled. 
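The `stream/consumers` helpers declared above collect an entire stream in a single call. A small sketch, assuming an ES module context so that top-level `await` is available:

```ts
import { Readable } from "node:stream";
import { json, text } from "node:stream/consumers";

// Drain a Readable into a parsed JSON value or a single string.
const parsed = await json(Readable.from(['{"ok":', "true}"])); // -> { ok: true }
const greeting = await text(Readable.from(["hi ", "there"]));  // -> "hi there"
console.log(parsed, greeting);
```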
+ * @default false + */ + cleanup?: boolean | undefined; + } + function finished( + stream: NodeJS.ReadableStream | NodeJS.WritableStream | NodeJS.ReadWriteStream, + options?: FinishedOptions, + ): Promise; + function pipeline, B extends PipelineDestination>( + source: A, + destination: B, + options?: PipelineOptions, + ): PipelinePromise; + function pipeline< + A extends PipelineSource, + T1 extends PipelineTransform, + B extends PipelineDestination, + >( + source: A, + transform1: T1, + destination: B, + options?: PipelineOptions, + ): PipelinePromise; + function pipeline< + A extends PipelineSource, + T1 extends PipelineTransform, + T2 extends PipelineTransform, + B extends PipelineDestination, + >( + source: A, + transform1: T1, + transform2: T2, + destination: B, + options?: PipelineOptions, + ): PipelinePromise; + function pipeline< + A extends PipelineSource, + T1 extends PipelineTransform, + T2 extends PipelineTransform, + T3 extends PipelineTransform, + B extends PipelineDestination, + >( + source: A, + transform1: T1, + transform2: T2, + transform3: T3, + destination: B, + options?: PipelineOptions, + ): PipelinePromise; + function pipeline< + A extends PipelineSource, + T1 extends PipelineTransform, + T2 extends PipelineTransform, + T3 extends PipelineTransform, + T4 extends PipelineTransform, + B extends PipelineDestination, + >( + source: A, + transform1: T1, + transform2: T2, + transform3: T3, + transform4: T4, + destination: B, + options?: PipelineOptions, + ): PipelinePromise; + function pipeline( + streams: ReadonlyArray, + options?: PipelineOptions, + ): Promise; + function pipeline( + stream1: NodeJS.ReadableStream, + stream2: NodeJS.ReadWriteStream | NodeJS.WritableStream, + ...streams: Array + ): Promise; +} +declare module "node:stream/promises" { + export * from "stream/promises"; +} diff --git a/node_modules/@types/node-fetch/node_modules/@types/node/stream/web.d.ts b/node_modules/@types/node-fetch/node_modules/@types/node/stream/web.d.ts new file mode 100644 index 0000000..5da8a98 --- /dev/null +++ b/node_modules/@types/node-fetch/node_modules/@types/node/stream/web.d.ts @@ -0,0 +1,609 @@ +type _ByteLengthQueuingStrategy = typeof globalThis extends { onmessage: any } ? {} + : import("stream/web").ByteLengthQueuingStrategy; +type _CompressionStream = typeof globalThis extends { onmessage: any; ReportingObserver: any } ? {} + : import("stream/web").CompressionStream; +type _CountQueuingStrategy = typeof globalThis extends { onmessage: any } ? {} + : import("stream/web").CountQueuingStrategy; +type _DecompressionStream = typeof globalThis extends { onmessage: any; ReportingObserver: any } ? {} + : import("stream/web").DecompressionStream; +type _ReadableByteStreamController = typeof globalThis extends { onmessage: any } ? {} + : import("stream/web").ReadableByteStreamController; +type _ReadableStream = typeof globalThis extends { onmessage: any } ? {} + : import("stream/web").ReadableStream; +type _ReadableStreamBYOBReader = typeof globalThis extends { onmessage: any } ? {} + : import("stream/web").ReadableStreamBYOBReader; +type _ReadableStreamBYOBRequest = typeof globalThis extends { onmessage: any } ? {} + : import("stream/web").ReadableStreamBYOBRequest; +type _ReadableStreamDefaultController = typeof globalThis extends { onmessage: any } ? {} + : import("stream/web").ReadableStreamDefaultController; +type _ReadableStreamDefaultReader = typeof globalThis extends { onmessage: any } ? 
{} + : import("stream/web").ReadableStreamDefaultReader; +type _TextDecoderStream = typeof globalThis extends { onmessage: any } ? {} + : import("stream/web").TextDecoderStream; +type _TextEncoderStream = typeof globalThis extends { onmessage: any } ? {} + : import("stream/web").TextEncoderStream; +type _TransformStream = typeof globalThis extends { onmessage: any } ? {} + : import("stream/web").TransformStream; +type _TransformStreamDefaultController = typeof globalThis extends { onmessage: any } ? {} + : import("stream/web").TransformStreamDefaultController; +type _WritableStream = typeof globalThis extends { onmessage: any } ? {} + : import("stream/web").WritableStream; +type _WritableStreamDefaultController = typeof globalThis extends { onmessage: any } ? {} + : import("stream/web").WritableStreamDefaultController; +type _WritableStreamDefaultWriter = typeof globalThis extends { onmessage: any } ? {} + : import("stream/web").WritableStreamDefaultWriter; + +declare module "stream/web" { + // stub module, pending copy&paste from .d.ts or manual impl + // copy from lib.dom.d.ts + interface ReadableWritablePair { + readable: ReadableStream; + /** + * Provides a convenient, chainable way of piping this readable stream + * through a transform stream (or any other { writable, readable } + * pair). It simply pipes the stream into the writable side of the + * supplied pair, and returns the readable side for further use. + * + * Piping a stream will lock it for the duration of the pipe, preventing + * any other consumer from acquiring a reader. + */ + writable: WritableStream; + } + interface StreamPipeOptions { + preventAbort?: boolean; + preventCancel?: boolean; + /** + * Pipes this readable stream to a given writable stream destination. + * The way in which the piping process behaves under various error + * conditions can be customized with a number of passed options. It + * returns a promise that fulfills when the piping process completes + * successfully, or rejects if any errors were encountered. + * + * Piping a stream will lock it for the duration of the pipe, preventing + * any other consumer from acquiring a reader. + * + * Errors and closures of the source and destination streams propagate + * as follows: + * + * An error in this source readable stream will abort destination, + * unless preventAbort is truthy. The returned promise will be rejected + * with the source's error, or with any error that occurs during + * aborting the destination. + * + * An error in destination will cancel this source readable stream, + * unless preventCancel is truthy. The returned promise will be rejected + * with the destination's error, or with any error that occurs during + * canceling the source. + * + * When this source readable stream closes, destination will be closed, + * unless preventClose is truthy. The returned promise will be fulfilled + * once this process completes, unless an error is encountered while + * closing the destination, in which case it will be rejected with that + * error. + * + * If destination starts out closed or closing, this source readable + * stream will be canceled, unless preventCancel is true. The returned + * promise will be rejected with an error indicating piping to a closed + * stream failed, or with any error that occurs during canceling the + * source. + * + * The signal option can be set to an AbortSignal to allow aborting an + * ongoing pipe operation via the corresponding AbortController. 
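To make `pipeTo()` and the `signal` option of `StreamPipeOptions` described above concrete, here is a sketch using inline source and sink streams; an ES module context is assumed for top-level `await`:

```ts
import { ReadableStream, WritableStream } from "node:stream/web";

// pipeTo() drains a web ReadableStream into a WritableStream; the signal
// option lets an AbortController cancel the transfer from the outside.
const source = new ReadableStream<string>({
  start(controller) {
    controller.enqueue("hello");
    controller.enqueue("world");
    controller.close();
  },
});

const sink = new WritableStream<string>({
  write(chunk) {
    console.log("received:", chunk);
  },
});

const ac = new AbortController();
await source.pipeTo(sink, { signal: ac.signal });
```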
In this + * case, this source readable stream will be canceled, and destination + * aborted, unless the respective options preventCancel or preventAbort + * are set. + */ + preventClose?: boolean; + signal?: AbortSignal; + } + interface ReadableStreamGenericReader { + readonly closed: Promise; + cancel(reason?: any): Promise; + } + type ReadableStreamController = ReadableStreamDefaultController; + interface ReadableStreamReadValueResult { + done: false; + value: T; + } + interface ReadableStreamReadDoneResult { + done: true; + value?: T; + } + type ReadableStreamReadResult = ReadableStreamReadValueResult | ReadableStreamReadDoneResult; + interface ReadableByteStreamControllerCallback { + (controller: ReadableByteStreamController): void | PromiseLike; + } + interface UnderlyingSinkAbortCallback { + (reason?: any): void | PromiseLike; + } + interface UnderlyingSinkCloseCallback { + (): void | PromiseLike; + } + interface UnderlyingSinkStartCallback { + (controller: WritableStreamDefaultController): any; + } + interface UnderlyingSinkWriteCallback { + (chunk: W, controller: WritableStreamDefaultController): void | PromiseLike; + } + interface UnderlyingSourceCancelCallback { + (reason?: any): void | PromiseLike; + } + interface UnderlyingSourcePullCallback { + (controller: ReadableStreamController): void | PromiseLike; + } + interface UnderlyingSourceStartCallback { + (controller: ReadableStreamController): any; + } + interface TransformerFlushCallback { + (controller: TransformStreamDefaultController): void | PromiseLike; + } + interface TransformerStartCallback { + (controller: TransformStreamDefaultController): any; + } + interface TransformerTransformCallback { + (chunk: I, controller: TransformStreamDefaultController): void | PromiseLike; + } + interface UnderlyingByteSource { + autoAllocateChunkSize?: number; + cancel?: ReadableStreamErrorCallback; + pull?: ReadableByteStreamControllerCallback; + start?: ReadableByteStreamControllerCallback; + type: "bytes"; + } + interface UnderlyingSource { + cancel?: UnderlyingSourceCancelCallback; + pull?: UnderlyingSourcePullCallback; + start?: UnderlyingSourceStartCallback; + type?: undefined; + } + interface UnderlyingSink { + abort?: UnderlyingSinkAbortCallback; + close?: UnderlyingSinkCloseCallback; + start?: UnderlyingSinkStartCallback; + type?: undefined; + write?: UnderlyingSinkWriteCallback; + } + interface ReadableStreamErrorCallback { + (reason: any): void | PromiseLike; + } + interface ReadableStreamAsyncIterator extends NodeJS.AsyncIterator { + [Symbol.asyncIterator](): ReadableStreamAsyncIterator; + } + /** This Streams API interface represents a readable stream of byte data. 
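A pull-based `UnderlyingSource` combined with the async iterator declared on `ReadableStream`, sketched for illustration (ES module assumed for top-level `await`):

```ts
import { ReadableStream } from "node:stream/web";

// pull() is invoked whenever the stream needs more data; the resulting
// stream is consumed with for await...of.
let n = 0;
const counter = new ReadableStream<number>({
  pull(controller) {
    if (n < 3) {
      controller.enqueue(n++);
    } else {
      controller.close();
    }
  },
});

for await (const value of counter) {
  console.log(value); // 0, 1, 2
}
```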
*/ + interface ReadableStream { + readonly locked: boolean; + cancel(reason?: any): Promise; + getReader(options: { mode: "byob" }): ReadableStreamBYOBReader; + getReader(): ReadableStreamDefaultReader; + getReader(options?: ReadableStreamGetReaderOptions): ReadableStreamReader; + pipeThrough(transform: ReadableWritablePair, options?: StreamPipeOptions): ReadableStream; + pipeTo(destination: WritableStream, options?: StreamPipeOptions): Promise; + tee(): [ReadableStream, ReadableStream]; + values(options?: { preventCancel?: boolean }): ReadableStreamAsyncIterator; + [Symbol.asyncIterator](): ReadableStreamAsyncIterator; + } + const ReadableStream: { + prototype: ReadableStream; + from(iterable: Iterable | AsyncIterable): ReadableStream; + new(underlyingSource: UnderlyingByteSource, strategy?: QueuingStrategy): ReadableStream; + new(underlyingSource?: UnderlyingSource, strategy?: QueuingStrategy): ReadableStream; + }; + type ReadableStreamReaderMode = "byob"; + interface ReadableStreamGetReaderOptions { + /** + * Creates a ReadableStreamBYOBReader and locks the stream to the new reader. + * + * This call behaves the same way as the no-argument variant, except that it only works on readable byte streams, i.e. streams which were constructed specifically with the ability to handle "bring your own buffer" reading. The returned BYOB reader provides the ability to directly read individual chunks from the stream via its read() method, into developer-supplied buffers, allowing more precise control over allocation. + */ + mode?: ReadableStreamReaderMode; + } + type ReadableStreamReader = ReadableStreamDefaultReader | ReadableStreamBYOBReader; + interface ReadableStreamDefaultReader extends ReadableStreamGenericReader { + read(): Promise>; + releaseLock(): void; + } + /** [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStreamBYOBReader) */ + interface ReadableStreamBYOBReader extends ReadableStreamGenericReader { + /** [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStreamBYOBReader/read) */ + read(view: T): Promise>; + /** [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStreamBYOBReader/releaseLock) */ + releaseLock(): void; + } + const ReadableStreamDefaultReader: { + prototype: ReadableStreamDefaultReader; + new(stream: ReadableStream): ReadableStreamDefaultReader; + }; + const ReadableStreamBYOBReader: { + prototype: ReadableStreamBYOBReader; + new(stream: ReadableStream): ReadableStreamBYOBReader; + }; + /** [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStreamBYOBRequest) */ + interface ReadableStreamBYOBRequest { + /** [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStreamBYOBRequest/view) */ + readonly view: ArrayBufferView | null; + /** [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStreamBYOBRequest/respond) */ + respond(bytesWritten: number): void; + /** [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStreamBYOBRequest/respondWithNewView) */ + respondWithNewView(view: ArrayBufferView): void; + } + const ReadableStreamBYOBRequest: { + prototype: ReadableStreamBYOBRequest; + new(): ReadableStreamBYOBRequest; + }; + interface ReadableByteStreamController { + readonly byobRequest: undefined; + readonly desiredSize: number | null; + close(): void; + enqueue(chunk: ArrayBufferView): void; + error(error?: any): void; + } + const ReadableByteStreamController: { + prototype: ReadableByteStreamController; + new(): ReadableByteStreamController; + }; + interface 
ReadableStreamDefaultController { + readonly desiredSize: number | null; + close(): void; + enqueue(chunk?: R): void; + error(e?: any): void; + } + const ReadableStreamDefaultController: { + prototype: ReadableStreamDefaultController; + new(): ReadableStreamDefaultController; + }; + interface Transformer { + flush?: TransformerFlushCallback; + readableType?: undefined; + start?: TransformerStartCallback; + transform?: TransformerTransformCallback; + writableType?: undefined; + } + interface TransformStream { + readonly readable: ReadableStream; + readonly writable: WritableStream; + } + const TransformStream: { + prototype: TransformStream; + new( + transformer?: Transformer, + writableStrategy?: QueuingStrategy, + readableStrategy?: QueuingStrategy, + ): TransformStream; + }; + interface TransformStreamDefaultController { + readonly desiredSize: number | null; + enqueue(chunk?: O): void; + error(reason?: any): void; + terminate(): void; + } + const TransformStreamDefaultController: { + prototype: TransformStreamDefaultController; + new(): TransformStreamDefaultController; + }; + /** + * This Streams API interface provides a standard abstraction for writing + * streaming data to a destination, known as a sink. This object comes with + * built-in back pressure and queuing. + */ + interface WritableStream { + readonly locked: boolean; + abort(reason?: any): Promise; + close(): Promise; + getWriter(): WritableStreamDefaultWriter; + } + const WritableStream: { + prototype: WritableStream; + new(underlyingSink?: UnderlyingSink, strategy?: QueuingStrategy): WritableStream; + }; + /** + * This Streams API interface is the object returned by + * WritableStream.getWriter() and once created locks the < writer to the + * WritableStream ensuring that no other streams can write to the underlying + * sink. + */ + interface WritableStreamDefaultWriter { + readonly closed: Promise; + readonly desiredSize: number | null; + readonly ready: Promise; + abort(reason?: any): Promise; + close(): Promise; + releaseLock(): void; + write(chunk?: W): Promise; + } + const WritableStreamDefaultWriter: { + prototype: WritableStreamDefaultWriter; + new(stream: WritableStream): WritableStreamDefaultWriter; + }; + /** + * This Streams API interface represents a controller allowing control of a + * WritableStream's state. When constructing a WritableStream, the + * underlying sink is given a corresponding WritableStreamDefaultController + * instance to manipulate. + */ + interface WritableStreamDefaultController { + error(e?: any): void; + } + const WritableStreamDefaultController: { + prototype: WritableStreamDefaultController; + new(): WritableStreamDefaultController; + }; + interface QueuingStrategy { + highWaterMark?: number; + size?: QueuingStrategySize; + } + interface QueuingStrategySize { + (chunk?: T): number; + } + interface QueuingStrategyInit { + /** + * Creates a new ByteLengthQueuingStrategy with the provided high water + * mark. + * + * Note that the provided high water mark will not be validated ahead of + * time. Instead, if it is negative, NaN, or not a number, the resulting + * ByteLengthQueuingStrategy will cause the corresponding stream + * constructor to throw. + */ + highWaterMark: number; + } + /** + * This Streams API interface provides a built-in byte length queuing + * strategy that can be used when constructing streams. 
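A `TransformStream` built from a `Transformer` object and wired up with `pipeThrough()`, as a sketch; it assumes a Node.js version that provides `ReadableStream.from()` and an ES module context for top-level `await`:

```ts
import { ReadableStream, TransformStream } from "node:stream/web";

// pipeThrough() pipes the source into the transform's writable side and
// returns the transform's readable side for further consumption.
const upper = new TransformStream<string, string>({
  transform(chunk, controller) {
    controller.enqueue(chunk.toUpperCase());
  },
});

for await (const chunk of ReadableStream.from(["stream", "ing"]).pipeThrough(upper)) {
  console.log(chunk); // STREAM, ING
}
```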
+ */ + interface ByteLengthQueuingStrategy extends QueuingStrategy { + readonly highWaterMark: number; + readonly size: QueuingStrategySize; + } + const ByteLengthQueuingStrategy: { + prototype: ByteLengthQueuingStrategy; + new(init: QueuingStrategyInit): ByteLengthQueuingStrategy; + }; + /** + * This Streams API interface provides a built-in byte length queuing + * strategy that can be used when constructing streams. + */ + interface CountQueuingStrategy extends QueuingStrategy { + readonly highWaterMark: number; + readonly size: QueuingStrategySize; + } + const CountQueuingStrategy: { + prototype: CountQueuingStrategy; + new(init: QueuingStrategyInit): CountQueuingStrategy; + }; + interface TextEncoderStream { + /** Returns "utf-8". */ + readonly encoding: "utf-8"; + readonly readable: ReadableStream; + readonly writable: WritableStream; + readonly [Symbol.toStringTag]: string; + } + const TextEncoderStream: { + prototype: TextEncoderStream; + new(): TextEncoderStream; + }; + interface TextDecoderOptions { + fatal?: boolean; + ignoreBOM?: boolean; + } + type BufferSource = ArrayBufferView | ArrayBuffer; + interface TextDecoderStream { + /** Returns encoding's name, lower cased. */ + readonly encoding: string; + /** Returns `true` if error mode is "fatal", and `false` otherwise. */ + readonly fatal: boolean; + /** Returns `true` if ignore BOM flag is set, and `false` otherwise. */ + readonly ignoreBOM: boolean; + readonly readable: ReadableStream; + readonly writable: WritableStream; + readonly [Symbol.toStringTag]: string; + } + const TextDecoderStream: { + prototype: TextDecoderStream; + new(encoding?: string, options?: TextDecoderOptions): TextDecoderStream; + }; + interface CompressionStream { + readonly readable: ReadableStream; + readonly writable: WritableStream; + } + const CompressionStream: { + prototype: CompressionStream; + new(format: "deflate" | "deflate-raw" | "gzip"): CompressionStream; + }; + interface DecompressionStream { + readonly writable: WritableStream; + readonly readable: ReadableStream; + } + const DecompressionStream: { + prototype: DecompressionStream; + new(format: "deflate" | "deflate-raw" | "gzip"): DecompressionStream; + }; + + global { + interface ByteLengthQueuingStrategy extends _ByteLengthQueuingStrategy {} + /** + * `ByteLengthQueuingStrategy` class is a global reference for `import { ByteLengthQueuingStrategy } from 'node:stream/web'`. + * https://nodejs.org/api/globals.html#class-bytelengthqueuingstrategy + * @since v18.0.0 + */ + var ByteLengthQueuingStrategy: typeof globalThis extends { onmessage: any; ByteLengthQueuingStrategy: infer T } + ? T + : typeof import("stream/web").ByteLengthQueuingStrategy; + + interface CompressionStream extends _CompressionStream {} + /** + * `CompressionStream` class is a global reference for `import { CompressionStream } from 'node:stream/web'`. + * https://nodejs.org/api/globals.html#class-compressionstream + * @since v18.0.0 + */ + var CompressionStream: typeof globalThis extends { + onmessage: any; + // CompressionStream, DecompressionStream and ReportingObserver was introduced in the same commit. + // If ReportingObserver check is removed, the type here will form a circular reference in TS5.0+lib.dom.d.ts + ReportingObserver: any; + CompressionStream: infer T; + } ? T + // TS 4.8, 4.9, 5.0 + : typeof globalThis extends { onmessage: any; TransformStream: { prototype: infer T } } ? 
{ + prototype: T; + new(format: "deflate" | "deflate-raw" | "gzip"): T; + } + : typeof import("stream/web").CompressionStream; + + interface CountQueuingStrategy extends _CountQueuingStrategy {} + /** + * `CountQueuingStrategy` class is a global reference for `import { CountQueuingStrategy } from 'node:stream/web'`. + * https://nodejs.org/api/globals.html#class-countqueuingstrategy + * @since v18.0.0 + */ + var CountQueuingStrategy: typeof globalThis extends { onmessage: any; CountQueuingStrategy: infer T } ? T + : typeof import("stream/web").CountQueuingStrategy; + + interface DecompressionStream extends _DecompressionStream {} + /** + * `DecompressionStream` class is a global reference for `import { DecompressionStream } from 'node:stream/web'`. + * https://nodejs.org/api/globals.html#class-decompressionstream + * @since v18.0.0 + */ + var DecompressionStream: typeof globalThis extends { + onmessage: any; + // CompressionStream, DecompressionStream and ReportingObserver was introduced in the same commit. + // If ReportingObserver check is removed, the type here will form a circular reference in TS5.0+lib.dom.d.ts + ReportingObserver: any; + DecompressionStream: infer T extends object; + } ? T + // TS 4.8, 4.9, 5.0 + : typeof globalThis extends { onmessage: any; TransformStream: { prototype: infer T } } ? { + prototype: T; + new(format: "deflate" | "deflate-raw" | "gzip"): T; + } + : typeof import("stream/web").DecompressionStream; + + interface ReadableByteStreamController extends _ReadableByteStreamController {} + /** + * `ReadableByteStreamController` class is a global reference for `import { ReadableByteStreamController } from 'node:stream/web'`. + * https://nodejs.org/api/globals.html#class-readablebytestreamcontroller + * @since v18.0.0 + */ + var ReadableByteStreamController: typeof globalThis extends + { onmessage: any; ReadableByteStreamController: infer T } ? T + : typeof import("stream/web").ReadableByteStreamController; + + interface ReadableStream extends _ReadableStream {} + /** + * `ReadableStream` class is a global reference for `import { ReadableStream } from 'node:stream/web'`. + * https://nodejs.org/api/globals.html#class-readablestream + * @since v18.0.0 + */ + var ReadableStream: typeof globalThis extends { onmessage: any; ReadableStream: infer T } ? T + : typeof import("stream/web").ReadableStream; + + interface ReadableStreamBYOBReader extends _ReadableStreamBYOBReader {} + /** + * `ReadableStreamBYOBReader` class is a global reference for `import { ReadableStreamBYOBReader } from 'node:stream/web'`. + * https://nodejs.org/api/globals.html#class-readablestreambyobreader + * @since v18.0.0 + */ + var ReadableStreamBYOBReader: typeof globalThis extends { onmessage: any; ReadableStreamBYOBReader: infer T } + ? T + : typeof import("stream/web").ReadableStreamBYOBReader; + + interface ReadableStreamBYOBRequest extends _ReadableStreamBYOBRequest {} + /** + * `ReadableStreamBYOBRequest` class is a global reference for `import { ReadableStreamBYOBRequest } from 'node:stream/web'`. + * https://nodejs.org/api/globals.html#class-readablestreambyobrequest + * @since v18.0.0 + */ + var ReadableStreamBYOBRequest: typeof globalThis extends { onmessage: any; ReadableStreamBYOBRequest: infer T } + ? 
T + : typeof import("stream/web").ReadableStreamBYOBRequest; + + interface ReadableStreamDefaultController extends _ReadableStreamDefaultController {} + /** + * `ReadableStreamDefaultController` class is a global reference for `import { ReadableStreamDefaultController } from 'node:stream/web'`. + * https://nodejs.org/api/globals.html#class-readablestreamdefaultcontroller + * @since v18.0.0 + */ + var ReadableStreamDefaultController: typeof globalThis extends + { onmessage: any; ReadableStreamDefaultController: infer T } ? T + : typeof import("stream/web").ReadableStreamDefaultController; + + interface ReadableStreamDefaultReader extends _ReadableStreamDefaultReader {} + /** + * `ReadableStreamDefaultReader` class is a global reference for `import { ReadableStreamDefaultReader } from 'node:stream/web'`. + * https://nodejs.org/api/globals.html#class-readablestreamdefaultreader + * @since v18.0.0 + */ + var ReadableStreamDefaultReader: typeof globalThis extends + { onmessage: any; ReadableStreamDefaultReader: infer T } ? T + : typeof import("stream/web").ReadableStreamDefaultReader; + + interface TextDecoderStream extends _TextDecoderStream {} + /** + * `TextDecoderStream` class is a global reference for `import { TextDecoderStream } from 'node:stream/web'`. + * https://nodejs.org/api/globals.html#class-textdecoderstream + * @since v18.0.0 + */ + var TextDecoderStream: typeof globalThis extends { onmessage: any; TextDecoderStream: infer T } ? T + : typeof import("stream/web").TextDecoderStream; + + interface TextEncoderStream extends _TextEncoderStream {} + /** + * `TextEncoderStream` class is a global reference for `import { TextEncoderStream } from 'node:stream/web'`. + * https://nodejs.org/api/globals.html#class-textencoderstream + * @since v18.0.0 + */ + var TextEncoderStream: typeof globalThis extends { onmessage: any; TextEncoderStream: infer T } ? T + : typeof import("stream/web").TextEncoderStream; + + interface TransformStream extends _TransformStream {} + /** + * `TransformStream` class is a global reference for `import { TransformStream } from 'node:stream/web'`. + * https://nodejs.org/api/globals.html#class-transformstream + * @since v18.0.0 + */ + var TransformStream: typeof globalThis extends { onmessage: any; TransformStream: infer T } ? T + : typeof import("stream/web").TransformStream; + + interface TransformStreamDefaultController extends _TransformStreamDefaultController {} + /** + * `TransformStreamDefaultController` class is a global reference for `import { TransformStreamDefaultController } from 'node:stream/web'`. + * https://nodejs.org/api/globals.html#class-transformstreamdefaultcontroller + * @since v18.0.0 + */ + var TransformStreamDefaultController: typeof globalThis extends + { onmessage: any; TransformStreamDefaultController: infer T } ? T + : typeof import("stream/web").TransformStreamDefaultController; + + interface WritableStream extends _WritableStream {} + /** + * `WritableStream` class is a global reference for `import { WritableStream } from 'node:stream/web'`. + * https://nodejs.org/api/globals.html#class-writablestream + * @since v18.0.0 + */ + var WritableStream: typeof globalThis extends { onmessage: any; WritableStream: infer T } ? T + : typeof import("stream/web").WritableStream; + + interface WritableStreamDefaultController extends _WritableStreamDefaultController {} + /** + * `WritableStreamDefaultController` class is a global reference for `import { WritableStreamDefaultController } from 'node:stream/web'`. 
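The encoder and compression transform streams referenced above can be chained with `pipeThrough()`. An illustrative sketch, assuming `ReadableStream.from()` is available and an ES module context for top-level `await`:

```ts
import { CompressionStream, ReadableStream, TextEncoderStream } from "node:stream/web";
import { buffer } from "node:stream/consumers";

// Encode text to bytes, then gzip the bytes, using the built-in transforms.
const gzipped = ReadableStream.from(["hello hello hello"])
  .pipeThrough(new TextEncoderStream())        // string -> Uint8Array
  .pipeThrough(new CompressionStream("gzip")); // Uint8Array -> gzip bytes

const compressed = await buffer(gzipped);
console.log(compressed.length, "compressed bytes");
```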
+ * https://nodejs.org/api/globals.html#class-writablestreamdefaultcontroller + * @since v18.0.0 + */ + var WritableStreamDefaultController: typeof globalThis extends + { onmessage: any; WritableStreamDefaultController: infer T } ? T + : typeof import("stream/web").WritableStreamDefaultController; + + interface WritableStreamDefaultWriter extends _WritableStreamDefaultWriter {} + /** + * `WritableStreamDefaultWriter` class is a global reference for `import { WritableStreamDefaultWriter } from 'node:stream/web'`. + * https://nodejs.org/api/globals.html#class-writablestreamdefaultwriter + * @since v18.0.0 + */ + var WritableStreamDefaultWriter: typeof globalThis extends + { onmessage: any; WritableStreamDefaultWriter: infer T } ? T + : typeof import("stream/web").WritableStreamDefaultWriter; + } +} +declare module "node:stream/web" { + export * from "stream/web"; +} diff --git a/node_modules/@types/node-fetch/node_modules/@types/node/string_decoder.d.ts b/node_modules/@types/node-fetch/node_modules/@types/node/string_decoder.d.ts new file mode 100644 index 0000000..350aace --- /dev/null +++ b/node_modules/@types/node-fetch/node_modules/@types/node/string_decoder.d.ts @@ -0,0 +1,67 @@ +/** + * The `node:string_decoder` module provides an API for decoding `Buffer` objects + * into strings in a manner that preserves encoded multi-byte UTF-8 and UTF-16 + * characters. It can be accessed using: + * + * ```js + * import { StringDecoder } from 'node:string_decoder'; + * ``` + * + * The following example shows the basic use of the `StringDecoder` class. + * + * ```js + * import { StringDecoder } from 'node:string_decoder'; + * const decoder = new StringDecoder('utf8'); + * + * const cent = Buffer.from([0xC2, 0xA2]); + * console.log(decoder.write(cent)); // Prints: ¢ + * + * const euro = Buffer.from([0xE2, 0x82, 0xAC]); + * console.log(decoder.write(euro)); // Prints: € + * ``` + * + * When a `Buffer` instance is written to the `StringDecoder` instance, an + * internal buffer is used to ensure that the decoded string does not contain + * any incomplete multibyte characters. These are held in the buffer until the + * next call to `stringDecoder.write()` or until `stringDecoder.end()` is called. + * + * In the following example, the three UTF-8 encoded bytes of the European Euro + * symbol (`€`) are written over three separate operations: + * + * ```js + * import { StringDecoder } from 'node:string_decoder'; + * const decoder = new StringDecoder('utf8'); + * + * decoder.write(Buffer.from([0xE2])); + * decoder.write(Buffer.from([0x82])); + * console.log(decoder.end(Buffer.from([0xAC]))); // Prints: € + * ``` + * @see [source](https://github.com/nodejs/node/blob/v22.x/lib/string_decoder.js) + */ +declare module "string_decoder" { + class StringDecoder { + constructor(encoding?: BufferEncoding); + /** + * Returns a decoded string, ensuring that any incomplete multibyte characters at + * the end of the `Buffer`, or `TypedArray`, or `DataView` are omitted from the + * returned string and stored in an internal buffer for the next call to `stringDecoder.write()` or `stringDecoder.end()`. + * @since v0.1.99 + * @param buffer The bytes to decode. + */ + write(buffer: string | Buffer | NodeJS.ArrayBufferView): string; + /** + * Returns any remaining input stored in the internal buffer as a string. Bytes + * representing incomplete UTF-8 and UTF-16 characters will be replaced with + * substitution characters appropriate for the character encoding. 
+ * + * If the `buffer` argument is provided, one final call to `stringDecoder.write()` is performed before returning the remaining input. + * After `end()` is called, the `stringDecoder` object can be reused for new input. + * @since v0.9.3 + * @param buffer The bytes to decode. + */ + end(buffer?: string | Buffer | NodeJS.ArrayBufferView): string; + } +} +declare module "node:string_decoder" { + export * from "string_decoder"; +} diff --git a/node_modules/@types/node-fetch/node_modules/@types/node/test.d.ts b/node_modules/@types/node-fetch/node_modules/@types/node/test.d.ts new file mode 100644 index 0000000..25e44ed --- /dev/null +++ b/node_modules/@types/node-fetch/node_modules/@types/node/test.d.ts @@ -0,0 +1,2248 @@ +/** + * The `node:test` module facilitates the creation of JavaScript tests. + * To access it: + * + * ```js + * import test from 'node:test'; + * ``` + * + * This module is only available under the `node:` scheme. The following will not + * work: + * + * ```js + * import test from 'test'; + * ``` + * + * Tests created via the `test` module consist of a single function that is + * processed in one of three ways: + * + * 1. A synchronous function that is considered failing if it throws an exception, + * and is considered passing otherwise. + * 2. A function that returns a `Promise` that is considered failing if the `Promise` rejects, and is considered passing if the `Promise` fulfills. + * 3. A function that receives a callback function. If the callback receives any + * truthy value as its first argument, the test is considered failing. If a + * falsy value is passed as the first argument to the callback, the test is + * considered passing. If the test function receives a callback function and + * also returns a `Promise`, the test will fail. + * + * The following example illustrates how tests are written using the `test` module. + * + * ```js + * test('synchronous passing test', (t) => { + * // This test passes because it does not throw an exception. + * assert.strictEqual(1, 1); + * }); + * + * test('synchronous failing test', (t) => { + * // This test fails because it throws an exception. + * assert.strictEqual(1, 2); + * }); + * + * test('asynchronous passing test', async (t) => { + * // This test passes because the Promise returned by the async + * // function is settled and not rejected. + * assert.strictEqual(1, 1); + * }); + * + * test('asynchronous failing test', async (t) => { + * // This test fails because the Promise returned by the async + * // function is rejected. + * assert.strictEqual(1, 2); + * }); + * + * test('failing test using Promises', (t) => { + * // Promises can be used directly as well. + * return new Promise((resolve, reject) => { + * setImmediate(() => { + * reject(new Error('this will cause the test to fail')); + * }); + * }); + * }); + * + * test('callback passing test', (t, done) => { + * // done() is the callback function. When the setImmediate() runs, it invokes + * // done() with no arguments. + * setImmediate(done); + * }); + * + * test('callback failing test', (t, done) => { + * // When the setImmediate() runs, done() is invoked with an Error object and + * // the test fails. + * setImmediate(() => { + * done(new Error('callback failure')); + * }); + * }); + * ``` + * + * If any tests fail, the process exit code is set to `1`.
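+ *
+ * The snippets above assume `assert` is already in scope; a minimal self-contained
+ * sketch with the required imports (file name and test name are illustrative):
+ *
+ * ```js
+ * // test/example.test.js (run with: node --test)
+ * import test from 'node:test';
+ * import assert from 'node:assert';
+ *
+ * test('addition works', () => {
+ *   assert.strictEqual(1 + 1, 2);
+ * });
+ * ```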
+ * @since v18.0.0, v16.17.0 + * @see [source](https://github.com/nodejs/node/blob/v22.x/lib/test.js) + */ +declare module "node:test" { + import { Readable } from "node:stream"; + /** + * **Note:** `shard` is used to horizontally parallelize test running across + * machines or processes, ideal for large-scale executions across varied + * environments. It's incompatible with `watch` mode, tailored for rapid + * code iteration by automatically rerunning tests on file changes. + * + * ```js + * import { tap } from 'node:test/reporters'; + * import { run } from 'node:test'; + * import process from 'node:process'; + * import path from 'node:path'; + * + * run({ files: [path.resolve('./tests/test.js')] }) + * .compose(tap) + * .pipe(process.stdout); + * ``` + * @since v18.9.0, v16.19.0 + * @param options Configuration options for running tests. + */ + function run(options?: RunOptions): TestsStream; + /** + * The `test()` function is the value imported from the `test` module. Each + * invocation of this function results in reporting the test to the `TestsStream`. + * + * The `TestContext` object passed to the `fn` argument can be used to perform + * actions related to the current test. Examples include skipping the test, adding + * additional diagnostic information, or creating subtests. + * + * `test()` returns a `Promise` that fulfills once the test completes. + * if `test()` is called within a suite, it fulfills immediately. + * The return value can usually be discarded for top level tests. + * However, the return value from subtests should be used to prevent the parent + * test from finishing first and cancelling the subtest + * as shown in the following example. + * + * ```js + * test('top level test', async (t) => { + * // The setTimeout() in the following subtest would cause it to outlive its + * // parent test if 'await' is removed on the next line. Once the parent test + * // completes, it will cancel any outstanding subtests. + * await t.test('longer running subtest', async (t) => { + * return new Promise((resolve, reject) => { + * setTimeout(resolve, 1000); + * }); + * }); + * }); + * ``` + * + * The `timeout` option can be used to fail the test if it takes longer than `timeout` milliseconds to complete. However, it is not a reliable mechanism for + * canceling tests because a running test might block the application thread and + * thus prevent the scheduled cancellation. + * @since v18.0.0, v16.17.0 + * @param name The name of the test, which is displayed when reporting test results. + * Defaults to the `name` property of `fn`, or `''` if `fn` does not have a name. + * @param options Configuration options for the test. + * @param fn The function under test. The first argument to this function is a {@link TestContext} object. + * If the test uses callbacks, the callback function is passed as the second argument. + * @return Fulfilled with `undefined` once the test completes, or immediately if the test runs within a suite. + */ + function test(name?: string, fn?: TestFn): Promise; + function test(name?: string, options?: TestOptions, fn?: TestFn): Promise; + function test(options?: TestOptions, fn?: TestFn): Promise; + function test(fn?: TestFn): Promise; + namespace test { + export { + after, + afterEach, + before, + beforeEach, + describe, + it, + mock, + only, + run, + skip, + snapshot, + suite, + test, + todo, + }; + } + /** + * The `suite()` function is imported from the `node:test` module. + * @param name The name of the suite, which is displayed when reporting test results. 
+ * Defaults to the `name` property of `fn`, or `''` if `fn` does not have a name. + * @param options Configuration options for the suite. This supports the same options as {@link test}. + * @param fn The suite function declaring nested tests and suites. The first argument to this function is a {@link SuiteContext} object. + * @return Immediately fulfilled with `undefined`. + * @since v20.13.0 + */ + function suite(name?: string, options?: TestOptions, fn?: SuiteFn): Promise; + function suite(name?: string, fn?: SuiteFn): Promise; + function suite(options?: TestOptions, fn?: SuiteFn): Promise; + function suite(fn?: SuiteFn): Promise; + namespace suite { + /** + * Shorthand for skipping a suite. This is the same as calling {@link suite} with `options.skip` set to `true`. + * @since v20.13.0 + */ + function skip(name?: string, options?: TestOptions, fn?: SuiteFn): Promise; + function skip(name?: string, fn?: SuiteFn): Promise; + function skip(options?: TestOptions, fn?: SuiteFn): Promise; + function skip(fn?: SuiteFn): Promise; + /** + * Shorthand for marking a suite as `TODO`. This is the same as calling {@link suite} with `options.todo` set to `true`. + * @since v20.13.0 + */ + function todo(name?: string, options?: TestOptions, fn?: SuiteFn): Promise; + function todo(name?: string, fn?: SuiteFn): Promise; + function todo(options?: TestOptions, fn?: SuiteFn): Promise; + function todo(fn?: SuiteFn): Promise; + /** + * Shorthand for marking a suite as `only`. This is the same as calling {@link suite} with `options.only` set to `true`. + * @since v20.13.0 + */ + function only(name?: string, options?: TestOptions, fn?: SuiteFn): Promise; + function only(name?: string, fn?: SuiteFn): Promise; + function only(options?: TestOptions, fn?: SuiteFn): Promise; + function only(fn?: SuiteFn): Promise; + } + /** + * Alias for {@link suite}. + * + * The `describe()` function is imported from the `node:test` module. + */ + function describe(name?: string, options?: TestOptions, fn?: SuiteFn): Promise; + function describe(name?: string, fn?: SuiteFn): Promise; + function describe(options?: TestOptions, fn?: SuiteFn): Promise; + function describe(fn?: SuiteFn): Promise; + namespace describe { + /** + * Shorthand for skipping a suite. This is the same as calling {@link describe} with `options.skip` set to `true`. + * @since v18.15.0 + */ + function skip(name?: string, options?: TestOptions, fn?: SuiteFn): Promise; + function skip(name?: string, fn?: SuiteFn): Promise; + function skip(options?: TestOptions, fn?: SuiteFn): Promise; + function skip(fn?: SuiteFn): Promise; + /** + * Shorthand for marking a suite as `TODO`. This is the same as calling {@link describe} with `options.todo` set to `true`. + * @since v18.15.0 + */ + function todo(name?: string, options?: TestOptions, fn?: SuiteFn): Promise; + function todo(name?: string, fn?: SuiteFn): Promise; + function todo(options?: TestOptions, fn?: SuiteFn): Promise; + function todo(fn?: SuiteFn): Promise; + /** + * Shorthand for marking a suite as `only`. This is the same as calling {@link describe} with `options.only` set to `true`. + * @since v18.15.0 + */ + function only(name?: string, options?: TestOptions, fn?: SuiteFn): Promise; + function only(name?: string, fn?: SuiteFn): Promise; + function only(options?: TestOptions, fn?: SuiteFn): Promise; + function only(fn?: SuiteFn): Promise; + } + /** + * Alias for {@link test}. + * + * The `it()` function is imported from the `node:test` module. 
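+ *
+ * A minimal sketch of the `describe()`/`it()` aliases (the suite and test names are
+ * illustrative):
+ *
+ * ```js
+ * import { describe, it } from 'node:test';
+ * import assert from 'node:assert';
+ *
+ * describe('math', () => {
+ *   it('adds numbers', () => {
+ *     assert.strictEqual(2 + 2, 4);
+ *   });
+ *
+ *   it.todo('multiplies numbers');
+ * });
+ * ```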
+ * @since v18.6.0, v16.17.0 + */ + function it(name?: string, options?: TestOptions, fn?: TestFn): Promise; + function it(name?: string, fn?: TestFn): Promise; + function it(options?: TestOptions, fn?: TestFn): Promise; + function it(fn?: TestFn): Promise; + namespace it { + /** + * Shorthand for skipping a test. This is the same as calling {@link it} with `options.skip` set to `true`. + */ + function skip(name?: string, options?: TestOptions, fn?: TestFn): Promise; + function skip(name?: string, fn?: TestFn): Promise; + function skip(options?: TestOptions, fn?: TestFn): Promise; + function skip(fn?: TestFn): Promise; + /** + * Shorthand for marking a test as `TODO`. This is the same as calling {@link it} with `options.todo` set to `true`. + */ + function todo(name?: string, options?: TestOptions, fn?: TestFn): Promise; + function todo(name?: string, fn?: TestFn): Promise; + function todo(options?: TestOptions, fn?: TestFn): Promise; + function todo(fn?: TestFn): Promise; + /** + * Shorthand for marking a test as `only`. This is the same as calling {@link it} with `options.only` set to `true`. + * @since v18.15.0 + */ + function only(name?: string, options?: TestOptions, fn?: TestFn): Promise; + function only(name?: string, fn?: TestFn): Promise; + function only(options?: TestOptions, fn?: TestFn): Promise; + function only(fn?: TestFn): Promise; + } + /** + * Shorthand for skipping a test. This is the same as calling {@link test} with `options.skip` set to `true`. + * @since v20.2.0 + */ + function skip(name?: string, options?: TestOptions, fn?: TestFn): Promise; + function skip(name?: string, fn?: TestFn): Promise; + function skip(options?: TestOptions, fn?: TestFn): Promise; + function skip(fn?: TestFn): Promise; + /** + * Shorthand for marking a test as `TODO`. This is the same as calling {@link test} with `options.todo` set to `true`. + * @since v20.2.0 + */ + function todo(name?: string, options?: TestOptions, fn?: TestFn): Promise; + function todo(name?: string, fn?: TestFn): Promise; + function todo(options?: TestOptions, fn?: TestFn): Promise; + function todo(fn?: TestFn): Promise; + /** + * Shorthand for marking a test as `only`. This is the same as calling {@link test} with `options.only` set to `true`. + * @since v20.2.0 + */ + function only(name?: string, options?: TestOptions, fn?: TestFn): Promise; + function only(name?: string, fn?: TestFn): Promise; + function only(options?: TestOptions, fn?: TestFn): Promise; + function only(fn?: TestFn): Promise; + /** + * The type of a function passed to {@link test}. The first argument to this function is a {@link TestContext} object. + * If the test uses callbacks, the callback function is passed as the second argument. + */ + type TestFn = (t: TestContext, done: (result?: any) => void) => void | Promise; + /** + * The type of a suite test function. The argument to this function is a {@link SuiteContext} object. + */ + type SuiteFn = (s: SuiteContext) => void | Promise; + interface TestShard { + /** + * A positive integer between 1 and `total` that specifies the index of the shard to run. + */ + index: number; + /** + * A positive integer that specifies the total number of shards to split the test files to. + */ + total: number; + } + interface RunOptions { + /** + * If a number is provided, then that many test processes would run in parallel, where each process corresponds to one test file. + * If `true`, it would run `os.availableParallelism() - 1` test files in parallel. If `false`, it would only run one test file at a time. 
+ * @default false + */ + concurrency?: number | boolean | undefined; + /** + * An array containing the list of files to run. If omitted, files are run according to the + * [test runner execution model](https://nodejs.org/docs/latest-v22.x/api/test.html#test-runner-execution-model). + */ + files?: readonly string[] | undefined; + /** + * Configures the test runner to exit the process once all known + * tests have finished executing even if the event loop would + * otherwise remain active. + * @default false + */ + forceExit?: boolean | undefined; + /** + * An array containing the list of glob patterns to match test files. + * This option cannot be used together with `files`. If omitted, files are run according to the + * [test runner execution model](https://nodejs.org/docs/latest-v22.x/api/test.html#test-runner-execution-model). + * @since v22.6.0 + */ + globPatterns?: readonly string[] | undefined; + /** + * Sets inspector port of test child process. + * This can be a number, or a function that takes no arguments and returns a + * number. If a nullish value is provided, each process gets its own port, + * incremented from the primary's `process.debugPort`. This option is ignored + * if the `isolation` option is set to `'none'` as no child processes are + * spawned. + * @default undefined + */ + inspectPort?: number | (() => number) | undefined; + /** + * Configures the type of test isolation. If set to + * `'process'`, each test file is run in a separate child process. If set to + * `'none'`, all test files run in the current process. + * @default 'process' + * @since v22.8.0 + */ + isolation?: "process" | "none" | undefined; + /** + * If truthy, the test context will only run tests that have the `only` option set + */ + only?: boolean | undefined; + /** + * A function that accepts the `TestsStream` instance and can be used to setup listeners before any tests are run. + * @default undefined + */ + setup?: ((reporter: TestsStream) => void | Promise) | undefined; + /** + * An array of CLI flags to pass to the `node` executable when + * spawning the subprocesses. This option has no effect when `isolation` is `'none`'. + * @since v22.10.0 + * @default [] + */ + execArgv?: readonly string[] | undefined; + /** + * An array of CLI flags to pass to each test file when spawning the + * subprocesses. This option has no effect when `isolation` is `'none'`. + * @since v22.10.0 + * @default [] + */ + argv?: readonly string[] | undefined; + /** + * Allows aborting an in-progress test execution. + */ + signal?: AbortSignal | undefined; + /** + * If provided, only run tests whose name matches the provided pattern. + * Strings are interpreted as JavaScript regular expressions. + * @default undefined + */ + testNamePatterns?: string | RegExp | ReadonlyArray | undefined; + /** + * A String, RegExp or a RegExp Array, that can be used to exclude running tests whose + * name matches the provided pattern. Test name patterns are interpreted as JavaScript + * regular expressions. For each test that is executed, any corresponding test hooks, + * such as `beforeEach()`, are also run. + * @default undefined + * @since v22.1.0 + */ + testSkipPatterns?: string | RegExp | ReadonlyArray | undefined; + /** + * The number of milliseconds after which the test execution will fail. + * If unspecified, subtests inherit this value from their parent. + * @default Infinity + */ + timeout?: number | undefined; + /** + * Whether to run in watch mode or not. 
+ * @default false + */ + watch?: boolean | undefined; + /** + * Running tests in a specific shard. + * @default undefined + */ + shard?: TestShard | undefined; + /** + * enable [code coverage](https://nodejs.org/docs/latest-v22.x/api/test.html#collecting-code-coverage) collection. + * @since v22.10.0 + * @default false + */ + coverage?: boolean | undefined; + /** + * Excludes specific files from code coverage + * using a glob pattern, which can match both absolute and relative file paths. + * This property is only applicable when `coverage` was set to `true`. + * If both `coverageExcludeGlobs` and `coverageIncludeGlobs` are provided, + * files must meet **both** criteria to be included in the coverage report. + * @since v22.10.0 + * @default undefined + */ + coverageExcludeGlobs?: string | readonly string[] | undefined; + /** + * Includes specific files in code coverage + * using a glob pattern, which can match both absolute and relative file paths. + * This property is only applicable when `coverage` was set to `true`. + * If both `coverageExcludeGlobs` and `coverageIncludeGlobs` are provided, + * files must meet **both** criteria to be included in the coverage report. + * @since v22.10.0 + * @default undefined + */ + coverageIncludeGlobs?: string | readonly string[] | undefined; + /** + * Require a minimum percent of covered lines. If code + * coverage does not reach the threshold specified, the process will exit with code `1`. + * @since v22.10.0 + * @default 0 + */ + lineCoverage?: number | undefined; + /** + * Require a minimum percent of covered branches. If code + * coverage does not reach the threshold specified, the process will exit with code `1`. + * @since v22.10.0 + * @default 0 + */ + branchCoverage?: number | undefined; + /** + * Require a minimum percent of covered functions. If code + * coverage does not reach the threshold specified, the process will exit with code `1`. + * @since v22.10.0 + * @default 0 + */ + functionCoverage?: number | undefined; + } + /** + * A successful call to `run()` will return a new `TestsStream` object, streaming a series of events representing the execution of the tests. + * + * Some of the events are guaranteed to be emitted in the same order as the tests are defined, while others are emitted in the order that the tests execute. 
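+ *
+ * For example, a sketch that reacts to individual events while still piping a reporter
+ * (the file name is illustrative):
+ *
+ * ```js
+ * import { run } from 'node:test';
+ * import { spec } from 'node:test/reporters';
+ * import process from 'node:process';
+ *
+ * run({ files: ['./tests/example.test.js'] })
+ *   .on('test:fail', () => {
+ *     process.exitCode = 1;
+ *   })
+ *   .compose(spec)
+ *   .pipe(process.stdout);
+ * ```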
+ * @since v18.9.0, v16.19.0 + */ + class TestsStream extends Readable implements NodeJS.ReadableStream { + addListener(event: "test:coverage", listener: (data: TestCoverage) => void): this; + addListener(event: "test:complete", listener: (data: TestComplete) => void): this; + addListener(event: "test:dequeue", listener: (data: TestDequeue) => void): this; + addListener(event: "test:diagnostic", listener: (data: DiagnosticData) => void): this; + addListener(event: "test:enqueue", listener: (data: TestEnqueue) => void): this; + addListener(event: "test:fail", listener: (data: TestFail) => void): this; + addListener(event: "test:pass", listener: (data: TestPass) => void): this; + addListener(event: "test:plan", listener: (data: TestPlan) => void): this; + addListener(event: "test:start", listener: (data: TestStart) => void): this; + addListener(event: "test:stderr", listener: (data: TestStderr) => void): this; + addListener(event: "test:stdout", listener: (data: TestStdout) => void): this; + addListener(event: "test:summary", listener: (data: TestSummary) => void): this; + addListener(event: "test:watch:drained", listener: () => void): this; + addListener(event: string, listener: (...args: any[]) => void): this; + emit(event: "test:coverage", data: TestCoverage): boolean; + emit(event: "test:complete", data: TestComplete): boolean; + emit(event: "test:dequeue", data: TestDequeue): boolean; + emit(event: "test:diagnostic", data: DiagnosticData): boolean; + emit(event: "test:enqueue", data: TestEnqueue): boolean; + emit(event: "test:fail", data: TestFail): boolean; + emit(event: "test:pass", data: TestPass): boolean; + emit(event: "test:plan", data: TestPlan): boolean; + emit(event: "test:start", data: TestStart): boolean; + emit(event: "test:stderr", data: TestStderr): boolean; + emit(event: "test:stdout", data: TestStdout): boolean; + emit(event: "test:summary", data: TestSummary): boolean; + emit(event: "test:watch:drained"): boolean; + emit(event: string | symbol, ...args: any[]): boolean; + on(event: "test:coverage", listener: (data: TestCoverage) => void): this; + on(event: "test:complete", listener: (data: TestComplete) => void): this; + on(event: "test:dequeue", listener: (data: TestDequeue) => void): this; + on(event: "test:diagnostic", listener: (data: DiagnosticData) => void): this; + on(event: "test:enqueue", listener: (data: TestEnqueue) => void): this; + on(event: "test:fail", listener: (data: TestFail) => void): this; + on(event: "test:pass", listener: (data: TestPass) => void): this; + on(event: "test:plan", listener: (data: TestPlan) => void): this; + on(event: "test:start", listener: (data: TestStart) => void): this; + on(event: "test:stderr", listener: (data: TestStderr) => void): this; + on(event: "test:stdout", listener: (data: TestStdout) => void): this; + on(event: "test:summary", listener: (data: TestSummary) => void): this; + on(event: "test:watch:drained", listener: () => void): this; + on(event: string, listener: (...args: any[]) => void): this; + once(event: "test:coverage", listener: (data: TestCoverage) => void): this; + once(event: "test:complete", listener: (data: TestComplete) => void): this; + once(event: "test:dequeue", listener: (data: TestDequeue) => void): this; + once(event: "test:diagnostic", listener: (data: DiagnosticData) => void): this; + once(event: "test:enqueue", listener: (data: TestEnqueue) => void): this; + once(event: "test:fail", listener: (data: TestFail) => void): this; + once(event: "test:pass", listener: (data: TestPass) => void): this; + 
once(event: "test:plan", listener: (data: TestPlan) => void): this; + once(event: "test:start", listener: (data: TestStart) => void): this; + once(event: "test:stderr", listener: (data: TestStderr) => void): this; + once(event: "test:stdout", listener: (data: TestStdout) => void): this; + once(event: "test:summary", listener: (data: TestSummary) => void): this; + once(event: "test:watch:drained", listener: () => void): this; + once(event: string, listener: (...args: any[]) => void): this; + prependListener(event: "test:coverage", listener: (data: TestCoverage) => void): this; + prependListener(event: "test:complete", listener: (data: TestComplete) => void): this; + prependListener(event: "test:dequeue", listener: (data: TestDequeue) => void): this; + prependListener(event: "test:diagnostic", listener: (data: DiagnosticData) => void): this; + prependListener(event: "test:enqueue", listener: (data: TestEnqueue) => void): this; + prependListener(event: "test:fail", listener: (data: TestFail) => void): this; + prependListener(event: "test:pass", listener: (data: TestPass) => void): this; + prependListener(event: "test:plan", listener: (data: TestPlan) => void): this; + prependListener(event: "test:start", listener: (data: TestStart) => void): this; + prependListener(event: "test:stderr", listener: (data: TestStderr) => void): this; + prependListener(event: "test:stdout", listener: (data: TestStdout) => void): this; + prependListener(event: "test:summary", listener: (data: TestSummary) => void): this; + prependListener(event: "test:watch:drained", listener: () => void): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + prependOnceListener(event: "test:coverage", listener: (data: TestCoverage) => void): this; + prependOnceListener(event: "test:complete", listener: (data: TestComplete) => void): this; + prependOnceListener(event: "test:dequeue", listener: (data: TestDequeue) => void): this; + prependOnceListener(event: "test:diagnostic", listener: (data: DiagnosticData) => void): this; + prependOnceListener(event: "test:enqueue", listener: (data: TestEnqueue) => void): this; + prependOnceListener(event: "test:fail", listener: (data: TestFail) => void): this; + prependOnceListener(event: "test:pass", listener: (data: TestPass) => void): this; + prependOnceListener(event: "test:plan", listener: (data: TestPlan) => void): this; + prependOnceListener(event: "test:start", listener: (data: TestStart) => void): this; + prependOnceListener(event: "test:stderr", listener: (data: TestStderr) => void): this; + prependOnceListener(event: "test:stdout", listener: (data: TestStdout) => void): this; + prependOnceListener(event: "test:summary", listener: (data: TestSummary) => void): this; + prependOnceListener(event: "test:watch:drained", listener: () => void): this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + } + /** + * An instance of `TestContext` is passed to each test function in order to + * interact with the test runner. However, the `TestContext` constructor is not + * exposed as part of the API. + * @since v18.0.0, v16.17.0 + */ + class TestContext { + /** + * An object containing assertion methods bound to the test context. + * The top-level functions from the `node:assert` module are exposed here for the purpose of creating test plans. + * @since v22.2.0, v20.15.0 + */ + readonly assert: TestContextAssert; + /** + * This function is used to create a hook running before subtest of the current test. + * @param fn The hook function. 
The first argument to this function is a `TestContext` object. + * If the hook uses callbacks, the callback function is passed as the second argument. + * @param options Configuration options for the hook. + * @since v20.1.0, v18.17.0 + */ + before(fn?: TestContextHookFn, options?: HookOptions): void; + /** + * This function is used to create a hook running before each subtest of the current test. + * @param fn The hook function. The first argument to this function is a `TestContext` object. + * If the hook uses callbacks, the callback function is passed as the second argument. + * @param options Configuration options for the hook. + * @since v18.8.0 + */ + beforeEach(fn?: TestContextHookFn, options?: HookOptions): void; + /** + * This function is used to create a hook that runs after the current test finishes. + * @param fn The hook function. The first argument to this function is a `TestContext` object. + * If the hook uses callbacks, the callback function is passed as the second argument. + * @param options Configuration options for the hook. + * @since v18.13.0 + */ + after(fn?: TestContextHookFn, options?: HookOptions): void; + /** + * This function is used to create a hook running after each subtest of the current test. + * @param fn The hook function. The first argument to this function is a `TestContext` object. + * If the hook uses callbacks, the callback function is passed as the second argument. + * @param options Configuration options for the hook. + * @since v18.8.0 + */ + afterEach(fn?: TestContextHookFn, options?: HookOptions): void; + /** + * This function is used to write diagnostics to the output. Any diagnostic + * information is included at the end of the test's results. This function does + * not return a value. + * + * ```js + * test('top level test', (t) => { + * t.diagnostic('A diagnostic message'); + * }); + * ``` + * @since v18.0.0, v16.17.0 + * @param message Message to be reported. + */ + diagnostic(message: string): void; + /** + * The absolute path of the test file that created the current test. If a test file imports + * additional modules that generate tests, the imported tests will return the path of the root test file. + * @since v22.6.0 + */ + readonly filePath: string | undefined; + /** + * The name of the test and each of its ancestors, separated by `>`. + * @since v22.3.0 + */ + readonly fullName: string; + /** + * The name of the test. + * @since v18.8.0, v16.18.0 + */ + readonly name: string; + /** + * Used to set the number of assertions and subtests that are expected to run within the test. + * If the number of assertions and subtests that run does not match the expected count, the test will fail. + * + * To make sure assertions are tracked, the assert functions on `context.assert` must be used, + * instead of importing from the `node:assert` module. 
+ * ```js + * test('top level test', (t) => { + * t.plan(2); + * t.assert.ok('some relevant assertion here'); + * t.test('subtest', () => {}); + * }); + * ``` + * + * When working with asynchronous code, the `plan` function can be used to ensure that the correct number of assertions are run: + * ```js + * test('planning with streams', (t, done) => { + * function* generate() { + * yield 'a'; + * yield 'b'; + * yield 'c'; + * } + * const expected = ['a', 'b', 'c']; + * t.plan(expected.length); + * const stream = Readable.from(generate()); + * stream.on('data', (chunk) => { + * t.assert.strictEqual(chunk, expected.shift()); + * }); + * stream.on('end', () => { + * done(); + * }); + * }); + * ``` + * @since v22.2.0 + */ + plan(count: number): void; + /** + * If `shouldRunOnlyTests` is truthy, the test context will only run tests that + * have the `only` option set. Otherwise, all tests are run. If Node.js was not + * started with the `--test-only` command-line option, this function is a + * no-op. + * + * ```js + * test('top level test', (t) => { + * // The test context can be set to run subtests with the 'only' option. + * t.runOnly(true); + * return Promise.all([ + * t.test('this subtest is now skipped'), + * t.test('this subtest is run', { only: true }), + * ]); + * }); + * ``` + * @since v18.0.0, v16.17.0 + * @param shouldRunOnlyTests Whether or not to run `only` tests. + */ + runOnly(shouldRunOnlyTests: boolean): void; + /** + * ```js + * test('top level test', async (t) => { + * await fetch('some/uri', { signal: t.signal }); + * }); + * ``` + * @since v18.7.0, v16.17.0 + */ + readonly signal: AbortSignal; + /** + * This function causes the test's output to indicate the test as skipped. If `message` is provided, it is included in the output. Calling `skip()` does + * not terminate execution of the test function. This function does not return a + * value. + * + * ```js + * test('top level test', (t) => { + * // Make sure to return here as well if the test contains additional logic. + * t.skip('this is skipped'); + * }); + * ``` + * @since v18.0.0, v16.17.0 + * @param message Optional skip message. + */ + skip(message?: string): void; + /** + * This function adds a `TODO` directive to the test's output. If `message` is + * provided, it is included in the output. Calling `todo()` does not terminate + * execution of the test function. This function does not return a value. + * + * ```js + * test('top level test', (t) => { + * // This test is marked as `TODO` + * t.todo('this is a todo'); + * }); + * ``` + * @since v18.0.0, v16.17.0 + * @param message Optional `TODO` message. + */ + todo(message?: string): void; + /** + * This function is used to create subtests under the current test. This function behaves in + * the same fashion as the top level {@link test} function. + * @since v18.0.0 + * @param name The name of the test, which is displayed when reporting test results. + * Defaults to the `name` property of `fn`, or `''` if `fn` does not have a name. + * @param options Configuration options for the test. + * @param fn The function under test. This first argument to this function is a {@link TestContext} object. + * If the test uses callbacks, the callback function is passed as the second argument. + * @returns A {@link Promise} resolved with `undefined` once the test completes. + */ + test: typeof test; + /** + * Each test provides its own MockTracker instance. 
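+ *
+ * A sketch that relies on the per-test tracker (the mocked value is illustrative); the
+ * mock is restored automatically once the test completes:
+ *
+ * ```js
+ * import test from 'node:test';
+ * import assert from 'node:assert';
+ *
+ * test('uses the per-test tracker', (t) => {
+ *   const random = t.mock.method(Math, 'random', () => 0.5);
+ *   assert.strictEqual(Math.random(), 0.5);
+ *   assert.strictEqual(random.mock.callCount(), 1);
+ * });
+ * ```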
+ */ + readonly mock: MockTracker; + } + interface TestContextAssert { + /** + * Identical to the `deepEqual` function from the `node:assert` module, but bound to the test context. + */ + deepEqual: typeof import("node:assert").deepEqual; + /** + * Identical to the `deepStrictEqual` function from the `node:assert` module, but bound to the test context. + * + * **Note:** as this method returns a type assertion, the context parameter in the callback signature must have a + * type annotation, otherwise an error will be raised by the TypeScript compiler: + * ```ts + * import { test, type TestContext } from 'node:test'; + * + * // The test function's context parameter must have a type annotation. + * test('example', (t: TestContext) => { + * t.assert.deepStrictEqual(actual, expected); + * }); + * + * // Omitting the type annotation will result in a compilation error. + * test('example', t => { + * t.assert.deepStrictEqual(actual, expected); // Error: 't' needs an explicit type annotation. + * }); + * ``` + */ + deepStrictEqual: typeof import("node:assert").deepStrictEqual; + /** + * Identical to the `doesNotMatch` function from the `node:assert` module, but bound to the test context. + */ + doesNotMatch: typeof import("node:assert").doesNotMatch; + /** + * Identical to the `doesNotReject` function from the `node:assert` module, but bound to the test context. + */ + doesNotReject: typeof import("node:assert").doesNotReject; + /** + * Identical to the `doesNotThrow` function from the `node:assert` module, but bound to the test context. + */ + doesNotThrow: typeof import("node:assert").doesNotThrow; + /** + * Identical to the `equal` function from the `node:assert` module, but bound to the test context. + */ + equal: typeof import("node:assert").equal; + /** + * Identical to the `fail` function from the `node:assert` module, but bound to the test context. + */ + fail: typeof import("node:assert").fail; + /** + * Identical to the `ifError` function from the `node:assert` module, but bound to the test context. + * + * **Note:** as this method returns a type assertion, the context parameter in the callback signature must have a + * type annotation, otherwise an error will be raised by the TypeScript compiler: + * ```ts + * import { test, type TestContext } from 'node:test'; + * + * // The test function's context parameter must have a type annotation. + * test('example', (t: TestContext) => { + * t.assert.ifError(err); + * }); + * + * // Omitting the type annotation will result in a compilation error. + * test('example', t => { + * t.assert.ifError(err); // Error: 't' needs an explicit type annotation. + * }); + * ``` + */ + ifError: typeof import("node:assert").ifError; + /** + * Identical to the `match` function from the `node:assert` module, but bound to the test context. + */ + match: typeof import("node:assert").match; + /** + * Identical to the `notDeepEqual` function from the `node:assert` module, but bound to the test context. + */ + notDeepEqual: typeof import("node:assert").notDeepEqual; + /** + * Identical to the `notDeepStrictEqual` function from the `node:assert` module, but bound to the test context. + */ + notDeepStrictEqual: typeof import("node:assert").notDeepStrictEqual; + /** + * Identical to the `notEqual` function from the `node:assert` module, but bound to the test context. + */ + notEqual: typeof import("node:assert").notEqual; + /** + * Identical to the `notStrictEqual` function from the `node:assert` module, but bound to the test context. 
+ */ + notStrictEqual: typeof import("node:assert").notStrictEqual; + /** + * Identical to the `ok` function from the `node:assert` module, but bound to the test context. + * + * **Note:** as this method returns a type assertion, the context parameter in the callback signature must have a + * type annotation, otherwise an error will be raised by the TypeScript compiler: + * ```ts + * import { test, type TestContext } from 'node:test'; + * + * // The test function's context parameter must have a type annotation. + * test('example', (t: TestContext) => { + * t.assert.ok(condition); + * }); + * + * // Omitting the type annotation will result in a compilation error. + * test('example', t => { + * t.assert.ok(condition)); // Error: 't' needs an explicit type annotation. + * }); + * ``` + */ + ok: typeof import("node:assert").ok; + /** + * Identical to the `rejects` function from the `node:assert` module, but bound to the test context. + */ + rejects: typeof import("node:assert").rejects; + /** + * Identical to the `strictEqual` function from the `node:assert` module, but bound to the test context. + * + * **Note:** as this method returns a type assertion, the context parameter in the callback signature must have a + * type annotation, otherwise an error will be raised by the TypeScript compiler: + * ```ts + * import { test, type TestContext } from 'node:test'; + * + * // The test function's context parameter must have a type annotation. + * test('example', (t: TestContext) => { + * t.assert.strictEqual(actual, expected); + * }); + * + * // Omitting the type annotation will result in a compilation error. + * test('example', t => { + * t.assert.strictEqual(actual, expected); // Error: 't' needs an explicit type annotation. + * }); + * ``` + */ + strictEqual: typeof import("node:assert").strictEqual; + /** + * Identical to the `throws` function from the `node:assert` module, but bound to the test context. + */ + throws: typeof import("node:assert").throws; + /** + * This function implements assertions for snapshot testing. + * ```js + * test('snapshot test with default serialization', (t) => { + * t.assert.snapshot({ value1: 1, value2: 2 }); + * }); + * + * test('snapshot test with custom serialization', (t) => { + * t.assert.snapshot({ value3: 3, value4: 4 }, { + * serializers: [(value) => JSON.stringify(value)] + * }); + * }); + * ``` + * + * Only available through the [--experimental-test-snapshots](https://nodejs.org/api/cli.html#--experimental-test-snapshots) flag. + * @since v22.3.0 + * @experimental + */ + snapshot(value: any, options?: AssertSnapshotOptions): void; + } + interface AssertSnapshotOptions { + /** + * An array of synchronous functions used to serialize `value` into a string. + * `value` is passed as the only argument to the first serializer function. + * The return value of each serializer is passed as input to the next serializer. + * Once all serializers have run, the resulting value is coerced to a string. + * + * If no serializers are provided, the test runner's default serializers are used. + */ + serializers?: ReadonlyArray<(value: any) => any> | undefined; + } + + /** + * An instance of `SuiteContext` is passed to each suite function in order to + * interact with the test runner. However, the `SuiteContext` constructor is not + * exposed as part of the API. + * @since v18.7.0, v16.17.0 + */ + class SuiteContext { + /** + * The absolute path of the test file that created the current suite. 
If a test file imports + * additional modules that generate suites, the imported suites will return the path of the root test file. + * @since v22.6.0 + */ + readonly filePath: string | undefined; + /** + * The name of the suite. + * @since v18.8.0, v16.18.0 + */ + readonly name: string; + /** + * Can be used to abort test subtasks when the test has been aborted. + * @since v18.7.0, v16.17.0 + */ + readonly signal: AbortSignal; + } + interface TestOptions { + /** + * If a number is provided, then that many tests would run in parallel. + * If truthy, it would run (number of cpu cores - 1) tests in parallel. + * For subtests, it will be `Infinity` tests in parallel. + * If falsy, it would only run one test at a time. + * If unspecified, subtests inherit this value from their parent. + * @default false + */ + concurrency?: number | boolean | undefined; + /** + * If truthy, and the test context is configured to run `only` tests, then this test will be + * run. Otherwise, the test is skipped. + * @default false + */ + only?: boolean | undefined; + /** + * Allows aborting an in-progress test. + * @since v18.8.0 + */ + signal?: AbortSignal | undefined; + /** + * If truthy, the test is skipped. If a string is provided, that string is displayed in the + * test results as the reason for skipping the test. + * @default false + */ + skip?: boolean | string | undefined; + /** + * A number of milliseconds the test will fail after. If unspecified, subtests inherit this + * value from their parent. + * @default Infinity + * @since v18.7.0 + */ + timeout?: number | undefined; + /** + * If truthy, the test marked as `TODO`. If a string is provided, that string is displayed in + * the test results as the reason why the test is `TODO`. + * @default false + */ + todo?: boolean | string | undefined; + /** + * The number of assertions and subtests expected to be run in the test. + * If the number of assertions run in the test does not match the number + * specified in the plan, the test will fail. + * @default undefined + * @since v22.2.0 + */ + plan?: number | undefined; + } + /** + * This function creates a hook that runs before executing a suite. + * + * ```js + * describe('tests', async () => { + * before(() => console.log('about to run some test')); + * it('is a subtest', () => { + * assert.ok('some relevant assertion here'); + * }); + * }); + * ``` + * @since v18.8.0, v16.18.0 + * @param fn The hook function. If the hook uses callbacks, the callback function is passed as the second argument. + * @param options Configuration options for the hook. + */ + function before(fn?: HookFn, options?: HookOptions): void; + /** + * This function creates a hook that runs after executing a suite. + * + * ```js + * describe('tests', async () => { + * after(() => console.log('finished running tests')); + * it('is a subtest', () => { + * assert.ok('some relevant assertion here'); + * }); + * }); + * ``` + * @since v18.8.0, v16.18.0 + * @param fn The hook function. If the hook uses callbacks, the callback function is passed as the second argument. + * @param options Configuration options for the hook. + */ + function after(fn?: HookFn, options?: HookOptions): void; + /** + * This function creates a hook that runs before each test in the current suite. + * + * ```js + * describe('tests', async () => { + * beforeEach(() => console.log('about to run a test')); + * it('is a subtest', () => { + * assert.ok('some relevant assertion here'); + * }); + * }); + * ``` + * @since v18.8.0, v16.18.0 + * @param fn The hook function. 
If the hook uses callbacks, the callback function is passed as the second argument. + * @param options Configuration options for the hook. + */ + function beforeEach(fn?: HookFn, options?: HookOptions): void; + /** + * This function creates a hook that runs after each test in the current suite. + * The `afterEach()` hook is run even if the test fails. + * + * ```js + * describe('tests', async () => { + * afterEach(() => console.log('finished running a test')); + * it('is a subtest', () => { + * assert.ok('some relevant assertion here'); + * }); + * }); + * ``` + * @since v18.8.0, v16.18.0 + * @param fn The hook function. If the hook uses callbacks, the callback function is passed as the second argument. + * @param options Configuration options for the hook. + */ + function afterEach(fn?: HookFn, options?: HookOptions): void; + /** + * The hook function. The first argument is the context in which the hook is called. + * If the hook uses callbacks, the callback function is passed as the second argument. + */ + type HookFn = (c: TestContext | SuiteContext, done: (result?: any) => void) => any; + /** + * The hook function. The first argument is a `TestContext` object. + * If the hook uses callbacks, the callback function is passed as the second argument. + */ + type TestContextHookFn = (t: TestContext, done: (result?: any) => void) => any; + /** + * Configuration options for hooks. + * @since v18.8.0 + */ + interface HookOptions { + /** + * Allows aborting an in-progress hook. + */ + signal?: AbortSignal | undefined; + /** + * A number of milliseconds the hook will fail after. If unspecified, subtests inherit this + * value from their parent. + * @default Infinity + */ + timeout?: number | undefined; + } + interface MockFunctionOptions { + /** + * The number of times that the mock will use the behavior of `implementation`. + * Once the mock function has been called `times` times, + * it will automatically restore the behavior of `original`. + * This value must be an integer greater than zero. + * @default Infinity + */ + times?: number | undefined; + } + interface MockMethodOptions extends MockFunctionOptions { + /** + * If `true`, `object[methodName]` is treated as a getter. + * This option cannot be used with the `setter` option. + */ + getter?: boolean | undefined; + /** + * If `true`, `object[methodName]` is treated as a setter. + * This option cannot be used with the `getter` option. + */ + setter?: boolean | undefined; + } + type Mock = F & { + mock: MockFunctionContext; + }; + type NoOpFunction = (...args: any[]) => undefined; + type FunctionPropertyNames = { + [K in keyof T]: T[K] extends Function ? K : never; + }[keyof T]; + interface MockModuleOptions { + /** + * If false, each call to `require()` or `import()` generates a new mock module. + * If true, subsequent calls will return the same module mock, and the mock module is inserted into the CommonJS cache. + * @default false + */ + cache?: boolean | undefined; + /** + * The value to use as the mocked module's default export. + * + * If this value is not provided, ESM mocks do not include a default export. + * If the mock is a CommonJS or builtin module, this setting is used as the value of `module.exports`. + * If this value is not provided, CJS and builtin mocks use an empty object as the value of `module.exports`. + */ + defaultExport?: any; + /** + * An object whose keys and values are used to create the named exports of the mock module. 
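+ *
+ * A sketch of named exports on a mocked builtin module, assuming the process was started
+ * with `--experimental-test-module-mocks` (the mocked value is illustrative):
+ *
+ * ```js
+ * import { mock, test } from 'node:test';
+ * import assert from 'node:assert';
+ *
+ * test('mocks a builtin module', async () => {
+ *   mock.module('node:readline', {
+ *     namedExports: { fn() { return 42; } },
+ *   });
+ *   const readline = await import('node:readline');
+ *   assert.strictEqual(readline.fn(), 42);
+ * });
+ * ```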
+ * + * If the mock is a CommonJS or builtin module, these values are copied onto `module.exports`. + * Therefore, if a mock is created with both named exports and a non-object default export, + * the mock will throw an exception when used as a CJS or builtin module. + */ + namedExports?: object | undefined; + } + /** + * The `MockTracker` class is used to manage mocking functionality. The test runner + * module provides a top level `mock` export which is a `MockTracker` instance. + * Each test also provides its own `MockTracker` instance via the test context's `mock` property. + * @since v19.1.0, v18.13.0 + */ + class MockTracker { + /** + * This function is used to create a mock function. + * + * The following example creates a mock function that increments a counter by one + * on each invocation. The `times` option is used to modify the mock behavior such + * that the first two invocations add two to the counter instead of one. + * + * ```js + * test('mocks a counting function', (t) => { + * let cnt = 0; + * + * function addOne() { + * cnt++; + * return cnt; + * } + * + * function addTwo() { + * cnt += 2; + * return cnt; + * } + * + * const fn = t.mock.fn(addOne, addTwo, { times: 2 }); + * + * assert.strictEqual(fn(), 2); + * assert.strictEqual(fn(), 4); + * assert.strictEqual(fn(), 5); + * assert.strictEqual(fn(), 6); + * }); + * ``` + * @since v19.1.0, v18.13.0 + * @param original An optional function to create a mock on. + * @param implementation An optional function used as the mock implementation for `original`. This is useful for creating mocks that exhibit one behavior for a specified number of calls and + * then restore the behavior of `original`. + * @param options Optional configuration options for the mock function. + * @return The mocked function. The mocked function contains a special `mock` property, which is an instance of {@link MockFunctionContext}, and can be used for inspecting and changing the + * behavior of the mocked function. + */ + fn(original?: F, options?: MockFunctionOptions): Mock; + fn( + original?: F, + implementation?: Implementation, + options?: MockFunctionOptions, + ): Mock; + /** + * This function is used to create a mock on an existing object method. The + * following example demonstrates how a mock is created on an existing object + * method. + * + * ```js + * test('spies on an object method', (t) => { + * const number = { + * value: 5, + * subtract(a) { + * return this.value - a; + * }, + * }; + * + * t.mock.method(number, 'subtract'); + * assert.strictEqual(number.subtract.mock.calls.length, 0); + * assert.strictEqual(number.subtract(3), 2); + * assert.strictEqual(number.subtract.mock.calls.length, 1); + * + * const call = number.subtract.mock.calls[0]; + * + * assert.deepStrictEqual(call.arguments, [3]); + * assert.strictEqual(call.result, 2); + * assert.strictEqual(call.error, undefined); + * assert.strictEqual(call.target, undefined); + * assert.strictEqual(call.this, number); + * }); + * ``` + * @since v19.1.0, v18.13.0 + * @param object The object whose method is being mocked. + * @param methodName The identifier of the method on `object` to mock. If `object[methodName]` is not a function, an error is thrown. + * @param implementation An optional function used as the mock implementation for `object[methodName]`. + * @param options Optional configuration options for the mock method. + * @return The mocked method. 
The mocked method contains a special `mock` property, which is an instance of {@link MockFunctionContext}, and can be used for inspecting and changing the + * behavior of the mocked method. + */ + method< + MockedObject extends object, + MethodName extends FunctionPropertyNames, + >( + object: MockedObject, + methodName: MethodName, + options?: MockFunctionOptions, + ): MockedObject[MethodName] extends Function ? Mock + : never; + method< + MockedObject extends object, + MethodName extends FunctionPropertyNames, + Implementation extends Function, + >( + object: MockedObject, + methodName: MethodName, + implementation: Implementation, + options?: MockFunctionOptions, + ): MockedObject[MethodName] extends Function ? Mock + : never; + method( + object: MockedObject, + methodName: keyof MockedObject, + options: MockMethodOptions, + ): Mock; + method( + object: MockedObject, + methodName: keyof MockedObject, + implementation: Function, + options: MockMethodOptions, + ): Mock; + + /** + * This function is syntax sugar for `MockTracker.method` with `options.getter` set to `true`. + * @since v19.3.0, v18.13.0 + */ + getter< + MockedObject extends object, + MethodName extends keyof MockedObject, + >( + object: MockedObject, + methodName: MethodName, + options?: MockFunctionOptions, + ): Mock<() => MockedObject[MethodName]>; + getter< + MockedObject extends object, + MethodName extends keyof MockedObject, + Implementation extends Function, + >( + object: MockedObject, + methodName: MethodName, + implementation?: Implementation, + options?: MockFunctionOptions, + ): Mock<(() => MockedObject[MethodName]) | Implementation>; + /** + * This function is syntax sugar for `MockTracker.method` with `options.setter` set to `true`. + * @since v19.3.0, v18.13.0 + */ + setter< + MockedObject extends object, + MethodName extends keyof MockedObject, + >( + object: MockedObject, + methodName: MethodName, + options?: MockFunctionOptions, + ): Mock<(value: MockedObject[MethodName]) => void>; + setter< + MockedObject extends object, + MethodName extends keyof MockedObject, + Implementation extends Function, + >( + object: MockedObject, + methodName: MethodName, + implementation?: Implementation, + options?: MockFunctionOptions, + ): Mock<((value: MockedObject[MethodName]) => void) | Implementation>; + + /** + * This function is used to mock the exports of ECMAScript modules, CommonJS modules, and Node.js builtin modules. + * Any references to the original module prior to mocking are not impacted. + * + * Only available through the [--experimental-test-module-mocks](https://nodejs.org/api/cli.html#--experimental-test-module-mocks) flag. + * @since v22.3.0 + * @experimental + * @param specifier A string identifying the module to mock. + * @param options Optional configuration options for the mock module. + */ + module(specifier: string, options?: MockModuleOptions): MockModuleContext; + + /** + * This function restores the default behavior of all mocks that were previously + * created by this `MockTracker` and disassociates the mocks from the `MockTracker` instance. Once disassociated, the mocks can still be used, but the `MockTracker` instance can no longer be + * used to reset their behavior or + * otherwise interact with them. + * + * After each test completes, this function is called on the test context's `MockTracker`. If the global `MockTracker` is used extensively, calling this + * function manually is recommended. 
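+ *
+ * For example, a sketch that resets the top level `mock` tracker between tests (the
+ * mocked function is illustrative):
+ *
+ * ```js
+ * import { afterEach, mock, test } from 'node:test';
+ * import assert from 'node:assert';
+ *
+ * afterEach(() => {
+ *   // Unlike t.mock, the top level tracker is not reset for us.
+ *   mock.reset();
+ * });
+ *
+ * test('uses the global tracker', () => {
+ *   const fn = mock.fn(() => 42);
+ *   assert.strictEqual(fn(), 42);
+ *   assert.strictEqual(fn.mock.callCount(), 1);
+ * });
+ * ```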
+ * @since v19.1.0, v18.13.0 + */ + reset(): void; + /** + * This function restores the default behavior of all mocks that were previously + * created by this `MockTracker`. Unlike `mock.reset()`, `mock.restoreAll()` does + * not disassociate the mocks from the `MockTracker` instance. + * @since v19.1.0, v18.13.0 + */ + restoreAll(): void; + + timers: MockTimers; + } + const mock: MockTracker; + interface MockFunctionCall< + F extends Function, + ReturnType = F extends (...args: any) => infer T ? T + : F extends abstract new(...args: any) => infer T ? T + : unknown, + Args = F extends (...args: infer Y) => any ? Y + : F extends abstract new(...args: infer Y) => any ? Y + : unknown[], + > { + /** + * An array of the arguments passed to the mock function. + */ + arguments: Args; + /** + * If the mocked function threw then this property contains the thrown value. + */ + error: unknown | undefined; + /** + * The value returned by the mocked function. + * + * If the mocked function threw, it will be `undefined`. + */ + result: ReturnType | undefined; + /** + * An `Error` object whose stack can be used to determine the callsite of the mocked function invocation. + */ + stack: Error; + /** + * If the mocked function is a constructor, this field contains the class being constructed. + * Otherwise this will be `undefined`. + */ + target: F extends abstract new(...args: any) => any ? F : undefined; + /** + * The mocked function's `this` value. + */ + this: unknown; + } + /** + * The `MockFunctionContext` class is used to inspect or manipulate the behavior of + * mocks created via the `MockTracker` APIs. + * @since v19.1.0, v18.13.0 + */ + class MockFunctionContext { + /** + * A getter that returns a copy of the internal array used to track calls to the + * mock. Each entry in the array is an object with the following properties. + * @since v19.1.0, v18.13.0 + */ + readonly calls: Array>; + /** + * This function returns the number of times that this mock has been invoked. This + * function is more efficient than checking `ctx.calls.length` because `ctx.calls` is a getter that creates a copy of the internal call tracking array. + * @since v19.1.0, v18.13.0 + * @return The number of times that this mock has been invoked. + */ + callCount(): number; + /** + * This function is used to change the behavior of an existing mock. + * + * The following example creates a mock function using `t.mock.fn()`, calls the + * mock function, and then changes the mock implementation to a different function. + * + * ```js + * test('changes a mock behavior', (t) => { + * let cnt = 0; + * + * function addOne() { + * cnt++; + * return cnt; + * } + * + * function addTwo() { + * cnt += 2; + * return cnt; + * } + * + * const fn = t.mock.fn(addOne); + * + * assert.strictEqual(fn(), 1); + * fn.mock.mockImplementation(addTwo); + * assert.strictEqual(fn(), 3); + * assert.strictEqual(fn(), 5); + * }); + * ``` + * @since v19.1.0, v18.13.0 + * @param implementation The function to be used as the mock's new implementation. + */ + mockImplementation(implementation: F): void; + /** + * This function is used to change the behavior of an existing mock for a single + * invocation. Once invocation `onCall` has occurred, the mock will revert to + * whatever behavior it would have used had `mockImplementationOnce()` not been + * called. 
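
A short sketch of reading `callCount()` and the `MockFunctionCall` entries described here; the `sum` helper is made up for illustration:

```js
import assert from 'node:assert';
import { test } from 'node:test';

test('inspects recorded call metadata', (t) => {
  const sum = t.mock.fn((a, b) => a + b);

  sum(1, 2);
  sum(3, 4);

  // callCount() avoids copying the internal call-tracking array.
  assert.strictEqual(sum.mock.callCount(), 2);

  const [first] = sum.mock.calls;
  assert.deepStrictEqual(first.arguments, [1, 2]);
  assert.strictEqual(first.result, 3);
  assert.strictEqual(first.error, undefined);
});
```
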
+ * + * The following example creates a mock function using `t.mock.fn()`, calls the + * mock function, changes the mock implementation to a different function for the + * next invocation, and then resumes its previous behavior. + * + * ```js + * test('changes a mock behavior once', (t) => { + * let cnt = 0; + * + * function addOne() { + * cnt++; + * return cnt; + * } + * + * function addTwo() { + * cnt += 2; + * return cnt; + * } + * + * const fn = t.mock.fn(addOne); + * + * assert.strictEqual(fn(), 1); + * fn.mock.mockImplementationOnce(addTwo); + * assert.strictEqual(fn(), 3); + * assert.strictEqual(fn(), 4); + * }); + * ``` + * @since v19.1.0, v18.13.0 + * @param implementation The function to be used as the mock's implementation for the invocation number specified by `onCall`. + * @param onCall The invocation number that will use `implementation`. If the specified invocation has already occurred then an exception is thrown. + */ + mockImplementationOnce(implementation: F, onCall?: number): void; + /** + * Resets the call history of the mock function. + * @since v19.3.0, v18.13.0 + */ + resetCalls(): void; + /** + * Resets the implementation of the mock function to its original behavior. The + * mock can still be used after calling this function. + * @since v19.1.0, v18.13.0 + */ + restore(): void; + } + /** + * @since v22.3.0 + * @experimental + */ + class MockModuleContext { + /** + * Resets the implementation of the mock module. + * @since v22.3.0 + */ + restore(): void; + } + + type Timer = "setInterval" | "setTimeout" | "setImmediate" | "Date"; + interface MockTimersOptions { + apis: Timer[]; + now?: number | Date | undefined; + } + /** + * Mocking timers is a technique commonly used in software testing to simulate and + * control the behavior of timers, such as `setInterval` and `setTimeout`, + * without actually waiting for the specified time intervals. + * + * The MockTimers API also allows for mocking of the `Date` constructor and + * `setImmediate`/`clearImmediate` functions. + * + * The `MockTracker` provides a top-level `timers` export + * which is a `MockTimers` instance. + * @since v20.4.0 + * @experimental + */ + class MockTimers { + /** + * Enables timer mocking for the specified timers. + * + * **Note:** When you enable mocking for a specific timer, its associated + * clear function will also be implicitly mocked. + * + * **Note:** Mocking `Date` will affect the behavior of the mocked timers + * as they use the same internal clock. + * + * Example usage without setting initial time: + * + * ```js + * import { mock } from 'node:test'; + * mock.timers.enable({ apis: ['setInterval', 'Date'], now: 1234 }); + * ``` + * + * The above example enables mocking for the `Date` constructor, `setInterval` timer and + * implicitly mocks the `clearInterval` function. Only the `Date` constructor from `globalThis`, + * `setInterval` and `clearInterval` functions from `node:timers`, `node:timers/promises`, and `globalThis` will be mocked. 
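
A small sketch of `MockTimers` driving the `Date` constructor as described above; the initial `now` value of 1000 is arbitrary:

```js
import assert from 'node:assert';
import { test } from 'node:test';

test('drives Date through the mocked clock', (context) => {
  context.mock.timers.enable({ apis: ['Date'], now: 1000 });

  // Both Date.now() and new Date() observe the mocked clock.
  assert.strictEqual(Date.now(), 1000);
  assert.strictEqual(new Date().getTime(), 1000);

  // Advancing the mocked clock also advances Date.
  context.mock.timers.tick(500);
  assert.strictEqual(Date.now(), 1500);
});
```
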
+ * + * Example usage with initial time set + * + * ```js + * import { mock } from 'node:test'; + * mock.timers.enable({ apis: ['Date'], now: 1000 }); + * ``` + * + * Example usage with initial Date object as time set + * + * ```js + * import { mock } from 'node:test'; + * mock.timers.enable({ apis: ['Date'], now: new Date() }); + * ``` + * + * Alternatively, if you call `mock.timers.enable()` without any parameters: + * + * All timers (`'setInterval'`, `'clearInterval'`, `'Date'`, `'setImmediate'`, `'clearImmediate'`, `'setTimeout'`, and `'clearTimeout'`) + * will be mocked. + * + * The `setInterval`, `clearInterval`, `setTimeout`, and `clearTimeout` functions from `node:timers`, `node:timers/promises`, + * and `globalThis` will be mocked. + * The `Date` constructor from `globalThis` will be mocked. + * + * If there is no initial epoch set, the initial date will be based on 0 in the Unix epoch. This is `January 1st, 1970, 00:00:00 UTC`. You can + * set an initial date by passing a now property to the `.enable()` method. This value will be used as the initial date for the mocked Date + * object. It can either be a positive integer, or another Date object. + * @since v20.4.0 + */ + enable(options?: MockTimersOptions): void; + /** + * You can use the `.setTime()` method to manually move the mocked date to another time. This method only accepts a positive integer. + * Note: This method will execute any mocked timers that are in the past from the new time. + * In the below example we are setting a new time for the mocked date. + * ```js + * import assert from 'node:assert'; + * import { test } from 'node:test'; + * test('sets the time of a date object', (context) => { + * // Optionally choose what to mock + * context.mock.timers.enable({ apis: ['Date'], now: 100 }); + * assert.strictEqual(Date.now(), 100); + * // Advance in time will also advance the date + * context.mock.timers.setTime(1000); + * context.mock.timers.tick(200); + * assert.strictEqual(Date.now(), 1200); + * }); + * ``` + */ + setTime(time: number): void; + /** + * This function restores the default behavior of all mocks that were previously + * created by this `MockTimers` instance and disassociates the mocks + * from the `MockTracker` instance. + * + * **Note:** After each test completes, this function is called on + * the test context's `MockTracker`. + * + * ```js + * import { mock } from 'node:test'; + * mock.timers.reset(); + * ``` + * @since v20.4.0 + */ + reset(): void; + /** + * Advances time for all mocked timers. + * + * **Note:** This diverges from how `setTimeout` in Node.js behaves and accepts + * only positive numbers. In Node.js, `setTimeout` with negative numbers is + * only supported for web compatibility reasons. + * + * The following example mocks a `setTimeout` function and + * by using `.tick` advances in + * time triggering all pending timers. 
+ * + * ```js + * import assert from 'node:assert'; + * import { test } from 'node:test'; + * + * test('mocks setTimeout to be executed synchronously without having to actually wait for it', (context) => { + * const fn = context.mock.fn(); + * + * context.mock.timers.enable({ apis: ['setTimeout'] }); + * + * setTimeout(fn, 9999); + * + * assert.strictEqual(fn.mock.callCount(), 0); + * + * // Advance in time + * context.mock.timers.tick(9999); + * + * assert.strictEqual(fn.mock.callCount(), 1); + * }); + * ``` + * + * Alternativelly, the `.tick` function can be called many times + * + * ```js + * import assert from 'node:assert'; + * import { test } from 'node:test'; + * + * test('mocks setTimeout to be executed synchronously without having to actually wait for it', (context) => { + * const fn = context.mock.fn(); + * context.mock.timers.enable({ apis: ['setTimeout'] }); + * const nineSecs = 9000; + * setTimeout(fn, nineSecs); + * + * const twoSeconds = 3000; + * context.mock.timers.tick(twoSeconds); + * context.mock.timers.tick(twoSeconds); + * context.mock.timers.tick(twoSeconds); + * + * assert.strictEqual(fn.mock.callCount(), 1); + * }); + * ``` + * + * Advancing time using `.tick` will also advance the time for any `Date` object + * created after the mock was enabled (if `Date` was also set to be mocked). + * + * ```js + * import assert from 'node:assert'; + * import { test } from 'node:test'; + * + * test('mocks setTimeout to be executed synchronously without having to actually wait for it', (context) => { + * const fn = context.mock.fn(); + * + * context.mock.timers.enable({ apis: ['setTimeout', 'Date'] }); + * setTimeout(fn, 9999); + * + * assert.strictEqual(fn.mock.callCount(), 0); + * assert.strictEqual(Date.now(), 0); + * + * // Advance in time + * context.mock.timers.tick(9999); + * assert.strictEqual(fn.mock.callCount(), 1); + * assert.strictEqual(Date.now(), 9999); + * }); + * ``` + * @since v20.4.0 + */ + tick(milliseconds: number): void; + /** + * Triggers all pending mocked timers immediately. If the `Date` object is also + * mocked, it will also advance the `Date` object to the furthest timer's time. + * + * The example below triggers all pending timers immediately, + * causing them to execute without any delay. + * + * ```js + * import assert from 'node:assert'; + * import { test } from 'node:test'; + * + * test('runAll functions following the given order', (context) => { + * context.mock.timers.enable({ apis: ['setTimeout', 'Date'] }); + * const results = []; + * setTimeout(() => results.push(1), 9999); + * + * // Notice that if both timers have the same timeout, + * // the order of execution is guaranteed + * setTimeout(() => results.push(3), 8888); + * setTimeout(() => results.push(2), 8888); + * + * assert.deepStrictEqual(results, []); + * + * context.mock.timers.runAll(); + * assert.deepStrictEqual(results, [3, 2, 1]); + * // The Date object is also advanced to the furthest timer's time + * assert.strictEqual(Date.now(), 9999); + * }); + * ``` + * + * **Note:** The `runAll()` function is specifically designed for + * triggering timers in the context of timer mocking. + * It does not have any effect on real-time system + * clocks or actual timers outside of the mocking environment. + * @since v20.4.0 + */ + runAll(): void; + /** + * Calls {@link MockTimers.reset()}. + */ + [Symbol.dispose](): void; + } + /** + * Only available through the [--experimental-test-snapshots](https://nodejs.org/api/cli.html#--experimental-test-snapshots) flag. 
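
A hedged sketch of configuring the `snapshot` namespace introduced here (its two functions are documented just below); the `snapshots/` directory and the CLI invocation are assumptions for illustration:

```js
// Run with: node --experimental-test-snapshots --test
import { snapshot } from 'node:test';
import { basename, join } from 'node:path';

// Place snapshot files under snapshots/ instead of next to each test file.
snapshot.setResolveSnapshotPath((testPath) => {
  const name = testPath ? basename(testPath) : 'repl';
  return join('snapshots', `${name}.snapshot`);
});
```
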
+ * @since v22.3.0 + * @experimental + */ + namespace snapshot { + /** + * This function is used to customize the default serialization mechanism used by the test runner. + * + * By default, the test runner performs serialization by calling `JSON.stringify(value, null, 2)` on the provided value. + * `JSON.stringify()` does have limitations regarding circular structures and supported data types. + * If a more robust serialization mechanism is required, this function should be used to specify a list of custom serializers. + * + * Serializers are called in order, with the output of the previous serializer passed as input to the next. + * The final result must be a string value. + * @since v22.3.0 + * @param serializers An array of synchronous functions used as the default serializers for snapshot tests. + */ + function setDefaultSnapshotSerializers(serializers: ReadonlyArray<(value: any) => any>): void; + /** + * This function is used to set a custom resolver for the location of the snapshot file used for snapshot testing. + * By default, the snapshot filename is the same as the entry point filename with `.snapshot` appended. + * @since v22.3.0 + * @param fn A function used to compute the location of the snapshot file. + * The function receives the path of the test file as its only argument. If the + * test is not associated with a file (for example in the REPL), the input is + * undefined. `fn()` must return a string specifying the location of the snapshot file. + */ + function setResolveSnapshotPath(fn: (path: string | undefined) => string): void; + } + export { + after, + afterEach, + before, + beforeEach, + describe, + it, + Mock, + mock, + only, + run, + skip, + snapshot, + suite, + SuiteContext, + test, + test as default, + TestContext, + todo, + }; +} + +interface TestError extends Error { + cause: Error; +} +interface TestLocationInfo { + /** + * The column number where the test is defined, or + * `undefined` if the test was run through the REPL. + */ + column?: number; + /** + * The path of the test file, `undefined` if test was run through the REPL. + */ + file?: string; + /** + * The line number where the test is defined, or `undefined` if the test was run through the REPL. + */ + line?: number; +} +interface DiagnosticData extends TestLocationInfo { + /** + * The diagnostic message. + */ + message: string; + /** + * The nesting level of the test. + */ + nesting: number; +} +interface TestCoverage { + /** + * An object containing the coverage report. + */ + summary: { + /** + * An array of coverage reports for individual files. + */ + files: Array<{ + /** + * The absolute path of the file. + */ + path: string; + /** + * The total number of lines. + */ + totalLineCount: number; + /** + * The total number of branches. + */ + totalBranchCount: number; + /** + * The total number of functions. + */ + totalFunctionCount: number; + /** + * The number of covered lines. + */ + coveredLineCount: number; + /** + * The number of covered branches. + */ + coveredBranchCount: number; + /** + * The number of covered functions. + */ + coveredFunctionCount: number; + /** + * The percentage of lines covered. + */ + coveredLinePercent: number; + /** + * The percentage of branches covered. + */ + coveredBranchPercent: number; + /** + * The percentage of functions covered. + */ + coveredFunctionPercent: number; + /** + * An array of functions representing function coverage. + */ + functions: Array<{ + /** + * The name of the function. 
+ */ + name: string; + /** + * The line number where the function is defined. + */ + line: number; + /** + * The number of times the function was called. + */ + count: number; + }>; + /** + * An array of branches representing branch coverage. + */ + branches: Array<{ + /** + * The line number where the branch is defined. + */ + line: number; + /** + * The number of times the branch was taken. + */ + count: number; + }>; + /** + * An array of lines representing line numbers and the number of times they were covered. + */ + lines: Array<{ + /** + * The line number. + */ + line: number; + /** + * The number of times the line was covered. + */ + count: number; + }>; + }>; + /** + * An object containing whether or not the coverage for + * each coverage type. + * @since v22.9.0 + */ + thresholds: { + /** + * The function coverage threshold. + */ + function: number; + /** + * The branch coverage threshold. + */ + branch: number; + /** + * The line coverage threshold. + */ + line: number; + }; + /** + * An object containing a summary of coverage for all files. + */ + totals: { + /** + * The total number of lines. + */ + totalLineCount: number; + /** + * The total number of branches. + */ + totalBranchCount: number; + /** + * The total number of functions. + */ + totalFunctionCount: number; + /** + * The number of covered lines. + */ + coveredLineCount: number; + /** + * The number of covered branches. + */ + coveredBranchCount: number; + /** + * The number of covered functions. + */ + coveredFunctionCount: number; + /** + * The percentage of lines covered. + */ + coveredLinePercent: number; + /** + * The percentage of branches covered. + */ + coveredBranchPercent: number; + /** + * The percentage of functions covered. + */ + coveredFunctionPercent: number; + }; + /** + * The working directory when code coverage began. This + * is useful for displaying relative path names in case + * the tests changed the working directory of the Node.js process. + */ + workingDirectory: string; + }; + /** + * The nesting level of the test. + */ + nesting: number; +} +interface TestComplete extends TestLocationInfo { + /** + * Additional execution metadata. + */ + details: { + /** + * Whether the test passed or not. + */ + passed: boolean; + /** + * The duration of the test in milliseconds. + */ + duration_ms: number; + /** + * An error wrapping the error thrown by the test if it did not pass. + */ + error?: TestError; + /** + * The type of the test, used to denote whether this is a suite. + */ + type?: "suite"; + }; + /** + * The test name. + */ + name: string; + /** + * The nesting level of the test. + */ + nesting: number; + /** + * The ordinal number of the test. + */ + testNumber: number; + /** + * Present if `context.todo` is called. + */ + todo?: string | boolean; + /** + * Present if `context.skip` is called. + */ + skip?: string | boolean; +} +interface TestDequeue extends TestLocationInfo { + /** + * The test name. + */ + name: string; + /** + * The nesting level of the test. + */ + nesting: number; +} +interface TestEnqueue extends TestLocationInfo { + /** + * The test name. + */ + name: string; + /** + * The nesting level of the test. + */ + nesting: number; +} +interface TestFail extends TestLocationInfo { + /** + * Additional execution metadata. + */ + details: { + /** + * The duration of the test in milliseconds. + */ + duration_ms: number; + /** + * An error wrapping the error thrown by the test. + */ + error: TestError; + /** + * The type of the test, used to denote whether this is a suite. 
+ * @since v20.0.0, v19.9.0, v18.17.0 + */ + type?: "suite"; + }; + /** + * The test name. + */ + name: string; + /** + * The nesting level of the test. + */ + nesting: number; + /** + * The ordinal number of the test. + */ + testNumber: number; + /** + * Present if `context.todo` is called. + */ + todo?: string | boolean; + /** + * Present if `context.skip` is called. + */ + skip?: string | boolean; +} +interface TestPass extends TestLocationInfo { + /** + * Additional execution metadata. + */ + details: { + /** + * The duration of the test in milliseconds. + */ + duration_ms: number; + /** + * The type of the test, used to denote whether this is a suite. + * @since 20.0.0, 19.9.0, 18.17.0 + */ + type?: "suite"; + }; + /** + * The test name. + */ + name: string; + /** + * The nesting level of the test. + */ + nesting: number; + /** + * The ordinal number of the test. + */ + testNumber: number; + /** + * Present if `context.todo` is called. + */ + todo?: string | boolean; + /** + * Present if `context.skip` is called. + */ + skip?: string | boolean; +} +interface TestPlan extends TestLocationInfo { + /** + * The nesting level of the test. + */ + nesting: number; + /** + * The number of subtests that have ran. + */ + count: number; +} +interface TestStart extends TestLocationInfo { + /** + * The test name. + */ + name: string; + /** + * The nesting level of the test. + */ + nesting: number; +} +interface TestStderr { + /** + * The path of the test file. + */ + file: string; + /** + * The message written to `stderr`. + */ + message: string; +} +interface TestStdout { + /** + * The path of the test file. + */ + file: string; + /** + * The message written to `stdout`. + */ + message: string; +} +interface TestSummary { + /** + * An object containing the counts of various test results. + */ + counts: { + /** + * The total number of cancelled tests. + */ + cancelled: number; + /** + * The total number of passed tests. + */ + passed: number; + /** + * The total number of skipped tests. + */ + skipped: number; + /** + * The total number of suites run. + */ + suites: number; + /** + * The total number of tests run, excluding suites. + */ + tests: number; + /** + * The total number of TODO tests. + */ + todo: number; + /** + * The total number of top level tests and suites. + */ + topLevel: number; + }; + /** + * The duration of the test run in milliseconds. + */ + duration_ms: number; + /** + * The path of the test file that generated the + * summary. If the summary corresponds to multiple files, this value is + * `undefined`. + */ + file: string | undefined; + /** + * Indicates whether or not the test run is considered + * successful or not. If any error condition occurs, such as a failing test or + * unmet coverage threshold, this value will be set to `false`. + */ + success: boolean; +} + +/** + * The `node:test/reporters` module exposes the builtin-reporters for `node:test`. + * To access it: + * + * ```js + * import test from 'node:test/reporters'; + * ``` + * + * This module is only available under the `node:` scheme. 
The following will not + * work: + * + * ```js + * import test from 'node:test/reporters'; + * ``` + * @since v19.9.0 + * @see [source](https://github.com/nodejs/node/blob/v22.x/lib/test/reporters.js) + */ +declare module "node:test/reporters" { + import { Transform, TransformOptions } from "node:stream"; + + type TestEvent = + | { type: "test:coverage"; data: TestCoverage } + | { type: "test:complete"; data: TestComplete } + | { type: "test:dequeue"; data: TestDequeue } + | { type: "test:diagnostic"; data: DiagnosticData } + | { type: "test:enqueue"; data: TestEnqueue } + | { type: "test:fail"; data: TestFail } + | { type: "test:pass"; data: TestPass } + | { type: "test:plan"; data: TestPlan } + | { type: "test:start"; data: TestStart } + | { type: "test:stderr"; data: TestStderr } + | { type: "test:stdout"; data: TestStdout } + | { type: "test:summary"; data: TestSummary } + | { type: "test:watch:drained"; data: undefined }; + type TestEventGenerator = AsyncGenerator; + + interface ReporterConstructorWrapper Transform> { + new(...args: ConstructorParameters): InstanceType; + (...args: ConstructorParameters): InstanceType; + } + + /** + * The `dot` reporter outputs the test results in a compact format, + * where each passing test is represented by a `.`, + * and each failing test is represented by a `X`. + * @since v20.0.0 + */ + function dot(source: TestEventGenerator): AsyncGenerator<"\n" | "." | "X", void>; + /** + * The `tap` reporter outputs the test results in the [TAP](https://testanything.org/) format. + * @since v20.0.0 + */ + function tap(source: TestEventGenerator): AsyncGenerator; + class SpecReporter extends Transform { + constructor(); + } + /** + * The `spec` reporter outputs the test results in a human-readable format. + * @since v20.0.0 + */ + const spec: ReporterConstructorWrapper; + /** + * The `junit` reporter outputs test results in a jUnit XML format. + * @since v21.0.0 + */ + function junit(source: TestEventGenerator): AsyncGenerator; + class LcovReporter extends Transform { + constructor(opts?: Omit); + } + /** + * The `lcov` reporter outputs test coverage when used with the + * [`--experimental-test-coverage`](https://nodejs.org/docs/latest-v22.x/api/cli.html#--experimental-test-coverage) flag. + * @since v22.0.0 + */ + // TODO: change the export to a wrapper function once node@0db38f0 is merged (breaking change) + // const lcov: ReporterConstructorWrapper; + const lcov: LcovReporter; + + export { dot, junit, lcov, spec, tap, TestEvent }; +} diff --git a/node_modules/@types/node-fetch/node_modules/@types/node/timers.d.ts b/node_modules/@types/node-fetch/node_modules/@types/node/timers.d.ts new file mode 100644 index 0000000..a4ea3e5 --- /dev/null +++ b/node_modules/@types/node-fetch/node_modules/@types/node/timers.d.ts @@ -0,0 +1,240 @@ +/** + * The `timer` module exposes a global API for scheduling functions to + * be called at some future period of time. Because the timer functions are + * globals, there is no need to import `node:timers` to use the API. + * + * The timer functions within Node.js implement a similar API as the timers API + * provided by Web Browsers but use a different internal implementation that is + * built around the Node.js [Event Loop](https://nodejs.org/en/docs/guides/event-loop-timers-and-nexttick/#setimmediate-vs-settimeout). 
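
A trivial sketch of the point above that the timer functions are installed as globals and need no import:

```js
// No import is required; setTimeout and clearTimeout are globals.
const timer = setTimeout(() => {
  console.log('runs roughly 100 ms later');
}, 100);

// The returned NodeJS.Timeout can be handed back to clearTimeout to cancel it.
// clearTimeout(timer);
```
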
+ * @see [source](https://github.com/nodejs/node/blob/v22.x/lib/timers.js) + */ +declare module "timers" { + import { Abortable } from "node:events"; + import { + setImmediate as setImmediatePromise, + setInterval as setIntervalPromise, + setTimeout as setTimeoutPromise, + } from "node:timers/promises"; + interface TimerOptions extends Abortable { + /** + * Set to `false` to indicate that the scheduled `Timeout` + * should not require the Node.js event loop to remain active. + * @default true + */ + ref?: boolean | undefined; + } + let setTimeout: typeof global.setTimeout; + let clearTimeout: typeof global.clearTimeout; + let setInterval: typeof global.setInterval; + let clearInterval: typeof global.clearInterval; + let setImmediate: typeof global.setImmediate; + let clearImmediate: typeof global.clearImmediate; + global { + namespace NodeJS { + // compatibility with older typings + interface Timer extends RefCounted { + hasRef(): boolean; + refresh(): this; + [Symbol.toPrimitive](): number; + } + /** + * This object is created internally and is returned from `setImmediate()`. It + * can be passed to `clearImmediate()` in order to cancel the scheduled + * actions. + * + * By default, when an immediate is scheduled, the Node.js event loop will continue + * running as long as the immediate is active. The `Immediate` object returned by `setImmediate()` exports both `immediate.ref()` and `immediate.unref()` functions that can be used to + * control this default behavior. + */ + class Immediate implements RefCounted { + /** + * When called, requests that the Node.js event loop _not_ exit so long as the `Immediate` is active. Calling `immediate.ref()` multiple times will have no + * effect. + * + * By default, all `Immediate` objects are "ref'ed", making it normally unnecessary + * to call `immediate.ref()` unless `immediate.unref()` had been called previously. + * @since v9.7.0 + * @return a reference to `immediate` + */ + ref(): this; + /** + * When called, the active `Immediate` object will not require the Node.js event + * loop to remain active. If there is no other activity keeping the event loop + * running, the process may exit before the `Immediate` object's callback is + * invoked. Calling `immediate.unref()` multiple times will have no effect. + * @since v9.7.0 + * @return a reference to `immediate` + */ + unref(): this; + /** + * If true, the `Immediate` object will keep the Node.js event loop active. + * @since v11.0.0 + */ + hasRef(): boolean; + _onImmediate: Function; // to distinguish it from the Timeout class + /** + * Cancels the immediate. This is similar to calling `clearImmediate()`. + * @since v20.5.0 + */ + [Symbol.dispose](): void; + } + /** + * This object is created internally and is returned from `setTimeout()` and `setInterval()`. It can be passed to either `clearTimeout()` or `clearInterval()` in order to cancel the + * scheduled actions. + * + * By default, when a timer is scheduled using either `setTimeout()` or `setInterval()`, the Node.js event loop will continue running as long as the + * timer is active. Each of the `Timeout` objects returned by these functions + * export both `timeout.ref()` and `timeout.unref()` functions that can be used to + * control this default behavior. + */ + class Timeout implements Timer { + /** + * When called, requests that the Node.js event loop _not_ exit so long as the`Timeout` is active. Calling `timeout.ref()` multiple times will have no effect. 
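
A minimal sketch of the `Timeout` reference counting described here, using `unref()` so an otherwise idle process is not kept alive by the pending timer; the 5-second delay is arbitrary:

```js
import { setTimeout } from 'node:timers';

// Schedule work, but do not force the process to stay alive for it.
const timeout = setTimeout(() => {
  console.log('only runs if something else keeps the event loop going');
}, 5000);

timeout.unref();

// hasRef() now reports false; calling ref() would make the timer blocking again.
console.log(timeout.hasRef()); // false
```
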
+ * + * By default, all `Timeout` objects are "ref'ed", making it normally unnecessary + * to call `timeout.ref()` unless `timeout.unref()` had been called previously. + * @since v0.9.1 + * @return a reference to `timeout` + */ + ref(): this; + /** + * When called, the active `Timeout` object will not require the Node.js event loop + * to remain active. If there is no other activity keeping the event loop running, + * the process may exit before the `Timeout` object's callback is invoked. Calling `timeout.unref()` multiple times will have no effect. + * @since v0.9.1 + * @return a reference to `timeout` + */ + unref(): this; + /** + * If true, the `Timeout` object will keep the Node.js event loop active. + * @since v11.0.0 + */ + hasRef(): boolean; + /** + * Sets the timer's start time to the current time, and reschedules the timer to + * call its callback at the previously specified duration adjusted to the current + * time. This is useful for refreshing a timer without allocating a new + * JavaScript object. + * + * Using this on a timer that has already called its callback will reactivate the + * timer. + * @since v10.2.0 + * @return a reference to `timeout` + */ + refresh(): this; + [Symbol.toPrimitive](): number; + /** + * Cancels the timeout. + * @since v20.5.0 + */ + [Symbol.dispose](): void; + } + } + /** + * Schedules execution of a one-time `callback` after `delay` milliseconds. + * + * The `callback` will likely not be invoked in precisely `delay` milliseconds. + * Node.js makes no guarantees about the exact timing of when callbacks will fire, + * nor of their ordering. The callback will be called as close as possible to the + * time specified. + * + * When `delay` is larger than `2147483647` or less than `1`, the `delay` will be set to `1`. Non-integer delays are truncated to an integer. + * + * If `callback` is not a function, a `TypeError` will be thrown. + * + * This method has a custom variant for promises that is available using `timersPromises.setTimeout()`. + * @since v0.0.1 + * @param callback The function to call when the timer elapses. + * @param [delay=1] The number of milliseconds to wait before calling the `callback`. + * @param args Optional arguments to pass when the `callback` is called. + * @return for use with {@link clearTimeout} + */ + function setTimeout( + callback: (...args: TArgs) => void, + ms?: number, + ...args: TArgs + ): NodeJS.Timeout; + // util.promisify no rest args compability + // eslint-disable-next-line @typescript-eslint/no-invalid-void-type + function setTimeout(callback: (args: void) => void, ms?: number): NodeJS.Timeout; + namespace setTimeout { + const __promisify__: typeof setTimeoutPromise; + } + /** + * Cancels a `Timeout` object created by `setTimeout()`. + * @since v0.0.1 + * @param timeout A `Timeout` object as returned by {@link setTimeout} or the `primitive` of the `Timeout` object as a string or a number. + */ + function clearTimeout(timeoutId: NodeJS.Timeout | string | number | undefined): void; + /** + * Schedules repeated execution of `callback` every `delay` milliseconds. + * + * When `delay` is larger than `2147483647` or less than `1`, the `delay` will be + * set to `1`. Non-integer delays are truncated to an integer. + * + * If `callback` is not a function, a `TypeError` will be thrown. + * + * This method has a custom variant for promises that is available using `timersPromises.setInterval()`. + * @since v0.0.1 + * @param callback The function to call when the timer elapses. 
+ * @param [delay=1] The number of milliseconds to wait before calling the `callback`. + * @param args Optional arguments to pass when the `callback` is called. + * @return for use with {@link clearInterval} + */ + function setInterval( + callback: (...args: TArgs) => void, + ms?: number, + ...args: TArgs + ): NodeJS.Timeout; + // util.promisify no rest args compability + // eslint-disable-next-line @typescript-eslint/no-invalid-void-type + function setInterval(callback: (args: void) => void, ms?: number): NodeJS.Timeout; + namespace setInterval { + const __promisify__: typeof setIntervalPromise; + } + /** + * Cancels a `Timeout` object created by `setInterval()`. + * @since v0.0.1 + * @param timeout A `Timeout` object as returned by {@link setInterval} or the `primitive` of the `Timeout` object as a string or a number. + */ + function clearInterval(intervalId: NodeJS.Timeout | string | number | undefined): void; + /** + * Schedules the "immediate" execution of the `callback` after I/O events' + * callbacks. + * + * When multiple calls to `setImmediate()` are made, the `callback` functions are + * queued for execution in the order in which they are created. The entire callback + * queue is processed every event loop iteration. If an immediate timer is queued + * from inside an executing callback, that timer will not be triggered until the + * next event loop iteration. + * + * If `callback` is not a function, a `TypeError` will be thrown. + * + * This method has a custom variant for promises that is available using `timersPromises.setImmediate()`. + * @since v0.9.1 + * @param callback The function to call at the end of this turn of the Node.js `Event Loop` + * @param args Optional arguments to pass when the `callback` is called. + * @return for use with {@link clearImmediate} + */ + function setImmediate( + callback: (...args: TArgs) => void, + ...args: TArgs + ): NodeJS.Immediate; + // util.promisify no rest args compability + // eslint-disable-next-line @typescript-eslint/no-invalid-void-type + function setImmediate(callback: (args: void) => void): NodeJS.Immediate; + namespace setImmediate { + const __promisify__: typeof setImmediatePromise; + } + /** + * Cancels an `Immediate` object created by `setImmediate()`. + * @since v0.9.1 + * @param immediate An `Immediate` object as returned by {@link setImmediate}. + */ + function clearImmediate(immediateId: NodeJS.Immediate | undefined): void; + function queueMicrotask(callback: () => void): void; + } +} +declare module "node:timers" { + export * from "timers"; +} diff --git a/node_modules/@types/node-fetch/node_modules/@types/node/timers/promises.d.ts b/node_modules/@types/node-fetch/node_modules/@types/node/timers/promises.d.ts new file mode 100644 index 0000000..3530418 --- /dev/null +++ b/node_modules/@types/node-fetch/node_modules/@types/node/timers/promises.d.ts @@ -0,0 +1,97 @@ +/** + * The `timers/promises` API provides an alternative set of timer functions + * that return `Promise` objects. The API is accessible via `import timersPromises from 'node:timers/promises'`. 
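
One additional hedged sketch before the module's own examples below: the promise-based timers accept an `AbortSignal` through `TimerOptions`, so a pending `setTimeout()` can be cancelled and surfaces as a rejection:

```js
import { setTimeout as wait } from 'node:timers/promises';

const ac = new AbortController();

// Abort the pending promise-based timer after 50 ms.
setTimeout(() => ac.abort(), 50);

try {
  await wait(1000, 'never delivered', { signal: ac.signal });
} catch (err) {
  console.log(err.name); // 'AbortError'
}
```
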
+ * + * ```js + * import { + * setTimeout, + * setImmediate, + * setInterval, + * } from 'node:timers/promises'; + * ``` + * @since v15.0.0 + */ +declare module "timers/promises" { + import { TimerOptions } from "node:timers"; + /** + * ```js + * import { + * setTimeout, + * } from 'node:timers/promises'; + * + * const res = await setTimeout(100, 'result'); + * + * console.log(res); // Prints 'result' + * ``` + * @since v15.0.0 + * @param [delay=1] The number of milliseconds to wait before fulfilling the promise. + * @param value A value with which the promise is fulfilled. + */ + function setTimeout(delay?: number, value?: T, options?: TimerOptions): Promise; + /** + * ```js + * import { + * setImmediate, + * } from 'node:timers/promises'; + * + * const res = await setImmediate('result'); + * + * console.log(res); // Prints 'result' + * ``` + * @since v15.0.0 + * @param value A value with which the promise is fulfilled. + */ + function setImmediate(value?: T, options?: TimerOptions): Promise; + /** + * Returns an async iterator that generates values in an interval of `delay` ms. + * If `ref` is `true`, you need to call `next()` of async iterator explicitly + * or implicitly to keep the event loop alive. + * + * ```js + * import { + * setInterval, + * } from 'node:timers/promises'; + * + * const interval = 100; + * for await (const startTime of setInterval(interval, Date.now())) { + * const now = Date.now(); + * console.log(now); + * if ((now - startTime) > 1000) + * break; + * } + * console.log(Date.now()); + * ``` + * @since v15.9.0 + */ + function setInterval(delay?: number, value?: T, options?: TimerOptions): AsyncIterable; + interface Scheduler { + /** + * An experimental API defined by the [Scheduling APIs](https://github.com/WICG/scheduling-apis) draft specification being developed as a standard Web Platform API. + * + * Calling `timersPromises.scheduler.wait(delay, options)` is roughly equivalent to calling `timersPromises.setTimeout(delay, undefined, options)` except that the `ref` + * option is not supported. + * + * ```js + * import { scheduler } from 'node:timers/promises'; + * + * await scheduler.wait(1000); // Wait one second before continuing + * ``` + * @since v16.14.0 + * @experimental + * @param [delay=1] The number of milliseconds to wait before fulfilling the promise. + */ + wait: (delay?: number, options?: TimerOptions) => Promise; + /** + * An experimental API defined by the [Scheduling APIs](https://nodejs.org/docs/latest-v20.x/api/async_hooks.html#promise-execution-tracking) draft specification + * being developed as a standard Web Platform API. + * Calling `timersPromises.scheduler.yield()` is equivalent to calling `timersPromises.setImmediate()` with no arguments. + * @since v16.14.0 + * @experimental + */ + yield: () => Promise; + } + const scheduler: Scheduler; +} +declare module "node:timers/promises" { + export * from "timers/promises"; +} diff --git a/node_modules/@types/node-fetch/node_modules/@types/node/tls.d.ts b/node_modules/@types/node-fetch/node_modules/@types/node/tls.d.ts new file mode 100644 index 0000000..ba37f06 --- /dev/null +++ b/node_modules/@types/node-fetch/node_modules/@types/node/tls.d.ts @@ -0,0 +1,1226 @@ +/** + * The `node:tls` module provides an implementation of the Transport Layer Security + * (TLS) and Secure Socket Layer (SSL) protocols that is built on top of OpenSSL. 
+ * The module can be accessed using: + * + * ```js + * import tls from 'node:tls'; + * ``` + * @see [source](https://github.com/nodejs/node/blob/v22.x/lib/tls.js) + */ +declare module "tls" { + import { X509Certificate } from "node:crypto"; + import * as net from "node:net"; + import * as stream from "stream"; + const CLIENT_RENEG_LIMIT: number; + const CLIENT_RENEG_WINDOW: number; + interface Certificate { + /** + * Country code. + */ + C: string; + /** + * Street. + */ + ST: string; + /** + * Locality. + */ + L: string; + /** + * Organization. + */ + O: string; + /** + * Organizational unit. + */ + OU: string; + /** + * Common name. + */ + CN: string; + } + interface PeerCertificate { + /** + * `true` if a Certificate Authority (CA), `false` otherwise. + * @since v18.13.0 + */ + ca: boolean; + /** + * The DER encoded X.509 certificate data. + */ + raw: Buffer; + /** + * The certificate subject. + */ + subject: Certificate; + /** + * The certificate issuer, described in the same terms as the `subject`. + */ + issuer: Certificate; + /** + * The date-time the certificate is valid from. + */ + valid_from: string; + /** + * The date-time the certificate is valid to. + */ + valid_to: string; + /** + * The certificate serial number, as a hex string. + */ + serialNumber: string; + /** + * The SHA-1 digest of the DER encoded certificate. + * It is returned as a `:` separated hexadecimal string. + */ + fingerprint: string; + /** + * The SHA-256 digest of the DER encoded certificate. + * It is returned as a `:` separated hexadecimal string. + */ + fingerprint256: string; + /** + * The SHA-512 digest of the DER encoded certificate. + * It is returned as a `:` separated hexadecimal string. + */ + fingerprint512: string; + /** + * The extended key usage, a set of OIDs. + */ + ext_key_usage?: string[]; + /** + * A string containing concatenated names for the subject, + * an alternative to the `subject` names. + */ + subjectaltname?: string; + /** + * An array describing the AuthorityInfoAccess, used with OCSP. + */ + infoAccess?: NodeJS.Dict; + /** + * For RSA keys: The RSA bit size. + * + * For EC keys: The key size in bits. + */ + bits?: number; + /** + * The RSA exponent, as a string in hexadecimal number notation. + */ + exponent?: string; + /** + * The RSA modulus, as a hexadecimal string. + */ + modulus?: string; + /** + * The public key. + */ + pubkey?: Buffer; + /** + * The ASN.1 name of the OID of the elliptic curve. + * Well-known curves are identified by an OID. + * While it is unusual, it is possible that the curve + * is identified by its mathematical properties, + * in which case it will not have an OID. + */ + asn1Curve?: string; + /** + * The NIST name for the elliptic curve, if it has one + * (not all well-known curves have been assigned names by NIST). + */ + nistCurve?: string; + } + interface DetailedPeerCertificate extends PeerCertificate { + /** + * The issuer certificate object. + * For self-signed certificates, this may be a circular reference. + */ + issuerCertificate: DetailedPeerCertificate; + } + interface CipherNameAndProtocol { + /** + * The cipher name. + */ + name: string; + /** + * SSL/TLS protocol version. + */ + version: string; + /** + * IETF name for the cipher suite. + */ + standardName: string; + } + interface EphemeralKeyInfo { + /** + * The supported types are 'DH' and 'ECDH'. + */ + type: string; + /** + * The name property is available only when type is 'ECDH'. + */ + name?: string | undefined; + /** + * The size of parameter of an ephemeral key exchange. 
+ */ + size: number; + } + interface KeyObject { + /** + * Private keys in PEM format. + */ + pem: string | Buffer; + /** + * Optional passphrase. + */ + passphrase?: string | undefined; + } + interface PxfObject { + /** + * PFX or PKCS12 encoded private key and certificate chain. + */ + buf: string | Buffer; + /** + * Optional passphrase. + */ + passphrase?: string | undefined; + } + interface TLSSocketOptions extends SecureContextOptions, CommonConnectionOptions { + /** + * If true the TLS socket will be instantiated in server-mode. + * Defaults to false. + */ + isServer?: boolean | undefined; + /** + * An optional net.Server instance. + */ + server?: net.Server | undefined; + /** + * An optional Buffer instance containing a TLS session. + */ + session?: Buffer | undefined; + /** + * If true, specifies that the OCSP status request extension will be + * added to the client hello and an 'OCSPResponse' event will be + * emitted on the socket before establishing a secure communication + */ + requestOCSP?: boolean | undefined; + } + /** + * Performs transparent encryption of written data and all required TLS + * negotiation. + * + * Instances of `tls.TLSSocket` implement the duplex `Stream` interface. + * + * Methods that return TLS connection metadata (e.g.{@link TLSSocket.getPeerCertificate}) will only return data while the + * connection is open. + * @since v0.11.4 + */ + class TLSSocket extends net.Socket { + /** + * Construct a new tls.TLSSocket object from an existing TCP socket. + */ + constructor(socket: net.Socket | stream.Duplex, options?: TLSSocketOptions); + /** + * This property is `true` if the peer certificate was signed by one of the CAs + * specified when creating the `tls.TLSSocket` instance, otherwise `false`. + * @since v0.11.4 + */ + authorized: boolean; + /** + * Returns the reason why the peer's certificate was not been verified. This + * property is set only when `tlsSocket.authorized === false`. + * @since v0.11.4 + */ + authorizationError: Error; + /** + * Always returns `true`. This may be used to distinguish TLS sockets from regular`net.Socket` instances. + * @since v0.11.4 + */ + encrypted: true; + /** + * String containing the selected ALPN protocol. + * Before a handshake has completed, this value is always null. + * When a handshake is completed but not ALPN protocol was selected, tlsSocket.alpnProtocol equals false. + */ + alpnProtocol: string | false | null; + /** + * Returns an object representing the local certificate. The returned object has + * some properties corresponding to the fields of the certificate. + * + * See {@link TLSSocket.getPeerCertificate} for an example of the certificate + * structure. + * + * If there is no local certificate, an empty object will be returned. If the + * socket has been destroyed, `null` will be returned. + * @since v11.2.0 + */ + getCertificate(): PeerCertificate | object | null; + /** + * Returns an object containing information on the negotiated cipher suite. + * + * For example, a TLSv1.2 protocol with AES256-SHA cipher: + * + * ```json + * { + * "name": "AES256-SHA", + * "standardName": "TLS_RSA_WITH_AES_256_CBC_SHA", + * "version": "SSLv3" + * } + * ``` + * + * See [SSL\_CIPHER\_get\_name](https://www.openssl.org/docs/man1.1.1/man3/SSL_CIPHER_get_name.html) for more information. + * @since v0.11.4 + */ + getCipher(): CipherNameAndProtocol; + /** + * Returns an object representing the type, name, and size of parameter of + * an ephemeral key exchange in `perfect forward secrecy` on a client + * connection. 
It returns an empty object when the key exchange is not + * ephemeral. As this is only supported on a client socket; `null` is returned + * if called on a server socket. The supported types are `'DH'` and `'ECDH'`. The `name` property is available only when type is `'ECDH'`. + * + * For example: `{ type: 'ECDH', name: 'prime256v1', size: 256 }`. + * @since v5.0.0 + */ + getEphemeralKeyInfo(): EphemeralKeyInfo | object | null; + /** + * As the `Finished` messages are message digests of the complete handshake + * (with a total of 192 bits for TLS 1.0 and more for SSL 3.0), they can + * be used for external authentication procedures when the authentication + * provided by SSL/TLS is not desired or is not enough. + * + * Corresponds to the `SSL_get_finished` routine in OpenSSL and may be used + * to implement the `tls-unique` channel binding from [RFC 5929](https://tools.ietf.org/html/rfc5929). + * @since v9.9.0 + * @return The latest `Finished` message that has been sent to the socket as part of a SSL/TLS handshake, or `undefined` if no `Finished` message has been sent yet. + */ + getFinished(): Buffer | undefined; + /** + * Returns an object representing the peer's certificate. If the peer does not + * provide a certificate, an empty object will be returned. If the socket has been + * destroyed, `null` will be returned. + * + * If the full certificate chain was requested, each certificate will include an`issuerCertificate` property containing an object representing its issuer's + * certificate. + * @since v0.11.4 + * @param detailed Include the full certificate chain if `true`, otherwise include just the peer's certificate. + * @return A certificate object. + */ + getPeerCertificate(detailed: true): DetailedPeerCertificate; + getPeerCertificate(detailed?: false): PeerCertificate; + getPeerCertificate(detailed?: boolean): PeerCertificate | DetailedPeerCertificate; + /** + * As the `Finished` messages are message digests of the complete handshake + * (with a total of 192 bits for TLS 1.0 and more for SSL 3.0), they can + * be used for external authentication procedures when the authentication + * provided by SSL/TLS is not desired or is not enough. + * + * Corresponds to the `SSL_get_peer_finished` routine in OpenSSL and may be used + * to implement the `tls-unique` channel binding from [RFC 5929](https://tools.ietf.org/html/rfc5929). + * @since v9.9.0 + * @return The latest `Finished` message that is expected or has actually been received from the socket as part of a SSL/TLS handshake, or `undefined` if there is no `Finished` message so + * far. + */ + getPeerFinished(): Buffer | undefined; + /** + * Returns a string containing the negotiated SSL/TLS protocol version of the + * current connection. The value `'unknown'` will be returned for connected + * sockets that have not completed the handshaking process. The value `null` will + * be returned for server sockets or disconnected client sockets. + * + * Protocol versions are: + * + * * `'SSLv3'` + * * `'TLSv1'` + * * `'TLSv1.1'` + * * `'TLSv1.2'` + * * `'TLSv1.3'` + * + * See the OpenSSL [`SSL_get_version`](https://www.openssl.org/docs/man1.1.1/man3/SSL_get_version.html) documentation for more information. + * @since v5.7.0 + */ + getProtocol(): string | null; + /** + * Returns the TLS session data or `undefined` if no session was + * negotiated. On the client, the data can be provided to the `session` option of {@link connect} to resume the connection. On the server, it may be useful + * for debugging. 
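
To make these connection-metadata getters concrete, a hedged sketch using `tls.connect()` (declared elsewhere in this module); `example.org:443` is just an arbitrary public endpoint:

```js
import tls from 'node:tls';

// Connect to an arbitrary public TLS endpoint and inspect the session.
const socket = tls.connect({ host: 'example.org', port: 443, servername: 'example.org' }, () => {
  console.log(socket.getProtocol());                // e.g. 'TLSv1.3'
  console.log(socket.getCipher().name);             // negotiated cipher suite name
  console.log(socket.getPeerCertificate().subject); // peer certificate subject
  console.log(socket.isSessionReused());            // false on a fresh handshake
  socket.end();
});

socket.on('error', (err) => {
  console.error('TLS connection failed:', err.message);
});
```
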
+ * + * See `Session Resumption` for more information. + * + * Note: `getSession()` works only for TLSv1.2 and below. For TLSv1.3, applications + * must use the `'session'` event (it also works for TLSv1.2 and below). + * @since v0.11.4 + */ + getSession(): Buffer | undefined; + /** + * See [SSL\_get\_shared\_sigalgs](https://www.openssl.org/docs/man1.1.1/man3/SSL_get_shared_sigalgs.html) for more information. + * @since v12.11.0 + * @return List of signature algorithms shared between the server and the client in the order of decreasing preference. + */ + getSharedSigalgs(): string[]; + /** + * For a client, returns the TLS session ticket if one is available, or`undefined`. For a server, always returns `undefined`. + * + * It may be useful for debugging. + * + * See `Session Resumption` for more information. + * @since v0.11.4 + */ + getTLSTicket(): Buffer | undefined; + /** + * See `Session Resumption` for more information. + * @since v0.5.6 + * @return `true` if the session was reused, `false` otherwise. + */ + isSessionReused(): boolean; + /** + * The `tlsSocket.renegotiate()` method initiates a TLS renegotiation process. + * Upon completion, the `callback` function will be passed a single argument + * that is either an `Error` (if the request failed) or `null`. + * + * This method can be used to request a peer's certificate after the secure + * connection has been established. + * + * When running as the server, the socket will be destroyed with an error after `handshakeTimeout` timeout. + * + * For TLSv1.3, renegotiation cannot be initiated, it is not supported by the + * protocol. + * @since v0.11.8 + * @param callback If `renegotiate()` returned `true`, callback is attached once to the `'secure'` event. If `renegotiate()` returned `false`, `callback` will be called in the next tick with + * an error, unless the `tlsSocket` has been destroyed, in which case `callback` will not be called at all. + * @return `true` if renegotiation was initiated, `false` otherwise. + */ + renegotiate( + options: { + rejectUnauthorized?: boolean | undefined; + requestCert?: boolean | undefined; + }, + callback: (err: Error | null) => void, + ): undefined | boolean; + /** + * The `tlsSocket.setMaxSendFragment()` method sets the maximum TLS fragment size. + * Returns `true` if setting the limit succeeded; `false` otherwise. + * + * Smaller fragment sizes decrease the buffering latency on the client: larger + * fragments are buffered by the TLS layer until the entire fragment is received + * and its integrity is verified; large fragments can span multiple roundtrips + * and their processing can be delayed due to packet loss or reordering. However, + * smaller fragments add extra TLS framing bytes and CPU overhead, which may + * decrease overall server throughput. + * @since v0.11.11 + * @param [size=16384] The maximum TLS fragment size. The maximum value is `16384`. + */ + setMaxSendFragment(size: number): boolean; + /** + * Disables TLS renegotiation for this `TLSSocket` instance. Once called, attempts + * to renegotiate will trigger an `'error'` event on the `TLSSocket`. + * @since v8.4.0 + */ + disableRenegotiation(): void; + /** + * When enabled, TLS packet trace information is written to `stderr`. This can be + * used to debug TLS connection problems. + * + * The format of the output is identical to the output of`openssl s_client -trace` or `openssl s_server -trace`. 
While it is produced by + * OpenSSL's `SSL_trace()` function, the format is undocumented, can change + * without notice, and should not be relied on. + * @since v12.2.0 + */ + enableTrace(): void; + /** + * Returns the peer certificate as an `X509Certificate` object. + * + * If there is no peer certificate, or the socket has been destroyed,`undefined` will be returned. + * @since v15.9.0 + */ + getPeerX509Certificate(): X509Certificate | undefined; + /** + * Returns the local certificate as an `X509Certificate` object. + * + * If there is no local certificate, or the socket has been destroyed,`undefined` will be returned. + * @since v15.9.0 + */ + getX509Certificate(): X509Certificate | undefined; + /** + * Keying material is used for validations to prevent different kind of attacks in + * network protocols, for example in the specifications of IEEE 802.1X. + * + * Example + * + * ```js + * const keyingMaterial = tlsSocket.exportKeyingMaterial( + * 128, + * 'client finished'); + * + * /* + * Example return value of keyingMaterial: + * + * + * ``` + * + * See the OpenSSL [`SSL_export_keying_material`](https://www.openssl.org/docs/man1.1.1/man3/SSL_export_keying_material.html) documentation for more + * information. + * @since v13.10.0, v12.17.0 + * @param length number of bytes to retrieve from keying material + * @param label an application specific label, typically this will be a value from the [IANA Exporter Label + * Registry](https://www.iana.org/assignments/tls-parameters/tls-parameters.xhtml#exporter-labels). + * @param context Optionally provide a context. + * @return requested bytes of the keying material + */ + exportKeyingMaterial(length: number, label: string, context: Buffer): Buffer; + addListener(event: string, listener: (...args: any[]) => void): this; + addListener(event: "OCSPResponse", listener: (response: Buffer) => void): this; + addListener(event: "secureConnect", listener: () => void): this; + addListener(event: "session", listener: (session: Buffer) => void): this; + addListener(event: "keylog", listener: (line: Buffer) => void): this; + emit(event: string | symbol, ...args: any[]): boolean; + emit(event: "OCSPResponse", response: Buffer): boolean; + emit(event: "secureConnect"): boolean; + emit(event: "session", session: Buffer): boolean; + emit(event: "keylog", line: Buffer): boolean; + on(event: string, listener: (...args: any[]) => void): this; + on(event: "OCSPResponse", listener: (response: Buffer) => void): this; + on(event: "secureConnect", listener: () => void): this; + on(event: "session", listener: (session: Buffer) => void): this; + on(event: "keylog", listener: (line: Buffer) => void): this; + once(event: string, listener: (...args: any[]) => void): this; + once(event: "OCSPResponse", listener: (response: Buffer) => void): this; + once(event: "secureConnect", listener: () => void): this; + once(event: "session", listener: (session: Buffer) => void): this; + once(event: "keylog", listener: (line: Buffer) => void): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + prependListener(event: "OCSPResponse", listener: (response: Buffer) => void): this; + prependListener(event: "secureConnect", listener: () => void): this; + prependListener(event: "session", listener: (session: Buffer) => void): this; + prependListener(event: "keylog", listener: (line: Buffer) => void): this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + prependOnceListener(event: "OCSPResponse", listener: (response: Buffer) => 
void): this; + prependOnceListener(event: "secureConnect", listener: () => void): this; + prependOnceListener(event: "session", listener: (session: Buffer) => void): this; + prependOnceListener(event: "keylog", listener: (line: Buffer) => void): this; + } + interface CommonConnectionOptions { + /** + * An optional TLS context object from tls.createSecureContext() + */ + secureContext?: SecureContext | undefined; + /** + * When enabled, TLS packet trace information is written to `stderr`. This can be + * used to debug TLS connection problems. + * @default false + */ + enableTrace?: boolean | undefined; + /** + * If true the server will request a certificate from clients that + * connect and attempt to verify that certificate. Defaults to + * false. + */ + requestCert?: boolean | undefined; + /** + * An array of strings or a Buffer naming possible ALPN protocols. + * (Protocols should be ordered by their priority.) + */ + ALPNProtocols?: string[] | Uint8Array[] | Uint8Array | undefined; + /** + * SNICallback(servername, cb) A function that will be + * called if the client supports SNI TLS extension. Two arguments + * will be passed when called: servername and cb. SNICallback should + * invoke cb(null, ctx), where ctx is a SecureContext instance. + * (tls.createSecureContext(...) can be used to get a proper + * SecureContext.) If SNICallback wasn't provided the default callback + * with high-level API will be used (see below). + */ + SNICallback?: ((servername: string, cb: (err: Error | null, ctx?: SecureContext) => void) => void) | undefined; + /** + * If true the server will reject any connection which is not + * authorized with the list of supplied CAs. This option only has an + * effect if requestCert is true. + * @default true + */ + rejectUnauthorized?: boolean | undefined; + } + interface TlsOptions extends SecureContextOptions, CommonConnectionOptions, net.ServerOpts { + /** + * Abort the connection if the SSL/TLS handshake does not finish in the + * specified number of milliseconds. A 'tlsClientError' is emitted on + * the tls.Server object whenever a handshake times out. Default: + * 120000 (120 seconds). + */ + handshakeTimeout?: number | undefined; + /** + * The number of seconds after which a TLS session created by the + * server will no longer be resumable. See Session Resumption for more + * information. Default: 300. + */ + sessionTimeout?: number | undefined; + /** + * 48-bytes of cryptographically strong pseudo-random data. + */ + ticketKeys?: Buffer | undefined; + /** + * @param socket + * @param identity identity parameter sent from the client. + * @return pre-shared key that must either be + * a buffer or `null` to stop the negotiation process. Returned PSK must be + * compatible with the selected cipher's digest. + * + * When negotiating TLS-PSK (pre-shared keys), this function is called + * with the identity provided by the client. + * If the return value is `null` the negotiation process will stop and an + * "unknown_psk_identity" alert message will be sent to the other party. + * If the server wishes to hide the fact that the PSK identity was not known, + * the callback must provide some random data as `psk` to make the connection + * fail with "decrypt_error" before negotiation is finished. + * PSK ciphers are disabled by default, and using TLS-PSK thus + * requires explicitly specifying a cipher suite with the `ciphers` option. + * More information can be found in the RFC 4279. 
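+ *
+ * As a minimal sketch (not an example from the Node.js docs), assuming a single key agreed
+ * out of band for a hypothetical identity `'client1'`:
+ *
+ * ```js
+ * import tls from 'node:tls';
+ *
+ * // Hypothetical out-of-band key store: identity -> pre-shared key.
+ * const keys = { client1: Buffer.from('0102030405060708090a0b0c0d0e0f10', 'hex') };
+ *
+ * const server = tls.createServer({
+ *   ciphers: 'PSK', // PSK ciphers are disabled by default and must be enabled explicitly.
+ *   pskCallback(socket, identity) {
+ *     // Returning null sends an "unknown_psk_identity" alert and stops the negotiation.
+ *     return keys[identity] || null;
+ *   },
+ * });
+ * ```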
+ */ + pskCallback?(socket: TLSSocket, identity: string): DataView | NodeJS.TypedArray | null; + /** + * hint to send to a client to help + * with selecting the identity during TLS-PSK negotiation. Will be ignored + * in TLS 1.3. Upon failing to set pskIdentityHint `tlsClientError` will be + * emitted with `ERR_TLS_PSK_SET_IDENTIY_HINT_FAILED` code. + */ + pskIdentityHint?: string | undefined; + } + interface PSKCallbackNegotation { + psk: DataView | NodeJS.TypedArray; + identity: string; + } + interface ConnectionOptions extends SecureContextOptions, CommonConnectionOptions { + host?: string | undefined; + port?: number | undefined; + path?: string | undefined; // Creates unix socket connection to path. If this option is specified, `host` and `port` are ignored. + socket?: stream.Duplex | undefined; // Establish secure connection on a given socket rather than creating a new socket + checkServerIdentity?: typeof checkServerIdentity | undefined; + servername?: string | undefined; // SNI TLS Extension + session?: Buffer | undefined; + minDHSize?: number | undefined; + lookup?: net.LookupFunction | undefined; + timeout?: number | undefined; + /** + * When negotiating TLS-PSK (pre-shared keys), this function is called + * with optional identity `hint` provided by the server or `null` + * in case of TLS 1.3 where `hint` was removed. + * It will be necessary to provide a custom `tls.checkServerIdentity()` + * for the connection as the default one will try to check hostname/IP + * of the server against the certificate but that's not applicable for PSK + * because there won't be a certificate present. + * More information can be found in the RFC 4279. + * + * @param hint message sent from the server to help client + * decide which identity to use during negotiation. + * Always `null` if TLS 1.3 is used. + * @returns Return `null` to stop the negotiation process. `psk` must be + * compatible with the selected cipher's digest. + * `identity` must use UTF-8 encoding. + */ + pskCallback?(hint: string | null): PSKCallbackNegotation | null; + } + /** + * Accepts encrypted connections using TLS or SSL. + * @since v0.3.2 + */ + class Server extends net.Server { + constructor(secureConnectionListener?: (socket: TLSSocket) => void); + constructor(options: TlsOptions, secureConnectionListener?: (socket: TLSSocket) => void); + /** + * The `server.addContext()` method adds a secure context that will be used if + * the client request's SNI name matches the supplied `hostname` (or wildcard). + * + * When there are multiple matching contexts, the most recently added one is + * used. + * @since v0.5.3 + * @param hostname A SNI host name or wildcard (e.g. `'*'`) + * @param context An object containing any of the possible properties from the {@link createSecureContext} `options` arguments (e.g. `key`, `cert`, `ca`, etc), or a TLS context object created + * with {@link createSecureContext} itself. + */ + addContext(hostname: string, context: SecureContextOptions): void; + /** + * Returns the session ticket keys. + * + * See `Session Resumption` for more information. + * @since v3.0.0 + * @return A 48-byte buffer containing the session ticket keys. + */ + getTicketKeys(): Buffer; + /** + * The `server.setSecureContext()` method replaces the secure context of an + * existing server. Existing connections to the server are not interrupted. + * @since v11.0.0 + * @param options An object containing any of the possible properties from the {@link createSecureContext} `options` arguments (e.g. `key`, `cert`, `ca`, etc). 
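+ *
+ * A minimal sketch (not from the Node.js docs) of rotating a renewed certificate into a running
+ * server; `server` is assumed to be an existing `tls.Server` and the file names are placeholders:
+ *
+ * ```js
+ * import fs from 'node:fs';
+ *
+ * // New connections use the new key/cert; connections that are already open are not interrupted.
+ * server.setSecureContext({
+ *   key: fs.readFileSync('renewed-server-key.pem'),
+ *   cert: fs.readFileSync('renewed-server-cert.pem'),
+ * });
+ * ```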
+ */ + setSecureContext(options: SecureContextOptions): void; + /** + * Sets the session ticket keys. + * + * Changes to the ticket keys are effective only for future server connections. + * Existing or currently pending server connections will use the previous keys. + * + * See `Session Resumption` for more information. + * @since v3.0.0 + * @param keys A 48-byte buffer containing the session ticket keys. + */ + setTicketKeys(keys: Buffer): void; + /** + * events.EventEmitter + * 1. tlsClientError + * 2. newSession + * 3. OCSPRequest + * 4. resumeSession + * 5. secureConnection + * 6. keylog + */ + addListener(event: string, listener: (...args: any[]) => void): this; + addListener(event: "tlsClientError", listener: (err: Error, tlsSocket: TLSSocket) => void): this; + addListener( + event: "newSession", + listener: (sessionId: Buffer, sessionData: Buffer, callback: () => void) => void, + ): this; + addListener( + event: "OCSPRequest", + listener: ( + certificate: Buffer, + issuer: Buffer, + callback: (err: Error | null, resp: Buffer) => void, + ) => void, + ): this; + addListener( + event: "resumeSession", + listener: (sessionId: Buffer, callback: (err: Error | null, sessionData: Buffer | null) => void) => void, + ): this; + addListener(event: "secureConnection", listener: (tlsSocket: TLSSocket) => void): this; + addListener(event: "keylog", listener: (line: Buffer, tlsSocket: TLSSocket) => void): this; + emit(event: string | symbol, ...args: any[]): boolean; + emit(event: "tlsClientError", err: Error, tlsSocket: TLSSocket): boolean; + emit(event: "newSession", sessionId: Buffer, sessionData: Buffer, callback: () => void): boolean; + emit( + event: "OCSPRequest", + certificate: Buffer, + issuer: Buffer, + callback: (err: Error | null, resp: Buffer) => void, + ): boolean; + emit( + event: "resumeSession", + sessionId: Buffer, + callback: (err: Error | null, sessionData: Buffer | null) => void, + ): boolean; + emit(event: "secureConnection", tlsSocket: TLSSocket): boolean; + emit(event: "keylog", line: Buffer, tlsSocket: TLSSocket): boolean; + on(event: string, listener: (...args: any[]) => void): this; + on(event: "tlsClientError", listener: (err: Error, tlsSocket: TLSSocket) => void): this; + on(event: "newSession", listener: (sessionId: Buffer, sessionData: Buffer, callback: () => void) => void): this; + on( + event: "OCSPRequest", + listener: ( + certificate: Buffer, + issuer: Buffer, + callback: (err: Error | null, resp: Buffer) => void, + ) => void, + ): this; + on( + event: "resumeSession", + listener: (sessionId: Buffer, callback: (err: Error | null, sessionData: Buffer | null) => void) => void, + ): this; + on(event: "secureConnection", listener: (tlsSocket: TLSSocket) => void): this; + on(event: "keylog", listener: (line: Buffer, tlsSocket: TLSSocket) => void): this; + once(event: string, listener: (...args: any[]) => void): this; + once(event: "tlsClientError", listener: (err: Error, tlsSocket: TLSSocket) => void): this; + once( + event: "newSession", + listener: (sessionId: Buffer, sessionData: Buffer, callback: () => void) => void, + ): this; + once( + event: "OCSPRequest", + listener: ( + certificate: Buffer, + issuer: Buffer, + callback: (err: Error | null, resp: Buffer) => void, + ) => void, + ): this; + once( + event: "resumeSession", + listener: (sessionId: Buffer, callback: (err: Error | null, sessionData: Buffer | null) => void) => void, + ): this; + once(event: "secureConnection", listener: (tlsSocket: TLSSocket) => void): this; + once(event: "keylog", listener: (line: 
Buffer, tlsSocket: TLSSocket) => void): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + prependListener(event: "tlsClientError", listener: (err: Error, tlsSocket: TLSSocket) => void): this; + prependListener( + event: "newSession", + listener: (sessionId: Buffer, sessionData: Buffer, callback: () => void) => void, + ): this; + prependListener( + event: "OCSPRequest", + listener: ( + certificate: Buffer, + issuer: Buffer, + callback: (err: Error | null, resp: Buffer) => void, + ) => void, + ): this; + prependListener( + event: "resumeSession", + listener: (sessionId: Buffer, callback: (err: Error | null, sessionData: Buffer | null) => void) => void, + ): this; + prependListener(event: "secureConnection", listener: (tlsSocket: TLSSocket) => void): this; + prependListener(event: "keylog", listener: (line: Buffer, tlsSocket: TLSSocket) => void): this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + prependOnceListener(event: "tlsClientError", listener: (err: Error, tlsSocket: TLSSocket) => void): this; + prependOnceListener( + event: "newSession", + listener: (sessionId: Buffer, sessionData: Buffer, callback: () => void) => void, + ): this; + prependOnceListener( + event: "OCSPRequest", + listener: ( + certificate: Buffer, + issuer: Buffer, + callback: (err: Error | null, resp: Buffer) => void, + ) => void, + ): this; + prependOnceListener( + event: "resumeSession", + listener: (sessionId: Buffer, callback: (err: Error | null, sessionData: Buffer | null) => void) => void, + ): this; + prependOnceListener(event: "secureConnection", listener: (tlsSocket: TLSSocket) => void): this; + prependOnceListener(event: "keylog", listener: (line: Buffer, tlsSocket: TLSSocket) => void): this; + } + /** + * @deprecated since v0.11.3 Use `tls.TLSSocket` instead. + */ + interface SecurePair { + encrypted: TLSSocket; + cleartext: TLSSocket; + } + type SecureVersion = "TLSv1.3" | "TLSv1.2" | "TLSv1.1" | "TLSv1"; + interface SecureContextOptions { + /** + * If set, this will be called when a client opens a connection using the ALPN extension. + * One argument will be passed to the callback: an object containing `servername` and `protocols` fields, + * respectively containing the server name from the SNI extension (if any) and an array of + * ALPN protocol name strings. The callback must return either one of the strings listed in `protocols`, + * which will be returned to the client as the selected ALPN protocol, or `undefined`, + * to reject the connection with a fatal alert. If a string is returned that does not match one of + * the client's ALPN protocols, an error will be thrown. + * This option cannot be used with the `ALPNProtocols` option, and setting both options will throw an error. + */ + ALPNCallback?: ((arg: { servername: string; protocols: string[] }) => string | undefined) | undefined; + /** + * Treat intermediate (non-self-signed) + * certificates in the trust CA certificate list as trusted. + * @since v22.9.0, v20.18.0 + */ + allowPartialTrustChain?: boolean | undefined; + /** + * Optionally override the trusted CA certificates. Default is to trust + * the well-known CAs curated by Mozilla. Mozilla's CAs are completely + * replaced when CAs are explicitly specified using this option. + */ + ca?: string | Buffer | Array<string | Buffer> | undefined; + /** + * Cert chains in PEM format. One cert chain should be provided per + * private key.
Each cert chain should consist of the PEM formatted + * certificate for a provided private key, followed by the PEM + * formatted intermediate certificates (if any), in order, and not + * including the root CA (the root CA must be pre-known to the peer, + * see ca). When providing multiple cert chains, they do not have to + * be in the same order as their private keys in key. If the + * intermediate certificates are not provided, the peer will not be + * able to validate the certificate, and the handshake will fail. + */ + cert?: string | Buffer | Array<string | Buffer> | undefined; + /** + * Colon-separated list of supported signature algorithms. The list + * can contain digest algorithms (SHA256, MD5 etc.), public key + * algorithms (RSA-PSS, ECDSA etc.), combination of both (e.g + * 'RSA+SHA384') or TLS v1.3 scheme names (e.g. rsa_pss_pss_sha512). + */ + sigalgs?: string | undefined; + /** + * Cipher suite specification, replacing the default. For more + * information, see modifying the default cipher suite. Permitted + * ciphers can be obtained via tls.getCiphers(). Cipher names must be + * uppercased in order for OpenSSL to accept them. + */ + ciphers?: string | undefined; + /** + * Name of an OpenSSL engine which can provide the client certificate. + * @deprecated + */ + clientCertEngine?: string | undefined; + /** + * PEM formatted CRLs (Certificate Revocation Lists). + */ + crl?: string | Buffer | Array<string | Buffer> | undefined; + /** + * `'auto'` or custom Diffie-Hellman parameters, required for non-ECDHE perfect forward secrecy. + * If omitted or invalid, the parameters are silently discarded and DHE ciphers will not be available. + * ECDHE-based perfect forward secrecy will still be available. + */ + dhparam?: string | Buffer | undefined; + /** + * A string describing a named curve or a colon separated list of curve + * NIDs or names, for example P-521:P-384:P-256, to use for ECDH key + * agreement. Set to auto to select the curve automatically. Use + * crypto.getCurves() to obtain a list of available curve names. On + * recent releases, openssl ecparam -list_curves will also display the + * name and description of each available elliptic curve. Default: + * tls.DEFAULT_ECDH_CURVE. + */ + ecdhCurve?: string | undefined; + /** + * Attempt to use the server's cipher suite preferences instead of the + * client's. When true, causes SSL_OP_CIPHER_SERVER_PREFERENCE to be + * set in secureOptions + */ + honorCipherOrder?: boolean | undefined; + /** + * Private keys in PEM format. PEM allows the option of private keys + * being encrypted. Encrypted keys will be decrypted with + * options.passphrase. Multiple keys using different algorithms can be + * provided either as an array of unencrypted key strings or buffers, + * or an array of objects in the form {pem: <string|buffer>[, + * passphrase: <string>]}. The object form can only occur in an array. + * object.passphrase is optional. Encrypted keys will be decrypted with + * object.passphrase if provided, or options.passphrase if it is not. + */ + key?: string | Buffer | Array<string | Buffer | KeyObject> | undefined; + /** + * Name of an OpenSSL engine to get private key from. Should be used + * together with privateKeyIdentifier. + * @deprecated + */ + privateKeyEngine?: string | undefined; + /** + * Identifier of a private key managed by an OpenSSL engine. Should be + * used together with privateKeyEngine. Should not be set together with + * key, because both options define a private key in different ways.
+ * @deprecated + */ + privateKeyIdentifier?: string | undefined; + /** + * Optionally set the maximum TLS version to allow. One + * of `'TLSv1.3'`, `'TLSv1.2'`, `'TLSv1.1'`, or `'TLSv1'`. Cannot be specified along with the + * `secureProtocol` option, use one or the other. + * **Default:** `'TLSv1.3'`, unless changed using CLI options. Using + * `--tls-max-v1.2` sets the default to `'TLSv1.2'`. Using `--tls-max-v1.3` sets the default to + * `'TLSv1.3'`. If multiple of the options are provided, the highest maximum is used. + */ + maxVersion?: SecureVersion | undefined; + /** + * Optionally set the minimum TLS version to allow. One + * of `'TLSv1.3'`, `'TLSv1.2'`, `'TLSv1.1'`, or `'TLSv1'`. Cannot be specified along with the + * `secureProtocol` option, use one or the other. It is not recommended to use + * less than TLSv1.2, but it may be required for interoperability. + * **Default:** `'TLSv1.2'`, unless changed using CLI options. Using + * `--tls-v1.0` sets the default to `'TLSv1'`. Using `--tls-v1.1` sets the default to + * `'TLSv1.1'`. Using `--tls-min-v1.3` sets the default to + * 'TLSv1.3'. If multiple of the options are provided, the lowest minimum is used. + */ + minVersion?: SecureVersion | undefined; + /** + * Shared passphrase used for a single private key and/or a PFX. + */ + passphrase?: string | undefined; + /** + * PFX or PKCS12 encoded private key and certificate chain. pfx is an + * alternative to providing key and cert individually. PFX is usually + * encrypted, if it is, passphrase will be used to decrypt it. Multiple + * PFX can be provided either as an array of unencrypted PFX buffers, + * or an array of objects in the form {buf: [, + * passphrase: ]}. The object form can only occur in an array. + * object.passphrase is optional. Encrypted PFX will be decrypted with + * object.passphrase if provided, or options.passphrase if it is not. + */ + pfx?: string | Buffer | Array | undefined; + /** + * Optionally affect the OpenSSL protocol behavior, which is not + * usually necessary. This should be used carefully if at all! Value is + * a numeric bitmask of the SSL_OP_* options from OpenSSL Options + */ + secureOptions?: number | undefined; // Value is a numeric bitmask of the `SSL_OP_*` options + /** + * Legacy mechanism to select the TLS protocol version to use, it does + * not support independent control of the minimum and maximum version, + * and does not support limiting the protocol to TLSv1.3. Use + * minVersion and maxVersion instead. The possible values are listed as + * SSL_METHODS, use the function names as strings. For example, use + * 'TLSv1_1_method' to force TLS version 1.1, or 'TLS_method' to allow + * any TLS protocol version up to TLSv1.3. It is not recommended to use + * TLS versions less than 1.2, but it may be required for + * interoperability. Default: none, see minVersion. + */ + secureProtocol?: string | undefined; + /** + * Opaque identifier used by servers to ensure session state is not + * shared between applications. Unused by clients. + */ + sessionIdContext?: string | undefined; + /** + * 48-bytes of cryptographically strong pseudo-random data. + * See Session Resumption for more information. + */ + ticketKeys?: Buffer | undefined; + /** + * The number of seconds after which a TLS session created by the + * server will no longer be resumable. See Session Resumption for more + * information. Default: 300. 
+ */ + sessionTimeout?: number | undefined; + } + interface SecureContext { + context: any; + } + /** + * Verifies the certificate `cert` is issued to `hostname`. + * + * Returns [Error](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Error) object, populating it with `reason`, `host`, and `cert` on + * failure. On success, returns [undefined](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Data_structures#Undefined_type). + * + * This function is intended to be used in combination with the`checkServerIdentity` option that can be passed to {@link connect} and as + * such operates on a `certificate object`. For other purposes, consider using `x509.checkHost()` instead. + * + * This function can be overwritten by providing an alternative function as the `options.checkServerIdentity` option that is passed to `tls.connect()`. The + * overwriting function can call `tls.checkServerIdentity()` of course, to augment + * the checks done with additional verification. + * + * This function is only called if the certificate passed all other checks, such as + * being issued by trusted CA (`options.ca`). + * + * Earlier versions of Node.js incorrectly accepted certificates for a given`hostname` if a matching `uniformResourceIdentifier` subject alternative name + * was present (see [CVE-2021-44531](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2021-44531)). Applications that wish to accept`uniformResourceIdentifier` subject alternative names can use + * a custom `options.checkServerIdentity` function that implements the desired behavior. + * @since v0.8.4 + * @param hostname The host name or IP address to verify the certificate against. + * @param cert A `certificate object` representing the peer's certificate. + */ + function checkServerIdentity(hostname: string, cert: PeerCertificate): Error | undefined; + /** + * Creates a new {@link Server}. The `secureConnectionListener`, if provided, is + * automatically set as a listener for the `'secureConnection'` event. + * + * The `ticketKeys` options is automatically shared between `node:cluster` module + * workers. + * + * The following illustrates a simple echo server: + * + * ```js + * import tls from 'node:tls'; + * import fs from 'node:fs'; + * + * const options = { + * key: fs.readFileSync('server-key.pem'), + * cert: fs.readFileSync('server-cert.pem'), + * + * // This is necessary only if using client certificate authentication. + * requestCert: true, + * + * // This is necessary only if the client uses a self-signed certificate. + * ca: [ fs.readFileSync('client-cert.pem') ], + * }; + * + * const server = tls.createServer(options, (socket) => { + * console.log('server connected', + * socket.authorized ? 'authorized' : 'unauthorized'); + * socket.write('welcome!\n'); + * socket.setEncoding('utf8'); + * socket.pipe(socket); + * }); + * server.listen(8000, () => { + * console.log('server bound'); + * }); + * ``` + * + * The server can be tested by connecting to it using the example client from {@link connect}. + * @since v0.3.2 + */ + function createServer(secureConnectionListener?: (socket: TLSSocket) => void): Server; + function createServer(options: TlsOptions, secureConnectionListener?: (socket: TLSSocket) => void): Server; + /** + * The `callback` function, if specified, will be added as a listener for the `'secureConnect'` event. + * + * `tls.connect()` returns a {@link TLSSocket} object. 
+ * + * Unlike the `https` API, `tls.connect()` does not enable the + * SNI (Server Name Indication) extension by default, which may cause some + * servers to return an incorrect certificate or reject the connection + * altogether. To enable SNI, set the `servername` option in addition + * to `host`. + * + * The following illustrates a client for the echo server example from {@link createServer}: + * + * ```js + * // Assumes an echo server that is listening on port 8000. + * import tls from 'node:tls'; + * import fs from 'node:fs'; + * + * const options = { + * // Necessary only if the server requires client certificate authentication. + * key: fs.readFileSync('client-key.pem'), + * cert: fs.readFileSync('client-cert.pem'), + * + * // Necessary only if the server uses a self-signed certificate. + * ca: [ fs.readFileSync('server-cert.pem') ], + * + * // Necessary only if the server's cert isn't for "localhost". + * checkServerIdentity: () => { return null; }, + * }; + * + * const socket = tls.connect(8000, options, () => { + * console.log('client connected', + * socket.authorized ? 'authorized' : 'unauthorized'); + * process.stdin.pipe(socket); + * process.stdin.resume(); + * }); + * socket.setEncoding('utf8'); + * socket.on('data', (data) => { + * console.log(data); + * }); + * socket.on('end', () => { + * console.log('server ends connection'); + * }); + * ``` + * @since v0.11.3 + */ + function connect(options: ConnectionOptions, secureConnectListener?: () => void): TLSSocket; + function connect( + port: number, + host?: string, + options?: ConnectionOptions, + secureConnectListener?: () => void, + ): TLSSocket; + function connect(port: number, options?: ConnectionOptions, secureConnectListener?: () => void): TLSSocket; + /** + * Creates a new secure pair object with two streams, one of which reads and writes + * the encrypted data and the other of which reads and writes the cleartext data. + * Generally, the encrypted stream is piped to/from an incoming encrypted data + * stream and the cleartext one is used as a replacement for the initial encrypted + * stream. + * + * `tls.createSecurePair()` returns a `tls.SecurePair` object with `cleartext` and `encrypted` stream properties. + * + * Using `cleartext` has the same API as {@link TLSSocket}. + * + * The `tls.createSecurePair()` method is now deprecated in favor of`tls.TLSSocket()`. For example, the code: + * + * ```js + * pair = tls.createSecurePair(// ... ); + * pair.encrypted.pipe(socket); + * socket.pipe(pair.encrypted); + * ``` + * + * can be replaced by: + * + * ```js + * secureSocket = tls.TLSSocket(socket, options); + * ``` + * + * where `secureSocket` has the same API as `pair.cleartext`. + * @since v0.3.2 + * @deprecated Since v0.11.3 - Use {@link TLSSocket} instead. + * @param context A secure context object as returned by `tls.createSecureContext()` + * @param isServer `true` to specify that this TLS connection should be opened as a server. + * @param requestCert `true` to specify whether a server should request a certificate from a connecting client. Only applies when `isServer` is `true`. + * @param rejectUnauthorized If not `false` a server automatically reject clients with invalid certificates. Only applies when `isServer` is `true`. 
+ */ + function createSecurePair( + context?: SecureContext, + isServer?: boolean, + requestCert?: boolean, + rejectUnauthorized?: boolean, + ): SecurePair; + /** + * `{@link createServer}` sets the default value of the `honorCipherOrder` option + * to `true`, other APIs that create secure contexts leave it unset. + * + * `{@link createServer}` uses a 128 bit truncated SHA1 hash value generated + * from `process.argv` as the default value of the `sessionIdContext` option, other + * APIs that create secure contexts have no default value. + * + * The `tls.createSecureContext()` method creates a `SecureContext` object. It is + * usable as an argument to several `tls` APIs, such as `server.addContext()`, + * but has no public methods. The {@link Server} constructor and the {@link createServer} method do not support the `secureContext` option. + * + * A key is _required_ for ciphers that use certificates. Either `key` or `pfx` can be used to provide it. + * + * If the `ca` option is not given, then Node.js will default to using [Mozilla's publicly trusted list of + * CAs](https://hg.mozilla.org/mozilla-central/raw-file/tip/security/nss/lib/ckfw/builtins/certdata.txt). + * + * Custom DHE parameters are discouraged in favor of the new `dhparam: 'auto' `option. When set to `'auto'`, well-known DHE parameters of sufficient strength + * will be selected automatically. Otherwise, if necessary, `openssl dhparam` can + * be used to create custom parameters. The key length must be greater than or + * equal to 1024 bits or else an error will be thrown. Although 1024 bits is + * permissible, use 2048 bits or larger for stronger security. + * @since v0.11.13 + */ + function createSecureContext(options?: SecureContextOptions): SecureContext; + /** + * Returns an array with the names of the supported TLS ciphers. The names are + * lower-case for historical reasons, but must be uppercased to be used in + * the `ciphers` option of `{@link createSecureContext}`. + * + * Not all supported ciphers are enabled by default. See + * [Modifying the default TLS cipher suite](https://nodejs.org/docs/latest-v22.x/api/tls.html#modifying-the-default-tls-cipher-suite). + * + * Cipher names that start with `'tls_'` are for TLSv1.3, all the others are for + * TLSv1.2 and below. + * + * ```js + * console.log(tls.getCiphers()); // ['aes128-gcm-sha256', 'aes128-sha', ...] + * ``` + * @since v0.10.2 + */ + function getCiphers(): string[]; + /** + * The default curve name to use for ECDH key agreement in a tls server. + * The default value is `'auto'`. See `{@link createSecureContext()}` for further + * information. + * @since v0.11.13 + */ + let DEFAULT_ECDH_CURVE: string; + /** + * The default value of the `maxVersion` option of `{@link createSecureContext()}`. + * It can be assigned any of the supported TLS protocol versions, + * `'TLSv1.3'`, `'TLSv1.2'`, `'TLSv1.1'`, or `'TLSv1'`. **Default:** `'TLSv1.3'`, unless + * changed using CLI options. Using `--tls-max-v1.2` sets the default to `'TLSv1.2'`. Using + * `--tls-max-v1.3` sets the default to `'TLSv1.3'`. If multiple of the options + * are provided, the highest maximum is used. + * @since v11.4.0 + */ + let DEFAULT_MAX_VERSION: SecureVersion; + /** + * The default value of the `minVersion` option of `{@link createSecureContext()}`. + * It can be assigned any of the supported TLS protocol versions, + * `'TLSv1.3'`, `'TLSv1.2'`, `'TLSv1.1'`, or `'TLSv1'`. **Default:** `'TLSv1.2'`, unless + * changed using CLI options. Using `--tls-min-v1.0` sets the default to + * `'TLSv1'`. 
Using `--tls-min-v1.1` sets the default to `'TLSv1.1'`. Using + * `--tls-min-v1.3` sets the default to `'TLSv1.3'`. If multiple of the options + * are provided, the lowest minimum is used. + * @since v11.4.0 + */ + let DEFAULT_MIN_VERSION: SecureVersion; + /** + * The default value of the `ciphers` option of `{@link createSecureContext()}`. + * It can be assigned any of the supported OpenSSL ciphers. + * Defaults to the content of `crypto.constants.defaultCoreCipherList`, unless + * changed using CLI options using `--tls-default-ciphers`. + * @since v19.8.0 + */ + let DEFAULT_CIPHERS: string; + /** + * An immutable array of strings representing the root certificates (in PEM format) + * from the bundled Mozilla CA store as supplied by the current Node.js version. + * + * The bundled CA store, as supplied by Node.js, is a snapshot of Mozilla CA store + * that is fixed at release time. It is identical on all supported platforms. + * @since v12.3.0 + */ + const rootCertificates: readonly string[]; +} +declare module "node:tls" { + export * from "tls"; +} diff --git a/node_modules/@types/node-fetch/node_modules/@types/node/trace_events.d.ts b/node_modules/@types/node-fetch/node_modules/@types/node/trace_events.d.ts new file mode 100644 index 0000000..f334b0b --- /dev/null +++ b/node_modules/@types/node-fetch/node_modules/@types/node/trace_events.d.ts @@ -0,0 +1,197 @@ +/** + * The `node:trace_events` module provides a mechanism to centralize tracing information + * generated by V8, Node.js core, and userspace code. + * + * Tracing can be enabled with the `--trace-event-categories` command-line flag + * or by using the `trace_events` module. The `--trace-event-categories` flag + * accepts a list of comma-separated category names. + * + * The available categories are: + * + * * `node`: An empty placeholder. + * * `node.async_hooks`: Enables capture of detailed [`async_hooks`](https://nodejs.org/docs/latest-v22.x/api/async_hooks.html) trace data. + * The [`async_hooks`](https://nodejs.org/docs/latest-v22.x/api/async_hooks.html) events have a unique `asyncId` and a special `triggerId` `triggerAsyncId` property. + * * `node.bootstrap`: Enables capture of Node.js bootstrap milestones. + * * `node.console`: Enables capture of `console.time()` and `console.count()` output. + * * `node.threadpoolwork.sync`: Enables capture of trace data for threadpool synchronous operations, such as `blob`, `zlib`, `crypto` and `node_api`. + * * `node.threadpoolwork.async`: Enables capture of trace data for threadpool asynchronous operations, such as `blob`, `zlib`, `crypto` and `node_api`. + * * `node.dns.native`: Enables capture of trace data for DNS queries. + * * `node.net.native`: Enables capture of trace data for network. + * * `node.environment`: Enables capture of Node.js Environment milestones. + * * `node.fs.sync`: Enables capture of trace data for file system sync methods. + * * `node.fs_dir.sync`: Enables capture of trace data for file system sync directory methods. + * * `node.fs.async`: Enables capture of trace data for file system async methods. + * * `node.fs_dir.async`: Enables capture of trace data for file system async directory methods. + * * `node.perf`: Enables capture of [Performance API](https://nodejs.org/docs/latest-v22.x/api/perf_hooks.html) measurements. + * * `node.perf.usertiming`: Enables capture of only Performance API User Timing + * measures and marks. + * * `node.perf.timerify`: Enables capture of only Performance API timerify + * measurements. 
+ * * `node.promises.rejections`: Enables capture of trace data tracking the number + * of unhandled Promise rejections and handled-after-rejections. + * * `node.vm.script`: Enables capture of trace data for the `node:vm` module's `runInNewContext()`, `runInContext()`, and `runInThisContext()` methods. + * * `v8`: The [V8](https://nodejs.org/docs/latest-v22.x/api/v8.html) events are GC, compiling, and execution related. + * * `node.http`: Enables capture of trace data for http request / response. + * + * By default the `node`, `node.async_hooks`, and `v8` categories are enabled. + * + * ```bash + * node --trace-event-categories v8,node,node.async_hooks server.js + * ``` + * + * Prior versions of Node.js required the use of the `--trace-events-enabled` flag to enable trace events. This requirement has been removed. However, the `--trace-events-enabled` flag _may_ still be + * used and will enable the `node`, `node.async_hooks`, and `v8` trace event categories by default. + * + * ```bash + * node --trace-events-enabled + * + * # is equivalent to + * + * node --trace-event-categories v8,node,node.async_hooks + * ``` + * + * Alternatively, trace events may be enabled using the `node:trace_events` module: + * + * ```js + * import trace_events from 'node:trace_events'; + * const tracing = trace_events.createTracing({ categories: ['node.perf'] }); + * tracing.enable(); // Enable trace event capture for the 'node.perf' category + * + * // do work + * + * tracing.disable(); // Disable trace event capture for the 'node.perf' category + * ``` + * + * Running Node.js with tracing enabled will produce log files that can be opened + * in the [`chrome://tracing`](https://www.chromium.org/developers/how-tos/trace-event-profiling-tool) tab of Chrome. + * + * The logging file is by default called `node_trace.${rotation}.log`, where `${rotation}` is an incrementing log-rotation id. The filepath pattern can + * be specified with `--trace-event-file-pattern` that accepts a template + * string that supports `${rotation}` and `${pid}`: + * + * ```bash + * node --trace-event-categories v8 --trace-event-file-pattern '${pid}-${rotation}.log' server.js + * ``` + * + * To guarantee that the log file is properly generated after signal events like `SIGINT`, `SIGTERM`, or `SIGBREAK`, make sure to have the appropriate handlers + * in your code, such as: + * + * ```js + * process.on('SIGINT', function onSigint() { + * console.info('Received SIGINT.'); + * process.exit(130); // Or applicable exit code depending on OS and signal + * }); + * ``` + * + * The tracing system uses the same time source + * as the one used by `process.hrtime()`. + * However the trace-event timestamps are expressed in microseconds, + * unlike `process.hrtime()` which returns nanoseconds. + * + * The features from this module are not available in [`Worker`](https://nodejs.org/docs/latest-v22.x/api/worker_threads.html#class-worker) threads. + * @experimental + * @see [source](https://github.com/nodejs/node/blob/v22.x/lib/trace_events.js) + */ +declare module "trace_events" { + /** + * The `Tracing` object is used to enable or disable tracing for sets of + * categories. Instances are created using the + * `trace_events.createTracing()` method. + * + * When created, the `Tracing` object is disabled. Calling the + * `tracing.enable()` method adds the categories to the set of enabled trace + * event categories. Calling `tracing.disable()` will remove the categories + * from the set of enabled trace event categories. 
+ */ + interface Tracing { + /** + * A comma-separated list of the trace event categories covered by this + * `Tracing` object. + * @since v10.0.0 + */ + readonly categories: string; + /** + * Disables this `Tracing` object. + * + * Only trace event categories _not_ covered by other enabled `Tracing` + * objects and _not_ specified by the `--trace-event-categories` flag + * will be disabled. + * + * ```js + * import trace_events from 'node:trace_events'; + * const t1 = trace_events.createTracing({ categories: ['node', 'v8'] }); + * const t2 = trace_events.createTracing({ categories: ['node.perf', 'node'] }); + * t1.enable(); + * t2.enable(); + * + * // Prints 'node,node.perf,v8' + * console.log(trace_events.getEnabledCategories()); + * + * t2.disable(); // Will only disable emission of the 'node.perf' category + * + * // Prints 'node,v8' + * console.log(trace_events.getEnabledCategories()); + * ``` + * @since v10.0.0 + */ + disable(): void; + /** + * Enables this `Tracing` object for the set of categories covered by + * the `Tracing` object. + * @since v10.0.0 + */ + enable(): void; + /** + * `true` only if the `Tracing` object has been enabled. + * @since v10.0.0 + */ + readonly enabled: boolean; + } + interface CreateTracingOptions { + /** + * An array of trace category names. Values included in the array are + * coerced to a string when possible. An error will be thrown if the + * value cannot be coerced. + */ + categories: string[]; + } + /** + * Creates and returns a `Tracing` object for the given set of `categories`. + * + * ```js + * import trace_events from 'node:trace_events'; + * const categories = ['node.perf', 'node.async_hooks']; + * const tracing = trace_events.createTracing({ categories }); + * tracing.enable(); + * // do stuff + * tracing.disable(); + * ``` + * @since v10.0.0 + */ + function createTracing(options: CreateTracingOptions): Tracing; + /** + * Returns a comma-separated list of all currently-enabled trace event + * categories. The current set of enabled trace event categories is determined + * by the _union_ of all currently-enabled `Tracing` objects and any categories + * enabled using the `--trace-event-categories` flag. + * + * Given the file `test.js` below, the command `node --trace-event-categories node.perf test.js` will print `'node.async_hooks,node.perf'` to the console. + * + * ```js + * import trace_events from 'node:trace_events'; + * const t1 = trace_events.createTracing({ categories: ['node.async_hooks'] }); + * const t2 = trace_events.createTracing({ categories: ['node.perf'] }); + * const t3 = trace_events.createTracing({ categories: ['v8'] }); + * + * t1.enable(); + * t2.enable(); + * + * console.log(trace_events.getEnabledCategories()); + * ``` + * @since v10.0.0 + */ + function getEnabledCategories(): string | undefined; +} +declare module "node:trace_events" { + export * from "trace_events"; +} diff --git a/node_modules/@types/node-fetch/node_modules/@types/node/ts5.6/buffer.buffer.d.ts b/node_modules/@types/node-fetch/node_modules/@types/node/ts5.6/buffer.buffer.d.ts new file mode 100644 index 0000000..21f7211 --- /dev/null +++ b/node_modules/@types/node-fetch/node_modules/@types/node/ts5.6/buffer.buffer.d.ts @@ -0,0 +1,385 @@ +declare module "buffer" { + global { + interface BufferConstructor { + // see ../buffer.d.ts for implementation shared with all TypeScript versions + + /** + * Allocates a new buffer containing the given {str}. + * + * @param str String to store in buffer. + * @param encoding encoding to use, optional. 
Default is 'utf8' + * @deprecated since v10.0.0 - Use `Buffer.from(string[, encoding])` instead. + */ + new(str: string, encoding?: BufferEncoding): Buffer; + /** + * Allocates a new buffer of {size} octets. + * + * @param size count of octets to allocate. + * @deprecated since v10.0.0 - Use `Buffer.alloc()` instead (also see `Buffer.allocUnsafe()`). + */ + new(size: number): Buffer; + /** + * Allocates a new buffer containing the given {array} of octets. + * + * @param array The octets to store. + * @deprecated since v10.0.0 - Use `Buffer.from(array)` instead. + */ + new(array: Uint8Array): Buffer; + /** + * Produces a Buffer backed by the same allocated memory as + * the given {ArrayBuffer}/{SharedArrayBuffer}. + * + * @param arrayBuffer The ArrayBuffer with which to share memory. + * @deprecated since v10.0.0 - Use `Buffer.from(arrayBuffer[, byteOffset[, length]])` instead. + */ + new(arrayBuffer: ArrayBuffer | SharedArrayBuffer): Buffer; + /** + * Allocates a new buffer containing the given {array} of octets. + * + * @param array The octets to store. + * @deprecated since v10.0.0 - Use `Buffer.from(array)` instead. + */ + new(array: readonly any[]): Buffer; + /** + * Copies the passed {buffer} data onto a new {Buffer} instance. + * + * @param buffer The buffer to copy. + * @deprecated since v10.0.0 - Use `Buffer.from(buffer)` instead. + */ + new(buffer: Buffer): Buffer; + /** + * Allocates a new `Buffer` using an `array` of bytes in the range `0` – `255`. + * Array entries outside that range will be truncated to fit into it. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * // Creates a new Buffer containing the UTF-8 bytes of the string 'buffer'. + * const buf = Buffer.from([0x62, 0x75, 0x66, 0x66, 0x65, 0x72]); + * ``` + * + * If `array` is an `Array`\-like object (that is, one with a `length` property of + * type `number`), it is treated as if it is an array, unless it is a `Buffer` or + * a `Uint8Array`. This means all other `TypedArray` variants get treated as an `Array`. To create a `Buffer` from the bytes backing a `TypedArray`, use `Buffer.copyBytesFrom()`. + * + * A `TypeError` will be thrown if `array` is not an `Array` or another type + * appropriate for `Buffer.from()` variants. + * + * `Buffer.from(array)` and `Buffer.from(string)` may also use the internal `Buffer` pool like `Buffer.allocUnsafe()` does. + * @since v5.10.0 + */ + from( + arrayBuffer: WithImplicitCoercion<ArrayBuffer | SharedArrayBuffer>, + byteOffset?: number, + length?: number, + ): Buffer; + /** + * Creates a new Buffer using the passed {data} + * @param data data to create a new Buffer + */ + from(data: Uint8Array | readonly number[]): Buffer; + from(data: WithImplicitCoercion<Uint8Array | readonly number[] | string>): Buffer; + /** + * Creates a new Buffer containing the given JavaScript string {str}. + * If provided, the {encoding} parameter identifies the character encoding. + * If not provided, {encoding} defaults to 'utf8'. + */ + from( + str: + | WithImplicitCoercion<string> + | { + [Symbol.toPrimitive](hint: "string"): string; + }, + encoding?: BufferEncoding, + ): Buffer; + /** + * Creates a new Buffer using the passed {data} + * @param values to create a new Buffer + */ + of(...items: number[]): Buffer; + /** + * Returns a new `Buffer` which is the result of concatenating all the `Buffer` instances in the `list` together. + * + * If the list has no items, or if the `totalLength` is 0, then a new zero-length `Buffer` is returned. + * + * If `totalLength` is not provided, it is calculated from the `Buffer` instances + * in `list` by adding their lengths.
+ * + * If `totalLength` is provided, it is coerced to an unsigned integer. If the + * combined length of the `Buffer`s in `list` exceeds `totalLength`, the result is + * truncated to `totalLength`. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * // Create a single `Buffer` from a list of three `Buffer` instances. + * + * const buf1 = Buffer.alloc(10); + * const buf2 = Buffer.alloc(14); + * const buf3 = Buffer.alloc(18); + * const totalLength = buf1.length + buf2.length + buf3.length; + * + * console.log(totalLength); + * // Prints: 42 + * + * const bufA = Buffer.concat([buf1, buf2, buf3], totalLength); + * + * console.log(bufA); + * // Prints: + * console.log(bufA.length); + * // Prints: 42 + * ``` + * + * `Buffer.concat()` may also use the internal `Buffer` pool like `Buffer.allocUnsafe()` does. + * @since v0.7.11 + * @param list List of `Buffer` or {@link Uint8Array} instances to concatenate. + * @param totalLength Total length of the `Buffer` instances in `list` when concatenated. + */ + concat(list: readonly Uint8Array[], totalLength?: number): Buffer; + /** + * Copies the underlying memory of `view` into a new `Buffer`. + * + * ```js + * const u16 = new Uint16Array([0, 0xffff]); + * const buf = Buffer.copyBytesFrom(u16, 1, 1); + * u16[1] = 0; + * console.log(buf.length); // 2 + * console.log(buf[0]); // 255 + * console.log(buf[1]); // 255 + * ``` + * @since v19.8.0 + * @param view The {TypedArray} to copy. + * @param [offset=0] The starting offset within `view`. + * @param [length=view.length - offset] The number of elements from `view` to copy. + */ + copyBytesFrom(view: NodeJS.TypedArray, offset?: number, length?: number): Buffer; + /** + * Allocates a new `Buffer` of `size` bytes. If `fill` is `undefined`, the`Buffer` will be zero-filled. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.alloc(5); + * + * console.log(buf); + * // Prints: + * ``` + * + * If `size` is larger than {@link constants.MAX_LENGTH} or smaller than 0, `ERR_OUT_OF_RANGE` is thrown. + * + * If `fill` is specified, the allocated `Buffer` will be initialized by calling `buf.fill(fill)`. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.alloc(5, 'a'); + * + * console.log(buf); + * // Prints: + * ``` + * + * If both `fill` and `encoding` are specified, the allocated `Buffer` will be + * initialized by calling `buf.fill(fill, encoding)`. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.alloc(11, 'aGVsbG8gd29ybGQ=', 'base64'); + * + * console.log(buf); + * // Prints: + * ``` + * + * Calling `Buffer.alloc()` can be measurably slower than the alternative `Buffer.allocUnsafe()` but ensures that the newly created `Buffer` instance + * contents will never contain sensitive data from previous allocations, including + * data that might not have been allocated for `Buffer`s. + * + * A `TypeError` will be thrown if `size` is not a number. + * @since v5.10.0 + * @param size The desired length of the new `Buffer`. + * @param [fill=0] A value to pre-fill the new `Buffer` with. + * @param [encoding='utf8'] If `fill` is a string, this is its encoding. + */ + alloc(size: number, fill?: string | Uint8Array | number, encoding?: BufferEncoding): Buffer; + /** + * Allocates a new `Buffer` of `size` bytes. If `size` is larger than {@link constants.MAX_LENGTH} or smaller than 0, `ERR_OUT_OF_RANGE` is thrown. + * + * The underlying memory for `Buffer` instances created in this way is _not_ + * _initialized_. 
The contents of the newly created `Buffer` are unknown and _may contain sensitive data_. Use `Buffer.alloc()` instead to initialize`Buffer` instances with zeroes. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.allocUnsafe(10); + * + * console.log(buf); + * // Prints (contents may vary): + * + * buf.fill(0); + * + * console.log(buf); + * // Prints: + * ``` + * + * A `TypeError` will be thrown if `size` is not a number. + * + * The `Buffer` module pre-allocates an internal `Buffer` instance of + * size `Buffer.poolSize` that is used as a pool for the fast allocation of new `Buffer` instances created using `Buffer.allocUnsafe()`, `Buffer.from(array)`, + * and `Buffer.concat()` only when `size` is less than `Buffer.poolSize >>> 1` (floor of `Buffer.poolSize` divided by two). + * + * Use of this pre-allocated internal memory pool is a key difference between + * calling `Buffer.alloc(size, fill)` vs. `Buffer.allocUnsafe(size).fill(fill)`. + * Specifically, `Buffer.alloc(size, fill)` will _never_ use the internal `Buffer`pool, while `Buffer.allocUnsafe(size).fill(fill)`_will_ use the internal`Buffer` pool if `size` is less + * than or equal to half `Buffer.poolSize`. The + * difference is subtle but can be important when an application requires the + * additional performance that `Buffer.allocUnsafe()` provides. + * @since v5.10.0 + * @param size The desired length of the new `Buffer`. + */ + allocUnsafe(size: number): Buffer; + /** + * Allocates a new `Buffer` of `size` bytes. If `size` is larger than {@link constants.MAX_LENGTH} or smaller than 0, `ERR_OUT_OF_RANGE` is thrown. A zero-length `Buffer` is created if + * `size` is 0. + * + * The underlying memory for `Buffer` instances created in this way is _not_ + * _initialized_. The contents of the newly created `Buffer` are unknown and _may contain sensitive data_. Use `buf.fill(0)` to initialize + * such `Buffer` instances with zeroes. + * + * When using `Buffer.allocUnsafe()` to allocate new `Buffer` instances, + * allocations under 4 KiB are sliced from a single pre-allocated `Buffer`. This + * allows applications to avoid the garbage collection overhead of creating many + * individually allocated `Buffer` instances. This approach improves both + * performance and memory usage by eliminating the need to track and clean up as + * many individual `ArrayBuffer` objects. + * + * However, in the case where a developer may need to retain a small chunk of + * memory from a pool for an indeterminate amount of time, it may be appropriate + * to create an un-pooled `Buffer` instance using `Buffer.allocUnsafeSlow()` and + * then copying out the relevant bits. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * // Need to keep around a few small chunks of memory. + * const store = []; + * + * socket.on('readable', () => { + * let data; + * while (null !== (data = readable.read())) { + * // Allocate for retained data. + * const sb = Buffer.allocUnsafeSlow(10); + * + * // Copy the data into the new allocation. + * data.copy(sb, 0, 0, 10); + * + * store.push(sb); + * } + * }); + * ``` + * + * A `TypeError` will be thrown if `size` is not a number. + * @since v5.12.0 + * @param size The desired length of the new `Buffer`. 
+ */ + allocUnsafeSlow(size: number): Buffer; + } + interface Buffer extends Uint8Array { + // see ../buffer.d.ts for implementation shared with all TypeScript versions + + /** + * Returns a new `Buffer` that references the same memory as the original, but + * offset and cropped by the `start` and `end` indices. + * + * This method is not compatible with the `Uint8Array.prototype.slice()`, + * which is a superclass of `Buffer`. To copy the slice, use`Uint8Array.prototype.slice()`. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from('buffer'); + * + * const copiedBuf = Uint8Array.prototype.slice.call(buf); + * copiedBuf[0]++; + * console.log(copiedBuf.toString()); + * // Prints: cuffer + * + * console.log(buf.toString()); + * // Prints: buffer + * + * // With buf.slice(), the original buffer is modified. + * const notReallyCopiedBuf = buf.slice(); + * notReallyCopiedBuf[0]++; + * console.log(notReallyCopiedBuf.toString()); + * // Prints: cuffer + * console.log(buf.toString()); + * // Also prints: cuffer (!) + * ``` + * @since v0.3.0 + * @deprecated Use `subarray` instead. + * @param [start=0] Where the new `Buffer` will start. + * @param [end=buf.length] Where the new `Buffer` will end (not inclusive). + */ + slice(start?: number, end?: number): Buffer; + /** + * Returns a new `Buffer` that references the same memory as the original, but + * offset and cropped by the `start` and `end` indices. + * + * Specifying `end` greater than `buf.length` will return the same result as + * that of `end` equal to `buf.length`. + * + * This method is inherited from [`TypedArray.prototype.subarray()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/TypedArray/subarray). + * + * Modifying the new `Buffer` slice will modify the memory in the original `Buffer`because the allocated memory of the two objects overlap. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * // Create a `Buffer` with the ASCII alphabet, take a slice, and modify one byte + * // from the original `Buffer`. + * + * const buf1 = Buffer.allocUnsafe(26); + * + * for (let i = 0; i < 26; i++) { + * // 97 is the decimal ASCII value for 'a'. + * buf1[i] = i + 97; + * } + * + * const buf2 = buf1.subarray(0, 3); + * + * console.log(buf2.toString('ascii', 0, buf2.length)); + * // Prints: abc + * + * buf1[0] = 33; + * + * console.log(buf2.toString('ascii', 0, buf2.length)); + * // Prints: !bc + * ``` + * + * Specifying negative indexes causes the slice to be generated relative to the + * end of `buf` rather than the beginning. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from('buffer'); + * + * console.log(buf.subarray(-6, -1).toString()); + * // Prints: buffe + * // (Equivalent to buf.subarray(0, 5).) + * + * console.log(buf.subarray(-6, -2).toString()); + * // Prints: buff + * // (Equivalent to buf.subarray(0, 4).) + * + * console.log(buf.subarray(-5, -2).toString()); + * // Prints: uff + * // (Equivalent to buf.subarray(1, 4).) + * ``` + * @since v3.0.0 + * @param [start=0] Where the new `Buffer` will start. + * @param [end=buf.length] Where the new `Buffer` will end (not inclusive). 
+ */ + subarray(start?: number, end?: number): Buffer; + } + } +} diff --git a/node_modules/@types/node-fetch/node_modules/@types/node/ts5.6/globals.typedarray.d.ts b/node_modules/@types/node-fetch/node_modules/@types/node/ts5.6/globals.typedarray.d.ts new file mode 100644 index 0000000..0e4633b --- /dev/null +++ b/node_modules/@types/node-fetch/node_modules/@types/node/ts5.6/globals.typedarray.d.ts @@ -0,0 +1,19 @@ +export {}; // Make this a module + +declare global { + namespace NodeJS { + type TypedArray = + | Uint8Array + | Uint8ClampedArray + | Uint16Array + | Uint32Array + | Int8Array + | Int16Array + | Int32Array + | BigUint64Array + | BigInt64Array + | Float32Array + | Float64Array; + type ArrayBufferView = TypedArray | DataView; + } +} diff --git a/node_modules/@types/node-fetch/node_modules/@types/node/ts5.6/index.d.ts b/node_modules/@types/node-fetch/node_modules/@types/node/ts5.6/index.d.ts new file mode 100644 index 0000000..96c9532 --- /dev/null +++ b/node_modules/@types/node-fetch/node_modules/@types/node/ts5.6/index.d.ts @@ -0,0 +1,92 @@ +/** + * License for programmatically and manually incorporated + * documentation aka. `JSDoc` from https://github.com/nodejs/node/tree/master/doc + * + * Copyright Node.js contributors. All rights reserved. + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to + * deal in the Software without restriction, including without limitation the + * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or + * sell copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in + * all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS + * IN THE SOFTWARE. + */ + +// NOTE: These definitions support Node.js and TypeScript 4.9 through 5.6. + +// Reference required TypeScript libs: +/// + +// TypeScript backwards-compatibility definitions: +/// + +// Definitions specific to TypeScript 4.9 through 5.6: +/// +/// + +// Definitions for Node.js modules that are not specific to any version of TypeScript: +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// diff --git a/node_modules/@types/node-fetch/node_modules/@types/node/tty.d.ts b/node_modules/@types/node-fetch/node_modules/@types/node/tty.d.ts new file mode 100644 index 0000000..f567946 --- /dev/null +++ b/node_modules/@types/node-fetch/node_modules/@types/node/tty.d.ts @@ -0,0 +1,208 @@ +/** + * The `node:tty` module provides the `tty.ReadStream` and `tty.WriteStream` classes. In most cases, it will not be necessary or possible to use this module + * directly. 
However, it can be accessed using: + * + * ```js + * import tty from 'node:tty'; + * ``` + * + * When Node.js detects that it is being run with a text terminal ("TTY") + * attached, `process.stdin` will, by default, be initialized as an instance of `tty.ReadStream` and both `process.stdout` and `process.stderr` will, by + * default, be instances of `tty.WriteStream`. The preferred method of determining + * whether Node.js is being run within a TTY context is to check that the value of + * the `process.stdout.isTTY` property is `true`: + * + * ```console + * $ node -p -e "Boolean(process.stdout.isTTY)" + * true + * $ node -p -e "Boolean(process.stdout.isTTY)" | cat + * false + * ``` + * + * In most cases, there should be little to no reason for an application to + * manually create instances of the `tty.ReadStream` and `tty.WriteStream` classes. + * @see [source](https://github.com/nodejs/node/blob/v22.x/lib/tty.js) + */ +declare module "tty" { + import * as net from "node:net"; + /** + * The `tty.isatty()` method returns `true` if the given `fd` is associated with + * a TTY and `false` if it is not, including whenever `fd` is not a non-negative + * integer. + * @since v0.5.8 + * @param fd A numeric file descriptor + */ + function isatty(fd: number): boolean; + /** + * Represents the readable side of a TTY. In normal circumstances `process.stdin` will be the only `tty.ReadStream` instance in a Node.js + * process and there should be no reason to create additional instances. + * @since v0.5.8 + */ + class ReadStream extends net.Socket { + constructor(fd: number, options?: net.SocketConstructorOpts); + /** + * A `boolean` that is `true` if the TTY is currently configured to operate as a + * raw device. + * + * This flag is always `false` when a process starts, even if the terminal is + * operating in raw mode. Its value will change with subsequent calls to `setRawMode`. + * @since v0.7.7 + */ + isRaw: boolean; + /** + * Allows configuration of `tty.ReadStream` so that it operates as a raw device. + * + * When in raw mode, input is always available character-by-character, not + * including modifiers. Additionally, all special processing of characters by the + * terminal is disabled, including echoing input + * characters. Ctrl+C will no longer cause a `SIGINT` when + * in this mode. + * @since v0.7.7 + * @param mode If `true`, configures the `tty.ReadStream` to operate as a raw device. If `false`, configures the `tty.ReadStream` to operate in its default mode. The `readStream.isRaw` + * property will be set to the resulting mode. + * @return The read stream instance. + */ + setRawMode(mode: boolean): this; + /** + * A `boolean` that is always `true` for `tty.ReadStream` instances. + * @since v0.5.8 + */ + isTTY: boolean; + } + /** + * -1 - to the left from cursor + * 0 - the entire line + * 1 - to the right from cursor + */ + type Direction = -1 | 0 | 1; + /** + * Represents the writable side of a TTY. In normal circumstances, `process.stdout` and `process.stderr` will be the only`tty.WriteStream` instances created for a Node.js process and there + * should be no reason to create additional instances. 
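A small sketch, assuming the process is attached to a terminal, of the `isTTY`/`setRawMode()` behavior described above: raw mode delivers input byte by byte and disables the usual Ctrl+C handling, so it must be handled explicitly. The key logging is illustrative only.

```ts
import tty from "node:tty";

if (process.stdin.isTTY) {
  // Raw mode: input arrives character-by-character and Ctrl+C no longer
  // raises SIGINT, so handle it by hand.
  process.stdin.setRawMode(true);
  process.stdin.on("data", (chunk: Buffer) => {
    if (chunk[0] === 0x03) { // Ctrl+C
      process.stdin.setRawMode(false);
      process.exit(0);
    }
    console.log("key:", JSON.stringify(chunk.toString()));
  });
} else {
  // stdin is piped or redirected; fd 0 is not attached to a terminal.
  console.log(tty.isatty(0)); // false
}
```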
+ * @since v0.5.8 + */ + class WriteStream extends net.Socket { + constructor(fd: number); + addListener(event: string, listener: (...args: any[]) => void): this; + addListener(event: "resize", listener: () => void): this; + emit(event: string | symbol, ...args: any[]): boolean; + emit(event: "resize"): boolean; + on(event: string, listener: (...args: any[]) => void): this; + on(event: "resize", listener: () => void): this; + once(event: string, listener: (...args: any[]) => void): this; + once(event: "resize", listener: () => void): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + prependListener(event: "resize", listener: () => void): this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + prependOnceListener(event: "resize", listener: () => void): this; + /** + * `writeStream.clearLine()` clears the current line of this `WriteStream` in a + * direction identified by `dir`. + * @since v0.7.7 + * @param callback Invoked once the operation completes. + * @return `false` if the stream wishes for the calling code to wait for the `'drain'` event to be emitted before continuing to write additional data; otherwise `true`. + */ + clearLine(dir: Direction, callback?: () => void): boolean; + /** + * `writeStream.clearScreenDown()` clears this `WriteStream` from the current + * cursor down. + * @since v0.7.7 + * @param callback Invoked once the operation completes. + * @return `false` if the stream wishes for the calling code to wait for the `'drain'` event to be emitted before continuing to write additional data; otherwise `true`. + */ + clearScreenDown(callback?: () => void): boolean; + /** + * `writeStream.cursorTo()` moves this `WriteStream`'s cursor to the specified + * position. + * @since v0.7.7 + * @param callback Invoked once the operation completes. + * @return `false` if the stream wishes for the calling code to wait for the `'drain'` event to be emitted before continuing to write additional data; otherwise `true`. + */ + cursorTo(x: number, y?: number, callback?: () => void): boolean; + cursorTo(x: number, callback: () => void): boolean; + /** + * `writeStream.moveCursor()` moves this `WriteStream`'s cursor _relative_ to its + * current position. + * @since v0.7.7 + * @param callback Invoked once the operation completes. + * @return `false` if the stream wishes for the calling code to wait for the `'drain'` event to be emitted before continuing to write additional data; otherwise `true`. + */ + moveCursor(dx: number, dy: number, callback?: () => void): boolean; + /** + * Returns: + * + * * `1` for 2, + * * `4` for 16, + * * `8` for 256, + * * `24` for 16,777,216 colors supported. + * + * Use this to determine what colors the terminal supports. Due to the nature of + * colors in terminals it is possible to either have false positives or false + * negatives. It depends on process information and the environment variables that + * may lie about what terminal is used. + * It is possible to pass in an `env` object to simulate the usage of a specific + * terminal. This can be useful to check how specific environment settings behave. + * + * To enforce a specific color support, use one of the below environment settings. + * + * * 2 colors: `FORCE_COLOR = 0` (Disables colors) + * * 16 colors: `FORCE_COLOR = 1` + * * 256 colors: `FORCE_COLOR = 2` + * * 16,777,216 colors: `FORCE_COLOR = 3` + * + * Disabling color support is also possible by using the `NO_COLOR` and `NODE_DISABLE_COLORS` environment variables. 
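A short sketch, assuming `process.stdout` is a TTY, that exercises the `WriteStream` helpers documented here: querying the advertised color depth (optionally against a simulated environment) and redrawing a single status line with the cursor methods. The progress loop is purely illustrative.

```ts
if (process.stdout.isTTY) {
  // Advertised color depth: 1, 4, 8 or 24 bits.
  console.log(`color depth: ${process.stdout.getColorDepth()} bit(s)`);
  // Simulate a specific terminal by passing an env object instead of process.env.
  console.log(process.stdout.getColorDepth({ FORCE_COLOR: "1" })); // 4

  // Redraw one status line in place using the cursor helpers.
  let ticks = 0;
  const timer = setInterval(() => {
    process.stdout.cursorTo(0);  // move to column 0 of the current row
    process.stdout.clearLine(1); // clear to the right of the cursor
    process.stdout.write(`progress: ${++ticks * 10}%`);
    if (ticks === 10) {
      clearInterval(timer);
      process.stdout.write("\n");
    }
  }, 100);
}
```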
+ * @since v9.9.0 + * @param [env=process.env] An object containing the environment variables to check. This enables simulating the usage of a specific terminal. + */ + getColorDepth(env?: object): number; + /** + * Returns `true` if the `writeStream` supports at least as many colors as provided + * in `count`. Minimum support is 2 (black and white). + * + * This has the same false positives and negatives as described in `writeStream.getColorDepth()`. + * + * ```js + * process.stdout.hasColors(); + * // Returns true or false depending on if `stdout` supports at least 16 colors. + * process.stdout.hasColors(256); + * // Returns true or false depending on if `stdout` supports at least 256 colors. + * process.stdout.hasColors({ TMUX: '1' }); + * // Returns true. + * process.stdout.hasColors(2 ** 24, { TMUX: '1' }); + * // Returns false (the environment setting pretends to support 2 ** 8 colors). + * ``` + * @since v11.13.0, v10.16.0 + * @param [count=16] The number of colors that are requested (minimum 2). + * @param [env=process.env] An object containing the environment variables to check. This enables simulating the usage of a specific terminal. + */ + hasColors(count?: number): boolean; + hasColors(env?: object): boolean; + hasColors(count: number, env?: object): boolean; + /** + * `writeStream.getWindowSize()` returns the size of the TTY + * corresponding to this `WriteStream`. The array is of the type `[numColumns, numRows]` where `numColumns` and `numRows` represent the number + * of columns and rows in the corresponding TTY. + * @since v0.7.7 + */ + getWindowSize(): [number, number]; + /** + * A `number` specifying the number of columns the TTY currently has. This property + * is updated whenever the `'resize'` event is emitted. + * @since v0.7.7 + */ + columns: number; + /** + * A `number` specifying the number of rows the TTY currently has. This property + * is updated whenever the `'resize'` event is emitted. + * @since v0.7.7 + */ + rows: number; + /** + * A `boolean` that is always `true`. + * @since v0.5.8 + */ + isTTY: boolean; + } +} +declare module "node:tty" { + export * from "tty"; +} diff --git a/node_modules/@types/node-fetch/node_modules/@types/node/url.d.ts b/node_modules/@types/node-fetch/node_modules/@types/node/url.d.ts new file mode 100644 index 0000000..72232c7 --- /dev/null +++ b/node_modules/@types/node-fetch/node_modules/@types/node/url.d.ts @@ -0,0 +1,972 @@ +/** + * The `node:url` module provides utilities for URL resolution and parsing. 
It can + * be accessed using: + * + * ```js + * import url from 'node:url'; + * ``` + * @see [source](https://github.com/nodejs/node/blob/v22.x/lib/url.js) + */ +declare module "url" { + import { Blob as NodeBlob } from "node:buffer"; + import { ClientRequestArgs } from "node:http"; + import { ParsedUrlQuery, ParsedUrlQueryInput } from "node:querystring"; + // Input to `url.format` + interface UrlObject { + auth?: string | null | undefined; + hash?: string | null | undefined; + host?: string | null | undefined; + hostname?: string | null | undefined; + href?: string | null | undefined; + pathname?: string | null | undefined; + protocol?: string | null | undefined; + search?: string | null | undefined; + slashes?: boolean | null | undefined; + port?: string | number | null | undefined; + query?: string | null | ParsedUrlQueryInput | undefined; + } + // Output of `url.parse` + interface Url { + auth: string | null; + hash: string | null; + host: string | null; + hostname: string | null; + href: string; + path: string | null; + pathname: string | null; + protocol: string | null; + search: string | null; + slashes: boolean | null; + port: string | null; + query: string | null | ParsedUrlQuery; + } + interface UrlWithParsedQuery extends Url { + query: ParsedUrlQuery; + } + interface UrlWithStringQuery extends Url { + query: string | null; + } + interface FileUrlToPathOptions { + /** + * `true` if the `path` should be return as a windows filepath, `false` for posix, and `undefined` for the system default. + * @default undefined + * @since v22.1.0 + */ + windows?: boolean | undefined; + } + interface PathToFileUrlOptions { + /** + * `true` if the `path` should be return as a windows filepath, `false` for posix, and `undefined` for the system default. + * @default undefined + * @since v22.1.0 + */ + windows?: boolean | undefined; + } + /** + * The `url.parse()` method takes a URL string, parses it, and returns a URL + * object. + * + * A `TypeError` is thrown if `urlString` is not a string. + * + * A `URIError` is thrown if the `auth` property is present but cannot be decoded. + * + * `url.parse()` uses a lenient, non-standard algorithm for parsing URL + * strings. It is prone to security issues such as [host name spoofing](https://hackerone.com/reports/678487) and incorrect handling of usernames and passwords. Do not use with untrusted + * input. CVEs are not issued for `url.parse()` vulnerabilities. Use the `WHATWG URL` API instead. + * @since v0.1.25 + * @deprecated Use the WHATWG URL API instead. + * @param urlString The URL string to parse. + * @param [parseQueryString=false] If `true`, the `query` property will always be set to an object returned by the {@link querystring} module's `parse()` method. If `false`, the `query` property + * on the returned URL object will be an unparsed, undecoded string. + * @param [slashesDenoteHost=false] If `true`, the first token after the literal string `//` and preceding the next `/` will be interpreted as the `host`. For instance, given `//foo/bar`, the + * result would be `{host: 'foo', pathname: '/bar'}` rather than `{pathname: '//foo/bar'}`. 
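A minimal sketch contrasting the legacy `url.parse()` documented above with the WHATWG `URL` API it defers to; the example URL is illustrative. Passing `true` as the second argument pre-parses the query into an object, as the `parseQueryString` parameter describes.

```ts
import url from "node:url";

// Legacy parser; with `true` the query is pre-parsed into an object.
const legacy = url.parse("https://example.org/p?x=1&y=2", true);
console.log(legacy.hostname); // example.org
console.log(legacy.query);    // { x: '1', y: '2' }

// WHATWG equivalent, preferred for untrusted input.
const modern = new URL("https://example.org/p?x=1&y=2");
console.log(modern.hostname);              // example.org
console.log(modern.searchParams.get("y")); // 2
```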
+ */ + function parse(urlString: string): UrlWithStringQuery; + function parse( + urlString: string, + parseQueryString: false | undefined, + slashesDenoteHost?: boolean, + ): UrlWithStringQuery; + function parse(urlString: string, parseQueryString: true, slashesDenoteHost?: boolean): UrlWithParsedQuery; + function parse(urlString: string, parseQueryString: boolean, slashesDenoteHost?: boolean): Url; + /** + * The `url.format()` method returns a formatted URL string derived from `urlObject`. + * + * ```js + * import url from 'node:url'; + * url.format({ + * protocol: 'https', + * hostname: 'example.com', + * pathname: '/some/path', + * query: { + * page: 1, + * format: 'json', + * }, + * }); + * + * // => 'https://example.com/some/path?page=1&format=json' + * ``` + * + * If `urlObject` is not an object or a string, `url.format()` will throw a `TypeError`. + * + * The formatting process operates as follows: + * + * * A new empty string `result` is created. + * * If `urlObject.protocol` is a string, it is appended as-is to `result`. + * * Otherwise, if `urlObject.protocol` is not `undefined` and is not a string, an `Error` is thrown. + * * For all string values of `urlObject.protocol` that _do not end_ with an ASCII + * colon (`:`) character, the literal string `:` will be appended to `result`. + * * If either of the following conditions is true, then the literal string `//` will be appended to `result`: + * * `urlObject.slashes` property is true; + * * `urlObject.protocol` begins with `http`, `https`, `ftp`, `gopher`, or `file`; + * * If the value of the `urlObject.auth` property is truthy, and either `urlObject.host` or `urlObject.hostname` are not `undefined`, the value of `urlObject.auth` will be coerced into a string + * and appended to `result` followed by the literal string `@`. + * * If the `urlObject.host` property is `undefined` then: + * * If the `urlObject.hostname` is a string, it is appended to `result`. + * * Otherwise, if `urlObject.hostname` is not `undefined` and is not a string, + * an `Error` is thrown. + * * If the `urlObject.port` property value is truthy, and `urlObject.hostname` is not `undefined`: + * * The literal string `:` is appended to `result`, and + * * The value of `urlObject.port` is coerced to a string and appended to `result`. + * * Otherwise, if the `urlObject.host` property value is truthy, the value of `urlObject.host` is coerced to a string and appended to `result`. + * * If the `urlObject.pathname` property is a string that is not an empty string: + * * If the `urlObject.pathname` _does not start_ with an ASCII forward slash + * (`/`), then the literal string `'/'` is appended to `result`. + * * The value of `urlObject.pathname` is appended to `result`. + * * Otherwise, if `urlObject.pathname` is not `undefined` and is not a string, an `Error` is thrown. + * * If the `urlObject.search` property is `undefined` and if the `urlObject.query`property is an `Object`, the literal string `?` is appended to `result` followed by the output of calling the + * `querystring` module's `stringify()` method passing the value of `urlObject.query`. + * * Otherwise, if `urlObject.search` is a string: + * * If the value of `urlObject.search` _does not start_ with the ASCII question + * mark (`?`) character, the literal string `?` is appended to `result`. + * * The value of `urlObject.search` is appended to `result`. + * * Otherwise, if `urlObject.search` is not `undefined` and is not a string, an `Error` is thrown. 
+ * * If the `urlObject.hash` property is a string: + * * If the value of `urlObject.hash` _does not start_ with the ASCII hash (`#`) + * character, the literal string `#` is appended to `result`. + * * The value of `urlObject.hash` is appended to `result`. + * * Otherwise, if the `urlObject.hash` property is not `undefined` and is not a + * string, an `Error` is thrown. + * * `result` is returned. + * @since v0.1.25 + * @legacy Use the WHATWG URL API instead. + * @param urlObject A URL object (as returned by `url.parse()` or constructed otherwise). If a string, it is converted to an object by passing it to `url.parse()`. + */ + function format(urlObject: URL, options?: URLFormatOptions): string; + /** + * The `url.format()` method returns a formatted URL string derived from `urlObject`. + * + * ```js + * import url from 'node:url'; + * url.format({ + * protocol: 'https', + * hostname: 'example.com', + * pathname: '/some/path', + * query: { + * page: 1, + * format: 'json', + * }, + * }); + * + * // => 'https://example.com/some/path?page=1&format=json' + * ``` + * + * If `urlObject` is not an object or a string, `url.format()` will throw a `TypeError`. + * + * The formatting process operates as follows: + * + * * A new empty string `result` is created. + * * If `urlObject.protocol` is a string, it is appended as-is to `result`. + * * Otherwise, if `urlObject.protocol` is not `undefined` and is not a string, an `Error` is thrown. + * * For all string values of `urlObject.protocol` that _do not end_ with an ASCII + * colon (`:`) character, the literal string `:` will be appended to `result`. + * * If either of the following conditions is true, then the literal string `//` will be appended to `result`: + * * `urlObject.slashes` property is true; + * * `urlObject.protocol` begins with `http`, `https`, `ftp`, `gopher`, or `file`; + * * If the value of the `urlObject.auth` property is truthy, and either `urlObject.host` or `urlObject.hostname` are not `undefined`, the value of `urlObject.auth` will be coerced into a string + * and appended to `result` followed by the literal string `@`. + * * If the `urlObject.host` property is `undefined` then: + * * If the `urlObject.hostname` is a string, it is appended to `result`. + * * Otherwise, if `urlObject.hostname` is not `undefined` and is not a string, + * an `Error` is thrown. + * * If the `urlObject.port` property value is truthy, and `urlObject.hostname` is not `undefined`: + * * The literal string `:` is appended to `result`, and + * * The value of `urlObject.port` is coerced to a string and appended to `result`. + * * Otherwise, if the `urlObject.host` property value is truthy, the value of `urlObject.host` is coerced to a string and appended to `result`. + * * If the `urlObject.pathname` property is a string that is not an empty string: + * * If the `urlObject.pathname` _does not start_ with an ASCII forward slash + * (`/`), then the literal string `'/'` is appended to `result`. + * * The value of `urlObject.pathname` is appended to `result`. + * * Otherwise, if `urlObject.pathname` is not `undefined` and is not a string, an `Error` is thrown. + * * If the `urlObject.search` property is `undefined` and if the `urlObject.query`property is an `Object`, the literal string `?` is appended to `result` followed by the output of calling the + * `querystring` module's `stringify()` method passing the value of `urlObject.query`. 
+ * * Otherwise, if `urlObject.search` is a string: + * * If the value of `urlObject.search` _does not start_ with the ASCII question + * mark (`?`) character, the literal string `?` is appended to `result`. + * * The value of `urlObject.search` is appended to `result`. + * * Otherwise, if `urlObject.search` is not `undefined` and is not a string, an `Error` is thrown. + * * If the `urlObject.hash` property is a string: + * * If the value of `urlObject.hash` _does not start_ with the ASCII hash (`#`) + * character, the literal string `#` is appended to `result`. + * * The value of `urlObject.hash` is appended to `result`. + * * Otherwise, if the `urlObject.hash` property is not `undefined` and is not a + * string, an `Error` is thrown. + * * `result` is returned. + * @since v0.1.25 + * @legacy Use the WHATWG URL API instead. + * @param urlObject A URL object (as returned by `url.parse()` or constructed otherwise). If a string, it is converted to an object by passing it to `url.parse()`. + */ + function format(urlObject: UrlObject | string): string; + /** + * The `url.resolve()` method resolves a target URL relative to a base URL in a + * manner similar to that of a web browser resolving an anchor tag. + * + * ```js + * import url from 'node:url'; + * url.resolve('/one/two/three', 'four'); // '/one/two/four' + * url.resolve('http://example.com/', '/one'); // 'http://example.com/one' + * url.resolve('http://example.com/one', '/two'); // 'http://example.com/two' + * ``` + * + * To achieve the same result using the WHATWG URL API: + * + * ```js + * function resolve(from, to) { + * const resolvedUrl = new URL(to, new URL(from, 'resolve://')); + * if (resolvedUrl.protocol === 'resolve:') { + * // `from` is a relative URL. + * const { pathname, search, hash } = resolvedUrl; + * return pathname + search + hash; + * } + * return resolvedUrl.toString(); + * } + * + * resolve('/one/two/three', 'four'); // '/one/two/four' + * resolve('http://example.com/', '/one'); // 'http://example.com/one' + * resolve('http://example.com/one', '/two'); // 'http://example.com/two' + * ``` + * @since v0.1.25 + * @legacy Use the WHATWG URL API instead. + * @param from The base URL to use if `to` is a relative URL. + * @param to The target URL to resolve. + */ + function resolve(from: string, to: string): string; + /** + * Returns the [Punycode](https://tools.ietf.org/html/rfc5891#section-4.4) ASCII serialization of the `domain`. If `domain` is an + * invalid domain, the empty string is returned. + * + * It performs the inverse operation to {@link domainToUnicode}. + * + * ```js + * import url from 'node:url'; + * + * console.log(url.domainToASCII('español.com')); + * // Prints xn--espaol-zwa.com + * console.log(url.domainToASCII('中文.com')); + * // Prints xn--fiq228c.com + * console.log(url.domainToASCII('xn--iñvalid.com')); + * // Prints an empty string + * ``` + * @since v7.4.0, v6.13.0 + */ + function domainToASCII(domain: string): string; + /** + * Returns the Unicode serialization of the `domain`. If `domain` is an invalid + * domain, the empty string is returned. + * + * It performs the inverse operation to {@link domainToASCII}. 
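A compact sketch of the legacy `url.format()` and `url.resolve()` helpers whose rules are spelled out above, restating the documented behavior; the WHATWG `URL` API remains the recommended path for new code.

```ts
import url from "node:url";

console.log(url.format({
  protocol: "https",
  hostname: "example.com",
  pathname: "/some/path",
  query: { page: 1, format: "json" },
}));
// Prints: https://example.com/some/path?page=1&format=json

console.log(url.resolve("http://example.com/one", "/two"));
// Prints: http://example.com/two
```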
+ * + * ```js + * import url from 'node:url'; + * + * console.log(url.domainToUnicode('xn--espaol-zwa.com')); + * // Prints español.com + * console.log(url.domainToUnicode('xn--fiq228c.com')); + * // Prints 中文.com + * console.log(url.domainToUnicode('xn--iñvalid.com')); + * // Prints an empty string + * ``` + * @since v7.4.0, v6.13.0 + */ + function domainToUnicode(domain: string): string; + /** + * This function ensures the correct decodings of percent-encoded characters as + * well as ensuring a cross-platform valid absolute path string. + * + * ```js + * import { fileURLToPath } from 'node:url'; + * + * const __filename = fileURLToPath(import.meta.url); + * + * new URL('file:///C:/path/').pathname; // Incorrect: /C:/path/ + * fileURLToPath('file:///C:/path/'); // Correct: C:\path\ (Windows) + * + * new URL('file://nas/foo.txt').pathname; // Incorrect: /foo.txt + * fileURLToPath('file://nas/foo.txt'); // Correct: \\nas\foo.txt (Windows) + * + * new URL('file:///你好.txt').pathname; // Incorrect: /%E4%BD%A0%E5%A5%BD.txt + * fileURLToPath('file:///你好.txt'); // Correct: /你好.txt (POSIX) + * + * new URL('file:///hello world').pathname; // Incorrect: /hello%20world + * fileURLToPath('file:///hello world'); // Correct: /hello world (POSIX) + * ``` + * @since v10.12.0 + * @param url The file URL string or URL object to convert to a path. + * @return The fully-resolved platform-specific Node.js file path. + */ + function fileURLToPath(url: string | URL, options?: FileUrlToPathOptions): string; + /** + * This function ensures that `path` is resolved absolutely, and that the URL + * control characters are correctly encoded when converting into a File URL. + * + * ```js + * import { pathToFileURL } from 'node:url'; + * + * new URL('/foo#1', 'file:'); // Incorrect: file:///foo#1 + * pathToFileURL('/foo#1'); // Correct: file:///foo%231 (POSIX) + * + * new URL('/some/path%.c', 'file:'); // Incorrect: file:///some/path%.c + * pathToFileURL('/some/path%.c'); // Correct: file:///some/path%25.c (POSIX) + * ``` + * @since v10.12.0 + * @param path The path to convert to a File URL. + * @return The file URL object. + */ + function pathToFileURL(path: string, options?: PathToFileUrlOptions): URL; + /** + * This utility function converts a URL object into an ordinary options object as + * expected by the `http.request()` and `https.request()` APIs. + * + * ```js + * import { urlToHttpOptions } from 'node:url'; + * const myURL = new URL('https://a:b@測試?abc#foo'); + * + * console.log(urlToHttpOptions(myURL)); + * /* + * { + * protocol: 'https:', + * hostname: 'xn--g6w251d', + * hash: '#foo', + * search: '?abc', + * pathname: '/', + * path: '/?abc', + * href: 'https://a:b@xn--g6w251d/?abc#foo', + * auth: 'a:b' + * } + * + * ``` + * @since v15.7.0, v14.18.0 + * @param url The `WHATWG URL` object to convert to an options object. + * @return Options object + */ + function urlToHttpOptions(url: URL): ClientRequestArgs; + interface URLFormatOptions { + /** + * `true` if the serialized URL string should include the username and password, `false` otherwise. + * @default true + */ + auth?: boolean | undefined; + /** + * `true` if the serialized URL string should include the fragment, `false` otherwise. + * @default true + */ + fragment?: boolean | undefined; + /** + * `true` if the serialized URL string should include the search query, `false` otherwise. 
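A small sketch of the `pathToFileURL()`/`fileURLToPath()` round trip described above, which keeps percent-encoding and platform separators correct. The path is a hypothetical POSIX example; on Windows the output uses drive letters and backslashes.

```ts
import { fileURLToPath, pathToFileURL } from "node:url";

// Hypothetical POSIX path used only for illustration.
const asUrl = pathToFileURL("/tmp/hello world.txt");
console.log(asUrl.href); // file:///tmp/hello%20world.txt

const backToPath = fileURLToPath(asUrl);
console.log(backToPath); // /tmp/hello world.txt
```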
+ * @default true + */ + search?: boolean | undefined; + /** + * `true` if Unicode characters appearing in the host component of the URL string should be encoded directly as opposed to + * being Punycode encoded. + * @default false + */ + unicode?: boolean | undefined; + } + /** + * Browser-compatible `URL` class, implemented by following the WHATWG URL + * Standard. [Examples of parsed URLs](https://url.spec.whatwg.org/#example-url-parsing) may be found in the Standard itself. + * The `URL` class is also available on the global object. + * + * In accordance with browser conventions, all properties of `URL` objects + * are implemented as getters and setters on the class prototype, rather than as + * data properties on the object itself. Thus, unlike `legacy urlObject`s, + * using the `delete` keyword on any properties of `URL` objects (e.g. `delete myURL.protocol`, `delete myURL.pathname`, etc) has no effect but will still + * return `true`. + * @since v7.0.0, v6.13.0 + */ + class URL { + /** + * Creates a `'blob:nodedata:...'` URL string that represents the given `Blob` object and can be used to retrieve the `Blob` later. + * + * ```js + * import { + * Blob, + * resolveObjectURL, + * } from 'node:buffer'; + * + * const blob = new Blob(['hello']); + * const id = URL.createObjectURL(blob); + * + * // later... + * + * const otherBlob = resolveObjectURL(id); + * console.log(otherBlob.size); + * ``` + * + * The data stored by the registered `Blob` will be retained in memory until `URL.revokeObjectURL()` is called to remove it. + * + * `Blob` objects are registered within the current thread. If using Worker + * Threads, `Blob` objects registered within one Worker will not be available + * to other workers or the main thread. + * @since v16.7.0 + * @experimental + */ + static createObjectURL(blob: NodeBlob): string; + /** + * Removes the stored `Blob` identified by the given ID. Attempting to revoke a + * ID that isn't registered will silently fail. + * @since v16.7.0 + * @experimental + * @param id A `'blob:nodedata:...` URL string returned by a prior call to `URL.createObjectURL()`. + */ + static revokeObjectURL(id: string): void; + /** + * Checks if an `input` relative to the `base` can be parsed to a `URL`. + * + * ```js + * const isValid = URL.canParse('/foo', 'https://example.org/'); // true + * + * const isNotValid = URL.canParse('/foo'); // false + * ``` + * @since v19.9.0 + * @param input The absolute or relative input URL to parse. If `input` is relative, then `base` is required. If `input` is absolute, the `base` is ignored. If `input` is not a string, it is + * `converted to a string` first. + * @param base The base URL to resolve against if the `input` is not absolute. If `base` is not a string, it is `converted to a string` first. + */ + static canParse(input: string, base?: string): boolean; + /** + * Parses a string as a URL. If `base` is provided, it will be used as the base URL for the purpose of resolving non-absolute `input` URLs. + * Returns `null` if `input` is not a valid. + * @param input The absolute or relative input URL to parse. If `input` is relative, then `base` is required. If `input` is absolute, the `base` is ignored. If `input` is not a string, it is + * `converted to a string` first. + * @param base The base URL to resolve against if the `input` is not absolute. If `base` is not a string, it is `converted to a string` first. 
+ * @since v22.1.0 + */ + static parse(input: string, base?: string): URL | null; + constructor(input: string | { toString: () => string }, base?: string | URL); + /** + * Gets and sets the fragment portion of the URL. + * + * ```js + * const myURL = new URL('https://example.org/foo#bar'); + * console.log(myURL.hash); + * // Prints #bar + * + * myURL.hash = 'baz'; + * console.log(myURL.href); + * // Prints https://example.org/foo#baz + * ``` + * + * Invalid URL characters included in the value assigned to the `hash` property + * are `percent-encoded`. The selection of which characters to + * percent-encode may vary somewhat from what the {@link parse} and {@link format} methods would produce. + */ + hash: string; + /** + * Gets and sets the host portion of the URL. + * + * ```js + * const myURL = new URL('https://example.org:81/foo'); + * console.log(myURL.host); + * // Prints example.org:81 + * + * myURL.host = 'example.com:82'; + * console.log(myURL.href); + * // Prints https://example.com:82/foo + * ``` + * + * Invalid host values assigned to the `host` property are ignored. + */ + host: string; + /** + * Gets and sets the host name portion of the URL. The key difference between`url.host` and `url.hostname` is that `url.hostname` does _not_ include the + * port. + * + * ```js + * const myURL = new URL('https://example.org:81/foo'); + * console.log(myURL.hostname); + * // Prints example.org + * + * // Setting the hostname does not change the port + * myURL.hostname = 'example.com'; + * console.log(myURL.href); + * // Prints https://example.com:81/foo + * + * // Use myURL.host to change the hostname and port + * myURL.host = 'example.org:82'; + * console.log(myURL.href); + * // Prints https://example.org:82/foo + * ``` + * + * Invalid host name values assigned to the `hostname` property are ignored. + */ + hostname: string; + /** + * Gets and sets the serialized URL. + * + * ```js + * const myURL = new URL('https://example.org/foo'); + * console.log(myURL.href); + * // Prints https://example.org/foo + * + * myURL.href = 'https://example.com/bar'; + * console.log(myURL.href); + * // Prints https://example.com/bar + * ``` + * + * Getting the value of the `href` property is equivalent to calling {@link toString}. + * + * Setting the value of this property to a new value is equivalent to creating a + * new `URL` object using `new URL(value)`. Each of the `URL` object's properties will be modified. + * + * If the value assigned to the `href` property is not a valid URL, a `TypeError` will be thrown. + */ + href: string; + /** + * Gets the read-only serialization of the URL's origin. + * + * ```js + * const myURL = new URL('https://example.org/foo/bar?baz'); + * console.log(myURL.origin); + * // Prints https://example.org + * ``` + * + * ```js + * const idnURL = new URL('https://測試'); + * console.log(idnURL.origin); + * // Prints https://xn--g6w251d + * + * console.log(idnURL.hostname); + * // Prints xn--g6w251d + * ``` + */ + readonly origin: string; + /** + * Gets and sets the password portion of the URL. + * + * ```js + * const myURL = new URL('https://abc:xyz@example.com'); + * console.log(myURL.password); + * // Prints xyz + * + * myURL.password = '123'; + * console.log(myURL.href); + * // Prints https://abc:123@example.com/ + * ``` + * + * Invalid URL characters included in the value assigned to the `password` property + * are `percent-encoded`. The selection of which characters to + * percent-encode may vary somewhat from what the {@link parse} and {@link format} methods would produce. 
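A brief sketch of the getter/setter behavior documented for these `URL` properties: `host` includes the port while `hostname` does not, and values assigned to `hash` are percent-encoded where needed. The credentials and fragment are illustrative.

```ts
const myURL = new URL("https://user:secret@example.org:8080/foo#bar");

console.log(myURL.host);     // example.org:8080
console.log(myURL.hostname); // example.org
console.log(myURL.origin);   // https://example.org:8080
console.log(myURL.password); // secret

myURL.hash = "start here";   // the space is percent-encoded on assignment
console.log(myURL.href);     // https://user:secret@example.org:8080/foo#start%20here
```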
+ */ + password: string; + /** + * Gets and sets the path portion of the URL. + * + * ```js + * const myURL = new URL('https://example.org/abc/xyz?123'); + * console.log(myURL.pathname); + * // Prints /abc/xyz + * + * myURL.pathname = '/abcdef'; + * console.log(myURL.href); + * // Prints https://example.org/abcdef?123 + * ``` + * + * Invalid URL characters included in the value assigned to the `pathname` property are `percent-encoded`. The selection of which characters + * to percent-encode may vary somewhat from what the {@link parse} and {@link format} methods would produce. + */ + pathname: string; + /** + * Gets and sets the port portion of the URL. + * + * The port value may be a number or a string containing a number in the range `0` to `65535` (inclusive). Setting the value to the default port of the `URL` objects given `protocol` will + * result in the `port` value becoming + * the empty string (`''`). + * + * The port value can be an empty string in which case the port depends on + * the protocol/scheme: + * + * + * + * Upon assigning a value to the port, the value will first be converted to a + * string using `.toString()`. + * + * If that string is invalid but it begins with a number, the leading number is + * assigned to `port`. + * If the number lies outside the range denoted above, it is ignored. + * + * ```js + * const myURL = new URL('https://example.org:8888'); + * console.log(myURL.port); + * // Prints 8888 + * + * // Default ports are automatically transformed to the empty string + * // (HTTPS protocol's default port is 443) + * myURL.port = '443'; + * console.log(myURL.port); + * // Prints the empty string + * console.log(myURL.href); + * // Prints https://example.org/ + * + * myURL.port = 1234; + * console.log(myURL.port); + * // Prints 1234 + * console.log(myURL.href); + * // Prints https://example.org:1234/ + * + * // Completely invalid port strings are ignored + * myURL.port = 'abcd'; + * console.log(myURL.port); + * // Prints 1234 + * + * // Leading numbers are treated as a port number + * myURL.port = '5678abcd'; + * console.log(myURL.port); + * // Prints 5678 + * + * // Non-integers are truncated + * myURL.port = 1234.5678; + * console.log(myURL.port); + * // Prints 1234 + * + * // Out-of-range numbers which are not represented in scientific notation + * // will be ignored. + * myURL.port = 1e10; // 10000000000, will be range-checked as described below + * console.log(myURL.port); + * // Prints 1234 + * ``` + * + * Numbers which contain a decimal point, + * such as floating-point numbers or numbers in scientific notation, + * are not an exception to this rule. + * Leading numbers up to the decimal point will be set as the URL's port, + * assuming they are valid: + * + * ```js + * myURL.port = 4.567e21; + * console.log(myURL.port); + * // Prints 4 (because it is the leading number in the string '4.567e21') + * ``` + */ + port: string; + /** + * Gets and sets the protocol portion of the URL. + * + * ```js + * const myURL = new URL('https://example.org'); + * console.log(myURL.protocol); + * // Prints https: + * + * myURL.protocol = 'ftp'; + * console.log(myURL.href); + * // Prints ftp://example.org/ + * ``` + * + * Invalid URL protocol values assigned to the `protocol` property are ignored. + */ + protocol: string; + /** + * Gets and sets the serialized query portion of the URL. 
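A minimal sketch restating the `port` coercion rules listed above with string assignments, since the property is typed as a string: default ports collapse to the empty string, leading digits win, and wholly invalid values are ignored.

```ts
const u = new URL("https://example.org:8888");
console.log(u.port); // 8888

u.port = "443";       // default port for https: collapses to ''
console.log(u.port);  // '' (empty string)

u.port = "1234.5678"; // digits before the decimal point are kept
console.log(u.port);  // 1234

u.port = "abcd";      // completely invalid string: ignored
console.log(u.port);  // 1234

u.port = "5678abcd";  // leading digits are used
console.log(u.port);  // 5678
```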
+ * + * ```js + * const myURL = new URL('https://example.org/abc?123'); + * console.log(myURL.search); + * // Prints ?123 + * + * myURL.search = 'abc=xyz'; + * console.log(myURL.href); + * // Prints https://example.org/abc?abc=xyz + * ``` + * + * Any invalid URL characters appearing in the value assigned the `search` property will be `percent-encoded`. The selection of which + * characters to percent-encode may vary somewhat from what the {@link parse} and {@link format} methods would produce. + */ + search: string; + /** + * Gets the `URLSearchParams` object representing the query parameters of the + * URL. This property is read-only but the `URLSearchParams` object it provides + * can be used to mutate the URL instance; to replace the entirety of query + * parameters of the URL, use the {@link search} setter. See `URLSearchParams` documentation for details. + * + * Use care when using `.searchParams` to modify the `URL` because, + * per the WHATWG specification, the `URLSearchParams` object uses + * different rules to determine which characters to percent-encode. For + * instance, the `URL` object will not percent encode the ASCII tilde (`~`) + * character, while `URLSearchParams` will always encode it: + * + * ```js + * const myURL = new URL('https://example.org/abc?foo=~bar'); + * + * console.log(myURL.search); // prints ?foo=~bar + * + * // Modify the URL via searchParams... + * myURL.searchParams.sort(); + * + * console.log(myURL.search); // prints ?foo=%7Ebar + * ``` + */ + readonly searchParams: URLSearchParams; + /** + * Gets and sets the username portion of the URL. + * + * ```js + * const myURL = new URL('https://abc:xyz@example.com'); + * console.log(myURL.username); + * // Prints abc + * + * myURL.username = '123'; + * console.log(myURL.href); + * // Prints https://123:xyz@example.com/ + * ``` + * + * Any invalid URL characters appearing in the value assigned the `username` property will be `percent-encoded`. The selection of which + * characters to percent-encode may vary somewhat from what the {@link parse} and {@link format} methods would produce. + */ + username: string; + /** + * The `toString()` method on the `URL` object returns the serialized URL. The + * value returned is equivalent to that of {@link href} and {@link toJSON}. + */ + toString(): string; + /** + * The `toJSON()` method on the `URL` object returns the serialized URL. The + * value returned is equivalent to that of {@link href} and {@link toString}. + * + * This method is automatically called when an `URL` object is serialized + * with [`JSON.stringify()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify). + * + * ```js + * const myURLs = [ + * new URL('https://www.example.com'), + * new URL('https://test.example.org'), + * ]; + * console.log(JSON.stringify(myURLs)); + * // Prints ["https://www.example.com/","https://test.example.org/"] + * ``` + */ + toJSON(): string; + } + interface URLSearchParamsIterator extends NodeJS.Iterator { + [Symbol.iterator](): URLSearchParamsIterator; + } + /** + * The `URLSearchParams` API provides read and write access to the query of a `URL`. The `URLSearchParams` class can also be used standalone with one of the + * four following constructors. + * The `URLSearchParams` class is also available on the global object. 
+ * + * The WHATWG `URLSearchParams` interface and the `querystring` module have + * similar purpose, but the purpose of the `querystring` module is more + * general, as it allows the customization of delimiter characters (`&` and `=`). + * On the other hand, this API is designed purely for URL query strings. + * + * ```js + * const myURL = new URL('https://example.org/?abc=123'); + * console.log(myURL.searchParams.get('abc')); + * // Prints 123 + * + * myURL.searchParams.append('abc', 'xyz'); + * console.log(myURL.href); + * // Prints https://example.org/?abc=123&abc=xyz + * + * myURL.searchParams.delete('abc'); + * myURL.searchParams.set('a', 'b'); + * console.log(myURL.href); + * // Prints https://example.org/?a=b + * + * const newSearchParams = new URLSearchParams(myURL.searchParams); + * // The above is equivalent to + * // const newSearchParams = new URLSearchParams(myURL.search); + * + * newSearchParams.append('a', 'c'); + * console.log(myURL.href); + * // Prints https://example.org/?a=b + * console.log(newSearchParams.toString()); + * // Prints a=b&a=c + * + * // newSearchParams.toString() is implicitly called + * myURL.search = newSearchParams; + * console.log(myURL.href); + * // Prints https://example.org/?a=b&a=c + * newSearchParams.delete('a'); + * console.log(myURL.href); + * // Prints https://example.org/?a=b&a=c + * ``` + * @since v7.5.0, v6.13.0 + */ + class URLSearchParams implements Iterable<[string, string]> { + constructor( + init?: + | URLSearchParams + | string + | Record + | Iterable<[string, string]> + | ReadonlyArray<[string, string]>, + ); + /** + * Append a new name-value pair to the query string. + */ + append(name: string, value: string): void; + /** + * If `value` is provided, removes all name-value pairs + * where name is `name` and value is `value`. + * + * If `value` is not provided, removes all name-value pairs whose name is `name`. + */ + delete(name: string, value?: string): void; + /** + * Returns an ES6 `Iterator` over each of the name-value pairs in the query. + * Each item of the iterator is a JavaScript `Array`. The first item of the `Array` is the `name`, the second item of the `Array` is the `value`. + * + * Alias for `urlSearchParams[@@iterator]()`. + */ + entries(): URLSearchParamsIterator<[string, string]>; + /** + * Iterates over each name-value pair in the query and invokes the given function. + * + * ```js + * const myURL = new URL('https://example.org/?a=b&c=d'); + * myURL.searchParams.forEach((value, name, searchParams) => { + * console.log(name, value, myURL.searchParams === searchParams); + * }); + * // Prints: + * // a b true + * // c d true + * ``` + * @param fn Invoked for each name-value pair in the query + * @param thisArg To be used as `this` value for when `fn` is called + */ + forEach( + fn: (this: TThis, value: string, name: string, searchParams: URLSearchParams) => void, + thisArg?: TThis, + ): void; + /** + * Returns the value of the first name-value pair whose name is `name`. If there + * are no such pairs, `null` is returned. + * @return or `null` if there is no name-value pair with the given `name`. + */ + get(name: string): string | null; + /** + * Returns the values of all name-value pairs whose name is `name`. If there are + * no such pairs, an empty array is returned. + */ + getAll(name: string): string[]; + /** + * Checks if the `URLSearchParams` object contains key-value pair(s) based on `name` and an optional `value` argument. 
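A short sketch exercising the `URLSearchParams` read/write methods documented here; the parameter names are illustrative, and the name-plus-value forms of `has()` and `delete()` assume a Node.js version that accepts the optional `value` argument shown in these signatures.

```ts
const params = new URLSearchParams("tag=node&tag=url");

params.append("page", "2");
console.log(params.get("tag"));        // node  (first match only)
console.log(params.getAll("tag"));     // [ 'node', 'url' ]
console.log(params.has("tag", "url")); // true  (name + value form)

params.delete("tag", "node");          // removes only the tag=node pair
console.log(params.toString());        // tag=url&page=2
```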
+ * + * If `value` is provided, returns `true` when name-value pair with + * same `name` and `value` exists. + * + * If `value` is not provided, returns `true` if there is at least one name-value + * pair whose name is `name`. + */ + has(name: string, value?: string): boolean; + /** + * Returns an ES6 `Iterator` over the names of each name-value pair. + * + * ```js + * const params = new URLSearchParams('foo=bar&foo=baz'); + * for (const name of params.keys()) { + * console.log(name); + * } + * // Prints: + * // foo + * // foo + * ``` + */ + keys(): URLSearchParamsIterator; + /** + * Sets the value in the `URLSearchParams` object associated with `name` to `value`. If there are any pre-existing name-value pairs whose names are `name`, + * set the first such pair's value to `value` and remove all others. If not, + * append the name-value pair to the query string. + * + * ```js + * const params = new URLSearchParams(); + * params.append('foo', 'bar'); + * params.append('foo', 'baz'); + * params.append('abc', 'def'); + * console.log(params.toString()); + * // Prints foo=bar&foo=baz&abc=def + * + * params.set('foo', 'def'); + * params.set('xyz', 'opq'); + * console.log(params.toString()); + * // Prints foo=def&abc=def&xyz=opq + * ``` + */ + set(name: string, value: string): void; + /** + * The total number of parameter entries. + * @since v19.8.0 + */ + readonly size: number; + /** + * Sort all existing name-value pairs in-place by their names. Sorting is done + * with a [stable sorting algorithm](https://en.wikipedia.org/wiki/Sorting_algorithm#Stability), so relative order between name-value pairs + * with the same name is preserved. + * + * This method can be used, in particular, to increase cache hits. + * + * ```js + * const params = new URLSearchParams('query[]=abc&type=search&query[]=123'); + * params.sort(); + * console.log(params.toString()); + * // Prints query%5B%5D=abc&query%5B%5D=123&type=search + * ``` + * @since v7.7.0, v6.13.0 + */ + sort(): void; + /** + * Returns the search parameters serialized as a string, with characters + * percent-encoded where necessary. + */ + toString(): string; + /** + * Returns an ES6 `Iterator` over the values of each name-value pair. + */ + values(): URLSearchParamsIterator; + [Symbol.iterator](): URLSearchParamsIterator<[string, string]>; + } + import { URL as _URL, URLSearchParams as _URLSearchParams } from "url"; + global { + interface URLSearchParams extends _URLSearchParams {} + interface URL extends _URL {} + interface Global { + URL: typeof _URL; + URLSearchParams: typeof _URLSearchParams; + } + /** + * `URL` class is a global reference for `import { URL } from 'url'` + * https://nodejs.org/api/url.html#the-whatwg-url-api + * @since v10.0.0 + */ + var URL: typeof globalThis extends { + onmessage: any; + URL: infer T; + } ? T + : typeof _URL; + /** + * `URLSearchParams` class is a global reference for `import { URLSearchParams } from 'node:url'` + * https://nodejs.org/api/url.html#class-urlsearchparams + * @since v10.0.0 + */ + var URLSearchParams: typeof globalThis extends { + onmessage: any; + URLSearchParams: infer T; + } ? 
T + : typeof _URLSearchParams; + } +} +declare module "node:url" { + export * from "url"; +} diff --git a/node_modules/@types/node-fetch/node_modules/@types/node/util.d.ts b/node_modules/@types/node-fetch/node_modules/@types/node/util.d.ts new file mode 100644 index 0000000..4f7d29b --- /dev/null +++ b/node_modules/@types/node-fetch/node_modules/@types/node/util.d.ts @@ -0,0 +1,2371 @@ +/** + * The `node:util` module supports the needs of Node.js internal APIs. Many of the + * utilities are useful for application and module developers as well. To access + * it: + * + * ```js + * import util from 'node:util'; + * ``` + * @see [source](https://github.com/nodejs/node/blob/v22.x/lib/util.js) + */ +declare module "util" { + import * as types from "node:util/types"; + export interface InspectOptions { + /** + * If `true`, object's non-enumerable symbols and properties are included in the formatted result. + * `WeakMap` and `WeakSet` entries are also included as well as user defined prototype properties (excluding method properties). + * @default false + */ + showHidden?: boolean | undefined; + /** + * Specifies the number of times to recurse while formatting object. + * This is useful for inspecting large objects. + * To recurse up to the maximum call stack size pass `Infinity` or `null`. + * @default 2 + */ + depth?: number | null | undefined; + /** + * If `true`, the output is styled with ANSI color codes. Colors are customizable. + */ + colors?: boolean | undefined; + /** + * If `false`, `[util.inspect.custom](depth, opts, inspect)` functions are not invoked. + * @default true + */ + customInspect?: boolean | undefined; + /** + * If `true`, `Proxy` inspection includes the target and handler objects. + * @default false + */ + showProxy?: boolean | undefined; + /** + * Specifies the maximum number of `Array`, `TypedArray`, `WeakMap`, and `WeakSet` elements + * to include when formatting. Set to `null` or `Infinity` to show all elements. + * Set to `0` or negative to show no elements. + * @default 100 + */ + maxArrayLength?: number | null | undefined; + /** + * Specifies the maximum number of characters to + * include when formatting. Set to `null` or `Infinity` to show all elements. + * Set to `0` or negative to show no characters. + * @default 10000 + */ + maxStringLength?: number | null | undefined; + /** + * The length at which input values are split across multiple lines. + * Set to `Infinity` to format the input as a single line + * (in combination with `compact` set to `true` or any number >= `1`). + * @default 80 + */ + breakLength?: number | undefined; + /** + * Setting this to `false` causes each object key + * to be displayed on a new line. It will also add new lines to text that is + * longer than `breakLength`. If set to a number, the most `n` inner elements + * are united on a single line as long as all properties fit into + * `breakLength`. Short array elements are also grouped together. Note that no + * text will be reduced below 16 characters, no matter the `breakLength` size. + * For more information, see the example below. + * @default true + */ + compact?: boolean | number | undefined; + /** + * If set to `true` or a function, all properties of an object, and `Set` and `Map` + * entries are sorted in the resulting string. + * If set to `true` the default sort is used. + * If set to a function, it is used as a compare function. + */ + sorted?: boolean | ((a: string, b: string) => number) | undefined; + /** + * If set to `true`, getters are going to be + * inspected as well. 
If set to `'get'` only getters without setter are going + * to be inspected. If set to `'set'` only getters having a corresponding + * setter are going to be inspected. This might cause side effects depending on + * the getter function. + * @default false + */ + getters?: "get" | "set" | boolean | undefined; + /** + * If set to `true`, an underscore is used to separate every three digits in all bigints and numbers. + * @default false + */ + numericSeparator?: boolean | undefined; + } + export type Style = + | "special" + | "number" + | "bigint" + | "boolean" + | "undefined" + | "null" + | "string" + | "symbol" + | "date" + | "regexp" + | "module"; + export type CustomInspectFunction = (depth: number, options: InspectOptionsStylized) => any; // TODO: , inspect: inspect + export interface InspectOptionsStylized extends InspectOptions { + stylize(text: string, styleType: Style): string; + } + export interface StacktraceObject { + /** + * Returns the name of the function associated with this stack frame. + */ + functionName: string; + /** + * Returns the name of the resource that contains the script for the + * function for this StackFrame. + */ + scriptName: string; + /** + * Returns the number, 1-based, of the line for the associate function call. + */ + lineNumber: number; + /** + * Returns the 1-based column offset on the line for the associated function call. + */ + column: number; + } + /** + * The `util.format()` method returns a formatted string using the first argument + * as a `printf`-like format string which can contain zero or more format + * specifiers. Each specifier is replaced with the converted value from the + * corresponding argument. Supported specifiers are: + * + * If a specifier does not have a corresponding argument, it is not replaced: + * + * ```js + * util.format('%s:%s', 'foo'); + * // Returns: 'foo:%s' + * ``` + * + * Values that are not part of the format string are formatted using `util.inspect()` if their type is not `string`. + * + * If there are more arguments passed to the `util.format()` method than the + * number of specifiers, the extra arguments are concatenated to the returned + * string, separated by spaces: + * + * ```js + * util.format('%s:%s', 'foo', 'bar', 'baz'); + * // Returns: 'foo:bar baz' + * ``` + * + * If the first argument does not contain a valid format specifier, `util.format()` returns a string that is the concatenation of all arguments separated by spaces: + * + * ```js + * util.format(1, 2, 3); + * // Returns: '1 2 3' + * ``` + * + * If only one argument is passed to `util.format()`, it is returned as it is + * without any formatting: + * + * ```js + * util.format('%% %s'); + * // Returns: '%% %s' + * ``` + * + * `util.format()` is a synchronous method that is intended as a debugging tool. + * Some input values can have a significant performance overhead that can block the + * event loop. Use this function with care and never in a hot code path. + * @since v0.5.3 + * @param format A `printf`-like format string. + */ + export function format(format?: any, ...param: any[]): string; + /** + * This function is identical to {@link format}, except in that it takes + * an `inspectOptions` argument which specifies options that are passed along to {@link inspect}. + * + * ```js + * util.formatWithOptions({ colors: true }, 'See object %O', { foo: 42 }); + * // Returns 'See object { foo: 42 }', where `42` is colored as a number + * // when printed to a terminal. 
+ * ``` + * @since v10.0.0 + */ + export function formatWithOptions(inspectOptions: InspectOptions, format?: any, ...param: any[]): string; + /** + * Returns an array of stacktrace objects containing the stack of + * the caller function. + * + * ```js + * const util = require('node:util'); + * + * function exampleFunction() { + * const callSites = util.getCallSite(); + * + * console.log('Call Sites:'); + * callSites.forEach((callSite, index) => { + * console.log(`CallSite ${index + 1}:`); + * console.log(`Function Name: ${callSite.functionName}`); + * console.log(`Script Name: ${callSite.scriptName}`); + * console.log(`Line Number: ${callSite.lineNumber}`); + * console.log(`Column Number: ${callSite.column}`); + * }); + * // CallSite 1: + * // Function Name: exampleFunction + * // Script Name: /home/example.js + * // Line Number: 5 + * // Column Number: 26 + * + * // CallSite 2: + * // Function Name: anotherFunction + * // Script Name: /home/example.js + * // Line Number: 22 + * // Column Number: 3 + * + * // ... + * } + * + * // A function to simulate another stack layer + * function anotherFunction() { + * exampleFunction(); + * } + * + * anotherFunction(); + * ``` + * @param frames Number of frames returned in the stacktrace. + * **Default:** `10`. Allowable range is between 1 and 200. + * @return An array of stacktrace objects + * @since v22.9.0 + */ + export function getCallSite(frames?: number): StacktraceObject[]; + /** + * Returns the string name for a numeric error code that comes from a Node.js API. + * The mapping between error codes and error names is platform-dependent. + * See `Common System Errors` for the names of common errors. + * + * ```js + * fs.access('file/that/does/not/exist', (err) => { + * const name = util.getSystemErrorName(err.errno); + * console.error(name); // ENOENT + * }); + * ``` + * @since v9.7.0 + */ + export function getSystemErrorName(err: number): string; + /** + * Returns a Map of all system error codes available from the Node.js API. + * The mapping between error codes and error names is platform-dependent. + * See `Common System Errors` for the names of common errors. + * + * ```js + * fs.access('file/that/does/not/exist', (err) => { + * const errorMap = util.getSystemErrorMap(); + * const name = errorMap.get(err.errno); + * console.error(name); // ENOENT + * }); + * ``` + * @since v16.0.0, v14.17.0 + */ + export function getSystemErrorMap(): Map; + /** + * The `util.log()` method prints the given `string` to `stdout` with an included + * timestamp. + * + * ```js + * import util from 'node:util'; + * + * util.log('Timestamped message.'); + * ``` + * @since v0.3.0 + * @deprecated Since v6.0.0 - Use a third party module instead. + */ + export function log(string: string): void; + /** + * Returns the `string` after replacing any surrogate code points + * (or equivalently, any unpaired surrogate code units) with the + * Unicode "replacement character" U+FFFD. + * @since v16.8.0, v14.18.0 + */ + export function toUSVString(string: string): string; + /** + * Creates and returns an `AbortController` instance whose `AbortSignal` is marked + * as transferable and can be used with `structuredClone()` or `postMessage()`. + * @since v18.11.0 + * @experimental + * @returns A transferable AbortController + */ + export function transferableAbortController(): AbortController; + /** + * Marks the given `AbortSignal` as transferable so that it can be used with`structuredClone()` and `postMessage()`. 
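A minimal sketch tying `util.getSystemErrorName()`, documented a few entries above, to a real `errno` produced by a failed filesystem call; the path is intentionally nonexistent and purely illustrative.

```ts
import fs from "node:fs";
import util from "node:util";

fs.access("file/that/does/not/exist", (err) => {
  if (err && typeof err.errno === "number") {
    // Translate the platform-specific errno into its symbolic name.
    console.log(util.getSystemErrorName(err.errno)); // ENOENT
  }
});
```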
+ * + * ```js + * const signal = transferableAbortSignal(AbortSignal.timeout(100)); + * const channel = new MessageChannel(); + * channel.port2.postMessage(signal, [signal]); + * ``` + * @since v18.11.0 + * @experimental + * @param signal The AbortSignal + * @returns The same AbortSignal + */ + export function transferableAbortSignal(signal: AbortSignal): AbortSignal; + /** + * Listens to abort event on the provided `signal` and + * returns a promise that is fulfilled when the `signal` is + * aborted. If the passed `resource` is garbage collected before the `signal` is + * aborted, the returned promise shall remain pending indefinitely. + * + * ```js + * import { aborted } from 'node:util'; + * + * const dependent = obtainSomethingAbortable(); + * + * aborted(dependent.signal, dependent).then(() => { + * // Do something when dependent is aborted. + * }); + * + * dependent.on('event', () => { + * dependent.abort(); + * }); + * ``` + * @since v19.7.0 + * @experimental + * @param resource Any non-null entity, reference to which is held weakly. + */ + export function aborted(signal: AbortSignal, resource: any): Promise; + /** + * The `util.inspect()` method returns a string representation of `object` that is + * intended for debugging. The output of `util.inspect` may change at any time + * and should not be depended upon programmatically. Additional `options` may be + * passed that alter the result. `util.inspect()` will use the constructor's name and/or `@@toStringTag` to make + * an identifiable tag for an inspected value. + * + * ```js + * class Foo { + * get [Symbol.toStringTag]() { + * return 'bar'; + * } + * } + * + * class Bar {} + * + * const baz = Object.create(null, { [Symbol.toStringTag]: { value: 'foo' } }); + * + * util.inspect(new Foo()); // 'Foo [bar] {}' + * util.inspect(new Bar()); // 'Bar {}' + * util.inspect(baz); // '[foo] {}' + * ``` + * + * Circular references point to their anchor by using a reference index: + * + * ```js + * import { inspect } from 'node:util'; + * + * const obj = {}; + * obj.a = [obj]; + * obj.b = {}; + * obj.b.inner = obj.b; + * obj.b.obj = obj; + * + * console.log(inspect(obj)); + * // { + * // a: [ [Circular *1] ], + * // b: { inner: [Circular *2], obj: [Circular *1] } + * // } + * ``` + * + * The following example inspects all properties of the `util` object: + * + * ```js + * import util from 'node:util'; + * + * console.log(util.inspect(util, { showHidden: true, depth: null })); + * ``` + * + * The following example highlights the effect of the `compact` option: + * + * ```js + * import util from 'node:util'; + * + * const o = { + * a: [1, 2, [[ + * 'Lorem ipsum dolor sit amet,\nconsectetur adipiscing elit, sed do ' + + * 'eiusmod \ntempor incididunt ut labore et dolore magna aliqua.', + * 'test', + * 'foo']], 4], + * b: new Map([['za', 1], ['zb', 'test']]), + * }; + * console.log(util.inspect(o, { compact: true, depth: 5, breakLength: 80 })); + * + * // { a: + * // [ 1, + * // 2, + * // [ [ 'Lorem ipsum dolor sit amet,\nconsectetur [...]', // A long line + * // 'test', + * // 'foo' ] ], + * // 4 ], + * // b: Map(2) { 'za' => 1, 'zb' => 'test' } } + * + * // Setting `compact` to false or an integer creates more reader friendly output. 
+ * console.log(util.inspect(o, { compact: false, depth: 5, breakLength: 80 })); + * + * // { + * // a: [ + * // 1, + * // 2, + * // [ + * // [ + * // 'Lorem ipsum dolor sit amet,\n' + + * // 'consectetur adipiscing elit, sed do eiusmod \n' + + * // 'tempor incididunt ut labore et dolore magna aliqua.', + * // 'test', + * // 'foo' + * // ] + * // ], + * // 4 + * // ], + * // b: Map(2) { + * // 'za' => 1, + * // 'zb' => 'test' + * // } + * // } + * + * // Setting `breakLength` to e.g. 150 will print the "Lorem ipsum" text in a + * // single line. + * ``` + * + * The `showHidden` option allows [`WeakMap`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/WeakMap) and + * [`WeakSet`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/WeakSet) entries to be + * inspected. If there are more entries than `maxArrayLength`, there is no + * guarantee which entries are displayed. That means retrieving the same [`WeakSet`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/WeakSet) entries twice may + * result in different output. Furthermore, entries + * with no remaining strong references may be garbage collected at any time. + * + * ```js + * import { inspect } from 'node:util'; + * + * const obj = { a: 1 }; + * const obj2 = { b: 2 }; + * const weakSet = new WeakSet([obj, obj2]); + * + * console.log(inspect(weakSet, { showHidden: true })); + * // WeakSet { { a: 1 }, { b: 2 } } + * ``` + * + * The `sorted` option ensures that an object's property insertion order does not + * impact the result of `util.inspect()`. + * + * ```js + * import { inspect } from 'node:util'; + * import assert from 'node:assert'; + * + * const o1 = { + * b: [2, 3, 1], + * a: '`a` comes before `b`', + * c: new Set([2, 3, 1]), + * }; + * console.log(inspect(o1, { sorted: true })); + * // { a: '`a` comes before `b`', b: [ 2, 3, 1 ], c: Set(3) { 1, 2, 3 } } + * console.log(inspect(o1, { sorted: (a, b) => b.localeCompare(a) })); + * // { c: Set(3) { 3, 2, 1 }, b: [ 2, 3, 1 ], a: '`a` comes before `b`' } + * + * const o2 = { + * c: new Set([2, 1, 3]), + * a: '`a` comes before `b`', + * b: [2, 3, 1], + * }; + * assert.strict.equal( + * inspect(o1, { sorted: true }), + * inspect(o2, { sorted: true }), + * ); + * ``` + * + * The `numericSeparator` option adds an underscore every three digits to all + * numbers. + * + * ```js + * import { inspect } from 'node:util'; + * + * const thousand = 1_000; + * const million = 1_000_000; + * const bigNumber = 123_456_789n; + * const bigDecimal = 1_234.123_45; + * + * console.log(inspect(thousand, { numericSeparator: true })); + * // 1_000 + * console.log(inspect(million, { numericSeparator: true })); + * // 1_000_000 + * console.log(inspect(bigNumber, { numericSeparator: true })); + * // 123_456_789n + * console.log(inspect(bigDecimal, { numericSeparator: true })); + * // 1_234.123_45 + * ``` + * + * `util.inspect()` is a synchronous method intended for debugging. Its maximum + * output length is approximately 128 MiB. Inputs that result in longer output will + * be truncated. + * @since v0.3.0 + * @param object Any JavaScript primitive or `Object`. + * @return The representation of `object`. 
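+ *
+ * As a supplementary sketch (not part of the upstream comment), a class can take part
+ * in this formatting itself via the `util.inspect.custom` symbol exposed on the
+ * `inspect` namespace declared below:
+ *
+ * ```js
+ * import { inspect } from 'node:util';
+ *
+ * class Box {
+ *   constructor(value) {
+ *     this.value = value;
+ *   }
+ *   // Called by util.inspect() in place of the default object formatting.
+ *   [inspect.custom](depth, options, inspectFn) {
+ *     return `Box< ${inspectFn(this.value, options)} >`;
+ *   }
+ * }
+ *
+ * console.log(inspect(new Box({ a: 1 })));
+ * // Box< { a: 1 } >
+ * ```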
+ */ + export function inspect(object: any, showHidden?: boolean, depth?: number | null, color?: boolean): string; + export function inspect(object: any, options?: InspectOptions): string; + export namespace inspect { + let colors: NodeJS.Dict<[number, number]>; + let styles: { + [K in Style]: string; + }; + let defaultOptions: InspectOptions; + /** + * Allows changing inspect settings from the repl. + */ + let replDefaults: InspectOptions; + /** + * That can be used to declare custom inspect functions. + */ + const custom: unique symbol; + } + /** + * Alias for [`Array.isArray()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/isArray). + * + * Returns `true` if the given `object` is an `Array`. Otherwise, returns `false`. + * + * ```js + * import util from 'node:util'; + * + * util.isArray([]); + * // Returns: true + * util.isArray(new Array()); + * // Returns: true + * util.isArray({}); + * // Returns: false + * ``` + * @since v0.6.0 + * @deprecated Since v4.0.0 - Use `isArray` instead. + */ + export function isArray(object: unknown): object is unknown[]; + /** + * Returns `true` if the given `object` is a `RegExp`. Otherwise, returns `false`. + * + * ```js + * import util from 'node:util'; + * + * util.isRegExp(/some regexp/); + * // Returns: true + * util.isRegExp(new RegExp('another regexp')); + * // Returns: true + * util.isRegExp({}); + * // Returns: false + * ``` + * @since v0.6.0 + * @deprecated Since v4.0.0 - Deprecated + */ + export function isRegExp(object: unknown): object is RegExp; + /** + * Returns `true` if the given `object` is a `Date`. Otherwise, returns `false`. + * + * ```js + * import util from 'node:util'; + * + * util.isDate(new Date()); + * // Returns: true + * util.isDate(Date()); + * // false (without 'new' returns a String) + * util.isDate({}); + * // Returns: false + * ``` + * @since v0.6.0 + * @deprecated Since v4.0.0 - Use {@link types.isDate} instead. + */ + export function isDate(object: unknown): object is Date; + /** + * Returns `true` if the given `object` is an `Error`. Otherwise, returns `false`. + * + * ```js + * import util from 'node:util'; + * + * util.isError(new Error()); + * // Returns: true + * util.isError(new TypeError()); + * // Returns: true + * util.isError({ name: 'Error', message: 'an error occurred' }); + * // Returns: false + * ``` + * + * This method relies on `Object.prototype.toString()` behavior. It is + * possible to obtain an incorrect result when the `object` argument manipulates `@@toStringTag`. + * + * ```js + * import util from 'node:util'; + * const obj = { name: 'Error', message: 'an error occurred' }; + * + * util.isError(obj); + * // Returns: false + * obj[Symbol.toStringTag] = 'Error'; + * util.isError(obj); + * // Returns: true + * ``` + * @since v0.6.0 + * @deprecated Since v4.0.0 - Use {@link types.isNativeError} instead. + */ + export function isError(object: unknown): object is Error; + /** + * Usage of `util.inherits()` is discouraged. Please use the ES6 `class` and `extends` keywords to get language level inheritance support. Also note + * that the two styles are [semantically incompatible](https://github.com/nodejs/node/issues/4179). + * + * Inherit the prototype methods from one [constructor](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/constructor) into another. The + * prototype of `constructor` will be set to a new object created from `superConstructor`. 
+ * + * This mainly adds some input validation on top of`Object.setPrototypeOf(constructor.prototype, superConstructor.prototype)`. + * As an additional convenience, `superConstructor` will be accessible + * through the `constructor.super_` property. + * + * ```js + * import util from 'node:util'; + * import EventEmitter from 'node:events'; + * + * function MyStream() { + * EventEmitter.call(this); + * } + * + * util.inherits(MyStream, EventEmitter); + * + * MyStream.prototype.write = function(data) { + * this.emit('data', data); + * }; + * + * const stream = new MyStream(); + * + * console.log(stream instanceof EventEmitter); // true + * console.log(MyStream.super_ === EventEmitter); // true + * + * stream.on('data', (data) => { + * console.log(`Received data: "${data}"`); + * }); + * stream.write('It works!'); // Received data: "It works!" + * ``` + * + * ES6 example using `class` and `extends`: + * + * ```js + * import EventEmitter from 'node:events'; + * + * class MyStream extends EventEmitter { + * write(data) { + * this.emit('data', data); + * } + * } + * + * const stream = new MyStream(); + * + * stream.on('data', (data) => { + * console.log(`Received data: "${data}"`); + * }); + * stream.write('With ES6'); + * ``` + * @since v0.3.0 + * @legacy Use ES2015 class syntax and `extends` keyword instead. + */ + export function inherits(constructor: unknown, superConstructor: unknown): void; + export type DebugLoggerFunction = (msg: string, ...param: unknown[]) => void; + export interface DebugLogger extends DebugLoggerFunction { + enabled: boolean; + } + /** + * The `util.debuglog()` method is used to create a function that conditionally + * writes debug messages to `stderr` based on the existence of the `NODE_DEBUG`environment variable. If the `section` name appears within the value of that + * environment variable, then the returned function operates similar to `console.error()`. If not, then the returned function is a no-op. + * + * ```js + * import util from 'node:util'; + * const debuglog = util.debuglog('foo'); + * + * debuglog('hello from foo [%d]', 123); + * ``` + * + * If this program is run with `NODE_DEBUG=foo` in the environment, then + * it will output something like: + * + * ```console + * FOO 3245: hello from foo [123] + * ``` + * + * where `3245` is the process id. If it is not run with that + * environment variable set, then it will not print anything. + * + * The `section` supports wildcard also: + * + * ```js + * import util from 'node:util'; + * const debuglog = util.debuglog('foo-bar'); + * + * debuglog('hi there, it\'s foo-bar [%d]', 2333); + * ``` + * + * if it is run with `NODE_DEBUG=foo*` in the environment, then it will output + * something like: + * + * ```console + * FOO-BAR 3257: hi there, it's foo-bar [2333] + * ``` + * + * Multiple comma-separated `section` names may be specified in the `NODE_DEBUG`environment variable: `NODE_DEBUG=fs,net,tls`. + * + * The optional `callback` argument can be used to replace the logging function + * with a different function that doesn't have any initialization or + * unnecessary wrapping. + * + * ```js + * import util from 'node:util'; + * let debuglog = util.debuglog('internals', (debug) => { + * // Replace with a logging function that optimizes out + * // testing if the section is enabled + * debuglog = debug; + * }); + * ``` + * @since v0.11.3 + * @param section A string identifying the portion of the application for which the `debuglog` function is being created. 
+ * @param callback A callback invoked the first time the logging function is called with a function argument that is a more optimized logging function. + * @return The logging function + */ + export function debuglog(section: string, callback?: (fn: DebugLoggerFunction) => void): DebugLogger; + export const debug: typeof debuglog; + /** + * Returns `true` if the given `object` is a `Boolean`. Otherwise, returns `false`. + * + * ```js + * import util from 'node:util'; + * + * util.isBoolean(1); + * // Returns: false + * util.isBoolean(0); + * // Returns: false + * util.isBoolean(false); + * // Returns: true + * ``` + * @since v0.11.5 + * @deprecated Since v4.0.0 - Use `typeof value === 'boolean'` instead. + */ + export function isBoolean(object: unknown): object is boolean; + /** + * Returns `true` if the given `object` is a `Buffer`. Otherwise, returns `false`. + * + * ```js + * import util from 'node:util'; + * + * util.isBuffer({ length: 0 }); + * // Returns: false + * util.isBuffer([]); + * // Returns: false + * util.isBuffer(Buffer.from('hello world')); + * // Returns: true + * ``` + * @since v0.11.5 + * @deprecated Since v4.0.0 - Use `isBuffer` instead. + */ + export function isBuffer(object: unknown): object is Buffer; + /** + * Returns `true` if the given `object` is a `Function`. Otherwise, returns `false`. + * + * ```js + * import util from 'node:util'; + * + * function Foo() {} + * const Bar = () => {}; + * + * util.isFunction({}); + * // Returns: false + * util.isFunction(Foo); + * // Returns: true + * util.isFunction(Bar); + * // Returns: true + * ``` + * @since v0.11.5 + * @deprecated Since v4.0.0 - Use `typeof value === 'function'` instead. + */ + export function isFunction(object: unknown): boolean; + /** + * Returns `true` if the given `object` is strictly `null`. Otherwise, returns`false`. + * + * ```js + * import util from 'node:util'; + * + * util.isNull(0); + * // Returns: false + * util.isNull(undefined); + * // Returns: false + * util.isNull(null); + * // Returns: true + * ``` + * @since v0.11.5 + * @deprecated Since v4.0.0 - Use `value === null` instead. + */ + export function isNull(object: unknown): object is null; + /** + * Returns `true` if the given `object` is `null` or `undefined`. Otherwise, + * returns `false`. + * + * ```js + * import util from 'node:util'; + * + * util.isNullOrUndefined(0); + * // Returns: false + * util.isNullOrUndefined(undefined); + * // Returns: true + * util.isNullOrUndefined(null); + * // Returns: true + * ``` + * @since v0.11.5 + * @deprecated Since v4.0.0 - Use `value === undefined || value === null` instead. + */ + export function isNullOrUndefined(object: unknown): object is null | undefined; + /** + * Returns `true` if the given `object` is a `Number`. Otherwise, returns `false`. + * + * ```js + * import util from 'node:util'; + * + * util.isNumber(false); + * // Returns: false + * util.isNumber(Infinity); + * // Returns: true + * util.isNumber(0); + * // Returns: true + * util.isNumber(NaN); + * // Returns: true + * ``` + * @since v0.11.5 + * @deprecated Since v4.0.0 - Use `typeof value === 'number'` instead. + */ + export function isNumber(object: unknown): object is number; + /** + * Returns `true` if the given `object` is strictly an `Object`**and** not a`Function` (even though functions are objects in JavaScript). + * Otherwise, returns `false`. 
+ * + * ```js + * import util from 'node:util'; + * + * util.isObject(5); + * // Returns: false + * util.isObject(null); + * // Returns: false + * util.isObject({}); + * // Returns: true + * util.isObject(() => {}); + * // Returns: false + * ``` + * @since v0.11.5 + * @deprecated Since v4.0.0 - Use `value !== null && typeof value === 'object'` instead. + */ + export function isObject(object: unknown): boolean; + /** + * Returns `true` if the given `object` is a primitive type. Otherwise, returns`false`. + * + * ```js + * import util from 'node:util'; + * + * util.isPrimitive(5); + * // Returns: true + * util.isPrimitive('foo'); + * // Returns: true + * util.isPrimitive(false); + * // Returns: true + * util.isPrimitive(null); + * // Returns: true + * util.isPrimitive(undefined); + * // Returns: true + * util.isPrimitive({}); + * // Returns: false + * util.isPrimitive(() => {}); + * // Returns: false + * util.isPrimitive(/^$/); + * // Returns: false + * util.isPrimitive(new Date()); + * // Returns: false + * ``` + * @since v0.11.5 + * @deprecated Since v4.0.0 - Use `(typeof value !== 'object' && typeof value !== 'function') || value === null` instead. + */ + export function isPrimitive(object: unknown): boolean; + /** + * Returns `true` if the given `object` is a `string`. Otherwise, returns `false`. + * + * ```js + * import util from 'node:util'; + * + * util.isString(''); + * // Returns: true + * util.isString('foo'); + * // Returns: true + * util.isString(String('foo')); + * // Returns: true + * util.isString(5); + * // Returns: false + * ``` + * @since v0.11.5 + * @deprecated Since v4.0.0 - Use `typeof value === 'string'` instead. + */ + export function isString(object: unknown): object is string; + /** + * Returns `true` if the given `object` is a `Symbol`. Otherwise, returns `false`. + * + * ```js + * import util from 'node:util'; + * + * util.isSymbol(5); + * // Returns: false + * util.isSymbol('foo'); + * // Returns: false + * util.isSymbol(Symbol('foo')); + * // Returns: true + * ``` + * @since v0.11.5 + * @deprecated Since v4.0.0 - Use `typeof value === 'symbol'` instead. + */ + export function isSymbol(object: unknown): object is symbol; + /** + * Returns `true` if the given `object` is `undefined`. Otherwise, returns `false`. + * + * ```js + * import util from 'node:util'; + * + * const foo = undefined; + * util.isUndefined(5); + * // Returns: false + * util.isUndefined(foo); + * // Returns: true + * util.isUndefined(null); + * // Returns: false + * ``` + * @since v0.11.5 + * @deprecated Since v4.0.0 - Use `value === undefined` instead. + */ + export function isUndefined(object: unknown): object is undefined; + /** + * The `util.deprecate()` method wraps `fn` (which may be a function or class) in + * such a way that it is marked as deprecated. + * + * ```js + * import util from 'node:util'; + * + * exports.obsoleteFunction = util.deprecate(() => { + * // Do something here. + * }, 'obsoleteFunction() is deprecated. Use newShinyFunction() instead.'); + * ``` + * + * When called, `util.deprecate()` will return a function that will emit a `DeprecationWarning` using the `'warning'` event. The warning will + * be emitted and printed to `stderr` the first time the returned function is + * called. After the warning is emitted, the wrapped function is called without + * emitting a warning. + * + * If the same optional `code` is supplied in multiple calls to `util.deprecate()`, + * the warning will be emitted only once for that `code`. 
+ * + * ```js + * import util from 'node:util'; + * + * const fn1 = util.deprecate(someFunction, someMessage, 'DEP0001'); + * const fn2 = util.deprecate(someOtherFunction, someOtherMessage, 'DEP0001'); + * fn1(); // Emits a deprecation warning with code DEP0001 + * fn2(); // Does not emit a deprecation warning because it has the same code + * ``` + * + * If either the `--no-deprecation` or `--no-warnings` command-line flags are + * used, or if the `process.noDeprecation` property is set to `true`_prior_ to + * the first deprecation warning, the `util.deprecate()` method does nothing. + * + * If the `--trace-deprecation` or `--trace-warnings` command-line flags are set, + * or the `process.traceDeprecation` property is set to `true`, a warning and a + * stack trace are printed to `stderr` the first time the deprecated function is + * called. + * + * If the `--throw-deprecation` command-line flag is set, or the `process.throwDeprecation` property is set to `true`, then an exception will be + * thrown when the deprecated function is called. + * + * The `--throw-deprecation` command-line flag and `process.throwDeprecation` property take precedence over `--trace-deprecation` and `process.traceDeprecation`. + * @since v0.8.0 + * @param fn The function that is being deprecated. + * @param msg A warning message to display when the deprecated function is invoked. + * @param code A deprecation code. See the `list of deprecated APIs` for a list of codes. + * @return The deprecated function wrapped to emit a warning. + */ + export function deprecate(fn: T, msg: string, code?: string): T; + /** + * Returns `true` if there is deep strict equality between `val1` and `val2`. + * Otherwise, returns `false`. + * + * See `assert.deepStrictEqual()` for more information about deep strict + * equality. + * @since v9.0.0 + */ + export function isDeepStrictEqual(val1: unknown, val2: unknown): boolean; + /** + * Returns `str` with any ANSI escape codes removed. + * + * ```js + * console.log(util.stripVTControlCharacters('\u001B[4mvalue\u001B[0m')); + * // Prints "value" + * ``` + * @since v16.11.0 + */ + export function stripVTControlCharacters(str: string): string; + /** + * Takes an `async` function (or a function that returns a `Promise`) and returns a + * function following the error-first callback style, i.e. taking + * an `(err, value) => ...` callback as the last argument. In the callback, the + * first argument will be the rejection reason (or `null` if the `Promise` resolved), and the second argument will be the resolved value. + * + * ```js + * import util from 'node:util'; + * + * async function fn() { + * return 'hello world'; + * } + * const callbackFunction = util.callbackify(fn); + * + * callbackFunction((err, ret) => { + * if (err) throw err; + * console.log(ret); + * }); + * ``` + * + * Will print: + * + * ```text + * hello world + * ``` + * + * The callback is executed asynchronously, and will have a limited stack trace. + * If the callback throws, the process will emit an `'uncaughtException'` event, and if not handled will exit. + * + * Since `null` has a special meaning as the first argument to a callback, if a + * wrapped function rejects a `Promise` with a falsy value as a reason, the value + * is wrapped in an `Error` with the original value stored in a field named `reason`. 
+ *
+ * ```js
+ * function fn() {
+ *   return Promise.reject(null);
+ * }
+ * const callbackFunction = util.callbackify(fn);
+ *
+ * callbackFunction((err, ret) => {
+ *   // When the Promise was rejected with `null` it is wrapped with an Error and
+ *   // the original value is stored in `reason`.
+ *   err && Object.hasOwn(err, 'reason') && err.reason === null; // true
+ * });
+ * ```
+ * @since v8.2.0
+ * @param fn An `async` function
+ * @return a callback style function
+ */
+ export function callbackify(fn: () => Promise<void>): (callback: (err: NodeJS.ErrnoException) => void) => void;
+ export function callbackify<TResult>(
+     fn: () => Promise<TResult>,
+ ): (callback: (err: NodeJS.ErrnoException, result: TResult) => void) => void;
+ export function callbackify<T1>(
+     fn: (arg1: T1) => Promise<void>,
+ ): (arg1: T1, callback: (err: NodeJS.ErrnoException) => void) => void;
+ export function callbackify<T1, TResult>(
+     fn: (arg1: T1) => Promise<TResult>,
+ ): (arg1: T1, callback: (err: NodeJS.ErrnoException, result: TResult) => void) => void;
+ export function callbackify<T1, T2>(
+     fn: (arg1: T1, arg2: T2) => Promise<void>,
+ ): (arg1: T1, arg2: T2, callback: (err: NodeJS.ErrnoException) => void) => void;
+ export function callbackify<T1, T2, TResult>(
+     fn: (arg1: T1, arg2: T2) => Promise<TResult>,
+ ): (arg1: T1, arg2: T2, callback: (err: NodeJS.ErrnoException | null, result: TResult) => void) => void;
+ export function callbackify<T1, T2, T3>(
+     fn: (arg1: T1, arg2: T2, arg3: T3) => Promise<void>,
+ ): (arg1: T1, arg2: T2, arg3: T3, callback: (err: NodeJS.ErrnoException) => void) => void;
+ export function callbackify<T1, T2, T3, TResult>(
+     fn: (arg1: T1, arg2: T2, arg3: T3) => Promise<TResult>,
+ ): (arg1: T1, arg2: T2, arg3: T3, callback: (err: NodeJS.ErrnoException | null, result: TResult) => void) => void;
+ export function callbackify<T1, T2, T3, T4>(
+     fn: (arg1: T1, arg2: T2, arg3: T3, arg4: T4) => Promise<void>,
+ ): (arg1: T1, arg2: T2, arg3: T3, arg4: T4, callback: (err: NodeJS.ErrnoException) => void) => void;
+ export function callbackify<T1, T2, T3, T4, TResult>(
+     fn: (arg1: T1, arg2: T2, arg3: T3, arg4: T4) => Promise<TResult>,
+ ): (
+     arg1: T1,
+     arg2: T2,
+     arg3: T3,
+     arg4: T4,
+     callback: (err: NodeJS.ErrnoException | null, result: TResult) => void,
+ ) => void;
+ export function callbackify<T1, T2, T3, T4, T5>(
+     fn: (arg1: T1, arg2: T2, arg3: T3, arg4: T4, arg5: T5) => Promise<void>,
+ ): (arg1: T1, arg2: T2, arg3: T3, arg4: T4, arg5: T5, callback: (err: NodeJS.ErrnoException) => void) => void;
+ export function callbackify<T1, T2, T3, T4, T5, TResult>(
+     fn: (arg1: T1, arg2: T2, arg3: T3, arg4: T4, arg5: T5) => Promise<TResult>,
+ ): (
+     arg1: T1,
+     arg2: T2,
+     arg3: T3,
+     arg4: T4,
+     arg5: T5,
+     callback: (err: NodeJS.ErrnoException | null, result: TResult) => void,
+ ) => void;
+ export function callbackify<T1, T2, T3, T4, T5, T6>(
+     fn: (arg1: T1, arg2: T2, arg3: T3, arg4: T4, arg5: T5, arg6: T6) => Promise<void>,
+ ): (
+     arg1: T1,
+     arg2: T2,
+     arg3: T3,
+     arg4: T4,
+     arg5: T5,
+     arg6: T6,
+     callback: (err: NodeJS.ErrnoException) => void,
+ ) => void;
+ export function callbackify<T1, T2, T3, T4, T5, T6, TResult>(
+     fn: (arg1: T1, arg2: T2, arg3: T3, arg4: T4, arg5: T5, arg6: T6) => Promise<TResult>,
+ ): (
+     arg1: T1,
+     arg2: T2,
+     arg3: T3,
+     arg4: T4,
+     arg5: T5,
+     arg6: T6,
+     callback: (err: NodeJS.ErrnoException | null, result: TResult) => void,
+ ) => void;
+ export interface CustomPromisifyLegacy<TCustom extends Function> extends Function {
+     __promisify__: TCustom;
+ }
+ export interface CustomPromisifySymbol<TCustom extends Function> extends Function {
+     [promisify.custom]: TCustom;
+ }
+ export type CustomPromisify<TCustom extends Function> =
+     | CustomPromisifySymbol<TCustom>
+     | CustomPromisifyLegacy<TCustom>;
+ /**
+ * Takes a function following the common error-first callback style, i.e.
taking + * an `(err, value) => ...` callback as the last argument, and returns a version + * that returns promises. + * + * ```js + * import util from 'node:util'; + * import fs from 'node:fs'; + * + * const stat = util.promisify(fs.stat); + * stat('.').then((stats) => { + * // Do something with `stats` + * }).catch((error) => { + * // Handle the error. + * }); + * ``` + * + * Or, equivalently using `async function`s: + * + * ```js + * import util from 'node:util'; + * import fs from 'node:fs'; + * + * const stat = util.promisify(fs.stat); + * + * async function callStat() { + * const stats = await stat('.'); + * console.log(`This directory is owned by ${stats.uid}`); + * } + * + * callStat(); + * ``` + * + * If there is an `original[util.promisify.custom]` property present, `promisify` will return its value, see `Custom promisified functions`. + * + * `promisify()` assumes that `original` is a function taking a callback as its + * final argument in all cases. If `original` is not a function, `promisify()` will throw an error. If `original` is a function but its last argument is not + * an error-first callback, it will still be passed an error-first + * callback as its last argument. + * + * Using `promisify()` on class methods or other methods that use `this` may not + * work as expected unless handled specially: + * + * ```js + * import util from 'node:util'; + * + * class Foo { + * constructor() { + * this.a = 42; + * } + * + * bar(callback) { + * callback(null, this.a); + * } + * } + * + * const foo = new Foo(); + * + * const naiveBar = util.promisify(foo.bar); + * // TypeError: Cannot read property 'a' of undefined + * // naiveBar().then(a => console.log(a)); + * + * naiveBar.call(foo).then((a) => console.log(a)); // '42' + * + * const bindBar = naiveBar.bind(foo); + * bindBar().then((a) => console.log(a)); // '42' + * ``` + * @since v8.0.0 + */ + export function promisify(fn: CustomPromisify): TCustom; + export function promisify( + fn: (callback: (err: any, result: TResult) => void) => void, + ): () => Promise; + export function promisify(fn: (callback: (err?: any) => void) => void): () => Promise; + export function promisify( + fn: (arg1: T1, callback: (err: any, result: TResult) => void) => void, + ): (arg1: T1) => Promise; + export function promisify(fn: (arg1: T1, callback: (err?: any) => void) => void): (arg1: T1) => Promise; + export function promisify( + fn: (arg1: T1, arg2: T2, callback: (err: any, result: TResult) => void) => void, + ): (arg1: T1, arg2: T2) => Promise; + export function promisify( + fn: (arg1: T1, arg2: T2, callback: (err?: any) => void) => void, + ): (arg1: T1, arg2: T2) => Promise; + export function promisify( + fn: (arg1: T1, arg2: T2, arg3: T3, callback: (err: any, result: TResult) => void) => void, + ): (arg1: T1, arg2: T2, arg3: T3) => Promise; + export function promisify( + fn: (arg1: T1, arg2: T2, arg3: T3, callback: (err?: any) => void) => void, + ): (arg1: T1, arg2: T2, arg3: T3) => Promise; + export function promisify( + fn: (arg1: T1, arg2: T2, arg3: T3, arg4: T4, callback: (err: any, result: TResult) => void) => void, + ): (arg1: T1, arg2: T2, arg3: T3, arg4: T4) => Promise; + export function promisify( + fn: (arg1: T1, arg2: T2, arg3: T3, arg4: T4, callback: (err?: any) => void) => void, + ): (arg1: T1, arg2: T2, arg3: T3, arg4: T4) => Promise; + export function promisify( + fn: (arg1: T1, arg2: T2, arg3: T3, arg4: T4, arg5: T5, callback: (err: any, result: TResult) => void) => void, + ): (arg1: T1, arg2: T2, arg3: T3, arg4: T4, arg5: T5) => 
Promise; + export function promisify( + fn: (arg1: T1, arg2: T2, arg3: T3, arg4: T4, arg5: T5, callback: (err?: any) => void) => void, + ): (arg1: T1, arg2: T2, arg3: T3, arg4: T4, arg5: T5) => Promise; + export function promisify(fn: Function): Function; + export namespace promisify { + /** + * That can be used to declare custom promisified variants of functions. + */ + const custom: unique symbol; + } + /** + * Stability: 1.1 - Active development + * Given an example `.env` file: + * + * ```js + * import { parseEnv } from 'node:util'; + * + * parseEnv('HELLO=world\nHELLO=oh my\n'); + * // Returns: { HELLO: 'oh my' } + * ``` + * @param content The raw contents of a `.env` file. + * @since v20.12.0 + */ + export function parseEnv(content: string): object; + // https://nodejs.org/docs/latest/api/util.html#foreground-colors + type ForegroundColors = + | "black" + | "blackBright" + | "blue" + | "blueBright" + | "cyan" + | "cyanBright" + | "gray" + | "green" + | "greenBright" + | "grey" + | "magenta" + | "magentaBright" + | "red" + | "redBright" + | "white" + | "whiteBright" + | "yellow" + | "yellowBright"; + // https://nodejs.org/docs/latest/api/util.html#background-colors + type BackgroundColors = + | "bgBlack" + | "bgBlackBright" + | "bgBlue" + | "bgBlueBright" + | "bgCyan" + | "bgCyanBright" + | "bgGray" + | "bgGreen" + | "bgGreenBright" + | "bgGrey" + | "bgMagenta" + | "bgMagentaBright" + | "bgRed" + | "bgRedBright" + | "bgWhite" + | "bgWhiteBright" + | "bgYellow" + | "bgYellowBright"; + // https://nodejs.org/docs/latest/api/util.html#modifiers + type Modifiers = + | "blink" + | "bold" + | "dim" + | "doubleunderline" + | "framed" + | "hidden" + | "inverse" + | "italic" + | "overlined" + | "reset" + | "strikethrough" + | "underline"; + /** + * Stability: 1.1 - Active development + * + * This function returns a formatted text considering the `format` passed. + * + * ```js + * import { styleText } from 'node:util'; + * const errorMessage = styleText('red', 'Error! Error!'); + * console.log(errorMessage); + * ``` + * + * `util.inspect.colors` also provides text formats such as `italic`, and `underline` and you can combine both: + * + * ```js + * console.log( + * util.styleText(['underline', 'italic'], 'My italic underlined message'), + * ); + * ``` + * + * When passing an array of formats, the order of the format applied is left to right so the following style + * might overwrite the previous one. + * + * ```js + * console.log( + * util.styleText(['red', 'green'], 'text'), // green + * ); + * ``` + * + * The full list of formats can be found in [modifiers](https://nodejs.org/docs/latest-v22.x/api/util.html#modifiers). + * @param format A text format or an Array of text formats defined in `util.inspect.colors`. + * @param text The text to to be formatted. + * @since v20.12.0 + */ + export function styleText( + format: + | ForegroundColors + | BackgroundColors + | Modifiers + | Array, + text: string, + ): string; + /** + * An implementation of the [WHATWG Encoding Standard](https://encoding.spec.whatwg.org/) `TextDecoder` API. + * + * ```js + * const decoder = new TextDecoder(); + * const u8arr = new Uint8Array([72, 101, 108, 108, 111]); + * console.log(decoder.decode(u8arr)); // Hello + * ``` + * @since v8.3.0 + */ + export class TextDecoder { + /** + * The encoding supported by the `TextDecoder` instance. + */ + readonly encoding: string; + /** + * The value will be `true` if decoding errors result in a `TypeError` being + * thrown. 
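+ *
+ * A brief illustrative sketch (not part of the upstream comment): with
+ * `fatal: true`, an invalid byte sequence throws instead of being replaced
+ * with U+FFFD.
+ *
+ * ```js
+ * const strict = new TextDecoder('utf-8', { fatal: true });
+ * try {
+ *   strict.decode(new Uint8Array([0xff])); // 0xff is never valid UTF-8
+ * } catch (err) {
+ *   console.log(err instanceof TypeError); // true
+ * }
+ * ```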
+ */ + readonly fatal: boolean; + /** + * The value will be `true` if the decoding result will include the byte order + * mark. + */ + readonly ignoreBOM: boolean; + constructor( + encoding?: string, + options?: { + fatal?: boolean | undefined; + ignoreBOM?: boolean | undefined; + }, + ); + /** + * Decodes the `input` and returns a string. If `options.stream` is `true`, any + * incomplete byte sequences occurring at the end of the `input` are buffered + * internally and emitted after the next call to `textDecoder.decode()`. + * + * If `textDecoder.fatal` is `true`, decoding errors that occur will result in a `TypeError` being thrown. + * @param input An `ArrayBuffer`, `DataView`, or `TypedArray` instance containing the encoded data. + */ + decode( + input?: NodeJS.ArrayBufferView | ArrayBuffer | null, + options?: { + stream?: boolean | undefined; + }, + ): string; + } + export interface EncodeIntoResult { + /** + * The read Unicode code units of input. + */ + read: number; + /** + * The written UTF-8 bytes of output. + */ + written: number; + } + export { types }; + + //// TextEncoder/Decoder + /** + * An implementation of the [WHATWG Encoding Standard](https://encoding.spec.whatwg.org/) `TextEncoder` API. All + * instances of `TextEncoder` only support UTF-8 encoding. + * + * ```js + * const encoder = new TextEncoder(); + * const uint8array = encoder.encode('this is some data'); + * ``` + * + * The `TextEncoder` class is also available on the global object. + * @since v8.3.0 + */ + export class TextEncoder { + /** + * The encoding supported by the `TextEncoder` instance. Always set to `'utf-8'`. + */ + readonly encoding: string; + /** + * UTF-8 encodes the `input` string and returns a `Uint8Array` containing the + * encoded bytes. + * @param [input='an empty string'] The text to encode. + */ + encode(input?: string): Uint8Array; + /** + * UTF-8 encodes the `src` string to the `dest` Uint8Array and returns an object + * containing the read Unicode code units and written UTF-8 bytes. + * + * ```js + * const encoder = new TextEncoder(); + * const src = 'this is some data'; + * const dest = new Uint8Array(10); + * const { read, written } = encoder.encodeInto(src, dest); + * ``` + * @param src The text to encode. + * @param dest The array to hold the encode result. + */ + encodeInto(src: string, dest: Uint8Array): EncodeIntoResult; + } + import { TextDecoder as _TextDecoder, TextEncoder as _TextEncoder } from "util"; + global { + /** + * `TextDecoder` class is a global reference for `import { TextDecoder } from 'node:util'` + * https://nodejs.org/api/globals.html#textdecoder + * @since v11.0.0 + */ + var TextDecoder: typeof globalThis extends { + onmessage: any; + TextDecoder: infer TextDecoder; + } ? TextDecoder + : typeof _TextDecoder; + /** + * `TextEncoder` class is a global reference for `import { TextEncoder } from 'node:util'` + * https://nodejs.org/api/globals.html#textencoder + * @since v11.0.0 + */ + var TextEncoder: typeof globalThis extends { + onmessage: any; + TextEncoder: infer TextEncoder; + } ? TextEncoder + : typeof _TextEncoder; + } + + //// parseArgs + /** + * Provides a higher level API for command-line argument parsing than interacting + * with `process.argv` directly. Takes a specification for the expected arguments + * and returns a structured object with the parsed options and positionals. 
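+ *
+ * In addition to the basic example below, a supplementary sketch (not part of the
+ * upstream comment) of the `allowNegative` and `tokens` options described with
+ * `ParseArgsConfig` further down; `allowNegative` assumes Node.js 22.4 or later:
+ *
+ * ```js
+ * import { parseArgs } from 'node:util';
+ *
+ * const { values, tokens } = parseArgs({
+ *   args: ['--verbose', '--no-color'],
+ *   options: {
+ *     verbose: { type: 'boolean' },
+ *     color: { type: 'boolean', default: true },
+ *   },
+ *   allowNegative: true, // lets `--no-color` set `color` to false
+ *   tokens: true, // also return the raw parse tokens
+ * });
+ * console.log(values.verbose, values.color);
+ * // Prints: true false
+ * console.log(tokens.length);
+ * // Prints: 2
+ * ```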
+ * + * ```js + * import { parseArgs } from 'node:util'; + * const args = ['-f', '--bar', 'b']; + * const options = { + * foo: { + * type: 'boolean', + * short: 'f', + * }, + * bar: { + * type: 'string', + * }, + * }; + * const { + * values, + * positionals, + * } = parseArgs({ args, options }); + * console.log(values, positionals); + * // Prints: [Object: null prototype] { foo: true, bar: 'b' } [] + * ``` + * @since v18.3.0, v16.17.0 + * @param config Used to provide arguments for parsing and to configure the parser. `config` supports the following properties: + * @return The parsed command line arguments: + */ + export function parseArgs(config?: T): ParsedResults; + interface ParseArgsOptionConfig { + /** + * Type of argument. + */ + type: "string" | "boolean"; + /** + * Whether this option can be provided multiple times. + * If `true`, all values will be collected in an array. + * If `false`, values for the option are last-wins. + * @default false. + */ + multiple?: boolean | undefined; + /** + * A single character alias for the option. + */ + short?: string | undefined; + /** + * The default option value when it is not set by args. + * It must be of the same type as the the `type` property. + * When `multiple` is `true`, it must be an array. + * @since v18.11.0 + */ + default?: string | boolean | string[] | boolean[] | undefined; + } + interface ParseArgsOptionsConfig { + [longOption: string]: ParseArgsOptionConfig; + } + export interface ParseArgsConfig { + /** + * Array of argument strings. + */ + args?: string[] | undefined; + /** + * Used to describe arguments known to the parser. + */ + options?: ParseArgsOptionsConfig | undefined; + /** + * Should an error be thrown when unknown arguments are encountered, + * or when arguments are passed that do not match the `type` configured in `options`. + * @default true + */ + strict?: boolean | undefined; + /** + * Whether this command accepts positional arguments. + */ + allowPositionals?: boolean | undefined; + /** + * If `true`, allows explicitly setting boolean options to `false` by prefixing the option name with `--no-`. + * @default false + * @since v22.4.0 + */ + allowNegative?: boolean | undefined; + /** + * Return the parsed tokens. This is useful for extending the built-in behavior, + * from adding additional checks through to reprocessing the tokens in different ways. + * @default false + */ + tokens?: boolean | undefined; + } + /* + IfDefaultsTrue and IfDefaultsFalse are helpers to handle default values for missing boolean properties. + TypeScript does not have exact types for objects: https://github.com/microsoft/TypeScript/issues/12936 + This means it is impossible to distinguish between "field X is definitely not present" and "field X may or may not be present". + But we expect users to generally provide their config inline or `as const`, which means TS will always know whether a given field is present. + So this helper treats "not definitely present" (i.e., not `extends boolean`) as being "definitely not present", i.e. it should have its default value. + This is technically incorrect but is a much nicer UX for the common case. + The IfDefaultsTrue version is for things which default to true; the IfDefaultsFalse version is for things which default to false. + */ + type IfDefaultsTrue = T extends true ? IfTrue + : T extends false ? IfFalse + : IfTrue; + + // we put the `extends false` condition first here because `undefined` compares like `any` when `strictNullChecks: false` + type IfDefaultsFalse = T extends false ? 
IfFalse + : T extends true ? IfTrue + : IfFalse; + + type ExtractOptionValue = IfDefaultsTrue< + T["strict"], + O["type"] extends "string" ? string : O["type"] extends "boolean" ? boolean : string | boolean, + string | boolean + >; + + type ApplyOptionalModifiers> = ( + & { -readonly [LongOption in keyof O]?: V[LongOption] } + & { [LongOption in keyof O as O[LongOption]["default"] extends {} ? LongOption : never]: V[LongOption] } + ) extends infer P ? { [K in keyof P]: P[K] } : never; // resolve intersection to object + + type ParsedValues = + & IfDefaultsTrue + & (T["options"] extends ParseArgsOptionsConfig ? ApplyOptionalModifiers< + T["options"], + { + [LongOption in keyof T["options"]]: IfDefaultsFalse< + T["options"][LongOption]["multiple"], + Array>, + ExtractOptionValue + >; + } + > + : {}); + + type ParsedPositionals = IfDefaultsTrue< + T["strict"], + IfDefaultsFalse, + IfDefaultsTrue + >; + + type PreciseTokenForOptions< + K extends string, + O extends ParseArgsOptionConfig, + > = O["type"] extends "string" ? { + kind: "option"; + index: number; + name: K; + rawName: string; + value: string; + inlineValue: boolean; + } + : O["type"] extends "boolean" ? { + kind: "option"; + index: number; + name: K; + rawName: string; + value: undefined; + inlineValue: undefined; + } + : OptionToken & { name: K }; + + type TokenForOptions< + T extends ParseArgsConfig, + K extends keyof T["options"] = keyof T["options"], + > = K extends unknown + ? T["options"] extends ParseArgsOptionsConfig ? PreciseTokenForOptions + : OptionToken + : never; + + type ParsedOptionToken = IfDefaultsTrue, OptionToken>; + + type ParsedPositionalToken = IfDefaultsTrue< + T["strict"], + IfDefaultsFalse, + IfDefaultsTrue + >; + + type ParsedTokens = Array< + ParsedOptionToken | ParsedPositionalToken | { kind: "option-terminator"; index: number } + >; + + type PreciseParsedResults = IfDefaultsFalse< + T["tokens"], + { + values: ParsedValues; + positionals: ParsedPositionals; + tokens: ParsedTokens; + }, + { + values: ParsedValues; + positionals: ParsedPositionals; + } + >; + + type OptionToken = + | { kind: "option"; index: number; name: string; rawName: string; value: string; inlineValue: boolean } + | { + kind: "option"; + index: number; + name: string; + rawName: string; + value: undefined; + inlineValue: undefined; + }; + + type Token = + | OptionToken + | { kind: "positional"; index: number; value: string } + | { kind: "option-terminator"; index: number }; + + // If ParseArgsConfig extends T, then the user passed config constructed elsewhere. + // So we can't rely on the `"not definitely present" implies "definitely not present"` assumption mentioned above. + type ParsedResults = ParseArgsConfig extends T ? { + values: { + [longOption: string]: undefined | string | boolean | Array; + }; + positionals: string[]; + tokens?: Token[]; + } + : PreciseParsedResults; + + /** + * An implementation of [the MIMEType class](https://bmeck.github.io/node-proposal-mime-api/). + * + * In accordance with browser conventions, all properties of `MIMEType` objects + * are implemented as getters and setters on the class prototype, rather than as + * data properties on the object itself. + * + * A MIME string is a structured string containing multiple meaningful + * components. When parsed, a `MIMEType` object is returned containing + * properties for each of these components. + * @since v19.1.0, v18.13.0 + * @experimental + */ + export class MIMEType { + /** + * Creates a new MIMEType object by parsing the input. 
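+ *
+ * A brief illustrative sketch (not part of the upstream comment):
+ *
+ * ```js
+ * import { MIMEType } from 'node:util';
+ *
+ * const mime = new MIMEType('text/plain;charset=utf-8');
+ * console.log(String(mime));
+ * // Prints: text/plain;charset=utf-8
+ * ```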
+ * + * A `TypeError` will be thrown if the `input` is not a valid MIME. + * Note that an effort will be made to coerce the given values into strings. + * @param input The input MIME to parse. + */ + constructor(input: string | { toString: () => string }); + + /** + * Gets and sets the type portion of the MIME. + * + * ```js + * import { MIMEType } from 'node:util'; + * + * const myMIME = new MIMEType('text/javascript'); + * console.log(myMIME.type); + * // Prints: text + * myMIME.type = 'application'; + * console.log(myMIME.type); + * // Prints: application + * console.log(String(myMIME)); + * // Prints: application/javascript + * ``` + */ + type: string; + /** + * Gets and sets the subtype portion of the MIME. + * + * ```js + * import { MIMEType } from 'node:util'; + * + * const myMIME = new MIMEType('text/ecmascript'); + * console.log(myMIME.subtype); + * // Prints: ecmascript + * myMIME.subtype = 'javascript'; + * console.log(myMIME.subtype); + * // Prints: javascript + * console.log(String(myMIME)); + * // Prints: text/javascript + * ``` + */ + subtype: string; + /** + * Gets the essence of the MIME. This property is read only. + * Use `mime.type` or `mime.subtype` to alter the MIME. + * + * ```js + * import { MIMEType } from 'node:util'; + * + * const myMIME = new MIMEType('text/javascript;key=value'); + * console.log(myMIME.essence); + * // Prints: text/javascript + * myMIME.type = 'application'; + * console.log(myMIME.essence); + * // Prints: application/javascript + * console.log(String(myMIME)); + * // Prints: application/javascript;key=value + * ``` + */ + readonly essence: string; + /** + * Gets the `MIMEParams` object representing the + * parameters of the MIME. This property is read-only. See `MIMEParams` documentation for details. + */ + readonly params: MIMEParams; + /** + * The `toString()` method on the `MIMEType` object returns the serialized MIME. + * + * Because of the need for standard compliance, this method does not allow users + * to customize the serialization process of the MIME. + */ + toString(): string; + } + /** + * The `MIMEParams` API provides read and write access to the parameters of a `MIMEType`. + * @since v19.1.0, v18.13.0 + */ + export class MIMEParams { + /** + * Remove all name-value pairs whose name is `name`. + */ + delete(name: string): void; + /** + * Returns an iterator over each of the name-value pairs in the parameters. + * Each item of the iterator is a JavaScript `Array`. The first item of the array + * is the `name`, the second item of the array is the `value`. + */ + entries(): NodeJS.Iterator<[name: string, value: string]>; + /** + * Returns the value of the first name-value pair whose name is `name`. If there + * are no such pairs, `null` is returned. + * @return or `null` if there is no name-value pair with the given `name`. + */ + get(name: string): string | null; + /** + * Returns `true` if there is at least one name-value pair whose name is `name`. + */ + has(name: string): boolean; + /** + * Returns an iterator over the names of each name-value pair. + * + * ```js + * import { MIMEType } from 'node:util'; + * + * const { params } = new MIMEType('text/plain;foo=0;bar=1'); + * for (const name of params.keys()) { + * console.log(name); + * } + * // Prints: + * // foo + * // bar + * ``` + */ + keys(): NodeJS.Iterator; + /** + * Sets the value in the `MIMEParams` object associated with `name` to `value`. If there are any pre-existing name-value pairs whose names are `name`, + * set the first such pair's value to `value`. 
+ * + * ```js + * import { MIMEType } from 'node:util'; + * + * const { params } = new MIMEType('text/plain;foo=0;bar=1'); + * params.set('foo', 'def'); + * params.set('baz', 'xyz'); + * console.log(params.toString()); + * // Prints: foo=def;bar=1;baz=xyz + * ``` + */ + set(name: string, value: string): void; + /** + * Returns an iterator over the values of each name-value pair. + */ + values(): NodeJS.Iterator; + /** + * Returns an iterator over each of the name-value pairs in the parameters. + */ + [Symbol.iterator](): NodeJS.Iterator<[name: string, value: string]>; + } +} +declare module "util/types" { + import { KeyObject, webcrypto } from "node:crypto"; + /** + * Returns `true` if the value is a built-in [`ArrayBuffer`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer) or + * [`SharedArrayBuffer`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/SharedArrayBuffer) instance. + * + * See also `util.types.isArrayBuffer()` and `util.types.isSharedArrayBuffer()`. + * + * ```js + * util.types.isAnyArrayBuffer(new ArrayBuffer()); // Returns true + * util.types.isAnyArrayBuffer(new SharedArrayBuffer()); // Returns true + * ``` + * @since v10.0.0 + */ + function isAnyArrayBuffer(object: unknown): object is ArrayBufferLike; + /** + * Returns `true` if the value is an `arguments` object. + * + * ```js + * function foo() { + * util.types.isArgumentsObject(arguments); // Returns true + * } + * ``` + * @since v10.0.0 + */ + function isArgumentsObject(object: unknown): object is IArguments; + /** + * Returns `true` if the value is a built-in [`ArrayBuffer`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer) instance. + * This does _not_ include [`SharedArrayBuffer`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/SharedArrayBuffer) instances. Usually, it is + * desirable to test for both; See `util.types.isAnyArrayBuffer()` for that. + * + * ```js + * util.types.isArrayBuffer(new ArrayBuffer()); // Returns true + * util.types.isArrayBuffer(new SharedArrayBuffer()); // Returns false + * ``` + * @since v10.0.0 + */ + function isArrayBuffer(object: unknown): object is ArrayBuffer; + /** + * Returns `true` if the value is an instance of one of the [`ArrayBuffer`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer) views, such as typed + * array objects or [`DataView`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/DataView). Equivalent to + * [`ArrayBuffer.isView()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer/isView). + * + * ```js + * util.types.isArrayBufferView(new Int8Array()); // true + * util.types.isArrayBufferView(Buffer.from('hello world')); // true + * util.types.isArrayBufferView(new DataView(new ArrayBuffer(16))); // true + * util.types.isArrayBufferView(new ArrayBuffer()); // false + * ``` + * @since v10.0.0 + */ + function isArrayBufferView(object: unknown): object is NodeJS.ArrayBufferView; + /** + * Returns `true` if the value is an [async function](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/async_function). + * This only reports back what the JavaScript engine is seeing; + * in particular, the return value may not match the original source code if + * a transpilation tool was used. 
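+ *
+ * As a supplementary sketch (not part of the upstream comment), a function that a
+ * transpiler might emit in place of `async function foo() {}` is, to the engine,
+ * just an ordinary function (the body below is a hypothetical transpiler output):
+ *
+ * ```js
+ * function fooTranspiled() {
+ *   return Promise.resolve();
+ * }
+ * util.types.isAsyncFunction(fooTranspiled); // Returns false
+ * ```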
+ * + * ```js + * util.types.isAsyncFunction(function foo() {}); // Returns false + * util.types.isAsyncFunction(async function foo() {}); // Returns true + * ``` + * @since v10.0.0 + */ + function isAsyncFunction(object: unknown): boolean; + /** + * Returns `true` if the value is a `BigInt64Array` instance. + * + * ```js + * util.types.isBigInt64Array(new BigInt64Array()); // Returns true + * util.types.isBigInt64Array(new BigUint64Array()); // Returns false + * ``` + * @since v10.0.0 + */ + function isBigInt64Array(value: unknown): value is BigInt64Array; + /** + * Returns `true` if the value is a `BigUint64Array` instance. + * + * ```js + * util.types.isBigUint64Array(new BigInt64Array()); // Returns false + * util.types.isBigUint64Array(new BigUint64Array()); // Returns true + * ``` + * @since v10.0.0 + */ + function isBigUint64Array(value: unknown): value is BigUint64Array; + /** + * Returns `true` if the value is a boolean object, e.g. created + * by `new Boolean()`. + * + * ```js + * util.types.isBooleanObject(false); // Returns false + * util.types.isBooleanObject(true); // Returns false + * util.types.isBooleanObject(new Boolean(false)); // Returns true + * util.types.isBooleanObject(new Boolean(true)); // Returns true + * util.types.isBooleanObject(Boolean(false)); // Returns false + * util.types.isBooleanObject(Boolean(true)); // Returns false + * ``` + * @since v10.0.0 + */ + function isBooleanObject(object: unknown): object is Boolean; + /** + * Returns `true` if the value is any boxed primitive object, e.g. created + * by `new Boolean()`, `new String()` or `Object(Symbol())`. + * + * For example: + * + * ```js + * util.types.isBoxedPrimitive(false); // Returns false + * util.types.isBoxedPrimitive(new Boolean(false)); // Returns true + * util.types.isBoxedPrimitive(Symbol('foo')); // Returns false + * util.types.isBoxedPrimitive(Object(Symbol('foo'))); // Returns true + * util.types.isBoxedPrimitive(Object(BigInt(5))); // Returns true + * ``` + * @since v10.11.0 + */ + function isBoxedPrimitive(object: unknown): object is String | Number | BigInt | Boolean | Symbol; + /** + * Returns `true` if the value is a built-in [`DataView`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/DataView) instance. + * + * ```js + * const ab = new ArrayBuffer(20); + * util.types.isDataView(new DataView(ab)); // Returns true + * util.types.isDataView(new Float64Array()); // Returns false + * ``` + * + * See also [`ArrayBuffer.isView()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer/isView). + * @since v10.0.0 + */ + function isDataView(object: unknown): object is DataView; + /** + * Returns `true` if the value is a built-in [`Date`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date) instance. + * + * ```js + * util.types.isDate(new Date()); // Returns true + * ``` + * @since v10.0.0 + */ + function isDate(object: unknown): object is Date; + /** + * Returns `true` if the value is a native `External` value. + * + * A native `External` value is a special type of object that contains a + * raw C++ pointer (`void*`) for access from native code, and has no other + * properties. Such objects are created either by Node.js internals or native + * addons. In JavaScript, they are [frozen](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/freeze) objects with a`null` prototype. 
+ * + * ```c + * #include + * #include + * napi_value result; + * static napi_value MyNapi(napi_env env, napi_callback_info info) { + * int* raw = (int*) malloc(1024); + * napi_status status = napi_create_external(env, (void*) raw, NULL, NULL, &result); + * if (status != napi_ok) { + * napi_throw_error(env, NULL, "napi_create_external failed"); + * return NULL; + * } + * return result; + * } + * ... + * DECLARE_NAPI_PROPERTY("myNapi", MyNapi) + * ... + * ``` + * + * ```js + * const native = require('napi_addon.node'); + * const data = native.myNapi(); + * util.types.isExternal(data); // returns true + * util.types.isExternal(0); // returns false + * util.types.isExternal(new String('foo')); // returns false + * ``` + * + * For further information on `napi_create_external`, refer to `napi_create_external()`. + * @since v10.0.0 + */ + function isExternal(object: unknown): boolean; + /** + * Returns `true` if the value is a built-in [`Float32Array`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Float32Array) instance. + * + * ```js + * util.types.isFloat32Array(new ArrayBuffer()); // Returns false + * util.types.isFloat32Array(new Float32Array()); // Returns true + * util.types.isFloat32Array(new Float64Array()); // Returns false + * ``` + * @since v10.0.0 + */ + function isFloat32Array(object: unknown): object is Float32Array; + /** + * Returns `true` if the value is a built-in [`Float64Array`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Float64Array) instance. + * + * ```js + * util.types.isFloat64Array(new ArrayBuffer()); // Returns false + * util.types.isFloat64Array(new Uint8Array()); // Returns false + * util.types.isFloat64Array(new Float64Array()); // Returns true + * ``` + * @since v10.0.0 + */ + function isFloat64Array(object: unknown): object is Float64Array; + /** + * Returns `true` if the value is a generator function. + * This only reports back what the JavaScript engine is seeing; + * in particular, the return value may not match the original source code if + * a transpilation tool was used. + * + * ```js + * util.types.isGeneratorFunction(function foo() {}); // Returns false + * util.types.isGeneratorFunction(function* foo() {}); // Returns true + * ``` + * @since v10.0.0 + */ + function isGeneratorFunction(object: unknown): object is GeneratorFunction; + /** + * Returns `true` if the value is a generator object as returned from a + * built-in generator function. + * This only reports back what the JavaScript engine is seeing; + * in particular, the return value may not match the original source code if + * a transpilation tool was used. + * + * ```js + * function* foo() {} + * const generator = foo(); + * util.types.isGeneratorObject(generator); // Returns true + * ``` + * @since v10.0.0 + */ + function isGeneratorObject(object: unknown): object is Generator; + /** + * Returns `true` if the value is a built-in [`Int8Array`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Int8Array) instance. + * + * ```js + * util.types.isInt8Array(new ArrayBuffer()); // Returns false + * util.types.isInt8Array(new Int8Array()); // Returns true + * util.types.isInt8Array(new Float64Array()); // Returns false + * ``` + * @since v10.0.0 + */ + function isInt8Array(object: unknown): object is Int8Array; + /** + * Returns `true` if the value is a built-in [`Int16Array`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Int16Array) instance. 
+ *
+ * ```js
+ * util.types.isInt16Array(new ArrayBuffer()); // Returns false
+ * util.types.isInt16Array(new Int16Array()); // Returns true
+ * util.types.isInt16Array(new Float64Array()); // Returns false
+ * ```
+ * @since v10.0.0
+ */
+ function isInt16Array(object: unknown): object is Int16Array;
+ /**
+ * Returns `true` if the value is a built-in [`Int32Array`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Int32Array) instance.
+ *
+ * ```js
+ * util.types.isInt32Array(new ArrayBuffer()); // Returns false
+ * util.types.isInt32Array(new Int32Array()); // Returns true
+ * util.types.isInt32Array(new Float64Array()); // Returns false
+ * ```
+ * @since v10.0.0
+ */
+ function isInt32Array(object: unknown): object is Int32Array;
+ /**
+ * Returns `true` if the value is a built-in [`Map`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Map) instance.
+ *
+ * ```js
+ * util.types.isMap(new Map()); // Returns true
+ * ```
+ * @since v10.0.0
+ */
+ function isMap<T>(
+ object: T | {},
+ ): object is T extends ReadonlyMap<any, any> ? (unknown extends T ? never : ReadonlyMap<unknown, unknown>)
+ : Map<unknown, unknown>;
+ /**
+ * Returns `true` if the value is an iterator returned for a built-in [`Map`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Map) instance.
+ *
+ * ```js
+ * const map = new Map();
+ * util.types.isMapIterator(map.keys()); // Returns true
+ * util.types.isMapIterator(map.values()); // Returns true
+ * util.types.isMapIterator(map.entries()); // Returns true
+ * util.types.isMapIterator(map[Symbol.iterator]()); // Returns true
+ * ```
+ * @since v10.0.0
+ */
+ function isMapIterator(object: unknown): boolean;
+ /**
+ * Returns `true` if the value is an instance of a [Module Namespace Object](https://tc39.github.io/ecma262/#sec-module-namespace-exotic-objects).
+ *
+ * ```js
+ * import * as ns from './a.js';
+ *
+ * util.types.isModuleNamespaceObject(ns); // Returns true
+ * ```
+ * @since v10.0.0
+ */
+ function isModuleNamespaceObject(value: unknown): boolean;
+ /**
+ * Returns `true` if the value was returned by the constructor of a [built-in `Error` type](https://tc39.es/ecma262/#sec-error-objects).
+ *
+ * ```js
+ * console.log(util.types.isNativeError(new Error())); // true
+ * console.log(util.types.isNativeError(new TypeError())); // true
+ * console.log(util.types.isNativeError(new RangeError())); // true
+ * ```
+ *
+ * Subclasses of the native error types are also native errors:
+ *
+ * ```js
+ * class MyError extends Error {}
+ * console.log(util.types.isNativeError(new MyError())); // true
+ * ```
+ *
+ * A value being `instanceof` a native error class is not equivalent to `isNativeError()` returning `true` for that value. `isNativeError()` returns `true` for errors
+ * which come from a different [realm](https://tc39.es/ecma262/#realm) while `instanceof Error` returns `false` for these errors:
+ *
+ * ```js
+ * import vm from 'node:vm';
+ * const context = vm.createContext({});
+ * const myError = vm.runInContext('new Error()', context);
+ * console.log(util.types.isNativeError(myError)); // true
+ * console.log(myError instanceof Error); // false
+ * ```
+ *
+ * Conversely, `isNativeError()` returns `false` for all objects which were not
+ * returned by the constructor of a native error. That includes values
+ * which are `instanceof` native errors:
+ *
+ * ```js
+ * const myError = { __proto__: Error.prototype };
+ * console.log(util.types.isNativeError(myError)); // false
+ * console.log(myError instanceof Error); // true
+ * ```
+ * @since v10.0.0
+ */
+ function isNativeError(object: unknown): object is Error;
+ /**
+ * Returns `true` if the value is a number object, e.g. created
+ * by `new Number()`.
+ *
+ * ```js
+ * util.types.isNumberObject(0); // Returns false
+ * util.types.isNumberObject(new Number(0)); // Returns true
+ * ```
+ * @since v10.0.0
+ */
+ function isNumberObject(object: unknown): object is Number;
+ /**
+ * Returns `true` if the value is a built-in [`Promise`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise).
+ *
+ * ```js
+ * util.types.isPromise(Promise.resolve(42)); // Returns true
+ * ```
+ * @since v10.0.0
+ */
+ function isPromise(object: unknown): object is Promise<unknown>;
+ /**
+ * Returns `true` if the value is a [`Proxy`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Proxy) instance.
+ *
+ * ```js
+ * const target = {};
+ * const proxy = new Proxy(target, {});
+ * util.types.isProxy(target); // Returns false
+ * util.types.isProxy(proxy); // Returns true
+ * ```
+ * @since v10.0.0
+ */
+ function isProxy(object: unknown): boolean;
+ /**
+ * Returns `true` if the value is a regular expression object.
+ *
+ * ```js
+ * util.types.isRegExp(/abc/); // Returns true
+ * util.types.isRegExp(new RegExp('abc')); // Returns true
+ * ```
+ * @since v10.0.0
+ */
+ function isRegExp(object: unknown): object is RegExp;
+ /**
+ * Returns `true` if the value is a built-in [`Set`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Set) instance.
+ *
+ * ```js
+ * util.types.isSet(new Set()); // Returns true
+ * ```
+ * @since v10.0.0
+ */
+ function isSet<T>(
+ object: T | {},
+ ): object is T extends ReadonlySet<any> ? (unknown extends T ? never : ReadonlySet<unknown>) : Set<unknown>;
+ /**
+ * Returns `true` if the value is an iterator returned for a built-in [`Set`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Set) instance.
+ *
+ * ```js
+ * const set = new Set();
+ * util.types.isSetIterator(set.keys()); // Returns true
+ * util.types.isSetIterator(set.values()); // Returns true
+ * util.types.isSetIterator(set.entries()); // Returns true
+ * util.types.isSetIterator(set[Symbol.iterator]()); // Returns true
+ * ```
+ * @since v10.0.0
+ */
+ function isSetIterator(object: unknown): boolean;
+ /**
+ * Returns `true` if the value is a built-in [`SharedArrayBuffer`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/SharedArrayBuffer) instance.
+ * This does _not_ include [`ArrayBuffer`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer) instances. Usually, it is
+ * desirable to test for both; See `util.types.isAnyArrayBuffer()` for that.
+ *
+ * ```js
+ * util.types.isSharedArrayBuffer(new ArrayBuffer()); // Returns false
+ * util.types.isSharedArrayBuffer(new SharedArrayBuffer()); // Returns true
+ * ```
+ * @since v10.0.0
+ */
+ function isSharedArrayBuffer(object: unknown): object is SharedArrayBuffer;
+ /**
+ * Returns `true` if the value is a string object, e.g. created
+ * by `new String()`.
+ * + * ```js + * util.types.isStringObject('foo'); // Returns false + * util.types.isStringObject(new String('foo')); // Returns true + * ``` + * @since v10.0.0 + */ + function isStringObject(object: unknown): object is String; + /** + * Returns `true` if the value is a symbol object, created + * by calling `Object()` on a `Symbol` primitive. + * + * ```js + * const symbol = Symbol('foo'); + * util.types.isSymbolObject(symbol); // Returns false + * util.types.isSymbolObject(Object(symbol)); // Returns true + * ``` + * @since v10.0.0 + */ + function isSymbolObject(object: unknown): object is Symbol; + /** + * Returns `true` if the value is a built-in [`TypedArray`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/TypedArray) instance. + * + * ```js + * util.types.isTypedArray(new ArrayBuffer()); // Returns false + * util.types.isTypedArray(new Uint8Array()); // Returns true + * util.types.isTypedArray(new Float64Array()); // Returns true + * ``` + * + * See also [`ArrayBuffer.isView()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer/isView). + * @since v10.0.0 + */ + function isTypedArray(object: unknown): object is NodeJS.TypedArray; + /** + * Returns `true` if the value is a built-in [`Uint8Array`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Uint8Array) instance. + * + * ```js + * util.types.isUint8Array(new ArrayBuffer()); // Returns false + * util.types.isUint8Array(new Uint8Array()); // Returns true + * util.types.isUint8Array(new Float64Array()); // Returns false + * ``` + * @since v10.0.0 + */ + function isUint8Array(object: unknown): object is Uint8Array; + /** + * Returns `true` if the value is a built-in [`Uint8ClampedArray`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Uint8ClampedArray) instance. + * + * ```js + * util.types.isUint8ClampedArray(new ArrayBuffer()); // Returns false + * util.types.isUint8ClampedArray(new Uint8ClampedArray()); // Returns true + * util.types.isUint8ClampedArray(new Float64Array()); // Returns false + * ``` + * @since v10.0.0 + */ + function isUint8ClampedArray(object: unknown): object is Uint8ClampedArray; + /** + * Returns `true` if the value is a built-in [`Uint16Array`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Uint16Array) instance. + * + * ```js + * util.types.isUint16Array(new ArrayBuffer()); // Returns false + * util.types.isUint16Array(new Uint16Array()); // Returns true + * util.types.isUint16Array(new Float64Array()); // Returns false + * ``` + * @since v10.0.0 + */ + function isUint16Array(object: unknown): object is Uint16Array; + /** + * Returns `true` if the value is a built-in [`Uint32Array`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Uint32Array) instance. + * + * ```js + * util.types.isUint32Array(new ArrayBuffer()); // Returns false + * util.types.isUint32Array(new Uint32Array()); // Returns true + * util.types.isUint32Array(new Float64Array()); // Returns false + * ``` + * @since v10.0.0 + */ + function isUint32Array(object: unknown): object is Uint32Array; + /** + * Returns `true` if the value is a built-in [`WeakMap`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/WeakMap) instance. 
+ *
+ * ```js
+ * util.types.isWeakMap(new WeakMap()); // Returns true
+ * ```
+ * @since v10.0.0
+ */
+ function isWeakMap(object: unknown): object is WeakMap<object, unknown>;
+ /**
+ * Returns `true` if the value is a built-in [`WeakSet`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/WeakSet) instance.
+ *
+ * ```js
+ * util.types.isWeakSet(new WeakSet()); // Returns true
+ * ```
+ * @since v10.0.0
+ */
+ function isWeakSet(object: unknown): object is WeakSet<object>;
+ /**
+ * Returns `true` if `value` is a `KeyObject`, `false` otherwise.
+ * @since v16.2.0
+ */
+ function isKeyObject(object: unknown): object is KeyObject;
+ /**
+ * Returns `true` if `value` is a `CryptoKey`, `false` otherwise.
+ * @since v16.2.0
+ */
+ function isCryptoKey(object: unknown): object is webcrypto.CryptoKey;
+}
+declare module "node:util" {
+ export * from "util";
+}
+declare module "node:util/types" {
+ export * from "util/types";
+}
diff --git a/node_modules/@types/node-fetch/node_modules/@types/node/v8.d.ts b/node_modules/@types/node-fetch/node_modules/@types/node/v8.d.ts
new file mode 100644
index 0000000..9676a81
--- /dev/null
+++ b/node_modules/@types/node-fetch/node_modules/@types/node/v8.d.ts
@@ -0,0 +1,808 @@
+/**
+ * The `node:v8` module exposes APIs that are specific to the version of [V8](https://developers.google.com/v8/) built into the Node.js binary. It can be accessed using:
+ *
+ * ```js
+ * import v8 from 'node:v8';
+ * ```
+ * @see [source](https://github.com/nodejs/node/blob/v22.x/lib/v8.js)
+ */
+declare module "v8" {
+ import { Readable } from "node:stream";
+ interface HeapSpaceInfo {
+ space_name: string;
+ space_size: number;
+ space_used_size: number;
+ space_available_size: number;
+ physical_space_size: number;
+ }
+ /** Signifies if the --zap_code_space option is enabled or not. 1 == enabled, 0 == disabled. */
+ type DoesZapCodeSpaceFlag = 0 | 1;
+ interface HeapInfo {
+ total_heap_size: number;
+ total_heap_size_executable: number;
+ total_physical_size: number;
+ total_available_size: number;
+ used_heap_size: number;
+ heap_size_limit: number;
+ malloced_memory: number;
+ peak_malloced_memory: number;
+ does_zap_garbage: DoesZapCodeSpaceFlag;
+ number_of_native_contexts: number;
+ number_of_detached_contexts: number;
+ total_global_handles_size: number;
+ used_global_handles_size: number;
+ external_memory: number;
+ }
+ interface HeapCodeStatistics {
+ code_and_metadata_size: number;
+ bytecode_and_metadata_size: number;
+ external_script_source_size: number;
+ }
+ interface HeapSnapshotOptions {
+ /**
+ * If true, expose internals in the heap snapshot.
+ * @default false
+ */
+ exposeInternals?: boolean;
+ /**
+ * If true, expose numeric values in artificial fields.
+ * @default false
+ */
+ exposeNumericValues?: boolean;
+ }
+ /**
+ * Returns an integer representing a version tag derived from the V8 version,
+ * command-line flags, and detected CPU features. This is useful for determining
+ * whether a `vm.Script` `cachedData` buffer is compatible with this instance
+ * of V8.
+ *
+ * ```js
+ * console.log(v8.cachedDataVersionTag()); // 3947234607
+ * // The value returned by v8.cachedDataVersionTag() is derived from the V8
+ * // version, command-line flags, and detected CPU features. Test that the value
+ * // does indeed update when flags are toggled.
+ * v8.setFlagsFromString('--allow_natives_syntax'); + * console.log(v8.cachedDataVersionTag()); // 183726201 + * ``` + * @since v8.0.0 + */ + function cachedDataVersionTag(): number; + /** + * Returns an object with the following properties: + * + * `does_zap_garbage` is a 0/1 boolean, which signifies whether the `--zap_code_space` option is enabled or not. This makes V8 overwrite heap + * garbage with a bit pattern. The RSS footprint (resident set size) gets bigger + * because it continuously touches all heap pages and that makes them less likely + * to get swapped out by the operating system. + * + * `number_of_native_contexts` The value of native\_context is the number of the + * top-level contexts currently active. Increase of this number over time indicates + * a memory leak. + * + * `number_of_detached_contexts` The value of detached\_context is the number + * of contexts that were detached and not yet garbage collected. This number + * being non-zero indicates a potential memory leak. + * + * `total_global_handles_size` The value of total\_global\_handles\_size is the + * total memory size of V8 global handles. + * + * `used_global_handles_size` The value of used\_global\_handles\_size is the + * used memory size of V8 global handles. + * + * `external_memory` The value of external\_memory is the memory size of array + * buffers and external strings. + * + * ```js + * { + * total_heap_size: 7326976, + * total_heap_size_executable: 4194304, + * total_physical_size: 7326976, + * total_available_size: 1152656, + * used_heap_size: 3476208, + * heap_size_limit: 1535115264, + * malloced_memory: 16384, + * peak_malloced_memory: 1127496, + * does_zap_garbage: 0, + * number_of_native_contexts: 1, + * number_of_detached_contexts: 0, + * total_global_handles_size: 8192, + * used_global_handles_size: 3296, + * external_memory: 318824 + * } + * ``` + * @since v1.0.0 + */ + function getHeapStatistics(): HeapInfo; + /** + * Returns statistics about the V8 heap spaces, i.e. the segments which make up + * the V8 heap. Neither the ordering of heap spaces, nor the availability of a + * heap space can be guaranteed as the statistics are provided via the + * V8 [`GetHeapSpaceStatistics`](https://v8docs.nodesource.com/node-13.2/d5/dda/classv8_1_1_isolate.html#ac673576f24fdc7a33378f8f57e1d13a4) function and may change from one V8 version to the + * next. + * + * The value returned is an array of objects containing the following properties: + * + * ```json + * [ + * { + * "space_name": "new_space", + * "space_size": 2063872, + * "space_used_size": 951112, + * "space_available_size": 80824, + * "physical_space_size": 2063872 + * }, + * { + * "space_name": "old_space", + * "space_size": 3090560, + * "space_used_size": 2493792, + * "space_available_size": 0, + * "physical_space_size": 3090560 + * }, + * { + * "space_name": "code_space", + * "space_size": 1260160, + * "space_used_size": 644256, + * "space_available_size": 960, + * "physical_space_size": 1260160 + * }, + * { + * "space_name": "map_space", + * "space_size": 1094160, + * "space_used_size": 201608, + * "space_available_size": 0, + * "physical_space_size": 1094160 + * }, + * { + * "space_name": "large_object_space", + * "space_size": 0, + * "space_used_size": 0, + * "space_available_size": 1490980608, + * "physical_space_size": 0 + * } + * ] + * ``` + * @since v6.0.0 + */ + function getHeapSpaceStatistics(): HeapSpaceInfo[]; + /** + * The `v8.setFlagsFromString()` method can be used to programmatically set + * V8 command-line flags. 
This method should be used with care. Changing settings + * after the VM has started may result in unpredictable behavior, including + * crashes and data loss; or it may simply do nothing. + * + * The V8 options available for a version of Node.js may be determined by running `node --v8-options`. + * + * Usage: + * + * ```js + * // Print GC events to stdout for one minute. + * import v8 from 'node:v8'; + * v8.setFlagsFromString('--trace_gc'); + * setTimeout(() => { v8.setFlagsFromString('--notrace_gc'); }, 60e3); + * ``` + * @since v1.0.0 + */ + function setFlagsFromString(flags: string): void; + /** + * This is similar to the [`queryObjects()` console API](https://developer.chrome.com/docs/devtools/console/utilities#queryObjects-function) + * provided by the Chromium DevTools console. It can be used to search for objects that have the matching constructor on its prototype chain + * in the heap after a full garbage collection, which can be useful for memory leak regression tests. To avoid surprising results, users should + * avoid using this API on constructors whose implementation they don't control, or on constructors that can be invoked by other parties in the + * application. + * + * To avoid accidental leaks, this API does not return raw references to the objects found. By default, it returns the count of the objects + * found. If `options.format` is `'summary'`, it returns an array containing brief string representations for each object. The visibility provided + * in this API is similar to what the heap snapshot provides, while users can save the cost of serialization and parsing and directly filter the + * target objects during the search. + * + * Only objects created in the current execution context are included in the results. + * + * ```js + * import { queryObjects } from 'node:v8'; + * class A { foo = 'bar'; } + * console.log(queryObjects(A)); // 0 + * const a = new A(); + * console.log(queryObjects(A)); // 1 + * // [ "A { foo: 'bar' }" ] + * console.log(queryObjects(A, { format: 'summary' })); + * + * class B extends A { bar = 'qux'; } + * const b = new B(); + * console.log(queryObjects(B)); // 1 + * // [ "B { foo: 'bar', bar: 'qux' }" ] + * console.log(queryObjects(B, { format: 'summary' })); + * + * // Note that, when there are child classes inheriting from a constructor, + * // the constructor also shows up in the prototype chain of the child + * // classes's prototoype, so the child classes's prototoype would also be + * // included in the result. + * console.log(queryObjects(A)); // 3 + * // [ "B { foo: 'bar', bar: 'qux' }", 'A {}', "A { foo: 'bar' }" ] + * console.log(queryObjects(A, { format: 'summary' })); + * ``` + * @param ctor The constructor that can be used to search on the prototype chain in order to filter target objects in the heap. + * @since v20.13.0 + * @experimental + */ + function queryObjects(ctor: Function): number | string[]; + function queryObjects(ctor: Function, options: { format: "count" }): number; + function queryObjects(ctor: Function, options: { format: "summary" }): string[]; + /** + * Generates a snapshot of the current V8 heap and returns a Readable + * Stream that may be used to read the JSON serialized representation. + * This JSON stream format is intended to be used with tools such as + * Chrome DevTools. The JSON schema is undocumented and specific to the + * V8 engine. Therefore, the schema may change from one version of V8 to the next. 
+ * + * Creating a heap snapshot requires memory about twice the size of the heap at + * the time the snapshot is created. This results in the risk of OOM killers + * terminating the process. + * + * Generating a snapshot is a synchronous operation which blocks the event loop + * for a duration depending on the heap size. + * + * ```js + * // Print heap snapshot to the console + * import v8 from 'node:v8'; + * const stream = v8.getHeapSnapshot(); + * stream.pipe(process.stdout); + * ``` + * @since v11.13.0 + * @return A Readable containing the V8 heap snapshot. + */ + function getHeapSnapshot(options?: HeapSnapshotOptions): Readable; + /** + * Generates a snapshot of the current V8 heap and writes it to a JSON + * file. This file is intended to be used with tools such as Chrome + * DevTools. The JSON schema is undocumented and specific to the V8 + * engine, and may change from one version of V8 to the next. + * + * A heap snapshot is specific to a single V8 isolate. When using `worker threads`, a heap snapshot generated from the main thread will + * not contain any information about the workers, and vice versa. + * + * Creating a heap snapshot requires memory about twice the size of the heap at + * the time the snapshot is created. This results in the risk of OOM killers + * terminating the process. + * + * Generating a snapshot is a synchronous operation which blocks the event loop + * for a duration depending on the heap size. + * + * ```js + * import { writeHeapSnapshot } from 'node:v8'; + * import { + * Worker, + * isMainThread, + * parentPort, + * } from 'node:worker_threads'; + * + * if (isMainThread) { + * const worker = new Worker(__filename); + * + * worker.once('message', (filename) => { + * console.log(`worker heapdump: ${filename}`); + * // Now get a heapdump for the main thread. + * console.log(`main thread heapdump: ${writeHeapSnapshot()}`); + * }); + * + * // Tell the worker to create a heapdump. + * worker.postMessage('heapdump'); + * } else { + * parentPort.once('message', (message) => { + * if (message === 'heapdump') { + * // Generate a heapdump for the worker + * // and return the filename to the parent. + * parentPort.postMessage(writeHeapSnapshot()); + * } + * }); + * } + * ``` + * @since v11.13.0 + * @param filename The file path where the V8 heap snapshot is to be saved. If not specified, a file name with the pattern `'Heap-${yyyymmdd}-${hhmmss}-${pid}-${thread_id}.heapsnapshot'` will be + * generated, where `{pid}` will be the PID of the Node.js process, `{thread_id}` will be `0` when `writeHeapSnapshot()` is called from the main Node.js thread or the id of a + * worker thread. + * @return The filename where the snapshot was saved. + */ + function writeHeapSnapshot(filename?: string, options?: HeapSnapshotOptions): string; + /** + * Get statistics about code and its metadata in the heap, see + * V8 [`GetHeapCodeAndMetadataStatistics`](https://v8docs.nodesource.com/node-13.2/d5/dda/classv8_1_1_isolate.html#a6079122af17612ef54ef3348ce170866) API. Returns an object with the + * following properties: + * + * ```js + * { + * code_and_metadata_size: 212208, + * bytecode_and_metadata_size: 161368, + * external_script_source_size: 1410794, + * cpu_profiler_metadata_size: 0, + * } + * ``` + * @since v12.8.0 + */ + function getHeapCodeStatistics(): HeapCodeStatistics; + /** + * @since v8.0.0 + */ + class Serializer { + /** + * Writes out a header, which includes the serialization format version. 
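+ *
+ * For illustration only (not part of the upstream Node.js docs): a minimal sketch of a
+ * round trip that starts with `writeHeader()`, using the `DefaultSerializer` and
+ * `DefaultDeserializer` classes declared later in this file.
+ *
+ * ```js
+ * import { DefaultSerializer, DefaultDeserializer } from 'node:v8';
+ *
+ * const serializer = new DefaultSerializer();
+ * serializer.writeHeader();
+ * serializer.writeValue({ hello: 'world' });
+ * const buffer = serializer.releaseBuffer();
+ *
+ * const deserializer = new DefaultDeserializer(buffer);
+ * deserializer.readHeader();
+ * console.log(deserializer.readValue()); // { hello: 'world' }
+ * ```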
+ */ + writeHeader(): void; + /** + * Serializes a JavaScript value and adds the serialized representation to the + * internal buffer. + * + * This throws an error if `value` cannot be serialized. + */ + writeValue(val: any): boolean; + /** + * Returns the stored internal buffer. This serializer should not be used once + * the buffer is released. Calling this method results in undefined behavior + * if a previous write has failed. + */ + releaseBuffer(): Buffer; + /** + * Marks an `ArrayBuffer` as having its contents transferred out of band. + * Pass the corresponding `ArrayBuffer` in the deserializing context to `deserializer.transferArrayBuffer()`. + * @param id A 32-bit unsigned integer. + * @param arrayBuffer An `ArrayBuffer` instance. + */ + transferArrayBuffer(id: number, arrayBuffer: ArrayBuffer): void; + /** + * Write a raw 32-bit unsigned integer. + * For use inside of a custom `serializer._writeHostObject()`. + */ + writeUint32(value: number): void; + /** + * Write a raw 64-bit unsigned integer, split into high and low 32-bit parts. + * For use inside of a custom `serializer._writeHostObject()`. + */ + writeUint64(hi: number, lo: number): void; + /** + * Write a JS `number` value. + * For use inside of a custom `serializer._writeHostObject()`. + */ + writeDouble(value: number): void; + /** + * Write raw bytes into the serializer's internal buffer. The deserializer + * will require a way to compute the length of the buffer. + * For use inside of a custom `serializer._writeHostObject()`. + */ + writeRawBytes(buffer: NodeJS.TypedArray): void; + } + /** + * A subclass of `Serializer` that serializes `TypedArray`(in particular `Buffer`) and `DataView` objects as host objects, and only + * stores the part of their underlying `ArrayBuffer`s that they are referring to. + * @since v8.0.0 + */ + class DefaultSerializer extends Serializer {} + /** + * @since v8.0.0 + */ + class Deserializer { + constructor(data: NodeJS.TypedArray); + /** + * Reads and validates a header (including the format version). + * May, for example, reject an invalid or unsupported wire format. In that case, + * an `Error` is thrown. + */ + readHeader(): boolean; + /** + * Deserializes a JavaScript value from the buffer and returns it. + */ + readValue(): any; + /** + * Marks an `ArrayBuffer` as having its contents transferred out of band. + * Pass the corresponding `ArrayBuffer` in the serializing context to `serializer.transferArrayBuffer()` (or return the `id` from `serializer._getSharedArrayBufferId()` in the case of + * `SharedArrayBuffer`s). + * @param id A 32-bit unsigned integer. + * @param arrayBuffer An `ArrayBuffer` instance. + */ + transferArrayBuffer(id: number, arrayBuffer: ArrayBuffer): void; + /** + * Reads the underlying wire format version. Likely mostly to be useful to + * legacy code reading old wire format versions. May not be called before `.readHeader()`. + */ + getWireFormatVersion(): number; + /** + * Read a raw 32-bit unsigned integer and return it. + * For use inside of a custom `deserializer._readHostObject()`. + */ + readUint32(): number; + /** + * Read a raw 64-bit unsigned integer and return it as an array `[hi, lo]` with two 32-bit unsigned integer entries. + * For use inside of a custom `deserializer._readHostObject()`. + */ + readUint64(): [number, number]; + /** + * Read a JS `number` value. + * For use inside of a custom `deserializer._readHostObject()`. + */ + readDouble(): number; + /** + * Read raw bytes from the deserializer's internal buffer. 
The `length` parameter + * must correspond to the length of the buffer that was passed to `serializer.writeRawBytes()`. + * For use inside of a custom `deserializer._readHostObject()`. + */ + readRawBytes(length: number): Buffer; + } + /** + * A subclass of `Deserializer` corresponding to the format written by `DefaultSerializer`. + * @since v8.0.0 + */ + class DefaultDeserializer extends Deserializer {} + /** + * Uses a `DefaultSerializer` to serialize `value` into a buffer. + * + * `ERR_BUFFER_TOO_LARGE` will be thrown when trying to + * serialize a huge object which requires buffer + * larger than `buffer.constants.MAX_LENGTH`. + * @since v8.0.0 + */ + function serialize(value: any): Buffer; + /** + * Uses a `DefaultDeserializer` with default options to read a JS value + * from a buffer. + * @since v8.0.0 + * @param buffer A buffer returned by {@link serialize}. + */ + function deserialize(buffer: NodeJS.ArrayBufferView): any; + /** + * The `v8.takeCoverage()` method allows the user to write the coverage started by `NODE_V8_COVERAGE` to disk on demand. This method can be invoked multiple + * times during the lifetime of the process. Each time the execution counter will + * be reset and a new coverage report will be written to the directory specified + * by `NODE_V8_COVERAGE`. + * + * When the process is about to exit, one last coverage will still be written to + * disk unless {@link stopCoverage} is invoked before the process exits. + * @since v15.1.0, v14.18.0, v12.22.0 + */ + function takeCoverage(): void; + /** + * The `v8.stopCoverage()` method allows the user to stop the coverage collection + * started by `NODE_V8_COVERAGE`, so that V8 can release the execution count + * records and optimize code. This can be used in conjunction with {@link takeCoverage} if the user wants to collect the coverage on demand. + * @since v15.1.0, v14.18.0, v12.22.0 + */ + function stopCoverage(): void; + /** + * The API is a no-op if `--heapsnapshot-near-heap-limit` is already set from the command line or the API is called more than once. + * `limit` must be a positive integer. See [`--heapsnapshot-near-heap-limit`](https://nodejs.org/docs/latest-v22.x/api/cli.html#--heapsnapshot-near-heap-limitmax_count) for more information. + * @experimental + * @since v18.10.0, v16.18.0 + */ + function setHeapSnapshotNearHeapLimit(limit: number): void; + /** + * This API collects GC data in current thread. + * @since v19.6.0, v18.15.0 + */ + class GCProfiler { + /** + * Start collecting GC data. + * @since v19.6.0, v18.15.0 + */ + start(): void; + /** + * Stop collecting GC data and return an object. The content of object + * is as follows. 
+ * + * ```json + * { + * "version": 1, + * "startTime": 1674059033862, + * "statistics": [ + * { + * "gcType": "Scavenge", + * "beforeGC": { + * "heapStatistics": { + * "totalHeapSize": 5005312, + * "totalHeapSizeExecutable": 524288, + * "totalPhysicalSize": 5226496, + * "totalAvailableSize": 4341325216, + * "totalGlobalHandlesSize": 8192, + * "usedGlobalHandlesSize": 2112, + * "usedHeapSize": 4883840, + * "heapSizeLimit": 4345298944, + * "mallocedMemory": 254128, + * "externalMemory": 225138, + * "peakMallocedMemory": 181760 + * }, + * "heapSpaceStatistics": [ + * { + * "spaceName": "read_only_space", + * "spaceSize": 0, + * "spaceUsedSize": 0, + * "spaceAvailableSize": 0, + * "physicalSpaceSize": 0 + * } + * ] + * }, + * "cost": 1574.14, + * "afterGC": { + * "heapStatistics": { + * "totalHeapSize": 6053888, + * "totalHeapSizeExecutable": 524288, + * "totalPhysicalSize": 5500928, + * "totalAvailableSize": 4341101384, + * "totalGlobalHandlesSize": 8192, + * "usedGlobalHandlesSize": 2112, + * "usedHeapSize": 4059096, + * "heapSizeLimit": 4345298944, + * "mallocedMemory": 254128, + * "externalMemory": 225138, + * "peakMallocedMemory": 181760 + * }, + * "heapSpaceStatistics": [ + * { + * "spaceName": "read_only_space", + * "spaceSize": 0, + * "spaceUsedSize": 0, + * "spaceAvailableSize": 0, + * "physicalSpaceSize": 0 + * } + * ] + * } + * } + * ], + * "endTime": 1674059036865 + * } + * ``` + * + * Here's an example. + * + * ```js + * import { GCProfiler } from 'node:v8'; + * const profiler = new GCProfiler(); + * profiler.start(); + * setTimeout(() => { + * console.log(profiler.stop()); + * }, 1000); + * ``` + * @since v19.6.0, v18.15.0 + */ + stop(): GCProfilerResult; + } + interface GCProfilerResult { + version: number; + startTime: number; + endTime: number; + statistics: Array<{ + gcType: string; + cost: number; + beforeGC: { + heapStatistics: HeapStatistics; + heapSpaceStatistics: HeapSpaceStatistics[]; + }; + afterGC: { + heapStatistics: HeapStatistics; + heapSpaceStatistics: HeapSpaceStatistics[]; + }; + }>; + } + interface HeapStatistics { + totalHeapSize: number; + totalHeapSizeExecutable: number; + totalPhysicalSize: number; + totalAvailableSize: number; + totalGlobalHandlesSize: number; + usedGlobalHandlesSize: number; + usedHeapSize: number; + heapSizeLimit: number; + mallocedMemory: number; + externalMemory: number; + peakMallocedMemory: number; + } + interface HeapSpaceStatistics { + spaceName: string; + spaceSize: number; + spaceUsedSize: number; + spaceAvailableSize: number; + physicalSpaceSize: number; + } + /** + * Called when a promise is constructed. This does not mean that corresponding before/after events will occur, only that the possibility exists. This will + * happen if a promise is created without ever getting a continuation. + * @since v17.1.0, v16.14.0 + * @param promise The promise being created. + * @param parent The promise continued from, if applicable. + */ + interface Init { + (promise: Promise, parent: Promise): void; + } + /** + * Called before a promise continuation executes. This can be in the form of `then()`, `catch()`, or `finally()` handlers or an await resuming. + * + * The before callback will be called 0 to N times. The before callback will typically be called 0 times if no continuation was ever made for the promise. + * The before callback may be called many times in the case where many continuations have been made from the same promise. 
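+ *
+ * For illustration only (not part of the upstream Node.js docs): a sketch of how a
+ * `before` callback can be registered through the `promiseHooks.onBefore()` hook declared below.
+ *
+ * ```js
+ * import { promiseHooks } from 'node:v8';
+ *
+ * const stop = promiseHooks.onBefore((promise) => {
+ *   // Runs just before a then()/catch()/finally() handler or an await resumes.
+ * });
+ *
+ * Promise.resolve(1).then(() => {});
+ * stop(); // Stop receiving 'before' notifications.
+ * ```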
+ * @since v17.1.0, v16.14.0
+ */
+ interface Before {
+ (promise: Promise<unknown>): void;
+ }
+ /**
+ * Called immediately after a promise continuation executes. This may be after a `then()`, `catch()`, or `finally()` handler or before an await after another await.
+ * @since v17.1.0, v16.14.0
+ */
+ interface After {
+ (promise: Promise<unknown>): void;
+ }
+ /**
+ * Called when the promise receives a resolution or rejection value. This may occur synchronously in the case of {@link Promise.resolve()} or
+ * {@link Promise.reject()}.
+ * @since v17.1.0, v16.14.0
+ */
+ interface Settled {
+ (promise: Promise<unknown>): void;
+ }
+ /**
+ * Key events in the lifetime of a promise have been categorized into four areas: creation of a promise, before/after a continuation handler is called or
+ * around an await, and when the promise resolves or rejects.
+ *
+ * Because promises are asynchronous resources whose lifecycle is tracked via the promise hooks mechanism, the `init()`, `before()`, `after()`, and
+ * `settled()` callbacks must not be async functions as they create more promises which would produce an infinite loop.
+ * @since v17.1.0, v16.14.0
+ */
+ interface HookCallbacks {
+ init?: Init;
+ before?: Before;
+ after?: After;
+ settled?: Settled;
+ }
+ interface PromiseHooks {
+ /**
+ * The `init` hook must be a plain function. Providing an async function will throw as it would produce an infinite microtask loop.
+ * @since v17.1.0, v16.14.0
+ * @param init The {@link Init | `init` callback} to call when a promise is created.
+ * @return Call to stop the hook.
+ */
+ onInit: (init: Init) => Function;
+ /**
+ * The `settled` hook must be a plain function. Providing an async function will throw as it would produce an infinite microtask loop.
+ * @since v17.1.0, v16.14.0
+ * @param settled The {@link Settled | `settled` callback} to call when a promise is resolved or rejected.
+ * @return Call to stop the hook.
+ */
+ onSettled: (settled: Settled) => Function;
+ /**
+ * The `before` hook must be a plain function. Providing an async function will throw as it would produce an infinite microtask loop.
+ * @since v17.1.0, v16.14.0
+ * @param before The {@link Before | `before` callback} to call before a promise continuation executes.
+ * @return Call to stop the hook.
+ */
+ onBefore: (before: Before) => Function;
+ /**
+ * The `after` hook must be a plain function. Providing an async function will throw as it would produce an infinite microtask loop.
+ * @since v17.1.0, v16.14.0
+ * @param after The {@link After | `after` callback} to call after a promise continuation executes.
+ * @return Call to stop the hook.
+ */
+ onAfter: (after: After) => Function;
+ /**
+ * Registers functions to be called for different lifetime events of each promise.
+ * The callbacks `init()`/`before()`/`after()`/`settled()` are called for the respective events during a promise's lifetime.
+ * All callbacks are optional. For example, if only promise creation needs to be tracked, then only the init callback needs to be passed.
+ * The hook callbacks must be plain functions. Providing async functions will throw as it would produce an infinite microtask loop.
+ * @since v17.1.0, v16.14.0
+ * @param callbacks The {@link HookCallbacks | Hook Callbacks} to register
+ * @return Used for disabling hooks
+ */
+ createHook: (callbacks: HookCallbacks) => Function;
+ }
+ /**
+ * The `promiseHooks` interface can be used to track promise lifecycle events.
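+ *
+ * For illustration only (not part of the upstream Node.js docs): a minimal sketch using
+ * `promiseHooks.createHook()` as declared above. `createHook()` returns a function that
+ * disables the registered hooks again.
+ *
+ * ```js
+ * import { promiseHooks } from 'node:v8';
+ *
+ * const stop = promiseHooks.createHook({
+ *   init(promise, parent) {
+ *     // Called once for every promise that is created.
+ *   },
+ *   settled(promise) {
+ *     // Called when a promise resolves or rejects.
+ *   },
+ * });
+ *
+ * Promise.resolve(42);
+ * stop(); // Disable the hooks.
+ * ```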
+ * @since v17.1.0, v16.14.0 + */ + const promiseHooks: PromiseHooks; + type StartupSnapshotCallbackFn = (args: any) => any; + interface StartupSnapshot { + /** + * Add a callback that will be called when the Node.js instance is about to get serialized into a snapshot and exit. + * This can be used to release resources that should not or cannot be serialized or to convert user data into a form more suitable for serialization. + * @since v18.6.0, v16.17.0 + */ + addSerializeCallback(callback: StartupSnapshotCallbackFn, data?: any): void; + /** + * Add a callback that will be called when the Node.js instance is deserialized from a snapshot. + * The `callback` and the `data` (if provided) will be serialized into the snapshot, they can be used to re-initialize the state of the application or + * to re-acquire resources that the application needs when the application is restarted from the snapshot. + * @since v18.6.0, v16.17.0 + */ + addDeserializeCallback(callback: StartupSnapshotCallbackFn, data?: any): void; + /** + * This sets the entry point of the Node.js application when it is deserialized from a snapshot. This can be called only once in the snapshot building script. + * If called, the deserialized application no longer needs an additional entry point script to start up and will simply invoke the callback along with the deserialized + * data (if provided), otherwise an entry point script still needs to be provided to the deserialized application. + * @since v18.6.0, v16.17.0 + */ + setDeserializeMainFunction(callback: StartupSnapshotCallbackFn, data?: any): void; + /** + * Returns true if the Node.js instance is run to build a snapshot. + * @since v18.6.0, v16.17.0 + */ + isBuildingSnapshot(): boolean; + } + /** + * The `v8.startupSnapshot` interface can be used to add serialization and deserialization hooks for custom startup snapshots. + * + * ```bash + * $ node --snapshot-blob snapshot.blob --build-snapshot entry.js + * # This launches a process with the snapshot + * $ node --snapshot-blob snapshot.blob + * ``` + * + * In the example above, `entry.js` can use methods from the `v8.startupSnapshot` interface to specify how to save information for custom objects + * in the snapshot during serialization and how the information can be used to synchronize these objects during deserialization of the snapshot. + * For example, if the `entry.js` contains the following script: + * + * ```js + * 'use strict'; + * + * import fs from 'node:fs'; + * import zlib from 'node:zlib'; + * import path from 'node:path'; + * import assert from 'node:assert'; + * + * import v8 from 'node:v8'; + * + * class BookShelf { + * storage = new Map(); + * + * // Reading a series of files from directory and store them into storage. + * constructor(directory, books) { + * for (const book of books) { + * this.storage.set(book, fs.readFileSync(path.join(directory, book))); + * } + * } + * + * static compressAll(shelf) { + * for (const [ book, content ] of shelf.storage) { + * shelf.storage.set(book, zlib.gzipSync(content)); + * } + * } + * + * static decompressAll(shelf) { + * for (const [ book, content ] of shelf.storage) { + * shelf.storage.set(book, zlib.gunzipSync(content)); + * } + * } + * } + * + * // __dirname here is where the snapshot script is placed + * // during snapshot building time. 
+ * const shelf = new BookShelf(__dirname, [
+ * 'book1.en_US.txt',
+ * 'book1.es_ES.txt',
+ * 'book2.zh_CN.txt',
+ * ]);
+ *
+ * assert(v8.startupSnapshot.isBuildingSnapshot());
+ * // On snapshot serialization, compress the books to reduce size.
+ * v8.startupSnapshot.addSerializeCallback(BookShelf.compressAll, shelf);
+ * // On snapshot deserialization, decompress the books.
+ * v8.startupSnapshot.addDeserializeCallback(BookShelf.decompressAll, shelf);
+ * v8.startupSnapshot.setDeserializeMainFunction((shelf) => {
+ * // process.env and process.argv are refreshed during snapshot
+ * // deserialization.
+ * const lang = process.env.BOOK_LANG || 'en_US';
+ * const book = process.argv[1];
+ * const name = `${book}.${lang}.txt`;
+ * console.log(shelf.storage.get(name));
+ * }, shelf);
+ * ```
+ *
+ * The resulting binary will print the data deserialized from the snapshot during start up, using the refreshed `process.env` and `process.argv` of the launched process:
+ *
+ * ```bash
+ * $ BOOK_LANG=es_ES node --snapshot-blob snapshot.blob book1
+ * # Prints content of book1.es_ES.txt deserialized from the snapshot.
+ * ```
+ *
+ * Currently the application deserialized from a user-land snapshot cannot be snapshotted again, so these APIs are only available to applications that are not deserialized from a user-land snapshot.
+ *
+ * @experimental
+ * @since v18.6.0, v16.17.0
+ */
+ const startupSnapshot: StartupSnapshot;
+}
+declare module "node:v8" {
+ export * from "v8";
+}
diff --git a/node_modules/@types/node-fetch/node_modules/@types/node/vm.d.ts b/node_modules/@types/node-fetch/node_modules/@types/node/vm.d.ts
new file mode 100644
index 0000000..5b2a602
--- /dev/null
+++ b/node_modules/@types/node-fetch/node_modules/@types/node/vm.d.ts
@@ -0,0 +1,976 @@
+/**
+ * The `node:vm` module enables compiling and running code within V8 Virtual
+ * Machine contexts.
+ *
+ * **The `node:vm` module is not a security**
+ * **mechanism. Do not use it to run untrusted code.**
+ *
+ * JavaScript code can be compiled and run immediately or
+ * compiled, saved, and run later.
+ *
+ * A common use case is to run the code in a different V8 Context. This means
+ * invoked code has a different global object than the invoking code.
+ *
+ * One can provide the context by `contextifying` an
+ * object. The invoked code treats any property in the context like a
+ * global variable. Any changes to global variables caused by the invoked
+ * code are reflected in the context object.
+ *
+ * ```js
+ * import vm from 'node:vm';
+ *
+ * const x = 1;
+ *
+ * const context = { x: 2 };
+ * vm.createContext(context); // Contextify the object.
+ *
+ * const code = 'x += 40; var y = 17;';
+ * // `x` and `y` are global variables in the context.
+ * // Initially, x has the value 2 because that is the value of context.x.
+ * vm.runInContext(code, context);
+ *
+ * console.log(context.x); // 42
+ * console.log(context.y); // 17
+ *
+ * console.log(x); // 1; y is not defined.
+ * ```
+ * @see [source](https://github.com/nodejs/node/blob/v22.x/lib/vm.js)
+ */
+declare module "vm" {
+ import { ImportAttributes } from "node:module";
+ interface Context extends NodeJS.Dict<any> {}
+ interface BaseOptions {
+ /**
+ * Specifies the filename used in stack traces produced by this script.
+ * @default ''
+ */
+ filename?: string | undefined;
+ /**
+ * Specifies the line number offset that is displayed in stack traces produced by this script.
+ * @default 0 + */ + lineOffset?: number | undefined; + /** + * Specifies the column number offset that is displayed in stack traces produced by this script. + * @default 0 + */ + columnOffset?: number | undefined; + } + interface ScriptOptions extends BaseOptions { + /** + * V8's code cache data for the supplied source. + */ + cachedData?: Buffer | NodeJS.ArrayBufferView | undefined; + /** @deprecated in favor of `script.createCachedData()` */ + produceCachedData?: boolean | undefined; + /** + * Used to specify how the modules should be loaded during the evaluation of this script when `import()` is called. This option is + * part of the experimental modules API. We do not recommend using it in a production environment. For detailed information, see + * [Support of dynamic `import()` in compilation APIs](https://nodejs.org/docs/latest-v22.x/api/vm.html#support-of-dynamic-import-in-compilation-apis). + */ + importModuleDynamically?: + | ((specifier: string, script: Script, importAttributes: ImportAttributes) => Module) + | typeof constants.USE_MAIN_CONTEXT_DEFAULT_LOADER + | undefined; + } + interface RunningScriptOptions extends BaseOptions { + /** + * When `true`, if an `Error` occurs while compiling the `code`, the line of code causing the error is attached to the stack trace. + * @default true + */ + displayErrors?: boolean | undefined; + /** + * Specifies the number of milliseconds to execute code before terminating execution. + * If execution is terminated, an `Error` will be thrown. This value must be a strictly positive integer. + */ + timeout?: number | undefined; + /** + * If `true`, the execution will be terminated when `SIGINT` (Ctrl+C) is received. + * Existing handlers for the event that have been attached via `process.on('SIGINT')` will be disabled during script execution, but will continue to work after that. + * If execution is terminated, an `Error` will be thrown. + * @default false + */ + breakOnSigint?: boolean | undefined; + } + interface RunningScriptInNewContextOptions extends RunningScriptOptions { + /** + * Human-readable name of the newly created context. + */ + contextName?: CreateContextOptions["name"]; + /** + * Origin corresponding to the newly created context for display purposes. The origin should be formatted like a URL, + * but with only the scheme, host, and port (if necessary), like the value of the `url.origin` property of a `URL` object. + * Most notably, this string should omit the trailing slash, as that denotes a path. + */ + contextOrigin?: CreateContextOptions["origin"]; + contextCodeGeneration?: CreateContextOptions["codeGeneration"]; + /** + * If set to `afterEvaluate`, microtasks will be run immediately after the script has run. + */ + microtaskMode?: CreateContextOptions["microtaskMode"]; + } + interface RunningCodeOptions extends RunningScriptOptions { + cachedData?: ScriptOptions["cachedData"]; + importModuleDynamically?: ScriptOptions["importModuleDynamically"]; + } + interface RunningCodeInNewContextOptions extends RunningScriptInNewContextOptions { + cachedData?: ScriptOptions["cachedData"]; + importModuleDynamically?: ScriptOptions["importModuleDynamically"]; + } + interface CompileFunctionOptions extends BaseOptions { + /** + * Provides an optional data with V8's code cache data for the supplied source. + */ + cachedData?: Buffer | undefined; + /** + * Specifies whether to produce new cache data. 
+ * @default false + */ + produceCachedData?: boolean | undefined; + /** + * The sandbox/context in which the said function should be compiled in. + */ + parsingContext?: Context | undefined; + /** + * An array containing a collection of context extensions (objects wrapping the current scope) to be applied while compiling + */ + contextExtensions?: Object[] | undefined; + } + interface CreateContextOptions { + /** + * Human-readable name of the newly created context. + * @default 'VM Context i' Where i is an ascending numerical index of the created context. + */ + name?: string | undefined; + /** + * Corresponds to the newly created context for display purposes. + * The origin should be formatted like a `URL`, but with only the scheme, host, and port (if necessary), + * like the value of the `url.origin` property of a URL object. + * Most notably, this string should omit the trailing slash, as that denotes a path. + * @default '' + */ + origin?: string | undefined; + codeGeneration?: + | { + /** + * If set to false any calls to eval or function constructors (Function, GeneratorFunction, etc) + * will throw an EvalError. + * @default true + */ + strings?: boolean | undefined; + /** + * If set to false any attempt to compile a WebAssembly module will throw a WebAssembly.CompileError. + * @default true + */ + wasm?: boolean | undefined; + } + | undefined; + /** + * If set to `afterEvaluate`, microtasks will be run immediately after the script has run. + */ + microtaskMode?: "afterEvaluate" | undefined; + } + type MeasureMemoryMode = "summary" | "detailed"; + interface MeasureMemoryOptions { + /** + * @default 'summary' + */ + mode?: MeasureMemoryMode | undefined; + /** + * @default 'default' + */ + execution?: "default" | "eager" | undefined; + } + interface MemoryMeasurement { + total: { + jsMemoryEstimate: number; + jsMemoryRange: [number, number]; + }; + } + /** + * Instances of the `vm.Script` class contain precompiled scripts that can be + * executed in specific contexts. + * @since v0.3.1 + */ + class Script { + constructor(code: string, options?: ScriptOptions | string); + /** + * Runs the compiled code contained by the `vm.Script` object within the given `contextifiedObject` and returns the result. Running code does not have access + * to local scope. + * + * The following example compiles code that increments a global variable, sets + * the value of another global variable, then execute the code multiple times. + * The globals are contained in the `context` object. + * + * ```js + * import vm from 'node:vm'; + * + * const context = { + * animal: 'cat', + * count: 2, + * }; + * + * const script = new vm.Script('count += 1; name = "kitty";'); + * + * vm.createContext(context); + * for (let i = 0; i < 10; ++i) { + * script.runInContext(context); + * } + * + * console.log(context); + * // Prints: { animal: 'cat', count: 12, name: 'kitty' } + * ``` + * + * Using the `timeout` or `breakOnSigint` options will result in new event loops + * and corresponding threads being started, which have a non-zero performance + * overhead. + * @since v0.3.1 + * @param contextifiedObject A `contextified` object as returned by the `vm.createContext()` method. + * @return the result of the very last statement executed in the script. + */ + runInContext(contextifiedObject: Context, options?: RunningScriptOptions): any; + /** + * This method is a shortcut to `script.runInContext(vm.createContext(options), options)`. + * It does several things at once: + * + * 1. Creates a new context. + * 2. 
If `contextObject` is an object, contextifies it with the new context. + * If `contextObject` is undefined, creates a new object and contextifies it. + * If `contextObject` is `vm.constants.DONT_CONTEXTIFY`, don't contextify anything. + * 3. Runs the compiled code contained by the `vm.Script` object within the created context. The code + * does not have access to the scope in which this method is called. + * 4. Returns the result. + * + * The following example compiles code that sets a global variable, then executes + * the code multiple times in different contexts. The globals are set on and + * contained within each individual `context`. + * + * ```js + * const vm = require('node:vm'); + * + * const script = new vm.Script('globalVar = "set"'); + * + * const contexts = [{}, {}, {}]; + * contexts.forEach((context) => { + * script.runInNewContext(context); + * }); + * + * console.log(contexts); + * // Prints: [{ globalVar: 'set' }, { globalVar: 'set' }, { globalVar: 'set' }] + * + * // This would throw if the context is created from a contextified object. + * // vm.constants.DONT_CONTEXTIFY allows creating contexts with ordinary + * // global objects that can be frozen. + * const freezeScript = new vm.Script('Object.freeze(globalThis); globalThis;'); + * const frozenContext = freezeScript.runInNewContext(vm.constants.DONT_CONTEXTIFY); + * ``` + * @since v0.3.1 + * @param contextObject Either `vm.constants.DONT_CONTEXTIFY` or an object that will be contextified. + * If `undefined`, an empty contextified object will be created for backwards compatibility. + * @return the result of the very last statement executed in the script. + */ + runInNewContext( + contextObject?: Context | typeof constants.DONT_CONTEXTIFY, + options?: RunningScriptInNewContextOptions, + ): any; + /** + * Runs the compiled code contained by the `vm.Script` within the context of the + * current `global` object. Running code does not have access to local scope, but _does_ have access to the current `global` object. + * + * The following example compiles code that increments a `global` variable then + * executes that code multiple times: + * + * ```js + * import vm from 'node:vm'; + * + * global.globalVar = 0; + * + * const script = new vm.Script('globalVar += 1', { filename: 'myfile.vm' }); + * + * for (let i = 0; i < 1000; ++i) { + * script.runInThisContext(); + * } + * + * console.log(globalVar); + * + * // 1000 + * ``` + * @since v0.3.1 + * @return the result of the very last statement executed in the script. + */ + runInThisContext(options?: RunningScriptOptions): any; + /** + * Creates a code cache that can be used with the `Script` constructor's `cachedData` option. Returns a `Buffer`. This method may be called at any + * time and any number of times. + * + * The code cache of the `Script` doesn't contain any JavaScript observable + * states. The code cache is safe to be saved along side the script source and + * used to construct new `Script` instances multiple times. + * + * Functions in the `Script` source can be marked as lazily compiled and they are + * not compiled at construction of the `Script`. These functions are going to be + * compiled when they are invoked the first time. The code cache serializes the + * metadata that V8 currently knows about the `Script` that it can use to speed up + * future compilations. 
+ * + * ```js + * const script = new vm.Script(` + * function add(a, b) { + * return a + b; + * } + * + * const x = add(1, 2); + * `); + * + * const cacheWithoutAdd = script.createCachedData(); + * // In `cacheWithoutAdd` the function `add()` is marked for full compilation + * // upon invocation. + * + * script.runInThisContext(); + * + * const cacheWithAdd = script.createCachedData(); + * // `cacheWithAdd` contains fully compiled function `add()`. + * ``` + * @since v10.6.0 + */ + createCachedData(): Buffer; + /** @deprecated in favor of `script.createCachedData()` */ + cachedDataProduced?: boolean | undefined; + /** + * When `cachedData` is supplied to create the `vm.Script`, this value will be set + * to either `true` or `false` depending on acceptance of the data by V8. + * Otherwise the value is `undefined`. + * @since v5.7.0 + */ + cachedDataRejected?: boolean | undefined; + cachedData?: Buffer | undefined; + /** + * When the script is compiled from a source that contains a source map magic + * comment, this property will be set to the URL of the source map. + * + * ```js + * import vm from 'node:vm'; + * + * const script = new vm.Script(` + * function myFunc() {} + * //# sourceMappingURL=sourcemap.json + * `); + * + * console.log(script.sourceMapURL); + * // Prints: sourcemap.json + * ``` + * @since v19.1.0, v18.13.0 + */ + sourceMapURL?: string | undefined; + } + /** + * If the given `contextObject` is an object, the `vm.createContext()` method will + * [prepare that object](https://nodejs.org/docs/latest-v22.x/api/vm.html#what-does-it-mean-to-contextify-an-object) + * and return a reference to it so that it can be used in calls to {@link runInContext} or + * [`script.runInContext()`](https://nodejs.org/docs/latest-v22.x/api/vm.html#scriptrunincontextcontextifiedobject-options). + * Inside such scripts, the global object will be wrapped by the `contextObject`, retaining all of its + * existing properties but also having the built-in objects and functions any standard + * [global object](https://es5.github.io/#x15.1) has. Outside of scripts run by the vm module, global + * variables will remain unchanged. + * + * ```js + * const vm = require('node:vm'); + * + * global.globalVar = 3; + * + * const context = { globalVar: 1 }; + * vm.createContext(context); + * + * vm.runInContext('globalVar *= 2;', context); + * + * console.log(context); + * // Prints: { globalVar: 2 } + * + * console.log(global.globalVar); + * // Prints: 3 + * ``` + * + * If `contextObject` is omitted (or passed explicitly as `undefined`), a new, + * empty contextified object will be returned. + * + * When the global object in the newly created context is contextified, it has some quirks + * compared to ordinary global objects. For example, it cannot be frozen. To create a context + * without the contextifying quirks, pass `vm.constants.DONT_CONTEXTIFY` as the `contextObject` + * argument. See the documentation of `vm.constants.DONT_CONTEXTIFY` for details. + * + * The `vm.createContext()` method is primarily useful for creating a single + * context that can be used to run multiple scripts. 
For instance, if emulating a + * web browser, the method can be used to create a single context representing a + * window's global object, then run all ` +``` + + + +Node + + +Install with `npm install deprecation` + +```js +const { Deprecation } = require("deprecation"); +// or: import { Deprecation } from "deprecation"; +``` + + + + + +```js +function foo() { + bar(); +} + +function bar() { + baz(); +} + +function baz() { + console.warn(new Deprecation("[my-lib] foo() is deprecated, use bar()")); +} + +foo(); +// { Deprecation: [my-lib] foo() is deprecated, use bar() +// at baz (/path/to/file.js:12:15) +// at bar (/path/to/file.js:8:3) +// at foo (/path/to/file.js:4:3) +``` + +To log a deprecation message only once, you can use the [once](https://www.npmjs.com/package/once) module. + +```js +const Deprecation = require("deprecation"); +const once = require("once"); + +const deprecateFoo = once(console.warn); + +function foo() { + deprecateFoo(new Deprecation("[my-lib] foo() is deprecated, use bar()")); +} + +foo(); +foo(); // logs nothing +``` + +## License + +[ISC](LICENSE) diff --git a/node_modules/deprecation/dist-node/index.js b/node_modules/deprecation/dist-node/index.js new file mode 100644 index 0000000..9da1775 --- /dev/null +++ b/node_modules/deprecation/dist-node/index.js @@ -0,0 +1,20 @@ +'use strict'; + +Object.defineProperty(exports, '__esModule', { value: true }); + +class Deprecation extends Error { + constructor(message) { + super(message); // Maintains proper stack trace (only available on V8) + + /* istanbul ignore next */ + + if (Error.captureStackTrace) { + Error.captureStackTrace(this, this.constructor); + } + + this.name = 'Deprecation'; + } + +} + +exports.Deprecation = Deprecation; diff --git a/node_modules/deprecation/dist-src/index.js b/node_modules/deprecation/dist-src/index.js new file mode 100644 index 0000000..7950fdc --- /dev/null +++ b/node_modules/deprecation/dist-src/index.js @@ -0,0 +1,14 @@ +export class Deprecation extends Error { + constructor(message) { + super(message); // Maintains proper stack trace (only available on V8) + + /* istanbul ignore next */ + + if (Error.captureStackTrace) { + Error.captureStackTrace(this, this.constructor); + } + + this.name = 'Deprecation'; + } + +} \ No newline at end of file diff --git a/node_modules/deprecation/dist-types/index.d.ts b/node_modules/deprecation/dist-types/index.d.ts new file mode 100644 index 0000000..e3ae7ad --- /dev/null +++ b/node_modules/deprecation/dist-types/index.d.ts @@ -0,0 +1,3 @@ +export class Deprecation extends Error { + name: "Deprecation"; +} diff --git a/node_modules/deprecation/dist-web/index.js b/node_modules/deprecation/dist-web/index.js new file mode 100644 index 0000000..c6bbda7 --- /dev/null +++ b/node_modules/deprecation/dist-web/index.js @@ -0,0 +1,16 @@ +class Deprecation extends Error { + constructor(message) { + super(message); // Maintains proper stack trace (only available on V8) + + /* istanbul ignore next */ + + if (Error.captureStackTrace) { + Error.captureStackTrace(this, this.constructor); + } + + this.name = 'Deprecation'; + } + +} + +export { Deprecation }; diff --git a/node_modules/deprecation/package.json b/node_modules/deprecation/package.json new file mode 100644 index 0000000..a45fd51 --- /dev/null +++ b/node_modules/deprecation/package.json @@ -0,0 +1,34 @@ +{ + "name": "deprecation", + "description": "Log a deprecation message with stack", + "version": "2.3.1", + "license": "ISC", + "files": [ + "dist-*/", + "bin/" + ], + "esnext": "dist-src/index.js", + 
"main": "dist-node/index.js", + "module": "dist-web/index.js", + "types": "dist-types/index.d.ts", + "pika": true, + "sideEffects": false, + "keywords": [ + "deprecate", + "deprecated", + "deprecation" + ], + "repository": { + "type": "git", + "url": "https://github.com/gr2m/deprecation.git" + }, + "dependencies": {}, + "devDependencies": { + "@pika/pack": "^0.3.7", + "@pika/plugin-build-node": "^0.4.0", + "@pika/plugin-build-types": "^0.4.0", + "@pika/plugin-build-web": "^0.4.0", + "@pika/plugin-standard-pkg": "^0.4.0", + "semantic-release": "^15.13.3" + } +} diff --git a/node_modules/event-target-shim/LICENSE b/node_modules/event-target-shim/LICENSE new file mode 100644 index 0000000..c39e694 --- /dev/null +++ b/node_modules/event-target-shim/LICENSE @@ -0,0 +1,22 @@ +The MIT License (MIT) + +Copyright (c) 2015 Toru Nagashima + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + diff --git a/node_modules/event-target-shim/README.md b/node_modules/event-target-shim/README.md new file mode 100644 index 0000000..a4f9c1b --- /dev/null +++ b/node_modules/event-target-shim/README.md @@ -0,0 +1,293 @@ +# event-target-shim + +[![npm version](https://img.shields.io/npm/v/event-target-shim.svg)](https://www.npmjs.com/package/event-target-shim) +[![Downloads/month](https://img.shields.io/npm/dm/event-target-shim.svg)](http://www.npmtrends.com/event-target-shim) +[![Build Status](https://travis-ci.org/mysticatea/event-target-shim.svg?branch=master)](https://travis-ci.org/mysticatea/event-target-shim) +[![Coverage Status](https://codecov.io/gh/mysticatea/event-target-shim/branch/master/graph/badge.svg)](https://codecov.io/gh/mysticatea/event-target-shim) +[![Dependency Status](https://david-dm.org/mysticatea/event-target-shim.svg)](https://david-dm.org/mysticatea/event-target-shim) + +An implementation of [WHATWG EventTarget interface](https://dom.spec.whatwg.org/#interface-eventtarget), plus few extensions. + +- This provides `EventTarget` constructor that can inherit for your custom object. +- This provides an utility that defines properties of attribute listeners (e.g. `obj.onclick`). + +```js +import {EventTarget, defineEventAttribute} from "event-target-shim" + +class Foo extends EventTarget { + // ... +} + +// Define `foo.onhello` property. 
+defineEventAttribute(Foo.prototype, "hello") + +// Use +const foo = new Foo() +foo.addEventListener("hello", e => console.log("hello", e)) +foo.onhello = e => console.log("onhello:", e) +foo.dispatchEvent(new CustomEvent("hello")) +``` + +## 💿 Installation + +Use [npm](https://www.npmjs.com/) to install then use a bundler. + +``` +npm install event-target-shim +``` + +Or download from [`dist` directory](./dist). + +- [dist/event-target-shim.mjs](dist/event-target-shim.mjs) ... ES modules version. +- [dist/event-target-shim.js](dist/event-target-shim.js) ... Common JS version. +- [dist/event-target-shim.umd.js](dist/event-target-shim.umd.js) ... UMD (Universal Module Definition) version. This is transpiled by [Babel](https://babeljs.io/) for IE 11. + +## 📖 Usage + +```js +import {EventTarget, defineEventAttribute} from "event-target-shim" +// or +const {EventTarget, defineEventAttribute} = require("event-target-shim") + +// or UMD version defines a global variable: +const {EventTarget, defineEventAttribute} = window.EventTargetShim +``` + +### EventTarget + +> https://dom.spec.whatwg.org/#interface-eventtarget + +#### eventTarget.addEventListener(type, callback, options) + +Register an event listener. + +- `type` is a string. This is the event name to register. +- `callback` is a function. This is the event listener to register. +- `options` is a boolean or an object `{ capture?: boolean, passive?: boolean, once?: boolean }`. If this is a boolean, it's same meaning as `{ capture: options }`. + - `capture` is the flag to register the event listener for capture phase. + - `passive` is the flag to ignore `event.preventDefault()` method in the event listener. + - `once` is the flag to remove the event listener automatically after the first call. + +#### eventTarget.removeEventListener(type, callback, options) + +Unregister an event listener. + +- `type` is a string. This is the event name to unregister. +- `callback` is a function. This is the event listener to unregister. +- `options` is a boolean or an object `{ capture?: boolean }`. If this is a boolean, it's same meaning as `{ capture: options }`. + - `capture` is the flag to register the event listener for capture phase. + +#### eventTarget.dispatchEvent(event) + +Dispatch an event. + +- `event` is a [Event](https://dom.spec.whatwg.org/#event) object or an object `{ type: string, [key: string]: any }`. The latter is non-standard but useful. In both cases, listeners receive the event as implementing [Event](https://dom.spec.whatwg.org/#event) interface. + +### defineEventAttribute(proto, type) + +Define an event attribute (e.g. `onclick`) to `proto`. This is non-standard. + +- `proto` is an object (assuming it's a prototype object). This function defines a getter/setter pair for the event attribute. +- `type` is a string. This is the event name to define. + +For example: + +```js +class AbortSignal extends EventTarget { + constructor() { + this.aborted = false + } +} +// Define `onabort` property. +defineEventAttribute(AbortSignal.prototype, "abort") +``` + +### EventTarget(types) + +Define a custom `EventTarget` class with event attributes. This is non-standard. + +- `types` is a string or an array of strings. This is the event name to define. + +For example: + +```js +// This has `onabort` property. 
+class AbortSignal extends EventTarget("abort") { + constructor() { + this.aborted = false + } +} +``` + +## 📚 Examples + +### ES2015 and later + +> https://jsfiddle.net/636vea92/ + +```js +const {EventTarget, defineEventAttribute} = EventTargetShim + +// Define a derived class. +class Foo extends EventTarget { + // ... +} + +// Define `foo.onhello` property. +defineEventAttribute(Foo.prototype, "hello") + +// Register event listeners. +const foo = new Foo() +foo.addEventListener("hello", (e) => { + console.log("hello", e) +}) +foo.onhello = (e) => { + console.log("onhello", e) +} + +// Dispatching events +foo.dispatchEvent(new CustomEvent("hello", { detail: "detail" })) +``` + +### Typescript + +```ts +import { EventTarget, defineEventAttribute } from "event-target-shim"; + +// Define events +type FooEvents = { + hello: CustomEvent +} +type FooEventAttributes = { + onhello: CustomEvent +} + +// Define a derived class. +class Foo extends EventTarget { + // ... +} +// Define `foo.onhello` property's implementation. +defineEventAttribute(Foo.prototype, "hello") + +// Register event listeners. +const foo = new Foo() +foo.addEventListener("hello", (e) => { + console.log("hello", e.detail) +}) +foo.onhello = (e) => { + console.log("onhello", e.detail) +} + +// Dispatching events +foo.dispatchEvent(new CustomEvent("hello", { detail: "detail" })) +``` + +Unfortunately, both `FooEvents` and `FooEventAttributes` are needed because TypeScript doesn't allow the mutation of string literal types. If TypeScript allowed us to compute `"onhello"` from `"hello"` in types, `FooEventAttributes` will be optional. + +This `EventTarget` type is compatible with `EventTarget` interface of `lib.dom.d.ts`. + +#### To disallow unknown events + +By default, methods such as `addEventListener` accept unknown events. You can disallow unknown events by the third type parameter `"strict"`. + +```ts +type FooEvents = { + hello: CustomEvent +} +class Foo extends EventTarget { + // ... +} + +// OK because `hello` is defined in FooEvents. +foo.addEventListener("hello", (e) => { +}) +// Error because `unknown` is not defined in FooEvents. +foo.addEventListener("unknown", (e) => { +}) +``` + +However, if you use `"strict"` parameter, it loses compatibility with `EventTarget` interface of `lib.dom.d.ts`. + +#### To infer the type of `dispatchEvent()` method + +TypeScript cannot infer the event type of `dispatchEvent()` method properly from the argument in most cases. You can improve this behavior with the following steps: + +1. Use the third type parameter `"strict"`. This prevents inferring to `dispatchEvent()`. +2. Make the `type` property of event definitions stricter. + +```ts +type FooEvents = { + hello: CustomEvent & { type: "hello" } + hey: Event & { type: "hey" } +} +class Foo extends EventTarget { + // ... +} + +// Error because `detail` property is lacking. +foo.dispatchEvent({ type: "hello" }) +``` + +### ES5 + +> https://jsfiddle.net/522zc9de/ + +```js +// Define a derived class. +function Foo() { + EventTarget.call(this) +} +Foo.prototype = Object.create(EventTarget.prototype, { + constructor: { value: Foo, configurable: true, writable: true } + // ... +}) + +// Define `foo.onhello` property. +defineEventAttribute(Foo.prototype, "hello") + +// Register event listeners. +var foo = new Foo() +foo.addEventListener("hello", function(e) { + console.log("hello", e) +}) +foo.onhello = function(e) { + console.log("onhello", e) +} + +// Dispatching events +function isSupportEventConstrucor() { // IE does not support. 
+ try { + new CusomEvent("hello") + return true + } catch (_err) { + return false + } +} +if (isSupportEventConstrucor()) { + foo.dispatchEvent(new CustomEvent("hello", { detail: "detail" })) +} else { + var e = document.createEvent("CustomEvent") + e.initCustomEvent("hello", false, false, "detail") + foo.dispatchEvent(e) +} +``` + +## 📰 Changelog + +- See [GitHub releases](https://github.com/mysticatea/event-target-shim/releases). + +## 🍻 Contributing + +Contributing is welcome ❤️ + +Please use GitHub issues/PRs. + +### Development tools + +- `npm install` installs dependencies for development. +- `npm test` runs tests and measures code coverage. +- `npm run clean` removes temporary files of tests. +- `npm run coverage` opens code coverage of the previous test with your default browser. +- `npm run lint` runs ESLint. +- `npm run build` generates `dist` codes. +- `npm run watch` runs tests on each file change. diff --git a/node_modules/event-target-shim/dist/event-target-shim.js b/node_modules/event-target-shim/dist/event-target-shim.js new file mode 100644 index 0000000..53ce220 --- /dev/null +++ b/node_modules/event-target-shim/dist/event-target-shim.js @@ -0,0 +1,871 @@ +/** + * @author Toru Nagashima + * @copyright 2015 Toru Nagashima. All rights reserved. + * See LICENSE file in root directory for full license. + */ +'use strict'; + +Object.defineProperty(exports, '__esModule', { value: true }); + +/** + * @typedef {object} PrivateData + * @property {EventTarget} eventTarget The event target. + * @property {{type:string}} event The original event object. + * @property {number} eventPhase The current event phase. + * @property {EventTarget|null} currentTarget The current event target. + * @property {boolean} canceled The flag to prevent default. + * @property {boolean} stopped The flag to stop propagation. + * @property {boolean} immediateStopped The flag to stop propagation immediately. + * @property {Function|null} passiveListener The listener if the current listener is passive. Otherwise this is null. + * @property {number} timeStamp The unix time. + * @private + */ + +/** + * Private data for event wrappers. + * @type {WeakMap} + * @private + */ +const privateData = new WeakMap(); + +/** + * Cache for wrapper classes. + * @type {WeakMap} + * @private + */ +const wrappers = new WeakMap(); + +/** + * Get private data. + * @param {Event} event The event object to get private data. + * @returns {PrivateData} The private data of the event. + * @private + */ +function pd(event) { + const retv = privateData.get(event); + console.assert( + retv != null, + "'this' is expected an Event object, but got", + event + ); + return retv +} + +/** + * https://dom.spec.whatwg.org/#set-the-canceled-flag + * @param data {PrivateData} private data. + */ +function setCancelFlag(data) { + if (data.passiveListener != null) { + if ( + typeof console !== "undefined" && + typeof console.error === "function" + ) { + console.error( + "Unable to preventDefault inside passive event listener invocation.", + data.passiveListener + ); + } + return + } + if (!data.event.cancelable) { + return + } + + data.canceled = true; + if (typeof data.event.preventDefault === "function") { + data.event.preventDefault(); + } +} + +/** + * @see https://dom.spec.whatwg.org/#interface-event + * @private + */ +/** + * The event wrapper. + * @constructor + * @param {EventTarget} eventTarget The event target of this dispatching. + * @param {Event|{type:string}} event The original event to wrap. 
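+ *
+ * A minimal sketch (added for illustration only, not in the upstream source),
+ * assuming `target` is an EventTarget instance from this shim:
+ *
+ *     const wrapped = new Event(target, { type: "hello", detail: 1 })
+ *     wrapped.type   // "hello"
+ *     wrapped.detail // 1 (own enumerable keys of the original object are redirected)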
+ */ +function Event(eventTarget, event) { + privateData.set(this, { + eventTarget, + event, + eventPhase: 2, + currentTarget: eventTarget, + canceled: false, + stopped: false, + immediateStopped: false, + passiveListener: null, + timeStamp: event.timeStamp || Date.now(), + }); + + // https://heycam.github.io/webidl/#Unforgeable + Object.defineProperty(this, "isTrusted", { value: false, enumerable: true }); + + // Define accessors + const keys = Object.keys(event); + for (let i = 0; i < keys.length; ++i) { + const key = keys[i]; + if (!(key in this)) { + Object.defineProperty(this, key, defineRedirectDescriptor(key)); + } + } +} + +// Should be enumerable, but class methods are not enumerable. +Event.prototype = { + /** + * The type of this event. + * @type {string} + */ + get type() { + return pd(this).event.type + }, + + /** + * The target of this event. + * @type {EventTarget} + */ + get target() { + return pd(this).eventTarget + }, + + /** + * The target of this event. + * @type {EventTarget} + */ + get currentTarget() { + return pd(this).currentTarget + }, + + /** + * @returns {EventTarget[]} The composed path of this event. + */ + composedPath() { + const currentTarget = pd(this).currentTarget; + if (currentTarget == null) { + return [] + } + return [currentTarget] + }, + + /** + * Constant of NONE. + * @type {number} + */ + get NONE() { + return 0 + }, + + /** + * Constant of CAPTURING_PHASE. + * @type {number} + */ + get CAPTURING_PHASE() { + return 1 + }, + + /** + * Constant of AT_TARGET. + * @type {number} + */ + get AT_TARGET() { + return 2 + }, + + /** + * Constant of BUBBLING_PHASE. + * @type {number} + */ + get BUBBLING_PHASE() { + return 3 + }, + + /** + * The target of this event. + * @type {number} + */ + get eventPhase() { + return pd(this).eventPhase + }, + + /** + * Stop event bubbling. + * @returns {void} + */ + stopPropagation() { + const data = pd(this); + + data.stopped = true; + if (typeof data.event.stopPropagation === "function") { + data.event.stopPropagation(); + } + }, + + /** + * Stop event bubbling. + * @returns {void} + */ + stopImmediatePropagation() { + const data = pd(this); + + data.stopped = true; + data.immediateStopped = true; + if (typeof data.event.stopImmediatePropagation === "function") { + data.event.stopImmediatePropagation(); + } + }, + + /** + * The flag to be bubbling. + * @type {boolean} + */ + get bubbles() { + return Boolean(pd(this).event.bubbles) + }, + + /** + * The flag to be cancelable. + * @type {boolean} + */ + get cancelable() { + return Boolean(pd(this).event.cancelable) + }, + + /** + * Cancel this event. + * @returns {void} + */ + preventDefault() { + setCancelFlag(pd(this)); + }, + + /** + * The flag to indicate cancellation state. + * @type {boolean} + */ + get defaultPrevented() { + return pd(this).canceled + }, + + /** + * The flag to be composed. + * @type {boolean} + */ + get composed() { + return Boolean(pd(this).event.composed) + }, + + /** + * The unix time of this event. + * @type {number} + */ + get timeStamp() { + return pd(this).timeStamp + }, + + /** + * The target of this event. + * @type {EventTarget} + * @deprecated + */ + get srcElement() { + return pd(this).eventTarget + }, + + /** + * The flag to stop event bubbling. 
+ * @type {boolean} + * @deprecated + */ + get cancelBubble() { + return pd(this).stopped + }, + set cancelBubble(value) { + if (!value) { + return + } + const data = pd(this); + + data.stopped = true; + if (typeof data.event.cancelBubble === "boolean") { + data.event.cancelBubble = true; + } + }, + + /** + * The flag to indicate cancellation state. + * @type {boolean} + * @deprecated + */ + get returnValue() { + return !pd(this).canceled + }, + set returnValue(value) { + if (!value) { + setCancelFlag(pd(this)); + } + }, + + /** + * Initialize this event object. But do nothing under event dispatching. + * @param {string} type The event type. + * @param {boolean} [bubbles=false] The flag to be possible to bubble up. + * @param {boolean} [cancelable=false] The flag to be possible to cancel. + * @deprecated + */ + initEvent() { + // Do nothing. + }, +}; + +// `constructor` is not enumerable. +Object.defineProperty(Event.prototype, "constructor", { + value: Event, + configurable: true, + writable: true, +}); + +// Ensure `event instanceof window.Event` is `true`. +if (typeof window !== "undefined" && typeof window.Event !== "undefined") { + Object.setPrototypeOf(Event.prototype, window.Event.prototype); + + // Make association for wrappers. + wrappers.set(window.Event.prototype, Event); +} + +/** + * Get the property descriptor to redirect a given property. + * @param {string} key Property name to define property descriptor. + * @returns {PropertyDescriptor} The property descriptor to redirect the property. + * @private + */ +function defineRedirectDescriptor(key) { + return { + get() { + return pd(this).event[key] + }, + set(value) { + pd(this).event[key] = value; + }, + configurable: true, + enumerable: true, + } +} + +/** + * Get the property descriptor to call a given method property. + * @param {string} key Property name to define property descriptor. + * @returns {PropertyDescriptor} The property descriptor to call the method property. + * @private + */ +function defineCallDescriptor(key) { + return { + value() { + const event = pd(this).event; + return event[key].apply(event, arguments) + }, + configurable: true, + enumerable: true, + } +} + +/** + * Define new wrapper class. + * @param {Function} BaseEvent The base wrapper class. + * @param {Object} proto The prototype of the original event. + * @returns {Function} The defined wrapper class. + * @private + */ +function defineWrapper(BaseEvent, proto) { + const keys = Object.keys(proto); + if (keys.length === 0) { + return BaseEvent + } + + /** CustomEvent */ + function CustomEvent(eventTarget, event) { + BaseEvent.call(this, eventTarget, event); + } + + CustomEvent.prototype = Object.create(BaseEvent.prototype, { + constructor: { value: CustomEvent, configurable: true, writable: true }, + }); + + // Define accessors. + for (let i = 0; i < keys.length; ++i) { + const key = keys[i]; + if (!(key in BaseEvent.prototype)) { + const descriptor = Object.getOwnPropertyDescriptor(proto, key); + const isFunc = typeof descriptor.value === "function"; + Object.defineProperty( + CustomEvent.prototype, + key, + isFunc + ? defineCallDescriptor(key) + : defineRedirectDescriptor(key) + ); + } + } + + return CustomEvent +} + +/** + * Get the wrapper class of a given prototype. + * @param {Object} proto The prototype of the original event to get its wrapper. + * @returns {Function} The wrapper class. 
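+ *
+ * A minimal sketch (for illustration only, not in the upstream source): the
+ * result is memoized per prototype in a WeakMap, so events that share a
+ * prototype share one generated wrapper class:
+ *
+ *     getWrapper(Object.getPrototypeOf(eventA)) === getWrapper(Object.getPrototypeOf(eventB))
+ *     // true whenever eventA and eventB have the same prototype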
+ * @private + */ +function getWrapper(proto) { + if (proto == null || proto === Object.prototype) { + return Event + } + + let wrapper = wrappers.get(proto); + if (wrapper == null) { + wrapper = defineWrapper(getWrapper(Object.getPrototypeOf(proto)), proto); + wrappers.set(proto, wrapper); + } + return wrapper +} + +/** + * Wrap a given event to management a dispatching. + * @param {EventTarget} eventTarget The event target of this dispatching. + * @param {Object} event The event to wrap. + * @returns {Event} The wrapper instance. + * @private + */ +function wrapEvent(eventTarget, event) { + const Wrapper = getWrapper(Object.getPrototypeOf(event)); + return new Wrapper(eventTarget, event) +} + +/** + * Get the immediateStopped flag of a given event. + * @param {Event} event The event to get. + * @returns {boolean} The flag to stop propagation immediately. + * @private + */ +function isStopped(event) { + return pd(event).immediateStopped +} + +/** + * Set the current event phase of a given event. + * @param {Event} event The event to set current target. + * @param {number} eventPhase New event phase. + * @returns {void} + * @private + */ +function setEventPhase(event, eventPhase) { + pd(event).eventPhase = eventPhase; +} + +/** + * Set the current target of a given event. + * @param {Event} event The event to set current target. + * @param {EventTarget|null} currentTarget New current target. + * @returns {void} + * @private + */ +function setCurrentTarget(event, currentTarget) { + pd(event).currentTarget = currentTarget; +} + +/** + * Set a passive listener of a given event. + * @param {Event} event The event to set current target. + * @param {Function|null} passiveListener New passive listener. + * @returns {void} + * @private + */ +function setPassiveListener(event, passiveListener) { + pd(event).passiveListener = passiveListener; +} + +/** + * @typedef {object} ListenerNode + * @property {Function} listener + * @property {1|2|3} listenerType + * @property {boolean} passive + * @property {boolean} once + * @property {ListenerNode|null} next + * @private + */ + +/** + * @type {WeakMap>} + * @private + */ +const listenersMap = new WeakMap(); + +// Listener types +const CAPTURE = 1; +const BUBBLE = 2; +const ATTRIBUTE = 3; + +/** + * Check whether a given value is an object or not. + * @param {any} x The value to check. + * @returns {boolean} `true` if the value is an object. + */ +function isObject(x) { + return x !== null && typeof x === "object" //eslint-disable-line no-restricted-syntax +} + +/** + * Get listeners. + * @param {EventTarget} eventTarget The event target to get. + * @returns {Map} The listeners. + * @private + */ +function getListeners(eventTarget) { + const listeners = listenersMap.get(eventTarget); + if (listeners == null) { + throw new TypeError( + "'this' is expected an EventTarget object, but got another value." + ) + } + return listeners +} + +/** + * Get the property descriptor for the event attribute of a given event. + * @param {string} eventName The event name to get property descriptor. + * @returns {PropertyDescriptor} The property descriptor. 
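+ *
+ * A minimal usage sketch (for illustration only, not in the upstream source),
+ * assuming the descriptor was installed via defineEventAttribute(Foo.prototype, "hello"):
+ *
+ *     foo.onhello = e => console.log(e.type)  // installs the single attribute listener
+ *     foo.onhello = null                      // removes it; addEventListener() listeners stay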
+ * @private + */ +function defineEventAttributeDescriptor(eventName) { + return { + get() { + const listeners = getListeners(this); + let node = listeners.get(eventName); + while (node != null) { + if (node.listenerType === ATTRIBUTE) { + return node.listener + } + node = node.next; + } + return null + }, + + set(listener) { + if (typeof listener !== "function" && !isObject(listener)) { + listener = null; // eslint-disable-line no-param-reassign + } + const listeners = getListeners(this); + + // Traverse to the tail while removing old value. + let prev = null; + let node = listeners.get(eventName); + while (node != null) { + if (node.listenerType === ATTRIBUTE) { + // Remove old value. + if (prev !== null) { + prev.next = node.next; + } else if (node.next !== null) { + listeners.set(eventName, node.next); + } else { + listeners.delete(eventName); + } + } else { + prev = node; + } + + node = node.next; + } + + // Add new value. + if (listener !== null) { + const newNode = { + listener, + listenerType: ATTRIBUTE, + passive: false, + once: false, + next: null, + }; + if (prev === null) { + listeners.set(eventName, newNode); + } else { + prev.next = newNode; + } + } + }, + configurable: true, + enumerable: true, + } +} + +/** + * Define an event attribute (e.g. `eventTarget.onclick`). + * @param {Object} eventTargetPrototype The event target prototype to define an event attrbite. + * @param {string} eventName The event name to define. + * @returns {void} + */ +function defineEventAttribute(eventTargetPrototype, eventName) { + Object.defineProperty( + eventTargetPrototype, + `on${eventName}`, + defineEventAttributeDescriptor(eventName) + ); +} + +/** + * Define a custom EventTarget with event attributes. + * @param {string[]} eventNames Event names for event attributes. + * @returns {EventTarget} The custom EventTarget. + * @private + */ +function defineCustomEventTarget(eventNames) { + /** CustomEventTarget */ + function CustomEventTarget() { + EventTarget.call(this); + } + + CustomEventTarget.prototype = Object.create(EventTarget.prototype, { + constructor: { + value: CustomEventTarget, + configurable: true, + writable: true, + }, + }); + + for (let i = 0; i < eventNames.length; ++i) { + defineEventAttribute(CustomEventTarget.prototype, eventNames[i]); + } + + return CustomEventTarget +} + +/** + * EventTarget. + * + * - This is constructor if no arguments. + * - This is a function which returns a CustomEventTarget constructor if there are arguments. + * + * For example: + * + * class A extends EventTarget {} + * class B extends EventTarget("message") {} + * class C extends EventTarget("message", "error") {} + * class D extends EventTarget(["message", "error"]) {} + */ +function EventTarget() { + /*eslint-disable consistent-return */ + if (this instanceof EventTarget) { + listenersMap.set(this, new Map()); + return + } + if (arguments.length === 1 && Array.isArray(arguments[0])) { + return defineCustomEventTarget(arguments[0]) + } + if (arguments.length > 0) { + const types = new Array(arguments.length); + for (let i = 0; i < arguments.length; ++i) { + types[i] = arguments[i]; + } + return defineCustomEventTarget(types) + } + throw new TypeError("Cannot call a class as a function") + /*eslint-enable consistent-return */ +} + +// Should be enumerable, but class methods are not enumerable. +EventTarget.prototype = { + /** + * Add a given listener to this event target. + * @param {string} eventName The event name to add. + * @param {Function} listener The listener to add. 
+ * @param {boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener. + * @returns {void} + */ + addEventListener(eventName, listener, options) { + if (listener == null) { + return + } + if (typeof listener !== "function" && !isObject(listener)) { + throw new TypeError("'listener' should be a function or an object.") + } + + const listeners = getListeners(this); + const optionsIsObj = isObject(options); + const capture = optionsIsObj + ? Boolean(options.capture) + : Boolean(options); + const listenerType = capture ? CAPTURE : BUBBLE; + const newNode = { + listener, + listenerType, + passive: optionsIsObj && Boolean(options.passive), + once: optionsIsObj && Boolean(options.once), + next: null, + }; + + // Set it as the first node if the first node is null. + let node = listeners.get(eventName); + if (node === undefined) { + listeners.set(eventName, newNode); + return + } + + // Traverse to the tail while checking duplication.. + let prev = null; + while (node != null) { + if ( + node.listener === listener && + node.listenerType === listenerType + ) { + // Should ignore duplication. + return + } + prev = node; + node = node.next; + } + + // Add it. + prev.next = newNode; + }, + + /** + * Remove a given listener from this event target. + * @param {string} eventName The event name to remove. + * @param {Function} listener The listener to remove. + * @param {boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener. + * @returns {void} + */ + removeEventListener(eventName, listener, options) { + if (listener == null) { + return + } + + const listeners = getListeners(this); + const capture = isObject(options) + ? Boolean(options.capture) + : Boolean(options); + const listenerType = capture ? CAPTURE : BUBBLE; + + let prev = null; + let node = listeners.get(eventName); + while (node != null) { + if ( + node.listener === listener && + node.listenerType === listenerType + ) { + if (prev !== null) { + prev.next = node.next; + } else if (node.next !== null) { + listeners.set(eventName, node.next); + } else { + listeners.delete(eventName); + } + return + } + + prev = node; + node = node.next; + } + }, + + /** + * Dispatch a given event. + * @param {Event|{type:string}} event The event to dispatch. + * @returns {boolean} `false` if canceled. + */ + dispatchEvent(event) { + if (event == null || typeof event.type !== "string") { + throw new TypeError('"event.type" should be a string.') + } + + // If listeners aren't registered, terminate. + const listeners = getListeners(this); + const eventName = event.type; + let node = listeners.get(eventName); + if (node == null) { + return true + } + + // Since we cannot rewrite several properties, so wrap object. + const wrappedEvent = wrapEvent(this, event); + + // This doesn't process capturing phase and bubbling phase. + // This isn't participating in a tree. + let prev = null; + while (node != null) { + // Remove this listener if it's once + if (node.once) { + if (prev !== null) { + prev.next = node.next; + } else if (node.next !== null) { + listeners.set(eventName, node.next); + } else { + listeners.delete(eventName); + } + } else { + prev = node; + } + + // Call this listener + setPassiveListener( + wrappedEvent, + node.passive ? 
node.listener : null + ); + if (typeof node.listener === "function") { + try { + node.listener.call(this, wrappedEvent); + } catch (err) { + if ( + typeof console !== "undefined" && + typeof console.error === "function" + ) { + console.error(err); + } + } + } else if ( + node.listenerType !== ATTRIBUTE && + typeof node.listener.handleEvent === "function" + ) { + node.listener.handleEvent(wrappedEvent); + } + + // Break if `event.stopImmediatePropagation` was called. + if (isStopped(wrappedEvent)) { + break + } + + node = node.next; + } + setPassiveListener(wrappedEvent, null); + setEventPhase(wrappedEvent, 0); + setCurrentTarget(wrappedEvent, null); + + return !wrappedEvent.defaultPrevented + }, +}; + +// `constructor` is not enumerable. +Object.defineProperty(EventTarget.prototype, "constructor", { + value: EventTarget, + configurable: true, + writable: true, +}); + +// Ensure `eventTarget instanceof window.EventTarget` is `true`. +if ( + typeof window !== "undefined" && + typeof window.EventTarget !== "undefined" +) { + Object.setPrototypeOf(EventTarget.prototype, window.EventTarget.prototype); +} + +exports.defineEventAttribute = defineEventAttribute; +exports.EventTarget = EventTarget; +exports.default = EventTarget; + +module.exports = EventTarget +module.exports.EventTarget = module.exports["default"] = EventTarget +module.exports.defineEventAttribute = defineEventAttribute +//# sourceMappingURL=event-target-shim.js.map diff --git a/node_modules/event-target-shim/dist/event-target-shim.js.map b/node_modules/event-target-shim/dist/event-target-shim.js.map new file mode 100644 index 0000000..83c5f62 --- /dev/null +++ b/node_modules/event-target-shim/dist/event-target-shim.js.map @@ -0,0 +1 @@ +{"version":3,"file":"event-target-shim.js","sources":["../src/event.mjs","../src/event-target.mjs"],"sourcesContent":["/**\n * @typedef {object} PrivateData\n * @property {EventTarget} eventTarget The event target.\n * @property {{type:string}} event The original event object.\n * @property {number} eventPhase The current event phase.\n * @property {EventTarget|null} currentTarget The current event target.\n * @property {boolean} canceled The flag to prevent default.\n * @property {boolean} stopped The flag to stop propagation.\n * @property {boolean} immediateStopped The flag to stop propagation immediately.\n * @property {Function|null} passiveListener The listener if the current listener is passive. 
Otherwise this is null.\n * @property {number} timeStamp The unix time.\n * @private\n */\n\n/**\n * Private data for event wrappers.\n * @type {WeakMap}\n * @private\n */\nconst privateData = new WeakMap()\n\n/**\n * Cache for wrapper classes.\n * @type {WeakMap}\n * @private\n */\nconst wrappers = new WeakMap()\n\n/**\n * Get private data.\n * @param {Event} event The event object to get private data.\n * @returns {PrivateData} The private data of the event.\n * @private\n */\nfunction pd(event) {\n const retv = privateData.get(event)\n console.assert(\n retv != null,\n \"'this' is expected an Event object, but got\",\n event\n )\n return retv\n}\n\n/**\n * https://dom.spec.whatwg.org/#set-the-canceled-flag\n * @param data {PrivateData} private data.\n */\nfunction setCancelFlag(data) {\n if (data.passiveListener != null) {\n if (\n typeof console !== \"undefined\" &&\n typeof console.error === \"function\"\n ) {\n console.error(\n \"Unable to preventDefault inside passive event listener invocation.\",\n data.passiveListener\n )\n }\n return\n }\n if (!data.event.cancelable) {\n return\n }\n\n data.canceled = true\n if (typeof data.event.preventDefault === \"function\") {\n data.event.preventDefault()\n }\n}\n\n/**\n * @see https://dom.spec.whatwg.org/#interface-event\n * @private\n */\n/**\n * The event wrapper.\n * @constructor\n * @param {EventTarget} eventTarget The event target of this dispatching.\n * @param {Event|{type:string}} event The original event to wrap.\n */\nfunction Event(eventTarget, event) {\n privateData.set(this, {\n eventTarget,\n event,\n eventPhase: 2,\n currentTarget: eventTarget,\n canceled: false,\n stopped: false,\n immediateStopped: false,\n passiveListener: null,\n timeStamp: event.timeStamp || Date.now(),\n })\n\n // https://heycam.github.io/webidl/#Unforgeable\n Object.defineProperty(this, \"isTrusted\", { value: false, enumerable: true })\n\n // Define accessors\n const keys = Object.keys(event)\n for (let i = 0; i < keys.length; ++i) {\n const key = keys[i]\n if (!(key in this)) {\n Object.defineProperty(this, key, defineRedirectDescriptor(key))\n }\n }\n}\n\n// Should be enumerable, but class methods are not enumerable.\nEvent.prototype = {\n /**\n * The type of this event.\n * @type {string}\n */\n get type() {\n return pd(this).event.type\n },\n\n /**\n * The target of this event.\n * @type {EventTarget}\n */\n get target() {\n return pd(this).eventTarget\n },\n\n /**\n * The target of this event.\n * @type {EventTarget}\n */\n get currentTarget() {\n return pd(this).currentTarget\n },\n\n /**\n * @returns {EventTarget[]} The composed path of this event.\n */\n composedPath() {\n const currentTarget = pd(this).currentTarget\n if (currentTarget == null) {\n return []\n }\n return [currentTarget]\n },\n\n /**\n * Constant of NONE.\n * @type {number}\n */\n get NONE() {\n return 0\n },\n\n /**\n * Constant of CAPTURING_PHASE.\n * @type {number}\n */\n get CAPTURING_PHASE() {\n return 1\n },\n\n /**\n * Constant of AT_TARGET.\n * @type {number}\n */\n get AT_TARGET() {\n return 2\n },\n\n /**\n * Constant of BUBBLING_PHASE.\n * @type {number}\n */\n get BUBBLING_PHASE() {\n return 3\n },\n\n /**\n * The target of this event.\n * @type {number}\n */\n get eventPhase() {\n return pd(this).eventPhase\n },\n\n /**\n * Stop event bubbling.\n * @returns {void}\n */\n stopPropagation() {\n const data = pd(this)\n\n data.stopped = true\n if (typeof data.event.stopPropagation === \"function\") {\n data.event.stopPropagation()\n }\n },\n\n /**\n * Stop event 
bubbling.\n * @returns {void}\n */\n stopImmediatePropagation() {\n const data = pd(this)\n\n data.stopped = true\n data.immediateStopped = true\n if (typeof data.event.stopImmediatePropagation === \"function\") {\n data.event.stopImmediatePropagation()\n }\n },\n\n /**\n * The flag to be bubbling.\n * @type {boolean}\n */\n get bubbles() {\n return Boolean(pd(this).event.bubbles)\n },\n\n /**\n * The flag to be cancelable.\n * @type {boolean}\n */\n get cancelable() {\n return Boolean(pd(this).event.cancelable)\n },\n\n /**\n * Cancel this event.\n * @returns {void}\n */\n preventDefault() {\n setCancelFlag(pd(this))\n },\n\n /**\n * The flag to indicate cancellation state.\n * @type {boolean}\n */\n get defaultPrevented() {\n return pd(this).canceled\n },\n\n /**\n * The flag to be composed.\n * @type {boolean}\n */\n get composed() {\n return Boolean(pd(this).event.composed)\n },\n\n /**\n * The unix time of this event.\n * @type {number}\n */\n get timeStamp() {\n return pd(this).timeStamp\n },\n\n /**\n * The target of this event.\n * @type {EventTarget}\n * @deprecated\n */\n get srcElement() {\n return pd(this).eventTarget\n },\n\n /**\n * The flag to stop event bubbling.\n * @type {boolean}\n * @deprecated\n */\n get cancelBubble() {\n return pd(this).stopped\n },\n set cancelBubble(value) {\n if (!value) {\n return\n }\n const data = pd(this)\n\n data.stopped = true\n if (typeof data.event.cancelBubble === \"boolean\") {\n data.event.cancelBubble = true\n }\n },\n\n /**\n * The flag to indicate cancellation state.\n * @type {boolean}\n * @deprecated\n */\n get returnValue() {\n return !pd(this).canceled\n },\n set returnValue(value) {\n if (!value) {\n setCancelFlag(pd(this))\n }\n },\n\n /**\n * Initialize this event object. But do nothing under event dispatching.\n * @param {string} type The event type.\n * @param {boolean} [bubbles=false] The flag to be possible to bubble up.\n * @param {boolean} [cancelable=false] The flag to be possible to cancel.\n * @deprecated\n */\n initEvent() {\n // Do nothing.\n },\n}\n\n// `constructor` is not enumerable.\nObject.defineProperty(Event.prototype, \"constructor\", {\n value: Event,\n configurable: true,\n writable: true,\n})\n\n// Ensure `event instanceof window.Event` is `true`.\nif (typeof window !== \"undefined\" && typeof window.Event !== \"undefined\") {\n Object.setPrototypeOf(Event.prototype, window.Event.prototype)\n\n // Make association for wrappers.\n wrappers.set(window.Event.prototype, Event)\n}\n\n/**\n * Get the property descriptor to redirect a given property.\n * @param {string} key Property name to define property descriptor.\n * @returns {PropertyDescriptor} The property descriptor to redirect the property.\n * @private\n */\nfunction defineRedirectDescriptor(key) {\n return {\n get() {\n return pd(this).event[key]\n },\n set(value) {\n pd(this).event[key] = value\n },\n configurable: true,\n enumerable: true,\n }\n}\n\n/**\n * Get the property descriptor to call a given method property.\n * @param {string} key Property name to define property descriptor.\n * @returns {PropertyDescriptor} The property descriptor to call the method property.\n * @private\n */\nfunction defineCallDescriptor(key) {\n return {\n value() {\n const event = pd(this).event\n return event[key].apply(event, arguments)\n },\n configurable: true,\n enumerable: true,\n }\n}\n\n/**\n * Define new wrapper class.\n * @param {Function} BaseEvent The base wrapper class.\n * @param {Object} proto The prototype of the original event.\n * @returns 
{Function} The defined wrapper class.\n * @private\n */\nfunction defineWrapper(BaseEvent, proto) {\n const keys = Object.keys(proto)\n if (keys.length === 0) {\n return BaseEvent\n }\n\n /** CustomEvent */\n function CustomEvent(eventTarget, event) {\n BaseEvent.call(this, eventTarget, event)\n }\n\n CustomEvent.prototype = Object.create(BaseEvent.prototype, {\n constructor: { value: CustomEvent, configurable: true, writable: true },\n })\n\n // Define accessors.\n for (let i = 0; i < keys.length; ++i) {\n const key = keys[i]\n if (!(key in BaseEvent.prototype)) {\n const descriptor = Object.getOwnPropertyDescriptor(proto, key)\n const isFunc = typeof descriptor.value === \"function\"\n Object.defineProperty(\n CustomEvent.prototype,\n key,\n isFunc\n ? defineCallDescriptor(key)\n : defineRedirectDescriptor(key)\n )\n }\n }\n\n return CustomEvent\n}\n\n/**\n * Get the wrapper class of a given prototype.\n * @param {Object} proto The prototype of the original event to get its wrapper.\n * @returns {Function} The wrapper class.\n * @private\n */\nfunction getWrapper(proto) {\n if (proto == null || proto === Object.prototype) {\n return Event\n }\n\n let wrapper = wrappers.get(proto)\n if (wrapper == null) {\n wrapper = defineWrapper(getWrapper(Object.getPrototypeOf(proto)), proto)\n wrappers.set(proto, wrapper)\n }\n return wrapper\n}\n\n/**\n * Wrap a given event to management a dispatching.\n * @param {EventTarget} eventTarget The event target of this dispatching.\n * @param {Object} event The event to wrap.\n * @returns {Event} The wrapper instance.\n * @private\n */\nexport function wrapEvent(eventTarget, event) {\n const Wrapper = getWrapper(Object.getPrototypeOf(event))\n return new Wrapper(eventTarget, event)\n}\n\n/**\n * Get the immediateStopped flag of a given event.\n * @param {Event} event The event to get.\n * @returns {boolean} The flag to stop propagation immediately.\n * @private\n */\nexport function isStopped(event) {\n return pd(event).immediateStopped\n}\n\n/**\n * Set the current event phase of a given event.\n * @param {Event} event The event to set current target.\n * @param {number} eventPhase New event phase.\n * @returns {void}\n * @private\n */\nexport function setEventPhase(event, eventPhase) {\n pd(event).eventPhase = eventPhase\n}\n\n/**\n * Set the current target of a given event.\n * @param {Event} event The event to set current target.\n * @param {EventTarget|null} currentTarget New current target.\n * @returns {void}\n * @private\n */\nexport function setCurrentTarget(event, currentTarget) {\n pd(event).currentTarget = currentTarget\n}\n\n/**\n * Set a passive listener of a given event.\n * @param {Event} event The event to set current target.\n * @param {Function|null} passiveListener New passive listener.\n * @returns {void}\n * @private\n */\nexport function setPassiveListener(event, passiveListener) {\n pd(event).passiveListener = passiveListener\n}\n","import {\n isStopped,\n setCurrentTarget,\n setEventPhase,\n setPassiveListener,\n wrapEvent,\n} from \"./event.mjs\"\n\n/**\n * @typedef {object} ListenerNode\n * @property {Function} listener\n * @property {1|2|3} listenerType\n * @property {boolean} passive\n * @property {boolean} once\n * @property {ListenerNode|null} next\n * @private\n */\n\n/**\n * @type {WeakMap>}\n * @private\n */\nconst listenersMap = new WeakMap()\n\n// Listener types\nconst CAPTURE = 1\nconst BUBBLE = 2\nconst ATTRIBUTE = 3\n\n/**\n * Check whether a given value is an object or not.\n * @param {any} x The value to check.\n * 
@returns {boolean} `true` if the value is an object.\n */\nfunction isObject(x) {\n return x !== null && typeof x === \"object\" //eslint-disable-line no-restricted-syntax\n}\n\n/**\n * Get listeners.\n * @param {EventTarget} eventTarget The event target to get.\n * @returns {Map} The listeners.\n * @private\n */\nfunction getListeners(eventTarget) {\n const listeners = listenersMap.get(eventTarget)\n if (listeners == null) {\n throw new TypeError(\n \"'this' is expected an EventTarget object, but got another value.\"\n )\n }\n return listeners\n}\n\n/**\n * Get the property descriptor for the event attribute of a given event.\n * @param {string} eventName The event name to get property descriptor.\n * @returns {PropertyDescriptor} The property descriptor.\n * @private\n */\nfunction defineEventAttributeDescriptor(eventName) {\n return {\n get() {\n const listeners = getListeners(this)\n let node = listeners.get(eventName)\n while (node != null) {\n if (node.listenerType === ATTRIBUTE) {\n return node.listener\n }\n node = node.next\n }\n return null\n },\n\n set(listener) {\n if (typeof listener !== \"function\" && !isObject(listener)) {\n listener = null // eslint-disable-line no-param-reassign\n }\n const listeners = getListeners(this)\n\n // Traverse to the tail while removing old value.\n let prev = null\n let node = listeners.get(eventName)\n while (node != null) {\n if (node.listenerType === ATTRIBUTE) {\n // Remove old value.\n if (prev !== null) {\n prev.next = node.next\n } else if (node.next !== null) {\n listeners.set(eventName, node.next)\n } else {\n listeners.delete(eventName)\n }\n } else {\n prev = node\n }\n\n node = node.next\n }\n\n // Add new value.\n if (listener !== null) {\n const newNode = {\n listener,\n listenerType: ATTRIBUTE,\n passive: false,\n once: false,\n next: null,\n }\n if (prev === null) {\n listeners.set(eventName, newNode)\n } else {\n prev.next = newNode\n }\n }\n },\n configurable: true,\n enumerable: true,\n }\n}\n\n/**\n * Define an event attribute (e.g. 
`eventTarget.onclick`).\n * @param {Object} eventTargetPrototype The event target prototype to define an event attrbite.\n * @param {string} eventName The event name to define.\n * @returns {void}\n */\nfunction defineEventAttribute(eventTargetPrototype, eventName) {\n Object.defineProperty(\n eventTargetPrototype,\n `on${eventName}`,\n defineEventAttributeDescriptor(eventName)\n )\n}\n\n/**\n * Define a custom EventTarget with event attributes.\n * @param {string[]} eventNames Event names for event attributes.\n * @returns {EventTarget} The custom EventTarget.\n * @private\n */\nfunction defineCustomEventTarget(eventNames) {\n /** CustomEventTarget */\n function CustomEventTarget() {\n EventTarget.call(this)\n }\n\n CustomEventTarget.prototype = Object.create(EventTarget.prototype, {\n constructor: {\n value: CustomEventTarget,\n configurable: true,\n writable: true,\n },\n })\n\n for (let i = 0; i < eventNames.length; ++i) {\n defineEventAttribute(CustomEventTarget.prototype, eventNames[i])\n }\n\n return CustomEventTarget\n}\n\n/**\n * EventTarget.\n *\n * - This is constructor if no arguments.\n * - This is a function which returns a CustomEventTarget constructor if there are arguments.\n *\n * For example:\n *\n * class A extends EventTarget {}\n * class B extends EventTarget(\"message\") {}\n * class C extends EventTarget(\"message\", \"error\") {}\n * class D extends EventTarget([\"message\", \"error\"]) {}\n */\nfunction EventTarget() {\n /*eslint-disable consistent-return */\n if (this instanceof EventTarget) {\n listenersMap.set(this, new Map())\n return\n }\n if (arguments.length === 1 && Array.isArray(arguments[0])) {\n return defineCustomEventTarget(arguments[0])\n }\n if (arguments.length > 0) {\n const types = new Array(arguments.length)\n for (let i = 0; i < arguments.length; ++i) {\n types[i] = arguments[i]\n }\n return defineCustomEventTarget(types)\n }\n throw new TypeError(\"Cannot call a class as a function\")\n /*eslint-enable consistent-return */\n}\n\n// Should be enumerable, but class methods are not enumerable.\nEventTarget.prototype = {\n /**\n * Add a given listener to this event target.\n * @param {string} eventName The event name to add.\n * @param {Function} listener The listener to add.\n * @param {boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener.\n * @returns {void}\n */\n addEventListener(eventName, listener, options) {\n if (listener == null) {\n return\n }\n if (typeof listener !== \"function\" && !isObject(listener)) {\n throw new TypeError(\"'listener' should be a function or an object.\")\n }\n\n const listeners = getListeners(this)\n const optionsIsObj = isObject(options)\n const capture = optionsIsObj\n ? Boolean(options.capture)\n : Boolean(options)\n const listenerType = capture ? 
CAPTURE : BUBBLE\n const newNode = {\n listener,\n listenerType,\n passive: optionsIsObj && Boolean(options.passive),\n once: optionsIsObj && Boolean(options.once),\n next: null,\n }\n\n // Set it as the first node if the first node is null.\n let node = listeners.get(eventName)\n if (node === undefined) {\n listeners.set(eventName, newNode)\n return\n }\n\n // Traverse to the tail while checking duplication..\n let prev = null\n while (node != null) {\n if (\n node.listener === listener &&\n node.listenerType === listenerType\n ) {\n // Should ignore duplication.\n return\n }\n prev = node\n node = node.next\n }\n\n // Add it.\n prev.next = newNode\n },\n\n /**\n * Remove a given listener from this event target.\n * @param {string} eventName The event name to remove.\n * @param {Function} listener The listener to remove.\n * @param {boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener.\n * @returns {void}\n */\n removeEventListener(eventName, listener, options) {\n if (listener == null) {\n return\n }\n\n const listeners = getListeners(this)\n const capture = isObject(options)\n ? Boolean(options.capture)\n : Boolean(options)\n const listenerType = capture ? CAPTURE : BUBBLE\n\n let prev = null\n let node = listeners.get(eventName)\n while (node != null) {\n if (\n node.listener === listener &&\n node.listenerType === listenerType\n ) {\n if (prev !== null) {\n prev.next = node.next\n } else if (node.next !== null) {\n listeners.set(eventName, node.next)\n } else {\n listeners.delete(eventName)\n }\n return\n }\n\n prev = node\n node = node.next\n }\n },\n\n /**\n * Dispatch a given event.\n * @param {Event|{type:string}} event The event to dispatch.\n * @returns {boolean} `false` if canceled.\n */\n dispatchEvent(event) {\n if (event == null || typeof event.type !== \"string\") {\n throw new TypeError('\"event.type\" should be a string.')\n }\n\n // If listeners aren't registered, terminate.\n const listeners = getListeners(this)\n const eventName = event.type\n let node = listeners.get(eventName)\n if (node == null) {\n return true\n }\n\n // Since we cannot rewrite several properties, so wrap object.\n const wrappedEvent = wrapEvent(this, event)\n\n // This doesn't process capturing phase and bubbling phase.\n // This isn't participating in a tree.\n let prev = null\n while (node != null) {\n // Remove this listener if it's once\n if (node.once) {\n if (prev !== null) {\n prev.next = node.next\n } else if (node.next !== null) {\n listeners.set(eventName, node.next)\n } else {\n listeners.delete(eventName)\n }\n } else {\n prev = node\n }\n\n // Call this listener\n setPassiveListener(\n wrappedEvent,\n node.passive ? 
node.listener : null\n )\n if (typeof node.listener === \"function\") {\n try {\n node.listener.call(this, wrappedEvent)\n } catch (err) {\n if (\n typeof console !== \"undefined\" &&\n typeof console.error === \"function\"\n ) {\n console.error(err)\n }\n }\n } else if (\n node.listenerType !== ATTRIBUTE &&\n typeof node.listener.handleEvent === \"function\"\n ) {\n node.listener.handleEvent(wrappedEvent)\n }\n\n // Break if `event.stopImmediatePropagation` was called.\n if (isStopped(wrappedEvent)) {\n break\n }\n\n node = node.next\n }\n setPassiveListener(wrappedEvent, null)\n setEventPhase(wrappedEvent, 0)\n setCurrentTarget(wrappedEvent, null)\n\n return !wrappedEvent.defaultPrevented\n },\n}\n\n// `constructor` is not enumerable.\nObject.defineProperty(EventTarget.prototype, \"constructor\", {\n value: EventTarget,\n configurable: true,\n writable: true,\n})\n\n// Ensure `eventTarget instanceof window.EventTarget` is `true`.\nif (\n typeof window !== \"undefined\" &&\n typeof window.EventTarget !== \"undefined\"\n) {\n Object.setPrototypeOf(EventTarget.prototype, window.EventTarget.prototype)\n}\n\nexport { defineEventAttribute, EventTarget }\nexport default EventTarget\n"],"names":[],"mappings":";;;;;;;;;AAAA;;;;;;;;;;;;;;;;;;;AAmBA,MAAM,WAAW,GAAG,IAAI,OAAO,GAAE;;;;;;;AAOjC,MAAM,QAAQ,GAAG,IAAI,OAAO,GAAE;;;;;;;;AAQ9B,SAAS,EAAE,CAAC,KAAK,EAAE;IACf,MAAM,IAAI,GAAG,WAAW,CAAC,GAAG,CAAC,KAAK,EAAC;IACnC,OAAO,CAAC,MAAM;QACV,IAAI,IAAI,IAAI;QACZ,6CAA6C;QAC7C,KAAK;MACR;IACD,OAAO,IAAI;CACd;;;;;;AAMD,SAAS,aAAa,CAAC,IAAI,EAAE;IACzB,IAAI,IAAI,CAAC,eAAe,IAAI,IAAI,EAAE;QAC9B;YACI,OAAO,OAAO,KAAK,WAAW;YAC9B,OAAO,OAAO,CAAC,KAAK,KAAK,UAAU;UACrC;YACE,OAAO,CAAC,KAAK;gBACT,oEAAoE;gBACpE,IAAI,CAAC,eAAe;cACvB;SACJ;QACD,MAAM;KACT;IACD,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,UAAU,EAAE;QACxB,MAAM;KACT;;IAED,IAAI,CAAC,QAAQ,GAAG,KAAI;IACpB,IAAI,OAAO,IAAI,CAAC,KAAK,CAAC,cAAc,KAAK,UAAU,EAAE;QACjD,IAAI,CAAC,KAAK,CAAC,cAAc,GAAE;KAC9B;CACJ;;;;;;;;;;;;AAYD,SAAS,KAAK,CAAC,WAAW,EAAE,KAAK,EAAE;IAC/B,WAAW,CAAC,GAAG,CAAC,IAAI,EAAE;QAClB,WAAW;QACX,KAAK;QACL,UAAU,EAAE,CAAC;QACb,aAAa,EAAE,WAAW;QAC1B,QAAQ,EAAE,KAAK;QACf,OAAO,EAAE,KAAK;QACd,gBAAgB,EAAE,KAAK;QACvB,eAAe,EAAE,IAAI;QACrB,SAAS,EAAE,KAAK,CAAC,SAAS,IAAI,IAAI,CAAC,GAAG,EAAE;KAC3C,EAAC;;;IAGF,MAAM,CAAC,cAAc,CAAC,IAAI,EAAE,WAAW,EAAE,EAAE,KAAK,EAAE,KAAK,EAAE,UAAU,EAAE,IAAI,EAAE,EAAC;;;IAG5E,MAAM,IAAI,GAAG,MAAM,CAAC,IAAI,CAAC,KAAK,EAAC;IAC/B,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;QAClC,MAAM,GAAG,GAAG,IAAI,CAAC,CAAC,EAAC;QACnB,IAAI,EAAE,GAAG,IAAI,IAAI,CAAC,EAAE;YAChB,MAAM,CAAC,cAAc,CAAC,IAAI,EAAE,GAAG,EAAE,wBAAwB,CAAC,GAAG,CAAC,EAAC;SAClE;KACJ;CACJ;;;AAGD,KAAK,CAAC,SAAS,GAAG;;;;;IAKd,IAAI,IAAI,GAAG;QACP,OAAO,EAAE,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,IAAI;KAC7B;;;;;;IAMD,IAAI,MAAM,GAAG;QACT,OAAO,EAAE,CAAC,IAAI,CAAC,CAAC,WAAW;KAC9B;;;;;;IAMD,IAAI,aAAa,GAAG;QAChB,OAAO,EAAE,CAAC,IAAI,CAAC,CAAC,aAAa;KAChC;;;;;IAKD,YAAY,GAAG;QACX,MAAM,aAAa,GAAG,EAAE,CAAC,IAAI,CAAC,CAAC,cAAa;QAC5C,IAAI,aAAa,IAAI,IAAI,EAAE;YACvB,OAAO,EAAE;SACZ;QACD,OAAO,CAAC,aAAa,CAAC;KACzB;;;;;;IAMD,IAAI,IAAI,GAAG;QACP,OAAO,CAAC;KACX;;;;;;IAMD,IAAI,eAAe,GAAG;QAClB,OAAO,CAAC;KACX;;;;;;IAMD,IAAI,SAAS,GAAG;QACZ,OAAO,CAAC;KACX;;;;;;IAMD,IAAI,cAAc,GAAG;QACjB,OAAO,CAAC;KACX;;;;;;IAMD,IAAI,UAAU,GAAG;QACb,OAAO,EAAE,CAAC,IAAI,CAAC,CAAC,UAAU;KAC7B;;;;;;IAMD,eAAe,GAAG;QACd,MAAM,IAAI,GAAG,EAAE,CAAC,IAAI,EAAC;;QAErB,IAAI,CAAC,OAAO,GAAG,KAAI;QACnB,IAAI,OAAO,IAAI,CAAC,KAAK,CAAC,eAAe,KAAK,UAAU,EAAE;YAClD,IAAI,CAAC,KAAK,CAAC,eAAe,GAAE;SAC/B;KACJ;;;;;;IAMD,wBAAwB,GAAG;QACvB,MAAM,IAAI,GAAG,EAAE,CAAC,IAAI,EAAC;;QAErB
,IAAI,CAAC,OAAO,GAAG,KAAI;QACnB,IAAI,CAAC,gBAAgB,GAAG,KAAI;QAC5B,IAAI,OAAO,IAAI,CAAC,KAAK,CAAC,wBAAwB,KAAK,UAAU,EAAE;YAC3D,IAAI,CAAC,KAAK,CAAC,wBAAwB,GAAE;SACxC;KACJ;;;;;;IAMD,IAAI,OAAO,GAAG;QACV,OAAO,OAAO,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC;KACzC;;;;;;IAMD,IAAI,UAAU,GAAG;QACb,OAAO,OAAO,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,UAAU,CAAC;KAC5C;;;;;;IAMD,cAAc,GAAG;QACb,aAAa,CAAC,EAAE,CAAC,IAAI,CAAC,EAAC;KAC1B;;;;;;IAMD,IAAI,gBAAgB,GAAG;QACnB,OAAO,EAAE,CAAC,IAAI,CAAC,CAAC,QAAQ;KAC3B;;;;;;IAMD,IAAI,QAAQ,GAAG;QACX,OAAO,OAAO,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,QAAQ,CAAC;KAC1C;;;;;;IAMD,IAAI,SAAS,GAAG;QACZ,OAAO,EAAE,CAAC,IAAI,CAAC,CAAC,SAAS;KAC5B;;;;;;;IAOD,IAAI,UAAU,GAAG;QACb,OAAO,EAAE,CAAC,IAAI,CAAC,CAAC,WAAW;KAC9B;;;;;;;IAOD,IAAI,YAAY,GAAG;QACf,OAAO,EAAE,CAAC,IAAI,CAAC,CAAC,OAAO;KAC1B;IACD,IAAI,YAAY,CAAC,KAAK,EAAE;QACpB,IAAI,CAAC,KAAK,EAAE;YACR,MAAM;SACT;QACD,MAAM,IAAI,GAAG,EAAE,CAAC,IAAI,EAAC;;QAErB,IAAI,CAAC,OAAO,GAAG,KAAI;QACnB,IAAI,OAAO,IAAI,CAAC,KAAK,CAAC,YAAY,KAAK,SAAS,EAAE;YAC9C,IAAI,CAAC,KAAK,CAAC,YAAY,GAAG,KAAI;SACjC;KACJ;;;;;;;IAOD,IAAI,WAAW,GAAG;QACd,OAAO,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,QAAQ;KAC5B;IACD,IAAI,WAAW,CAAC,KAAK,EAAE;QACnB,IAAI,CAAC,KAAK,EAAE;YACR,aAAa,CAAC,EAAE,CAAC,IAAI,CAAC,EAAC;SAC1B;KACJ;;;;;;;;;IASD,SAAS,GAAG;;KAEX;EACJ;;;AAGD,MAAM,CAAC,cAAc,CAAC,KAAK,CAAC,SAAS,EAAE,aAAa,EAAE;IAClD,KAAK,EAAE,KAAK;IACZ,YAAY,EAAE,IAAI;IAClB,QAAQ,EAAE,IAAI;CACjB,EAAC;;;AAGF,IAAI,OAAO,MAAM,KAAK,WAAW,IAAI,OAAO,MAAM,CAAC,KAAK,KAAK,WAAW,EAAE;IACtE,MAAM,CAAC,cAAc,CAAC,KAAK,CAAC,SAAS,EAAE,MAAM,CAAC,KAAK,CAAC,SAAS,EAAC;;;IAG9D,QAAQ,CAAC,GAAG,CAAC,MAAM,CAAC,KAAK,CAAC,SAAS,EAAE,KAAK,EAAC;CAC9C;;;;;;;;AAQD,SAAS,wBAAwB,CAAC,GAAG,EAAE;IACnC,OAAO;QACH,GAAG,GAAG;YACF,OAAO,EAAE,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC;SAC7B;QACD,GAAG,CAAC,KAAK,EAAE;YACP,EAAE,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,GAAG,MAAK;SAC9B;QACD,YAAY,EAAE,IAAI;QAClB,UAAU,EAAE,IAAI;KACnB;CACJ;;;;;;;;AAQD,SAAS,oBAAoB,CAAC,GAAG,EAAE;IAC/B,OAAO;QACH,KAAK,GAAG;YACJ,MAAM,KAAK,GAAG,EAAE,CAAC,IAAI,CAAC,CAAC,MAAK;YAC5B,OAAO,KAAK,CAAC,GAAG,CAAC,CAAC,KAAK,CAAC,KAAK,EAAE,SAAS,CAAC;SAC5C;QACD,YAAY,EAAE,IAAI;QAClB,UAAU,EAAE,IAAI;KACnB;CACJ;;;;;;;;;AASD,SAAS,aAAa,CAAC,SAAS,EAAE,KAAK,EAAE;IACrC,MAAM,IAAI,GAAG,MAAM,CAAC,IAAI,CAAC,KAAK,EAAC;IAC/B,IAAI,IAAI,CAAC,MAAM,KAAK,CAAC,EAAE;QACnB,OAAO,SAAS;KACnB;;;IAGD,SAAS,WAAW,CAAC,WAAW,EAAE,KAAK,EAAE;QACrC,SAAS,CAAC,IAAI,CAAC,IAAI,EAAE,WAAW,EAAE,KAAK,EAAC;KAC3C;;IAED,WAAW,CAAC,SAAS,GAAG,MAAM,CAAC,MAAM,CAAC,SAAS,CAAC,SAAS,EAAE;QACvD,WAAW,EAAE,EAAE,KAAK,EAAE,WAAW,EAAE,YAAY,EAAE,IAAI,EAAE,QAAQ,EAAE,IAAI,EAAE;KAC1E,EAAC;;;IAGF,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;QAClC,MAAM,GAAG,GAAG,IAAI,CAAC,CAAC,EAAC;QACnB,IAAI,EAAE,GAAG,IAAI,SAAS,CAAC,SAAS,CAAC,EAAE;YAC/B,MAAM,UAAU,GAAG,MAAM,CAAC,wBAAwB,CAAC,KAAK,EAAE,GAAG,EAAC;YAC9D,MAAM,MAAM,GAAG,OAAO,UAAU,CAAC,KAAK,KAAK,WAAU;YACrD,MAAM,CAAC,cAAc;gBACjB,WAAW,CAAC,SAAS;gBACrB,GAAG;gBACH,MAAM;sBACA,oBAAoB,CAAC,GAAG,CAAC;sBACzB,wBAAwB,CAAC,GAAG,CAAC;cACtC;SACJ;KACJ;;IAED,OAAO,WAAW;CACrB;;;;;;;;AAQD,SAAS,UAAU,CAAC,KAAK,EAAE;IACvB,IAAI,KAAK,IAAI,IAAI,IAAI,KAAK,KAAK,MAAM,CAAC,SAAS,EAAE;QAC7C,OAAO,KAAK;KACf;;IAED,IAAI,OAAO,GAAG,QAAQ,CAAC,GAAG,CAAC,KAAK,EAAC;IACjC,IAAI,OAAO,IAAI,IAAI,EAAE;QACjB,OAAO,GAAG,aAAa,CAAC,UAAU,CAAC,MAAM,CAAC,cAAc,CAAC,KAAK,CAAC,CAAC,EAAE,KAAK,EAAC;QACxE,QAAQ,CAAC,GAAG,CAAC,KAAK,EAAE,OAAO,EAAC;KAC/B;IACD,OAAO,OAAO;CACjB;;;;;;;;;AASD,AAAO,SAAS,SAAS,CAAC,WAAW,EAAE,KAAK,EAAE;IAC1C,MAAM,OAAO,GAAG,UAAU,CAAC,MAAM,CAAC,cAAc,CAAC,KAAK,CAAC,EAAC;IACxD,OAAO,IAAI,OAAO,CAAC,WAAW,EAAE,KAAK,CAAC;CACzC;;;;;;;;AAQD,A
AAO,SAAS,SAAS,CAAC,KAAK,EAAE;IAC7B,OAAO,EAAE,CAAC,KAAK,CAAC,CAAC,gBAAgB;CACpC;;;;;;;;;AASD,AAAO,SAAS,aAAa,CAAC,KAAK,EAAE,UAAU,EAAE;IAC7C,EAAE,CAAC,KAAK,CAAC,CAAC,UAAU,GAAG,WAAU;CACpC;;;;;;;;;AASD,AAAO,SAAS,gBAAgB,CAAC,KAAK,EAAE,aAAa,EAAE;IACnD,EAAE,CAAC,KAAK,CAAC,CAAC,aAAa,GAAG,cAAa;CAC1C;;;;;;;;;AASD,AAAO,SAAS,kBAAkB,CAAC,KAAK,EAAE,eAAe,EAAE;IACvD,EAAE,CAAC,KAAK,CAAC,CAAC,eAAe,GAAG,gBAAe;CAC9C;;ACtdD;;;;;;;;;;;;;;AAcA,MAAM,YAAY,GAAG,IAAI,OAAO,GAAE;;;AAGlC,MAAM,OAAO,GAAG,EAAC;AACjB,MAAM,MAAM,GAAG,EAAC;AAChB,MAAM,SAAS,GAAG,EAAC;;;;;;;AAOnB,SAAS,QAAQ,CAAC,CAAC,EAAE;IACjB,OAAO,CAAC,KAAK,IAAI,IAAI,OAAO,CAAC,KAAK,QAAQ;CAC7C;;;;;;;;AAQD,SAAS,YAAY,CAAC,WAAW,EAAE;IAC/B,MAAM,SAAS,GAAG,YAAY,CAAC,GAAG,CAAC,WAAW,EAAC;IAC/C,IAAI,SAAS,IAAI,IAAI,EAAE;QACnB,MAAM,IAAI,SAAS;YACf,kEAAkE;SACrE;KACJ;IACD,OAAO,SAAS;CACnB;;;;;;;;AAQD,SAAS,8BAA8B,CAAC,SAAS,EAAE;IAC/C,OAAO;QACH,GAAG,GAAG;YACF,MAAM,SAAS,GAAG,YAAY,CAAC,IAAI,EAAC;YACpC,IAAI,IAAI,GAAG,SAAS,CAAC,GAAG,CAAC,SAAS,EAAC;YACnC,OAAO,IAAI,IAAI,IAAI,EAAE;gBACjB,IAAI,IAAI,CAAC,YAAY,KAAK,SAAS,EAAE;oBACjC,OAAO,IAAI,CAAC,QAAQ;iBACvB;gBACD,IAAI,GAAG,IAAI,CAAC,KAAI;aACnB;YACD,OAAO,IAAI;SACd;;QAED,GAAG,CAAC,QAAQ,EAAE;YACV,IAAI,OAAO,QAAQ,KAAK,UAAU,IAAI,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE;gBACvD,QAAQ,GAAG,KAAI;aAClB;YACD,MAAM,SAAS,GAAG,YAAY,CAAC,IAAI,EAAC;;;YAGpC,IAAI,IAAI,GAAG,KAAI;YACf,IAAI,IAAI,GAAG,SAAS,CAAC,GAAG,CAAC,SAAS,EAAC;YACnC,OAAO,IAAI,IAAI,IAAI,EAAE;gBACjB,IAAI,IAAI,CAAC,YAAY,KAAK,SAAS,EAAE;;oBAEjC,IAAI,IAAI,KAAK,IAAI,EAAE;wBACf,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,KAAI;qBACxB,MAAM,IAAI,IAAI,CAAC,IAAI,KAAK,IAAI,EAAE;wBAC3B,SAAS,CAAC,GAAG,CAAC,SAAS,EAAE,IAAI,CAAC,IAAI,EAAC;qBACtC,MAAM;wBACH,SAAS,CAAC,MAAM,CAAC,SAAS,EAAC;qBAC9B;iBACJ,MAAM;oBACH,IAAI,GAAG,KAAI;iBACd;;gBAED,IAAI,GAAG,IAAI,CAAC,KAAI;aACnB;;;YAGD,IAAI,QAAQ,KAAK,IAAI,EAAE;gBACnB,MAAM,OAAO,GAAG;oBACZ,QAAQ;oBACR,YAAY,EAAE,SAAS;oBACvB,OAAO,EAAE,KAAK;oBACd,IAAI,EAAE,KAAK;oBACX,IAAI,EAAE,IAAI;kBACb;gBACD,IAAI,IAAI,KAAK,IAAI,EAAE;oBACf,SAAS,CAAC,GAAG,CAAC,SAAS,EAAE,OAAO,EAAC;iBACpC,MAAM;oBACH,IAAI,CAAC,IAAI,GAAG,QAAO;iBACtB;aACJ;SACJ;QACD,YAAY,EAAE,IAAI;QAClB,UAAU,EAAE,IAAI;KACnB;CACJ;;;;;;;;AAQD,SAAS,oBAAoB,CAAC,oBAAoB,EAAE,SAAS,EAAE;IAC3D,MAAM,CAAC,cAAc;QACjB,oBAAoB;QACpB,CAAC,EAAE,EAAE,SAAS,CAAC,CAAC;QAChB,8BAA8B,CAAC,SAAS,CAAC;MAC5C;CACJ;;;;;;;;AAQD,SAAS,uBAAuB,CAAC,UAAU,EAAE;;IAEzC,SAAS,iBAAiB,GAAG;QACzB,WAAW,CAAC,IAAI,CAAC,IAAI,EAAC;KACzB;;IAED,iBAAiB,CAAC,SAAS,GAAG,MAAM,CAAC,MAAM,CAAC,WAAW,CAAC,SAAS,EAAE;QAC/D,WAAW,EAAE;YACT,KAAK,EAAE,iBAAiB;YACxB,YAAY,EAAE,IAAI;YAClB,QAAQ,EAAE,IAAI;SACjB;KACJ,EAAC;;IAEF,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,UAAU,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;QACxC,oBAAoB,CAAC,iBAAiB,CAAC,SAAS,EAAE,UAAU,CAAC,CAAC,CAAC,EAAC;KACnE;;IAED,OAAO,iBAAiB;CAC3B;;;;;;;;;;;;;;;AAeD,SAAS,WAAW,GAAG;;IAEnB,IAAI,IAAI,YAAY,WAAW,EAAE;QAC7B,YAAY,CAAC,GAAG,CAAC,IAAI,EAAE,IAAI,GAAG,EAAE,EAAC;QACjC,MAAM;KACT;IACD,IAAI,SAAS,CAAC,MAAM,KAAK,CAAC,IAAI,KAAK,CAAC,OAAO,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,EAAE;QACvD,OAAO,uBAAuB,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC;KAC/C;IACD,IAAI,SAAS,CAAC,MAAM,GAAG,CAAC,EAAE;QACtB,MAAM,KAAK,GAAG,IAAI,KAAK,CAAC,SAAS,CAAC,MAAM,EAAC;QACzC,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,SAAS,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;YACvC,KAAK,CAAC,CAAC,CAAC,GAAG,SAAS,CAAC,CAAC,EAAC;SAC1B;QACD,OAAO,uBAAuB,CAAC,KAAK,CAAC;KACxC;IACD,MAAM,IAAI,SAAS,CAAC,mCAAmC,CAAC;;CAE3D;;;AAGD,WAAW,CAAC,SAAS,GAAG;;;;;;;;IAQpB,gBAAgB,CAAC,SAAS,EAAE,QAAQ,EAAE,OAAO,EAAE;QAC3C,IAAI,QAAQ,IAAI,IAAI,EAAE;YAClB,MAAM;SACT;QACD,IAAI,OAAO,QAAQ,KAAK,UAAU,IAAI,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE;YACvD,MAAM,IAAI,SAAS,CAAC,+CAA+C,CAAC;SACvE;;QAED,MAAM,SAAS,GAA
G,YAAY,CAAC,IAAI,EAAC;QACpC,MAAM,YAAY,GAAG,QAAQ,CAAC,OAAO,EAAC;QACtC,MAAM,OAAO,GAAG,YAAY;cACtB,OAAO,CAAC,OAAO,CAAC,OAAO,CAAC;cACxB,OAAO,CAAC,OAAO,EAAC;QACtB,MAAM,YAAY,GAAG,OAAO,GAAG,OAAO,GAAG,OAAM;QAC/C,MAAM,OAAO,GAAG;YACZ,QAAQ;YACR,YAAY;YACZ,OAAO,EAAE,YAAY,IAAI,OAAO,CAAC,OAAO,CAAC,OAAO,CAAC;YACjD,IAAI,EAAE,YAAY,IAAI,OAAO,CAAC,OAAO,CAAC,IAAI,CAAC;YAC3C,IAAI,EAAE,IAAI;UACb;;;QAGD,IAAI,IAAI,GAAG,SAAS,CAAC,GAAG,CAAC,SAAS,EAAC;QACnC,IAAI,IAAI,KAAK,SAAS,EAAE;YACpB,SAAS,CAAC,GAAG,CAAC,SAAS,EAAE,OAAO,EAAC;YACjC,MAAM;SACT;;;QAGD,IAAI,IAAI,GAAG,KAAI;QACf,OAAO,IAAI,IAAI,IAAI,EAAE;YACjB;gBACI,IAAI,CAAC,QAAQ,KAAK,QAAQ;gBAC1B,IAAI,CAAC,YAAY,KAAK,YAAY;cACpC;;gBAEE,MAAM;aACT;YACD,IAAI,GAAG,KAAI;YACX,IAAI,GAAG,IAAI,CAAC,KAAI;SACnB;;;QAGD,IAAI,CAAC,IAAI,GAAG,QAAO;KACtB;;;;;;;;;IASD,mBAAmB,CAAC,SAAS,EAAE,QAAQ,EAAE,OAAO,EAAE;QAC9C,IAAI,QAAQ,IAAI,IAAI,EAAE;YAClB,MAAM;SACT;;QAED,MAAM,SAAS,GAAG,YAAY,CAAC,IAAI,EAAC;QACpC,MAAM,OAAO,GAAG,QAAQ,CAAC,OAAO,CAAC;cAC3B,OAAO,CAAC,OAAO,CAAC,OAAO,CAAC;cACxB,OAAO,CAAC,OAAO,EAAC;QACtB,MAAM,YAAY,GAAG,OAAO,GAAG,OAAO,GAAG,OAAM;;QAE/C,IAAI,IAAI,GAAG,KAAI;QACf,IAAI,IAAI,GAAG,SAAS,CAAC,GAAG,CAAC,SAAS,EAAC;QACnC,OAAO,IAAI,IAAI,IAAI,EAAE;YACjB;gBACI,IAAI,CAAC,QAAQ,KAAK,QAAQ;gBAC1B,IAAI,CAAC,YAAY,KAAK,YAAY;cACpC;gBACE,IAAI,IAAI,KAAK,IAAI,EAAE;oBACf,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,KAAI;iBACxB,MAAM,IAAI,IAAI,CAAC,IAAI,KAAK,IAAI,EAAE;oBAC3B,SAAS,CAAC,GAAG,CAAC,SAAS,EAAE,IAAI,CAAC,IAAI,EAAC;iBACtC,MAAM;oBACH,SAAS,CAAC,MAAM,CAAC,SAAS,EAAC;iBAC9B;gBACD,MAAM;aACT;;YAED,IAAI,GAAG,KAAI;YACX,IAAI,GAAG,IAAI,CAAC,KAAI;SACnB;KACJ;;;;;;;IAOD,aAAa,CAAC,KAAK,EAAE;QACjB,IAAI,KAAK,IAAI,IAAI,IAAI,OAAO,KAAK,CAAC,IAAI,KAAK,QAAQ,EAAE;YACjD,MAAM,IAAI,SAAS,CAAC,kCAAkC,CAAC;SAC1D;;;QAGD,MAAM,SAAS,GAAG,YAAY,CAAC,IAAI,EAAC;QACpC,MAAM,SAAS,GAAG,KAAK,CAAC,KAAI;QAC5B,IAAI,IAAI,GAAG,SAAS,CAAC,GAAG,CAAC,SAAS,EAAC;QACnC,IAAI,IAAI,IAAI,IAAI,EAAE;YACd,OAAO,IAAI;SACd;;;QAGD,MAAM,YAAY,GAAG,SAAS,CAAC,IAAI,EAAE,KAAK,EAAC;;;;QAI3C,IAAI,IAAI,GAAG,KAAI;QACf,OAAO,IAAI,IAAI,IAAI,EAAE;;YAEjB,IAAI,IAAI,CAAC,IAAI,EAAE;gBACX,IAAI,IAAI,KAAK,IAAI,EAAE;oBACf,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,KAAI;iBACxB,MAAM,IAAI,IAAI,CAAC,IAAI,KAAK,IAAI,EAAE;oBAC3B,SAAS,CAAC,GAAG,CAAC,SAAS,EAAE,IAAI,CAAC,IAAI,EAAC;iBACtC,MAAM;oBACH,SAAS,CAAC,MAAM,CAAC,SAAS,EAAC;iBAC9B;aACJ,MAAM;gBACH,IAAI,GAAG,KAAI;aACd;;;YAGD,kBAAkB;gBACd,YAAY;gBACZ,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC,QAAQ,GAAG,IAAI;cACtC;YACD,IAAI,OAAO,IAAI,CAAC,QAAQ,KAAK,UAAU,EAAE;gBACrC,IAAI;oBACA,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,IAAI,EAAE,YAAY,EAAC;iBACzC,CAAC,OAAO,GAAG,EAAE;oBACV;wBACI,OAAO,OAAO,KAAK,WAAW;wBAC9B,OAAO,OAAO,CAAC,KAAK,KAAK,UAAU;sBACrC;wBACE,OAAO,CAAC,KAAK,CAAC,GAAG,EAAC;qBACrB;iBACJ;aACJ,MAAM;gBACH,IAAI,CAAC,YAAY,KAAK,SAAS;gBAC/B,OAAO,IAAI,CAAC,QAAQ,CAAC,WAAW,KAAK,UAAU;cACjD;gBACE,IAAI,CAAC,QAAQ,CAAC,WAAW,CAAC,YAAY,EAAC;aAC1C;;;YAGD,IAAI,SAAS,CAAC,YAAY,CAAC,EAAE;gBACzB,KAAK;aACR;;YAED,IAAI,GAAG,IAAI,CAAC,KAAI;SACnB;QACD,kBAAkB,CAAC,YAAY,EAAE,IAAI,EAAC;QACtC,aAAa,CAAC,YAAY,EAAE,CAAC,EAAC;QAC9B,gBAAgB,CAAC,YAAY,EAAE,IAAI,EAAC;;QAEpC,OAAO,CAAC,YAAY,CAAC,gBAAgB;KACxC;EACJ;;;AAGD,MAAM,CAAC,cAAc,CAAC,WAAW,CAAC,SAAS,EAAE,aAAa,EAAE;IACxD,KAAK,EAAE,WAAW;IAClB,YAAY,EAAE,IAAI;IAClB,QAAQ,EAAE,IAAI;CACjB,EAAC;;;AAGF;IACI,OAAO,MAAM,KAAK,WAAW;IAC7B,OAAO,MAAM,CAAC,WAAW,KAAK,WAAW;EAC3C;IACE,MAAM,CAAC,cAAc,CAAC,WAAW,CAAC,SAAS,EAAE,MAAM,CAAC,WAAW,CAAC,SAAS,EAAC;CAC7E;;;;;;;;;;;;;"} \ No newline at end of file diff --git a/node_modules/event-target-shim/dist/event-target-shim.mjs b/node_modules/event-target-shim/dist/event-target-shim.mjs new file mode 100644 index 0000000..114f3a1 --- /dev/null +++ 
b/node_modules/event-target-shim/dist/event-target-shim.mjs @@ -0,0 +1,862 @@ +/** + * @author Toru Nagashima + * @copyright 2015 Toru Nagashima. All rights reserved. + * See LICENSE file in root directory for full license. + */ +/** + * @typedef {object} PrivateData + * @property {EventTarget} eventTarget The event target. + * @property {{type:string}} event The original event object. + * @property {number} eventPhase The current event phase. + * @property {EventTarget|null} currentTarget The current event target. + * @property {boolean} canceled The flag to prevent default. + * @property {boolean} stopped The flag to stop propagation. + * @property {boolean} immediateStopped The flag to stop propagation immediately. + * @property {Function|null} passiveListener The listener if the current listener is passive. Otherwise this is null. + * @property {number} timeStamp The unix time. + * @private + */ + +/** + * Private data for event wrappers. + * @type {WeakMap} + * @private + */ +const privateData = new WeakMap(); + +/** + * Cache for wrapper classes. + * @type {WeakMap} + * @private + */ +const wrappers = new WeakMap(); + +/** + * Get private data. + * @param {Event} event The event object to get private data. + * @returns {PrivateData} The private data of the event. + * @private + */ +function pd(event) { + const retv = privateData.get(event); + console.assert( + retv != null, + "'this' is expected an Event object, but got", + event + ); + return retv +} + +/** + * https://dom.spec.whatwg.org/#set-the-canceled-flag + * @param data {PrivateData} private data. + */ +function setCancelFlag(data) { + if (data.passiveListener != null) { + if ( + typeof console !== "undefined" && + typeof console.error === "function" + ) { + console.error( + "Unable to preventDefault inside passive event listener invocation.", + data.passiveListener + ); + } + return + } + if (!data.event.cancelable) { + return + } + + data.canceled = true; + if (typeof data.event.preventDefault === "function") { + data.event.preventDefault(); + } +} + +/** + * @see https://dom.spec.whatwg.org/#interface-event + * @private + */ +/** + * The event wrapper. + * @constructor + * @param {EventTarget} eventTarget The event target of this dispatching. + * @param {Event|{type:string}} event The original event to wrap. + */ +function Event(eventTarget, event) { + privateData.set(this, { + eventTarget, + event, + eventPhase: 2, + currentTarget: eventTarget, + canceled: false, + stopped: false, + immediateStopped: false, + passiveListener: null, + timeStamp: event.timeStamp || Date.now(), + }); + + // https://heycam.github.io/webidl/#Unforgeable + Object.defineProperty(this, "isTrusted", { value: false, enumerable: true }); + + // Define accessors + const keys = Object.keys(event); + for (let i = 0; i < keys.length; ++i) { + const key = keys[i]; + if (!(key in this)) { + Object.defineProperty(this, key, defineRedirectDescriptor(key)); + } + } +} + +// Should be enumerable, but class methods are not enumerable. +Event.prototype = { + /** + * The type of this event. + * @type {string} + */ + get type() { + return pd(this).event.type + }, + + /** + * The target of this event. + * @type {EventTarget} + */ + get target() { + return pd(this).eventTarget + }, + + /** + * The target of this event. + * @type {EventTarget} + */ + get currentTarget() { + return pd(this).currentTarget + }, + + /** + * @returns {EventTarget[]} The composed path of this event. 
+ */ + composedPath() { + const currentTarget = pd(this).currentTarget; + if (currentTarget == null) { + return [] + } + return [currentTarget] + }, + + /** + * Constant of NONE. + * @type {number} + */ + get NONE() { + return 0 + }, + + /** + * Constant of CAPTURING_PHASE. + * @type {number} + */ + get CAPTURING_PHASE() { + return 1 + }, + + /** + * Constant of AT_TARGET. + * @type {number} + */ + get AT_TARGET() { + return 2 + }, + + /** + * Constant of BUBBLING_PHASE. + * @type {number} + */ + get BUBBLING_PHASE() { + return 3 + }, + + /** + * The target of this event. + * @type {number} + */ + get eventPhase() { + return pd(this).eventPhase + }, + + /** + * Stop event bubbling. + * @returns {void} + */ + stopPropagation() { + const data = pd(this); + + data.stopped = true; + if (typeof data.event.stopPropagation === "function") { + data.event.stopPropagation(); + } + }, + + /** + * Stop event bubbling. + * @returns {void} + */ + stopImmediatePropagation() { + const data = pd(this); + + data.stopped = true; + data.immediateStopped = true; + if (typeof data.event.stopImmediatePropagation === "function") { + data.event.stopImmediatePropagation(); + } + }, + + /** + * The flag to be bubbling. + * @type {boolean} + */ + get bubbles() { + return Boolean(pd(this).event.bubbles) + }, + + /** + * The flag to be cancelable. + * @type {boolean} + */ + get cancelable() { + return Boolean(pd(this).event.cancelable) + }, + + /** + * Cancel this event. + * @returns {void} + */ + preventDefault() { + setCancelFlag(pd(this)); + }, + + /** + * The flag to indicate cancellation state. + * @type {boolean} + */ + get defaultPrevented() { + return pd(this).canceled + }, + + /** + * The flag to be composed. + * @type {boolean} + */ + get composed() { + return Boolean(pd(this).event.composed) + }, + + /** + * The unix time of this event. + * @type {number} + */ + get timeStamp() { + return pd(this).timeStamp + }, + + /** + * The target of this event. + * @type {EventTarget} + * @deprecated + */ + get srcElement() { + return pd(this).eventTarget + }, + + /** + * The flag to stop event bubbling. + * @type {boolean} + * @deprecated + */ + get cancelBubble() { + return pd(this).stopped + }, + set cancelBubble(value) { + if (!value) { + return + } + const data = pd(this); + + data.stopped = true; + if (typeof data.event.cancelBubble === "boolean") { + data.event.cancelBubble = true; + } + }, + + /** + * The flag to indicate cancellation state. + * @type {boolean} + * @deprecated + */ + get returnValue() { + return !pd(this).canceled + }, + set returnValue(value) { + if (!value) { + setCancelFlag(pd(this)); + } + }, + + /** + * Initialize this event object. But do nothing under event dispatching. + * @param {string} type The event type. + * @param {boolean} [bubbles=false] The flag to be possible to bubble up. + * @param {boolean} [cancelable=false] The flag to be possible to cancel. + * @deprecated + */ + initEvent() { + // Do nothing. + }, +}; + +// `constructor` is not enumerable. +Object.defineProperty(Event.prototype, "constructor", { + value: Event, + configurable: true, + writable: true, +}); + +// Ensure `event instanceof window.Event` is `true`. +if (typeof window !== "undefined" && typeof window.Event !== "undefined") { + Object.setPrototypeOf(Event.prototype, window.Event.prototype); + + // Make association for wrappers. + wrappers.set(window.Event.prototype, Event); +} + +/** + * Get the property descriptor to redirect a given property. 
+ * @param {string} key Property name to define property descriptor. + * @returns {PropertyDescriptor} The property descriptor to redirect the property. + * @private + */ +function defineRedirectDescriptor(key) { + return { + get() { + return pd(this).event[key] + }, + set(value) { + pd(this).event[key] = value; + }, + configurable: true, + enumerable: true, + } +} + +/** + * Get the property descriptor to call a given method property. + * @param {string} key Property name to define property descriptor. + * @returns {PropertyDescriptor} The property descriptor to call the method property. + * @private + */ +function defineCallDescriptor(key) { + return { + value() { + const event = pd(this).event; + return event[key].apply(event, arguments) + }, + configurable: true, + enumerable: true, + } +} + +/** + * Define new wrapper class. + * @param {Function} BaseEvent The base wrapper class. + * @param {Object} proto The prototype of the original event. + * @returns {Function} The defined wrapper class. + * @private + */ +function defineWrapper(BaseEvent, proto) { + const keys = Object.keys(proto); + if (keys.length === 0) { + return BaseEvent + } + + /** CustomEvent */ + function CustomEvent(eventTarget, event) { + BaseEvent.call(this, eventTarget, event); + } + + CustomEvent.prototype = Object.create(BaseEvent.prototype, { + constructor: { value: CustomEvent, configurable: true, writable: true }, + }); + + // Define accessors. + for (let i = 0; i < keys.length; ++i) { + const key = keys[i]; + if (!(key in BaseEvent.prototype)) { + const descriptor = Object.getOwnPropertyDescriptor(proto, key); + const isFunc = typeof descriptor.value === "function"; + Object.defineProperty( + CustomEvent.prototype, + key, + isFunc + ? defineCallDescriptor(key) + : defineRedirectDescriptor(key) + ); + } + } + + return CustomEvent +} + +/** + * Get the wrapper class of a given prototype. + * @param {Object} proto The prototype of the original event to get its wrapper. + * @returns {Function} The wrapper class. + * @private + */ +function getWrapper(proto) { + if (proto == null || proto === Object.prototype) { + return Event + } + + let wrapper = wrappers.get(proto); + if (wrapper == null) { + wrapper = defineWrapper(getWrapper(Object.getPrototypeOf(proto)), proto); + wrappers.set(proto, wrapper); + } + return wrapper +} + +/** + * Wrap a given event to management a dispatching. + * @param {EventTarget} eventTarget The event target of this dispatching. + * @param {Object} event The event to wrap. + * @returns {Event} The wrapper instance. + * @private + */ +function wrapEvent(eventTarget, event) { + const Wrapper = getWrapper(Object.getPrototypeOf(event)); + return new Wrapper(eventTarget, event) +} + +/** + * Get the immediateStopped flag of a given event. + * @param {Event} event The event to get. + * @returns {boolean} The flag to stop propagation immediately. + * @private + */ +function isStopped(event) { + return pd(event).immediateStopped +} + +/** + * Set the current event phase of a given event. + * @param {Event} event The event to set current target. + * @param {number} eventPhase New event phase. + * @returns {void} + * @private + */ +function setEventPhase(event, eventPhase) { + pd(event).eventPhase = eventPhase; +} + +/** + * Set the current target of a given event. + * @param {Event} event The event to set current target. + * @param {EventTarget|null} currentTarget New current target. 
+ * @returns {void} + * @private + */ +function setCurrentTarget(event, currentTarget) { + pd(event).currentTarget = currentTarget; +} + +/** + * Set a passive listener of a given event. + * @param {Event} event The event to set current target. + * @param {Function|null} passiveListener New passive listener. + * @returns {void} + * @private + */ +function setPassiveListener(event, passiveListener) { + pd(event).passiveListener = passiveListener; +} + +/** + * @typedef {object} ListenerNode + * @property {Function} listener + * @property {1|2|3} listenerType + * @property {boolean} passive + * @property {boolean} once + * @property {ListenerNode|null} next + * @private + */ + +/** + * @type {WeakMap>} + * @private + */ +const listenersMap = new WeakMap(); + +// Listener types +const CAPTURE = 1; +const BUBBLE = 2; +const ATTRIBUTE = 3; + +/** + * Check whether a given value is an object or not. + * @param {any} x The value to check. + * @returns {boolean} `true` if the value is an object. + */ +function isObject(x) { + return x !== null && typeof x === "object" //eslint-disable-line no-restricted-syntax +} + +/** + * Get listeners. + * @param {EventTarget} eventTarget The event target to get. + * @returns {Map} The listeners. + * @private + */ +function getListeners(eventTarget) { + const listeners = listenersMap.get(eventTarget); + if (listeners == null) { + throw new TypeError( + "'this' is expected an EventTarget object, but got another value." + ) + } + return listeners +} + +/** + * Get the property descriptor for the event attribute of a given event. + * @param {string} eventName The event name to get property descriptor. + * @returns {PropertyDescriptor} The property descriptor. + * @private + */ +function defineEventAttributeDescriptor(eventName) { + return { + get() { + const listeners = getListeners(this); + let node = listeners.get(eventName); + while (node != null) { + if (node.listenerType === ATTRIBUTE) { + return node.listener + } + node = node.next; + } + return null + }, + + set(listener) { + if (typeof listener !== "function" && !isObject(listener)) { + listener = null; // eslint-disable-line no-param-reassign + } + const listeners = getListeners(this); + + // Traverse to the tail while removing old value. + let prev = null; + let node = listeners.get(eventName); + while (node != null) { + if (node.listenerType === ATTRIBUTE) { + // Remove old value. + if (prev !== null) { + prev.next = node.next; + } else if (node.next !== null) { + listeners.set(eventName, node.next); + } else { + listeners.delete(eventName); + } + } else { + prev = node; + } + + node = node.next; + } + + // Add new value. + if (listener !== null) { + const newNode = { + listener, + listenerType: ATTRIBUTE, + passive: false, + once: false, + next: null, + }; + if (prev === null) { + listeners.set(eventName, newNode); + } else { + prev.next = newNode; + } + } + }, + configurable: true, + enumerable: true, + } +} + +/** + * Define an event attribute (e.g. `eventTarget.onclick`). + * @param {Object} eventTargetPrototype The event target prototype to define an event attrbite. + * @param {string} eventName The event name to define. + * @returns {void} + */ +function defineEventAttribute(eventTargetPrototype, eventName) { + Object.defineProperty( + eventTargetPrototype, + `on${eventName}`, + defineEventAttributeDescriptor(eventName) + ); +} + +/** + * Define a custom EventTarget with event attributes. + * @param {string[]} eventNames Event names for event attributes. 
+ * @returns {EventTarget} The custom EventTarget. + * @private + */ +function defineCustomEventTarget(eventNames) { + /** CustomEventTarget */ + function CustomEventTarget() { + EventTarget.call(this); + } + + CustomEventTarget.prototype = Object.create(EventTarget.prototype, { + constructor: { + value: CustomEventTarget, + configurable: true, + writable: true, + }, + }); + + for (let i = 0; i < eventNames.length; ++i) { + defineEventAttribute(CustomEventTarget.prototype, eventNames[i]); + } + + return CustomEventTarget +} + +/** + * EventTarget. + * + * - This is constructor if no arguments. + * - This is a function which returns a CustomEventTarget constructor if there are arguments. + * + * For example: + * + * class A extends EventTarget {} + * class B extends EventTarget("message") {} + * class C extends EventTarget("message", "error") {} + * class D extends EventTarget(["message", "error"]) {} + */ +function EventTarget() { + /*eslint-disable consistent-return */ + if (this instanceof EventTarget) { + listenersMap.set(this, new Map()); + return + } + if (arguments.length === 1 && Array.isArray(arguments[0])) { + return defineCustomEventTarget(arguments[0]) + } + if (arguments.length > 0) { + const types = new Array(arguments.length); + for (let i = 0; i < arguments.length; ++i) { + types[i] = arguments[i]; + } + return defineCustomEventTarget(types) + } + throw new TypeError("Cannot call a class as a function") + /*eslint-enable consistent-return */ +} + +// Should be enumerable, but class methods are not enumerable. +EventTarget.prototype = { + /** + * Add a given listener to this event target. + * @param {string} eventName The event name to add. + * @param {Function} listener The listener to add. + * @param {boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener. + * @returns {void} + */ + addEventListener(eventName, listener, options) { + if (listener == null) { + return + } + if (typeof listener !== "function" && !isObject(listener)) { + throw new TypeError("'listener' should be a function or an object.") + } + + const listeners = getListeners(this); + const optionsIsObj = isObject(options); + const capture = optionsIsObj + ? Boolean(options.capture) + : Boolean(options); + const listenerType = capture ? CAPTURE : BUBBLE; + const newNode = { + listener, + listenerType, + passive: optionsIsObj && Boolean(options.passive), + once: optionsIsObj && Boolean(options.once), + next: null, + }; + + // Set it as the first node if the first node is null. + let node = listeners.get(eventName); + if (node === undefined) { + listeners.set(eventName, newNode); + return + } + + // Traverse to the tail while checking duplication.. + let prev = null; + while (node != null) { + if ( + node.listener === listener && + node.listenerType === listenerType + ) { + // Should ignore duplication. + return + } + prev = node; + node = node.next; + } + + // Add it. + prev.next = newNode; + }, + + /** + * Remove a given listener from this event target. + * @param {string} eventName The event name to remove. + * @param {Function} listener The listener to remove. + * @param {boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener. + * @returns {void} + */ + removeEventListener(eventName, listener, options) { + if (listener == null) { + return + } + + const listeners = getListeners(this); + const capture = isObject(options) + ? Boolean(options.capture) + : Boolean(options); + const listenerType = capture ? 
CAPTURE : BUBBLE; + + let prev = null; + let node = listeners.get(eventName); + while (node != null) { + if ( + node.listener === listener && + node.listenerType === listenerType + ) { + if (prev !== null) { + prev.next = node.next; + } else if (node.next !== null) { + listeners.set(eventName, node.next); + } else { + listeners.delete(eventName); + } + return + } + + prev = node; + node = node.next; + } + }, + + /** + * Dispatch a given event. + * @param {Event|{type:string}} event The event to dispatch. + * @returns {boolean} `false` if canceled. + */ + dispatchEvent(event) { + if (event == null || typeof event.type !== "string") { + throw new TypeError('"event.type" should be a string.') + } + + // If listeners aren't registered, terminate. + const listeners = getListeners(this); + const eventName = event.type; + let node = listeners.get(eventName); + if (node == null) { + return true + } + + // Since we cannot rewrite several properties, so wrap object. + const wrappedEvent = wrapEvent(this, event); + + // This doesn't process capturing phase and bubbling phase. + // This isn't participating in a tree. + let prev = null; + while (node != null) { + // Remove this listener if it's once + if (node.once) { + if (prev !== null) { + prev.next = node.next; + } else if (node.next !== null) { + listeners.set(eventName, node.next); + } else { + listeners.delete(eventName); + } + } else { + prev = node; + } + + // Call this listener + setPassiveListener( + wrappedEvent, + node.passive ? node.listener : null + ); + if (typeof node.listener === "function") { + try { + node.listener.call(this, wrappedEvent); + } catch (err) { + if ( + typeof console !== "undefined" && + typeof console.error === "function" + ) { + console.error(err); + } + } + } else if ( + node.listenerType !== ATTRIBUTE && + typeof node.listener.handleEvent === "function" + ) { + node.listener.handleEvent(wrappedEvent); + } + + // Break if `event.stopImmediatePropagation` was called. + if (isStopped(wrappedEvent)) { + break + } + + node = node.next; + } + setPassiveListener(wrappedEvent, null); + setEventPhase(wrappedEvent, 0); + setCurrentTarget(wrappedEvent, null); + + return !wrappedEvent.defaultPrevented + }, +}; + +// `constructor` is not enumerable. +Object.defineProperty(EventTarget.prototype, "constructor", { + value: EventTarget, + configurable: true, + writable: true, +}); + +// Ensure `eventTarget instanceof window.EventTarget` is `true`. 
+if ( + typeof window !== "undefined" && + typeof window.EventTarget !== "undefined" +) { + Object.setPrototypeOf(EventTarget.prototype, window.EventTarget.prototype); +} + +export default EventTarget; +export { defineEventAttribute, EventTarget }; +//# sourceMappingURL=event-target-shim.mjs.map diff --git a/node_modules/event-target-shim/dist/event-target-shim.mjs.map b/node_modules/event-target-shim/dist/event-target-shim.mjs.map new file mode 100644 index 0000000..57b3e8f --- /dev/null +++ b/node_modules/event-target-shim/dist/event-target-shim.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"event-target-shim.mjs","sources":["../src/event.mjs","../src/event-target.mjs"],"sourcesContent":["/**\n * @typedef {object} PrivateData\n * @property {EventTarget} eventTarget The event target.\n * @property {{type:string}} event The original event object.\n * @property {number} eventPhase The current event phase.\n * @property {EventTarget|null} currentTarget The current event target.\n * @property {boolean} canceled The flag to prevent default.\n * @property {boolean} stopped The flag to stop propagation.\n * @property {boolean} immediateStopped The flag to stop propagation immediately.\n * @property {Function|null} passiveListener The listener if the current listener is passive. Otherwise this is null.\n * @property {number} timeStamp The unix time.\n * @private\n */\n\n/**\n * Private data for event wrappers.\n * @type {WeakMap}\n * @private\n */\nconst privateData = new WeakMap()\n\n/**\n * Cache for wrapper classes.\n * @type {WeakMap}\n * @private\n */\nconst wrappers = new WeakMap()\n\n/**\n * Get private data.\n * @param {Event} event The event object to get private data.\n * @returns {PrivateData} The private data of the event.\n * @private\n */\nfunction pd(event) {\n const retv = privateData.get(event)\n console.assert(\n retv != null,\n \"'this' is expected an Event object, but got\",\n event\n )\n return retv\n}\n\n/**\n * https://dom.spec.whatwg.org/#set-the-canceled-flag\n * @param data {PrivateData} private data.\n */\nfunction setCancelFlag(data) {\n if (data.passiveListener != null) {\n if (\n typeof console !== \"undefined\" &&\n typeof console.error === \"function\"\n ) {\n console.error(\n \"Unable to preventDefault inside passive event listener invocation.\",\n data.passiveListener\n )\n }\n return\n }\n if (!data.event.cancelable) {\n return\n }\n\n data.canceled = true\n if (typeof data.event.preventDefault === \"function\") {\n data.event.preventDefault()\n }\n}\n\n/**\n * @see https://dom.spec.whatwg.org/#interface-event\n * @private\n */\n/**\n * The event wrapper.\n * @constructor\n * @param {EventTarget} eventTarget The event target of this dispatching.\n * @param {Event|{type:string}} event The original event to wrap.\n */\nfunction Event(eventTarget, event) {\n privateData.set(this, {\n eventTarget,\n event,\n eventPhase: 2,\n currentTarget: eventTarget,\n canceled: false,\n stopped: false,\n immediateStopped: false,\n passiveListener: null,\n timeStamp: event.timeStamp || Date.now(),\n })\n\n // https://heycam.github.io/webidl/#Unforgeable\n Object.defineProperty(this, \"isTrusted\", { value: false, enumerable: true })\n\n // Define accessors\n const keys = Object.keys(event)\n for (let i = 0; i < keys.length; ++i) {\n const key = keys[i]\n if (!(key in this)) {\n Object.defineProperty(this, key, defineRedirectDescriptor(key))\n }\n }\n}\n\n// Should be enumerable, but class methods are not enumerable.\nEvent.prototype = {\n /**\n * The type of this event.\n * @type 
{string}\n */\n get type() {\n return pd(this).event.type\n },\n\n /**\n * The target of this event.\n * @type {EventTarget}\n */\n get target() {\n return pd(this).eventTarget\n },\n\n /**\n * The target of this event.\n * @type {EventTarget}\n */\n get currentTarget() {\n return pd(this).currentTarget\n },\n\n /**\n * @returns {EventTarget[]} The composed path of this event.\n */\n composedPath() {\n const currentTarget = pd(this).currentTarget\n if (currentTarget == null) {\n return []\n }\n return [currentTarget]\n },\n\n /**\n * Constant of NONE.\n * @type {number}\n */\n get NONE() {\n return 0\n },\n\n /**\n * Constant of CAPTURING_PHASE.\n * @type {number}\n */\n get CAPTURING_PHASE() {\n return 1\n },\n\n /**\n * Constant of AT_TARGET.\n * @type {number}\n */\n get AT_TARGET() {\n return 2\n },\n\n /**\n * Constant of BUBBLING_PHASE.\n * @type {number}\n */\n get BUBBLING_PHASE() {\n return 3\n },\n\n /**\n * The target of this event.\n * @type {number}\n */\n get eventPhase() {\n return pd(this).eventPhase\n },\n\n /**\n * Stop event bubbling.\n * @returns {void}\n */\n stopPropagation() {\n const data = pd(this)\n\n data.stopped = true\n if (typeof data.event.stopPropagation === \"function\") {\n data.event.stopPropagation()\n }\n },\n\n /**\n * Stop event bubbling.\n * @returns {void}\n */\n stopImmediatePropagation() {\n const data = pd(this)\n\n data.stopped = true\n data.immediateStopped = true\n if (typeof data.event.stopImmediatePropagation === \"function\") {\n data.event.stopImmediatePropagation()\n }\n },\n\n /**\n * The flag to be bubbling.\n * @type {boolean}\n */\n get bubbles() {\n return Boolean(pd(this).event.bubbles)\n },\n\n /**\n * The flag to be cancelable.\n * @type {boolean}\n */\n get cancelable() {\n return Boolean(pd(this).event.cancelable)\n },\n\n /**\n * Cancel this event.\n * @returns {void}\n */\n preventDefault() {\n setCancelFlag(pd(this))\n },\n\n /**\n * The flag to indicate cancellation state.\n * @type {boolean}\n */\n get defaultPrevented() {\n return pd(this).canceled\n },\n\n /**\n * The flag to be composed.\n * @type {boolean}\n */\n get composed() {\n return Boolean(pd(this).event.composed)\n },\n\n /**\n * The unix time of this event.\n * @type {number}\n */\n get timeStamp() {\n return pd(this).timeStamp\n },\n\n /**\n * The target of this event.\n * @type {EventTarget}\n * @deprecated\n */\n get srcElement() {\n return pd(this).eventTarget\n },\n\n /**\n * The flag to stop event bubbling.\n * @type {boolean}\n * @deprecated\n */\n get cancelBubble() {\n return pd(this).stopped\n },\n set cancelBubble(value) {\n if (!value) {\n return\n }\n const data = pd(this)\n\n data.stopped = true\n if (typeof data.event.cancelBubble === \"boolean\") {\n data.event.cancelBubble = true\n }\n },\n\n /**\n * The flag to indicate cancellation state.\n * @type {boolean}\n * @deprecated\n */\n get returnValue() {\n return !pd(this).canceled\n },\n set returnValue(value) {\n if (!value) {\n setCancelFlag(pd(this))\n }\n },\n\n /**\n * Initialize this event object. 
But do nothing under event dispatching.\n * @param {string} type The event type.\n * @param {boolean} [bubbles=false] The flag to be possible to bubble up.\n * @param {boolean} [cancelable=false] The flag to be possible to cancel.\n * @deprecated\n */\n initEvent() {\n // Do nothing.\n },\n}\n\n// `constructor` is not enumerable.\nObject.defineProperty(Event.prototype, \"constructor\", {\n value: Event,\n configurable: true,\n writable: true,\n})\n\n// Ensure `event instanceof window.Event` is `true`.\nif (typeof window !== \"undefined\" && typeof window.Event !== \"undefined\") {\n Object.setPrototypeOf(Event.prototype, window.Event.prototype)\n\n // Make association for wrappers.\n wrappers.set(window.Event.prototype, Event)\n}\n\n/**\n * Get the property descriptor to redirect a given property.\n * @param {string} key Property name to define property descriptor.\n * @returns {PropertyDescriptor} The property descriptor to redirect the property.\n * @private\n */\nfunction defineRedirectDescriptor(key) {\n return {\n get() {\n return pd(this).event[key]\n },\n set(value) {\n pd(this).event[key] = value\n },\n configurable: true,\n enumerable: true,\n }\n}\n\n/**\n * Get the property descriptor to call a given method property.\n * @param {string} key Property name to define property descriptor.\n * @returns {PropertyDescriptor} The property descriptor to call the method property.\n * @private\n */\nfunction defineCallDescriptor(key) {\n return {\n value() {\n const event = pd(this).event\n return event[key].apply(event, arguments)\n },\n configurable: true,\n enumerable: true,\n }\n}\n\n/**\n * Define new wrapper class.\n * @param {Function} BaseEvent The base wrapper class.\n * @param {Object} proto The prototype of the original event.\n * @returns {Function} The defined wrapper class.\n * @private\n */\nfunction defineWrapper(BaseEvent, proto) {\n const keys = Object.keys(proto)\n if (keys.length === 0) {\n return BaseEvent\n }\n\n /** CustomEvent */\n function CustomEvent(eventTarget, event) {\n BaseEvent.call(this, eventTarget, event)\n }\n\n CustomEvent.prototype = Object.create(BaseEvent.prototype, {\n constructor: { value: CustomEvent, configurable: true, writable: true },\n })\n\n // Define accessors.\n for (let i = 0; i < keys.length; ++i) {\n const key = keys[i]\n if (!(key in BaseEvent.prototype)) {\n const descriptor = Object.getOwnPropertyDescriptor(proto, key)\n const isFunc = typeof descriptor.value === \"function\"\n Object.defineProperty(\n CustomEvent.prototype,\n key,\n isFunc\n ? 
defineCallDescriptor(key)\n : defineRedirectDescriptor(key)\n )\n }\n }\n\n return CustomEvent\n}\n\n/**\n * Get the wrapper class of a given prototype.\n * @param {Object} proto The prototype of the original event to get its wrapper.\n * @returns {Function} The wrapper class.\n * @private\n */\nfunction getWrapper(proto) {\n if (proto == null || proto === Object.prototype) {\n return Event\n }\n\n let wrapper = wrappers.get(proto)\n if (wrapper == null) {\n wrapper = defineWrapper(getWrapper(Object.getPrototypeOf(proto)), proto)\n wrappers.set(proto, wrapper)\n }\n return wrapper\n}\n\n/**\n * Wrap a given event to management a dispatching.\n * @param {EventTarget} eventTarget The event target of this dispatching.\n * @param {Object} event The event to wrap.\n * @returns {Event} The wrapper instance.\n * @private\n */\nexport function wrapEvent(eventTarget, event) {\n const Wrapper = getWrapper(Object.getPrototypeOf(event))\n return new Wrapper(eventTarget, event)\n}\n\n/**\n * Get the immediateStopped flag of a given event.\n * @param {Event} event The event to get.\n * @returns {boolean} The flag to stop propagation immediately.\n * @private\n */\nexport function isStopped(event) {\n return pd(event).immediateStopped\n}\n\n/**\n * Set the current event phase of a given event.\n * @param {Event} event The event to set current target.\n * @param {number} eventPhase New event phase.\n * @returns {void}\n * @private\n */\nexport function setEventPhase(event, eventPhase) {\n pd(event).eventPhase = eventPhase\n}\n\n/**\n * Set the current target of a given event.\n * @param {Event} event The event to set current target.\n * @param {EventTarget|null} currentTarget New current target.\n * @returns {void}\n * @private\n */\nexport function setCurrentTarget(event, currentTarget) {\n pd(event).currentTarget = currentTarget\n}\n\n/**\n * Set a passive listener of a given event.\n * @param {Event} event The event to set current target.\n * @param {Function|null} passiveListener New passive listener.\n * @returns {void}\n * @private\n */\nexport function setPassiveListener(event, passiveListener) {\n pd(event).passiveListener = passiveListener\n}\n","import {\n isStopped,\n setCurrentTarget,\n setEventPhase,\n setPassiveListener,\n wrapEvent,\n} from \"./event.mjs\"\n\n/**\n * @typedef {object} ListenerNode\n * @property {Function} listener\n * @property {1|2|3} listenerType\n * @property {boolean} passive\n * @property {boolean} once\n * @property {ListenerNode|null} next\n * @private\n */\n\n/**\n * @type {WeakMap>}\n * @private\n */\nconst listenersMap = new WeakMap()\n\n// Listener types\nconst CAPTURE = 1\nconst BUBBLE = 2\nconst ATTRIBUTE = 3\n\n/**\n * Check whether a given value is an object or not.\n * @param {any} x The value to check.\n * @returns {boolean} `true` if the value is an object.\n */\nfunction isObject(x) {\n return x !== null && typeof x === \"object\" //eslint-disable-line no-restricted-syntax\n}\n\n/**\n * Get listeners.\n * @param {EventTarget} eventTarget The event target to get.\n * @returns {Map} The listeners.\n * @private\n */\nfunction getListeners(eventTarget) {\n const listeners = listenersMap.get(eventTarget)\n if (listeners == null) {\n throw new TypeError(\n \"'this' is expected an EventTarget object, but got another value.\"\n )\n }\n return listeners\n}\n\n/**\n * Get the property descriptor for the event attribute of a given event.\n * @param {string} eventName The event name to get property descriptor.\n * @returns {PropertyDescriptor} The property 
descriptor.\n * @private\n */\nfunction defineEventAttributeDescriptor(eventName) {\n return {\n get() {\n const listeners = getListeners(this)\n let node = listeners.get(eventName)\n while (node != null) {\n if (node.listenerType === ATTRIBUTE) {\n return node.listener\n }\n node = node.next\n }\n return null\n },\n\n set(listener) {\n if (typeof listener !== \"function\" && !isObject(listener)) {\n listener = null // eslint-disable-line no-param-reassign\n }\n const listeners = getListeners(this)\n\n // Traverse to the tail while removing old value.\n let prev = null\n let node = listeners.get(eventName)\n while (node != null) {\n if (node.listenerType === ATTRIBUTE) {\n // Remove old value.\n if (prev !== null) {\n prev.next = node.next\n } else if (node.next !== null) {\n listeners.set(eventName, node.next)\n } else {\n listeners.delete(eventName)\n }\n } else {\n prev = node\n }\n\n node = node.next\n }\n\n // Add new value.\n if (listener !== null) {\n const newNode = {\n listener,\n listenerType: ATTRIBUTE,\n passive: false,\n once: false,\n next: null,\n }\n if (prev === null) {\n listeners.set(eventName, newNode)\n } else {\n prev.next = newNode\n }\n }\n },\n configurable: true,\n enumerable: true,\n }\n}\n\n/**\n * Define an event attribute (e.g. `eventTarget.onclick`).\n * @param {Object} eventTargetPrototype The event target prototype to define an event attrbite.\n * @param {string} eventName The event name to define.\n * @returns {void}\n */\nfunction defineEventAttribute(eventTargetPrototype, eventName) {\n Object.defineProperty(\n eventTargetPrototype,\n `on${eventName}`,\n defineEventAttributeDescriptor(eventName)\n )\n}\n\n/**\n * Define a custom EventTarget with event attributes.\n * @param {string[]} eventNames Event names for event attributes.\n * @returns {EventTarget} The custom EventTarget.\n * @private\n */\nfunction defineCustomEventTarget(eventNames) {\n /** CustomEventTarget */\n function CustomEventTarget() {\n EventTarget.call(this)\n }\n\n CustomEventTarget.prototype = Object.create(EventTarget.prototype, {\n constructor: {\n value: CustomEventTarget,\n configurable: true,\n writable: true,\n },\n })\n\n for (let i = 0; i < eventNames.length; ++i) {\n defineEventAttribute(CustomEventTarget.prototype, eventNames[i])\n }\n\n return CustomEventTarget\n}\n\n/**\n * EventTarget.\n *\n * - This is constructor if no arguments.\n * - This is a function which returns a CustomEventTarget constructor if there are arguments.\n *\n * For example:\n *\n * class A extends EventTarget {}\n * class B extends EventTarget(\"message\") {}\n * class C extends EventTarget(\"message\", \"error\") {}\n * class D extends EventTarget([\"message\", \"error\"]) {}\n */\nfunction EventTarget() {\n /*eslint-disable consistent-return */\n if (this instanceof EventTarget) {\n listenersMap.set(this, new Map())\n return\n }\n if (arguments.length === 1 && Array.isArray(arguments[0])) {\n return defineCustomEventTarget(arguments[0])\n }\n if (arguments.length > 0) {\n const types = new Array(arguments.length)\n for (let i = 0; i < arguments.length; ++i) {\n types[i] = arguments[i]\n }\n return defineCustomEventTarget(types)\n }\n throw new TypeError(\"Cannot call a class as a function\")\n /*eslint-enable consistent-return */\n}\n\n// Should be enumerable, but class methods are not enumerable.\nEventTarget.prototype = {\n /**\n * Add a given listener to this event target.\n * @param {string} eventName The event name to add.\n * @param {Function} listener The listener to add.\n * @param 
{boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener.\n * @returns {void}\n */\n addEventListener(eventName, listener, options) {\n if (listener == null) {\n return\n }\n if (typeof listener !== \"function\" && !isObject(listener)) {\n throw new TypeError(\"'listener' should be a function or an object.\")\n }\n\n const listeners = getListeners(this)\n const optionsIsObj = isObject(options)\n const capture = optionsIsObj\n ? Boolean(options.capture)\n : Boolean(options)\n const listenerType = capture ? CAPTURE : BUBBLE\n const newNode = {\n listener,\n listenerType,\n passive: optionsIsObj && Boolean(options.passive),\n once: optionsIsObj && Boolean(options.once),\n next: null,\n }\n\n // Set it as the first node if the first node is null.\n let node = listeners.get(eventName)\n if (node === undefined) {\n listeners.set(eventName, newNode)\n return\n }\n\n // Traverse to the tail while checking duplication..\n let prev = null\n while (node != null) {\n if (\n node.listener === listener &&\n node.listenerType === listenerType\n ) {\n // Should ignore duplication.\n return\n }\n prev = node\n node = node.next\n }\n\n // Add it.\n prev.next = newNode\n },\n\n /**\n * Remove a given listener from this event target.\n * @param {string} eventName The event name to remove.\n * @param {Function} listener The listener to remove.\n * @param {boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener.\n * @returns {void}\n */\n removeEventListener(eventName, listener, options) {\n if (listener == null) {\n return\n }\n\n const listeners = getListeners(this)\n const capture = isObject(options)\n ? Boolean(options.capture)\n : Boolean(options)\n const listenerType = capture ? CAPTURE : BUBBLE\n\n let prev = null\n let node = listeners.get(eventName)\n while (node != null) {\n if (\n node.listener === listener &&\n node.listenerType === listenerType\n ) {\n if (prev !== null) {\n prev.next = node.next\n } else if (node.next !== null) {\n listeners.set(eventName, node.next)\n } else {\n listeners.delete(eventName)\n }\n return\n }\n\n prev = node\n node = node.next\n }\n },\n\n /**\n * Dispatch a given event.\n * @param {Event|{type:string}} event The event to dispatch.\n * @returns {boolean} `false` if canceled.\n */\n dispatchEvent(event) {\n if (event == null || typeof event.type !== \"string\") {\n throw new TypeError('\"event.type\" should be a string.')\n }\n\n // If listeners aren't registered, terminate.\n const listeners = getListeners(this)\n const eventName = event.type\n let node = listeners.get(eventName)\n if (node == null) {\n return true\n }\n\n // Since we cannot rewrite several properties, so wrap object.\n const wrappedEvent = wrapEvent(this, event)\n\n // This doesn't process capturing phase and bubbling phase.\n // This isn't participating in a tree.\n let prev = null\n while (node != null) {\n // Remove this listener if it's once\n if (node.once) {\n if (prev !== null) {\n prev.next = node.next\n } else if (node.next !== null) {\n listeners.set(eventName, node.next)\n } else {\n listeners.delete(eventName)\n }\n } else {\n prev = node\n }\n\n // Call this listener\n setPassiveListener(\n wrappedEvent,\n node.passive ? 
node.listener : null\n )\n if (typeof node.listener === \"function\") {\n try {\n node.listener.call(this, wrappedEvent)\n } catch (err) {\n if (\n typeof console !== \"undefined\" &&\n typeof console.error === \"function\"\n ) {\n console.error(err)\n }\n }\n } else if (\n node.listenerType !== ATTRIBUTE &&\n typeof node.listener.handleEvent === \"function\"\n ) {\n node.listener.handleEvent(wrappedEvent)\n }\n\n // Break if `event.stopImmediatePropagation` was called.\n if (isStopped(wrappedEvent)) {\n break\n }\n\n node = node.next\n }\n setPassiveListener(wrappedEvent, null)\n setEventPhase(wrappedEvent, 0)\n setCurrentTarget(wrappedEvent, null)\n\n return !wrappedEvent.defaultPrevented\n },\n}\n\n// `constructor` is not enumerable.\nObject.defineProperty(EventTarget.prototype, \"constructor\", {\n value: EventTarget,\n configurable: true,\n writable: true,\n})\n\n// Ensure `eventTarget instanceof window.EventTarget` is `true`.\nif (\n typeof window !== \"undefined\" &&\n typeof window.EventTarget !== \"undefined\"\n) {\n Object.setPrototypeOf(EventTarget.prototype, window.EventTarget.prototype)\n}\n\nexport { defineEventAttribute, EventTarget }\nexport default EventTarget\n"],"names":[],"mappings":";;;;;AAAA;;;;;;;;;;;;;;;;;;;AAmBA,MAAM,WAAW,GAAG,IAAI,OAAO,GAAE;;;;;;;AAOjC,MAAM,QAAQ,GAAG,IAAI,OAAO,GAAE;;;;;;;;AAQ9B,SAAS,EAAE,CAAC,KAAK,EAAE;IACf,MAAM,IAAI,GAAG,WAAW,CAAC,GAAG,CAAC,KAAK,EAAC;IACnC,OAAO,CAAC,MAAM;QACV,IAAI,IAAI,IAAI;QACZ,6CAA6C;QAC7C,KAAK;MACR;IACD,OAAO,IAAI;CACd;;;;;;AAMD,SAAS,aAAa,CAAC,IAAI,EAAE;IACzB,IAAI,IAAI,CAAC,eAAe,IAAI,IAAI,EAAE;QAC9B;YACI,OAAO,OAAO,KAAK,WAAW;YAC9B,OAAO,OAAO,CAAC,KAAK,KAAK,UAAU;UACrC;YACE,OAAO,CAAC,KAAK;gBACT,oEAAoE;gBACpE,IAAI,CAAC,eAAe;cACvB;SACJ;QACD,MAAM;KACT;IACD,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,UAAU,EAAE;QACxB,MAAM;KACT;;IAED,IAAI,CAAC,QAAQ,GAAG,KAAI;IACpB,IAAI,OAAO,IAAI,CAAC,KAAK,CAAC,cAAc,KAAK,UAAU,EAAE;QACjD,IAAI,CAAC,KAAK,CAAC,cAAc,GAAE;KAC9B;CACJ;;;;;;;;;;;;AAYD,SAAS,KAAK,CAAC,WAAW,EAAE,KAAK,EAAE;IAC/B,WAAW,CAAC,GAAG,CAAC,IAAI,EAAE;QAClB,WAAW;QACX,KAAK;QACL,UAAU,EAAE,CAAC;QACb,aAAa,EAAE,WAAW;QAC1B,QAAQ,EAAE,KAAK;QACf,OAAO,EAAE,KAAK;QACd,gBAAgB,EAAE,KAAK;QACvB,eAAe,EAAE,IAAI;QACrB,SAAS,EAAE,KAAK,CAAC,SAAS,IAAI,IAAI,CAAC,GAAG,EAAE;KAC3C,EAAC;;;IAGF,MAAM,CAAC,cAAc,CAAC,IAAI,EAAE,WAAW,EAAE,EAAE,KAAK,EAAE,KAAK,EAAE,UAAU,EAAE,IAAI,EAAE,EAAC;;;IAG5E,MAAM,IAAI,GAAG,MAAM,CAAC,IAAI,CAAC,KAAK,EAAC;IAC/B,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;QAClC,MAAM,GAAG,GAAG,IAAI,CAAC,CAAC,EAAC;QACnB,IAAI,EAAE,GAAG,IAAI,IAAI,CAAC,EAAE;YAChB,MAAM,CAAC,cAAc,CAAC,IAAI,EAAE,GAAG,EAAE,wBAAwB,CAAC,GAAG,CAAC,EAAC;SAClE;KACJ;CACJ;;;AAGD,KAAK,CAAC,SAAS,GAAG;;;;;IAKd,IAAI,IAAI,GAAG;QACP,OAAO,EAAE,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,IAAI;KAC7B;;;;;;IAMD,IAAI,MAAM,GAAG;QACT,OAAO,EAAE,CAAC,IAAI,CAAC,CAAC,WAAW;KAC9B;;;;;;IAMD,IAAI,aAAa,GAAG;QAChB,OAAO,EAAE,CAAC,IAAI,CAAC,CAAC,aAAa;KAChC;;;;;IAKD,YAAY,GAAG;QACX,MAAM,aAAa,GAAG,EAAE,CAAC,IAAI,CAAC,CAAC,cAAa;QAC5C,IAAI,aAAa,IAAI,IAAI,EAAE;YACvB,OAAO,EAAE;SACZ;QACD,OAAO,CAAC,aAAa,CAAC;KACzB;;;;;;IAMD,IAAI,IAAI,GAAG;QACP,OAAO,CAAC;KACX;;;;;;IAMD,IAAI,eAAe,GAAG;QAClB,OAAO,CAAC;KACX;;;;;;IAMD,IAAI,SAAS,GAAG;QACZ,OAAO,CAAC;KACX;;;;;;IAMD,IAAI,cAAc,GAAG;QACjB,OAAO,CAAC;KACX;;;;;;IAMD,IAAI,UAAU,GAAG;QACb,OAAO,EAAE,CAAC,IAAI,CAAC,CAAC,UAAU;KAC7B;;;;;;IAMD,eAAe,GAAG;QACd,MAAM,IAAI,GAAG,EAAE,CAAC,IAAI,EAAC;;QAErB,IAAI,CAAC,OAAO,GAAG,KAAI;QACnB,IAAI,OAAO,IAAI,CAAC,KAAK,CAAC,eAAe,KAAK,UAAU,EAAE;YAClD,IAAI,CAAC,KAAK,CAAC,eAAe,GAAE;SAC/B;KACJ;;;;;;IAMD,wBAAwB,GAAG;QACvB,MAAM,IAAI,GAAG,EAAE,CAAC,IAAI,EAAC;;QAErB,IAA
I,CAAC,OAAO,GAAG,KAAI;QACnB,IAAI,CAAC,gBAAgB,GAAG,KAAI;QAC5B,IAAI,OAAO,IAAI,CAAC,KAAK,CAAC,wBAAwB,KAAK,UAAU,EAAE;YAC3D,IAAI,CAAC,KAAK,CAAC,wBAAwB,GAAE;SACxC;KACJ;;;;;;IAMD,IAAI,OAAO,GAAG;QACV,OAAO,OAAO,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC;KACzC;;;;;;IAMD,IAAI,UAAU,GAAG;QACb,OAAO,OAAO,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,UAAU,CAAC;KAC5C;;;;;;IAMD,cAAc,GAAG;QACb,aAAa,CAAC,EAAE,CAAC,IAAI,CAAC,EAAC;KAC1B;;;;;;IAMD,IAAI,gBAAgB,GAAG;QACnB,OAAO,EAAE,CAAC,IAAI,CAAC,CAAC,QAAQ;KAC3B;;;;;;IAMD,IAAI,QAAQ,GAAG;QACX,OAAO,OAAO,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,QAAQ,CAAC;KAC1C;;;;;;IAMD,IAAI,SAAS,GAAG;QACZ,OAAO,EAAE,CAAC,IAAI,CAAC,CAAC,SAAS;KAC5B;;;;;;;IAOD,IAAI,UAAU,GAAG;QACb,OAAO,EAAE,CAAC,IAAI,CAAC,CAAC,WAAW;KAC9B;;;;;;;IAOD,IAAI,YAAY,GAAG;QACf,OAAO,EAAE,CAAC,IAAI,CAAC,CAAC,OAAO;KAC1B;IACD,IAAI,YAAY,CAAC,KAAK,EAAE;QACpB,IAAI,CAAC,KAAK,EAAE;YACR,MAAM;SACT;QACD,MAAM,IAAI,GAAG,EAAE,CAAC,IAAI,EAAC;;QAErB,IAAI,CAAC,OAAO,GAAG,KAAI;QACnB,IAAI,OAAO,IAAI,CAAC,KAAK,CAAC,YAAY,KAAK,SAAS,EAAE;YAC9C,IAAI,CAAC,KAAK,CAAC,YAAY,GAAG,KAAI;SACjC;KACJ;;;;;;;IAOD,IAAI,WAAW,GAAG;QACd,OAAO,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,QAAQ;KAC5B;IACD,IAAI,WAAW,CAAC,KAAK,EAAE;QACnB,IAAI,CAAC,KAAK,EAAE;YACR,aAAa,CAAC,EAAE,CAAC,IAAI,CAAC,EAAC;SAC1B;KACJ;;;;;;;;;IASD,SAAS,GAAG;;KAEX;EACJ;;;AAGD,MAAM,CAAC,cAAc,CAAC,KAAK,CAAC,SAAS,EAAE,aAAa,EAAE;IAClD,KAAK,EAAE,KAAK;IACZ,YAAY,EAAE,IAAI;IAClB,QAAQ,EAAE,IAAI;CACjB,EAAC;;;AAGF,IAAI,OAAO,MAAM,KAAK,WAAW,IAAI,OAAO,MAAM,CAAC,KAAK,KAAK,WAAW,EAAE;IACtE,MAAM,CAAC,cAAc,CAAC,KAAK,CAAC,SAAS,EAAE,MAAM,CAAC,KAAK,CAAC,SAAS,EAAC;;;IAG9D,QAAQ,CAAC,GAAG,CAAC,MAAM,CAAC,KAAK,CAAC,SAAS,EAAE,KAAK,EAAC;CAC9C;;;;;;;;AAQD,SAAS,wBAAwB,CAAC,GAAG,EAAE;IACnC,OAAO;QACH,GAAG,GAAG;YACF,OAAO,EAAE,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC;SAC7B;QACD,GAAG,CAAC,KAAK,EAAE;YACP,EAAE,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,GAAG,MAAK;SAC9B;QACD,YAAY,EAAE,IAAI;QAClB,UAAU,EAAE,IAAI;KACnB;CACJ;;;;;;;;AAQD,SAAS,oBAAoB,CAAC,GAAG,EAAE;IAC/B,OAAO;QACH,KAAK,GAAG;YACJ,MAAM,KAAK,GAAG,EAAE,CAAC,IAAI,CAAC,CAAC,MAAK;YAC5B,OAAO,KAAK,CAAC,GAAG,CAAC,CAAC,KAAK,CAAC,KAAK,EAAE,SAAS,CAAC;SAC5C;QACD,YAAY,EAAE,IAAI;QAClB,UAAU,EAAE,IAAI;KACnB;CACJ;;;;;;;;;AASD,SAAS,aAAa,CAAC,SAAS,EAAE,KAAK,EAAE;IACrC,MAAM,IAAI,GAAG,MAAM,CAAC,IAAI,CAAC,KAAK,EAAC;IAC/B,IAAI,IAAI,CAAC,MAAM,KAAK,CAAC,EAAE;QACnB,OAAO,SAAS;KACnB;;;IAGD,SAAS,WAAW,CAAC,WAAW,EAAE,KAAK,EAAE;QACrC,SAAS,CAAC,IAAI,CAAC,IAAI,EAAE,WAAW,EAAE,KAAK,EAAC;KAC3C;;IAED,WAAW,CAAC,SAAS,GAAG,MAAM,CAAC,MAAM,CAAC,SAAS,CAAC,SAAS,EAAE;QACvD,WAAW,EAAE,EAAE,KAAK,EAAE,WAAW,EAAE,YAAY,EAAE,IAAI,EAAE,QAAQ,EAAE,IAAI,EAAE;KAC1E,EAAC;;;IAGF,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;QAClC,MAAM,GAAG,GAAG,IAAI,CAAC,CAAC,EAAC;QACnB,IAAI,EAAE,GAAG,IAAI,SAAS,CAAC,SAAS,CAAC,EAAE;YAC/B,MAAM,UAAU,GAAG,MAAM,CAAC,wBAAwB,CAAC,KAAK,EAAE,GAAG,EAAC;YAC9D,MAAM,MAAM,GAAG,OAAO,UAAU,CAAC,KAAK,KAAK,WAAU;YACrD,MAAM,CAAC,cAAc;gBACjB,WAAW,CAAC,SAAS;gBACrB,GAAG;gBACH,MAAM;sBACA,oBAAoB,CAAC,GAAG,CAAC;sBACzB,wBAAwB,CAAC,GAAG,CAAC;cACtC;SACJ;KACJ;;IAED,OAAO,WAAW;CACrB;;;;;;;;AAQD,SAAS,UAAU,CAAC,KAAK,EAAE;IACvB,IAAI,KAAK,IAAI,IAAI,IAAI,KAAK,KAAK,MAAM,CAAC,SAAS,EAAE;QAC7C,OAAO,KAAK;KACf;;IAED,IAAI,OAAO,GAAG,QAAQ,CAAC,GAAG,CAAC,KAAK,EAAC;IACjC,IAAI,OAAO,IAAI,IAAI,EAAE;QACjB,OAAO,GAAG,aAAa,CAAC,UAAU,CAAC,MAAM,CAAC,cAAc,CAAC,KAAK,CAAC,CAAC,EAAE,KAAK,EAAC;QACxE,QAAQ,CAAC,GAAG,CAAC,KAAK,EAAE,OAAO,EAAC;KAC/B;IACD,OAAO,OAAO;CACjB;;;;;;;;;AASD,AAAO,SAAS,SAAS,CAAC,WAAW,EAAE,KAAK,EAAE;IAC1C,MAAM,OAAO,GAAG,UAAU,CAAC,MAAM,CAAC,cAAc,CAAC,KAAK,CAAC,EAAC;IACxD,OAAO,IAAI,OAAO,CAAC,WAAW,EAAE,KAAK,CAAC;CACzC;;;;;;;;AAQD,AAAO,
SAAS,SAAS,CAAC,KAAK,EAAE;IAC7B,OAAO,EAAE,CAAC,KAAK,CAAC,CAAC,gBAAgB;CACpC;;;;;;;;;AASD,AAAO,SAAS,aAAa,CAAC,KAAK,EAAE,UAAU,EAAE;IAC7C,EAAE,CAAC,KAAK,CAAC,CAAC,UAAU,GAAG,WAAU;CACpC;;;;;;;;;AASD,AAAO,SAAS,gBAAgB,CAAC,KAAK,EAAE,aAAa,EAAE;IACnD,EAAE,CAAC,KAAK,CAAC,CAAC,aAAa,GAAG,cAAa;CAC1C;;;;;;;;;AASD,AAAO,SAAS,kBAAkB,CAAC,KAAK,EAAE,eAAe,EAAE;IACvD,EAAE,CAAC,KAAK,CAAC,CAAC,eAAe,GAAG,gBAAe;CAC9C;;ACtdD;;;;;;;;;;;;;;AAcA,MAAM,YAAY,GAAG,IAAI,OAAO,GAAE;;;AAGlC,MAAM,OAAO,GAAG,EAAC;AACjB,MAAM,MAAM,GAAG,EAAC;AAChB,MAAM,SAAS,GAAG,EAAC;;;;;;;AAOnB,SAAS,QAAQ,CAAC,CAAC,EAAE;IACjB,OAAO,CAAC,KAAK,IAAI,IAAI,OAAO,CAAC,KAAK,QAAQ;CAC7C;;;;;;;;AAQD,SAAS,YAAY,CAAC,WAAW,EAAE;IAC/B,MAAM,SAAS,GAAG,YAAY,CAAC,GAAG,CAAC,WAAW,EAAC;IAC/C,IAAI,SAAS,IAAI,IAAI,EAAE;QACnB,MAAM,IAAI,SAAS;YACf,kEAAkE;SACrE;KACJ;IACD,OAAO,SAAS;CACnB;;;;;;;;AAQD,SAAS,8BAA8B,CAAC,SAAS,EAAE;IAC/C,OAAO;QACH,GAAG,GAAG;YACF,MAAM,SAAS,GAAG,YAAY,CAAC,IAAI,EAAC;YACpC,IAAI,IAAI,GAAG,SAAS,CAAC,GAAG,CAAC,SAAS,EAAC;YACnC,OAAO,IAAI,IAAI,IAAI,EAAE;gBACjB,IAAI,IAAI,CAAC,YAAY,KAAK,SAAS,EAAE;oBACjC,OAAO,IAAI,CAAC,QAAQ;iBACvB;gBACD,IAAI,GAAG,IAAI,CAAC,KAAI;aACnB;YACD,OAAO,IAAI;SACd;;QAED,GAAG,CAAC,QAAQ,EAAE;YACV,IAAI,OAAO,QAAQ,KAAK,UAAU,IAAI,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE;gBACvD,QAAQ,GAAG,KAAI;aAClB;YACD,MAAM,SAAS,GAAG,YAAY,CAAC,IAAI,EAAC;;;YAGpC,IAAI,IAAI,GAAG,KAAI;YACf,IAAI,IAAI,GAAG,SAAS,CAAC,GAAG,CAAC,SAAS,EAAC;YACnC,OAAO,IAAI,IAAI,IAAI,EAAE;gBACjB,IAAI,IAAI,CAAC,YAAY,KAAK,SAAS,EAAE;;oBAEjC,IAAI,IAAI,KAAK,IAAI,EAAE;wBACf,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,KAAI;qBACxB,MAAM,IAAI,IAAI,CAAC,IAAI,KAAK,IAAI,EAAE;wBAC3B,SAAS,CAAC,GAAG,CAAC,SAAS,EAAE,IAAI,CAAC,IAAI,EAAC;qBACtC,MAAM;wBACH,SAAS,CAAC,MAAM,CAAC,SAAS,EAAC;qBAC9B;iBACJ,MAAM;oBACH,IAAI,GAAG,KAAI;iBACd;;gBAED,IAAI,GAAG,IAAI,CAAC,KAAI;aACnB;;;YAGD,IAAI,QAAQ,KAAK,IAAI,EAAE;gBACnB,MAAM,OAAO,GAAG;oBACZ,QAAQ;oBACR,YAAY,EAAE,SAAS;oBACvB,OAAO,EAAE,KAAK;oBACd,IAAI,EAAE,KAAK;oBACX,IAAI,EAAE,IAAI;kBACb;gBACD,IAAI,IAAI,KAAK,IAAI,EAAE;oBACf,SAAS,CAAC,GAAG,CAAC,SAAS,EAAE,OAAO,EAAC;iBACpC,MAAM;oBACH,IAAI,CAAC,IAAI,GAAG,QAAO;iBACtB;aACJ;SACJ;QACD,YAAY,EAAE,IAAI;QAClB,UAAU,EAAE,IAAI;KACnB;CACJ;;;;;;;;AAQD,SAAS,oBAAoB,CAAC,oBAAoB,EAAE,SAAS,EAAE;IAC3D,MAAM,CAAC,cAAc;QACjB,oBAAoB;QACpB,CAAC,EAAE,EAAE,SAAS,CAAC,CAAC;QAChB,8BAA8B,CAAC,SAAS,CAAC;MAC5C;CACJ;;;;;;;;AAQD,SAAS,uBAAuB,CAAC,UAAU,EAAE;;IAEzC,SAAS,iBAAiB,GAAG;QACzB,WAAW,CAAC,IAAI,CAAC,IAAI,EAAC;KACzB;;IAED,iBAAiB,CAAC,SAAS,GAAG,MAAM,CAAC,MAAM,CAAC,WAAW,CAAC,SAAS,EAAE;QAC/D,WAAW,EAAE;YACT,KAAK,EAAE,iBAAiB;YACxB,YAAY,EAAE,IAAI;YAClB,QAAQ,EAAE,IAAI;SACjB;KACJ,EAAC;;IAEF,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,UAAU,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;QACxC,oBAAoB,CAAC,iBAAiB,CAAC,SAAS,EAAE,UAAU,CAAC,CAAC,CAAC,EAAC;KACnE;;IAED,OAAO,iBAAiB;CAC3B;;;;;;;;;;;;;;;AAeD,SAAS,WAAW,GAAG;;IAEnB,IAAI,IAAI,YAAY,WAAW,EAAE;QAC7B,YAAY,CAAC,GAAG,CAAC,IAAI,EAAE,IAAI,GAAG,EAAE,EAAC;QACjC,MAAM;KACT;IACD,IAAI,SAAS,CAAC,MAAM,KAAK,CAAC,IAAI,KAAK,CAAC,OAAO,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,EAAE;QACvD,OAAO,uBAAuB,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC;KAC/C;IACD,IAAI,SAAS,CAAC,MAAM,GAAG,CAAC,EAAE;QACtB,MAAM,KAAK,GAAG,IAAI,KAAK,CAAC,SAAS,CAAC,MAAM,EAAC;QACzC,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,SAAS,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;YACvC,KAAK,CAAC,CAAC,CAAC,GAAG,SAAS,CAAC,CAAC,EAAC;SAC1B;QACD,OAAO,uBAAuB,CAAC,KAAK,CAAC;KACxC;IACD,MAAM,IAAI,SAAS,CAAC,mCAAmC,CAAC;;CAE3D;;;AAGD,WAAW,CAAC,SAAS,GAAG;;;;;;;;IAQpB,gBAAgB,CAAC,SAAS,EAAE,QAAQ,EAAE,OAAO,EAAE;QAC3C,IAAI,QAAQ,IAAI,IAAI,EAAE;YAClB,MAAM;SACT;QACD,IAAI,OAAO,QAAQ,KAAK,UAAU,IAAI,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE;YACvD,MAAM,IAAI,SAAS,CAAC,+CAA+C,CAAC;SACvE;;QAED,MAAM,SAAS,GAAG,YA
AY,CAAC,IAAI,EAAC;QACpC,MAAM,YAAY,GAAG,QAAQ,CAAC,OAAO,EAAC;QACtC,MAAM,OAAO,GAAG,YAAY;cACtB,OAAO,CAAC,OAAO,CAAC,OAAO,CAAC;cACxB,OAAO,CAAC,OAAO,EAAC;QACtB,MAAM,YAAY,GAAG,OAAO,GAAG,OAAO,GAAG,OAAM;QAC/C,MAAM,OAAO,GAAG;YACZ,QAAQ;YACR,YAAY;YACZ,OAAO,EAAE,YAAY,IAAI,OAAO,CAAC,OAAO,CAAC,OAAO,CAAC;YACjD,IAAI,EAAE,YAAY,IAAI,OAAO,CAAC,OAAO,CAAC,IAAI,CAAC;YAC3C,IAAI,EAAE,IAAI;UACb;;;QAGD,IAAI,IAAI,GAAG,SAAS,CAAC,GAAG,CAAC,SAAS,EAAC;QACnC,IAAI,IAAI,KAAK,SAAS,EAAE;YACpB,SAAS,CAAC,GAAG,CAAC,SAAS,EAAE,OAAO,EAAC;YACjC,MAAM;SACT;;;QAGD,IAAI,IAAI,GAAG,KAAI;QACf,OAAO,IAAI,IAAI,IAAI,EAAE;YACjB;gBACI,IAAI,CAAC,QAAQ,KAAK,QAAQ;gBAC1B,IAAI,CAAC,YAAY,KAAK,YAAY;cACpC;;gBAEE,MAAM;aACT;YACD,IAAI,GAAG,KAAI;YACX,IAAI,GAAG,IAAI,CAAC,KAAI;SACnB;;;QAGD,IAAI,CAAC,IAAI,GAAG,QAAO;KACtB;;;;;;;;;IASD,mBAAmB,CAAC,SAAS,EAAE,QAAQ,EAAE,OAAO,EAAE;QAC9C,IAAI,QAAQ,IAAI,IAAI,EAAE;YAClB,MAAM;SACT;;QAED,MAAM,SAAS,GAAG,YAAY,CAAC,IAAI,EAAC;QACpC,MAAM,OAAO,GAAG,QAAQ,CAAC,OAAO,CAAC;cAC3B,OAAO,CAAC,OAAO,CAAC,OAAO,CAAC;cACxB,OAAO,CAAC,OAAO,EAAC;QACtB,MAAM,YAAY,GAAG,OAAO,GAAG,OAAO,GAAG,OAAM;;QAE/C,IAAI,IAAI,GAAG,KAAI;QACf,IAAI,IAAI,GAAG,SAAS,CAAC,GAAG,CAAC,SAAS,EAAC;QACnC,OAAO,IAAI,IAAI,IAAI,EAAE;YACjB;gBACI,IAAI,CAAC,QAAQ,KAAK,QAAQ;gBAC1B,IAAI,CAAC,YAAY,KAAK,YAAY;cACpC;gBACE,IAAI,IAAI,KAAK,IAAI,EAAE;oBACf,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,KAAI;iBACxB,MAAM,IAAI,IAAI,CAAC,IAAI,KAAK,IAAI,EAAE;oBAC3B,SAAS,CAAC,GAAG,CAAC,SAAS,EAAE,IAAI,CAAC,IAAI,EAAC;iBACtC,MAAM;oBACH,SAAS,CAAC,MAAM,CAAC,SAAS,EAAC;iBAC9B;gBACD,MAAM;aACT;;YAED,IAAI,GAAG,KAAI;YACX,IAAI,GAAG,IAAI,CAAC,KAAI;SACnB;KACJ;;;;;;;IAOD,aAAa,CAAC,KAAK,EAAE;QACjB,IAAI,KAAK,IAAI,IAAI,IAAI,OAAO,KAAK,CAAC,IAAI,KAAK,QAAQ,EAAE;YACjD,MAAM,IAAI,SAAS,CAAC,kCAAkC,CAAC;SAC1D;;;QAGD,MAAM,SAAS,GAAG,YAAY,CAAC,IAAI,EAAC;QACpC,MAAM,SAAS,GAAG,KAAK,CAAC,KAAI;QAC5B,IAAI,IAAI,GAAG,SAAS,CAAC,GAAG,CAAC,SAAS,EAAC;QACnC,IAAI,IAAI,IAAI,IAAI,EAAE;YACd,OAAO,IAAI;SACd;;;QAGD,MAAM,YAAY,GAAG,SAAS,CAAC,IAAI,EAAE,KAAK,EAAC;;;;QAI3C,IAAI,IAAI,GAAG,KAAI;QACf,OAAO,IAAI,IAAI,IAAI,EAAE;;YAEjB,IAAI,IAAI,CAAC,IAAI,EAAE;gBACX,IAAI,IAAI,KAAK,IAAI,EAAE;oBACf,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,KAAI;iBACxB,MAAM,IAAI,IAAI,CAAC,IAAI,KAAK,IAAI,EAAE;oBAC3B,SAAS,CAAC,GAAG,CAAC,SAAS,EAAE,IAAI,CAAC,IAAI,EAAC;iBACtC,MAAM;oBACH,SAAS,CAAC,MAAM,CAAC,SAAS,EAAC;iBAC9B;aACJ,MAAM;gBACH,IAAI,GAAG,KAAI;aACd;;;YAGD,kBAAkB;gBACd,YAAY;gBACZ,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC,QAAQ,GAAG,IAAI;cACtC;YACD,IAAI,OAAO,IAAI,CAAC,QAAQ,KAAK,UAAU,EAAE;gBACrC,IAAI;oBACA,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,IAAI,EAAE,YAAY,EAAC;iBACzC,CAAC,OAAO,GAAG,EAAE;oBACV;wBACI,OAAO,OAAO,KAAK,WAAW;wBAC9B,OAAO,OAAO,CAAC,KAAK,KAAK,UAAU;sBACrC;wBACE,OAAO,CAAC,KAAK,CAAC,GAAG,EAAC;qBACrB;iBACJ;aACJ,MAAM;gBACH,IAAI,CAAC,YAAY,KAAK,SAAS;gBAC/B,OAAO,IAAI,CAAC,QAAQ,CAAC,WAAW,KAAK,UAAU;cACjD;gBACE,IAAI,CAAC,QAAQ,CAAC,WAAW,CAAC,YAAY,EAAC;aAC1C;;;YAGD,IAAI,SAAS,CAAC,YAAY,CAAC,EAAE;gBACzB,KAAK;aACR;;YAED,IAAI,GAAG,IAAI,CAAC,KAAI;SACnB;QACD,kBAAkB,CAAC,YAAY,EAAE,IAAI,EAAC;QACtC,aAAa,CAAC,YAAY,EAAE,CAAC,EAAC;QAC9B,gBAAgB,CAAC,YAAY,EAAE,IAAI,EAAC;;QAEpC,OAAO,CAAC,YAAY,CAAC,gBAAgB;KACxC;EACJ;;;AAGD,MAAM,CAAC,cAAc,CAAC,WAAW,CAAC,SAAS,EAAE,aAAa,EAAE;IACxD,KAAK,EAAE,WAAW;IAClB,YAAY,EAAE,IAAI;IAClB,QAAQ,EAAE,IAAI;CACjB,EAAC;;;AAGF;IACI,OAAO,MAAM,KAAK,WAAW;IAC7B,OAAO,MAAM,CAAC,WAAW,KAAK,WAAW;EAC3C;IACE,MAAM,CAAC,cAAc,CAAC,WAAW,CAAC,SAAS,EAAE,MAAM,CAAC,WAAW,CAAC,SAAS,EAAC;CAC7E;;;;;"} \ No newline at end of file diff --git a/node_modules/event-target-shim/dist/event-target-shim.umd.js b/node_modules/event-target-shim/dist/event-target-shim.umd.js new file mode 100644 index 0000000..e7cf5d4 --- /dev/null +++ 
b/node_modules/event-target-shim/dist/event-target-shim.umd.js @@ -0,0 +1,6 @@ +/** + * @author Toru Nagashima + * @copyright 2015 Toru Nagashima. All rights reserved. + * See LICENSE file in root directory for full license. + */(function(a,b){"object"==typeof exports&&"undefined"!=typeof module?b(exports):"function"==typeof define&&define.amd?define(["exports"],b):(a=a||self,b(a.EventTargetShim={}))})(this,function(a){"use strict";function b(a){return b="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(a){return typeof a}:function(a){return a&&"function"==typeof Symbol&&a.constructor===Symbol&&a!==Symbol.prototype?"symbol":typeof a},b(a)}function c(a){var b=u.get(a);return console.assert(null!=b,"'this' is expected an Event object, but got",a),b}function d(a){return null==a.passiveListener?void(!a.event.cancelable||(a.canceled=!0,"function"==typeof a.event.preventDefault&&a.event.preventDefault())):void("undefined"!=typeof console&&"function"==typeof console.error&&console.error("Unable to preventDefault inside passive event listener invocation.",a.passiveListener))}function e(a,b){u.set(this,{eventTarget:a,event:b,eventPhase:2,currentTarget:a,canceled:!1,stopped:!1,immediateStopped:!1,passiveListener:null,timeStamp:b.timeStamp||Date.now()}),Object.defineProperty(this,"isTrusted",{value:!1,enumerable:!0});for(var c,d=Object.keys(b),e=0;e}\n * @private\n */\nconst privateData = new WeakMap()\n\n/**\n * Cache for wrapper classes.\n * @type {WeakMap}\n * @private\n */\nconst wrappers = new WeakMap()\n\n/**\n * Get private data.\n * @param {Event} event The event object to get private data.\n * @returns {PrivateData} The private data of the event.\n * @private\n */\nfunction pd(event) {\n const retv = privateData.get(event)\n console.assert(\n retv != null,\n \"'this' is expected an Event object, but got\",\n event\n )\n return retv\n}\n\n/**\n * https://dom.spec.whatwg.org/#set-the-canceled-flag\n * @param data {PrivateData} private data.\n */\nfunction setCancelFlag(data) {\n if (data.passiveListener != null) {\n if (\n typeof console !== \"undefined\" &&\n typeof console.error === \"function\"\n ) {\n console.error(\n \"Unable to preventDefault inside passive event listener invocation.\",\n data.passiveListener\n )\n }\n return\n }\n if (!data.event.cancelable) {\n return\n }\n\n data.canceled = true\n if (typeof data.event.preventDefault === \"function\") {\n data.event.preventDefault()\n }\n}\n\n/**\n * @see https://dom.spec.whatwg.org/#interface-event\n * @private\n */\n/**\n * The event wrapper.\n * @constructor\n * @param {EventTarget} eventTarget The event target of this dispatching.\n * @param {Event|{type:string}} event The original event to wrap.\n */\nfunction Event(eventTarget, event) {\n privateData.set(this, {\n eventTarget,\n event,\n eventPhase: 2,\n currentTarget: eventTarget,\n canceled: false,\n stopped: false,\n immediateStopped: false,\n passiveListener: null,\n timeStamp: event.timeStamp || Date.now(),\n })\n\n // https://heycam.github.io/webidl/#Unforgeable\n Object.defineProperty(this, \"isTrusted\", { value: false, enumerable: true })\n\n // Define accessors\n const keys = Object.keys(event)\n for (let i = 0; i < keys.length; ++i) {\n const key = keys[i]\n if (!(key in this)) {\n Object.defineProperty(this, key, defineRedirectDescriptor(key))\n }\n }\n}\n\n// Should be enumerable, but class methods are not enumerable.\nEvent.prototype = {\n /**\n * The type of this event.\n * @type {string}\n */\n get type() {\n return pd(this).event.type\n },\n\n 
/**\n * The target of this event.\n * @type {EventTarget}\n */\n get target() {\n return pd(this).eventTarget\n },\n\n /**\n * The target of this event.\n * @type {EventTarget}\n */\n get currentTarget() {\n return pd(this).currentTarget\n },\n\n /**\n * @returns {EventTarget[]} The composed path of this event.\n */\n composedPath() {\n const currentTarget = pd(this).currentTarget\n if (currentTarget == null) {\n return []\n }\n return [currentTarget]\n },\n\n /**\n * Constant of NONE.\n * @type {number}\n */\n get NONE() {\n return 0\n },\n\n /**\n * Constant of CAPTURING_PHASE.\n * @type {number}\n */\n get CAPTURING_PHASE() {\n return 1\n },\n\n /**\n * Constant of AT_TARGET.\n * @type {number}\n */\n get AT_TARGET() {\n return 2\n },\n\n /**\n * Constant of BUBBLING_PHASE.\n * @type {number}\n */\n get BUBBLING_PHASE() {\n return 3\n },\n\n /**\n * The target of this event.\n * @type {number}\n */\n get eventPhase() {\n return pd(this).eventPhase\n },\n\n /**\n * Stop event bubbling.\n * @returns {void}\n */\n stopPropagation() {\n const data = pd(this)\n\n data.stopped = true\n if (typeof data.event.stopPropagation === \"function\") {\n data.event.stopPropagation()\n }\n },\n\n /**\n * Stop event bubbling.\n * @returns {void}\n */\n stopImmediatePropagation() {\n const data = pd(this)\n\n data.stopped = true\n data.immediateStopped = true\n if (typeof data.event.stopImmediatePropagation === \"function\") {\n data.event.stopImmediatePropagation()\n }\n },\n\n /**\n * The flag to be bubbling.\n * @type {boolean}\n */\n get bubbles() {\n return Boolean(pd(this).event.bubbles)\n },\n\n /**\n * The flag to be cancelable.\n * @type {boolean}\n */\n get cancelable() {\n return Boolean(pd(this).event.cancelable)\n },\n\n /**\n * Cancel this event.\n * @returns {void}\n */\n preventDefault() {\n setCancelFlag(pd(this))\n },\n\n /**\n * The flag to indicate cancellation state.\n * @type {boolean}\n */\n get defaultPrevented() {\n return pd(this).canceled\n },\n\n /**\n * The flag to be composed.\n * @type {boolean}\n */\n get composed() {\n return Boolean(pd(this).event.composed)\n },\n\n /**\n * The unix time of this event.\n * @type {number}\n */\n get timeStamp() {\n return pd(this).timeStamp\n },\n\n /**\n * The target of this event.\n * @type {EventTarget}\n * @deprecated\n */\n get srcElement() {\n return pd(this).eventTarget\n },\n\n /**\n * The flag to stop event bubbling.\n * @type {boolean}\n * @deprecated\n */\n get cancelBubble() {\n return pd(this).stopped\n },\n set cancelBubble(value) {\n if (!value) {\n return\n }\n const data = pd(this)\n\n data.stopped = true\n if (typeof data.event.cancelBubble === \"boolean\") {\n data.event.cancelBubble = true\n }\n },\n\n /**\n * The flag to indicate cancellation state.\n * @type {boolean}\n * @deprecated\n */\n get returnValue() {\n return !pd(this).canceled\n },\n set returnValue(value) {\n if (!value) {\n setCancelFlag(pd(this))\n }\n },\n\n /**\n * Initialize this event object. 
But do nothing under event dispatching.\n * @param {string} type The event type.\n * @param {boolean} [bubbles=false] The flag to be possible to bubble up.\n * @param {boolean} [cancelable=false] The flag to be possible to cancel.\n * @deprecated\n */\n initEvent() {\n // Do nothing.\n },\n}\n\n// `constructor` is not enumerable.\nObject.defineProperty(Event.prototype, \"constructor\", {\n value: Event,\n configurable: true,\n writable: true,\n})\n\n// Ensure `event instanceof window.Event` is `true`.\nif (typeof window !== \"undefined\" && typeof window.Event !== \"undefined\") {\n Object.setPrototypeOf(Event.prototype, window.Event.prototype)\n\n // Make association for wrappers.\n wrappers.set(window.Event.prototype, Event)\n}\n\n/**\n * Get the property descriptor to redirect a given property.\n * @param {string} key Property name to define property descriptor.\n * @returns {PropertyDescriptor} The property descriptor to redirect the property.\n * @private\n */\nfunction defineRedirectDescriptor(key) {\n return {\n get() {\n return pd(this).event[key]\n },\n set(value) {\n pd(this).event[key] = value\n },\n configurable: true,\n enumerable: true,\n }\n}\n\n/**\n * Get the property descriptor to call a given method property.\n * @param {string} key Property name to define property descriptor.\n * @returns {PropertyDescriptor} The property descriptor to call the method property.\n * @private\n */\nfunction defineCallDescriptor(key) {\n return {\n value() {\n const event = pd(this).event\n return event[key].apply(event, arguments)\n },\n configurable: true,\n enumerable: true,\n }\n}\n\n/**\n * Define new wrapper class.\n * @param {Function} BaseEvent The base wrapper class.\n * @param {Object} proto The prototype of the original event.\n * @returns {Function} The defined wrapper class.\n * @private\n */\nfunction defineWrapper(BaseEvent, proto) {\n const keys = Object.keys(proto)\n if (keys.length === 0) {\n return BaseEvent\n }\n\n /** CustomEvent */\n function CustomEvent(eventTarget, event) {\n BaseEvent.call(this, eventTarget, event)\n }\n\n CustomEvent.prototype = Object.create(BaseEvent.prototype, {\n constructor: { value: CustomEvent, configurable: true, writable: true },\n })\n\n // Define accessors.\n for (let i = 0; i < keys.length; ++i) {\n const key = keys[i]\n if (!(key in BaseEvent.prototype)) {\n const descriptor = Object.getOwnPropertyDescriptor(proto, key)\n const isFunc = typeof descriptor.value === \"function\"\n Object.defineProperty(\n CustomEvent.prototype,\n key,\n isFunc\n ? 
defineCallDescriptor(key)\n : defineRedirectDescriptor(key)\n )\n }\n }\n\n return CustomEvent\n}\n\n/**\n * Get the wrapper class of a given prototype.\n * @param {Object} proto The prototype of the original event to get its wrapper.\n * @returns {Function} The wrapper class.\n * @private\n */\nfunction getWrapper(proto) {\n if (proto == null || proto === Object.prototype) {\n return Event\n }\n\n let wrapper = wrappers.get(proto)\n if (wrapper == null) {\n wrapper = defineWrapper(getWrapper(Object.getPrototypeOf(proto)), proto)\n wrappers.set(proto, wrapper)\n }\n return wrapper\n}\n\n/**\n * Wrap a given event to management a dispatching.\n * @param {EventTarget} eventTarget The event target of this dispatching.\n * @param {Object} event The event to wrap.\n * @returns {Event} The wrapper instance.\n * @private\n */\nexport function wrapEvent(eventTarget, event) {\n const Wrapper = getWrapper(Object.getPrototypeOf(event))\n return new Wrapper(eventTarget, event)\n}\n\n/**\n * Get the immediateStopped flag of a given event.\n * @param {Event} event The event to get.\n * @returns {boolean} The flag to stop propagation immediately.\n * @private\n */\nexport function isStopped(event) {\n return pd(event).immediateStopped\n}\n\n/**\n * Set the current event phase of a given event.\n * @param {Event} event The event to set current target.\n * @param {number} eventPhase New event phase.\n * @returns {void}\n * @private\n */\nexport function setEventPhase(event, eventPhase) {\n pd(event).eventPhase = eventPhase\n}\n\n/**\n * Set the current target of a given event.\n * @param {Event} event The event to set current target.\n * @param {EventTarget|null} currentTarget New current target.\n * @returns {void}\n * @private\n */\nexport function setCurrentTarget(event, currentTarget) {\n pd(event).currentTarget = currentTarget\n}\n\n/**\n * Set a passive listener of a given event.\n * @param {Event} event The event to set current target.\n * @param {Function|null} passiveListener New passive listener.\n * @returns {void}\n * @private\n */\nexport function setPassiveListener(event, passiveListener) {\n pd(event).passiveListener = passiveListener\n}\n","import {\n isStopped,\n setCurrentTarget,\n setEventPhase,\n setPassiveListener,\n wrapEvent,\n} from \"./event.mjs\"\n\n/**\n * @typedef {object} ListenerNode\n * @property {Function} listener\n * @property {1|2|3} listenerType\n * @property {boolean} passive\n * @property {boolean} once\n * @property {ListenerNode|null} next\n * @private\n */\n\n/**\n * @type {WeakMap>}\n * @private\n */\nconst listenersMap = new WeakMap()\n\n// Listener types\nconst CAPTURE = 1\nconst BUBBLE = 2\nconst ATTRIBUTE = 3\n\n/**\n * Check whether a given value is an object or not.\n * @param {any} x The value to check.\n * @returns {boolean} `true` if the value is an object.\n */\nfunction isObject(x) {\n return x !== null && typeof x === \"object\" //eslint-disable-line no-restricted-syntax\n}\n\n/**\n * Get listeners.\n * @param {EventTarget} eventTarget The event target to get.\n * @returns {Map} The listeners.\n * @private\n */\nfunction getListeners(eventTarget) {\n const listeners = listenersMap.get(eventTarget)\n if (listeners == null) {\n throw new TypeError(\n \"'this' is expected an EventTarget object, but got another value.\"\n )\n }\n return listeners\n}\n\n/**\n * Get the property descriptor for the event attribute of a given event.\n * @param {string} eventName The event name to get property descriptor.\n * @returns {PropertyDescriptor} The property 
descriptor.\n * @private\n */\nfunction defineEventAttributeDescriptor(eventName) {\n return {\n get() {\n const listeners = getListeners(this)\n let node = listeners.get(eventName)\n while (node != null) {\n if (node.listenerType === ATTRIBUTE) {\n return node.listener\n }\n node = node.next\n }\n return null\n },\n\n set(listener) {\n if (typeof listener !== \"function\" && !isObject(listener)) {\n listener = null // eslint-disable-line no-param-reassign\n }\n const listeners = getListeners(this)\n\n // Traverse to the tail while removing old value.\n let prev = null\n let node = listeners.get(eventName)\n while (node != null) {\n if (node.listenerType === ATTRIBUTE) {\n // Remove old value.\n if (prev !== null) {\n prev.next = node.next\n } else if (node.next !== null) {\n listeners.set(eventName, node.next)\n } else {\n listeners.delete(eventName)\n }\n } else {\n prev = node\n }\n\n node = node.next\n }\n\n // Add new value.\n if (listener !== null) {\n const newNode = {\n listener,\n listenerType: ATTRIBUTE,\n passive: false,\n once: false,\n next: null,\n }\n if (prev === null) {\n listeners.set(eventName, newNode)\n } else {\n prev.next = newNode\n }\n }\n },\n configurable: true,\n enumerable: true,\n }\n}\n\n/**\n * Define an event attribute (e.g. `eventTarget.onclick`).\n * @param {Object} eventTargetPrototype The event target prototype to define an event attrbite.\n * @param {string} eventName The event name to define.\n * @returns {void}\n */\nfunction defineEventAttribute(eventTargetPrototype, eventName) {\n Object.defineProperty(\n eventTargetPrototype,\n `on${eventName}`,\n defineEventAttributeDescriptor(eventName)\n )\n}\n\n/**\n * Define a custom EventTarget with event attributes.\n * @param {string[]} eventNames Event names for event attributes.\n * @returns {EventTarget} The custom EventTarget.\n * @private\n */\nfunction defineCustomEventTarget(eventNames) {\n /** CustomEventTarget */\n function CustomEventTarget() {\n EventTarget.call(this)\n }\n\n CustomEventTarget.prototype = Object.create(EventTarget.prototype, {\n constructor: {\n value: CustomEventTarget,\n configurable: true,\n writable: true,\n },\n })\n\n for (let i = 0; i < eventNames.length; ++i) {\n defineEventAttribute(CustomEventTarget.prototype, eventNames[i])\n }\n\n return CustomEventTarget\n}\n\n/**\n * EventTarget.\n *\n * - This is constructor if no arguments.\n * - This is a function which returns a CustomEventTarget constructor if there are arguments.\n *\n * For example:\n *\n * class A extends EventTarget {}\n * class B extends EventTarget(\"message\") {}\n * class C extends EventTarget(\"message\", \"error\") {}\n * class D extends EventTarget([\"message\", \"error\"]) {}\n */\nfunction EventTarget() {\n /*eslint-disable consistent-return */\n if (this instanceof EventTarget) {\n listenersMap.set(this, new Map())\n return\n }\n if (arguments.length === 1 && Array.isArray(arguments[0])) {\n return defineCustomEventTarget(arguments[0])\n }\n if (arguments.length > 0) {\n const types = new Array(arguments.length)\n for (let i = 0; i < arguments.length; ++i) {\n types[i] = arguments[i]\n }\n return defineCustomEventTarget(types)\n }\n throw new TypeError(\"Cannot call a class as a function\")\n /*eslint-enable consistent-return */\n}\n\n// Should be enumerable, but class methods are not enumerable.\nEventTarget.prototype = {\n /**\n * Add a given listener to this event target.\n * @param {string} eventName The event name to add.\n * @param {Function} listener The listener to add.\n * @param 
{boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener.\n * @returns {void}\n */\n addEventListener(eventName, listener, options) {\n if (listener == null) {\n return\n }\n if (typeof listener !== \"function\" && !isObject(listener)) {\n throw new TypeError(\"'listener' should be a function or an object.\")\n }\n\n const listeners = getListeners(this)\n const optionsIsObj = isObject(options)\n const capture = optionsIsObj\n ? Boolean(options.capture)\n : Boolean(options)\n const listenerType = capture ? CAPTURE : BUBBLE\n const newNode = {\n listener,\n listenerType,\n passive: optionsIsObj && Boolean(options.passive),\n once: optionsIsObj && Boolean(options.once),\n next: null,\n }\n\n // Set it as the first node if the first node is null.\n let node = listeners.get(eventName)\n if (node === undefined) {\n listeners.set(eventName, newNode)\n return\n }\n\n // Traverse to the tail while checking duplication..\n let prev = null\n while (node != null) {\n if (\n node.listener === listener &&\n node.listenerType === listenerType\n ) {\n // Should ignore duplication.\n return\n }\n prev = node\n node = node.next\n }\n\n // Add it.\n prev.next = newNode\n },\n\n /**\n * Remove a given listener from this event target.\n * @param {string} eventName The event name to remove.\n * @param {Function} listener The listener to remove.\n * @param {boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener.\n * @returns {void}\n */\n removeEventListener(eventName, listener, options) {\n if (listener == null) {\n return\n }\n\n const listeners = getListeners(this)\n const capture = isObject(options)\n ? Boolean(options.capture)\n : Boolean(options)\n const listenerType = capture ? CAPTURE : BUBBLE\n\n let prev = null\n let node = listeners.get(eventName)\n while (node != null) {\n if (\n node.listener === listener &&\n node.listenerType === listenerType\n ) {\n if (prev !== null) {\n prev.next = node.next\n } else if (node.next !== null) {\n listeners.set(eventName, node.next)\n } else {\n listeners.delete(eventName)\n }\n return\n }\n\n prev = node\n node = node.next\n }\n },\n\n /**\n * Dispatch a given event.\n * @param {Event|{type:string}} event The event to dispatch.\n * @returns {boolean} `false` if canceled.\n */\n dispatchEvent(event) {\n if (event == null || typeof event.type !== \"string\") {\n throw new TypeError('\"event.type\" should be a string.')\n }\n\n // If listeners aren't registered, terminate.\n const listeners = getListeners(this)\n const eventName = event.type\n let node = listeners.get(eventName)\n if (node == null) {\n return true\n }\n\n // Since we cannot rewrite several properties, so wrap object.\n const wrappedEvent = wrapEvent(this, event)\n\n // This doesn't process capturing phase and bubbling phase.\n // This isn't participating in a tree.\n let prev = null\n while (node != null) {\n // Remove this listener if it's once\n if (node.once) {\n if (prev !== null) {\n prev.next = node.next\n } else if (node.next !== null) {\n listeners.set(eventName, node.next)\n } else {\n listeners.delete(eventName)\n }\n } else {\n prev = node\n }\n\n // Call this listener\n setPassiveListener(\n wrappedEvent,\n node.passive ? 
node.listener : null\n )\n if (typeof node.listener === \"function\") {\n try {\n node.listener.call(this, wrappedEvent)\n } catch (err) {\n if (\n typeof console !== \"undefined\" &&\n typeof console.error === \"function\"\n ) {\n console.error(err)\n }\n }\n } else if (\n node.listenerType !== ATTRIBUTE &&\n typeof node.listener.handleEvent === \"function\"\n ) {\n node.listener.handleEvent(wrappedEvent)\n }\n\n // Break if `event.stopImmediatePropagation` was called.\n if (isStopped(wrappedEvent)) {\n break\n }\n\n node = node.next\n }\n setPassiveListener(wrappedEvent, null)\n setEventPhase(wrappedEvent, 0)\n setCurrentTarget(wrappedEvent, null)\n\n return !wrappedEvent.defaultPrevented\n },\n}\n\n// `constructor` is not enumerable.\nObject.defineProperty(EventTarget.prototype, \"constructor\", {\n value: EventTarget,\n configurable: true,\n writable: true,\n})\n\n// Ensure `eventTarget instanceof window.EventTarget` is `true`.\nif (\n typeof window !== \"undefined\" &&\n typeof window.EventTarget !== \"undefined\"\n) {\n Object.setPrototypeOf(EventTarget.prototype, window.EventTarget.prototype)\n}\n\nexport { defineEventAttribute, EventTarget }\nexport default EventTarget\n"],"names":["pd","event","retv","privateData","get","console","assert","setCancelFlag","data","passiveListener","cancelable","canceled","preventDefault","error","Event","eventTarget","set","eventPhase","currentTarget","stopped","immediateStopped","timeStamp","Date","now","Object","defineProperty","value","enumerable","key","keys","i","length","defineRedirectDescriptor","configurable","defineCallDescriptor","apply","arguments","defineWrapper","BaseEvent","proto","CustomEvent","call","prototype","create","constructor","writable","descriptor","getOwnPropertyDescriptor","isFunc","getWrapper","wrapper","wrappers","getPrototypeOf","wrapEvent","Wrapper","isStopped","setEventPhase","setCurrentTarget","setPassiveListener","isObject","x","_typeof","getListeners","listeners","listenersMap","TypeError","defineEventAttributeDescriptor","eventName","node","listenerType","listener","next","prev","delete","newNode","passive","once","defineEventAttribute","eventTargetPrototype","defineCustomEventTarget","eventNames","CustomEventTarget","EventTarget","Map","Array","isArray","types","WeakMap","type","target","composedPath","NONE","CAPTURING_PHASE","AT_TARGET","BUBBLING_PHASE","stopPropagation","stopImmediatePropagation","bubbles","defaultPrevented","composed","srcElement","cancelBubble","returnValue","initEvent","window","setPrototypeOf","CAPTURE","BUBBLE","addEventListener","options","optionsIsObj","capture","removeEventListener","dispatchEvent","wrappedEvent","err","handleEvent"],"mappings":";;;;wbAkCA,QAASA,CAAAA,CAAT,CAAYC,CAAZ,CAAmB,IACTC,CAAAA,CAAI,CAAGC,CAAW,CAACC,GAAZD,CAAgBF,CAAhBE,QACbE,CAAAA,OAAO,CAACC,MAARD,CACY,IAARH,EAAAA,CADJG,CAEI,6CAFJA,CAGIJ,CAHJI,EAKOH,EAOX,QAASK,CAAAA,CAAT,CAAuBC,CAAvB,CAA6B,OACG,KAAxBA,EAAAA,CAAI,CAACC,eADgB,MAarB,CAACD,CAAI,CAACP,KAALO,CAAWE,UAbS,GAiBzBF,CAAI,CAACG,QAALH,GAjByB,CAkBgB,UAArC,QAAOA,CAAAA,CAAI,CAACP,KAALO,CAAWI,cAlBG,EAmBrBJ,CAAI,CAACP,KAALO,CAAWI,cAAXJ,EAnBqB,QAGE,WAAnB,QAAOH,CAAAA,OAAP,EACyB,UAAzB,QAAOA,CAAAA,OAAO,CAACQ,KAJE,EAMjBR,OAAO,CAACQ,KAARR,CACI,oEADJA,CAEIG,CAAI,CAACC,eAFTJ,CANiB,EAiC7B,QAASS,CAAAA,CAAT,CAAeC,CAAf,CAA4Bd,CAA5B,CAAmC,CAC/BE,CAAW,CAACa,GAAZb,CAAgB,IAAhBA,CAAsB,CAClBY,WAAW,CAAXA,CADkB,CAElBd,KAAK,CAALA,CAFkB,CAGlBgB,UAAU,CAAE,CAHM,CAIlBC,aAAa,CAAEH,CAJG,CAKlBJ,QAAQ,GALU,CAMlBQ,OAAO,GANW,CAOlBC,gBAAgB,GAPE,CAQlBX,eAAe,CAAE,IARC,CASlBY,SAAS,CAAEpB,CAAK,CAACoB,S
AANpB,EAAmBqB,IAAI,CAACC,GAALD,EATZ,CAAtBnB,CAD+B,CAc/BqB,MAAM,CAACC,cAAPD,CAAsB,IAAtBA,CAA4B,WAA5BA,CAAyC,CAAEE,KAAK,GAAP,CAAgBC,UAAU,GAA1B,CAAzCH,CAd+B,QAmBrBI,CAAAA,EAFJC,CAAI,CAAGL,MAAM,CAACK,IAAPL,CAAYvB,CAAZuB,EACJM,CAAC,CAAG,EAAGA,CAAC,CAAGD,CAAI,CAACE,OAAQ,EAAED,EACzBF,EAAMC,CAAI,CAACC,CAAD,EACVF,CAAG,GAAI,OACTJ,MAAM,CAACC,cAAPD,CAAsB,IAAtBA,CAA4BI,CAA5BJ,CAAiCQ,CAAwB,CAACJ,CAAD,CAAzDJ,EAyOZ,QAASQ,CAAAA,CAAT,CAAkCJ,CAAlC,CAAuC,OAC5B,CACHxB,GADG,WACG,OACKJ,CAAAA,CAAE,CAAC,IAAD,CAAFA,CAASC,KAATD,CAAe4B,CAAf5B,CAFR,CAAA,CAIHgB,GAJG,UAICU,EAAO,CACP1B,CAAE,CAAC,IAAD,CAAFA,CAASC,KAATD,CAAe4B,CAAf5B,EAAsB0B,CALvB,CAAA,CAOHO,YAAY,GAPT,CAQHN,UAAU,GARP,EAkBX,QAASO,CAAAA,CAAT,CAA8BN,CAA9B,CAAmC,OACxB,CACHF,KADG,WACK,IACEzB,CAAAA,CAAK,CAAGD,CAAE,CAAC,IAAD,CAAFA,CAASC,YAChBA,CAAAA,CAAK,CAAC2B,CAAD,CAAL3B,CAAWkC,KAAXlC,CAAiBA,CAAjBA,CAAwBmC,SAAxBnC,CAHR,CAAA,CAKHgC,YAAY,GALT,CAMHN,UAAU,GANP,EAiBX,QAASU,CAAAA,CAAT,CAAuBC,CAAvB,CAAkCC,CAAlC,CAAyC,SAO5BC,CAAAA,EAAYzB,EAAad,EAAO,CACrCqC,CAAS,CAACG,IAAVH,CAAe,IAAfA,CAAqBvB,CAArBuB,CAAkCrC,CAAlCqC,KAPET,CAAAA,CAAI,CAAGL,MAAM,CAACK,IAAPL,CAAYe,CAAZf,KACO,CAAhBK,GAAAA,CAAI,CAACE,aACEO,CAAAA,EAQXE,CAAW,CAACE,SAAZF,CAAwBhB,MAAM,CAACmB,MAAPnB,CAAcc,CAAS,CAACI,SAAxBlB,CAAmC,CACvDoB,WAAW,CAAE,CAAElB,KAAK,CAAEc,CAAT,CAAsBP,YAAY,GAAlC,CAA0CY,QAAQ,GAAlD,CAD0C,CAAnCrB,CAXa,KAgBhC,GACKI,CAAAA,CADL,CAAIE,CAAC,CAAG,EAAGA,CAAC,CAAGD,CAAI,CAACE,OAAQ,EAAED,KACzBF,EAAMC,CAAI,CAACC,CAAD,EACZ,EAAEF,CAAG,GAAIU,CAAAA,CAAS,CAACI,SAAnB,EAA+B,IACzBI,CAAAA,CAAU,CAAGtB,MAAM,CAACuB,wBAAPvB,CAAgCe,CAAhCf,CAAuCI,CAAvCJ,CADY,CAEzBwB,CAAM,CAA+B,UAA5B,QAAOF,CAAAA,CAAU,CAACpB,KAFF,CAG/BF,MAAM,CAACC,cAAPD,CACIgB,CAAW,CAACE,SADhBlB,CAEII,CAFJJ,CAGIwB,CAAM,CACAd,CAAoB,CAACN,CAAD,CADpB,CAEAI,CAAwB,CAACJ,CAAD,CALlCJ,QAUDgB,CAAAA,EASX,QAASS,CAAAA,CAAT,CAAoBV,CAApB,CAA2B,IACV,IAATA,EAAAA,CAAK,EAAYA,CAAK,GAAKf,MAAM,CAACkB,gBAC3B5B,CAAAA,KAGPoC,CAAAA,CAAO,CAAGC,CAAQ,CAAC/C,GAAT+C,CAAaZ,CAAbY,QACC,KAAXD,EAAAA,IACAA,CAAO,CAAGb,CAAa,CAACY,CAAU,CAACzB,MAAM,CAAC4B,cAAP5B,CAAsBe,CAAtBf,CAAD,CAAX,CAA2Ce,CAA3C,EACvBY,CAAQ,CAACnC,GAATmC,CAAaZ,CAAbY,CAAoBD,CAApBC,GAEGD,EAUJ,QAASG,CAAAA,CAAT,CAAmBtC,CAAnB,CAAgCd,CAAhC,CAAuC,IACpCqD,CAAAA,CAAO,CAAGL,CAAU,CAACzB,MAAM,CAAC4B,cAAP5B,CAAsBvB,CAAtBuB,CAAD,QACnB,IAAI8B,CAAAA,CAAJ,CAAYvC,CAAZ,CAAyBd,CAAzB,EASJ,QAASsD,CAAAA,CAAT,CAAmBtD,CAAnB,CAA0B,OACtBD,CAAAA,CAAE,CAACC,CAAD,CAAFD,CAAUoB,iBAUd,QAASoC,CAAAA,CAAT,CAAuBvD,CAAvB,CAA8BgB,CAA9B,CAA0C,CAC7CjB,CAAE,CAACC,CAAD,CAAFD,CAAUiB,UAAVjB,CAAuBiB,EAUpB,QAASwC,CAAAA,CAAT,CAA0BxD,CAA1B,CAAiCiB,CAAjC,CAAgD,CACnDlB,CAAE,CAACC,CAAD,CAAFD,CAAUkB,aAAVlB,CAA0BkB,EAUvB,QAASwC,CAAAA,CAAT,CAA4BzD,CAA5B,CAAmCQ,CAAnC,CAAoD,CACvDT,CAAE,CAACC,CAAD,CAAFD,CAAUS,eAAVT,CAA4BS,EC3bhC,QAASkD,CAAAA,CAAT,CAAkBC,CAAlB,CAAqB,OACJ,KAANA,GAAAA,CAAC,EAA0B,QAAb,GAAAC,EAAOD,GAShC,QAASE,CAAAA,CAAT,CAAsB/C,CAAtB,CAAmC,IACzBgD,CAAAA,CAAS,CAAGC,CAAY,CAAC5D,GAAb4D,CAAiBjD,CAAjBiD,KACD,IAAbD,EAAAA,OACM,IAAIE,CAAAA,SAAJ,CACF,kEADE,QAIHF,CAAAA,EASX,QAASG,CAAAA,CAAT,CAAwCC,CAAxC,CAAmD,OACxC,CACH/D,GADG,WACG,QACI2D,CAAAA,CAAS,CAAGD,CAAY,CAAC,IAAD,CAD5B,CAEEM,CAAI,CAAGL,CAAS,CAAC3D,GAAV2D,CAAcI,CAAdJ,CAFT,CAGa,IAARK,EAAAA,CAHL,EAGmB,IACbA,IAAAA,CAAI,CAACC,mBACED,CAAAA,CAAI,CAACE,SAEhBF,CAAI,CAAGA,CAAI,CAACG,WAET,KAVR,CAAA,CAaHvD,GAbG,UAaCsD,EAAU,CACc,UAApB,QAAOA,CAAAA,CAAP,EAAmCX,CAAQ,CAACW,CAAD,CADrC,GAENA,CAAQ,CAAG,IAFL,SAIJP,CAAAA,CAAS,CAAGD,CAAY,CAAC,IAAD,CAJpB,CAONU,CAAI,CAAG,IAPD,CAQNJ,CAAI,CAAGL,CAAS,CAAC3D,GAAV2D,CAAcI,CAAdJ,CARD,CASK,IAARK,EAAAA,CATG,EAUFA,IAAAA,CAAI,CAACC,YAVH,CAYW,IAATG,GAAAA,CAZF,CAcuB,IAAdJ,GAAAA,CAAI,CAACG,IAdd,CAiBER,CAAS
,CAACU,MAAVV,CAAiBI,CAAjBJ,CAjBF,CAeEA,CAAS,CAAC/C,GAAV+C,CAAcI,CAAdJ,CAAyBK,CAAI,CAACG,IAA9BR,CAfF,CAaES,CAAI,CAACD,IAALC,CAAYJ,CAAI,CAACG,IAbnB,CAoBFC,CAAI,CAAGJ,CApBL,CAuBNA,CAAI,CAAGA,CAAI,CAACG,IAvBN,IA2BO,IAAbD,GAAAA,EAAmB,IACbI,CAAAA,CAAO,CAAG,CACZJ,QAAQ,CAARA,CADY,CAEZD,YAAY,EAFA,CAGZM,OAAO,GAHK,CAIZC,IAAI,GAJQ,CAKZL,IAAI,CAAE,IALM,EAOH,IAATC,GAAAA,CARe,CASfT,CAAS,CAAC/C,GAAV+C,CAAcI,CAAdJ,CAAyBW,CAAzBX,CATe,CAWfS,CAAI,CAACD,IAALC,CAAYE,EAnDrB,CAAA,CAuDHzC,YAAY,GAvDT,CAwDHN,UAAU,GAxDP,EAkEX,QAASkD,CAAAA,CAAT,CAA8BC,CAA9B,CAAoDX,CAApD,CAA+D,CAC3D3C,MAAM,CAACC,cAAPD,CACIsD,CADJtD,aAES2C,EAFT3C,CAGI0C,CAA8B,CAACC,CAAD,CAHlC3C,EAaJ,QAASuD,CAAAA,CAAT,CAAiCC,CAAjC,CAA6C,SAEhCC,CAAAA,GAAoB,CACzBC,CAAW,CAACzC,IAAZyC,CAAiB,IAAjBA,EAGJD,CAAiB,CAACvC,SAAlBuC,CAA8BzD,MAAM,CAACmB,MAAPnB,CAAc0D,CAAW,CAACxC,SAA1BlB,CAAqC,CAC/DoB,WAAW,CAAE,CACTlB,KAAK,CAAEuD,CADE,CAEThD,YAAY,GAFH,CAGTY,QAAQ,GAHC,CADkD,CAArCrB,CANW,KAcpC,GAAIM,CAAAA,CAAC,CAAG,EAAGA,CAAC,CAAGkD,CAAU,CAACjD,OAAQ,EAAED,EACrC+C,CAAoB,CAACI,CAAiB,CAACvC,SAAnB,CAA8BsC,CAAU,CAAClD,CAAD,CAAxC,CAApB+C,OAGGI,CAAAA,EAgBX,QAASC,CAAAA,CAAT,EAAuB,IAEf,eAAgBA,CAAAA,aAChBlB,CAAAA,CAAY,CAAChD,GAAbgD,CAAiB,IAAjBA,CAAuB,GAAImB,CAAAA,GAA3BnB,KAGqB,CAArB5B,GAAAA,SAAS,CAACL,MAAVK,EAA0BgD,KAAK,CAACC,OAAND,CAAchD,SAAS,CAAC,CAAD,CAAvBgD,QACnBL,CAAAA,CAAuB,CAAC3C,SAAS,CAAC,CAAD,CAAV,KAEX,CAAnBA,CAAAA,SAAS,CAACL,OAAY,QAChBuD,CAAAA,CAAK,CAAOF,KAAP,CAAahD,SAAS,CAACL,MAAvB,EACFD,CAAC,CAAG,EAAGA,CAAC,CAAGM,SAAS,CAACL,OAAQ,EAAED,EACpCwD,CAAK,CAACxD,CAAD,CAALwD,CAAWlD,SAAS,CAACN,CAAD,CAApBwD,OAEGP,CAAAA,CAAuB,CAACO,CAAD,OAE5B,IAAIrB,CAAAA,SAAJ,CAAc,mCAAd,KD5KJ9D,CAAAA,CAAW,CAAG,GAAIoF,CAAAA,QAOlBpC,CAAQ,CAAG,GAAIoC,CAAAA,QAkFrBzE,CAAK,CAAC4B,SAAN5B,CAAkB,IAKV0E,CAAAA,MAAO,OACAxF,CAAAA,CAAE,CAAC,IAAD,CAAFA,CAASC,KAATD,CAAewF,IANZ,CAAA,IAaVC,CAAAA,QAAS,OACFzF,CAAAA,CAAE,CAAC,IAAD,CAAFA,CAASe,WAdN,CAAA,IAqBVG,CAAAA,eAAgB,OACTlB,CAAAA,CAAE,CAAC,IAAD,CAAFA,CAASkB,aAtBN,CAAA,CA4BdwE,YA5Bc,WA4BC,IACLxE,CAAAA,CAAa,CAAGlB,CAAE,CAAC,IAAD,CAAFA,CAASkB,cADpB,MAEU,KAAjBA,EAAAA,CAFO,CAGA,EAHA,CAKJ,CAACA,CAAD,CAjCG,CAAA,IAwCVyE,CAAAA,MAAO,OACA,EAzCG,CAAA,IAgDVC,CAAAA,iBAAkB,OACX,EAjDG,CAAA,IAwDVC,CAAAA,WAAY,OACL,EAzDG,CAAA,IAgEVC,CAAAA,gBAAiB,OACV,EAjEG,CAAA,IAwEV7E,CAAAA,YAAa,OACNjB,CAAAA,CAAE,CAAC,IAAD,CAAFA,CAASiB,UAzEN,CAAA,CAgFd8E,eAhFc,WAgFI,IACRvF,CAAAA,CAAI,CAAGR,CAAE,CAAC,IAAD,EAEfQ,CAAI,CAACW,OAALX,GAHc,CAI4B,UAAtC,QAAOA,CAAAA,CAAI,CAACP,KAALO,CAAWuF,eAJR,EAKVvF,CAAI,CAACP,KAALO,CAAWuF,eAAXvF,EArFM,CAAA,CA6FdwF,wBA7Fc,WA6Fa,IACjBxF,CAAAA,CAAI,CAAGR,CAAE,CAAC,IAAD,EAEfQ,CAAI,CAACW,OAALX,GAHuB,CAIvBA,CAAI,CAACY,gBAALZ,GAJuB,CAK4B,UAA/C,QAAOA,CAAAA,CAAI,CAACP,KAALO,CAAWwF,wBALC,EAMnBxF,CAAI,CAACP,KAALO,CAAWwF,wBAAXxF,EAnGM,CAAA,IA2GVyF,CAAAA,SAAU,SACKjG,CAAE,CAAC,IAAD,CAAFA,CAASC,KAATD,CAAeiG,OA5GpB,CAAA,IAmHVvF,CAAAA,YAAa,SACEV,CAAE,CAAC,IAAD,CAAFA,CAASC,KAATD,CAAeU,UApHpB,CAAA,CA2HdE,cA3Hc,WA2HG,CACbL,CAAa,CAACP,CAAE,CAAC,IAAD,CAAH,CA5HH,CAAA,IAmIVkG,CAAAA,kBAAmB,OACZlG,CAAAA,CAAE,CAAC,IAAD,CAAFA,CAASW,QApIN,CAAA,IA2IVwF,CAAAA,UAAW,SACInG,CAAE,CAAC,IAAD,CAAFA,CAASC,KAATD,CAAemG,QA5IpB,CAAA,IAmJV9E,CAAAA,WAAY,OACLrB,CAAAA,CAAE,CAAC,IAAD,CAAFA,CAASqB,SApJN,CAAA,IA4JV+E,CAAAA,YAAa,OACNpG,CAAAA,CAAE,CAAC,IAAD,CAAFA,CAASe,WA7JN,CAAA,IAqKVsF,CAAAA,cAAe,OACRrG,CAAAA,CAAE,CAAC,IAAD,CAAFA,CAASmB,OAtKN,CAAA,IAwKVkF,CAAAA,aAAa3E,EAAO,IACfA,MAGClB,CAAAA,CAAI,CAAGR,CAAE,CAAC,IAAD,EAEfQ,CAAI,CAACW,OAALX,IACuC,SAAnC,QAAOA,CAAAA,CAAI,CAACP,KAALO,CAAW6F,eAClB7F,CAAI,CAACP,KAALO,CAAW6F,YAAX7F,KAhLM,CAAA,IAyLV8F,CAAAA,aAAc,OACP,CAACtG,CAAE,CAAC,IAAD,CAAFA,CAASW,QA1LP,CAAA,IA4LV2F,CAAAA,YAA
Y5E,EAAO,CACdA,CADc,EAEfnB,CAAa,CAACP,CAAE,CAAC,IAAD,CAAH,CA9LP,CAAA,CAyMduG,SAzMc,WAyMF,EAzME,EA+MlB/E,MAAM,CAACC,cAAPD,CAAsBV,CAAK,CAAC4B,SAA5BlB,CAAuC,aAAvCA,CAAsD,CAClDE,KAAK,CAAEZ,CAD2C,CAElDmB,YAAY,GAFsC,CAGlDY,QAAQ,GAH0C,CAAtDrB,EAOsB,WAAlB,QAAOgF,CAAAA,MAAP,EAAyD,WAAxB,QAAOA,CAAAA,MAAM,CAAC1F,QAC/CU,MAAM,CAACiF,cAAPjF,CAAsBV,CAAK,CAAC4B,SAA5BlB,CAAuCgF,MAAM,CAAC1F,KAAP0F,CAAa9D,SAApDlB,EAGA2B,CAAQ,CAACnC,GAATmC,CAAaqD,MAAM,CAAC1F,KAAP0F,CAAa9D,SAA1BS,CAAqCrC,CAArCqC,MChTEa,CAAAA,CAAY,CAAG,GAAIuB,CAAAA,QAGnBmB,CAAO,CAAG,EACVC,CAAM,CAAG,KA0KfzB,CAAW,CAACxC,SAAZwC,CAAwB,CAQpB0B,gBARoB,UAQHzC,EAAWG,EAAUuC,EAAS,IAC3B,IAAZvC,EAAAA,MAGoB,UAApB,QAAOA,CAAAA,CAAP,EAAkC,CAACX,CAAQ,CAACW,CAAD,OACrC,IAAIL,CAAAA,SAAJ,CAAc,+CAAd,KAGJF,CAAAA,CAAS,CAAGD,CAAY,CAAC,IAAD,EACxBgD,CAAY,CAAGnD,CAAQ,CAACkD,CAAD,EACvBE,CAAO,CAAGD,CAAY,GACdD,CAAO,CAACE,OADM,GAEdF,EACRxC,CAAY,CAAG0C,CAAO,CAAGL,CAAH,CAAaC,EACnCjC,CAAO,CAAG,CACZJ,QAAQ,CAARA,CADY,CAEZD,YAAY,CAAZA,CAFY,CAGZM,OAAO,CAAEmC,CAAY,IAAYD,CAAO,CAAClC,OAH7B,CAIZC,IAAI,CAAEkC,CAAY,IAAYD,CAAO,CAACjC,IAJ1B,CAKZL,IAAI,CAAE,IALM,EASZH,CAAI,CAAGL,CAAS,CAAC3D,GAAV2D,CAAcI,CAAdJ,KACPK,SAAAA,aACAL,CAAAA,CAAS,CAAC/C,GAAV+C,CAAcI,CAAdJ,CAAyBW,CAAzBX,SAKAS,CAAAA,CAAI,CAAG,KACI,IAARJ,EAAAA,GAAc,IAEbA,CAAI,CAACE,QAALF,GAAkBE,CAAlBF,EACAA,CAAI,CAACC,YAALD,GAAsBC,SAK1BG,CAAI,CAAGJ,CARU,CASjBA,CAAI,CAAGA,CAAI,CAACG,IAxC2B,CA4C3CC,CAAI,CAACD,IAALC,CAAYE,EApDI,CAAA,CA8DpBsC,mBA9DoB,UA8DA7C,EAAWG,EAAUuC,EAAS,IAC9B,IAAZvC,EAAAA,SAIEP,CAAAA,CAAS,CAAGD,CAAY,CAAC,IAAD,EACxBiD,CAAO,CAAGpD,CAAQ,CAACkD,CAAD,CAARlD,GACFkD,CAAO,CAACE,OADNpD,GAEFkD,EACRxC,CAAY,CAAG0C,CAAO,CAAGL,CAAH,CAAaC,EAErCnC,CAAI,CAAG,KACPJ,CAAI,CAAGL,CAAS,CAAC3D,GAAV2D,CAAcI,CAAdJ,EACI,IAARK,EAAAA,GAAc,IAEbA,CAAI,CAACE,QAALF,GAAkBE,CAAlBF,EACAA,CAAI,CAACC,YAALD,GAAsBC,cAET,IAATG,GAAAA,EAEqB,IAAdJ,GAAAA,CAAI,CAACG,KAGZR,CAAS,CAACU,MAAVV,CAAiBI,CAAjBJ,EAFAA,CAAS,CAAC/C,GAAV+C,CAAcI,CAAdJ,CAAyBK,CAAI,CAACG,IAA9BR,EAFAS,CAAI,CAACD,IAALC,CAAYJ,CAAI,CAACG,MASzBC,CAAI,CAAGJ,CAfU,CAgBjBA,CAAI,CAAGA,CAAI,CAACG,KA3FA,CAAA,CAoGpB0C,aApGoB,UAoGNhH,EAAO,IACJ,IAATA,EAAAA,CAAK,EAAkC,QAAtB,QAAOA,CAAAA,CAAK,CAACuF,UACxB,IAAIvB,CAAAA,SAAJ,CAAc,oCAAd,EAFO,GAMXF,CAAAA,CAAS,CAAGD,CAAY,CAAC,IAAD,CANb,CAOXK,CAAS,CAAGlE,CAAK,CAACuF,IAPP,CAQbpB,CAAI,CAAGL,CAAS,CAAC3D,GAAV2D,CAAcI,CAAdJ,CARM,IASL,IAARK,EAAAA,WATa,OAcX8C,CAAAA,CAAY,CAAG7D,CAAS,CAAC,IAAD,CAAOpD,CAAP,CAdb,CAkBbuE,CAAI,CAAG,IAlBM,CAmBF,IAARJ,EAAAA,CAnBU,EAmBI,IAEbA,CAAI,CAACQ,KACQ,IAATJ,GAAAA,EAEqB,IAAdJ,GAAAA,CAAI,CAACG,KAGZR,CAAS,CAACU,MAAVV,CAAiBI,CAAjBJ,EAFAA,CAAS,CAAC/C,GAAV+C,CAAcI,CAAdJ,CAAyBK,CAAI,CAACG,IAA9BR,EAFAS,CAAI,CAACD,IAALC,CAAYJ,CAAI,CAACG,KAOrBC,CAAI,CAAGJ,EAIXV,CAAkB,CACdwD,CADc,CAEd9C,CAAI,CAACO,OAALP,CAAeA,CAAI,CAACE,QAApBF,CAA+B,IAFjB,EAIW,UAAzB,QAAOA,CAAAA,CAAI,CAACE,YACR,CACAF,CAAI,CAACE,QAALF,CAAc3B,IAAd2B,CAAmB,IAAnBA,CAAyB8C,CAAzB9C,CADJ,CAEE,MAAO+C,CAAP,CAAY,CAEa,WAAnB,QAAO9G,CAAAA,OAAP,EACyB,UAAzB,QAAOA,CAAAA,OAAO,CAACQ,KAHT,EAKNR,OAAO,CAACQ,KAARR,CAAc8G,CAAd9G,MAIR+D,CAAAA,CAAI,CAACC,YAALD,GA/TE,CA+TFA,EACqC,UAArC,QAAOA,CAAAA,CAAI,CAACE,QAALF,CAAcgD,aAErBhD,CAAI,CAACE,QAALF,CAAcgD,WAAdhD,CAA0B8C,CAA1B9C,KAIAb,CAAS,CAAC2D,CAAD,QAIb9C,CAAI,CAAGA,CAAI,CAACG,WAEhBb,CAAAA,CAAkB,CAACwD,CAAD,CAAe,IAAf,EAClB1D,CAAa,CAAC0D,CAAD,CAAe,CAAf,EACbzD,CAAgB,CAACyD,CAAD,CAAe,IAAf,EAET,CAACA,CAAY,CAAChB,iBAvKL,EA4KxB1E,MAAM,CAACC,cAAPD,CAAsB0D,CAAW,CAACxC,SAAlClB,CAA6C,aAA7CA,CAA4D,CACxDE,KAAK,CAAEwD,CADiD,CAExDjD,YAAY,GAF4C,CAGxDY,QAAQ,GAHgD,CAA5DrB,EAQsB,WAAlB,QAAOgF,CAAAA,MAAP,EAC8B,WAA9B,QAAOA,CAAAA,MAAM,CAACtB,aAEd1D,MAAM,CAACiF,cAAPjF,CAAsB0D,CAAW,CAACxC,S
AAlClB,CAA6CgF,MAAM,CAACtB,WAAPsB,CAAmB9D,SAAhElB"} \ No newline at end of file diff --git a/node_modules/event-target-shim/index.d.ts b/node_modules/event-target-shim/index.d.ts new file mode 100644 index 0000000..a303097 --- /dev/null +++ b/node_modules/event-target-shim/index.d.ts @@ -0,0 +1,399 @@ +export as namespace EventTargetShim + +/** + * `Event` interface. + * @see https://dom.spec.whatwg.org/#event + */ +export interface Event { + /** + * The type of this event. + */ + readonly type: string + + /** + * The target of this event. + */ + readonly target: EventTarget<{}, {}, "standard"> | null + + /** + * The current target of this event. + */ + readonly currentTarget: EventTarget<{}, {}, "standard"> | null + + /** + * The target of this event. + * @deprecated + */ + readonly srcElement: any | null + + /** + * The composed path of this event. + */ + composedPath(): EventTarget<{}, {}, "standard">[] + + /** + * Constant of NONE. + */ + readonly NONE: number + + /** + * Constant of CAPTURING_PHASE. + */ + readonly CAPTURING_PHASE: number + + /** + * Constant of BUBBLING_PHASE. + */ + readonly BUBBLING_PHASE: number + + /** + * Constant of AT_TARGET. + */ + readonly AT_TARGET: number + + /** + * Indicates which phase of the event flow is currently being evaluated. + */ + readonly eventPhase: number + + /** + * Stop event bubbling. + */ + stopPropagation(): void + + /** + * Stop event bubbling. + */ + stopImmediatePropagation(): void + + /** + * Initialize event. + * @deprecated + */ + initEvent(type: string, bubbles?: boolean, cancelable?: boolean): void + + /** + * The flag indicating bubbling. + */ + readonly bubbles: boolean + + /** + * Stop event bubbling. + * @deprecated + */ + cancelBubble: boolean + + /** + * Set or get cancellation flag. + * @deprecated + */ + returnValue: boolean + + /** + * The flag indicating whether the event can be canceled. + */ + readonly cancelable: boolean + + /** + * Cancel this event. + */ + preventDefault(): void + + /** + * The flag to indicating whether the event was canceled. + */ + readonly defaultPrevented: boolean + + /** + * The flag to indicating if event is composed. + */ + readonly composed: boolean + + /** + * Indicates whether the event was dispatched by the user agent. + */ + readonly isTrusted: boolean + + /** + * The unix time of this event. + */ + readonly timeStamp: number +} + +/** + * The constructor of `EventTarget` interface. + */ +export type EventTargetConstructor< + TEvents extends EventTarget.EventDefinition = {}, + TEventAttributes extends EventTarget.EventDefinition = {}, + TMode extends EventTarget.Mode = "loose" +> = { + prototype: EventTarget + new(): EventTarget +} + +/** + * `EventTarget` interface. + * @see https://dom.spec.whatwg.org/#interface-eventtarget + */ +export type EventTarget< + TEvents extends EventTarget.EventDefinition = {}, + TEventAttributes extends EventTarget.EventDefinition = {}, + TMode extends EventTarget.Mode = "loose" +> = EventTarget.EventAttributes & { + /** + * Add a given listener to this event target. + * @param eventName The event name to add. + * @param listener The listener to add. + * @param options The options for this listener. + */ + addEventListener>( + type: TEventType, + listener: + | EventTarget.Listener> + | null, + options?: boolean | EventTarget.AddOptions + ): void + + /** + * Remove a given listener from this event target. + * @param eventName The event name to remove. + * @param listener The listener to remove. + * @param options The options for this listener. 
+ */ + removeEventListener>( + type: TEventType, + listener: + | EventTarget.Listener> + | null, + options?: boolean | EventTarget.RemoveOptions + ): void + + /** + * Dispatch a given event. + * @param event The event to dispatch. + * @returns `false` if canceled. + */ + dispatchEvent>( + event: EventTarget.EventData + ): boolean +} + +export const EventTarget: EventTargetConstructor & { + /** + * Create an `EventTarget` instance with detailed event definition. + * + * The detailed event definition requires to use `defineEventAttribute()` + * function later. + * + * Unfortunately, the second type parameter `TEventAttributes` was needed + * because we cannot compute string literal types. + * + * @example + * const signal = new EventTarget<{ abort: Event }, { onabort: Event }>() + * defineEventAttribute(signal, "abort") + */ + new < + TEvents extends EventTarget.EventDefinition, + TEventAttributes extends EventTarget.EventDefinition, + TMode extends EventTarget.Mode = "loose" + >(): EventTarget + + /** + * Define an `EventTarget` constructor with attribute events and detailed event definition. + * + * Unfortunately, the second type parameter `TEventAttributes` was needed + * because we cannot compute string literal types. + * + * @example + * class AbortSignal extends EventTarget<{ abort: Event }, { onabort: Event }>("abort") { + * abort(): void {} + * } + * + * @param events Optional event attributes (e.g. passing in `"click"` adds `onclick` to prototype). + */ + < + TEvents extends EventTarget.EventDefinition = {}, + TEventAttributes extends EventTarget.EventDefinition = {}, + TMode extends EventTarget.Mode = "loose" + >(events: string[]): EventTargetConstructor< + TEvents, + TEventAttributes, + TMode + > + + /** + * Define an `EventTarget` constructor with attribute events and detailed event definition. + * + * Unfortunately, the second type parameter `TEventAttributes` was needed + * because we cannot compute string literal types. + * + * @example + * class AbortSignal extends EventTarget<{ abort: Event }, { onabort: Event }>("abort") { + * abort(): void {} + * } + * + * @param events Optional event attributes (e.g. passing in `"click"` adds `onclick` to prototype). + */ + < + TEvents extends EventTarget.EventDefinition = {}, + TEventAttributes extends EventTarget.EventDefinition = {}, + TMode extends EventTarget.Mode = "loose" + >(event0: string, ...events: string[]): EventTargetConstructor< + TEvents, + TEventAttributes, + TMode + > +} + +export namespace EventTarget { + /** + * Options of `removeEventListener()` method. + */ + export interface RemoveOptions { + /** + * The flag to indicate that the listener is for the capturing phase. + */ + capture?: boolean + } + + /** + * Options of `addEventListener()` method. + */ + export interface AddOptions extends RemoveOptions { + /** + * The flag to indicate that the listener doesn't support + * `event.preventDefault()` operation. + */ + passive?: boolean + /** + * The flag to indicate that the listener will be removed on the first + * event. + */ + once?: boolean + } + + /** + * The type of regular listeners. + */ + export interface FunctionListener { + (event: TEvent): void + } + + /** + * The type of object listeners. + */ + export interface ObjectListener { + handleEvent(event: TEvent): void + } + + /** + * The type of listeners. + */ + export type Listener = + | FunctionListener + | ObjectListener + + /** + * Event definition. 
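Taken together, the typings above describe how the shim is meant to be consumed. A minimal sketch under those declarations, using only calls documented here (the MySignal name and the aborted flag are illustrative, not part of the package):

import { EventTarget, defineEventAttribute, Event } from "event-target-shim"

// A custom target with a typed "abort" event and an `onabort` event attribute,
// following the @example shown in the EventTarget constructor typings above.
class MySignal extends EventTarget<{ abort: Event }, { onabort: Event }>("abort") {
    aborted = false
}

const signal = new MySignal()
signal.onabort = event => console.log("onabort:", event.type)
signal.addEventListener("abort", () => { signal.aborted = true }, { once: true })
// The default "loose" mode lets dispatchEvent accept a plain `{ type }` object.
signal.dispatchEvent({ type: "abort" })

// Event attributes can also be added to an existing prototype after the fact:
defineEventAttribute(MySignal.prototype, "error")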
+ */ + export type EventDefinition = { + readonly [key: string]: Event + } + + /** + * Mapped type for event attributes. + */ + export type EventAttributes = { + [P in keyof TEventAttributes]: + | FunctionListener + | null + } + + /** + * The type of event data for `dispatchEvent()` method. + */ + export type EventData< + TEvents extends EventDefinition, + TEventType extends keyof TEvents | string, + TMode extends Mode + > = + TEventType extends keyof TEvents + ? ( + // Require properties which are not generated automatically. + & Pick< + TEvents[TEventType], + Exclude + > + // Properties which are generated automatically are optional. + & Partial> + ) + : ( + TMode extends "standard" + ? Event + : Event | NonStandardEvent + ) + + /** + * The string literal types of the properties which are generated + * automatically in `dispatchEvent()` method. + */ + export type OmittableEventKeys = Exclude + + /** + * The type of event data. + */ + export type NonStandardEvent = { + [key: string]: any + type: string + } + + /** + * The type of listeners. + */ + export type PickEvent< + TEvents extends EventDefinition, + TEventType extends keyof TEvents | string, + > = + TEventType extends keyof TEvents + ? TEvents[TEventType] + : Event + + /** + * Event type candidates. + */ + export type EventType< + TEvents extends EventDefinition, + TMode extends Mode + > = + TMode extends "strict" + ? keyof TEvents + : keyof TEvents | string + + /** + * - `"strict"` ..... Methods don't accept unknown events. + * `dispatchEvent()` accepts partial objects. + * - `"loose"` ...... Methods accept unknown events. + * `dispatchEvent()` accepts partial objects. + * - `"standard"` ... Methods accept unknown events. + * `dispatchEvent()` doesn't accept partial objects. + */ + export type Mode = "strict" | "standard" | "loose" +} + +/** + * Specialized `type` property. + */ +export type Type = { type: T } + +/** + * Define an event attribute (e.g. `eventTarget.onclick`). + * @param prototype The event target prototype to define an event attribute. + * @param eventName The event name to define. 
+ */ +export function defineEventAttribute( + prototype: EventTarget, + eventName: string +): void + +export default EventTarget diff --git a/node_modules/event-target-shim/package.json b/node_modules/event-target-shim/package.json new file mode 100644 index 0000000..40326f3 --- /dev/null +++ b/node_modules/event-target-shim/package.json @@ -0,0 +1,82 @@ +{ + "name": "event-target-shim", + "version": "5.0.1", + "description": "An implementation of WHATWG EventTarget interface.", + "main": "dist/event-target-shim", + "types": "index.d.ts", + "files": [ + "dist", + "index.d.ts" + ], + "engines": { + "node": ">=6" + }, + "scripts": { + "preversion": "npm test", + "version": "npm run build && git add dist/*", + "postversion": "git push && git push --tags", + "clean": "rimraf .nyc_output coverage", + "coverage": "nyc report --reporter lcov && opener coverage/lcov-report/index.html", + "lint": "eslint src test scripts --ext .js,.mjs", + "build": "rollup -c scripts/rollup.config.js", + "pretest": "npm run lint", + "test": "run-s test:*", + "test:mocha": "nyc --require ./scripts/babel-register mocha test/*.mjs", + "test:karma": "karma start scripts/karma.conf.js --single-run", + "watch": "run-p watch:*", + "watch:mocha": "mocha test/*.mjs --require ./scripts/babel-register --watch --watch-extensions js,mjs --growl", + "watch:karma": "karma start scripts/karma.conf.js --watch", + "codecov": "codecov" + }, + "devDependencies": { + "@babel/core": "^7.2.2", + "@babel/plugin-transform-modules-commonjs": "^7.2.0", + "@babel/preset-env": "^7.2.3", + "@babel/register": "^7.0.0", + "@mysticatea/eslint-plugin": "^8.0.1", + "@mysticatea/spy": "^0.1.2", + "assert": "^1.4.1", + "codecov": "^3.1.0", + "eslint": "^5.12.1", + "karma": "^3.1.4", + "karma-chrome-launcher": "^2.2.0", + "karma-coverage": "^1.1.2", + "karma-firefox-launcher": "^1.0.0", + "karma-growl-reporter": "^1.0.0", + "karma-ie-launcher": "^1.0.0", + "karma-mocha": "^1.3.0", + "karma-rollup-preprocessor": "^7.0.0-rc.2", + "mocha": "^5.2.0", + "npm-run-all": "^4.1.5", + "nyc": "^13.1.0", + "opener": "^1.5.1", + "rimraf": "^2.6.3", + "rollup": "^1.1.1", + "rollup-plugin-babel": "^4.3.2", + "rollup-plugin-babel-minify": "^7.0.0", + "rollup-plugin-commonjs": "^9.2.0", + "rollup-plugin-json": "^3.1.0", + "rollup-plugin-node-resolve": "^4.0.0", + "rollup-watch": "^4.3.1", + "type-tester": "^1.0.0", + "typescript": "^3.2.4" + }, + "repository": { + "type": "git", + "url": "https://github.com/mysticatea/event-target-shim.git" + }, + "keywords": [ + "w3c", + "whatwg", + "eventtarget", + "event", + "events", + "shim" + ], + "author": "Toru Nagashima", + "license": "MIT", + "bugs": { + "url": "https://github.com/mysticatea/event-target-shim/issues" + }, + "homepage": "https://github.com/mysticatea/event-target-shim" +} diff --git a/node_modules/form-data-encoder/@type/FileLike.d.ts b/node_modules/form-data-encoder/@type/FileLike.d.ts new file mode 100644 index 0000000..8febb60 --- /dev/null +++ b/node_modules/form-data-encoder/@type/FileLike.d.ts @@ -0,0 +1,23 @@ +export interface FileLike { + /** + * Name of the file referenced by the File object. + */ + readonly name: string; + /** + * Returns the media type ([`MIME`](https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/MIME_types)) of the file represented by a `File` object. 
+ */ + readonly type: string; + /** + * Size of the file parts in bytes + */ + readonly size: number; + /** + * The last modified date of the file as the number of milliseconds since the Unix epoch (January 1, 1970 at midnight). Files without a known last modified date return the current date. + */ + readonly lastModified: number; + /** + * Returns a [`ReadableStream`](https://developer.mozilla.org/en-US/docs/Web/API/ReadableStream) which upon reading returns the data contained within the [`File`](https://developer.mozilla.org/en-US/docs/Web/API/File). + */ + stream(): AsyncIterable; + readonly [Symbol.toStringTag]: string; +} diff --git a/node_modules/form-data-encoder/@type/FormDataEncoder.d.ts b/node_modules/form-data-encoder/@type/FormDataEncoder.d.ts new file mode 100644 index 0000000..9aae0c6 --- /dev/null +++ b/node_modules/form-data-encoder/@type/FormDataEncoder.d.ts @@ -0,0 +1,160 @@ +import { FormDataLike } from "./FormDataLike"; +import { FileLike } from "./FileLike"; +export interface FormDataEncoderOptions { + /** + * When enabled, the encoder will emit additional per part headers, such as `Content-Length`. + * + * Please note that the web clients do not include these, so when enabled this option might cause an error if `multipart/form-data` does not consider additional headers. + * + * Defaults to `false`. + */ + enableAdditionalHeaders?: boolean; +} +/** + * Implements [`multipart/form-data` encoding algorithm](https://html.spec.whatwg.org/multipage/form-control-infrastructure.html#multipart/form-data-encoding-algorithm), + * allowing to add support for spec-comliant [`FormData`](https://developer.mozilla.org/en-US/docs/Web/API/FormData) to an HTTP client. + */ +export declare class FormDataEncoder { + #private; + /** + * Returns boundary string + */ + readonly boundary: string; + /** + * Returns Content-Type header + */ + readonly contentType: string; + /** + * Returns Content-Length header + */ + readonly contentLength: string; + /** + * Returns headers object with Content-Type and Content-Length header + */ + readonly headers: { + "Content-Type": string; + "Content-Length": string; + }; + /** + * Creates a multipart/form-data encoder. + * + * @param form FormData object to encode. This object must be a spec-compatible FormData implementation. + * @param boundary An optional boundary string that will be used by the encoder. If there's no boundary string is present, Encoder will generate it automatically. 
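Because the encoder duck-types its inputs, anything matching the FileLike shape above can be passed where a File is expected. A hypothetical in-memory value satisfying that contract (the name and contents are illustrative):

// Five bytes of UTF-8 text exposed through the FileLike contract.
const fileLike = {
    name: "greeting.txt",
    type: "text/plain",
    size: 5,
    lastModified: Date.now(),
    async *stream() {
        yield new TextEncoder().encode("hello")
    },
    [Symbol.toStringTag]: "File",
}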
+ * + * @example + * + * import {Readable} from "stream" + * + * import {FormData, File, fileFromPath} from "formdata-node" + * import {FormDataEncoder} from "form-data-encoder" + * + * import fetch from "node-fetch" + * + * const form = new FormData() + * + * form.set("field", "Just a random string") + * form.set("file", new File(["Using files is class amazing"], "file.txt")) + * form.set("fileFromPath", await fileFromPath("path/to/a/file.txt")) + * + * const encoder = new FormDataEncoder(form) + * + * const options = { + * method: "post", + * headers: encoder.headers, + * body: Readable.from(encoder) + * } + * + * const response = await fetch("https://httpbin.org/post", options) + * + * console.log(await response.json()) + */ + constructor(form: FormDataLike); + constructor(form: FormDataLike, boundary: string); + constructor(form: FormDataLike, options: FormDataEncoderOptions); + constructor(form: FormDataLike, boundary: string, options?: FormDataEncoderOptions); + /** + * Returns form-data content length + */ + getContentLength(): number; + /** + * Creates an iterator allowing to go through form-data parts (with metadata). + * This method **will not** read the files. + * + * Using this method, you can convert form-data content into Blob: + * + * @example + * + * import {Readable} from "stream" + * + * import {FormDataEncoder} from "form-data-encoder" + * + * import {FormData} from "formdata-polyfill/esm-min.js" + * import {fileFrom} from "fetch-blob/form.js" + * import {File} from "fetch-blob/file.js" + * import {Blob} from "fetch-blob" + * + * import fetch from "node-fetch" + * + * const form = new FormData() + * + * form.set("field", "Just a random string") + * form.set("file", new File(["Using files is class amazing"])) + * form.set("fileFromPath", await fileFrom("path/to/a/file.txt")) + * + * const encoder = new FormDataEncoder(form) + * + * const options = { + * method: "post", + * body: new Blob(encoder, {type: encoder.contentType}) + * } + * + * const response = await fetch("https://httpbin.org/post", options) + * + * console.log(await response.json()) + */ + values(): Generator; + /** + * Creates an async iterator allowing to perform the encoding by portions. + * This method **will** also read files. 
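The constructor and headers contracts above translate directly into an HTTP request. A sketch that mirrors the @example blocks in these declarations, assuming formdata-node and node-fetch are installed alongside form-data-encoder and using httpbin.org purely as a placeholder endpoint:

import { Readable } from "stream"
import { FormData, File } from "formdata-node"
import { FormDataEncoder } from "form-data-encoder"
import fetch from "node-fetch"

async function upload(): Promise<void> {
    const form = new FormData()
    form.set("field", "Just a random string")
    form.set("file", new File(["file content"], "file.txt"))

    const encoder = new FormDataEncoder(form)
    const response = await fetch("https://httpbin.org/post", {
        method: "post",
        // Content-Type (with boundary) and Content-Length come from the encoder.
        headers: encoder.headers,
        // encoder.encode() yields the multipart body by portions.
        body: Readable.from(encoder.encode()),
    })
    console.log(await response.json())
}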
+ * + * @example + * + * import {Readable} from "stream" + * + * import {FormData, File, fileFromPath} from "formdata-node" + * import {FormDataEncoder} from "form-data-encoder" + * + * import fetch from "node-fetch" + * + * const form = new FormData() + * + * form.set("field", "Just a random string") + * form.set("file", new File(["Using files is class amazing"], "file.txt")) + * form.set("fileFromPath", await fileFromPath("path/to/a/file.txt")) + * + * const encoder = new FormDataEncoder(form) + * + * const options = { + * method: "post", + * headers: encoder.headers, + * body: Readable.from(encoder.encode()) // or Readable.from(encoder) + * } + * + * const response = await fetch("https://httpbin.org/post", options) + * + * console.log(await response.json()) + */ + encode(): AsyncGenerator; + /** + * Creates an iterator allowing to read through the encoder data using for...of loops + */ + [Symbol.iterator](): Generator; + /** + * Creates an **async** iterator allowing to read through the encoder data using for-await...of loops + */ + [Symbol.asyncIterator](): AsyncGenerator; +} +/** + * @deprecated Use FormDataEncoder to import the encoder class instead + */ +export declare const Encoder: typeof FormDataEncoder; diff --git a/node_modules/form-data-encoder/@type/FormDataLike.d.ts b/node_modules/form-data-encoder/@type/FormDataLike.d.ts new file mode 100644 index 0000000..9e1922e --- /dev/null +++ b/node_modules/form-data-encoder/@type/FormDataLike.d.ts @@ -0,0 +1,40 @@ +import { FileLike } from "./FileLike"; +/** + * A `string` or `File` that represents a single value from a set of `FormData` key-value pairs. + */ +export declare type FormDataEntryValue = string | FileLike; +/** + * This interface reflects minimal shape of the FormData + */ +export interface FormDataLike { + /** + * Appends a new value onto an existing key inside a FormData object, + * or adds the key if it does not already exist. + * + * The difference between `set()` and `append()` is that if the specified key already exists, `set()` will overwrite all existing values with the new one, whereas `append()` will append the new value onto the end of the existing set of values. + * + * @param name The name of the field whose data is contained in `value`. + * @param value The field's value. This can be [`Blob`](https://developer.mozilla.org/en-US/docs/Web/API/Blob) + or [`File`](https://developer.mozilla.org/en-US/docs/Web/API/File). If none of these are specified the value is converted to a string. + * @param fileName The filename reported to the server, when a Blob or File is passed as the second parameter. The default filename for Blob objects is "blob". The default filename for File objects is the file's filename. + */ + append(name: string, value: unknown, fileName?: string): void; + /** + * Returns all the values associated with a given key from within a `FormData` object. + * + * @param {string} name A name of the value you want to retrieve. + * + * @returns An array of `FormDataEntryValue` whose key matches the value passed in the `name` parameter. If the key doesn't exist, the method returns an empty list. + */ + getAll(name: string): FormDataEntryValue[]; + /** + * Returns an [`iterator`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols) allowing to go through the `FormData` key/value pairs. + * The key of each pair is a string; the value is a [`FormDataValue`](https://developer.mozilla.org/en-US/docs/Web/API/FormDataEntryValue). 
+ */ + entries(): Generator<[string, FormDataEntryValue]>; + /** + * An alias for FormDataLike#entries() + */ + [Symbol.iterator](): Generator<[string, FormDataEntryValue]>; + readonly [Symbol.toStringTag]: string; +} diff --git a/node_modules/form-data-encoder/@type/index.d.ts b/node_modules/form-data-encoder/@type/index.d.ts new file mode 100644 index 0000000..79901ac --- /dev/null +++ b/node_modules/form-data-encoder/@type/index.d.ts @@ -0,0 +1,5 @@ +export * from "./FormDataEncoder"; +export * from "./FileLike"; +export * from "./FormDataLike"; +export * from "./util/isFileLike"; +export * from "./util/isFormData"; diff --git a/node_modules/form-data-encoder/@type/util/createBoundary.d.ts b/node_modules/form-data-encoder/@type/util/createBoundary.d.ts new file mode 100644 index 0000000..4f10906 --- /dev/null +++ b/node_modules/form-data-encoder/@type/util/createBoundary.d.ts @@ -0,0 +1,13 @@ +/** + * Generates a boundary string for FormData encoder. + * + * @api private + * + * ```js + * import createBoundary from "./util/createBoundary" + * + * createBoundary() // -> n2vw38xdagaq6lrv + * ``` + */ +declare function createBoundary(): string; +export default createBoundary; diff --git a/node_modules/form-data-encoder/@type/util/escapeName.d.ts b/node_modules/form-data-encoder/@type/util/escapeName.d.ts new file mode 100644 index 0000000..8f1e782 --- /dev/null +++ b/node_modules/form-data-encoder/@type/util/escapeName.d.ts @@ -0,0 +1,11 @@ +/** + * Escape fieldname following the spec requirements. + * + * See: https://html.spec.whatwg.org/multipage/form-control-infrastructure.html#multipart-form-data + * + * @param name A fieldname to escape + * + * @api private + */ +declare const escapeName: (name: unknown) => string; +export default escapeName; diff --git a/node_modules/form-data-encoder/@type/util/isFileLike.d.ts b/node_modules/form-data-encoder/@type/util/isFileLike.d.ts new file mode 100644 index 0000000..26c128e --- /dev/null +++ b/node_modules/form-data-encoder/@type/util/isFileLike.d.ts @@ -0,0 +1,28 @@ +import { FileLike } from "../FileLike"; +/** + * Check if given object is `File`. + * + * Note that this function will return `false` for Blob, because the FormDataEncoder expects FormData to return File when a value is binary data. + * + * @param value an object to test + * + * @api public + * + * This function will return `true` for FileAPI compatible `File` objects: + * + * ``` + * import {isFileLike} from "form-data-encoder" + * + * isFileLike(new File(["Content"], "file.txt")) // -> true + * ``` + * + * However, if you pass a Node.js `Buffer` or `ReadStream`, it will return `false`: + * + * ```js + * import {isFileLike} from "form-data-encoder" + * + * isFileLike(Buffer.from("Content")) // -> false + * isFileLike(fs.createReadStream("path/to/a/file.txt")) // -> false + * ``` + */ +export declare const isFileLike: (value?: unknown) => value is FileLike; diff --git a/node_modules/form-data-encoder/@type/util/isFormData.d.ts b/node_modules/form-data-encoder/@type/util/isFormData.d.ts new file mode 100644 index 0000000..76f78d9 --- /dev/null +++ b/node_modules/form-data-encoder/@type/util/isFormData.d.ts @@ -0,0 +1,15 @@ +import { FormDataLike } from "../FormDataLike"; +/** + * Check if given object is FormData + * + * @param value an object to test + */ +export declare const isFormData: (value?: unknown) => value is FormDataLike; +/** + * Check if given object is FormData + * + * @param value an object to test + * + * @deprecated use `isFormData` instead. 
+ */ +export declare const isFormDataLike: (value?: unknown) => value is FormDataLike; diff --git a/node_modules/form-data-encoder/@type/util/isFunction.d.ts b/node_modules/form-data-encoder/@type/util/isFunction.d.ts new file mode 100644 index 0000000..23caa34 --- /dev/null +++ b/node_modules/form-data-encoder/@type/util/isFunction.d.ts @@ -0,0 +1,7 @@ +/** + * Checks if given value is a function. + * + * @api private + */ +declare const isFunction: (value: unknown) => value is Function; +export default isFunction; diff --git a/node_modules/form-data-encoder/@type/util/isPlainObject.d.ts b/node_modules/form-data-encoder/@type/util/isPlainObject.d.ts new file mode 100644 index 0000000..531ad97 --- /dev/null +++ b/node_modules/form-data-encoder/@type/util/isPlainObject.d.ts @@ -0,0 +1,2 @@ +declare function isPlainObject(value: unknown): value is object; +export default isPlainObject; diff --git a/node_modules/form-data-encoder/@type/util/normalizeValue.d.ts b/node_modules/form-data-encoder/@type/util/normalizeValue.d.ts new file mode 100644 index 0000000..b63d2c1 --- /dev/null +++ b/node_modules/form-data-encoder/@type/util/normalizeValue.d.ts @@ -0,0 +1,11 @@ +/** + * Normalize non-File value following the spec requirements. + * + * See: https://html.spec.whatwg.org/multipage/form-control-infrastructure.html#multipart-form-data + * + * @param value A value to normalize + * + * @api private + */ +declare const normalizeValue: (value: unknown) => string; +export default normalizeValue; diff --git a/node_modules/form-data-encoder/lib/cjs/FileLike.js b/node_modules/form-data-encoder/lib/cjs/FileLike.js new file mode 100644 index 0000000..c8ad2e5 --- /dev/null +++ b/node_modules/form-data-encoder/lib/cjs/FileLike.js @@ -0,0 +1,2 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/node_modules/form-data-encoder/lib/cjs/FormDataEncoder.js b/node_modules/form-data-encoder/lib/cjs/FormDataEncoder.js new file mode 100644 index 0000000..03d33c4 --- /dev/null +++ b/node_modules/form-data-encoder/lib/cjs/FormDataEncoder.js @@ -0,0 +1,126 @@ +"use strict"; +var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; +}; +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +var _FormDataEncoder_instances, _FormDataEncoder_CRLF, _FormDataEncoder_CRLF_BYTES, _FormDataEncoder_CRLF_BYTES_LENGTH, _FormDataEncoder_DASHES, _FormDataEncoder_encoder, _FormDataEncoder_footer, _FormDataEncoder_form, _FormDataEncoder_options, _FormDataEncoder_getFieldHeader; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Encoder = exports.FormDataEncoder = void 0; +const createBoundary_1 = __importDefault(require("./util/createBoundary")); +const isPlainObject_1 = __importDefault(require("./util/isPlainObject")); +const normalizeValue_1 = __importDefault(require("./util/normalizeValue")); +const escapeName_1 = __importDefault(require("./util/escapeName")); +const isFileLike_1 = require("./util/isFileLike"); +const isFormData_1 = require("./util/isFormData"); +const defaultOptions = { + enableAdditionalHeaders: false +}; +class FormDataEncoder { + constructor(form, boundaryOrOptions, options) { + _FormDataEncoder_instances.add(this); + _FormDataEncoder_CRLF.set(this, "\r\n"); + _FormDataEncoder_CRLF_BYTES.set(this, void 0); + _FormDataEncoder_CRLF_BYTES_LENGTH.set(this, void 0); + _FormDataEncoder_DASHES.set(this, "-".repeat(2)); + _FormDataEncoder_encoder.set(this, new TextEncoder()); + _FormDataEncoder_footer.set(this, void 0); + _FormDataEncoder_form.set(this, void 0); + _FormDataEncoder_options.set(this, void 0); + if (!(0, isFormData_1.isFormData)(form)) { + throw new TypeError("Expected first argument to be a FormData instance."); + } + let boundary; + if ((0, isPlainObject_1.default)(boundaryOrOptions)) { + options = boundaryOrOptions; + } + else { + boundary = boundaryOrOptions; + } + if (!boundary) { + boundary = (0, createBoundary_1.default)(); + } + if (typeof boundary !== "string") { + throw new TypeError("Expected boundary argument to be a string."); + } + if (options && !(0, isPlainObject_1.default)(options)) { + throw new TypeError("Expected options argument to be an object."); + } + __classPrivateFieldSet(this, _FormDataEncoder_form, form, "f"); + __classPrivateFieldSet(this, _FormDataEncoder_options, { ...defaultOptions, ...options }, "f"); + __classPrivateFieldSet(this, _FormDataEncoder_CRLF_BYTES, __classPrivateFieldGet(this, _FormDataEncoder_encoder, "f").encode(__classPrivateFieldGet(this, _FormDataEncoder_CRLF, "f")), "f"); + __classPrivateFieldSet(this, _FormDataEncoder_CRLF_BYTES_LENGTH, __classPrivateFieldGet(this, _FormDataEncoder_CRLF_BYTES, "f").byteLength, "f"); + this.boundary = `form-data-boundary-${boundary}`; + this.contentType = `multipart/form-data; boundary=${this.boundary}`; + __classPrivateFieldSet(this, _FormDataEncoder_footer, __classPrivateFieldGet(this, _FormDataEncoder_encoder, "f").encode(`${__classPrivateFieldGet(this, _FormDataEncoder_DASHES, "f")}${this.boundary}${__classPrivateFieldGet(this, _FormDataEncoder_DASHES, "f")}${__classPrivateFieldGet(this, _FormDataEncoder_CRLF, "f").repeat(2)}`), "f"); + this.contentLength = String(this.getContentLength()); + this.headers = Object.freeze({ + "Content-Type": this.contentType, + "Content-Length": this.contentLength + }); + Object.defineProperties(this, { + boundary: { writable: false, configurable: false }, + contentType: { writable: false, configurable: false }, + contentLength: { writable: false, configurable: false }, + headers: { writable: false, configurable: false } + }); + } + getContentLength() { + let length = 0; + for (const [name, raw] of __classPrivateFieldGet(this, _FormDataEncoder_form, "f")) { + const value = (0, 
isFileLike_1.isFileLike)(raw) ? raw : __classPrivateFieldGet(this, _FormDataEncoder_encoder, "f").encode((0, normalizeValue_1.default)(raw)); + length += __classPrivateFieldGet(this, _FormDataEncoder_instances, "m", _FormDataEncoder_getFieldHeader).call(this, name, value).byteLength; + length += (0, isFileLike_1.isFileLike)(value) ? value.size : value.byteLength; + length += __classPrivateFieldGet(this, _FormDataEncoder_CRLF_BYTES_LENGTH, "f"); + } + return length + __classPrivateFieldGet(this, _FormDataEncoder_footer, "f").byteLength; + } + *values() { + for (const [name, raw] of __classPrivateFieldGet(this, _FormDataEncoder_form, "f").entries()) { + const value = (0, isFileLike_1.isFileLike)(raw) ? raw : __classPrivateFieldGet(this, _FormDataEncoder_encoder, "f").encode((0, normalizeValue_1.default)(raw)); + yield __classPrivateFieldGet(this, _FormDataEncoder_instances, "m", _FormDataEncoder_getFieldHeader).call(this, name, value); + yield value; + yield __classPrivateFieldGet(this, _FormDataEncoder_CRLF_BYTES, "f"); + } + yield __classPrivateFieldGet(this, _FormDataEncoder_footer, "f"); + } + async *encode() { + for (const part of this.values()) { + if ((0, isFileLike_1.isFileLike)(part)) { + yield* part.stream(); + } + else { + yield part; + } + } + } + [(_FormDataEncoder_CRLF = new WeakMap(), _FormDataEncoder_CRLF_BYTES = new WeakMap(), _FormDataEncoder_CRLF_BYTES_LENGTH = new WeakMap(), _FormDataEncoder_DASHES = new WeakMap(), _FormDataEncoder_encoder = new WeakMap(), _FormDataEncoder_footer = new WeakMap(), _FormDataEncoder_form = new WeakMap(), _FormDataEncoder_options = new WeakMap(), _FormDataEncoder_instances = new WeakSet(), _FormDataEncoder_getFieldHeader = function _FormDataEncoder_getFieldHeader(name, value) { + let header = ""; + header += `${__classPrivateFieldGet(this, _FormDataEncoder_DASHES, "f")}${this.boundary}${__classPrivateFieldGet(this, _FormDataEncoder_CRLF, "f")}`; + header += `Content-Disposition: form-data; name="${(0, escapeName_1.default)(name)}"`; + if ((0, isFileLike_1.isFileLike)(value)) { + header += `; filename="${(0, escapeName_1.default)(value.name)}"${__classPrivateFieldGet(this, _FormDataEncoder_CRLF, "f")}`; + header += `Content-Type: ${value.type || "application/octet-stream"}`; + } + if (__classPrivateFieldGet(this, _FormDataEncoder_options, "f").enableAdditionalHeaders === true) { + header += `${__classPrivateFieldGet(this, _FormDataEncoder_CRLF, "f")}Content-Length: ${(0, isFileLike_1.isFileLike)(value) ? value.size : value.byteLength}`; + } + return __classPrivateFieldGet(this, _FormDataEncoder_encoder, "f").encode(`${header}${__classPrivateFieldGet(this, _FormDataEncoder_CRLF, "f").repeat(2)}`); + }, Symbol.iterator)]() { + return this.values(); + } + [Symbol.asyncIterator]() { + return this.encode(); + } +} +exports.FormDataEncoder = FormDataEncoder; +exports.Encoder = FormDataEncoder; diff --git a/node_modules/form-data-encoder/lib/cjs/FormDataLike.js b/node_modules/form-data-encoder/lib/cjs/FormDataLike.js new file mode 100644 index 0000000..c8ad2e5 --- /dev/null +++ b/node_modules/form-data-encoder/lib/cjs/FormDataLike.js @@ -0,0 +1,2 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/node_modules/form-data-encoder/lib/cjs/index.js b/node_modules/form-data-encoder/lib/cjs/index.js new file mode 100644 index 0000000..270e0b5 --- /dev/null +++ b/node_modules/form-data-encoder/lib/cjs/index.js @@ -0,0 +1,17 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +__exportStar(require("./FormDataEncoder"), exports); +__exportStar(require("./FileLike"), exports); +__exportStar(require("./FormDataLike"), exports); +__exportStar(require("./util/isFileLike"), exports); +__exportStar(require("./util/isFormData"), exports); diff --git a/node_modules/form-data-encoder/lib/cjs/package.json b/node_modules/form-data-encoder/lib/cjs/package.json new file mode 100644 index 0000000..5bbefff --- /dev/null +++ b/node_modules/form-data-encoder/lib/cjs/package.json @@ -0,0 +1,3 @@ +{ + "type": "commonjs" +} diff --git a/node_modules/form-data-encoder/lib/cjs/util/createBoundary.js b/node_modules/form-data-encoder/lib/cjs/util/createBoundary.js new file mode 100644 index 0000000..5d8ada6 --- /dev/null +++ b/node_modules/form-data-encoder/lib/cjs/util/createBoundary.js @@ -0,0 +1,12 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const alphabet = "abcdefghijklmnopqrstuvwxyz0123456789"; +function createBoundary() { + let size = 16; + let res = ""; + while (size--) { + res += alphabet[(Math.random() * alphabet.length) << 0]; + } + return res; +} +exports.default = createBoundary; diff --git a/node_modules/form-data-encoder/lib/cjs/util/escapeName.js b/node_modules/form-data-encoder/lib/cjs/util/escapeName.js new file mode 100644 index 0000000..975a1e3 --- /dev/null +++ b/node_modules/form-data-encoder/lib/cjs/util/escapeName.js @@ -0,0 +1,7 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const escapeName = (name) => String(name) + .replace(/\r/g, "%0D") + .replace(/\n/g, "%0A") + .replace(/"/g, "%22"); +exports.default = escapeName; diff --git a/node_modules/form-data-encoder/lib/cjs/util/isFileLike.js b/node_modules/form-data-encoder/lib/cjs/util/isFileLike.js new file mode 100644 index 0000000..4dde025 --- /dev/null +++ b/node_modules/form-data-encoder/lib/cjs/util/isFileLike.js @@ -0,0 +1,16 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.isFileLike = void 0; +const isFunction_1 = __importDefault(require("./isFunction")); +const isFileLike = (value) => Boolean(value + && typeof value === "object" + && (0, isFunction_1.default)(value.constructor) + && value[Symbol.toStringTag] === "File" + && (0, isFunction_1.default)(value.stream) + && value.name != null + && value.size != null + && value.lastModified != null); +exports.isFileLike = isFileLike; diff --git a/node_modules/form-data-encoder/lib/cjs/util/isFormData.js b/node_modules/form-data-encoder/lib/cjs/util/isFormData.js new file mode 100644 index 0000000..160a4d2 --- /dev/null +++ b/node_modules/form-data-encoder/lib/cjs/util/isFormData.js @@ -0,0 +1,16 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.isFormDataLike = exports.isFormData = void 0; +const isFunction_1 = __importDefault(require("./isFunction")); +const isFormData = (value) => Boolean(value + && (0, isFunction_1.default)(value.constructor) + && value[Symbol.toStringTag] === "FormData" + && (0, isFunction_1.default)(value.append) + && (0, isFunction_1.default)(value.getAll) + && (0, isFunction_1.default)(value.entries) + && (0, isFunction_1.default)(value[Symbol.iterator])); +exports.isFormData = isFormData; +exports.isFormDataLike = exports.isFormData; diff --git a/node_modules/form-data-encoder/lib/cjs/util/isFunction.js b/node_modules/form-data-encoder/lib/cjs/util/isFunction.js new file mode 100644 index 0000000..05d5e1b --- /dev/null +++ b/node_modules/form-data-encoder/lib/cjs/util/isFunction.js @@ -0,0 +1,4 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const isFunction = (value) => (typeof value === "function"); +exports.default = isFunction; diff --git a/node_modules/form-data-encoder/lib/cjs/util/isPlainObject.js b/node_modules/form-data-encoder/lib/cjs/util/isPlainObject.js new file mode 100644 index 0000000..ec54623 --- /dev/null +++ b/node_modules/form-data-encoder/lib/cjs/util/isPlainObject.js @@ -0,0 +1,15 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const getType = (value) => (Object.prototype.toString.call(value).slice(8, -1).toLowerCase()); +function isPlainObject(value) { + if (getType(value) !== "object") { + return false; + } + const pp = Object.getPrototypeOf(value); + if (pp === null || pp === undefined) { + return true; + } + const Ctor = pp.constructor && pp.constructor.toString(); + return Ctor === Object.toString(); +} +exports.default = isPlainObject; diff --git a/node_modules/form-data-encoder/lib/cjs/util/normalizeValue.js b/node_modules/form-data-encoder/lib/cjs/util/normalizeValue.js new file mode 100644 index 0000000..3e0eeb4 --- /dev/null +++ b/node_modules/form-data-encoder/lib/cjs/util/normalizeValue.js @@ -0,0 +1,11 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const normalizeValue = (value) => String(value) + .replace(/\r|\n/g, (match, i, str) => { + if ((match === "\r" && str[i + 1] !== "\n") + || (match === "\n" && str[i - 1] !== "\r")) { + return "\r\n"; + } + return match; +}); +exports.default = normalizeValue; diff --git a/node_modules/form-data-encoder/lib/esm/FileLike.js b/node_modules/form-data-encoder/lib/esm/FileLike.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/node_modules/form-data-encoder/lib/esm/FileLike.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/form-data-encoder/lib/esm/FormDataEncoder.js b/node_modules/form-data-encoder/lib/esm/FormDataEncoder.js new file mode 100644 index 0000000..bdfa262 --- /dev/null +++ b/node_modules/form-data-encoder/lib/esm/FormDataEncoder.js @@ -0,0 +1,119 @@ +var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? 
f.value = value : state.set(receiver, value)), value; +}; +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +}; +var _FormDataEncoder_instances, _FormDataEncoder_CRLF, _FormDataEncoder_CRLF_BYTES, _FormDataEncoder_CRLF_BYTES_LENGTH, _FormDataEncoder_DASHES, _FormDataEncoder_encoder, _FormDataEncoder_footer, _FormDataEncoder_form, _FormDataEncoder_options, _FormDataEncoder_getFieldHeader; +import createBoundary from "./util/createBoundary.js"; +import isPlainObject from "./util/isPlainObject.js"; +import normalize from "./util/normalizeValue.js"; +import escape from "./util/escapeName.js"; +import { isFileLike } from "./util/isFileLike.js"; +import { isFormData } from "./util/isFormData.js"; +const defaultOptions = { + enableAdditionalHeaders: false +}; +export class FormDataEncoder { + constructor(form, boundaryOrOptions, options) { + _FormDataEncoder_instances.add(this); + _FormDataEncoder_CRLF.set(this, "\r\n"); + _FormDataEncoder_CRLF_BYTES.set(this, void 0); + _FormDataEncoder_CRLF_BYTES_LENGTH.set(this, void 0); + _FormDataEncoder_DASHES.set(this, "-".repeat(2)); + _FormDataEncoder_encoder.set(this, new TextEncoder()); + _FormDataEncoder_footer.set(this, void 0); + _FormDataEncoder_form.set(this, void 0); + _FormDataEncoder_options.set(this, void 0); + if (!isFormData(form)) { + throw new TypeError("Expected first argument to be a FormData instance."); + } + let boundary; + if (isPlainObject(boundaryOrOptions)) { + options = boundaryOrOptions; + } + else { + boundary = boundaryOrOptions; + } + if (!boundary) { + boundary = createBoundary(); + } + if (typeof boundary !== "string") { + throw new TypeError("Expected boundary argument to be a string."); + } + if (options && !isPlainObject(options)) { + throw new TypeError("Expected options argument to be an object."); + } + __classPrivateFieldSet(this, _FormDataEncoder_form, form, "f"); + __classPrivateFieldSet(this, _FormDataEncoder_options, { ...defaultOptions, ...options }, "f"); + __classPrivateFieldSet(this, _FormDataEncoder_CRLF_BYTES, __classPrivateFieldGet(this, _FormDataEncoder_encoder, "f").encode(__classPrivateFieldGet(this, _FormDataEncoder_CRLF, "f")), "f"); + __classPrivateFieldSet(this, _FormDataEncoder_CRLF_BYTES_LENGTH, __classPrivateFieldGet(this, _FormDataEncoder_CRLF_BYTES, "f").byteLength, "f"); + this.boundary = `form-data-boundary-${boundary}`; + this.contentType = `multipart/form-data; boundary=${this.boundary}`; + __classPrivateFieldSet(this, _FormDataEncoder_footer, __classPrivateFieldGet(this, _FormDataEncoder_encoder, "f").encode(`${__classPrivateFieldGet(this, _FormDataEncoder_DASHES, "f")}${this.boundary}${__classPrivateFieldGet(this, _FormDataEncoder_DASHES, "f")}${__classPrivateFieldGet(this, _FormDataEncoder_CRLF, "f").repeat(2)}`), "f"); + this.contentLength = String(this.getContentLength()); + this.headers = Object.freeze({ + "Content-Type": this.contentType, + "Content-Length": this.contentLength + }); + Object.defineProperties(this, { + boundary: { writable: false, configurable: false }, + contentType: { writable: false, configurable: false }, + contentLength: { writable: 
false, configurable: false }, + headers: { writable: false, configurable: false } + }); + } + getContentLength() { + let length = 0; + for (const [name, raw] of __classPrivateFieldGet(this, _FormDataEncoder_form, "f")) { + const value = isFileLike(raw) ? raw : __classPrivateFieldGet(this, _FormDataEncoder_encoder, "f").encode(normalize(raw)); + length += __classPrivateFieldGet(this, _FormDataEncoder_instances, "m", _FormDataEncoder_getFieldHeader).call(this, name, value).byteLength; + length += isFileLike(value) ? value.size : value.byteLength; + length += __classPrivateFieldGet(this, _FormDataEncoder_CRLF_BYTES_LENGTH, "f"); + } + return length + __classPrivateFieldGet(this, _FormDataEncoder_footer, "f").byteLength; + } + *values() { + for (const [name, raw] of __classPrivateFieldGet(this, _FormDataEncoder_form, "f").entries()) { + const value = isFileLike(raw) ? raw : __classPrivateFieldGet(this, _FormDataEncoder_encoder, "f").encode(normalize(raw)); + yield __classPrivateFieldGet(this, _FormDataEncoder_instances, "m", _FormDataEncoder_getFieldHeader).call(this, name, value); + yield value; + yield __classPrivateFieldGet(this, _FormDataEncoder_CRLF_BYTES, "f"); + } + yield __classPrivateFieldGet(this, _FormDataEncoder_footer, "f"); + } + async *encode() { + for (const part of this.values()) { + if (isFileLike(part)) { + yield* part.stream(); + } + else { + yield part; + } + } + } + [(_FormDataEncoder_CRLF = new WeakMap(), _FormDataEncoder_CRLF_BYTES = new WeakMap(), _FormDataEncoder_CRLF_BYTES_LENGTH = new WeakMap(), _FormDataEncoder_DASHES = new WeakMap(), _FormDataEncoder_encoder = new WeakMap(), _FormDataEncoder_footer = new WeakMap(), _FormDataEncoder_form = new WeakMap(), _FormDataEncoder_options = new WeakMap(), _FormDataEncoder_instances = new WeakSet(), _FormDataEncoder_getFieldHeader = function _FormDataEncoder_getFieldHeader(name, value) { + let header = ""; + header += `${__classPrivateFieldGet(this, _FormDataEncoder_DASHES, "f")}${this.boundary}${__classPrivateFieldGet(this, _FormDataEncoder_CRLF, "f")}`; + header += `Content-Disposition: form-data; name="${escape(name)}"`; + if (isFileLike(value)) { + header += `; filename="${escape(value.name)}"${__classPrivateFieldGet(this, _FormDataEncoder_CRLF, "f")}`; + header += `Content-Type: ${value.type || "application/octet-stream"}`; + } + if (__classPrivateFieldGet(this, _FormDataEncoder_options, "f").enableAdditionalHeaders === true) { + header += `${__classPrivateFieldGet(this, _FormDataEncoder_CRLF, "f")}Content-Length: ${isFileLike(value) ? 
value.size : value.byteLength}`; + } + return __classPrivateFieldGet(this, _FormDataEncoder_encoder, "f").encode(`${header}${__classPrivateFieldGet(this, _FormDataEncoder_CRLF, "f").repeat(2)}`); + }, Symbol.iterator)]() { + return this.values(); + } + [Symbol.asyncIterator]() { + return this.encode(); + } +} +export const Encoder = FormDataEncoder; diff --git a/node_modules/form-data-encoder/lib/esm/FormDataLike.js b/node_modules/form-data-encoder/lib/esm/FormDataLike.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/node_modules/form-data-encoder/lib/esm/FormDataLike.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/form-data-encoder/lib/esm/index.js b/node_modules/form-data-encoder/lib/esm/index.js new file mode 100644 index 0000000..2a286c9 --- /dev/null +++ b/node_modules/form-data-encoder/lib/esm/index.js @@ -0,0 +1,5 @@ +export * from "./FormDataEncoder.js"; +export * from "./FileLike.js"; +export * from "./FormDataLike.js"; +export * from "./util/isFileLike.js"; +export * from "./util/isFormData.js"; diff --git a/node_modules/form-data-encoder/lib/esm/package.json b/node_modules/form-data-encoder/lib/esm/package.json new file mode 100644 index 0000000..3dbc1ca --- /dev/null +++ b/node_modules/form-data-encoder/lib/esm/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} diff --git a/node_modules/form-data-encoder/lib/esm/util/createBoundary.js b/node_modules/form-data-encoder/lib/esm/util/createBoundary.js new file mode 100644 index 0000000..6af4a96 --- /dev/null +++ b/node_modules/form-data-encoder/lib/esm/util/createBoundary.js @@ -0,0 +1,10 @@ +const alphabet = "abcdefghijklmnopqrstuvwxyz0123456789"; +function createBoundary() { + let size = 16; + let res = ""; + while (size--) { + res += alphabet[(Math.random() * alphabet.length) << 0]; + } + return res; +} +export default createBoundary; diff --git a/node_modules/form-data-encoder/lib/esm/util/escapeName.js b/node_modules/form-data-encoder/lib/esm/util/escapeName.js new file mode 100644 index 0000000..aac6066 --- /dev/null +++ b/node_modules/form-data-encoder/lib/esm/util/escapeName.js @@ -0,0 +1,5 @@ +const escapeName = (name) => String(name) + .replace(/\r/g, "%0D") + .replace(/\n/g, "%0A") + .replace(/"/g, "%22"); +export default escapeName; diff --git a/node_modules/form-data-encoder/lib/esm/util/isFileLike.js b/node_modules/form-data-encoder/lib/esm/util/isFileLike.js new file mode 100644 index 0000000..e38cf7b --- /dev/null +++ b/node_modules/form-data-encoder/lib/esm/util/isFileLike.js @@ -0,0 +1,9 @@ +import isFunction from "./isFunction.js"; +export const isFileLike = (value) => Boolean(value + && typeof value === "object" + && isFunction(value.constructor) + && value[Symbol.toStringTag] === "File" + && isFunction(value.stream) + && value.name != null + && value.size != null + && value.lastModified != null); diff --git a/node_modules/form-data-encoder/lib/esm/util/isFormData.js b/node_modules/form-data-encoder/lib/esm/util/isFormData.js new file mode 100644 index 0000000..04d1a7c --- /dev/null +++ b/node_modules/form-data-encoder/lib/esm/util/isFormData.js @@ -0,0 +1,9 @@ +import isFunction from "./isFunction.js"; +export const isFormData = (value) => Boolean(value + && isFunction(value.constructor) + && value[Symbol.toStringTag] === "FormData" + && isFunction(value.append) + && isFunction(value.getAll) + && isFunction(value.entries) + && isFunction(value[Symbol.iterator])); +export const isFormDataLike = isFormData; diff --git a/node_modules/form-data-encoder/lib/esm/util/isFunction.js 
b/node_modules/form-data-encoder/lib/esm/util/isFunction.js new file mode 100644 index 0000000..19ccca7 --- /dev/null +++ b/node_modules/form-data-encoder/lib/esm/util/isFunction.js @@ -0,0 +1,2 @@ +const isFunction = (value) => (typeof value === "function"); +export default isFunction; diff --git a/node_modules/form-data-encoder/lib/esm/util/isPlainObject.js b/node_modules/form-data-encoder/lib/esm/util/isPlainObject.js new file mode 100644 index 0000000..8147580 --- /dev/null +++ b/node_modules/form-data-encoder/lib/esm/util/isPlainObject.js @@ -0,0 +1,13 @@ +const getType = (value) => (Object.prototype.toString.call(value).slice(8, -1).toLowerCase()); +function isPlainObject(value) { + if (getType(value) !== "object") { + return false; + } + const pp = Object.getPrototypeOf(value); + if (pp === null || pp === undefined) { + return true; + } + const Ctor = pp.constructor && pp.constructor.toString(); + return Ctor === Object.toString(); +} +export default isPlainObject; diff --git a/node_modules/form-data-encoder/lib/esm/util/normalizeValue.js b/node_modules/form-data-encoder/lib/esm/util/normalizeValue.js new file mode 100644 index 0000000..d675188 --- /dev/null +++ b/node_modules/form-data-encoder/lib/esm/util/normalizeValue.js @@ -0,0 +1,9 @@ +const normalizeValue = (value) => String(value) + .replace(/\r|\n/g, (match, i, str) => { + if ((match === "\r" && str[i + 1] !== "\n") + || (match === "\n" && str[i - 1] !== "\r")) { + return "\r\n"; + } + return match; +}); +export default normalizeValue; diff --git a/node_modules/form-data-encoder/license b/node_modules/form-data-encoder/license new file mode 100644 index 0000000..0c8fa88 --- /dev/null +++ b/node_modules/form-data-encoder/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2021-present Nick K. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
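For reference, the utility modules added above implement plain duck-typing: `isFormData()` and `isFileLike()` inspect `Symbol.toStringTag` and a handful of methods/properties rather than testing for a specific class, which is what lets the encoder accept any spec-compatible `FormData` implementation. A minimal sketch of how these exported helpers behave — assuming the package is imported as `form-data-encoder` and that a spec-compatible implementation such as `formdata-node` (the one used in the readme examples further down) is installed:

```js
// Sketch only - illustrates the checks in lib/*/util/isFileLike.js and
// lib/*/util/isFormData.js shown above.
import {isFileLike, isFormData} from "form-data-encoder"
import {FormData, File} from "formdata-node"

const form = new FormData()
const file = new File(["content"], "file.txt")

form.set("file", file)

// isFormData() requires Symbol.toStringTag === "FormData" plus
// append/getAll/entries/Symbol.iterator methods.
console.log(isFormData(form))          // -> true
console.log(isFormData({append() {}})) // -> false

// isFileLike() requires Symbol.toStringTag === "File", a stream() method
// and name/size/lastModified properties, so a Node.js Buffer is rejected.
console.log(isFileLike(file))                   // -> true
console.log(isFileLike(Buffer.from("content"))) // -> false
```

Because the checks are structural, swapping `formdata-node` for another spec-compatible implementation (e.g. `formdata-polyfill`) should not change the results.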
diff --git a/node_modules/form-data-encoder/package.json b/node_modules/form-data-encoder/package.json new file mode 100644 index 0000000..898b8b2 --- /dev/null +++ b/node_modules/form-data-encoder/package.json @@ -0,0 +1,63 @@ +{ + "name": "form-data-encoder", + "version": "1.7.2", + "description": "Encode FormData content into the multipart/form-data format", + "repository": "octet-stream/form-data-encoder", + "sideEffects": false, + "keywords": [ + "form-data", + "encoder", + "multipart", + "files-upload", + "async-iterator", + "spec-compatible", + "form" + ], + "main": "./lib/cjs/index.js", + "module": "./lib/esm/index.js", + "types": "./@type/index.d.ts", + "exports": { + "import": "./lib/esm/index.js", + "require": "./lib/cjs/index.js" + }, + "scripts": { + "eslint": "eslint lib/**/*.ts", + "staged": "lint-staged", + "coverage": "c8 npm test", + "ci": "c8 npm test && c8 report --reporter=json", + "build:esm": "ttsc --project tsconfig.esm.json", + "build:cjs": "ttsc --project tsconfig.cjs.json", + "build:types": "ttsc --project tsconfig.d.ts.json", + "build": "npm run build:esm && npm run build:cjs && npm run build:types", + "test": "ava --fail-fast", + "cleanup": "npx rimraf @type \"lib/**/*.js\"", + "prepare": "npm run cleanup && npm run build", + "_postinstall": "husky install", + "prepublishOnly": "pinst --disable", + "postpublish": "pinst --enable" + }, + "author": "Nick K.", + "license": "MIT", + "devDependencies": { + "@octetstream/eslint-config": "5.0.0", + "@types/mime-types": "2.1.1", + "@types/node": "17.0.21", + "@typescript-eslint/eslint-plugin": "4.33.0", + "@typescript-eslint/parser": "4.33.0", + "@zoltu/typescript-transformer-append-js-extension": "1.0.1", + "ava": "4.1.0", + "c8": "7.11.0", + "eslint": "7.32.0", + "eslint-config-airbnb-typescript": "12.3.1", + "eslint-plugin-ava": "12.0.0", + "eslint-plugin-jsx-a11y": "6.4.1", + "eslint-plugin-react": "7.26.1", + "formdata-node": "4.3.2", + "husky": "7.0.4", + "lint-staged": "12.3.7", + "pinst": "2.1.6", + "ts-node": "10.7.0", + "ttypescript": "1.5.13", + "typescript": "4.4.4" + } +} diff --git a/node_modules/form-data-encoder/readme.md b/node_modules/form-data-encoder/readme.md new file mode 100644 index 0000000..8995d6e --- /dev/null +++ b/node_modules/form-data-encoder/readme.md @@ -0,0 +1,366 @@ +# form-data-encoder + +Encode `FormData` content into the `multipart/form-data` format + +[![Code Coverage](https://codecov.io/github/octet-stream/form-data-encoder/coverage.svg?branch=master)](https://codecov.io/github/octet-stream/form-data-encoder?branch=master) +[![CI](https://github.com/octet-stream/form-data-encoder/workflows/CI/badge.svg)](https://github.com/octet-stream/form-data-encoder/actions/workflows/ci.yml) +[![ESLint](https://github.com/octet-stream/form-data-encoder/workflows/ESLint/badge.svg)](https://github.com/octet-stream/form-data-encoder/actions/workflows/eslint.yml) + +## Installation + +You can install this package using npm: + +```sh +npm install form-data-encoder +``` + +Or yarn: + +```sh +yarn add form-data-encoder +``` + +Or pnpm: + +```sh +pnpm add form-data-encoder +``` + +## ESM/CJS support + +This package is targeting ESM and CJS for backwards compatibility reasons and smoothen transition period while you convert your projects to ESM only. Note that CJS support will be removed as [Node.js v12 will reach its EOL](https://github.com/nodejs/release#release-schedule). This change will be released as major version update, so you won't miss it. + +## Usage + +1. 
To start the encoding process, you need to create a new Encoder instance with the FormData you want to encode: + +```js +import {Readable} from "stream" + +import {FormData, File} from "formdata-node" +import {FormDataEncoder} from "form-data-encoder" + +import fetch from "node-fetch" + +const form = new FormData() + +form.set("greeting", "Hello, World!") +form.set("file", new File(["On Soviet Moon landscape see binoculars through YOU"], "file.txt")) + +const encoder = new FormDataEncoder(form) + +const options = { + method: "post", + + // Set request headers provided by the Encoder. + // The `headers` property has `Content-Type` and `Content-Length` headers. + headers: encoder.headers, + + // Create a Readable stream from the Encoder. + // You can omit usage of `Readable.from` for HTTP clients whose support async iterables in request body. + // The Encoder will yield FormData content portions encoded into the multipart/form-data format as node-fetch consumes the stream. + body: Readable.from(encoder.encode()) // or just Readable.from(encoder) +} + +const response = await fetch("https://httpbin.org/post", options) + +console.log(await response.json()) +``` + +2. Encoder support different spec-compatible FormData implementations. Let's try it with [`formdata-polyfill`](https://github.com/jimmywarting/FormData): + +```js +import {Readable} from "stream" + +import {FormDataEncoder} from "form-data-encoder" +import {FormData} from "formdata-polyfill/esm-min.js" +import {File} from "fetch-blob" // v3 + +const form = new FormData() + +form.set("field", "Some value") +form.set("file", new File(["File content goes here"], "file.txt")) + +const encoder = new FormDataEncoder(form) + +const options = { + method: "post", + headers: encoder.headers, + body: Readable.from(encoder) +} + +await fetch("https://httpbin.org/post", options) +``` + +3. Because the Encoder is iterable (it has both Symbol.asyncIterator and Symbol.iterator methods), you can use it with different targets. Let's say you want to convert FormData content into `Blob`, for that you can write a function like this: + +```js +import {Readable} from "stream" + +import {FormDataEncoder} from "form-data-encoder" + +import {FormData, File, Blob, fileFromPath} from "formdata-node" + +import fetch from "node-fetch" + +const form = new FormData() + +form.set("field", "Just a random string") +form.set("file", new File(["Using files is class amazing"], "file.txt")) +form.set("fileFromPath", await fileFromPath("path/to/a/file.txt")) + +// Note 1: When using with native Blob or fetch-blob@2 you might also need to generate boundary string for your FormDataEncoder instance +// because Blob will lowercase value of the `type` option and default boundary generator produces a string with both lower and upper cased alphabetical characters. Math.random() should be enough to fix this: +// const encoder = new FormDataEncoder(form, String(Math.random())) +const encoder = new FormDataEncoder(form) + +const options = { + method: "post", + + // Note 2: To use this approach with fetch-blob@2 you probably gonna need to convert the encoder parts output to an array first: + // new Blob([...encoder], {type: encoder.contentType}) + body: new Blob(encoder, {type: encoder.contentType}) +} + +const response = await fetch("https://httpbin.org/post", options) + +console.log(await response.json()) +``` + +4. 
Here's FormData to Blob conversion with async-iterator approach: + +```js +import {FormData} from "formdata-polyfill/esm-min.js" +import {blobFrom} from "fetch-blob/from.js" +import {FormDataEncoder} from "form-data-encoder" + +import Blob from "fetch-blob" +import fetch from "node-fetch" + +// This approach may require much more RAM compared to the previous one, but it works too. +async function toBlob(form) { + const encoder = new Encoder(form) + const chunks = [] + + for await (const chunk of encoder) { + chunks.push(chunk) + } + + return new Blob(chunks, {type: encoder.contentType}) +} + +const form = new FormData() + +form.set("name", "John Doe") +form.set("avatar", await blobFrom("path/to/an/avatar.png"), "avatar.png") + +const options = { + method: "post", + body: await toBlob(form) +} + +await fetch("https://httpbin.org/post", options) +``` + +5. Another way to convert FormData parts to blob using `form-data-encoder` is making a Blob-ish class: + +```js +import {Readable} from "stream" + +import {FormDataEncoder} from "form-data-encoder" +import {FormData} from "formdata-polyfill/esm-min.js" +import {blobFrom} from "fetch-blob/from.js" + +import Blob from "fetch-blob" +import fetch from "node-fetch" + +class BlobDataItem { + constructor(encoder) { + this.#encoder = encoder + this.#size = encoder.headers["Content-Length"] + this.#type = encoder.headers["Content-Type"] + } + + get type() { + return this.#type + } + + get size() { + return this.#size + } + + stream() { + return Readable.from(this.#encoder) + } + + get [Symbol.toStringTag]() { + return "Blob" + } +} + +const form = new FormData() + +form.set("name", "John Doe") +form.set("avatar", await blobFrom("path/to/an/avatar.png"), "avatar.png") + +const encoder = new FormDataEncoder(form) + +// Note that node-fetch@2 performs more strictness tests for Blob objects, so you may need to do extra steps before you set up request body (like, maybe you'll need to instaniate a Blob with BlobDataItem as one of its blobPart) +const blob = new BlobDataItem(enocoder) // or new Blob([new BlobDataItem(enocoder)], {type: encoder.contentType}) + +const options = { + method: "post", + body: blob +} + +await fetch("https://httpbin.org/post", options) +``` + +6. In this example we will pull FormData content into the ReadableStream: + +```js + // This module is only necessary when you targeting Node.js or need web streams that implement Symbol.asyncIterator +import {ReadableStream} from "web-streams-polyfill/ponyfill/es2018" + +import {FormDataEncoder} from "form-data-encoder" +import {FormData} from "formdata-node" + +import fetch from "node-fetch" + +function toReadableStream(encoder) { + const iterator = encoder.encode() + + return new ReadableStream({ + async pull(controller) { + const {value, done} = await iterator.next() + + if (done) { + return controller.close() + } + + controller.enqueue(value) + } + }) +} + +const form = new FormData() + +form.set("field", "My hovercraft is full of eels") + +const encoder = new FormDataEncoder(form) + +const options = { + method: "post", + headers: encoder.headers, + body: toReadableStream(encoder) +} + +// Note that this example requires `fetch` to support Symbol.asyncIterator, which node-fetch lacks of (but will support eventually) +await fetch("https://httpbin.org/post", options) +``` + +7. 
Speaking of async iterables - if HTTP client supports them, you can use encoder like this: + +```js +import {FormDataEncoder} from "form-data-encoder" +import {FormData} from "formdata-node" + +import fetch from "node-fetch" + +const form = new FormData() + +form.set("field", "My hovercraft is full of eels") + +const encoder = new FormDataEncoder(form) + +const options = { + method: "post", + headers: encoder.headers, + body: encoder +} + +await fetch("https://httpbin.org/post", options) +``` + +8. ...And for those client whose supporting form-data-encoder out of the box, the usage will be much, much more simpler: + +```js +import {FormData} from "formdata-node" // Or any other spec-compatible implementation + +import fetch from "node-fetch" + +const form = new FormData() + +form.set("field", "My hovercraft is full of eels") + +const options = { + method: "post", + body: form +} + +// Note that node-fetch does NOT support form-data-encoder +await fetch("https://httpbin.org/post", options) +``` + +## API + +### `class FormDataEncoder` + +##### `constructor(form[, boundary, options]) -> {Encoder}` + + - **{FormDataLike}** form - FormData object to encode. This object must be a spec-compatible FormData implementation. + - **{string}** [boundary] - An optional boundary string that will be used by the encoder. If there's no boundary string is present, Encoder will generate it automatically. + - **{object}** [options] - Encoder options. + - **{boolean}** [options.enableAdditionalHeaders = false] - When enabled, the encoder will emit additional per part headers, such as `Content-Length`. Please note that the web clients do not include these, so when enabled this option might cause an error if `multipart/form-data` does not consider additional headers. + +Creates a multipart/form-data encoder. + +#### Instance properties + +##### `boundary -> {string}` + +Returns boundary string. + +##### `contentType -> {string}` + +Returns Content-Type header. + +##### `contentLength -> {string}` + +Return Content-Length header. + +##### `headers -> {object}` + +Returns headers object with Content-Type and Content-Length header. + +#### Instance methods + +##### `values() -> {Generator}` + +Creates an iterator allowing to go through form-data parts (with metadata). +This method **will not** read the files. + +##### `encode() -> {AsyncGenerator}` + +Creates an async iterator allowing to perform the encoding by portions. +This method **will** also read files. + +##### `[Symbol.iterator]() -> {Generator}` + +An alias for `Encoder#values()` method. + +##### `[Symbol.asyncIterator]() -> {AsyncGenerator}` + +An alias for `Encoder#encode()` method. + +### `isFileLike(value) -> {boolean}` + +Check if a value is File-ish object. + + - **{unknown}** value - a value to test + +### `isFormDataLike(value) -> {boolean}` + +Check if a value is FormData-ish object. 
+ + - **{unknown}** value - a value to test diff --git a/node_modules/form-data/License b/node_modules/form-data/License new file mode 100644 index 0000000..c7ff12a --- /dev/null +++ b/node_modules/form-data/License @@ -0,0 +1,19 @@ +Copyright (c) 2012 Felix Geisendörfer (felix@debuggable.com) and contributors + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in + all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN + THE SOFTWARE. diff --git a/node_modules/form-data/Readme.md b/node_modules/form-data/Readme.md new file mode 100644 index 0000000..b09df6a --- /dev/null +++ b/node_modules/form-data/Readme.md @@ -0,0 +1,358 @@ +# Form-Data [![NPM Module](https://img.shields.io/npm/v/form-data.svg)](https://www.npmjs.com/package/form-data) [![Join the chat at https://gitter.im/form-data/form-data](http://form-data.github.io/images/gitterbadge.svg)](https://gitter.im/form-data/form-data) + +A library to create readable ```"multipart/form-data"``` streams. Can be used to submit forms and file uploads to other web applications. + +The API of this library is inspired by the [XMLHttpRequest-2 FormData Interface][xhr2-fd]. + +[xhr2-fd]: http://dev.w3.org/2006/webapi/XMLHttpRequest-2/Overview.html#the-formdata-interface + +[![Linux Build](https://img.shields.io/travis/form-data/form-data/master.svg?label=linux:6.x-12.x)](https://travis-ci.org/form-data/form-data) +[![MacOS Build](https://img.shields.io/travis/form-data/form-data/master.svg?label=macos:6.x-12.x)](https://travis-ci.org/form-data/form-data) +[![Windows Build](https://img.shields.io/travis/form-data/form-data/master.svg?label=windows:6.x-12.x)](https://travis-ci.org/form-data/form-data) + +[![Coverage Status](https://img.shields.io/coveralls/form-data/form-data/master.svg?label=code+coverage)](https://coveralls.io/github/form-data/form-data?branch=master) +[![Dependency Status](https://img.shields.io/david/form-data/form-data.svg)](https://david-dm.org/form-data/form-data) + +## Install + +``` +npm install --save form-data +``` + +## Usage + +In this example we are constructing a form with 3 fields that contain a string, +a buffer and a file stream. 
+ +``` javascript +var FormData = require('form-data'); +var fs = require('fs'); + +var form = new FormData(); +form.append('my_field', 'my value'); +form.append('my_buffer', new Buffer(10)); +form.append('my_file', fs.createReadStream('/foo/bar.jpg')); +``` + +Also you can use http-response stream: + +``` javascript +var FormData = require('form-data'); +var http = require('http'); + +var form = new FormData(); + +http.request('http://nodejs.org/images/logo.png', function(response) { + form.append('my_field', 'my value'); + form.append('my_buffer', new Buffer(10)); + form.append('my_logo', response); +}); +``` + +Or @mikeal's [request](https://github.com/request/request) stream: + +``` javascript +var FormData = require('form-data'); +var request = require('request'); + +var form = new FormData(); + +form.append('my_field', 'my value'); +form.append('my_buffer', new Buffer(10)); +form.append('my_logo', request('http://nodejs.org/images/logo.png')); +``` + +In order to submit this form to a web application, call ```submit(url, [callback])``` method: + +``` javascript +form.submit('http://example.org/', function(err, res) { + // res – response object (http.IncomingMessage) // + res.resume(); +}); + +``` + +For more advanced request manipulations ```submit()``` method returns ```http.ClientRequest``` object, or you can choose from one of the alternative submission methods. + +### Custom options + +You can provide custom options, such as `maxDataSize`: + +``` javascript +var FormData = require('form-data'); + +var form = new FormData({ maxDataSize: 20971520 }); +form.append('my_field', 'my value'); +form.append('my_buffer', /* something big */); +``` + +List of available options could be found in [combined-stream](https://github.com/felixge/node-combined-stream/blob/master/lib/combined_stream.js#L7-L15) + +### Alternative submission methods + +You can use node's http client interface: + +``` javascript +var http = require('http'); + +var request = http.request({ + method: 'post', + host: 'example.org', + path: '/upload', + headers: form.getHeaders() +}); + +form.pipe(request); + +request.on('response', function(res) { + console.log(res.statusCode); +}); +``` + +Or if you would prefer the `'Content-Length'` header to be set for you: + +``` javascript +form.submit('example.org/upload', function(err, res) { + console.log(res.statusCode); +}); +``` + +To use custom headers and pre-known length in parts: + +``` javascript +var CRLF = '\r\n'; +var form = new FormData(); + +var options = { + header: CRLF + '--' + form.getBoundary() + CRLF + 'X-Custom-Header: 123' + CRLF + CRLF, + knownLength: 1 +}; + +form.append('my_buffer', buffer, options); + +form.submit('http://example.com/', function(err, res) { + if (err) throw err; + console.log('Done'); +}); +``` + +Form-Data can recognize and fetch all the required information from common types of streams (```fs.readStream```, ```http.response``` and ```mikeal's request```), for some other types of streams you'd need to provide "file"-related information manually: + +``` javascript +someModule.stream(function(err, stdout, stderr) { + if (err) throw err; + + var form = new FormData(); + + form.append('file', stdout, { + filename: 'unicycle.jpg', // ... or: + filepath: 'photos/toys/unicycle.jpg', + contentType: 'image/jpeg', + knownLength: 19806 + }); + + form.submit('http://example.com/', function(err, res) { + if (err) throw err; + console.log('Done'); + }); +}); +``` + +The `filepath` property overrides `filename` and may contain a relative path. 
This is typically used when uploading [multiple files from a directory](https://wicg.github.io/entries-api/#dom-htmlinputelement-webkitdirectory). + +For edge cases, like POST request to URL with query string or to pass HTTP auth credentials, object can be passed to `form.submit()` as first parameter: + +``` javascript +form.submit({ + host: 'example.com', + path: '/probably.php?extra=params', + auth: 'username:password' +}, function(err, res) { + console.log(res.statusCode); +}); +``` + +In case you need to also send custom HTTP headers with the POST request, you can use the `headers` key in first parameter of `form.submit()`: + +``` javascript +form.submit({ + host: 'example.com', + path: '/surelynot.php', + headers: {'x-test-header': 'test-header-value'} +}, function(err, res) { + console.log(res.statusCode); +}); +``` + +### Methods + +- [_Void_ append( **String** _field_, **Mixed** _value_ [, **Mixed** _options_] )](https://github.com/form-data/form-data#void-append-string-field-mixed-value--mixed-options-). +- [_Headers_ getHeaders( [**Headers** _userHeaders_] )](https://github.com/form-data/form-data#array-getheaders-array-userheaders-) +- [_String_ getBoundary()](https://github.com/form-data/form-data#string-getboundary) +- [_Void_ setBoundary()](https://github.com/form-data/form-data#void-setboundary) +- [_Buffer_ getBuffer()](https://github.com/form-data/form-data#buffer-getbuffer) +- [_Integer_ getLengthSync()](https://github.com/form-data/form-data#integer-getlengthsync) +- [_Integer_ getLength( **function** _callback_ )](https://github.com/form-data/form-data#integer-getlength-function-callback-) +- [_Boolean_ hasKnownLength()](https://github.com/form-data/form-data#boolean-hasknownlength) +- [_Request_ submit( _params_, **function** _callback_ )](https://github.com/form-data/form-data#request-submit-params-function-callback-) +- [_String_ toString()](https://github.com/form-data/form-data#string-tostring) + +#### _Void_ append( **String** _field_, **Mixed** _value_ [, **Mixed** _options_] ) +Append data to the form. You can submit about any format (string, integer, boolean, buffer, etc.). However, Arrays are not supported and need to be turned into strings by the user. +```javascript +var form = new FormData(); +form.append( 'my_string', 'my value' ); +form.append( 'my_integer', 1 ); +form.append( 'my_boolean', true ); +form.append( 'my_buffer', new Buffer(10) ); +form.append( 'my_array_as_json', JSON.stringify( ['bird','cute'] ) ) +``` + +You may provide a string for options, or an object. +```javascript +// Set filename by providing a string for options +form.append( 'my_file', fs.createReadStream('/foo/bar.jpg'), 'bar.jpg' ); + +// provide an object. +form.append( 'my_file', fs.createReadStream('/foo/bar.jpg'), {filename: 'bar.jpg', contentType: 'image/jpeg', knownLength: 19806} ); +``` + +#### _Headers_ getHeaders( [**Headers** _userHeaders_] ) +This method adds the correct `content-type` header to the provided array of `userHeaders`. + +#### _String_ getBoundary() +Return the boundary of the formData. By default, the boundary consists of 26 `-` followed by 24 numbers +for example: +```javascript +--------------------------515890814546601021194782 +``` + +#### _Void_ setBoundary(String _boundary_) +Set the boundary string, overriding the default behavior described above. + +_Note: The boundary must be unique and may not appear in the data._ + +#### _Buffer_ getBuffer() +Return the full formdata request package, as a Buffer. You can insert this Buffer in e.g. 
Axios to send multipart data. +```javascript +var form = new FormData(); +form.append( 'my_buffer', Buffer.from([0x4a,0x42,0x20,0x52,0x6f,0x63,0x6b,0x73]) ); +form.append( 'my_file', fs.readFileSync('/foo/bar.jpg') ); + +axios.post( 'https://example.com/path/to/api', + form.getBuffer(), + form.getHeaders() + ) +``` +**Note:** Because the output is of type Buffer, you can only append types that are accepted by Buffer: *string, Buffer, ArrayBuffer, Array, or Array-like Object*. A ReadStream for example will result in an error. + +#### _Integer_ getLengthSync() +Same as `getLength` but synchronous. + +_Note: getLengthSync __doesn't__ calculate streams length._ + +#### _Integer_ getLength( **function** _callback_ ) +Returns the `Content-Length` async. The callback is used to handle errors and continue once the length has been calculated +```javascript +this.getLength(function(err, length) { + if (err) { + this._error(err); + return; + } + + // add content length + request.setHeader('Content-Length', length); + + ... +}.bind(this)); +``` + +#### _Boolean_ hasKnownLength() +Checks if the length of added values is known. + +#### _Request_ submit( _params_, **function** _callback_ ) +Submit the form to a web application. +```javascript +var form = new FormData(); +form.append( 'my_string', 'Hello World' ); + +form.submit( 'http://example.com/', function(err, res) { + // res – response object (http.IncomingMessage) // + res.resume(); +} ); +``` + +#### _String_ toString() +Returns the form data as a string. Don't use this if you are sending files or buffers, use `getBuffer()` instead. + +### Integration with other libraries + +#### Request + +Form submission using [request](https://github.com/request/request): + +```javascript +var formData = { + my_field: 'my_value', + my_file: fs.createReadStream(__dirname + '/unicycle.jpg'), +}; + +request.post({url:'http://service.com/upload', formData: formData}, function(err, httpResponse, body) { + if (err) { + return console.error('upload failed:', err); + } + console.log('Upload successful! Server responded with:', body); +}); +``` + +For more details see [request readme](https://github.com/request/request#multipartform-data-multipart-form-uploads). + +#### node-fetch + +You can also submit a form using [node-fetch](https://github.com/bitinn/node-fetch): + +```javascript +var form = new FormData(); + +form.append('a', 1); + +fetch('http://example.com', { method: 'POST', body: form }) + .then(function(res) { + return res.json(); + }).then(function(json) { + console.log(json); + }); +``` + +#### axios + +In Node.js you can post a file using [axios](https://github.com/axios/axios): +```javascript +const form = new FormData(); +const stream = fs.createReadStream(PATH_TO_FILE); + +form.append('image', stream); + +// In Node.js environment you need to set boundary in the header field 'Content-Type' by calling method `getHeaders` +const formHeaders = form.getHeaders(); + +axios.post('http://example.com', form, { + headers: { + ...formHeaders, + }, +}) +.then(response => response) +.catch(error => error) +``` + +## Notes + +- ```getLengthSync()``` method DOESN'T calculate length for streams, use ```knownLength``` options as workaround. +- ```getLength(cb)``` will send an error as first parameter of callback if stream length cannot be calculated (e.g. send in custom streams w/o using ```knownLength```). +- ```submit``` will not add `content-length` if form length is unknown or not calculable. +- Starting version `2.x` FormData has dropped support for `node@0.10.x`. 
+- Starting version `3.x` FormData has dropped support for `node@4.x`. + +## License + +Form-Data is released under the [MIT](License) license. diff --git a/node_modules/form-data/index.d.ts b/node_modules/form-data/index.d.ts new file mode 100644 index 0000000..295e9e9 --- /dev/null +++ b/node_modules/form-data/index.d.ts @@ -0,0 +1,62 @@ +// Definitions by: Carlos Ballesteros Velasco +// Leon Yu +// BendingBender +// Maple Miao + +/// +import * as stream from 'stream'; +import * as http from 'http'; + +export = FormData; + +// Extracted because @types/node doesn't export interfaces. +interface ReadableOptions { + highWaterMark?: number; + encoding?: string; + objectMode?: boolean; + read?(this: stream.Readable, size: number): void; + destroy?(this: stream.Readable, error: Error | null, callback: (error: Error | null) => void): void; + autoDestroy?: boolean; +} + +interface Options extends ReadableOptions { + writable?: boolean; + readable?: boolean; + dataSize?: number; + maxDataSize?: number; + pauseStreams?: boolean; +} + +declare class FormData extends stream.Readable { + constructor(options?: Options); + append(key: string, value: any, options?: FormData.AppendOptions | string): void; + getHeaders(userHeaders?: FormData.Headers): FormData.Headers; + submit( + params: string | FormData.SubmitOptions, + callback?: (error: Error | null, response: http.IncomingMessage) => void + ): http.ClientRequest; + getBuffer(): Buffer; + setBoundary(boundary: string): void; + getBoundary(): string; + getLength(callback: (err: Error | null, length: number) => void): void; + getLengthSync(): number; + hasKnownLength(): boolean; +} + +declare namespace FormData { + interface Headers { + [key: string]: any; + } + + interface AppendOptions { + header?: string | Headers; + knownLength?: number; + filename?: string; + filepath?: string; + contentType?: string; + } + + interface SubmitOptions extends http.RequestOptions { + protocol?: 'https:' | 'http:'; + } +} diff --git a/node_modules/form-data/lib/browser.js b/node_modules/form-data/lib/browser.js new file mode 100644 index 0000000..09e7c70 --- /dev/null +++ b/node_modules/form-data/lib/browser.js @@ -0,0 +1,2 @@ +/* eslint-env browser */ +module.exports = typeof self == 'object' ? self.FormData : window.FormData; diff --git a/node_modules/form-data/lib/form_data.js b/node_modules/form-data/lib/form_data.js new file mode 100644 index 0000000..e05c8f1 --- /dev/null +++ b/node_modules/form-data/lib/form_data.js @@ -0,0 +1,501 @@ +var CombinedStream = require('combined-stream'); +var util = require('util'); +var path = require('path'); +var http = require('http'); +var https = require('https'); +var parseUrl = require('url').parse; +var fs = require('fs'); +var Stream = require('stream').Stream; +var mime = require('mime-types'); +var asynckit = require('asynckit'); +var populate = require('./populate.js'); + +// Public API +module.exports = FormData; + +// make it a Stream +util.inherits(FormData, CombinedStream); + +/** + * Create readable "multipart/form-data" streams. + * Can be used to submit forms + * and file uploads to other web applications. 
+ * + * @constructor + * @param {Object} options - Properties to be added/overriden for FormData and CombinedStream + */ +function FormData(options) { + if (!(this instanceof FormData)) { + return new FormData(options); + } + + this._overheadLength = 0; + this._valueLength = 0; + this._valuesToMeasure = []; + + CombinedStream.call(this); + + options = options || {}; + for (var option in options) { + this[option] = options[option]; + } +} + +FormData.LINE_BREAK = '\r\n'; +FormData.DEFAULT_CONTENT_TYPE = 'application/octet-stream'; + +FormData.prototype.append = function(field, value, options) { + + options = options || {}; + + // allow filename as single option + if (typeof options == 'string') { + options = {filename: options}; + } + + var append = CombinedStream.prototype.append.bind(this); + + // all that streamy business can't handle numbers + if (typeof value == 'number') { + value = '' + value; + } + + // https://github.com/felixge/node-form-data/issues/38 + if (Array.isArray(value)) { + // Please convert your array into string + // the way web server expects it + this._error(new Error('Arrays are not supported.')); + return; + } + + var header = this._multiPartHeader(field, value, options); + var footer = this._multiPartFooter(); + + append(header); + append(value); + append(footer); + + // pass along options.knownLength + this._trackLength(header, value, options); +}; + +FormData.prototype._trackLength = function(header, value, options) { + var valueLength = 0; + + // used w/ getLengthSync(), when length is known. + // e.g. for streaming directly from a remote server, + // w/ a known file a size, and not wanting to wait for + // incoming file to finish to get its size. + if (options.knownLength != null) { + valueLength += +options.knownLength; + } else if (Buffer.isBuffer(value)) { + valueLength = value.length; + } else if (typeof value === 'string') { + valueLength = Buffer.byteLength(value); + } + + this._valueLength += valueLength; + + // @check why add CRLF? does this account for custom/multiple CRLFs? + this._overheadLength += + Buffer.byteLength(header) + + FormData.LINE_BREAK.length; + + // empty or either doesn't have path or not an http response or not a stream + if (!value || ( !value.path && !(value.readable && value.hasOwnProperty('httpVersion')) && !(value instanceof Stream))) { + return; + } + + // no need to bother with the length + if (!options.knownLength) { + this._valuesToMeasure.push(value); + } +}; + +FormData.prototype._lengthRetriever = function(value, callback) { + + if (value.hasOwnProperty('fd')) { + + // take read range into a account + // `end` = Infinity –> read file till the end + // + // TODO: Looks like there is bug in Node fs.createReadStream + // it doesn't respect `end` options without `start` options + // Fix it when node fixes it. + // https://github.com/joyent/node/issues/7819 + if (value.end != undefined && value.end != Infinity && value.start != undefined) { + + // when end specified + // no need to calculate range + // inclusive, starts with 0 + callback(null, value.end + 1 - (value.start ? value.start : 0)); + + // not that fast snoopy + } else { + // still need to fetch file size from fs + fs.stat(value.path, function(err, stat) { + + var fileSize; + + if (err) { + callback(err); + return; + } + + // update final size based on the range options + fileSize = stat.size - (value.start ? 
value.start : 0); + callback(null, fileSize); + }); + } + + // or http response + } else if (value.hasOwnProperty('httpVersion')) { + callback(null, +value.headers['content-length']); + + // or request stream http://github.com/mikeal/request + } else if (value.hasOwnProperty('httpModule')) { + // wait till response come back + value.on('response', function(response) { + value.pause(); + callback(null, +response.headers['content-length']); + }); + value.resume(); + + // something else + } else { + callback('Unknown stream'); + } +}; + +FormData.prototype._multiPartHeader = function(field, value, options) { + // custom header specified (as string)? + // it becomes responsible for boundary + // (e.g. to handle extra CRLFs on .NET servers) + if (typeof options.header == 'string') { + return options.header; + } + + var contentDisposition = this._getContentDisposition(value, options); + var contentType = this._getContentType(value, options); + + var contents = ''; + var headers = { + // add custom disposition as third element or keep it two elements if not + 'Content-Disposition': ['form-data', 'name="' + field + '"'].concat(contentDisposition || []), + // if no content type. allow it to be empty array + 'Content-Type': [].concat(contentType || []) + }; + + // allow custom headers. + if (typeof options.header == 'object') { + populate(headers, options.header); + } + + var header; + for (var prop in headers) { + if (!headers.hasOwnProperty(prop)) continue; + header = headers[prop]; + + // skip nullish headers. + if (header == null) { + continue; + } + + // convert all headers to arrays. + if (!Array.isArray(header)) { + header = [header]; + } + + // add non-empty headers. + if (header.length) { + contents += prop + ': ' + header.join('; ') + FormData.LINE_BREAK; + } + } + + return '--' + this.getBoundary() + FormData.LINE_BREAK + contents + FormData.LINE_BREAK; +}; + +FormData.prototype._getContentDisposition = function(value, options) { + + var filename + , contentDisposition + ; + + if (typeof options.filepath === 'string') { + // custom filepath for relative paths + filename = path.normalize(options.filepath).replace(/\\/g, '/'); + } else if (options.filename || value.name || value.path) { + // custom filename take precedence + // formidable and the browser add a name property + // fs- and request- streams have path property + filename = path.basename(options.filename || value.name || value.path); + } else if (value.readable && value.hasOwnProperty('httpVersion')) { + // or try http response + filename = path.basename(value.client._httpMessage.path || ''); + } + + if (filename) { + contentDisposition = 'filename="' + filename + '"'; + } + + return contentDisposition; +}; + +FormData.prototype._getContentType = function(value, options) { + + // use custom content-type above all + var contentType = options.contentType; + + // or try `name` from formidable, browser + if (!contentType && value.name) { + contentType = mime.lookup(value.name); + } + + // or try `path` from fs-, request- streams + if (!contentType && value.path) { + contentType = mime.lookup(value.path); + } + + // or if it's http-reponse + if (!contentType && value.readable && value.hasOwnProperty('httpVersion')) { + contentType = value.headers['content-type']; + } + + // or guess it from the filepath or filename + if (!contentType && (options.filepath || options.filename)) { + contentType = mime.lookup(options.filepath || options.filename); + } + + // fallback to the default content type if `value` is not simple value + if 
(!contentType && typeof value == 'object') { + contentType = FormData.DEFAULT_CONTENT_TYPE; + } + + return contentType; +}; + +FormData.prototype._multiPartFooter = function() { + return function(next) { + var footer = FormData.LINE_BREAK; + + var lastPart = (this._streams.length === 0); + if (lastPart) { + footer += this._lastBoundary(); + } + + next(footer); + }.bind(this); +}; + +FormData.prototype._lastBoundary = function() { + return '--' + this.getBoundary() + '--' + FormData.LINE_BREAK; +}; + +FormData.prototype.getHeaders = function(userHeaders) { + var header; + var formHeaders = { + 'content-type': 'multipart/form-data; boundary=' + this.getBoundary() + }; + + for (header in userHeaders) { + if (userHeaders.hasOwnProperty(header)) { + formHeaders[header.toLowerCase()] = userHeaders[header]; + } + } + + return formHeaders; +}; + +FormData.prototype.setBoundary = function(boundary) { + this._boundary = boundary; +}; + +FormData.prototype.getBoundary = function() { + if (!this._boundary) { + this._generateBoundary(); + } + + return this._boundary; +}; + +FormData.prototype.getBuffer = function() { + var dataBuffer = new Buffer.alloc( 0 ); + var boundary = this.getBoundary(); + + // Create the form content. Add Line breaks to the end of data. + for (var i = 0, len = this._streams.length; i < len; i++) { + if (typeof this._streams[i] !== 'function') { + + // Add content to the buffer. + if(Buffer.isBuffer(this._streams[i])) { + dataBuffer = Buffer.concat( [dataBuffer, this._streams[i]]); + }else { + dataBuffer = Buffer.concat( [dataBuffer, Buffer.from(this._streams[i])]); + } + + // Add break after content. + if (typeof this._streams[i] !== 'string' || this._streams[i].substring( 2, boundary.length + 2 ) !== boundary) { + dataBuffer = Buffer.concat( [dataBuffer, Buffer.from(FormData.LINE_BREAK)] ); + } + } + } + + // Add the footer and return the Buffer object. + return Buffer.concat( [dataBuffer, Buffer.from(this._lastBoundary())] ); +}; + +FormData.prototype._generateBoundary = function() { + // This generates a 50 character boundary similar to those used by Firefox. + // They are optimized for boyer-moore parsing. + var boundary = '--------------------------'; + for (var i = 0; i < 24; i++) { + boundary += Math.floor(Math.random() * 10).toString(16); + } + + this._boundary = boundary; +}; + +// Note: getLengthSync DOESN'T calculate streams length +// As workaround one can calculate file size manually +// and add it as knownLength option +FormData.prototype.getLengthSync = function() { + var knownLength = this._overheadLength + this._valueLength; + + // Don't get confused, there are 3 "internal" streams for each keyval pair + // so it basically checks if there is any value added to the form + if (this._streams.length) { + knownLength += this._lastBoundary().length; + } + + // https://github.com/form-data/form-data/issues/40 + if (!this.hasKnownLength()) { + // Some async length retrievers are present + // therefore synchronous length calculation is false. 
+ // Please use getLength(callback) to get proper length + this._error(new Error('Cannot calculate proper length in synchronous way.')); + } + + return knownLength; +}; + +// Public API to check if length of added values is known +// https://github.com/form-data/form-data/issues/196 +// https://github.com/form-data/form-data/issues/262 +FormData.prototype.hasKnownLength = function() { + var hasKnownLength = true; + + if (this._valuesToMeasure.length) { + hasKnownLength = false; + } + + return hasKnownLength; +}; + +FormData.prototype.getLength = function(cb) { + var knownLength = this._overheadLength + this._valueLength; + + if (this._streams.length) { + knownLength += this._lastBoundary().length; + } + + if (!this._valuesToMeasure.length) { + process.nextTick(cb.bind(this, null, knownLength)); + return; + } + + asynckit.parallel(this._valuesToMeasure, this._lengthRetriever, function(err, values) { + if (err) { + cb(err); + return; + } + + values.forEach(function(length) { + knownLength += length; + }); + + cb(null, knownLength); + }); +}; + +FormData.prototype.submit = function(params, cb) { + var request + , options + , defaults = {method: 'post'} + ; + + // parse provided url if it's string + // or treat it as options object + if (typeof params == 'string') { + + params = parseUrl(params); + options = populate({ + port: params.port, + path: params.pathname, + host: params.hostname, + protocol: params.protocol + }, defaults); + + // use custom params + } else { + + options = populate(params, defaults); + // if no port provided use default one + if (!options.port) { + options.port = options.protocol == 'https:' ? 443 : 80; + } + } + + // put that good code in getHeaders to some use + options.headers = this.getHeaders(params.headers); + + // https if specified, fallback to http in any other case + if (options.protocol == 'https:') { + request = https.request(options); + } else { + request = http.request(options); + } + + // get content length and fire away + this.getLength(function(err, length) { + if (err && err !== 'Unknown stream') { + this._error(err); + return; + } + + // add content length + if (length) { + request.setHeader('Content-Length', length); + } + + this.pipe(request); + if (cb) { + var onResponse; + + var callback = function (error, responce) { + request.removeListener('error', callback); + request.removeListener('response', onResponse); + + return cb.call(this, error, responce); + }; + + onResponse = callback.bind(this, null); + + request.on('error', callback); + request.on('response', onResponse); + } + }.bind(this)); + + return request; +}; + +FormData.prototype._error = function(err) { + if (!this.error) { + this.error = err; + this.pause(); + this.emit('error', err); + } +}; + +FormData.prototype.toString = function () { + return '[object FormData]'; +}; diff --git a/node_modules/form-data/lib/populate.js b/node_modules/form-data/lib/populate.js new file mode 100644 index 0000000..4d35738 --- /dev/null +++ b/node_modules/form-data/lib/populate.js @@ -0,0 +1,10 @@ +// populates missing values +module.exports = function(dst, src) { + + Object.keys(src).forEach(function(prop) + { + dst[prop] = dst[prop] || src[prop]; + }); + + return dst; +}; diff --git a/node_modules/form-data/package.json b/node_modules/form-data/package.json new file mode 100644 index 0000000..9bd12c9 --- /dev/null +++ b/node_modules/form-data/package.json @@ -0,0 +1,71 @@ +{ + "author": "Felix Geisendörfer (http://debuggable.com/)", + "name": "form-data", + "description": "A library to create 
readable \"multipart/form-data\" streams. Can be used to submit forms and file uploads to other web applications.", + "version": "4.0.1", + "repository": { + "type": "git", + "url": "git://github.com/form-data/form-data.git" + }, + "main": "./lib/form_data", + "browser": "./lib/browser", + "typings": "./index.d.ts", + "scripts": { + "pretest": "npm run lint", + "pretests-only": "rimraf coverage test/tmp", + "tests-only": "istanbul cover test/run.js", + "posttests-only": "istanbul report lcov text", + "test": "npm run tests-only", + "posttest": "npx npm@'>=10.2' audit --production", + "lint": "eslint --ext=js,mjs .", + "report": "istanbul report lcov text", + "ci-lint": "is-node-modern 8 && npm run lint || is-node-not-modern 8", + "ci-test": "npm run tests-only && npm run browser && npm run report", + "predebug": "rimraf coverage test/tmp", + "debug": "verbose=1 ./test/run.js", + "browser": "browserify -t browserify-istanbul test/run-browser.js | obake --coverage", + "check": "istanbul check-coverage coverage/coverage*.json", + "files": "pkgfiles --sort=name", + "get-version": "node -e \"console.log(require('./package.json').version)\"", + "update-readme": "sed -i.bak 's/\\/master\\.svg/\\/v'$(npm --silent run get-version)'.svg/g' README.md", + "restore-readme": "mv README.md.bak README.md", + "prepublish": "in-publish && npm run update-readme || not-in-publish", + "postpublish": "npm run restore-readme" + }, + "pre-commit": [ + "lint", + "ci-test", + "check" + ], + "engines": { + "node": ">= 6" + }, + "dependencies": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.8", + "mime-types": "^2.1.12" + }, + "devDependencies": { + "@types/node": "^12.0.10", + "browserify": "^13.1.1", + "browserify-istanbul": "^2.0.0", + "coveralls": "^3.0.4", + "cross-spawn": "^6.0.5", + "eslint": "^6.0.1", + "fake": "^0.2.2", + "far": "^0.0.7", + "formidable": "^1.0.17", + "in-publish": "^2.0.0", + "is-node-modern": "^1.0.0", + "istanbul": "^0.4.5", + "obake": "^0.1.2", + "puppeteer": "^1.19.0", + "pkgfiles": "^2.3.0", + "pre-commit": "^1.1.3", + "request": "^2.88.0", + "rimraf": "^2.7.1", + "tape": "^4.6.2", + "typescript": "^3.5.2" + }, + "license": "MIT" +} diff --git a/node_modules/formdata-node/@type/Blob.d.ts b/node_modules/formdata-node/@type/Blob.d.ts new file mode 100644 index 0000000..664b60a --- /dev/null +++ b/node_modules/formdata-node/@type/Blob.d.ts @@ -0,0 +1,66 @@ +/*! Based on fetch-blob. MIT License. Jimmy Wärting & David Frank */ +import { ReadableStream } from "web-streams-polyfill"; +/** + * Reflects minimal valid Blob for BlobParts. + */ +export interface BlobLike { + type: string; + size: number; + slice(start?: number, end?: number, contentType?: string): BlobLike; + arrayBuffer(): Promise; + [Symbol.toStringTag]: string; +} +export declare type BlobParts = unknown[] | Iterable; +export interface BlobPropertyBag { + /** + * The [`MIME type`](https://developer.mozilla.org/en-US/docs/Glossary/MIME_type) of the data that will be stored into the blob. + * The default value is the empty string, (`""`). + */ + type?: string; +} +/** + * The **Blob** object represents a blob, which is a file-like object of immutable, raw data; + * they can be read as text or binary data, or converted into a ReadableStream + * so its methods can be used for processing the data. + */ +export declare class Blob { + #private; + static [Symbol.hasInstance](value: unknown): value is Blob; + /** + * Returns a new [`Blob`](https://developer.mozilla.org/en-US/docs/Web/API/Blob) object. 
+ * The content of the blob consists of the concatenation of the values given in the parameter array. + * + * @param blobParts An `Array` strings, or [`ArrayBuffer`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer), [`ArrayBufferView`](https://developer.mozilla.org/en-US/docs/Web/API/ArrayBufferView), [`Blob`](https://developer.mozilla.org/en-US/docs/Web/API/Blob) objects, or a mix of any of such objects, that will be put inside the [`Blob`](https://developer.mozilla.org/en-US/docs/Web/API/Blob). + * @param options An optional object of type `BlobPropertyBag`. + */ + constructor(blobParts?: BlobParts, options?: BlobPropertyBag); + /** + * Returns the [`MIME type`](https://developer.mozilla.org/en-US/docs/Glossary/MIME_type) of the [`Blob`](https://developer.mozilla.org/en-US/docs/Web/API/Blob) or [`File`](https://developer.mozilla.org/en-US/docs/Web/API/File). + */ + get type(): string; + /** + * Returns the size of the [`Blob`](https://developer.mozilla.org/en-US/docs/Web/API/Blob) or [`File`](https://developer.mozilla.org/en-US/docs/Web/API/File) in bytes. + */ + get size(): number; + /** + * Creates and returns a new [`Blob`](https://developer.mozilla.org/en-US/docs/Web/API/Blob) object which contains data from a subset of the blob on which it's called. + * + * @param start An index into the Blob indicating the first byte to include in the new Blob. If you specify a negative value, it's treated as an offset from the end of the Blob toward the beginning. For example, -10 would be the 10th from last byte in the Blob. The default value is 0. If you specify a value for start that is larger than the size of the source Blob, the returned Blob has size 0 and contains no data. + * @param end An index into the Blob indicating the first byte that will *not* be included in the new Blob (i.e. the byte exactly at this index is not included). If you specify a negative value, it's treated as an offset from the end of the Blob toward the beginning. For example, -10 would be the 10th from last byte in the Blob. The default value is size. + * @param contentType The content type to assign to the new Blob; this will be the value of its type property. The default value is an empty string. + */ + slice(start?: number, end?: number, contentType?: string): Blob; + /** + * Returns a [`Promise`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise) that resolves with a string containing the contents of the blob, interpreted as UTF-8. + */ + text(): Promise; + /** + * Returns a [`Promise`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise) that resolves with the contents of the blob as binary data contained in an [`ArrayBuffer`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer). + */ + arrayBuffer(): Promise; + /** + * Returns a [`ReadableStream`](https://developer.mozilla.org/en-US/docs/Web/API/ReadableStream) which upon reading returns the data contained within the [`Blob`](https://developer.mozilla.org/en-US/docs/Web/API/Blob). 
+ */ + stream(): ReadableStream; + get [Symbol.toStringTag](): string; +} diff --git a/node_modules/formdata-node/@type/BlobPart.d.ts b/node_modules/formdata-node/@type/BlobPart.d.ts new file mode 100644 index 0000000..beca832 --- /dev/null +++ b/node_modules/formdata-node/@type/BlobPart.d.ts @@ -0,0 +1,2 @@ +import type { Blob, BlobLike } from "./Blob"; +export declare type BlobPart = BlobLike | Blob | Uint8Array; diff --git a/node_modules/formdata-node/@type/File.d.ts b/node_modules/formdata-node/@type/File.d.ts new file mode 100644 index 0000000..dacd66e --- /dev/null +++ b/node_modules/formdata-node/@type/File.d.ts @@ -0,0 +1,53 @@ +import { Blob, BlobParts as FileBits, BlobPropertyBag } from "./Blob"; +export interface FileLike { + /** + * Name of the file referenced by the File object. + */ + readonly name: string; + /** + * Size of the file parts in bytes + */ + readonly size: number; + /** + * Returns the media type ([`MIME`](https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/MIME_types)) of the file represented by a `File` object. + */ + readonly type: string; + /** + * The last modified date of the file as the number of milliseconds since the Unix epoch (January 1, 1970 at midnight). Files without a known last modified date return the current date. + */ + readonly lastModified: number; + [Symbol.toStringTag]: string; + /** + * Returns a [`ReadableStream`](https://developer.mozilla.org/en-US/docs/Web/API/ReadableStream) which upon reading returns the data contained within the [`File`](https://developer.mozilla.org/en-US/docs/Web/API/File). + */ + stream(): AsyncIterable; +} +export interface FilePropertyBag extends BlobPropertyBag { + /** + * The last modified date of the file as the number of milliseconds since the Unix epoch (January 1, 1970 at midnight). Files without a known last modified date return the current date. + */ + lastModified?: number; +} +/** + * @deprecated Use FilePropertyBag instead. + */ +export declare type FileOptions = FilePropertyBag; +/** + * The **File** interface provides information about files and allows JavaScript to access their content. + */ +export declare class File extends Blob implements FileLike { + #private; + static [Symbol.hasInstance](value: unknown): value is File; + /** + * Creates a new File instance. + * + * @param fileBits An `Array` strings, or [`ArrayBuffer`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer), [`ArrayBufferView`](https://developer.mozilla.org/en-US/docs/Web/API/ArrayBufferView), [`Blob`](https://developer.mozilla.org/en-US/docs/Web/API/Blob) objects, or a mix of any of such objects, that will be put inside the [`File`](https://developer.mozilla.org/en-US/docs/Web/API/File). + * @param name The name of the file. + * @param options An options object containing optional attributes for the file. + */ + constructor(fileBits: FileBits, name: string, options?: FilePropertyBag); + get name(): string; + get lastModified(): number; + get webkitRelativePath(): string; + get [Symbol.toStringTag](): string; +} diff --git a/node_modules/formdata-node/@type/FormData.d.ts b/node_modules/formdata-node/@type/FormData.d.ts new file mode 100644 index 0000000..37b3757 --- /dev/null +++ b/node_modules/formdata-node/@type/FormData.d.ts @@ -0,0 +1,105 @@ +/// +import { inspect } from "util"; +import { File } from "./File"; +/** + * A `string` or `File` that represents a single value from a set of `FormData` key-value pairs. 
+ */ +export declare type FormDataEntryValue = string | File; +/** + * Constructor entries for FormData + */ +export declare type FormDataConstructorEntries = Array<{ + name: string; + value: unknown; + fileName?: string; +}>; +/** + * Provides a way to easily construct a set of key/value pairs representing form fields and their values, which can then be easily sent using fetch(). + * + * Note that this object is not a part of Node.js, so you might need to check if an HTTP client of your choice support spec-compliant FormData. + * However, if your HTTP client does not support FormData, you can use [`form-data-encoder`](https://npmjs.com/package/form-data-encoder) package to handle "multipart/form-data" encoding. + */ +export declare class FormData { + #private; + static [Symbol.hasInstance](value: unknown): value is FormData; + constructor(entries?: FormDataConstructorEntries); + /** + * Appends a new value onto an existing key inside a FormData object, + * or adds the key if it does not already exist. + * + * The difference between `set()` and `append()` is that if the specified key already exists, `set()` will overwrite all existing values with the new one, whereas `append()` will append the new value onto the end of the existing set of values. + * + * @param name The name of the field whose data is contained in `value`. + * @param value The field's value. This can be [`Blob`](https://developer.mozilla.org/en-US/docs/Web/API/Blob) + or [`File`](https://developer.mozilla.org/en-US/docs/Web/API/File). If none of these are specified the value is converted to a string. + * @param fileName The filename reported to the server, when a Blob or File is passed as the second parameter. The default filename for Blob objects is "blob". The default filename for File objects is the file's filename. + */ + append(name: string, value: unknown, fileName?: string): void; + /** + * Set a new value for an existing key inside FormData, + * or add the new field if it does not already exist. + * + * @param name The name of the field whose data is contained in `value`. + * @param value The field's value. This can be [`Blob`](https://developer.mozilla.org/en-US/docs/Web/API/Blob) + or [`File`](https://developer.mozilla.org/en-US/docs/Web/API/File). If none of these are specified the value is converted to a string. + * @param fileName The filename reported to the server, when a Blob or File is passed as the second parameter. The default filename for Blob objects is "blob". The default filename for File objects is the file's filename. + * + */ + set(name: string, value: unknown, fileName?: string): void; + /** + * Returns the first value associated with a given key from within a `FormData` object. + * If you expect multiple values and want all of them, use the `getAll()` method instead. + * + * @param {string} name A name of the value you want to retrieve. + * + * @returns A `FormDataEntryValue` containing the value. If the key doesn't exist, the method returns null. + */ + get(name: string): FormDataEntryValue | null; + /** + * Returns all the values associated with a given key from within a `FormData` object. + * + * @param {string} name A name of the value you want to retrieve. + * + * @returns An array of `FormDataEntryValue` whose key matches the value passed in the `name` parameter. If the key doesn't exist, the method returns an empty list. + */ + getAll(name: string): FormDataEntryValue[]; + /** + * Returns a boolean stating whether a `FormData` object contains a certain key. 
+ * + * @param name A string representing the name of the key you want to test for. + * + * @return A boolean value. + */ + has(name: string): boolean; + /** + * Deletes a key and its value(s) from a `FormData` object. + * + * @param name The name of the key you want to delete. + */ + delete(name: string): void; + /** + * Returns an [`iterator`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols) allowing to go through all keys contained in this `FormData` object. + * Each key is a `string`. + */ + keys(): Generator; + /** + * Returns an [`iterator`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols) allowing to go through the `FormData` key/value pairs. + * The key of each pair is a string; the value is a [`FormDataValue`](https://developer.mozilla.org/en-US/docs/Web/API/FormDataEntryValue). + */ + entries(): Generator<[string, FormDataEntryValue]>; + /** + * Returns an [`iterator`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols) allowing to go through all values contained in this object `FormData` object. + * Each value is a [`FormDataValue`](https://developer.mozilla.org/en-US/docs/Web/API/FormDataEntryValue). + */ + values(): Generator; + /** + * An alias for FormData#entries() + */ + [Symbol.iterator](): Generator<[string, FormDataEntryValue], any, unknown>; + /** + * Executes given callback function for each field of the FormData instance + */ + forEach(callback: (value: FormDataEntryValue, key: string, form: FormData) => void, thisArg?: unknown): void; + get [Symbol.toStringTag](): string; + [inspect.custom](): string; +} diff --git a/node_modules/formdata-node/@type/blobHelpers.d.ts b/node_modules/formdata-node/@type/blobHelpers.d.ts new file mode 100644 index 0000000..5a091e4 --- /dev/null +++ b/node_modules/formdata-node/@type/blobHelpers.d.ts @@ -0,0 +1,9 @@ +/*! Based on fetch-blob. MIT License. 
Jimmy Wärting & David Frank */ +import type { BlobPart } from "./BlobPart"; +/** + * Creates an iterator allowing to go through blob parts and consume their content + * + * @param parts blob parts from Blob class + */ +export declare function consumeBlobParts(parts: BlobPart[], clone?: boolean): AsyncGenerator; +export declare function sliceBlob(blobParts: BlobPart[], blobSize: number, start?: number, end?: number): Generator; diff --git a/node_modules/formdata-node/@type/browser.d.ts b/node_modules/formdata-node/@type/browser.d.ts new file mode 100644 index 0000000..9eaabd6 --- /dev/null +++ b/node_modules/formdata-node/@type/browser.d.ts @@ -0,0 +1,10 @@ +export declare const FormData: { + new (form?: HTMLFormElement | undefined): FormData; + prototype: FormData; +}, Blob: { + new (blobParts?: BlobPart[] | undefined, options?: BlobPropertyBag | undefined): Blob; + prototype: Blob; +}, File: { + new (fileBits: BlobPart[], fileName: string, options?: FilePropertyBag | undefined): File; + prototype: File; +}; diff --git a/node_modules/formdata-node/@type/deprecateConstructorEntries.d.ts b/node_modules/formdata-node/@type/deprecateConstructorEntries.d.ts new file mode 100644 index 0000000..529bf9f --- /dev/null +++ b/node_modules/formdata-node/@type/deprecateConstructorEntries.d.ts @@ -0,0 +1 @@ +export declare const deprecateConstructorEntries: () => void; diff --git a/node_modules/formdata-node/@type/fileFromPath.d.ts b/node_modules/formdata-node/@type/fileFromPath.d.ts new file mode 100644 index 0000000..20561f0 --- /dev/null +++ b/node_modules/formdata-node/@type/fileFromPath.d.ts @@ -0,0 +1,55 @@ +import { File, FilePropertyBag } from "./File"; +export * from "./isFile"; +export declare type FileFromPathOptions = Omit; +/** + * Creates a `File` referencing the one on a disk by given path. Synchronous version of the `fileFromPath` + * + * @param path Path to a file + * @param filename Optional name of the file. Will be passed as the second argument in `File` constructor. If not presented, the name will be taken from the file's path. + * @param options Additional `File` options, except for `lastModified`. + * + * @example + * + * ```js + * import {FormData, File} from "formdata-node" + * import {fileFromPathSync} from "formdata-node/file-from-path" + * + * const form = new FormData() + * + * const file = fileFromPathSync("/path/to/some/file.txt") + * + * form.set("file", file) + * + * form.get("file") // -> Your `File` object + * ``` + */ +export declare function fileFromPathSync(path: string): File; +export declare function fileFromPathSync(path: string, filename?: string): File; +export declare function fileFromPathSync(path: string, options?: FileFromPathOptions): File; +export declare function fileFromPathSync(path: string, filename?: string, options?: FileFromPathOptions): File; +/** + * Creates a `File` referencing the one on a disk by given path. + * + * @param path Path to a file + * @param filename Optional name of the file. Will be passed as the second argument in `File` constructor. If not presented, the name will be taken from the file's path. + * @param options Additional `File` options, except for `lastModified`. 
+ * + * @example + * + * ```js + * import {FormData, File} from "formdata-node" + * import {fileFromPath} from "formdata-node/file-from-path" + * + * const form = new FormData() + * + * const file = await fileFromPath("/path/to/some/file.txt") + * + * form.set("file", file) + * + * form.get("file") // -> Your `File` object + * ``` + */ +export declare function fileFromPath(path: string): Promise; +export declare function fileFromPath(path: string, filename?: string): Promise; +export declare function fileFromPath(path: string, options?: FileFromPathOptions): Promise; +export declare function fileFromPath(path: string, filename?: string, options?: FileFromPathOptions): Promise; diff --git a/node_modules/formdata-node/@type/index.d.ts b/node_modules/formdata-node/@type/index.d.ts new file mode 100644 index 0000000..794b3ac --- /dev/null +++ b/node_modules/formdata-node/@type/index.d.ts @@ -0,0 +1,3 @@ +export * from "./FormData"; +export * from "./Blob"; +export * from "./File"; diff --git a/node_modules/formdata-node/@type/isBlob.d.ts b/node_modules/formdata-node/@type/isBlob.d.ts new file mode 100644 index 0000000..0570758 --- /dev/null +++ b/node_modules/formdata-node/@type/isBlob.d.ts @@ -0,0 +1,2 @@ +import { Blob } from "./Blob"; +export declare const isBlob: (value: unknown) => value is Blob; diff --git a/node_modules/formdata-node/@type/isFile.d.ts b/node_modules/formdata-node/@type/isFile.d.ts new file mode 100644 index 0000000..2cdf822 --- /dev/null +++ b/node_modules/formdata-node/@type/isFile.d.ts @@ -0,0 +1,7 @@ +import { File } from "./File"; +/** + * Checks if given value is a File, Blob or file-look-a-like object. + * + * @param value A value to test + */ +export declare const isFile: (value: unknown) => value is File; diff --git a/node_modules/formdata-node/@type/isFunction.d.ts b/node_modules/formdata-node/@type/isFunction.d.ts new file mode 100644 index 0000000..ae55ca2 --- /dev/null +++ b/node_modules/formdata-node/@type/isFunction.d.ts @@ -0,0 +1 @@ +export declare const isFunction: (value: unknown) => value is Function; diff --git a/node_modules/formdata-node/@type/isPlainObject.d.ts b/node_modules/formdata-node/@type/isPlainObject.d.ts new file mode 100644 index 0000000..531ad97 --- /dev/null +++ b/node_modules/formdata-node/@type/isPlainObject.d.ts @@ -0,0 +1,2 @@ +declare function isPlainObject(value: unknown): value is object; +export default isPlainObject; diff --git a/node_modules/formdata-node/lib/cjs/Blob.js b/node_modules/formdata-node/lib/cjs/Blob.js new file mode 100644 index 0000000..5617c06 --- /dev/null +++ b/node_modules/formdata-node/lib/cjs/Blob.js @@ -0,0 +1,122 @@ +"use strict"; +/*! Based on fetch-blob. MIT License. Jimmy Wärting & David Frank */ +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +}; +var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? 
receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; +}; +var _Blob_parts, _Blob_type, _Blob_size; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Blob = void 0; +const web_streams_polyfill_1 = require("web-streams-polyfill"); +const isFunction_1 = require("./isFunction"); +const blobHelpers_1 = require("./blobHelpers"); +class Blob { + constructor(blobParts = [], options = {}) { + _Blob_parts.set(this, []); + _Blob_type.set(this, ""); + _Blob_size.set(this, 0); + options !== null && options !== void 0 ? options : (options = {}); + if (typeof blobParts !== "object" || blobParts === null) { + throw new TypeError("Failed to construct 'Blob': " + + "The provided value cannot be converted to a sequence."); + } + if (!(0, isFunction_1.isFunction)(blobParts[Symbol.iterator])) { + throw new TypeError("Failed to construct 'Blob': " + + "The object must have a callable @@iterator property."); + } + if (typeof options !== "object" && !(0, isFunction_1.isFunction)(options)) { + throw new TypeError("Failed to construct 'Blob': parameter 2 cannot convert to dictionary."); + } + const encoder = new TextEncoder(); + for (const raw of blobParts) { + let part; + if (ArrayBuffer.isView(raw)) { + part = new Uint8Array(raw.buffer.slice(raw.byteOffset, raw.byteOffset + raw.byteLength)); + } + else if (raw instanceof ArrayBuffer) { + part = new Uint8Array(raw.slice(0)); + } + else if (raw instanceof Blob) { + part = raw; + } + else { + part = encoder.encode(String(raw)); + } + __classPrivateFieldSet(this, _Blob_size, __classPrivateFieldGet(this, _Blob_size, "f") + (ArrayBuffer.isView(part) ? part.byteLength : part.size), "f"); + __classPrivateFieldGet(this, _Blob_parts, "f").push(part); + } + const type = options.type === undefined ? "" : String(options.type); + __classPrivateFieldSet(this, _Blob_type, /^[\x20-\x7E]*$/.test(type) ? 
type : "", "f"); + } + static [(_Blob_parts = new WeakMap(), _Blob_type = new WeakMap(), _Blob_size = new WeakMap(), Symbol.hasInstance)](value) { + return Boolean(value + && typeof value === "object" + && (0, isFunction_1.isFunction)(value.constructor) + && ((0, isFunction_1.isFunction)(value.stream) + || (0, isFunction_1.isFunction)(value.arrayBuffer)) + && /^(Blob|File)$/.test(value[Symbol.toStringTag])); + } + get type() { + return __classPrivateFieldGet(this, _Blob_type, "f"); + } + get size() { + return __classPrivateFieldGet(this, _Blob_size, "f"); + } + slice(start, end, contentType) { + return new Blob((0, blobHelpers_1.sliceBlob)(__classPrivateFieldGet(this, _Blob_parts, "f"), this.size, start, end), { + type: contentType + }); + } + async text() { + const decoder = new TextDecoder(); + let result = ""; + for await (const chunk of (0, blobHelpers_1.consumeBlobParts)(__classPrivateFieldGet(this, _Blob_parts, "f"))) { + result += decoder.decode(chunk, { stream: true }); + } + result += decoder.decode(); + return result; + } + async arrayBuffer() { + const view = new Uint8Array(this.size); + let offset = 0; + for await (const chunk of (0, blobHelpers_1.consumeBlobParts)(__classPrivateFieldGet(this, _Blob_parts, "f"))) { + view.set(chunk, offset); + offset += chunk.length; + } + return view.buffer; + } + stream() { + const iterator = (0, blobHelpers_1.consumeBlobParts)(__classPrivateFieldGet(this, _Blob_parts, "f"), true); + return new web_streams_polyfill_1.ReadableStream({ + async pull(controller) { + const { value, done } = await iterator.next(); + if (done) { + return queueMicrotask(() => controller.close()); + } + controller.enqueue(value); + }, + async cancel() { + await iterator.return(); + } + }); + } + get [Symbol.toStringTag]() { + return "Blob"; + } +} +exports.Blob = Blob; +Object.defineProperties(Blob.prototype, { + type: { enumerable: true }, + size: { enumerable: true }, + slice: { enumerable: true }, + stream: { enumerable: true }, + text: { enumerable: true }, + arrayBuffer: { enumerable: true } +}); diff --git a/node_modules/formdata-node/lib/cjs/BlobPart.js b/node_modules/formdata-node/lib/cjs/BlobPart.js new file mode 100644 index 0000000..c8ad2e5 --- /dev/null +++ b/node_modules/formdata-node/lib/cjs/BlobPart.js @@ -0,0 +1,2 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/node_modules/formdata-node/lib/cjs/File.js b/node_modules/formdata-node/lib/cjs/File.js new file mode 100644 index 0000000..7829f0b --- /dev/null +++ b/node_modules/formdata-node/lib/cjs/File.js @@ -0,0 +1,52 @@ +"use strict"; +var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; +}; +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? 
receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +}; +var _File_name, _File_lastModified; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.File = void 0; +const Blob_1 = require("./Blob"); +class File extends Blob_1.Blob { + constructor(fileBits, name, options = {}) { + super(fileBits, options); + _File_name.set(this, void 0); + _File_lastModified.set(this, 0); + if (arguments.length < 2) { + throw new TypeError("Failed to construct 'File': 2 arguments required, " + + `but only ${arguments.length} present.`); + } + __classPrivateFieldSet(this, _File_name, String(name), "f"); + const lastModified = options.lastModified === undefined + ? Date.now() + : Number(options.lastModified); + if (!Number.isNaN(lastModified)) { + __classPrivateFieldSet(this, _File_lastModified, lastModified, "f"); + } + } + static [(_File_name = new WeakMap(), _File_lastModified = new WeakMap(), Symbol.hasInstance)](value) { + return value instanceof Blob_1.Blob + && value[Symbol.toStringTag] === "File" + && typeof value.name === "string"; + } + get name() { + return __classPrivateFieldGet(this, _File_name, "f"); + } + get lastModified() { + return __classPrivateFieldGet(this, _File_lastModified, "f"); + } + get webkitRelativePath() { + return ""; + } + get [Symbol.toStringTag]() { + return "File"; + } +} +exports.File = File; diff --git a/node_modules/formdata-node/lib/cjs/FormData.js b/node_modules/formdata-node/lib/cjs/FormData.js new file mode 100644 index 0000000..ce2fd11 --- /dev/null +++ b/node_modules/formdata-node/lib/cjs/FormData.js @@ -0,0 +1,148 @@ +"use strict"; +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? 
f.value : state.get(receiver); +}; +var _FormData_instances, _FormData_entries, _FormData_setEntry; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.FormData = void 0; +const util_1 = require("util"); +const File_1 = require("./File"); +const isFile_1 = require("./isFile"); +const isBlob_1 = require("./isBlob"); +const isFunction_1 = require("./isFunction"); +const deprecateConstructorEntries_1 = require("./deprecateConstructorEntries"); +class FormData { + constructor(entries) { + _FormData_instances.add(this); + _FormData_entries.set(this, new Map()); + if (entries) { + (0, deprecateConstructorEntries_1.deprecateConstructorEntries)(); + entries.forEach(({ name, value, fileName }) => this.append(name, value, fileName)); + } + } + static [(_FormData_entries = new WeakMap(), _FormData_instances = new WeakSet(), Symbol.hasInstance)](value) { + return Boolean(value + && (0, isFunction_1.isFunction)(value.constructor) + && value[Symbol.toStringTag] === "FormData" + && (0, isFunction_1.isFunction)(value.append) + && (0, isFunction_1.isFunction)(value.set) + && (0, isFunction_1.isFunction)(value.get) + && (0, isFunction_1.isFunction)(value.getAll) + && (0, isFunction_1.isFunction)(value.has) + && (0, isFunction_1.isFunction)(value.delete) + && (0, isFunction_1.isFunction)(value.entries) + && (0, isFunction_1.isFunction)(value.values) + && (0, isFunction_1.isFunction)(value.keys) + && (0, isFunction_1.isFunction)(value[Symbol.iterator]) + && (0, isFunction_1.isFunction)(value.forEach)); + } + append(name, value, fileName) { + __classPrivateFieldGet(this, _FormData_instances, "m", _FormData_setEntry).call(this, { + name, + fileName, + append: true, + rawValue: value, + argsLength: arguments.length + }); + } + set(name, value, fileName) { + __classPrivateFieldGet(this, _FormData_instances, "m", _FormData_setEntry).call(this, { + name, + fileName, + append: false, + rawValue: value, + argsLength: arguments.length + }); + } + get(name) { + const field = __classPrivateFieldGet(this, _FormData_entries, "f").get(String(name)); + if (!field) { + return null; + } + return field[0]; + } + getAll(name) { + const field = __classPrivateFieldGet(this, _FormData_entries, "f").get(String(name)); + if (!field) { + return []; + } + return field.slice(); + } + has(name) { + return __classPrivateFieldGet(this, _FormData_entries, "f").has(String(name)); + } + delete(name) { + __classPrivateFieldGet(this, _FormData_entries, "f").delete(String(name)); + } + *keys() { + for (const key of __classPrivateFieldGet(this, _FormData_entries, "f").keys()) { + yield key; + } + } + *entries() { + for (const name of this.keys()) { + const values = this.getAll(name); + for (const value of values) { + yield [name, value]; + } + } + } + *values() { + for (const [, value] of this) { + yield value; + } + } + [(_FormData_setEntry = function _FormData_setEntry({ name, rawValue, append, fileName, argsLength }) { + const methodName = append ? "append" : "set"; + if (argsLength < 2) { + throw new TypeError(`Failed to execute '${methodName}' on 'FormData': ` + + `2 arguments required, but only ${argsLength} present.`); + } + name = String(name); + let value; + if ((0, isFile_1.isFile)(rawValue)) { + value = fileName === undefined + ? rawValue + : new File_1.File([rawValue], fileName, { + type: rawValue.type, + lastModified: rawValue.lastModified + }); + } + else if ((0, isBlob_1.isBlob)(rawValue)) { + value = new File_1.File([rawValue], fileName === undefined ? 
"blob" : fileName, { + type: rawValue.type + }); + } + else if (fileName) { + throw new TypeError(`Failed to execute '${methodName}' on 'FormData': ` + + "parameter 2 is not of type 'Blob'."); + } + else { + value = String(rawValue); + } + const values = __classPrivateFieldGet(this, _FormData_entries, "f").get(name); + if (!values) { + return void __classPrivateFieldGet(this, _FormData_entries, "f").set(name, [value]); + } + if (!append) { + return void __classPrivateFieldGet(this, _FormData_entries, "f").set(name, [value]); + } + values.push(value); + }, Symbol.iterator)]() { + return this.entries(); + } + forEach(callback, thisArg) { + for (const [name, value] of this) { + callback.call(thisArg, value, name, this); + } + } + get [Symbol.toStringTag]() { + return "FormData"; + } + [util_1.inspect.custom]() { + return this[Symbol.toStringTag]; + } +} +exports.FormData = FormData; diff --git a/node_modules/formdata-node/lib/cjs/blobHelpers.js b/node_modules/formdata-node/lib/cjs/blobHelpers.js new file mode 100644 index 0000000..d33c1de --- /dev/null +++ b/node_modules/formdata-node/lib/cjs/blobHelpers.js @@ -0,0 +1,80 @@ +"use strict"; +/*! Based on fetch-blob. MIT License. Jimmy Wärting & David Frank */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.sliceBlob = exports.consumeBlobParts = void 0; +const isFunction_1 = require("./isFunction"); +const CHUNK_SIZE = 65536; +async function* clonePart(part) { + const end = part.byteOffset + part.byteLength; + let position = part.byteOffset; + while (position !== end) { + const size = Math.min(end - position, CHUNK_SIZE); + const chunk = part.buffer.slice(position, position + size); + position += chunk.byteLength; + yield new Uint8Array(chunk); + } +} +async function* consumeNodeBlob(blob) { + let position = 0; + while (position !== blob.size) { + const chunk = blob.slice(position, Math.min(blob.size, position + CHUNK_SIZE)); + const buffer = await chunk.arrayBuffer(); + position += buffer.byteLength; + yield new Uint8Array(buffer); + } +} +async function* consumeBlobParts(parts, clone = false) { + for (const part of parts) { + if (ArrayBuffer.isView(part)) { + if (clone) { + yield* clonePart(part); + } + else { + yield part; + } + } + else if ((0, isFunction_1.isFunction)(part.stream)) { + yield* part.stream(); + } + else { + yield* consumeNodeBlob(part); + } + } +} +exports.consumeBlobParts = consumeBlobParts; +function* sliceBlob(blobParts, blobSize, start = 0, end) { + end !== null && end !== void 0 ? end : (end = blobSize); + let relativeStart = start < 0 + ? Math.max(blobSize + start, 0) + : Math.min(start, blobSize); + let relativeEnd = end < 0 + ? Math.max(blobSize + end, 0) + : Math.min(end, blobSize); + const span = Math.max(relativeEnd - relativeStart, 0); + let added = 0; + for (const part of blobParts) { + if (added >= span) { + break; + } + const partSize = ArrayBuffer.isView(part) ? 
part.byteLength : part.size; + if (relativeStart && partSize <= relativeStart) { + relativeStart -= partSize; + relativeEnd -= partSize; + } + else { + let chunk; + if (ArrayBuffer.isView(part)) { + chunk = part.subarray(relativeStart, Math.min(partSize, relativeEnd)); + added += chunk.byteLength; + } + else { + chunk = part.slice(relativeStart, Math.min(partSize, relativeEnd)); + added += chunk.size; + } + relativeEnd -= partSize; + relativeStart = 0; + yield chunk; + } + } +} +exports.sliceBlob = sliceBlob; diff --git a/node_modules/formdata-node/lib/cjs/browser.js b/node_modules/formdata-node/lib/cjs/browser.js new file mode 100644 index 0000000..baa0447 --- /dev/null +++ b/node_modules/formdata-node/lib/cjs/browser.js @@ -0,0 +1,13 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.File = exports.Blob = exports.FormData = void 0; +const globalObject = (function () { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + return window; +}()); +exports.FormData = globalObject.FormData, exports.Blob = globalObject.Blob, exports.File = globalObject.File; diff --git a/node_modules/formdata-node/lib/cjs/deprecateConstructorEntries.js b/node_modules/formdata-node/lib/cjs/deprecateConstructorEntries.js new file mode 100644 index 0000000..2a70a09 --- /dev/null +++ b/node_modules/formdata-node/lib/cjs/deprecateConstructorEntries.js @@ -0,0 +1,6 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.deprecateConstructorEntries = void 0; +const util_1 = require("util"); +exports.deprecateConstructorEntries = (0, util_1.deprecate)(() => { }, "Constructor \"entries\" argument is not spec-compliant " + + "and will be removed in next major release."); diff --git a/node_modules/formdata-node/lib/cjs/fileFromPath.js b/node_modules/formdata-node/lib/cjs/fileFromPath.js new file mode 100644 index 0000000..26d5469 --- /dev/null +++ b/node_modules/formdata-node/lib/cjs/fileFromPath.js @@ -0,0 +1,97 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; +}; +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? 
receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +var _FileFromPath_path, _FileFromPath_start; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.fileFromPath = exports.fileFromPathSync = void 0; +const fs_1 = require("fs"); +const path_1 = require("path"); +const node_domexception_1 = __importDefault(require("node-domexception")); +const File_1 = require("./File"); +const isPlainObject_1 = __importDefault(require("./isPlainObject")); +__exportStar(require("./isFile"), exports); +const MESSAGE = "The requested file could not be read, " + + "typically due to permission problems that have occurred after a reference " + + "to a file was acquired."; +class FileFromPath { + constructor(input) { + _FileFromPath_path.set(this, void 0); + _FileFromPath_start.set(this, void 0); + __classPrivateFieldSet(this, _FileFromPath_path, input.path, "f"); + __classPrivateFieldSet(this, _FileFromPath_start, input.start || 0, "f"); + this.name = (0, path_1.basename)(__classPrivateFieldGet(this, _FileFromPath_path, "f")); + this.size = input.size; + this.lastModified = input.lastModified; + } + slice(start, end) { + return new FileFromPath({ + path: __classPrivateFieldGet(this, _FileFromPath_path, "f"), + lastModified: this.lastModified, + size: end - start, + start + }); + } + async *stream() { + const { mtimeMs } = await fs_1.promises.stat(__classPrivateFieldGet(this, _FileFromPath_path, "f")); + if (mtimeMs > this.lastModified) { + throw new node_domexception_1.default(MESSAGE, "NotReadableError"); + } + if (this.size) { + yield* (0, fs_1.createReadStream)(__classPrivateFieldGet(this, _FileFromPath_path, "f"), { + start: __classPrivateFieldGet(this, _FileFromPath_start, "f"), + end: __classPrivateFieldGet(this, _FileFromPath_start, "f") + this.size - 1 + }); + } + } + get [(_FileFromPath_path = new WeakMap(), _FileFromPath_start = new WeakMap(), Symbol.toStringTag)]() { + return "File"; + } +} +function createFileFromPath(path, { mtimeMs, size }, filenameOrOptions, options = {}) { + let filename; + if ((0, isPlainObject_1.default)(filenameOrOptions)) { + [options, filename] = [filenameOrOptions, undefined]; + } + else { + filename = filenameOrOptions; + } + const file = new FileFromPath({ path, size, lastModified: mtimeMs }); + if (!filename) { + filename = file.name; + } + return new File_1.File([file], filename, { + ...options, lastModified: file.lastModified + }); +} +function fileFromPathSync(path, filenameOrOptions, options = {}) { + const stats = (0, fs_1.statSync)(path); + return createFileFromPath(path, stats, filenameOrOptions, options); +} +exports.fileFromPathSync = fileFromPathSync; +async function fileFromPath(path, filenameOrOptions, options) { + const stats = await fs_1.promises.stat(path); + return createFileFromPath(path, stats, filenameOrOptions, options); +} +exports.fileFromPath = fileFromPath; diff --git a/node_modules/formdata-node/lib/cjs/index.js b/node_modules/formdata-node/lib/cjs/index.js new file mode 100644 index 0000000..6aef203 --- /dev/null +++ b/node_modules/formdata-node/lib/cjs/index.js @@ -0,0 +1,15 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +__exportStar(require("./FormData"), exports); +__exportStar(require("./Blob"), exports); +__exportStar(require("./File"), exports); diff --git a/node_modules/formdata-node/lib/cjs/isBlob.js b/node_modules/formdata-node/lib/cjs/isBlob.js new file mode 100644 index 0000000..a599c5a --- /dev/null +++ b/node_modules/formdata-node/lib/cjs/isBlob.js @@ -0,0 +1,6 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.isBlob = void 0; +const Blob_1 = require("./Blob"); +const isBlob = (value) => value instanceof Blob_1.Blob; +exports.isBlob = isBlob; diff --git a/node_modules/formdata-node/lib/cjs/isFile.js b/node_modules/formdata-node/lib/cjs/isFile.js new file mode 100644 index 0000000..1c04792 --- /dev/null +++ b/node_modules/formdata-node/lib/cjs/isFile.js @@ -0,0 +1,6 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.isFile = void 0; +const File_1 = require("./File"); +const isFile = (value) => value instanceof File_1.File; +exports.isFile = isFile; diff --git a/node_modules/formdata-node/lib/cjs/isFunction.js b/node_modules/formdata-node/lib/cjs/isFunction.js new file mode 100644 index 0000000..4b9bbdf --- /dev/null +++ b/node_modules/formdata-node/lib/cjs/isFunction.js @@ -0,0 +1,5 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.isFunction = void 0; +const isFunction = (value) => (typeof value === "function"); +exports.isFunction = isFunction; diff --git a/node_modules/formdata-node/lib/cjs/isPlainObject.js b/node_modules/formdata-node/lib/cjs/isPlainObject.js new file mode 100644 index 0000000..ec54623 --- /dev/null +++ b/node_modules/formdata-node/lib/cjs/isPlainObject.js @@ -0,0 +1,15 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const getType = (value) => (Object.prototype.toString.call(value).slice(8, -1).toLowerCase()); +function isPlainObject(value) { + if (getType(value) !== "object") { + return false; + } + const pp = Object.getPrototypeOf(value); + if (pp === null || pp === undefined) { + return true; + } + const Ctor = pp.constructor && pp.constructor.toString(); + return Ctor === Object.toString(); +} +exports.default = isPlainObject; diff --git a/node_modules/formdata-node/lib/cjs/package.json b/node_modules/formdata-node/lib/cjs/package.json new file mode 100644 index 0000000..5bbefff --- /dev/null +++ b/node_modules/formdata-node/lib/cjs/package.json @@ -0,0 +1,3 @@ +{ + "type": "commonjs" +} diff --git a/node_modules/formdata-node/lib/esm/Blob.js b/node_modules/formdata-node/lib/esm/Blob.js new file mode 100644 index 0000000..e7e3cc6 --- /dev/null +++ b/node_modules/formdata-node/lib/esm/Blob.js @@ -0,0 +1,118 @@ +/*! Based on fetch-blob. MIT License. Jimmy Wärting & David Frank */ +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? 
receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +}; +var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; +}; +var _Blob_parts, _Blob_type, _Blob_size; +import { ReadableStream } from "web-streams-polyfill"; +import { isFunction } from "./isFunction.js"; +import { consumeBlobParts, sliceBlob } from "./blobHelpers.js"; +export class Blob { + constructor(blobParts = [], options = {}) { + _Blob_parts.set(this, []); + _Blob_type.set(this, ""); + _Blob_size.set(this, 0); + options !== null && options !== void 0 ? options : (options = {}); + if (typeof blobParts !== "object" || blobParts === null) { + throw new TypeError("Failed to construct 'Blob': " + + "The provided value cannot be converted to a sequence."); + } + if (!isFunction(blobParts[Symbol.iterator])) { + throw new TypeError("Failed to construct 'Blob': " + + "The object must have a callable @@iterator property."); + } + if (typeof options !== "object" && !isFunction(options)) { + throw new TypeError("Failed to construct 'Blob': parameter 2 cannot convert to dictionary."); + } + const encoder = new TextEncoder(); + for (const raw of blobParts) { + let part; + if (ArrayBuffer.isView(raw)) { + part = new Uint8Array(raw.buffer.slice(raw.byteOffset, raw.byteOffset + raw.byteLength)); + } + else if (raw instanceof ArrayBuffer) { + part = new Uint8Array(raw.slice(0)); + } + else if (raw instanceof Blob) { + part = raw; + } + else { + part = encoder.encode(String(raw)); + } + __classPrivateFieldSet(this, _Blob_size, __classPrivateFieldGet(this, _Blob_size, "f") + (ArrayBuffer.isView(part) ? part.byteLength : part.size), "f"); + __classPrivateFieldGet(this, _Blob_parts, "f").push(part); + } + const type = options.type === undefined ? "" : String(options.type); + __classPrivateFieldSet(this, _Blob_type, /^[\x20-\x7E]*$/.test(type) ? 
type : "", "f"); + } + static [(_Blob_parts = new WeakMap(), _Blob_type = new WeakMap(), _Blob_size = new WeakMap(), Symbol.hasInstance)](value) { + return Boolean(value + && typeof value === "object" + && isFunction(value.constructor) + && (isFunction(value.stream) + || isFunction(value.arrayBuffer)) + && /^(Blob|File)$/.test(value[Symbol.toStringTag])); + } + get type() { + return __classPrivateFieldGet(this, _Blob_type, "f"); + } + get size() { + return __classPrivateFieldGet(this, _Blob_size, "f"); + } + slice(start, end, contentType) { + return new Blob(sliceBlob(__classPrivateFieldGet(this, _Blob_parts, "f"), this.size, start, end), { + type: contentType + }); + } + async text() { + const decoder = new TextDecoder(); + let result = ""; + for await (const chunk of consumeBlobParts(__classPrivateFieldGet(this, _Blob_parts, "f"))) { + result += decoder.decode(chunk, { stream: true }); + } + result += decoder.decode(); + return result; + } + async arrayBuffer() { + const view = new Uint8Array(this.size); + let offset = 0; + for await (const chunk of consumeBlobParts(__classPrivateFieldGet(this, _Blob_parts, "f"))) { + view.set(chunk, offset); + offset += chunk.length; + } + return view.buffer; + } + stream() { + const iterator = consumeBlobParts(__classPrivateFieldGet(this, _Blob_parts, "f"), true); + return new ReadableStream({ + async pull(controller) { + const { value, done } = await iterator.next(); + if (done) { + return queueMicrotask(() => controller.close()); + } + controller.enqueue(value); + }, + async cancel() { + await iterator.return(); + } + }); + } + get [Symbol.toStringTag]() { + return "Blob"; + } +} +Object.defineProperties(Blob.prototype, { + type: { enumerable: true }, + size: { enumerable: true }, + slice: { enumerable: true }, + stream: { enumerable: true }, + text: { enumerable: true }, + arrayBuffer: { enumerable: true } +}); diff --git a/node_modules/formdata-node/lib/esm/BlobPart.js b/node_modules/formdata-node/lib/esm/BlobPart.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/node_modules/formdata-node/lib/esm/BlobPart.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/formdata-node/lib/esm/File.js b/node_modules/formdata-node/lib/esm/File.js new file mode 100644 index 0000000..ec06e31 --- /dev/null +++ b/node_modules/formdata-node/lib/esm/File.js @@ -0,0 +1,48 @@ +var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; +}; +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? 
f.value : state.get(receiver); +}; +var _File_name, _File_lastModified; +import { Blob } from "./Blob.js"; +export class File extends Blob { + constructor(fileBits, name, options = {}) { + super(fileBits, options); + _File_name.set(this, void 0); + _File_lastModified.set(this, 0); + if (arguments.length < 2) { + throw new TypeError("Failed to construct 'File': 2 arguments required, " + + `but only ${arguments.length} present.`); + } + __classPrivateFieldSet(this, _File_name, String(name), "f"); + const lastModified = options.lastModified === undefined + ? Date.now() + : Number(options.lastModified); + if (!Number.isNaN(lastModified)) { + __classPrivateFieldSet(this, _File_lastModified, lastModified, "f"); + } + } + static [(_File_name = new WeakMap(), _File_lastModified = new WeakMap(), Symbol.hasInstance)](value) { + return value instanceof Blob + && value[Symbol.toStringTag] === "File" + && typeof value.name === "string"; + } + get name() { + return __classPrivateFieldGet(this, _File_name, "f"); + } + get lastModified() { + return __classPrivateFieldGet(this, _File_lastModified, "f"); + } + get webkitRelativePath() { + return ""; + } + get [Symbol.toStringTag]() { + return "File"; + } +} diff --git a/node_modules/formdata-node/lib/esm/FormData.js b/node_modules/formdata-node/lib/esm/FormData.js new file mode 100644 index 0000000..d7a988b --- /dev/null +++ b/node_modules/formdata-node/lib/esm/FormData.js @@ -0,0 +1,144 @@ +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? 
f.value : state.get(receiver); +}; +var _FormData_instances, _FormData_entries, _FormData_setEntry; +import { inspect } from "util"; +import { File } from "./File.js"; +import { isFile } from "./isFile.js"; +import { isBlob } from "./isBlob.js"; +import { isFunction } from "./isFunction.js"; +import { deprecateConstructorEntries } from "./deprecateConstructorEntries.js"; +export class FormData { + constructor(entries) { + _FormData_instances.add(this); + _FormData_entries.set(this, new Map()); + if (entries) { + deprecateConstructorEntries(); + entries.forEach(({ name, value, fileName }) => this.append(name, value, fileName)); + } + } + static [(_FormData_entries = new WeakMap(), _FormData_instances = new WeakSet(), Symbol.hasInstance)](value) { + return Boolean(value + && isFunction(value.constructor) + && value[Symbol.toStringTag] === "FormData" + && isFunction(value.append) + && isFunction(value.set) + && isFunction(value.get) + && isFunction(value.getAll) + && isFunction(value.has) + && isFunction(value.delete) + && isFunction(value.entries) + && isFunction(value.values) + && isFunction(value.keys) + && isFunction(value[Symbol.iterator]) + && isFunction(value.forEach)); + } + append(name, value, fileName) { + __classPrivateFieldGet(this, _FormData_instances, "m", _FormData_setEntry).call(this, { + name, + fileName, + append: true, + rawValue: value, + argsLength: arguments.length + }); + } + set(name, value, fileName) { + __classPrivateFieldGet(this, _FormData_instances, "m", _FormData_setEntry).call(this, { + name, + fileName, + append: false, + rawValue: value, + argsLength: arguments.length + }); + } + get(name) { + const field = __classPrivateFieldGet(this, _FormData_entries, "f").get(String(name)); + if (!field) { + return null; + } + return field[0]; + } + getAll(name) { + const field = __classPrivateFieldGet(this, _FormData_entries, "f").get(String(name)); + if (!field) { + return []; + } + return field.slice(); + } + has(name) { + return __classPrivateFieldGet(this, _FormData_entries, "f").has(String(name)); + } + delete(name) { + __classPrivateFieldGet(this, _FormData_entries, "f").delete(String(name)); + } + *keys() { + for (const key of __classPrivateFieldGet(this, _FormData_entries, "f").keys()) { + yield key; + } + } + *entries() { + for (const name of this.keys()) { + const values = this.getAll(name); + for (const value of values) { + yield [name, value]; + } + } + } + *values() { + for (const [, value] of this) { + yield value; + } + } + [(_FormData_setEntry = function _FormData_setEntry({ name, rawValue, append, fileName, argsLength }) { + const methodName = append ? "append" : "set"; + if (argsLength < 2) { + throw new TypeError(`Failed to execute '${methodName}' on 'FormData': ` + + `2 arguments required, but only ${argsLength} present.`); + } + name = String(name); + let value; + if (isFile(rawValue)) { + value = fileName === undefined + ? rawValue + : new File([rawValue], fileName, { + type: rawValue.type, + lastModified: rawValue.lastModified + }); + } + else if (isBlob(rawValue)) { + value = new File([rawValue], fileName === undefined ? 
"blob" : fileName, { + type: rawValue.type + }); + } + else if (fileName) { + throw new TypeError(`Failed to execute '${methodName}' on 'FormData': ` + + "parameter 2 is not of type 'Blob'."); + } + else { + value = String(rawValue); + } + const values = __classPrivateFieldGet(this, _FormData_entries, "f").get(name); + if (!values) { + return void __classPrivateFieldGet(this, _FormData_entries, "f").set(name, [value]); + } + if (!append) { + return void __classPrivateFieldGet(this, _FormData_entries, "f").set(name, [value]); + } + values.push(value); + }, Symbol.iterator)]() { + return this.entries(); + } + forEach(callback, thisArg) { + for (const [name, value] of this) { + callback.call(thisArg, value, name, this); + } + } + get [Symbol.toStringTag]() { + return "FormData"; + } + [inspect.custom]() { + return this[Symbol.toStringTag]; + } +} diff --git a/node_modules/formdata-node/lib/esm/blobHelpers.js b/node_modules/formdata-node/lib/esm/blobHelpers.js new file mode 100644 index 0000000..27ac037 --- /dev/null +++ b/node_modules/formdata-node/lib/esm/blobHelpers.js @@ -0,0 +1,75 @@ +/*! Based on fetch-blob. MIT License. Jimmy Wärting & David Frank */ +import { isFunction } from "./isFunction.js"; +const CHUNK_SIZE = 65536; +async function* clonePart(part) { + const end = part.byteOffset + part.byteLength; + let position = part.byteOffset; + while (position !== end) { + const size = Math.min(end - position, CHUNK_SIZE); + const chunk = part.buffer.slice(position, position + size); + position += chunk.byteLength; + yield new Uint8Array(chunk); + } +} +async function* consumeNodeBlob(blob) { + let position = 0; + while (position !== blob.size) { + const chunk = blob.slice(position, Math.min(blob.size, position + CHUNK_SIZE)); + const buffer = await chunk.arrayBuffer(); + position += buffer.byteLength; + yield new Uint8Array(buffer); + } +} +export async function* consumeBlobParts(parts, clone = false) { + for (const part of parts) { + if (ArrayBuffer.isView(part)) { + if (clone) { + yield* clonePart(part); + } + else { + yield part; + } + } + else if (isFunction(part.stream)) { + yield* part.stream(); + } + else { + yield* consumeNodeBlob(part); + } + } +} +export function* sliceBlob(blobParts, blobSize, start = 0, end) { + end !== null && end !== void 0 ? end : (end = blobSize); + let relativeStart = start < 0 + ? Math.max(blobSize + start, 0) + : Math.min(start, blobSize); + let relativeEnd = end < 0 + ? Math.max(blobSize + end, 0) + : Math.min(end, blobSize); + const span = Math.max(relativeEnd - relativeStart, 0); + let added = 0; + for (const part of blobParts) { + if (added >= span) { + break; + } + const partSize = ArrayBuffer.isView(part) ? 
part.byteLength : part.size; + if (relativeStart && partSize <= relativeStart) { + relativeStart -= partSize; + relativeEnd -= partSize; + } + else { + let chunk; + if (ArrayBuffer.isView(part)) { + chunk = part.subarray(relativeStart, Math.min(partSize, relativeEnd)); + added += chunk.byteLength; + } + else { + chunk = part.slice(relativeStart, Math.min(partSize, relativeEnd)); + added += chunk.size; + } + relativeEnd -= partSize; + relativeStart = 0; + yield chunk; + } + } +} diff --git a/node_modules/formdata-node/lib/esm/browser.js b/node_modules/formdata-node/lib/esm/browser.js new file mode 100644 index 0000000..ee38251 --- /dev/null +++ b/node_modules/formdata-node/lib/esm/browser.js @@ -0,0 +1,10 @@ +const globalObject = (function () { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + return window; +}()); +export const { FormData, Blob, File } = globalObject; diff --git a/node_modules/formdata-node/lib/esm/deprecateConstructorEntries.js b/node_modules/formdata-node/lib/esm/deprecateConstructorEntries.js new file mode 100644 index 0000000..45b66bf --- /dev/null +++ b/node_modules/formdata-node/lib/esm/deprecateConstructorEntries.js @@ -0,0 +1,3 @@ +import { deprecate } from "util"; +export const deprecateConstructorEntries = deprecate(() => { }, "Constructor \"entries\" argument is not spec-compliant " + + "and will be removed in next major release."); diff --git a/node_modules/formdata-node/lib/esm/fileFromPath.js b/node_modules/formdata-node/lib/esm/fileFromPath.js new file mode 100644 index 0000000..30ab35e --- /dev/null +++ b/node_modules/formdata-node/lib/esm/fileFromPath.js @@ -0,0 +1,79 @@ +var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; +}; +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? 
f.value : state.get(receiver); +}; +var _FileFromPath_path, _FileFromPath_start; +import { statSync, createReadStream, promises as fs } from "fs"; +import { basename } from "path"; +import DOMException from "node-domexception"; +import { File } from "./File.js"; +import isPlainObject from "./isPlainObject.js"; +export * from "./isFile.js"; +const MESSAGE = "The requested file could not be read, " + + "typically due to permission problems that have occurred after a reference " + + "to a file was acquired."; +class FileFromPath { + constructor(input) { + _FileFromPath_path.set(this, void 0); + _FileFromPath_start.set(this, void 0); + __classPrivateFieldSet(this, _FileFromPath_path, input.path, "f"); + __classPrivateFieldSet(this, _FileFromPath_start, input.start || 0, "f"); + this.name = basename(__classPrivateFieldGet(this, _FileFromPath_path, "f")); + this.size = input.size; + this.lastModified = input.lastModified; + } + slice(start, end) { + return new FileFromPath({ + path: __classPrivateFieldGet(this, _FileFromPath_path, "f"), + lastModified: this.lastModified, + size: end - start, + start + }); + } + async *stream() { + const { mtimeMs } = await fs.stat(__classPrivateFieldGet(this, _FileFromPath_path, "f")); + if (mtimeMs > this.lastModified) { + throw new DOMException(MESSAGE, "NotReadableError"); + } + if (this.size) { + yield* createReadStream(__classPrivateFieldGet(this, _FileFromPath_path, "f"), { + start: __classPrivateFieldGet(this, _FileFromPath_start, "f"), + end: __classPrivateFieldGet(this, _FileFromPath_start, "f") + this.size - 1 + }); + } + } + get [(_FileFromPath_path = new WeakMap(), _FileFromPath_start = new WeakMap(), Symbol.toStringTag)]() { + return "File"; + } +} +function createFileFromPath(path, { mtimeMs, size }, filenameOrOptions, options = {}) { + let filename; + if (isPlainObject(filenameOrOptions)) { + [options, filename] = [filenameOrOptions, undefined]; + } + else { + filename = filenameOrOptions; + } + const file = new FileFromPath({ path, size, lastModified: mtimeMs }); + if (!filename) { + filename = file.name; + } + return new File([file], filename, { + ...options, lastModified: file.lastModified + }); +} +export function fileFromPathSync(path, filenameOrOptions, options = {}) { + const stats = statSync(path); + return createFileFromPath(path, stats, filenameOrOptions, options); +} +export async function fileFromPath(path, filenameOrOptions, options) { + const stats = await fs.stat(path); + return createFileFromPath(path, stats, filenameOrOptions, options); +} diff --git a/node_modules/formdata-node/lib/esm/index.js b/node_modules/formdata-node/lib/esm/index.js new file mode 100644 index 0000000..fffae8c --- /dev/null +++ b/node_modules/formdata-node/lib/esm/index.js @@ -0,0 +1,3 @@ +export * from "./FormData.js"; +export * from "./Blob.js"; +export * from "./File.js"; diff --git a/node_modules/formdata-node/lib/esm/isBlob.js b/node_modules/formdata-node/lib/esm/isBlob.js new file mode 100644 index 0000000..8a62242 --- /dev/null +++ b/node_modules/formdata-node/lib/esm/isBlob.js @@ -0,0 +1,2 @@ +import { Blob } from "./Blob.js"; +export const isBlob = (value) => value instanceof Blob; diff --git a/node_modules/formdata-node/lib/esm/isFile.js b/node_modules/formdata-node/lib/esm/isFile.js new file mode 100644 index 0000000..03a0a58 --- /dev/null +++ b/node_modules/formdata-node/lib/esm/isFile.js @@ -0,0 +1,2 @@ +import { File } from "./File.js"; +export const isFile = (value) => value instanceof File; diff --git 
a/node_modules/formdata-node/lib/esm/isFunction.js b/node_modules/formdata-node/lib/esm/isFunction.js new file mode 100644 index 0000000..a8dbdc2 --- /dev/null +++ b/node_modules/formdata-node/lib/esm/isFunction.js @@ -0,0 +1 @@ +export const isFunction = (value) => (typeof value === "function"); diff --git a/node_modules/formdata-node/lib/esm/isPlainObject.js b/node_modules/formdata-node/lib/esm/isPlainObject.js new file mode 100644 index 0000000..8147580 --- /dev/null +++ b/node_modules/formdata-node/lib/esm/isPlainObject.js @@ -0,0 +1,13 @@ +const getType = (value) => (Object.prototype.toString.call(value).slice(8, -1).toLowerCase()); +function isPlainObject(value) { + if (getType(value) !== "object") { + return false; + } + const pp = Object.getPrototypeOf(value); + if (pp === null || pp === undefined) { + return true; + } + const Ctor = pp.constructor && pp.constructor.toString(); + return Ctor === Object.toString(); +} +export default isPlainObject; diff --git a/node_modules/formdata-node/lib/esm/package.json b/node_modules/formdata-node/lib/esm/package.json new file mode 100644 index 0000000..3dbc1ca --- /dev/null +++ b/node_modules/formdata-node/lib/esm/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} diff --git a/node_modules/formdata-node/lib/node-domexception.d.ts b/node_modules/formdata-node/lib/node-domexception.d.ts new file mode 100644 index 0000000..1a02556 --- /dev/null +++ b/node_modules/formdata-node/lib/node-domexception.d.ts @@ -0,0 +1,5 @@ +declare class DOMException { + constructor(message: string, name: string) +} + +export default DOMException diff --git a/node_modules/formdata-node/license b/node_modules/formdata-node/license new file mode 100644 index 0000000..e4e3178 --- /dev/null +++ b/node_modules/formdata-node/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2017-present Nick K. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/formdata-node/package.json b/node_modules/formdata-node/package.json new file mode 100644 index 0000000..0f8c97e --- /dev/null +++ b/node_modules/formdata-node/package.json @@ -0,0 +1,94 @@ +{ + "name": "formdata-node", + "version": "4.4.1", + "description": "Spec-compliant FormData implementation for Node.js", + "repository": "octet-stream/form-data", + "sideEffects": false, + "keywords": [ + "form-data", + "node", + "form", + "upload", + "files-upload", + "ponyfill" + ], + "author": "Nick K. 
", + "license": "MIT", + "main": "./lib/cjs/index.js", + "module": "./lib/esm/browser.js", + "browser": "./lib/cjs/browser.js", + "exports": { + "./package.json": "./package.json", + ".": { + "node": { + "types": "./@type/index.d.ts", + "import": "./lib/esm/index.js", + "require": "./lib/cjs/index.js" + }, + "browser": { + "types": "./@type/browser.d.ts", + "import": "./lib/esm/browser.js", + "require": "./lib/cjs/browser.js" + }, + "default": "./lib/esm/index.js" + }, + "./file-from-path": { + "types": "./@type/fileFromPath.d.ts", + "import": "./lib/esm/fileFromPath.js", + "require": "./lib/cjs/fileFromPath.js" + } + }, + "types": "./@type/index.d.ts", + "typesVersions": { + "*": { + "file-from-path": [ + "@type/fileFromPath.d.ts" + ] + } + }, + "engines": { + "node": ">= 12.20" + }, + "scripts": { + "eslint": "eslint lib/**/*.ts", + "staged": "lint-staged", + "coverage": "c8 npm test", + "report:html": "c8 -r=html npm test", + "ci": "c8 npm test && c8 report --reporter=json", + "build:esm": "ttsc --project tsconfig.esm.json", + "build:cjs": "ttsc --project tsconfig.cjs.json", + "build:types": "ttsc --project tsconfig.d.ts.json", + "build": "npm run build:esm && npm run build:cjs && npm run build:types", + "test": "ava --fail-fast", + "cleanup": "npx rimraf @type \"lib/**/*.js\"", + "prepare": "npm run cleanup && npm run build", + "husky": "husky install" + }, + "devDependencies": { + "@octetstream/eslint-config": "5.0.0", + "@types/node": "17.0.19", + "@types/sinon": "10.0.11", + "@typescript-eslint/eslint-plugin": "4.32.0", + "@typescript-eslint/parser": "4.32.0", + "@zoltu/typescript-transformer-append-js-extension": "1.0.1", + "ava": "4.0.1", + "c8": "7.12.0", + "eslint": "7.32.0", + "eslint-config-airbnb-typescript": "12.3.1", + "eslint-import-resolver-typescript": "2.5.0", + "eslint-plugin-ava": "12.0.0", + "eslint-plugin-jsx-a11y": "6.4.1", + "eslint-plugin-react": "7.26.1", + "husky": "7.0.4", + "lint-staged": "12.3.4", + "rimraf": "^3.0.2", + "sinon": "13.0.1", + "ts-node": "10.5.0", + "ttypescript": "1.5.13", + "typescript": "4.5.5" + }, + "dependencies": { + "node-domexception": "1.0.0", + "web-streams-polyfill": "4.0.0-beta.3" + } +} diff --git a/node_modules/formdata-node/readme.md b/node_modules/formdata-node/readme.md new file mode 100644 index 0000000..d67d0cb --- /dev/null +++ b/node_modules/formdata-node/readme.md @@ -0,0 +1,444 @@ +# FormData + +Spec-compliant [`FormData`](https://developer.mozilla.org/en-US/docs/Web/API/FormData) implementation for Node.js + +[![Code Coverage](https://codecov.io/github/octet-stream/form-data/coverage.svg?branch=master)](https://codecov.io/github/octet-stream/form-data?branch=master) +[![CI](https://github.com/octet-stream/form-data/workflows/CI/badge.svg)](https://github.com/octet-stream/form-data/actions/workflows/ci.yml) +[![ESLint](https://github.com/octet-stream/form-data/workflows/ESLint/badge.svg)](https://github.com/octet-stream/form-data/actions/workflows/eslint.yml) + +## Highlights + +1. Spec-compliant: implements every method of the [`FormData interface`](https://developer.mozilla.org/en-US/docs/Web/API/FormData). +2. Supports Blobs and Files sourced from anywhere: you can use builtin [`fileFromPath`](#filefrompathpath-filename-options---promisefile) and [`fileFromPathSync`](#filefrompathsyncpath-filename-options---file) helpers to create a File from FS, or you can implement your `BlobDataItem` object to use a different source of data. +3. Supports both ESM and CJS targets. 
See [`ESM/CJS support`](#esmcjs-support) section for details. +4. Written on TypeScript and ships with TS typings. +5. Isomorphic, but only re-exports native FormData object for browsers. If you need a polyfill for browsers, use [`formdata-polyfill`](https://github.com/jimmywarting/FormData) +6. It's a [`ponyfill`](https://ponyfill.com/)! Which means, no effect has been caused on `globalThis` or native `FormData` implementation. + +## Installation + +You can install this package with npm: + +``` +npm install formdata-node +``` + +Or yarn: + +``` +yarn add formdata-node +``` + +Or pnpm + +``` +pnpm add formdata-node +``` + +## ESM/CJS support + +This package is targeting ESM and CJS for backwards compatibility reasons and smoothen transition period while you convert your projects to ESM only. Note that CJS support will be removed as [Node.js v12 will reach its EOL](https://github.com/nodejs/release#release-schedule). This change will be released as major version update, so you won't miss it. + +## Usage + +1. Let's take a look at minimal example with [got](https://github.com/sindresorhus/got): + +```js +import {FormData} from "formdata-node" + +// I assume Got >= 12.x is used for this example +import got from "got" + +const form = new FormData() + +form.set("greeting", "Hello, World!") + +const data = await got.post("https://httpbin.org/post", {body: form}).json() + +console.log(data.form.greeting) // => Hello, World! +``` + +2. If your HTTP client does not support spec-compliant FormData, you can use [`form-data-encoder`](https://github.com/octet-stream/form-data-encoder) to encode entries: + +```js +import {Readable} from "stream" + +import {FormDataEncoder} from "form-data-encoder" +import {FormData} from "formdata-node" + +// Note that `node-fetch` >= 3.x have builtin support for spec-compliant FormData, sou you'll only need the `form-data-encoder` if you use `node-fetch` <= 2.x. +import fetch from "node-fetch" + +const form = new FormData() + +form.set("field", "Some value") + +const encoder = new FormDataEncoder(form) + +const options = { + method: "post", + headers: encoder.headers, + body: Readable.from(encoder) +} + +await fetch("https://httpbin.org/post", options) +``` + +3. Sending files over form-data: + +```js +import {FormData, File} from "formdata-node" // You can use `File` from fetch-blob >= 3.x + +import fetch from "node-fetch" + +const form = new FormData() +const file = new File(["My hovercraft is full of eels"], "file.txt") + +form.set("file", file) + +await fetch("https://httpbin.org/post", {method: "post", body: form}) +``` + +4. Blobs as field's values allowed too: + +```js +import {FormData, Blob} from "formdata-node" // You can use `Blob` from fetch-blob + +const form = new FormData() +const blob = new Blob(["Some content"], {type: "text/plain"}) + +form.set("blob", blob) + +// Will always be returned as `File` +let file = form.get("blob") + +// The created file has "blob" as the name by default +console.log(file.name) // -> blob + +// To change that, you need to set filename argument manually +form.set("file", blob, "some-file.txt") + +file = form.get("file") + +console.log(file.name) // -> some-file.txt +``` + +5. You can also append files using `fileFromPath` or `fileFromPathSync` helpers. 
It does the same thing as [`fetch-blob/from`](https://github.com/node-fetch/fetch-blob#blob-part-backed-up-by-filesystem), but returns a `File` instead of `Blob`: + +```js +import {fileFromPath} from "formdata-node/file-from-path" +import {FormData} from "formdata-node" + +import fetch from "node-fetch" + +const form = new FormData() + +form.set("file", await fileFromPath("/path/to/a/file")) + +await fetch("https://httpbin.org/post", {method: "post", body: form}) +``` + +6. You can still use files sourced from any stream, but unlike in v2 you'll need some extra work to achieve that: + +```js +import {Readable} from "stream" + +import {FormData} from "formdata-node" + +class BlobFromStream { + #stream + + constructor(stream, size) { + this.#stream = stream + this.size = size + } + + stream() { + return this.#stream + } + + get [Symbol.toStringTag]() { + return "Blob" + } +} + +const content = Buffer.from("Stream content") + +const stream = new Readable({ + read() { + this.push(content) + this.push(null) + } +}) + +const form = new FormData() + +form.set("stream", new BlobFromStream(stream, content.length), "file.txt") + +await fetch("https://httpbin.org/post", {method: "post", body: form}) +``` + +7. Note that if you don't know the length of that stream, you'll also need to handle form-data encoding manually or use [`form-data-encoder`](https://github.com/octet-stream/form-data-encoder) package. This is necessary to control which headers will be sent with your HTTP request: + +```js +import {Readable} from "stream" + +import {Encoder} from "form-data-encoder" +import {FormData} from "formdata-node" + +const form = new FormData() + +// You can use file-shaped or blob-shaped objects as FormData value instead of creating separate class +form.set("stream", { + type: "text/plain", + name: "file.txt", + [Symbol.toStringTag]: "File", + stream() { + return getStreamFromSomewhere() + } +}) + +const encoder = new Encoder(form) + +const options = { + method: "post", + headers: { + "content-type": encoder.contentType + }, + body: Readable.from(encoder) +} + +await fetch("https://httpbin.org/post", {method: "post", body: form}) +``` + +## Comparison + +| | formdata-node | formdata-polyfill | undici FormData | form-data | +| ---------------- | ------------- | ----------------- | --------------- | -------------------- | +| .append() | ✔️ | ✔️ | ✔️ | ✔️1 | +| .set() | ✔️ | ✔️ | ✔️ | ❌ | +| .get() | ✔️ | ✔️ | ✔️ | ❌ | +| .getAll() | ✔️ | ✔️ | ✔️ | ❌ | +| .forEach() | ✔️ | ✔️ | ✔️ | ❌ | +| .keys() | ✔️ | ✔️ | ✔️ | ❌ | +| .values() | ✔️ | ✔️ | ✔️ | ❌ | +| .entries() | ✔️ | ✔️ | ✔️ | ❌ | +| Symbol.iterator | ✔️ | ✔️ | ✔️ | ❌ | +| CommonJS | ✔️ | ❌ | ✔️ | ✔️ | +| ESM | ✔️ | ✔️ | ✔️2 | ✔️2 | +| Blob | ✔️3 | ✔️4 | ✔️3 | ❌ | +| Browser polyfill | ❌ | ✔️ | ✔️ | ❌ | +| Builtin encoder | ❌ | ✔️ | ✔️5 | ✔️ | + +1 Does not support Blob and File in entry value, but allows streams and Buffer (which is not spec-compiant, however). + +2 Can be imported in ESM, because Node.js support for CJS modules in ESM context, but it does not have ESM entry point. + +3 Have builtin implementations of Blob and/or File, allows native Blob and File as entry value. + +4 Support Blob and File via fetch-blob package, allows native Blob and File as entry value. + +5 Have `multipart/form-data` encoder as part of their `fetch` implementation. + +✔️ - For FormData methods, indicates that the method is present and spec-compliant. For features, shows its presence. + +❌ - Indicates that method or feature is not implemented. 
+ +## API + +### `class FormData` + +##### `constructor([entries]) -> {FormData}` + +Creates a new FormData instance + + - **{array}** [entries = null] – an optional FormData initial entries. + Each initial field should be passed as a collection of the objects + with "name", "value" and "filename" props. + See the [FormData#append()](#appendname-value-filename---void) for more info about the available format. + +#### Instance methods + +##### `set(name, value[, filename]) -> {void}` + +Set a new value for an existing key inside **FormData**, +or add the new field if it does not already exist. + + - **{string}** name – The name of the field whose data is contained in `value`. + - **{unknown}** value – The field's value. This can be [`Blob`](https://developer.mozilla.org/en-US/docs/Web/API/Blob) + or [`File`](https://developer.mozilla.org/en-US/docs/Web/API/File). If none of these are specified the value is converted to a string. + - **{string}** [filename = undefined] – The filename reported to the server, when a Blob or File is passed as the second parameter. The default filename for Blob objects is "blob". The default filename for File objects is the file's filename. + +##### `append(name, value[, filename]) -> {void}` + +Appends a new value onto an existing key inside a FormData object, +or adds the key if it does not already exist. + +The difference between `set()` and `append()` is that if the specified key already exists, `set()` will overwrite all existing values with the new one, whereas `append()` will append the new value onto the end of the existing set of values. + + - **{string}** name – The name of the field whose data is contained in `value`. + - **{unknown}** value – The field's value. This can be [`Blob`](https://developer.mozilla.org/en-US/docs/Web/API/Blob) + or [`File`](https://developer.mozilla.org/en-US/docs/Web/API/File). If none of these are specified the value is converted to a string. + - **{string}** [filename = undefined] – The filename reported to the server, when a Blob or File is passed as the second parameter. The default filename for Blob objects is "blob". The default filename for File objects is the file's filename. + +##### `get(name) -> {FormDataValue}` + +Returns the first value associated with a given key from within a `FormData` object. +If you expect multiple values and want all of them, use the `getAll()` method instead. + + - **{string}** name – A name of the value you want to retrieve. + +##### `getAll(name) -> {Array}` + +Returns all the values associated with a given key from within a `FormData` object. + + - **{string}** name – A name of the value you want to retrieve. + +##### `has(name) -> {boolean}` + +Returns a boolean stating whether a `FormData` object contains a certain key. + + - **{string}** – A string representing the name of the key you want to test for. + +##### `delete(name) -> {void}` + +Deletes a key and its value(s) from a `FormData` object. + + - **{string}** name – The name of the key you want to delete. + +##### `forEach(callback[, thisArg]) -> {void}` + +Executes a given **callback** for each field of the FormData instance + + - **{function}** callback – Function to execute for each element, taking three arguments: + + **{FormDataValue}** value – A value(s) of the current field. + + **{string}** name – Name of the current field. 
+ + **{FormData}** form – The FormData instance that **forEach** is being applied to + - **{unknown}** [thisArg = null] – Value to use as **this** context when executing the given **callback** + +##### `keys() -> {Generator}` + +Returns an [`iterator`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols) allowing to go through all keys contained in this `FormData` object. +Each key is a `string`. + +##### `values() -> {Generator}` + +Returns an [`iterator`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols) allowing to go through all values contained in this object `FormData` object. +Each value is a [`FormDataValue`](https://developer.mozilla.org/en-US/docs/Web/API/FormDataEntryValue). + +##### `entries() -> {Generator<[string, FormDataValue]>}` + +Returns an [`iterator`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols) allowing to go through key/value pairs contained in this `FormData` object. +The key of each pair is a string; the value is a [`FormDataValue`](https://developer.mozilla.org/en-US/docs/Web/API/FormDataEntryValue). + +##### `[Symbol.iterator]() -> {Generator<[string, FormDataValue]>}` + +An alias for [`FormData#entries()`](#entries---iterator) + +### `class Blob` + +The `Blob` object represents a blob, which is a file-like object of immutable, raw data; +they can be read as text or binary data, or converted into a ReadableStream +so its methods can be used for processing the data. + +##### `constructor(blobParts[, options]) -> {Blob}` + +Creates a new `Blob` instance. The `Blob` constructor accepts following arguments: + + - **{(ArrayBufferLike | ArrayBufferView | File | Blob | string)[]}** blobParts – An `Array` strings, or [`ArrayBuffer`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer), [`ArrayBufferView`](https://developer.mozilla.org/en-US/docs/Web/API/ArrayBufferView), [`Blob`](https://developer.mozilla.org/en-US/docs/Web/API/Blob) objects, or a mix of any of such objects, that will be put inside the [`Blob`](https://developer.mozilla.org/en-US/docs/Web/API/Blob); + - **{object}** [options = {}] - An options object containing optional attributes for the file. Available options are as follows; + - **{string}** [options.type = ""] - Returns the media type ([`MIME`](https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/MIME_types)) of the blob represented by a `Blob` object. + +#### Instance properties + +##### `type -> {string}` + +Returns the [`MIME type`](https://developer.mozilla.org/en-US/docs/Glossary/MIME_type) of the [`Blob`](https://developer.mozilla.org/en-US/docs/Web/API/Blob) or [`File`](https://developer.mozilla.org/en-US/docs/Web/API/File). + +##### `size -> {number}` + +Returns the size of the [`Blob`](https://developer.mozilla.org/en-US/docs/Web/API/Blob) or [`File`](https://developer.mozilla.org/en-US/docs/Web/API/File) in bytes. + +#### Instance methods + +##### `slice([start, end, contentType]) -> {Blob}` + +Creates and returns a new [`Blob`](https://developer.mozilla.org/en-US/docs/Web/API/Blob) object which contains data from a subset of the blob on which it's called. + +- **{number}** [start = 0] An index into the `Blob` indicating the first byte to include in the new `Blob`. If you specify a negative value, it's treated as an offset from the end of the `Blob` toward the beginning. For example, -10 would be the 10th from last byte in the `Blob`. The default value is 0. 
If you specify a value for start that is larger than the size of the source `Blob`, the returned `Blob` has size 0 and contains no data. + +- **{number}** [end = `blob`.size] An index into the `Blob` indicating the first byte that will *not* be included in the new `Blob` (i.e. the byte exactly at this index is not included). If you specify a negative value, it's treated as an offset from the end of the `Blob` toward the beginning. For example, -10 would be the 10th from last byte in the `Blob`. The default value is size. + +- **{string}** [contentType = ""] The content type to assign to the new ``Blob``; this will be the value of its type property. The default value is an empty string. + +##### `stream() -> {ReadableStream}` + +Returns a [`ReadableStream`](https://developer.mozilla.org/en-US/docs/Web/API/ReadableStream) which upon reading returns the data contained within the [`Blob`](https://developer.mozilla.org/en-US/docs/Web/API/Blob). + +##### `arrayBuffer() -> {Promise}` + +Returns a [`Promise`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise) that resolves with the contents of the blob as binary data contained in an [`ArrayBuffer`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer). + +##### `text() -> {Promise}` + +Returns a [`Promise`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise) that resolves with a string containing the contents of the blob, interpreted as UTF-8. + +### `class File extends Blob` + +The `File` class provides information about files. The `File` class inherits `Blob`. + +##### `constructor(fileBits, filename[, options]) -> {File}` + +Creates a new `File` instance. The `File` constructor accepts following arguments: + + - **{(ArrayBufferLike | ArrayBufferView | File | Blob | string)[]}** fileBits – An `Array` strings, or [`ArrayBuffer`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer), [`ArrayBufferView`](https://developer.mozilla.org/en-US/docs/Web/API/ArrayBufferView), [`Blob`](https://developer.mozilla.org/en-US/docs/Web/API/Blob) objects, or a mix of any of such objects, that will be put inside the [`File`](https://developer.mozilla.org/en-US/docs/Web/API/File); + - **{string}** filename – Representing the file name. + - **{object}** [options = {}] - An options object containing optional attributes for the file. Available options are as follows; + - **{number}** [options.lastModified = Date.now()] – provides the last modified date of the file as the number of milliseconds since the Unix epoch (January 1, 1970 at midnight). Files without a known last modified date return the current date; + - **{string}** [options.type = ""] - Returns the media type ([`MIME`](https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/MIME_types)) of the file represented by a `File` object. + +### `fileFromPath(path[, filename, options]) -> {Promise}` + +Available from `formdata-node/file-from-path` subpath. + +Creates a `File` referencing the one on a disk by given path. + + - **{string}** path - Path to a file + - **{string}** [filename] - Optional name of the file. Will be passed as the second argument in `File` constructor. If not presented, the name will be taken from the file's path. + - **{object}** [options = {}] - Additional `File` options, except for `lastModified`. 
+ - **{string}** [options.type = ""] - Returns the media type ([`MIME`](https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/MIME_types)) of the file represented by a `File` object. + +### `fileFromPathSync(path[, filename, options]) -> {File}` + +Available from `formdata-node/file-from-path` subpath. + +Creates a `File` referencing the one on a disk by given path. Synchronous version of the `fileFromPath`. + - **{string}** path - Path to a file + - **{string}** [filename] - Optional name of the file. Will be passed as the second argument in `File` constructor. If not presented, the name will be taken from the file's path. + - **{object}** [options = {}] - Additional `File` options, except for `lastModified`. + - **{string}** [options.type = ""] - Returns the media type ([`MIME`](https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/MIME_types)) of the file represented by a `File` object. + +### `isFile(value) -> {boolean}` + +Available from `formdata-node/file-from-path` subpath. + +Checks if given value is a File, Blob or file-look-a-like object. + + - **{unknown}** value - A value to test + +### Husky installation + +This package is using `husky` to perform git hooks on developer's machine, so your changes might be verified before you push them to `GitHub`. If you want to install these hooks, run `npm run husky` command. + +## Related links + +- [`FormData`](https://developer.mozilla.org/en-US/docs/Web/API/FormData) documentation on MDN +- [`File`](https://developer.mozilla.org/en-US/docs/Web/API/File) documentation on MDN +- [`Blob`](https://developer.mozilla.org/en-US/docs/Web/API/Blob) documentation on MDN +- [`FormDataValue`](https://developer.mozilla.org/en-US/docs/Web/API/FormDataEntryValue) documentation on MDN. +- [`formdata-polyfill`](https://github.com/jimmywarting/FormData) HTML5 `FormData` for Browsers & NodeJS. +- [`node-fetch`](https://github.com/node-fetch/node-fetch) a light-weight module that brings the Fetch API to Node.js +- [`fetch-blob`](https://github.com/node-fetch/fetch-blob) a Blob implementation on node.js, originally from `node-fetch`. +- [`form-data-encoder`](https://github.com/octet-stream/form-data-encoder) spec-compliant `multipart/form-data` encoder implementation. +- [`then-busboy`](https://github.com/octet-stream/then-busboy) a promise-based wrapper around Busboy. Process multipart/form-data content and returns it as a single object. Will be helpful to handle your data on the server-side applications. +- [`@octetstream/object-to-form-data`](https://github.com/octet-stream/object-to-form-data) converts JavaScript object to FormData. 
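+
+As a quick recap of the API documented above, here is a minimal sketch tying together `FormData`, `Blob` and `fileFromPath`; the field names and the `./report.pdf` path are placeholders, not part of the library:
+
+```js
+import {FormData, Blob} from "formdata-node"
+import {fileFromPath} from "formdata-node/file-from-path"
+
+const form = new FormData()
+
+// set() replaces any existing values under a name, append() adds another one
+form.set("greeting", "Hello, World!")
+form.append("tags", "node")
+form.append("tags", "form-data")
+
+// A Blob value always comes back as a File; the third argument sets its name
+form.set("note", new Blob(["Some text"], {type: "text/plain"}), "note.txt")
+
+// "./report.pdf" is a placeholder path on disk
+form.set("report", await fileFromPath("./report.pdf"))
+
+console.log(form.get("greeting")) // => Hello, World!
+console.log(form.getAll("tags")) // => ["node", "form-data"]
+console.log(form.get("note").name) // => note.txt
+console.log(form.has("missing")) // => false
+```
+
+As the reference above notes, non-Blob values are stored as strings, while Blob and File values are always returned as `File` instances.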
diff --git a/node_modules/humanize-ms/History.md b/node_modules/humanize-ms/History.md new file mode 100644 index 0000000..b159587 --- /dev/null +++ b/node_modules/humanize-ms/History.md @@ -0,0 +1,25 @@ + +1.2.1 / 2017-05-19 +================== + + * fix: package.json to reduce vulnerabilities (#3) + +1.2.0 / 2016-05-21 +================== + + * feat: warn with stack + +1.1.0 / 2016-04-04 +================== + + * deps: upgrade ms to 0.7.0 + +1.0.1 / 2014-12-31 +================== + + * feat(index.js): warn when result is undefined + +1.0.0 / 2014-08-14 +================== + + * init diff --git a/node_modules/humanize-ms/LICENSE b/node_modules/humanize-ms/LICENSE new file mode 100644 index 0000000..89de354 --- /dev/null +++ b/node_modules/humanize-ms/LICENSE @@ -0,0 +1,17 @@ +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/humanize-ms/README.md b/node_modules/humanize-ms/README.md new file mode 100644 index 0000000..20a2ca3 --- /dev/null +++ b/node_modules/humanize-ms/README.md @@ -0,0 +1,40 @@ +humanize-ms +--------------- + +[![NPM version][npm-image]][npm-url] +[![build status][travis-image]][travis-url] +[![Test coverage][coveralls-image]][coveralls-url] +[![Gittip][gittip-image]][gittip-url] +[![David deps][david-image]][david-url] + +[npm-image]: https://img.shields.io/npm/v/humanize-ms.svg?style=flat +[npm-url]: https://npmjs.org/package/humanize-ms +[travis-image]: https://img.shields.io/travis/node-modules/humanize-ms.svg?style=flat +[travis-url]: https://travis-ci.org/node-modules/humanize-ms +[coveralls-image]: https://img.shields.io/coveralls/node-modules/humanize-ms.svg?style=flat +[coveralls-url]: https://coveralls.io/r/node-modules/humanize-ms?branch=master +[gittip-image]: https://img.shields.io/gittip/dead-horse.svg?style=flat +[gittip-url]: https://www.gittip.com/dead-horse/ +[david-image]: https://img.shields.io/david/node-modules/humanize-ms.svg?style=flat +[david-url]: https://david-dm.org/node-modules/humanize-ms + +transform humanize time to ms + +## Installation + +```bash +$ npm install humanize-ms +``` + +## Examples + +```js +var ms = require('humanize-ms'); + +ms('1s') // 1000 +ms(1000) // 1000 +``` + +### License + +MIT diff --git a/node_modules/humanize-ms/index.js b/node_modules/humanize-ms/index.js new file mode 100644 index 0000000..660df81 --- /dev/null +++ b/node_modules/humanize-ms/index.js @@ -0,0 +1,24 @@ +/*! + * humanize-ms - index.js + * Copyright(c) 2014 dead_horse + * MIT Licensed + */ + +'use strict'; + +/** + * Module dependencies. 
+ */ + +var util = require('util'); +var ms = require('ms'); + +module.exports = function (t) { + if (typeof t === 'number') return t; + var r = ms(t); + if (r === undefined) { + var err = new Error(util.format('humanize-ms(%j) result undefined', t)); + console.warn(err.stack); + } + return r; +}; diff --git a/node_modules/humanize-ms/package.json b/node_modules/humanize-ms/package.json new file mode 100644 index 0000000..da4ab7f --- /dev/null +++ b/node_modules/humanize-ms/package.json @@ -0,0 +1,37 @@ +{ + "name": "humanize-ms", + "version": "1.2.1", + "description": "transform humanize time to ms", + "main": "index.js", + "files": [ + "index.js" + ], + "scripts": { + "test": "make test" + }, + "keywords": [ + "humanize", + "ms" + ], + "author": { + "name": "dead-horse", + "email": "dead_horse@qq.com", + "url": "http://deadhorse.me" + }, + "repository": { + "type": "git", + "url": "https://github.com/node-modules/humanize-ms" + }, + "license": "MIT", + "dependencies": { + "ms": "^2.0.0" + }, + "devDependencies": { + "autod": "*", + "beautify-benchmark": "~0.2.4", + "benchmark": "~1.0.0", + "istanbul": "*", + "mocha": "*", + "should": "*" + } +} diff --git a/node_modules/is-plain-object/LICENSE b/node_modules/is-plain-object/LICENSE new file mode 100644 index 0000000..3f2eca1 --- /dev/null +++ b/node_modules/is-plain-object/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014-2017, Jon Schlinkert. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/is-plain-object/README.md b/node_modules/is-plain-object/README.md new file mode 100644 index 0000000..5c074ab --- /dev/null +++ b/node_modules/is-plain-object/README.md @@ -0,0 +1,125 @@ +# is-plain-object [![NPM version](https://img.shields.io/npm/v/is-plain-object.svg?style=flat)](https://www.npmjs.com/package/is-plain-object) [![NPM monthly downloads](https://img.shields.io/npm/dm/is-plain-object.svg?style=flat)](https://npmjs.org/package/is-plain-object) [![NPM total downloads](https://img.shields.io/npm/dt/is-plain-object.svg?style=flat)](https://npmjs.org/package/is-plain-object) [![Linux Build Status](https://img.shields.io/travis/jonschlinkert/is-plain-object.svg?style=flat&label=Travis)](https://travis-ci.org/jonschlinkert/is-plain-object) + +> Returns true if an object was created by the `Object` constructor, or Object.create(null). + +Please consider following this project's author, [Jon Schlinkert](https://github.com/jonschlinkert), and consider starring the project to show your :heart: and support. 
+ +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +$ npm install --save is-plain-object +``` + +Use [isobject](https://github.com/jonschlinkert/isobject) if you only want to check if the value is an object and not an array or null. + +## Usage + +with es modules +```js +import { isPlainObject } from 'is-plain-object'; +``` + +or with commonjs +```js +const { isPlainObject } = require('is-plain-object'); +``` + +**true** when created by the `Object` constructor, or Object.create(null). + +```js +isPlainObject(Object.create({})); +//=> true +isPlainObject(Object.create(Object.prototype)); +//=> true +isPlainObject({foo: 'bar'}); +//=> true +isPlainObject({}); +//=> true +isPlainObject(Object.create(null)); +//=> true +``` + +**false** when not created by the `Object` constructor. + +```js +isPlainObject(1); +//=> false +isPlainObject(['foo', 'bar']); +//=> false +isPlainObject([]); +//=> false +isPlainObject(new Foo); +//=> false +isPlainObject(null); +//=> false +``` + +## About +
+<details> +<summary><strong>Contributing</strong></summary> + +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new). + +</details> + +<details> +<summary><strong>Running Tests</strong></summary> + +Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command: + +```sh +$ npm install && npm test +``` + +</details> + +<details> +<summary><strong>Building docs</strong></summary> + +_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_ + +To generate the readme, run the following command: + +```sh +$ npm install -g verbose/verb#dev verb-generate-readme && verb +``` + +</details>
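The true/false rules in the Usage section above come from three checks in `dist/is-plain-object.js` later in this diff: the `Object.prototype.toString` tag, the `constructor`, and whether the prototype owns `isPrototypeOf`. A minimal sketch of how those checks land on a few edge cases the examples above skip (it assumes only the CommonJS entry point shown in Usage; `Point` is an illustrative class, not part of the package):

```js
const { isPlainObject } = require('is-plain-object');

// Object literals and prototype-less objects pass: the first sits on
// Object.prototype, the second has no constructor, which short-circuits to true.
console.log(isPlainObject({ answer: 42 }));       //=> true
console.log(isPlainObject(Object.create(null)));  //=> true

// A class instance still reports '[object Object]', but Point.prototype does
// not own `isPrototypeOf`, so the final check rejects it.
class Point { constructor(x, y) { this.x = x; this.y = y; } }
console.log(isPlainObject(new Point(1, 2)));      //=> false

// Built-ins carry their own toString tags ('[object Map]', '[object Array]')
// and never get past the first check.
console.log(isPlainObject(new Map()));            //=> false
console.log(isPlainObject([]));                   //=> false
```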
+ +### Related projects + +You might also be interested in these projects: + +* [is-number](https://www.npmjs.com/package/is-number): Returns true if a number or string value is a finite number. Useful for regex… [more](https://github.com/jonschlinkert/is-number) | [homepage](https://github.com/jonschlinkert/is-number "Returns true if a number or string value is a finite number. Useful for regex matches, parsing, user input, etc.") +* [isobject](https://www.npmjs.com/package/isobject): Returns true if the value is an object and not an array or null. | [homepage](https://github.com/jonschlinkert/isobject "Returns true if the value is an object and not an array or null.") +* [kind-of](https://www.npmjs.com/package/kind-of): Get the native type of a value. | [homepage](https://github.com/jonschlinkert/kind-of "Get the native type of a value.") + +### Contributors + +| **Commits** | **Contributor** | +| --- | --- | +| 19 | [jonschlinkert](https://github.com/jonschlinkert) | +| 6 | [TrySound](https://github.com/TrySound) | +| 6 | [stevenvachon](https://github.com/stevenvachon) | +| 3 | [onokumus](https://github.com/onokumus) | +| 1 | [wtgtybhertgeghgtwtg](https://github.com/wtgtybhertgeghgtwtg) | + +### Author + +**Jon Schlinkert** + +* [GitHub Profile](https://github.com/jonschlinkert) +* [Twitter Profile](https://twitter.com/jonschlinkert) +* [LinkedIn Profile](https://linkedin.com/in/jonschlinkert) + +### License + +Copyright © 2019, [Jon Schlinkert](https://github.com/jonschlinkert). +Released under the [MIT License](LICENSE). + +*** + +_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.8.0, on April 28, 2019._ diff --git a/node_modules/is-plain-object/dist/is-plain-object.js b/node_modules/is-plain-object/dist/is-plain-object.js new file mode 100644 index 0000000..d134e4f --- /dev/null +++ b/node_modules/is-plain-object/dist/is-plain-object.js @@ -0,0 +1,38 @@ +'use strict'; + +Object.defineProperty(exports, '__esModule', { value: true }); + +/*! + * is-plain-object + * + * Copyright (c) 2014-2017, Jon Schlinkert. + * Released under the MIT License. + */ + +function isObject(o) { + return Object.prototype.toString.call(o) === '[object Object]'; +} + +function isPlainObject(o) { + var ctor,prot; + + if (isObject(o) === false) return false; + + // If has modified constructor + ctor = o.constructor; + if (ctor === undefined) return true; + + // If has modified prototype + prot = ctor.prototype; + if (isObject(prot) === false) return false; + + // If constructor does not have an Object-specific method + if (prot.hasOwnProperty('isPrototypeOf') === false) { + return false; + } + + // Most likely a plain Object + return true; +} + +exports.isPlainObject = isPlainObject; diff --git a/node_modules/is-plain-object/dist/is-plain-object.mjs b/node_modules/is-plain-object/dist/is-plain-object.mjs new file mode 100644 index 0000000..c2d9f35 --- /dev/null +++ b/node_modules/is-plain-object/dist/is-plain-object.mjs @@ -0,0 +1,34 @@ +/*! + * is-plain-object + * + * Copyright (c) 2014-2017, Jon Schlinkert. + * Released under the MIT License. 
+ */ + +function isObject(o) { + return Object.prototype.toString.call(o) === '[object Object]'; +} + +function isPlainObject(o) { + var ctor,prot; + + if (isObject(o) === false) return false; + + // If has modified constructor + ctor = o.constructor; + if (ctor === undefined) return true; + + // If has modified prototype + prot = ctor.prototype; + if (isObject(prot) === false) return false; + + // If constructor does not have an Object-specific method + if (prot.hasOwnProperty('isPrototypeOf') === false) { + return false; + } + + // Most likely a plain Object + return true; +} + +export { isPlainObject }; diff --git a/node_modules/is-plain-object/is-plain-object.d.ts b/node_modules/is-plain-object/is-plain-object.d.ts new file mode 100644 index 0000000..a359940 --- /dev/null +++ b/node_modules/is-plain-object/is-plain-object.d.ts @@ -0,0 +1 @@ +export function isPlainObject(o: any): boolean; diff --git a/node_modules/is-plain-object/package.json b/node_modules/is-plain-object/package.json new file mode 100644 index 0000000..3ea169a --- /dev/null +++ b/node_modules/is-plain-object/package.json @@ -0,0 +1,85 @@ +{ + "name": "is-plain-object", + "description": "Returns true if an object was created by the `Object` constructor, or Object.create(null).", + "version": "5.0.0", + "homepage": "https://github.com/jonschlinkert/is-plain-object", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "contributors": [ + "Jon Schlinkert (http://twitter.com/jonschlinkert)", + "Osman Nuri Okumuş (http://onokumus.com)", + "Steven Vachon (https://svachon.com)", + "(https://github.com/wtgtybhertgeghgtwtg)", + "Bogdan Chadkin (https://github.com/TrySound)" + ], + "repository": "jonschlinkert/is-plain-object", + "bugs": { + "url": "https://github.com/jonschlinkert/is-plain-object/issues" + }, + "license": "MIT", + "main": "dist/is-plain-object.js", + "module": "dist/is-plain-object.mjs", + "types": "is-plain-object.d.ts", + "files": [ + "is-plain-object.d.ts", + "dist" + ], + "exports": { + ".": { + "import": "./dist/is-plain-object.mjs", + "require": "./dist/is-plain-object.js" + }, + "./package.json": "./package.json" + }, + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "build": "rollup -c", + "test_browser": "mocha-headless-chrome --args=disable-web-security -f test/browser.html", + "test_node": "mocha -r esm", + "test": "npm run test_node && npm run build && npm run test_browser", + "prepare": "rollup -c" + }, + "devDependencies": { + "chai": "^4.2.0", + "esm": "^3.2.22", + "gulp-format-md": "^1.0.0", + "mocha": "^6.1.4", + "mocha-headless-chrome": "^3.1.0", + "rollup": "^2.22.1" + }, + "keywords": [ + "check", + "is", + "is-object", + "isobject", + "javascript", + "kind", + "kind-of", + "object", + "plain", + "type", + "typeof", + "value" + ], + "verb": { + "toc": false, + "layout": "default", + "tasks": [ + "readme" + ], + "plugins": [ + "gulp-format-md" + ], + "related": { + "list": [ + "is-number", + "isobject", + "kind-of" + ] + }, + "lint": { + "reflinks": true + } + } +} diff --git a/node_modules/mime-db/HISTORY.md b/node_modules/mime-db/HISTORY.md new file mode 100644 index 0000000..7436f64 --- /dev/null +++ b/node_modules/mime-db/HISTORY.md @@ -0,0 +1,507 @@ +1.52.0 / 2022-02-21 +=================== + + * Add extensions from IANA for more `image/*` types + * Add extension `.asc` to `application/pgp-keys` + * Add extensions to various XML types + * Add new upstream MIME types + +1.51.0 / 2021-11-08 +=================== + + * Add new upstream MIME types + * Mark 
`image/vnd.microsoft.icon` as compressible + * Mark `image/vnd.ms-dds` as compressible + +1.50.0 / 2021-09-15 +=================== + + * Add deprecated iWorks mime types and extensions + * Add new upstream MIME types + +1.49.0 / 2021-07-26 +=================== + + * Add extension `.trig` to `application/trig` + * Add new upstream MIME types + +1.48.0 / 2021-05-30 +=================== + + * Add extension `.mvt` to `application/vnd.mapbox-vector-tile` + * Add new upstream MIME types + * Mark `text/yaml` as compressible + +1.47.0 / 2021-04-01 +=================== + + * Add new upstream MIME types + * Remove ambigious extensions from IANA for `application/*+xml` types + * Update primary extension to `.es` for `application/ecmascript` + +1.46.0 / 2021-02-13 +=================== + + * Add extension `.amr` to `audio/amr` + * Add extension `.m4s` to `video/iso.segment` + * Add extension `.opus` to `audio/ogg` + * Add new upstream MIME types + +1.45.0 / 2020-09-22 +=================== + + * Add `application/ubjson` with extension `.ubj` + * Add `image/avif` with extension `.avif` + * Add `image/ktx2` with extension `.ktx2` + * Add extension `.dbf` to `application/vnd.dbf` + * Add extension `.rar` to `application/vnd.rar` + * Add extension `.td` to `application/urc-targetdesc+xml` + * Add new upstream MIME types + * Fix extension of `application/vnd.apple.keynote` to be `.key` + +1.44.0 / 2020-04-22 +=================== + + * Add charsets from IANA + * Add extension `.cjs` to `application/node` + * Add new upstream MIME types + +1.43.0 / 2020-01-05 +=================== + + * Add `application/x-keepass2` with extension `.kdbx` + * Add extension `.mxmf` to `audio/mobile-xmf` + * Add extensions from IANA for `application/*+xml` types + * Add new upstream MIME types + +1.42.0 / 2019-09-25 +=================== + + * Add `image/vnd.ms-dds` with extension `.dds` + * Add new upstream MIME types + * Remove compressible from `multipart/mixed` + +1.41.0 / 2019-08-30 +=================== + + * Add new upstream MIME types + * Add `application/toml` with extension `.toml` + * Mark `font/ttf` as compressible + +1.40.0 / 2019-04-20 +=================== + + * Add extensions from IANA for `model/*` types + * Add `text/mdx` with extension `.mdx` + +1.39.0 / 2019-04-04 +=================== + + * Add extensions `.siv` and `.sieve` to `application/sieve` + * Add new upstream MIME types + +1.38.0 / 2019-02-04 +=================== + + * Add extension `.nq` to `application/n-quads` + * Add extension `.nt` to `application/n-triples` + * Add new upstream MIME types + * Mark `text/less` as compressible + +1.37.0 / 2018-10-19 +=================== + + * Add extensions to HEIC image types + * Add new upstream MIME types + +1.36.0 / 2018-08-20 +=================== + + * Add Apple file extensions from IANA + * Add extensions from IANA for `image/*` types + * Add new upstream MIME types + +1.35.0 / 2018-07-15 +=================== + + * Add extension `.owl` to `application/rdf+xml` + * Add new upstream MIME types + - Removes extension `.woff` from `application/font-woff` + +1.34.0 / 2018-06-03 +=================== + + * Add extension `.csl` to `application/vnd.citationstyles.style+xml` + * Add extension `.es` to `application/ecmascript` + * Add new upstream MIME types + * Add `UTF-8` as default charset for `text/turtle` + * Mark all XML-derived types as compressible + +1.33.0 / 2018-02-15 +=================== + + * Add extensions from IANA for `message/*` types + * Add new upstream MIME types + * Fix some incorrect OOXML types + * 
Remove `application/font-woff2` + +1.32.0 / 2017-11-29 +=================== + + * Add new upstream MIME types + * Update `text/hjson` to registered `application/hjson` + * Add `text/shex` with extension `.shex` + +1.31.0 / 2017-10-25 +=================== + + * Add `application/raml+yaml` with extension `.raml` + * Add `application/wasm` with extension `.wasm` + * Add new `font` type from IANA + * Add new upstream font extensions + * Add new upstream MIME types + * Add extensions for JPEG-2000 images + +1.30.0 / 2017-08-27 +=================== + + * Add `application/vnd.ms-outlook` + * Add `application/x-arj` + * Add extension `.mjs` to `application/javascript` + * Add glTF types and extensions + * Add new upstream MIME types + * Add `text/x-org` + * Add VirtualBox MIME types + * Fix `source` records for `video/*` types that are IANA + * Update `font/opentype` to registered `font/otf` + +1.29.0 / 2017-07-10 +=================== + + * Add `application/fido.trusted-apps+json` + * Add extension `.wadl` to `application/vnd.sun.wadl+xml` + * Add new upstream MIME types + * Add `UTF-8` as default charset for `text/css` + +1.28.0 / 2017-05-14 +=================== + + * Add new upstream MIME types + * Add extension `.gz` to `application/gzip` + * Update extensions `.md` and `.markdown` to be `text/markdown` + +1.27.0 / 2017-03-16 +=================== + + * Add new upstream MIME types + * Add `image/apng` with extension `.apng` + +1.26.0 / 2017-01-14 +=================== + + * Add new upstream MIME types + * Add extension `.geojson` to `application/geo+json` + +1.25.0 / 2016-11-11 +=================== + + * Add new upstream MIME types + +1.24.0 / 2016-09-18 +=================== + + * Add `audio/mp3` + * Add new upstream MIME types + +1.23.0 / 2016-05-01 +=================== + + * Add new upstream MIME types + * Add extension `.3gpp` to `audio/3gpp` + +1.22.0 / 2016-02-15 +=================== + + * Add `text/slim` + * Add extension `.rng` to `application/xml` + * Add new upstream MIME types + * Fix extension of `application/dash+xml` to be `.mpd` + * Update primary extension to `.m4a` for `audio/mp4` + +1.21.0 / 2016-01-06 +=================== + + * Add Google document types + * Add new upstream MIME types + +1.20.0 / 2015-11-10 +=================== + + * Add `text/x-suse-ymp` + * Add new upstream MIME types + +1.19.0 / 2015-09-17 +=================== + + * Add `application/vnd.apple.pkpass` + * Add new upstream MIME types + +1.18.0 / 2015-09-03 +=================== + + * Add new upstream MIME types + +1.17.0 / 2015-08-13 +=================== + + * Add `application/x-msdos-program` + * Add `audio/g711-0` + * Add `image/vnd.mozilla.apng` + * Add extension `.exe` to `application/x-msdos-program` + +1.16.0 / 2015-07-29 +=================== + + * Add `application/vnd.uri-map` + +1.15.0 / 2015-07-13 +=================== + + * Add `application/x-httpd-php` + +1.14.0 / 2015-06-25 +=================== + + * Add `application/scim+json` + * Add `application/vnd.3gpp.ussd+xml` + * Add `application/vnd.biopax.rdf+xml` + * Add `text/x-processing` + +1.13.0 / 2015-06-07 +=================== + + * Add nginx as a source + * Add `application/x-cocoa` + * Add `application/x-java-archive-diff` + * Add `application/x-makeself` + * Add `application/x-perl` + * Add `application/x-pilot` + * Add `application/x-redhat-package-manager` + * Add `application/x-sea` + * Add `audio/x-m4a` + * Add `audio/x-realaudio` + * Add `image/x-jng` + * Add `text/mathml` + +1.12.0 / 2015-06-05 +=================== + + * Add 
`application/bdoc` + * Add `application/vnd.hyperdrive+json` + * Add `application/x-bdoc` + * Add extension `.rtf` to `text/rtf` + +1.11.0 / 2015-05-31 +=================== + + * Add `audio/wav` + * Add `audio/wave` + * Add extension `.litcoffee` to `text/coffeescript` + * Add extension `.sfd-hdstx` to `application/vnd.hydrostatix.sof-data` + * Add extension `.n-gage` to `application/vnd.nokia.n-gage.symbian.install` + +1.10.0 / 2015-05-19 +=================== + + * Add `application/vnd.balsamiq.bmpr` + * Add `application/vnd.microsoft.portable-executable` + * Add `application/x-ns-proxy-autoconfig` + +1.9.1 / 2015-04-19 +================== + + * Remove `.json` extension from `application/manifest+json` + - This is causing bugs downstream + +1.9.0 / 2015-04-19 +================== + + * Add `application/manifest+json` + * Add `application/vnd.micro+json` + * Add `image/vnd.zbrush.pcx` + * Add `image/x-ms-bmp` + +1.8.0 / 2015-03-13 +================== + + * Add `application/vnd.citationstyles.style+xml` + * Add `application/vnd.fastcopy-disk-image` + * Add `application/vnd.gov.sk.xmldatacontainer+xml` + * Add extension `.jsonld` to `application/ld+json` + +1.7.0 / 2015-02-08 +================== + + * Add `application/vnd.gerber` + * Add `application/vnd.msa-disk-image` + +1.6.1 / 2015-02-05 +================== + + * Community extensions ownership transferred from `node-mime` + +1.6.0 / 2015-01-29 +================== + + * Add `application/jose` + * Add `application/jose+json` + * Add `application/json-seq` + * Add `application/jwk+json` + * Add `application/jwk-set+json` + * Add `application/jwt` + * Add `application/rdap+json` + * Add `application/vnd.gov.sk.e-form+xml` + * Add `application/vnd.ims.imsccv1p3` + +1.5.0 / 2014-12-30 +================== + + * Add `application/vnd.oracle.resource+json` + * Fix various invalid MIME type entries + - `application/mbox+xml` + - `application/oscp-response` + - `application/vwg-multiplexed` + - `audio/g721` + +1.4.0 / 2014-12-21 +================== + + * Add `application/vnd.ims.imsccv1p2` + * Fix various invalid MIME type entries + - `application/vnd-acucobol` + - `application/vnd-curl` + - `application/vnd-dart` + - `application/vnd-dxr` + - `application/vnd-fdf` + - `application/vnd-mif` + - `application/vnd-sema` + - `application/vnd-wap-wmlc` + - `application/vnd.adobe.flash-movie` + - `application/vnd.dece-zip` + - `application/vnd.dvb_service` + - `application/vnd.micrografx-igx` + - `application/vnd.sealed-doc` + - `application/vnd.sealed-eml` + - `application/vnd.sealed-mht` + - `application/vnd.sealed-ppt` + - `application/vnd.sealed-tiff` + - `application/vnd.sealed-xls` + - `application/vnd.sealedmedia.softseal-html` + - `application/vnd.sealedmedia.softseal-pdf` + - `application/vnd.wap-slc` + - `application/vnd.wap-wbxml` + - `audio/vnd.sealedmedia.softseal-mpeg` + - `image/vnd-djvu` + - `image/vnd-svf` + - `image/vnd-wap-wbmp` + - `image/vnd.sealed-png` + - `image/vnd.sealedmedia.softseal-gif` + - `image/vnd.sealedmedia.softseal-jpg` + - `model/vnd-dwf` + - `model/vnd.parasolid.transmit-binary` + - `model/vnd.parasolid.transmit-text` + - `text/vnd-a` + - `text/vnd-curl` + - `text/vnd.wap-wml` + * Remove example template MIME types + - `application/example` + - `audio/example` + - `image/example` + - `message/example` + - `model/example` + - `multipart/example` + - `text/example` + - `video/example` + +1.3.1 / 2014-12-16 +================== + + * Fix missing extensions + - `application/json5` + - `text/hjson` + +1.3.0 / 2014-12-07 
+================== + + * Add `application/a2l` + * Add `application/aml` + * Add `application/atfx` + * Add `application/atxml` + * Add `application/cdfx+xml` + * Add `application/dii` + * Add `application/json5` + * Add `application/lxf` + * Add `application/mf4` + * Add `application/vnd.apache.thrift.compact` + * Add `application/vnd.apache.thrift.json` + * Add `application/vnd.coffeescript` + * Add `application/vnd.enphase.envoy` + * Add `application/vnd.ims.imsccv1p1` + * Add `text/csv-schema` + * Add `text/hjson` + * Add `text/markdown` + * Add `text/yaml` + +1.2.0 / 2014-11-09 +================== + + * Add `application/cea` + * Add `application/dit` + * Add `application/vnd.gov.sk.e-form+zip` + * Add `application/vnd.tmd.mediaflex.api+xml` + * Type `application/epub+zip` is now IANA-registered + +1.1.2 / 2014-10-23 +================== + + * Rebuild database for `application/x-www-form-urlencoded` change + +1.1.1 / 2014-10-20 +================== + + * Mark `application/x-www-form-urlencoded` as compressible. + +1.1.0 / 2014-09-28 +================== + + * Add `application/font-woff2` + +1.0.3 / 2014-09-25 +================== + + * Fix engine requirement in package + +1.0.2 / 2014-09-25 +================== + + * Add `application/coap-group+json` + * Add `application/dcd` + * Add `application/vnd.apache.thrift.binary` + * Add `image/vnd.tencent.tap` + * Mark all JSON-derived types as compressible + * Update `text/vtt` data + +1.0.1 / 2014-08-30 +================== + + * Fix extension ordering + +1.0.0 / 2014-08-30 +================== + + * Add `application/atf` + * Add `application/merge-patch+json` + * Add `multipart/x-mixed-replace` + * Add `source: 'apache'` metadata + * Add `source: 'iana'` metadata + * Remove badly-assumed charset data diff --git a/node_modules/mime-db/LICENSE b/node_modules/mime-db/LICENSE new file mode 100644 index 0000000..0751cb1 --- /dev/null +++ b/node_modules/mime-db/LICENSE @@ -0,0 +1,23 @@ +(The MIT License) + +Copyright (c) 2014 Jonathan Ong +Copyright (c) 2015-2022 Douglas Christopher Wilson + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/node_modules/mime-db/README.md b/node_modules/mime-db/README.md new file mode 100644 index 0000000..5a8fcfe --- /dev/null +++ b/node_modules/mime-db/README.md @@ -0,0 +1,100 @@ +# mime-db + +[![NPM Version][npm-version-image]][npm-url] +[![NPM Downloads][npm-downloads-image]][npm-url] +[![Node.js Version][node-image]][node-url] +[![Build Status][ci-image]][ci-url] +[![Coverage Status][coveralls-image]][coveralls-url] + +This is a large database of mime types and information about them. +It consists of a single, public JSON file and does not include any logic, +allowing it to remain as un-opinionated as possible with an API. +It aggregates data from the following sources: + +- http://www.iana.org/assignments/media-types/media-types.xhtml +- http://svn.apache.org/repos/asf/httpd/httpd/trunk/docs/conf/mime.types +- http://hg.nginx.org/nginx/raw-file/default/conf/mime.types + +## Installation + +```bash +npm install mime-db +``` + +### Database Download + +If you're crazy enough to use this in the browser, you can just grab the +JSON file using [jsDelivr](https://www.jsdelivr.com/). It is recommended to +replace `master` with [a release tag](https://github.com/jshttp/mime-db/tags) +as the JSON format may change in the future. + +``` +https://cdn.jsdelivr.net/gh/jshttp/mime-db@master/db.json +``` + +## Usage + +```js +var db = require('mime-db') + +// grab data on .js files +var data = db['application/javascript'] +``` + +## Data Structure + +The JSON file is a map lookup for lowercased mime types. +Each mime type has the following properties: + +- `.source` - where the mime type is defined. + If not set, it's probably a custom media type. + - `apache` - [Apache common media types](http://svn.apache.org/repos/asf/httpd/httpd/trunk/docs/conf/mime.types) + - `iana` - [IANA-defined media types](http://www.iana.org/assignments/media-types/media-types.xhtml) + - `nginx` - [nginx media types](http://hg.nginx.org/nginx/raw-file/default/conf/mime.types) +- `.extensions[]` - known extensions associated with this mime type. +- `.compressible` - whether a file of this type can be gzipped. +- `.charset` - the default charset associated with this type, if any. + +If unknown, every property could be `undefined`. + +## Contributing + +To edit the database, only make PRs against `src/custom-types.json` or +`src/custom-suffix.json`. + +The `src/custom-types.json` file is a JSON object with the MIME type as the +keys and the values being an object with the following keys: + +- `compressible` - leave out if you don't know, otherwise `true`/`false` to + indicate whether the data represented by the type is typically compressible. +- `extensions` - include an array of file extensions that are associated with + the type. +- `notes` - human-readable notes about the type, typically what the type is. +- `sources` - include an array of URLs of where the MIME type and the associated + extensions are sourced from. This needs to be a [primary source](https://en.wikipedia.org/wiki/Primary_source); + links to type aggregating sites and Wikipedia are _not acceptable_. + +To update the build, run `npm run build`. + +### Adding Custom Media Types + +The best way to get new media types included in this library is to register +them with the IANA. The community registration procedure is outlined in +[RFC 6838 section 5](http://tools.ietf.org/html/rfc6838#section-5). Types +registered with the IANA are automatically pulled into this library. 
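To make the Usage and Data Structure sections above concrete before the custom-type details below, here is a minimal sketch of reading one entry and deriving a reverse lookup; `typeByExtension` is an illustrative helper, not part of this package, and the printed values come from the `application/json` entry in the bundled `db.json`:

```js
var db = require('mime-db')

// Every property described under "Data Structure" may be undefined, so treat
// them as optional when reading entries.
var entry = db['application/json']
console.log(entry.source)        //=> 'iana'
console.log(entry.compressible)  //=> true
console.log(entry.charset)       //=> 'UTF-8'
console.log(entry.extensions)    //=> [ 'json', 'map' ]

// A common use of the database: map each file extension to the first MIME
// type in db.json that claims it.
var typeByExtension = {}
Object.keys(db).forEach(function (type) {
  var extensions = db[type].extensions || []
  extensions.forEach(function (ext) {
    if (!typeByExtension[ext]) typeByExtension[ext] = type
  })
})
console.log(typeByExtension['json'])  //=> 'application/json'
```

In practice most consumers reach for a wrapper such as `mime-types` instead of walking `db.json` themselves, which fits this package's stated goal of shipping data rather than logic.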
+ +If that is not possible / feasible, they can be added directly here as a +"custom" type. To do this, it is required to have a primary source that +definitively lists the media type. If an extension is going to be listed as +associateed with this media type, the source must definitively link the +media type and extension as well. + +[ci-image]: https://badgen.net/github/checks/jshttp/mime-db/master?label=ci +[ci-url]: https://github.com/jshttp/mime-db/actions?query=workflow%3Aci +[coveralls-image]: https://badgen.net/coveralls/c/github/jshttp/mime-db/master +[coveralls-url]: https://coveralls.io/r/jshttp/mime-db?branch=master +[node-image]: https://badgen.net/npm/node/mime-db +[node-url]: https://nodejs.org/en/download +[npm-downloads-image]: https://badgen.net/npm/dm/mime-db +[npm-url]: https://npmjs.org/package/mime-db +[npm-version-image]: https://badgen.net/npm/v/mime-db diff --git a/node_modules/mime-db/db.json b/node_modules/mime-db/db.json new file mode 100644 index 0000000..eb9c42c --- /dev/null +++ b/node_modules/mime-db/db.json @@ -0,0 +1,8519 @@ +{ + "application/1d-interleaved-parityfec": { + "source": "iana" + }, + "application/3gpdash-qoe-report+xml": { + "source": "iana", + "charset": "UTF-8", + "compressible": true + }, + "application/3gpp-ims+xml": { + "source": "iana", + "compressible": true + }, + "application/3gpphal+json": { + "source": "iana", + "compressible": true + }, + "application/3gpphalforms+json": { + "source": "iana", + "compressible": true + }, + "application/a2l": { + "source": "iana" + }, + "application/ace+cbor": { + "source": "iana" + }, + "application/activemessage": { + "source": "iana" + }, + "application/activity+json": { + "source": "iana", + "compressible": true + }, + "application/alto-costmap+json": { + "source": "iana", + "compressible": true + }, + "application/alto-costmapfilter+json": { + "source": "iana", + "compressible": true + }, + "application/alto-directory+json": { + "source": "iana", + "compressible": true + }, + "application/alto-endpointcost+json": { + "source": "iana", + "compressible": true + }, + "application/alto-endpointcostparams+json": { + "source": "iana", + "compressible": true + }, + "application/alto-endpointprop+json": { + "source": "iana", + "compressible": true + }, + "application/alto-endpointpropparams+json": { + "source": "iana", + "compressible": true + }, + "application/alto-error+json": { + "source": "iana", + "compressible": true + }, + "application/alto-networkmap+json": { + "source": "iana", + "compressible": true + }, + "application/alto-networkmapfilter+json": { + "source": "iana", + "compressible": true + }, + "application/alto-updatestreamcontrol+json": { + "source": "iana", + "compressible": true + }, + "application/alto-updatestreamparams+json": { + "source": "iana", + "compressible": true + }, + "application/aml": { + "source": "iana" + }, + "application/andrew-inset": { + "source": "iana", + "extensions": ["ez"] + }, + "application/applefile": { + "source": "iana" + }, + "application/applixware": { + "source": "apache", + "extensions": ["aw"] + }, + "application/at+jwt": { + "source": "iana" + }, + "application/atf": { + "source": "iana" + }, + "application/atfx": { + "source": "iana" + }, + "application/atom+xml": { + "source": "iana", + "compressible": true, + "extensions": ["atom"] + }, + "application/atomcat+xml": { + "source": "iana", + "compressible": true, + "extensions": ["atomcat"] + }, + "application/atomdeleted+xml": { + "source": "iana", + "compressible": true, + "extensions": 
["atomdeleted"] + }, + "application/atomicmail": { + "source": "iana" + }, + "application/atomsvc+xml": { + "source": "iana", + "compressible": true, + "extensions": ["atomsvc"] + }, + "application/atsc-dwd+xml": { + "source": "iana", + "compressible": true, + "extensions": ["dwd"] + }, + "application/atsc-dynamic-event-message": { + "source": "iana" + }, + "application/atsc-held+xml": { + "source": "iana", + "compressible": true, + "extensions": ["held"] + }, + "application/atsc-rdt+json": { + "source": "iana", + "compressible": true + }, + "application/atsc-rsat+xml": { + "source": "iana", + "compressible": true, + "extensions": ["rsat"] + }, + "application/atxml": { + "source": "iana" + }, + "application/auth-policy+xml": { + "source": "iana", + "compressible": true + }, + "application/bacnet-xdd+zip": { + "source": "iana", + "compressible": false + }, + "application/batch-smtp": { + "source": "iana" + }, + "application/bdoc": { + "compressible": false, + "extensions": ["bdoc"] + }, + "application/beep+xml": { + "source": "iana", + "charset": "UTF-8", + "compressible": true + }, + "application/calendar+json": { + "source": "iana", + "compressible": true + }, + "application/calendar+xml": { + "source": "iana", + "compressible": true, + "extensions": ["xcs"] + }, + "application/call-completion": { + "source": "iana" + }, + "application/cals-1840": { + "source": "iana" + }, + "application/captive+json": { + "source": "iana", + "compressible": true + }, + "application/cbor": { + "source": "iana" + }, + "application/cbor-seq": { + "source": "iana" + }, + "application/cccex": { + "source": "iana" + }, + "application/ccmp+xml": { + "source": "iana", + "compressible": true + }, + "application/ccxml+xml": { + "source": "iana", + "compressible": true, + "extensions": ["ccxml"] + }, + "application/cdfx+xml": { + "source": "iana", + "compressible": true, + "extensions": ["cdfx"] + }, + "application/cdmi-capability": { + "source": "iana", + "extensions": ["cdmia"] + }, + "application/cdmi-container": { + "source": "iana", + "extensions": ["cdmic"] + }, + "application/cdmi-domain": { + "source": "iana", + "extensions": ["cdmid"] + }, + "application/cdmi-object": { + "source": "iana", + "extensions": ["cdmio"] + }, + "application/cdmi-queue": { + "source": "iana", + "extensions": ["cdmiq"] + }, + "application/cdni": { + "source": "iana" + }, + "application/cea": { + "source": "iana" + }, + "application/cea-2018+xml": { + "source": "iana", + "compressible": true + }, + "application/cellml+xml": { + "source": "iana", + "compressible": true + }, + "application/cfw": { + "source": "iana" + }, + "application/city+json": { + "source": "iana", + "compressible": true + }, + "application/clr": { + "source": "iana" + }, + "application/clue+xml": { + "source": "iana", + "compressible": true + }, + "application/clue_info+xml": { + "source": "iana", + "compressible": true + }, + "application/cms": { + "source": "iana" + }, + "application/cnrp+xml": { + "source": "iana", + "compressible": true + }, + "application/coap-group+json": { + "source": "iana", + "compressible": true + }, + "application/coap-payload": { + "source": "iana" + }, + "application/commonground": { + "source": "iana" + }, + "application/conference-info+xml": { + "source": "iana", + "compressible": true + }, + "application/cose": { + "source": "iana" + }, + "application/cose-key": { + "source": "iana" + }, + "application/cose-key-set": { + "source": "iana" + }, + "application/cpl+xml": { + "source": "iana", + "compressible": true, + "extensions": 
["cpl"] + }, + "application/csrattrs": { + "source": "iana" + }, + "application/csta+xml": { + "source": "iana", + "compressible": true + }, + "application/cstadata+xml": { + "source": "iana", + "compressible": true + }, + "application/csvm+json": { + "source": "iana", + "compressible": true + }, + "application/cu-seeme": { + "source": "apache", + "extensions": ["cu"] + }, + "application/cwt": { + "source": "iana" + }, + "application/cybercash": { + "source": "iana" + }, + "application/dart": { + "compressible": true + }, + "application/dash+xml": { + "source": "iana", + "compressible": true, + "extensions": ["mpd"] + }, + "application/dash-patch+xml": { + "source": "iana", + "compressible": true, + "extensions": ["mpp"] + }, + "application/dashdelta": { + "source": "iana" + }, + "application/davmount+xml": { + "source": "iana", + "compressible": true, + "extensions": ["davmount"] + }, + "application/dca-rft": { + "source": "iana" + }, + "application/dcd": { + "source": "iana" + }, + "application/dec-dx": { + "source": "iana" + }, + "application/dialog-info+xml": { + "source": "iana", + "compressible": true + }, + "application/dicom": { + "source": "iana" + }, + "application/dicom+json": { + "source": "iana", + "compressible": true + }, + "application/dicom+xml": { + "source": "iana", + "compressible": true + }, + "application/dii": { + "source": "iana" + }, + "application/dit": { + "source": "iana" + }, + "application/dns": { + "source": "iana" + }, + "application/dns+json": { + "source": "iana", + "compressible": true + }, + "application/dns-message": { + "source": "iana" + }, + "application/docbook+xml": { + "source": "apache", + "compressible": true, + "extensions": ["dbk"] + }, + "application/dots+cbor": { + "source": "iana" + }, + "application/dskpp+xml": { + "source": "iana", + "compressible": true + }, + "application/dssc+der": { + "source": "iana", + "extensions": ["dssc"] + }, + "application/dssc+xml": { + "source": "iana", + "compressible": true, + "extensions": ["xdssc"] + }, + "application/dvcs": { + "source": "iana" + }, + "application/ecmascript": { + "source": "iana", + "compressible": true, + "extensions": ["es","ecma"] + }, + "application/edi-consent": { + "source": "iana" + }, + "application/edi-x12": { + "source": "iana", + "compressible": false + }, + "application/edifact": { + "source": "iana", + "compressible": false + }, + "application/efi": { + "source": "iana" + }, + "application/elm+json": { + "source": "iana", + "charset": "UTF-8", + "compressible": true + }, + "application/elm+xml": { + "source": "iana", + "compressible": true + }, + "application/emergencycalldata.cap+xml": { + "source": "iana", + "charset": "UTF-8", + "compressible": true + }, + "application/emergencycalldata.comment+xml": { + "source": "iana", + "compressible": true + }, + "application/emergencycalldata.control+xml": { + "source": "iana", + "compressible": true + }, + "application/emergencycalldata.deviceinfo+xml": { + "source": "iana", + "compressible": true + }, + "application/emergencycalldata.ecall.msd": { + "source": "iana" + }, + "application/emergencycalldata.providerinfo+xml": { + "source": "iana", + "compressible": true + }, + "application/emergencycalldata.serviceinfo+xml": { + "source": "iana", + "compressible": true + }, + "application/emergencycalldata.subscriberinfo+xml": { + "source": "iana", + "compressible": true + }, + "application/emergencycalldata.veds+xml": { + "source": "iana", + "compressible": true + }, + "application/emma+xml": { + "source": "iana", + "compressible": 
true, + "extensions": ["emma"] + }, + "application/emotionml+xml": { + "source": "iana", + "compressible": true, + "extensions": ["emotionml"] + }, + "application/encaprtp": { + "source": "iana" + }, + "application/epp+xml": { + "source": "iana", + "compressible": true + }, + "application/epub+zip": { + "source": "iana", + "compressible": false, + "extensions": ["epub"] + }, + "application/eshop": { + "source": "iana" + }, + "application/exi": { + "source": "iana", + "extensions": ["exi"] + }, + "application/expect-ct-report+json": { + "source": "iana", + "compressible": true + }, + "application/express": { + "source": "iana", + "extensions": ["exp"] + }, + "application/fastinfoset": { + "source": "iana" + }, + "application/fastsoap": { + "source": "iana" + }, + "application/fdt+xml": { + "source": "iana", + "compressible": true, + "extensions": ["fdt"] + }, + "application/fhir+json": { + "source": "iana", + "charset": "UTF-8", + "compressible": true + }, + "application/fhir+xml": { + "source": "iana", + "charset": "UTF-8", + "compressible": true + }, + "application/fido.trusted-apps+json": { + "compressible": true + }, + "application/fits": { + "source": "iana" + }, + "application/flexfec": { + "source": "iana" + }, + "application/font-sfnt": { + "source": "iana" + }, + "application/font-tdpfr": { + "source": "iana", + "extensions": ["pfr"] + }, + "application/font-woff": { + "source": "iana", + "compressible": false + }, + "application/framework-attributes+xml": { + "source": "iana", + "compressible": true + }, + "application/geo+json": { + "source": "iana", + "compressible": true, + "extensions": ["geojson"] + }, + "application/geo+json-seq": { + "source": "iana" + }, + "application/geopackage+sqlite3": { + "source": "iana" + }, + "application/geoxacml+xml": { + "source": "iana", + "compressible": true + }, + "application/gltf-buffer": { + "source": "iana" + }, + "application/gml+xml": { + "source": "iana", + "compressible": true, + "extensions": ["gml"] + }, + "application/gpx+xml": { + "source": "apache", + "compressible": true, + "extensions": ["gpx"] + }, + "application/gxf": { + "source": "apache", + "extensions": ["gxf"] + }, + "application/gzip": { + "source": "iana", + "compressible": false, + "extensions": ["gz"] + }, + "application/h224": { + "source": "iana" + }, + "application/held+xml": { + "source": "iana", + "compressible": true + }, + "application/hjson": { + "extensions": ["hjson"] + }, + "application/http": { + "source": "iana" + }, + "application/hyperstudio": { + "source": "iana", + "extensions": ["stk"] + }, + "application/ibe-key-request+xml": { + "source": "iana", + "compressible": true + }, + "application/ibe-pkg-reply+xml": { + "source": "iana", + "compressible": true + }, + "application/ibe-pp-data": { + "source": "iana" + }, + "application/iges": { + "source": "iana" + }, + "application/im-iscomposing+xml": { + "source": "iana", + "charset": "UTF-8", + "compressible": true + }, + "application/index": { + "source": "iana" + }, + "application/index.cmd": { + "source": "iana" + }, + "application/index.obj": { + "source": "iana" + }, + "application/index.response": { + "source": "iana" + }, + "application/index.vnd": { + "source": "iana" + }, + "application/inkml+xml": { + "source": "iana", + "compressible": true, + "extensions": ["ink","inkml"] + }, + "application/iotp": { + "source": "iana" + }, + "application/ipfix": { + "source": "iana", + "extensions": ["ipfix"] + }, + "application/ipp": { + "source": "iana" + }, + "application/isup": { + "source": "iana" + 
}, + "application/its+xml": { + "source": "iana", + "compressible": true, + "extensions": ["its"] + }, + "application/java-archive": { + "source": "apache", + "compressible": false, + "extensions": ["jar","war","ear"] + }, + "application/java-serialized-object": { + "source": "apache", + "compressible": false, + "extensions": ["ser"] + }, + "application/java-vm": { + "source": "apache", + "compressible": false, + "extensions": ["class"] + }, + "application/javascript": { + "source": "iana", + "charset": "UTF-8", + "compressible": true, + "extensions": ["js","mjs"] + }, + "application/jf2feed+json": { + "source": "iana", + "compressible": true + }, + "application/jose": { + "source": "iana" + }, + "application/jose+json": { + "source": "iana", + "compressible": true + }, + "application/jrd+json": { + "source": "iana", + "compressible": true + }, + "application/jscalendar+json": { + "source": "iana", + "compressible": true + }, + "application/json": { + "source": "iana", + "charset": "UTF-8", + "compressible": true, + "extensions": ["json","map"] + }, + "application/json-patch+json": { + "source": "iana", + "compressible": true + }, + "application/json-seq": { + "source": "iana" + }, + "application/json5": { + "extensions": ["json5"] + }, + "application/jsonml+json": { + "source": "apache", + "compressible": true, + "extensions": ["jsonml"] + }, + "application/jwk+json": { + "source": "iana", + "compressible": true + }, + "application/jwk-set+json": { + "source": "iana", + "compressible": true + }, + "application/jwt": { + "source": "iana" + }, + "application/kpml-request+xml": { + "source": "iana", + "compressible": true + }, + "application/kpml-response+xml": { + "source": "iana", + "compressible": true + }, + "application/ld+json": { + "source": "iana", + "compressible": true, + "extensions": ["jsonld"] + }, + "application/lgr+xml": { + "source": "iana", + "compressible": true, + "extensions": ["lgr"] + }, + "application/link-format": { + "source": "iana" + }, + "application/load-control+xml": { + "source": "iana", + "compressible": true + }, + "application/lost+xml": { + "source": "iana", + "compressible": true, + "extensions": ["lostxml"] + }, + "application/lostsync+xml": { + "source": "iana", + "compressible": true + }, + "application/lpf+zip": { + "source": "iana", + "compressible": false + }, + "application/lxf": { + "source": "iana" + }, + "application/mac-binhex40": { + "source": "iana", + "extensions": ["hqx"] + }, + "application/mac-compactpro": { + "source": "apache", + "extensions": ["cpt"] + }, + "application/macwriteii": { + "source": "iana" + }, + "application/mads+xml": { + "source": "iana", + "compressible": true, + "extensions": ["mads"] + }, + "application/manifest+json": { + "source": "iana", + "charset": "UTF-8", + "compressible": true, + "extensions": ["webmanifest"] + }, + "application/marc": { + "source": "iana", + "extensions": ["mrc"] + }, + "application/marcxml+xml": { + "source": "iana", + "compressible": true, + "extensions": ["mrcx"] + }, + "application/mathematica": { + "source": "iana", + "extensions": ["ma","nb","mb"] + }, + "application/mathml+xml": { + "source": "iana", + "compressible": true, + "extensions": ["mathml"] + }, + "application/mathml-content+xml": { + "source": "iana", + "compressible": true + }, + "application/mathml-presentation+xml": { + "source": "iana", + "compressible": true + }, + "application/mbms-associated-procedure-description+xml": { + "source": "iana", + "compressible": true + }, + "application/mbms-deregister+xml": { + 
"source": "iana", + "compressible": true + }, + "application/mbms-envelope+xml": { + "source": "iana", + "compressible": true + }, + "application/mbms-msk+xml": { + "source": "iana", + "compressible": true + }, + "application/mbms-msk-response+xml": { + "source": "iana", + "compressible": true + }, + "application/mbms-protection-description+xml": { + "source": "iana", + "compressible": true + }, + "application/mbms-reception-report+xml": { + "source": "iana", + "compressible": true + }, + "application/mbms-register+xml": { + "source": "iana", + "compressible": true + }, + "application/mbms-register-response+xml": { + "source": "iana", + "compressible": true + }, + "application/mbms-schedule+xml": { + "source": "iana", + "compressible": true + }, + "application/mbms-user-service-description+xml": { + "source": "iana", + "compressible": true + }, + "application/mbox": { + "source": "iana", + "extensions": ["mbox"] + }, + "application/media-policy-dataset+xml": { + "source": "iana", + "compressible": true, + "extensions": ["mpf"] + }, + "application/media_control+xml": { + "source": "iana", + "compressible": true + }, + "application/mediaservercontrol+xml": { + "source": "iana", + "compressible": true, + "extensions": ["mscml"] + }, + "application/merge-patch+json": { + "source": "iana", + "compressible": true + }, + "application/metalink+xml": { + "source": "apache", + "compressible": true, + "extensions": ["metalink"] + }, + "application/metalink4+xml": { + "source": "iana", + "compressible": true, + "extensions": ["meta4"] + }, + "application/mets+xml": { + "source": "iana", + "compressible": true, + "extensions": ["mets"] + }, + "application/mf4": { + "source": "iana" + }, + "application/mikey": { + "source": "iana" + }, + "application/mipc": { + "source": "iana" + }, + "application/missing-blocks+cbor-seq": { + "source": "iana" + }, + "application/mmt-aei+xml": { + "source": "iana", + "compressible": true, + "extensions": ["maei"] + }, + "application/mmt-usd+xml": { + "source": "iana", + "compressible": true, + "extensions": ["musd"] + }, + "application/mods+xml": { + "source": "iana", + "compressible": true, + "extensions": ["mods"] + }, + "application/moss-keys": { + "source": "iana" + }, + "application/moss-signature": { + "source": "iana" + }, + "application/mosskey-data": { + "source": "iana" + }, + "application/mosskey-request": { + "source": "iana" + }, + "application/mp21": { + "source": "iana", + "extensions": ["m21","mp21"] + }, + "application/mp4": { + "source": "iana", + "extensions": ["mp4s","m4p"] + }, + "application/mpeg4-generic": { + "source": "iana" + }, + "application/mpeg4-iod": { + "source": "iana" + }, + "application/mpeg4-iod-xmt": { + "source": "iana" + }, + "application/mrb-consumer+xml": { + "source": "iana", + "compressible": true + }, + "application/mrb-publish+xml": { + "source": "iana", + "compressible": true + }, + "application/msc-ivr+xml": { + "source": "iana", + "charset": "UTF-8", + "compressible": true + }, + "application/msc-mixer+xml": { + "source": "iana", + "charset": "UTF-8", + "compressible": true + }, + "application/msword": { + "source": "iana", + "compressible": false, + "extensions": ["doc","dot"] + }, + "application/mud+json": { + "source": "iana", + "compressible": true + }, + "application/multipart-core": { + "source": "iana" + }, + "application/mxf": { + "source": "iana", + "extensions": ["mxf"] + }, + "application/n-quads": { + "source": "iana", + "extensions": ["nq"] + }, + "application/n-triples": { + "source": "iana", + "extensions": 
["nt"] + }, + "application/nasdata": { + "source": "iana" + }, + "application/news-checkgroups": { + "source": "iana", + "charset": "US-ASCII" + }, + "application/news-groupinfo": { + "source": "iana", + "charset": "US-ASCII" + }, + "application/news-transmission": { + "source": "iana" + }, + "application/nlsml+xml": { + "source": "iana", + "compressible": true + }, + "application/node": { + "source": "iana", + "extensions": ["cjs"] + }, + "application/nss": { + "source": "iana" + }, + "application/oauth-authz-req+jwt": { + "source": "iana" + }, + "application/oblivious-dns-message": { + "source": "iana" + }, + "application/ocsp-request": { + "source": "iana" + }, + "application/ocsp-response": { + "source": "iana" + }, + "application/octet-stream": { + "source": "iana", + "compressible": false, + "extensions": ["bin","dms","lrf","mar","so","dist","distz","pkg","bpk","dump","elc","deploy","exe","dll","deb","dmg","iso","img","msi","msp","msm","buffer"] + }, + "application/oda": { + "source": "iana", + "extensions": ["oda"] + }, + "application/odm+xml": { + "source": "iana", + "compressible": true + }, + "application/odx": { + "source": "iana" + }, + "application/oebps-package+xml": { + "source": "iana", + "compressible": true, + "extensions": ["opf"] + }, + "application/ogg": { + "source": "iana", + "compressible": false, + "extensions": ["ogx"] + }, + "application/omdoc+xml": { + "source": "apache", + "compressible": true, + "extensions": ["omdoc"] + }, + "application/onenote": { + "source": "apache", + "extensions": ["onetoc","onetoc2","onetmp","onepkg"] + }, + "application/opc-nodeset+xml": { + "source": "iana", + "compressible": true + }, + "application/oscore": { + "source": "iana" + }, + "application/oxps": { + "source": "iana", + "extensions": ["oxps"] + }, + "application/p21": { + "source": "iana" + }, + "application/p21+zip": { + "source": "iana", + "compressible": false + }, + "application/p2p-overlay+xml": { + "source": "iana", + "compressible": true, + "extensions": ["relo"] + }, + "application/parityfec": { + "source": "iana" + }, + "application/passport": { + "source": "iana" + }, + "application/patch-ops-error+xml": { + "source": "iana", + "compressible": true, + "extensions": ["xer"] + }, + "application/pdf": { + "source": "iana", + "compressible": false, + "extensions": ["pdf"] + }, + "application/pdx": { + "source": "iana" + }, + "application/pem-certificate-chain": { + "source": "iana" + }, + "application/pgp-encrypted": { + "source": "iana", + "compressible": false, + "extensions": ["pgp"] + }, + "application/pgp-keys": { + "source": "iana", + "extensions": ["asc"] + }, + "application/pgp-signature": { + "source": "iana", + "extensions": ["asc","sig"] + }, + "application/pics-rules": { + "source": "apache", + "extensions": ["prf"] + }, + "application/pidf+xml": { + "source": "iana", + "charset": "UTF-8", + "compressible": true + }, + "application/pidf-diff+xml": { + "source": "iana", + "charset": "UTF-8", + "compressible": true + }, + "application/pkcs10": { + "source": "iana", + "extensions": ["p10"] + }, + "application/pkcs12": { + "source": "iana" + }, + "application/pkcs7-mime": { + "source": "iana", + "extensions": ["p7m","p7c"] + }, + "application/pkcs7-signature": { + "source": "iana", + "extensions": ["p7s"] + }, + "application/pkcs8": { + "source": "iana", + "extensions": ["p8"] + }, + "application/pkcs8-encrypted": { + "source": "iana" + }, + "application/pkix-attr-cert": { + "source": "iana", + "extensions": ["ac"] + }, + "application/pkix-cert": { + "source": 
"iana", + "extensions": ["cer"] + }, + "application/pkix-crl": { + "source": "iana", + "extensions": ["crl"] + }, + "application/pkix-pkipath": { + "source": "iana", + "extensions": ["pkipath"] + }, + "application/pkixcmp": { + "source": "iana", + "extensions": ["pki"] + }, + "application/pls+xml": { + "source": "iana", + "compressible": true, + "extensions": ["pls"] + }, + "application/poc-settings+xml": { + "source": "iana", + "charset": "UTF-8", + "compressible": true + }, + "application/postscript": { + "source": "iana", + "compressible": true, + "extensions": ["ai","eps","ps"] + }, + "application/ppsp-tracker+json": { + "source": "iana", + "compressible": true + }, + "application/problem+json": { + "source": "iana", + "compressible": true + }, + "application/problem+xml": { + "source": "iana", + "compressible": true + }, + "application/provenance+xml": { + "source": "iana", + "compressible": true, + "extensions": ["provx"] + }, + "application/prs.alvestrand.titrax-sheet": { + "source": "iana" + }, + "application/prs.cww": { + "source": "iana", + "extensions": ["cww"] + }, + "application/prs.cyn": { + "source": "iana", + "charset": "7-BIT" + }, + "application/prs.hpub+zip": { + "source": "iana", + "compressible": false + }, + "application/prs.nprend": { + "source": "iana" + }, + "application/prs.plucker": { + "source": "iana" + }, + "application/prs.rdf-xml-crypt": { + "source": "iana" + }, + "application/prs.xsf+xml": { + "source": "iana", + "compressible": true + }, + "application/pskc+xml": { + "source": "iana", + "compressible": true, + "extensions": ["pskcxml"] + }, + "application/pvd+json": { + "source": "iana", + "compressible": true + }, + "application/qsig": { + "source": "iana" + }, + "application/raml+yaml": { + "compressible": true, + "extensions": ["raml"] + }, + "application/raptorfec": { + "source": "iana" + }, + "application/rdap+json": { + "source": "iana", + "compressible": true + }, + "application/rdf+xml": { + "source": "iana", + "compressible": true, + "extensions": ["rdf","owl"] + }, + "application/reginfo+xml": { + "source": "iana", + "compressible": true, + "extensions": ["rif"] + }, + "application/relax-ng-compact-syntax": { + "source": "iana", + "extensions": ["rnc"] + }, + "application/remote-printing": { + "source": "iana" + }, + "application/reputon+json": { + "source": "iana", + "compressible": true + }, + "application/resource-lists+xml": { + "source": "iana", + "compressible": true, + "extensions": ["rl"] + }, + "application/resource-lists-diff+xml": { + "source": "iana", + "compressible": true, + "extensions": ["rld"] + }, + "application/rfc+xml": { + "source": "iana", + "compressible": true + }, + "application/riscos": { + "source": "iana" + }, + "application/rlmi+xml": { + "source": "iana", + "compressible": true + }, + "application/rls-services+xml": { + "source": "iana", + "compressible": true, + "extensions": ["rs"] + }, + "application/route-apd+xml": { + "source": "iana", + "compressible": true, + "extensions": ["rapd"] + }, + "application/route-s-tsid+xml": { + "source": "iana", + "compressible": true, + "extensions": ["sls"] + }, + "application/route-usd+xml": { + "source": "iana", + "compressible": true, + "extensions": ["rusd"] + }, + "application/rpki-ghostbusters": { + "source": "iana", + "extensions": ["gbr"] + }, + "application/rpki-manifest": { + "source": "iana", + "extensions": ["mft"] + }, + "application/rpki-publication": { + "source": "iana" + }, + "application/rpki-roa": { + "source": "iana", + "extensions": ["roa"] + }, + 
"application/rpki-updown": { + "source": "iana" + }, + "application/rsd+xml": { + "source": "apache", + "compressible": true, + "extensions": ["rsd"] + }, + "application/rss+xml": { + "source": "apache", + "compressible": true, + "extensions": ["rss"] + }, + "application/rtf": { + "source": "iana", + "compressible": true, + "extensions": ["rtf"] + }, + "application/rtploopback": { + "source": "iana" + }, + "application/rtx": { + "source": "iana" + }, + "application/samlassertion+xml": { + "source": "iana", + "compressible": true + }, + "application/samlmetadata+xml": { + "source": "iana", + "compressible": true + }, + "application/sarif+json": { + "source": "iana", + "compressible": true + }, + "application/sarif-external-properties+json": { + "source": "iana", + "compressible": true + }, + "application/sbe": { + "source": "iana" + }, + "application/sbml+xml": { + "source": "iana", + "compressible": true, + "extensions": ["sbml"] + }, + "application/scaip+xml": { + "source": "iana", + "compressible": true + }, + "application/scim+json": { + "source": "iana", + "compressible": true + }, + "application/scvp-cv-request": { + "source": "iana", + "extensions": ["scq"] + }, + "application/scvp-cv-response": { + "source": "iana", + "extensions": ["scs"] + }, + "application/scvp-vp-request": { + "source": "iana", + "extensions": ["spq"] + }, + "application/scvp-vp-response": { + "source": "iana", + "extensions": ["spp"] + }, + "application/sdp": { + "source": "iana", + "extensions": ["sdp"] + }, + "application/secevent+jwt": { + "source": "iana" + }, + "application/senml+cbor": { + "source": "iana" + }, + "application/senml+json": { + "source": "iana", + "compressible": true + }, + "application/senml+xml": { + "source": "iana", + "compressible": true, + "extensions": ["senmlx"] + }, + "application/senml-etch+cbor": { + "source": "iana" + }, + "application/senml-etch+json": { + "source": "iana", + "compressible": true + }, + "application/senml-exi": { + "source": "iana" + }, + "application/sensml+cbor": { + "source": "iana" + }, + "application/sensml+json": { + "source": "iana", + "compressible": true + }, + "application/sensml+xml": { + "source": "iana", + "compressible": true, + "extensions": ["sensmlx"] + }, + "application/sensml-exi": { + "source": "iana" + }, + "application/sep+xml": { + "source": "iana", + "compressible": true + }, + "application/sep-exi": { + "source": "iana" + }, + "application/session-info": { + "source": "iana" + }, + "application/set-payment": { + "source": "iana" + }, + "application/set-payment-initiation": { + "source": "iana", + "extensions": ["setpay"] + }, + "application/set-registration": { + "source": "iana" + }, + "application/set-registration-initiation": { + "source": "iana", + "extensions": ["setreg"] + }, + "application/sgml": { + "source": "iana" + }, + "application/sgml-open-catalog": { + "source": "iana" + }, + "application/shf+xml": { + "source": "iana", + "compressible": true, + "extensions": ["shf"] + }, + "application/sieve": { + "source": "iana", + "extensions": ["siv","sieve"] + }, + "application/simple-filter+xml": { + "source": "iana", + "compressible": true + }, + "application/simple-message-summary": { + "source": "iana" + }, + "application/simplesymbolcontainer": { + "source": "iana" + }, + "application/sipc": { + "source": "iana" + }, + "application/slate": { + "source": "iana" + }, + "application/smil": { + "source": "iana" + }, + "application/smil+xml": { + "source": "iana", + "compressible": true, + "extensions": ["smi","smil"] + }, + 
"application/smpte336m": { + "source": "iana" + }, + "application/soap+fastinfoset": { + "source": "iana" + }, + "application/soap+xml": { + "source": "iana", + "compressible": true + }, + "application/sparql-query": { + "source": "iana", + "extensions": ["rq"] + }, + "application/sparql-results+xml": { + "source": "iana", + "compressible": true, + "extensions": ["srx"] + }, + "application/spdx+json": { + "source": "iana", + "compressible": true + }, + "application/spirits-event+xml": { + "source": "iana", + "compressible": true + }, + "application/sql": { + "source": "iana" + }, + "application/srgs": { + "source": "iana", + "extensions": ["gram"] + }, + "application/srgs+xml": { + "source": "iana", + "compressible": true, + "extensions": ["grxml"] + }, + "application/sru+xml": { + "source": "iana", + "compressible": true, + "extensions": ["sru"] + }, + "application/ssdl+xml": { + "source": "apache", + "compressible": true, + "extensions": ["ssdl"] + }, + "application/ssml+xml": { + "source": "iana", + "compressible": true, + "extensions": ["ssml"] + }, + "application/stix+json": { + "source": "iana", + "compressible": true + }, + "application/swid+xml": { + "source": "iana", + "compressible": true, + "extensions": ["swidtag"] + }, + "application/tamp-apex-update": { + "source": "iana" + }, + "application/tamp-apex-update-confirm": { + "source": "iana" + }, + "application/tamp-community-update": { + "source": "iana" + }, + "application/tamp-community-update-confirm": { + "source": "iana" + }, + "application/tamp-error": { + "source": "iana" + }, + "application/tamp-sequence-adjust": { + "source": "iana" + }, + "application/tamp-sequence-adjust-confirm": { + "source": "iana" + }, + "application/tamp-status-query": { + "source": "iana" + }, + "application/tamp-status-response": { + "source": "iana" + }, + "application/tamp-update": { + "source": "iana" + }, + "application/tamp-update-confirm": { + "source": "iana" + }, + "application/tar": { + "compressible": true + }, + "application/taxii+json": { + "source": "iana", + "compressible": true + }, + "application/td+json": { + "source": "iana", + "compressible": true + }, + "application/tei+xml": { + "source": "iana", + "compressible": true, + "extensions": ["tei","teicorpus"] + }, + "application/tetra_isi": { + "source": "iana" + }, + "application/thraud+xml": { + "source": "iana", + "compressible": true, + "extensions": ["tfi"] + }, + "application/timestamp-query": { + "source": "iana" + }, + "application/timestamp-reply": { + "source": "iana" + }, + "application/timestamped-data": { + "source": "iana", + "extensions": ["tsd"] + }, + "application/tlsrpt+gzip": { + "source": "iana" + }, + "application/tlsrpt+json": { + "source": "iana", + "compressible": true + }, + "application/tnauthlist": { + "source": "iana" + }, + "application/token-introspection+jwt": { + "source": "iana" + }, + "application/toml": { + "compressible": true, + "extensions": ["toml"] + }, + "application/trickle-ice-sdpfrag": { + "source": "iana" + }, + "application/trig": { + "source": "iana", + "extensions": ["trig"] + }, + "application/ttml+xml": { + "source": "iana", + "compressible": true, + "extensions": ["ttml"] + }, + "application/tve-trigger": { + "source": "iana" + }, + "application/tzif": { + "source": "iana" + }, + "application/tzif-leap": { + "source": "iana" + }, + "application/ubjson": { + "compressible": false, + "extensions": ["ubj"] + }, + "application/ulpfec": { + "source": "iana" + }, + "application/urc-grpsheet+xml": { + "source": "iana", + 
"compressible": true + }, + "application/urc-ressheet+xml": { + "source": "iana", + "compressible": true, + "extensions": ["rsheet"] + }, + "application/urc-targetdesc+xml": { + "source": "iana", + "compressible": true, + "extensions": ["td"] + }, + "application/urc-uisocketdesc+xml": { + "source": "iana", + "compressible": true + }, + "application/vcard+json": { + "source": "iana", + "compressible": true + }, + "application/vcard+xml": { + "source": "iana", + "compressible": true + }, + "application/vemmi": { + "source": "iana" + }, + "application/vividence.scriptfile": { + "source": "apache" + }, + "application/vnd.1000minds.decision-model+xml": { + "source": "iana", + "compressible": true, + "extensions": ["1km"] + }, + "application/vnd.3gpp-prose+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp-prose-pc3ch+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp-v2x-local-service-information": { + "source": "iana" + }, + "application/vnd.3gpp.5gnas": { + "source": "iana" + }, + "application/vnd.3gpp.access-transfer-events+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.bsf+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.gmop+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.gtpc": { + "source": "iana" + }, + "application/vnd.3gpp.interworking-data": { + "source": "iana" + }, + "application/vnd.3gpp.lpp": { + "source": "iana" + }, + "application/vnd.3gpp.mc-signalling-ear": { + "source": "iana" + }, + "application/vnd.3gpp.mcdata-affiliation-command+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcdata-info+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcdata-payload": { + "source": "iana" + }, + "application/vnd.3gpp.mcdata-service-config+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcdata-signalling": { + "source": "iana" + }, + "application/vnd.3gpp.mcdata-ue-config+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcdata-user-profile+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcptt-affiliation-command+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcptt-floor-request+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcptt-info+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcptt-location-info+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcptt-mbms-usage-info+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcptt-service-config+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcptt-signed+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcptt-ue-config+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcptt-ue-init-config+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcptt-user-profile+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcvideo-affiliation-command+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcvideo-affiliation-info+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcvideo-info+xml": { + "source": "iana", + "compressible": true + }, + 
"application/vnd.3gpp.mcvideo-location-info+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcvideo-mbms-usage-info+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcvideo-service-config+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcvideo-transmission-request+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcvideo-ue-config+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcvideo-user-profile+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mid-call+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.ngap": { + "source": "iana" + }, + "application/vnd.3gpp.pfcp": { + "source": "iana" + }, + "application/vnd.3gpp.pic-bw-large": { + "source": "iana", + "extensions": ["plb"] + }, + "application/vnd.3gpp.pic-bw-small": { + "source": "iana", + "extensions": ["psb"] + }, + "application/vnd.3gpp.pic-bw-var": { + "source": "iana", + "extensions": ["pvb"] + }, + "application/vnd.3gpp.s1ap": { + "source": "iana" + }, + "application/vnd.3gpp.sms": { + "source": "iana" + }, + "application/vnd.3gpp.sms+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.srvcc-ext+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.srvcc-info+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.state-and-event-info+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.ussd+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp2.bcmcsinfo+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp2.sms": { + "source": "iana" + }, + "application/vnd.3gpp2.tcap": { + "source": "iana", + "extensions": ["tcap"] + }, + "application/vnd.3lightssoftware.imagescal": { + "source": "iana" + }, + "application/vnd.3m.post-it-notes": { + "source": "iana", + "extensions": ["pwn"] + }, + "application/vnd.accpac.simply.aso": { + "source": "iana", + "extensions": ["aso"] + }, + "application/vnd.accpac.simply.imp": { + "source": "iana", + "extensions": ["imp"] + }, + "application/vnd.acucobol": { + "source": "iana", + "extensions": ["acu"] + }, + "application/vnd.acucorp": { + "source": "iana", + "extensions": ["atc","acutc"] + }, + "application/vnd.adobe.air-application-installer-package+zip": { + "source": "apache", + "compressible": false, + "extensions": ["air"] + }, + "application/vnd.adobe.flash.movie": { + "source": "iana" + }, + "application/vnd.adobe.formscentral.fcdt": { + "source": "iana", + "extensions": ["fcdt"] + }, + "application/vnd.adobe.fxp": { + "source": "iana", + "extensions": ["fxp","fxpl"] + }, + "application/vnd.adobe.partial-upload": { + "source": "iana" + }, + "application/vnd.adobe.xdp+xml": { + "source": "iana", + "compressible": true, + "extensions": ["xdp"] + }, + "application/vnd.adobe.xfdf": { + "source": "iana", + "extensions": ["xfdf"] + }, + "application/vnd.aether.imp": { + "source": "iana" + }, + "application/vnd.afpc.afplinedata": { + "source": "iana" + }, + "application/vnd.afpc.afplinedata-pagedef": { + "source": "iana" + }, + "application/vnd.afpc.cmoca-cmresource": { + "source": "iana" + }, + "application/vnd.afpc.foca-charset": { + "source": "iana" + }, + "application/vnd.afpc.foca-codedfont": { + "source": "iana" + }, + "application/vnd.afpc.foca-codepage": { + "source": "iana" + }, + "application/vnd.afpc.modca": { + 
"source": "iana" + }, + "application/vnd.afpc.modca-cmtable": { + "source": "iana" + }, + "application/vnd.afpc.modca-formdef": { + "source": "iana" + }, + "application/vnd.afpc.modca-mediummap": { + "source": "iana" + }, + "application/vnd.afpc.modca-objectcontainer": { + "source": "iana" + }, + "application/vnd.afpc.modca-overlay": { + "source": "iana" + }, + "application/vnd.afpc.modca-pagesegment": { + "source": "iana" + }, + "application/vnd.age": { + "source": "iana", + "extensions": ["age"] + }, + "application/vnd.ah-barcode": { + "source": "iana" + }, + "application/vnd.ahead.space": { + "source": "iana", + "extensions": ["ahead"] + }, + "application/vnd.airzip.filesecure.azf": { + "source": "iana", + "extensions": ["azf"] + }, + "application/vnd.airzip.filesecure.azs": { + "source": "iana", + "extensions": ["azs"] + }, + "application/vnd.amadeus+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.amazon.ebook": { + "source": "apache", + "extensions": ["azw"] + }, + "application/vnd.amazon.mobi8-ebook": { + "source": "iana" + }, + "application/vnd.americandynamics.acc": { + "source": "iana", + "extensions": ["acc"] + }, + "application/vnd.amiga.ami": { + "source": "iana", + "extensions": ["ami"] + }, + "application/vnd.amundsen.maze+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.android.ota": { + "source": "iana" + }, + "application/vnd.android.package-archive": { + "source": "apache", + "compressible": false, + "extensions": ["apk"] + }, + "application/vnd.anki": { + "source": "iana" + }, + "application/vnd.anser-web-certificate-issue-initiation": { + "source": "iana", + "extensions": ["cii"] + }, + "application/vnd.anser-web-funds-transfer-initiation": { + "source": "apache", + "extensions": ["fti"] + }, + "application/vnd.antix.game-component": { + "source": "iana", + "extensions": ["atx"] + }, + "application/vnd.apache.arrow.file": { + "source": "iana" + }, + "application/vnd.apache.arrow.stream": { + "source": "iana" + }, + "application/vnd.apache.thrift.binary": { + "source": "iana" + }, + "application/vnd.apache.thrift.compact": { + "source": "iana" + }, + "application/vnd.apache.thrift.json": { + "source": "iana" + }, + "application/vnd.api+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.aplextor.warrp+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.apothekende.reservation+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.apple.installer+xml": { + "source": "iana", + "compressible": true, + "extensions": ["mpkg"] + }, + "application/vnd.apple.keynote": { + "source": "iana", + "extensions": ["key"] + }, + "application/vnd.apple.mpegurl": { + "source": "iana", + "extensions": ["m3u8"] + }, + "application/vnd.apple.numbers": { + "source": "iana", + "extensions": ["numbers"] + }, + "application/vnd.apple.pages": { + "source": "iana", + "extensions": ["pages"] + }, + "application/vnd.apple.pkpass": { + "compressible": false, + "extensions": ["pkpass"] + }, + "application/vnd.arastra.swi": { + "source": "iana" + }, + "application/vnd.aristanetworks.swi": { + "source": "iana", + "extensions": ["swi"] + }, + "application/vnd.artisan+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.artsquare": { + "source": "iana" + }, + "application/vnd.astraea-software.iota": { + "source": "iana", + "extensions": ["iota"] + }, + "application/vnd.audiograph": { + "source": "iana", + "extensions": ["aep"] + }, + 
"application/vnd.autopackage": { + "source": "iana" + }, + "application/vnd.avalon+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.avistar+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.balsamiq.bmml+xml": { + "source": "iana", + "compressible": true, + "extensions": ["bmml"] + }, + "application/vnd.balsamiq.bmpr": { + "source": "iana" + }, + "application/vnd.banana-accounting": { + "source": "iana" + }, + "application/vnd.bbf.usp.error": { + "source": "iana" + }, + "application/vnd.bbf.usp.msg": { + "source": "iana" + }, + "application/vnd.bbf.usp.msg+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.bekitzur-stech+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.bint.med-content": { + "source": "iana" + }, + "application/vnd.biopax.rdf+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.blink-idb-value-wrapper": { + "source": "iana" + }, + "application/vnd.blueice.multipass": { + "source": "iana", + "extensions": ["mpm"] + }, + "application/vnd.bluetooth.ep.oob": { + "source": "iana" + }, + "application/vnd.bluetooth.le.oob": { + "source": "iana" + }, + "application/vnd.bmi": { + "source": "iana", + "extensions": ["bmi"] + }, + "application/vnd.bpf": { + "source": "iana" + }, + "application/vnd.bpf3": { + "source": "iana" + }, + "application/vnd.businessobjects": { + "source": "iana", + "extensions": ["rep"] + }, + "application/vnd.byu.uapi+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.cab-jscript": { + "source": "iana" + }, + "application/vnd.canon-cpdl": { + "source": "iana" + }, + "application/vnd.canon-lips": { + "source": "iana" + }, + "application/vnd.capasystems-pg+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.cendio.thinlinc.clientconf": { + "source": "iana" + }, + "application/vnd.century-systems.tcp_stream": { + "source": "iana" + }, + "application/vnd.chemdraw+xml": { + "source": "iana", + "compressible": true, + "extensions": ["cdxml"] + }, + "application/vnd.chess-pgn": { + "source": "iana" + }, + "application/vnd.chipnuts.karaoke-mmd": { + "source": "iana", + "extensions": ["mmd"] + }, + "application/vnd.ciedi": { + "source": "iana" + }, + "application/vnd.cinderella": { + "source": "iana", + "extensions": ["cdy"] + }, + "application/vnd.cirpack.isdn-ext": { + "source": "iana" + }, + "application/vnd.citationstyles.style+xml": { + "source": "iana", + "compressible": true, + "extensions": ["csl"] + }, + "application/vnd.claymore": { + "source": "iana", + "extensions": ["cla"] + }, + "application/vnd.cloanto.rp9": { + "source": "iana", + "extensions": ["rp9"] + }, + "application/vnd.clonk.c4group": { + "source": "iana", + "extensions": ["c4g","c4d","c4f","c4p","c4u"] + }, + "application/vnd.cluetrust.cartomobile-config": { + "source": "iana", + "extensions": ["c11amc"] + }, + "application/vnd.cluetrust.cartomobile-config-pkg": { + "source": "iana", + "extensions": ["c11amz"] + }, + "application/vnd.coffeescript": { + "source": "iana" + }, + "application/vnd.collabio.xodocuments.document": { + "source": "iana" + }, + "application/vnd.collabio.xodocuments.document-template": { + "source": "iana" + }, + "application/vnd.collabio.xodocuments.presentation": { + "source": "iana" + }, + "application/vnd.collabio.xodocuments.presentation-template": { + "source": "iana" + }, + "application/vnd.collabio.xodocuments.spreadsheet": { + "source": "iana" + }, + 
"application/vnd.collabio.xodocuments.spreadsheet-template": { + "source": "iana" + }, + "application/vnd.collection+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.collection.doc+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.collection.next+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.comicbook+zip": { + "source": "iana", + "compressible": false + }, + "application/vnd.comicbook-rar": { + "source": "iana" + }, + "application/vnd.commerce-battelle": { + "source": "iana" + }, + "application/vnd.commonspace": { + "source": "iana", + "extensions": ["csp"] + }, + "application/vnd.contact.cmsg": { + "source": "iana", + "extensions": ["cdbcmsg"] + }, + "application/vnd.coreos.ignition+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.cosmocaller": { + "source": "iana", + "extensions": ["cmc"] + }, + "application/vnd.crick.clicker": { + "source": "iana", + "extensions": ["clkx"] + }, + "application/vnd.crick.clicker.keyboard": { + "source": "iana", + "extensions": ["clkk"] + }, + "application/vnd.crick.clicker.palette": { + "source": "iana", + "extensions": ["clkp"] + }, + "application/vnd.crick.clicker.template": { + "source": "iana", + "extensions": ["clkt"] + }, + "application/vnd.crick.clicker.wordbank": { + "source": "iana", + "extensions": ["clkw"] + }, + "application/vnd.criticaltools.wbs+xml": { + "source": "iana", + "compressible": true, + "extensions": ["wbs"] + }, + "application/vnd.cryptii.pipe+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.crypto-shade-file": { + "source": "iana" + }, + "application/vnd.cryptomator.encrypted": { + "source": "iana" + }, + "application/vnd.cryptomator.vault": { + "source": "iana" + }, + "application/vnd.ctc-posml": { + "source": "iana", + "extensions": ["pml"] + }, + "application/vnd.ctct.ws+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.cups-pdf": { + "source": "iana" + }, + "application/vnd.cups-postscript": { + "source": "iana" + }, + "application/vnd.cups-ppd": { + "source": "iana", + "extensions": ["ppd"] + }, + "application/vnd.cups-raster": { + "source": "iana" + }, + "application/vnd.cups-raw": { + "source": "iana" + }, + "application/vnd.curl": { + "source": "iana" + }, + "application/vnd.curl.car": { + "source": "apache", + "extensions": ["car"] + }, + "application/vnd.curl.pcurl": { + "source": "apache", + "extensions": ["pcurl"] + }, + "application/vnd.cyan.dean.root+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.cybank": { + "source": "iana" + }, + "application/vnd.cyclonedx+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.cyclonedx+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.d2l.coursepackage1p0+zip": { + "source": "iana", + "compressible": false + }, + "application/vnd.d3m-dataset": { + "source": "iana" + }, + "application/vnd.d3m-problem": { + "source": "iana" + }, + "application/vnd.dart": { + "source": "iana", + "compressible": true, + "extensions": ["dart"] + }, + "application/vnd.data-vision.rdz": { + "source": "iana", + "extensions": ["rdz"] + }, + "application/vnd.datapackage+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.dataresource+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.dbf": { + "source": "iana", + "extensions": ["dbf"] + }, + "application/vnd.debian.binary-package": { + "source": "iana" + }, + 
"application/vnd.dece.data": { + "source": "iana", + "extensions": ["uvf","uvvf","uvd","uvvd"] + }, + "application/vnd.dece.ttml+xml": { + "source": "iana", + "compressible": true, + "extensions": ["uvt","uvvt"] + }, + "application/vnd.dece.unspecified": { + "source": "iana", + "extensions": ["uvx","uvvx"] + }, + "application/vnd.dece.zip": { + "source": "iana", + "extensions": ["uvz","uvvz"] + }, + "application/vnd.denovo.fcselayout-link": { + "source": "iana", + "extensions": ["fe_launch"] + }, + "application/vnd.desmume.movie": { + "source": "iana" + }, + "application/vnd.dir-bi.plate-dl-nosuffix": { + "source": "iana" + }, + "application/vnd.dm.delegation+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.dna": { + "source": "iana", + "extensions": ["dna"] + }, + "application/vnd.document+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.dolby.mlp": { + "source": "apache", + "extensions": ["mlp"] + }, + "application/vnd.dolby.mobile.1": { + "source": "iana" + }, + "application/vnd.dolby.mobile.2": { + "source": "iana" + }, + "application/vnd.doremir.scorecloud-binary-document": { + "source": "iana" + }, + "application/vnd.dpgraph": { + "source": "iana", + "extensions": ["dpg"] + }, + "application/vnd.dreamfactory": { + "source": "iana", + "extensions": ["dfac"] + }, + "application/vnd.drive+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.ds-keypoint": { + "source": "apache", + "extensions": ["kpxx"] + }, + "application/vnd.dtg.local": { + "source": "iana" + }, + "application/vnd.dtg.local.flash": { + "source": "iana" + }, + "application/vnd.dtg.local.html": { + "source": "iana" + }, + "application/vnd.dvb.ait": { + "source": "iana", + "extensions": ["ait"] + }, + "application/vnd.dvb.dvbisl+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.dvb.dvbj": { + "source": "iana" + }, + "application/vnd.dvb.esgcontainer": { + "source": "iana" + }, + "application/vnd.dvb.ipdcdftnotifaccess": { + "source": "iana" + }, + "application/vnd.dvb.ipdcesgaccess": { + "source": "iana" + }, + "application/vnd.dvb.ipdcesgaccess2": { + "source": "iana" + }, + "application/vnd.dvb.ipdcesgpdd": { + "source": "iana" + }, + "application/vnd.dvb.ipdcroaming": { + "source": "iana" + }, + "application/vnd.dvb.iptv.alfec-base": { + "source": "iana" + }, + "application/vnd.dvb.iptv.alfec-enhancement": { + "source": "iana" + }, + "application/vnd.dvb.notif-aggregate-root+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.dvb.notif-container+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.dvb.notif-generic+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.dvb.notif-ia-msglist+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.dvb.notif-ia-registration-request+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.dvb.notif-ia-registration-response+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.dvb.notif-init+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.dvb.pfr": { + "source": "iana" + }, + "application/vnd.dvb.service": { + "source": "iana", + "extensions": ["svc"] + }, + "application/vnd.dxr": { + "source": "iana" + }, + "application/vnd.dynageo": { + "source": "iana", + "extensions": ["geo"] + }, + "application/vnd.dzr": { + "source": "iana" + }, + "application/vnd.easykaraoke.cdgdownload": { + "source": "iana" + }, + 
"application/vnd.ecdis-update": { + "source": "iana" + }, + "application/vnd.ecip.rlp": { + "source": "iana" + }, + "application/vnd.eclipse.ditto+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.ecowin.chart": { + "source": "iana", + "extensions": ["mag"] + }, + "application/vnd.ecowin.filerequest": { + "source": "iana" + }, + "application/vnd.ecowin.fileupdate": { + "source": "iana" + }, + "application/vnd.ecowin.series": { + "source": "iana" + }, + "application/vnd.ecowin.seriesrequest": { + "source": "iana" + }, + "application/vnd.ecowin.seriesupdate": { + "source": "iana" + }, + "application/vnd.efi.img": { + "source": "iana" + }, + "application/vnd.efi.iso": { + "source": "iana" + }, + "application/vnd.emclient.accessrequest+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.enliven": { + "source": "iana", + "extensions": ["nml"] + }, + "application/vnd.enphase.envoy": { + "source": "iana" + }, + "application/vnd.eprints.data+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.epson.esf": { + "source": "iana", + "extensions": ["esf"] + }, + "application/vnd.epson.msf": { + "source": "iana", + "extensions": ["msf"] + }, + "application/vnd.epson.quickanime": { + "source": "iana", + "extensions": ["qam"] + }, + "application/vnd.epson.salt": { + "source": "iana", + "extensions": ["slt"] + }, + "application/vnd.epson.ssf": { + "source": "iana", + "extensions": ["ssf"] + }, + "application/vnd.ericsson.quickcall": { + "source": "iana" + }, + "application/vnd.espass-espass+zip": { + "source": "iana", + "compressible": false + }, + "application/vnd.eszigno3+xml": { + "source": "iana", + "compressible": true, + "extensions": ["es3","et3"] + }, + "application/vnd.etsi.aoc+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.etsi.asic-e+zip": { + "source": "iana", + "compressible": false + }, + "application/vnd.etsi.asic-s+zip": { + "source": "iana", + "compressible": false + }, + "application/vnd.etsi.cug+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.etsi.iptvcommand+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.etsi.iptvdiscovery+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.etsi.iptvprofile+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.etsi.iptvsad-bc+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.etsi.iptvsad-cod+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.etsi.iptvsad-npvr+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.etsi.iptvservice+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.etsi.iptvsync+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.etsi.iptvueprofile+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.etsi.mcid+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.etsi.mheg5": { + "source": "iana" + }, + "application/vnd.etsi.overload-control-policy-dataset+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.etsi.pstn+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.etsi.sci+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.etsi.simservs+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.etsi.timestamp-token": { + "source": "iana" + }, + "application/vnd.etsi.tsl+xml": { + "source": "iana", + 
"compressible": true + }, + "application/vnd.etsi.tsl.der": { + "source": "iana" + }, + "application/vnd.eu.kasparian.car+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.eudora.data": { + "source": "iana" + }, + "application/vnd.evolv.ecig.profile": { + "source": "iana" + }, + "application/vnd.evolv.ecig.settings": { + "source": "iana" + }, + "application/vnd.evolv.ecig.theme": { + "source": "iana" + }, + "application/vnd.exstream-empower+zip": { + "source": "iana", + "compressible": false + }, + "application/vnd.exstream-package": { + "source": "iana" + }, + "application/vnd.ezpix-album": { + "source": "iana", + "extensions": ["ez2"] + }, + "application/vnd.ezpix-package": { + "source": "iana", + "extensions": ["ez3"] + }, + "application/vnd.f-secure.mobile": { + "source": "iana" + }, + "application/vnd.familysearch.gedcom+zip": { + "source": "iana", + "compressible": false + }, + "application/vnd.fastcopy-disk-image": { + "source": "iana" + }, + "application/vnd.fdf": { + "source": "iana", + "extensions": ["fdf"] + }, + "application/vnd.fdsn.mseed": { + "source": "iana", + "extensions": ["mseed"] + }, + "application/vnd.fdsn.seed": { + "source": "iana", + "extensions": ["seed","dataless"] + }, + "application/vnd.ffsns": { + "source": "iana" + }, + "application/vnd.ficlab.flb+zip": { + "source": "iana", + "compressible": false + }, + "application/vnd.filmit.zfc": { + "source": "iana" + }, + "application/vnd.fints": { + "source": "iana" + }, + "application/vnd.firemonkeys.cloudcell": { + "source": "iana" + }, + "application/vnd.flographit": { + "source": "iana", + "extensions": ["gph"] + }, + "application/vnd.fluxtime.clip": { + "source": "iana", + "extensions": ["ftc"] + }, + "application/vnd.font-fontforge-sfd": { + "source": "iana" + }, + "application/vnd.framemaker": { + "source": "iana", + "extensions": ["fm","frame","maker","book"] + }, + "application/vnd.frogans.fnc": { + "source": "iana", + "extensions": ["fnc"] + }, + "application/vnd.frogans.ltf": { + "source": "iana", + "extensions": ["ltf"] + }, + "application/vnd.fsc.weblaunch": { + "source": "iana", + "extensions": ["fsc"] + }, + "application/vnd.fujifilm.fb.docuworks": { + "source": "iana" + }, + "application/vnd.fujifilm.fb.docuworks.binder": { + "source": "iana" + }, + "application/vnd.fujifilm.fb.docuworks.container": { + "source": "iana" + }, + "application/vnd.fujifilm.fb.jfi+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.fujitsu.oasys": { + "source": "iana", + "extensions": ["oas"] + }, + "application/vnd.fujitsu.oasys2": { + "source": "iana", + "extensions": ["oa2"] + }, + "application/vnd.fujitsu.oasys3": { + "source": "iana", + "extensions": ["oa3"] + }, + "application/vnd.fujitsu.oasysgp": { + "source": "iana", + "extensions": ["fg5"] + }, + "application/vnd.fujitsu.oasysprs": { + "source": "iana", + "extensions": ["bh2"] + }, + "application/vnd.fujixerox.art-ex": { + "source": "iana" + }, + "application/vnd.fujixerox.art4": { + "source": "iana" + }, + "application/vnd.fujixerox.ddd": { + "source": "iana", + "extensions": ["ddd"] + }, + "application/vnd.fujixerox.docuworks": { + "source": "iana", + "extensions": ["xdw"] + }, + "application/vnd.fujixerox.docuworks.binder": { + "source": "iana", + "extensions": ["xbd"] + }, + "application/vnd.fujixerox.docuworks.container": { + "source": "iana" + }, + "application/vnd.fujixerox.hbpl": { + "source": "iana" + }, + "application/vnd.fut-misnet": { + "source": "iana" + }, + "application/vnd.futoin+cbor": { + 
"source": "iana" + }, + "application/vnd.futoin+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.fuzzysheet": { + "source": "iana", + "extensions": ["fzs"] + }, + "application/vnd.genomatix.tuxedo": { + "source": "iana", + "extensions": ["txd"] + }, + "application/vnd.gentics.grd+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.geo+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.geocube+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.geogebra.file": { + "source": "iana", + "extensions": ["ggb"] + }, + "application/vnd.geogebra.slides": { + "source": "iana" + }, + "application/vnd.geogebra.tool": { + "source": "iana", + "extensions": ["ggt"] + }, + "application/vnd.geometry-explorer": { + "source": "iana", + "extensions": ["gex","gre"] + }, + "application/vnd.geonext": { + "source": "iana", + "extensions": ["gxt"] + }, + "application/vnd.geoplan": { + "source": "iana", + "extensions": ["g2w"] + }, + "application/vnd.geospace": { + "source": "iana", + "extensions": ["g3w"] + }, + "application/vnd.gerber": { + "source": "iana" + }, + "application/vnd.globalplatform.card-content-mgt": { + "source": "iana" + }, + "application/vnd.globalplatform.card-content-mgt-response": { + "source": "iana" + }, + "application/vnd.gmx": { + "source": "iana", + "extensions": ["gmx"] + }, + "application/vnd.google-apps.document": { + "compressible": false, + "extensions": ["gdoc"] + }, + "application/vnd.google-apps.presentation": { + "compressible": false, + "extensions": ["gslides"] + }, + "application/vnd.google-apps.spreadsheet": { + "compressible": false, + "extensions": ["gsheet"] + }, + "application/vnd.google-earth.kml+xml": { + "source": "iana", + "compressible": true, + "extensions": ["kml"] + }, + "application/vnd.google-earth.kmz": { + "source": "iana", + "compressible": false, + "extensions": ["kmz"] + }, + "application/vnd.gov.sk.e-form+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.gov.sk.e-form+zip": { + "source": "iana", + "compressible": false + }, + "application/vnd.gov.sk.xmldatacontainer+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.grafeq": { + "source": "iana", + "extensions": ["gqf","gqs"] + }, + "application/vnd.gridmp": { + "source": "iana" + }, + "application/vnd.groove-account": { + "source": "iana", + "extensions": ["gac"] + }, + "application/vnd.groove-help": { + "source": "iana", + "extensions": ["ghf"] + }, + "application/vnd.groove-identity-message": { + "source": "iana", + "extensions": ["gim"] + }, + "application/vnd.groove-injector": { + "source": "iana", + "extensions": ["grv"] + }, + "application/vnd.groove-tool-message": { + "source": "iana", + "extensions": ["gtm"] + }, + "application/vnd.groove-tool-template": { + "source": "iana", + "extensions": ["tpl"] + }, + "application/vnd.groove-vcard": { + "source": "iana", + "extensions": ["vcg"] + }, + "application/vnd.hal+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.hal+xml": { + "source": "iana", + "compressible": true, + "extensions": ["hal"] + }, + "application/vnd.handheld-entertainment+xml": { + "source": "iana", + "compressible": true, + "extensions": ["zmm"] + }, + "application/vnd.hbci": { + "source": "iana", + "extensions": ["hbci"] + }, + "application/vnd.hc+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.hcl-bireports": { + "source": "iana" + }, + "application/vnd.hdt": { + "source": "iana" + }, 
+ "application/vnd.heroku+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.hhe.lesson-player": { + "source": "iana", + "extensions": ["les"] + }, + "application/vnd.hl7cda+xml": { + "source": "iana", + "charset": "UTF-8", + "compressible": true + }, + "application/vnd.hl7v2+xml": { + "source": "iana", + "charset": "UTF-8", + "compressible": true + }, + "application/vnd.hp-hpgl": { + "source": "iana", + "extensions": ["hpgl"] + }, + "application/vnd.hp-hpid": { + "source": "iana", + "extensions": ["hpid"] + }, + "application/vnd.hp-hps": { + "source": "iana", + "extensions": ["hps"] + }, + "application/vnd.hp-jlyt": { + "source": "iana", + "extensions": ["jlt"] + }, + "application/vnd.hp-pcl": { + "source": "iana", + "extensions": ["pcl"] + }, + "application/vnd.hp-pclxl": { + "source": "iana", + "extensions": ["pclxl"] + }, + "application/vnd.httphone": { + "source": "iana" + }, + "application/vnd.hydrostatix.sof-data": { + "source": "iana", + "extensions": ["sfd-hdstx"] + }, + "application/vnd.hyper+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.hyper-item+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.hyperdrive+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.hzn-3d-crossword": { + "source": "iana" + }, + "application/vnd.ibm.afplinedata": { + "source": "iana" + }, + "application/vnd.ibm.electronic-media": { + "source": "iana" + }, + "application/vnd.ibm.minipay": { + "source": "iana", + "extensions": ["mpy"] + }, + "application/vnd.ibm.modcap": { + "source": "iana", + "extensions": ["afp","listafp","list3820"] + }, + "application/vnd.ibm.rights-management": { + "source": "iana", + "extensions": ["irm"] + }, + "application/vnd.ibm.secure-container": { + "source": "iana", + "extensions": ["sc"] + }, + "application/vnd.iccprofile": { + "source": "iana", + "extensions": ["icc","icm"] + }, + "application/vnd.ieee.1905": { + "source": "iana" + }, + "application/vnd.igloader": { + "source": "iana", + "extensions": ["igl"] + }, + "application/vnd.imagemeter.folder+zip": { + "source": "iana", + "compressible": false + }, + "application/vnd.imagemeter.image+zip": { + "source": "iana", + "compressible": false + }, + "application/vnd.immervision-ivp": { + "source": "iana", + "extensions": ["ivp"] + }, + "application/vnd.immervision-ivu": { + "source": "iana", + "extensions": ["ivu"] + }, + "application/vnd.ims.imsccv1p1": { + "source": "iana" + }, + "application/vnd.ims.imsccv1p2": { + "source": "iana" + }, + "application/vnd.ims.imsccv1p3": { + "source": "iana" + }, + "application/vnd.ims.lis.v2.result+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.ims.lti.v2.toolconsumerprofile+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.ims.lti.v2.toolproxy+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.ims.lti.v2.toolproxy.id+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.ims.lti.v2.toolsettings+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.ims.lti.v2.toolsettings.simple+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.informedcontrol.rms+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.informix-visionary": { + "source": "iana" + }, + "application/vnd.infotech.project": { + "source": "iana" + }, + "application/vnd.infotech.project+xml": { + "source": "iana", + "compressible": true + }, + 
"application/vnd.innopath.wamp.notification": { + "source": "iana" + }, + "application/vnd.insors.igm": { + "source": "iana", + "extensions": ["igm"] + }, + "application/vnd.intercon.formnet": { + "source": "iana", + "extensions": ["xpw","xpx"] + }, + "application/vnd.intergeo": { + "source": "iana", + "extensions": ["i2g"] + }, + "application/vnd.intertrust.digibox": { + "source": "iana" + }, + "application/vnd.intertrust.nncp": { + "source": "iana" + }, + "application/vnd.intu.qbo": { + "source": "iana", + "extensions": ["qbo"] + }, + "application/vnd.intu.qfx": { + "source": "iana", + "extensions": ["qfx"] + }, + "application/vnd.iptc.g2.catalogitem+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.iptc.g2.conceptitem+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.iptc.g2.knowledgeitem+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.iptc.g2.newsitem+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.iptc.g2.newsmessage+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.iptc.g2.packageitem+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.iptc.g2.planningitem+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.ipunplugged.rcprofile": { + "source": "iana", + "extensions": ["rcprofile"] + }, + "application/vnd.irepository.package+xml": { + "source": "iana", + "compressible": true, + "extensions": ["irp"] + }, + "application/vnd.is-xpr": { + "source": "iana", + "extensions": ["xpr"] + }, + "application/vnd.isac.fcs": { + "source": "iana", + "extensions": ["fcs"] + }, + "application/vnd.iso11783-10+zip": { + "source": "iana", + "compressible": false + }, + "application/vnd.jam": { + "source": "iana", + "extensions": ["jam"] + }, + "application/vnd.japannet-directory-service": { + "source": "iana" + }, + "application/vnd.japannet-jpnstore-wakeup": { + "source": "iana" + }, + "application/vnd.japannet-payment-wakeup": { + "source": "iana" + }, + "application/vnd.japannet-registration": { + "source": "iana" + }, + "application/vnd.japannet-registration-wakeup": { + "source": "iana" + }, + "application/vnd.japannet-setstore-wakeup": { + "source": "iana" + }, + "application/vnd.japannet-verification": { + "source": "iana" + }, + "application/vnd.japannet-verification-wakeup": { + "source": "iana" + }, + "application/vnd.jcp.javame.midlet-rms": { + "source": "iana", + "extensions": ["rms"] + }, + "application/vnd.jisp": { + "source": "iana", + "extensions": ["jisp"] + }, + "application/vnd.joost.joda-archive": { + "source": "iana", + "extensions": ["joda"] + }, + "application/vnd.jsk.isdn-ngn": { + "source": "iana" + }, + "application/vnd.kahootz": { + "source": "iana", + "extensions": ["ktz","ktr"] + }, + "application/vnd.kde.karbon": { + "source": "iana", + "extensions": ["karbon"] + }, + "application/vnd.kde.kchart": { + "source": "iana", + "extensions": ["chrt"] + }, + "application/vnd.kde.kformula": { + "source": "iana", + "extensions": ["kfo"] + }, + "application/vnd.kde.kivio": { + "source": "iana", + "extensions": ["flw"] + }, + "application/vnd.kde.kontour": { + "source": "iana", + "extensions": ["kon"] + }, + "application/vnd.kde.kpresenter": { + "source": "iana", + "extensions": ["kpr","kpt"] + }, + "application/vnd.kde.kspread": { + "source": "iana", + "extensions": ["ksp"] + }, + "application/vnd.kde.kword": { + "source": "iana", + "extensions": ["kwd","kwt"] + }, + "application/vnd.kenameaapp": { + "source": 
"iana", + "extensions": ["htke"] + }, + "application/vnd.kidspiration": { + "source": "iana", + "extensions": ["kia"] + }, + "application/vnd.kinar": { + "source": "iana", + "extensions": ["kne","knp"] + }, + "application/vnd.koan": { + "source": "iana", + "extensions": ["skp","skd","skt","skm"] + }, + "application/vnd.kodak-descriptor": { + "source": "iana", + "extensions": ["sse"] + }, + "application/vnd.las": { + "source": "iana" + }, + "application/vnd.las.las+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.las.las+xml": { + "source": "iana", + "compressible": true, + "extensions": ["lasxml"] + }, + "application/vnd.laszip": { + "source": "iana" + }, + "application/vnd.leap+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.liberty-request+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.llamagraphics.life-balance.desktop": { + "source": "iana", + "extensions": ["lbd"] + }, + "application/vnd.llamagraphics.life-balance.exchange+xml": { + "source": "iana", + "compressible": true, + "extensions": ["lbe"] + }, + "application/vnd.logipipe.circuit+zip": { + "source": "iana", + "compressible": false + }, + "application/vnd.loom": { + "source": "iana" + }, + "application/vnd.lotus-1-2-3": { + "source": "iana", + "extensions": ["123"] + }, + "application/vnd.lotus-approach": { + "source": "iana", + "extensions": ["apr"] + }, + "application/vnd.lotus-freelance": { + "source": "iana", + "extensions": ["pre"] + }, + "application/vnd.lotus-notes": { + "source": "iana", + "extensions": ["nsf"] + }, + "application/vnd.lotus-organizer": { + "source": "iana", + "extensions": ["org"] + }, + "application/vnd.lotus-screencam": { + "source": "iana", + "extensions": ["scm"] + }, + "application/vnd.lotus-wordpro": { + "source": "iana", + "extensions": ["lwp"] + }, + "application/vnd.macports.portpkg": { + "source": "iana", + "extensions": ["portpkg"] + }, + "application/vnd.mapbox-vector-tile": { + "source": "iana", + "extensions": ["mvt"] + }, + "application/vnd.marlin.drm.actiontoken+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.marlin.drm.conftoken+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.marlin.drm.license+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.marlin.drm.mdcf": { + "source": "iana" + }, + "application/vnd.mason+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.maxar.archive.3tz+zip": { + "source": "iana", + "compressible": false + }, + "application/vnd.maxmind.maxmind-db": { + "source": "iana" + }, + "application/vnd.mcd": { + "source": "iana", + "extensions": ["mcd"] + }, + "application/vnd.medcalcdata": { + "source": "iana", + "extensions": ["mc1"] + }, + "application/vnd.mediastation.cdkey": { + "source": "iana", + "extensions": ["cdkey"] + }, + "application/vnd.meridian-slingshot": { + "source": "iana" + }, + "application/vnd.mfer": { + "source": "iana", + "extensions": ["mwf"] + }, + "application/vnd.mfmp": { + "source": "iana", + "extensions": ["mfm"] + }, + "application/vnd.micro+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.micrografx.flo": { + "source": "iana", + "extensions": ["flo"] + }, + "application/vnd.micrografx.igx": { + "source": "iana", + "extensions": ["igx"] + }, + "application/vnd.microsoft.portable-executable": { + "source": "iana" + }, + "application/vnd.microsoft.windows.thumbnail-cache": { + "source": "iana" + }, + "application/vnd.miele+json": { 
+ "source": "iana", + "compressible": true + }, + "application/vnd.mif": { + "source": "iana", + "extensions": ["mif"] + }, + "application/vnd.minisoft-hp3000-save": { + "source": "iana" + }, + "application/vnd.mitsubishi.misty-guard.trustweb": { + "source": "iana" + }, + "application/vnd.mobius.daf": { + "source": "iana", + "extensions": ["daf"] + }, + "application/vnd.mobius.dis": { + "source": "iana", + "extensions": ["dis"] + }, + "application/vnd.mobius.mbk": { + "source": "iana", + "extensions": ["mbk"] + }, + "application/vnd.mobius.mqy": { + "source": "iana", + "extensions": ["mqy"] + }, + "application/vnd.mobius.msl": { + "source": "iana", + "extensions": ["msl"] + }, + "application/vnd.mobius.plc": { + "source": "iana", + "extensions": ["plc"] + }, + "application/vnd.mobius.txf": { + "source": "iana", + "extensions": ["txf"] + }, + "application/vnd.mophun.application": { + "source": "iana", + "extensions": ["mpn"] + }, + "application/vnd.mophun.certificate": { + "source": "iana", + "extensions": ["mpc"] + }, + "application/vnd.motorola.flexsuite": { + "source": "iana" + }, + "application/vnd.motorola.flexsuite.adsi": { + "source": "iana" + }, + "application/vnd.motorola.flexsuite.fis": { + "source": "iana" + }, + "application/vnd.motorola.flexsuite.gotap": { + "source": "iana" + }, + "application/vnd.motorola.flexsuite.kmr": { + "source": "iana" + }, + "application/vnd.motorola.flexsuite.ttc": { + "source": "iana" + }, + "application/vnd.motorola.flexsuite.wem": { + "source": "iana" + }, + "application/vnd.motorola.iprm": { + "source": "iana" + }, + "application/vnd.mozilla.xul+xml": { + "source": "iana", + "compressible": true, + "extensions": ["xul"] + }, + "application/vnd.ms-3mfdocument": { + "source": "iana" + }, + "application/vnd.ms-artgalry": { + "source": "iana", + "extensions": ["cil"] + }, + "application/vnd.ms-asf": { + "source": "iana" + }, + "application/vnd.ms-cab-compressed": { + "source": "iana", + "extensions": ["cab"] + }, + "application/vnd.ms-color.iccprofile": { + "source": "apache" + }, + "application/vnd.ms-excel": { + "source": "iana", + "compressible": false, + "extensions": ["xls","xlm","xla","xlc","xlt","xlw"] + }, + "application/vnd.ms-excel.addin.macroenabled.12": { + "source": "iana", + "extensions": ["xlam"] + }, + "application/vnd.ms-excel.sheet.binary.macroenabled.12": { + "source": "iana", + "extensions": ["xlsb"] + }, + "application/vnd.ms-excel.sheet.macroenabled.12": { + "source": "iana", + "extensions": ["xlsm"] + }, + "application/vnd.ms-excel.template.macroenabled.12": { + "source": "iana", + "extensions": ["xltm"] + }, + "application/vnd.ms-fontobject": { + "source": "iana", + "compressible": true, + "extensions": ["eot"] + }, + "application/vnd.ms-htmlhelp": { + "source": "iana", + "extensions": ["chm"] + }, + "application/vnd.ms-ims": { + "source": "iana", + "extensions": ["ims"] + }, + "application/vnd.ms-lrm": { + "source": "iana", + "extensions": ["lrm"] + }, + "application/vnd.ms-office.activex+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.ms-officetheme": { + "source": "iana", + "extensions": ["thmx"] + }, + "application/vnd.ms-opentype": { + "source": "apache", + "compressible": true + }, + "application/vnd.ms-outlook": { + "compressible": false, + "extensions": ["msg"] + }, + "application/vnd.ms-package.obfuscated-opentype": { + "source": "apache" + }, + "application/vnd.ms-pki.seccat": { + "source": "apache", + "extensions": ["cat"] + }, + "application/vnd.ms-pki.stl": { + "source": "apache", + 
"extensions": ["stl"] + }, + "application/vnd.ms-playready.initiator+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.ms-powerpoint": { + "source": "iana", + "compressible": false, + "extensions": ["ppt","pps","pot"] + }, + "application/vnd.ms-powerpoint.addin.macroenabled.12": { + "source": "iana", + "extensions": ["ppam"] + }, + "application/vnd.ms-powerpoint.presentation.macroenabled.12": { + "source": "iana", + "extensions": ["pptm"] + }, + "application/vnd.ms-powerpoint.slide.macroenabled.12": { + "source": "iana", + "extensions": ["sldm"] + }, + "application/vnd.ms-powerpoint.slideshow.macroenabled.12": { + "source": "iana", + "extensions": ["ppsm"] + }, + "application/vnd.ms-powerpoint.template.macroenabled.12": { + "source": "iana", + "extensions": ["potm"] + }, + "application/vnd.ms-printdevicecapabilities+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.ms-printing.printticket+xml": { + "source": "apache", + "compressible": true + }, + "application/vnd.ms-printschematicket+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.ms-project": { + "source": "iana", + "extensions": ["mpp","mpt"] + }, + "application/vnd.ms-tnef": { + "source": "iana" + }, + "application/vnd.ms-windows.devicepairing": { + "source": "iana" + }, + "application/vnd.ms-windows.nwprinting.oob": { + "source": "iana" + }, + "application/vnd.ms-windows.printerpairing": { + "source": "iana" + }, + "application/vnd.ms-windows.wsd.oob": { + "source": "iana" + }, + "application/vnd.ms-wmdrm.lic-chlg-req": { + "source": "iana" + }, + "application/vnd.ms-wmdrm.lic-resp": { + "source": "iana" + }, + "application/vnd.ms-wmdrm.meter-chlg-req": { + "source": "iana" + }, + "application/vnd.ms-wmdrm.meter-resp": { + "source": "iana" + }, + "application/vnd.ms-word.document.macroenabled.12": { + "source": "iana", + "extensions": ["docm"] + }, + "application/vnd.ms-word.template.macroenabled.12": { + "source": "iana", + "extensions": ["dotm"] + }, + "application/vnd.ms-works": { + "source": "iana", + "extensions": ["wps","wks","wcm","wdb"] + }, + "application/vnd.ms-wpl": { + "source": "iana", + "extensions": ["wpl"] + }, + "application/vnd.ms-xpsdocument": { + "source": "iana", + "compressible": false, + "extensions": ["xps"] + }, + "application/vnd.msa-disk-image": { + "source": "iana" + }, + "application/vnd.mseq": { + "source": "iana", + "extensions": ["mseq"] + }, + "application/vnd.msign": { + "source": "iana" + }, + "application/vnd.multiad.creator": { + "source": "iana" + }, + "application/vnd.multiad.creator.cif": { + "source": "iana" + }, + "application/vnd.music-niff": { + "source": "iana" + }, + "application/vnd.musician": { + "source": "iana", + "extensions": ["mus"] + }, + "application/vnd.muvee.style": { + "source": "iana", + "extensions": ["msty"] + }, + "application/vnd.mynfc": { + "source": "iana", + "extensions": ["taglet"] + }, + "application/vnd.nacamar.ybrid+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.ncd.control": { + "source": "iana" + }, + "application/vnd.ncd.reference": { + "source": "iana" + }, + "application/vnd.nearst.inv+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.nebumind.line": { + "source": "iana" + }, + "application/vnd.nervana": { + "source": "iana" + }, + "application/vnd.netfpx": { + "source": "iana" + }, + "application/vnd.neurolanguage.nlu": { + "source": "iana", + "extensions": ["nlu"] + }, + "application/vnd.nimn": { + "source": "iana" + }, + 
"application/vnd.nintendo.nitro.rom": { + "source": "iana" + }, + "application/vnd.nintendo.snes.rom": { + "source": "iana" + }, + "application/vnd.nitf": { + "source": "iana", + "extensions": ["ntf","nitf"] + }, + "application/vnd.noblenet-directory": { + "source": "iana", + "extensions": ["nnd"] + }, + "application/vnd.noblenet-sealer": { + "source": "iana", + "extensions": ["nns"] + }, + "application/vnd.noblenet-web": { + "source": "iana", + "extensions": ["nnw"] + }, + "application/vnd.nokia.catalogs": { + "source": "iana" + }, + "application/vnd.nokia.conml+wbxml": { + "source": "iana" + }, + "application/vnd.nokia.conml+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.nokia.iptv.config+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.nokia.isds-radio-presets": { + "source": "iana" + }, + "application/vnd.nokia.landmark+wbxml": { + "source": "iana" + }, + "application/vnd.nokia.landmark+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.nokia.landmarkcollection+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.nokia.n-gage.ac+xml": { + "source": "iana", + "compressible": true, + "extensions": ["ac"] + }, + "application/vnd.nokia.n-gage.data": { + "source": "iana", + "extensions": ["ngdat"] + }, + "application/vnd.nokia.n-gage.symbian.install": { + "source": "iana", + "extensions": ["n-gage"] + }, + "application/vnd.nokia.ncd": { + "source": "iana" + }, + "application/vnd.nokia.pcd+wbxml": { + "source": "iana" + }, + "application/vnd.nokia.pcd+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.nokia.radio-preset": { + "source": "iana", + "extensions": ["rpst"] + }, + "application/vnd.nokia.radio-presets": { + "source": "iana", + "extensions": ["rpss"] + }, + "application/vnd.novadigm.edm": { + "source": "iana", + "extensions": ["edm"] + }, + "application/vnd.novadigm.edx": { + "source": "iana", + "extensions": ["edx"] + }, + "application/vnd.novadigm.ext": { + "source": "iana", + "extensions": ["ext"] + }, + "application/vnd.ntt-local.content-share": { + "source": "iana" + }, + "application/vnd.ntt-local.file-transfer": { + "source": "iana" + }, + "application/vnd.ntt-local.ogw_remote-access": { + "source": "iana" + }, + "application/vnd.ntt-local.sip-ta_remote": { + "source": "iana" + }, + "application/vnd.ntt-local.sip-ta_tcp_stream": { + "source": "iana" + }, + "application/vnd.oasis.opendocument.chart": { + "source": "iana", + "extensions": ["odc"] + }, + "application/vnd.oasis.opendocument.chart-template": { + "source": "iana", + "extensions": ["otc"] + }, + "application/vnd.oasis.opendocument.database": { + "source": "iana", + "extensions": ["odb"] + }, + "application/vnd.oasis.opendocument.formula": { + "source": "iana", + "extensions": ["odf"] + }, + "application/vnd.oasis.opendocument.formula-template": { + "source": "iana", + "extensions": ["odft"] + }, + "application/vnd.oasis.opendocument.graphics": { + "source": "iana", + "compressible": false, + "extensions": ["odg"] + }, + "application/vnd.oasis.opendocument.graphics-template": { + "source": "iana", + "extensions": ["otg"] + }, + "application/vnd.oasis.opendocument.image": { + "source": "iana", + "extensions": ["odi"] + }, + "application/vnd.oasis.opendocument.image-template": { + "source": "iana", + "extensions": ["oti"] + }, + "application/vnd.oasis.opendocument.presentation": { + "source": "iana", + "compressible": false, + "extensions": ["odp"] + }, + 
"application/vnd.oasis.opendocument.presentation-template": { + "source": "iana", + "extensions": ["otp"] + }, + "application/vnd.oasis.opendocument.spreadsheet": { + "source": "iana", + "compressible": false, + "extensions": ["ods"] + }, + "application/vnd.oasis.opendocument.spreadsheet-template": { + "source": "iana", + "extensions": ["ots"] + }, + "application/vnd.oasis.opendocument.text": { + "source": "iana", + "compressible": false, + "extensions": ["odt"] + }, + "application/vnd.oasis.opendocument.text-master": { + "source": "iana", + "extensions": ["odm"] + }, + "application/vnd.oasis.opendocument.text-template": { + "source": "iana", + "extensions": ["ott"] + }, + "application/vnd.oasis.opendocument.text-web": { + "source": "iana", + "extensions": ["oth"] + }, + "application/vnd.obn": { + "source": "iana" + }, + "application/vnd.ocf+cbor": { + "source": "iana" + }, + "application/vnd.oci.image.manifest.v1+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.oftn.l10n+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.oipf.contentaccessdownload+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oipf.contentaccessstreaming+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oipf.cspg-hexbinary": { + "source": "iana" + }, + "application/vnd.oipf.dae.svg+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oipf.dae.xhtml+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oipf.mippvcontrolmessage+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oipf.pae.gem": { + "source": "iana" + }, + "application/vnd.oipf.spdiscovery+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oipf.spdlist+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oipf.ueprofile+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oipf.userprofile+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.olpc-sugar": { + "source": "iana", + "extensions": ["xo"] + }, + "application/vnd.oma-scws-config": { + "source": "iana" + }, + "application/vnd.oma-scws-http-request": { + "source": "iana" + }, + "application/vnd.oma-scws-http-response": { + "source": "iana" + }, + "application/vnd.oma.bcast.associated-procedure-parameter+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.bcast.drm-trigger+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.bcast.imd+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.bcast.ltkm": { + "source": "iana" + }, + "application/vnd.oma.bcast.notification+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.bcast.provisioningtrigger": { + "source": "iana" + }, + "application/vnd.oma.bcast.sgboot": { + "source": "iana" + }, + "application/vnd.oma.bcast.sgdd+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.bcast.sgdu": { + "source": "iana" + }, + "application/vnd.oma.bcast.simple-symbol-container": { + "source": "iana" + }, + "application/vnd.oma.bcast.smartcard-trigger+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.bcast.sprov+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.bcast.stkm": { + "source": "iana" + }, + "application/vnd.oma.cab-address-book+xml": { + "source": "iana", + "compressible": true + }, + 
"application/vnd.oma.cab-feature-handler+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.cab-pcc+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.cab-subs-invite+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.cab-user-prefs+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.dcd": { + "source": "iana" + }, + "application/vnd.oma.dcdc": { + "source": "iana" + }, + "application/vnd.oma.dd2+xml": { + "source": "iana", + "compressible": true, + "extensions": ["dd2"] + }, + "application/vnd.oma.drm.risd+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.group-usage-list+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.lwm2m+cbor": { + "source": "iana" + }, + "application/vnd.oma.lwm2m+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.lwm2m+tlv": { + "source": "iana" + }, + "application/vnd.oma.pal+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.poc.detailed-progress-report+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.poc.final-report+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.poc.groups+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.poc.invocation-descriptor+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.poc.optimized-progress-report+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.push": { + "source": "iana" + }, + "application/vnd.oma.scidm.messages+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.xcap-directory+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.omads-email+xml": { + "source": "iana", + "charset": "UTF-8", + "compressible": true + }, + "application/vnd.omads-file+xml": { + "source": "iana", + "charset": "UTF-8", + "compressible": true + }, + "application/vnd.omads-folder+xml": { + "source": "iana", + "charset": "UTF-8", + "compressible": true + }, + "application/vnd.omaloc-supl-init": { + "source": "iana" + }, + "application/vnd.onepager": { + "source": "iana" + }, + "application/vnd.onepagertamp": { + "source": "iana" + }, + "application/vnd.onepagertamx": { + "source": "iana" + }, + "application/vnd.onepagertat": { + "source": "iana" + }, + "application/vnd.onepagertatp": { + "source": "iana" + }, + "application/vnd.onepagertatx": { + "source": "iana" + }, + "application/vnd.openblox.game+xml": { + "source": "iana", + "compressible": true, + "extensions": ["obgx"] + }, + "application/vnd.openblox.game-binary": { + "source": "iana" + }, + "application/vnd.openeye.oeb": { + "source": "iana" + }, + "application/vnd.openofficeorg.extension": { + "source": "apache", + "extensions": ["oxt"] + }, + "application/vnd.openstreetmap.data+xml": { + "source": "iana", + "compressible": true, + "extensions": ["osm"] + }, + "application/vnd.opentimestamps.ots": { + "source": "iana" + }, + "application/vnd.openxmlformats-officedocument.custom-properties+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.customxmlproperties+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.drawing+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.drawingml.chart+xml": { + "source": "iana", + 
"compressible": true + }, + "application/vnd.openxmlformats-officedocument.drawingml.chartshapes+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.drawingml.diagramcolors+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.drawingml.diagramdata+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.drawingml.diagramlayout+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.drawingml.diagramstyle+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.extended-properties+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.presentationml.commentauthors+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.presentationml.comments+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.presentationml.handoutmaster+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.presentationml.notesmaster+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.presentationml.notesslide+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.presentationml.presentation": { + "source": "iana", + "compressible": false, + "extensions": ["pptx"] + }, + "application/vnd.openxmlformats-officedocument.presentationml.presentation.main+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.presentationml.presprops+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.presentationml.slide": { + "source": "iana", + "extensions": ["sldx"] + }, + "application/vnd.openxmlformats-officedocument.presentationml.slide+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.presentationml.slidelayout+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.presentationml.slidemaster+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.presentationml.slideshow": { + "source": "iana", + "extensions": ["ppsx"] + }, + "application/vnd.openxmlformats-officedocument.presentationml.slideshow.main+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.presentationml.slideupdateinfo+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.presentationml.tablestyles+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.presentationml.tags+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.presentationml.template": { + "source": "iana", + "extensions": ["potx"] + }, + "application/vnd.openxmlformats-officedocument.presentationml.template.main+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.presentationml.viewprops+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.calcchain+xml": { + "source": "iana", + 
"compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.chartsheet+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.comments+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.connections+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.dialogsheet+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.externallink+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.pivotcachedefinition+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.pivotcacherecords+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.pivottable+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.querytable+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.revisionheaders+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.revisionlog+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.sharedstrings+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet": { + "source": "iana", + "compressible": false, + "extensions": ["xlsx"] + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet.main+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.sheetmetadata+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.styles+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.table+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.tablesinglecells+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.template": { + "source": "iana", + "extensions": ["xltx"] + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.template.main+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.usernames+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.volatiledependencies+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.worksheet+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.theme+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.themeoverride+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.vmldrawing": { + "source": "iana" + }, + "application/vnd.openxmlformats-officedocument.wordprocessingml.comments+xml": { + "source": "iana", + "compressible": true + }, + 
"application/vnd.openxmlformats-officedocument.wordprocessingml.document": { + "source": "iana", + "compressible": false, + "extensions": ["docx"] + }, + "application/vnd.openxmlformats-officedocument.wordprocessingml.document.glossary+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.wordprocessingml.document.main+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.wordprocessingml.endnotes+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.wordprocessingml.fonttable+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.wordprocessingml.footer+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.wordprocessingml.footnotes+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.wordprocessingml.numbering+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.wordprocessingml.settings+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.wordprocessingml.styles+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.wordprocessingml.template": { + "source": "iana", + "extensions": ["dotx"] + }, + "application/vnd.openxmlformats-officedocument.wordprocessingml.template.main+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.wordprocessingml.websettings+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-package.core-properties+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-package.digital-signature-xmlsignature+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-package.relationships+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oracle.resource+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.orange.indata": { + "source": "iana" + }, + "application/vnd.osa.netdeploy": { + "source": "iana" + }, + "application/vnd.osgeo.mapguide.package": { + "source": "iana", + "extensions": ["mgp"] + }, + "application/vnd.osgi.bundle": { + "source": "iana" + }, + "application/vnd.osgi.dp": { + "source": "iana", + "extensions": ["dp"] + }, + "application/vnd.osgi.subsystem": { + "source": "iana", + "extensions": ["esa"] + }, + "application/vnd.otps.ct-kip+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oxli.countgraph": { + "source": "iana" + }, + "application/vnd.pagerduty+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.palm": { + "source": "iana", + "extensions": ["pdb","pqa","oprc"] + }, + "application/vnd.panoply": { + "source": "iana" + }, + "application/vnd.paos.xml": { + "source": "iana" + }, + "application/vnd.patentdive": { + "source": "iana" + }, + "application/vnd.patientecommsdoc": { + "source": "iana" + }, + "application/vnd.pawaafile": { + "source": "iana", + "extensions": ["paw"] + }, + "application/vnd.pcos": { + "source": "iana" + }, + "application/vnd.pg.format": { + "source": "iana", + "extensions": ["str"] + }, + "application/vnd.pg.osasli": { + "source": "iana", + "extensions": ["ei6"] + }, + "application/vnd.piaccess.application-licence": { + "source": 
"iana" + }, + "application/vnd.picsel": { + "source": "iana", + "extensions": ["efif"] + }, + "application/vnd.pmi.widget": { + "source": "iana", + "extensions": ["wg"] + }, + "application/vnd.poc.group-advertisement+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.pocketlearn": { + "source": "iana", + "extensions": ["plf"] + }, + "application/vnd.powerbuilder6": { + "source": "iana", + "extensions": ["pbd"] + }, + "application/vnd.powerbuilder6-s": { + "source": "iana" + }, + "application/vnd.powerbuilder7": { + "source": "iana" + }, + "application/vnd.powerbuilder7-s": { + "source": "iana" + }, + "application/vnd.powerbuilder75": { + "source": "iana" + }, + "application/vnd.powerbuilder75-s": { + "source": "iana" + }, + "application/vnd.preminet": { + "source": "iana" + }, + "application/vnd.previewsystems.box": { + "source": "iana", + "extensions": ["box"] + }, + "application/vnd.proteus.magazine": { + "source": "iana", + "extensions": ["mgz"] + }, + "application/vnd.psfs": { + "source": "iana" + }, + "application/vnd.publishare-delta-tree": { + "source": "iana", + "extensions": ["qps"] + }, + "application/vnd.pvi.ptid1": { + "source": "iana", + "extensions": ["ptid"] + }, + "application/vnd.pwg-multiplexed": { + "source": "iana" + }, + "application/vnd.pwg-xhtml-print+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.qualcomm.brew-app-res": { + "source": "iana" + }, + "application/vnd.quarantainenet": { + "source": "iana" + }, + "application/vnd.quark.quarkxpress": { + "source": "iana", + "extensions": ["qxd","qxt","qwd","qwt","qxl","qxb"] + }, + "application/vnd.quobject-quoxdocument": { + "source": "iana" + }, + "application/vnd.radisys.moml+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.radisys.msml+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.radisys.msml-audit+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.radisys.msml-audit-conf+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.radisys.msml-audit-conn+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.radisys.msml-audit-dialog+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.radisys.msml-audit-stream+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.radisys.msml-conf+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.radisys.msml-dialog+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.radisys.msml-dialog-base+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.radisys.msml-dialog-fax-detect+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.radisys.msml-dialog-fax-sendrecv+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.radisys.msml-dialog-group+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.radisys.msml-dialog-speech+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.radisys.msml-dialog-transform+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.rainstor.data": { + "source": "iana" + }, + "application/vnd.rapid": { + "source": "iana" + }, + "application/vnd.rar": { + "source": "iana", + "extensions": ["rar"] + }, + "application/vnd.realvnc.bed": { + "source": "iana", + "extensions": ["bed"] + }, + "application/vnd.recordare.musicxml": { + "source": "iana", + "extensions": ["mxl"] + }, + 
"application/vnd.recordare.musicxml+xml": { + "source": "iana", + "compressible": true, + "extensions": ["musicxml"] + }, + "application/vnd.renlearn.rlprint": { + "source": "iana" + }, + "application/vnd.resilient.logic": { + "source": "iana" + }, + "application/vnd.restful+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.rig.cryptonote": { + "source": "iana", + "extensions": ["cryptonote"] + }, + "application/vnd.rim.cod": { + "source": "apache", + "extensions": ["cod"] + }, + "application/vnd.rn-realmedia": { + "source": "apache", + "extensions": ["rm"] + }, + "application/vnd.rn-realmedia-vbr": { + "source": "apache", + "extensions": ["rmvb"] + }, + "application/vnd.route66.link66+xml": { + "source": "iana", + "compressible": true, + "extensions": ["link66"] + }, + "application/vnd.rs-274x": { + "source": "iana" + }, + "application/vnd.ruckus.download": { + "source": "iana" + }, + "application/vnd.s3sms": { + "source": "iana" + }, + "application/vnd.sailingtracker.track": { + "source": "iana", + "extensions": ["st"] + }, + "application/vnd.sar": { + "source": "iana" + }, + "application/vnd.sbm.cid": { + "source": "iana" + }, + "application/vnd.sbm.mid2": { + "source": "iana" + }, + "application/vnd.scribus": { + "source": "iana" + }, + "application/vnd.sealed.3df": { + "source": "iana" + }, + "application/vnd.sealed.csf": { + "source": "iana" + }, + "application/vnd.sealed.doc": { + "source": "iana" + }, + "application/vnd.sealed.eml": { + "source": "iana" + }, + "application/vnd.sealed.mht": { + "source": "iana" + }, + "application/vnd.sealed.net": { + "source": "iana" + }, + "application/vnd.sealed.ppt": { + "source": "iana" + }, + "application/vnd.sealed.tiff": { + "source": "iana" + }, + "application/vnd.sealed.xls": { + "source": "iana" + }, + "application/vnd.sealedmedia.softseal.html": { + "source": "iana" + }, + "application/vnd.sealedmedia.softseal.pdf": { + "source": "iana" + }, + "application/vnd.seemail": { + "source": "iana", + "extensions": ["see"] + }, + "application/vnd.seis+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.sema": { + "source": "iana", + "extensions": ["sema"] + }, + "application/vnd.semd": { + "source": "iana", + "extensions": ["semd"] + }, + "application/vnd.semf": { + "source": "iana", + "extensions": ["semf"] + }, + "application/vnd.shade-save-file": { + "source": "iana" + }, + "application/vnd.shana.informed.formdata": { + "source": "iana", + "extensions": ["ifm"] + }, + "application/vnd.shana.informed.formtemplate": { + "source": "iana", + "extensions": ["itp"] + }, + "application/vnd.shana.informed.interchange": { + "source": "iana", + "extensions": ["iif"] + }, + "application/vnd.shana.informed.package": { + "source": "iana", + "extensions": ["ipk"] + }, + "application/vnd.shootproof+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.shopkick+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.shp": { + "source": "iana" + }, + "application/vnd.shx": { + "source": "iana" + }, + "application/vnd.sigrok.session": { + "source": "iana" + }, + "application/vnd.simtech-mindmapper": { + "source": "iana", + "extensions": ["twd","twds"] + }, + "application/vnd.siren+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.smaf": { + "source": "iana", + "extensions": ["mmf"] + }, + "application/vnd.smart.notebook": { + "source": "iana" + }, + "application/vnd.smart.teacher": { + "source": "iana", + "extensions": ["teacher"] + }, + 
"application/vnd.snesdev-page-table": { + "source": "iana" + }, + "application/vnd.software602.filler.form+xml": { + "source": "iana", + "compressible": true, + "extensions": ["fo"] + }, + "application/vnd.software602.filler.form-xml-zip": { + "source": "iana" + }, + "application/vnd.solent.sdkm+xml": { + "source": "iana", + "compressible": true, + "extensions": ["sdkm","sdkd"] + }, + "application/vnd.spotfire.dxp": { + "source": "iana", + "extensions": ["dxp"] + }, + "application/vnd.spotfire.sfs": { + "source": "iana", + "extensions": ["sfs"] + }, + "application/vnd.sqlite3": { + "source": "iana" + }, + "application/vnd.sss-cod": { + "source": "iana" + }, + "application/vnd.sss-dtf": { + "source": "iana" + }, + "application/vnd.sss-ntf": { + "source": "iana" + }, + "application/vnd.stardivision.calc": { + "source": "apache", + "extensions": ["sdc"] + }, + "application/vnd.stardivision.draw": { + "source": "apache", + "extensions": ["sda"] + }, + "application/vnd.stardivision.impress": { + "source": "apache", + "extensions": ["sdd"] + }, + "application/vnd.stardivision.math": { + "source": "apache", + "extensions": ["smf"] + }, + "application/vnd.stardivision.writer": { + "source": "apache", + "extensions": ["sdw","vor"] + }, + "application/vnd.stardivision.writer-global": { + "source": "apache", + "extensions": ["sgl"] + }, + "application/vnd.stepmania.package": { + "source": "iana", + "extensions": ["smzip"] + }, + "application/vnd.stepmania.stepchart": { + "source": "iana", + "extensions": ["sm"] + }, + "application/vnd.street-stream": { + "source": "iana" + }, + "application/vnd.sun.wadl+xml": { + "source": "iana", + "compressible": true, + "extensions": ["wadl"] + }, + "application/vnd.sun.xml.calc": { + "source": "apache", + "extensions": ["sxc"] + }, + "application/vnd.sun.xml.calc.template": { + "source": "apache", + "extensions": ["stc"] + }, + "application/vnd.sun.xml.draw": { + "source": "apache", + "extensions": ["sxd"] + }, + "application/vnd.sun.xml.draw.template": { + "source": "apache", + "extensions": ["std"] + }, + "application/vnd.sun.xml.impress": { + "source": "apache", + "extensions": ["sxi"] + }, + "application/vnd.sun.xml.impress.template": { + "source": "apache", + "extensions": ["sti"] + }, + "application/vnd.sun.xml.math": { + "source": "apache", + "extensions": ["sxm"] + }, + "application/vnd.sun.xml.writer": { + "source": "apache", + "extensions": ["sxw"] + }, + "application/vnd.sun.xml.writer.global": { + "source": "apache", + "extensions": ["sxg"] + }, + "application/vnd.sun.xml.writer.template": { + "source": "apache", + "extensions": ["stw"] + }, + "application/vnd.sus-calendar": { + "source": "iana", + "extensions": ["sus","susp"] + }, + "application/vnd.svd": { + "source": "iana", + "extensions": ["svd"] + }, + "application/vnd.swiftview-ics": { + "source": "iana" + }, + "application/vnd.sycle+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.syft+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.symbian.install": { + "source": "apache", + "extensions": ["sis","sisx"] + }, + "application/vnd.syncml+xml": { + "source": "iana", + "charset": "UTF-8", + "compressible": true, + "extensions": ["xsm"] + }, + "application/vnd.syncml.dm+wbxml": { + "source": "iana", + "charset": "UTF-8", + "extensions": ["bdm"] + }, + "application/vnd.syncml.dm+xml": { + "source": "iana", + "charset": "UTF-8", + "compressible": true, + "extensions": ["xdm"] + }, + "application/vnd.syncml.dm.notification": { + "source": "iana" + 
}, + "application/vnd.syncml.dmddf+wbxml": { + "source": "iana" + }, + "application/vnd.syncml.dmddf+xml": { + "source": "iana", + "charset": "UTF-8", + "compressible": true, + "extensions": ["ddf"] + }, + "application/vnd.syncml.dmtnds+wbxml": { + "source": "iana" + }, + "application/vnd.syncml.dmtnds+xml": { + "source": "iana", + "charset": "UTF-8", + "compressible": true + }, + "application/vnd.syncml.ds.notification": { + "source": "iana" + }, + "application/vnd.tableschema+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.tao.intent-module-archive": { + "source": "iana", + "extensions": ["tao"] + }, + "application/vnd.tcpdump.pcap": { + "source": "iana", + "extensions": ["pcap","cap","dmp"] + }, + "application/vnd.think-cell.ppttc+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.tmd.mediaflex.api+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.tml": { + "source": "iana" + }, + "application/vnd.tmobile-livetv": { + "source": "iana", + "extensions": ["tmo"] + }, + "application/vnd.tri.onesource": { + "source": "iana" + }, + "application/vnd.trid.tpt": { + "source": "iana", + "extensions": ["tpt"] + }, + "application/vnd.triscape.mxs": { + "source": "iana", + "extensions": ["mxs"] + }, + "application/vnd.trueapp": { + "source": "iana", + "extensions": ["tra"] + }, + "application/vnd.truedoc": { + "source": "iana" + }, + "application/vnd.ubisoft.webplayer": { + "source": "iana" + }, + "application/vnd.ufdl": { + "source": "iana", + "extensions": ["ufd","ufdl"] + }, + "application/vnd.uiq.theme": { + "source": "iana", + "extensions": ["utz"] + }, + "application/vnd.umajin": { + "source": "iana", + "extensions": ["umj"] + }, + "application/vnd.unity": { + "source": "iana", + "extensions": ["unityweb"] + }, + "application/vnd.uoml+xml": { + "source": "iana", + "compressible": true, + "extensions": ["uoml"] + }, + "application/vnd.uplanet.alert": { + "source": "iana" + }, + "application/vnd.uplanet.alert-wbxml": { + "source": "iana" + }, + "application/vnd.uplanet.bearer-choice": { + "source": "iana" + }, + "application/vnd.uplanet.bearer-choice-wbxml": { + "source": "iana" + }, + "application/vnd.uplanet.cacheop": { + "source": "iana" + }, + "application/vnd.uplanet.cacheop-wbxml": { + "source": "iana" + }, + "application/vnd.uplanet.channel": { + "source": "iana" + }, + "application/vnd.uplanet.channel-wbxml": { + "source": "iana" + }, + "application/vnd.uplanet.list": { + "source": "iana" + }, + "application/vnd.uplanet.list-wbxml": { + "source": "iana" + }, + "application/vnd.uplanet.listcmd": { + "source": "iana" + }, + "application/vnd.uplanet.listcmd-wbxml": { + "source": "iana" + }, + "application/vnd.uplanet.signal": { + "source": "iana" + }, + "application/vnd.uri-map": { + "source": "iana" + }, + "application/vnd.valve.source.material": { + "source": "iana" + }, + "application/vnd.vcx": { + "source": "iana", + "extensions": ["vcx"] + }, + "application/vnd.vd-study": { + "source": "iana" + }, + "application/vnd.vectorworks": { + "source": "iana" + }, + "application/vnd.vel+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.verimatrix.vcas": { + "source": "iana" + }, + "application/vnd.veritone.aion+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.veryant.thin": { + "source": "iana" + }, + "application/vnd.ves.encrypted": { + "source": "iana" + }, + "application/vnd.vidsoft.vidconference": { + "source": "iana" + }, + "application/vnd.visio": { + 
"source": "iana", + "extensions": ["vsd","vst","vss","vsw"] + }, + "application/vnd.visionary": { + "source": "iana", + "extensions": ["vis"] + }, + "application/vnd.vividence.scriptfile": { + "source": "iana" + }, + "application/vnd.vsf": { + "source": "iana", + "extensions": ["vsf"] + }, + "application/vnd.wap.sic": { + "source": "iana" + }, + "application/vnd.wap.slc": { + "source": "iana" + }, + "application/vnd.wap.wbxml": { + "source": "iana", + "charset": "UTF-8", + "extensions": ["wbxml"] + }, + "application/vnd.wap.wmlc": { + "source": "iana", + "extensions": ["wmlc"] + }, + "application/vnd.wap.wmlscriptc": { + "source": "iana", + "extensions": ["wmlsc"] + }, + "application/vnd.webturbo": { + "source": "iana", + "extensions": ["wtb"] + }, + "application/vnd.wfa.dpp": { + "source": "iana" + }, + "application/vnd.wfa.p2p": { + "source": "iana" + }, + "application/vnd.wfa.wsc": { + "source": "iana" + }, + "application/vnd.windows.devicepairing": { + "source": "iana" + }, + "application/vnd.wmc": { + "source": "iana" + }, + "application/vnd.wmf.bootstrap": { + "source": "iana" + }, + "application/vnd.wolfram.mathematica": { + "source": "iana" + }, + "application/vnd.wolfram.mathematica.package": { + "source": "iana" + }, + "application/vnd.wolfram.player": { + "source": "iana", + "extensions": ["nbp"] + }, + "application/vnd.wordperfect": { + "source": "iana", + "extensions": ["wpd"] + }, + "application/vnd.wqd": { + "source": "iana", + "extensions": ["wqd"] + }, + "application/vnd.wrq-hp3000-labelled": { + "source": "iana" + }, + "application/vnd.wt.stf": { + "source": "iana", + "extensions": ["stf"] + }, + "application/vnd.wv.csp+wbxml": { + "source": "iana" + }, + "application/vnd.wv.csp+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.wv.ssp+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.xacml+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.xara": { + "source": "iana", + "extensions": ["xar"] + }, + "application/vnd.xfdl": { + "source": "iana", + "extensions": ["xfdl"] + }, + "application/vnd.xfdl.webform": { + "source": "iana" + }, + "application/vnd.xmi+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.xmpie.cpkg": { + "source": "iana" + }, + "application/vnd.xmpie.dpkg": { + "source": "iana" + }, + "application/vnd.xmpie.plan": { + "source": "iana" + }, + "application/vnd.xmpie.ppkg": { + "source": "iana" + }, + "application/vnd.xmpie.xlim": { + "source": "iana" + }, + "application/vnd.yamaha.hv-dic": { + "source": "iana", + "extensions": ["hvd"] + }, + "application/vnd.yamaha.hv-script": { + "source": "iana", + "extensions": ["hvs"] + }, + "application/vnd.yamaha.hv-voice": { + "source": "iana", + "extensions": ["hvp"] + }, + "application/vnd.yamaha.openscoreformat": { + "source": "iana", + "extensions": ["osf"] + }, + "application/vnd.yamaha.openscoreformat.osfpvg+xml": { + "source": "iana", + "compressible": true, + "extensions": ["osfpvg"] + }, + "application/vnd.yamaha.remote-setup": { + "source": "iana" + }, + "application/vnd.yamaha.smaf-audio": { + "source": "iana", + "extensions": ["saf"] + }, + "application/vnd.yamaha.smaf-phrase": { + "source": "iana", + "extensions": ["spf"] + }, + "application/vnd.yamaha.through-ngn": { + "source": "iana" + }, + "application/vnd.yamaha.tunnel-udpencap": { + "source": "iana" + }, + "application/vnd.yaoweme": { + "source": "iana" + }, + "application/vnd.yellowriver-custom-menu": { + "source": "iana", + "extensions": ["cmp"] + 
}, + "application/vnd.youtube.yt": { + "source": "iana" + }, + "application/vnd.zul": { + "source": "iana", + "extensions": ["zir","zirz"] + }, + "application/vnd.zzazz.deck+xml": { + "source": "iana", + "compressible": true, + "extensions": ["zaz"] + }, + "application/voicexml+xml": { + "source": "iana", + "compressible": true, + "extensions": ["vxml"] + }, + "application/voucher-cms+json": { + "source": "iana", + "compressible": true + }, + "application/vq-rtcpxr": { + "source": "iana" + }, + "application/wasm": { + "source": "iana", + "compressible": true, + "extensions": ["wasm"] + }, + "application/watcherinfo+xml": { + "source": "iana", + "compressible": true, + "extensions": ["wif"] + }, + "application/webpush-options+json": { + "source": "iana", + "compressible": true + }, + "application/whoispp-query": { + "source": "iana" + }, + "application/whoispp-response": { + "source": "iana" + }, + "application/widget": { + "source": "iana", + "extensions": ["wgt"] + }, + "application/winhlp": { + "source": "apache", + "extensions": ["hlp"] + }, + "application/wita": { + "source": "iana" + }, + "application/wordperfect5.1": { + "source": "iana" + }, + "application/wsdl+xml": { + "source": "iana", + "compressible": true, + "extensions": ["wsdl"] + }, + "application/wspolicy+xml": { + "source": "iana", + "compressible": true, + "extensions": ["wspolicy"] + }, + "application/x-7z-compressed": { + "source": "apache", + "compressible": false, + "extensions": ["7z"] + }, + "application/x-abiword": { + "source": "apache", + "extensions": ["abw"] + }, + "application/x-ace-compressed": { + "source": "apache", + "extensions": ["ace"] + }, + "application/x-amf": { + "source": "apache" + }, + "application/x-apple-diskimage": { + "source": "apache", + "extensions": ["dmg"] + }, + "application/x-arj": { + "compressible": false, + "extensions": ["arj"] + }, + "application/x-authorware-bin": { + "source": "apache", + "extensions": ["aab","x32","u32","vox"] + }, + "application/x-authorware-map": { + "source": "apache", + "extensions": ["aam"] + }, + "application/x-authorware-seg": { + "source": "apache", + "extensions": ["aas"] + }, + "application/x-bcpio": { + "source": "apache", + "extensions": ["bcpio"] + }, + "application/x-bdoc": { + "compressible": false, + "extensions": ["bdoc"] + }, + "application/x-bittorrent": { + "source": "apache", + "extensions": ["torrent"] + }, + "application/x-blorb": { + "source": "apache", + "extensions": ["blb","blorb"] + }, + "application/x-bzip": { + "source": "apache", + "compressible": false, + "extensions": ["bz"] + }, + "application/x-bzip2": { + "source": "apache", + "compressible": false, + "extensions": ["bz2","boz"] + }, + "application/x-cbr": { + "source": "apache", + "extensions": ["cbr","cba","cbt","cbz","cb7"] + }, + "application/x-cdlink": { + "source": "apache", + "extensions": ["vcd"] + }, + "application/x-cfs-compressed": { + "source": "apache", + "extensions": ["cfs"] + }, + "application/x-chat": { + "source": "apache", + "extensions": ["chat"] + }, + "application/x-chess-pgn": { + "source": "apache", + "extensions": ["pgn"] + }, + "application/x-chrome-extension": { + "extensions": ["crx"] + }, + "application/x-cocoa": { + "source": "nginx", + "extensions": ["cco"] + }, + "application/x-compress": { + "source": "apache" + }, + "application/x-conference": { + "source": "apache", + "extensions": ["nsc"] + }, + "application/x-cpio": { + "source": "apache", + "extensions": ["cpio"] + }, + "application/x-csh": { + "source": "apache", + "extensions": ["csh"] 
+ }, + "application/x-deb": { + "compressible": false + }, + "application/x-debian-package": { + "source": "apache", + "extensions": ["deb","udeb"] + }, + "application/x-dgc-compressed": { + "source": "apache", + "extensions": ["dgc"] + }, + "application/x-director": { + "source": "apache", + "extensions": ["dir","dcr","dxr","cst","cct","cxt","w3d","fgd","swa"] + }, + "application/x-doom": { + "source": "apache", + "extensions": ["wad"] + }, + "application/x-dtbncx+xml": { + "source": "apache", + "compressible": true, + "extensions": ["ncx"] + }, + "application/x-dtbook+xml": { + "source": "apache", + "compressible": true, + "extensions": ["dtb"] + }, + "application/x-dtbresource+xml": { + "source": "apache", + "compressible": true, + "extensions": ["res"] + }, + "application/x-dvi": { + "source": "apache", + "compressible": false, + "extensions": ["dvi"] + }, + "application/x-envoy": { + "source": "apache", + "extensions": ["evy"] + }, + "application/x-eva": { + "source": "apache", + "extensions": ["eva"] + }, + "application/x-font-bdf": { + "source": "apache", + "extensions": ["bdf"] + }, + "application/x-font-dos": { + "source": "apache" + }, + "application/x-font-framemaker": { + "source": "apache" + }, + "application/x-font-ghostscript": { + "source": "apache", + "extensions": ["gsf"] + }, + "application/x-font-libgrx": { + "source": "apache" + }, + "application/x-font-linux-psf": { + "source": "apache", + "extensions": ["psf"] + }, + "application/x-font-pcf": { + "source": "apache", + "extensions": ["pcf"] + }, + "application/x-font-snf": { + "source": "apache", + "extensions": ["snf"] + }, + "application/x-font-speedo": { + "source": "apache" + }, + "application/x-font-sunos-news": { + "source": "apache" + }, + "application/x-font-type1": { + "source": "apache", + "extensions": ["pfa","pfb","pfm","afm"] + }, + "application/x-font-vfont": { + "source": "apache" + }, + "application/x-freearc": { + "source": "apache", + "extensions": ["arc"] + }, + "application/x-futuresplash": { + "source": "apache", + "extensions": ["spl"] + }, + "application/x-gca-compressed": { + "source": "apache", + "extensions": ["gca"] + }, + "application/x-glulx": { + "source": "apache", + "extensions": ["ulx"] + }, + "application/x-gnumeric": { + "source": "apache", + "extensions": ["gnumeric"] + }, + "application/x-gramps-xml": { + "source": "apache", + "extensions": ["gramps"] + }, + "application/x-gtar": { + "source": "apache", + "extensions": ["gtar"] + }, + "application/x-gzip": { + "source": "apache" + }, + "application/x-hdf": { + "source": "apache", + "extensions": ["hdf"] + }, + "application/x-httpd-php": { + "compressible": true, + "extensions": ["php"] + }, + "application/x-install-instructions": { + "source": "apache", + "extensions": ["install"] + }, + "application/x-iso9660-image": { + "source": "apache", + "extensions": ["iso"] + }, + "application/x-iwork-keynote-sffkey": { + "extensions": ["key"] + }, + "application/x-iwork-numbers-sffnumbers": { + "extensions": ["numbers"] + }, + "application/x-iwork-pages-sffpages": { + "extensions": ["pages"] + }, + "application/x-java-archive-diff": { + "source": "nginx", + "extensions": ["jardiff"] + }, + "application/x-java-jnlp-file": { + "source": "apache", + "compressible": false, + "extensions": ["jnlp"] + }, + "application/x-javascript": { + "compressible": true + }, + "application/x-keepass2": { + "extensions": ["kdbx"] + }, + "application/x-latex": { + "source": "apache", + "compressible": false, + "extensions": ["latex"] + }, + 
"application/x-lua-bytecode": { + "extensions": ["luac"] + }, + "application/x-lzh-compressed": { + "source": "apache", + "extensions": ["lzh","lha"] + }, + "application/x-makeself": { + "source": "nginx", + "extensions": ["run"] + }, + "application/x-mie": { + "source": "apache", + "extensions": ["mie"] + }, + "application/x-mobipocket-ebook": { + "source": "apache", + "extensions": ["prc","mobi"] + }, + "application/x-mpegurl": { + "compressible": false + }, + "application/x-ms-application": { + "source": "apache", + "extensions": ["application"] + }, + "application/x-ms-shortcut": { + "source": "apache", + "extensions": ["lnk"] + }, + "application/x-ms-wmd": { + "source": "apache", + "extensions": ["wmd"] + }, + "application/x-ms-wmz": { + "source": "apache", + "extensions": ["wmz"] + }, + "application/x-ms-xbap": { + "source": "apache", + "extensions": ["xbap"] + }, + "application/x-msaccess": { + "source": "apache", + "extensions": ["mdb"] + }, + "application/x-msbinder": { + "source": "apache", + "extensions": ["obd"] + }, + "application/x-mscardfile": { + "source": "apache", + "extensions": ["crd"] + }, + "application/x-msclip": { + "source": "apache", + "extensions": ["clp"] + }, + "application/x-msdos-program": { + "extensions": ["exe"] + }, + "application/x-msdownload": { + "source": "apache", + "extensions": ["exe","dll","com","bat","msi"] + }, + "application/x-msmediaview": { + "source": "apache", + "extensions": ["mvb","m13","m14"] + }, + "application/x-msmetafile": { + "source": "apache", + "extensions": ["wmf","wmz","emf","emz"] + }, + "application/x-msmoney": { + "source": "apache", + "extensions": ["mny"] + }, + "application/x-mspublisher": { + "source": "apache", + "extensions": ["pub"] + }, + "application/x-msschedule": { + "source": "apache", + "extensions": ["scd"] + }, + "application/x-msterminal": { + "source": "apache", + "extensions": ["trm"] + }, + "application/x-mswrite": { + "source": "apache", + "extensions": ["wri"] + }, + "application/x-netcdf": { + "source": "apache", + "extensions": ["nc","cdf"] + }, + "application/x-ns-proxy-autoconfig": { + "compressible": true, + "extensions": ["pac"] + }, + "application/x-nzb": { + "source": "apache", + "extensions": ["nzb"] + }, + "application/x-perl": { + "source": "nginx", + "extensions": ["pl","pm"] + }, + "application/x-pilot": { + "source": "nginx", + "extensions": ["prc","pdb"] + }, + "application/x-pkcs12": { + "source": "apache", + "compressible": false, + "extensions": ["p12","pfx"] + }, + "application/x-pkcs7-certificates": { + "source": "apache", + "extensions": ["p7b","spc"] + }, + "application/x-pkcs7-certreqresp": { + "source": "apache", + "extensions": ["p7r"] + }, + "application/x-pki-message": { + "source": "iana" + }, + "application/x-rar-compressed": { + "source": "apache", + "compressible": false, + "extensions": ["rar"] + }, + "application/x-redhat-package-manager": { + "source": "nginx", + "extensions": ["rpm"] + }, + "application/x-research-info-systems": { + "source": "apache", + "extensions": ["ris"] + }, + "application/x-sea": { + "source": "nginx", + "extensions": ["sea"] + }, + "application/x-sh": { + "source": "apache", + "compressible": true, + "extensions": ["sh"] + }, + "application/x-shar": { + "source": "apache", + "extensions": ["shar"] + }, + "application/x-shockwave-flash": { + "source": "apache", + "compressible": false, + "extensions": ["swf"] + }, + "application/x-silverlight-app": { + "source": "apache", + "extensions": ["xap"] + }, + "application/x-sql": { + "source": "apache", 
+ "extensions": ["sql"] + }, + "application/x-stuffit": { + "source": "apache", + "compressible": false, + "extensions": ["sit"] + }, + "application/x-stuffitx": { + "source": "apache", + "extensions": ["sitx"] + }, + "application/x-subrip": { + "source": "apache", + "extensions": ["srt"] + }, + "application/x-sv4cpio": { + "source": "apache", + "extensions": ["sv4cpio"] + }, + "application/x-sv4crc": { + "source": "apache", + "extensions": ["sv4crc"] + }, + "application/x-t3vm-image": { + "source": "apache", + "extensions": ["t3"] + }, + "application/x-tads": { + "source": "apache", + "extensions": ["gam"] + }, + "application/x-tar": { + "source": "apache", + "compressible": true, + "extensions": ["tar"] + }, + "application/x-tcl": { + "source": "apache", + "extensions": ["tcl","tk"] + }, + "application/x-tex": { + "source": "apache", + "extensions": ["tex"] + }, + "application/x-tex-tfm": { + "source": "apache", + "extensions": ["tfm"] + }, + "application/x-texinfo": { + "source": "apache", + "extensions": ["texinfo","texi"] + }, + "application/x-tgif": { + "source": "apache", + "extensions": ["obj"] + }, + "application/x-ustar": { + "source": "apache", + "extensions": ["ustar"] + }, + "application/x-virtualbox-hdd": { + "compressible": true, + "extensions": ["hdd"] + }, + "application/x-virtualbox-ova": { + "compressible": true, + "extensions": ["ova"] + }, + "application/x-virtualbox-ovf": { + "compressible": true, + "extensions": ["ovf"] + }, + "application/x-virtualbox-vbox": { + "compressible": true, + "extensions": ["vbox"] + }, + "application/x-virtualbox-vbox-extpack": { + "compressible": false, + "extensions": ["vbox-extpack"] + }, + "application/x-virtualbox-vdi": { + "compressible": true, + "extensions": ["vdi"] + }, + "application/x-virtualbox-vhd": { + "compressible": true, + "extensions": ["vhd"] + }, + "application/x-virtualbox-vmdk": { + "compressible": true, + "extensions": ["vmdk"] + }, + "application/x-wais-source": { + "source": "apache", + "extensions": ["src"] + }, + "application/x-web-app-manifest+json": { + "compressible": true, + "extensions": ["webapp"] + }, + "application/x-www-form-urlencoded": { + "source": "iana", + "compressible": true + }, + "application/x-x509-ca-cert": { + "source": "iana", + "extensions": ["der","crt","pem"] + }, + "application/x-x509-ca-ra-cert": { + "source": "iana" + }, + "application/x-x509-next-ca-cert": { + "source": "iana" + }, + "application/x-xfig": { + "source": "apache", + "extensions": ["fig"] + }, + "application/x-xliff+xml": { + "source": "apache", + "compressible": true, + "extensions": ["xlf"] + }, + "application/x-xpinstall": { + "source": "apache", + "compressible": false, + "extensions": ["xpi"] + }, + "application/x-xz": { + "source": "apache", + "extensions": ["xz"] + }, + "application/x-zmachine": { + "source": "apache", + "extensions": ["z1","z2","z3","z4","z5","z6","z7","z8"] + }, + "application/x400-bp": { + "source": "iana" + }, + "application/xacml+xml": { + "source": "iana", + "compressible": true + }, + "application/xaml+xml": { + "source": "apache", + "compressible": true, + "extensions": ["xaml"] + }, + "application/xcap-att+xml": { + "source": "iana", + "compressible": true, + "extensions": ["xav"] + }, + "application/xcap-caps+xml": { + "source": "iana", + "compressible": true, + "extensions": ["xca"] + }, + "application/xcap-diff+xml": { + "source": "iana", + "compressible": true, + "extensions": ["xdf"] + }, + "application/xcap-el+xml": { + "source": "iana", + "compressible": true, + "extensions": 
["xel"] + }, + "application/xcap-error+xml": { + "source": "iana", + "compressible": true + }, + "application/xcap-ns+xml": { + "source": "iana", + "compressible": true, + "extensions": ["xns"] + }, + "application/xcon-conference-info+xml": { + "source": "iana", + "compressible": true + }, + "application/xcon-conference-info-diff+xml": { + "source": "iana", + "compressible": true + }, + "application/xenc+xml": { + "source": "iana", + "compressible": true, + "extensions": ["xenc"] + }, + "application/xhtml+xml": { + "source": "iana", + "compressible": true, + "extensions": ["xhtml","xht"] + }, + "application/xhtml-voice+xml": { + "source": "apache", + "compressible": true + }, + "application/xliff+xml": { + "source": "iana", + "compressible": true, + "extensions": ["xlf"] + }, + "application/xml": { + "source": "iana", + "compressible": true, + "extensions": ["xml","xsl","xsd","rng"] + }, + "application/xml-dtd": { + "source": "iana", + "compressible": true, + "extensions": ["dtd"] + }, + "application/xml-external-parsed-entity": { + "source": "iana" + }, + "application/xml-patch+xml": { + "source": "iana", + "compressible": true + }, + "application/xmpp+xml": { + "source": "iana", + "compressible": true + }, + "application/xop+xml": { + "source": "iana", + "compressible": true, + "extensions": ["xop"] + }, + "application/xproc+xml": { + "source": "apache", + "compressible": true, + "extensions": ["xpl"] + }, + "application/xslt+xml": { + "source": "iana", + "compressible": true, + "extensions": ["xsl","xslt"] + }, + "application/xspf+xml": { + "source": "apache", + "compressible": true, + "extensions": ["xspf"] + }, + "application/xv+xml": { + "source": "iana", + "compressible": true, + "extensions": ["mxml","xhvml","xvml","xvm"] + }, + "application/yang": { + "source": "iana", + "extensions": ["yang"] + }, + "application/yang-data+json": { + "source": "iana", + "compressible": true + }, + "application/yang-data+xml": { + "source": "iana", + "compressible": true + }, + "application/yang-patch+json": { + "source": "iana", + "compressible": true + }, + "application/yang-patch+xml": { + "source": "iana", + "compressible": true + }, + "application/yin+xml": { + "source": "iana", + "compressible": true, + "extensions": ["yin"] + }, + "application/zip": { + "source": "iana", + "compressible": false, + "extensions": ["zip"] + }, + "application/zlib": { + "source": "iana" + }, + "application/zstd": { + "source": "iana" + }, + "audio/1d-interleaved-parityfec": { + "source": "iana" + }, + "audio/32kadpcm": { + "source": "iana" + }, + "audio/3gpp": { + "source": "iana", + "compressible": false, + "extensions": ["3gpp"] + }, + "audio/3gpp2": { + "source": "iana" + }, + "audio/aac": { + "source": "iana" + }, + "audio/ac3": { + "source": "iana" + }, + "audio/adpcm": { + "source": "apache", + "extensions": ["adp"] + }, + "audio/amr": { + "source": "iana", + "extensions": ["amr"] + }, + "audio/amr-wb": { + "source": "iana" + }, + "audio/amr-wb+": { + "source": "iana" + }, + "audio/aptx": { + "source": "iana" + }, + "audio/asc": { + "source": "iana" + }, + "audio/atrac-advanced-lossless": { + "source": "iana" + }, + "audio/atrac-x": { + "source": "iana" + }, + "audio/atrac3": { + "source": "iana" + }, + "audio/basic": { + "source": "iana", + "compressible": false, + "extensions": ["au","snd"] + }, + "audio/bv16": { + "source": "iana" + }, + "audio/bv32": { + "source": "iana" + }, + "audio/clearmode": { + "source": "iana" + }, + "audio/cn": { + "source": "iana" + }, + "audio/dat12": { + "source": "iana" + 
}, + "audio/dls": { + "source": "iana" + }, + "audio/dsr-es201108": { + "source": "iana" + }, + "audio/dsr-es202050": { + "source": "iana" + }, + "audio/dsr-es202211": { + "source": "iana" + }, + "audio/dsr-es202212": { + "source": "iana" + }, + "audio/dv": { + "source": "iana" + }, + "audio/dvi4": { + "source": "iana" + }, + "audio/eac3": { + "source": "iana" + }, + "audio/encaprtp": { + "source": "iana" + }, + "audio/evrc": { + "source": "iana" + }, + "audio/evrc-qcp": { + "source": "iana" + }, + "audio/evrc0": { + "source": "iana" + }, + "audio/evrc1": { + "source": "iana" + }, + "audio/evrcb": { + "source": "iana" + }, + "audio/evrcb0": { + "source": "iana" + }, + "audio/evrcb1": { + "source": "iana" + }, + "audio/evrcnw": { + "source": "iana" + }, + "audio/evrcnw0": { + "source": "iana" + }, + "audio/evrcnw1": { + "source": "iana" + }, + "audio/evrcwb": { + "source": "iana" + }, + "audio/evrcwb0": { + "source": "iana" + }, + "audio/evrcwb1": { + "source": "iana" + }, + "audio/evs": { + "source": "iana" + }, + "audio/flexfec": { + "source": "iana" + }, + "audio/fwdred": { + "source": "iana" + }, + "audio/g711-0": { + "source": "iana" + }, + "audio/g719": { + "source": "iana" + }, + "audio/g722": { + "source": "iana" + }, + "audio/g7221": { + "source": "iana" + }, + "audio/g723": { + "source": "iana" + }, + "audio/g726-16": { + "source": "iana" + }, + "audio/g726-24": { + "source": "iana" + }, + "audio/g726-32": { + "source": "iana" + }, + "audio/g726-40": { + "source": "iana" + }, + "audio/g728": { + "source": "iana" + }, + "audio/g729": { + "source": "iana" + }, + "audio/g7291": { + "source": "iana" + }, + "audio/g729d": { + "source": "iana" + }, + "audio/g729e": { + "source": "iana" + }, + "audio/gsm": { + "source": "iana" + }, + "audio/gsm-efr": { + "source": "iana" + }, + "audio/gsm-hr-08": { + "source": "iana" + }, + "audio/ilbc": { + "source": "iana" + }, + "audio/ip-mr_v2.5": { + "source": "iana" + }, + "audio/isac": { + "source": "apache" + }, + "audio/l16": { + "source": "iana" + }, + "audio/l20": { + "source": "iana" + }, + "audio/l24": { + "source": "iana", + "compressible": false + }, + "audio/l8": { + "source": "iana" + }, + "audio/lpc": { + "source": "iana" + }, + "audio/melp": { + "source": "iana" + }, + "audio/melp1200": { + "source": "iana" + }, + "audio/melp2400": { + "source": "iana" + }, + "audio/melp600": { + "source": "iana" + }, + "audio/mhas": { + "source": "iana" + }, + "audio/midi": { + "source": "apache", + "extensions": ["mid","midi","kar","rmi"] + }, + "audio/mobile-xmf": { + "source": "iana", + "extensions": ["mxmf"] + }, + "audio/mp3": { + "compressible": false, + "extensions": ["mp3"] + }, + "audio/mp4": { + "source": "iana", + "compressible": false, + "extensions": ["m4a","mp4a"] + }, + "audio/mp4a-latm": { + "source": "iana" + }, + "audio/mpa": { + "source": "iana" + }, + "audio/mpa-robust": { + "source": "iana" + }, + "audio/mpeg": { + "source": "iana", + "compressible": false, + "extensions": ["mpga","mp2","mp2a","mp3","m2a","m3a"] + }, + "audio/mpeg4-generic": { + "source": "iana" + }, + "audio/musepack": { + "source": "apache" + }, + "audio/ogg": { + "source": "iana", + "compressible": false, + "extensions": ["oga","ogg","spx","opus"] + }, + "audio/opus": { + "source": "iana" + }, + "audio/parityfec": { + "source": "iana" + }, + "audio/pcma": { + "source": "iana" + }, + "audio/pcma-wb": { + "source": "iana" + }, + "audio/pcmu": { + "source": "iana" + }, + "audio/pcmu-wb": { + "source": "iana" + }, + "audio/prs.sid": { + "source": "iana" + }, + 
"audio/qcelp": { + "source": "iana" + }, + "audio/raptorfec": { + "source": "iana" + }, + "audio/red": { + "source": "iana" + }, + "audio/rtp-enc-aescm128": { + "source": "iana" + }, + "audio/rtp-midi": { + "source": "iana" + }, + "audio/rtploopback": { + "source": "iana" + }, + "audio/rtx": { + "source": "iana" + }, + "audio/s3m": { + "source": "apache", + "extensions": ["s3m"] + }, + "audio/scip": { + "source": "iana" + }, + "audio/silk": { + "source": "apache", + "extensions": ["sil"] + }, + "audio/smv": { + "source": "iana" + }, + "audio/smv-qcp": { + "source": "iana" + }, + "audio/smv0": { + "source": "iana" + }, + "audio/sofa": { + "source": "iana" + }, + "audio/sp-midi": { + "source": "iana" + }, + "audio/speex": { + "source": "iana" + }, + "audio/t140c": { + "source": "iana" + }, + "audio/t38": { + "source": "iana" + }, + "audio/telephone-event": { + "source": "iana" + }, + "audio/tetra_acelp": { + "source": "iana" + }, + "audio/tetra_acelp_bb": { + "source": "iana" + }, + "audio/tone": { + "source": "iana" + }, + "audio/tsvcis": { + "source": "iana" + }, + "audio/uemclip": { + "source": "iana" + }, + "audio/ulpfec": { + "source": "iana" + }, + "audio/usac": { + "source": "iana" + }, + "audio/vdvi": { + "source": "iana" + }, + "audio/vmr-wb": { + "source": "iana" + }, + "audio/vnd.3gpp.iufp": { + "source": "iana" + }, + "audio/vnd.4sb": { + "source": "iana" + }, + "audio/vnd.audiokoz": { + "source": "iana" + }, + "audio/vnd.celp": { + "source": "iana" + }, + "audio/vnd.cisco.nse": { + "source": "iana" + }, + "audio/vnd.cmles.radio-events": { + "source": "iana" + }, + "audio/vnd.cns.anp1": { + "source": "iana" + }, + "audio/vnd.cns.inf1": { + "source": "iana" + }, + "audio/vnd.dece.audio": { + "source": "iana", + "extensions": ["uva","uvva"] + }, + "audio/vnd.digital-winds": { + "source": "iana", + "extensions": ["eol"] + }, + "audio/vnd.dlna.adts": { + "source": "iana" + }, + "audio/vnd.dolby.heaac.1": { + "source": "iana" + }, + "audio/vnd.dolby.heaac.2": { + "source": "iana" + }, + "audio/vnd.dolby.mlp": { + "source": "iana" + }, + "audio/vnd.dolby.mps": { + "source": "iana" + }, + "audio/vnd.dolby.pl2": { + "source": "iana" + }, + "audio/vnd.dolby.pl2x": { + "source": "iana" + }, + "audio/vnd.dolby.pl2z": { + "source": "iana" + }, + "audio/vnd.dolby.pulse.1": { + "source": "iana" + }, + "audio/vnd.dra": { + "source": "iana", + "extensions": ["dra"] + }, + "audio/vnd.dts": { + "source": "iana", + "extensions": ["dts"] + }, + "audio/vnd.dts.hd": { + "source": "iana", + "extensions": ["dtshd"] + }, + "audio/vnd.dts.uhd": { + "source": "iana" + }, + "audio/vnd.dvb.file": { + "source": "iana" + }, + "audio/vnd.everad.plj": { + "source": "iana" + }, + "audio/vnd.hns.audio": { + "source": "iana" + }, + "audio/vnd.lucent.voice": { + "source": "iana", + "extensions": ["lvp"] + }, + "audio/vnd.ms-playready.media.pya": { + "source": "iana", + "extensions": ["pya"] + }, + "audio/vnd.nokia.mobile-xmf": { + "source": "iana" + }, + "audio/vnd.nortel.vbk": { + "source": "iana" + }, + "audio/vnd.nuera.ecelp4800": { + "source": "iana", + "extensions": ["ecelp4800"] + }, + "audio/vnd.nuera.ecelp7470": { + "source": "iana", + "extensions": ["ecelp7470"] + }, + "audio/vnd.nuera.ecelp9600": { + "source": "iana", + "extensions": ["ecelp9600"] + }, + "audio/vnd.octel.sbc": { + "source": "iana" + }, + "audio/vnd.presonus.multitrack": { + "source": "iana" + }, + "audio/vnd.qcelp": { + "source": "iana" + }, + "audio/vnd.rhetorex.32kadpcm": { + "source": "iana" + }, + "audio/vnd.rip": { + "source": "iana", 
+ "extensions": ["rip"] + }, + "audio/vnd.rn-realaudio": { + "compressible": false + }, + "audio/vnd.sealedmedia.softseal.mpeg": { + "source": "iana" + }, + "audio/vnd.vmx.cvsd": { + "source": "iana" + }, + "audio/vnd.wave": { + "compressible": false + }, + "audio/vorbis": { + "source": "iana", + "compressible": false + }, + "audio/vorbis-config": { + "source": "iana" + }, + "audio/wav": { + "compressible": false, + "extensions": ["wav"] + }, + "audio/wave": { + "compressible": false, + "extensions": ["wav"] + }, + "audio/webm": { + "source": "apache", + "compressible": false, + "extensions": ["weba"] + }, + "audio/x-aac": { + "source": "apache", + "compressible": false, + "extensions": ["aac"] + }, + "audio/x-aiff": { + "source": "apache", + "extensions": ["aif","aiff","aifc"] + }, + "audio/x-caf": { + "source": "apache", + "compressible": false, + "extensions": ["caf"] + }, + "audio/x-flac": { + "source": "apache", + "extensions": ["flac"] + }, + "audio/x-m4a": { + "source": "nginx", + "extensions": ["m4a"] + }, + "audio/x-matroska": { + "source": "apache", + "extensions": ["mka"] + }, + "audio/x-mpegurl": { + "source": "apache", + "extensions": ["m3u"] + }, + "audio/x-ms-wax": { + "source": "apache", + "extensions": ["wax"] + }, + "audio/x-ms-wma": { + "source": "apache", + "extensions": ["wma"] + }, + "audio/x-pn-realaudio": { + "source": "apache", + "extensions": ["ram","ra"] + }, + "audio/x-pn-realaudio-plugin": { + "source": "apache", + "extensions": ["rmp"] + }, + "audio/x-realaudio": { + "source": "nginx", + "extensions": ["ra"] + }, + "audio/x-tta": { + "source": "apache" + }, + "audio/x-wav": { + "source": "apache", + "extensions": ["wav"] + }, + "audio/xm": { + "source": "apache", + "extensions": ["xm"] + }, + "chemical/x-cdx": { + "source": "apache", + "extensions": ["cdx"] + }, + "chemical/x-cif": { + "source": "apache", + "extensions": ["cif"] + }, + "chemical/x-cmdf": { + "source": "apache", + "extensions": ["cmdf"] + }, + "chemical/x-cml": { + "source": "apache", + "extensions": ["cml"] + }, + "chemical/x-csml": { + "source": "apache", + "extensions": ["csml"] + }, + "chemical/x-pdb": { + "source": "apache" + }, + "chemical/x-xyz": { + "source": "apache", + "extensions": ["xyz"] + }, + "font/collection": { + "source": "iana", + "extensions": ["ttc"] + }, + "font/otf": { + "source": "iana", + "compressible": true, + "extensions": ["otf"] + }, + "font/sfnt": { + "source": "iana" + }, + "font/ttf": { + "source": "iana", + "compressible": true, + "extensions": ["ttf"] + }, + "font/woff": { + "source": "iana", + "extensions": ["woff"] + }, + "font/woff2": { + "source": "iana", + "extensions": ["woff2"] + }, + "image/aces": { + "source": "iana", + "extensions": ["exr"] + }, + "image/apng": { + "compressible": false, + "extensions": ["apng"] + }, + "image/avci": { + "source": "iana", + "extensions": ["avci"] + }, + "image/avcs": { + "source": "iana", + "extensions": ["avcs"] + }, + "image/avif": { + "source": "iana", + "compressible": false, + "extensions": ["avif"] + }, + "image/bmp": { + "source": "iana", + "compressible": true, + "extensions": ["bmp"] + }, + "image/cgm": { + "source": "iana", + "extensions": ["cgm"] + }, + "image/dicom-rle": { + "source": "iana", + "extensions": ["drle"] + }, + "image/emf": { + "source": "iana", + "extensions": ["emf"] + }, + "image/fits": { + "source": "iana", + "extensions": ["fits"] + }, + "image/g3fax": { + "source": "iana", + "extensions": ["g3"] + }, + "image/gif": { + "source": "iana", + "compressible": false, + "extensions": ["gif"] + 
}, + "image/heic": { + "source": "iana", + "extensions": ["heic"] + }, + "image/heic-sequence": { + "source": "iana", + "extensions": ["heics"] + }, + "image/heif": { + "source": "iana", + "extensions": ["heif"] + }, + "image/heif-sequence": { + "source": "iana", + "extensions": ["heifs"] + }, + "image/hej2k": { + "source": "iana", + "extensions": ["hej2"] + }, + "image/hsj2": { + "source": "iana", + "extensions": ["hsj2"] + }, + "image/ief": { + "source": "iana", + "extensions": ["ief"] + }, + "image/jls": { + "source": "iana", + "extensions": ["jls"] + }, + "image/jp2": { + "source": "iana", + "compressible": false, + "extensions": ["jp2","jpg2"] + }, + "image/jpeg": { + "source": "iana", + "compressible": false, + "extensions": ["jpeg","jpg","jpe"] + }, + "image/jph": { + "source": "iana", + "extensions": ["jph"] + }, + "image/jphc": { + "source": "iana", + "extensions": ["jhc"] + }, + "image/jpm": { + "source": "iana", + "compressible": false, + "extensions": ["jpm"] + }, + "image/jpx": { + "source": "iana", + "compressible": false, + "extensions": ["jpx","jpf"] + }, + "image/jxr": { + "source": "iana", + "extensions": ["jxr"] + }, + "image/jxra": { + "source": "iana", + "extensions": ["jxra"] + }, + "image/jxrs": { + "source": "iana", + "extensions": ["jxrs"] + }, + "image/jxs": { + "source": "iana", + "extensions": ["jxs"] + }, + "image/jxsc": { + "source": "iana", + "extensions": ["jxsc"] + }, + "image/jxsi": { + "source": "iana", + "extensions": ["jxsi"] + }, + "image/jxss": { + "source": "iana", + "extensions": ["jxss"] + }, + "image/ktx": { + "source": "iana", + "extensions": ["ktx"] + }, + "image/ktx2": { + "source": "iana", + "extensions": ["ktx2"] + }, + "image/naplps": { + "source": "iana" + }, + "image/pjpeg": { + "compressible": false + }, + "image/png": { + "source": "iana", + "compressible": false, + "extensions": ["png"] + }, + "image/prs.btif": { + "source": "iana", + "extensions": ["btif"] + }, + "image/prs.pti": { + "source": "iana", + "extensions": ["pti"] + }, + "image/pwg-raster": { + "source": "iana" + }, + "image/sgi": { + "source": "apache", + "extensions": ["sgi"] + }, + "image/svg+xml": { + "source": "iana", + "compressible": true, + "extensions": ["svg","svgz"] + }, + "image/t38": { + "source": "iana", + "extensions": ["t38"] + }, + "image/tiff": { + "source": "iana", + "compressible": false, + "extensions": ["tif","tiff"] + }, + "image/tiff-fx": { + "source": "iana", + "extensions": ["tfx"] + }, + "image/vnd.adobe.photoshop": { + "source": "iana", + "compressible": true, + "extensions": ["psd"] + }, + "image/vnd.airzip.accelerator.azv": { + "source": "iana", + "extensions": ["azv"] + }, + "image/vnd.cns.inf2": { + "source": "iana" + }, + "image/vnd.dece.graphic": { + "source": "iana", + "extensions": ["uvi","uvvi","uvg","uvvg"] + }, + "image/vnd.djvu": { + "source": "iana", + "extensions": ["djvu","djv"] + }, + "image/vnd.dvb.subtitle": { + "source": "iana", + "extensions": ["sub"] + }, + "image/vnd.dwg": { + "source": "iana", + "extensions": ["dwg"] + }, + "image/vnd.dxf": { + "source": "iana", + "extensions": ["dxf"] + }, + "image/vnd.fastbidsheet": { + "source": "iana", + "extensions": ["fbs"] + }, + "image/vnd.fpx": { + "source": "iana", + "extensions": ["fpx"] + }, + "image/vnd.fst": { + "source": "iana", + "extensions": ["fst"] + }, + "image/vnd.fujixerox.edmics-mmr": { + "source": "iana", + "extensions": ["mmr"] + }, + "image/vnd.fujixerox.edmics-rlc": { + "source": "iana", + "extensions": ["rlc"] + }, + "image/vnd.globalgraphics.pgb": { + "source": 
"iana" + }, + "image/vnd.microsoft.icon": { + "source": "iana", + "compressible": true, + "extensions": ["ico"] + }, + "image/vnd.mix": { + "source": "iana" + }, + "image/vnd.mozilla.apng": { + "source": "iana" + }, + "image/vnd.ms-dds": { + "compressible": true, + "extensions": ["dds"] + }, + "image/vnd.ms-modi": { + "source": "iana", + "extensions": ["mdi"] + }, + "image/vnd.ms-photo": { + "source": "apache", + "extensions": ["wdp"] + }, + "image/vnd.net-fpx": { + "source": "iana", + "extensions": ["npx"] + }, + "image/vnd.pco.b16": { + "source": "iana", + "extensions": ["b16"] + }, + "image/vnd.radiance": { + "source": "iana" + }, + "image/vnd.sealed.png": { + "source": "iana" + }, + "image/vnd.sealedmedia.softseal.gif": { + "source": "iana" + }, + "image/vnd.sealedmedia.softseal.jpg": { + "source": "iana" + }, + "image/vnd.svf": { + "source": "iana" + }, + "image/vnd.tencent.tap": { + "source": "iana", + "extensions": ["tap"] + }, + "image/vnd.valve.source.texture": { + "source": "iana", + "extensions": ["vtf"] + }, + "image/vnd.wap.wbmp": { + "source": "iana", + "extensions": ["wbmp"] + }, + "image/vnd.xiff": { + "source": "iana", + "extensions": ["xif"] + }, + "image/vnd.zbrush.pcx": { + "source": "iana", + "extensions": ["pcx"] + }, + "image/webp": { + "source": "apache", + "extensions": ["webp"] + }, + "image/wmf": { + "source": "iana", + "extensions": ["wmf"] + }, + "image/x-3ds": { + "source": "apache", + "extensions": ["3ds"] + }, + "image/x-cmu-raster": { + "source": "apache", + "extensions": ["ras"] + }, + "image/x-cmx": { + "source": "apache", + "extensions": ["cmx"] + }, + "image/x-freehand": { + "source": "apache", + "extensions": ["fh","fhc","fh4","fh5","fh7"] + }, + "image/x-icon": { + "source": "apache", + "compressible": true, + "extensions": ["ico"] + }, + "image/x-jng": { + "source": "nginx", + "extensions": ["jng"] + }, + "image/x-mrsid-image": { + "source": "apache", + "extensions": ["sid"] + }, + "image/x-ms-bmp": { + "source": "nginx", + "compressible": true, + "extensions": ["bmp"] + }, + "image/x-pcx": { + "source": "apache", + "extensions": ["pcx"] + }, + "image/x-pict": { + "source": "apache", + "extensions": ["pic","pct"] + }, + "image/x-portable-anymap": { + "source": "apache", + "extensions": ["pnm"] + }, + "image/x-portable-bitmap": { + "source": "apache", + "extensions": ["pbm"] + }, + "image/x-portable-graymap": { + "source": "apache", + "extensions": ["pgm"] + }, + "image/x-portable-pixmap": { + "source": "apache", + "extensions": ["ppm"] + }, + "image/x-rgb": { + "source": "apache", + "extensions": ["rgb"] + }, + "image/x-tga": { + "source": "apache", + "extensions": ["tga"] + }, + "image/x-xbitmap": { + "source": "apache", + "extensions": ["xbm"] + }, + "image/x-xcf": { + "compressible": false + }, + "image/x-xpixmap": { + "source": "apache", + "extensions": ["xpm"] + }, + "image/x-xwindowdump": { + "source": "apache", + "extensions": ["xwd"] + }, + "message/cpim": { + "source": "iana" + }, + "message/delivery-status": { + "source": "iana" + }, + "message/disposition-notification": { + "source": "iana", + "extensions": [ + "disposition-notification" + ] + }, + "message/external-body": { + "source": "iana" + }, + "message/feedback-report": { + "source": "iana" + }, + "message/global": { + "source": "iana", + "extensions": ["u8msg"] + }, + "message/global-delivery-status": { + "source": "iana", + "extensions": ["u8dsn"] + }, + "message/global-disposition-notification": { + "source": "iana", + "extensions": ["u8mdn"] + }, + "message/global-headers": { + 
"source": "iana", + "extensions": ["u8hdr"] + }, + "message/http": { + "source": "iana", + "compressible": false + }, + "message/imdn+xml": { + "source": "iana", + "compressible": true + }, + "message/news": { + "source": "iana" + }, + "message/partial": { + "source": "iana", + "compressible": false + }, + "message/rfc822": { + "source": "iana", + "compressible": true, + "extensions": ["eml","mime"] + }, + "message/s-http": { + "source": "iana" + }, + "message/sip": { + "source": "iana" + }, + "message/sipfrag": { + "source": "iana" + }, + "message/tracking-status": { + "source": "iana" + }, + "message/vnd.si.simp": { + "source": "iana" + }, + "message/vnd.wfa.wsc": { + "source": "iana", + "extensions": ["wsc"] + }, + "model/3mf": { + "source": "iana", + "extensions": ["3mf"] + }, + "model/e57": { + "source": "iana" + }, + "model/gltf+json": { + "source": "iana", + "compressible": true, + "extensions": ["gltf"] + }, + "model/gltf-binary": { + "source": "iana", + "compressible": true, + "extensions": ["glb"] + }, + "model/iges": { + "source": "iana", + "compressible": false, + "extensions": ["igs","iges"] + }, + "model/mesh": { + "source": "iana", + "compressible": false, + "extensions": ["msh","mesh","silo"] + }, + "model/mtl": { + "source": "iana", + "extensions": ["mtl"] + }, + "model/obj": { + "source": "iana", + "extensions": ["obj"] + }, + "model/step": { + "source": "iana" + }, + "model/step+xml": { + "source": "iana", + "compressible": true, + "extensions": ["stpx"] + }, + "model/step+zip": { + "source": "iana", + "compressible": false, + "extensions": ["stpz"] + }, + "model/step-xml+zip": { + "source": "iana", + "compressible": false, + "extensions": ["stpxz"] + }, + "model/stl": { + "source": "iana", + "extensions": ["stl"] + }, + "model/vnd.collada+xml": { + "source": "iana", + "compressible": true, + "extensions": ["dae"] + }, + "model/vnd.dwf": { + "source": "iana", + "extensions": ["dwf"] + }, + "model/vnd.flatland.3dml": { + "source": "iana" + }, + "model/vnd.gdl": { + "source": "iana", + "extensions": ["gdl"] + }, + "model/vnd.gs-gdl": { + "source": "apache" + }, + "model/vnd.gs.gdl": { + "source": "iana" + }, + "model/vnd.gtw": { + "source": "iana", + "extensions": ["gtw"] + }, + "model/vnd.moml+xml": { + "source": "iana", + "compressible": true + }, + "model/vnd.mts": { + "source": "iana", + "extensions": ["mts"] + }, + "model/vnd.opengex": { + "source": "iana", + "extensions": ["ogex"] + }, + "model/vnd.parasolid.transmit.binary": { + "source": "iana", + "extensions": ["x_b"] + }, + "model/vnd.parasolid.transmit.text": { + "source": "iana", + "extensions": ["x_t"] + }, + "model/vnd.pytha.pyox": { + "source": "iana" + }, + "model/vnd.rosette.annotated-data-model": { + "source": "iana" + }, + "model/vnd.sap.vds": { + "source": "iana", + "extensions": ["vds"] + }, + "model/vnd.usdz+zip": { + "source": "iana", + "compressible": false, + "extensions": ["usdz"] + }, + "model/vnd.valve.source.compiled-map": { + "source": "iana", + "extensions": ["bsp"] + }, + "model/vnd.vtu": { + "source": "iana", + "extensions": ["vtu"] + }, + "model/vrml": { + "source": "iana", + "compressible": false, + "extensions": ["wrl","vrml"] + }, + "model/x3d+binary": { + "source": "apache", + "compressible": false, + "extensions": ["x3db","x3dbz"] + }, + "model/x3d+fastinfoset": { + "source": "iana", + "extensions": ["x3db"] + }, + "model/x3d+vrml": { + "source": "apache", + "compressible": false, + "extensions": ["x3dv","x3dvz"] + }, + "model/x3d+xml": { + "source": "iana", + "compressible": true, + 
"extensions": ["x3d","x3dz"] + }, + "model/x3d-vrml": { + "source": "iana", + "extensions": ["x3dv"] + }, + "multipart/alternative": { + "source": "iana", + "compressible": false + }, + "multipart/appledouble": { + "source": "iana" + }, + "multipart/byteranges": { + "source": "iana" + }, + "multipart/digest": { + "source": "iana" + }, + "multipart/encrypted": { + "source": "iana", + "compressible": false + }, + "multipart/form-data": { + "source": "iana", + "compressible": false + }, + "multipart/header-set": { + "source": "iana" + }, + "multipart/mixed": { + "source": "iana" + }, + "multipart/multilingual": { + "source": "iana" + }, + "multipart/parallel": { + "source": "iana" + }, + "multipart/related": { + "source": "iana", + "compressible": false + }, + "multipart/report": { + "source": "iana" + }, + "multipart/signed": { + "source": "iana", + "compressible": false + }, + "multipart/vnd.bint.med-plus": { + "source": "iana" + }, + "multipart/voice-message": { + "source": "iana" + }, + "multipart/x-mixed-replace": { + "source": "iana" + }, + "text/1d-interleaved-parityfec": { + "source": "iana" + }, + "text/cache-manifest": { + "source": "iana", + "compressible": true, + "extensions": ["appcache","manifest"] + }, + "text/calendar": { + "source": "iana", + "extensions": ["ics","ifb"] + }, + "text/calender": { + "compressible": true + }, + "text/cmd": { + "compressible": true + }, + "text/coffeescript": { + "extensions": ["coffee","litcoffee"] + }, + "text/cql": { + "source": "iana" + }, + "text/cql-expression": { + "source": "iana" + }, + "text/cql-identifier": { + "source": "iana" + }, + "text/css": { + "source": "iana", + "charset": "UTF-8", + "compressible": true, + "extensions": ["css"] + }, + "text/csv": { + "source": "iana", + "compressible": true, + "extensions": ["csv"] + }, + "text/csv-schema": { + "source": "iana" + }, + "text/directory": { + "source": "iana" + }, + "text/dns": { + "source": "iana" + }, + "text/ecmascript": { + "source": "iana" + }, + "text/encaprtp": { + "source": "iana" + }, + "text/enriched": { + "source": "iana" + }, + "text/fhirpath": { + "source": "iana" + }, + "text/flexfec": { + "source": "iana" + }, + "text/fwdred": { + "source": "iana" + }, + "text/gff3": { + "source": "iana" + }, + "text/grammar-ref-list": { + "source": "iana" + }, + "text/html": { + "source": "iana", + "compressible": true, + "extensions": ["html","htm","shtml"] + }, + "text/jade": { + "extensions": ["jade"] + }, + "text/javascript": { + "source": "iana", + "compressible": true + }, + "text/jcr-cnd": { + "source": "iana" + }, + "text/jsx": { + "compressible": true, + "extensions": ["jsx"] + }, + "text/less": { + "compressible": true, + "extensions": ["less"] + }, + "text/markdown": { + "source": "iana", + "compressible": true, + "extensions": ["markdown","md"] + }, + "text/mathml": { + "source": "nginx", + "extensions": ["mml"] + }, + "text/mdx": { + "compressible": true, + "extensions": ["mdx"] + }, + "text/mizar": { + "source": "iana" + }, + "text/n3": { + "source": "iana", + "charset": "UTF-8", + "compressible": true, + "extensions": ["n3"] + }, + "text/parameters": { + "source": "iana", + "charset": "UTF-8" + }, + "text/parityfec": { + "source": "iana" + }, + "text/plain": { + "source": "iana", + "compressible": true, + "extensions": ["txt","text","conf","def","list","log","in","ini"] + }, + "text/provenance-notation": { + "source": "iana", + "charset": "UTF-8" + }, + "text/prs.fallenstein.rst": { + "source": "iana" + }, + "text/prs.lines.tag": { + "source": "iana", + 
"extensions": ["dsc"] + }, + "text/prs.prop.logic": { + "source": "iana" + }, + "text/raptorfec": { + "source": "iana" + }, + "text/red": { + "source": "iana" + }, + "text/rfc822-headers": { + "source": "iana" + }, + "text/richtext": { + "source": "iana", + "compressible": true, + "extensions": ["rtx"] + }, + "text/rtf": { + "source": "iana", + "compressible": true, + "extensions": ["rtf"] + }, + "text/rtp-enc-aescm128": { + "source": "iana" + }, + "text/rtploopback": { + "source": "iana" + }, + "text/rtx": { + "source": "iana" + }, + "text/sgml": { + "source": "iana", + "extensions": ["sgml","sgm"] + }, + "text/shaclc": { + "source": "iana" + }, + "text/shex": { + "source": "iana", + "extensions": ["shex"] + }, + "text/slim": { + "extensions": ["slim","slm"] + }, + "text/spdx": { + "source": "iana", + "extensions": ["spdx"] + }, + "text/strings": { + "source": "iana" + }, + "text/stylus": { + "extensions": ["stylus","styl"] + }, + "text/t140": { + "source": "iana" + }, + "text/tab-separated-values": { + "source": "iana", + "compressible": true, + "extensions": ["tsv"] + }, + "text/troff": { + "source": "iana", + "extensions": ["t","tr","roff","man","me","ms"] + }, + "text/turtle": { + "source": "iana", + "charset": "UTF-8", + "extensions": ["ttl"] + }, + "text/ulpfec": { + "source": "iana" + }, + "text/uri-list": { + "source": "iana", + "compressible": true, + "extensions": ["uri","uris","urls"] + }, + "text/vcard": { + "source": "iana", + "compressible": true, + "extensions": ["vcard"] + }, + "text/vnd.a": { + "source": "iana" + }, + "text/vnd.abc": { + "source": "iana" + }, + "text/vnd.ascii-art": { + "source": "iana" + }, + "text/vnd.curl": { + "source": "iana", + "extensions": ["curl"] + }, + "text/vnd.curl.dcurl": { + "source": "apache", + "extensions": ["dcurl"] + }, + "text/vnd.curl.mcurl": { + "source": "apache", + "extensions": ["mcurl"] + }, + "text/vnd.curl.scurl": { + "source": "apache", + "extensions": ["scurl"] + }, + "text/vnd.debian.copyright": { + "source": "iana", + "charset": "UTF-8" + }, + "text/vnd.dmclientscript": { + "source": "iana" + }, + "text/vnd.dvb.subtitle": { + "source": "iana", + "extensions": ["sub"] + }, + "text/vnd.esmertec.theme-descriptor": { + "source": "iana", + "charset": "UTF-8" + }, + "text/vnd.familysearch.gedcom": { + "source": "iana", + "extensions": ["ged"] + }, + "text/vnd.ficlab.flt": { + "source": "iana" + }, + "text/vnd.fly": { + "source": "iana", + "extensions": ["fly"] + }, + "text/vnd.fmi.flexstor": { + "source": "iana", + "extensions": ["flx"] + }, + "text/vnd.gml": { + "source": "iana" + }, + "text/vnd.graphviz": { + "source": "iana", + "extensions": ["gv"] + }, + "text/vnd.hans": { + "source": "iana" + }, + "text/vnd.hgl": { + "source": "iana" + }, + "text/vnd.in3d.3dml": { + "source": "iana", + "extensions": ["3dml"] + }, + "text/vnd.in3d.spot": { + "source": "iana", + "extensions": ["spot"] + }, + "text/vnd.iptc.newsml": { + "source": "iana" + }, + "text/vnd.iptc.nitf": { + "source": "iana" + }, + "text/vnd.latex-z": { + "source": "iana" + }, + "text/vnd.motorola.reflex": { + "source": "iana" + }, + "text/vnd.ms-mediapackage": { + "source": "iana" + }, + "text/vnd.net2phone.commcenter.command": { + "source": "iana" + }, + "text/vnd.radisys.msml-basic-layout": { + "source": "iana" + }, + "text/vnd.senx.warpscript": { + "source": "iana" + }, + "text/vnd.si.uricatalogue": { + "source": "iana" + }, + "text/vnd.sosi": { + "source": "iana" + }, + "text/vnd.sun.j2me.app-descriptor": { + "source": "iana", + "charset": "UTF-8", + 
"extensions": ["jad"] + }, + "text/vnd.trolltech.linguist": { + "source": "iana", + "charset": "UTF-8" + }, + "text/vnd.wap.si": { + "source": "iana" + }, + "text/vnd.wap.sl": { + "source": "iana" + }, + "text/vnd.wap.wml": { + "source": "iana", + "extensions": ["wml"] + }, + "text/vnd.wap.wmlscript": { + "source": "iana", + "extensions": ["wmls"] + }, + "text/vtt": { + "source": "iana", + "charset": "UTF-8", + "compressible": true, + "extensions": ["vtt"] + }, + "text/x-asm": { + "source": "apache", + "extensions": ["s","asm"] + }, + "text/x-c": { + "source": "apache", + "extensions": ["c","cc","cxx","cpp","h","hh","dic"] + }, + "text/x-component": { + "source": "nginx", + "extensions": ["htc"] + }, + "text/x-fortran": { + "source": "apache", + "extensions": ["f","for","f77","f90"] + }, + "text/x-gwt-rpc": { + "compressible": true + }, + "text/x-handlebars-template": { + "extensions": ["hbs"] + }, + "text/x-java-source": { + "source": "apache", + "extensions": ["java"] + }, + "text/x-jquery-tmpl": { + "compressible": true + }, + "text/x-lua": { + "extensions": ["lua"] + }, + "text/x-markdown": { + "compressible": true, + "extensions": ["mkd"] + }, + "text/x-nfo": { + "source": "apache", + "extensions": ["nfo"] + }, + "text/x-opml": { + "source": "apache", + "extensions": ["opml"] + }, + "text/x-org": { + "compressible": true, + "extensions": ["org"] + }, + "text/x-pascal": { + "source": "apache", + "extensions": ["p","pas"] + }, + "text/x-processing": { + "compressible": true, + "extensions": ["pde"] + }, + "text/x-sass": { + "extensions": ["sass"] + }, + "text/x-scss": { + "extensions": ["scss"] + }, + "text/x-setext": { + "source": "apache", + "extensions": ["etx"] + }, + "text/x-sfv": { + "source": "apache", + "extensions": ["sfv"] + }, + "text/x-suse-ymp": { + "compressible": true, + "extensions": ["ymp"] + }, + "text/x-uuencode": { + "source": "apache", + "extensions": ["uu"] + }, + "text/x-vcalendar": { + "source": "apache", + "extensions": ["vcs"] + }, + "text/x-vcard": { + "source": "apache", + "extensions": ["vcf"] + }, + "text/xml": { + "source": "iana", + "compressible": true, + "extensions": ["xml"] + }, + "text/xml-external-parsed-entity": { + "source": "iana" + }, + "text/yaml": { + "compressible": true, + "extensions": ["yaml","yml"] + }, + "video/1d-interleaved-parityfec": { + "source": "iana" + }, + "video/3gpp": { + "source": "iana", + "extensions": ["3gp","3gpp"] + }, + "video/3gpp-tt": { + "source": "iana" + }, + "video/3gpp2": { + "source": "iana", + "extensions": ["3g2"] + }, + "video/av1": { + "source": "iana" + }, + "video/bmpeg": { + "source": "iana" + }, + "video/bt656": { + "source": "iana" + }, + "video/celb": { + "source": "iana" + }, + "video/dv": { + "source": "iana" + }, + "video/encaprtp": { + "source": "iana" + }, + "video/ffv1": { + "source": "iana" + }, + "video/flexfec": { + "source": "iana" + }, + "video/h261": { + "source": "iana", + "extensions": ["h261"] + }, + "video/h263": { + "source": "iana", + "extensions": ["h263"] + }, + "video/h263-1998": { + "source": "iana" + }, + "video/h263-2000": { + "source": "iana" + }, + "video/h264": { + "source": "iana", + "extensions": ["h264"] + }, + "video/h264-rcdo": { + "source": "iana" + }, + "video/h264-svc": { + "source": "iana" + }, + "video/h265": { + "source": "iana" + }, + "video/iso.segment": { + "source": "iana", + "extensions": ["m4s"] + }, + "video/jpeg": { + "source": "iana", + "extensions": ["jpgv"] + }, + "video/jpeg2000": { + "source": "iana" + }, + "video/jpm": { + "source": "apache", + 
"extensions": ["jpm","jpgm"] + }, + "video/jxsv": { + "source": "iana" + }, + "video/mj2": { + "source": "iana", + "extensions": ["mj2","mjp2"] + }, + "video/mp1s": { + "source": "iana" + }, + "video/mp2p": { + "source": "iana" + }, + "video/mp2t": { + "source": "iana", + "extensions": ["ts"] + }, + "video/mp4": { + "source": "iana", + "compressible": false, + "extensions": ["mp4","mp4v","mpg4"] + }, + "video/mp4v-es": { + "source": "iana" + }, + "video/mpeg": { + "source": "iana", + "compressible": false, + "extensions": ["mpeg","mpg","mpe","m1v","m2v"] + }, + "video/mpeg4-generic": { + "source": "iana" + }, + "video/mpv": { + "source": "iana" + }, + "video/nv": { + "source": "iana" + }, + "video/ogg": { + "source": "iana", + "compressible": false, + "extensions": ["ogv"] + }, + "video/parityfec": { + "source": "iana" + }, + "video/pointer": { + "source": "iana" + }, + "video/quicktime": { + "source": "iana", + "compressible": false, + "extensions": ["qt","mov"] + }, + "video/raptorfec": { + "source": "iana" + }, + "video/raw": { + "source": "iana" + }, + "video/rtp-enc-aescm128": { + "source": "iana" + }, + "video/rtploopback": { + "source": "iana" + }, + "video/rtx": { + "source": "iana" + }, + "video/scip": { + "source": "iana" + }, + "video/smpte291": { + "source": "iana" + }, + "video/smpte292m": { + "source": "iana" + }, + "video/ulpfec": { + "source": "iana" + }, + "video/vc1": { + "source": "iana" + }, + "video/vc2": { + "source": "iana" + }, + "video/vnd.cctv": { + "source": "iana" + }, + "video/vnd.dece.hd": { + "source": "iana", + "extensions": ["uvh","uvvh"] + }, + "video/vnd.dece.mobile": { + "source": "iana", + "extensions": ["uvm","uvvm"] + }, + "video/vnd.dece.mp4": { + "source": "iana" + }, + "video/vnd.dece.pd": { + "source": "iana", + "extensions": ["uvp","uvvp"] + }, + "video/vnd.dece.sd": { + "source": "iana", + "extensions": ["uvs","uvvs"] + }, + "video/vnd.dece.video": { + "source": "iana", + "extensions": ["uvv","uvvv"] + }, + "video/vnd.directv.mpeg": { + "source": "iana" + }, + "video/vnd.directv.mpeg-tts": { + "source": "iana" + }, + "video/vnd.dlna.mpeg-tts": { + "source": "iana" + }, + "video/vnd.dvb.file": { + "source": "iana", + "extensions": ["dvb"] + }, + "video/vnd.fvt": { + "source": "iana", + "extensions": ["fvt"] + }, + "video/vnd.hns.video": { + "source": "iana" + }, + "video/vnd.iptvforum.1dparityfec-1010": { + "source": "iana" + }, + "video/vnd.iptvforum.1dparityfec-2005": { + "source": "iana" + }, + "video/vnd.iptvforum.2dparityfec-1010": { + "source": "iana" + }, + "video/vnd.iptvforum.2dparityfec-2005": { + "source": "iana" + }, + "video/vnd.iptvforum.ttsavc": { + "source": "iana" + }, + "video/vnd.iptvforum.ttsmpeg2": { + "source": "iana" + }, + "video/vnd.motorola.video": { + "source": "iana" + }, + "video/vnd.motorola.videop": { + "source": "iana" + }, + "video/vnd.mpegurl": { + "source": "iana", + "extensions": ["mxu","m4u"] + }, + "video/vnd.ms-playready.media.pyv": { + "source": "iana", + "extensions": ["pyv"] + }, + "video/vnd.nokia.interleaved-multimedia": { + "source": "iana" + }, + "video/vnd.nokia.mp4vr": { + "source": "iana" + }, + "video/vnd.nokia.videovoip": { + "source": "iana" + }, + "video/vnd.objectvideo": { + "source": "iana" + }, + "video/vnd.radgamettools.bink": { + "source": "iana" + }, + "video/vnd.radgamettools.smacker": { + "source": "iana" + }, + "video/vnd.sealed.mpeg1": { + "source": "iana" + }, + "video/vnd.sealed.mpeg4": { + "source": "iana" + }, + "video/vnd.sealed.swf": { + "source": "iana" + }, + 
"video/vnd.sealedmedia.softseal.mov": { + "source": "iana" + }, + "video/vnd.uvvu.mp4": { + "source": "iana", + "extensions": ["uvu","uvvu"] + }, + "video/vnd.vivo": { + "source": "iana", + "extensions": ["viv"] + }, + "video/vnd.youtube.yt": { + "source": "iana" + }, + "video/vp8": { + "source": "iana" + }, + "video/vp9": { + "source": "iana" + }, + "video/webm": { + "source": "apache", + "compressible": false, + "extensions": ["webm"] + }, + "video/x-f4v": { + "source": "apache", + "extensions": ["f4v"] + }, + "video/x-fli": { + "source": "apache", + "extensions": ["fli"] + }, + "video/x-flv": { + "source": "apache", + "compressible": false, + "extensions": ["flv"] + }, + "video/x-m4v": { + "source": "apache", + "extensions": ["m4v"] + }, + "video/x-matroska": { + "source": "apache", + "compressible": false, + "extensions": ["mkv","mk3d","mks"] + }, + "video/x-mng": { + "source": "apache", + "extensions": ["mng"] + }, + "video/x-ms-asf": { + "source": "apache", + "extensions": ["asf","asx"] + }, + "video/x-ms-vob": { + "source": "apache", + "extensions": ["vob"] + }, + "video/x-ms-wm": { + "source": "apache", + "extensions": ["wm"] + }, + "video/x-ms-wmv": { + "source": "apache", + "compressible": false, + "extensions": ["wmv"] + }, + "video/x-ms-wmx": { + "source": "apache", + "extensions": ["wmx"] + }, + "video/x-ms-wvx": { + "source": "apache", + "extensions": ["wvx"] + }, + "video/x-msvideo": { + "source": "apache", + "extensions": ["avi"] + }, + "video/x-sgi-movie": { + "source": "apache", + "extensions": ["movie"] + }, + "video/x-smv": { + "source": "apache", + "extensions": ["smv"] + }, + "x-conference/x-cooltalk": { + "source": "apache", + "extensions": ["ice"] + }, + "x-shader/x-fragment": { + "compressible": true + }, + "x-shader/x-vertex": { + "compressible": true + } +} diff --git a/node_modules/mime-db/index.js b/node_modules/mime-db/index.js new file mode 100644 index 0000000..ec2be30 --- /dev/null +++ b/node_modules/mime-db/index.js @@ -0,0 +1,12 @@ +/*! + * mime-db + * Copyright(c) 2014 Jonathan Ong + * Copyright(c) 2015-2022 Douglas Christopher Wilson + * MIT Licensed + */ + +/** + * Module exports. 
+ */ + +module.exports = require('./db.json') diff --git a/node_modules/mime-db/package.json b/node_modules/mime-db/package.json new file mode 100644 index 0000000..32c14b8 --- /dev/null +++ b/node_modules/mime-db/package.json @@ -0,0 +1,60 @@ +{ + "name": "mime-db", + "description": "Media Type Database", + "version": "1.52.0", + "contributors": [ + "Douglas Christopher Wilson ", + "Jonathan Ong (http://jongleberry.com)", + "Robert Kieffer (http://github.com/broofa)" + ], + "license": "MIT", + "keywords": [ + "mime", + "db", + "type", + "types", + "database", + "charset", + "charsets" + ], + "repository": "jshttp/mime-db", + "devDependencies": { + "bluebird": "3.7.2", + "co": "4.6.0", + "cogent": "1.0.1", + "csv-parse": "4.16.3", + "eslint": "7.32.0", + "eslint-config-standard": "15.0.1", + "eslint-plugin-import": "2.25.4", + "eslint-plugin-markdown": "2.2.1", + "eslint-plugin-node": "11.1.0", + "eslint-plugin-promise": "5.1.1", + "eslint-plugin-standard": "4.1.0", + "gnode": "0.1.2", + "media-typer": "1.1.0", + "mocha": "9.2.1", + "nyc": "15.1.0", + "raw-body": "2.5.0", + "stream-to-array": "2.3.0" + }, + "files": [ + "HISTORY.md", + "LICENSE", + "README.md", + "db.json", + "index.js" + ], + "engines": { + "node": ">= 0.6" + }, + "scripts": { + "build": "node scripts/build", + "fetch": "node scripts/fetch-apache && gnode scripts/fetch-iana && node scripts/fetch-nginx", + "lint": "eslint .", + "test": "mocha --reporter spec --bail --check-leaks test/", + "test-ci": "nyc --reporter=lcov --reporter=text npm test", + "test-cov": "nyc --reporter=html --reporter=text npm test", + "update": "npm run fetch && npm run build", + "version": "node scripts/version-history.js && git add HISTORY.md" + } +} diff --git a/node_modules/mime-types/HISTORY.md b/node_modules/mime-types/HISTORY.md new file mode 100644 index 0000000..c5043b7 --- /dev/null +++ b/node_modules/mime-types/HISTORY.md @@ -0,0 +1,397 @@ +2.1.35 / 2022-03-12 +=================== + + * deps: mime-db@1.52.0 + - Add extensions from IANA for more `image/*` types + - Add extension `.asc` to `application/pgp-keys` + - Add extensions to various XML types + - Add new upstream MIME types + +2.1.34 / 2021-11-08 +=================== + + * deps: mime-db@1.51.0 + - Add new upstream MIME types + +2.1.33 / 2021-10-01 +=================== + + * deps: mime-db@1.50.0 + - Add deprecated iWorks mime types and extensions + - Add new upstream MIME types + +2.1.32 / 2021-07-27 +=================== + + * deps: mime-db@1.49.0 + - Add extension `.trig` to `application/trig` + - Add new upstream MIME types + +2.1.31 / 2021-06-01 +=================== + + * deps: mime-db@1.48.0 + - Add extension `.mvt` to `application/vnd.mapbox-vector-tile` + - Add new upstream MIME types + +2.1.30 / 2021-04-02 +=================== + + * deps: mime-db@1.47.0 + - Add extension `.amr` to `audio/amr` + - Remove ambigious extensions from IANA for `application/*+xml` types + - Update primary extension to `.es` for `application/ecmascript` + +2.1.29 / 2021-02-17 +=================== + + * deps: mime-db@1.46.0 + - Add extension `.amr` to `audio/amr` + - Add extension `.m4s` to `video/iso.segment` + - Add extension `.opus` to `audio/ogg` + - Add new upstream MIME types + +2.1.28 / 2021-01-01 +=================== + + * deps: mime-db@1.45.0 + - Add `application/ubjson` with extension `.ubj` + - Add `image/avif` with extension `.avif` + - Add `image/ktx2` with extension `.ktx2` + - Add extension `.dbf` to `application/vnd.dbf` + - Add extension `.rar` to `application/vnd.rar` + - Add 
extension `.td` to `application/urc-targetdesc+xml` + - Add new upstream MIME types + - Fix extension of `application/vnd.apple.keynote` to be `.key` + +2.1.27 / 2020-04-23 +=================== + + * deps: mime-db@1.44.0 + - Add charsets from IANA + - Add extension `.cjs` to `application/node` + - Add new upstream MIME types + +2.1.26 / 2020-01-05 +=================== + + * deps: mime-db@1.43.0 + - Add `application/x-keepass2` with extension `.kdbx` + - Add extension `.mxmf` to `audio/mobile-xmf` + - Add extensions from IANA for `application/*+xml` types + - Add new upstream MIME types + +2.1.25 / 2019-11-12 +=================== + + * deps: mime-db@1.42.0 + - Add new upstream MIME types + - Add `application/toml` with extension `.toml` + - Add `image/vnd.ms-dds` with extension `.dds` + +2.1.24 / 2019-04-20 +=================== + + * deps: mime-db@1.40.0 + - Add extensions from IANA for `model/*` types + - Add `text/mdx` with extension `.mdx` + +2.1.23 / 2019-04-17 +=================== + + * deps: mime-db@~1.39.0 + - Add extensions `.siv` and `.sieve` to `application/sieve` + - Add new upstream MIME types + +2.1.22 / 2019-02-14 +=================== + + * deps: mime-db@~1.38.0 + - Add extension `.nq` to `application/n-quads` + - Add extension `.nt` to `application/n-triples` + - Add new upstream MIME types + +2.1.21 / 2018-10-19 +=================== + + * deps: mime-db@~1.37.0 + - Add extensions to HEIC image types + - Add new upstream MIME types + +2.1.20 / 2018-08-26 +=================== + + * deps: mime-db@~1.36.0 + - Add Apple file extensions from IANA + - Add extensions from IANA for `image/*` types + - Add new upstream MIME types + +2.1.19 / 2018-07-17 +=================== + + * deps: mime-db@~1.35.0 + - Add extension `.csl` to `application/vnd.citationstyles.style+xml` + - Add extension `.es` to `application/ecmascript` + - Add extension `.owl` to `application/rdf+xml` + - Add new upstream MIME types + - Add UTF-8 as default charset for `text/turtle` + +2.1.18 / 2018-02-16 +=================== + + * deps: mime-db@~1.33.0 + - Add `application/raml+yaml` with extension `.raml` + - Add `application/wasm` with extension `.wasm` + - Add `text/shex` with extension `.shex` + - Add extensions for JPEG-2000 images + - Add extensions from IANA for `message/*` types + - Add new upstream MIME types + - Update font MIME types + - Update `text/hjson` to registered `application/hjson` + +2.1.17 / 2017-09-01 +=================== + + * deps: mime-db@~1.30.0 + - Add `application/vnd.ms-outlook` + - Add `application/x-arj` + - Add extension `.mjs` to `application/javascript` + - Add glTF types and extensions + - Add new upstream MIME types + - Add `text/x-org` + - Add VirtualBox MIME types + - Fix `source` records for `video/*` types that are IANA + - Update `font/opentype` to registered `font/otf` + +2.1.16 / 2017-07-24 +=================== + + * deps: mime-db@~1.29.0 + - Add `application/fido.trusted-apps+json` + - Add extension `.wadl` to `application/vnd.sun.wadl+xml` + - Add extension `.gz` to `application/gzip` + - Add new upstream MIME types + - Update extensions `.md` and `.markdown` to be `text/markdown` + +2.1.15 / 2017-03-23 +=================== + + * deps: mime-db@~1.27.0 + - Add new mime types + - Add `image/apng` + +2.1.14 / 2017-01-14 +=================== + + * deps: mime-db@~1.26.0 + - Add new mime types + +2.1.13 / 2016-11-18 +=================== + + * deps: mime-db@~1.25.0 + - Add new mime types + +2.1.12 / 2016-09-18 +=================== + + * deps: mime-db@~1.24.0 + - Add new mime 
types + - Add `audio/mp3` + +2.1.11 / 2016-05-01 +=================== + + * deps: mime-db@~1.23.0 + - Add new mime types + +2.1.10 / 2016-02-15 +=================== + + * deps: mime-db@~1.22.0 + - Add new mime types + - Fix extension of `application/dash+xml` + - Update primary extension for `audio/mp4` + +2.1.9 / 2016-01-06 +================== + + * deps: mime-db@~1.21.0 + - Add new mime types + +2.1.8 / 2015-11-30 +================== + + * deps: mime-db@~1.20.0 + - Add new mime types + +2.1.7 / 2015-09-20 +================== + + * deps: mime-db@~1.19.0 + - Add new mime types + +2.1.6 / 2015-09-03 +================== + + * deps: mime-db@~1.18.0 + - Add new mime types + +2.1.5 / 2015-08-20 +================== + + * deps: mime-db@~1.17.0 + - Add new mime types + +2.1.4 / 2015-07-30 +================== + + * deps: mime-db@~1.16.0 + - Add new mime types + +2.1.3 / 2015-07-13 +================== + + * deps: mime-db@~1.15.0 + - Add new mime types + +2.1.2 / 2015-06-25 +================== + + * deps: mime-db@~1.14.0 + - Add new mime types + +2.1.1 / 2015-06-08 +================== + + * perf: fix deopt during mapping + +2.1.0 / 2015-06-07 +================== + + * Fix incorrectly treating extension-less file name as extension + - i.e. `'path/to/json'` will no longer return `application/json` + * Fix `.charset(type)` to accept parameters + * Fix `.charset(type)` to match case-insensitive + * Improve generation of extension to MIME mapping + * Refactor internals for readability and no argument reassignment + * Prefer `application/*` MIME types from the same source + * Prefer any type over `application/octet-stream` + * deps: mime-db@~1.13.0 + - Add nginx as a source + - Add new mime types + +2.0.14 / 2015-06-06 +=================== + + * deps: mime-db@~1.12.0 + - Add new mime types + +2.0.13 / 2015-05-31 +=================== + + * deps: mime-db@~1.11.0 + - Add new mime types + +2.0.12 / 2015-05-19 +=================== + + * deps: mime-db@~1.10.0 + - Add new mime types + +2.0.11 / 2015-05-05 +=================== + + * deps: mime-db@~1.9.1 + - Add new mime types + +2.0.10 / 2015-03-13 +=================== + + * deps: mime-db@~1.8.0 + - Add new mime types + +2.0.9 / 2015-02-09 +================== + + * deps: mime-db@~1.7.0 + - Add new mime types + - Community extensions ownership transferred from `node-mime` + +2.0.8 / 2015-01-29 +================== + + * deps: mime-db@~1.6.0 + - Add new mime types + +2.0.7 / 2014-12-30 +================== + + * deps: mime-db@~1.5.0 + - Add new mime types + - Fix various invalid MIME type entries + +2.0.6 / 2014-12-30 +================== + + * deps: mime-db@~1.4.0 + - Add new mime types + - Fix various invalid MIME type entries + - Remove example template MIME types + +2.0.5 / 2014-12-29 +================== + + * deps: mime-db@~1.3.1 + - Fix missing extensions + +2.0.4 / 2014-12-10 +================== + + * deps: mime-db@~1.3.0 + - Add new mime types + +2.0.3 / 2014-11-09 +================== + + * deps: mime-db@~1.2.0 + - Add new mime types + +2.0.2 / 2014-09-28 +================== + + * deps: mime-db@~1.1.0 + - Add new mime types + - Update charsets + +2.0.1 / 2014-09-07 +================== + + * Support Node.js 0.6 + +2.0.0 / 2014-09-02 +================== + + * Use `mime-db` + * Remove `.define()` + +1.0.2 / 2014-08-04 +================== + + * Set charset=utf-8 for `text/javascript` + +1.0.1 / 2014-06-24 +================== + + * Add `text/jsx` type + +1.0.0 / 2014-05-12 +================== + + * Return `false` for unknown types + * Set charset=utf-8 for 
`application/json` + +0.1.0 / 2014-05-02 +================== + + * Initial release diff --git a/node_modules/mime-types/LICENSE b/node_modules/mime-types/LICENSE new file mode 100644 index 0000000..0616607 --- /dev/null +++ b/node_modules/mime-types/LICENSE @@ -0,0 +1,23 @@ +(The MIT License) + +Copyright (c) 2014 Jonathan Ong +Copyright (c) 2015 Douglas Christopher Wilson + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/mime-types/README.md b/node_modules/mime-types/README.md new file mode 100644 index 0000000..48d2fb4 --- /dev/null +++ b/node_modules/mime-types/README.md @@ -0,0 +1,113 @@ +# mime-types + +[![NPM Version][npm-version-image]][npm-url] +[![NPM Downloads][npm-downloads-image]][npm-url] +[![Node.js Version][node-version-image]][node-version-url] +[![Build Status][ci-image]][ci-url] +[![Test Coverage][coveralls-image]][coveralls-url] + +The ultimate javascript content-type utility. + +Similar to [the `mime@1.x` module](https://www.npmjs.com/package/mime), except: + +- __No fallbacks.__ Instead of naively returning the first available type, + `mime-types` simply returns `false`, so do + `var type = mime.lookup('unrecognized') || 'application/octet-stream'`. +- No `new Mime()` business, so you could do `var lookup = require('mime-types').lookup`. +- No `.define()` functionality +- Bug fixes for `.lookup(path)` + +Otherwise, the API is compatible with `mime` 1.x. + +## Install + +This is a [Node.js](https://nodejs.org/en/) module available through the +[npm registry](https://www.npmjs.com/). Installation is done using the +[`npm install` command](https://docs.npmjs.com/getting-started/installing-npm-packages-locally): + +```sh +$ npm install mime-types +``` + +## Adding Types + +All mime types are based on [mime-db](https://www.npmjs.com/package/mime-db), +so open a PR there if you'd like to add mime types. + +## API + +```js +var mime = require('mime-types') +``` + +All functions return `false` if input is invalid or not found. + +### mime.lookup(path) + +Lookup the content-type associated with a file. + +```js +mime.lookup('json') // 'application/json' +mime.lookup('.md') // 'text/markdown' +mime.lookup('file.html') // 'text/html' +mime.lookup('folder/file.js') // 'application/javascript' +mime.lookup('folder/.htaccess') // false + +mime.lookup('cats') // false +``` + +### mime.contentType(type) + +Create a full content-type header given a content-type or extension. 
+When given an extension, `mime.lookup` is used to get the matching +content-type, otherwise the given content-type is used. Then if the +content-type does not already have a `charset` parameter, `mime.charset` +is used to get the default charset and add to the returned content-type. + +```js +mime.contentType('markdown') // 'text/x-markdown; charset=utf-8' +mime.contentType('file.json') // 'application/json; charset=utf-8' +mime.contentType('text/html') // 'text/html; charset=utf-8' +mime.contentType('text/html; charset=iso-8859-1') // 'text/html; charset=iso-8859-1' + +// from a full path +mime.contentType(path.extname('/path/to/file.json')) // 'application/json; charset=utf-8' +``` + +### mime.extension(type) + +Get the default extension for a content-type. + +```js +mime.extension('application/octet-stream') // 'bin' +``` + +### mime.charset(type) + +Lookup the implied default charset of a content-type. + +```js +mime.charset('text/markdown') // 'UTF-8' +``` + +### var type = mime.types[extension] + +A map of content-types by extension. + +### [extensions...] = mime.extensions[type] + +A map of extensions by content-type. + +## License + +[MIT](LICENSE) + +[ci-image]: https://badgen.net/github/checks/jshttp/mime-types/master?label=ci +[ci-url]: https://github.com/jshttp/mime-types/actions/workflows/ci.yml +[coveralls-image]: https://badgen.net/coveralls/c/github/jshttp/mime-types/master +[coveralls-url]: https://coveralls.io/r/jshttp/mime-types?branch=master +[node-version-image]: https://badgen.net/npm/node/mime-types +[node-version-url]: https://nodejs.org/en/download +[npm-downloads-image]: https://badgen.net/npm/dm/mime-types +[npm-url]: https://npmjs.org/package/mime-types +[npm-version-image]: https://badgen.net/npm/v/mime-types diff --git a/node_modules/mime-types/index.js b/node_modules/mime-types/index.js new file mode 100644 index 0000000..b9f34d5 --- /dev/null +++ b/node_modules/mime-types/index.js @@ -0,0 +1,188 @@ +/*! + * mime-types + * Copyright(c) 2014 Jonathan Ong + * Copyright(c) 2015 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict' + +/** + * Module dependencies. + * @private + */ + +var db = require('mime-db') +var extname = require('path').extname + +/** + * Module variables. + * @private + */ + +var EXTRACT_TYPE_REGEXP = /^\s*([^;\s]*)(?:;|\s|$)/ +var TEXT_TYPE_REGEXP = /^text\//i + +/** + * Module exports. + * @public + */ + +exports.charset = charset +exports.charsets = { lookup: charset } +exports.contentType = contentType +exports.extension = extension +exports.extensions = Object.create(null) +exports.lookup = lookup +exports.types = Object.create(null) + +// Populate the extensions/types maps +populateMaps(exports.extensions, exports.types) + +/** + * Get the default charset for a MIME type. + * + * @param {string} type + * @return {boolean|string} + */ + +function charset (type) { + if (!type || typeof type !== 'string') { + return false + } + + // TODO: use media-typer + var match = EXTRACT_TYPE_REGEXP.exec(type) + var mime = match && db[match[1].toLowerCase()] + + if (mime && mime.charset) { + return mime.charset + } + + // default text/* to utf-8 + if (match && TEXT_TYPE_REGEXP.test(match[1])) { + return 'UTF-8' + } + + return false +} + +/** + * Create a full Content-Type header given a MIME type or extension. + * + * @param {string} str + * @return {boolean|string} + */ + +function contentType (str) { + // TODO: should this even be in this module? 
+ if (!str || typeof str !== 'string') { + return false + } + + var mime = str.indexOf('/') === -1 + ? exports.lookup(str) + : str + + if (!mime) { + return false + } + + // TODO: use content-type or other module + if (mime.indexOf('charset') === -1) { + var charset = exports.charset(mime) + if (charset) mime += '; charset=' + charset.toLowerCase() + } + + return mime +} + +/** + * Get the default extension for a MIME type. + * + * @param {string} type + * @return {boolean|string} + */ + +function extension (type) { + if (!type || typeof type !== 'string') { + return false + } + + // TODO: use media-typer + var match = EXTRACT_TYPE_REGEXP.exec(type) + + // get extensions + var exts = match && exports.extensions[match[1].toLowerCase()] + + if (!exts || !exts.length) { + return false + } + + return exts[0] +} + +/** + * Lookup the MIME type for a file path/extension. + * + * @param {string} path + * @return {boolean|string} + */ + +function lookup (path) { + if (!path || typeof path !== 'string') { + return false + } + + // get the extension ("ext" or ".ext" or full path) + var extension = extname('x.' + path) + .toLowerCase() + .substr(1) + + if (!extension) { + return false + } + + return exports.types[extension] || false +} + +/** + * Populate the extensions and types maps. + * @private + */ + +function populateMaps (extensions, types) { + // source preference (least -> most) + var preference = ['nginx', 'apache', undefined, 'iana'] + + Object.keys(db).forEach(function forEachMimeType (type) { + var mime = db[type] + var exts = mime.extensions + + if (!exts || !exts.length) { + return + } + + // mime -> extensions + extensions[type] = exts + + // extension -> mime + for (var i = 0; i < exts.length; i++) { + var extension = exts[i] + + if (types[extension]) { + var from = preference.indexOf(db[types[extension]].source) + var to = preference.indexOf(mime.source) + + if (types[extension] !== 'application/octet-stream' && + (from > to || (from === to && types[extension].substr(0, 12) === 'application/'))) { + // skip the remapping + continue + } + } + + // set the extension -> mime + types[extension] = type + } + }) +} diff --git a/node_modules/mime-types/package.json b/node_modules/mime-types/package.json new file mode 100644 index 0000000..bbef696 --- /dev/null +++ b/node_modules/mime-types/package.json @@ -0,0 +1,44 @@ +{ + "name": "mime-types", + "description": "The ultimate javascript content-type utility.", + "version": "2.1.35", + "contributors": [ + "Douglas Christopher Wilson ", + "Jeremiah Senkpiel (https://searchbeam.jit.su)", + "Jonathan Ong (http://jongleberry.com)" + ], + "license": "MIT", + "keywords": [ + "mime", + "types" + ], + "repository": "jshttp/mime-types", + "dependencies": { + "mime-db": "1.52.0" + }, + "devDependencies": { + "eslint": "7.32.0", + "eslint-config-standard": "14.1.1", + "eslint-plugin-import": "2.25.4", + "eslint-plugin-markdown": "2.2.1", + "eslint-plugin-node": "11.1.0", + "eslint-plugin-promise": "5.2.0", + "eslint-plugin-standard": "4.1.0", + "mocha": "9.2.2", + "nyc": "15.1.0" + }, + "files": [ + "HISTORY.md", + "LICENSE", + "index.js" + ], + "engines": { + "node": ">= 0.6" + }, + "scripts": { + "lint": "eslint .", + "test": "mocha --reporter spec test/test.js", + "test-ci": "nyc --reporter=lcov --reporter=text npm test", + "test-cov": "nyc --reporter=html --reporter=text npm test" + } +} diff --git a/node_modules/ms/index.js b/node_modules/ms/index.js new file mode 100644 index 0000000..ea734fb --- /dev/null +++ b/node_modules/ms/index.js @@ -0,0 
+1,162 @@ +/** + * Helpers. + */ + +var s = 1000; +var m = s * 60; +var h = m * 60; +var d = h * 24; +var w = d * 7; +var y = d * 365.25; + +/** + * Parse or format the given `val`. + * + * Options: + * + * - `long` verbose formatting [false] + * + * @param {String|Number} val + * @param {Object} [options] + * @throws {Error} throw an error if val is not a non-empty string or a number + * @return {String|Number} + * @api public + */ + +module.exports = function (val, options) { + options = options || {}; + var type = typeof val; + if (type === 'string' && val.length > 0) { + return parse(val); + } else if (type === 'number' && isFinite(val)) { + return options.long ? fmtLong(val) : fmtShort(val); + } + throw new Error( + 'val is not a non-empty string or a valid number. val=' + + JSON.stringify(val) + ); +}; + +/** + * Parse the given `str` and return milliseconds. + * + * @param {String} str + * @return {Number} + * @api private + */ + +function parse(str) { + str = String(str); + if (str.length > 100) { + return; + } + var match = /^(-?(?:\d+)?\.?\d+) *(milliseconds?|msecs?|ms|seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|weeks?|w|years?|yrs?|y)?$/i.exec( + str + ); + if (!match) { + return; + } + var n = parseFloat(match[1]); + var type = (match[2] || 'ms').toLowerCase(); + switch (type) { + case 'years': + case 'year': + case 'yrs': + case 'yr': + case 'y': + return n * y; + case 'weeks': + case 'week': + case 'w': + return n * w; + case 'days': + case 'day': + case 'd': + return n * d; + case 'hours': + case 'hour': + case 'hrs': + case 'hr': + case 'h': + return n * h; + case 'minutes': + case 'minute': + case 'mins': + case 'min': + case 'm': + return n * m; + case 'seconds': + case 'second': + case 'secs': + case 'sec': + case 's': + return n * s; + case 'milliseconds': + case 'millisecond': + case 'msecs': + case 'msec': + case 'ms': + return n; + default: + return undefined; + } +} + +/** + * Short format for `ms`. + * + * @param {Number} ms + * @return {String} + * @api private + */ + +function fmtShort(ms) { + var msAbs = Math.abs(ms); + if (msAbs >= d) { + return Math.round(ms / d) + 'd'; + } + if (msAbs >= h) { + return Math.round(ms / h) + 'h'; + } + if (msAbs >= m) { + return Math.round(ms / m) + 'm'; + } + if (msAbs >= s) { + return Math.round(ms / s) + 's'; + } + return ms + 'ms'; +} + +/** + * Long format for `ms`. + * + * @param {Number} ms + * @return {String} + * @api private + */ + +function fmtLong(ms) { + var msAbs = Math.abs(ms); + if (msAbs >= d) { + return plural(ms, msAbs, d, 'day'); + } + if (msAbs >= h) { + return plural(ms, msAbs, h, 'hour'); + } + if (msAbs >= m) { + return plural(ms, msAbs, m, 'minute'); + } + if (msAbs >= s) { + return plural(ms, msAbs, s, 'second'); + } + return ms + ' ms'; +} + +/** + * Pluralization helper. + */ + +function plural(ms, msAbs, n, name) { + var isPlural = msAbs >= n * 1.5; + return Math.round(ms / n) + ' ' + name + (isPlural ? 's' : ''); +} diff --git a/node_modules/ms/license.md b/node_modules/ms/license.md new file mode 100644 index 0000000..fa5d39b --- /dev/null +++ b/node_modules/ms/license.md @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2020 Vercel, Inc. 
+ +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/ms/package.json b/node_modules/ms/package.json new file mode 100644 index 0000000..4997189 --- /dev/null +++ b/node_modules/ms/package.json @@ -0,0 +1,38 @@ +{ + "name": "ms", + "version": "2.1.3", + "description": "Tiny millisecond conversion utility", + "repository": "vercel/ms", + "main": "./index", + "files": [ + "index.js" + ], + "scripts": { + "precommit": "lint-staged", + "lint": "eslint lib/* bin/*", + "test": "mocha tests.js" + }, + "eslintConfig": { + "extends": "eslint:recommended", + "env": { + "node": true, + "es6": true + } + }, + "lint-staged": { + "*.js": [ + "npm run lint", + "prettier --single-quote --write", + "git add" + ] + }, + "license": "MIT", + "devDependencies": { + "eslint": "4.18.2", + "expect.js": "0.3.1", + "husky": "0.14.3", + "lint-staged": "5.0.0", + "mocha": "4.0.1", + "prettier": "2.0.5" + } +} diff --git a/node_modules/ms/readme.md b/node_modules/ms/readme.md new file mode 100644 index 0000000..0fc1abb --- /dev/null +++ b/node_modules/ms/readme.md @@ -0,0 +1,59 @@ +# ms + +![CI](https://github.com/vercel/ms/workflows/CI/badge.svg) + +Use this package to easily convert various time formats to milliseconds. + +## Examples + +```js +ms('2 days') // 172800000 +ms('1d') // 86400000 +ms('10h') // 36000000 +ms('2.5 hrs') // 9000000 +ms('2h') // 7200000 +ms('1m') // 60000 +ms('5s') // 5000 +ms('1y') // 31557600000 +ms('100') // 100 +ms('-3 days') // -259200000 +ms('-1h') // -3600000 +ms('-200') // -200 +``` + +### Convert from Milliseconds + +```js +ms(60000) // "1m" +ms(2 * 60000) // "2m" +ms(-3 * 60000) // "-3m" +ms(ms('10 hours')) // "10h" +``` + +### Time Format Written-Out + +```js +ms(60000, { long: true }) // "1 minute" +ms(2 * 60000, { long: true }) // "2 minutes" +ms(-3 * 60000, { long: true }) // "-3 minutes" +ms(ms('10 hours'), { long: true }) // "10 hours" +``` + +## Features + +- Works both in [Node.js](https://nodejs.org) and in the browser +- If a number is supplied to `ms`, a string with a unit is returned +- If a string that contains the number is supplied, it returns it as a number (e.g.: it returns `100` for `'100'`) +- If you pass a string with a number and a valid unit, the number of equivalent milliseconds is returned + +## Related Packages + +- [ms.macro](https://github.com/knpwrs/ms.macro) - Run `ms` as a macro at build-time. + +## Caught a Bug? + +1. 
[Fork](https://help.github.com/articles/fork-a-repo/) this repository to your own GitHub account and then [clone](https://help.github.com/articles/cloning-a-repository/) it to your local device +2. Link the package to the global module directory: `npm link` +3. Within the module you want to test your local development instance of ms, just link it to the dependencies: `npm link ms`. Instead of the default one from npm, Node.js will now use your clone of ms! + +As always, you can run the tests using: `npm test` diff --git a/node_modules/node-domexception/.history/README_20210527203617.md b/node_modules/node-domexception/.history/README_20210527203617.md new file mode 100644 index 0000000..38d8f85 --- /dev/null +++ b/node_modules/node-domexception/.history/README_20210527203617.md @@ -0,0 +1,2 @@ +# node-domexception +An implementation of the DOMException class from NodeJS diff --git a/node_modules/node-domexception/.history/README_20210527212714.md b/node_modules/node-domexception/.history/README_20210527212714.md new file mode 100644 index 0000000..eed1d13 --- /dev/null +++ b/node_modules/node-domexception/.history/README_20210527212714.md @@ -0,0 +1,41 @@ +# DOMException +An implementation of the DOMException class from NodeJS + +This package implements the [`DOMException`](https://developer.mozilla.org/en-US/docs/Web/API/DOMException) class, from NodeJS itself. +NodeJS has DOMException built in, but it's not globally available, and you can't require/import it from somewhere. + +The only possible way is to use some web-ish tools that have been introduced into NodeJS that throws an error and catch the constructor. +This way you will have the same class that NodeJS has and you can check if the error is a instance of DOMException. +The instanceof check would not have worked with a custom class such as the DOMexception provided by domenic which also is much larger in size. + +```js +import DOMException from 'node-domexception' + +hello().catch(err => { + if (err instanceof DOMException) { + ... + } +}) + +const e1 = new DOMException("Something went wrong", "BadThingsError"); +console.assert(e1.name === "BadThingsError"); +console.assert(e1.code === 0); + +const e2 = new DOMException("Another exciting error message", "NoModificationAllowedError"); +console.assert(e2.name === "NoModificationAllowedError"); +console.assert(e2.code === 7); + +console.assert(DOMException.INUSE_ATTRIBUTE_ERR === 10); +``` + +## APIs + +This package exposes two flavors of the `DOMException` interface depending on the imported module. + +### `domexception` module + +This module default-exports the `DOMException` interface constructor. + +### `domexception/webidl2js-wrapper` module + +This module exports the `DOMException` [interface wrapper API](https://github.com/jsdom/webidl2js#for-interfaces) generated by [webidl2js](https://github.com/jsdom/webidl2js). diff --git a/node_modules/node-domexception/.history/README_20210527213345.md b/node_modules/node-domexception/.history/README_20210527213345.md new file mode 100644 index 0000000..5825416 --- /dev/null +++ b/node_modules/node-domexception/.history/README_20210527213345.md @@ -0,0 +1,36 @@ +# DOMException +An implementation of the DOMException class from NodeJS + +This package implements the [`DOMException`](https://developer.mozilla.org/en-US/docs/Web/API/DOMException) class, from NodeJS itself. (including the legacy codes) +NodeJS has DOMException built in, but it's not globally available, and you can't require/import it from somewhere. 
+ +The only possible way is to use some web-ish tools that have been introduced into NodeJS that throws an error and catch the constructor. +This way you will have the same class that NodeJS has and you can check if the error is a instance of DOMException. +The instanceof check would not have worked with a custom class such as the DOMException provided by domenic which also is much larger in size. + +```js +import DOMException from 'node-domexception' +import { MessageChannel } from 'worker_threads' + +async function hello() { + const port = new MessageChannel().port1 + const ab = new ArrayBuffer() + port.postMessage(ab, [ab, ab]) +} + +hello().catch(err => { + console.assert(err.name === 'DataCloneError') + console.assert(err.code === 25) + console.assert(err instanceof DOMException) +}) + +const e1 = new DOMException('Something went wrong', 'BadThingsError') +console.assert(e1.name === 'BadThingsError') +console.assert(e1.code === 0) + +const e2 = new DOMException('Another exciting error message', 'NoModificationAllowedError') +console.assert(e2.name === 'NoModificationAllowedError') +console.assert(e2.code === 7) + +console.assert(DOMException.INUSE_ATTRIBUTE_ERR === 10) +``` diff --git a/node_modules/node-domexception/.history/README_20210527213411.md b/node_modules/node-domexception/.history/README_20210527213411.md new file mode 100644 index 0000000..4c21ec8 --- /dev/null +++ b/node_modules/node-domexception/.history/README_20210527213411.md @@ -0,0 +1,36 @@ +# DOMException +An implementation of the DOMException class from NodeJS + +This package implements the [`DOMException`](https://developer.mozilla.org/en-US/docs/Web/API/DOMException) class that comes from NodeJS itself. (including the legacy codes) +NodeJS has DOMException built in, but it's not globally available, and you can't require/import it from somewhere. + +The only possible way is to use some web-ish tools that have been introduced into NodeJS that throws an error and catch the constructor. +This way you will have the same class that NodeJS has and you can check if the error is a instance of DOMException. +The instanceof check would not have worked with a custom class such as the DOMException provided by domenic which also is much larger in size. 
+ +```js +import DOMException from 'node-domexception' +import { MessageChannel } from 'worker_threads' + +async function hello() { + const port = new MessageChannel().port1 + const ab = new ArrayBuffer() + port.postMessage(ab, [ab, ab]) +} + +hello().catch(err => { + console.assert(err.name === 'DataCloneError') + console.assert(err.code === 25) + console.assert(err instanceof DOMException) +}) + +const e1 = new DOMException('Something went wrong', 'BadThingsError') +console.assert(e1.name === 'BadThingsError') +console.assert(e1.code === 0) + +const e2 = new DOMException('Another exciting error message', 'NoModificationAllowedError') +console.assert(e2.name === 'NoModificationAllowedError') +console.assert(e2.code === 7) + +console.assert(DOMException.INUSE_ATTRIBUTE_ERR === 10) +``` diff --git a/node_modules/node-domexception/.history/README_20210527213803.md b/node_modules/node-domexception/.history/README_20210527213803.md new file mode 100644 index 0000000..4cb8571 --- /dev/null +++ b/node_modules/node-domexception/.history/README_20210527213803.md @@ -0,0 +1,36 @@ +# DOMException +An implementation of the DOMException class from NodeJS + +This package exposes the [`DOMException`](https://developer.mozilla.org/en-US/docs/Web/API/DOMException) class that comes from NodeJS itself. (including all of the deprecated legacy codes) +NodeJS has it built in, but it's not globally available, and you can't require/import it from somewhere. + +The only possible way is to use some web-ish tools that have been introduced into NodeJS that throws an error and catch the constructor. +This way you will have the same class that NodeJS has and you can check if the error is a instance of DOMException. +The instanceof check would not have worked with a custom class such as the DOMException provided by domenic which also is much larger in size since it has to re-construct the hole class from the ground up. + +```js +import DOMException from 'node-domexception' +import { MessageChannel } from 'worker_threads' + +async function hello() { + const port = new MessageChannel().port1 + const ab = new ArrayBuffer() + port.postMessage(ab, [ab, ab]) +} + +hello().catch(err => { + console.assert(err.name === 'DataCloneError') + console.assert(err.code === 25) + console.assert(err instanceof DOMException) +}) + +const e1 = new DOMException('Something went wrong', 'BadThingsError') +console.assert(e1.name === 'BadThingsError') +console.assert(e1.code === 0) + +const e2 = new DOMException('Another exciting error message', 'NoModificationAllowedError') +console.assert(e2.name === 'NoModificationAllowedError') +console.assert(e2.code === 7) + +console.assert(DOMException.INUSE_ATTRIBUTE_ERR === 10) +``` diff --git a/node_modules/node-domexception/.history/README_20210527214323.md b/node_modules/node-domexception/.history/README_20210527214323.md new file mode 100644 index 0000000..a32a91b --- /dev/null +++ b/node_modules/node-domexception/.history/README_20210527214323.md @@ -0,0 +1,38 @@ +# DOMException +An implementation of the DOMException class from NodeJS + +This package exposes the [`DOMException`](https://developer.mozilla.org/en-US/docs/Web/API/DOMException) class that comes from NodeJS itself. (including all of the deprecated legacy codes) +NodeJS has it built in, but it's not globally available, and you can't require/import it from somewhere. + +The only possible way is to use some web-ish tools that have been introduced into NodeJS that throws an error and catch the constructor. 
+This way you will have the same class that NodeJS has and you can check if the error is a instance of DOMException. +The instanceof check would not have worked with a custom class such as the DOMException provided by domenic which also is much larger in size since it has to re-construct the hole class from the ground up. + +(plz don't depend on this package in any other environment other than node >=10.5) + +```js +import DOMException from 'node-domexception' +import { MessageChannel } from 'worker_threads' + +async function hello() { + const port = new MessageChannel().port1 + const ab = new ArrayBuffer() + port.postMessage(ab, [ab, ab]) +} + +hello().catch(err => { + console.assert(err.name === 'DataCloneError') + console.assert(err.code === 25) + console.assert(err instanceof DOMException) +}) + +const e1 = new DOMException('Something went wrong', 'BadThingsError') +console.assert(e1.name === 'BadThingsError') +console.assert(e1.code === 0) + +const e2 = new DOMException('Another exciting error message', 'NoModificationAllowedError') +console.assert(e2.name === 'NoModificationAllowedError') +console.assert(e2.code === 7) + +console.assert(DOMException.INUSE_ATTRIBUTE_ERR === 10) +``` diff --git a/node_modules/node-domexception/.history/README_20210527214408.md b/node_modules/node-domexception/.history/README_20210527214408.md new file mode 100644 index 0000000..a32a91b --- /dev/null +++ b/node_modules/node-domexception/.history/README_20210527214408.md @@ -0,0 +1,38 @@ +# DOMException +An implementation of the DOMException class from NodeJS + +This package exposes the [`DOMException`](https://developer.mozilla.org/en-US/docs/Web/API/DOMException) class that comes from NodeJS itself. (including all of the deprecated legacy codes) +NodeJS has it built in, but it's not globally available, and you can't require/import it from somewhere. + +The only possible way is to use some web-ish tools that have been introduced into NodeJS that throws an error and catch the constructor. +This way you will have the same class that NodeJS has and you can check if the error is a instance of DOMException. +The instanceof check would not have worked with a custom class such as the DOMException provided by domenic which also is much larger in size since it has to re-construct the hole class from the ground up. 
+ +(plz don't depend on this package in any other environment other than node >=10.5) + +```js +import DOMException from 'node-domexception' +import { MessageChannel } from 'worker_threads' + +async function hello() { + const port = new MessageChannel().port1 + const ab = new ArrayBuffer() + port.postMessage(ab, [ab, ab]) +} + +hello().catch(err => { + console.assert(err.name === 'DataCloneError') + console.assert(err.code === 25) + console.assert(err instanceof DOMException) +}) + +const e1 = new DOMException('Something went wrong', 'BadThingsError') +console.assert(e1.name === 'BadThingsError') +console.assert(e1.code === 0) + +const e2 = new DOMException('Another exciting error message', 'NoModificationAllowedError') +console.assert(e2.name === 'NoModificationAllowedError') +console.assert(e2.code === 7) + +console.assert(DOMException.INUSE_ATTRIBUTE_ERR === 10) +``` diff --git a/node_modules/node-domexception/.history/index_20210527203842.js b/node_modules/node-domexception/.history/index_20210527203842.js new file mode 100644 index 0000000..e69de29 diff --git a/node_modules/node-domexception/.history/index_20210527203947.js b/node_modules/node-domexception/.history/index_20210527203947.js new file mode 100644 index 0000000..b9a8b76 --- /dev/null +++ b/node_modules/node-domexception/.history/index_20210527203947.js @@ -0,0 +1,8 @@ +const { MessageChannel } = require('worker_threads') + +if (!globalThis.DOMException) { + const port = new MessageChannel().port1 + const ab = new ArrayBuffer() + try { port.postMessage(ab, [ab, ab]) } + catch (err) { globalThis.DOMException = err.constructor } +} diff --git a/node_modules/node-domexception/.history/index_20210527204259.js b/node_modules/node-domexception/.history/index_20210527204259.js new file mode 100644 index 0000000..e9332a8 --- /dev/null +++ b/node_modules/node-domexception/.history/index_20210527204259.js @@ -0,0 +1,9 @@ +if (!globalThis.DOMException) { + const { MessageChannel } = require('worker_threads') + const port = new MessageChannel().port1 + const ab = new ArrayBuffer() + try { port.postMessage(ab, [ab, ab]) } + catch (err) { globalThis.DOMException = err.constructor } +} + +module.exports diff --git a/node_modules/node-domexception/.history/index_20210527204418.js b/node_modules/node-domexception/.history/index_20210527204418.js new file mode 100644 index 0000000..cb362cc --- /dev/null +++ b/node_modules/node-domexception/.history/index_20210527204418.js @@ -0,0 +1,9 @@ +if (!globalThis.DOMException) { + const { MessageChannel } = require('worker_threads') + const port = new MessageChannel().port1 + const ab = new ArrayBuffer() + try { port.postMessage(ab, [ab, ab]) } + catch (err) { globalThis.DOMException = err.constructor } +} + +module.exports = globalThis.DOMException diff --git a/node_modules/node-domexception/.history/index_20210527204756.js b/node_modules/node-domexception/.history/index_20210527204756.js new file mode 100644 index 0000000..87d2655 --- /dev/null +++ b/node_modules/node-domexception/.history/index_20210527204756.js @@ -0,0 +1,11 @@ +/*! blob-to-buffer. MIT License. 
Jimmy Wärting */ + +if (!globalThis.DOMException) { + const { MessageChannel } = require('worker_threads') + const port = new MessageChannel().port1 + const ab = new ArrayBuffer() + try { port.postMessage(ab, [ab, ab]) } + catch (err) { globalThis.DOMException = err.constructor } +} + +module.exports = globalThis.DOMException diff --git a/node_modules/node-domexception/.history/index_20210527204833.js b/node_modules/node-domexception/.history/index_20210527204833.js new file mode 100644 index 0000000..837ebda --- /dev/null +++ b/node_modules/node-domexception/.history/index_20210527204833.js @@ -0,0 +1,11 @@ +/*! blob-to-buffer. MIT License. Jimmy Wärting */ + +if (!globalThis.DOMException) { + const { MessageChannel } = require('worker_threads') + const port = new MessageChannel().port1 + const ab = new ArrayBuffer() + try { port.postMessage(ab, [ab, ab]) } + catch (err) { globalThis.DOMException = err.constructor } +} + +module.exports = globalThis.DOMException diff --git a/node_modules/node-domexception/.history/index_20210527211208.js b/node_modules/node-domexception/.history/index_20210527211208.js new file mode 100644 index 0000000..ba215ce --- /dev/null +++ b/node_modules/node-domexception/.history/index_20210527211208.js @@ -0,0 +1,15 @@ +/*! blob-to-buffer. MIT License. Jimmy Wärting */ + +if (!globalThis.DOMException) { + var { MessageChannel } = require('worker_threads'), + port = new MessageChannel().port1, + ab = new ArrayBuffer() + try { port.postMessage(ab, [ab, ab]) } + catch (err) { + err.constructor.name === 'DOMException' && ( + globalThis.DOMException = err.constructor + ) + } +} + +module.exports = globalThis.DOMException diff --git a/node_modules/node-domexception/.history/index_20210527211248.js b/node_modules/node-domexception/.history/index_20210527211248.js new file mode 100644 index 0000000..f5c434e --- /dev/null +++ b/node_modules/node-domexception/.history/index_20210527211248.js @@ -0,0 +1,15 @@ +/*! blob-to-buffer. MIT License. Jimmy Wärting */ + +if (!globalThis.DOMException) { + const { MessageChannel } = require('worker_threads'), + port = new MessageChannel().port1, + ab = new ArrayBuffer() + try { port.postMessage(ab, [ab, ab]) } + catch (err) { + err.constructor.name === 'DOMException' && ( + globalThis.DOMException = err.constructor + ) + } +} + +module.exports = globalThis.DOMException diff --git a/node_modules/node-domexception/.history/index_20210527212722.js b/node_modules/node-domexception/.history/index_20210527212722.js new file mode 100644 index 0000000..91b3b52 --- /dev/null +++ b/node_modules/node-domexception/.history/index_20210527212722.js @@ -0,0 +1,23 @@ +/*! blob-to-buffer. MIT License. 
Jimmy Wärting */ + +if (!globalThis.DOMException) { + const { MessageChannel } = require('worker_threads'), + port = new MessageChannel().port1, + ab = new ArrayBuffer() + try { port.postMessage(ab, [ab, ab]) } + catch (err) { + err.constructor.name === 'DOMException' && ( + globalThis.DOMException = err.constructor + ) + } +} + +module.exports = globalThis.DOMException + +const e1 = new DOMException("Something went wrong", "BadThingsError"); +console.assert(e1.name === "BadThingsError"); +console.assert(e1.code === 0); + +const e2 = new DOMException("Another exciting error message", "NoModificationAllowedError"); +console.assert(e2.name === "NoModificationAllowedError"); +console.assert(e2.code === 7); diff --git a/node_modules/node-domexception/.history/index_20210527212731.js b/node_modules/node-domexception/.history/index_20210527212731.js new file mode 100644 index 0000000..cf28864 --- /dev/null +++ b/node_modules/node-domexception/.history/index_20210527212731.js @@ -0,0 +1,23 @@ +/*! blob-to-buffer. MIT License. Jimmy Wärting */ + +if (!globalThis.DOMException) { + const { MessageChannel } = require('worker_threads'), + port = new MessageChannel().port1, + ab = new ArrayBuffer() + try { port.postMessage(ab, [ab, ab]) } + catch (err) { + err.constructor.name === 'DOMException' && ( + globalThis.DOMException = err.constructor + ) + } +} + +module.exports = globalThis.DOMException + +const e1 = new DOMException("Something went wrong", "BadThingsError"); +console.assert(e1.name === "BadThingsError"); +console.assert(e1.code === 0); + +const e2 = new DOMException("Another exciting error message", "NoModificationAllowedError"); +console.assert(e2.name === "NoModificationAllowedError"); +console.assert(e2.code === 2); diff --git a/node_modules/node-domexception/.history/index_20210527212746.js b/node_modules/node-domexception/.history/index_20210527212746.js new file mode 100644 index 0000000..f5c434e --- /dev/null +++ b/node_modules/node-domexception/.history/index_20210527212746.js @@ -0,0 +1,15 @@ +/*! blob-to-buffer. MIT License. Jimmy Wärting */ + +if (!globalThis.DOMException) { + const { MessageChannel } = require('worker_threads'), + port = new MessageChannel().port1, + ab = new ArrayBuffer() + try { port.postMessage(ab, [ab, ab]) } + catch (err) { + err.constructor.name === 'DOMException' && ( + globalThis.DOMException = err.constructor + ) + } +} + +module.exports = globalThis.DOMException diff --git a/node_modules/node-domexception/.history/index_20210527212900.js b/node_modules/node-domexception/.history/index_20210527212900.js new file mode 100644 index 0000000..efa2442 --- /dev/null +++ b/node_modules/node-domexception/.history/index_20210527212900.js @@ -0,0 +1,16 @@ +/*! blob-to-buffer. MIT License. Jimmy Wärting */ + +if (!globalThis.DOMException) { + const { MessageChannel } = require('worker_threads'), + port = new MessageChannel().port1, + ab = new ArrayBuffer() + try { port.postMessage(ab, [ab, ab]) } + catch (err) { + console.log(err.code) + err.constructor.name === 'DOMException' && ( + globalThis.DOMException = err.constructor + ) + } +} + +module.exports = globalThis.DOMException diff --git a/node_modules/node-domexception/.history/index_20210527213022.js b/node_modules/node-domexception/.history/index_20210527213022.js new file mode 100644 index 0000000..e59f047 --- /dev/null +++ b/node_modules/node-domexception/.history/index_20210527213022.js @@ -0,0 +1,16 @@ +/*! blob-to-buffer. MIT License. 
Jimmy Wärting */ + +if (!globalThis.DOMException) { + const { MessageChannel } = require('worker_threads'), + port = new MessageChannel().port1, + ab = new ArrayBuffer() + try { port.postMessage(ab, [ab, ab]) } + catch (err) { + console.log(err.code, err.name, err.message) + err.constructor.name === 'DOMException' && ( + globalThis.DOMException = err.constructor + ) + } +} + +module.exports = globalThis.DOMException diff --git a/node_modules/node-domexception/.history/index_20210527213822.js b/node_modules/node-domexception/.history/index_20210527213822.js new file mode 100644 index 0000000..7f4e13d --- /dev/null +++ b/node_modules/node-domexception/.history/index_20210527213822.js @@ -0,0 +1,16 @@ +/*! node-DOMException. MIT License. Jimmy Wärting */ + +if (!globalThis.DOMException) { + const { MessageChannel } = require('worker_threads'), + port = new MessageChannel().port1, + ab = new ArrayBuffer() + try { port.postMessage(ab, [ab, ab]) } + catch (err) { + console.log(err.code, err.name, err.message) + err.constructor.name === 'DOMException' && ( + globalThis.DOMException = err.constructor + ) + } +} + +module.exports = globalThis.DOMException diff --git a/node_modules/node-domexception/.history/index_20210527213843.js b/node_modules/node-domexception/.history/index_20210527213843.js new file mode 100644 index 0000000..ee75b73 --- /dev/null +++ b/node_modules/node-domexception/.history/index_20210527213843.js @@ -0,0 +1,17 @@ +/*! node-DOMException. MIT License. Jimmy Wärting */ + +if (!globalThis.DOMException) { + try { + const { MessageChannel } = require('worker_threads'), + port = new MessageChannel().port1, + ab = new ArrayBuffer() + port.postMessage(ab, [ab, ab]) + catch (err) { + console.log(err.code, err.name, err.message) + err.constructor.name === 'DOMException' && ( + globalThis.DOMException = err.constructor + ) + } +} + +module.exports = globalThis.DOMException diff --git a/node_modules/node-domexception/.history/index_20210527213852.js b/node_modules/node-domexception/.history/index_20210527213852.js new file mode 100644 index 0000000..a82bee3 --- /dev/null +++ b/node_modules/node-domexception/.history/index_20210527213852.js @@ -0,0 +1,17 @@ +/*! node-DOMException. MIT License. Jimmy Wärting */ + +if (!globalThis.DOMException) { + try { + const { MessageChannel } = require('worker_threads'), + port = new MessageChannel().port1, + ab = new ArrayBuffer() + port.postMessage(ab, [ab, ab]) + } catch (err) { + console.log(err.code, err.name, err.message) + err.constructor.name === 'DOMException' && ( + globalThis.DOMException = err.constructor + ) + } +} + +module.exports = globalThis.DOMException diff --git a/node_modules/node-domexception/.history/index_20210527213910.js b/node_modules/node-domexception/.history/index_20210527213910.js new file mode 100644 index 0000000..1e1ca29 --- /dev/null +++ b/node_modules/node-domexception/.history/index_20210527213910.js @@ -0,0 +1,16 @@ +/*! node-DOMException. MIT License. 
Jimmy Wärting */ + +if (!globalThis.DOMException) { + try { + const { MessageChannel } = require('worker_threads'), + port = new MessageChannel().port1, + ab = new ArrayBuffer() + port.postMessage(ab, [ab, ab]) + } catch (err) { + err.constructor.name === 'DOMException' && ( + globalThis.DOMException = err.constructor + ) + } +} + +module.exports = globalThis.DOMException diff --git a/node_modules/node-domexception/.history/index_20210527214034.js b/node_modules/node-domexception/.history/index_20210527214034.js new file mode 100644 index 0000000..b7bbe95 --- /dev/null +++ b/node_modules/node-domexception/.history/index_20210527214034.js @@ -0,0 +1,16 @@ +/*! node-domexception. MIT License. Jimmy Wärting */ + +if (!globalThis.DOMException) { + try { + const { MessageChannel } = require('worker_threads'), + port = new MessageChannel().port1, + ab = new ArrayBuffer() + port.postMessage(ab, [ab, ab]) + } catch (err) { + err.constructor.name === 'DOMException' && ( + globalThis.DOMException = err.constructor + ) + } +} + +module.exports = globalThis.DOMException diff --git a/node_modules/node-domexception/.history/index_20210527214643.js b/node_modules/node-domexception/.history/index_20210527214643.js new file mode 100644 index 0000000..92ed847 --- /dev/null +++ b/node_modules/node-domexception/.history/index_20210527214643.js @@ -0,0 +1,41 @@ +/*! node-domexception. MIT License. Jimmy Wärting */ + +if (!globalThis.DOMException) { + try { + const { MessageChannel } = require('worker_threads'), + port = new MessageChannel().port1, + ab = new ArrayBuffer() + port.postMessage(ab, [ab, ab]) + } catch (err) { + err.constructor.name === 'DOMException' && ( + globalThis.DOMException = err.constructor + ) + } +} + +module.exports = globalThis.DOMException + + +const { MessageChannel } = require('worker_threads') + +async function hello() { + const port = new MessageChannel().port1 + const ab = new ArrayBuffer() + port.postMessage(ab, [ab, ab]) +} + +hello().catch(err => { + console.assert(err.name === 'DataCloneError') + console.assert(err.code === 25) + console.assert(err instanceof DOMException) +}) + +const e1 = new DOMException('Something went wrong', 'BadThingsError') +console.assert(e1.name === 'BadThingsError') +console.assert(e1.code === 0) + +const e2 = new DOMException('Another exciting error message', 'NoModificationAllowedError') +console.assert(e2.name === 'NoModificationAllowedError') +console.assert(e2.code === 7) + +console.assert(DOMException.INUSE_ATTRIBUTE_ERR === 10) diff --git a/node_modules/node-domexception/.history/index_20210527214654.js b/node_modules/node-domexception/.history/index_20210527214654.js new file mode 100644 index 0000000..6d5cb8e --- /dev/null +++ b/node_modules/node-domexception/.history/index_20210527214654.js @@ -0,0 +1,41 @@ +/*! node-domexception. MIT License. 
Jimmy Wärting */ + +if (!globalThis.DOMException) { + try { + const { MessageChannel } = require('worker_threads'), + port = new MessageChannel().port1, + ab = new ArrayBuffer() + port.postMessage(ab, [ab, ab]) + } catch (err) { + err.constructor.name === 'DOMException' && ( + globalThis.DOMException = err.constructor + ) + } +} + +module.exports = globalThis.DOMException + + +const { MessageChannel } = require('worker_threads') + +async function hello() { + const port = new MessageChannel().port1 + const ab = new ArrayBuffer() + port.postMessage(ab, [ab, ab]) +} + +hello().catch(err => { + console.assert(err.name === 'DataCloneError') + console.assert(err.code === 21) + console.assert(err instanceof DOMException) +}) + +const e1 = new DOMException('Something went wrong', 'BadThingsError') +console.assert(e1.name === 'BadThingsError') +console.assert(e1.code === 0) + +const e2 = new DOMException('Another exciting error message', 'NoModificationAllowedError') +console.assert(e2.name === 'NoModificationAllowedError') +console.assert(e2.code === 7) + +console.assert(DOMException.INUSE_ATTRIBUTE_ERR === 10) diff --git a/node_modules/node-domexception/.history/index_20210527214700.js b/node_modules/node-domexception/.history/index_20210527214700.js new file mode 100644 index 0000000..b7bbe95 --- /dev/null +++ b/node_modules/node-domexception/.history/index_20210527214700.js @@ -0,0 +1,16 @@ +/*! node-domexception. MIT License. Jimmy Wärting */ + +if (!globalThis.DOMException) { + try { + const { MessageChannel } = require('worker_threads'), + port = new MessageChannel().port1, + ab = new ArrayBuffer() + port.postMessage(ab, [ab, ab]) + } catch (err) { + err.constructor.name === 'DOMException' && ( + globalThis.DOMException = err.constructor + ) + } +} + +module.exports = globalThis.DOMException diff --git a/node_modules/node-domexception/.history/package_20210527203733.json b/node_modules/node-domexception/.history/package_20210527203733.json new file mode 100644 index 0000000..5eeb306 --- /dev/null +++ b/node_modules/node-domexception/.history/package_20210527203733.json @@ -0,0 +1,19 @@ +{ + "name": "domexception", + "version": "1.0.0", + "description": "An implementation of the DOMException class from NodeJS", + "main": "index.js", + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/jimmywarting/node-domexception.git" + }, + "author": "", + "license": "ISC", + "bugs": { + "url": "https://github.com/jimmywarting/node-domexception/issues" + }, + "homepage": "https://github.com/jimmywarting/node-domexception#readme" +} diff --git a/node_modules/node-domexception/.history/package_20210527203825.json b/node_modules/node-domexception/.history/package_20210527203825.json new file mode 100644 index 0000000..4ca1713 --- /dev/null +++ b/node_modules/node-domexception/.history/package_20210527203825.json @@ -0,0 +1,16 @@ +{ + "name": "node-domexception", + "version": "1.0.0", + "description": "An implementation of the DOMException class from NodeJS", + "main": "index.js", + "repository": { + "type": "git", + "url": "git+https://github.com/jimmywarting/node-domexception.git" + }, + "author": "Jimmy Wärting", + "license": "MIT", + "bugs": { + "url": "https://github.com/jimmywarting/node-domexception/issues" + }, + "homepage": "https://github.com/jimmywarting/node-domexception#readme" +} diff --git a/node_modules/node-domexception/.history/package_20210527204621.json 
b/node_modules/node-domexception/.history/package_20210527204621.json new file mode 100644 index 0000000..3c414e9 --- /dev/null +++ b/node_modules/node-domexception/.history/package_20210527204621.json @@ -0,0 +1,19 @@ +{ + "name": "node-domexception", + "version": "1.0.0", + "description": "An implementation of the DOMException class from NodeJS", + "main": "index.js", + "repository": { + "type": "git", + "url": "git+https://github.com/jimmywarting/node-domexception.git" + }, + "engines": { + "node": ">=10.5.0" + }, + "author": "Jimmy Wärting", + "license": "MIT", + "bugs": { + "url": "https://github.com/jimmywarting/node-domexception/issues" + }, + "homepage": "https://github.com/jimmywarting/node-domexception#readme" +} diff --git a/node_modules/node-domexception/.history/package_20210527204913.json b/node_modules/node-domexception/.history/package_20210527204913.json new file mode 100644 index 0000000..dbbb5d2 --- /dev/null +++ b/node_modules/node-domexception/.history/package_20210527204913.json @@ -0,0 +1,25 @@ +{ + "name": "node-domexception", + "version": "1.0.0", + "description": "An implementation of the DOMException class from NodeJS", + "main": "index.js", + "repository": { + "type": "git", + "url": "git+https://github.com/jimmywarting/node-domexception.git" + }, + "engines": { + "node": ">=10.5.0" + }, + "author": "Jimmy Wärting", + "license": "MIT", + "bugs": { + "url": "https://github.com/jimmywarting/node-domexception/issues" + }, + "homepage": "https://github.com/jimmywarting/node-domexception#readme", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/jimmywarting" + } + ] +} diff --git a/node_modules/node-domexception/.history/package_20210527204925.json b/node_modules/node-domexception/.history/package_20210527204925.json new file mode 100644 index 0000000..dbbb5d2 --- /dev/null +++ b/node_modules/node-domexception/.history/package_20210527204925.json @@ -0,0 +1,25 @@ +{ + "name": "node-domexception", + "version": "1.0.0", + "description": "An implementation of the DOMException class from NodeJS", + "main": "index.js", + "repository": { + "type": "git", + "url": "git+https://github.com/jimmywarting/node-domexception.git" + }, + "engines": { + "node": ">=10.5.0" + }, + "author": "Jimmy Wärting", + "license": "MIT", + "bugs": { + "url": "https://github.com/jimmywarting/node-domexception/issues" + }, + "homepage": "https://github.com/jimmywarting/node-domexception#readme", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/jimmywarting" + } + ] +} diff --git a/node_modules/node-domexception/.history/package_20210527205145.json b/node_modules/node-domexception/.history/package_20210527205145.json new file mode 100644 index 0000000..cd08e70 --- /dev/null +++ b/node_modules/node-domexception/.history/package_20210527205145.json @@ -0,0 +1,29 @@ +{ + "name": "node-domexception", + "version": "1.0.0", + "description": "An implementation of the DOMException class from NodeJS", + "main": "index.js", + "repository": { + "type": "git", + "url": "git+https://github.com/jimmywarting/node-domexception.git" + }, + "engines": { + "node": ">=10.5.0" + }, + "author": "Jimmy Wärting", + "license": "MIT", + "bugs": { + "url": "https://github.com/jimmywarting/node-domexception/issues" + }, + "homepage": "https://github.com/jimmywarting/node-domexception#readme", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/jimmywarting" + }, + { + "type": "github", + "url": "https://paypal.me/jimmywarting" + } + ] +} diff 
--git a/node_modules/node-domexception/.history/package_20210527205156.json b/node_modules/node-domexception/.history/package_20210527205156.json new file mode 100644 index 0000000..cd08e70 --- /dev/null +++ b/node_modules/node-domexception/.history/package_20210527205156.json @@ -0,0 +1,29 @@ +{ + "name": "node-domexception", + "version": "1.0.0", + "description": "An implementation of the DOMException class from NodeJS", + "main": "index.js", + "repository": { + "type": "git", + "url": "git+https://github.com/jimmywarting/node-domexception.git" + }, + "engines": { + "node": ">=10.5.0" + }, + "author": "Jimmy Wärting", + "license": "MIT", + "bugs": { + "url": "https://github.com/jimmywarting/node-domexception/issues" + }, + "homepage": "https://github.com/jimmywarting/node-domexception#readme", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/jimmywarting" + }, + { + "type": "github", + "url": "https://paypal.me/jimmywarting" + } + ] +} diff --git a/node_modules/node-domexception/.history/test_20210527205603.js b/node_modules/node-domexception/.history/test_20210527205603.js new file mode 100644 index 0000000..e69de29 diff --git a/node_modules/node-domexception/.history/test_20210527205957.js b/node_modules/node-domexception/.history/test_20210527205957.js new file mode 100644 index 0000000..73feac5 --- /dev/null +++ b/node_modules/node-domexception/.history/test_20210527205957.js @@ -0,0 +1,3 @@ +require('./index.js') + +console.log(DOMException.INDEX_SIZE_ERR) diff --git a/node_modules/node-domexception/.history/test_20210527210021.js b/node_modules/node-domexception/.history/test_20210527210021.js new file mode 100644 index 0000000..be47491 --- /dev/null +++ b/node_modules/node-domexception/.history/test_20210527210021.js @@ -0,0 +1,3 @@ +const e = require('./index.js') + +console.log(e.INDEX_SIZE_ERR) diff --git a/node_modules/node-domexception/LICENSE b/node_modules/node-domexception/LICENSE new file mode 100644 index 0000000..bc8ceb7 --- /dev/null +++ b/node_modules/node-domexception/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2021 Jimmy Wärting + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/node_modules/node-domexception/README.md b/node_modules/node-domexception/README.md new file mode 100644 index 0000000..a369461 --- /dev/null +++ b/node_modules/node-domexception/README.md @@ -0,0 +1,46 @@ +# DOMException +An implementation of the DOMException class from NodeJS + +NodeJS has DOMException built in, but it's not globally available, and you can't require/import it from somewhere. + +This package exposes the [`DOMException`](https://developer.mozilla.org/en-US/docs/Web/API/DOMException) class that comes from NodeJS itself. (including all of the legacy codes) + +(plz don't depend on this package in any other environment other than node >=10.5) + +```js +import DOMException from 'node-domexception' +import { MessageChannel } from 'worker_threads' + +async function hello() { + const port = new MessageChannel().port1 + const ab = new ArrayBuffer() + port.postMessage(ab, [ab, ab]) +} + +hello().catch(err => { + console.assert(err.name === 'DataCloneError') + console.assert(err.code === 25) + console.assert(err instanceof DOMException) +}) + +const e1 = new DOMException('Something went wrong', 'BadThingsError') +console.assert(e1.name === 'BadThingsError') +console.assert(e1.code === 0) + +const e2 = new DOMException('Another exciting error message', 'NoModificationAllowedError') +console.assert(e2.name === 'NoModificationAllowedError') +console.assert(e2.code === 7) + +console.assert(DOMException.INUSE_ATTRIBUTE_ERR === 10) +``` + +# Background + +The only possible way is to use some web-ish tools that have been introduced into NodeJS that throws a DOMException and catch the constructor. This is exactly what this package dose for you and exposes it.
+This way you will have the same class that NodeJS has, and you can check if an error is an instance of DOMException.
+The instanceof check would not have worked with a custom class such as the DOMException provided by domenic, which is also much larger in size since it has to re-construct the whole class from the ground up.
+
+The DOMException class is used in many places such as the Fetch API, File & Blob, postMessage, and more.
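To make the instanceof point above concrete, here is a minimal CommonJS sketch (assuming Node.js >= 10.5). It forces a `DataCloneError` with the same `worker_threads` trick the package itself uses, then checks the thrown error against the constructor this package exports:

```js
// Minimal sketch: Node throws a real DOMException here, so an instanceof check
// against the constructor exported by node-domexception passes.
const DOMException = require('node-domexception')
const { MessageChannel } = require('worker_threads')

let thrown
try {
  const port = new MessageChannel().port1
  const ab = new ArrayBuffer()
  // Listing the same ArrayBuffer twice in the transfer list throws a DataCloneError.
  port.postMessage(ab, [ab, ab])
} catch (err) {
  thrown = err
}

console.assert(thrown instanceof DOMException) // true: the very class Node uses internally
console.assert(thrown.name === 'DataCloneError')
console.assert(thrown.code === 25)
```

A userland re-implementation of the class would fail this `instanceof` check, which is the reason to expose Node's own constructor instead.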
+Why they decided to call it **DOM**, I don't know + +Please consider sponsoring if you find this helpful diff --git a/node_modules/node-domexception/index.js b/node_modules/node-domexception/index.js new file mode 100644 index 0000000..b7bbe95 --- /dev/null +++ b/node_modules/node-domexception/index.js @@ -0,0 +1,16 @@ +/*! node-domexception. MIT License. Jimmy Wärting */ + +if (!globalThis.DOMException) { + try { + const { MessageChannel } = require('worker_threads'), + port = new MessageChannel().port1, + ab = new ArrayBuffer() + port.postMessage(ab, [ab, ab]) + } catch (err) { + err.constructor.name === 'DOMException' && ( + globalThis.DOMException = err.constructor + ) + } +} + +module.exports = globalThis.DOMException diff --git a/node_modules/node-domexception/package.json b/node_modules/node-domexception/package.json new file mode 100644 index 0000000..cd08e70 --- /dev/null +++ b/node_modules/node-domexception/package.json @@ -0,0 +1,29 @@ +{ + "name": "node-domexception", + "version": "1.0.0", + "description": "An implementation of the DOMException class from NodeJS", + "main": "index.js", + "repository": { + "type": "git", + "url": "git+https://github.com/jimmywarting/node-domexception.git" + }, + "engines": { + "node": ">=10.5.0" + }, + "author": "Jimmy Wärting", + "license": "MIT", + "bugs": { + "url": "https://github.com/jimmywarting/node-domexception/issues" + }, + "homepage": "https://github.com/jimmywarting/node-domexception#readme", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/jimmywarting" + }, + { + "type": "github", + "url": "https://paypal.me/jimmywarting" + } + ] +} diff --git a/node_modules/node-fetch/LICENSE.md b/node_modules/node-fetch/LICENSE.md new file mode 100644 index 0000000..660ffec --- /dev/null +++ b/node_modules/node-fetch/LICENSE.md @@ -0,0 +1,22 @@ +The MIT License (MIT) + +Copyright (c) 2016 David Frank + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
+ diff --git a/node_modules/node-fetch/README.md b/node_modules/node-fetch/README.md new file mode 100644 index 0000000..55f09b7 --- /dev/null +++ b/node_modules/node-fetch/README.md @@ -0,0 +1,634 @@ +node-fetch +========== + +[![npm version][npm-image]][npm-url] +[![build status][travis-image]][travis-url] +[![coverage status][codecov-image]][codecov-url] +[![install size][install-size-image]][install-size-url] +[![Discord][discord-image]][discord-url] + +A light-weight module that brings `window.fetch` to Node.js + +(We are looking for [v2 maintainers and collaborators](https://github.com/bitinn/node-fetch/issues/567)) + +[![Backers][opencollective-image]][opencollective-url] + + + +- [Motivation](#motivation) +- [Features](#features) +- [Difference from client-side fetch](#difference-from-client-side-fetch) +- [Installation](#installation) +- [Loading and configuring the module](#loading-and-configuring-the-module) +- [Common Usage](#common-usage) + - [Plain text or HTML](#plain-text-or-html) + - [JSON](#json) + - [Simple Post](#simple-post) + - [Post with JSON](#post-with-json) + - [Post with form parameters](#post-with-form-parameters) + - [Handling exceptions](#handling-exceptions) + - [Handling client and server errors](#handling-client-and-server-errors) +- [Advanced Usage](#advanced-usage) + - [Streams](#streams) + - [Buffer](#buffer) + - [Accessing Headers and other Meta data](#accessing-headers-and-other-meta-data) + - [Extract Set-Cookie Header](#extract-set-cookie-header) + - [Post data using a file stream](#post-data-using-a-file-stream) + - [Post with form-data (detect multipart)](#post-with-form-data-detect-multipart) + - [Request cancellation with AbortSignal](#request-cancellation-with-abortsignal) +- [API](#api) + - [fetch(url[, options])](#fetchurl-options) + - [Options](#options) + - [Class: Request](#class-request) + - [Class: Response](#class-response) + - [Class: Headers](#class-headers) + - [Interface: Body](#interface-body) + - [Class: FetchError](#class-fetcherror) +- [License](#license) +- [Acknowledgement](#acknowledgement) + + + +## Motivation + +Instead of implementing `XMLHttpRequest` in Node.js to run browser-specific [Fetch polyfill](https://github.com/github/fetch), why not go from native `http` to `fetch` API directly? Hence, `node-fetch`, minimal code for a `window.fetch` compatible API on Node.js runtime. + +See Matt Andrews' [isomorphic-fetch](https://github.com/matthew-andrews/isomorphic-fetch) or Leonardo Quixada's [cross-fetch](https://github.com/lquixada/cross-fetch) for isomorphic usage (exports `node-fetch` for server-side, `whatwg-fetch` for client-side). + +## Features + +- Stay consistent with `window.fetch` API. +- Make conscious trade-off when following [WHATWG fetch spec][whatwg-fetch] and [stream spec](https://streams.spec.whatwg.org/) implementation details, document known differences. +- Use native promise but allow substituting it with [insert your favorite promise library]. +- Use native Node streams for body on both request and response. +- Decode content encoding (gzip/deflate) properly and convert string output (such as `res.text()` and `res.json()`) to UTF-8 automatically. +- Useful extensions such as timeout, redirect limit, response size limit, [explicit errors](ERROR-HANDLING.md) for troubleshooting. + +## Difference from client-side fetch + +- See [Known Differences](LIMITS.md) for details. +- If you happen to use a missing feature that `window.fetch` offers, feel free to open an issue. +- Pull requests are welcomed too! 
+ +## Installation + +Current stable release (`2.x`) + +```sh +$ npm install node-fetch +``` + +## Loading and configuring the module +We suggest you load the module via `require` until the stabilization of ES modules in node: +```js +const fetch = require('node-fetch'); +``` + +If you are using a Promise library other than native, set it through `fetch.Promise`: +```js +const Bluebird = require('bluebird'); + +fetch.Promise = Bluebird; +``` + +## Common Usage + +NOTE: The documentation below is up-to-date with `2.x` releases; see the [`1.x` readme](https://github.com/bitinn/node-fetch/blob/1.x/README.md), [changelog](https://github.com/bitinn/node-fetch/blob/1.x/CHANGELOG.md) and [2.x upgrade guide](UPGRADE-GUIDE.md) for the differences. + +#### Plain text or HTML +```js +fetch('https://github.com/') + .then(res => res.text()) + .then(body => console.log(body)); +``` + +#### JSON + +```js + +fetch('https://api.github.com/users/github') + .then(res => res.json()) + .then(json => console.log(json)); +``` + +#### Simple Post +```js +fetch('https://httpbin.org/post', { method: 'POST', body: 'a=1' }) + .then(res => res.json()) // expecting a json response + .then(json => console.log(json)); +``` + +#### Post with JSON + +```js +const body = { a: 1 }; + +fetch('https://httpbin.org/post', { + method: 'post', + body: JSON.stringify(body), + headers: { 'Content-Type': 'application/json' }, + }) + .then(res => res.json()) + .then(json => console.log(json)); +``` + +#### Post with form parameters +`URLSearchParams` is available in Node.js as of v7.5.0. See [official documentation](https://nodejs.org/api/url.html#url_class_urlsearchparams) for more usage methods. + +NOTE: The `Content-Type` header is only set automatically to `x-www-form-urlencoded` when an instance of `URLSearchParams` is given as such: + +```js +const { URLSearchParams } = require('url'); + +const params = new URLSearchParams(); +params.append('a', 1); + +fetch('https://httpbin.org/post', { method: 'POST', body: params }) + .then(res => res.json()) + .then(json => console.log(json)); +``` + +#### Handling exceptions +NOTE: 3xx-5xx responses are *NOT* exceptions and should be handled in `then()`; see the next section for more information. + +Adding a catch to the fetch promise chain will catch *all* exceptions, such as errors originating from node core libraries, network errors and operational errors, which are instances of FetchError. See the [error handling document](ERROR-HANDLING.md) for more details. + +```js +fetch('https://domain.invalid/') + .catch(err => console.error(err)); +``` + +#### Handling client and server errors +It is common to create a helper function to check that the response contains no client (4xx) or server (5xx) error responses: + +```js +function checkStatus(res) { + if (res.ok) { // res.status >= 200 && res.status < 300 + return res; + } else { + throw MyCustomError(res.statusText); + } +} + +fetch('https://httpbin.org/status/400') + .then(checkStatus) + .then(res => console.log('will not get here...')) +``` + +## Advanced Usage + +#### Streams +The "Node.js way" is to use streams when possible: + +```js +fetch('https://assets-cdn.github.com/images/modules/logos_page/Octocat.png') + .then(res => { + const dest = fs.createWriteStream('./octocat.png'); + res.body.pipe(dest); + }); +``` + +In Node.js 14 you can also use async iterators to read `body`; however, be careful to catch +errors -- the longer a response runs, the more likely it is to encounter an error. 
+ +```js +const fetch = require('node-fetch'); +const response = await fetch('https://httpbin.org/stream/3'); +try { + for await (const chunk of response.body) { + console.dir(JSON.parse(chunk.toString())); + } +} catch (err) { + console.error(err.stack); +} +``` + +In Node.js 12 you can also use async iterators to read `body`; however, async iterators with streams +did not mature until Node.js 14, so you need to do some extra work to ensure you handle errors +directly from the stream and wait on it response to fully close. + +```js +const fetch = require('node-fetch'); +const read = async body => { + let error; + body.on('error', err => { + error = err; + }); + for await (const chunk of body) { + console.dir(JSON.parse(chunk.toString())); + } + return new Promise((resolve, reject) => { + body.on('close', () => { + error ? reject(error) : resolve(); + }); + }); +}; +try { + const response = await fetch('https://httpbin.org/stream/3'); + await read(response.body); +} catch (err) { + console.error(err.stack); +} +``` + +#### Buffer +If you prefer to cache binary data in full, use buffer(). (NOTE: `buffer()` is a `node-fetch`-only API) + +```js +const fileType = require('file-type'); + +fetch('https://assets-cdn.github.com/images/modules/logos_page/Octocat.png') + .then(res => res.buffer()) + .then(buffer => fileType(buffer)) + .then(type => { /* ... */ }); +``` + +#### Accessing Headers and other Meta data +```js +fetch('https://github.com/') + .then(res => { + console.log(res.ok); + console.log(res.status); + console.log(res.statusText); + console.log(res.headers.raw()); + console.log(res.headers.get('content-type')); + }); +``` + +#### Extract Set-Cookie Header + +Unlike browsers, you can access raw `Set-Cookie` headers manually using `Headers.raw()`. This is a `node-fetch` only API. + +```js +fetch(url).then(res => { + // returns an array of values, instead of a string of comma-separated values + console.log(res.headers.raw()['set-cookie']); +}); +``` + +#### Post data using a file stream + +```js +const { createReadStream } = require('fs'); + +const stream = createReadStream('input.txt'); + +fetch('https://httpbin.org/post', { method: 'POST', body: stream }) + .then(res => res.json()) + .then(json => console.log(json)); +``` + +#### Post with form-data (detect multipart) + +```js +const FormData = require('form-data'); + +const form = new FormData(); +form.append('a', 1); + +fetch('https://httpbin.org/post', { method: 'POST', body: form }) + .then(res => res.json()) + .then(json => console.log(json)); + +// OR, using custom headers +// NOTE: getHeaders() is non-standard API + +const form = new FormData(); +form.append('a', 1); + +const options = { + method: 'POST', + body: form, + headers: form.getHeaders() +} + +fetch('https://httpbin.org/post', options) + .then(res => res.json()) + .then(json => console.log(json)); +``` + +#### Request cancellation with AbortSignal + +> NOTE: You may cancel streamed requests only on Node >= v8.0.0 + +You may cancel requests with `AbortController`. A suggested implementation is [`abort-controller`](https://www.npmjs.com/package/abort-controller). 
+ +An example of timing out a request after 150ms could be achieved as the following: + +```js +import AbortController from 'abort-controller'; + +const controller = new AbortController(); +const timeout = setTimeout( + () => { controller.abort(); }, + 150, +); + +fetch(url, { signal: controller.signal }) + .then(res => res.json()) + .then( + data => { + useData(data) + }, + err => { + if (err.name === 'AbortError') { + // request was aborted + } + }, + ) + .finally(() => { + clearTimeout(timeout); + }); +``` + +See [test cases](https://github.com/bitinn/node-fetch/blob/master/test/test.js) for more examples. + + +## API + +### fetch(url[, options]) + +- `url` A string representing the URL for fetching +- `options` [Options](#fetch-options) for the HTTP(S) request +- Returns: Promise<[Response](#class-response)> + +Perform an HTTP(S) fetch. + +`url` should be an absolute url, such as `https://example.com/`. A path-relative URL (`/file/under/root`) or protocol-relative URL (`//can-be-http-or-https.com/`) will result in a rejected `Promise`. + + +### Options + +The default values are shown after each option key. + +```js +{ + // These properties are part of the Fetch Standard + method: 'GET', + headers: {}, // request headers. format is the identical to that accepted by the Headers constructor (see below) + body: null, // request body. can be null, a string, a Buffer, a Blob, or a Node.js Readable stream + redirect: 'follow', // set to `manual` to extract redirect headers, `error` to reject redirect + signal: null, // pass an instance of AbortSignal to optionally abort requests + + // The following properties are node-fetch extensions + follow: 20, // maximum redirect count. 0 to not follow redirect + timeout: 0, // req/res timeout in ms, it resets on redirect. 0 to disable (OS limit applies). Signal is recommended instead. + compress: true, // support gzip/deflate content encoding. false to disable + size: 0, // maximum response body size in bytes. 0 to disable + agent: null // http(s).Agent instance or function that returns an instance (see below) +} +``` + +##### Default Headers + +If no values are set, the following request headers will be sent automatically: + +Header | Value +------------------- | -------------------------------------------------------- +`Accept-Encoding` | `gzip,deflate` _(when `options.compress === true`)_ +`Accept` | `*/*` +`Content-Length` | _(automatically calculated, if possible)_ +`Transfer-Encoding` | `chunked` _(when `req.body` is a stream)_ +`User-Agent` | `node-fetch/1.0 (+https://github.com/bitinn/node-fetch)` + +Note: when `body` is a `Stream`, `Content-Length` is not set automatically. + +##### Custom Agent + +The `agent` option allows you to specify networking related options which are out of the scope of Fetch, including and not limited to the following: + +- Support self-signed certificate +- Use only IPv4 or IPv6 +- Custom DNS Lookup + +See [`http.Agent`](https://nodejs.org/api/http.html#http_new_agent_options) for more information. + +If no agent is specified, the default agent provided by Node.js is used. Note that [this changed in Node.js 19](https://github.com/nodejs/node/blob/4267b92604ad78584244488e7f7508a690cb80d0/lib/_http_agent.js#L564) to have `keepalive` true by default. If you wish to enable `keepalive` in an earlier version of Node.js, you can override the agent as per the following code sample. 
+ +In addition, the `agent` option accepts a function that returns `http`(s)`.Agent` instance given current [URL](https://nodejs.org/api/url.html), this is useful during a redirection chain across HTTP and HTTPS protocol. + +```js +const httpAgent = new http.Agent({ + keepAlive: true +}); +const httpsAgent = new https.Agent({ + keepAlive: true +}); + +const options = { + agent: function (_parsedURL) { + if (_parsedURL.protocol == 'http:') { + return httpAgent; + } else { + return httpsAgent; + } + } +} +``` + + +### Class: Request + +An HTTP(S) request containing information about URL, method, headers, and the body. This class implements the [Body](#iface-body) interface. + +Due to the nature of Node.js, the following properties are not implemented at this moment: + +- `type` +- `destination` +- `referrer` +- `referrerPolicy` +- `mode` +- `credentials` +- `cache` +- `integrity` +- `keepalive` + +The following node-fetch extension properties are provided: + +- `follow` +- `compress` +- `counter` +- `agent` + +See [options](#fetch-options) for exact meaning of these extensions. + +#### new Request(input[, options]) + +*(spec-compliant)* + +- `input` A string representing a URL, or another `Request` (which will be cloned) +- `options` [Options][#fetch-options] for the HTTP(S) request + +Constructs a new `Request` object. The constructor is identical to that in the [browser](https://developer.mozilla.org/en-US/docs/Web/API/Request/Request). + +In most cases, directly `fetch(url, options)` is simpler than creating a `Request` object. + + +### Class: Response + +An HTTP(S) response. This class implements the [Body](#iface-body) interface. + +The following properties are not implemented in node-fetch at this moment: + +- `Response.error()` +- `Response.redirect()` +- `type` +- `trailer` + +#### new Response([body[, options]]) + +*(spec-compliant)* + +- `body` A `String` or [`Readable` stream][node-readable] +- `options` A [`ResponseInit`][response-init] options dictionary + +Constructs a new `Response` object. The constructor is identical to that in the [browser](https://developer.mozilla.org/en-US/docs/Web/API/Response/Response). + +Because Node.js does not implement service workers (for which this class was designed), one rarely has to construct a `Response` directly. + +#### response.ok + +*(spec-compliant)* + +Convenience property representing if the request ended normally. Will evaluate to true if the response status was greater than or equal to 200 but smaller than 300. + +#### response.redirected + +*(spec-compliant)* + +Convenience property representing if the request has been redirected at least once. Will evaluate to true if the internal redirect counter is greater than 0. + + +### Class: Headers + +This class allows manipulating and iterating over a set of HTTP headers. All methods specified in the [Fetch Standard][whatwg-fetch] are implemented. + +#### new Headers([init]) + +*(spec-compliant)* + +- `init` Optional argument to pre-fill the `Headers` object + +Construct a new `Headers` object. `init` can be either `null`, a `Headers` object, an key-value map object or any iterable object. 
+ +```js +// Example adapted from https://fetch.spec.whatwg.org/#example-headers-class + +const meta = { + 'Content-Type': 'text/xml', + 'Breaking-Bad': '<3' +}; +const headers = new Headers(meta); + +// The above is equivalent to +const meta = [ + [ 'Content-Type', 'text/xml' ], + [ 'Breaking-Bad', '<3' ] +]; +const headers = new Headers(meta); + +// You can in fact use any iterable objects, like a Map or even another Headers +const meta = new Map(); +meta.set('Content-Type', 'text/xml'); +meta.set('Breaking-Bad', '<3'); +const headers = new Headers(meta); +const copyOfHeaders = new Headers(headers); +``` + + +### Interface: Body + +`Body` is an abstract interface with methods that are applicable to both `Request` and `Response` classes. + +The following methods are not yet implemented in node-fetch at this moment: + +- `formData()` + +#### body.body + +*(deviation from spec)* + +* Node.js [`Readable` stream][node-readable] + +Data are encapsulated in the `Body` object. Note that while the [Fetch Standard][whatwg-fetch] requires the property to always be a WHATWG `ReadableStream`, in node-fetch it is a Node.js [`Readable` stream][node-readable]. + +#### body.bodyUsed + +*(spec-compliant)* + +* `Boolean` + +A boolean property for if this body has been consumed. Per the specs, a consumed body cannot be used again. + +#### body.arrayBuffer() +#### body.blob() +#### body.json() +#### body.text() + +*(spec-compliant)* + +* Returns: Promise + +Consume the body and return a promise that will resolve to one of these formats. + +#### body.buffer() + +*(node-fetch extension)* + +* Returns: Promise<Buffer> + +Consume the body and return a promise that will resolve to a Buffer. + +#### body.textConverted() + +*(node-fetch extension)* + +* Returns: Promise<String> + +Identical to `body.text()`, except instead of always converting to UTF-8, encoding sniffing will be performed and text converted to UTF-8 if possible. + +(This API requires an optional dependency of the npm package [encoding](https://www.npmjs.com/package/encoding), which you need to install manually. `webpack` users may see [a warning message](https://github.com/bitinn/node-fetch/issues/412#issuecomment-379007792) due to this optional dependency.) + + +### Class: FetchError + +*(node-fetch extension)* + +An operational error in the fetching process. See [ERROR-HANDLING.md][] for more info. + + +### Class: AbortError + +*(node-fetch extension)* + +An Error thrown when the request is aborted in response to an `AbortSignal`'s `abort` event. It has a `name` property of `AbortError`. See [ERROR-HANDLING.MD][] for more info. + +## Acknowledgement + +Thanks to [github/fetch](https://github.com/github/fetch) for providing a solid implementation reference. + +`node-fetch` v1 was maintained by [@bitinn](https://github.com/bitinn); v2 was maintained by [@TimothyGu](https://github.com/timothygu), [@bitinn](https://github.com/bitinn) and [@jimmywarting](https://github.com/jimmywarting); v2 readme is written by [@jkantr](https://github.com/jkantr). 
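As a hedged illustration of the `Body` consumption methods documented above (node-fetch 2.x, CommonJS; the httpbin.org URLs are just example endpoints), a body can be read exactly once, which is what `bodyUsed` reflects:

```js
// Sketch of consuming a response body with the methods listed above.
const fetch = require('node-fetch');

(async () => {
  const res = await fetch('https://httpbin.org/json');
  console.log(res.bodyUsed);      // false: nothing has been read yet

  const data = await res.json();  // parses the body as JSON and consumes it
  console.log(Object.keys(data));
  console.log(res.bodyUsed);      // true: per spec, the body cannot be read again

  // buffer() is a node-fetch-only extension that resolves to a Node.js Buffer.
  const res2 = await fetch('https://httpbin.org/bytes/16');
  const buf = await res2.buffer();
  console.log(buf.length);        // 16
})().catch(err => console.error(err));
```

Note that `buffer()` and `textConverted()` are node-fetch extensions, so this sketch is not portable to browser `fetch`.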
+ +## License + +MIT + +[npm-image]: https://flat.badgen.net/npm/v/node-fetch +[npm-url]: https://www.npmjs.com/package/node-fetch +[travis-image]: https://flat.badgen.net/travis/bitinn/node-fetch +[travis-url]: https://travis-ci.org/bitinn/node-fetch +[codecov-image]: https://flat.badgen.net/codecov/c/github/bitinn/node-fetch/master +[codecov-url]: https://codecov.io/gh/bitinn/node-fetch +[install-size-image]: https://flat.badgen.net/packagephobia/install/node-fetch +[install-size-url]: https://packagephobia.now.sh/result?p=node-fetch +[discord-image]: https://img.shields.io/discord/619915844268326952?color=%237289DA&label=Discord&style=flat-square +[discord-url]: https://discord.gg/Zxbndcm +[opencollective-image]: https://opencollective.com/node-fetch/backers.svg +[opencollective-url]: https://opencollective.com/node-fetch +[whatwg-fetch]: https://fetch.spec.whatwg.org/ +[response-init]: https://fetch.spec.whatwg.org/#responseinit +[node-readable]: https://nodejs.org/api/stream.html#stream_readable_streams +[mdn-headers]: https://developer.mozilla.org/en-US/docs/Web/API/Headers +[LIMITS.md]: https://github.com/bitinn/node-fetch/blob/master/LIMITS.md +[ERROR-HANDLING.md]: https://github.com/bitinn/node-fetch/blob/master/ERROR-HANDLING.md +[UPGRADE-GUIDE.md]: https://github.com/bitinn/node-fetch/blob/master/UPGRADE-GUIDE.md diff --git a/node_modules/node-fetch/browser.js b/node_modules/node-fetch/browser.js new file mode 100644 index 0000000..ee86265 --- /dev/null +++ b/node_modules/node-fetch/browser.js @@ -0,0 +1,25 @@ +"use strict"; + +// ref: https://github.com/tc39/proposal-global +var getGlobal = function () { + // the only reliable means to get the global object is + // `Function('return this')()` + // However, this causes CSP violations in Chrome apps. + if (typeof self !== 'undefined') { return self; } + if (typeof window !== 'undefined') { return window; } + if (typeof global !== 'undefined') { return global; } + throw new Error('unable to locate global object'); +} + +var globalObject = getGlobal(); + +module.exports = exports = globalObject.fetch; + +// Needed for TypeScript and Webpack. +if (globalObject.fetch) { + exports.default = globalObject.fetch.bind(globalObject); +} + +exports.Headers = globalObject.Headers; +exports.Request = globalObject.Request; +exports.Response = globalObject.Response; diff --git a/node_modules/node-fetch/lib/index.es.js b/node_modules/node-fetch/lib/index.es.js new file mode 100644 index 0000000..aae9799 --- /dev/null +++ b/node_modules/node-fetch/lib/index.es.js @@ -0,0 +1,1777 @@ +process.emitWarning("The .es.js file is deprecated. 
Use .mjs instead."); + +import Stream from 'stream'; +import http from 'http'; +import Url from 'url'; +import whatwgUrl from 'whatwg-url'; +import https from 'https'; +import zlib from 'zlib'; + +// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js + +// fix for "Readable" isn't a named export issue +const Readable = Stream.Readable; + +const BUFFER = Symbol('buffer'); +const TYPE = Symbol('type'); + +class Blob { + constructor() { + this[TYPE] = ''; + + const blobParts = arguments[0]; + const options = arguments[1]; + + const buffers = []; + let size = 0; + + if (blobParts) { + const a = blobParts; + const length = Number(a.length); + for (let i = 0; i < length; i++) { + const element = a[i]; + let buffer; + if (element instanceof Buffer) { + buffer = element; + } else if (ArrayBuffer.isView(element)) { + buffer = Buffer.from(element.buffer, element.byteOffset, element.byteLength); + } else if (element instanceof ArrayBuffer) { + buffer = Buffer.from(element); + } else if (element instanceof Blob) { + buffer = element[BUFFER]; + } else { + buffer = Buffer.from(typeof element === 'string' ? element : String(element)); + } + size += buffer.length; + buffers.push(buffer); + } + } + + this[BUFFER] = Buffer.concat(buffers); + + let type = options && options.type !== undefined && String(options.type).toLowerCase(); + if (type && !/[^\u0020-\u007E]/.test(type)) { + this[TYPE] = type; + } + } + get size() { + return this[BUFFER].length; + } + get type() { + return this[TYPE]; + } + text() { + return Promise.resolve(this[BUFFER].toString()); + } + arrayBuffer() { + const buf = this[BUFFER]; + const ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); + return Promise.resolve(ab); + } + stream() { + const readable = new Readable(); + readable._read = function () {}; + readable.push(this[BUFFER]); + readable.push(null); + return readable; + } + toString() { + return '[object Blob]'; + } + slice() { + const size = this.size; + + const start = arguments[0]; + const end = arguments[1]; + let relativeStart, relativeEnd; + if (start === undefined) { + relativeStart = 0; + } else if (start < 0) { + relativeStart = Math.max(size + start, 0); + } else { + relativeStart = Math.min(start, size); + } + if (end === undefined) { + relativeEnd = size; + } else if (end < 0) { + relativeEnd = Math.max(size + end, 0); + } else { + relativeEnd = Math.min(end, size); + } + const span = Math.max(relativeEnd - relativeStart, 0); + + const buffer = this[BUFFER]; + const slicedBuffer = buffer.slice(relativeStart, relativeStart + span); + const blob = new Blob([], { type: arguments[2] }); + blob[BUFFER] = slicedBuffer; + return blob; + } +} + +Object.defineProperties(Blob.prototype, { + size: { enumerable: true }, + type: { enumerable: true }, + slice: { enumerable: true } +}); + +Object.defineProperty(Blob.prototype, Symbol.toStringTag, { + value: 'Blob', + writable: false, + enumerable: false, + configurable: true +}); + +/** + * fetch-error.js + * + * FetchError interface for operational errors + */ + +/** + * Create FetchError instance + * + * @param String message Error message for human + * @param String type Error type for machine + * @param String systemError For Node.js system error + * @return FetchError + */ +function FetchError(message, type, systemError) { + Error.call(this, message); + + this.message = message; + this.type = type; + + // when err.type is `system`, err.code contains system error code + if (systemError) { + 
this.code = this.errno = systemError.code; + } + + // hide custom error implementation details from end-users + Error.captureStackTrace(this, this.constructor); +} + +FetchError.prototype = Object.create(Error.prototype); +FetchError.prototype.constructor = FetchError; +FetchError.prototype.name = 'FetchError'; + +let convert; +try { + convert = require('encoding').convert; +} catch (e) {} + +const INTERNALS = Symbol('Body internals'); + +// fix an issue where "PassThrough" isn't a named export for node <10 +const PassThrough = Stream.PassThrough; + +/** + * Body mixin + * + * Ref: https://fetch.spec.whatwg.org/#body + * + * @param Stream body Readable stream + * @param Object opts Response options + * @return Void + */ +function Body(body) { + var _this = this; + + var _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}, + _ref$size = _ref.size; + + let size = _ref$size === undefined ? 0 : _ref$size; + var _ref$timeout = _ref.timeout; + let timeout = _ref$timeout === undefined ? 0 : _ref$timeout; + + if (body == null) { + // body is undefined or null + body = null; + } else if (isURLSearchParams(body)) { + // body is a URLSearchParams + body = Buffer.from(body.toString()); + } else if (isBlob(body)) ; else if (Buffer.isBuffer(body)) ; else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') { + // body is ArrayBuffer + body = Buffer.from(body); + } else if (ArrayBuffer.isView(body)) { + // body is ArrayBufferView + body = Buffer.from(body.buffer, body.byteOffset, body.byteLength); + } else if (body instanceof Stream) ; else { + // none of the above + // coerce to string then buffer + body = Buffer.from(String(body)); + } + this[INTERNALS] = { + body, + disturbed: false, + error: null + }; + this.size = size; + this.timeout = timeout; + + if (body instanceof Stream) { + body.on('error', function (err) { + const error = err.name === 'AbortError' ? 
err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err); + _this[INTERNALS].error = error; + }); + } +} + +Body.prototype = { + get body() { + return this[INTERNALS].body; + }, + + get bodyUsed() { + return this[INTERNALS].disturbed; + }, + + /** + * Decode response as ArrayBuffer + * + * @return Promise + */ + arrayBuffer() { + return consumeBody.call(this).then(function (buf) { + return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); + }); + }, + + /** + * Return raw response as Blob + * + * @return Promise + */ + blob() { + let ct = this.headers && this.headers.get('content-type') || ''; + return consumeBody.call(this).then(function (buf) { + return Object.assign( + // Prevent copying + new Blob([], { + type: ct.toLowerCase() + }), { + [BUFFER]: buf + }); + }); + }, + + /** + * Decode response as json + * + * @return Promise + */ + json() { + var _this2 = this; + + return consumeBody.call(this).then(function (buffer) { + try { + return JSON.parse(buffer.toString()); + } catch (err) { + return Body.Promise.reject(new FetchError(`invalid json response body at ${_this2.url} reason: ${err.message}`, 'invalid-json')); + } + }); + }, + + /** + * Decode response as text + * + * @return Promise + */ + text() { + return consumeBody.call(this).then(function (buffer) { + return buffer.toString(); + }); + }, + + /** + * Decode response as buffer (non-spec api) + * + * @return Promise + */ + buffer() { + return consumeBody.call(this); + }, + + /** + * Decode response as text, while automatically detecting the encoding and + * trying to decode to UTF-8 (non-spec api) + * + * @return Promise + */ + textConverted() { + var _this3 = this; + + return consumeBody.call(this).then(function (buffer) { + return convertBody(buffer, _this3.headers); + }); + } +}; + +// In browsers, all properties are enumerable. +Object.defineProperties(Body.prototype, { + body: { enumerable: true }, + bodyUsed: { enumerable: true }, + arrayBuffer: { enumerable: true }, + blob: { enumerable: true }, + json: { enumerable: true }, + text: { enumerable: true } +}); + +Body.mixIn = function (proto) { + for (const name of Object.getOwnPropertyNames(Body.prototype)) { + // istanbul ignore else: future proof + if (!(name in proto)) { + const desc = Object.getOwnPropertyDescriptor(Body.prototype, name); + Object.defineProperty(proto, name, desc); + } + } +}; + +/** + * Consume and convert an entire Body to a Buffer. 
+ * + * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body + * + * @return Promise + */ +function consumeBody() { + var _this4 = this; + + if (this[INTERNALS].disturbed) { + return Body.Promise.reject(new TypeError(`body used already for: ${this.url}`)); + } + + this[INTERNALS].disturbed = true; + + if (this[INTERNALS].error) { + return Body.Promise.reject(this[INTERNALS].error); + } + + let body = this.body; + + // body is null + if (body === null) { + return Body.Promise.resolve(Buffer.alloc(0)); + } + + // body is blob + if (isBlob(body)) { + body = body.stream(); + } + + // body is buffer + if (Buffer.isBuffer(body)) { + return Body.Promise.resolve(body); + } + + // istanbul ignore if: should never happen + if (!(body instanceof Stream)) { + return Body.Promise.resolve(Buffer.alloc(0)); + } + + // body is stream + // get ready to actually consume the body + let accum = []; + let accumBytes = 0; + let abort = false; + + return new Body.Promise(function (resolve, reject) { + let resTimeout; + + // allow timeout on slow response body + if (_this4.timeout) { + resTimeout = setTimeout(function () { + abort = true; + reject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout')); + }, _this4.timeout); + } + + // handle stream errors + body.on('error', function (err) { + if (err.name === 'AbortError') { + // if the request was aborted, reject with this Error + abort = true; + reject(err); + } else { + // other errors, such as incorrect content-encoding + reject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err)); + } + }); + + body.on('data', function (chunk) { + if (abort || chunk === null) { + return; + } + + if (_this4.size && accumBytes + chunk.length > _this4.size) { + abort = true; + reject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 'max-size')); + return; + } + + accumBytes += chunk.length; + accum.push(chunk); + }); + + body.on('end', function () { + if (abort) { + return; + } + + clearTimeout(resTimeout); + + try { + resolve(Buffer.concat(accum, accumBytes)); + } catch (err) { + // handle streams that have accumulated too much data (issue #414) + reject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err)); + } + }); + }); +} + +/** + * Detect buffer encoding and convert to target encoding + * ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding + * + * @param Buffer buffer Incoming buffer + * @param String encoding Target encoding + * @return String + */ +function convertBody(buffer, headers) { + if (typeof convert !== 'function') { + throw new Error('The package `encoding` must be installed to use the textConverted() function'); + } + + const ct = headers.get('content-type'); + let charset = 'utf-8'; + let res, str; + + // header + if (ct) { + res = /charset=([^;]*)/i.exec(ct); + } + + // no charset in content type, peek at response body for at most 1024 bytes + str = buffer.slice(0, 1024).toString(); + + // html5 + if (!res && str) { + res = / 0 && arguments[0] !== undefined ? 
arguments[0] : undefined; + + this[MAP] = Object.create(null); + + if (init instanceof Headers) { + const rawHeaders = init.raw(); + const headerNames = Object.keys(rawHeaders); + + for (const headerName of headerNames) { + for (const value of rawHeaders[headerName]) { + this.append(headerName, value); + } + } + + return; + } + + // We don't worry about converting prop to ByteString here as append() + // will handle it. + if (init == null) ; else if (typeof init === 'object') { + const method = init[Symbol.iterator]; + if (method != null) { + if (typeof method !== 'function') { + throw new TypeError('Header pairs must be iterable'); + } + + // sequence> + // Note: per spec we have to first exhaust the lists then process them + const pairs = []; + for (const pair of init) { + if (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') { + throw new TypeError('Each header pair must be iterable'); + } + pairs.push(Array.from(pair)); + } + + for (const pair of pairs) { + if (pair.length !== 2) { + throw new TypeError('Each header pair must be a name/value tuple'); + } + this.append(pair[0], pair[1]); + } + } else { + // record + for (const key of Object.keys(init)) { + const value = init[key]; + this.append(key, value); + } + } + } else { + throw new TypeError('Provided initializer must be an object'); + } + } + + /** + * Return combined header value given name + * + * @param String name Header name + * @return Mixed + */ + get(name) { + name = `${name}`; + validateName(name); + const key = find(this[MAP], name); + if (key === undefined) { + return null; + } + + return this[MAP][key].join(', '); + } + + /** + * Iterate over all headers + * + * @param Function callback Executed for each item with parameters (value, name, thisArg) + * @param Boolean thisArg `this` context for callback function + * @return Void + */ + forEach(callback) { + let thisArg = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : undefined; + + let pairs = getHeaders(this); + let i = 0; + while (i < pairs.length) { + var _pairs$i = pairs[i]; + const name = _pairs$i[0], + value = _pairs$i[1]; + + callback.call(thisArg, value, name, this); + pairs = getHeaders(this); + i++; + } + } + + /** + * Overwrite header values given name + * + * @param String name Header name + * @param String value Header value + * @return Void + */ + set(name, value) { + name = `${name}`; + value = `${value}`; + validateName(name); + validateValue(value); + const key = find(this[MAP], name); + this[MAP][key !== undefined ? 
key : name] = [value]; + } + + /** + * Append a value onto existing header + * + * @param String name Header name + * @param String value Header value + * @return Void + */ + append(name, value) { + name = `${name}`; + value = `${value}`; + validateName(name); + validateValue(value); + const key = find(this[MAP], name); + if (key !== undefined) { + this[MAP][key].push(value); + } else { + this[MAP][name] = [value]; + } + } + + /** + * Check for header name existence + * + * @param String name Header name + * @return Boolean + */ + has(name) { + name = `${name}`; + validateName(name); + return find(this[MAP], name) !== undefined; + } + + /** + * Delete all header values given name + * + * @param String name Header name + * @return Void + */ + delete(name) { + name = `${name}`; + validateName(name); + const key = find(this[MAP], name); + if (key !== undefined) { + delete this[MAP][key]; + } + } + + /** + * Return raw headers (non-spec api) + * + * @return Object + */ + raw() { + return this[MAP]; + } + + /** + * Get an iterator on keys. + * + * @return Iterator + */ + keys() { + return createHeadersIterator(this, 'key'); + } + + /** + * Get an iterator on values. + * + * @return Iterator + */ + values() { + return createHeadersIterator(this, 'value'); + } + + /** + * Get an iterator on entries. + * + * This is the default iterator of the Headers object. + * + * @return Iterator + */ + [Symbol.iterator]() { + return createHeadersIterator(this, 'key+value'); + } +} +Headers.prototype.entries = Headers.prototype[Symbol.iterator]; + +Object.defineProperty(Headers.prototype, Symbol.toStringTag, { + value: 'Headers', + writable: false, + enumerable: false, + configurable: true +}); + +Object.defineProperties(Headers.prototype, { + get: { enumerable: true }, + forEach: { enumerable: true }, + set: { enumerable: true }, + append: { enumerable: true }, + has: { enumerable: true }, + delete: { enumerable: true }, + keys: { enumerable: true }, + values: { enumerable: true }, + entries: { enumerable: true } +}); + +function getHeaders(headers) { + let kind = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'key+value'; + + const keys = Object.keys(headers[MAP]).sort(); + return keys.map(kind === 'key' ? function (k) { + return k.toLowerCase(); + } : kind === 'value' ? 
function (k) { + return headers[MAP][k].join(', '); + } : function (k) { + return [k.toLowerCase(), headers[MAP][k].join(', ')]; + }); +} + +const INTERNAL = Symbol('internal'); + +function createHeadersIterator(target, kind) { + const iterator = Object.create(HeadersIteratorPrototype); + iterator[INTERNAL] = { + target, + kind, + index: 0 + }; + return iterator; +} + +const HeadersIteratorPrototype = Object.setPrototypeOf({ + next() { + // istanbul ignore if + if (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) { + throw new TypeError('Value of `this` is not a HeadersIterator'); + } + + var _INTERNAL = this[INTERNAL]; + const target = _INTERNAL.target, + kind = _INTERNAL.kind, + index = _INTERNAL.index; + + const values = getHeaders(target, kind); + const len = values.length; + if (index >= len) { + return { + value: undefined, + done: true + }; + } + + this[INTERNAL].index = index + 1; + + return { + value: values[index], + done: false + }; + } +}, Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]()))); + +Object.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, { + value: 'HeadersIterator', + writable: false, + enumerable: false, + configurable: true +}); + +/** + * Export the Headers object in a form that Node.js can consume. + * + * @param Headers headers + * @return Object + */ +function exportNodeCompatibleHeaders(headers) { + const obj = Object.assign({ __proto__: null }, headers[MAP]); + + // http.request() only supports string as Host header. This hack makes + // specifying custom Host header possible. + const hostHeaderKey = find(headers[MAP], 'Host'); + if (hostHeaderKey !== undefined) { + obj[hostHeaderKey] = obj[hostHeaderKey][0]; + } + + return obj; +} + +/** + * Create a Headers object from an object of headers, ignoring those that do + * not conform to HTTP grammar productions. + * + * @param Object obj Object of headers + * @return Headers + */ +function createHeadersLenient(obj) { + const headers = new Headers(); + for (const name of Object.keys(obj)) { + if (invalidTokenRegex.test(name)) { + continue; + } + if (Array.isArray(obj[name])) { + for (const val of obj[name]) { + if (invalidHeaderCharRegex.test(val)) { + continue; + } + if (headers[MAP][name] === undefined) { + headers[MAP][name] = [val]; + } else { + headers[MAP][name].push(val); + } + } + } else if (!invalidHeaderCharRegex.test(obj[name])) { + headers[MAP][name] = [obj[name]]; + } + } + return headers; +} + +const INTERNALS$1 = Symbol('Response internals'); + +// fix an issue where "STATUS_CODES" aren't a named export for node <10 +const STATUS_CODES = http.STATUS_CODES; + +/** + * Response class + * + * @param Stream body Readable stream + * @param Object opts Response options + * @return Void + */ +class Response { + constructor() { + let body = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null; + let opts = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {}; + + Body.call(this, body, opts); + + const status = opts.status || 200; + const headers = new Headers(opts.headers); + + if (body != null && !headers.has('Content-Type')) { + const contentType = extractContentType(body); + if (contentType) { + headers.append('Content-Type', contentType); + } + } + + this[INTERNALS$1] = { + url: opts.url, + status, + statusText: opts.statusText || STATUS_CODES[status], + headers, + counter: opts.counter + }; + } + + get url() { + return this[INTERNALS$1].url || ''; + } + + get status() { + return this[INTERNALS$1].status; + } + + /** + * Convenience property representing if the request ended normally + */ + get ok() { + return this[INTERNALS$1].status >= 200 && this[INTERNALS$1].status < 300; + } + + get redirected() { + return this[INTERNALS$1].counter > 0; + } + + get statusText() { + return this[INTERNALS$1].statusText; + } + + get headers() { + return this[INTERNALS$1].headers; + } + + /** + * Clone this response + * + * @return Response + */ + clone() { + return new Response(clone(this), { + url: this.url, + status: this.status, + statusText: this.statusText, + headers: this.headers, + ok: this.ok, + redirected: this.redirected + }); + } +} + +Body.mixIn(Response.prototype); + +Object.defineProperties(Response.prototype, { + url: { enumerable: true }, + status: { enumerable: true }, + ok: { enumerable: true }, + redirected: { enumerable: true }, + statusText: { enumerable: true }, + headers: { enumerable: true }, + clone: { enumerable: true } +}); + +Object.defineProperty(Response.prototype, Symbol.toStringTag, { + value: 'Response', + writable: false, + enumerable: false, + configurable: true +}); + +const INTERNALS$2 = Symbol('Request internals'); +const URL = Url.URL || whatwgUrl.URL; + +// fix an issue where "format", "parse" aren't a named export for node <10 +const parse_url = Url.parse; +const format_url = Url.format; + +/** + * Wrapper around `new URL` to handle arbitrary URLs + * + * @param {string} urlStr + * @return {void} + */ +function parseURL(urlStr) { + /* + Check whether the URL is absolute or not + Scheme: https://tools.ietf.org/html/rfc3986#section-3.1 + Absolute URL: https://tools.ietf.org/html/rfc3986#section-4.3 + */ + if (/^[a-zA-Z][a-zA-Z\d+\-.]*:/.exec(urlStr)) { + urlStr = new URL(urlStr).toString(); + } + + // Fallback to old implementation for arbitrary URLs + return parse_url(urlStr); +} + +const streamDestructionSupported = 'destroy' in Stream.Readable.prototype; + +/** + * Check if a value is an instance of Request. + * + * @param Mixed input + * @return Boolean + */ +function isRequest(input) { + return typeof input === 'object' && typeof input[INTERNALS$2] === 'object'; +} + +function isAbortSignal(signal) { + const proto = signal && typeof signal === 'object' && Object.getPrototypeOf(signal); + return !!(proto && proto.constructor.name === 'AbortSignal'); +} + +/** + * Request class + * + * @param Mixed input Url or Request instance + * @param Object init Custom options + * @return Void + */ +class Request { + constructor(input) { + let init = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {}; + + let parsedURL; + + // normalize input + if (!isRequest(input)) { + if (input && input.href) { + // in order to support Node.js' Url objects; though WHATWG's URL objects + // will fall into this branch also (since their `toString()` will return + // `href` property anyway) + parsedURL = parseURL(input.href); + } else { + // coerce input to a string before attempting to parse + parsedURL = parseURL(`${input}`); + } + input = {}; + } else { + parsedURL = parseURL(input.url); + } + + let method = init.method || input.method || 'GET'; + method = method.toUpperCase(); + + if ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) { + throw new TypeError('Request with GET/HEAD method cannot have body'); + } + + let inputBody = init.body != null ? init.body : isRequest(input) && input.body !== null ? clone(input) : null; + + Body.call(this, inputBody, { + timeout: init.timeout || input.timeout || 0, + size: init.size || input.size || 0 + }); + + const headers = new Headers(init.headers || input.headers || {}); + + if (inputBody != null && !headers.has('Content-Type')) { + const contentType = extractContentType(inputBody); + if (contentType) { + headers.append('Content-Type', contentType); + } + } + + let signal = isRequest(input) ? input.signal : null; + if ('signal' in init) signal = init.signal; + + if (signal != null && !isAbortSignal(signal)) { + throw new TypeError('Expected signal to be an instanceof AbortSignal'); + } + + this[INTERNALS$2] = { + method, + redirect: init.redirect || input.redirect || 'follow', + headers, + parsedURL, + signal + }; + + // node-fetch-only options + this.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20; + this.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? input.compress : true; + this.counter = init.counter || input.counter || 0; + this.agent = init.agent || input.agent; + } + + get method() { + return this[INTERNALS$2].method; + } + + get url() { + return format_url(this[INTERNALS$2].parsedURL); + } + + get headers() { + return this[INTERNALS$2].headers; + } + + get redirect() { + return this[INTERNALS$2].redirect; + } + + get signal() { + return this[INTERNALS$2].signal; + } + + /** + * Clone this request + * + * @return Request + */ + clone() { + return new Request(this); + } +} + +Body.mixIn(Request.prototype); + +Object.defineProperty(Request.prototype, Symbol.toStringTag, { + value: 'Request', + writable: false, + enumerable: false, + configurable: true +}); + +Object.defineProperties(Request.prototype, { + method: { enumerable: true }, + url: { enumerable: true }, + headers: { enumerable: true }, + redirect: { enumerable: true }, + clone: { enumerable: true }, + signal: { enumerable: true } +}); + +/** + * Convert a Request to Node.js http request options. 
+ * + * @param Request A Request instance + * @return Object The options object to be passed to http.request + */ +function getNodeRequestOptions(request) { + const parsedURL = request[INTERNALS$2].parsedURL; + const headers = new Headers(request[INTERNALS$2].headers); + + // fetch step 1.3 + if (!headers.has('Accept')) { + headers.set('Accept', '*/*'); + } + + // Basic fetch + if (!parsedURL.protocol || !parsedURL.hostname) { + throw new TypeError('Only absolute URLs are supported'); + } + + if (!/^https?:$/.test(parsedURL.protocol)) { + throw new TypeError('Only HTTP(S) protocols are supported'); + } + + if (request.signal && request.body instanceof Stream.Readable && !streamDestructionSupported) { + throw new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8'); + } + + // HTTP-network-or-cache fetch steps 2.4-2.7 + let contentLengthValue = null; + if (request.body == null && /^(POST|PUT)$/i.test(request.method)) { + contentLengthValue = '0'; + } + if (request.body != null) { + const totalBytes = getTotalBytes(request); + if (typeof totalBytes === 'number') { + contentLengthValue = String(totalBytes); + } + } + if (contentLengthValue) { + headers.set('Content-Length', contentLengthValue); + } + + // HTTP-network-or-cache fetch step 2.11 + if (!headers.has('User-Agent')) { + headers.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)'); + } + + // HTTP-network-or-cache fetch step 2.15 + if (request.compress && !headers.has('Accept-Encoding')) { + headers.set('Accept-Encoding', 'gzip,deflate'); + } + + let agent = request.agent; + if (typeof agent === 'function') { + agent = agent(parsedURL); + } + + // HTTP-network fetch step 4.2 + // chunked encoding is handled by Node.js + + return Object.assign({}, parsedURL, { + method: request.method, + headers: exportNodeCompatibleHeaders(headers), + agent + }); +} + +/** + * abort-error.js + * + * AbortError interface for cancelled requests + */ + +/** + * Create AbortError instance + * + * @param String message Error message for human + * @return AbortError + */ +function AbortError(message) { + Error.call(this, message); + + this.type = 'aborted'; + this.message = message; + + // hide custom error implementation details from end-users + Error.captureStackTrace(this, this.constructor); +} + +AbortError.prototype = Object.create(Error.prototype); +AbortError.prototype.constructor = AbortError; +AbortError.prototype.name = 'AbortError'; + +const URL$1 = Url.URL || whatwgUrl.URL; + +// fix an issue where "PassThrough", "resolve" aren't a named export for node <10 +const PassThrough$1 = Stream.PassThrough; + +const isDomainOrSubdomain = function isDomainOrSubdomain(destination, original) { + const orig = new URL$1(original).hostname; + const dest = new URL$1(destination).hostname; + + return orig === dest || orig[orig.length - dest.length - 1] === '.' && orig.endsWith(dest); +}; + +/** + * isSameProtocol reports whether the two provided URLs use the same protocol. + * + * Both domains must already be in canonical form. 
+ * @param {string|URL} original + * @param {string|URL} destination + */ +const isSameProtocol = function isSameProtocol(destination, original) { + const orig = new URL$1(original).protocol; + const dest = new URL$1(destination).protocol; + + return orig === dest; +}; + +/** + * Fetch function + * + * @param Mixed url Absolute url or Request instance + * @param Object opts Fetch options + * @return Promise + */ +function fetch(url, opts) { + + // allow custom promise + if (!fetch.Promise) { + throw new Error('native promise missing, set fetch.Promise to your favorite alternative'); + } + + Body.Promise = fetch.Promise; + + // wrap http.request into fetch + return new fetch.Promise(function (resolve, reject) { + // build request object + const request = new Request(url, opts); + const options = getNodeRequestOptions(request); + + const send = (options.protocol === 'https:' ? https : http).request; + const signal = request.signal; + + let response = null; + + const abort = function abort() { + let error = new AbortError('The user aborted a request.'); + reject(error); + if (request.body && request.body instanceof Stream.Readable) { + destroyStream(request.body, error); + } + if (!response || !response.body) return; + response.body.emit('error', error); + }; + + if (signal && signal.aborted) { + abort(); + return; + } + + const abortAndFinalize = function abortAndFinalize() { + abort(); + finalize(); + }; + + // send request + const req = send(options); + let reqTimeout; + + if (signal) { + signal.addEventListener('abort', abortAndFinalize); + } + + function finalize() { + req.abort(); + if (signal) signal.removeEventListener('abort', abortAndFinalize); + clearTimeout(reqTimeout); + } + + if (request.timeout) { + req.once('socket', function (socket) { + reqTimeout = setTimeout(function () { + reject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout')); + finalize(); + }, request.timeout); + }); + } + + req.on('error', function (err) { + reject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err)); + + if (response && response.body) { + destroyStream(response.body, err); + } + + finalize(); + }); + + fixResponseChunkedTransferBadEnding(req, function (err) { + if (signal && signal.aborted) { + return; + } + + if (response && response.body) { + destroyStream(response.body, err); + } + }); + + /* c8 ignore next 18 */ + if (parseInt(process.version.substring(1)) < 14) { + // Before Node.js 14, pipeline() does not fully support async iterators and does not always + // properly handle when the socket close/end events are out of order. + req.on('socket', function (s) { + s.addListener('close', function (hadError) { + // if a data listener is still present we didn't end cleanly + const hasDataListener = s.listenerCount('data') > 0; + + // if end happened before close but the socket didn't emit an error, do it now + if (response && hasDataListener && !hadError && !(signal && signal.aborted)) { + const err = new Error('Premature close'); + err.code = 'ERR_STREAM_PREMATURE_CLOSE'; + response.body.emit('error', err); + } + }); + }); + } + + req.on('response', function (res) { + clearTimeout(reqTimeout); + + const headers = createHeadersLenient(res.headers); + + // HTTP fetch step 5 + if (fetch.isRedirect(res.statusCode)) { + // HTTP fetch step 5.2 + const location = headers.get('Location'); + + // HTTP fetch step 5.3 + let locationURL = null; + try { + locationURL = location === null ? 
null : new URL$1(location, request.url).toString(); + } catch (err) { + // error here can only be invalid URL in Location: header + // do not throw when options.redirect == manual + // let the user extract the errorneous redirect URL + if (request.redirect !== 'manual') { + reject(new FetchError(`uri requested responds with an invalid redirect URL: ${location}`, 'invalid-redirect')); + finalize(); + return; + } + } + + // HTTP fetch step 5.5 + switch (request.redirect) { + case 'error': + reject(new FetchError(`uri requested responds with a redirect, redirect mode is set to error: ${request.url}`, 'no-redirect')); + finalize(); + return; + case 'manual': + // node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL. + if (locationURL !== null) { + // handle corrupted header + try { + headers.set('Location', locationURL); + } catch (err) { + // istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request + reject(err); + } + } + break; + case 'follow': + // HTTP-redirect fetch step 2 + if (locationURL === null) { + break; + } + + // HTTP-redirect fetch step 5 + if (request.counter >= request.follow) { + reject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect')); + finalize(); + return; + } + + // HTTP-redirect fetch step 6 (counter increment) + // Create a new Request object. + const requestOpts = { + headers: new Headers(request.headers), + follow: request.follow, + counter: request.counter + 1, + agent: request.agent, + compress: request.compress, + method: request.method, + body: request.body, + signal: request.signal, + timeout: request.timeout, + size: request.size + }; + + if (!isDomainOrSubdomain(request.url, locationURL) || !isSameProtocol(request.url, locationURL)) { + for (const name of ['authorization', 'www-authenticate', 'cookie', 'cookie2']) { + requestOpts.headers.delete(name); + } + } + + // HTTP-redirect fetch step 9 + if (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) { + reject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect')); + finalize(); + return; + } + + // HTTP-redirect fetch step 11 + if (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') { + requestOpts.method = 'GET'; + requestOpts.body = undefined; + requestOpts.headers.delete('content-length'); + } + + // HTTP-redirect fetch step 15 + resolve(fetch(new Request(locationURL, requestOpts))); + finalize(); + return; + } + } + + // prepare response + res.once('end', function () { + if (signal) signal.removeEventListener('abort', abortAndFinalize); + }); + let body = res.pipe(new PassThrough$1()); + + const response_options = { + url: request.url, + status: res.statusCode, + statusText: res.statusMessage, + headers: headers, + size: request.size, + timeout: request.timeout, + counter: request.counter + }; + + // HTTP-network fetch step 12.1.1.3 + const codings = headers.get('Content-Encoding'); + + // HTTP-network fetch step 12.1.1.4: handle content codings + + // in following scenarios we ignore compression support + // 1. compression support is disabled + // 2. HEAD request + // 3. no Content-Encoding header + // 4. no content response (204) + // 5. 
content not modified response (304) + if (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) { + response = new Response(body, response_options); + resolve(response); + return; + } + + // For Node v6+ + // Be less strict when decoding compressed responses, since sometimes + // servers send slightly invalid responses that are still accepted + // by common browsers. + // Always using Z_SYNC_FLUSH is what cURL does. + const zlibOptions = { + flush: zlib.Z_SYNC_FLUSH, + finishFlush: zlib.Z_SYNC_FLUSH + }; + + // for gzip + if (codings == 'gzip' || codings == 'x-gzip') { + body = body.pipe(zlib.createGunzip(zlibOptions)); + response = new Response(body, response_options); + resolve(response); + return; + } + + // for deflate + if (codings == 'deflate' || codings == 'x-deflate') { + // handle the infamous raw deflate response from old servers + // a hack for old IIS and Apache servers + const raw = res.pipe(new PassThrough$1()); + raw.once('data', function (chunk) { + // see http://stackoverflow.com/questions/37519828 + if ((chunk[0] & 0x0F) === 0x08) { + body = body.pipe(zlib.createInflate()); + } else { + body = body.pipe(zlib.createInflateRaw()); + } + response = new Response(body, response_options); + resolve(response); + }); + raw.on('end', function () { + // some old IIS servers return zero-length OK deflate responses, so 'data' is never emitted. + if (!response) { + response = new Response(body, response_options); + resolve(response); + } + }); + return; + } + + // for br + if (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') { + body = body.pipe(zlib.createBrotliDecompress()); + response = new Response(body, response_options); + resolve(response); + return; + } + + // otherwise, use response as-is + response = new Response(body, response_options); + resolve(response); + }); + + writeToStream(req, request); + }); +} +function fixResponseChunkedTransferBadEnding(request, errorCallback) { + let socket; + + request.on('socket', function (s) { + socket = s; + }); + + request.on('response', function (response) { + const headers = response.headers; + + if (headers['transfer-encoding'] === 'chunked' && !headers['content-length']) { + response.once('close', function (hadError) { + // tests for socket presence, as in some situations the + // the 'socket' event is not triggered for the request + // (happens in deno), avoids `TypeError` + // if a data listener is still present we didn't end cleanly + const hasDataListener = socket && socket.listenerCount('data') > 0; + + if (hasDataListener && !hadError) { + const err = new Error('Premature close'); + err.code = 'ERR_STREAM_PREMATURE_CLOSE'; + errorCallback(err); + } + }); + } + }); +} + +function destroyStream(stream, err) { + if (stream.destroy) { + stream.destroy(err); + } else { + // node < 8 + stream.emit('error', err); + stream.end(); + } +} + +/** + * Redirect code matching + * + * @param Number code Status code + * @return Boolean + */ +fetch.isRedirect = function (code) { + return code === 301 || code === 302 || code === 303 || code === 307 || code === 308; +}; + +// expose Promise +fetch.Promise = global.Promise; + +export default fetch; +export { Headers, Request, Response, FetchError, AbortError }; diff --git a/node_modules/node-fetch/lib/index.js b/node_modules/node-fetch/lib/index.js new file mode 100644 index 0000000..567ff5d --- /dev/null +++ b/node_modules/node-fetch/lib/index.js @@ -0,0 +1,1787 @@ +'use strict'; + 
+Object.defineProperty(exports, '__esModule', { value: true }); + +function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } + +var Stream = _interopDefault(require('stream')); +var http = _interopDefault(require('http')); +var Url = _interopDefault(require('url')); +var whatwgUrl = _interopDefault(require('whatwg-url')); +var https = _interopDefault(require('https')); +var zlib = _interopDefault(require('zlib')); + +// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js + +// fix for "Readable" isn't a named export issue +const Readable = Stream.Readable; + +const BUFFER = Symbol('buffer'); +const TYPE = Symbol('type'); + +class Blob { + constructor() { + this[TYPE] = ''; + + const blobParts = arguments[0]; + const options = arguments[1]; + + const buffers = []; + let size = 0; + + if (blobParts) { + const a = blobParts; + const length = Number(a.length); + for (let i = 0; i < length; i++) { + const element = a[i]; + let buffer; + if (element instanceof Buffer) { + buffer = element; + } else if (ArrayBuffer.isView(element)) { + buffer = Buffer.from(element.buffer, element.byteOffset, element.byteLength); + } else if (element instanceof ArrayBuffer) { + buffer = Buffer.from(element); + } else if (element instanceof Blob) { + buffer = element[BUFFER]; + } else { + buffer = Buffer.from(typeof element === 'string' ? element : String(element)); + } + size += buffer.length; + buffers.push(buffer); + } + } + + this[BUFFER] = Buffer.concat(buffers); + + let type = options && options.type !== undefined && String(options.type).toLowerCase(); + if (type && !/[^\u0020-\u007E]/.test(type)) { + this[TYPE] = type; + } + } + get size() { + return this[BUFFER].length; + } + get type() { + return this[TYPE]; + } + text() { + return Promise.resolve(this[BUFFER].toString()); + } + arrayBuffer() { + const buf = this[BUFFER]; + const ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); + return Promise.resolve(ab); + } + stream() { + const readable = new Readable(); + readable._read = function () {}; + readable.push(this[BUFFER]); + readable.push(null); + return readable; + } + toString() { + return '[object Blob]'; + } + slice() { + const size = this.size; + + const start = arguments[0]; + const end = arguments[1]; + let relativeStart, relativeEnd; + if (start === undefined) { + relativeStart = 0; + } else if (start < 0) { + relativeStart = Math.max(size + start, 0); + } else { + relativeStart = Math.min(start, size); + } + if (end === undefined) { + relativeEnd = size; + } else if (end < 0) { + relativeEnd = Math.max(size + end, 0); + } else { + relativeEnd = Math.min(end, size); + } + const span = Math.max(relativeEnd - relativeStart, 0); + + const buffer = this[BUFFER]; + const slicedBuffer = buffer.slice(relativeStart, relativeStart + span); + const blob = new Blob([], { type: arguments[2] }); + blob[BUFFER] = slicedBuffer; + return blob; + } +} + +Object.defineProperties(Blob.prototype, { + size: { enumerable: true }, + type: { enumerable: true }, + slice: { enumerable: true } +}); + +Object.defineProperty(Blob.prototype, Symbol.toStringTag, { + value: 'Blob', + writable: false, + enumerable: false, + configurable: true +}); + +/** + * fetch-error.js + * + * FetchError interface for operational errors + */ + +/** + * Create FetchError instance + * + * @param String message Error message for human + * @param String type Error type for machine + * @param String systemError 
For Node.js system error + * @return FetchError + */ +function FetchError(message, type, systemError) { + Error.call(this, message); + + this.message = message; + this.type = type; + + // when err.type is `system`, err.code contains system error code + if (systemError) { + this.code = this.errno = systemError.code; + } + + // hide custom error implementation details from end-users + Error.captureStackTrace(this, this.constructor); +} + +FetchError.prototype = Object.create(Error.prototype); +FetchError.prototype.constructor = FetchError; +FetchError.prototype.name = 'FetchError'; + +let convert; +try { + convert = require('encoding').convert; +} catch (e) {} + +const INTERNALS = Symbol('Body internals'); + +// fix an issue where "PassThrough" isn't a named export for node <10 +const PassThrough = Stream.PassThrough; + +/** + * Body mixin + * + * Ref: https://fetch.spec.whatwg.org/#body + * + * @param Stream body Readable stream + * @param Object opts Response options + * @return Void + */ +function Body(body) { + var _this = this; + + var _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}, + _ref$size = _ref.size; + + let size = _ref$size === undefined ? 0 : _ref$size; + var _ref$timeout = _ref.timeout; + let timeout = _ref$timeout === undefined ? 0 : _ref$timeout; + + if (body == null) { + // body is undefined or null + body = null; + } else if (isURLSearchParams(body)) { + // body is a URLSearchParams + body = Buffer.from(body.toString()); + } else if (isBlob(body)) ; else if (Buffer.isBuffer(body)) ; else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') { + // body is ArrayBuffer + body = Buffer.from(body); + } else if (ArrayBuffer.isView(body)) { + // body is ArrayBufferView + body = Buffer.from(body.buffer, body.byteOffset, body.byteLength); + } else if (body instanceof Stream) ; else { + // none of the above + // coerce to string then buffer + body = Buffer.from(String(body)); + } + this[INTERNALS] = { + body, + disturbed: false, + error: null + }; + this.size = size; + this.timeout = timeout; + + if (body instanceof Stream) { + body.on('error', function (err) { + const error = err.name === 'AbortError' ? 
err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err); + _this[INTERNALS].error = error; + }); + } +} + +Body.prototype = { + get body() { + return this[INTERNALS].body; + }, + + get bodyUsed() { + return this[INTERNALS].disturbed; + }, + + /** + * Decode response as ArrayBuffer + * + * @return Promise + */ + arrayBuffer() { + return consumeBody.call(this).then(function (buf) { + return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); + }); + }, + + /** + * Return raw response as Blob + * + * @return Promise + */ + blob() { + let ct = this.headers && this.headers.get('content-type') || ''; + return consumeBody.call(this).then(function (buf) { + return Object.assign( + // Prevent copying + new Blob([], { + type: ct.toLowerCase() + }), { + [BUFFER]: buf + }); + }); + }, + + /** + * Decode response as json + * + * @return Promise + */ + json() { + var _this2 = this; + + return consumeBody.call(this).then(function (buffer) { + try { + return JSON.parse(buffer.toString()); + } catch (err) { + return Body.Promise.reject(new FetchError(`invalid json response body at ${_this2.url} reason: ${err.message}`, 'invalid-json')); + } + }); + }, + + /** + * Decode response as text + * + * @return Promise + */ + text() { + return consumeBody.call(this).then(function (buffer) { + return buffer.toString(); + }); + }, + + /** + * Decode response as buffer (non-spec api) + * + * @return Promise + */ + buffer() { + return consumeBody.call(this); + }, + + /** + * Decode response as text, while automatically detecting the encoding and + * trying to decode to UTF-8 (non-spec api) + * + * @return Promise + */ + textConverted() { + var _this3 = this; + + return consumeBody.call(this).then(function (buffer) { + return convertBody(buffer, _this3.headers); + }); + } +}; + +// In browsers, all properties are enumerable. +Object.defineProperties(Body.prototype, { + body: { enumerable: true }, + bodyUsed: { enumerable: true }, + arrayBuffer: { enumerable: true }, + blob: { enumerable: true }, + json: { enumerable: true }, + text: { enumerable: true } +}); + +Body.mixIn = function (proto) { + for (const name of Object.getOwnPropertyNames(Body.prototype)) { + // istanbul ignore else: future proof + if (!(name in proto)) { + const desc = Object.getOwnPropertyDescriptor(Body.prototype, name); + Object.defineProperty(proto, name, desc); + } + } +}; + +/** + * Consume and convert an entire Body to a Buffer. 
+ * + * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body + * + * @return Promise + */ +function consumeBody() { + var _this4 = this; + + if (this[INTERNALS].disturbed) { + return Body.Promise.reject(new TypeError(`body used already for: ${this.url}`)); + } + + this[INTERNALS].disturbed = true; + + if (this[INTERNALS].error) { + return Body.Promise.reject(this[INTERNALS].error); + } + + let body = this.body; + + // body is null + if (body === null) { + return Body.Promise.resolve(Buffer.alloc(0)); + } + + // body is blob + if (isBlob(body)) { + body = body.stream(); + } + + // body is buffer + if (Buffer.isBuffer(body)) { + return Body.Promise.resolve(body); + } + + // istanbul ignore if: should never happen + if (!(body instanceof Stream)) { + return Body.Promise.resolve(Buffer.alloc(0)); + } + + // body is stream + // get ready to actually consume the body + let accum = []; + let accumBytes = 0; + let abort = false; + + return new Body.Promise(function (resolve, reject) { + let resTimeout; + + // allow timeout on slow response body + if (_this4.timeout) { + resTimeout = setTimeout(function () { + abort = true; + reject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout')); + }, _this4.timeout); + } + + // handle stream errors + body.on('error', function (err) { + if (err.name === 'AbortError') { + // if the request was aborted, reject with this Error + abort = true; + reject(err); + } else { + // other errors, such as incorrect content-encoding + reject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err)); + } + }); + + body.on('data', function (chunk) { + if (abort || chunk === null) { + return; + } + + if (_this4.size && accumBytes + chunk.length > _this4.size) { + abort = true; + reject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 'max-size')); + return; + } + + accumBytes += chunk.length; + accum.push(chunk); + }); + + body.on('end', function () { + if (abort) { + return; + } + + clearTimeout(resTimeout); + + try { + resolve(Buffer.concat(accum, accumBytes)); + } catch (err) { + // handle streams that have accumulated too much data (issue #414) + reject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err)); + } + }); + }); +} + +/** + * Detect buffer encoding and convert to target encoding + * ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding + * + * @param Buffer buffer Incoming buffer + * @param String encoding Target encoding + * @return String + */ +function convertBody(buffer, headers) { + if (typeof convert !== 'function') { + throw new Error('The package `encoding` must be installed to use the textConverted() function'); + } + + const ct = headers.get('content-type'); + let charset = 'utf-8'; + let res, str; + + // header + if (ct) { + res = /charset=([^;]*)/i.exec(ct); + } + + // no charset in content type, peek at response body for at most 1024 bytes + str = buffer.slice(0, 1024).toString(); + + // html5 + if (!res && str) { + res = / 0 && arguments[0] !== undefined ? 
arguments[0] : undefined; + + this[MAP] = Object.create(null); + + if (init instanceof Headers) { + const rawHeaders = init.raw(); + const headerNames = Object.keys(rawHeaders); + + for (const headerName of headerNames) { + for (const value of rawHeaders[headerName]) { + this.append(headerName, value); + } + } + + return; + } + + // We don't worry about converting prop to ByteString here as append() + // will handle it. + if (init == null) ; else if (typeof init === 'object') { + const method = init[Symbol.iterator]; + if (method != null) { + if (typeof method !== 'function') { + throw new TypeError('Header pairs must be iterable'); + } + + // sequence> + // Note: per spec we have to first exhaust the lists then process them + const pairs = []; + for (const pair of init) { + if (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') { + throw new TypeError('Each header pair must be iterable'); + } + pairs.push(Array.from(pair)); + } + + for (const pair of pairs) { + if (pair.length !== 2) { + throw new TypeError('Each header pair must be a name/value tuple'); + } + this.append(pair[0], pair[1]); + } + } else { + // record + for (const key of Object.keys(init)) { + const value = init[key]; + this.append(key, value); + } + } + } else { + throw new TypeError('Provided initializer must be an object'); + } + } + + /** + * Return combined header value given name + * + * @param String name Header name + * @return Mixed + */ + get(name) { + name = `${name}`; + validateName(name); + const key = find(this[MAP], name); + if (key === undefined) { + return null; + } + + return this[MAP][key].join(', '); + } + + /** + * Iterate over all headers + * + * @param Function callback Executed for each item with parameters (value, name, thisArg) + * @param Boolean thisArg `this` context for callback function + * @return Void + */ + forEach(callback) { + let thisArg = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : undefined; + + let pairs = getHeaders(this); + let i = 0; + while (i < pairs.length) { + var _pairs$i = pairs[i]; + const name = _pairs$i[0], + value = _pairs$i[1]; + + callback.call(thisArg, value, name, this); + pairs = getHeaders(this); + i++; + } + } + + /** + * Overwrite header values given name + * + * @param String name Header name + * @param String value Header value + * @return Void + */ + set(name, value) { + name = `${name}`; + value = `${value}`; + validateName(name); + validateValue(value); + const key = find(this[MAP], name); + this[MAP][key !== undefined ? 
key : name] = [value]; + } + + /** + * Append a value onto existing header + * + * @param String name Header name + * @param String value Header value + * @return Void + */ + append(name, value) { + name = `${name}`; + value = `${value}`; + validateName(name); + validateValue(value); + const key = find(this[MAP], name); + if (key !== undefined) { + this[MAP][key].push(value); + } else { + this[MAP][name] = [value]; + } + } + + /** + * Check for header name existence + * + * @param String name Header name + * @return Boolean + */ + has(name) { + name = `${name}`; + validateName(name); + return find(this[MAP], name) !== undefined; + } + + /** + * Delete all header values given name + * + * @param String name Header name + * @return Void + */ + delete(name) { + name = `${name}`; + validateName(name); + const key = find(this[MAP], name); + if (key !== undefined) { + delete this[MAP][key]; + } + } + + /** + * Return raw headers (non-spec api) + * + * @return Object + */ + raw() { + return this[MAP]; + } + + /** + * Get an iterator on keys. + * + * @return Iterator + */ + keys() { + return createHeadersIterator(this, 'key'); + } + + /** + * Get an iterator on values. + * + * @return Iterator + */ + values() { + return createHeadersIterator(this, 'value'); + } + + /** + * Get an iterator on entries. + * + * This is the default iterator of the Headers object. + * + * @return Iterator + */ + [Symbol.iterator]() { + return createHeadersIterator(this, 'key+value'); + } +} +Headers.prototype.entries = Headers.prototype[Symbol.iterator]; + +Object.defineProperty(Headers.prototype, Symbol.toStringTag, { + value: 'Headers', + writable: false, + enumerable: false, + configurable: true +}); + +Object.defineProperties(Headers.prototype, { + get: { enumerable: true }, + forEach: { enumerable: true }, + set: { enumerable: true }, + append: { enumerable: true }, + has: { enumerable: true }, + delete: { enumerable: true }, + keys: { enumerable: true }, + values: { enumerable: true }, + entries: { enumerable: true } +}); + +function getHeaders(headers) { + let kind = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'key+value'; + + const keys = Object.keys(headers[MAP]).sort(); + return keys.map(kind === 'key' ? function (k) { + return k.toLowerCase(); + } : kind === 'value' ? 
function (k) { + return headers[MAP][k].join(', '); + } : function (k) { + return [k.toLowerCase(), headers[MAP][k].join(', ')]; + }); +} + +const INTERNAL = Symbol('internal'); + +function createHeadersIterator(target, kind) { + const iterator = Object.create(HeadersIteratorPrototype); + iterator[INTERNAL] = { + target, + kind, + index: 0 + }; + return iterator; +} + +const HeadersIteratorPrototype = Object.setPrototypeOf({ + next() { + // istanbul ignore if + if (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) { + throw new TypeError('Value of `this` is not a HeadersIterator'); + } + + var _INTERNAL = this[INTERNAL]; + const target = _INTERNAL.target, + kind = _INTERNAL.kind, + index = _INTERNAL.index; + + const values = getHeaders(target, kind); + const len = values.length; + if (index >= len) { + return { + value: undefined, + done: true + }; + } + + this[INTERNAL].index = index + 1; + + return { + value: values[index], + done: false + }; + } +}, Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]()))); + +Object.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, { + value: 'HeadersIterator', + writable: false, + enumerable: false, + configurable: true +}); + +/** + * Export the Headers object in a form that Node.js can consume. + * + * @param Headers headers + * @return Object + */ +function exportNodeCompatibleHeaders(headers) { + const obj = Object.assign({ __proto__: null }, headers[MAP]); + + // http.request() only supports string as Host header. This hack makes + // specifying custom Host header possible. + const hostHeaderKey = find(headers[MAP], 'Host'); + if (hostHeaderKey !== undefined) { + obj[hostHeaderKey] = obj[hostHeaderKey][0]; + } + + return obj; +} + +/** + * Create a Headers object from an object of headers, ignoring those that do + * not conform to HTTP grammar productions. + * + * @param Object obj Object of headers + * @return Headers + */ +function createHeadersLenient(obj) { + const headers = new Headers(); + for (const name of Object.keys(obj)) { + if (invalidTokenRegex.test(name)) { + continue; + } + if (Array.isArray(obj[name])) { + for (const val of obj[name]) { + if (invalidHeaderCharRegex.test(val)) { + continue; + } + if (headers[MAP][name] === undefined) { + headers[MAP][name] = [val]; + } else { + headers[MAP][name].push(val); + } + } + } else if (!invalidHeaderCharRegex.test(obj[name])) { + headers[MAP][name] = [obj[name]]; + } + } + return headers; +} + +const INTERNALS$1 = Symbol('Response internals'); + +// fix an issue where "STATUS_CODES" aren't a named export for node <10 +const STATUS_CODES = http.STATUS_CODES; + +/** + * Response class + * + * @param Stream body Readable stream + * @param Object opts Response options + * @return Void + */ +class Response { + constructor() { + let body = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null; + let opts = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {}; + + Body.call(this, body, opts); + + const status = opts.status || 200; + const headers = new Headers(opts.headers); + + if (body != null && !headers.has('Content-Type')) { + const contentType = extractContentType(body); + if (contentType) { + headers.append('Content-Type', contentType); + } + } + + this[INTERNALS$1] = { + url: opts.url, + status, + statusText: opts.statusText || STATUS_CODES[status], + headers, + counter: opts.counter + }; + } + + get url() { + return this[INTERNALS$1].url || ''; + } + + get status() { + return this[INTERNALS$1].status; + } + + /** + * Convenience property representing if the request ended normally + */ + get ok() { + return this[INTERNALS$1].status >= 200 && this[INTERNALS$1].status < 300; + } + + get redirected() { + return this[INTERNALS$1].counter > 0; + } + + get statusText() { + return this[INTERNALS$1].statusText; + } + + get headers() { + return this[INTERNALS$1].headers; + } + + /** + * Clone this response + * + * @return Response + */ + clone() { + return new Response(clone(this), { + url: this.url, + status: this.status, + statusText: this.statusText, + headers: this.headers, + ok: this.ok, + redirected: this.redirected + }); + } +} + +Body.mixIn(Response.prototype); + +Object.defineProperties(Response.prototype, { + url: { enumerable: true }, + status: { enumerable: true }, + ok: { enumerable: true }, + redirected: { enumerable: true }, + statusText: { enumerable: true }, + headers: { enumerable: true }, + clone: { enumerable: true } +}); + +Object.defineProperty(Response.prototype, Symbol.toStringTag, { + value: 'Response', + writable: false, + enumerable: false, + configurable: true +}); + +const INTERNALS$2 = Symbol('Request internals'); +const URL = Url.URL || whatwgUrl.URL; + +// fix an issue where "format", "parse" aren't a named export for node <10 +const parse_url = Url.parse; +const format_url = Url.format; + +/** + * Wrapper around `new URL` to handle arbitrary URLs + * + * @param {string} urlStr + * @return {void} + */ +function parseURL(urlStr) { + /* + Check whether the URL is absolute or not + Scheme: https://tools.ietf.org/html/rfc3986#section-3.1 + Absolute URL: https://tools.ietf.org/html/rfc3986#section-4.3 + */ + if (/^[a-zA-Z][a-zA-Z\d+\-.]*:/.exec(urlStr)) { + urlStr = new URL(urlStr).toString(); + } + + // Fallback to old implementation for arbitrary URLs + return parse_url(urlStr); +} + +const streamDestructionSupported = 'destroy' in Stream.Readable.prototype; + +/** + * Check if a value is an instance of Request. + * + * @param Mixed input + * @return Boolean + */ +function isRequest(input) { + return typeof input === 'object' && typeof input[INTERNALS$2] === 'object'; +} + +function isAbortSignal(signal) { + const proto = signal && typeof signal === 'object' && Object.getPrototypeOf(signal); + return !!(proto && proto.constructor.name === 'AbortSignal'); +} + +/** + * Request class + * + * @param Mixed input Url or Request instance + * @param Object init Custom options + * @return Void + */ +class Request { + constructor(input) { + let init = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {}; + + let parsedURL; + + // normalize input + if (!isRequest(input)) { + if (input && input.href) { + // in order to support Node.js' Url objects; though WHATWG's URL objects + // will fall into this branch also (since their `toString()` will return + // `href` property anyway) + parsedURL = parseURL(input.href); + } else { + // coerce input to a string before attempting to parse + parsedURL = parseURL(`${input}`); + } + input = {}; + } else { + parsedURL = parseURL(input.url); + } + + let method = init.method || input.method || 'GET'; + method = method.toUpperCase(); + + if ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) { + throw new TypeError('Request with GET/HEAD method cannot have body'); + } + + let inputBody = init.body != null ? init.body : isRequest(input) && input.body !== null ? clone(input) : null; + + Body.call(this, inputBody, { + timeout: init.timeout || input.timeout || 0, + size: init.size || input.size || 0 + }); + + const headers = new Headers(init.headers || input.headers || {}); + + if (inputBody != null && !headers.has('Content-Type')) { + const contentType = extractContentType(inputBody); + if (contentType) { + headers.append('Content-Type', contentType); + } + } + + let signal = isRequest(input) ? input.signal : null; + if ('signal' in init) signal = init.signal; + + if (signal != null && !isAbortSignal(signal)) { + throw new TypeError('Expected signal to be an instanceof AbortSignal'); + } + + this[INTERNALS$2] = { + method, + redirect: init.redirect || input.redirect || 'follow', + headers, + parsedURL, + signal + }; + + // node-fetch-only options + this.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20; + this.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? input.compress : true; + this.counter = init.counter || input.counter || 0; + this.agent = init.agent || input.agent; + } + + get method() { + return this[INTERNALS$2].method; + } + + get url() { + return format_url(this[INTERNALS$2].parsedURL); + } + + get headers() { + return this[INTERNALS$2].headers; + } + + get redirect() { + return this[INTERNALS$2].redirect; + } + + get signal() { + return this[INTERNALS$2].signal; + } + + /** + * Clone this request + * + * @return Request + */ + clone() { + return new Request(this); + } +} + +Body.mixIn(Request.prototype); + +Object.defineProperty(Request.prototype, Symbol.toStringTag, { + value: 'Request', + writable: false, + enumerable: false, + configurable: true +}); + +Object.defineProperties(Request.prototype, { + method: { enumerable: true }, + url: { enumerable: true }, + headers: { enumerable: true }, + redirect: { enumerable: true }, + clone: { enumerable: true }, + signal: { enumerable: true } +}); + +/** + * Convert a Request to Node.js http request options. 
+ * + * @param Request A Request instance + * @return Object The options object to be passed to http.request + */ +function getNodeRequestOptions(request) { + const parsedURL = request[INTERNALS$2].parsedURL; + const headers = new Headers(request[INTERNALS$2].headers); + + // fetch step 1.3 + if (!headers.has('Accept')) { + headers.set('Accept', '*/*'); + } + + // Basic fetch + if (!parsedURL.protocol || !parsedURL.hostname) { + throw new TypeError('Only absolute URLs are supported'); + } + + if (!/^https?:$/.test(parsedURL.protocol)) { + throw new TypeError('Only HTTP(S) protocols are supported'); + } + + if (request.signal && request.body instanceof Stream.Readable && !streamDestructionSupported) { + throw new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8'); + } + + // HTTP-network-or-cache fetch steps 2.4-2.7 + let contentLengthValue = null; + if (request.body == null && /^(POST|PUT)$/i.test(request.method)) { + contentLengthValue = '0'; + } + if (request.body != null) { + const totalBytes = getTotalBytes(request); + if (typeof totalBytes === 'number') { + contentLengthValue = String(totalBytes); + } + } + if (contentLengthValue) { + headers.set('Content-Length', contentLengthValue); + } + + // HTTP-network-or-cache fetch step 2.11 + if (!headers.has('User-Agent')) { + headers.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)'); + } + + // HTTP-network-or-cache fetch step 2.15 + if (request.compress && !headers.has('Accept-Encoding')) { + headers.set('Accept-Encoding', 'gzip,deflate'); + } + + let agent = request.agent; + if (typeof agent === 'function') { + agent = agent(parsedURL); + } + + // HTTP-network fetch step 4.2 + // chunked encoding is handled by Node.js + + return Object.assign({}, parsedURL, { + method: request.method, + headers: exportNodeCompatibleHeaders(headers), + agent + }); +} + +/** + * abort-error.js + * + * AbortError interface for cancelled requests + */ + +/** + * Create AbortError instance + * + * @param String message Error message for human + * @return AbortError + */ +function AbortError(message) { + Error.call(this, message); + + this.type = 'aborted'; + this.message = message; + + // hide custom error implementation details from end-users + Error.captureStackTrace(this, this.constructor); +} + +AbortError.prototype = Object.create(Error.prototype); +AbortError.prototype.constructor = AbortError; +AbortError.prototype.name = 'AbortError'; + +const URL$1 = Url.URL || whatwgUrl.URL; + +// fix an issue where "PassThrough", "resolve" aren't a named export for node <10 +const PassThrough$1 = Stream.PassThrough; + +const isDomainOrSubdomain = function isDomainOrSubdomain(destination, original) { + const orig = new URL$1(original).hostname; + const dest = new URL$1(destination).hostname; + + return orig === dest || orig[orig.length - dest.length - 1] === '.' && orig.endsWith(dest); +}; + +/** + * isSameProtocol reports whether the two provided URLs use the same protocol. + * + * Both domains must already be in canonical form. 
+ * @param {string|URL} original + * @param {string|URL} destination + */ +const isSameProtocol = function isSameProtocol(destination, original) { + const orig = new URL$1(original).protocol; + const dest = new URL$1(destination).protocol; + + return orig === dest; +}; + +/** + * Fetch function + * + * @param Mixed url Absolute url or Request instance + * @param Object opts Fetch options + * @return Promise + */ +function fetch(url, opts) { + + // allow custom promise + if (!fetch.Promise) { + throw new Error('native promise missing, set fetch.Promise to your favorite alternative'); + } + + Body.Promise = fetch.Promise; + + // wrap http.request into fetch + return new fetch.Promise(function (resolve, reject) { + // build request object + const request = new Request(url, opts); + const options = getNodeRequestOptions(request); + + const send = (options.protocol === 'https:' ? https : http).request; + const signal = request.signal; + + let response = null; + + const abort = function abort() { + let error = new AbortError('The user aborted a request.'); + reject(error); + if (request.body && request.body instanceof Stream.Readable) { + destroyStream(request.body, error); + } + if (!response || !response.body) return; + response.body.emit('error', error); + }; + + if (signal && signal.aborted) { + abort(); + return; + } + + const abortAndFinalize = function abortAndFinalize() { + abort(); + finalize(); + }; + + // send request + const req = send(options); + let reqTimeout; + + if (signal) { + signal.addEventListener('abort', abortAndFinalize); + } + + function finalize() { + req.abort(); + if (signal) signal.removeEventListener('abort', abortAndFinalize); + clearTimeout(reqTimeout); + } + + if (request.timeout) { + req.once('socket', function (socket) { + reqTimeout = setTimeout(function () { + reject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout')); + finalize(); + }, request.timeout); + }); + } + + req.on('error', function (err) { + reject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err)); + + if (response && response.body) { + destroyStream(response.body, err); + } + + finalize(); + }); + + fixResponseChunkedTransferBadEnding(req, function (err) { + if (signal && signal.aborted) { + return; + } + + if (response && response.body) { + destroyStream(response.body, err); + } + }); + + /* c8 ignore next 18 */ + if (parseInt(process.version.substring(1)) < 14) { + // Before Node.js 14, pipeline() does not fully support async iterators and does not always + // properly handle when the socket close/end events are out of order. + req.on('socket', function (s) { + s.addListener('close', function (hadError) { + // if a data listener is still present we didn't end cleanly + const hasDataListener = s.listenerCount('data') > 0; + + // if end happened before close but the socket didn't emit an error, do it now + if (response && hasDataListener && !hadError && !(signal && signal.aborted)) { + const err = new Error('Premature close'); + err.code = 'ERR_STREAM_PREMATURE_CLOSE'; + response.body.emit('error', err); + } + }); + }); + } + + req.on('response', function (res) { + clearTimeout(reqTimeout); + + const headers = createHeadersLenient(res.headers); + + // HTTP fetch step 5 + if (fetch.isRedirect(res.statusCode)) { + // HTTP fetch step 5.2 + const location = headers.get('Location'); + + // HTTP fetch step 5.3 + let locationURL = null; + try { + locationURL = location === null ? 
null : new URL$1(location, request.url).toString(); + } catch (err) { + // error here can only be invalid URL in Location: header + // do not throw when options.redirect == manual + // let the user extract the errorneous redirect URL + if (request.redirect !== 'manual') { + reject(new FetchError(`uri requested responds with an invalid redirect URL: ${location}`, 'invalid-redirect')); + finalize(); + return; + } + } + + // HTTP fetch step 5.5 + switch (request.redirect) { + case 'error': + reject(new FetchError(`uri requested responds with a redirect, redirect mode is set to error: ${request.url}`, 'no-redirect')); + finalize(); + return; + case 'manual': + // node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL. + if (locationURL !== null) { + // handle corrupted header + try { + headers.set('Location', locationURL); + } catch (err) { + // istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request + reject(err); + } + } + break; + case 'follow': + // HTTP-redirect fetch step 2 + if (locationURL === null) { + break; + } + + // HTTP-redirect fetch step 5 + if (request.counter >= request.follow) { + reject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect')); + finalize(); + return; + } + + // HTTP-redirect fetch step 6 (counter increment) + // Create a new Request object. + const requestOpts = { + headers: new Headers(request.headers), + follow: request.follow, + counter: request.counter + 1, + agent: request.agent, + compress: request.compress, + method: request.method, + body: request.body, + signal: request.signal, + timeout: request.timeout, + size: request.size + }; + + if (!isDomainOrSubdomain(request.url, locationURL) || !isSameProtocol(request.url, locationURL)) { + for (const name of ['authorization', 'www-authenticate', 'cookie', 'cookie2']) { + requestOpts.headers.delete(name); + } + } + + // HTTP-redirect fetch step 9 + if (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) { + reject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect')); + finalize(); + return; + } + + // HTTP-redirect fetch step 11 + if (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') { + requestOpts.method = 'GET'; + requestOpts.body = undefined; + requestOpts.headers.delete('content-length'); + } + + // HTTP-redirect fetch step 15 + resolve(fetch(new Request(locationURL, requestOpts))); + finalize(); + return; + } + } + + // prepare response + res.once('end', function () { + if (signal) signal.removeEventListener('abort', abortAndFinalize); + }); + let body = res.pipe(new PassThrough$1()); + + const response_options = { + url: request.url, + status: res.statusCode, + statusText: res.statusMessage, + headers: headers, + size: request.size, + timeout: request.timeout, + counter: request.counter + }; + + // HTTP-network fetch step 12.1.1.3 + const codings = headers.get('Content-Encoding'); + + // HTTP-network fetch step 12.1.1.4: handle content codings + + // in following scenarios we ignore compression support + // 1. compression support is disabled + // 2. HEAD request + // 3. no Content-Encoding header + // 4. no content response (204) + // 5. 
content not modified response (304) + if (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) { + response = new Response(body, response_options); + resolve(response); + return; + } + + // For Node v6+ + // Be less strict when decoding compressed responses, since sometimes + // servers send slightly invalid responses that are still accepted + // by common browsers. + // Always using Z_SYNC_FLUSH is what cURL does. + const zlibOptions = { + flush: zlib.Z_SYNC_FLUSH, + finishFlush: zlib.Z_SYNC_FLUSH + }; + + // for gzip + if (codings == 'gzip' || codings == 'x-gzip') { + body = body.pipe(zlib.createGunzip(zlibOptions)); + response = new Response(body, response_options); + resolve(response); + return; + } + + // for deflate + if (codings == 'deflate' || codings == 'x-deflate') { + // handle the infamous raw deflate response from old servers + // a hack for old IIS and Apache servers + const raw = res.pipe(new PassThrough$1()); + raw.once('data', function (chunk) { + // see http://stackoverflow.com/questions/37519828 + if ((chunk[0] & 0x0F) === 0x08) { + body = body.pipe(zlib.createInflate()); + } else { + body = body.pipe(zlib.createInflateRaw()); + } + response = new Response(body, response_options); + resolve(response); + }); + raw.on('end', function () { + // some old IIS servers return zero-length OK deflate responses, so 'data' is never emitted. + if (!response) { + response = new Response(body, response_options); + resolve(response); + } + }); + return; + } + + // for br + if (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') { + body = body.pipe(zlib.createBrotliDecompress()); + response = new Response(body, response_options); + resolve(response); + return; + } + + // otherwise, use response as-is + response = new Response(body, response_options); + resolve(response); + }); + + writeToStream(req, request); + }); +} +function fixResponseChunkedTransferBadEnding(request, errorCallback) { + let socket; + + request.on('socket', function (s) { + socket = s; + }); + + request.on('response', function (response) { + const headers = response.headers; + + if (headers['transfer-encoding'] === 'chunked' && !headers['content-length']) { + response.once('close', function (hadError) { + // tests for socket presence, as in some situations the + // the 'socket' event is not triggered for the request + // (happens in deno), avoids `TypeError` + // if a data listener is still present we didn't end cleanly + const hasDataListener = socket && socket.listenerCount('data') > 0; + + if (hasDataListener && !hadError) { + const err = new Error('Premature close'); + err.code = 'ERR_STREAM_PREMATURE_CLOSE'; + errorCallback(err); + } + }); + } + }); +} + +function destroyStream(stream, err) { + if (stream.destroy) { + stream.destroy(err); + } else { + // node < 8 + stream.emit('error', err); + stream.end(); + } +} + +/** + * Redirect code matching + * + * @param Number code Status code + * @return Boolean + */ +fetch.isRedirect = function (code) { + return code === 301 || code === 302 || code === 303 || code === 307 || code === 308; +}; + +// expose Promise +fetch.Promise = global.Promise; + +module.exports = exports = fetch; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.default = exports; +exports.Headers = Headers; +exports.Request = Request; +exports.Response = Response; +exports.FetchError = FetchError; +exports.AbortError = AbortError; diff --git a/node_modules/node-fetch/lib/index.mjs 
b/node_modules/node-fetch/lib/index.mjs new file mode 100644 index 0000000..2863dd9 --- /dev/null +++ b/node_modules/node-fetch/lib/index.mjs @@ -0,0 +1,1775 @@ +import Stream from 'stream'; +import http from 'http'; +import Url from 'url'; +import whatwgUrl from 'whatwg-url'; +import https from 'https'; +import zlib from 'zlib'; + +// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js + +// fix for "Readable" isn't a named export issue +const Readable = Stream.Readable; + +const BUFFER = Symbol('buffer'); +const TYPE = Symbol('type'); + +class Blob { + constructor() { + this[TYPE] = ''; + + const blobParts = arguments[0]; + const options = arguments[1]; + + const buffers = []; + let size = 0; + + if (blobParts) { + const a = blobParts; + const length = Number(a.length); + for (let i = 0; i < length; i++) { + const element = a[i]; + let buffer; + if (element instanceof Buffer) { + buffer = element; + } else if (ArrayBuffer.isView(element)) { + buffer = Buffer.from(element.buffer, element.byteOffset, element.byteLength); + } else if (element instanceof ArrayBuffer) { + buffer = Buffer.from(element); + } else if (element instanceof Blob) { + buffer = element[BUFFER]; + } else { + buffer = Buffer.from(typeof element === 'string' ? element : String(element)); + } + size += buffer.length; + buffers.push(buffer); + } + } + + this[BUFFER] = Buffer.concat(buffers); + + let type = options && options.type !== undefined && String(options.type).toLowerCase(); + if (type && !/[^\u0020-\u007E]/.test(type)) { + this[TYPE] = type; + } + } + get size() { + return this[BUFFER].length; + } + get type() { + return this[TYPE]; + } + text() { + return Promise.resolve(this[BUFFER].toString()); + } + arrayBuffer() { + const buf = this[BUFFER]; + const ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); + return Promise.resolve(ab); + } + stream() { + const readable = new Readable(); + readable._read = function () {}; + readable.push(this[BUFFER]); + readable.push(null); + return readable; + } + toString() { + return '[object Blob]'; + } + slice() { + const size = this.size; + + const start = arguments[0]; + const end = arguments[1]; + let relativeStart, relativeEnd; + if (start === undefined) { + relativeStart = 0; + } else if (start < 0) { + relativeStart = Math.max(size + start, 0); + } else { + relativeStart = Math.min(start, size); + } + if (end === undefined) { + relativeEnd = size; + } else if (end < 0) { + relativeEnd = Math.max(size + end, 0); + } else { + relativeEnd = Math.min(end, size); + } + const span = Math.max(relativeEnd - relativeStart, 0); + + const buffer = this[BUFFER]; + const slicedBuffer = buffer.slice(relativeStart, relativeStart + span); + const blob = new Blob([], { type: arguments[2] }); + blob[BUFFER] = slicedBuffer; + return blob; + } +} + +Object.defineProperties(Blob.prototype, { + size: { enumerable: true }, + type: { enumerable: true }, + slice: { enumerable: true } +}); + +Object.defineProperty(Blob.prototype, Symbol.toStringTag, { + value: 'Blob', + writable: false, + enumerable: false, + configurable: true +}); + +/** + * fetch-error.js + * + * FetchError interface for operational errors + */ + +/** + * Create FetchError instance + * + * @param String message Error message for human + * @param String type Error type for machine + * @param String systemError For Node.js system error + * @return FetchError + */ +function FetchError(message, type, systemError) { + Error.call(this, message); + + 
this.message = message; + this.type = type; + + // when err.type is `system`, err.code contains system error code + if (systemError) { + this.code = this.errno = systemError.code; + } + + // hide custom error implementation details from end-users + Error.captureStackTrace(this, this.constructor); +} + +FetchError.prototype = Object.create(Error.prototype); +FetchError.prototype.constructor = FetchError; +FetchError.prototype.name = 'FetchError'; + +let convert; +try { + convert = require('encoding').convert; +} catch (e) {} + +const INTERNALS = Symbol('Body internals'); + +// fix an issue where "PassThrough" isn't a named export for node <10 +const PassThrough = Stream.PassThrough; + +/** + * Body mixin + * + * Ref: https://fetch.spec.whatwg.org/#body + * + * @param Stream body Readable stream + * @param Object opts Response options + * @return Void + */ +function Body(body) { + var _this = this; + + var _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}, + _ref$size = _ref.size; + + let size = _ref$size === undefined ? 0 : _ref$size; + var _ref$timeout = _ref.timeout; + let timeout = _ref$timeout === undefined ? 0 : _ref$timeout; + + if (body == null) { + // body is undefined or null + body = null; + } else if (isURLSearchParams(body)) { + // body is a URLSearchParams + body = Buffer.from(body.toString()); + } else if (isBlob(body)) ; else if (Buffer.isBuffer(body)) ; else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') { + // body is ArrayBuffer + body = Buffer.from(body); + } else if (ArrayBuffer.isView(body)) { + // body is ArrayBufferView + body = Buffer.from(body.buffer, body.byteOffset, body.byteLength); + } else if (body instanceof Stream) ; else { + // none of the above + // coerce to string then buffer + body = Buffer.from(String(body)); + } + this[INTERNALS] = { + body, + disturbed: false, + error: null + }; + this.size = size; + this.timeout = timeout; + + if (body instanceof Stream) { + body.on('error', function (err) { + const error = err.name === 'AbortError' ? 
err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err); + _this[INTERNALS].error = error; + }); + } +} + +Body.prototype = { + get body() { + return this[INTERNALS].body; + }, + + get bodyUsed() { + return this[INTERNALS].disturbed; + }, + + /** + * Decode response as ArrayBuffer + * + * @return Promise + */ + arrayBuffer() { + return consumeBody.call(this).then(function (buf) { + return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); + }); + }, + + /** + * Return raw response as Blob + * + * @return Promise + */ + blob() { + let ct = this.headers && this.headers.get('content-type') || ''; + return consumeBody.call(this).then(function (buf) { + return Object.assign( + // Prevent copying + new Blob([], { + type: ct.toLowerCase() + }), { + [BUFFER]: buf + }); + }); + }, + + /** + * Decode response as json + * + * @return Promise + */ + json() { + var _this2 = this; + + return consumeBody.call(this).then(function (buffer) { + try { + return JSON.parse(buffer.toString()); + } catch (err) { + return Body.Promise.reject(new FetchError(`invalid json response body at ${_this2.url} reason: ${err.message}`, 'invalid-json')); + } + }); + }, + + /** + * Decode response as text + * + * @return Promise + */ + text() { + return consumeBody.call(this).then(function (buffer) { + return buffer.toString(); + }); + }, + + /** + * Decode response as buffer (non-spec api) + * + * @return Promise + */ + buffer() { + return consumeBody.call(this); + }, + + /** + * Decode response as text, while automatically detecting the encoding and + * trying to decode to UTF-8 (non-spec api) + * + * @return Promise + */ + textConverted() { + var _this3 = this; + + return consumeBody.call(this).then(function (buffer) { + return convertBody(buffer, _this3.headers); + }); + } +}; + +// In browsers, all properties are enumerable. +Object.defineProperties(Body.prototype, { + body: { enumerable: true }, + bodyUsed: { enumerable: true }, + arrayBuffer: { enumerable: true }, + blob: { enumerable: true }, + json: { enumerable: true }, + text: { enumerable: true } +}); + +Body.mixIn = function (proto) { + for (const name of Object.getOwnPropertyNames(Body.prototype)) { + // istanbul ignore else: future proof + if (!(name in proto)) { + const desc = Object.getOwnPropertyDescriptor(Body.prototype, name); + Object.defineProperty(proto, name, desc); + } + } +}; + +/** + * Consume and convert an entire Body to a Buffer. 
+ * + * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body + * + * @return Promise + */ +function consumeBody() { + var _this4 = this; + + if (this[INTERNALS].disturbed) { + return Body.Promise.reject(new TypeError(`body used already for: ${this.url}`)); + } + + this[INTERNALS].disturbed = true; + + if (this[INTERNALS].error) { + return Body.Promise.reject(this[INTERNALS].error); + } + + let body = this.body; + + // body is null + if (body === null) { + return Body.Promise.resolve(Buffer.alloc(0)); + } + + // body is blob + if (isBlob(body)) { + body = body.stream(); + } + + // body is buffer + if (Buffer.isBuffer(body)) { + return Body.Promise.resolve(body); + } + + // istanbul ignore if: should never happen + if (!(body instanceof Stream)) { + return Body.Promise.resolve(Buffer.alloc(0)); + } + + // body is stream + // get ready to actually consume the body + let accum = []; + let accumBytes = 0; + let abort = false; + + return new Body.Promise(function (resolve, reject) { + let resTimeout; + + // allow timeout on slow response body + if (_this4.timeout) { + resTimeout = setTimeout(function () { + abort = true; + reject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout')); + }, _this4.timeout); + } + + // handle stream errors + body.on('error', function (err) { + if (err.name === 'AbortError') { + // if the request was aborted, reject with this Error + abort = true; + reject(err); + } else { + // other errors, such as incorrect content-encoding + reject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err)); + } + }); + + body.on('data', function (chunk) { + if (abort || chunk === null) { + return; + } + + if (_this4.size && accumBytes + chunk.length > _this4.size) { + abort = true; + reject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 'max-size')); + return; + } + + accumBytes += chunk.length; + accum.push(chunk); + }); + + body.on('end', function () { + if (abort) { + return; + } + + clearTimeout(resTimeout); + + try { + resolve(Buffer.concat(accum, accumBytes)); + } catch (err) { + // handle streams that have accumulated too much data (issue #414) + reject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err)); + } + }); + }); +} + +/** + * Detect buffer encoding and convert to target encoding + * ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding + * + * @param Buffer buffer Incoming buffer + * @param String encoding Target encoding + * @return String + */ +function convertBody(buffer, headers) { + if (typeof convert !== 'function') { + throw new Error('The package `encoding` must be installed to use the textConverted() function'); + } + + const ct = headers.get('content-type'); + let charset = 'utf-8'; + let res, str; + + // header + if (ct) { + res = /charset=([^;]*)/i.exec(ct); + } + + // no charset in content type, peek at response body for at most 1024 bytes + str = buffer.slice(0, 1024).toString(); + + // html5 + if (!res && str) { + res = / 0 && arguments[0] !== undefined ? 
arguments[0] : undefined; + + this[MAP] = Object.create(null); + + if (init instanceof Headers) { + const rawHeaders = init.raw(); + const headerNames = Object.keys(rawHeaders); + + for (const headerName of headerNames) { + for (const value of rawHeaders[headerName]) { + this.append(headerName, value); + } + } + + return; + } + + // We don't worry about converting prop to ByteString here as append() + // will handle it. + if (init == null) ; else if (typeof init === 'object') { + const method = init[Symbol.iterator]; + if (method != null) { + if (typeof method !== 'function') { + throw new TypeError('Header pairs must be iterable'); + } + + // sequence> + // Note: per spec we have to first exhaust the lists then process them + const pairs = []; + for (const pair of init) { + if (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') { + throw new TypeError('Each header pair must be iterable'); + } + pairs.push(Array.from(pair)); + } + + for (const pair of pairs) { + if (pair.length !== 2) { + throw new TypeError('Each header pair must be a name/value tuple'); + } + this.append(pair[0], pair[1]); + } + } else { + // record + for (const key of Object.keys(init)) { + const value = init[key]; + this.append(key, value); + } + } + } else { + throw new TypeError('Provided initializer must be an object'); + } + } + + /** + * Return combined header value given name + * + * @param String name Header name + * @return Mixed + */ + get(name) { + name = `${name}`; + validateName(name); + const key = find(this[MAP], name); + if (key === undefined) { + return null; + } + + return this[MAP][key].join(', '); + } + + /** + * Iterate over all headers + * + * @param Function callback Executed for each item with parameters (value, name, thisArg) + * @param Boolean thisArg `this` context for callback function + * @return Void + */ + forEach(callback) { + let thisArg = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : undefined; + + let pairs = getHeaders(this); + let i = 0; + while (i < pairs.length) { + var _pairs$i = pairs[i]; + const name = _pairs$i[0], + value = _pairs$i[1]; + + callback.call(thisArg, value, name, this); + pairs = getHeaders(this); + i++; + } + } + + /** + * Overwrite header values given name + * + * @param String name Header name + * @param String value Header value + * @return Void + */ + set(name, value) { + name = `${name}`; + value = `${value}`; + validateName(name); + validateValue(value); + const key = find(this[MAP], name); + this[MAP][key !== undefined ? 
key : name] = [value]; + } + + /** + * Append a value onto existing header + * + * @param String name Header name + * @param String value Header value + * @return Void + */ + append(name, value) { + name = `${name}`; + value = `${value}`; + validateName(name); + validateValue(value); + const key = find(this[MAP], name); + if (key !== undefined) { + this[MAP][key].push(value); + } else { + this[MAP][name] = [value]; + } + } + + /** + * Check for header name existence + * + * @param String name Header name + * @return Boolean + */ + has(name) { + name = `${name}`; + validateName(name); + return find(this[MAP], name) !== undefined; + } + + /** + * Delete all header values given name + * + * @param String name Header name + * @return Void + */ + delete(name) { + name = `${name}`; + validateName(name); + const key = find(this[MAP], name); + if (key !== undefined) { + delete this[MAP][key]; + } + } + + /** + * Return raw headers (non-spec api) + * + * @return Object + */ + raw() { + return this[MAP]; + } + + /** + * Get an iterator on keys. + * + * @return Iterator + */ + keys() { + return createHeadersIterator(this, 'key'); + } + + /** + * Get an iterator on values. + * + * @return Iterator + */ + values() { + return createHeadersIterator(this, 'value'); + } + + /** + * Get an iterator on entries. + * + * This is the default iterator of the Headers object. + * + * @return Iterator + */ + [Symbol.iterator]() { + return createHeadersIterator(this, 'key+value'); + } +} +Headers.prototype.entries = Headers.prototype[Symbol.iterator]; + +Object.defineProperty(Headers.prototype, Symbol.toStringTag, { + value: 'Headers', + writable: false, + enumerable: false, + configurable: true +}); + +Object.defineProperties(Headers.prototype, { + get: { enumerable: true }, + forEach: { enumerable: true }, + set: { enumerable: true }, + append: { enumerable: true }, + has: { enumerable: true }, + delete: { enumerable: true }, + keys: { enumerable: true }, + values: { enumerable: true }, + entries: { enumerable: true } +}); + +function getHeaders(headers) { + let kind = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'key+value'; + + const keys = Object.keys(headers[MAP]).sort(); + return keys.map(kind === 'key' ? function (k) { + return k.toLowerCase(); + } : kind === 'value' ? 
function (k) { + return headers[MAP][k].join(', '); + } : function (k) { + return [k.toLowerCase(), headers[MAP][k].join(', ')]; + }); +} + +const INTERNAL = Symbol('internal'); + +function createHeadersIterator(target, kind) { + const iterator = Object.create(HeadersIteratorPrototype); + iterator[INTERNAL] = { + target, + kind, + index: 0 + }; + return iterator; +} + +const HeadersIteratorPrototype = Object.setPrototypeOf({ + next() { + // istanbul ignore if + if (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) { + throw new TypeError('Value of `this` is not a HeadersIterator'); + } + + var _INTERNAL = this[INTERNAL]; + const target = _INTERNAL.target, + kind = _INTERNAL.kind, + index = _INTERNAL.index; + + const values = getHeaders(target, kind); + const len = values.length; + if (index >= len) { + return { + value: undefined, + done: true + }; + } + + this[INTERNAL].index = index + 1; + + return { + value: values[index], + done: false + }; + } +}, Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]()))); + +Object.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, { + value: 'HeadersIterator', + writable: false, + enumerable: false, + configurable: true +}); + +/** + * Export the Headers object in a form that Node.js can consume. + * + * @param Headers headers + * @return Object + */ +function exportNodeCompatibleHeaders(headers) { + const obj = Object.assign({ __proto__: null }, headers[MAP]); + + // http.request() only supports string as Host header. This hack makes + // specifying custom Host header possible. + const hostHeaderKey = find(headers[MAP], 'Host'); + if (hostHeaderKey !== undefined) { + obj[hostHeaderKey] = obj[hostHeaderKey][0]; + } + + return obj; +} + +/** + * Create a Headers object from an object of headers, ignoring those that do + * not conform to HTTP grammar productions. + * + * @param Object obj Object of headers + * @return Headers + */ +function createHeadersLenient(obj) { + const headers = new Headers(); + for (const name of Object.keys(obj)) { + if (invalidTokenRegex.test(name)) { + continue; + } + if (Array.isArray(obj[name])) { + for (const val of obj[name]) { + if (invalidHeaderCharRegex.test(val)) { + continue; + } + if (headers[MAP][name] === undefined) { + headers[MAP][name] = [val]; + } else { + headers[MAP][name].push(val); + } + } + } else if (!invalidHeaderCharRegex.test(obj[name])) { + headers[MAP][name] = [obj[name]]; + } + } + return headers; +} + +const INTERNALS$1 = Symbol('Response internals'); + +// fix an issue where "STATUS_CODES" aren't a named export for node <10 +const STATUS_CODES = http.STATUS_CODES; + +/** + * Response class + * + * @param Stream body Readable stream + * @param Object opts Response options + * @return Void + */ +class Response { + constructor() { + let body = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null; + let opts = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {}; + + Body.call(this, body, opts); + + const status = opts.status || 200; + const headers = new Headers(opts.headers); + + if (body != null && !headers.has('Content-Type')) { + const contentType = extractContentType(body); + if (contentType) { + headers.append('Content-Type', contentType); + } + } + + this[INTERNALS$1] = { + url: opts.url, + status, + statusText: opts.statusText || STATUS_CODES[status], + headers, + counter: opts.counter + }; + } + + get url() { + return this[INTERNALS$1].url || ''; + } + + get status() { + return this[INTERNALS$1].status; + } + + /** + * Convenience property representing if the request ended normally + */ + get ok() { + return this[INTERNALS$1].status >= 200 && this[INTERNALS$1].status < 300; + } + + get redirected() { + return this[INTERNALS$1].counter > 0; + } + + get statusText() { + return this[INTERNALS$1].statusText; + } + + get headers() { + return this[INTERNALS$1].headers; + } + + /** + * Clone this response + * + * @return Response + */ + clone() { + return new Response(clone(this), { + url: this.url, + status: this.status, + statusText: this.statusText, + headers: this.headers, + ok: this.ok, + redirected: this.redirected + }); + } +} + +Body.mixIn(Response.prototype); + +Object.defineProperties(Response.prototype, { + url: { enumerable: true }, + status: { enumerable: true }, + ok: { enumerable: true }, + redirected: { enumerable: true }, + statusText: { enumerable: true }, + headers: { enumerable: true }, + clone: { enumerable: true } +}); + +Object.defineProperty(Response.prototype, Symbol.toStringTag, { + value: 'Response', + writable: false, + enumerable: false, + configurable: true +}); + +const INTERNALS$2 = Symbol('Request internals'); +const URL = Url.URL || whatwgUrl.URL; + +// fix an issue where "format", "parse" aren't a named export for node <10 +const parse_url = Url.parse; +const format_url = Url.format; + +/** + * Wrapper around `new URL` to handle arbitrary URLs + * + * @param {string} urlStr + * @return {void} + */ +function parseURL(urlStr) { + /* + Check whether the URL is absolute or not + Scheme: https://tools.ietf.org/html/rfc3986#section-3.1 + Absolute URL: https://tools.ietf.org/html/rfc3986#section-4.3 + */ + if (/^[a-zA-Z][a-zA-Z\d+\-.]*:/.exec(urlStr)) { + urlStr = new URL(urlStr).toString(); + } + + // Fallback to old implementation for arbitrary URLs + return parse_url(urlStr); +} + +const streamDestructionSupported = 'destroy' in Stream.Readable.prototype; + +/** + * Check if a value is an instance of Request. + * + * @param Mixed input + * @return Boolean + */ +function isRequest(input) { + return typeof input === 'object' && typeof input[INTERNALS$2] === 'object'; +} + +function isAbortSignal(signal) { + const proto = signal && typeof signal === 'object' && Object.getPrototypeOf(signal); + return !!(proto && proto.constructor.name === 'AbortSignal'); +} + +/** + * Request class + * + * @param Mixed input Url or Request instance + * @param Object init Custom options + * @return Void + */ +class Request { + constructor(input) { + let init = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {}; + + let parsedURL; + + // normalize input + if (!isRequest(input)) { + if (input && input.href) { + // in order to support Node.js' Url objects; though WHATWG's URL objects + // will fall into this branch also (since their `toString()` will return + // `href` property anyway) + parsedURL = parseURL(input.href); + } else { + // coerce input to a string before attempting to parse + parsedURL = parseURL(`${input}`); + } + input = {}; + } else { + parsedURL = parseURL(input.url); + } + + let method = init.method || input.method || 'GET'; + method = method.toUpperCase(); + + if ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) { + throw new TypeError('Request with GET/HEAD method cannot have body'); + } + + let inputBody = init.body != null ? init.body : isRequest(input) && input.body !== null ? clone(input) : null; + + Body.call(this, inputBody, { + timeout: init.timeout || input.timeout || 0, + size: init.size || input.size || 0 + }); + + const headers = new Headers(init.headers || input.headers || {}); + + if (inputBody != null && !headers.has('Content-Type')) { + const contentType = extractContentType(inputBody); + if (contentType) { + headers.append('Content-Type', contentType); + } + } + + let signal = isRequest(input) ? input.signal : null; + if ('signal' in init) signal = init.signal; + + if (signal != null && !isAbortSignal(signal)) { + throw new TypeError('Expected signal to be an instanceof AbortSignal'); + } + + this[INTERNALS$2] = { + method, + redirect: init.redirect || input.redirect || 'follow', + headers, + parsedURL, + signal + }; + + // node-fetch-only options + this.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20; + this.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? input.compress : true; + this.counter = init.counter || input.counter || 0; + this.agent = init.agent || input.agent; + } + + get method() { + return this[INTERNALS$2].method; + } + + get url() { + return format_url(this[INTERNALS$2].parsedURL); + } + + get headers() { + return this[INTERNALS$2].headers; + } + + get redirect() { + return this[INTERNALS$2].redirect; + } + + get signal() { + return this[INTERNALS$2].signal; + } + + /** + * Clone this request + * + * @return Request + */ + clone() { + return new Request(this); + } +} + +Body.mixIn(Request.prototype); + +Object.defineProperty(Request.prototype, Symbol.toStringTag, { + value: 'Request', + writable: false, + enumerable: false, + configurable: true +}); + +Object.defineProperties(Request.prototype, { + method: { enumerable: true }, + url: { enumerable: true }, + headers: { enumerable: true }, + redirect: { enumerable: true }, + clone: { enumerable: true }, + signal: { enumerable: true } +}); + +/** + * Convert a Request to Node.js http request options. 
+ * + * @param Request A Request instance + * @return Object The options object to be passed to http.request + */ +function getNodeRequestOptions(request) { + const parsedURL = request[INTERNALS$2].parsedURL; + const headers = new Headers(request[INTERNALS$2].headers); + + // fetch step 1.3 + if (!headers.has('Accept')) { + headers.set('Accept', '*/*'); + } + + // Basic fetch + if (!parsedURL.protocol || !parsedURL.hostname) { + throw new TypeError('Only absolute URLs are supported'); + } + + if (!/^https?:$/.test(parsedURL.protocol)) { + throw new TypeError('Only HTTP(S) protocols are supported'); + } + + if (request.signal && request.body instanceof Stream.Readable && !streamDestructionSupported) { + throw new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8'); + } + + // HTTP-network-or-cache fetch steps 2.4-2.7 + let contentLengthValue = null; + if (request.body == null && /^(POST|PUT)$/i.test(request.method)) { + contentLengthValue = '0'; + } + if (request.body != null) { + const totalBytes = getTotalBytes(request); + if (typeof totalBytes === 'number') { + contentLengthValue = String(totalBytes); + } + } + if (contentLengthValue) { + headers.set('Content-Length', contentLengthValue); + } + + // HTTP-network-or-cache fetch step 2.11 + if (!headers.has('User-Agent')) { + headers.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)'); + } + + // HTTP-network-or-cache fetch step 2.15 + if (request.compress && !headers.has('Accept-Encoding')) { + headers.set('Accept-Encoding', 'gzip,deflate'); + } + + let agent = request.agent; + if (typeof agent === 'function') { + agent = agent(parsedURL); + } + + // HTTP-network fetch step 4.2 + // chunked encoding is handled by Node.js + + return Object.assign({}, parsedURL, { + method: request.method, + headers: exportNodeCompatibleHeaders(headers), + agent + }); +} + +/** + * abort-error.js + * + * AbortError interface for cancelled requests + */ + +/** + * Create AbortError instance + * + * @param String message Error message for human + * @return AbortError + */ +function AbortError(message) { + Error.call(this, message); + + this.type = 'aborted'; + this.message = message; + + // hide custom error implementation details from end-users + Error.captureStackTrace(this, this.constructor); +} + +AbortError.prototype = Object.create(Error.prototype); +AbortError.prototype.constructor = AbortError; +AbortError.prototype.name = 'AbortError'; + +const URL$1 = Url.URL || whatwgUrl.URL; + +// fix an issue where "PassThrough", "resolve" aren't a named export for node <10 +const PassThrough$1 = Stream.PassThrough; + +const isDomainOrSubdomain = function isDomainOrSubdomain(destination, original) { + const orig = new URL$1(original).hostname; + const dest = new URL$1(destination).hostname; + + return orig === dest || orig[orig.length - dest.length - 1] === '.' && orig.endsWith(dest); +}; + +/** + * isSameProtocol reports whether the two provided URLs use the same protocol. + * + * Both domains must already be in canonical form. 
+ * @param {string|URL} original + * @param {string|URL} destination + */ +const isSameProtocol = function isSameProtocol(destination, original) { + const orig = new URL$1(original).protocol; + const dest = new URL$1(destination).protocol; + + return orig === dest; +}; + +/** + * Fetch function + * + * @param Mixed url Absolute url or Request instance + * @param Object opts Fetch options + * @return Promise + */ +function fetch(url, opts) { + + // allow custom promise + if (!fetch.Promise) { + throw new Error('native promise missing, set fetch.Promise to your favorite alternative'); + } + + Body.Promise = fetch.Promise; + + // wrap http.request into fetch + return new fetch.Promise(function (resolve, reject) { + // build request object + const request = new Request(url, opts); + const options = getNodeRequestOptions(request); + + const send = (options.protocol === 'https:' ? https : http).request; + const signal = request.signal; + + let response = null; + + const abort = function abort() { + let error = new AbortError('The user aborted a request.'); + reject(error); + if (request.body && request.body instanceof Stream.Readable) { + destroyStream(request.body, error); + } + if (!response || !response.body) return; + response.body.emit('error', error); + }; + + if (signal && signal.aborted) { + abort(); + return; + } + + const abortAndFinalize = function abortAndFinalize() { + abort(); + finalize(); + }; + + // send request + const req = send(options); + let reqTimeout; + + if (signal) { + signal.addEventListener('abort', abortAndFinalize); + } + + function finalize() { + req.abort(); + if (signal) signal.removeEventListener('abort', abortAndFinalize); + clearTimeout(reqTimeout); + } + + if (request.timeout) { + req.once('socket', function (socket) { + reqTimeout = setTimeout(function () { + reject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout')); + finalize(); + }, request.timeout); + }); + } + + req.on('error', function (err) { + reject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err)); + + if (response && response.body) { + destroyStream(response.body, err); + } + + finalize(); + }); + + fixResponseChunkedTransferBadEnding(req, function (err) { + if (signal && signal.aborted) { + return; + } + + if (response && response.body) { + destroyStream(response.body, err); + } + }); + + /* c8 ignore next 18 */ + if (parseInt(process.version.substring(1)) < 14) { + // Before Node.js 14, pipeline() does not fully support async iterators and does not always + // properly handle when the socket close/end events are out of order. + req.on('socket', function (s) { + s.addListener('close', function (hadError) { + // if a data listener is still present we didn't end cleanly + const hasDataListener = s.listenerCount('data') > 0; + + // if end happened before close but the socket didn't emit an error, do it now + if (response && hasDataListener && !hadError && !(signal && signal.aborted)) { + const err = new Error('Premature close'); + err.code = 'ERR_STREAM_PREMATURE_CLOSE'; + response.body.emit('error', err); + } + }); + }); + } + + req.on('response', function (res) { + clearTimeout(reqTimeout); + + const headers = createHeadersLenient(res.headers); + + // HTTP fetch step 5 + if (fetch.isRedirect(res.statusCode)) { + // HTTP fetch step 5.2 + const location = headers.get('Location'); + + // HTTP fetch step 5.3 + let locationURL = null; + try { + locationURL = location === null ? 
null : new URL$1(location, request.url).toString(); + } catch (err) { + // error here can only be invalid URL in Location: header + // do not throw when options.redirect == manual + // let the user extract the errorneous redirect URL + if (request.redirect !== 'manual') { + reject(new FetchError(`uri requested responds with an invalid redirect URL: ${location}`, 'invalid-redirect')); + finalize(); + return; + } + } + + // HTTP fetch step 5.5 + switch (request.redirect) { + case 'error': + reject(new FetchError(`uri requested responds with a redirect, redirect mode is set to error: ${request.url}`, 'no-redirect')); + finalize(); + return; + case 'manual': + // node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL. + if (locationURL !== null) { + // handle corrupted header + try { + headers.set('Location', locationURL); + } catch (err) { + // istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request + reject(err); + } + } + break; + case 'follow': + // HTTP-redirect fetch step 2 + if (locationURL === null) { + break; + } + + // HTTP-redirect fetch step 5 + if (request.counter >= request.follow) { + reject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect')); + finalize(); + return; + } + + // HTTP-redirect fetch step 6 (counter increment) + // Create a new Request object. + const requestOpts = { + headers: new Headers(request.headers), + follow: request.follow, + counter: request.counter + 1, + agent: request.agent, + compress: request.compress, + method: request.method, + body: request.body, + signal: request.signal, + timeout: request.timeout, + size: request.size + }; + + if (!isDomainOrSubdomain(request.url, locationURL) || !isSameProtocol(request.url, locationURL)) { + for (const name of ['authorization', 'www-authenticate', 'cookie', 'cookie2']) { + requestOpts.headers.delete(name); + } + } + + // HTTP-redirect fetch step 9 + if (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) { + reject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect')); + finalize(); + return; + } + + // HTTP-redirect fetch step 11 + if (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') { + requestOpts.method = 'GET'; + requestOpts.body = undefined; + requestOpts.headers.delete('content-length'); + } + + // HTTP-redirect fetch step 15 + resolve(fetch(new Request(locationURL, requestOpts))); + finalize(); + return; + } + } + + // prepare response + res.once('end', function () { + if (signal) signal.removeEventListener('abort', abortAndFinalize); + }); + let body = res.pipe(new PassThrough$1()); + + const response_options = { + url: request.url, + status: res.statusCode, + statusText: res.statusMessage, + headers: headers, + size: request.size, + timeout: request.timeout, + counter: request.counter + }; + + // HTTP-network fetch step 12.1.1.3 + const codings = headers.get('Content-Encoding'); + + // HTTP-network fetch step 12.1.1.4: handle content codings + + // in following scenarios we ignore compression support + // 1. compression support is disabled + // 2. HEAD request + // 3. no Content-Encoding header + // 4. no content response (204) + // 5. 
content not modified response (304) + if (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) { + response = new Response(body, response_options); + resolve(response); + return; + } + + // For Node v6+ + // Be less strict when decoding compressed responses, since sometimes + // servers send slightly invalid responses that are still accepted + // by common browsers. + // Always using Z_SYNC_FLUSH is what cURL does. + const zlibOptions = { + flush: zlib.Z_SYNC_FLUSH, + finishFlush: zlib.Z_SYNC_FLUSH + }; + + // for gzip + if (codings == 'gzip' || codings == 'x-gzip') { + body = body.pipe(zlib.createGunzip(zlibOptions)); + response = new Response(body, response_options); + resolve(response); + return; + } + + // for deflate + if (codings == 'deflate' || codings == 'x-deflate') { + // handle the infamous raw deflate response from old servers + // a hack for old IIS and Apache servers + const raw = res.pipe(new PassThrough$1()); + raw.once('data', function (chunk) { + // see http://stackoverflow.com/questions/37519828 + if ((chunk[0] & 0x0F) === 0x08) { + body = body.pipe(zlib.createInflate()); + } else { + body = body.pipe(zlib.createInflateRaw()); + } + response = new Response(body, response_options); + resolve(response); + }); + raw.on('end', function () { + // some old IIS servers return zero-length OK deflate responses, so 'data' is never emitted. + if (!response) { + response = new Response(body, response_options); + resolve(response); + } + }); + return; + } + + // for br + if (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') { + body = body.pipe(zlib.createBrotliDecompress()); + response = new Response(body, response_options); + resolve(response); + return; + } + + // otherwise, use response as-is + response = new Response(body, response_options); + resolve(response); + }); + + writeToStream(req, request); + }); +} +function fixResponseChunkedTransferBadEnding(request, errorCallback) { + let socket; + + request.on('socket', function (s) { + socket = s; + }); + + request.on('response', function (response) { + const headers = response.headers; + + if (headers['transfer-encoding'] === 'chunked' && !headers['content-length']) { + response.once('close', function (hadError) { + // tests for socket presence, as in some situations + // the 'socket' event is not triggered for the request + // (happens in deno), avoids `TypeError` + // if a data listener is still present we didn't end cleanly + const hasDataListener = socket && socket.listenerCount('data') > 0; + + if (hasDataListener && !hadError) { + const err = new Error('Premature close'); + err.code = 'ERR_STREAM_PREMATURE_CLOSE'; + errorCallback(err); + } + }); + } + }); +} + +function destroyStream(stream, err) { + if (stream.destroy) { + stream.destroy(err); + } else { + // node < 8 + stream.emit('error', err); + stream.end(); + } +} + +/** + * Redirect code matching + * + * @param Number code Status code + * @return Boolean + */ +fetch.isRedirect = function (code) { + return code === 301 || code === 302 || code === 303 || code === 307 || code === 308; +}; + +// expose Promise +fetch.Promise = global.Promise; + +export default fetch; +export { Headers, Request, Response, FetchError, AbortError }; diff --git a/node_modules/node-fetch/package.json b/node_modules/node-fetch/package.json new file mode 100644 index 0000000..e0be176 --- /dev/null +++ b/node_modules/node-fetch/package.json @@ -0,0 +1,89 @@ +{ + "name": "node-fetch", + "version": "2.7.0",
+ "description": "A light-weight module that brings window.fetch to node.js", + "main": "lib/index.js", + "browser": "./browser.js", + "module": "lib/index.mjs", + "files": [ + "lib/index.js", + "lib/index.mjs", + "lib/index.es.js", + "browser.js" + ], + "engines": { + "node": "4.x || >=6.0.0" + }, + "scripts": { + "build": "cross-env BABEL_ENV=rollup rollup -c", + "prepare": "npm run build", + "test": "cross-env BABEL_ENV=test mocha --require babel-register --throw-deprecation test/test.js", + "report": "cross-env BABEL_ENV=coverage nyc --reporter lcov --reporter text mocha -R spec test/test.js", + "coverage": "cross-env BABEL_ENV=coverage nyc --reporter json --reporter text mocha -R spec test/test.js && codecov -f coverage/coverage-final.json" + }, + "repository": { + "type": "git", + "url": "https://github.com/bitinn/node-fetch.git" + }, + "keywords": [ + "fetch", + "http", + "promise" + ], + "author": "David Frank", + "license": "MIT", + "bugs": { + "url": "https://github.com/bitinn/node-fetch/issues" + }, + "homepage": "https://github.com/bitinn/node-fetch", + "dependencies": { + "whatwg-url": "^5.0.0" + }, + "peerDependencies": { + "encoding": "^0.1.0" + }, + "peerDependenciesMeta": { + "encoding": { + "optional": true + } + }, + "devDependencies": { + "@ungap/url-search-params": "^0.1.2", + "abort-controller": "^1.1.0", + "abortcontroller-polyfill": "^1.3.0", + "babel-core": "^6.26.3", + "babel-plugin-istanbul": "^4.1.6", + "babel-plugin-transform-async-generator-functions": "^6.24.1", + "babel-polyfill": "^6.26.0", + "babel-preset-env": "1.4.0", + "babel-register": "^6.16.3", + "chai": "^3.5.0", + "chai-as-promised": "^7.1.1", + "chai-iterator": "^1.1.1", + "chai-string": "~1.3.0", + "codecov": "3.3.0", + "cross-env": "^5.2.0", + "form-data": "^2.3.3", + "is-builtin-module": "^1.0.0", + "mocha": "^5.0.0", + "nyc": "11.9.0", + "parted": "^0.1.1", + "promise": "^8.0.3", + "resumer": "0.0.0", + "rollup": "^0.63.4", + "rollup-plugin-babel": "^3.0.7", + "string-to-arraybuffer": "^1.0.2", + "teeny-request": "3.7.0" + }, + "release": { + "branches": [ + "+([0-9]).x", + "main", + "next", + { + "name": "beta", + "prerelease": true + } + ] + } +} diff --git a/node_modules/once/LICENSE b/node_modules/once/LICENSE new file mode 100644 index 0000000..19129e3 --- /dev/null +++ b/node_modules/once/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/once/README.md b/node_modules/once/README.md new file mode 100644 index 0000000..1f1ffca --- /dev/null +++ b/node_modules/once/README.md @@ -0,0 +1,79 @@ +# once + +Only call a function once. 
+ +## usage + +```javascript +var once = require('once') + +function load (file, cb) { + cb = once(cb) + loader.load('file') + loader.once('load', cb) + loader.once('error', cb) +} +``` + +Or add to the Function.prototype in a responsible way: + +```javascript +// only has to be done once +require('once').proto() + +function load (file, cb) { + cb = cb.once() + loader.load('file') + loader.once('load', cb) + loader.once('error', cb) +} +``` + +Ironically, the prototype feature makes this module twice as +complicated as necessary. + +To check whether your function has been called, use `fn.called`. Once the +function is called for the first time, the return value of the original +function is saved in `fn.value` and subsequent calls will continue to +return this value. + +```javascript +var once = require('once') + +function load (cb) { + cb = once(cb) + var stream = createStream() + stream.once('data', cb) + stream.once('end', function () { + if (!cb.called) cb(new Error('not found')) + }) +} +``` + +## `once.strict(func)` + +Throw an error if the function is called twice. + +Some functions are expected to be called only once. Using `once` for them would +potentially hide logical errors. + +In the example below, the `greet` function has to call the callback only once: + +```javascript +function greet (name, cb) { + // return is missing from the if statement + // when no name is passed, the callback is called twice + if (!name) cb('Hello anonymous') + cb('Hello ' + name) +} + +function log (msg) { + console.log(msg) +} + +// this will print 'Hello anonymous' but the logical error will be missed +greet(null, once(log)) + +// once.strict will print 'Hello anonymous' and throw an error when the callback is called a second time +greet(null, once.strict(log)) +``` diff --git a/node_modules/once/once.js b/node_modules/once/once.js new file mode 100644 index 0000000..2354067 --- /dev/null +++ b/node_modules/once/once.js @@ -0,0 +1,42 @@ +var wrappy = require('wrappy') +module.exports = wrappy(once) +module.exports.strict = wrappy(onceStrict) + +once.proto = once(function () { + Object.defineProperty(Function.prototype, 'once', { + value: function () { + return once(this) + }, + configurable: true + }) + + Object.defineProperty(Function.prototype, 'onceStrict', { + value: function () { + return onceStrict(this) + }, + configurable: true + }) +}) + +function once (fn) { + var f = function () { + if (f.called) return f.value + f.called = true + return f.value = fn.apply(this, arguments) + } + f.called = false + return f +} + +function onceStrict (fn) { + var f = function () { + if (f.called) + throw new Error(f.onceError) + f.called = true + return f.value = fn.apply(this, arguments) + } + var name = fn.name || 'Function wrapped with `once`' + f.onceError = name + " shouldn't be called more than once" + f.called = false + return f +} diff --git a/node_modules/once/package.json b/node_modules/once/package.json new file mode 100644 index 0000000..16815b2 --- /dev/null +++ b/node_modules/once/package.json @@ -0,0 +1,33 @@ +{ + "name": "once", + "version": "1.4.0", + "description": "Run a function exactly one time", + "main": "once.js", + "directories": { + "test": "test" + }, + "dependencies": { + "wrappy": "1" + }, + "devDependencies": { + "tap": "^7.0.1" + }, + "scripts": { + "test": "tap test/*.js" + }, + "files": [ + "once.js" + ], + "repository": { + "type": "git", + "url": "git://github.com/isaacs/once" + }, + "keywords": [ + "once", + "function", + "one", + "single" + ], + "author": "Isaac Z.
Schlueter (http://blog.izs.me/)", + "license": "ISC" +} diff --git a/node_modules/openai/CHANGELOG.md b/node_modules/openai/CHANGELOG.md new file mode 100644 index 0000000..d33ce4c --- /dev/null +++ b/node_modules/openai/CHANGELOG.md @@ -0,0 +1,2020 @@ +# Changelog + +## 4.77.0 (2024-12-17) + +Full Changelog: [v4.76.3...v4.77.0](https://github.com/openai/openai-node/compare/v4.76.3...v4.77.0) + +### Features + +* **api:** new o1 and GPT-4o models + preference fine-tuning ([#1229](https://github.com/openai/openai-node/issues/1229)) ([2e872d4](https://github.com/openai/openai-node/commit/2e872d4ac3717ab8f61741efffb7a31acd798338)) + + +### Chores + +* **internal:** fix some typos ([#1227](https://github.com/openai/openai-node/issues/1227)) ([d51fcfe](https://github.com/openai/openai-node/commit/d51fcfe3a66550a684eeeb0e6f17e1d9825cdf78)) +* **internal:** spec update ([#1230](https://github.com/openai/openai-node/issues/1230)) ([ed2b61d](https://github.com/openai/openai-node/commit/ed2b61d32703b64d9f91223bc02627a607f60483)) + +## 4.76.3 (2024-12-13) + +Full Changelog: [v4.76.2...v4.76.3](https://github.com/openai/openai-node/compare/v4.76.2...v4.76.3) + +### Chores + +* **internal:** better ecosystem test debugging ([86fc0a8](https://github.com/openai/openai-node/commit/86fc0a81ede2780d3fcebaabff3d9fa9a36cc9c0)) + + +### Documentation + +* **README:** fix helpers section links ([#1224](https://github.com/openai/openai-node/issues/1224)) ([efbe30a](https://github.com/openai/openai-node/commit/efbe30a156cec1836d3db28f663066b33be57ba2)) + +## 4.76.2 (2024-12-12) + +Full Changelog: [v4.76.1...v4.76.2](https://github.com/openai/openai-node/compare/v4.76.1...v4.76.2) + +### Chores + +* **internal:** update isAbsoluteURL ([#1223](https://github.com/openai/openai-node/issues/1223)) ([e908ed7](https://github.com/openai/openai-node/commit/e908ed759996fb7706baf46d094fc77419423971)) +* **types:** nicer error class types + jsdocs ([#1219](https://github.com/openai/openai-node/issues/1219)) ([576d24c](https://github.com/openai/openai-node/commit/576d24cc4b3d766dfe28a6031bdc24ac1b711655)) + +## 4.76.1 (2024-12-10) + +Full Changelog: [v4.76.0...v4.76.1](https://github.com/openai/openai-node/compare/v4.76.0...v4.76.1) + +### Chores + +* **internal:** bump cross-spawn to v7.0.6 ([#1217](https://github.com/openai/openai-node/issues/1217)) ([c07ad29](https://github.com/openai/openai-node/commit/c07ad298d58e5aeaf816ee3de65fd59bf3fc8b66)) +* **internal:** remove unnecessary getRequestClient function ([#1215](https://github.com/openai/openai-node/issues/1215)) ([bef3925](https://github.com/openai/openai-node/commit/bef392526cd339f45c574bc476649c77be36c612)) + +## 4.76.0 (2024-12-05) + +Full Changelog: [v4.75.0...v4.76.0](https://github.com/openai/openai-node/compare/v4.75.0...v4.76.0) + +### Features + +* **api:** updates ([#1212](https://github.com/openai/openai-node/issues/1212)) ([e0fedf2](https://github.com/openai/openai-node/commit/e0fedf2c5a91d0c03d8dad6854b366f77eab4923)) + + +### Chores + +* bump openapi url ([#1210](https://github.com/openai/openai-node/issues/1210)) ([3fa95a4](https://github.com/openai/openai-node/commit/3fa95a429d4b2adecce35a7b96b73f6d5e88eeeb)) + +## 4.75.0 (2024-12-03) + +Full Changelog: [v4.74.0...v4.75.0](https://github.com/openai/openai-node/compare/v4.74.0...v4.75.0) + +### Features + +* improve docs for jsr README.md ([#1208](https://github.com/openai/openai-node/issues/1208)) ([338527e](https://github.com/openai/openai-node/commit/338527e40361e2de899a63f280d4ec2db5e87f3c)) + +## 
4.74.0 (2024-12-02) + +Full Changelog: [v4.73.1...v4.74.0](https://github.com/openai/openai-node/compare/v4.73.1...v4.74.0) + +### Features + +* **internal:** make git install file structure match npm ([#1204](https://github.com/openai/openai-node/issues/1204)) ([e7c4c6d](https://github.com/openai/openai-node/commit/e7c4c6d23adbe52300053a8d35db6e341c438703)) + +## 4.73.1 (2024-11-25) + +Full Changelog: [v4.73.0...v4.73.1](https://github.com/openai/openai-node/compare/v4.73.0...v4.73.1) + +### Documentation + +* **readme:** mention `.withResponse()` for streaming request ID ([#1202](https://github.com/openai/openai-node/issues/1202)) ([b6800d4](https://github.com/openai/openai-node/commit/b6800d4dea2729fe3b0864171ce8fb3b2cc1b21c)) + +## 4.73.0 (2024-11-20) + +Full Changelog: [v4.72.0...v4.73.0](https://github.com/openai/openai-node/compare/v4.72.0...v4.73.0) + +### Features + +* **api:** add gpt-4o-2024-11-20 model ([#1201](https://github.com/openai/openai-node/issues/1201)) ([0feeafd](https://github.com/openai/openai-node/commit/0feeafd21ba4b6281cc3b9dafa2919b1e2e4d1c3)) +* bump model in all example snippets to gpt-4o ([6961c37](https://github.com/openai/openai-node/commit/6961c37f2e581bcc12ec2bbe77df2b9b260fe297)) + + +### Bug Fixes + +* **docs:** add missing await to pagination example ([#1190](https://github.com/openai/openai-node/issues/1190)) ([524b9e8](https://github.com/openai/openai-node/commit/524b9e82ae13a3b5093dcfbfd1169a798cf99ab4)) + + +### Chores + +* **client:** drop unused devDependency ([#1191](https://github.com/openai/openai-node/issues/1191)) ([8ee6c03](https://github.com/openai/openai-node/commit/8ee6c0335673f2ecf84ea11bdfc990adab607e20)) +* **internal:** spec update ([#1195](https://github.com/openai/openai-node/issues/1195)) ([12f9334](https://github.com/openai/openai-node/commit/12f93346857196b93f94865cc3744d769e5e519c)) +* **internal:** use reexports not destructuring ([#1181](https://github.com/openai/openai-node/issues/1181)) ([f555dd6](https://github.com/openai/openai-node/commit/f555dd6503bc4ccd4d13f4e1a1d36fbbfd51c369)) + + +### Documentation + +* bump models in example snippets to gpt-4o ([#1184](https://github.com/openai/openai-node/issues/1184)) ([4ec4027](https://github.com/openai/openai-node/commit/4ec402790cf3cfbccbf3ef9b61d577b0118977e8)) +* change readme title ([#1198](https://github.com/openai/openai-node/issues/1198)) ([e34981c](https://github.com/openai/openai-node/commit/e34981c00f2f0360baffe870bcc38786030671bf)) +* improve jsr documentation ([#1197](https://github.com/openai/openai-node/issues/1197)) ([ebdb4f7](https://github.com/openai/openai-node/commit/ebdb4f72cc01afbee649aca009fdaf413e61c507)) +* **readme:** fix incorrect fileBatches.uploadAndPoll params ([#1200](https://github.com/openai/openai-node/issues/1200)) ([3968ef1](https://github.com/openai/openai-node/commit/3968ef1c4fa860ff246e0e803808752b261c18ce)) + +## 4.72.0 (2024-11-12) + +Full Changelog: [v4.71.1...v4.72.0](https://github.com/openai/openai-node/compare/v4.71.1...v4.72.0) + +### Features + +* add back deno runtime testing without type checks ([1626cf5](https://github.com/openai/openai-node/commit/1626cf57e94706e1fc8b2f9ff4f173fe486d5150)) + + +### Chores + +* **ecosystem-tests:** bump wrangler version ([#1178](https://github.com/openai/openai-node/issues/1178)) ([4dfb0c6](https://github.com/openai/openai-node/commit/4dfb0c6aa7c4530665bc7d6beebcd04aa1490e27)) + +## 4.71.1 (2024-11-06) + +Full Changelog: 
[v4.71.0...v4.71.1](https://github.com/openai/openai-node/compare/v4.71.0...v4.71.1) + +### Bug Fixes + +* change release please configuration for jsr.json ([#1174](https://github.com/openai/openai-node/issues/1174)) ([c39efba](https://github.com/openai/openai-node/commit/c39efba812209c8906315596cc0a56e54ae8590a)) + +## 4.71.0 (2024-11-04) + +Full Changelog: [v4.70.3...v4.71.0](https://github.com/openai/openai-node/compare/v4.70.3...v4.71.0) + +### Features + +* **api:** add support for predicted outputs ([#1172](https://github.com/openai/openai-node/issues/1172)) ([08a7bb4](https://github.com/openai/openai-node/commit/08a7bb4d4b751aeed9655bfcb9fa27fc79a767c4)) + +## 4.70.3 (2024-11-04) + +Full Changelog: [v4.70.2...v4.70.3](https://github.com/openai/openai-node/compare/v4.70.2...v4.70.3) + +### Bug Fixes + +* change streaming helper imports to be relative ([e73b7cf](https://github.com/openai/openai-node/commit/e73b7cf84272bd02a39a67795d49db23db2d970f)) + +## 4.70.2 (2024-11-01) + +Full Changelog: [v4.70.1...v4.70.2](https://github.com/openai/openai-node/compare/v4.70.1...v4.70.2) + +### Bug Fixes + +* add permissions to github workflow ([ee75e00](https://github.com/openai/openai-node/commit/ee75e00b0fbf82553b219ee8948a8077e9c26a24)) +* skip deno ecosystem test ([5b181b0](https://github.com/openai/openai-node/commit/5b181b01b62139f8da35d426914c82b8425af141)) + +## 4.70.1 (2024-11-01) + +Full Changelog: [v4.70.0...v4.70.1](https://github.com/openai/openai-node/compare/v4.70.0...v4.70.1) + +### Bug Fixes + +* don't require deno to run build-deno ([#1167](https://github.com/openai/openai-node/issues/1167)) ([9d857bc](https://github.com/openai/openai-node/commit/9d857bc531a0bb3939f7660e49b31ccc38f60dd3)) + +## 4.70.0 (2024-11-01) + +Full Changelog: [v4.69.0...v4.70.0](https://github.com/openai/openai-node/compare/v4.69.0...v4.70.0) + +### Features + +* publish to jsr ([#1165](https://github.com/openai/openai-node/issues/1165)) ([5aa93a7](https://github.com/openai/openai-node/commit/5aa93a7fe704ef1ad077787852db38dc29104534)) + + +### Chores + +* **internal:** fix isolated modules exports ([9cd1958](https://github.com/openai/openai-node/commit/9cd19584dcc6f4004ea1adcee917aa88a37d5f1c)) + + +### Refactors + +* use type imports for type-only imports ([#1159](https://github.com/openai/openai-node/issues/1159)) ([07bbaf6](https://github.com/openai/openai-node/commit/07bbaf6ecac9a5e36471a35488020853ddf9214f)) + +## 4.69.0 (2024-10-30) + +Full Changelog: [v4.68.4...v4.69.0](https://github.com/openai/openai-node/compare/v4.68.4...v4.69.0) + +### Features + +* **api:** add new, expressive voices for Realtime and Audio in Chat Completions ([#1157](https://github.com/openai/openai-node/issues/1157)) ([12e501c](https://github.com/openai/openai-node/commit/12e501c8a215a2af29b9b8fceedc5935b6f2feef)) + + +### Bug Fixes + +* **internal:** support pnpm git installs ([#1156](https://github.com/openai/openai-node/issues/1156)) ([b744c5b](https://github.com/openai/openai-node/commit/b744c5b609533e9a6694d6cae0425fe9cd37e26c)) + + +### Documentation + +* **readme:** minor typo fixes ([#1154](https://github.com/openai/openai-node/issues/1154)) ([c6c9f9a](https://github.com/openai/openai-node/commit/c6c9f9aaf75f643016ad73574a7e24a228b5c60f)) + +## 4.68.4 (2024-10-23) + +Full Changelog: [v4.68.3...v4.68.4](https://github.com/openai/openai-node/compare/v4.68.3...v4.68.4) + +### Chores + +* **internal:** update spec version ([#1146](https://github.com/openai/openai-node/issues/1146)) 
([0165a8d](https://github.com/openai/openai-node/commit/0165a8d79340ede49557e05fd00d6fff9d69d930)) + +## 4.68.3 (2024-10-23) + +Full Changelog: [v4.68.2...v4.68.3](https://github.com/openai/openai-node/compare/v4.68.2...v4.68.3) + +### Chores + +* **internal:** bumps eslint and related dependencies ([#1143](https://github.com/openai/openai-node/issues/1143)) ([2643f42](https://github.com/openai/openai-node/commit/2643f42a36208c36daf23470ffcd227a891284eb)) + +## 4.68.2 (2024-10-22) + +Full Changelog: [v4.68.1...v4.68.2](https://github.com/openai/openai-node/compare/v4.68.1...v4.68.2) + +### Chores + +* **internal:** update spec version ([#1141](https://github.com/openai/openai-node/issues/1141)) ([2ccb3e3](https://github.com/openai/openai-node/commit/2ccb3e357aa2f3eb0fa32c619d8336c3b94cc882)) + +## 4.68.1 (2024-10-18) + +Full Changelog: [v4.68.0...v4.68.1](https://github.com/openai/openai-node/compare/v4.68.0...v4.68.1) + +### Bug Fixes + +* **client:** respect x-stainless-retry-count default headers ([#1138](https://github.com/openai/openai-node/issues/1138)) ([266717b](https://github.com/openai/openai-node/commit/266717b3301828c7df735064a380a055576183bc)) + +## 4.68.0 (2024-10-17) + +Full Changelog: [v4.67.3...v4.68.0](https://github.com/openai/openai-node/compare/v4.67.3...v4.68.0) + +### Features + +* **api:** add gpt-4o-audio-preview model for chat completions ([#1135](https://github.com/openai/openai-node/issues/1135)) ([17a623f](https://github.com/openai/openai-node/commit/17a623f70050bca4538ad2939055cd9d9b165f89)) + +## 4.67.3 (2024-10-08) + +Full Changelog: [v4.67.2...v4.67.3](https://github.com/openai/openai-node/compare/v4.67.2...v4.67.3) + +### Chores + +* **internal:** pass props through internal parser ([#1125](https://github.com/openai/openai-node/issues/1125)) ([5ef8aa8](https://github.com/openai/openai-node/commit/5ef8aa8d308f7374dd01d8079cd76e0d96999ec2)) + +## 4.67.2 (2024-10-07) + +Full Changelog: [v4.67.1...v4.67.2](https://github.com/openai/openai-node/compare/v4.67.1...v4.67.2) + +### Chores + +* **internal:** move LineDecoder to a separate file ([#1120](https://github.com/openai/openai-node/issues/1120)) ([0a4be65](https://github.com/openai/openai-node/commit/0a4be6506bf26d2b9552ff3fd13a22c04b53ea18)) + +## 4.67.1 (2024-10-02) + +Full Changelog: [v4.67.0...v4.67.1](https://github.com/openai/openai-node/compare/v4.67.0...v4.67.1) + +### Documentation + +* improve and reference contributing documentation ([#1115](https://github.com/openai/openai-node/issues/1115)) ([7fa30b3](https://github.com/openai/openai-node/commit/7fa30b3ebf276556141df95ba8e824a0276b61f8)) + +## 4.67.0 (2024-10-01) + +Full Changelog: [v4.66.1...v4.67.0](https://github.com/openai/openai-node/compare/v4.66.1...v4.67.0) + +### Features + +* **api:** support storing chat completions, enabling evals and model distillation in the dashboard ([#1112](https://github.com/openai/openai-node/issues/1112)) ([6424924](https://github.com/openai/openai-node/commit/6424924b6361e54f07c04fce9075ab16fcb712fb)) + +## 4.66.1 (2024-09-30) + +Full Changelog: [v4.66.0...v4.66.1](https://github.com/openai/openai-node/compare/v4.66.0...v4.66.1) + +### Bug Fixes + +* **audio:** add fallback overload types ([0c00a13](https://github.com/openai/openai-node/commit/0c00a13dd864b974d3376c905647209e4a79f244)) +* **audio:** use export type ([1519100](https://github.com/openai/openai-node/commit/1519100e530e08e7683549d0bcdd919b9c2d1654)) + +## 4.66.0 (2024-09-27) + +Full Changelog: 
[v4.65.0...v4.66.0](https://github.com/openai/openai-node/compare/v4.65.0...v4.66.0) + +### Features + +* **client:** add request_id to `.withResponse()` ([#1095](https://github.com/openai/openai-node/issues/1095)) ([2d0f565](https://github.com/openai/openai-node/commit/2d0f565f124a8862bc24214cc3ddce9db0ba75bc)) + + +### Bug Fixes + +* **audio:** correct types for transcriptions / translations ([#1104](https://github.com/openai/openai-node/issues/1104)) ([96e86c2](https://github.com/openai/openai-node/commit/96e86c214ba79d50035b61e5daa3489f082512c4)) +* **client:** correct types for transcriptions / translations ([#1105](https://github.com/openai/openai-node/issues/1105)) ([fa16ebb](https://github.com/openai/openai-node/commit/fa16ebbb314ebc7c274d27f0148d248edf48e055)) + +## 4.65.0 (2024-09-26) + +Full Changelog: [v4.64.0...v4.65.0](https://github.com/openai/openai-node/compare/v4.64.0...v4.65.0) + +### Features + +* **api:** add omni-moderation model ([#1100](https://github.com/openai/openai-node/issues/1100)) ([66c0f21](https://github.com/openai/openai-node/commit/66c0f21fad3be9c57b810c4a7eebb71eb6ccbcc1)) + +## 4.64.0 (2024-09-25) + +Full Changelog: [v4.63.0...v4.64.0](https://github.com/openai/openai-node/compare/v4.63.0...v4.64.0) + +### Features + +* **client:** allow overriding retry count header ([#1098](https://github.com/openai/openai-node/issues/1098)) ([a466ff7](https://github.com/openai/openai-node/commit/a466ff78a436db82d79a8f53066a85a3b1dbe039)) + + +### Bug Fixes + +* **audio:** correct response_format translations type ([#1097](https://github.com/openai/openai-node/issues/1097)) ([9a5f461](https://github.com/openai/openai-node/commit/9a5f461306e84b62ce1ed8aedbfee90798def5fb)) + + +### Chores + +* **internal:** fix ecosystem tests error output ([#1096](https://github.com/openai/openai-node/issues/1096)) ([ecdb4e9](https://github.com/openai/openai-node/commit/ecdb4e923f94e828d8758559aea78c82417b8f12)) +* **internal:** fix slow ecosystem test ([#1093](https://github.com/openai/openai-node/issues/1093)) ([80ed9ec](https://github.com/openai/openai-node/commit/80ed9ecbd60129164cb407e46dddbc06ef1c54ab)) + +## 4.63.0 (2024-09-20) + +Full Changelog: [v4.62.1...v4.63.0](https://github.com/openai/openai-node/compare/v4.62.1...v4.63.0) + +### Features + +* **client:** send retry count header ([#1087](https://github.com/openai/openai-node/issues/1087)) ([7bcebc0](https://github.com/openai/openai-node/commit/7bcebc0e3965c2decd1dffb1e67f5197260ca89e)) + + +### Chores + +* **types:** improve type name for embedding models ([#1089](https://github.com/openai/openai-node/issues/1089)) ([d6966d9](https://github.com/openai/openai-node/commit/d6966d9872a14b7fbee85a7bb1fae697852b8ce0)) + +## 4.62.1 (2024-09-18) + +Full Changelog: [v4.62.0...v4.62.1](https://github.com/openai/openai-node/compare/v4.62.0...v4.62.1) + +### Bug Fixes + +* **types:** remove leftover polyfill usage ([#1084](https://github.com/openai/openai-node/issues/1084)) ([b7c9538](https://github.com/openai/openai-node/commit/b7c9538981a11005fb0a00774683d979d3ca663a)) + +## 4.62.0 (2024-09-17) + +Full Changelog: [v4.61.1...v4.62.0](https://github.com/openai/openai-node/compare/v4.61.1...v4.62.0) + +### Features + +* **client:** add ._request_id property to object responses ([#1078](https://github.com/openai/openai-node/issues/1078)) ([d5c2131](https://github.com/openai/openai-node/commit/d5c21314449091dd1c668c7358b25e041466f588)) + + +### Chores + +* **internal:** add ecosystem test for qs reproduction 
([0199dd8](https://github.com/openai/openai-node/commit/0199dd85981497fac2b60f786acc00ea30683897)) +* **internal:** add query string encoder ([#1079](https://github.com/openai/openai-node/issues/1079)) ([f870682](https://github.com/openai/openai-node/commit/f870682d5c490182547c428b0b5c75da0e34d15a)) +* **internal:** fix some types ([#1082](https://github.com/openai/openai-node/issues/1082)) ([1ec41a7](https://github.com/openai/openai-node/commit/1ec41a7d768502a31abb33bf86b0539e5b4b6541)) +* **tests:** add query string tests to ecosystem tests ([36be724](https://github.com/openai/openai-node/commit/36be724384401bb697d8b07b0a1965be721cfa51)) + +## 4.61.1 (2024-09-16) + +Full Changelog: [v4.61.0...v4.61.1](https://github.com/openai/openai-node/compare/v4.61.0...v4.61.1) + +### Bug Fixes + +* **runTools:** correct request options type ([#1073](https://github.com/openai/openai-node/issues/1073)) ([399f971](https://github.com/openai/openai-node/commit/399f9710f9a1406fe2dd048a1d26418c0de7ff0c)) + + +### Chores + +* **internal:** update spec link ([#1076](https://github.com/openai/openai-node/issues/1076)) ([20f1bcc](https://github.com/openai/openai-node/commit/20f1bcce2b5d03c5185989212a5c5271a8d4209c)) + +## 4.61.0 (2024-09-13) + +Full Changelog: [v4.60.1...v4.61.0](https://github.com/openai/openai-node/compare/v4.60.1...v4.61.0) + +### Bug Fixes + +* **client:** partial parsing update to handle strings ([46e8eb6](https://github.com/openai/openai-node/commit/46e8eb6a9a45b11f9e4c97474ed6c02b1faa43af)) +* **examples:** handle usage chunk in tool call streaming ([#1068](https://github.com/openai/openai-node/issues/1068)) ([e4188c4](https://github.com/openai/openai-node/commit/e4188c4ba443a21d1ef94658df5366f80f0e573b)) + + +### Chores + +* **examples:** add a small delay to tool-calls example streaming ([a3fc659](https://github.com/openai/openai-node/commit/a3fc65928af7085d1d8d785ad4765fedc3955641)) + + +### Documentation + +* update CONTRIBUTING.md ([#1071](https://github.com/openai/openai-node/issues/1071)) ([5de81c9](https://github.com/openai/openai-node/commit/5de81c95d7326602865e007715a76d5595824fd9)) + +## 4.60.1 (2024-09-13) + +Full Changelog: [v4.60.0...v4.60.1](https://github.com/openai/openai-node/compare/v4.60.0...v4.60.1) + +### Bug Fixes + +* **zod:** correctly add $ref definitions for transformed schemas ([#1065](https://github.com/openai/openai-node/issues/1065)) ([9b93b24](https://github.com/openai/openai-node/commit/9b93b24b8ae267fe403fb9cd4876d9772f40878b)) + +## 4.60.0 (2024-09-12) + +Full Changelog: [v4.59.0...v4.60.0](https://github.com/openai/openai-node/compare/v4.59.0...v4.60.0) + +### Features + +* **api:** add o1 models ([#1061](https://github.com/openai/openai-node/issues/1061)) ([224cc04](https://github.com/openai/openai-node/commit/224cc045200cd1ce1517b4376c505de9b9a74ccc)) + +## 4.59.0 (2024-09-11) + +Full Changelog: [v4.58.2...v4.59.0](https://github.com/openai/openai-node/compare/v4.58.2...v4.59.0) + +### Features + +* **structured outputs:** support accessing raw responses ([#1058](https://github.com/openai/openai-node/issues/1058)) ([af17697](https://github.com/openai/openai-node/commit/af176979894ee95a51e09abc239a8fd3a639dcde)) + + +### Documentation + +* **azure:** example for custom base URL ([#1055](https://github.com/openai/openai-node/issues/1055)) ([20defc8](https://github.com/openai/openai-node/commit/20defc80801e1f1f489a07bd1264be71c56c586f)) +* **azure:** remove locale from docs link ([#1054](https://github.com/openai/openai-node/issues/1054)) 
([f9b7eac](https://github.com/openai/openai-node/commit/f9b7eac58020cff0e367a15b9a2ca4e7c95cbb89)) + +## 4.58.2 (2024-09-09) + +Full Changelog: [v4.58.1...v4.58.2](https://github.com/openai/openai-node/compare/v4.58.1...v4.58.2) + +### Bug Fixes + +* **errors:** pass message through to APIConnectionError ([#1050](https://github.com/openai/openai-node/issues/1050)) ([5a34316](https://github.com/openai/openai-node/commit/5a3431672e200a6bc161d39873e914434557801e)) + + +### Chores + +* better object fallback behaviour for casting errors ([#1053](https://github.com/openai/openai-node/issues/1053)) ([b7d4619](https://github.com/openai/openai-node/commit/b7d46190d2bb775145a9a3de6408c38abacfa055)) + +## 4.58.1 (2024-09-06) + +Full Changelog: [v4.58.0...v4.58.1](https://github.com/openai/openai-node/compare/v4.58.0...v4.58.1) + +### Chores + +* **docs:** update browser support information ([#1045](https://github.com/openai/openai-node/issues/1045)) ([d326cc5](https://github.com/openai/openai-node/commit/d326cc54a77c450672fbf07d736cec80a9ba72fb)) + +## 4.58.0 (2024-09-05) + +Full Changelog: [v4.57.3...v4.58.0](https://github.com/openai/openai-node/compare/v4.57.3...v4.58.0) + +### Features + +* **vector store:** improve chunking strategy type names ([#1041](https://github.com/openai/openai-node/issues/1041)) ([471cec3](https://github.com/openai/openai-node/commit/471cec3228886253f07c13a362827a31e9ec7b63)) + + +### Bug Fixes + +* **uploads:** avoid making redundant memory copies ([#1043](https://github.com/openai/openai-node/issues/1043)) ([271297b](https://github.com/openai/openai-node/commit/271297bd32393d4c5663023adf82f8fb19dc3d25)) + +## 4.57.3 (2024-09-04) + +Full Changelog: [v4.57.2...v4.57.3](https://github.com/openai/openai-node/compare/v4.57.2...v4.57.3) + +### Bug Fixes + +* **helpers/zod:** avoid import issue in certain environments ([#1039](https://github.com/openai/openai-node/issues/1039)) ([e238daa](https://github.com/openai/openai-node/commit/e238daa7c12f3fb13369f58b9d405365f5efcc8f)) + + +### Chores + +* **internal:** minor bump qs version ([#1037](https://github.com/openai/openai-node/issues/1037)) ([8ec218e](https://github.com/openai/openai-node/commit/8ec218e9efb657927b3c0346822a96872aeaf137)) + +## 4.57.2 (2024-09-04) + +Full Changelog: [v4.57.1...v4.57.2](https://github.com/openai/openai-node/compare/v4.57.1...v4.57.2) + +### Chores + +* **internal:** dependency updates ([#1035](https://github.com/openai/openai-node/issues/1035)) ([e815fb6](https://github.com/openai/openai-node/commit/e815fb6dee75219563d3a7776774ba1c2984560e)) + +## 4.57.1 (2024-09-03) + +Full Changelog: [v4.57.0...v4.57.1](https://github.com/openai/openai-node/compare/v4.57.0...v4.57.1) + +### Bug Fixes + +* **assistants:** correctly accumulate tool calls when streaming ([#1031](https://github.com/openai/openai-node/issues/1031)) ([d935ad3](https://github.com/openai/openai-node/commit/d935ad3fa37b2701f4c9f6e433ada6074280a871)) +* **client:** correct File construction from node-fetch Responses ([#1029](https://github.com/openai/openai-node/issues/1029)) ([22ebdc2](https://github.com/openai/openai-node/commit/22ebdc2ca7d98e0f266110c4cf827e53a0a22026)) +* runTools without stream should not emit user message events ([#1005](https://github.com/openai/openai-node/issues/1005)) ([22ded4d](https://github.com/openai/openai-node/commit/22ded4d549a157482a8de2faf65e92c5be07fa95)) + + +### Chores + +* **internal/tests:** workaround bug in recent types/node release 
([3c7bdfd](https://github.com/openai/openai-node/commit/3c7bdfdb373bff77db0e3fecd5d49ddfa4284cd9)) + +## 4.57.0 (2024-08-29) + +Full Changelog: [v4.56.2...v4.57.0](https://github.com/openai/openai-node/compare/v4.56.2...v4.57.0) + +### Features + +* **api:** add file search result details to run steps ([#1023](https://github.com/openai/openai-node/issues/1023)) ([d9acd0a](https://github.com/openai/openai-node/commit/d9acd0a2c52b27983f8db6a8de6a776078b1d41b)) + + +### Bug Fixes + +* install examples deps as part of bootstrap script ([#1022](https://github.com/openai/openai-node/issues/1022)) ([eae8e36](https://github.com/openai/openai-node/commit/eae8e36fd5514eb60773646ec775badde50e783c)) + +## 4.56.2 (2024-08-29) + +Full Changelog: [v4.56.1...v4.56.2](https://github.com/openai/openai-node/compare/v4.56.1...v4.56.2) + +### Chores + +* run tsc as part of lint script ([#1020](https://github.com/openai/openai-node/issues/1020)) ([4942347](https://github.com/openai/openai-node/commit/49423472f2b0a0b63961174bedfc00bfd99d47f9)) + +## 4.56.1 (2024-08-27) + +Full Changelog: [v4.56.0...v4.56.1](https://github.com/openai/openai-node/compare/v4.56.0...v4.56.1) + +### Chores + +* **ci:** check for build errors ([#1013](https://github.com/openai/openai-node/issues/1013)) ([7ff2127](https://github.com/openai/openai-node/commit/7ff21273091a605e05173502654cfb9c90a4382e)) + +## 4.56.0 (2024-08-16) + +Full Changelog: [v4.55.9...v4.56.0](https://github.com/openai/openai-node/compare/v4.55.9...v4.56.0) + +### Features + +* **api:** add chatgpt-4o-latest model ([edc4398](https://github.com/openai/openai-node/commit/edc43986ba96a0fda48f7eea368efe706f68dcac)) + +## 4.55.9 (2024-08-16) + +Full Changelog: [v4.55.8...v4.55.9](https://github.com/openai/openai-node/compare/v4.55.8...v4.55.9) + +### Bug Fixes + +* **azure/tts:** avoid stripping model param ([#999](https://github.com/openai/openai-node/issues/999)) ([c3a7ccd](https://github.com/openai/openai-node/commit/c3a7ccdbd6d9a2576509c2dc6c1605bc73c6dde7)) + +## 4.55.8 (2024-08-15) + +Full Changelog: [v4.55.7...v4.55.8](https://github.com/openai/openai-node/compare/v4.55.7...v4.55.8) + +### Chores + +* **types:** define FilePurpose enum ([#997](https://github.com/openai/openai-node/issues/997)) ([19b941b](https://github.com/openai/openai-node/commit/19b941be4ff3e4fa7e67b820a5aac51e5c8d4f60)) + +## 4.55.7 (2024-08-13) + +Full Changelog: [v4.55.6...v4.55.7](https://github.com/openai/openai-node/compare/v4.55.6...v4.55.7) + +### Bug Fixes + +* **json-schema:** correct handling of nested recursive schemas ([#992](https://github.com/openai/openai-node/issues/992)) ([ac309ab](https://github.com/openai/openai-node/commit/ac309abee3419594f45680c7d0ab11e13ce28c5b)) + +## 4.55.6 (2024-08-13) + +Full Changelog: [v4.55.5...v4.55.6](https://github.com/openai/openai-node/compare/v4.55.5...v4.55.6) + +### Bug Fixes + +* **zod-to-json-schema:** correct licensing ([#986](https://github.com/openai/openai-node/issues/986)) ([bd2051e](https://github.com/openai/openai-node/commit/bd2051e501e2ceafcd095f82205c2e668e1d68d7)) + +## 4.55.5 (2024-08-12) + +Full Changelog: [v4.55.4...v4.55.5](https://github.com/openai/openai-node/compare/v4.55.4...v4.55.5) + +### Chores + +* **examples:** minor formatting changes ([#987](https://github.com/openai/openai-node/issues/987)) ([8e6b615](https://github.com/openai/openai-node/commit/8e6b615ada09fa4e50dc8e0b5decf662eed19856)) +* sync openapi url ([#989](https://github.com/openai/openai-node/issues/989)) 
([02ff1c5](https://github.com/openai/openai-node/commit/02ff1c55b5eefd8b6193ba2bf10dd5515945bd7a)) + +## 4.55.4 (2024-08-09) + +Full Changelog: [v4.55.3...v4.55.4](https://github.com/openai/openai-node/compare/v4.55.3...v4.55.4) + +### Bug Fixes + +* **helpers/zod:** nested union schema extraction ([#979](https://github.com/openai/openai-node/issues/979)) ([31b05aa](https://github.com/openai/openai-node/commit/31b05aa6fa0445141ae17a1b1eff533b83735f3a)) + + +### Chores + +* **ci:** bump prism mock server version ([#982](https://github.com/openai/openai-node/issues/982)) ([7442643](https://github.com/openai/openai-node/commit/7442643e8445eea15da54843a7c9d7580a402979)) +* **ci:** codeowners file ([#980](https://github.com/openai/openai-node/issues/980)) ([17a42b2](https://github.com/openai/openai-node/commit/17a42b2f6e2de2dce338358a48f6d7d4ed723f6f)) + +## 4.55.3 (2024-08-08) + +Full Changelog: [v4.55.2...v4.55.3](https://github.com/openai/openai-node/compare/v4.55.2...v4.55.3) + +### Chores + +* **internal:** updates ([#975](https://github.com/openai/openai-node/issues/975)) ([313a190](https://github.com/openai/openai-node/commit/313a19059a61893887ac0b57bb488c24bc40f099)) + +## 4.55.2 (2024-08-08) + +Full Changelog: [v4.55.1...v4.55.2](https://github.com/openai/openai-node/compare/v4.55.1...v4.55.2) + +### Bug Fixes + +* **helpers/zod:** add `extract-to-root` ref strategy ([ef3c73c](https://github.com/openai/openai-node/commit/ef3c73cfdf1a8e45346417812168e476fea65690)) +* **helpers/zod:** add `nullableStrategy` option ([ad89892](https://github.com/openai/openai-node/commit/ad89892f4ac0daba161ce97267a165a12f67c341)) +* **helpers/zod:** correct logic for adding root schema to definitions ([e4a247a](https://github.com/openai/openai-node/commit/e4a247a2a87b4d3bde55891b31e07413d3a9f00d)) + + +### Chores + +* **internal:** add README for vendored zod-to-json-schema ([d8a80a9](https://github.com/openai/openai-node/commit/d8a80a915dfe723a59f512e7128aecf857324388)) +* **tests:** add more API request tests ([04c1590](https://github.com/openai/openai-node/commit/04c1590a64127c43898c3c88bcbdd624d54008f6)) + +## 4.55.1 (2024-08-07) + +Full Changelog: [v4.55.0...v4.55.1](https://github.com/openai/openai-node/compare/v4.55.0...v4.55.1) + +### Bug Fixes + +* **helpers/zod:** correct schema generation for recursive schemas ([cb54d93](https://github.com/openai/openai-node/commit/cb54d93162c86ecfd476733805a431aab25d86d6)) + + +### Chores + +* **api:** remove old `AssistantResponseFormat` type ([#967](https://github.com/openai/openai-node/issues/967)) ([9fd94bf](https://github.com/openai/openai-node/commit/9fd94bfc35128d3bc45fbf0a65e6a8d2ea4562d5)) +* **internal:** update test snapshots ([bceea60](https://github.com/openai/openai-node/commit/bceea60e461c40a9e59d52772122dd612a2ff1c4)) +* **vendor/zodJsonSchema:** add option to duplicate top-level ref ([84b8a38](https://github.com/openai/openai-node/commit/84b8a3820b0ce1c78bfd3db468d8d2962875b4ab)) + + +### Documentation + +* **examples:** add UI generation example script ([c75c017](https://github.com/openai/openai-node/commit/c75c017c16cbfe3fc60ea4ee5779782005e64463)) + +## 4.55.0 (2024-08-06) + +Full Changelog: [v4.54.0...v4.55.0](https://github.com/openai/openai-node/compare/v4.54.0...v4.55.0) + +### Features + +* **api:** add structured outputs support ([573787c](https://github.com/openai/openai-node/commit/573787cf3ea8eea593eeeb5e24a9256951e2cc35)) + +## 4.54.0 (2024-08-02) + +Full Changelog: 
[v4.53.2...v4.54.0](https://github.com/openai/openai-node/compare/v4.53.2...v4.54.0) + +### Features + +* extract out `ImageModel`, `AudioModel`, `SpeechModel` ([#964](https://github.com/openai/openai-node/issues/964)) ([1edf957](https://github.com/openai/openai-node/commit/1edf957e1cb86c2a7b2d29e28f2b8f428ea0cd7d)) +* make enums not nominal ([#965](https://github.com/openai/openai-node/issues/965)) ([0dd0cd1](https://github.com/openai/openai-node/commit/0dd0cd158d6765c3a04ac983aad03c2ecad14502)) + + +### Chores + +* **ci:** correctly tag pre-release npm packages ([#963](https://github.com/openai/openai-node/issues/963)) ([f1a4a68](https://github.com/openai/openai-node/commit/f1a4a686bbf4a38919b8597f008d895d1b99d8df)) +* **internal:** add constant for default timeout ([#960](https://github.com/openai/openai-node/issues/960)) ([55c01f4](https://github.com/openai/openai-node/commit/55c01f4dc5d132c21713f9e8606b95abc76fcd44)) +* **internal:** cleanup event stream helpers ([#950](https://github.com/openai/openai-node/issues/950)) ([8f49956](https://github.com/openai/openai-node/commit/8f499566c47bd7d4799a8cbe0d980553348b8f48)) + + +### Documentation + +* **README:** link Lifecycle in Polling Helpers section ([#962](https://github.com/openai/openai-node/issues/962)) ([c610c81](https://github.com/openai/openai-node/commit/c610c813e8d7f96b5b8315ae194e0a9ff565f43d)) + +## 4.53.2 (2024-07-26) + +Full Changelog: [v4.53.1...v4.53.2](https://github.com/openai/openai-node/compare/v4.53.1...v4.53.2) + +### Chores + +* **docs:** fix incorrect client var names ([#955](https://github.com/openai/openai-node/issues/955)) ([cc91be8](https://github.com/openai/openai-node/commit/cc91be867bf7042abb2ee6c6d5ef69082ac64280)) + +## 4.53.1 (2024-07-25) + +Full Changelog: [v4.53.0...v4.53.1](https://github.com/openai/openai-node/compare/v4.53.0...v4.53.1) + +### Bug Fixes + +* **compat:** remove ReadableStream polyfill redundant since node v16 ([#954](https://github.com/openai/openai-node/issues/954)) ([78b2a83](https://github.com/openai/openai-node/commit/78b2a83f085bb7ddf6a5f429636de1e3eef20f9d)) + + +### Chores + +* **tests:** update prism version ([#948](https://github.com/openai/openai-node/issues/948)) ([9202c91](https://github.com/openai/openai-node/commit/9202c91d697a116eb1b834e01f4073d254438149)) + +## 4.53.0 (2024-07-22) + +Full Changelog: [v4.52.7...v4.53.0](https://github.com/openai/openai-node/compare/v4.52.7...v4.53.0) + +### Features + +* **api:** add new gpt-4o-mini models ([#942](https://github.com/openai/openai-node/issues/942)) ([7ac10dd](https://github.com/openai/openai-node/commit/7ac10ddbb87e9eb0e8e34d58a13a4775cbba1c24)) +* **api:** add uploads endpoints ([#946](https://github.com/openai/openai-node/issues/946)) ([8709ceb](https://github.com/openai/openai-node/commit/8709ceb0e01c5a1f96704c998f35ca1117ecadac)) + + +### Chores + +* **docs:** mention support of web browser runtimes ([#938](https://github.com/openai/openai-node/issues/938)) ([123d19d](https://github.com/openai/openai-node/commit/123d19d5a157110c8ada556c107caf0eb8b2ccc6)) +* **docs:** use client instead of package name in Node examples ([#941](https://github.com/openai/openai-node/issues/941)) ([8b5db1f](https://github.com/openai/openai-node/commit/8b5db1f53e66ce4b6e554f40a8dd2fd474085027)) + +## 4.52.7 (2024-07-11) + +Full Changelog: [v4.52.6...v4.52.7](https://github.com/openai/openai-node/compare/v4.52.6...v4.52.7) + +### Documentation + +* **examples:** update example values 
([#933](https://github.com/openai/openai-node/issues/933)) ([92512ab](https://github.com/openai/openai-node/commit/92512abcd7ab5d7c452dfae007c3a25041062656)) + +## 4.52.6 (2024-07-11) + +Full Changelog: [v4.52.5...v4.52.6](https://github.com/openai/openai-node/compare/v4.52.5...v4.52.6) + +### Chores + +* **ci:** also run workflows for PRs targeting `next` ([#931](https://github.com/openai/openai-node/issues/931)) ([e3f979a](https://github.com/openai/openai-node/commit/e3f979ae94b2252b9552d1e03de5b92d398a3e28)) + +## 4.52.5 (2024-07-10) + +Full Changelog: [v4.52.4...v4.52.5](https://github.com/openai/openai-node/compare/v4.52.4...v4.52.5) + +### Bug Fixes + +* **vectorStores:** correctly handle missing `files` in `uploadAndPoll()` ([#926](https://github.com/openai/openai-node/issues/926)) ([945fca6](https://github.com/openai/openai-node/commit/945fca646b02b52bbc9163cb51f5d87e7db8afd6)) + +## 4.52.4 (2024-07-08) + +Full Changelog: [v4.52.3...v4.52.4](https://github.com/openai/openai-node/compare/v4.52.3...v4.52.4) + +### Refactors + +* **examples:** removedduplicated 'messageDelta' streaming event. ([#909](https://github.com/openai/openai-node/issues/909)) ([7b0b3d2](https://github.com/openai/openai-node/commit/7b0b3d2e228532fca19f49390a2831a1abac72a4)) + +## 4.52.3 (2024-07-02) + +Full Changelog: [v4.52.2...v4.52.3](https://github.com/openai/openai-node/compare/v4.52.2...v4.52.3) + +### Chores + +* minor change to tests ([#916](https://github.com/openai/openai-node/issues/916)) ([b8a33e3](https://github.com/openai/openai-node/commit/b8a33e31697b52d733f28d9380e0c02a2d179474)) + +## 4.52.2 (2024-06-28) + +Full Changelog: [v4.52.1...v4.52.2](https://github.com/openai/openai-node/compare/v4.52.1...v4.52.2) + +### Chores + +* gitignore test server logs ([#914](https://github.com/openai/openai-node/issues/914)) ([6316720](https://github.com/openai/openai-node/commit/6316720c3fdd0422965ae3890275062bc0fe3c2b)) + +## 4.52.1 (2024-06-25) + +Full Changelog: [v4.52.0...v4.52.1](https://github.com/openai/openai-node/compare/v4.52.0...v4.52.1) + +### Chores + +* **doc:** clarify service tier default value ([#908](https://github.com/openai/openai-node/issues/908)) ([e4c8100](https://github.com/openai/openai-node/commit/e4c8100c7732bdc336b52a48d09945782c0fa2a3)) +* **internal:** minor reformatting ([#911](https://github.com/openai/openai-node/issues/911)) ([78c9377](https://github.com/openai/openai-node/commit/78c9377fcd563645081629a89f3fda2c1ff4e175)) +* **internal:** re-order some imports ([#904](https://github.com/openai/openai-node/issues/904)) ([dbd5c40](https://github.com/openai/openai-node/commit/dbd5c4053ba2f255dfc56676ced5b30381843c75)) + +## 4.52.0 (2024-06-18) + +Full Changelog: [v4.51.0...v4.52.0](https://github.com/openai/openai-node/compare/v4.51.0...v4.52.0) + +### Features + +* **api:** add service tier argument for chat completions ([#900](https://github.com/openai/openai-node/issues/900)) ([91e6651](https://github.com/openai/openai-node/commit/91e66514037a8d6f9c39d3c96cd5769885925a4b)) + +## 4.51.0 (2024-06-12) + +Full Changelog: [v4.50.0...v4.51.0](https://github.com/openai/openai-node/compare/v4.50.0...v4.51.0) + +### Features + +* **api:** updates ([#894](https://github.com/openai/openai-node/issues/894)) ([b58f5a1](https://github.com/openai/openai-node/commit/b58f5a1344f631dac0fb8ecfa4fbae49af070189)) + +## 4.50.0 (2024-06-10) + +Full Changelog: [v4.49.1...v4.50.0](https://github.com/openai/openai-node/compare/v4.49.1...v4.50.0) + +### Features + +* support `application/octet-stream` 
request bodies ([#892](https://github.com/openai/openai-node/issues/892)) ([51661c8](https://github.com/openai/openai-node/commit/51661c8068d4990df6916becb6bb85353b54ef4d)) + +## 4.49.1 (2024-06-07) + +Full Changelog: [v4.49.0...v4.49.1](https://github.com/openai/openai-node/compare/v4.49.0...v4.49.1) + +### Bug Fixes + +* remove erroneous thread create argument ([#889](https://github.com/openai/openai-node/issues/889)) ([a9f898e](https://github.com/openai/openai-node/commit/a9f898ee109a0b35a672e41c6497f3a75eff7734)) + +## 4.49.0 (2024-06-06) + +Full Changelog: [v4.48.3...v4.49.0](https://github.com/openai/openai-node/compare/v4.48.3...v4.49.0) + +### Features + +* **api:** updates ([#887](https://github.com/openai/openai-node/issues/887)) ([359eeb3](https://github.com/openai/openai-node/commit/359eeb33b08b371451f216d1e21dd3334ec15f36)) + +## 4.48.3 (2024-06-06) + +Full Changelog: [v4.48.2...v4.48.3](https://github.com/openai/openai-node/compare/v4.48.2...v4.48.3) + +### Chores + +* **internal:** minor refactor of tests ([#884](https://github.com/openai/openai-node/issues/884)) ([0b71f2b](https://github.com/openai/openai-node/commit/0b71f2b2cb67e5714476b6f63b4ef93a0140bff2)) + +## 4.48.2 (2024-06-05) + +Full Changelog: [v4.48.1...v4.48.2](https://github.com/openai/openai-node/compare/v4.48.1...v4.48.2) + +### Chores + +* **internal:** minor change to tests ([#881](https://github.com/openai/openai-node/issues/881)) ([5e2d608](https://github.com/openai/openai-node/commit/5e2d608ca9a2bcb3f261ad13c848d327b60b6fb1)) + +## 4.48.1 (2024-06-04) + +Full Changelog: [v4.48.0...v4.48.1](https://github.com/openai/openai-node/compare/v4.48.0...v4.48.1) + +### Bug Fixes + +* resolve typescript issue ([1129707](https://github.com/openai/openai-node/commit/11297073b1a370fc9c8676446f939a48071999b2)) + +## 4.48.0 (2024-06-03) + +Full Changelog: [v4.47.3...v4.48.0](https://github.com/openai/openai-node/compare/v4.47.3...v4.48.0) + +### Features + +* **api:** updates ([#874](https://github.com/openai/openai-node/issues/874)) ([295c248](https://github.com/openai/openai-node/commit/295c2486005f6f1eb81cbbd6994b4382801d0707)) + +## 4.47.3 (2024-05-31) + +Full Changelog: [v4.47.2...v4.47.3](https://github.com/openai/openai-node/compare/v4.47.2...v4.47.3) + +### Bug Fixes + +* allow git imports for pnpm ([#873](https://github.com/openai/openai-node/issues/873)) ([9da9809](https://github.com/openai/openai-node/commit/9da98090e80cbe988a3d695e4c9b57439080ec3e)) + + +### Documentation + +* **azure:** update example and readme to use Entra ID ([#857](https://github.com/openai/openai-node/issues/857)) ([722eff1](https://github.com/openai/openai-node/commit/722eff1a7aeaa2ce3c40301709db61258c9afa16)) + +## 4.47.2 (2024-05-28) + +Full Changelog: [v4.47.1...v4.47.2](https://github.com/openai/openai-node/compare/v4.47.1...v4.47.2) + +### Documentation + +* **readme:** add bundle size badge ([#869](https://github.com/openai/openai-node/issues/869)) ([e252132](https://github.com/openai/openai-node/commit/e2521327b7b4f5abe97e4c58c417b37d00079ef8)) + +## 4.47.1 (2024-05-14) + +Full Changelog: [v4.47.0...v4.47.1](https://github.com/openai/openai-node/compare/v4.47.0...v4.47.1) + +### Chores + +* **internal:** add slightly better logging to scripts ([#848](https://github.com/openai/openai-node/issues/848)) ([139e690](https://github.com/openai/openai-node/commit/139e690546775b3568934dd990dd329fce2fbc2f)) + +## 4.47.0 (2024-05-14) + +Full Changelog: [v4.46.1...v4.47.0](https://github.com/openai/openai-node/compare/v4.46.1...v4.47.0) + 
+### Features + +* **api:** add incomplete state ([#846](https://github.com/openai/openai-node/issues/846)) ([5f663a1](https://github.com/openai/openai-node/commit/5f663a167361b905c6d0c1242e8a78037a7e4a57)) + +## 4.46.1 (2024-05-13) + +Full Changelog: [v4.46.0...v4.46.1](https://github.com/openai/openai-node/compare/v4.46.0...v4.46.1) + +### Refactors + +* change import paths to be relative ([#843](https://github.com/openai/openai-node/issues/843)) ([7913574](https://github.com/openai/openai-node/commit/7913574bdb6fcbcf68e56e8def351add6c43310a)) + +## 4.46.0 (2024-05-13) + +Full Changelog: [v4.45.0...v4.46.0](https://github.com/openai/openai-node/compare/v4.45.0...v4.46.0) + +### Features + +* **api:** add gpt-4o model ([#841](https://github.com/openai/openai-node/issues/841)) ([c818ed1](https://github.com/openai/openai-node/commit/c818ed139bfba81af6ca3c4eda08d52366758529)) + +## 4.45.0 (2024-05-11) + +Full Changelog: [v4.44.0...v4.45.0](https://github.com/openai/openai-node/compare/v4.44.0...v4.45.0) + +### Features + +* **azure:** batch api ([#839](https://github.com/openai/openai-node/issues/839)) ([e279f8c](https://github.com/openai/openai-node/commit/e279f8c51aa80cb913ccb6df647407bea1f2f071)) + + +### Chores + +* **dependency:** bumped Next.js version ([#836](https://github.com/openai/openai-node/issues/836)) ([babb140](https://github.com/openai/openai-node/commit/babb1404751059bdd171b792d03fd21272dd8f8b)) +* **docs:** add SECURITY.md ([#838](https://github.com/openai/openai-node/issues/838)) ([6e556d9](https://github.com/openai/openai-node/commit/6e556d9e12341155cc13fe226ab110d63858370e)) + +## 4.44.0 (2024-05-09) + +Full Changelog: [v4.43.0...v4.44.0](https://github.com/openai/openai-node/compare/v4.43.0...v4.44.0) + +### Features + +* **api:** add message image content ([#834](https://github.com/openai/openai-node/issues/834)) ([7757b3e](https://github.com/openai/openai-node/commit/7757b3ea54a2c5cc251f55af0b676952ba12e8a6)) + +## 4.43.0 (2024-05-08) + +Full Changelog: [v4.42.0...v4.43.0](https://github.com/openai/openai-node/compare/v4.42.0...v4.43.0) + +### Features + +* **api:** adding file purposes ([#831](https://github.com/openai/openai-node/issues/831)) ([a62b877](https://github.com/openai/openai-node/commit/a62b8779ff7261cdd6aa7bf72fb6407cc7e3fd21)) + +## 4.42.0 (2024-05-06) + +Full Changelog: [v4.41.1...v4.42.0](https://github.com/openai/openai-node/compare/v4.41.1...v4.42.0) + +### Features + +* **api:** add usage metadata when streaming ([#829](https://github.com/openai/openai-node/issues/829)) ([6707f11](https://github.com/openai/openai-node/commit/6707f119a191ad98d634ad208be852f9f39c6c0e)) + + +### Bug Fixes + +* **example:** fix fine tuning example ([#827](https://github.com/openai/openai-node/issues/827)) ([6480a50](https://github.com/openai/openai-node/commit/6480a506c096a2664bd2ad296481e51017ff4185)) + +## 4.41.1 (2024-05-06) + +Full Changelog: [v4.41.0...v4.41.1](https://github.com/openai/openai-node/compare/v4.41.0...v4.41.1) + +### Bug Fixes + +* **azure:** update build script ([#825](https://github.com/openai/openai-node/issues/825)) ([8afc6e7](https://github.com/openai/openai-node/commit/8afc6e7b49507b3be0228e93913d51b4c3211add)) + +## 4.41.0 (2024-05-05) + +Full Changelog: [v4.40.2...v4.41.0](https://github.com/openai/openai-node/compare/v4.40.2...v4.41.0) + +### Features + +* **client:** add Azure client ([#822](https://github.com/openai/openai-node/issues/822)) ([92f9049](https://github.com/openai/openai-node/commit/92f90499f0bbee79ba9c8342c8d58dbcaf88bdd1)) 
+ +## 4.40.2 (2024-05-03) + +Full Changelog: [v4.40.1...v4.40.2](https://github.com/openai/openai-node/compare/v4.40.1...v4.40.2) + +### Bug Fixes + +* **package:** revert recent client file change ([#819](https://github.com/openai/openai-node/issues/819)) ([fa722c9](https://github.com/openai/openai-node/commit/fa722c97859e55a0e766332c3a2f0cb3673128a2)) +* **vectorStores:** correct uploadAndPoll method ([#817](https://github.com/openai/openai-node/issues/817)) ([d63f22c](https://github.com/openai/openai-node/commit/d63f22c303761710e6eac7ef883c45e34d223df1)) + +## 4.40.1 (2024-05-02) + +Full Changelog: [v4.40.0...v4.40.1](https://github.com/openai/openai-node/compare/v4.40.0...v4.40.1) + +### Chores + +* **internal:** bump prism version ([#813](https://github.com/openai/openai-node/issues/813)) ([81a6c28](https://github.com/openai/openai-node/commit/81a6c28c4773a0245ce9c505fc5b98d43df21beb)) +* **internal:** move client class to separate file ([#815](https://github.com/openai/openai-node/issues/815)) ([d0b915a](https://github.com/openai/openai-node/commit/d0b915a7514eda5b23d7d1e4420d1d1485ed8d0f)) + +## 4.40.0 (2024-05-01) + +Full Changelog: [v4.39.1...v4.40.0](https://github.com/openai/openai-node/compare/v4.39.1...v4.40.0) + +### Features + +* **api:** delete messages ([#811](https://github.com/openai/openai-node/issues/811)) ([9e37dbd](https://github.com/openai/openai-node/commit/9e37dbd554e4ca48fda1577b1aad612e9d30534d)) + +## 4.39.1 (2024-04-30) + +Full Changelog: [v4.39.0...v4.39.1](https://github.com/openai/openai-node/compare/v4.39.0...v4.39.1) + +### Chores + +* **internal:** add link to openapi spec ([#810](https://github.com/openai/openai-node/issues/810)) ([61b5b83](https://github.com/openai/openai-node/commit/61b5b83e82dd723e9584232f3b805ed13e58e13d)) +* **internal:** fix release please for deno ([#808](https://github.com/openai/openai-node/issues/808)) ([ecc2eae](https://github.com/openai/openai-node/commit/ecc2eaec602eb9fe518f011920d8500e01fde01b)) +* **internal:** refactor scripts ([#806](https://github.com/openai/openai-node/issues/806)) ([9283519](https://github.com/openai/openai-node/commit/928351928054feb56f8797587c70f74d06c2737c)) + +## 4.39.0 (2024-04-29) + +Full Changelog: [v4.38.5...v4.39.0](https://github.com/openai/openai-node/compare/v4.38.5...v4.39.0) + +### Features + +* **api:** add required tool_choice ([#803](https://github.com/openai/openai-node/issues/803)) ([99693e6](https://github.com/openai/openai-node/commit/99693e61debc67327a45dffb2c10c113341bffd6)) + + +### Chores + +* **internal:** add scripts/test and scripts/mock ([#801](https://github.com/openai/openai-node/issues/801)) ([6656105](https://github.com/openai/openai-node/commit/6656105fa1346a91d17e2b7a5e075f3091310c2f)) + +## 4.38.5 (2024-04-24) + +Full Changelog: [v4.38.4...v4.38.5](https://github.com/openai/openai-node/compare/v4.38.4...v4.38.5) + +### Chores + +* **internal:** use actions/checkout@v4 for codeflow ([#799](https://github.com/openai/openai-node/issues/799)) ([5ab7780](https://github.com/openai/openai-node/commit/5ab7780ea8889818f403a9a89ab19585a7e8972e)) + +## 4.38.4 (2024-04-24) + +Full Changelog: [v4.38.3...v4.38.4](https://github.com/openai/openai-node/compare/v4.38.3...v4.38.4) + +### Bug Fixes + +* **api:** change timestamps to unix integers ([#798](https://github.com/openai/openai-node/issues/798)) ([7271a6c](https://github.com/openai/openai-node/commit/7271a6cdc7d37151d2cae18fdd20b87d97624a84)) +* **docs:** doc improvements 
([#796](https://github.com/openai/openai-node/issues/796)) ([49fcc86](https://github.com/openai/openai-node/commit/49fcc86b44958795a6f5e0901f369653dfbcc637)) + +## 4.38.3 (2024-04-22) + +Full Changelog: [v4.38.2...v4.38.3](https://github.com/openai/openai-node/compare/v4.38.2...v4.38.3) + +### Chores + +* **internal:** use @swc/jest for running tests ([#793](https://github.com/openai/openai-node/issues/793)) ([8947f19](https://github.com/openai/openai-node/commit/8947f195b2dfab7ceebe1e0bb5c886e229cd541f)) + +## 4.38.2 (2024-04-19) + +Full Changelog: [v4.38.1...v4.38.2](https://github.com/openai/openai-node/compare/v4.38.1...v4.38.2) + +### Bug Fixes + +* **api:** correct types for message attachment tools ([#787](https://github.com/openai/openai-node/issues/787)) ([8626884](https://github.com/openai/openai-node/commit/8626884abd2494aa081db9e50a2f268b6cebc5df)) + +## 4.38.1 (2024-04-18) + +Full Changelog: [v4.38.0...v4.38.1](https://github.com/openai/openai-node/compare/v4.38.0...v4.38.1) + +### Bug Fixes + +* **api:** correct types for attachments ([#783](https://github.com/openai/openai-node/issues/783)) ([6893631](https://github.com/openai/openai-node/commit/6893631334f75e232ba130f5dd67f1230b1e5fa0)) + +## 4.38.0 (2024-04-18) + +Full Changelog: [v4.37.1...v4.38.0](https://github.com/openai/openai-node/compare/v4.37.1...v4.38.0) + +### Features + +* **api:** batch list endpoint ([#781](https://github.com/openai/openai-node/issues/781)) ([d226759](https://github.com/openai/openai-node/commit/d226759164fbed33198d8bdc315c98e1052dade8)) + +## 4.37.1 (2024-04-17) + +Full Changelog: [v4.37.0...v4.37.1](https://github.com/openai/openai-node/compare/v4.37.0...v4.37.1) + +### Chores + +* **api:** docs and response_format response property ([#778](https://github.com/openai/openai-node/issues/778)) ([78f5c35](https://github.com/openai/openai-node/commit/78f5c3568d95d8e854c04049dc7d5643aa49e93f)) + +## 4.37.0 (2024-04-17) + +Full Changelog: [v4.36.0...v4.37.0](https://github.com/openai/openai-node/compare/v4.36.0...v4.37.0) + +### Features + +* **api:** add vector stores ([#776](https://github.com/openai/openai-node/issues/776)) ([8bb929b](https://github.com/openai/openai-node/commit/8bb929b2ee91c1bec0a00347bf4f7628652d1be3)) + +## 4.36.0 (2024-04-16) + +Full Changelog: [v4.35.0...v4.36.0](https://github.com/openai/openai-node/compare/v4.35.0...v4.36.0) + +### Features + +* **client:** add header OpenAI-Project ([#772](https://github.com/openai/openai-node/issues/772)) ([bb4df37](https://github.com/openai/openai-node/commit/bb4df3722082fb44b7d4feb7a47df796149150a2)) +* extract chat models to a named enum ([#775](https://github.com/openai/openai-node/issues/775)) ([141d2ed](https://github.com/openai/openai-node/commit/141d2ed308141dc751869353208e4d0632d3650c)) + + +### Build System + +* configure UTF-8 locale in devcontainer ([#774](https://github.com/openai/openai-node/issues/774)) ([bebf4f0](https://github.com/openai/openai-node/commit/bebf4f0ca1f884f8747caff0f0e065aafffde096)) + +## 4.35.0 (2024-04-15) + +Full Changelog: [v4.34.0...v4.35.0](https://github.com/openai/openai-node/compare/v4.34.0...v4.35.0) + +### Features + +* **errors:** add request_id property ([#769](https://github.com/openai/openai-node/issues/769)) ([43aa6a1](https://github.com/openai/openai-node/commit/43aa6a19cfb1448903dfaddc4da3def2eda9cbab)) + +## 4.34.0 (2024-04-15) + +Full Changelog: [v4.33.1...v4.34.0](https://github.com/openai/openai-node/compare/v4.33.1...v4.34.0) + +### Features + +* **api:** add batch API 
([#768](https://github.com/openai/openai-node/issues/768)) ([7fe34f2](https://github.com/openai/openai-node/commit/7fe34f2d0bda9c1cb116a593f02bd0cc15a52e12)) +* **api:** updates ([#766](https://github.com/openai/openai-node/issues/766)) ([52bcc47](https://github.com/openai/openai-node/commit/52bcc47043e4c3ffe15ae9e7ac0fa87e2493aad9)) + +## 4.33.1 (2024-04-12) + +Full Changelog: [v4.33.0...v4.33.1](https://github.com/openai/openai-node/compare/v4.33.0...v4.33.1) + +### Chores + +* **internal:** formatting ([#763](https://github.com/openai/openai-node/issues/763)) ([b6acf54](https://github.com/openai/openai-node/commit/b6acf54baab7e6cbf6ce3ad1d6c70197cc0181d0)) +* **internal:** improve ecosystem tests ([#761](https://github.com/openai/openai-node/issues/761)) ([fcf748d](https://github.com/openai/openai-node/commit/fcf748dbbd23f972ff9fd81a8b2a35232a2d6e5c)) + +## 4.33.0 (2024-04-05) + +Full Changelog: [v4.32.2...v4.33.0](https://github.com/openai/openai-node/compare/v4.32.2...v4.33.0) + +### Features + +* **api:** add additional messages when creating thread run ([#759](https://github.com/openai/openai-node/issues/759)) ([f1fdb41](https://github.com/openai/openai-node/commit/f1fdb410e087f9b94faeda0558de573ec1118601)) + +## 4.32.2 (2024-04-04) + +Full Changelog: [v4.32.1...v4.32.2](https://github.com/openai/openai-node/compare/v4.32.1...v4.32.2) + +### Bug Fixes + +* **streaming:** handle special line characters and fix multi-byte character decoding ([#757](https://github.com/openai/openai-node/issues/757)) ([8dcdda2](https://github.com/openai/openai-node/commit/8dcdda2b0d1d86486eea5fd47d24a8d26fde4c19)) +* **tests:** update wrangler to v3.19.0 (CVE-2023-7080) ([#755](https://github.com/openai/openai-node/issues/755)) ([47ca41d](https://github.com/openai/openai-node/commit/47ca41da9a739b2e04b721cb1fe843e5dd152465)) + + +### Chores + +* **tests:** bump ecosystem tests dependencies ([#753](https://github.com/openai/openai-node/issues/753)) ([3f86ea2](https://github.com/openai/openai-node/commit/3f86ea2205c90e05bcbe582491a4bed01075a5b1)) + +## 4.32.1 (2024-04-02) + +Full Changelog: [v4.32.0...v4.32.1](https://github.com/openai/openai-node/compare/v4.32.0...v4.32.1) + +### Chores + +* **deps:** bump yarn to v1.22.22 ([#751](https://github.com/openai/openai-node/issues/751)) ([5b41d10](https://github.com/openai/openai-node/commit/5b41d1077f219b8feb7557cfab98caf7b5de560d)) + +## 4.32.0 (2024-04-01) + +Full Changelog: [v4.31.0...v4.32.0](https://github.com/openai/openai-node/compare/v4.31.0...v4.32.0) + +### Features + +* **api:** add support for filtering messages by run_id ([#747](https://github.com/openai/openai-node/issues/747)) ([9a397ac](https://github.com/openai/openai-node/commit/9a397acffa9f10c3f48e86e3bdb3851770f87b42)) +* **api:** run polling helpers ([#749](https://github.com/openai/openai-node/issues/749)) ([02920ae](https://github.com/openai/openai-node/commit/02920ae082480fc7a7ffe9fa583d053a40dc7120)) + + +### Chores + +* **deps:** remove unused dependency digest-fetch ([#748](https://github.com/openai/openai-node/issues/748)) ([5376837](https://github.com/openai/openai-node/commit/537683734d39dd956a7dcef4339c1167ce6fe13c)) + + +### Documentation + +* **readme:** change undocumented params wording ([#744](https://github.com/openai/openai-node/issues/744)) ([8796691](https://github.com/openai/openai-node/commit/87966911045275db86844dfdcde59653edaef264)) + + +### Refactors + +* rename createAndStream to stream 
([02920ae](https://github.com/openai/openai-node/commit/02920ae082480fc7a7ffe9fa583d053a40dc7120)) + +## 4.31.0 (2024-03-30) + +Full Changelog: [v4.30.0...v4.31.0](https://github.com/openai/openai-node/compare/v4.30.0...v4.31.0) + +### Features + +* **api:** adding temperature parameter ([#742](https://github.com/openai/openai-node/issues/742)) ([b173b05](https://github.com/openai/openai-node/commit/b173b05eb52266d8f2c835ec4ed71cba8cdc609b)) + + +### Bug Fixes + +* **streaming:** trigger all event handlers with fromReadableStream ([#741](https://github.com/openai/openai-node/issues/741)) ([7b1e593](https://github.com/openai/openai-node/commit/7b1e5937d97b309ed51928b4388dcde74abda8dc)) + +## 4.30.0 (2024-03-28) + +Full Changelog: [v4.29.2...v4.30.0](https://github.com/openai/openai-node/compare/v4.29.2...v4.30.0) + +### Features + +* assistant fromReadableStream ([#738](https://github.com/openai/openai-node/issues/738)) ([8f4ba18](https://github.com/openai/openai-node/commit/8f4ba18268797d6c54c393d701b13c7ff2aa71bc)) + + +### Bug Fixes + +* **client:** correctly send deno version header ([#736](https://github.com/openai/openai-node/issues/736)) ([b7ea175](https://github.com/openai/openai-node/commit/b7ea175b2854909de77b920dd25613f1d2daefd6)) +* **example:** correcting example ([#739](https://github.com/openai/openai-node/issues/739)) ([a819551](https://github.com/openai/openai-node/commit/a81955175da24e196490a38850bbf6f9b6779ea8)) +* handle process.env being undefined in debug func ([#733](https://github.com/openai/openai-node/issues/733)) ([2baa149](https://github.com/openai/openai-node/commit/2baa1491f7834f779ca49c3027d2344ead412dd2)) +* **internal:** make toFile use input file's options ([#727](https://github.com/openai/openai-node/issues/727)) ([15880d7](https://github.com/openai/openai-node/commit/15880d77b6c1cf58a6b9cfdbf7ae4442cdbddbd6)) + + +### Chores + +* **internal:** add type ([#737](https://github.com/openai/openai-node/issues/737)) ([18c1989](https://github.com/openai/openai-node/commit/18c19891f783019517d7961fe03c4d98de0fcf93)) + + +### Documentation + +* **readme:** consistent use of sentence case in headings ([#729](https://github.com/openai/openai-node/issues/729)) ([7e515fd](https://github.com/openai/openai-node/commit/7e515fde433ebfb7871d75d53915eef05a08a916)) +* **readme:** document how to make undocumented requests ([#730](https://github.com/openai/openai-node/issues/730)) ([a06d861](https://github.com/openai/openai-node/commit/a06d861a015eeee411fa2c6ed9bf3000313cfc03)) + +## 4.29.2 (2024-03-19) + +Full Changelog: [v4.29.1...v4.29.2](https://github.com/openai/openai-node/compare/v4.29.1...v4.29.2) + +### Chores + +* **internal:** update generated pragma comment ([#724](https://github.com/openai/openai-node/issues/724)) ([139e205](https://github.com/openai/openai-node/commit/139e205ed1ed30cb1df982d852a093dcea945aba)) + + +### Documentation + +* assistant improvements ([#725](https://github.com/openai/openai-node/issues/725)) ([6a2c41b](https://github.com/openai/openai-node/commit/6a2c41b0ce833eba0cdea6a7d221697f3be26abb)) +* fix typo in CONTRIBUTING.md ([#722](https://github.com/openai/openai-node/issues/722)) ([05ff8f7](https://github.com/openai/openai-node/commit/05ff8f7671fe6ce5d9517034f76a166a0bd27803)) + +## 4.29.1 (2024-03-15) + +Full Changelog: [v4.29.0...v4.29.1](https://github.com/openai/openai-node/compare/v4.29.0...v4.29.1) + +### Documentation + +* **readme:** assistant streaming ([#719](https://github.com/openai/openai-node/issues/719)) 
([bc9a1ca](https://github.com/openai/openai-node/commit/bc9a1ca308020a88c29d409edc06cdfca8cbf8f5)) + +## 4.29.0 (2024-03-13) + +Full Changelog: [v4.28.5...v4.29.0](https://github.com/openai/openai-node/compare/v4.28.5...v4.29.0) + +### Features + +* **assistants:** add support for streaming ([#714](https://github.com/openai/openai-node/issues/714)) ([7d27d28](https://github.com/openai/openai-node/commit/7d27d286876d0a575d91a4752f401126fe93d2a3)) + +## 4.28.5 (2024-03-13) + +Full Changelog: [v4.28.4...v4.28.5](https://github.com/openai/openai-node/compare/v4.28.4...v4.28.5) + +### Bug Fixes + +* **ChatCompletionStream:** abort on async iterator break and handle errors ([#699](https://github.com/openai/openai-node/issues/699)) ([ac417a2](https://github.com/openai/openai-node/commit/ac417a2db31919d2b52f2eb2e38f9c67a8f73254)) +* **streaming:** correctly handle trailing new lines in byte chunks ([#708](https://github.com/openai/openai-node/issues/708)) ([4753be2](https://github.com/openai/openai-node/commit/4753be272b1d1dade7a769cf350b829fc639f36e)) + + +### Chores + +* **api:** update docs ([#703](https://github.com/openai/openai-node/issues/703)) ([e1db98b](https://github.com/openai/openai-node/commit/e1db98bef29d200e2e401e3f5d7b2db6839c7836)) +* **docs:** mention install from git repo ([#700](https://github.com/openai/openai-node/issues/700)) ([c081bdb](https://github.com/openai/openai-node/commit/c081bdbb55585e63370496d324dc6f94d86424d1)) +* fix error handler in readme ([#704](https://github.com/openai/openai-node/issues/704)) ([4ff790a](https://github.com/openai/openai-node/commit/4ff790a67cf876191e04ad0e369e447e080b78a7)) +* **internal:** add explicit type annotation to decoder ([#712](https://github.com/openai/openai-node/issues/712)) ([d728e99](https://github.com/openai/openai-node/commit/d728e9923554e4c72c9efa3bd528561400d50ad8)) +* **types:** fix accidental exposure of Buffer type to cloudflare ([#709](https://github.com/openai/openai-node/issues/709)) ([0323ecb](https://github.com/openai/openai-node/commit/0323ecb98ddbd8910fc5719c8bab5175b945d2ab)) + + +### Documentation + +* **contributing:** improve wording ([#696](https://github.com/openai/openai-node/issues/696)) ([940d569](https://github.com/openai/openai-node/commit/940d5695f4cacddbb58e3bfc50fec28c468c7e63)) +* **readme:** fix https proxy example ([#705](https://github.com/openai/openai-node/issues/705)) ([d144789](https://github.com/openai/openai-node/commit/d1447890a556d37928b628f6449bb80de224d207)) +* **readme:** fix typo in custom fetch implementation ([#698](https://github.com/openai/openai-node/issues/698)) ([64041fd](https://github.com/openai/openai-node/commit/64041fd33da569eccae64afe4e50ee803017b20b)) +* remove extraneous --save and yarn install instructions ([#710](https://github.com/openai/openai-node/issues/710)) ([8ec216d](https://github.com/openai/openai-node/commit/8ec216d6b72ee4d67e26786f06c93af18d042117)) +* use [@deprecated](https://github.com/deprecated) decorator for deprecated params ([#711](https://github.com/openai/openai-node/issues/711)) ([4688ef4](https://github.com/openai/openai-node/commit/4688ef4b36e9f383a3abf6cdb31d498163a7bb9e)) + +## 4.28.4 (2024-02-28) + +Full Changelog: [v4.28.3...v4.28.4](https://github.com/openai/openai-node/compare/v4.28.3...v4.28.4) + +### Features + +* **api:** add wav and pcm to response_format ([#691](https://github.com/openai/openai-node/issues/691)) ([b1c6171](https://github.com/openai/openai-node/commit/b1c61711961a62a4d7b47909a68ecd65231a66af)) + + +### Chores + +* 
**ci:** update actions/setup-node action to v4 ([#685](https://github.com/openai/openai-node/issues/685)) ([f2704d5](https://github.com/openai/openai-node/commit/f2704d5f1580c0f1d31584ef88702cde8f6804d4)) +* **internal:** fix ecosystem tests ([#693](https://github.com/openai/openai-node/issues/693)) ([616624d](https://github.com/openai/openai-node/commit/616624d3d9fd10ce254ce0d435b2b73ed11679f2)) +* **types:** extract run status to a named type ([#686](https://github.com/openai/openai-node/issues/686)) ([b3b3b8e](https://github.com/openai/openai-node/commit/b3b3b8ea20e0f311d3bd53dfd22ccc04f5dce5f7)) +* update @types/react to 18.2.58, @types/react-dom to 18.2.19 ([#688](https://github.com/openai/openai-node/issues/688)) ([2a0d0b1](https://github.com/openai/openai-node/commit/2a0d0b1cb197eef25e42bbba88ee90c37d623f24)) +* update dependency @types/node to v20.11.20 ([#690](https://github.com/openai/openai-node/issues/690)) ([4ca005b](https://github.com/openai/openai-node/commit/4ca005be082d6c50fe95da6148896b62080bfe07)) +* update dependency @types/ws to v8.5.10 ([#683](https://github.com/openai/openai-node/issues/683)) ([a617268](https://github.com/openai/openai-node/commit/a6172683a3390422984ad282ac4940781493e772)) +* update dependency next to v13.5.6 ([#689](https://github.com/openai/openai-node/issues/689)) ([abb3b66](https://github.com/openai/openai-node/commit/abb3b6674b8f9f8ff9c2cc61629a31883ae4d8c8)) + +## 4.28.3 (2024-02-20) + +Full Changelog: [v4.28.2...v4.28.3](https://github.com/openai/openai-node/compare/v4.28.2...v4.28.3) + +### Bug Fixes + +* **ci:** revert "move github release logic to github app" ([#680](https://github.com/openai/openai-node/issues/680)) ([8b4009a](https://github.com/openai/openai-node/commit/8b4009af05a2e0824f99d3cf8cd9063f234ae470)) + +## 4.28.2 (2024-02-19) + +Full Changelog: [v4.28.1...v4.28.2](https://github.com/openai/openai-node/compare/v4.28.1...v4.28.2) + +### Bug Fixes + +* **api:** remove non-GA instance_id param ([#677](https://github.com/openai/openai-node/issues/677)) ([4d0d4da](https://github.com/openai/openai-node/commit/4d0d4daf3bfca0089c5258a136542513e6b372e6)) + +## 4.28.1 (2024-02-19) + +Full Changelog: [v4.28.0...v4.28.1](https://github.com/openai/openai-node/compare/v4.28.0...v4.28.1) + +### Chores + +* **ci:** move github release logic to github app ([#671](https://github.com/openai/openai-node/issues/671)) ([ecca6bc](https://github.com/openai/openai-node/commit/ecca6bc2eea391ee53f1a1d6cac9665199447ae0)) +* **internal:** refactor release environment script ([#674](https://github.com/openai/openai-node/issues/674)) ([27d3770](https://github.com/openai/openai-node/commit/27d37705d17e05c3761ccefcf09c4e2018eb5772)) + +## 4.28.0 (2024-02-13) + +Full Changelog: [v4.27.1...v4.28.0](https://github.com/openai/openai-node/compare/v4.27.1...v4.28.0) + +### Features + +* **api:** updates ([#669](https://github.com/openai/openai-node/issues/669)) ([e1900f9](https://github.com/openai/openai-node/commit/e1900f97ee3f4758d47a7eb4659e30abe3750c99)) + +## 4.27.1 (2024-02-12) + +Full Changelog: [v4.27.0...v4.27.1](https://github.com/openai/openai-node/compare/v4.27.0...v4.27.1) + +## 4.27.0 (2024-02-08) + +Full Changelog: [v4.26.1...v4.27.0](https://github.com/openai/openai-node/compare/v4.26.1...v4.27.0) + +### Features + +* **api:** add `timestamp_granularities`, add `gpt-3.5-turbo-0125` model ([#661](https://github.com/openai/openai-node/issues/661)) ([5016806](https://github.com/openai/openai-node/commit/50168066862f66b529bae29f4564741300303246)) + + 
+### Chores + +* **internal:** fix retry mechanism for ecosystem-test ([#663](https://github.com/openai/openai-node/issues/663)) ([0eb7ed5](https://github.com/openai/openai-node/commit/0eb7ed5ca3f7c7b29c316fc7d725d834cee73989)) +* respect `application/vnd.api+json` content-type header ([#664](https://github.com/openai/openai-node/issues/664)) ([f4fad54](https://github.com/openai/openai-node/commit/f4fad549c5c366d8dd8b936b7699639b895e82a1)) + +## 4.26.1 (2024-02-05) + +Full Changelog: [v4.26.0...v4.26.1](https://github.com/openai/openai-node/compare/v4.26.0...v4.26.1) + +### Chores + +* **internal:** enable building when git installed ([#657](https://github.com/openai/openai-node/issues/657)) ([8c80a7d](https://github.com/openai/openai-node/commit/8c80a7d6d36155901a19d1f9cd1fec17b89e261e)) +* **internal:** re-order pagination import ([#656](https://github.com/openai/openai-node/issues/656)) ([21ae54e](https://github.com/openai/openai-node/commit/21ae54ea2cc2779e440909782a6ac8b70f88ec1f)) +* **internal:** support pre-release versioning ([#653](https://github.com/openai/openai-node/issues/653)) ([0c3859f](https://github.com/openai/openai-node/commit/0c3859f88164ae3eb6ec8c29e8889a50861cb35b)) +* **test:** add delay between ecosystem tests retry ([#651](https://github.com/openai/openai-node/issues/651)) ([6a4cc5c](https://github.com/openai/openai-node/commit/6a4cc5cea36ae408c8c1eb2ea0ea02f96ffb77b7)) + + +### Documentation + +* add a CONTRIBUTING.md ([#659](https://github.com/openai/openai-node/issues/659)) ([8ea58b0](https://github.com/openai/openai-node/commit/8ea58b0b9e7382a3b3af852a9a3a288a485ad33a)) + +## 4.26.0 (2024-01-25) + +Full Changelog: [v4.25.0...v4.26.0](https://github.com/openai/openai-node/compare/v4.25.0...v4.26.0) + +### Features + +* **api:** add text embeddings dimensions param ([#650](https://github.com/openai/openai-node/issues/650)) ([1b5a977](https://github.com/openai/openai-node/commit/1b5a977d0eef7f5cf97daf27333cbbeb6bb479f3)) + + +### Chores + +* **internal:** add internal helpers & improve build scripts ([#643](https://github.com/openai/openai-node/issues/643)) ([9392f50](https://github.com/openai/openai-node/commit/9392f50e47f26b16632c9eb12187ea7f8a565e09)) +* **internal:** adjust ecosystem-tests logging in CI ([#646](https://github.com/openai/openai-node/issues/646)) ([156084b](https://github.com/openai/openai-node/commit/156084b8734194a5856612378115b948c82ec6e4)) +* **internal:** don't re-export streaming type ([#648](https://github.com/openai/openai-node/issues/648)) ([4c4be94](https://github.com/openai/openai-node/commit/4c4be945fa3f54036183e2d0877060db47ea564b)) +* **internal:** fix binary files ([#645](https://github.com/openai/openai-node/issues/645)) ([e1fbc39](https://github.com/openai/openai-node/commit/e1fbc396f4d1dd8ba980c25ba03b670dfed887a0)) +* **internal:** minor streaming updates ([#647](https://github.com/openai/openai-node/issues/647)) ([2f073e4](https://github.com/openai/openai-node/commit/2f073e4e6c9cd0ff3ad434907da710704765a005)) +* **internal:** pin deno version ([#649](https://github.com/openai/openai-node/issues/649)) ([7e4b903](https://github.com/openai/openai-node/commit/7e4b9039320e4ccbafb45f57dce273bedc9b7cb3)) + +## 4.25.0 (2024-01-21) + +Full Changelog: [v4.24.7...v4.25.0](https://github.com/openai/openai-node/compare/v4.24.7...v4.25.0) + +### Features + +* **api:** add usage to runs and run steps ([#640](https://github.com/openai/openai-node/issues/640)) 
([3caa416](https://github.com/openai/openai-node/commit/3caa4166b8abb5bffb4c8be1495834b7f16af32d)) + + +### Bug Fixes + +* allow body type in RequestOptions to be null ([#637](https://github.com/openai/openai-node/issues/637)) ([c4f8a36](https://github.com/openai/openai-node/commit/c4f8a3698dc1d80439131c5097975d6a5db1b4e2)) +* handle system_fingerprint in streaming helpers ([#636](https://github.com/openai/openai-node/issues/636)) ([f273530](https://github.com/openai/openai-node/commit/f273530ac491300842aef463852821a1a27805fb)) +* **types:** accept undefined for optional client options ([#635](https://github.com/openai/openai-node/issues/635)) ([e48cd57](https://github.com/openai/openai-node/commit/e48cd57931cd0e81a77b55653cb1f663111dd733)) + + +### Chores + +* **internal:** debug logging for retries; speculative retry-after-ms support ([#633](https://github.com/openai/openai-node/issues/633)) ([fd64971](https://github.com/openai/openai-node/commit/fd64971612d1d7fcbd8a63885d333485bff68ab1)) +* **internal:** update comment ([#631](https://github.com/openai/openai-node/issues/631)) ([e109d40](https://github.com/openai/openai-node/commit/e109d40a5c02c5bf4586e54d92bf0e355d254c1b)) + +## 4.24.7 (2024-01-13) + +Full Changelog: [v4.24.6...v4.24.7](https://github.com/openai/openai-node/compare/v4.24.6...v4.24.7) + +### Chores + +* **ecosystem-tests:** fix flaky vercel-edge, cloudflare-worker, and deno tests ([#626](https://github.com/openai/openai-node/issues/626)) ([ae412a5](https://github.com/openai/openai-node/commit/ae412a5f12e701e07e71bd9791c55a56858e8383)) +* **ecosystem-tests:** fix typo in deno test ([#628](https://github.com/openai/openai-node/issues/628)) ([048ec94](https://github.com/openai/openai-node/commit/048ec943f8d12acba9829c35ebf0b2d3f24930c8)) + +## 4.24.6 (2024-01-12) + +Full Changelog: [v4.24.5...v4.24.6](https://github.com/openai/openai-node/compare/v4.24.5...v4.24.6) + +### Chores + +* **ecosystem-tests:** fix flaky tests and remove fine tuning calls ([#623](https://github.com/openai/openai-node/issues/623)) ([258d79f](https://github.com/openai/openai-node/commit/258d79f52bb31f4f3723f6f4b97ebe8f3fa187bd)) +* **ecosystem-tests:** fix flaky tests and remove fine tuning calls ([#625](https://github.com/openai/openai-node/issues/625)) ([58e5fd8](https://github.com/openai/openai-node/commit/58e5fd8f27052be6ac9587256b161f4bf3a3805f)) + +## 4.24.5 (2024-01-12) + +Full Changelog: [v4.24.4...v4.24.5](https://github.com/openai/openai-node/compare/v4.24.4...v4.24.5) + +### Refactors + +* **api:** remove deprecated endpoints ([#621](https://github.com/openai/openai-node/issues/621)) ([2054d71](https://github.com/openai/openai-node/commit/2054d71e6b0d407229a4c5aecd75e38c336c2c02)) + +## 4.24.4 (2024-01-11) + +Full Changelog: [v4.24.3...v4.24.4](https://github.com/openai/openai-node/compare/v4.24.3...v4.24.4) + +### Chores + +* **internal:** narrow type into stringifyQuery ([#619](https://github.com/openai/openai-node/issues/619)) ([88fb9cd](https://github.com/openai/openai-node/commit/88fb9cd1bb415850b0b4868944617282d0b92e2a)) + +## 4.24.3 (2024-01-10) + +Full Changelog: [v4.24.2...v4.24.3](https://github.com/openai/openai-node/compare/v4.24.2...v4.24.3) + +### Bug Fixes + +* use default base url if BASE_URL env var is blank ([#615](https://github.com/openai/openai-node/issues/615)) ([a27ad3d](https://github.com/openai/openai-node/commit/a27ad3d4e06f2202daa169668d0e7d89e87a38a7)) + +## 4.24.2 (2024-01-08) + +Full Changelog: 
[v4.24.1...v4.24.2](https://github.com/openai/openai-node/compare/v4.24.1...v4.24.2) + +### Bug Fixes + +* **headers:** always send lowercase headers and strip undefined (BREAKING in rare cases) ([#608](https://github.com/openai/openai-node/issues/608)) ([4ea159f](https://github.com/openai/openai-node/commit/4ea159f0aa9a1f4c365c74ee726714fe692ddf9f)) + + +### Chores + +* add .keep files for examples and custom code directories ([#612](https://github.com/openai/openai-node/issues/612)) ([5e0f733](https://github.com/openai/openai-node/commit/5e0f733d3cd3c8e6d41659141168cd0708e017a3)) +* **internal:** bump license ([#605](https://github.com/openai/openai-node/issues/605)) ([045ee74](https://github.com/openai/openai-node/commit/045ee74fd3ffba9e6d1301fe1ffd8bd3c63720a2)) +* **internal:** improve type signatures ([#609](https://github.com/openai/openai-node/issues/609)) ([e1ccc82](https://github.com/openai/openai-node/commit/e1ccc82e4991262a631dcffa4d09bdc553e50fbb)) + + +### Documentation + +* fix docstring typos ([#600](https://github.com/openai/openai-node/issues/600)) ([1934fa1](https://github.com/openai/openai-node/commit/1934fa15f654ea89e226457f76febe6015616f6c)) +* improve audio example to show how to stream to a file ([#598](https://github.com/openai/openai-node/issues/598)) ([e950ad9](https://github.com/openai/openai-node/commit/e950ad969e845d608ed71bd3e3095cd6c941d93d)) + +## 4.24.1 (2023-12-22) + +Full Changelog: [v4.24.0...v4.24.1](https://github.com/openai/openai-node/compare/v4.24.0...v4.24.1) + +### Bug Fixes + +* **pagination:** correct type annotation object field ([#590](https://github.com/openai/openai-node/issues/590)) ([4066eda](https://github.com/openai/openai-node/commit/4066edad4b5305e82e610f44f4720843f2b69d39)) + + +### Documentation + +* **messages:** improvements to helpers reference + typos ([#595](https://github.com/openai/openai-node/issues/595)) ([96a59b9](https://github.com/openai/openai-node/commit/96a59b91c424db67b8a5bdb7cab5da68c57282d4)) +* reformat README.md ([#592](https://github.com/openai/openai-node/issues/592)) ([8ffc7f8](https://github.com/openai/openai-node/commit/8ffc7f876cc8f4b7afaf68a37f94f826ef22a6b8)) + + +### Refactors + +* write jest config in typescript ([#588](https://github.com/openai/openai-node/issues/588)) ([eb6ceeb](https://github.com/openai/openai-node/commit/eb6ceebf90ba45ec5b803f32b9b080829f6a973a)) + +## 4.24.0 (2023-12-19) + +Full Changelog: [v4.23.0...v4.24.0](https://github.com/openai/openai-node/compare/v4.23.0...v4.24.0) + +### Features + +* **api:** add additional instructions for runs ([#586](https://github.com/openai/openai-node/issues/586)) ([401d93e](https://github.com/openai/openai-node/commit/401d93ea39fe0e90088799858299322035c0a7e8)) + + +### Chores + +* **deps:** update dependency start-server-and-test to v2.0.3 ([#580](https://github.com/openai/openai-node/issues/580)) ([8e1aca1](https://github.com/openai/openai-node/commit/8e1aca1f8be6e583483919ed9ef9b04fab076066)) +* **deps:** update dependency ts-jest to v29.1.1 ([#578](https://github.com/openai/openai-node/issues/578)) ([a6edb7b](https://github.com/openai/openai-node/commit/a6edb7bc3cfc447d0c55ae23cc1c2219105d3666)) +* **deps:** update jest ([#582](https://github.com/openai/openai-node/issues/582)) ([e49e471](https://github.com/openai/openai-node/commit/e49e471ec7a136f2cbaf82551ccaaea366c87a91)) +* **internal:** bump deps ([#583](https://github.com/openai/openai-node/issues/583)) 
([2e07b4c](https://github.com/openai/openai-node/commit/2e07b4c66ab1fdbb353fdd00994e293f93e981db)) +* **internal:** update deps ([#581](https://github.com/openai/openai-node/issues/581)) ([7b690dc](https://github.com/openai/openai-node/commit/7b690dca67ee8c3b0a89caf7f786ede5dc612a76)) + + +### Documentation + +* upgrade models in examples to latest version ([#585](https://github.com/openai/openai-node/issues/585)) ([60101a4](https://github.com/openai/openai-node/commit/60101a4117b1a8223d09fb9fe21d89af32431939)) + +## 4.23.0 (2023-12-17) + +Full Changelog: [v4.22.1...v4.23.0](https://github.com/openai/openai-node/compare/v4.22.1...v4.23.0) + +### Features + +* **api:** add token logprobs to chat completions ([#576](https://github.com/openai/openai-node/issues/576)) ([8d4292e](https://github.com/openai/openai-node/commit/8d4292e6358920b2c9d8df49c6a154231c468512)) + + +### Chores + +* **ci:** run release workflow once per day ([#574](https://github.com/openai/openai-node/issues/574)) ([529f09f](https://github.com/openai/openai-node/commit/529f09f827a675d6e851590acff4e6f4f2af2d26)) + +## 4.22.1 (2023-12-15) + +Full Changelog: [v4.22.0...v4.22.1](https://github.com/openai/openai-node/compare/v4.22.0...v4.22.1) + +### Chores + +* update dependencies ([#572](https://github.com/openai/openai-node/issues/572)) ([a51e620](https://github.com/openai/openai-node/commit/a51e62065224a516b17dd850ae564f5436d8db52)) + + +### Documentation + +* replace runFunctions with runTools in readme ([#570](https://github.com/openai/openai-node/issues/570)) ([c3b9ad5](https://github.com/openai/openai-node/commit/c3b9ad58e5f74d3339889aeb1d758c8c18f54de7)) + +## 4.22.0 (2023-12-15) + +Full Changelog: [v4.21.0...v4.22.0](https://github.com/openai/openai-node/compare/v4.21.0...v4.22.0) + +### Features + +* **api:** add optional `name` argument + improve docs ([#569](https://github.com/openai/openai-node/issues/569)) ([3b68ace](https://github.com/openai/openai-node/commit/3b68ace533976aedbf642d9b018d0de8d9a8bb88)) + + +### Chores + +* update prettier ([#567](https://github.com/openai/openai-node/issues/567)) ([83dec2a](https://github.com/openai/openai-node/commit/83dec2af62c481d7de16d8a3644aa239ded9e30c)) + +## 4.21.0 (2023-12-11) + +Full Changelog: [v4.20.1...v4.21.0](https://github.com/openai/openai-node/compare/v4.20.1...v4.21.0) + +### Features + +* **client:** support reading the base url from an env variable ([#547](https://github.com/openai/openai-node/issues/547)) ([06fb68d](https://github.com/openai/openai-node/commit/06fb68de1ff80983e349b6715d1037e2072c8dd4)) + + +### Bug Fixes + +* correct some runTools behavior and deprecate runFunctions ([#562](https://github.com/openai/openai-node/issues/562)) ([f5cdd0f](https://github.com/openai/openai-node/commit/f5cdd0f704d3d075cdfc5bc2df1f7a8bae5cd9f1)) +* prevent 400 when using runTools/runFunctions with Azure OpenAI API ([#544](https://github.com/openai/openai-node/issues/544)) ([735d9b8](https://github.com/openai/openai-node/commit/735d9b86acdc067e1ee6ebe1ea50de2955431050)) + + +### Documentation + +* **readme:** update example snippets ([#546](https://github.com/openai/openai-node/issues/546)) ([566d290](https://github.com/openai/openai-node/commit/566d290006920f536788bb77f4d24a6906e2971f)) + + +### Build System + +* specify `packageManager: yarn` ([#561](https://github.com/openai/openai-node/issues/561)) ([935b898](https://github.com/openai/openai-node/commit/935b8983c74f7b03b67d22f4d194989838f963f3)) + +## 4.20.1 (2023-11-24) + +Full Changelog: 
[v4.20.0...v4.20.1](https://github.com/openai/openai-node/compare/v4.20.0...v4.20.1) + +### Chores + +* **internal:** remove file import and conditionally run prepare ([#533](https://github.com/openai/openai-node/issues/533)) ([48cb729](https://github.com/openai/openai-node/commit/48cb729bfc484ce3d04273be417b307a0d20644f)) + + +### Documentation + +* **readme:** fix typo and add examples link ([#529](https://github.com/openai/openai-node/issues/529)) ([cf959b1](https://github.com/openai/openai-node/commit/cf959b17db0a4f8dd7eb59add333c4a461b02459)) + +## 4.20.0 (2023-11-22) + +Full Changelog: [v4.19.1...v4.20.0](https://github.com/openai/openai-node/compare/v4.19.1...v4.20.0) + +### Features + +* allow installing package directly from github ([#522](https://github.com/openai/openai-node/issues/522)) ([51926d7](https://github.com/openai/openai-node/commit/51926d7a0092744e49de39f4988feddf313adafa)) + + +### Chores + +* **internal:** don't call prepare in dist ([#525](https://github.com/openai/openai-node/issues/525)) ([d09411e](https://github.com/openai/openai-node/commit/d09411ebaa28d6610e1b880d03339d520b4a1833)) + +## 4.19.1 (2023-11-20) + +Full Changelog: [v4.19.0...v4.19.1](https://github.com/openai/openai-node/compare/v4.19.0...v4.19.1) + +## 4.19.0 (2023-11-15) + +Full Changelog: [v4.18.0...v4.19.0](https://github.com/openai/openai-node/compare/v4.18.0...v4.19.0) + +### Features + +* **api:** updates ([#501](https://github.com/openai/openai-node/issues/501)) ([944d58e](https://github.com/openai/openai-node/commit/944d58e5fc46f1a0671aaa2b809d28e67edf6023)) + +## 4.18.0 (2023-11-14) + +Full Changelog: [v4.17.5...v4.18.0](https://github.com/openai/openai-node/compare/v4.17.5...v4.18.0) + +### Features + +* **api:** add gpt-3.5-turbo-1106 ([#496](https://github.com/openai/openai-node/issues/496)) ([45f7672](https://github.com/openai/openai-node/commit/45f7672ccf4856ac309b08c6c96f0e73ab48b525)) + +## 4.17.5 (2023-11-13) + +Full Changelog: [v4.17.4...v4.17.5](https://github.com/openai/openai-node/compare/v4.17.4...v4.17.5) + +### Chores + +* fix typo in docs and add request header for function calls ([#494](https://github.com/openai/openai-node/issues/494)) ([22ce244](https://github.com/openai/openai-node/commit/22ce2443a77f10988b3215bd81ba17d4eda4b10e)) + +## 4.17.4 (2023-11-10) + +Full Changelog: [v4.17.3...v4.17.4](https://github.com/openai/openai-node/compare/v4.17.3...v4.17.4) + +### Chores + +* **internal:** update jest config ([#482](https://github.com/openai/openai-node/issues/482)) ([3013e8c](https://github.com/openai/openai-node/commit/3013e8c73a61a397a418ca75b996f0a7dd03a744)) + +## 4.17.3 (2023-11-09) + +Full Changelog: [v4.17.2...v4.17.3](https://github.com/openai/openai-node/compare/v4.17.2...v4.17.3) + +## 4.17.2 (2023-11-09) + +Full Changelog: [v4.17.1...v4.17.2](https://github.com/openai/openai-node/compare/v4.17.1...v4.17.2) + +### Chores + +* **internal:** bump deno version number ([#478](https://github.com/openai/openai-node/issues/478)) ([69913f3](https://github.com/openai/openai-node/commit/69913f3a4b0123394029759375445dae7b4f15ab)) + +## 4.17.1 (2023-11-09) + +Full Changelog: [v4.17.0...v4.17.1](https://github.com/openai/openai-node/compare/v4.17.0...v4.17.1) + +### Refactors + +* **client:** deprecate files.retrieveContent in favour of files.content ([#474](https://github.com/openai/openai-node/issues/474)) ([7c7bfc2](https://github.com/openai/openai-node/commit/7c7bfc2fad5a786c9172110e90c9566a943e49f9)) + +## 4.17.0 (2023-11-08) + +Full Changelog: 
[v4.16.2...v4.17.0](https://github.com/openai/openai-node/compare/v4.16.2...v4.17.0) + +### Features + +* **api:** unify function types ([#467](https://github.com/openai/openai-node/issues/467)) ([d51cd94](https://github.com/openai/openai-node/commit/d51cd94b3103219789447e2e9afc4762ae672e5a)) + + +### Refactors + +* **api:** rename FunctionObject to FunctionDefinition ([#470](https://github.com/openai/openai-node/issues/470)) ([f3990c7](https://github.com/openai/openai-node/commit/f3990c779e596309b62f41d7a1253d8629aca3bf)) + +## 4.16.2 (2023-11-08) + +Full Changelog: [v4.16.1...v4.16.2](https://github.com/openai/openai-node/compare/v4.16.1...v4.16.2) + +### Bug Fixes + +* **api:** accidentally required params, add new models & other fixes ([#463](https://github.com/openai/openai-node/issues/463)) ([1cb403e](https://github.com/openai/openai-node/commit/1cb403e4ccde61bb1613d362f6bdbca8b1681e00)) +* **api:** update embedding response object type ([#466](https://github.com/openai/openai-node/issues/466)) ([53b7e25](https://github.com/openai/openai-node/commit/53b7e2539cca0b272be96136c123d2b33745e7f6)) +* asssitant_deleted -> assistant_deleted ([#452](https://github.com/openai/openai-node/issues/452)) ([ef89bd7](https://github.com/openai/openai-node/commit/ef89bd74d85c833bf7de500eecd1b092a0ad3f37)) +* **types:** ensure all code paths return a value ([#458](https://github.com/openai/openai-node/issues/458)) ([19402c3](https://github.com/openai/openai-node/commit/19402c365572a99cbee58bcd34a9942e741269bf)) + + +### Chores + +* **docs:** fix github links ([#457](https://github.com/openai/openai-node/issues/457)) ([6b9b94e](https://github.com/openai/openai-node/commit/6b9b94e4e123349a908b708cd574ff107f40a8e1)) +* **internal:** fix typo in comment ([#456](https://github.com/openai/openai-node/issues/456)) ([fe24342](https://github.com/openai/openai-node/commit/fe2434284a91d424510873a18079b8870469c672)) + + +### Documentation + +* update deno deploy link to include v ([#441](https://github.com/openai/openai-node/issues/441)) ([47b13aa](https://github.com/openai/openai-node/commit/47b13aaa6fac86fffabee1f752ee6d2efc3def9b)) + +## 4.16.1 (2023-11-06) + +Full Changelog: [v4.16.0...v4.16.1](https://github.com/openai/openai-node/compare/v4.16.0...v4.16.1) + +### Bug Fixes + +* **api:** retreival -> retrieval ([#437](https://github.com/openai/openai-node/issues/437)) ([b4bd3ee](https://github.com/openai/openai-node/commit/b4bd3eefdd3903abcc57c431382cc2124d39307b)) + + +### Documentation + +* **api:** improve docstrings ([#435](https://github.com/openai/openai-node/issues/435)) ([ee8b24c](https://github.com/openai/openai-node/commit/ee8b24c70a5ccb944e02ff2201668d6bc2b597b3)) + +## 4.16.0 (2023-11-06) + +Full Changelog: [v4.15.4...v4.16.0](https://github.com/openai/openai-node/compare/v4.15.4...v4.16.0) + +### Features + +* **api:** releases from DevDay; assistants, multimodality, tools, dall-e-3, tts, and more ([#433](https://github.com/openai/openai-node/issues/433)) ([fb92f5e](https://github.com/openai/openai-node/commit/fb92f5e6e3b6e7969b3d91f4ccdaef87e5fea0a4)) + + +### Bug Fixes + +* improve deno readme ([#429](https://github.com/openai/openai-node/issues/429)) ([871ceac](https://github.com/openai/openai-node/commit/871ceac2b37f53f7fc7c0163454115c709cd7ced)) + + +### Documentation + +* deno version ([#432](https://github.com/openai/openai-node/issues/432)) ([74bf336](https://github.com/openai/openai-node/commit/74bf3364379fd23252fde01401c44b2fa796cba4)) +* update deno link in more places 
([#431](https://github.com/openai/openai-node/issues/431)) ([5da63d4](https://github.com/openai/openai-node/commit/5da63d4a9143c0ab493b742f7fde22b01a372844)) + +## 4.15.4 (2023-11-05) + +Full Changelog: [v4.15.3...v4.15.4](https://github.com/openai/openai-node/compare/v4.15.3...v4.15.4) + +### Documentation + +* **readme:** remove redundant whitespace ([#427](https://github.com/openai/openai-node/issues/427)) ([aa3a178](https://github.com/openai/openai-node/commit/aa3a1782914a4a285263e4d070bca73e72ed47ec)) + +## 4.15.3 (2023-11-04) + +Full Changelog: [v4.15.2...v4.15.3](https://github.com/openai/openai-node/compare/v4.15.2...v4.15.3) + +### Bug Fixes + +* improve deno releases ([#425](https://github.com/openai/openai-node/issues/425)) ([19469f2](https://github.com/openai/openai-node/commit/19469f266ff69a4e549402188d9f6ad87f5a7778)) + +## 4.15.2 (2023-11-04) + +Full Changelog: [v4.15.1...v4.15.2](https://github.com/openai/openai-node/compare/v4.15.1...v4.15.2) + +### Documentation + +* fix deno.land import ([#423](https://github.com/openai/openai-node/issues/423)) ([e5415a2](https://github.com/openai/openai-node/commit/e5415a29ab447ced8535fafda7928b0a6748c8d1)) + +## 4.15.1 (2023-11-04) + +Full Changelog: [v4.15.0...v4.15.1](https://github.com/openai/openai-node/compare/v4.15.0...v4.15.1) + +### Documentation + +* document customizing fetch ([#420](https://github.com/openai/openai-node/issues/420)) ([1ca982f](https://github.com/openai/openai-node/commit/1ca982f192daf49e33b7acb5505ed26c9d891255)) + +## 4.15.0 (2023-11-03) + +Full Changelog: [v4.14.2...v4.15.0](https://github.com/openai/openai-node/compare/v4.14.2...v4.15.0) + +### Features + +* **beta:** add streaming and function calling helpers ([#409](https://github.com/openai/openai-node/issues/409)) ([510c1f3](https://github.com/openai/openai-node/commit/510c1f325ee55197b4c2f434475128c265500746)) +* **client:** allow binary returns ([#416](https://github.com/openai/openai-node/issues/416)) ([02f7ad7](https://github.com/openai/openai-node/commit/02f7ad7f736751e0e7687e6744bae464d4e40b79)) +* **github:** include a devcontainer setup ([#413](https://github.com/openai/openai-node/issues/413)) ([fb2996f](https://github.com/openai/openai-node/commit/fb2996f0d291210878145aacf9b952f8133d9414)) +* streaming improvements ([#411](https://github.com/openai/openai-node/issues/411)) ([37b622c](https://github.com/openai/openai-node/commit/37b622c79ddbd6c286b730e740403c82b542e796)) + +## 4.14.2 (2023-10-30) + +Full Changelog: [v4.14.1...v4.14.2](https://github.com/openai/openai-node/compare/v4.14.1...v4.14.2) + +### Chores + +* **docs:** update deno link ([#407](https://github.com/openai/openai-node/issues/407)) ([0328882](https://github.com/openai/openai-node/commit/0328882cccb3e5386283ffa5eb9cd8ad9442f3a0)) + +## 4.14.1 (2023-10-27) + +Full Changelog: [v4.14.0...v4.14.1](https://github.com/openai/openai-node/compare/v4.14.0...v4.14.1) + +### Bug Fixes + +* deploy deno in a github workflow instead of postpublish step ([#405](https://github.com/openai/openai-node/issues/405)) ([3a6dba0](https://github.com/openai/openai-node/commit/3a6dba074258274bffcfe3a4260ca1b95bcd6bdc)) +* typo in build script ([#403](https://github.com/openai/openai-node/issues/403)) ([76c5c96](https://github.com/openai/openai-node/commit/76c5c96a359f750f58ea38b5d32365db7e34409a)) + + +### Chores + +* **internal:** update gitignore ([#406](https://github.com/openai/openai-node/issues/406)) 
([986b0bb](https://github.com/openai/openai-node/commit/986b0bbac9f5ca43a0df6f29f2a468dd4223e053)) + +## 4.14.0 (2023-10-25) + +Full Changelog: [v4.13.0...v4.14.0](https://github.com/openai/openai-node/compare/v4.13.0...v4.14.0) + +### Features + +* **client:** adjust retry behavior to be exponential backoff ([#400](https://github.com/openai/openai-node/issues/400)) ([2bc14ce](https://github.com/openai/openai-node/commit/2bc14ce300ef020bc045199fe3d76dd352d78ef9)) + + +### Chores + +* **docs:** update deno version ([#399](https://github.com/openai/openai-node/issues/399)) ([cdee077](https://github.com/openai/openai-node/commit/cdee0770690d4b66b357d970827e9ba1597ffb89)) + +## 4.13.0 (2023-10-22) + +Full Changelog: [v4.12.4...v4.13.0](https://github.com/openai/openai-node/compare/v4.12.4...v4.13.0) + +### Features + +* **api:** add embeddings encoding_format ([#390](https://github.com/openai/openai-node/issues/390)) ([cf70dea](https://github.com/openai/openai-node/commit/cf70deaba1426786aba9b938d280c61aeb516e34)) +* handle 204 No Content gracefully ([#391](https://github.com/openai/openai-node/issues/391)) ([2dd005c](https://github.com/openai/openai-node/commit/2dd005c1c497605036d3524f19d130b3fc5f8d8b)) + +## 4.12.4 (2023-10-17) + +Full Changelog: [v4.12.3...v4.12.4](https://github.com/openai/openai-node/compare/v4.12.3...v4.12.4) + +### Bug Fixes + +* import web-streams-polyfill without overriding globals ([#385](https://github.com/openai/openai-node/issues/385)) ([be8e18b](https://github.com/openai/openai-node/commit/be8e18ba4c6a16e7b6413c77246f83230e0b8fc2)) + +## 4.12.3 (2023-10-16) + +Full Changelog: [v4.12.2...v4.12.3](https://github.com/openai/openai-node/compare/v4.12.2...v4.12.3) + +### Documentation + +* organisation -> organization (UK to US English) ([#382](https://github.com/openai/openai-node/issues/382)) ([516f0ad](https://github.com/openai/openai-node/commit/516f0ade1ec1fd8fc4c78999ee0f656cc2b5ae58)) + +## 4.12.2 (2023-10-16) + +Full Changelog: [v4.12.1...v4.12.2](https://github.com/openai/openai-node/compare/v4.12.1...v4.12.2) + +### Bug Fixes + +* **client:** correctly handle errors during streaming ([#377](https://github.com/openai/openai-node/issues/377)) ([09233b1](https://github.com/openai/openai-node/commit/09233b1ccc80ee900be19050f438cc8aa9dbb513)) +* **client:** correctly handle errors during streaming ([#379](https://github.com/openai/openai-node/issues/379)) ([9ced580](https://github.com/openai/openai-node/commit/9ced5804777a5857d6775a49ddf30ed9cc016fab)) +* improve status code in error messages ([#381](https://github.com/openai/openai-node/issues/381)) ([68dfb17](https://github.com/openai/openai-node/commit/68dfb17cce300ade8d29afc854d616833b3283ca)) + + +### Chores + +* add case insensitive get header function ([#373](https://github.com/openai/openai-node/issues/373)) ([b088998](https://github.com/openai/openai-node/commit/b088998ae610de54bb8700eefd6b664eb9a2fcc3)) +* **internal:** add debug logs for stream responses ([#380](https://github.com/openai/openai-node/issues/380)) ([689db0b](https://github.com/openai/openai-node/commit/689db0b8058527ae5c3af5e457c962d8a6635297)) +* show deprecation notice on re-export ([#368](https://github.com/openai/openai-node/issues/368)) ([b176703](https://github.com/openai/openai-node/commit/b176703102998f0e9d8ca2ed93ccd495fd10a6ee)) +* update comment ([#376](https://github.com/openai/openai-node/issues/376)) ([a06c685](https://github.com/openai/openai-node/commit/a06c6850bfdd756dc8f07dd1f70218be610faa30)) +* update comment 
([#378](https://github.com/openai/openai-node/issues/378)) ([b04031d](https://github.com/openai/openai-node/commit/b04031d19210a66f82c7d233a50f7bc427a1bf92)) + + +### Refactors + +* **streaming:** change Stream constructor signature ([#370](https://github.com/openai/openai-node/issues/370)) ([71984ed](https://github.com/openai/openai-node/commit/71984edc3141ba99ffa1327bab6a182b4452209f)) +* **test:** refactor authentication tests ([#371](https://github.com/openai/openai-node/issues/371)) ([e0d459f](https://github.com/openai/openai-node/commit/e0d459f958451a99e15a11a0e5ea6471abbe1ac1)) + +## 4.12.1 (2023-10-11) + +Full Changelog: [v4.12.0...v4.12.1](https://github.com/openai/openai-node/compare/v4.12.0...v4.12.1) + +### Bug Fixes + +* fix namespace exports regression ([#366](https://github.com/openai/openai-node/issues/366)) ([b2b1d85](https://github.com/openai/openai-node/commit/b2b1d85d90eef51e689ca75c0ca2f35bb63cccc0)) + +## 4.12.0 (2023-10-11) + +Full Changelog: [v4.11.1...v4.12.0](https://github.com/openai/openai-node/compare/v4.11.1...v4.12.0) + +### Features + +* **api:** remove `content_filter` stop_reason and update documentation ([#352](https://github.com/openai/openai-node/issues/352)) ([a4b401e](https://github.com/openai/openai-node/commit/a4b401e91a0b3fbf55aedfb6ed6d93396377bf27)) +* re-export chat completion types at the top level, and work around webpack limitations ([#365](https://github.com/openai/openai-node/issues/365)) ([bb815d0](https://github.com/openai/openai-node/commit/bb815d0373ae33f58329e34e8983f5b3881db22d)) + + +### Bug Fixes + +* prevent ReferenceError, update compatibility to ES2020 and Node 18+ ([#356](https://github.com/openai/openai-node/issues/356)) ([fc71a4b](https://github.com/openai/openai-node/commit/fc71a4b6b73208ff3e8f0c8792a9a03e3790d26b)) + + +### Chores + +* **internal:** minor formatting improvement ([#354](https://github.com/openai/openai-node/issues/354)) ([3799863](https://github.com/openai/openai-node/commit/3799863da4ff2a27940ef0b7e57360c72e44d986)) + +## 4.11.1 (2023-10-03) + +Full Changelog: [v4.11.0...v4.11.1](https://github.com/openai/openai-node/compare/v4.11.0...v4.11.1) + +## 4.11.0 (2023-09-29) + +Full Changelog: [v4.10.0...v4.11.0](https://github.com/openai/openai-node/compare/v4.10.0...v4.11.0) + +### Features + +* **client:** handle retry-after with a date ([#340](https://github.com/openai/openai-node/issues/340)) ([b6dd384](https://github.com/openai/openai-node/commit/b6dd38488ea7cc4c22495f16d027b7ffdb87da53)) +* **package:** export a root error type ([#338](https://github.com/openai/openai-node/issues/338)) ([462bcda](https://github.com/openai/openai-node/commit/462bcda7140611afa20bc25de4aec6d4b205b37d)) + + +### Bug Fixes + +* **api:** add content_filter to chat completion finish reason ([#344](https://github.com/openai/openai-node/issues/344)) ([f10c757](https://github.com/openai/openai-node/commit/f10c757d831d90407ba47b4659d9cd34b1a35b1d)) + + +### Chores + +* **internal:** bump lock file ([#334](https://github.com/openai/openai-node/issues/334)) ([fd2337b](https://github.com/openai/openai-node/commit/fd2337b018ab2f31bcea8f9feda0ddaf755390c7)) +* **internal:** update lock file ([#339](https://github.com/openai/openai-node/issues/339)) ([1bf84b6](https://github.com/openai/openai-node/commit/1bf84b672c386f8ca46bb8fc120eb8d8d48b3a82)) +* **internal:** update lock file ([#342](https://github.com/openai/openai-node/issues/342)) ([0001f06](https://github.com/openai/openai-node/commit/0001f062728b0e2047d2bf03b9d947a4be0c7206)) +* 
**internal:** update lock file ([#343](https://github.com/openai/openai-node/issues/343)) ([a02ac8e](https://github.com/openai/openai-node/commit/a02ac8e7f881551527a3cbcadad53b7e424650e8)) + +## 4.10.0 (2023-09-21) + +Full Changelog: [v4.9.1...v4.10.0](https://github.com/openai/openai-node/compare/v4.9.1...v4.10.0) + +### Features + +* **api:** add 'gpt-3.5-turbo-instruct', fine-tune error objects, update documentation ([#329](https://github.com/openai/openai-node/issues/329)) ([e5f3852](https://github.com/openai/openai-node/commit/e5f385233737002b4bb47a94cba33da7fedfe64d)) + +## 4.9.1 (2023-09-21) + +Full Changelog: [v4.9.0...v4.9.1](https://github.com/openai/openai-node/compare/v4.9.0...v4.9.1) + +### Documentation + +* **README:** fix variable names in some examples ([#327](https://github.com/openai/openai-node/issues/327)) ([5e05b31](https://github.com/openai/openai-node/commit/5e05b31c132545ce166cea92c5f3e4410fd40711)) + +## 4.9.0 (2023-09-20) + +Full Changelog: [v4.8.0...v4.9.0](https://github.com/openai/openai-node/compare/v4.8.0...v4.9.0) + +### Features + +* **client:** support importing node or web shims manually ([#325](https://github.com/openai/openai-node/issues/325)) ([628f293](https://github.com/openai/openai-node/commit/628f2935a8791625685f68f73db8f3759b8f4f91)) + +## 4.8.0 (2023-09-15) + +Full Changelog: [v4.7.1...v4.8.0](https://github.com/openai/openai-node/compare/v4.7.1...v4.8.0) + +### Features + +* **errors:** add status code to error message ([#315](https://github.com/openai/openai-node/issues/315)) ([9341219](https://github.com/openai/openai-node/commit/93412197c67cb3fb203f35e3ae0a7c3fb173453e)) + +## 4.7.1 (2023-09-15) + +Full Changelog: [v4.7.0...v4.7.1](https://github.com/openai/openai-node/compare/v4.7.0...v4.7.1) + +### Documentation + +* declare Bun 1.0 officially supported ([#314](https://github.com/openai/openai-node/issues/314)) ([a16e268](https://github.com/openai/openai-node/commit/a16e26863390235cb43e2fe0e569298a4f84c32f)) + +## 4.7.0 (2023-09-14) + +Full Changelog: [v4.6.0...v4.7.0](https://github.com/openai/openai-node/compare/v4.6.0...v4.7.0) + +### Features + +* **client:** retry on 408 Request Timeout ([#310](https://github.com/openai/openai-node/issues/310)) ([1f98eac](https://github.com/openai/openai-node/commit/1f98eac5be956e56d75ef5456115165b45a4763c)) +* make docs urls in comments absolute ([#306](https://github.com/openai/openai-node/issues/306)) ([9db3819](https://github.com/openai/openai-node/commit/9db381961e38d2280b0602447e7d91691b327bde)) + +## 4.6.0 (2023-09-08) + +Full Changelog: [v4.5.0...v4.6.0](https://github.com/openai/openai-node/compare/v4.5.0...v4.6.0) + +### Features + +* **types:** extract ChatCompletionRole enum to its own type ([#298](https://github.com/openai/openai-node/issues/298)) ([5893e37](https://github.com/openai/openai-node/commit/5893e37406ff85331c85a3baa519ca3051a28e00)) + + +### Bug Fixes + +* fix module not found errors in Vercel edge ([#300](https://github.com/openai/openai-node/issues/300)) ([47c79fe](https://github.com/openai/openai-node/commit/47c79fee0fa715ad04410e73530829602736d85f)) + +## 4.5.0 (2023-09-06) + +Full
Changelog: [v4.4.0...v4.5.0](https://github.com/openai/openai-node/compare/v4.4.0...v4.5.0) + +### Features + +* **client:** add files.waitForProcessing() method ([#292](https://github.com/openai/openai-node/issues/292)) ([ef59010](https://github.com/openai/openai-node/commit/ef59010cab0c666fa8a437ec6e27800789aa8705)) +* fixes tests where an array has to have unique enum values ([#290](https://github.com/openai/openai-node/issues/290)) ([a10b895](https://github.com/openai/openai-node/commit/a10b8956b3eaae7cdcb90329a8386a41219ca021)) +* make docs more readable by eliminating unnecessary escape sequences ([#287](https://github.com/openai/openai-node/issues/287)) ([a068043](https://github.com/openai/openai-node/commit/a06804314d4815d420c97f6f965c926ea70d56df)) + + +### Bug Fixes + +* **client:** fix TS errors that appear when users Go to Source in VSCode ([#281](https://github.com/openai/openai-node/issues/281)) ([8dc59bc](https://github.com/openai/openai-node/commit/8dc59bcf924cc991747ca475c714d915e04c6012)), closes [#249](https://github.com/openai/openai-node/issues/249) +* **client:** handle case where the client is instantiated with a undefined baseURL ([#285](https://github.com/openai/openai-node/issues/285)) ([5095cf3](https://github.com/openai/openai-node/commit/5095cf340743e4627b4f0ad2f055ebe332824d23)) +* **client:** use explicit file extensions in _shims imports ([#276](https://github.com/openai/openai-node/issues/276)) ([16fe929](https://github.com/openai/openai-node/commit/16fe929688d35c2ebe52c8cf1c1570bafda5f97e)) + + +### Documentation + +* **api:** update docstrings ([#286](https://github.com/openai/openai-node/issues/286)) ([664e953](https://github.com/openai/openai-node/commit/664e9532c8acfbf981e9a788ab40c111ebe2fda0)) +* **readme:** add link to api.md ([#291](https://github.com/openai/openai-node/issues/291)) ([0d1cce2](https://github.com/openai/openai-node/commit/0d1cce26cdc6567c10c8d72bbc72a788ffb8f2be)) + +## 4.4.0 (2023-09-01) + +Full Changelog: [v4.3.1...v4.4.0](https://github.com/openai/openai-node/compare/v4.3.1...v4.4.0) + +### Features + +* **package:** add Bun export map ([#269](https://github.com/openai/openai-node/issues/269)) ([16f239c](https://github.com/openai/openai-node/commit/16f239c6b4e8526371b01c511d2e0ebba4c5c8c6)) +* re-export chat completion types at the top level ([#268](https://github.com/openai/openai-node/issues/268)) ([1a71a39](https://github.com/openai/openai-node/commit/1a71a39421828fdde7b8605094363a5047d2fdc9)) +* **tests:** unskip multipart form data tests ([#275](https://github.com/openai/openai-node/issues/275)) ([47d3e18](https://github.com/openai/openai-node/commit/47d3e18a3ee987d04b958dad1a51821ad5472d54)) +* **types:** fix ambiguous auto-import for chat completions params ([#266](https://github.com/openai/openai-node/issues/266)) ([19c99fb](https://github.com/openai/openai-node/commit/19c99fb268d6d6c7fc7aaa66475c35f45d12b4bd)) + + +### Bug Fixes + +* revert import change which triggered circular import bug in webpack ([#274](https://github.com/openai/openai-node/issues/274)) ([6534e36](https://github.com/openai/openai-node/commit/6534e3620d7e2983e98b42cf95fa966deab1ab1d)) + +## 4.3.1 (2023-08-29) + +Full Changelog: [v4.3.0...v4.3.1](https://github.com/openai/openai-node/compare/v4.3.0...v4.3.1) + +### Bug Fixes + +* **types:** improve getNextPage() return type ([#262](https://github.com/openai/openai-node/issues/262)) ([245a984](https://github.com/openai/openai-node/commit/245a9847d1ba5bbe5262bc06b2f7bb7385cd3a9a)) + + +### Chores + +* 
**ci:** setup workflows to create releases and release PRs ([#259](https://github.com/openai/openai-node/issues/259)) ([290908c](https://github.com/openai/openai-node/commit/290908ce24dc6c31df18b2eb7808d5b495387454)) + +## [4.3.0](https://github.com/openai/openai-node/compare/v4.2.0...v4.3.0) (2023-08-27) + + +### Features + +* **client:** add auto-pagination to fine tuning list endpoints ([#254](https://github.com/openai/openai-node/issues/254)) ([5f89c5e](https://github.com/openai/openai-node/commit/5f89c5e6b9088cc2e86405a32b60cae91c078ce1)) +* **cli:** rewrite in JS for better compatibility ([#244](https://github.com/openai/openai-node/issues/244)) ([d8d7c05](https://github.com/openai/openai-node/commit/d8d7c0592bfad89669cd2f174e6207370cd7d3fb)) + + +### Bug Fixes + +* **stream:** declare Stream.controller as public ([#252](https://github.com/openai/openai-node/issues/252)) ([81e5de7](https://github.com/openai/openai-node/commit/81e5de7ba94c992cafa3d08e2697c8122382497a)) + + +### Documentation + +* **readme:** mention Azure support ([#253](https://github.com/openai/openai-node/issues/253)) ([294727a](https://github.com/openai/openai-node/commit/294727ad3543d91ef59df285ce1616c442d369db)) + + +### Chores + +* **internal:** add helper method ([#255](https://github.com/openai/openai-node/issues/255)) ([6d8cff0](https://github.com/openai/openai-node/commit/6d8cff00164c0f65ed40b941486f2e0d752feb1e)) + +## [4.2.0](https://github.com/openai/openai-node/compare/v4.1.0...v4.2.0) (2023-08-23) + + +### Features + +* **types:** export RequestOptions type ([#240](https://github.com/openai/openai-node/issues/240)) ([ecf3bce](https://github.com/openai/openai-node/commit/ecf3bcee3c64a80a3cd901aa32d3db78d1364645)) + + +### Chores + +* **internal:** export HeadersInit type shim ([#241](https://github.com/openai/openai-node/issues/241)) ([cf9f672](https://github.com/openai/openai-node/commit/cf9f6729b5b232a37841c33db33b2519b54f19b2)) diff --git a/node_modules/openai/LICENSE b/node_modules/openai/LICENSE new file mode 100644 index 0000000..621a6be --- /dev/null +++ b/node_modules/openai/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2024 OpenAI + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/node_modules/openai/README.md b/node_modules/openai/README.md new file mode 100644 index 0000000..b03bcd8 --- /dev/null +++ b/node_modules/openai/README.md @@ -0,0 +1,673 @@ +# OpenAI TypeScript and JavaScript API Library + +[![NPM version](https://img.shields.io/npm/v/openai.svg)](https://npmjs.org/package/openai) ![npm bundle size](https://img.shields.io/bundlephobia/minzip/openai) [![JSR Version](https://jsr.io/badges/@openai/openai)](https://jsr.io/@openai/openai) + +This library provides convenient access to the OpenAI REST API from TypeScript or JavaScript. + +It is generated from our [OpenAPI specification](https://github.com/openai/openai-openapi) with [Stainless](https://stainlessapi.com/). + +To learn how to use the OpenAI API, check out our [API Reference](https://platform.openai.com/docs/api-reference) and [Documentation](https://platform.openai.com/docs). 
+ +## Installation + +```sh +npm install openai +``` + +### Installation from JSR + +```sh +deno add jsr:@openai/openai +npx jsr add @openai/openai +``` + +These commands will make the module importable from the `@openai/openai` scope: + +You can also [import directly from JSR](https://jsr.io/docs/using-packages#importing-with-jsr-specifiers) without an install step if you're using the Deno JavaScript runtime: + +```ts +import OpenAI from 'jsr:@openai/openai'; +``` + +## Usage + +The full API of this library can be found in [api.md file](api.md) along with many [code examples](https://github.com/openai/openai-node/tree/master/examples). The code below shows how to get started using the chat completions API. + + +```js +import OpenAI from 'openai'; + +const client = new OpenAI({ + apiKey: process.env['OPENAI_API_KEY'], // This is the default and can be omitted +}); + +async function main() { + const chatCompletion = await client.chat.completions.create({ + messages: [{ role: 'user', content: 'Say this is a test' }], + model: 'gpt-4o', + }); +} + +main(); +``` + +## Streaming responses + +We provide support for streaming responses using Server Sent Events (SSE). + +```ts +import OpenAI from 'openai'; + +const client = new OpenAI(); + +async function main() { + const stream = await client.chat.completions.create({ + model: 'gpt-4o', + messages: [{ role: 'user', content: 'Say this is a test' }], + stream: true, + }); + for await (const chunk of stream) { + process.stdout.write(chunk.choices[0]?.delta?.content || ''); + } +} + +main(); +``` + +If you need to cancel a stream, you can `break` from the loop +or call `stream.controller.abort()`. + +### Request & Response types + +This library includes TypeScript definitions for all request params and response fields. You may import and use them like so: + + +```ts +import OpenAI from 'openai'; + +const client = new OpenAI({ + apiKey: process.env['OPENAI_API_KEY'], // This is the default and can be omitted +}); + +async function main() { + const params: OpenAI.Chat.ChatCompletionCreateParams = { + messages: [{ role: 'user', content: 'Say this is a test' }], + model: 'gpt-4o', + }; + const chatCompletion: OpenAI.Chat.ChatCompletion = await client.chat.completions.create(params); +} + +main(); +``` + +Documentation for each method, request param, and response field are available in docstrings and will appear on hover in most modern editors. + +> [!IMPORTANT] +> Previous versions of this SDK used a `Configuration` class. See the [v3 to v4 migration guide](https://github.com/openai/openai-node/discussions/217). + +### Polling Helpers + +When interacting with the API some actions such as starting a Run and adding files to vector stores are asynchronous and take time to complete. The SDK includes +helper functions which will poll the status until it reaches a terminal state and then return the resulting object. +If an API method results in an action which could benefit from polling there will be a corresponding version of the +method ending in 'AndPoll'. 
+ +For instance to create a Run and poll until it reaches a terminal state you can run: + +```ts +const run = await openai.beta.threads.runs.createAndPoll(thread.id, { + assistant_id: assistantId, +}); +``` + +More information on the lifecycle of a Run can be found in the [Run Lifecycle Documentation](https://platform.openai.com/docs/assistants/deep-dive/run-lifecycle) + +### Bulk Upload Helpers + +When creating and interacting with vector stores, you can use the polling helpers to monitor the status of operations. +For convenience, we also provide a bulk upload helper to allow you to simultaneously upload several files at once. + +```ts +const fileList = [ + createReadStream('/home/data/example.pdf'), + ... +]; + +const batch = await openai.vectorStores.fileBatches.uploadAndPoll(vectorStore.id, {files: fileList}); +``` + +### Streaming Helpers + +The SDK also includes helpers to process streams and handle the incoming events. + +```ts +const run = openai.beta.threads.runs + .stream(thread.id, { + assistant_id: assistant.id, + }) + .on('textCreated', (text) => process.stdout.write('\nassistant > ')) + .on('textDelta', (textDelta, snapshot) => process.stdout.write(textDelta.value)) + .on('toolCallCreated', (toolCall) => process.stdout.write(`\nassistant > ${toolCall.type}\n\n`)) + .on('toolCallDelta', (toolCallDelta, snapshot) => { + if (toolCallDelta.type === 'code_interpreter') { + if (toolCallDelta.code_interpreter.input) { + process.stdout.write(toolCallDelta.code_interpreter.input); + } + if (toolCallDelta.code_interpreter.outputs) { + process.stdout.write('\noutput >\n'); + toolCallDelta.code_interpreter.outputs.forEach((output) => { + if (output.type === 'logs') { + process.stdout.write(`\n${output.logs}\n`); + } + }); + } + } + }); +``` + +More information on streaming helpers can be found in the dedicated documentation: [helpers.md](helpers.md) + +### Streaming responses + +This library provides several conveniences for streaming chat completions, for example: + +```ts +import OpenAI from 'openai'; + +const openai = new OpenAI(); + +async function main() { + const stream = await openai.beta.chat.completions.stream({ + model: 'gpt-4o', + messages: [{ role: 'user', content: 'Say this is a test' }], + stream: true, + }); + + stream.on('content', (delta, snapshot) => { + process.stdout.write(delta); + }); + + // or, equivalently: + for await (const chunk of stream) { + process.stdout.write(chunk.choices[0]?.delta?.content || ''); + } + + const chatCompletion = await stream.finalChatCompletion(); + console.log(chatCompletion); // {id: "…", choices: […], …} +} + +main(); +``` + +Streaming with `openai.beta.chat.completions.stream({…})` exposes +[various helpers for your convenience](helpers.md#chat-events) including event handlers and promises. + +Alternatively, you can use `openai.chat.completions.create({ stream: true, … })` +which only returns an async iterable of the chunks in the stream and thus uses less memory +(it does not build up a final chat completion object for you). + +If you need to cancel a stream, you can `break` from a `for await` loop or call `stream.abort()`. + +### Automated function calls + +We provide the `openai.beta.chat.completions.runTools({…})` +convenience helper for using function tool calls with the `/chat/completions` endpoint +which automatically call the JavaScript functions you provide +and sends their results back to the `/chat/completions` endpoint, +looping as long as the model requests tool calls. 
+ +If you pass a `parse` function, it will automatically parse the `arguments` for you +and returns any parsing errors to the model to attempt auto-recovery. +Otherwise, the args will be passed to the function you provide as a string. + +If you pass `tool_choice: {function: {name: …}}` instead of `auto`, +it returns immediately after calling that function (and only loops to auto-recover parsing errors). + +```ts +import OpenAI from 'openai'; + +const client = new OpenAI(); + +async function main() { + const runner = client.beta.chat.completions + .runTools({ + model: 'gpt-4o', + messages: [{ role: 'user', content: 'How is the weather this week?' }], + tools: [ + { + type: 'function', + function: { + function: getCurrentLocation, + parameters: { type: 'object', properties: {} }, + }, + }, + { + type: 'function', + function: { + function: getWeather, + parse: JSON.parse, // or use a validation library like zod for typesafe parsing. + parameters: { + type: 'object', + properties: { + location: { type: 'string' }, + }, + }, + }, + }, + ], + }) + .on('message', (message) => console.log(message)); + + const finalContent = await runner.finalContent(); + console.log(); + console.log('Final content:', finalContent); +} + +async function getCurrentLocation() { + return 'Boston'; // Simulate lookup +} + +async function getWeather(args: { location: string }) { + const { location } = args; + // … do lookup … + return { temperature, precipitation }; +} + +main(); + +// {role: "user", content: "How's the weather this week?"} +// {role: "assistant", tool_calls: [{type: "function", function: {name: "getCurrentLocation", arguments: "{}"}, id: "123"} +// {role: "tool", name: "getCurrentLocation", content: "Boston", tool_call_id: "123"} +// {role: "assistant", tool_calls: [{type: "function", function: {name: "getWeather", arguments: '{"location": "Boston"}'}, id: "1234"}]} +// {role: "tool", name: "getWeather", content: '{"temperature": "50degF", "preciptation": "high"}', tool_call_id: "1234"} +// {role: "assistant", content: "It's looking cold and rainy - you might want to wear a jacket!"} +// +// Final content: "It's looking cold and rainy - you might want to wear a jacket!" +``` + +Like with `.stream()`, we provide a variety of [helpers and events](helpers.md#chat-events). + +Note that `runFunctions` was previously available as well, but has been deprecated in favor of `runTools`. + +Read more about various examples such as with integrating with [zod](helpers.md#integrate-with-zod), +[next.js](helpers.md#integrate-with-nextjs), and [proxying a stream to the browser](helpers.md#proxy-streaming-to-a-browser). 
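+
+As a quick illustration of the zod integration mentioned above (a sketch only, not taken from `helpers.md`; the `WeatherArgs` schema name is assumed, and `getWeather` refers to the function in the earlier example), you can hand `runTools` a `parse` hook that validates the raw JSON arguments before your function runs:
+
+```ts
+import { z } from 'zod';
+
+// Assumed schema for the getWeather arguments shown above.
+const WeatherArgs = z.object({ location: z.string() });
+
+const runner = client.beta.chat.completions.runTools({
+  model: 'gpt-4o',
+  messages: [{ role: 'user', content: 'How is the weather in Boston?' }],
+  tools: [
+    {
+      type: 'function',
+      function: {
+        function: (args: z.infer<typeof WeatherArgs>) => getWeather(args),
+        // parse receives the raw JSON string; parsing errors are returned to the model for auto-recovery.
+        parse: (input: string) => WeatherArgs.parse(JSON.parse(input)),
+        parameters: {
+          type: 'object',
+          properties: { location: { type: 'string' } },
+        },
+      },
+    },
+  ],
+});
+```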
+
+## File uploads
+
+Request parameters that correspond to file uploads can be passed in many different forms:
+
+- `File` (or an object with the same structure)
+- a `fetch` `Response` (or an object with the same structure)
+- an `fs.ReadStream`
+- the return value of our `toFile` helper
+
+```ts
+import fs from 'fs';
+import fetch from 'node-fetch';
+import OpenAI, { toFile } from 'openai';
+
+const client = new OpenAI();
+
+// If you have access to Node `fs` we recommend using `fs.createReadStream()`:
+await client.files.create({ file: fs.createReadStream('input.jsonl'), purpose: 'fine-tune' });
+
+// Or if you have the web `File` API you can pass a `File` instance:
+await client.files.create({ file: new File(['my bytes'], 'input.jsonl'), purpose: 'fine-tune' });
+
+// You can also pass a `fetch` `Response`:
+await client.files.create({ file: await fetch('https://somesite/input.jsonl'), purpose: 'fine-tune' });
+
+// Finally, if none of the above are convenient, you can use our `toFile` helper:
+await client.files.create({
+  file: await toFile(Buffer.from('my bytes'), 'input.jsonl'),
+  purpose: 'fine-tune',
+});
+await client.files.create({
+  file: await toFile(new Uint8Array([0, 1, 2]), 'input.jsonl'),
+  purpose: 'fine-tune',
+});
+```
+
+## Handling errors
+
+When the library is unable to connect to the API,
+or if the API returns a non-success status code (i.e., 4xx or 5xx response),
+a subclass of `APIError` will be thrown:
+
+```ts
+async function main() {
+  const job = await client.fineTuning.jobs
+    .create({ model: 'gpt-4o', training_file: 'file-abc123' })
+    .catch(async (err) => {
+      if (err instanceof OpenAI.APIError) {
+        console.log(err.status); // 400
+        console.log(err.name); // BadRequestError
+        console.log(err.headers); // {server: 'nginx', ...}
+      } else {
+        throw err;
+      }
+    });
+}
+
+main();
+```
+
+Error codes are as follows:
+
+| Status Code | Error Type                 |
+| ----------- | -------------------------- |
+| 400         | `BadRequestError`          |
+| 401         | `AuthenticationError`      |
+| 403         | `PermissionDeniedError`    |
+| 404         | `NotFoundError`            |
+| 422         | `UnprocessableEntityError` |
+| 429         | `RateLimitError`           |
+| >=500       | `InternalServerError`      |
+| N/A         | `APIConnectionError`       |
+
+## Request IDs
+
+> For more information on debugging requests, see [these docs](https://platform.openai.com/docs/api-reference/debugging-requests)
+
+All object responses in the SDK provide a `_request_id` property which is added from the `x-request-id` response header so that you can quickly log failing requests and report them back to OpenAI.
+
+```ts
+const completion = await client.chat.completions.create({ messages: [{ role: 'user', content: 'Say this is a test' }], model: 'gpt-4o' });
+console.log(completion._request_id); // req_123
+```
+
+You can also access the Request ID using the `.withResponse()` method:
+
+```ts
+const { data: stream, request_id } = await openai.chat.completions
+  .create({
+    model: 'gpt-4',
+    messages: [{ role: 'user', content: 'Say this is a test' }],
+    stream: true,
+  })
+  .withResponse();
+```
+
+## Microsoft Azure OpenAI
+
+To use this library with [Azure OpenAI](https://learn.microsoft.com/azure/ai-services/openai/overview), use the `AzureOpenAI`
+class instead of the `OpenAI` class.
+
+> [!IMPORTANT]
+> The Azure API shape slightly differs from the core API shape which means that the static types for responses / params
+> won't always be correct.
+ +```ts +import { AzureOpenAI } from 'openai'; +import { getBearerTokenProvider, DefaultAzureCredential } from '@azure/identity'; + +const credential = new DefaultAzureCredential(); +const scope = 'https://cognitiveservices.azure.com/.default'; +const azureADTokenProvider = getBearerTokenProvider(credential, scope); + +const openai = new AzureOpenAI({ azureADTokenProvider }); + +const result = await openai.chat.completions.create({ + model: 'gpt-4o', + messages: [{ role: 'user', content: 'Say hello!' }], +}); + +console.log(result.choices[0]!.message?.content); +``` + +### Retries + +Certain errors will be automatically retried 2 times by default, with a short exponential backoff. +Connection errors (for example, due to a network connectivity problem), 408 Request Timeout, 409 Conflict, +429 Rate Limit, and >=500 Internal errors will all be retried by default. + +You can use the `maxRetries` option to configure or disable this: + + +```js +// Configure the default for all requests: +const client = new OpenAI({ + maxRetries: 0, // default is 2 +}); + +// Or, configure per-request: +await client.chat.completions.create({ messages: [{ role: 'user', content: 'How can I get the name of the current day in JavaScript?' }], model: 'gpt-4o' }, { + maxRetries: 5, +}); +``` + +### Timeouts + +Requests time out after 10 minutes by default. You can configure this with a `timeout` option: + + +```ts +// Configure the default for all requests: +const client = new OpenAI({ + timeout: 20 * 1000, // 20 seconds (default is 10 minutes) +}); + +// Override per-request: +await client.chat.completions.create({ messages: [{ role: 'user', content: 'How can I list all files in a directory using Python?' }], model: 'gpt-4o' }, { + timeout: 5 * 1000, +}); +``` + +On timeout, an `APIConnectionTimeoutError` is thrown. + +Note that requests which time out will be [retried twice by default](#retries). + +## Auto-pagination + +List methods in the OpenAI API are paginated. +You can use the `for await … of` syntax to iterate through items across all pages: + +```ts +async function fetchAllFineTuningJobs(params) { + const allFineTuningJobs = []; + // Automatically fetches more pages as needed. + for await (const fineTuningJob of client.fineTuning.jobs.list({ limit: 20 })) { + allFineTuningJobs.push(fineTuningJob); + } + return allFineTuningJobs; +} +``` + +Alternatively, you can request a single page at a time: + +```ts +let page = await client.fineTuning.jobs.list({ limit: 20 }); +for (const fineTuningJob of page.data) { + console.log(fineTuningJob); +} + +// Convenience methods are provided for manually paginating: +while (page.hasNextPage()) { + page = await page.getNextPage(); + // ... +} +``` + +## Advanced Usage + +### Accessing raw Response data (e.g., headers) + +The "raw" `Response` returned by `fetch()` can be accessed through the `.asResponse()` method on the `APIPromise` type that all methods return. + +You can also use the `.withResponse()` method to get the raw `Response` along with the parsed data. 
+ + +```ts +const client = new OpenAI(); + +const response = await client.chat.completions + .create({ messages: [{ role: 'user', content: 'Say this is a test' }], model: 'gpt-4o' }) + .asResponse(); +console.log(response.headers.get('X-My-Header')); +console.log(response.statusText); // access the underlying Response object + +const { data: chatCompletion, response: raw } = await client.chat.completions + .create({ messages: [{ role: 'user', content: 'Say this is a test' }], model: 'gpt-4o' }) + .withResponse(); +console.log(raw.headers.get('X-My-Header')); +console.log(chatCompletion); +``` + +### Making custom/undocumented requests + +This library is typed for convenient access to the documented API. If you need to access undocumented +endpoints, params, or response properties, the library can still be used. + +#### Undocumented endpoints + +To make requests to undocumented endpoints, you can use `client.get`, `client.post`, and other HTTP verbs. +Options on the client, such as retries, will be respected when making these requests. + +```ts +await client.post('/some/path', { + body: { some_prop: 'foo' }, + query: { some_query_arg: 'bar' }, +}); +``` + +#### Undocumented request params + +To make requests using undocumented parameters, you may use `// @ts-expect-error` on the undocumented +parameter. This library doesn't validate at runtime that the request matches the type, so any extra values you +send will be sent as-is. + +```ts +client.foo.create({ + foo: 'my_param', + bar: 12, + // @ts-expect-error baz is not yet public + baz: 'undocumented option', +}); +``` + +For requests with the `GET` verb, any extra params will be in the query, all other requests will send the +extra param in the body. + +If you want to explicitly send an extra argument, you can do so with the `query`, `body`, and `headers` request +options. + +#### Undocumented response properties + +To access undocumented response properties, you may access the response object with `// @ts-expect-error` on +the response object, or cast the response object to the requisite type. Like the request params, we do not +validate or strip extra properties from the response from the API. + +### Customizing the fetch client + +By default, this library uses `node-fetch` in Node, and expects a global `fetch` function in other environments. + +If you would prefer to use a global, web-standards-compliant `fetch` function even in a Node environment, +(for example, if you are running Node with `--experimental-fetch` or using NextJS which polyfills with `undici`), +add the following import before your first import `from "OpenAI"`: + +```ts +// Tell TypeScript and the package to use the global web fetch instead of node-fetch. +// Note, despite the name, this does not add any polyfills, but expects them to be provided if needed. +import 'openai/shims/web'; +import OpenAI from 'openai'; +``` + +To do the inverse, add `import "openai/shims/node"` (which does import polyfills). +This can also be useful if you are getting the wrong TypeScript types for `Response` ([more details](https://github.com/openai/openai-node/tree/master/src/_shims#readme)). 
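+
+As a small sketch of the "Undocumented response properties" note above (illustrative only; `some_undocumented_field` is a made-up property name), both approaches look like this:
+
+```ts
+const completion = await client.chat.completions.create({
+  messages: [{ role: 'user', content: 'Say this is a test' }],
+  model: 'gpt-4o',
+});
+
+// Option 1: suppress the type error for a single property access.
+// @ts-expect-error some_undocumented_field is not in the public types
+const value = completion.some_undocumented_field;
+
+// Option 2: cast to a wider type and read whatever the API actually returned.
+const raw = completion as unknown as Record<string, unknown>;
+console.log(raw['some_undocumented_field']);
+```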
+
+### Logging and middleware
+
+You may also provide a custom `fetch` function when instantiating the client,
+which can be used to inspect or alter the `Request` or `Response` before/after each request:
+
+```ts
+import { fetch } from 'undici'; // as one example
+import OpenAI from 'openai';
+
+const client = new OpenAI({
+  fetch: async (url: RequestInfo, init?: RequestInit): Promise<Response> => {
+    console.log('About to make a request', url, init);
+    const response = await fetch(url, init);
+    console.log('Got response', response);
+    return response;
+  },
+});
+```
+
+Note that if given a `DEBUG=true` environment variable, this library will log all requests and responses automatically.
+This is intended for debugging purposes only and may change in the future without notice.
+
+### Configuring an HTTP(S) Agent (e.g., for proxies)
+
+By default, this library uses a stable agent for all http/https requests to reuse TCP connections, eliminating many TCP & TLS handshakes and shaving around 100ms off most requests.
+
+If you would like to disable or customize this behavior, for example to use the API behind a proxy, you can pass an `httpAgent` which is used for all requests (be they http or https), for example:
+
+```ts
+import http from 'http';
+import { HttpsProxyAgent } from 'https-proxy-agent';
+
+// Configure the default for all requests:
+const client = new OpenAI({
+  httpAgent: new HttpsProxyAgent(process.env.PROXY_URL),
+});
+
+// Override per-request:
+await client.models.list({
+  httpAgent: new http.Agent({ keepAlive: false }),
+});
+```
+
+## Semantic versioning
+
+This package generally follows [SemVer](https://semver.org/spec/v2.0.0.html) conventions, though certain backwards-incompatible changes may be released as minor versions:
+
+1. Changes that only affect static types, without breaking runtime behavior.
+2. Changes to library internals which are technically public but not intended or documented for external use. _(Please open a GitHub issue to let us know if you are relying on such internals)_.
+3. Changes that we do not expect to impact the vast majority of users in practice.
+
+We take backwards-compatibility seriously and work hard to ensure you can rely on a smooth upgrade experience.
+
+We are keen for your feedback; please open an [issue](https://www.github.com/openai/openai-node/issues) with questions, bugs, or suggestions.
+
+## Requirements
+
+TypeScript >= 4.5 is supported.
+
+The following runtimes are supported:
+
+- Node.js 18 LTS or later ([non-EOL](https://endoflife.date/nodejs)) versions.
+- Deno v1.28.0 or higher.
+- Bun 1.0 or later.
+- Cloudflare Workers.
+- Vercel Edge Runtime.
+- Jest 28 or greater with the `"node"` environment (`"jsdom"` is not supported at this time).
+- Nitro v2.6 or greater.
+- Web browsers: disabled by default to avoid exposing your secret API credentials. Enable browser support by explicitly setting `dangerouslyAllowBrowser` to `true`.
+
+  <details>
+  <summary><b>More explanation</b></summary>
+
+  ### Why is this dangerous?
+
+  Enabling the `dangerouslyAllowBrowser` option can be dangerous because it exposes your secret API credentials in client-side code. Web browsers are inherently less secure than server environments;
+  any user with access to the browser can potentially inspect, extract, and misuse these credentials. This could lead to unauthorized access using your credentials and potentially compromise sensitive data or functionality.
+
+  ### When might this not be dangerous?
+
+  There are certain scenarios where enabling browser support might not pose significant risks:
+
+  - Internal Tools: If the application is used solely within a controlled internal environment where the users are trusted, the risk of credential exposure can be mitigated.
+  - Public APIs with Limited Scope: If your API has very limited scope and the exposed credentials do not grant access to sensitive data or critical operations, the potential impact of exposure is reduced.
+  - Development or debugging purposes: Enabling this feature temporarily might be acceptable, provided the credentials are short-lived, aren't also used in production environments, or are frequently rotated.
+
+  </details>
+
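+As a rough sketch of the internal-tool scenario described above (not from the upstream docs; `shortLivedKey` is assumed to be minted and scoped by your own backend), opting in looks like this:
+
+```ts
+import OpenAI from 'openai';
+
+// Only do this when the key is short-lived, narrowly scoped, and served to trusted users.
+const client = new OpenAI({
+  apiKey: shortLivedKey,
+  dangerouslyAllowBrowser: true,
+});
+```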
+ +Note that React Native is not supported at this time. + +If you are interested in other runtime environments, please open or upvote an issue on GitHub. + +## Contributing + +See [the contributing documentation](./CONTRIBUTING.md). diff --git a/node_modules/openai/_shims/MultipartBody.d.ts b/node_modules/openai/_shims/MultipartBody.d.ts new file mode 100644 index 0000000..d9118c8 --- /dev/null +++ b/node_modules/openai/_shims/MultipartBody.d.ts @@ -0,0 +1,9 @@ +/** + * Disclaimer: modules in _shims aren't intended to be imported by SDK users. + */ +export declare class MultipartBody { + body: any; + constructor(body: any); + get [Symbol.toStringTag](): string; +} +//# sourceMappingURL=MultipartBody.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/_shims/MultipartBody.d.ts.map b/node_modules/openai/_shims/MultipartBody.d.ts.map new file mode 100644 index 0000000..fc6a8b6 --- /dev/null +++ b/node_modules/openai/_shims/MultipartBody.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"MultipartBody.d.ts","sourceRoot":"","sources":["../src/_shims/MultipartBody.ts"],"names":[],"mappings":"AAAA;;GAEG;AACH,qBAAa,aAAa;IACL,IAAI,EAAE,GAAG;gBAAT,IAAI,EAAE,GAAG;IAC5B,IAAI,CAAC,MAAM,CAAC,WAAW,CAAC,IAAI,MAAM,CAEjC;CACF"} \ No newline at end of file diff --git a/node_modules/openai/_shims/MultipartBody.js b/node_modules/openai/_shims/MultipartBody.js new file mode 100644 index 0000000..67efb46 --- /dev/null +++ b/node_modules/openai/_shims/MultipartBody.js @@ -0,0 +1,16 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.MultipartBody = void 0; +/** + * Disclaimer: modules in _shims aren't intended to be imported by SDK users. + */ +class MultipartBody { + constructor(body) { + this.body = body; + } + get [Symbol.toStringTag]() { + return 'MultipartBody'; + } +} +exports.MultipartBody = MultipartBody; +//# sourceMappingURL=MultipartBody.js.map \ No newline at end of file diff --git a/node_modules/openai/_shims/MultipartBody.js.map b/node_modules/openai/_shims/MultipartBody.js.map new file mode 100644 index 0000000..adad49f --- /dev/null +++ b/node_modules/openai/_shims/MultipartBody.js.map @@ -0,0 +1 @@ +{"version":3,"file":"MultipartBody.js","sourceRoot":"","sources":["../src/_shims/MultipartBody.ts"],"names":[],"mappings":";;;AAAA;;GAEG;AACH,MAAa,aAAa;IACxB,YAAmB,IAAS;QAAT,SAAI,GAAJ,IAAI,CAAK;IAAG,CAAC;IAChC,IAAI,CAAC,MAAM,CAAC,WAAW,CAAC;QACtB,OAAO,eAAe,CAAC;IACzB,CAAC;CACF;AALD,sCAKC"} \ No newline at end of file diff --git a/node_modules/openai/_shims/MultipartBody.mjs b/node_modules/openai/_shims/MultipartBody.mjs new file mode 100644 index 0000000..68a8280 --- /dev/null +++ b/node_modules/openai/_shims/MultipartBody.mjs @@ -0,0 +1,12 @@ +/** + * Disclaimer: modules in _shims aren't intended to be imported by SDK users. 
+ */ +export class MultipartBody { + constructor(body) { + this.body = body; + } + get [Symbol.toStringTag]() { + return 'MultipartBody'; + } +} +//# sourceMappingURL=MultipartBody.mjs.map \ No newline at end of file diff --git a/node_modules/openai/_shims/MultipartBody.mjs.map b/node_modules/openai/_shims/MultipartBody.mjs.map new file mode 100644 index 0000000..6aca413 --- /dev/null +++ b/node_modules/openai/_shims/MultipartBody.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"MultipartBody.mjs","sourceRoot":"","sources":["../src/_shims/MultipartBody.ts"],"names":[],"mappings":"AAAA;;GAEG;AACH,MAAM,OAAO,aAAa;IACxB,YAAmB,IAAS;QAAT,SAAI,GAAJ,IAAI,CAAK;IAAG,CAAC;IAChC,IAAI,CAAC,MAAM,CAAC,WAAW,CAAC;QACtB,OAAO,eAAe,CAAC;IACzB,CAAC;CACF"} \ No newline at end of file diff --git a/node_modules/openai/_shims/README.md b/node_modules/openai/_shims/README.md new file mode 100644 index 0000000..b3a349b --- /dev/null +++ b/node_modules/openai/_shims/README.md @@ -0,0 +1,46 @@ +# 👋 Wondering what everything in here does? + +`openai` supports a wide variety of runtime environments like Node.js, Deno, Bun, browsers, and various +edge runtimes, as well as both CommonJS (CJS) and EcmaScript Modules (ESM). + +To do this, `openai` provides shims for either using `node-fetch` when in Node (because `fetch` is still experimental there) or the global `fetch` API built into the environment when not in Node. + +It uses [conditional exports](https://nodejs.org/api/packages.html#conditional-exports) to +automatically select the correct shims for each environment. However, conditional exports are a fairly new +feature and not supported everywhere. For instance, the TypeScript `"moduleResolution": "node"` + +setting doesn't consult the `exports` map, compared to `"moduleResolution": "nodeNext"`, which does. +Unfortunately that's still the default setting, and it can result in errors like +getting the wrong raw `Response` type from `.asResponse()`, for example. + +The user can work around these issues by manually importing one of: + +- `import 'openai/shims/node'` +- `import 'openai/shims/web'` + +All of the code here in `_shims` handles selecting the automatic default shims or manual overrides. + +### How it works - Runtime + +Runtime shims get installed by calling `setShims` exported by `openai/_shims/registry`. + +Manually importing `openai/shims/node` or `openai/shims/web`, calls `setShims` with the respective runtime shims. + +All client code imports shims from `openai/_shims/index`, which: + +- checks if shims have been set manually +- if not, calls `setShims` with the shims from `openai/_shims/auto/runtime` +- re-exports the installed shims from `openai/_shims/registry`. + +`openai/_shims/auto/runtime` exports web runtime shims. +If the `node` export condition is set, the export map replaces it with `openai/_shims/auto/runtime-node`. + +### How it works - Type time + +All client code imports shim types from `openai/_shims/index`, which selects the manual types from `openai/_shims/manual-types` if they have been declared, otherwise it exports the auto types from `openai/_shims/auto/types`. + +`openai/_shims/manual-types` exports an empty namespace. +Manually importing `openai/shims/node` or `openai/shims/web` merges declarations into this empty namespace, so they get picked up by `openai/_shims/index`. + +`openai/_shims/auto/types` exports web type definitions. 
+If the `node` export condition is set, the export map replaces it with `openai/_shims/auto/types-node`, though TS only picks this up if `"moduleResolution": "nodenext"` or `"moduleResolution": "bundler"`. diff --git a/node_modules/openai/_shims/auto/runtime-bun.d.ts b/node_modules/openai/_shims/auto/runtime-bun.d.ts new file mode 100644 index 0000000..1baecb8 --- /dev/null +++ b/node_modules/openai/_shims/auto/runtime-bun.d.ts @@ -0,0 +1,5 @@ +/** + * Disclaimer: modules in _shims aren't intended to be imported by SDK users. + */ +export * from "../bun-runtime.js"; +//# sourceMappingURL=runtime-bun.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/_shims/auto/runtime-bun.d.ts.map b/node_modules/openai/_shims/auto/runtime-bun.d.ts.map new file mode 100644 index 0000000..8d57eee --- /dev/null +++ b/node_modules/openai/_shims/auto/runtime-bun.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"runtime-bun.d.ts","sourceRoot":"","sources":["../../src/_shims/auto/runtime-bun.ts"],"names":[],"mappings":"AAAA;;GAEG;AACH,cAAc,gBAAgB,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/_shims/auto/runtime-bun.js b/node_modules/openai/_shims/auto/runtime-bun.js new file mode 100644 index 0000000..312c535 --- /dev/null +++ b/node_modules/openai/_shims/auto/runtime-bun.js @@ -0,0 +1,21 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +/** + * Disclaimer: modules in _shims aren't intended to be imported by SDK users. 
+ */ +__exportStar(require("../bun-runtime.js"), exports); +//# sourceMappingURL=runtime-bun.js.map \ No newline at end of file diff --git a/node_modules/openai/_shims/auto/runtime-bun.js.map b/node_modules/openai/_shims/auto/runtime-bun.js.map new file mode 100644 index 0000000..c78786e --- /dev/null +++ b/node_modules/openai/_shims/auto/runtime-bun.js.map @@ -0,0 +1 @@ +{"version":3,"file":"runtime-bun.js","sourceRoot":"","sources":["../../src/_shims/auto/runtime-bun.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;AAAA;;GAEG;AACH,oDAA+B"} \ No newline at end of file diff --git a/node_modules/openai/_shims/auto/runtime-bun.mjs b/node_modules/openai/_shims/auto/runtime-bun.mjs new file mode 100644 index 0000000..3197fae --- /dev/null +++ b/node_modules/openai/_shims/auto/runtime-bun.mjs @@ -0,0 +1,2 @@ +export * from "../bun-runtime.mjs"; +//# sourceMappingURL=runtime-bun.mjs.map \ No newline at end of file diff --git a/node_modules/openai/_shims/auto/runtime-bun.mjs.map b/node_modules/openai/_shims/auto/runtime-bun.mjs.map new file mode 100644 index 0000000..32011c5 --- /dev/null +++ b/node_modules/openai/_shims/auto/runtime-bun.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"runtime-bun.mjs","sourceRoot":"","sources":["../../src/_shims/auto/runtime-bun.ts"],"names":[],"mappings":""} \ No newline at end of file diff --git a/node_modules/openai/_shims/auto/runtime-node.d.ts b/node_modules/openai/_shims/auto/runtime-node.d.ts new file mode 100644 index 0000000..9db3401 --- /dev/null +++ b/node_modules/openai/_shims/auto/runtime-node.d.ts @@ -0,0 +1,5 @@ +/** + * Disclaimer: modules in _shims aren't intended to be imported by SDK users. + */ +export * from "../node-runtime.js"; +//# sourceMappingURL=runtime-node.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/_shims/auto/runtime-node.d.ts.map b/node_modules/openai/_shims/auto/runtime-node.d.ts.map new file mode 100644 index 0000000..63a03d6 --- /dev/null +++ b/node_modules/openai/_shims/auto/runtime-node.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"runtime-node.d.ts","sourceRoot":"","sources":["../../src/_shims/auto/runtime-node.ts"],"names":[],"mappings":"AAAA;;GAEG;AACH,cAAc,iBAAiB,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/_shims/auto/runtime-node.js b/node_modules/openai/_shims/auto/runtime-node.js new file mode 100644 index 0000000..318ae7c --- /dev/null +++ b/node_modules/openai/_shims/auto/runtime-node.js @@ -0,0 +1,21 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +/** + * Disclaimer: modules in _shims aren't intended to be imported by SDK users. 
+ */ +__exportStar(require("../node-runtime.js"), exports); +//# sourceMappingURL=runtime-node.js.map \ No newline at end of file diff --git a/node_modules/openai/_shims/auto/runtime-node.js.map b/node_modules/openai/_shims/auto/runtime-node.js.map new file mode 100644 index 0000000..4d43fab --- /dev/null +++ b/node_modules/openai/_shims/auto/runtime-node.js.map @@ -0,0 +1 @@ +{"version":3,"file":"runtime-node.js","sourceRoot":"","sources":["../../src/_shims/auto/runtime-node.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;AAAA;;GAEG;AACH,qDAAgC"} \ No newline at end of file diff --git a/node_modules/openai/_shims/auto/runtime-node.mjs b/node_modules/openai/_shims/auto/runtime-node.mjs new file mode 100644 index 0000000..f846bd3 --- /dev/null +++ b/node_modules/openai/_shims/auto/runtime-node.mjs @@ -0,0 +1,2 @@ +export * from "../node-runtime.mjs"; +//# sourceMappingURL=runtime-node.mjs.map \ No newline at end of file diff --git a/node_modules/openai/_shims/auto/runtime-node.mjs.map b/node_modules/openai/_shims/auto/runtime-node.mjs.map new file mode 100644 index 0000000..aa25d93 --- /dev/null +++ b/node_modules/openai/_shims/auto/runtime-node.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"runtime-node.mjs","sourceRoot":"","sources":["../../src/_shims/auto/runtime-node.ts"],"names":[],"mappings":""} \ No newline at end of file diff --git a/node_modules/openai/_shims/auto/runtime.d.ts b/node_modules/openai/_shims/auto/runtime.d.ts new file mode 100644 index 0000000..ad73696 --- /dev/null +++ b/node_modules/openai/_shims/auto/runtime.d.ts @@ -0,0 +1,5 @@ +/** + * Disclaimer: modules in _shims aren't intended to be imported by SDK users. + */ +export * from "../web-runtime.js"; +//# sourceMappingURL=runtime.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/_shims/auto/runtime.d.ts.map b/node_modules/openai/_shims/auto/runtime.d.ts.map new file mode 100644 index 0000000..256985a --- /dev/null +++ b/node_modules/openai/_shims/auto/runtime.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"runtime.d.ts","sourceRoot":"","sources":["../../src/_shims/auto/runtime.ts"],"names":[],"mappings":"AAAA;;GAEG;AACH,cAAc,gBAAgB,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/_shims/auto/runtime.js b/node_modules/openai/_shims/auto/runtime.js new file mode 100644 index 0000000..2559391 --- /dev/null +++ b/node_modules/openai/_shims/auto/runtime.js @@ -0,0 +1,21 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +/** + * Disclaimer: modules in _shims aren't intended to be imported by SDK users. 
+ */ +__exportStar(require("../web-runtime.js"), exports); +//# sourceMappingURL=runtime.js.map \ No newline at end of file diff --git a/node_modules/openai/_shims/auto/runtime.js.map b/node_modules/openai/_shims/auto/runtime.js.map new file mode 100644 index 0000000..3cbe401 --- /dev/null +++ b/node_modules/openai/_shims/auto/runtime.js.map @@ -0,0 +1 @@ +{"version":3,"file":"runtime.js","sourceRoot":"","sources":["../../src/_shims/auto/runtime.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;AAAA;;GAEG;AACH,oDAA+B"} \ No newline at end of file diff --git a/node_modules/openai/_shims/auto/runtime.mjs b/node_modules/openai/_shims/auto/runtime.mjs new file mode 100644 index 0000000..c16bf9e --- /dev/null +++ b/node_modules/openai/_shims/auto/runtime.mjs @@ -0,0 +1,2 @@ +export * from "../web-runtime.mjs"; +//# sourceMappingURL=runtime.mjs.map \ No newline at end of file diff --git a/node_modules/openai/_shims/auto/runtime.mjs.map b/node_modules/openai/_shims/auto/runtime.mjs.map new file mode 100644 index 0000000..5a61fc6 --- /dev/null +++ b/node_modules/openai/_shims/auto/runtime.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"runtime.mjs","sourceRoot":"","sources":["../../src/_shims/auto/runtime.ts"],"names":[],"mappings":""} \ No newline at end of file diff --git a/node_modules/openai/_shims/auto/types-node.d.ts b/node_modules/openai/_shims/auto/types-node.d.ts new file mode 100644 index 0000000..66e5296 --- /dev/null +++ b/node_modules/openai/_shims/auto/types-node.d.ts @@ -0,0 +1,5 @@ +/** + * Disclaimer: modules in _shims aren't intended to be imported by SDK users. + */ +export * from "../node-types.js"; +//# sourceMappingURL=types-node.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/_shims/auto/types-node.d.ts.map b/node_modules/openai/_shims/auto/types-node.d.ts.map new file mode 100644 index 0000000..bc0e15d --- /dev/null +++ b/node_modules/openai/_shims/auto/types-node.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"types-node.d.ts","sourceRoot":"","sources":["../../src/_shims/auto/types-node.ts"],"names":[],"mappings":"AAAA;;GAEG;AACH,cAAc,eAAe,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/_shims/auto/types-node.js b/node_modules/openai/_shims/auto/types-node.js new file mode 100644 index 0000000..b7577b3 --- /dev/null +++ b/node_modules/openai/_shims/auto/types-node.js @@ -0,0 +1,21 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +/** + * Disclaimer: modules in _shims aren't intended to be imported by SDK users. 
+ */ +__exportStar(require("../node-types.js"), exports); +//# sourceMappingURL=types-node.js.map \ No newline at end of file diff --git a/node_modules/openai/_shims/auto/types-node.js.map b/node_modules/openai/_shims/auto/types-node.js.map new file mode 100644 index 0000000..553fe3e --- /dev/null +++ b/node_modules/openai/_shims/auto/types-node.js.map @@ -0,0 +1 @@ +{"version":3,"file":"types-node.js","sourceRoot":"","sources":["../../src/_shims/auto/types-node.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;AAAA;;GAEG;AACH,mDAA8B"} \ No newline at end of file diff --git a/node_modules/openai/_shims/auto/types-node.mjs b/node_modules/openai/_shims/auto/types-node.mjs new file mode 100644 index 0000000..10c37f2 --- /dev/null +++ b/node_modules/openai/_shims/auto/types-node.mjs @@ -0,0 +1,2 @@ +export * from "../node-types.mjs"; +//# sourceMappingURL=types-node.mjs.map \ No newline at end of file diff --git a/node_modules/openai/_shims/auto/types-node.mjs.map b/node_modules/openai/_shims/auto/types-node.mjs.map new file mode 100644 index 0000000..27e85a2 --- /dev/null +++ b/node_modules/openai/_shims/auto/types-node.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"types-node.mjs","sourceRoot":"","sources":["../../src/_shims/auto/types-node.ts"],"names":[],"mappings":""} \ No newline at end of file diff --git a/node_modules/openai/_shims/auto/types.d.ts b/node_modules/openai/_shims/auto/types.d.ts new file mode 100644 index 0000000..d775507 --- /dev/null +++ b/node_modules/openai/_shims/auto/types.d.ts @@ -0,0 +1,101 @@ +/** + * Disclaimer: modules in _shims aren't intended to be imported by SDK users. + */ +export type Agent = any; + +// @ts-ignore +declare const _fetch: unknown extends typeof fetch ? never : typeof fetch; +export { _fetch as fetch }; + +// @ts-ignore +type _Request = unknown extends Request ? never : Request; +export { _Request as Request }; + +// @ts-ignore +type _RequestInfo = unknown extends RequestInfo ? never : RequestInfo; +export { type _RequestInfo as RequestInfo }; + +// @ts-ignore +type _RequestInit = unknown extends RequestInit ? never : RequestInit; +export { type _RequestInit as RequestInit }; + +// @ts-ignore +type _Response = unknown extends Response ? never : Response; +export { _Response as Response }; + +// @ts-ignore +type _ResponseInit = unknown extends ResponseInit ? never : ResponseInit; +export { type _ResponseInit as ResponseInit }; + +// @ts-ignore +type _ResponseType = unknown extends ResponseType ? never : ResponseType; +export { type _ResponseType as ResponseType }; + +// @ts-ignore +type _BodyInit = unknown extends BodyInit ? never : BodyInit; +export { type _BodyInit as BodyInit }; + +// @ts-ignore +type _Headers = unknown extends Headers ? never : Headers; +export { _Headers as Headers }; + +// @ts-ignore +type _HeadersInit = unknown extends HeadersInit ? never : HeadersInit; +export { type _HeadersInit as HeadersInit }; + +type EndingType = 'native' | 'transparent'; + +export interface BlobPropertyBag { + endings?: EndingType; + type?: string; +} + +export interface FilePropertyBag extends BlobPropertyBag { + lastModified?: number; +} + +export type FileFromPathOptions = Omit; + +// @ts-ignore +type _FormData = unknown extends FormData ? never : FormData; +// @ts-ignore +declare const _FormData: unknown extends typeof FormData ? never : typeof FormData; +export { _FormData as FormData }; + +// @ts-ignore +type _File = unknown extends File ? never : File; +// @ts-ignore +declare const _File: unknown extends typeof File ? 
never : typeof File; +export { _File as File }; + +// @ts-ignore +type _Blob = unknown extends Blob ? never : Blob; +// @ts-ignore +declare const _Blob: unknown extends typeof Blob ? never : typeof Blob; +export { _Blob as Blob }; + +export declare class Readable { + readable: boolean; + readonly readableEnded: boolean; + readonly readableFlowing: boolean | null; + readonly readableHighWaterMark: number; + readonly readableLength: number; + readonly readableObjectMode: boolean; + destroyed: boolean; + read(size?: number): any; + pause(): this; + resume(): this; + isPaused(): boolean; + destroy(error?: Error): this; + [Symbol.asyncIterator](): AsyncIterableIterator; +} + +export declare class FsReadStream extends Readable { + path: {}; // node type is string | Buffer +} + +// @ts-ignore +type _ReadableStream = unknown extends ReadableStream ? never : ReadableStream; +// @ts-ignore +declare const _ReadableStream: unknown extends typeof ReadableStream ? never : typeof ReadableStream; +export { _ReadableStream as ReadableStream }; diff --git a/node_modules/openai/_shims/auto/types.js b/node_modules/openai/_shims/auto/types.js new file mode 100644 index 0000000..ddbdb79 --- /dev/null +++ b/node_modules/openai/_shims/auto/types.js @@ -0,0 +1,3 @@ +/** + * Disclaimer: modules in _shims aren't intended to be imported by SDK users. + */ diff --git a/node_modules/openai/_shims/auto/types.mjs b/node_modules/openai/_shims/auto/types.mjs new file mode 100644 index 0000000..ddbdb79 --- /dev/null +++ b/node_modules/openai/_shims/auto/types.mjs @@ -0,0 +1,3 @@ +/** + * Disclaimer: modules in _shims aren't intended to be imported by SDK users. + */ diff --git a/node_modules/openai/_shims/bun-runtime.d.ts b/node_modules/openai/_shims/bun-runtime.d.ts new file mode 100644 index 0000000..7228102 --- /dev/null +++ b/node_modules/openai/_shims/bun-runtime.d.ts @@ -0,0 +1,6 @@ +/** + * Disclaimer: modules in _shims aren't intended to be imported by SDK users. 
+ */ +import { type Shims } from "./registry.js"; +export declare function getRuntime(): Shims; +//# sourceMappingURL=bun-runtime.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/_shims/bun-runtime.d.ts.map b/node_modules/openai/_shims/bun-runtime.d.ts.map new file mode 100644 index 0000000..9de8c65 --- /dev/null +++ b/node_modules/openai/_shims/bun-runtime.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"bun-runtime.d.ts","sourceRoot":"","sources":["../src/_shims/bun-runtime.ts"],"names":[],"mappings":"AAAA;;GAEG;AACH,OAAO,EAAE,KAAK,KAAK,EAAE,MAAM,YAAY,CAAC;AAIxC,wBAAgB,UAAU,IAAI,KAAK,CAMlC"} \ No newline at end of file diff --git a/node_modules/openai/_shims/bun-runtime.js b/node_modules/openai/_shims/bun-runtime.js new file mode 100644 index 0000000..e098e16 --- /dev/null +++ b/node_modules/openai/_shims/bun-runtime.js @@ -0,0 +1,14 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getRuntime = void 0; +const web_runtime_1 = require("./web-runtime.js"); +const node_fs_1 = require("node:fs"); +function getRuntime() { + const runtime = (0, web_runtime_1.getRuntime)(); + function isFsReadStream(value) { + return value instanceof node_fs_1.ReadStream; + } + return { ...runtime, isFsReadStream }; +} +exports.getRuntime = getRuntime; +//# sourceMappingURL=bun-runtime.js.map \ No newline at end of file diff --git a/node_modules/openai/_shims/bun-runtime.js.map b/node_modules/openai/_shims/bun-runtime.js.map new file mode 100644 index 0000000..fdea462 --- /dev/null +++ b/node_modules/openai/_shims/bun-runtime.js.map @@ -0,0 +1 @@ +{"version":3,"file":"bun-runtime.js","sourceRoot":"","sources":["../src/_shims/bun-runtime.ts"],"names":[],"mappings":";;;AAIA,kDAA4D;AAC5D,qCAAqD;AAErD,SAAgB,UAAU;IACxB,MAAM,OAAO,GAAG,IAAA,wBAAa,GAAE,CAAC;IAChC,SAAS,cAAc,CAAC,KAAU;QAChC,OAAO,KAAK,YAAY,oBAAY,CAAC;IACvC,CAAC;IACD,OAAO,EAAE,GAAG,OAAO,EAAE,cAAc,EAAE,CAAC;AACxC,CAAC;AAND,gCAMC"} \ No newline at end of file diff --git a/node_modules/openai/_shims/bun-runtime.mjs b/node_modules/openai/_shims/bun-runtime.mjs new file mode 100644 index 0000000..8f52de8 --- /dev/null +++ b/node_modules/openai/_shims/bun-runtime.mjs @@ -0,0 +1,10 @@ +import { getRuntime as getWebRuntime } from "./web-runtime.mjs"; +import { ReadStream as FsReadStream } from 'node:fs'; +export function getRuntime() { + const runtime = getWebRuntime(); + function isFsReadStream(value) { + return value instanceof FsReadStream; + } + return { ...runtime, isFsReadStream }; +} +//# sourceMappingURL=bun-runtime.mjs.map \ No newline at end of file diff --git a/node_modules/openai/_shims/bun-runtime.mjs.map b/node_modules/openai/_shims/bun-runtime.mjs.map new file mode 100644 index 0000000..4a1b11d --- /dev/null +++ b/node_modules/openai/_shims/bun-runtime.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"bun-runtime.mjs","sourceRoot":"","sources":["../src/_shims/bun-runtime.ts"],"names":[],"mappings":"OAIO,EAAE,UAAU,IAAI,aAAa,EAAE;OAC/B,EAAE,UAAU,IAAI,YAAY,EAAE,MAAM,SAAS;AAEpD,MAAM,UAAU,UAAU;IACxB,MAAM,OAAO,GAAG,aAAa,EAAE,CAAC;IAChC,SAAS,cAAc,CAAC,KAAU;QAChC,OAAO,KAAK,YAAY,YAAY,CAAC;IACvC,CAAC;IACD,OAAO,EAAE,GAAG,OAAO,EAAE,cAAc,EAAE,CAAC;AACxC,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/_shims/index.d.ts b/node_modules/openai/_shims/index.d.ts new file mode 100644 index 0000000..7ff9513 --- /dev/null +++ b/node_modules/openai/_shims/index.d.ts @@ -0,0 +1,81 @@ +/** + * Disclaimer: modules in _shims aren't intended to be imported by SDK users. 
+ */ +import { manual } from "./manual-types.js"; +import * as auto from 'openai/_shims/auto/types'; +import { type RequestOptions } from "../core.js"; + +type SelectType<Manual, Auto> = unknown extends Manual ? Auto : Manual; + +export const kind: string; + +// @ts-ignore +export type Agent = SelectType<manual.Agent, auto.Agent>; + +// @ts-ignore +export const fetch: SelectType<typeof manual.fetch, typeof auto.fetch>; + +// @ts-ignore +export type Request = SelectType<manual.Request, auto.Request>; +// @ts-ignore +export type RequestInfo = SelectType<manual.RequestInfo, auto.RequestInfo>; +// @ts-ignore +export type RequestInit = SelectType<manual.RequestInit, auto.RequestInit>; + +// @ts-ignore +export type Response = SelectType<manual.Response, auto.Response>; +// @ts-ignore +export type ResponseInit = SelectType<manual.ResponseInit, auto.ResponseInit>; +// @ts-ignore +export type ResponseType = SelectType<manual.ResponseType, auto.ResponseType>; +// @ts-ignore +export type BodyInit = SelectType<manual.BodyInit, auto.BodyInit>; +// @ts-ignore +export type Headers = SelectType<manual.Headers, auto.Headers>; +// @ts-ignore +export const Headers: SelectType<typeof manual.Headers, typeof auto.Headers>; +// @ts-ignore +export type HeadersInit = SelectType<manual.HeadersInit, auto.HeadersInit>; + +// @ts-ignore +export type BlobPropertyBag = SelectType<manual.BlobPropertyBag, auto.BlobPropertyBag>; +// @ts-ignore +export type FilePropertyBag = SelectType<manual.FilePropertyBag, auto.FilePropertyBag>; +// @ts-ignore +export type FileFromPathOptions = SelectType<manual.FileFromPathOptions, auto.FileFromPathOptions>; +// @ts-ignore +export type FormData = SelectType<manual.FormData, auto.FormData>; +// @ts-ignore +export const FormData: SelectType<typeof manual.FormData, typeof auto.FormData>; +// @ts-ignore +export type File = SelectType<manual.File, auto.File>; +// @ts-ignore +export const File: SelectType<typeof manual.File, typeof auto.File>; +// @ts-ignore +export type Blob = SelectType<manual.Blob, auto.Blob>; +// @ts-ignore +export const Blob: SelectType<typeof manual.Blob, typeof auto.Blob>; + +// @ts-ignore +export type Readable = SelectType<manual.Readable, auto.Readable>; +// @ts-ignore +export type FsReadStream = SelectType<manual.FsReadStream, auto.FsReadStream>; +// @ts-ignore +export type ReadableStream = SelectType<manual.ReadableStream, auto.ReadableStream>; +// @ts-ignore +export const ReadableStream: SelectType<typeof manual.ReadableStream, typeof auto.ReadableStream>; + +export function getMultipartRequestOptions<T = Record<string, unknown>>( + form: FormData, + opts: RequestOptions<T>, +): Promise<RequestOptions<T>>; + +export function getDefaultAgent(url: string): any; + +// @ts-ignore +export type FileFromPathOptions = SelectType<manual.FileFromPathOptions, auto.FileFromPathOptions>; + +export function fileFromPath(path: string, options?: FileFromPathOptions): Promise<File>; +export function fileFromPath(path: string, filename?: string, options?: FileFromPathOptions): Promise<File>; + +export function isFsReadStream(value: any): value is FsReadStream; diff --git a/node_modules/openai/_shims/index.js b/node_modules/openai/_shims/index.js new file mode 100644 index 0000000..b5fc822 --- /dev/null +++ b/node_modules/openai/_shims/index.js @@ -0,0 +1,13 @@ +/** + * Disclaimer: modules in _shims aren't intended to be imported by SDK users. + */ +const shims = require('./registry'); +const auto = require('openai/_shims/auto/runtime'); +if (!shims.kind) shims.setShims(auto.getRuntime(), { auto: true }); +for (const property of Object.keys(shims)) { + Object.defineProperty(exports, property, { + get() { + return shims[property]; + }, + }); +} diff --git a/node_modules/openai/_shims/index.mjs b/node_modules/openai/_shims/index.mjs new file mode 100644 index 0000000..81665e6 --- /dev/null +++ b/node_modules/openai/_shims/index.mjs @@ -0,0 +1,7 @@ +/** + * Disclaimer: modules in _shims aren't intended to be imported by SDK users. + */ +import * as shims from './registry.mjs'; +import * as auto from 'openai/_shims/auto/runtime'; +if (!shims.kind) shims.setShims(auto.getRuntime(), { auto: true }); +export * from './registry.mjs'; diff --git a/node_modules/openai/_shims/manual-types.d.ts b/node_modules/openai/_shims/manual-types.d.ts new file mode 100644 index 0000000..3d00fc2 --- /dev/null +++ b/node_modules/openai/_shims/manual-types.d.ts @@ -0,0 +1,12 @@ +/** + * Disclaimer: modules in _shims aren't intended to be imported by SDK users.
+ */ +/** + * Types will get added to this namespace when you import one of the following: + * + * import 'openai/shims/node' + * import 'openai/shims/web' + * + * Importing more than one will cause type and runtime errors. + */ +export namespace manual {} diff --git a/node_modules/openai/_shims/manual-types.js b/node_modules/openai/_shims/manual-types.js new file mode 100644 index 0000000..ddbdb79 --- /dev/null +++ b/node_modules/openai/_shims/manual-types.js @@ -0,0 +1,3 @@ +/** + * Disclaimer: modules in _shims aren't intended to be imported by SDK users. + */ diff --git a/node_modules/openai/_shims/manual-types.mjs b/node_modules/openai/_shims/manual-types.mjs new file mode 100644 index 0000000..ddbdb79 --- /dev/null +++ b/node_modules/openai/_shims/manual-types.mjs @@ -0,0 +1,3 @@ +/** + * Disclaimer: modules in _shims aren't intended to be imported by SDK users. + */ diff --git a/node_modules/openai/_shims/node-runtime.d.ts b/node_modules/openai/_shims/node-runtime.d.ts new file mode 100644 index 0000000..0c2b042 --- /dev/null +++ b/node_modules/openai/_shims/node-runtime.d.ts @@ -0,0 +1,3 @@ +import { type Shims } from "./registry.js"; +export declare function getRuntime(): Shims; +//# sourceMappingURL=node-runtime.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/_shims/node-runtime.d.ts.map b/node_modules/openai/_shims/node-runtime.d.ts.map new file mode 100644 index 0000000..04da63c --- /dev/null +++ b/node_modules/openai/_shims/node-runtime.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"node-runtime.d.ts","sourceRoot":"","sources":["../src/_shims/node-runtime.ts"],"names":[],"mappings":"AAcA,OAAO,EAAE,KAAK,KAAK,EAAE,MAAM,YAAY,CAAC;AA6CxC,wBAAgB,UAAU,IAAI,KAAK,CAqBlC"} \ No newline at end of file diff --git a/node_modules/openai/_shims/node-runtime.js b/node_modules/openai/_shims/node-runtime.js new file mode 100644 index 0000000..73ac45d --- /dev/null +++ b/node_modules/openai/_shims/node-runtime.js @@ -0,0 +1,89 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getRuntime = void 0; +/** + * Disclaimer: modules in _shims aren't intended to be imported by SDK users. 
+ */ +const nf = __importStar(require("node-fetch")); +const fd = __importStar(require("formdata-node")); +const agentkeepalive_1 = __importDefault(require("agentkeepalive")); +const abort_controller_1 = require("abort-controller"); +const node_fs_1 = require("node:fs"); +const form_data_encoder_1 = require("form-data-encoder"); +const node_stream_1 = require("node:stream"); +const MultipartBody_1 = require("./MultipartBody.js"); +const web_1 = require("node:stream/web"); +let fileFromPathWarned = false; +async function fileFromPath(path, ...args) { + // this import fails in environments that don't handle export maps correctly, like old versions of Jest + const { fileFromPath: _fileFromPath } = await Promise.resolve().then(() => __importStar(require('formdata-node/file-from-path'))); + if (!fileFromPathWarned) { + console.warn(`fileFromPath is deprecated; use fs.createReadStream(${JSON.stringify(path)}) instead`); + fileFromPathWarned = true; + } + // @ts-ignore + return await _fileFromPath(path, ...args); +} +const defaultHttpAgent = new agentkeepalive_1.default({ keepAlive: true, timeout: 5 * 60 * 1000 }); +const defaultHttpsAgent = new agentkeepalive_1.default.HttpsAgent({ keepAlive: true, timeout: 5 * 60 * 1000 }); +async function getMultipartRequestOptions(form, opts) { + const encoder = new form_data_encoder_1.FormDataEncoder(form); + const readable = node_stream_1.Readable.from(encoder); + const body = new MultipartBody_1.MultipartBody(readable); + const headers = { + ...opts.headers, + ...encoder.headers, + 'Content-Length': encoder.contentLength, + }; + return { ...opts, body: body, headers }; +} +function getRuntime() { + // Polyfill global object if needed. + if (typeof AbortController === 'undefined') { + // @ts-expect-error (the types are subtly different, but compatible in practice) + globalThis.AbortController = abort_controller_1.AbortController; + } + return { + kind: 'node', + fetch: nf.default, + Request: nf.Request, + Response: nf.Response, + Headers: nf.Headers, + FormData: fd.FormData, + Blob: fd.Blob, + File: fd.File, + ReadableStream: web_1.ReadableStream, + getMultipartRequestOptions, + getDefaultAgent: (url) => (url.startsWith('https') ? 
defaultHttpsAgent : defaultHttpAgent), + fileFromPath, + isFsReadStream: (value) => value instanceof node_fs_1.ReadStream, + }; +} +exports.getRuntime = getRuntime; +//# sourceMappingURL=node-runtime.js.map \ No newline at end of file diff --git a/node_modules/openai/_shims/node-runtime.js.map b/node_modules/openai/_shims/node-runtime.js.map new file mode 100644 index 0000000..c5dde76 --- /dev/null +++ b/node_modules/openai/_shims/node-runtime.js.map @@ -0,0 +1 @@ +{"version":3,"file":"node-runtime.js","sourceRoot":"","sources":["../src/_shims/node-runtime.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;;GAEG;AACH,+CAAiC;AACjC,kDAAoC;AAEpC,oEAA4C;AAC5C,uDAA8E;AAC9E,qCAAqD;AAErD,yDAAoD;AACpD,6CAAuC;AAEvC,sDAAgD;AAEhD,yCAAiD;AAIjD,IAAI,kBAAkB,GAAG,KAAK,CAAC;AAS/B,KAAK,UAAU,YAAY,CAAC,IAAY,EAAE,GAAG,IAAW;IACtD,uGAAuG;IACvG,MAAM,EAAE,YAAY,EAAE,aAAa,EAAE,GAAG,wDAAa,8BAA8B,GAAC,CAAC;IAErF,IAAI,CAAC,kBAAkB,EAAE;QACvB,OAAO,CAAC,IAAI,CAAC,uDAAuD,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC;QACrG,kBAAkB,GAAG,IAAI,CAAC;KAC3B;IACD,aAAa;IACb,OAAO,MAAM,aAAa,CAAC,IAAI,EAAE,GAAG,IAAI,CAAC,CAAC;AAC5C,CAAC;AAED,MAAM,gBAAgB,GAAU,IAAI,wBAAc,CAAC,EAAE,SAAS,EAAE,IAAI,EAAE,OAAO,EAAE,CAAC,GAAG,EAAE,GAAG,IAAI,EAAE,CAAC,CAAC;AAChG,MAAM,iBAAiB,GAAU,IAAI,wBAAc,CAAC,UAAU,CAAC,EAAE,SAAS,EAAE,IAAI,EAAE,OAAO,EAAE,CAAC,GAAG,EAAE,GAAG,IAAI,EAAE,CAAC,CAAC;AAE5G,KAAK,UAAU,0BAA0B,CACvC,IAAiB,EACjB,IAAuB;IAEvB,MAAM,OAAO,GAAG,IAAI,mCAAe,CAAC,IAAI,CAAC,CAAC;IAC1C,MAAM,QAAQ,GAAG,sBAAQ,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;IACxC,MAAM,IAAI,GAAG,IAAI,6BAAa,CAAC,QAAQ,CAAC,CAAC;IACzC,MAAM,OAAO,GAAG;QACd,GAAG,IAAI,CAAC,OAAO;QACf,GAAG,OAAO,CAAC,OAAO;QAClB,gBAAgB,EAAE,OAAO,CAAC,aAAa;KACxC,CAAC;IAEF,OAAO,EAAE,GAAG,IAAI,EAAE,IAAI,EAAE,IAAW,EAAE,OAAO,EAAE,CAAC;AACjD,CAAC;AAED,SAAgB,UAAU;IACxB,oCAAoC;IACpC,IAAI,OAAO,eAAe,KAAK,WAAW,EAAE;QAC1C,gFAAgF;QAChF,UAAU,CAAC,eAAe,GAAG,kCAAuB,CAAC;KACtD;IACD,OAAO;QACL,IAAI,EAAE,MAAM;QACZ,KAAK,EAAE,EAAE,CAAC,OAAO;QACjB,OAAO,EAAE,EAAE,CAAC,OAAO;QACnB,QAAQ,EAAE,EAAE,CAAC,QAAQ;QACrB,OAAO,EAAE,EAAE,CAAC,OAAO;QACnB,QAAQ,EAAE,EAAE,CAAC,QAAQ;QACrB,IAAI,EAAE,EAAE,CAAC,IAAI;QACb,IAAI,EAAE,EAAE,CAAC,IAAI;QACb,cAAc,EAAd,oBAAc;QACd,0BAA0B;QAC1B,eAAe,EAAE,CAAC,GAAW,EAAS,EAAE,CAAC,CAAC,GAAG,CAAC,UAAU,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,iBAAiB,CAAC,CAAC,CAAC,gBAAgB,CAAC;QACzG,YAAY;QACZ,cAAc,EAAE,CAAC,KAAU,EAAyB,EAAE,CAAC,KAAK,YAAY,oBAAY;KACrF,CAAC;AACJ,CAAC;AArBD,gCAqBC"} \ No newline at end of file diff --git a/node_modules/openai/_shims/node-runtime.mjs b/node_modules/openai/_shims/node-runtime.mjs new file mode 100644 index 0000000..3605162 --- /dev/null +++ b/node_modules/openai/_shims/node-runtime.mjs @@ -0,0 +1,56 @@ +import * as nf from 'node-fetch'; +import * as fd from 'formdata-node'; +import KeepAliveAgent from 'agentkeepalive'; +import { AbortController as AbortControllerPolyfill } from 'abort-controller'; +import { ReadStream as FsReadStream } from 'node:fs'; +import { FormDataEncoder } from 'form-data-encoder'; +import { Readable } from 'node:stream'; +import { MultipartBody } from "./MultipartBody.mjs"; +import { ReadableStream } from 'node:stream/web'; +let fileFromPathWarned = false; +async function fileFromPath(path, ...args) { + // this import fails in environments that don't handle export maps correctly, like old versions of Jest + const { fileFromPath: _fileFromPath } = await import('formdata-node/file-from-path'); + if (!fileFromPathWarned) { + console.warn(`fileFromPath is deprecated; use fs.createReadStream(${JSON.stringify(path)}) instead`); + fileFromPathWarned = true; + } + // @ts-ignore + 
return await _fileFromPath(path, ...args); +} +const defaultHttpAgent = new KeepAliveAgent({ keepAlive: true, timeout: 5 * 60 * 1000 }); +const defaultHttpsAgent = new KeepAliveAgent.HttpsAgent({ keepAlive: true, timeout: 5 * 60 * 1000 }); +async function getMultipartRequestOptions(form, opts) { + const encoder = new FormDataEncoder(form); + const readable = Readable.from(encoder); + const body = new MultipartBody(readable); + const headers = { + ...opts.headers, + ...encoder.headers, + 'Content-Length': encoder.contentLength, + }; + return { ...opts, body: body, headers }; +} +export function getRuntime() { + // Polyfill global object if needed. + if (typeof AbortController === 'undefined') { + // @ts-expect-error (the types are subtly different, but compatible in practice) + globalThis.AbortController = AbortControllerPolyfill; + } + return { + kind: 'node', + fetch: nf.default, + Request: nf.Request, + Response: nf.Response, + Headers: nf.Headers, + FormData: fd.FormData, + Blob: fd.Blob, + File: fd.File, + ReadableStream, + getMultipartRequestOptions, + getDefaultAgent: (url) => (url.startsWith('https') ? defaultHttpsAgent : defaultHttpAgent), + fileFromPath, + isFsReadStream: (value) => value instanceof FsReadStream, + }; +} +//# sourceMappingURL=node-runtime.mjs.map \ No newline at end of file diff --git a/node_modules/openai/_shims/node-runtime.mjs.map b/node_modules/openai/_shims/node-runtime.mjs.map new file mode 100644 index 0000000..53a3e51 --- /dev/null +++ b/node_modules/openai/_shims/node-runtime.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"node-runtime.mjs","sourceRoot":"","sources":["../src/_shims/node-runtime.ts"],"names":[],"mappings":"OAGO,KAAK,EAAE,MAAM,YAAY;OACzB,KAAK,EAAE,MAAM,eAAe;OAE5B,cAAc,MAAM,gBAAgB;OACpC,EAAE,eAAe,IAAI,uBAAuB,EAAE,MAAM,kBAAkB;OACtE,EAAE,UAAU,IAAI,YAAY,EAAE,MAAM,SAAS;OAE7C,EAAE,eAAe,EAAE,MAAM,mBAAmB;OAC5C,EAAE,QAAQ,EAAE,MAAM,aAAa;OAE/B,EAAE,aAAa,EAAE;OAEjB,EAAE,cAAc,EAAE,MAAM,iBAAiB;AAIhD,IAAI,kBAAkB,GAAG,KAAK,CAAC;AAS/B,KAAK,UAAU,YAAY,CAAC,IAAY,EAAE,GAAG,IAAW;IACtD,uGAAuG;IACvG,MAAM,EAAE,YAAY,EAAE,aAAa,EAAE,GAAG,MAAM,MAAM,CAAC,8BAA8B,CAAC,CAAC;IAErF,IAAI,CAAC,kBAAkB,EAAE;QACvB,OAAO,CAAC,IAAI,CAAC,uDAAuD,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC;QACrG,kBAAkB,GAAG,IAAI,CAAC;KAC3B;IACD,aAAa;IACb,OAAO,MAAM,aAAa,CAAC,IAAI,EAAE,GAAG,IAAI,CAAC,CAAC;AAC5C,CAAC;AAED,MAAM,gBAAgB,GAAU,IAAI,cAAc,CAAC,EAAE,SAAS,EAAE,IAAI,EAAE,OAAO,EAAE,CAAC,GAAG,EAAE,GAAG,IAAI,EAAE,CAAC,CAAC;AAChG,MAAM,iBAAiB,GAAU,IAAI,cAAc,CAAC,UAAU,CAAC,EAAE,SAAS,EAAE,IAAI,EAAE,OAAO,EAAE,CAAC,GAAG,EAAE,GAAG,IAAI,EAAE,CAAC,CAAC;AAE5G,KAAK,UAAU,0BAA0B,CACvC,IAAiB,EACjB,IAAuB;IAEvB,MAAM,OAAO,GAAG,IAAI,eAAe,CAAC,IAAI,CAAC,CAAC;IAC1C,MAAM,QAAQ,GAAG,QAAQ,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;IACxC,MAAM,IAAI,GAAG,IAAI,aAAa,CAAC,QAAQ,CAAC,CAAC;IACzC,MAAM,OAAO,GAAG;QACd,GAAG,IAAI,CAAC,OAAO;QACf,GAAG,OAAO,CAAC,OAAO;QAClB,gBAAgB,EAAE,OAAO,CAAC,aAAa;KACxC,CAAC;IAEF,OAAO,EAAE,GAAG,IAAI,EAAE,IAAI,EAAE,IAAW,EAAE,OAAO,EAAE,CAAC;AACjD,CAAC;AAED,MAAM,UAAU,UAAU;IACxB,oCAAoC;IACpC,IAAI,OAAO,eAAe,KAAK,WAAW,EAAE;QAC1C,gFAAgF;QAChF,UAAU,CAAC,eAAe,GAAG,uBAAuB,CAAC;KACtD;IACD,OAAO;QACL,IAAI,EAAE,MAAM;QACZ,KAAK,EAAE,EAAE,CAAC,OAAO;QACjB,OAAO,EAAE,EAAE,CAAC,OAAO;QACnB,QAAQ,EAAE,EAAE,CAAC,QAAQ;QACrB,OAAO,EAAE,EAAE,CAAC,OAAO;QACnB,QAAQ,EAAE,EAAE,CAAC,QAAQ;QACrB,IAAI,EAAE,EAAE,CAAC,IAAI;QACb,IAAI,EAAE,EAAE,CAAC,IAAI;QACb,cAAc;QACd,0BAA0B;QAC1B,eAAe,EAAE,CAAC,GAAW,EAAS,EAAE,CAAC,CAAC,GAAG,CAAC,UAAU,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,iBAAiB,CAAC,CAAC,CAAC,gBAAgB,CAAC;QACzG,YAAY;QACZ,cAAc,EAAE,CAAC,KAAU,EAAyB,EAAE,CAAC,KAAK,YAAY,YAAY
;KACrF,CAAC;AACJ,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/_shims/node-types.d.ts b/node_modules/openai/_shims/node-types.d.ts new file mode 100644 index 0000000..c159e5f --- /dev/null +++ b/node_modules/openai/_shims/node-types.d.ts @@ -0,0 +1,42 @@ +/** + * Disclaimer: modules in _shims aren't intended to be imported by SDK users. + */ +import * as nf from 'node-fetch'; +import * as fd from 'formdata-node'; + +export { type Agent } from 'node:http'; +export { type Readable } from 'node:stream'; +export { type ReadStream as FsReadStream } from 'node:fs'; +export { ReadableStream } from 'node:stream/web'; + +export const fetch: typeof nf.default; + +export type Request = nf.Request; +export type RequestInfo = nf.RequestInfo; +export type RequestInit = nf.RequestInit; + +export type Response = nf.Response; +export type ResponseInit = nf.ResponseInit; +export type ResponseType = nf.ResponseType; +export type BodyInit = nf.BodyInit; +export type Headers = nf.Headers; +export type HeadersInit = nf.HeadersInit; + +type EndingType = 'native' | 'transparent'; +export interface BlobPropertyBag { + endings?: EndingType; + type?: string; +} + +export interface FilePropertyBag extends BlobPropertyBag { + lastModified?: number; +} + +export type FileFromPathOptions = Omit<FilePropertyBag, 'lastModified'>; + +export type FormData = fd.FormData; +export const FormData: typeof fd.FormData; +export type File = fd.File; +export const File: typeof fd.File; +export type Blob = fd.Blob; +export const Blob: typeof fd.Blob; diff --git a/node_modules/openai/_shims/node-types.js b/node_modules/openai/_shims/node-types.js new file mode 100644 index 0000000..ddbdb79 --- /dev/null +++ b/node_modules/openai/_shims/node-types.js @@ -0,0 +1,3 @@ +/** + * Disclaimer: modules in _shims aren't intended to be imported by SDK users. + */ diff --git a/node_modules/openai/_shims/node-types.mjs b/node_modules/openai/_shims/node-types.mjs new file mode 100644 index 0000000..ddbdb79 --- /dev/null +++ b/node_modules/openai/_shims/node-types.mjs @@ -0,0 +1,3 @@ +/** + * Disclaimer: modules in _shims aren't intended to be imported by SDK users. + */ diff --git a/node_modules/openai/_shims/registry.d.ts b/node_modules/openai/_shims/registry.d.ts new file mode 100644 index 0000000..c041fa2 --- /dev/null +++ b/node_modules/openai/_shims/registry.d.ts @@ -0,0 +1,37 @@ +/** + * Disclaimer: modules in _shims aren't intended to be imported by SDK users.
+ */ +import { type RequestOptions } from "../core.js"; +export interface Shims { + kind: string; + fetch: any; + Request: any; + Response: any; + Headers: any; + FormData: any; + Blob: any; + File: any; + ReadableStream: any; + getMultipartRequestOptions: <T = Record<string, unknown>>(form: Shims['FormData'], opts: RequestOptions<T>) => Promise<RequestOptions<T>>; + getDefaultAgent: (url: string) => any; + fileFromPath: ((path: string, filename?: string, options?: {}) => Promise<Shims['File']>) | ((path: string, options?: {}) => Promise<Shims['File']>); + isFsReadStream: (value: any) => boolean; +} +export declare let auto: boolean; +export declare let kind: Shims['kind'] | undefined; +export declare let fetch: Shims['fetch'] | undefined; +export declare let Request: Shims['Request'] | undefined; +export declare let Response: Shims['Response'] | undefined; +export declare let Headers: Shims['Headers'] | undefined; +export declare let FormData: Shims['FormData'] | undefined; +export declare let Blob: Shims['Blob'] | undefined; +export declare let File: Shims['File'] | undefined; +export declare let ReadableStream: Shims['ReadableStream'] | undefined; +export declare let getMultipartRequestOptions: Shims['getMultipartRequestOptions'] | undefined; +export declare let getDefaultAgent: Shims['getDefaultAgent'] | undefined; +export declare let fileFromPath: Shims['fileFromPath'] | undefined; +export declare let isFsReadStream: Shims['isFsReadStream'] | undefined; +export declare function setShims(shims: Shims, options?: { + auto: boolean; +}): void; +//# sourceMappingURL=registry.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/_shims/registry.d.ts.map b/node_modules/openai/_shims/registry.d.ts.map new file mode 100644 index 0000000..036e7d4 --- /dev/null +++ b/node_modules/openai/_shims/registry.d.ts.map @@ -0,0 +1 @@
+{"version":3,"file":"registry.d.ts","sourceRoot":"","sources":["../src/_shims/registry.ts"],"names":[],"mappings":"AAAA;;GAEG;AACH,OAAO,EAAE,KAAK,cAAc,EAAE,MAAM,SAAS,CAAC;AAE9C,MAAM,WAAW,KAAK;IACpB,IAAI,EAAE,MAAM,CAAC;IACb,KAAK,EAAE,GAAG,CAAC;IACX,OAAO,EAAE,GAAG,CAAC;IACb,QAAQ,EAAE,GAAG,CAAC;IACd,OAAO,EAAE,GAAG,CAAC;IACb,QAAQ,EAAE,GAAG,CAAC;IACd,IAAI,EAAE,GAAG,CAAC;IACV,IAAI,EAAE,GAAG,CAAC;IACV,cAAc,EAAE,GAAG,CAAC;IACpB,0BAA0B,EAAE,CAAC,CAAC,GAAG,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,EACtD,IAAI,EAAE,KAAK,CAAC,UAAU,CAAC,EACvB,IAAI,EAAE,cAAc,CAAC,CAAC,CAAC,KACpB,OAAO,CAAC,cAAc,CAAC,CAAC,CAAC,CAAC,CAAC;IAChC,eAAe,EAAE,CAAC,GAAG,EAAE,MAAM,KAAK,GAAG,CAAC;IACtC,YAAY,EACR,CAAC,CAAC,IAAI,EAAE,MAAM,EAAE,QAAQ,CAAC,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE,EAAE,KAAK,OAAO,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,GAC3E,CAAC,CAAC,IAAI,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE,EAAE,KAAK,OAAO,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC;IAC7D,cAAc,EAAE,CAAC,KAAK,EAAE,GAAG,KAAK,OAAO,CAAC;CACzC;AAED,eAAO,IAAI,IAAI,SAAQ,CAAC;AACxB,eAAO,IAAI,IAAI,EAAE,KAAK,CAAC,MAAM,CAAC,GAAG,SAAqB,CAAC;AACvD,eAAO,IAAI,KAAK,EAAE,KAAK,CAAC,OAAO,CAAC,GAAG,SAAqB,CAAC;AACzD,eAAO,IAAI,OAAO,EAAE,KAAK,CAAC,SAAS,CAAC,GAAG,SAAqB,CAAC;AAC7D,eAAO,IAAI,QAAQ,EAAE,KAAK,CAAC,UAAU,CAAC,GAAG,SAAqB,CAAC;AAC/D,eAAO,IAAI,OAAO,EAAE,KAAK,CAAC,SAAS,CAAC,GAAG,SAAqB,CAAC;AAC7D,eAAO,IAAI,QAAQ,EAAE,KAAK,CAAC,UAAU,CAAC,GAAG,SAAqB,CAAC;AAC/D,eAAO,IAAI,IAAI,EAAE,KAAK,CAAC,MAAM,CAAC,GAAG,SAAqB,CAAC;AACvD,eAAO,IAAI,IAAI,EAAE,KAAK,CAAC,MAAM,CAAC,GAAG,SAAqB,CAAC;AACvD,eAAO,IAAI,cAAc,EAAE,KAAK,CAAC,gBAAgB,CAAC,GAAG,SAAqB,CAAC;AAC3E,eAAO,IAAI,0BAA0B,EAAE,KAAK,CAAC,4BAA4B,CAAC,GAAG,SAAqB,CAAC;AACnG,eAAO,IAAI,eAAe,EAAE,KAAK,CAAC,iBAAiB,CAAC,GAAG,SAAqB,CAAC;AAC7E,eAAO,IAAI,YAAY,EAAE,KAAK,CAAC,cAAc,CAAC,GAAG,SAAqB,CAAC;AACvE,eAAO,IAAI,cAAc,EAAE,KAAK,CAAC,gBAAgB,CAAC,GAAG,SAAqB,CAAC;AAE3E,wBAAgB,QAAQ,CAAC,KAAK,EAAE,KAAK,EAAE,OAAO,GAAE;IAAE,IAAI,EAAE,OAAO,CAAA;CAAoB,QAuBlF"} \ No newline at end of file diff --git a/node_modules/openai/_shims/registry.js b/node_modules/openai/_shims/registry.js new file mode 100644 index 0000000..824fbf6 --- /dev/null +++ b/node_modules/openai/_shims/registry.js @@ -0,0 +1,41 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.setShims = exports.isFsReadStream = exports.fileFromPath = exports.getDefaultAgent = exports.getMultipartRequestOptions = exports.ReadableStream = exports.File = exports.Blob = exports.FormData = exports.Headers = exports.Response = exports.Request = exports.fetch = exports.kind = exports.auto = void 0; +exports.auto = false; +exports.kind = undefined; +exports.fetch = undefined; +exports.Request = undefined; +exports.Response = undefined; +exports.Headers = undefined; +exports.FormData = undefined; +exports.Blob = undefined; +exports.File = undefined; +exports.ReadableStream = undefined; +exports.getMultipartRequestOptions = undefined; +exports.getDefaultAgent = undefined; +exports.fileFromPath = undefined; +exports.isFsReadStream = undefined; +function setShims(shims, options = { auto: false }) { + if (exports.auto) { + throw new Error(`you must \`import 'openai/shims/${shims.kind}'\` before importing anything else from openai`); + } + if (exports.kind) { + throw new Error(`can't \`import 'openai/shims/${shims.kind}'\` after \`import 'openai/shims/${exports.kind}'\``); + } + exports.auto = options.auto; + exports.kind = shims.kind; + exports.fetch = shims.fetch; + exports.Request = shims.Request; + exports.Response = shims.Response; + exports.Headers = shims.Headers; + exports.FormData = shims.FormData; + 
exports.Blob = shims.Blob; + exports.File = shims.File; + exports.ReadableStream = shims.ReadableStream; + exports.getMultipartRequestOptions = shims.getMultipartRequestOptions; + exports.getDefaultAgent = shims.getDefaultAgent; + exports.fileFromPath = shims.fileFromPath; + exports.isFsReadStream = shims.isFsReadStream; +} +exports.setShims = setShims; +//# sourceMappingURL=registry.js.map \ No newline at end of file diff --git a/node_modules/openai/_shims/registry.js.map b/node_modules/openai/_shims/registry.js.map new file mode 100644 index 0000000..4827a45 --- /dev/null +++ b/node_modules/openai/_shims/registry.js.map @@ -0,0 +1 @@ +{"version":3,"file":"registry.js","sourceRoot":"","sources":["../src/_shims/registry.ts"],"names":[],"mappings":";;;AA0BW,QAAA,IAAI,GAAG,KAAK,CAAC;AACb,QAAA,IAAI,GAA8B,SAAS,CAAC;AAC5C,QAAA,KAAK,GAA+B,SAAS,CAAC;AAC9C,QAAA,OAAO,GAAiC,SAAS,CAAC;AAClD,QAAA,QAAQ,GAAkC,SAAS,CAAC;AACpD,QAAA,OAAO,GAAiC,SAAS,CAAC;AAClD,QAAA,QAAQ,GAAkC,SAAS,CAAC;AACpD,QAAA,IAAI,GAA8B,SAAS,CAAC;AAC5C,QAAA,IAAI,GAA8B,SAAS,CAAC;AAC5C,QAAA,cAAc,GAAwC,SAAS,CAAC;AAChE,QAAA,0BAA0B,GAAoD,SAAS,CAAC;AACxF,QAAA,eAAe,GAAyC,SAAS,CAAC;AAClE,QAAA,YAAY,GAAsC,SAAS,CAAC;AAC5D,QAAA,cAAc,GAAwC,SAAS,CAAC;AAE3E,SAAgB,QAAQ,CAAC,KAAY,EAAE,UAA6B,EAAE,IAAI,EAAE,KAAK,EAAE;IACjF,IAAI,YAAI,EAAE;QACR,MAAM,IAAI,KAAK,CACb,mCAAmC,KAAK,CAAC,IAAI,gDAAgD,CAC9F,CAAC;KACH;IACD,IAAI,YAAI,EAAE;QACR,MAAM,IAAI,KAAK,CAAC,gCAAgC,KAAK,CAAC,IAAI,oCAAoC,YAAI,KAAK,CAAC,CAAC;KAC1G;IACD,YAAI,GAAG,OAAO,CAAC,IAAI,CAAC;IACpB,YAAI,GAAG,KAAK,CAAC,IAAI,CAAC;IAClB,aAAK,GAAG,KAAK,CAAC,KAAK,CAAC;IACpB,eAAO,GAAG,KAAK,CAAC,OAAO,CAAC;IACxB,gBAAQ,GAAG,KAAK,CAAC,QAAQ,CAAC;IAC1B,eAAO,GAAG,KAAK,CAAC,OAAO,CAAC;IACxB,gBAAQ,GAAG,KAAK,CAAC,QAAQ,CAAC;IAC1B,YAAI,GAAG,KAAK,CAAC,IAAI,CAAC;IAClB,YAAI,GAAG,KAAK,CAAC,IAAI,CAAC;IAClB,sBAAc,GAAG,KAAK,CAAC,cAAc,CAAC;IACtC,kCAA0B,GAAG,KAAK,CAAC,0BAA0B,CAAC;IAC9D,uBAAe,GAAG,KAAK,CAAC,eAAe,CAAC;IACxC,oBAAY,GAAG,KAAK,CAAC,YAAY,CAAC;IAClC,sBAAc,GAAG,KAAK,CAAC,cAAc,CAAC;AACxC,CAAC;AAvBD,4BAuBC"} \ No newline at end of file diff --git a/node_modules/openai/_shims/registry.mjs b/node_modules/openai/_shims/registry.mjs new file mode 100644 index 0000000..c757629 --- /dev/null +++ b/node_modules/openai/_shims/registry.mjs @@ -0,0 +1,37 @@ +export let auto = false; +export let kind = undefined; +export let fetch = undefined; +export let Request = undefined; +export let Response = undefined; +export let Headers = undefined; +export let FormData = undefined; +export let Blob = undefined; +export let File = undefined; +export let ReadableStream = undefined; +export let getMultipartRequestOptions = undefined; +export let getDefaultAgent = undefined; +export let fileFromPath = undefined; +export let isFsReadStream = undefined; +export function setShims(shims, options = { auto: false }) { + if (auto) { + throw new Error(`you must \`import 'openai/shims/${shims.kind}'\` before importing anything else from openai`); + } + if (kind) { + throw new Error(`can't \`import 'openai/shims/${shims.kind}'\` after \`import 'openai/shims/${kind}'\``); + } + auto = options.auto; + kind = shims.kind; + fetch = shims.fetch; + Request = shims.Request; + Response = shims.Response; + Headers = shims.Headers; + FormData = shims.FormData; + Blob = shims.Blob; + File = shims.File; + ReadableStream = shims.ReadableStream; + getMultipartRequestOptions = shims.getMultipartRequestOptions; + getDefaultAgent = shims.getDefaultAgent; + fileFromPath = shims.fileFromPath; + isFsReadStream = shims.isFsReadStream; +} +//# sourceMappingURL=registry.mjs.map \ No 
newline at end of file diff --git a/node_modules/openai/_shims/registry.mjs.map b/node_modules/openai/_shims/registry.mjs.map new file mode 100644 index 0000000..92ec20a --- /dev/null +++ b/node_modules/openai/_shims/registry.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"registry.mjs","sourceRoot":"","sources":["../src/_shims/registry.ts"],"names":[],"mappings":"AA0BA,MAAM,CAAC,IAAI,IAAI,GAAG,KAAK,CAAC;AACxB,MAAM,CAAC,IAAI,IAAI,GAA8B,SAAS,CAAC;AACvD,MAAM,CAAC,IAAI,KAAK,GAA+B,SAAS,CAAC;AACzD,MAAM,CAAC,IAAI,OAAO,GAAiC,SAAS,CAAC;AAC7D,MAAM,CAAC,IAAI,QAAQ,GAAkC,SAAS,CAAC;AAC/D,MAAM,CAAC,IAAI,OAAO,GAAiC,SAAS,CAAC;AAC7D,MAAM,CAAC,IAAI,QAAQ,GAAkC,SAAS,CAAC;AAC/D,MAAM,CAAC,IAAI,IAAI,GAA8B,SAAS,CAAC;AACvD,MAAM,CAAC,IAAI,IAAI,GAA8B,SAAS,CAAC;AACvD,MAAM,CAAC,IAAI,cAAc,GAAwC,SAAS,CAAC;AAC3E,MAAM,CAAC,IAAI,0BAA0B,GAAoD,SAAS,CAAC;AACnG,MAAM,CAAC,IAAI,eAAe,GAAyC,SAAS,CAAC;AAC7E,MAAM,CAAC,IAAI,YAAY,GAAsC,SAAS,CAAC;AACvE,MAAM,CAAC,IAAI,cAAc,GAAwC,SAAS,CAAC;AAE3E,MAAM,UAAU,QAAQ,CAAC,KAAY,EAAE,UAA6B,EAAE,IAAI,EAAE,KAAK,EAAE;IACjF,IAAI,IAAI,EAAE;QACR,MAAM,IAAI,KAAK,CACb,mCAAmC,KAAK,CAAC,IAAI,gDAAgD,CAC9F,CAAC;KACH;IACD,IAAI,IAAI,EAAE;QACR,MAAM,IAAI,KAAK,CAAC,gCAAgC,KAAK,CAAC,IAAI,oCAAoC,IAAI,KAAK,CAAC,CAAC;KAC1G;IACD,IAAI,GAAG,OAAO,CAAC,IAAI,CAAC;IACpB,IAAI,GAAG,KAAK,CAAC,IAAI,CAAC;IAClB,KAAK,GAAG,KAAK,CAAC,KAAK,CAAC;IACpB,OAAO,GAAG,KAAK,CAAC,OAAO,CAAC;IACxB,QAAQ,GAAG,KAAK,CAAC,QAAQ,CAAC;IAC1B,OAAO,GAAG,KAAK,CAAC,OAAO,CAAC;IACxB,QAAQ,GAAG,KAAK,CAAC,QAAQ,CAAC;IAC1B,IAAI,GAAG,KAAK,CAAC,IAAI,CAAC;IAClB,IAAI,GAAG,KAAK,CAAC,IAAI,CAAC;IAClB,cAAc,GAAG,KAAK,CAAC,cAAc,CAAC;IACtC,0BAA0B,GAAG,KAAK,CAAC,0BAA0B,CAAC;IAC9D,eAAe,GAAG,KAAK,CAAC,eAAe,CAAC;IACxC,YAAY,GAAG,KAAK,CAAC,YAAY,CAAC;IAClC,cAAc,GAAG,KAAK,CAAC,cAAc,CAAC;AACxC,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/_shims/web-runtime.d.ts b/node_modules/openai/_shims/web-runtime.d.ts new file mode 100644 index 0000000..4ae8077 --- /dev/null +++ b/node_modules/openai/_shims/web-runtime.d.ts @@ -0,0 +1,5 @@ +import { type Shims } from "./registry.js"; +export declare function getRuntime({ manuallyImported }?: { + manuallyImported?: boolean; +}): Shims; +//# sourceMappingURL=web-runtime.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/_shims/web-runtime.d.ts.map b/node_modules/openai/_shims/web-runtime.d.ts.map new file mode 100644 index 0000000..c6a1874 --- /dev/null +++ b/node_modules/openai/_shims/web-runtime.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"web-runtime.d.ts","sourceRoot":"","sources":["../src/_shims/web-runtime.ts"],"names":[],"mappings":"AAKA,OAAO,EAAE,KAAK,KAAK,EAAE,MAAM,YAAY,CAAC;AAExC,wBAAgB,UAAU,CAAC,EAAE,gBAAgB,EAAE,GAAE;IAAE,gBAAgB,CAAC,EAAE,OAAO,CAAA;CAAO,GAAG,KAAK,CA+F3F"} \ No newline at end of file diff --git a/node_modules/openai/_shims/web-runtime.js b/node_modules/openai/_shims/web-runtime.js new file mode 100644 index 0000000..f2f3732 --- /dev/null +++ b/node_modules/openai/_shims/web-runtime.js @@ -0,0 +1,78 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getRuntime = void 0; +/** + * Disclaimer: modules in _shims aren't intended to be imported by SDK users. + */ +const MultipartBody_1 = require("./MultipartBody.js"); +function getRuntime({ manuallyImported } = {}) { + const recommendation = manuallyImported ? 
+ `You may need to use polyfills` + : `Add one of these imports before your first \`import … from 'openai'\`: +- \`import 'openai/shims/node'\` (if you're running on Node) +- \`import 'openai/shims/web'\` (otherwise) +`; + let _fetch, _Request, _Response, _Headers; + try { + // @ts-ignore + _fetch = fetch; + // @ts-ignore + _Request = Request; + // @ts-ignore + _Response = Response; + // @ts-ignore + _Headers = Headers; + } + catch (error) { + throw new Error(`this environment is missing the following Web Fetch API type: ${error.message}. ${recommendation}`); + } + return { + kind: 'web', + fetch: _fetch, + Request: _Request, + Response: _Response, + Headers: _Headers, + FormData: + // @ts-ignore + typeof FormData !== 'undefined' ? FormData : (class FormData { + // @ts-ignore + constructor() { + throw new Error(`file uploads aren't supported in this environment yet as 'FormData' is undefined. ${recommendation}`); + } + }), + Blob: typeof Blob !== 'undefined' ? Blob : (class Blob { + constructor() { + throw new Error(`file uploads aren't supported in this environment yet as 'Blob' is undefined. ${recommendation}`); + } + }), + File: + // @ts-ignore + typeof File !== 'undefined' ? File : (class File { + // @ts-ignore + constructor() { + throw new Error(`file uploads aren't supported in this environment yet as 'File' is undefined. ${recommendation}`); + } + }), + ReadableStream: + // @ts-ignore + typeof ReadableStream !== 'undefined' ? ReadableStream : (class ReadableStream { + // @ts-ignore + constructor() { + throw new Error(`streaming isn't supported in this environment yet as 'ReadableStream' is undefined. ${recommendation}`); + } + }), + getMultipartRequestOptions: async ( + // @ts-ignore + form, opts) => ({ + ...opts, + body: new MultipartBody_1.MultipartBody(form), + }), + getDefaultAgent: (url) => undefined, + fileFromPath: () => { + throw new Error('The `fileFromPath` function is only supported in Node. 
See the README for more details: https://www.github.com/openai/openai-node#file-uploads'); + }, + isFsReadStream: (value) => false, + }; +} +exports.getRuntime = getRuntime; +//# sourceMappingURL=web-runtime.js.map \ No newline at end of file diff --git a/node_modules/openai/_shims/web-runtime.js.map b/node_modules/openai/_shims/web-runtime.js.map new file mode 100644 index 0000000..4aeb05f --- /dev/null +++ b/node_modules/openai/_shims/web-runtime.js.map @@ -0,0 +1 @@ +{"version":3,"file":"web-runtime.js","sourceRoot":"","sources":["../src/_shims/web-runtime.ts"],"names":[],"mappings":";;;AAAA;;GAEG;AACH,sDAAgD;AAIhD,SAAgB,UAAU,CAAC,EAAE,gBAAgB,KAAqC,EAAE;IAClF,MAAM,cAAc,GAClB,gBAAgB,CAAC,CAAC;QAChB,+BAA+B;QACjC,CAAC,CAAC;;;CAGL,CAAC;IAEA,IAAI,MAAM,EAAE,QAAQ,EAAE,SAAS,EAAE,QAAQ,CAAC;IAC1C,IAAI;QACF,aAAa;QACb,MAAM,GAAG,KAAK,CAAC;QACf,aAAa;QACb,QAAQ,GAAG,OAAO,CAAC;QACnB,aAAa;QACb,SAAS,GAAG,QAAQ,CAAC;QACrB,aAAa;QACb,QAAQ,GAAG,OAAO,CAAC;KACpB;IAAC,OAAO,KAAK,EAAE;QACd,MAAM,IAAI,KAAK,CACb,iEACG,KAAa,CAAC,OACjB,KAAK,cAAc,EAAE,CACtB,CAAC;KACH;IAED,OAAO;QACL,IAAI,EAAE,KAAK;QACX,KAAK,EAAE,MAAM;QACb,OAAO,EAAE,QAAQ;QACjB,QAAQ,EAAE,SAAS;QACnB,OAAO,EAAE,QAAQ;QACjB,QAAQ;QACN,aAAa;QACb,OAAO,QAAQ,KAAK,WAAW,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAC3C,MAAM,QAAQ;YACZ,aAAa;YACb;gBACE,MAAM,IAAI,KAAK,CACb,qFAAqF,cAAc,EAAE,CACtG,CAAC;YACJ,CAAC;SACF,CACF;QACH,IAAI,EACF,OAAO,IAAI,KAAK,WAAW,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CACnC,MAAM,IAAI;YACR;gBACE,MAAM,IAAI,KAAK,CACb,iFAAiF,cAAc,EAAE,CAClG,CAAC;YACJ,CAAC;SACF,CACF;QACH,IAAI;QACF,aAAa;QACb,OAAO,IAAI,KAAK,WAAW,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CACnC,MAAM,IAAI;YACR,aAAa;YACb;gBACE,MAAM,IAAI,KAAK,CACb,iFAAiF,cAAc,EAAE,CAClG,CAAC;YACJ,CAAC;SACF,CACF;QACH,cAAc;QACZ,aAAa;QACb,OAAO,cAAc,KAAK,WAAW,CAAC,CAAC,CAAC,cAAc,CAAC,CAAC,CAAC,CACvD,MAAM,cAAc;YAClB,aAAa;YACb;gBACE,MAAM,IAAI,KAAK,CACb,uFAAuF,cAAc,EAAE,CACxG,CAAC;YACJ,CAAC;SACF,CACF;QACH,0BAA0B,EAAE,KAAK;QAC/B,aAAa;QACb,IAAc,EACd,IAAuB,EACK,EAAE,CAAC,CAAC;YAChC,GAAG,IAAI;YACP,IAAI,EAAE,IAAI,6BAAa,CAAC,IAAI,CAAQ;SACrC,CAAC;QACF,eAAe,EAAE,CAAC,GAAW,EAAE,EAAE,CAAC,SAAS;QAC3C,YAAY,EAAE,GAAG,EAAE;YACjB,MAAM,IAAI,KAAK,CACb,gJAAgJ,CACjJ,CAAC;QACJ,CAAC;QACD,cAAc,EAAE,CAAC,KAAU,EAAE,EAAE,CAAC,KAAK;KACtC,CAAC;AACJ,CAAC;AA/FD,gCA+FC"} \ No newline at end of file diff --git a/node_modules/openai/_shims/web-runtime.mjs b/node_modules/openai/_shims/web-runtime.mjs new file mode 100644 index 0000000..691d508 --- /dev/null +++ b/node_modules/openai/_shims/web-runtime.mjs @@ -0,0 +1,71 @@ +import { MultipartBody } from "./MultipartBody.mjs"; +export function getRuntime({ manuallyImported } = {}) { + const recommendation = manuallyImported ? + `You may need to use polyfills` + : `Add one of these imports before your first \`import … from 'openai'\`: +- \`import 'openai/shims/node'\` (if you're running on Node) +- \`import 'openai/shims/web'\` (otherwise) +`; + let _fetch, _Request, _Response, _Headers; + try { + // @ts-ignore + _fetch = fetch; + // @ts-ignore + _Request = Request; + // @ts-ignore + _Response = Response; + // @ts-ignore + _Headers = Headers; + } + catch (error) { + throw new Error(`this environment is missing the following Web Fetch API type: ${error.message}. ${recommendation}`); + } + return { + kind: 'web', + fetch: _fetch, + Request: _Request, + Response: _Response, + Headers: _Headers, + FormData: + // @ts-ignore + typeof FormData !== 'undefined' ? FormData : (class FormData { + // @ts-ignore + constructor() { + throw new Error(`file uploads aren't supported in this environment yet as 'FormData' is undefined. 
${recommendation}`); + } + }), + Blob: typeof Blob !== 'undefined' ? Blob : (class Blob { + constructor() { + throw new Error(`file uploads aren't supported in this environment yet as 'Blob' is undefined. ${recommendation}`); + } + }), + File: + // @ts-ignore + typeof File !== 'undefined' ? File : (class File { + // @ts-ignore + constructor() { + throw new Error(`file uploads aren't supported in this environment yet as 'File' is undefined. ${recommendation}`); + } + }), + ReadableStream: + // @ts-ignore + typeof ReadableStream !== 'undefined' ? ReadableStream : (class ReadableStream { + // @ts-ignore + constructor() { + throw new Error(`streaming isn't supported in this environment yet as 'ReadableStream' is undefined. ${recommendation}`); + } + }), + getMultipartRequestOptions: async ( + // @ts-ignore + form, opts) => ({ + ...opts, + body: new MultipartBody(form), + }), + getDefaultAgent: (url) => undefined, + fileFromPath: () => { + throw new Error('The `fileFromPath` function is only supported in Node. See the README for more details: https://www.github.com/openai/openai-node#file-uploads'); + }, + isFsReadStream: (value) => false, + }; +} +//# sourceMappingURL=web-runtime.mjs.map \ No newline at end of file diff --git a/node_modules/openai/_shims/web-runtime.mjs.map b/node_modules/openai/_shims/web-runtime.mjs.map new file mode 100644 index 0000000..921ce6b --- /dev/null +++ b/node_modules/openai/_shims/web-runtime.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"web-runtime.mjs","sourceRoot":"","sources":["../src/_shims/web-runtime.ts"],"names":[],"mappings":"OAGO,EAAE,aAAa,EAAE;AAIxB,MAAM,UAAU,UAAU,CAAC,EAAE,gBAAgB,KAAqC,EAAE;IAClF,MAAM,cAAc,GAClB,gBAAgB,CAAC,CAAC;QAChB,+BAA+B;QACjC,CAAC,CAAC;;;CAGL,CAAC;IAEA,IAAI,MAAM,EAAE,QAAQ,EAAE,SAAS,EAAE,QAAQ,CAAC;IAC1C,IAAI;QACF,aAAa;QACb,MAAM,GAAG,KAAK,CAAC;QACf,aAAa;QACb,QAAQ,GAAG,OAAO,CAAC;QACnB,aAAa;QACb,SAAS,GAAG,QAAQ,CAAC;QACrB,aAAa;QACb,QAAQ,GAAG,OAAO,CAAC;KACpB;IAAC,OAAO,KAAK,EAAE;QACd,MAAM,IAAI,KAAK,CACb,iEACG,KAAa,CAAC,OACjB,KAAK,cAAc,EAAE,CACtB,CAAC;KACH;IAED,OAAO;QACL,IAAI,EAAE,KAAK;QACX,KAAK,EAAE,MAAM;QACb,OAAO,EAAE,QAAQ;QACjB,QAAQ,EAAE,SAAS;QACnB,OAAO,EAAE,QAAQ;QACjB,QAAQ;QACN,aAAa;QACb,OAAO,QAAQ,KAAK,WAAW,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAC3C,MAAM,QAAQ;YACZ,aAAa;YACb;gBACE,MAAM,IAAI,KAAK,CACb,qFAAqF,cAAc,EAAE,CACtG,CAAC;YACJ,CAAC;SACF,CACF;QACH,IAAI,EACF,OAAO,IAAI,KAAK,WAAW,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CACnC,MAAM,IAAI;YACR;gBACE,MAAM,IAAI,KAAK,CACb,iFAAiF,cAAc,EAAE,CAClG,CAAC;YACJ,CAAC;SACF,CACF;QACH,IAAI;QACF,aAAa;QACb,OAAO,IAAI,KAAK,WAAW,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CACnC,MAAM,IAAI;YACR,aAAa;YACb;gBACE,MAAM,IAAI,KAAK,CACb,iFAAiF,cAAc,EAAE,CAClG,CAAC;YACJ,CAAC;SACF,CACF;QACH,cAAc;QACZ,aAAa;QACb,OAAO,cAAc,KAAK,WAAW,CAAC,CAAC,CAAC,cAAc,CAAC,CAAC,CAAC,CACvD,MAAM,cAAc;YAClB,aAAa;YACb;gBACE,MAAM,IAAI,KAAK,CACb,uFAAuF,cAAc,EAAE,CACxG,CAAC;YACJ,CAAC;SACF,CACF;QACH,0BAA0B,EAAE,KAAK;QAC/B,aAAa;QACb,IAAc,EACd,IAAuB,EACK,EAAE,CAAC,CAAC;YAChC,GAAG,IAAI;YACP,IAAI,EAAE,IAAI,aAAa,CAAC,IAAI,CAAQ;SACrC,CAAC;QACF,eAAe,EAAE,CAAC,GAAW,EAAE,EAAE,CAAC,SAAS;QAC3C,YAAY,EAAE,GAAG,EAAE;YACjB,MAAM,IAAI,KAAK,CACb,gJAAgJ,CACjJ,CAAC;QACJ,CAAC;QACD,cAAc,EAAE,CAAC,KAAU,EAAE,EAAE,CAAC,KAAK;KACtC,CAAC;AACJ,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/_shims/web-types.d.ts b/node_modules/openai/_shims/web-types.d.ts new file mode 100644 index 0000000..4ff3513 --- /dev/null +++ b/node_modules/openai/_shims/web-types.d.ts @@ -0,0 +1,83 @@ +/** + * Disclaimer: modules in _shims aren't intended to be imported by SDK users. 
+ */ +export type Agent = any; + +declare const _fetch: typeof fetch; +export { _fetch as fetch }; + +type _Request = Request; +export { _Request as Request }; + +type _RequestInfo = RequestInfo; +export { type _RequestInfo as RequestInfo }; + +type _RequestInit = RequestInit; +export { type _RequestInit as RequestInit }; + +type _Response = Response; +export { _Response as Response }; + +type _ResponseInit = ResponseInit; +export { type _ResponseInit as ResponseInit }; + +type _ResponseType = ResponseType; +export { type _ResponseType as ResponseType }; + +type _BodyInit = BodyInit; +export { type _BodyInit as BodyInit }; + +type _Headers = Headers; +export { _Headers as Headers }; + +type _HeadersInit = HeadersInit; +export { type _HeadersInit as HeadersInit }; + +type EndingType = 'native' | 'transparent'; + +export interface BlobPropertyBag { + endings?: EndingType; + type?: string; +} + +export interface FilePropertyBag extends BlobPropertyBag { + lastModified?: number; +} + +export type FileFromPathOptions = Omit<FilePropertyBag, 'lastModified'>; + +type _FormData = FormData; +declare const _FormData: typeof FormData; +export { _FormData as FormData }; + +type _File = File; +declare const _File: typeof File; +export { _File as File }; + +type _Blob = Blob; +declare const _Blob: typeof Blob; +export { _Blob as Blob }; + +export declare class Readable { + readable: boolean; + readonly readableEnded: boolean; + readonly readableFlowing: boolean | null; + readonly readableHighWaterMark: number; + readonly readableLength: number; + readonly readableObjectMode: boolean; + destroyed: boolean; + read(size?: number): any; + pause(): this; + resume(): this; + isPaused(): boolean; + destroy(error?: Error): this; + [Symbol.asyncIterator](): AsyncIterableIterator<any>; +} + +export declare class FsReadStream extends Readable { + path: {}; // node type is string | Buffer +} + +type _ReadableStream = ReadableStream; +declare const _ReadableStream: typeof ReadableStream; +export { _ReadableStream as ReadableStream }; diff --git a/node_modules/openai/_shims/web-types.js b/node_modules/openai/_shims/web-types.js new file mode 100644 index 0000000..ddbdb79 --- /dev/null +++ b/node_modules/openai/_shims/web-types.js @@ -0,0 +1,3 @@ +/** + * Disclaimer: modules in _shims aren't intended to be imported by SDK users. + */ diff --git a/node_modules/openai/_shims/web-types.mjs b/node_modules/openai/_shims/web-types.mjs new file mode 100644 index 0000000..ddbdb79 --- /dev/null +++ b/node_modules/openai/_shims/web-types.mjs @@ -0,0 +1,3 @@ +/** + * Disclaimer: modules in _shims aren't intended to be imported by SDK users.
+ */ diff --git a/node_modules/openai/_vendor/partial-json-parser/parser.d.ts b/node_modules/openai/_vendor/partial-json-parser/parser.d.ts new file mode 100644 index 0000000..5ef5c37 --- /dev/null +++ b/node_modules/openai/_vendor/partial-json-parser/parser.d.ts @@ -0,0 +1,7 @@ +declare class PartialJSON extends Error { +} +declare class MalformedJSON extends Error { +} +declare const partialParse: (input: string) => any; +export { partialParse, PartialJSON, MalformedJSON }; +//# sourceMappingURL=parser.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/partial-json-parser/parser.d.ts.map b/node_modules/openai/_vendor/partial-json-parser/parser.d.ts.map new file mode 100644 index 0000000..1e3bbc9 --- /dev/null +++ b/node_modules/openai/_vendor/partial-json-parser/parser.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"parser.d.ts","sourceRoot":"","sources":["../../src/_vendor/partial-json-parser/parser.ts"],"names":[],"mappings":"AAkCA,cAAM,WAAY,SAAQ,KAAK;CAAG;AAElC,cAAM,aAAc,SAAQ,KAAK;CAAG;AAgNpC,QAAA,MAAM,YAAY,UAAW,MAAM,QAA4C,CAAC;AAEhF,OAAO,EAAE,YAAY,EAAE,WAAW,EAAE,aAAa,EAAE,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/partial-json-parser/parser.js b/node_modules/openai/_vendor/partial-json-parser/parser.js new file mode 100644 index 0000000..83126bc --- /dev/null +++ b/node_modules/openai/_vendor/partial-json-parser/parser.js @@ -0,0 +1,246 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.MalformedJSON = exports.PartialJSON = exports.partialParse = void 0; +const STR = 0b000000001; +const NUM = 0b000000010; +const ARR = 0b000000100; +const OBJ = 0b000001000; +const NULL = 0b000010000; +const BOOL = 0b000100000; +const NAN = 0b001000000; +const INFINITY = 0b010000000; +const MINUS_INFINITY = 0b100000000; +const INF = INFINITY | MINUS_INFINITY; +const SPECIAL = NULL | BOOL | INF | NAN; +const ATOM = STR | NUM | SPECIAL; +const COLLECTION = ARR | OBJ; +const ALL = ATOM | COLLECTION; +const Allow = { + STR, + NUM, + ARR, + OBJ, + NULL, + BOOL, + NAN, + INFINITY, + MINUS_INFINITY, + INF, + SPECIAL, + ATOM, + COLLECTION, + ALL, +}; +// The JSON string segment was unable to be parsed completely +class PartialJSON extends Error { +} +exports.PartialJSON = PartialJSON; +class MalformedJSON extends Error { +} +exports.MalformedJSON = MalformedJSON; +/** + * Parse incomplete JSON + * @param {string} jsonString Partial JSON to be parsed + * @param {number} allowPartial Specify what types are allowed to be partial, see {@link Allow} for details + * @returns The parsed JSON + * @throws {PartialJSON} If the JSON is incomplete (related to the `allow` parameter) + * @throws {MalformedJSON} If the JSON is malformed + */ +function parseJSON(jsonString, allowPartial = Allow.ALL) { + if (typeof jsonString !== 'string') { + throw new TypeError(`expecting str, got ${typeof jsonString}`); + } + if (!jsonString.trim()) { + throw new Error(`${jsonString} is empty`); + } + return _parseJSON(jsonString.trim(), allowPartial); +} +const _parseJSON = (jsonString, allow) => { + const length = jsonString.length; + let index = 0; + const markPartialJSON = (msg) => { + throw new PartialJSON(`${msg} at position ${index}`); + }; + const throwMalformedError = (msg) => { + throw new MalformedJSON(`${msg} at position ${index}`); + }; + const parseAny = () => { + skipBlank(); + if (index >= length) + markPartialJSON('Unexpected end of input'); + if (jsonString[index] === '"') + return parseStr(); + if (jsonString[index] === '{') + 
return parseObj(); + if (jsonString[index] === '[') + return parseArr(); + if (jsonString.substring(index, index + 4) === 'null' || + (Allow.NULL & allow && length - index < 4 && 'null'.startsWith(jsonString.substring(index)))) { + index += 4; + return null; + } + if (jsonString.substring(index, index + 4) === 'true' || + (Allow.BOOL & allow && length - index < 4 && 'true'.startsWith(jsonString.substring(index)))) { + index += 4; + return true; + } + if (jsonString.substring(index, index + 5) === 'false' || + (Allow.BOOL & allow && length - index < 5 && 'false'.startsWith(jsonString.substring(index)))) { + index += 5; + return false; + } + if (jsonString.substring(index, index + 8) === 'Infinity' || + (Allow.INFINITY & allow && length - index < 8 && 'Infinity'.startsWith(jsonString.substring(index)))) { + index += 8; + return Infinity; + } + if (jsonString.substring(index, index + 9) === '-Infinity' || + (Allow.MINUS_INFINITY & allow && + 1 < length - index && + length - index < 9 && + '-Infinity'.startsWith(jsonString.substring(index)))) { + index += 9; + return -Infinity; + } + if (jsonString.substring(index, index + 3) === 'NaN' || + (Allow.NAN & allow && length - index < 3 && 'NaN'.startsWith(jsonString.substring(index)))) { + index += 3; + return NaN; + } + return parseNum(); + }; + const parseStr = () => { + const start = index; + let escape = false; + index++; // skip initial quote + while (index < length && (jsonString[index] !== '"' || (escape && jsonString[index - 1] === '\\'))) { + escape = jsonString[index] === '\\' ? !escape : false; + index++; + } + if (jsonString.charAt(index) == '"') { + try { + return JSON.parse(jsonString.substring(start, ++index - Number(escape))); + } + catch (e) { + throwMalformedError(String(e)); + } + } + else if (Allow.STR & allow) { + try { + return JSON.parse(jsonString.substring(start, index - Number(escape)) + '"'); + } + catch (e) { + // SyntaxError: Invalid escape sequence + return JSON.parse(jsonString.substring(start, jsonString.lastIndexOf('\\')) + '"'); + } + } + markPartialJSON('Unterminated string literal'); + }; + const parseObj = () => { + index++; // skip initial brace + skipBlank(); + const obj = {}; + try { + while (jsonString[index] !== '}') { + skipBlank(); + if (index >= length && Allow.OBJ & allow) + return obj; + const key = parseStr(); + skipBlank(); + index++; // skip colon + try { + const value = parseAny(); + Object.defineProperty(obj, key, { value, writable: true, enumerable: true, configurable: true }); + } + catch (e) { + if (Allow.OBJ & allow) + return obj; + else + throw e; + } + skipBlank(); + if (jsonString[index] === ',') + index++; // skip comma + } + } + catch (e) { + if (Allow.OBJ & allow) + return obj; + else + markPartialJSON("Expected '}' at end of object"); + } + index++; // skip final brace + return obj; + }; + const parseArr = () => { + index++; // skip initial bracket + const arr = []; + try { + while (jsonString[index] !== ']') { + arr.push(parseAny()); + skipBlank(); + if (jsonString[index] === ',') { + index++; // skip comma + } + } + } + catch (e) { + if (Allow.ARR & allow) { + return arr; + } + markPartialJSON("Expected ']' at end of array"); + } + index++; // skip final bracket + return arr; + }; + const parseNum = () => { + if (index === 0) { + if (jsonString === '-' && Allow.NUM & allow) + markPartialJSON("Not sure what '-' is"); + try { + return JSON.parse(jsonString); + } + catch (e) { + if (Allow.NUM & allow) { + try { + if ('.' 
=== jsonString[jsonString.length - 1]) + return JSON.parse(jsonString.substring(0, jsonString.lastIndexOf('.'))); + return JSON.parse(jsonString.substring(0, jsonString.lastIndexOf('e'))); + } + catch (e) { } + } + throwMalformedError(String(e)); + } + } + const start = index; + if (jsonString[index] === '-') + index++; + while (jsonString[index] && !',]}'.includes(jsonString[index])) + index++; + if (index == length && !(Allow.NUM & allow)) + markPartialJSON('Unterminated number literal'); + try { + return JSON.parse(jsonString.substring(start, index)); + } + catch (e) { + if (jsonString.substring(start, index) === '-' && Allow.NUM & allow) + markPartialJSON("Not sure what '-' is"); + try { + return JSON.parse(jsonString.substring(start, jsonString.lastIndexOf('e'))); + } + catch (e) { + throwMalformedError(String(e)); + } + } + }; + const skipBlank = () => { + while (index < length && ' \n\r\t'.includes(jsonString[index])) { + index++; + } + }; + return parseAny(); +}; +// using this function with malformed JSON is undefined behavior +const partialParse = (input) => parseJSON(input, Allow.ALL ^ Allow.NUM); +exports.partialParse = partialParse; +//# sourceMappingURL=parser.js.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/partial-json-parser/parser.js.map b/node_modules/openai/_vendor/partial-json-parser/parser.js.map new file mode 100644 index 0000000..019fb93 --- /dev/null +++ b/node_modules/openai/_vendor/partial-json-parser/parser.js.map @@ -0,0 +1 @@ +{"version":3,"file":"parser.js","sourceRoot":"","sources":["../../src/_vendor/partial-json-parser/parser.ts"],"names":[],"mappings":";;;AAAA,MAAM,GAAG,GAAG,WAAW,CAAC;AACxB,MAAM,GAAG,GAAG,WAAW,CAAC;AACxB,MAAM,GAAG,GAAG,WAAW,CAAC;AACxB,MAAM,GAAG,GAAG,WAAW,CAAC;AACxB,MAAM,IAAI,GAAG,WAAW,CAAC;AACzB,MAAM,IAAI,GAAG,WAAW,CAAC;AACzB,MAAM,GAAG,GAAG,WAAW,CAAC;AACxB,MAAM,QAAQ,GAAG,WAAW,CAAC;AAC7B,MAAM,cAAc,GAAG,WAAW,CAAC;AAEnC,MAAM,GAAG,GAAG,QAAQ,GAAG,cAAc,CAAC;AACtC,MAAM,OAAO,GAAG,IAAI,GAAG,IAAI,GAAG,GAAG,GAAG,GAAG,CAAC;AACxC,MAAM,IAAI,GAAG,GAAG,GAAG,GAAG,GAAG,OAAO,CAAC;AACjC,MAAM,UAAU,GAAG,GAAG,GAAG,GAAG,CAAC;AAC7B,MAAM,GAAG,GAAG,IAAI,GAAG,UAAU,CAAC;AAE9B,MAAM,KAAK,GAAG;IACZ,GAAG;IACH,GAAG;IACH,GAAG;IACH,GAAG;IACH,IAAI;IACJ,IAAI;IACJ,GAAG;IACH,QAAQ;IACR,cAAc;IACd,GAAG;IACH,OAAO;IACP,IAAI;IACJ,UAAU;IACV,GAAG;CACJ,CAAC;AAEF,6DAA6D;AAC7D,MAAM,WAAY,SAAQ,KAAK;CAAG;AAoNX,kCAAW;AAlNlC,MAAM,aAAc,SAAQ,KAAK;CAAG;AAkNA,sCAAa;AAhNjD;;;;;;;GAOG;AACH,SAAS,SAAS,CAAC,UAAkB,EAAE,eAAuB,KAAK,CAAC,GAAG;IACrE,IAAI,OAAO,UAAU,KAAK,QAAQ,EAAE;QAClC,MAAM,IAAI,SAAS,CAAC,sBAAsB,OAAO,UAAU,EAAE,CAAC,CAAC;KAChE;IACD,IAAI,CAAC,UAAU,CAAC,IAAI,EAAE,EAAE;QACtB,MAAM,IAAI,KAAK,CAAC,GAAG,UAAU,WAAW,CAAC,CAAC;KAC3C;IACD,OAAO,UAAU,CAAC,UAAU,CAAC,IAAI,EAAE,EAAE,YAAY,CAAC,CAAC;AACrD,CAAC;AAED,MAAM,UAAU,GAAG,CAAC,UAAkB,EAAE,KAAa,EAAE,EAAE;IACvD,MAAM,MAAM,GAAG,UAAU,CAAC,MAAM,CAAC;IACjC,IAAI,KAAK,GAAG,CAAC,CAAC;IAEd,MAAM,eAAe,GAAG,CAAC,GAAW,EAAE,EAAE;QACtC,MAAM,IAAI,WAAW,CAAC,GAAG,GAAG,gBAAgB,KAAK,EAAE,CAAC,CAAC;IACvD,CAAC,CAAC;IAEF,MAAM,mBAAmB,GAAG,CAAC,GAAW,EAAE,EAAE;QAC1C,MAAM,IAAI,aAAa,CAAC,GAAG,GAAG,gBAAgB,KAAK,EAAE,CAAC,CAAC;IACzD,CAAC,CAAC;IAEF,MAAM,QAAQ,GAAc,GAAG,EAAE;QAC/B,SAAS,EAAE,CAAC;QACZ,IAAI,KAAK,IAAI,MAAM;YAAE,eAAe,CAAC,yBAAyB,CAAC,CAAC;QAChE,IAAI,UAAU,CAAC,KAAK,CAAC,KAAK,GAAG;YAAE,OAAO,QAAQ,EAAE,CAAC;QACjD,IAAI,UAAU,CAAC,KAAK,CAAC,KAAK,GAAG;YAAE,OAAO,QAAQ,EAAE,CAAC;QACjD,IAAI,UAAU,CAAC,KAAK,CAAC,KAAK,GAAG;YAAE,OAAO,QAAQ,EAAE,CAAC;QACjD,IACE,UAAU,CAAC,SAAS,CAAC,KAAK,EAAE,KAAK,GAAG,CAAC,CAAC,KAAK,MAAM;YACjD,CAAC,KAAK,CAAC,IAAI,GAAG,KAAK,IAAI,MAAM,GAAG
,KAAK,GAAG,CAAC,IAAI,MAAM,CAAC,UAAU,CAAC,UAAU,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,CAAC,EAC5F;YACA,KAAK,IAAI,CAAC,CAAC;YACX,OAAO,IAAI,CAAC;SACb;QACD,IACE,UAAU,CAAC,SAAS,CAAC,KAAK,EAAE,KAAK,GAAG,CAAC,CAAC,KAAK,MAAM;YACjD,CAAC,KAAK,CAAC,IAAI,GAAG,KAAK,IAAI,MAAM,GAAG,KAAK,GAAG,CAAC,IAAI,MAAM,CAAC,UAAU,CAAC,UAAU,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,CAAC,EAC5F;YACA,KAAK,IAAI,CAAC,CAAC;YACX,OAAO,IAAI,CAAC;SACb;QACD,IACE,UAAU,CAAC,SAAS,CAAC,KAAK,EAAE,KAAK,GAAG,CAAC,CAAC,KAAK,OAAO;YAClD,CAAC,KAAK,CAAC,IAAI,GAAG,KAAK,IAAI,MAAM,GAAG,KAAK,GAAG,CAAC,IAAI,OAAO,CAAC,UAAU,CAAC,UAAU,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,CAAC,EAC7F;YACA,KAAK,IAAI,CAAC,CAAC;YACX,OAAO,KAAK,CAAC;SACd;QACD,IACE,UAAU,CAAC,SAAS,CAAC,KAAK,EAAE,KAAK,GAAG,CAAC,CAAC,KAAK,UAAU;YACrD,CAAC,KAAK,CAAC,QAAQ,GAAG,KAAK,IAAI,MAAM,GAAG,KAAK,GAAG,CAAC,IAAI,UAAU,CAAC,UAAU,CAAC,UAAU,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,CAAC,EACpG;YACA,KAAK,IAAI,CAAC,CAAC;YACX,OAAO,QAAQ,CAAC;SACjB;QACD,IACE,UAAU,CAAC,SAAS,CAAC,KAAK,EAAE,KAAK,GAAG,CAAC,CAAC,KAAK,WAAW;YACtD,CAAC,KAAK,CAAC,cAAc,GAAG,KAAK;gBAC3B,CAAC,GAAG,MAAM,GAAG,KAAK;gBAClB,MAAM,GAAG,KAAK,GAAG,CAAC;gBAClB,WAAW,CAAC,UAAU,CAAC,UAAU,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,CAAC,EACtD;YACA,KAAK,IAAI,CAAC,CAAC;YACX,OAAO,CAAC,QAAQ,CAAC;SAClB;QACD,IACE,UAAU,CAAC,SAAS,CAAC,KAAK,EAAE,KAAK,GAAG,CAAC,CAAC,KAAK,KAAK;YAChD,CAAC,KAAK,CAAC,GAAG,GAAG,KAAK,IAAI,MAAM,GAAG,KAAK,GAAG,CAAC,IAAI,KAAK,CAAC,UAAU,CAAC,UAAU,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,CAAC,EAC1F;YACA,KAAK,IAAI,CAAC,CAAC;YACX,OAAO,GAAG,CAAC;SACZ;QACD,OAAO,QAAQ,EAAE,CAAC;IACpB,CAAC,CAAC;IAEF,MAAM,QAAQ,GAAiB,GAAG,EAAE;QAClC,MAAM,KAAK,GAAG,KAAK,CAAC;QACpB,IAAI,MAAM,GAAG,KAAK,CAAC;QACnB,KAAK,EAAE,CAAC,CAAC,qBAAqB;QAC9B,OAAO,KAAK,GAAG,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,CAAC,KAAK,GAAG,IAAI,CAAC,MAAM,IAAI,UAAU,CAAC,KAAK,GAAG,CAAC,CAAC,KAAK,IAAI,CAAC,CAAC,EAAE;YAClG,MAAM,GAAG,UAAU,CAAC,KAAK,CAAC,KAAK,IAAI,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC;YACtD,KAAK,EAAE,CAAC;SACT;QACD,IAAI,UAAU,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,GAAG,EAAE;YACnC,IAAI;gBACF,OAAO,IAAI,CAAC,KAAK,CAAC,UAAU,CAAC,SAAS,CAAC,KAAK,EAAE,EAAE,KAAK,GAAG,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC;aAC1E;YAAC,OAAO,CAAC,EAAE;gBACV,mBAAmB,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;aAChC;SACF;aAAM,IAAI,KAAK,CAAC,GAAG,GAAG,KAAK,EAAE;YAC5B,IAAI;gBACF,OAAO,IAAI,CAAC,KAAK,CAAC,UAAU,CAAC,SAAS,CAAC,KAAK,EAAE,KAAK,GAAG,MAAM,CAAC,MAAM,CAAC,CAAC,GAAG,GAAG,CAAC,CAAC;aAC9E;YAAC,OAAO,CAAC,EAAE;gBACV,uCAAuC;gBACvC,OAAO,IAAI,CAAC,KAAK,CAAC,UAAU,CAAC,SAAS,CAAC,KAAK,EAAE,UAAU,CAAC,WAAW,CAAC,IAAI,CAAC,CAAC,GAAG,GAAG,CAAC,CAAC;aACpF;SACF;QACD,eAAe,CAAC,6BAA6B,CAAC,CAAC;IACjD,CAAC,CAAC;IAEF,MAAM,QAAQ,GAAG,GAAG,EAAE;QACpB,KAAK,EAAE,CAAC,CAAC,qBAAqB;QAC9B,SAAS,EAAE,CAAC;QACZ,MAAM,GAAG,GAAwB,EAAE,CAAC;QACpC,IAAI;YACF,OAAO,UAAU,CAAC,KAAK,CAAC,KAAK,GAAG,EAAE;gBAChC,SAAS,EAAE,CAAC;gBACZ,IAAI,KAAK,IAAI,MAAM,IAAI,KAAK,CAAC,GAAG,GAAG,KAAK;oBAAE,OAAO,GAAG,CAAC;gBACrD,MAAM,GAAG,GAAG,QAAQ,EAAE,CAAC;gBACvB,SAAS,EAAE,CAAC;gBACZ,KAAK,EAAE,CAAC,CAAC,aAAa;gBACtB,IAAI;oBACF,MAAM,KAAK,GAAG,QAAQ,EAAE,CAAC;oBACzB,MAAM,CAAC,cAAc,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE,KAAK,EAAE,QAAQ,EAAE,IAAI,EAAE,UAAU,EAAE,IAAI,EAAE,YAAY,EAAE,IAAI,EAAE,CAAC,CAAC;iBAClG;gBAAC,OAAO,CAAC,EAAE;oBACV,IAAI,KAAK,CAAC,GAAG,GAAG,KAAK;wBAAE,OAAO,GAAG,CAAC;;wBAC7B,MAAM,CAAC,CAAC;iBACd;gBACD,SAAS,EAAE,CAAC;gBACZ,IAAI,UAAU,CAAC,KAAK,CAAC,KAAK,GAAG;oBAAE,KAAK,EAAE,CAAC,CAAC,aAAa;aACtD;SACF;QAAC,OAAO,CAAC,EAAE;YACV,IAAI,KAAK,CAAC,GAAG,GAAG,KAAK;gBAAE,OAAO,GAAG,CAAC;;gBAC7B,eAAe,CAAC,+BAA+B,CAAC,CAAC;SACvD;QACD,KAAK,EAAE,CAAC,CAAC,mBAAmB;QAC5B,OAAO,GAAG,CAAC;IACb,CAAC,CAAC;IAEF,MAAM,QAAQ,GAAG,GAAG,EAAE;QACpB,KAAK,EAAE,CAAC,CAAC,uBAAuB;QAChC,MAAM
,GAAG,GAAG,EAAE,CAAC;QACf,IAAI;YACF,OAAO,UAAU,CAAC,KAAK,CAAC,KAAK,GAAG,EAAE;gBAChC,GAAG,CAAC,IAAI,CAAC,QAAQ,EAAE,CAAC,CAAC;gBACrB,SAAS,EAAE,CAAC;gBACZ,IAAI,UAAU,CAAC,KAAK,CAAC,KAAK,GAAG,EAAE;oBAC7B,KAAK,EAAE,CAAC,CAAC,aAAa;iBACvB;aACF;SACF;QAAC,OAAO,CAAC,EAAE;YACV,IAAI,KAAK,CAAC,GAAG,GAAG,KAAK,EAAE;gBACrB,OAAO,GAAG,CAAC;aACZ;YACD,eAAe,CAAC,8BAA8B,CAAC,CAAC;SACjD;QACD,KAAK,EAAE,CAAC,CAAC,qBAAqB;QAC9B,OAAO,GAAG,CAAC;IACb,CAAC,CAAC;IAEF,MAAM,QAAQ,GAAG,GAAG,EAAE;QACpB,IAAI,KAAK,KAAK,CAAC,EAAE;YACf,IAAI,UAAU,KAAK,GAAG,IAAI,KAAK,CAAC,GAAG,GAAG,KAAK;gBAAE,eAAe,CAAC,sBAAsB,CAAC,CAAC;YACrF,IAAI;gBACF,OAAO,IAAI,CAAC,KAAK,CAAC,UAAU,CAAC,CAAC;aAC/B;YAAC,OAAO,CAAC,EAAE;gBACV,IAAI,KAAK,CAAC,GAAG,GAAG,KAAK,EAAE;oBACrB,IAAI;wBACF,IAAI,GAAG,KAAK,UAAU,CAAC,UAAU,CAAC,MAAM,GAAG,CAAC,CAAC;4BAC3C,OAAO,IAAI,CAAC,KAAK,CAAC,UAAU,CAAC,SAAS,CAAC,CAAC,EAAE,UAAU,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;wBAC1E,OAAO,IAAI,CAAC,KAAK,CAAC,UAAU,CAAC,SAAS,CAAC,CAAC,EAAE,UAAU,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;qBACzE;oBAAC,OAAO,CAAC,EAAE,GAAE;iBACf;gBACD,mBAAmB,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;aAChC;SACF;QAED,MAAM,KAAK,GAAG,KAAK,CAAC;QAEpB,IAAI,UAAU,CAAC,KAAK,CAAC,KAAK,GAAG;YAAE,KAAK,EAAE,CAAC;QACvC,OAAO,UAAU,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,QAAQ,CAAC,UAAU,CAAC,KAAK,CAAE,CAAC;YAAE,KAAK,EAAE,CAAC;QAEzE,IAAI,KAAK,IAAI,MAAM,IAAI,CAAC,CAAC,KAAK,CAAC,GAAG,GAAG,KAAK,CAAC;YAAE,eAAe,CAAC,6BAA6B,CAAC,CAAC;QAE5F,IAAI;YACF,OAAO,IAAI,CAAC,KAAK,CAAC,UAAU,CAAC,SAAS,CAAC,KAAK,EAAE,KAAK,CAAC,CAAC,CAAC;SACvD;QAAC,OAAO,CAAC,EAAE;YACV,IAAI,UAAU,CAAC,SAAS,CAAC,KAAK,EAAE,KAAK,CAAC,KAAK,GAAG,IAAI,KAAK,CAAC,GAAG,GAAG,KAAK;gBACjE,eAAe,CAAC,sBAAsB,CAAC,CAAC;YAC1C,IAAI;gBACF,OAAO,IAAI,CAAC,KAAK,CAAC,UAAU,CAAC,SAAS,CAAC,KAAK,EAAE,UAAU,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;aAC7E;YAAC,OAAO,CAAC,EAAE;gBACV,mBAAmB,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;aAChC;SACF;IACH,CAAC,CAAC;IAEF,MAAM,SAAS,GAAG,GAAG,EAAE;QACrB,OAAO,KAAK,GAAG,MAAM,IAAI,SAAS,CAAC,QAAQ,CAAC,UAAU,CAAC,KAAK,CAAE,CAAC,EAAE;YAC/D,KAAK,EAAE,CAAC;SACT;IACH,CAAC,CAAC;IAEF,OAAO,QAAQ,EAAE,CAAC;AACpB,CAAC,CAAC;AAEF,gEAAgE;AAChE,MAAM,YAAY,GAAG,CAAC,KAAa,EAAE,EAAE,CAAC,SAAS,CAAC,KAAK,EAAE,KAAK,CAAC,GAAG,GAAG,KAAK,CAAC,GAAG,CAAC,CAAC;AAEvE,oCAAY"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/partial-json-parser/parser.mjs b/node_modules/openai/_vendor/partial-json-parser/parser.mjs new file mode 100644 index 0000000..f8954e1 --- /dev/null +++ b/node_modules/openai/_vendor/partial-json-parser/parser.mjs @@ -0,0 +1,241 @@ +const STR = 0b000000001; +const NUM = 0b000000010; +const ARR = 0b000000100; +const OBJ = 0b000001000; +const NULL = 0b000010000; +const BOOL = 0b000100000; +const NAN = 0b001000000; +const INFINITY = 0b010000000; +const MINUS_INFINITY = 0b100000000; +const INF = INFINITY | MINUS_INFINITY; +const SPECIAL = NULL | BOOL | INF | NAN; +const ATOM = STR | NUM | SPECIAL; +const COLLECTION = ARR | OBJ; +const ALL = ATOM | COLLECTION; +const Allow = { + STR, + NUM, + ARR, + OBJ, + NULL, + BOOL, + NAN, + INFINITY, + MINUS_INFINITY, + INF, + SPECIAL, + ATOM, + COLLECTION, + ALL, +}; +// The JSON string segment was unable to be parsed completely +class PartialJSON extends Error { +} +class MalformedJSON extends Error { +} +/** + * Parse incomplete JSON + * @param {string} jsonString Partial JSON to be parsed + * @param {number} allowPartial Specify what types are allowed to be partial, see {@link Allow} for details + * @returns The parsed JSON + * @throws {PartialJSON} If the JSON is incomplete (related to the `allow` parameter) + * @throws {MalformedJSON} If the 
JSON is malformed + */ +function parseJSON(jsonString, allowPartial = Allow.ALL) { + if (typeof jsonString !== 'string') { + throw new TypeError(`expecting str, got ${typeof jsonString}`); + } + if (!jsonString.trim()) { + throw new Error(`${jsonString} is empty`); + } + return _parseJSON(jsonString.trim(), allowPartial); +} +const _parseJSON = (jsonString, allow) => { + const length = jsonString.length; + let index = 0; + const markPartialJSON = (msg) => { + throw new PartialJSON(`${msg} at position ${index}`); + }; + const throwMalformedError = (msg) => { + throw new MalformedJSON(`${msg} at position ${index}`); + }; + const parseAny = () => { + skipBlank(); + if (index >= length) + markPartialJSON('Unexpected end of input'); + if (jsonString[index] === '"') + return parseStr(); + if (jsonString[index] === '{') + return parseObj(); + if (jsonString[index] === '[') + return parseArr(); + if (jsonString.substring(index, index + 4) === 'null' || + (Allow.NULL & allow && length - index < 4 && 'null'.startsWith(jsonString.substring(index)))) { + index += 4; + return null; + } + if (jsonString.substring(index, index + 4) === 'true' || + (Allow.BOOL & allow && length - index < 4 && 'true'.startsWith(jsonString.substring(index)))) { + index += 4; + return true; + } + if (jsonString.substring(index, index + 5) === 'false' || + (Allow.BOOL & allow && length - index < 5 && 'false'.startsWith(jsonString.substring(index)))) { + index += 5; + return false; + } + if (jsonString.substring(index, index + 8) === 'Infinity' || + (Allow.INFINITY & allow && length - index < 8 && 'Infinity'.startsWith(jsonString.substring(index)))) { + index += 8; + return Infinity; + } + if (jsonString.substring(index, index + 9) === '-Infinity' || + (Allow.MINUS_INFINITY & allow && + 1 < length - index && + length - index < 9 && + '-Infinity'.startsWith(jsonString.substring(index)))) { + index += 9; + return -Infinity; + } + if (jsonString.substring(index, index + 3) === 'NaN' || + (Allow.NAN & allow && length - index < 3 && 'NaN'.startsWith(jsonString.substring(index)))) { + index += 3; + return NaN; + } + return parseNum(); + }; + const parseStr = () => { + const start = index; + let escape = false; + index++; // skip initial quote + while (index < length && (jsonString[index] !== '"' || (escape && jsonString[index - 1] === '\\'))) { + escape = jsonString[index] === '\\' ? 
!escape : false; + index++; + } + if (jsonString.charAt(index) == '"') { + try { + return JSON.parse(jsonString.substring(start, ++index - Number(escape))); + } + catch (e) { + throwMalformedError(String(e)); + } + } + else if (Allow.STR & allow) { + try { + return JSON.parse(jsonString.substring(start, index - Number(escape)) + '"'); + } + catch (e) { + // SyntaxError: Invalid escape sequence + return JSON.parse(jsonString.substring(start, jsonString.lastIndexOf('\\')) + '"'); + } + } + markPartialJSON('Unterminated string literal'); + }; + const parseObj = () => { + index++; // skip initial brace + skipBlank(); + const obj = {}; + try { + while (jsonString[index] !== '}') { + skipBlank(); + if (index >= length && Allow.OBJ & allow) + return obj; + const key = parseStr(); + skipBlank(); + index++; // skip colon + try { + const value = parseAny(); + Object.defineProperty(obj, key, { value, writable: true, enumerable: true, configurable: true }); + } + catch (e) { + if (Allow.OBJ & allow) + return obj; + else + throw e; + } + skipBlank(); + if (jsonString[index] === ',') + index++; // skip comma + } + } + catch (e) { + if (Allow.OBJ & allow) + return obj; + else + markPartialJSON("Expected '}' at end of object"); + } + index++; // skip final brace + return obj; + }; + const parseArr = () => { + index++; // skip initial bracket + const arr = []; + try { + while (jsonString[index] !== ']') { + arr.push(parseAny()); + skipBlank(); + if (jsonString[index] === ',') { + index++; // skip comma + } + } + } + catch (e) { + if (Allow.ARR & allow) { + return arr; + } + markPartialJSON("Expected ']' at end of array"); + } + index++; // skip final bracket + return arr; + }; + const parseNum = () => { + if (index === 0) { + if (jsonString === '-' && Allow.NUM & allow) + markPartialJSON("Not sure what '-' is"); + try { + return JSON.parse(jsonString); + } + catch (e) { + if (Allow.NUM & allow) { + try { + if ('.' 
=== jsonString[jsonString.length - 1]) + return JSON.parse(jsonString.substring(0, jsonString.lastIndexOf('.'))); + return JSON.parse(jsonString.substring(0, jsonString.lastIndexOf('e'))); + } + catch (e) { } + } + throwMalformedError(String(e)); + } + } + const start = index; + if (jsonString[index] === '-') + index++; + while (jsonString[index] && !',]}'.includes(jsonString[index])) + index++; + if (index == length && !(Allow.NUM & allow)) + markPartialJSON('Unterminated number literal'); + try { + return JSON.parse(jsonString.substring(start, index)); + } + catch (e) { + if (jsonString.substring(start, index) === '-' && Allow.NUM & allow) + markPartialJSON("Not sure what '-' is"); + try { + return JSON.parse(jsonString.substring(start, jsonString.lastIndexOf('e'))); + } + catch (e) { + throwMalformedError(String(e)); + } + } + }; + const skipBlank = () => { + while (index < length && ' \n\r\t'.includes(jsonString[index])) { + index++; + } + }; + return parseAny(); +}; +// using this function with malformed JSON is undefined behavior +const partialParse = (input) => parseJSON(input, Allow.ALL ^ Allow.NUM); +export { partialParse, PartialJSON, MalformedJSON }; +//# sourceMappingURL=parser.mjs.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/partial-json-parser/parser.mjs.map b/node_modules/openai/_vendor/partial-json-parser/parser.mjs.map new file mode 100644 index 0000000..e0f8cfe --- /dev/null +++ b/node_modules/openai/_vendor/partial-json-parser/parser.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"parser.mjs","sourceRoot":"","sources":["../../src/_vendor/partial-json-parser/parser.ts"],"names":[],"mappings":"AAAA,MAAM,GAAG,GAAG,WAAW,CAAC;AACxB,MAAM,GAAG,GAAG,WAAW,CAAC;AACxB,MAAM,GAAG,GAAG,WAAW,CAAC;AACxB,MAAM,GAAG,GAAG,WAAW,CAAC;AACxB,MAAM,IAAI,GAAG,WAAW,CAAC;AACzB,MAAM,IAAI,GAAG,WAAW,CAAC;AACzB,MAAM,GAAG,GAAG,WAAW,CAAC;AACxB,MAAM,QAAQ,GAAG,WAAW,CAAC;AAC7B,MAAM,cAAc,GAAG,WAAW,CAAC;AAEnC,MAAM,GAAG,GAAG,QAAQ,GAAG,cAAc,CAAC;AACtC,MAAM,OAAO,GAAG,IAAI,GAAG,IAAI,GAAG,GAAG,GAAG,GAAG,CAAC;AACxC,MAAM,IAAI,GAAG,GAAG,GAAG,GAAG,GAAG,OAAO,CAAC;AACjC,MAAM,UAAU,GAAG,GAAG,GAAG,GAAG,CAAC;AAC7B,MAAM,GAAG,GAAG,IAAI,GAAG,UAAU,CAAC;AAE9B,MAAM,KAAK,GAAG;IACZ,GAAG;IACH,GAAG;IACH,GAAG;IACH,GAAG;IACH,IAAI;IACJ,IAAI;IACJ,GAAG;IACH,QAAQ;IACR,cAAc;IACd,GAAG;IACH,OAAO;IACP,IAAI;IACJ,UAAU;IACV,GAAG;CACJ,CAAC;AAEF,6DAA6D;AAC7D,MAAM,WAAY,SAAQ,KAAK;CAAG;AAElC,MAAM,aAAc,SAAQ,KAAK;CAAG;AAEpC;;;;;;;GAOG;AACH,SAAS,SAAS,CAAC,UAAkB,EAAE,eAAuB,KAAK,CAAC,GAAG;IACrE,IAAI,OAAO,UAAU,KAAK,QAAQ,EAAE;QAClC,MAAM,IAAI,SAAS,CAAC,sBAAsB,OAAO,UAAU,EAAE,CAAC,CAAC;KAChE;IACD,IAAI,CAAC,UAAU,CAAC,IAAI,EAAE,EAAE;QACtB,MAAM,IAAI,KAAK,CAAC,GAAG,UAAU,WAAW,CAAC,CAAC;KAC3C;IACD,OAAO,UAAU,CAAC,UAAU,CAAC,IAAI,EAAE,EAAE,YAAY,CAAC,CAAC;AACrD,CAAC;AAED,MAAM,UAAU,GAAG,CAAC,UAAkB,EAAE,KAAa,EAAE,EAAE;IACvD,MAAM,MAAM,GAAG,UAAU,CAAC,MAAM,CAAC;IACjC,IAAI,KAAK,GAAG,CAAC,CAAC;IAEd,MAAM,eAAe,GAAG,CAAC,GAAW,EAAE,EAAE;QACtC,MAAM,IAAI,WAAW,CAAC,GAAG,GAAG,gBAAgB,KAAK,EAAE,CAAC,CAAC;IACvD,CAAC,CAAC;IAEF,MAAM,mBAAmB,GAAG,CAAC,GAAW,EAAE,EAAE;QAC1C,MAAM,IAAI,aAAa,CAAC,GAAG,GAAG,gBAAgB,KAAK,EAAE,CAAC,CAAC;IACzD,CAAC,CAAC;IAEF,MAAM,QAAQ,GAAc,GAAG,EAAE;QAC/B,SAAS,EAAE,CAAC;QACZ,IAAI,KAAK,IAAI,MAAM;YAAE,eAAe,CAAC,yBAAyB,CAAC,CAAC;QAChE,IAAI,UAAU,CAAC,KAAK,CAAC,KAAK,GAAG;YAAE,OAAO,QAAQ,EAAE,CAAC;QACjD,IAAI,UAAU,CAAC,KAAK,CAAC,KAAK,GAAG;YAAE,OAAO,QAAQ,EAAE,CAAC;QACjD,IAAI,UAAU,CAAC,KAAK,CAAC,KAAK,GAAG;YAAE,OAAO,QAAQ,EAAE,CAAC;QACjD,IACE,UAAU,CAAC,SAAS,CAAC,KAAK,EAAE,KAAK,GAAG,CAAC,CAAC,KAAK,MAAM;YACjD,CAAC,KAAK,CAAC,IAAI,GAAG,KAAK,IAAI,MAAM,GAAG,KAAK,GA
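For orientation, a minimal usage sketch of the parser.mjs module added above, assuming a relative ESM import of the vendored file; note that this build exports only partialParse and the two error classes, and partialParse masks out Allow.NUM (Allow.ALL ^ Allow.NUM), so a half-streamed number is withheld rather than surfaced as a truncated value:

import { partialParse, PartialJSON, MalformedJSON } from './parser.mjs';

partialParse('{"name": "Yi')      // => { name: 'Yi' }    partial string completed best-effort
partialParse('{"items": [1, 2,')  // => { items: [1, 2] } dangling comma / missing ']' tolerated
partialParse('{"total": 12')      // => {}                partial number dropped (NUM not allowed to be partial)
partialParse('tru')               // => true              short prefixes of literals are accepted

Anything that is malformed rather than merely truncated raises MalformedJSON; truncations that cannot be completed under the allow mask raise PartialJSON.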
AG,CAAC,IAAI,MAAM,CAAC,UAAU,CAAC,UAAU,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,CAAC,EAC5F;YACA,KAAK,IAAI,CAAC,CAAC;YACX,OAAO,IAAI,CAAC;SACb;QACD,IACE,UAAU,CAAC,SAAS,CAAC,KAAK,EAAE,KAAK,GAAG,CAAC,CAAC,KAAK,MAAM;YACjD,CAAC,KAAK,CAAC,IAAI,GAAG,KAAK,IAAI,MAAM,GAAG,KAAK,GAAG,CAAC,IAAI,MAAM,CAAC,UAAU,CAAC,UAAU,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,CAAC,EAC5F;YACA,KAAK,IAAI,CAAC,CAAC;YACX,OAAO,IAAI,CAAC;SACb;QACD,IACE,UAAU,CAAC,SAAS,CAAC,KAAK,EAAE,KAAK,GAAG,CAAC,CAAC,KAAK,OAAO;YAClD,CAAC,KAAK,CAAC,IAAI,GAAG,KAAK,IAAI,MAAM,GAAG,KAAK,GAAG,CAAC,IAAI,OAAO,CAAC,UAAU,CAAC,UAAU,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,CAAC,EAC7F;YACA,KAAK,IAAI,CAAC,CAAC;YACX,OAAO,KAAK,CAAC;SACd;QACD,IACE,UAAU,CAAC,SAAS,CAAC,KAAK,EAAE,KAAK,GAAG,CAAC,CAAC,KAAK,UAAU;YACrD,CAAC,KAAK,CAAC,QAAQ,GAAG,KAAK,IAAI,MAAM,GAAG,KAAK,GAAG,CAAC,IAAI,UAAU,CAAC,UAAU,CAAC,UAAU,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,CAAC,EACpG;YACA,KAAK,IAAI,CAAC,CAAC;YACX,OAAO,QAAQ,CAAC;SACjB;QACD,IACE,UAAU,CAAC,SAAS,CAAC,KAAK,EAAE,KAAK,GAAG,CAAC,CAAC,KAAK,WAAW;YACtD,CAAC,KAAK,CAAC,cAAc,GAAG,KAAK;gBAC3B,CAAC,GAAG,MAAM,GAAG,KAAK;gBAClB,MAAM,GAAG,KAAK,GAAG,CAAC;gBAClB,WAAW,CAAC,UAAU,CAAC,UAAU,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,CAAC,EACtD;YACA,KAAK,IAAI,CAAC,CAAC;YACX,OAAO,CAAC,QAAQ,CAAC;SAClB;QACD,IACE,UAAU,CAAC,SAAS,CAAC,KAAK,EAAE,KAAK,GAAG,CAAC,CAAC,KAAK,KAAK;YAChD,CAAC,KAAK,CAAC,GAAG,GAAG,KAAK,IAAI,MAAM,GAAG,KAAK,GAAG,CAAC,IAAI,KAAK,CAAC,UAAU,CAAC,UAAU,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,CAAC,EAC1F;YACA,KAAK,IAAI,CAAC,CAAC;YACX,OAAO,GAAG,CAAC;SACZ;QACD,OAAO,QAAQ,EAAE,CAAC;IACpB,CAAC,CAAC;IAEF,MAAM,QAAQ,GAAiB,GAAG,EAAE;QAClC,MAAM,KAAK,GAAG,KAAK,CAAC;QACpB,IAAI,MAAM,GAAG,KAAK,CAAC;QACnB,KAAK,EAAE,CAAC,CAAC,qBAAqB;QAC9B,OAAO,KAAK,GAAG,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,CAAC,KAAK,GAAG,IAAI,CAAC,MAAM,IAAI,UAAU,CAAC,KAAK,GAAG,CAAC,CAAC,KAAK,IAAI,CAAC,CAAC,EAAE;YAClG,MAAM,GAAG,UAAU,CAAC,KAAK,CAAC,KAAK,IAAI,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC;YACtD,KAAK,EAAE,CAAC;SACT;QACD,IAAI,UAAU,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,GAAG,EAAE;YACnC,IAAI;gBACF,OAAO,IAAI,CAAC,KAAK,CAAC,UAAU,CAAC,SAAS,CAAC,KAAK,EAAE,EAAE,KAAK,GAAG,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC;aAC1E;YAAC,OAAO,CAAC,EAAE;gBACV,mBAAmB,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;aAChC;SACF;aAAM,IAAI,KAAK,CAAC,GAAG,GAAG,KAAK,EAAE;YAC5B,IAAI;gBACF,OAAO,IAAI,CAAC,KAAK,CAAC,UAAU,CAAC,SAAS,CAAC,KAAK,EAAE,KAAK,GAAG,MAAM,CAAC,MAAM,CAAC,CAAC,GAAG,GAAG,CAAC,CAAC;aAC9E;YAAC,OAAO,CAAC,EAAE;gBACV,uCAAuC;gBACvC,OAAO,IAAI,CAAC,KAAK,CAAC,UAAU,CAAC,SAAS,CAAC,KAAK,EAAE,UAAU,CAAC,WAAW,CAAC,IAAI,CAAC,CAAC,GAAG,GAAG,CAAC,CAAC;aACpF;SACF;QACD,eAAe,CAAC,6BAA6B,CAAC,CAAC;IACjD,CAAC,CAAC;IAEF,MAAM,QAAQ,GAAG,GAAG,EAAE;QACpB,KAAK,EAAE,CAAC,CAAC,qBAAqB;QAC9B,SAAS,EAAE,CAAC;QACZ,MAAM,GAAG,GAAwB,EAAE,CAAC;QACpC,IAAI;YACF,OAAO,UAAU,CAAC,KAAK,CAAC,KAAK,GAAG,EAAE;gBAChC,SAAS,EAAE,CAAC;gBACZ,IAAI,KAAK,IAAI,MAAM,IAAI,KAAK,CAAC,GAAG,GAAG,KAAK;oBAAE,OAAO,GAAG,CAAC;gBACrD,MAAM,GAAG,GAAG,QAAQ,EAAE,CAAC;gBACvB,SAAS,EAAE,CAAC;gBACZ,KAAK,EAAE,CAAC,CAAC,aAAa;gBACtB,IAAI;oBACF,MAAM,KAAK,GAAG,QAAQ,EAAE,CAAC;oBACzB,MAAM,CAAC,cAAc,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE,KAAK,EAAE,QAAQ,EAAE,IAAI,EAAE,UAAU,EAAE,IAAI,EAAE,YAAY,EAAE,IAAI,EAAE,CAAC,CAAC;iBAClG;gBAAC,OAAO,CAAC,EAAE;oBACV,IAAI,KAAK,CAAC,GAAG,GAAG,KAAK;wBAAE,OAAO,GAAG,CAAC;;wBAC7B,MAAM,CAAC,CAAC;iBACd;gBACD,SAAS,EAAE,CAAC;gBACZ,IAAI,UAAU,CAAC,KAAK,CAAC,KAAK,GAAG;oBAAE,KAAK,EAAE,CAAC,CAAC,aAAa;aACtD;SACF;QAAC,OAAO,CAAC,EAAE;YACV,IAAI,KAAK,CAAC,GAAG,GAAG,KAAK;gBAAE,OAAO,GAAG,CAAC;;gBAC7B,eAAe,CAAC,+BAA+B,CAAC,CAAC;SACvD;QACD,KAAK,EAAE,CAAC,CAAC,mBAAmB;QAC5B,OAAO,GAAG,CAAC;IACb,CAAC,CAAC;IAEF,MAAM,QAAQ,GAAG,GAAG,EAAE;QACpB,KAAK,EAAE,CAAC,CAAC,uBAAuB;QAChC,MAAM,GAAG,GA
AG,EAAE,CAAC;QACf,IAAI;YACF,OAAO,UAAU,CAAC,KAAK,CAAC,KAAK,GAAG,EAAE;gBAChC,GAAG,CAAC,IAAI,CAAC,QAAQ,EAAE,CAAC,CAAC;gBACrB,SAAS,EAAE,CAAC;gBACZ,IAAI,UAAU,CAAC,KAAK,CAAC,KAAK,GAAG,EAAE;oBAC7B,KAAK,EAAE,CAAC,CAAC,aAAa;iBACvB;aACF;SACF;QAAC,OAAO,CAAC,EAAE;YACV,IAAI,KAAK,CAAC,GAAG,GAAG,KAAK,EAAE;gBACrB,OAAO,GAAG,CAAC;aACZ;YACD,eAAe,CAAC,8BAA8B,CAAC,CAAC;SACjD;QACD,KAAK,EAAE,CAAC,CAAC,qBAAqB;QAC9B,OAAO,GAAG,CAAC;IACb,CAAC,CAAC;IAEF,MAAM,QAAQ,GAAG,GAAG,EAAE;QACpB,IAAI,KAAK,KAAK,CAAC,EAAE;YACf,IAAI,UAAU,KAAK,GAAG,IAAI,KAAK,CAAC,GAAG,GAAG,KAAK;gBAAE,eAAe,CAAC,sBAAsB,CAAC,CAAC;YACrF,IAAI;gBACF,OAAO,IAAI,CAAC,KAAK,CAAC,UAAU,CAAC,CAAC;aAC/B;YAAC,OAAO,CAAC,EAAE;gBACV,IAAI,KAAK,CAAC,GAAG,GAAG,KAAK,EAAE;oBACrB,IAAI;wBACF,IAAI,GAAG,KAAK,UAAU,CAAC,UAAU,CAAC,MAAM,GAAG,CAAC,CAAC;4BAC3C,OAAO,IAAI,CAAC,KAAK,CAAC,UAAU,CAAC,SAAS,CAAC,CAAC,EAAE,UAAU,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;wBAC1E,OAAO,IAAI,CAAC,KAAK,CAAC,UAAU,CAAC,SAAS,CAAC,CAAC,EAAE,UAAU,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;qBACzE;oBAAC,OAAO,CAAC,EAAE,GAAE;iBACf;gBACD,mBAAmB,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;aAChC;SACF;QAED,MAAM,KAAK,GAAG,KAAK,CAAC;QAEpB,IAAI,UAAU,CAAC,KAAK,CAAC,KAAK,GAAG;YAAE,KAAK,EAAE,CAAC;QACvC,OAAO,UAAU,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,QAAQ,CAAC,UAAU,CAAC,KAAK,CAAE,CAAC;YAAE,KAAK,EAAE,CAAC;QAEzE,IAAI,KAAK,IAAI,MAAM,IAAI,CAAC,CAAC,KAAK,CAAC,GAAG,GAAG,KAAK,CAAC;YAAE,eAAe,CAAC,6BAA6B,CAAC,CAAC;QAE5F,IAAI;YACF,OAAO,IAAI,CAAC,KAAK,CAAC,UAAU,CAAC,SAAS,CAAC,KAAK,EAAE,KAAK,CAAC,CAAC,CAAC;SACvD;QAAC,OAAO,CAAC,EAAE;YACV,IAAI,UAAU,CAAC,SAAS,CAAC,KAAK,EAAE,KAAK,CAAC,KAAK,GAAG,IAAI,KAAK,CAAC,GAAG,GAAG,KAAK;gBACjE,eAAe,CAAC,sBAAsB,CAAC,CAAC;YAC1C,IAAI;gBACF,OAAO,IAAI,CAAC,KAAK,CAAC,UAAU,CAAC,SAAS,CAAC,KAAK,EAAE,UAAU,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;aAC7E;YAAC,OAAO,CAAC,EAAE;gBACV,mBAAmB,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;aAChC;SACF;IACH,CAAC,CAAC;IAEF,MAAM,SAAS,GAAG,GAAG,EAAE;QACrB,OAAO,KAAK,GAAG,MAAM,IAAI,SAAS,CAAC,QAAQ,CAAC,UAAU,CAAC,KAAK,CAAE,CAAC,EAAE;YAC/D,KAAK,EAAE,CAAC;SACT;IACH,CAAC,CAAC;IAEF,OAAO,QAAQ,EAAE,CAAC;AACpB,CAAC,CAAC;AAEF,gEAAgE;AAChE,MAAM,YAAY,GAAG,CAAC,KAAa,EAAE,EAAE,CAAC,SAAS,CAAC,KAAK,EAAE,KAAK,CAAC,GAAG,GAAG,KAAK,CAAC,GAAG,CAAC,CAAC;AAEhF,OAAO,EAAE,YAAY,EAAE,WAAW,EAAE,aAAa,EAAE,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/Options.d.ts b/node_modules/openai/_vendor/zod-to-json-schema/Options.d.ts new file mode 100644 index 0000000..787f00c --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/Options.d.ts @@ -0,0 +1,32 @@ +import { ZodSchema, ZodTypeDef } from 'zod'; +import { Refs, Seen } from "./Refs.js"; +import { JsonSchema7Type } from "./parseDef.js"; +export type Targets = 'jsonSchema7' | 'jsonSchema2019-09' | 'openApi3'; +export type DateStrategy = 'format:date-time' | 'format:date' | 'string' | 'integer'; +export declare const ignoreOverride: unique symbol; +export type Options = { + name: string | undefined; + $refStrategy: 'root' | 'relative' | 'none' | 'seen' | 'extract-to-root'; + basePath: string[]; + effectStrategy: 'input' | 'any'; + pipeStrategy: 'input' | 'output' | 'all'; + dateStrategy: DateStrategy | DateStrategy[]; + mapStrategy: 'entries' | 'record'; + removeAdditionalStrategy: 'passthrough' | 'strict'; + nullableStrategy: 'from-target' | 'property'; + target: Target; + strictUnions: boolean; + definitionPath: string; + definitions: Record; + errorMessages: boolean; + markdownDescription: boolean; + patternStrategy: 'escape' | 'preserve'; + applyRegexFlags: boolean; + emailStrategy: 'format:email' | 'format:idn-email' | 
'pattern:zod'; + base64Strategy: 'format:binary' | 'contentEncoding:base64' | 'pattern:zod'; + nameStrategy: 'ref' | 'duplicate-ref' | 'title'; + override?: (def: ZodTypeDef, refs: Refs, seen: Seen | undefined, forceResolution?: boolean) => JsonSchema7Type | undefined | typeof ignoreOverride; + openaiStrictMode?: boolean; +}; +export declare const getDefaultOptions: (options: string | Partial> | undefined) => Options; +//# sourceMappingURL=Options.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/Options.d.ts.map b/node_modules/openai/_vendor/zod-to-json-schema/Options.d.ts.map new file mode 100644 index 0000000..862af2e --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/Options.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"Options.d.ts","sourceRoot":"","sources":["../../src/_vendor/zod-to-json-schema/Options.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,SAAS,EAAE,UAAU,EAAE,MAAM,KAAK,CAAC;AAC5C,OAAO,EAAE,IAAI,EAAE,IAAI,EAAE,MAAM,QAAQ,CAAC;AACpC,OAAO,EAAE,eAAe,EAAE,MAAM,YAAY,CAAC;AAE7C,MAAM,MAAM,OAAO,GAAG,aAAa,GAAG,mBAAmB,GAAG,UAAU,CAAC;AAEvE,MAAM,MAAM,YAAY,GAAG,kBAAkB,GAAG,aAAa,GAAG,QAAQ,GAAG,SAAS,CAAC;AAErF,eAAO,MAAM,cAAc,eAA8D,CAAC;AAE1F,MAAM,MAAM,OAAO,CAAC,MAAM,SAAS,OAAO,GAAG,aAAa,IAAI;IAC5D,IAAI,EAAE,MAAM,GAAG,SAAS,CAAC;IACzB,YAAY,EAAE,MAAM,GAAG,UAAU,GAAG,MAAM,GAAG,MAAM,GAAG,iBAAiB,CAAC;IACxE,QAAQ,EAAE,MAAM,EAAE,CAAC;IACnB,cAAc,EAAE,OAAO,GAAG,KAAK,CAAC;IAChC,YAAY,EAAE,OAAO,GAAG,QAAQ,GAAG,KAAK,CAAC;IACzC,YAAY,EAAE,YAAY,GAAG,YAAY,EAAE,CAAC;IAC5C,WAAW,EAAE,SAAS,GAAG,QAAQ,CAAC;IAClC,wBAAwB,EAAE,aAAa,GAAG,QAAQ,CAAC;IACnD,gBAAgB,EAAE,aAAa,GAAG,UAAU,CAAC;IAC7C,MAAM,EAAE,MAAM,CAAC;IACf,YAAY,EAAE,OAAO,CAAC;IACtB,cAAc,EAAE,MAAM,CAAC;IACvB,WAAW,EAAE,MAAM,CAAC,MAAM,EAAE,SAAS,GAAG,UAAU,CAAC,CAAC;IACpD,aAAa,EAAE,OAAO,CAAC;IACvB,mBAAmB,EAAE,OAAO,CAAC;IAC7B,eAAe,EAAE,QAAQ,GAAG,UAAU,CAAC;IACvC,eAAe,EAAE,OAAO,CAAC;IACzB,aAAa,EAAE,cAAc,GAAG,kBAAkB,GAAG,aAAa,CAAC;IACnE,cAAc,EAAE,eAAe,GAAG,wBAAwB,GAAG,aAAa,CAAC;IAC3E,YAAY,EAAE,KAAK,GAAG,eAAe,GAAG,OAAO,CAAC;IAChD,QAAQ,CAAC,EAAE,CACT,GAAG,EAAE,UAAU,EACf,IAAI,EAAE,IAAI,EACV,IAAI,EAAE,IAAI,GAAG,SAAS,EACtB,eAAe,CAAC,EAAE,OAAO,KACtB,eAAe,GAAG,SAAS,GAAG,OAAO,cAAc,CAAC;IACzD,gBAAgB,CAAC,EAAE,OAAO,CAAC;CAC5B,CAAC;AAuBF,eAAO,MAAM,iBAAiB,qGAkB7B,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/Options.js b/node_modules/openai/_vendor/zod-to-json-schema/Options.js new file mode 100644 index 0000000..ce8822c --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/Options.js @@ -0,0 +1,42 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getDefaultOptions = exports.ignoreOverride = void 0; +exports.ignoreOverride = Symbol('Let zodToJsonSchema decide on which parser to use'); +const defaultOptions = { + name: undefined, + $refStrategy: 'root', + effectStrategy: 'input', + pipeStrategy: 'all', + dateStrategy: 'format:date-time', + mapStrategy: 'entries', + nullableStrategy: 'from-target', + removeAdditionalStrategy: 'passthrough', + definitionPath: 'definitions', + target: 'jsonSchema7', + strictUnions: false, + errorMessages: false, + markdownDescription: false, + patternStrategy: 'escape', + applyRegexFlags: false, + emailStrategy: 'format:email', + base64Strategy: 'contentEncoding:base64', + nameStrategy: 'ref', +}; +const getDefaultOptions = (options) => { + // We need to add `definitions` here as we may mutate it + return (typeof options === 'string' ? 
+ { + ...defaultOptions, + basePath: ['#'], + definitions: {}, + name: options, + } + : { + ...defaultOptions, + basePath: ['#'], + definitions: {}, + ...options, + }); +}; +exports.getDefaultOptions = getDefaultOptions; +//# sourceMappingURL=Options.js.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/Options.js.map b/node_modules/openai/_vendor/zod-to-json-schema/Options.js.map new file mode 100644 index 0000000..b35ca38 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/Options.js.map @@ -0,0 +1 @@ +{"version":3,"file":"Options.js","sourceRoot":"","sources":["../../src/_vendor/zod-to-json-schema/Options.ts"],"names":[],"mappings":";;;AAQa,QAAA,cAAc,GAAG,MAAM,CAAC,mDAAmD,CAAC,CAAC;AAgC1F,MAAM,cAAc,GAA8C;IAChE,IAAI,EAAE,SAAS;IACf,YAAY,EAAE,MAAM;IACpB,cAAc,EAAE,OAAO;IACvB,YAAY,EAAE,KAAK;IACnB,YAAY,EAAE,kBAAkB;IAChC,WAAW,EAAE,SAAS;IACtB,gBAAgB,EAAE,aAAa;IAC/B,wBAAwB,EAAE,aAAa;IACvC,cAAc,EAAE,aAAa;IAC7B,MAAM,EAAE,aAAa;IACrB,YAAY,EAAE,KAAK;IACnB,aAAa,EAAE,KAAK;IACpB,mBAAmB,EAAE,KAAK;IAC1B,eAAe,EAAE,QAAQ;IACzB,eAAe,EAAE,KAAK;IACtB,aAAa,EAAE,cAAc;IAC7B,cAAc,EAAE,wBAAwB;IACxC,YAAY,EAAE,KAAK;CACpB,CAAC;AAEK,MAAM,iBAAiB,GAAG,CAC/B,OAAsD,EACtD,EAAE;IACF,wDAAwD;IACxD,OAAO,CACL,OAAO,OAAO,KAAK,QAAQ,CAAC,CAAC;QAC3B;YACE,GAAG,cAAc;YACjB,QAAQ,EAAE,CAAC,GAAG,CAAC;YACf,WAAW,EAAE,EAAE;YACf,IAAI,EAAE,OAAO;SACd;QACH,CAAC,CAAC;YACE,GAAG,cAAc;YACjB,QAAQ,EAAE,CAAC,GAAG,CAAC;YACf,WAAW,EAAE,EAAE;YACf,GAAG,OAAO;SACX,CAAoB,CAAC;AAC5B,CAAC,CAAC;AAlBW,QAAA,iBAAiB,qBAkB5B"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/Options.mjs b/node_modules/openai/_vendor/zod-to-json-schema/Options.mjs new file mode 100644 index 0000000..8e199f8 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/Options.mjs @@ -0,0 +1,38 @@ +export const ignoreOverride = Symbol('Let zodToJsonSchema decide on which parser to use'); +const defaultOptions = { + name: undefined, + $refStrategy: 'root', + effectStrategy: 'input', + pipeStrategy: 'all', + dateStrategy: 'format:date-time', + mapStrategy: 'entries', + nullableStrategy: 'from-target', + removeAdditionalStrategy: 'passthrough', + definitionPath: 'definitions', + target: 'jsonSchema7', + strictUnions: false, + errorMessages: false, + markdownDescription: false, + patternStrategy: 'escape', + applyRegexFlags: false, + emailStrategy: 'format:email', + base64Strategy: 'contentEncoding:base64', + nameStrategy: 'ref', +}; +export const getDefaultOptions = (options) => { + // We need to add `definitions` here as we may mutate it + return (typeof options === 'string' ? 
+ { + ...defaultOptions, + basePath: ['#'], + definitions: {}, + name: options, + } + : { + ...defaultOptions, + basePath: ['#'], + definitions: {}, + ...options, + }); +}; +//# sourceMappingURL=Options.mjs.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/Options.mjs.map b/node_modules/openai/_vendor/zod-to-json-schema/Options.mjs.map new file mode 100644 index 0000000..10a07a6 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/Options.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"Options.mjs","sourceRoot":"","sources":["../../src/_vendor/zod-to-json-schema/Options.ts"],"names":[],"mappings":"AAQA,MAAM,CAAC,MAAM,cAAc,GAAG,MAAM,CAAC,mDAAmD,CAAC,CAAC;AAgC1F,MAAM,cAAc,GAA8C;IAChE,IAAI,EAAE,SAAS;IACf,YAAY,EAAE,MAAM;IACpB,cAAc,EAAE,OAAO;IACvB,YAAY,EAAE,KAAK;IACnB,YAAY,EAAE,kBAAkB;IAChC,WAAW,EAAE,SAAS;IACtB,gBAAgB,EAAE,aAAa;IAC/B,wBAAwB,EAAE,aAAa;IACvC,cAAc,EAAE,aAAa;IAC7B,MAAM,EAAE,aAAa;IACrB,YAAY,EAAE,KAAK;IACnB,aAAa,EAAE,KAAK;IACpB,mBAAmB,EAAE,KAAK;IAC1B,eAAe,EAAE,QAAQ;IACzB,eAAe,EAAE,KAAK;IACtB,aAAa,EAAE,cAAc;IAC7B,cAAc,EAAE,wBAAwB;IACxC,YAAY,EAAE,KAAK;CACpB,CAAC;AAEF,MAAM,CAAC,MAAM,iBAAiB,GAAG,CAC/B,OAAsD,EACtD,EAAE;IACF,wDAAwD;IACxD,OAAO,CACL,OAAO,OAAO,KAAK,QAAQ,CAAC,CAAC;QAC3B;YACE,GAAG,cAAc;YACjB,QAAQ,EAAE,CAAC,GAAG,CAAC;YACf,WAAW,EAAE,EAAE;YACf,IAAI,EAAE,OAAO;SACd;QACH,CAAC,CAAC;YACE,GAAG,cAAc;YACjB,QAAQ,EAAE,CAAC,GAAG,CAAC;YACf,WAAW,EAAE,EAAE;YACf,GAAG,OAAO;SACX,CAAoB,CAAC;AAC5B,CAAC,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/Refs.d.ts b/node_modules/openai/_vendor/zod-to-json-schema/Refs.d.ts new file mode 100644 index 0000000..39db1f7 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/Refs.d.ts @@ -0,0 +1,21 @@ +import type { ZodTypeDef } from 'zod'; +import { Options, Targets } from "./Options.js"; +import { JsonSchema7Type } from "./parseDef.js"; +export type Refs = { + seen: Map; + /** + * Set of all the `$ref`s we created, e.g. `Set(['#/$defs/ui'])` + * this notable does not include any `definitions` that were + * explicitly given as an option. 
+ */ + seenRefs: Set; + currentPath: string[]; + propertyPath: string[] | undefined; +} & Options; +export type Seen = { + def: ZodTypeDef; + path: string[]; + jsonSchema: JsonSchema7Type | undefined; +}; +export declare const getRefs: (options?: string | Partial>) => Refs; +//# sourceMappingURL=Refs.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/Refs.d.ts.map b/node_modules/openai/_vendor/zod-to-json-schema/Refs.d.ts.map new file mode 100644 index 0000000..05eaf5a --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/Refs.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"Refs.d.ts","sourceRoot":"","sources":["../../src/_vendor/zod-to-json-schema/Refs.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,KAAK,CAAC;AACtC,OAAO,EAAqB,OAAO,EAAE,OAAO,EAAE,MAAM,WAAW,CAAC;AAChE,OAAO,EAAE,eAAe,EAAE,MAAM,YAAY,CAAC;AAG7C,MAAM,MAAM,IAAI,GAAG;IACjB,IAAI,EAAE,GAAG,CAAC,UAAU,EAAE,IAAI,CAAC,CAAC;IAC5B;;;;OAIG;IACH,QAAQ,EAAE,GAAG,CAAC,MAAM,CAAC,CAAC;IACtB,WAAW,EAAE,MAAM,EAAE,CAAC;IACtB,YAAY,EAAE,MAAM,EAAE,GAAG,SAAS,CAAC;CACpC,GAAG,OAAO,CAAC,OAAO,CAAC,CAAC;AAErB,MAAM,MAAM,IAAI,GAAG;IACjB,GAAG,EAAE,UAAU,CAAC;IAChB,IAAI,EAAE,MAAM,EAAE,CAAC;IACf,UAAU,EAAE,eAAe,GAAG,SAAS,CAAC;CACzC,CAAC;AAEF,eAAO,MAAM,OAAO,aAAc,MAAM,GAAG,QAAQ,QAAQ,OAAO,CAAC,CAAC,KAAG,IAuBtE,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/Refs.js b/node_modules/openai/_vendor/zod-to-json-schema/Refs.js new file mode 100644 index 0000000..e89ba02 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/Refs.js @@ -0,0 +1,28 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getRefs = void 0; +const Options_1 = require("./Options.js"); +const util_1 = require("./util.js"); +const getRefs = (options) => { + const _options = (0, Options_1.getDefaultOptions)(options); + const currentPath = _options.name !== undefined ? + [..._options.basePath, _options.definitionPath, _options.name] + : _options.basePath; + return { + ..._options, + currentPath: currentPath, + propertyPath: undefined, + seenRefs: new Set(), + seen: new Map(Object.entries(_options.definitions).map(([name, def]) => [ + (0, util_1.zodDef)(def), + { + def: (0, util_1.zodDef)(def), + path: [..._options.basePath, _options.definitionPath, name], + // Resolution of references will be forced even though seen, so it's ok that the schema is undefined here for now. 
+ jsonSchema: undefined, + }, + ])), + }; +}; +exports.getRefs = getRefs; +//# sourceMappingURL=Refs.js.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/Refs.js.map b/node_modules/openai/_vendor/zod-to-json-schema/Refs.js.map new file mode 100644 index 0000000..f48077c --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/Refs.js.map @@ -0,0 +1 @@ +{"version":3,"file":"Refs.js","sourceRoot":"","sources":["../../src/_vendor/zod-to-json-schema/Refs.ts"],"names":[],"mappings":";;;AACA,0CAAgE;AAEhE,oCAAgC;AAoBzB,MAAM,OAAO,GAAG,CAAC,OAA4C,EAAQ,EAAE;IAC5E,MAAM,QAAQ,GAAG,IAAA,2BAAiB,EAAC,OAAO,CAAC,CAAC;IAC5C,MAAM,WAAW,GACf,QAAQ,CAAC,IAAI,KAAK,SAAS,CAAC,CAAC;QAC3B,CAAC,GAAG,QAAQ,CAAC,QAAQ,EAAE,QAAQ,CAAC,cAAc,EAAE,QAAQ,CAAC,IAAI,CAAC;QAChE,CAAC,CAAC,QAAQ,CAAC,QAAQ,CAAC;IACtB,OAAO;QACL,GAAG,QAAQ;QACX,WAAW,EAAE,WAAW;QACxB,YAAY,EAAE,SAAS;QACvB,QAAQ,EAAE,IAAI,GAAG,EAAE;QACnB,IAAI,EAAE,IAAI,GAAG,CACX,MAAM,CAAC,OAAO,CAAC,QAAQ,CAAC,WAAW,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,IAAI,EAAE,GAAG,CAAC,EAAE,EAAE,CAAC;YACxD,IAAA,aAAM,EAAC,GAAG,CAAC;YACX;gBACE,GAAG,EAAE,IAAA,aAAM,EAAC,GAAG,CAAC;gBAChB,IAAI,EAAE,CAAC,GAAG,QAAQ,CAAC,QAAQ,EAAE,QAAQ,CAAC,cAAc,EAAE,IAAI,CAAC;gBAC3D,kHAAkH;gBAClH,UAAU,EAAE,SAAS;aACtB;SACF,CAAC,CACH;KACF,CAAC;AACJ,CAAC,CAAC;AAvBW,QAAA,OAAO,WAuBlB"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/Refs.mjs b/node_modules/openai/_vendor/zod-to-json-schema/Refs.mjs new file mode 100644 index 0000000..7269765 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/Refs.mjs @@ -0,0 +1,24 @@ +import { getDefaultOptions } from "./Options.mjs"; +import { zodDef } from "./util.mjs"; +export const getRefs = (options) => { + const _options = getDefaultOptions(options); + const currentPath = _options.name !== undefined ? + [..._options.basePath, _options.definitionPath, _options.name] + : _options.basePath; + return { + ..._options, + currentPath: currentPath, + propertyPath: undefined, + seenRefs: new Set(), + seen: new Map(Object.entries(_options.definitions).map(([name, def]) => [ + zodDef(def), + { + def: zodDef(def), + path: [..._options.basePath, _options.definitionPath, name], + // Resolution of references will be forced even though seen, so it's ok that the schema is undefined here for now. 
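+ // For example (illustrative names): getRefs({ definitions: { user: userSchema } }) pre-seeds this map with userSchema's def at path ['#', 'definitions', 'user'], so later occurrences of that schema resolve to { $ref: '#/definitions/user' } under the default 'root' $refStrategy.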
+ jsonSchema: undefined, + }, + ])), + }; +}; +//# sourceMappingURL=Refs.mjs.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/Refs.mjs.map b/node_modules/openai/_vendor/zod-to-json-schema/Refs.mjs.map new file mode 100644 index 0000000..f73333d --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/Refs.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"Refs.mjs","sourceRoot":"","sources":["../../src/_vendor/zod-to-json-schema/Refs.ts"],"names":[],"mappings":"OACO,EAAE,iBAAiB,EAAoB;OAEvC,EAAE,MAAM,EAAE;AAoBjB,MAAM,CAAC,MAAM,OAAO,GAAG,CAAC,OAA4C,EAAQ,EAAE;IAC5E,MAAM,QAAQ,GAAG,iBAAiB,CAAC,OAAO,CAAC,CAAC;IAC5C,MAAM,WAAW,GACf,QAAQ,CAAC,IAAI,KAAK,SAAS,CAAC,CAAC;QAC3B,CAAC,GAAG,QAAQ,CAAC,QAAQ,EAAE,QAAQ,CAAC,cAAc,EAAE,QAAQ,CAAC,IAAI,CAAC;QAChE,CAAC,CAAC,QAAQ,CAAC,QAAQ,CAAC;IACtB,OAAO;QACL,GAAG,QAAQ;QACX,WAAW,EAAE,WAAW;QACxB,YAAY,EAAE,SAAS;QACvB,QAAQ,EAAE,IAAI,GAAG,EAAE;QACnB,IAAI,EAAE,IAAI,GAAG,CACX,MAAM,CAAC,OAAO,CAAC,QAAQ,CAAC,WAAW,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,IAAI,EAAE,GAAG,CAAC,EAAE,EAAE,CAAC;YACxD,MAAM,CAAC,GAAG,CAAC;YACX;gBACE,GAAG,EAAE,MAAM,CAAC,GAAG,CAAC;gBAChB,IAAI,EAAE,CAAC,GAAG,QAAQ,CAAC,QAAQ,EAAE,QAAQ,CAAC,cAAc,EAAE,IAAI,CAAC;gBAC3D,kHAAkH;gBAClH,UAAU,EAAE,SAAS;aACtB;SACF,CAAC,CACH;KACF,CAAC;AACJ,CAAC,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/errorMessages.d.ts b/node_modules/openai/_vendor/zod-to-json-schema/errorMessages.d.ts new file mode 100644 index 0000000..6d61f11 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/errorMessages.d.ts @@ -0,0 +1,12 @@ +import { JsonSchema7TypeUnion } from "./parseDef.js"; +import { Refs } from "./Refs.js"; +export type ErrorMessages = Partial>; +export declare function addErrorMessage; +}>(res: T, key: keyof T, errorMessage: string | undefined, refs: Refs): void; +export declare function setResponseValueAndErrors; +}, Key extends keyof Omit>(res: Json7Type, key: Key, value: Json7Type[Key], errorMessage: string | undefined, refs: Refs): void; +//# sourceMappingURL=errorMessages.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/errorMessages.d.ts.map b/node_modules/openai/_vendor/zod-to-json-schema/errorMessages.d.ts.map new file mode 100644 index 0000000..2080804 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/errorMessages.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"errorMessages.d.ts","sourceRoot":"","sources":["../../src/_vendor/zod-to-json-schema/errorMessages.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,oBAAoB,EAAE,MAAM,YAAY,CAAC;AAClD,OAAO,EAAE,IAAI,EAAE,MAAM,QAAQ,CAAC;AAE9B,MAAM,MAAM,aAAa,CAAC,CAAC,SAAS,oBAAoB,EAAE,cAAc,SAAS,MAAM,GAAG,EAAE,IAAI,OAAO,CACrG,IAAI,CAAC;KAAG,GAAG,IAAI,MAAM,CAAC,GAAG,MAAM;CAAE,EAAE,cAAc,GAAG,MAAM,GAAG,eAAe,CAAC,CAC9E,CAAC;AAEF,wBAAgB,eAAe,CAAC,CAAC,SAAS;IAAE,YAAY,CAAC,EAAE,aAAa,CAAC,GAAG,CAAC,CAAA;CAAE,EAC7E,GAAG,EAAE,CAAC,EACN,GAAG,EAAE,MAAM,CAAC,EACZ,YAAY,EAAE,MAAM,GAAG,SAAS,EAChC,IAAI,EAAE,IAAI,QASX;AAED,wBAAgB,yBAAyB,CACvC,SAAS,SAAS,oBAAoB,GAAG;IACvC,YAAY,CAAC,EAAE,aAAa,CAAC,SAAS,CAAC,CAAC;CACzC,EACD,GAAG,SAAS,MAAM,IAAI,CAAC,SAAS,EAAE,cAAc,CAAC,EACjD,GAAG,EAAE,SAAS,EAAE,GAAG,EAAE,GAAG,EAAE,KAAK,EAAE,SAAS,CAAC,GAAG,CAAC,EAAE,YAAY,EAAE,MAAM,GAAG,SAAS,EAAE,IAAI,EAAE,IAAI,QAG9F"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/errorMessages.js b/node_modules/openai/_vendor/zod-to-json-schema/errorMessages.js new file mode 100644 index 0000000..2bd1b29 --- /dev/null +++ 
b/node_modules/openai/_vendor/zod-to-json-schema/errorMessages.js @@ -0,0 +1,20 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.setResponseValueAndErrors = exports.addErrorMessage = void 0; +function addErrorMessage(res, key, errorMessage, refs) { + if (!refs?.errorMessages) + return; + if (errorMessage) { + res.errorMessage = { + ...res.errorMessage, + [key]: errorMessage, + }; + } +} +exports.addErrorMessage = addErrorMessage; +function setResponseValueAndErrors(res, key, value, errorMessage, refs) { + res[key] = value; + addErrorMessage(res, key, errorMessage, refs); +} +exports.setResponseValueAndErrors = setResponseValueAndErrors; +//# sourceMappingURL=errorMessages.js.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/errorMessages.js.map b/node_modules/openai/_vendor/zod-to-json-schema/errorMessages.js.map new file mode 100644 index 0000000..ca2826f --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/errorMessages.js.map @@ -0,0 +1 @@ +{"version":3,"file":"errorMessages.js","sourceRoot":"","sources":["../../src/_vendor/zod-to-json-schema/errorMessages.ts"],"names":[],"mappings":";;;AAOA,SAAgB,eAAe,CAC7B,GAAM,EACN,GAAY,EACZ,YAAgC,EAChC,IAAU;IAEV,IAAI,CAAC,IAAI,EAAE,aAAa;QAAE,OAAO;IACjC,IAAI,YAAY,EAAE;QAChB,GAAG,CAAC,YAAY,GAAG;YACjB,GAAG,GAAG,CAAC,YAAY;YACnB,CAAC,GAAG,CAAC,EAAE,YAAY;SACpB,CAAC;KACH;AACH,CAAC;AAbD,0CAaC;AAED,SAAgB,yBAAyB,CAKvC,GAAc,EAAE,GAAQ,EAAE,KAAqB,EAAE,YAAgC,EAAE,IAAU;IAC7F,GAAG,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC;IACjB,eAAe,CAAC,GAAG,EAAE,GAAG,EAAE,YAAY,EAAE,IAAI,CAAC,CAAC;AAChD,CAAC;AARD,8DAQC"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/errorMessages.mjs b/node_modules/openai/_vendor/zod-to-json-schema/errorMessages.mjs new file mode 100644 index 0000000..750f0c9 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/errorMessages.mjs @@ -0,0 +1,15 @@ +export function addErrorMessage(res, key, errorMessage, refs) { + if (!refs?.errorMessages) + return; + if (errorMessage) { + res.errorMessage = { + ...res.errorMessage, + [key]: errorMessage, + }; + } +} +export function setResponseValueAndErrors(res, key, value, errorMessage, refs) { + res[key] = value; + addErrorMessage(res, key, errorMessage, refs); +} +//# sourceMappingURL=errorMessages.mjs.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/errorMessages.mjs.map b/node_modules/openai/_vendor/zod-to-json-schema/errorMessages.mjs.map new file mode 100644 index 0000000..9a98e99 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/errorMessages.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"errorMessages.mjs","sourceRoot":"","sources":["../../src/_vendor/zod-to-json-schema/errorMessages.ts"],"names":[],"mappings":"AAOA,MAAM,UAAU,eAAe,CAC7B,GAAM,EACN,GAAY,EACZ,YAAgC,EAChC,IAAU;IAEV,IAAI,CAAC,IAAI,EAAE,aAAa;QAAE,OAAO;IACjC,IAAI,YAAY,EAAE;QAChB,GAAG,CAAC,YAAY,GAAG;YACjB,GAAG,GAAG,CAAC,YAAY;YACnB,CAAC,GAAG,CAAC,EAAE,YAAY;SACpB,CAAC;KACH;AACH,CAAC;AAED,MAAM,UAAU,yBAAyB,CAKvC,GAAc,EAAE,GAAQ,EAAE,KAAqB,EAAE,YAAgC,EAAE,IAAU;IAC7F,GAAG,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC;IACjB,eAAe,CAAC,GAAG,EAAE,GAAG,EAAE,YAAY,EAAE,IAAI,CAAC,CAAC;AAChD,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/index.d.ts b/node_modules/openai/_vendor/zod-to-json-schema/index.d.ts new file mode 100644 index 0000000..479fb21 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/index.d.ts @@ -0,0 
+1,38 @@ +export * from "./Options.js"; +export * from "./Refs.js"; +export * from "./errorMessages.js"; +export * from "./parseDef.js"; +export * from "./parsers/any.js"; +export * from "./parsers/array.js"; +export * from "./parsers/bigint.js"; +export * from "./parsers/boolean.js"; +export * from "./parsers/branded.js"; +export * from "./parsers/catch.js"; +export * from "./parsers/date.js"; +export * from "./parsers/default.js"; +export * from "./parsers/effects.js"; +export * from "./parsers/enum.js"; +export * from "./parsers/intersection.js"; +export * from "./parsers/literal.js"; +export * from "./parsers/map.js"; +export * from "./parsers/nativeEnum.js"; +export * from "./parsers/never.js"; +export * from "./parsers/null.js"; +export * from "./parsers/nullable.js"; +export * from "./parsers/number.js"; +export * from "./parsers/object.js"; +export * from "./parsers/optional.js"; +export * from "./parsers/pipeline.js"; +export * from "./parsers/promise.js"; +export * from "./parsers/readonly.js"; +export * from "./parsers/record.js"; +export * from "./parsers/set.js"; +export * from "./parsers/string.js"; +export * from "./parsers/tuple.js"; +export * from "./parsers/undefined.js"; +export * from "./parsers/union.js"; +export * from "./parsers/unknown.js"; +export * from "./zodToJsonSchema.js"; +import { zodToJsonSchema } from "./zodToJsonSchema.js"; +export default zodToJsonSchema; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/index.d.ts.map b/node_modules/openai/_vendor/zod-to-json-schema/index.d.ts.map new file mode 100644 index 0000000..0737869 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/index.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/_vendor/zod-to-json-schema/index.ts"],"names":[],"mappings":"AAAA,cAAc,WAAW,CAAC;AAC1B,cAAc,QAAQ,CAAC;AACvB,cAAc,iBAAiB,CAAC;AAChC,cAAc,YAAY,CAAC;AAC3B,cAAc,eAAe,CAAC;AAC9B,cAAc,iBAAiB,CAAC;AAChC,cAAc,kBAAkB,CAAC;AACjC,cAAc,mBAAmB,CAAC;AAClC,cAAc,mBAAmB,CAAC;AAClC,cAAc,iBAAiB,CAAC;AAChC,cAAc,gBAAgB,CAAC;AAC/B,cAAc,mBAAmB,CAAC;AAClC,cAAc,mBAAmB,CAAC;AAClC,cAAc,gBAAgB,CAAC;AAC/B,cAAc,wBAAwB,CAAC;AACvC,cAAc,mBAAmB,CAAC;AAClC,cAAc,eAAe,CAAC;AAC9B,cAAc,sBAAsB,CAAC;AACrC,cAAc,iBAAiB,CAAC;AAChC,cAAc,gBAAgB,CAAC;AAC/B,cAAc,oBAAoB,CAAC;AACnC,cAAc,kBAAkB,CAAC;AACjC,cAAc,kBAAkB,CAAC;AACjC,cAAc,oBAAoB,CAAC;AACnC,cAAc,oBAAoB,CAAC;AACnC,cAAc,mBAAmB,CAAC;AAClC,cAAc,oBAAoB,CAAC;AACnC,cAAc,kBAAkB,CAAC;AACjC,cAAc,eAAe,CAAC;AAC9B,cAAc,kBAAkB,CAAC;AACjC,cAAc,iBAAiB,CAAC;AAChC,cAAc,qBAAqB,CAAC;AACpC,cAAc,iBAAiB,CAAC;AAChC,cAAc,mBAAmB,CAAC;AAClC,cAAc,mBAAmB,CAAC;AAClC,OAAO,EAAE,eAAe,EAAE,MAAM,mBAAmB,CAAC;AACpD,eAAe,eAAe,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/index.js b/node_modules/openai/_vendor/zod-to-json-schema/index.js new file mode 100644 index 0000000..3ba8958 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/index.js @@ -0,0 +1,54 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +__exportStar(require("./Options.js"), exports); +__exportStar(require("./Refs.js"), exports); +__exportStar(require("./errorMessages.js"), exports); +__exportStar(require("./parseDef.js"), exports); +__exportStar(require("./parsers/any.js"), exports); +__exportStar(require("./parsers/array.js"), exports); +__exportStar(require("./parsers/bigint.js"), exports); +__exportStar(require("./parsers/boolean.js"), exports); +__exportStar(require("./parsers/branded.js"), exports); +__exportStar(require("./parsers/catch.js"), exports); +__exportStar(require("./parsers/date.js"), exports); +__exportStar(require("./parsers/default.js"), exports); +__exportStar(require("./parsers/effects.js"), exports); +__exportStar(require("./parsers/enum.js"), exports); +__exportStar(require("./parsers/intersection.js"), exports); +__exportStar(require("./parsers/literal.js"), exports); +__exportStar(require("./parsers/map.js"), exports); +__exportStar(require("./parsers/nativeEnum.js"), exports); +__exportStar(require("./parsers/never.js"), exports); +__exportStar(require("./parsers/null.js"), exports); +__exportStar(require("./parsers/nullable.js"), exports); +__exportStar(require("./parsers/number.js"), exports); +__exportStar(require("./parsers/object.js"), exports); +__exportStar(require("./parsers/optional.js"), exports); +__exportStar(require("./parsers/pipeline.js"), exports); +__exportStar(require("./parsers/promise.js"), exports); +__exportStar(require("./parsers/readonly.js"), exports); +__exportStar(require("./parsers/record.js"), exports); +__exportStar(require("./parsers/set.js"), exports); +__exportStar(require("./parsers/string.js"), exports); +__exportStar(require("./parsers/tuple.js"), exports); +__exportStar(require("./parsers/undefined.js"), exports); +__exportStar(require("./parsers/union.js"), exports); +__exportStar(require("./parsers/unknown.js"), exports); +__exportStar(require("./zodToJsonSchema.js"), exports); +const zodToJsonSchema_1 = require("./zodToJsonSchema.js"); +exports.default = zodToJsonSchema_1.zodToJsonSchema; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/index.js.map b/node_modules/openai/_vendor/zod-to-json-schema/index.js.map new file mode 100644 index 0000000..acfdef9 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/_vendor/zod-to-json-schema/index.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;AAAA,+CAA0B;AAC1B,4CAAuB;AACvB,qDAAgC;AAChC,gDAA2B;AAC3B,mDAA8B;AAC9B,qDAAgC;AAChC,sDAAiC;AACjC,uDAAkC;AAClC,uDAAkC;AAClC,qDAAgC;AAChC,oDAA+B;AAC/B,uDAAkC;AAClC,uDAAkC;AAClC,oDAA+B;AAC/B,4DAAuC;AACvC,uDAAkC;AAClC,mDAA8B;AAC9B,0DAAqC;AACrC,qDAAgC;AAChC,oDAA+B;AAC/B,wDAAmC;AACnC,sDAAiC;AACjC,sDAAiC;AACjC,wDAAmC;AACnC,wDAAmC;AACnC,uDAAkC;AAClC,wDAAmC;AACnC,sDAAiC;AACjC,mDAA8B;AAC9B,sDAAiC;AACjC,qDAAgC;AAChC,yDAAoC;AACpC,qDAAgC;AAChC,uDAAkC;AAClC,uDAAkC;AAClC,0DAAoD;AACpD,kBAAe,iCAAe,CAAC"} \ No 
newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/index.mjs b/node_modules/openai/_vendor/zod-to-json-schema/index.mjs new file mode 100644 index 0000000..6e44437 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/index.mjs @@ -0,0 +1,38 @@ +export * from "./Options.mjs"; +export * from "./Refs.mjs"; +export * from "./errorMessages.mjs"; +export * from "./parseDef.mjs"; +export * from "./parsers/any.mjs"; +export * from "./parsers/array.mjs"; +export * from "./parsers/bigint.mjs"; +export * from "./parsers/boolean.mjs"; +export * from "./parsers/branded.mjs"; +export * from "./parsers/catch.mjs"; +export * from "./parsers/date.mjs"; +export * from "./parsers/default.mjs"; +export * from "./parsers/effects.mjs"; +export * from "./parsers/enum.mjs"; +export * from "./parsers/intersection.mjs"; +export * from "./parsers/literal.mjs"; +export * from "./parsers/map.mjs"; +export * from "./parsers/nativeEnum.mjs"; +export * from "./parsers/never.mjs"; +export * from "./parsers/null.mjs"; +export * from "./parsers/nullable.mjs"; +export * from "./parsers/number.mjs"; +export * from "./parsers/object.mjs"; +export * from "./parsers/optional.mjs"; +export * from "./parsers/pipeline.mjs"; +export * from "./parsers/promise.mjs"; +export * from "./parsers/readonly.mjs"; +export * from "./parsers/record.mjs"; +export * from "./parsers/set.mjs"; +export * from "./parsers/string.mjs"; +export * from "./parsers/tuple.mjs"; +export * from "./parsers/undefined.mjs"; +export * from "./parsers/union.mjs"; +export * from "./parsers/unknown.mjs"; +export * from "./zodToJsonSchema.mjs"; +import { zodToJsonSchema } from "./zodToJsonSchema.mjs"; +export default zodToJsonSchema; +//# sourceMappingURL=index.mjs.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/index.mjs.map b/node_modules/openai/_vendor/zod-to-json-schema/index.mjs.map new file mode 100644 index 0000000..5862a5b --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/index.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"index.mjs","sourceRoot":"","sources":["../../src/_vendor/zod-to-json-schema/index.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;OAmCO,EAAE,eAAe,EAAE;AAC1B,eAAe,eAAe,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parseDef.d.ts b/node_modules/openai/_vendor/zod-to-json-schema/parseDef.d.ts new file mode 100644 index 0000000..24da486 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parseDef.d.ts @@ -0,0 +1,38 @@ +import { ZodTypeDef } from 'zod'; +import { JsonSchema7AnyType } from "./parsers/any.js"; +import { JsonSchema7ArrayType } from "./parsers/array.js"; +import { JsonSchema7BigintType } from "./parsers/bigint.js"; +import { JsonSchema7BooleanType } from "./parsers/boolean.js"; +import { JsonSchema7DateType } from "./parsers/date.js"; +import { JsonSchema7EnumType } from "./parsers/enum.js"; +import { JsonSchema7AllOfType } from "./parsers/intersection.js"; +import { JsonSchema7LiteralType } from "./parsers/literal.js"; +import { JsonSchema7MapType } from "./parsers/map.js"; +import { JsonSchema7NativeEnumType } from "./parsers/nativeEnum.js"; +import { JsonSchema7NeverType } from "./parsers/never.js"; +import { JsonSchema7NullType } from "./parsers/null.js"; +import { JsonSchema7NullableType } from "./parsers/nullable.js"; +import { JsonSchema7NumberType } from "./parsers/number.js"; +import { JsonSchema7ObjectType } from "./parsers/object.js"; +import 
{ JsonSchema7RecordType } from "./parsers/record.js"; +import { JsonSchema7SetType } from "./parsers/set.js"; +import { JsonSchema7StringType } from "./parsers/string.js"; +import { JsonSchema7TupleType } from "./parsers/tuple.js"; +import { JsonSchema7UndefinedType } from "./parsers/undefined.js"; +import { JsonSchema7UnionType } from "./parsers/union.js"; +import { JsonSchema7UnknownType } from "./parsers/unknown.js"; +import { Refs } from "./Refs.js"; +type JsonSchema7RefType = { + $ref: string; +}; +type JsonSchema7Meta = { + title?: string; + default?: any; + description?: string; + markdownDescription?: string; +}; +export type JsonSchema7TypeUnion = JsonSchema7StringType | JsonSchema7ArrayType | JsonSchema7NumberType | JsonSchema7BigintType | JsonSchema7BooleanType | JsonSchema7DateType | JsonSchema7EnumType | JsonSchema7LiteralType | JsonSchema7NativeEnumType | JsonSchema7NullType | JsonSchema7NumberType | JsonSchema7ObjectType | JsonSchema7RecordType | JsonSchema7TupleType | JsonSchema7UnionType | JsonSchema7UndefinedType | JsonSchema7RefType | JsonSchema7NeverType | JsonSchema7MapType | JsonSchema7AnyType | JsonSchema7NullableType | JsonSchema7AllOfType | JsonSchema7UnknownType | JsonSchema7SetType; +export type JsonSchema7Type = JsonSchema7TypeUnion & JsonSchema7Meta; +export declare function parseDef(def: ZodTypeDef, refs: Refs, forceResolution?: boolean): JsonSchema7Type | undefined; +export {}; +//# sourceMappingURL=parseDef.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parseDef.d.ts.map b/node_modules/openai/_vendor/zod-to-json-schema/parseDef.d.ts.map new file mode 100644 index 0000000..91ddcf2 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parseDef.d.ts.map @@ -0,0 +1 @@ 
+{"version":3,"file":"parseDef.d.ts","sourceRoot":"","sources":["../../src/_vendor/zod-to-json-schema/parseDef.ts"],"names":[],"mappings":"AAAA,OAAO,EAAyB,UAAU,EAAE,MAAM,KAAK,CAAC;AACxD,OAAO,EAAE,kBAAkB,EAAe,MAAM,eAAe,CAAC;AAChE,OAAO,EAAE,oBAAoB,EAAiB,MAAM,iBAAiB,CAAC;AACtE,OAAO,EAAE,qBAAqB,EAAkB,MAAM,kBAAkB,CAAC;AACzE,OAAO,EAAE,sBAAsB,EAAmB,MAAM,mBAAmB,CAAC;AAG5E,OAAO,EAAE,mBAAmB,EAAgB,MAAM,gBAAgB,CAAC;AAGnE,OAAO,EAAE,mBAAmB,EAAgB,MAAM,gBAAgB,CAAC;AACnE,OAAO,EAAE,oBAAoB,EAAwB,MAAM,wBAAwB,CAAC;AACpF,OAAO,EAAE,sBAAsB,EAAmB,MAAM,mBAAmB,CAAC;AAC5E,OAAO,EAAE,kBAAkB,EAAe,MAAM,eAAe,CAAC;AAChE,OAAO,EAAE,yBAAyB,EAAsB,MAAM,sBAAsB,CAAC;AACrF,OAAO,EAAE,oBAAoB,EAAiB,MAAM,iBAAiB,CAAC;AACtE,OAAO,EAAE,mBAAmB,EAAgB,MAAM,gBAAgB,CAAC;AACnE,OAAO,EAAE,uBAAuB,EAAoB,MAAM,oBAAoB,CAAC;AAC/E,OAAO,EAAE,qBAAqB,EAAkB,MAAM,kBAAkB,CAAC;AACzE,OAAO,EAAE,qBAAqB,EAAkB,MAAM,kBAAkB,CAAC;AAIzE,OAAO,EAAE,qBAAqB,EAAkB,MAAM,kBAAkB,CAAC;AACzE,OAAO,EAAE,kBAAkB,EAAe,MAAM,eAAe,CAAC;AAChE,OAAO,EAAE,qBAAqB,EAAkB,MAAM,kBAAkB,CAAC;AACzE,OAAO,EAAE,oBAAoB,EAAiB,MAAM,iBAAiB,CAAC;AACtE,OAAO,EAAE,wBAAwB,EAAqB,MAAM,qBAAqB,CAAC;AAClF,OAAO,EAAE,oBAAoB,EAAiB,MAAM,iBAAiB,CAAC;AACtE,OAAO,EAAE,sBAAsB,EAAmB,MAAM,mBAAmB,CAAC;AAC5E,OAAO,EAAE,IAAI,EAAQ,MAAM,QAAQ,CAAC;AAIpC,KAAK,kBAAkB,GAAG;IAAE,IAAI,EAAE,MAAM,CAAA;CAAE,CAAC;AAC3C,KAAK,eAAe,GAAG;IACrB,KAAK,CAAC,EAAE,MAAM,CAAC;IACf,OAAO,CAAC,EAAE,GAAG,CAAC;IACd,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,mBAAmB,CAAC,EAAE,MAAM,CAAC;CAC9B,CAAC;AAEF,MAAM,MAAM,oBAAoB,GAC5B,qBAAqB,GACrB,oBAAoB,GACpB,qBAAqB,GACrB,qBAAqB,GACrB,sBAAsB,GACtB,mBAAmB,GACnB,mBAAmB,GACnB,sBAAsB,GACtB,yBAAyB,GACzB,mBAAmB,GACnB,qBAAqB,GACrB,qBAAqB,GACrB,qBAAqB,GACrB,oBAAoB,GACpB,oBAAoB,GACpB,wBAAwB,GACxB,kBAAkB,GAClB,oBAAoB,GACpB,kBAAkB,GAClB,kBAAkB,GAClB,uBAAuB,GACvB,oBAAoB,GACpB,sBAAsB,GACtB,kBAAkB,CAAC;AAEvB,MAAM,MAAM,eAAe,GAAG,oBAAoB,GAAG,eAAe,CAAC;AAErE,wBAAgB,QAAQ,CACtB,GAAG,EAAE,UAAU,EACf,IAAI,EAAE,IAAI,EACV,eAAe,UAAQ,GACtB,eAAe,GAAG,SAAS,CAoC7B"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parseDef.js b/node_modules/openai/_vendor/zod-to-json-schema/parseDef.js new file mode 100644 index 0000000..1808cfb --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parseDef.js @@ -0,0 +1,187 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.parseDef = void 0; +const zod_1 = require("zod"); +const any_1 = require("./parsers/any.js"); +const array_1 = require("./parsers/array.js"); +const bigint_1 = require("./parsers/bigint.js"); +const boolean_1 = require("./parsers/boolean.js"); +const branded_1 = require("./parsers/branded.js"); +const catch_1 = require("./parsers/catch.js"); +const date_1 = require("./parsers/date.js"); +const default_1 = require("./parsers/default.js"); +const effects_1 = require("./parsers/effects.js"); +const enum_1 = require("./parsers/enum.js"); +const intersection_1 = require("./parsers/intersection.js"); +const literal_1 = require("./parsers/literal.js"); +const map_1 = require("./parsers/map.js"); +const nativeEnum_1 = require("./parsers/nativeEnum.js"); +const never_1 = require("./parsers/never.js"); +const null_1 = require("./parsers/null.js"); +const nullable_1 = require("./parsers/nullable.js"); +const number_1 = require("./parsers/number.js"); +const object_1 = require("./parsers/object.js"); +const optional_1 = require("./parsers/optional.js"); +const pipeline_1 = require("./parsers/pipeline.js"); +const promise_1 = require("./parsers/promise.js"); +const record_1 = require("./parsers/record.js"); +const set_1 = 
require("./parsers/set.js"); +const string_1 = require("./parsers/string.js"); +const tuple_1 = require("./parsers/tuple.js"); +const undefined_1 = require("./parsers/undefined.js"); +const union_1 = require("./parsers/union.js"); +const unknown_1 = require("./parsers/unknown.js"); +const readonly_1 = require("./parsers/readonly.js"); +const Options_1 = require("./Options.js"); +function parseDef(def, refs, forceResolution = false) { + const seenItem = refs.seen.get(def); + if (refs.override) { + const overrideResult = refs.override?.(def, refs, seenItem, forceResolution); + if (overrideResult !== Options_1.ignoreOverride) { + return overrideResult; + } + } + if (seenItem && !forceResolution) { + const seenSchema = get$ref(seenItem, refs); + if (seenSchema !== undefined) { + if ('$ref' in seenSchema) { + refs.seenRefs.add(seenSchema.$ref); + } + return seenSchema; + } + } + const newItem = { def, path: refs.currentPath, jsonSchema: undefined }; + refs.seen.set(def, newItem); + const jsonSchema = selectParser(def, def.typeName, refs, forceResolution); + if (jsonSchema) { + addMeta(def, refs, jsonSchema); + } + newItem.jsonSchema = jsonSchema; + return jsonSchema; +} +exports.parseDef = parseDef; +const get$ref = (item, refs) => { + switch (refs.$refStrategy) { + case 'root': + return { $ref: item.path.join('/') }; + // this case is needed as OpenAI strict mode doesn't support top-level `$ref`s, i.e. + // the top-level schema *must* be `{"type": "object", "properties": {...}}` but if we ever + // need to define a `$ref`, relative `$ref`s aren't supported, so we need to extract + // the schema to `#/definitions/` and reference that. + // + // e.g. if we need to reference a schema at + // `["#","definitions","contactPerson","properties","person1","properties","name"]` + // then we'll extract it out to `contactPerson_properties_person1_properties_name` + case 'extract-to-root': + const name = item.path.slice(refs.basePath.length + 1).join('_'); + // we don't need to extract the root schema in this case, as it's already + // been added to the definitions + if (name !== refs.name && refs.nameStrategy === 'duplicate-ref') { + refs.definitions[name] = item.def; + } + return { $ref: [...refs.basePath, refs.definitionPath, name].join('/') }; + case 'relative': + return { $ref: getRelativePath(refs.currentPath, item.path) }; + case 'none': + case 'seen': { + if (item.path.length < refs.currentPath.length && + item.path.every((value, index) => refs.currentPath[index] === value)) { + console.warn(`Recursive reference detected at ${refs.currentPath.join('/')}! Defaulting to any`); + return {}; + } + return refs.$refStrategy === 'seen' ? 
{} : undefined; + } + } +}; +const getRelativePath = (pathA, pathB) => { + let i = 0; + for (; i < pathA.length && i < pathB.length; i++) { + if (pathA[i] !== pathB[i]) + break; + } + return [(pathA.length - i).toString(), ...pathB.slice(i)].join('/'); +}; +const selectParser = (def, typeName, refs, forceResolution) => { + switch (typeName) { + case zod_1.ZodFirstPartyTypeKind.ZodString: + return (0, string_1.parseStringDef)(def, refs); + case zod_1.ZodFirstPartyTypeKind.ZodNumber: + return (0, number_1.parseNumberDef)(def, refs); + case zod_1.ZodFirstPartyTypeKind.ZodObject: + return (0, object_1.parseObjectDef)(def, refs); + case zod_1.ZodFirstPartyTypeKind.ZodBigInt: + return (0, bigint_1.parseBigintDef)(def, refs); + case zod_1.ZodFirstPartyTypeKind.ZodBoolean: + return (0, boolean_1.parseBooleanDef)(); + case zod_1.ZodFirstPartyTypeKind.ZodDate: + return (0, date_1.parseDateDef)(def, refs); + case zod_1.ZodFirstPartyTypeKind.ZodUndefined: + return (0, undefined_1.parseUndefinedDef)(); + case zod_1.ZodFirstPartyTypeKind.ZodNull: + return (0, null_1.parseNullDef)(refs); + case zod_1.ZodFirstPartyTypeKind.ZodArray: + return (0, array_1.parseArrayDef)(def, refs); + case zod_1.ZodFirstPartyTypeKind.ZodUnion: + case zod_1.ZodFirstPartyTypeKind.ZodDiscriminatedUnion: + return (0, union_1.parseUnionDef)(def, refs); + case zod_1.ZodFirstPartyTypeKind.ZodIntersection: + return (0, intersection_1.parseIntersectionDef)(def, refs); + case zod_1.ZodFirstPartyTypeKind.ZodTuple: + return (0, tuple_1.parseTupleDef)(def, refs); + case zod_1.ZodFirstPartyTypeKind.ZodRecord: + return (0, record_1.parseRecordDef)(def, refs); + case zod_1.ZodFirstPartyTypeKind.ZodLiteral: + return (0, literal_1.parseLiteralDef)(def, refs); + case zod_1.ZodFirstPartyTypeKind.ZodEnum: + return (0, enum_1.parseEnumDef)(def); + case zod_1.ZodFirstPartyTypeKind.ZodNativeEnum: + return (0, nativeEnum_1.parseNativeEnumDef)(def); + case zod_1.ZodFirstPartyTypeKind.ZodNullable: + return (0, nullable_1.parseNullableDef)(def, refs); + case zod_1.ZodFirstPartyTypeKind.ZodOptional: + return (0, optional_1.parseOptionalDef)(def, refs); + case zod_1.ZodFirstPartyTypeKind.ZodMap: + return (0, map_1.parseMapDef)(def, refs); + case zod_1.ZodFirstPartyTypeKind.ZodSet: + return (0, set_1.parseSetDef)(def, refs); + case zod_1.ZodFirstPartyTypeKind.ZodLazy: + return parseDef(def.getter()._def, refs); + case zod_1.ZodFirstPartyTypeKind.ZodPromise: + return (0, promise_1.parsePromiseDef)(def, refs); + case zod_1.ZodFirstPartyTypeKind.ZodNaN: + case zod_1.ZodFirstPartyTypeKind.ZodNever: + return (0, never_1.parseNeverDef)(); + case zod_1.ZodFirstPartyTypeKind.ZodEffects: + return (0, effects_1.parseEffectsDef)(def, refs, forceResolution); + case zod_1.ZodFirstPartyTypeKind.ZodAny: + return (0, any_1.parseAnyDef)(); + case zod_1.ZodFirstPartyTypeKind.ZodUnknown: + return (0, unknown_1.parseUnknownDef)(); + case zod_1.ZodFirstPartyTypeKind.ZodDefault: + return (0, default_1.parseDefaultDef)(def, refs); + case zod_1.ZodFirstPartyTypeKind.ZodBranded: + return (0, branded_1.parseBrandedDef)(def, refs); + case zod_1.ZodFirstPartyTypeKind.ZodReadonly: + return (0, readonly_1.parseReadonlyDef)(def, refs); + case zod_1.ZodFirstPartyTypeKind.ZodCatch: + return (0, catch_1.parseCatchDef)(def, refs); + case zod_1.ZodFirstPartyTypeKind.ZodPipeline: + return (0, pipeline_1.parsePipelineDef)(def, refs); + case zod_1.ZodFirstPartyTypeKind.ZodFunction: + case zod_1.ZodFirstPartyTypeKind.ZodVoid: + case zod_1.ZodFirstPartyTypeKind.ZodSymbol: + return undefined; + 
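+ // default: any other typeName yields no schema (undefined), matching the explicit ZodFunction / ZodVoid / ZodSymbol cases above.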
default: + return ((_) => undefined)(typeName); + } +}; +const addMeta = (def, refs, jsonSchema) => { + if (def.description) { + jsonSchema.description = def.description; + if (refs.markdownDescription) { + jsonSchema.markdownDescription = def.description; + } + } + return jsonSchema; +}; +//# sourceMappingURL=parseDef.js.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parseDef.js.map b/node_modules/openai/_vendor/zod-to-json-schema/parseDef.js.map new file mode 100644 index 0000000..9c36724 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parseDef.js.map @@ -0,0 +1 @@ +{"version":3,"file":"parseDef.js","sourceRoot":"","sources":["../../src/_vendor/zod-to-json-schema/parseDef.ts"],"names":[],"mappings":";;;AAAA,6BAAwD;AACxD,0CAAgE;AAChE,8CAAsE;AACtE,gDAAyE;AACzE,kDAA4E;AAC5E,kDAAoD;AACpD,8CAAgD;AAChD,4CAAmE;AACnE,kDAAoD;AACpD,kDAAoD;AACpD,4CAAmE;AACnE,4DAAoF;AACpF,kDAA4E;AAC5E,0CAAgE;AAChE,wDAAqF;AACrF,8CAAsE;AACtE,4CAAmE;AACnE,oDAA+E;AAC/E,gDAAyE;AACzE,gDAAyE;AACzE,oDAAsD;AACtD,oDAAsD;AACtD,kDAAoD;AACpD,gDAAyE;AACzE,0CAAgE;AAChE,gDAAyE;AACzE,8CAAsE;AACtE,sDAAkF;AAClF,8CAAsE;AACtE,kDAA4E;AAE5E,oDAAsD;AACtD,0CAA2C;AAsC3C,SAAgB,QAAQ,CACtB,GAAe,EACf,IAAU,EACV,eAAe,GAAG,KAAK;IAEvB,MAAM,QAAQ,GAAG,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC;IAEpC,IAAI,IAAI,CAAC,QAAQ,EAAE;QACjB,MAAM,cAAc,GAAG,IAAI,CAAC,QAAQ,EAAE,CAAC,GAAG,EAAE,IAAI,EAAE,QAAQ,EAAE,eAAe,CAAC,CAAC;QAE7E,IAAI,cAAc,KAAK,wBAAc,EAAE;YACrC,OAAO,cAAc,CAAC;SACvB;KACF;IAED,IAAI,QAAQ,IAAI,CAAC,eAAe,EAAE;QAChC,MAAM,UAAU,GAAG,OAAO,CAAC,QAAQ,EAAE,IAAI,CAAC,CAAC;QAE3C,IAAI,UAAU,KAAK,SAAS,EAAE;YAC5B,IAAI,MAAM,IAAI,UAAU,EAAE;gBACxB,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC;aACpC;YAED,OAAO,UAAU,CAAC;SACnB;KACF;IAED,MAAM,OAAO,GAAS,EAAE,GAAG,EAAE,IAAI,EAAE,IAAI,CAAC,WAAW,EAAE,UAAU,EAAE,SAAS,EAAE,CAAC;IAE7E,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,EAAE,OAAO,CAAC,CAAC;IAE5B,MAAM,UAAU,GAAG,YAAY,CAAC,GAAG,EAAG,GAAW,CAAC,QAAQ,EAAE,IAAI,EAAE,eAAe,CAAC,CAAC;IAEnF,IAAI,UAAU,EAAE;QACd,OAAO,CAAC,GAAG,EAAE,IAAI,EAAE,UAAU,CAAC,CAAC;KAChC;IAED,OAAO,CAAC,UAAU,GAAG,UAAU,CAAC;IAEhC,OAAO,UAAU,CAAC;AACpB,CAAC;AAxCD,4BAwCC;AAED,MAAM,OAAO,GAAG,CACd,IAAU,EACV,IAAU,EAME,EAAE;IACd,QAAQ,IAAI,CAAC,YAAY,EAAE;QACzB,KAAK,MAAM;YACT,OAAO,EAAE,IAAI,EAAE,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC;QACvC,oFAAoF;QACpF,0FAA0F;QAC1F,oFAAoF;QACpF,qDAAqD;QACrD,EAAE;QACF,2CAA2C;QAC3C,mFAAmF;QACnF,kFAAkF;QAClF,KAAK,iBAAiB;YACpB,MAAM,IAAI,GAAG,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,QAAQ,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;YAEjE,yEAAyE;YACzE,gCAAgC;YAChC,IAAI,IAAI,KAAK,IAAI,CAAC,IAAI,IAAI,IAAI,CAAC,YAAY,KAAK,eAAe,EAAE;gBAC/D,IAAI,CAAC,WAAW,CAAC,IAAI,CAAC,GAAG,IAAI,CAAC,GAAG,CAAC;aACnC;YAED,OAAO,EAAE,IAAI,EAAE,CAAC,GAAG,IAAI,CAAC,QAAQ,EAAE,IAAI,CAAC,cAAc,EAAE,IAAI,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC;QAC3E,KAAK,UAAU;YACb,OAAO,EAAE,IAAI,EAAE,eAAe,CAAC,IAAI,CAAC,WAAW,EAAE,IAAI,CAAC,IAAI,CAAC,EAAE,CAAC;QAChE,KAAK,MAAM,CAAC;QACZ,KAAK,MAAM,CAAC,CAAC;YACX,IACE,IAAI,CAAC,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,WAAW,CAAC,MAAM;gBAC1C,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,KAAK,EAAE,KAAK,EAAE,EAAE,CAAC,IAAI,CAAC,WAAW,CAAC,KAAK,CAAC,KAAK,KAAK,CAAC,EACpE;gBACA,OAAO,CAAC,IAAI,CAAC,mCAAmC,IAAI,CAAC,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,qBAAqB,CAAC,CAAC;gBAEjG,OAAO,EAAE,CAAC;aACX;YAED,OAAO,IAAI,CAAC,YAAY,KAAK,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,SAAS,CAAC;SACtD;KACF;AACH,CAAC,CAAC;AAEF,MAAM,eAAe,GAAG,CAAC,KAAe,EAAE,KAAe,EAAE,EAAE;IAC3D,IAAI,CAAC,GAAG,CAAC,CAAC;IACV,OAAO,CAAC,GAAG,KAAK,CAAC,MAAM,IAAI,CAAC,GAAG,KAAK,CAAC,MAAM,EAAE,
CAAC,EAAE,EAAE;QAChD,IAAI,KAAK,CAAC,CAAC,CAAC,KAAK,KAAK,CAAC,CAAC,CAAC;YAAE,MAAM;KAClC;IACD,OAAO,CAAC,CAAC,KAAK,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,QAAQ,EAAE,EAAE,GAAG,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACtE,CAAC,CAAC;AAEF,MAAM,YAAY,GAAG,CACnB,GAAQ,EACR,QAA+B,EAC/B,IAAU,EACV,eAAwB,EACK,EAAE;IAC/B,QAAQ,QAAQ,EAAE;QAChB,KAAK,2BAAqB,CAAC,SAAS;YAClC,OAAO,IAAA,uBAAc,EAAC,GAAG,EAAE,IAAI,CAAC,CAAC;QACnC,KAAK,2BAAqB,CAAC,SAAS;YAClC,OAAO,IAAA,uBAAc,EAAC,GAAG,EAAE,IAAI,CAAC,CAAC;QACnC,KAAK,2BAAqB,CAAC,SAAS;YAClC,OAAO,IAAA,uBAAc,EAAC,GAAG,EAAE,IAAI,CAAC,CAAC;QACnC,KAAK,2BAAqB,CAAC,SAAS;YAClC,OAAO,IAAA,uBAAc,EAAC,GAAG,EAAE,IAAI,CAAC,CAAC;QACnC,KAAK,2BAAqB,CAAC,UAAU;YACnC,OAAO,IAAA,yBAAe,GAAE,CAAC;QAC3B,KAAK,2BAAqB,CAAC,OAAO;YAChC,OAAO,IAAA,mBAAY,EAAC,GAAG,EAAE,IAAI,CAAC,CAAC;QACjC,KAAK,2BAAqB,CAAC,YAAY;YACrC,OAAO,IAAA,6BAAiB,GAAE,CAAC;QAC7B,KAAK,2BAAqB,CAAC,OAAO;YAChC,OAAO,IAAA,mBAAY,EAAC,IAAI,CAAC,CAAC;QAC5B,KAAK,2BAAqB,CAAC,QAAQ;YACjC,OAAO,IAAA,qBAAa,EAAC,GAAG,EAAE,IAAI,CAAC,CAAC;QAClC,KAAK,2BAAqB,CAAC,QAAQ,CAAC;QACpC,KAAK,2BAAqB,CAAC,qBAAqB;YAC9C,OAAO,IAAA,qBAAa,EAAC,GAAG,EAAE,IAAI,CAAC,CAAC;QAClC,KAAK,2BAAqB,CAAC,eAAe;YACxC,OAAO,IAAA,mCAAoB,EAAC,GAAG,EAAE,IAAI,CAAC,CAAC;QACzC,KAAK,2BAAqB,CAAC,QAAQ;YACjC,OAAO,IAAA,qBAAa,EAAC,GAAG,EAAE,IAAI,CAAC,CAAC;QAClC,KAAK,2BAAqB,CAAC,SAAS;YAClC,OAAO,IAAA,uBAAc,EAAC,GAAG,EAAE,IAAI,CAAC,CAAC;QACnC,KAAK,2BAAqB,CAAC,UAAU;YACnC,OAAO,IAAA,yBAAe,EAAC,GAAG,EAAE,IAAI,CAAC,CAAC;QACpC,KAAK,2BAAqB,CAAC,OAAO;YAChC,OAAO,IAAA,mBAAY,EAAC,GAAG,CAAC,CAAC;QAC3B,KAAK,2BAAqB,CAAC,aAAa;YACtC,OAAO,IAAA,+BAAkB,EAAC,GAAG,CAAC,CAAC;QACjC,KAAK,2BAAqB,CAAC,WAAW;YACpC,OAAO,IAAA,2BAAgB,EAAC,GAAG,EAAE,IAAI,CAAC,CAAC;QACrC,KAAK,2BAAqB,CAAC,WAAW;YACpC,OAAO,IAAA,2BAAgB,EAAC,GAAG,EAAE,IAAI,CAAC,CAAC;QACrC,KAAK,2BAAqB,CAAC,MAAM;YAC/B,OAAO,IAAA,iBAAW,EAAC,GAAG,EAAE,IAAI,CAAC,CAAC;QAChC,KAAK,2BAAqB,CAAC,MAAM;YAC/B,OAAO,IAAA,iBAAW,EAAC,GAAG,EAAE,IAAI,CAAC,CAAC;QAChC,KAAK,2BAAqB,CAAC,OAAO;YAChC,OAAO,QAAQ,CAAC,GAAG,CAAC,MAAM,EAAE,CAAC,IAAI,EAAE,IAAI,CAAC,CAAC;QAC3C,KAAK,2BAAqB,CAAC,UAAU;YACnC,OAAO,IAAA,yBAAe,EAAC,GAAG,EAAE,IAAI,CAAC,CAAC;QACpC,KAAK,2BAAqB,CAAC,MAAM,CAAC;QAClC,KAAK,2BAAqB,CAAC,QAAQ;YACjC,OAAO,IAAA,qBAAa,GAAE,CAAC;QACzB,KAAK,2BAAqB,CAAC,UAAU;YACnC,OAAO,IAAA,yBAAe,EAAC,GAAG,EAAE,IAAI,EAAE,eAAe,CAAC,CAAC;QACrD,KAAK,2BAAqB,CAAC,MAAM;YAC/B,OAAO,IAAA,iBAAW,GAAE,CAAC;QACvB,KAAK,2BAAqB,CAAC,UAAU;YACnC,OAAO,IAAA,yBAAe,GAAE,CAAC;QAC3B,KAAK,2BAAqB,CAAC,UAAU;YACnC,OAAO,IAAA,yBAAe,EAAC,GAAG,EAAE,IAAI,CAAC,CAAC;QACpC,KAAK,2BAAqB,CAAC,UAAU;YACnC,OAAO,IAAA,yBAAe,EAAC,GAAG,EAAE,IAAI,CAAC,CAAC;QACpC,KAAK,2BAAqB,CAAC,WAAW;YACpC,OAAO,IAAA,2BAAgB,EAAC,GAAG,EAAE,IAAI,CAAC,CAAC;QACrC,KAAK,2BAAqB,CAAC,QAAQ;YACjC,OAAO,IAAA,qBAAa,EAAC,GAAG,EAAE,IAAI,CAAC,CAAC;QAClC,KAAK,2BAAqB,CAAC,WAAW;YACpC,OAAO,IAAA,2BAAgB,EAAC,GAAG,EAAE,IAAI,CAAC,CAAC;QACrC,KAAK,2BAAqB,CAAC,WAAW,CAAC;QACvC,KAAK,2BAAqB,CAAC,OAAO,CAAC;QACnC,KAAK,2BAAqB,CAAC,SAAS;YAClC,OAAO,SAAS,CAAC;QACnB;YACE,OAAO,CAAC,CAAC,CAAQ,EAAE,EAAE,CAAC,SAAS,CAAC,CAAC,QAAQ,CAAC,CAAC;KAC9C;AACH,CAAC,CAAC;AAEF,MAAM,OAAO,GAAG,CAAC,GAAe,EAAE,IAAU,EAAE,UAA2B,EAAmB,EAAE;IAC5F,IAAI,GAAG,CAAC,WAAW,EAAE;QACnB,UAAU,CAAC,WAAW,GAAG,GAAG,CAAC,WAAW,CAAC;QAEzC,IAAI,IAAI,CAAC,mBAAmB,EAAE;YAC5B,UAAU,CAAC,mBAAmB,GAAG,GAAG,CAAC,WAAW,CAAC;SAClD;KACF;IACD,OAAO,UAAU,CAAC;AACpB,CAAC,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parseDef.mjs b/node_modules/openai/_vendor/zod-to-json-schema/parseDef.mjs new file mode 100644 index 0000000..380eaea --- /dev/null +++ 
b/node_modules/openai/_vendor/zod-to-json-schema/parseDef.mjs @@ -0,0 +1,183 @@ +import { ZodFirstPartyTypeKind } from 'zod'; +import { parseAnyDef } from "./parsers/any.mjs"; +import { parseArrayDef } from "./parsers/array.mjs"; +import { parseBigintDef } from "./parsers/bigint.mjs"; +import { parseBooleanDef } from "./parsers/boolean.mjs"; +import { parseBrandedDef } from "./parsers/branded.mjs"; +import { parseCatchDef } from "./parsers/catch.mjs"; +import { parseDateDef } from "./parsers/date.mjs"; +import { parseDefaultDef } from "./parsers/default.mjs"; +import { parseEffectsDef } from "./parsers/effects.mjs"; +import { parseEnumDef } from "./parsers/enum.mjs"; +import { parseIntersectionDef } from "./parsers/intersection.mjs"; +import { parseLiteralDef } from "./parsers/literal.mjs"; +import { parseMapDef } from "./parsers/map.mjs"; +import { parseNativeEnumDef } from "./parsers/nativeEnum.mjs"; +import { parseNeverDef } from "./parsers/never.mjs"; +import { parseNullDef } from "./parsers/null.mjs"; +import { parseNullableDef } from "./parsers/nullable.mjs"; +import { parseNumberDef } from "./parsers/number.mjs"; +import { parseObjectDef } from "./parsers/object.mjs"; +import { parseOptionalDef } from "./parsers/optional.mjs"; +import { parsePipelineDef } from "./parsers/pipeline.mjs"; +import { parsePromiseDef } from "./parsers/promise.mjs"; +import { parseRecordDef } from "./parsers/record.mjs"; +import { parseSetDef } from "./parsers/set.mjs"; +import { parseStringDef } from "./parsers/string.mjs"; +import { parseTupleDef } from "./parsers/tuple.mjs"; +import { parseUndefinedDef } from "./parsers/undefined.mjs"; +import { parseUnionDef } from "./parsers/union.mjs"; +import { parseUnknownDef } from "./parsers/unknown.mjs"; +import { parseReadonlyDef } from "./parsers/readonly.mjs"; +import { ignoreOverride } from "./Options.mjs"; +export function parseDef(def, refs, forceResolution = false) { + const seenItem = refs.seen.get(def); + if (refs.override) { + const overrideResult = refs.override?.(def, refs, seenItem, forceResolution); + if (overrideResult !== ignoreOverride) { + return overrideResult; + } + } + if (seenItem && !forceResolution) { + const seenSchema = get$ref(seenItem, refs); + if (seenSchema !== undefined) { + if ('$ref' in seenSchema) { + refs.seenRefs.add(seenSchema.$ref); + } + return seenSchema; + } + } + const newItem = { def, path: refs.currentPath, jsonSchema: undefined }; + refs.seen.set(def, newItem); + const jsonSchema = selectParser(def, def.typeName, refs, forceResolution); + if (jsonSchema) { + addMeta(def, refs, jsonSchema); + } + newItem.jsonSchema = jsonSchema; + return jsonSchema; +} +const get$ref = (item, refs) => { + switch (refs.$refStrategy) { + case 'root': + return { $ref: item.path.join('/') }; + // this case is needed as OpenAI strict mode doesn't support top-level `$ref`s, i.e. + // the top-level schema *must* be `{"type": "object", "properties": {...}}` but if we ever + // need to define a `$ref`, relative `$ref`s aren't supported, so we need to extract + // the schema to `#/definitions/` and reference that. + // + // e.g. 
if we need to reference a schema at + // `["#","definitions","contactPerson","properties","person1","properties","name"]` + // then we'll extract it out to `contactPerson_properties_person1_properties_name` + case 'extract-to-root': + const name = item.path.slice(refs.basePath.length + 1).join('_'); + // we don't need to extract the root schema in this case, as it's already + // been added to the definitions + if (name !== refs.name && refs.nameStrategy === 'duplicate-ref') { + refs.definitions[name] = item.def; + } + return { $ref: [...refs.basePath, refs.definitionPath, name].join('/') }; + case 'relative': + return { $ref: getRelativePath(refs.currentPath, item.path) }; + case 'none': + case 'seen': { + if (item.path.length < refs.currentPath.length && + item.path.every((value, index) => refs.currentPath[index] === value)) { + console.warn(`Recursive reference detected at ${refs.currentPath.join('/')}! Defaulting to any`); + return {}; + } + return refs.$refStrategy === 'seen' ? {} : undefined; + } + } +}; +const getRelativePath = (pathA, pathB) => { + let i = 0; + for (; i < pathA.length && i < pathB.length; i++) { + if (pathA[i] !== pathB[i]) + break; + } + return [(pathA.length - i).toString(), ...pathB.slice(i)].join('/'); +}; +const selectParser = (def, typeName, refs, forceResolution) => { + switch (typeName) { + case ZodFirstPartyTypeKind.ZodString: + return parseStringDef(def, refs); + case ZodFirstPartyTypeKind.ZodNumber: + return parseNumberDef(def, refs); + case ZodFirstPartyTypeKind.ZodObject: + return parseObjectDef(def, refs); + case ZodFirstPartyTypeKind.ZodBigInt: + return parseBigintDef(def, refs); + case ZodFirstPartyTypeKind.ZodBoolean: + return parseBooleanDef(); + case ZodFirstPartyTypeKind.ZodDate: + return parseDateDef(def, refs); + case ZodFirstPartyTypeKind.ZodUndefined: + return parseUndefinedDef(); + case ZodFirstPartyTypeKind.ZodNull: + return parseNullDef(refs); + case ZodFirstPartyTypeKind.ZodArray: + return parseArrayDef(def, refs); + case ZodFirstPartyTypeKind.ZodUnion: + case ZodFirstPartyTypeKind.ZodDiscriminatedUnion: + return parseUnionDef(def, refs); + case ZodFirstPartyTypeKind.ZodIntersection: + return parseIntersectionDef(def, refs); + case ZodFirstPartyTypeKind.ZodTuple: + return parseTupleDef(def, refs); + case ZodFirstPartyTypeKind.ZodRecord: + return parseRecordDef(def, refs); + case ZodFirstPartyTypeKind.ZodLiteral: + return parseLiteralDef(def, refs); + case ZodFirstPartyTypeKind.ZodEnum: + return parseEnumDef(def); + case ZodFirstPartyTypeKind.ZodNativeEnum: + return parseNativeEnumDef(def); + case ZodFirstPartyTypeKind.ZodNullable: + return parseNullableDef(def, refs); + case ZodFirstPartyTypeKind.ZodOptional: + return parseOptionalDef(def, refs); + case ZodFirstPartyTypeKind.ZodMap: + return parseMapDef(def, refs); + case ZodFirstPartyTypeKind.ZodSet: + return parseSetDef(def, refs); + case ZodFirstPartyTypeKind.ZodLazy: + return parseDef(def.getter()._def, refs); + case ZodFirstPartyTypeKind.ZodPromise: + return parsePromiseDef(def, refs); + case ZodFirstPartyTypeKind.ZodNaN: + case ZodFirstPartyTypeKind.ZodNever: + return parseNeverDef(); + case ZodFirstPartyTypeKind.ZodEffects: + return parseEffectsDef(def, refs, forceResolution); + case ZodFirstPartyTypeKind.ZodAny: + return parseAnyDef(); + case ZodFirstPartyTypeKind.ZodUnknown: + return parseUnknownDef(); + case ZodFirstPartyTypeKind.ZodDefault: + return parseDefaultDef(def, refs); + case ZodFirstPartyTypeKind.ZodBranded: + return parseBrandedDef(def, refs); + case 
ZodFirstPartyTypeKind.ZodReadonly: + return parseReadonlyDef(def, refs); + case ZodFirstPartyTypeKind.ZodCatch: + return parseCatchDef(def, refs); + case ZodFirstPartyTypeKind.ZodPipeline: + return parsePipelineDef(def, refs); + case ZodFirstPartyTypeKind.ZodFunction: + case ZodFirstPartyTypeKind.ZodVoid: + case ZodFirstPartyTypeKind.ZodSymbol: + return undefined; + default: + return ((_) => undefined)(typeName); + } +}; +const addMeta = (def, refs, jsonSchema) => { + if (def.description) { + jsonSchema.description = def.description; + if (refs.markdownDescription) { + jsonSchema.markdownDescription = def.description; + } + } + return jsonSchema; +}; +//# sourceMappingURL=parseDef.mjs.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parseDef.mjs.map b/node_modules/openai/_vendor/zod-to-json-schema/parseDef.mjs.map new file mode 100644 index 0000000..feaecec --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parseDef.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"parseDef.mjs","sourceRoot":"","sources":["../../src/_vendor/zod-to-json-schema/parseDef.ts"],"names":[],"mappings":"OAAO,EAAE,qBAAqB,EAAc,MAAM,KAAK;OAChD,EAAsB,WAAW,EAAE;OACnC,EAAwB,aAAa,EAAE;OACvC,EAAyB,cAAc,EAAE;OACzC,EAA0B,eAAe,EAAE;OAC3C,EAAE,eAAe,EAAE;OACnB,EAAE,aAAa,EAAE;OACjB,EAAuB,YAAY,EAAE;OACrC,EAAE,eAAe,EAAE;OACnB,EAAE,eAAe,EAAE;OACnB,EAAuB,YAAY,EAAE;OACrC,EAAwB,oBAAoB,EAAE;OAC9C,EAA0B,eAAe,EAAE;OAC3C,EAAsB,WAAW,EAAE;OACnC,EAA6B,kBAAkB,EAAE;OACjD,EAAwB,aAAa,EAAE;OACvC,EAAuB,YAAY,EAAE;OACrC,EAA2B,gBAAgB,EAAE;OAC7C,EAAyB,cAAc,EAAE;OACzC,EAAyB,cAAc,EAAE;OACzC,EAAE,gBAAgB,EAAE;OACpB,EAAE,gBAAgB,EAAE;OACpB,EAAE,eAAe,EAAE;OACnB,EAAyB,cAAc,EAAE;OACzC,EAAsB,WAAW,EAAE;OACnC,EAAyB,cAAc,EAAE;OACzC,EAAwB,aAAa,EAAE;OACvC,EAA4B,iBAAiB,EAAE;OAC/C,EAAwB,aAAa,EAAE;OACvC,EAA0B,eAAe,EAAE;OAE3C,EAAE,gBAAgB,EAAE;OACpB,EAAE,cAAc,EAAE;AAsCzB,MAAM,UAAU,QAAQ,CACtB,GAAe,EACf,IAAU,EACV,eAAe,GAAG,KAAK;IAEvB,MAAM,QAAQ,GAAG,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC;IAEpC,IAAI,IAAI,CAAC,QAAQ,EAAE;QACjB,MAAM,cAAc,GAAG,IAAI,CAAC,QAAQ,EAAE,CAAC,GAAG,EAAE,IAAI,EAAE,QAAQ,EAAE,eAAe,CAAC,CAAC;QAE7E,IAAI,cAAc,KAAK,cAAc,EAAE;YACrC,OAAO,cAAc,CAAC;SACvB;KACF;IAED,IAAI,QAAQ,IAAI,CAAC,eAAe,EAAE;QAChC,MAAM,UAAU,GAAG,OAAO,CAAC,QAAQ,EAAE,IAAI,CAAC,CAAC;QAE3C,IAAI,UAAU,KAAK,SAAS,EAAE;YAC5B,IAAI,MAAM,IAAI,UAAU,EAAE;gBACxB,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC;aACpC;YAED,OAAO,UAAU,CAAC;SACnB;KACF;IAED,MAAM,OAAO,GAAS,EAAE,GAAG,EAAE,IAAI,EAAE,IAAI,CAAC,WAAW,EAAE,UAAU,EAAE,SAAS,EAAE,CAAC;IAE7E,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,EAAE,OAAO,CAAC,CAAC;IAE5B,MAAM,UAAU,GAAG,YAAY,CAAC,GAAG,EAAG,GAAW,CAAC,QAAQ,EAAE,IAAI,EAAE,eAAe,CAAC,CAAC;IAEnF,IAAI,UAAU,EAAE;QACd,OAAO,CAAC,GAAG,EAAE,IAAI,EAAE,UAAU,CAAC,CAAC;KAChC;IAED,OAAO,CAAC,UAAU,GAAG,UAAU,CAAC;IAEhC,OAAO,UAAU,CAAC;AACpB,CAAC;AAED,MAAM,OAAO,GAAG,CACd,IAAU,EACV,IAAU,EAME,EAAE;IACd,QAAQ,IAAI,CAAC,YAAY,EAAE;QACzB,KAAK,MAAM;YACT,OAAO,EAAE,IAAI,EAAE,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC;QACvC,oFAAoF;QACpF,0FAA0F;QAC1F,oFAAoF;QACpF,qDAAqD;QACrD,EAAE;QACF,2CAA2C;QAC3C,mFAAmF;QACnF,kFAAkF;QAClF,KAAK,iBAAiB;YACpB,MAAM,IAAI,GAAG,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,QAAQ,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;YAEjE,yEAAyE;YACzE,gCAAgC;YAChC,IAAI,IAAI,KAAK,IAAI,CAAC,IAAI,IAAI,IAAI,CAAC,YAAY,KAAK,eAAe,EAAE;gBAC/D,IAAI,CAAC,WAAW,CAAC,IAAI,CAAC,GAAG,IAAI,CAAC,GAAG,CAAC;aACnC;YAED,OAAO,EAAE,IAAI,EAAE,CAAC,GAAG,IAAI,CAAC,QAAQ,EAAE,IAAI,CAAC,cAAc,EAAE,IAAI,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC;QAC3E,KAAK,UAAU;YACb,OAAO,EAAE,IAAI,EAAE,eAA
e,CAAC,IAAI,CAAC,WAAW,EAAE,IAAI,CAAC,IAAI,CAAC,EAAE,CAAC;QAChE,KAAK,MAAM,CAAC;QACZ,KAAK,MAAM,CAAC,CAAC;YACX,IACE,IAAI,CAAC,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,WAAW,CAAC,MAAM;gBAC1C,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,KAAK,EAAE,KAAK,EAAE,EAAE,CAAC,IAAI,CAAC,WAAW,CAAC,KAAK,CAAC,KAAK,KAAK,CAAC,EACpE;gBACA,OAAO,CAAC,IAAI,CAAC,mCAAmC,IAAI,CAAC,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,qBAAqB,CAAC,CAAC;gBAEjG,OAAO,EAAE,CAAC;aACX;YAED,OAAO,IAAI,CAAC,YAAY,KAAK,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,SAAS,CAAC;SACtD;KACF;AACH,CAAC,CAAC;AAEF,MAAM,eAAe,GAAG,CAAC,KAAe,EAAE,KAAe,EAAE,EAAE;IAC3D,IAAI,CAAC,GAAG,CAAC,CAAC;IACV,OAAO,CAAC,GAAG,KAAK,CAAC,MAAM,IAAI,CAAC,GAAG,KAAK,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;QAChD,IAAI,KAAK,CAAC,CAAC,CAAC,KAAK,KAAK,CAAC,CAAC,CAAC;YAAE,MAAM;KAClC;IACD,OAAO,CAAC,CAAC,KAAK,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,QAAQ,EAAE,EAAE,GAAG,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACtE,CAAC,CAAC;AAEF,MAAM,YAAY,GAAG,CACnB,GAAQ,EACR,QAA+B,EAC/B,IAAU,EACV,eAAwB,EACK,EAAE;IAC/B,QAAQ,QAAQ,EAAE;QAChB,KAAK,qBAAqB,CAAC,SAAS;YAClC,OAAO,cAAc,CAAC,GAAG,EAAE,IAAI,CAAC,CAAC;QACnC,KAAK,qBAAqB,CAAC,SAAS;YAClC,OAAO,cAAc,CAAC,GAAG,EAAE,IAAI,CAAC,CAAC;QACnC,KAAK,qBAAqB,CAAC,SAAS;YAClC,OAAO,cAAc,CAAC,GAAG,EAAE,IAAI,CAAC,CAAC;QACnC,KAAK,qBAAqB,CAAC,SAAS;YAClC,OAAO,cAAc,CAAC,GAAG,EAAE,IAAI,CAAC,CAAC;QACnC,KAAK,qBAAqB,CAAC,UAAU;YACnC,OAAO,eAAe,EAAE,CAAC;QAC3B,KAAK,qBAAqB,CAAC,OAAO;YAChC,OAAO,YAAY,CAAC,GAAG,EAAE,IAAI,CAAC,CAAC;QACjC,KAAK,qBAAqB,CAAC,YAAY;YACrC,OAAO,iBAAiB,EAAE,CAAC;QAC7B,KAAK,qBAAqB,CAAC,OAAO;YAChC,OAAO,YAAY,CAAC,IAAI,CAAC,CAAC;QAC5B,KAAK,qBAAqB,CAAC,QAAQ;YACjC,OAAO,aAAa,CAAC,GAAG,EAAE,IAAI,CAAC,CAAC;QAClC,KAAK,qBAAqB,CAAC,QAAQ,CAAC;QACpC,KAAK,qBAAqB,CAAC,qBAAqB;YAC9C,OAAO,aAAa,CAAC,GAAG,EAAE,IAAI,CAAC,CAAC;QAClC,KAAK,qBAAqB,CAAC,eAAe;YACxC,OAAO,oBAAoB,CAAC,GAAG,EAAE,IAAI,CAAC,CAAC;QACzC,KAAK,qBAAqB,CAAC,QAAQ;YACjC,OAAO,aAAa,CAAC,GAAG,EAAE,IAAI,CAAC,CAAC;QAClC,KAAK,qBAAqB,CAAC,SAAS;YAClC,OAAO,cAAc,CAAC,GAAG,EAAE,IAAI,CAAC,CAAC;QACnC,KAAK,qBAAqB,CAAC,UAAU;YACnC,OAAO,eAAe,CAAC,GAAG,EAAE,IAAI,CAAC,CAAC;QACpC,KAAK,qBAAqB,CAAC,OAAO;YAChC,OAAO,YAAY,CAAC,GAAG,CAAC,CAAC;QAC3B,KAAK,qBAAqB,CAAC,aAAa;YACtC,OAAO,kBAAkB,CAAC,GAAG,CAAC,CAAC;QACjC,KAAK,qBAAqB,CAAC,WAAW;YACpC,OAAO,gBAAgB,CAAC,GAAG,EAAE,IAAI,CAAC,CAAC;QACrC,KAAK,qBAAqB,CAAC,WAAW;YACpC,OAAO,gBAAgB,CAAC,GAAG,EAAE,IAAI,CAAC,CAAC;QACrC,KAAK,qBAAqB,CAAC,MAAM;YAC/B,OAAO,WAAW,CAAC,GAAG,EAAE,IAAI,CAAC,CAAC;QAChC,KAAK,qBAAqB,CAAC,MAAM;YAC/B,OAAO,WAAW,CAAC,GAAG,EAAE,IAAI,CAAC,CAAC;QAChC,KAAK,qBAAqB,CAAC,OAAO;YAChC,OAAO,QAAQ,CAAC,GAAG,CAAC,MAAM,EAAE,CAAC,IAAI,EAAE,IAAI,CAAC,CAAC;QAC3C,KAAK,qBAAqB,CAAC,UAAU;YACnC,OAAO,eAAe,CAAC,GAAG,EAAE,IAAI,CAAC,CAAC;QACpC,KAAK,qBAAqB,CAAC,MAAM,CAAC;QAClC,KAAK,qBAAqB,CAAC,QAAQ;YACjC,OAAO,aAAa,EAAE,CAAC;QACzB,KAAK,qBAAqB,CAAC,UAAU;YACnC,OAAO,eAAe,CAAC,GAAG,EAAE,IAAI,EAAE,eAAe,CAAC,CAAC;QACrD,KAAK,qBAAqB,CAAC,MAAM;YAC/B,OAAO,WAAW,EAAE,CAAC;QACvB,KAAK,qBAAqB,CAAC,UAAU;YACnC,OAAO,eAAe,EAAE,CAAC;QAC3B,KAAK,qBAAqB,CAAC,UAAU;YACnC,OAAO,eAAe,CAAC,GAAG,EAAE,IAAI,CAAC,CAAC;QACpC,KAAK,qBAAqB,CAAC,UAAU;YACnC,OAAO,eAAe,CAAC,GAAG,EAAE,IAAI,CAAC,CAAC;QACpC,KAAK,qBAAqB,CAAC,WAAW;YACpC,OAAO,gBAAgB,CAAC,GAAG,EAAE,IAAI,CAAC,CAAC;QACrC,KAAK,qBAAqB,CAAC,QAAQ;YACjC,OAAO,aAAa,CAAC,GAAG,EAAE,IAAI,CAAC,CAAC;QAClC,KAAK,qBAAqB,CAAC,WAAW;YACpC,OAAO,gBAAgB,CAAC,GAAG,EAAE,IAAI,CAAC,CAAC;QACrC,KAAK,qBAAqB,CAAC,WAAW,CAAC;QACvC,KAAK,qBAAqB,CAAC,OAAO,CAAC;QACnC,KAAK,qBAAqB,CAAC,SAAS;YAClC,OAAO,SAAS,CAAC;QACnB;YACE,OAAO,CAAC,CAAC,CAAQ,EAAE,EAAE,CAAC,SAAS,CAAC,CAAC,QAAQ,CAAC,CAAC;KAC9C;AACH,CAAC,CAAC;AAEF,MAAM,OAAO,GAAG,CAAC,GAAe,EAAE,IAAU,EAAE,UAA2B,EAAmB,EA
AE;IAC5F,IAAI,GAAG,CAAC,WAAW,EAAE;QACnB,UAAU,CAAC,WAAW,GAAG,GAAG,CAAC,WAAW,CAAC;QAEzC,IAAI,IAAI,CAAC,mBAAmB,EAAE;YAC5B,UAAU,CAAC,mBAAmB,GAAG,GAAG,CAAC,WAAW,CAAC;SAClD;KACF;IACD,OAAO,UAAU,CAAC;AACpB,CAAC,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/any.d.ts b/node_modules/openai/_vendor/zod-to-json-schema/parsers/any.d.ts new file mode 100644 index 0000000..587884c --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/any.d.ts @@ -0,0 +1,3 @@ +export type JsonSchema7AnyType = {}; +export declare function parseAnyDef(): JsonSchema7AnyType; +//# sourceMappingURL=any.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/any.d.ts.map b/node_modules/openai/_vendor/zod-to-json-schema/parsers/any.d.ts.map new file mode 100644 index 0000000..dcd2334 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/any.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"any.d.ts","sourceRoot":"","sources":["../../../src/_vendor/zod-to-json-schema/parsers/any.ts"],"names":[],"mappings":"AAAA,MAAM,MAAM,kBAAkB,GAAG,EAAE,CAAC;AAEpC,wBAAgB,WAAW,IAAI,kBAAkB,CAEhD"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/any.js b/node_modules/openai/_vendor/zod-to-json-schema/parsers/any.js new file mode 100644 index 0000000..b3bfa8f --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/any.js @@ -0,0 +1,8 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.parseAnyDef = void 0; +function parseAnyDef() { + return {}; +} +exports.parseAnyDef = parseAnyDef; +//# sourceMappingURL=any.js.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/any.js.map b/node_modules/openai/_vendor/zod-to-json-schema/parsers/any.js.map new file mode 100644 index 0000000..b5c98a4 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/any.js.map @@ -0,0 +1 @@ +{"version":3,"file":"any.js","sourceRoot":"","sources":["../../../src/_vendor/zod-to-json-schema/parsers/any.ts"],"names":[],"mappings":";;;AAEA,SAAgB,WAAW;IACzB,OAAO,EAAE,CAAC;AACZ,CAAC;AAFD,kCAEC"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/any.mjs b/node_modules/openai/_vendor/zod-to-json-schema/parsers/any.mjs new file mode 100644 index 0000000..082dc76 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/any.mjs @@ -0,0 +1,4 @@ +export function parseAnyDef() { + return {}; +} +//# sourceMappingURL=any.mjs.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/any.mjs.map b/node_modules/openai/_vendor/zod-to-json-schema/parsers/any.mjs.map new file mode 100644 index 0000000..4f0715d --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/any.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"any.mjs","sourceRoot":"","sources":["../../../src/_vendor/zod-to-json-schema/parsers/any.ts"],"names":[],"mappings":"AAEA,MAAM,UAAU,WAAW;IACzB,OAAO,EAAE,CAAC;AACZ,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/array.d.ts b/node_modules/openai/_vendor/zod-to-json-schema/parsers/array.d.ts new file mode 100644 index 0000000..af38f20 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/array.d.ts @@ -0,0 +1,13 @@ +import { ZodArrayDef } from 'zod'; +import { ErrorMessages } from 
"../errorMessages.js"; +import { JsonSchema7Type } from "../parseDef.js"; +import { Refs } from "../Refs.js"; +export type JsonSchema7ArrayType = { + type: 'array'; + items?: JsonSchema7Type | undefined; + minItems?: number; + maxItems?: number; + errorMessages?: ErrorMessages; +}; +export declare function parseArrayDef(def: ZodArrayDef, refs: Refs): JsonSchema7ArrayType; +//# sourceMappingURL=array.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/array.d.ts.map b/node_modules/openai/_vendor/zod-to-json-schema/parsers/array.d.ts.map new file mode 100644 index 0000000..f6e73a9 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/array.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"array.d.ts","sourceRoot":"","sources":["../../../src/_vendor/zod-to-json-schema/parsers/array.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,WAAW,EAAyB,MAAM,KAAK,CAAC;AACzD,OAAO,EAAE,aAAa,EAA6B,MAAM,kBAAkB,CAAC;AAC5E,OAAO,EAAE,eAAe,EAAY,MAAM,aAAa,CAAC;AACxD,OAAO,EAAE,IAAI,EAAE,MAAM,SAAS,CAAC;AAE/B,MAAM,MAAM,oBAAoB,GAAG;IACjC,IAAI,EAAE,OAAO,CAAC;IACd,KAAK,CAAC,EAAE,eAAe,GAAG,SAAS,CAAC;IACpC,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,aAAa,CAAC,EAAE,aAAa,CAAC,oBAAoB,EAAE,OAAO,CAAC,CAAC;CAC9D,CAAC;AAEF,wBAAgB,aAAa,CAAC,GAAG,EAAE,WAAW,EAAE,IAAI,EAAE,IAAI,wBAsBzD"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/array.js b/node_modules/openai/_vendor/zod-to-json-schema/parsers/array.js new file mode 100644 index 0000000..d299b01 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/array.js @@ -0,0 +1,30 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.parseArrayDef = void 0; +const zod_1 = require("zod"); +const errorMessages_1 = require("../errorMessages.js"); +const parseDef_1 = require("../parseDef.js"); +function parseArrayDef(def, refs) { + const res = { + type: 'array', + }; + if (def.type?._def?.typeName !== zod_1.ZodFirstPartyTypeKind.ZodAny) { + res.items = (0, parseDef_1.parseDef)(def.type._def, { + ...refs, + currentPath: [...refs.currentPath, 'items'], + }); + } + if (def.minLength) { + (0, errorMessages_1.setResponseValueAndErrors)(res, 'minItems', def.minLength.value, def.minLength.message, refs); + } + if (def.maxLength) { + (0, errorMessages_1.setResponseValueAndErrors)(res, 'maxItems', def.maxLength.value, def.maxLength.message, refs); + } + if (def.exactLength) { + (0, errorMessages_1.setResponseValueAndErrors)(res, 'minItems', def.exactLength.value, def.exactLength.message, refs); + (0, errorMessages_1.setResponseValueAndErrors)(res, 'maxItems', def.exactLength.value, def.exactLength.message, refs); + } + return res; +} +exports.parseArrayDef = parseArrayDef; +//# sourceMappingURL=array.js.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/array.js.map b/node_modules/openai/_vendor/zod-to-json-schema/parsers/array.js.map new file mode 100644 index 0000000..16da8fb --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/array.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"array.js","sourceRoot":"","sources":["../../../src/_vendor/zod-to-json-schema/parsers/array.ts"],"names":[],"mappings":";;;AAAA,6BAAyD;AACzD,uDAA4E;AAC5E,6CAAwD;AAWxD,SAAgB,aAAa,CAAC,GAAgB,EAAE,IAAU;IACxD,MAAM,GAAG,GAAyB;QAChC,IAAI,EAAE,OAAO;KACd,CAAC;IACF,IAAI,GAAG,CAAC,IAAI,EAAE,IAAI,EAAE,QAAQ,KAAK,2BAAqB,CAAC,MAAM,EAAE;QAC7D,GAAG,CAAC,KAAK,GAAG,IAAA,mBAAQ,EAAC,GAAG,CAAC,IAAI,CAAC,IAAI,EAAE;YAClC,GAAG,IAAI;YACP,WAAW,EAAE,CAAC,GAAG,IAAI,CAAC,WAAW,EAAE,OAAO,CAAC;SAC5C,CAAC,CAAC;KACJ;IAED,IAAI,GAAG,CAAC,SAAS,EAAE;QACjB,IAAA,yCAAyB,EAAC,GAAG,EAAE,UAAU,EAAE,GAAG,CAAC,SAAS,CAAC,KAAK,EAAE,GAAG,CAAC,SAAS,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;KAC9F;IACD,IAAI,GAAG,CAAC,SAAS,EAAE;QACjB,IAAA,yCAAyB,EAAC,GAAG,EAAE,UAAU,EAAE,GAAG,CAAC,SAAS,CAAC,KAAK,EAAE,GAAG,CAAC,SAAS,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;KAC9F;IACD,IAAI,GAAG,CAAC,WAAW,EAAE;QACnB,IAAA,yCAAyB,EAAC,GAAG,EAAE,UAAU,EAAE,GAAG,CAAC,WAAW,CAAC,KAAK,EAAE,GAAG,CAAC,WAAW,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;QACjG,IAAA,yCAAyB,EAAC,GAAG,EAAE,UAAU,EAAE,GAAG,CAAC,WAAW,CAAC,KAAK,EAAE,GAAG,CAAC,WAAW,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;KAClG;IACD,OAAO,GAAG,CAAC;AACb,CAAC;AAtBD,sCAsBC"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/array.mjs b/node_modules/openai/_vendor/zod-to-json-schema/parsers/array.mjs new file mode 100644 index 0000000..599a1ab --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/array.mjs @@ -0,0 +1,26 @@ +import { ZodFirstPartyTypeKind } from 'zod'; +import { setResponseValueAndErrors } from "../errorMessages.mjs"; +import { parseDef } from "../parseDef.mjs"; +export function parseArrayDef(def, refs) { + const res = { + type: 'array', + }; + if (def.type?._def?.typeName !== ZodFirstPartyTypeKind.ZodAny) { + res.items = parseDef(def.type._def, { + ...refs, + currentPath: [...refs.currentPath, 'items'], + }); + } + if (def.minLength) { + setResponseValueAndErrors(res, 'minItems', def.minLength.value, def.minLength.message, refs); + } + if (def.maxLength) { + setResponseValueAndErrors(res, 'maxItems', def.maxLength.value, def.maxLength.message, refs); + } + if (def.exactLength) { + setResponseValueAndErrors(res, 'minItems', def.exactLength.value, def.exactLength.message, refs); + setResponseValueAndErrors(res, 'maxItems', def.exactLength.value, def.exactLength.message, refs); + } + return res; +} +//# sourceMappingURL=array.mjs.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/array.mjs.map b/node_modules/openai/_vendor/zod-to-json-schema/parsers/array.mjs.map new file mode 100644 index 0000000..2092d52 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/array.mjs.map @@ -0,0 +1 @@ 
+{"version":3,"file":"array.mjs","sourceRoot":"","sources":["../../../src/_vendor/zod-to-json-schema/parsers/array.ts"],"names":[],"mappings":"OAAO,EAAe,qBAAqB,EAAE,MAAM,KAAK;OACjD,EAAiB,yBAAyB,EAAE;OAC5C,EAAmB,QAAQ,EAAE;AAWpC,MAAM,UAAU,aAAa,CAAC,GAAgB,EAAE,IAAU;IACxD,MAAM,GAAG,GAAyB;QAChC,IAAI,EAAE,OAAO;KACd,CAAC;IACF,IAAI,GAAG,CAAC,IAAI,EAAE,IAAI,EAAE,QAAQ,KAAK,qBAAqB,CAAC,MAAM,EAAE;QAC7D,GAAG,CAAC,KAAK,GAAG,QAAQ,CAAC,GAAG,CAAC,IAAI,CAAC,IAAI,EAAE;YAClC,GAAG,IAAI;YACP,WAAW,EAAE,CAAC,GAAG,IAAI,CAAC,WAAW,EAAE,OAAO,CAAC;SAC5C,CAAC,CAAC;KACJ;IAED,IAAI,GAAG,CAAC,SAAS,EAAE;QACjB,yBAAyB,CAAC,GAAG,EAAE,UAAU,EAAE,GAAG,CAAC,SAAS,CAAC,KAAK,EAAE,GAAG,CAAC,SAAS,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;KAC9F;IACD,IAAI,GAAG,CAAC,SAAS,EAAE;QACjB,yBAAyB,CAAC,GAAG,EAAE,UAAU,EAAE,GAAG,CAAC,SAAS,CAAC,KAAK,EAAE,GAAG,CAAC,SAAS,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;KAC9F;IACD,IAAI,GAAG,CAAC,WAAW,EAAE;QACnB,yBAAyB,CAAC,GAAG,EAAE,UAAU,EAAE,GAAG,CAAC,WAAW,CAAC,KAAK,EAAE,GAAG,CAAC,WAAW,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;QACjG,yBAAyB,CAAC,GAAG,EAAE,UAAU,EAAE,GAAG,CAAC,WAAW,CAAC,KAAK,EAAE,GAAG,CAAC,WAAW,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;KAClG;IACD,OAAO,GAAG,CAAC;AACb,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/bigint.d.ts b/node_modules/openai/_vendor/zod-to-json-schema/parsers/bigint.d.ts new file mode 100644 index 0000000..a029808 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/bigint.d.ts @@ -0,0 +1,15 @@ +import { ZodBigIntDef } from 'zod'; +import { Refs } from "../Refs.js"; +import { ErrorMessages } from "../errorMessages.js"; +export type JsonSchema7BigintType = { + type: 'integer'; + format: 'int64'; + minimum?: BigInt; + exclusiveMinimum?: BigInt; + maximum?: BigInt; + exclusiveMaximum?: BigInt; + multipleOf?: BigInt; + errorMessage?: ErrorMessages; +}; +export declare function parseBigintDef(def: ZodBigIntDef, refs: Refs): JsonSchema7BigintType; +//# sourceMappingURL=bigint.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/bigint.d.ts.map b/node_modules/openai/_vendor/zod-to-json-schema/parsers/bigint.d.ts.map new file mode 100644 index 0000000..c8e0f62 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/bigint.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"bigint.d.ts","sourceRoot":"","sources":["../../../src/_vendor/zod-to-json-schema/parsers/bigint.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,YAAY,EAAE,MAAM,KAAK,CAAC;AACnC,OAAO,EAAE,IAAI,EAAE,MAAM,SAAS,CAAC;AAC/B,OAAO,EAAE,aAAa,EAA6B,MAAM,kBAAkB,CAAC;AAE5E,MAAM,MAAM,qBAAqB,GAAG;IAClC,IAAI,EAAE,SAAS,CAAC;IAChB,MAAM,EAAE,OAAO,CAAC;IAChB,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,gBAAgB,CAAC,EAAE,MAAM,CAAC;IAC1B,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,gBAAgB,CAAC,EAAE,MAAM,CAAC;IAC1B,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,YAAY,CAAC,EAAE,aAAa,CAAC,qBAAqB,CAAC,CAAC;CACrD,CAAC;AAEF,wBAAgB,cAAc,CAAC,GAAG,EAAE,YAAY,EAAE,IAAI,EAAE,IAAI,GAAG,qBAAqB,CA4CnF"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/bigint.js b/node_modules/openai/_vendor/zod-to-json-schema/parsers/bigint.js new file mode 100644 index 0000000..dab6ff2 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/bigint.js @@ -0,0 +1,54 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.parseBigintDef = void 0; +const errorMessages_1 = require("../errorMessages.js"); +function parseBigintDef(def, refs) { + const res = { + type: 'integer', + format: 'int64', + }; + if (!def.checks) + return 
res; + for (const check of def.checks) { + switch (check.kind) { + case 'min': + if (refs.target === 'jsonSchema7') { + if (check.inclusive) { + (0, errorMessages_1.setResponseValueAndErrors)(res, 'minimum', check.value, check.message, refs); + } + else { + (0, errorMessages_1.setResponseValueAndErrors)(res, 'exclusiveMinimum', check.value, check.message, refs); + } + } + else { + if (!check.inclusive) { + res.exclusiveMinimum = true; + } + (0, errorMessages_1.setResponseValueAndErrors)(res, 'minimum', check.value, check.message, refs); + } + break; + case 'max': + if (refs.target === 'jsonSchema7') { + if (check.inclusive) { + (0, errorMessages_1.setResponseValueAndErrors)(res, 'maximum', check.value, check.message, refs); + } + else { + (0, errorMessages_1.setResponseValueAndErrors)(res, 'exclusiveMaximum', check.value, check.message, refs); + } + } + else { + if (!check.inclusive) { + res.exclusiveMaximum = true; + } + (0, errorMessages_1.setResponseValueAndErrors)(res, 'maximum', check.value, check.message, refs); + } + break; + case 'multipleOf': + (0, errorMessages_1.setResponseValueAndErrors)(res, 'multipleOf', check.value, check.message, refs); + break; + } + } + return res; +} +exports.parseBigintDef = parseBigintDef; +//# sourceMappingURL=bigint.js.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/bigint.js.map b/node_modules/openai/_vendor/zod-to-json-schema/parsers/bigint.js.map new file mode 100644 index 0000000..7756690 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/bigint.js.map @@ -0,0 +1 @@ +{"version":3,"file":"bigint.js","sourceRoot":"","sources":["../../../src/_vendor/zod-to-json-schema/parsers/bigint.ts"],"names":[],"mappings":";;;AAEA,uDAA4E;AAa5E,SAAgB,cAAc,CAAC,GAAiB,EAAE,IAAU;IAC1D,MAAM,GAAG,GAA0B;QACjC,IAAI,EAAE,SAAS;QACf,MAAM,EAAE,OAAO;KAChB,CAAC;IAEF,IAAI,CAAC,GAAG,CAAC,MAAM;QAAE,OAAO,GAAG,CAAC;IAE5B,KAAK,MAAM,KAAK,IAAI,GAAG,CAAC,MAAM,EAAE;QAC9B,QAAQ,KAAK,CAAC,IAAI,EAAE;YAClB,KAAK,KAAK;gBACR,IAAI,IAAI,CAAC,MAAM,KAAK,aAAa,EAAE;oBACjC,IAAI,KAAK,CAAC,SAAS,EAAE;wBACnB,IAAA,yCAAyB,EAAC,GAAG,EAAE,SAAS,EAAE,KAAK,CAAC,KAAK,EAAE,KAAK,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;qBAC7E;yBAAM;wBACL,IAAA,yCAAyB,EAAC,GAAG,EAAE,kBAAkB,EAAE,KAAK,CAAC,KAAK,EAAE,KAAK,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;qBACtF;iBACF;qBAAM;oBACL,IAAI,CAAC,KAAK,CAAC,SAAS,EAAE;wBACpB,GAAG,CAAC,gBAAgB,GAAG,IAAW,CAAC;qBACpC;oBACD,IAAA,yCAAyB,EAAC,GAAG,EAAE,SAAS,EAAE,KAAK,CAAC,KAAK,EAAE,KAAK,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;iBAC7E;gBACD,MAAM;YACR,KAAK,KAAK;gBACR,IAAI,IAAI,CAAC,MAAM,KAAK,aAAa,EAAE;oBACjC,IAAI,KAAK,CAAC,SAAS,EAAE;wBACnB,IAAA,yCAAyB,EAAC,GAAG,EAAE,SAAS,EAAE,KAAK,CAAC,KAAK,EAAE,KAAK,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;qBAC7E;yBAAM;wBACL,IAAA,yCAAyB,EAAC,GAAG,EAAE,kBAAkB,EAAE,KAAK,CAAC,KAAK,EAAE,KAAK,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;qBACtF;iBACF;qBAAM;oBACL,IAAI,CAAC,KAAK,CAAC,SAAS,EAAE;wBACpB,GAAG,CAAC,gBAAgB,GAAG,IAAW,CAAC;qBACpC;oBACD,IAAA,yCAAyB,EAAC,GAAG,EAAE,SAAS,EAAE,KAAK,CAAC,KAAK,EAAE,KAAK,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;iBAC7E;gBACD,MAAM;YACR,KAAK,YAAY;gBACf,IAAA,yCAAyB,EAAC,GAAG,EAAE,YAAY,EAAE,KAAK,CAAC,KAAK,EAAE,KAAK,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;gBAC/E,MAAM;SACT;KACF;IACD,OAAO,GAAG,CAAC;AACb,CAAC;AA5CD,wCA4CC"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/bigint.mjs b/node_modules/openai/_vendor/zod-to-json-schema/parsers/bigint.mjs new file mode 100644 index 0000000..1275b3d --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/bigint.mjs @@ -0,0 +1,50 @@ +import 
{ setResponseValueAndErrors } from "../errorMessages.mjs"; +export function parseBigintDef(def, refs) { + const res = { + type: 'integer', + format: 'int64', + }; + if (!def.checks) + return res; + for (const check of def.checks) { + switch (check.kind) { + case 'min': + if (refs.target === 'jsonSchema7') { + if (check.inclusive) { + setResponseValueAndErrors(res, 'minimum', check.value, check.message, refs); + } + else { + setResponseValueAndErrors(res, 'exclusiveMinimum', check.value, check.message, refs); + } + } + else { + if (!check.inclusive) { + res.exclusiveMinimum = true; + } + setResponseValueAndErrors(res, 'minimum', check.value, check.message, refs); + } + break; + case 'max': + if (refs.target === 'jsonSchema7') { + if (check.inclusive) { + setResponseValueAndErrors(res, 'maximum', check.value, check.message, refs); + } + else { + setResponseValueAndErrors(res, 'exclusiveMaximum', check.value, check.message, refs); + } + } + else { + if (!check.inclusive) { + res.exclusiveMaximum = true; + } + setResponseValueAndErrors(res, 'maximum', check.value, check.message, refs); + } + break; + case 'multipleOf': + setResponseValueAndErrors(res, 'multipleOf', check.value, check.message, refs); + break; + } + } + return res; +} +//# sourceMappingURL=bigint.mjs.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/bigint.mjs.map b/node_modules/openai/_vendor/zod-to-json-schema/parsers/bigint.mjs.map new file mode 100644 index 0000000..0eb471c --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/bigint.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"bigint.mjs","sourceRoot":"","sources":["../../../src/_vendor/zod-to-json-schema/parsers/bigint.ts"],"names":[],"mappings":"OAEO,EAAiB,yBAAyB,EAAE;AAanD,MAAM,UAAU,cAAc,CAAC,GAAiB,EAAE,IAAU;IAC1D,MAAM,GAAG,GAA0B;QACjC,IAAI,EAAE,SAAS;QACf,MAAM,EAAE,OAAO;KAChB,CAAC;IAEF,IAAI,CAAC,GAAG,CAAC,MAAM;QAAE,OAAO,GAAG,CAAC;IAE5B,KAAK,MAAM,KAAK,IAAI,GAAG,CAAC,MAAM,EAAE;QAC9B,QAAQ,KAAK,CAAC,IAAI,EAAE;YAClB,KAAK,KAAK;gBACR,IAAI,IAAI,CAAC,MAAM,KAAK,aAAa,EAAE;oBACjC,IAAI,KAAK,CAAC,SAAS,EAAE;wBACnB,yBAAyB,CAAC,GAAG,EAAE,SAAS,EAAE,KAAK,CAAC,KAAK,EAAE,KAAK,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;qBAC7E;yBAAM;wBACL,yBAAyB,CAAC,GAAG,EAAE,kBAAkB,EAAE,KAAK,CAAC,KAAK,EAAE,KAAK,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;qBACtF;iBACF;qBAAM;oBACL,IAAI,CAAC,KAAK,CAAC,SAAS,EAAE;wBACpB,GAAG,CAAC,gBAAgB,GAAG,IAAW,CAAC;qBACpC;oBACD,yBAAyB,CAAC,GAAG,EAAE,SAAS,EAAE,KAAK,CAAC,KAAK,EAAE,KAAK,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;iBAC7E;gBACD,MAAM;YACR,KAAK,KAAK;gBACR,IAAI,IAAI,CAAC,MAAM,KAAK,aAAa,EAAE;oBACjC,IAAI,KAAK,CAAC,SAAS,EAAE;wBACnB,yBAAyB,CAAC,GAAG,EAAE,SAAS,EAAE,KAAK,CAAC,KAAK,EAAE,KAAK,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;qBAC7E;yBAAM;wBACL,yBAAyB,CAAC,GAAG,EAAE,kBAAkB,EAAE,KAAK,CAAC,KAAK,EAAE,KAAK,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;qBACtF;iBACF;qBAAM;oBACL,IAAI,CAAC,KAAK,CAAC,SAAS,EAAE;wBACpB,GAAG,CAAC,gBAAgB,GAAG,IAAW,CAAC;qBACpC;oBACD,yBAAyB,CAAC,GAAG,EAAE,SAAS,EAAE,KAAK,CAAC,KAAK,EAAE,KAAK,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;iBAC7E;gBACD,MAAM;YACR,KAAK,YAAY;gBACf,yBAAyB,CAAC,GAAG,EAAE,YAAY,EAAE,KAAK,CAAC,KAAK,EAAE,KAAK,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;gBAC/E,MAAM;SACT;KACF;IACD,OAAO,GAAG,CAAC;AACb,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/boolean.d.ts b/node_modules/openai/_vendor/zod-to-json-schema/parsers/boolean.d.ts new file mode 100644 index 0000000..7de6510 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/boolean.d.ts @@ -0,0 +1,5 @@ +export type 
JsonSchema7BooleanType = { + type: 'boolean'; +}; +export declare function parseBooleanDef(): JsonSchema7BooleanType; +//# sourceMappingURL=boolean.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/boolean.d.ts.map b/node_modules/openai/_vendor/zod-to-json-schema/parsers/boolean.d.ts.map new file mode 100644 index 0000000..ac0a8f2 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/boolean.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"boolean.d.ts","sourceRoot":"","sources":["../../../src/_vendor/zod-to-json-schema/parsers/boolean.ts"],"names":[],"mappings":"AAAA,MAAM,MAAM,sBAAsB,GAAG;IACnC,IAAI,EAAE,SAAS,CAAC;CACjB,CAAC;AAEF,wBAAgB,eAAe,IAAI,sBAAsB,CAIxD"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/boolean.js b/node_modules/openai/_vendor/zod-to-json-schema/parsers/boolean.js new file mode 100644 index 0000000..4ea31cf --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/boolean.js @@ -0,0 +1,10 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.parseBooleanDef = void 0; +function parseBooleanDef() { + return { + type: 'boolean', + }; +} +exports.parseBooleanDef = parseBooleanDef; +//# sourceMappingURL=boolean.js.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/boolean.js.map b/node_modules/openai/_vendor/zod-to-json-schema/parsers/boolean.js.map new file mode 100644 index 0000000..df674bf --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/boolean.js.map @@ -0,0 +1 @@ +{"version":3,"file":"boolean.js","sourceRoot":"","sources":["../../../src/_vendor/zod-to-json-schema/parsers/boolean.ts"],"names":[],"mappings":";;;AAIA,SAAgB,eAAe;IAC7B,OAAO;QACL,IAAI,EAAE,SAAS;KAChB,CAAC;AACJ,CAAC;AAJD,0CAIC"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/boolean.mjs b/node_modules/openai/_vendor/zod-to-json-schema/parsers/boolean.mjs new file mode 100644 index 0000000..3f3ee5a --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/boolean.mjs @@ -0,0 +1,6 @@ +export function parseBooleanDef() { + return { + type: 'boolean', + }; +} +//# sourceMappingURL=boolean.mjs.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/boolean.mjs.map b/node_modules/openai/_vendor/zod-to-json-schema/parsers/boolean.mjs.map new file mode 100644 index 0000000..574a99a --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/boolean.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"boolean.mjs","sourceRoot":"","sources":["../../../src/_vendor/zod-to-json-schema/parsers/boolean.ts"],"names":[],"mappings":"AAIA,MAAM,UAAU,eAAe;IAC7B,OAAO;QACL,IAAI,EAAE,SAAS;KAChB,CAAC;AACJ,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/branded.d.ts b/node_modules/openai/_vendor/zod-to-json-schema/parsers/branded.d.ts new file mode 100644 index 0000000..298ebc5 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/branded.d.ts @@ -0,0 +1,4 @@ +import { ZodBrandedDef } from 'zod'; +import { Refs } from "../Refs.js"; +export declare function parseBrandedDef(_def: ZodBrandedDef, refs: Refs): import("../parseDef").JsonSchema7Type | undefined; +//# sourceMappingURL=branded.d.ts.map \ No newline at end of file diff --git 
a/node_modules/openai/_vendor/zod-to-json-schema/parsers/branded.d.ts.map b/node_modules/openai/_vendor/zod-to-json-schema/parsers/branded.d.ts.map new file mode 100644 index 0000000..21f1f65 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/branded.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"branded.d.ts","sourceRoot":"","sources":["../../../src/_vendor/zod-to-json-schema/parsers/branded.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,aAAa,EAAE,MAAM,KAAK,CAAC;AAEpC,OAAO,EAAE,IAAI,EAAE,MAAM,SAAS,CAAC;AAE/B,wBAAgB,eAAe,CAAC,IAAI,EAAE,aAAa,CAAC,GAAG,CAAC,EAAE,IAAI,EAAE,IAAI,qDAEnE"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/branded.js b/node_modules/openai/_vendor/zod-to-json-schema/parsers/branded.js new file mode 100644 index 0000000..d1207cc --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/branded.js @@ -0,0 +1,9 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.parseBrandedDef = void 0; +const parseDef_1 = require("../parseDef.js"); +function parseBrandedDef(_def, refs) { + return (0, parseDef_1.parseDef)(_def.type._def, refs); +} +exports.parseBrandedDef = parseBrandedDef; +//# sourceMappingURL=branded.js.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/branded.js.map b/node_modules/openai/_vendor/zod-to-json-schema/parsers/branded.js.map new file mode 100644 index 0000000..2f3b4ae --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/branded.js.map @@ -0,0 +1 @@ +{"version":3,"file":"branded.js","sourceRoot":"","sources":["../../../src/_vendor/zod-to-json-schema/parsers/branded.ts"],"names":[],"mappings":";;;AACA,6CAAuC;AAGvC,SAAgB,eAAe,CAAC,IAAwB,EAAE,IAAU;IAClE,OAAO,IAAA,mBAAQ,EAAC,IAAI,CAAC,IAAI,CAAC,IAAI,EAAE,IAAI,CAAC,CAAC;AACxC,CAAC;AAFD,0CAEC"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/branded.mjs b/node_modules/openai/_vendor/zod-to-json-schema/parsers/branded.mjs new file mode 100644 index 0000000..fcd6d3e --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/branded.mjs @@ -0,0 +1,5 @@ +import { parseDef } from "../parseDef.mjs"; +export function parseBrandedDef(_def, refs) { + return parseDef(_def.type._def, refs); +} +//# sourceMappingURL=branded.mjs.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/branded.mjs.map b/node_modules/openai/_vendor/zod-to-json-schema/parsers/branded.mjs.map new file mode 100644 index 0000000..4520f5f --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/branded.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"branded.mjs","sourceRoot":"","sources":["../../../src/_vendor/zod-to-json-schema/parsers/branded.ts"],"names":[],"mappings":"OACO,EAAE,QAAQ,EAAE;AAGnB,MAAM,UAAU,eAAe,CAAC,IAAwB,EAAE,IAAU;IAClE,OAAO,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,EAAE,IAAI,CAAC,CAAC;AACxC,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/catch.d.ts b/node_modules/openai/_vendor/zod-to-json-schema/parsers/catch.d.ts new file mode 100644 index 0000000..055b593 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/catch.d.ts @@ -0,0 +1,4 @@ +import { ZodCatchDef } from 'zod'; +import { Refs } from "../Refs.js"; +export declare const parseCatchDef: (def: ZodCatchDef, refs: Refs) => import("../parseDef").JsonSchema7Type | undefined; +//# 
sourceMappingURL=catch.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/catch.d.ts.map b/node_modules/openai/_vendor/zod-to-json-schema/parsers/catch.d.ts.map new file mode 100644 index 0000000..f9cedf0 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/catch.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"catch.d.ts","sourceRoot":"","sources":["../../../src/_vendor/zod-to-json-schema/parsers/catch.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,WAAW,EAAE,MAAM,KAAK,CAAC;AAElC,OAAO,EAAE,IAAI,EAAE,MAAM,SAAS,CAAC;AAE/B,eAAO,MAAM,aAAa,QAAS,YAAY,GAAG,CAAC,QAAQ,IAAI,sDAE9D,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/catch.js b/node_modules/openai/_vendor/zod-to-json-schema/parsers/catch.js new file mode 100644 index 0000000..79aa9ba --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/catch.js @@ -0,0 +1,9 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.parseCatchDef = void 0; +const parseDef_1 = require("../parseDef.js"); +const parseCatchDef = (def, refs) => { + return (0, parseDef_1.parseDef)(def.innerType._def, refs); +}; +exports.parseCatchDef = parseCatchDef; +//# sourceMappingURL=catch.js.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/catch.js.map b/node_modules/openai/_vendor/zod-to-json-schema/parsers/catch.js.map new file mode 100644 index 0000000..680a799 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/catch.js.map @@ -0,0 +1 @@ +{"version":3,"file":"catch.js","sourceRoot":"","sources":["../../../src/_vendor/zod-to-json-schema/parsers/catch.ts"],"names":[],"mappings":";;;AACA,6CAAuC;AAGhC,MAAM,aAAa,GAAG,CAAC,GAAqB,EAAE,IAAU,EAAE,EAAE;IACjE,OAAO,IAAA,mBAAQ,EAAC,GAAG,CAAC,SAAS,CAAC,IAAI,EAAE,IAAI,CAAC,CAAC;AAC5C,CAAC,CAAC;AAFW,QAAA,aAAa,iBAExB"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/catch.mjs b/node_modules/openai/_vendor/zod-to-json-schema/parsers/catch.mjs new file mode 100644 index 0000000..ac7abd5 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/catch.mjs @@ -0,0 +1,5 @@ +import { parseDef } from "../parseDef.mjs"; +export const parseCatchDef = (def, refs) => { + return parseDef(def.innerType._def, refs); +}; +//# sourceMappingURL=catch.mjs.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/catch.mjs.map b/node_modules/openai/_vendor/zod-to-json-schema/parsers/catch.mjs.map new file mode 100644 index 0000000..3812720 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/catch.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"catch.mjs","sourceRoot":"","sources":["../../../src/_vendor/zod-to-json-schema/parsers/catch.ts"],"names":[],"mappings":"OACO,EAAE,QAAQ,EAAE;AAGnB,MAAM,CAAC,MAAM,aAAa,GAAG,CAAC,GAAqB,EAAE,IAAU,EAAE,EAAE;IACjE,OAAO,QAAQ,CAAC,GAAG,CAAC,SAAS,CAAC,IAAI,EAAE,IAAI,CAAC,CAAC;AAC5C,CAAC,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/date.d.ts b/node_modules/openai/_vendor/zod-to-json-schema/parsers/date.d.ts new file mode 100644 index 0000000..1361f22 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/date.d.ts @@ -0,0 +1,16 @@ +import { ZodDateDef } from 'zod'; +import { Refs } from "../Refs.js"; +import { ErrorMessages } from "../errorMessages.js"; +import { 
JsonSchema7NumberType } from "./number.js"; +import { DateStrategy } from "../Options.js"; +export type JsonSchema7DateType = { + type: 'integer' | 'string'; + format: 'unix-time' | 'date-time' | 'date'; + minimum?: number; + maximum?: number; + errorMessage?: ErrorMessages; +} | { + anyOf: JsonSchema7DateType[]; +}; +export declare function parseDateDef(def: ZodDateDef, refs: Refs, overrideDateStrategy?: DateStrategy): JsonSchema7DateType; +//# sourceMappingURL=date.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/date.d.ts.map b/node_modules/openai/_vendor/zod-to-json-schema/parsers/date.d.ts.map new file mode 100644 index 0000000..f29fe7d --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/date.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"date.d.ts","sourceRoot":"","sources":["../../../src/_vendor/zod-to-json-schema/parsers/date.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,UAAU,EAAE,MAAM,KAAK,CAAC;AACjC,OAAO,EAAE,IAAI,EAAE,MAAM,SAAS,CAAC;AAC/B,OAAO,EAAE,aAAa,EAA6B,MAAM,kBAAkB,CAAC;AAC5E,OAAO,EAAE,qBAAqB,EAAE,MAAM,UAAU,CAAC;AACjD,OAAO,EAAE,YAAY,EAAE,MAAM,YAAY,CAAC;AAE1C,MAAM,MAAM,mBAAmB,GAC3B;IACE,IAAI,EAAE,SAAS,GAAG,QAAQ,CAAC;IAC3B,MAAM,EAAE,WAAW,GAAG,WAAW,GAAG,MAAM,CAAC;IAC3C,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,YAAY,CAAC,EAAE,aAAa,CAAC,qBAAqB,CAAC,CAAC;CACrD,GACD;IACE,KAAK,EAAE,mBAAmB,EAAE,CAAC;CAC9B,CAAC;AAEN,wBAAgB,YAAY,CAC1B,GAAG,EAAE,UAAU,EACf,IAAI,EAAE,IAAI,EACV,oBAAoB,CAAC,EAAE,YAAY,GAClC,mBAAmB,CAwBrB"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/date.js b/node_modules/openai/_vendor/zod-to-json-schema/parsers/date.js new file mode 100644 index 0000000..6cce8ae --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/date.js @@ -0,0 +1,51 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.parseDateDef = void 0; +const errorMessages_1 = require("../errorMessages.js"); +function parseDateDef(def, refs, overrideDateStrategy) { + const strategy = overrideDateStrategy ?? 
refs.dateStrategy; + if (Array.isArray(strategy)) { + return { + anyOf: strategy.map((item, i) => parseDateDef(def, refs, item)), + }; + } + switch (strategy) { + case 'string': + case 'format:date-time': + return { + type: 'string', + format: 'date-time', + }; + case 'format:date': + return { + type: 'string', + format: 'date', + }; + case 'integer': + return integerDateParser(def, refs); + } +} +exports.parseDateDef = parseDateDef; +const integerDateParser = (def, refs) => { + const res = { + type: 'integer', + format: 'unix-time', + }; + if (refs.target === 'openApi3') { + return res; + } + for (const check of def.checks) { + switch (check.kind) { + case 'min': + (0, errorMessages_1.setResponseValueAndErrors)(res, 'minimum', check.value, // This is in milliseconds + check.message, refs); + break; + case 'max': + (0, errorMessages_1.setResponseValueAndErrors)(res, 'maximum', check.value, // This is in milliseconds + check.message, refs); + break; + } + } + return res; +}; +//# sourceMappingURL=date.js.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/date.js.map b/node_modules/openai/_vendor/zod-to-json-schema/parsers/date.js.map new file mode 100644 index 0000000..db21a70 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/date.js.map @@ -0,0 +1 @@ +{"version":3,"file":"date.js","sourceRoot":"","sources":["../../../src/_vendor/zod-to-json-schema/parsers/date.ts"],"names":[],"mappings":";;;AAEA,uDAA4E;AAgB5E,SAAgB,YAAY,CAC1B,GAAe,EACf,IAAU,EACV,oBAAmC;IAEnC,MAAM,QAAQ,GAAG,oBAAoB,IAAI,IAAI,CAAC,YAAY,CAAC;IAE3D,IAAI,KAAK,CAAC,OAAO,CAAC,QAAQ,CAAC,EAAE;QAC3B,OAAO;YACL,KAAK,EAAE,QAAQ,CAAC,GAAG,CAAC,CAAC,IAAI,EAAE,CAAC,EAAE,EAAE,CAAC,YAAY,CAAC,GAAG,EAAE,IAAI,EAAE,IAAI,CAAC,CAAC;SAChE,CAAC;KACH;IAED,QAAQ,QAAQ,EAAE;QAChB,KAAK,QAAQ,CAAC;QACd,KAAK,kBAAkB;YACrB,OAAO;gBACL,IAAI,EAAE,QAAQ;gBACd,MAAM,EAAE,WAAW;aACpB,CAAC;QACJ,KAAK,aAAa;YAChB,OAAO;gBACL,IAAI,EAAE,QAAQ;gBACd,MAAM,EAAE,MAAM;aACf,CAAC;QACJ,KAAK,SAAS;YACZ,OAAO,iBAAiB,CAAC,GAAG,EAAE,IAAI,CAAC,CAAC;KACvC;AACH,CAAC;AA5BD,oCA4BC;AAED,MAAM,iBAAiB,GAAG,CAAC,GAAe,EAAE,IAAU,EAAE,EAAE;IACxD,MAAM,GAAG,GAAwB;QAC/B,IAAI,EAAE,SAAS;QACf,MAAM,EAAE,WAAW;KACpB,CAAC;IAEF,IAAI,IAAI,CAAC,MAAM,KAAK,UAAU,EAAE;QAC9B,OAAO,GAAG,CAAC;KACZ;IAED,KAAK,MAAM,KAAK,IAAI,GAAG,CAAC,MAAM,EAAE;QAC9B,QAAQ,KAAK,CAAC,IAAI,EAAE;YAClB,KAAK,KAAK;gBACR,IAAA,yCAAyB,EACvB,GAAG,EACH,SAAS,EACT,KAAK,CAAC,KAAK,EAAE,0BAA0B;gBACvC,KAAK,CAAC,OAAO,EACb,IAAI,CACL,CAAC;gBACF,MAAM;YACR,KAAK,KAAK;gBACR,IAAA,yCAAyB,EACvB,GAAG,EACH,SAAS,EACT,KAAK,CAAC,KAAK,EAAE,0BAA0B;gBACvC,KAAK,CAAC,OAAO,EACb,IAAI,CACL,CAAC;gBACF,MAAM;SACT;KACF;IAED,OAAO,GAAG,CAAC;AACb,CAAC,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/date.mjs b/node_modules/openai/_vendor/zod-to-json-schema/parsers/date.mjs new file mode 100644 index 0000000..640bb00 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/date.mjs @@ -0,0 +1,47 @@ +import { setResponseValueAndErrors } from "../errorMessages.mjs"; +export function parseDateDef(def, refs, overrideDateStrategy) { + const strategy = overrideDateStrategy ?? 
refs.dateStrategy; + if (Array.isArray(strategy)) { + return { + anyOf: strategy.map((item, i) => parseDateDef(def, refs, item)), + }; + } + switch (strategy) { + case 'string': + case 'format:date-time': + return { + type: 'string', + format: 'date-time', + }; + case 'format:date': + return { + type: 'string', + format: 'date', + }; + case 'integer': + return integerDateParser(def, refs); + } +} +const integerDateParser = (def, refs) => { + const res = { + type: 'integer', + format: 'unix-time', + }; + if (refs.target === 'openApi3') { + return res; + } + for (const check of def.checks) { + switch (check.kind) { + case 'min': + setResponseValueAndErrors(res, 'minimum', check.value, // This is in milliseconds + check.message, refs); + break; + case 'max': + setResponseValueAndErrors(res, 'maximum', check.value, // This is in milliseconds + check.message, refs); + break; + } + } + return res; +}; +//# sourceMappingURL=date.mjs.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/date.mjs.map b/node_modules/openai/_vendor/zod-to-json-schema/parsers/date.mjs.map new file mode 100644 index 0000000..e519e44 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/date.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"date.mjs","sourceRoot":"","sources":["../../../src/_vendor/zod-to-json-schema/parsers/date.ts"],"names":[],"mappings":"OAEO,EAAiB,yBAAyB,EAAE;AAgBnD,MAAM,UAAU,YAAY,CAC1B,GAAe,EACf,IAAU,EACV,oBAAmC;IAEnC,MAAM,QAAQ,GAAG,oBAAoB,IAAI,IAAI,CAAC,YAAY,CAAC;IAE3D,IAAI,KAAK,CAAC,OAAO,CAAC,QAAQ,CAAC,EAAE;QAC3B,OAAO;YACL,KAAK,EAAE,QAAQ,CAAC,GAAG,CAAC,CAAC,IAAI,EAAE,CAAC,EAAE,EAAE,CAAC,YAAY,CAAC,GAAG,EAAE,IAAI,EAAE,IAAI,CAAC,CAAC;SAChE,CAAC;KACH;IAED,QAAQ,QAAQ,EAAE;QAChB,KAAK,QAAQ,CAAC;QACd,KAAK,kBAAkB;YACrB,OAAO;gBACL,IAAI,EAAE,QAAQ;gBACd,MAAM,EAAE,WAAW;aACpB,CAAC;QACJ,KAAK,aAAa;YAChB,OAAO;gBACL,IAAI,EAAE,QAAQ;gBACd,MAAM,EAAE,MAAM;aACf,CAAC;QACJ,KAAK,SAAS;YACZ,OAAO,iBAAiB,CAAC,GAAG,EAAE,IAAI,CAAC,CAAC;KACvC;AACH,CAAC;AAED,MAAM,iBAAiB,GAAG,CAAC,GAAe,EAAE,IAAU,EAAE,EAAE;IACxD,MAAM,GAAG,GAAwB;QAC/B,IAAI,EAAE,SAAS;QACf,MAAM,EAAE,WAAW;KACpB,CAAC;IAEF,IAAI,IAAI,CAAC,MAAM,KAAK,UAAU,EAAE;QAC9B,OAAO,GAAG,CAAC;KACZ;IAED,KAAK,MAAM,KAAK,IAAI,GAAG,CAAC,MAAM,EAAE;QAC9B,QAAQ,KAAK,CAAC,IAAI,EAAE;YAClB,KAAK,KAAK;gBACR,yBAAyB,CACvB,GAAG,EACH,SAAS,EACT,KAAK,CAAC,KAAK,EAAE,0BAA0B;gBACvC,KAAK,CAAC,OAAO,EACb,IAAI,CACL,CAAC;gBACF,MAAM;YACR,KAAK,KAAK;gBACR,yBAAyB,CACvB,GAAG,EACH,SAAS,EACT,KAAK,CAAC,KAAK,EAAE,0BAA0B;gBACvC,KAAK,CAAC,OAAO,EACb,IAAI,CACL,CAAC;gBACF,MAAM;SACT;KACF;IAED,OAAO,GAAG,CAAC;AACb,CAAC,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/default.d.ts b/node_modules/openai/_vendor/zod-to-json-schema/parsers/default.d.ts new file mode 100644 index 0000000..b572a7b --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/default.d.ts @@ -0,0 +1,7 @@ +import { ZodDefaultDef } from 'zod'; +import { JsonSchema7Type } from "../parseDef.js"; +import { Refs } from "../Refs.js"; +export declare function parseDefaultDef(_def: ZodDefaultDef, refs: Refs): JsonSchema7Type & { + default: any; +}; +//# sourceMappingURL=default.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/default.d.ts.map b/node_modules/openai/_vendor/zod-to-json-schema/parsers/default.d.ts.map new file mode 100644 index 0000000..e05d446 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/default.d.ts.map @@ -0,0 +1 @@ 
+{"version":3,"file":"default.d.ts","sourceRoot":"","sources":["../../../src/_vendor/zod-to-json-schema/parsers/default.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,aAAa,EAAE,MAAM,KAAK,CAAC;AACpC,OAAO,EAAE,eAAe,EAAY,MAAM,aAAa,CAAC;AACxD,OAAO,EAAE,IAAI,EAAE,MAAM,SAAS,CAAC;AAE/B,wBAAgB,eAAe,CAAC,IAAI,EAAE,aAAa,EAAE,IAAI,EAAE,IAAI,GAAG,eAAe,GAAG;IAAE,OAAO,EAAE,GAAG,CAAA;CAAE,CAKnG"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/default.js b/node_modules/openai/_vendor/zod-to-json-schema/parsers/default.js new file mode 100644 index 0000000..aeefb83 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/default.js @@ -0,0 +1,12 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.parseDefaultDef = void 0; +const parseDef_1 = require("../parseDef.js"); +function parseDefaultDef(_def, refs) { + return { + ...(0, parseDef_1.parseDef)(_def.innerType._def, refs), + default: _def.defaultValue(), + }; +} +exports.parseDefaultDef = parseDefaultDef; +//# sourceMappingURL=default.js.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/default.js.map b/node_modules/openai/_vendor/zod-to-json-schema/parsers/default.js.map new file mode 100644 index 0000000..e69e74b --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/default.js.map @@ -0,0 +1 @@ +{"version":3,"file":"default.js","sourceRoot":"","sources":["../../../src/_vendor/zod-to-json-schema/parsers/default.ts"],"names":[],"mappings":";;;AACA,6CAAwD;AAGxD,SAAgB,eAAe,CAAC,IAAmB,EAAE,IAAU;IAC7D,OAAO;QACL,GAAG,IAAA,mBAAQ,EAAC,IAAI,CAAC,SAAS,CAAC,IAAI,EAAE,IAAI,CAAC;QACtC,OAAO,EAAE,IAAI,CAAC,YAAY,EAAE;KAC7B,CAAC;AACJ,CAAC;AALD,0CAKC"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/default.mjs b/node_modules/openai/_vendor/zod-to-json-schema/parsers/default.mjs new file mode 100644 index 0000000..446639d --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/default.mjs @@ -0,0 +1,8 @@ +import { parseDef } from "../parseDef.mjs"; +export function parseDefaultDef(_def, refs) { + return { + ...parseDef(_def.innerType._def, refs), + default: _def.defaultValue(), + }; +} +//# sourceMappingURL=default.mjs.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/default.mjs.map b/node_modules/openai/_vendor/zod-to-json-schema/parsers/default.mjs.map new file mode 100644 index 0000000..6b0a172 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/default.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"default.mjs","sourceRoot":"","sources":["../../../src/_vendor/zod-to-json-schema/parsers/default.ts"],"names":[],"mappings":"OACO,EAAmB,QAAQ,EAAE;AAGpC,MAAM,UAAU,eAAe,CAAC,IAAmB,EAAE,IAAU;IAC7D,OAAO;QACL,GAAG,QAAQ,CAAC,IAAI,CAAC,SAAS,CAAC,IAAI,EAAE,IAAI,CAAC;QACtC,OAAO,EAAE,IAAI,CAAC,YAAY,EAAE;KAC7B,CAAC;AACJ,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/effects.d.ts b/node_modules/openai/_vendor/zod-to-json-schema/parsers/effects.d.ts new file mode 100644 index 0000000..7b31970 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/effects.d.ts @@ -0,0 +1,5 @@ +import { ZodEffectsDef } from 'zod'; +import { JsonSchema7Type } from "../parseDef.js"; +import { Refs } from "../Refs.js"; +export declare function parseEffectsDef(_def: ZodEffectsDef, refs: Refs, forceResolution: boolean): 
JsonSchema7Type | undefined; +//# sourceMappingURL=effects.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/effects.d.ts.map b/node_modules/openai/_vendor/zod-to-json-schema/parsers/effects.d.ts.map new file mode 100644 index 0000000..9e52f63 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/effects.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"effects.d.ts","sourceRoot":"","sources":["../../../src/_vendor/zod-to-json-schema/parsers/effects.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,aAAa,EAAE,MAAM,KAAK,CAAC;AACpC,OAAO,EAAE,eAAe,EAAY,MAAM,aAAa,CAAC;AACxD,OAAO,EAAE,IAAI,EAAE,MAAM,SAAS,CAAC;AAE/B,wBAAgB,eAAe,CAC7B,IAAI,EAAE,aAAa,EACnB,IAAI,EAAE,IAAI,EACV,eAAe,EAAE,OAAO,GACvB,eAAe,GAAG,SAAS,CAE7B"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/effects.js b/node_modules/openai/_vendor/zod-to-json-schema/parsers/effects.js new file mode 100644 index 0000000..1fbfedc --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/effects.js @@ -0,0 +1,9 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.parseEffectsDef = void 0; +const parseDef_1 = require("../parseDef.js"); +function parseEffectsDef(_def, refs, forceResolution) { + return refs.effectStrategy === 'input' ? (0, parseDef_1.parseDef)(_def.schema._def, refs, forceResolution) : {}; +} +exports.parseEffectsDef = parseEffectsDef; +//# sourceMappingURL=effects.js.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/effects.js.map b/node_modules/openai/_vendor/zod-to-json-schema/parsers/effects.js.map new file mode 100644 index 0000000..11946d4 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/effects.js.map @@ -0,0 +1 @@ +{"version":3,"file":"effects.js","sourceRoot":"","sources":["../../../src/_vendor/zod-to-json-schema/parsers/effects.ts"],"names":[],"mappings":";;;AACA,6CAAwD;AAGxD,SAAgB,eAAe,CAC7B,IAAmB,EACnB,IAAU,EACV,eAAwB;IAExB,OAAO,IAAI,CAAC,cAAc,KAAK,OAAO,CAAC,CAAC,CAAC,IAAA,mBAAQ,EAAC,IAAI,CAAC,MAAM,CAAC,IAAI,EAAE,IAAI,EAAE,eAAe,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC;AAClG,CAAC;AAND,0CAMC"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/effects.mjs b/node_modules/openai/_vendor/zod-to-json-schema/parsers/effects.mjs new file mode 100644 index 0000000..fdff42c --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/effects.mjs @@ -0,0 +1,5 @@ +import { parseDef } from "../parseDef.mjs"; +export function parseEffectsDef(_def, refs, forceResolution) { + return refs.effectStrategy === 'input' ? 
parseDef(_def.schema._def, refs, forceResolution) : {}; +} +//# sourceMappingURL=effects.mjs.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/effects.mjs.map b/node_modules/openai/_vendor/zod-to-json-schema/parsers/effects.mjs.map new file mode 100644 index 0000000..56fa6bb --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/effects.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"effects.mjs","sourceRoot":"","sources":["../../../src/_vendor/zod-to-json-schema/parsers/effects.ts"],"names":[],"mappings":"OACO,EAAmB,QAAQ,EAAE;AAGpC,MAAM,UAAU,eAAe,CAC7B,IAAmB,EACnB,IAAU,EACV,eAAwB;IAExB,OAAO,IAAI,CAAC,cAAc,KAAK,OAAO,CAAC,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,MAAM,CAAC,IAAI,EAAE,IAAI,EAAE,eAAe,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC;AAClG,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/enum.d.ts b/node_modules/openai/_vendor/zod-to-json-schema/parsers/enum.d.ts new file mode 100644 index 0000000..2a9f4f8 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/enum.d.ts @@ -0,0 +1,7 @@ +import { ZodEnumDef } from 'zod'; +export type JsonSchema7EnumType = { + type: 'string'; + enum: string[]; +}; +export declare function parseEnumDef(def: ZodEnumDef): JsonSchema7EnumType; +//# sourceMappingURL=enum.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/enum.d.ts.map b/node_modules/openai/_vendor/zod-to-json-schema/parsers/enum.d.ts.map new file mode 100644 index 0000000..96a16db --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/enum.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"enum.d.ts","sourceRoot":"","sources":["../../../src/_vendor/zod-to-json-schema/parsers/enum.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,UAAU,EAAE,MAAM,KAAK,CAAC;AAEjC,MAAM,MAAM,mBAAmB,GAAG;IAChC,IAAI,EAAE,QAAQ,CAAC;IACf,IAAI,EAAE,MAAM,EAAE,CAAC;CAChB,CAAC;AAEF,wBAAgB,YAAY,CAAC,GAAG,EAAE,UAAU,GAAG,mBAAmB,CAKjE"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/enum.js b/node_modules/openai/_vendor/zod-to-json-schema/parsers/enum.js new file mode 100644 index 0000000..2d4800f --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/enum.js @@ -0,0 +1,11 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.parseEnumDef = void 0; +function parseEnumDef(def) { + return { + type: 'string', + enum: [...def.values], + }; +} +exports.parseEnumDef = parseEnumDef; +//# sourceMappingURL=enum.js.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/enum.js.map b/node_modules/openai/_vendor/zod-to-json-schema/parsers/enum.js.map new file mode 100644 index 0000000..71515a1 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/enum.js.map @@ -0,0 +1 @@ +{"version":3,"file":"enum.js","sourceRoot":"","sources":["../../../src/_vendor/zod-to-json-schema/parsers/enum.ts"],"names":[],"mappings":";;;AAOA,SAAgB,YAAY,CAAC,GAAe;IAC1C,OAAO;QACL,IAAI,EAAE,QAAQ;QACd,IAAI,EAAE,CAAC,GAAG,GAAG,CAAC,MAAM,CAAC;KACtB,CAAC;AACJ,CAAC;AALD,oCAKC"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/enum.mjs b/node_modules/openai/_vendor/zod-to-json-schema/parsers/enum.mjs new file mode 100644 index 0000000..7c4dd29 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/enum.mjs @@ -0,0 +1,7 @@ +export function parseEnumDef(def) { + 
return { + type: 'string', + enum: [...def.values], + }; +} +//# sourceMappingURL=enum.mjs.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/enum.mjs.map b/node_modules/openai/_vendor/zod-to-json-schema/parsers/enum.mjs.map new file mode 100644 index 0000000..c038705 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/enum.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"enum.mjs","sourceRoot":"","sources":["../../../src/_vendor/zod-to-json-schema/parsers/enum.ts"],"names":[],"mappings":"AAOA,MAAM,UAAU,YAAY,CAAC,GAAe;IAC1C,OAAO;QACL,IAAI,EAAE,QAAQ;QACd,IAAI,EAAE,CAAC,GAAG,GAAG,CAAC,MAAM,CAAC;KACtB,CAAC;AACJ,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/intersection.d.ts b/node_modules/openai/_vendor/zod-to-json-schema/parsers/intersection.d.ts new file mode 100644 index 0000000..b7be619 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/intersection.d.ts @@ -0,0 +1,9 @@ +import { ZodIntersectionDef } from 'zod'; +import { JsonSchema7Type } from "../parseDef.js"; +import { Refs } from "../Refs.js"; +export type JsonSchema7AllOfType = { + allOf: JsonSchema7Type[]; + unevaluatedProperties?: boolean; +}; +export declare function parseIntersectionDef(def: ZodIntersectionDef, refs: Refs): JsonSchema7AllOfType | JsonSchema7Type | undefined; +//# sourceMappingURL=intersection.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/intersection.d.ts.map b/node_modules/openai/_vendor/zod-to-json-schema/parsers/intersection.d.ts.map new file mode 100644 index 0000000..730369c --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/intersection.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"intersection.d.ts","sourceRoot":"","sources":["../../../src/_vendor/zod-to-json-schema/parsers/intersection.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,kBAAkB,EAAE,MAAM,KAAK,CAAC;AACzC,OAAO,EAAE,eAAe,EAAY,MAAM,aAAa,CAAC;AACxD,OAAO,EAAE,IAAI,EAAE,MAAM,SAAS,CAAC;AAG/B,MAAM,MAAM,oBAAoB,GAAG;IACjC,KAAK,EAAE,eAAe,EAAE,CAAC;IACzB,qBAAqB,CAAC,EAAE,OAAO,CAAC;CACjC,CAAC;AASF,wBAAgB,oBAAoB,CAClC,GAAG,EAAE,kBAAkB,EACvB,IAAI,EAAE,IAAI,GACT,oBAAoB,GAAG,eAAe,GAAG,SAAS,CA2CpD"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/intersection.js b/node_modules/openai/_vendor/zod-to-json-schema/parsers/intersection.js new file mode 100644 index 0000000..4dc9d17 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/intersection.js @@ -0,0 +1,54 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.parseIntersectionDef = void 0; +const parseDef_1 = require("../parseDef.js"); +const isJsonSchema7AllOfType = (type) => { + if ('type' in type && type.type === 'string') + return false; + return 'allOf' in type; +}; +function parseIntersectionDef(def, refs) { + const allOf = [ + (0, parseDef_1.parseDef)(def.left._def, { + ...refs, + currentPath: [...refs.currentPath, 'allOf', '0'], + }), + (0, parseDef_1.parseDef)(def.right._def, { + ...refs, + currentPath: [...refs.currentPath, 'allOf', '1'], + }), + ].filter((x) => !!x); + let unevaluatedProperties = refs.target === 'jsonSchema2019-09' ? 
{ unevaluatedProperties: false } : undefined; + const mergedAllOf = []; + // If either of the schemas is an allOf, merge them into a single allOf + allOf.forEach((schema) => { + if (isJsonSchema7AllOfType(schema)) { + mergedAllOf.push(...schema.allOf); + if (schema.unevaluatedProperties === undefined) { + // If one of the schemas has no unevaluatedProperties set, + // the merged schema should also have no unevaluatedProperties set + unevaluatedProperties = undefined; + } + } + else { + let nestedSchema = schema; + if ('additionalProperties' in schema && schema.additionalProperties === false) { + const { additionalProperties, ...rest } = schema; + nestedSchema = rest; + } + else { + // As soon as one of the schemas has additionalProperties set not to false, we allow unevaluatedProperties + unevaluatedProperties = undefined; + } + mergedAllOf.push(nestedSchema); + } + }); + return mergedAllOf.length ? + { + allOf: mergedAllOf, + ...unevaluatedProperties, + } + : undefined; +} +exports.parseIntersectionDef = parseIntersectionDef; +//# sourceMappingURL=intersection.js.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/intersection.js.map b/node_modules/openai/_vendor/zod-to-json-schema/parsers/intersection.js.map new file mode 100644 index 0000000..6966e62 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/intersection.js.map @@ -0,0 +1 @@ +{"version":3,"file":"intersection.js","sourceRoot":"","sources":["../../../src/_vendor/zod-to-json-schema/parsers/intersection.ts"],"names":[],"mappings":";;;AACA,6CAAwD;AASxD,MAAM,sBAAsB,GAAG,CAC7B,IAA6C,EACf,EAAE;IAChC,IAAI,MAAM,IAAI,IAAI,IAAI,IAAI,CAAC,IAAI,KAAK,QAAQ;QAAE,OAAO,KAAK,CAAC;IAC3D,OAAO,OAAO,IAAI,IAAI,CAAC;AACzB,CAAC,CAAC;AAEF,SAAgB,oBAAoB,CAClC,GAAuB,EACvB,IAAU;IAEV,MAAM,KAAK,GAAG;QACZ,IAAA,mBAAQ,EAAC,GAAG,CAAC,IAAI,CAAC,IAAI,EAAE;YACtB,GAAG,IAAI;YACP,WAAW,EAAE,CAAC,GAAG,IAAI,CAAC,WAAW,EAAE,OAAO,EAAE,GAAG,CAAC;SACjD,CAAC;QACF,IAAA,mBAAQ,EAAC,GAAG,CAAC,KAAK,CAAC,IAAI,EAAE;YACvB,GAAG,IAAI;YACP,WAAW,EAAE,CAAC,GAAG,IAAI,CAAC,WAAW,EAAE,OAAO,EAAE,GAAG,CAAC;SACjD,CAAC;KACH,CAAC,MAAM,CAAC,CAAC,CAAC,EAAwB,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;IAE3C,IAAI,qBAAqB,GACvB,IAAI,CAAC,MAAM,KAAK,mBAAmB,CAAC,CAAC,CAAC,EAAE,qBAAqB,EAAE,KAAK,EAAE,CAAC,CAAC,CAAC,SAAS,CAAC;IAErF,MAAM,WAAW,GAAsB,EAAE,CAAC;IAC1C,uEAAuE;IACvE,KAAK,CAAC,OAAO,CAAC,CAAC,MAAM,EAAE,EAAE;QACvB,IAAI,sBAAsB,CAAC,MAAM,CAAC,EAAE;YAClC,WAAW,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC,KAAK,CAAC,CAAC;YAClC,IAAI,MAAM,CAAC,qBAAqB,KAAK,SAAS,EAAE;gBAC9C,0DAA0D;gBAC1D,kEAAkE;gBAClE,qBAAqB,GAAG,SAAS,CAAC;aACnC;SACF;aAAM;YACL,IAAI,YAAY,GAAoB,MAAM,CAAC;YAC3C,IAAI,sBAAsB,IAAI,MAAM,IAAI,MAAM,CAAC,oBAAoB,KAAK,KAAK,EAAE;gBAC7E,MAAM,EAAE,oBAAoB,EAAE,GAAG,IAAI,EAAE,GAAG,MAAM,CAAC;gBACjD,YAAY,GAAG,IAAI,CAAC;aACrB;iBAAM;gBACL,0GAA0G;gBAC1G,qBAAqB,GAAG,SAAS,CAAC;aACnC;YACD,WAAW,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;SAChC;IACH,CAAC,CAAC,CAAC;IACH,OAAO,WAAW,CAAC,MAAM,CAAC,CAAC;QACvB;YACE,KAAK,EAAE,WAAW;YAClB,GAAG,qBAAqB;SACzB;QACH,CAAC,CAAC,SAAS,CAAC;AAChB,CAAC;AA9CD,oDA8CC"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/intersection.mjs b/node_modules/openai/_vendor/zod-to-json-schema/parsers/intersection.mjs new file mode 100644 index 0000000..c11f839 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/intersection.mjs @@ -0,0 +1,50 @@ +import { parseDef } from "../parseDef.mjs"; +const isJsonSchema7AllOfType = (type) => { + if ('type' in type && type.type === 'string') + return false; + return 
'allOf' in type; +}; +export function parseIntersectionDef(def, refs) { + const allOf = [ + parseDef(def.left._def, { + ...refs, + currentPath: [...refs.currentPath, 'allOf', '0'], + }), + parseDef(def.right._def, { + ...refs, + currentPath: [...refs.currentPath, 'allOf', '1'], + }), + ].filter((x) => !!x); + let unevaluatedProperties = refs.target === 'jsonSchema2019-09' ? { unevaluatedProperties: false } : undefined; + const mergedAllOf = []; + // If either of the schemas is an allOf, merge them into a single allOf + allOf.forEach((schema) => { + if (isJsonSchema7AllOfType(schema)) { + mergedAllOf.push(...schema.allOf); + if (schema.unevaluatedProperties === undefined) { + // If one of the schemas has no unevaluatedProperties set, + // the merged schema should also have no unevaluatedProperties set + unevaluatedProperties = undefined; + } + } + else { + let nestedSchema = schema; + if ('additionalProperties' in schema && schema.additionalProperties === false) { + const { additionalProperties, ...rest } = schema; + nestedSchema = rest; + } + else { + // As soon as one of the schemas has additionalProperties set not to false, we allow unevaluatedProperties + unevaluatedProperties = undefined; + } + mergedAllOf.push(nestedSchema); + } + }); + return mergedAllOf.length ? + { + allOf: mergedAllOf, + ...unevaluatedProperties, + } + : undefined; +} +//# sourceMappingURL=intersection.mjs.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/intersection.mjs.map b/node_modules/openai/_vendor/zod-to-json-schema/parsers/intersection.mjs.map new file mode 100644 index 0000000..3b5e250 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/intersection.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"intersection.mjs","sourceRoot":"","sources":["../../../src/_vendor/zod-to-json-schema/parsers/intersection.ts"],"names":[],"mappings":"OACO,EAAmB,QAAQ,EAAE;AASpC,MAAM,sBAAsB,GAAG,CAC7B,IAA6C,EACf,EAAE;IAChC,IAAI,MAAM,IAAI,IAAI,IAAI,IAAI,CAAC,IAAI,KAAK,QAAQ;QAAE,OAAO,KAAK,CAAC;IAC3D,OAAO,OAAO,IAAI,IAAI,CAAC;AACzB,CAAC,CAAC;AAEF,MAAM,UAAU,oBAAoB,CAClC,GAAuB,EACvB,IAAU;IAEV,MAAM,KAAK,GAAG;QACZ,QAAQ,CAAC,GAAG,CAAC,IAAI,CAAC,IAAI,EAAE;YACtB,GAAG,IAAI;YACP,WAAW,EAAE,CAAC,GAAG,IAAI,CAAC,WAAW,EAAE,OAAO,EAAE,GAAG,CAAC;SACjD,CAAC;QACF,QAAQ,CAAC,GAAG,CAAC,KAAK,CAAC,IAAI,EAAE;YACvB,GAAG,IAAI;YACP,WAAW,EAAE,CAAC,GAAG,IAAI,CAAC,WAAW,EAAE,OAAO,EAAE,GAAG,CAAC;SACjD,CAAC;KACH,CAAC,MAAM,CAAC,CAAC,CAAC,EAAwB,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;IAE3C,IAAI,qBAAqB,GACvB,IAAI,CAAC,MAAM,KAAK,mBAAmB,CAAC,CAAC,CAAC,EAAE,qBAAqB,EAAE,KAAK,EAAE,CAAC,CAAC,CAAC,SAAS,CAAC;IAErF,MAAM,WAAW,GAAsB,EAAE,CAAC;IAC1C,uEAAuE;IACvE,KAAK,CAAC,OAAO,CAAC,CAAC,MAAM,EAAE,EAAE;QACvB,IAAI,sBAAsB,CAAC,MAAM,CAAC,EAAE;YAClC,WAAW,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC,KAAK,CAAC,CAAC;YAClC,IAAI,MAAM,CAAC,qBAAqB,KAAK,SAAS,EAAE;gBAC9C,0DAA0D;gBAC1D,kEAAkE;gBAClE,qBAAqB,GAAG,SAAS,CAAC;aACnC;SACF;aAAM;YACL,IAAI,YAAY,GAAoB,MAAM,CAAC;YAC3C,IAAI,sBAAsB,IAAI,MAAM,IAAI,MAAM,CAAC,oBAAoB,KAAK,KAAK,EAAE;gBAC7E,MAAM,EAAE,oBAAoB,EAAE,GAAG,IAAI,EAAE,GAAG,MAAM,CAAC;gBACjD,YAAY,GAAG,IAAI,CAAC;aACrB;iBAAM;gBACL,0GAA0G;gBAC1G,qBAAqB,GAAG,SAAS,CAAC;aACnC;YACD,WAAW,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;SAChC;IACH,CAAC,CAAC,CAAC;IACH,OAAO,WAAW,CAAC,MAAM,CAAC,CAAC;QACvB;YACE,KAAK,EAAE,WAAW;YAClB,GAAG,qBAAqB;SACzB;QACH,CAAC,CAAC,SAAS,CAAC;AAChB,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/literal.d.ts b/node_modules/openai/_vendor/zod-to-json-schema/parsers/literal.d.ts new file 
mode 100644 index 0000000..36b539e --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/literal.d.ts @@ -0,0 +1,10 @@ +import { ZodLiteralDef } from 'zod'; +import { Refs } from "../Refs.js"; +export type JsonSchema7LiteralType = { + type: 'string' | 'number' | 'integer' | 'boolean'; + const: string | number | boolean; +} | { + type: 'object' | 'array'; +}; +export declare function parseLiteralDef(def: ZodLiteralDef, refs: Refs): JsonSchema7LiteralType; +//# sourceMappingURL=literal.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/literal.d.ts.map b/node_modules/openai/_vendor/zod-to-json-schema/parsers/literal.d.ts.map new file mode 100644 index 0000000..c8c0899 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/literal.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"literal.d.ts","sourceRoot":"","sources":["../../../src/_vendor/zod-to-json-schema/parsers/literal.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,aAAa,EAAE,MAAM,KAAK,CAAC;AACpC,OAAO,EAAE,IAAI,EAAE,MAAM,SAAS,CAAC;AAE/B,MAAM,MAAM,sBAAsB,GAC9B;IACE,IAAI,EAAE,QAAQ,GAAG,QAAQ,GAAG,SAAS,GAAG,SAAS,CAAC;IAClD,KAAK,EAAE,MAAM,GAAG,MAAM,GAAG,OAAO,CAAC;CAClC,GACD;IACE,IAAI,EAAE,QAAQ,GAAG,OAAO,CAAC;CAC1B,CAAC;AAEN,wBAAgB,eAAe,CAAC,GAAG,EAAE,aAAa,EAAE,IAAI,EAAE,IAAI,GAAG,sBAAsB,CAwBtF"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/literal.js b/node_modules/openai/_vendor/zod-to-json-schema/parsers/literal.js new file mode 100644 index 0000000..59eead7 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/literal.js @@ -0,0 +1,26 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.parseLiteralDef = void 0; +function parseLiteralDef(def, refs) { + const parsedType = typeof def.value; + if (parsedType !== 'bigint' && + parsedType !== 'number' && + parsedType !== 'boolean' && + parsedType !== 'string') { + return { + type: Array.isArray(def.value) ? 'array' : 'object', + }; + } + if (refs.target === 'openApi3') { + return { + type: parsedType === 'bigint' ? 'integer' : parsedType, + enum: [def.value], + }; + } + return { + type: parsedType === 'bigint' ? 
'integer' : parsedType, + const: def.value, + }; +} +exports.parseLiteralDef = parseLiteralDef; +//# sourceMappingURL=literal.js.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/literal.js.map b/node_modules/openai/_vendor/zod-to-json-schema/parsers/literal.js.map new file mode 100644 index 0000000..f15ac42 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/literal.js.map @@ -0,0 +1 @@ +{"version":3,"file":"literal.js","sourceRoot":"","sources":["../../../src/_vendor/zod-to-json-schema/parsers/literal.ts"],"names":[],"mappings":";;;AAYA,SAAgB,eAAe,CAAC,GAAkB,EAAE,IAAU;IAC5D,MAAM,UAAU,GAAG,OAAO,GAAG,CAAC,KAAK,CAAC;IACpC,IACE,UAAU,KAAK,QAAQ;QACvB,UAAU,KAAK,QAAQ;QACvB,UAAU,KAAK,SAAS;QACxB,UAAU,KAAK,QAAQ,EACvB;QACA,OAAO;YACL,IAAI,EAAE,KAAK,CAAC,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,QAAQ;SACpD,CAAC;KACH;IAED,IAAI,IAAI,CAAC,MAAM,KAAK,UAAU,EAAE;QAC9B,OAAO;YACL,IAAI,EAAE,UAAU,KAAK,QAAQ,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,UAAU;YACtD,IAAI,EAAE,CAAC,GAAG,CAAC,KAAK,CAAC;SACX,CAAC;KACV;IAED,OAAO;QACL,IAAI,EAAE,UAAU,KAAK,QAAQ,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,UAAU;QACtD,KAAK,EAAE,GAAG,CAAC,KAAK;KACjB,CAAC;AACJ,CAAC;AAxBD,0CAwBC"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/literal.mjs b/node_modules/openai/_vendor/zod-to-json-schema/parsers/literal.mjs new file mode 100644 index 0000000..a851972 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/literal.mjs @@ -0,0 +1,22 @@ +export function parseLiteralDef(def, refs) { + const parsedType = typeof def.value; + if (parsedType !== 'bigint' && + parsedType !== 'number' && + parsedType !== 'boolean' && + parsedType !== 'string') { + return { + type: Array.isArray(def.value) ? 'array' : 'object', + }; + } + if (refs.target === 'openApi3') { + return { + type: parsedType === 'bigint' ? 'integer' : parsedType, + enum: [def.value], + }; + } + return { + type: parsedType === 'bigint' ? 
'integer' : parsedType, + const: def.value, + }; +} +//# sourceMappingURL=literal.mjs.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/literal.mjs.map b/node_modules/openai/_vendor/zod-to-json-schema/parsers/literal.mjs.map new file mode 100644 index 0000000..d93f2a5 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/literal.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"literal.mjs","sourceRoot":"","sources":["../../../src/_vendor/zod-to-json-schema/parsers/literal.ts"],"names":[],"mappings":"AAYA,MAAM,UAAU,eAAe,CAAC,GAAkB,EAAE,IAAU;IAC5D,MAAM,UAAU,GAAG,OAAO,GAAG,CAAC,KAAK,CAAC;IACpC,IACE,UAAU,KAAK,QAAQ;QACvB,UAAU,KAAK,QAAQ;QACvB,UAAU,KAAK,SAAS;QACxB,UAAU,KAAK,QAAQ,EACvB;QACA,OAAO;YACL,IAAI,EAAE,KAAK,CAAC,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,QAAQ;SACpD,CAAC;KACH;IAED,IAAI,IAAI,CAAC,MAAM,KAAK,UAAU,EAAE;QAC9B,OAAO;YACL,IAAI,EAAE,UAAU,KAAK,QAAQ,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,UAAU;YACtD,IAAI,EAAE,CAAC,GAAG,CAAC,KAAK,CAAC;SACX,CAAC;KACV;IAED,OAAO;QACL,IAAI,EAAE,UAAU,KAAK,QAAQ,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,UAAU;QACtD,KAAK,EAAE,GAAG,CAAC,KAAK;KACjB,CAAC;AACJ,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/map.d.ts b/node_modules/openai/_vendor/zod-to-json-schema/parsers/map.d.ts new file mode 100644 index 0000000..3016f89 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/map.d.ts @@ -0,0 +1,16 @@ +import { ZodMapDef } from 'zod'; +import { JsonSchema7Type } from "../parseDef.js"; +import { Refs } from "../Refs.js"; +import { JsonSchema7RecordType } from "./record.js"; +export type JsonSchema7MapType = { + type: 'array'; + maxItems: 125; + items: { + type: 'array'; + items: [JsonSchema7Type, JsonSchema7Type]; + minItems: 2; + maxItems: 2; + }; +}; +export declare function parseMapDef(def: ZodMapDef, refs: Refs): JsonSchema7MapType | JsonSchema7RecordType; +//# sourceMappingURL=map.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/map.d.ts.map b/node_modules/openai/_vendor/zod-to-json-schema/parsers/map.d.ts.map new file mode 100644 index 0000000..a52c428 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/map.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"map.d.ts","sourceRoot":"","sources":["../../../src/_vendor/zod-to-json-schema/parsers/map.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,SAAS,EAAE,MAAM,KAAK,CAAC;AAChC,OAAO,EAAE,eAAe,EAAY,MAAM,aAAa,CAAC;AACxD,OAAO,EAAE,IAAI,EAAE,MAAM,SAAS,CAAC;AAC/B,OAAO,EAAE,qBAAqB,EAAkB,MAAM,UAAU,CAAC;AAEjE,MAAM,MAAM,kBAAkB,GAAG;IAC/B,IAAI,EAAE,OAAO,CAAC;IACd,QAAQ,EAAE,GAAG,CAAC;IACd,KAAK,EAAE;QACL,IAAI,EAAE,OAAO,CAAC;QACd,KAAK,EAAE,CAAC,eAAe,EAAE,eAAe,CAAC,CAAC;QAC1C,QAAQ,EAAE,CAAC,CAAC;QACZ,QAAQ,EAAE,CAAC,CAAC;KACb,CAAC;CACH,CAAC;AAEF,wBAAgB,WAAW,CAAC,GAAG,EAAE,SAAS,EAAE,IAAI,EAAE,IAAI,GAAG,kBAAkB,GAAG,qBAAqB,CAyBlG"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/map.js b/node_modules/openai/_vendor/zod-to-json-schema/parsers/map.js new file mode 100644 index 0000000..07bf1ee --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/map.js @@ -0,0 +1,30 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.parseMapDef = void 0; +const parseDef_1 = require("../parseDef.js"); +const record_1 = require("./record.js"); +function parseMapDef(def, refs) { + if (refs.mapStrategy === 'record') { + return (0, 
record_1.parseRecordDef)(def, refs); + } + const keys = (0, parseDef_1.parseDef)(def.keyType._def, { + ...refs, + currentPath: [...refs.currentPath, 'items', 'items', '0'], + }) || {}; + const values = (0, parseDef_1.parseDef)(def.valueType._def, { + ...refs, + currentPath: [...refs.currentPath, 'items', 'items', '1'], + }) || {}; + return { + type: 'array', + maxItems: 125, + items: { + type: 'array', + items: [keys, values], + minItems: 2, + maxItems: 2, + }, + }; +} +exports.parseMapDef = parseMapDef; +//# sourceMappingURL=map.js.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/map.js.map b/node_modules/openai/_vendor/zod-to-json-schema/parsers/map.js.map new file mode 100644 index 0000000..4043d66 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/map.js.map @@ -0,0 +1 @@ +{"version":3,"file":"map.js","sourceRoot":"","sources":["../../../src/_vendor/zod-to-json-schema/parsers/map.ts"],"names":[],"mappings":";;;AACA,6CAAwD;AAExD,wCAAiE;AAajE,SAAgB,WAAW,CAAC,GAAc,EAAE,IAAU;IACpD,IAAI,IAAI,CAAC,WAAW,KAAK,QAAQ,EAAE;QACjC,OAAO,IAAA,uBAAc,EAAC,GAAG,EAAE,IAAI,CAAC,CAAC;KAClC;IAED,MAAM,IAAI,GACR,IAAA,mBAAQ,EAAC,GAAG,CAAC,OAAO,CAAC,IAAI,EAAE;QACzB,GAAG,IAAI;QACP,WAAW,EAAE,CAAC,GAAG,IAAI,CAAC,WAAW,EAAE,OAAO,EAAE,OAAO,EAAE,GAAG,CAAC;KAC1D,CAAC,IAAI,EAAE,CAAC;IACX,MAAM,MAAM,GACV,IAAA,mBAAQ,EAAC,GAAG,CAAC,SAAS,CAAC,IAAI,EAAE;QAC3B,GAAG,IAAI;QACP,WAAW,EAAE,CAAC,GAAG,IAAI,CAAC,WAAW,EAAE,OAAO,EAAE,OAAO,EAAE,GAAG,CAAC;KAC1D,CAAC,IAAI,EAAE,CAAC;IACX,OAAO;QACL,IAAI,EAAE,OAAO;QACb,QAAQ,EAAE,GAAG;QACb,KAAK,EAAE;YACL,IAAI,EAAE,OAAO;YACb,KAAK,EAAE,CAAC,IAAI,EAAE,MAAM,CAAC;YACrB,QAAQ,EAAE,CAAC;YACX,QAAQ,EAAE,CAAC;SACZ;KACF,CAAC;AACJ,CAAC;AAzBD,kCAyBC"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/map.mjs b/node_modules/openai/_vendor/zod-to-json-schema/parsers/map.mjs new file mode 100644 index 0000000..428849e --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/map.mjs @@ -0,0 +1,26 @@ +import { parseDef } from "../parseDef.mjs"; +import { parseRecordDef } from "./record.mjs"; +export function parseMapDef(def, refs) { + if (refs.mapStrategy === 'record') { + return parseRecordDef(def, refs); + } + const keys = parseDef(def.keyType._def, { + ...refs, + currentPath: [...refs.currentPath, 'items', 'items', '0'], + }) || {}; + const values = parseDef(def.valueType._def, { + ...refs, + currentPath: [...refs.currentPath, 'items', 'items', '1'], + }) || {}; + return { + type: 'array', + maxItems: 125, + items: { + type: 'array', + items: [keys, values], + minItems: 2, + maxItems: 2, + }, + }; +} +//# sourceMappingURL=map.mjs.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/map.mjs.map b/node_modules/openai/_vendor/zod-to-json-schema/parsers/map.mjs.map new file mode 100644 index 0000000..49cf41b --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/map.mjs.map @@ -0,0 +1 @@ 
+{"version":3,"file":"map.mjs","sourceRoot":"","sources":["../../../src/_vendor/zod-to-json-schema/parsers/map.ts"],"names":[],"mappings":"OACO,EAAmB,QAAQ,EAAE;OAE7B,EAAyB,cAAc,EAAE;AAahD,MAAM,UAAU,WAAW,CAAC,GAAc,EAAE,IAAU;IACpD,IAAI,IAAI,CAAC,WAAW,KAAK,QAAQ,EAAE;QACjC,OAAO,cAAc,CAAC,GAAG,EAAE,IAAI,CAAC,CAAC;KAClC;IAED,MAAM,IAAI,GACR,QAAQ,CAAC,GAAG,CAAC,OAAO,CAAC,IAAI,EAAE;QACzB,GAAG,IAAI;QACP,WAAW,EAAE,CAAC,GAAG,IAAI,CAAC,WAAW,EAAE,OAAO,EAAE,OAAO,EAAE,GAAG,CAAC;KAC1D,CAAC,IAAI,EAAE,CAAC;IACX,MAAM,MAAM,GACV,QAAQ,CAAC,GAAG,CAAC,SAAS,CAAC,IAAI,EAAE;QAC3B,GAAG,IAAI;QACP,WAAW,EAAE,CAAC,GAAG,IAAI,CAAC,WAAW,EAAE,OAAO,EAAE,OAAO,EAAE,GAAG,CAAC;KAC1D,CAAC,IAAI,EAAE,CAAC;IACX,OAAO;QACL,IAAI,EAAE,OAAO;QACb,QAAQ,EAAE,GAAG;QACb,KAAK,EAAE;YACL,IAAI,EAAE,OAAO;YACb,KAAK,EAAE,CAAC,IAAI,EAAE,MAAM,CAAC;YACrB,QAAQ,EAAE,CAAC;YACX,QAAQ,EAAE,CAAC;SACZ;KACF,CAAC;AACJ,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/nativeEnum.d.ts b/node_modules/openai/_vendor/zod-to-json-schema/parsers/nativeEnum.d.ts new file mode 100644 index 0000000..43cf2e2 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/nativeEnum.d.ts @@ -0,0 +1,7 @@ +import { ZodNativeEnumDef } from 'zod'; +export type JsonSchema7NativeEnumType = { + type: 'string' | 'number' | ['string', 'number']; + enum: (string | number)[]; +}; +export declare function parseNativeEnumDef(def: ZodNativeEnumDef): JsonSchema7NativeEnumType; +//# sourceMappingURL=nativeEnum.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/nativeEnum.d.ts.map b/node_modules/openai/_vendor/zod-to-json-schema/parsers/nativeEnum.d.ts.map new file mode 100644 index 0000000..a5de1fc --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/nativeEnum.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"nativeEnum.d.ts","sourceRoot":"","sources":["../../../src/_vendor/zod-to-json-schema/parsers/nativeEnum.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,gBAAgB,EAAE,MAAM,KAAK,CAAC;AAEvC,MAAM,MAAM,yBAAyB,GAAG;IACtC,IAAI,EAAE,QAAQ,GAAG,QAAQ,GAAG,CAAC,QAAQ,EAAE,QAAQ,CAAC,CAAC;IACjD,IAAI,EAAE,CAAC,MAAM,GAAG,MAAM,CAAC,EAAE,CAAC;CAC3B,CAAC;AAEF,wBAAgB,kBAAkB,CAAC,GAAG,EAAE,gBAAgB,GAAG,yBAAyB,CAmBnF"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/nativeEnum.js b/node_modules/openai/_vendor/zod-to-json-schema/parsers/nativeEnum.js new file mode 100644 index 0000000..5bd7599 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/nativeEnum.js @@ -0,0 +1,21 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.parseNativeEnumDef = void 0; +function parseNativeEnumDef(def) { + const object = def.values; + const actualKeys = Object.keys(def.values).filter((key) => { + return typeof object[object[key]] !== 'number'; + }); + const actualValues = actualKeys.map((key) => object[key]); + const parsedTypes = Array.from(new Set(actualValues.map((values) => typeof values))); + return { + type: parsedTypes.length === 1 ? + parsedTypes[0] === 'string' ? 
+ 'string' + : 'number' + : ['string', 'number'], + enum: actualValues, + }; +} +exports.parseNativeEnumDef = parseNativeEnumDef; +//# sourceMappingURL=nativeEnum.js.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/nativeEnum.js.map b/node_modules/openai/_vendor/zod-to-json-schema/parsers/nativeEnum.js.map new file mode 100644 index 0000000..8dd242b --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/nativeEnum.js.map @@ -0,0 +1 @@ +{"version":3,"file":"nativeEnum.js","sourceRoot":"","sources":["../../../src/_vendor/zod-to-json-schema/parsers/nativeEnum.ts"],"names":[],"mappings":";;;AAOA,SAAgB,kBAAkB,CAAC,GAAqB;IACtD,MAAM,MAAM,GAAG,GAAG,CAAC,MAAM,CAAC;IAC1B,MAAM,UAAU,GAAG,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC,MAAM,CAAC,CAAC,GAAW,EAAE,EAAE;QAChE,OAAO,OAAO,MAAM,CAAC,MAAM,CAAC,GAAG,CAAE,CAAC,KAAK,QAAQ,CAAC;IAClD,CAAC,CAAC,CAAC;IAEH,MAAM,YAAY,GAAG,UAAU,CAAC,GAAG,CAAC,CAAC,GAAW,EAAE,EAAE,CAAC,MAAM,CAAC,GAAG,CAAE,CAAC,CAAC;IAEnE,MAAM,WAAW,GAAG,KAAK,CAAC,IAAI,CAAC,IAAI,GAAG,CAAC,YAAY,CAAC,GAAG,CAAC,CAAC,MAAuB,EAAE,EAAE,CAAC,OAAO,MAAM,CAAC,CAAC,CAAC,CAAC;IAEtG,OAAO;QACL,IAAI,EACF,WAAW,CAAC,MAAM,KAAK,CAAC,CAAC,CAAC;YACxB,WAAW,CAAC,CAAC,CAAC,KAAK,QAAQ,CAAC,CAAC;gBAC3B,QAAQ;gBACV,CAAC,CAAC,QAAQ;YACZ,CAAC,CAAC,CAAC,QAAQ,EAAE,QAAQ,CAAC;QACxB,IAAI,EAAE,YAAY;KACnB,CAAC;AACJ,CAAC;AAnBD,gDAmBC"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/nativeEnum.mjs b/node_modules/openai/_vendor/zod-to-json-schema/parsers/nativeEnum.mjs new file mode 100644 index 0000000..6e1a0ac --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/nativeEnum.mjs @@ -0,0 +1,17 @@ +export function parseNativeEnumDef(def) { + const object = def.values; + const actualKeys = Object.keys(def.values).filter((key) => { + return typeof object[object[key]] !== 'number'; + }); + const actualValues = actualKeys.map((key) => object[key]); + const parsedTypes = Array.from(new Set(actualValues.map((values) => typeof values))); + return { + type: parsedTypes.length === 1 ? + parsedTypes[0] === 'string' ? 
+ 'string' + : 'number' + : ['string', 'number'], + enum: actualValues, + }; +} +//# sourceMappingURL=nativeEnum.mjs.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/nativeEnum.mjs.map b/node_modules/openai/_vendor/zod-to-json-schema/parsers/nativeEnum.mjs.map new file mode 100644 index 0000000..34d5262 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/nativeEnum.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"nativeEnum.mjs","sourceRoot":"","sources":["../../../src/_vendor/zod-to-json-schema/parsers/nativeEnum.ts"],"names":[],"mappings":"AAOA,MAAM,UAAU,kBAAkB,CAAC,GAAqB;IACtD,MAAM,MAAM,GAAG,GAAG,CAAC,MAAM,CAAC;IAC1B,MAAM,UAAU,GAAG,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC,MAAM,CAAC,CAAC,GAAW,EAAE,EAAE;QAChE,OAAO,OAAO,MAAM,CAAC,MAAM,CAAC,GAAG,CAAE,CAAC,KAAK,QAAQ,CAAC;IAClD,CAAC,CAAC,CAAC;IAEH,MAAM,YAAY,GAAG,UAAU,CAAC,GAAG,CAAC,CAAC,GAAW,EAAE,EAAE,CAAC,MAAM,CAAC,GAAG,CAAE,CAAC,CAAC;IAEnE,MAAM,WAAW,GAAG,KAAK,CAAC,IAAI,CAAC,IAAI,GAAG,CAAC,YAAY,CAAC,GAAG,CAAC,CAAC,MAAuB,EAAE,EAAE,CAAC,OAAO,MAAM,CAAC,CAAC,CAAC,CAAC;IAEtG,OAAO;QACL,IAAI,EACF,WAAW,CAAC,MAAM,KAAK,CAAC,CAAC,CAAC;YACxB,WAAW,CAAC,CAAC,CAAC,KAAK,QAAQ,CAAC,CAAC;gBAC3B,QAAQ;gBACV,CAAC,CAAC,QAAQ;YACZ,CAAC,CAAC,CAAC,QAAQ,EAAE,QAAQ,CAAC;QACxB,IAAI,EAAE,YAAY;KACnB,CAAC;AACJ,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/never.d.ts b/node_modules/openai/_vendor/zod-to-json-schema/parsers/never.d.ts new file mode 100644 index 0000000..d56edd0 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/never.d.ts @@ -0,0 +1,5 @@ +export type JsonSchema7NeverType = { + not: {}; +}; +export declare function parseNeverDef(): JsonSchema7NeverType; +//# sourceMappingURL=never.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/never.d.ts.map b/node_modules/openai/_vendor/zod-to-json-schema/parsers/never.d.ts.map new file mode 100644 index 0000000..0bc5ba2 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/never.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"never.d.ts","sourceRoot":"","sources":["../../../src/_vendor/zod-to-json-schema/parsers/never.ts"],"names":[],"mappings":"AAAA,MAAM,MAAM,oBAAoB,GAAG;IACjC,GAAG,EAAE,EAAE,CAAC;CACT,CAAC;AAEF,wBAAgB,aAAa,IAAI,oBAAoB,CAIpD"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/never.js b/node_modules/openai/_vendor/zod-to-json-schema/parsers/never.js new file mode 100644 index 0000000..cefc033 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/never.js @@ -0,0 +1,10 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.parseNeverDef = void 0; +function parseNeverDef() { + return { + not: {}, + }; +} +exports.parseNeverDef = parseNeverDef; +//# sourceMappingURL=never.js.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/never.js.map b/node_modules/openai/_vendor/zod-to-json-schema/parsers/never.js.map new file mode 100644 index 0000000..72d98b3 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/never.js.map @@ -0,0 +1 @@ +{"version":3,"file":"never.js","sourceRoot":"","sources":["../../../src/_vendor/zod-to-json-schema/parsers/never.ts"],"names":[],"mappings":";;;AAIA,SAAgB,aAAa;IAC3B,OAAO;QACL,GAAG,EAAE,EAAE;KACR,CAAC;AACJ,CAAC;AAJD,sCAIC"} \ No newline at end of file diff --git 
a/node_modules/openai/_vendor/zod-to-json-schema/parsers/never.mjs b/node_modules/openai/_vendor/zod-to-json-schema/parsers/never.mjs new file mode 100644 index 0000000..dd877d7 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/never.mjs @@ -0,0 +1,6 @@ +export function parseNeverDef() { + return { + not: {}, + }; +} +//# sourceMappingURL=never.mjs.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/never.mjs.map b/node_modules/openai/_vendor/zod-to-json-schema/parsers/never.mjs.map new file mode 100644 index 0000000..2a7ca45 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/never.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"never.mjs","sourceRoot":"","sources":["../../../src/_vendor/zod-to-json-schema/parsers/never.ts"],"names":[],"mappings":"AAIA,MAAM,UAAU,aAAa;IAC3B,OAAO;QACL,GAAG,EAAE,EAAE;KACR,CAAC;AACJ,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/null.d.ts b/node_modules/openai/_vendor/zod-to-json-schema/parsers/null.d.ts new file mode 100644 index 0000000..b2b993d --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/null.d.ts @@ -0,0 +1,6 @@ +import { Refs } from "../Refs.js"; +export type JsonSchema7NullType = { + type: 'null'; +}; +export declare function parseNullDef(refs: Refs): JsonSchema7NullType; +//# sourceMappingURL=null.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/null.d.ts.map b/node_modules/openai/_vendor/zod-to-json-schema/parsers/null.d.ts.map new file mode 100644 index 0000000..2c4a415 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/null.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"null.d.ts","sourceRoot":"","sources":["../../../src/_vendor/zod-to-json-schema/parsers/null.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,IAAI,EAAE,MAAM,SAAS,CAAC;AAE/B,MAAM,MAAM,mBAAmB,GAAG;IAChC,IAAI,EAAE,MAAM,CAAC;CACd,CAAC;AAEF,wBAAgB,YAAY,CAAC,IAAI,EAAE,IAAI,GAAG,mBAAmB,CAS5D"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/null.js b/node_modules/openai/_vendor/zod-to-json-schema/parsers/null.js new file mode 100644 index 0000000..fa68838 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/null.js @@ -0,0 +1,15 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.parseNullDef = void 0; +function parseNullDef(refs) { + return refs.target === 'openApi3' ? 
+ { + enum: ['null'], + nullable: true, + } + : { + type: 'null', + }; +} +exports.parseNullDef = parseNullDef; +//# sourceMappingURL=null.js.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/null.js.map b/node_modules/openai/_vendor/zod-to-json-schema/parsers/null.js.map new file mode 100644 index 0000000..6a60e2e --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/null.js.map @@ -0,0 +1 @@ +{"version":3,"file":"null.js","sourceRoot":"","sources":["../../../src/_vendor/zod-to-json-schema/parsers/null.ts"],"names":[],"mappings":";;;AAMA,SAAgB,YAAY,CAAC,IAAU;IACrC,OAAO,IAAI,CAAC,MAAM,KAAK,UAAU,CAAC,CAAC;QAC9B;YACC,IAAI,EAAE,CAAC,MAAM,CAAC;YACd,QAAQ,EAAE,IAAI;SACP;QACX,CAAC,CAAC;YACE,IAAI,EAAE,MAAM;SACb,CAAC;AACR,CAAC;AATD,oCASC"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/null.mjs b/node_modules/openai/_vendor/zod-to-json-schema/parsers/null.mjs new file mode 100644 index 0000000..cd8ce3b --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/null.mjs @@ -0,0 +1,11 @@ +export function parseNullDef(refs) { + return refs.target === 'openApi3' ? + { + enum: ['null'], + nullable: true, + } + : { + type: 'null', + }; +} +//# sourceMappingURL=null.mjs.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/null.mjs.map b/node_modules/openai/_vendor/zod-to-json-schema/parsers/null.mjs.map new file mode 100644 index 0000000..d67be9c --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/null.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"null.mjs","sourceRoot":"","sources":["../../../src/_vendor/zod-to-json-schema/parsers/null.ts"],"names":[],"mappings":"AAMA,MAAM,UAAU,YAAY,CAAC,IAAU;IACrC,OAAO,IAAI,CAAC,MAAM,KAAK,UAAU,CAAC,CAAC;QAC9B;YACC,IAAI,EAAE,CAAC,MAAM,CAAC;YACd,QAAQ,EAAE,IAAI;SACP;QACX,CAAC,CAAC;YACE,IAAI,EAAE,MAAM;SACb,CAAC;AACR,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/nullable.d.ts b/node_modules/openai/_vendor/zod-to-json-schema/parsers/nullable.d.ts new file mode 100644 index 0000000..40292b8 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/nullable.d.ts @@ -0,0 +1,11 @@ +import { ZodNullableDef } from 'zod'; +import { JsonSchema7Type } from "../parseDef.js"; +import { Refs } from "../Refs.js"; +import { JsonSchema7NullType } from "./null.js"; +export type JsonSchema7NullableType = { + anyOf: [JsonSchema7Type, JsonSchema7NullType]; +} | { + type: [string, 'null']; +}; +export declare function parseNullableDef(def: ZodNullableDef, refs: Refs): JsonSchema7NullableType | undefined; +//# sourceMappingURL=nullable.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/nullable.d.ts.map b/node_modules/openai/_vendor/zod-to-json-schema/parsers/nullable.d.ts.map new file mode 100644 index 0000000..8e20f3d --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/nullable.d.ts.map @@ -0,0 +1 @@ 
+{"version":3,"file":"nullable.d.ts","sourceRoot":"","sources":["../../../src/_vendor/zod-to-json-schema/parsers/nullable.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,cAAc,EAAE,MAAM,KAAK,CAAC;AACrC,OAAO,EAAE,eAAe,EAAY,MAAM,aAAa,CAAC;AACxD,OAAO,EAAE,IAAI,EAAE,MAAM,SAAS,CAAC;AAC/B,OAAO,EAAE,mBAAmB,EAAE,MAAM,QAAQ,CAAC;AAG7C,MAAM,MAAM,uBAAuB,GAC/B;IACE,KAAK,EAAE,CAAC,eAAe,EAAE,mBAAmB,CAAC,CAAC;CAC/C,GACD;IACE,IAAI,EAAE,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;CACxB,CAAC;AAEN,wBAAgB,gBAAgB,CAAC,GAAG,EAAE,cAAc,EAAE,IAAI,EAAE,IAAI,GAAG,uBAAuB,GAAG,SAAS,CAkCrG"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/nullable.js b/node_modules/openai/_vendor/zod-to-json-schema/parsers/nullable.js new file mode 100644 index 0000000..d41e08f --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/nullable.js @@ -0,0 +1,35 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.parseNullableDef = void 0; +const parseDef_1 = require("../parseDef.js"); +const union_1 = require("./union.js"); +function parseNullableDef(def, refs) { + if (['ZodString', 'ZodNumber', 'ZodBigInt', 'ZodBoolean', 'ZodNull'].includes(def.innerType._def.typeName) && + (!def.innerType._def.checks || !def.innerType._def.checks.length)) { + if (refs.target === 'openApi3' || refs.nullableStrategy === 'property') { + return { + type: union_1.primitiveMappings[def.innerType._def.typeName], + nullable: true, + }; + } + return { + type: [union_1.primitiveMappings[def.innerType._def.typeName], 'null'], + }; + } + if (refs.target === 'openApi3') { + const base = (0, parseDef_1.parseDef)(def.innerType._def, { + ...refs, + currentPath: [...refs.currentPath], + }); + if (base && '$ref' in base) + return { allOf: [base], nullable: true }; + return base && { ...base, nullable: true }; + } + const base = (0, parseDef_1.parseDef)(def.innerType._def, { + ...refs, + currentPath: [...refs.currentPath, 'anyOf', '0'], + }); + return base && { anyOf: [base, { type: 'null' }] }; +} +exports.parseNullableDef = parseNullableDef; +//# sourceMappingURL=nullable.js.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/nullable.js.map b/node_modules/openai/_vendor/zod-to-json-schema/parsers/nullable.js.map new file mode 100644 index 0000000..8c39bba --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/nullable.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"nullable.js","sourceRoot":"","sources":["../../../src/_vendor/zod-to-json-schema/parsers/nullable.ts"],"names":[],"mappings":";;;AACA,6CAAwD;AAGxD,sCAA4C;AAU5C,SAAgB,gBAAgB,CAAC,GAAmB,EAAE,IAAU;IAC9D,IACE,CAAC,WAAW,EAAE,WAAW,EAAE,WAAW,EAAE,YAAY,EAAE,SAAS,CAAC,CAAC,QAAQ,CAAC,GAAG,CAAC,SAAS,CAAC,IAAI,CAAC,QAAQ,CAAC;QACtG,CAAC,CAAC,GAAG,CAAC,SAAS,CAAC,IAAI,CAAC,MAAM,IAAI,CAAC,GAAG,CAAC,SAAS,CAAC,IAAI,CAAC,MAAM,CAAC,MAAM,CAAC,EACjE;QACA,IAAI,IAAI,CAAC,MAAM,KAAK,UAAU,IAAI,IAAI,CAAC,gBAAgB,KAAK,UAAU,EAAE;YACtE,OAAO;gBACL,IAAI,EAAE,yBAAiB,CAAC,GAAG,CAAC,SAAS,CAAC,IAAI,CAAC,QAA0C,CAAC;gBACtF,QAAQ,EAAE,IAAI;aACR,CAAC;SACV;QAED,OAAO;YACL,IAAI,EAAE,CAAC,yBAAiB,CAAC,GAAG,CAAC,SAAS,CAAC,IAAI,CAAC,QAA0C,CAAC,EAAE,MAAM,CAAC;SACjG,CAAC;KACH;IAED,IAAI,IAAI,CAAC,MAAM,KAAK,UAAU,EAAE;QAC9B,MAAM,IAAI,GAAG,IAAA,mBAAQ,EAAC,GAAG,CAAC,SAAS,CAAC,IAAI,EAAE;YACxC,GAAG,IAAI;YACP,WAAW,EAAE,CAAC,GAAG,IAAI,CAAC,WAAW,CAAC;SACnC,CAAC,CAAC;QAEH,IAAI,IAAI,IAAI,MAAM,IAAI,IAAI;YAAE,OAAO,EAAE,KAAK,EAAE,CAAC,IAAI,CAAC,EAAE,QAAQ,EAAE,IAAI,EAAS,CAAC;QAE5E,OAAO,IAAI,IAAK,EAAE,GAAG,IAAI,EAAE,QAAQ,EAAE,IAAI,EAAU,CAAC;KACrD;IAED,MAAM,IAAI,GAAG,IAAA,mBAAQ,EAAC,GAAG,CAAC,SAAS,CAAC,IAAI,EAAE;QACxC,GAAG,IAAI;QACP,WAAW,EAAE,CAAC,GAAG,IAAI,CAAC,WAAW,EAAE,OAAO,EAAE,GAAG,CAAC;KACjD,CAAC,CAAC;IAEH,OAAO,IAAI,IAAI,EAAE,KAAK,EAAE,CAAC,IAAI,EAAE,EAAE,IAAI,EAAE,MAAM,EAAE,CAAC,EAAE,CAAC;AACrD,CAAC;AAlCD,4CAkCC"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/nullable.mjs b/node_modules/openai/_vendor/zod-to-json-schema/parsers/nullable.mjs new file mode 100644 index 0000000..057ad33 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/nullable.mjs @@ -0,0 +1,31 @@ +import { parseDef } from "../parseDef.mjs"; +import { primitiveMappings } from "./union.mjs"; +export function parseNullableDef(def, refs) { + if (['ZodString', 'ZodNumber', 'ZodBigInt', 'ZodBoolean', 'ZodNull'].includes(def.innerType._def.typeName) && + (!def.innerType._def.checks || !def.innerType._def.checks.length)) { + if (refs.target === 'openApi3' || refs.nullableStrategy === 'property') { + return { + type: primitiveMappings[def.innerType._def.typeName], + nullable: true, + }; + } + return { + type: [primitiveMappings[def.innerType._def.typeName], 'null'], + }; + } + if (refs.target === 'openApi3') { + const base = parseDef(def.innerType._def, { + ...refs, + currentPath: [...refs.currentPath], + }); + if (base && '$ref' in base) + return { allOf: [base], nullable: true }; + return base && { ...base, nullable: true }; + } + const base = parseDef(def.innerType._def, { + ...refs, + currentPath: [...refs.currentPath, 'anyOf', '0'], + }); + return base && { anyOf: [base, { type: 'null' }] }; +} +//# sourceMappingURL=nullable.mjs.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/nullable.mjs.map b/node_modules/openai/_vendor/zod-to-json-schema/parsers/nullable.mjs.map new file mode 100644 index 0000000..54c4175 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/nullable.mjs.map @@ -0,0 +1 @@ 
+{"version":3,"file":"nullable.mjs","sourceRoot":"","sources":["../../../src/_vendor/zod-to-json-schema/parsers/nullable.ts"],"names":[],"mappings":"OACO,EAAmB,QAAQ,EAAE;OAG7B,EAAE,iBAAiB,EAAE;AAU5B,MAAM,UAAU,gBAAgB,CAAC,GAAmB,EAAE,IAAU;IAC9D,IACE,CAAC,WAAW,EAAE,WAAW,EAAE,WAAW,EAAE,YAAY,EAAE,SAAS,CAAC,CAAC,QAAQ,CAAC,GAAG,CAAC,SAAS,CAAC,IAAI,CAAC,QAAQ,CAAC;QACtG,CAAC,CAAC,GAAG,CAAC,SAAS,CAAC,IAAI,CAAC,MAAM,IAAI,CAAC,GAAG,CAAC,SAAS,CAAC,IAAI,CAAC,MAAM,CAAC,MAAM,CAAC,EACjE;QACA,IAAI,IAAI,CAAC,MAAM,KAAK,UAAU,IAAI,IAAI,CAAC,gBAAgB,KAAK,UAAU,EAAE;YACtE,OAAO;gBACL,IAAI,EAAE,iBAAiB,CAAC,GAAG,CAAC,SAAS,CAAC,IAAI,CAAC,QAA0C,CAAC;gBACtF,QAAQ,EAAE,IAAI;aACR,CAAC;SACV;QAED,OAAO;YACL,IAAI,EAAE,CAAC,iBAAiB,CAAC,GAAG,CAAC,SAAS,CAAC,IAAI,CAAC,QAA0C,CAAC,EAAE,MAAM,CAAC;SACjG,CAAC;KACH;IAED,IAAI,IAAI,CAAC,MAAM,KAAK,UAAU,EAAE;QAC9B,MAAM,IAAI,GAAG,QAAQ,CAAC,GAAG,CAAC,SAAS,CAAC,IAAI,EAAE;YACxC,GAAG,IAAI;YACP,WAAW,EAAE,CAAC,GAAG,IAAI,CAAC,WAAW,CAAC;SACnC,CAAC,CAAC;QAEH,IAAI,IAAI,IAAI,MAAM,IAAI,IAAI;YAAE,OAAO,EAAE,KAAK,EAAE,CAAC,IAAI,CAAC,EAAE,QAAQ,EAAE,IAAI,EAAS,CAAC;QAE5E,OAAO,IAAI,IAAK,EAAE,GAAG,IAAI,EAAE,QAAQ,EAAE,IAAI,EAAU,CAAC;KACrD;IAED,MAAM,IAAI,GAAG,QAAQ,CAAC,GAAG,CAAC,SAAS,CAAC,IAAI,EAAE;QACxC,GAAG,IAAI;QACP,WAAW,EAAE,CAAC,GAAG,IAAI,CAAC,WAAW,EAAE,OAAO,EAAE,GAAG,CAAC;KACjD,CAAC,CAAC;IAEH,OAAO,IAAI,IAAI,EAAE,KAAK,EAAE,CAAC,IAAI,EAAE,EAAE,IAAI,EAAE,MAAM,EAAE,CAAC,EAAE,CAAC;AACrD,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/number.d.ts b/node_modules/openai/_vendor/zod-to-json-schema/parsers/number.d.ts new file mode 100644 index 0000000..364b0ed --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/number.d.ts @@ -0,0 +1,14 @@ +import { ZodNumberDef } from 'zod'; +import { ErrorMessages } from "../errorMessages.js"; +import { Refs } from "../Refs.js"; +export type JsonSchema7NumberType = { + type: 'number' | 'integer'; + minimum?: number; + exclusiveMinimum?: number; + maximum?: number; + exclusiveMaximum?: number; + multipleOf?: number; + errorMessage?: ErrorMessages; +}; +export declare function parseNumberDef(def: ZodNumberDef, refs: Refs): JsonSchema7NumberType; +//# sourceMappingURL=number.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/number.d.ts.map b/node_modules/openai/_vendor/zod-to-json-schema/parsers/number.d.ts.map new file mode 100644 index 0000000..d10933f --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/number.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"number.d.ts","sourceRoot":"","sources":["../../../src/_vendor/zod-to-json-schema/parsers/number.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,YAAY,EAAE,MAAM,KAAK,CAAC;AACnC,OAAO,EAAmB,aAAa,EAA6B,MAAM,kBAAkB,CAAC;AAC7F,OAAO,EAAE,IAAI,EAAE,MAAM,SAAS,CAAC;AAE/B,MAAM,MAAM,qBAAqB,GAAG;IAClC,IAAI,EAAE,QAAQ,GAAG,SAAS,CAAC;IAC3B,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,gBAAgB,CAAC,EAAE,MAAM,CAAC;IAC1B,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,gBAAgB,CAAC,EAAE,MAAM,CAAC;IAC1B,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,YAAY,CAAC,EAAE,aAAa,CAAC,qBAAqB,CAAC,CAAC;CACrD,CAAC;AAEF,wBAAgB,cAAc,CAAC,GAAG,EAAE,YAAY,EAAE,IAAI,EAAE,IAAI,GAAG,qBAAqB,CA+CnF"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/number.js b/node_modules/openai/_vendor/zod-to-json-schema/parsers/number.js new file mode 100644 index 0000000..e59aa75 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/number.js @@ -0,0 +1,57 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { 
value: true }); +exports.parseNumberDef = void 0; +const errorMessages_1 = require("../errorMessages.js"); +function parseNumberDef(def, refs) { + const res = { + type: 'number', + }; + if (!def.checks) + return res; + for (const check of def.checks) { + switch (check.kind) { + case 'int': + res.type = 'integer'; + (0, errorMessages_1.addErrorMessage)(res, 'type', check.message, refs); + break; + case 'min': + if (refs.target === 'jsonSchema7') { + if (check.inclusive) { + (0, errorMessages_1.setResponseValueAndErrors)(res, 'minimum', check.value, check.message, refs); + } + else { + (0, errorMessages_1.setResponseValueAndErrors)(res, 'exclusiveMinimum', check.value, check.message, refs); + } + } + else { + if (!check.inclusive) { + res.exclusiveMinimum = true; + } + (0, errorMessages_1.setResponseValueAndErrors)(res, 'minimum', check.value, check.message, refs); + } + break; + case 'max': + if (refs.target === 'jsonSchema7') { + if (check.inclusive) { + (0, errorMessages_1.setResponseValueAndErrors)(res, 'maximum', check.value, check.message, refs); + } + else { + (0, errorMessages_1.setResponseValueAndErrors)(res, 'exclusiveMaximum', check.value, check.message, refs); + } + } + else { + if (!check.inclusive) { + res.exclusiveMaximum = true; + } + (0, errorMessages_1.setResponseValueAndErrors)(res, 'maximum', check.value, check.message, refs); + } + break; + case 'multipleOf': + (0, errorMessages_1.setResponseValueAndErrors)(res, 'multipleOf', check.value, check.message, refs); + break; + } + } + return res; +} +exports.parseNumberDef = parseNumberDef; +//# sourceMappingURL=number.js.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/number.js.map b/node_modules/openai/_vendor/zod-to-json-schema/parsers/number.js.map new file mode 100644 index 0000000..af67bf3 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/number.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"number.js","sourceRoot":"","sources":["../../../src/_vendor/zod-to-json-schema/parsers/number.ts"],"names":[],"mappings":";;;AACA,uDAA6F;AAa7F,SAAgB,cAAc,CAAC,GAAiB,EAAE,IAAU;IAC1D,MAAM,GAAG,GAA0B;QACjC,IAAI,EAAE,QAAQ;KACf,CAAC;IAEF,IAAI,CAAC,GAAG,CAAC,MAAM;QAAE,OAAO,GAAG,CAAC;IAE5B,KAAK,MAAM,KAAK,IAAI,GAAG,CAAC,MAAM,EAAE;QAC9B,QAAQ,KAAK,CAAC,IAAI,EAAE;YAClB,KAAK,KAAK;gBACR,GAAG,CAAC,IAAI,GAAG,SAAS,CAAC;gBACrB,IAAA,+BAAe,EAAC,GAAG,EAAE,MAAM,EAAE,KAAK,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;gBAClD,MAAM;YACR,KAAK,KAAK;gBACR,IAAI,IAAI,CAAC,MAAM,KAAK,aAAa,EAAE;oBACjC,IAAI,KAAK,CAAC,SAAS,EAAE;wBACnB,IAAA,yCAAyB,EAAC,GAAG,EAAE,SAAS,EAAE,KAAK,CAAC,KAAK,EAAE,KAAK,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;qBAC7E;yBAAM;wBACL,IAAA,yCAAyB,EAAC,GAAG,EAAE,kBAAkB,EAAE,KAAK,CAAC,KAAK,EAAE,KAAK,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;qBACtF;iBACF;qBAAM;oBACL,IAAI,CAAC,KAAK,CAAC,SAAS,EAAE;wBACpB,GAAG,CAAC,gBAAgB,GAAG,IAAW,CAAC;qBACpC;oBACD,IAAA,yCAAyB,EAAC,GAAG,EAAE,SAAS,EAAE,KAAK,CAAC,KAAK,EAAE,KAAK,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;iBAC7E;gBACD,MAAM;YACR,KAAK,KAAK;gBACR,IAAI,IAAI,CAAC,MAAM,KAAK,aAAa,EAAE;oBACjC,IAAI,KAAK,CAAC,SAAS,EAAE;wBACnB,IAAA,yCAAyB,EAAC,GAAG,EAAE,SAAS,EAAE,KAAK,CAAC,KAAK,EAAE,KAAK,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;qBAC7E;yBAAM;wBACL,IAAA,yCAAyB,EAAC,GAAG,EAAE,kBAAkB,EAAE,KAAK,CAAC,KAAK,EAAE,KAAK,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;qBACtF;iBACF;qBAAM;oBACL,IAAI,CAAC,KAAK,CAAC,SAAS,EAAE;wBACpB,GAAG,CAAC,gBAAgB,GAAG,IAAW,CAAC;qBACpC;oBACD,IAAA,yCAAyB,EAAC,GAAG,EAAE,SAAS,EAAE,KAAK,CAAC,KAAK,EAAE,KAAK,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;iBAC7E;gBACD,MAAM;YACR,KAAK,YAAY;gBACf,IAAA,yCAAyB,EAAC,GAAG,EAAE,YAAY,EAAE,KAAK,CAAC,KAAK,EAAE,KAAK,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;gBAC/E,MAAM;SACT;KACF;IACD,OAAO,GAAG,CAAC;AACb,CAAC;AA/CD,wCA+CC"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/number.mjs b/node_modules/openai/_vendor/zod-to-json-schema/parsers/number.mjs new file mode 100644 index 0000000..1560b00 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/number.mjs @@ -0,0 +1,53 @@ +import { addErrorMessage, setResponseValueAndErrors } from "../errorMessages.mjs"; +export function parseNumberDef(def, refs) { + const res = { + type: 'number', + }; + if (!def.checks) + return res; + for (const check of def.checks) { + switch (check.kind) { + case 'int': + res.type = 'integer'; + addErrorMessage(res, 'type', check.message, refs); + break; + case 'min': + if (refs.target === 'jsonSchema7') { + if (check.inclusive) { + setResponseValueAndErrors(res, 'minimum', check.value, check.message, refs); + } + else { + setResponseValueAndErrors(res, 'exclusiveMinimum', check.value, check.message, refs); + } + } + else { + if (!check.inclusive) { + res.exclusiveMinimum = true; + } + setResponseValueAndErrors(res, 'minimum', check.value, check.message, refs); + } + break; + case 'max': + if (refs.target === 'jsonSchema7') { + if (check.inclusive) { + setResponseValueAndErrors(res, 'maximum', check.value, check.message, refs); + } + else { + setResponseValueAndErrors(res, 'exclusiveMaximum', check.value, check.message, refs); + } + } + else { + if (!check.inclusive) { + res.exclusiveMaximum = true; + } + setResponseValueAndErrors(res, 'maximum', check.value, check.message, refs); + } + break; + case 'multipleOf': + setResponseValueAndErrors(res, 'multipleOf', check.value, check.message, refs); + break; + } + } + return res; +} +//# sourceMappingURL=number.mjs.map \ No newline at end of file diff --git 
a/node_modules/openai/_vendor/zod-to-json-schema/parsers/number.mjs.map b/node_modules/openai/_vendor/zod-to-json-schema/parsers/number.mjs.map new file mode 100644 index 0000000..77a2039 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/number.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"number.mjs","sourceRoot":"","sources":["../../../src/_vendor/zod-to-json-schema/parsers/number.ts"],"names":[],"mappings":"OACO,EAAE,eAAe,EAAiB,yBAAyB,EAAE;AAapE,MAAM,UAAU,cAAc,CAAC,GAAiB,EAAE,IAAU;IAC1D,MAAM,GAAG,GAA0B;QACjC,IAAI,EAAE,QAAQ;KACf,CAAC;IAEF,IAAI,CAAC,GAAG,CAAC,MAAM;QAAE,OAAO,GAAG,CAAC;IAE5B,KAAK,MAAM,KAAK,IAAI,GAAG,CAAC,MAAM,EAAE;QAC9B,QAAQ,KAAK,CAAC,IAAI,EAAE;YAClB,KAAK,KAAK;gBACR,GAAG,CAAC,IAAI,GAAG,SAAS,CAAC;gBACrB,eAAe,CAAC,GAAG,EAAE,MAAM,EAAE,KAAK,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;gBAClD,MAAM;YACR,KAAK,KAAK;gBACR,IAAI,IAAI,CAAC,MAAM,KAAK,aAAa,EAAE;oBACjC,IAAI,KAAK,CAAC,SAAS,EAAE;wBACnB,yBAAyB,CAAC,GAAG,EAAE,SAAS,EAAE,KAAK,CAAC,KAAK,EAAE,KAAK,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;qBAC7E;yBAAM;wBACL,yBAAyB,CAAC,GAAG,EAAE,kBAAkB,EAAE,KAAK,CAAC,KAAK,EAAE,KAAK,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;qBACtF;iBACF;qBAAM;oBACL,IAAI,CAAC,KAAK,CAAC,SAAS,EAAE;wBACpB,GAAG,CAAC,gBAAgB,GAAG,IAAW,CAAC;qBACpC;oBACD,yBAAyB,CAAC,GAAG,EAAE,SAAS,EAAE,KAAK,CAAC,KAAK,EAAE,KAAK,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;iBAC7E;gBACD,MAAM;YACR,KAAK,KAAK;gBACR,IAAI,IAAI,CAAC,MAAM,KAAK,aAAa,EAAE;oBACjC,IAAI,KAAK,CAAC,SAAS,EAAE;wBACnB,yBAAyB,CAAC,GAAG,EAAE,SAAS,EAAE,KAAK,CAAC,KAAK,EAAE,KAAK,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;qBAC7E;yBAAM;wBACL,yBAAyB,CAAC,GAAG,EAAE,kBAAkB,EAAE,KAAK,CAAC,KAAK,EAAE,KAAK,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;qBACtF;iBACF;qBAAM;oBACL,IAAI,CAAC,KAAK,CAAC,SAAS,EAAE;wBACpB,GAAG,CAAC,gBAAgB,GAAG,IAAW,CAAC;qBACpC;oBACD,yBAAyB,CAAC,GAAG,EAAE,SAAS,EAAE,KAAK,CAAC,KAAK,EAAE,KAAK,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;iBAC7E;gBACD,MAAM;YACR,KAAK,YAAY;gBACf,yBAAyB,CAAC,GAAG,EAAE,YAAY,EAAE,KAAK,CAAC,KAAK,EAAE,KAAK,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;gBAC/E,MAAM;SACT;KACF;IACD,OAAO,GAAG,CAAC;AACb,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/object.d.ts b/node_modules/openai/_vendor/zod-to-json-schema/parsers/object.d.ts new file mode 100644 index 0000000..9dc95e1 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/object.d.ts @@ -0,0 +1,11 @@ +import { ZodObjectDef } from 'zod'; +import { JsonSchema7Type } from "../parseDef.js"; +import { Refs } from "../Refs.js"; +export type JsonSchema7ObjectType = { + type: 'object'; + properties: Record; + additionalProperties: boolean | JsonSchema7Type; + required?: string[]; +}; +export declare function parseObjectDef(def: ZodObjectDef, refs: Refs): JsonSchema7ObjectType; +//# sourceMappingURL=object.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/object.d.ts.map b/node_modules/openai/_vendor/zod-to-json-schema/parsers/object.d.ts.map new file mode 100644 index 0000000..924128f --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/object.d.ts.map @@ -0,0 +1 @@ 
+{"version":3,"file":"object.d.ts","sourceRoot":"","sources":["../../../src/_vendor/zod-to-json-schema/parsers/object.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,YAAY,EAAE,MAAM,KAAK,CAAC;AACnC,OAAO,EAAE,eAAe,EAAY,MAAM,aAAa,CAAC;AACxD,OAAO,EAAE,IAAI,EAAE,MAAM,SAAS,CAAC;AAoB/B,MAAM,MAAM,qBAAqB,GAAG;IAClC,IAAI,EAAE,QAAQ,CAAC;IACf,UAAU,EAAE,MAAM,CAAC,MAAM,EAAE,eAAe,CAAC,CAAC;IAC5C,oBAAoB,EAAE,OAAO,GAAG,eAAe,CAAC;IAChD,QAAQ,CAAC,EAAE,MAAM,EAAE,CAAC;CACrB,CAAC;AAEF,wBAAgB,cAAc,CAAC,GAAG,EAAE,YAAY,EAAE,IAAI,EAAE,IAAI,yBAiC3D"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/object.js b/node_modules/openai/_vendor/zod-to-json-schema/parsers/object.js new file mode 100644 index 0000000..240874b --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/object.js @@ -0,0 +1,51 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.parseObjectDef = void 0; +const parseDef_1 = require("../parseDef.js"); +function decideAdditionalProperties(def, refs) { + if (refs.removeAdditionalStrategy === 'strict') { + return def.catchall._def.typeName === 'ZodNever' ? + def.unknownKeys !== 'strict' + : (0, parseDef_1.parseDef)(def.catchall._def, { + ...refs, + currentPath: [...refs.currentPath, 'additionalProperties'], + }) ?? true; + } + else { + return def.catchall._def.typeName === 'ZodNever' ? + def.unknownKeys === 'passthrough' + : (0, parseDef_1.parseDef)(def.catchall._def, { + ...refs, + currentPath: [...refs.currentPath, 'additionalProperties'], + }) ?? true; + } +} +function parseObjectDef(def, refs) { + const result = { + type: 'object', + ...Object.entries(def.shape()).reduce((acc, [propName, propDef]) => { + if (propDef === undefined || propDef._def === undefined) + return acc; + const parsedDef = (0, parseDef_1.parseDef)(propDef._def, { + ...refs, + currentPath: [...refs.currentPath, 'properties', propName], + propertyPath: [...refs.currentPath, 'properties', propName], + }); + if (parsedDef === undefined) + return acc; + return { + properties: { + ...acc.properties, + [propName]: parsedDef, + }, + required: propDef.isOptional() && !refs.openaiStrictMode ? 
acc.required : [...acc.required, propName], + }; + }, { properties: {}, required: [] }), + additionalProperties: decideAdditionalProperties(def, refs), + }; + if (!result.required.length) + delete result.required; + return result; +} +exports.parseObjectDef = parseObjectDef; +//# sourceMappingURL=object.js.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/object.js.map b/node_modules/openai/_vendor/zod-to-json-schema/parsers/object.js.map new file mode 100644 index 0000000..e0689bd --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/object.js.map @@ -0,0 +1 @@ +{"version":3,"file":"object.js","sourceRoot":"","sources":["../../../src/_vendor/zod-to-json-schema/parsers/object.ts"],"names":[],"mappings":";;;AACA,6CAAwD;AAGxD,SAAS,0BAA0B,CAAC,GAAiB,EAAE,IAAU;IAC/D,IAAI,IAAI,CAAC,wBAAwB,KAAK,QAAQ,EAAE;QAC9C,OAAO,GAAG,CAAC,QAAQ,CAAC,IAAI,CAAC,QAAQ,KAAK,UAAU,CAAC,CAAC;YAC9C,GAAG,CAAC,WAAW,KAAK,QAAQ;YAC9B,CAAC,CAAC,IAAA,mBAAQ,EAAC,GAAG,CAAC,QAAQ,CAAC,IAAI,EAAE;gBAC1B,GAAG,IAAI;gBACP,WAAW,EAAE,CAAC,GAAG,IAAI,CAAC,WAAW,EAAE,sBAAsB,CAAC;aAC3D,CAAC,IAAI,IAAI,CAAC;KAChB;SAAM;QACL,OAAO,GAAG,CAAC,QAAQ,CAAC,IAAI,CAAC,QAAQ,KAAK,UAAU,CAAC,CAAC;YAC9C,GAAG,CAAC,WAAW,KAAK,aAAa;YACnC,CAAC,CAAC,IAAA,mBAAQ,EAAC,GAAG,CAAC,QAAQ,CAAC,IAAI,EAAE;gBAC1B,GAAG,IAAI;gBACP,WAAW,EAAE,CAAC,GAAG,IAAI,CAAC,WAAW,EAAE,sBAAsB,CAAC;aAC3D,CAAC,IAAI,IAAI,CAAC;KAChB;AACH,CAAC;AASD,SAAgB,cAAc,CAAC,GAAiB,EAAE,IAAU;IAC1D,MAAM,MAAM,GAA0B;QACpC,IAAI,EAAE,QAAQ;QACd,GAAG,MAAM,CAAC,OAAO,CAAC,GAAG,CAAC,KAAK,EAAE,CAAC,CAAC,MAAM,CACnC,CACE,GAGC,EACD,CAAC,QAAQ,EAAE,OAAO,CAAC,EACnB,EAAE;YACF,IAAI,OAAO,KAAK,SAAS,IAAI,OAAO,CAAC,IAAI,KAAK,SAAS;gBAAE,OAAO,GAAG,CAAC;YACpE,MAAM,SAAS,GAAG,IAAA,mBAAQ,EAAC,OAAO,CAAC,IAAI,EAAE;gBACvC,GAAG,IAAI;gBACP,WAAW,EAAE,CAAC,GAAG,IAAI,CAAC,WAAW,EAAE,YAAY,EAAE,QAAQ,CAAC;gBAC1D,YAAY,EAAE,CAAC,GAAG,IAAI,CAAC,WAAW,EAAE,YAAY,EAAE,QAAQ,CAAC;aAC5D,CAAC,CAAC;YACH,IAAI,SAAS,KAAK,SAAS;gBAAE,OAAO,GAAG,CAAC;YACxC,OAAO;gBACL,UAAU,EAAE;oBACV,GAAG,GAAG,CAAC,UAAU;oBACjB,CAAC,QAAQ,CAAC,EAAE,SAAS;iBACtB;gBACD,QAAQ,EACN,OAAO,CAAC,UAAU,EAAE,IAAI,CAAC,IAAI,CAAC,gBAAgB,CAAC,CAAC,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,GAAG,GAAG,CAAC,QAAQ,EAAE,QAAQ,CAAC;aAC9F,CAAC;QACJ,CAAC,EACD,EAAE,UAAU,EAAE,EAAE,EAAE,QAAQ,EAAE,EAAE,EAAE,CACjC;QACD,oBAAoB,EAAE,0BAA0B,CAAC,GAAG,EAAE,IAAI,CAAC;KAC5D,CAAC;IACF,IAAI,CAAC,MAAM,CAAC,QAAS,CAAC,MAAM;QAAE,OAAO,MAAM,CAAC,QAAQ,CAAC;IACrD,OAAO,MAAM,CAAC;AAChB,CAAC;AAjCD,wCAiCC"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/object.mjs b/node_modules/openai/_vendor/zod-to-json-schema/parsers/object.mjs new file mode 100644 index 0000000..1aefaa5 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/object.mjs @@ -0,0 +1,47 @@ +import { parseDef } from "../parseDef.mjs"; +function decideAdditionalProperties(def, refs) { + if (refs.removeAdditionalStrategy === 'strict') { + return def.catchall._def.typeName === 'ZodNever' ? + def.unknownKeys !== 'strict' + : parseDef(def.catchall._def, { + ...refs, + currentPath: [...refs.currentPath, 'additionalProperties'], + }) ?? true; + } + else { + return def.catchall._def.typeName === 'ZodNever' ? + def.unknownKeys === 'passthrough' + : parseDef(def.catchall._def, { + ...refs, + currentPath: [...refs.currentPath, 'additionalProperties'], + }) ?? 
true; + } +} +export function parseObjectDef(def, refs) { + const result = { + type: 'object', + ...Object.entries(def.shape()).reduce((acc, [propName, propDef]) => { + if (propDef === undefined || propDef._def === undefined) + return acc; + const parsedDef = parseDef(propDef._def, { + ...refs, + currentPath: [...refs.currentPath, 'properties', propName], + propertyPath: [...refs.currentPath, 'properties', propName], + }); + if (parsedDef === undefined) + return acc; + return { + properties: { + ...acc.properties, + [propName]: parsedDef, + }, + required: propDef.isOptional() && !refs.openaiStrictMode ? acc.required : [...acc.required, propName], + }; + }, { properties: {}, required: [] }), + additionalProperties: decideAdditionalProperties(def, refs), + }; + if (!result.required.length) + delete result.required; + return result; +} +//# sourceMappingURL=object.mjs.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/object.mjs.map b/node_modules/openai/_vendor/zod-to-json-schema/parsers/object.mjs.map new file mode 100644 index 0000000..e998d4e --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/object.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"object.mjs","sourceRoot":"","sources":["../../../src/_vendor/zod-to-json-schema/parsers/object.ts"],"names":[],"mappings":"OACO,EAAmB,QAAQ,EAAE;AAGpC,SAAS,0BAA0B,CAAC,GAAiB,EAAE,IAAU;IAC/D,IAAI,IAAI,CAAC,wBAAwB,KAAK,QAAQ,EAAE;QAC9C,OAAO,GAAG,CAAC,QAAQ,CAAC,IAAI,CAAC,QAAQ,KAAK,UAAU,CAAC,CAAC;YAC9C,GAAG,CAAC,WAAW,KAAK,QAAQ;YAC9B,CAAC,CAAC,QAAQ,CAAC,GAAG,CAAC,QAAQ,CAAC,IAAI,EAAE;gBAC1B,GAAG,IAAI;gBACP,WAAW,EAAE,CAAC,GAAG,IAAI,CAAC,WAAW,EAAE,sBAAsB,CAAC;aAC3D,CAAC,IAAI,IAAI,CAAC;KAChB;SAAM;QACL,OAAO,GAAG,CAAC,QAAQ,CAAC,IAAI,CAAC,QAAQ,KAAK,UAAU,CAAC,CAAC;YAC9C,GAAG,CAAC,WAAW,KAAK,aAAa;YACnC,CAAC,CAAC,QAAQ,CAAC,GAAG,CAAC,QAAQ,CAAC,IAAI,EAAE;gBAC1B,GAAG,IAAI;gBACP,WAAW,EAAE,CAAC,GAAG,IAAI,CAAC,WAAW,EAAE,sBAAsB,CAAC;aAC3D,CAAC,IAAI,IAAI,CAAC;KAChB;AACH,CAAC;AASD,MAAM,UAAU,cAAc,CAAC,GAAiB,EAAE,IAAU;IAC1D,MAAM,MAAM,GAA0B;QACpC,IAAI,EAAE,QAAQ;QACd,GAAG,MAAM,CAAC,OAAO,CAAC,GAAG,CAAC,KAAK,EAAE,CAAC,CAAC,MAAM,CACnC,CACE,GAGC,EACD,CAAC,QAAQ,EAAE,OAAO,CAAC,EACnB,EAAE;YACF,IAAI,OAAO,KAAK,SAAS,IAAI,OAAO,CAAC,IAAI,KAAK,SAAS;gBAAE,OAAO,GAAG,CAAC;YACpE,MAAM,SAAS,GAAG,QAAQ,CAAC,OAAO,CAAC,IAAI,EAAE;gBACvC,GAAG,IAAI;gBACP,WAAW,EAAE,CAAC,GAAG,IAAI,CAAC,WAAW,EAAE,YAAY,EAAE,QAAQ,CAAC;gBAC1D,YAAY,EAAE,CAAC,GAAG,IAAI,CAAC,WAAW,EAAE,YAAY,EAAE,QAAQ,CAAC;aAC5D,CAAC,CAAC;YACH,IAAI,SAAS,KAAK,SAAS;gBAAE,OAAO,GAAG,CAAC;YACxC,OAAO;gBACL,UAAU,EAAE;oBACV,GAAG,GAAG,CAAC,UAAU;oBACjB,CAAC,QAAQ,CAAC,EAAE,SAAS;iBACtB;gBACD,QAAQ,EACN,OAAO,CAAC,UAAU,EAAE,IAAI,CAAC,IAAI,CAAC,gBAAgB,CAAC,CAAC,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,GAAG,GAAG,CAAC,QAAQ,EAAE,QAAQ,CAAC;aAC9F,CAAC;QACJ,CAAC,EACD,EAAE,UAAU,EAAE,EAAE,EAAE,QAAQ,EAAE,EAAE,EAAE,CACjC;QACD,oBAAoB,EAAE,0BAA0B,CAAC,GAAG,EAAE,IAAI,CAAC;KAC5D,CAAC;IACF,IAAI,CAAC,MAAM,CAAC,QAAS,CAAC,MAAM;QAAE,OAAO,MAAM,CAAC,QAAQ,CAAC;IACrD,OAAO,MAAM,CAAC;AAChB,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/optional.d.ts b/node_modules/openai/_vendor/zod-to-json-schema/parsers/optional.d.ts new file mode 100644 index 0000000..6160c99 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/optional.d.ts @@ -0,0 +1,5 @@ +import { ZodOptionalDef } from 'zod'; +import { JsonSchema7Type } from "../parseDef.js"; +import { Refs } from "../Refs.js"; +export declare const parseOptionalDef: (def: ZodOptionalDef, refs: Refs) => 
JsonSchema7Type | undefined; +//# sourceMappingURL=optional.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/optional.d.ts.map b/node_modules/openai/_vendor/zod-to-json-schema/parsers/optional.d.ts.map new file mode 100644 index 0000000..caf8fd5 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/optional.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"optional.d.ts","sourceRoot":"","sources":["../../../src/_vendor/zod-to-json-schema/parsers/optional.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,cAAc,EAAE,MAAM,KAAK,CAAC;AACrC,OAAO,EAAE,eAAe,EAAY,MAAM,aAAa,CAAC;AACxD,OAAO,EAAE,IAAI,EAAE,MAAM,SAAS,CAAC;AAE/B,eAAO,MAAM,gBAAgB,QAAS,cAAc,QAAQ,IAAI,KAAG,eAAe,GAAG,SAoBpF,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/optional.js b/node_modules/openai/_vendor/zod-to-json-schema/parsers/optional.js new file mode 100644 index 0000000..e87f727 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/optional.js @@ -0,0 +1,25 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.parseOptionalDef = void 0; +const parseDef_1 = require("../parseDef.js"); +const parseOptionalDef = (def, refs) => { + if (refs.currentPath.toString() === refs.propertyPath?.toString()) { + return (0, parseDef_1.parseDef)(def.innerType._def, refs); + } + const innerSchema = (0, parseDef_1.parseDef)(def.innerType._def, { + ...refs, + currentPath: [...refs.currentPath, 'anyOf', '1'], + }); + return innerSchema ? + { + anyOf: [ + { + not: {}, + }, + innerSchema, + ], + } + : {}; +}; +exports.parseOptionalDef = parseOptionalDef; +//# sourceMappingURL=optional.js.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/optional.js.map b/node_modules/openai/_vendor/zod-to-json-schema/parsers/optional.js.map new file mode 100644 index 0000000..001fb2a --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/optional.js.map @@ -0,0 +1 @@ +{"version":3,"file":"optional.js","sourceRoot":"","sources":["../../../src/_vendor/zod-to-json-schema/parsers/optional.ts"],"names":[],"mappings":";;;AACA,6CAAwD;AAGjD,MAAM,gBAAgB,GAAG,CAAC,GAAmB,EAAE,IAAU,EAA+B,EAAE;IAC/F,IAAI,IAAI,CAAC,WAAW,CAAC,QAAQ,EAAE,KAAK,IAAI,CAAC,YAAY,EAAE,QAAQ,EAAE,EAAE;QACjE,OAAO,IAAA,mBAAQ,EAAC,GAAG,CAAC,SAAS,CAAC,IAAI,EAAE,IAAI,CAAC,CAAC;KAC3C;IAED,MAAM,WAAW,GAAG,IAAA,mBAAQ,EAAC,GAAG,CAAC,SAAS,CAAC,IAAI,EAAE;QAC/C,GAAG,IAAI;QACP,WAAW,EAAE,CAAC,GAAG,IAAI,CAAC,WAAW,EAAE,OAAO,EAAE,GAAG,CAAC;KACjD,CAAC,CAAC;IAEH,OAAO,WAAW,CAAC,CAAC;QAChB;YACE,KAAK,EAAE;gBACL;oBACE,GAAG,EAAE,EAAE;iBACR;gBACD,WAAW;aACZ;SACF;QACH,CAAC,CAAC,EAAE,CAAC;AACT,CAAC,CAAC;AApBW,QAAA,gBAAgB,oBAoB3B"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/optional.mjs b/node_modules/openai/_vendor/zod-to-json-schema/parsers/optional.mjs new file mode 100644 index 0000000..9e47a44 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/optional.mjs @@ -0,0 +1,21 @@ +import { parseDef } from "../parseDef.mjs"; +export const parseOptionalDef = (def, refs) => { + if (refs.currentPath.toString() === refs.propertyPath?.toString()) { + return parseDef(def.innerType._def, refs); + } + const innerSchema = parseDef(def.innerType._def, { + ...refs, + currentPath: [...refs.currentPath, 'anyOf', '1'], + }); + return innerSchema ? 
+ { + anyOf: [ + { + not: {}, + }, + innerSchema, + ], + } + : {}; +}; +//# sourceMappingURL=optional.mjs.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/optional.mjs.map b/node_modules/openai/_vendor/zod-to-json-schema/parsers/optional.mjs.map new file mode 100644 index 0000000..033a566 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/optional.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"optional.mjs","sourceRoot":"","sources":["../../../src/_vendor/zod-to-json-schema/parsers/optional.ts"],"names":[],"mappings":"OACO,EAAmB,QAAQ,EAAE;AAGpC,MAAM,CAAC,MAAM,gBAAgB,GAAG,CAAC,GAAmB,EAAE,IAAU,EAA+B,EAAE;IAC/F,IAAI,IAAI,CAAC,WAAW,CAAC,QAAQ,EAAE,KAAK,IAAI,CAAC,YAAY,EAAE,QAAQ,EAAE,EAAE;QACjE,OAAO,QAAQ,CAAC,GAAG,CAAC,SAAS,CAAC,IAAI,EAAE,IAAI,CAAC,CAAC;KAC3C;IAED,MAAM,WAAW,GAAG,QAAQ,CAAC,GAAG,CAAC,SAAS,CAAC,IAAI,EAAE;QAC/C,GAAG,IAAI;QACP,WAAW,EAAE,CAAC,GAAG,IAAI,CAAC,WAAW,EAAE,OAAO,EAAE,GAAG,CAAC;KACjD,CAAC,CAAC;IAEH,OAAO,WAAW,CAAC,CAAC;QAChB;YACE,KAAK,EAAE;gBACL;oBACE,GAAG,EAAE,EAAE;iBACR;gBACD,WAAW;aACZ;SACF;QACH,CAAC,CAAC,EAAE,CAAC;AACT,CAAC,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/pipeline.d.ts b/node_modules/openai/_vendor/zod-to-json-schema/parsers/pipeline.d.ts new file mode 100644 index 0000000..4fe9b1b --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/pipeline.d.ts @@ -0,0 +1,6 @@ +import { ZodPipelineDef } from 'zod'; +import { JsonSchema7Type } from "../parseDef.js"; +import { Refs } from "../Refs.js"; +import { JsonSchema7AllOfType } from "./intersection.js"; +export declare const parsePipelineDef: (def: ZodPipelineDef, refs: Refs) => JsonSchema7AllOfType | JsonSchema7Type | undefined; +//# sourceMappingURL=pipeline.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/pipeline.d.ts.map b/node_modules/openai/_vendor/zod-to-json-schema/parsers/pipeline.d.ts.map new file mode 100644 index 0000000..e6aa218 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/pipeline.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"pipeline.d.ts","sourceRoot":"","sources":["../../../src/_vendor/zod-to-json-schema/parsers/pipeline.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,cAAc,EAAE,MAAM,KAAK,CAAC;AACrC,OAAO,EAAE,eAAe,EAAY,MAAM,aAAa,CAAC;AACxD,OAAO,EAAE,IAAI,EAAE,MAAM,SAAS,CAAC;AAC/B,OAAO,EAAE,oBAAoB,EAAE,MAAM,gBAAgB,CAAC;AAEtD,eAAO,MAAM,gBAAgB,QACtB,eAAe,GAAG,EAAE,GAAG,CAAC,QACvB,IAAI,KACT,oBAAoB,GAAG,eAAe,GAAG,SAmB3C,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/pipeline.js b/node_modules/openai/_vendor/zod-to-json-schema/parsers/pipeline.js new file mode 100644 index 0000000..34a2337 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/pipeline.js @@ -0,0 +1,25 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.parsePipelineDef = void 0; +const parseDef_1 = require("../parseDef.js"); +const parsePipelineDef = (def, refs) => { + if (refs.pipeStrategy === 'input') { + return (0, parseDef_1.parseDef)(def.in._def, refs); + } + else if (refs.pipeStrategy === 'output') { + return (0, parseDef_1.parseDef)(def.out._def, refs); + } + const a = (0, parseDef_1.parseDef)(def.in._def, { + ...refs, + currentPath: [...refs.currentPath, 'allOf', '0'], + }); + const b = (0, parseDef_1.parseDef)(def.out._def, { + ...refs, + currentPath: [...refs.currentPath, 'allOf', a ? 
'1' : '0'], + }); + return { + allOf: [a, b].filter((x) => x !== undefined), + }; +}; +exports.parsePipelineDef = parsePipelineDef; +//# sourceMappingURL=pipeline.js.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/pipeline.js.map b/node_modules/openai/_vendor/zod-to-json-schema/parsers/pipeline.js.map new file mode 100644 index 0000000..b85c2ab --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/pipeline.js.map @@ -0,0 +1 @@ +{"version":3,"file":"pipeline.js","sourceRoot":"","sources":["../../../src/_vendor/zod-to-json-schema/parsers/pipeline.ts"],"names":[],"mappings":";;;AACA,6CAAwD;AAIjD,MAAM,gBAAgB,GAAG,CAC9B,GAA6B,EAC7B,IAAU,EAC0C,EAAE;IACtD,IAAI,IAAI,CAAC,YAAY,KAAK,OAAO,EAAE;QACjC,OAAO,IAAA,mBAAQ,EAAC,GAAG,CAAC,EAAE,CAAC,IAAI,EAAE,IAAI,CAAC,CAAC;KACpC;SAAM,IAAI,IAAI,CAAC,YAAY,KAAK,QAAQ,EAAE;QACzC,OAAO,IAAA,mBAAQ,EAAC,GAAG,CAAC,GAAG,CAAC,IAAI,EAAE,IAAI,CAAC,CAAC;KACrC;IAED,MAAM,CAAC,GAAG,IAAA,mBAAQ,EAAC,GAAG,CAAC,EAAE,CAAC,IAAI,EAAE;QAC9B,GAAG,IAAI;QACP,WAAW,EAAE,CAAC,GAAG,IAAI,CAAC,WAAW,EAAE,OAAO,EAAE,GAAG,CAAC;KACjD,CAAC,CAAC;IACH,MAAM,CAAC,GAAG,IAAA,mBAAQ,EAAC,GAAG,CAAC,GAAG,CAAC,IAAI,EAAE;QAC/B,GAAG,IAAI;QACP,WAAW,EAAE,CAAC,GAAG,IAAI,CAAC,WAAW,EAAE,OAAO,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC;KAC3D,CAAC,CAAC;IAEH,OAAO;QACL,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,EAAwB,EAAE,CAAC,CAAC,KAAK,SAAS,CAAC;KACnE,CAAC;AACJ,CAAC,CAAC;AAtBW,QAAA,gBAAgB,oBAsB3B"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/pipeline.mjs b/node_modules/openai/_vendor/zod-to-json-schema/parsers/pipeline.mjs new file mode 100644 index 0000000..540d15a --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/pipeline.mjs @@ -0,0 +1,21 @@ +import { parseDef } from "../parseDef.mjs"; +export const parsePipelineDef = (def, refs) => { + if (refs.pipeStrategy === 'input') { + return parseDef(def.in._def, refs); + } + else if (refs.pipeStrategy === 'output') { + return parseDef(def.out._def, refs); + } + const a = parseDef(def.in._def, { + ...refs, + currentPath: [...refs.currentPath, 'allOf', '0'], + }); + const b = parseDef(def.out._def, { + ...refs, + currentPath: [...refs.currentPath, 'allOf', a ? 
'1' : '0'], + }); + return { + allOf: [a, b].filter((x) => x !== undefined), + }; +}; +//# sourceMappingURL=pipeline.mjs.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/pipeline.mjs.map b/node_modules/openai/_vendor/zod-to-json-schema/parsers/pipeline.mjs.map new file mode 100644 index 0000000..4458572 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/pipeline.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"pipeline.mjs","sourceRoot":"","sources":["../../../src/_vendor/zod-to-json-schema/parsers/pipeline.ts"],"names":[],"mappings":"OACO,EAAmB,QAAQ,EAAE;AAIpC,MAAM,CAAC,MAAM,gBAAgB,GAAG,CAC9B,GAA6B,EAC7B,IAAU,EAC0C,EAAE;IACtD,IAAI,IAAI,CAAC,YAAY,KAAK,OAAO,EAAE;QACjC,OAAO,QAAQ,CAAC,GAAG,CAAC,EAAE,CAAC,IAAI,EAAE,IAAI,CAAC,CAAC;KACpC;SAAM,IAAI,IAAI,CAAC,YAAY,KAAK,QAAQ,EAAE;QACzC,OAAO,QAAQ,CAAC,GAAG,CAAC,GAAG,CAAC,IAAI,EAAE,IAAI,CAAC,CAAC;KACrC;IAED,MAAM,CAAC,GAAG,QAAQ,CAAC,GAAG,CAAC,EAAE,CAAC,IAAI,EAAE;QAC9B,GAAG,IAAI;QACP,WAAW,EAAE,CAAC,GAAG,IAAI,CAAC,WAAW,EAAE,OAAO,EAAE,GAAG,CAAC;KACjD,CAAC,CAAC;IACH,MAAM,CAAC,GAAG,QAAQ,CAAC,GAAG,CAAC,GAAG,CAAC,IAAI,EAAE;QAC/B,GAAG,IAAI;QACP,WAAW,EAAE,CAAC,GAAG,IAAI,CAAC,WAAW,EAAE,OAAO,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC;KAC3D,CAAC,CAAC;IAEH,OAAO;QACL,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,EAAwB,EAAE,CAAC,CAAC,KAAK,SAAS,CAAC;KACnE,CAAC;AACJ,CAAC,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/promise.d.ts b/node_modules/openai/_vendor/zod-to-json-schema/parsers/promise.d.ts new file mode 100644 index 0000000..5182c7a --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/promise.d.ts @@ -0,0 +1,5 @@ +import { ZodPromiseDef } from 'zod'; +import { JsonSchema7Type } from "../parseDef.js"; +import { Refs } from "../Refs.js"; +export declare function parsePromiseDef(def: ZodPromiseDef, refs: Refs): JsonSchema7Type | undefined; +//# sourceMappingURL=promise.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/promise.d.ts.map b/node_modules/openai/_vendor/zod-to-json-schema/parsers/promise.d.ts.map new file mode 100644 index 0000000..7241d1c --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/promise.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"promise.d.ts","sourceRoot":"","sources":["../../../src/_vendor/zod-to-json-schema/parsers/promise.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,aAAa,EAAE,MAAM,KAAK,CAAC;AACpC,OAAO,EAAE,eAAe,EAAY,MAAM,aAAa,CAAC;AACxD,OAAO,EAAE,IAAI,EAAE,MAAM,SAAS,CAAC;AAE/B,wBAAgB,eAAe,CAAC,GAAG,EAAE,aAAa,EAAE,IAAI,EAAE,IAAI,GAAG,eAAe,GAAG,SAAS,CAE3F"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/promise.js b/node_modules/openai/_vendor/zod-to-json-schema/parsers/promise.js new file mode 100644 index 0000000..6a0794f --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/promise.js @@ -0,0 +1,9 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.parsePromiseDef = void 0; +const parseDef_1 = require("../parseDef.js"); +function parsePromiseDef(def, refs) { + return (0, parseDef_1.parseDef)(def.type._def, refs); +} +exports.parsePromiseDef = parsePromiseDef; +//# sourceMappingURL=promise.js.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/promise.js.map b/node_modules/openai/_vendor/zod-to-json-schema/parsers/promise.js.map new 
file mode 100644 index 0000000..d802490 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/promise.js.map @@ -0,0 +1 @@ +{"version":3,"file":"promise.js","sourceRoot":"","sources":["../../../src/_vendor/zod-to-json-schema/parsers/promise.ts"],"names":[],"mappings":";;;AACA,6CAAwD;AAGxD,SAAgB,eAAe,CAAC,GAAkB,EAAE,IAAU;IAC5D,OAAO,IAAA,mBAAQ,EAAC,GAAG,CAAC,IAAI,CAAC,IAAI,EAAE,IAAI,CAAC,CAAC;AACvC,CAAC;AAFD,0CAEC"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/promise.mjs b/node_modules/openai/_vendor/zod-to-json-schema/parsers/promise.mjs new file mode 100644 index 0000000..cfe6b73 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/promise.mjs @@ -0,0 +1,5 @@ +import { parseDef } from "../parseDef.mjs"; +export function parsePromiseDef(def, refs) { + return parseDef(def.type._def, refs); +} +//# sourceMappingURL=promise.mjs.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/promise.mjs.map b/node_modules/openai/_vendor/zod-to-json-schema/parsers/promise.mjs.map new file mode 100644 index 0000000..2cee9b0 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/promise.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"promise.mjs","sourceRoot":"","sources":["../../../src/_vendor/zod-to-json-schema/parsers/promise.ts"],"names":[],"mappings":"OACO,EAAmB,QAAQ,EAAE;AAGpC,MAAM,UAAU,eAAe,CAAC,GAAkB,EAAE,IAAU;IAC5D,OAAO,QAAQ,CAAC,GAAG,CAAC,IAAI,CAAC,IAAI,EAAE,IAAI,CAAC,CAAC;AACvC,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/readonly.d.ts b/node_modules/openai/_vendor/zod-to-json-schema/parsers/readonly.d.ts new file mode 100644 index 0000000..abcd39c --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/readonly.d.ts @@ -0,0 +1,4 @@ +import { ZodReadonlyDef } from 'zod'; +import { Refs } from "../Refs.js"; +export declare const parseReadonlyDef: (def: ZodReadonlyDef, refs: Refs) => import("../parseDef").JsonSchema7Type | undefined; +//# sourceMappingURL=readonly.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/readonly.d.ts.map b/node_modules/openai/_vendor/zod-to-json-schema/parsers/readonly.d.ts.map new file mode 100644 index 0000000..0828825 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/readonly.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"readonly.d.ts","sourceRoot":"","sources":["../../../src/_vendor/zod-to-json-schema/parsers/readonly.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,cAAc,EAAE,MAAM,KAAK,CAAC;AAErC,OAAO,EAAE,IAAI,EAAE,MAAM,SAAS,CAAC;AAE/B,eAAO,MAAM,gBAAgB,QAAS,eAAe,GAAG,CAAC,QAAQ,IAAI,sDAEpE,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/readonly.js b/node_modules/openai/_vendor/zod-to-json-schema/parsers/readonly.js new file mode 100644 index 0000000..e3f91c4 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/readonly.js @@ -0,0 +1,9 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.parseReadonlyDef = void 0; +const parseDef_1 = require("../parseDef.js"); +const parseReadonlyDef = (def, refs) => { + return (0, parseDef_1.parseDef)(def.innerType._def, refs); +}; +exports.parseReadonlyDef = parseReadonlyDef; +//# sourceMappingURL=readonly.js.map \ No newline at end of file diff --git 
a/node_modules/openai/_vendor/zod-to-json-schema/parsers/readonly.js.map b/node_modules/openai/_vendor/zod-to-json-schema/parsers/readonly.js.map new file mode 100644 index 0000000..0d4d60d --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/readonly.js.map @@ -0,0 +1 @@ +{"version":3,"file":"readonly.js","sourceRoot":"","sources":["../../../src/_vendor/zod-to-json-schema/parsers/readonly.ts"],"names":[],"mappings":";;;AACA,6CAAuC;AAGhC,MAAM,gBAAgB,GAAG,CAAC,GAAwB,EAAE,IAAU,EAAE,EAAE;IACvE,OAAO,IAAA,mBAAQ,EAAC,GAAG,CAAC,SAAS,CAAC,IAAI,EAAE,IAAI,CAAC,CAAC;AAC5C,CAAC,CAAC;AAFW,QAAA,gBAAgB,oBAE3B"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/readonly.mjs b/node_modules/openai/_vendor/zod-to-json-schema/parsers/readonly.mjs new file mode 100644 index 0000000..852288a --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/readonly.mjs @@ -0,0 +1,5 @@ +import { parseDef } from "../parseDef.mjs"; +export const parseReadonlyDef = (def, refs) => { + return parseDef(def.innerType._def, refs); +}; +//# sourceMappingURL=readonly.mjs.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/readonly.mjs.map b/node_modules/openai/_vendor/zod-to-json-schema/parsers/readonly.mjs.map new file mode 100644 index 0000000..64dfbc7 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/readonly.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"readonly.mjs","sourceRoot":"","sources":["../../../src/_vendor/zod-to-json-schema/parsers/readonly.ts"],"names":[],"mappings":"OACO,EAAE,QAAQ,EAAE;AAGnB,MAAM,CAAC,MAAM,gBAAgB,GAAG,CAAC,GAAwB,EAAE,IAAU,EAAE,EAAE;IACvE,OAAO,QAAQ,CAAC,GAAG,CAAC,SAAS,CAAC,IAAI,EAAE,IAAI,CAAC,CAAC;AAC5C,CAAC,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/record.d.ts b/node_modules/openai/_vendor/zod-to-json-schema/parsers/record.d.ts new file mode 100644 index 0000000..7a5d5b6 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/record.d.ts @@ -0,0 +1,14 @@ +import { ZodMapDef, ZodRecordDef, ZodTypeAny } from 'zod'; +import { JsonSchema7Type } from "../parseDef.js"; +import { Refs } from "../Refs.js"; +import { JsonSchema7EnumType } from "./enum.js"; +import { JsonSchema7StringType } from "./string.js"; +type JsonSchema7RecordPropertyNamesType = Omit | Omit; +export type JsonSchema7RecordType = { + type: 'object'; + additionalProperties: JsonSchema7Type; + propertyNames?: JsonSchema7RecordPropertyNamesType; +}; +export declare function parseRecordDef(def: ZodRecordDef | ZodMapDef, refs: Refs): JsonSchema7RecordType; +export {}; +//# sourceMappingURL=record.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/record.d.ts.map b/node_modules/openai/_vendor/zod-to-json-schema/parsers/record.d.ts.map new file mode 100644 index 0000000..99b9fdf --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/record.d.ts.map @@ -0,0 +1 @@ 
+{"version":3,"file":"record.d.ts","sourceRoot":"","sources":["../../../src/_vendor/zod-to-json-schema/parsers/record.ts"],"names":[],"mappings":"AAAA,OAAO,EAAyB,SAAS,EAAE,YAAY,EAAE,UAAU,EAAE,MAAM,KAAK,CAAC;AACjF,OAAO,EAAE,eAAe,EAAY,MAAM,aAAa,CAAC;AACxD,OAAO,EAAE,IAAI,EAAE,MAAM,SAAS,CAAC;AAC/B,OAAO,EAAE,mBAAmB,EAAE,MAAM,QAAQ,CAAC;AAE7C,OAAO,EAAE,qBAAqB,EAAkB,MAAM,UAAU,CAAC;AAEjE,KAAK,kCAAkC,GACnC,IAAI,CAAC,qBAAqB,EAAE,MAAM,CAAC,GACnC,IAAI,CAAC,mBAAmB,EAAE,MAAM,CAAC,CAAC;AAEtC,MAAM,MAAM,qBAAqB,GAAG;IAClC,IAAI,EAAE,QAAQ,CAAC;IACf,oBAAoB,EAAE,eAAe,CAAC;IACtC,aAAa,CAAC,EAAE,kCAAkC,CAAC;CACpD,CAAC;AAEF,wBAAgB,cAAc,CAC5B,GAAG,EAAE,YAAY,CAAC,UAAU,EAAE,UAAU,CAAC,GAAG,SAAS,EACrD,IAAI,EAAE,IAAI,GACT,qBAAqB,CAoDvB"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/record.js b/node_modules/openai/_vendor/zod-to-json-schema/parsers/record.js new file mode 100644 index 0000000..8a5bb20 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/record.js @@ -0,0 +1,50 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.parseRecordDef = void 0; +const zod_1 = require("zod"); +const parseDef_1 = require("../parseDef.js"); +const string_1 = require("./string.js"); +function parseRecordDef(def, refs) { + if (refs.target === 'openApi3' && def.keyType?._def.typeName === zod_1.ZodFirstPartyTypeKind.ZodEnum) { + return { + type: 'object', + required: def.keyType._def.values, + properties: def.keyType._def.values.reduce((acc, key) => ({ + ...acc, + [key]: (0, parseDef_1.parseDef)(def.valueType._def, { + ...refs, + currentPath: [...refs.currentPath, 'properties', key], + }) ?? {}, + }), {}), + additionalProperties: false, + }; + } + const schema = { + type: 'object', + additionalProperties: (0, parseDef_1.parseDef)(def.valueType._def, { + ...refs, + currentPath: [...refs.currentPath, 'additionalProperties'], + }) ?? {}, + }; + if (refs.target === 'openApi3') { + return schema; + } + if (def.keyType?._def.typeName === zod_1.ZodFirstPartyTypeKind.ZodString && def.keyType._def.checks?.length) { + const keyType = Object.entries((0, string_1.parseStringDef)(def.keyType._def, refs)).reduce((acc, [key, value]) => (key === 'type' ? 
acc : { ...acc, [key]: value }), {}); + return { + ...schema, + propertyNames: keyType, + }; + } + else if (def.keyType?._def.typeName === zod_1.ZodFirstPartyTypeKind.ZodEnum) { + return { + ...schema, + propertyNames: { + enum: def.keyType._def.values, + }, + }; + } + return schema; +} +exports.parseRecordDef = parseRecordDef; +//# sourceMappingURL=record.js.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/record.js.map b/node_modules/openai/_vendor/zod-to-json-schema/parsers/record.js.map new file mode 100644 index 0000000..1a6e3ee --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/record.js.map @@ -0,0 +1 @@ +{"version":3,"file":"record.js","sourceRoot":"","sources":["../../../src/_vendor/zod-to-json-schema/parsers/record.ts"],"names":[],"mappings":";;;AAAA,6BAAiF;AACjF,6CAAwD;AAIxD,wCAAiE;AAYjE,SAAgB,cAAc,CAC5B,GAAqD,EACrD,IAAU;IAEV,IAAI,IAAI,CAAC,MAAM,KAAK,UAAU,IAAI,GAAG,CAAC,OAAO,EAAE,IAAI,CAAC,QAAQ,KAAK,2BAAqB,CAAC,OAAO,EAAE;QAC9F,OAAO;YACL,IAAI,EAAE,QAAQ;YACd,QAAQ,EAAE,GAAG,CAAC,OAAO,CAAC,IAAI,CAAC,MAAM;YACjC,UAAU,EAAE,GAAG,CAAC,OAAO,CAAC,IAAI,CAAC,MAAM,CAAC,MAAM,CACxC,CAAC,GAAoC,EAAE,GAAW,EAAE,EAAE,CAAC,CAAC;gBACtD,GAAG,GAAG;gBACN,CAAC,GAAG,CAAC,EACH,IAAA,mBAAQ,EAAC,GAAG,CAAC,SAAS,CAAC,IAAI,EAAE;oBAC3B,GAAG,IAAI;oBACP,WAAW,EAAE,CAAC,GAAG,IAAI,CAAC,WAAW,EAAE,YAAY,EAAE,GAAG,CAAC;iBACtD,CAAC,IAAI,EAAE;aACX,CAAC,EACF,EAAE,CACH;YACD,oBAAoB,EAAE,KAAK;SACW,CAAC;KAC1C;IAED,MAAM,MAAM,GAA0B;QACpC,IAAI,EAAE,QAAQ;QACd,oBAAoB,EAClB,IAAA,mBAAQ,EAAC,GAAG,CAAC,SAAS,CAAC,IAAI,EAAE;YAC3B,GAAG,IAAI;YACP,WAAW,EAAE,CAAC,GAAG,IAAI,CAAC,WAAW,EAAE,sBAAsB,CAAC;SAC3D,CAAC,IAAI,EAAE;KACX,CAAC;IAEF,IAAI,IAAI,CAAC,MAAM,KAAK,UAAU,EAAE;QAC9B,OAAO,MAAM,CAAC;KACf;IAED,IAAI,GAAG,CAAC,OAAO,EAAE,IAAI,CAAC,QAAQ,KAAK,2BAAqB,CAAC,SAAS,IAAI,GAAG,CAAC,OAAO,CAAC,IAAI,CAAC,MAAM,EAAE,MAAM,EAAE;QACrG,MAAM,OAAO,GAAuC,MAAM,CAAC,OAAO,CAChE,IAAA,uBAAc,EAAC,GAAG,CAAC,OAAO,CAAC,IAAI,EAAE,IAAI,CAAC,CACvC,CAAC,MAAM,CAAC,CAAC,GAAG,EAAE,CAAC,GAAG,EAAE,KAAK,CAAC,EAAE,EAAE,CAAC,CAAC,GAAG,KAAK,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,GAAG,GAAG,EAAE,CAAC,GAAG,CAAC,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC;QAEvF,OAAO;YACL,GAAG,MAAM;YACT,aAAa,EAAE,OAAO;SACvB,CAAC;KACH;SAAM,IAAI,GAAG,CAAC,OAAO,EAAE,IAAI,CAAC,QAAQ,KAAK,2BAAqB,CAAC,OAAO,EAAE;QACvE,OAAO;YACL,GAAG,MAAM;YACT,aAAa,EAAE;gBACb,IAAI,EAAE,GAAG,CAAC,OAAO,CAAC,IAAI,CAAC,MAAM;aAC9B;SACF,CAAC;KACH;IAED,OAAO,MAAM,CAAC;AAChB,CAAC;AAvDD,wCAuDC"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/record.mjs b/node_modules/openai/_vendor/zod-to-json-schema/parsers/record.mjs new file mode 100644 index 0000000..5ac62ea --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/record.mjs @@ -0,0 +1,46 @@ +import { ZodFirstPartyTypeKind } from 'zod'; +import { parseDef } from "../parseDef.mjs"; +import { parseStringDef } from "./string.mjs"; +export function parseRecordDef(def, refs) { + if (refs.target === 'openApi3' && def.keyType?._def.typeName === ZodFirstPartyTypeKind.ZodEnum) { + return { + type: 'object', + required: def.keyType._def.values, + properties: def.keyType._def.values.reduce((acc, key) => ({ + ...acc, + [key]: parseDef(def.valueType._def, { + ...refs, + currentPath: [...refs.currentPath, 'properties', key], + }) ?? {}, + }), {}), + additionalProperties: false, + }; + } + const schema = { + type: 'object', + additionalProperties: parseDef(def.valueType._def, { + ...refs, + currentPath: [...refs.currentPath, 'additionalProperties'], + }) ?? 
{}, + }; + if (refs.target === 'openApi3') { + return schema; + } + if (def.keyType?._def.typeName === ZodFirstPartyTypeKind.ZodString && def.keyType._def.checks?.length) { + const keyType = Object.entries(parseStringDef(def.keyType._def, refs)).reduce((acc, [key, value]) => (key === 'type' ? acc : { ...acc, [key]: value }), {}); + return { + ...schema, + propertyNames: keyType, + }; + } + else if (def.keyType?._def.typeName === ZodFirstPartyTypeKind.ZodEnum) { + return { + ...schema, + propertyNames: { + enum: def.keyType._def.values, + }, + }; + } + return schema; +} +//# sourceMappingURL=record.mjs.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/record.mjs.map b/node_modules/openai/_vendor/zod-to-json-schema/parsers/record.mjs.map new file mode 100644 index 0000000..2e78825 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/record.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"record.mjs","sourceRoot":"","sources":["../../../src/_vendor/zod-to-json-schema/parsers/record.ts"],"names":[],"mappings":"OAAO,EAAE,qBAAqB,EAAuC,MAAM,KAAK;OACzE,EAAmB,QAAQ,EAAE;OAI7B,EAAyB,cAAc,EAAE;AAYhD,MAAM,UAAU,cAAc,CAC5B,GAAqD,EACrD,IAAU;IAEV,IAAI,IAAI,CAAC,MAAM,KAAK,UAAU,IAAI,GAAG,CAAC,OAAO,EAAE,IAAI,CAAC,QAAQ,KAAK,qBAAqB,CAAC,OAAO,EAAE;QAC9F,OAAO;YACL,IAAI,EAAE,QAAQ;YACd,QAAQ,EAAE,GAAG,CAAC,OAAO,CAAC,IAAI,CAAC,MAAM;YACjC,UAAU,EAAE,GAAG,CAAC,OAAO,CAAC,IAAI,CAAC,MAAM,CAAC,MAAM,CACxC,CAAC,GAAoC,EAAE,GAAW,EAAE,EAAE,CAAC,CAAC;gBACtD,GAAG,GAAG;gBACN,CAAC,GAAG,CAAC,EACH,QAAQ,CAAC,GAAG,CAAC,SAAS,CAAC,IAAI,EAAE;oBAC3B,GAAG,IAAI;oBACP,WAAW,EAAE,CAAC,GAAG,IAAI,CAAC,WAAW,EAAE,YAAY,EAAE,GAAG,CAAC;iBACtD,CAAC,IAAI,EAAE;aACX,CAAC,EACF,EAAE,CACH;YACD,oBAAoB,EAAE,KAAK;SACW,CAAC;KAC1C;IAED,MAAM,MAAM,GAA0B;QACpC,IAAI,EAAE,QAAQ;QACd,oBAAoB,EAClB,QAAQ,CAAC,GAAG,CAAC,SAAS,CAAC,IAAI,EAAE;YAC3B,GAAG,IAAI;YACP,WAAW,EAAE,CAAC,GAAG,IAAI,CAAC,WAAW,EAAE,sBAAsB,CAAC;SAC3D,CAAC,IAAI,EAAE;KACX,CAAC;IAEF,IAAI,IAAI,CAAC,MAAM,KAAK,UAAU,EAAE;QAC9B,OAAO,MAAM,CAAC;KACf;IAED,IAAI,GAAG,CAAC,OAAO,EAAE,IAAI,CAAC,QAAQ,KAAK,qBAAqB,CAAC,SAAS,IAAI,GAAG,CAAC,OAAO,CAAC,IAAI,CAAC,MAAM,EAAE,MAAM,EAAE;QACrG,MAAM,OAAO,GAAuC,MAAM,CAAC,OAAO,CAChE,cAAc,CAAC,GAAG,CAAC,OAAO,CAAC,IAAI,EAAE,IAAI,CAAC,CACvC,CAAC,MAAM,CAAC,CAAC,GAAG,EAAE,CAAC,GAAG,EAAE,KAAK,CAAC,EAAE,EAAE,CAAC,CAAC,GAAG,KAAK,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,GAAG,GAAG,EAAE,CAAC,GAAG,CAAC,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC;QAEvF,OAAO;YACL,GAAG,MAAM;YACT,aAAa,EAAE,OAAO;SACvB,CAAC;KACH;SAAM,IAAI,GAAG,CAAC,OAAO,EAAE,IAAI,CAAC,QAAQ,KAAK,qBAAqB,CAAC,OAAO,EAAE;QACvE,OAAO;YACL,GAAG,MAAM;YACT,aAAa,EAAE;gBACb,IAAI,EAAE,GAAG,CAAC,OAAO,CAAC,IAAI,CAAC,MAAM;aAC9B;SACF,CAAC;KACH;IAED,OAAO,MAAM,CAAC;AAChB,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/set.d.ts b/node_modules/openai/_vendor/zod-to-json-schema/parsers/set.d.ts new file mode 100644 index 0000000..b0dbb90 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/set.d.ts @@ -0,0 +1,14 @@ +import { ZodSetDef } from 'zod'; +import { ErrorMessages } from "../errorMessages.js"; +import { JsonSchema7Type } from "../parseDef.js"; +import { Refs } from "../Refs.js"; +export type JsonSchema7SetType = { + type: 'array'; + uniqueItems: true; + items?: JsonSchema7Type | undefined; + minItems?: number; + maxItems?: number; + errorMessage?: ErrorMessages; +}; +export declare function parseSetDef(def: ZodSetDef, refs: Refs): JsonSchema7SetType; +//# sourceMappingURL=set.d.ts.map \ No newline at end of file 
diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/set.d.ts.map b/node_modules/openai/_vendor/zod-to-json-schema/parsers/set.d.ts.map new file mode 100644 index 0000000..66b99a4 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/set.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"set.d.ts","sourceRoot":"","sources":["../../../src/_vendor/zod-to-json-schema/parsers/set.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,SAAS,EAAE,MAAM,KAAK,CAAC;AAChC,OAAO,EAAE,aAAa,EAA6B,MAAM,kBAAkB,CAAC;AAC5E,OAAO,EAAE,eAAe,EAAY,MAAM,aAAa,CAAC;AACxD,OAAO,EAAE,IAAI,EAAE,MAAM,SAAS,CAAC;AAE/B,MAAM,MAAM,kBAAkB,GAAG;IAC/B,IAAI,EAAE,OAAO,CAAC;IACd,WAAW,EAAE,IAAI,CAAC;IAClB,KAAK,CAAC,EAAE,eAAe,GAAG,SAAS,CAAC;IACpC,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,YAAY,CAAC,EAAE,aAAa,CAAC,kBAAkB,CAAC,CAAC;CAClD,CAAC;AAEF,wBAAgB,WAAW,CAAC,GAAG,EAAE,SAAS,EAAE,IAAI,EAAE,IAAI,GAAG,kBAAkB,CAqB1E"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/set.js b/node_modules/openai/_vendor/zod-to-json-schema/parsers/set.js new file mode 100644 index 0000000..dad30d6 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/set.js @@ -0,0 +1,25 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.parseSetDef = void 0; +const errorMessages_1 = require("../errorMessages.js"); +const parseDef_1 = require("../parseDef.js"); +function parseSetDef(def, refs) { + const items = (0, parseDef_1.parseDef)(def.valueType._def, { + ...refs, + currentPath: [...refs.currentPath, 'items'], + }); + const schema = { + type: 'array', + uniqueItems: true, + items, + }; + if (def.minSize) { + (0, errorMessages_1.setResponseValueAndErrors)(schema, 'minItems', def.minSize.value, def.minSize.message, refs); + } + if (def.maxSize) { + (0, errorMessages_1.setResponseValueAndErrors)(schema, 'maxItems', def.maxSize.value, def.maxSize.message, refs); + } + return schema; +} +exports.parseSetDef = parseSetDef; +//# sourceMappingURL=set.js.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/set.js.map b/node_modules/openai/_vendor/zod-to-json-schema/parsers/set.js.map new file mode 100644 index 0000000..526a281 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/set.js.map @@ -0,0 +1 @@ +{"version":3,"file":"set.js","sourceRoot":"","sources":["../../../src/_vendor/zod-to-json-schema/parsers/set.ts"],"names":[],"mappings":";;;AACA,uDAA4E;AAC5E,6CAAwD;AAYxD,SAAgB,WAAW,CAAC,GAAc,EAAE,IAAU;IACpD,MAAM,KAAK,GAAG,IAAA,mBAAQ,EAAC,GAAG,CAAC,SAAS,CAAC,IAAI,EAAE;QACzC,GAAG,IAAI;QACP,WAAW,EAAE,CAAC,GAAG,IAAI,CAAC,WAAW,EAAE,OAAO,CAAC;KAC5C,CAAC,CAAC;IAEH,MAAM,MAAM,GAAuB;QACjC,IAAI,EAAE,OAAO;QACb,WAAW,EAAE,IAAI;QACjB,KAAK;KACN,CAAC;IAEF,IAAI,GAAG,CAAC,OAAO,EAAE;QACf,IAAA,yCAAyB,EAAC,MAAM,EAAE,UAAU,EAAE,GAAG,CAAC,OAAO,CAAC,KAAK,EAAE,GAAG,CAAC,OAAO,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;KAC7F;IAED,IAAI,GAAG,CAAC,OAAO,EAAE;QACf,IAAA,yCAAyB,EAAC,MAAM,EAAE,UAAU,EAAE,GAAG,CAAC,OAAO,CAAC,KAAK,EAAE,GAAG,CAAC,OAAO,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;KAC7F;IAED,OAAO,MAAM,CAAC;AAChB,CAAC;AArBD,kCAqBC"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/set.mjs b/node_modules/openai/_vendor/zod-to-json-schema/parsers/set.mjs new file mode 100644 index 0000000..a235277 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/set.mjs @@ -0,0 +1,21 @@ +import { setResponseValueAndErrors } from "../errorMessages.mjs"; +import { 
parseDef } from "../parseDef.mjs"; +export function parseSetDef(def, refs) { + const items = parseDef(def.valueType._def, { + ...refs, + currentPath: [...refs.currentPath, 'items'], + }); + const schema = { + type: 'array', + uniqueItems: true, + items, + }; + if (def.minSize) { + setResponseValueAndErrors(schema, 'minItems', def.minSize.value, def.minSize.message, refs); + } + if (def.maxSize) { + setResponseValueAndErrors(schema, 'maxItems', def.maxSize.value, def.maxSize.message, refs); + } + return schema; +} +//# sourceMappingURL=set.mjs.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/set.mjs.map b/node_modules/openai/_vendor/zod-to-json-schema/parsers/set.mjs.map new file mode 100644 index 0000000..caeaf49 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/set.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"set.mjs","sourceRoot":"","sources":["../../../src/_vendor/zod-to-json-schema/parsers/set.ts"],"names":[],"mappings":"OACO,EAAiB,yBAAyB,EAAE;OAC5C,EAAmB,QAAQ,EAAE;AAYpC,MAAM,UAAU,WAAW,CAAC,GAAc,EAAE,IAAU;IACpD,MAAM,KAAK,GAAG,QAAQ,CAAC,GAAG,CAAC,SAAS,CAAC,IAAI,EAAE;QACzC,GAAG,IAAI;QACP,WAAW,EAAE,CAAC,GAAG,IAAI,CAAC,WAAW,EAAE,OAAO,CAAC;KAC5C,CAAC,CAAC;IAEH,MAAM,MAAM,GAAuB;QACjC,IAAI,EAAE,OAAO;QACb,WAAW,EAAE,IAAI;QACjB,KAAK;KACN,CAAC;IAEF,IAAI,GAAG,CAAC,OAAO,EAAE;QACf,yBAAyB,CAAC,MAAM,EAAE,UAAU,EAAE,GAAG,CAAC,OAAO,CAAC,KAAK,EAAE,GAAG,CAAC,OAAO,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;KAC7F;IAED,IAAI,GAAG,CAAC,OAAO,EAAE;QACf,yBAAyB,CAAC,MAAM,EAAE,UAAU,EAAE,GAAG,CAAC,OAAO,CAAC,KAAK,EAAE,GAAG,CAAC,OAAO,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;KAC7F;IAED,OAAO,MAAM,CAAC;AAChB,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/string.d.ts b/node_modules/openai/_vendor/zod-to-json-schema/parsers/string.d.ts new file mode 100644 index 0000000..ce2e2c8 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/string.d.ts @@ -0,0 +1,70 @@ +import { ZodStringDef } from 'zod'; +import { ErrorMessages } from "../errorMessages.js"; +import { Refs } from "../Refs.js"; +/** + * Generated from the regular expressions found here as of 2024-05-22: + * https://github.com/colinhacks/zod/blob/master/src/types.ts. + * + * Expressions with /i flag have been changed accordingly. + */ +export declare const zodPatterns: { + /** + * `c` was changed to `[cC]` to replicate /i flag + */ + readonly cuid: RegExp; + readonly cuid2: RegExp; + readonly ulid: RegExp; + /** + * `a-z` was added to replicate /i flag + */ + readonly email: RegExp; + /** + * Constructed a valid Unicode RegExp + * + * Lazily instantiate since this type of regex isn't supported + * in all envs (e.g. React Native). 
+ * + * See: + * https://github.com/colinhacks/zod/issues/2433 + * Fix in Zod: + * https://github.com/colinhacks/zod/commit/9340fd51e48576a75adc919bff65dbc4a5d4c99b + */ + readonly emoji: () => RegExp; + /** + * Unused + */ + readonly uuid: RegExp; + /** + * Unused + */ + readonly ipv4: RegExp; + /** + * Unused + */ + readonly ipv6: RegExp; + readonly base64: RegExp; + readonly nanoid: RegExp; +}; +export type JsonSchema7StringType = { + type: 'string'; + minLength?: number; + maxLength?: number; + format?: 'email' | 'idn-email' | 'uri' | 'uuid' | 'date-time' | 'ipv4' | 'ipv6' | 'date' | 'time' | 'duration'; + pattern?: string; + allOf?: { + pattern: string; + errorMessage?: ErrorMessages<{ + pattern: string; + }>; + }[]; + anyOf?: { + format: string; + errorMessage?: ErrorMessages<{ + format: string; + }>; + }[]; + errorMessage?: ErrorMessages; + contentEncoding?: string; +}; +export declare function parseStringDef(def: ZodStringDef, refs: Refs): JsonSchema7StringType; +//# sourceMappingURL=string.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/string.d.ts.map b/node_modules/openai/_vendor/zod-to-json-schema/parsers/string.d.ts.map new file mode 100644 index 0000000..8890f60 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/string.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"string.d.ts","sourceRoot":"","sources":["../../../src/_vendor/zod-to-json-schema/parsers/string.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,YAAY,EAAE,MAAM,KAAK,CAAC;AACnC,OAAO,EAAE,aAAa,EAA6B,MAAM,kBAAkB,CAAC;AAC5E,OAAO,EAAE,IAAI,EAAE,MAAM,SAAS,CAAC;AAI/B;;;;;GAKG;AACH,eAAO,MAAM,WAAW;IACtB;;OAEG;;;;IAIH;;OAEG;;IAEH;;;;;;;;;;OAUG;;IAOH;;OAEG;;IAEH;;OAEG;;IAEH;;OAEG;;;;CAIK,CAAC;AAEX,MAAM,MAAM,qBAAqB,GAAG;IAClC,IAAI,EAAE,QAAQ,CAAC;IACf,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,MAAM,CAAC,EACH,OAAO,GACP,WAAW,GACX,KAAK,GACL,MAAM,GACN,WAAW,GACX,MAAM,GACN,MAAM,GACN,MAAM,GACN,MAAM,GACN,UAAU,CAAC;IACf,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,KAAK,CAAC,EAAE;QACN,OAAO,EAAE,MAAM,CAAC;QAChB,YAAY,CAAC,EAAE,aAAa,CAAC;YAAE,OAAO,EAAE,MAAM,CAAA;SAAE,CAAC,CAAC;KACnD,EAAE,CAAC;IACJ,KAAK,CAAC,EAAE;QACN,MAAM,EAAE,MAAM,CAAC;QACf,YAAY,CAAC,EAAE,aAAa,CAAC;YAAE,MAAM,EAAE,MAAM,CAAA;SAAE,CAAC,CAAC;KAClD,EAAE,CAAC;IACJ,YAAY,CAAC,EAAE,aAAa,CAAC,qBAAqB,CAAC,CAAC;IACpD,eAAe,CAAC,EAAE,MAAM,CAAC;CAC1B,CAAC;AAEF,wBAAgB,cAAc,CAAC,GAAG,EAAE,YAAY,EAAE,IAAI,EAAE,IAAI,GAAG,qBAAqB,CAoJnF"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/string.js b/node_modules/openai/_vendor/zod-to-json-schema/parsers/string.js new file mode 100644 index 0000000..15196a6 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/string.js @@ -0,0 +1,316 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.parseStringDef = exports.zodPatterns = void 0; +const errorMessages_1 = require("../errorMessages.js"); +let emojiRegex; +/** + * Generated from the regular expressions found here as of 2024-05-22: + * https://github.com/colinhacks/zod/blob/master/src/types.ts. + * + * Expressions with /i flag have been changed accordingly. 
+ */ +exports.zodPatterns = { + /** + * `c` was changed to `[cC]` to replicate /i flag + */ + cuid: /^[cC][^\s-]{8,}$/, + cuid2: /^[0-9a-z]+$/, + ulid: /^[0-9A-HJKMNP-TV-Z]{26}$/, + /** + * `a-z` was added to replicate /i flag + */ + email: /^(?!\.)(?!.*\.\.)([a-zA-Z0-9_'+\-\.]*)[a-zA-Z0-9_+-]@([a-zA-Z0-9][a-zA-Z0-9\-]*\.)+[a-zA-Z]{2,}$/, + /** + * Constructed a valid Unicode RegExp + * + * Lazily instantiate since this type of regex isn't supported + * in all envs (e.g. React Native). + * + * See: + * https://github.com/colinhacks/zod/issues/2433 + * Fix in Zod: + * https://github.com/colinhacks/zod/commit/9340fd51e48576a75adc919bff65dbc4a5d4c99b + */ + emoji: () => { + if (emojiRegex === undefined) { + emojiRegex = RegExp('^(\\p{Extended_Pictographic}|\\p{Emoji_Component})+$', 'u'); + } + return emojiRegex; + }, + /** + * Unused + */ + uuid: /^[0-9a-fA-F]{8}\b-[0-9a-fA-F]{4}\b-[0-9a-fA-F]{4}\b-[0-9a-fA-F]{4}\b-[0-9a-fA-F]{12}$/, + /** + * Unused + */ + ipv4: /^(?:(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9][0-9]|[0-9])\.){3}(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9][0-9]|[0-9])$/, + /** + * Unused + */ + ipv6: /^(([a-f0-9]{1,4}:){7}|::([a-f0-9]{1,4}:){0,6}|([a-f0-9]{1,4}:){1}:([a-f0-9]{1,4}:){0,5}|([a-f0-9]{1,4}:){2}:([a-f0-9]{1,4}:){0,4}|([a-f0-9]{1,4}:){3}:([a-f0-9]{1,4}:){0,3}|([a-f0-9]{1,4}:){4}:([a-f0-9]{1,4}:){0,2}|([a-f0-9]{1,4}:){5}:([a-f0-9]{1,4}:){0,1})([a-f0-9]{1,4}|(((25[0-5])|(2[0-4][0-9])|(1[0-9]{2})|([0-9]{1,2}))\.){3}((25[0-5])|(2[0-4][0-9])|(1[0-9]{2})|([0-9]{1,2})))$/, + base64: /^([0-9a-zA-Z+/]{4})*(([0-9a-zA-Z+/]{2}==)|([0-9a-zA-Z+/]{3}=))?$/, + nanoid: /^[a-zA-Z0-9_-]{21}$/, +}; +function parseStringDef(def, refs) { + const res = { + type: 'string', + }; + function processPattern(value) { + return refs.patternStrategy === 'escape' ? escapeNonAlphaNumeric(value) : value; + } + if (def.checks) { + for (const check of def.checks) { + switch (check.kind) { + case 'min': + (0, errorMessages_1.setResponseValueAndErrors)(res, 'minLength', typeof res.minLength === 'number' ? Math.max(res.minLength, check.value) : check.value, check.message, refs); + break; + case 'max': + (0, errorMessages_1.setResponseValueAndErrors)(res, 'maxLength', typeof res.maxLength === 'number' ? 
Math.min(res.maxLength, check.value) : check.value, check.message, refs); + break; + case 'email': + switch (refs.emailStrategy) { + case 'format:email': + addFormat(res, 'email', check.message, refs); + break; + case 'format:idn-email': + addFormat(res, 'idn-email', check.message, refs); + break; + case 'pattern:zod': + addPattern(res, exports.zodPatterns.email, check.message, refs); + break; + } + break; + case 'url': + addFormat(res, 'uri', check.message, refs); + break; + case 'uuid': + addFormat(res, 'uuid', check.message, refs); + break; + case 'regex': + addPattern(res, check.regex, check.message, refs); + break; + case 'cuid': + addPattern(res, exports.zodPatterns.cuid, check.message, refs); + break; + case 'cuid2': + addPattern(res, exports.zodPatterns.cuid2, check.message, refs); + break; + case 'startsWith': + addPattern(res, RegExp(`^${processPattern(check.value)}`), check.message, refs); + break; + case 'endsWith': + addPattern(res, RegExp(`${processPattern(check.value)}$`), check.message, refs); + break; + case 'datetime': + addFormat(res, 'date-time', check.message, refs); + break; + case 'date': + addFormat(res, 'date', check.message, refs); + break; + case 'time': + addFormat(res, 'time', check.message, refs); + break; + case 'duration': + addFormat(res, 'duration', check.message, refs); + break; + case 'length': + (0, errorMessages_1.setResponseValueAndErrors)(res, 'minLength', typeof res.minLength === 'number' ? Math.max(res.minLength, check.value) : check.value, check.message, refs); + (0, errorMessages_1.setResponseValueAndErrors)(res, 'maxLength', typeof res.maxLength === 'number' ? Math.min(res.maxLength, check.value) : check.value, check.message, refs); + break; + case 'includes': { + addPattern(res, RegExp(processPattern(check.value)), check.message, refs); + break; + } + case 'ip': { + if (check.version !== 'v6') { + addFormat(res, 'ipv4', check.message, refs); + } + if (check.version !== 'v4') { + addFormat(res, 'ipv6', check.message, refs); + } + break; + } + case 'emoji': + addPattern(res, exports.zodPatterns.emoji, check.message, refs); + break; + case 'ulid': { + addPattern(res, exports.zodPatterns.ulid, check.message, refs); + break; + } + case 'base64': { + switch (refs.base64Strategy) { + case 'format:binary': { + addFormat(res, 'binary', check.message, refs); + break; + } + case 'contentEncoding:base64': { + (0, errorMessages_1.setResponseValueAndErrors)(res, 'contentEncoding', 'base64', check.message, refs); + break; + } + case 'pattern:zod': { + addPattern(res, exports.zodPatterns.base64, check.message, refs); + break; + } + } + break; + } + case 'nanoid': { + addPattern(res, exports.zodPatterns.nanoid, check.message, refs); + } + case 'toLowerCase': + case 'toUpperCase': + case 'trim': + break; + default: + ((_) => { })(check); + } + } + } + return res; +} +exports.parseStringDef = parseStringDef; +const escapeNonAlphaNumeric = (value) => Array.from(value) + .map((c) => (/[a-zA-Z0-9]/.test(c) ? 
c : `\\${c}`)) + .join(''); +const addFormat = (schema, value, message, refs) => { + if (schema.format || schema.anyOf?.some((x) => x.format)) { + if (!schema.anyOf) { + schema.anyOf = []; + } + if (schema.format) { + schema.anyOf.push({ + format: schema.format, + ...(schema.errorMessage && + refs.errorMessages && { + errorMessage: { format: schema.errorMessage.format }, + }), + }); + delete schema.format; + if (schema.errorMessage) { + delete schema.errorMessage.format; + if (Object.keys(schema.errorMessage).length === 0) { + delete schema.errorMessage; + } + } + } + schema.anyOf.push({ + format: value, + ...(message && refs.errorMessages && { errorMessage: { format: message } }), + }); + } + else { + (0, errorMessages_1.setResponseValueAndErrors)(schema, 'format', value, message, refs); + } +}; +const addPattern = (schema, regex, message, refs) => { + if (schema.pattern || schema.allOf?.some((x) => x.pattern)) { + if (!schema.allOf) { + schema.allOf = []; + } + if (schema.pattern) { + schema.allOf.push({ + pattern: schema.pattern, + ...(schema.errorMessage && + refs.errorMessages && { + errorMessage: { pattern: schema.errorMessage.pattern }, + }), + }); + delete schema.pattern; + if (schema.errorMessage) { + delete schema.errorMessage.pattern; + if (Object.keys(schema.errorMessage).length === 0) { + delete schema.errorMessage; + } + } + } + schema.allOf.push({ + pattern: processRegExp(regex, refs), + ...(message && refs.errorMessages && { errorMessage: { pattern: message } }), + }); + } + else { + (0, errorMessages_1.setResponseValueAndErrors)(schema, 'pattern', processRegExp(regex, refs), message, refs); + } +}; +// Mutate z.string.regex() in a best attempt to accommodate for regex flags when applyRegexFlags is true +const processRegExp = (regexOrFunction, refs) => { + const regex = typeof regexOrFunction === 'function' ? regexOrFunction() : regexOrFunction; + if (!refs.applyRegexFlags || !regex.flags) + return regex.source; + // Currently handled flags + const flags = { + i: regex.flags.includes('i'), + m: regex.flags.includes('m'), + s: regex.flags.includes('s'), // `.` matches newlines + }; + // The general principle here is to step through each character, one at a time, applying mutations as flags require. We keep track when the current character is escaped, and when it's inside a group /like [this]/ or (also) a range like /[a-z]/. The following is fairly brittle imperative code; edit at your peril! + const source = flags.i ? regex.source.toLowerCase() : regex.source; + let pattern = ''; + let isEscaped = false; + let inCharGroup = false; + let inCharRange = false; + for (let i = 0; i < source.length; i++) { + if (isEscaped) { + pattern += source[i]; + isEscaped = false; + continue; + } + if (flags.i) { + if (inCharGroup) { + if (source[i].match(/[a-z]/)) { + if (inCharRange) { + pattern += source[i]; + pattern += `${source[i - 2]}-${source[i]}`.toUpperCase(); + inCharRange = false; + } + else if (source[i + 1] === '-' && source[i + 2]?.match(/[a-z]/)) { + pattern += source[i]; + inCharRange = true; + } + else { + pattern += `${source[i]}${source[i].toUpperCase()}`; + } + continue; + } + } + else if (source[i].match(/[a-z]/)) { + pattern += `[${source[i]}${source[i].toUpperCase()}]`; + continue; + } + } + if (flags.m) { + if (source[i] === '^') { + pattern += `(^|(?<=[\r\n]))`; + continue; + } + else if (source[i] === '$') { + pattern += `($|(?=[\r\n]))`; + continue; + } + } + if (flags.s && source[i] === '.') { + pattern += inCharGroup ? 
`${source[i]}\r\n` : `[${source[i]}\r\n]`; + continue; + } + pattern += source[i]; + if (source[i] === '\\') { + isEscaped = true; + } + else if (inCharGroup && source[i] === ']') { + inCharGroup = false; + } + else if (!inCharGroup && source[i] === '[') { + inCharGroup = true; + } + } + try { + const regexTest = new RegExp(pattern); + } + catch { + console.warn(`Could not convert regex pattern at ${refs.currentPath.join('/')} to a flag-independent form! Falling back to the flag-ignorant source`); + return regex.source; + } + return pattern; +}; +//# sourceMappingURL=string.js.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/string.js.map b/node_modules/openai/_vendor/zod-to-json-schema/parsers/string.js.map new file mode 100644 index 0000000..d7d79bc --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/string.js.map @@ -0,0 +1 @@ +{"version":3,"file":"string.js","sourceRoot":"","sources":["../../../src/_vendor/zod-to-json-schema/parsers/string.ts"],"names":[],"mappings":";;;AAEA,uDAA4E;AAG5E,IAAI,UAA8B,CAAC;AAEnC;;;;;GAKG;AACU,QAAA,WAAW,GAAG;IACzB;;OAEG;IACH,IAAI,EAAE,kBAAkB;IACxB,KAAK,EAAE,aAAa;IACpB,IAAI,EAAE,0BAA0B;IAChC;;OAEG;IACH,KAAK,EAAE,kGAAkG;IACzG;;;;;;;;;;OAUG;IACH,KAAK,EAAE,GAAG,EAAE;QACV,IAAI,UAAU,KAAK,SAAS,EAAE;YAC5B,UAAU,GAAG,MAAM,CAAC,sDAAsD,EAAE,GAAG,CAAC,CAAC;SAClF;QACD,OAAO,UAAU,CAAC;IACpB,CAAC;IACD;;OAEG;IACH,IAAI,EAAE,uFAAuF;IAC7F;;OAEG;IACH,IAAI,EAAE,qHAAqH;IAC3H;;OAEG;IACH,IAAI,EAAE,8XAA8X;IACpY,MAAM,EAAE,kEAAkE;IAC1E,MAAM,EAAE,qBAAqB;CACrB,CAAC;AA8BX,SAAgB,cAAc,CAAC,GAAiB,EAAE,IAAU;IAC1D,MAAM,GAAG,GAA0B;QACjC,IAAI,EAAE,QAAQ;KACf,CAAC;IAEF,SAAS,cAAc,CAAC,KAAa;QACnC,OAAO,IAAI,CAAC,eAAe,KAAK,QAAQ,CAAC,CAAC,CAAC,qBAAqB,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC;IAClF,CAAC;IAED,IAAI,GAAG,CAAC,MAAM,EAAE;QACd,KAAK,MAAM,KAAK,IAAI,GAAG,CAAC,MAAM,EAAE;YAC9B,QAAQ,KAAK,CAAC,IAAI,EAAE;gBAClB,KAAK,KAAK;oBACR,IAAA,yCAAyB,EACvB,GAAG,EACH,WAAW,EACX,OAAO,GAAG,CAAC,SAAS,KAAK,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,SAAS,EAAE,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,EACtF,KAAK,CAAC,OAAO,EACb,IAAI,CACL,CAAC;oBACF,MAAM;gBACR,KAAK,KAAK;oBACR,IAAA,yCAAyB,EACvB,GAAG,EACH,WAAW,EACX,OAAO,GAAG,CAAC,SAAS,KAAK,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,SAAS,EAAE,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,EACtF,KAAK,CAAC,OAAO,EACb,IAAI,CACL,CAAC;oBAEF,MAAM;gBACR,KAAK,OAAO;oBACV,QAAQ,IAAI,CAAC,aAAa,EAAE;wBAC1B,KAAK,cAAc;4BACjB,SAAS,CAAC,GAAG,EAAE,OAAO,EAAE,KAAK,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;4BAC7C,MAAM;wBACR,KAAK,kBAAkB;4BACrB,SAAS,CAAC,GAAG,EAAE,WAAW,EAAE,KAAK,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;4BACjD,MAAM;wBACR,KAAK,aAAa;4BAChB,UAAU,CAAC,GAAG,EAAE,mBAAW,CAAC,KAAK,EAAE,KAAK,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;4BACxD,MAAM;qBACT;oBAED,MAAM;gBACR,KAAK,KAAK;oBACR,SAAS,CAAC,GAAG,EAAE,KAAK,EAAE,KAAK,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;oBAC3C,MAAM;gBACR,KAAK,MAAM;oBACT,SAAS,CAAC,GAAG,EAAE,MAAM,EAAE,KAAK,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;oBAC5C,MAAM;gBACR,KAAK,OAAO;oBACV,UAAU,CAAC,GAAG,EAAE,KAAK,CAAC,KAAK,EAAE,KAAK,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;oBAClD,MAAM;gBACR,KAAK,MAAM;oBACT,UAAU,CAAC,GAAG,EAAE,mBAAW,CAAC,IAAI,EAAE,KAAK,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;oBACvD,MAAM;gBACR,KAAK,OAAO;oBACV,UAAU,CAAC,GAAG,EAAE,mBAAW,CAAC,KAAK,EAAE,KAAK,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;oBACxD,MAAM;gBACR,KAAK,YAAY;oBACf,UAAU,CAAC,GAAG,EAAE,MAAM,CAAC,IAAI,cAAc,CAAC,KAAK,CAAC,KAAK,CAAC,EAAE,CAAC,EAAE,KAAK,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;oBAChF,MAAM;gBACR,KAAK,UAAU;oBACb,UAAU,CAAC,GAAG,EAAE,MAAM,CAAC,GAAG,cAAc,CAAC,KAAK,CAAC,KAAK,CAAC,GAAG,CAAC,EAAE,KAAK,C
AAC,OAAO,EAAE,IAAI,CAAC,CAAC;oBAChF,MAAM;gBAER,KAAK,UAAU;oBACb,SAAS,CAAC,GAAG,EAAE,WAAW,EAAE,KAAK,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;oBACjD,MAAM;gBACR,KAAK,MAAM;oBACT,SAAS,CAAC,GAAG,EAAE,MAAM,EAAE,KAAK,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;oBAC5C,MAAM;gBACR,KAAK,MAAM;oBACT,SAAS,CAAC,GAAG,EAAE,MAAM,EAAE,KAAK,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;oBAC5C,MAAM;gBACR,KAAK,UAAU;oBACb,SAAS,CAAC,GAAG,EAAE,UAAU,EAAE,KAAK,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;oBAChD,MAAM;gBACR,KAAK,QAAQ;oBACX,IAAA,yCAAyB,EACvB,GAAG,EACH,WAAW,EACX,OAAO,GAAG,CAAC,SAAS,KAAK,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,SAAS,EAAE,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,EACtF,KAAK,CAAC,OAAO,EACb,IAAI,CACL,CAAC;oBACF,IAAA,yCAAyB,EACvB,GAAG,EACH,WAAW,EACX,OAAO,GAAG,CAAC,SAAS,KAAK,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,SAAS,EAAE,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,EACtF,KAAK,CAAC,OAAO,EACb,IAAI,CACL,CAAC;oBACF,MAAM;gBACR,KAAK,UAAU,CAAC,CAAC;oBACf,UAAU,CAAC,GAAG,EAAE,MAAM,CAAC,cAAc,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,EAAE,KAAK,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;oBAC1E,MAAM;iBACP;gBACD,KAAK,IAAI,CAAC,CAAC;oBACT,IAAI,KAAK,CAAC,OAAO,KAAK,IAAI,EAAE;wBAC1B,SAAS,CAAC,GAAG,EAAE,MAAM,EAAE,KAAK,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;qBAC7C;oBACD,IAAI,KAAK,CAAC,OAAO,KAAK,IAAI,EAAE;wBAC1B,SAAS,CAAC,GAAG,EAAE,MAAM,EAAE,KAAK,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;qBAC7C;oBACD,MAAM;iBACP;gBACD,KAAK,OAAO;oBACV,UAAU,CAAC,GAAG,EAAE,mBAAW,CAAC,KAAK,EAAE,KAAK,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;oBACxD,MAAM;gBACR,KAAK,MAAM,CAAC,CAAC;oBACX,UAAU,CAAC,GAAG,EAAE,mBAAW,CAAC,IAAI,EAAE,KAAK,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;oBACvD,MAAM;iBACP;gBACD,KAAK,QAAQ,CAAC,CAAC;oBACb,QAAQ,IAAI,CAAC,cAAc,EAAE;wBAC3B,KAAK,eAAe,CAAC,CAAC;4BACpB,SAAS,CAAC,GAAG,EAAE,QAAe,EAAE,KAAK,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;4BACrD,MAAM;yBACP;wBAED,KAAK,wBAAwB,CAAC,CAAC;4BAC7B,IAAA,yCAAyB,EAAC,GAAG,EAAE,iBAAiB,EAAE,QAAQ,EAAE,KAAK,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;4BACjF,MAAM;yBACP;wBAED,KAAK,aAAa,CAAC,CAAC;4BAClB,UAAU,CAAC,GAAG,EAAE,mBAAW,CAAC,MAAM,EAAE,KAAK,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;4BACzD,MAAM;yBACP;qBACF;oBACD,MAAM;iBACP;gBACD,KAAK,QAAQ,CAAC,CAAC;oBACb,UAAU,CAAC,GAAG,EAAE,mBAAW,CAAC,MAAM,EAAE,KAAK,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;iBAC1D;gBACD,KAAK,aAAa,CAAC;gBACnB,KAAK,aAAa,CAAC;gBACnB,KAAK,MAAM;oBACT,MAAM;gBACR;oBACE,CAAC,CAAC,CAAQ,EAAE,EAAE,GAAE,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC;aAC7B;SACF;KACF;IAED,OAAO,GAAG,CAAC;AACb,CAAC;AApJD,wCAoJC;AAED,MAAM,qBAAqB,GAAG,CAAC,KAAa,EAAE,EAAE,CAC9C,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC;KACd,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,aAAa,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC;KAClD,IAAI,CAAC,EAAE,CAAC,CAAC;AAEd,MAAM,SAAS,GAAG,CAChB,MAA6B,EAC7B,KAAgD,EAChD,OAA2B,EAC3B,IAAU,EACV,EAAE;IACF,IAAI,MAAM,CAAC,MAAM,IAAI,MAAM,CAAC,KAAK,EAAE,IAAI,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,EAAE;QACxD,IAAI,CAAC,MAAM,CAAC,KAAK,EAAE;YACjB,MAAM,CAAC,KAAK,GAAG,EAAE,CAAC;SACnB;QAED,IAAI,MAAM,CAAC,MAAM,EAAE;YACjB,MAAM,CAAC,KAAM,CAAC,IAAI,CAAC;gBACjB,MAAM,EAAE,MAAM,CAAC,MAAM;gBACrB,GAAG,CAAC,MAAM,CAAC,YAAY;oBACrB,IAAI,CAAC,aAAa,IAAI;oBACpB,YAAY,EAAE,EAAE,MAAM,EAAE,MAAM,CAAC,YAAY,CAAC,MAAM,EAAE;iBACrD,CAAC;aACL,CAAC,CAAC;YACH,OAAO,MAAM,CAAC,MAAM,CAAC;YACrB,IAAI,MAAM,CAAC,YAAY,EAAE;gBACvB,OAAO,MAAM,CAAC,YAAY,CAAC,MAAM,CAAC;gBAClC,IAAI,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC,YAAY,CAAC,CAAC,MAAM,KAAK,CAAC,EAAE;oBACjD,OAAO,MAAM,CAAC,YAAY,CAAC;iBAC5B;aACF;SACF;QAED,MAAM,CAAC,KAAM,CAAC,IAAI,CAAC;YACjB,MAAM,EAAE,KAAK;YACb,GAAG,CAAC,OAAO,IAAI,IAAI,CAAC,aAAa,IAAI,EAAE,YAAY,EAAE,EAAE,MAAM,EAAE,OAAO,EAAE,EAAE,CAAC;SAC5E,CAAC,CAAC;KACJ;SAAM;QACL,IAAA,yCAAyB,EAAC,MAAM,EAAE,QAAQ,E
AAE,KAAK,EAAE,OAAO,EAAE,IAAI,CAAC,CAAC;KACnE;AACH,CAAC,CAAC;AAEF,MAAM,UAAU,GAAG,CACjB,MAA6B,EAC7B,KAA8B,EAC9B,OAA2B,EAC3B,IAAU,EACV,EAAE;IACF,IAAI,MAAM,CAAC,OAAO,IAAI,MAAM,CAAC,KAAK,EAAE,IAAI,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,EAAE;QAC1D,IAAI,CAAC,MAAM,CAAC,KAAK,EAAE;YACjB,MAAM,CAAC,KAAK,GAAG,EAAE,CAAC;SACnB;QAED,IAAI,MAAM,CAAC,OAAO,EAAE;YAClB,MAAM,CAAC,KAAM,CAAC,IAAI,CAAC;gBACjB,OAAO,EAAE,MAAM,CAAC,OAAO;gBACvB,GAAG,CAAC,MAAM,CAAC,YAAY;oBACrB,IAAI,CAAC,aAAa,IAAI;oBACpB,YAAY,EAAE,EAAE,OAAO,EAAE,MAAM,CAAC,YAAY,CAAC,OAAO,EAAE;iBACvD,CAAC;aACL,CAAC,CAAC;YACH,OAAO,MAAM,CAAC,OAAO,CAAC;YACtB,IAAI,MAAM,CAAC,YAAY,EAAE;gBACvB,OAAO,MAAM,CAAC,YAAY,CAAC,OAAO,CAAC;gBACnC,IAAI,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC,YAAY,CAAC,CAAC,MAAM,KAAK,CAAC,EAAE;oBACjD,OAAO,MAAM,CAAC,YAAY,CAAC;iBAC5B;aACF;SACF;QAED,MAAM,CAAC,KAAM,CAAC,IAAI,CAAC;YACjB,OAAO,EAAE,aAAa,CAAC,KAAK,EAAE,IAAI,CAAC;YACnC,GAAG,CAAC,OAAO,IAAI,IAAI,CAAC,aAAa,IAAI,EAAE,YAAY,EAAE,EAAE,OAAO,EAAE,OAAO,EAAE,EAAE,CAAC;SAC7E,CAAC,CAAC;KACJ;SAAM;QACL,IAAA,yCAAyB,EAAC,MAAM,EAAE,SAAS,EAAE,aAAa,CAAC,KAAK,EAAE,IAAI,CAAC,EAAE,OAAO,EAAE,IAAI,CAAC,CAAC;KACzF;AACH,CAAC,CAAC;AAEF,wGAAwG;AACxG,MAAM,aAAa,GAAG,CAAC,eAAwC,EAAE,IAAU,EAAU,EAAE;IACrF,MAAM,KAAK,GAAG,OAAO,eAAe,KAAK,UAAU,CAAC,CAAC,CAAC,eAAe,EAAE,CAAC,CAAC,CAAC,eAAe,CAAC;IAC1F,IAAI,CAAC,IAAI,CAAC,eAAe,IAAI,CAAC,KAAK,CAAC,KAAK;QAAE,OAAO,KAAK,CAAC,MAAM,CAAC;IAE/D,0BAA0B;IAC1B,MAAM,KAAK,GAAG;QACZ,CAAC,EAAE,KAAK,CAAC,KAAK,CAAC,QAAQ,CAAC,GAAG,CAAC;QAC5B,CAAC,EAAE,KAAK,CAAC,KAAK,CAAC,QAAQ,CAAC,GAAG,CAAC;QAC5B,CAAC,EAAE,KAAK,CAAC,KAAK,CAAC,QAAQ,CAAC,GAAG,CAAC,EAAE,uBAAuB;KACtD,CAAC;IAEF,yTAAyT;IAEzT,MAAM,MAAM,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,MAAM,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,MAAM,CAAC;IACnE,IAAI,OAAO,GAAG,EAAE,CAAC;IACjB,IAAI,SAAS,GAAG,KAAK,CAAC;IACtB,IAAI,WAAW,GAAG,KAAK,CAAC;IACxB,IAAI,WAAW,GAAG,KAAK,CAAC;IAExB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,MAAM,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;QACtC,IAAI,SAAS,EAAE;YACb,OAAO,IAAI,MAAM,CAAC,CAAC,CAAC,CAAC;YACrB,SAAS,GAAG,KAAK,CAAC;YAClB,SAAS;SACV;QAED,IAAI,KAAK,CAAC,CAAC,EAAE;YACX,IAAI,WAAW,EAAE;gBACf,IAAI,MAAM,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,EAAE;oBAC5B,IAAI,WAAW,EAAE;wBACf,OAAO,IAAI,MAAM,CAAC,CAAC,CAAC,CAAC;wBACrB,OAAO,IAAI,GAAG,MAAM,CAAC,CAAC,GAAG,CAAC,CAAC,IAAI,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,WAAW,EAAE,CAAC;wBACzD,WAAW,GAAG,KAAK,CAAC;qBACrB;yBAAM,IAAI,MAAM,CAAC,CAAC,GAAG,CAAC,CAAC,KAAK,GAAG,IAAI,MAAM,CAAC,CAAC,GAAG,CAAC,CAAC,EAAE,KAAK,CAAC,OAAO,CAAC,EAAE;wBACjE,OAAO,IAAI,MAAM,CAAC,CAAC,CAAC,CAAC;wBACrB,WAAW,GAAG,IAAI,CAAC;qBACpB;yBAAM;wBACL,OAAO,IAAI,GAAG,MAAM,CAAC,CAAC,CAAC,GAAG,MAAM,CAAC,CAAC,CAAC,CAAC,WAAW,EAAE,EAAE,CAAC;qBACrD;oBACD,SAAS;iBACV;aACF;iBAAM,IAAI,MAAM,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,EAAE;gBACnC,OAAO,IAAI,IAAI,MAAM,CAAC,CAAC,CAAC,GAAG,MAAM,CAAC,CAAC,CAAC,CAAC,WAAW,EAAE,GAAG,CAAC;gBACtD,SAAS;aACV;SACF;QAED,IAAI,KAAK,CAAC,CAAC,EAAE;YACX,IAAI,MAAM,CAAC,CAAC,CAAC,KAAK,GAAG,EAAE;gBACrB,OAAO,IAAI,iBAAiB,CAAC;gBAC7B,SAAS;aACV;iBAAM,IAAI,MAAM,CAAC,CAAC,CAAC,KAAK,GAAG,EAAE;gBAC5B,OAAO,IAAI,gBAAgB,CAAC;gBAC5B,SAAS;aACV;SACF;QAED,IAAI,KAAK,CAAC,CAAC,IAAI,MAAM,CAAC,CAAC,CAAC,KAAK,GAAG,EAAE;YAChC,OAAO,IAAI,WAAW,CAAC,CAAC,CAAC,GAAG,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,MAAM,CAAC,CAAC,CAAC,OAAO,CAAC;YACnE,SAAS;SACV;QAED,OAAO,IAAI,MAAM,CAAC,CAAC,CAAC,CAAC;QACrB,IAAI,MAAM,CAAC,CAAC,CAAC,KAAK,IAAI,EAAE;YACtB,SAAS,GAAG,IAAI,CAAC;SAClB;aAAM,IAAI,WAAW,IAAI,MAAM,CAAC,CAAC,CAAC,KAAK,GAAG,EAAE;YAC3C,WAAW,GAAG,KAAK,CAAC;SACrB;aAAM,IAAI,CAAC,WAAW,IAAI,MAAM,CAAC,CAAC,CAAC,KAAK,GAAG,EAAE;YAC5C,WAAW,GAAG,IAAI,CAAC;SACpB;KACF;IAED,IAA
I;QACF,MAAM,SAAS,GAAG,IAAI,MAAM,CAAC,OAAO,CAAC,CAAC;KACvC;IAAC,MAAM;QACN,OAAO,CAAC,IAAI,CACV,sCAAsC,IAAI,CAAC,WAAW,CAAC,IAAI,CACzD,GAAG,CACJ,uEAAuE,CACzE,CAAC;QACF,OAAO,KAAK,CAAC,MAAM,CAAC;KACrB;IAED,OAAO,OAAO,CAAC;AACjB,CAAC,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/string.mjs b/node_modules/openai/_vendor/zod-to-json-schema/parsers/string.mjs new file mode 100644 index 0000000..c56e2c2 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/string.mjs @@ -0,0 +1,312 @@ +import { setResponseValueAndErrors } from "../errorMessages.mjs"; +let emojiRegex; +/** + * Generated from the regular expressions found here as of 2024-05-22: + * https://github.com/colinhacks/zod/blob/master/src/types.ts. + * + * Expressions with /i flag have been changed accordingly. + */ +export const zodPatterns = { + /** + * `c` was changed to `[cC]` to replicate /i flag + */ + cuid: /^[cC][^\s-]{8,}$/, + cuid2: /^[0-9a-z]+$/, + ulid: /^[0-9A-HJKMNP-TV-Z]{26}$/, + /** + * `a-z` was added to replicate /i flag + */ + email: /^(?!\.)(?!.*\.\.)([a-zA-Z0-9_'+\-\.]*)[a-zA-Z0-9_+-]@([a-zA-Z0-9][a-zA-Z0-9\-]*\.)+[a-zA-Z]{2,}$/, + /** + * Constructed a valid Unicode RegExp + * + * Lazily instantiate since this type of regex isn't supported + * in all envs (e.g. React Native). + * + * See: + * https://github.com/colinhacks/zod/issues/2433 + * Fix in Zod: + * https://github.com/colinhacks/zod/commit/9340fd51e48576a75adc919bff65dbc4a5d4c99b + */ + emoji: () => { + if (emojiRegex === undefined) { + emojiRegex = RegExp('^(\\p{Extended_Pictographic}|\\p{Emoji_Component})+$', 'u'); + } + return emojiRegex; + }, + /** + * Unused + */ + uuid: /^[0-9a-fA-F]{8}\b-[0-9a-fA-F]{4}\b-[0-9a-fA-F]{4}\b-[0-9a-fA-F]{4}\b-[0-9a-fA-F]{12}$/, + /** + * Unused + */ + ipv4: /^(?:(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9][0-9]|[0-9])\.){3}(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9][0-9]|[0-9])$/, + /** + * Unused + */ + ipv6: /^(([a-f0-9]{1,4}:){7}|::([a-f0-9]{1,4}:){0,6}|([a-f0-9]{1,4}:){1}:([a-f0-9]{1,4}:){0,5}|([a-f0-9]{1,4}:){2}:([a-f0-9]{1,4}:){0,4}|([a-f0-9]{1,4}:){3}:([a-f0-9]{1,4}:){0,3}|([a-f0-9]{1,4}:){4}:([a-f0-9]{1,4}:){0,2}|([a-f0-9]{1,4}:){5}:([a-f0-9]{1,4}:){0,1})([a-f0-9]{1,4}|(((25[0-5])|(2[0-4][0-9])|(1[0-9]{2})|([0-9]{1,2}))\.){3}((25[0-5])|(2[0-4][0-9])|(1[0-9]{2})|([0-9]{1,2})))$/, + base64: /^([0-9a-zA-Z+/]{4})*(([0-9a-zA-Z+/]{2}==)|([0-9a-zA-Z+/]{3}=))?$/, + nanoid: /^[a-zA-Z0-9_-]{21}$/, +}; +export function parseStringDef(def, refs) { + const res = { + type: 'string', + }; + function processPattern(value) { + return refs.patternStrategy === 'escape' ? escapeNonAlphaNumeric(value) : value; + } + if (def.checks) { + for (const check of def.checks) { + switch (check.kind) { + case 'min': + setResponseValueAndErrors(res, 'minLength', typeof res.minLength === 'number' ? Math.max(res.minLength, check.value) : check.value, check.message, refs); + break; + case 'max': + setResponseValueAndErrors(res, 'maxLength', typeof res.maxLength === 'number' ? 
Math.min(res.maxLength, check.value) : check.value, check.message, refs); + break; + case 'email': + switch (refs.emailStrategy) { + case 'format:email': + addFormat(res, 'email', check.message, refs); + break; + case 'format:idn-email': + addFormat(res, 'idn-email', check.message, refs); + break; + case 'pattern:zod': + addPattern(res, zodPatterns.email, check.message, refs); + break; + } + break; + case 'url': + addFormat(res, 'uri', check.message, refs); + break; + case 'uuid': + addFormat(res, 'uuid', check.message, refs); + break; + case 'regex': + addPattern(res, check.regex, check.message, refs); + break; + case 'cuid': + addPattern(res, zodPatterns.cuid, check.message, refs); + break; + case 'cuid2': + addPattern(res, zodPatterns.cuid2, check.message, refs); + break; + case 'startsWith': + addPattern(res, RegExp(`^${processPattern(check.value)}`), check.message, refs); + break; + case 'endsWith': + addPattern(res, RegExp(`${processPattern(check.value)}$`), check.message, refs); + break; + case 'datetime': + addFormat(res, 'date-time', check.message, refs); + break; + case 'date': + addFormat(res, 'date', check.message, refs); + break; + case 'time': + addFormat(res, 'time', check.message, refs); + break; + case 'duration': + addFormat(res, 'duration', check.message, refs); + break; + case 'length': + setResponseValueAndErrors(res, 'minLength', typeof res.minLength === 'number' ? Math.max(res.minLength, check.value) : check.value, check.message, refs); + setResponseValueAndErrors(res, 'maxLength', typeof res.maxLength === 'number' ? Math.min(res.maxLength, check.value) : check.value, check.message, refs); + break; + case 'includes': { + addPattern(res, RegExp(processPattern(check.value)), check.message, refs); + break; + } + case 'ip': { + if (check.version !== 'v6') { + addFormat(res, 'ipv4', check.message, refs); + } + if (check.version !== 'v4') { + addFormat(res, 'ipv6', check.message, refs); + } + break; + } + case 'emoji': + addPattern(res, zodPatterns.emoji, check.message, refs); + break; + case 'ulid': { + addPattern(res, zodPatterns.ulid, check.message, refs); + break; + } + case 'base64': { + switch (refs.base64Strategy) { + case 'format:binary': { + addFormat(res, 'binary', check.message, refs); + break; + } + case 'contentEncoding:base64': { + setResponseValueAndErrors(res, 'contentEncoding', 'base64', check.message, refs); + break; + } + case 'pattern:zod': { + addPattern(res, zodPatterns.base64, check.message, refs); + break; + } + } + break; + } + case 'nanoid': { + addPattern(res, zodPatterns.nanoid, check.message, refs); + } + case 'toLowerCase': + case 'toUpperCase': + case 'trim': + break; + default: + ((_) => { })(check); + } + } + } + return res; +} +const escapeNonAlphaNumeric = (value) => Array.from(value) + .map((c) => (/[a-zA-Z0-9]/.test(c) ? 
c : `\\${c}`)) + .join(''); +const addFormat = (schema, value, message, refs) => { + if (schema.format || schema.anyOf?.some((x) => x.format)) { + if (!schema.anyOf) { + schema.anyOf = []; + } + if (schema.format) { + schema.anyOf.push({ + format: schema.format, + ...(schema.errorMessage && + refs.errorMessages && { + errorMessage: { format: schema.errorMessage.format }, + }), + }); + delete schema.format; + if (schema.errorMessage) { + delete schema.errorMessage.format; + if (Object.keys(schema.errorMessage).length === 0) { + delete schema.errorMessage; + } + } + } + schema.anyOf.push({ + format: value, + ...(message && refs.errorMessages && { errorMessage: { format: message } }), + }); + } + else { + setResponseValueAndErrors(schema, 'format', value, message, refs); + } +}; +const addPattern = (schema, regex, message, refs) => { + if (schema.pattern || schema.allOf?.some((x) => x.pattern)) { + if (!schema.allOf) { + schema.allOf = []; + } + if (schema.pattern) { + schema.allOf.push({ + pattern: schema.pattern, + ...(schema.errorMessage && + refs.errorMessages && { + errorMessage: { pattern: schema.errorMessage.pattern }, + }), + }); + delete schema.pattern; + if (schema.errorMessage) { + delete schema.errorMessage.pattern; + if (Object.keys(schema.errorMessage).length === 0) { + delete schema.errorMessage; + } + } + } + schema.allOf.push({ + pattern: processRegExp(regex, refs), + ...(message && refs.errorMessages && { errorMessage: { pattern: message } }), + }); + } + else { + setResponseValueAndErrors(schema, 'pattern', processRegExp(regex, refs), message, refs); + } +}; +// Mutate z.string.regex() in a best attempt to accommodate for regex flags when applyRegexFlags is true +const processRegExp = (regexOrFunction, refs) => { + const regex = typeof regexOrFunction === 'function' ? regexOrFunction() : regexOrFunction; + if (!refs.applyRegexFlags || !regex.flags) + return regex.source; + // Currently handled flags + const flags = { + i: regex.flags.includes('i'), + m: regex.flags.includes('m'), + s: regex.flags.includes('s'), // `.` matches newlines + }; + // The general principle here is to step through each character, one at a time, applying mutations as flags require. We keep track when the current character is escaped, and when it's inside a group /like [this]/ or (also) a range like /[a-z]/. The following is fairly brittle imperative code; edit at your peril! + const source = flags.i ? regex.source.toLowerCase() : regex.source; + let pattern = ''; + let isEscaped = false; + let inCharGroup = false; + let inCharRange = false; + for (let i = 0; i < source.length; i++) { + if (isEscaped) { + pattern += source[i]; + isEscaped = false; + continue; + } + if (flags.i) { + if (inCharGroup) { + if (source[i].match(/[a-z]/)) { + if (inCharRange) { + pattern += source[i]; + pattern += `${source[i - 2]}-${source[i]}`.toUpperCase(); + inCharRange = false; + } + else if (source[i + 1] === '-' && source[i + 2]?.match(/[a-z]/)) { + pattern += source[i]; + inCharRange = true; + } + else { + pattern += `${source[i]}${source[i].toUpperCase()}`; + } + continue; + } + } + else if (source[i].match(/[a-z]/)) { + pattern += `[${source[i]}${source[i].toUpperCase()}]`; + continue; + } + } + if (flags.m) { + if (source[i] === '^') { + pattern += `(^|(?<=[\r\n]))`; + continue; + } + else if (source[i] === '$') { + pattern += `($|(?=[\r\n]))`; + continue; + } + } + if (flags.s && source[i] === '.') { + pattern += inCharGroup ? 
`${source[i]}\r\n` : `[${source[i]}\r\n]`; + continue; + } + pattern += source[i]; + if (source[i] === '\\') { + isEscaped = true; + } + else if (inCharGroup && source[i] === ']') { + inCharGroup = false; + } + else if (!inCharGroup && source[i] === '[') { + inCharGroup = true; + } + } + try { + const regexTest = new RegExp(pattern); + } + catch { + console.warn(`Could not convert regex pattern at ${refs.currentPath.join('/')} to a flag-independent form! Falling back to the flag-ignorant source`); + return regex.source; + } + return pattern; +}; +//# sourceMappingURL=string.mjs.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/string.mjs.map b/node_modules/openai/_vendor/zod-to-json-schema/parsers/string.mjs.map new file mode 100644 index 0000000..3d240a6 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/string.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"string.mjs","sourceRoot":"","sources":["../../../src/_vendor/zod-to-json-schema/parsers/string.ts"],"names":[],"mappings":"OAEO,EAAiB,yBAAyB,EAAE;AAGnD,IAAI,UAA8B,CAAC;AAEnC;;;;;GAKG;AACH,MAAM,CAAC,MAAM,WAAW,GAAG;IACzB;;OAEG;IACH,IAAI,EAAE,kBAAkB;IACxB,KAAK,EAAE,aAAa;IACpB,IAAI,EAAE,0BAA0B;IAChC;;OAEG;IACH,KAAK,EAAE,kGAAkG;IACzG;;;;;;;;;;OAUG;IACH,KAAK,EAAE,GAAG,EAAE;QACV,IAAI,UAAU,KAAK,SAAS,EAAE;YAC5B,UAAU,GAAG,MAAM,CAAC,sDAAsD,EAAE,GAAG,CAAC,CAAC;SAClF;QACD,OAAO,UAAU,CAAC;IACpB,CAAC;IACD;;OAEG;IACH,IAAI,EAAE,uFAAuF;IAC7F;;OAEG;IACH,IAAI,EAAE,qHAAqH;IAC3H;;OAEG;IACH,IAAI,EAAE,8XAA8X;IACpY,MAAM,EAAE,kEAAkE;IAC1E,MAAM,EAAE,qBAAqB;CACrB,CAAC;AA8BX,MAAM,UAAU,cAAc,CAAC,GAAiB,EAAE,IAAU;IAC1D,MAAM,GAAG,GAA0B;QACjC,IAAI,EAAE,QAAQ;KACf,CAAC;IAEF,SAAS,cAAc,CAAC,KAAa;QACnC,OAAO,IAAI,CAAC,eAAe,KAAK,QAAQ,CAAC,CAAC,CAAC,qBAAqB,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC;IAClF,CAAC;IAED,IAAI,GAAG,CAAC,MAAM,EAAE;QACd,KAAK,MAAM,KAAK,IAAI,GAAG,CAAC,MAAM,EAAE;YAC9B,QAAQ,KAAK,CAAC,IAAI,EAAE;gBAClB,KAAK,KAAK;oBACR,yBAAyB,CACvB,GAAG,EACH,WAAW,EACX,OAAO,GAAG,CAAC,SAAS,KAAK,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,SAAS,EAAE,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,EACtF,KAAK,CAAC,OAAO,EACb,IAAI,CACL,CAAC;oBACF,MAAM;gBACR,KAAK,KAAK;oBACR,yBAAyB,CACvB,GAAG,EACH,WAAW,EACX,OAAO,GAAG,CAAC,SAAS,KAAK,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,SAAS,EAAE,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,EACtF,KAAK,CAAC,OAAO,EACb,IAAI,CACL,CAAC;oBAEF,MAAM;gBACR,KAAK,OAAO;oBACV,QAAQ,IAAI,CAAC,aAAa,EAAE;wBAC1B,KAAK,cAAc;4BACjB,SAAS,CAAC,GAAG,EAAE,OAAO,EAAE,KAAK,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;4BAC7C,MAAM;wBACR,KAAK,kBAAkB;4BACrB,SAAS,CAAC,GAAG,EAAE,WAAW,EAAE,KAAK,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;4BACjD,MAAM;wBACR,KAAK,aAAa;4BAChB,UAAU,CAAC,GAAG,EAAE,WAAW,CAAC,KAAK,EAAE,KAAK,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;4BACxD,MAAM;qBACT;oBAED,MAAM;gBACR,KAAK,KAAK;oBACR,SAAS,CAAC,GAAG,EAAE,KAAK,EAAE,KAAK,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;oBAC3C,MAAM;gBACR,KAAK,MAAM;oBACT,SAAS,CAAC,GAAG,EAAE,MAAM,EAAE,KAAK,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;oBAC5C,MAAM;gBACR,KAAK,OAAO;oBACV,UAAU,CAAC,GAAG,EAAE,KAAK,CAAC,KAAK,EAAE,KAAK,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;oBAClD,MAAM;gBACR,KAAK,MAAM;oBACT,UAAU,CAAC,GAAG,EAAE,WAAW,CAAC,IAAI,EAAE,KAAK,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;oBACvD,MAAM;gBACR,KAAK,OAAO;oBACV,UAAU,CAAC,GAAG,EAAE,WAAW,CAAC,KAAK,EAAE,KAAK,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;oBACxD,MAAM;gBACR,KAAK,YAAY;oBACf,UAAU,CAAC,GAAG,EAAE,MAAM,CAAC,IAAI,cAAc,CAAC,KAAK,CAAC,KAAK,CAAC,EAAE,CAAC,EAAE,KAAK,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;oBAChF,MAAM;gBACR,KAAK,UAAU;oBACb,UAAU,CAAC,GAAG,EAAE,MAAM,CAAC,GAAG,cAAc,CAAC,KAAK,CAAC,KAAK,CAAC,GAAG,CA
AC,EAAE,KAAK,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;oBAChF,MAAM;gBAER,KAAK,UAAU;oBACb,SAAS,CAAC,GAAG,EAAE,WAAW,EAAE,KAAK,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;oBACjD,MAAM;gBACR,KAAK,MAAM;oBACT,SAAS,CAAC,GAAG,EAAE,MAAM,EAAE,KAAK,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;oBAC5C,MAAM;gBACR,KAAK,MAAM;oBACT,SAAS,CAAC,GAAG,EAAE,MAAM,EAAE,KAAK,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;oBAC5C,MAAM;gBACR,KAAK,UAAU;oBACb,SAAS,CAAC,GAAG,EAAE,UAAU,EAAE,KAAK,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;oBAChD,MAAM;gBACR,KAAK,QAAQ;oBACX,yBAAyB,CACvB,GAAG,EACH,WAAW,EACX,OAAO,GAAG,CAAC,SAAS,KAAK,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,SAAS,EAAE,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,EACtF,KAAK,CAAC,OAAO,EACb,IAAI,CACL,CAAC;oBACF,yBAAyB,CACvB,GAAG,EACH,WAAW,EACX,OAAO,GAAG,CAAC,SAAS,KAAK,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,SAAS,EAAE,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,EACtF,KAAK,CAAC,OAAO,EACb,IAAI,CACL,CAAC;oBACF,MAAM;gBACR,KAAK,UAAU,CAAC,CAAC;oBACf,UAAU,CAAC,GAAG,EAAE,MAAM,CAAC,cAAc,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,EAAE,KAAK,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;oBAC1E,MAAM;iBACP;gBACD,KAAK,IAAI,CAAC,CAAC;oBACT,IAAI,KAAK,CAAC,OAAO,KAAK,IAAI,EAAE;wBAC1B,SAAS,CAAC,GAAG,EAAE,MAAM,EAAE,KAAK,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;qBAC7C;oBACD,IAAI,KAAK,CAAC,OAAO,KAAK,IAAI,EAAE;wBAC1B,SAAS,CAAC,GAAG,EAAE,MAAM,EAAE,KAAK,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;qBAC7C;oBACD,MAAM;iBACP;gBACD,KAAK,OAAO;oBACV,UAAU,CAAC,GAAG,EAAE,WAAW,CAAC,KAAK,EAAE,KAAK,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;oBACxD,MAAM;gBACR,KAAK,MAAM,CAAC,CAAC;oBACX,UAAU,CAAC,GAAG,EAAE,WAAW,CAAC,IAAI,EAAE,KAAK,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;oBACvD,MAAM;iBACP;gBACD,KAAK,QAAQ,CAAC,CAAC;oBACb,QAAQ,IAAI,CAAC,cAAc,EAAE;wBAC3B,KAAK,eAAe,CAAC,CAAC;4BACpB,SAAS,CAAC,GAAG,EAAE,QAAe,EAAE,KAAK,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;4BACrD,MAAM;yBACP;wBAED,KAAK,wBAAwB,CAAC,CAAC;4BAC7B,yBAAyB,CAAC,GAAG,EAAE,iBAAiB,EAAE,QAAQ,EAAE,KAAK,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;4BACjF,MAAM;yBACP;wBAED,KAAK,aAAa,CAAC,CAAC;4BAClB,UAAU,CAAC,GAAG,EAAE,WAAW,CAAC,MAAM,EAAE,KAAK,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;4BACzD,MAAM;yBACP;qBACF;oBACD,MAAM;iBACP;gBACD,KAAK,QAAQ,CAAC,CAAC;oBACb,UAAU,CAAC,GAAG,EAAE,WAAW,CAAC,MAAM,EAAE,KAAK,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;iBAC1D;gBACD,KAAK,aAAa,CAAC;gBACnB,KAAK,aAAa,CAAC;gBACnB,KAAK,MAAM;oBACT,MAAM;gBACR;oBACE,CAAC,CAAC,CAAQ,EAAE,EAAE,GAAE,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC;aAC7B;SACF;KACF;IAED,OAAO,GAAG,CAAC;AACb,CAAC;AAED,MAAM,qBAAqB,GAAG,CAAC,KAAa,EAAE,EAAE,CAC9C,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC;KACd,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,aAAa,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC;KAClD,IAAI,CAAC,EAAE,CAAC,CAAC;AAEd,MAAM,SAAS,GAAG,CAChB,MAA6B,EAC7B,KAAgD,EAChD,OAA2B,EAC3B,IAAU,EACV,EAAE;IACF,IAAI,MAAM,CAAC,MAAM,IAAI,MAAM,CAAC,KAAK,EAAE,IAAI,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,EAAE;QACxD,IAAI,CAAC,MAAM,CAAC,KAAK,EAAE;YACjB,MAAM,CAAC,KAAK,GAAG,EAAE,CAAC;SACnB;QAED,IAAI,MAAM,CAAC,MAAM,EAAE;YACjB,MAAM,CAAC,KAAM,CAAC,IAAI,CAAC;gBACjB,MAAM,EAAE,MAAM,CAAC,MAAM;gBACrB,GAAG,CAAC,MAAM,CAAC,YAAY;oBACrB,IAAI,CAAC,aAAa,IAAI;oBACpB,YAAY,EAAE,EAAE,MAAM,EAAE,MAAM,CAAC,YAAY,CAAC,MAAM,EAAE;iBACrD,CAAC;aACL,CAAC,CAAC;YACH,OAAO,MAAM,CAAC,MAAM,CAAC;YACrB,IAAI,MAAM,CAAC,YAAY,EAAE;gBACvB,OAAO,MAAM,CAAC,YAAY,CAAC,MAAM,CAAC;gBAClC,IAAI,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC,YAAY,CAAC,CAAC,MAAM,KAAK,CAAC,EAAE;oBACjD,OAAO,MAAM,CAAC,YAAY,CAAC;iBAC5B;aACF;SACF;QAED,MAAM,CAAC,KAAM,CAAC,IAAI,CAAC;YACjB,MAAM,EAAE,KAAK;YACb,GAAG,CAAC,OAAO,IAAI,IAAI,CAAC,aAAa,IAAI,EAAE,YAAY,EAAE,EAAE,MAAM,EAAE,OAAO,EAAE,EAAE,CAAC;SAC5E,CAAC,CAAC;KACJ;SAAM;QACL,yBAAyB,CAAC,MAAM,EAAE,QAAQ,EAAE,KAAK,EAAE,OAAO,EAAE
,IAAI,CAAC,CAAC;KACnE;AACH,CAAC,CAAC;AAEF,MAAM,UAAU,GAAG,CACjB,MAA6B,EAC7B,KAA8B,EAC9B,OAA2B,EAC3B,IAAU,EACV,EAAE;IACF,IAAI,MAAM,CAAC,OAAO,IAAI,MAAM,CAAC,KAAK,EAAE,IAAI,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,EAAE;QAC1D,IAAI,CAAC,MAAM,CAAC,KAAK,EAAE;YACjB,MAAM,CAAC,KAAK,GAAG,EAAE,CAAC;SACnB;QAED,IAAI,MAAM,CAAC,OAAO,EAAE;YAClB,MAAM,CAAC,KAAM,CAAC,IAAI,CAAC;gBACjB,OAAO,EAAE,MAAM,CAAC,OAAO;gBACvB,GAAG,CAAC,MAAM,CAAC,YAAY;oBACrB,IAAI,CAAC,aAAa,IAAI;oBACpB,YAAY,EAAE,EAAE,OAAO,EAAE,MAAM,CAAC,YAAY,CAAC,OAAO,EAAE;iBACvD,CAAC;aACL,CAAC,CAAC;YACH,OAAO,MAAM,CAAC,OAAO,CAAC;YACtB,IAAI,MAAM,CAAC,YAAY,EAAE;gBACvB,OAAO,MAAM,CAAC,YAAY,CAAC,OAAO,CAAC;gBACnC,IAAI,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC,YAAY,CAAC,CAAC,MAAM,KAAK,CAAC,EAAE;oBACjD,OAAO,MAAM,CAAC,YAAY,CAAC;iBAC5B;aACF;SACF;QAED,MAAM,CAAC,KAAM,CAAC,IAAI,CAAC;YACjB,OAAO,EAAE,aAAa,CAAC,KAAK,EAAE,IAAI,CAAC;YACnC,GAAG,CAAC,OAAO,IAAI,IAAI,CAAC,aAAa,IAAI,EAAE,YAAY,EAAE,EAAE,OAAO,EAAE,OAAO,EAAE,EAAE,CAAC;SAC7E,CAAC,CAAC;KACJ;SAAM;QACL,yBAAyB,CAAC,MAAM,EAAE,SAAS,EAAE,aAAa,CAAC,KAAK,EAAE,IAAI,CAAC,EAAE,OAAO,EAAE,IAAI,CAAC,CAAC;KACzF;AACH,CAAC,CAAC;AAEF,wGAAwG;AACxG,MAAM,aAAa,GAAG,CAAC,eAAwC,EAAE,IAAU,EAAU,EAAE;IACrF,MAAM,KAAK,GAAG,OAAO,eAAe,KAAK,UAAU,CAAC,CAAC,CAAC,eAAe,EAAE,CAAC,CAAC,CAAC,eAAe,CAAC;IAC1F,IAAI,CAAC,IAAI,CAAC,eAAe,IAAI,CAAC,KAAK,CAAC,KAAK;QAAE,OAAO,KAAK,CAAC,MAAM,CAAC;IAE/D,0BAA0B;IAC1B,MAAM,KAAK,GAAG;QACZ,CAAC,EAAE,KAAK,CAAC,KAAK,CAAC,QAAQ,CAAC,GAAG,CAAC;QAC5B,CAAC,EAAE,KAAK,CAAC,KAAK,CAAC,QAAQ,CAAC,GAAG,CAAC;QAC5B,CAAC,EAAE,KAAK,CAAC,KAAK,CAAC,QAAQ,CAAC,GAAG,CAAC,EAAE,uBAAuB;KACtD,CAAC;IAEF,yTAAyT;IAEzT,MAAM,MAAM,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,MAAM,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,MAAM,CAAC;IACnE,IAAI,OAAO,GAAG,EAAE,CAAC;IACjB,IAAI,SAAS,GAAG,KAAK,CAAC;IACtB,IAAI,WAAW,GAAG,KAAK,CAAC;IACxB,IAAI,WAAW,GAAG,KAAK,CAAC;IAExB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,MAAM,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;QACtC,IAAI,SAAS,EAAE;YACb,OAAO,IAAI,MAAM,CAAC,CAAC,CAAC,CAAC;YACrB,SAAS,GAAG,KAAK,CAAC;YAClB,SAAS;SACV;QAED,IAAI,KAAK,CAAC,CAAC,EAAE;YACX,IAAI,WAAW,EAAE;gBACf,IAAI,MAAM,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,EAAE;oBAC5B,IAAI,WAAW,EAAE;wBACf,OAAO,IAAI,MAAM,CAAC,CAAC,CAAC,CAAC;wBACrB,OAAO,IAAI,GAAG,MAAM,CAAC,CAAC,GAAG,CAAC,CAAC,IAAI,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,WAAW,EAAE,CAAC;wBACzD,WAAW,GAAG,KAAK,CAAC;qBACrB;yBAAM,IAAI,MAAM,CAAC,CAAC,GAAG,CAAC,CAAC,KAAK,GAAG,IAAI,MAAM,CAAC,CAAC,GAAG,CAAC,CAAC,EAAE,KAAK,CAAC,OAAO,CAAC,EAAE;wBACjE,OAAO,IAAI,MAAM,CAAC,CAAC,CAAC,CAAC;wBACrB,WAAW,GAAG,IAAI,CAAC;qBACpB;yBAAM;wBACL,OAAO,IAAI,GAAG,MAAM,CAAC,CAAC,CAAC,GAAG,MAAM,CAAC,CAAC,CAAC,CAAC,WAAW,EAAE,EAAE,CAAC;qBACrD;oBACD,SAAS;iBACV;aACF;iBAAM,IAAI,MAAM,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,EAAE;gBACnC,OAAO,IAAI,IAAI,MAAM,CAAC,CAAC,CAAC,GAAG,MAAM,CAAC,CAAC,CAAC,CAAC,WAAW,EAAE,GAAG,CAAC;gBACtD,SAAS;aACV;SACF;QAED,IAAI,KAAK,CAAC,CAAC,EAAE;YACX,IAAI,MAAM,CAAC,CAAC,CAAC,KAAK,GAAG,EAAE;gBACrB,OAAO,IAAI,iBAAiB,CAAC;gBAC7B,SAAS;aACV;iBAAM,IAAI,MAAM,CAAC,CAAC,CAAC,KAAK,GAAG,EAAE;gBAC5B,OAAO,IAAI,gBAAgB,CAAC;gBAC5B,SAAS;aACV;SACF;QAED,IAAI,KAAK,CAAC,CAAC,IAAI,MAAM,CAAC,CAAC,CAAC,KAAK,GAAG,EAAE;YAChC,OAAO,IAAI,WAAW,CAAC,CAAC,CAAC,GAAG,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,MAAM,CAAC,CAAC,CAAC,OAAO,CAAC;YACnE,SAAS;SACV;QAED,OAAO,IAAI,MAAM,CAAC,CAAC,CAAC,CAAC;QACrB,IAAI,MAAM,CAAC,CAAC,CAAC,KAAK,IAAI,EAAE;YACtB,SAAS,GAAG,IAAI,CAAC;SAClB;aAAM,IAAI,WAAW,IAAI,MAAM,CAAC,CAAC,CAAC,KAAK,GAAG,EAAE;YAC3C,WAAW,GAAG,KAAK,CAAC;SACrB;aAAM,IAAI,CAAC,WAAW,IAAI,MAAM,CAAC,CAAC,CAAC,KAAK,GAAG,EAAE;YAC5C,WAAW,GAAG,IAAI,CAAC;SACpB;KACF;IAED,IAAI;QACF,MAAM,SAAS,GAAG,IAAI,M
AAM,CAAC,OAAO,CAAC,CAAC;KACvC;IAAC,MAAM;QACN,OAAO,CAAC,IAAI,CACV,sCAAsC,IAAI,CAAC,WAAW,CAAC,IAAI,CACzD,GAAG,CACJ,uEAAuE,CACzE,CAAC;QACF,OAAO,KAAK,CAAC,MAAM,CAAC;KACrB;IAED,OAAO,OAAO,CAAC;AACjB,CAAC,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/tuple.d.ts b/node_modules/openai/_vendor/zod-to-json-schema/parsers/tuple.d.ts new file mode 100644 index 0000000..06cb1b7 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/tuple.d.ts @@ -0,0 +1,14 @@ +import { ZodTupleDef, ZodTupleItems, ZodTypeAny } from 'zod'; +import { JsonSchema7Type } from "../parseDef.js"; +import { Refs } from "../Refs.js"; +export type JsonSchema7TupleType = { + type: 'array'; + minItems: number; + items: JsonSchema7Type[]; +} & ({ + maxItems: number; +} | { + additionalItems?: JsonSchema7Type | undefined; +}); +export declare function parseTupleDef(def: ZodTupleDef, refs: Refs): JsonSchema7TupleType; +//# sourceMappingURL=tuple.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/tuple.d.ts.map b/node_modules/openai/_vendor/zod-to-json-schema/parsers/tuple.d.ts.map new file mode 100644 index 0000000..4f06aec --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/tuple.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"tuple.d.ts","sourceRoot":"","sources":["../../../src/_vendor/zod-to-json-schema/parsers/tuple.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,WAAW,EAAE,aAAa,EAAE,UAAU,EAAE,MAAM,KAAK,CAAC;AAC7D,OAAO,EAAE,eAAe,EAAY,MAAM,aAAa,CAAC;AACxD,OAAO,EAAE,IAAI,EAAE,MAAM,SAAS,CAAC;AAE/B,MAAM,MAAM,oBAAoB,GAAG;IACjC,IAAI,EAAE,OAAO,CAAC;IACd,QAAQ,EAAE,MAAM,CAAC;IACjB,KAAK,EAAE,eAAe,EAAE,CAAC;CAC1B,GAAG,CACA;IACE,QAAQ,EAAE,MAAM,CAAC;CAClB,GACD;IACE,eAAe,CAAC,EAAE,eAAe,GAAG,SAAS,CAAC;CAC/C,CACJ,CAAC;AAEF,wBAAgB,aAAa,CAC3B,GAAG,EAAE,WAAW,CAAC,aAAa,GAAG,EAAE,EAAE,UAAU,GAAG,IAAI,CAAC,EACvD,IAAI,EAAE,IAAI,GACT,oBAAoB,CAiCtB"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/tuple.js b/node_modules/openai/_vendor/zod-to-json-schema/parsers/tuple.js new file mode 100644 index 0000000..4a3fdf2 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/tuple.js @@ -0,0 +1,37 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.parseTupleDef = void 0; +const parseDef_1 = require("../parseDef.js"); +function parseTupleDef(def, refs) { + if (def.rest) { + return { + type: 'array', + minItems: def.items.length, + items: def.items + .map((x, i) => (0, parseDef_1.parseDef)(x._def, { + ...refs, + currentPath: [...refs.currentPath, 'items', `${i}`], + })) + .reduce((acc, x) => (x === undefined ? acc : [...acc, x]), []), + additionalItems: (0, parseDef_1.parseDef)(def.rest._def, { + ...refs, + currentPath: [...refs.currentPath, 'additionalItems'], + }), + }; + } + else { + return { + type: 'array', + minItems: def.items.length, + maxItems: def.items.length, + items: def.items + .map((x, i) => (0, parseDef_1.parseDef)(x._def, { + ...refs, + currentPath: [...refs.currentPath, 'items', `${i}`], + })) + .reduce((acc, x) => (x === undefined ? 
acc : [...acc, x]), []), + }; + } +} +exports.parseTupleDef = parseTupleDef; +//# sourceMappingURL=tuple.js.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/tuple.js.map b/node_modules/openai/_vendor/zod-to-json-schema/parsers/tuple.js.map new file mode 100644 index 0000000..77a8eed --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/tuple.js.map @@ -0,0 +1 @@ +{"version":3,"file":"tuple.js","sourceRoot":"","sources":["../../../src/_vendor/zod-to-json-schema/parsers/tuple.ts"],"names":[],"mappings":";;;AACA,6CAAwD;AAgBxD,SAAgB,aAAa,CAC3B,GAAuD,EACvD,IAAU;IAEV,IAAI,GAAG,CAAC,IAAI,EAAE;QACZ,OAAO;YACL,IAAI,EAAE,OAAO;YACb,QAAQ,EAAE,GAAG,CAAC,KAAK,CAAC,MAAM;YAC1B,KAAK,EAAE,GAAG,CAAC,KAAK;iBACb,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CACZ,IAAA,mBAAQ,EAAC,CAAC,CAAC,IAAI,EAAE;gBACf,GAAG,IAAI;gBACP,WAAW,EAAE,CAAC,GAAG,IAAI,CAAC,WAAW,EAAE,OAAO,EAAE,GAAG,CAAC,EAAE,CAAC;aACpD,CAAC,CACH;iBACA,MAAM,CAAC,CAAC,GAAsB,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,KAAK,SAAS,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,GAAG,GAAG,EAAE,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC;YACnF,eAAe,EAAE,IAAA,mBAAQ,EAAC,GAAG,CAAC,IAAI,CAAC,IAAI,EAAE;gBACvC,GAAG,IAAI;gBACP,WAAW,EAAE,CAAC,GAAG,IAAI,CAAC,WAAW,EAAE,iBAAiB,CAAC;aACtD,CAAC;SACH,CAAC;KACH;SAAM;QACL,OAAO;YACL,IAAI,EAAE,OAAO;YACb,QAAQ,EAAE,GAAG,CAAC,KAAK,CAAC,MAAM;YAC1B,QAAQ,EAAE,GAAG,CAAC,KAAK,CAAC,MAAM;YAC1B,KAAK,EAAE,GAAG,CAAC,KAAK;iBACb,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CACZ,IAAA,mBAAQ,EAAC,CAAC,CAAC,IAAI,EAAE;gBACf,GAAG,IAAI;gBACP,WAAW,EAAE,CAAC,GAAG,IAAI,CAAC,WAAW,EAAE,OAAO,EAAE,GAAG,CAAC,EAAE,CAAC;aACpD,CAAC,CACH;iBACA,MAAM,CAAC,CAAC,GAAsB,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,KAAK,SAAS,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,GAAG,GAAG,EAAE,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC;SACpF,CAAC;KACH;AACH,CAAC;AApCD,sCAoCC"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/tuple.mjs b/node_modules/openai/_vendor/zod-to-json-schema/parsers/tuple.mjs new file mode 100644 index 0000000..0acec70 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/tuple.mjs @@ -0,0 +1,33 @@ +import { parseDef } from "../parseDef.mjs"; +export function parseTupleDef(def, refs) { + if (def.rest) { + return { + type: 'array', + minItems: def.items.length, + items: def.items + .map((x, i) => parseDef(x._def, { + ...refs, + currentPath: [...refs.currentPath, 'items', `${i}`], + })) + .reduce((acc, x) => (x === undefined ? acc : [...acc, x]), []), + additionalItems: parseDef(def.rest._def, { + ...refs, + currentPath: [...refs.currentPath, 'additionalItems'], + }), + }; + } + else { + return { + type: 'array', + minItems: def.items.length, + maxItems: def.items.length, + items: def.items + .map((x, i) => parseDef(x._def, { + ...refs, + currentPath: [...refs.currentPath, 'items', `${i}`], + })) + .reduce((acc, x) => (x === undefined ? 
acc : [...acc, x]), []), + }; + } +} +//# sourceMappingURL=tuple.mjs.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/tuple.mjs.map b/node_modules/openai/_vendor/zod-to-json-schema/parsers/tuple.mjs.map new file mode 100644 index 0000000..73cf7ab --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/tuple.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"tuple.mjs","sourceRoot":"","sources":["../../../src/_vendor/zod-to-json-schema/parsers/tuple.ts"],"names":[],"mappings":"OACO,EAAmB,QAAQ,EAAE;AAgBpC,MAAM,UAAU,aAAa,CAC3B,GAAuD,EACvD,IAAU;IAEV,IAAI,GAAG,CAAC,IAAI,EAAE;QACZ,OAAO;YACL,IAAI,EAAE,OAAO;YACb,QAAQ,EAAE,GAAG,CAAC,KAAK,CAAC,MAAM;YAC1B,KAAK,EAAE,GAAG,CAAC,KAAK;iBACb,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CACZ,QAAQ,CAAC,CAAC,CAAC,IAAI,EAAE;gBACf,GAAG,IAAI;gBACP,WAAW,EAAE,CAAC,GAAG,IAAI,CAAC,WAAW,EAAE,OAAO,EAAE,GAAG,CAAC,EAAE,CAAC;aACpD,CAAC,CACH;iBACA,MAAM,CAAC,CAAC,GAAsB,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,KAAK,SAAS,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,GAAG,GAAG,EAAE,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC;YACnF,eAAe,EAAE,QAAQ,CAAC,GAAG,CAAC,IAAI,CAAC,IAAI,EAAE;gBACvC,GAAG,IAAI;gBACP,WAAW,EAAE,CAAC,GAAG,IAAI,CAAC,WAAW,EAAE,iBAAiB,CAAC;aACtD,CAAC;SACH,CAAC;KACH;SAAM;QACL,OAAO;YACL,IAAI,EAAE,OAAO;YACb,QAAQ,EAAE,GAAG,CAAC,KAAK,CAAC,MAAM;YAC1B,QAAQ,EAAE,GAAG,CAAC,KAAK,CAAC,MAAM;YAC1B,KAAK,EAAE,GAAG,CAAC,KAAK;iBACb,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CACZ,QAAQ,CAAC,CAAC,CAAC,IAAI,EAAE;gBACf,GAAG,IAAI;gBACP,WAAW,EAAE,CAAC,GAAG,IAAI,CAAC,WAAW,EAAE,OAAO,EAAE,GAAG,CAAC,EAAE,CAAC;aACpD,CAAC,CACH;iBACA,MAAM,CAAC,CAAC,GAAsB,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,KAAK,SAAS,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,GAAG,GAAG,EAAE,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC;SACpF,CAAC;KACH;AACH,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/undefined.d.ts b/node_modules/openai/_vendor/zod-to-json-schema/parsers/undefined.d.ts new file mode 100644 index 0000000..95df601 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/undefined.d.ts @@ -0,0 +1,5 @@ +export type JsonSchema7UndefinedType = { + not: {}; +}; +export declare function parseUndefinedDef(): JsonSchema7UndefinedType; +//# sourceMappingURL=undefined.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/undefined.d.ts.map b/node_modules/openai/_vendor/zod-to-json-schema/parsers/undefined.d.ts.map new file mode 100644 index 0000000..f1358a0 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/undefined.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"undefined.d.ts","sourceRoot":"","sources":["../../../src/_vendor/zod-to-json-schema/parsers/undefined.ts"],"names":[],"mappings":"AAAA,MAAM,MAAM,wBAAwB,GAAG;IACrC,GAAG,EAAE,EAAE,CAAC;CACT,CAAC;AAEF,wBAAgB,iBAAiB,IAAI,wBAAwB,CAI5D"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/undefined.js b/node_modules/openai/_vendor/zod-to-json-schema/parsers/undefined.js new file mode 100644 index 0000000..f64ce9c --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/undefined.js @@ -0,0 +1,10 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.parseUndefinedDef = void 0; +function parseUndefinedDef() { + return { + not: {}, + }; +} +exports.parseUndefinedDef = parseUndefinedDef; +//# sourceMappingURL=undefined.js.map \ No newline at end of file diff --git 
a/node_modules/openai/_vendor/zod-to-json-schema/parsers/undefined.js.map b/node_modules/openai/_vendor/zod-to-json-schema/parsers/undefined.js.map new file mode 100644 index 0000000..a4cbf3e --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/undefined.js.map @@ -0,0 +1 @@ +{"version":3,"file":"undefined.js","sourceRoot":"","sources":["../../../src/_vendor/zod-to-json-schema/parsers/undefined.ts"],"names":[],"mappings":";;;AAIA,SAAgB,iBAAiB;IAC/B,OAAO;QACL,GAAG,EAAE,EAAE;KACR,CAAC;AACJ,CAAC;AAJD,8CAIC"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/undefined.mjs b/node_modules/openai/_vendor/zod-to-json-schema/parsers/undefined.mjs new file mode 100644 index 0000000..32ecd26 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/undefined.mjs @@ -0,0 +1,6 @@ +export function parseUndefinedDef() { + return { + not: {}, + }; +} +//# sourceMappingURL=undefined.mjs.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/undefined.mjs.map b/node_modules/openai/_vendor/zod-to-json-schema/parsers/undefined.mjs.map new file mode 100644 index 0000000..01706a8 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/undefined.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"undefined.mjs","sourceRoot":"","sources":["../../../src/_vendor/zod-to-json-schema/parsers/undefined.ts"],"names":[],"mappings":"AAIA,MAAM,UAAU,iBAAiB;IAC/B,OAAO;QACL,GAAG,EAAE,EAAE;KACR,CAAC;AACJ,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/union.d.ts b/node_modules/openai/_vendor/zod-to-json-schema/parsers/union.d.ts new file mode 100644 index 0000000..64178d4 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/union.d.ts @@ -0,0 +1,24 @@ +import { ZodDiscriminatedUnionDef, ZodUnionDef } from 'zod'; +import { JsonSchema7Type } from "../parseDef.js"; +import { Refs } from "../Refs.js"; +export declare const primitiveMappings: { + readonly ZodString: "string"; + readonly ZodNumber: "number"; + readonly ZodBigInt: "integer"; + readonly ZodBoolean: "boolean"; + readonly ZodNull: "null"; +}; +type JsonSchema7Primitive = (typeof primitiveMappings)[keyof typeof primitiveMappings]; +export type JsonSchema7UnionType = JsonSchema7PrimitiveUnionType | JsonSchema7AnyOfType; +type JsonSchema7PrimitiveUnionType = { + type: JsonSchema7Primitive | JsonSchema7Primitive[]; +} | { + type: JsonSchema7Primitive | JsonSchema7Primitive[]; + enum: (string | number | bigint | boolean | null)[]; +}; +type JsonSchema7AnyOfType = { + anyOf: JsonSchema7Type[]; +}; +export declare function parseUnionDef(def: ZodUnionDef | ZodDiscriminatedUnionDef, refs: Refs): JsonSchema7PrimitiveUnionType | JsonSchema7AnyOfType | undefined; +export {}; +//# sourceMappingURL=union.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/union.d.ts.map b/node_modules/openai/_vendor/zod-to-json-schema/parsers/union.d.ts.map new file mode 100644 index 0000000..98ffd91 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/union.d.ts.map @@ -0,0 +1 @@ 
+{"version":3,"file":"union.d.ts","sourceRoot":"","sources":["../../../src/_vendor/zod-to-json-schema/parsers/union.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,wBAAwB,EAA6B,WAAW,EAAE,MAAM,KAAK,CAAC;AACvF,OAAO,EAAE,eAAe,EAAY,MAAM,aAAa,CAAC;AACxD,OAAO,EAAE,IAAI,EAAE,MAAM,SAAS,CAAC;AAE/B,eAAO,MAAM,iBAAiB;;;;;;CAMpB,CAAC;AAEX,KAAK,oBAAoB,GAAG,CAAC,OAAO,iBAAiB,CAAC,CAAC,MAAM,OAAO,iBAAiB,CAAC,CAAC;AAEvF,MAAM,MAAM,oBAAoB,GAAG,6BAA6B,GAAG,oBAAoB,CAAC;AAExF,KAAK,6BAA6B,GAC9B;IACE,IAAI,EAAE,oBAAoB,GAAG,oBAAoB,EAAE,CAAC;CACrD,GACD;IACE,IAAI,EAAE,oBAAoB,GAAG,oBAAoB,EAAE,CAAC;IACpD,IAAI,EAAE,CAAC,MAAM,GAAG,MAAM,GAAG,MAAM,GAAG,OAAO,GAAG,IAAI,CAAC,EAAE,CAAC;CACrD,CAAC;AAEN,KAAK,oBAAoB,GAAG;IAC1B,KAAK,EAAE,eAAe,EAAE,CAAC;CAC1B,CAAC;AAEF,wBAAgB,aAAa,CAC3B,GAAG,EAAE,WAAW,GAAG,wBAAwB,CAAC,GAAG,EAAE,GAAG,CAAC,EACrD,IAAI,EAAE,IAAI,GACT,6BAA6B,GAAG,oBAAoB,GAAG,SAAS,CAmElE"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/union.js b/node_modules/openai/_vendor/zod-to-json-schema/parsers/union.js new file mode 100644 index 0000000..68fbfac --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/union.js @@ -0,0 +1,77 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.parseUnionDef = exports.primitiveMappings = void 0; +const parseDef_1 = require("../parseDef.js"); +exports.primitiveMappings = { + ZodString: 'string', + ZodNumber: 'number', + ZodBigInt: 'integer', + ZodBoolean: 'boolean', + ZodNull: 'null', +}; +function parseUnionDef(def, refs) { + if (refs.target === 'openApi3') + return asAnyOf(def, refs); + const options = def.options instanceof Map ? Array.from(def.options.values()) : def.options; + // This blocks tries to look ahead a bit to produce nicer looking schemas with type array instead of anyOf. + if (options.every((x) => x._def.typeName in exports.primitiveMappings && (!x._def.checks || !x._def.checks.length))) { + // all types in union are primitive and lack checks, so might as well squash into {type: [...]} + const types = options.reduce((types, x) => { + const type = exports.primitiveMappings[x._def.typeName]; //Can be safely casted due to row 43 + return type && !types.includes(type) ? [...types, type] : types; + }, []); + return { + type: types.length > 1 ? types : types[0], + }; + } + else if (options.every((x) => x._def.typeName === 'ZodLiteral' && !x.description)) { + // all options literals + const types = options.reduce((acc, x) => { + const type = typeof x._def.value; + switch (type) { + case 'string': + case 'number': + case 'boolean': + return [...acc, type]; + case 'bigint': + return [...acc, 'integer']; + case 'object': + if (x._def.value === null) + return [...acc, 'null']; + case 'symbol': + case 'undefined': + case 'function': + default: + return acc; + } + }, []); + if (types.length === options.length) { + // all the literals are primitive, as far as null can be considered primitive + const uniqueTypes = types.filter((x, i, a) => a.indexOf(x) === i); + return { + type: uniqueTypes.length > 1 ? uniqueTypes : uniqueTypes[0], + enum: options.reduce((acc, x) => { + return acc.includes(x._def.value) ? 
acc : [...acc, x._def.value]; + }, []), + }; + } + } + else if (options.every((x) => x._def.typeName === 'ZodEnum')) { + return { + type: 'string', + enum: options.reduce((acc, x) => [...acc, ...x._def.values.filter((x) => !acc.includes(x))], []), + }; + } + return asAnyOf(def, refs); +} +exports.parseUnionDef = parseUnionDef; +const asAnyOf = (def, refs) => { + const anyOf = (def.options instanceof Map ? Array.from(def.options.values()) : def.options) + .map((x, i) => (0, parseDef_1.parseDef)(x._def, { + ...refs, + currentPath: [...refs.currentPath, 'anyOf', `${i}`], + })) + .filter((x) => !!x && (!refs.strictUnions || (typeof x === 'object' && Object.keys(x).length > 0))); + return anyOf.length ? { anyOf } : undefined; +}; +//# sourceMappingURL=union.js.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/union.js.map b/node_modules/openai/_vendor/zod-to-json-schema/parsers/union.js.map new file mode 100644 index 0000000..ed456bd --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/union.js.map @@ -0,0 +1 @@ +{"version":3,"file":"union.js","sourceRoot":"","sources":["../../../src/_vendor/zod-to-json-schema/parsers/union.ts"],"names":[],"mappings":";;;AACA,6CAAwD;AAG3C,QAAA,iBAAiB,GAAG;IAC/B,SAAS,EAAE,QAAQ;IACnB,SAAS,EAAE,QAAQ;IACnB,SAAS,EAAE,SAAS;IACpB,UAAU,EAAE,SAAS;IACrB,OAAO,EAAE,MAAM;CACP,CAAC;AAmBX,SAAgB,aAAa,CAC3B,GAAqD,EACrD,IAAU;IAEV,IAAI,IAAI,CAAC,MAAM,KAAK,UAAU;QAAE,OAAO,OAAO,CAAC,GAAG,EAAE,IAAI,CAAC,CAAC;IAE1D,MAAM,OAAO,GACX,GAAG,CAAC,OAAO,YAAY,GAAG,CAAC,CAAC,CAAC,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,OAAO,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,OAAO,CAAC;IAE9E,2GAA2G;IAC3G,IACE,OAAO,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,QAAQ,IAAI,yBAAiB,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,EACvG;QACA,+FAA+F;QAE/F,MAAM,KAAK,GAAG,OAAO,CAAC,MAAM,CAAC,CAAC,KAA6B,EAAE,CAAC,EAAE,EAAE;YAChE,MAAM,IAAI,GAAG,yBAAiB,CAAC,CAAC,CAAC,IAAI,CAAC,QAAwB,CAAC,CAAC,CAAC,oCAAoC;YACrG,OAAO,IAAI,IAAI,CAAC,KAAK,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,KAAK,EAAE,IAAI,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC;QAClE,CAAC,EAAE,EAAE,CAAC,CAAC;QAEP,OAAO;YACL,IAAI,EAAE,KAAK,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAE;SAC3C,CAAC;KACH;SAAM,IAAI,OAAO,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,QAAQ,KAAK,YAAY,IAAI,CAAC,CAAC,CAAC,WAAW,CAAC,EAAE;QACnF,uBAAuB;QAEvB,MAAM,KAAK,GAAG,OAAO,CAAC,MAAM,CAAC,CAAC,GAA2B,EAAE,CAA0B,EAAE,EAAE;YACvF,MAAM,IAAI,GAAG,OAAO,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC;YACjC,QAAQ,IAAI,EAAE;gBACZ,KAAK,QAAQ,CAAC;gBACd,KAAK,QAAQ,CAAC;gBACd,KAAK,SAAS;oBACZ,OAAO,CAAC,GAAG,GAAG,EAAE,IAAI,CAAC,CAAC;gBACxB,KAAK,QAAQ;oBACX,OAAO,CAAC,GAAG,GAAG,EAAE,SAAkB,CAAC,CAAC;gBACtC,KAAK,QAAQ;oBACX,IAAI,CAAC,CAAC,IAAI,CAAC,KAAK,KAAK,IAAI;wBAAE,OAAO,CAAC,GAAG,GAAG,EAAE,MAAe,CAAC,CAAC;gBAC9D,KAAK,QAAQ,CAAC;gBACd,KAAK,WAAW,CAAC;gBACjB,KAAK,UAAU,CAAC;gBAChB;oBACE,OAAO,GAAG,CAAC;aACd;QACH,CAAC,EAAE,EAAE,CAAC,CAAC;QAEP,IAAI,KAAK,CAAC,MAAM,KAAK,OAAO,CAAC,MAAM,EAAE;YACnC,6EAA6E;YAE7E,MAAM,WAAW,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC;YAClE,OAAO;gBACL,IAAI,EAAE,WAAW,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,WAAW,CAAC,CAAC,CAAE;gBAC5D,IAAI,EAAE,OAAO,CAAC,MAAM,CAClB,CAAC,GAAG,EAAE,CAAC,EAAE,EAAE;oBACT,OAAO,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,GAAG,GAAG,EAAE,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;gBACnE,CAAC,EACD,EAAmD,CAC
pD;aACF,CAAC;SACH;KACF;SAAM,IAAI,OAAO,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,QAAQ,KAAK,SAAS,CAAC,EAAE;QAC9D,OAAO;YACL,IAAI,EAAE,QAAQ;YACd,IAAI,EAAE,OAAO,CAAC,MAAM,CAClB,CAAC,GAAa,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,GAAG,GAAG,EAAE,GAAG,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,CAAS,EAAE,EAAE,CAAC,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,EACxF,EAAE,CACH;SACF,CAAC;KACH;IAED,OAAO,OAAO,CAAC,GAAG,EAAE,IAAI,CAAC,CAAC;AAC5B,CAAC;AAtED,sCAsEC;AAED,MAAM,OAAO,GAAG,CACd,GAAqD,EACrD,IAAU,EACwD,EAAE;IACpE,MAAM,KAAK,GAAI,CAAC,GAAG,CAAC,OAAO,YAAY,GAAG,CAAC,CAAC,CAAC,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,OAAO,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,OAAO,CAAW;SACnG,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CACZ,IAAA,mBAAQ,EAAC,CAAC,CAAC,IAAI,EAAE;QACf,GAAG,IAAI;QACP,WAAW,EAAE,CAAC,GAAG,IAAI,CAAC,WAAW,EAAE,OAAO,EAAE,GAAG,CAAC,EAAE,CAAC;KACpD,CAAC,CACH;SACA,MAAM,CACL,CAAC,CAAC,EAAwB,EAAE,CAC1B,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,IAAI,CAAC,YAAY,IAAI,CAAC,OAAO,CAAC,KAAK,QAAQ,IAAI,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CACtF,CAAC;IAEJ,OAAO,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,KAAK,EAAE,CAAC,CAAC,CAAC,SAAS,CAAC;AAC9C,CAAC,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/union.mjs b/node_modules/openai/_vendor/zod-to-json-schema/parsers/union.mjs new file mode 100644 index 0000000..c87356c --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/union.mjs @@ -0,0 +1,73 @@ +import { parseDef } from "../parseDef.mjs"; +export const primitiveMappings = { + ZodString: 'string', + ZodNumber: 'number', + ZodBigInt: 'integer', + ZodBoolean: 'boolean', + ZodNull: 'null', +}; +export function parseUnionDef(def, refs) { + if (refs.target === 'openApi3') + return asAnyOf(def, refs); + const options = def.options instanceof Map ? Array.from(def.options.values()) : def.options; + // This blocks tries to look ahead a bit to produce nicer looking schemas with type array instead of anyOf. + if (options.every((x) => x._def.typeName in primitiveMappings && (!x._def.checks || !x._def.checks.length))) { + // all types in union are primitive and lack checks, so might as well squash into {type: [...]} + const types = options.reduce((types, x) => { + const type = primitiveMappings[x._def.typeName]; //Can be safely casted due to row 43 + return type && !types.includes(type) ? [...types, type] : types; + }, []); + return { + type: types.length > 1 ? types : types[0], + }; + } + else if (options.every((x) => x._def.typeName === 'ZodLiteral' && !x.description)) { + // all options literals + const types = options.reduce((acc, x) => { + const type = typeof x._def.value; + switch (type) { + case 'string': + case 'number': + case 'boolean': + return [...acc, type]; + case 'bigint': + return [...acc, 'integer']; + case 'object': + if (x._def.value === null) + return [...acc, 'null']; + case 'symbol': + case 'undefined': + case 'function': + default: + return acc; + } + }, []); + if (types.length === options.length) { + // all the literals are primitive, as far as null can be considered primitive + const uniqueTypes = types.filter((x, i, a) => a.indexOf(x) === i); + return { + type: uniqueTypes.length > 1 ? uniqueTypes : uniqueTypes[0], + enum: options.reduce((acc, x) => { + return acc.includes(x._def.value) ? 
acc : [...acc, x._def.value]; + }, []), + }; + } + } + else if (options.every((x) => x._def.typeName === 'ZodEnum')) { + return { + type: 'string', + enum: options.reduce((acc, x) => [...acc, ...x._def.values.filter((x) => !acc.includes(x))], []), + }; + } + return asAnyOf(def, refs); +} +const asAnyOf = (def, refs) => { + const anyOf = (def.options instanceof Map ? Array.from(def.options.values()) : def.options) + .map((x, i) => parseDef(x._def, { + ...refs, + currentPath: [...refs.currentPath, 'anyOf', `${i}`], + })) + .filter((x) => !!x && (!refs.strictUnions || (typeof x === 'object' && Object.keys(x).length > 0))); + return anyOf.length ? { anyOf } : undefined; +}; +//# sourceMappingURL=union.mjs.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/union.mjs.map b/node_modules/openai/_vendor/zod-to-json-schema/parsers/union.mjs.map new file mode 100644 index 0000000..1cbb41b --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/union.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"union.mjs","sourceRoot":"","sources":["../../../src/_vendor/zod-to-json-schema/parsers/union.ts"],"names":[],"mappings":"OACO,EAAmB,QAAQ,EAAE;AAGpC,MAAM,CAAC,MAAM,iBAAiB,GAAG;IAC/B,SAAS,EAAE,QAAQ;IACnB,SAAS,EAAE,QAAQ;IACnB,SAAS,EAAE,SAAS;IACpB,UAAU,EAAE,SAAS;IACrB,OAAO,EAAE,MAAM;CACP,CAAC;AAmBX,MAAM,UAAU,aAAa,CAC3B,GAAqD,EACrD,IAAU;IAEV,IAAI,IAAI,CAAC,MAAM,KAAK,UAAU;QAAE,OAAO,OAAO,CAAC,GAAG,EAAE,IAAI,CAAC,CAAC;IAE1D,MAAM,OAAO,GACX,GAAG,CAAC,OAAO,YAAY,GAAG,CAAC,CAAC,CAAC,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,OAAO,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,OAAO,CAAC;IAE9E,2GAA2G;IAC3G,IACE,OAAO,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,QAAQ,IAAI,iBAAiB,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,EACvG;QACA,+FAA+F;QAE/F,MAAM,KAAK,GAAG,OAAO,CAAC,MAAM,CAAC,CAAC,KAA6B,EAAE,CAAC,EAAE,EAAE;YAChE,MAAM,IAAI,GAAG,iBAAiB,CAAC,CAAC,CAAC,IAAI,CAAC,QAAwB,CAAC,CAAC,CAAC,oCAAoC;YACrG,OAAO,IAAI,IAAI,CAAC,KAAK,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,KAAK,EAAE,IAAI,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC;QAClE,CAAC,EAAE,EAAE,CAAC,CAAC;QAEP,OAAO;YACL,IAAI,EAAE,KAAK,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAE;SAC3C,CAAC;KACH;SAAM,IAAI,OAAO,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,QAAQ,KAAK,YAAY,IAAI,CAAC,CAAC,CAAC,WAAW,CAAC,EAAE;QACnF,uBAAuB;QAEvB,MAAM,KAAK,GAAG,OAAO,CAAC,MAAM,CAAC,CAAC,GAA2B,EAAE,CAA0B,EAAE,EAAE;YACvF,MAAM,IAAI,GAAG,OAAO,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC;YACjC,QAAQ,IAAI,EAAE;gBACZ,KAAK,QAAQ,CAAC;gBACd,KAAK,QAAQ,CAAC;gBACd,KAAK,SAAS;oBACZ,OAAO,CAAC,GAAG,GAAG,EAAE,IAAI,CAAC,CAAC;gBACxB,KAAK,QAAQ;oBACX,OAAO,CAAC,GAAG,GAAG,EAAE,SAAkB,CAAC,CAAC;gBACtC,KAAK,QAAQ;oBACX,IAAI,CAAC,CAAC,IAAI,CAAC,KAAK,KAAK,IAAI;wBAAE,OAAO,CAAC,GAAG,GAAG,EAAE,MAAe,CAAC,CAAC;gBAC9D,KAAK,QAAQ,CAAC;gBACd,KAAK,WAAW,CAAC;gBACjB,KAAK,UAAU,CAAC;gBAChB;oBACE,OAAO,GAAG,CAAC;aACd;QACH,CAAC,EAAE,EAAE,CAAC,CAAC;QAEP,IAAI,KAAK,CAAC,MAAM,KAAK,OAAO,CAAC,MAAM,EAAE;YACnC,6EAA6E;YAE7E,MAAM,WAAW,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC;YAClE,OAAO;gBACL,IAAI,EAAE,WAAW,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,WAAW,CAAC,CAAC,CAAE;gBAC5D,IAAI,EAAE,OAAO,CAAC,MAAM,CAClB,CAAC,GAAG,EAAE,CAAC,EAAE,EAAE;oBACT,OAAO,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,GAAG,GAAG,EAAE,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;gBACnE,CAAC,EACD,EAAmD,CACpD;aACF,CAAC;SACH;KACF;SAAM,IAA
I,OAAO,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,QAAQ,KAAK,SAAS,CAAC,EAAE;QAC9D,OAAO;YACL,IAAI,EAAE,QAAQ;YACd,IAAI,EAAE,OAAO,CAAC,MAAM,CAClB,CAAC,GAAa,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,GAAG,GAAG,EAAE,GAAG,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,CAAS,EAAE,EAAE,CAAC,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,EACxF,EAAE,CACH;SACF,CAAC;KACH;IAED,OAAO,OAAO,CAAC,GAAG,EAAE,IAAI,CAAC,CAAC;AAC5B,CAAC;AAED,MAAM,OAAO,GAAG,CACd,GAAqD,EACrD,IAAU,EACwD,EAAE;IACpE,MAAM,KAAK,GAAI,CAAC,GAAG,CAAC,OAAO,YAAY,GAAG,CAAC,CAAC,CAAC,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,OAAO,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,OAAO,CAAW;SACnG,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CACZ,QAAQ,CAAC,CAAC,CAAC,IAAI,EAAE;QACf,GAAG,IAAI;QACP,WAAW,EAAE,CAAC,GAAG,IAAI,CAAC,WAAW,EAAE,OAAO,EAAE,GAAG,CAAC,EAAE,CAAC;KACpD,CAAC,CACH;SACA,MAAM,CACL,CAAC,CAAC,EAAwB,EAAE,CAC1B,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,IAAI,CAAC,YAAY,IAAI,CAAC,OAAO,CAAC,KAAK,QAAQ,IAAI,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CACtF,CAAC;IAEJ,OAAO,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,KAAK,EAAE,CAAC,CAAC,CAAC,SAAS,CAAC;AAC9C,CAAC,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/unknown.d.ts b/node_modules/openai/_vendor/zod-to-json-schema/parsers/unknown.d.ts new file mode 100644 index 0000000..0afe52f --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/unknown.d.ts @@ -0,0 +1,3 @@ +export type JsonSchema7UnknownType = {}; +export declare function parseUnknownDef(): JsonSchema7UnknownType; +//# sourceMappingURL=unknown.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/unknown.d.ts.map b/node_modules/openai/_vendor/zod-to-json-schema/parsers/unknown.d.ts.map new file mode 100644 index 0000000..80253f3 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/unknown.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"unknown.d.ts","sourceRoot":"","sources":["../../../src/_vendor/zod-to-json-schema/parsers/unknown.ts"],"names":[],"mappings":"AAAA,MAAM,MAAM,sBAAsB,GAAG,EAAE,CAAC;AAExC,wBAAgB,eAAe,IAAI,sBAAsB,CAExD"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/unknown.js b/node_modules/openai/_vendor/zod-to-json-schema/parsers/unknown.js new file mode 100644 index 0000000..56acd24 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/unknown.js @@ -0,0 +1,8 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.parseUnknownDef = void 0; +function parseUnknownDef() { + return {}; +} +exports.parseUnknownDef = parseUnknownDef; +//# sourceMappingURL=unknown.js.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/unknown.js.map b/node_modules/openai/_vendor/zod-to-json-schema/parsers/unknown.js.map new file mode 100644 index 0000000..2a6f672 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/unknown.js.map @@ -0,0 +1 @@ +{"version":3,"file":"unknown.js","sourceRoot":"","sources":["../../../src/_vendor/zod-to-json-schema/parsers/unknown.ts"],"names":[],"mappings":";;;AAEA,SAAgB,eAAe;IAC7B,OAAO,EAAE,CAAC;AACZ,CAAC;AAFD,0CAEC"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/unknown.mjs b/node_modules/openai/_vendor/zod-to-json-schema/parsers/unknown.mjs new file mode 100644 index 0000000..6146dee --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/unknown.mjs @@ 
-0,0 +1,4 @@ +export function parseUnknownDef() { + return {}; +} +//# sourceMappingURL=unknown.mjs.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/parsers/unknown.mjs.map b/node_modules/openai/_vendor/zod-to-json-schema/parsers/unknown.mjs.map new file mode 100644 index 0000000..557e5fa --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/parsers/unknown.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"unknown.mjs","sourceRoot":"","sources":["../../../src/_vendor/zod-to-json-schema/parsers/unknown.ts"],"names":[],"mappings":"AAEA,MAAM,UAAU,eAAe;IAC7B,OAAO,EAAE,CAAC;AACZ,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/util.d.ts b/node_modules/openai/_vendor/zod-to-json-schema/util.d.ts new file mode 100644 index 0000000..e144363 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/util.d.ts @@ -0,0 +1,4 @@ +import type { ZodSchema, ZodTypeDef } from 'zod'; +export declare const zodDef: (zodSchema: ZodSchema | ZodTypeDef) => ZodTypeDef; +export declare function isEmptyObj(obj: Object | null | undefined): boolean; +//# sourceMappingURL=util.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/util.d.ts.map b/node_modules/openai/_vendor/zod-to-json-schema/util.d.ts.map new file mode 100644 index 0000000..d1282a6 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/util.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"util.d.ts","sourceRoot":"","sources":["../../src/_vendor/zod-to-json-schema/util.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,SAAS,EAAE,UAAU,EAAE,MAAM,KAAK,CAAC;AAEjD,eAAO,MAAM,MAAM,cAAe,SAAS,GAAG,UAAU,KAAG,UAE1D,CAAC;AAEF,wBAAgB,UAAU,CAAC,GAAG,EAAE,MAAM,GAAG,IAAI,GAAG,SAAS,GAAG,OAAO,CAIlE"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/util.js b/node_modules/openai/_vendor/zod-to-json-schema/util.js new file mode 100644 index 0000000..ce7369d --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/util.js @@ -0,0 +1,16 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.isEmptyObj = exports.zodDef = void 0; +const zodDef = (zodSchema) => { + return '_def' in zodSchema ? 
zodSchema._def : zodSchema; +}; +exports.zodDef = zodDef; +function isEmptyObj(obj) { + if (!obj) + return true; + for (const _k in obj) + return false; + return true; +} +exports.isEmptyObj = isEmptyObj; +//# sourceMappingURL=util.js.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/util.js.map b/node_modules/openai/_vendor/zod-to-json-schema/util.js.map new file mode 100644 index 0000000..7cd1e60 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/util.js.map @@ -0,0 +1 @@ +{"version":3,"file":"util.js","sourceRoot":"","sources":["../../src/_vendor/zod-to-json-schema/util.ts"],"names":[],"mappings":";;;AAEO,MAAM,MAAM,GAAG,CAAC,SAAiC,EAAc,EAAE;IACtE,OAAO,MAAM,IAAI,SAAS,CAAC,CAAC,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,SAAS,CAAC;AAC1D,CAAC,CAAC;AAFW,QAAA,MAAM,UAEjB;AAEF,SAAgB,UAAU,CAAC,GAA8B;IACvD,IAAI,CAAC,GAAG;QAAE,OAAO,IAAI,CAAC;IACtB,KAAK,MAAM,EAAE,IAAI,GAAG;QAAE,OAAO,KAAK,CAAC;IACnC,OAAO,IAAI,CAAC;AACd,CAAC;AAJD,gCAIC"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/util.mjs b/node_modules/openai/_vendor/zod-to-json-schema/util.mjs new file mode 100644 index 0000000..4e6ef56 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/util.mjs @@ -0,0 +1,11 @@ +export const zodDef = (zodSchema) => { + return '_def' in zodSchema ? zodSchema._def : zodSchema; +}; +export function isEmptyObj(obj) { + if (!obj) + return true; + for (const _k in obj) + return false; + return true; +} +//# sourceMappingURL=util.mjs.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/util.mjs.map b/node_modules/openai/_vendor/zod-to-json-schema/util.mjs.map new file mode 100644 index 0000000..16c49e1 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/util.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"util.mjs","sourceRoot":"","sources":["../../src/_vendor/zod-to-json-schema/util.ts"],"names":[],"mappings":"AAEA,MAAM,CAAC,MAAM,MAAM,GAAG,CAAC,SAAiC,EAAc,EAAE;IACtE,OAAO,MAAM,IAAI,SAAS,CAAC,CAAC,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,SAAS,CAAC;AAC1D,CAAC,CAAC;AAEF,MAAM,UAAU,UAAU,CAAC,GAA8B;IACvD,IAAI,CAAC,GAAG;QAAE,OAAO,IAAI,CAAC;IACtB,KAAK,MAAM,EAAE,IAAI,GAAG;QAAE,OAAO,KAAK,CAAC;IACnC,OAAO,IAAI,CAAC;AACd,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/zodToJsonSchema.d.ts b/node_modules/openai/_vendor/zod-to-json-schema/zodToJsonSchema.d.ts new file mode 100644 index 0000000..147b1ff --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/zodToJsonSchema.d.ts @@ -0,0 +1,11 @@ +import { ZodSchema } from 'zod'; +import { Options, Targets } from "./Options.js"; +import { JsonSchema7Type } from "./parseDef.js"; +declare const zodToJsonSchema: (schema: ZodSchema, options?: string | Partial> | undefined) => (Target extends "jsonSchema7" ? JsonSchema7Type : object) & { + $schema?: string; + definitions?: { + [key: string]: Target extends "jsonSchema7" ? JsonSchema7Type : Target extends "jsonSchema2019-09" ? 
JsonSchema7Type : object; + }; +}; +export { zodToJsonSchema }; +//# sourceMappingURL=zodToJsonSchema.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/zodToJsonSchema.d.ts.map b/node_modules/openai/_vendor/zod-to-json-schema/zodToJsonSchema.d.ts.map new file mode 100644 index 0000000..d3cd9e0 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/zodToJsonSchema.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"zodToJsonSchema.d.ts","sourceRoot":"","sources":["../../src/_vendor/zod-to-json-schema/zodToJsonSchema.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,SAAS,EAAE,MAAM,KAAK,CAAC;AAChC,OAAO,EAAE,OAAO,EAAE,OAAO,EAAE,MAAM,WAAW,CAAC;AAC7C,OAAO,EAAE,eAAe,EAAY,MAAM,YAAY,CAAC;AAIvD,QAAA,MAAM,eAAe,mDACX,UAAU,GAAG,CAAC;cAGZ,MAAM;;;;CA2GjB,CAAC;AAEF,OAAO,EAAE,eAAe,EAAE,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/zodToJsonSchema.js b/node_modules/openai/_vendor/zod-to-json-schema/zodToJsonSchema.js new file mode 100644 index 0000000..bda18e8 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/zodToJsonSchema.js @@ -0,0 +1,82 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.zodToJsonSchema = void 0; +const parseDef_1 = require("./parseDef.js"); +const Refs_1 = require("./Refs.js"); +const util_1 = require("./util.js"); +const zodToJsonSchema = (schema, options) => { + const refs = (0, Refs_1.getRefs)(options); + const name = typeof options === 'string' ? options + : options?.nameStrategy === 'title' ? undefined + : options?.name; + const main = (0, parseDef_1.parseDef)(schema._def, name === undefined ? refs : ({ + ...refs, + currentPath: [...refs.basePath, refs.definitionPath, name], + }), false) ?? {}; + const title = typeof options === 'object' && options.name !== undefined && options.nameStrategy === 'title' ? + options.name + : undefined; + if (title !== undefined) { + main.title = title; + } + const definitions = (() => { + if ((0, util_1.isEmptyObj)(refs.definitions)) { + return undefined; + } + const definitions = {}; + const processedDefinitions = new Set(); + // the call to `parseDef()` here might itself add more entries to `.definitions` + // so we need to continually evaluate definitions until we've resolved all of them + // + // we have a generous iteration limit here to avoid blowing up the stack if there + // are any bugs that would otherwise result in us iterating indefinitely + for (let i = 0; i < 500; i++) { + const newDefinitions = Object.entries(refs.definitions).filter(([key]) => !processedDefinitions.has(key)); + if (newDefinitions.length === 0) + break; + for (const [key, schema] of newDefinitions) { + definitions[key] = + (0, parseDef_1.parseDef)((0, util_1.zodDef)(schema), { ...refs, currentPath: [...refs.basePath, refs.definitionPath, key] }, true) ?? {}; + processedDefinitions.add(key); + } + } + return definitions; + })(); + const combined = name === undefined ? + definitions ? + { + ...main, + [refs.definitionPath]: definitions, + } + : main + : refs.nameStrategy === 'duplicate-ref' ? + { + ...main, + ...(definitions || refs.seenRefs.size ? + { + [refs.definitionPath]: { + ...definitions, + // only actually duplicate the schema definition if it was ever referenced + // otherwise the duplication is completely pointless + ...(refs.seenRefs.size ? { [name]: main } : undefined), + }, + } + : undefined), + } + : { + $ref: [...(refs.$refStrategy === 'relative' ? 
[] : refs.basePath), refs.definitionPath, name].join('/'), + [refs.definitionPath]: { + ...definitions, + [name]: main, + }, + }; + if (refs.target === 'jsonSchema7') { + combined.$schema = 'http://json-schema.org/draft-07/schema#'; + } + else if (refs.target === 'jsonSchema2019-09') { + combined.$schema = 'https://json-schema.org/draft/2019-09/schema#'; + } + return combined; +}; +exports.zodToJsonSchema = zodToJsonSchema; +//# sourceMappingURL=zodToJsonSchema.js.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/zodToJsonSchema.js.map b/node_modules/openai/_vendor/zod-to-json-schema/zodToJsonSchema.js.map new file mode 100644 index 0000000..f6827cf --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/zodToJsonSchema.js.map @@ -0,0 +1 @@ +{"version":3,"file":"zodToJsonSchema.js","sourceRoot":"","sources":["../../src/_vendor/zod-to-json-schema/zodToJsonSchema.ts"],"names":[],"mappings":";;;AAEA,4CAAuD;AACvD,oCAAiC;AACjC,oCAA4C;AAE5C,MAAM,eAAe,GAAG,CACtB,MAAsB,EACtB,OAA2C,EAQ3C,EAAE;IACF,MAAM,IAAI,GAAG,IAAA,cAAO,EAAC,OAAO,CAAC,CAAC;IAE9B,MAAM,IAAI,GACR,OAAO,OAAO,KAAK,QAAQ,CAAC,CAAC,CAAC,OAAO;QACrC,CAAC,CAAC,OAAO,EAAE,YAAY,KAAK,OAAO,CAAC,CAAC,CAAC,SAAS;YAC/C,CAAC,CAAC,OAAO,EAAE,IAAI,CAAC;IAElB,MAAM,IAAI,GACR,IAAA,mBAAQ,EACN,MAAM,CAAC,IAAI,EACX,IAAI,KAAK,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAC1B;QACE,GAAG,IAAI;QACP,WAAW,EAAE,CAAC,GAAG,IAAI,CAAC,QAAQ,EAAE,IAAI,CAAC,cAAc,EAAE,IAAI,CAAC;KAC3D,CACF,EACD,KAAK,CACN,IAAI,EAAE,CAAC;IAEV,MAAM,KAAK,GACT,OAAO,OAAO,KAAK,QAAQ,IAAI,OAAO,CAAC,IAAI,KAAK,SAAS,IAAI,OAAO,CAAC,YAAY,KAAK,OAAO,CAAC,CAAC;QAC7F,OAAO,CAAC,IAAI;QACd,CAAC,CAAC,SAAS,CAAC;IAEd,IAAI,KAAK,KAAK,SAAS,EAAE;QACvB,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC;KACpB;IAED,MAAM,WAAW,GAAG,CAAC,GAAG,EAAE;QACxB,IAAI,IAAA,iBAAU,EAAC,IAAI,CAAC,WAAW,CAAC,EAAE;YAChC,OAAO,SAAS,CAAC;SAClB;QAED,MAAM,WAAW,GAAwB,EAAE,CAAC;QAC5C,MAAM,oBAAoB,GAAG,IAAI,GAAG,EAAE,CAAC;QAEvC,gFAAgF;QAChF,kFAAkF;QAClF,EAAE;QACF,iFAAiF;QACjF,wEAAwE;QACxE,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,GAAG,EAAE,CAAC,EAAE,EAAE;YAC5B,MAAM,cAAc,GAAG,MAAM,CAAC,OAAO,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC,MAAM,CAC5D,CAAC,CAAC,GAAG,CAAC,EAAE,EAAE,CAAC,CAAC,oBAAoB,CAAC,GAAG,CAAC,GAAG,CAAC,CAC1C,CAAC;YACF,IAAI,cAAc,CAAC,MAAM,KAAK,CAAC;gBAAE,MAAM;YAEvC,KAAK,MAAM,CAAC,GAAG,EAAE,MAAM,CAAC,IAAI,cAAc,EAAE;gBAC1C,WAAW,CAAC,GAAG,CAAC;oBACd,IAAA,mBAAQ,EACN,IAAA,aAAM,EAAC,MAAM,CAAC,EACd,EAAE,GAAG,IAAI,EAAE,WAAW,EAAE,CAAC,GAAG,IAAI,CAAC,QAAQ,EAAE,IAAI,CAAC,cAAc,EAAE,GAAG,CAAC,EAAE,EACtE,IAAI,CACL,IAAI,EAAE,CAAC;gBACV,oBAAoB,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC;aAC/B;SACF;QAED,OAAO,WAAW,CAAC;IACrB,CAAC,CAAC,EAAE,CAAC;IAEL,MAAM,QAAQ,GACZ,IAAI,KAAK,SAAS,CAAC,CAAC;QAClB,WAAW,CAAC,CAAC;YACX;gBACE,GAAG,IAAI;gBACP,CAAC,IAAI,CAAC,cAAc,CAAC,EAAE,WAAW;aACnC;YACH,CAAC,CAAC,IAAI;QACR,CAAC,CAAC,IAAI,CAAC,YAAY,KAAK,eAAe,CAAC,CAAC;YACvC;gBACE,GAAG,IAAI;gBACP,GAAG,CAAC,WAAW,IAAI,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;oBACrC;wBACE,CAAC,IAAI,CAAC,cAAc,CAAC,EAAE;4BACrB,GAAG,WAAW;4BACd,0EAA0E;4BAC1E,oDAAoD;4BACpD,GAAG,CAAC,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,CAAC,CAAC,CAAC,SAAS,CAAC;yBACvD;qBACF;oBACH,CAAC,CAAC,SAAS,CAAC;aACb;YACH,CAAC,CAAC;gBACE,IAAI,EAAE,CAAC,GAAG,CAAC,IAAI,CAAC,YAAY,KAAK,UAAU,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,QAAQ,CAAC,EAAE,IAAI,CAAC,cAAc,EAAE,IAAI,CAAC,CAAC,IAAI,CAChG,GAAG,CACJ;gBACD,CAAC,IAAI,CAAC,cAAc,CAAC,EAAE;oBACrB,GAAG,WAAW;oBACd,CAAC,IAAI,CAAC,EAAE,IAAI;iBACb;aACF,CAAC;IAEN,IAAI,IAAI,CAAC,MAAM,KAAK,aAAa,EAAE;QACjC,QAAQ,CAAC,OAAO,GAAG,yCAAyC,CAAC;KAC9D;SAAM,IAAI,IAAI,CAAC,MAAM,KAAK
,mBAAmB,EAAE;QAC9C,QAAQ,CAAC,OAAO,GAAG,+CAA+C,CAAC;KACpE;IAED,OAAO,QAAQ,CAAC;AAClB,CAAC,CAAC;AAEO,0CAAe"} \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/zodToJsonSchema.mjs b/node_modules/openai/_vendor/zod-to-json-schema/zodToJsonSchema.mjs new file mode 100644 index 0000000..bc5b160 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/zodToJsonSchema.mjs @@ -0,0 +1,79 @@ +import { parseDef } from "./parseDef.mjs"; +import { getRefs } from "./Refs.mjs"; +import { zodDef, isEmptyObj } from "./util.mjs"; +const zodToJsonSchema = (schema, options) => { + const refs = getRefs(options); + const name = typeof options === 'string' ? options + : options?.nameStrategy === 'title' ? undefined + : options?.name; + const main = parseDef(schema._def, name === undefined ? refs : ({ + ...refs, + currentPath: [...refs.basePath, refs.definitionPath, name], + }), false) ?? {}; + const title = typeof options === 'object' && options.name !== undefined && options.nameStrategy === 'title' ? + options.name + : undefined; + if (title !== undefined) { + main.title = title; + } + const definitions = (() => { + if (isEmptyObj(refs.definitions)) { + return undefined; + } + const definitions = {}; + const processedDefinitions = new Set(); + // the call to `parseDef()` here might itself add more entries to `.definitions` + // so we need to continually evaluate definitions until we've resolved all of them + // + // we have a generous iteration limit here to avoid blowing up the stack if there + // are any bugs that would otherwise result in us iterating indefinitely + for (let i = 0; i < 500; i++) { + const newDefinitions = Object.entries(refs.definitions).filter(([key]) => !processedDefinitions.has(key)); + if (newDefinitions.length === 0) + break; + for (const [key, schema] of newDefinitions) { + definitions[key] = + parseDef(zodDef(schema), { ...refs, currentPath: [...refs.basePath, refs.definitionPath, key] }, true) ?? {}; + processedDefinitions.add(key); + } + } + return definitions; + })(); + const combined = name === undefined ? + definitions ? + { + ...main, + [refs.definitionPath]: definitions, + } + : main + : refs.nameStrategy === 'duplicate-ref' ? + { + ...main, + ...(definitions || refs.seenRefs.size ? + { + [refs.definitionPath]: { + ...definitions, + // only actually duplicate the schema definition if it was ever referenced + // otherwise the duplication is completely pointless + ...(refs.seenRefs.size ? { [name]: main } : undefined), + }, + } + : undefined), + } + : { + $ref: [...(refs.$refStrategy === 'relative' ? 
[] : refs.basePath), refs.definitionPath, name].join('/'), + [refs.definitionPath]: { + ...definitions, + [name]: main, + }, + }; + if (refs.target === 'jsonSchema7') { + combined.$schema = 'http://json-schema.org/draft-07/schema#'; + } + else if (refs.target === 'jsonSchema2019-09') { + combined.$schema = 'https://json-schema.org/draft/2019-09/schema#'; + } + return combined; +}; +export { zodToJsonSchema }; +//# sourceMappingURL=zodToJsonSchema.mjs.map \ No newline at end of file diff --git a/node_modules/openai/_vendor/zod-to-json-schema/zodToJsonSchema.mjs.map b/node_modules/openai/_vendor/zod-to-json-schema/zodToJsonSchema.mjs.map new file mode 100644 index 0000000..d8b1785 --- /dev/null +++ b/node_modules/openai/_vendor/zod-to-json-schema/zodToJsonSchema.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"zodToJsonSchema.mjs","sourceRoot":"","sources":["../../src/_vendor/zod-to-json-schema/zodToJsonSchema.ts"],"names":[],"mappings":"OAEO,EAAmB,QAAQ,EAAE;OAC7B,EAAE,OAAO,EAAE;OACX,EAAE,MAAM,EAAE,UAAU,EAAE;AAE7B,MAAM,eAAe,GAAG,CACtB,MAAsB,EACtB,OAA2C,EAQ3C,EAAE;IACF,MAAM,IAAI,GAAG,OAAO,CAAC,OAAO,CAAC,CAAC;IAE9B,MAAM,IAAI,GACR,OAAO,OAAO,KAAK,QAAQ,CAAC,CAAC,CAAC,OAAO;QACrC,CAAC,CAAC,OAAO,EAAE,YAAY,KAAK,OAAO,CAAC,CAAC,CAAC,SAAS;YAC/C,CAAC,CAAC,OAAO,EAAE,IAAI,CAAC;IAElB,MAAM,IAAI,GACR,QAAQ,CACN,MAAM,CAAC,IAAI,EACX,IAAI,KAAK,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAC1B;QACE,GAAG,IAAI;QACP,WAAW,EAAE,CAAC,GAAG,IAAI,CAAC,QAAQ,EAAE,IAAI,CAAC,cAAc,EAAE,IAAI,CAAC;KAC3D,CACF,EACD,KAAK,CACN,IAAI,EAAE,CAAC;IAEV,MAAM,KAAK,GACT,OAAO,OAAO,KAAK,QAAQ,IAAI,OAAO,CAAC,IAAI,KAAK,SAAS,IAAI,OAAO,CAAC,YAAY,KAAK,OAAO,CAAC,CAAC;QAC7F,OAAO,CAAC,IAAI;QACd,CAAC,CAAC,SAAS,CAAC;IAEd,IAAI,KAAK,KAAK,SAAS,EAAE;QACvB,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC;KACpB;IAED,MAAM,WAAW,GAAG,CAAC,GAAG,EAAE;QACxB,IAAI,UAAU,CAAC,IAAI,CAAC,WAAW,CAAC,EAAE;YAChC,OAAO,SAAS,CAAC;SAClB;QAED,MAAM,WAAW,GAAwB,EAAE,CAAC;QAC5C,MAAM,oBAAoB,GAAG,IAAI,GAAG,EAAE,CAAC;QAEvC,gFAAgF;QAChF,kFAAkF;QAClF,EAAE;QACF,iFAAiF;QACjF,wEAAwE;QACxE,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,GAAG,EAAE,CAAC,EAAE,EAAE;YAC5B,MAAM,cAAc,GAAG,MAAM,CAAC,OAAO,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC,MAAM,CAC5D,CAAC,CAAC,GAAG,CAAC,EAAE,EAAE,CAAC,CAAC,oBAAoB,CAAC,GAAG,CAAC,GAAG,CAAC,CAC1C,CAAC;YACF,IAAI,cAAc,CAAC,MAAM,KAAK,CAAC;gBAAE,MAAM;YAEvC,KAAK,MAAM,CAAC,GAAG,EAAE,MAAM,CAAC,IAAI,cAAc,EAAE;gBAC1C,WAAW,CAAC,GAAG,CAAC;oBACd,QAAQ,CACN,MAAM,CAAC,MAAM,CAAC,EACd,EAAE,GAAG,IAAI,EAAE,WAAW,EAAE,CAAC,GAAG,IAAI,CAAC,QAAQ,EAAE,IAAI,CAAC,cAAc,EAAE,GAAG,CAAC,EAAE,EACtE,IAAI,CACL,IAAI,EAAE,CAAC;gBACV,oBAAoB,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC;aAC/B;SACF;QAED,OAAO,WAAW,CAAC;IACrB,CAAC,CAAC,EAAE,CAAC;IAEL,MAAM,QAAQ,GACZ,IAAI,KAAK,SAAS,CAAC,CAAC;QAClB,WAAW,CAAC,CAAC;YACX;gBACE,GAAG,IAAI;gBACP,CAAC,IAAI,CAAC,cAAc,CAAC,EAAE,WAAW;aACnC;YACH,CAAC,CAAC,IAAI;QACR,CAAC,CAAC,IAAI,CAAC,YAAY,KAAK,eAAe,CAAC,CAAC;YACvC;gBACE,GAAG,IAAI;gBACP,GAAG,CAAC,WAAW,IAAI,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;oBACrC;wBACE,CAAC,IAAI,CAAC,cAAc,CAAC,EAAE;4BACrB,GAAG,WAAW;4BACd,0EAA0E;4BAC1E,oDAAoD;4BACpD,GAAG,CAAC,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,CAAC,CAAC,CAAC,SAAS,CAAC;yBACvD;qBACF;oBACH,CAAC,CAAC,SAAS,CAAC;aACb;YACH,CAAC,CAAC;gBACE,IAAI,EAAE,CAAC,GAAG,CAAC,IAAI,CAAC,YAAY,KAAK,UAAU,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,QAAQ,CAAC,EAAE,IAAI,CAAC,cAAc,EAAE,IAAI,CAAC,CAAC,IAAI,CAChG,GAAG,CACJ;gBACD,CAAC,IAAI,CAAC,cAAc,CAAC,EAAE;oBACrB,GAAG,WAAW;oBACd,CAAC,IAAI,CAAC,EAAE,IAAI;iBACb;aACF,CAAC;IAEN,IAAI,IAAI,CAAC,MAAM,KAAK,aAAa,EAAE;QACjC,QAAQ,CAAC,OAAO,GAAG,yCAAyC,CAAC;KAC9D;SAAM,IAAI,IAAI,CAAC,MAAM,KAAK,mBAAmB
,EAAE;QAC9C,QAAQ,CAAC,OAAO,GAAG,+CAA+C,CAAC;KACpE;IAED,OAAO,QAAQ,CAAC;AAClB,CAAC,CAAC;AAEF,OAAO,EAAE,eAAe,EAAE,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/bin/cli b/node_modules/openai/bin/cli new file mode 100755 index 0000000..00275ac --- /dev/null +++ b/node_modules/openai/bin/cli @@ -0,0 +1,49 @@ +#!/usr/bin/env node + +const { spawnSync } = require('child_process'); + +const commands = { + migrate: { + description: 'Run migrations to update from openai v3 to v4', + fn: () => { + console.log('This automatic code migration is provided by grit.io'); + console.log('Visit https://app.grit.io/studio?preset=openai_v4 for more details.') + + const result = spawnSync( + 'npx', + ['-y', '@getgrit/launcher', 'apply', 'openai_v4', ...process.argv.slice(3)], + { stdio: 'inherit' }, + ); + if (result.status !== 0) { + process.exit(result.status); + } + } + } +} + +function exitWithHelp() { + console.log("Usage: $0 "); + console.log(); + console.log('Subcommands:'); + + for (const [name, info] of Object.entries(commands)) { + console.log(` ${name} ${info.description}`); + } + + console.log(); + process.exit(1); +} + +if (process.argv.length < 3) { + exitWithHelp(); +} + +const commandName = process.argv[2]; + +const command = commands[commandName]; +if (!command) { + console.log(`Unknown subcommand ${commandName}.`); + exitWithHelp(); +} + +command.fn(); diff --git a/node_modules/openai/core.d.ts b/node_modules/openai/core.d.ts new file mode 100644 index 0000000..5de371f --- /dev/null +++ b/node_modules/openai/core.d.ts @@ -0,0 +1,249 @@ + + +import { Stream } from "./streaming.js"; +import { APIError } from "./error.js"; +import { type Readable, type Agent, type RequestInfo, type RequestInit, type Response, type HeadersInit } from "./_shims/index.js"; +export { type Response }; +import { BlobLike } from "./uploads.js"; +export { maybeMultipartFormRequestOptions, multipartFormRequestOptions, createForm, type Uploadable, } from "./uploads.js"; +export type Fetch = (url: RequestInfo, init?: RequestInit) => Promise; +type PromiseOrValue = T | Promise; +type APIResponseProps = { + response: Response; + options: FinalRequestOptions; + controller: AbortController; +}; +type WithRequestID = T extends Array | Response | AbstractPage ? T : T extends Record ? T & { + _request_id?: string | null; +} : T; +/** + * A subclass of `Promise` providing additional helper methods + * for interacting with the SDK. + */ +export declare class APIPromise extends Promise> { + private responsePromise; + private parseResponse; + private parsedPromise; + constructor(responsePromise: Promise, parseResponse?: (props: APIResponseProps) => PromiseOrValue>); + _thenUnwrap(transform: (data: T, props: APIResponseProps) => U): APIPromise; + /** + * Gets the raw `Response` instance instead of parsing the response + * data. + * + * If you want to parse the response body but still get the `Response` + * instance, you can use {@link withResponse()}. + * + * 👋 Getting the wrong TypeScript type for `Response`? + * Try setting `"moduleResolution": "NodeNext"` if you can, + * or add one of these imports before your first `import … from 'openai'`: + * - `import 'openai/shims/node'` (if you're running on Node) + * - `import 'openai/shims/web'` (otherwise) + */ + asResponse(): Promise; + /** + * Gets the parsed response data, the raw `Response` instance and the ID of the request, + * returned via the X-Request-ID header which is useful for debugging requests and reporting + * issues to OpenAI. 
+ * + * If you just want to get the raw `Response` instance without parsing it, + * you can use {@link asResponse()}. + * + * + * 👋 Getting the wrong TypeScript type for `Response`? + * Try setting `"moduleResolution": "NodeNext"` if you can, + * or add one of these imports before your first `import … from 'openai'`: + * - `import 'openai/shims/node'` (if you're running on Node) + * - `import 'openai/shims/web'` (otherwise) + */ + withResponse(): Promise<{ + data: T; + response: Response; + request_id: string | null | undefined; + }>; + private parse; + then, TResult2 = never>(onfulfilled?: ((value: WithRequestID) => TResult1 | PromiseLike) | undefined | null, onrejected?: ((reason: any) => TResult2 | PromiseLike) | undefined | null): Promise; + catch(onrejected?: ((reason: any) => TResult | PromiseLike) | undefined | null): Promise | TResult>; + finally(onfinally?: (() => void) | undefined | null): Promise>; +} +export declare abstract class APIClient { + baseURL: string; + maxRetries: number; + timeout: number; + httpAgent: Agent | undefined; + private fetch; + protected idempotencyHeader?: string; + constructor({ baseURL, maxRetries, timeout, // 10 minutes + httpAgent, fetch: overriddenFetch, }: { + baseURL: string; + maxRetries?: number | undefined; + timeout: number | undefined; + httpAgent: Agent | undefined; + fetch: Fetch | undefined; + }); + protected authHeaders(opts: FinalRequestOptions): Headers; + /** + * Override this to add your own default headers, for example: + * + * { + * ...super.defaultHeaders(), + * Authorization: 'Bearer 123', + * } + */ + protected defaultHeaders(opts: FinalRequestOptions): Headers; + protected abstract defaultQuery(): DefaultQuery | undefined; + /** + * Override this to add your own headers validation: + */ + protected validateHeaders(headers: Headers, customHeaders: Headers): void; + protected defaultIdempotencyKey(): string; + get(path: string, opts?: PromiseOrValue>): APIPromise; + post(path: string, opts?: PromiseOrValue>): APIPromise; + patch(path: string, opts?: PromiseOrValue>): APIPromise; + put(path: string, opts?: PromiseOrValue>): APIPromise; + delete(path: string, opts?: PromiseOrValue>): APIPromise; + private methodRequest; + getAPIList = AbstractPage>(path: string, Page: new (...args: any[]) => PageClass, opts?: RequestOptions): PagePromise; + private calculateContentLength; + buildRequest(options: FinalRequestOptions, { retryCount }?: { + retryCount?: number; + }): { + req: RequestInit; + url: string; + timeout: number; + }; + private buildHeaders; + /** + * Used as a callback for mutating the given `FinalRequestOptions` object. + */ + protected prepareOptions(options: FinalRequestOptions): Promise; + /** + * Used as a callback for mutating the given `RequestInit` object. + * + * This is useful for cases where you want to add certain headers based off of + * the request properties, e.g. `method` or `url`. 
+ */ + protected prepareRequest(request: RequestInit, { url, options }: { + url: string; + options: FinalRequestOptions; + }): Promise; + protected parseHeaders(headers: HeadersInit | null | undefined): Record; + protected makeStatusError(status: number | undefined, error: Object | undefined, message: string | undefined, headers: Headers | undefined): APIError; + request(options: PromiseOrValue>, remainingRetries?: number | null): APIPromise; + private makeRequest; + requestAPIList = AbstractPage>(Page: new (...args: ConstructorParameters) => PageClass, options: FinalRequestOptions): PagePromise; + buildURL(path: string, query: Req | null | undefined): string; + protected stringifyQuery(query: Record): string; + fetchWithTimeout(url: RequestInfo, init: RequestInit | undefined, ms: number, controller: AbortController): Promise; + private shouldRetry; + private retryRequest; + private calculateDefaultRetryTimeoutMillis; + private getUserAgent; +} +export type PageInfo = { + url: URL; +} | { + params: Record | null; +}; +export declare abstract class AbstractPage implements AsyncIterable { + #private; + protected options: FinalRequestOptions; + protected response: Response; + protected body: unknown; + constructor(client: APIClient, response: Response, body: unknown, options: FinalRequestOptions); + /** + * @deprecated Use nextPageInfo instead + */ + abstract nextPageParams(): Partial> | null; + abstract nextPageInfo(): PageInfo | null; + abstract getPaginatedItems(): Item[]; + hasNextPage(): boolean; + getNextPage(): Promise; + iterPages(): AsyncGenerator; + [Symbol.asyncIterator](): AsyncGenerator; +} +/** + * This subclass of Promise will resolve to an instantiated Page once the request completes. + * + * It also implements AsyncIterable to allow auto-paginating iteration on an unawaited list call, eg: + * + * for await (const item of client.items.list()) { + * console.log(item) + * } + */ +export declare class PagePromise, Item = ReturnType[number]> extends APIPromise implements AsyncIterable { + constructor(client: APIClient, request: Promise, Page: new (...args: ConstructorParameters) => PageClass); + /** + * Allow auto-paginating iteration on an unawaited list call, eg: + * + * for await (const item of client.items.list()) { + * console.log(item) + * } + */ + [Symbol.asyncIterator](): AsyncGenerator; +} +export declare const createResponseHeaders: (headers: Awaited>['headers']) => Record; +type HTTPMethod = 'get' | 'post' | 'put' | 'patch' | 'delete'; +export type RequestClient = { + fetch: Fetch; +}; +export type Headers = Record; +export type DefaultQuery = Record; +export type KeysEnum = { + [P in keyof Required]: true; +}; +export type RequestOptions | Readable | BlobLike | ArrayBufferView | ArrayBuffer> = { + method?: HTTPMethod; + path?: string; + query?: Req | undefined; + body?: Req | null | undefined; + headers?: Headers | undefined; + maxRetries?: number; + stream?: boolean | undefined; + timeout?: number; + httpAgent?: Agent; + signal?: AbortSignal | undefined | null; + idempotencyKey?: string; + __binaryRequest?: boolean | undefined; + __binaryResponse?: boolean | undefined; + __streamClass?: typeof Stream; +}; +export declare const isRequestOptions: (obj: unknown) => obj is RequestOptions; +export type FinalRequestOptions | Readable | DataView> = RequestOptions & { + method: HTTPMethod; + path: string; +}; +export declare const safeJSON: (text: string) => any; +export declare const sleep: (ms: number) => Promise; +export declare const castToError: (err: any) => Error; 
+export declare const ensurePresent: (value: T | null | undefined) => T; +/** + * Read an environment variable. + * + * Trims beginning and trailing whitespace. + * + * Will return undefined if the environment variable doesn't exist or cannot be accessed. + */ +export declare const readEnv: (env: string) => string | undefined; +export declare const coerceInteger: (value: unknown) => number; +export declare const coerceFloat: (value: unknown) => number; +export declare const coerceBoolean: (value: unknown) => boolean; +export declare const maybeCoerceInteger: (value: unknown) => number | undefined; +export declare const maybeCoerceFloat: (value: unknown) => number | undefined; +export declare const maybeCoerceBoolean: (value: unknown) => boolean | undefined; +export declare function isEmptyObj(obj: Object | null | undefined): boolean; +export declare function hasOwn(obj: Object, key: string): boolean; +export declare function debug(action: string, ...args: any[]): void; +export declare const isRunningInBrowser: () => boolean; +export interface HeadersProtocol { + get: (header: string) => string | null | undefined; +} +export type HeadersLike = Record | HeadersProtocol; +export declare const isHeadersProtocol: (headers: any) => headers is HeadersProtocol; +export declare const getRequiredHeader: (headers: HeadersLike | Headers, header: string) => string; +export declare const getHeader: (headers: HeadersLike | Headers, header: string) => string | undefined; +/** + * Encodes a string to Base64 format. + */ +export declare const toBase64: (str: string | null | undefined) => string; +export declare function isObj(obj: unknown): obj is Record; +//# sourceMappingURL=core.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/core.d.ts.map b/node_modules/openai/core.d.ts.map new file mode 100644 index 0000000..48302ca --- /dev/null +++ b/node_modules/openai/core.d.ts.map @@ -0,0 +1 @@ 
+{"version":3,"file":"core.d.ts","sourceRoot":"","sources":["src/core.ts"],"names":[],"mappings":";;AACA,OAAO,EAAE,MAAM,EAAE,MAAM,aAAa,CAAC;AACrC,OAAO,EAEL,QAAQ,EAIT,MAAM,SAAS,CAAC;AACjB,OAAO,EAEL,KAAK,QAAQ,EAEb,KAAK,KAAK,EAEV,KAAK,WAAW,EAChB,KAAK,WAAW,EAChB,KAAK,QAAQ,EACb,KAAK,WAAW,EACjB,MAAM,gBAAgB,CAAC;AACxB,OAAO,EAAE,KAAK,QAAQ,EAAE,CAAC;AACzB,OAAO,EAAE,QAAQ,EAA+B,MAAM,WAAW,CAAC;AAClE,OAAO,EACL,gCAAgC,EAChC,2BAA2B,EAC3B,UAAU,EACV,KAAK,UAAU,GAChB,MAAM,WAAW,CAAC;AAEnB,MAAM,MAAM,KAAK,GAAG,CAAC,GAAG,EAAE,WAAW,EAAE,IAAI,CAAC,EAAE,WAAW,KAAK,OAAO,CAAC,QAAQ,CAAC,CAAC;AAEhF,KAAK,cAAc,CAAC,CAAC,IAAI,CAAC,GAAG,OAAO,CAAC,CAAC,CAAC,CAAC;AAExC,KAAK,gBAAgB,GAAG;IACtB,QAAQ,EAAE,QAAQ,CAAC;IACnB,OAAO,EAAE,mBAAmB,CAAC;IAC7B,UAAU,EAAE,eAAe,CAAC;CAC7B,CAAC;AA4CF,KAAK,aAAa,CAAC,CAAC,IAClB,CAAC,SAAS,KAAK,CAAC,GAAG,CAAC,GAAG,QAAQ,GAAG,YAAY,CAAC,GAAG,CAAC,GAAG,CAAC,GACrD,CAAC,SAAS,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,GAAG,CAAC,GAAG;IAAE,WAAW,CAAC,EAAE,MAAM,GAAG,IAAI,CAAA;CAAE,GACnE,CAAC,CAAC;AAaN;;;GAGG;AACH,qBAAa,UAAU,CAAC,CAAC,CAAE,SAAQ,OAAO,CAAC,aAAa,CAAC,CAAC,CAAC,CAAC;IAIxD,OAAO,CAAC,eAAe;IACvB,OAAO,CAAC,aAAa;IAJvB,OAAO,CAAC,aAAa,CAAwC;gBAGnD,eAAe,EAAE,OAAO,CAAC,gBAAgB,CAAC,EAC1C,aAAa,GAAE,CACrB,KAAK,EAAE,gBAAgB,KACpB,cAAc,CAAC,aAAa,CAAC,CAAC,CAAC,CAAwB;IAU9D,WAAW,CAAC,CAAC,EAAE,SAAS,EAAE,CAAC,IAAI,EAAE,CAAC,EAAE,KAAK,EAAE,gBAAgB,KAAK,CAAC,GAAG,UAAU,CAAC,CAAC,CAAC;IAMjF;;;;;;;;;;;;OAYG;IACH,UAAU,IAAI,OAAO,CAAC,QAAQ,CAAC;IAI/B;;;;;;;;;;;;;;OAcG;IACG,YAAY,IAAI,OAAO,CAAC;QAAE,IAAI,EAAE,CAAC,CAAC;QAAC,QAAQ,EAAE,QAAQ,CAAC;QAAC,UAAU,EAAE,MAAM,GAAG,IAAI,GAAG,SAAS,CAAA;KAAE,CAAC;IAKrG,OAAO,CAAC,KAAK;IAOJ,IAAI,CAAC,QAAQ,GAAG,aAAa,CAAC,CAAC,CAAC,EAAE,QAAQ,GAAG,KAAK,EACzD,WAAW,CAAC,EAAE,CAAC,CAAC,KAAK,EAAE,aAAa,CAAC,CAAC,CAAC,KAAK,QAAQ,GAAG,WAAW,CAAC,QAAQ,CAAC,CAAC,GAAG,SAAS,GAAG,IAAI,EAChG,UAAU,CAAC,EAAE,CAAC,CAAC,MAAM,EAAE,GAAG,KAAK,QAAQ,GAAG,WAAW,CAAC,QAAQ,CAAC,CAAC,GAAG,SAAS,GAAG,IAAI,GAClF,OAAO,CAAC,QAAQ,GAAG,QAAQ,CAAC;IAItB,KAAK,CAAC,OAAO,GAAG,KAAK,EAC5B,UAAU,CAAC,EAAE,CAAC,CAAC,MAAM,EAAE,GAAG,KAAK,OAAO,GAAG,WAAW,CAAC,OAAO,CAAC,CAAC,GAAG,SAAS,GAAG,IAAI,GAChF,OAAO,CAAC,aAAa,CAAC,CAAC,CAAC,GAAG,OAAO,CAAC;IAI7B,OAAO,CAAC,SAAS,CAAC,EAAE,CAAC,MAAM,IAAI,CAAC,GAAG,SAAS,GAAG,IAAI,GAAG,OAAO,CAAC,aAAa,CAAC,CAAC,CAAC,CAAC;CAGzF;AAED,8BAAsB,SAAS;IAC7B,OAAO,EAAE,MAAM,CAAC;IAChB,UAAU,EAAE,MAAM,CAAC;IACnB,OAAO,EAAE,MAAM,CAAC;IAChB,SAAS,EAAE,KAAK,GAAG,SAAS,CAAC;IAE7B,OAAO,CAAC,KAAK,CAAQ;IACrB,SAAS,CAAC,iBAAiB,CAAC,EAAE,MAAM,CAAC;gBAEzB,EACV,OAAO,EACP,UAAc,EACd,OAAgB,EAAE,aAAa;IAC/B,SAAS,EACT,KAAK,EAAE,eAAe,GACvB,EAAE;QACD,OAAO,EAAE,MAAM,CAAC;QAChB,UAAU,CAAC,EAAE,MAAM,GAAG,SAAS,CAAC;QAChC,OAAO,EAAE,MAAM,GAAG,SAAS,CAAC;QAC5B,SAAS,EAAE,KAAK,GAAG,SAAS,CAAC;QAC7B,KAAK,EAAE,KAAK,GAAG,SAAS,CAAC;KAC1B;IASD,SAAS,CAAC,WAAW,CAAC,IAAI,EAAE,mBAAmB,GAAG,OAAO;IAIzD;;;;;;;OAOG;IACH,SAAS,CAAC,cAAc,CAAC,IAAI,EAAE,mBAAmB,GAAG,OAAO;IAU5D,SAAS,CAAC,QAAQ,CAAC,YAAY,IAAI,YAAY,GAAG,SAAS;IAE3D;;OAEG;IACH,SAAS,CAAC,eAAe,CAAC,OAAO,EAAE,OAAO,EAAE,aAAa,EAAE,OAAO;IAElE,SAAS,CAAC,qBAAqB,IAAI,MAAM;IAIzC,GAAG,CAAC,GAAG,EAAE,GAAG,EAAE,IAAI,EAAE,MAAM,EAAE,IAAI,CAAC,EAAE,cAAc,CAAC,cAAc,CAAC,GAAG,CAAC,CAAC,GAAG,UAAU,CAAC,GAAG,CAAC;IAIxF,IAAI,CAAC,GAAG,EAAE,GAAG,EAAE,IAAI,EAAE,MAAM,EAAE,IAAI,CAAC,EAAE,cAAc,CAAC,cAAc,CAAC,GAAG,CAAC,CAAC,GAAG,UAAU,CAAC,GAAG,CAAC;IAIzF,KAAK,CAAC,GAAG,EAAE,GAAG,EAAE,IAAI,EAAE,MAAM,EAAE,IAAI,CAAC,EAAE,cAAc,CAAC,cAAc,CAAC,GAAG,CAAC,CAAC,GAAG,UAAU,CAAC,GAAG,CAAC;IAI1F,GAAG,CAAC,GAAG,EAAE,GAAG,EAAE,IAAI,EAAE,MAAM,EAAE,IAAI,CAAC,EAAE,cAAc,CAAC,cAAc,CAAC,GAAG,CAAC,CAAC,GAAG,UAAU,CAAC,GAAG,CAAC;IAIxF,MAAM,CAAC,GAAG,EAAE,GAAG,EAAE,IAAI,EAAE,MAAM,EAAE,
IAAI,CAAC,EAAE,cAAc,CAAC,cAAc,CAAC,GAAG,CAAC,CAAC,GAAG,UAAU,CAAC,GAAG,CAAC;IAI3F,OAAO,CAAC,aAAa;IAkBrB,UAAU,CAAC,IAAI,EAAE,SAAS,SAAS,YAAY,CAAC,IAAI,CAAC,GAAG,YAAY,CAAC,IAAI,CAAC,EACxE,IAAI,EAAE,MAAM,EACZ,IAAI,EAAE,KAAK,GAAG,IAAI,EAAE,GAAG,EAAE,KAAK,SAAS,EACvC,IAAI,CAAC,EAAE,cAAc,CAAC,GAAG,CAAC,GACzB,WAAW,CAAC,SAAS,EAAE,IAAI,CAAC;IAI/B,OAAO,CAAC,sBAAsB;IAkB9B,YAAY,CAAC,GAAG,EACd,OAAO,EAAE,mBAAmB,CAAC,GAAG,CAAC,EACjC,EAAE,UAAc,EAAE,GAAE;QAAE,UAAU,CAAC,EAAE,MAAM,CAAA;KAAO,GAC/C;QAAE,GAAG,EAAE,WAAW,CAAC;QAAC,GAAG,EAAE,MAAM,CAAC;QAAC,OAAO,EAAE,MAAM,CAAA;KAAE;IA+CrD,OAAO,CAAC,YAAY;IAwCpB;;OAEG;cACa,cAAc,CAAC,OAAO,EAAE,mBAAmB,GAAG,OAAO,CAAC,IAAI,CAAC;IAE3E;;;;;OAKG;cACa,cAAc,CAC5B,OAAO,EAAE,WAAW,EACpB,EAAE,GAAG,EAAE,OAAO,EAAE,EAAE;QAAE,GAAG,EAAE,MAAM,CAAC;QAAC,OAAO,EAAE,mBAAmB,CAAA;KAAE,GAC9D,OAAO,CAAC,IAAI,CAAC;IAEhB,SAAS,CAAC,YAAY,CAAC,OAAO,EAAE,WAAW,GAAG,IAAI,GAAG,SAAS,GAAG,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC;IASvF,SAAS,CAAC,eAAe,CACvB,MAAM,EAAE,MAAM,GAAG,SAAS,EAC1B,KAAK,EAAE,MAAM,GAAG,SAAS,EACzB,OAAO,EAAE,MAAM,GAAG,SAAS,EAC3B,OAAO,EAAE,OAAO,GAAG,SAAS,GAC3B,QAAQ;IAIX,OAAO,CAAC,GAAG,EAAE,GAAG,EACd,OAAO,EAAE,cAAc,CAAC,mBAAmB,CAAC,GAAG,CAAC,CAAC,EACjD,gBAAgB,GAAE,MAAM,GAAG,IAAW,GACrC,UAAU,CAAC,GAAG,CAAC;YAIJ,WAAW;IA6DzB,cAAc,CAAC,IAAI,GAAG,OAAO,EAAE,SAAS,SAAS,YAAY,CAAC,IAAI,CAAC,GAAG,YAAY,CAAC,IAAI,CAAC,EACtF,IAAI,EAAE,KAAK,GAAG,IAAI,EAAE,qBAAqB,CAAC,OAAO,YAAY,CAAC,KAAK,SAAS,EAC5E,OAAO,EAAE,mBAAmB,GAC3B,WAAW,CAAC,SAAS,EAAE,IAAI,CAAC;IAK/B,QAAQ,CAAC,GAAG,EAAE,IAAI,EAAE,MAAM,EAAE,KAAK,EAAE,GAAG,GAAG,IAAI,GAAG,SAAS,GAAG,MAAM;IAkBlE,SAAS,CAAC,cAAc,CAAC,KAAK,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,GAAG,MAAM;IAiB1D,gBAAgB,CACpB,GAAG,EAAE,WAAW,EAChB,IAAI,EAAE,WAAW,GAAG,SAAS,EAC7B,EAAE,EAAE,MAAM,EACV,UAAU,EAAE,eAAe,GAC1B,OAAO,CAAC,QAAQ,CAAC;IAcpB,OAAO,CAAC,WAAW;YAuBL,YAAY;IAsC1B,OAAO,CAAC,kCAAkC;IAe1C,OAAO,CAAC,YAAY;CAGrB;AAED,MAAM,MAAM,QAAQ,GAAG;IAAE,GAAG,EAAE,GAAG,CAAA;CAAE,GAAG;IAAE,MAAM,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,GAAG,IAAI,CAAA;CAAE,CAAC;AAEjF,8BAAsB,YAAY,CAAC,IAAI,CAAE,YAAW,aAAa,CAAC,IAAI,CAAC;;IAErE,SAAS,CAAC,OAAO,EAAE,mBAAmB,CAAC;IAEvC,SAAS,CAAC,QAAQ,EAAE,QAAQ,CAAC;IAC7B,SAAS,CAAC,IAAI,EAAE,OAAO,CAAC;gBAEZ,MAAM,EAAE,SAAS,EAAE,QAAQ,EAAE,QAAQ,EAAE,IAAI,EAAE,OAAO,EAAE,OAAO,EAAE,mBAAmB;IAO9F;;OAEG;IACH,QAAQ,CAAC,cAAc,IAAI,OAAO,CAAC,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC,GAAG,IAAI;IAClE,QAAQ,CAAC,YAAY,IAAI,QAAQ,GAAG,IAAI;IAExC,QAAQ,CAAC,iBAAiB,IAAI,IAAI,EAAE;IAEpC,WAAW,IAAI,OAAO;IAMhB,WAAW,IAAI,OAAO,CAAC,IAAI,CAAC;IAqB3B,SAAS,IAAI,cAAc,CAAC,IAAI,CAAC;IAUjC,CAAC,MAAM,CAAC,aAAa,CAAC,IAAI,cAAc,CAAC,IAAI,CAAC;CAOtD;AAED;;;;;;;;GAQG;AACH,qBAAa,WAAW,CACpB,SAAS,SAAS,YAAY,CAAC,IAAI,CAAC,EACpC,IAAI,GAAG,UAAU,CAAC,SAAS,CAAC,mBAAmB,CAAC,CAAC,CAAC,MAAM,CAAC,CAE3D,SAAQ,UAAU,CAAC,SAAS,CAC5B,YAAW,aAAa,CAAC,IAAI,CAAC;gBAG5B,MAAM,EAAE,SAAS,EACjB,OAAO,EAAE,OAAO,CAAC,gBAAgB,CAAC,EAClC,IAAI,EAAE,KAAK,GAAG,IAAI,EAAE,qBAAqB,CAAC,OAAO,YAAY,CAAC,KAAK,SAAS;IAc9E;;;;;;OAMG;IACI,CAAC,MAAM,CAAC,aAAa,CAAC,IAAI,cAAc,CAAC,IAAI,CAAC;CAMtD;AAED,eAAO,MAAM,qBAAqB,YACvB,QAAQ,WAAW,KAAK,CAAC,CAAC,CAAC,SAAS,CAAC,KAC7C,OAAO,MAAM,EAAE,MAAM,CAavB,CAAC;AAEF,KAAK,UAAU,GAAG,KAAK,GAAG,MAAM,GAAG,KAAK,GAAG,OAAO,GAAG,QAAQ,CAAC;AAE9D,MAAM,MAAM,aAAa,GAAG;IAAE,KAAK,EAAE,KAAK,CAAA;CAAE,CAAC;AAC7C,MAAM,MAAM,OAAO,GAAG,MAAM,CAAC,MAAM,EAAE,MAAM,GAAG,IAAI,GAAG,SAAS,CAAC,CAAC;AAChE,MAAM,MAAM,YAAY,GAAG,MAAM,CAAC,MAAM,EAAE,MAAM,GAAG,SAAS,CAAC,CAAC;AAC9D,MAAM,MAAM,QAAQ,CAAC,CAAC,IAAI;KAAG,CAAC,IAAI,MAAM,QAAQ,CAAC,CAAC,CAAC,GAAG,IAAI;CAAE,CAAC;AAE7D,MAAM,MAAM,cAAc,CACxB,GAAG,GAAG,OAAO,GAAG,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,GAAG,QAAQ,GAAG,QAAQ,GAAG,eAAe,GAAG,WAAW,IAC3F;IACF,MA
AM,CAAC,EAAE,UAAU,CAAC;IACpB,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,KAAK,CAAC,EAAE,GAAG,GAAG,SAAS,CAAC;IACxB,IAAI,CAAC,EAAE,GAAG,GAAG,IAAI,GAAG,SAAS,CAAC;IAC9B,OAAO,CAAC,EAAE,OAAO,GAAG,SAAS,CAAC;IAE9B,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,MAAM,CAAC,EAAE,OAAO,GAAG,SAAS,CAAC;IAC7B,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,SAAS,CAAC,EAAE,KAAK,CAAC;IAClB,MAAM,CAAC,EAAE,WAAW,GAAG,SAAS,GAAG,IAAI,CAAC;IACxC,cAAc,CAAC,EAAE,MAAM,CAAC;IAExB,eAAe,CAAC,EAAE,OAAO,GAAG,SAAS,CAAC;IACtC,gBAAgB,CAAC,EAAE,OAAO,GAAG,SAAS,CAAC;IACvC,aAAa,CAAC,EAAE,OAAO,MAAM,CAAC;CAC/B,CAAC;AAwBF,eAAO,MAAM,gBAAgB,QAAS,OAAO,mCAO5C,CAAC;AAEF,MAAM,MAAM,mBAAmB,CAAC,GAAG,GAAG,OAAO,GAAG,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,GAAG,QAAQ,GAAG,QAAQ,IAC3F,cAAc,CAAC,GAAG,CAAC,GAAG;IACpB,MAAM,EAAE,UAAU,CAAC;IACnB,IAAI,EAAE,MAAM,CAAC;CACd,CAAC;AAiKJ,eAAO,MAAM,QAAQ,SAAU,MAAM,QAMpC,CAAC;AAQF,eAAO,MAAM,KAAK,OAAQ,MAAM,qBAAsD,CAAC;AAYvF,eAAO,MAAM,WAAW,QAAS,GAAG,KAAG,KAQtC,CAAC;AAEF,eAAO,MAAM,aAAa,uCAGzB,CAAC;AAEF;;;;;;GAMG;AACH,eAAO,MAAM,OAAO,QAAS,MAAM,KAAG,MAAM,GAAG,SAQ9C,CAAC;AAEF,eAAO,MAAM,aAAa,UAAW,OAAO,KAAG,MAK9C,CAAC;AAEF,eAAO,MAAM,WAAW,UAAW,OAAO,KAAG,MAK5C,CAAC;AAEF,eAAO,MAAM,aAAa,UAAW,OAAO,KAAG,OAI9C,CAAC;AAEF,eAAO,MAAM,kBAAkB,UAAW,OAAO,KAAG,MAAM,GAAG,SAK5D,CAAC;AAEF,eAAO,MAAM,gBAAgB,UAAW,OAAO,KAAG,MAAM,GAAG,SAK1D,CAAC;AAEF,eAAO,MAAM,kBAAkB,UAAW,OAAO,KAAG,OAAO,GAAG,SAK7D,CAAC;AAGF,wBAAgB,UAAU,CAAC,GAAG,EAAE,MAAM,GAAG,IAAI,GAAG,SAAS,GAAG,OAAO,CAIlE;AAGD,wBAAgB,MAAM,CAAC,GAAG,EAAE,MAAM,EAAE,GAAG,EAAE,MAAM,GAAG,OAAO,CAExD;AAwBD,wBAAgB,KAAK,CAAC,MAAM,EAAE,MAAM,EAAE,GAAG,IAAI,EAAE,GAAG,EAAE,QAInD;AAaD,eAAO,MAAM,kBAAkB,eAS9B,CAAC;AAEF,MAAM,WAAW,eAAe;IAC9B,GAAG,EAAE,CAAC,MAAM,EAAE,MAAM,KAAK,MAAM,GAAG,IAAI,GAAG,SAAS,CAAC;CACpD;AACD,MAAM,MAAM,WAAW,GAAG,MAAM,CAAC,MAAM,EAAE,MAAM,GAAG,MAAM,EAAE,GAAG,SAAS,CAAC,GAAG,eAAe,CAAC;AAE1F,eAAO,MAAM,iBAAiB,YAAa,GAAG,+BAE7C,CAAC;AAEF,eAAO,MAAM,iBAAiB,YAAa,WAAW,GAAG,OAAO,UAAU,MAAM,KAAG,MAMlF,CAAC;AAEF,eAAO,MAAM,SAAS,YAAa,WAAW,GAAG,OAAO,UAAU,MAAM,KAAG,MAAM,GAAG,SA2BnF,CAAC;AAEF;;GAEG;AACH,eAAO,MAAM,QAAQ,QAAS,MAAM,GAAG,IAAI,GAAG,SAAS,KAAG,MAWzD,CAAC;AAEF,wBAAgB,KAAK,CAAC,GAAG,EAAE,OAAO,GAAG,GAAG,IAAI,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAElE"} \ No newline at end of file diff --git a/node_modules/openai/core.js b/node_modules/openai/core.js new file mode 100644 index 0000000..e50f37b --- /dev/null +++ b/node_modules/openai/core.js @@ -0,0 +1,921 @@ +"use strict"; +var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; +}; +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? 
f.value : state.get(receiver); +}; +var _AbstractPage_client; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.isObj = exports.toBase64 = exports.getHeader = exports.getRequiredHeader = exports.isHeadersProtocol = exports.isRunningInBrowser = exports.debug = exports.hasOwn = exports.isEmptyObj = exports.maybeCoerceBoolean = exports.maybeCoerceFloat = exports.maybeCoerceInteger = exports.coerceBoolean = exports.coerceFloat = exports.coerceInteger = exports.readEnv = exports.ensurePresent = exports.castToError = exports.sleep = exports.safeJSON = exports.isRequestOptions = exports.createResponseHeaders = exports.PagePromise = exports.AbstractPage = exports.APIClient = exports.APIPromise = exports.createForm = exports.multipartFormRequestOptions = exports.maybeMultipartFormRequestOptions = void 0; +const version_1 = require("./version.js"); +const streaming_1 = require("./streaming.js"); +const error_1 = require("./error.js"); +const index_1 = require("./_shims/index.js"); +const uploads_1 = require("./uploads.js"); +var uploads_2 = require("./uploads.js"); +Object.defineProperty(exports, "maybeMultipartFormRequestOptions", { enumerable: true, get: function () { return uploads_2.maybeMultipartFormRequestOptions; } }); +Object.defineProperty(exports, "multipartFormRequestOptions", { enumerable: true, get: function () { return uploads_2.multipartFormRequestOptions; } }); +Object.defineProperty(exports, "createForm", { enumerable: true, get: function () { return uploads_2.createForm; } }); +async function defaultParseResponse(props) { + const { response } = props; + if (props.options.stream) { + debug('response', response.status, response.url, response.headers, response.body); + // Note: there is an invariant here that isn't represented in the type system + // that if you set `stream: true` the response type must also be `Stream` + if (props.options.__streamClass) { + return props.options.__streamClass.fromSSEResponse(response, props.controller); + } + return streaming_1.Stream.fromSSEResponse(response, props.controller); + } + // fetch refuses to read the body when the status code is 204. + if (response.status === 204) { + return null; + } + if (props.options.__binaryResponse) { + return response; + } + const contentType = response.headers.get('content-type'); + const isJSON = contentType?.includes('application/json') || contentType?.includes('application/vnd.api+json'); + if (isJSON) { + const json = await response.json(); + debug('response', response.status, response.url, response.headers, json); + return _addRequestID(json, response); + } + const text = await response.text(); + debug('response', response.status, response.url, response.headers, text); + // TODO handle blob, arraybuffer, other content types, etc. + return text; +} +function _addRequestID(value, response) { + if (!value || typeof value !== 'object' || Array.isArray(value)) { + return value; + } + return Object.defineProperty(value, '_request_id', { + value: response.headers.get('x-request-id'), + enumerable: false, + }); +} +/** + * A subclass of `Promise` providing additional helper methods + * for interacting with the SDK. 
+ */ +class APIPromise extends Promise { + constructor(responsePromise, parseResponse = defaultParseResponse) { + super((resolve) => { + // this is maybe a bit weird but this has to be a no-op to not implicitly + // parse the response body; instead .then, .catch, .finally are overridden + // to parse the response + resolve(null); + }); + this.responsePromise = responsePromise; + this.parseResponse = parseResponse; + } + _thenUnwrap(transform) { + return new APIPromise(this.responsePromise, async (props) => _addRequestID(transform(await this.parseResponse(props), props), props.response)); + } + /** + * Gets the raw `Response` instance instead of parsing the response + * data. + * + * If you want to parse the response body but still get the `Response` + * instance, you can use {@link withResponse()}. + * + * 👋 Getting the wrong TypeScript type for `Response`? + * Try setting `"moduleResolution": "NodeNext"` if you can, + * or add one of these imports before your first `import … from 'openai'`: + * - `import 'openai/shims/node'` (if you're running on Node) + * - `import 'openai/shims/web'` (otherwise) + */ + asResponse() { + return this.responsePromise.then((p) => p.response); + } + /** + * Gets the parsed response data, the raw `Response` instance and the ID of the request, + * returned via the X-Request-ID header which is useful for debugging requests and reporting + * issues to OpenAI. + * + * If you just want to get the raw `Response` instance without parsing it, + * you can use {@link asResponse()}. + * + * + * 👋 Getting the wrong TypeScript type for `Response`? + * Try setting `"moduleResolution": "NodeNext"` if you can, + * or add one of these imports before your first `import … from 'openai'`: + * - `import 'openai/shims/node'` (if you're running on Node) + * - `import 'openai/shims/web'` (otherwise) + */ + async withResponse() { + const [data, response] = await Promise.all([this.parse(), this.asResponse()]); + return { data, response, request_id: response.headers.get('x-request-id') }; + } + parse() { + if (!this.parsedPromise) { + this.parsedPromise = this.responsePromise.then(this.parseResponse); + } + return this.parsedPromise; + } + then(onfulfilled, onrejected) { + return this.parse().then(onfulfilled, onrejected); + } + catch(onrejected) { + return this.parse().catch(onrejected); + } + finally(onfinally) { + return this.parse().finally(onfinally); + } +} +exports.APIPromise = APIPromise; +class APIClient { + constructor({ baseURL, maxRetries = 2, timeout = 600000, // 10 minutes + httpAgent, fetch: overriddenFetch, }) { + this.baseURL = baseURL; + this.maxRetries = validatePositiveInteger('maxRetries', maxRetries); + this.timeout = validatePositiveInteger('timeout', timeout); + this.httpAgent = httpAgent; + this.fetch = overriddenFetch ?? 
index_1.fetch; + } + authHeaders(opts) { + return {}; + } + /** + * Override this to add your own default headers, for example: + * + * { + * ...super.defaultHeaders(), + * Authorization: 'Bearer 123', + * } + */ + defaultHeaders(opts) { + return { + Accept: 'application/json', + 'Content-Type': 'application/json', + 'User-Agent': this.getUserAgent(), + ...getPlatformHeaders(), + ...this.authHeaders(opts), + }; + } + /** + * Override this to add your own headers validation: + */ + validateHeaders(headers, customHeaders) { } + defaultIdempotencyKey() { + return `stainless-node-retry-${uuid4()}`; + } + get(path, opts) { + return this.methodRequest('get', path, opts); + } + post(path, opts) { + return this.methodRequest('post', path, opts); + } + patch(path, opts) { + return this.methodRequest('patch', path, opts); + } + put(path, opts) { + return this.methodRequest('put', path, opts); + } + delete(path, opts) { + return this.methodRequest('delete', path, opts); + } + methodRequest(method, path, opts) { + return this.request(Promise.resolve(opts).then(async (opts) => { + const body = opts && (0, uploads_1.isBlobLike)(opts?.body) ? new DataView(await opts.body.arrayBuffer()) + : opts?.body instanceof DataView ? opts.body + : opts?.body instanceof ArrayBuffer ? new DataView(opts.body) + : opts && ArrayBuffer.isView(opts?.body) ? new DataView(opts.body.buffer) + : opts?.body; + return { method, path, ...opts, body }; + })); + } + getAPIList(path, Page, opts) { + return this.requestAPIList(Page, { method: 'get', path, ...opts }); + } + calculateContentLength(body) { + if (typeof body === 'string') { + if (typeof Buffer !== 'undefined') { + return Buffer.byteLength(body, 'utf8').toString(); + } + if (typeof TextEncoder !== 'undefined') { + const encoder = new TextEncoder(); + const encoded = encoder.encode(body); + return encoded.length.toString(); + } + } + else if (ArrayBuffer.isView(body)) { + return body.byteLength.toString(); + } + return null; + } + buildRequest(options, { retryCount = 0 } = {}) { + const { method, path, query, headers: headers = {} } = options; + const body = ArrayBuffer.isView(options.body) || (options.__binaryRequest && typeof options.body === 'string') ? + options.body + : (0, uploads_1.isMultipartBody)(options.body) ? options.body.body + : options.body ? JSON.stringify(options.body, null, 2) + : null; + const contentLength = this.calculateContentLength(body); + const url = this.buildURL(path, query); + if ('timeout' in options) + validatePositiveInteger('timeout', options.timeout); + const timeout = options.timeout ?? this.timeout; + const httpAgent = options.httpAgent ?? this.httpAgent ?? (0, index_1.getDefaultAgent)(url); + const minAgentTimeout = timeout + 1000; + if (typeof httpAgent?.options?.timeout === 'number' && + minAgentTimeout > (httpAgent.options.timeout ?? 0)) { + // Allow any given request to bump our agent active socket timeout. + // This may seem strange, but leaking active sockets should be rare and not particularly problematic, + // and without mutating agent we would need to create more of them. + // This tradeoff optimizes for performance. 
+ httpAgent.options.timeout = minAgentTimeout; + } + if (this.idempotencyHeader && method !== 'get') { + if (!options.idempotencyKey) + options.idempotencyKey = this.defaultIdempotencyKey(); + headers[this.idempotencyHeader] = options.idempotencyKey; + } + const reqHeaders = this.buildHeaders({ options, headers, contentLength, retryCount }); + const req = { + method, + ...(body && { body: body }), + headers: reqHeaders, + ...(httpAgent && { agent: httpAgent }), + // @ts-ignore node-fetch uses a custom AbortSignal type that is + // not compatible with standard web types + signal: options.signal ?? null, + }; + return { req, url, timeout }; + } + buildHeaders({ options, headers, contentLength, retryCount, }) { + const reqHeaders = {}; + if (contentLength) { + reqHeaders['content-length'] = contentLength; + } + const defaultHeaders = this.defaultHeaders(options); + applyHeadersMut(reqHeaders, defaultHeaders); + applyHeadersMut(reqHeaders, headers); + // let builtin fetch set the Content-Type for multipart bodies + if ((0, uploads_1.isMultipartBody)(options.body) && index_1.kind !== 'node') { + delete reqHeaders['content-type']; + } + // Don't set the retry count header if it was already set or removed through default headers or by the + // caller. We check `defaultHeaders` and `headers`, which can contain nulls, instead of `reqHeaders` to + // account for the removal case. + if ((0, exports.getHeader)(defaultHeaders, 'x-stainless-retry-count') === undefined && + (0, exports.getHeader)(headers, 'x-stainless-retry-count') === undefined) { + reqHeaders['x-stainless-retry-count'] = String(retryCount); + } + this.validateHeaders(reqHeaders, headers); + return reqHeaders; + } + /** + * Used as a callback for mutating the given `FinalRequestOptions` object. + */ + async prepareOptions(options) { } + /** + * Used as a callback for mutating the given `RequestInit` object. + * + * This is useful for cases where you want to add certain headers based off of + * the request properties, e.g. `method` or `url`. + */ + async prepareRequest(request, { url, options }) { } + parseHeaders(headers) { + return (!headers ? {} + : Symbol.iterator in headers ? + Object.fromEntries(Array.from(headers).map((header) => [...header])) + : { ...headers }); + } + makeStatusError(status, error, message, headers) { + return error_1.APIError.generate(status, error, message, headers); + } + request(options, remainingRetries = null) { + return new APIPromise(this.makeRequest(options, remainingRetries)); + } + async makeRequest(optionsInput, retriesRemaining) { + const options = await optionsInput; + const maxRetries = options.maxRetries ?? 
this.maxRetries; + if (retriesRemaining == null) { + retriesRemaining = maxRetries; + } + await this.prepareOptions(options); + const { req, url, timeout } = this.buildRequest(options, { retryCount: maxRetries - retriesRemaining }); + await this.prepareRequest(req, { url, options }); + debug('request', url, options, req.headers); + if (options.signal?.aborted) { + throw new error_1.APIUserAbortError(); + } + const controller = new AbortController(); + const response = await this.fetchWithTimeout(url, req, timeout, controller).catch(exports.castToError); + if (response instanceof Error) { + if (options.signal?.aborted) { + throw new error_1.APIUserAbortError(); + } + if (retriesRemaining) { + return this.retryRequest(options, retriesRemaining); + } + if (response.name === 'AbortError') { + throw new error_1.APIConnectionTimeoutError(); + } + throw new error_1.APIConnectionError({ cause: response }); + } + const responseHeaders = (0, exports.createResponseHeaders)(response.headers); + if (!response.ok) { + if (retriesRemaining && this.shouldRetry(response)) { + const retryMessage = `retrying, ${retriesRemaining} attempts remaining`; + debug(`response (error; ${retryMessage})`, response.status, url, responseHeaders); + return this.retryRequest(options, retriesRemaining, responseHeaders); + } + const errText = await response.text().catch((e) => (0, exports.castToError)(e).message); + const errJSON = (0, exports.safeJSON)(errText); + const errMessage = errJSON ? undefined : errText; + const retryMessage = retriesRemaining ? `(error; no more retries left)` : `(error; not retryable)`; + debug(`response (error; ${retryMessage})`, response.status, url, responseHeaders, errMessage); + const err = this.makeStatusError(response.status, errJSON, errMessage, responseHeaders); + throw err; + } + return { response, options, controller }; + } + requestAPIList(Page, options) { + const request = this.makeRequest(options, null); + return new PagePromise(this, request, Page); + } + buildURL(path, query) { + const url = isAbsoluteURL(path) ? + new URL(path) + : new URL(this.baseURL + (this.baseURL.endsWith('/') && path.startsWith('/') ? path.slice(1) : path)); + const defaultQuery = this.defaultQuery(); + if (!isEmptyObj(defaultQuery)) { + query = { ...defaultQuery, ...query }; + } + if (typeof query === 'object' && query && !Array.isArray(query)) { + url.search = this.stringifyQuery(query); + } + return url.toString(); + } + stringifyQuery(query) { + return Object.entries(query) + .filter(([_, value]) => typeof value !== 'undefined') + .map(([key, value]) => { + if (typeof value === 'string' || typeof value === 'number' || typeof value === 'boolean') { + return `${encodeURIComponent(key)}=${encodeURIComponent(value)}`; + } + if (value === null) { + return `${encodeURIComponent(key)}=`; + } + throw new error_1.OpenAIError(`Cannot stringify type ${typeof value}; Expected string, number, boolean, or null. If you need to pass nested query parameters, you can manually encode them, e.g. 
{ query: { 'foo[key1]': value1, 'foo[key2]': value2 } }, and please open a GitHub issue requesting better support for your use case.`); + }) + .join('&'); + } + async fetchWithTimeout(url, init, ms, controller) { + const { signal, ...options } = init || {}; + if (signal) + signal.addEventListener('abort', () => controller.abort()); + const timeout = setTimeout(() => controller.abort(), ms); + return ( + // use undefined this binding; fetch errors if bound to something else in browser/cloudflare + this.fetch.call(undefined, url, { signal: controller.signal, ...options }).finally(() => { + clearTimeout(timeout); + })); + } + shouldRetry(response) { + // Note this is not a standard header. + const shouldRetryHeader = response.headers.get('x-should-retry'); + // If the server explicitly says whether or not to retry, obey. + if (shouldRetryHeader === 'true') + return true; + if (shouldRetryHeader === 'false') + return false; + // Retry on request timeouts. + if (response.status === 408) + return true; + // Retry on lock timeouts. + if (response.status === 409) + return true; + // Retry on rate limits. + if (response.status === 429) + return true; + // Retry internal errors. + if (response.status >= 500) + return true; + return false; + } + async retryRequest(options, retriesRemaining, responseHeaders) { + let timeoutMillis; + // Note the `retry-after-ms` header may not be standard, but is a good idea and we'd like proactive support for it. + const retryAfterMillisHeader = responseHeaders?.['retry-after-ms']; + if (retryAfterMillisHeader) { + const timeoutMs = parseFloat(retryAfterMillisHeader); + if (!Number.isNaN(timeoutMs)) { + timeoutMillis = timeoutMs; + } + } + // About the Retry-After header: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Retry-After + const retryAfterHeader = responseHeaders?.['retry-after']; + if (retryAfterHeader && !timeoutMillis) { + const timeoutSeconds = parseFloat(retryAfterHeader); + if (!Number.isNaN(timeoutSeconds)) { + timeoutMillis = timeoutSeconds * 1000; + } + else { + timeoutMillis = Date.parse(retryAfterHeader) - Date.now(); + } + } + // If the API asks us to wait a certain amount of time (and it's a reasonable amount), + // just do what it says, but otherwise calculate a default + if (!(timeoutMillis && 0 <= timeoutMillis && timeoutMillis < 60 * 1000)) { + const maxRetries = options.maxRetries ?? this.maxRetries; + timeoutMillis = this.calculateDefaultRetryTimeoutMillis(retriesRemaining, maxRetries); + } + await (0, exports.sleep)(timeoutMillis); + return this.makeRequest(options, retriesRemaining - 1); + } + calculateDefaultRetryTimeoutMillis(retriesRemaining, maxRetries) { + const initialRetryDelay = 0.5; + const maxRetryDelay = 8.0; + const numRetries = maxRetries - retriesRemaining; + // Apply exponential backoff, but not more than the max. + const sleepSeconds = Math.min(initialRetryDelay * Math.pow(2, numRetries), maxRetryDelay); + // Apply some jitter, take up to at most 25 percent of the retry time. 
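+        // e.g. the first retry sleeps roughly 0.375-0.5s, the second roughly 0.75-1s, and the base delay is capped at 8s before jitter is applied.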
+ const jitter = 1 - Math.random() * 0.25; + return sleepSeconds * jitter * 1000; + } + getUserAgent() { + return `${this.constructor.name}/JS ${version_1.VERSION}`; + } +} +exports.APIClient = APIClient; +class AbstractPage { + constructor(client, response, body, options) { + _AbstractPage_client.set(this, void 0); + __classPrivateFieldSet(this, _AbstractPage_client, client, "f"); + this.options = options; + this.response = response; + this.body = body; + } + hasNextPage() { + const items = this.getPaginatedItems(); + if (!items.length) + return false; + return this.nextPageInfo() != null; + } + async getNextPage() { + const nextInfo = this.nextPageInfo(); + if (!nextInfo) { + throw new error_1.OpenAIError('No next page expected; please check `.hasNextPage()` before calling `.getNextPage()`.'); + } + const nextOptions = { ...this.options }; + if ('params' in nextInfo && typeof nextOptions.query === 'object') { + nextOptions.query = { ...nextOptions.query, ...nextInfo.params }; + } + else if ('url' in nextInfo) { + const params = [...Object.entries(nextOptions.query || {}), ...nextInfo.url.searchParams.entries()]; + for (const [key, value] of params) { + nextInfo.url.searchParams.set(key, value); + } + nextOptions.query = undefined; + nextOptions.path = nextInfo.url.toString(); + } + return await __classPrivateFieldGet(this, _AbstractPage_client, "f").requestAPIList(this.constructor, nextOptions); + } + async *iterPages() { + // eslint-disable-next-line @typescript-eslint/no-this-alias + let page = this; + yield page; + while (page.hasNextPage()) { + page = await page.getNextPage(); + yield page; + } + } + async *[(_AbstractPage_client = new WeakMap(), Symbol.asyncIterator)]() { + for await (const page of this.iterPages()) { + for (const item of page.getPaginatedItems()) { + yield item; + } + } + } +} +exports.AbstractPage = AbstractPage; +/** + * This subclass of Promise will resolve to an instantiated Page once the request completes. + * + * It also implements AsyncIterable to allow auto-paginating iteration on an unawaited list call, eg: + * + * for await (const item of client.items.list()) { + * console.log(item) + * } + */ +class PagePromise extends APIPromise { + constructor(client, request, Page) { + super(request, async (props) => new Page(client, props.response, await defaultParseResponse(props), props.options)); + } + /** + * Allow auto-paginating iteration on an unawaited list call, eg: + * + * for await (const item of client.items.list()) { + * console.log(item) + * } + */ + async *[Symbol.asyncIterator]() { + const page = await this; + for await (const item of page) { + yield item; + } + } +} +exports.PagePromise = PagePromise; +const createResponseHeaders = (headers) => { + return new Proxy(Object.fromEntries( + // @ts-ignore + headers.entries()), { + get(target, name) { + const key = name.toString(); + return target[key.toLowerCase()] || target[key]; + }, + }); +}; +exports.createResponseHeaders = createResponseHeaders; +// This is required so that we can determine if a given object matches the RequestOptions +// type at runtime. While this requires duplication, it is enforced by the TypeScript +// compiler such that any missing / extraneous keys will cause an error. 
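+// The `isRequestOptions` helper below treats an object as request options only when every one of its keys appears in this map.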
+const requestOptionsKeys = { + method: true, + path: true, + query: true, + body: true, + headers: true, + maxRetries: true, + stream: true, + timeout: true, + httpAgent: true, + signal: true, + idempotencyKey: true, + __binaryRequest: true, + __binaryResponse: true, + __streamClass: true, +}; +const isRequestOptions = (obj) => { + return (typeof obj === 'object' && + obj !== null && + !isEmptyObj(obj) && + Object.keys(obj).every((k) => hasOwn(requestOptionsKeys, k))); +}; +exports.isRequestOptions = isRequestOptions; +const getPlatformProperties = () => { + if (typeof Deno !== 'undefined' && Deno.build != null) { + return { + 'X-Stainless-Lang': 'js', + 'X-Stainless-Package-Version': version_1.VERSION, + 'X-Stainless-OS': normalizePlatform(Deno.build.os), + 'X-Stainless-Arch': normalizeArch(Deno.build.arch), + 'X-Stainless-Runtime': 'deno', + 'X-Stainless-Runtime-Version': typeof Deno.version === 'string' ? Deno.version : Deno.version?.deno ?? 'unknown', + }; + } + if (typeof EdgeRuntime !== 'undefined') { + return { + 'X-Stainless-Lang': 'js', + 'X-Stainless-Package-Version': version_1.VERSION, + 'X-Stainless-OS': 'Unknown', + 'X-Stainless-Arch': `other:${EdgeRuntime}`, + 'X-Stainless-Runtime': 'edge', + 'X-Stainless-Runtime-Version': process.version, + }; + } + // Check if Node.js + if (Object.prototype.toString.call(typeof process !== 'undefined' ? process : 0) === '[object process]') { + return { + 'X-Stainless-Lang': 'js', + 'X-Stainless-Package-Version': version_1.VERSION, + 'X-Stainless-OS': normalizePlatform(process.platform), + 'X-Stainless-Arch': normalizeArch(process.arch), + 'X-Stainless-Runtime': 'node', + 'X-Stainless-Runtime-Version': process.version, + }; + } + const browserInfo = getBrowserInfo(); + if (browserInfo) { + return { + 'X-Stainless-Lang': 'js', + 'X-Stainless-Package-Version': version_1.VERSION, + 'X-Stainless-OS': 'Unknown', + 'X-Stainless-Arch': 'unknown', + 'X-Stainless-Runtime': `browser:${browserInfo.browser}`, + 'X-Stainless-Runtime-Version': browserInfo.version, + }; + } + // TODO add support for Cloudflare workers, etc. + return { + 'X-Stainless-Lang': 'js', + 'X-Stainless-Package-Version': version_1.VERSION, + 'X-Stainless-OS': 'Unknown', + 'X-Stainless-Arch': 'unknown', + 'X-Stainless-Runtime': 'unknown', + 'X-Stainless-Runtime-Version': 'unknown', + }; +}; +// Note: modified from https://github.com/JS-DevTools/host-environment/blob/b1ab79ecde37db5d6e163c050e54fe7d287d7c92/src/isomorphic.browser.ts +function getBrowserInfo() { + if (typeof navigator === 'undefined' || !navigator) { + return null; + } + // NOTE: The order matters here! 
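+    // (Chrome user agents also contain 'Safari', and legacy Edge user agents contain both 'Chrome' and 'Safari', so the more specific patterns are tested first.)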
+ const browserPatterns = [ + { key: 'edge', pattern: /Edge(?:\W+(\d+)\.(\d+)(?:\.(\d+))?)?/ }, + { key: 'ie', pattern: /MSIE(?:\W+(\d+)\.(\d+)(?:\.(\d+))?)?/ }, + { key: 'ie', pattern: /Trident(?:.*rv\:(\d+)\.(\d+)(?:\.(\d+))?)?/ }, + { key: 'chrome', pattern: /Chrome(?:\W+(\d+)\.(\d+)(?:\.(\d+))?)?/ }, + { key: 'firefox', pattern: /Firefox(?:\W+(\d+)\.(\d+)(?:\.(\d+))?)?/ }, + { key: 'safari', pattern: /(?:Version\W+(\d+)\.(\d+)(?:\.(\d+))?)?(?:\W+Mobile\S*)?\W+Safari/ }, + ]; + // Find the FIRST matching browser + for (const { key, pattern } of browserPatterns) { + const match = pattern.exec(navigator.userAgent); + if (match) { + const major = match[1] || 0; + const minor = match[2] || 0; + const patch = match[3] || 0; + return { browser: key, version: `${major}.${minor}.${patch}` }; + } + } + return null; +} +const normalizeArch = (arch) => { + // Node docs: + // - https://nodejs.org/api/process.html#processarch + // Deno docs: + // - https://doc.deno.land/deno/stable/~/Deno.build + if (arch === 'x32') + return 'x32'; + if (arch === 'x86_64' || arch === 'x64') + return 'x64'; + if (arch === 'arm') + return 'arm'; + if (arch === 'aarch64' || arch === 'arm64') + return 'arm64'; + if (arch) + return `other:${arch}`; + return 'unknown'; +}; +const normalizePlatform = (platform) => { + // Node platforms: + // - https://nodejs.org/api/process.html#processplatform + // Deno platforms: + // - https://doc.deno.land/deno/stable/~/Deno.build + // - https://github.com/denoland/deno/issues/14799 + platform = platform.toLowerCase(); + // NOTE: this iOS check is untested and may not work + // Node does not work natively on IOS, there is a fork at + // https://github.com/nodejs-mobile/nodejs-mobile + // however it is unknown at the time of writing how to detect if it is running + if (platform.includes('ios')) + return 'iOS'; + if (platform === 'android') + return 'Android'; + if (platform === 'darwin') + return 'MacOS'; + if (platform === 'win32') + return 'Windows'; + if (platform === 'freebsd') + return 'FreeBSD'; + if (platform === 'openbsd') + return 'OpenBSD'; + if (platform === 'linux') + return 'Linux'; + if (platform) + return `Other:${platform}`; + return 'Unknown'; +}; +let _platformHeaders; +const getPlatformHeaders = () => { + return (_platformHeaders ?? 
(_platformHeaders = getPlatformProperties())); +}; +const safeJSON = (text) => { + try { + return JSON.parse(text); + } + catch (err) { + return undefined; + } +}; +exports.safeJSON = safeJSON; +// https://url.spec.whatwg.org/#url-scheme-string +const startsWithSchemeRegexp = /^[a-z][a-z0-9+.-]*:/i; +const isAbsoluteURL = (url) => { + return startsWithSchemeRegexp.test(url); +}; +const sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms)); +exports.sleep = sleep; +const validatePositiveInteger = (name, n) => { + if (typeof n !== 'number' || !Number.isInteger(n)) { + throw new error_1.OpenAIError(`${name} must be an integer`); + } + if (n < 0) { + throw new error_1.OpenAIError(`${name} must be a positive integer`); + } + return n; +}; +const castToError = (err) => { + if (err instanceof Error) + return err; + if (typeof err === 'object' && err !== null) { + try { + return new Error(JSON.stringify(err)); + } + catch { } + } + return new Error(err); +}; +exports.castToError = castToError; +const ensurePresent = (value) => { + if (value == null) + throw new error_1.OpenAIError(`Expected a value to be given but received ${value} instead.`); + return value; +}; +exports.ensurePresent = ensurePresent; +/** + * Read an environment variable. + * + * Trims beginning and trailing whitespace. + * + * Will return undefined if the environment variable doesn't exist or cannot be accessed. + */ +const readEnv = (env) => { + if (typeof process !== 'undefined') { + return process.env?.[env]?.trim() ?? undefined; + } + if (typeof Deno !== 'undefined') { + return Deno.env?.get?.(env)?.trim(); + } + return undefined; +}; +exports.readEnv = readEnv; +const coerceInteger = (value) => { + if (typeof value === 'number') + return Math.round(value); + if (typeof value === 'string') + return parseInt(value, 10); + throw new error_1.OpenAIError(`Could not coerce ${value} (type: ${typeof value}) into a number`); +}; +exports.coerceInteger = coerceInteger; +const coerceFloat = (value) => { + if (typeof value === 'number') + return value; + if (typeof value === 'string') + return parseFloat(value); + throw new error_1.OpenAIError(`Could not coerce ${value} (type: ${typeof value}) into a number`); +}; +exports.coerceFloat = coerceFloat; +const coerceBoolean = (value) => { + if (typeof value === 'boolean') + return value; + if (typeof value === 'string') + return value === 'true'; + return Boolean(value); +}; +exports.coerceBoolean = coerceBoolean; +const maybeCoerceInteger = (value) => { + if (value === undefined) { + return undefined; + } + return (0, exports.coerceInteger)(value); +}; +exports.maybeCoerceInteger = maybeCoerceInteger; +const maybeCoerceFloat = (value) => { + if (value === undefined) { + return undefined; + } + return (0, exports.coerceFloat)(value); +}; +exports.maybeCoerceFloat = maybeCoerceFloat; +const maybeCoerceBoolean = (value) => { + if (value === undefined) { + return undefined; + } + return (0, exports.coerceBoolean)(value); +}; +exports.maybeCoerceBoolean = maybeCoerceBoolean; +// https://stackoverflow.com/a/34491287 +function isEmptyObj(obj) { + if (!obj) + return true; + for (const _k in obj) + return false; + return true; +} +exports.isEmptyObj = isEmptyObj; +// https://eslint.org/docs/latest/rules/no-prototype-builtins +function hasOwn(obj, key) { + return Object.prototype.hasOwnProperty.call(obj, key); +} +exports.hasOwn = hasOwn; +/** + * Copies headers from "newHeaders" onto "targetHeaders", + * using lower-case for all properties, + * ignoring any keys with undefined values, 
+ * and deleting any keys with null values. + */ +function applyHeadersMut(targetHeaders, newHeaders) { + for (const k in newHeaders) { + if (!hasOwn(newHeaders, k)) + continue; + const lowerKey = k.toLowerCase(); + if (!lowerKey) + continue; + const val = newHeaders[k]; + if (val === null) { + delete targetHeaders[lowerKey]; + } + else if (val !== undefined) { + targetHeaders[lowerKey] = val; + } + } +} +function debug(action, ...args) { + if (typeof process !== 'undefined' && process?.env?.['DEBUG'] === 'true') { + console.log(`OpenAI:DEBUG:${action}`, ...args); + } +} +exports.debug = debug; +/** + * https://stackoverflow.com/a/2117523 + */ +const uuid4 = () => { + return 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g, (c) => { + const r = (Math.random() * 16) | 0; + const v = c === 'x' ? r : (r & 0x3) | 0x8; + return v.toString(16); + }); +}; +const isRunningInBrowser = () => { + return ( + // @ts-ignore + typeof window !== 'undefined' && + // @ts-ignore + typeof window.document !== 'undefined' && + // @ts-ignore + typeof navigator !== 'undefined'); +}; +exports.isRunningInBrowser = isRunningInBrowser; +const isHeadersProtocol = (headers) => { + return typeof headers?.get === 'function'; +}; +exports.isHeadersProtocol = isHeadersProtocol; +const getRequiredHeader = (headers, header) => { + const foundHeader = (0, exports.getHeader)(headers, header); + if (foundHeader === undefined) { + throw new Error(`Could not find ${header} header`); + } + return foundHeader; +}; +exports.getRequiredHeader = getRequiredHeader; +const getHeader = (headers, header) => { + const lowerCasedHeader = header.toLowerCase(); + if ((0, exports.isHeadersProtocol)(headers)) { + // to deal with the case where the header looks like Stainless-Event-Id + const intercapsHeader = header[0]?.toUpperCase() + + header.substring(1).replace(/([^\w])(\w)/g, (_m, g1, g2) => g1 + g2.toUpperCase()); + for (const key of [header, lowerCasedHeader, header.toUpperCase(), intercapsHeader]) { + const value = headers.get(key); + if (value) { + return value; + } + } + } + for (const [key, value] of Object.entries(headers)) { + if (key.toLowerCase() === lowerCasedHeader) { + if (Array.isArray(value)) { + if (value.length <= 1) + return value[0]; + console.warn(`Received ${value.length} entries for the ${header} header, using the first entry.`); + return value[0]; + } + return value; + } + } + return undefined; +}; +exports.getHeader = getHeader; +/** + * Encodes a string to Base64 format. 
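+ * Uses Node's `Buffer` when available and falls back to `btoa` in browser-like runtimes.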
+ */ +const toBase64 = (str) => { + if (!str) + return ''; + if (typeof Buffer !== 'undefined') { + return Buffer.from(str).toString('base64'); + } + if (typeof btoa !== 'undefined') { + return btoa(str); + } + throw new error_1.OpenAIError('Cannot generate b64 string; Expected `Buffer` or `btoa` to be defined'); +}; +exports.toBase64 = toBase64; +function isObj(obj) { + return obj != null && typeof obj === 'object' && !Array.isArray(obj); +} +exports.isObj = isObj; +//# sourceMappingURL=core.js.map \ No newline at end of file diff --git a/node_modules/openai/core.js.map b/node_modules/openai/core.js.map new file mode 100644 index 0000000..dcdae6d --- /dev/null +++ b/node_modules/openai/core.js.map @@ -0,0 +1 @@ +{"version":3,"file":"core.js","sourceRoot":"","sources":["src/core.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;AAAA,0CAAoC;AACpC,8CAAqC;AACrC,sCAMiB;AACjB,6CAUwB;AAExB,0CAAkE;AAClE,wCAKmB;AAJjB,2HAAA,gCAAgC,OAAA;AAChC,sHAAA,2BAA2B,OAAA;AAC3B,qGAAA,UAAU,OAAA;AAcZ,KAAK,UAAU,oBAAoB,CAAI,KAAuB;IAC5D,MAAM,EAAE,QAAQ,EAAE,GAAG,KAAK,CAAC;IAC3B,IAAI,KAAK,CAAC,OAAO,CAAC,MAAM,EAAE;QACxB,KAAK,CAAC,UAAU,EAAE,QAAQ,CAAC,MAAM,EAAE,QAAQ,CAAC,GAAG,EAAE,QAAQ,CAAC,OAAO,EAAE,QAAQ,CAAC,IAAI,CAAC,CAAC;QAElF,6EAA6E;QAC7E,4EAA4E;QAE5E,IAAI,KAAK,CAAC,OAAO,CAAC,aAAa,EAAE;YAC/B,OAAO,KAAK,CAAC,OAAO,CAAC,aAAa,CAAC,eAAe,CAAC,QAAQ,EAAE,KAAK,CAAC,UAAU,CAAQ,CAAC;SACvF;QAED,OAAO,kBAAM,CAAC,eAAe,CAAC,QAAQ,EAAE,KAAK,CAAC,UAAU,CAAQ,CAAC;KAClE;IAED,8DAA8D;IAC9D,IAAI,QAAQ,CAAC,MAAM,KAAK,GAAG,EAAE;QAC3B,OAAO,IAAwB,CAAC;KACjC;IAED,IAAI,KAAK,CAAC,OAAO,CAAC,gBAAgB,EAAE;QAClC,OAAO,QAAuC,CAAC;KAChD;IAED,MAAM,WAAW,GAAG,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,CAAC;IACzD,MAAM,MAAM,GACV,WAAW,EAAE,QAAQ,CAAC,kBAAkB,CAAC,IAAI,WAAW,EAAE,QAAQ,CAAC,0BAA0B,CAAC,CAAC;IACjG,IAAI,MAAM,EAAE;QACV,MAAM,IAAI,GAAG,MAAM,QAAQ,CAAC,IAAI,EAAE,CAAC;QAEnC,KAAK,CAAC,UAAU,EAAE,QAAQ,CAAC,MAAM,EAAE,QAAQ,CAAC,GAAG,EAAE,QAAQ,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;QAEzE,OAAO,aAAa,CAAC,IAAI,EAAE,QAAQ,CAAC,CAAC;KACtC;IAED,MAAM,IAAI,GAAG,MAAM,QAAQ,CAAC,IAAI,EAAE,CAAC;IACnC,KAAK,CAAC,UAAU,EAAE,QAAQ,CAAC,MAAM,EAAE,QAAQ,CAAC,GAAG,EAAE,QAAQ,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;IAEzE,2DAA2D;IAC3D,OAAO,IAAmC,CAAC;AAC7C,CAAC;AAOD,SAAS,aAAa,CAAI,KAAQ,EAAE,QAAkB;IACpD,IAAI,CAAC,KAAK,IAAI,OAAO,KAAK,KAAK,QAAQ,IAAI,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC,EAAE;QAC/D,OAAO,KAAyB,CAAC;KAClC;IAED,OAAO,MAAM,CAAC,cAAc,CAAC,KAAK,EAAE,aAAa,EAAE;QACjD,KAAK,EAAE,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC;QAC3C,UAAU,EAAE,KAAK;KAClB,CAAqB,CAAC;AACzB,CAAC;AAED;;;GAGG;AACH,MAAa,UAAc,SAAQ,OAAyB;IAG1D,YACU,eAA0C,EAC1C,gBAEgC,oBAAoB;QAE5D,KAAK,CAAC,CAAC,OAAO,EAAE,EAAE;YAChB,yEAAyE;YACzE,0EAA0E;YAC1E,wBAAwB;YACxB,OAAO,CAAC,IAAW,CAAC,CAAC;QACvB,CAAC,CAAC,CAAC;QAVK,oBAAe,GAAf,eAAe,CAA2B;QAC1C,kBAAa,GAAb,aAAa,CAEuC;IAQ9D,CAAC;IAED,WAAW,CAAI,SAAkD;QAC/D,OAAO,IAAI,UAAU,CAAC,IAAI,CAAC,eAAe,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE,CAC1D,aAAa,CAAC,SAAS,CAAC,MAAM,IAAI,CAAC,aAAa,CAAC,KAAK,CAAC,EAAE,KAAK,CAAC,EAAE,KAAK,CAAC,QAAQ,CAAC,CACjF,CAAC;IACJ,CAAC;IAED;;;;;;;;;;;;OAYG;IACH,UAAU;QACR,OAAO,IAAI,CAAC,eAAe,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC;IACtD,CAAC;IAED;;;;;;;;;;;;;;OAcG;IACH,KAAK,CAAC,YAAY;QAChB,MAAM,CAAC,IAAI,EAAE,QAAQ,CAAC,GAAG,MAAM,OAAO,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,KAAK,EAAE,EAAE,IAAI,CAAC,UAAU,EAAE,CAAC,CAAC,CAAC;QAC9E,OAAO,EAAE,IAAI,EAAE,QAAQ,EAAE,UAAU,EAAE,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,EAAE,CAAC;IAC9E,CAAC;IAEO,KAAK;QACX,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE;YACvB,IAAI,CAAC,aAAa,GAAG,IAAI,CAAC,eAAe,CAAC,IAAI,CAAC,IAAI,CAAC,aAAa,CAAqC,CAAC;SACxG;QACD,OAAO,IAAI,CAAC,aAAa,CAAC;IAC5B,CAAC;IAEQ,IAAI,CA
CX,WAAgG,EAChG,UAAmF;QAEnF,OAAO,IAAI,CAAC,KAAK,EAAE,CAAC,IAAI,CAAC,WAAW,EAAE,UAAU,CAAC,CAAC;IACpD,CAAC;IAEQ,KAAK,CACZ,UAAiF;QAEjF,OAAO,IAAI,CAAC,KAAK,EAAE,CAAC,KAAK,CAAC,UAAU,CAAC,CAAC;IACxC,CAAC;IAEQ,OAAO,CAAC,SAA2C;QAC1D,OAAO,IAAI,CAAC,KAAK,EAAE,CAAC,OAAO,CAAC,SAAS,CAAC,CAAC;IACzC,CAAC;CACF;AAnFD,gCAmFC;AAED,MAAsB,SAAS;IAS7B,YAAY,EACV,OAAO,EACP,UAAU,GAAG,CAAC,EACd,OAAO,GAAG,MAAM,EAAE,aAAa;IAC/B,SAAS,EACT,KAAK,EAAE,eAAe,GAOvB;QACC,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC;QACvB,IAAI,CAAC,UAAU,GAAG,uBAAuB,CAAC,YAAY,EAAE,UAAU,CAAC,CAAC;QACpE,IAAI,CAAC,OAAO,GAAG,uBAAuB,CAAC,SAAS,EAAE,OAAO,CAAC,CAAC;QAC3D,IAAI,CAAC,SAAS,GAAG,SAAS,CAAC;QAE3B,IAAI,CAAC,KAAK,GAAG,eAAe,IAAI,aAAK,CAAC;IACxC,CAAC;IAES,WAAW,CAAC,IAAyB;QAC7C,OAAO,EAAE,CAAC;IACZ,CAAC;IAED;;;;;;;OAOG;IACO,cAAc,CAAC,IAAyB;QAChD,OAAO;YACL,MAAM,EAAE,kBAAkB;YAC1B,cAAc,EAAE,kBAAkB;YAClC,YAAY,EAAE,IAAI,CAAC,YAAY,EAAE;YACjC,GAAG,kBAAkB,EAAE;YACvB,GAAG,IAAI,CAAC,WAAW,CAAC,IAAI,CAAC;SAC1B,CAAC;IACJ,CAAC;IAID;;OAEG;IACO,eAAe,CAAC,OAAgB,EAAE,aAAsB,IAAG,CAAC;IAE5D,qBAAqB;QAC7B,OAAO,wBAAwB,KAAK,EAAE,EAAE,CAAC;IAC3C,CAAC;IAED,GAAG,CAAW,IAAY,EAAE,IAA0C;QACpE,OAAO,IAAI,CAAC,aAAa,CAAC,KAAK,EAAE,IAAI,EAAE,IAAI,CAAC,CAAC;IAC/C,CAAC;IAED,IAAI,CAAW,IAAY,EAAE,IAA0C;QACrE,OAAO,IAAI,CAAC,aAAa,CAAC,MAAM,EAAE,IAAI,EAAE,IAAI,CAAC,CAAC;IAChD,CAAC;IAED,KAAK,CAAW,IAAY,EAAE,IAA0C;QACtE,OAAO,IAAI,CAAC,aAAa,CAAC,OAAO,EAAE,IAAI,EAAE,IAAI,CAAC,CAAC;IACjD,CAAC;IAED,GAAG,CAAW,IAAY,EAAE,IAA0C;QACpE,OAAO,IAAI,CAAC,aAAa,CAAC,KAAK,EAAE,IAAI,EAAE,IAAI,CAAC,CAAC;IAC/C,CAAC;IAED,MAAM,CAAW,IAAY,EAAE,IAA0C;QACvE,OAAO,IAAI,CAAC,aAAa,CAAC,QAAQ,EAAE,IAAI,EAAE,IAAI,CAAC,CAAC;IAClD,CAAC;IAEO,aAAa,CACnB,MAAkB,EAClB,IAAY,EACZ,IAA0C;QAE1C,OAAO,IAAI,CAAC,OAAO,CACjB,OAAO,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,IAAI,CAAC,KAAK,EAAE,IAAI,EAAE,EAAE;YACxC,MAAM,IAAI,GACR,IAAI,IAAI,IAAA,oBAAU,EAAC,IAAI,EAAE,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI,QAAQ,CAAC,MAAM,IAAI,CAAC,IAAI,CAAC,WAAW,EAAE,CAAC;gBAC5E,CAAC,CAAC,IAAI,EAAE,IAAI,YAAY,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI;oBAC5C,CAAC,CAAC,IAAI,EAAE,IAAI,YAAY,WAAW,CAAC,CAAC,CAAC,IAAI,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC;wBAC7D,CAAC,CAAC,IAAI,IAAI,WAAW,CAAC,MAAM,CAAC,IAAI,EAAE,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC;4BACzE,CAAC,CAAC,IAAI,EAAE,IAAI,CAAC;YACf,OAAO,EAAE,MAAM,EAAE,IAAI,EAAE,GAAG,IAAI,EAAE,IAAI,EAAE,CAAC;QACzC,CAAC,CAAC,CACH,CAAC;IACJ,CAAC;IAED,UAAU,CACR,IAAY,EACZ,IAAuC,EACvC,IAA0B;QAE1B,OAAO,IAAI,CAAC,cAAc,CAAC,IAAI,EAAE,EAAE,MAAM,EAAE,KAAK,EAAE,IAAI,EAAE,GAAG,IAAI,EAAE,CAAC,CAAC;IACrE,CAAC;IAEO,sBAAsB,CAAC,IAAa;QAC1C,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE;YAC5B,IAAI,OAAO,MAAM,KAAK,WAAW,EAAE;gBACjC,OAAO,MAAM,CAAC,UAAU,CAAC,IAAI,EAAE,MAAM,CAAC,CAAC,QAAQ,EAAE,CAAC;aACnD;YAED,IAAI,OAAO,WAAW,KAAK,WAAW,EAAE;gBACtC,MAAM,OAAO,GAAG,IAAI,WAAW,EAAE,CAAC;gBAClC,MAAM,OAAO,GAAG,OAAO,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC;gBACrC,OAAO,OAAO,CAAC,MAAM,CAAC,QAAQ,EAAE,CAAC;aAClC;SACF;aAAM,IAAI,WAAW,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE;YACnC,OAAO,IAAI,CAAC,UAAU,CAAC,QAAQ,EAAE,CAAC;SACnC;QAED,OAAO,IAAI,CAAC;IACd,CAAC;IAED,YAAY,CACV,OAAiC,EACjC,EAAE,UAAU,GAAG,CAAC,KAA8B,EAAE;QAEhD,MAAM,EAAE,MAAM,EAAE,IAAI,EAAE,KAAK,EAAE,OAAO,EAAE,OAAO,GAAG,EAAE,EAAE,GAAG,OAAO,CAAC;QAE/D,MAAM,IAAI,GACR,WAAW,CAAC,MAAM,CAAC,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,eAAe,IAAI,OAAO,OAAO,CAAC,IAAI,KAAK,QAAQ,CAAC,CAAC,CAAC;YACjG,OAAO,CAAC,IAAI;YACd,CAAC,CAAC,IAAA,yBAAe,EAAC,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,IAAI,CAAC,IAAI;gBACnD,CAAC,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,OAAO,CAAC,IAAI,EAAE,IAAI,EAAE,CAAC,CAAC;oBACtD,CAAC,CAAC,IAAI,CAAC;QACT,MAAM,aAAa,GAAG,IAAI,CAAC,sBAAsB,CAAC,IAAI,CAAC,CAAC;QAE
xD,MAAM,GAAG,GAAG,IAAI,CAAC,QAAQ,CAAC,IAAK,EAAE,KAAK,CAAC,CAAC;QACxC,IAAI,SAAS,IAAI,OAAO;YAAE,uBAAuB,CAAC,SAAS,EAAE,OAAO,CAAC,OAAO,CAAC,CAAC;QAC9E,MAAM,OAAO,GAAG,OAAO,CAAC,OAAO,IAAI,IAAI,CAAC,OAAO,CAAC;QAChD,MAAM,SAAS,GAAG,OAAO,CAAC,SAAS,IAAI,IAAI,CAAC,SAAS,IAAI,IAAA,uBAAe,EAAC,GAAG,CAAC,CAAC;QAC9E,MAAM,eAAe,GAAG,OAAO,GAAG,IAAI,CAAC;QACvC,IACE,OAAQ,SAAiB,EAAE,OAAO,EAAE,OAAO,KAAK,QAAQ;YACxD,eAAe,GAAG,CAAE,SAAiB,CAAC,OAAO,CAAC,OAAO,IAAI,CAAC,CAAC,EAC3D;YACA,mEAAmE;YACnE,qGAAqG;YACrG,mEAAmE;YACnE,2CAA2C;YAC1C,SAAiB,CAAC,OAAO,CAAC,OAAO,GAAG,eAAe,CAAC;SACtD;QAED,IAAI,IAAI,CAAC,iBAAiB,IAAI,MAAM,KAAK,KAAK,EAAE;YAC9C,IAAI,CAAC,OAAO,CAAC,cAAc;gBAAE,OAAO,CAAC,cAAc,GAAG,IAAI,CAAC,qBAAqB,EAAE,CAAC;YACnF,OAAO,CAAC,IAAI,CAAC,iBAAiB,CAAC,GAAG,OAAO,CAAC,cAAc,CAAC;SAC1D;QAED,MAAM,UAAU,GAAG,IAAI,CAAC,YAAY,CAAC,EAAE,OAAO,EAAE,OAAO,EAAE,aAAa,EAAE,UAAU,EAAE,CAAC,CAAC;QAEtF,MAAM,GAAG,GAAgB;YACvB,MAAM;YACN,GAAG,CAAC,IAAI,IAAI,EAAE,IAAI,EAAE,IAAW,EAAE,CAAC;YAClC,OAAO,EAAE,UAAU;YACnB,GAAG,CAAC,SAAS,IAAI,EAAE,KAAK,EAAE,SAAS,EAAE,CAAC;YACtC,+DAA+D;YAC/D,yCAAyC;YACzC,MAAM,EAAE,OAAO,CAAC,MAAM,IAAI,IAAI;SAC/B,CAAC;QAEF,OAAO,EAAE,GAAG,EAAE,GAAG,EAAE,OAAO,EAAE,CAAC;IAC/B,CAAC;IAEO,YAAY,CAAC,EACnB,OAAO,EACP,OAAO,EACP,aAAa,EACb,UAAU,GAMX;QACC,MAAM,UAAU,GAA2B,EAAE,CAAC;QAC9C,IAAI,aAAa,EAAE;YACjB,UAAU,CAAC,gBAAgB,CAAC,GAAG,aAAa,CAAC;SAC9C;QAED,MAAM,cAAc,GAAG,IAAI,CAAC,cAAc,CAAC,OAAO,CAAC,CAAC;QACpD,eAAe,CAAC,UAAU,EAAE,cAAc,CAAC,CAAC;QAC5C,eAAe,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;QAErC,8DAA8D;QAC9D,IAAI,IAAA,yBAAe,EAAC,OAAO,CAAC,IAAI,CAAC,IAAI,YAAS,KAAK,MAAM,EAAE;YACzD,OAAO,UAAU,CAAC,cAAc,CAAC,CAAC;SACnC;QAED,sGAAsG;QACtG,uGAAuG;QACvG,gCAAgC;QAChC,IACE,IAAA,iBAAS,EAAC,cAAc,EAAE,yBAAyB,CAAC,KAAK,SAAS;YAClE,IAAA,iBAAS,EAAC,OAAO,EAAE,yBAAyB,CAAC,KAAK,SAAS,EAC3D;YACA,UAAU,CAAC,yBAAyB,CAAC,GAAG,MAAM,CAAC,UAAU,CAAC,CAAC;SAC5D;QAED,IAAI,CAAC,eAAe,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;QAE1C,OAAO,UAAU,CAAC;IACpB,CAAC;IAED;;OAEG;IACO,KAAK,CAAC,cAAc,CAAC,OAA4B,IAAkB,CAAC;IAE9E;;;;;OAKG;IACO,KAAK,CAAC,cAAc,CAC5B,OAAoB,EACpB,EAAE,GAAG,EAAE,OAAO,EAAiD,IAC/C,CAAC;IAET,YAAY,CAAC,OAAuC;QAC5D,OAAO,CACL,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE;YACb,CAAC,CAAC,MAAM,CAAC,QAAQ,IAAI,OAAO,CAAC,CAAC;gBAC5B,MAAM,CAAC,WAAW,CAAC,KAAK,CAAC,IAAI,CAAC,OAA6B,CAAC,CAAC,GAAG,CAAC,CAAC,MAAM,EAAE,EAAE,CAAC,CAAC,GAAG,MAAM,CAAC,CAAC,CAAC;gBAC5F,CAAC,CAAC,EAAE,GAAG,OAAO,EAAE,CACjB,CAAC;IACJ,CAAC;IAES,eAAe,CACvB,MAA0B,EAC1B,KAAyB,EACzB,OAA2B,EAC3B,OAA4B;QAE5B,OAAO,gBAAQ,CAAC,QAAQ,CAAC,MAAM,EAAE,KAAK,EAAE,OAAO,EAAE,OAAO,CAAC,CAAC;IAC5D,CAAC;IAED,OAAO,CACL,OAAiD,EACjD,mBAAkC,IAAI;QAEtC,OAAO,IAAI,UAAU,CAAC,IAAI,CAAC,WAAW,CAAC,OAAO,EAAE,gBAAgB,CAAC,CAAC,CAAC;IACrE,CAAC;IAEO,KAAK,CAAC,WAAW,CACvB,YAAsD,EACtD,gBAA+B;QAE/B,MAAM,OAAO,GAAG,MAAM,YAAY,CAAC;QACnC,MAAM,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,IAAI,CAAC,UAAU,CAAC;QACzD,IAAI,gBAAgB,IAAI,IAAI,EAAE;YAC5B,gBAAgB,GAAG,UAAU,CAAC;SAC/B;QAED,MAAM,IAAI,CAAC,cAAc,CAAC,OAAO,CAAC,CAAC;QAEnC,MAAM,EAAE,GAAG,EAAE,GAAG,EAAE,OAAO,EAAE,GAAG,IAAI,CAAC,YAAY,CAAC,OAAO,EAAE,EAAE,UAAU,EAAE,UAAU,GAAG,gBAAgB,EAAE,CAAC,CAAC;QAExG,MAAM,IAAI,CAAC,cAAc,CAAC,GAAG,EAAE,EAAE,GAAG,EAAE,OAAO,EAAE,CAAC,CAAC;QAEjD,KAAK,CAAC,SAAS,EAAE,GAAG,EAAE,OAAO,EAAE,GAAG,CAAC,OAAO,CAAC,CAAC;QAE5C,IAAI,OAAO,CAAC,MAAM,EAAE,OAAO,EAAE;YAC3B,MAAM,IAAI,yBAAiB,EAAE,CAAC;SAC/B;QAED,MAAM,UAAU,GAAG,IAAI,eAAe,EAAE,CAAC;QACzC,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,gBAAgB,CAAC,GAAG,EAAE,GAAG,EAAE,OAAO,EAAE,UAAU,CAAC,CAAC,KAAK,CAAC,mBAAW,CAAC,CAAC;QAE/F,IAAI,QAAQ,YAAY,KAAK,EAAE;YAC7B,IAAI,OAAO,CAAC,MAAM,EAAE,OAAO,EAAE;gBAC3B,MAAM,IAAI,yBAAiB,EAAE,CAAC;aAC/B;YACD,IAAI,gBAAgB,EAAE;gBACpB,OAAO,IAAI,CAAC,YAAY,CAAC,OAAO,EAAE,
gBAAgB,CAAC,CAAC;aACrD;YACD,IAAI,QAAQ,CAAC,IAAI,KAAK,YAAY,EAAE;gBAClC,MAAM,IAAI,iCAAyB,EAAE,CAAC;aACvC;YACD,MAAM,IAAI,0BAAkB,CAAC,EAAE,KAAK,EAAE,QAAQ,EAAE,CAAC,CAAC;SACnD;QAED,MAAM,eAAe,GAAG,IAAA,6BAAqB,EAAC,QAAQ,CAAC,OAAO,CAAC,CAAC;QAEhE,IAAI,CAAC,QAAQ,CAAC,EAAE,EAAE;YAChB,IAAI,gBAAgB,IAAI,IAAI,CAAC,WAAW,CAAC,QAAQ,CAAC,EAAE;gBAClD,MAAM,YAAY,GAAG,aAAa,gBAAgB,qBAAqB,CAAC;gBACxE,KAAK,CAAC,oBAAoB,YAAY,GAAG,EAAE,QAAQ,CAAC,MAAM,EAAE,GAAG,EAAE,eAAe,CAAC,CAAC;gBAClF,OAAO,IAAI,CAAC,YAAY,CAAC,OAAO,EAAE,gBAAgB,EAAE,eAAe,CAAC,CAAC;aACtE;YAED,MAAM,OAAO,GAAG,MAAM,QAAQ,CAAC,IAAI,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,IAAA,mBAAW,EAAC,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC;YAC3E,MAAM,OAAO,GAAG,IAAA,gBAAQ,EAAC,OAAO,CAAC,CAAC;YAClC,MAAM,UAAU,GAAG,OAAO,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,OAAO,CAAC;YACjD,MAAM,YAAY,GAAG,gBAAgB,CAAC,CAAC,CAAC,+BAA+B,CAAC,CAAC,CAAC,wBAAwB,CAAC;YAEnG,KAAK,CAAC,oBAAoB,YAAY,GAAG,EAAE,QAAQ,CAAC,MAAM,EAAE,GAAG,EAAE,eAAe,EAAE,UAAU,CAAC,CAAC;YAE9F,MAAM,GAAG,GAAG,IAAI,CAAC,eAAe,CAAC,QAAQ,CAAC,MAAM,EAAE,OAAO,EAAE,UAAU,EAAE,eAAe,CAAC,CAAC;YACxF,MAAM,GAAG,CAAC;SACX;QAED,OAAO,EAAE,QAAQ,EAAE,OAAO,EAAE,UAAU,EAAE,CAAC;IAC3C,CAAC;IAED,cAAc,CACZ,IAA4E,EAC5E,OAA4B;QAE5B,MAAM,OAAO,GAAG,IAAI,CAAC,WAAW,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;QAChD,OAAO,IAAI,WAAW,CAAkB,IAAI,EAAE,OAAO,EAAE,IAAI,CAAC,CAAC;IAC/D,CAAC;IAED,QAAQ,CAAM,IAAY,EAAE,KAA6B;QACvD,MAAM,GAAG,GACP,aAAa,CAAC,IAAI,CAAC,CAAC,CAAC;YACnB,IAAI,GAAG,CAAC,IAAI,CAAC;YACf,CAAC,CAAC,IAAI,GAAG,CAAC,IAAI,CAAC,OAAO,GAAG,CAAC,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,GAAG,CAAC,IAAI,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC;QAExG,MAAM,YAAY,GAAG,IAAI,CAAC,YAAY,EAAE,CAAC;QACzC,IAAI,CAAC,UAAU,CAAC,YAAY,CAAC,EAAE;YAC7B,KAAK,GAAG,EAAE,GAAG,YAAY,EAAE,GAAG,KAAK,EAAS,CAAC;SAC9C;QAED,IAAI,OAAO,KAAK,KAAK,QAAQ,IAAI,KAAK,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC,EAAE;YAC/D,GAAG,CAAC,MAAM,GAAG,IAAI,CAAC,cAAc,CAAC,KAAgC,CAAC,CAAC;SACpE;QAED,OAAO,GAAG,CAAC,QAAQ,EAAE,CAAC;IACxB,CAAC;IAES,cAAc,CAAC,KAA8B;QACrD,OAAO,MAAM,CAAC,OAAO,CAAC,KAAK,CAAC;aACzB,MAAM,CAAC,CAAC,CAAC,CAAC,EAAE,KAAK,CAAC,EAAE,EAAE,CAAC,OAAO,KAAK,KAAK,WAAW,CAAC;aACpD,GAAG,CAAC,CAAC,CAAC,GAAG,EAAE,KAAK,CAAC,EAAE,EAAE;YACpB,IAAI,OAAO,KAAK,KAAK,QAAQ,IAAI,OAAO,KAAK,KAAK,QAAQ,IAAI,OAAO,KAAK,KAAK,SAAS,EAAE;gBACxF,OAAO,GAAG,kBAAkB,CAAC,GAAG,CAAC,IAAI,kBAAkB,CAAC,KAAK,CAAC,EAAE,CAAC;aAClE;YACD,IAAI,KAAK,KAAK,IAAI,EAAE;gBAClB,OAAO,GAAG,kBAAkB,CAAC,GAAG,CAAC,GAAG,CAAC;aACtC;YACD,MAAM,IAAI,mBAAW,CACnB,yBAAyB,OAAO,KAAK,mQAAmQ,CACzS,CAAC;QACJ,CAAC,CAAC;aACD,IAAI,CAAC,GAAG,CAAC,CAAC;IACf,CAAC;IAED,KAAK,CAAC,gBAAgB,CACpB,GAAgB,EAChB,IAA6B,EAC7B,EAAU,EACV,UAA2B;QAE3B,MAAM,EAAE,MAAM,EAAE,GAAG,OAAO,EAAE,GAAG,IAAI,IAAI,EAAE,CAAC;QAC1C,IAAI,MAAM;YAAE,MAAM,CAAC,gBAAgB,CAAC,OAAO,EAAE,GAAG,EAAE,CAAC,UAAU,CAAC,KAAK,EAAE,CAAC,CAAC;QAEvE,MAAM,OAAO,GAAG,UAAU,CAAC,GAAG,EAAE,CAAC,UAAU,CAAC,KAAK,EAAE,EAAE,EAAE,CAAC,CAAC;QAEzD,OAAO;QACL,4FAA4F;QAC5F,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,SAAS,EAAE,GAAG,EAAE,EAAE,MAAM,EAAE,UAAU,CAAC,MAAa,EAAE,GAAG,OAAO,EAAE,CAAC,CAAC,OAAO,CAAC,GAAG,EAAE;YAC7F,YAAY,CAAC,OAAO,CAAC,CAAC;QACxB,CAAC,CAAC,CACH,CAAC;IACJ,CAAC;IAEO,WAAW,CAAC,QAAkB;QACpC,sCAAsC;QACtC,MAAM,iBAAiB,GAAG,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,CAAC;QAEjE,+DAA+D;QAC/D,IAAI,iBAAiB,KAAK,MAAM;YAAE,OAAO,IAAI,CAAC;QAC9C,IAAI,iBAAiB,KAAK,OAAO;YAAE,OAAO,KAAK,CAAC;QAEhD,6BAA6B;QAC7B,IAAI,QAAQ,CAAC,MAAM,KAAK,GAAG;YAAE,OAAO,IAAI,CAAC;QAEzC,0BAA0B;QAC1B,IAAI,QAAQ,CAAC,MAAM,KAAK,GAAG;YAAE,OAAO,IAAI,CAAC;QAEzC,wBAAwB;QACxB,IAAI,QAAQ,CAAC,MAAM,KAAK,GAAG;YAAE,OAAO,IAAI,CAAC;QAEzC,yBAAyB;QACzB,IAAI,QAAQ,CAAC,M
AAM,IAAI,GAAG;YAAE,OAAO,IAAI,CAAC;QAExC,OAAO,KAAK,CAAC;IACf,CAAC;IAEO,KAAK,CAAC,YAAY,CACxB,OAA4B,EAC5B,gBAAwB,EACxB,eAAqC;QAErC,IAAI,aAAiC,CAAC;QAEtC,mHAAmH;QACnH,MAAM,sBAAsB,GAAG,eAAe,EAAE,CAAC,gBAAgB,CAAC,CAAC;QACnE,IAAI,sBAAsB,EAAE;YAC1B,MAAM,SAAS,GAAG,UAAU,CAAC,sBAAsB,CAAC,CAAC;YACrD,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,SAAS,CAAC,EAAE;gBAC5B,aAAa,GAAG,SAAS,CAAC;aAC3B;SACF;QAED,sGAAsG;QACtG,MAAM,gBAAgB,GAAG,eAAe,EAAE,CAAC,aAAa,CAAC,CAAC;QAC1D,IAAI,gBAAgB,IAAI,CAAC,aAAa,EAAE;YACtC,MAAM,cAAc,GAAG,UAAU,CAAC,gBAAgB,CAAC,CAAC;YACpD,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,cAAc,CAAC,EAAE;gBACjC,aAAa,GAAG,cAAc,GAAG,IAAI,CAAC;aACvC;iBAAM;gBACL,aAAa,GAAG,IAAI,CAAC,KAAK,CAAC,gBAAgB,CAAC,GAAG,IAAI,CAAC,GAAG,EAAE,CAAC;aAC3D;SACF;QAED,sFAAsF;QACtF,0DAA0D;QAC1D,IAAI,CAAC,CAAC,aAAa,IAAI,CAAC,IAAI,aAAa,IAAI,aAAa,GAAG,EAAE,GAAG,IAAI,CAAC,EAAE;YACvE,MAAM,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,IAAI,CAAC,UAAU,CAAC;YACzD,aAAa,GAAG,IAAI,CAAC,kCAAkC,CAAC,gBAAgB,EAAE,UAAU,CAAC,CAAC;SACvF;QACD,MAAM,IAAA,aAAK,EAAC,aAAa,CAAC,CAAC;QAE3B,OAAO,IAAI,CAAC,WAAW,CAAC,OAAO,EAAE,gBAAgB,GAAG,CAAC,CAAC,CAAC;IACzD,CAAC;IAEO,kCAAkC,CAAC,gBAAwB,EAAE,UAAkB;QACrF,MAAM,iBAAiB,GAAG,GAAG,CAAC;QAC9B,MAAM,aAAa,GAAG,GAAG,CAAC;QAE1B,MAAM,UAAU,GAAG,UAAU,GAAG,gBAAgB,CAAC;QAEjD,wDAAwD;QACxD,MAAM,YAAY,GAAG,IAAI,CAAC,GAAG,CAAC,iBAAiB,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,UAAU,CAAC,EAAE,aAAa,CAAC,CAAC;QAE1F,sEAAsE;QACtE,MAAM,MAAM,GAAG,CAAC,GAAG,IAAI,CAAC,MAAM,EAAE,GAAG,IAAI,CAAC;QAExC,OAAO,YAAY,GAAG,MAAM,GAAG,IAAI,CAAC;IACtC,CAAC;IAEO,YAAY;QAClB,OAAO,GAAG,IAAI,CAAC,WAAW,CAAC,IAAI,OAAO,iBAAO,EAAE,CAAC;IAClD,CAAC;CACF;AA5cD,8BA4cC;AAID,MAAsB,YAAY;IAOhC,YAAY,MAAiB,EAAE,QAAkB,EAAE,IAAa,EAAE,OAA4B;QAN9F,uCAAmB;QAOjB,uBAAA,IAAI,wBAAW,MAAM,MAAA,CAAC;QACtB,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC;QACvB,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;QACzB,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC;IACnB,CAAC;IAUD,WAAW;QACT,MAAM,KAAK,GAAG,IAAI,CAAC,iBAAiB,EAAE,CAAC;QACvC,IAAI,CAAC,KAAK,CAAC,MAAM;YAAE,OAAO,KAAK,CAAC;QAChC,OAAO,IAAI,CAAC,YAAY,EAAE,IAAI,IAAI,CAAC;IACrC,CAAC;IAED,KAAK,CAAC,WAAW;QACf,MAAM,QAAQ,GAAG,IAAI,CAAC,YAAY,EAAE,CAAC;QACrC,IAAI,CAAC,QAAQ,EAAE;YACb,MAAM,IAAI,mBAAW,CACnB,uFAAuF,CACxF,CAAC;SACH;QACD,MAAM,WAAW,GAAG,EAAE,GAAG,IAAI,CAAC,OAAO,EAAE,CAAC;QACxC,IAAI,QAAQ,IAAI,QAAQ,IAAI,OAAO,WAAW,CAAC,KAAK,KAAK,QAAQ,EAAE;YACjE,WAAW,CAAC,KAAK,GAAG,EAAE,GAAG,WAAW,CAAC,KAAK,EAAE,GAAG,QAAQ,CAAC,MAAM,EAAE,CAAC;SAClE;aAAM,IAAI,KAAK,IAAI,QAAQ,EAAE;YAC5B,MAAM,MAAM,GAAG,CAAC,GAAG,MAAM,CAAC,OAAO,CAAC,WAAW,CAAC,KAAK,IAAI,EAAE,CAAC,EAAE,GAAG,QAAQ,CAAC,GAAG,CAAC,YAAY,CAAC,OAAO,EAAE,CAAC,CAAC;YACpG,KAAK,MAAM,CAAC,GAAG,EAAE,KAAK,CAAC,IAAI,MAAM,EAAE;gBACjC,QAAQ,CAAC,GAAG,CAAC,YAAY,CAAC,GAAG,CAAC,GAAG,EAAE,KAAY,CAAC,CAAC;aAClD;YACD,WAAW,CAAC,KAAK,GAAG,SAAS,CAAC;YAC9B,WAAW,CAAC,IAAI,GAAG,QAAQ,CAAC,GAAG,CAAC,QAAQ,EAAE,CAAC;SAC5C;QACD,OAAO,MAAM,uBAAA,IAAI,4BAAQ,CAAC,cAAc,CAAC,IAAI,CAAC,WAAkB,EAAE,WAAW,CAAC,CAAC;IACjF,CAAC;IAED,KAAK,CAAC,CAAC,SAAS;QACd,4DAA4D;QAC5D,IAAI,IAAI,GAAS,IAAI,CAAC;QACtB,MAAM,IAAI,CAAC;QACX,OAAO,IAAI,CAAC,WAAW,EAAE,EAAE;YACzB,IAAI,GAAG,MAAM,IAAI,CAAC,WAAW,EAAE,CAAC;YAChC,MAAM,IAAI,CAAC;SACZ;IACH,CAAC;IAED,KAAK,CAAC,CAAC,wCAAC,MAAM,CAAC,aAAa,EAAC;QAC3B,IAAI,KAAK,EAAE,MAAM,IAAI,IAAI,IAAI,CAAC,SAAS,EAAE,EAAE;YACzC,KAAK,MAAM,IAAI,IAAI,IAAI,CAAC,iBAAiB,EAAE,EAAE;gBAC3C,MAAM,IAAI,CAAC;aACZ;SACF;IACH,CAAC;CACF;AAlED,oCAkEC;AAED;;;;;;;;GAQG;AACH,MAAa,WAIX,SAAQ,UAAqB;IAG7B,YACE,MAAiB,EACjB,OAAkC,EAClC,IAA4E;QAE5E,KAAK,CACH,OAAO,EACP,KAAK,EAAE,KAAK,EAAE,EAAE,CACd,IAAI,IAAI,CACN,MAAM,EACN,KAAK,CAAC,QAAQ,EACd,MAAM,oBAAoB,CAAC,KAAK,CAAC,EACjC,KAAK,CAAC,OAAO,CACc,CAChC,CAAC;IACJ,CAAC;IAED;;;;;;OAMG;IACH,KAAK,CAAC,CAAC,CAAC,M
AAM,CAAC,aAAa,CAAC;QAC3B,MAAM,IAAI,GAAG,MAAM,IAAI,CAAC;QACxB,IAAI,KAAK,EAAE,MAAM,IAAI,IAAI,IAAI,EAAE;YAC7B,MAAM,IAAI,CAAC;SACZ;IACH,CAAC;CACF;AArCD,kCAqCC;AAEM,MAAM,qBAAqB,GAAG,CACnC,OAA8C,EACtB,EAAE;IAC1B,OAAO,IAAI,KAAK,CACd,MAAM,CAAC,WAAW;IAChB,aAAa;IACb,OAAO,CAAC,OAAO,EAAE,CAClB,EACD;QACE,GAAG,CAAC,MAAM,EAAE,IAAI;YACd,MAAM,GAAG,GAAG,IAAI,CAAC,QAAQ,EAAE,CAAC;YAC5B,OAAO,MAAM,CAAC,GAAG,CAAC,WAAW,EAAE,CAAC,IAAI,MAAM,CAAC,GAAG,CAAC,CAAC;QAClD,CAAC;KACF,CACF,CAAC;AACJ,CAAC,CAAC;AAfW,QAAA,qBAAqB,yBAehC;AA8BF,yFAAyF;AACzF,qFAAqF;AACrF,wEAAwE;AACxE,MAAM,kBAAkB,GAA6B;IACnD,MAAM,EAAE,IAAI;IACZ,IAAI,EAAE,IAAI;IACV,KAAK,EAAE,IAAI;IACX,IAAI,EAAE,IAAI;IACV,OAAO,EAAE,IAAI;IAEb,UAAU,EAAE,IAAI;IAChB,MAAM,EAAE,IAAI;IACZ,OAAO,EAAE,IAAI;IACb,SAAS,EAAE,IAAI;IACf,MAAM,EAAE,IAAI;IACZ,cAAc,EAAE,IAAI;IAEpB,eAAe,EAAE,IAAI;IACrB,gBAAgB,EAAE,IAAI;IACtB,aAAa,EAAE,IAAI;CACpB,CAAC;AAEK,MAAM,gBAAgB,GAAG,CAAC,GAAY,EAAyB,EAAE;IACtE,OAAO,CACL,OAAO,GAAG,KAAK,QAAQ;QACvB,GAAG,KAAK,IAAI;QACZ,CAAC,UAAU,CAAC,GAAG,CAAC;QAChB,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,MAAM,CAAC,kBAAkB,EAAE,CAAC,CAAC,CAAC,CAC7D,CAAC;AACJ,CAAC,CAAC;AAPW,QAAA,gBAAgB,oBAO3B;AA8BF,MAAM,qBAAqB,GAAG,GAAuB,EAAE;IACrD,IAAI,OAAO,IAAI,KAAK,WAAW,IAAI,IAAI,CAAC,KAAK,IAAI,IAAI,EAAE;QACrD,OAAO;YACL,kBAAkB,EAAE,IAAI;YACxB,6BAA6B,EAAE,iBAAO;YACtC,gBAAgB,EAAE,iBAAiB,CAAC,IAAI,CAAC,KAAK,CAAC,EAAE,CAAC;YAClD,kBAAkB,EAAE,aAAa,CAAC,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC;YAClD,qBAAqB,EAAE,MAAM;YAC7B,6BAA6B,EAC3B,OAAO,IAAI,CAAC,OAAO,KAAK,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,IAAI,CAAC,OAAO,EAAE,IAAI,IAAI,SAAS;SACpF,CAAC;KACH;IACD,IAAI,OAAO,WAAW,KAAK,WAAW,EAAE;QACtC,OAAO;YACL,kBAAkB,EAAE,IAAI;YACxB,6BAA6B,EAAE,iBAAO;YACtC,gBAAgB,EAAE,SAAS;YAC3B,kBAAkB,EAAE,SAAS,WAAW,EAAE;YAC1C,qBAAqB,EAAE,MAAM;YAC7B,6BAA6B,EAAE,OAAO,CAAC,OAAO;SAC/C,CAAC;KACH;IACD,mBAAmB;IACnB,IAAI,MAAM,CAAC,SAAS,CAAC,QAAQ,CAAC,IAAI,CAAC,OAAO,OAAO,KAAK,WAAW,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,kBAAkB,EAAE;QACvG,OAAO;YACL,kBAAkB,EAAE,IAAI;YACxB,6BAA6B,EAAE,iBAAO;YACtC,gBAAgB,EAAE,iBAAiB,CAAC,OAAO,CAAC,QAAQ,CAAC;YACrD,kBAAkB,EAAE,aAAa,CAAC,OAAO,CAAC,IAAI,CAAC;YAC/C,qBAAqB,EAAE,MAAM;YAC7B,6BAA6B,EAAE,OAAO,CAAC,OAAO;SAC/C,CAAC;KACH;IAED,MAAM,WAAW,GAAG,cAAc,EAAE,CAAC;IACrC,IAAI,WAAW,EAAE;QACf,OAAO;YACL,kBAAkB,EAAE,IAAI;YACxB,6BAA6B,EAAE,iBAAO;YACtC,gBAAgB,EAAE,SAAS;YAC3B,kBAAkB,EAAE,SAAS;YAC7B,qBAAqB,EAAE,WAAW,WAAW,CAAC,OAAO,EAAE;YACvD,6BAA6B,EAAE,WAAW,CAAC,OAAO;SACnD,CAAC;KACH;IAED,gDAAgD;IAChD,OAAO;QACL,kBAAkB,EAAE,IAAI;QACxB,6BAA6B,EAAE,iBAAO;QACtC,gBAAgB,EAAE,SAAS;QAC3B,kBAAkB,EAAE,SAAS;QAC7B,qBAAqB,EAAE,SAAS;QAChC,6BAA6B,EAAE,SAAS;KACzC,CAAC;AACJ,CAAC,CAAC;AASF,8IAA8I;AAC9I,SAAS,cAAc;IACrB,IAAI,OAAO,SAAS,KAAK,WAAW,IAAI,CAAC,SAAS,EAAE;QAClD,OAAO,IAAI,CAAC;KACb;IAED,gCAAgC;IAChC,MAAM,eAAe,GAAG;QACtB,EAAE,GAAG,EAAE,MAAe,EAAE,OAAO,EAAE,sCAAsC,EAAE;QACzE,EAAE,GAAG,EAAE,IAAa,EAAE,OAAO,EAAE,sCAAsC,EAAE;QACvE,EAAE,GAAG,EAAE,IAAa,EAAE,OAAO,EAAE,4CAA4C,EAAE;QAC7E,EAAE,GAAG,EAAE,QAAiB,EAAE,OAAO,EAAE,wCAAwC,EAAE;QAC7E,EAAE,GAAG,EAAE,SAAkB,EAAE,OAAO,EAAE,yCAAyC,EAAE;QAC/E,EAAE,GAAG,EAAE,QAAiB,EAAE,OAAO,EAAE,mEAAmE,EAAE;KACzG,CAAC;IAEF,kCAAkC;IAClC,KAAK,MAAM,EAAE,GAAG,EAAE,OAAO,EAAE,IAAI,eAAe,EAAE;QAC9C,MAAM,KAAK,GAAG,OAAO,CAAC,IAAI,CAAC,SAAS,CAAC,SAAS,CAAC,CAAC;QAChD,IAAI,KAAK,EAAE;YACT,MAAM,KAAK,GAAG,KAAK,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC;YAC5B,MAAM,KAAK,GAAG,KAAK,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC;YAC5B,MAAM,KAAK,GAAG,KAAK,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC;YAE5B,OAAO,EAAE,OAAO,EAAE,GAAG,EAAE,OAAO,EAAE,GAAG,KAAK,IAAI,KAAK,IAAI,KAAK,EAAE,EAAE,CAAC;SAChE;KACF;IAED,OAAO,IAAI,CAAC;AACd,CAAC;A
AED,MAAM,aAAa,GAAG,CAAC,IAAY,EAAQ,EAAE;IAC3C,aAAa;IACb,oDAAoD;IACpD,aAAa;IACb,mDAAmD;IACnD,IAAI,IAAI,KAAK,KAAK;QAAE,OAAO,KAAK,CAAC;IACjC,IAAI,IAAI,KAAK,QAAQ,IAAI,IAAI,KAAK,KAAK;QAAE,OAAO,KAAK,CAAC;IACtD,IAAI,IAAI,KAAK,KAAK;QAAE,OAAO,KAAK,CAAC;IACjC,IAAI,IAAI,KAAK,SAAS,IAAI,IAAI,KAAK,OAAO;QAAE,OAAO,OAAO,CAAC;IAC3D,IAAI,IAAI;QAAE,OAAO,SAAS,IAAI,EAAE,CAAC;IACjC,OAAO,SAAS,CAAC;AACnB,CAAC,CAAC;AAEF,MAAM,iBAAiB,GAAG,CAAC,QAAgB,EAAgB,EAAE;IAC3D,kBAAkB;IAClB,wDAAwD;IACxD,kBAAkB;IAClB,mDAAmD;IACnD,kDAAkD;IAElD,QAAQ,GAAG,QAAQ,CAAC,WAAW,EAAE,CAAC;IAElC,oDAAoD;IACpD,yDAAyD;IACzD,iDAAiD;IACjD,8EAA8E;IAC9E,IAAI,QAAQ,CAAC,QAAQ,CAAC,KAAK,CAAC;QAAE,OAAO,KAAK,CAAC;IAC3C,IAAI,QAAQ,KAAK,SAAS;QAAE,OAAO,SAAS,CAAC;IAC7C,IAAI,QAAQ,KAAK,QAAQ;QAAE,OAAO,OAAO,CAAC;IAC1C,IAAI,QAAQ,KAAK,OAAO;QAAE,OAAO,SAAS,CAAC;IAC3C,IAAI,QAAQ,KAAK,SAAS;QAAE,OAAO,SAAS,CAAC;IAC7C,IAAI,QAAQ,KAAK,SAAS;QAAE,OAAO,SAAS,CAAC;IAC7C,IAAI,QAAQ,KAAK,OAAO;QAAE,OAAO,OAAO,CAAC;IACzC,IAAI,QAAQ;QAAE,OAAO,SAAS,QAAQ,EAAE,CAAC;IACzC,OAAO,SAAS,CAAC;AACnB,CAAC,CAAC;AAEF,IAAI,gBAAoC,CAAC;AACzC,MAAM,kBAAkB,GAAG,GAAG,EAAE;IAC9B,OAAO,CAAC,gBAAgB,KAAhB,gBAAgB,GAAK,qBAAqB,EAAE,EAAC,CAAC;AACxD,CAAC,CAAC;AAEK,MAAM,QAAQ,GAAG,CAAC,IAAY,EAAE,EAAE;IACvC,IAAI;QACF,OAAO,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;KACzB;IAAC,OAAO,GAAG,EAAE;QACZ,OAAO,SAAS,CAAC;KAClB;AACH,CAAC,CAAC;AANW,QAAA,QAAQ,YAMnB;AAEF,iDAAiD;AACjD,MAAM,sBAAsB,GAAG,sBAAsB,CAAC;AACtD,MAAM,aAAa,GAAG,CAAC,GAAW,EAAW,EAAE;IAC7C,OAAO,sBAAsB,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AAC1C,CAAC,CAAC;AAEK,MAAM,KAAK,GAAG,CAAC,EAAU,EAAE,EAAE,CAAC,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,EAAE,CAAC,UAAU,CAAC,OAAO,EAAE,EAAE,CAAC,CAAC,CAAC;AAA1E,QAAA,KAAK,SAAqE;AAEvF,MAAM,uBAAuB,GAAG,CAAC,IAAY,EAAE,CAAU,EAAU,EAAE;IACnE,IAAI,OAAO,CAAC,KAAK,QAAQ,IAAI,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC,CAAC,EAAE;QACjD,MAAM,IAAI,mBAAW,CAAC,GAAG,IAAI,qBAAqB,CAAC,CAAC;KACrD;IACD,IAAI,CAAC,GAAG,CAAC,EAAE;QACT,MAAM,IAAI,mBAAW,CAAC,GAAG,IAAI,6BAA6B,CAAC,CAAC;KAC7D;IACD,OAAO,CAAC,CAAC;AACX,CAAC,CAAC;AAEK,MAAM,WAAW,GAAG,CAAC,GAAQ,EAAS,EAAE;IAC7C,IAAI,GAAG,YAAY,KAAK;QAAE,OAAO,GAAG,CAAC;IACrC,IAAI,OAAO,GAAG,KAAK,QAAQ,IAAI,GAAG,KAAK,IAAI,EAAE;QAC3C,IAAI;YACF,OAAO,IAAI,KAAK,CAAC,IAAI,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC,CAAC;SACvC;QAAC,MAAM,GAAE;KACX;IACD,OAAO,IAAI,KAAK,CAAC,GAAG,CAAC,CAAC;AACxB,CAAC,CAAC;AARW,QAAA,WAAW,eAQtB;AAEK,MAAM,aAAa,GAAG,CAAI,KAA2B,EAAK,EAAE;IACjE,IAAI,KAAK,IAAI,IAAI;QAAE,MAAM,IAAI,mBAAW,CAAC,6CAA6C,KAAK,WAAW,CAAC,CAAC;IACxG,OAAO,KAAK,CAAC;AACf,CAAC,CAAC;AAHW,QAAA,aAAa,iBAGxB;AAEF;;;;;;GAMG;AACI,MAAM,OAAO,GAAG,CAAC,GAAW,EAAsB,EAAE;IACzD,IAAI,OAAO,OAAO,KAAK,WAAW,EAAE;QAClC,OAAO,OAAO,CAAC,GAAG,EAAE,CAAC,GAAG,CAAC,EAAE,IAAI,EAAE,IAAI,SAAS,CAAC;KAChD;IACD,IAAI,OAAO,IAAI,KAAK,WAAW,EAAE;QAC/B,OAAO,IAAI,CAAC,GAAG,EAAE,GAAG,EAAE,CAAC,GAAG,CAAC,EAAE,IAAI,EAAE,CAAC;KACrC;IACD,OAAO,SAAS,CAAC;AACnB,CAAC,CAAC;AARW,QAAA,OAAO,WAQlB;AAEK,MAAM,aAAa,GAAG,CAAC,KAAc,EAAU,EAAE;IACtD,IAAI,OAAO,KAAK,KAAK,QAAQ;QAAE,OAAO,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;IACxD,IAAI,OAAO,KAAK,KAAK,QAAQ;QAAE,OAAO,QAAQ,CAAC,KAAK,EAAE,EAAE,CAAC,CAAC;IAE1D,MAAM,IAAI,mBAAW,CAAC,oBAAoB,KAAK,WAAW,OAAO,KAAK,iBAAiB,CAAC,CAAC;AAC3F,CAAC,CAAC;AALW,QAAA,aAAa,iBAKxB;AAEK,MAAM,WAAW,GAAG,CAAC,KAAc,EAAU,EAAE;IACpD,IAAI,OAAO,KAAK,KAAK,QAAQ;QAAE,OAAO,KAAK,CAAC;IAC5C,IAAI,OAAO,KAAK,KAAK,QAAQ;QAAE,OAAO,UAAU,CAAC,KAAK,CAAC,CAAC;IAExD,MAAM,IAAI,mBAAW,CAAC,oBAAoB,KAAK,WAAW,OAAO,KAAK,iBAAiB,CAAC,CAAC;AAC3F,CAAC,CAAC;AALW,QAAA,WAAW,eAKtB;AAEK,MAAM,aAAa,GAAG,CAAC,KAAc,EAAW,EAAE;IACvD,IAAI,OAAO,KAAK,KAAK,SAAS;QAAE,OAAO,KAAK,CAAC;IAC7C,IAAI,OAAO,KAAK,KAAK,QAAQ;QAAE,OAAO,KAAK,KAAK,MAAM,CAAC;IACvD,OAAO,OAAO,CAAC,KAAK,CAAC,CAAC;AACxB,CAAC
,CAAC;AAJW,QAAA,aAAa,iBAIxB;AAEK,MAAM,kBAAkB,GAAG,CAAC,KAAc,EAAsB,EAAE;IACvE,IAAI,KAAK,KAAK,SAAS,EAAE;QACvB,OAAO,SAAS,CAAC;KAClB;IACD,OAAO,IAAA,qBAAa,EAAC,KAAK,CAAC,CAAC;AAC9B,CAAC,CAAC;AALW,QAAA,kBAAkB,sBAK7B;AAEK,MAAM,gBAAgB,GAAG,CAAC,KAAc,EAAsB,EAAE;IACrE,IAAI,KAAK,KAAK,SAAS,EAAE;QACvB,OAAO,SAAS,CAAC;KAClB;IACD,OAAO,IAAA,mBAAW,EAAC,KAAK,CAAC,CAAC;AAC5B,CAAC,CAAC;AALW,QAAA,gBAAgB,oBAK3B;AAEK,MAAM,kBAAkB,GAAG,CAAC,KAAc,EAAuB,EAAE;IACxE,IAAI,KAAK,KAAK,SAAS,EAAE;QACvB,OAAO,SAAS,CAAC;KAClB;IACD,OAAO,IAAA,qBAAa,EAAC,KAAK,CAAC,CAAC;AAC9B,CAAC,CAAC;AALW,QAAA,kBAAkB,sBAK7B;AAEF,uCAAuC;AACvC,SAAgB,UAAU,CAAC,GAA8B;IACvD,IAAI,CAAC,GAAG;QAAE,OAAO,IAAI,CAAC;IACtB,KAAK,MAAM,EAAE,IAAI,GAAG;QAAE,OAAO,KAAK,CAAC;IACnC,OAAO,IAAI,CAAC;AACd,CAAC;AAJD,gCAIC;AAED,6DAA6D;AAC7D,SAAgB,MAAM,CAAC,GAAW,EAAE,GAAW;IAC7C,OAAO,MAAM,CAAC,SAAS,CAAC,cAAc,CAAC,IAAI,CAAC,GAAG,EAAE,GAAG,CAAC,CAAC;AACxD,CAAC;AAFD,wBAEC;AAED;;;;;GAKG;AACH,SAAS,eAAe,CAAC,aAAsB,EAAE,UAAmB;IAClE,KAAK,MAAM,CAAC,IAAI,UAAU,EAAE;QAC1B,IAAI,CAAC,MAAM,CAAC,UAAU,EAAE,CAAC,CAAC;YAAE,SAAS;QACrC,MAAM,QAAQ,GAAG,CAAC,CAAC,WAAW,EAAE,CAAC;QACjC,IAAI,CAAC,QAAQ;YAAE,SAAS;QAExB,MAAM,GAAG,GAAG,UAAU,CAAC,CAAC,CAAC,CAAC;QAE1B,IAAI,GAAG,KAAK,IAAI,EAAE;YAChB,OAAO,aAAa,CAAC,QAAQ,CAAC,CAAC;SAChC;aAAM,IAAI,GAAG,KAAK,SAAS,EAAE;YAC5B,aAAa,CAAC,QAAQ,CAAC,GAAG,GAAG,CAAC;SAC/B;KACF;AACH,CAAC;AAED,SAAgB,KAAK,CAAC,MAAc,EAAE,GAAG,IAAW;IAClD,IAAI,OAAO,OAAO,KAAK,WAAW,IAAI,OAAO,EAAE,GAAG,EAAE,CAAC,OAAO,CAAC,KAAK,MAAM,EAAE;QACxE,OAAO,CAAC,GAAG,CAAC,gBAAgB,MAAM,EAAE,EAAE,GAAG,IAAI,CAAC,CAAC;KAChD;AACH,CAAC;AAJD,sBAIC;AAED;;GAEG;AACH,MAAM,KAAK,GAAG,GAAG,EAAE;IACjB,OAAO,sCAAsC,CAAC,OAAO,CAAC,OAAO,EAAE,CAAC,CAAC,EAAE,EAAE;QACnE,MAAM,CAAC,GAAG,CAAC,IAAI,CAAC,MAAM,EAAE,GAAG,EAAE,CAAC,GAAG,CAAC,CAAC;QACnC,MAAM,CAAC,GAAG,CAAC,KAAK,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,GAAG,CAAC,GAAG,GAAG,CAAC;QAC1C,OAAO,CAAC,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC;IACxB,CAAC,CAAC,CAAC;AACL,CAAC,CAAC;AAEK,MAAM,kBAAkB,GAAG,GAAG,EAAE;IACrC,OAAO;IACL,aAAa;IACb,OAAO,MAAM,KAAK,WAAW;QAC7B,aAAa;QACb,OAAO,MAAM,CAAC,QAAQ,KAAK,WAAW;QACtC,aAAa;QACb,OAAO,SAAS,KAAK,WAAW,CACjC,CAAC;AACJ,CAAC,CAAC;AATW,QAAA,kBAAkB,sBAS7B;AAOK,MAAM,iBAAiB,GAAG,CAAC,OAAY,EAA8B,EAAE;IAC5E,OAAO,OAAO,OAAO,EAAE,GAAG,KAAK,UAAU,CAAC;AAC5C,CAAC,CAAC;AAFW,QAAA,iBAAiB,qBAE5B;AAEK,MAAM,iBAAiB,GAAG,CAAC,OAA8B,EAAE,MAAc,EAAU,EAAE;IAC1F,MAAM,WAAW,GAAG,IAAA,iBAAS,EAAC,OAAO,EAAE,MAAM,CAAC,CAAC;IAC/C,IAAI,WAAW,KAAK,SAAS,EAAE;QAC7B,MAAM,IAAI,KAAK,CAAC,kBAAkB,MAAM,SAAS,CAAC,CAAC;KACpD;IACD,OAAO,WAAW,CAAC;AACrB,CAAC,CAAC;AANW,QAAA,iBAAiB,qBAM5B;AAEK,MAAM,SAAS,GAAG,CAAC,OAA8B,EAAE,MAAc,EAAsB,EAAE;IAC9F,MAAM,gBAAgB,GAAG,MAAM,CAAC,WAAW,EAAE,CAAC;IAC9C,IAAI,IAAA,yBAAiB,EAAC,OAAO,CAAC,EAAE;QAC9B,uEAAuE;QACvE,MAAM,eAAe,GACnB,MAAM,CAAC,CAAC,CAAC,EAAE,WAAW,EAAE;YACxB,MAAM,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,cAAc,EAAE,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC,EAAE,GAAG,EAAE,CAAC,WAAW,EAAE,CAAC,CAAC;QACrF,KAAK,MAAM,GAAG,IAAI,CAAC,MAAM,EAAE,gBAAgB,EAAE,MAAM,CAAC,WAAW,EAAE,EAAE,eAAe,CAAC,EAAE;YACnF,MAAM,KAAK,GAAG,OAAO,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC;YAC/B,IAAI,KAAK,EAAE;gBACT,OAAO,KAAK,CAAC;aACd;SACF;KACF;IAED,KAAK,MAAM,CAAC,GAAG,EAAE,KAAK,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE;QAClD,IAAI,GAAG,CAAC,WAAW,EAAE,KAAK,gBAAgB,EAAE;YAC1C,IAAI,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC,EAAE;gBACxB,IAAI,KAAK,CAAC,MAAM,IAAI,CAAC;oBAAE,OAAO,KAAK,CAAC,CAAC,CAAC,CAAC;gBACvC,OAAO,CAAC,IAAI,CAAC,YAAY,KAAK,CAAC,MAAM,oBAAoB,MAAM,iCAAiC,CAAC,CAAC;gBAClG,OAAO,KAAK,CAAC,CAAC,CAAC,CAAC;aACjB;YACD,OAAO,KAAK,CAAC;SACd;KACF;IAED,OAAO,SAAS,CAAC;AACnB,CAAC,CAAC;AA3BW,QAAA,SAAS,aA2BpB;AAEF;;GAEG
;AACI,MAAM,QAAQ,GAAG,CAAC,GAA8B,EAAU,EAAE;IACjE,IAAI,CAAC,GAAG;QAAE,OAAO,EAAE,CAAC;IACpB,IAAI,OAAO,MAAM,KAAK,WAAW,EAAE;QACjC,OAAO,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC;KAC5C;IAED,IAAI,OAAO,IAAI,KAAK,WAAW,EAAE;QAC/B,OAAO,IAAI,CAAC,GAAG,CAAC,CAAC;KAClB;IAED,MAAM,IAAI,mBAAW,CAAC,uEAAuE,CAAC,CAAC;AACjG,CAAC,CAAC;AAXW,QAAA,QAAQ,YAWnB;AAEF,SAAgB,KAAK,CAAC,GAAY;IAChC,OAAO,GAAG,IAAI,IAAI,IAAI,OAAO,GAAG,KAAK,QAAQ,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;AACvE,CAAC;AAFD,sBAEC"} \ No newline at end of file diff --git a/node_modules/openai/core.mjs b/node_modules/openai/core.mjs new file mode 100644 index 0000000..62daf43 --- /dev/null +++ b/node_modules/openai/core.mjs @@ -0,0 +1,889 @@ +var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; +}; +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +}; +var _AbstractPage_client; +import { VERSION } from "./version.mjs"; +import { Stream } from "./streaming.mjs"; +import { OpenAIError, APIError, APIConnectionError, APIConnectionTimeoutError, APIUserAbortError, } from "./error.mjs"; +import { kind as shimsKind, getDefaultAgent, fetch, } from "./_shims/index.mjs"; +import { isBlobLike, isMultipartBody } from "./uploads.mjs"; +export { maybeMultipartFormRequestOptions, multipartFormRequestOptions, createForm, } from "./uploads.mjs"; +async function defaultParseResponse(props) { + const { response } = props; + if (props.options.stream) { + debug('response', response.status, response.url, response.headers, response.body); + // Note: there is an invariant here that isn't represented in the type system + // that if you set `stream: true` the response type must also be `Stream` + if (props.options.__streamClass) { + return props.options.__streamClass.fromSSEResponse(response, props.controller); + } + return Stream.fromSSEResponse(response, props.controller); + } + // fetch refuses to read the body when the status code is 204. + if (response.status === 204) { + return null; + } + if (props.options.__binaryResponse) { + return response; + } + const contentType = response.headers.get('content-type'); + const isJSON = contentType?.includes('application/json') || contentType?.includes('application/vnd.api+json'); + if (isJSON) { + const json = await response.json(); + debug('response', response.status, response.url, response.headers, json); + return _addRequestID(json, response); + } + const text = await response.text(); + debug('response', response.status, response.url, response.headers, text); + // TODO handle blob, arraybuffer, other content types, etc. 
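+    // (anything that is not a stream, a 204, a binary response, or JSON falls through to plain text)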
+ return text; +} +function _addRequestID(value, response) { + if (!value || typeof value !== 'object' || Array.isArray(value)) { + return value; + } + return Object.defineProperty(value, '_request_id', { + value: response.headers.get('x-request-id'), + enumerable: false, + }); +} +/** + * A subclass of `Promise` providing additional helper methods + * for interacting with the SDK. + */ +export class APIPromise extends Promise { + constructor(responsePromise, parseResponse = defaultParseResponse) { + super((resolve) => { + // this is maybe a bit weird but this has to be a no-op to not implicitly + // parse the response body; instead .then, .catch, .finally are overridden + // to parse the response + resolve(null); + }); + this.responsePromise = responsePromise; + this.parseResponse = parseResponse; + } + _thenUnwrap(transform) { + return new APIPromise(this.responsePromise, async (props) => _addRequestID(transform(await this.parseResponse(props), props), props.response)); + } + /** + * Gets the raw `Response` instance instead of parsing the response + * data. + * + * If you want to parse the response body but still get the `Response` + * instance, you can use {@link withResponse()}. + * + * 👋 Getting the wrong TypeScript type for `Response`? + * Try setting `"moduleResolution": "NodeNext"` if you can, + * or add one of these imports before your first `import … from 'openai'`: + * - `import 'openai/shims/node'` (if you're running on Node) + * - `import 'openai/shims/web'` (otherwise) + */ + asResponse() { + return this.responsePromise.then((p) => p.response); + } + /** + * Gets the parsed response data, the raw `Response` instance and the ID of the request, + * returned via the X-Request-ID header which is useful for debugging requests and reporting + * issues to OpenAI. + * + * If you just want to get the raw `Response` instance without parsing it, + * you can use {@link asResponse()}. + * + * + * 👋 Getting the wrong TypeScript type for `Response`? + * Try setting `"moduleResolution": "NodeNext"` if you can, + * or add one of these imports before your first `import … from 'openai'`: + * - `import 'openai/shims/node'` (if you're running on Node) + * - `import 'openai/shims/web'` (otherwise) + */ + async withResponse() { + const [data, response] = await Promise.all([this.parse(), this.asResponse()]); + return { data, response, request_id: response.headers.get('x-request-id') }; + } + parse() { + if (!this.parsedPromise) { + this.parsedPromise = this.responsePromise.then(this.parseResponse); + } + return this.parsedPromise; + } + then(onfulfilled, onrejected) { + return this.parse().then(onfulfilled, onrejected); + } + catch(onrejected) { + return this.parse().catch(onrejected); + } + finally(onfinally) { + return this.parse().finally(onfinally); + } +} +export class APIClient { + constructor({ baseURL, maxRetries = 2, timeout = 600000, // 10 minutes + httpAgent, fetch: overriddenFetch, }) { + this.baseURL = baseURL; + this.maxRetries = validatePositiveInteger('maxRetries', maxRetries); + this.timeout = validatePositiveInteger('timeout', timeout); + this.httpAgent = httpAgent; + this.fetch = overriddenFetch ?? 
fetch; + } + authHeaders(opts) { + return {}; + } + /** + * Override this to add your own default headers, for example: + * + * { + * ...super.defaultHeaders(), + * Authorization: 'Bearer 123', + * } + */ + defaultHeaders(opts) { + return { + Accept: 'application/json', + 'Content-Type': 'application/json', + 'User-Agent': this.getUserAgent(), + ...getPlatformHeaders(), + ...this.authHeaders(opts), + }; + } + /** + * Override this to add your own headers validation: + */ + validateHeaders(headers, customHeaders) { } + defaultIdempotencyKey() { + return `stainless-node-retry-${uuid4()}`; + } + get(path, opts) { + return this.methodRequest('get', path, opts); + } + post(path, opts) { + return this.methodRequest('post', path, opts); + } + patch(path, opts) { + return this.methodRequest('patch', path, opts); + } + put(path, opts) { + return this.methodRequest('put', path, opts); + } + delete(path, opts) { + return this.methodRequest('delete', path, opts); + } + methodRequest(method, path, opts) { + return this.request(Promise.resolve(opts).then(async (opts) => { + const body = opts && isBlobLike(opts?.body) ? new DataView(await opts.body.arrayBuffer()) + : opts?.body instanceof DataView ? opts.body + : opts?.body instanceof ArrayBuffer ? new DataView(opts.body) + : opts && ArrayBuffer.isView(opts?.body) ? new DataView(opts.body.buffer) + : opts?.body; + return { method, path, ...opts, body }; + })); + } + getAPIList(path, Page, opts) { + return this.requestAPIList(Page, { method: 'get', path, ...opts }); + } + calculateContentLength(body) { + if (typeof body === 'string') { + if (typeof Buffer !== 'undefined') { + return Buffer.byteLength(body, 'utf8').toString(); + } + if (typeof TextEncoder !== 'undefined') { + const encoder = new TextEncoder(); + const encoded = encoder.encode(body); + return encoded.length.toString(); + } + } + else if (ArrayBuffer.isView(body)) { + return body.byteLength.toString(); + } + return null; + } + buildRequest(options, { retryCount = 0 } = {}) { + const { method, path, query, headers: headers = {} } = options; + const body = ArrayBuffer.isView(options.body) || (options.__binaryRequest && typeof options.body === 'string') ? + options.body + : isMultipartBody(options.body) ? options.body.body + : options.body ? JSON.stringify(options.body, null, 2) + : null; + const contentLength = this.calculateContentLength(body); + const url = this.buildURL(path, query); + if ('timeout' in options) + validatePositiveInteger('timeout', options.timeout); + const timeout = options.timeout ?? this.timeout; + const httpAgent = options.httpAgent ?? this.httpAgent ?? getDefaultAgent(url); + const minAgentTimeout = timeout + 1000; + if (typeof httpAgent?.options?.timeout === 'number' && + minAgentTimeout > (httpAgent.options.timeout ?? 0)) { + // Allow any given request to bump our agent active socket timeout. + // This may seem strange, but leaking active sockets should be rare and not particularly problematic, + // and without mutating agent we would need to create more of them. + // This tradeoff optimizes for performance. 
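+            // (minAgentTimeout is the request timeout plus a one-second buffer, so the agent's socket timeout never fires before the request's own timeout)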
+ httpAgent.options.timeout = minAgentTimeout; + } + if (this.idempotencyHeader && method !== 'get') { + if (!options.idempotencyKey) + options.idempotencyKey = this.defaultIdempotencyKey(); + headers[this.idempotencyHeader] = options.idempotencyKey; + } + const reqHeaders = this.buildHeaders({ options, headers, contentLength, retryCount }); + const req = { + method, + ...(body && { body: body }), + headers: reqHeaders, + ...(httpAgent && { agent: httpAgent }), + // @ts-ignore node-fetch uses a custom AbortSignal type that is + // not compatible with standard web types + signal: options.signal ?? null, + }; + return { req, url, timeout }; + } + buildHeaders({ options, headers, contentLength, retryCount, }) { + const reqHeaders = {}; + if (contentLength) { + reqHeaders['content-length'] = contentLength; + } + const defaultHeaders = this.defaultHeaders(options); + applyHeadersMut(reqHeaders, defaultHeaders); + applyHeadersMut(reqHeaders, headers); + // let builtin fetch set the Content-Type for multipart bodies + if (isMultipartBody(options.body) && shimsKind !== 'node') { + delete reqHeaders['content-type']; + } + // Don't set the retry count header if it was already set or removed through default headers or by the + // caller. We check `defaultHeaders` and `headers`, which can contain nulls, instead of `reqHeaders` to + // account for the removal case. + if (getHeader(defaultHeaders, 'x-stainless-retry-count') === undefined && + getHeader(headers, 'x-stainless-retry-count') === undefined) { + reqHeaders['x-stainless-retry-count'] = String(retryCount); + } + this.validateHeaders(reqHeaders, headers); + return reqHeaders; + } + /** + * Used as a callback for mutating the given `FinalRequestOptions` object. + */ + async prepareOptions(options) { } + /** + * Used as a callback for mutating the given `RequestInit` object. + * + * This is useful for cases where you want to add certain headers based off of + * the request properties, e.g. `method` or `url`. + */ + async prepareRequest(request, { url, options }) { } + parseHeaders(headers) { + return (!headers ? {} + : Symbol.iterator in headers ? + Object.fromEntries(Array.from(headers).map((header) => [...header])) + : { ...headers }); + } + makeStatusError(status, error, message, headers) { + return APIError.generate(status, error, message, headers); + } + request(options, remainingRetries = null) { + return new APIPromise(this.makeRequest(options, remainingRetries)); + } + async makeRequest(optionsInput, retriesRemaining) { + const options = await optionsInput; + const maxRetries = options.maxRetries ?? 
this.maxRetries; + if (retriesRemaining == null) { + retriesRemaining = maxRetries; + } + await this.prepareOptions(options); + const { req, url, timeout } = this.buildRequest(options, { retryCount: maxRetries - retriesRemaining }); + await this.prepareRequest(req, { url, options }); + debug('request', url, options, req.headers); + if (options.signal?.aborted) { + throw new APIUserAbortError(); + } + const controller = new AbortController(); + const response = await this.fetchWithTimeout(url, req, timeout, controller).catch(castToError); + if (response instanceof Error) { + if (options.signal?.aborted) { + throw new APIUserAbortError(); + } + if (retriesRemaining) { + return this.retryRequest(options, retriesRemaining); + } + if (response.name === 'AbortError') { + throw new APIConnectionTimeoutError(); + } + throw new APIConnectionError({ cause: response }); + } + const responseHeaders = createResponseHeaders(response.headers); + if (!response.ok) { + if (retriesRemaining && this.shouldRetry(response)) { + const retryMessage = `retrying, ${retriesRemaining} attempts remaining`; + debug(`response (error; ${retryMessage})`, response.status, url, responseHeaders); + return this.retryRequest(options, retriesRemaining, responseHeaders); + } + const errText = await response.text().catch((e) => castToError(e).message); + const errJSON = safeJSON(errText); + const errMessage = errJSON ? undefined : errText; + const retryMessage = retriesRemaining ? `(error; no more retries left)` : `(error; not retryable)`; + debug(`response (error; ${retryMessage})`, response.status, url, responseHeaders, errMessage); + const err = this.makeStatusError(response.status, errJSON, errMessage, responseHeaders); + throw err; + } + return { response, options, controller }; + } + requestAPIList(Page, options) { + const request = this.makeRequest(options, null); + return new PagePromise(this, request, Page); + } + buildURL(path, query) { + const url = isAbsoluteURL(path) ? + new URL(path) + : new URL(this.baseURL + (this.baseURL.endsWith('/') && path.startsWith('/') ? path.slice(1) : path)); + const defaultQuery = this.defaultQuery(); + if (!isEmptyObj(defaultQuery)) { + query = { ...defaultQuery, ...query }; + } + if (typeof query === 'object' && query && !Array.isArray(query)) { + url.search = this.stringifyQuery(query); + } + return url.toString(); + } + stringifyQuery(query) { + return Object.entries(query) + .filter(([_, value]) => typeof value !== 'undefined') + .map(([key, value]) => { + if (typeof value === 'string' || typeof value === 'number' || typeof value === 'boolean') { + return `${encodeURIComponent(key)}=${encodeURIComponent(value)}`; + } + if (value === null) { + return `${encodeURIComponent(key)}=`; + } + throw new OpenAIError(`Cannot stringify type ${typeof value}; Expected string, number, boolean, or null. If you need to pass nested query parameters, you can manually encode them, e.g. 
{ query: { 'foo[key1]': value1, 'foo[key2]': value2 } }, and please open a GitHub issue requesting better support for your use case.`); + }) + .join('&'); + } + async fetchWithTimeout(url, init, ms, controller) { + const { signal, ...options } = init || {}; + if (signal) + signal.addEventListener('abort', () => controller.abort()); + const timeout = setTimeout(() => controller.abort(), ms); + return ( + // use undefined this binding; fetch errors if bound to something else in browser/cloudflare + this.fetch.call(undefined, url, { signal: controller.signal, ...options }).finally(() => { + clearTimeout(timeout); + })); + } + shouldRetry(response) { + // Note this is not a standard header. + const shouldRetryHeader = response.headers.get('x-should-retry'); + // If the server explicitly says whether or not to retry, obey. + if (shouldRetryHeader === 'true') + return true; + if (shouldRetryHeader === 'false') + return false; + // Retry on request timeouts. + if (response.status === 408) + return true; + // Retry on lock timeouts. + if (response.status === 409) + return true; + // Retry on rate limits. + if (response.status === 429) + return true; + // Retry internal errors. + if (response.status >= 500) + return true; + return false; + } + async retryRequest(options, retriesRemaining, responseHeaders) { + let timeoutMillis; + // Note the `retry-after-ms` header may not be standard, but is a good idea and we'd like proactive support for it. + const retryAfterMillisHeader = responseHeaders?.['retry-after-ms']; + if (retryAfterMillisHeader) { + const timeoutMs = parseFloat(retryAfterMillisHeader); + if (!Number.isNaN(timeoutMs)) { + timeoutMillis = timeoutMs; + } + } + // About the Retry-After header: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Retry-After + const retryAfterHeader = responseHeaders?.['retry-after']; + if (retryAfterHeader && !timeoutMillis) { + const timeoutSeconds = parseFloat(retryAfterHeader); + if (!Number.isNaN(timeoutSeconds)) { + timeoutMillis = timeoutSeconds * 1000; + } + else { + timeoutMillis = Date.parse(retryAfterHeader) - Date.now(); + } + } + // If the API asks us to wait a certain amount of time (and it's a reasonable amount), + // just do what it says, but otherwise calculate a default + if (!(timeoutMillis && 0 <= timeoutMillis && timeoutMillis < 60 * 1000)) { + const maxRetries = options.maxRetries ?? this.maxRetries; + timeoutMillis = this.calculateDefaultRetryTimeoutMillis(retriesRemaining, maxRetries); + } + await sleep(timeoutMillis); + return this.makeRequest(options, retriesRemaining - 1); + } + calculateDefaultRetryTimeoutMillis(retriesRemaining, maxRetries) { + const initialRetryDelay = 0.5; + const maxRetryDelay = 8.0; + const numRetries = maxRetries - retriesRemaining; + // Apply exponential backoff, but not more than the max. + const sleepSeconds = Math.min(initialRetryDelay * Math.pow(2, numRetries), maxRetryDelay); + // Apply some jitter, take up to at most 25 percent of the retry time. 
+ const jitter = 1 - Math.random() * 0.25; + return sleepSeconds * jitter * 1000; + } + getUserAgent() { + return `${this.constructor.name}/JS ${VERSION}`; + } +} +export class AbstractPage { + constructor(client, response, body, options) { + _AbstractPage_client.set(this, void 0); + __classPrivateFieldSet(this, _AbstractPage_client, client, "f"); + this.options = options; + this.response = response; + this.body = body; + } + hasNextPage() { + const items = this.getPaginatedItems(); + if (!items.length) + return false; + return this.nextPageInfo() != null; + } + async getNextPage() { + const nextInfo = this.nextPageInfo(); + if (!nextInfo) { + throw new OpenAIError('No next page expected; please check `.hasNextPage()` before calling `.getNextPage()`.'); + } + const nextOptions = { ...this.options }; + if ('params' in nextInfo && typeof nextOptions.query === 'object') { + nextOptions.query = { ...nextOptions.query, ...nextInfo.params }; + } + else if ('url' in nextInfo) { + const params = [...Object.entries(nextOptions.query || {}), ...nextInfo.url.searchParams.entries()]; + for (const [key, value] of params) { + nextInfo.url.searchParams.set(key, value); + } + nextOptions.query = undefined; + nextOptions.path = nextInfo.url.toString(); + } + return await __classPrivateFieldGet(this, _AbstractPage_client, "f").requestAPIList(this.constructor, nextOptions); + } + async *iterPages() { + // eslint-disable-next-line @typescript-eslint/no-this-alias + let page = this; + yield page; + while (page.hasNextPage()) { + page = await page.getNextPage(); + yield page; + } + } + async *[(_AbstractPage_client = new WeakMap(), Symbol.asyncIterator)]() { + for await (const page of this.iterPages()) { + for (const item of page.getPaginatedItems()) { + yield item; + } + } + } +} +/** + * This subclass of Promise will resolve to an instantiated Page once the request completes. + * + * It also implements AsyncIterable to allow auto-paginating iteration on an unawaited list call, eg: + * + * for await (const item of client.items.list()) { + * console.log(item) + * } + */ +export class PagePromise extends APIPromise { + constructor(client, request, Page) { + super(request, async (props) => new Page(client, props.response, await defaultParseResponse(props), props.options)); + } + /** + * Allow auto-paginating iteration on an unawaited list call, eg: + * + * for await (const item of client.items.list()) { + * console.log(item) + * } + */ + async *[Symbol.asyncIterator]() { + const page = await this; + for await (const item of page) { + yield item; + } + } +} +export const createResponseHeaders = (headers) => { + return new Proxy(Object.fromEntries( + // @ts-ignore + headers.entries()), { + get(target, name) { + const key = name.toString(); + return target[key.toLowerCase()] || target[key]; + }, + }); +}; +// This is required so that we can determine if a given object matches the RequestOptions +// type at runtime. While this requires duplication, it is enforced by the TypeScript +// compiler such that any missing / extraneous keys will cause an error. 
+const requestOptionsKeys = { + method: true, + path: true, + query: true, + body: true, + headers: true, + maxRetries: true, + stream: true, + timeout: true, + httpAgent: true, + signal: true, + idempotencyKey: true, + __binaryRequest: true, + __binaryResponse: true, + __streamClass: true, +}; +export const isRequestOptions = (obj) => { + return (typeof obj === 'object' && + obj !== null && + !isEmptyObj(obj) && + Object.keys(obj).every((k) => hasOwn(requestOptionsKeys, k))); +}; +const getPlatformProperties = () => { + if (typeof Deno !== 'undefined' && Deno.build != null) { + return { + 'X-Stainless-Lang': 'js', + 'X-Stainless-Package-Version': VERSION, + 'X-Stainless-OS': normalizePlatform(Deno.build.os), + 'X-Stainless-Arch': normalizeArch(Deno.build.arch), + 'X-Stainless-Runtime': 'deno', + 'X-Stainless-Runtime-Version': typeof Deno.version === 'string' ? Deno.version : Deno.version?.deno ?? 'unknown', + }; + } + if (typeof EdgeRuntime !== 'undefined') { + return { + 'X-Stainless-Lang': 'js', + 'X-Stainless-Package-Version': VERSION, + 'X-Stainless-OS': 'Unknown', + 'X-Stainless-Arch': `other:${EdgeRuntime}`, + 'X-Stainless-Runtime': 'edge', + 'X-Stainless-Runtime-Version': process.version, + }; + } + // Check if Node.js + if (Object.prototype.toString.call(typeof process !== 'undefined' ? process : 0) === '[object process]') { + return { + 'X-Stainless-Lang': 'js', + 'X-Stainless-Package-Version': VERSION, + 'X-Stainless-OS': normalizePlatform(process.platform), + 'X-Stainless-Arch': normalizeArch(process.arch), + 'X-Stainless-Runtime': 'node', + 'X-Stainless-Runtime-Version': process.version, + }; + } + const browserInfo = getBrowserInfo(); + if (browserInfo) { + return { + 'X-Stainless-Lang': 'js', + 'X-Stainless-Package-Version': VERSION, + 'X-Stainless-OS': 'Unknown', + 'X-Stainless-Arch': 'unknown', + 'X-Stainless-Runtime': `browser:${browserInfo.browser}`, + 'X-Stainless-Runtime-Version': browserInfo.version, + }; + } + // TODO add support for Cloudflare workers, etc. + return { + 'X-Stainless-Lang': 'js', + 'X-Stainless-Package-Version': VERSION, + 'X-Stainless-OS': 'Unknown', + 'X-Stainless-Arch': 'unknown', + 'X-Stainless-Runtime': 'unknown', + 'X-Stainless-Runtime-Version': 'unknown', + }; +}; +// Note: modified from https://github.com/JS-DevTools/host-environment/blob/b1ab79ecde37db5d6e163c050e54fe7d287d7c92/src/isomorphic.browser.ts +function getBrowserInfo() { + if (typeof navigator === 'undefined' || !navigator) { + return null; + } + // NOTE: The order matters here! 
+ const browserPatterns = [ + { key: 'edge', pattern: /Edge(?:\W+(\d+)\.(\d+)(?:\.(\d+))?)?/ }, + { key: 'ie', pattern: /MSIE(?:\W+(\d+)\.(\d+)(?:\.(\d+))?)?/ }, + { key: 'ie', pattern: /Trident(?:.*rv\:(\d+)\.(\d+)(?:\.(\d+))?)?/ }, + { key: 'chrome', pattern: /Chrome(?:\W+(\d+)\.(\d+)(?:\.(\d+))?)?/ }, + { key: 'firefox', pattern: /Firefox(?:\W+(\d+)\.(\d+)(?:\.(\d+))?)?/ }, + { key: 'safari', pattern: /(?:Version\W+(\d+)\.(\d+)(?:\.(\d+))?)?(?:\W+Mobile\S*)?\W+Safari/ }, + ]; + // Find the FIRST matching browser + for (const { key, pattern } of browserPatterns) { + const match = pattern.exec(navigator.userAgent); + if (match) { + const major = match[1] || 0; + const minor = match[2] || 0; + const patch = match[3] || 0; + return { browser: key, version: `${major}.${minor}.${patch}` }; + } + } + return null; +} +const normalizeArch = (arch) => { + // Node docs: + // - https://nodejs.org/api/process.html#processarch + // Deno docs: + // - https://doc.deno.land/deno/stable/~/Deno.build + if (arch === 'x32') + return 'x32'; + if (arch === 'x86_64' || arch === 'x64') + return 'x64'; + if (arch === 'arm') + return 'arm'; + if (arch === 'aarch64' || arch === 'arm64') + return 'arm64'; + if (arch) + return `other:${arch}`; + return 'unknown'; +}; +const normalizePlatform = (platform) => { + // Node platforms: + // - https://nodejs.org/api/process.html#processplatform + // Deno platforms: + // - https://doc.deno.land/deno/stable/~/Deno.build + // - https://github.com/denoland/deno/issues/14799 + platform = platform.toLowerCase(); + // NOTE: this iOS check is untested and may not work + // Node does not work natively on IOS, there is a fork at + // https://github.com/nodejs-mobile/nodejs-mobile + // however it is unknown at the time of writing how to detect if it is running + if (platform.includes('ios')) + return 'iOS'; + if (platform === 'android') + return 'Android'; + if (platform === 'darwin') + return 'MacOS'; + if (platform === 'win32') + return 'Windows'; + if (platform === 'freebsd') + return 'FreeBSD'; + if (platform === 'openbsd') + return 'OpenBSD'; + if (platform === 'linux') + return 'Linux'; + if (platform) + return `Other:${platform}`; + return 'Unknown'; +}; +let _platformHeaders; +const getPlatformHeaders = () => { + return (_platformHeaders ?? (_platformHeaders = getPlatformProperties())); +}; +export const safeJSON = (text) => { + try { + return JSON.parse(text); + } + catch (err) { + return undefined; + } +}; +// https://url.spec.whatwg.org/#url-scheme-string +const startsWithSchemeRegexp = /^[a-z][a-z0-9+.-]*:/i; +const isAbsoluteURL = (url) => { + return startsWithSchemeRegexp.test(url); +}; +export const sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms)); +const validatePositiveInteger = (name, n) => { + if (typeof n !== 'number' || !Number.isInteger(n)) { + throw new OpenAIError(`${name} must be an integer`); + } + if (n < 0) { + throw new OpenAIError(`${name} must be a positive integer`); + } + return n; +}; +export const castToError = (err) => { + if (err instanceof Error) + return err; + if (typeof err === 'object' && err !== null) { + try { + return new Error(JSON.stringify(err)); + } + catch { } + } + return new Error(err); +}; +export const ensurePresent = (value) => { + if (value == null) + throw new OpenAIError(`Expected a value to be given but received ${value} instead.`); + return value; +}; +/** + * Read an environment variable. + * + * Trims beginning and trailing whitespace. 
+ * + * Will return undefined if the environment variable doesn't exist or cannot be accessed. + */ +export const readEnv = (env) => { + if (typeof process !== 'undefined') { + return process.env?.[env]?.trim() ?? undefined; + } + if (typeof Deno !== 'undefined') { + return Deno.env?.get?.(env)?.trim(); + } + return undefined; +}; +export const coerceInteger = (value) => { + if (typeof value === 'number') + return Math.round(value); + if (typeof value === 'string') + return parseInt(value, 10); + throw new OpenAIError(`Could not coerce ${value} (type: ${typeof value}) into a number`); +}; +export const coerceFloat = (value) => { + if (typeof value === 'number') + return value; + if (typeof value === 'string') + return parseFloat(value); + throw new OpenAIError(`Could not coerce ${value} (type: ${typeof value}) into a number`); +}; +export const coerceBoolean = (value) => { + if (typeof value === 'boolean') + return value; + if (typeof value === 'string') + return value === 'true'; + return Boolean(value); +}; +export const maybeCoerceInteger = (value) => { + if (value === undefined) { + return undefined; + } + return coerceInteger(value); +}; +export const maybeCoerceFloat = (value) => { + if (value === undefined) { + return undefined; + } + return coerceFloat(value); +}; +export const maybeCoerceBoolean = (value) => { + if (value === undefined) { + return undefined; + } + return coerceBoolean(value); +}; +// https://stackoverflow.com/a/34491287 +export function isEmptyObj(obj) { + if (!obj) + return true; + for (const _k in obj) + return false; + return true; +} +// https://eslint.org/docs/latest/rules/no-prototype-builtins +export function hasOwn(obj, key) { + return Object.prototype.hasOwnProperty.call(obj, key); +} +/** + * Copies headers from "newHeaders" onto "targetHeaders", + * using lower-case for all properties, + * ignoring any keys with undefined values, + * and deleting any keys with null values. + */ +function applyHeadersMut(targetHeaders, newHeaders) { + for (const k in newHeaders) { + if (!hasOwn(newHeaders, k)) + continue; + const lowerKey = k.toLowerCase(); + if (!lowerKey) + continue; + const val = newHeaders[k]; + if (val === null) { + delete targetHeaders[lowerKey]; + } + else if (val !== undefined) { + targetHeaders[lowerKey] = val; + } + } +} +export function debug(action, ...args) { + if (typeof process !== 'undefined' && process?.env?.['DEBUG'] === 'true') { + console.log(`OpenAI:DEBUG:${action}`, ...args); + } +} +/** + * https://stackoverflow.com/a/2117523 + */ +const uuid4 = () => { + return 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g, (c) => { + const r = (Math.random() * 16) | 0; + const v = c === 'x' ? 
r : (r & 0x3) | 0x8; + return v.toString(16); + }); +}; +export const isRunningInBrowser = () => { + return ( + // @ts-ignore + typeof window !== 'undefined' && + // @ts-ignore + typeof window.document !== 'undefined' && + // @ts-ignore + typeof navigator !== 'undefined'); +}; +export const isHeadersProtocol = (headers) => { + return typeof headers?.get === 'function'; +}; +export const getRequiredHeader = (headers, header) => { + const foundHeader = getHeader(headers, header); + if (foundHeader === undefined) { + throw new Error(`Could not find ${header} header`); + } + return foundHeader; +}; +export const getHeader = (headers, header) => { + const lowerCasedHeader = header.toLowerCase(); + if (isHeadersProtocol(headers)) { + // to deal with the case where the header looks like Stainless-Event-Id + const intercapsHeader = header[0]?.toUpperCase() + + header.substring(1).replace(/([^\w])(\w)/g, (_m, g1, g2) => g1 + g2.toUpperCase()); + for (const key of [header, lowerCasedHeader, header.toUpperCase(), intercapsHeader]) { + const value = headers.get(key); + if (value) { + return value; + } + } + } + for (const [key, value] of Object.entries(headers)) { + if (key.toLowerCase() === lowerCasedHeader) { + if (Array.isArray(value)) { + if (value.length <= 1) + return value[0]; + console.warn(`Received ${value.length} entries for the ${header} header, using the first entry.`); + return value[0]; + } + return value; + } + } + return undefined; +}; +/** + * Encodes a string to Base64 format. + */ +export const toBase64 = (str) => { + if (!str) + return ''; + if (typeof Buffer !== 'undefined') { + return Buffer.from(str).toString('base64'); + } + if (typeof btoa !== 'undefined') { + return btoa(str); + } + throw new OpenAIError('Cannot generate b64 string; Expected `Buffer` or `btoa` to be defined'); +}; +export function isObj(obj) { + return obj != null && typeof obj === 'object' && !Array.isArray(obj); +} +//# sourceMappingURL=core.mjs.map \ No newline at end of file diff --git a/node_modules/openai/core.mjs.map b/node_modules/openai/core.mjs.map new file mode 100644 index 0000000..e9761a0 --- /dev/null +++ b/node_modules/openai/core.mjs.map @@ -0,0 +1 @@ 
+{"version":3,"file":"core.mjs","sourceRoot":"","sources":["src/core.ts"],"names":[],"mappings":";;;;;;;;;;;;OAAO,EAAE,OAAO,EAAE;OACX,EAAE,MAAM,EAAE;OACV,EACL,WAAW,EACX,QAAQ,EACR,kBAAkB,EAClB,yBAAyB,EACzB,iBAAiB,GAClB;OACM,EACL,IAAI,IAAI,SAAS,EAEjB,eAAe,EAEf,KAAK,GAKN;OAEM,EAAY,UAAU,EAAE,eAAe,EAAE;OACzC,EACL,gCAAgC,EAChC,2BAA2B,EAC3B,UAAU,GAEX;AAYD,KAAK,UAAU,oBAAoB,CAAI,KAAuB;IAC5D,MAAM,EAAE,QAAQ,EAAE,GAAG,KAAK,CAAC;IAC3B,IAAI,KAAK,CAAC,OAAO,CAAC,MAAM,EAAE;QACxB,KAAK,CAAC,UAAU,EAAE,QAAQ,CAAC,MAAM,EAAE,QAAQ,CAAC,GAAG,EAAE,QAAQ,CAAC,OAAO,EAAE,QAAQ,CAAC,IAAI,CAAC,CAAC;QAElF,6EAA6E;QAC7E,4EAA4E;QAE5E,IAAI,KAAK,CAAC,OAAO,CAAC,aAAa,EAAE;YAC/B,OAAO,KAAK,CAAC,OAAO,CAAC,aAAa,CAAC,eAAe,CAAC,QAAQ,EAAE,KAAK,CAAC,UAAU,CAAQ,CAAC;SACvF;QAED,OAAO,MAAM,CAAC,eAAe,CAAC,QAAQ,EAAE,KAAK,CAAC,UAAU,CAAQ,CAAC;KAClE;IAED,8DAA8D;IAC9D,IAAI,QAAQ,CAAC,MAAM,KAAK,GAAG,EAAE;QAC3B,OAAO,IAAwB,CAAC;KACjC;IAED,IAAI,KAAK,CAAC,OAAO,CAAC,gBAAgB,EAAE;QAClC,OAAO,QAAuC,CAAC;KAChD;IAED,MAAM,WAAW,GAAG,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,CAAC;IACzD,MAAM,MAAM,GACV,WAAW,EAAE,QAAQ,CAAC,kBAAkB,CAAC,IAAI,WAAW,EAAE,QAAQ,CAAC,0BAA0B,CAAC,CAAC;IACjG,IAAI,MAAM,EAAE;QACV,MAAM,IAAI,GAAG,MAAM,QAAQ,CAAC,IAAI,EAAE,CAAC;QAEnC,KAAK,CAAC,UAAU,EAAE,QAAQ,CAAC,MAAM,EAAE,QAAQ,CAAC,GAAG,EAAE,QAAQ,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;QAEzE,OAAO,aAAa,CAAC,IAAI,EAAE,QAAQ,CAAC,CAAC;KACtC;IAED,MAAM,IAAI,GAAG,MAAM,QAAQ,CAAC,IAAI,EAAE,CAAC;IACnC,KAAK,CAAC,UAAU,EAAE,QAAQ,CAAC,MAAM,EAAE,QAAQ,CAAC,GAAG,EAAE,QAAQ,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;IAEzE,2DAA2D;IAC3D,OAAO,IAAmC,CAAC;AAC7C,CAAC;AAOD,SAAS,aAAa,CAAI,KAAQ,EAAE,QAAkB;IACpD,IAAI,CAAC,KAAK,IAAI,OAAO,KAAK,KAAK,QAAQ,IAAI,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC,EAAE;QAC/D,OAAO,KAAyB,CAAC;KAClC;IAED,OAAO,MAAM,CAAC,cAAc,CAAC,KAAK,EAAE,aAAa,EAAE;QACjD,KAAK,EAAE,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC;QAC3C,UAAU,EAAE,KAAK;KAClB,CAAqB,CAAC;AACzB,CAAC;AAED;;;GAGG;AACH,MAAM,OAAO,UAAc,SAAQ,OAAyB;IAG1D,YACU,eAA0C,EAC1C,gBAEgC,oBAAoB;QAE5D,KAAK,CAAC,CAAC,OAAO,EAAE,EAAE;YAChB,yEAAyE;YACzE,0EAA0E;YAC1E,wBAAwB;YACxB,OAAO,CAAC,IAAW,CAAC,CAAC;QACvB,CAAC,CAAC,CAAC;QAVK,oBAAe,GAAf,eAAe,CAA2B;QAC1C,kBAAa,GAAb,aAAa,CAEuC;IAQ9D,CAAC;IAED,WAAW,CAAI,SAAkD;QAC/D,OAAO,IAAI,UAAU,CAAC,IAAI,CAAC,eAAe,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE,CAC1D,aAAa,CAAC,SAAS,CAAC,MAAM,IAAI,CAAC,aAAa,CAAC,KAAK,CAAC,EAAE,KAAK,CAAC,EAAE,KAAK,CAAC,QAAQ,CAAC,CACjF,CAAC;IACJ,CAAC;IAED;;;;;;;;;;;;OAYG;IACH,UAAU;QACR,OAAO,IAAI,CAAC,eAAe,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC;IACtD,CAAC;IAED;;;;;;;;;;;;;;OAcG;IACH,KAAK,CAAC,YAAY;QAChB,MAAM,CAAC,IAAI,EAAE,QAAQ,CAAC,GAAG,MAAM,OAAO,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,KAAK,EAAE,EAAE,IAAI,CAAC,UAAU,EAAE,CAAC,CAAC,CAAC;QAC9E,OAAO,EAAE,IAAI,EAAE,QAAQ,EAAE,UAAU,EAAE,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,EAAE,CAAC;IAC9E,CAAC;IAEO,KAAK;QACX,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE;YACvB,IAAI,CAAC,aAAa,GAAG,IAAI,CAAC,eAAe,CAAC,IAAI,CAAC,IAAI,CAAC,aAAa,CAAqC,CAAC;SACxG;QACD,OAAO,IAAI,CAAC,aAAa,CAAC;IAC5B,CAAC;IAEQ,IAAI,CACX,WAAgG,EAChG,UAAmF;QAEnF,OAAO,IAAI,CAAC,KAAK,EAAE,CAAC,IAAI,CAAC,WAAW,EAAE,UAAU,CAAC,CAAC;IACpD,CAAC;IAEQ,KAAK,CACZ,UAAiF;QAEjF,OAAO,IAAI,CAAC,KAAK,EAAE,CAAC,KAAK,CAAC,UAAU,CAAC,CAAC;IACxC,CAAC;IAEQ,OAAO,CAAC,SAA2C;QAC1D,OAAO,IAAI,CAAC,KAAK,EAAE,CAAC,OAAO,CAAC,SAAS,CAAC,CAAC;IACzC,CAAC;CACF;AAED,MAAM,OAAgB,SAAS;IAS7B,YAAY,EACV,OAAO,EACP,UAAU,GAAG,CAAC,EACd,OAAO,GAAG,MAAM,EAAE,aAAa;IAC/B,SAAS,EACT,KAAK,EAAE,eAAe,GAOvB;QACC,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC;QACvB,IAAI,CAAC,UAAU,GAAG,uBAAuB,CAAC,YAAY,EAAE,UAAU,CAAC,CAAC;QACpE,IAAI,CAAC,OAAO,GAAG,uBAAuB,CAAC,SAAS,EAAE,OAAO,CAAC,CAAC;QAC3D,IAAI,CAAC,SAAS,GAAG,SAAS,CAAC;QAE3B,IAAI
,CAAC,KAAK,GAAG,eAAe,IAAI,KAAK,CAAC;IACxC,CAAC;IAES,WAAW,CAAC,IAAyB;QAC7C,OAAO,EAAE,CAAC;IACZ,CAAC;IAED;;;;;;;OAOG;IACO,cAAc,CAAC,IAAyB;QAChD,OAAO;YACL,MAAM,EAAE,kBAAkB;YAC1B,cAAc,EAAE,kBAAkB;YAClC,YAAY,EAAE,IAAI,CAAC,YAAY,EAAE;YACjC,GAAG,kBAAkB,EAAE;YACvB,GAAG,IAAI,CAAC,WAAW,CAAC,IAAI,CAAC;SAC1B,CAAC;IACJ,CAAC;IAID;;OAEG;IACO,eAAe,CAAC,OAAgB,EAAE,aAAsB,IAAG,CAAC;IAE5D,qBAAqB;QAC7B,OAAO,wBAAwB,KAAK,EAAE,EAAE,CAAC;IAC3C,CAAC;IAED,GAAG,CAAW,IAAY,EAAE,IAA0C;QACpE,OAAO,IAAI,CAAC,aAAa,CAAC,KAAK,EAAE,IAAI,EAAE,IAAI,CAAC,CAAC;IAC/C,CAAC;IAED,IAAI,CAAW,IAAY,EAAE,IAA0C;QACrE,OAAO,IAAI,CAAC,aAAa,CAAC,MAAM,EAAE,IAAI,EAAE,IAAI,CAAC,CAAC;IAChD,CAAC;IAED,KAAK,CAAW,IAAY,EAAE,IAA0C;QACtE,OAAO,IAAI,CAAC,aAAa,CAAC,OAAO,EAAE,IAAI,EAAE,IAAI,CAAC,CAAC;IACjD,CAAC;IAED,GAAG,CAAW,IAAY,EAAE,IAA0C;QACpE,OAAO,IAAI,CAAC,aAAa,CAAC,KAAK,EAAE,IAAI,EAAE,IAAI,CAAC,CAAC;IAC/C,CAAC;IAED,MAAM,CAAW,IAAY,EAAE,IAA0C;QACvE,OAAO,IAAI,CAAC,aAAa,CAAC,QAAQ,EAAE,IAAI,EAAE,IAAI,CAAC,CAAC;IAClD,CAAC;IAEO,aAAa,CACnB,MAAkB,EAClB,IAAY,EACZ,IAA0C;QAE1C,OAAO,IAAI,CAAC,OAAO,CACjB,OAAO,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,IAAI,CAAC,KAAK,EAAE,IAAI,EAAE,EAAE;YACxC,MAAM,IAAI,GACR,IAAI,IAAI,UAAU,CAAC,IAAI,EAAE,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI,QAAQ,CAAC,MAAM,IAAI,CAAC,IAAI,CAAC,WAAW,EAAE,CAAC;gBAC5E,CAAC,CAAC,IAAI,EAAE,IAAI,YAAY,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI;oBAC5C,CAAC,CAAC,IAAI,EAAE,IAAI,YAAY,WAAW,CAAC,CAAC,CAAC,IAAI,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC;wBAC7D,CAAC,CAAC,IAAI,IAAI,WAAW,CAAC,MAAM,CAAC,IAAI,EAAE,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC;4BACzE,CAAC,CAAC,IAAI,EAAE,IAAI,CAAC;YACf,OAAO,EAAE,MAAM,EAAE,IAAI,EAAE,GAAG,IAAI,EAAE,IAAI,EAAE,CAAC;QACzC,CAAC,CAAC,CACH,CAAC;IACJ,CAAC;IAED,UAAU,CACR,IAAY,EACZ,IAAuC,EACvC,IAA0B;QAE1B,OAAO,IAAI,CAAC,cAAc,CAAC,IAAI,EAAE,EAAE,MAAM,EAAE,KAAK,EAAE,IAAI,EAAE,GAAG,IAAI,EAAE,CAAC,CAAC;IACrE,CAAC;IAEO,sBAAsB,CAAC,IAAa;QAC1C,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE;YAC5B,IAAI,OAAO,MAAM,KAAK,WAAW,EAAE;gBACjC,OAAO,MAAM,CAAC,UAAU,CAAC,IAAI,EAAE,MAAM,CAAC,CAAC,QAAQ,EAAE,CAAC;aACnD;YAED,IAAI,OAAO,WAAW,KAAK,WAAW,EAAE;gBACtC,MAAM,OAAO,GAAG,IAAI,WAAW,EAAE,CAAC;gBAClC,MAAM,OAAO,GAAG,OAAO,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC;gBACrC,OAAO,OAAO,CAAC,MAAM,CAAC,QAAQ,EAAE,CAAC;aAClC;SACF;aAAM,IAAI,WAAW,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE;YACnC,OAAO,IAAI,CAAC,UAAU,CAAC,QAAQ,EAAE,CAAC;SACnC;QAED,OAAO,IAAI,CAAC;IACd,CAAC;IAED,YAAY,CACV,OAAiC,EACjC,EAAE,UAAU,GAAG,CAAC,KAA8B,EAAE;QAEhD,MAAM,EAAE,MAAM,EAAE,IAAI,EAAE,KAAK,EAAE,OAAO,EAAE,OAAO,GAAG,EAAE,EAAE,GAAG,OAAO,CAAC;QAE/D,MAAM,IAAI,GACR,WAAW,CAAC,MAAM,CAAC,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,eAAe,IAAI,OAAO,OAAO,CAAC,IAAI,KAAK,QAAQ,CAAC,CAAC,CAAC;YACjG,OAAO,CAAC,IAAI;YACd,CAAC,CAAC,eAAe,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,IAAI,CAAC,IAAI;gBACnD,CAAC,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,OAAO,CAAC,IAAI,EAAE,IAAI,EAAE,CAAC,CAAC;oBACtD,CAAC,CAAC,IAAI,CAAC;QACT,MAAM,aAAa,GAAG,IAAI,CAAC,sBAAsB,CAAC,IAAI,CAAC,CAAC;QAExD,MAAM,GAAG,GAAG,IAAI,CAAC,QAAQ,CAAC,IAAK,EAAE,KAAK,CAAC,CAAC;QACxC,IAAI,SAAS,IAAI,OAAO;YAAE,uBAAuB,CAAC,SAAS,EAAE,OAAO,CAAC,OAAO,CAAC,CAAC;QAC9E,MAAM,OAAO,GAAG,OAAO,CAAC,OAAO,IAAI,IAAI,CAAC,OAAO,CAAC;QAChD,MAAM,SAAS,GAAG,OAAO,CAAC,SAAS,IAAI,IAAI,CAAC,SAAS,IAAI,eAAe,CAAC,GAAG,CAAC,CAAC;QAC9E,MAAM,eAAe,GAAG,OAAO,GAAG,IAAI,CAAC;QACvC,IACE,OAAQ,SAAiB,EAAE,OAAO,EAAE,OAAO,KAAK,QAAQ;YACxD,eAAe,GAAG,CAAE,SAAiB,CAAC,OAAO,CAAC,OAAO,IAAI,CAAC,CAAC,EAC3D;YACA,mEAAmE;YACnE,qGAAqG;YACrG,mEAAmE;YACnE,2CAA2C;YAC1C,SAAiB,CAAC,OAAO,CAAC,OAAO,GAAG,eAAe,CAAC;SACtD;QAED,IAAI,IAAI,CAAC,iBAAiB,IAAI,MAAM,KAAK,KAAK,EAAE;YAC9C,IAAI,CAAC,OAAO,CAAC,cAAc;gBAAE,OAAO,CAA
C,cAAc,GAAG,IAAI,CAAC,qBAAqB,EAAE,CAAC;YACnF,OAAO,CAAC,IAAI,CAAC,iBAAiB,CAAC,GAAG,OAAO,CAAC,cAAc,CAAC;SAC1D;QAED,MAAM,UAAU,GAAG,IAAI,CAAC,YAAY,CAAC,EAAE,OAAO,EAAE,OAAO,EAAE,aAAa,EAAE,UAAU,EAAE,CAAC,CAAC;QAEtF,MAAM,GAAG,GAAgB;YACvB,MAAM;YACN,GAAG,CAAC,IAAI,IAAI,EAAE,IAAI,EAAE,IAAW,EAAE,CAAC;YAClC,OAAO,EAAE,UAAU;YACnB,GAAG,CAAC,SAAS,IAAI,EAAE,KAAK,EAAE,SAAS,EAAE,CAAC;YACtC,+DAA+D;YAC/D,yCAAyC;YACzC,MAAM,EAAE,OAAO,CAAC,MAAM,IAAI,IAAI;SAC/B,CAAC;QAEF,OAAO,EAAE,GAAG,EAAE,GAAG,EAAE,OAAO,EAAE,CAAC;IAC/B,CAAC;IAEO,YAAY,CAAC,EACnB,OAAO,EACP,OAAO,EACP,aAAa,EACb,UAAU,GAMX;QACC,MAAM,UAAU,GAA2B,EAAE,CAAC;QAC9C,IAAI,aAAa,EAAE;YACjB,UAAU,CAAC,gBAAgB,CAAC,GAAG,aAAa,CAAC;SAC9C;QAED,MAAM,cAAc,GAAG,IAAI,CAAC,cAAc,CAAC,OAAO,CAAC,CAAC;QACpD,eAAe,CAAC,UAAU,EAAE,cAAc,CAAC,CAAC;QAC5C,eAAe,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;QAErC,8DAA8D;QAC9D,IAAI,eAAe,CAAC,OAAO,CAAC,IAAI,CAAC,IAAI,SAAS,KAAK,MAAM,EAAE;YACzD,OAAO,UAAU,CAAC,cAAc,CAAC,CAAC;SACnC;QAED,sGAAsG;QACtG,uGAAuG;QACvG,gCAAgC;QAChC,IACE,SAAS,CAAC,cAAc,EAAE,yBAAyB,CAAC,KAAK,SAAS;YAClE,SAAS,CAAC,OAAO,EAAE,yBAAyB,CAAC,KAAK,SAAS,EAC3D;YACA,UAAU,CAAC,yBAAyB,CAAC,GAAG,MAAM,CAAC,UAAU,CAAC,CAAC;SAC5D;QAED,IAAI,CAAC,eAAe,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;QAE1C,OAAO,UAAU,CAAC;IACpB,CAAC;IAED;;OAEG;IACO,KAAK,CAAC,cAAc,CAAC,OAA4B,IAAkB,CAAC;IAE9E;;;;;OAKG;IACO,KAAK,CAAC,cAAc,CAC5B,OAAoB,EACpB,EAAE,GAAG,EAAE,OAAO,EAAiD,IAC/C,CAAC;IAET,YAAY,CAAC,OAAuC;QAC5D,OAAO,CACL,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE;YACb,CAAC,CAAC,MAAM,CAAC,QAAQ,IAAI,OAAO,CAAC,CAAC;gBAC5B,MAAM,CAAC,WAAW,CAAC,KAAK,CAAC,IAAI,CAAC,OAA6B,CAAC,CAAC,GAAG,CAAC,CAAC,MAAM,EAAE,EAAE,CAAC,CAAC,GAAG,MAAM,CAAC,CAAC,CAAC;gBAC5F,CAAC,CAAC,EAAE,GAAG,OAAO,EAAE,CACjB,CAAC;IACJ,CAAC;IAES,eAAe,CACvB,MAA0B,EAC1B,KAAyB,EACzB,OAA2B,EAC3B,OAA4B;QAE5B,OAAO,QAAQ,CAAC,QAAQ,CAAC,MAAM,EAAE,KAAK,EAAE,OAAO,EAAE,OAAO,CAAC,CAAC;IAC5D,CAAC;IAED,OAAO,CACL,OAAiD,EACjD,mBAAkC,IAAI;QAEtC,OAAO,IAAI,UAAU,CAAC,IAAI,CAAC,WAAW,CAAC,OAAO,EAAE,gBAAgB,CAAC,CAAC,CAAC;IACrE,CAAC;IAEO,KAAK,CAAC,WAAW,CACvB,YAAsD,EACtD,gBAA+B;QAE/B,MAAM,OAAO,GAAG,MAAM,YAAY,CAAC;QACnC,MAAM,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,IAAI,CAAC,UAAU,CAAC;QACzD,IAAI,gBAAgB,IAAI,IAAI,EAAE;YAC5B,gBAAgB,GAAG,UAAU,CAAC;SAC/B;QAED,MAAM,IAAI,CAAC,cAAc,CAAC,OAAO,CAAC,CAAC;QAEnC,MAAM,EAAE,GAAG,EAAE,GAAG,EAAE,OAAO,EAAE,GAAG,IAAI,CAAC,YAAY,CAAC,OAAO,EAAE,EAAE,UAAU,EAAE,UAAU,GAAG,gBAAgB,EAAE,CAAC,CAAC;QAExG,MAAM,IAAI,CAAC,cAAc,CAAC,GAAG,EAAE,EAAE,GAAG,EAAE,OAAO,EAAE,CAAC,CAAC;QAEjD,KAAK,CAAC,SAAS,EAAE,GAAG,EAAE,OAAO,EAAE,GAAG,CAAC,OAAO,CAAC,CAAC;QAE5C,IAAI,OAAO,CAAC,MAAM,EAAE,OAAO,EAAE;YAC3B,MAAM,IAAI,iBAAiB,EAAE,CAAC;SAC/B;QAED,MAAM,UAAU,GAAG,IAAI,eAAe,EAAE,CAAC;QACzC,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,gBAAgB,CAAC,GAAG,EAAE,GAAG,EAAE,OAAO,EAAE,UAAU,CAAC,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC;QAE/F,IAAI,QAAQ,YAAY,KAAK,EAAE;YAC7B,IAAI,OAAO,CAAC,MAAM,EAAE,OAAO,EAAE;gBAC3B,MAAM,IAAI,iBAAiB,EAAE,CAAC;aAC/B;YACD,IAAI,gBAAgB,EAAE;gBACpB,OAAO,IAAI,CAAC,YAAY,CAAC,OAAO,EAAE,gBAAgB,CAAC,CAAC;aACrD;YACD,IAAI,QAAQ,CAAC,IAAI,KAAK,YAAY,EAAE;gBAClC,MAAM,IAAI,yBAAyB,EAAE,CAAC;aACvC;YACD,MAAM,IAAI,kBAAkB,CAAC,EAAE,KAAK,EAAE,QAAQ,EAAE,CAAC,CAAC;SACnD;QAED,MAAM,eAAe,GAAG,qBAAqB,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC;QAEhE,IAAI,CAAC,QAAQ,CAAC,EAAE,EAAE;YAChB,IAAI,gBAAgB,IAAI,IAAI,CAAC,WAAW,CAAC,QAAQ,CAAC,EAAE;gBAClD,MAAM,YAAY,GAAG,aAAa,gBAAgB,qBAAqB,CAAC;gBACxE,KAAK,CAAC,oBAAoB,YAAY,GAAG,EAAE,QAAQ,CAAC,MAAM,EAAE,GAAG,EAAE,eAAe,CAAC,CAAC;gBAClF,OAAO,IAAI,CAAC,YAAY,CAAC,OAAO,EAAE,gBAAgB,EAAE,eAAe,CAAC,CAAC;aACtE;YAED,MAAM,OAAO,GAAG,MAAM,QAAQ,CAAC,IAAI,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC;YAC3E,MAAM,OAAO,GAAG,QAA
Q,CAAC,OAAO,CAAC,CAAC;YAClC,MAAM,UAAU,GAAG,OAAO,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,OAAO,CAAC;YACjD,MAAM,YAAY,GAAG,gBAAgB,CAAC,CAAC,CAAC,+BAA+B,CAAC,CAAC,CAAC,wBAAwB,CAAC;YAEnG,KAAK,CAAC,oBAAoB,YAAY,GAAG,EAAE,QAAQ,CAAC,MAAM,EAAE,GAAG,EAAE,eAAe,EAAE,UAAU,CAAC,CAAC;YAE9F,MAAM,GAAG,GAAG,IAAI,CAAC,eAAe,CAAC,QAAQ,CAAC,MAAM,EAAE,OAAO,EAAE,UAAU,EAAE,eAAe,CAAC,CAAC;YACxF,MAAM,GAAG,CAAC;SACX;QAED,OAAO,EAAE,QAAQ,EAAE,OAAO,EAAE,UAAU,EAAE,CAAC;IAC3C,CAAC;IAED,cAAc,CACZ,IAA4E,EAC5E,OAA4B;QAE5B,MAAM,OAAO,GAAG,IAAI,CAAC,WAAW,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;QAChD,OAAO,IAAI,WAAW,CAAkB,IAAI,EAAE,OAAO,EAAE,IAAI,CAAC,CAAC;IAC/D,CAAC;IAED,QAAQ,CAAM,IAAY,EAAE,KAA6B;QACvD,MAAM,GAAG,GACP,aAAa,CAAC,IAAI,CAAC,CAAC,CAAC;YACnB,IAAI,GAAG,CAAC,IAAI,CAAC;YACf,CAAC,CAAC,IAAI,GAAG,CAAC,IAAI,CAAC,OAAO,GAAG,CAAC,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,GAAG,CAAC,IAAI,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC;QAExG,MAAM,YAAY,GAAG,IAAI,CAAC,YAAY,EAAE,CAAC;QACzC,IAAI,CAAC,UAAU,CAAC,YAAY,CAAC,EAAE;YAC7B,KAAK,GAAG,EAAE,GAAG,YAAY,EAAE,GAAG,KAAK,EAAS,CAAC;SAC9C;QAED,IAAI,OAAO,KAAK,KAAK,QAAQ,IAAI,KAAK,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC,EAAE;YAC/D,GAAG,CAAC,MAAM,GAAG,IAAI,CAAC,cAAc,CAAC,KAAgC,CAAC,CAAC;SACpE;QAED,OAAO,GAAG,CAAC,QAAQ,EAAE,CAAC;IACxB,CAAC;IAES,cAAc,CAAC,KAA8B;QACrD,OAAO,MAAM,CAAC,OAAO,CAAC,KAAK,CAAC;aACzB,MAAM,CAAC,CAAC,CAAC,CAAC,EAAE,KAAK,CAAC,EAAE,EAAE,CAAC,OAAO,KAAK,KAAK,WAAW,CAAC;aACpD,GAAG,CAAC,CAAC,CAAC,GAAG,EAAE,KAAK,CAAC,EAAE,EAAE;YACpB,IAAI,OAAO,KAAK,KAAK,QAAQ,IAAI,OAAO,KAAK,KAAK,QAAQ,IAAI,OAAO,KAAK,KAAK,SAAS,EAAE;gBACxF,OAAO,GAAG,kBAAkB,CAAC,GAAG,CAAC,IAAI,kBAAkB,CAAC,KAAK,CAAC,EAAE,CAAC;aAClE;YACD,IAAI,KAAK,KAAK,IAAI,EAAE;gBAClB,OAAO,GAAG,kBAAkB,CAAC,GAAG,CAAC,GAAG,CAAC;aACtC;YACD,MAAM,IAAI,WAAW,CACnB,yBAAyB,OAAO,KAAK,mQAAmQ,CACzS,CAAC;QACJ,CAAC,CAAC;aACD,IAAI,CAAC,GAAG,CAAC,CAAC;IACf,CAAC;IAED,KAAK,CAAC,gBAAgB,CACpB,GAAgB,EAChB,IAA6B,EAC7B,EAAU,EACV,UAA2B;QAE3B,MAAM,EAAE,MAAM,EAAE,GAAG,OAAO,EAAE,GAAG,IAAI,IAAI,EAAE,CAAC;QAC1C,IAAI,MAAM;YAAE,MAAM,CAAC,gBAAgB,CAAC,OAAO,EAAE,GAAG,EAAE,CAAC,UAAU,CAAC,KAAK,EAAE,CAAC,CAAC;QAEvE,MAAM,OAAO,GAAG,UAAU,CAAC,GAAG,EAAE,CAAC,UAAU,CAAC,KAAK,EAAE,EAAE,EAAE,CAAC,CAAC;QAEzD,OAAO;QACL,4FAA4F;QAC5F,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,SAAS,EAAE,GAAG,EAAE,EAAE,MAAM,EAAE,UAAU,CAAC,MAAa,EAAE,GAAG,OAAO,EAAE,CAAC,CAAC,OAAO,CAAC,GAAG,EAAE;YAC7F,YAAY,CAAC,OAAO,CAAC,CAAC;QACxB,CAAC,CAAC,CACH,CAAC;IACJ,CAAC;IAEO,WAAW,CAAC,QAAkB;QACpC,sCAAsC;QACtC,MAAM,iBAAiB,GAAG,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,CAAC;QAEjE,+DAA+D;QAC/D,IAAI,iBAAiB,KAAK,MAAM;YAAE,OAAO,IAAI,CAAC;QAC9C,IAAI,iBAAiB,KAAK,OAAO;YAAE,OAAO,KAAK,CAAC;QAEhD,6BAA6B;QAC7B,IAAI,QAAQ,CAAC,MAAM,KAAK,GAAG;YAAE,OAAO,IAAI,CAAC;QAEzC,0BAA0B;QAC1B,IAAI,QAAQ,CAAC,MAAM,KAAK,GAAG;YAAE,OAAO,IAAI,CAAC;QAEzC,wBAAwB;QACxB,IAAI,QAAQ,CAAC,MAAM,KAAK,GAAG;YAAE,OAAO,IAAI,CAAC;QAEzC,yBAAyB;QACzB,IAAI,QAAQ,CAAC,MAAM,IAAI,GAAG;YAAE,OAAO,IAAI,CAAC;QAExC,OAAO,KAAK,CAAC;IACf,CAAC;IAEO,KAAK,CAAC,YAAY,CACxB,OAA4B,EAC5B,gBAAwB,EACxB,eAAqC;QAErC,IAAI,aAAiC,CAAC;QAEtC,mHAAmH;QACnH,MAAM,sBAAsB,GAAG,eAAe,EAAE,CAAC,gBAAgB,CAAC,CAAC;QACnE,IAAI,sBAAsB,EAAE;YAC1B,MAAM,SAAS,GAAG,UAAU,CAAC,sBAAsB,CAAC,CAAC;YACrD,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,SAAS,CAAC,EAAE;gBAC5B,aAAa,GAAG,SAAS,CAAC;aAC3B;SACF;QAED,sGAAsG;QACtG,MAAM,gBAAgB,GAAG,eAAe,EAAE,CAAC,aAAa,CAAC,CAAC;QAC1D,IAAI,gBAAgB,IAAI,CAAC,aAAa,EAAE;YACtC,MAAM,cAAc,GAAG,UAAU,CAAC,gBAAgB,CAAC,CAAC;YACpD,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,cAAc,CAAC,EAAE;gBACjC,aAAa,GAAG,cAAc,GAAG,IAAI,CAAC;aACvC;iBAAM;gBACL,aAAa,GAAG,IAAI,CAAC,KAAK,CAAC,gBAAgB,CAAC,GAAG,IAAI,CAAC,GAAG,EAA
E,CAAC;aAC3D;SACF;QAED,sFAAsF;QACtF,0DAA0D;QAC1D,IAAI,CAAC,CAAC,aAAa,IAAI,CAAC,IAAI,aAAa,IAAI,aAAa,GAAG,EAAE,GAAG,IAAI,CAAC,EAAE;YACvE,MAAM,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,IAAI,CAAC,UAAU,CAAC;YACzD,aAAa,GAAG,IAAI,CAAC,kCAAkC,CAAC,gBAAgB,EAAE,UAAU,CAAC,CAAC;SACvF;QACD,MAAM,KAAK,CAAC,aAAa,CAAC,CAAC;QAE3B,OAAO,IAAI,CAAC,WAAW,CAAC,OAAO,EAAE,gBAAgB,GAAG,CAAC,CAAC,CAAC;IACzD,CAAC;IAEO,kCAAkC,CAAC,gBAAwB,EAAE,UAAkB;QACrF,MAAM,iBAAiB,GAAG,GAAG,CAAC;QAC9B,MAAM,aAAa,GAAG,GAAG,CAAC;QAE1B,MAAM,UAAU,GAAG,UAAU,GAAG,gBAAgB,CAAC;QAEjD,wDAAwD;QACxD,MAAM,YAAY,GAAG,IAAI,CAAC,GAAG,CAAC,iBAAiB,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,UAAU,CAAC,EAAE,aAAa,CAAC,CAAC;QAE1F,sEAAsE;QACtE,MAAM,MAAM,GAAG,CAAC,GAAG,IAAI,CAAC,MAAM,EAAE,GAAG,IAAI,CAAC;QAExC,OAAO,YAAY,GAAG,MAAM,GAAG,IAAI,CAAC;IACtC,CAAC;IAEO,YAAY;QAClB,OAAO,GAAG,IAAI,CAAC,WAAW,CAAC,IAAI,OAAO,OAAO,EAAE,CAAC;IAClD,CAAC;CACF;AAID,MAAM,OAAgB,YAAY;IAOhC,YAAY,MAAiB,EAAE,QAAkB,EAAE,IAAa,EAAE,OAA4B;QAN9F,uCAAmB;QAOjB,uBAAA,IAAI,wBAAW,MAAM,MAAA,CAAC;QACtB,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC;QACvB,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;QACzB,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC;IACnB,CAAC;IAUD,WAAW;QACT,MAAM,KAAK,GAAG,IAAI,CAAC,iBAAiB,EAAE,CAAC;QACvC,IAAI,CAAC,KAAK,CAAC,MAAM;YAAE,OAAO,KAAK,CAAC;QAChC,OAAO,IAAI,CAAC,YAAY,EAAE,IAAI,IAAI,CAAC;IACrC,CAAC;IAED,KAAK,CAAC,WAAW;QACf,MAAM,QAAQ,GAAG,IAAI,CAAC,YAAY,EAAE,CAAC;QACrC,IAAI,CAAC,QAAQ,EAAE;YACb,MAAM,IAAI,WAAW,CACnB,uFAAuF,CACxF,CAAC;SACH;QACD,MAAM,WAAW,GAAG,EAAE,GAAG,IAAI,CAAC,OAAO,EAAE,CAAC;QACxC,IAAI,QAAQ,IAAI,QAAQ,IAAI,OAAO,WAAW,CAAC,KAAK,KAAK,QAAQ,EAAE;YACjE,WAAW,CAAC,KAAK,GAAG,EAAE,GAAG,WAAW,CAAC,KAAK,EAAE,GAAG,QAAQ,CAAC,MAAM,EAAE,CAAC;SAClE;aAAM,IAAI,KAAK,IAAI,QAAQ,EAAE;YAC5B,MAAM,MAAM,GAAG,CAAC,GAAG,MAAM,CAAC,OAAO,CAAC,WAAW,CAAC,KAAK,IAAI,EAAE,CAAC,EAAE,GAAG,QAAQ,CAAC,GAAG,CAAC,YAAY,CAAC,OAAO,EAAE,CAAC,CAAC;YACpG,KAAK,MAAM,CAAC,GAAG,EAAE,KAAK,CAAC,IAAI,MAAM,EAAE;gBACjC,QAAQ,CAAC,GAAG,CAAC,YAAY,CAAC,GAAG,CAAC,GAAG,EAAE,KAAY,CAAC,CAAC;aAClD;YACD,WAAW,CAAC,KAAK,GAAG,SAAS,CAAC;YAC9B,WAAW,CAAC,IAAI,GAAG,QAAQ,CAAC,GAAG,CAAC,QAAQ,EAAE,CAAC;SAC5C;QACD,OAAO,MAAM,uBAAA,IAAI,4BAAQ,CAAC,cAAc,CAAC,IAAI,CAAC,WAAkB,EAAE,WAAW,CAAC,CAAC;IACjF,CAAC;IAED,KAAK,CAAC,CAAC,SAAS;QACd,4DAA4D;QAC5D,IAAI,IAAI,GAAS,IAAI,CAAC;QACtB,MAAM,IAAI,CAAC;QACX,OAAO,IAAI,CAAC,WAAW,EAAE,EAAE;YACzB,IAAI,GAAG,MAAM,IAAI,CAAC,WAAW,EAAE,CAAC;YAChC,MAAM,IAAI,CAAC;SACZ;IACH,CAAC;IAED,KAAK,CAAC,CAAC,wCAAC,MAAM,CAAC,aAAa,EAAC;QAC3B,IAAI,KAAK,EAAE,MAAM,IAAI,IAAI,IAAI,CAAC,SAAS,EAAE,EAAE;YACzC,KAAK,MAAM,IAAI,IAAI,IAAI,CAAC,iBAAiB,EAAE,EAAE;gBAC3C,MAAM,IAAI,CAAC;aACZ;SACF;IACH,CAAC;CACF;AAED;;;;;;;;GAQG;AACH,MAAM,OAAO,WAIX,SAAQ,UAAqB;IAG7B,YACE,MAAiB,EACjB,OAAkC,EAClC,IAA4E;QAE5E,KAAK,CACH,OAAO,EACP,KAAK,EAAE,KAAK,EAAE,EAAE,CACd,IAAI,IAAI,CACN,MAAM,EACN,KAAK,CAAC,QAAQ,EACd,MAAM,oBAAoB,CAAC,KAAK,CAAC,EACjC,KAAK,CAAC,OAAO,CACc,CAChC,CAAC;IACJ,CAAC;IAED;;;;;;OAMG;IACH,KAAK,CAAC,CAAC,CAAC,MAAM,CAAC,aAAa,CAAC;QAC3B,MAAM,IAAI,GAAG,MAAM,IAAI,CAAC;QACxB,IAAI,KAAK,EAAE,MAAM,IAAI,IAAI,IAAI,EAAE;YAC7B,MAAM,IAAI,CAAC;SACZ;IACH,CAAC;CACF;AAED,MAAM,CAAC,MAAM,qBAAqB,GAAG,CACnC,OAA8C,EACtB,EAAE;IAC1B,OAAO,IAAI,KAAK,CACd,MAAM,CAAC,WAAW;IAChB,aAAa;IACb,OAAO,CAAC,OAAO,EAAE,CAClB,EACD;QACE,GAAG,CAAC,MAAM,EAAE,IAAI;YACd,MAAM,GAAG,GAAG,IAAI,CAAC,QAAQ,EAAE,CAAC;YAC5B,OAAO,MAAM,CAAC,GAAG,CAAC,WAAW,EAAE,CAAC,IAAI,MAAM,CAAC,GAAG,CAAC,CAAC;QAClD,CAAC;KACF,CACF,CAAC;AACJ,CAAC,CAAC;AA8BF,yFAAyF;AACzF,qFAAqF;AACrF,wEAAwE;AACxE,MAAM,kBAAkB,GAA6B;IACnD,MAAM,EAAE,IAAI;IACZ,IAAI,EAAE,IAAI;IACV,KAAK,EAAE,IAAI;IACX,IAAI,EAAE,IAAI;IACV,OAAO,EAAE,IAAI;IAEb,UAAU,EAAE,IAAI;IAChB,MAAM,EAAE,IAAI;IACZ,OAAO,EAAE,IAAI;IACb,SAAS,EAAE,
IAAI;IACf,MAAM,EAAE,IAAI;IACZ,cAAc,EAAE,IAAI;IAEpB,eAAe,EAAE,IAAI;IACrB,gBAAgB,EAAE,IAAI;IACtB,aAAa,EAAE,IAAI;CACpB,CAAC;AAEF,MAAM,CAAC,MAAM,gBAAgB,GAAG,CAAC,GAAY,EAAyB,EAAE;IACtE,OAAO,CACL,OAAO,GAAG,KAAK,QAAQ;QACvB,GAAG,KAAK,IAAI;QACZ,CAAC,UAAU,CAAC,GAAG,CAAC;QAChB,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,MAAM,CAAC,kBAAkB,EAAE,CAAC,CAAC,CAAC,CAC7D,CAAC;AACJ,CAAC,CAAC;AA8BF,MAAM,qBAAqB,GAAG,GAAuB,EAAE;IACrD,IAAI,OAAO,IAAI,KAAK,WAAW,IAAI,IAAI,CAAC,KAAK,IAAI,IAAI,EAAE;QACrD,OAAO;YACL,kBAAkB,EAAE,IAAI;YACxB,6BAA6B,EAAE,OAAO;YACtC,gBAAgB,EAAE,iBAAiB,CAAC,IAAI,CAAC,KAAK,CAAC,EAAE,CAAC;YAClD,kBAAkB,EAAE,aAAa,CAAC,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC;YAClD,qBAAqB,EAAE,MAAM;YAC7B,6BAA6B,EAC3B,OAAO,IAAI,CAAC,OAAO,KAAK,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,IAAI,CAAC,OAAO,EAAE,IAAI,IAAI,SAAS;SACpF,CAAC;KACH;IACD,IAAI,OAAO,WAAW,KAAK,WAAW,EAAE;QACtC,OAAO;YACL,kBAAkB,EAAE,IAAI;YACxB,6BAA6B,EAAE,OAAO;YACtC,gBAAgB,EAAE,SAAS;YAC3B,kBAAkB,EAAE,SAAS,WAAW,EAAE;YAC1C,qBAAqB,EAAE,MAAM;YAC7B,6BAA6B,EAAE,OAAO,CAAC,OAAO;SAC/C,CAAC;KACH;IACD,mBAAmB;IACnB,IAAI,MAAM,CAAC,SAAS,CAAC,QAAQ,CAAC,IAAI,CAAC,OAAO,OAAO,KAAK,WAAW,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,kBAAkB,EAAE;QACvG,OAAO;YACL,kBAAkB,EAAE,IAAI;YACxB,6BAA6B,EAAE,OAAO;YACtC,gBAAgB,EAAE,iBAAiB,CAAC,OAAO,CAAC,QAAQ,CAAC;YACrD,kBAAkB,EAAE,aAAa,CAAC,OAAO,CAAC,IAAI,CAAC;YAC/C,qBAAqB,EAAE,MAAM;YAC7B,6BAA6B,EAAE,OAAO,CAAC,OAAO;SAC/C,CAAC;KACH;IAED,MAAM,WAAW,GAAG,cAAc,EAAE,CAAC;IACrC,IAAI,WAAW,EAAE;QACf,OAAO;YACL,kBAAkB,EAAE,IAAI;YACxB,6BAA6B,EAAE,OAAO;YACtC,gBAAgB,EAAE,SAAS;YAC3B,kBAAkB,EAAE,SAAS;YAC7B,qBAAqB,EAAE,WAAW,WAAW,CAAC,OAAO,EAAE;YACvD,6BAA6B,EAAE,WAAW,CAAC,OAAO;SACnD,CAAC;KACH;IAED,gDAAgD;IAChD,OAAO;QACL,kBAAkB,EAAE,IAAI;QACxB,6BAA6B,EAAE,OAAO;QACtC,gBAAgB,EAAE,SAAS;QAC3B,kBAAkB,EAAE,SAAS;QAC7B,qBAAqB,EAAE,SAAS;QAChC,6BAA6B,EAAE,SAAS;KACzC,CAAC;AACJ,CAAC,CAAC;AASF,8IAA8I;AAC9I,SAAS,cAAc;IACrB,IAAI,OAAO,SAAS,KAAK,WAAW,IAAI,CAAC,SAAS,EAAE;QAClD,OAAO,IAAI,CAAC;KACb;IAED,gCAAgC;IAChC,MAAM,eAAe,GAAG;QACtB,EAAE,GAAG,EAAE,MAAe,EAAE,OAAO,EAAE,sCAAsC,EAAE;QACzE,EAAE,GAAG,EAAE,IAAa,EAAE,OAAO,EAAE,sCAAsC,EAAE;QACvE,EAAE,GAAG,EAAE,IAAa,EAAE,OAAO,EAAE,4CAA4C,EAAE;QAC7E,EAAE,GAAG,EAAE,QAAiB,EAAE,OAAO,EAAE,wCAAwC,EAAE;QAC7E,EAAE,GAAG,EAAE,SAAkB,EAAE,OAAO,EAAE,yCAAyC,EAAE;QAC/E,EAAE,GAAG,EAAE,QAAiB,EAAE,OAAO,EAAE,mEAAmE,EAAE;KACzG,CAAC;IAEF,kCAAkC;IAClC,KAAK,MAAM,EAAE,GAAG,EAAE,OAAO,EAAE,IAAI,eAAe,EAAE;QAC9C,MAAM,KAAK,GAAG,OAAO,CAAC,IAAI,CAAC,SAAS,CAAC,SAAS,CAAC,CAAC;QAChD,IAAI,KAAK,EAAE;YACT,MAAM,KAAK,GAAG,KAAK,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC;YAC5B,MAAM,KAAK,GAAG,KAAK,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC;YAC5B,MAAM,KAAK,GAAG,KAAK,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC;YAE5B,OAAO,EAAE,OAAO,EAAE,GAAG,EAAE,OAAO,EAAE,GAAG,KAAK,IAAI,KAAK,IAAI,KAAK,EAAE,EAAE,CAAC;SAChE;KACF;IAED,OAAO,IAAI,CAAC;AACd,CAAC;AAED,MAAM,aAAa,GAAG,CAAC,IAAY,EAAQ,EAAE;IAC3C,aAAa;IACb,oDAAoD;IACpD,aAAa;IACb,mDAAmD;IACnD,IAAI,IAAI,KAAK,KAAK;QAAE,OAAO,KAAK,CAAC;IACjC,IAAI,IAAI,KAAK,QAAQ,IAAI,IAAI,KAAK,KAAK;QAAE,OAAO,KAAK,CAAC;IACtD,IAAI,IAAI,KAAK,KAAK;QAAE,OAAO,KAAK,CAAC;IACjC,IAAI,IAAI,KAAK,SAAS,IAAI,IAAI,KAAK,OAAO;QAAE,OAAO,OAAO,CAAC;IAC3D,IAAI,IAAI;QAAE,OAAO,SAAS,IAAI,EAAE,CAAC;IACjC,OAAO,SAAS,CAAC;AACnB,CAAC,CAAC;AAEF,MAAM,iBAAiB,GAAG,CAAC,QAAgB,EAAgB,EAAE;IAC3D,kBAAkB;IAClB,wDAAwD;IACxD,kBAAkB;IAClB,mDAAmD;IACnD,kDAAkD;IAElD,QAAQ,GAAG,QAAQ,CAAC,WAAW,EAAE,CAAC;IAElC,oDAAoD;IACpD,yDAAyD;IACzD,iDAAiD;IACjD,8EAA8E;IAC9E,IAAI,QAAQ,CAAC,QAAQ,CAAC,KAAK,CAAC;QAAE,OAAO,KAAK,CAAC;IAC3C,IAAI,QAAQ,KAAK,SAAS;QAAE,OAAO,SAAS,CAAC;IAC7C,IAAI,QAAQ,KAAK,QAAQ;QAAE,OAAO,OAAO,CAAC;IAC1C,IAAI,QAAQ,
KAAK,OAAO;QAAE,OAAO,SAAS,CAAC;IAC3C,IAAI,QAAQ,KAAK,SAAS;QAAE,OAAO,SAAS,CAAC;IAC7C,IAAI,QAAQ,KAAK,SAAS;QAAE,OAAO,SAAS,CAAC;IAC7C,IAAI,QAAQ,KAAK,OAAO;QAAE,OAAO,OAAO,CAAC;IACzC,IAAI,QAAQ;QAAE,OAAO,SAAS,QAAQ,EAAE,CAAC;IACzC,OAAO,SAAS,CAAC;AACnB,CAAC,CAAC;AAEF,IAAI,gBAAoC,CAAC;AACzC,MAAM,kBAAkB,GAAG,GAAG,EAAE;IAC9B,OAAO,CAAC,gBAAgB,KAAhB,gBAAgB,GAAK,qBAAqB,EAAE,EAAC,CAAC;AACxD,CAAC,CAAC;AAEF,MAAM,CAAC,MAAM,QAAQ,GAAG,CAAC,IAAY,EAAE,EAAE;IACvC,IAAI;QACF,OAAO,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;KACzB;IAAC,OAAO,GAAG,EAAE;QACZ,OAAO,SAAS,CAAC;KAClB;AACH,CAAC,CAAC;AAEF,iDAAiD;AACjD,MAAM,sBAAsB,GAAG,sBAAsB,CAAC;AACtD,MAAM,aAAa,GAAG,CAAC,GAAW,EAAW,EAAE;IAC7C,OAAO,sBAAsB,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AAC1C,CAAC,CAAC;AAEF,MAAM,CAAC,MAAM,KAAK,GAAG,CAAC,EAAU,EAAE,EAAE,CAAC,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,EAAE,CAAC,UAAU,CAAC,OAAO,EAAE,EAAE,CAAC,CAAC,CAAC;AAEvF,MAAM,uBAAuB,GAAG,CAAC,IAAY,EAAE,CAAU,EAAU,EAAE;IACnE,IAAI,OAAO,CAAC,KAAK,QAAQ,IAAI,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC,CAAC,EAAE;QACjD,MAAM,IAAI,WAAW,CAAC,GAAG,IAAI,qBAAqB,CAAC,CAAC;KACrD;IACD,IAAI,CAAC,GAAG,CAAC,EAAE;QACT,MAAM,IAAI,WAAW,CAAC,GAAG,IAAI,6BAA6B,CAAC,CAAC;KAC7D;IACD,OAAO,CAAC,CAAC;AACX,CAAC,CAAC;AAEF,MAAM,CAAC,MAAM,WAAW,GAAG,CAAC,GAAQ,EAAS,EAAE;IAC7C,IAAI,GAAG,YAAY,KAAK;QAAE,OAAO,GAAG,CAAC;IACrC,IAAI,OAAO,GAAG,KAAK,QAAQ,IAAI,GAAG,KAAK,IAAI,EAAE;QAC3C,IAAI;YACF,OAAO,IAAI,KAAK,CAAC,IAAI,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC,CAAC;SACvC;QAAC,MAAM,GAAE;KACX;IACD,OAAO,IAAI,KAAK,CAAC,GAAG,CAAC,CAAC;AACxB,CAAC,CAAC;AAEF,MAAM,CAAC,MAAM,aAAa,GAAG,CAAI,KAA2B,EAAK,EAAE;IACjE,IAAI,KAAK,IAAI,IAAI;QAAE,MAAM,IAAI,WAAW,CAAC,6CAA6C,KAAK,WAAW,CAAC,CAAC;IACxG,OAAO,KAAK,CAAC;AACf,CAAC,CAAC;AAEF;;;;;;GAMG;AACH,MAAM,CAAC,MAAM,OAAO,GAAG,CAAC,GAAW,EAAsB,EAAE;IACzD,IAAI,OAAO,OAAO,KAAK,WAAW,EAAE;QAClC,OAAO,OAAO,CAAC,GAAG,EAAE,CAAC,GAAG,CAAC,EAAE,IAAI,EAAE,IAAI,SAAS,CAAC;KAChD;IACD,IAAI,OAAO,IAAI,KAAK,WAAW,EAAE;QAC/B,OAAO,IAAI,CAAC,GAAG,EAAE,GAAG,EAAE,CAAC,GAAG,CAAC,EAAE,IAAI,EAAE,CAAC;KACrC;IACD,OAAO,SAAS,CAAC;AACnB,CAAC,CAAC;AAEF,MAAM,CAAC,MAAM,aAAa,GAAG,CAAC,KAAc,EAAU,EAAE;IACtD,IAAI,OAAO,KAAK,KAAK,QAAQ;QAAE,OAAO,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;IACxD,IAAI,OAAO,KAAK,KAAK,QAAQ;QAAE,OAAO,QAAQ,CAAC,KAAK,EAAE,EAAE,CAAC,CAAC;IAE1D,MAAM,IAAI,WAAW,CAAC,oBAAoB,KAAK,WAAW,OAAO,KAAK,iBAAiB,CAAC,CAAC;AAC3F,CAAC,CAAC;AAEF,MAAM,CAAC,MAAM,WAAW,GAAG,CAAC,KAAc,EAAU,EAAE;IACpD,IAAI,OAAO,KAAK,KAAK,QAAQ;QAAE,OAAO,KAAK,CAAC;IAC5C,IAAI,OAAO,KAAK,KAAK,QAAQ;QAAE,OAAO,UAAU,CAAC,KAAK,CAAC,CAAC;IAExD,MAAM,IAAI,WAAW,CAAC,oBAAoB,KAAK,WAAW,OAAO,KAAK,iBAAiB,CAAC,CAAC;AAC3F,CAAC,CAAC;AAEF,MAAM,CAAC,MAAM,aAAa,GAAG,CAAC,KAAc,EAAW,EAAE;IACvD,IAAI,OAAO,KAAK,KAAK,SAAS;QAAE,OAAO,KAAK,CAAC;IAC7C,IAAI,OAAO,KAAK,KAAK,QAAQ;QAAE,OAAO,KAAK,KAAK,MAAM,CAAC;IACvD,OAAO,OAAO,CAAC,KAAK,CAAC,CAAC;AACxB,CAAC,CAAC;AAEF,MAAM,CAAC,MAAM,kBAAkB,GAAG,CAAC,KAAc,EAAsB,EAAE;IACvE,IAAI,KAAK,KAAK,SAAS,EAAE;QACvB,OAAO,SAAS,CAAC;KAClB;IACD,OAAO,aAAa,CAAC,KAAK,CAAC,CAAC;AAC9B,CAAC,CAAC;AAEF,MAAM,CAAC,MAAM,gBAAgB,GAAG,CAAC,KAAc,EAAsB,EAAE;IACrE,IAAI,KAAK,KAAK,SAAS,EAAE;QACvB,OAAO,SAAS,CAAC;KAClB;IACD,OAAO,WAAW,CAAC,KAAK,CAAC,CAAC;AAC5B,CAAC,CAAC;AAEF,MAAM,CAAC,MAAM,kBAAkB,GAAG,CAAC,KAAc,EAAuB,EAAE;IACxE,IAAI,KAAK,KAAK,SAAS,EAAE;QACvB,OAAO,SAAS,CAAC;KAClB;IACD,OAAO,aAAa,CAAC,KAAK,CAAC,CAAC;AAC9B,CAAC,CAAC;AAEF,uCAAuC;AACvC,MAAM,UAAU,UAAU,CAAC,GAA8B;IACvD,IAAI,CAAC,GAAG;QAAE,OAAO,IAAI,CAAC;IACtB,KAAK,MAAM,EAAE,IAAI,GAAG;QAAE,OAAO,KAAK,CAAC;IACnC,OAAO,IAAI,CAAC;AACd,CAAC;AAED,6DAA6D;AAC7D,MAAM,UAAU,MAAM,CAAC,GAAW,EAAE,GAAW;IAC7C,OAAO,MAAM,CAAC,SAAS,CAAC,cAAc,CAAC,IAAI,CAAC,GAAG,EAAE,GAAG,CAAC,CAAC;AACxD,CAAC;AAED;;;;;GAKG;AACH,SAAS,eAAe,CAAC,aAAsB
,EAAE,UAAmB;IAClE,KAAK,MAAM,CAAC,IAAI,UAAU,EAAE;QAC1B,IAAI,CAAC,MAAM,CAAC,UAAU,EAAE,CAAC,CAAC;YAAE,SAAS;QACrC,MAAM,QAAQ,GAAG,CAAC,CAAC,WAAW,EAAE,CAAC;QACjC,IAAI,CAAC,QAAQ;YAAE,SAAS;QAExB,MAAM,GAAG,GAAG,UAAU,CAAC,CAAC,CAAC,CAAC;QAE1B,IAAI,GAAG,KAAK,IAAI,EAAE;YAChB,OAAO,aAAa,CAAC,QAAQ,CAAC,CAAC;SAChC;aAAM,IAAI,GAAG,KAAK,SAAS,EAAE;YAC5B,aAAa,CAAC,QAAQ,CAAC,GAAG,GAAG,CAAC;SAC/B;KACF;AACH,CAAC;AAED,MAAM,UAAU,KAAK,CAAC,MAAc,EAAE,GAAG,IAAW;IAClD,IAAI,OAAO,OAAO,KAAK,WAAW,IAAI,OAAO,EAAE,GAAG,EAAE,CAAC,OAAO,CAAC,KAAK,MAAM,EAAE;QACxE,OAAO,CAAC,GAAG,CAAC,gBAAgB,MAAM,EAAE,EAAE,GAAG,IAAI,CAAC,CAAC;KAChD;AACH,CAAC;AAED;;GAEG;AACH,MAAM,KAAK,GAAG,GAAG,EAAE;IACjB,OAAO,sCAAsC,CAAC,OAAO,CAAC,OAAO,EAAE,CAAC,CAAC,EAAE,EAAE;QACnE,MAAM,CAAC,GAAG,CAAC,IAAI,CAAC,MAAM,EAAE,GAAG,EAAE,CAAC,GAAG,CAAC,CAAC;QACnC,MAAM,CAAC,GAAG,CAAC,KAAK,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,GAAG,CAAC,GAAG,GAAG,CAAC;QAC1C,OAAO,CAAC,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC;IACxB,CAAC,CAAC,CAAC;AACL,CAAC,CAAC;AAEF,MAAM,CAAC,MAAM,kBAAkB,GAAG,GAAG,EAAE;IACrC,OAAO;IACL,aAAa;IACb,OAAO,MAAM,KAAK,WAAW;QAC7B,aAAa;QACb,OAAO,MAAM,CAAC,QAAQ,KAAK,WAAW;QACtC,aAAa;QACb,OAAO,SAAS,KAAK,WAAW,CACjC,CAAC;AACJ,CAAC,CAAC;AAOF,MAAM,CAAC,MAAM,iBAAiB,GAAG,CAAC,OAAY,EAA8B,EAAE;IAC5E,OAAO,OAAO,OAAO,EAAE,GAAG,KAAK,UAAU,CAAC;AAC5C,CAAC,CAAC;AAEF,MAAM,CAAC,MAAM,iBAAiB,GAAG,CAAC,OAA8B,EAAE,MAAc,EAAU,EAAE;IAC1F,MAAM,WAAW,GAAG,SAAS,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;IAC/C,IAAI,WAAW,KAAK,SAAS,EAAE;QAC7B,MAAM,IAAI,KAAK,CAAC,kBAAkB,MAAM,SAAS,CAAC,CAAC;KACpD;IACD,OAAO,WAAW,CAAC;AACrB,CAAC,CAAC;AAEF,MAAM,CAAC,MAAM,SAAS,GAAG,CAAC,OAA8B,EAAE,MAAc,EAAsB,EAAE;IAC9F,MAAM,gBAAgB,GAAG,MAAM,CAAC,WAAW,EAAE,CAAC;IAC9C,IAAI,iBAAiB,CAAC,OAAO,CAAC,EAAE;QAC9B,uEAAuE;QACvE,MAAM,eAAe,GACnB,MAAM,CAAC,CAAC,CAAC,EAAE,WAAW,EAAE;YACxB,MAAM,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,cAAc,EAAE,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC,EAAE,GAAG,EAAE,CAAC,WAAW,EAAE,CAAC,CAAC;QACrF,KAAK,MAAM,GAAG,IAAI,CAAC,MAAM,EAAE,gBAAgB,EAAE,MAAM,CAAC,WAAW,EAAE,EAAE,eAAe,CAAC,EAAE;YACnF,MAAM,KAAK,GAAG,OAAO,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC;YAC/B,IAAI,KAAK,EAAE;gBACT,OAAO,KAAK,CAAC;aACd;SACF;KACF;IAED,KAAK,MAAM,CAAC,GAAG,EAAE,KAAK,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE;QAClD,IAAI,GAAG,CAAC,WAAW,EAAE,KAAK,gBAAgB,EAAE;YAC1C,IAAI,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC,EAAE;gBACxB,IAAI,KAAK,CAAC,MAAM,IAAI,CAAC;oBAAE,OAAO,KAAK,CAAC,CAAC,CAAC,CAAC;gBACvC,OAAO,CAAC,IAAI,CAAC,YAAY,KAAK,CAAC,MAAM,oBAAoB,MAAM,iCAAiC,CAAC,CAAC;gBAClG,OAAO,KAAK,CAAC,CAAC,CAAC,CAAC;aACjB;YACD,OAAO,KAAK,CAAC;SACd;KACF;IAED,OAAO,SAAS,CAAC;AACnB,CAAC,CAAC;AAEF;;GAEG;AACH,MAAM,CAAC,MAAM,QAAQ,GAAG,CAAC,GAA8B,EAAU,EAAE;IACjE,IAAI,CAAC,GAAG;QAAE,OAAO,EAAE,CAAC;IACpB,IAAI,OAAO,MAAM,KAAK,WAAW,EAAE;QACjC,OAAO,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC;KAC5C;IAED,IAAI,OAAO,IAAI,KAAK,WAAW,EAAE;QAC/B,OAAO,IAAI,CAAC,GAAG,CAAC,CAAC;KAClB;IAED,MAAM,IAAI,WAAW,CAAC,uEAAuE,CAAC,CAAC;AACjG,CAAC,CAAC;AAEF,MAAM,UAAU,KAAK,CAAC,GAAY;IAChC,OAAO,GAAG,IAAI,IAAI,IAAI,OAAO,GAAG,KAAK,QAAQ,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;AACvE,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/error.d.ts b/node_modules/openai/error.d.ts new file mode 100644 index 0000000..28ebcf3 --- /dev/null +++ b/node_modules/openai/error.d.ts @@ -0,0 +1,57 @@ +import { Headers } from "./core.js"; +export declare class OpenAIError extends Error { +} +export declare class APIError extends OpenAIError { + /** HTTP status for the response that caused the error */ + readonly status: TStatus; + /** HTTP headers for the response that caused the error */ + readonly 
headers: THeaders;
+    /** JSON body of the response that caused the error */
+    readonly error: TError;
+    readonly code: string | null | undefined;
+    readonly param: string | null | undefined;
+    readonly type: string | undefined;
+    readonly request_id: string | null | undefined;
+    constructor(status: TStatus, error: TError, message: string | undefined, headers: THeaders);
+    private static makeMessage;
+    static generate(status: number | undefined, errorResponse: Object | undefined, message: string | undefined, headers: Headers | undefined): APIError;
+}
+export declare class APIUserAbortError extends APIError<undefined, undefined, undefined> {
+    constructor({ message }?: {
+        message?: string;
+    });
+}
+export declare class APIConnectionError extends APIError<undefined, undefined, undefined> {
+    constructor({ message, cause }: {
+        message?: string | undefined;
+        cause?: Error | undefined;
+    });
+}
+export declare class APIConnectionTimeoutError extends APIConnectionError {
+    constructor({ message }?: {
+        message?: string;
+    });
+}
+export declare class BadRequestError extends APIError<400, Headers> {
+}
+export declare class AuthenticationError extends APIError<401, Headers> {
+}
+export declare class PermissionDeniedError extends APIError<403, Headers> {
+}
+export declare class NotFoundError extends APIError<404, Headers> {
+}
+export declare class ConflictError extends APIError<409, Headers> {
+}
+export declare class UnprocessableEntityError extends APIError<422, Headers> {
+}
+export declare class RateLimitError extends APIError<429, Headers> {
+}
+export declare class InternalServerError extends APIError<number, Headers> {
+}
+export declare class LengthFinishReasonError extends OpenAIError {
+    constructor();
+}
+export declare class ContentFilterFinishReasonError extends OpenAIError {
+    constructor();
+}
+//# sourceMappingURL=error.d.ts.map
\ No newline at end of file
diff --git a/node_modules/openai/error.d.ts.map b/node_modules/openai/error.d.ts.map
new file mode 100644
index 0000000..e75bd9d
--- /dev/null
+++ b/node_modules/openai/error.d.ts.map
@@ -0,0 +1 @@
+{"version":3,"file":"error.d.ts","sourceRoot":"","sources":["src/error.ts"],"names":[],"mappings":"AAEA,OAAO,EAAe,OAAO,EAAE,MAAM,QAAQ,CAAC;AAE9C,qBAAa,WAAY,SAAQ,KAAK;CAAG;AAEzC,qBAAa,QAAQ,CACnB,OAAO,SAAS,MAAM,GAAG,SAAS,GAAG,MAAM,GAAG,SAAS,EACvD,QAAQ,SAAS,OAAO,GAAG,SAAS,GAAG,OAAO,GAAG,SAAS,EAC1D,MAAM,SAAS,MAAM,GAAG,SAAS,GAAG,MAAM,GAAG,SAAS,CACtD,SAAQ,WAAW;IACnB,yDAAyD;IACzD,QAAQ,CAAC,MAAM,EAAE,OAAO,CAAC;IACzB,0DAA0D;IAC1D,QAAQ,CAAC,OAAO,EAAE,QAAQ,CAAC;IAC3B,sDAAsD;IACtD,QAAQ,CAAC,KAAK,EAAE,MAAM,CAAC;IAEvB,QAAQ,CAAC,IAAI,EAAE,MAAM,GAAG,IAAI,GAAG,SAAS,CAAC;IACzC,QAAQ,CAAC,KAAK,EAAE,MAAM,GAAG,IAAI,GAAG,SAAS,CAAC;IAC1C,QAAQ,CAAC,IAAI,EAAE,MAAM,GAAG,SAAS,CAAC;IAElC,QAAQ,CAAC,UAAU,EAAE,MAAM,GAAG,IAAI,GAAG,SAAS,CAAC;gBAEnC,MAAM,EAAE,OAAO,EAAE,KAAK,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM,GAAG,SAAS,EAAE,OAAO,EAAE,QAAQ;IAa1F,OAAO,CAAC,MAAM,CAAC,WAAW;IAqB1B,MAAM,CAAC,QAAQ,CACb,MAAM,EAAE,MAAM,GAAG,SAAS,EAC1B,aAAa,EAAE,MAAM,GAAG,SAAS,EACjC,OAAO,EAAE,MAAM,GAAG,SAAS,EAC3B,OAAO,EAAE,OAAO,GAAG,SAAS,GAC3B,QAAQ;CAyCZ;AAED,qBAAa,iBAAkB,SAAQ,QAAQ,CAAC,SAAS,EAAE,SAAS,EAAE,SAAS,CAAC;gBAClE,EAAE,OAAO,EAAE,GAAE;QAAE,OAAO,CAAC,EAAE,MAAM,CAAA;KAAO;CAGnD;AAED,qBAAa,kBAAmB,SAAQ,QAAQ,CAAC,SAAS,EAAE,SAAS,EAAE,SAAS,CAAC;gBACnE,EAAE,OAAO,EAAE,KAAK,EAAE,EAAE;QAAE,OAAO,CAAC,EAAE,MAAM,GAAG,SAAS,CAAC;QAAC,KAAK,CAAC,EAAE,KAAK,GAAG,SAAS,CAAA;KAAE;CAM5F;AAED,qBAAa,yBAA0B,SAAQ,kBAAkB;gBACnD,EAAE,OAAO,EAAE,GAAE;QAAE,OAAO,CAAC,EAAE,MAAM,CAAA;KAAO;CAGnD;AAED,qBAAa,eAAgB,SAAQ,QAAQ,CAAC,GAAG,EAAE,OAAO,CAAC;CAAG;AAE9D,qBAAa,mBAAoB,SAAQ,QAAQ,CAAC,GAAG,EAAE,OAAO,CAAC;CAAG;AAElE,qBAAa,qBAAsB,SAAQ,QAAQ,CAAC,GAAG,EAAE,OAAO,CAAC;CAAG;AAEpE,qBAAa,aAAc,SAAQ,QAAQ,CAAC,GAAG,EAAE,OAAO,CAAC;CAAG;AAE5D,qBAAa,aAAc,SAAQ,QAAQ,CAAC,GAAG,EAAE,OAAO,CAAC;CAAG;AAE5D,qBAAa,wBAAyB,SAAQ,QAAQ,CAAC,GAAG,EAAE,OAAO,CAAC;CAAG;AAEvE,qBAAa,cAAe,SAAQ,QAAQ,CAAC,GAAG,EAAE,OAAO,CAAC;CAAG;AAE7D,qBAAa,mBAAoB,SAAQ,QAAQ,CAAC,MAAM,EAAE,OAAO,CAAC;CAAG;AAErE,qBAAa,uBAAwB,SAAQ,WAAW;;CAIvD;AAED,qBAAa,8BAA+B,SAAQ,WAAW;;CAI9D"} \ No newline at end of file diff --git a/node_modules/openai/error.js b/node_modules/openai/error.js new file mode 100644 index 0000000..d08cb5c --- /dev/null +++ b/node_modules/openai/error.js @@ -0,0 +1,130 @@ +"use strict"; +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.ContentFilterFinishReasonError = exports.LengthFinishReasonError = exports.InternalServerError = exports.RateLimitError = exports.UnprocessableEntityError = exports.ConflictError = exports.NotFoundError = exports.PermissionDeniedError = exports.AuthenticationError = exports.BadRequestError = exports.APIConnectionTimeoutError = exports.APIConnectionError = exports.APIUserAbortError = exports.APIError = exports.OpenAIError = void 0; +const core_1 = require("./core.js"); +class OpenAIError extends Error { +} +exports.OpenAIError = OpenAIError; +class APIError extends OpenAIError { + constructor(status, error, message, headers) { + super(`${APIError.makeMessage(status, error, message)}`); + this.status = status; + this.headers = headers; + this.request_id = headers?.['x-request-id']; + this.error = error; + const data = error; + this.code = data?.['code']; + this.param = data?.['param']; + this.type = data?.['type']; + } + static makeMessage(status, error, message) { + const msg = error?.message ? + typeof error.message === 'string' ? + error.message + : JSON.stringify(error.message) + : error ? 
JSON.stringify(error) + : message; + if (status && msg) { + return `${status} ${msg}`; + } + if (status) { + return `${status} status code (no body)`; + } + if (msg) { + return msg; + } + return '(no status code or body)'; + } + static generate(status, errorResponse, message, headers) { + if (!status || !headers) { + return new APIConnectionError({ message, cause: (0, core_1.castToError)(errorResponse) }); + } + const error = errorResponse?.['error']; + if (status === 400) { + return new BadRequestError(status, error, message, headers); + } + if (status === 401) { + return new AuthenticationError(status, error, message, headers); + } + if (status === 403) { + return new PermissionDeniedError(status, error, message, headers); + } + if (status === 404) { + return new NotFoundError(status, error, message, headers); + } + if (status === 409) { + return new ConflictError(status, error, message, headers); + } + if (status === 422) { + return new UnprocessableEntityError(status, error, message, headers); + } + if (status === 429) { + return new RateLimitError(status, error, message, headers); + } + if (status >= 500) { + return new InternalServerError(status, error, message, headers); + } + return new APIError(status, error, message, headers); + } +} +exports.APIError = APIError; +class APIUserAbortError extends APIError { + constructor({ message } = {}) { + super(undefined, undefined, message || 'Request was aborted.', undefined); + } +} +exports.APIUserAbortError = APIUserAbortError; +class APIConnectionError extends APIError { + constructor({ message, cause }) { + super(undefined, undefined, message || 'Connection error.', undefined); + // in some environments the 'cause' property is already declared + // @ts-ignore + if (cause) + this.cause = cause; + } +} +exports.APIConnectionError = APIConnectionError; +class APIConnectionTimeoutError extends APIConnectionError { + constructor({ message } = {}) { + super({ message: message ?? 'Request timed out.' 
}); + } +} +exports.APIConnectionTimeoutError = APIConnectionTimeoutError; +class BadRequestError extends APIError { +} +exports.BadRequestError = BadRequestError; +class AuthenticationError extends APIError { +} +exports.AuthenticationError = AuthenticationError; +class PermissionDeniedError extends APIError { +} +exports.PermissionDeniedError = PermissionDeniedError; +class NotFoundError extends APIError { +} +exports.NotFoundError = NotFoundError; +class ConflictError extends APIError { +} +exports.ConflictError = ConflictError; +class UnprocessableEntityError extends APIError { +} +exports.UnprocessableEntityError = UnprocessableEntityError; +class RateLimitError extends APIError { +} +exports.RateLimitError = RateLimitError; +class InternalServerError extends APIError { +} +exports.InternalServerError = InternalServerError; +class LengthFinishReasonError extends OpenAIError { + constructor() { + super(`Could not parse response content as the length limit was reached`); + } +} +exports.LengthFinishReasonError = LengthFinishReasonError; +class ContentFilterFinishReasonError extends OpenAIError { + constructor() { + super(`Could not parse response content as the request was rejected by the content filter`); + } +} +exports.ContentFilterFinishReasonError = ContentFilterFinishReasonError; +//# sourceMappingURL=error.js.map \ No newline at end of file diff --git a/node_modules/openai/error.js.map b/node_modules/openai/error.js.map new file mode 100644 index 0000000..af999f9 --- /dev/null +++ b/node_modules/openai/error.js.map @@ -0,0 +1 @@ +{"version":3,"file":"error.js","sourceRoot":"","sources":["src/error.ts"],"names":[],"mappings":";AAAA,sFAAsF;;;AAEtF,oCAA8C;AAE9C,MAAa,WAAY,SAAQ,KAAK;CAAG;AAAzC,kCAAyC;AAEzC,MAAa,QAIX,SAAQ,WAAW;IAcnB,YAAY,MAAe,EAAE,KAAa,EAAE,OAA2B,EAAE,OAAiB;QACxF,KAAK,CAAC,GAAG,QAAQ,CAAC,WAAW,CAAC,MAAM,EAAE,KAAK,EAAE,OAAO,CAAC,EAAE,CAAC,CAAC;QACzD,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;QACrB,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC;QACvB,IAAI,CAAC,UAAU,GAAG,OAAO,EAAE,CAAC,cAAc,CAAC,CAAC;QAC5C,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC;QAEnB,MAAM,IAAI,GAAG,KAA4B,CAAC;QAC1C,IAAI,CAAC,IAAI,GAAG,IAAI,EAAE,CAAC,MAAM,CAAC,CAAC;QAC3B,IAAI,CAAC,KAAK,GAAG,IAAI,EAAE,CAAC,OAAO,CAAC,CAAC;QAC7B,IAAI,CAAC,IAAI,GAAG,IAAI,EAAE,CAAC,MAAM,CAAC,CAAC;IAC7B,CAAC;IAEO,MAAM,CAAC,WAAW,CAAC,MAA0B,EAAE,KAAU,EAAE,OAA2B;QAC5F,MAAM,GAAG,GACP,KAAK,EAAE,OAAO,CAAC,CAAC;YACd,OAAO,KAAK,CAAC,OAAO,KAAK,QAAQ,CAAC,CAAC;gBACjC,KAAK,CAAC,OAAO;gBACf,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,OAAO,CAAC;YACjC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC;gBAC/B,CAAC,CAAC,OAAO,CAAC;QAEZ,IAAI,MAAM,IAAI,GAAG,EAAE;YACjB,OAAO,GAAG,MAAM,IAAI,GAAG,EAAE,CAAC;SAC3B;QACD,IAAI,MAAM,EAAE;YACV,OAAO,GAAG,MAAM,wBAAwB,CAAC;SAC1C;QACD,IAAI,GAAG,EAAE;YACP,OAAO,GAAG,CAAC;SACZ;QACD,OAAO,0BAA0B,CAAC;IACpC,CAAC;IAED,MAAM,CAAC,QAAQ,CACb,MAA0B,EAC1B,aAAiC,EACjC,OAA2B,EAC3B,OAA4B;QAE5B,IAAI,CAAC,MAAM,IAAI,CAAC,OAAO,EAAE;YACvB,OAAO,IAAI,kBAAkB,CAAC,EAAE,OAAO,EAAE,KAAK,EAAE,IAAA,kBAAW,EAAC,aAAa,CAAC,EAAE,CAAC,CAAC;SAC/E;QAED,MAAM,KAAK,GAAI,aAAqC,EAAE,CAAC,OAAO,CAAC,CAAC;QAEhE,IAAI,MAAM,KAAK,GAAG,EAAE;YAClB,OAAO,IAAI,eAAe,CAAC,MAAM,EAAE,KAAK,EAAE,OAAO,EAAE,OAAO,CAAC,CAAC;SAC7D;QAED,IAAI,MAAM,KAAK,GAAG,EAAE;YAClB,OAAO,IAAI,mBAAmB,CAAC,MAAM,EAAE,KAAK,EAAE,OAAO,EAAE,OAAO,CAAC,CAAC;SACjE;QAED,IAAI,MAAM,KAAK,GAAG,EAAE;YAClB,OAAO,IAAI,qBAAqB,CAAC,MAAM,EAAE,KAAK,EAAE,OAAO,EAAE,OAAO,CAAC,CAAC;SACnE;QAED,IAAI,MAAM,KAAK,GAAG,EAAE;YAClB,OAAO,IAAI,aAAa,CAAC,MAAM,EAAE,KAAK,EAAE,OAAO,EAAE,OAAO,CAAC,CAAC;SAC3D;QAED,IAAI,MAAM,KAAK,GAAG,EAAE;YAClB,OAAO,IAAI,aAAa,CAAC,MAAM,EAAE,
KAAK,EAAE,OAAO,EAAE,OAAO,CAAC,CAAC;SAC3D;QAED,IAAI,MAAM,KAAK,GAAG,EAAE;YAClB,OAAO,IAAI,wBAAwB,CAAC,MAAM,EAAE,KAAK,EAAE,OAAO,EAAE,OAAO,CAAC,CAAC;SACtE;QAED,IAAI,MAAM,KAAK,GAAG,EAAE;YAClB,OAAO,IAAI,cAAc,CAAC,MAAM,EAAE,KAAK,EAAE,OAAO,EAAE,OAAO,CAAC,CAAC;SAC5D;QAED,IAAI,MAAM,IAAI,GAAG,EAAE;YACjB,OAAO,IAAI,mBAAmB,CAAC,MAAM,EAAE,KAAK,EAAE,OAAO,EAAE,OAAO,CAAC,CAAC;SACjE;QAED,OAAO,IAAI,QAAQ,CAAC,MAAM,EAAE,KAAK,EAAE,OAAO,EAAE,OAAO,CAAC,CAAC;IACvD,CAAC;CACF;AAlGD,4BAkGC;AAED,MAAa,iBAAkB,SAAQ,QAAyC;IAC9E,YAAY,EAAE,OAAO,KAA2B,EAAE;QAChD,KAAK,CAAC,SAAS,EAAE,SAAS,EAAE,OAAO,IAAI,sBAAsB,EAAE,SAAS,CAAC,CAAC;IAC5E,CAAC;CACF;AAJD,8CAIC;AAED,MAAa,kBAAmB,SAAQ,QAAyC;IAC/E,YAAY,EAAE,OAAO,EAAE,KAAK,EAA+D;QACzF,KAAK,CAAC,SAAS,EAAE,SAAS,EAAE,OAAO,IAAI,mBAAmB,EAAE,SAAS,CAAC,CAAC;QACvE,gEAAgE;QAChE,aAAa;QACb,IAAI,KAAK;YAAE,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC;IAChC,CAAC;CACF;AAPD,gDAOC;AAED,MAAa,yBAA0B,SAAQ,kBAAkB;IAC/D,YAAY,EAAE,OAAO,KAA2B,EAAE;QAChD,KAAK,CAAC,EAAE,OAAO,EAAE,OAAO,IAAI,oBAAoB,EAAE,CAAC,CAAC;IACtD,CAAC;CACF;AAJD,8DAIC;AAED,MAAa,eAAgB,SAAQ,QAAsB;CAAG;AAA9D,0CAA8D;AAE9D,MAAa,mBAAoB,SAAQ,QAAsB;CAAG;AAAlE,kDAAkE;AAElE,MAAa,qBAAsB,SAAQ,QAAsB;CAAG;AAApE,sDAAoE;AAEpE,MAAa,aAAc,SAAQ,QAAsB;CAAG;AAA5D,sCAA4D;AAE5D,MAAa,aAAc,SAAQ,QAAsB;CAAG;AAA5D,sCAA4D;AAE5D,MAAa,wBAAyB,SAAQ,QAAsB;CAAG;AAAvE,4DAAuE;AAEvE,MAAa,cAAe,SAAQ,QAAsB;CAAG;AAA7D,wCAA6D;AAE7D,MAAa,mBAAoB,SAAQ,QAAyB;CAAG;AAArE,kDAAqE;AAErE,MAAa,uBAAwB,SAAQ,WAAW;IACtD;QACE,KAAK,CAAC,kEAAkE,CAAC,CAAC;IAC5E,CAAC;CACF;AAJD,0DAIC;AAED,MAAa,8BAA+B,SAAQ,WAAW;IAC7D;QACE,KAAK,CAAC,oFAAoF,CAAC,CAAC;IAC9F,CAAC;CACF;AAJD,wEAIC"} \ No newline at end of file diff --git a/node_modules/openai/error.mjs b/node_modules/openai/error.mjs new file mode 100644 index 0000000..7d19f55 --- /dev/null +++ b/node_modules/openai/error.mjs @@ -0,0 +1,112 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. +import { castToError } from "./core.mjs"; +export class OpenAIError extends Error { +} +export class APIError extends OpenAIError { + constructor(status, error, message, headers) { + super(`${APIError.makeMessage(status, error, message)}`); + this.status = status; + this.headers = headers; + this.request_id = headers?.['x-request-id']; + this.error = error; + const data = error; + this.code = data?.['code']; + this.param = data?.['param']; + this.type = data?.['type']; + } + static makeMessage(status, error, message) { + const msg = error?.message ? + typeof error.message === 'string' ? + error.message + : JSON.stringify(error.message) + : error ? 
JSON.stringify(error) + : message; + if (status && msg) { + return `${status} ${msg}`; + } + if (status) { + return `${status} status code (no body)`; + } + if (msg) { + return msg; + } + return '(no status code or body)'; + } + static generate(status, errorResponse, message, headers) { + if (!status || !headers) { + return new APIConnectionError({ message, cause: castToError(errorResponse) }); + } + const error = errorResponse?.['error']; + if (status === 400) { + return new BadRequestError(status, error, message, headers); + } + if (status === 401) { + return new AuthenticationError(status, error, message, headers); + } + if (status === 403) { + return new PermissionDeniedError(status, error, message, headers); + } + if (status === 404) { + return new NotFoundError(status, error, message, headers); + } + if (status === 409) { + return new ConflictError(status, error, message, headers); + } + if (status === 422) { + return new UnprocessableEntityError(status, error, message, headers); + } + if (status === 429) { + return new RateLimitError(status, error, message, headers); + } + if (status >= 500) { + return new InternalServerError(status, error, message, headers); + } + return new APIError(status, error, message, headers); + } +} +export class APIUserAbortError extends APIError { + constructor({ message } = {}) { + super(undefined, undefined, message || 'Request was aborted.', undefined); + } +} +export class APIConnectionError extends APIError { + constructor({ message, cause }) { + super(undefined, undefined, message || 'Connection error.', undefined); + // in some environments the 'cause' property is already declared + // @ts-ignore + if (cause) + this.cause = cause; + } +} +export class APIConnectionTimeoutError extends APIConnectionError { + constructor({ message } = {}) { + super({ message: message ?? 'Request timed out.' 
}); + } +} +export class BadRequestError extends APIError { +} +export class AuthenticationError extends APIError { +} +export class PermissionDeniedError extends APIError { +} +export class NotFoundError extends APIError { +} +export class ConflictError extends APIError { +} +export class UnprocessableEntityError extends APIError { +} +export class RateLimitError extends APIError { +} +export class InternalServerError extends APIError { +} +export class LengthFinishReasonError extends OpenAIError { + constructor() { + super(`Could not parse response content as the length limit was reached`); + } +} +export class ContentFilterFinishReasonError extends OpenAIError { + constructor() { + super(`Could not parse response content as the request was rejected by the content filter`); + } +} +//# sourceMappingURL=error.mjs.map \ No newline at end of file diff --git a/node_modules/openai/error.mjs.map b/node_modules/openai/error.mjs.map new file mode 100644 index 0000000..5f051d1 --- /dev/null +++ b/node_modules/openai/error.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"error.mjs","sourceRoot":"","sources":["src/error.ts"],"names":[],"mappings":"AAAA,sFAAsF;OAE/E,EAAE,WAAW,EAAW;AAE/B,MAAM,OAAO,WAAY,SAAQ,KAAK;CAAG;AAEzC,MAAM,OAAO,QAIX,SAAQ,WAAW;IAcnB,YAAY,MAAe,EAAE,KAAa,EAAE,OAA2B,EAAE,OAAiB;QACxF,KAAK,CAAC,GAAG,QAAQ,CAAC,WAAW,CAAC,MAAM,EAAE,KAAK,EAAE,OAAO,CAAC,EAAE,CAAC,CAAC;QACzD,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;QACrB,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC;QACvB,IAAI,CAAC,UAAU,GAAG,OAAO,EAAE,CAAC,cAAc,CAAC,CAAC;QAC5C,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC;QAEnB,MAAM,IAAI,GAAG,KAA4B,CAAC;QAC1C,IAAI,CAAC,IAAI,GAAG,IAAI,EAAE,CAAC,MAAM,CAAC,CAAC;QAC3B,IAAI,CAAC,KAAK,GAAG,IAAI,EAAE,CAAC,OAAO,CAAC,CAAC;QAC7B,IAAI,CAAC,IAAI,GAAG,IAAI,EAAE,CAAC,MAAM,CAAC,CAAC;IAC7B,CAAC;IAEO,MAAM,CAAC,WAAW,CAAC,MAA0B,EAAE,KAAU,EAAE,OAA2B;QAC5F,MAAM,GAAG,GACP,KAAK,EAAE,OAAO,CAAC,CAAC;YACd,OAAO,KAAK,CAAC,OAAO,KAAK,QAAQ,CAAC,CAAC;gBACjC,KAAK,CAAC,OAAO;gBACf,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,OAAO,CAAC;YACjC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC;gBAC/B,CAAC,CAAC,OAAO,CAAC;QAEZ,IAAI,MAAM,IAAI,GAAG,EAAE;YACjB,OAAO,GAAG,MAAM,IAAI,GAAG,EAAE,CAAC;SAC3B;QACD,IAAI,MAAM,EAAE;YACV,OAAO,GAAG,MAAM,wBAAwB,CAAC;SAC1C;QACD,IAAI,GAAG,EAAE;YACP,OAAO,GAAG,CAAC;SACZ;QACD,OAAO,0BAA0B,CAAC;IACpC,CAAC;IAED,MAAM,CAAC,QAAQ,CACb,MAA0B,EAC1B,aAAiC,EACjC,OAA2B,EAC3B,OAA4B;QAE5B,IAAI,CAAC,MAAM,IAAI,CAAC,OAAO,EAAE;YACvB,OAAO,IAAI,kBAAkB,CAAC,EAAE,OAAO,EAAE,KAAK,EAAE,WAAW,CAAC,aAAa,CAAC,EAAE,CAAC,CAAC;SAC/E;QAED,MAAM,KAAK,GAAI,aAAqC,EAAE,CAAC,OAAO,CAAC,CAAC;QAEhE,IAAI,MAAM,KAAK,GAAG,EAAE;YAClB,OAAO,IAAI,eAAe,CAAC,MAAM,EAAE,KAAK,EAAE,OAAO,EAAE,OAAO,CAAC,CAAC;SAC7D;QAED,IAAI,MAAM,KAAK,GAAG,EAAE;YAClB,OAAO,IAAI,mBAAmB,CAAC,MAAM,EAAE,KAAK,EAAE,OAAO,EAAE,OAAO,CAAC,CAAC;SACjE;QAED,IAAI,MAAM,KAAK,GAAG,EAAE;YAClB,OAAO,IAAI,qBAAqB,CAAC,MAAM,EAAE,KAAK,EAAE,OAAO,EAAE,OAAO,CAAC,CAAC;SACnE;QAED,IAAI,MAAM,KAAK,GAAG,EAAE;YAClB,OAAO,IAAI,aAAa,CAAC,MAAM,EAAE,KAAK,EAAE,OAAO,EAAE,OAAO,CAAC,CAAC;SAC3D;QAED,IAAI,MAAM,KAAK,GAAG,EAAE;YAClB,OAAO,IAAI,aAAa,CAAC,MAAM,EAAE,KAAK,EAAE,OAAO,EAAE,OAAO,CAAC,CAAC;SAC3D;QAED,IAAI,MAAM,KAAK,GAAG,EAAE;YAClB,OAAO,IAAI,wBAAwB,CAAC,MAAM,EAAE,KAAK,EAAE,OAAO,EAAE,OAAO,CAAC,CAAC;SACtE;QAED,IAAI,MAAM,KAAK,GAAG,EAAE;YAClB,OAAO,IAAI,cAAc,CAAC,MAAM,EAAE,KAAK,EAAE,OAAO,EAAE,OAAO,CAAC,CAAC;SAC5D;QAED,IAAI,MAAM,IAAI,GAAG,EAAE;YACjB,OAAO,IAAI,mBAAmB,CAAC,MAAM,EAAE,KAAK,EAAE,OAAO,EAAE,OAAO,CAAC,CAAC;SACjE;QAED,OAAO,IAAI,QAAQ,CAAC,MAAM,EAAE,KAAK,EAAE,OAAO,EAAE,OAAO,CAAC,CAAC;IACvD,CAAC;CACF;AAED,MAAM,OAAO,iBAAkB,SAAQ,QAAyC;IAC9E,YAAY,EAAE,OAAO,KAA2B,EAA
E;QAChD,KAAK,CAAC,SAAS,EAAE,SAAS,EAAE,OAAO,IAAI,sBAAsB,EAAE,SAAS,CAAC,CAAC;IAC5E,CAAC;CACF;AAED,MAAM,OAAO,kBAAmB,SAAQ,QAAyC;IAC/E,YAAY,EAAE,OAAO,EAAE,KAAK,EAA+D;QACzF,KAAK,CAAC,SAAS,EAAE,SAAS,EAAE,OAAO,IAAI,mBAAmB,EAAE,SAAS,CAAC,CAAC;QACvE,gEAAgE;QAChE,aAAa;QACb,IAAI,KAAK;YAAE,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC;IAChC,CAAC;CACF;AAED,MAAM,OAAO,yBAA0B,SAAQ,kBAAkB;IAC/D,YAAY,EAAE,OAAO,KAA2B,EAAE;QAChD,KAAK,CAAC,EAAE,OAAO,EAAE,OAAO,IAAI,oBAAoB,EAAE,CAAC,CAAC;IACtD,CAAC;CACF;AAED,MAAM,OAAO,eAAgB,SAAQ,QAAsB;CAAG;AAE9D,MAAM,OAAO,mBAAoB,SAAQ,QAAsB;CAAG;AAElE,MAAM,OAAO,qBAAsB,SAAQ,QAAsB;CAAG;AAEpE,MAAM,OAAO,aAAc,SAAQ,QAAsB;CAAG;AAE5D,MAAM,OAAO,aAAc,SAAQ,QAAsB;CAAG;AAE5D,MAAM,OAAO,wBAAyB,SAAQ,QAAsB;CAAG;AAEvE,MAAM,OAAO,cAAe,SAAQ,QAAsB;CAAG;AAE7D,MAAM,OAAO,mBAAoB,SAAQ,QAAyB;CAAG;AAErE,MAAM,OAAO,uBAAwB,SAAQ,WAAW;IACtD;QACE,KAAK,CAAC,kEAAkE,CAAC,CAAC;IAC5E,CAAC;CACF;AAED,MAAM,OAAO,8BAA+B,SAAQ,WAAW;IAC7D;QACE,KAAK,CAAC,oFAAoF,CAAC,CAAC;IAC9F,CAAC;CACF"} \ No newline at end of file diff --git a/node_modules/openai/helpers/zod.d.ts b/node_modules/openai/helpers/zod.d.ts new file mode 100644 index 0000000..1db4ffd --- /dev/null +++ b/node_modules/openai/helpers/zod.d.ts @@ -0,0 +1,57 @@ +import { ResponseFormatJSONSchema } from "../resources/index.js"; +import type { infer as zodInfer, ZodType } from 'zod'; +import { AutoParseableResponseFormat, AutoParseableTool } from "../lib/parser.js"; +/** + * Creates a chat completion `JSONSchema` response format object from + * the given Zod schema. + * + * If this is passed to the `.parse()`, `.stream()` or `.runTools()` + * chat completion methods then the response message will contain a + * `.parsed` property that is the result of parsing the content with + * the given Zod object. + * + * ```ts + * const completion = await client.beta.chat.completions.parse({ + * model: 'gpt-4o-2024-08-06', + * messages: [ + * { role: 'system', content: 'You are a helpful math tutor.' }, + * { role: 'user', content: 'solve 8x + 31 = 2' }, + * ], + * response_format: zodResponseFormat( + * z.object({ + * steps: z.array(z.object({ + * explanation: z.string(), + * answer: z.string(), + * })), + * final_answer: z.string(), + * }), + * 'math_answer', + * ), + * }); + * const message = completion.choices[0]?.message; + * if (message?.parsed) { + * console.log(message.parsed); + * console.log(message.parsed.final_answer); + * } + * ``` + * + * This can be passed directly to the `.create()` method but will not + * result in any automatic parsing, you'll have to parse the response yourself. + */ +export declare function zodResponseFormat(zodObject: ZodInput, name: string, props?: Omit): AutoParseableResponseFormat>; +/** + * Creates a chat completion `function` tool that can be invoked + * automatically by the chat completion `.runTools()` method or automatically + * parsed by `.parse()` / `.stream()`. 
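+ *
+ * A minimal sketch of how such a tool can be wired up (assumes an existing
+ * `client` OpenAI instance; the tool name, schema, and callback below are
+ * illustrative, not part of the shipped package):
+ *
+ * ```ts
+ * import { z } from 'zod';
+ *
+ * const getWeather = zodFunction({
+ *   name: 'get_weather',
+ *   description: 'Look up the current weather for a city',
+ *   parameters: z.object({ city: z.string() }),
+ *   // callback invoked automatically by `.runTools()` with the parsed arguments
+ *   function: async ({ city }) => ({ city, forecast: 'sunny' }),
+ * });
+ *
+ * const runner = client.beta.chat.completions.runTools({
+ *   model: 'gpt-4o-2024-08-06',
+ *   messages: [{ role: 'user', content: "What's the weather in Paris?" }],
+ *   tools: [getWeather],
+ * });
+ * console.log(await runner.finalContent());
+ * ```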
+ */ +export declare function zodFunction(options: { + name: string; + parameters: Parameters; + function?: ((args: zodInfer) => unknown | Promise) | undefined; + description?: string | undefined; +}): AutoParseableTool<{ + arguments: Parameters; + name: string; + function: (args: zodInfer) => unknown; +}>; +//# sourceMappingURL=zod.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/helpers/zod.d.ts.map b/node_modules/openai/helpers/zod.d.ts.map new file mode 100644 index 0000000..978106e --- /dev/null +++ b/node_modules/openai/helpers/zod.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"zod.d.ts","sourceRoot":"","sources":["../src/helpers/zod.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,wBAAwB,EAAE,MAAM,oBAAoB,CAAC;AAC9D,OAAO,KAAK,EAAE,KAAK,IAAI,QAAQ,EAAE,OAAO,EAAE,MAAM,KAAK,CAAC;AACtD,OAAO,EACL,2BAA2B,EAC3B,iBAAiB,EAGlB,MAAM,eAAe,CAAC;AAavB;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GAoCG;AACH,wBAAgB,iBAAiB,CAAC,QAAQ,SAAS,OAAO,EACxD,SAAS,EAAE,QAAQ,EACnB,IAAI,EAAE,MAAM,EACZ,KAAK,CAAC,EAAE,IAAI,CAAC,wBAAwB,CAAC,UAAU,EAAE,QAAQ,GAAG,QAAQ,GAAG,MAAM,CAAC,GAC9E,2BAA2B,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC,CAajD;AAED;;;;GAIG;AACH,wBAAgB,WAAW,CAAC,UAAU,SAAS,OAAO,EAAE,OAAO,EAAE;IAC/D,IAAI,EAAE,MAAM,CAAC;IACb,UAAU,EAAE,UAAU,CAAC;IACvB,QAAQ,CAAC,EAAE,CAAC,CAAC,IAAI,EAAE,QAAQ,CAAC,UAAU,CAAC,KAAK,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC,CAAC,GAAG,SAAS,CAAC;IACpF,WAAW,CAAC,EAAE,MAAM,GAAG,SAAS,CAAC;CAClC,GAAG,iBAAiB,CAAC;IACpB,SAAS,EAAE,UAAU,CAAC;IACtB,IAAI,EAAE,MAAM,CAAC;IACb,QAAQ,EAAE,CAAC,IAAI,EAAE,QAAQ,CAAC,UAAU,CAAC,KAAK,OAAO,CAAC;CACnD,CAAC,CAiBD"} \ No newline at end of file diff --git a/node_modules/openai/helpers/zod.js b/node_modules/openai/helpers/zod.js new file mode 100644 index 0000000..54b3195 --- /dev/null +++ b/node_modules/openai/helpers/zod.js @@ -0,0 +1,85 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.zodFunction = exports.zodResponseFormat = void 0; +const parser_1 = require("../lib/parser.js"); +const zod_to_json_schema_1 = require("../_vendor/zod-to-json-schema/index.js"); +function zodToJsonSchema(schema, options) { + return (0, zod_to_json_schema_1.zodToJsonSchema)(schema, { + openaiStrictMode: true, + name: options.name, + nameStrategy: 'duplicate-ref', + $refStrategy: 'extract-to-root', + nullableStrategy: 'property', + }); +} +/** + * Creates a chat completion `JSONSchema` response format object from + * the given Zod schema. + * + * If this is passed to the `.parse()`, `.stream()` or `.runTools()` + * chat completion methods then the response message will contain a + * `.parsed` property that is the result of parsing the content with + * the given Zod object. + * + * ```ts + * const completion = await client.beta.chat.completions.parse({ + * model: 'gpt-4o-2024-08-06', + * messages: [ + * { role: 'system', content: 'You are a helpful math tutor.' }, + * { role: 'user', content: 'solve 8x + 31 = 2' }, + * ], + * response_format: zodResponseFormat( + * z.object({ + * steps: z.array(z.object({ + * explanation: z.string(), + * answer: z.string(), + * })), + * final_answer: z.string(), + * }), + * 'math_answer', + * ), + * }); + * const message = completion.choices[0]?.message; + * if (message?.parsed) { + * console.log(message.parsed); + * console.log(message.parsed.final_answer); + * } + * ``` + * + * This can be passed directly to the `.create()` method but will not + * result in any automatic parsing, you'll have to parse the response yourself. 
+ */ +function zodResponseFormat(zodObject, name, props) { + return (0, parser_1.makeParseableResponseFormat)({ + type: 'json_schema', + json_schema: { + ...props, + name, + strict: true, + schema: zodToJsonSchema(zodObject, { name }), + }, + }, (content) => zodObject.parse(JSON.parse(content))); +} +exports.zodResponseFormat = zodResponseFormat; +/** + * Creates a chat completion `function` tool that can be invoked + * automatically by the chat completion `.runTools()` method or automatically + * parsed by `.parse()` / `.stream()`. + */ +function zodFunction(options) { + // @ts-expect-error TODO + return (0, parser_1.makeParseableTool)({ + type: 'function', + function: { + name: options.name, + parameters: zodToJsonSchema(options.parameters, { name: options.name }), + strict: true, + ...(options.description ? { description: options.description } : undefined), + }, + }, { + callback: options.function, + parser: (args) => options.parameters.parse(JSON.parse(args)), + }); +} +exports.zodFunction = zodFunction; +//# sourceMappingURL=zod.js.map \ No newline at end of file diff --git a/node_modules/openai/helpers/zod.js.map b/node_modules/openai/helpers/zod.js.map new file mode 100644 index 0000000..f2f077c --- /dev/null +++ b/node_modules/openai/helpers/zod.js.map @@ -0,0 +1 @@ +{"version":3,"file":"zod.js","sourceRoot":"","sources":["../src/helpers/zod.ts"],"names":[],"mappings":";;;AAEA,6CAKuB;AACvB,+EAAoF;AAEpF,SAAS,eAAe,CAAC,MAAe,EAAE,OAAyB;IACjE,OAAO,IAAA,oCAAgB,EAAC,MAAM,EAAE;QAC9B,gBAAgB,EAAE,IAAI;QACtB,IAAI,EAAE,OAAO,CAAC,IAAI;QAClB,YAAY,EAAE,eAAe;QAC7B,YAAY,EAAE,iBAAiB;QAC/B,gBAAgB,EAAE,UAAU;KAC7B,CAAC,CAAC;AACL,CAAC;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GAoCG;AACH,SAAgB,iBAAiB,CAC/B,SAAmB,EACnB,IAAY,EACZ,KAA+E;IAE/E,OAAO,IAAA,oCAA2B,EAChC;QACE,IAAI,EAAE,aAAa;QACnB,WAAW,EAAE;YACX,GAAG,KAAK;YACR,IAAI;YACJ,MAAM,EAAE,IAAI;YACZ,MAAM,EAAE,eAAe,CAAC,SAAS,EAAE,EAAE,IAAI,EAAE,CAAC;SAC7C;KACF,EACD,CAAC,OAAO,EAAE,EAAE,CAAC,SAAS,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAClD,CAAC;AACJ,CAAC;AAjBD,8CAiBC;AAED;;;;GAIG;AACH,SAAgB,WAAW,CAA6B,OAKvD;IAKC,wBAAwB;IACxB,OAAO,IAAA,0BAAiB,EACtB;QACE,IAAI,EAAE,UAAU;QAChB,QAAQ,EAAE;YACR,IAAI,EAAE,OAAO,CAAC,IAAI;YAClB,UAAU,EAAE,eAAe,CAAC,OAAO,CAAC,UAAU,EAAE,EAAE,IAAI,EAAE,OAAO,CAAC,IAAI,EAAE,CAAC;YACvE,MAAM,EAAE,IAAI;YACZ,GAAG,CAAC,OAAO,CAAC,WAAW,CAAC,CAAC,CAAC,EAAE,WAAW,EAAE,OAAO,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC,SAAS,CAAC;SAC5E;KACF,EACD;QACE,QAAQ,EAAE,OAAO,CAAC,QAAQ;QAC1B,MAAM,EAAE,CAAC,IAAI,EAAE,EAAE,CAAC,OAAO,CAAC,UAAU,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;KAC7D,CACF,CAAC;AACJ,CAAC;AA1BD,kCA0BC"} \ No newline at end of file diff --git a/node_modules/openai/helpers/zod.mjs b/node_modules/openai/helpers/zod.mjs new file mode 100644 index 0000000..ca7a986 --- /dev/null +++ b/node_modules/openai/helpers/zod.mjs @@ -0,0 +1,80 @@ +import { makeParseableResponseFormat, makeParseableTool, } from "../lib/parser.mjs"; +import { zodToJsonSchema as _zodToJsonSchema } from "../_vendor/zod-to-json-schema/index.mjs"; +function zodToJsonSchema(schema, options) { + return _zodToJsonSchema(schema, { + openaiStrictMode: true, + name: options.name, + nameStrategy: 'duplicate-ref', + $refStrategy: 'extract-to-root', + nullableStrategy: 'property', + }); +} +/** + * Creates a chat completion `JSONSchema` response format object from + * the given Zod schema. 
+ * + * If this is passed to the `.parse()`, `.stream()` or `.runTools()` + * chat completion methods then the response message will contain a + * `.parsed` property that is the result of parsing the content with + * the given Zod object. + * + * ```ts + * const completion = await client.beta.chat.completions.parse({ + * model: 'gpt-4o-2024-08-06', + * messages: [ + * { role: 'system', content: 'You are a helpful math tutor.' }, + * { role: 'user', content: 'solve 8x + 31 = 2' }, + * ], + * response_format: zodResponseFormat( + * z.object({ + * steps: z.array(z.object({ + * explanation: z.string(), + * answer: z.string(), + * })), + * final_answer: z.string(), + * }), + * 'math_answer', + * ), + * }); + * const message = completion.choices[0]?.message; + * if (message?.parsed) { + * console.log(message.parsed); + * console.log(message.parsed.final_answer); + * } + * ``` + * + * This can be passed directly to the `.create()` method but will not + * result in any automatic parsing, you'll have to parse the response yourself. + */ +export function zodResponseFormat(zodObject, name, props) { + return makeParseableResponseFormat({ + type: 'json_schema', + json_schema: { + ...props, + name, + strict: true, + schema: zodToJsonSchema(zodObject, { name }), + }, + }, (content) => zodObject.parse(JSON.parse(content))); +} +/** + * Creates a chat completion `function` tool that can be invoked + * automatically by the chat completion `.runTools()` method or automatically + * parsed by `.parse()` / `.stream()`. + */ +export function zodFunction(options) { + // @ts-expect-error TODO + return makeParseableTool({ + type: 'function', + function: { + name: options.name, + parameters: zodToJsonSchema(options.parameters, { name: options.name }), + strict: true, + ...(options.description ? 
{ description: options.description } : undefined), + }, + }, { + callback: options.function, + parser: (args) => options.parameters.parse(JSON.parse(args)), + }); +} +//# sourceMappingURL=zod.mjs.map \ No newline at end of file diff --git a/node_modules/openai/helpers/zod.mjs.map b/node_modules/openai/helpers/zod.mjs.map new file mode 100644 index 0000000..2251eef --- /dev/null +++ b/node_modules/openai/helpers/zod.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"zod.mjs","sourceRoot":"","sources":["../src/helpers/zod.ts"],"names":[],"mappings":"OAEO,EAGL,2BAA2B,EAC3B,iBAAiB,GAClB;OACM,EAAE,eAAe,IAAI,gBAAgB,EAAE;AAE9C,SAAS,eAAe,CAAC,MAAe,EAAE,OAAyB;IACjE,OAAO,gBAAgB,CAAC,MAAM,EAAE;QAC9B,gBAAgB,EAAE,IAAI;QACtB,IAAI,EAAE,OAAO,CAAC,IAAI;QAClB,YAAY,EAAE,eAAe;QAC7B,YAAY,EAAE,iBAAiB;QAC/B,gBAAgB,EAAE,UAAU;KAC7B,CAAC,CAAC;AACL,CAAC;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GAoCG;AACH,MAAM,UAAU,iBAAiB,CAC/B,SAAmB,EACnB,IAAY,EACZ,KAA+E;IAE/E,OAAO,2BAA2B,CAChC;QACE,IAAI,EAAE,aAAa;QACnB,WAAW,EAAE;YACX,GAAG,KAAK;YACR,IAAI;YACJ,MAAM,EAAE,IAAI;YACZ,MAAM,EAAE,eAAe,CAAC,SAAS,EAAE,EAAE,IAAI,EAAE,CAAC;SAC7C;KACF,EACD,CAAC,OAAO,EAAE,EAAE,CAAC,SAAS,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAClD,CAAC;AACJ,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,WAAW,CAA6B,OAKvD;IAKC,wBAAwB;IACxB,OAAO,iBAAiB,CACtB;QACE,IAAI,EAAE,UAAU;QAChB,QAAQ,EAAE;YACR,IAAI,EAAE,OAAO,CAAC,IAAI;YAClB,UAAU,EAAE,eAAe,CAAC,OAAO,CAAC,UAAU,EAAE,EAAE,IAAI,EAAE,OAAO,CAAC,IAAI,EAAE,CAAC;YACvE,MAAM,EAAE,IAAI;YACZ,GAAG,CAAC,OAAO,CAAC,WAAW,CAAC,CAAC,CAAC,EAAE,WAAW,EAAE,OAAO,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC,SAAS,CAAC;SAC5E;KACF,EACD;QACE,QAAQ,EAAE,OAAO,CAAC,QAAQ;QAC1B,MAAM,EAAE,CAAC,IAAI,EAAE,EAAE,CAAC,OAAO,CAAC,UAAU,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;KAC7D,CACF,CAAC;AACJ,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/index.d.mts b/node_modules/openai/index.d.mts new file mode 100644 index 0000000..0c87c6f --- /dev/null +++ b/node_modules/openai/index.d.mts @@ -0,0 +1,232 @@ +import { type Agent, type RequestInit } from "./_shims/index.js"; +import * as Core from "./core.js"; +import * as Errors from "./error.js"; +import * as Pagination from "./pagination.js"; +import { type CursorPageParams, CursorPageResponse, PageResponse } from "./pagination.js"; +import * as Uploads from "./uploads.js"; +import * as API from "./resources/index.js"; +import { Batch, BatchCreateParams, BatchError, BatchListParams, BatchRequestCounts, Batches, BatchesPage } from "./resources/batches.js"; +import { Completion, CompletionChoice, CompletionCreateParams, CompletionCreateParamsNonStreaming, CompletionCreateParamsStreaming, CompletionUsage, Completions } from "./resources/completions.js"; +import { CreateEmbeddingResponse, Embedding, EmbeddingCreateParams, EmbeddingModel, Embeddings } from "./resources/embeddings.js"; +import { FileContent, FileCreateParams, FileDeleted, FileListParams, FileObject, FileObjectsPage, FilePurpose, Files } from "./resources/files.js"; +import { Image, ImageCreateVariationParams, ImageEditParams, ImageGenerateParams, ImageModel, Images, ImagesResponse } from "./resources/images.js"; +import { Model, ModelDeleted, Models, ModelsPage } from "./resources/models.js"; +import { Moderation, ModerationCreateParams, ModerationCreateResponse, ModerationImageURLInput, ModerationModel, ModerationMultiModalInput, ModerationTextInput, Moderations } from "./resources/moderations.js"; +import { Audio, AudioModel, AudioResponseFormat } from "./resources/audio/audio.js"; +import { Beta } from "./resources/beta/beta.js"; +import { Chat, 
ChatModel } from "./resources/chat/chat.js"; +import { ChatCompletion, ChatCompletionAssistantMessageParam, ChatCompletionAudio, ChatCompletionAudioParam, ChatCompletionChunk, ChatCompletionContentPart, ChatCompletionContentPartImage, ChatCompletionContentPartInputAudio, ChatCompletionContentPartRefusal, ChatCompletionContentPartText, ChatCompletionCreateParams, ChatCompletionCreateParamsNonStreaming, ChatCompletionCreateParamsStreaming, ChatCompletionDeveloperMessageParam, ChatCompletionFunctionCallOption, ChatCompletionFunctionMessageParam, ChatCompletionMessage, ChatCompletionMessageParam, ChatCompletionMessageToolCall, ChatCompletionModality, ChatCompletionNamedToolChoice, ChatCompletionPredictionContent, ChatCompletionReasoningEffort, ChatCompletionRole, ChatCompletionStreamOptions, ChatCompletionSystemMessageParam, ChatCompletionTokenLogprob, ChatCompletionTool, ChatCompletionToolChoiceOption, ChatCompletionToolMessageParam, ChatCompletionUserMessageParam } from "./resources/chat/completions.js"; +import { FineTuning } from "./resources/fine-tuning/fine-tuning.js"; +import { Upload, UploadCompleteParams, UploadCreateParams, Uploads as UploadsAPIUploads } from "./resources/uploads/uploads.js"; +export interface ClientOptions { + /** + * Defaults to process.env['OPENAI_API_KEY']. + */ + apiKey?: string | undefined; + /** + * Defaults to process.env['OPENAI_ORG_ID']. + */ + organization?: string | null | undefined; + /** + * Defaults to process.env['OPENAI_PROJECT_ID']. + */ + project?: string | null | undefined; + /** + * Override the default base URL for the API, e.g., "https://api.example.com/v2/" + * + * Defaults to process.env['OPENAI_BASE_URL']. + */ + baseURL?: string | null | undefined; + /** + * The maximum amount of time (in milliseconds) that the client should wait for a response + * from the server before timing out a single request. + * + * Note that request timeouts are retried by default, so in a worst-case scenario you may wait + * much longer than this timeout before the promise succeeds or fails. + */ + timeout?: number; + /** + * An HTTP agent used to manage HTTP(S) connections. + * + * If not provided, an agent will be constructed by default in the Node.js environment, + * otherwise no agent is used. + */ + httpAgent?: Agent; + /** + * Specify a custom `fetch` function implementation. + * + * If not provided, we use `node-fetch` on Node.js and otherwise expect that `fetch` is + * defined globally. + */ + fetch?: Core.Fetch | undefined; + /** + * The maximum number of times that the client will retry a request in case of a + * temporary failure, like a network error or a 5XX error from the server. + * + * @default 2 + */ + maxRetries?: number; + /** + * Default headers to include with every request to the API. + * + * These can be removed in individual requests by explicitly setting the + * header to `undefined` or `null` in request options. + */ + defaultHeaders?: Core.Headers; + /** + * Default query parameters to include with every request to the API. + * + * These can be removed in individual requests by explicitly setting the + * param to `undefined` in request options. + */ + defaultQuery?: Core.DefaultQuery; + /** + * By default, client-side use of this library is not allowed, as it risks exposing your secret API credentials to attackers. + * Only set this option to `true` if you understand the risks and have appropriate mitigations in place. + */ + dangerouslyAllowBrowser?: boolean; +} +/** + * API Client for interfacing with the OpenAI API. 
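+ *
+ * A minimal usage sketch (the model name and prompt are illustrative; when
+ * `apiKey` is omitted it falls back to process.env['OPENAI_API_KEY']):
+ *
+ * ```ts
+ * const client = new OpenAI({ maxRetries: 2, timeout: 60_000 });
+ *
+ * try {
+ *   const completion = await client.chat.completions.create({
+ *     model: 'gpt-4o-2024-08-06',
+ *     messages: [{ role: 'user', content: 'Say hello' }],
+ *   });
+ *   console.log(completion.choices[0]?.message.content);
+ * } catch (err) {
+ *   if (err instanceof OpenAI.APIError) {
+ *     // status, request_id and headers are exposed on every APIError subclass
+ *     console.error(err.status, err.request_id);
+ *   } else {
+ *     throw err;
+ *   }
+ * }
+ * ```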
+ */ +export declare class OpenAI extends Core.APIClient { + apiKey: string; + organization: string | null; + project: string | null; + private _options; + /** + * API Client for interfacing with the OpenAI API. + * + * @param {string | undefined} [opts.apiKey=process.env['OPENAI_API_KEY'] ?? undefined] + * @param {string | null | undefined} [opts.organization=process.env['OPENAI_ORG_ID'] ?? null] + * @param {string | null | undefined} [opts.project=process.env['OPENAI_PROJECT_ID'] ?? null] + * @param {string} [opts.baseURL=process.env['OPENAI_BASE_URL'] ?? https://api.openai.com/v1] - Override the default base URL for the API. + * @param {number} [opts.timeout=10 minutes] - The maximum amount of time (in milliseconds) the client will wait for a response before timing out. + * @param {number} [opts.httpAgent] - An HTTP agent used to manage HTTP(s) connections. + * @param {Core.Fetch} [opts.fetch] - Specify a custom `fetch` function implementation. + * @param {number} [opts.maxRetries=2] - The maximum number of times the client will retry a request. + * @param {Core.Headers} opts.defaultHeaders - Default headers to include with every request to the API. + * @param {Core.DefaultQuery} opts.defaultQuery - Default query parameters to include with every request to the API. + * @param {boolean} [opts.dangerouslyAllowBrowser=false] - By default, client-side use of this library is not allowed, as it risks exposing your secret API credentials to attackers. + */ + constructor({ baseURL, apiKey, organization, project, ...opts }?: ClientOptions); + completions: API.Completions; + chat: API.Chat; + embeddings: API.Embeddings; + files: API.Files; + images: API.Images; + audio: API.Audio; + moderations: API.Moderations; + models: API.Models; + fineTuning: API.FineTuning; + beta: API.Beta; + batches: API.Batches; + uploads: API.Uploads; + protected defaultQuery(): Core.DefaultQuery | undefined; + protected defaultHeaders(opts: Core.FinalRequestOptions): Core.Headers; + protected authHeaders(opts: Core.FinalRequestOptions): Core.Headers; + protected stringifyQuery(query: Record): string; + static OpenAI: typeof OpenAI; + static DEFAULT_TIMEOUT: number; + static OpenAIError: typeof Errors.OpenAIError; + static APIError: typeof Errors.APIError; + static APIConnectionError: typeof Errors.APIConnectionError; + static APIConnectionTimeoutError: typeof Errors.APIConnectionTimeoutError; + static APIUserAbortError: typeof Errors.APIUserAbortError; + static NotFoundError: typeof Errors.NotFoundError; + static ConflictError: typeof Errors.ConflictError; + static RateLimitError: typeof Errors.RateLimitError; + static BadRequestError: typeof Errors.BadRequestError; + static AuthenticationError: typeof Errors.AuthenticationError; + static InternalServerError: typeof Errors.InternalServerError; + static PermissionDeniedError: typeof Errors.PermissionDeniedError; + static UnprocessableEntityError: typeof Errors.UnprocessableEntityError; + static toFile: typeof Uploads.toFile; + static fileFromPath: typeof Uploads.fileFromPath; +} +export declare namespace OpenAI { + export type RequestOptions = Core.RequestOptions; + export import Page = Pagination.Page; + export { type PageResponse as PageResponse }; + export import CursorPage = Pagination.CursorPage; + export { type CursorPageParams as CursorPageParams, type CursorPageResponse as CursorPageResponse }; + export { Completions as Completions, type Completion as Completion, type CompletionChoice as CompletionChoice, type CompletionUsage as CompletionUsage, type 
CompletionCreateParams as CompletionCreateParams, type CompletionCreateParamsNonStreaming as CompletionCreateParamsNonStreaming, type CompletionCreateParamsStreaming as CompletionCreateParamsStreaming, }; + export { Chat as Chat, type ChatModel as ChatModel, type ChatCompletion as ChatCompletion, type ChatCompletionAssistantMessageParam as ChatCompletionAssistantMessageParam, type ChatCompletionAudio as ChatCompletionAudio, type ChatCompletionAudioParam as ChatCompletionAudioParam, type ChatCompletionChunk as ChatCompletionChunk, type ChatCompletionContentPart as ChatCompletionContentPart, type ChatCompletionContentPartImage as ChatCompletionContentPartImage, type ChatCompletionContentPartInputAudio as ChatCompletionContentPartInputAudio, type ChatCompletionContentPartRefusal as ChatCompletionContentPartRefusal, type ChatCompletionContentPartText as ChatCompletionContentPartText, type ChatCompletionDeveloperMessageParam as ChatCompletionDeveloperMessageParam, type ChatCompletionFunctionCallOption as ChatCompletionFunctionCallOption, type ChatCompletionFunctionMessageParam as ChatCompletionFunctionMessageParam, type ChatCompletionMessage as ChatCompletionMessage, type ChatCompletionMessageParam as ChatCompletionMessageParam, type ChatCompletionMessageToolCall as ChatCompletionMessageToolCall, type ChatCompletionModality as ChatCompletionModality, type ChatCompletionNamedToolChoice as ChatCompletionNamedToolChoice, type ChatCompletionPredictionContent as ChatCompletionPredictionContent, type ChatCompletionReasoningEffort as ChatCompletionReasoningEffort, type ChatCompletionRole as ChatCompletionRole, type ChatCompletionStreamOptions as ChatCompletionStreamOptions, type ChatCompletionSystemMessageParam as ChatCompletionSystemMessageParam, type ChatCompletionTokenLogprob as ChatCompletionTokenLogprob, type ChatCompletionTool as ChatCompletionTool, type ChatCompletionToolChoiceOption as ChatCompletionToolChoiceOption, type ChatCompletionToolMessageParam as ChatCompletionToolMessageParam, type ChatCompletionUserMessageParam as ChatCompletionUserMessageParam, type ChatCompletionCreateParams as ChatCompletionCreateParams, type ChatCompletionCreateParamsNonStreaming as ChatCompletionCreateParamsNonStreaming, type ChatCompletionCreateParamsStreaming as ChatCompletionCreateParamsStreaming, }; + export { Embeddings as Embeddings, type CreateEmbeddingResponse as CreateEmbeddingResponse, type Embedding as Embedding, type EmbeddingModel as EmbeddingModel, type EmbeddingCreateParams as EmbeddingCreateParams, }; + export { Files as Files, type FileContent as FileContent, type FileDeleted as FileDeleted, type FileObject as FileObject, type FilePurpose as FilePurpose, FileObjectsPage as FileObjectsPage, type FileCreateParams as FileCreateParams, type FileListParams as FileListParams, }; + export { Images as Images, type Image as Image, type ImageModel as ImageModel, type ImagesResponse as ImagesResponse, type ImageCreateVariationParams as ImageCreateVariationParams, type ImageEditParams as ImageEditParams, type ImageGenerateParams as ImageGenerateParams, }; + export { Audio as Audio, type AudioModel as AudioModel, type AudioResponseFormat as AudioResponseFormat }; + export { Moderations as Moderations, type Moderation as Moderation, type ModerationImageURLInput as ModerationImageURLInput, type ModerationModel as ModerationModel, type ModerationMultiModalInput as ModerationMultiModalInput, type ModerationTextInput as ModerationTextInput, type ModerationCreateResponse as ModerationCreateResponse, type 
ModerationCreateParams as ModerationCreateParams, }; + export { Models as Models, type Model as Model, type ModelDeleted as ModelDeleted, ModelsPage as ModelsPage, }; + export { FineTuning as FineTuning }; + export { Beta as Beta }; + export { Batches as Batches, type Batch as Batch, type BatchError as BatchError, type BatchRequestCounts as BatchRequestCounts, BatchesPage as BatchesPage, type BatchCreateParams as BatchCreateParams, type BatchListParams as BatchListParams, }; + export { UploadsAPIUploads as Uploads, type Upload as Upload, type UploadCreateParams as UploadCreateParams, type UploadCompleteParams as UploadCompleteParams, }; + export type ErrorObject = API.ErrorObject; + export type FunctionDefinition = API.FunctionDefinition; + export type FunctionParameters = API.FunctionParameters; + export type ResponseFormatJSONObject = API.ResponseFormatJSONObject; + export type ResponseFormatJSONSchema = API.ResponseFormatJSONSchema; + export type ResponseFormatText = API.ResponseFormatText; +} +/** API Client for interfacing with the Azure OpenAI API. */ +export interface AzureClientOptions extends ClientOptions { + /** + * Defaults to process.env['OPENAI_API_VERSION']. + */ + apiVersion?: string | undefined; + /** + * Your Azure endpoint, including the resource, e.g. `https://example-resource.azure.openai.com/` + */ + endpoint?: string | undefined; + /** + * A model deployment, if given, sets the base client URL to include `/deployments/{deployment}`. + * Note: this means you won't be able to use non-deployment endpoints. Not supported with Assistants APIs. + */ + deployment?: string | undefined; + /** + * Defaults to process.env['AZURE_OPENAI_API_KEY']. + */ + apiKey?: string | undefined; + /** + * A function that returns an access token for Microsoft Entra (formerly known as Azure Active Directory), + * which will be invoked on every request. + */ + azureADTokenProvider?: (() => Promise) | undefined; +} +/** API Client for interfacing with the Azure OpenAI API. */ +export declare class AzureOpenAI extends OpenAI { + private _azureADTokenProvider; + private _deployment; + apiVersion: string; + /** + * API Client for interfacing with the Azure OpenAI API. + * + * @param {string | undefined} [opts.apiVersion=process.env['OPENAI_API_VERSION'] ?? undefined] + * @param {string | undefined} [opts.endpoint=process.env['AZURE_OPENAI_ENDPOINT'] ?? undefined] - Your Azure endpoint, including the resource, e.g. `https://example-resource.azure.openai.com/` + * @param {string | undefined} [opts.apiKey=process.env['AZURE_OPENAI_API_KEY'] ?? undefined] + * @param {string | undefined} opts.deployment - A model deployment, if given, sets the base client URL to include `/deployments/{deployment}`. + * @param {string | null | undefined} [opts.organization=process.env['OPENAI_ORG_ID'] ?? null] + * @param {string} [opts.baseURL=process.env['OPENAI_BASE_URL']] - Sets the base URL for the API, e.g. `https://example-resource.azure.openai.com/openai/`. + * @param {number} [opts.timeout=10 minutes] - The maximum amount of time (in milliseconds) the client will wait for a response before timing out. + * @param {number} [opts.httpAgent] - An HTTP agent used to manage HTTP(s) connections. + * @param {Core.Fetch} [opts.fetch] - Specify a custom `fetch` function implementation. + * @param {number} [opts.maxRetries=2] - The maximum number of times the client will retry a request. + * @param {Core.Headers} opts.defaultHeaders - Default headers to include with every request to the API. 
+ * @param {Core.DefaultQuery} opts.defaultQuery - Default query parameters to include with every request to the API. + * @param {boolean} [opts.dangerouslyAllowBrowser=false] - By default, client-side use of this library is not allowed, as it risks exposing your secret API credentials to attackers. + */ + constructor({ baseURL, apiKey, apiVersion, endpoint, deployment, azureADTokenProvider, dangerouslyAllowBrowser, ...opts }?: AzureClientOptions); + buildRequest(options: Core.FinalRequestOptions): { + req: RequestInit; + url: string; + timeout: number; + }; + private _getAzureADToken; + protected authHeaders(opts: Core.FinalRequestOptions): Core.Headers; + protected prepareOptions(opts: Core.FinalRequestOptions): Promise; +} +export { toFile, fileFromPath } from "./uploads.js"; +export { OpenAIError, APIError, APIConnectionError, APIConnectionTimeoutError, APIUserAbortError, NotFoundError, ConflictError, RateLimitError, BadRequestError, AuthenticationError, InternalServerError, PermissionDeniedError, UnprocessableEntityError, } from "./error.js"; +export default OpenAI; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/index.d.ts b/node_modules/openai/index.d.ts new file mode 100644 index 0000000..0c87c6f --- /dev/null +++ b/node_modules/openai/index.d.ts @@ -0,0 +1,232 @@ +import { type Agent, type RequestInit } from "./_shims/index.js"; +import * as Core from "./core.js"; +import * as Errors from "./error.js"; +import * as Pagination from "./pagination.js"; +import { type CursorPageParams, CursorPageResponse, PageResponse } from "./pagination.js"; +import * as Uploads from "./uploads.js"; +import * as API from "./resources/index.js"; +import { Batch, BatchCreateParams, BatchError, BatchListParams, BatchRequestCounts, Batches, BatchesPage } from "./resources/batches.js"; +import { Completion, CompletionChoice, CompletionCreateParams, CompletionCreateParamsNonStreaming, CompletionCreateParamsStreaming, CompletionUsage, Completions } from "./resources/completions.js"; +import { CreateEmbeddingResponse, Embedding, EmbeddingCreateParams, EmbeddingModel, Embeddings } from "./resources/embeddings.js"; +import { FileContent, FileCreateParams, FileDeleted, FileListParams, FileObject, FileObjectsPage, FilePurpose, Files } from "./resources/files.js"; +import { Image, ImageCreateVariationParams, ImageEditParams, ImageGenerateParams, ImageModel, Images, ImagesResponse } from "./resources/images.js"; +import { Model, ModelDeleted, Models, ModelsPage } from "./resources/models.js"; +import { Moderation, ModerationCreateParams, ModerationCreateResponse, ModerationImageURLInput, ModerationModel, ModerationMultiModalInput, ModerationTextInput, Moderations } from "./resources/moderations.js"; +import { Audio, AudioModel, AudioResponseFormat } from "./resources/audio/audio.js"; +import { Beta } from "./resources/beta/beta.js"; +import { Chat, ChatModel } from "./resources/chat/chat.js"; +import { ChatCompletion, ChatCompletionAssistantMessageParam, ChatCompletionAudio, ChatCompletionAudioParam, ChatCompletionChunk, ChatCompletionContentPart, ChatCompletionContentPartImage, ChatCompletionContentPartInputAudio, ChatCompletionContentPartRefusal, ChatCompletionContentPartText, ChatCompletionCreateParams, ChatCompletionCreateParamsNonStreaming, ChatCompletionCreateParamsStreaming, ChatCompletionDeveloperMessageParam, ChatCompletionFunctionCallOption, ChatCompletionFunctionMessageParam, ChatCompletionMessage, ChatCompletionMessageParam, 
ChatCompletionMessageToolCall, ChatCompletionModality, ChatCompletionNamedToolChoice, ChatCompletionPredictionContent, ChatCompletionReasoningEffort, ChatCompletionRole, ChatCompletionStreamOptions, ChatCompletionSystemMessageParam, ChatCompletionTokenLogprob, ChatCompletionTool, ChatCompletionToolChoiceOption, ChatCompletionToolMessageParam, ChatCompletionUserMessageParam } from "./resources/chat/completions.js"; +import { FineTuning } from "./resources/fine-tuning/fine-tuning.js"; +import { Upload, UploadCompleteParams, UploadCreateParams, Uploads as UploadsAPIUploads } from "./resources/uploads/uploads.js"; +export interface ClientOptions { + /** + * Defaults to process.env['OPENAI_API_KEY']. + */ + apiKey?: string | undefined; + /** + * Defaults to process.env['OPENAI_ORG_ID']. + */ + organization?: string | null | undefined; + /** + * Defaults to process.env['OPENAI_PROJECT_ID']. + */ + project?: string | null | undefined; + /** + * Override the default base URL for the API, e.g., "https://api.example.com/v2/" + * + * Defaults to process.env['OPENAI_BASE_URL']. + */ + baseURL?: string | null | undefined; + /** + * The maximum amount of time (in milliseconds) that the client should wait for a response + * from the server before timing out a single request. + * + * Note that request timeouts are retried by default, so in a worst-case scenario you may wait + * much longer than this timeout before the promise succeeds or fails. + */ + timeout?: number; + /** + * An HTTP agent used to manage HTTP(S) connections. + * + * If not provided, an agent will be constructed by default in the Node.js environment, + * otherwise no agent is used. + */ + httpAgent?: Agent; + /** + * Specify a custom `fetch` function implementation. + * + * If not provided, we use `node-fetch` on Node.js and otherwise expect that `fetch` is + * defined globally. + */ + fetch?: Core.Fetch | undefined; + /** + * The maximum number of times that the client will retry a request in case of a + * temporary failure, like a network error or a 5XX error from the server. + * + * @default 2 + */ + maxRetries?: number; + /** + * Default headers to include with every request to the API. + * + * These can be removed in individual requests by explicitly setting the + * header to `undefined` or `null` in request options. + */ + defaultHeaders?: Core.Headers; + /** + * Default query parameters to include with every request to the API. + * + * These can be removed in individual requests by explicitly setting the + * param to `undefined` in request options. + */ + defaultQuery?: Core.DefaultQuery; + /** + * By default, client-side use of this library is not allowed, as it risks exposing your secret API credentials to attackers. + * Only set this option to `true` if you understand the risks and have appropriate mitigations in place. + */ + dangerouslyAllowBrowser?: boolean; +} +/** + * API Client for interfacing with the OpenAI API. + */ +export declare class OpenAI extends Core.APIClient { + apiKey: string; + organization: string | null; + project: string | null; + private _options; + /** + * API Client for interfacing with the OpenAI API. + * + * @param {string | undefined} [opts.apiKey=process.env['OPENAI_API_KEY'] ?? undefined] + * @param {string | null | undefined} [opts.organization=process.env['OPENAI_ORG_ID'] ?? null] + * @param {string | null | undefined} [opts.project=process.env['OPENAI_PROJECT_ID'] ?? null] + * @param {string} [opts.baseURL=process.env['OPENAI_BASE_URL'] ?? 
https://api.openai.com/v1] - Override the default base URL for the API. + * @param {number} [opts.timeout=10 minutes] - The maximum amount of time (in milliseconds) the client will wait for a response before timing out. + * @param {number} [opts.httpAgent] - An HTTP agent used to manage HTTP(s) connections. + * @param {Core.Fetch} [opts.fetch] - Specify a custom `fetch` function implementation. + * @param {number} [opts.maxRetries=2] - The maximum number of times the client will retry a request. + * @param {Core.Headers} opts.defaultHeaders - Default headers to include with every request to the API. + * @param {Core.DefaultQuery} opts.defaultQuery - Default query parameters to include with every request to the API. + * @param {boolean} [opts.dangerouslyAllowBrowser=false] - By default, client-side use of this library is not allowed, as it risks exposing your secret API credentials to attackers. + */ + constructor({ baseURL, apiKey, organization, project, ...opts }?: ClientOptions); + completions: API.Completions; + chat: API.Chat; + embeddings: API.Embeddings; + files: API.Files; + images: API.Images; + audio: API.Audio; + moderations: API.Moderations; + models: API.Models; + fineTuning: API.FineTuning; + beta: API.Beta; + batches: API.Batches; + uploads: API.Uploads; + protected defaultQuery(): Core.DefaultQuery | undefined; + protected defaultHeaders(opts: Core.FinalRequestOptions): Core.Headers; + protected authHeaders(opts: Core.FinalRequestOptions): Core.Headers; + protected stringifyQuery(query: Record): string; + static OpenAI: typeof OpenAI; + static DEFAULT_TIMEOUT: number; + static OpenAIError: typeof Errors.OpenAIError; + static APIError: typeof Errors.APIError; + static APIConnectionError: typeof Errors.APIConnectionError; + static APIConnectionTimeoutError: typeof Errors.APIConnectionTimeoutError; + static APIUserAbortError: typeof Errors.APIUserAbortError; + static NotFoundError: typeof Errors.NotFoundError; + static ConflictError: typeof Errors.ConflictError; + static RateLimitError: typeof Errors.RateLimitError; + static BadRequestError: typeof Errors.BadRequestError; + static AuthenticationError: typeof Errors.AuthenticationError; + static InternalServerError: typeof Errors.InternalServerError; + static PermissionDeniedError: typeof Errors.PermissionDeniedError; + static UnprocessableEntityError: typeof Errors.UnprocessableEntityError; + static toFile: typeof Uploads.toFile; + static fileFromPath: typeof Uploads.fileFromPath; +} +export declare namespace OpenAI { + export type RequestOptions = Core.RequestOptions; + export import Page = Pagination.Page; + export { type PageResponse as PageResponse }; + export import CursorPage = Pagination.CursorPage; + export { type CursorPageParams as CursorPageParams, type CursorPageResponse as CursorPageResponse }; + export { Completions as Completions, type Completion as Completion, type CompletionChoice as CompletionChoice, type CompletionUsage as CompletionUsage, type CompletionCreateParams as CompletionCreateParams, type CompletionCreateParamsNonStreaming as CompletionCreateParamsNonStreaming, type CompletionCreateParamsStreaming as CompletionCreateParamsStreaming, }; + export { Chat as Chat, type ChatModel as ChatModel, type ChatCompletion as ChatCompletion, type ChatCompletionAssistantMessageParam as ChatCompletionAssistantMessageParam, type ChatCompletionAudio as ChatCompletionAudio, type ChatCompletionAudioParam as ChatCompletionAudioParam, type ChatCompletionChunk as ChatCompletionChunk, type ChatCompletionContentPart as 
ChatCompletionContentPart, type ChatCompletionContentPartImage as ChatCompletionContentPartImage, type ChatCompletionContentPartInputAudio as ChatCompletionContentPartInputAudio, type ChatCompletionContentPartRefusal as ChatCompletionContentPartRefusal, type ChatCompletionContentPartText as ChatCompletionContentPartText, type ChatCompletionDeveloperMessageParam as ChatCompletionDeveloperMessageParam, type ChatCompletionFunctionCallOption as ChatCompletionFunctionCallOption, type ChatCompletionFunctionMessageParam as ChatCompletionFunctionMessageParam, type ChatCompletionMessage as ChatCompletionMessage, type ChatCompletionMessageParam as ChatCompletionMessageParam, type ChatCompletionMessageToolCall as ChatCompletionMessageToolCall, type ChatCompletionModality as ChatCompletionModality, type ChatCompletionNamedToolChoice as ChatCompletionNamedToolChoice, type ChatCompletionPredictionContent as ChatCompletionPredictionContent, type ChatCompletionReasoningEffort as ChatCompletionReasoningEffort, type ChatCompletionRole as ChatCompletionRole, type ChatCompletionStreamOptions as ChatCompletionStreamOptions, type ChatCompletionSystemMessageParam as ChatCompletionSystemMessageParam, type ChatCompletionTokenLogprob as ChatCompletionTokenLogprob, type ChatCompletionTool as ChatCompletionTool, type ChatCompletionToolChoiceOption as ChatCompletionToolChoiceOption, type ChatCompletionToolMessageParam as ChatCompletionToolMessageParam, type ChatCompletionUserMessageParam as ChatCompletionUserMessageParam, type ChatCompletionCreateParams as ChatCompletionCreateParams, type ChatCompletionCreateParamsNonStreaming as ChatCompletionCreateParamsNonStreaming, type ChatCompletionCreateParamsStreaming as ChatCompletionCreateParamsStreaming, }; + export { Embeddings as Embeddings, type CreateEmbeddingResponse as CreateEmbeddingResponse, type Embedding as Embedding, type EmbeddingModel as EmbeddingModel, type EmbeddingCreateParams as EmbeddingCreateParams, }; + export { Files as Files, type FileContent as FileContent, type FileDeleted as FileDeleted, type FileObject as FileObject, type FilePurpose as FilePurpose, FileObjectsPage as FileObjectsPage, type FileCreateParams as FileCreateParams, type FileListParams as FileListParams, }; + export { Images as Images, type Image as Image, type ImageModel as ImageModel, type ImagesResponse as ImagesResponse, type ImageCreateVariationParams as ImageCreateVariationParams, type ImageEditParams as ImageEditParams, type ImageGenerateParams as ImageGenerateParams, }; + export { Audio as Audio, type AudioModel as AudioModel, type AudioResponseFormat as AudioResponseFormat }; + export { Moderations as Moderations, type Moderation as Moderation, type ModerationImageURLInput as ModerationImageURLInput, type ModerationModel as ModerationModel, type ModerationMultiModalInput as ModerationMultiModalInput, type ModerationTextInput as ModerationTextInput, type ModerationCreateResponse as ModerationCreateResponse, type ModerationCreateParams as ModerationCreateParams, }; + export { Models as Models, type Model as Model, type ModelDeleted as ModelDeleted, ModelsPage as ModelsPage, }; + export { FineTuning as FineTuning }; + export { Beta as Beta }; + export { Batches as Batches, type Batch as Batch, type BatchError as BatchError, type BatchRequestCounts as BatchRequestCounts, BatchesPage as BatchesPage, type BatchCreateParams as BatchCreateParams, type BatchListParams as BatchListParams, }; + export { UploadsAPIUploads as Uploads, type Upload as Upload, type UploadCreateParams as 
UploadCreateParams, type UploadCompleteParams as UploadCompleteParams, }; + export type ErrorObject = API.ErrorObject; + export type FunctionDefinition = API.FunctionDefinition; + export type FunctionParameters = API.FunctionParameters; + export type ResponseFormatJSONObject = API.ResponseFormatJSONObject; + export type ResponseFormatJSONSchema = API.ResponseFormatJSONSchema; + export type ResponseFormatText = API.ResponseFormatText; +} +/** API Client for interfacing with the Azure OpenAI API. */ +export interface AzureClientOptions extends ClientOptions { + /** + * Defaults to process.env['OPENAI_API_VERSION']. + */ + apiVersion?: string | undefined; + /** + * Your Azure endpoint, including the resource, e.g. `https://example-resource.azure.openai.com/` + */ + endpoint?: string | undefined; + /** + * A model deployment, if given, sets the base client URL to include `/deployments/{deployment}`. + * Note: this means you won't be able to use non-deployment endpoints. Not supported with Assistants APIs. + */ + deployment?: string | undefined; + /** + * Defaults to process.env['AZURE_OPENAI_API_KEY']. + */ + apiKey?: string | undefined; + /** + * A function that returns an access token for Microsoft Entra (formerly known as Azure Active Directory), + * which will be invoked on every request. + */ + azureADTokenProvider?: (() => Promise) | undefined; +} +/** API Client for interfacing with the Azure OpenAI API. */ +export declare class AzureOpenAI extends OpenAI { + private _azureADTokenProvider; + private _deployment; + apiVersion: string; + /** + * API Client for interfacing with the Azure OpenAI API. + * + * @param {string | undefined} [opts.apiVersion=process.env['OPENAI_API_VERSION'] ?? undefined] + * @param {string | undefined} [opts.endpoint=process.env['AZURE_OPENAI_ENDPOINT'] ?? undefined] - Your Azure endpoint, including the resource, e.g. `https://example-resource.azure.openai.com/` + * @param {string | undefined} [opts.apiKey=process.env['AZURE_OPENAI_API_KEY'] ?? undefined] + * @param {string | undefined} opts.deployment - A model deployment, if given, sets the base client URL to include `/deployments/{deployment}`. + * @param {string | null | undefined} [opts.organization=process.env['OPENAI_ORG_ID'] ?? null] + * @param {string} [opts.baseURL=process.env['OPENAI_BASE_URL']] - Sets the base URL for the API, e.g. `https://example-resource.azure.openai.com/openai/`. + * @param {number} [opts.timeout=10 minutes] - The maximum amount of time (in milliseconds) the client will wait for a response before timing out. + * @param {number} [opts.httpAgent] - An HTTP agent used to manage HTTP(s) connections. + * @param {Core.Fetch} [opts.fetch] - Specify a custom `fetch` function implementation. + * @param {number} [opts.maxRetries=2] - The maximum number of times the client will retry a request. + * @param {Core.Headers} opts.defaultHeaders - Default headers to include with every request to the API. + * @param {Core.DefaultQuery} opts.defaultQuery - Default query parameters to include with every request to the API. + * @param {boolean} [opts.dangerouslyAllowBrowser=false] - By default, client-side use of this library is not allowed, as it risks exposing your secret API credentials to attackers. 
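+ *
+ * A minimal sketch (the resource endpoint, deployment name, and API version
+ * below are placeholders for your own Azure configuration):
+ *
+ * ```ts
+ * const client = new AzureOpenAI({
+ *   endpoint: 'https://example-resource.azure.openai.com/',
+ *   apiKey: process.env['AZURE_OPENAI_API_KEY'],
+ *   apiVersion: '2024-10-21',
+ *   deployment: 'my-gpt-4o-deployment',
+ * });
+ *
+ * // with `deployment` set, requests are routed to /deployments/{deployment}
+ * const completion = await client.chat.completions.create({
+ *   model: 'gpt-4o',
+ *   messages: [{ role: 'user', content: 'Say hello' }],
+ * });
+ * ```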
+ */ + constructor({ baseURL, apiKey, apiVersion, endpoint, deployment, azureADTokenProvider, dangerouslyAllowBrowser, ...opts }?: AzureClientOptions); + buildRequest(options: Core.FinalRequestOptions): { + req: RequestInit; + url: string; + timeout: number; + }; + private _getAzureADToken; + protected authHeaders(opts: Core.FinalRequestOptions): Core.Headers; + protected prepareOptions(opts: Core.FinalRequestOptions): Promise; +} +export { toFile, fileFromPath } from "./uploads.js"; +export { OpenAIError, APIError, APIConnectionError, APIConnectionTimeoutError, APIUserAbortError, NotFoundError, ConflictError, RateLimitError, BadRequestError, AuthenticationError, InternalServerError, PermissionDeniedError, UnprocessableEntityError, } from "./error.js"; +export default OpenAI; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/index.d.ts.map b/node_modules/openai/index.d.ts.map new file mode 100644 index 0000000..b196629 --- /dev/null +++ b/node_modules/openai/index.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["src/index.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,KAAK,KAAK,EAAE,KAAK,WAAW,EAAE,MAAM,gBAAgB,CAAC;AAE9D,OAAO,KAAK,IAAI,MAAM,QAAQ,CAAC;AAC/B,OAAO,KAAK,MAAM,MAAM,SAAS,CAAC;AAClC,OAAO,KAAK,UAAU,MAAM,cAAc,CAAC;AAC3C,OAAO,EAAE,KAAK,gBAAgB,EAAE,kBAAkB,EAAE,YAAY,EAAE,MAAM,cAAc,CAAC;AACvF,OAAO,KAAK,OAAO,MAAM,WAAW,CAAC;AACrC,OAAO,KAAK,GAAG,MAAM,mBAAmB,CAAC;AACzC,OAAO,EACL,KAAK,EACL,iBAAiB,EACjB,UAAU,EACV,eAAe,EACf,kBAAkB,EAClB,OAAO,EACP,WAAW,EACZ,MAAM,qBAAqB,CAAC;AAC7B,OAAO,EACL,UAAU,EACV,gBAAgB,EAChB,sBAAsB,EACtB,kCAAkC,EAClC,+BAA+B,EAC/B,eAAe,EACf,WAAW,EACZ,MAAM,yBAAyB,CAAC;AACjC,OAAO,EACL,uBAAuB,EACvB,SAAS,EACT,qBAAqB,EACrB,cAAc,EACd,UAAU,EACX,MAAM,wBAAwB,CAAC;AAChC,OAAO,EACL,WAAW,EACX,gBAAgB,EAChB,WAAW,EACX,cAAc,EACd,UAAU,EACV,eAAe,EACf,WAAW,EACX,KAAK,EACN,MAAM,mBAAmB,CAAC;AAC3B,OAAO,EACL,KAAK,EACL,0BAA0B,EAC1B,eAAe,EACf,mBAAmB,EACnB,UAAU,EACV,MAAM,EACN,cAAc,EACf,MAAM,oBAAoB,CAAC;AAC5B,OAAO,EAAE,KAAK,EAAE,YAAY,EAAE,MAAM,EAAE,UAAU,EAAE,MAAM,oBAAoB,CAAC;AAC7E,OAAO,EACL,UAAU,EACV,sBAAsB,EACtB,wBAAwB,EACxB,uBAAuB,EACvB,eAAe,EACf,yBAAyB,EACzB,mBAAmB,EACnB,WAAW,EACZ,MAAM,yBAAyB,CAAC;AACjC,OAAO,EAAE,KAAK,EAAE,UAAU,EAAE,mBAAmB,EAAE,MAAM,yBAAyB,CAAC;AACjF,OAAO,EAAE,IAAI,EAAE,MAAM,uBAAuB,CAAC;AAC7C,OAAO,EAAE,IAAI,EAAE,SAAS,EAAE,MAAM,uBAAuB,CAAC;AACxD,OAAO,EACL,cAAc,EACd,mCAAmC,EACnC,mBAAmB,EACnB,wBAAwB,EACxB,mBAAmB,EACnB,yBAAyB,EACzB,8BAA8B,EAC9B,mCAAmC,EACnC,gCAAgC,EAChC,6BAA6B,EAC7B,0BAA0B,EAC1B,sCAAsC,EACtC,mCAAmC,EACnC,mCAAmC,EACnC,gCAAgC,EAChC,kCAAkC,EAClC,qBAAqB,EACrB,0BAA0B,EAC1B,6BAA6B,EAC7B,sBAAsB,EACtB,6BAA6B,EAC7B,+BAA+B,EAC/B,6BAA6B,EAC7B,kBAAkB,EAClB,2BAA2B,EAC3B,gCAAgC,EAChC,0BAA0B,EAC1B,kBAAkB,EAClB,8BAA8B,EAC9B,8BAA8B,EAC9B,8BAA8B,EAC/B,MAAM,8BAA8B,CAAC;AACtC,OAAO,EAAE,UAAU,EAAE,MAAM,qCAAqC,CAAC;AACjE,OAAO,EACL,MAAM,EACN,oBAAoB,EACpB,kBAAkB,EAClB,OAAO,IAAI,iBAAiB,EAC7B,MAAM,6BAA6B,CAAC;AAErC,MAAM,WAAW,aAAa;IAC5B;;OAEG;IACH,MAAM,CAAC,EAAE,MAAM,GAAG,SAAS,CAAC;IAE5B;;OAEG;IACH,YAAY,CAAC,EAAE,MAAM,GAAG,IAAI,GAAG,SAAS,CAAC;IAEzC;;OAEG;IACH,OAAO,CAAC,EAAE,MAAM,GAAG,IAAI,GAAG,SAAS,CAAC;IAEpC;;;;OAIG;IACH,OAAO,CAAC,EAAE,MAAM,GAAG,IAAI,GAAG,SAAS,CAAC;IAEpC;;;;;;OAMG;IACH,OAAO,CAAC,EAAE,MAAM,CAAC;IAEjB;;;;;OAKG;IACH,SAAS,CAAC,EAAE,KAAK,CAAC;IAElB;;;;;OAKG;IACH,KAAK,CAAC,EAAE,IAAI,CAAC,KAAK,GAAG,SAAS,CAAC;IAE/B;;;;;OAKG;IACH,UAAU,CAAC,EAAE,MAAM,CAAC;IAEpB;;;;;OAKG;IACH,cAAc,CAAC,EAAE,IAAI,CAAC,OAAO,CAAC;IAE9B;;;;;OAKG;IACH,YAAY,CAAC,EAAE,IAAI,CAAC,YAAY,CAAC;IAEjC;;;OAGG;IACH,uBAAuB,CAAC,EAAE,OAAO,CAAC;CACnC;
AAED;;GAEG;AACH,qBAAa,MAAO,SAAQ,IAAI,CAAC,SAAS;IACxC,MAAM,EAAE,MAAM,CAAC;IACf,YAAY,EAAE,MAAM,GAAG,IAAI,CAAC;IAC5B,OAAO,EAAE,MAAM,GAAG,IAAI,CAAC;IAEvB,OAAO,CAAC,QAAQ,CAAgB;IAEhC;;;;;;;;;;;;;;OAcG;gBACS,EACV,OAAyC,EACzC,MAAuC,EACvC,YAAoD,EACpD,OAAmD,EACnD,GAAG,IAAI,EACR,GAAE,aAAkB;IAoCrB,WAAW,EAAE,GAAG,CAAC,WAAW,CAA6B;IACzD,IAAI,EAAE,GAAG,CAAC,IAAI,CAAsB;IACpC,UAAU,EAAE,GAAG,CAAC,UAAU,CAA4B;IACtD,KAAK,EAAE,GAAG,CAAC,KAAK,CAAuB;IACvC,MAAM,EAAE,GAAG,CAAC,MAAM,CAAwB;IAC1C,KAAK,EAAE,GAAG,CAAC,KAAK,CAAuB;IACvC,WAAW,EAAE,GAAG,CAAC,WAAW,CAA6B;IACzD,MAAM,EAAE,GAAG,CAAC,MAAM,CAAwB;IAC1C,UAAU,EAAE,GAAG,CAAC,UAAU,CAA4B;IACtD,IAAI,EAAE,GAAG,CAAC,IAAI,CAAsB;IACpC,OAAO,EAAE,GAAG,CAAC,OAAO,CAAyB;IAC7C,OAAO,EAAE,GAAG,CAAC,OAAO,CAAyB;cAE1B,YAAY,IAAI,IAAI,CAAC,YAAY,GAAG,SAAS;cAI7C,cAAc,CAAC,IAAI,EAAE,IAAI,CAAC,mBAAmB,GAAG,IAAI,CAAC,OAAO;cAS5D,WAAW,CAAC,IAAI,EAAE,IAAI,CAAC,mBAAmB,GAAG,IAAI,CAAC,OAAO;cAIzD,cAAc,CAAC,KAAK,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,GAAG,MAAM;IAIzE,MAAM,CAAC,MAAM,gBAAQ;IACrB,MAAM,CAAC,eAAe,SAAU;IAEhC,MAAM,CAAC,WAAW,4BAAsB;IACxC,MAAM,CAAC,QAAQ,yBAAmB;IAClC,MAAM,CAAC,kBAAkB,mCAA6B;IACtD,MAAM,CAAC,yBAAyB,0CAAoC;IACpE,MAAM,CAAC,iBAAiB,kCAA4B;IACpD,MAAM,CAAC,aAAa,8BAAwB;IAC5C,MAAM,CAAC,aAAa,8BAAwB;IAC5C,MAAM,CAAC,cAAc,+BAAyB;IAC9C,MAAM,CAAC,eAAe,gCAA0B;IAChD,MAAM,CAAC,mBAAmB,oCAA8B;IACxD,MAAM,CAAC,mBAAmB,oCAA8B;IACxD,MAAM,CAAC,qBAAqB,sCAAgC;IAC5D,MAAM,CAAC,wBAAwB,yCAAmC;IAElE,MAAM,CAAC,MAAM,wBAAkB;IAC/B,MAAM,CAAC,YAAY,8BAAwB;CAC5C;AAiBD,MAAM,CAAC,OAAO,WAAW,MAAM,CAAC;IAC9B,MAAM,MAAM,cAAc,GAAG,IAAI,CAAC,cAAc,CAAC;IAEjD,MAAM,QAAQ,IAAI,GAAG,UAAU,CAAC,IAAI,CAAC;IACrC,OAAO,EAAE,KAAK,YAAY,IAAI,YAAY,EAAE,CAAC;IAE7C,MAAM,QAAQ,UAAU,GAAG,UAAU,CAAC,UAAU,CAAC;IACjD,OAAO,EAAE,KAAK,gBAAgB,IAAI,gBAAgB,EAAE,KAAK,kBAAkB,IAAI,kBAAkB,EAAE,CAAC;IAEpG,OAAO,EACL,WAAW,IAAI,WAAW,EAC1B,KAAK,UAAU,IAAI,UAAU,EAC7B,KAAK,gBAAgB,IAAI,gBAAgB,EACzC,KAAK,eAAe,IAAI,eAAe,EACvC,KAAK,sBAAsB,IAAI,sBAAsB,EACrD,KAAK,kCAAkC,IAAI,kCAAkC,EAC7E,KAAK,+BAA+B,IAAI,+BAA+B,GACxE,CAAC;IAEF,OAAO,EACL,IAAI,IAAI,IAAI,EACZ,KAAK,SAAS,IAAI,SAAS,EAC3B,KAAK,cAAc,IAAI,cAAc,EACrC,KAAK,mCAAmC,IAAI,mCAAmC,EAC/E,KAAK,mBAAmB,IAAI,mBAAmB,EAC/C,KAAK,wBAAwB,IAAI,wBAAwB,EACzD,KAAK,mBAAmB,IAAI,mBAAmB,EAC/C,KAAK,yBAAyB,IAAI,yBAAyB,EAC3D,KAAK,8BAA8B,IAAI,8BAA8B,EACrE,KAAK,mCAAmC,IAAI,mCAAmC,EAC/E,KAAK,gCAAgC,IAAI,gCAAgC,EACzE,KAAK,6BAA6B,IAAI,6BAA6B,EACnE,KAAK,mCAAmC,IAAI,mCAAmC,EAC/E,KAAK,gCAAgC,IAAI,gCAAgC,EACzE,KAAK,kCAAkC,IAAI,kCAAkC,EAC7E,KAAK,qBAAqB,IAAI,qBAAqB,EACnD,KAAK,0BAA0B,IAAI,0BAA0B,EAC7D,KAAK,6BAA6B,IAAI,6BAA6B,EACnE,KAAK,sBAAsB,IAAI,sBAAsB,EACrD,KAAK,6BAA6B,IAAI,6BAA6B,EACnE,KAAK,+BAA+B,IAAI,+BAA+B,EACvE,KAAK,6BAA6B,IAAI,6BAA6B,EACnE,KAAK,kBAAkB,IAAI,kBAAkB,EAC7C,KAAK,2BAA2B,IAAI,2BAA2B,EAC/D,KAAK,gCAAgC,IAAI,gCAAgC,EACzE,KAAK,0BAA0B,IAAI,0BAA0B,EAC7D,KAAK,kBAAkB,IAAI,kBAAkB,EAC7C,KAAK,8BAA8B,IAAI,8BAA8B,EACrE,KAAK,8BAA8B,IAAI,8BAA8B,EACrE,KAAK,8BAA8B,IAAI,8BAA8B,EACrE,KAAK,0BAA0B,IAAI,0BAA0B,EAC7D,KAAK,sCAAsC,IAAI,sCAAsC,EACrF,KAAK,mCAAmC,IAAI,mCAAmC,GAChF,CAAC;IAEF,OAAO,EACL,UAAU,IAAI,UAAU,EACxB,KAAK,uBAAuB,IAAI,uBAAuB,EACvD,KAAK,SAAS,IAAI,SAAS,EAC3B,KAAK,cAAc,IAAI,cAAc,EACrC,KAAK,qBAAqB,IAAI,qBAAqB,GACpD,CAAC;IAEF,OAAO,EACL,KAAK,IAAI,KAAK,EACd,KAAK,WAAW,IAAI,WAAW,EAC/B,KAAK,WAAW,IAAI,WAAW,EAC/B,KAAK,UAAU,IAAI,UAAU,EAC7B,KAAK,WAAW,IAAI,WAAW,EAC/B,eAAe,IAAI,eAAe,EAClC,KAAK,gBAAgB,IAAI,gBAAgB,EACzC,KAAK,cAAc,IAAI,cAAc,GACtC,CAAC;IAEF,OAAO,EACL,MAAM,IAAI,MAAM,EAChB,KAAK,KAAK,IAAI,KAAK,EACnB,KAAK,UAAU,IAAI,UAAU,EAC7B,KAAK,cAAc,IAAI,cAAc,EACrC,KAAK,0BAA0B,IAAI,0BAA0B,EAC7D,KAAK,eAAe,IAAI,eAAe,EACvC,KAAK,mBAAmB,IAAI,mBAAmB,GAChD,CAAC;IAEF,OAAO,EAAE,KAAK,
IAAI,KAAK,EAAE,KAAK,UAAU,IAAI,UAAU,EAAE,KAAK,mBAAmB,IAAI,mBAAmB,EAAE,CAAC;IAE1G,OAAO,EACL,WAAW,IAAI,WAAW,EAC1B,KAAK,UAAU,IAAI,UAAU,EAC7B,KAAK,uBAAuB,IAAI,uBAAuB,EACvD,KAAK,eAAe,IAAI,eAAe,EACvC,KAAK,yBAAyB,IAAI,yBAAyB,EAC3D,KAAK,mBAAmB,IAAI,mBAAmB,EAC/C,KAAK,wBAAwB,IAAI,wBAAwB,EACzD,KAAK,sBAAsB,IAAI,sBAAsB,GACtD,CAAC;IAEF,OAAO,EACL,MAAM,IAAI,MAAM,EAChB,KAAK,KAAK,IAAI,KAAK,EACnB,KAAK,YAAY,IAAI,YAAY,EACjC,UAAU,IAAI,UAAU,GACzB,CAAC;IAEF,OAAO,EAAE,UAAU,IAAI,UAAU,EAAE,CAAC;IAEpC,OAAO,EAAE,IAAI,IAAI,IAAI,EAAE,CAAC;IAExB,OAAO,EACL,OAAO,IAAI,OAAO,EAClB,KAAK,KAAK,IAAI,KAAK,EACnB,KAAK,UAAU,IAAI,UAAU,EAC7B,KAAK,kBAAkB,IAAI,kBAAkB,EAC7C,WAAW,IAAI,WAAW,EAC1B,KAAK,iBAAiB,IAAI,iBAAiB,EAC3C,KAAK,eAAe,IAAI,eAAe,GACxC,CAAC;IAEF,OAAO,EACL,iBAAiB,IAAI,OAAO,EAC5B,KAAK,MAAM,IAAI,MAAM,EACrB,KAAK,kBAAkB,IAAI,kBAAkB,EAC7C,KAAK,oBAAoB,IAAI,oBAAoB,GAClD,CAAC;IAEF,MAAM,MAAM,WAAW,GAAG,GAAG,CAAC,WAAW,CAAC;IAC1C,MAAM,MAAM,kBAAkB,GAAG,GAAG,CAAC,kBAAkB,CAAC;IACxD,MAAM,MAAM,kBAAkB,GAAG,GAAG,CAAC,kBAAkB,CAAC;IACxD,MAAM,MAAM,wBAAwB,GAAG,GAAG,CAAC,wBAAwB,CAAC;IACpE,MAAM,MAAM,wBAAwB,GAAG,GAAG,CAAC,wBAAwB,CAAC;IACpE,MAAM,MAAM,kBAAkB,GAAG,GAAG,CAAC,kBAAkB,CAAC;CACzD;AAID,4DAA4D;AAC5D,MAAM,WAAW,kBAAmB,SAAQ,aAAa;IACvD;;OAEG;IACH,UAAU,CAAC,EAAE,MAAM,GAAG,SAAS,CAAC;IAEhC;;OAEG;IACH,QAAQ,CAAC,EAAE,MAAM,GAAG,SAAS,CAAC;IAE9B;;;OAGG;IACH,UAAU,CAAC,EAAE,MAAM,GAAG,SAAS,CAAC;IAEhC;;OAEG;IACH,MAAM,CAAC,EAAE,MAAM,GAAG,SAAS,CAAC;IAE5B;;;OAGG;IACH,oBAAoB,CAAC,EAAE,CAAC,MAAM,OAAO,CAAC,MAAM,CAAC,CAAC,GAAG,SAAS,CAAC;CAC5D;AAED,4DAA4D;AAC5D,qBAAa,WAAY,SAAQ,MAAM;IACrC,OAAO,CAAC,qBAAqB,CAAsC;IACnE,OAAO,CAAC,WAAW,CAAqB;IACxC,UAAU,EAAE,MAAM,CAAM;IACxB;;;;;;;;;;;;;;;;OAgBG;gBACS,EACV,OAAyC,EACzC,MAA6C,EAC7C,UAA+C,EAC/C,QAAQ,EACR,UAAU,EACV,oBAAoB,EACpB,uBAAuB,EACvB,GAAG,IAAI,EACR,GAAE,kBAAuB;IA0DjB,YAAY,CAAC,OAAO,EAAE,IAAI,CAAC,mBAAmB,CAAC,OAAO,CAAC,GAAG;QACjE,GAAG,EAAE,WAAW,CAAC;QACjB,GAAG,EAAE,MAAM,CAAC;QACZ,OAAO,EAAE,MAAM,CAAC;KACjB;YAaa,gBAAgB;cAaX,WAAW,CAAC,IAAI,EAAE,IAAI,CAAC,mBAAmB,GAAG,IAAI,CAAC,OAAO;cAInD,cAAc,CAAC,IAAI,EAAE,IAAI,CAAC,mBAAmB,CAAC,OAAO,CAAC,GAAG,OAAO,CAAC,IAAI,CAAC;CAqBhG;AAgBD,OAAO,EAAE,MAAM,EAAE,YAAY,EAAE,MAAM,WAAW,CAAC;AACjD,OAAO,EACL,WAAW,EACX,QAAQ,EACR,kBAAkB,EAClB,yBAAyB,EACzB,iBAAiB,EACjB,aAAa,EACb,aAAa,EACb,cAAc,EACd,eAAe,EACf,mBAAmB,EACnB,mBAAmB,EACnB,qBAAqB,EACrB,wBAAwB,GACzB,MAAM,SAAS,CAAC;AAEjB,eAAe,MAAM,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/index.js b/node_modules/openai/index.js new file mode 100644 index 0000000..574e3d3 --- /dev/null +++ b/node_modules/openai/index.js @@ -0,0 +1,297 @@ +"use strict"; +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var _a; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.UnprocessableEntityError = exports.PermissionDeniedError = exports.InternalServerError = exports.AuthenticationError = exports.BadRequestError = exports.RateLimitError = exports.ConflictError = exports.NotFoundError = exports.APIUserAbortError = exports.APIConnectionTimeoutError = exports.APIConnectionError = exports.APIError = exports.OpenAIError = exports.fileFromPath = exports.toFile = exports.AzureOpenAI = exports.OpenAI = void 0; +const qs = __importStar(require("./internal/qs/index.js")); +const Core = __importStar(require("./core.js")); +const Errors = __importStar(require("./error.js")); +const Pagination = __importStar(require("./pagination.js")); +const Uploads = __importStar(require("./uploads.js")); +const API = __importStar(require("./resources/index.js")); +const batches_1 = require("./resources/batches.js"); +const completions_1 = require("./resources/completions.js"); +const embeddings_1 = require("./resources/embeddings.js"); +const files_1 = require("./resources/files.js"); +const images_1 = require("./resources/images.js"); +const models_1 = require("./resources/models.js"); +const moderations_1 = require("./resources/moderations.js"); +const audio_1 = require("./resources/audio/audio.js"); +const beta_1 = require("./resources/beta/beta.js"); +const chat_1 = require("./resources/chat/chat.js"); +const fine_tuning_1 = require("./resources/fine-tuning/fine-tuning.js"); +const uploads_1 = require("./resources/uploads/uploads.js"); +/** + * API Client for interfacing with the OpenAI API. + */ +class OpenAI extends Core.APIClient { + /** + * API Client for interfacing with the OpenAI API. + * + * @param {string | undefined} [opts.apiKey=process.env['OPENAI_API_KEY'] ?? undefined] + * @param {string | null | undefined} [opts.organization=process.env['OPENAI_ORG_ID'] ?? null] + * @param {string | null | undefined} [opts.project=process.env['OPENAI_PROJECT_ID'] ?? null] + * @param {string} [opts.baseURL=process.env['OPENAI_BASE_URL'] ?? https://api.openai.com/v1] - Override the default base URL for the API. + * @param {number} [opts.timeout=10 minutes] - The maximum amount of time (in milliseconds) the client will wait for a response before timing out. + * @param {number} [opts.httpAgent] - An HTTP agent used to manage HTTP(s) connections. + * @param {Core.Fetch} [opts.fetch] - Specify a custom `fetch` function implementation. + * @param {number} [opts.maxRetries=2] - The maximum number of times the client will retry a request. + * @param {Core.Headers} opts.defaultHeaders - Default headers to include with every request to the API. + * @param {Core.DefaultQuery} opts.defaultQuery - Default query parameters to include with every request to the API. + * @param {boolean} [opts.dangerouslyAllowBrowser=false] - By default, client-side use of this library is not allowed, as it risks exposing your secret API credentials to attackers. 
+ */ + constructor({ baseURL = Core.readEnv('OPENAI_BASE_URL'), apiKey = Core.readEnv('OPENAI_API_KEY'), organization = Core.readEnv('OPENAI_ORG_ID') ?? null, project = Core.readEnv('OPENAI_PROJECT_ID') ?? null, ...opts } = {}) { + if (apiKey === undefined) { + throw new Errors.OpenAIError("The OPENAI_API_KEY environment variable is missing or empty; either provide it, or instantiate the OpenAI client with an apiKey option, like new OpenAI({ apiKey: 'My API Key' })."); + } + const options = { + apiKey, + organization, + project, + ...opts, + baseURL: baseURL || `https://api.openai.com/v1`, + }; + if (!options.dangerouslyAllowBrowser && Core.isRunningInBrowser()) { + throw new Errors.OpenAIError("It looks like you're running in a browser-like environment.\n\nThis is disabled by default, as it risks exposing your secret API credentials to attackers.\nIf you understand the risks and have appropriate mitigations in place,\nyou can set the `dangerouslyAllowBrowser` option to `true`, e.g.,\n\nnew OpenAI({ apiKey, dangerouslyAllowBrowser: true });\n\nhttps://help.openai.com/en/articles/5112595-best-practices-for-api-key-safety\n"); + } + super({ + baseURL: options.baseURL, + timeout: options.timeout ?? 600000 /* 10 minutes */, + httpAgent: options.httpAgent, + maxRetries: options.maxRetries, + fetch: options.fetch, + }); + this.completions = new API.Completions(this); + this.chat = new API.Chat(this); + this.embeddings = new API.Embeddings(this); + this.files = new API.Files(this); + this.images = new API.Images(this); + this.audio = new API.Audio(this); + this.moderations = new API.Moderations(this); + this.models = new API.Models(this); + this.fineTuning = new API.FineTuning(this); + this.beta = new API.Beta(this); + this.batches = new API.Batches(this); + this.uploads = new API.Uploads(this); + this._options = options; + this.apiKey = apiKey; + this.organization = organization; + this.project = project; + } + defaultQuery() { + return this._options.defaultQuery; + } + defaultHeaders(opts) { + return { + ...super.defaultHeaders(opts), + 'OpenAI-Organization': this.organization, + 'OpenAI-Project': this.project, + ...this._options.defaultHeaders, + }; + } + authHeaders(opts) { + return { Authorization: `Bearer ${this.apiKey}` }; + } + stringifyQuery(query) { + return qs.stringify(query, { arrayFormat: 'brackets' }); + } +} +exports.OpenAI = OpenAI; +_a = OpenAI; +OpenAI.OpenAI = _a; +OpenAI.DEFAULT_TIMEOUT = 600000; // 10 minutes +OpenAI.OpenAIError = Errors.OpenAIError; +OpenAI.APIError = Errors.APIError; +OpenAI.APIConnectionError = Errors.APIConnectionError; +OpenAI.APIConnectionTimeoutError = Errors.APIConnectionTimeoutError; +OpenAI.APIUserAbortError = Errors.APIUserAbortError; +OpenAI.NotFoundError = Errors.NotFoundError; +OpenAI.ConflictError = Errors.ConflictError; +OpenAI.RateLimitError = Errors.RateLimitError; +OpenAI.BadRequestError = Errors.BadRequestError; +OpenAI.AuthenticationError = Errors.AuthenticationError; +OpenAI.InternalServerError = Errors.InternalServerError; +OpenAI.PermissionDeniedError = Errors.PermissionDeniedError; +OpenAI.UnprocessableEntityError = Errors.UnprocessableEntityError; +OpenAI.toFile = Uploads.toFile; +OpenAI.fileFromPath = Uploads.fileFromPath; +OpenAI.Completions = completions_1.Completions; +OpenAI.Chat = chat_1.Chat; +OpenAI.Embeddings = embeddings_1.Embeddings; +OpenAI.Files = files_1.Files; +OpenAI.FileObjectsPage = files_1.FileObjectsPage; +OpenAI.Images = images_1.Images; +OpenAI.Audio = audio_1.Audio; +OpenAI.Moderations = 
moderations_1.Moderations; +OpenAI.Models = models_1.Models; +OpenAI.ModelsPage = models_1.ModelsPage; +OpenAI.FineTuning = fine_tuning_1.FineTuning; +OpenAI.Beta = beta_1.Beta; +OpenAI.Batches = batches_1.Batches; +OpenAI.BatchesPage = batches_1.BatchesPage; +OpenAI.Uploads = uploads_1.Uploads; +/** API Client for interfacing with the Azure OpenAI API. */ +class AzureOpenAI extends OpenAI { + /** + * API Client for interfacing with the Azure OpenAI API. + * + * @param {string | undefined} [opts.apiVersion=process.env['OPENAI_API_VERSION'] ?? undefined] + * @param {string | undefined} [opts.endpoint=process.env['AZURE_OPENAI_ENDPOINT'] ?? undefined] - Your Azure endpoint, including the resource, e.g. `https://example-resource.azure.openai.com/` + * @param {string | undefined} [opts.apiKey=process.env['AZURE_OPENAI_API_KEY'] ?? undefined] + * @param {string | undefined} opts.deployment - A model deployment, if given, sets the base client URL to include `/deployments/{deployment}`. + * @param {string | null | undefined} [opts.organization=process.env['OPENAI_ORG_ID'] ?? null] + * @param {string} [opts.baseURL=process.env['OPENAI_BASE_URL']] - Sets the base URL for the API, e.g. `https://example-resource.azure.openai.com/openai/`. + * @param {number} [opts.timeout=10 minutes] - The maximum amount of time (in milliseconds) the client will wait for a response before timing out. + * @param {number} [opts.httpAgent] - An HTTP agent used to manage HTTP(s) connections. + * @param {Core.Fetch} [opts.fetch] - Specify a custom `fetch` function implementation. + * @param {number} [opts.maxRetries=2] - The maximum number of times the client will retry a request. + * @param {Core.Headers} opts.defaultHeaders - Default headers to include with every request to the API. + * @param {Core.DefaultQuery} opts.defaultQuery - Default query parameters to include with every request to the API. + * @param {boolean} [opts.dangerouslyAllowBrowser=false] - By default, client-side use of this library is not allowed, as it risks exposing your secret API credentials to attackers. + */ + constructor({ baseURL = Core.readEnv('OPENAI_BASE_URL'), apiKey = Core.readEnv('AZURE_OPENAI_API_KEY'), apiVersion = Core.readEnv('OPENAI_API_VERSION'), endpoint, deployment, azureADTokenProvider, dangerouslyAllowBrowser, ...opts } = {}) { + if (!apiVersion) { + throw new Errors.OpenAIError("The OPENAI_API_VERSION environment variable is missing or empty; either provide it, or instantiate the AzureOpenAI client with an apiVersion option, like new AzureOpenAI({ apiVersion: 'My API Version' })."); + } + if (typeof azureADTokenProvider === 'function') { + dangerouslyAllowBrowser = true; + } + if (!azureADTokenProvider && !apiKey) { + throw new Errors.OpenAIError('Missing credentials. Please pass one of `apiKey` and `azureADTokenProvider`, or set the `AZURE_OPENAI_API_KEY` environment variable.'); + } + if (azureADTokenProvider && apiKey) { + throw new Errors.OpenAIError('The `apiKey` and `azureADTokenProvider` arguments are mutually exclusive; only one can be passed at a time.'); + } + // define a sentinel value to avoid any typing issues + apiKey ?? 
(apiKey = API_KEY_SENTINEL); + opts.defaultQuery = { ...opts.defaultQuery, 'api-version': apiVersion }; + if (!baseURL) { + if (!endpoint) { + endpoint = process.env['AZURE_OPENAI_ENDPOINT']; + } + if (!endpoint) { + throw new Errors.OpenAIError('Must provide one of the `baseURL` or `endpoint` arguments, or the `AZURE_OPENAI_ENDPOINT` environment variable'); + } + baseURL = `${endpoint}/openai`; + } + else { + if (endpoint) { + throw new Errors.OpenAIError('baseURL and endpoint are mutually exclusive'); + } + } + super({ + apiKey, + baseURL, + ...opts, + ...(dangerouslyAllowBrowser !== undefined ? { dangerouslyAllowBrowser } : {}), + }); + this.apiVersion = ''; + this._azureADTokenProvider = azureADTokenProvider; + this.apiVersion = apiVersion; + this._deployment = deployment; + } + buildRequest(options) { + if (_deployments_endpoints.has(options.path) && options.method === 'post' && options.body !== undefined) { + if (!Core.isObj(options.body)) { + throw new Error('Expected request body to be an object'); + } + const model = this._deployment || options.body['model']; + if (model !== undefined && !this.baseURL.includes('/deployments')) { + options.path = `/deployments/${model}${options.path}`; + } + } + return super.buildRequest(options); + } + async _getAzureADToken() { + if (typeof this._azureADTokenProvider === 'function') { + const token = await this._azureADTokenProvider(); + if (!token || typeof token !== 'string') { + throw new Errors.OpenAIError(`Expected 'azureADTokenProvider' argument to return a string but it returned ${token}`); + } + return token; + } + return undefined; + } + authHeaders(opts) { + return {}; + } + async prepareOptions(opts) { + /** + * The user should provide a bearer token provider if they want + * to use Azure AD authentication. The user shouldn't set the + * Authorization header manually because the header is overwritten + * with the Azure AD token if a bearer token provider is provided. + */ + if (opts.headers?.['api-key']) { + return super.prepareOptions(opts); + } + const token = await this._getAzureADToken(); + opts.headers ?? 
(opts.headers = {}); + if (token) { + opts.headers['Authorization'] = `Bearer ${token}`; + } + else if (this.apiKey !== API_KEY_SENTINEL) { + opts.headers['api-key'] = this.apiKey; + } + else { + throw new Errors.OpenAIError('Unable to handle auth'); + } + return super.prepareOptions(opts); + } +} +exports.AzureOpenAI = AzureOpenAI; +const _deployments_endpoints = new Set([ + '/completions', + '/chat/completions', + '/embeddings', + '/audio/transcriptions', + '/audio/translations', + '/audio/speech', + '/images/generations', +]); +const API_KEY_SENTINEL = ''; +// ---------------------- End Azure ---------------------- +var uploads_2 = require("./uploads.js"); +Object.defineProperty(exports, "toFile", { enumerable: true, get: function () { return uploads_2.toFile; } }); +Object.defineProperty(exports, "fileFromPath", { enumerable: true, get: function () { return uploads_2.fileFromPath; } }); +var error_1 = require("./error.js"); +Object.defineProperty(exports, "OpenAIError", { enumerable: true, get: function () { return error_1.OpenAIError; } }); +Object.defineProperty(exports, "APIError", { enumerable: true, get: function () { return error_1.APIError; } }); +Object.defineProperty(exports, "APIConnectionError", { enumerable: true, get: function () { return error_1.APIConnectionError; } }); +Object.defineProperty(exports, "APIConnectionTimeoutError", { enumerable: true, get: function () { return error_1.APIConnectionTimeoutError; } }); +Object.defineProperty(exports, "APIUserAbortError", { enumerable: true, get: function () { return error_1.APIUserAbortError; } }); +Object.defineProperty(exports, "NotFoundError", { enumerable: true, get: function () { return error_1.NotFoundError; } }); +Object.defineProperty(exports, "ConflictError", { enumerable: true, get: function () { return error_1.ConflictError; } }); +Object.defineProperty(exports, "RateLimitError", { enumerable: true, get: function () { return error_1.RateLimitError; } }); +Object.defineProperty(exports, "BadRequestError", { enumerable: true, get: function () { return error_1.BadRequestError; } }); +Object.defineProperty(exports, "AuthenticationError", { enumerable: true, get: function () { return error_1.AuthenticationError; } }); +Object.defineProperty(exports, "InternalServerError", { enumerable: true, get: function () { return error_1.InternalServerError; } }); +Object.defineProperty(exports, "PermissionDeniedError", { enumerable: true, get: function () { return error_1.PermissionDeniedError; } }); +Object.defineProperty(exports, "UnprocessableEntityError", { enumerable: true, get: function () { return error_1.UnprocessableEntityError; } }); +exports = module.exports = OpenAI; +module.exports.AzureOpenAI = AzureOpenAI; +exports.default = OpenAI; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/openai/index.js.map b/node_modules/openai/index.js.map new file mode 100644 index 0000000..1dfd051 --- /dev/null +++ b/node_modules/openai/index.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"index.js","sourceRoot":"","sources":["src/index.ts"],"names":[],"mappings":";AAAA,sFAAsF;;;;;;;;;;;;;;;;;;;;;;;;;;;AAGtF,2DAAoC;AACpC,gDAA+B;AAC/B,mDAAkC;AAClC,4DAA2C;AAE3C,sDAAqC;AACrC,0DAAyC;AACzC,oDAQ6B;AAC7B,4DAQiC;AACjC,0DAMgC;AAChC,gDAS2B;AAC3B,kDAQ4B;AAC5B,kDAA6E;AAC7E,4DASiC;AACjC,sDAAiF;AACjF,mDAA6C;AAC7C,mDAAwD;AAkCxD,wEAAiE;AACjE,4DAKqC;AAiFrC;;GAEG;AACH,MAAa,MAAO,SAAQ,IAAI,CAAC,SAAS;IAOxC;;;;;;;;;;;;;;OAcG;IACH,YAAY,EACV,OAAO,GAAG,IAAI,CAAC,OAAO,CAAC,iBAAiB,CAAC,EACzC,MAAM,GAAG,IAAI,CAAC,OAAO,CAAC,gBAAgB,CAAC,EACvC,YAAY,GAAG,IAAI,CAAC,OAAO,CAAC,eAAe,CAAC,IAAI,IAAI,EACpD,OAAO,GAAG,IAAI,CAAC,OAAO,CAAC,mBAAmB,CAAC,IAAI,IAAI,EACnD,GAAG,IAAI,KACU,EAAE;QACnB,IAAI,MAAM,KAAK,SAAS,EAAE;YACxB,MAAM,IAAI,MAAM,CAAC,WAAW,CAC1B,oLAAoL,CACrL,CAAC;SACH;QAED,MAAM,OAAO,GAAkB;YAC7B,MAAM;YACN,YAAY;YACZ,OAAO;YACP,GAAG,IAAI;YACP,OAAO,EAAE,OAAO,IAAI,2BAA2B;SAChD,CAAC;QAEF,IAAI,CAAC,OAAO,CAAC,uBAAuB,IAAI,IAAI,CAAC,kBAAkB,EAAE,EAAE;YACjE,MAAM,IAAI,MAAM,CAAC,WAAW,CAC1B,obAAob,CACrb,CAAC;SACH;QAED,KAAK,CAAC;YACJ,OAAO,EAAE,OAAO,CAAC,OAAQ;YACzB,OAAO,EAAE,OAAO,CAAC,OAAO,IAAI,MAAM,CAAC,gBAAgB;YACnD,SAAS,EAAE,OAAO,CAAC,SAAS;YAC5B,UAAU,EAAE,OAAO,CAAC,UAAU;YAC9B,KAAK,EAAE,OAAO,CAAC,KAAK;SACrB,CAAC,CAAC;QASL,gBAAW,GAAoB,IAAI,GAAG,CAAC,WAAW,CAAC,IAAI,CAAC,CAAC;QACzD,SAAI,GAAa,IAAI,GAAG,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;QACpC,eAAU,GAAmB,IAAI,GAAG,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC;QACtD,UAAK,GAAc,IAAI,GAAG,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;QACvC,WAAM,GAAe,IAAI,GAAG,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC;QAC1C,UAAK,GAAc,IAAI,GAAG,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;QACvC,gBAAW,GAAoB,IAAI,GAAG,CAAC,WAAW,CAAC,IAAI,CAAC,CAAC;QACzD,WAAM,GAAe,IAAI,GAAG,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC;QAC1C,eAAU,GAAmB,IAAI,GAAG,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC;QACtD,SAAI,GAAa,IAAI,GAAG,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;QACpC,YAAO,GAAgB,IAAI,GAAG,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;QAC7C,YAAO,GAAgB,IAAI,GAAG,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;QAlB3C,IAAI,CAAC,QAAQ,GAAG,OAAO,CAAC;QAExB,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;QACrB,IAAI,CAAC,YAAY,GAAG,YAAY,CAAC;QACjC,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC;IACzB,CAAC;IAekB,YAAY;QAC7B,OAAO,IAAI,CAAC,QAAQ,CAAC,YAAY,CAAC;IACpC,CAAC;IAEkB,cAAc,CAAC,IAA8B;QAC9D,OAAO;YACL,GAAG,KAAK,CAAC,cAAc,CAAC,IAAI,CAAC;YAC7B,qBAAqB,EAAE,IAAI,CAAC,YAAY;YACxC,gBAAgB,EAAE,IAAI,CAAC,OAAO;YAC9B,GAAG,IAAI,CAAC,QAAQ,CAAC,cAAc;SAChC,CAAC;IACJ,CAAC;IAEkB,WAAW,CAAC,IAA8B;QAC3D,OAAO,EAAE,aAAa,EAAE,UAAU,IAAI,CAAC,MAAM,EAAE,EAAE,CAAC;IACpD,CAAC;IAEkB,cAAc,CAAC,KAA8B;QAC9D,OAAO,EAAE,CAAC,SAAS,CAAC,KAAK,EAAE,EAAE,WAAW,EAAE,UAAU,EAAE,CAAC,CAAC;IAC1D,CAAC;;AAhGH,wBAqHC;;AAnBQ,aAAM,GAAG,EAAI,CAAC;AACd,sBAAe,GAAG,MAAM,CAAC,CAAC,aAAa;AAEvC,kBAAW,GAAG,MAAM,CAAC,WAAW,CAAC;AACjC,eAAQ,GAAG,MAAM,CAAC,QAAQ,CAAC;AAC3B,yBAAkB,GAAG,MAAM,CAAC,kBAAkB,CAAC;AAC/C,gCAAyB,GAAG,MAAM,CAAC,yBAAyB,CAAC;AAC7D,wBAAiB,GAAG,MAAM,CAAC,iBAAiB,CAAC;AAC7C,oBAAa,GAAG,MAAM,CAAC,aAAa,CAAC;AACrC,oBAAa,GAAG,MAAM,CAAC,aAAa,CAAC;AACrC,qBAAc,GAAG,MAAM,CAAC,cAAc,CAAC;AACvC,sBAAe,GAAG,MAAM,CAAC,eAAe,CAAC;AACzC,0BAAmB,GAAG,MAAM,CAAC,mBAAmB,CAAC;AACjD,0BAAmB,GAAG,MAAM,CAAC,mBAAmB,CAAC;AACjD,4BAAqB,GAAG,MAAM,CAAC,qBAAqB,CAAC;AACrD,+BAAwB,GAAG,MAAM,CAAC,wBAAwB,CAAC;AAE3D,aAAM,GAAG,OAAO,CAAC,MAAM,CAAC;AACxB,mBAAY,GAAG,OAAO,CAAC,YAAY,CAAC;AAG7C,MAAM,CAAC,WAAW,GAAG,yBAAW,CAAC;AACjC,MAAM,CAAC,IAAI,GAAG,WAAI,CAAC;AACnB,MAAM,CAAC,UAAU,GAAG,uBAAU,CAAC;AAC/B,MAAM,CAAC,KAAK,GAAG,aAAK,CAAC;AACrB,MAAM,CAAC,eAAe,GAAG,uBAAe,CAAC;AACzC,MAAM,CAAC,MAAM,GAAG,eAAM,CAAC;AACvB,MAAM,CAAC,KAAK,GAAG,aAAK,CAAC;AACrB,MAAM,CAAC,WAAW,GAAG,yBAAW,CAAC;AACjC,MAAM,CAAC,MAAM,GAAG,eAAM,CAAC;AACvB,MAAM,CAAC,UAAU,GAAG,mBAAU,CAAC;AAC/B,MAAM,CAAC,UAAU,
GAAG,wBAAU,CAAC;AAC/B,MAAM,CAAC,IAAI,GAAG,WAAI,CAAC;AACnB,MAAM,CAAC,OAAO,GAAG,iBAAO,CAAC;AACzB,MAAM,CAAC,WAAW,GAAG,qBAAW,CAAC;AACjC,MAAM,CAAC,OAAO,GAAG,iBAAiB,CAAC;AAsKnC,4DAA4D;AAC5D,MAAa,WAAY,SAAQ,MAAM;IAIrC;;;;;;;;;;;;;;;;OAgBG;IACH,YAAY,EACV,OAAO,GAAG,IAAI,CAAC,OAAO,CAAC,iBAAiB,CAAC,EACzC,MAAM,GAAG,IAAI,CAAC,OAAO,CAAC,sBAAsB,CAAC,EAC7C,UAAU,GAAG,IAAI,CAAC,OAAO,CAAC,oBAAoB,CAAC,EAC/C,QAAQ,EACR,UAAU,EACV,oBAAoB,EACpB,uBAAuB,EACvB,GAAG,IAAI,KACe,EAAE;QACxB,IAAI,CAAC,UAAU,EAAE;YACf,MAAM,IAAI,MAAM,CAAC,WAAW,CAC1B,8MAA8M,CAC/M,CAAC;SACH;QAED,IAAI,OAAO,oBAAoB,KAAK,UAAU,EAAE;YAC9C,uBAAuB,GAAG,IAAI,CAAC;SAChC;QAED,IAAI,CAAC,oBAAoB,IAAI,CAAC,MAAM,EAAE;YACpC,MAAM,IAAI,MAAM,CAAC,WAAW,CAC1B,sIAAsI,CACvI,CAAC;SACH;QAED,IAAI,oBAAoB,IAAI,MAAM,EAAE;YAClC,MAAM,IAAI,MAAM,CAAC,WAAW,CAC1B,6GAA6G,CAC9G,CAAC;SACH;QAED,qDAAqD;QACrD,MAAM,KAAN,MAAM,GAAK,gBAAgB,EAAC;QAE5B,IAAI,CAAC,YAAY,GAAG,EAAE,GAAG,IAAI,CAAC,YAAY,EAAE,aAAa,EAAE,UAAU,EAAE,CAAC;QAExE,IAAI,CAAC,OAAO,EAAE;YACZ,IAAI,CAAC,QAAQ,EAAE;gBACb,QAAQ,GAAG,OAAO,CAAC,GAAG,CAAC,uBAAuB,CAAC,CAAC;aACjD;YAED,IAAI,CAAC,QAAQ,EAAE;gBACb,MAAM,IAAI,MAAM,CAAC,WAAW,CAC1B,gHAAgH,CACjH,CAAC;aACH;YAED,OAAO,GAAG,GAAG,QAAQ,SAAS,CAAC;SAChC;aAAM;YACL,IAAI,QAAQ,EAAE;gBACZ,MAAM,IAAI,MAAM,CAAC,WAAW,CAAC,6CAA6C,CAAC,CAAC;aAC7E;SACF;QAED,KAAK,CAAC;YACJ,MAAM;YACN,OAAO;YACP,GAAG,IAAI;YACP,GAAG,CAAC,uBAAuB,KAAK,SAAS,CAAC,CAAC,CAAC,EAAE,uBAAuB,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC;SAC9E,CAAC,CAAC;QA9EL,eAAU,GAAW,EAAE,CAAC;QAgFtB,IAAI,CAAC,qBAAqB,GAAG,oBAAoB,CAAC;QAClD,IAAI,CAAC,UAAU,GAAG,UAAU,CAAC;QAC7B,IAAI,CAAC,WAAW,GAAG,UAAU,CAAC;IAChC,CAAC;IAEQ,YAAY,CAAC,OAA0C;QAK9D,IAAI,sBAAsB,CAAC,GAAG,CAAC,OAAO,CAAC,IAAI,CAAC,IAAI,OAAO,CAAC,MAAM,KAAK,MAAM,IAAI,OAAO,CAAC,IAAI,KAAK,SAAS,EAAE;YACvG,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,IAAI,CAAC,EAAE;gBAC7B,MAAM,IAAI,KAAK,CAAC,uCAAuC,CAAC,CAAC;aAC1D;YACD,MAAM,KAAK,GAAG,IAAI,CAAC,WAAW,IAAI,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;YACxD,IAAI,KAAK,KAAK,SAAS,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,cAAc,CAAC,EAAE;gBACjE,OAAO,CAAC,IAAI,GAAG,gBAAgB,KAAK,GAAG,OAAO,CAAC,IAAI,EAAE,CAAC;aACvD;SACF;QACD,OAAO,KAAK,CAAC,YAAY,CAAC,OAAO,CAAC,CAAC;IACrC,CAAC;IAEO,KAAK,CAAC,gBAAgB;QAC5B,IAAI,OAAO,IAAI,CAAC,qBAAqB,KAAK,UAAU,EAAE;YACpD,MAAM,KAAK,GAAG,MAAM,IAAI,CAAC,qBAAqB,EAAE,CAAC;YACjD,IAAI,CAAC,KAAK,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;gBACvC,MAAM,IAAI,MAAM,CAAC,WAAW,CAC1B,+EAA+E,KAAK,EAAE,CACvF,CAAC;aACH;YACD,OAAO,KAAK,CAAC;SACd;QACD,OAAO,SAAS,CAAC;IACnB,CAAC;IAEkB,WAAW,CAAC,IAA8B;QAC3D,OAAO,EAAE,CAAC;IACZ,CAAC;IAEkB,KAAK,CAAC,cAAc,CAAC,IAAuC;QAC7E;;;;;WAKG;QACH,IAAI,IAAI,CAAC,OAAO,EAAE,CAAC,SAAS,CAAC,EAAE;YAC7B,OAAO,KAAK,CAAC,cAAc,CAAC,IAAI,CAAC,CAAC;SACnC;QACD,MAAM,KAAK,GAAG,MAAM,IAAI,CAAC,gBAAgB,EAAE,CAAC;QAC5C,IAAI,CAAC,OAAO,KAAZ,IAAI,CAAC,OAAO,GAAK,EAAE,EAAC;QACpB,IAAI,KAAK,EAAE;YACT,IAAI,CAAC,OAAO,CAAC,eAAe,CAAC,GAAG,UAAU,KAAK,EAAE,CAAC;SACnD;aAAM,IAAI,IAAI,CAAC,MAAM,KAAK,gBAAgB,EAAE;YAC3C,IAAI,CAAC,OAAO,CAAC,SAAS,CAAC,GAAG,IAAI,CAAC,MAAM,CAAC;SACvC;aAAM;YACL,MAAM,IAAI,MAAM,CAAC,WAAW,CAAC,uBAAuB,CAAC,CAAC;SACvD;QACD,OAAO,KAAK,CAAC,cAAc,CAAC,IAAI,CAAC,CAAC;IACpC,CAAC;CACF;AA/ID,kCA+IC;AAED,MAAM,sBAAsB,GAAG,IAAI,GAAG,CAAC;IACrC,cAAc;IACd,mBAAmB;IACnB,aAAa;IACb,uBAAuB;IACvB,qBAAqB;IACrB,eAAe;IACf,qBAAqB;CACtB,CAAC,CAAC;AAEH,MAAM,gBAAgB,GAAG,eAAe,CAAC;AAEzC,0DAA0D;AAE1D,wCAAiD;AAAxC,iGAAA,MAAM,OAAA;AAAE,uGAAA,YAAY,OAAA;AAC7B,oCAciB;AAbf,oGAAA,WAAW,OAAA;AACX,iGAAA,QAAQ,OAAA;AACR,2GAAA,kBAAkB,OAAA;AAClB,kHAAA,yBAAyB,OAAA;AACzB,0GAAA,iBAAiB,OAAA;AACjB,sGAAA,aAAa,OAAA;AACb,sGAAA,aAAa,OAAA;AACb,uGAAA,cAAc,OAAA;AACd,wGAAA,eAAe,OAAA;AACf,4GAAA,mBAAmB,OAAA;AACnB,4GAAA,mBAAmB,OAAA;AACnB,8G
AAA,qBAAqB,OAAA;AACrB,iHAAA,wBAAwB,OAAA;AAG1B,kBAAe,MAAM,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/index.mjs b/node_modules/openai/index.mjs new file mode 100644 index 0000000..486e30f --- /dev/null +++ b/node_modules/openai/index.mjs @@ -0,0 +1,251 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. +var _a; +import * as qs from "./internal/qs/index.mjs"; +import * as Core from "./core.mjs"; +import * as Errors from "./error.mjs"; +import * as Pagination from "./pagination.mjs"; +import * as Uploads from "./uploads.mjs"; +import * as API from "./resources/index.mjs"; +import { Batches, BatchesPage, } from "./resources/batches.mjs"; +import { Completions, } from "./resources/completions.mjs"; +import { Embeddings, } from "./resources/embeddings.mjs"; +import { FileObjectsPage, Files, } from "./resources/files.mjs"; +import { Images, } from "./resources/images.mjs"; +import { Models, ModelsPage } from "./resources/models.mjs"; +import { Moderations, } from "./resources/moderations.mjs"; +import { Audio } from "./resources/audio/audio.mjs"; +import { Beta } from "./resources/beta/beta.mjs"; +import { Chat } from "./resources/chat/chat.mjs"; +import { FineTuning } from "./resources/fine-tuning/fine-tuning.mjs"; +import { Uploads as UploadsAPIUploads, } from "./resources/uploads/uploads.mjs"; +/** + * API Client for interfacing with the OpenAI API. + */ +export class OpenAI extends Core.APIClient { + /** + * API Client for interfacing with the OpenAI API. + * + * @param {string | undefined} [opts.apiKey=process.env['OPENAI_API_KEY'] ?? undefined] + * @param {string | null | undefined} [opts.organization=process.env['OPENAI_ORG_ID'] ?? null] + * @param {string | null | undefined} [opts.project=process.env['OPENAI_PROJECT_ID'] ?? null] + * @param {string} [opts.baseURL=process.env['OPENAI_BASE_URL'] ?? https://api.openai.com/v1] - Override the default base URL for the API. + * @param {number} [opts.timeout=10 minutes] - The maximum amount of time (in milliseconds) the client will wait for a response before timing out. + * @param {number} [opts.httpAgent] - An HTTP agent used to manage HTTP(s) connections. + * @param {Core.Fetch} [opts.fetch] - Specify a custom `fetch` function implementation. + * @param {number} [opts.maxRetries=2] - The maximum number of times the client will retry a request. + * @param {Core.Headers} opts.defaultHeaders - Default headers to include with every request to the API. + * @param {Core.DefaultQuery} opts.defaultQuery - Default query parameters to include with every request to the API. + * @param {boolean} [opts.dangerouslyAllowBrowser=false] - By default, client-side use of this library is not allowed, as it risks exposing your secret API credentials to attackers. + */ + constructor({ baseURL = Core.readEnv('OPENAI_BASE_URL'), apiKey = Core.readEnv('OPENAI_API_KEY'), organization = Core.readEnv('OPENAI_ORG_ID') ?? null, project = Core.readEnv('OPENAI_PROJECT_ID') ?? 
null, ...opts } = {}) { + if (apiKey === undefined) { + throw new Errors.OpenAIError("The OPENAI_API_KEY environment variable is missing or empty; either provide it, or instantiate the OpenAI client with an apiKey option, like new OpenAI({ apiKey: 'My API Key' })."); + } + const options = { + apiKey, + organization, + project, + ...opts, + baseURL: baseURL || `https://api.openai.com/v1`, + }; + if (!options.dangerouslyAllowBrowser && Core.isRunningInBrowser()) { + throw new Errors.OpenAIError("It looks like you're running in a browser-like environment.\n\nThis is disabled by default, as it risks exposing your secret API credentials to attackers.\nIf you understand the risks and have appropriate mitigations in place,\nyou can set the `dangerouslyAllowBrowser` option to `true`, e.g.,\n\nnew OpenAI({ apiKey, dangerouslyAllowBrowser: true });\n\nhttps://help.openai.com/en/articles/5112595-best-practices-for-api-key-safety\n"); + } + super({ + baseURL: options.baseURL, + timeout: options.timeout ?? 600000 /* 10 minutes */, + httpAgent: options.httpAgent, + maxRetries: options.maxRetries, + fetch: options.fetch, + }); + this.completions = new API.Completions(this); + this.chat = new API.Chat(this); + this.embeddings = new API.Embeddings(this); + this.files = new API.Files(this); + this.images = new API.Images(this); + this.audio = new API.Audio(this); + this.moderations = new API.Moderations(this); + this.models = new API.Models(this); + this.fineTuning = new API.FineTuning(this); + this.beta = new API.Beta(this); + this.batches = new API.Batches(this); + this.uploads = new API.Uploads(this); + this._options = options; + this.apiKey = apiKey; + this.organization = organization; + this.project = project; + } + defaultQuery() { + return this._options.defaultQuery; + } + defaultHeaders(opts) { + return { + ...super.defaultHeaders(opts), + 'OpenAI-Organization': this.organization, + 'OpenAI-Project': this.project, + ...this._options.defaultHeaders, + }; + } + authHeaders(opts) { + return { Authorization: `Bearer ${this.apiKey}` }; + } + stringifyQuery(query) { + return qs.stringify(query, { arrayFormat: 'brackets' }); + } +} +_a = OpenAI; +OpenAI.OpenAI = _a; +OpenAI.DEFAULT_TIMEOUT = 600000; // 10 minutes +OpenAI.OpenAIError = Errors.OpenAIError; +OpenAI.APIError = Errors.APIError; +OpenAI.APIConnectionError = Errors.APIConnectionError; +OpenAI.APIConnectionTimeoutError = Errors.APIConnectionTimeoutError; +OpenAI.APIUserAbortError = Errors.APIUserAbortError; +OpenAI.NotFoundError = Errors.NotFoundError; +OpenAI.ConflictError = Errors.ConflictError; +OpenAI.RateLimitError = Errors.RateLimitError; +OpenAI.BadRequestError = Errors.BadRequestError; +OpenAI.AuthenticationError = Errors.AuthenticationError; +OpenAI.InternalServerError = Errors.InternalServerError; +OpenAI.PermissionDeniedError = Errors.PermissionDeniedError; +OpenAI.UnprocessableEntityError = Errors.UnprocessableEntityError; +OpenAI.toFile = Uploads.toFile; +OpenAI.fileFromPath = Uploads.fileFromPath; +OpenAI.Completions = Completions; +OpenAI.Chat = Chat; +OpenAI.Embeddings = Embeddings; +OpenAI.Files = Files; +OpenAI.FileObjectsPage = FileObjectsPage; +OpenAI.Images = Images; +OpenAI.Audio = Audio; +OpenAI.Moderations = Moderations; +OpenAI.Models = Models; +OpenAI.ModelsPage = ModelsPage; +OpenAI.FineTuning = FineTuning; +OpenAI.Beta = Beta; +OpenAI.Batches = Batches; +OpenAI.BatchesPage = BatchesPage; +OpenAI.Uploads = UploadsAPIUploads; +/** API Client for interfacing with the Azure OpenAI API. 
*/ +export class AzureOpenAI extends OpenAI { + /** + * API Client for interfacing with the Azure OpenAI API. + * + * @param {string | undefined} [opts.apiVersion=process.env['OPENAI_API_VERSION'] ?? undefined] + * @param {string | undefined} [opts.endpoint=process.env['AZURE_OPENAI_ENDPOINT'] ?? undefined] - Your Azure endpoint, including the resource, e.g. `https://example-resource.azure.openai.com/` + * @param {string | undefined} [opts.apiKey=process.env['AZURE_OPENAI_API_KEY'] ?? undefined] + * @param {string | undefined} opts.deployment - A model deployment, if given, sets the base client URL to include `/deployments/{deployment}`. + * @param {string | null | undefined} [opts.organization=process.env['OPENAI_ORG_ID'] ?? null] + * @param {string} [opts.baseURL=process.env['OPENAI_BASE_URL']] - Sets the base URL for the API, e.g. `https://example-resource.azure.openai.com/openai/`. + * @param {number} [opts.timeout=10 minutes] - The maximum amount of time (in milliseconds) the client will wait for a response before timing out. + * @param {number} [opts.httpAgent] - An HTTP agent used to manage HTTP(s) connections. + * @param {Core.Fetch} [opts.fetch] - Specify a custom `fetch` function implementation. + * @param {number} [opts.maxRetries=2] - The maximum number of times the client will retry a request. + * @param {Core.Headers} opts.defaultHeaders - Default headers to include with every request to the API. + * @param {Core.DefaultQuery} opts.defaultQuery - Default query parameters to include with every request to the API. + * @param {boolean} [opts.dangerouslyAllowBrowser=false] - By default, client-side use of this library is not allowed, as it risks exposing your secret API credentials to attackers. + */ + constructor({ baseURL = Core.readEnv('OPENAI_BASE_URL'), apiKey = Core.readEnv('AZURE_OPENAI_API_KEY'), apiVersion = Core.readEnv('OPENAI_API_VERSION'), endpoint, deployment, azureADTokenProvider, dangerouslyAllowBrowser, ...opts } = {}) { + if (!apiVersion) { + throw new Errors.OpenAIError("The OPENAI_API_VERSION environment variable is missing or empty; either provide it, or instantiate the AzureOpenAI client with an apiVersion option, like new AzureOpenAI({ apiVersion: 'My API Version' })."); + } + if (typeof azureADTokenProvider === 'function') { + dangerouslyAllowBrowser = true; + } + if (!azureADTokenProvider && !apiKey) { + throw new Errors.OpenAIError('Missing credentials. Please pass one of `apiKey` and `azureADTokenProvider`, or set the `AZURE_OPENAI_API_KEY` environment variable.'); + } + if (azureADTokenProvider && apiKey) { + throw new Errors.OpenAIError('The `apiKey` and `azureADTokenProvider` arguments are mutually exclusive; only one can be passed at a time.'); + } + // define a sentinel value to avoid any typing issues + apiKey ?? (apiKey = API_KEY_SENTINEL); + opts.defaultQuery = { ...opts.defaultQuery, 'api-version': apiVersion }; + if (!baseURL) { + if (!endpoint) { + endpoint = process.env['AZURE_OPENAI_ENDPOINT']; + } + if (!endpoint) { + throw new Errors.OpenAIError('Must provide one of the `baseURL` or `endpoint` arguments, or the `AZURE_OPENAI_ENDPOINT` environment variable'); + } + baseURL = `${endpoint}/openai`; + } + else { + if (endpoint) { + throw new Errors.OpenAIError('baseURL and endpoint are mutually exclusive'); + } + } + super({ + apiKey, + baseURL, + ...opts, + ...(dangerouslyAllowBrowser !== undefined ? 
{ dangerouslyAllowBrowser } : {}), + }); + this.apiVersion = ''; + this._azureADTokenProvider = azureADTokenProvider; + this.apiVersion = apiVersion; + this._deployment = deployment; + } + buildRequest(options) { + if (_deployments_endpoints.has(options.path) && options.method === 'post' && options.body !== undefined) { + if (!Core.isObj(options.body)) { + throw new Error('Expected request body to be an object'); + } + const model = this._deployment || options.body['model']; + if (model !== undefined && !this.baseURL.includes('/deployments')) { + options.path = `/deployments/${model}${options.path}`; + } + } + return super.buildRequest(options); + } + async _getAzureADToken() { + if (typeof this._azureADTokenProvider === 'function') { + const token = await this._azureADTokenProvider(); + if (!token || typeof token !== 'string') { + throw new Errors.OpenAIError(`Expected 'azureADTokenProvider' argument to return a string but it returned ${token}`); + } + return token; + } + return undefined; + } + authHeaders(opts) { + return {}; + } + async prepareOptions(opts) { + /** + * The user should provide a bearer token provider if they want + * to use Azure AD authentication. The user shouldn't set the + * Authorization header manually because the header is overwritten + * with the Azure AD token if a bearer token provider is provided. + */ + if (opts.headers?.['api-key']) { + return super.prepareOptions(opts); + } + const token = await this._getAzureADToken(); + opts.headers ?? (opts.headers = {}); + if (token) { + opts.headers['Authorization'] = `Bearer ${token}`; + } + else if (this.apiKey !== API_KEY_SENTINEL) { + opts.headers['api-key'] = this.apiKey; + } + else { + throw new Errors.OpenAIError('Unable to handle auth'); + } + return super.prepareOptions(opts); + } +} +const _deployments_endpoints = new Set([ + '/completions', + '/chat/completions', + '/embeddings', + '/audio/transcriptions', + '/audio/translations', + '/audio/speech', + '/images/generations', +]); +const API_KEY_SENTINEL = ''; +export { toFile, fileFromPath } from "./uploads.mjs"; +export { OpenAIError, APIError, APIConnectionError, APIConnectionTimeoutError, APIUserAbortError, NotFoundError, ConflictError, RateLimitError, BadRequestError, AuthenticationError, InternalServerError, PermissionDeniedError, UnprocessableEntityError, } from "./error.mjs"; +export default OpenAI; +//# sourceMappingURL=index.mjs.map \ No newline at end of file diff --git a/node_modules/openai/index.mjs.map b/node_modules/openai/index.mjs.map new file mode 100644 index 0000000..3c97156 --- /dev/null +++ b/node_modules/openai/index.mjs.map @@ -0,0 +1 @@ 
+{"version":3,"file":"index.mjs","sourceRoot":"","sources":["src/index.ts"],"names":[],"mappings":"AAAA,sFAAsF;;OAG/E,KAAK,EAAE;OACP,KAAK,IAAI;OACT,KAAK,MAAM;OACX,KAAK,UAAU;OAEf,KAAK,OAAO;OACZ,KAAK,GAAG;OACR,EAML,OAAO,EACP,WAAW,GACZ;OACM,EAOL,WAAW,GACZ;OACM,EAKL,UAAU,GACX;OACM,EAML,eAAe,EAEf,KAAK,GACN;OACM,EAML,MAAM,GAEP;OACM,EAAuB,MAAM,EAAE,UAAU,EAAE;OAC3C,EAQL,WAAW,GACZ;OACM,EAAE,KAAK,EAAmC;OAC1C,EAAE,IAAI,EAAE;OACR,EAAE,IAAI,EAAa;OAkCnB,EAAE,UAAU,EAAE;OACd,EAIL,OAAO,IAAI,iBAAiB,GAC7B;AAiFD;;GAEG;AACH,MAAM,OAAO,MAAO,SAAQ,IAAI,CAAC,SAAS;IAOxC;;;;;;;;;;;;;;OAcG;IACH,YAAY,EACV,OAAO,GAAG,IAAI,CAAC,OAAO,CAAC,iBAAiB,CAAC,EACzC,MAAM,GAAG,IAAI,CAAC,OAAO,CAAC,gBAAgB,CAAC,EACvC,YAAY,GAAG,IAAI,CAAC,OAAO,CAAC,eAAe,CAAC,IAAI,IAAI,EACpD,OAAO,GAAG,IAAI,CAAC,OAAO,CAAC,mBAAmB,CAAC,IAAI,IAAI,EACnD,GAAG,IAAI,KACU,EAAE;QACnB,IAAI,MAAM,KAAK,SAAS,EAAE;YACxB,MAAM,IAAI,MAAM,CAAC,WAAW,CAC1B,oLAAoL,CACrL,CAAC;SACH;QAED,MAAM,OAAO,GAAkB;YAC7B,MAAM;YACN,YAAY;YACZ,OAAO;YACP,GAAG,IAAI;YACP,OAAO,EAAE,OAAO,IAAI,2BAA2B;SAChD,CAAC;QAEF,IAAI,CAAC,OAAO,CAAC,uBAAuB,IAAI,IAAI,CAAC,kBAAkB,EAAE,EAAE;YACjE,MAAM,IAAI,MAAM,CAAC,WAAW,CAC1B,obAAob,CACrb,CAAC;SACH;QAED,KAAK,CAAC;YACJ,OAAO,EAAE,OAAO,CAAC,OAAQ;YACzB,OAAO,EAAE,OAAO,CAAC,OAAO,IAAI,MAAM,CAAC,gBAAgB;YACnD,SAAS,EAAE,OAAO,CAAC,SAAS;YAC5B,UAAU,EAAE,OAAO,CAAC,UAAU;YAC9B,KAAK,EAAE,OAAO,CAAC,KAAK;SACrB,CAAC,CAAC;QASL,gBAAW,GAAoB,IAAI,GAAG,CAAC,WAAW,CAAC,IAAI,CAAC,CAAC;QACzD,SAAI,GAAa,IAAI,GAAG,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;QACpC,eAAU,GAAmB,IAAI,GAAG,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC;QACtD,UAAK,GAAc,IAAI,GAAG,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;QACvC,WAAM,GAAe,IAAI,GAAG,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC;QAC1C,UAAK,GAAc,IAAI,GAAG,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;QACvC,gBAAW,GAAoB,IAAI,GAAG,CAAC,WAAW,CAAC,IAAI,CAAC,CAAC;QACzD,WAAM,GAAe,IAAI,GAAG,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC;QAC1C,eAAU,GAAmB,IAAI,GAAG,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC;QACtD,SAAI,GAAa,IAAI,GAAG,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;QACpC,YAAO,GAAgB,IAAI,GAAG,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;QAC7C,YAAO,GAAgB,IAAI,GAAG,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;QAlB3C,IAAI,CAAC,QAAQ,GAAG,OAAO,CAAC;QAExB,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;QACrB,IAAI,CAAC,YAAY,GAAG,YAAY,CAAC;QACjC,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC;IACzB,CAAC;IAekB,YAAY;QAC7B,OAAO,IAAI,CAAC,QAAQ,CAAC,YAAY,CAAC;IACpC,CAAC;IAEkB,cAAc,CAAC,IAA8B;QAC9D,OAAO;YACL,GAAG,KAAK,CAAC,cAAc,CAAC,IAAI,CAAC;YAC7B,qBAAqB,EAAE,IAAI,CAAC,YAAY;YACxC,gBAAgB,EAAE,IAAI,CAAC,OAAO;YAC9B,GAAG,IAAI,CAAC,QAAQ,CAAC,cAAc;SAChC,CAAC;IACJ,CAAC;IAEkB,WAAW,CAAC,IAA8B;QAC3D,OAAO,EAAE,aAAa,EAAE,UAAU,IAAI,CAAC,MAAM,EAAE,EAAE,CAAC;IACpD,CAAC;IAEkB,cAAc,CAAC,KAA8B;QAC9D,OAAO,EAAE,CAAC,SAAS,CAAC,KAAK,EAAE,EAAE,WAAW,EAAE,UAAU,EAAE,CAAC,CAAC;IAC1D,CAAC;;;AAEM,aAAM,GAAG,EAAI,CAAC;AACd,sBAAe,GAAG,MAAM,CAAC,CAAC,aAAa;AAEvC,kBAAW,GAAG,MAAM,CAAC,WAAW,CAAC;AACjC,eAAQ,GAAG,MAAM,CAAC,QAAQ,CAAC;AAC3B,yBAAkB,GAAG,MAAM,CAAC,kBAAkB,CAAC;AAC/C,gCAAyB,GAAG,MAAM,CAAC,yBAAyB,CAAC;AAC7D,wBAAiB,GAAG,MAAM,CAAC,iBAAiB,CAAC;AAC7C,oBAAa,GAAG,MAAM,CAAC,aAAa,CAAC;AACrC,oBAAa,GAAG,MAAM,CAAC,aAAa,CAAC;AACrC,qBAAc,GAAG,MAAM,CAAC,cAAc,CAAC;AACvC,sBAAe,GAAG,MAAM,CAAC,eAAe,CAAC;AACzC,0BAAmB,GAAG,MAAM,CAAC,mBAAmB,CAAC;AACjD,0BAAmB,GAAG,MAAM,CAAC,mBAAmB,CAAC;AACjD,4BAAqB,GAAG,MAAM,CAAC,qBAAqB,CAAC;AACrD,+BAAwB,GAAG,MAAM,CAAC,wBAAwB,CAAC;AAE3D,aAAM,GAAG,OAAO,CAAC,MAAM,CAAC;AACxB,mBAAY,GAAG,OAAO,CAAC,YAAY,CAAC;AAG7C,MAAM,CAAC,WAAW,GAAG,WAAW,CAAC;AACjC,MAAM,CAAC,IAAI,GAAG,IAAI,CAAC;AACnB,MAAM,CAAC,UAAU,GAAG,UAAU,CAAC;AAC/B,MAAM,CAAC,KAAK,GAAG,KAAK,CAAC;AACrB,MAAM,CAAC,eAAe,GAAG,eAAe,CAAC;AACzC,MAAM,CAAC,MAAM,GAAG,MAAM,CAAC;AACvB,MAAM,CAAC,KAAK,GAAG,KAAK,CAAC;AACrB,MAAM,CAAC,WAAW,GAA
G,WAAW,CAAC;AACjC,MAAM,CAAC,MAAM,GAAG,MAAM,CAAC;AACvB,MAAM,CAAC,UAAU,GAAG,UAAU,CAAC;AAC/B,MAAM,CAAC,UAAU,GAAG,UAAU,CAAC;AAC/B,MAAM,CAAC,IAAI,GAAG,IAAI,CAAC;AACnB,MAAM,CAAC,OAAO,GAAG,OAAO,CAAC;AACzB,MAAM,CAAC,WAAW,GAAG,WAAW,CAAC;AACjC,MAAM,CAAC,OAAO,GAAG,iBAAiB,CAAC;AAsKnC,4DAA4D;AAC5D,MAAM,OAAO,WAAY,SAAQ,MAAM;IAIrC;;;;;;;;;;;;;;;;OAgBG;IACH,YAAY,EACV,OAAO,GAAG,IAAI,CAAC,OAAO,CAAC,iBAAiB,CAAC,EACzC,MAAM,GAAG,IAAI,CAAC,OAAO,CAAC,sBAAsB,CAAC,EAC7C,UAAU,GAAG,IAAI,CAAC,OAAO,CAAC,oBAAoB,CAAC,EAC/C,QAAQ,EACR,UAAU,EACV,oBAAoB,EACpB,uBAAuB,EACvB,GAAG,IAAI,KACe,EAAE;QACxB,IAAI,CAAC,UAAU,EAAE;YACf,MAAM,IAAI,MAAM,CAAC,WAAW,CAC1B,8MAA8M,CAC/M,CAAC;SACH;QAED,IAAI,OAAO,oBAAoB,KAAK,UAAU,EAAE;YAC9C,uBAAuB,GAAG,IAAI,CAAC;SAChC;QAED,IAAI,CAAC,oBAAoB,IAAI,CAAC,MAAM,EAAE;YACpC,MAAM,IAAI,MAAM,CAAC,WAAW,CAC1B,sIAAsI,CACvI,CAAC;SACH;QAED,IAAI,oBAAoB,IAAI,MAAM,EAAE;YAClC,MAAM,IAAI,MAAM,CAAC,WAAW,CAC1B,6GAA6G,CAC9G,CAAC;SACH;QAED,qDAAqD;QACrD,MAAM,KAAN,MAAM,GAAK,gBAAgB,EAAC;QAE5B,IAAI,CAAC,YAAY,GAAG,EAAE,GAAG,IAAI,CAAC,YAAY,EAAE,aAAa,EAAE,UAAU,EAAE,CAAC;QAExE,IAAI,CAAC,OAAO,EAAE;YACZ,IAAI,CAAC,QAAQ,EAAE;gBACb,QAAQ,GAAG,OAAO,CAAC,GAAG,CAAC,uBAAuB,CAAC,CAAC;aACjD;YAED,IAAI,CAAC,QAAQ,EAAE;gBACb,MAAM,IAAI,MAAM,CAAC,WAAW,CAC1B,gHAAgH,CACjH,CAAC;aACH;YAED,OAAO,GAAG,GAAG,QAAQ,SAAS,CAAC;SAChC;aAAM;YACL,IAAI,QAAQ,EAAE;gBACZ,MAAM,IAAI,MAAM,CAAC,WAAW,CAAC,6CAA6C,CAAC,CAAC;aAC7E;SACF;QAED,KAAK,CAAC;YACJ,MAAM;YACN,OAAO;YACP,GAAG,IAAI;YACP,GAAG,CAAC,uBAAuB,KAAK,SAAS,CAAC,CAAC,CAAC,EAAE,uBAAuB,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC;SAC9E,CAAC,CAAC;QA9EL,eAAU,GAAW,EAAE,CAAC;QAgFtB,IAAI,CAAC,qBAAqB,GAAG,oBAAoB,CAAC;QAClD,IAAI,CAAC,UAAU,GAAG,UAAU,CAAC;QAC7B,IAAI,CAAC,WAAW,GAAG,UAAU,CAAC;IAChC,CAAC;IAEQ,YAAY,CAAC,OAA0C;QAK9D,IAAI,sBAAsB,CAAC,GAAG,CAAC,OAAO,CAAC,IAAI,CAAC,IAAI,OAAO,CAAC,MAAM,KAAK,MAAM,IAAI,OAAO,CAAC,IAAI,KAAK,SAAS,EAAE;YACvG,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,IAAI,CAAC,EAAE;gBAC7B,MAAM,IAAI,KAAK,CAAC,uCAAuC,CAAC,CAAC;aAC1D;YACD,MAAM,KAAK,GAAG,IAAI,CAAC,WAAW,IAAI,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;YACxD,IAAI,KAAK,KAAK,SAAS,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,cAAc,CAAC,EAAE;gBACjE,OAAO,CAAC,IAAI,GAAG,gBAAgB,KAAK,GAAG,OAAO,CAAC,IAAI,EAAE,CAAC;aACvD;SACF;QACD,OAAO,KAAK,CAAC,YAAY,CAAC,OAAO,CAAC,CAAC;IACrC,CAAC;IAEO,KAAK,CAAC,gBAAgB;QAC5B,IAAI,OAAO,IAAI,CAAC,qBAAqB,KAAK,UAAU,EAAE;YACpD,MAAM,KAAK,GAAG,MAAM,IAAI,CAAC,qBAAqB,EAAE,CAAC;YACjD,IAAI,CAAC,KAAK,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;gBACvC,MAAM,IAAI,MAAM,CAAC,WAAW,CAC1B,+EAA+E,KAAK,EAAE,CACvF,CAAC;aACH;YACD,OAAO,KAAK,CAAC;SACd;QACD,OAAO,SAAS,CAAC;IACnB,CAAC;IAEkB,WAAW,CAAC,IAA8B;QAC3D,OAAO,EAAE,CAAC;IACZ,CAAC;IAEkB,KAAK,CAAC,cAAc,CAAC,IAAuC;QAC7E;;;;;WAKG;QACH,IAAI,IAAI,CAAC,OAAO,EAAE,CAAC,SAAS,CAAC,EAAE;YAC7B,OAAO,KAAK,CAAC,cAAc,CAAC,IAAI,CAAC,CAAC;SACnC;QACD,MAAM,KAAK,GAAG,MAAM,IAAI,CAAC,gBAAgB,EAAE,CAAC;QAC5C,IAAI,CAAC,OAAO,KAAZ,IAAI,CAAC,OAAO,GAAK,EAAE,EAAC;QACpB,IAAI,KAAK,EAAE;YACT,IAAI,CAAC,OAAO,CAAC,eAAe,CAAC,GAAG,UAAU,KAAK,EAAE,CAAC;SACnD;aAAM,IAAI,IAAI,CAAC,MAAM,KAAK,gBAAgB,EAAE;YAC3C,IAAI,CAAC,OAAO,CAAC,SAAS,CAAC,GAAG,IAAI,CAAC,MAAM,CAAC;SACvC;aAAM;YACL,MAAM,IAAI,MAAM,CAAC,WAAW,CAAC,uBAAuB,CAAC,CAAC;SACvD;QACD,OAAO,KAAK,CAAC,cAAc,CAAC,IAAI,CAAC,CAAC;IACpC,CAAC;CACF;AAED,MAAM,sBAAsB,GAAG,IAAI,GAAG,CAAC;IACrC,cAAc;IACd,mBAAmB;IACnB,aAAa;IACb,uBAAuB;IACvB,qBAAqB;IACrB,eAAe;IACf,qBAAqB;CACtB,CAAC,CAAC;AAEH,MAAM,gBAAgB,GAAG,eAAe,CAAC;OAIlC,EAAE,MAAM,EAAE,YAAY,EAAE;OACxB,EACL,WAAW,EACX,QAAQ,EACR,kBAAkB,EAClB,yBAAyB,EACzB,iBAAiB,EACjB,aAAa,EACb,aAAa,EACb,cAAc,EACd,eAAe,EACf,mBAAmB,EACnB,mBAAmB,EACnB,qBAAqB,EACrB,wBAAwB,GACzB;AAED,eAAe,MAAM,CAAC"} \ No newline at end of 
file diff --git a/node_modules/openai/internal/decoders/line.d.ts b/node_modules/openai/internal/decoders/line.d.ts new file mode 100644 index 0000000..f9b0cff --- /dev/null +++ b/node_modules/openai/internal/decoders/line.d.ts @@ -0,0 +1,21 @@ + +type Bytes = string | ArrayBuffer | Uint8Array | Buffer | null | undefined; +/** + * A re-implementation of httpx's `LineDecoder` in Python that handles incrementally + * reading lines from text. + * + * https://github.com/encode/httpx/blob/920333ea98118e9cf617f246905d7b202510941c/httpx/_decoders.py#L258 + */ +export declare class LineDecoder { + static NEWLINE_CHARS: Set; + static NEWLINE_REGEXP: RegExp; + buffer: string[]; + trailingCR: boolean; + textDecoder: any; + constructor(); + decode(chunk: Bytes): string[]; + decodeText(bytes: Bytes): string; + flush(): string[]; +} +export {}; +//# sourceMappingURL=line.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/internal/decoders/line.d.ts.map b/node_modules/openai/internal/decoders/line.d.ts.map new file mode 100644 index 0000000..cf3b19d --- /dev/null +++ b/node_modules/openai/internal/decoders/line.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"line.d.ts","sourceRoot":"","sources":["../../src/internal/decoders/line.ts"],"names":[],"mappings":";AAEA,KAAK,KAAK,GAAG,MAAM,GAAG,WAAW,GAAG,UAAU,GAAG,MAAM,GAAG,IAAI,GAAG,SAAS,CAAC;AAE3E;;;;;GAKG;AACH,qBAAa,WAAW;IAEtB,MAAM,CAAC,aAAa,cAAyB;IAC7C,MAAM,CAAC,cAAc,SAAkB;IAEvC,MAAM,EAAE,MAAM,EAAE,CAAC;IACjB,UAAU,EAAE,OAAO,CAAC;IACpB,WAAW,EAAE,GAAG,CAAC;;IAOjB,MAAM,CAAC,KAAK,EAAE,KAAK,GAAG,MAAM,EAAE;IA0C9B,UAAU,CAAC,KAAK,EAAE,KAAK,GAAG,MAAM;IAqChC,KAAK,IAAI,MAAM,EAAE;CAUlB"} \ No newline at end of file diff --git a/node_modules/openai/internal/decoders/line.js b/node_modules/openai/internal/decoders/line.js new file mode 100644 index 0000000..815226e --- /dev/null +++ b/node_modules/openai/internal/decoders/line.js @@ -0,0 +1,88 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.LineDecoder = void 0; +const error_1 = require("../../error.js"); +/** + * A re-implementation of httpx's `LineDecoder` in Python that handles incrementally + * reading lines from text. 
+ * + * https://github.com/encode/httpx/blob/920333ea98118e9cf617f246905d7b202510941c/httpx/_decoders.py#L258 + */ +class LineDecoder { + constructor() { + this.buffer = []; + this.trailingCR = false; + } + decode(chunk) { + let text = this.decodeText(chunk); + if (this.trailingCR) { + text = '\r' + text; + this.trailingCR = false; + } + if (text.endsWith('\r')) { + this.trailingCR = true; + text = text.slice(0, -1); + } + if (!text) { + return []; + } + const trailingNewline = LineDecoder.NEWLINE_CHARS.has(text[text.length - 1] || ''); + let lines = text.split(LineDecoder.NEWLINE_REGEXP); + // if there is a trailing new line then the last entry will be an empty + // string which we don't care about + if (trailingNewline) { + lines.pop(); + } + if (lines.length === 1 && !trailingNewline) { + this.buffer.push(lines[0]); + return []; + } + if (this.buffer.length > 0) { + lines = [this.buffer.join('') + lines[0], ...lines.slice(1)]; + this.buffer = []; + } + if (!trailingNewline) { + this.buffer = [lines.pop() || '']; + } + return lines; + } + decodeText(bytes) { + if (bytes == null) + return ''; + if (typeof bytes === 'string') + return bytes; + // Node: + if (typeof Buffer !== 'undefined') { + if (bytes instanceof Buffer) { + return bytes.toString(); + } + if (bytes instanceof Uint8Array) { + return Buffer.from(bytes).toString(); + } + throw new error_1.OpenAIError(`Unexpected: received non-Uint8Array (${bytes.constructor.name}) stream chunk in an environment with a global "Buffer" defined, which this library assumes to be Node. Please report this error.`); + } + // Browser + if (typeof TextDecoder !== 'undefined') { + if (bytes instanceof Uint8Array || bytes instanceof ArrayBuffer) { + this.textDecoder ?? (this.textDecoder = new TextDecoder('utf8')); + return this.textDecoder.decode(bytes); + } + throw new error_1.OpenAIError(`Unexpected: received non-Uint8Array/ArrayBuffer (${bytes.constructor.name}) in a web platform. Please report this error.`); + } + throw new error_1.OpenAIError(`Unexpected: neither Buffer nor TextDecoder are available as globals. 
Please report this error.`); + } + flush() { + if (!this.buffer.length && !this.trailingCR) { + return []; + } + const lines = [this.buffer.join('')]; + this.buffer = []; + this.trailingCR = false; + return lines; + } +} +exports.LineDecoder = LineDecoder; +// prettier-ignore +LineDecoder.NEWLINE_CHARS = new Set(['\n', '\r']); +LineDecoder.NEWLINE_REGEXP = /\r\n|[\n\r]/g; +//# sourceMappingURL=line.js.map \ No newline at end of file diff --git a/node_modules/openai/internal/decoders/line.js.map b/node_modules/openai/internal/decoders/line.js.map new file mode 100644 index 0000000..6bed82c --- /dev/null +++ b/node_modules/openai/internal/decoders/line.js.map @@ -0,0 +1 @@ +{"version":3,"file":"line.js","sourceRoot":"","sources":["../../src/internal/decoders/line.ts"],"names":[],"mappings":";;;AAAA,0CAA0C;AAI1C;;;;;GAKG;AACH,MAAa,WAAW;IAStB;QACE,IAAI,CAAC,MAAM,GAAG,EAAE,CAAC;QACjB,IAAI,CAAC,UAAU,GAAG,KAAK,CAAC;IAC1B,CAAC;IAED,MAAM,CAAC,KAAY;QACjB,IAAI,IAAI,GAAG,IAAI,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC;QAElC,IAAI,IAAI,CAAC,UAAU,EAAE;YACnB,IAAI,GAAG,IAAI,GAAG,IAAI,CAAC;YACnB,IAAI,CAAC,UAAU,GAAG,KAAK,CAAC;SACzB;QACD,IAAI,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,EAAE;YACvB,IAAI,CAAC,UAAU,GAAG,IAAI,CAAC;YACvB,IAAI,GAAG,IAAI,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC;SAC1B;QAED,IAAI,CAAC,IAAI,EAAE;YACT,OAAO,EAAE,CAAC;SACX;QAED,MAAM,eAAe,GAAG,WAAW,CAAC,aAAa,CAAC,GAAG,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,GAAG,CAAC,CAAC,IAAI,EAAE,CAAC,CAAC;QACnF,IAAI,KAAK,GAAG,IAAI,CAAC,KAAK,CAAC,WAAW,CAAC,cAAc,CAAC,CAAC;QAEnD,uEAAuE;QACvE,mCAAmC;QACnC,IAAI,eAAe,EAAE;YACnB,KAAK,CAAC,GAAG,EAAE,CAAC;SACb;QAED,IAAI,KAAK,CAAC,MAAM,KAAK,CAAC,IAAI,CAAC,eAAe,EAAE;YAC1C,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAE,CAAC,CAAC;YAC5B,OAAO,EAAE,CAAC;SACX;QAED,IAAI,IAAI,CAAC,MAAM,CAAC,MAAM,GAAG,CAAC,EAAE;YAC1B,KAAK,GAAG,CAAC,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE,CAAC,GAAG,KAAK,CAAC,CAAC,CAAC,EAAE,GAAG,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC;YAC7D,IAAI,CAAC,MAAM,GAAG,EAAE,CAAC;SAClB;QAED,IAAI,CAAC,eAAe,EAAE;YACpB,IAAI,CAAC,MAAM,GAAG,CAAC,KAAK,CAAC,GAAG,EAAE,IAAI,EAAE,CAAC,CAAC;SACnC;QAED,OAAO,KAAK,CAAC;IACf,CAAC;IAED,UAAU,CAAC,KAAY;QACrB,IAAI,KAAK,IAAI,IAAI;YAAE,OAAO,EAAE,CAAC;QAC7B,IAAI,OAAO,KAAK,KAAK,QAAQ;YAAE,OAAO,KAAK,CAAC;QAE5C,QAAQ;QACR,IAAI,OAAO,MAAM,KAAK,WAAW,EAAE;YACjC,IAAI,KAAK,YAAY,MAAM,EAAE;gBAC3B,OAAO,KAAK,CAAC,QAAQ,EAAE,CAAC;aACzB;YACD,IAAI,KAAK,YAAY,UAAU,EAAE;gBAC/B,OAAO,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,QAAQ,EAAE,CAAC;aACtC;YAED,MAAM,IAAI,mBAAW,CACnB,wCAAwC,KAAK,CAAC,WAAW,CAAC,IAAI,mIAAmI,CAClM,CAAC;SACH;QAED,UAAU;QACV,IAAI,OAAO,WAAW,KAAK,WAAW,EAAE;YACtC,IAAI,KAAK,YAAY,UAAU,IAAI,KAAK,YAAY,WAAW,EAAE;gBAC/D,IAAI,CAAC,WAAW,KAAhB,IAAI,CAAC,WAAW,GAAK,IAAI,WAAW,CAAC,MAAM,CAAC,EAAC;gBAC7C,OAAO,IAAI,CAAC,WAAW,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;aACvC;YAED,MAAM,IAAI,mBAAW,CACnB,oDACG,KAAa,CAAC,WAAW,CAAC,IAC7B,gDAAgD,CACjD,CAAC;SACH;QAED,MAAM,IAAI,mBAAW,CACnB,gGAAgG,CACjG,CAAC;IACJ,CAAC;IAED,KAAK;QACH,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,MAAM,IAAI,CAAC,IAAI,CAAC,UAAU,EAAE;YAC3C,OAAO,EAAE,CAAC;SACX;QAED,MAAM,KAAK,GAAG,CAAC,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,CAAC;QACrC,IAAI,CAAC,MAAM,GAAG,EAAE,CAAC;QACjB,IAAI,CAAC,UAAU,GAAG,KAAK,CAAC;QACxB,OAAO,KAAK,CAAC;IACf,CAAC;;AAtGH,kCAuGC;AAtGC,kBAAkB;AACX,yBAAa,GAAG,IAAI,GAAG,CAAC,CAAC,IAAI,EAAE,IAAI,CAAC,CAAC,CAAC;AACtC,0BAAc,GAAG,cAAc,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/internal/decoders/line.mjs b/node_modules/openai/internal/decoders/line.mjs new file mode 100644 index 0000000..a3c41b2 --- /dev/null +++ b/node_modules/openai/internal/decoders/line.mjs @@ -0,0 +1,84 @@ 
+import { OpenAIError } from "../../error.mjs"; +/** + * A re-implementation of httpx's `LineDecoder` in Python that handles incrementally + * reading lines from text. + * + * https://github.com/encode/httpx/blob/920333ea98118e9cf617f246905d7b202510941c/httpx/_decoders.py#L258 + */ +export class LineDecoder { + constructor() { + this.buffer = []; + this.trailingCR = false; + } + decode(chunk) { + let text = this.decodeText(chunk); + if (this.trailingCR) { + text = '\r' + text; + this.trailingCR = false; + } + if (text.endsWith('\r')) { + this.trailingCR = true; + text = text.slice(0, -1); + } + if (!text) { + return []; + } + const trailingNewline = LineDecoder.NEWLINE_CHARS.has(text[text.length - 1] || ''); + let lines = text.split(LineDecoder.NEWLINE_REGEXP); + // if there is a trailing new line then the last entry will be an empty + // string which we don't care about + if (trailingNewline) { + lines.pop(); + } + if (lines.length === 1 && !trailingNewline) { + this.buffer.push(lines[0]); + return []; + } + if (this.buffer.length > 0) { + lines = [this.buffer.join('') + lines[0], ...lines.slice(1)]; + this.buffer = []; + } + if (!trailingNewline) { + this.buffer = [lines.pop() || '']; + } + return lines; + } + decodeText(bytes) { + if (bytes == null) + return ''; + if (typeof bytes === 'string') + return bytes; + // Node: + if (typeof Buffer !== 'undefined') { + if (bytes instanceof Buffer) { + return bytes.toString(); + } + if (bytes instanceof Uint8Array) { + return Buffer.from(bytes).toString(); + } + throw new OpenAIError(`Unexpected: received non-Uint8Array (${bytes.constructor.name}) stream chunk in an environment with a global "Buffer" defined, which this library assumes to be Node. Please report this error.`); + } + // Browser + if (typeof TextDecoder !== 'undefined') { + if (bytes instanceof Uint8Array || bytes instanceof ArrayBuffer) { + this.textDecoder ?? (this.textDecoder = new TextDecoder('utf8')); + return this.textDecoder.decode(bytes); + } + throw new OpenAIError(`Unexpected: received non-Uint8Array/ArrayBuffer (${bytes.constructor.name}) in a web platform. Please report this error.`); + } + throw new OpenAIError(`Unexpected: neither Buffer nor TextDecoder are available as globals. 
Please report this error.`); + } + flush() { + if (!this.buffer.length && !this.trailingCR) { + return []; + } + const lines = [this.buffer.join('')]; + this.buffer = []; + this.trailingCR = false; + return lines; + } +} +// prettier-ignore +LineDecoder.NEWLINE_CHARS = new Set(['\n', '\r']); +LineDecoder.NEWLINE_REGEXP = /\r\n|[\n\r]/g; +//# sourceMappingURL=line.mjs.map \ No newline at end of file diff --git a/node_modules/openai/internal/decoders/line.mjs.map b/node_modules/openai/internal/decoders/line.mjs.map new file mode 100644 index 0000000..989bef4 --- /dev/null +++ b/node_modules/openai/internal/decoders/line.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"line.mjs","sourceRoot":"","sources":["../../src/internal/decoders/line.ts"],"names":[],"mappings":"OAAO,EAAE,WAAW,EAAE;AAItB;;;;;GAKG;AACH,MAAM,OAAO,WAAW;IAStB;QACE,IAAI,CAAC,MAAM,GAAG,EAAE,CAAC;QACjB,IAAI,CAAC,UAAU,GAAG,KAAK,CAAC;IAC1B,CAAC;IAED,MAAM,CAAC,KAAY;QACjB,IAAI,IAAI,GAAG,IAAI,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC;QAElC,IAAI,IAAI,CAAC,UAAU,EAAE;YACnB,IAAI,GAAG,IAAI,GAAG,IAAI,CAAC;YACnB,IAAI,CAAC,UAAU,GAAG,KAAK,CAAC;SACzB;QACD,IAAI,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,EAAE;YACvB,IAAI,CAAC,UAAU,GAAG,IAAI,CAAC;YACvB,IAAI,GAAG,IAAI,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC;SAC1B;QAED,IAAI,CAAC,IAAI,EAAE;YACT,OAAO,EAAE,CAAC;SACX;QAED,MAAM,eAAe,GAAG,WAAW,CAAC,aAAa,CAAC,GAAG,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,GAAG,CAAC,CAAC,IAAI,EAAE,CAAC,CAAC;QACnF,IAAI,KAAK,GAAG,IAAI,CAAC,KAAK,CAAC,WAAW,CAAC,cAAc,CAAC,CAAC;QAEnD,uEAAuE;QACvE,mCAAmC;QACnC,IAAI,eAAe,EAAE;YACnB,KAAK,CAAC,GAAG,EAAE,CAAC;SACb;QAED,IAAI,KAAK,CAAC,MAAM,KAAK,CAAC,IAAI,CAAC,eAAe,EAAE;YAC1C,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAE,CAAC,CAAC;YAC5B,OAAO,EAAE,CAAC;SACX;QAED,IAAI,IAAI,CAAC,MAAM,CAAC,MAAM,GAAG,CAAC,EAAE;YAC1B,KAAK,GAAG,CAAC,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE,CAAC,GAAG,KAAK,CAAC,CAAC,CAAC,EAAE,GAAG,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC;YAC7D,IAAI,CAAC,MAAM,GAAG,EAAE,CAAC;SAClB;QAED,IAAI,CAAC,eAAe,EAAE;YACpB,IAAI,CAAC,MAAM,GAAG,CAAC,KAAK,CAAC,GAAG,EAAE,IAAI,EAAE,CAAC,CAAC;SACnC;QAED,OAAO,KAAK,CAAC;IACf,CAAC;IAED,UAAU,CAAC,KAAY;QACrB,IAAI,KAAK,IAAI,IAAI;YAAE,OAAO,EAAE,CAAC;QAC7B,IAAI,OAAO,KAAK,KAAK,QAAQ;YAAE,OAAO,KAAK,CAAC;QAE5C,QAAQ;QACR,IAAI,OAAO,MAAM,KAAK,WAAW,EAAE;YACjC,IAAI,KAAK,YAAY,MAAM,EAAE;gBAC3B,OAAO,KAAK,CAAC,QAAQ,EAAE,CAAC;aACzB;YACD,IAAI,KAAK,YAAY,UAAU,EAAE;gBAC/B,OAAO,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,QAAQ,EAAE,CAAC;aACtC;YAED,MAAM,IAAI,WAAW,CACnB,wCAAwC,KAAK,CAAC,WAAW,CAAC,IAAI,mIAAmI,CAClM,CAAC;SACH;QAED,UAAU;QACV,IAAI,OAAO,WAAW,KAAK,WAAW,EAAE;YACtC,IAAI,KAAK,YAAY,UAAU,IAAI,KAAK,YAAY,WAAW,EAAE;gBAC/D,IAAI,CAAC,WAAW,KAAhB,IAAI,CAAC,WAAW,GAAK,IAAI,WAAW,CAAC,MAAM,CAAC,EAAC;gBAC7C,OAAO,IAAI,CAAC,WAAW,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;aACvC;YAED,MAAM,IAAI,WAAW,CACnB,oDACG,KAAa,CAAC,WAAW,CAAC,IAC7B,gDAAgD,CACjD,CAAC;SACH;QAED,MAAM,IAAI,WAAW,CACnB,gGAAgG,CACjG,CAAC;IACJ,CAAC;IAED,KAAK;QACH,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,MAAM,IAAI,CAAC,IAAI,CAAC,UAAU,EAAE;YAC3C,OAAO,EAAE,CAAC;SACX;QAED,MAAM,KAAK,GAAG,CAAC,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,CAAC;QACrC,IAAI,CAAC,MAAM,GAAG,EAAE,CAAC;QACjB,IAAI,CAAC,UAAU,GAAG,KAAK,CAAC;QACxB,OAAO,KAAK,CAAC;IACf,CAAC;;AArGD,kBAAkB;AACX,yBAAa,GAAG,IAAI,GAAG,CAAC,CAAC,IAAI,EAAE,IAAI,CAAC,CAAC,CAAC;AACtC,0BAAc,GAAG,cAAc,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/internal/qs/formats.d.ts b/node_modules/openai/internal/qs/formats.d.ts new file mode 100644 index 0000000..b1f69f3 --- /dev/null +++ b/node_modules/openai/internal/qs/formats.d.ts @@ -0,0 +1,6 @@ +import type { Format } from "./types.js"; 
+export declare const default_format: Format; +export declare const formatters: Record string>; +export declare const RFC1738 = "RFC1738"; +export declare const RFC3986 = "RFC3986"; +//# sourceMappingURL=formats.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/internal/qs/formats.d.ts.map b/node_modules/openai/internal/qs/formats.d.ts.map new file mode 100644 index 0000000..6fce4fc --- /dev/null +++ b/node_modules/openai/internal/qs/formats.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"formats.d.ts","sourceRoot":"","sources":["../../src/internal/qs/formats.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,MAAM,EAAE,MAAM,SAAS,CAAC;AAEtC,eAAO,MAAM,cAAc,EAAE,MAAkB,CAAC;AAChD,eAAO,MAAM,UAAU,EAAE,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,EAAE,WAAW,KAAK,MAAM,CAGnE,CAAC;AACF,eAAO,MAAM,OAAO,YAAY,CAAC;AACjC,eAAO,MAAM,OAAO,YAAY,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/internal/qs/formats.js b/node_modules/openai/internal/qs/formats.js new file mode 100644 index 0000000..8c5c1e5 --- /dev/null +++ b/node_modules/openai/internal/qs/formats.js @@ -0,0 +1,11 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.RFC3986 = exports.RFC1738 = exports.formatters = exports.default_format = void 0; +exports.default_format = 'RFC3986'; +exports.formatters = { + RFC1738: (v) => String(v).replace(/%20/g, '+'), + RFC3986: (v) => String(v), +}; +exports.RFC1738 = 'RFC1738'; +exports.RFC3986 = 'RFC3986'; +//# sourceMappingURL=formats.js.map \ No newline at end of file diff --git a/node_modules/openai/internal/qs/formats.js.map b/node_modules/openai/internal/qs/formats.js.map new file mode 100644 index 0000000..9416259 --- /dev/null +++ b/node_modules/openai/internal/qs/formats.js.map @@ -0,0 +1 @@ +{"version":3,"file":"formats.js","sourceRoot":"","sources":["../../src/internal/qs/formats.ts"],"names":[],"mappings":";;;AAEa,QAAA,cAAc,GAAW,SAAS,CAAC;AACnC,QAAA,UAAU,GAAiD;IACtE,OAAO,EAAE,CAAC,CAAc,EAAE,EAAE,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,MAAM,EAAE,GAAG,CAAC;IAC3D,OAAO,EAAE,CAAC,CAAc,EAAE,EAAE,CAAC,MAAM,CAAC,CAAC,CAAC;CACvC,CAAC;AACW,QAAA,OAAO,GAAG,SAAS,CAAC;AACpB,QAAA,OAAO,GAAG,SAAS,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/internal/qs/formats.mjs b/node_modules/openai/internal/qs/formats.mjs new file mode 100644 index 0000000..f9c2365 --- /dev/null +++ b/node_modules/openai/internal/qs/formats.mjs @@ -0,0 +1,8 @@ +export const default_format = 'RFC3986'; +export const formatters = { + RFC1738: (v) => String(v).replace(/%20/g, '+'), + RFC3986: (v) => String(v), +}; +export const RFC1738 = 'RFC1738'; +export const RFC3986 = 'RFC3986'; +//# sourceMappingURL=formats.mjs.map \ No newline at end of file diff --git a/node_modules/openai/internal/qs/formats.mjs.map b/node_modules/openai/internal/qs/formats.mjs.map new file mode 100644 index 0000000..b55a936 --- /dev/null +++ b/node_modules/openai/internal/qs/formats.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"formats.mjs","sourceRoot":"","sources":["../../src/internal/qs/formats.ts"],"names":[],"mappings":"AAEA,MAAM,CAAC,MAAM,cAAc,GAAW,SAAS,CAAC;AAChD,MAAM,CAAC,MAAM,UAAU,GAAiD;IACtE,OAAO,EAAE,CAAC,CAAc,EAAE,EAAE,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,MAAM,EAAE,GAAG,CAAC;IAC3D,OAAO,EAAE,CAAC,CAAc,EAAE,EAAE,CAAC,MAAM,CAAC,CAAC,CAAC;CACvC,CAAC;AACF,MAAM,CAAC,MAAM,OAAO,GAAG,SAAS,CAAC;AACjC,MAAM,CAAC,MAAM,OAAO,GAAG,SAAS,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/internal/qs/index.d.ts b/node_modules/openai/internal/qs/index.d.ts new file mode 100644 
index 0000000..843de7a --- /dev/null +++ b/node_modules/openai/internal/qs/index.d.ts @@ -0,0 +1,10 @@ +declare const formats: { + formatters: Record string>; + RFC1738: string; + RFC3986: string; + default: import("./types").Format; +}; +export { stringify } from "./stringify.js"; +export { formats }; +export type { DefaultDecoder, DefaultEncoder, Format, ParseOptions, StringifyOptions } from "./types.js"; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/internal/qs/index.d.ts.map b/node_modules/openai/internal/qs/index.d.ts.map new file mode 100644 index 0000000..fdabb23 --- /dev/null +++ b/node_modules/openai/internal/qs/index.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/internal/qs/index.ts"],"names":[],"mappings":"AAEA,QAAA,MAAM,OAAO;;;;;CAKZ,CAAC;AAEF,OAAO,EAAE,SAAS,EAAE,MAAM,aAAa,CAAC;AACxC,OAAO,EAAE,OAAO,EAAE,CAAC;AAEnB,YAAY,EAAE,cAAc,EAAE,cAAc,EAAE,MAAM,EAAE,YAAY,EAAE,gBAAgB,EAAE,MAAM,SAAS,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/internal/qs/index.js b/node_modules/openai/internal/qs/index.js new file mode 100644 index 0000000..292aed6 --- /dev/null +++ b/node_modules/openai/internal/qs/index.js @@ -0,0 +1,14 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.formats = exports.stringify = void 0; +const formats_1 = require("./formats.js"); +const formats = { + formatters: formats_1.formatters, + RFC1738: formats_1.RFC1738, + RFC3986: formats_1.RFC3986, + default: formats_1.default_format, +}; +exports.formats = formats; +var stringify_1 = require("./stringify.js"); +Object.defineProperty(exports, "stringify", { enumerable: true, get: function () { return stringify_1.stringify; } }); +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/openai/internal/qs/index.js.map b/node_modules/openai/internal/qs/index.js.map new file mode 100644 index 0000000..fc1c8d4 --- /dev/null +++ b/node_modules/openai/internal/qs/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/internal/qs/index.ts"],"names":[],"mappings":";;;AAAA,0CAAyE;AAEzE,MAAM,OAAO,GAAG;IACd,UAAU,EAAV,oBAAU;IACV,OAAO,EAAP,iBAAO;IACP,OAAO,EAAP,iBAAO;IACP,OAAO,EAAE,wBAAc;CACxB,CAAC;AAGO,0BAAO;AADhB,4CAAwC;AAA/B,sGAAA,SAAS,OAAA"} \ No newline at end of file diff --git a/node_modules/openai/internal/qs/index.mjs b/node_modules/openai/internal/qs/index.mjs new file mode 100644 index 0000000..166b754 --- /dev/null +++ b/node_modules/openai/internal/qs/index.mjs @@ -0,0 +1,10 @@ +import { default_format, formatters, RFC1738, RFC3986 } from "./formats.mjs"; +const formats = { + formatters, + RFC1738, + RFC3986, + default: default_format, +}; +export { stringify } from "./stringify.mjs"; +export { formats }; +//# sourceMappingURL=index.mjs.map \ No newline at end of file diff --git a/node_modules/openai/internal/qs/index.mjs.map b/node_modules/openai/internal/qs/index.mjs.map new file mode 100644 index 0000000..1d63c5a --- /dev/null +++ b/node_modules/openai/internal/qs/index.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"index.mjs","sourceRoot":"","sources":["../../src/internal/qs/index.ts"],"names":[],"mappings":"OAAO,EAAE,cAAc,EAAE,UAAU,EAAE,OAAO,EAAE,OAAO,EAAE;AAEvD,MAAM,OAAO,GAAG;IACd,UAAU;IACV,OAAO;IACP,OAAO;IACP,OAAO,EAAE,cAAc;CACxB,CAAC;OAEK,EAAE,SAAS,EAAE;AACpB,OAAO,EAAE,OAAO,EAAE,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/internal/qs/stringify.d.ts 
b/node_modules/openai/internal/qs/stringify.d.ts new file mode 100644 index 0000000..ab8d8b0 --- /dev/null +++ b/node_modules/openai/internal/qs/stringify.d.ts @@ -0,0 +1,3 @@ +import type { StringifyOptions } from "./types.js"; +export declare function stringify(object: any, opts?: StringifyOptions): string; +//# sourceMappingURL=stringify.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/internal/qs/stringify.d.ts.map b/node_modules/openai/internal/qs/stringify.d.ts.map new file mode 100644 index 0000000..dbf2abb --- /dev/null +++ b/node_modules/openai/internal/qs/stringify.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"stringify.d.ts","sourceRoot":"","sources":["../../src/internal/qs/stringify.ts"],"names":[],"mappings":"AAEA,OAAO,KAAK,EAAyB,gBAAgB,EAAE,MAAM,SAAS,CAAC;AAkTvE,wBAAgB,SAAS,CAAC,MAAM,EAAE,GAAG,EAAE,IAAI,GAAE,gBAAqB,UA+EjE"} \ No newline at end of file diff --git a/node_modules/openai/internal/qs/stringify.js b/node_modules/openai/internal/qs/stringify.js new file mode 100644 index 0000000..a553e98 --- /dev/null +++ b/node_modules/openai/internal/qs/stringify.js @@ -0,0 +1,280 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.stringify = void 0; +const utils_1 = require("./utils.js"); +const formats_1 = require("./formats.js"); +const has = Object.prototype.hasOwnProperty; +const array_prefix_generators = { + brackets(prefix) { + return String(prefix) + '[]'; + }, + comma: 'comma', + indices(prefix, key) { + return String(prefix) + '[' + key + ']'; + }, + repeat(prefix) { + return String(prefix); + }, +}; +const is_array = Array.isArray; +const push = Array.prototype.push; +const push_to_array = function (arr, value_or_array) { + push.apply(arr, is_array(value_or_array) ? value_or_array : [value_or_array]); +}; +const to_ISO = Date.prototype.toISOString; +const defaults = { + addQueryPrefix: false, + allowDots: false, + allowEmptyArrays: false, + arrayFormat: 'indices', + charset: 'utf-8', + charsetSentinel: false, + delimiter: '&', + encode: true, + encodeDotInKeys: false, + encoder: utils_1.encode, + encodeValuesOnly: false, + format: formats_1.default_format, + formatter: formats_1.formatters[formats_1.default_format], + /** @deprecated */ + indices: false, + serializeDate(date) { + return to_ISO.call(date); + }, + skipNulls: false, + strictNullHandling: false, +}; +function is_non_nullish_primitive(v) { + return (typeof v === 'string' || + typeof v === 'number' || + typeof v === 'boolean' || + typeof v === 'symbol' || + typeof v === 'bigint'); +} +const sentinel = {}; +function inner_stringify(object, prefix, generateArrayPrefix, commaRoundTrip, allowEmptyArrays, strictNullHandling, skipNulls, encodeDotInKeys, encoder, filter, sort, allowDots, serializeDate, format, formatter, encodeValuesOnly, charset, sideChannel) { + let obj = object; + let tmp_sc = sideChannel; + let step = 0; + let find_flag = false; + while ((tmp_sc = tmp_sc.get(sentinel)) !== void undefined && !find_flag) { + // Where object last appeared in the ref tree + const pos = tmp_sc.get(object); + step += 1; + if (typeof pos !== 'undefined') { + if (pos === step) { + throw new RangeError('Cyclic object value'); + } + else { + find_flag = true; // Break while + } + } + if (typeof tmp_sc.get(sentinel) === 'undefined') { + step = 0; + } + } + if (typeof filter === 'function') { + obj = filter(prefix, obj); + } + else if (obj instanceof Date) { + obj = serializeDate?.(obj); + } + else if (generateArrayPrefix === 'comma' && is_array(obj)) { + obj = (0, 
utils_1.maybe_map)(obj, function (value) { + if (value instanceof Date) { + return serializeDate?.(value); + } + return value; + }); + } + if (obj === null) { + if (strictNullHandling) { + return encoder && !encodeValuesOnly ? + // @ts-expect-error + encoder(prefix, defaults.encoder, charset, 'key', format) + : prefix; + } + obj = ''; + } + if (is_non_nullish_primitive(obj) || (0, utils_1.is_buffer)(obj)) { + if (encoder) { + const key_value = encodeValuesOnly ? prefix + // @ts-expect-error + : encoder(prefix, defaults.encoder, charset, 'key', format); + return [ + formatter?.(key_value) + + '=' + + // @ts-expect-error + formatter?.(encoder(obj, defaults.encoder, charset, 'value', format)), + ]; + } + return [formatter?.(prefix) + '=' + formatter?.(String(obj))]; + } + const values = []; + if (typeof obj === 'undefined') { + return values; + } + let obj_keys; + if (generateArrayPrefix === 'comma' && is_array(obj)) { + // we need to join elements in + if (encodeValuesOnly && encoder) { + // @ts-expect-error values only + obj = (0, utils_1.maybe_map)(obj, encoder); + } + obj_keys = [{ value: obj.length > 0 ? obj.join(',') || null : void undefined }]; + } + else if (is_array(filter)) { + obj_keys = filter; + } + else { + const keys = Object.keys(obj); + obj_keys = sort ? keys.sort(sort) : keys; + } + const encoded_prefix = encodeDotInKeys ? String(prefix).replace(/\./g, '%2E') : String(prefix); + const adjusted_prefix = commaRoundTrip && is_array(obj) && obj.length === 1 ? encoded_prefix + '[]' : encoded_prefix; + if (allowEmptyArrays && is_array(obj) && obj.length === 0) { + return adjusted_prefix + '[]'; + } + for (let j = 0; j < obj_keys.length; ++j) { + const key = obj_keys[j]; + const value = + // @ts-ignore + typeof key === 'object' && typeof key.value !== 'undefined' ? key.value : obj[key]; + if (skipNulls && value === null) { + continue; + } + // @ts-ignore + const encoded_key = allowDots && encodeDotInKeys ? key.replace(/\./g, '%2E') : key; + const key_prefix = is_array(obj) ? + typeof generateArrayPrefix === 'function' ? + generateArrayPrefix(adjusted_prefix, encoded_key) + : adjusted_prefix + : adjusted_prefix + (allowDots ? '.' + encoded_key : '[' + encoded_key + ']'); + sideChannel.set(object, step); + const valueSideChannel = new WeakMap(); + valueSideChannel.set(sentinel, sideChannel); + push_to_array(values, inner_stringify(value, key_prefix, generateArrayPrefix, commaRoundTrip, allowEmptyArrays, strictNullHandling, skipNulls, encodeDotInKeys, + // @ts-ignore + generateArrayPrefix === 'comma' && encodeValuesOnly && is_array(obj) ? 
null : encoder, filter, sort, allowDots, serializeDate, format, formatter, encodeValuesOnly, charset, valueSideChannel)); + } + return values; +} +function normalize_stringify_options(opts = defaults) { + if (typeof opts.allowEmptyArrays !== 'undefined' && typeof opts.allowEmptyArrays !== 'boolean') { + throw new TypeError('`allowEmptyArrays` option can only be `true` or `false`, when provided'); + } + if (typeof opts.encodeDotInKeys !== 'undefined' && typeof opts.encodeDotInKeys !== 'boolean') { + throw new TypeError('`encodeDotInKeys` option can only be `true` or `false`, when provided'); + } + if (opts.encoder !== null && typeof opts.encoder !== 'undefined' && typeof opts.encoder !== 'function') { + throw new TypeError('Encoder has to be a function.'); + } + const charset = opts.charset || defaults.charset; + if (typeof opts.charset !== 'undefined' && opts.charset !== 'utf-8' && opts.charset !== 'iso-8859-1') { + throw new TypeError('The charset option must be either utf-8, iso-8859-1, or undefined'); + } + let format = formats_1.default_format; + if (typeof opts.format !== 'undefined') { + if (!has.call(formats_1.formatters, opts.format)) { + throw new TypeError('Unknown format option provided.'); + } + format = opts.format; + } + const formatter = formats_1.formatters[format]; + let filter = defaults.filter; + if (typeof opts.filter === 'function' || is_array(opts.filter)) { + filter = opts.filter; + } + let arrayFormat; + if (opts.arrayFormat && opts.arrayFormat in array_prefix_generators) { + arrayFormat = opts.arrayFormat; + } + else if ('indices' in opts) { + arrayFormat = opts.indices ? 'indices' : 'repeat'; + } + else { + arrayFormat = defaults.arrayFormat; + } + if ('commaRoundTrip' in opts && typeof opts.commaRoundTrip !== 'boolean') { + throw new TypeError('`commaRoundTrip` must be a boolean, or absent'); + } + const allowDots = typeof opts.allowDots === 'undefined' ? + !!opts.encodeDotInKeys === true ? + true + : defaults.allowDots + : !!opts.allowDots; + return { + addQueryPrefix: typeof opts.addQueryPrefix === 'boolean' ? opts.addQueryPrefix : defaults.addQueryPrefix, + // @ts-ignore + allowDots: allowDots, + allowEmptyArrays: typeof opts.allowEmptyArrays === 'boolean' ? !!opts.allowEmptyArrays : defaults.allowEmptyArrays, + arrayFormat: arrayFormat, + charset: charset, + charsetSentinel: typeof opts.charsetSentinel === 'boolean' ? opts.charsetSentinel : defaults.charsetSentinel, + commaRoundTrip: !!opts.commaRoundTrip, + delimiter: typeof opts.delimiter === 'undefined' ? defaults.delimiter : opts.delimiter, + encode: typeof opts.encode === 'boolean' ? opts.encode : defaults.encode, + encodeDotInKeys: typeof opts.encodeDotInKeys === 'boolean' ? opts.encodeDotInKeys : defaults.encodeDotInKeys, + encoder: typeof opts.encoder === 'function' ? opts.encoder : defaults.encoder, + encodeValuesOnly: typeof opts.encodeValuesOnly === 'boolean' ? opts.encodeValuesOnly : defaults.encodeValuesOnly, + filter: filter, + format: format, + formatter: formatter, + serializeDate: typeof opts.serializeDate === 'function' ? opts.serializeDate : defaults.serializeDate, + skipNulls: typeof opts.skipNulls === 'boolean' ? opts.skipNulls : defaults.skipNulls, + // @ts-ignore + sort: typeof opts.sort === 'function' ? opts.sort : null, + strictNullHandling: typeof opts.strictNullHandling === 'boolean' ? 
opts.strictNullHandling : defaults.strictNullHandling, + }; +} +function stringify(object, opts = {}) { + let obj = object; + const options = normalize_stringify_options(opts); + let obj_keys; + let filter; + if (typeof options.filter === 'function') { + filter = options.filter; + obj = filter('', obj); + } + else if (is_array(options.filter)) { + filter = options.filter; + obj_keys = filter; + } + const keys = []; + if (typeof obj !== 'object' || obj === null) { + return ''; + } + const generateArrayPrefix = array_prefix_generators[options.arrayFormat]; + const commaRoundTrip = generateArrayPrefix === 'comma' && options.commaRoundTrip; + if (!obj_keys) { + obj_keys = Object.keys(obj); + } + if (options.sort) { + obj_keys.sort(options.sort); + } + const sideChannel = new WeakMap(); + for (let i = 0; i < obj_keys.length; ++i) { + const key = obj_keys[i]; + if (options.skipNulls && obj[key] === null) { + continue; + } + push_to_array(keys, inner_stringify(obj[key], key, + // @ts-expect-error + generateArrayPrefix, commaRoundTrip, options.allowEmptyArrays, options.strictNullHandling, options.skipNulls, options.encodeDotInKeys, options.encode ? options.encoder : null, options.filter, options.sort, options.allowDots, options.serializeDate, options.format, options.formatter, options.encodeValuesOnly, options.charset, sideChannel)); + } + const joined = keys.join(options.delimiter); + let prefix = options.addQueryPrefix === true ? '?' : ''; + if (options.charsetSentinel) { + if (options.charset === 'iso-8859-1') { + // encodeURIComponent('✓'), the "numeric entity" representation of a checkmark + prefix += 'utf8=%26%2310003%3B&'; + } + else { + // encodeURIComponent('✓') + prefix += 'utf8=%E2%9C%93&'; + } + } + return joined.length > 0 ? prefix + joined : ''; +} +exports.stringify = stringify; +//# sourceMappingURL=stringify.js.map \ No newline at end of file diff --git a/node_modules/openai/internal/qs/stringify.js.map b/node_modules/openai/internal/qs/stringify.js.map new file mode 100644 index 0000000..59ad7a4 --- /dev/null +++ b/node_modules/openai/internal/qs/stringify.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"stringify.js","sourceRoot":"","sources":["../../src/internal/qs/stringify.ts"],"names":[],"mappings":";;;AAAA,sCAAuD;AACvD,0CAAuD;AAGvD,MAAM,GAAG,GAAG,MAAM,CAAC,SAAS,CAAC,cAAc,CAAC;AAE5C,MAAM,uBAAuB,GAAG;IAC9B,QAAQ,CAAC,MAAmB;QAC1B,OAAO,MAAM,CAAC,MAAM,CAAC,GAAG,IAAI,CAAC;IAC/B,CAAC;IACD,KAAK,EAAE,OAAO;IACd,OAAO,CAAC,MAAmB,EAAE,GAAW;QACtC,OAAO,MAAM,CAAC,MAAM,CAAC,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG,CAAC;IAC1C,CAAC;IACD,MAAM,CAAC,MAAmB;QACxB,OAAO,MAAM,CAAC,MAAM,CAAC,CAAC;IACxB,CAAC;CACF,CAAC;AAEF,MAAM,QAAQ,GAAG,KAAK,CAAC,OAAO,CAAC;AAC/B,MAAM,IAAI,GAAG,KAAK,CAAC,SAAS,CAAC,IAAI,CAAC;AAClC,MAAM,aAAa,GAAG,UAAU,GAAU,EAAE,cAAmB;IAC7D,IAAI,CAAC,KAAK,CAAC,GAAG,EAAE,QAAQ,CAAC,cAAc,CAAC,CAAC,CAAC,CAAC,cAAc,CAAC,CAAC,CAAC,CAAC,cAAc,CAAC,CAAC,CAAC;AAChF,CAAC,CAAC;AAEF,MAAM,MAAM,GAAG,IAAI,CAAC,SAAS,CAAC,WAAW,CAAC;AAE1C,MAAM,QAAQ,GAAG;IACf,cAAc,EAAE,KAAK;IACrB,SAAS,EAAE,KAAK;IAChB,gBAAgB,EAAE,KAAK;IACvB,WAAW,EAAE,SAAS;IACtB,OAAO,EAAE,OAAO;IAChB,eAAe,EAAE,KAAK;IACtB,SAAS,EAAE,GAAG;IACd,MAAM,EAAE,IAAI;IACZ,eAAe,EAAE,KAAK;IACtB,OAAO,EAAE,cAAM;IACf,gBAAgB,EAAE,KAAK;IACvB,MAAM,EAAE,wBAAc;IACtB,SAAS,EAAE,oBAAU,CAAC,wBAAc,CAAC;IACrC,kBAAkB;IAClB,OAAO,EAAE,KAAK;IACd,aAAa,CAAC,IAAI;QAChB,OAAO,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IAC3B,CAAC;IACD,SAAS,EAAE,KAAK;IAChB,kBAAkB,EAAE,KAAK;CACiE,CAAC;AAE7F,SAAS,wBAAwB,CAAC,CAAU;IAC1C,OAAO,CACL,OAAO,CAAC,KAAK,QAAQ;QACrB,OAAO,CAAC,KAAK,QAAQ;QACrB,OAAO,CAAC,KAAK,SAAS;QACtB,OAAO,CAAC,KAAK,QAAQ;QACrB,OAAO,CAAC,KAAK,QAAQ,CACtB,CAAC;AACJ,CAAC;AAED,MAAM,QAAQ,GAAG,EAAE,CAAC;AAEpB,SAAS,eAAe,CACtB,MAAW,EACX,MAAmB,EACnB,mBAAgG,EAChG,cAAuB,EACvB,gBAAyB,EACzB,kBAA2B,EAC3B,SAAkB,EAClB,eAAwB,EACxB,OAAoC,EACpC,MAAkC,EAClC,IAA8B,EAC9B,SAAwC,EACxC,aAAgD,EAChD,MAAkC,EAClC,SAAwC,EACxC,gBAAyB,EACzB,OAAoC,EACpC,WAA8B;IAE9B,IAAI,GAAG,GAAG,MAAM,CAAC;IAEjB,IAAI,MAAM,GAAG,WAAW,CAAC;IACzB,IAAI,IAAI,GAAG,CAAC,CAAC;IACb,IAAI,SAAS,GAAG,KAAK,CAAC;IACtB,OAAO,CAAC,MAAM,GAAG,MAAM,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,KAAK,KAAK,SAAS,IAAI,CAAC,SAAS,EAAE;QACvE,6CAA6C;QAC7C,MAAM,GAAG,GAAG,MAAM,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC;QAC/B,IAAI,IAAI,CAAC,CAAC;QACV,IAAI,OAAO,GAAG,KAAK,WAAW,EAAE;YAC9B,IAAI,GAAG,KAAK,IAAI,EAAE;gBAChB,MAAM,IAAI,UAAU,CAAC,qBAAqB,CAAC,CAAC;aAC7C;iBAAM;gBACL,SAAS,GAAG,IAAI,CAAC,CAAC,cAAc;aACjC;SACF;QACD,IAAI,OAAO,MAAM,CAAC,GAAG,CAAC,QAAQ,CAAC,KAAK,WAAW,EAAE;YAC/C,IAAI,GAAG,CAAC,CAAC;SACV;KACF;IAED,IAAI,OAAO,MAAM,KAAK,UAAU,EAAE;QAChC,GAAG,GAAG,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;KAC3B;SAAM,IAAI,GAAG,YAAY,IAAI,EAAE;QAC9B,GAAG,GAAG,aAAa,EAAE,CAAC,GAAG,CAAC,CAAC;KAC5B;SAAM,IAAI,mBAAmB,KAAK,OAAO,IAAI,QAAQ,CAAC,GAAG,CAAC,EAAE;QAC3D,GAAG,GAAG,IAAA,iBAAS,EAAC,GAAG,EAAE,UAAU,KAAK;YAClC,IAAI,KAAK,YAAY,IAAI,EAAE;gBACzB,OAAO,aAAa,EAAE,CAAC,KAAK,CAAC,CAAC;aAC/B;YACD,OAAO,KAAK,CAAC;QACf,CAAC,CAAC,CAAC;KACJ;IAED,IAAI,GAAG,KAAK,IAAI,EAAE;QAChB,IAAI,kBAAkB,EAAE;YACtB,OAAO,OAAO,IAAI,CAAC,gBAAgB,CAAC,CAAC;gBACjC,mBAAmB;gBACnB,OAAO,CAAC,MAAM,EAAE,QAAQ,CAAC,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,MAAM,CAAC;gBAC3D,CAAC,CAAC,MAAM,CAAC;SACZ;QAED,GAAG,GAAG,EAAE,CAAC;KACV;IAED,IAAI,wBAAwB,CAAC,GAAG,CAAC,IAAI,IAAA,iBAAS,EAAC,GAAG,CAAC,EAAE;QACnD,IAAI,OAAO,EAAE;YACX,MAAM,SAAS,GACb,gBAAgB,CAAC,CAAC,CAAC,MAAM;gBACvB,mBAAmB;gBACrB,CAAC,CAAC,OAAO,CAAC,MAAM,EAAE,QAAQ,CAAC,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,MAAM,CAAC,CAAC;YAC9D,OAAO;gBACL,SAAS,EAAE,CAAC,SAAS,CAAC;oBACpB,GAAG;oBACH,mBAAmB;oBACnB,SAAS,EAAE,CAAC,OAAO,CAAC,GAAG,EAAE,QAAQ,CAAC,OAAO,EAAE,OAAO,EAAE,OAAO,EAAE,MAAM,CAAC,CAAC;aACxE,CAAC;SACH;QACD,OAAO,CAAC,SAAS,EAAE,CAAC,MAAM,CAAC,GAAG,GAAG,GAAG,SAAS,EAAE,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;KAC/D;IAED,MAAM,MAAM,GAAa,EAAE,CAAC;I
AE5B,IAAI,OAAO,GAAG,KAAK,WAAW,EAAE;QAC9B,OAAO,MAAM,CAAC;KACf;IAED,IAAI,QAAQ,CAAC;IACb,IAAI,mBAAmB,KAAK,OAAO,IAAI,QAAQ,CAAC,GAAG,CAAC,EAAE;QACpD,8BAA8B;QAC9B,IAAI,gBAAgB,IAAI,OAAO,EAAE;YAC/B,+BAA+B;YAC/B,GAAG,GAAG,IAAA,iBAAS,EAAC,GAAG,EAAE,OAAO,CAAC,CAAC;SAC/B;QACD,QAAQ,GAAG,CAAC,EAAE,KAAK,EAAE,GAAG,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,GAAG,CAAC,IAAI,IAAI,CAAC,CAAC,CAAC,KAAK,SAAS,EAAE,CAAC,CAAC;KACjF;SAAM,IAAI,QAAQ,CAAC,MAAM,CAAC,EAAE;QAC3B,QAAQ,GAAG,MAAM,CAAC;KACnB;SAAM;QACL,MAAM,IAAI,GAAG,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;QAC9B,QAAQ,GAAG,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC;KAC1C;IAED,MAAM,cAAc,GAAG,eAAe,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC;IAE/F,MAAM,eAAe,GACnB,cAAc,IAAI,QAAQ,CAAC,GAAG,CAAC,IAAI,GAAG,CAAC,MAAM,KAAK,CAAC,CAAC,CAAC,CAAC,cAAc,GAAG,IAAI,CAAC,CAAC,CAAC,cAAc,CAAC;IAE/F,IAAI,gBAAgB,IAAI,QAAQ,CAAC,GAAG,CAAC,IAAI,GAAG,CAAC,MAAM,KAAK,CAAC,EAAE;QACzD,OAAO,eAAe,GAAG,IAAI,CAAC;KAC/B;IAED,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,QAAQ,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;QACxC,MAAM,GAAG,GAAG,QAAQ,CAAC,CAAC,CAAC,CAAC;QACxB,MAAM,KAAK;QACT,aAAa;QACb,OAAO,GAAG,KAAK,QAAQ,IAAI,OAAO,GAAG,CAAC,KAAK,KAAK,WAAW,CAAC,CAAC,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,GAAG,CAAC,GAAU,CAAC,CAAC;QAE5F,IAAI,SAAS,IAAI,KAAK,KAAK,IAAI,EAAE;YAC/B,SAAS;SACV;QAED,aAAa;QACb,MAAM,WAAW,GAAG,SAAS,IAAI,eAAe,CAAC,CAAC,CAAE,GAAW,CAAC,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC;QAC5F,MAAM,UAAU,GACd,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC;YACb,OAAO,mBAAmB,KAAK,UAAU,CAAC,CAAC;gBACzC,mBAAmB,CAAC,eAAe,EAAE,WAAW,CAAC;gBACnD,CAAC,CAAC,eAAe;YACnB,CAAC,CAAC,eAAe,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,GAAG,GAAG,WAAW,CAAC,CAAC,CAAC,GAAG,GAAG,WAAW,GAAG,GAAG,CAAC,CAAC;QAEhF,WAAW,CAAC,GAAG,CAAC,MAAM,EAAE,IAAI,CAAC,CAAC;QAC9B,MAAM,gBAAgB,GAAG,IAAI,OAAO,EAAE,CAAC;QACvC,gBAAgB,CAAC,GAAG,CAAC,QAAQ,EAAE,WAAW,CAAC,CAAC;QAC5C,aAAa,CACX,MAAM,EACN,eAAe,CACb,KAAK,EACL,UAAU,EACV,mBAAmB,EACnB,cAAc,EACd,gBAAgB,EAChB,kBAAkB,EAClB,SAAS,EACT,eAAe;QACf,aAAa;QACb,mBAAmB,KAAK,OAAO,IAAI,gBAAgB,IAAI,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,OAAO,EACrF,MAAM,EACN,IAAI,EACJ,SAAS,EACT,aAAa,EACb,MAAM,EACN,SAAS,EACT,gBAAgB,EAChB,OAAO,EACP,gBAAgB,CACjB,CACF,CAAC;KACH;IAED,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,SAAS,2BAA2B,CAClC,OAAyB,QAAQ;IAEjC,IAAI,OAAO,IAAI,CAAC,gBAAgB,KAAK,WAAW,IAAI,OAAO,IAAI,CAAC,gBAAgB,KAAK,SAAS,EAAE;QAC9F,MAAM,IAAI,SAAS,CAAC,wEAAwE,CAAC,CAAC;KAC/F;IAED,IAAI,OAAO,IAAI,CAAC,eAAe,KAAK,WAAW,IAAI,OAAO,IAAI,CAAC,eAAe,KAAK,SAAS,EAAE;QAC5F,MAAM,IAAI,SAAS,CAAC,uEAAuE,CAAC,CAAC;KAC9F;IAED,IAAI,IAAI,CAAC,OAAO,KAAK,IAAI,IAAI,OAAO,IAAI,CAAC,OAAO,KAAK,WAAW,IAAI,OAAO,IAAI,CAAC,OAAO,KAAK,UAAU,EAAE;QACtG,MAAM,IAAI,SAAS,CAAC,+BAA+B,CAAC,CAAC;KACtD;IAED,MAAM,OAAO,GAAG,IAAI,CAAC,OAAO,IAAI,QAAQ,CAAC,OAAO,CAAC;IACjD,IAAI,OAAO,IAAI,CAAC,OAAO,KAAK,WAAW,IAAI,IAAI,CAAC,OAAO,KAAK,OAAO,IAAI,IAAI,CAAC,OAAO,KAAK,YAAY,EAAE;QACpG,MAAM,IAAI,SAAS,CAAC,mEAAmE,CAAC,CAAC;KAC1F;IAED,IAAI,MAAM,GAAG,wBAAc,CAAC;IAC5B,IAAI,OAAO,IAAI,CAAC,MAAM,KAAK,WAAW,EAAE;QACtC,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,oBAAU,EAAE,IAAI,CAAC,MAAM,CAAC,EAAE;YACtC,MAAM,IAAI,SAAS,CAAC,iCAAiC,CAAC,CAAC;SACxD;QACD,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC;KACtB;IACD,MAAM,SAAS,GAAG,oBAAU,CAAC,MAAM,CAAC,CAAC;IAErC,IAAI,MAAM,GAAG,QAAQ,CAAC,MAAM,CAAC;IAC7B,IAAI,OAAO,IAAI,CAAC,MAAM,KAAK,UAAU,IAAI,QAAQ,CAAC,IAAI,CAAC,MAAM,CAAC,EAAE;QAC9D,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC;KACtB;IAED,IAAI,WAA4C,CAAC;IACjD,IAAI,IAAI,CAAC,WAAW,IAAI,IAAI,CAAC,WAAW,IAAI,uBAAuB,EAAE;QACnE,WAAW,GAAG,IAAI,CAAC,WAAW,CAAC;KAChC;SAAM,IAAI,SAAS,IAAI,IAAI
,EAAE;QAC5B,WAAW,GAAG,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,QAAQ,CAAC;KACnD;SAAM;QACL,WAAW,GAAG,QAAQ,CAAC,WAAW,CAAC;KACpC;IAED,IAAI,gBAAgB,IAAI,IAAI,IAAI,OAAO,IAAI,CAAC,cAAc,KAAK,SAAS,EAAE;QACxE,MAAM,IAAI,SAAS,CAAC,+CAA+C,CAAC,CAAC;KACtE;IAED,MAAM,SAAS,GACb,OAAO,IAAI,CAAC,SAAS,KAAK,WAAW,CAAC,CAAC;QACrC,CAAC,CAAC,IAAI,CAAC,eAAe,KAAK,IAAI,CAAC,CAAC;YAC/B,IAAI;YACN,CAAC,CAAC,QAAQ,CAAC,SAAS;QACtB,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC;IAErB,OAAO;QACL,cAAc,EAAE,OAAO,IAAI,CAAC,cAAc,KAAK,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC,cAAc,CAAC,CAAC,CAAC,QAAQ,CAAC,cAAc;QACxG,aAAa;QACb,SAAS,EAAE,SAAS;QACpB,gBAAgB,EACd,OAAO,IAAI,CAAC,gBAAgB,KAAK,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,gBAAgB,CAAC,CAAC,CAAC,QAAQ,CAAC,gBAAgB;QAClG,WAAW,EAAE,WAAW;QACxB,OAAO,EAAE,OAAO;QAChB,eAAe,EACb,OAAO,IAAI,CAAC,eAAe,KAAK,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC,eAAe,CAAC,CAAC,CAAC,QAAQ,CAAC,eAAe;QAC7F,cAAc,EAAE,CAAC,CAAC,IAAI,CAAC,cAAc;QACrC,SAAS,EAAE,OAAO,IAAI,CAAC,SAAS,KAAK,WAAW,CAAC,CAAC,CAAC,QAAQ,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC,SAAS;QACtF,MAAM,EAAE,OAAO,IAAI,CAAC,MAAM,KAAK,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,QAAQ,CAAC,MAAM;QACxE,eAAe,EACb,OAAO,IAAI,CAAC,eAAe,KAAK,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC,eAAe,CAAC,CAAC,CAAC,QAAQ,CAAC,eAAe;QAC7F,OAAO,EAAE,OAAO,IAAI,CAAC,OAAO,KAAK,UAAU,CAAC,CAAC,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,QAAQ,CAAC,OAAO;QAC7E,gBAAgB,EACd,OAAO,IAAI,CAAC,gBAAgB,KAAK,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC,gBAAgB,CAAC,CAAC,CAAC,QAAQ,CAAC,gBAAgB;QAChG,MAAM,EAAE,MAAM;QACd,MAAM,EAAE,MAAM;QACd,SAAS,EAAE,SAAS;QACpB,aAAa,EAAE,OAAO,IAAI,CAAC,aAAa,KAAK,UAAU,CAAC,CAAC,CAAC,IAAI,CAAC,aAAa,CAAC,CAAC,CAAC,QAAQ,CAAC,aAAa;QACrG,SAAS,EAAE,OAAO,IAAI,CAAC,SAAS,KAAK,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,QAAQ,CAAC,SAAS;QACpF,aAAa;QACb,IAAI,EAAE,OAAO,IAAI,CAAC,IAAI,KAAK,UAAU,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI;QACxD,kBAAkB,EAChB,OAAO,IAAI,CAAC,kBAAkB,KAAK,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC,kBAAkB,CAAC,CAAC,CAAC,QAAQ,CAAC,kBAAkB;KACvG,CAAC;AACJ,CAAC;AAED,SAAgB,SAAS,CAAC,MAAW,EAAE,OAAyB,EAAE;IAChE,IAAI,GAAG,GAAG,MAAM,CAAC;IACjB,MAAM,OAAO,GAAG,2BAA2B,CAAC,IAAI,CAAC,CAAC;IAElD,IAAI,QAAmC,CAAC;IACxC,IAAI,MAAM,CAAC;IAEX,IAAI,OAAO,OAAO,CAAC,MAAM,KAAK,UAAU,EAAE;QACxC,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC;QACxB,GAAG,GAAG,MAAM,CAAC,EAAE,EAAE,GAAG,CAAC,CAAC;KACvB;SAAM,IAAI,QAAQ,CAAC,OAAO,CAAC,MAAM,CAAC,EAAE;QACnC,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC;QACxB,QAAQ,GAAG,MAAM,CAAC;KACnB;IAED,MAAM,IAAI,GAAa,EAAE,CAAC;IAE1B,IAAI,OAAO,GAAG,KAAK,QAAQ,IAAI,GAAG,KAAK,IAAI,EAAE;QAC3C,OAAO,EAAE,CAAC;KACX;IAED,MAAM,mBAAmB,GAAG,uBAAuB,CAAC,OAAO,CAAC,WAAW,CAAC,CAAC;IACzE,MAAM,cAAc,GAAG,mBAAmB,KAAK,OAAO,IAAI,OAAO,CAAC,cAAc,CAAC;IAEjF,IAAI,CAAC,QAAQ,EAAE;QACb,QAAQ,GAAG,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;KAC7B;IAED,IAAI,OAAO,CAAC,IAAI,EAAE;QAChB,QAAQ,CAAC,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;KAC7B;IAED,MAAM,WAAW,GAAG,IAAI,OAAO,EAAE,CAAC;IAClC,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,QAAQ,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;QACxC,MAAM,GAAG,GAAG,QAAQ,CAAC,CAAC,CAAE,CAAC;QAEzB,IAAI,OAAO,CAAC,SAAS,IAAI,GAAG,CAAC,GAAG,CAAC,KAAK,IAAI,EAAE;YAC1C,SAAS;SACV;QACD,aAAa,CACX,IAAI,EACJ,eAAe,CACb,GAAG,CAAC,GAAG,CAAC,EACR,GAAG;QACH,mBAAmB;QACnB,mBAAmB,EACnB,cAAc,EACd,OAAO,CAAC,gBAAgB,EACxB,OAAO,CAAC,kBAAkB,EAC1B,OAAO,CAAC,SAAS,EACjB,OAAO,CAAC,eAAe,EACvB,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC,CAAC,IAAI,EACvC,OAAO,CAAC,MAAM,EACd,OAAO,CAAC,IAAI,EACZ,OAAO,CAAC,SAAS,EACjB,OAAO,CAAC,aAAa,EACrB,OAAO,CAAC,MAAM,EACd,OAAO,CAAC,SAAS,EACjB,OAAO,CAAC,gBAAgB,EACxB,OAAO,CAAC,OAAO,EACf,WAAW,CACZ,CACF,CAAC;KACH;IAED,MAAM,MAAM,GAAG,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,SAAS,CAAC,CAAC;IAC5C,
IAAI,MAAM,GAAG,OAAO,CAAC,cAAc,KAAK,IAAI,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC;IAExD,IAAI,OAAO,CAAC,eAAe,EAAE;QAC3B,IAAI,OAAO,CAAC,OAAO,KAAK,YAAY,EAAE;YACpC,qFAAqF;YACrF,MAAM,IAAI,sBAAsB,CAAC;SAClC;aAAM;YACL,0BAA0B;YAC1B,MAAM,IAAI,iBAAiB,CAAC;SAC7B;KACF;IAED,OAAO,MAAM,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC;AAClD,CAAC;AA/ED,8BA+EC"} \ No newline at end of file diff --git a/node_modules/openai/internal/qs/stringify.mjs b/node_modules/openai/internal/qs/stringify.mjs new file mode 100644 index 0000000..fe659d3 --- /dev/null +++ b/node_modules/openai/internal/qs/stringify.mjs @@ -0,0 +1,276 @@ +import { encode, is_buffer, maybe_map } from "./utils.mjs"; +import { default_format, formatters } from "./formats.mjs"; +const has = Object.prototype.hasOwnProperty; +const array_prefix_generators = { + brackets(prefix) { + return String(prefix) + '[]'; + }, + comma: 'comma', + indices(prefix, key) { + return String(prefix) + '[' + key + ']'; + }, + repeat(prefix) { + return String(prefix); + }, +}; +const is_array = Array.isArray; +const push = Array.prototype.push; +const push_to_array = function (arr, value_or_array) { + push.apply(arr, is_array(value_or_array) ? value_or_array : [value_or_array]); +}; +const to_ISO = Date.prototype.toISOString; +const defaults = { + addQueryPrefix: false, + allowDots: false, + allowEmptyArrays: false, + arrayFormat: 'indices', + charset: 'utf-8', + charsetSentinel: false, + delimiter: '&', + encode: true, + encodeDotInKeys: false, + encoder: encode, + encodeValuesOnly: false, + format: default_format, + formatter: formatters[default_format], + /** @deprecated */ + indices: false, + serializeDate(date) { + return to_ISO.call(date); + }, + skipNulls: false, + strictNullHandling: false, +}; +function is_non_nullish_primitive(v) { + return (typeof v === 'string' || + typeof v === 'number' || + typeof v === 'boolean' || + typeof v === 'symbol' || + typeof v === 'bigint'); +} +const sentinel = {}; +function inner_stringify(object, prefix, generateArrayPrefix, commaRoundTrip, allowEmptyArrays, strictNullHandling, skipNulls, encodeDotInKeys, encoder, filter, sort, allowDots, serializeDate, format, formatter, encodeValuesOnly, charset, sideChannel) { + let obj = object; + let tmp_sc = sideChannel; + let step = 0; + let find_flag = false; + while ((tmp_sc = tmp_sc.get(sentinel)) !== void undefined && !find_flag) { + // Where object last appeared in the ref tree + const pos = tmp_sc.get(object); + step += 1; + if (typeof pos !== 'undefined') { + if (pos === step) { + throw new RangeError('Cyclic object value'); + } + else { + find_flag = true; // Break while + } + } + if (typeof tmp_sc.get(sentinel) === 'undefined') { + step = 0; + } + } + if (typeof filter === 'function') { + obj = filter(prefix, obj); + } + else if (obj instanceof Date) { + obj = serializeDate?.(obj); + } + else if (generateArrayPrefix === 'comma' && is_array(obj)) { + obj = maybe_map(obj, function (value) { + if (value instanceof Date) { + return serializeDate?.(value); + } + return value; + }); + } + if (obj === null) { + if (strictNullHandling) { + return encoder && !encodeValuesOnly ? + // @ts-expect-error + encoder(prefix, defaults.encoder, charset, 'key', format) + : prefix; + } + obj = ''; + } + if (is_non_nullish_primitive(obj) || is_buffer(obj)) { + if (encoder) { + const key_value = encodeValuesOnly ? 
prefix + // @ts-expect-error + : encoder(prefix, defaults.encoder, charset, 'key', format); + return [ + formatter?.(key_value) + + '=' + + // @ts-expect-error + formatter?.(encoder(obj, defaults.encoder, charset, 'value', format)), + ]; + } + return [formatter?.(prefix) + '=' + formatter?.(String(obj))]; + } + const values = []; + if (typeof obj === 'undefined') { + return values; + } + let obj_keys; + if (generateArrayPrefix === 'comma' && is_array(obj)) { + // we need to join elements in + if (encodeValuesOnly && encoder) { + // @ts-expect-error values only + obj = maybe_map(obj, encoder); + } + obj_keys = [{ value: obj.length > 0 ? obj.join(',') || null : void undefined }]; + } + else if (is_array(filter)) { + obj_keys = filter; + } + else { + const keys = Object.keys(obj); + obj_keys = sort ? keys.sort(sort) : keys; + } + const encoded_prefix = encodeDotInKeys ? String(prefix).replace(/\./g, '%2E') : String(prefix); + const adjusted_prefix = commaRoundTrip && is_array(obj) && obj.length === 1 ? encoded_prefix + '[]' : encoded_prefix; + if (allowEmptyArrays && is_array(obj) && obj.length === 0) { + return adjusted_prefix + '[]'; + } + for (let j = 0; j < obj_keys.length; ++j) { + const key = obj_keys[j]; + const value = + // @ts-ignore + typeof key === 'object' && typeof key.value !== 'undefined' ? key.value : obj[key]; + if (skipNulls && value === null) { + continue; + } + // @ts-ignore + const encoded_key = allowDots && encodeDotInKeys ? key.replace(/\./g, '%2E') : key; + const key_prefix = is_array(obj) ? + typeof generateArrayPrefix === 'function' ? + generateArrayPrefix(adjusted_prefix, encoded_key) + : adjusted_prefix + : adjusted_prefix + (allowDots ? '.' + encoded_key : '[' + encoded_key + ']'); + sideChannel.set(object, step); + const valueSideChannel = new WeakMap(); + valueSideChannel.set(sentinel, sideChannel); + push_to_array(values, inner_stringify(value, key_prefix, generateArrayPrefix, commaRoundTrip, allowEmptyArrays, strictNullHandling, skipNulls, encodeDotInKeys, + // @ts-ignore + generateArrayPrefix === 'comma' && encodeValuesOnly && is_array(obj) ? 
null : encoder, filter, sort, allowDots, serializeDate, format, formatter, encodeValuesOnly, charset, valueSideChannel)); + } + return values; +} +function normalize_stringify_options(opts = defaults) { + if (typeof opts.allowEmptyArrays !== 'undefined' && typeof opts.allowEmptyArrays !== 'boolean') { + throw new TypeError('`allowEmptyArrays` option can only be `true` or `false`, when provided'); + } + if (typeof opts.encodeDotInKeys !== 'undefined' && typeof opts.encodeDotInKeys !== 'boolean') { + throw new TypeError('`encodeDotInKeys` option can only be `true` or `false`, when provided'); + } + if (opts.encoder !== null && typeof opts.encoder !== 'undefined' && typeof opts.encoder !== 'function') { + throw new TypeError('Encoder has to be a function.'); + } + const charset = opts.charset || defaults.charset; + if (typeof opts.charset !== 'undefined' && opts.charset !== 'utf-8' && opts.charset !== 'iso-8859-1') { + throw new TypeError('The charset option must be either utf-8, iso-8859-1, or undefined'); + } + let format = default_format; + if (typeof opts.format !== 'undefined') { + if (!has.call(formatters, opts.format)) { + throw new TypeError('Unknown format option provided.'); + } + format = opts.format; + } + const formatter = formatters[format]; + let filter = defaults.filter; + if (typeof opts.filter === 'function' || is_array(opts.filter)) { + filter = opts.filter; + } + let arrayFormat; + if (opts.arrayFormat && opts.arrayFormat in array_prefix_generators) { + arrayFormat = opts.arrayFormat; + } + else if ('indices' in opts) { + arrayFormat = opts.indices ? 'indices' : 'repeat'; + } + else { + arrayFormat = defaults.arrayFormat; + } + if ('commaRoundTrip' in opts && typeof opts.commaRoundTrip !== 'boolean') { + throw new TypeError('`commaRoundTrip` must be a boolean, or absent'); + } + const allowDots = typeof opts.allowDots === 'undefined' ? + !!opts.encodeDotInKeys === true ? + true + : defaults.allowDots + : !!opts.allowDots; + return { + addQueryPrefix: typeof opts.addQueryPrefix === 'boolean' ? opts.addQueryPrefix : defaults.addQueryPrefix, + // @ts-ignore + allowDots: allowDots, + allowEmptyArrays: typeof opts.allowEmptyArrays === 'boolean' ? !!opts.allowEmptyArrays : defaults.allowEmptyArrays, + arrayFormat: arrayFormat, + charset: charset, + charsetSentinel: typeof opts.charsetSentinel === 'boolean' ? opts.charsetSentinel : defaults.charsetSentinel, + commaRoundTrip: !!opts.commaRoundTrip, + delimiter: typeof opts.delimiter === 'undefined' ? defaults.delimiter : opts.delimiter, + encode: typeof opts.encode === 'boolean' ? opts.encode : defaults.encode, + encodeDotInKeys: typeof opts.encodeDotInKeys === 'boolean' ? opts.encodeDotInKeys : defaults.encodeDotInKeys, + encoder: typeof opts.encoder === 'function' ? opts.encoder : defaults.encoder, + encodeValuesOnly: typeof opts.encodeValuesOnly === 'boolean' ? opts.encodeValuesOnly : defaults.encodeValuesOnly, + filter: filter, + format: format, + formatter: formatter, + serializeDate: typeof opts.serializeDate === 'function' ? opts.serializeDate : defaults.serializeDate, + skipNulls: typeof opts.skipNulls === 'boolean' ? opts.skipNulls : defaults.skipNulls, + // @ts-ignore + sort: typeof opts.sort === 'function' ? opts.sort : null, + strictNullHandling: typeof opts.strictNullHandling === 'boolean' ? 
opts.strictNullHandling : defaults.strictNullHandling, + }; +} +export function stringify(object, opts = {}) { + let obj = object; + const options = normalize_stringify_options(opts); + let obj_keys; + let filter; + if (typeof options.filter === 'function') { + filter = options.filter; + obj = filter('', obj); + } + else if (is_array(options.filter)) { + filter = options.filter; + obj_keys = filter; + } + const keys = []; + if (typeof obj !== 'object' || obj === null) { + return ''; + } + const generateArrayPrefix = array_prefix_generators[options.arrayFormat]; + const commaRoundTrip = generateArrayPrefix === 'comma' && options.commaRoundTrip; + if (!obj_keys) { + obj_keys = Object.keys(obj); + } + if (options.sort) { + obj_keys.sort(options.sort); + } + const sideChannel = new WeakMap(); + for (let i = 0; i < obj_keys.length; ++i) { + const key = obj_keys[i]; + if (options.skipNulls && obj[key] === null) { + continue; + } + push_to_array(keys, inner_stringify(obj[key], key, + // @ts-expect-error + generateArrayPrefix, commaRoundTrip, options.allowEmptyArrays, options.strictNullHandling, options.skipNulls, options.encodeDotInKeys, options.encode ? options.encoder : null, options.filter, options.sort, options.allowDots, options.serializeDate, options.format, options.formatter, options.encodeValuesOnly, options.charset, sideChannel)); + } + const joined = keys.join(options.delimiter); + let prefix = options.addQueryPrefix === true ? '?' : ''; + if (options.charsetSentinel) { + if (options.charset === 'iso-8859-1') { + // encodeURIComponent('✓'), the "numeric entity" representation of a checkmark + prefix += 'utf8=%26%2310003%3B&'; + } + else { + // encodeURIComponent('✓') + prefix += 'utf8=%E2%9C%93&'; + } + } + return joined.length > 0 ? prefix + joined : ''; +} +//# sourceMappingURL=stringify.mjs.map \ No newline at end of file diff --git a/node_modules/openai/internal/qs/stringify.mjs.map b/node_modules/openai/internal/qs/stringify.mjs.map new file mode 100644 index 0000000..2a01340 --- /dev/null +++ b/node_modules/openai/internal/qs/stringify.mjs.map @@ -0,0 +1 @@ 
+{"version":3,"file":"stringify.mjs","sourceRoot":"","sources":["../../src/internal/qs/stringify.ts"],"names":[],"mappings":"OAAO,EAAE,MAAM,EAAE,SAAS,EAAE,SAAS,EAAE;OAChC,EAAE,cAAc,EAAE,UAAU,EAAE;AAGrC,MAAM,GAAG,GAAG,MAAM,CAAC,SAAS,CAAC,cAAc,CAAC;AAE5C,MAAM,uBAAuB,GAAG;IAC9B,QAAQ,CAAC,MAAmB;QAC1B,OAAO,MAAM,CAAC,MAAM,CAAC,GAAG,IAAI,CAAC;IAC/B,CAAC;IACD,KAAK,EAAE,OAAO;IACd,OAAO,CAAC,MAAmB,EAAE,GAAW;QACtC,OAAO,MAAM,CAAC,MAAM,CAAC,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG,CAAC;IAC1C,CAAC;IACD,MAAM,CAAC,MAAmB;QACxB,OAAO,MAAM,CAAC,MAAM,CAAC,CAAC;IACxB,CAAC;CACF,CAAC;AAEF,MAAM,QAAQ,GAAG,KAAK,CAAC,OAAO,CAAC;AAC/B,MAAM,IAAI,GAAG,KAAK,CAAC,SAAS,CAAC,IAAI,CAAC;AAClC,MAAM,aAAa,GAAG,UAAU,GAAU,EAAE,cAAmB;IAC7D,IAAI,CAAC,KAAK,CAAC,GAAG,EAAE,QAAQ,CAAC,cAAc,CAAC,CAAC,CAAC,CAAC,cAAc,CAAC,CAAC,CAAC,CAAC,cAAc,CAAC,CAAC,CAAC;AAChF,CAAC,CAAC;AAEF,MAAM,MAAM,GAAG,IAAI,CAAC,SAAS,CAAC,WAAW,CAAC;AAE1C,MAAM,QAAQ,GAAG;IACf,cAAc,EAAE,KAAK;IACrB,SAAS,EAAE,KAAK;IAChB,gBAAgB,EAAE,KAAK;IACvB,WAAW,EAAE,SAAS;IACtB,OAAO,EAAE,OAAO;IAChB,eAAe,EAAE,KAAK;IACtB,SAAS,EAAE,GAAG;IACd,MAAM,EAAE,IAAI;IACZ,eAAe,EAAE,KAAK;IACtB,OAAO,EAAE,MAAM;IACf,gBAAgB,EAAE,KAAK;IACvB,MAAM,EAAE,cAAc;IACtB,SAAS,EAAE,UAAU,CAAC,cAAc,CAAC;IACrC,kBAAkB;IAClB,OAAO,EAAE,KAAK;IACd,aAAa,CAAC,IAAI;QAChB,OAAO,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IAC3B,CAAC;IACD,SAAS,EAAE,KAAK;IAChB,kBAAkB,EAAE,KAAK;CACiE,CAAC;AAE7F,SAAS,wBAAwB,CAAC,CAAU;IAC1C,OAAO,CACL,OAAO,CAAC,KAAK,QAAQ;QACrB,OAAO,CAAC,KAAK,QAAQ;QACrB,OAAO,CAAC,KAAK,SAAS;QACtB,OAAO,CAAC,KAAK,QAAQ;QACrB,OAAO,CAAC,KAAK,QAAQ,CACtB,CAAC;AACJ,CAAC;AAED,MAAM,QAAQ,GAAG,EAAE,CAAC;AAEpB,SAAS,eAAe,CACtB,MAAW,EACX,MAAmB,EACnB,mBAAgG,EAChG,cAAuB,EACvB,gBAAyB,EACzB,kBAA2B,EAC3B,SAAkB,EAClB,eAAwB,EACxB,OAAoC,EACpC,MAAkC,EAClC,IAA8B,EAC9B,SAAwC,EACxC,aAAgD,EAChD,MAAkC,EAClC,SAAwC,EACxC,gBAAyB,EACzB,OAAoC,EACpC,WAA8B;IAE9B,IAAI,GAAG,GAAG,MAAM,CAAC;IAEjB,IAAI,MAAM,GAAG,WAAW,CAAC;IACzB,IAAI,IAAI,GAAG,CAAC,CAAC;IACb,IAAI,SAAS,GAAG,KAAK,CAAC;IACtB,OAAO,CAAC,MAAM,GAAG,MAAM,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,KAAK,KAAK,SAAS,IAAI,CAAC,SAAS,EAAE;QACvE,6CAA6C;QAC7C,MAAM,GAAG,GAAG,MAAM,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC;QAC/B,IAAI,IAAI,CAAC,CAAC;QACV,IAAI,OAAO,GAAG,KAAK,WAAW,EAAE;YAC9B,IAAI,GAAG,KAAK,IAAI,EAAE;gBAChB,MAAM,IAAI,UAAU,CAAC,qBAAqB,CAAC,CAAC;aAC7C;iBAAM;gBACL,SAAS,GAAG,IAAI,CAAC,CAAC,cAAc;aACjC;SACF;QACD,IAAI,OAAO,MAAM,CAAC,GAAG,CAAC,QAAQ,CAAC,KAAK,WAAW,EAAE;YAC/C,IAAI,GAAG,CAAC,CAAC;SACV;KACF;IAED,IAAI,OAAO,MAAM,KAAK,UAAU,EAAE;QAChC,GAAG,GAAG,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;KAC3B;SAAM,IAAI,GAAG,YAAY,IAAI,EAAE;QAC9B,GAAG,GAAG,aAAa,EAAE,CAAC,GAAG,CAAC,CAAC;KAC5B;SAAM,IAAI,mBAAmB,KAAK,OAAO,IAAI,QAAQ,CAAC,GAAG,CAAC,EAAE;QAC3D,GAAG,GAAG,SAAS,CAAC,GAAG,EAAE,UAAU,KAAK;YAClC,IAAI,KAAK,YAAY,IAAI,EAAE;gBACzB,OAAO,aAAa,EAAE,CAAC,KAAK,CAAC,CAAC;aAC/B;YACD,OAAO,KAAK,CAAC;QACf,CAAC,CAAC,CAAC;KACJ;IAED,IAAI,GAAG,KAAK,IAAI,EAAE;QAChB,IAAI,kBAAkB,EAAE;YACtB,OAAO,OAAO,IAAI,CAAC,gBAAgB,CAAC,CAAC;gBACjC,mBAAmB;gBACnB,OAAO,CAAC,MAAM,EAAE,QAAQ,CAAC,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,MAAM,CAAC;gBAC3D,CAAC,CAAC,MAAM,CAAC;SACZ;QAED,GAAG,GAAG,EAAE,CAAC;KACV;IAED,IAAI,wBAAwB,CAAC,GAAG,CAAC,IAAI,SAAS,CAAC,GAAG,CAAC,EAAE;QACnD,IAAI,OAAO,EAAE;YACX,MAAM,SAAS,GACb,gBAAgB,CAAC,CAAC,CAAC,MAAM;gBACvB,mBAAmB;gBACrB,CAAC,CAAC,OAAO,CAAC,MAAM,EAAE,QAAQ,CAAC,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,MAAM,CAAC,CAAC;YAC9D,OAAO;gBACL,SAAS,EAAE,CAAC,SAAS,CAAC;oBACpB,GAAG;oBACH,mBAAmB;oBACnB,SAAS,EAAE,CAAC,OAAO,CAAC,GAAG,EAAE,QAAQ,CAAC,OAAO,EAAE,OAAO,EAAE,OAAO,EAAE,MAAM,CAAC,CAAC;aACxE,CAAC;SACH;QACD,OAAO,CAAC,SAAS,EAAE,CAAC,MAAM,CAAC,GAAG,GAAG,GAAG,SAAS,EAAE,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;KAC/D;IA
ED,MAAM,MAAM,GAAa,EAAE,CAAC;IAE5B,IAAI,OAAO,GAAG,KAAK,WAAW,EAAE;QAC9B,OAAO,MAAM,CAAC;KACf;IAED,IAAI,QAAQ,CAAC;IACb,IAAI,mBAAmB,KAAK,OAAO,IAAI,QAAQ,CAAC,GAAG,CAAC,EAAE;QACpD,8BAA8B;QAC9B,IAAI,gBAAgB,IAAI,OAAO,EAAE;YAC/B,+BAA+B;YAC/B,GAAG,GAAG,SAAS,CAAC,GAAG,EAAE,OAAO,CAAC,CAAC;SAC/B;QACD,QAAQ,GAAG,CAAC,EAAE,KAAK,EAAE,GAAG,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,GAAG,CAAC,IAAI,IAAI,CAAC,CAAC,CAAC,KAAK,SAAS,EAAE,CAAC,CAAC;KACjF;SAAM,IAAI,QAAQ,CAAC,MAAM,CAAC,EAAE;QAC3B,QAAQ,GAAG,MAAM,CAAC;KACnB;SAAM;QACL,MAAM,IAAI,GAAG,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;QAC9B,QAAQ,GAAG,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC;KAC1C;IAED,MAAM,cAAc,GAAG,eAAe,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC;IAE/F,MAAM,eAAe,GACnB,cAAc,IAAI,QAAQ,CAAC,GAAG,CAAC,IAAI,GAAG,CAAC,MAAM,KAAK,CAAC,CAAC,CAAC,CAAC,cAAc,GAAG,IAAI,CAAC,CAAC,CAAC,cAAc,CAAC;IAE/F,IAAI,gBAAgB,IAAI,QAAQ,CAAC,GAAG,CAAC,IAAI,GAAG,CAAC,MAAM,KAAK,CAAC,EAAE;QACzD,OAAO,eAAe,GAAG,IAAI,CAAC;KAC/B;IAED,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,QAAQ,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;QACxC,MAAM,GAAG,GAAG,QAAQ,CAAC,CAAC,CAAC,CAAC;QACxB,MAAM,KAAK;QACT,aAAa;QACb,OAAO,GAAG,KAAK,QAAQ,IAAI,OAAO,GAAG,CAAC,KAAK,KAAK,WAAW,CAAC,CAAC,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,GAAG,CAAC,GAAU,CAAC,CAAC;QAE5F,IAAI,SAAS,IAAI,KAAK,KAAK,IAAI,EAAE;YAC/B,SAAS;SACV;QAED,aAAa;QACb,MAAM,WAAW,GAAG,SAAS,IAAI,eAAe,CAAC,CAAC,CAAE,GAAW,CAAC,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC;QAC5F,MAAM,UAAU,GACd,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC;YACb,OAAO,mBAAmB,KAAK,UAAU,CAAC,CAAC;gBACzC,mBAAmB,CAAC,eAAe,EAAE,WAAW,CAAC;gBACnD,CAAC,CAAC,eAAe;YACnB,CAAC,CAAC,eAAe,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,GAAG,GAAG,WAAW,CAAC,CAAC,CAAC,GAAG,GAAG,WAAW,GAAG,GAAG,CAAC,CAAC;QAEhF,WAAW,CAAC,GAAG,CAAC,MAAM,EAAE,IAAI,CAAC,CAAC;QAC9B,MAAM,gBAAgB,GAAG,IAAI,OAAO,EAAE,CAAC;QACvC,gBAAgB,CAAC,GAAG,CAAC,QAAQ,EAAE,WAAW,CAAC,CAAC;QAC5C,aAAa,CACX,MAAM,EACN,eAAe,CACb,KAAK,EACL,UAAU,EACV,mBAAmB,EACnB,cAAc,EACd,gBAAgB,EAChB,kBAAkB,EAClB,SAAS,EACT,eAAe;QACf,aAAa;QACb,mBAAmB,KAAK,OAAO,IAAI,gBAAgB,IAAI,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,OAAO,EACrF,MAAM,EACN,IAAI,EACJ,SAAS,EACT,aAAa,EACb,MAAM,EACN,SAAS,EACT,gBAAgB,EAChB,OAAO,EACP,gBAAgB,CACjB,CACF,CAAC;KACH;IAED,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,SAAS,2BAA2B,CAClC,OAAyB,QAAQ;IAEjC,IAAI,OAAO,IAAI,CAAC,gBAAgB,KAAK,WAAW,IAAI,OAAO,IAAI,CAAC,gBAAgB,KAAK,SAAS,EAAE;QAC9F,MAAM,IAAI,SAAS,CAAC,wEAAwE,CAAC,CAAC;KAC/F;IAED,IAAI,OAAO,IAAI,CAAC,eAAe,KAAK,WAAW,IAAI,OAAO,IAAI,CAAC,eAAe,KAAK,SAAS,EAAE;QAC5F,MAAM,IAAI,SAAS,CAAC,uEAAuE,CAAC,CAAC;KAC9F;IAED,IAAI,IAAI,CAAC,OAAO,KAAK,IAAI,IAAI,OAAO,IAAI,CAAC,OAAO,KAAK,WAAW,IAAI,OAAO,IAAI,CAAC,OAAO,KAAK,UAAU,EAAE;QACtG,MAAM,IAAI,SAAS,CAAC,+BAA+B,CAAC,CAAC;KACtD;IAED,MAAM,OAAO,GAAG,IAAI,CAAC,OAAO,IAAI,QAAQ,CAAC,OAAO,CAAC;IACjD,IAAI,OAAO,IAAI,CAAC,OAAO,KAAK,WAAW,IAAI,IAAI,CAAC,OAAO,KAAK,OAAO,IAAI,IAAI,CAAC,OAAO,KAAK,YAAY,EAAE;QACpG,MAAM,IAAI,SAAS,CAAC,mEAAmE,CAAC,CAAC;KAC1F;IAED,IAAI,MAAM,GAAG,cAAc,CAAC;IAC5B,IAAI,OAAO,IAAI,CAAC,MAAM,KAAK,WAAW,EAAE;QACtC,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,UAAU,EAAE,IAAI,CAAC,MAAM,CAAC,EAAE;YACtC,MAAM,IAAI,SAAS,CAAC,iCAAiC,CAAC,CAAC;SACxD;QACD,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC;KACtB;IACD,MAAM,SAAS,GAAG,UAAU,CAAC,MAAM,CAAC,CAAC;IAErC,IAAI,MAAM,GAAG,QAAQ,CAAC,MAAM,CAAC;IAC7B,IAAI,OAAO,IAAI,CAAC,MAAM,KAAK,UAAU,IAAI,QAAQ,CAAC,IAAI,CAAC,MAAM,CAAC,EAAE;QAC9D,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC;KACtB;IAED,IAAI,WAA4C,CAAC;IACjD,IAAI,IAAI,CAAC,WAAW,IAAI,IAAI,CAAC,WAAW,IAAI,uBAAuB,EAAE;QACnE,WAAW,GAAG,IAAI,CAAC,WAAW,CAAC;KAChC;SAAM
,IAAI,SAAS,IAAI,IAAI,EAAE;QAC5B,WAAW,GAAG,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,QAAQ,CAAC;KACnD;SAAM;QACL,WAAW,GAAG,QAAQ,CAAC,WAAW,CAAC;KACpC;IAED,IAAI,gBAAgB,IAAI,IAAI,IAAI,OAAO,IAAI,CAAC,cAAc,KAAK,SAAS,EAAE;QACxE,MAAM,IAAI,SAAS,CAAC,+CAA+C,CAAC,CAAC;KACtE;IAED,MAAM,SAAS,GACb,OAAO,IAAI,CAAC,SAAS,KAAK,WAAW,CAAC,CAAC;QACrC,CAAC,CAAC,IAAI,CAAC,eAAe,KAAK,IAAI,CAAC,CAAC;YAC/B,IAAI;YACN,CAAC,CAAC,QAAQ,CAAC,SAAS;QACtB,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC;IAErB,OAAO;QACL,cAAc,EAAE,OAAO,IAAI,CAAC,cAAc,KAAK,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC,cAAc,CAAC,CAAC,CAAC,QAAQ,CAAC,cAAc;QACxG,aAAa;QACb,SAAS,EAAE,SAAS;QACpB,gBAAgB,EACd,OAAO,IAAI,CAAC,gBAAgB,KAAK,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,gBAAgB,CAAC,CAAC,CAAC,QAAQ,CAAC,gBAAgB;QAClG,WAAW,EAAE,WAAW;QACxB,OAAO,EAAE,OAAO;QAChB,eAAe,EACb,OAAO,IAAI,CAAC,eAAe,KAAK,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC,eAAe,CAAC,CAAC,CAAC,QAAQ,CAAC,eAAe;QAC7F,cAAc,EAAE,CAAC,CAAC,IAAI,CAAC,cAAc;QACrC,SAAS,EAAE,OAAO,IAAI,CAAC,SAAS,KAAK,WAAW,CAAC,CAAC,CAAC,QAAQ,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC,SAAS;QACtF,MAAM,EAAE,OAAO,IAAI,CAAC,MAAM,KAAK,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,QAAQ,CAAC,MAAM;QACxE,eAAe,EACb,OAAO,IAAI,CAAC,eAAe,KAAK,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC,eAAe,CAAC,CAAC,CAAC,QAAQ,CAAC,eAAe;QAC7F,OAAO,EAAE,OAAO,IAAI,CAAC,OAAO,KAAK,UAAU,CAAC,CAAC,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,QAAQ,CAAC,OAAO;QAC7E,gBAAgB,EACd,OAAO,IAAI,CAAC,gBAAgB,KAAK,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC,gBAAgB,CAAC,CAAC,CAAC,QAAQ,CAAC,gBAAgB;QAChG,MAAM,EAAE,MAAM;QACd,MAAM,EAAE,MAAM;QACd,SAAS,EAAE,SAAS;QACpB,aAAa,EAAE,OAAO,IAAI,CAAC,aAAa,KAAK,UAAU,CAAC,CAAC,CAAC,IAAI,CAAC,aAAa,CAAC,CAAC,CAAC,QAAQ,CAAC,aAAa;QACrG,SAAS,EAAE,OAAO,IAAI,CAAC,SAAS,KAAK,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,QAAQ,CAAC,SAAS;QACpF,aAAa;QACb,IAAI,EAAE,OAAO,IAAI,CAAC,IAAI,KAAK,UAAU,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI;QACxD,kBAAkB,EAChB,OAAO,IAAI,CAAC,kBAAkB,KAAK,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC,kBAAkB,CAAC,CAAC,CAAC,QAAQ,CAAC,kBAAkB;KACvG,CAAC;AACJ,CAAC;AAED,MAAM,UAAU,SAAS,CAAC,MAAW,EAAE,OAAyB,EAAE;IAChE,IAAI,GAAG,GAAG,MAAM,CAAC;IACjB,MAAM,OAAO,GAAG,2BAA2B,CAAC,IAAI,CAAC,CAAC;IAElD,IAAI,QAAmC,CAAC;IACxC,IAAI,MAAM,CAAC;IAEX,IAAI,OAAO,OAAO,CAAC,MAAM,KAAK,UAAU,EAAE;QACxC,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC;QACxB,GAAG,GAAG,MAAM,CAAC,EAAE,EAAE,GAAG,CAAC,CAAC;KACvB;SAAM,IAAI,QAAQ,CAAC,OAAO,CAAC,MAAM,CAAC,EAAE;QACnC,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC;QACxB,QAAQ,GAAG,MAAM,CAAC;KACnB;IAED,MAAM,IAAI,GAAa,EAAE,CAAC;IAE1B,IAAI,OAAO,GAAG,KAAK,QAAQ,IAAI,GAAG,KAAK,IAAI,EAAE;QAC3C,OAAO,EAAE,CAAC;KACX;IAED,MAAM,mBAAmB,GAAG,uBAAuB,CAAC,OAAO,CAAC,WAAW,CAAC,CAAC;IACzE,MAAM,cAAc,GAAG,mBAAmB,KAAK,OAAO,IAAI,OAAO,CAAC,cAAc,CAAC;IAEjF,IAAI,CAAC,QAAQ,EAAE;QACb,QAAQ,GAAG,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;KAC7B;IAED,IAAI,OAAO,CAAC,IAAI,EAAE;QAChB,QAAQ,CAAC,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;KAC7B;IAED,MAAM,WAAW,GAAG,IAAI,OAAO,EAAE,CAAC;IAClC,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,QAAQ,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;QACxC,MAAM,GAAG,GAAG,QAAQ,CAAC,CAAC,CAAE,CAAC;QAEzB,IAAI,OAAO,CAAC,SAAS,IAAI,GAAG,CAAC,GAAG,CAAC,KAAK,IAAI,EAAE;YAC1C,SAAS;SACV;QACD,aAAa,CACX,IAAI,EACJ,eAAe,CACb,GAAG,CAAC,GAAG,CAAC,EACR,GAAG;QACH,mBAAmB;QACnB,mBAAmB,EACnB,cAAc,EACd,OAAO,CAAC,gBAAgB,EACxB,OAAO,CAAC,kBAAkB,EAC1B,OAAO,CAAC,SAAS,EACjB,OAAO,CAAC,eAAe,EACvB,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC,CAAC,IAAI,EACvC,OAAO,CAAC,MAAM,EACd,OAAO,CAAC,IAAI,EACZ,OAAO,CAAC,SAAS,EACjB,OAAO,CAAC,aAAa,EACrB,OAAO,CAAC,MAAM,EACd,OAAO,CAAC,SAAS,EACjB,OAAO,CAAC,gBAAgB,EACxB,OAAO,CAAC,OAAO,EACf,WAAW,CACZ,CACF,CAAC;KACH;IAED,MAAM,MAAM,GAAG,IAAI,CAAC,IAAI,CAAC,OAAO,CA
AC,SAAS,CAAC,CAAC;IAC5C,IAAI,MAAM,GAAG,OAAO,CAAC,cAAc,KAAK,IAAI,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC;IAExD,IAAI,OAAO,CAAC,eAAe,EAAE;QAC3B,IAAI,OAAO,CAAC,OAAO,KAAK,YAAY,EAAE;YACpC,qFAAqF;YACrF,MAAM,IAAI,sBAAsB,CAAC;SAClC;aAAM;YACL,0BAA0B;YAC1B,MAAM,IAAI,iBAAiB,CAAC;SAC7B;KACF;IAED,OAAO,MAAM,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC;AAClD,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/internal/qs/types.d.ts b/node_modules/openai/internal/qs/types.d.ts new file mode 100644 index 0000000..989a959 --- /dev/null +++ b/node_modules/openai/internal/qs/types.d.ts @@ -0,0 +1,57 @@ +export type Format = 'RFC1738' | 'RFC3986'; +export type DefaultEncoder = (str: any, defaultEncoder?: any, charset?: string) => string; +export type DefaultDecoder = (str: string, decoder?: any, charset?: string) => string; +export type BooleanOptional = boolean | undefined; +export type StringifyBaseOptions = { + delimiter?: string; + allowDots?: boolean; + encodeDotInKeys?: boolean; + strictNullHandling?: boolean; + skipNulls?: boolean; + encode?: boolean; + encoder?: (str: any, defaultEncoder: DefaultEncoder, charset: string, type: 'key' | 'value', format?: Format) => string; + filter?: Array | ((prefix: PropertyKey, value: any) => any); + arrayFormat?: 'indices' | 'brackets' | 'repeat' | 'comma'; + indices?: boolean; + sort?: ((a: PropertyKey, b: PropertyKey) => number) | null; + serializeDate?: (d: Date) => string; + format?: 'RFC1738' | 'RFC3986'; + formatter?: (str: PropertyKey) => string; + encodeValuesOnly?: boolean; + addQueryPrefix?: boolean; + charset?: 'utf-8' | 'iso-8859-1'; + charsetSentinel?: boolean; + allowEmptyArrays?: boolean; + commaRoundTrip?: boolean; +}; +export type StringifyOptions = StringifyBaseOptions; +export type ParseBaseOptions = { + comma?: boolean; + delimiter?: string | RegExp; + depth?: number | false; + decoder?: (str: string, defaultDecoder: DefaultDecoder, charset: string, type: 'key' | 'value') => any; + arrayLimit?: number; + parseArrays?: boolean; + plainObjects?: boolean; + allowPrototypes?: boolean; + allowSparse?: boolean; + parameterLimit?: number; + strictDepth?: boolean; + strictNullHandling?: boolean; + ignoreQueryPrefix?: boolean; + charset?: 'utf-8' | 'iso-8859-1'; + charsetSentinel?: boolean; + interpretNumericEntities?: boolean; + allowEmptyArrays?: boolean; + duplicates?: 'combine' | 'first' | 'last'; + allowDots?: boolean; + decodeDotInKeys?: boolean; +}; +export type ParseOptions = ParseBaseOptions; +export type ParsedQs = { + [key: string]: undefined | string | string[] | ParsedQs | ParsedQs[]; +}; +export type NonNullableProperties = { + [K in keyof T]-?: Exclude; +}; +//# sourceMappingURL=types.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/internal/qs/types.d.ts.map b/node_modules/openai/internal/qs/types.d.ts.map new file mode 100644 index 0000000..83608b9 --- /dev/null +++ b/node_modules/openai/internal/qs/types.d.ts.map @@ -0,0 +1 @@ 
+{"version":3,"file":"types.d.ts","sourceRoot":"","sources":["../../src/internal/qs/types.ts"],"names":[],"mappings":"AAAA,MAAM,MAAM,MAAM,GAAG,SAAS,GAAG,SAAS,CAAC;AAE3C,MAAM,MAAM,cAAc,GAAG,CAAC,GAAG,EAAE,GAAG,EAAE,cAAc,CAAC,EAAE,GAAG,EAAE,OAAO,CAAC,EAAE,MAAM,KAAK,MAAM,CAAC;AAC1F,MAAM,MAAM,cAAc,GAAG,CAAC,GAAG,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE,GAAG,EAAE,OAAO,CAAC,EAAE,MAAM,KAAK,MAAM,CAAC;AAEtF,MAAM,MAAM,eAAe,GAAG,OAAO,GAAG,SAAS,CAAC;AAElD,MAAM,MAAM,oBAAoB,GAAG;IACjC,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,SAAS,CAAC,EAAE,OAAO,CAAC;IACpB,eAAe,CAAC,EAAE,OAAO,CAAC;IAC1B,kBAAkB,CAAC,EAAE,OAAO,CAAC;IAC7B,SAAS,CAAC,EAAE,OAAO,CAAC;IACpB,MAAM,CAAC,EAAE,OAAO,CAAC;IACjB,OAAO,CAAC,EAAE,CACR,GAAG,EAAE,GAAG,EACR,cAAc,EAAE,cAAc,EAC9B,OAAO,EAAE,MAAM,EACf,IAAI,EAAE,KAAK,GAAG,OAAO,EACrB,MAAM,CAAC,EAAE,MAAM,KACZ,MAAM,CAAC;IACZ,MAAM,CAAC,EAAE,KAAK,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC,MAAM,EAAE,WAAW,EAAE,KAAK,EAAE,GAAG,KAAK,GAAG,CAAC,CAAC;IACzE,WAAW,CAAC,EAAE,SAAS,GAAG,UAAU,GAAG,QAAQ,GAAG,OAAO,CAAC;IAC1D,OAAO,CAAC,EAAE,OAAO,CAAC;IAClB,IAAI,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,WAAW,EAAE,CAAC,EAAE,WAAW,KAAK,MAAM,CAAC,GAAG,IAAI,CAAC;IAC3D,aAAa,CAAC,EAAE,CAAC,CAAC,EAAE,IAAI,KAAK,MAAM,CAAC;IACpC,MAAM,CAAC,EAAE,SAAS,GAAG,SAAS,CAAC;IAC/B,SAAS,CAAC,EAAE,CAAC,GAAG,EAAE,WAAW,KAAK,MAAM,CAAC;IACzC,gBAAgB,CAAC,EAAE,OAAO,CAAC;IAC3B,cAAc,CAAC,EAAE,OAAO,CAAC;IACzB,OAAO,CAAC,EAAE,OAAO,GAAG,YAAY,CAAC;IACjC,eAAe,CAAC,EAAE,OAAO,CAAC;IAC1B,gBAAgB,CAAC,EAAE,OAAO,CAAC;IAC3B,cAAc,CAAC,EAAE,OAAO,CAAC;CAC1B,CAAC;AAEF,MAAM,MAAM,gBAAgB,GAAG,oBAAoB,CAAC;AAEpD,MAAM,MAAM,gBAAgB,GAAG;IAC7B,KAAK,CAAC,EAAE,OAAO,CAAC;IAChB,SAAS,CAAC,EAAE,MAAM,GAAG,MAAM,CAAC;IAC5B,KAAK,CAAC,EAAE,MAAM,GAAG,KAAK,CAAC;IACvB,OAAO,CAAC,EAAE,CAAC,GAAG,EAAE,MAAM,EAAE,cAAc,EAAE,cAAc,EAAE,OAAO,EAAE,MAAM,EAAE,IAAI,EAAE,KAAK,GAAG,OAAO,KAAK,GAAG,CAAC;IACvG,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,WAAW,CAAC,EAAE,OAAO,CAAC;IACtB,YAAY,CAAC,EAAE,OAAO,CAAC;IACvB,eAAe,CAAC,EAAE,OAAO,CAAC;IAC1B,WAAW,CAAC,EAAE,OAAO,CAAC;IACtB,cAAc,CAAC,EAAE,MAAM,CAAC;IACxB,WAAW,CAAC,EAAE,OAAO,CAAC;IACtB,kBAAkB,CAAC,EAAE,OAAO,CAAC;IAC7B,iBAAiB,CAAC,EAAE,OAAO,CAAC;IAC5B,OAAO,CAAC,EAAE,OAAO,GAAG,YAAY,CAAC;IACjC,eAAe,CAAC,EAAE,OAAO,CAAC;IAC1B,wBAAwB,CAAC,EAAE,OAAO,CAAC;IACnC,gBAAgB,CAAC,EAAE,OAAO,CAAC;IAC3B,UAAU,CAAC,EAAE,SAAS,GAAG,OAAO,GAAG,MAAM,CAAC;IAC1C,SAAS,CAAC,EAAE,OAAO,CAAC;IACpB,eAAe,CAAC,EAAE,OAAO,CAAC;CAC3B,CAAC;AAEF,MAAM,MAAM,YAAY,GAAG,gBAAgB,CAAC;AAE5C,MAAM,MAAM,QAAQ,GAAG;IACrB,CAAC,GAAG,EAAE,MAAM,GAAG,SAAS,GAAG,MAAM,GAAG,MAAM,EAAE,GAAG,QAAQ,GAAG,QAAQ,EAAE,CAAC;CACtE,CAAC;AAGF,MAAM,MAAM,qBAAqB,CAAC,CAAC,IAAI;KACpC,CAAC,IAAI,MAAM,CAAC,CAAC,CAAC,GAAG,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,SAAS,GAAG,IAAI,CAAC;CAClD,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/internal/qs/types.js b/node_modules/openai/internal/qs/types.js new file mode 100644 index 0000000..11e638d --- /dev/null +++ b/node_modules/openai/internal/qs/types.js @@ -0,0 +1,3 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=types.js.map \ No newline at end of file diff --git a/node_modules/openai/internal/qs/types.js.map b/node_modules/openai/internal/qs/types.js.map new file mode 100644 index 0000000..3f947ac --- /dev/null +++ b/node_modules/openai/internal/qs/types.js.map @@ -0,0 +1 @@ +{"version":3,"file":"types.js","sourceRoot":"","sources":["../../src/internal/qs/types.ts"],"names":[],"mappings":""} \ No newline at end of file diff --git a/node_modules/openai/internal/qs/types.mjs b/node_modules/openai/internal/qs/types.mjs new file mode 100644 index 0000000..ce3aaac --- /dev/null +++ 
b/node_modules/openai/internal/qs/types.mjs @@ -0,0 +1,2 @@ +export {}; +//# sourceMappingURL=types.mjs.map \ No newline at end of file diff --git a/node_modules/openai/internal/qs/types.mjs.map b/node_modules/openai/internal/qs/types.mjs.map new file mode 100644 index 0000000..e64a9c6 --- /dev/null +++ b/node_modules/openai/internal/qs/types.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"types.mjs","sourceRoot":"","sources":["../../src/internal/qs/types.ts"],"names":[],"mappings":""} \ No newline at end of file diff --git a/node_modules/openai/internal/qs/utils.d.ts b/node_modules/openai/internal/qs/utils.d.ts new file mode 100644 index 0000000..1964c6c --- /dev/null +++ b/node_modules/openai/internal/qs/utils.d.ts @@ -0,0 +1,14 @@ +import type { DefaultEncoder, Format } from "./types.js"; +export declare function merge(target: any, source: any, options?: { + plainObjects?: boolean; + allowPrototypes?: boolean; +}): any; +export declare function assign_single_source(target: any, source: any): any; +export declare function decode(str: string, _: any, charset: string): string; +export declare const encode: (str: any, defaultEncoder: DefaultEncoder, charset: string, type: 'key' | 'value', format: Format) => string; +export declare function compact(value: any): any; +export declare function is_regexp(obj: any): boolean; +export declare function is_buffer(obj: any): boolean; +export declare function combine(a: any, b: any): never[]; +export declare function maybe_map(val: T[], fn: (v: T) => T): T | T[]; +//# sourceMappingURL=utils.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/internal/qs/utils.d.ts.map b/node_modules/openai/internal/qs/utils.d.ts.map new file mode 100644 index 0000000..260d24c --- /dev/null +++ b/node_modules/openai/internal/qs/utils.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"utils.d.ts","sourceRoot":"","sources":["../../src/internal/qs/utils.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,cAAc,EAAE,MAAM,EAAE,MAAM,SAAS,CAAC;AA+CtD,wBAAgB,KAAK,CACnB,MAAM,EAAE,GAAG,EACX,MAAM,EAAE,GAAG,EACX,OAAO,GAAE;IAAE,YAAY,CAAC,EAAE,OAAO,CAAC;IAAC,eAAe,CAAC,EAAE,OAAO,CAAA;CAAO,OA2DpE;AAED,wBAAgB,oBAAoB,CAAC,MAAM,EAAE,GAAG,EAAE,MAAM,EAAE,GAAG,OAK5D;AAED,wBAAgB,MAAM,CAAC,GAAG,EAAE,MAAM,EAAE,CAAC,EAAE,GAAG,EAAE,OAAO,EAAE,MAAM,UAY1D;AAID,eAAO,MAAM,MAAM,EAAE,CACnB,GAAG,EAAE,GAAG,EACR,cAAc,EAAE,cAAc,EAC9B,OAAO,EAAE,MAAM,EACf,IAAI,EAAE,KAAK,GAAG,OAAO,EACrB,MAAM,EAAE,MAAM,KACX,MAuEJ,CAAC;AAEF,wBAAgB,OAAO,CAAC,KAAK,EAAE,GAAG,OAuBjC;AAED,wBAAgB,SAAS,CAAC,GAAG,EAAE,GAAG,WAEjC;AAED,wBAAgB,SAAS,CAAC,GAAG,EAAE,GAAG,WAMjC;AAED,wBAAgB,OAAO,CAAC,CAAC,EAAE,GAAG,EAAE,CAAC,EAAE,GAAG,WAErC;AAED,wBAAgB,SAAS,CAAC,CAAC,EAAE,GAAG,EAAE,CAAC,EAAE,EAAE,EAAE,EAAE,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,WASrD"} \ No newline at end of file diff --git a/node_modules/openai/internal/qs/utils.js b/node_modules/openai/internal/qs/utils.js new file mode 100644 index 0000000..8264f64 --- /dev/null +++ b/node_modules/openai/internal/qs/utils.js @@ -0,0 +1,229 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.maybe_map = exports.combine = exports.is_buffer = exports.is_regexp = exports.compact = exports.encode = exports.decode = exports.assign_single_source = exports.merge = void 0; +const formats_1 = require("./formats.js"); +const has = Object.prototype.hasOwnProperty; +const is_array = Array.isArray; +const hex_table = (() => { + const array = []; + for (let i = 0; i < 256; ++i) { + array.push('%' + ((i < 16 ? 
'0' : '') + i.toString(16)).toUpperCase()); + } + return array; +})(); +function compact_queue(queue) { + while (queue.length > 1) { + const item = queue.pop(); + if (!item) + continue; + const obj = item.obj[item.prop]; + if (is_array(obj)) { + const compacted = []; + for (let j = 0; j < obj.length; ++j) { + if (typeof obj[j] !== 'undefined') { + compacted.push(obj[j]); + } + } + // @ts-ignore + item.obj[item.prop] = compacted; + } + } +} +function array_to_object(source, options) { + const obj = options && options.plainObjects ? Object.create(null) : {}; + for (let i = 0; i < source.length; ++i) { + if (typeof source[i] !== 'undefined') { + obj[i] = source[i]; + } + } + return obj; +} +function merge(target, source, options = {}) { + if (!source) { + return target; + } + if (typeof source !== 'object') { + if (is_array(target)) { + target.push(source); + } + else if (target && typeof target === 'object') { + if ((options && (options.plainObjects || options.allowPrototypes)) || + !has.call(Object.prototype, source)) { + target[source] = true; + } + } + else { + return [target, source]; + } + return target; + } + if (!target || typeof target !== 'object') { + return [target].concat(source); + } + let mergeTarget = target; + if (is_array(target) && !is_array(source)) { + // @ts-ignore + mergeTarget = array_to_object(target, options); + } + if (is_array(target) && is_array(source)) { + source.forEach(function (item, i) { + if (has.call(target, i)) { + const targetItem = target[i]; + if (targetItem && typeof targetItem === 'object' && item && typeof item === 'object') { + target[i] = merge(targetItem, item, options); + } + else { + target.push(item); + } + } + else { + target[i] = item; + } + }); + return target; + } + return Object.keys(source).reduce(function (acc, key) { + const value = source[key]; + if (has.call(acc, key)) { + acc[key] = merge(acc[key], value, options); + } + else { + acc[key] = value; + } + return acc; + }, mergeTarget); +} +exports.merge = merge; +function assign_single_source(target, source) { + return Object.keys(source).reduce(function (acc, key) { + acc[key] = source[key]; + return acc; + }, target); +} +exports.assign_single_source = assign_single_source; +function decode(str, _, charset) { + const strWithoutPlus = str.replace(/\+/g, ' '); + if (charset === 'iso-8859-1') { + // unescape never throws, no try...catch needed: + return strWithoutPlus.replace(/%[0-9a-f]{2}/gi, unescape); + } + // utf-8 + try { + return decodeURIComponent(strWithoutPlus); + } + catch (e) { + return strWithoutPlus; + } +} +exports.decode = decode; +const limit = 1024; +const encode = (str, _defaultEncoder, charset, _kind, format) => { + // This code was originally written by Brian White for the io.js core querystring library. + // It has been adapted here for stricter adherence to RFC 3986 + if (str.length === 0) { + return str; + } + let string = str; + if (typeof str === 'symbol') { + string = Symbol.prototype.toString.call(str); + } + else if (typeof str !== 'string') { + string = String(str); + } + if (charset === 'iso-8859-1') { + return escape(string).replace(/%u[0-9a-f]{4}/gi, function ($0) { + return '%26%23' + parseInt($0.slice(2), 16) + '%3B'; + }); + } + let out = ''; + for (let j = 0; j < string.length; j += limit) { + const segment = string.length >= limit ? string.slice(j, j + limit) : string; + const arr = []; + for (let i = 0; i < segment.length; ++i) { + let c = segment.charCodeAt(i); + if (c === 0x2d || // - + c === 0x2e || // . 
+ c === 0x5f || // _ + c === 0x7e || // ~ + (c >= 0x30 && c <= 0x39) || // 0-9 + (c >= 0x41 && c <= 0x5a) || // a-z + (c >= 0x61 && c <= 0x7a) || // A-Z + (format === formats_1.RFC1738 && (c === 0x28 || c === 0x29)) // ( ) + ) { + arr[arr.length] = segment.charAt(i); + continue; + } + if (c < 0x80) { + arr[arr.length] = hex_table[c]; + continue; + } + if (c < 0x800) { + arr[arr.length] = hex_table[0xc0 | (c >> 6)] + hex_table[0x80 | (c & 0x3f)]; + continue; + } + if (c < 0xd800 || c >= 0xe000) { + arr[arr.length] = + hex_table[0xe0 | (c >> 12)] + hex_table[0x80 | ((c >> 6) & 0x3f)] + hex_table[0x80 | (c & 0x3f)]; + continue; + } + i += 1; + c = 0x10000 + (((c & 0x3ff) << 10) | (segment.charCodeAt(i) & 0x3ff)); + arr[arr.length] = + hex_table[0xf0 | (c >> 18)] + + hex_table[0x80 | ((c >> 12) & 0x3f)] + + hex_table[0x80 | ((c >> 6) & 0x3f)] + + hex_table[0x80 | (c & 0x3f)]; + } + out += arr.join(''); + } + return out; +}; +exports.encode = encode; +function compact(value) { + const queue = [{ obj: { o: value }, prop: 'o' }]; + const refs = []; + for (let i = 0; i < queue.length; ++i) { + const item = queue[i]; + // @ts-ignore + const obj = item.obj[item.prop]; + const keys = Object.keys(obj); + for (let j = 0; j < keys.length; ++j) { + const key = keys[j]; + const val = obj[key]; + if (typeof val === 'object' && val !== null && refs.indexOf(val) === -1) { + queue.push({ obj: obj, prop: key }); + refs.push(val); + } + } + } + compact_queue(queue); + return value; +} +exports.compact = compact; +function is_regexp(obj) { + return Object.prototype.toString.call(obj) === '[object RegExp]'; +} +exports.is_regexp = is_regexp; +function is_buffer(obj) { + if (!obj || typeof obj !== 'object') { + return false; + } + return !!(obj.constructor && obj.constructor.isBuffer && obj.constructor.isBuffer(obj)); +} +exports.is_buffer = is_buffer; +function combine(a, b) { + return [].concat(a, b); +} +exports.combine = combine; +function maybe_map(val, fn) { + if (is_array(val)) { + const mapped = []; + for (let i = 0; i < val.length; i += 1) { + mapped.push(fn(val[i])); + } + return mapped; + } + return fn(val); +} +exports.maybe_map = maybe_map; +//# sourceMappingURL=utils.js.map \ No newline at end of file diff --git a/node_modules/openai/internal/qs/utils.js.map b/node_modules/openai/internal/qs/utils.js.map new file mode 100644 index 0000000..cd5152b --- /dev/null +++ b/node_modules/openai/internal/qs/utils.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"utils.js","sourceRoot":"","sources":["../../src/internal/qs/utils.ts"],"names":[],"mappings":";;;AAAA,0CAAoC;AAGpC,MAAM,GAAG,GAAG,MAAM,CAAC,SAAS,CAAC,cAAc,CAAC;AAC5C,MAAM,QAAQ,GAAG,KAAK,CAAC,OAAO,CAAC;AAE/B,MAAM,SAAS,GAAG,CAAC,GAAG,EAAE;IACtB,MAAM,KAAK,GAAG,EAAE,CAAC;IACjB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,GAAG,EAAE,EAAE,CAAC,EAAE;QAC5B,KAAK,CAAC,IAAI,CAAC,GAAG,GAAG,CAAC,CAAC,CAAC,GAAG,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC,CAAC,WAAW,EAAE,CAAC,CAAC;KACxE;IAED,OAAO,KAAK,CAAC;AACf,CAAC,CAAC,EAAE,CAAC;AAEL,SAAS,aAAa,CAAgC,KAAsC;IAC1F,OAAO,KAAK,CAAC,MAAM,GAAG,CAAC,EAAE;QACvB,MAAM,IAAI,GAAG,KAAK,CAAC,GAAG,EAAE,CAAC;QACzB,IAAI,CAAC,IAAI;YAAE,SAAS;QAEpB,MAAM,GAAG,GAAG,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;QAEhC,IAAI,QAAQ,CAAC,GAAG,CAAC,EAAE;YACjB,MAAM,SAAS,GAAc,EAAE,CAAC;YAEhC,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,GAAG,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;gBACnC,IAAI,OAAO,GAAG,CAAC,CAAC,CAAC,KAAK,WAAW,EAAE;oBACjC,SAAS,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;iBACxB;aACF;YAED,aAAa;YACb,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,IAAI,CAAC,GAAG,SAAS,CAAC;SACjC;KACF;AACH,CAAC;AAED,SAAS,eAAe,CAAC,MAAa,EAAE,OAAkC;IACxE,MAAM,GAAG,GAAG,OAAO,IAAI,OAAO,CAAC,YAAY,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC;IACvE,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,MAAM,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;QACtC,IAAI,OAAO,MAAM,CAAC,CAAC,CAAC,KAAK,WAAW,EAAE;YACpC,GAAG,CAAC,CAAC,CAAC,GAAG,MAAM,CAAC,CAAC,CAAC,CAAC;SACpB;KACF;IAED,OAAO,GAAG,CAAC;AACb,CAAC;AAED,SAAgB,KAAK,CACnB,MAAW,EACX,MAAW,EACX,UAAiE,EAAE;IAEnE,IAAI,CAAC,MAAM,EAAE;QACX,OAAO,MAAM,CAAC;KACf;IAED,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE;QAC9B,IAAI,QAAQ,CAAC,MAAM,CAAC,EAAE;YACpB,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;SACrB;aAAM,IAAI,MAAM,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE;YAC/C,IACE,CAAC,OAAO,IAAI,CAAC,OAAO,CAAC,YAAY,IAAI,OAAO,CAAC,eAAe,CAAC,CAAC;gBAC9D,CAAC,GAAG,CAAC,IAAI,CAAC,MAAM,CAAC,SAAS,EAAE,MAAM,CAAC,EACnC;gBACA,MAAM,CAAC,MAAM,CAAC,GAAG,IAAI,CAAC;aACvB;SACF;aAAM;YACL,OAAO,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;SACzB;QAED,OAAO,MAAM,CAAC;KACf;IAED,IAAI,CAAC,MAAM,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE;QACzC,OAAO,CAAC,MAAM,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC;KAChC;IAED,IAAI,WAAW,GAAG,MAAM,CAAC;IACzB,IAAI,QAAQ,CAAC,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC,MAAM,CAAC,EAAE;QACzC,aAAa;QACb,WAAW,GAAG,eAAe,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;KAChD;IAED,IAAI,QAAQ,CAAC,MAAM,CAAC,IAAI,QAAQ,CAAC,MAAM,CAAC,EAAE;QACxC,MAAM,CAAC,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;YAC9B,IAAI,GAAG,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,CAAC,EAAE;gBACvB,MAAM,UAAU,GAAG,MAAM,CAAC,CAAC,CAAC,CAAC;gBAC7B,IAAI,UAAU,IAAI,OAAO,UAAU,KAAK,QAAQ,IAAI,IAAI,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE;oBACpF,MAAM,CAAC,CAAC,CAAC,GAAG,KAAK,CAAC,UAAU,EAAE,IAAI,EAAE,OAAO,CAAC,CAAC;iBAC9C;qBAAM;oBACL,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;iBACnB;aACF;iBAAM;gBACL,MAAM,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC;aAClB;QACH,CAAC,CAAC,CAAC;QACH,OAAO,MAAM,CAAC;KACf;IAED,OAAO,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,MAAM,CAAC,UAAU,GAAG,EAAE,GAAG;QAClD,MAAM,KAAK,GAAG,MAAM,CAAC,GAAG,CAAC,CAAC;QAE1B,IAAI,GAAG,CAAC,IAAI,CAAC,GAAG,EAAE,GAAG,CAAC,EAAE;YACtB,GAAG,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE,KAAK,EAAE,OAAO,CAAC,CAAC;SAC5C;aAAM;YACL,GAAG,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC;SAClB;QACD,OAAO,GAAG,CAAC;IACb,CAAC,EAAE,WAAW,CAAC,CAAC;AAClB,CAAC;AA9DD,sBA8DC;AAED,SAAgB,oBAAoB,CAAC,MAAW,EAAE,MAAW;IAC3D,OAAO,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,MAAM,CAAC,UAAU,GAAG,EAAE,GAAG;QAClD,GAAG,CAAC,GAAG,CAAC,GAAG,MAAM,CAAC,GAAG,CAAC,CAAC;QACvB,OAAO,GAAG,CAAC;IACb,CAAC,EAAE,MAAM,CAAC,CAAC;AACb,CAAC;AALD,oDAKC;AAED,SAAgB,MAAM,CAAC,GAAW,EAAE,CA
AM,EAAE,OAAe;IACzD,MAAM,cAAc,GAAG,GAAG,CAAC,OAAO,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC;IAC/C,IAAI,OAAO,KAAK,YAAY,EAAE;QAC5B,gDAAgD;QAChD,OAAO,cAAc,CAAC,OAAO,CAAC,gBAAgB,EAAE,QAAQ,CAAC,CAAC;KAC3D;IACD,QAAQ;IACR,IAAI;QACF,OAAO,kBAAkB,CAAC,cAAc,CAAC,CAAC;KAC3C;IAAC,OAAO,CAAC,EAAE;QACV,OAAO,cAAc,CAAC;KACvB;AACH,CAAC;AAZD,wBAYC;AAED,MAAM,KAAK,GAAG,IAAI,CAAC;AAEZ,MAAM,MAAM,GAML,CAAC,GAAG,EAAE,eAAe,EAAE,OAAO,EAAE,KAAK,EAAE,MAAc,EAAE,EAAE;IACrE,0FAA0F;IAC1F,8DAA8D;IAC9D,IAAI,GAAG,CAAC,MAAM,KAAK,CAAC,EAAE;QACpB,OAAO,GAAG,CAAC;KACZ;IAED,IAAI,MAAM,GAAG,GAAG,CAAC;IACjB,IAAI,OAAO,GAAG,KAAK,QAAQ,EAAE;QAC3B,MAAM,GAAG,MAAM,CAAC,SAAS,CAAC,QAAQ,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;KAC9C;SAAM,IAAI,OAAO,GAAG,KAAK,QAAQ,EAAE;QAClC,MAAM,GAAG,MAAM,CAAC,GAAG,CAAC,CAAC;KACtB;IAED,IAAI,OAAO,KAAK,YAAY,EAAE;QAC5B,OAAO,MAAM,CAAC,MAAM,CAAC,CAAC,OAAO,CAAC,iBAAiB,EAAE,UAAU,EAAE;YAC3D,OAAO,QAAQ,GAAG,QAAQ,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,GAAG,KAAK,CAAC;QACtD,CAAC,CAAC,CAAC;KACJ;IAED,IAAI,GAAG,GAAG,EAAE,CAAC;IACb,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,MAAM,CAAC,MAAM,EAAE,CAAC,IAAI,KAAK,EAAE;QAC7C,MAAM,OAAO,GAAG,MAAM,CAAC,MAAM,IAAI,KAAK,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC;QAC7E,MAAM,GAAG,GAAG,EAAE,CAAC;QAEf,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,OAAO,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;YACvC,IAAI,CAAC,GAAG,OAAO,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC;YAC9B,IACE,CAAC,KAAK,IAAI,IAAI,IAAI;gBAClB,CAAC,KAAK,IAAI,IAAI,IAAI;gBAClB,CAAC,KAAK,IAAI,IAAI,IAAI;gBAClB,CAAC,KAAK,IAAI,IAAI,IAAI;gBAClB,CAAC,CAAC,IAAI,IAAI,IAAI,CAAC,IAAI,IAAI,CAAC,IAAI,MAAM;gBAClC,CAAC,CAAC,IAAI,IAAI,IAAI,CAAC,IAAI,IAAI,CAAC,IAAI,MAAM;gBAClC,CAAC,CAAC,IAAI,IAAI,IAAI,CAAC,IAAI,IAAI,CAAC,IAAI,MAAM;gBAClC,CAAC,MAAM,KAAK,iBAAO,IAAI,CAAC,CAAC,KAAK,IAAI,IAAI,CAAC,KAAK,IAAI,CAAC,CAAC,CAAC,MAAM;cACzD;gBACA,GAAG,CAAC,GAAG,CAAC,MAAM,CAAC,GAAG,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC;gBACpC,SAAS;aACV;YAED,IAAI,CAAC,GAAG,IAAI,EAAE;gBACZ,GAAG,CAAC,GAAG,CAAC,MAAM,CAAC,GAAG,SAAS,CAAC,CAAC,CAAC,CAAC;gBAC/B,SAAS;aACV;YAED,IAAI,CAAC,GAAG,KAAK,EAAE;gBACb,GAAG,CAAC,GAAG,CAAC,MAAM,CAAC,GAAG,SAAS,CAAC,IAAI,GAAG,CAAC,CAAC,IAAI,CAAC,CAAC,CAAE,GAAG,SAAS,CAAC,IAAI,GAAG,CAAC,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC;gBAC7E,SAAS;aACV;YAED,IAAI,CAAC,GAAG,MAAM,IAAI,CAAC,IAAI,MAAM,EAAE;gBAC7B,GAAG,CAAC,GAAG,CAAC,MAAM,CAAC;oBACb,SAAS,CAAC,IAAI,GAAG,CAAC,CAAC,IAAI,EAAE,CAAC,CAAE,GAAG,SAAS,CAAC,IAAI,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,GAAG,IAAI,CAAC,CAAC,GAAG,SAAS,CAAC,IAAI,GAAG,CAAC,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC;gBACpG,SAAS;aACV;YAED,CAAC,IAAI,CAAC,CAAC;YACP,CAAC,GAAG,OAAO,GAAG,CAAC,CAAC,CAAC,CAAC,GAAG,KAAK,CAAC,IAAI,EAAE,CAAC,GAAG,CAAC,OAAO,CAAC,UAAU,CAAC,CAAC,CAAC,GAAG,KAAK,CAAC,CAAC,CAAC;YAEtE,GAAG,CAAC,GAAG,CAAC,MAAM,CAAC;gBACb,SAAS,CAAC,IAAI,GAAG,CAAC,CAAC,IAAI,EAAE,CAAC,CAAE;oBAC5B,SAAS,CAAC,IAAI,GAAG,CAAC,CAAC,CAAC,IAAI,EAAE,CAAC,GAAG,IAAI,CAAC,CAAC;oBACpC,SAAS,CAAC,IAAI,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,GAAG,IAAI,CAAC,CAAC;oBACnC,SAAS,CAAC,IAAI,GAAG,CAAC,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC;SAChC;QAED,GAAG,IAAI,GAAG,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;KACrB;IAED,OAAO,GAAG,CAAC;AACb,CAAC,CAAC;AA7EW,QAAA,MAAM,UA6EjB;AAEF,SAAgB,OAAO,CAAC,KAAU;IAChC,MAAM,KAAK,GAAG,CAAC,EAAE,GAAG,EAAE,EAAE,CAAC,EAAE,KAAK,EAAE,EAAE,IAAI,EAAE,GAAG,EAAE,CAAC,CAAC;IACjD,MAAM,IAAI,GAAG,EAAE,CAAC;IAEhB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,KAAK,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;QACrC,MAAM,IAAI,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC;QACtB,aAAa;QACb,MAAM,GAAG,GAAG,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;QAEhC,MAAM,IAAI,GAAG,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;QAC9B,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,
CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;YACpC,MAAM,GAAG,GAAG,IAAI,CAAC,CAAC,CAAE,CAAC;YACrB,MAAM,GAAG,GAAG,GAAG,CAAC,GAAG,CAAC,CAAC;YACrB,IAAI,OAAO,GAAG,KAAK,QAAQ,IAAI,GAAG,KAAK,IAAI,IAAI,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,EAAE;gBACvE,KAAK,CAAC,IAAI,CAAC,EAAE,GAAG,EAAE,GAAG,EAAE,IAAI,EAAE,GAAG,EAAE,CAAC,CAAC;gBACpC,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;aAChB;SACF;KACF;IAED,aAAa,CAAC,KAAK,CAAC,CAAC;IAErB,OAAO,KAAK,CAAC;AACf,CAAC;AAvBD,0BAuBC;AAED,SAAgB,SAAS,CAAC,GAAQ;IAChC,OAAO,MAAM,CAAC,SAAS,CAAC,QAAQ,CAAC,IAAI,CAAC,GAAG,CAAC,KAAK,iBAAiB,CAAC;AACnE,CAAC;AAFD,8BAEC;AAED,SAAgB,SAAS,CAAC,GAAQ;IAChC,IAAI,CAAC,GAAG,IAAI,OAAO,GAAG,KAAK,QAAQ,EAAE;QACnC,OAAO,KAAK,CAAC;KACd;IAED,OAAO,CAAC,CAAC,CAAC,GAAG,CAAC,WAAW,IAAI,GAAG,CAAC,WAAW,CAAC,QAAQ,IAAI,GAAG,CAAC,WAAW,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC;AAC1F,CAAC;AAND,8BAMC;AAED,SAAgB,OAAO,CAAC,CAAM,EAAE,CAAM;IACpC,OAAO,EAAE,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;AACzB,CAAC;AAFD,0BAEC;AAED,SAAgB,SAAS,CAAI,GAAQ,EAAE,EAAe;IACpD,IAAI,QAAQ,CAAC,GAAG,CAAC,EAAE;QACjB,MAAM,MAAM,GAAG,EAAE,CAAC;QAClB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,GAAG,CAAC,MAAM,EAAE,CAAC,IAAI,CAAC,EAAE;YACtC,MAAM,CAAC,IAAI,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,CAAE,CAAC,CAAC,CAAC;SAC1B;QACD,OAAO,MAAM,CAAC;KACf;IACD,OAAO,EAAE,CAAC,GAAG,CAAC,CAAC;AACjB,CAAC;AATD,8BASC"} \ No newline at end of file diff --git a/node_modules/openai/internal/qs/utils.mjs b/node_modules/openai/internal/qs/utils.mjs new file mode 100644 index 0000000..68b5046 --- /dev/null +++ b/node_modules/openai/internal/qs/utils.mjs @@ -0,0 +1,217 @@ +import { RFC1738 } from "./formats.mjs"; +const has = Object.prototype.hasOwnProperty; +const is_array = Array.isArray; +const hex_table = (() => { + const array = []; + for (let i = 0; i < 256; ++i) { + array.push('%' + ((i < 16 ? '0' : '') + i.toString(16)).toUpperCase()); + } + return array; +})(); +function compact_queue(queue) { + while (queue.length > 1) { + const item = queue.pop(); + if (!item) + continue; + const obj = item.obj[item.prop]; + if (is_array(obj)) { + const compacted = []; + for (let j = 0; j < obj.length; ++j) { + if (typeof obj[j] !== 'undefined') { + compacted.push(obj[j]); + } + } + // @ts-ignore + item.obj[item.prop] = compacted; + } + } +} +function array_to_object(source, options) { + const obj = options && options.plainObjects ? 
Object.create(null) : {}; + for (let i = 0; i < source.length; ++i) { + if (typeof source[i] !== 'undefined') { + obj[i] = source[i]; + } + } + return obj; +} +export function merge(target, source, options = {}) { + if (!source) { + return target; + } + if (typeof source !== 'object') { + if (is_array(target)) { + target.push(source); + } + else if (target && typeof target === 'object') { + if ((options && (options.plainObjects || options.allowPrototypes)) || + !has.call(Object.prototype, source)) { + target[source] = true; + } + } + else { + return [target, source]; + } + return target; + } + if (!target || typeof target !== 'object') { + return [target].concat(source); + } + let mergeTarget = target; + if (is_array(target) && !is_array(source)) { + // @ts-ignore + mergeTarget = array_to_object(target, options); + } + if (is_array(target) && is_array(source)) { + source.forEach(function (item, i) { + if (has.call(target, i)) { + const targetItem = target[i]; + if (targetItem && typeof targetItem === 'object' && item && typeof item === 'object') { + target[i] = merge(targetItem, item, options); + } + else { + target.push(item); + } + } + else { + target[i] = item; + } + }); + return target; + } + return Object.keys(source).reduce(function (acc, key) { + const value = source[key]; + if (has.call(acc, key)) { + acc[key] = merge(acc[key], value, options); + } + else { + acc[key] = value; + } + return acc; + }, mergeTarget); +} +export function assign_single_source(target, source) { + return Object.keys(source).reduce(function (acc, key) { + acc[key] = source[key]; + return acc; + }, target); +} +export function decode(str, _, charset) { + const strWithoutPlus = str.replace(/\+/g, ' '); + if (charset === 'iso-8859-1') { + // unescape never throws, no try...catch needed: + return strWithoutPlus.replace(/%[0-9a-f]{2}/gi, unescape); + } + // utf-8 + try { + return decodeURIComponent(strWithoutPlus); + } + catch (e) { + return strWithoutPlus; + } +} +const limit = 1024; +export const encode = (str, _defaultEncoder, charset, _kind, format) => { + // This code was originally written by Brian White for the io.js core querystring library. + // It has been adapted here for stricter adherence to RFC 3986 + if (str.length === 0) { + return str; + } + let string = str; + if (typeof str === 'symbol') { + string = Symbol.prototype.toString.call(str); + } + else if (typeof str !== 'string') { + string = String(str); + } + if (charset === 'iso-8859-1') { + return escape(string).replace(/%u[0-9a-f]{4}/gi, function ($0) { + return '%26%23' + parseInt($0.slice(2), 16) + '%3B'; + }); + } + let out = ''; + for (let j = 0; j < string.length; j += limit) { + const segment = string.length >= limit ? string.slice(j, j + limit) : string; + const arr = []; + for (let i = 0; i < segment.length; ++i) { + let c = segment.charCodeAt(i); + if (c === 0x2d || // - + c === 0x2e || // . 
+ c === 0x5f || // _ + c === 0x7e || // ~ + (c >= 0x30 && c <= 0x39) || // 0-9 + (c >= 0x41 && c <= 0x5a) || // a-z + (c >= 0x61 && c <= 0x7a) || // A-Z + (format === RFC1738 && (c === 0x28 || c === 0x29)) // ( ) + ) { + arr[arr.length] = segment.charAt(i); + continue; + } + if (c < 0x80) { + arr[arr.length] = hex_table[c]; + continue; + } + if (c < 0x800) { + arr[arr.length] = hex_table[0xc0 | (c >> 6)] + hex_table[0x80 | (c & 0x3f)]; + continue; + } + if (c < 0xd800 || c >= 0xe000) { + arr[arr.length] = + hex_table[0xe0 | (c >> 12)] + hex_table[0x80 | ((c >> 6) & 0x3f)] + hex_table[0x80 | (c & 0x3f)]; + continue; + } + i += 1; + c = 0x10000 + (((c & 0x3ff) << 10) | (segment.charCodeAt(i) & 0x3ff)); + arr[arr.length] = + hex_table[0xf0 | (c >> 18)] + + hex_table[0x80 | ((c >> 12) & 0x3f)] + + hex_table[0x80 | ((c >> 6) & 0x3f)] + + hex_table[0x80 | (c & 0x3f)]; + } + out += arr.join(''); + } + return out; +}; +export function compact(value) { + const queue = [{ obj: { o: value }, prop: 'o' }]; + const refs = []; + for (let i = 0; i < queue.length; ++i) { + const item = queue[i]; + // @ts-ignore + const obj = item.obj[item.prop]; + const keys = Object.keys(obj); + for (let j = 0; j < keys.length; ++j) { + const key = keys[j]; + const val = obj[key]; + if (typeof val === 'object' && val !== null && refs.indexOf(val) === -1) { + queue.push({ obj: obj, prop: key }); + refs.push(val); + } + } + } + compact_queue(queue); + return value; +} +export function is_regexp(obj) { + return Object.prototype.toString.call(obj) === '[object RegExp]'; +} +export function is_buffer(obj) { + if (!obj || typeof obj !== 'object') { + return false; + } + return !!(obj.constructor && obj.constructor.isBuffer && obj.constructor.isBuffer(obj)); +} +export function combine(a, b) { + return [].concat(a, b); +} +export function maybe_map(val, fn) { + if (is_array(val)) { + const mapped = []; + for (let i = 0; i < val.length; i += 1) { + mapped.push(fn(val[i])); + } + return mapped; + } + return fn(val); +} +//# sourceMappingURL=utils.mjs.map \ No newline at end of file diff --git a/node_modules/openai/internal/qs/utils.mjs.map b/node_modules/openai/internal/qs/utils.mjs.map new file mode 100644 index 0000000..5f02e43 --- /dev/null +++ b/node_modules/openai/internal/qs/utils.mjs.map @@ -0,0 +1 @@ 
+{"version":3,"file":"utils.mjs","sourceRoot":"","sources":["../../src/internal/qs/utils.ts"],"names":[],"mappings":"OAAO,EAAE,OAAO,EAAE;AAGlB,MAAM,GAAG,GAAG,MAAM,CAAC,SAAS,CAAC,cAAc,CAAC;AAC5C,MAAM,QAAQ,GAAG,KAAK,CAAC,OAAO,CAAC;AAE/B,MAAM,SAAS,GAAG,CAAC,GAAG,EAAE;IACtB,MAAM,KAAK,GAAG,EAAE,CAAC;IACjB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,GAAG,EAAE,EAAE,CAAC,EAAE;QAC5B,KAAK,CAAC,IAAI,CAAC,GAAG,GAAG,CAAC,CAAC,CAAC,GAAG,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC,CAAC,WAAW,EAAE,CAAC,CAAC;KACxE;IAED,OAAO,KAAK,CAAC;AACf,CAAC,CAAC,EAAE,CAAC;AAEL,SAAS,aAAa,CAAgC,KAAsC;IAC1F,OAAO,KAAK,CAAC,MAAM,GAAG,CAAC,EAAE;QACvB,MAAM,IAAI,GAAG,KAAK,CAAC,GAAG,EAAE,CAAC;QACzB,IAAI,CAAC,IAAI;YAAE,SAAS;QAEpB,MAAM,GAAG,GAAG,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;QAEhC,IAAI,QAAQ,CAAC,GAAG,CAAC,EAAE;YACjB,MAAM,SAAS,GAAc,EAAE,CAAC;YAEhC,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,GAAG,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;gBACnC,IAAI,OAAO,GAAG,CAAC,CAAC,CAAC,KAAK,WAAW,EAAE;oBACjC,SAAS,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;iBACxB;aACF;YAED,aAAa;YACb,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,IAAI,CAAC,GAAG,SAAS,CAAC;SACjC;KACF;AACH,CAAC;AAED,SAAS,eAAe,CAAC,MAAa,EAAE,OAAkC;IACxE,MAAM,GAAG,GAAG,OAAO,IAAI,OAAO,CAAC,YAAY,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC;IACvE,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,MAAM,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;QACtC,IAAI,OAAO,MAAM,CAAC,CAAC,CAAC,KAAK,WAAW,EAAE;YACpC,GAAG,CAAC,CAAC,CAAC,GAAG,MAAM,CAAC,CAAC,CAAC,CAAC;SACpB;KACF;IAED,OAAO,GAAG,CAAC;AACb,CAAC;AAED,MAAM,UAAU,KAAK,CACnB,MAAW,EACX,MAAW,EACX,UAAiE,EAAE;IAEnE,IAAI,CAAC,MAAM,EAAE;QACX,OAAO,MAAM,CAAC;KACf;IAED,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE;QAC9B,IAAI,QAAQ,CAAC,MAAM,CAAC,EAAE;YACpB,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;SACrB;aAAM,IAAI,MAAM,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE;YAC/C,IACE,CAAC,OAAO,IAAI,CAAC,OAAO,CAAC,YAAY,IAAI,OAAO,CAAC,eAAe,CAAC,CAAC;gBAC9D,CAAC,GAAG,CAAC,IAAI,CAAC,MAAM,CAAC,SAAS,EAAE,MAAM,CAAC,EACnC;gBACA,MAAM,CAAC,MAAM,CAAC,GAAG,IAAI,CAAC;aACvB;SACF;aAAM;YACL,OAAO,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;SACzB;QAED,OAAO,MAAM,CAAC;KACf;IAED,IAAI,CAAC,MAAM,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE;QACzC,OAAO,CAAC,MAAM,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC;KAChC;IAED,IAAI,WAAW,GAAG,MAAM,CAAC;IACzB,IAAI,QAAQ,CAAC,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC,MAAM,CAAC,EAAE;QACzC,aAAa;QACb,WAAW,GAAG,eAAe,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;KAChD;IAED,IAAI,QAAQ,CAAC,MAAM,CAAC,IAAI,QAAQ,CAAC,MAAM,CAAC,EAAE;QACxC,MAAM,CAAC,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;YAC9B,IAAI,GAAG,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,CAAC,EAAE;gBACvB,MAAM,UAAU,GAAG,MAAM,CAAC,CAAC,CAAC,CAAC;gBAC7B,IAAI,UAAU,IAAI,OAAO,UAAU,KAAK,QAAQ,IAAI,IAAI,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE;oBACpF,MAAM,CAAC,CAAC,CAAC,GAAG,KAAK,CAAC,UAAU,EAAE,IAAI,EAAE,OAAO,CAAC,CAAC;iBAC9C;qBAAM;oBACL,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;iBACnB;aACF;iBAAM;gBACL,MAAM,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC;aAClB;QACH,CAAC,CAAC,CAAC;QACH,OAAO,MAAM,CAAC;KACf;IAED,OAAO,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,MAAM,CAAC,UAAU,GAAG,EAAE,GAAG;QAClD,MAAM,KAAK,GAAG,MAAM,CAAC,GAAG,CAAC,CAAC;QAE1B,IAAI,GAAG,CAAC,IAAI,CAAC,GAAG,EAAE,GAAG,CAAC,EAAE;YACtB,GAAG,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE,KAAK,EAAE,OAAO,CAAC,CAAC;SAC5C;aAAM;YACL,GAAG,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC;SAClB;QACD,OAAO,GAAG,CAAC;IACb,CAAC,EAAE,WAAW,CAAC,CAAC;AAClB,CAAC;AAED,MAAM,UAAU,oBAAoB,CAAC,MAAW,EAAE,MAAW;IAC3D,OAAO,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,MAAM,CAAC,UAAU,GAAG,EAAE,GAAG;QAClD,GAAG,CAAC,GAAG,CAAC,GAAG,MAAM,CAAC,GAAG,CAAC,CAAC;QACvB,OAAO,GAAG,CAAC;IACb,CAAC,EAAE,MAAM,CAAC,CAAC;AACb,CAAC;AAED,MAAM,UAAU,MAAM,CAAC,GAAW,EAAE,CAAM,EAA
E,OAAe;IACzD,MAAM,cAAc,GAAG,GAAG,CAAC,OAAO,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC;IAC/C,IAAI,OAAO,KAAK,YAAY,EAAE;QAC5B,gDAAgD;QAChD,OAAO,cAAc,CAAC,OAAO,CAAC,gBAAgB,EAAE,QAAQ,CAAC,CAAC;KAC3D;IACD,QAAQ;IACR,IAAI;QACF,OAAO,kBAAkB,CAAC,cAAc,CAAC,CAAC;KAC3C;IAAC,OAAO,CAAC,EAAE;QACV,OAAO,cAAc,CAAC;KACvB;AACH,CAAC;AAED,MAAM,KAAK,GAAG,IAAI,CAAC;AAEnB,MAAM,CAAC,MAAM,MAAM,GAML,CAAC,GAAG,EAAE,eAAe,EAAE,OAAO,EAAE,KAAK,EAAE,MAAc,EAAE,EAAE;IACrE,0FAA0F;IAC1F,8DAA8D;IAC9D,IAAI,GAAG,CAAC,MAAM,KAAK,CAAC,EAAE;QACpB,OAAO,GAAG,CAAC;KACZ;IAED,IAAI,MAAM,GAAG,GAAG,CAAC;IACjB,IAAI,OAAO,GAAG,KAAK,QAAQ,EAAE;QAC3B,MAAM,GAAG,MAAM,CAAC,SAAS,CAAC,QAAQ,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;KAC9C;SAAM,IAAI,OAAO,GAAG,KAAK,QAAQ,EAAE;QAClC,MAAM,GAAG,MAAM,CAAC,GAAG,CAAC,CAAC;KACtB;IAED,IAAI,OAAO,KAAK,YAAY,EAAE;QAC5B,OAAO,MAAM,CAAC,MAAM,CAAC,CAAC,OAAO,CAAC,iBAAiB,EAAE,UAAU,EAAE;YAC3D,OAAO,QAAQ,GAAG,QAAQ,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,GAAG,KAAK,CAAC;QACtD,CAAC,CAAC,CAAC;KACJ;IAED,IAAI,GAAG,GAAG,EAAE,CAAC;IACb,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,MAAM,CAAC,MAAM,EAAE,CAAC,IAAI,KAAK,EAAE;QAC7C,MAAM,OAAO,GAAG,MAAM,CAAC,MAAM,IAAI,KAAK,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC;QAC7E,MAAM,GAAG,GAAG,EAAE,CAAC;QAEf,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,OAAO,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;YACvC,IAAI,CAAC,GAAG,OAAO,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC;YAC9B,IACE,CAAC,KAAK,IAAI,IAAI,IAAI;gBAClB,CAAC,KAAK,IAAI,IAAI,IAAI;gBAClB,CAAC,KAAK,IAAI,IAAI,IAAI;gBAClB,CAAC,KAAK,IAAI,IAAI,IAAI;gBAClB,CAAC,CAAC,IAAI,IAAI,IAAI,CAAC,IAAI,IAAI,CAAC,IAAI,MAAM;gBAClC,CAAC,CAAC,IAAI,IAAI,IAAI,CAAC,IAAI,IAAI,CAAC,IAAI,MAAM;gBAClC,CAAC,CAAC,IAAI,IAAI,IAAI,CAAC,IAAI,IAAI,CAAC,IAAI,MAAM;gBAClC,CAAC,MAAM,KAAK,OAAO,IAAI,CAAC,CAAC,KAAK,IAAI,IAAI,CAAC,KAAK,IAAI,CAAC,CAAC,CAAC,MAAM;cACzD;gBACA,GAAG,CAAC,GAAG,CAAC,MAAM,CAAC,GAAG,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC;gBACpC,SAAS;aACV;YAED,IAAI,CAAC,GAAG,IAAI,EAAE;gBACZ,GAAG,CAAC,GAAG,CAAC,MAAM,CAAC,GAAG,SAAS,CAAC,CAAC,CAAC,CAAC;gBAC/B,SAAS;aACV;YAED,IAAI,CAAC,GAAG,KAAK,EAAE;gBACb,GAAG,CAAC,GAAG,CAAC,MAAM,CAAC,GAAG,SAAS,CAAC,IAAI,GAAG,CAAC,CAAC,IAAI,CAAC,CAAC,CAAE,GAAG,SAAS,CAAC,IAAI,GAAG,CAAC,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC;gBAC7E,SAAS;aACV;YAED,IAAI,CAAC,GAAG,MAAM,IAAI,CAAC,IAAI,MAAM,EAAE;gBAC7B,GAAG,CAAC,GAAG,CAAC,MAAM,CAAC;oBACb,SAAS,CAAC,IAAI,GAAG,CAAC,CAAC,IAAI,EAAE,CAAC,CAAE,GAAG,SAAS,CAAC,IAAI,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,GAAG,IAAI,CAAC,CAAC,GAAG,SAAS,CAAC,IAAI,GAAG,CAAC,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC;gBACpG,SAAS;aACV;YAED,CAAC,IAAI,CAAC,CAAC;YACP,CAAC,GAAG,OAAO,GAAG,CAAC,CAAC,CAAC,CAAC,GAAG,KAAK,CAAC,IAAI,EAAE,CAAC,GAAG,CAAC,OAAO,CAAC,UAAU,CAAC,CAAC,CAAC,GAAG,KAAK,CAAC,CAAC,CAAC;YAEtE,GAAG,CAAC,GAAG,CAAC,MAAM,CAAC;gBACb,SAAS,CAAC,IAAI,GAAG,CAAC,CAAC,IAAI,EAAE,CAAC,CAAE;oBAC5B,SAAS,CAAC,IAAI,GAAG,CAAC,CAAC,CAAC,IAAI,EAAE,CAAC,GAAG,IAAI,CAAC,CAAC;oBACpC,SAAS,CAAC,IAAI,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,GAAG,IAAI,CAAC,CAAC;oBACnC,SAAS,CAAC,IAAI,GAAG,CAAC,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC;SAChC;QAED,GAAG,IAAI,GAAG,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;KACrB;IAED,OAAO,GAAG,CAAC;AACb,CAAC,CAAC;AAEF,MAAM,UAAU,OAAO,CAAC,KAAU;IAChC,MAAM,KAAK,GAAG,CAAC,EAAE,GAAG,EAAE,EAAE,CAAC,EAAE,KAAK,EAAE,EAAE,IAAI,EAAE,GAAG,EAAE,CAAC,CAAC;IACjD,MAAM,IAAI,GAAG,EAAE,CAAC;IAEhB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,KAAK,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;QACrC,MAAM,IAAI,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC;QACtB,aAAa;QACb,MAAM,GAAG,GAAG,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;QAEhC,MAAM,IAAI,GAAG,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;QAC9B,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC,MAAM,EAAE,EAAE,CAAC,E
AAE;YACpC,MAAM,GAAG,GAAG,IAAI,CAAC,CAAC,CAAE,CAAC;YACrB,MAAM,GAAG,GAAG,GAAG,CAAC,GAAG,CAAC,CAAC;YACrB,IAAI,OAAO,GAAG,KAAK,QAAQ,IAAI,GAAG,KAAK,IAAI,IAAI,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,EAAE;gBACvE,KAAK,CAAC,IAAI,CAAC,EAAE,GAAG,EAAE,GAAG,EAAE,IAAI,EAAE,GAAG,EAAE,CAAC,CAAC;gBACpC,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;aAChB;SACF;KACF;IAED,aAAa,CAAC,KAAK,CAAC,CAAC;IAErB,OAAO,KAAK,CAAC;AACf,CAAC;AAED,MAAM,UAAU,SAAS,CAAC,GAAQ;IAChC,OAAO,MAAM,CAAC,SAAS,CAAC,QAAQ,CAAC,IAAI,CAAC,GAAG,CAAC,KAAK,iBAAiB,CAAC;AACnE,CAAC;AAED,MAAM,UAAU,SAAS,CAAC,GAAQ;IAChC,IAAI,CAAC,GAAG,IAAI,OAAO,GAAG,KAAK,QAAQ,EAAE;QACnC,OAAO,KAAK,CAAC;KACd;IAED,OAAO,CAAC,CAAC,CAAC,GAAG,CAAC,WAAW,IAAI,GAAG,CAAC,WAAW,CAAC,QAAQ,IAAI,GAAG,CAAC,WAAW,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC;AAC1F,CAAC;AAED,MAAM,UAAU,OAAO,CAAC,CAAM,EAAE,CAAM;IACpC,OAAO,EAAE,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;AACzB,CAAC;AAED,MAAM,UAAU,SAAS,CAAI,GAAQ,EAAE,EAAe;IACpD,IAAI,QAAQ,CAAC,GAAG,CAAC,EAAE;QACjB,MAAM,MAAM,GAAG,EAAE,CAAC;QAClB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,GAAG,CAAC,MAAM,EAAE,CAAC,IAAI,CAAC,EAAE;YACtC,MAAM,CAAC,IAAI,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,CAAE,CAAC,CAAC,CAAC;SAC1B;QACD,OAAO,MAAM,CAAC;KACf;IACD,OAAO,EAAE,CAAC,GAAG,CAAC,CAAC;AACjB,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/lib/AbstractChatCompletionRunner.d.ts b/node_modules/openai/lib/AbstractChatCompletionRunner.d.ts new file mode 100644 index 0000000..848700c --- /dev/null +++ b/node_modules/openai/lib/AbstractChatCompletionRunner.d.ts @@ -0,0 +1,61 @@ +import * as Core from "../core.js"; +import { type CompletionUsage } from "../resources/completions.js"; +import { type ChatCompletion, type ChatCompletionMessage, type ChatCompletionMessageParam, type ChatCompletionCreateParams } from "../resources/chat/completions.js"; +import { type BaseFunctionsArgs } from "./RunnableFunction.js"; +import { ChatCompletionFunctionRunnerParams, ChatCompletionToolRunnerParams } from "./ChatCompletionRunner.js"; +import { ChatCompletionStreamingFunctionRunnerParams, ChatCompletionStreamingToolRunnerParams } from "./ChatCompletionStreamingRunner.js"; +import { BaseEvents, EventStream } from "./EventStream.js"; +import { ParsedChatCompletion } from "../resources/beta/chat/completions.js"; +import OpenAI from "../index.js"; +export interface RunnerOptions extends Core.RequestOptions { + /** How many requests to make before canceling. Default 10. */ + maxChatCompletions?: number; +} +export declare class AbstractChatCompletionRunner extends EventStream { + #private; + protected _chatCompletions: ParsedChatCompletion[]; + messages: ChatCompletionMessageParam[]; + protected _addChatCompletion(this: AbstractChatCompletionRunner, chatCompletion: ParsedChatCompletion): ParsedChatCompletion; + protected _addMessage(this: AbstractChatCompletionRunner, message: ChatCompletionMessageParam, emit?: boolean): void; + /** + * @returns a promise that resolves with the final ChatCompletion, or rejects + * if an error occurred or the stream ended prematurely without producing a ChatCompletion. + */ + finalChatCompletion(): Promise>; + /** + * @returns a promise that resolves with the content of the final ChatCompletionMessage, or rejects + * if an error occurred or the stream ended prematurely without producing a ChatCompletionMessage. + */ + finalContent(): Promise; + /** + * @returns a promise that resolves with the the final assistant ChatCompletionMessage response, + * or rejects if an error occurred or the stream ended prematurely without producing a ChatCompletionMessage. 
+ */ + finalMessage(): Promise; + /** + * @returns a promise that resolves with the content of the final FunctionCall, or rejects + * if an error occurred or the stream ended prematurely without producing a ChatCompletionMessage. + */ + finalFunctionCall(): Promise; + finalFunctionCallResult(): Promise; + totalUsage(): Promise; + allChatCompletions(): ChatCompletion[]; + protected _emitFinal(this: AbstractChatCompletionRunner): void; + protected _createChatCompletion(client: OpenAI, params: ChatCompletionCreateParams, options?: Core.RequestOptions): Promise>; + protected _runChatCompletion(client: OpenAI, params: ChatCompletionCreateParams, options?: Core.RequestOptions): Promise; + protected _runFunctions(client: OpenAI, params: ChatCompletionFunctionRunnerParams | ChatCompletionStreamingFunctionRunnerParams, options?: RunnerOptions): Promise; + protected _runTools(client: OpenAI, params: ChatCompletionToolRunnerParams | ChatCompletionStreamingToolRunnerParams, options?: RunnerOptions): Promise; +} +export interface AbstractChatCompletionRunnerEvents extends BaseEvents { + functionCall: (functionCall: ChatCompletionMessage.FunctionCall) => void; + message: (message: ChatCompletionMessageParam) => void; + chatCompletion: (completion: ChatCompletion) => void; + finalContent: (contentSnapshot: string) => void; + finalMessage: (message: ChatCompletionMessageParam) => void; + finalChatCompletion: (completion: ChatCompletion) => void; + finalFunctionCall: (functionCall: ChatCompletionMessage.FunctionCall) => void; + functionCallResult: (content: string) => void; + finalFunctionCallResult: (content: string) => void; + totalUsage: (usage: CompletionUsage) => void; +} +//# sourceMappingURL=AbstractChatCompletionRunner.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/lib/AbstractChatCompletionRunner.d.ts.map b/node_modules/openai/lib/AbstractChatCompletionRunner.d.ts.map new file mode 100644 index 0000000..3fa85a2 --- /dev/null +++ b/node_modules/openai/lib/AbstractChatCompletionRunner.d.ts.map @@ -0,0 +1 @@ 
+{"version":3,"file":"AbstractChatCompletionRunner.d.ts","sourceRoot":"","sources":["../src/lib/AbstractChatCompletionRunner.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,IAAI,MAAM,SAAS,CAAC;AAChC,OAAO,EAAE,KAAK,eAAe,EAAE,MAAM,0BAA0B,CAAC;AAChE,OAAO,EACL,KAAK,cAAc,EACnB,KAAK,qBAAqB,EAC1B,KAAK,0BAA0B,EAC/B,KAAK,0BAA0B,EAEhC,MAAM,+BAA+B,CAAC;AAEvC,OAAO,EAGL,KAAK,iBAAiB,EAEvB,MAAM,oBAAoB,CAAC;AAC5B,OAAO,EAAE,kCAAkC,EAAE,8BAA8B,EAAE,MAAM,wBAAwB,CAAC;AAC5G,OAAO,EACL,2CAA2C,EAC3C,uCAAuC,EACxC,MAAM,iCAAiC,CAAC;AAEzC,OAAO,EAAE,UAAU,EAAE,WAAW,EAAE,MAAM,eAAe,CAAC;AACxD,OAAO,EAAE,oBAAoB,EAAE,MAAM,oCAAoC,CAAC;AAC1E,OAAO,MAAM,MAAM,UAAU,CAAC;AAI9B,MAAM,WAAW,aAAc,SAAQ,IAAI,CAAC,cAAc;IACxD,8DAA8D;IAC9D,kBAAkB,CAAC,EAAE,MAAM,CAAC;CAC7B;AAED,qBAAa,4BAA4B,CACvC,UAAU,SAAS,kCAAkC,EACrD,OAAO,CACP,SAAQ,WAAW,CAAC,UAAU,CAAC;;IAC/B,SAAS,CAAC,gBAAgB,EAAE,oBAAoB,CAAC,OAAO,CAAC,EAAE,CAAM;IACjE,QAAQ,EAAE,0BAA0B,EAAE,CAAM;IAE5C,SAAS,CAAC,kBAAkB,CAC1B,IAAI,EAAE,4BAA4B,CAAC,kCAAkC,EAAE,OAAO,CAAC,EAC/E,cAAc,EAAE,oBAAoB,CAAC,OAAO,CAAC,GAC5C,oBAAoB,CAAC,OAAO,CAAC;IAQhC,SAAS,CAAC,WAAW,CACnB,IAAI,EAAE,4BAA4B,CAAC,kCAAkC,EAAE,OAAO,CAAC,EAC/E,OAAO,EAAE,0BAA0B,EACnC,IAAI,UAAO;IAuBb;;;OAGG;IACG,mBAAmB,IAAI,OAAO,CAAC,oBAAoB,CAAC,OAAO,CAAC,CAAC;IAWnE;;;OAGG;IACG,YAAY,IAAI,OAAO,CAAC,MAAM,GAAG,IAAI,CAAC;IA2B5C;;;OAGG;IACG,YAAY,IAAI,OAAO,CAAC,qBAAqB,CAAC;IAmBpD;;;OAGG;IACG,iBAAiB,IAAI,OAAO,CAAC,qBAAqB,CAAC,YAAY,GAAG,SAAS,CAAC;IA4B5E,uBAAuB,IAAI,OAAO,CAAC,MAAM,GAAG,SAAS,CAAC;IAqBtD,UAAU,IAAI,OAAO,CAAC,eAAe,CAAC;IAK5C,kBAAkB,IAAI,cAAc,EAAE;cAInB,UAAU,CAC3B,IAAI,EAAE,4BAA4B,CAAC,kCAAkC,EAAE,OAAO,CAAC;cA4BjE,qBAAqB,CACnC,MAAM,EAAE,MAAM,EACd,MAAM,EAAE,0BAA0B,EAClC,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAC5B,OAAO,CAAC,oBAAoB,CAAC,OAAO,CAAC,CAAC;cAgBzB,kBAAkB,CAChC,MAAM,EAAE,MAAM,EACd,MAAM,EAAE,0BAA0B,EAClC,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAC5B,OAAO,CAAC,cAAc,CAAC;cAOV,aAAa,CAAC,aAAa,SAAS,iBAAiB,EACnE,MAAM,EAAE,MAAM,EACd,MAAM,EACF,kCAAkC,CAAC,aAAa,CAAC,GACjD,2CAA2C,CAAC,aAAa,CAAC,EAC9D,OAAO,CAAC,EAAE,aAAa;cAgFT,SAAS,CAAC,aAAa,SAAS,iBAAiB,EAC/D,MAAM,EAAE,MAAM,EACd,MAAM,EACF,8BAA8B,CAAC,aAAa,CAAC,GAC7C,uCAAuC,CAAC,aAAa,CAAC,EAC1D,OAAO,CAAC,EAAE,aAAa;CAmI1B;AAED,MAAM,WAAW,kCAAmC,SAAQ,UAAU;IACpE,YAAY,EAAE,CAAC,YAAY,EAAE,qBAAqB,CAAC,YAAY,KAAK,IAAI,CAAC;IACzE,OAAO,EAAE,CAAC,OAAO,EAAE,0BAA0B,KAAK,IAAI,CAAC;IACvD,cAAc,EAAE,CAAC,UAAU,EAAE,cAAc,KAAK,IAAI,CAAC;IACrD,YAAY,EAAE,CAAC,eAAe,EAAE,MAAM,KAAK,IAAI,CAAC;IAChD,YAAY,EAAE,CAAC,OAAO,EAAE,0BAA0B,KAAK,IAAI,CAAC;IAC5D,mBAAmB,EAAE,CAAC,UAAU,EAAE,cAAc,KAAK,IAAI,CAAC;IAC1D,iBAAiB,EAAE,CAAC,YAAY,EAAE,qBAAqB,CAAC,YAAY,KAAK,IAAI,CAAC;IAC9E,kBAAkB,EAAE,CAAC,OAAO,EAAE,MAAM,KAAK,IAAI,CAAC;IAC9C,uBAAuB,EAAE,CAAC,OAAO,EAAE,MAAM,KAAK,IAAI,CAAC;IACnD,UAAU,EAAE,CAAC,KAAK,EAAE,eAAe,KAAK,IAAI,CAAC;CAC9C"} \ No newline at end of file diff --git a/node_modules/openai/lib/AbstractChatCompletionRunner.js b/node_modules/openai/lib/AbstractChatCompletionRunner.js new file mode 100644 index 0000000..14f8ae1 --- /dev/null +++ b/node_modules/openai/lib/AbstractChatCompletionRunner.js @@ -0,0 +1,372 @@ +"use strict"; +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? 
f.value : state.get(receiver); +}; +var _AbstractChatCompletionRunner_instances, _AbstractChatCompletionRunner_getFinalContent, _AbstractChatCompletionRunner_getFinalMessage, _AbstractChatCompletionRunner_getFinalFunctionCall, _AbstractChatCompletionRunner_getFinalFunctionCallResult, _AbstractChatCompletionRunner_calculateTotalUsage, _AbstractChatCompletionRunner_validateParams, _AbstractChatCompletionRunner_stringifyFunctionCallResult; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.AbstractChatCompletionRunner = void 0; +const error_1 = require("../error.js"); +const RunnableFunction_1 = require("./RunnableFunction.js"); +const chatCompletionUtils_1 = require("./chatCompletionUtils.js"); +const EventStream_1 = require("./EventStream.js"); +const parser_1 = require("../lib/parser.js"); +const DEFAULT_MAX_CHAT_COMPLETIONS = 10; +class AbstractChatCompletionRunner extends EventStream_1.EventStream { + constructor() { + super(...arguments); + _AbstractChatCompletionRunner_instances.add(this); + this._chatCompletions = []; + this.messages = []; + } + _addChatCompletion(chatCompletion) { + this._chatCompletions.push(chatCompletion); + this._emit('chatCompletion', chatCompletion); + const message = chatCompletion.choices[0]?.message; + if (message) + this._addMessage(message); + return chatCompletion; + } + _addMessage(message, emit = true) { + if (!('content' in message)) + message.content = null; + this.messages.push(message); + if (emit) { + this._emit('message', message); + if (((0, chatCompletionUtils_1.isFunctionMessage)(message) || (0, chatCompletionUtils_1.isToolMessage)(message)) && message.content) { + // Note, this assumes that {role: 'tool', content: …} is always the result of a call of tool of type=function. + this._emit('functionCallResult', message.content); + } + else if ((0, chatCompletionUtils_1.isAssistantMessage)(message) && message.function_call) { + this._emit('functionCall', message.function_call); + } + else if ((0, chatCompletionUtils_1.isAssistantMessage)(message) && message.tool_calls) { + for (const tool_call of message.tool_calls) { + if (tool_call.type === 'function') { + this._emit('functionCall', tool_call.function); + } + } + } + } + } + /** + * @returns a promise that resolves with the final ChatCompletion, or rejects + * if an error occurred or the stream ended prematurely without producing a ChatCompletion. + */ + async finalChatCompletion() { + await this.done(); + const completion = this._chatCompletions[this._chatCompletions.length - 1]; + if (!completion) + throw new error_1.OpenAIError('stream ended without producing a ChatCompletion'); + return completion; + } + /** + * @returns a promise that resolves with the content of the final ChatCompletionMessage, or rejects + * if an error occurred or the stream ended prematurely without producing a ChatCompletionMessage. + */ + async finalContent() { + await this.done(); + return __classPrivateFieldGet(this, _AbstractChatCompletionRunner_instances, "m", _AbstractChatCompletionRunner_getFinalContent).call(this); + } + /** + * @returns a promise that resolves with the the final assistant ChatCompletionMessage response, + * or rejects if an error occurred or the stream ended prematurely without producing a ChatCompletionMessage. 
+ */ + async finalMessage() { + await this.done(); + return __classPrivateFieldGet(this, _AbstractChatCompletionRunner_instances, "m", _AbstractChatCompletionRunner_getFinalMessage).call(this); + } + /** + * @returns a promise that resolves with the content of the final FunctionCall, or rejects + * if an error occurred or the stream ended prematurely without producing a ChatCompletionMessage. + */ + async finalFunctionCall() { + await this.done(); + return __classPrivateFieldGet(this, _AbstractChatCompletionRunner_instances, "m", _AbstractChatCompletionRunner_getFinalFunctionCall).call(this); + } + async finalFunctionCallResult() { + await this.done(); + return __classPrivateFieldGet(this, _AbstractChatCompletionRunner_instances, "m", _AbstractChatCompletionRunner_getFinalFunctionCallResult).call(this); + } + async totalUsage() { + await this.done(); + return __classPrivateFieldGet(this, _AbstractChatCompletionRunner_instances, "m", _AbstractChatCompletionRunner_calculateTotalUsage).call(this); + } + allChatCompletions() { + return [...this._chatCompletions]; + } + _emitFinal() { + const completion = this._chatCompletions[this._chatCompletions.length - 1]; + if (completion) + this._emit('finalChatCompletion', completion); + const finalMessage = __classPrivateFieldGet(this, _AbstractChatCompletionRunner_instances, "m", _AbstractChatCompletionRunner_getFinalMessage).call(this); + if (finalMessage) + this._emit('finalMessage', finalMessage); + const finalContent = __classPrivateFieldGet(this, _AbstractChatCompletionRunner_instances, "m", _AbstractChatCompletionRunner_getFinalContent).call(this); + if (finalContent) + this._emit('finalContent', finalContent); + const finalFunctionCall = __classPrivateFieldGet(this, _AbstractChatCompletionRunner_instances, "m", _AbstractChatCompletionRunner_getFinalFunctionCall).call(this); + if (finalFunctionCall) + this._emit('finalFunctionCall', finalFunctionCall); + const finalFunctionCallResult = __classPrivateFieldGet(this, _AbstractChatCompletionRunner_instances, "m", _AbstractChatCompletionRunner_getFinalFunctionCallResult).call(this); + if (finalFunctionCallResult != null) + this._emit('finalFunctionCallResult', finalFunctionCallResult); + if (this._chatCompletions.some((c) => c.usage)) { + this._emit('totalUsage', __classPrivateFieldGet(this, _AbstractChatCompletionRunner_instances, "m", _AbstractChatCompletionRunner_calculateTotalUsage).call(this)); + } + } + async _createChatCompletion(client, params, options) { + const signal = options?.signal; + if (signal) { + if (signal.aborted) + this.controller.abort(); + signal.addEventListener('abort', () => this.controller.abort()); + } + __classPrivateFieldGet(this, _AbstractChatCompletionRunner_instances, "m", _AbstractChatCompletionRunner_validateParams).call(this, params); + const chatCompletion = await client.chat.completions.create({ ...params, stream: false }, { ...options, signal: this.controller.signal }); + this._connected(); + return this._addChatCompletion((0, parser_1.parseChatCompletion)(chatCompletion, params)); + } + async _runChatCompletion(client, params, options) { + for (const message of params.messages) { + this._addMessage(message, false); + } + return await this._createChatCompletion(client, params, options); + } + async _runFunctions(client, params, options) { + const role = 'function'; + const { function_call = 'auto', stream, ...restParams } = params; + const singleFunctionToCall = typeof function_call !== 'string' && function_call?.name; + const { maxChatCompletions = 
DEFAULT_MAX_CHAT_COMPLETIONS } = options || {}; + const functionsByName = {}; + for (const f of params.functions) { + functionsByName[f.name || f.function.name] = f; + } + const functions = params.functions.map((f) => ({ + name: f.name || f.function.name, + parameters: f.parameters, + description: f.description, + })); + for (const message of params.messages) { + this._addMessage(message, false); + } + for (let i = 0; i < maxChatCompletions; ++i) { + const chatCompletion = await this._createChatCompletion(client, { + ...restParams, + function_call, + functions, + messages: [...this.messages], + }, options); + const message = chatCompletion.choices[0]?.message; + if (!message) { + throw new error_1.OpenAIError(`missing message in ChatCompletion response`); + } + if (!message.function_call) + return; + const { name, arguments: args } = message.function_call; + const fn = functionsByName[name]; + if (!fn) { + const content = `Invalid function_call: ${JSON.stringify(name)}. Available options are: ${functions + .map((f) => JSON.stringify(f.name)) + .join(', ')}. Please try again`; + this._addMessage({ role, name, content }); + continue; + } + else if (singleFunctionToCall && singleFunctionToCall !== name) { + const content = `Invalid function_call: ${JSON.stringify(name)}. ${JSON.stringify(singleFunctionToCall)} requested. Please try again`; + this._addMessage({ role, name, content }); + continue; + } + let parsed; + try { + parsed = (0, RunnableFunction_1.isRunnableFunctionWithParse)(fn) ? await fn.parse(args) : args; + } + catch (error) { + this._addMessage({ + role, + name, + content: error instanceof Error ? error.message : String(error), + }); + continue; + } + // @ts-expect-error it can't rule out `never` type. + const rawContent = await fn.function(parsed, this); + const content = __classPrivateFieldGet(this, _AbstractChatCompletionRunner_instances, "m", _AbstractChatCompletionRunner_stringifyFunctionCallResult).call(this, rawContent); + this._addMessage({ role, name, content }); + if (singleFunctionToCall) + return; + } + } + async _runTools(client, params, options) { + const role = 'tool'; + const { tool_choice = 'auto', stream, ...restParams } = params; + const singleFunctionToCall = typeof tool_choice !== 'string' && tool_choice?.function?.name; + const { maxChatCompletions = DEFAULT_MAX_CHAT_COMPLETIONS } = options || {}; + // TODO(someday): clean this logic up + const inputTools = params.tools.map((tool) => { + if ((0, parser_1.isAutoParsableTool)(tool)) { + if (!tool.$callback) { + throw new error_1.OpenAIError('Tool given to `.runTools()` that does not have an associated function'); + } + return { + type: 'function', + function: { + function: tool.$callback, + name: tool.function.name, + description: tool.function.description || '', + parameters: tool.function.parameters, + parse: tool.$parseRaw, + strict: true, + }, + }; + } + return tool; + }); + const functionsByName = {}; + for (const f of inputTools) { + if (f.type === 'function') { + functionsByName[f.function.name || f.function.function.name] = f.function; + } + } + const tools = 'tools' in params ? + inputTools.map((t) => t.type === 'function' ? 
+ { + type: 'function', + function: { + name: t.function.name || t.function.function.name, + parameters: t.function.parameters, + description: t.function.description, + strict: t.function.strict, + }, + } + : t) + : undefined; + for (const message of params.messages) { + this._addMessage(message, false); + } + for (let i = 0; i < maxChatCompletions; ++i) { + const chatCompletion = await this._createChatCompletion(client, { + ...restParams, + tool_choice, + tools, + messages: [...this.messages], + }, options); + const message = chatCompletion.choices[0]?.message; + if (!message) { + throw new error_1.OpenAIError(`missing message in ChatCompletion response`); + } + if (!message.tool_calls?.length) { + return; + } + for (const tool_call of message.tool_calls) { + if (tool_call.type !== 'function') + continue; + const tool_call_id = tool_call.id; + const { name, arguments: args } = tool_call.function; + const fn = functionsByName[name]; + if (!fn) { + const content = `Invalid tool_call: ${JSON.stringify(name)}. Available options are: ${Object.keys(functionsByName) + .map((name) => JSON.stringify(name)) + .join(', ')}. Please try again`; + this._addMessage({ role, tool_call_id, content }); + continue; + } + else if (singleFunctionToCall && singleFunctionToCall !== name) { + const content = `Invalid tool_call: ${JSON.stringify(name)}. ${JSON.stringify(singleFunctionToCall)} requested. Please try again`; + this._addMessage({ role, tool_call_id, content }); + continue; + } + let parsed; + try { + parsed = (0, RunnableFunction_1.isRunnableFunctionWithParse)(fn) ? await fn.parse(args) : args; + } + catch (error) { + const content = error instanceof Error ? error.message : String(error); + this._addMessage({ role, tool_call_id, content }); + continue; + } + // @ts-expect-error it can't rule out `never` type. + const rawContent = await fn.function(parsed, this); + const content = __classPrivateFieldGet(this, _AbstractChatCompletionRunner_instances, "m", _AbstractChatCompletionRunner_stringifyFunctionCallResult).call(this, rawContent); + this._addMessage({ role, tool_call_id, content }); + if (singleFunctionToCall) { + return; + } + } + } + return; + } +} +exports.AbstractChatCompletionRunner = AbstractChatCompletionRunner; +_AbstractChatCompletionRunner_instances = new WeakSet(), _AbstractChatCompletionRunner_getFinalContent = function _AbstractChatCompletionRunner_getFinalContent() { + return __classPrivateFieldGet(this, _AbstractChatCompletionRunner_instances, "m", _AbstractChatCompletionRunner_getFinalMessage).call(this).content ?? null; +}, _AbstractChatCompletionRunner_getFinalMessage = function _AbstractChatCompletionRunner_getFinalMessage() { + let i = this.messages.length; + while (i-- > 0) { + const message = this.messages[i]; + if ((0, chatCompletionUtils_1.isAssistantMessage)(message)) { + const { function_call, ...rest } = message; + // TODO: support audio here + const ret = { + ...rest, + content: message.content ?? null, + refusal: message.refusal ?? 
null, + }; + if (function_call) { + ret.function_call = function_call; + } + return ret; + } + } + throw new error_1.OpenAIError('stream ended without producing a ChatCompletionMessage with role=assistant'); +}, _AbstractChatCompletionRunner_getFinalFunctionCall = function _AbstractChatCompletionRunner_getFinalFunctionCall() { + for (let i = this.messages.length - 1; i >= 0; i--) { + const message = this.messages[i]; + if ((0, chatCompletionUtils_1.isAssistantMessage)(message) && message?.function_call) { + return message.function_call; + } + if ((0, chatCompletionUtils_1.isAssistantMessage)(message) && message?.tool_calls?.length) { + return message.tool_calls.at(-1)?.function; + } + } + return; +}, _AbstractChatCompletionRunner_getFinalFunctionCallResult = function _AbstractChatCompletionRunner_getFinalFunctionCallResult() { + for (let i = this.messages.length - 1; i >= 0; i--) { + const message = this.messages[i]; + if ((0, chatCompletionUtils_1.isFunctionMessage)(message) && message.content != null) { + return message.content; + } + if ((0, chatCompletionUtils_1.isToolMessage)(message) && + message.content != null && + typeof message.content === 'string' && + this.messages.some((x) => x.role === 'assistant' && + x.tool_calls?.some((y) => y.type === 'function' && y.id === message.tool_call_id))) { + return message.content; + } + } + return; +}, _AbstractChatCompletionRunner_calculateTotalUsage = function _AbstractChatCompletionRunner_calculateTotalUsage() { + const total = { + completion_tokens: 0, + prompt_tokens: 0, + total_tokens: 0, + }; + for (const { usage } of this._chatCompletions) { + if (usage) { + total.completion_tokens += usage.completion_tokens; + total.prompt_tokens += usage.prompt_tokens; + total.total_tokens += usage.total_tokens; + } + } + return total; +}, _AbstractChatCompletionRunner_validateParams = function _AbstractChatCompletionRunner_validateParams(params) { + if (params.n != null && params.n > 1) { + throw new error_1.OpenAIError('ChatCompletion convenience helpers only support n=1 at this time. To use n>1, please use chat.completions.create() directly.'); + } +}, _AbstractChatCompletionRunner_stringifyFunctionCallResult = function _AbstractChatCompletionRunner_stringifyFunctionCallResult(rawContent) { + return (typeof rawContent === 'string' ? rawContent + : rawContent === undefined ? 
'undefined' + : JSON.stringify(rawContent)); +}; +//# sourceMappingURL=AbstractChatCompletionRunner.js.map \ No newline at end of file diff --git a/node_modules/openai/lib/AbstractChatCompletionRunner.js.map b/node_modules/openai/lib/AbstractChatCompletionRunner.js.map new file mode 100644 index 0000000..1fe4e06 --- /dev/null +++ b/node_modules/openai/lib/AbstractChatCompletionRunner.js.map @@ -0,0 +1 @@ +{"version":3,"file":"AbstractChatCompletionRunner.js","sourceRoot":"","sources":["../src/lib/AbstractChatCompletionRunner.ts"],"names":[],"mappings":";;;;;;;;;AASA,uCAAuC;AACvC,4DAK4B;AAM5B,kEAA6F;AAC7F,kDAAwD;AAGxD,6CAAwE;AAExE,MAAM,4BAA4B,GAAG,EAAE,CAAC;AAMxC,MAAa,4BAGX,SAAQ,yBAAuB;IAHjC;;;QAIY,qBAAgB,GAAoC,EAAE,CAAC;QACjE,aAAQ,GAAiC,EAAE,CAAC;IAmc9C,CAAC;IAjcW,kBAAkB,CAE1B,cAA6C;QAE7C,IAAI,CAAC,gBAAgB,CAAC,IAAI,CAAC,cAAc,CAAC,CAAC;QAC3C,IAAI,CAAC,KAAK,CAAC,gBAAgB,EAAE,cAAc,CAAC,CAAC;QAC7C,MAAM,OAAO,GAAG,cAAc,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,OAAO,CAAC;QACnD,IAAI,OAAO;YAAE,IAAI,CAAC,WAAW,CAAC,OAAqC,CAAC,CAAC;QACrE,OAAO,cAAc,CAAC;IACxB,CAAC;IAES,WAAW,CAEnB,OAAmC,EACnC,IAAI,GAAG,IAAI;QAEX,IAAI,CAAC,CAAC,SAAS,IAAI,OAAO,CAAC;YAAE,OAAO,CAAC,OAAO,GAAG,IAAI,CAAC;QAEpD,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;QAE5B,IAAI,IAAI,EAAE;YACR,IAAI,CAAC,KAAK,CAAC,SAAS,EAAE,OAAO,CAAC,CAAC;YAC/B,IAAI,CAAC,IAAA,uCAAiB,EAAC,OAAO,CAAC,IAAI,IAAA,mCAAa,EAAC,OAAO,CAAC,CAAC,IAAI,OAAO,CAAC,OAAO,EAAE;gBAC7E,8GAA8G;gBAC9G,IAAI,CAAC,KAAK,CAAC,oBAAoB,EAAE,OAAO,CAAC,OAAiB,CAAC,CAAC;aAC7D;iBAAM,IAAI,IAAA,wCAAkB,EAAC,OAAO,CAAC,IAAI,OAAO,CAAC,aAAa,EAAE;gBAC/D,IAAI,CAAC,KAAK,CAAC,cAAc,EAAE,OAAO,CAAC,aAAa,CAAC,CAAC;aACnD;iBAAM,IAAI,IAAA,wCAAkB,EAAC,OAAO,CAAC,IAAI,OAAO,CAAC,UAAU,EAAE;gBAC5D,KAAK,MAAM,SAAS,IAAI,OAAO,CAAC,UAAU,EAAE;oBAC1C,IAAI,SAAS,CAAC,IAAI,KAAK,UAAU,EAAE;wBACjC,IAAI,CAAC,KAAK,CAAC,cAAc,EAAE,SAAS,CAAC,QAAQ,CAAC,CAAC;qBAChD;iBACF;aACF;SACF;IACH,CAAC;IAED;;;OAGG;IACH,KAAK,CAAC,mBAAmB;QACvB,MAAM,IAAI,CAAC,IAAI,EAAE,CAAC;QAClB,MAAM,UAAU,GAAG,IAAI,CAAC,gBAAgB,CAAC,IAAI,CAAC,gBAAgB,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC;QAC3E,IAAI,CAAC,UAAU;YAAE,MAAM,IAAI,mBAAW,CAAC,iDAAiD,CAAC,CAAC;QAC1F,OAAO,UAAU,CAAC;IACpB,CAAC;IAMD;;;OAGG;IACH,KAAK,CAAC,YAAY;QAChB,MAAM,IAAI,CAAC,IAAI,EAAE,CAAC;QAClB,OAAO,uBAAA,IAAI,8FAAiB,MAArB,IAAI,CAAmB,CAAC;IACjC,CAAC;IAwBD;;;OAGG;IACH,KAAK,CAAC,YAAY;QAChB,MAAM,IAAI,CAAC,IAAI,EAAE,CAAC;QAClB,OAAO,uBAAA,IAAI,8FAAiB,MAArB,IAAI,CAAmB,CAAC;IACjC,CAAC;IAgBD;;;OAGG;IACH,KAAK,CAAC,iBAAiB;QACrB,MAAM,IAAI,CAAC,IAAI,EAAE,CAAC;QAClB,OAAO,uBAAA,IAAI,mGAAsB,MAA1B,IAAI,CAAwB,CAAC;IACtC,CAAC;IAyBD,KAAK,CAAC,uBAAuB;QAC3B,MAAM,IAAI,CAAC,IAAI,EAAE,CAAC;QAClB,OAAO,uBAAA,IAAI,yGAA4B,MAAhC,IAAI,CAA8B,CAAC;IAC5C,CAAC;IAkBD,KAAK,CAAC,UAAU;QACd,MAAM,IAAI,CAAC,IAAI,EAAE,CAAC;QAClB,OAAO,uBAAA,IAAI,kGAAqB,MAAzB,IAAI,CAAuB,CAAC;IACrC,CAAC;IAED,kBAAkB;QAChB,OAAO,CAAC,GAAG,IAAI,CAAC,gBAAgB,CAAC,CAAC;IACpC,CAAC;IAEkB,UAAU;QAG3B,MAAM,UAAU,GAAG,IAAI,CAAC,gBAAgB,CAAC,IAAI,CAAC,gBAAgB,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC;QAC3E,IAAI,UAAU;YAAE,IAAI,CAAC,KAAK,CAAC,qBAAqB,EAAE,UAAU,CAAC,CAAC;QAC9D,MAAM,YAAY,GAAG,uBAAA,IAAI,8FAAiB,MAArB,IAAI,CAAmB,CAAC;QAC7C,IAAI,YAAY;YAAE,IAAI,CAAC,KAAK,CAAC,cAAc,EAAE,YAAY,CAAC,CAAC;QAC3D,MAAM,YAAY,GAAG,uBAAA,IAAI,8FAAiB,MAArB,IAAI,CAAmB,CAAC;QAC7C,IAAI,YAAY;YAAE,IAAI,CAAC,KAAK,CAAC,cAAc,EAAE,YAAY,CAAC,CAAC;QAE3D,MAAM,iBAAiB,GAAG,uBAAA,IAAI,mGAAsB,MAA1B,IAAI,CAAwB,CAAC;QACvD,IAAI,iBAAiB;YAAE,IAAI,CAAC,KAAK,CAAC,mBAAmB,EAAE,iBAAiB,CAAC,CAAC;QAE1E,MAAM,uBAAuB,GAAG,uBAAA,IAAI,yGAA4B,MAAhC,IAAI,CAA8B,CAAC;QACnE,IAAI,uBAAuB,IAAI,IAAI;YAAE,IAAI,CAAC,KAAK,CAAC,yBAAyB,EAAE,uBAAuB,CAAC,CAAC;QAEpG,IAAI,IAAI,CAAC,gBAAgB,CA
AC,IAAI,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,EAAE;YAC9C,IAAI,CAAC,KAAK,CAAC,YAAY,EAAE,uBAAA,IAAI,kGAAqB,MAAzB,IAAI,CAAuB,CAAC,CAAC;SACvD;IACH,CAAC;IAUS,KAAK,CAAC,qBAAqB,CACnC,MAAc,EACd,MAAkC,EAClC,OAA6B;QAE7B,MAAM,MAAM,GAAG,OAAO,EAAE,MAAM,CAAC;QAC/B,IAAI,MAAM,EAAE;YACV,IAAI,MAAM,CAAC,OAAO;gBAAE,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,CAAC;YAC5C,MAAM,CAAC,gBAAgB,CAAC,OAAO,EAAE,GAAG,EAAE,CAAC,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,CAAC,CAAC;SACjE;QACD,uBAAA,IAAI,6FAAgB,MAApB,IAAI,EAAiB,MAAM,CAAC,CAAC;QAE7B,MAAM,cAAc,GAAG,MAAM,MAAM,CAAC,IAAI,CAAC,WAAW,CAAC,MAAM,CACzD,EAAE,GAAG,MAAM,EAAE,MAAM,EAAE,KAAK,EAAE,EAC5B,EAAE,GAAG,OAAO,EAAE,MAAM,EAAE,IAAI,CAAC,UAAU,CAAC,MAAM,EAAE,CAC/C,CAAC;QACF,IAAI,CAAC,UAAU,EAAE,CAAC;QAClB,OAAO,IAAI,CAAC,kBAAkB,CAAC,IAAA,4BAAmB,EAAC,cAAc,EAAE,MAAM,CAAC,CAAC,CAAC;IAC9E,CAAC;IAES,KAAK,CAAC,kBAAkB,CAChC,MAAc,EACd,MAAkC,EAClC,OAA6B;QAE7B,KAAK,MAAM,OAAO,IAAI,MAAM,CAAC,QAAQ,EAAE;YACrC,IAAI,CAAC,WAAW,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;SAClC;QACD,OAAO,MAAM,IAAI,CAAC,qBAAqB,CAAC,MAAM,EAAE,MAAM,EAAE,OAAO,CAAC,CAAC;IACnE,CAAC;IAES,KAAK,CAAC,aAAa,CAC3B,MAAc,EACd,MAE8D,EAC9D,OAAuB;QAEvB,MAAM,IAAI,GAAG,UAAmB,CAAC;QACjC,MAAM,EAAE,aAAa,GAAG,MAAM,EAAE,MAAM,EAAE,GAAG,UAAU,EAAE,GAAG,MAAM,CAAC;QACjE,MAAM,oBAAoB,GAAG,OAAO,aAAa,KAAK,QAAQ,IAAI,aAAa,EAAE,IAAI,CAAC;QACtF,MAAM,EAAE,kBAAkB,GAAG,4BAA4B,EAAE,GAAG,OAAO,IAAI,EAAE,CAAC;QAE5E,MAAM,eAAe,GAA0C,EAAE,CAAC;QAClE,KAAK,MAAM,CAAC,IAAI,MAAM,CAAC,SAAS,EAAE;YAChC,eAAe,CAAC,CAAC,CAAC,IAAI,IAAI,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SAChD;QAED,MAAM,SAAS,GAA0C,MAAM,CAAC,SAAS,CAAC,GAAG,CAC3E,CAAC,CAAC,EAAuC,EAAE,CAAC,CAAC;YAC3C,IAAI,EAAE,CAAC,CAAC,IAAI,IAAI,CAAC,CAAC,QAAQ,CAAC,IAAI;YAC/B,UAAU,EAAE,CAAC,CAAC,UAAqC;YACnD,WAAW,EAAE,CAAC,CAAC,WAAW;SAC3B,CAAC,CACH,CAAC;QAEF,KAAK,MAAM,OAAO,IAAI,MAAM,CAAC,QAAQ,EAAE;YACrC,IAAI,CAAC,WAAW,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;SAClC;QAED,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,kBAAkB,EAAE,EAAE,CAAC,EAAE;YAC3C,MAAM,cAAc,GAAmB,MAAM,IAAI,CAAC,qBAAqB,CACrE,MAAM,EACN;gBACE,GAAG,UAAU;gBACb,aAAa;gBACb,SAAS;gBACT,QAAQ,EAAE,CAAC,GAAG,IAAI,CAAC,QAAQ,CAAC;aAC7B,EACD,OAAO,CACR,CAAC;YACF,MAAM,OAAO,GAAG,cAAc,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,OAAO,CAAC;YACnD,IAAI,CAAC,OAAO,EAAE;gBACZ,MAAM,IAAI,mBAAW,CAAC,4CAA4C,CAAC,CAAC;aACrE;YACD,IAAI,CAAC,OAAO,CAAC,aAAa;gBAAE,OAAO;YACnC,MAAM,EAAE,IAAI,EAAE,SAAS,EAAE,IAAI,EAAE,GAAG,OAAO,CAAC,aAAa,CAAC;YACxD,MAAM,EAAE,GAAG,eAAe,CAAC,IAAI,CAAC,CAAC;YACjC,IAAI,CAAC,EAAE,EAAE;gBACP,MAAM,OAAO,GAAG,0BAA0B,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,4BAA4B,SAAS;qBAChG,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC;qBAClC,IAAI,CAAC,IAAI,CAAC,oBAAoB,CAAC;gBAElC,IAAI,CAAC,WAAW,CAAC,EAAE,IAAI,EAAE,IAAI,EAAE,OAAO,EAAE,CAAC,CAAC;gBAC1C,SAAS;aACV;iBAAM,IAAI,oBAAoB,IAAI,oBAAoB,KAAK,IAAI,EAAE;gBAChE,MAAM,OAAO,GAAG,0BAA0B,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,KAAK,IAAI,CAAC,SAAS,CAC/E,oBAAoB,CACrB,8BAA8B,CAAC;gBAEhC,IAAI,CAAC,WAAW,CAAC,EAAE,IAAI,EAAE,IAAI,EAAE,OAAO,EAAE,CAAC,CAAC;gBAC1C,SAAS;aACV;YAED,IAAI,MAAM,CAAC;YACX,IAAI;gBACF,MAAM,GAAG,IAAA,8CAA2B,EAAC,EAAE,CAAC,CAAC,CAAC,CAAC,MAAM,EAAE,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC;aACxE;YAAC,OAAO,KAAK,EAAE;gBACd,IAAI,CAAC,WAAW,CAAC;oBACf,IAAI;oBACJ,IAAI;oBACJ,OAAO,EAAE,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC;iBAChE,CAAC,CAAC;gBACH,SAAS;aACV;YAED,mDAAmD;YACnD,MAAM,UAAU,GAAG,MAAM,EAAE,CAAC,QAAQ,CAAC,MAAM,EAAE,IAAI,CAAC,CAAC;YACnD,MAAM,OAAO,GAAG,uBAAA,IAAI,0GAA6B,MAAjC,IAAI,EAA8B,UAAU,CAAC,CAAC;YAE9D,IAAI,CAAC,WAAW,CAAC,EAAE,IAAI,EAAE,IAAI,EAAE,OAAO,EAAE,CAAC,CAAC;YAE1C,IAAI,oBAAoB;gBAAE,OAAO;SAClC;IACH,CAAC;IAE
S,KAAK,CAAC,SAAS,CACvB,MAAc,EACd,MAE0D,EAC1D,OAAuB;QAEvB,MAAM,IAAI,GAAG,MAAe,CAAC;QAC7B,MAAM,EAAE,WAAW,GAAG,MAAM,EAAE,MAAM,EAAE,GAAG,UAAU,EAAE,GAAG,MAAM,CAAC;QAC/D,MAAM,oBAAoB,GAAG,OAAO,WAAW,KAAK,QAAQ,IAAI,WAAW,EAAE,QAAQ,EAAE,IAAI,CAAC;QAC5F,MAAM,EAAE,kBAAkB,GAAG,4BAA4B,EAAE,GAAG,OAAO,IAAI,EAAE,CAAC;QAE5E,qCAAqC;QACrC,MAAM,UAAU,GAAG,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,IAAI,EAA6B,EAAE;YACtE,IAAI,IAAA,2BAAkB,EAAC,IAAI,CAAC,EAAE;gBAC5B,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE;oBACnB,MAAM,IAAI,mBAAW,CAAC,uEAAuE,CAAC,CAAC;iBAChG;gBAED,OAAO;oBACL,IAAI,EAAE,UAAU;oBAChB,QAAQ,EAAE;wBACR,QAAQ,EAAE,IAAI,CAAC,SAAS;wBACxB,IAAI,EAAE,IAAI,CAAC,QAAQ,CAAC,IAAI;wBACxB,WAAW,EAAE,IAAI,CAAC,QAAQ,CAAC,WAAW,IAAI,EAAE;wBAC5C,UAAU,EAAE,IAAI,CAAC,QAAQ,CAAC,UAAiB;wBAC3C,KAAK,EAAE,IAAI,CAAC,SAAS;wBACrB,MAAM,EAAE,IAAI;qBACb;iBACF,CAAC;aACH;YAED,OAAO,IAAwC,CAAC;QAClD,CAAC,CAAC,CAAC;QAEH,MAAM,eAAe,GAA0C,EAAE,CAAC;QAClE,KAAK,MAAM,CAAC,IAAI,UAAU,EAAE;YAC1B,IAAI,CAAC,CAAC,IAAI,KAAK,UAAU,EAAE;gBACzB,eAAe,CAAC,CAAC,CAAC,QAAQ,CAAC,IAAI,IAAI,CAAC,CAAC,QAAQ,CAAC,QAAQ,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC;aAC3E;SACF;QAED,MAAM,KAAK,GACT,OAAO,IAAI,MAAM,CAAC,CAAC;YACjB,UAAU,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CACnB,CAAC,CAAC,IAAI,KAAK,UAAU,CAAC,CAAC;gBACrB;oBACE,IAAI,EAAE,UAAU;oBAChB,QAAQ,EAAE;wBACR,IAAI,EAAE,CAAC,CAAC,QAAQ,CAAC,IAAI,IAAI,CAAC,CAAC,QAAQ,CAAC,QAAQ,CAAC,IAAI;wBACjD,UAAU,EAAE,CAAC,CAAC,QAAQ,CAAC,UAAqC;wBAC5D,WAAW,EAAE,CAAC,CAAC,QAAQ,CAAC,WAAW;wBACnC,MAAM,EAAE,CAAC,CAAC,QAAQ,CAAC,MAAM;qBAC1B;iBACF;gBACH,CAAC,CAAE,CAAmC,CACvC;YACH,CAAC,CAAE,SAAiB,CAAC;QAEvB,KAAK,MAAM,OAAO,IAAI,MAAM,CAAC,QAAQ,EAAE;YACrC,IAAI,CAAC,WAAW,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;SAClC;QAED,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,kBAAkB,EAAE,EAAE,CAAC,EAAE;YAC3C,MAAM,cAAc,GAAmB,MAAM,IAAI,CAAC,qBAAqB,CACrE,MAAM,EACN;gBACE,GAAG,UAAU;gBACb,WAAW;gBACX,KAAK;gBACL,QAAQ,EAAE,CAAC,GAAG,IAAI,CAAC,QAAQ,CAAC;aAC7B,EACD,OAAO,CACR,CAAC;YACF,MAAM,OAAO,GAAG,cAAc,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,OAAO,CAAC;YACnD,IAAI,CAAC,OAAO,EAAE;gBACZ,MAAM,IAAI,mBAAW,CAAC,4CAA4C,CAAC,CAAC;aACrE;YACD,IAAI,CAAC,OAAO,CAAC,UAAU,EAAE,MAAM,EAAE;gBAC/B,OAAO;aACR;YAED,KAAK,MAAM,SAAS,IAAI,OAAO,CAAC,UAAU,EAAE;gBAC1C,IAAI,SAAS,CAAC,IAAI,KAAK,UAAU;oBAAE,SAAS;gBAC5C,MAAM,YAAY,GAAG,SAAS,CAAC,EAAE,CAAC;gBAClC,MAAM,EAAE,IAAI,EAAE,SAAS,EAAE,IAAI,EAAE,GAAG,SAAS,CAAC,QAAQ,CAAC;gBACrD,MAAM,EAAE,GAAG,eAAe,CAAC,IAAI,CAAC,CAAC;gBAEjC,IAAI,CAAC,EAAE,EAAE;oBACP,MAAM,OAAO,GAAG,sBAAsB,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,4BAA4B,MAAM,CAAC,IAAI,CAC/F,eAAe,CAChB;yBACE,GAAG,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC;yBACnC,IAAI,CAAC,IAAI,CAAC,oBAAoB,CAAC;oBAElC,IAAI,CAAC,WAAW,CAAC,EAAE,IAAI,EAAE,YAAY,EAAE,OAAO,EAAE,CAAC,CAAC;oBAClD,SAAS;iBACV;qBAAM,IAAI,oBAAoB,IAAI,oBAAoB,KAAK,IAAI,EAAE;oBAChE,MAAM,OAAO,GAAG,sBAAsB,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,KAAK,IAAI,CAAC,SAAS,CAC3E,oBAAoB,CACrB,8BAA8B,CAAC;oBAEhC,IAAI,CAAC,WAAW,CAAC,EAAE,IAAI,EAAE,YAAY,EAAE,OAAO,EAAE,CAAC,CAAC;oBAClD,SAAS;iBACV;gBAED,IAAI,MAAM,CAAC;gBACX,IAAI;oBACF,MAAM,GAAG,IAAA,8CAA2B,EAAC,EAAE,CAAC,CAAC,CAAC,CAAC,MAAM,EAAE,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC;iBACxE;gBAAC,OAAO,KAAK,EAAE;oBACd,MAAM,OAAO,GAAG,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;oBACvE,IAAI,CAAC,WAAW,CAAC,EAAE,IAAI,EAAE,YAAY,EAAE,OAAO,EAAE,CAAC,CAAC;oBAClD,SAAS;iBACV;gBAED,mDAAmD;gBACnD,MAAM,UAAU,GAAG,MAAM,EAAE,CAAC,QAAQ,CAAC,MAAM,EAAE,IAAI,CAAC,CAAC;gBACnD,MAAM,OAAO,GAAG,uBAAA,IAAI,0GAA6B,MAAjC,IAAI,EAA8B,UAAU,CAAC,CAAC;gBAC9D,IAAI,CAAC,WAAW,CAAC,EAAE,IAAI,EAAE,YAAY,EAAE,OAAO,EAAE,CAAC,CAAC;gBAElD,IAAI,oBAAoB,EAAE;oBACxB,OAAO;iBAC
R;aACF;SACF;QAED,OAAO;IACT,CAAC;CASF;AAxcD,oEAwcC;;IAhZG,OAAO,uBAAA,IAAI,8FAAiB,MAArB,IAAI,CAAmB,CAAC,OAAO,IAAI,IAAI,CAAC;AACjD,CAAC;IAYC,IAAI,CAAC,GAAG,IAAI,CAAC,QAAQ,CAAC,MAAM,CAAC;IAC7B,OAAO,CAAC,EAAE,GAAG,CAAC,EAAE;QACd,MAAM,OAAO,GAAG,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC;QACjC,IAAI,IAAA,wCAAkB,EAAC,OAAO,CAAC,EAAE;YAC/B,MAAM,EAAE,aAAa,EAAE,GAAG,IAAI,EAAE,GAAG,OAAO,CAAC;YAE3C,2BAA2B;YAC3B,MAAM,GAAG,GAAyC;gBAChD,GAAG,IAAI;gBACP,OAAO,EAAG,OAAiC,CAAC,OAAO,IAAI,IAAI;gBAC3D,OAAO,EAAG,OAAiC,CAAC,OAAO,IAAI,IAAI;aAC5D,CAAC;YACF,IAAI,aAAa,EAAE;gBACjB,GAAG,CAAC,aAAa,GAAG,aAAa,CAAC;aACnC;YACD,OAAO,GAAG,CAAC;SACZ;KACF;IACD,MAAM,IAAI,mBAAW,CAAC,4EAA4E,CAAC,CAAC;AACtG,CAAC;IAYC,KAAK,IAAI,CAAC,GAAG,IAAI,CAAC,QAAQ,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,EAAE,EAAE;QAClD,MAAM,OAAO,GAAG,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC;QACjC,IAAI,IAAA,wCAAkB,EAAC,OAAO,CAAC,IAAI,OAAO,EAAE,aAAa,EAAE;YACzD,OAAO,OAAO,CAAC,aAAa,CAAC;SAC9B;QACD,IAAI,IAAA,wCAAkB,EAAC,OAAO,CAAC,IAAI,OAAO,EAAE,UAAU,EAAE,MAAM,EAAE;YAC9D,OAAO,OAAO,CAAC,UAAU,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAAC;SAC5C;KACF;IAED,OAAO;AACT,CAAC;IAYC,KAAK,IAAI,CAAC,GAAG,IAAI,CAAC,QAAQ,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,EAAE,EAAE;QAClD,MAAM,OAAO,GAAG,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC;QACjC,IAAI,IAAA,uCAAiB,EAAC,OAAO,CAAC,IAAI,OAAO,CAAC,OAAO,IAAI,IAAI,EAAE;YACzD,OAAO,OAAO,CAAC,OAAO,CAAC;SACxB;QACD,IACE,IAAA,mCAAa,EAAC,OAAO,CAAC;YACtB,OAAO,CAAC,OAAO,IAAI,IAAI;YACvB,OAAO,OAAO,CAAC,OAAO,KAAK,QAAQ;YACnC,IAAI,CAAC,QAAQ,CAAC,IAAI,CAChB,CAAC,CAAC,EAAE,EAAE,CACJ,CAAC,CAAC,IAAI,KAAK,WAAW;gBACtB,CAAC,CAAC,UAAU,EAAE,IAAI,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,IAAI,KAAK,UAAU,IAAI,CAAC,CAAC,EAAE,KAAK,OAAO,CAAC,YAAY,CAAC,CACpF,EACD;YACA,OAAO,OAAO,CAAC,OAAO,CAAC;SACxB;KACF;IAED,OAAO;AACT,CAAC;IAQC,MAAM,KAAK,GAAoB;QAC7B,iBAAiB,EAAE,CAAC;QACpB,aAAa,EAAE,CAAC;QAChB,YAAY,EAAE,CAAC;KAChB,CAAC;IACF,KAAK,MAAM,EAAE,KAAK,EAAE,IAAI,IAAI,CAAC,gBAAgB,EAAE;QAC7C,IAAI,KAAK,EAAE;YACT,KAAK,CAAC,iBAAiB,IAAI,KAAK,CAAC,iBAAiB,CAAC;YACnD,KAAK,CAAC,aAAa,IAAI,KAAK,CAAC,aAAa,CAAC;YAC3C,KAAK,CAAC,YAAY,IAAI,KAAK,CAAC,YAAY,CAAC;SAC1C;KACF;IACD,OAAO,KAAK,CAAC;AACf,CAAC,uGAgCe,MAAkC;IAChD,IAAI,MAAM,CAAC,CAAC,IAAI,IAAI,IAAI,MAAM,CAAC,CAAC,GAAG,CAAC,EAAE;QACpC,MAAM,IAAI,mBAAW,CACnB,8HAA8H,CAC/H,CAAC;KACH;AACH,CAAC,iIAuP4B,UAAmB;IAC9C,OAAO,CACL,OAAO,UAAU,KAAK,QAAQ,CAAC,CAAC,CAAC,UAAU;QAC3C,CAAC,CAAC,UAAU,KAAK,SAAS,CAAC,CAAC,CAAC,WAAW;YACxC,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,UAAU,CAAC,CAC7B,CAAC;AACJ,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/lib/AbstractChatCompletionRunner.mjs b/node_modules/openai/lib/AbstractChatCompletionRunner.mjs new file mode 100644 index 0000000..d614b07 --- /dev/null +++ b/node_modules/openai/lib/AbstractChatCompletionRunner.mjs @@ -0,0 +1,368 @@ +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? 
f.value : state.get(receiver); +}; +var _AbstractChatCompletionRunner_instances, _AbstractChatCompletionRunner_getFinalContent, _AbstractChatCompletionRunner_getFinalMessage, _AbstractChatCompletionRunner_getFinalFunctionCall, _AbstractChatCompletionRunner_getFinalFunctionCallResult, _AbstractChatCompletionRunner_calculateTotalUsage, _AbstractChatCompletionRunner_validateParams, _AbstractChatCompletionRunner_stringifyFunctionCallResult; +import { OpenAIError } from "../error.mjs"; +import { isRunnableFunctionWithParse, } from "./RunnableFunction.mjs"; +import { isAssistantMessage, isFunctionMessage, isToolMessage } from "./chatCompletionUtils.mjs"; +import { EventStream } from "./EventStream.mjs"; +import { isAutoParsableTool, parseChatCompletion } from "../lib/parser.mjs"; +const DEFAULT_MAX_CHAT_COMPLETIONS = 10; +export class AbstractChatCompletionRunner extends EventStream { + constructor() { + super(...arguments); + _AbstractChatCompletionRunner_instances.add(this); + this._chatCompletions = []; + this.messages = []; + } + _addChatCompletion(chatCompletion) { + this._chatCompletions.push(chatCompletion); + this._emit('chatCompletion', chatCompletion); + const message = chatCompletion.choices[0]?.message; + if (message) + this._addMessage(message); + return chatCompletion; + } + _addMessage(message, emit = true) { + if (!('content' in message)) + message.content = null; + this.messages.push(message); + if (emit) { + this._emit('message', message); + if ((isFunctionMessage(message) || isToolMessage(message)) && message.content) { + // Note, this assumes that {role: 'tool', content: …} is always the result of a call of tool of type=function. + this._emit('functionCallResult', message.content); + } + else if (isAssistantMessage(message) && message.function_call) { + this._emit('functionCall', message.function_call); + } + else if (isAssistantMessage(message) && message.tool_calls) { + for (const tool_call of message.tool_calls) { + if (tool_call.type === 'function') { + this._emit('functionCall', tool_call.function); + } + } + } + } + } + /** + * @returns a promise that resolves with the final ChatCompletion, or rejects + * if an error occurred or the stream ended prematurely without producing a ChatCompletion. + */ + async finalChatCompletion() { + await this.done(); + const completion = this._chatCompletions[this._chatCompletions.length - 1]; + if (!completion) + throw new OpenAIError('stream ended without producing a ChatCompletion'); + return completion; + } + /** + * @returns a promise that resolves with the content of the final ChatCompletionMessage, or rejects + * if an error occurred or the stream ended prematurely without producing a ChatCompletionMessage. + */ + async finalContent() { + await this.done(); + return __classPrivateFieldGet(this, _AbstractChatCompletionRunner_instances, "m", _AbstractChatCompletionRunner_getFinalContent).call(this); + } + /** + * @returns a promise that resolves with the the final assistant ChatCompletionMessage response, + * or rejects if an error occurred or the stream ended prematurely without producing a ChatCompletionMessage. + */ + async finalMessage() { + await this.done(); + return __classPrivateFieldGet(this, _AbstractChatCompletionRunner_instances, "m", _AbstractChatCompletionRunner_getFinalMessage).call(this); + } + /** + * @returns a promise that resolves with the content of the final FunctionCall, or rejects + * if an error occurred or the stream ended prematurely without producing a ChatCompletionMessage. 
+ */ + async finalFunctionCall() { + await this.done(); + return __classPrivateFieldGet(this, _AbstractChatCompletionRunner_instances, "m", _AbstractChatCompletionRunner_getFinalFunctionCall).call(this); + } + async finalFunctionCallResult() { + await this.done(); + return __classPrivateFieldGet(this, _AbstractChatCompletionRunner_instances, "m", _AbstractChatCompletionRunner_getFinalFunctionCallResult).call(this); + } + async totalUsage() { + await this.done(); + return __classPrivateFieldGet(this, _AbstractChatCompletionRunner_instances, "m", _AbstractChatCompletionRunner_calculateTotalUsage).call(this); + } + allChatCompletions() { + return [...this._chatCompletions]; + } + _emitFinal() { + const completion = this._chatCompletions[this._chatCompletions.length - 1]; + if (completion) + this._emit('finalChatCompletion', completion); + const finalMessage = __classPrivateFieldGet(this, _AbstractChatCompletionRunner_instances, "m", _AbstractChatCompletionRunner_getFinalMessage).call(this); + if (finalMessage) + this._emit('finalMessage', finalMessage); + const finalContent = __classPrivateFieldGet(this, _AbstractChatCompletionRunner_instances, "m", _AbstractChatCompletionRunner_getFinalContent).call(this); + if (finalContent) + this._emit('finalContent', finalContent); + const finalFunctionCall = __classPrivateFieldGet(this, _AbstractChatCompletionRunner_instances, "m", _AbstractChatCompletionRunner_getFinalFunctionCall).call(this); + if (finalFunctionCall) + this._emit('finalFunctionCall', finalFunctionCall); + const finalFunctionCallResult = __classPrivateFieldGet(this, _AbstractChatCompletionRunner_instances, "m", _AbstractChatCompletionRunner_getFinalFunctionCallResult).call(this); + if (finalFunctionCallResult != null) + this._emit('finalFunctionCallResult', finalFunctionCallResult); + if (this._chatCompletions.some((c) => c.usage)) { + this._emit('totalUsage', __classPrivateFieldGet(this, _AbstractChatCompletionRunner_instances, "m", _AbstractChatCompletionRunner_calculateTotalUsage).call(this)); + } + } + async _createChatCompletion(client, params, options) { + const signal = options?.signal; + if (signal) { + if (signal.aborted) + this.controller.abort(); + signal.addEventListener('abort', () => this.controller.abort()); + } + __classPrivateFieldGet(this, _AbstractChatCompletionRunner_instances, "m", _AbstractChatCompletionRunner_validateParams).call(this, params); + const chatCompletion = await client.chat.completions.create({ ...params, stream: false }, { ...options, signal: this.controller.signal }); + this._connected(); + return this._addChatCompletion(parseChatCompletion(chatCompletion, params)); + } + async _runChatCompletion(client, params, options) { + for (const message of params.messages) { + this._addMessage(message, false); + } + return await this._createChatCompletion(client, params, options); + } + async _runFunctions(client, params, options) { + const role = 'function'; + const { function_call = 'auto', stream, ...restParams } = params; + const singleFunctionToCall = typeof function_call !== 'string' && function_call?.name; + const { maxChatCompletions = DEFAULT_MAX_CHAT_COMPLETIONS } = options || {}; + const functionsByName = {}; + for (const f of params.functions) { + functionsByName[f.name || f.function.name] = f; + } + const functions = params.functions.map((f) => ({ + name: f.name || f.function.name, + parameters: f.parameters, + description: f.description, + })); + for (const message of params.messages) { + this._addMessage(message, false); + } + for (let i = 0; 
i < maxChatCompletions; ++i) { + const chatCompletion = await this._createChatCompletion(client, { + ...restParams, + function_call, + functions, + messages: [...this.messages], + }, options); + const message = chatCompletion.choices[0]?.message; + if (!message) { + throw new OpenAIError(`missing message in ChatCompletion response`); + } + if (!message.function_call) + return; + const { name, arguments: args } = message.function_call; + const fn = functionsByName[name]; + if (!fn) { + const content = `Invalid function_call: ${JSON.stringify(name)}. Available options are: ${functions + .map((f) => JSON.stringify(f.name)) + .join(', ')}. Please try again`; + this._addMessage({ role, name, content }); + continue; + } + else if (singleFunctionToCall && singleFunctionToCall !== name) { + const content = `Invalid function_call: ${JSON.stringify(name)}. ${JSON.stringify(singleFunctionToCall)} requested. Please try again`; + this._addMessage({ role, name, content }); + continue; + } + let parsed; + try { + parsed = isRunnableFunctionWithParse(fn) ? await fn.parse(args) : args; + } + catch (error) { + this._addMessage({ + role, + name, + content: error instanceof Error ? error.message : String(error), + }); + continue; + } + // @ts-expect-error it can't rule out `never` type. + const rawContent = await fn.function(parsed, this); + const content = __classPrivateFieldGet(this, _AbstractChatCompletionRunner_instances, "m", _AbstractChatCompletionRunner_stringifyFunctionCallResult).call(this, rawContent); + this._addMessage({ role, name, content }); + if (singleFunctionToCall) + return; + } + } + async _runTools(client, params, options) { + const role = 'tool'; + const { tool_choice = 'auto', stream, ...restParams } = params; + const singleFunctionToCall = typeof tool_choice !== 'string' && tool_choice?.function?.name; + const { maxChatCompletions = DEFAULT_MAX_CHAT_COMPLETIONS } = options || {}; + // TODO(someday): clean this logic up + const inputTools = params.tools.map((tool) => { + if (isAutoParsableTool(tool)) { + if (!tool.$callback) { + throw new OpenAIError('Tool given to `.runTools()` that does not have an associated function'); + } + return { + type: 'function', + function: { + function: tool.$callback, + name: tool.function.name, + description: tool.function.description || '', + parameters: tool.function.parameters, + parse: tool.$parseRaw, + strict: true, + }, + }; + } + return tool; + }); + const functionsByName = {}; + for (const f of inputTools) { + if (f.type === 'function') { + functionsByName[f.function.name || f.function.function.name] = f.function; + } + } + const tools = 'tools' in params ? + inputTools.map((t) => t.type === 'function' ? 
+ { + type: 'function', + function: { + name: t.function.name || t.function.function.name, + parameters: t.function.parameters, + description: t.function.description, + strict: t.function.strict, + }, + } + : t) + : undefined; + for (const message of params.messages) { + this._addMessage(message, false); + } + for (let i = 0; i < maxChatCompletions; ++i) { + const chatCompletion = await this._createChatCompletion(client, { + ...restParams, + tool_choice, + tools, + messages: [...this.messages], + }, options); + const message = chatCompletion.choices[0]?.message; + if (!message) { + throw new OpenAIError(`missing message in ChatCompletion response`); + } + if (!message.tool_calls?.length) { + return; + } + for (const tool_call of message.tool_calls) { + if (tool_call.type !== 'function') + continue; + const tool_call_id = tool_call.id; + const { name, arguments: args } = tool_call.function; + const fn = functionsByName[name]; + if (!fn) { + const content = `Invalid tool_call: ${JSON.stringify(name)}. Available options are: ${Object.keys(functionsByName) + .map((name) => JSON.stringify(name)) + .join(', ')}. Please try again`; + this._addMessage({ role, tool_call_id, content }); + continue; + } + else if (singleFunctionToCall && singleFunctionToCall !== name) { + const content = `Invalid tool_call: ${JSON.stringify(name)}. ${JSON.stringify(singleFunctionToCall)} requested. Please try again`; + this._addMessage({ role, tool_call_id, content }); + continue; + } + let parsed; + try { + parsed = isRunnableFunctionWithParse(fn) ? await fn.parse(args) : args; + } + catch (error) { + const content = error instanceof Error ? error.message : String(error); + this._addMessage({ role, tool_call_id, content }); + continue; + } + // @ts-expect-error it can't rule out `never` type. + const rawContent = await fn.function(parsed, this); + const content = __classPrivateFieldGet(this, _AbstractChatCompletionRunner_instances, "m", _AbstractChatCompletionRunner_stringifyFunctionCallResult).call(this, rawContent); + this._addMessage({ role, tool_call_id, content }); + if (singleFunctionToCall) { + return; + } + } + } + return; + } +} +_AbstractChatCompletionRunner_instances = new WeakSet(), _AbstractChatCompletionRunner_getFinalContent = function _AbstractChatCompletionRunner_getFinalContent() { + return __classPrivateFieldGet(this, _AbstractChatCompletionRunner_instances, "m", _AbstractChatCompletionRunner_getFinalMessage).call(this).content ?? null; +}, _AbstractChatCompletionRunner_getFinalMessage = function _AbstractChatCompletionRunner_getFinalMessage() { + let i = this.messages.length; + while (i-- > 0) { + const message = this.messages[i]; + if (isAssistantMessage(message)) { + const { function_call, ...rest } = message; + // TODO: support audio here + const ret = { + ...rest, + content: message.content ?? null, + refusal: message.refusal ?? 
null, + }; + if (function_call) { + ret.function_call = function_call; + } + return ret; + } + } + throw new OpenAIError('stream ended without producing a ChatCompletionMessage with role=assistant'); +}, _AbstractChatCompletionRunner_getFinalFunctionCall = function _AbstractChatCompletionRunner_getFinalFunctionCall() { + for (let i = this.messages.length - 1; i >= 0; i--) { + const message = this.messages[i]; + if (isAssistantMessage(message) && message?.function_call) { + return message.function_call; + } + if (isAssistantMessage(message) && message?.tool_calls?.length) { + return message.tool_calls.at(-1)?.function; + } + } + return; +}, _AbstractChatCompletionRunner_getFinalFunctionCallResult = function _AbstractChatCompletionRunner_getFinalFunctionCallResult() { + for (let i = this.messages.length - 1; i >= 0; i--) { + const message = this.messages[i]; + if (isFunctionMessage(message) && message.content != null) { + return message.content; + } + if (isToolMessage(message) && + message.content != null && + typeof message.content === 'string' && + this.messages.some((x) => x.role === 'assistant' && + x.tool_calls?.some((y) => y.type === 'function' && y.id === message.tool_call_id))) { + return message.content; + } + } + return; +}, _AbstractChatCompletionRunner_calculateTotalUsage = function _AbstractChatCompletionRunner_calculateTotalUsage() { + const total = { + completion_tokens: 0, + prompt_tokens: 0, + total_tokens: 0, + }; + for (const { usage } of this._chatCompletions) { + if (usage) { + total.completion_tokens += usage.completion_tokens; + total.prompt_tokens += usage.prompt_tokens; + total.total_tokens += usage.total_tokens; + } + } + return total; +}, _AbstractChatCompletionRunner_validateParams = function _AbstractChatCompletionRunner_validateParams(params) { + if (params.n != null && params.n > 1) { + throw new OpenAIError('ChatCompletion convenience helpers only support n=1 at this time. To use n>1, please use chat.completions.create() directly.'); + } +}, _AbstractChatCompletionRunner_stringifyFunctionCallResult = function _AbstractChatCompletionRunner_stringifyFunctionCallResult(rawContent) { + return (typeof rawContent === 'string' ? rawContent + : rawContent === undefined ? 
'undefined' + : JSON.stringify(rawContent)); +}; +//# sourceMappingURL=AbstractChatCompletionRunner.mjs.map \ No newline at end of file diff --git a/node_modules/openai/lib/AbstractChatCompletionRunner.mjs.map b/node_modules/openai/lib/AbstractChatCompletionRunner.mjs.map new file mode 100644 index 0000000..68ca212 --- /dev/null +++ b/node_modules/openai/lib/AbstractChatCompletionRunner.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"AbstractChatCompletionRunner.mjs","sourceRoot":"","sources":["../src/lib/AbstractChatCompletionRunner.ts"],"names":[],"mappings":";;;;;;OASO,EAAE,WAAW,EAAE;OACf,EAEL,2BAA2B,GAG5B;OAMM,EAAE,kBAAkB,EAAE,iBAAiB,EAAE,aAAa,EAAE;OACxD,EAAc,WAAW,EAAE;OAG3B,EAAE,kBAAkB,EAAE,mBAAmB,EAAE;AAElD,MAAM,4BAA4B,GAAG,EAAE,CAAC;AAMxC,MAAM,OAAO,4BAGX,SAAQ,WAAuB;IAHjC;;;QAIY,qBAAgB,GAAoC,EAAE,CAAC;QACjE,aAAQ,GAAiC,EAAE,CAAC;IAmc9C,CAAC;IAjcW,kBAAkB,CAE1B,cAA6C;QAE7C,IAAI,CAAC,gBAAgB,CAAC,IAAI,CAAC,cAAc,CAAC,CAAC;QAC3C,IAAI,CAAC,KAAK,CAAC,gBAAgB,EAAE,cAAc,CAAC,CAAC;QAC7C,MAAM,OAAO,GAAG,cAAc,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,OAAO,CAAC;QACnD,IAAI,OAAO;YAAE,IAAI,CAAC,WAAW,CAAC,OAAqC,CAAC,CAAC;QACrE,OAAO,cAAc,CAAC;IACxB,CAAC;IAES,WAAW,CAEnB,OAAmC,EACnC,IAAI,GAAG,IAAI;QAEX,IAAI,CAAC,CAAC,SAAS,IAAI,OAAO,CAAC;YAAE,OAAO,CAAC,OAAO,GAAG,IAAI,CAAC;QAEpD,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;QAE5B,IAAI,IAAI,EAAE;YACR,IAAI,CAAC,KAAK,CAAC,SAAS,EAAE,OAAO,CAAC,CAAC;YAC/B,IAAI,CAAC,iBAAiB,CAAC,OAAO,CAAC,IAAI,aAAa,CAAC,OAAO,CAAC,CAAC,IAAI,OAAO,CAAC,OAAO,EAAE;gBAC7E,8GAA8G;gBAC9G,IAAI,CAAC,KAAK,CAAC,oBAAoB,EAAE,OAAO,CAAC,OAAiB,CAAC,CAAC;aAC7D;iBAAM,IAAI,kBAAkB,CAAC,OAAO,CAAC,IAAI,OAAO,CAAC,aAAa,EAAE;gBAC/D,IAAI,CAAC,KAAK,CAAC,cAAc,EAAE,OAAO,CAAC,aAAa,CAAC,CAAC;aACnD;iBAAM,IAAI,kBAAkB,CAAC,OAAO,CAAC,IAAI,OAAO,CAAC,UAAU,EAAE;gBAC5D,KAAK,MAAM,SAAS,IAAI,OAAO,CAAC,UAAU,EAAE;oBAC1C,IAAI,SAAS,CAAC,IAAI,KAAK,UAAU,EAAE;wBACjC,IAAI,CAAC,KAAK,CAAC,cAAc,EAAE,SAAS,CAAC,QAAQ,CAAC,CAAC;qBAChD;iBACF;aACF;SACF;IACH,CAAC;IAED;;;OAGG;IACH,KAAK,CAAC,mBAAmB;QACvB,MAAM,IAAI,CAAC,IAAI,EAAE,CAAC;QAClB,MAAM,UAAU,GAAG,IAAI,CAAC,gBAAgB,CAAC,IAAI,CAAC,gBAAgB,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC;QAC3E,IAAI,CAAC,UAAU;YAAE,MAAM,IAAI,WAAW,CAAC,iDAAiD,CAAC,CAAC;QAC1F,OAAO,UAAU,CAAC;IACpB,CAAC;IAMD;;;OAGG;IACH,KAAK,CAAC,YAAY;QAChB,MAAM,IAAI,CAAC,IAAI,EAAE,CAAC;QAClB,OAAO,uBAAA,IAAI,8FAAiB,MAArB,IAAI,CAAmB,CAAC;IACjC,CAAC;IAwBD;;;OAGG;IACH,KAAK,CAAC,YAAY;QAChB,MAAM,IAAI,CAAC,IAAI,EAAE,CAAC;QAClB,OAAO,uBAAA,IAAI,8FAAiB,MAArB,IAAI,CAAmB,CAAC;IACjC,CAAC;IAgBD;;;OAGG;IACH,KAAK,CAAC,iBAAiB;QACrB,MAAM,IAAI,CAAC,IAAI,EAAE,CAAC;QAClB,OAAO,uBAAA,IAAI,mGAAsB,MAA1B,IAAI,CAAwB,CAAC;IACtC,CAAC;IAyBD,KAAK,CAAC,uBAAuB;QAC3B,MAAM,IAAI,CAAC,IAAI,EAAE,CAAC;QAClB,OAAO,uBAAA,IAAI,yGAA4B,MAAhC,IAAI,CAA8B,CAAC;IAC5C,CAAC;IAkBD,KAAK,CAAC,UAAU;QACd,MAAM,IAAI,CAAC,IAAI,EAAE,CAAC;QAClB,OAAO,uBAAA,IAAI,kGAAqB,MAAzB,IAAI,CAAuB,CAAC;IACrC,CAAC;IAED,kBAAkB;QAChB,OAAO,CAAC,GAAG,IAAI,CAAC,gBAAgB,CAAC,CAAC;IACpC,CAAC;IAEkB,UAAU;QAG3B,MAAM,UAAU,GAAG,IAAI,CAAC,gBAAgB,CAAC,IAAI,CAAC,gBAAgB,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC;QAC3E,IAAI,UAAU;YAAE,IAAI,CAAC,KAAK,CAAC,qBAAqB,EAAE,UAAU,CAAC,CAAC;QAC9D,MAAM,YAAY,GAAG,uBAAA,IAAI,8FAAiB,MAArB,IAAI,CAAmB,CAAC;QAC7C,IAAI,YAAY;YAAE,IAAI,CAAC,KAAK,CAAC,cAAc,EAAE,YAAY,CAAC,CAAC;QAC3D,MAAM,YAAY,GAAG,uBAAA,IAAI,8FAAiB,MAArB,IAAI,CAAmB,CAAC;QAC7C,IAAI,YAAY;YAAE,IAAI,CAAC,KAAK,CAAC,cAAc,EAAE,YAAY,CAAC,CAAC;QAE3D,MAAM,iBAAiB,GAAG,uBAAA,IAAI,mGAAsB,MAA1B,IAAI,CAAwB,CAAC;QACvD,IAAI,iBAAiB;YAAE,IAAI,CAAC,KAAK,CAAC,mBAAmB,EAAE,iBAAiB,CAAC,CAAC;QAE1E,MAAM,uBAAuB,GAAG,uBAAA,IAAI,yGAA4B,MAAhC,IAAI,CAA8B,CAAC;QACnE,IAAI,uBAAuB,IAAI,IAAI;YAAE,IAAI,CAAC,KAAK,C
AAC,yBAAyB,EAAE,uBAAuB,CAAC,CAAC;QAEpG,IAAI,IAAI,CAAC,gBAAgB,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,EAAE;YAC9C,IAAI,CAAC,KAAK,CAAC,YAAY,EAAE,uBAAA,IAAI,kGAAqB,MAAzB,IAAI,CAAuB,CAAC,CAAC;SACvD;IACH,CAAC;IAUS,KAAK,CAAC,qBAAqB,CACnC,MAAc,EACd,MAAkC,EAClC,OAA6B;QAE7B,MAAM,MAAM,GAAG,OAAO,EAAE,MAAM,CAAC;QAC/B,IAAI,MAAM,EAAE;YACV,IAAI,MAAM,CAAC,OAAO;gBAAE,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,CAAC;YAC5C,MAAM,CAAC,gBAAgB,CAAC,OAAO,EAAE,GAAG,EAAE,CAAC,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,CAAC,CAAC;SACjE;QACD,uBAAA,IAAI,6FAAgB,MAApB,IAAI,EAAiB,MAAM,CAAC,CAAC;QAE7B,MAAM,cAAc,GAAG,MAAM,MAAM,CAAC,IAAI,CAAC,WAAW,CAAC,MAAM,CACzD,EAAE,GAAG,MAAM,EAAE,MAAM,EAAE,KAAK,EAAE,EAC5B,EAAE,GAAG,OAAO,EAAE,MAAM,EAAE,IAAI,CAAC,UAAU,CAAC,MAAM,EAAE,CAC/C,CAAC;QACF,IAAI,CAAC,UAAU,EAAE,CAAC;QAClB,OAAO,IAAI,CAAC,kBAAkB,CAAC,mBAAmB,CAAC,cAAc,EAAE,MAAM,CAAC,CAAC,CAAC;IAC9E,CAAC;IAES,KAAK,CAAC,kBAAkB,CAChC,MAAc,EACd,MAAkC,EAClC,OAA6B;QAE7B,KAAK,MAAM,OAAO,IAAI,MAAM,CAAC,QAAQ,EAAE;YACrC,IAAI,CAAC,WAAW,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;SAClC;QACD,OAAO,MAAM,IAAI,CAAC,qBAAqB,CAAC,MAAM,EAAE,MAAM,EAAE,OAAO,CAAC,CAAC;IACnE,CAAC;IAES,KAAK,CAAC,aAAa,CAC3B,MAAc,EACd,MAE8D,EAC9D,OAAuB;QAEvB,MAAM,IAAI,GAAG,UAAmB,CAAC;QACjC,MAAM,EAAE,aAAa,GAAG,MAAM,EAAE,MAAM,EAAE,GAAG,UAAU,EAAE,GAAG,MAAM,CAAC;QACjE,MAAM,oBAAoB,GAAG,OAAO,aAAa,KAAK,QAAQ,IAAI,aAAa,EAAE,IAAI,CAAC;QACtF,MAAM,EAAE,kBAAkB,GAAG,4BAA4B,EAAE,GAAG,OAAO,IAAI,EAAE,CAAC;QAE5E,MAAM,eAAe,GAA0C,EAAE,CAAC;QAClE,KAAK,MAAM,CAAC,IAAI,MAAM,CAAC,SAAS,EAAE;YAChC,eAAe,CAAC,CAAC,CAAC,IAAI,IAAI,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SAChD;QAED,MAAM,SAAS,GAA0C,MAAM,CAAC,SAAS,CAAC,GAAG,CAC3E,CAAC,CAAC,EAAuC,EAAE,CAAC,CAAC;YAC3C,IAAI,EAAE,CAAC,CAAC,IAAI,IAAI,CAAC,CAAC,QAAQ,CAAC,IAAI;YAC/B,UAAU,EAAE,CAAC,CAAC,UAAqC;YACnD,WAAW,EAAE,CAAC,CAAC,WAAW;SAC3B,CAAC,CACH,CAAC;QAEF,KAAK,MAAM,OAAO,IAAI,MAAM,CAAC,QAAQ,EAAE;YACrC,IAAI,CAAC,WAAW,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;SAClC;QAED,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,kBAAkB,EAAE,EAAE,CAAC,EAAE;YAC3C,MAAM,cAAc,GAAmB,MAAM,IAAI,CAAC,qBAAqB,CACrE,MAAM,EACN;gBACE,GAAG,UAAU;gBACb,aAAa;gBACb,SAAS;gBACT,QAAQ,EAAE,CAAC,GAAG,IAAI,CAAC,QAAQ,CAAC;aAC7B,EACD,OAAO,CACR,CAAC;YACF,MAAM,OAAO,GAAG,cAAc,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,OAAO,CAAC;YACnD,IAAI,CAAC,OAAO,EAAE;gBACZ,MAAM,IAAI,WAAW,CAAC,4CAA4C,CAAC,CAAC;aACrE;YACD,IAAI,CAAC,OAAO,CAAC,aAAa;gBAAE,OAAO;YACnC,MAAM,EAAE,IAAI,EAAE,SAAS,EAAE,IAAI,EAAE,GAAG,OAAO,CAAC,aAAa,CAAC;YACxD,MAAM,EAAE,GAAG,eAAe,CAAC,IAAI,CAAC,CAAC;YACjC,IAAI,CAAC,EAAE,EAAE;gBACP,MAAM,OAAO,GAAG,0BAA0B,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,4BAA4B,SAAS;qBAChG,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC;qBAClC,IAAI,CAAC,IAAI,CAAC,oBAAoB,CAAC;gBAElC,IAAI,CAAC,WAAW,CAAC,EAAE,IAAI,EAAE,IAAI,EAAE,OAAO,EAAE,CAAC,CAAC;gBAC1C,SAAS;aACV;iBAAM,IAAI,oBAAoB,IAAI,oBAAoB,KAAK,IAAI,EAAE;gBAChE,MAAM,OAAO,GAAG,0BAA0B,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,KAAK,IAAI,CAAC,SAAS,CAC/E,oBAAoB,CACrB,8BAA8B,CAAC;gBAEhC,IAAI,CAAC,WAAW,CAAC,EAAE,IAAI,EAAE,IAAI,EAAE,OAAO,EAAE,CAAC,CAAC;gBAC1C,SAAS;aACV;YAED,IAAI,MAAM,CAAC;YACX,IAAI;gBACF,MAAM,GAAG,2BAA2B,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,MAAM,EAAE,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC;aACxE;YAAC,OAAO,KAAK,EAAE;gBACd,IAAI,CAAC,WAAW,CAAC;oBACf,IAAI;oBACJ,IAAI;oBACJ,OAAO,EAAE,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC;iBAChE,CAAC,CAAC;gBACH,SAAS;aACV;YAED,mDAAmD;YACnD,MAAM,UAAU,GAAG,MAAM,EAAE,CAAC,QAAQ,CAAC,MAAM,EAAE,IAAI,CAAC,CAAC;YACnD,MAAM,OAAO,GAAG,uBAAA,IAAI,0GAA6B,MAAjC,IAAI,EAA8B,UAAU,CAAC,CAAC;YAE9D,IAAI,CAAC,WAAW,CAAC,EAAE,IAAI,EAAE,IAAI,EAAE,OAAO,EAAE,CAAC,C
AAC;YAE1C,IAAI,oBAAoB;gBAAE,OAAO;SAClC;IACH,CAAC;IAES,KAAK,CAAC,SAAS,CACvB,MAAc,EACd,MAE0D,EAC1D,OAAuB;QAEvB,MAAM,IAAI,GAAG,MAAe,CAAC;QAC7B,MAAM,EAAE,WAAW,GAAG,MAAM,EAAE,MAAM,EAAE,GAAG,UAAU,EAAE,GAAG,MAAM,CAAC;QAC/D,MAAM,oBAAoB,GAAG,OAAO,WAAW,KAAK,QAAQ,IAAI,WAAW,EAAE,QAAQ,EAAE,IAAI,CAAC;QAC5F,MAAM,EAAE,kBAAkB,GAAG,4BAA4B,EAAE,GAAG,OAAO,IAAI,EAAE,CAAC;QAE5E,qCAAqC;QACrC,MAAM,UAAU,GAAG,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,IAAI,EAA6B,EAAE;YACtE,IAAI,kBAAkB,CAAC,IAAI,CAAC,EAAE;gBAC5B,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE;oBACnB,MAAM,IAAI,WAAW,CAAC,uEAAuE,CAAC,CAAC;iBAChG;gBAED,OAAO;oBACL,IAAI,EAAE,UAAU;oBAChB,QAAQ,EAAE;wBACR,QAAQ,EAAE,IAAI,CAAC,SAAS;wBACxB,IAAI,EAAE,IAAI,CAAC,QAAQ,CAAC,IAAI;wBACxB,WAAW,EAAE,IAAI,CAAC,QAAQ,CAAC,WAAW,IAAI,EAAE;wBAC5C,UAAU,EAAE,IAAI,CAAC,QAAQ,CAAC,UAAiB;wBAC3C,KAAK,EAAE,IAAI,CAAC,SAAS;wBACrB,MAAM,EAAE,IAAI;qBACb;iBACF,CAAC;aACH;YAED,OAAO,IAAwC,CAAC;QAClD,CAAC,CAAC,CAAC;QAEH,MAAM,eAAe,GAA0C,EAAE,CAAC;QAClE,KAAK,MAAM,CAAC,IAAI,UAAU,EAAE;YAC1B,IAAI,CAAC,CAAC,IAAI,KAAK,UAAU,EAAE;gBACzB,eAAe,CAAC,CAAC,CAAC,QAAQ,CAAC,IAAI,IAAI,CAAC,CAAC,QAAQ,CAAC,QAAQ,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC;aAC3E;SACF;QAED,MAAM,KAAK,GACT,OAAO,IAAI,MAAM,CAAC,CAAC;YACjB,UAAU,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CACnB,CAAC,CAAC,IAAI,KAAK,UAAU,CAAC,CAAC;gBACrB;oBACE,IAAI,EAAE,UAAU;oBAChB,QAAQ,EAAE;wBACR,IAAI,EAAE,CAAC,CAAC,QAAQ,CAAC,IAAI,IAAI,CAAC,CAAC,QAAQ,CAAC,QAAQ,CAAC,IAAI;wBACjD,UAAU,EAAE,CAAC,CAAC,QAAQ,CAAC,UAAqC;wBAC5D,WAAW,EAAE,CAAC,CAAC,QAAQ,CAAC,WAAW;wBACnC,MAAM,EAAE,CAAC,CAAC,QAAQ,CAAC,MAAM;qBAC1B;iBACF;gBACH,CAAC,CAAE,CAAmC,CACvC;YACH,CAAC,CAAE,SAAiB,CAAC;QAEvB,KAAK,MAAM,OAAO,IAAI,MAAM,CAAC,QAAQ,EAAE;YACrC,IAAI,CAAC,WAAW,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;SAClC;QAED,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,kBAAkB,EAAE,EAAE,CAAC,EAAE;YAC3C,MAAM,cAAc,GAAmB,MAAM,IAAI,CAAC,qBAAqB,CACrE,MAAM,EACN;gBACE,GAAG,UAAU;gBACb,WAAW;gBACX,KAAK;gBACL,QAAQ,EAAE,CAAC,GAAG,IAAI,CAAC,QAAQ,CAAC;aAC7B,EACD,OAAO,CACR,CAAC;YACF,MAAM,OAAO,GAAG,cAAc,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,OAAO,CAAC;YACnD,IAAI,CAAC,OAAO,EAAE;gBACZ,MAAM,IAAI,WAAW,CAAC,4CAA4C,CAAC,CAAC;aACrE;YACD,IAAI,CAAC,OAAO,CAAC,UAAU,EAAE,MAAM,EAAE;gBAC/B,OAAO;aACR;YAED,KAAK,MAAM,SAAS,IAAI,OAAO,CAAC,UAAU,EAAE;gBAC1C,IAAI,SAAS,CAAC,IAAI,KAAK,UAAU;oBAAE,SAAS;gBAC5C,MAAM,YAAY,GAAG,SAAS,CAAC,EAAE,CAAC;gBAClC,MAAM,EAAE,IAAI,EAAE,SAAS,EAAE,IAAI,EAAE,GAAG,SAAS,CAAC,QAAQ,CAAC;gBACrD,MAAM,EAAE,GAAG,eAAe,CAAC,IAAI,CAAC,CAAC;gBAEjC,IAAI,CAAC,EAAE,EAAE;oBACP,MAAM,OAAO,GAAG,sBAAsB,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,4BAA4B,MAAM,CAAC,IAAI,CAC/F,eAAe,CAChB;yBACE,GAAG,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC;yBACnC,IAAI,CAAC,IAAI,CAAC,oBAAoB,CAAC;oBAElC,IAAI,CAAC,WAAW,CAAC,EAAE,IAAI,EAAE,YAAY,EAAE,OAAO,EAAE,CAAC,CAAC;oBAClD,SAAS;iBACV;qBAAM,IAAI,oBAAoB,IAAI,oBAAoB,KAAK,IAAI,EAAE;oBAChE,MAAM,OAAO,GAAG,sBAAsB,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,KAAK,IAAI,CAAC,SAAS,CAC3E,oBAAoB,CACrB,8BAA8B,CAAC;oBAEhC,IAAI,CAAC,WAAW,CAAC,EAAE,IAAI,EAAE,YAAY,EAAE,OAAO,EAAE,CAAC,CAAC;oBAClD,SAAS;iBACV;gBAED,IAAI,MAAM,CAAC;gBACX,IAAI;oBACF,MAAM,GAAG,2BAA2B,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,MAAM,EAAE,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC;iBACxE;gBAAC,OAAO,KAAK,EAAE;oBACd,MAAM,OAAO,GAAG,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;oBACvE,IAAI,CAAC,WAAW,CAAC,EAAE,IAAI,EAAE,YAAY,EAAE,OAAO,EAAE,CAAC,CAAC;oBAClD,SAAS;iBACV;gBAED,mDAAmD;gBACnD,MAAM,UAAU,GAAG,MAAM,EAAE,CAAC,QAAQ,CAAC,MAAM,EAAE,IAAI,CAAC,CAAC;gBACnD,MAAM,OAAO,GAAG,uBAAA,IAAI,0GAA6B,MAAjC,IAAI,EAA8B,UAAU,CAAC,CAAC;gBAC9D,IAAI,CAAC,WAAW,CAAC,EAAE,IAAI,EAAE,YAAY,EAAE,OAAO,EAAE,CAAC,CAAC;
gBAElD,IAAI,oBAAoB,EAAE;oBACxB,OAAO;iBACR;aACF;SACF;QAED,OAAO;IACT,CAAC;CASF;;IAhZG,OAAO,uBAAA,IAAI,8FAAiB,MAArB,IAAI,CAAmB,CAAC,OAAO,IAAI,IAAI,CAAC;AACjD,CAAC;IAYC,IAAI,CAAC,GAAG,IAAI,CAAC,QAAQ,CAAC,MAAM,CAAC;IAC7B,OAAO,CAAC,EAAE,GAAG,CAAC,EAAE;QACd,MAAM,OAAO,GAAG,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC;QACjC,IAAI,kBAAkB,CAAC,OAAO,CAAC,EAAE;YAC/B,MAAM,EAAE,aAAa,EAAE,GAAG,IAAI,EAAE,GAAG,OAAO,CAAC;YAE3C,2BAA2B;YAC3B,MAAM,GAAG,GAAyC;gBAChD,GAAG,IAAI;gBACP,OAAO,EAAG,OAAiC,CAAC,OAAO,IAAI,IAAI;gBAC3D,OAAO,EAAG,OAAiC,CAAC,OAAO,IAAI,IAAI;aAC5D,CAAC;YACF,IAAI,aAAa,EAAE;gBACjB,GAAG,CAAC,aAAa,GAAG,aAAa,CAAC;aACnC;YACD,OAAO,GAAG,CAAC;SACZ;KACF;IACD,MAAM,IAAI,WAAW,CAAC,4EAA4E,CAAC,CAAC;AACtG,CAAC;IAYC,KAAK,IAAI,CAAC,GAAG,IAAI,CAAC,QAAQ,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,EAAE,EAAE;QAClD,MAAM,OAAO,GAAG,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC;QACjC,IAAI,kBAAkB,CAAC,OAAO,CAAC,IAAI,OAAO,EAAE,aAAa,EAAE;YACzD,OAAO,OAAO,CAAC,aAAa,CAAC;SAC9B;QACD,IAAI,kBAAkB,CAAC,OAAO,CAAC,IAAI,OAAO,EAAE,UAAU,EAAE,MAAM,EAAE;YAC9D,OAAO,OAAO,CAAC,UAAU,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAAC;SAC5C;KACF;IAED,OAAO;AACT,CAAC;IAYC,KAAK,IAAI,CAAC,GAAG,IAAI,CAAC,QAAQ,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,EAAE,EAAE;QAClD,MAAM,OAAO,GAAG,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC;QACjC,IAAI,iBAAiB,CAAC,OAAO,CAAC,IAAI,OAAO,CAAC,OAAO,IAAI,IAAI,EAAE;YACzD,OAAO,OAAO,CAAC,OAAO,CAAC;SACxB;QACD,IACE,aAAa,CAAC,OAAO,CAAC;YACtB,OAAO,CAAC,OAAO,IAAI,IAAI;YACvB,OAAO,OAAO,CAAC,OAAO,KAAK,QAAQ;YACnC,IAAI,CAAC,QAAQ,CAAC,IAAI,CAChB,CAAC,CAAC,EAAE,EAAE,CACJ,CAAC,CAAC,IAAI,KAAK,WAAW;gBACtB,CAAC,CAAC,UAAU,EAAE,IAAI,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,IAAI,KAAK,UAAU,IAAI,CAAC,CAAC,EAAE,KAAK,OAAO,CAAC,YAAY,CAAC,CACpF,EACD;YACA,OAAO,OAAO,CAAC,OAAO,CAAC;SACxB;KACF;IAED,OAAO;AACT,CAAC;IAQC,MAAM,KAAK,GAAoB;QAC7B,iBAAiB,EAAE,CAAC;QACpB,aAAa,EAAE,CAAC;QAChB,YAAY,EAAE,CAAC;KAChB,CAAC;IACF,KAAK,MAAM,EAAE,KAAK,EAAE,IAAI,IAAI,CAAC,gBAAgB,EAAE;QAC7C,IAAI,KAAK,EAAE;YACT,KAAK,CAAC,iBAAiB,IAAI,KAAK,CAAC,iBAAiB,CAAC;YACnD,KAAK,CAAC,aAAa,IAAI,KAAK,CAAC,aAAa,CAAC;YAC3C,KAAK,CAAC,YAAY,IAAI,KAAK,CAAC,YAAY,CAAC;SAC1C;KACF;IACD,OAAO,KAAK,CAAC;AACf,CAAC,uGAgCe,MAAkC;IAChD,IAAI,MAAM,CAAC,CAAC,IAAI,IAAI,IAAI,MAAM,CAAC,CAAC,GAAG,CAAC,EAAE;QACpC,MAAM,IAAI,WAAW,CACnB,8HAA8H,CAC/H,CAAC;KACH;AACH,CAAC,iIAuP4B,UAAmB;IAC9C,OAAO,CACL,OAAO,UAAU,KAAK,QAAQ,CAAC,CAAC,CAAC,UAAU;QAC3C,CAAC,CAAC,UAAU,KAAK,SAAS,CAAC,CAAC,CAAC,WAAW;YACxC,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,UAAU,CAAC,CAC7B,CAAC;AACJ,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/lib/AssistantStream.d.ts b/node_modules/openai/lib/AssistantStream.d.ts new file mode 100644 index 0000000..dc70b5a --- /dev/null +++ b/node_modules/openai/lib/AssistantStream.d.ts @@ -0,0 +1,61 @@ +import { Message, Text, ImageFile, TextDelta, MessageDelta } from "../resources/beta/threads/messages.js"; +import * as Core from "../core.js"; +import { RequestOptions } from "../core.js"; +import { Run, RunCreateParamsBase, Runs, RunSubmitToolOutputsParamsBase } from "../resources/beta/threads/runs/runs.js"; +import { type ReadableStream } from "../_shims/index.js"; +import { AssistantStreamEvent } from "../resources/beta/assistants.js"; +import { RunStep, RunStepDelta, ToolCall, ToolCallDelta } from "../resources/beta/threads/runs/steps.js"; +import { ThreadCreateAndRunParamsBase, Threads } from "../resources/beta/threads/threads.js"; +import { BaseEvents, EventStream } from "./EventStream.js"; +export interface AssistantStreamEvents extends BaseEvents { + run: (run: Run) => void; + messageCreated: (message: Message) => void; + 
messageDelta: (message: MessageDelta, snapshot: Message) => void; + messageDone: (message: Message) => void; + runStepCreated: (runStep: RunStep) => void; + runStepDelta: (delta: RunStepDelta, snapshot: Runs.RunStep) => void; + runStepDone: (runStep: Runs.RunStep, snapshot: Runs.RunStep) => void; + toolCallCreated: (toolCall: ToolCall) => void; + toolCallDelta: (delta: ToolCallDelta, snapshot: ToolCall) => void; + toolCallDone: (toolCall: ToolCall) => void; + textCreated: (content: Text) => void; + textDelta: (delta: TextDelta, snapshot: Text) => void; + textDone: (content: Text, snapshot: Message) => void; + imageFileDone: (content: ImageFile, snapshot: Message) => void; + event: (event: AssistantStreamEvent) => void; +} +export type ThreadCreateAndRunParamsBaseStream = Omit & { + stream?: true; +}; +export type RunCreateParamsBaseStream = Omit & { + stream?: true; +}; +export type RunSubmitToolOutputsParamsStream = Omit & { + stream?: true; +}; +export declare class AssistantStream extends EventStream implements AsyncIterable { + #private; + [Symbol.asyncIterator](): AsyncIterator; + static fromReadableStream(stream: ReadableStream): AssistantStream; + protected _fromReadableStream(readableStream: ReadableStream, options?: Core.RequestOptions): Promise; + toReadableStream(): ReadableStream; + static createToolAssistantStream(threadId: string, runId: string, runs: Runs, params: RunSubmitToolOutputsParamsStream, options: RequestOptions | undefined): AssistantStream; + protected _createToolAssistantStream(run: Runs, threadId: string, runId: string, params: RunSubmitToolOutputsParamsStream, options?: Core.RequestOptions): Promise; + static createThreadAssistantStream(params: ThreadCreateAndRunParamsBaseStream, thread: Threads, options?: RequestOptions): AssistantStream; + static createAssistantStream(threadId: string, runs: Runs, params: RunCreateParamsBaseStream, options?: RequestOptions): AssistantStream; + currentEvent(): AssistantStreamEvent | undefined; + currentRun(): Run | undefined; + currentMessageSnapshot(): Message | undefined; + currentRunStepSnapshot(): Runs.RunStep | undefined; + finalRunSteps(): Promise; + finalMessages(): Promise; + finalRun(): Promise; + protected _createThreadAssistantStream(thread: Threads, params: ThreadCreateAndRunParamsBase, options?: Core.RequestOptions): Promise; + protected _createAssistantStream(run: Runs, threadId: string, params: RunCreateParamsBase, options?: Core.RequestOptions): Promise; + static accumulateDelta(acc: Record, delta: Record): Record; + protected _addRun(run: Run): Run; + protected _threadAssistantStream(params: ThreadCreateAndRunParamsBase, thread: Threads, options?: Core.RequestOptions): Promise; + protected _runAssistantStream(threadId: string, runs: Runs, params: RunCreateParamsBase, options?: Core.RequestOptions): Promise; + protected _runToolAssistantStream(threadId: string, runId: string, runs: Runs, params: RunSubmitToolOutputsParamsStream, options?: Core.RequestOptions): Promise; +} +//# sourceMappingURL=AssistantStream.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/lib/AssistantStream.d.ts.map b/node_modules/openai/lib/AssistantStream.d.ts.map new file mode 100644 index 0000000..eca44f1 --- /dev/null +++ b/node_modules/openai/lib/AssistantStream.d.ts.map @@ -0,0 +1 @@ 
+{"version":3,"file":"AssistantStream.d.ts","sourceRoot":"","sources":["../src/lib/AssistantStream.ts"],"names":[],"mappings":"AAAA,OAAO,EAGL,OAAO,EAEP,IAAI,EACJ,SAAS,EACT,SAAS,EACT,YAAY,EAEb,MAAM,oCAAoC,CAAC;AAC5C,OAAO,KAAK,IAAI,MAAM,SAAS,CAAC;AAChC,OAAO,EAAE,cAAc,EAAE,MAAM,SAAS,CAAC;AACzC,OAAO,EACL,GAAG,EACH,mBAAmB,EAEnB,IAAI,EACJ,8BAA8B,EAE/B,MAAM,qCAAqC,CAAC;AAC7C,OAAO,EAAE,KAAK,cAAc,EAAE,MAAM,iBAAiB,CAAC;AAGtD,OAAO,EACL,oBAAoB,EAIrB,MAAM,8BAA8B,CAAC;AACtC,OAAO,EAAE,OAAO,EAAE,YAAY,EAAE,QAAQ,EAAE,aAAa,EAAE,MAAM,sCAAsC,CAAC;AACtG,OAAO,EAAE,4BAA4B,EAAE,OAAO,EAAE,MAAM,mCAAmC,CAAC;AAC1F,OAAO,EAAE,UAAU,EAAE,WAAW,EAAE,MAAM,eAAe,CAAC;AAExD,MAAM,WAAW,qBAAsB,SAAQ,UAAU;IACvD,GAAG,EAAE,CAAC,GAAG,EAAE,GAAG,KAAK,IAAI,CAAC;IAGxB,cAAc,EAAE,CAAC,OAAO,EAAE,OAAO,KAAK,IAAI,CAAC;IAC3C,YAAY,EAAE,CAAC,OAAO,EAAE,YAAY,EAAE,QAAQ,EAAE,OAAO,KAAK,IAAI,CAAC;IACjE,WAAW,EAAE,CAAC,OAAO,EAAE,OAAO,KAAK,IAAI,CAAC;IAExC,cAAc,EAAE,CAAC,OAAO,EAAE,OAAO,KAAK,IAAI,CAAC;IAC3C,YAAY,EAAE,CAAC,KAAK,EAAE,YAAY,EAAE,QAAQ,EAAE,IAAI,CAAC,OAAO,KAAK,IAAI,CAAC;IACpE,WAAW,EAAE,CAAC,OAAO,EAAE,IAAI,CAAC,OAAO,EAAE,QAAQ,EAAE,IAAI,CAAC,OAAO,KAAK,IAAI,CAAC;IAErE,eAAe,EAAE,CAAC,QAAQ,EAAE,QAAQ,KAAK,IAAI,CAAC;IAC9C,aAAa,EAAE,CAAC,KAAK,EAAE,aAAa,EAAE,QAAQ,EAAE,QAAQ,KAAK,IAAI,CAAC;IAClE,YAAY,EAAE,CAAC,QAAQ,EAAE,QAAQ,KAAK,IAAI,CAAC;IAE3C,WAAW,EAAE,CAAC,OAAO,EAAE,IAAI,KAAK,IAAI,CAAC;IACrC,SAAS,EAAE,CAAC,KAAK,EAAE,SAAS,EAAE,QAAQ,EAAE,IAAI,KAAK,IAAI,CAAC;IACtD,QAAQ,EAAE,CAAC,OAAO,EAAE,IAAI,EAAE,QAAQ,EAAE,OAAO,KAAK,IAAI,CAAC;IAGrD,aAAa,EAAE,CAAC,OAAO,EAAE,SAAS,EAAE,QAAQ,EAAE,OAAO,KAAK,IAAI,CAAC;IAE/D,KAAK,EAAE,CAAC,KAAK,EAAE,oBAAoB,KAAK,IAAI,CAAC;CAC9C;AAED,MAAM,MAAM,kCAAkC,GAAG,IAAI,CAAC,4BAA4B,EAAE,QAAQ,CAAC,GAAG;IAC9F,MAAM,CAAC,EAAE,IAAI,CAAC;CACf,CAAC;AAEF,MAAM,MAAM,yBAAyB,GAAG,IAAI,CAAC,mBAAmB,EAAE,QAAQ,CAAC,GAAG;IAC5E,MAAM,CAAC,EAAE,IAAI,CAAC;CACf,CAAC;AAEF,MAAM,MAAM,gCAAgC,GAAG,IAAI,CAAC,8BAA8B,EAAE,QAAQ,CAAC,GAAG;IAC9F,MAAM,CAAC,EAAE,IAAI,CAAC;CACf,CAAC;AAEF,qBAAa,eACX,SAAQ,WAAW,CAAC,qBAAqB,CACzC,YAAW,aAAa,CAAC,oBAAoB,CAAC;;IAqB9C,CAAC,MAAM,CAAC,aAAa,CAAC,IAAI,aAAa,CAAC,oBAAoB,CAAC;IA8D7D,MAAM,CAAC,kBAAkB,CAAC,MAAM,EAAE,cAAc,GAAG,eAAe;cAMlD,mBAAmB,CACjC,cAAc,EAAE,cAAc,EAC9B,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAC5B,OAAO,CAAC,GAAG,CAAC;IAiBf,gBAAgB,IAAI,cAAc;IAKlC,MAAM,CAAC,yBAAyB,CAC9B,QAAQ,EAAE,MAAM,EAChB,KAAK,EAAE,MAAM,EACb,IAAI,EAAE,IAAI,EACV,MAAM,EAAE,gCAAgC,EACxC,OAAO,EAAE,cAAc,GAAG,SAAS,GAClC,eAAe;cAWF,0BAA0B,CACxC,GAAG,EAAE,IAAI,EACT,QAAQ,EAAE,MAAM,EAChB,KAAK,EAAE,MAAM,EACb,MAAM,EAAE,gCAAgC,EACxC,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAC5B,OAAO,CAAC,GAAG,CAAC;IAyBf,MAAM,CAAC,2BAA2B,CAChC,MAAM,EAAE,kCAAkC,EAC1C,MAAM,EAAE,OAAO,EACf,OAAO,CAAC,EAAE,cAAc,GACvB,eAAe;IAWlB,MAAM,CAAC,qBAAqB,CAC1B,QAAQ,EAAE,MAAM,EAChB,IAAI,EAAE,IAAI,EACV,MAAM,EAAE,yBAAyB,EACjC,OAAO,CAAC,EAAE,cAAc,GACvB,eAAe;IAWlB,YAAY,IAAI,oBAAoB,GAAG,SAAS;IAIhD,UAAU,IAAI,GAAG,GAAG,SAAS;IAI7B,sBAAsB,IAAI,OAAO,GAAG,SAAS;IAI7C,sBAAsB,IAAI,IAAI,CAAC,OAAO,GAAG,SAAS;IAI5C,aAAa,IAAI,OAAO,CAAC,IAAI,CAAC,OAAO,EAAE,CAAC;IAMxC,aAAa,IAAI,OAAO,CAAC,OAAO,EAAE,CAAC;IAMnC,QAAQ,IAAI,OAAO,CAAC,GAAG,CAAC;cAOd,4BAA4B,CAC1C,MAAM,EAAE,OAAO,EACf,MAAM,EAAE,4BAA4B,EACpC,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAC5B,OAAO,CAAC,GAAG,CAAC;cAsBC,sBAAsB,CACpC,GAAG,EAAE,IAAI,EACT,QAAQ,EAAE,MAAM,EAChB,MAAM,EAAE,mBAAmB,EAC3B,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAC5B,OAAO,CAAC,GAAG,CAAC;IAiUf,MAAM,CAAC,eAAe,CAAC,GAAG,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,EAAE,KAAK,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,GAAG,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC;IAyFjG,SAAS,CAAC,OAAO,CAAC,GAAG,EAAE,GAAG,GAAG,GAAG;cAIhB,sBAAsB,CACpC,MAAM,EAAE,4BAA4B,EACpC,MAAM,EAAE,OAAO,EACf,OAAO
,CAAC,EAAE,IAAI,CAAC,cAAc,GAC5B,OAAO,CAAC,GAAG,CAAC;cAIC,mBAAmB,CACjC,QAAQ,EAAE,MAAM,EAChB,IAAI,EAAE,IAAI,EACV,MAAM,EAAE,mBAAmB,EAC3B,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAC5B,OAAO,CAAC,GAAG,CAAC;cAIC,uBAAuB,CACrC,QAAQ,EAAE,MAAM,EAChB,KAAK,EAAE,MAAM,EACb,IAAI,EAAE,IAAI,EACV,MAAM,EAAE,gCAAgC,EACxC,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAC5B,OAAO,CAAC,GAAG,CAAC;CAGhB"} \ No newline at end of file diff --git a/node_modules/openai/lib/AssistantStream.js b/node_modules/openai/lib/AssistantStream.js new file mode 100644 index 0000000..b10f892 --- /dev/null +++ b/node_modules/openai/lib/AssistantStream.js @@ -0,0 +1,581 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +}; +var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? 
f.value = value : state.set(receiver, value)), value; +}; +var _AssistantStream_instances, _AssistantStream_events, _AssistantStream_runStepSnapshots, _AssistantStream_messageSnapshots, _AssistantStream_messageSnapshot, _AssistantStream_finalRun, _AssistantStream_currentContentIndex, _AssistantStream_currentContent, _AssistantStream_currentToolCallIndex, _AssistantStream_currentToolCall, _AssistantStream_currentEvent, _AssistantStream_currentRunSnapshot, _AssistantStream_currentRunStepSnapshot, _AssistantStream_addEvent, _AssistantStream_endRequest, _AssistantStream_handleMessage, _AssistantStream_handleRunStep, _AssistantStream_handleEvent, _AssistantStream_accumulateRunStep, _AssistantStream_accumulateMessage, _AssistantStream_accumulateContent, _AssistantStream_handleRun; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.AssistantStream = void 0; +const Core = __importStar(require("../core.js")); +const streaming_1 = require("../streaming.js"); +const error_1 = require("../error.js"); +const EventStream_1 = require("./EventStream.js"); +class AssistantStream extends EventStream_1.EventStream { + constructor() { + super(...arguments); + _AssistantStream_instances.add(this); + //Track all events in a single list for reference + _AssistantStream_events.set(this, []); + //Used to accumulate deltas + //We are accumulating many types so the value here is not strict + _AssistantStream_runStepSnapshots.set(this, {}); + _AssistantStream_messageSnapshots.set(this, {}); + _AssistantStream_messageSnapshot.set(this, void 0); + _AssistantStream_finalRun.set(this, void 0); + _AssistantStream_currentContentIndex.set(this, void 0); + _AssistantStream_currentContent.set(this, void 0); + _AssistantStream_currentToolCallIndex.set(this, void 0); + _AssistantStream_currentToolCall.set(this, void 0); + //For current snapshot methods + _AssistantStream_currentEvent.set(this, void 0); + _AssistantStream_currentRunSnapshot.set(this, void 0); + _AssistantStream_currentRunStepSnapshot.set(this, void 0); + } + [(_AssistantStream_events = new WeakMap(), _AssistantStream_runStepSnapshots = new WeakMap(), _AssistantStream_messageSnapshots = new WeakMap(), _AssistantStream_messageSnapshot = new WeakMap(), _AssistantStream_finalRun = new WeakMap(), _AssistantStream_currentContentIndex = new WeakMap(), _AssistantStream_currentContent = new WeakMap(), _AssistantStream_currentToolCallIndex = new WeakMap(), _AssistantStream_currentToolCall = new WeakMap(), _AssistantStream_currentEvent = new WeakMap(), _AssistantStream_currentRunSnapshot = new WeakMap(), _AssistantStream_currentRunStepSnapshot = new WeakMap(), _AssistantStream_instances = new WeakSet(), Symbol.asyncIterator)]() { + const pushQueue = []; + const readQueue = []; + let done = false; + //Catch all for passing along all events + this.on('event', (event) => { + const reader = readQueue.shift(); + if (reader) { + reader.resolve(event); + } + else { + pushQueue.push(event); + } + }); + this.on('end', () => { + done = true; + for (const reader of readQueue) { + reader.resolve(undefined); + } + readQueue.length = 0; + }); + this.on('abort', (err) => { + done = true; + for (const reader of readQueue) { + reader.reject(err); + } + readQueue.length = 0; + }); + this.on('error', (err) => { + done = true; + for (const reader of readQueue) { + reader.reject(err); + } + readQueue.length = 0; + }); + return { + next: async () => { + if (!pushQueue.length) { + if (done) { + return { value: undefined, done: true }; + } + return new Promise((resolve, 
reject) => readQueue.push({ resolve, reject })).then((chunk) => (chunk ? { value: chunk, done: false } : { value: undefined, done: true })); + } + const chunk = pushQueue.shift(); + return { value: chunk, done: false }; + }, + return: async () => { + this.abort(); + return { value: undefined, done: true }; + }, + }; + } + static fromReadableStream(stream) { + const runner = new AssistantStream(); + runner._run(() => runner._fromReadableStream(stream)); + return runner; + } + async _fromReadableStream(readableStream, options) { + const signal = options?.signal; + if (signal) { + if (signal.aborted) + this.controller.abort(); + signal.addEventListener('abort', () => this.controller.abort()); + } + this._connected(); + const stream = streaming_1.Stream.fromReadableStream(readableStream, this.controller); + for await (const event of stream) { + __classPrivateFieldGet(this, _AssistantStream_instances, "m", _AssistantStream_addEvent).call(this, event); + } + if (stream.controller.signal?.aborted) { + throw new error_1.APIUserAbortError(); + } + return this._addRun(__classPrivateFieldGet(this, _AssistantStream_instances, "m", _AssistantStream_endRequest).call(this)); + } + toReadableStream() { + const stream = new streaming_1.Stream(this[Symbol.asyncIterator].bind(this), this.controller); + return stream.toReadableStream(); + } + static createToolAssistantStream(threadId, runId, runs, params, options) { + const runner = new AssistantStream(); + runner._run(() => runner._runToolAssistantStream(threadId, runId, runs, params, { + ...options, + headers: { ...options?.headers, 'X-Stainless-Helper-Method': 'stream' }, + })); + return runner; + } + async _createToolAssistantStream(run, threadId, runId, params, options) { + const signal = options?.signal; + if (signal) { + if (signal.aborted) + this.controller.abort(); + signal.addEventListener('abort', () => this.controller.abort()); + } + const body = { ...params, stream: true }; + const stream = await run.submitToolOutputs(threadId, runId, body, { + ...options, + signal: this.controller.signal, + }); + this._connected(); + for await (const event of stream) { + __classPrivateFieldGet(this, _AssistantStream_instances, "m", _AssistantStream_addEvent).call(this, event); + } + if (stream.controller.signal?.aborted) { + throw new error_1.APIUserAbortError(); + } + return this._addRun(__classPrivateFieldGet(this, _AssistantStream_instances, "m", _AssistantStream_endRequest).call(this)); + } + static createThreadAssistantStream(params, thread, options) { + const runner = new AssistantStream(); + runner._run(() => runner._threadAssistantStream(params, thread, { + ...options, + headers: { ...options?.headers, 'X-Stainless-Helper-Method': 'stream' }, + })); + return runner; + } + static createAssistantStream(threadId, runs, params, options) { + const runner = new AssistantStream(); + runner._run(() => runner._runAssistantStream(threadId, runs, params, { + ...options, + headers: { ...options?.headers, 'X-Stainless-Helper-Method': 'stream' }, + })); + return runner; + } + currentEvent() { + return __classPrivateFieldGet(this, _AssistantStream_currentEvent, "f"); + } + currentRun() { + return __classPrivateFieldGet(this, _AssistantStream_currentRunSnapshot, "f"); + } + currentMessageSnapshot() { + return __classPrivateFieldGet(this, _AssistantStream_messageSnapshot, "f"); + } + currentRunStepSnapshot() { + return __classPrivateFieldGet(this, _AssistantStream_currentRunStepSnapshot, "f"); + } + async finalRunSteps() { + await this.done(); + return 
Object.values(__classPrivateFieldGet(this, _AssistantStream_runStepSnapshots, "f")); + } + async finalMessages() { + await this.done(); + return Object.values(__classPrivateFieldGet(this, _AssistantStream_messageSnapshots, "f")); + } + async finalRun() { + await this.done(); + if (!__classPrivateFieldGet(this, _AssistantStream_finalRun, "f")) + throw Error('Final run was not received.'); + return __classPrivateFieldGet(this, _AssistantStream_finalRun, "f"); + } + async _createThreadAssistantStream(thread, params, options) { + const signal = options?.signal; + if (signal) { + if (signal.aborted) + this.controller.abort(); + signal.addEventListener('abort', () => this.controller.abort()); + } + const body = { ...params, stream: true }; + const stream = await thread.createAndRun(body, { ...options, signal: this.controller.signal }); + this._connected(); + for await (const event of stream) { + __classPrivateFieldGet(this, _AssistantStream_instances, "m", _AssistantStream_addEvent).call(this, event); + } + if (stream.controller.signal?.aborted) { + throw new error_1.APIUserAbortError(); + } + return this._addRun(__classPrivateFieldGet(this, _AssistantStream_instances, "m", _AssistantStream_endRequest).call(this)); + } + async _createAssistantStream(run, threadId, params, options) { + const signal = options?.signal; + if (signal) { + if (signal.aborted) + this.controller.abort(); + signal.addEventListener('abort', () => this.controller.abort()); + } + const body = { ...params, stream: true }; + const stream = await run.create(threadId, body, { ...options, signal: this.controller.signal }); + this._connected(); + for await (const event of stream) { + __classPrivateFieldGet(this, _AssistantStream_instances, "m", _AssistantStream_addEvent).call(this, event); + } + if (stream.controller.signal?.aborted) { + throw new error_1.APIUserAbortError(); + } + return this._addRun(__classPrivateFieldGet(this, _AssistantStream_instances, "m", _AssistantStream_endRequest).call(this)); + } + static accumulateDelta(acc, delta) { + for (const [key, deltaValue] of Object.entries(delta)) { + if (!acc.hasOwnProperty(key)) { + acc[key] = deltaValue; + continue; + } + let accValue = acc[key]; + if (accValue === null || accValue === undefined) { + acc[key] = deltaValue; + continue; + } + // We don't accumulate these special properties + if (key === 'index' || key === 'type') { + acc[key] = deltaValue; + continue; + } + // Type-specific accumulation logic + if (typeof accValue === 'string' && typeof deltaValue === 'string') { + accValue += deltaValue; + } + else if (typeof accValue === 'number' && typeof deltaValue === 'number') { + accValue += deltaValue; + } + else if (Core.isObj(accValue) && Core.isObj(deltaValue)) { + accValue = this.accumulateDelta(accValue, deltaValue); + } + else if (Array.isArray(accValue) && Array.isArray(deltaValue)) { + if (accValue.every((x) => typeof x === 'string' || typeof x === 'number')) { + accValue.push(...deltaValue); // Use spread syntax for efficient addition + continue; + } + for (const deltaEntry of deltaValue) { + if (!Core.isObj(deltaEntry)) { + throw new Error(`Expected array delta entry to be an object but got: ${deltaEntry}`); + } + const index = deltaEntry['index']; + if (index == null) { + console.error(deltaEntry); + throw new Error('Expected array delta entry to have an `index` property'); + } + if (typeof index !== 'number') { + throw new Error(`Expected array delta entry \`index\` property to be a number but got ${index}`); + } + const accEntry = accValue[index]; + if 
(accEntry == null) { + accValue.push(deltaEntry); + } + else { + accValue[index] = this.accumulateDelta(accEntry, deltaEntry); + } + } + continue; + } + else { + throw Error(`Unhandled record type: ${key}, deltaValue: ${deltaValue}, accValue: ${accValue}`); + } + acc[key] = accValue; + } + return acc; + } + _addRun(run) { + return run; + } + async _threadAssistantStream(params, thread, options) { + return await this._createThreadAssistantStream(thread, params, options); + } + async _runAssistantStream(threadId, runs, params, options) { + return await this._createAssistantStream(runs, threadId, params, options); + } + async _runToolAssistantStream(threadId, runId, runs, params, options) { + return await this._createToolAssistantStream(runs, threadId, runId, params, options); + } +} +exports.AssistantStream = AssistantStream; +_AssistantStream_addEvent = function _AssistantStream_addEvent(event) { + if (this.ended) + return; + __classPrivateFieldSet(this, _AssistantStream_currentEvent, event, "f"); + __classPrivateFieldGet(this, _AssistantStream_instances, "m", _AssistantStream_handleEvent).call(this, event); + switch (event.event) { + case 'thread.created': + //No action on this event. + break; + case 'thread.run.created': + case 'thread.run.queued': + case 'thread.run.in_progress': + case 'thread.run.requires_action': + case 'thread.run.completed': + case 'thread.run.failed': + case 'thread.run.cancelling': + case 'thread.run.cancelled': + case 'thread.run.expired': + __classPrivateFieldGet(this, _AssistantStream_instances, "m", _AssistantStream_handleRun).call(this, event); + break; + case 'thread.run.step.created': + case 'thread.run.step.in_progress': + case 'thread.run.step.delta': + case 'thread.run.step.completed': + case 'thread.run.step.failed': + case 'thread.run.step.cancelled': + case 'thread.run.step.expired': + __classPrivateFieldGet(this, _AssistantStream_instances, "m", _AssistantStream_handleRunStep).call(this, event); + break; + case 'thread.message.created': + case 'thread.message.in_progress': + case 'thread.message.delta': + case 'thread.message.completed': + case 'thread.message.incomplete': + __classPrivateFieldGet(this, _AssistantStream_instances, "m", _AssistantStream_handleMessage).call(this, event); + break; + case 'error': + //This is included for completeness, but errors are processed in the SSE event processing so this should not occur + throw new Error('Encountered an error event in event processing - errors should be processed earlier'); + } +}, _AssistantStream_endRequest = function _AssistantStream_endRequest() { + if (this.ended) { + throw new error_1.OpenAIError(`stream has ended, this shouldn't happen`); + } + if (!__classPrivateFieldGet(this, _AssistantStream_finalRun, "f")) + throw Error('Final run has not been received'); + return __classPrivateFieldGet(this, _AssistantStream_finalRun, "f"); +}, _AssistantStream_handleMessage = function _AssistantStream_handleMessage(event) { + const [accumulatedMessage, newContent] = __classPrivateFieldGet(this, _AssistantStream_instances, "m", _AssistantStream_accumulateMessage).call(this, event, __classPrivateFieldGet(this, _AssistantStream_messageSnapshot, "f")); + __classPrivateFieldSet(this, _AssistantStream_messageSnapshot, accumulatedMessage, "f"); + __classPrivateFieldGet(this, _AssistantStream_messageSnapshots, "f")[accumulatedMessage.id] = accumulatedMessage; + for (const content of newContent) { + const snapshotContent = accumulatedMessage.content[content.index]; + if (snapshotContent?.type == 'text') { + 
this._emit('textCreated', snapshotContent.text); + } + } + switch (event.event) { + case 'thread.message.created': + this._emit('messageCreated', event.data); + break; + case 'thread.message.in_progress': + break; + case 'thread.message.delta': + this._emit('messageDelta', event.data.delta, accumulatedMessage); + if (event.data.delta.content) { + for (const content of event.data.delta.content) { + //If it is text delta, emit a text delta event + if (content.type == 'text' && content.text) { + let textDelta = content.text; + let snapshot = accumulatedMessage.content[content.index]; + if (snapshot && snapshot.type == 'text') { + this._emit('textDelta', textDelta, snapshot.text); + } + else { + throw Error('The snapshot associated with this text delta is not text or missing'); + } + } + if (content.index != __classPrivateFieldGet(this, _AssistantStream_currentContentIndex, "f")) { + //See if we have in progress content + if (__classPrivateFieldGet(this, _AssistantStream_currentContent, "f")) { + switch (__classPrivateFieldGet(this, _AssistantStream_currentContent, "f").type) { + case 'text': + this._emit('textDone', __classPrivateFieldGet(this, _AssistantStream_currentContent, "f").text, __classPrivateFieldGet(this, _AssistantStream_messageSnapshot, "f")); + break; + case 'image_file': + this._emit('imageFileDone', __classPrivateFieldGet(this, _AssistantStream_currentContent, "f").image_file, __classPrivateFieldGet(this, _AssistantStream_messageSnapshot, "f")); + break; + } + } + __classPrivateFieldSet(this, _AssistantStream_currentContentIndex, content.index, "f"); + } + __classPrivateFieldSet(this, _AssistantStream_currentContent, accumulatedMessage.content[content.index], "f"); + } + } + break; + case 'thread.message.completed': + case 'thread.message.incomplete': + //We emit the latest content we were working on on completion (including incomplete) + if (__classPrivateFieldGet(this, _AssistantStream_currentContentIndex, "f") !== undefined) { + const currentContent = event.data.content[__classPrivateFieldGet(this, _AssistantStream_currentContentIndex, "f")]; + if (currentContent) { + switch (currentContent.type) { + case 'image_file': + this._emit('imageFileDone', currentContent.image_file, __classPrivateFieldGet(this, _AssistantStream_messageSnapshot, "f")); + break; + case 'text': + this._emit('textDone', currentContent.text, __classPrivateFieldGet(this, _AssistantStream_messageSnapshot, "f")); + break; + } + } + } + if (__classPrivateFieldGet(this, _AssistantStream_messageSnapshot, "f")) { + this._emit('messageDone', event.data); + } + __classPrivateFieldSet(this, _AssistantStream_messageSnapshot, undefined, "f"); + } +}, _AssistantStream_handleRunStep = function _AssistantStream_handleRunStep(event) { + const accumulatedRunStep = __classPrivateFieldGet(this, _AssistantStream_instances, "m", _AssistantStream_accumulateRunStep).call(this, event); + __classPrivateFieldSet(this, _AssistantStream_currentRunStepSnapshot, accumulatedRunStep, "f"); + switch (event.event) { + case 'thread.run.step.created': + this._emit('runStepCreated', event.data); + break; + case 'thread.run.step.delta': + const delta = event.data.delta; + if (delta.step_details && + delta.step_details.type == 'tool_calls' && + delta.step_details.tool_calls && + accumulatedRunStep.step_details.type == 'tool_calls') { + for (const toolCall of delta.step_details.tool_calls) { + if (toolCall.index == __classPrivateFieldGet(this, _AssistantStream_currentToolCallIndex, "f")) { + this._emit('toolCallDelta', toolCall, 
accumulatedRunStep.step_details.tool_calls[toolCall.index]); + } + else { + if (__classPrivateFieldGet(this, _AssistantStream_currentToolCall, "f")) { + this._emit('toolCallDone', __classPrivateFieldGet(this, _AssistantStream_currentToolCall, "f")); + } + __classPrivateFieldSet(this, _AssistantStream_currentToolCallIndex, toolCall.index, "f"); + __classPrivateFieldSet(this, _AssistantStream_currentToolCall, accumulatedRunStep.step_details.tool_calls[toolCall.index], "f"); + if (__classPrivateFieldGet(this, _AssistantStream_currentToolCall, "f")) + this._emit('toolCallCreated', __classPrivateFieldGet(this, _AssistantStream_currentToolCall, "f")); + } + } + } + this._emit('runStepDelta', event.data.delta, accumulatedRunStep); + break; + case 'thread.run.step.completed': + case 'thread.run.step.failed': + case 'thread.run.step.cancelled': + case 'thread.run.step.expired': + __classPrivateFieldSet(this, _AssistantStream_currentRunStepSnapshot, undefined, "f"); + const details = event.data.step_details; + if (details.type == 'tool_calls') { + if (__classPrivateFieldGet(this, _AssistantStream_currentToolCall, "f")) { + this._emit('toolCallDone', __classPrivateFieldGet(this, _AssistantStream_currentToolCall, "f")); + __classPrivateFieldSet(this, _AssistantStream_currentToolCall, undefined, "f"); + } + } + this._emit('runStepDone', event.data, accumulatedRunStep); + break; + case 'thread.run.step.in_progress': + break; + } +}, _AssistantStream_handleEvent = function _AssistantStream_handleEvent(event) { + __classPrivateFieldGet(this, _AssistantStream_events, "f").push(event); + this._emit('event', event); +}, _AssistantStream_accumulateRunStep = function _AssistantStream_accumulateRunStep(event) { + switch (event.event) { + case 'thread.run.step.created': + __classPrivateFieldGet(this, _AssistantStream_runStepSnapshots, "f")[event.data.id] = event.data; + return event.data; + case 'thread.run.step.delta': + let snapshot = __classPrivateFieldGet(this, _AssistantStream_runStepSnapshots, "f")[event.data.id]; + if (!snapshot) { + throw Error('Received a RunStepDelta before creation of a snapshot'); + } + let data = event.data; + if (data.delta) { + const accumulated = AssistantStream.accumulateDelta(snapshot, data.delta); + __classPrivateFieldGet(this, _AssistantStream_runStepSnapshots, "f")[event.data.id] = accumulated; + } + return __classPrivateFieldGet(this, _AssistantStream_runStepSnapshots, "f")[event.data.id]; + case 'thread.run.step.completed': + case 'thread.run.step.failed': + case 'thread.run.step.cancelled': + case 'thread.run.step.expired': + case 'thread.run.step.in_progress': + __classPrivateFieldGet(this, _AssistantStream_runStepSnapshots, "f")[event.data.id] = event.data; + break; + } + if (__classPrivateFieldGet(this, _AssistantStream_runStepSnapshots, "f")[event.data.id]) + return __classPrivateFieldGet(this, _AssistantStream_runStepSnapshots, "f")[event.data.id]; + throw new Error('No snapshot available'); +}, _AssistantStream_accumulateMessage = function _AssistantStream_accumulateMessage(event, snapshot) { + let newContent = []; + switch (event.event) { + case 'thread.message.created': + //On creation the snapshot is just the initial message + return [event.data, newContent]; + case 'thread.message.delta': + if (!snapshot) { + throw Error('Received a delta with no existing snapshot (there should be one from message creation)'); + } + let data = event.data; + //If this delta does not have content, nothing to process + if (data.delta.content) { + for (const contentElement of 
data.delta.content) { + if (contentElement.index in snapshot.content) { + let currentContent = snapshot.content[contentElement.index]; + snapshot.content[contentElement.index] = __classPrivateFieldGet(this, _AssistantStream_instances, "m", _AssistantStream_accumulateContent).call(this, contentElement, currentContent); + } + else { + snapshot.content[contentElement.index] = contentElement; + // This is a new element + newContent.push(contentElement); + } + } + } + return [snapshot, newContent]; + case 'thread.message.in_progress': + case 'thread.message.completed': + case 'thread.message.incomplete': + //No changes on other thread events + if (snapshot) { + return [snapshot, newContent]; + } + else { + throw Error('Received thread message event with no existing snapshot'); + } + } + throw Error('Tried to accumulate a non-message event'); +}, _AssistantStream_accumulateContent = function _AssistantStream_accumulateContent(contentElement, currentContent) { + return AssistantStream.accumulateDelta(currentContent, contentElement); +}, _AssistantStream_handleRun = function _AssistantStream_handleRun(event) { + __classPrivateFieldSet(this, _AssistantStream_currentRunSnapshot, event.data, "f"); + switch (event.event) { + case 'thread.run.created': + break; + case 'thread.run.queued': + break; + case 'thread.run.in_progress': + break; + case 'thread.run.requires_action': + case 'thread.run.cancelled': + case 'thread.run.failed': + case 'thread.run.completed': + case 'thread.run.expired': + __classPrivateFieldSet(this, _AssistantStream_finalRun, event.data, "f"); + if (__classPrivateFieldGet(this, _AssistantStream_currentToolCall, "f")) { + this._emit('toolCallDone', __classPrivateFieldGet(this, _AssistantStream_currentToolCall, "f")); + __classPrivateFieldSet(this, _AssistantStream_currentToolCall, undefined, "f"); + } + break; + case 'thread.run.cancelling': + break; + } +}; +//# sourceMappingURL=AssistantStream.js.map \ No newline at end of file diff --git a/node_modules/openai/lib/AssistantStream.js.map b/node_modules/openai/lib/AssistantStream.js.map new file mode 100644 index 0000000..523d15c --- /dev/null +++ b/node_modules/openai/lib/AssistantStream.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"AssistantStream.js","sourceRoot":"","sources":["../src/lib/AssistantStream.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAWA,iDAAgC;AAWhC,+CAAsC;AACtC,uCAA0D;AAS1D,kDAAwD;AAwCxD,MAAa,eACX,SAAQ,yBAAkC;IAD5C;;;QAIE,iDAAiD;QACjD,kCAAkC,EAAE,EAAC;QAErC,2BAA2B;QAC3B,gEAAgE;QAChE,4CAAoD,EAAE,EAAC;QACvD,4CAA+C,EAAE,EAAC;QAClD,mDAAsC;QACtC,4CAA2B;QAC3B,uDAAyC;QACzC,kDAA4C;QAC5C,wDAA0C;QAC1C,mDAAuC;QAEvC,8BAA8B;QAC9B,gDAAgD;QAChD,sDAAqC;QACrC,0DAAkD;IAwqBpD,CAAC;IAtqBC,uoBAAC,MAAM,CAAC,aAAa,EAAC;QACpB,MAAM,SAAS,GAA2B,EAAE,CAAC;QAC7C,MAAM,SAAS,GAGT,EAAE,CAAC;QACT,IAAI,IAAI,GAAG,KAAK,CAAC;QAEjB,wCAAwC;QACxC,IAAI,CAAC,EAAE,CAAC,OAAO,EAAE,CAAC,KAAK,EAAE,EAAE;YACzB,MAAM,MAAM,GAAG,SAAS,CAAC,KAAK,EAAE,CAAC;YACjC,IAAI,MAAM,EAAE;gBACV,MAAM,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC;aACvB;iBAAM;gBACL,SAAS,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;aACvB;QACH,CAAC,CAAC,CAAC;QAEH,IAAI,CAAC,EAAE,CAAC,KAAK,EAAE,GAAG,EAAE;YAClB,IAAI,GAAG,IAAI,CAAC;YACZ,KAAK,MAAM,MAAM,IAAI,SAAS,EAAE;gBAC9B,MAAM,CAAC,OAAO,CAAC,SAAS,CAAC,CAAC;aAC3B;YACD,SAAS,CAAC,MAAM,GAAG,CAAC,CAAC;QACvB,CAAC,CAAC,CAAC;QAEH,IAAI,CAAC,EAAE,CAAC,OAAO,EAAE,CAAC,GAAG,EAAE,EAAE;YACvB,IAAI,GAAG,IAAI,CAAC;YACZ,KAAK,MAAM,MAAM,IAAI,SAAS,EAAE;gBAC9B,MAAM,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC;aACpB;YACD,SAAS,CAAC,MAAM,GAAG,CAAC,CAAC;QACvB,CAAC,CAAC,CAAC;QAEH,IAAI,CAAC,EAAE,CAAC,OAAO,EAAE,CAAC,GAAG,EAAE,EAAE;YACvB,IAAI,GAAG,IAAI,CAAC;YACZ,KAAK,MAAM,MAAM,IAAI,SAAS,EAAE;gBAC9B,MAAM,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC;aACpB;YACD,SAAS,CAAC,MAAM,GAAG,CAAC,CAAC;QACvB,CAAC,CAAC,CAAC;QAEH,OAAO;YACL,IAAI,EAAE,KAAK,IAAmD,EAAE;gBAC9D,IAAI,CAAC,SAAS,CAAC,MAAM,EAAE;oBACrB,IAAI,IAAI,EAAE;wBACR,OAAO,EAAE,KAAK,EAAE,SAAS,EAAE,IAAI,EAAE,IAAI,EAAE,CAAC;qBACzC;oBACD,OAAO,IAAI,OAAO,CAAmC,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE,CACvE,SAAS,CAAC,IAAI,CAAC,EAAE,OAAO,EAAE,MAAM,EAAE,CAAC,CACpC,CAAC,IAAI,CAAC,CAAC,KAAK,EAAE,EAAE,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,KAAK,EAAE,KAAK,EAAE,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,CAAC,EAAE,KAAK,EAAE,SAAS,EAAE,IAAI,EAAE,IAAI,EAAE,CAAC,CAAC,CAAC;iBAC/F;gBACD,MAAM,KAAK,GAAG,SAAS,CAAC,KAAK,EAAG,CAAC;gBACjC,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,IAAI,EAAE,KAAK,EAAE,CAAC;YACvC,CAAC;YACD,MAAM,EAAE,KAAK,IAAI,EAAE;gBACjB,IAAI,CAAC,KAAK,EAAE,CAAC;gBACb,OAAO,EAAE,KAAK,EAAE,SAAS,EAAE,IAAI,EAAE,IAAI,EAAE,CAAC;YAC1C,CAAC;SACF,CAAC;IACJ,CAAC;IAED,MAAM,CAAC,kBAAkB,CAAC,MAAsB;QAC9C,MAAM,MAAM,GAAG,IAAI,eAAe,EAAE,CAAC;QACrC,MAAM,CAAC,IAAI,CAAC,GAAG,EAAE,CAAC,MAAM,CAAC,mBAAmB,CAAC,MAAM,CAAC,CAAC,CAAC;QACtD,OAAO,MAAM,CAAC;IAChB,CAAC;IAES,KAAK,CAAC,mBAAmB,CACjC,cAA8B,EAC9B,OAA6B;QAE7B,MAAM,MAAM,GAAG,OAAO,EAAE,MAAM,CAAC;QAC/B,IAAI,MAAM,EAAE;YACV,IAAI,MAAM,CAAC,OAAO;gBAAE,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,CAAC;YAC5C,MAAM,CAAC,gBAAgB,CAAC,OAAO,EAAE,GAAG,EAAE,CAAC,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,CAAC,CAAC;SACjE;QACD,IAAI,CAAC,UAAU,EAAE,CAAC;QAClB,MAAM,MAAM,GAAG,kBAAM,CAAC,kBAAkB,CAAuB,cAAc,EAAE,IAAI,CAAC,UAAU,CAAC,CAAC;QAChG,IAAI,KAAK,EAAE,MAAM,KAAK,IAAI,MAAM,EAAE;YAChC,uBAAA,IAAI,6DAAU,MAAd,IAAI,EAAW,KAAK,CAAC,CAAC;SACvB;QACD,IAAI,MAAM,CAAC,UAAU,CAAC,MAAM,EAAE,OAAO,EAAE;YACrC,MAAM,IAAI,yBAAiB,EAAE,CAAC;SAC/B;QACD,OAAO,IAAI,CAAC,OAAO,CAAC,uBAAA,IAAI,+DAAY,MAAhB,IAAI,CAAc,CAAC,CAAC;IAC1C,CAAC;IAED,gBAAgB;QACd,MAAM,MAAM,GAAG,IAAI,kBAAM,CAAC,IAAI,CAAC,MAAM,CAAC,aAAa,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,IAAI,CAAC,UAAU,CAAC,CAAC;QAClF,OAAO,MAAM,CAAC,gBAAgB,EAAE,CAAC;IACnC,CAAC;IAED,MAAM,CAAC,yBAAyB,CAC9B,QAAgB,EAChB,KAAa,EACb,IAAU,EACV,MAAwC,EACxC,OAAmC;QAEnC,MAAM,MAAM,GAAG,IAAI,eAAe,EAAE,CAAC;QACrC,MAAM,CAAC,IAAI,CAAC,GAAG,EAAE,CACf,MAAM,CAAC,uBAAuB,CAAC,QAAQ,EAAE,KAAK,EAAE,IAAI,EAAE,MAAM,EAAE;YAC5D,GAAG,OAAO
;YACV,OAAO,EAAE,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE,2BAA2B,EAAE,QAAQ,EAAE;SACxE,CAAC,CACH,CAAC;QACF,OAAO,MAAM,CAAC;IAChB,CAAC;IAES,KAAK,CAAC,0BAA0B,CACxC,GAAS,EACT,QAAgB,EAChB,KAAa,EACb,MAAwC,EACxC,OAA6B;QAE7B,MAAM,MAAM,GAAG,OAAO,EAAE,MAAM,CAAC;QAC/B,IAAI,MAAM,EAAE;YACV,IAAI,MAAM,CAAC,OAAO;gBAAE,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,CAAC;YAC5C,MAAM,CAAC,gBAAgB,CAAC,OAAO,EAAE,GAAG,EAAE,CAAC,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,CAAC,CAAC;SACjE;QAED,MAAM,IAAI,GAAwC,EAAE,GAAG,MAAM,EAAE,MAAM,EAAE,IAAI,EAAE,CAAC;QAC9E,MAAM,MAAM,GAAG,MAAM,GAAG,CAAC,iBAAiB,CAAC,QAAQ,EAAE,KAAK,EAAE,IAAI,EAAE;YAChE,GAAG,OAAO;YACV,MAAM,EAAE,IAAI,CAAC,UAAU,CAAC,MAAM;SAC/B,CAAC,CAAC;QAEH,IAAI,CAAC,UAAU,EAAE,CAAC;QAElB,IAAI,KAAK,EAAE,MAAM,KAAK,IAAI,MAAM,EAAE;YAChC,uBAAA,IAAI,6DAAU,MAAd,IAAI,EAAW,KAAK,CAAC,CAAC;SACvB;QACD,IAAI,MAAM,CAAC,UAAU,CAAC,MAAM,EAAE,OAAO,EAAE;YACrC,MAAM,IAAI,yBAAiB,EAAE,CAAC;SAC/B;QAED,OAAO,IAAI,CAAC,OAAO,CAAC,uBAAA,IAAI,+DAAY,MAAhB,IAAI,CAAc,CAAC,CAAC;IAC1C,CAAC;IAED,MAAM,CAAC,2BAA2B,CAChC,MAA0C,EAC1C,MAAe,EACf,OAAwB;QAExB,MAAM,MAAM,GAAG,IAAI,eAAe,EAAE,CAAC;QACrC,MAAM,CAAC,IAAI,CAAC,GAAG,EAAE,CACf,MAAM,CAAC,sBAAsB,CAAC,MAAM,EAAE,MAAM,EAAE;YAC5C,GAAG,OAAO;YACV,OAAO,EAAE,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE,2BAA2B,EAAE,QAAQ,EAAE;SACxE,CAAC,CACH,CAAC;QACF,OAAO,MAAM,CAAC;IAChB,CAAC;IAED,MAAM,CAAC,qBAAqB,CAC1B,QAAgB,EAChB,IAAU,EACV,MAAiC,EACjC,OAAwB;QAExB,MAAM,MAAM,GAAG,IAAI,eAAe,EAAE,CAAC;QACrC,MAAM,CAAC,IAAI,CAAC,GAAG,EAAE,CACf,MAAM,CAAC,mBAAmB,CAAC,QAAQ,EAAE,IAAI,EAAE,MAAM,EAAE;YACjD,GAAG,OAAO;YACV,OAAO,EAAE,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE,2BAA2B,EAAE,QAAQ,EAAE;SACxE,CAAC,CACH,CAAC;QACF,OAAO,MAAM,CAAC;IAChB,CAAC;IAED,YAAY;QACV,OAAO,uBAAA,IAAI,qCAAc,CAAC;IAC5B,CAAC;IAED,UAAU;QACR,OAAO,uBAAA,IAAI,2CAAoB,CAAC;IAClC,CAAC;IAED,sBAAsB;QACpB,OAAO,uBAAA,IAAI,wCAAiB,CAAC;IAC/B,CAAC;IAED,sBAAsB;QACpB,OAAO,uBAAA,IAAI,+CAAwB,CAAC;IACtC,CAAC;IAED,KAAK,CAAC,aAAa;QACjB,MAAM,IAAI,CAAC,IAAI,EAAE,CAAC;QAElB,OAAO,MAAM,CAAC,MAAM,CAAC,uBAAA,IAAI,yCAAkB,CAAC,CAAC;IAC/C,CAAC;IAED,KAAK,CAAC,aAAa;QACjB,MAAM,IAAI,CAAC,IAAI,EAAE,CAAC;QAElB,OAAO,MAAM,CAAC,MAAM,CAAC,uBAAA,IAAI,yCAAkB,CAAC,CAAC;IAC/C,CAAC;IAED,KAAK,CAAC,QAAQ;QACZ,MAAM,IAAI,CAAC,IAAI,EAAE,CAAC;QAClB,IAAI,CAAC,uBAAA,IAAI,iCAAU;YAAE,MAAM,KAAK,CAAC,6BAA6B,CAAC,CAAC;QAEhE,OAAO,uBAAA,IAAI,iCAAU,CAAC;IACxB,CAAC;IAES,KAAK,CAAC,4BAA4B,CAC1C,MAAe,EACf,MAAoC,EACpC,OAA6B;QAE7B,MAAM,MAAM,GAAG,OAAO,EAAE,MAAM,CAAC;QAC/B,IAAI,MAAM,EAAE;YACV,IAAI,MAAM,CAAC,OAAO;gBAAE,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,CAAC;YAC5C,MAAM,CAAC,gBAAgB,CAAC,OAAO,EAAE,GAAG,EAAE,CAAC,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,CAAC,CAAC;SACjE;QAED,MAAM,IAAI,GAA6B,EAAE,GAAG,MAAM,EAAE,MAAM,EAAE,IAAI,EAAE,CAAC;QACnE,MAAM,MAAM,GAAG,MAAM,MAAM,CAAC,YAAY,CAAC,IAAI,EAAE,EAAE,GAAG,OAAO,EAAE,MAAM,EAAE,IAAI,CAAC,UAAU,CAAC,MAAM,EAAE,CAAC,CAAC;QAE/F,IAAI,CAAC,UAAU,EAAE,CAAC;QAElB,IAAI,KAAK,EAAE,MAAM,KAAK,IAAI,MAAM,EAAE;YAChC,uBAAA,IAAI,6DAAU,MAAd,IAAI,EAAW,KAAK,CAAC,CAAC;SACvB;QACD,IAAI,MAAM,CAAC,UAAU,CAAC,MAAM,EAAE,OAAO,EAAE;YACrC,MAAM,IAAI,yBAAiB,EAAE,CAAC;SAC/B;QAED,OAAO,IAAI,CAAC,OAAO,CAAC,uBAAA,IAAI,+DAAY,MAAhB,IAAI,CAAc,CAAC,CAAC;IAC1C,CAAC;IAES,KAAK,CAAC,sBAAsB,CACpC,GAAS,EACT,QAAgB,EAChB,MAA2B,EAC3B,OAA6B;QAE7B,MAAM,MAAM,GAAG,OAAO,EAAE,MAAM,CAAC;QAC/B,IAAI,MAAM,EAAE;YACV,IAAI,MAAM,CAAC,OAAO;gBAAE,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,CAAC;YAC5C,MAAM,CAAC,gBAAgB,CAAC,OAAO,EAAE,GAAG,EAAE,CAAC,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,CAAC,CAAC;SACjE;QAED,MAAM,IAAI,GAA6B,EAAE,GAAG,MAAM,EAAE,MAAM,EAAE,IAAI,EAAE,CAAC;QACnE,MAAM,MAAM,GAAG,MAAM,GAAG,CAAC,MAAM,CAAC,QAAQ,EAAE,IAAI,EAAE,EAAE,GAAG,OAAO,EAAE,MAAM,EAAE,IAAI,CAAC,UAAU,CAAC,MAAM,EAAE,CAAC,CAAC;QAEhG,IAAI,CAAC,UAAU,EAAE,CAAC;
QAElB,IAAI,KAAK,EAAE,MAAM,KAAK,IAAI,MAAM,EAAE;YAChC,uBAAA,IAAI,6DAAU,MAAd,IAAI,EAAW,KAAK,CAAC,CAAC;SACvB;QACD,IAAI,MAAM,CAAC,UAAU,CAAC,MAAM,EAAE,OAAO,EAAE;YACrC,MAAM,IAAI,yBAAiB,EAAE,CAAC;SAC/B;QAED,OAAO,IAAI,CAAC,OAAO,CAAC,uBAAA,IAAI,+DAAY,MAAhB,IAAI,CAAc,CAAC,CAAC;IAC1C,CAAC;IA6SD,MAAM,CAAC,eAAe,CAAC,GAAwB,EAAE,KAA0B;QACzE,KAAK,MAAM,CAAC,GAAG,EAAE,UAAU,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,KAAK,CAAC,EAAE;YACrD,IAAI,CAAC,GAAG,CAAC,cAAc,CAAC,GAAG,CAAC,EAAE;gBAC5B,GAAG,CAAC,GAAG,CAAC,GAAG,UAAU,CAAC;gBACtB,SAAS;aACV;YAED,IAAI,QAAQ,GAAG,GAAG,CAAC,GAAG,CAAC,CAAC;YACxB,IAAI,QAAQ,KAAK,IAAI,IAAI,QAAQ,KAAK,SAAS,EAAE;gBAC/C,GAAG,CAAC,GAAG,CAAC,GAAG,UAAU,CAAC;gBACtB,SAAS;aACV;YAED,+CAA+C;YAC/C,IAAI,GAAG,KAAK,OAAO,IAAI,GAAG,KAAK,MAAM,EAAE;gBACrC,GAAG,CAAC,GAAG,CAAC,GAAG,UAAU,CAAC;gBACtB,SAAS;aACV;YAED,mCAAmC;YACnC,IAAI,OAAO,QAAQ,KAAK,QAAQ,IAAI,OAAO,UAAU,KAAK,QAAQ,EAAE;gBAClE,QAAQ,IAAI,UAAU,CAAC;aACxB;iBAAM,IAAI,OAAO,QAAQ,KAAK,QAAQ,IAAI,OAAO,UAAU,KAAK,QAAQ,EAAE;gBACzE,QAAQ,IAAI,UAAU,CAAC;aACxB;iBAAM,IAAI,IAAI,CAAC,KAAK,CAAC,QAAQ,CAAC,IAAI,IAAI,CAAC,KAAK,CAAC,UAAU,CAAC,EAAE;gBACzD,QAAQ,GAAG,IAAI,CAAC,eAAe,CAAC,QAA+B,EAAE,UAAiC,CAAC,CAAC;aACrG;iBAAM,IAAI,KAAK,CAAC,OAAO,CAAC,QAAQ,CAAC,IAAI,KAAK,CAAC,OAAO,CAAC,UAAU,CAAC,EAAE;gBAC/D,IAAI,QAAQ,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,OAAO,CAAC,KAAK,QAAQ,IAAI,OAAO,CAAC,KAAK,QAAQ,CAAC,EAAE;oBACzE,QAAQ,CAAC,IAAI,CAAC,GAAG,UAAU,CAAC,CAAC,CAAC,2CAA2C;oBACzE,SAAS;iBACV;gBAED,KAAK,MAAM,UAAU,IAAI,UAAU,EAAE;oBACnC,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,UAAU,CAAC,EAAE;wBAC3B,MAAM,IAAI,KAAK,CAAC,uDAAuD,UAAU,EAAE,CAAC,CAAC;qBACtF;oBAED,MAAM,KAAK,GAAG,UAAU,CAAC,OAAO,CAAC,CAAC;oBAClC,IAAI,KAAK,IAAI,IAAI,EAAE;wBACjB,OAAO,CAAC,KAAK,CAAC,UAAU,CAAC,CAAC;wBAC1B,MAAM,IAAI,KAAK,CAAC,wDAAwD,CAAC,CAAC;qBAC3E;oBAED,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;wBAC7B,MAAM,IAAI,KAAK,CAAC,wEAAwE,KAAK,EAAE,CAAC,CAAC;qBAClG;oBAED,MAAM,QAAQ,GAAG,QAAQ,CAAC,KAAK,CAAC,CAAC;oBACjC,IAAI,QAAQ,IAAI,IAAI,EAAE;wBACpB,QAAQ,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC;qBAC3B;yBAAM;wBACL,QAAQ,CAAC,KAAK,CAAC,GAAG,IAAI,CAAC,eAAe,CAAC,QAAQ,EAAE,UAAU,CAAC,CAAC;qBAC9D;iBACF;gBACD,SAAS;aACV;iBAAM;gBACL,MAAM,KAAK,CAAC,0BAA0B,GAAG,iBAAiB,UAAU,eAAe,QAAQ,EAAE,CAAC,CAAC;aAChG;YACD,GAAG,CAAC,GAAG,CAAC,GAAG,QAAQ,CAAC;SACrB;QAED,OAAO,GAAG,CAAC;IACb,CAAC;IA2BS,OAAO,CAAC,GAAQ;QACxB,OAAO,GAAG,CAAC;IACb,CAAC;IAES,KAAK,CAAC,sBAAsB,CACpC,MAAoC,EACpC,MAAe,EACf,OAA6B;QAE7B,OAAO,MAAM,IAAI,CAAC,4BAA4B,CAAC,MAAM,EAAE,MAAM,EAAE,OAAO,CAAC,CAAC;IAC1E,CAAC;IAES,KAAK,CAAC,mBAAmB,CACjC,QAAgB,EAChB,IAAU,EACV,MAA2B,EAC3B,OAA6B;QAE7B,OAAO,MAAM,IAAI,CAAC,sBAAsB,CAAC,IAAI,EAAE,QAAQ,EAAE,MAAM,EAAE,OAAO,CAAC,CAAC;IAC5E,CAAC;IAES,KAAK,CAAC,uBAAuB,CACrC,QAAgB,EAChB,KAAa,EACb,IAAU,EACV,MAAwC,EACxC,OAA6B;QAE7B,OAAO,MAAM,IAAI,CAAC,0BAA0B,CAAC,IAAI,EAAE,QAAQ,EAAE,KAAK,EAAE,MAAM,EAAE,OAAO,CAAC,CAAC;IACvF,CAAC;CACF;AA7rBD,0CA6rBC;+DAlaW,KAA2B;IACnC,IAAI,IAAI,CAAC,KAAK;QAAE,OAAO;IAEvB,uBAAA,IAAI,iCAAiB,KAAK,MAAA,CAAC;IAE3B,uBAAA,IAAI,gEAAa,MAAjB,IAAI,EAAc,KAAK,CAAC,CAAC;IAEzB,QAAQ,KAAK,CAAC,KAAK,EAAE;QACnB,KAAK,gBAAgB;YACnB,0BAA0B;YAC1B,MAAM;QAER,KAAK,oBAAoB,CAAC;QAC1B,KAAK,mBAAmB,CAAC;QACzB,KAAK,wBAAwB,CAAC;QAC9B,KAAK,4BAA4B,CAAC;QAClC,KAAK,sBAAsB,CAAC;QAC5B,KAAK,mBAAmB,CAAC;QACzB,KAAK,uBAAuB,CAAC;QAC7B,KAAK,sBAAsB,CAAC;QAC5B,KAAK,oBAAoB;YACvB,uBAAA,IAAI,8DAAW,MAAf,IAAI,EAAY,KAAK,CAAC,CAAC;YACvB,MAAM;QAER,KAAK,yBAAyB,CAAC;QAC/B,KAAK,6BAA6B,CAAC;QACnC,KAAK,uBAAuB,CAAC;QAC7B,KAAK,2BAA2B,CAAC;QACjC,KAAK,wBAAwB,CAAC;QAC9B,KAAK,2BAA2B,CAAC;QACjC,KAAK,yBAAyB;YAC5B,uBAAA,IAAI,kEAAe,MAAnB,IAAI,EAAgB,KAAK,CAAC,CAAC;YAC3B,MAAM;QAER,KAAK,wBAAwB,CAAC;QAC9B,KAAK,4BAA4B,CAAC;QAClC,KAAK,sBAAsB,CAAC;QAC5B,K
AAK,0BAA0B,CAAC;QAChC,KAAK,2BAA2B;YAC9B,uBAAA,IAAI,kEAAe,MAAnB,IAAI,EAAgB,KAAK,CAAC,CAAC;YAC3B,MAAM;QAER,KAAK,OAAO;YACV,kHAAkH;YAClH,MAAM,IAAI,KAAK,CACb,qFAAqF,CACtF,CAAC;KACL;AACH,CAAC;IAGC,IAAI,IAAI,CAAC,KAAK,EAAE;QACd,MAAM,IAAI,mBAAW,CAAC,yCAAyC,CAAC,CAAC;KAClE;IAED,IAAI,CAAC,uBAAA,IAAI,iCAAU;QAAE,MAAM,KAAK,CAAC,iCAAiC,CAAC,CAAC;IAEpE,OAAO,uBAAA,IAAI,iCAAU,CAAC;AACxB,CAAC,2EAEqC,KAAyB;IAC7D,MAAM,CAAC,kBAAkB,EAAE,UAAU,CAAC,GAAG,uBAAA,IAAI,sEAAmB,MAAvB,IAAI,EAAoB,KAAK,EAAE,uBAAA,IAAI,wCAAiB,CAAC,CAAC;IAC/F,uBAAA,IAAI,oCAAoB,kBAAkB,MAAA,CAAC;IAC3C,uBAAA,IAAI,yCAAkB,CAAC,kBAAkB,CAAC,EAAE,CAAC,GAAG,kBAAkB,CAAC;IAEnE,KAAK,MAAM,OAAO,IAAI,UAAU,EAAE;QAChC,MAAM,eAAe,GAAG,kBAAkB,CAAC,OAAO,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC;QAClE,IAAI,eAAe,EAAE,IAAI,IAAI,MAAM,EAAE;YACnC,IAAI,CAAC,KAAK,CAAC,aAAa,EAAE,eAAe,CAAC,IAAI,CAAC,CAAC;SACjD;KACF;IAED,QAAQ,KAAK,CAAC,KAAK,EAAE;QACnB,KAAK,wBAAwB;YAC3B,IAAI,CAAC,KAAK,CAAC,gBAAgB,EAAE,KAAK,CAAC,IAAI,CAAC,CAAC;YACzC,MAAM;QAER,KAAK,4BAA4B;YAC/B,MAAM;QAER,KAAK,sBAAsB;YACzB,IAAI,CAAC,KAAK,CAAC,cAAc,EAAE,KAAK,CAAC,IAAI,CAAC,KAAK,EAAE,kBAAkB,CAAC,CAAC;YAEjE,IAAI,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,OAAO,EAAE;gBAC5B,KAAK,MAAM,OAAO,IAAI,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,OAAO,EAAE;oBAC9C,8CAA8C;oBAC9C,IAAI,OAAO,CAAC,IAAI,IAAI,MAAM,IAAI,OAAO,CAAC,IAAI,EAAE;wBAC1C,IAAI,SAAS,GAAG,OAAO,CAAC,IAAI,CAAC;wBAC7B,IAAI,QAAQ,GAAG,kBAAkB,CAAC,OAAO,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC;wBACzD,IAAI,QAAQ,IAAI,QAAQ,CAAC,IAAI,IAAI,MAAM,EAAE;4BACvC,IAAI,CAAC,KAAK,CAAC,WAAW,EAAE,SAAS,EAAE,QAAQ,CAAC,IAAI,CAAC,CAAC;yBACnD;6BAAM;4BACL,MAAM,KAAK,CAAC,qEAAqE,CAAC,CAAC;yBACpF;qBACF;oBAED,IAAI,OAAO,CAAC,KAAK,IAAI,uBAAA,IAAI,4CAAqB,EAAE;wBAC9C,oCAAoC;wBACpC,IAAI,uBAAA,IAAI,uCAAgB,EAAE;4BACxB,QAAQ,uBAAA,IAAI,uCAAgB,CAAC,IAAI,EAAE;gCACjC,KAAK,MAAM;oCACT,IAAI,CAAC,KAAK,CAAC,UAAU,EAAE,uBAAA,IAAI,uCAAgB,CAAC,IAAI,EAAE,uBAAA,IAAI,wCAAiB,CAAC,CAAC;oCACzE,MAAM;gCACR,KAAK,YAAY;oCACf,IAAI,CAAC,KAAK,CAAC,eAAe,EAAE,uBAAA,IAAI,uCAAgB,CAAC,UAAU,EAAE,uBAAA,IAAI,wCAAiB,CAAC,CAAC;oCACpF,MAAM;6BACT;yBACF;wBAED,uBAAA,IAAI,wCAAwB,OAAO,CAAC,KAAK,MAAA,CAAC;qBAC3C;oBAED,uBAAA,IAAI,mCAAmB,kBAAkB,CAAC,OAAO,CAAC,OAAO,CAAC,KAAK,CAAC,MAAA,CAAC;iBAClE;aACF;YAED,MAAM;QAER,KAAK,0BAA0B,CAAC;QAChC,KAAK,2BAA2B;YAC9B,oFAAoF;YACpF,IAAI,uBAAA,IAAI,4CAAqB,KAAK,SAAS,EAAE;gBAC3C,MAAM,cAAc,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,uBAAA,IAAI,4CAAqB,CAAC,CAAC;gBACrE,IAAI,cAAc,EAAE;oBAClB,QAAQ,cAAc,CAAC,IAAI,EAAE;wBAC3B,KAAK,YAAY;4BACf,IAAI,CAAC,KAAK,CAAC,eAAe,EAAE,cAAc,CAAC,UAAU,EAAE,uBAAA,IAAI,wCAAiB,CAAC,CAAC;4BAC9E,MAAM;wBACR,KAAK,MAAM;4BACT,IAAI,CAAC,KAAK,CAAC,UAAU,EAAE,cAAc,CAAC,IAAI,EAAE,uBAAA,IAAI,wCAAiB,CAAC,CAAC;4BACnE,MAAM;qBACT;iBACF;aACF;YAED,IAAI,uBAAA,IAAI,wCAAiB,EAAE;gBACzB,IAAI,CAAC,KAAK,CAAC,aAAa,EAAE,KAAK,CAAC,IAAI,CAAC,CAAC;aACvC;YAED,uBAAA,IAAI,oCAAoB,SAAS,MAAA,CAAC;KACrC;AACH,CAAC,2EAEqC,KAAyB;IAC7D,MAAM,kBAAkB,GAAG,uBAAA,IAAI,sEAAmB,MAAvB,IAAI,EAAoB,KAAK,CAAC,CAAC;IAC1D,uBAAA,IAAI,2CAA2B,kBAAkB,MAAA,CAAC;IAElD,QAAQ,KAAK,CAAC,KAAK,EAAE;QACnB,KAAK,yBAAyB;YAC5B,IAAI,CAAC,KAAK,CAAC,gBAAgB,EAAE,KAAK,CAAC,IAAI,CAAC,CAAC;YACzC,MAAM;QACR,KAAK,uBAAuB;YAC1B,MAAM,KAAK,GAAG,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC;YAC/B,IACE,KAAK,CAAC,YAAY;gBAClB,KAAK,CAAC,YAAY,CAAC,IAAI,IAAI,YAAY;gBACvC,KAAK,CAAC,YAAY,CAAC,UAAU;gBAC7B,kBAAkB,CAAC,YAAY,CAAC,IAAI,IAAI,YAAY,EACpD;gBACA,KAAK,MAAM,QAAQ,IAAI,KAAK,CAAC,YAAY,CAAC,UAAU,EAAE;oBACpD,IAAI,QAAQ,CAAC,KAAK,IAAI,uBAAA,IAAI,6CAAsB,EAAE;wBAChD,IAAI,CAAC,KAAK,CACR,eAAe,EACf,QAAQ,EACR,kBAAkB,CAAC,YAAY,CAAC,UAAU,CAAC,QAAQ,CAAC,KAAK,CAAa,CACvE,CAAC;qBACH;yBAAM;wBACL,IAAI,uBAAA,IAAI,wCAAiB,EAAE;4BACzB,IAAI,CAAC,KAAK,CAAC,cAAc,EAAE,uBAAA,IAAI,wCA
AiB,CAAC,CAAC;yBACnD;wBAED,uBAAA,IAAI,yCAAyB,QAAQ,CAAC,KAAK,MAAA,CAAC;wBAC5C,uBAAA,IAAI,oCAAoB,kBAAkB,CAAC,YAAY,CAAC,UAAU,CAAC,QAAQ,CAAC,KAAK,CAAC,MAAA,CAAC;wBACnF,IAAI,uBAAA,IAAI,wCAAiB;4BAAE,IAAI,CAAC,KAAK,CAAC,iBAAiB,EAAE,uBAAA,IAAI,wCAAiB,CAAC,CAAC;qBACjF;iBACF;aACF;YAED,IAAI,CAAC,KAAK,CAAC,cAAc,EAAE,KAAK,CAAC,IAAI,CAAC,KAAK,EAAE,kBAAkB,CAAC,CAAC;YACjE,MAAM;QACR,KAAK,2BAA2B,CAAC;QACjC,KAAK,wBAAwB,CAAC;QAC9B,KAAK,2BAA2B,CAAC;QACjC,KAAK,yBAAyB;YAC5B,uBAAA,IAAI,2CAA2B,SAAS,MAAA,CAAC;YACzC,MAAM,OAAO,GAAG,KAAK,CAAC,IAAI,CAAC,YAAY,CAAC;YACxC,IAAI,OAAO,CAAC,IAAI,IAAI,YAAY,EAAE;gBAChC,IAAI,uBAAA,IAAI,wCAAiB,EAAE;oBACzB,IAAI,CAAC,KAAK,CAAC,cAAc,EAAE,uBAAA,IAAI,wCAA6B,CAAC,CAAC;oBAC9D,uBAAA,IAAI,oCAAoB,SAAS,MAAA,CAAC;iBACnC;aACF;YACD,IAAI,CAAC,KAAK,CAAC,aAAa,EAAE,KAAK,CAAC,IAAI,EAAE,kBAAkB,CAAC,CAAC;YAC1D,MAAM;QACR,KAAK,6BAA6B;YAChC,MAAM;KACT;AACH,CAAC,uEAEmC,KAA2B;IAC7D,uBAAA,IAAI,+BAAQ,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;IACzB,IAAI,CAAC,KAAK,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;AAC7B,CAAC,mFAEkB,KAAyB;IAC1C,QAAQ,KAAK,CAAC,KAAK,EAAE;QACnB,KAAK,yBAAyB;YAC5B,uBAAA,IAAI,yCAAkB,CAAC,KAAK,CAAC,IAAI,CAAC,EAAE,CAAC,GAAG,KAAK,CAAC,IAAI,CAAC;YACnD,OAAO,KAAK,CAAC,IAAI,CAAC;QAEpB,KAAK,uBAAuB;YAC1B,IAAI,QAAQ,GAAG,uBAAA,IAAI,yCAAkB,CAAC,KAAK,CAAC,IAAI,CAAC,EAAE,CAAiB,CAAC;YACrE,IAAI,CAAC,QAAQ,EAAE;gBACb,MAAM,KAAK,CAAC,uDAAuD,CAAC,CAAC;aACtE;YAED,IAAI,IAAI,GAAG,KAAK,CAAC,IAAI,CAAC;YAEtB,IAAI,IAAI,CAAC,KAAK,EAAE;gBACd,MAAM,WAAW,GAAG,eAAe,CAAC,eAAe,CAAC,QAAQ,EAAE,IAAI,CAAC,KAAK,CAAiB,CAAC;gBAC1F,uBAAA,IAAI,yCAAkB,CAAC,KAAK,CAAC,IAAI,CAAC,EAAE,CAAC,GAAG,WAAW,CAAC;aACrD;YAED,OAAO,uBAAA,IAAI,yCAAkB,CAAC,KAAK,CAAC,IAAI,CAAC,EAAE,CAAiB,CAAC;QAE/D,KAAK,2BAA2B,CAAC;QACjC,KAAK,wBAAwB,CAAC;QAC9B,KAAK,2BAA2B,CAAC;QACjC,KAAK,yBAAyB,CAAC;QAC/B,KAAK,6BAA6B;YAChC,uBAAA,IAAI,yCAAkB,CAAC,KAAK,CAAC,IAAI,CAAC,EAAE,CAAC,GAAG,KAAK,CAAC,IAAI,CAAC;YACnD,MAAM;KACT;IAED,IAAI,uBAAA,IAAI,yCAAkB,CAAC,KAAK,CAAC,IAAI,CAAC,EAAE,CAAC;QAAE,OAAO,uBAAA,IAAI,yCAAkB,CAAC,KAAK,CAAC,IAAI,CAAC,EAAE,CAAiB,CAAC;IACxG,MAAM,IAAI,KAAK,CAAC,uBAAuB,CAAC,CAAC;AAC3C,CAAC,mFAGC,KAA2B,EAC3B,QAA6B;IAE7B,IAAI,UAAU,GAA0B,EAAE,CAAC;IAE3C,QAAQ,KAAK,CAAC,KAAK,EAAE;QACnB,KAAK,wBAAwB;YAC3B,sDAAsD;YACtD,OAAO,CAAC,KAAK,CAAC,IAAI,EAAE,UAAU,CAAC,CAAC;QAElC,KAAK,sBAAsB;YACzB,IAAI,CAAC,QAAQ,EAAE;gBACb,MAAM,KAAK,CACT,wFAAwF,CACzF,CAAC;aACH;YAED,IAAI,IAAI,GAAG,KAAK,CAAC,IAAI,CAAC;YAEtB,yDAAyD;YACzD,IAAI,IAAI,CAAC,KAAK,CAAC,OAAO,EAAE;gBACtB,KAAK,MAAM,cAAc,IAAI,IAAI,CAAC,KAAK,CAAC,OAAO,EAAE;oBAC/C,IAAI,cAAc,CAAC,KAAK,IAAI,QAAQ,CAAC,OAAO,EAAE;wBAC5C,IAAI,cAAc,GAAG,QAAQ,CAAC,OAAO,CAAC,cAAc,CAAC,KAAK,CAAC,CAAC;wBAC5D,QAAQ,CAAC,OAAO,CAAC,cAAc,CAAC,KAAK,CAAC,GAAG,uBAAA,IAAI,sEAAmB,MAAvB,IAAI,EAC3C,cAAc,EACd,cAAc,CACf,CAAC;qBACH;yBAAM;wBACL,QAAQ,CAAC,OAAO,CAAC,cAAc,CAAC,KAAK,CAAC,GAAG,cAAgC,CAAC;wBAC1E,wBAAwB;wBACxB,UAAU,CAAC,IAAI,CAAC,cAAc,CAAC,CAAC;qBACjC;iBACF;aACF;YAED,OAAO,CAAC,QAAQ,EAAE,UAAU,CAAC,CAAC;QAEhC,KAAK,4BAA4B,CAAC;QAClC,KAAK,0BAA0B,CAAC;QAChC,KAAK,2BAA2B;YAC9B,mCAAmC;YACnC,IAAI,QAAQ,EAAE;gBACZ,OAAO,CAAC,QAAQ,EAAE,UAAU,CAAC,CAAC;aAC/B;iBAAM;gBACL,MAAM,KAAK,CAAC,yDAAyD,CAAC,CAAC;aACxE;KACJ;IACD,MAAM,KAAK,CAAC,yCAAyC,CAAC,CAAC;AACzD,CAAC,mFAGC,cAAmC,EACnC,cAA0C;IAE1C,OAAO,eAAe,CAAC,eAAe,CAAC,cAA6C,EAAE,cAAc,CAE3E,CAAC;AAC5B,CAAC,mEAkEiC,KAAqB;IACrD,uBAAA,IAAI,uCAAuB,KAAK,CAAC,IAAI,MAAA,CAAC;IACtC,QAAQ,KAAK,CAAC,KAAK,EAAE;QACnB,KAAK,oBAAoB;YACvB,MAAM;QACR,KAAK,mBAAmB;YACtB,MAAM;QACR,KAAK,wBAAwB;YAC3B,MAAM;QACR,KAAK,4BAA4B,CAAC;QAClC,KAAK,sBAAsB,CAAC;QAC5B,KAAK,mBAAmB,CAAC;QACzB,KAAK,sBAAsB,CAAC;QAC5B,KAAK,oBAAoB;YACvB,uBAAA,IAAI,6BAAa,KAAK,CAAC,IAAI,MAAA,CAAC;YAC5B,IAAI,uBAAA,IAAI,wCA
AiB,EAAE;gBACzB,IAAI,CAAC,KAAK,CAAC,cAAc,EAAE,uBAAA,IAAI,wCAAiB,CAAC,CAAC;gBAClD,uBAAA,IAAI,oCAAoB,SAAS,MAAA,CAAC;aACnC;YACD,MAAM;QACR,KAAK,uBAAuB;YAC1B,MAAM;KACT;AACH,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/lib/AssistantStream.mjs b/node_modules/openai/lib/AssistantStream.mjs new file mode 100644 index 0000000..1fac9e5 --- /dev/null +++ b/node_modules/openai/lib/AssistantStream.mjs @@ -0,0 +1,554 @@ +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +}; +var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; +}; +var _AssistantStream_instances, _AssistantStream_events, _AssistantStream_runStepSnapshots, _AssistantStream_messageSnapshots, _AssistantStream_messageSnapshot, _AssistantStream_finalRun, _AssistantStream_currentContentIndex, _AssistantStream_currentContent, _AssistantStream_currentToolCallIndex, _AssistantStream_currentToolCall, _AssistantStream_currentEvent, _AssistantStream_currentRunSnapshot, _AssistantStream_currentRunStepSnapshot, _AssistantStream_addEvent, _AssistantStream_endRequest, _AssistantStream_handleMessage, _AssistantStream_handleRunStep, _AssistantStream_handleEvent, _AssistantStream_accumulateRunStep, _AssistantStream_accumulateMessage, _AssistantStream_accumulateContent, _AssistantStream_handleRun; +import * as Core from "../core.mjs"; +import { Stream } from "../streaming.mjs"; +import { APIUserAbortError, OpenAIError } from "../error.mjs"; +import { EventStream } from "./EventStream.mjs"; +export class AssistantStream extends EventStream { + constructor() { + super(...arguments); + _AssistantStream_instances.add(this); + //Track all events in a single list for reference + _AssistantStream_events.set(this, []); + //Used to accumulate deltas + //We are accumulating many types so the value here is not strict + _AssistantStream_runStepSnapshots.set(this, {}); + _AssistantStream_messageSnapshots.set(this, {}); + _AssistantStream_messageSnapshot.set(this, void 0); + _AssistantStream_finalRun.set(this, void 0); + _AssistantStream_currentContentIndex.set(this, void 0); + _AssistantStream_currentContent.set(this, void 0); + _AssistantStream_currentToolCallIndex.set(this, void 0); + _AssistantStream_currentToolCall.set(this, void 0); + //For current snapshot methods + _AssistantStream_currentEvent.set(this, void 0); + _AssistantStream_currentRunSnapshot.set(this, void 0); + _AssistantStream_currentRunStepSnapshot.set(this, void 0); + } + [(_AssistantStream_events = new WeakMap(), _AssistantStream_runStepSnapshots = new WeakMap(), _AssistantStream_messageSnapshots = new WeakMap(), 
_AssistantStream_messageSnapshot = new WeakMap(), _AssistantStream_finalRun = new WeakMap(), _AssistantStream_currentContentIndex = new WeakMap(), _AssistantStream_currentContent = new WeakMap(), _AssistantStream_currentToolCallIndex = new WeakMap(), _AssistantStream_currentToolCall = new WeakMap(), _AssistantStream_currentEvent = new WeakMap(), _AssistantStream_currentRunSnapshot = new WeakMap(), _AssistantStream_currentRunStepSnapshot = new WeakMap(), _AssistantStream_instances = new WeakSet(), Symbol.asyncIterator)]() { + const pushQueue = []; + const readQueue = []; + let done = false; + //Catch all for passing along all events + this.on('event', (event) => { + const reader = readQueue.shift(); + if (reader) { + reader.resolve(event); + } + else { + pushQueue.push(event); + } + }); + this.on('end', () => { + done = true; + for (const reader of readQueue) { + reader.resolve(undefined); + } + readQueue.length = 0; + }); + this.on('abort', (err) => { + done = true; + for (const reader of readQueue) { + reader.reject(err); + } + readQueue.length = 0; + }); + this.on('error', (err) => { + done = true; + for (const reader of readQueue) { + reader.reject(err); + } + readQueue.length = 0; + }); + return { + next: async () => { + if (!pushQueue.length) { + if (done) { + return { value: undefined, done: true }; + } + return new Promise((resolve, reject) => readQueue.push({ resolve, reject })).then((chunk) => (chunk ? { value: chunk, done: false } : { value: undefined, done: true })); + } + const chunk = pushQueue.shift(); + return { value: chunk, done: false }; + }, + return: async () => { + this.abort(); + return { value: undefined, done: true }; + }, + }; + } + static fromReadableStream(stream) { + const runner = new AssistantStream(); + runner._run(() => runner._fromReadableStream(stream)); + return runner; + } + async _fromReadableStream(readableStream, options) { + const signal = options?.signal; + if (signal) { + if (signal.aborted) + this.controller.abort(); + signal.addEventListener('abort', () => this.controller.abort()); + } + this._connected(); + const stream = Stream.fromReadableStream(readableStream, this.controller); + for await (const event of stream) { + __classPrivateFieldGet(this, _AssistantStream_instances, "m", _AssistantStream_addEvent).call(this, event); + } + if (stream.controller.signal?.aborted) { + throw new APIUserAbortError(); + } + return this._addRun(__classPrivateFieldGet(this, _AssistantStream_instances, "m", _AssistantStream_endRequest).call(this)); + } + toReadableStream() { + const stream = new Stream(this[Symbol.asyncIterator].bind(this), this.controller); + return stream.toReadableStream(); + } + static createToolAssistantStream(threadId, runId, runs, params, options) { + const runner = new AssistantStream(); + runner._run(() => runner._runToolAssistantStream(threadId, runId, runs, params, { + ...options, + headers: { ...options?.headers, 'X-Stainless-Helper-Method': 'stream' }, + })); + return runner; + } + async _createToolAssistantStream(run, threadId, runId, params, options) { + const signal = options?.signal; + if (signal) { + if (signal.aborted) + this.controller.abort(); + signal.addEventListener('abort', () => this.controller.abort()); + } + const body = { ...params, stream: true }; + const stream = await run.submitToolOutputs(threadId, runId, body, { + ...options, + signal: this.controller.signal, + }); + this._connected(); + for await (const event of stream) { + __classPrivateFieldGet(this, _AssistantStream_instances, "m", 
_AssistantStream_addEvent).call(this, event); + } + if (stream.controller.signal?.aborted) { + throw new APIUserAbortError(); + } + return this._addRun(__classPrivateFieldGet(this, _AssistantStream_instances, "m", _AssistantStream_endRequest).call(this)); + } + static createThreadAssistantStream(params, thread, options) { + const runner = new AssistantStream(); + runner._run(() => runner._threadAssistantStream(params, thread, { + ...options, + headers: { ...options?.headers, 'X-Stainless-Helper-Method': 'stream' }, + })); + return runner; + } + static createAssistantStream(threadId, runs, params, options) { + const runner = new AssistantStream(); + runner._run(() => runner._runAssistantStream(threadId, runs, params, { + ...options, + headers: { ...options?.headers, 'X-Stainless-Helper-Method': 'stream' }, + })); + return runner; + } + currentEvent() { + return __classPrivateFieldGet(this, _AssistantStream_currentEvent, "f"); + } + currentRun() { + return __classPrivateFieldGet(this, _AssistantStream_currentRunSnapshot, "f"); + } + currentMessageSnapshot() { + return __classPrivateFieldGet(this, _AssistantStream_messageSnapshot, "f"); + } + currentRunStepSnapshot() { + return __classPrivateFieldGet(this, _AssistantStream_currentRunStepSnapshot, "f"); + } + async finalRunSteps() { + await this.done(); + return Object.values(__classPrivateFieldGet(this, _AssistantStream_runStepSnapshots, "f")); + } + async finalMessages() { + await this.done(); + return Object.values(__classPrivateFieldGet(this, _AssistantStream_messageSnapshots, "f")); + } + async finalRun() { + await this.done(); + if (!__classPrivateFieldGet(this, _AssistantStream_finalRun, "f")) + throw Error('Final run was not received.'); + return __classPrivateFieldGet(this, _AssistantStream_finalRun, "f"); + } + async _createThreadAssistantStream(thread, params, options) { + const signal = options?.signal; + if (signal) { + if (signal.aborted) + this.controller.abort(); + signal.addEventListener('abort', () => this.controller.abort()); + } + const body = { ...params, stream: true }; + const stream = await thread.createAndRun(body, { ...options, signal: this.controller.signal }); + this._connected(); + for await (const event of stream) { + __classPrivateFieldGet(this, _AssistantStream_instances, "m", _AssistantStream_addEvent).call(this, event); + } + if (stream.controller.signal?.aborted) { + throw new APIUserAbortError(); + } + return this._addRun(__classPrivateFieldGet(this, _AssistantStream_instances, "m", _AssistantStream_endRequest).call(this)); + } + async _createAssistantStream(run, threadId, params, options) { + const signal = options?.signal; + if (signal) { + if (signal.aborted) + this.controller.abort(); + signal.addEventListener('abort', () => this.controller.abort()); + } + const body = { ...params, stream: true }; + const stream = await run.create(threadId, body, { ...options, signal: this.controller.signal }); + this._connected(); + for await (const event of stream) { + __classPrivateFieldGet(this, _AssistantStream_instances, "m", _AssistantStream_addEvent).call(this, event); + } + if (stream.controller.signal?.aborted) { + throw new APIUserAbortError(); + } + return this._addRun(__classPrivateFieldGet(this, _AssistantStream_instances, "m", _AssistantStream_endRequest).call(this)); + } + static accumulateDelta(acc, delta) { + for (const [key, deltaValue] of Object.entries(delta)) { + if (!acc.hasOwnProperty(key)) { + acc[key] = deltaValue; + continue; + } + let accValue = acc[key]; + if (accValue === null || accValue 
=== undefined) { + acc[key] = deltaValue; + continue; + } + // We don't accumulate these special properties + if (key === 'index' || key === 'type') { + acc[key] = deltaValue; + continue; + } + // Type-specific accumulation logic + if (typeof accValue === 'string' && typeof deltaValue === 'string') { + accValue += deltaValue; + } + else if (typeof accValue === 'number' && typeof deltaValue === 'number') { + accValue += deltaValue; + } + else if (Core.isObj(accValue) && Core.isObj(deltaValue)) { + accValue = this.accumulateDelta(accValue, deltaValue); + } + else if (Array.isArray(accValue) && Array.isArray(deltaValue)) { + if (accValue.every((x) => typeof x === 'string' || typeof x === 'number')) { + accValue.push(...deltaValue); // Use spread syntax for efficient addition + continue; + } + for (const deltaEntry of deltaValue) { + if (!Core.isObj(deltaEntry)) { + throw new Error(`Expected array delta entry to be an object but got: ${deltaEntry}`); + } + const index = deltaEntry['index']; + if (index == null) { + console.error(deltaEntry); + throw new Error('Expected array delta entry to have an `index` property'); + } + if (typeof index !== 'number') { + throw new Error(`Expected array delta entry \`index\` property to be a number but got ${index}`); + } + const accEntry = accValue[index]; + if (accEntry == null) { + accValue.push(deltaEntry); + } + else { + accValue[index] = this.accumulateDelta(accEntry, deltaEntry); + } + } + continue; + } + else { + throw Error(`Unhandled record type: ${key}, deltaValue: ${deltaValue}, accValue: ${accValue}`); + } + acc[key] = accValue; + } + return acc; + } + _addRun(run) { + return run; + } + async _threadAssistantStream(params, thread, options) { + return await this._createThreadAssistantStream(thread, params, options); + } + async _runAssistantStream(threadId, runs, params, options) { + return await this._createAssistantStream(runs, threadId, params, options); + } + async _runToolAssistantStream(threadId, runId, runs, params, options) { + return await this._createToolAssistantStream(runs, threadId, runId, params, options); + } +} +_AssistantStream_addEvent = function _AssistantStream_addEvent(event) { + if (this.ended) + return; + __classPrivateFieldSet(this, _AssistantStream_currentEvent, event, "f"); + __classPrivateFieldGet(this, _AssistantStream_instances, "m", _AssistantStream_handleEvent).call(this, event); + switch (event.event) { + case 'thread.created': + //No action on this event. 
+ break; + case 'thread.run.created': + case 'thread.run.queued': + case 'thread.run.in_progress': + case 'thread.run.requires_action': + case 'thread.run.completed': + case 'thread.run.failed': + case 'thread.run.cancelling': + case 'thread.run.cancelled': + case 'thread.run.expired': + __classPrivateFieldGet(this, _AssistantStream_instances, "m", _AssistantStream_handleRun).call(this, event); + break; + case 'thread.run.step.created': + case 'thread.run.step.in_progress': + case 'thread.run.step.delta': + case 'thread.run.step.completed': + case 'thread.run.step.failed': + case 'thread.run.step.cancelled': + case 'thread.run.step.expired': + __classPrivateFieldGet(this, _AssistantStream_instances, "m", _AssistantStream_handleRunStep).call(this, event); + break; + case 'thread.message.created': + case 'thread.message.in_progress': + case 'thread.message.delta': + case 'thread.message.completed': + case 'thread.message.incomplete': + __classPrivateFieldGet(this, _AssistantStream_instances, "m", _AssistantStream_handleMessage).call(this, event); + break; + case 'error': + //This is included for completeness, but errors are processed in the SSE event processing so this should not occur + throw new Error('Encountered an error event in event processing - errors should be processed earlier'); + } +}, _AssistantStream_endRequest = function _AssistantStream_endRequest() { + if (this.ended) { + throw new OpenAIError(`stream has ended, this shouldn't happen`); + } + if (!__classPrivateFieldGet(this, _AssistantStream_finalRun, "f")) + throw Error('Final run has not been received'); + return __classPrivateFieldGet(this, _AssistantStream_finalRun, "f"); +}, _AssistantStream_handleMessage = function _AssistantStream_handleMessage(event) { + const [accumulatedMessage, newContent] = __classPrivateFieldGet(this, _AssistantStream_instances, "m", _AssistantStream_accumulateMessage).call(this, event, __classPrivateFieldGet(this, _AssistantStream_messageSnapshot, "f")); + __classPrivateFieldSet(this, _AssistantStream_messageSnapshot, accumulatedMessage, "f"); + __classPrivateFieldGet(this, _AssistantStream_messageSnapshots, "f")[accumulatedMessage.id] = accumulatedMessage; + for (const content of newContent) { + const snapshotContent = accumulatedMessage.content[content.index]; + if (snapshotContent?.type == 'text') { + this._emit('textCreated', snapshotContent.text); + } + } + switch (event.event) { + case 'thread.message.created': + this._emit('messageCreated', event.data); + break; + case 'thread.message.in_progress': + break; + case 'thread.message.delta': + this._emit('messageDelta', event.data.delta, accumulatedMessage); + if (event.data.delta.content) { + for (const content of event.data.delta.content) { + //If it is text delta, emit a text delta event + if (content.type == 'text' && content.text) { + let textDelta = content.text; + let snapshot = accumulatedMessage.content[content.index]; + if (snapshot && snapshot.type == 'text') { + this._emit('textDelta', textDelta, snapshot.text); + } + else { + throw Error('The snapshot associated with this text delta is not text or missing'); + } + } + if (content.index != __classPrivateFieldGet(this, _AssistantStream_currentContentIndex, "f")) { + //See if we have in progress content + if (__classPrivateFieldGet(this, _AssistantStream_currentContent, "f")) { + switch (__classPrivateFieldGet(this, _AssistantStream_currentContent, "f").type) { + case 'text': + this._emit('textDone', __classPrivateFieldGet(this, _AssistantStream_currentContent, "f").text, 
__classPrivateFieldGet(this, _AssistantStream_messageSnapshot, "f")); + break; + case 'image_file': + this._emit('imageFileDone', __classPrivateFieldGet(this, _AssistantStream_currentContent, "f").image_file, __classPrivateFieldGet(this, _AssistantStream_messageSnapshot, "f")); + break; + } + } + __classPrivateFieldSet(this, _AssistantStream_currentContentIndex, content.index, "f"); + } + __classPrivateFieldSet(this, _AssistantStream_currentContent, accumulatedMessage.content[content.index], "f"); + } + } + break; + case 'thread.message.completed': + case 'thread.message.incomplete': + //We emit the latest content we were working on on completion (including incomplete) + if (__classPrivateFieldGet(this, _AssistantStream_currentContentIndex, "f") !== undefined) { + const currentContent = event.data.content[__classPrivateFieldGet(this, _AssistantStream_currentContentIndex, "f")]; + if (currentContent) { + switch (currentContent.type) { + case 'image_file': + this._emit('imageFileDone', currentContent.image_file, __classPrivateFieldGet(this, _AssistantStream_messageSnapshot, "f")); + break; + case 'text': + this._emit('textDone', currentContent.text, __classPrivateFieldGet(this, _AssistantStream_messageSnapshot, "f")); + break; + } + } + } + if (__classPrivateFieldGet(this, _AssistantStream_messageSnapshot, "f")) { + this._emit('messageDone', event.data); + } + __classPrivateFieldSet(this, _AssistantStream_messageSnapshot, undefined, "f"); + } +}, _AssistantStream_handleRunStep = function _AssistantStream_handleRunStep(event) { + const accumulatedRunStep = __classPrivateFieldGet(this, _AssistantStream_instances, "m", _AssistantStream_accumulateRunStep).call(this, event); + __classPrivateFieldSet(this, _AssistantStream_currentRunStepSnapshot, accumulatedRunStep, "f"); + switch (event.event) { + case 'thread.run.step.created': + this._emit('runStepCreated', event.data); + break; + case 'thread.run.step.delta': + const delta = event.data.delta; + if (delta.step_details && + delta.step_details.type == 'tool_calls' && + delta.step_details.tool_calls && + accumulatedRunStep.step_details.type == 'tool_calls') { + for (const toolCall of delta.step_details.tool_calls) { + if (toolCall.index == __classPrivateFieldGet(this, _AssistantStream_currentToolCallIndex, "f")) { + this._emit('toolCallDelta', toolCall, accumulatedRunStep.step_details.tool_calls[toolCall.index]); + } + else { + if (__classPrivateFieldGet(this, _AssistantStream_currentToolCall, "f")) { + this._emit('toolCallDone', __classPrivateFieldGet(this, _AssistantStream_currentToolCall, "f")); + } + __classPrivateFieldSet(this, _AssistantStream_currentToolCallIndex, toolCall.index, "f"); + __classPrivateFieldSet(this, _AssistantStream_currentToolCall, accumulatedRunStep.step_details.tool_calls[toolCall.index], "f"); + if (__classPrivateFieldGet(this, _AssistantStream_currentToolCall, "f")) + this._emit('toolCallCreated', __classPrivateFieldGet(this, _AssistantStream_currentToolCall, "f")); + } + } + } + this._emit('runStepDelta', event.data.delta, accumulatedRunStep); + break; + case 'thread.run.step.completed': + case 'thread.run.step.failed': + case 'thread.run.step.cancelled': + case 'thread.run.step.expired': + __classPrivateFieldSet(this, _AssistantStream_currentRunStepSnapshot, undefined, "f"); + const details = event.data.step_details; + if (details.type == 'tool_calls') { + if (__classPrivateFieldGet(this, _AssistantStream_currentToolCall, "f")) { + this._emit('toolCallDone', __classPrivateFieldGet(this, 
_AssistantStream_currentToolCall, "f")); + __classPrivateFieldSet(this, _AssistantStream_currentToolCall, undefined, "f"); + } + } + this._emit('runStepDone', event.data, accumulatedRunStep); + break; + case 'thread.run.step.in_progress': + break; + } +}, _AssistantStream_handleEvent = function _AssistantStream_handleEvent(event) { + __classPrivateFieldGet(this, _AssistantStream_events, "f").push(event); + this._emit('event', event); +}, _AssistantStream_accumulateRunStep = function _AssistantStream_accumulateRunStep(event) { + switch (event.event) { + case 'thread.run.step.created': + __classPrivateFieldGet(this, _AssistantStream_runStepSnapshots, "f")[event.data.id] = event.data; + return event.data; + case 'thread.run.step.delta': + let snapshot = __classPrivateFieldGet(this, _AssistantStream_runStepSnapshots, "f")[event.data.id]; + if (!snapshot) { + throw Error('Received a RunStepDelta before creation of a snapshot'); + } + let data = event.data; + if (data.delta) { + const accumulated = AssistantStream.accumulateDelta(snapshot, data.delta); + __classPrivateFieldGet(this, _AssistantStream_runStepSnapshots, "f")[event.data.id] = accumulated; + } + return __classPrivateFieldGet(this, _AssistantStream_runStepSnapshots, "f")[event.data.id]; + case 'thread.run.step.completed': + case 'thread.run.step.failed': + case 'thread.run.step.cancelled': + case 'thread.run.step.expired': + case 'thread.run.step.in_progress': + __classPrivateFieldGet(this, _AssistantStream_runStepSnapshots, "f")[event.data.id] = event.data; + break; + } + if (__classPrivateFieldGet(this, _AssistantStream_runStepSnapshots, "f")[event.data.id]) + return __classPrivateFieldGet(this, _AssistantStream_runStepSnapshots, "f")[event.data.id]; + throw new Error('No snapshot available'); +}, _AssistantStream_accumulateMessage = function _AssistantStream_accumulateMessage(event, snapshot) { + let newContent = []; + switch (event.event) { + case 'thread.message.created': + //On creation the snapshot is just the initial message + return [event.data, newContent]; + case 'thread.message.delta': + if (!snapshot) { + throw Error('Received a delta with no existing snapshot (there should be one from message creation)'); + } + let data = event.data; + //If this delta does not have content, nothing to process + if (data.delta.content) { + for (const contentElement of data.delta.content) { + if (contentElement.index in snapshot.content) { + let currentContent = snapshot.content[contentElement.index]; + snapshot.content[contentElement.index] = __classPrivateFieldGet(this, _AssistantStream_instances, "m", _AssistantStream_accumulateContent).call(this, contentElement, currentContent); + } + else { + snapshot.content[contentElement.index] = contentElement; + // This is a new element + newContent.push(contentElement); + } + } + } + return [snapshot, newContent]; + case 'thread.message.in_progress': + case 'thread.message.completed': + case 'thread.message.incomplete': + //No changes on other thread events + if (snapshot) { + return [snapshot, newContent]; + } + else { + throw Error('Received thread message event with no existing snapshot'); + } + } + throw Error('Tried to accumulate a non-message event'); +}, _AssistantStream_accumulateContent = function _AssistantStream_accumulateContent(contentElement, currentContent) { + return AssistantStream.accumulateDelta(currentContent, contentElement); +}, _AssistantStream_handleRun = function _AssistantStream_handleRun(event) { + __classPrivateFieldSet(this, _AssistantStream_currentRunSnapshot, 
event.data, "f"); + switch (event.event) { + case 'thread.run.created': + break; + case 'thread.run.queued': + break; + case 'thread.run.in_progress': + break; + case 'thread.run.requires_action': + case 'thread.run.cancelled': + case 'thread.run.failed': + case 'thread.run.completed': + case 'thread.run.expired': + __classPrivateFieldSet(this, _AssistantStream_finalRun, event.data, "f"); + if (__classPrivateFieldGet(this, _AssistantStream_currentToolCall, "f")) { + this._emit('toolCallDone', __classPrivateFieldGet(this, _AssistantStream_currentToolCall, "f")); + __classPrivateFieldSet(this, _AssistantStream_currentToolCall, undefined, "f"); + } + break; + case 'thread.run.cancelling': + break; + } +}; +//# sourceMappingURL=AssistantStream.mjs.map \ No newline at end of file diff --git a/node_modules/openai/lib/AssistantStream.mjs.map b/node_modules/openai/lib/AssistantStream.mjs.map new file mode 100644 index 0000000..990bf73 --- /dev/null +++ b/node_modules/openai/lib/AssistantStream.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"AssistantStream.mjs","sourceRoot":"","sources":["../src/lib/AssistantStream.ts"],"names":[],"mappings":";;;;;;;;;;;;OAWO,KAAK,IAAI;OAWT,EAAE,MAAM,EAAE;OACV,EAAE,iBAAiB,EAAE,WAAW,EAAE;OASlC,EAAc,WAAW,EAAE;AAwClC,MAAM,OAAO,eACX,SAAQ,WAAkC;IAD5C;;;QAIE,iDAAiD;QACjD,kCAAkC,EAAE,EAAC;QAErC,2BAA2B;QAC3B,gEAAgE;QAChE,4CAAoD,EAAE,EAAC;QACvD,4CAA+C,EAAE,EAAC;QAClD,mDAAsC;QACtC,4CAA2B;QAC3B,uDAAyC;QACzC,kDAA4C;QAC5C,wDAA0C;QAC1C,mDAAuC;QAEvC,8BAA8B;QAC9B,gDAAgD;QAChD,sDAAqC;QACrC,0DAAkD;IAwqBpD,CAAC;IAtqBC,uoBAAC,MAAM,CAAC,aAAa,EAAC;QACpB,MAAM,SAAS,GAA2B,EAAE,CAAC;QAC7C,MAAM,SAAS,GAGT,EAAE,CAAC;QACT,IAAI,IAAI,GAAG,KAAK,CAAC;QAEjB,wCAAwC;QACxC,IAAI,CAAC,EAAE,CAAC,OAAO,EAAE,CAAC,KAAK,EAAE,EAAE;YACzB,MAAM,MAAM,GAAG,SAAS,CAAC,KAAK,EAAE,CAAC;YACjC,IAAI,MAAM,EAAE;gBACV,MAAM,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC;aACvB;iBAAM;gBACL,SAAS,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;aACvB;QACH,CAAC,CAAC,CAAC;QAEH,IAAI,CAAC,EAAE,CAAC,KAAK,EAAE,GAAG,EAAE;YAClB,IAAI,GAAG,IAAI,CAAC;YACZ,KAAK,MAAM,MAAM,IAAI,SAAS,EAAE;gBAC9B,MAAM,CAAC,OAAO,CAAC,SAAS,CAAC,CAAC;aAC3B;YACD,SAAS,CAAC,MAAM,GAAG,CAAC,CAAC;QACvB,CAAC,CAAC,CAAC;QAEH,IAAI,CAAC,EAAE,CAAC,OAAO,EAAE,CAAC,GAAG,EAAE,EAAE;YACvB,IAAI,GAAG,IAAI,CAAC;YACZ,KAAK,MAAM,MAAM,IAAI,SAAS,EAAE;gBAC9B,MAAM,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC;aACpB;YACD,SAAS,CAAC,MAAM,GAAG,CAAC,CAAC;QACvB,CAAC,CAAC,CAAC;QAEH,IAAI,CAAC,EAAE,CAAC,OAAO,EAAE,CAAC,GAAG,EAAE,EAAE;YACvB,IAAI,GAAG,IAAI,CAAC;YACZ,KAAK,MAAM,MAAM,IAAI,SAAS,EAAE;gBAC9B,MAAM,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC;aACpB;YACD,SAAS,CAAC,MAAM,GAAG,CAAC,CAAC;QACvB,CAAC,CAAC,CAAC;QAEH,OAAO;YACL,IAAI,EAAE,KAAK,IAAmD,EAAE;gBAC9D,IAAI,CAAC,SAAS,CAAC,MAAM,EAAE;oBACrB,IAAI,IAAI,EAAE;wBACR,OAAO,EAAE,KAAK,EAAE,SAAS,EAAE,IAAI,EAAE,IAAI,EAAE,CAAC;qBACzC;oBACD,OAAO,IAAI,OAAO,CAAmC,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE,CACvE,SAAS,CAAC,IAAI,CAAC,EAAE,OAAO,EAAE,MAAM,EAAE,CAAC,CACpC,CAAC,IAAI,CAAC,CAAC,KAAK,EAAE,EAAE,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,KAAK,EAAE,KAAK,EAAE,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,CAAC,EAAE,KAAK,EAAE,SAAS,EAAE,IAAI,EAAE,IAAI,EAAE,CAAC,CAAC,CAAC;iBAC/F;gBACD,MAAM,KAAK,GAAG,SAAS,CAAC,KAAK,EAAG,CAAC;gBACjC,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,IAAI,EAAE,KAAK,EAAE,CAAC;YACvC,CAAC;YACD,MAAM,EAAE,KAAK,IAAI,EAAE;gBACjB,IAAI,CAAC,KAAK,EAAE,CAAC;gBACb,OAAO,EAAE,KAAK,EAAE,SAAS,EAAE,IAAI,EAAE,IAAI,EAAE,CAAC;YAC1C,CAAC;SACF,CAAC;IACJ,CAAC;IAED,MAAM,CAAC,kBAAkB,CAAC,MAAsB;QAC9C,MAAM,MAAM,GAAG,IAAI,eAAe,EAAE,CAAC;QACrC,MAAM,CAAC,IAAI,CAAC,GAAG,EAAE,CAAC,MAAM,CAAC,mBAAmB,CAAC,MAAM,CAAC,CAAC,CAAC;QACtD,OAAO,MAAM,CAAC;IAChB,CAAC;IAES,KAAK,CAAC,mBAAmB,CACjC,cAA8B,EAC9B,O
AA6B;QAE7B,MAAM,MAAM,GAAG,OAAO,EAAE,MAAM,CAAC;QAC/B,IAAI,MAAM,EAAE;YACV,IAAI,MAAM,CAAC,OAAO;gBAAE,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,CAAC;YAC5C,MAAM,CAAC,gBAAgB,CAAC,OAAO,EAAE,GAAG,EAAE,CAAC,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,CAAC,CAAC;SACjE;QACD,IAAI,CAAC,UAAU,EAAE,CAAC;QAClB,MAAM,MAAM,GAAG,MAAM,CAAC,kBAAkB,CAAuB,cAAc,EAAE,IAAI,CAAC,UAAU,CAAC,CAAC;QAChG,IAAI,KAAK,EAAE,MAAM,KAAK,IAAI,MAAM,EAAE;YAChC,uBAAA,IAAI,6DAAU,MAAd,IAAI,EAAW,KAAK,CAAC,CAAC;SACvB;QACD,IAAI,MAAM,CAAC,UAAU,CAAC,MAAM,EAAE,OAAO,EAAE;YACrC,MAAM,IAAI,iBAAiB,EAAE,CAAC;SAC/B;QACD,OAAO,IAAI,CAAC,OAAO,CAAC,uBAAA,IAAI,+DAAY,MAAhB,IAAI,CAAc,CAAC,CAAC;IAC1C,CAAC;IAED,gBAAgB;QACd,MAAM,MAAM,GAAG,IAAI,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC,aAAa,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,IAAI,CAAC,UAAU,CAAC,CAAC;QAClF,OAAO,MAAM,CAAC,gBAAgB,EAAE,CAAC;IACnC,CAAC;IAED,MAAM,CAAC,yBAAyB,CAC9B,QAAgB,EAChB,KAAa,EACb,IAAU,EACV,MAAwC,EACxC,OAAmC;QAEnC,MAAM,MAAM,GAAG,IAAI,eAAe,EAAE,CAAC;QACrC,MAAM,CAAC,IAAI,CAAC,GAAG,EAAE,CACf,MAAM,CAAC,uBAAuB,CAAC,QAAQ,EAAE,KAAK,EAAE,IAAI,EAAE,MAAM,EAAE;YAC5D,GAAG,OAAO;YACV,OAAO,EAAE,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE,2BAA2B,EAAE,QAAQ,EAAE;SACxE,CAAC,CACH,CAAC;QACF,OAAO,MAAM,CAAC;IAChB,CAAC;IAES,KAAK,CAAC,0BAA0B,CACxC,GAAS,EACT,QAAgB,EAChB,KAAa,EACb,MAAwC,EACxC,OAA6B;QAE7B,MAAM,MAAM,GAAG,OAAO,EAAE,MAAM,CAAC;QAC/B,IAAI,MAAM,EAAE;YACV,IAAI,MAAM,CAAC,OAAO;gBAAE,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,CAAC;YAC5C,MAAM,CAAC,gBAAgB,CAAC,OAAO,EAAE,GAAG,EAAE,CAAC,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,CAAC,CAAC;SACjE;QAED,MAAM,IAAI,GAAwC,EAAE,GAAG,MAAM,EAAE,MAAM,EAAE,IAAI,EAAE,CAAC;QAC9E,MAAM,MAAM,GAAG,MAAM,GAAG,CAAC,iBAAiB,CAAC,QAAQ,EAAE,KAAK,EAAE,IAAI,EAAE;YAChE,GAAG,OAAO;YACV,MAAM,EAAE,IAAI,CAAC,UAAU,CAAC,MAAM;SAC/B,CAAC,CAAC;QAEH,IAAI,CAAC,UAAU,EAAE,CAAC;QAElB,IAAI,KAAK,EAAE,MAAM,KAAK,IAAI,MAAM,EAAE;YAChC,uBAAA,IAAI,6DAAU,MAAd,IAAI,EAAW,KAAK,CAAC,CAAC;SACvB;QACD,IAAI,MAAM,CAAC,UAAU,CAAC,MAAM,EAAE,OAAO,EAAE;YACrC,MAAM,IAAI,iBAAiB,EAAE,CAAC;SAC/B;QAED,OAAO,IAAI,CAAC,OAAO,CAAC,uBAAA,IAAI,+DAAY,MAAhB,IAAI,CAAc,CAAC,CAAC;IAC1C,CAAC;IAED,MAAM,CAAC,2BAA2B,CAChC,MAA0C,EAC1C,MAAe,EACf,OAAwB;QAExB,MAAM,MAAM,GAAG,IAAI,eAAe,EAAE,CAAC;QACrC,MAAM,CAAC,IAAI,CAAC,GAAG,EAAE,CACf,MAAM,CAAC,sBAAsB,CAAC,MAAM,EAAE,MAAM,EAAE;YAC5C,GAAG,OAAO;YACV,OAAO,EAAE,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE,2BAA2B,EAAE,QAAQ,EAAE;SACxE,CAAC,CACH,CAAC;QACF,OAAO,MAAM,CAAC;IAChB,CAAC;IAED,MAAM,CAAC,qBAAqB,CAC1B,QAAgB,EAChB,IAAU,EACV,MAAiC,EACjC,OAAwB;QAExB,MAAM,MAAM,GAAG,IAAI,eAAe,EAAE,CAAC;QACrC,MAAM,CAAC,IAAI,CAAC,GAAG,EAAE,CACf,MAAM,CAAC,mBAAmB,CAAC,QAAQ,EAAE,IAAI,EAAE,MAAM,EAAE;YACjD,GAAG,OAAO;YACV,OAAO,EAAE,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE,2BAA2B,EAAE,QAAQ,EAAE;SACxE,CAAC,CACH,CAAC;QACF,OAAO,MAAM,CAAC;IAChB,CAAC;IAED,YAAY;QACV,OAAO,uBAAA,IAAI,qCAAc,CAAC;IAC5B,CAAC;IAED,UAAU;QACR,OAAO,uBAAA,IAAI,2CAAoB,CAAC;IAClC,CAAC;IAED,sBAAsB;QACpB,OAAO,uBAAA,IAAI,wCAAiB,CAAC;IAC/B,CAAC;IAED,sBAAsB;QACpB,OAAO,uBAAA,IAAI,+CAAwB,CAAC;IACtC,CAAC;IAED,KAAK,CAAC,aAAa;QACjB,MAAM,IAAI,CAAC,IAAI,EAAE,CAAC;QAElB,OAAO,MAAM,CAAC,MAAM,CAAC,uBAAA,IAAI,yCAAkB,CAAC,CAAC;IAC/C,CAAC;IAED,KAAK,CAAC,aAAa;QACjB,MAAM,IAAI,CAAC,IAAI,EAAE,CAAC;QAElB,OAAO,MAAM,CAAC,MAAM,CAAC,uBAAA,IAAI,yCAAkB,CAAC,CAAC;IAC/C,CAAC;IAED,KAAK,CAAC,QAAQ;QACZ,MAAM,IAAI,CAAC,IAAI,EAAE,CAAC;QAClB,IAAI,CAAC,uBAAA,IAAI,iCAAU;YAAE,MAAM,KAAK,CAAC,6BAA6B,CAAC,CAAC;QAEhE,OAAO,uBAAA,IAAI,iCAAU,CAAC;IACxB,CAAC;IAES,KAAK,CAAC,4BAA4B,CAC1C,MAAe,EACf,MAAoC,EACpC,OAA6B;QAE7B,MAAM,MAAM,GAAG,OAAO,EAAE,MAAM,CAAC;QAC/B,IAAI,MAAM,EAAE;YACV,IAAI,MAAM,CAAC,OAAO;gBAAE,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,CAAC;YAC5C,MAAM,CAAC,gBAAgB,CAAC,OAAO,EAAE,GAAG,EAAE,CAAC,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,CAAC,CAA
C;SACjE;QAED,MAAM,IAAI,GAA6B,EAAE,GAAG,MAAM,EAAE,MAAM,EAAE,IAAI,EAAE,CAAC;QACnE,MAAM,MAAM,GAAG,MAAM,MAAM,CAAC,YAAY,CAAC,IAAI,EAAE,EAAE,GAAG,OAAO,EAAE,MAAM,EAAE,IAAI,CAAC,UAAU,CAAC,MAAM,EAAE,CAAC,CAAC;QAE/F,IAAI,CAAC,UAAU,EAAE,CAAC;QAElB,IAAI,KAAK,EAAE,MAAM,KAAK,IAAI,MAAM,EAAE;YAChC,uBAAA,IAAI,6DAAU,MAAd,IAAI,EAAW,KAAK,CAAC,CAAC;SACvB;QACD,IAAI,MAAM,CAAC,UAAU,CAAC,MAAM,EAAE,OAAO,EAAE;YACrC,MAAM,IAAI,iBAAiB,EAAE,CAAC;SAC/B;QAED,OAAO,IAAI,CAAC,OAAO,CAAC,uBAAA,IAAI,+DAAY,MAAhB,IAAI,CAAc,CAAC,CAAC;IAC1C,CAAC;IAES,KAAK,CAAC,sBAAsB,CACpC,GAAS,EACT,QAAgB,EAChB,MAA2B,EAC3B,OAA6B;QAE7B,MAAM,MAAM,GAAG,OAAO,EAAE,MAAM,CAAC;QAC/B,IAAI,MAAM,EAAE;YACV,IAAI,MAAM,CAAC,OAAO;gBAAE,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,CAAC;YAC5C,MAAM,CAAC,gBAAgB,CAAC,OAAO,EAAE,GAAG,EAAE,CAAC,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,CAAC,CAAC;SACjE;QAED,MAAM,IAAI,GAA6B,EAAE,GAAG,MAAM,EAAE,MAAM,EAAE,IAAI,EAAE,CAAC;QACnE,MAAM,MAAM,GAAG,MAAM,GAAG,CAAC,MAAM,CAAC,QAAQ,EAAE,IAAI,EAAE,EAAE,GAAG,OAAO,EAAE,MAAM,EAAE,IAAI,CAAC,UAAU,CAAC,MAAM,EAAE,CAAC,CAAC;QAEhG,IAAI,CAAC,UAAU,EAAE,CAAC;QAElB,IAAI,KAAK,EAAE,MAAM,KAAK,IAAI,MAAM,EAAE;YAChC,uBAAA,IAAI,6DAAU,MAAd,IAAI,EAAW,KAAK,CAAC,CAAC;SACvB;QACD,IAAI,MAAM,CAAC,UAAU,CAAC,MAAM,EAAE,OAAO,EAAE;YACrC,MAAM,IAAI,iBAAiB,EAAE,CAAC;SAC/B;QAED,OAAO,IAAI,CAAC,OAAO,CAAC,uBAAA,IAAI,+DAAY,MAAhB,IAAI,CAAc,CAAC,CAAC;IAC1C,CAAC;IA6SD,MAAM,CAAC,eAAe,CAAC,GAAwB,EAAE,KAA0B;QACzE,KAAK,MAAM,CAAC,GAAG,EAAE,UAAU,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,KAAK,CAAC,EAAE;YACrD,IAAI,CAAC,GAAG,CAAC,cAAc,CAAC,GAAG,CAAC,EAAE;gBAC5B,GAAG,CAAC,GAAG,CAAC,GAAG,UAAU,CAAC;gBACtB,SAAS;aACV;YAED,IAAI,QAAQ,GAAG,GAAG,CAAC,GAAG,CAAC,CAAC;YACxB,IAAI,QAAQ,KAAK,IAAI,IAAI,QAAQ,KAAK,SAAS,EAAE;gBAC/C,GAAG,CAAC,GAAG,CAAC,GAAG,UAAU,CAAC;gBACtB,SAAS;aACV;YAED,+CAA+C;YAC/C,IAAI,GAAG,KAAK,OAAO,IAAI,GAAG,KAAK,MAAM,EAAE;gBACrC,GAAG,CAAC,GAAG,CAAC,GAAG,UAAU,CAAC;gBACtB,SAAS;aACV;YAED,mCAAmC;YACnC,IAAI,OAAO,QAAQ,KAAK,QAAQ,IAAI,OAAO,UAAU,KAAK,QAAQ,EAAE;gBAClE,QAAQ,IAAI,UAAU,CAAC;aACxB;iBAAM,IAAI,OAAO,QAAQ,KAAK,QAAQ,IAAI,OAAO,UAAU,KAAK,QAAQ,EAAE;gBACzE,QAAQ,IAAI,UAAU,CAAC;aACxB;iBAAM,IAAI,IAAI,CAAC,KAAK,CAAC,QAAQ,CAAC,IAAI,IAAI,CAAC,KAAK,CAAC,UAAU,CAAC,EAAE;gBACzD,QAAQ,GAAG,IAAI,CAAC,eAAe,CAAC,QAA+B,EAAE,UAAiC,CAAC,CAAC;aACrG;iBAAM,IAAI,KAAK,CAAC,OAAO,CAAC,QAAQ,CAAC,IAAI,KAAK,CAAC,OAAO,CAAC,UAAU,CAAC,EAAE;gBAC/D,IAAI,QAAQ,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,OAAO,CAAC,KAAK,QAAQ,IAAI,OAAO,CAAC,KAAK,QAAQ,CAAC,EAAE;oBACzE,QAAQ,CAAC,IAAI,CAAC,GAAG,UAAU,CAAC,CAAC,CAAC,2CAA2C;oBACzE,SAAS;iBACV;gBAED,KAAK,MAAM,UAAU,IAAI,UAAU,EAAE;oBACnC,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,UAAU,CAAC,EAAE;wBAC3B,MAAM,IAAI,KAAK,CAAC,uDAAuD,UAAU,EAAE,CAAC,CAAC;qBACtF;oBAED,MAAM,KAAK,GAAG,UAAU,CAAC,OAAO,CAAC,CAAC;oBAClC,IAAI,KAAK,IAAI,IAAI,EAAE;wBACjB,OAAO,CAAC,KAAK,CAAC,UAAU,CAAC,CAAC;wBAC1B,MAAM,IAAI,KAAK,CAAC,wDAAwD,CAAC,CAAC;qBAC3E;oBAED,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;wBAC7B,MAAM,IAAI,KAAK,CAAC,wEAAwE,KAAK,EAAE,CAAC,CAAC;qBAClG;oBAED,MAAM,QAAQ,GAAG,QAAQ,CAAC,KAAK,CAAC,CAAC;oBACjC,IAAI,QAAQ,IAAI,IAAI,EAAE;wBACpB,QAAQ,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC;qBAC3B;yBAAM;wBACL,QAAQ,CAAC,KAAK,CAAC,GAAG,IAAI,CAAC,eAAe,CAAC,QAAQ,EAAE,UAAU,CAAC,CAAC;qBAC9D;iBACF;gBACD,SAAS;aACV;iBAAM;gBACL,MAAM,KAAK,CAAC,0BAA0B,GAAG,iBAAiB,UAAU,eAAe,QAAQ,EAAE,CAAC,CAAC;aAChG;YACD,GAAG,CAAC,GAAG,CAAC,GAAG,QAAQ,CAAC;SACrB;QAED,OAAO,GAAG,CAAC;IACb,CAAC;IA2BS,OAAO,CAAC,GAAQ;QACxB,OAAO,GAAG,CAAC;IACb,CAAC;IAES,KAAK,CAAC,sBAAsB,CACpC,MAAoC,EACpC,MAAe,EACf,OAA6B;QAE7B,OAAO,MAAM,IAAI,CAAC,4BAA4B,CAAC,MAAM,EAAE,MAAM,EAAE,OAAO,CAAC,CAAC;IAC1E,CAAC;IAES,KAAK,CAAC,mBAAmB,CACjC,QAAgB,EAChB,IAAU,EACV,MAA2B,EAC3B,OAA6B;QAE7B,OAAO,MAAM,IAAI,CAAC,sBAAsB,CA
AC,IAAI,EAAE,QAAQ,EAAE,MAAM,EAAE,OAAO,CAAC,CAAC;IAC5E,CAAC;IAES,KAAK,CAAC,uBAAuB,CACrC,QAAgB,EAChB,KAAa,EACb,IAAU,EACV,MAAwC,EACxC,OAA6B;QAE7B,OAAO,MAAM,IAAI,CAAC,0BAA0B,CAAC,IAAI,EAAE,QAAQ,EAAE,KAAK,EAAE,MAAM,EAAE,OAAO,CAAC,CAAC;IACvF,CAAC;CACF;+DAlaW,KAA2B;IACnC,IAAI,IAAI,CAAC,KAAK;QAAE,OAAO;IAEvB,uBAAA,IAAI,iCAAiB,KAAK,MAAA,CAAC;IAE3B,uBAAA,IAAI,gEAAa,MAAjB,IAAI,EAAc,KAAK,CAAC,CAAC;IAEzB,QAAQ,KAAK,CAAC,KAAK,EAAE;QACnB,KAAK,gBAAgB;YACnB,0BAA0B;YAC1B,MAAM;QAER,KAAK,oBAAoB,CAAC;QAC1B,KAAK,mBAAmB,CAAC;QACzB,KAAK,wBAAwB,CAAC;QAC9B,KAAK,4BAA4B,CAAC;QAClC,KAAK,sBAAsB,CAAC;QAC5B,KAAK,mBAAmB,CAAC;QACzB,KAAK,uBAAuB,CAAC;QAC7B,KAAK,sBAAsB,CAAC;QAC5B,KAAK,oBAAoB;YACvB,uBAAA,IAAI,8DAAW,MAAf,IAAI,EAAY,KAAK,CAAC,CAAC;YACvB,MAAM;QAER,KAAK,yBAAyB,CAAC;QAC/B,KAAK,6BAA6B,CAAC;QACnC,KAAK,uBAAuB,CAAC;QAC7B,KAAK,2BAA2B,CAAC;QACjC,KAAK,wBAAwB,CAAC;QAC9B,KAAK,2BAA2B,CAAC;QACjC,KAAK,yBAAyB;YAC5B,uBAAA,IAAI,kEAAe,MAAnB,IAAI,EAAgB,KAAK,CAAC,CAAC;YAC3B,MAAM;QAER,KAAK,wBAAwB,CAAC;QAC9B,KAAK,4BAA4B,CAAC;QAClC,KAAK,sBAAsB,CAAC;QAC5B,KAAK,0BAA0B,CAAC;QAChC,KAAK,2BAA2B;YAC9B,uBAAA,IAAI,kEAAe,MAAnB,IAAI,EAAgB,KAAK,CAAC,CAAC;YAC3B,MAAM;QAER,KAAK,OAAO;YACV,kHAAkH;YAClH,MAAM,IAAI,KAAK,CACb,qFAAqF,CACtF,CAAC;KACL;AACH,CAAC;IAGC,IAAI,IAAI,CAAC,KAAK,EAAE;QACd,MAAM,IAAI,WAAW,CAAC,yCAAyC,CAAC,CAAC;KAClE;IAED,IAAI,CAAC,uBAAA,IAAI,iCAAU;QAAE,MAAM,KAAK,CAAC,iCAAiC,CAAC,CAAC;IAEpE,OAAO,uBAAA,IAAI,iCAAU,CAAC;AACxB,CAAC,2EAEqC,KAAyB;IAC7D,MAAM,CAAC,kBAAkB,EAAE,UAAU,CAAC,GAAG,uBAAA,IAAI,sEAAmB,MAAvB,IAAI,EAAoB,KAAK,EAAE,uBAAA,IAAI,wCAAiB,CAAC,CAAC;IAC/F,uBAAA,IAAI,oCAAoB,kBAAkB,MAAA,CAAC;IAC3C,uBAAA,IAAI,yCAAkB,CAAC,kBAAkB,CAAC,EAAE,CAAC,GAAG,kBAAkB,CAAC;IAEnE,KAAK,MAAM,OAAO,IAAI,UAAU,EAAE;QAChC,MAAM,eAAe,GAAG,kBAAkB,CAAC,OAAO,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC;QAClE,IAAI,eAAe,EAAE,IAAI,IAAI,MAAM,EAAE;YACnC,IAAI,CAAC,KAAK,CAAC,aAAa,EAAE,eAAe,CAAC,IAAI,CAAC,CAAC;SACjD;KACF;IAED,QAAQ,KAAK,CAAC,KAAK,EAAE;QACnB,KAAK,wBAAwB;YAC3B,IAAI,CAAC,KAAK,CAAC,gBAAgB,EAAE,KAAK,CAAC,IAAI,CAAC,CAAC;YACzC,MAAM;QAER,KAAK,4BAA4B;YAC/B,MAAM;QAER,KAAK,sBAAsB;YACzB,IAAI,CAAC,KAAK,CAAC,cAAc,EAAE,KAAK,CAAC,IAAI,CAAC,KAAK,EAAE,kBAAkB,CAAC,CAAC;YAEjE,IAAI,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,OAAO,EAAE;gBAC5B,KAAK,MAAM,OAAO,IAAI,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,OAAO,EAAE;oBAC9C,8CAA8C;oBAC9C,IAAI,OAAO,CAAC,IAAI,IAAI,MAAM,IAAI,OAAO,CAAC,IAAI,EAAE;wBAC1C,IAAI,SAAS,GAAG,OAAO,CAAC,IAAI,CAAC;wBAC7B,IAAI,QAAQ,GAAG,kBAAkB,CAAC,OAAO,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC;wBACzD,IAAI,QAAQ,IAAI,QAAQ,CAAC,IAAI,IAAI,MAAM,EAAE;4BACvC,IAAI,CAAC,KAAK,CAAC,WAAW,EAAE,SAAS,EAAE,QAAQ,CAAC,IAAI,CAAC,CAAC;yBACnD;6BAAM;4BACL,MAAM,KAAK,CAAC,qEAAqE,CAAC,CAAC;yBACpF;qBACF;oBAED,IAAI,OAAO,CAAC,KAAK,IAAI,uBAAA,IAAI,4CAAqB,EAAE;wBAC9C,oCAAoC;wBACpC,IAAI,uBAAA,IAAI,uCAAgB,EAAE;4BACxB,QAAQ,uBAAA,IAAI,uCAAgB,CAAC,IAAI,EAAE;gCACjC,KAAK,MAAM;oCACT,IAAI,CAAC,KAAK,CAAC,UAAU,EAAE,uBAAA,IAAI,uCAAgB,CAAC,IAAI,EAAE,uBAAA,IAAI,wCAAiB,CAAC,CAAC;oCACzE,MAAM;gCACR,KAAK,YAAY;oCACf,IAAI,CAAC,KAAK,CAAC,eAAe,EAAE,uBAAA,IAAI,uCAAgB,CAAC,UAAU,EAAE,uBAAA,IAAI,wCAAiB,CAAC,CAAC;oCACpF,MAAM;6BACT;yBACF;wBAED,uBAAA,IAAI,wCAAwB,OAAO,CAAC,KAAK,MAAA,CAAC;qBAC3C;oBAED,uBAAA,IAAI,mCAAmB,kBAAkB,CAAC,OAAO,CAAC,OAAO,CAAC,KAAK,CAAC,MAAA,CAAC;iBAClE;aACF;YAED,MAAM;QAER,KAAK,0BAA0B,CAAC;QAChC,KAAK,2BAA2B;YAC9B,oFAAoF;YACpF,IAAI,uBAAA,IAAI,4CAAqB,KAAK,SAAS,EAAE;gBAC3C,MAAM,cAAc,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,uBAAA,IAAI,4CAAqB,CAAC,CAAC;gBACrE,IAAI,cAAc,EAAE;oBAClB,QAAQ,cAAc,CAAC,IAAI,EAAE;wBAC3B,KAAK,YAAY;4BACf,IAAI,CAAC,KAAK,CAAC,eAAe,EAAE,cAAc,CAAC,UAAU,EAAE,uBAAA,IAAI,wCAAiB,CAAC,CAAC;4BAC9E,MAAM;wBACR,KAAK,MAAM;4BACT,IAAI,CAAC,KAAK,CAAC,UAAU,EA
AE,cAAc,CAAC,IAAI,EAAE,uBAAA,IAAI,wCAAiB,CAAC,CAAC;4BACnE,MAAM;qBACT;iBACF;aACF;YAED,IAAI,uBAAA,IAAI,wCAAiB,EAAE;gBACzB,IAAI,CAAC,KAAK,CAAC,aAAa,EAAE,KAAK,CAAC,IAAI,CAAC,CAAC;aACvC;YAED,uBAAA,IAAI,oCAAoB,SAAS,MAAA,CAAC;KACrC;AACH,CAAC,2EAEqC,KAAyB;IAC7D,MAAM,kBAAkB,GAAG,uBAAA,IAAI,sEAAmB,MAAvB,IAAI,EAAoB,KAAK,CAAC,CAAC;IAC1D,uBAAA,IAAI,2CAA2B,kBAAkB,MAAA,CAAC;IAElD,QAAQ,KAAK,CAAC,KAAK,EAAE;QACnB,KAAK,yBAAyB;YAC5B,IAAI,CAAC,KAAK,CAAC,gBAAgB,EAAE,KAAK,CAAC,IAAI,CAAC,CAAC;YACzC,MAAM;QACR,KAAK,uBAAuB;YAC1B,MAAM,KAAK,GAAG,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC;YAC/B,IACE,KAAK,CAAC,YAAY;gBAClB,KAAK,CAAC,YAAY,CAAC,IAAI,IAAI,YAAY;gBACvC,KAAK,CAAC,YAAY,CAAC,UAAU;gBAC7B,kBAAkB,CAAC,YAAY,CAAC,IAAI,IAAI,YAAY,EACpD;gBACA,KAAK,MAAM,QAAQ,IAAI,KAAK,CAAC,YAAY,CAAC,UAAU,EAAE;oBACpD,IAAI,QAAQ,CAAC,KAAK,IAAI,uBAAA,IAAI,6CAAsB,EAAE;wBAChD,IAAI,CAAC,KAAK,CACR,eAAe,EACf,QAAQ,EACR,kBAAkB,CAAC,YAAY,CAAC,UAAU,CAAC,QAAQ,CAAC,KAAK,CAAa,CACvE,CAAC;qBACH;yBAAM;wBACL,IAAI,uBAAA,IAAI,wCAAiB,EAAE;4BACzB,IAAI,CAAC,KAAK,CAAC,cAAc,EAAE,uBAAA,IAAI,wCAAiB,CAAC,CAAC;yBACnD;wBAED,uBAAA,IAAI,yCAAyB,QAAQ,CAAC,KAAK,MAAA,CAAC;wBAC5C,uBAAA,IAAI,oCAAoB,kBAAkB,CAAC,YAAY,CAAC,UAAU,CAAC,QAAQ,CAAC,KAAK,CAAC,MAAA,CAAC;wBACnF,IAAI,uBAAA,IAAI,wCAAiB;4BAAE,IAAI,CAAC,KAAK,CAAC,iBAAiB,EAAE,uBAAA,IAAI,wCAAiB,CAAC,CAAC;qBACjF;iBACF;aACF;YAED,IAAI,CAAC,KAAK,CAAC,cAAc,EAAE,KAAK,CAAC,IAAI,CAAC,KAAK,EAAE,kBAAkB,CAAC,CAAC;YACjE,MAAM;QACR,KAAK,2BAA2B,CAAC;QACjC,KAAK,wBAAwB,CAAC;QAC9B,KAAK,2BAA2B,CAAC;QACjC,KAAK,yBAAyB;YAC5B,uBAAA,IAAI,2CAA2B,SAAS,MAAA,CAAC;YACzC,MAAM,OAAO,GAAG,KAAK,CAAC,IAAI,CAAC,YAAY,CAAC;YACxC,IAAI,OAAO,CAAC,IAAI,IAAI,YAAY,EAAE;gBAChC,IAAI,uBAAA,IAAI,wCAAiB,EAAE;oBACzB,IAAI,CAAC,KAAK,CAAC,cAAc,EAAE,uBAAA,IAAI,wCAA6B,CAAC,CAAC;oBAC9D,uBAAA,IAAI,oCAAoB,SAAS,MAAA,CAAC;iBACnC;aACF;YACD,IAAI,CAAC,KAAK,CAAC,aAAa,EAAE,KAAK,CAAC,IAAI,EAAE,kBAAkB,CAAC,CAAC;YAC1D,MAAM;QACR,KAAK,6BAA6B;YAChC,MAAM;KACT;AACH,CAAC,uEAEmC,KAA2B;IAC7D,uBAAA,IAAI,+BAAQ,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;IACzB,IAAI,CAAC,KAAK,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;AAC7B,CAAC,mFAEkB,KAAyB;IAC1C,QAAQ,KAAK,CAAC,KAAK,EAAE;QACnB,KAAK,yBAAyB;YAC5B,uBAAA,IAAI,yCAAkB,CAAC,KAAK,CAAC,IAAI,CAAC,EAAE,CAAC,GAAG,KAAK,CAAC,IAAI,CAAC;YACnD,OAAO,KAAK,CAAC,IAAI,CAAC;QAEpB,KAAK,uBAAuB;YAC1B,IAAI,QAAQ,GAAG,uBAAA,IAAI,yCAAkB,CAAC,KAAK,CAAC,IAAI,CAAC,EAAE,CAAiB,CAAC;YACrE,IAAI,CAAC,QAAQ,EAAE;gBACb,MAAM,KAAK,CAAC,uDAAuD,CAAC,CAAC;aACtE;YAED,IAAI,IAAI,GAAG,KAAK,CAAC,IAAI,CAAC;YAEtB,IAAI,IAAI,CAAC,KAAK,EAAE;gBACd,MAAM,WAAW,GAAG,eAAe,CAAC,eAAe,CAAC,QAAQ,EAAE,IAAI,CAAC,KAAK,CAAiB,CAAC;gBAC1F,uBAAA,IAAI,yCAAkB,CAAC,KAAK,CAAC,IAAI,CAAC,EAAE,CAAC,GAAG,WAAW,CAAC;aACrD;YAED,OAAO,uBAAA,IAAI,yCAAkB,CAAC,KAAK,CAAC,IAAI,CAAC,EAAE,CAAiB,CAAC;QAE/D,KAAK,2BAA2B,CAAC;QACjC,KAAK,wBAAwB,CAAC;QAC9B,KAAK,2BAA2B,CAAC;QACjC,KAAK,yBAAyB,CAAC;QAC/B,KAAK,6BAA6B;YAChC,uBAAA,IAAI,yCAAkB,CAAC,KAAK,CAAC,IAAI,CAAC,EAAE,CAAC,GAAG,KAAK,CAAC,IAAI,CAAC;YACnD,MAAM;KACT;IAED,IAAI,uBAAA,IAAI,yCAAkB,CAAC,KAAK,CAAC,IAAI,CAAC,EAAE,CAAC;QAAE,OAAO,uBAAA,IAAI,yCAAkB,CAAC,KAAK,CAAC,IAAI,CAAC,EAAE,CAAiB,CAAC;IACxG,MAAM,IAAI,KAAK,CAAC,uBAAuB,CAAC,CAAC;AAC3C,CAAC,mFAGC,KAA2B,EAC3B,QAA6B;IAE7B,IAAI,UAAU,GAA0B,EAAE,CAAC;IAE3C,QAAQ,KAAK,CAAC,KAAK,EAAE;QACnB,KAAK,wBAAwB;YAC3B,sDAAsD;YACtD,OAAO,CAAC,KAAK,CAAC,IAAI,EAAE,UAAU,CAAC,CAAC;QAElC,KAAK,sBAAsB;YACzB,IAAI,CAAC,QAAQ,EAAE;gBACb,MAAM,KAAK,CACT,wFAAwF,CACzF,CAAC;aACH;YAED,IAAI,IAAI,GAAG,KAAK,CAAC,IAAI,CAAC;YAEtB,yDAAyD;YACzD,IAAI,IAAI,CAAC,KAAK,CAAC,OAAO,EAAE;gBACtB,KAAK,MAAM,cAAc,IAAI,IAAI,CAAC,KAAK,CAAC,OAAO,EAAE;oBAC/C,IAAI,cAAc,CAAC,KAAK,IAAI,QAAQ,CAAC,OAAO,EAAE;wBAC5C,IAAI,cAAc,GAAG,QAAQ,CAAC,OAAO,CAAC,cAAc,C
AAC,KAAK,CAAC,CAAC;wBAC5D,QAAQ,CAAC,OAAO,CAAC,cAAc,CAAC,KAAK,CAAC,GAAG,uBAAA,IAAI,sEAAmB,MAAvB,IAAI,EAC3C,cAAc,EACd,cAAc,CACf,CAAC;qBACH;yBAAM;wBACL,QAAQ,CAAC,OAAO,CAAC,cAAc,CAAC,KAAK,CAAC,GAAG,cAAgC,CAAC;wBAC1E,wBAAwB;wBACxB,UAAU,CAAC,IAAI,CAAC,cAAc,CAAC,CAAC;qBACjC;iBACF;aACF;YAED,OAAO,CAAC,QAAQ,EAAE,UAAU,CAAC,CAAC;QAEhC,KAAK,4BAA4B,CAAC;QAClC,KAAK,0BAA0B,CAAC;QAChC,KAAK,2BAA2B;YAC9B,mCAAmC;YACnC,IAAI,QAAQ,EAAE;gBACZ,OAAO,CAAC,QAAQ,EAAE,UAAU,CAAC,CAAC;aAC/B;iBAAM;gBACL,MAAM,KAAK,CAAC,yDAAyD,CAAC,CAAC;aACxE;KACJ;IACD,MAAM,KAAK,CAAC,yCAAyC,CAAC,CAAC;AACzD,CAAC,mFAGC,cAAmC,EACnC,cAA0C;IAE1C,OAAO,eAAe,CAAC,eAAe,CAAC,cAA6C,EAAE,cAAc,CAE3E,CAAC;AAC5B,CAAC,mEAkEiC,KAAqB;IACrD,uBAAA,IAAI,uCAAuB,KAAK,CAAC,IAAI,MAAA,CAAC;IACtC,QAAQ,KAAK,CAAC,KAAK,EAAE;QACnB,KAAK,oBAAoB;YACvB,MAAM;QACR,KAAK,mBAAmB;YACtB,MAAM;QACR,KAAK,wBAAwB;YAC3B,MAAM;QACR,KAAK,4BAA4B,CAAC;QAClC,KAAK,sBAAsB,CAAC;QAC5B,KAAK,mBAAmB,CAAC;QACzB,KAAK,sBAAsB,CAAC;QAC5B,KAAK,oBAAoB;YACvB,uBAAA,IAAI,6BAAa,KAAK,CAAC,IAAI,MAAA,CAAC;YAC5B,IAAI,uBAAA,IAAI,wCAAiB,EAAE;gBACzB,IAAI,CAAC,KAAK,CAAC,cAAc,EAAE,uBAAA,IAAI,wCAAiB,CAAC,CAAC;gBAClD,uBAAA,IAAI,oCAAoB,SAAS,MAAA,CAAC;aACnC;YACD,MAAM;QACR,KAAK,uBAAuB;YAC1B,MAAM;KACT;AACH,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/lib/ChatCompletionRunner.d.ts b/node_modules/openai/lib/ChatCompletionRunner.d.ts new file mode 100644 index 0000000..dcf390a --- /dev/null +++ b/node_modules/openai/lib/ChatCompletionRunner.d.ts @@ -0,0 +1,21 @@ +import { type ChatCompletionMessageParam, type ChatCompletionCreateParamsNonStreaming } from "../resources/chat/completions.js"; +import { type RunnableFunctions, type BaseFunctionsArgs, RunnableTools } from "./RunnableFunction.js"; +import { AbstractChatCompletionRunner, AbstractChatCompletionRunnerEvents, RunnerOptions } from "./AbstractChatCompletionRunner.js"; +import OpenAI from "../index.js"; +import { AutoParseableTool } from "../lib/parser.js"; +export interface ChatCompletionRunnerEvents extends AbstractChatCompletionRunnerEvents { + content: (content: string) => void; +} +export type ChatCompletionFunctionRunnerParams = Omit & { + functions: RunnableFunctions; +}; +export type ChatCompletionToolRunnerParams = Omit & { + tools: RunnableTools | AutoParseableTool[]; +}; +export declare class ChatCompletionRunner extends AbstractChatCompletionRunner { + /** @deprecated - please use `runTools` instead. 
*/ + static runFunctions(client: OpenAI, params: ChatCompletionFunctionRunnerParams, options?: RunnerOptions): ChatCompletionRunner; + static runTools(client: OpenAI, params: ChatCompletionToolRunnerParams, options?: RunnerOptions): ChatCompletionRunner; + _addMessage(this: ChatCompletionRunner, message: ChatCompletionMessageParam, emit?: boolean): void; +} +//# sourceMappingURL=ChatCompletionRunner.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/lib/ChatCompletionRunner.d.ts.map b/node_modules/openai/lib/ChatCompletionRunner.d.ts.map new file mode 100644 index 0000000..9c65fe8 --- /dev/null +++ b/node_modules/openai/lib/ChatCompletionRunner.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"ChatCompletionRunner.d.ts","sourceRoot":"","sources":["../src/lib/ChatCompletionRunner.ts"],"names":[],"mappings":"AAAA,OAAO,EACL,KAAK,0BAA0B,EAC/B,KAAK,sCAAsC,EAC5C,MAAM,+BAA+B,CAAC;AACvC,OAAO,EAAE,KAAK,iBAAiB,EAAE,KAAK,iBAAiB,EAAE,aAAa,EAAE,MAAM,oBAAoB,CAAC;AACnG,OAAO,EACL,4BAA4B,EAC5B,kCAAkC,EAClC,aAAa,EACd,MAAM,gCAAgC,CAAC;AAExC,OAAO,MAAM,MAAM,UAAU,CAAC;AAC9B,OAAO,EAAE,iBAAiB,EAAE,MAAM,eAAe,CAAC;AAElD,MAAM,WAAW,0BAA2B,SAAQ,kCAAkC;IACpF,OAAO,EAAE,CAAC,OAAO,EAAE,MAAM,KAAK,IAAI,CAAC;CACpC;AAED,MAAM,MAAM,kCAAkC,CAAC,aAAa,SAAS,iBAAiB,IAAI,IAAI,CAC5F,sCAAsC,EACtC,WAAW,CACZ,GAAG;IACF,SAAS,EAAE,iBAAiB,CAAC,aAAa,CAAC,CAAC;CAC7C,CAAC;AAEF,MAAM,MAAM,8BAA8B,CAAC,aAAa,SAAS,iBAAiB,IAAI,IAAI,CACxF,sCAAsC,EACtC,OAAO,CACR,GAAG;IACF,KAAK,EAAE,aAAa,CAAC,aAAa,CAAC,GAAG,iBAAiB,CAAC,GAAG,EAAE,IAAI,CAAC,EAAE,CAAC;CACtE,CAAC;AAEF,qBAAa,oBAAoB,CAAC,OAAO,GAAG,IAAI,CAAE,SAAQ,4BAA4B,CACpF,0BAA0B,EAC1B,OAAO,CACR;IACC,mDAAmD;IACnD,MAAM,CAAC,YAAY,CACjB,MAAM,EAAE,MAAM,EACd,MAAM,EAAE,kCAAkC,CAAC,GAAG,EAAE,CAAC,EACjD,OAAO,CAAC,EAAE,aAAa,GACtB,oBAAoB,CAAC,IAAI,CAAC;IAU7B,MAAM,CAAC,QAAQ,CAAC,OAAO,EACrB,MAAM,EAAE,MAAM,EACd,MAAM,EAAE,8BAA8B,CAAC,GAAG,EAAE,CAAC,EAC7C,OAAO,CAAC,EAAE,aAAa,GACtB,oBAAoB,CAAC,OAAO,CAAC;IAUvB,WAAW,CAClB,IAAI,EAAE,oBAAoB,CAAC,OAAO,CAAC,EACnC,OAAO,EAAE,0BAA0B,EACnC,IAAI,GAAE,OAAc;CAOvB"} \ No newline at end of file diff --git a/node_modules/openai/lib/ChatCompletionRunner.js b/node_modules/openai/lib/ChatCompletionRunner.js new file mode 100644 index 0000000..34ff99a --- /dev/null +++ b/node_modules/openai/lib/ChatCompletionRunner.js @@ -0,0 +1,34 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.ChatCompletionRunner = void 0; +const AbstractChatCompletionRunner_1 = require("./AbstractChatCompletionRunner.js"); +const chatCompletionUtils_1 = require("./chatCompletionUtils.js"); +class ChatCompletionRunner extends AbstractChatCompletionRunner_1.AbstractChatCompletionRunner { + /** @deprecated - please use `runTools` instead. 
*/ + static runFunctions(client, params, options) { + const runner = new ChatCompletionRunner(); + const opts = { + ...options, + headers: { ...options?.headers, 'X-Stainless-Helper-Method': 'runFunctions' }, + }; + runner._run(() => runner._runFunctions(client, params, opts)); + return runner; + } + static runTools(client, params, options) { + const runner = new ChatCompletionRunner(); + const opts = { + ...options, + headers: { ...options?.headers, 'X-Stainless-Helper-Method': 'runTools' }, + }; + runner._run(() => runner._runTools(client, params, opts)); + return runner; + } + _addMessage(message, emit = true) { + super._addMessage(message, emit); + if ((0, chatCompletionUtils_1.isAssistantMessage)(message) && message.content) { + this._emit('content', message.content); + } + } +} +exports.ChatCompletionRunner = ChatCompletionRunner; +//# sourceMappingURL=ChatCompletionRunner.js.map \ No newline at end of file diff --git a/node_modules/openai/lib/ChatCompletionRunner.js.map b/node_modules/openai/lib/ChatCompletionRunner.js.map new file mode 100644 index 0000000..cc08b06 --- /dev/null +++ b/node_modules/openai/lib/ChatCompletionRunner.js.map @@ -0,0 +1 @@ +{"version":3,"file":"ChatCompletionRunner.js","sourceRoot":"","sources":["../src/lib/ChatCompletionRunner.ts"],"names":[],"mappings":";;;AAKA,oFAIwC;AACxC,kEAA2D;AAsB3D,MAAa,oBAAqC,SAAQ,2DAGzD;IACC,mDAAmD;IACnD,MAAM,CAAC,YAAY,CACjB,MAAc,EACd,MAAiD,EACjD,OAAuB;QAEvB,MAAM,MAAM,GAAG,IAAI,oBAAoB,EAAE,CAAC;QAC1C,MAAM,IAAI,GAAG;YACX,GAAG,OAAO;YACV,OAAO,EAAE,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE,2BAA2B,EAAE,cAAc,EAAE;SAC9E,CAAC;QACF,MAAM,CAAC,IAAI,CAAC,GAAG,EAAE,CAAC,MAAM,CAAC,aAAa,CAAC,MAAM,EAAE,MAAM,EAAE,IAAI,CAAC,CAAC,CAAC;QAC9D,OAAO,MAAM,CAAC;IAChB,CAAC;IAED,MAAM,CAAC,QAAQ,CACb,MAAc,EACd,MAA6C,EAC7C,OAAuB;QAEvB,MAAM,MAAM,GAAG,IAAI,oBAAoB,EAAW,CAAC;QACnD,MAAM,IAAI,GAAG;YACX,GAAG,OAAO;YACV,OAAO,EAAE,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE,2BAA2B,EAAE,UAAU,EAAE;SAC1E,CAAC;QACF,MAAM,CAAC,IAAI,CAAC,GAAG,EAAE,CAAC,MAAM,CAAC,SAAS,CAAC,MAAM,EAAE,MAAM,EAAE,IAAI,CAAC,CAAC,CAAC;QAC1D,OAAO,MAAM,CAAC;IAChB,CAAC;IAEQ,WAAW,CAElB,OAAmC,EACnC,OAAgB,IAAI;QAEpB,KAAK,CAAC,WAAW,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;QACjC,IAAI,IAAA,wCAAkB,EAAC,OAAO,CAAC,IAAI,OAAO,CAAC,OAAO,EAAE;YAClD,IAAI,CAAC,KAAK,CAAC,SAAS,EAAE,OAAO,CAAC,OAAiB,CAAC,CAAC;SAClD;IACH,CAAC;CACF;AA3CD,oDA2CC"} \ No newline at end of file diff --git a/node_modules/openai/lib/ChatCompletionRunner.mjs b/node_modules/openai/lib/ChatCompletionRunner.mjs new file mode 100644 index 0000000..e75bf30 --- /dev/null +++ b/node_modules/openai/lib/ChatCompletionRunner.mjs @@ -0,0 +1,30 @@ +import { AbstractChatCompletionRunner, } from "./AbstractChatCompletionRunner.mjs"; +import { isAssistantMessage } from "./chatCompletionUtils.mjs"; +export class ChatCompletionRunner extends AbstractChatCompletionRunner { + /** @deprecated - please use `runTools` instead. 
*/ + static runFunctions(client, params, options) { + const runner = new ChatCompletionRunner(); + const opts = { + ...options, + headers: { ...options?.headers, 'X-Stainless-Helper-Method': 'runFunctions' }, + }; + runner._run(() => runner._runFunctions(client, params, opts)); + return runner; + } + static runTools(client, params, options) { + const runner = new ChatCompletionRunner(); + const opts = { + ...options, + headers: { ...options?.headers, 'X-Stainless-Helper-Method': 'runTools' }, + }; + runner._run(() => runner._runTools(client, params, opts)); + return runner; + } + _addMessage(message, emit = true) { + super._addMessage(message, emit); + if (isAssistantMessage(message) && message.content) { + this._emit('content', message.content); + } + } +} +//# sourceMappingURL=ChatCompletionRunner.mjs.map \ No newline at end of file diff --git a/node_modules/openai/lib/ChatCompletionRunner.mjs.map b/node_modules/openai/lib/ChatCompletionRunner.mjs.map new file mode 100644 index 0000000..432118c --- /dev/null +++ b/node_modules/openai/lib/ChatCompletionRunner.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"ChatCompletionRunner.mjs","sourceRoot":"","sources":["../src/lib/ChatCompletionRunner.ts"],"names":[],"mappings":"OAKO,EACL,4BAA4B,GAG7B;OACM,EAAE,kBAAkB,EAAE;AAsB7B,MAAM,OAAO,oBAAqC,SAAQ,4BAGzD;IACC,mDAAmD;IACnD,MAAM,CAAC,YAAY,CACjB,MAAc,EACd,MAAiD,EACjD,OAAuB;QAEvB,MAAM,MAAM,GAAG,IAAI,oBAAoB,EAAE,CAAC;QAC1C,MAAM,IAAI,GAAG;YACX,GAAG,OAAO;YACV,OAAO,EAAE,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE,2BAA2B,EAAE,cAAc,EAAE;SAC9E,CAAC;QACF,MAAM,CAAC,IAAI,CAAC,GAAG,EAAE,CAAC,MAAM,CAAC,aAAa,CAAC,MAAM,EAAE,MAAM,EAAE,IAAI,CAAC,CAAC,CAAC;QAC9D,OAAO,MAAM,CAAC;IAChB,CAAC;IAED,MAAM,CAAC,QAAQ,CACb,MAAc,EACd,MAA6C,EAC7C,OAAuB;QAEvB,MAAM,MAAM,GAAG,IAAI,oBAAoB,EAAW,CAAC;QACnD,MAAM,IAAI,GAAG;YACX,GAAG,OAAO;YACV,OAAO,EAAE,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE,2BAA2B,EAAE,UAAU,EAAE;SAC1E,CAAC;QACF,MAAM,CAAC,IAAI,CAAC,GAAG,EAAE,CAAC,MAAM,CAAC,SAAS,CAAC,MAAM,EAAE,MAAM,EAAE,IAAI,CAAC,CAAC,CAAC;QAC1D,OAAO,MAAM,CAAC;IAChB,CAAC;IAEQ,WAAW,CAElB,OAAmC,EACnC,OAAgB,IAAI;QAEpB,KAAK,CAAC,WAAW,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;QACjC,IAAI,kBAAkB,CAAC,OAAO,CAAC,IAAI,OAAO,CAAC,OAAO,EAAE;YAClD,IAAI,CAAC,KAAK,CAAC,SAAS,EAAE,OAAO,CAAC,OAAiB,CAAC,CAAC;SAClD;IACH,CAAC;CACF"} \ No newline at end of file diff --git a/node_modules/openai/lib/ChatCompletionStream.d.ts b/node_modules/openai/lib/ChatCompletionStream.d.ts new file mode 100644 index 0000000..052379f --- /dev/null +++ b/node_modules/openai/lib/ChatCompletionStream.d.ts @@ -0,0 +1,208 @@ +import * as Core from "../core.js"; +import { ChatCompletionTokenLogprob, type ChatCompletion, type ChatCompletionChunk, type ChatCompletionCreateParams, type ChatCompletionCreateParamsBase } from "../resources/chat/completions.js"; +import { AbstractChatCompletionRunner, type AbstractChatCompletionRunnerEvents } from "./AbstractChatCompletionRunner.js"; +import { type ReadableStream } from "../_shims/index.js"; +import OpenAI from "../index.js"; +import { ParsedChatCompletion } from "../resources/beta/chat/completions.js"; +export interface ContentDeltaEvent { + delta: string; + snapshot: string; + parsed: unknown | null; +} +export interface ContentDoneEvent { + content: string; + parsed: ParsedT | null; +} +export interface RefusalDeltaEvent { + delta: string; + snapshot: string; +} +export interface RefusalDoneEvent { + refusal: string; +} +export interface FunctionToolCallArgumentsDeltaEvent { + name: string; + index: number; + arguments: string; + parsed_arguments: unknown; + arguments_delta: string; +} 
+export interface FunctionToolCallArgumentsDoneEvent { + name: string; + index: number; + arguments: string; + parsed_arguments: unknown; +} +export interface LogProbsContentDeltaEvent { + content: Array; + snapshot: Array; +} +export interface LogProbsContentDoneEvent { + content: Array; +} +export interface LogProbsRefusalDeltaEvent { + refusal: Array; + snapshot: Array; +} +export interface LogProbsRefusalDoneEvent { + refusal: Array; +} +export interface ChatCompletionStreamEvents extends AbstractChatCompletionRunnerEvents { + content: (contentDelta: string, contentSnapshot: string) => void; + chunk: (chunk: ChatCompletionChunk, snapshot: ChatCompletionSnapshot) => void; + 'content.delta': (props: ContentDeltaEvent) => void; + 'content.done': (props: ContentDoneEvent) => void; + 'refusal.delta': (props: RefusalDeltaEvent) => void; + 'refusal.done': (props: RefusalDoneEvent) => void; + 'tool_calls.function.arguments.delta': (props: FunctionToolCallArgumentsDeltaEvent) => void; + 'tool_calls.function.arguments.done': (props: FunctionToolCallArgumentsDoneEvent) => void; + 'logprobs.content.delta': (props: LogProbsContentDeltaEvent) => void; + 'logprobs.content.done': (props: LogProbsContentDoneEvent) => void; + 'logprobs.refusal.delta': (props: LogProbsRefusalDeltaEvent) => void; + 'logprobs.refusal.done': (props: LogProbsRefusalDoneEvent) => void; +} +export type ChatCompletionStreamParams = Omit & { + stream?: true; +}; +export declare class ChatCompletionStream extends AbstractChatCompletionRunner, ParsedT> implements AsyncIterable { + #private; + constructor(params: ChatCompletionCreateParams | null); + get currentChatCompletionSnapshot(): ChatCompletionSnapshot | undefined; + /** + * Intended for use on the frontend, consuming a stream produced with + * `.toReadableStream()` on the backend. + * + * Note that messages sent to the model do not appear in `.on('message')` + * in this context. + */ + static fromReadableStream(stream: ReadableStream): ChatCompletionStream; + static createChatCompletion(client: OpenAI, params: ChatCompletionStreamParams, options?: Core.RequestOptions): ChatCompletionStream; + protected _createChatCompletion(client: OpenAI, params: ChatCompletionCreateParams, options?: Core.RequestOptions): Promise>; + protected _fromReadableStream(readableStream: ReadableStream, options?: Core.RequestOptions): Promise; + [Symbol.asyncIterator](this: ChatCompletionStream): AsyncIterator; + toReadableStream(): ReadableStream; +} +/** + * Represents a streamed chunk of a chat completion response returned by model, + * based on the provided input. + */ +export interface ChatCompletionSnapshot { + /** + * A unique identifier for the chat completion. + */ + id: string; + /** + * A list of chat completion choices. Can be more than one if `n` is greater + * than 1. + */ + choices: Array; + /** + * The Unix timestamp (in seconds) of when the chat completion was created. + */ + created: number; + /** + * The model to generate the completion. + */ + model: string; + /** + * This fingerprint represents the backend configuration that the model runs with. + * + * Can be used in conjunction with the `seed` request parameter to understand when + * backend changes have been made that might impact determinism. + */ + system_fingerprint?: string; +} +export declare namespace ChatCompletionSnapshot { + interface Choice { + /** + * A chat completion delta generated by streamed model responses. + */ + message: Choice.Message; + /** + * The reason the model stopped generating tokens. 
This will be `stop` if the model + * hit a natural stop point or a provided stop sequence, `length` if the maximum + * number of tokens specified in the request was reached, `content_filter` if + * content was omitted due to a flag from our content filters, or `function_call` + * if the model called a function. + */ + finish_reason: ChatCompletion.Choice['finish_reason'] | null; + /** + * Log probability information for the choice. + */ + logprobs: ChatCompletion.Choice.Logprobs | null; + /** + * The index of the choice in the list of choices. + */ + index: number; + } + namespace Choice { + /** + * A chat completion delta generated by streamed model responses. + */ + interface Message { + /** + * The contents of the chunk message. + */ + content?: string | null; + refusal?: string | null; + parsed?: unknown | null; + /** + * The name and arguments of a function that should be called, as generated by the + * model. + */ + function_call?: Message.FunctionCall; + tool_calls?: Array; + /** + * The role of the author of this message. + */ + role?: 'system' | 'user' | 'assistant' | 'function' | 'tool'; + } + namespace Message { + interface ToolCall { + /** + * The ID of the tool call. + */ + id: string; + function: ToolCall.Function; + /** + * The type of the tool. + */ + type: 'function'; + } + namespace ToolCall { + interface Function { + /** + * The arguments to call the function with, as generated by the model in JSON + * format. Note that the model does not always generate valid JSON, and may + * hallucinate parameters not defined by your function schema. Validate the + * arguments in your code before calling your function. + */ + arguments: string; + parsed_arguments?: unknown; + /** + * The name of the function to call. + */ + name: string; + } + } + /** + * The name and arguments of a function that should be called, as generated by the + * model. + */ + interface FunctionCall { + /** + * The arguments to call the function with, as generated by the model in JSON + * format. Note that the model does not always generate valid JSON, and may + * hallucinate parameters not defined by your function schema. Validate the + * arguments in your code before calling your function. + */ + arguments?: string; + /** + * The name of the function to call. 
+ */ + name?: string; + } + } + } +} +//# sourceMappingURL=ChatCompletionStream.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/lib/ChatCompletionStream.d.ts.map b/node_modules/openai/lib/ChatCompletionStream.d.ts.map new file mode 100644 index 0000000..e273b92 --- /dev/null +++ b/node_modules/openai/lib/ChatCompletionStream.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"ChatCompletionStream.d.ts","sourceRoot":"","sources":["../src/lib/ChatCompletionStream.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,IAAI,MAAM,SAAS,CAAC;AAOhC,OAAO,EACL,0BAA0B,EAC1B,KAAK,cAAc,EACnB,KAAK,mBAAmB,EACxB,KAAK,0BAA0B,EAE/B,KAAK,8BAA8B,EACpC,MAAM,+BAA+B,CAAC;AACvC,OAAO,EACL,4BAA4B,EAC5B,KAAK,kCAAkC,EACxC,MAAM,gCAAgC,CAAC;AACxC,OAAO,EAAE,KAAK,cAAc,EAAE,MAAM,iBAAiB,CAAC;AAEtD,OAAO,MAAM,MAAM,UAAU,CAAC;AAC9B,OAAO,EAAE,oBAAoB,EAAE,MAAM,oCAAoC,CAAC;AAW1E,MAAM,WAAW,iBAAiB;IAChC,KAAK,EAAE,MAAM,CAAC;IACd,QAAQ,EAAE,MAAM,CAAC;IACjB,MAAM,EAAE,OAAO,GAAG,IAAI,CAAC;CACxB;AAED,MAAM,WAAW,gBAAgB,CAAC,OAAO,GAAG,IAAI;IAC9C,OAAO,EAAE,MAAM,CAAC;IAChB,MAAM,EAAE,OAAO,GAAG,IAAI,CAAC;CACxB;AAED,MAAM,WAAW,iBAAiB;IAChC,KAAK,EAAE,MAAM,CAAC;IACd,QAAQ,EAAE,MAAM,CAAC;CAClB;AAED,MAAM,WAAW,gBAAgB;IAC/B,OAAO,EAAE,MAAM,CAAC;CACjB;AAED,MAAM,WAAW,mCAAmC;IAClD,IAAI,EAAE,MAAM,CAAC;IAEb,KAAK,EAAE,MAAM,CAAC;IAEd,SAAS,EAAE,MAAM,CAAC;IAElB,gBAAgB,EAAE,OAAO,CAAC;IAE1B,eAAe,EAAE,MAAM,CAAC;CACzB;AAED,MAAM,WAAW,kCAAkC;IACjD,IAAI,EAAE,MAAM,CAAC;IAEb,KAAK,EAAE,MAAM,CAAC;IAEd,SAAS,EAAE,MAAM,CAAC;IAElB,gBAAgB,EAAE,OAAO,CAAC;CAC3B;AAED,MAAM,WAAW,yBAAyB;IACxC,OAAO,EAAE,KAAK,CAAC,0BAA0B,CAAC,CAAC;IAC3C,QAAQ,EAAE,KAAK,CAAC,0BAA0B,CAAC,CAAC;CAC7C;AAED,MAAM,WAAW,wBAAwB;IACvC,OAAO,EAAE,KAAK,CAAC,0BAA0B,CAAC,CAAC;CAC5C;AAED,MAAM,WAAW,yBAAyB;IACxC,OAAO,EAAE,KAAK,CAAC,0BAA0B,CAAC,CAAC;IAC3C,QAAQ,EAAE,KAAK,CAAC,0BAA0B,CAAC,CAAC;CAC7C;AAED,MAAM,WAAW,wBAAwB;IACvC,OAAO,EAAE,KAAK,CAAC,0BAA0B,CAAC,CAAC;CAC5C;AAED,MAAM,WAAW,0BAA0B,CAAC,OAAO,GAAG,IAAI,CAAE,SAAQ,kCAAkC;IACpG,OAAO,EAAE,CAAC,YAAY,EAAE,MAAM,EAAE,eAAe,EAAE,MAAM,KAAK,IAAI,CAAC;IACjE,KAAK,EAAE,CAAC,KAAK,EAAE,mBAAmB,EAAE,QAAQ,EAAE,sBAAsB,KAAK,IAAI,CAAC;IAE9E,eAAe,EAAE,CAAC,KAAK,EAAE,iBAAiB,KAAK,IAAI,CAAC;IACpD,cAAc,EAAE,CAAC,KAAK,EAAE,gBAAgB,CAAC,OAAO,CAAC,KAAK,IAAI,CAAC;IAE3D,eAAe,EAAE,CAAC,KAAK,EAAE,iBAAiB,KAAK,IAAI,CAAC;IACpD,cAAc,EAAE,CAAC,KAAK,EAAE,gBAAgB,KAAK,IAAI,CAAC;IAElD,qCAAqC,EAAE,CAAC,KAAK,EAAE,mCAAmC,KAAK,IAAI,CAAC;IAC5F,oCAAoC,EAAE,CAAC,KAAK,EAAE,kCAAkC,KAAK,IAAI,CAAC;IAE1F,wBAAwB,EAAE,CAAC,KAAK,EAAE,yBAAyB,KAAK,IAAI,CAAC;IACrE,uBAAuB,EAAE,CAAC,KAAK,EAAE,wBAAwB,KAAK,IAAI,CAAC;IAEnE,wBAAwB,EAAE,CAAC,KAAK,EAAE,yBAAyB,KAAK,IAAI,CAAC;IACrE,uBAAuB,EAAE,CAAC,KAAK,EAAE,wBAAwB,KAAK,IAAI,CAAC;CACpE;AAED,MAAM,MAAM,0BAA0B,GAAG,IAAI,CAAC,8BAA8B,EAAE,QAAQ,CAAC,GAAG;IACxF,MAAM,CAAC,EAAE,IAAI,CAAC;CACf,CAAC;AAWF,qBAAa,oBAAoB,CAAC,OAAO,GAAG,IAAI,CAC9C,SAAQ,4BAA4B,CAAC,0BAA0B,CAAC,OAAO,CAAC,EAAE,OAAO,CACjF,YAAW,aAAa,CAAC,mBAAmB,CAAC;;gBAMjC,MAAM,EAAE,0BAA0B,GAAG,IAAI;IAMrD,IAAI,6BAA6B,IAAI,sBAAsB,GAAG,SAAS,CAEtE;IAED;;;;;;OAMG;IACH,MAAM,CAAC,kBAAkB,CAAC,MAAM,EAAE,cAAc,GAAG,oBAAoB,CAAC,IAAI,CAAC;IAM7E,MAAM,CAAC,oBAAoB,CAAC,OAAO,EACjC,MAAM,EAAE,MAAM,EACd,MAAM,EAAE,0BAA0B,EAClC,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAC5B,oBAAoB,CAAC,OAAO,CAAC;cA8MP,qBAAqB,CAC5C,MAAM,EAAE,MAAM,EACd,MAAM,EAAE,0BAA0B,EAClC,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAC5B,OAAO,CAAC,oBAAoB,CAAC,OAAO,CAAC,CAAC;cAuBzB,mBAAmB,CACjC,cAAc,EAAE,cAAc,EAC9B,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAC5B,OAAO,CAAC,cAAc,CAAC;IA8I1B,CAAC,MAAM,CAAC,aAAa,CAAC,CAAC,IAAI,EAAE,oBAAoB,CAAC,OAAO,CAAC,GAAG,aAAa,CAAC,mBAAmB,CAAC;IA6D/F,gBAAgB,IAAI,cAAc;CAInC;AAwGD;;;GAGG;AACH,MAAM,WAA
W,sBAAsB;IACrC;;OAEG;IACH,EAAE,EAAE,MAAM,CAAC;IAEX;;;OAGG;IACH,OAAO,EAAE,KAAK,CAAC,sBAAsB,CAAC,MAAM,CAAC,CAAC;IAE9C;;OAEG;IACH,OAAO,EAAE,MAAM,CAAC;IAEhB;;OAEG;IACH,KAAK,EAAE,MAAM,CAAC;IAMd;;;;;OAKG;IACH,kBAAkB,CAAC,EAAE,MAAM,CAAC;CAC7B;AAED,yBAAiB,sBAAsB,CAAC;IACtC,UAAiB,MAAM;QACrB;;WAEG;QACH,OAAO,EAAE,MAAM,CAAC,OAAO,CAAC;QAExB;;;;;;WAMG;QACH,aAAa,EAAE,cAAc,CAAC,MAAM,CAAC,eAAe,CAAC,GAAG,IAAI,CAAC;QAE7D;;WAEG;QACH,QAAQ,EAAE,cAAc,CAAC,MAAM,CAAC,QAAQ,GAAG,IAAI,CAAC;QAEhD;;WAEG;QACH,KAAK,EAAE,MAAM,CAAC;KACf;IAED,UAAiB,MAAM,CAAC;QACtB;;WAEG;QACH,UAAiB,OAAO;YACtB;;eAEG;YACH,OAAO,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;YAExB,OAAO,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;YAExB,MAAM,CAAC,EAAE,OAAO,GAAG,IAAI,CAAC;YAExB;;;eAGG;YACH,aAAa,CAAC,EAAE,OAAO,CAAC,YAAY,CAAC;YAErC,UAAU,CAAC,EAAE,KAAK,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC;YAErC;;eAEG;YACH,IAAI,CAAC,EAAE,QAAQ,GAAG,MAAM,GAAG,WAAW,GAAG,UAAU,GAAG,MAAM,CAAC;SAC9D;QAED,UAAiB,OAAO,CAAC;YACvB,UAAiB,QAAQ;gBACvB;;mBAEG;gBACH,EAAE,EAAE,MAAM,CAAC;gBAEX,QAAQ,EAAE,QAAQ,CAAC,QAAQ,CAAC;gBAE5B;;mBAEG;gBACH,IAAI,EAAE,UAAU,CAAC;aAClB;YAED,UAAiB,QAAQ,CAAC;gBACxB,UAAiB,QAAQ;oBACvB;;;;;uBAKG;oBACH,SAAS,EAAE,MAAM,CAAC;oBAElB,gBAAgB,CAAC,EAAE,OAAO,CAAC;oBAE3B;;uBAEG;oBACH,IAAI,EAAE,MAAM,CAAC;iBACd;aACF;YAED;;;eAGG;YACH,UAAiB,YAAY;gBAC3B;;;;;mBAKG;gBACH,SAAS,CAAC,EAAE,MAAM,CAAC;gBAEnB;;mBAEG;gBACH,IAAI,CAAC,EAAE,MAAM,CAAC;aACf;SACF;KACF;CACF"} \ No newline at end of file diff --git a/node_modules/openai/lib/ChatCompletionStream.js b/node_modules/openai/lib/ChatCompletionStream.js new file mode 100644 index 0000000..bc0d36b --- /dev/null +++ b/node_modules/openai/lib/ChatCompletionStream.js @@ -0,0 +1,503 @@ +"use strict"; +var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; +}; +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? 
f.value : state.get(receiver); +}; +var _ChatCompletionStream_instances, _ChatCompletionStream_params, _ChatCompletionStream_choiceEventStates, _ChatCompletionStream_currentChatCompletionSnapshot, _ChatCompletionStream_beginRequest, _ChatCompletionStream_getChoiceEventState, _ChatCompletionStream_addChunk, _ChatCompletionStream_emitToolCallDoneEvent, _ChatCompletionStream_emitContentDoneEvents, _ChatCompletionStream_endRequest, _ChatCompletionStream_getAutoParseableResponseFormat, _ChatCompletionStream_accumulateChatCompletion; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.ChatCompletionStream = void 0; +const error_1 = require("../error.js"); +const AbstractChatCompletionRunner_1 = require("./AbstractChatCompletionRunner.js"); +const streaming_1 = require("../streaming.js"); +const parser_1 = require("../lib/parser.js"); +const parser_2 = require("../_vendor/partial-json-parser/parser.js"); +class ChatCompletionStream extends AbstractChatCompletionRunner_1.AbstractChatCompletionRunner { + constructor(params) { + super(); + _ChatCompletionStream_instances.add(this); + _ChatCompletionStream_params.set(this, void 0); + _ChatCompletionStream_choiceEventStates.set(this, void 0); + _ChatCompletionStream_currentChatCompletionSnapshot.set(this, void 0); + __classPrivateFieldSet(this, _ChatCompletionStream_params, params, "f"); + __classPrivateFieldSet(this, _ChatCompletionStream_choiceEventStates, [], "f"); + } + get currentChatCompletionSnapshot() { + return __classPrivateFieldGet(this, _ChatCompletionStream_currentChatCompletionSnapshot, "f"); + } + /** + * Intended for use on the frontend, consuming a stream produced with + * `.toReadableStream()` on the backend. + * + * Note that messages sent to the model do not appear in `.on('message')` + * in this context. 
+ */ + static fromReadableStream(stream) { + const runner = new ChatCompletionStream(null); + runner._run(() => runner._fromReadableStream(stream)); + return runner; + } + static createChatCompletion(client, params, options) { + const runner = new ChatCompletionStream(params); + runner._run(() => runner._runChatCompletion(client, { ...params, stream: true }, { ...options, headers: { ...options?.headers, 'X-Stainless-Helper-Method': 'stream' } })); + return runner; + } + async _createChatCompletion(client, params, options) { + super._createChatCompletion; + const signal = options?.signal; + if (signal) { + if (signal.aborted) + this.controller.abort(); + signal.addEventListener('abort', () => this.controller.abort()); + } + __classPrivateFieldGet(this, _ChatCompletionStream_instances, "m", _ChatCompletionStream_beginRequest).call(this); + const stream = await client.chat.completions.create({ ...params, stream: true }, { ...options, signal: this.controller.signal }); + this._connected(); + for await (const chunk of stream) { + __classPrivateFieldGet(this, _ChatCompletionStream_instances, "m", _ChatCompletionStream_addChunk).call(this, chunk); + } + if (stream.controller.signal?.aborted) { + throw new error_1.APIUserAbortError(); + } + return this._addChatCompletion(__classPrivateFieldGet(this, _ChatCompletionStream_instances, "m", _ChatCompletionStream_endRequest).call(this)); + } + async _fromReadableStream(readableStream, options) { + const signal = options?.signal; + if (signal) { + if (signal.aborted) + this.controller.abort(); + signal.addEventListener('abort', () => this.controller.abort()); + } + __classPrivateFieldGet(this, _ChatCompletionStream_instances, "m", _ChatCompletionStream_beginRequest).call(this); + this._connected(); + const stream = streaming_1.Stream.fromReadableStream(readableStream, this.controller); + let chatId; + for await (const chunk of stream) { + if (chatId && chatId !== chunk.id) { + // A new request has been made. 
+ this._addChatCompletion(__classPrivateFieldGet(this, _ChatCompletionStream_instances, "m", _ChatCompletionStream_endRequest).call(this)); + } + __classPrivateFieldGet(this, _ChatCompletionStream_instances, "m", _ChatCompletionStream_addChunk).call(this, chunk); + chatId = chunk.id; + } + if (stream.controller.signal?.aborted) { + throw new error_1.APIUserAbortError(); + } + return this._addChatCompletion(__classPrivateFieldGet(this, _ChatCompletionStream_instances, "m", _ChatCompletionStream_endRequest).call(this)); + } + [(_ChatCompletionStream_params = new WeakMap(), _ChatCompletionStream_choiceEventStates = new WeakMap(), _ChatCompletionStream_currentChatCompletionSnapshot = new WeakMap(), _ChatCompletionStream_instances = new WeakSet(), _ChatCompletionStream_beginRequest = function _ChatCompletionStream_beginRequest() { + if (this.ended) + return; + __classPrivateFieldSet(this, _ChatCompletionStream_currentChatCompletionSnapshot, undefined, "f"); + }, _ChatCompletionStream_getChoiceEventState = function _ChatCompletionStream_getChoiceEventState(choice) { + let state = __classPrivateFieldGet(this, _ChatCompletionStream_choiceEventStates, "f")[choice.index]; + if (state) { + return state; + } + state = { + content_done: false, + refusal_done: false, + logprobs_content_done: false, + logprobs_refusal_done: false, + done_tool_calls: new Set(), + current_tool_call_index: null, + }; + __classPrivateFieldGet(this, _ChatCompletionStream_choiceEventStates, "f")[choice.index] = state; + return state; + }, _ChatCompletionStream_addChunk = function _ChatCompletionStream_addChunk(chunk) { + if (this.ended) + return; + const completion = __classPrivateFieldGet(this, _ChatCompletionStream_instances, "m", _ChatCompletionStream_accumulateChatCompletion).call(this, chunk); + this._emit('chunk', chunk, completion); + for (const choice of chunk.choices) { + const choiceSnapshot = completion.choices[choice.index]; + if (choice.delta.content != null && + choiceSnapshot.message?.role === 'assistant' && + choiceSnapshot.message?.content) { + this._emit('content', choice.delta.content, choiceSnapshot.message.content); + this._emit('content.delta', { + delta: choice.delta.content, + snapshot: choiceSnapshot.message.content, + parsed: choiceSnapshot.message.parsed, + }); + } + if (choice.delta.refusal != null && + choiceSnapshot.message?.role === 'assistant' && + choiceSnapshot.message?.refusal) { + this._emit('refusal.delta', { + delta: choice.delta.refusal, + snapshot: choiceSnapshot.message.refusal, + }); + } + if (choice.logprobs?.content != null && choiceSnapshot.message?.role === 'assistant') { + this._emit('logprobs.content.delta', { + content: choice.logprobs?.content, + snapshot: choiceSnapshot.logprobs?.content ?? [], + }); + } + if (choice.logprobs?.refusal != null && choiceSnapshot.message?.role === 'assistant') { + this._emit('logprobs.refusal.delta', { + refusal: choice.logprobs?.refusal, + snapshot: choiceSnapshot.logprobs?.refusal ?? 
[], + }); + } + const state = __classPrivateFieldGet(this, _ChatCompletionStream_instances, "m", _ChatCompletionStream_getChoiceEventState).call(this, choiceSnapshot); + if (choiceSnapshot.finish_reason) { + __classPrivateFieldGet(this, _ChatCompletionStream_instances, "m", _ChatCompletionStream_emitContentDoneEvents).call(this, choiceSnapshot); + if (state.current_tool_call_index != null) { + __classPrivateFieldGet(this, _ChatCompletionStream_instances, "m", _ChatCompletionStream_emitToolCallDoneEvent).call(this, choiceSnapshot, state.current_tool_call_index); + } + } + for (const toolCall of choice.delta.tool_calls ?? []) { + if (state.current_tool_call_index !== toolCall.index) { + __classPrivateFieldGet(this, _ChatCompletionStream_instances, "m", _ChatCompletionStream_emitContentDoneEvents).call(this, choiceSnapshot); + // new tool call started, the previous one is done + if (state.current_tool_call_index != null) { + __classPrivateFieldGet(this, _ChatCompletionStream_instances, "m", _ChatCompletionStream_emitToolCallDoneEvent).call(this, choiceSnapshot, state.current_tool_call_index); + } + } + state.current_tool_call_index = toolCall.index; + } + for (const toolCallDelta of choice.delta.tool_calls ?? []) { + const toolCallSnapshot = choiceSnapshot.message.tool_calls?.[toolCallDelta.index]; + if (!toolCallSnapshot?.type) { + continue; + } + if (toolCallSnapshot?.type === 'function') { + this._emit('tool_calls.function.arguments.delta', { + name: toolCallSnapshot.function?.name, + index: toolCallDelta.index, + arguments: toolCallSnapshot.function.arguments, + parsed_arguments: toolCallSnapshot.function.parsed_arguments, + arguments_delta: toolCallDelta.function?.arguments ?? '', + }); + } + else { + assertNever(toolCallSnapshot?.type); + } + } + } + }, _ChatCompletionStream_emitToolCallDoneEvent = function _ChatCompletionStream_emitToolCallDoneEvent(choiceSnapshot, toolCallIndex) { + const state = __classPrivateFieldGet(this, _ChatCompletionStream_instances, "m", _ChatCompletionStream_getChoiceEventState).call(this, choiceSnapshot); + if (state.done_tool_calls.has(toolCallIndex)) { + // we've already fired the done event + return; + } + const toolCallSnapshot = choiceSnapshot.message.tool_calls?.[toolCallIndex]; + if (!toolCallSnapshot) { + throw new Error('no tool call snapshot'); + } + if (!toolCallSnapshot.type) { + throw new Error('tool call snapshot missing `type`'); + } + if (toolCallSnapshot.type === 'function') { + const inputTool = __classPrivateFieldGet(this, _ChatCompletionStream_params, "f")?.tools?.find((tool) => tool.type === 'function' && tool.function.name === toolCallSnapshot.function.name); + this._emit('tool_calls.function.arguments.done', { + name: toolCallSnapshot.function.name, + index: toolCallIndex, + arguments: toolCallSnapshot.function.arguments, + parsed_arguments: (0, parser_1.isAutoParsableTool)(inputTool) ? inputTool.$parseRaw(toolCallSnapshot.function.arguments) + : inputTool?.function.strict ? 
JSON.parse(toolCallSnapshot.function.arguments) + : null, + }); + } + else { + assertNever(toolCallSnapshot.type); + } + }, _ChatCompletionStream_emitContentDoneEvents = function _ChatCompletionStream_emitContentDoneEvents(choiceSnapshot) { + const state = __classPrivateFieldGet(this, _ChatCompletionStream_instances, "m", _ChatCompletionStream_getChoiceEventState).call(this, choiceSnapshot); + if (choiceSnapshot.message.content && !state.content_done) { + state.content_done = true; + const responseFormat = __classPrivateFieldGet(this, _ChatCompletionStream_instances, "m", _ChatCompletionStream_getAutoParseableResponseFormat).call(this); + this._emit('content.done', { + content: choiceSnapshot.message.content, + parsed: responseFormat ? responseFormat.$parseRaw(choiceSnapshot.message.content) : null, + }); + } + if (choiceSnapshot.message.refusal && !state.refusal_done) { + state.refusal_done = true; + this._emit('refusal.done', { refusal: choiceSnapshot.message.refusal }); + } + if (choiceSnapshot.logprobs?.content && !state.logprobs_content_done) { + state.logprobs_content_done = true; + this._emit('logprobs.content.done', { content: choiceSnapshot.logprobs.content }); + } + if (choiceSnapshot.logprobs?.refusal && !state.logprobs_refusal_done) { + state.logprobs_refusal_done = true; + this._emit('logprobs.refusal.done', { refusal: choiceSnapshot.logprobs.refusal }); + } + }, _ChatCompletionStream_endRequest = function _ChatCompletionStream_endRequest() { + if (this.ended) { + throw new error_1.OpenAIError(`stream has ended, this shouldn't happen`); + } + const snapshot = __classPrivateFieldGet(this, _ChatCompletionStream_currentChatCompletionSnapshot, "f"); + if (!snapshot) { + throw new error_1.OpenAIError(`request ended without sending any chunks`); + } + __classPrivateFieldSet(this, _ChatCompletionStream_currentChatCompletionSnapshot, undefined, "f"); + __classPrivateFieldSet(this, _ChatCompletionStream_choiceEventStates, [], "f"); + return finalizeChatCompletion(snapshot, __classPrivateFieldGet(this, _ChatCompletionStream_params, "f")); + }, _ChatCompletionStream_getAutoParseableResponseFormat = function _ChatCompletionStream_getAutoParseableResponseFormat() { + const responseFormat = __classPrivateFieldGet(this, _ChatCompletionStream_params, "f")?.response_format; + if ((0, parser_1.isAutoParsableResponseFormat)(responseFormat)) { + return responseFormat; + } + return null; + }, _ChatCompletionStream_accumulateChatCompletion = function _ChatCompletionStream_accumulateChatCompletion(chunk) { + var _a, _b, _c, _d; + let snapshot = __classPrivateFieldGet(this, _ChatCompletionStream_currentChatCompletionSnapshot, "f"); + const { choices, ...rest } = chunk; + if (!snapshot) { + snapshot = __classPrivateFieldSet(this, _ChatCompletionStream_currentChatCompletionSnapshot, { + ...rest, + choices: [], + }, "f"); + } + else { + Object.assign(snapshot, rest); + } + for (const { delta, finish_reason, index, logprobs = null, ...other } of chunk.choices) { + let choice = snapshot.choices[index]; + if (!choice) { + choice = snapshot.choices[index] = { finish_reason, index, message: {}, logprobs, ...other }; + } + if (logprobs) { + if (!choice.logprobs) { + choice.logprobs = Object.assign({}, logprobs); + } + else { + const { content, refusal, ...rest } = logprobs; + assertIsEmpty(rest); + Object.assign(choice.logprobs, rest); + if (content) { + (_a = choice.logprobs).content ?? (_a.content = []); + choice.logprobs.content.push(...content); + } + if (refusal) { + (_b = choice.logprobs).refusal ?? 
(_b.refusal = []); + choice.logprobs.refusal.push(...refusal); + } + } + } + if (finish_reason) { + choice.finish_reason = finish_reason; + if (__classPrivateFieldGet(this, _ChatCompletionStream_params, "f") && (0, parser_1.hasAutoParseableInput)(__classPrivateFieldGet(this, _ChatCompletionStream_params, "f"))) { + if (finish_reason === 'length') { + throw new error_1.LengthFinishReasonError(); + } + if (finish_reason === 'content_filter') { + throw new error_1.ContentFilterFinishReasonError(); + } + } + } + Object.assign(choice, other); + if (!delta) + continue; // Shouldn't happen; just in case. + const { content, refusal, function_call, role, tool_calls, ...rest } = delta; + assertIsEmpty(rest); + Object.assign(choice.message, rest); + if (refusal) { + choice.message.refusal = (choice.message.refusal || '') + refusal; + } + if (role) + choice.message.role = role; + if (function_call) { + if (!choice.message.function_call) { + choice.message.function_call = function_call; + } + else { + if (function_call.name) + choice.message.function_call.name = function_call.name; + if (function_call.arguments) { + (_c = choice.message.function_call).arguments ?? (_c.arguments = ''); + choice.message.function_call.arguments += function_call.arguments; + } + } + } + if (content) { + choice.message.content = (choice.message.content || '') + content; + if (!choice.message.refusal && __classPrivateFieldGet(this, _ChatCompletionStream_instances, "m", _ChatCompletionStream_getAutoParseableResponseFormat).call(this)) { + choice.message.parsed = (0, parser_2.partialParse)(choice.message.content); + } + } + if (tool_calls) { + if (!choice.message.tool_calls) + choice.message.tool_calls = []; + for (const { index, id, type, function: fn, ...rest } of tool_calls) { + const tool_call = ((_d = choice.message.tool_calls)[index] ?? (_d[index] = {})); + Object.assign(tool_call, rest); + if (id) + tool_call.id = id; + if (type) + tool_call.type = type; + if (fn) + tool_call.function ?? (tool_call.function = { name: fn.name ?? '', arguments: '' }); + if (fn?.name) + tool_call.function.name = fn.name; + if (fn?.arguments) { + tool_call.function.arguments += fn.arguments; + if ((0, parser_1.shouldParseToolCall)(__classPrivateFieldGet(this, _ChatCompletionStream_params, "f"), tool_call)) { + tool_call.function.parsed_arguments = (0, parser_2.partialParse)(tool_call.function.arguments); + } + } + } + } + } + return snapshot; + }, Symbol.asyncIterator)]() { + const pushQueue = []; + const readQueue = []; + let done = false; + this.on('chunk', (chunk) => { + const reader = readQueue.shift(); + if (reader) { + reader.resolve(chunk); + } + else { + pushQueue.push(chunk); + } + }); + this.on('end', () => { + done = true; + for (const reader of readQueue) { + reader.resolve(undefined); + } + readQueue.length = 0; + }); + this.on('abort', (err) => { + done = true; + for (const reader of readQueue) { + reader.reject(err); + } + readQueue.length = 0; + }); + this.on('error', (err) => { + done = true; + for (const reader of readQueue) { + reader.reject(err); + } + readQueue.length = 0; + }); + return { + next: async () => { + if (!pushQueue.length) { + if (done) { + return { value: undefined, done: true }; + } + return new Promise((resolve, reject) => readQueue.push({ resolve, reject })).then((chunk) => (chunk ? 
{ value: chunk, done: false } : { value: undefined, done: true })); + } + const chunk = pushQueue.shift(); + return { value: chunk, done: false }; + }, + return: async () => { + this.abort(); + return { value: undefined, done: true }; + }, + }; + } + toReadableStream() { + const stream = new streaming_1.Stream(this[Symbol.asyncIterator].bind(this), this.controller); + return stream.toReadableStream(); + } +} +exports.ChatCompletionStream = ChatCompletionStream; +function finalizeChatCompletion(snapshot, params) { + const { id, choices, created, model, system_fingerprint, ...rest } = snapshot; + const completion = { + ...rest, + id, + choices: choices.map(({ message, finish_reason, index, logprobs, ...choiceRest }) => { + if (!finish_reason) { + throw new error_1.OpenAIError(`missing finish_reason for choice ${index}`); + } + const { content = null, function_call, tool_calls, ...messageRest } = message; + const role = message.role; // this is what we expect; in theory it could be different which would make our types a slight lie but would be fine. + if (!role) { + throw new error_1.OpenAIError(`missing role for choice ${index}`); + } + if (function_call) { + const { arguments: args, name } = function_call; + if (args == null) { + throw new error_1.OpenAIError(`missing function_call.arguments for choice ${index}`); + } + if (!name) { + throw new error_1.OpenAIError(`missing function_call.name for choice ${index}`); + } + return { + ...choiceRest, + message: { + content, + function_call: { arguments: args, name }, + role, + refusal: message.refusal ?? null, + }, + finish_reason, + index, + logprobs, + }; + } + if (tool_calls) { + return { + ...choiceRest, + index, + finish_reason, + logprobs, + message: { + ...messageRest, + role, + content, + refusal: message.refusal ?? null, + tool_calls: tool_calls.map((tool_call, i) => { + const { function: fn, type, id, ...toolRest } = tool_call; + const { arguments: args, name, ...fnRest } = fn || {}; + if (id == null) { + throw new error_1.OpenAIError(`missing choices[${index}].tool_calls[${i}].id\n${str(snapshot)}`); + } + if (type == null) { + throw new error_1.OpenAIError(`missing choices[${index}].tool_calls[${i}].type\n${str(snapshot)}`); + } + if (name == null) { + throw new error_1.OpenAIError(`missing choices[${index}].tool_calls[${i}].function.name\n${str(snapshot)}`); + } + if (args == null) { + throw new error_1.OpenAIError(`missing choices[${index}].tool_calls[${i}].function.arguments\n${str(snapshot)}`); + } + return { ...toolRest, id, type, function: { ...fnRest, name, arguments: args } }; + }), + }, + }; + } + return { + ...choiceRest, + message: { ...messageRest, content, role, refusal: message.refusal ?? null }, + finish_reason, + index, + logprobs, + }; + }), + created, + model, + object: 'chat.completion', + ...(system_fingerprint ? { system_fingerprint } : {}), + }; + return (0, parser_1.maybeParseChatCompletion)(completion, params); +} +function str(x) { + return JSON.stringify(x); +} +/** + * Ensures the given argument is an empty object, useful for + * asserting that all known properties on an object have been + * destructured. 
+ */ +function assertIsEmpty(obj) { + return; +} +function assertNever(_x) { } +//# sourceMappingURL=ChatCompletionStream.js.map \ No newline at end of file diff --git a/node_modules/openai/lib/ChatCompletionStream.js.map b/node_modules/openai/lib/ChatCompletionStream.js.map new file mode 100644 index 0000000..a0daa26 --- /dev/null +++ b/node_modules/openai/lib/ChatCompletionStream.js.map @@ -0,0 +1 @@ +{"version":3,"file":"ChatCompletionStream.js","sourceRoot":"","sources":["../src/lib/ChatCompletionStream.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;AACA,uCAKkB;AASlB,oFAGwC;AAExC,+CAAsC;AAGtC,6CAOuB;AACvB,qEAAqE;AA+FrE,MAAa,oBACX,SAAQ,2DAA0E;IAOlF,YAAY,MAAyC;QACnD,KAAK,EAAE,CAAC;;QALV,+CAA2C;QAC3C,0DAAuC;QACvC,sEAAmE;QAIjE,uBAAA,IAAI,gCAAW,MAAM,MAAA,CAAC;QACtB,uBAAA,IAAI,2CAAsB,EAAE,MAAA,CAAC;IAC/B,CAAC;IAED,IAAI,6BAA6B;QAC/B,OAAO,uBAAA,IAAI,2DAA+B,CAAC;IAC7C,CAAC;IAED;;;;;;OAMG;IACH,MAAM,CAAC,kBAAkB,CAAC,MAAsB;QAC9C,MAAM,MAAM,GAAG,IAAI,oBAAoB,CAAC,IAAI,CAAC,CAAC;QAC9C,MAAM,CAAC,IAAI,CAAC,GAAG,EAAE,CAAC,MAAM,CAAC,mBAAmB,CAAC,MAAM,CAAC,CAAC,CAAC;QACtD,OAAO,MAAM,CAAC;IAChB,CAAC;IAED,MAAM,CAAC,oBAAoB,CACzB,MAAc,EACd,MAAkC,EAClC,OAA6B;QAE7B,MAAM,MAAM,GAAG,IAAI,oBAAoB,CAAU,MAA6C,CAAC,CAAC;QAChG,MAAM,CAAC,IAAI,CAAC,GAAG,EAAE,CACf,MAAM,CAAC,kBAAkB,CACvB,MAAM,EACN,EAAE,GAAG,MAAM,EAAE,MAAM,EAAE,IAAI,EAAE,EAC3B,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE,2BAA2B,EAAE,QAAQ,EAAE,EAAE,CACxF,CACF,CAAC;QACF,OAAO,MAAM,CAAC;IAChB,CAAC;IAoMkB,KAAK,CAAC,qBAAqB,CAC5C,MAAc,EACd,MAAkC,EAClC,OAA6B;QAE7B,KAAK,CAAC,qBAAqB,CAAC;QAC5B,MAAM,MAAM,GAAG,OAAO,EAAE,MAAM,CAAC;QAC/B,IAAI,MAAM,EAAE;YACV,IAAI,MAAM,CAAC,OAAO;gBAAE,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,CAAC;YAC5C,MAAM,CAAC,gBAAgB,CAAC,OAAO,EAAE,GAAG,EAAE,CAAC,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,CAAC,CAAC;SACjE;QACD,uBAAA,IAAI,2EAAc,MAAlB,IAAI,CAAgB,CAAC;QAErB,MAAM,MAAM,GAAG,MAAM,MAAM,CAAC,IAAI,CAAC,WAAW,CAAC,MAAM,CACjD,EAAE,GAAG,MAAM,EAAE,MAAM,EAAE,IAAI,EAAE,EAC3B,EAAE,GAAG,OAAO,EAAE,MAAM,EAAE,IAAI,CAAC,UAAU,CAAC,MAAM,EAAE,CAC/C,CAAC;QACF,IAAI,CAAC,UAAU,EAAE,CAAC;QAClB,IAAI,KAAK,EAAE,MAAM,KAAK,IAAI,MAAM,EAAE;YAChC,uBAAA,IAAI,uEAAU,MAAd,IAAI,EAAW,KAAK,CAAC,CAAC;SACvB;QACD,IAAI,MAAM,CAAC,UAAU,CAAC,MAAM,EAAE,OAAO,EAAE;YACrC,MAAM,IAAI,yBAAiB,EAAE,CAAC;SAC/B;QACD,OAAO,IAAI,CAAC,kBAAkB,CAAC,uBAAA,IAAI,yEAAY,MAAhB,IAAI,CAAc,CAAC,CAAC;IACrD,CAAC;IAES,KAAK,CAAC,mBAAmB,CACjC,cAA8B,EAC9B,OAA6B;QAE7B,MAAM,MAAM,GAAG,OAAO,EAAE,MAAM,CAAC;QAC/B,IAAI,MAAM,EAAE;YACV,IAAI,MAAM,CAAC,OAAO;gBAAE,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,CAAC;YAC5C,MAAM,CAAC,gBAAgB,CAAC,OAAO,EAAE,GAAG,EAAE,CAAC,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,CAAC,CAAC;SACjE;QACD,uBAAA,IAAI,2EAAc,MAAlB,IAAI,CAAgB,CAAC;QACrB,IAAI,CAAC,UAAU,EAAE,CAAC;QAClB,MAAM,MAAM,GAAG,kBAAM,CAAC,kBAAkB,CAAsB,cAAc,EAAE,IAAI,CAAC,UAAU,CAAC,CAAC;QAC/F,IAAI,MAAM,CAAC;QACX,IAAI,KAAK,EAAE,MAAM,KAAK,IAAI,MAAM,EAAE;YAChC,IAAI,MAAM,IAAI,MAAM,KAAK,KAAK,CAAC,EAAE,EAAE;gBACjC,+BAA+B;gBAC/B,IAAI,CAAC,kBAAkB,CAAC,uBAAA,IAAI,yEAAY,MAAhB,IAAI,CAAc,CAAC,CAAC;aAC7C;YAED,uBAAA,IAAI,uEAAU,MAAd,IAAI,EAAW,KAAK,CAAC,CAAC;YACtB,MAAM,GAAG,KAAK,CAAC,EAAE,CAAC;SACnB;QACD,IAAI,MAAM,CAAC,UAAU,CAAC,MAAM,EAAE,OAAO,EAAE;YACrC,MAAM,IAAI,yBAAiB,EAAE,CAAC;SAC/B;QACD,OAAO,IAAI,CAAC,kBAAkB,CAAC,uBAAA,IAAI,yEAAY,MAAhB,IAAI,CAAc,CAAC,CAAC;IACrD,CAAC;IAuHD;QA7WE,IAAI,IAAI,CAAC,KAAK;YAAE,OAAO;QACvB,uBAAA,IAAI,uDAAkC,SAAS,MAAA,CAAC;IAClD,CAAC,iGAEoB,MAAqC;QACxD,IAAI,KAAK,GAAG,uBAAA,IAAI,+CAAmB,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;QAClD,IAAI,KAAK,EAAE;YACT,OAAO,KAAK,CAAC;SACd;QAED,KAAK,GAAG;YACN,YAAY,EAAE,KAAK;YACnB,YAAY,EAAE,KAAK;YACnB,qBAAqB,EAAE,KAAK;YAC5B,qBAAqB,EAAE,KAAK;YAC5B,eAAe,EAAE,IAA
I,GAAG,EAAE;YAC1B,uBAAuB,EAAE,IAAI;SAC9B,CAAC;QACF,uBAAA,IAAI,+CAAmB,CAAC,MAAM,CAAC,KAAK,CAAC,GAAG,KAAK,CAAC;QAC9C,OAAO,KAAK,CAAC;IACf,CAAC,2EAE8C,KAA0B;QACvE,IAAI,IAAI,CAAC,KAAK;YAAE,OAAO;QAEvB,MAAM,UAAU,GAAG,uBAAA,IAAI,uFAA0B,MAA9B,IAAI,EAA2B,KAAK,CAAC,CAAC;QACzD,IAAI,CAAC,KAAK,CAAC,OAAO,EAAE,KAAK,EAAE,UAAU,CAAC,CAAC;QAEvC,KAAK,MAAM,MAAM,IAAI,KAAK,CAAC,OAAO,EAAE;YAClC,MAAM,cAAc,GAAG,UAAU,CAAC,OAAO,CAAC,MAAM,CAAC,KAAK,CAAE,CAAC;YAEzD,IACE,MAAM,CAAC,KAAK,CAAC,OAAO,IAAI,IAAI;gBAC5B,cAAc,CAAC,OAAO,EAAE,IAAI,KAAK,WAAW;gBAC5C,cAAc,CAAC,OAAO,EAAE,OAAO,EAC/B;gBACA,IAAI,CAAC,KAAK,CAAC,SAAS,EAAE,MAAM,CAAC,KAAK,CAAC,OAAO,EAAE,cAAc,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;gBAC5E,IAAI,CAAC,KAAK,CAAC,eAAe,EAAE;oBAC1B,KAAK,EAAE,MAAM,CAAC,KAAK,CAAC,OAAO;oBAC3B,QAAQ,EAAE,cAAc,CAAC,OAAO,CAAC,OAAO;oBACxC,MAAM,EAAE,cAAc,CAAC,OAAO,CAAC,MAAM;iBACtC,CAAC,CAAC;aACJ;YAED,IACE,MAAM,CAAC,KAAK,CAAC,OAAO,IAAI,IAAI;gBAC5B,cAAc,CAAC,OAAO,EAAE,IAAI,KAAK,WAAW;gBAC5C,cAAc,CAAC,OAAO,EAAE,OAAO,EAC/B;gBACA,IAAI,CAAC,KAAK,CAAC,eAAe,EAAE;oBAC1B,KAAK,EAAE,MAAM,CAAC,KAAK,CAAC,OAAO;oBAC3B,QAAQ,EAAE,cAAc,CAAC,OAAO,CAAC,OAAO;iBACzC,CAAC,CAAC;aACJ;YAED,IAAI,MAAM,CAAC,QAAQ,EAAE,OAAO,IAAI,IAAI,IAAI,cAAc,CAAC,OAAO,EAAE,IAAI,KAAK,WAAW,EAAE;gBACpF,IAAI,CAAC,KAAK,CAAC,wBAAwB,EAAE;oBACnC,OAAO,EAAE,MAAM,CAAC,QAAQ,EAAE,OAAO;oBACjC,QAAQ,EAAE,cAAc,CAAC,QAAQ,EAAE,OAAO,IAAI,EAAE;iBACjD,CAAC,CAAC;aACJ;YAED,IAAI,MAAM,CAAC,QAAQ,EAAE,OAAO,IAAI,IAAI,IAAI,cAAc,CAAC,OAAO,EAAE,IAAI,KAAK,WAAW,EAAE;gBACpF,IAAI,CAAC,KAAK,CAAC,wBAAwB,EAAE;oBACnC,OAAO,EAAE,MAAM,CAAC,QAAQ,EAAE,OAAO;oBACjC,QAAQ,EAAE,cAAc,CAAC,QAAQ,EAAE,OAAO,IAAI,EAAE;iBACjD,CAAC,CAAC;aACJ;YAED,MAAM,KAAK,GAAG,uBAAA,IAAI,kFAAqB,MAAzB,IAAI,EAAsB,cAAc,CAAC,CAAC;YAExD,IAAI,cAAc,CAAC,aAAa,EAAE;gBAChC,uBAAA,IAAI,oFAAuB,MAA3B,IAAI,EAAwB,cAAc,CAAC,CAAC;gBAE5C,IAAI,KAAK,CAAC,uBAAuB,IAAI,IAAI,EAAE;oBACzC,uBAAA,IAAI,oFAAuB,MAA3B,IAAI,EAAwB,cAAc,EAAE,KAAK,CAAC,uBAAuB,CAAC,CAAC;iBAC5E;aACF;YAED,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,KAAK,CAAC,UAAU,IAAI,EAAE,EAAE;gBACpD,IAAI,KAAK,CAAC,uBAAuB,KAAK,QAAQ,CAAC,KAAK,EAAE;oBACpD,uBAAA,IAAI,oFAAuB,MAA3B,IAAI,EAAwB,cAAc,CAAC,CAAC;oBAE5C,kDAAkD;oBAClD,IAAI,KAAK,CAAC,uBAAuB,IAAI,IAAI,EAAE;wBACzC,uBAAA,IAAI,oFAAuB,MAA3B,IAAI,EAAwB,cAAc,EAAE,KAAK,CAAC,uBAAuB,CAAC,CAAC;qBAC5E;iBACF;gBAED,KAAK,CAAC,uBAAuB,GAAG,QAAQ,CAAC,KAAK,CAAC;aAChD;YAED,KAAK,MAAM,aAAa,IAAI,MAAM,CAAC,KAAK,CAAC,UAAU,IAAI,EAAE,EAAE;gBACzD,MAAM,gBAAgB,GAAG,cAAc,CAAC,OAAO,CAAC,UAAU,EAAE,CAAC,aAAa,CAAC,KAAK,CAAC,CAAC;gBAClF,IAAI,CAAC,gBAAgB,EAAE,IAAI,EAAE;oBAC3B,SAAS;iBACV;gBAED,IAAI,gBAAgB,EAAE,IAAI,KAAK,UAAU,EAAE;oBACzC,IAAI,CAAC,KAAK,CAAC,qCAAqC,EAAE;wBAChD,IAAI,EAAE,gBAAgB,CAAC,QAAQ,EAAE,IAAI;wBACrC,KAAK,EAAE,aAAa,CAAC,KAAK;wBAC1B,SAAS,EAAE,gBAAgB,CAAC,QAAQ,CAAC,SAAS;wBAC9C,gBAAgB,EAAE,gBAAgB,CAAC,QAAQ,CAAC,gBAAgB;wBAC5D,eAAe,EAAE,aAAa,CAAC,QAAQ,EAAE,SAAS,IAAI,EAAE;qBACzD,CAAC,CAAC;iBACJ;qBAAM;oBACL,WAAW,CAAC,gBAAgB,EAAE,IAAI,CAAC,CAAC;iBACrC;aACF;SACF;IACH,CAAC,qGAEsB,cAA6C,EAAE,aAAqB;QACzF,MAAM,KAAK,GAAG,uBAAA,IAAI,kFAAqB,MAAzB,IAAI,EAAsB,cAAc,CAAC,CAAC;QACxD,IAAI,KAAK,CAAC,eAAe,CAAC,GAAG,CAAC,aAAa,CAAC,EAAE;YAC5C,qCAAqC;YACrC,OAAO;SACR;QAED,MAAM,gBAAgB,GAAG,cAAc,CAAC,OAAO,CAAC,UAAU,EAAE,CAAC,aAAa,CAAC,CAAC;QAC5E,IAAI,CAAC,gBAAgB,EAAE;YACrB,MAAM,IAAI,KAAK,CAAC,uBAAuB,CAAC,CAAC;SAC1C;QACD,IAAI,CAAC,gBAAgB,CAAC,IAAI,EAAE;YAC1B,MAAM,IAAI,KAAK,CAAC,mCAAmC,CAAC,CAAC;SACtD;QAED,IAAI,gBAAgB,CAAC,IAAI,KAAK,UAAU,EAAE;YACxC,MAAM,SAAS,GAAG,uBAAA,IAAI,oCAAQ,EAAE,KAAK,EAAE,IAAI,CACzC,CAAC,IAAI,EAAE,EAAE,CAAC,IAAI,CAAC,IAAI,KAAK,UAAU,IAAI,IAAI,CAAC,QAAQ,CAAC,IAAI,KAAK,gBAAgB,CAAC,QAAQ,CAAC,IAAI,CAC5F,CAAC;YAEF,IAAI,CAAC,KAAK,CAAC,oCAA
oC,EAAE;gBAC/C,IAAI,EAAE,gBAAgB,CAAC,QAAQ,CAAC,IAAI;gBACpC,KAAK,EAAE,aAAa;gBACpB,SAAS,EAAE,gBAAgB,CAAC,QAAQ,CAAC,SAAS;gBAC9C,gBAAgB,EACd,IAAA,2BAAkB,EAAC,SAAS,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC,SAAS,CAAC,gBAAgB,CAAC,QAAQ,CAAC,SAAS,CAAC;oBACxF,CAAC,CAAC,SAAS,EAAE,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,gBAAgB,CAAC,QAAQ,CAAC,SAAS,CAAC;wBAC9E,CAAC,CAAC,IAAI;aACT,CAAC,CAAC;SACJ;aAAM;YACL,WAAW,CAAC,gBAAgB,CAAC,IAAI,CAAC,CAAC;SACpC;IACH,CAAC,qGAEsB,cAA6C;QAClE,MAAM,KAAK,GAAG,uBAAA,IAAI,kFAAqB,MAAzB,IAAI,EAAsB,cAAc,CAAC,CAAC;QAExD,IAAI,cAAc,CAAC,OAAO,CAAC,OAAO,IAAI,CAAC,KAAK,CAAC,YAAY,EAAE;YACzD,KAAK,CAAC,YAAY,GAAG,IAAI,CAAC;YAE1B,MAAM,cAAc,GAAG,uBAAA,IAAI,6FAAgC,MAApC,IAAI,CAAkC,CAAC;YAE9D,IAAI,CAAC,KAAK,CAAC,cAAc,EAAE;gBACzB,OAAO,EAAE,cAAc,CAAC,OAAO,CAAC,OAAO;gBACvC,MAAM,EAAE,cAAc,CAAC,CAAC,CAAC,cAAc,CAAC,SAAS,CAAC,cAAc,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC,CAAC,CAAE,IAAY;aAClG,CAAC,CAAC;SACJ;QAED,IAAI,cAAc,CAAC,OAAO,CAAC,OAAO,IAAI,CAAC,KAAK,CAAC,YAAY,EAAE;YACzD,KAAK,CAAC,YAAY,GAAG,IAAI,CAAC;YAE1B,IAAI,CAAC,KAAK,CAAC,cAAc,EAAE,EAAE,OAAO,EAAE,cAAc,CAAC,OAAO,CAAC,OAAO,EAAE,CAAC,CAAC;SACzE;QAED,IAAI,cAAc,CAAC,QAAQ,EAAE,OAAO,IAAI,CAAC,KAAK,CAAC,qBAAqB,EAAE;YACpE,KAAK,CAAC,qBAAqB,GAAG,IAAI,CAAC;YAEnC,IAAI,CAAC,KAAK,CAAC,uBAAuB,EAAE,EAAE,OAAO,EAAE,cAAc,CAAC,QAAQ,CAAC,OAAO,EAAE,CAAC,CAAC;SACnF;QAED,IAAI,cAAc,CAAC,QAAQ,EAAE,OAAO,IAAI,CAAC,KAAK,CAAC,qBAAqB,EAAE;YACpE,KAAK,CAAC,qBAAqB,GAAG,IAAI,CAAC;YAEnC,IAAI,CAAC,KAAK,CAAC,uBAAuB,EAAE,EAAE,OAAO,EAAE,cAAc,CAAC,QAAQ,CAAC,OAAO,EAAE,CAAC,CAAC;SACnF;IACH,CAAC;QAGC,IAAI,IAAI,CAAC,KAAK,EAAE;YACd,MAAM,IAAI,mBAAW,CAAC,yCAAyC,CAAC,CAAC;SAClE;QACD,MAAM,QAAQ,GAAG,uBAAA,IAAI,2DAA+B,CAAC;QACrD,IAAI,CAAC,QAAQ,EAAE;YACb,MAAM,IAAI,mBAAW,CAAC,0CAA0C,CAAC,CAAC;SACnE;QACD,uBAAA,IAAI,uDAAkC,SAAS,MAAA,CAAC;QAChD,uBAAA,IAAI,2CAAsB,EAAE,MAAA,CAAC;QAC7B,OAAO,sBAAsB,CAAC,QAAQ,EAAE,uBAAA,IAAI,oCAAQ,CAAC,CAAC;IACxD,CAAC;QA0DC,MAAM,cAAc,GAAG,uBAAA,IAAI,oCAAQ,EAAE,eAAe,CAAC;QACrD,IAAI,IAAA,qCAA4B,EAAU,cAAc,CAAC,EAAE;YACzD,OAAO,cAAc,CAAC;SACvB;QAED,OAAO,IAAI,CAAC;IACd,CAAC,2GAEyB,KAA0B;;QAClD,IAAI,QAAQ,GAAG,uBAAA,IAAI,2DAA+B,CAAC;QACnD,MAAM,EAAE,OAAO,EAAE,GAAG,IAAI,EAAE,GAAG,KAAK,CAAC;QACnC,IAAI,CAAC,QAAQ,EAAE;YACb,QAAQ,GAAG,uBAAA,IAAI,uDAAkC;gBAC/C,GAAG,IAAI;gBACP,OAAO,EAAE,EAAE;aACZ,MAAA,CAAC;SACH;aAAM;YACL,MAAM,CAAC,MAAM,CAAC,QAAQ,EAAE,IAAI,CAAC,CAAC;SAC/B;QAED,KAAK,MAAM,EAAE,KAAK,EAAE,aAAa,EAAE,KAAK,EAAE,QAAQ,GAAG,IAAI,EAAE,GAAG,KAAK,EAAE,IAAI,KAAK,CAAC,OAAO,EAAE;YACtF,IAAI,MAAM,GAAG,QAAQ,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC;YACrC,IAAI,CAAC,MAAM,EAAE;gBACX,MAAM,GAAG,QAAQ,CAAC,OAAO,CAAC,KAAK,CAAC,GAAG,EAAE,aAAa,EAAE,KAAK,EAAE,OAAO,EAAE,EAAE,EAAE,QAAQ,EAAE,GAAG,KAAK,EAAE,CAAC;aAC9F;YAED,IAAI,QAAQ,EAAE;gBACZ,IAAI,CAAC,MAAM,CAAC,QAAQ,EAAE;oBACpB,MAAM,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,QAAQ,CAAC,CAAC;iBAC/C;qBAAM;oBACL,MAAM,EAAE,OAAO,EAAE,OAAO,EAAE,GAAG,IAAI,EAAE,GAAG,QAAQ,CAAC;oBAC/C,aAAa,CAAC,IAAI,CAAC,CAAC;oBACpB,MAAM,CAAC,MAAM,CAAC,MAAM,CAAC,QAAQ,EAAE,IAAI,CAAC,CAAC;oBAErC,IAAI,OAAO,EAAE;wBACX,MAAA,MAAM,CAAC,QAAQ,EAAC,OAAO,QAAP,OAAO,GAAK,EAAE,EAAC;wBAC/B,MAAM,CAAC,QAAQ,CAAC,OAAO,CAAC,IAAI,CAAC,GAAG,OAAO,CAAC,CAAC;qBAC1C;oBAED,IAAI,OAAO,EAAE;wBACX,MAAA,MAAM,CAAC,QAAQ,EAAC,OAAO,QAAP,OAAO,GAAK,EAAE,EAAC;wBAC/B,MAAM,CAAC,QAAQ,CAAC,OAAO,CAAC,IAAI,CAAC,GAAG,OAAO,CAAC,CAAC;qBAC1C;iBACF;aACF;YAED,IAAI,aAAa,EAAE;gBACjB,MAAM,CAAC,aAAa,GAAG,aAAa,CAAC;gBAErC,IAAI,uBAAA,IAAI,oCAAQ,IAAI,IAAA,8BAAqB,EAAC,uBAAA,IAAI,oCAAQ,CAAC,EAAE;oBACvD,IAAI,aAAa,KAAK,QAAQ,EAAE;wBAC9B,MAAM,IAAI,+BAAuB,EAAE,CAAC;qBACrC;oBAED,IAAI,aAAa,KAAK,gBAAgB,EAAE;wBACtC,MAAM,IAAI,sCAA8B,EAAE,CAAC;qBAC5C;iBACF;aACF;YAED,MAAM
,CAAC,MAAM,CAAC,MAAM,EAAE,KAAK,CAAC,CAAC;YAE7B,IAAI,CAAC,KAAK;gBAAE,SAAS,CAAC,kCAAkC;YAExD,MAAM,EAAE,OAAO,EAAE,OAAO,EAAE,aAAa,EAAE,IAAI,EAAE,UAAU,EAAE,GAAG,IAAI,EAAE,GAAG,KAAK,CAAC;YAC7E,aAAa,CAAC,IAAI,CAAC,CAAC;YACpB,MAAM,CAAC,MAAM,CAAC,MAAM,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;YAEpC,IAAI,OAAO,EAAE;gBACX,MAAM,CAAC,OAAO,CAAC,OAAO,GAAG,CAAC,MAAM,CAAC,OAAO,CAAC,OAAO,IAAI,EAAE,CAAC,GAAG,OAAO,CAAC;aACnE;YAED,IAAI,IAAI;gBAAE,MAAM,CAAC,OAAO,CAAC,IAAI,GAAG,IAAI,CAAC;YACrC,IAAI,aAAa,EAAE;gBACjB,IAAI,CAAC,MAAM,CAAC,OAAO,CAAC,aAAa,EAAE;oBACjC,MAAM,CAAC,OAAO,CAAC,aAAa,GAAG,aAAa,CAAC;iBAC9C;qBAAM;oBACL,IAAI,aAAa,CAAC,IAAI;wBAAE,MAAM,CAAC,OAAO,CAAC,aAAa,CAAC,IAAI,GAAG,aAAa,CAAC,IAAI,CAAC;oBAC/E,IAAI,aAAa,CAAC,SAAS,EAAE;wBAC3B,MAAA,MAAM,CAAC,OAAO,CAAC,aAAa,EAAC,SAAS,QAAT,SAAS,GAAK,EAAE,EAAC;wBAC9C,MAAM,CAAC,OAAO,CAAC,aAAa,CAAC,SAAS,IAAI,aAAa,CAAC,SAAS,CAAC;qBACnE;iBACF;aACF;YACD,IAAI,OAAO,EAAE;gBACX,MAAM,CAAC,OAAO,CAAC,OAAO,GAAG,CAAC,MAAM,CAAC,OAAO,CAAC,OAAO,IAAI,EAAE,CAAC,GAAG,OAAO,CAAC;gBAElE,IAAI,CAAC,MAAM,CAAC,OAAO,CAAC,OAAO,IAAI,uBAAA,IAAI,6FAAgC,MAApC,IAAI,CAAkC,EAAE;oBACrE,MAAM,CAAC,OAAO,CAAC,MAAM,GAAG,IAAA,qBAAY,EAAC,MAAM,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;iBAC9D;aACF;YAED,IAAI,UAAU,EAAE;gBACd,IAAI,CAAC,MAAM,CAAC,OAAO,CAAC,UAAU;oBAAE,MAAM,CAAC,OAAO,CAAC,UAAU,GAAG,EAAE,CAAC;gBAE/D,KAAK,MAAM,EAAE,KAAK,EAAE,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,EAAE,EAAE,GAAG,IAAI,EAAE,IAAI,UAAU,EAAE;oBACnE,MAAM,SAAS,GAAG,OAAC,MAAM,CAAC,OAAO,CAAC,UAAU,EAAC,KAAK,SAAL,KAAK,IAChD,EAAoD,EAAC,CAAC;oBACxD,MAAM,CAAC,MAAM,CAAC,SAAS,EAAE,IAAI,CAAC,CAAC;oBAC/B,IAAI,EAAE;wBAAE,SAAS,CAAC,EAAE,GAAG,EAAE,CAAC;oBAC1B,IAAI,IAAI;wBAAE,SAAS,CAAC,IAAI,GAAG,IAAI,CAAC;oBAChC,IAAI,EAAE;wBAAE,SAAS,CAAC,QAAQ,KAAlB,SAAS,CAAC,QAAQ,GAAK,EAAE,IAAI,EAAE,EAAE,CAAC,IAAI,IAAI,EAAE,EAAE,SAAS,EAAE,EAAE,EAAE,EAAC;oBACtE,IAAI,EAAE,EAAE,IAAI;wBAAE,SAAS,CAAC,QAAS,CAAC,IAAI,GAAG,EAAE,CAAC,IAAI,CAAC;oBACjD,IAAI,EAAE,EAAE,SAAS,EAAE;wBACjB,SAAS,CAAC,QAAS,CAAC,SAAS,IAAI,EAAE,CAAC,SAAS,CAAC;wBAE9C,IAAI,IAAA,4BAAmB,EAAC,uBAAA,IAAI,oCAAQ,EAAE,SAAS,CAAC,EAAE;4BAChD,SAAS,CAAC,QAAS,CAAC,gBAAgB,GAAG,IAAA,qBAAY,EAAC,SAAS,CAAC,QAAS,CAAC,SAAS,CAAC,CAAC;yBACpF;qBACF;iBACF;aACF;SACF;QACD,OAAO,QAAQ,CAAC;IAClB,CAAC,EAEA,MAAM,CAAC,aAAa,EAAC;QACpB,MAAM,SAAS,GAA0B,EAAE,CAAC;QAC5C,MAAM,SAAS,GAGT,EAAE,CAAC;QACT,IAAI,IAAI,GAAG,KAAK,CAAC;QAEjB,IAAI,CAAC,EAAE,CAAC,OAAO,EAAE,CAAC,KAAK,EAAE,EAAE;YACzB,MAAM,MAAM,GAAG,SAAS,CAAC,KAAK,EAAE,CAAC;YACjC,IAAI,MAAM,EAAE;gBACV,MAAM,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC;aACvB;iBAAM;gBACL,SAAS,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;aACvB;QACH,CAAC,CAAC,CAAC;QAEH,IAAI,CAAC,EAAE,CAAC,KAAK,EAAE,GAAG,EAAE;YAClB,IAAI,GAAG,IAAI,CAAC;YACZ,KAAK,MAAM,MAAM,IAAI,SAAS,EAAE;gBAC9B,MAAM,CAAC,OAAO,CAAC,SAAS,CAAC,CAAC;aAC3B;YACD,SAAS,CAAC,MAAM,GAAG,CAAC,CAAC;QACvB,CAAC,CAAC,CAAC;QAEH,IAAI,CAAC,EAAE,CAAC,OAAO,EAAE,CAAC,GAAG,EAAE,EAAE;YACvB,IAAI,GAAG,IAAI,CAAC;YACZ,KAAK,MAAM,MAAM,IAAI,SAAS,EAAE;gBAC9B,MAAM,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC;aACpB;YACD,SAAS,CAAC,MAAM,GAAG,CAAC,CAAC;QACvB,CAAC,CAAC,CAAC;QAEH,IAAI,CAAC,EAAE,CAAC,OAAO,EAAE,CAAC,GAAG,EAAE,EAAE;YACvB,IAAI,GAAG,IAAI,CAAC;YACZ,KAAK,MAAM,MAAM,IAAI,SAAS,EAAE;gBAC9B,MAAM,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC;aACpB;YACD,SAAS,CAAC,MAAM,GAAG,CAAC,CAAC;QACvB,CAAC,CAAC,CAAC;QAEH,OAAO;YACL,IAAI,EAAE,KAAK,IAAkD,EAAE;gBAC7D,IAAI,CAAC,SAAS,CAAC,MAAM,EAAE;oBACrB,IAAI,IAAI,EAAE;wBACR,OAAO,EAAE,KAAK,EAAE,SAAS,EAAE,IAAI,EAAE,IAAI,EAAE,CAAC;qBACzC;oBACD,OAAO,IAAI,OAAO,CAAkC,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE,CACtE,SAAS,CAAC,IAAI,CAAC,EAAE,OAAO,EAAE,MAAM,EAAE,CAAC,CACpC,CAAC,IAAI,CAAC,CAAC,KAAK,EAAE,EAAE,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,KAAK,EAAE,KAAK,EAAE,IAAI,EAAE,KAAK,
EAAE,CAAC,CAAC,CAAC,EAAE,KAAK,EAAE,SAAS,EAAE,IAAI,EAAE,IAAI,EAAE,CAAC,CAAC,CAAC;iBAC/F;gBACD,MAAM,KAAK,GAAG,SAAS,CAAC,KAAK,EAAG,CAAC;gBACjC,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,IAAI,EAAE,KAAK,EAAE,CAAC;YACvC,CAAC;YACD,MAAM,EAAE,KAAK,IAAI,EAAE;gBACjB,IAAI,CAAC,KAAK,EAAE,CAAC;gBACb,OAAO,EAAE,KAAK,EAAE,SAAS,EAAE,IAAI,EAAE,IAAI,EAAE,CAAC;YAC1C,CAAC;SACF,CAAC;IACJ,CAAC;IAED,gBAAgB;QACd,MAAM,MAAM,GAAG,IAAI,kBAAM,CAAC,IAAI,CAAC,MAAM,CAAC,aAAa,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,IAAI,CAAC,UAAU,CAAC,CAAC;QAClF,OAAO,MAAM,CAAC,gBAAgB,EAAE,CAAC;IACnC,CAAC;CACF;AA9dD,oDA8dC;AAED,SAAS,sBAAsB,CAC7B,QAAgC,EAChC,MAAyC;IAEzC,MAAM,EAAE,EAAE,EAAE,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,kBAAkB,EAAE,GAAG,IAAI,EAAE,GAAG,QAAQ,CAAC;IAC9E,MAAM,UAAU,GAAmB;QACjC,GAAG,IAAI;QACP,EAAE;QACF,OAAO,EAAE,OAAO,CAAC,GAAG,CAClB,CAAC,EAAE,OAAO,EAAE,aAAa,EAAE,KAAK,EAAE,QAAQ,EAAE,GAAG,UAAU,EAAE,EAAyB,EAAE;YACpF,IAAI,CAAC,aAAa,EAAE;gBAClB,MAAM,IAAI,mBAAW,CAAC,oCAAoC,KAAK,EAAE,CAAC,CAAC;aACpE;YAED,MAAM,EAAE,OAAO,GAAG,IAAI,EAAE,aAAa,EAAE,UAAU,EAAE,GAAG,WAAW,EAAE,GAAG,OAAO,CAAC;YAC9E,MAAM,IAAI,GAAG,OAAO,CAAC,IAAmB,CAAC,CAAC,qHAAqH;YAC/J,IAAI,CAAC,IAAI,EAAE;gBACT,MAAM,IAAI,mBAAW,CAAC,2BAA2B,KAAK,EAAE,CAAC,CAAC;aAC3D;YAED,IAAI,aAAa,EAAE;gBACjB,MAAM,EAAE,SAAS,EAAE,IAAI,EAAE,IAAI,EAAE,GAAG,aAAa,CAAC;gBAChD,IAAI,IAAI,IAAI,IAAI,EAAE;oBAChB,MAAM,IAAI,mBAAW,CAAC,8CAA8C,KAAK,EAAE,CAAC,CAAC;iBAC9E;gBAED,IAAI,CAAC,IAAI,EAAE;oBACT,MAAM,IAAI,mBAAW,CAAC,yCAAyC,KAAK,EAAE,CAAC,CAAC;iBACzE;gBAED,OAAO;oBACL,GAAG,UAAU;oBACb,OAAO,EAAE;wBACP,OAAO;wBACP,aAAa,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,IAAI,EAAE;wBACxC,IAAI;wBACJ,OAAO,EAAE,OAAO,CAAC,OAAO,IAAI,IAAI;qBACjC;oBACD,aAAa;oBACb,KAAK;oBACL,QAAQ;iBACT,CAAC;aACH;YAED,IAAI,UAAU,EAAE;gBACd,OAAO;oBACL,GAAG,UAAU;oBACb,KAAK;oBACL,aAAa;oBACb,QAAQ;oBACR,OAAO,EAAE;wBACP,GAAG,WAAW;wBACd,IAAI;wBACJ,OAAO;wBACP,OAAO,EAAE,OAAO,CAAC,OAAO,IAAI,IAAI;wBAChC,UAAU,EAAE,UAAU,CAAC,GAAG,CAAC,CAAC,SAAS,EAAE,CAAC,EAAE,EAAE;4BAC1C,MAAM,EAAE,QAAQ,EAAE,EAAE,EAAE,IAAI,EAAE,EAAE,EAAE,GAAG,QAAQ,EAAE,GAAG,SAAS,CAAC;4BAC1D,MAAM,EAAE,SAAS,EAAE,IAAI,EAAE,IAAI,EAAE,GAAG,MAAM,EAAE,GAAG,EAAE,IAAI,EAAE,CAAC;4BACtD,IAAI,EAAE,IAAI,IAAI,EAAE;gCACd,MAAM,IAAI,mBAAW,CAAC,mBAAmB,KAAK,gBAAgB,CAAC,SAAS,GAAG,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC;6BAC1F;4BACD,IAAI,IAAI,IAAI,IAAI,EAAE;gCAChB,MAAM,IAAI,mBAAW,CAAC,mBAAmB,KAAK,gBAAgB,CAAC,WAAW,GAAG,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC;6BAC5F;4BACD,IAAI,IAAI,IAAI,IAAI,EAAE;gCAChB,MAAM,IAAI,mBAAW,CACnB,mBAAmB,KAAK,gBAAgB,CAAC,oBAAoB,GAAG,CAAC,QAAQ,CAAC,EAAE,CAC7E,CAAC;6BACH;4BACD,IAAI,IAAI,IAAI,IAAI,EAAE;gCAChB,MAAM,IAAI,mBAAW,CACnB,mBAAmB,KAAK,gBAAgB,CAAC,yBAAyB,GAAG,CAAC,QAAQ,CAAC,EAAE,CAClF,CAAC;6BACH;4BAED,OAAO,EAAE,GAAG,QAAQ,EAAE,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,EAAE,GAAG,MAAM,EAAE,IAAI,EAAE,SAAS,EAAE,IAAI,EAAE,EAAE,CAAC;wBACnF,CAAC,CAAC;qBACH;iBACF,CAAC;aACH;YACD,OAAO;gBACL,GAAG,UAAU;gBACb,OAAO,EAAE,EAAE,GAAG,WAAW,EAAE,OAAO,EAAE,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,OAAO,IAAI,IAAI,EAAE;gBAC5E,aAAa;gBACb,KAAK;gBACL,QAAQ;aACT,CAAC;QACJ,CAAC,CACF;QACD,OAAO;QACP,KAAK;QACL,MAAM,EAAE,iBAAiB;QACzB,GAAG,CAAC,kBAAkB,CAAC,CAAC,CAAC,EAAE,kBAAkB,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC;KACtD,CAAC;IAEF,OAAO,IAAA,iCAAwB,EAAC,UAAU,EAAE,MAAM,CAAC,CAAC;AACtD,CAAC;AAED,SAAS,GAAG,CAAC,CAAU;IACrB,OAAO,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC;AAC3B,CAAC;AA0JD;;;;GAIG;AACH,SAAS,aAAa,CAAe,GAAqB;IACxD,OAAO;AACT,CAAC;AAED,SAAS,WAAW,CAAC,EAAS,IAAG,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/lib/ChatCompletionStream.mjs b/node_modules/openai/lib/ChatCompletionStream.mjs new file mode 100644 index 0000000..dd86282 --- /dev/null +++ 
b/node_modules/openai/lib/ChatCompletionStream.mjs @@ -0,0 +1,499 @@ +var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; +}; +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +}; +var _ChatCompletionStream_instances, _ChatCompletionStream_params, _ChatCompletionStream_choiceEventStates, _ChatCompletionStream_currentChatCompletionSnapshot, _ChatCompletionStream_beginRequest, _ChatCompletionStream_getChoiceEventState, _ChatCompletionStream_addChunk, _ChatCompletionStream_emitToolCallDoneEvent, _ChatCompletionStream_emitContentDoneEvents, _ChatCompletionStream_endRequest, _ChatCompletionStream_getAutoParseableResponseFormat, _ChatCompletionStream_accumulateChatCompletion; +import { OpenAIError, APIUserAbortError, LengthFinishReasonError, ContentFilterFinishReasonError, } from "../error.mjs"; +import { AbstractChatCompletionRunner, } from "./AbstractChatCompletionRunner.mjs"; +import { Stream } from "../streaming.mjs"; +import { hasAutoParseableInput, isAutoParsableResponseFormat, isAutoParsableTool, maybeParseChatCompletion, shouldParseToolCall, } from "../lib/parser.mjs"; +import { partialParse } from "../_vendor/partial-json-parser/parser.mjs"; +export class ChatCompletionStream extends AbstractChatCompletionRunner { + constructor(params) { + super(); + _ChatCompletionStream_instances.add(this); + _ChatCompletionStream_params.set(this, void 0); + _ChatCompletionStream_choiceEventStates.set(this, void 0); + _ChatCompletionStream_currentChatCompletionSnapshot.set(this, void 0); + __classPrivateFieldSet(this, _ChatCompletionStream_params, params, "f"); + __classPrivateFieldSet(this, _ChatCompletionStream_choiceEventStates, [], "f"); + } + get currentChatCompletionSnapshot() { + return __classPrivateFieldGet(this, _ChatCompletionStream_currentChatCompletionSnapshot, "f"); + } + /** + * Intended for use on the frontend, consuming a stream produced with + * `.toReadableStream()` on the backend. + * + * Note that messages sent to the model do not appear in `.on('message')` + * in this context. 
+ */ + static fromReadableStream(stream) { + const runner = new ChatCompletionStream(null); + runner._run(() => runner._fromReadableStream(stream)); + return runner; + } + static createChatCompletion(client, params, options) { + const runner = new ChatCompletionStream(params); + runner._run(() => runner._runChatCompletion(client, { ...params, stream: true }, { ...options, headers: { ...options?.headers, 'X-Stainless-Helper-Method': 'stream' } })); + return runner; + } + async _createChatCompletion(client, params, options) { + super._createChatCompletion; + const signal = options?.signal; + if (signal) { + if (signal.aborted) + this.controller.abort(); + signal.addEventListener('abort', () => this.controller.abort()); + } + __classPrivateFieldGet(this, _ChatCompletionStream_instances, "m", _ChatCompletionStream_beginRequest).call(this); + const stream = await client.chat.completions.create({ ...params, stream: true }, { ...options, signal: this.controller.signal }); + this._connected(); + for await (const chunk of stream) { + __classPrivateFieldGet(this, _ChatCompletionStream_instances, "m", _ChatCompletionStream_addChunk).call(this, chunk); + } + if (stream.controller.signal?.aborted) { + throw new APIUserAbortError(); + } + return this._addChatCompletion(__classPrivateFieldGet(this, _ChatCompletionStream_instances, "m", _ChatCompletionStream_endRequest).call(this)); + } + async _fromReadableStream(readableStream, options) { + const signal = options?.signal; + if (signal) { + if (signal.aborted) + this.controller.abort(); + signal.addEventListener('abort', () => this.controller.abort()); + } + __classPrivateFieldGet(this, _ChatCompletionStream_instances, "m", _ChatCompletionStream_beginRequest).call(this); + this._connected(); + const stream = Stream.fromReadableStream(readableStream, this.controller); + let chatId; + for await (const chunk of stream) { + if (chatId && chatId !== chunk.id) { + // A new request has been made. 
+ this._addChatCompletion(__classPrivateFieldGet(this, _ChatCompletionStream_instances, "m", _ChatCompletionStream_endRequest).call(this)); + } + __classPrivateFieldGet(this, _ChatCompletionStream_instances, "m", _ChatCompletionStream_addChunk).call(this, chunk); + chatId = chunk.id; + } + if (stream.controller.signal?.aborted) { + throw new APIUserAbortError(); + } + return this._addChatCompletion(__classPrivateFieldGet(this, _ChatCompletionStream_instances, "m", _ChatCompletionStream_endRequest).call(this)); + } + [(_ChatCompletionStream_params = new WeakMap(), _ChatCompletionStream_choiceEventStates = new WeakMap(), _ChatCompletionStream_currentChatCompletionSnapshot = new WeakMap(), _ChatCompletionStream_instances = new WeakSet(), _ChatCompletionStream_beginRequest = function _ChatCompletionStream_beginRequest() { + if (this.ended) + return; + __classPrivateFieldSet(this, _ChatCompletionStream_currentChatCompletionSnapshot, undefined, "f"); + }, _ChatCompletionStream_getChoiceEventState = function _ChatCompletionStream_getChoiceEventState(choice) { + let state = __classPrivateFieldGet(this, _ChatCompletionStream_choiceEventStates, "f")[choice.index]; + if (state) { + return state; + } + state = { + content_done: false, + refusal_done: false, + logprobs_content_done: false, + logprobs_refusal_done: false, + done_tool_calls: new Set(), + current_tool_call_index: null, + }; + __classPrivateFieldGet(this, _ChatCompletionStream_choiceEventStates, "f")[choice.index] = state; + return state; + }, _ChatCompletionStream_addChunk = function _ChatCompletionStream_addChunk(chunk) { + if (this.ended) + return; + const completion = __classPrivateFieldGet(this, _ChatCompletionStream_instances, "m", _ChatCompletionStream_accumulateChatCompletion).call(this, chunk); + this._emit('chunk', chunk, completion); + for (const choice of chunk.choices) { + const choiceSnapshot = completion.choices[choice.index]; + if (choice.delta.content != null && + choiceSnapshot.message?.role === 'assistant' && + choiceSnapshot.message?.content) { + this._emit('content', choice.delta.content, choiceSnapshot.message.content); + this._emit('content.delta', { + delta: choice.delta.content, + snapshot: choiceSnapshot.message.content, + parsed: choiceSnapshot.message.parsed, + }); + } + if (choice.delta.refusal != null && + choiceSnapshot.message?.role === 'assistant' && + choiceSnapshot.message?.refusal) { + this._emit('refusal.delta', { + delta: choice.delta.refusal, + snapshot: choiceSnapshot.message.refusal, + }); + } + if (choice.logprobs?.content != null && choiceSnapshot.message?.role === 'assistant') { + this._emit('logprobs.content.delta', { + content: choice.logprobs?.content, + snapshot: choiceSnapshot.logprobs?.content ?? [], + }); + } + if (choice.logprobs?.refusal != null && choiceSnapshot.message?.role === 'assistant') { + this._emit('logprobs.refusal.delta', { + refusal: choice.logprobs?.refusal, + snapshot: choiceSnapshot.logprobs?.refusal ?? 
[], + }); + } + const state = __classPrivateFieldGet(this, _ChatCompletionStream_instances, "m", _ChatCompletionStream_getChoiceEventState).call(this, choiceSnapshot); + if (choiceSnapshot.finish_reason) { + __classPrivateFieldGet(this, _ChatCompletionStream_instances, "m", _ChatCompletionStream_emitContentDoneEvents).call(this, choiceSnapshot); + if (state.current_tool_call_index != null) { + __classPrivateFieldGet(this, _ChatCompletionStream_instances, "m", _ChatCompletionStream_emitToolCallDoneEvent).call(this, choiceSnapshot, state.current_tool_call_index); + } + } + for (const toolCall of choice.delta.tool_calls ?? []) { + if (state.current_tool_call_index !== toolCall.index) { + __classPrivateFieldGet(this, _ChatCompletionStream_instances, "m", _ChatCompletionStream_emitContentDoneEvents).call(this, choiceSnapshot); + // new tool call started, the previous one is done + if (state.current_tool_call_index != null) { + __classPrivateFieldGet(this, _ChatCompletionStream_instances, "m", _ChatCompletionStream_emitToolCallDoneEvent).call(this, choiceSnapshot, state.current_tool_call_index); + } + } + state.current_tool_call_index = toolCall.index; + } + for (const toolCallDelta of choice.delta.tool_calls ?? []) { + const toolCallSnapshot = choiceSnapshot.message.tool_calls?.[toolCallDelta.index]; + if (!toolCallSnapshot?.type) { + continue; + } + if (toolCallSnapshot?.type === 'function') { + this._emit('tool_calls.function.arguments.delta', { + name: toolCallSnapshot.function?.name, + index: toolCallDelta.index, + arguments: toolCallSnapshot.function.arguments, + parsed_arguments: toolCallSnapshot.function.parsed_arguments, + arguments_delta: toolCallDelta.function?.arguments ?? '', + }); + } + else { + assertNever(toolCallSnapshot?.type); + } + } + } + }, _ChatCompletionStream_emitToolCallDoneEvent = function _ChatCompletionStream_emitToolCallDoneEvent(choiceSnapshot, toolCallIndex) { + const state = __classPrivateFieldGet(this, _ChatCompletionStream_instances, "m", _ChatCompletionStream_getChoiceEventState).call(this, choiceSnapshot); + if (state.done_tool_calls.has(toolCallIndex)) { + // we've already fired the done event + return; + } + const toolCallSnapshot = choiceSnapshot.message.tool_calls?.[toolCallIndex]; + if (!toolCallSnapshot) { + throw new Error('no tool call snapshot'); + } + if (!toolCallSnapshot.type) { + throw new Error('tool call snapshot missing `type`'); + } + if (toolCallSnapshot.type === 'function') { + const inputTool = __classPrivateFieldGet(this, _ChatCompletionStream_params, "f")?.tools?.find((tool) => tool.type === 'function' && tool.function.name === toolCallSnapshot.function.name); + this._emit('tool_calls.function.arguments.done', { + name: toolCallSnapshot.function.name, + index: toolCallIndex, + arguments: toolCallSnapshot.function.arguments, + parsed_arguments: isAutoParsableTool(inputTool) ? inputTool.$parseRaw(toolCallSnapshot.function.arguments) + : inputTool?.function.strict ? 
JSON.parse(toolCallSnapshot.function.arguments) + : null, + }); + } + else { + assertNever(toolCallSnapshot.type); + } + }, _ChatCompletionStream_emitContentDoneEvents = function _ChatCompletionStream_emitContentDoneEvents(choiceSnapshot) { + const state = __classPrivateFieldGet(this, _ChatCompletionStream_instances, "m", _ChatCompletionStream_getChoiceEventState).call(this, choiceSnapshot); + if (choiceSnapshot.message.content && !state.content_done) { + state.content_done = true; + const responseFormat = __classPrivateFieldGet(this, _ChatCompletionStream_instances, "m", _ChatCompletionStream_getAutoParseableResponseFormat).call(this); + this._emit('content.done', { + content: choiceSnapshot.message.content, + parsed: responseFormat ? responseFormat.$parseRaw(choiceSnapshot.message.content) : null, + }); + } + if (choiceSnapshot.message.refusal && !state.refusal_done) { + state.refusal_done = true; + this._emit('refusal.done', { refusal: choiceSnapshot.message.refusal }); + } + if (choiceSnapshot.logprobs?.content && !state.logprobs_content_done) { + state.logprobs_content_done = true; + this._emit('logprobs.content.done', { content: choiceSnapshot.logprobs.content }); + } + if (choiceSnapshot.logprobs?.refusal && !state.logprobs_refusal_done) { + state.logprobs_refusal_done = true; + this._emit('logprobs.refusal.done', { refusal: choiceSnapshot.logprobs.refusal }); + } + }, _ChatCompletionStream_endRequest = function _ChatCompletionStream_endRequest() { + if (this.ended) { + throw new OpenAIError(`stream has ended, this shouldn't happen`); + } + const snapshot = __classPrivateFieldGet(this, _ChatCompletionStream_currentChatCompletionSnapshot, "f"); + if (!snapshot) { + throw new OpenAIError(`request ended without sending any chunks`); + } + __classPrivateFieldSet(this, _ChatCompletionStream_currentChatCompletionSnapshot, undefined, "f"); + __classPrivateFieldSet(this, _ChatCompletionStream_choiceEventStates, [], "f"); + return finalizeChatCompletion(snapshot, __classPrivateFieldGet(this, _ChatCompletionStream_params, "f")); + }, _ChatCompletionStream_getAutoParseableResponseFormat = function _ChatCompletionStream_getAutoParseableResponseFormat() { + const responseFormat = __classPrivateFieldGet(this, _ChatCompletionStream_params, "f")?.response_format; + if (isAutoParsableResponseFormat(responseFormat)) { + return responseFormat; + } + return null; + }, _ChatCompletionStream_accumulateChatCompletion = function _ChatCompletionStream_accumulateChatCompletion(chunk) { + var _a, _b, _c, _d; + let snapshot = __classPrivateFieldGet(this, _ChatCompletionStream_currentChatCompletionSnapshot, "f"); + const { choices, ...rest } = chunk; + if (!snapshot) { + snapshot = __classPrivateFieldSet(this, _ChatCompletionStream_currentChatCompletionSnapshot, { + ...rest, + choices: [], + }, "f"); + } + else { + Object.assign(snapshot, rest); + } + for (const { delta, finish_reason, index, logprobs = null, ...other } of chunk.choices) { + let choice = snapshot.choices[index]; + if (!choice) { + choice = snapshot.choices[index] = { finish_reason, index, message: {}, logprobs, ...other }; + } + if (logprobs) { + if (!choice.logprobs) { + choice.logprobs = Object.assign({}, logprobs); + } + else { + const { content, refusal, ...rest } = logprobs; + assertIsEmpty(rest); + Object.assign(choice.logprobs, rest); + if (content) { + (_a = choice.logprobs).content ?? (_a.content = []); + choice.logprobs.content.push(...content); + } + if (refusal) { + (_b = choice.logprobs).refusal ?? 
(_b.refusal = []); + choice.logprobs.refusal.push(...refusal); + } + } + } + if (finish_reason) { + choice.finish_reason = finish_reason; + if (__classPrivateFieldGet(this, _ChatCompletionStream_params, "f") && hasAutoParseableInput(__classPrivateFieldGet(this, _ChatCompletionStream_params, "f"))) { + if (finish_reason === 'length') { + throw new LengthFinishReasonError(); + } + if (finish_reason === 'content_filter') { + throw new ContentFilterFinishReasonError(); + } + } + } + Object.assign(choice, other); + if (!delta) + continue; // Shouldn't happen; just in case. + const { content, refusal, function_call, role, tool_calls, ...rest } = delta; + assertIsEmpty(rest); + Object.assign(choice.message, rest); + if (refusal) { + choice.message.refusal = (choice.message.refusal || '') + refusal; + } + if (role) + choice.message.role = role; + if (function_call) { + if (!choice.message.function_call) { + choice.message.function_call = function_call; + } + else { + if (function_call.name) + choice.message.function_call.name = function_call.name; + if (function_call.arguments) { + (_c = choice.message.function_call).arguments ?? (_c.arguments = ''); + choice.message.function_call.arguments += function_call.arguments; + } + } + } + if (content) { + choice.message.content = (choice.message.content || '') + content; + if (!choice.message.refusal && __classPrivateFieldGet(this, _ChatCompletionStream_instances, "m", _ChatCompletionStream_getAutoParseableResponseFormat).call(this)) { + choice.message.parsed = partialParse(choice.message.content); + } + } + if (tool_calls) { + if (!choice.message.tool_calls) + choice.message.tool_calls = []; + for (const { index, id, type, function: fn, ...rest } of tool_calls) { + const tool_call = ((_d = choice.message.tool_calls)[index] ?? (_d[index] = {})); + Object.assign(tool_call, rest); + if (id) + tool_call.id = id; + if (type) + tool_call.type = type; + if (fn) + tool_call.function ?? (tool_call.function = { name: fn.name ?? '', arguments: '' }); + if (fn?.name) + tool_call.function.name = fn.name; + if (fn?.arguments) { + tool_call.function.arguments += fn.arguments; + if (shouldParseToolCall(__classPrivateFieldGet(this, _ChatCompletionStream_params, "f"), tool_call)) { + tool_call.function.parsed_arguments = partialParse(tool_call.function.arguments); + } + } + } + } + } + return snapshot; + }, Symbol.asyncIterator)]() { + const pushQueue = []; + const readQueue = []; + let done = false; + this.on('chunk', (chunk) => { + const reader = readQueue.shift(); + if (reader) { + reader.resolve(chunk); + } + else { + pushQueue.push(chunk); + } + }); + this.on('end', () => { + done = true; + for (const reader of readQueue) { + reader.resolve(undefined); + } + readQueue.length = 0; + }); + this.on('abort', (err) => { + done = true; + for (const reader of readQueue) { + reader.reject(err); + } + readQueue.length = 0; + }); + this.on('error', (err) => { + done = true; + for (const reader of readQueue) { + reader.reject(err); + } + readQueue.length = 0; + }); + return { + next: async () => { + if (!pushQueue.length) { + if (done) { + return { value: undefined, done: true }; + } + return new Promise((resolve, reject) => readQueue.push({ resolve, reject })).then((chunk) => (chunk ? 
{ value: chunk, done: false } : { value: undefined, done: true })); + } + const chunk = pushQueue.shift(); + return { value: chunk, done: false }; + }, + return: async () => { + this.abort(); + return { value: undefined, done: true }; + }, + }; + } + toReadableStream() { + const stream = new Stream(this[Symbol.asyncIterator].bind(this), this.controller); + return stream.toReadableStream(); + } +} +function finalizeChatCompletion(snapshot, params) { + const { id, choices, created, model, system_fingerprint, ...rest } = snapshot; + const completion = { + ...rest, + id, + choices: choices.map(({ message, finish_reason, index, logprobs, ...choiceRest }) => { + if (!finish_reason) { + throw new OpenAIError(`missing finish_reason for choice ${index}`); + } + const { content = null, function_call, tool_calls, ...messageRest } = message; + const role = message.role; // this is what we expect; in theory it could be different which would make our types a slight lie but would be fine. + if (!role) { + throw new OpenAIError(`missing role for choice ${index}`); + } + if (function_call) { + const { arguments: args, name } = function_call; + if (args == null) { + throw new OpenAIError(`missing function_call.arguments for choice ${index}`); + } + if (!name) { + throw new OpenAIError(`missing function_call.name for choice ${index}`); + } + return { + ...choiceRest, + message: { + content, + function_call: { arguments: args, name }, + role, + refusal: message.refusal ?? null, + }, + finish_reason, + index, + logprobs, + }; + } + if (tool_calls) { + return { + ...choiceRest, + index, + finish_reason, + logprobs, + message: { + ...messageRest, + role, + content, + refusal: message.refusal ?? null, + tool_calls: tool_calls.map((tool_call, i) => { + const { function: fn, type, id, ...toolRest } = tool_call; + const { arguments: args, name, ...fnRest } = fn || {}; + if (id == null) { + throw new OpenAIError(`missing choices[${index}].tool_calls[${i}].id\n${str(snapshot)}`); + } + if (type == null) { + throw new OpenAIError(`missing choices[${index}].tool_calls[${i}].type\n${str(snapshot)}`); + } + if (name == null) { + throw new OpenAIError(`missing choices[${index}].tool_calls[${i}].function.name\n${str(snapshot)}`); + } + if (args == null) { + throw new OpenAIError(`missing choices[${index}].tool_calls[${i}].function.arguments\n${str(snapshot)}`); + } + return { ...toolRest, id, type, function: { ...fnRest, name, arguments: args } }; + }), + }, + }; + } + return { + ...choiceRest, + message: { ...messageRest, content, role, refusal: message.refusal ?? null }, + finish_reason, + index, + logprobs, + }; + }), + created, + model, + object: 'chat.completion', + ...(system_fingerprint ? { system_fingerprint } : {}), + }; + return maybeParseChatCompletion(completion, params); +} +function str(x) { + return JSON.stringify(x); +} +/** + * Ensures the given argument is an empty object, useful for + * asserting that all known properties on an object have been + * destructured. 
+ */ +function assertIsEmpty(obj) { + return; +} +function assertNever(_x) { } +//# sourceMappingURL=ChatCompletionStream.mjs.map \ No newline at end of file diff --git a/node_modules/openai/lib/ChatCompletionStream.mjs.map b/node_modules/openai/lib/ChatCompletionStream.mjs.map new file mode 100644 index 0000000..0384397 --- /dev/null +++ b/node_modules/openai/lib/ChatCompletionStream.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"ChatCompletionStream.mjs","sourceRoot":"","sources":["../src/lib/ChatCompletionStream.ts"],"names":[],"mappings":";;;;;;;;;;;;OACO,EACL,WAAW,EACX,iBAAiB,EACjB,uBAAuB,EACvB,8BAA8B,GAC/B;OASM,EACL,4BAA4B,GAE7B;OAEM,EAAE,MAAM,EAAE;OAGV,EAEL,qBAAqB,EACrB,4BAA4B,EAC5B,kBAAkB,EAClB,wBAAwB,EACxB,mBAAmB,GACpB;OACM,EAAE,YAAY,EAAE;AA+FvB,MAAM,OAAO,oBACX,SAAQ,4BAA0E;IAOlF,YAAY,MAAyC;QACnD,KAAK,EAAE,CAAC;;QALV,+CAA2C;QAC3C,0DAAuC;QACvC,sEAAmE;QAIjE,uBAAA,IAAI,gCAAW,MAAM,MAAA,CAAC;QACtB,uBAAA,IAAI,2CAAsB,EAAE,MAAA,CAAC;IAC/B,CAAC;IAED,IAAI,6BAA6B;QAC/B,OAAO,uBAAA,IAAI,2DAA+B,CAAC;IAC7C,CAAC;IAED;;;;;;OAMG;IACH,MAAM,CAAC,kBAAkB,CAAC,MAAsB;QAC9C,MAAM,MAAM,GAAG,IAAI,oBAAoB,CAAC,IAAI,CAAC,CAAC;QAC9C,MAAM,CAAC,IAAI,CAAC,GAAG,EAAE,CAAC,MAAM,CAAC,mBAAmB,CAAC,MAAM,CAAC,CAAC,CAAC;QACtD,OAAO,MAAM,CAAC;IAChB,CAAC;IAED,MAAM,CAAC,oBAAoB,CACzB,MAAc,EACd,MAAkC,EAClC,OAA6B;QAE7B,MAAM,MAAM,GAAG,IAAI,oBAAoB,CAAU,MAA6C,CAAC,CAAC;QAChG,MAAM,CAAC,IAAI,CAAC,GAAG,EAAE,CACf,MAAM,CAAC,kBAAkB,CACvB,MAAM,EACN,EAAE,GAAG,MAAM,EAAE,MAAM,EAAE,IAAI,EAAE,EAC3B,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE,2BAA2B,EAAE,QAAQ,EAAE,EAAE,CACxF,CACF,CAAC;QACF,OAAO,MAAM,CAAC;IAChB,CAAC;IAoMkB,KAAK,CAAC,qBAAqB,CAC5C,MAAc,EACd,MAAkC,EAClC,OAA6B;QAE7B,KAAK,CAAC,qBAAqB,CAAC;QAC5B,MAAM,MAAM,GAAG,OAAO,EAAE,MAAM,CAAC;QAC/B,IAAI,MAAM,EAAE;YACV,IAAI,MAAM,CAAC,OAAO;gBAAE,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,CAAC;YAC5C,MAAM,CAAC,gBAAgB,CAAC,OAAO,EAAE,GAAG,EAAE,CAAC,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,CAAC,CAAC;SACjE;QACD,uBAAA,IAAI,2EAAc,MAAlB,IAAI,CAAgB,CAAC;QAErB,MAAM,MAAM,GAAG,MAAM,MAAM,CAAC,IAAI,CAAC,WAAW,CAAC,MAAM,CACjD,EAAE,GAAG,MAAM,EAAE,MAAM,EAAE,IAAI,EAAE,EAC3B,EAAE,GAAG,OAAO,EAAE,MAAM,EAAE,IAAI,CAAC,UAAU,CAAC,MAAM,EAAE,CAC/C,CAAC;QACF,IAAI,CAAC,UAAU,EAAE,CAAC;QAClB,IAAI,KAAK,EAAE,MAAM,KAAK,IAAI,MAAM,EAAE;YAChC,uBAAA,IAAI,uEAAU,MAAd,IAAI,EAAW,KAAK,CAAC,CAAC;SACvB;QACD,IAAI,MAAM,CAAC,UAAU,CAAC,MAAM,EAAE,OAAO,EAAE;YACrC,MAAM,IAAI,iBAAiB,EAAE,CAAC;SAC/B;QACD,OAAO,IAAI,CAAC,kBAAkB,CAAC,uBAAA,IAAI,yEAAY,MAAhB,IAAI,CAAc,CAAC,CAAC;IACrD,CAAC;IAES,KAAK,CAAC,mBAAmB,CACjC,cAA8B,EAC9B,OAA6B;QAE7B,MAAM,MAAM,GAAG,OAAO,EAAE,MAAM,CAAC;QAC/B,IAAI,MAAM,EAAE;YACV,IAAI,MAAM,CAAC,OAAO;gBAAE,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,CAAC;YAC5C,MAAM,CAAC,gBAAgB,CAAC,OAAO,EAAE,GAAG,EAAE,CAAC,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,CAAC,CAAC;SACjE;QACD,uBAAA,IAAI,2EAAc,MAAlB,IAAI,CAAgB,CAAC;QACrB,IAAI,CAAC,UAAU,EAAE,CAAC;QAClB,MAAM,MAAM,GAAG,MAAM,CAAC,kBAAkB,CAAsB,cAAc,EAAE,IAAI,CAAC,UAAU,CAAC,CAAC;QAC/F,IAAI,MAAM,CAAC;QACX,IAAI,KAAK,EAAE,MAAM,KAAK,IAAI,MAAM,EAAE;YAChC,IAAI,MAAM,IAAI,MAAM,KAAK,KAAK,CAAC,EAAE,EAAE;gBACjC,+BAA+B;gBAC/B,IAAI,CAAC,kBAAkB,CAAC,uBAAA,IAAI,yEAAY,MAAhB,IAAI,CAAc,CAAC,CAAC;aAC7C;YAED,uBAAA,IAAI,uEAAU,MAAd,IAAI,EAAW,KAAK,CAAC,CAAC;YACtB,MAAM,GAAG,KAAK,CAAC,EAAE,CAAC;SACnB;QACD,IAAI,MAAM,CAAC,UAAU,CAAC,MAAM,EAAE,OAAO,EAAE;YACrC,MAAM,IAAI,iBAAiB,EAAE,CAAC;SAC/B;QACD,OAAO,IAAI,CAAC,kBAAkB,CAAC,uBAAA,IAAI,yEAAY,MAAhB,IAAI,CAAc,CAAC,CAAC;IACrD,CAAC;IAuHD;QA7WE,IAAI,IAAI,CAAC,KAAK;YAAE,OAAO;QACvB,uBAAA,IAAI,uDAAkC,SAAS,MAAA,CAAC;IAClD,CAAC,iGAEoB,MAAqC;QACxD,IAAI,KAAK,GAAG,uBAAA,IAAI,+CAAmB,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;QAClD,IAAI,KAAK,EAAE;YACT,OA
AO,KAAK,CAAC;SACd;QAED,KAAK,GAAG;YACN,YAAY,EAAE,KAAK;YACnB,YAAY,EAAE,KAAK;YACnB,qBAAqB,EAAE,KAAK;YAC5B,qBAAqB,EAAE,KAAK;YAC5B,eAAe,EAAE,IAAI,GAAG,EAAE;YAC1B,uBAAuB,EAAE,IAAI;SAC9B,CAAC;QACF,uBAAA,IAAI,+CAAmB,CAAC,MAAM,CAAC,KAAK,CAAC,GAAG,KAAK,CAAC;QAC9C,OAAO,KAAK,CAAC;IACf,CAAC,2EAE8C,KAA0B;QACvE,IAAI,IAAI,CAAC,KAAK;YAAE,OAAO;QAEvB,MAAM,UAAU,GAAG,uBAAA,IAAI,uFAA0B,MAA9B,IAAI,EAA2B,KAAK,CAAC,CAAC;QACzD,IAAI,CAAC,KAAK,CAAC,OAAO,EAAE,KAAK,EAAE,UAAU,CAAC,CAAC;QAEvC,KAAK,MAAM,MAAM,IAAI,KAAK,CAAC,OAAO,EAAE;YAClC,MAAM,cAAc,GAAG,UAAU,CAAC,OAAO,CAAC,MAAM,CAAC,KAAK,CAAE,CAAC;YAEzD,IACE,MAAM,CAAC,KAAK,CAAC,OAAO,IAAI,IAAI;gBAC5B,cAAc,CAAC,OAAO,EAAE,IAAI,KAAK,WAAW;gBAC5C,cAAc,CAAC,OAAO,EAAE,OAAO,EAC/B;gBACA,IAAI,CAAC,KAAK,CAAC,SAAS,EAAE,MAAM,CAAC,KAAK,CAAC,OAAO,EAAE,cAAc,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;gBAC5E,IAAI,CAAC,KAAK,CAAC,eAAe,EAAE;oBAC1B,KAAK,EAAE,MAAM,CAAC,KAAK,CAAC,OAAO;oBAC3B,QAAQ,EAAE,cAAc,CAAC,OAAO,CAAC,OAAO;oBACxC,MAAM,EAAE,cAAc,CAAC,OAAO,CAAC,MAAM;iBACtC,CAAC,CAAC;aACJ;YAED,IACE,MAAM,CAAC,KAAK,CAAC,OAAO,IAAI,IAAI;gBAC5B,cAAc,CAAC,OAAO,EAAE,IAAI,KAAK,WAAW;gBAC5C,cAAc,CAAC,OAAO,EAAE,OAAO,EAC/B;gBACA,IAAI,CAAC,KAAK,CAAC,eAAe,EAAE;oBAC1B,KAAK,EAAE,MAAM,CAAC,KAAK,CAAC,OAAO;oBAC3B,QAAQ,EAAE,cAAc,CAAC,OAAO,CAAC,OAAO;iBACzC,CAAC,CAAC;aACJ;YAED,IAAI,MAAM,CAAC,QAAQ,EAAE,OAAO,IAAI,IAAI,IAAI,cAAc,CAAC,OAAO,EAAE,IAAI,KAAK,WAAW,EAAE;gBACpF,IAAI,CAAC,KAAK,CAAC,wBAAwB,EAAE;oBACnC,OAAO,EAAE,MAAM,CAAC,QAAQ,EAAE,OAAO;oBACjC,QAAQ,EAAE,cAAc,CAAC,QAAQ,EAAE,OAAO,IAAI,EAAE;iBACjD,CAAC,CAAC;aACJ;YAED,IAAI,MAAM,CAAC,QAAQ,EAAE,OAAO,IAAI,IAAI,IAAI,cAAc,CAAC,OAAO,EAAE,IAAI,KAAK,WAAW,EAAE;gBACpF,IAAI,CAAC,KAAK,CAAC,wBAAwB,EAAE;oBACnC,OAAO,EAAE,MAAM,CAAC,QAAQ,EAAE,OAAO;oBACjC,QAAQ,EAAE,cAAc,CAAC,QAAQ,EAAE,OAAO,IAAI,EAAE;iBACjD,CAAC,CAAC;aACJ;YAED,MAAM,KAAK,GAAG,uBAAA,IAAI,kFAAqB,MAAzB,IAAI,EAAsB,cAAc,CAAC,CAAC;YAExD,IAAI,cAAc,CAAC,aAAa,EAAE;gBAChC,uBAAA,IAAI,oFAAuB,MAA3B,IAAI,EAAwB,cAAc,CAAC,CAAC;gBAE5C,IAAI,KAAK,CAAC,uBAAuB,IAAI,IAAI,EAAE;oBACzC,uBAAA,IAAI,oFAAuB,MAA3B,IAAI,EAAwB,cAAc,EAAE,KAAK,CAAC,uBAAuB,CAAC,CAAC;iBAC5E;aACF;YAED,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,KAAK,CAAC,UAAU,IAAI,EAAE,EAAE;gBACpD,IAAI,KAAK,CAAC,uBAAuB,KAAK,QAAQ,CAAC,KAAK,EAAE;oBACpD,uBAAA,IAAI,oFAAuB,MAA3B,IAAI,EAAwB,cAAc,CAAC,CAAC;oBAE5C,kDAAkD;oBAClD,IAAI,KAAK,CAAC,uBAAuB,IAAI,IAAI,EAAE;wBACzC,uBAAA,IAAI,oFAAuB,MAA3B,IAAI,EAAwB,cAAc,EAAE,KAAK,CAAC,uBAAuB,CAAC,CAAC;qBAC5E;iBACF;gBAED,KAAK,CAAC,uBAAuB,GAAG,QAAQ,CAAC,KAAK,CAAC;aAChD;YAED,KAAK,MAAM,aAAa,IAAI,MAAM,CAAC,KAAK,CAAC,UAAU,IAAI,EAAE,EAAE;gBACzD,MAAM,gBAAgB,GAAG,cAAc,CAAC,OAAO,CAAC,UAAU,EAAE,CAAC,aAAa,CAAC,KAAK,CAAC,CAAC;gBAClF,IAAI,CAAC,gBAAgB,EAAE,IAAI,EAAE;oBAC3B,SAAS;iBACV;gBAED,IAAI,gBAAgB,EAAE,IAAI,KAAK,UAAU,EAAE;oBACzC,IAAI,CAAC,KAAK,CAAC,qCAAqC,EAAE;wBAChD,IAAI,EAAE,gBAAgB,CAAC,QAAQ,EAAE,IAAI;wBACrC,KAAK,EAAE,aAAa,CAAC,KAAK;wBAC1B,SAAS,EAAE,gBAAgB,CAAC,QAAQ,CAAC,SAAS;wBAC9C,gBAAgB,EAAE,gBAAgB,CAAC,QAAQ,CAAC,gBAAgB;wBAC5D,eAAe,EAAE,aAAa,CAAC,QAAQ,EAAE,SAAS,IAAI,EAAE;qBACzD,CAAC,CAAC;iBACJ;qBAAM;oBACL,WAAW,CAAC,gBAAgB,EAAE,IAAI,CAAC,CAAC;iBACrC;aACF;SACF;IACH,CAAC,qGAEsB,cAA6C,EAAE,aAAqB;QACzF,MAAM,KAAK,GAAG,uBAAA,IAAI,kFAAqB,MAAzB,IAAI,EAAsB,cAAc,CAAC,CAAC;QACxD,IAAI,KAAK,CAAC,eAAe,CAAC,GAAG,CAAC,aAAa,CAAC,EAAE;YAC5C,qCAAqC;YACrC,OAAO;SACR;QAED,MAAM,gBAAgB,GAAG,cAAc,CAAC,OAAO,CAAC,UAAU,EAAE,CAAC,aAAa,CAAC,CAAC;QAC5E,IAAI,CAAC,gBAAgB,EAAE;YACrB,MAAM,IAAI,KAAK,CAAC,uBAAuB,CAAC,CAAC;SAC1C;QACD,IAAI,CAAC,gBAAgB,CAAC,IAAI,EAAE;YAC1B,MAAM,IAAI,KAAK,CAAC,mCAAmC,CAAC,CAAC;SACtD;QAED,IAAI,gBAAgB,CAAC,IAAI,KAAK,UAAU,EAAE;YACxC,MAAM,SAAS,GAAG,uBAAA,IAAI,oCAAQ,EAAE,KAAK,EAAE,IAAI,CACzC,CAAC,IAAI,EAA
E,EAAE,CAAC,IAAI,CAAC,IAAI,KAAK,UAAU,IAAI,IAAI,CAAC,QAAQ,CAAC,IAAI,KAAK,gBAAgB,CAAC,QAAQ,CAAC,IAAI,CAC5F,CAAC;YAEF,IAAI,CAAC,KAAK,CAAC,oCAAoC,EAAE;gBAC/C,IAAI,EAAE,gBAAgB,CAAC,QAAQ,CAAC,IAAI;gBACpC,KAAK,EAAE,aAAa;gBACpB,SAAS,EAAE,gBAAgB,CAAC,QAAQ,CAAC,SAAS;gBAC9C,gBAAgB,EACd,kBAAkB,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC,SAAS,CAAC,gBAAgB,CAAC,QAAQ,CAAC,SAAS,CAAC;oBACxF,CAAC,CAAC,SAAS,EAAE,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,gBAAgB,CAAC,QAAQ,CAAC,SAAS,CAAC;wBAC9E,CAAC,CAAC,IAAI;aACT,CAAC,CAAC;SACJ;aAAM;YACL,WAAW,CAAC,gBAAgB,CAAC,IAAI,CAAC,CAAC;SACpC;IACH,CAAC,qGAEsB,cAA6C;QAClE,MAAM,KAAK,GAAG,uBAAA,IAAI,kFAAqB,MAAzB,IAAI,EAAsB,cAAc,CAAC,CAAC;QAExD,IAAI,cAAc,CAAC,OAAO,CAAC,OAAO,IAAI,CAAC,KAAK,CAAC,YAAY,EAAE;YACzD,KAAK,CAAC,YAAY,GAAG,IAAI,CAAC;YAE1B,MAAM,cAAc,GAAG,uBAAA,IAAI,6FAAgC,MAApC,IAAI,CAAkC,CAAC;YAE9D,IAAI,CAAC,KAAK,CAAC,cAAc,EAAE;gBACzB,OAAO,EAAE,cAAc,CAAC,OAAO,CAAC,OAAO;gBACvC,MAAM,EAAE,cAAc,CAAC,CAAC,CAAC,cAAc,CAAC,SAAS,CAAC,cAAc,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC,CAAC,CAAE,IAAY;aAClG,CAAC,CAAC;SACJ;QAED,IAAI,cAAc,CAAC,OAAO,CAAC,OAAO,IAAI,CAAC,KAAK,CAAC,YAAY,EAAE;YACzD,KAAK,CAAC,YAAY,GAAG,IAAI,CAAC;YAE1B,IAAI,CAAC,KAAK,CAAC,cAAc,EAAE,EAAE,OAAO,EAAE,cAAc,CAAC,OAAO,CAAC,OAAO,EAAE,CAAC,CAAC;SACzE;QAED,IAAI,cAAc,CAAC,QAAQ,EAAE,OAAO,IAAI,CAAC,KAAK,CAAC,qBAAqB,EAAE;YACpE,KAAK,CAAC,qBAAqB,GAAG,IAAI,CAAC;YAEnC,IAAI,CAAC,KAAK,CAAC,uBAAuB,EAAE,EAAE,OAAO,EAAE,cAAc,CAAC,QAAQ,CAAC,OAAO,EAAE,CAAC,CAAC;SACnF;QAED,IAAI,cAAc,CAAC,QAAQ,EAAE,OAAO,IAAI,CAAC,KAAK,CAAC,qBAAqB,EAAE;YACpE,KAAK,CAAC,qBAAqB,GAAG,IAAI,CAAC;YAEnC,IAAI,CAAC,KAAK,CAAC,uBAAuB,EAAE,EAAE,OAAO,EAAE,cAAc,CAAC,QAAQ,CAAC,OAAO,EAAE,CAAC,CAAC;SACnF;IACH,CAAC;QAGC,IAAI,IAAI,CAAC,KAAK,EAAE;YACd,MAAM,IAAI,WAAW,CAAC,yCAAyC,CAAC,CAAC;SAClE;QACD,MAAM,QAAQ,GAAG,uBAAA,IAAI,2DAA+B,CAAC;QACrD,IAAI,CAAC,QAAQ,EAAE;YACb,MAAM,IAAI,WAAW,CAAC,0CAA0C,CAAC,CAAC;SACnE;QACD,uBAAA,IAAI,uDAAkC,SAAS,MAAA,CAAC;QAChD,uBAAA,IAAI,2CAAsB,EAAE,MAAA,CAAC;QAC7B,OAAO,sBAAsB,CAAC,QAAQ,EAAE,uBAAA,IAAI,oCAAQ,CAAC,CAAC;IACxD,CAAC;QA0DC,MAAM,cAAc,GAAG,uBAAA,IAAI,oCAAQ,EAAE,eAAe,CAAC;QACrD,IAAI,4BAA4B,CAAU,cAAc,CAAC,EAAE;YACzD,OAAO,cAAc,CAAC;SACvB;QAED,OAAO,IAAI,CAAC;IACd,CAAC,2GAEyB,KAA0B;;QAClD,IAAI,QAAQ,GAAG,uBAAA,IAAI,2DAA+B,CAAC;QACnD,MAAM,EAAE,OAAO,EAAE,GAAG,IAAI,EAAE,GAAG,KAAK,CAAC;QACnC,IAAI,CAAC,QAAQ,EAAE;YACb,QAAQ,GAAG,uBAAA,IAAI,uDAAkC;gBAC/C,GAAG,IAAI;gBACP,OAAO,EAAE,EAAE;aACZ,MAAA,CAAC;SACH;aAAM;YACL,MAAM,CAAC,MAAM,CAAC,QAAQ,EAAE,IAAI,CAAC,CAAC;SAC/B;QAED,KAAK,MAAM,EAAE,KAAK,EAAE,aAAa,EAAE,KAAK,EAAE,QAAQ,GAAG,IAAI,EAAE,GAAG,KAAK,EAAE,IAAI,KAAK,CAAC,OAAO,EAAE;YACtF,IAAI,MAAM,GAAG,QAAQ,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC;YACrC,IAAI,CAAC,MAAM,EAAE;gBACX,MAAM,GAAG,QAAQ,CAAC,OAAO,CAAC,KAAK,CAAC,GAAG,EAAE,aAAa,EAAE,KAAK,EAAE,OAAO,EAAE,EAAE,EAAE,QAAQ,EAAE,GAAG,KAAK,EAAE,CAAC;aAC9F;YAED,IAAI,QAAQ,EAAE;gBACZ,IAAI,CAAC,MAAM,CAAC,QAAQ,EAAE;oBACpB,MAAM,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,QAAQ,CAAC,CAAC;iBAC/C;qBAAM;oBACL,MAAM,EAAE,OAAO,EAAE,OAAO,EAAE,GAAG,IAAI,EAAE,GAAG,QAAQ,CAAC;oBAC/C,aAAa,CAAC,IAAI,CAAC,CAAC;oBACpB,MAAM,CAAC,MAAM,CAAC,MAAM,CAAC,QAAQ,EAAE,IAAI,CAAC,CAAC;oBAErC,IAAI,OAAO,EAAE;wBACX,MAAA,MAAM,CAAC,QAAQ,EAAC,OAAO,QAAP,OAAO,GAAK,EAAE,EAAC;wBAC/B,MAAM,CAAC,QAAQ,CAAC,OAAO,CAAC,IAAI,CAAC,GAAG,OAAO,CAAC,CAAC;qBAC1C;oBAED,IAAI,OAAO,EAAE;wBACX,MAAA,MAAM,CAAC,QAAQ,EAAC,OAAO,QAAP,OAAO,GAAK,EAAE,EAAC;wBAC/B,MAAM,CAAC,QAAQ,CAAC,OAAO,CAAC,IAAI,CAAC,GAAG,OAAO,CAAC,CAAC;qBAC1C;iBACF;aACF;YAED,IAAI,aAAa,EAAE;gBACjB,MAAM,CAAC,aAAa,GAAG,aAAa,CAAC;gBAErC,IAAI,uBAAA,IAAI,oCAAQ,IAAI,qBAAqB,CAAC,uBAAA,IAAI,oCAAQ,CAAC,EAAE;oBACvD,IAAI,aAAa,KAAK,QAAQ,EAAE;wBAC9B,MAAM,I
AAI,uBAAuB,EAAE,CAAC;qBACrC;oBAED,IAAI,aAAa,KAAK,gBAAgB,EAAE;wBACtC,MAAM,IAAI,8BAA8B,EAAE,CAAC;qBAC5C;iBACF;aACF;YAED,MAAM,CAAC,MAAM,CAAC,MAAM,EAAE,KAAK,CAAC,CAAC;YAE7B,IAAI,CAAC,KAAK;gBAAE,SAAS,CAAC,kCAAkC;YAExD,MAAM,EAAE,OAAO,EAAE,OAAO,EAAE,aAAa,EAAE,IAAI,EAAE,UAAU,EAAE,GAAG,IAAI,EAAE,GAAG,KAAK,CAAC;YAC7E,aAAa,CAAC,IAAI,CAAC,CAAC;YACpB,MAAM,CAAC,MAAM,CAAC,MAAM,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;YAEpC,IAAI,OAAO,EAAE;gBACX,MAAM,CAAC,OAAO,CAAC,OAAO,GAAG,CAAC,MAAM,CAAC,OAAO,CAAC,OAAO,IAAI,EAAE,CAAC,GAAG,OAAO,CAAC;aACnE;YAED,IAAI,IAAI;gBAAE,MAAM,CAAC,OAAO,CAAC,IAAI,GAAG,IAAI,CAAC;YACrC,IAAI,aAAa,EAAE;gBACjB,IAAI,CAAC,MAAM,CAAC,OAAO,CAAC,aAAa,EAAE;oBACjC,MAAM,CAAC,OAAO,CAAC,aAAa,GAAG,aAAa,CAAC;iBAC9C;qBAAM;oBACL,IAAI,aAAa,CAAC,IAAI;wBAAE,MAAM,CAAC,OAAO,CAAC,aAAa,CAAC,IAAI,GAAG,aAAa,CAAC,IAAI,CAAC;oBAC/E,IAAI,aAAa,CAAC,SAAS,EAAE;wBAC3B,MAAA,MAAM,CAAC,OAAO,CAAC,aAAa,EAAC,SAAS,QAAT,SAAS,GAAK,EAAE,EAAC;wBAC9C,MAAM,CAAC,OAAO,CAAC,aAAa,CAAC,SAAS,IAAI,aAAa,CAAC,SAAS,CAAC;qBACnE;iBACF;aACF;YACD,IAAI,OAAO,EAAE;gBACX,MAAM,CAAC,OAAO,CAAC,OAAO,GAAG,CAAC,MAAM,CAAC,OAAO,CAAC,OAAO,IAAI,EAAE,CAAC,GAAG,OAAO,CAAC;gBAElE,IAAI,CAAC,MAAM,CAAC,OAAO,CAAC,OAAO,IAAI,uBAAA,IAAI,6FAAgC,MAApC,IAAI,CAAkC,EAAE;oBACrE,MAAM,CAAC,OAAO,CAAC,MAAM,GAAG,YAAY,CAAC,MAAM,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;iBAC9D;aACF;YAED,IAAI,UAAU,EAAE;gBACd,IAAI,CAAC,MAAM,CAAC,OAAO,CAAC,UAAU;oBAAE,MAAM,CAAC,OAAO,CAAC,UAAU,GAAG,EAAE,CAAC;gBAE/D,KAAK,MAAM,EAAE,KAAK,EAAE,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,EAAE,EAAE,GAAG,IAAI,EAAE,IAAI,UAAU,EAAE;oBACnE,MAAM,SAAS,GAAG,OAAC,MAAM,CAAC,OAAO,CAAC,UAAU,EAAC,KAAK,SAAL,KAAK,IAChD,EAAoD,EAAC,CAAC;oBACxD,MAAM,CAAC,MAAM,CAAC,SAAS,EAAE,IAAI,CAAC,CAAC;oBAC/B,IAAI,EAAE;wBAAE,SAAS,CAAC,EAAE,GAAG,EAAE,CAAC;oBAC1B,IAAI,IAAI;wBAAE,SAAS,CAAC,IAAI,GAAG,IAAI,CAAC;oBAChC,IAAI,EAAE;wBAAE,SAAS,CAAC,QAAQ,KAAlB,SAAS,CAAC,QAAQ,GAAK,EAAE,IAAI,EAAE,EAAE,CAAC,IAAI,IAAI,EAAE,EAAE,SAAS,EAAE,EAAE,EAAE,EAAC;oBACtE,IAAI,EAAE,EAAE,IAAI;wBAAE,SAAS,CAAC,QAAS,CAAC,IAAI,GAAG,EAAE,CAAC,IAAI,CAAC;oBACjD,IAAI,EAAE,EAAE,SAAS,EAAE;wBACjB,SAAS,CAAC,QAAS,CAAC,SAAS,IAAI,EAAE,CAAC,SAAS,CAAC;wBAE9C,IAAI,mBAAmB,CAAC,uBAAA,IAAI,oCAAQ,EAAE,SAAS,CAAC,EAAE;4BAChD,SAAS,CAAC,QAAS,CAAC,gBAAgB,GAAG,YAAY,CAAC,SAAS,CAAC,QAAS,CAAC,SAAS,CAAC,CAAC;yBACpF;qBACF;iBACF;aACF;SACF;QACD,OAAO,QAAQ,CAAC;IAClB,CAAC,EAEA,MAAM,CAAC,aAAa,EAAC;QACpB,MAAM,SAAS,GAA0B,EAAE,CAAC;QAC5C,MAAM,SAAS,GAGT,EAAE,CAAC;QACT,IAAI,IAAI,GAAG,KAAK,CAAC;QAEjB,IAAI,CAAC,EAAE,CAAC,OAAO,EAAE,CAAC,KAAK,EAAE,EAAE;YACzB,MAAM,MAAM,GAAG,SAAS,CAAC,KAAK,EAAE,CAAC;YACjC,IAAI,MAAM,EAAE;gBACV,MAAM,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC;aACvB;iBAAM;gBACL,SAAS,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;aACvB;QACH,CAAC,CAAC,CAAC;QAEH,IAAI,CAAC,EAAE,CAAC,KAAK,EAAE,GAAG,EAAE;YAClB,IAAI,GAAG,IAAI,CAAC;YACZ,KAAK,MAAM,MAAM,IAAI,SAAS,EAAE;gBAC9B,MAAM,CAAC,OAAO,CAAC,SAAS,CAAC,CAAC;aAC3B;YACD,SAAS,CAAC,MAAM,GAAG,CAAC,CAAC;QACvB,CAAC,CAAC,CAAC;QAEH,IAAI,CAAC,EAAE,CAAC,OAAO,EAAE,CAAC,GAAG,EAAE,EAAE;YACvB,IAAI,GAAG,IAAI,CAAC;YACZ,KAAK,MAAM,MAAM,IAAI,SAAS,EAAE;gBAC9B,MAAM,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC;aACpB;YACD,SAAS,CAAC,MAAM,GAAG,CAAC,CAAC;QACvB,CAAC,CAAC,CAAC;QAEH,IAAI,CAAC,EAAE,CAAC,OAAO,EAAE,CAAC,GAAG,EAAE,EAAE;YACvB,IAAI,GAAG,IAAI,CAAC;YACZ,KAAK,MAAM,MAAM,IAAI,SAAS,EAAE;gBAC9B,MAAM,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC;aACpB;YACD,SAAS,CAAC,MAAM,GAAG,CAAC,CAAC;QACvB,CAAC,CAAC,CAAC;QAEH,OAAO;YACL,IAAI,EAAE,KAAK,IAAkD,EAAE;gBAC7D,IAAI,CAAC,SAAS,CAAC,MAAM,EAAE;oBACrB,IAAI,IAAI,EAAE;wBACR,OAAO,EAAE,KAAK,EAAE,SAAS,EAAE,IAAI,EAAE,IAAI,EAAE,CAAC;qBACzC;oBACD,OAAO,IAAI,OAAO,CAAkC,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE,CACtE,SAAS,CAAC,IAAI,CAAC,EAAE,OAAO,EAAE,MAAM,EAAE,CAAC,CACpC,
CAAC,IAAI,CAAC,CAAC,KAAK,EAAE,EAAE,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,KAAK,EAAE,KAAK,EAAE,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,CAAC,EAAE,KAAK,EAAE,SAAS,EAAE,IAAI,EAAE,IAAI,EAAE,CAAC,CAAC,CAAC;iBAC/F;gBACD,MAAM,KAAK,GAAG,SAAS,CAAC,KAAK,EAAG,CAAC;gBACjC,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,IAAI,EAAE,KAAK,EAAE,CAAC;YACvC,CAAC;YACD,MAAM,EAAE,KAAK,IAAI,EAAE;gBACjB,IAAI,CAAC,KAAK,EAAE,CAAC;gBACb,OAAO,EAAE,KAAK,EAAE,SAAS,EAAE,IAAI,EAAE,IAAI,EAAE,CAAC;YAC1C,CAAC;SACF,CAAC;IACJ,CAAC;IAED,gBAAgB;QACd,MAAM,MAAM,GAAG,IAAI,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC,aAAa,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,IAAI,CAAC,UAAU,CAAC,CAAC;QAClF,OAAO,MAAM,CAAC,gBAAgB,EAAE,CAAC;IACnC,CAAC;CACF;AAED,SAAS,sBAAsB,CAC7B,QAAgC,EAChC,MAAyC;IAEzC,MAAM,EAAE,EAAE,EAAE,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,kBAAkB,EAAE,GAAG,IAAI,EAAE,GAAG,QAAQ,CAAC;IAC9E,MAAM,UAAU,GAAmB;QACjC,GAAG,IAAI;QACP,EAAE;QACF,OAAO,EAAE,OAAO,CAAC,GAAG,CAClB,CAAC,EAAE,OAAO,EAAE,aAAa,EAAE,KAAK,EAAE,QAAQ,EAAE,GAAG,UAAU,EAAE,EAAyB,EAAE;YACpF,IAAI,CAAC,aAAa,EAAE;gBAClB,MAAM,IAAI,WAAW,CAAC,oCAAoC,KAAK,EAAE,CAAC,CAAC;aACpE;YAED,MAAM,EAAE,OAAO,GAAG,IAAI,EAAE,aAAa,EAAE,UAAU,EAAE,GAAG,WAAW,EAAE,GAAG,OAAO,CAAC;YAC9E,MAAM,IAAI,GAAG,OAAO,CAAC,IAAmB,CAAC,CAAC,qHAAqH;YAC/J,IAAI,CAAC,IAAI,EAAE;gBACT,MAAM,IAAI,WAAW,CAAC,2BAA2B,KAAK,EAAE,CAAC,CAAC;aAC3D;YAED,IAAI,aAAa,EAAE;gBACjB,MAAM,EAAE,SAAS,EAAE,IAAI,EAAE,IAAI,EAAE,GAAG,aAAa,CAAC;gBAChD,IAAI,IAAI,IAAI,IAAI,EAAE;oBAChB,MAAM,IAAI,WAAW,CAAC,8CAA8C,KAAK,EAAE,CAAC,CAAC;iBAC9E;gBAED,IAAI,CAAC,IAAI,EAAE;oBACT,MAAM,IAAI,WAAW,CAAC,yCAAyC,KAAK,EAAE,CAAC,CAAC;iBACzE;gBAED,OAAO;oBACL,GAAG,UAAU;oBACb,OAAO,EAAE;wBACP,OAAO;wBACP,aAAa,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,IAAI,EAAE;wBACxC,IAAI;wBACJ,OAAO,EAAE,OAAO,CAAC,OAAO,IAAI,IAAI;qBACjC;oBACD,aAAa;oBACb,KAAK;oBACL,QAAQ;iBACT,CAAC;aACH;YAED,IAAI,UAAU,EAAE;gBACd,OAAO;oBACL,GAAG,UAAU;oBACb,KAAK;oBACL,aAAa;oBACb,QAAQ;oBACR,OAAO,EAAE;wBACP,GAAG,WAAW;wBACd,IAAI;wBACJ,OAAO;wBACP,OAAO,EAAE,OAAO,CAAC,OAAO,IAAI,IAAI;wBAChC,UAAU,EAAE,UAAU,CAAC,GAAG,CAAC,CAAC,SAAS,EAAE,CAAC,EAAE,EAAE;4BAC1C,MAAM,EAAE,QAAQ,EAAE,EAAE,EAAE,IAAI,EAAE,EAAE,EAAE,GAAG,QAAQ,EAAE,GAAG,SAAS,CAAC;4BAC1D,MAAM,EAAE,SAAS,EAAE,IAAI,EAAE,IAAI,EAAE,GAAG,MAAM,EAAE,GAAG,EAAE,IAAI,EAAE,CAAC;4BACtD,IAAI,EAAE,IAAI,IAAI,EAAE;gCACd,MAAM,IAAI,WAAW,CAAC,mBAAmB,KAAK,gBAAgB,CAAC,SAAS,GAAG,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC;6BAC1F;4BACD,IAAI,IAAI,IAAI,IAAI,EAAE;gCAChB,MAAM,IAAI,WAAW,CAAC,mBAAmB,KAAK,gBAAgB,CAAC,WAAW,GAAG,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC;6BAC5F;4BACD,IAAI,IAAI,IAAI,IAAI,EAAE;gCAChB,MAAM,IAAI,WAAW,CACnB,mBAAmB,KAAK,gBAAgB,CAAC,oBAAoB,GAAG,CAAC,QAAQ,CAAC,EAAE,CAC7E,CAAC;6BACH;4BACD,IAAI,IAAI,IAAI,IAAI,EAAE;gCAChB,MAAM,IAAI,WAAW,CACnB,mBAAmB,KAAK,gBAAgB,CAAC,yBAAyB,GAAG,CAAC,QAAQ,CAAC,EAAE,CAClF,CAAC;6BACH;4BAED,OAAO,EAAE,GAAG,QAAQ,EAAE,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,EAAE,GAAG,MAAM,EAAE,IAAI,EAAE,SAAS,EAAE,IAAI,EAAE,EAAE,CAAC;wBACnF,CAAC,CAAC;qBACH;iBACF,CAAC;aACH;YACD,OAAO;gBACL,GAAG,UAAU;gBACb,OAAO,EAAE,EAAE,GAAG,WAAW,EAAE,OAAO,EAAE,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,OAAO,IAAI,IAAI,EAAE;gBAC5E,aAAa;gBACb,KAAK;gBACL,QAAQ;aACT,CAAC;QACJ,CAAC,CACF;QACD,OAAO;QACP,KAAK;QACL,MAAM,EAAE,iBAAiB;QACzB,GAAG,CAAC,kBAAkB,CAAC,CAAC,CAAC,EAAE,kBAAkB,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC;KACtD,CAAC;IAEF,OAAO,wBAAwB,CAAC,UAAU,EAAE,MAAM,CAAC,CAAC;AACtD,CAAC;AAED,SAAS,GAAG,CAAC,CAAU;IACrB,OAAO,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC;AAC3B,CAAC;AA0JD;;;;GAIG;AACH,SAAS,aAAa,CAAe,GAAqB;IACxD,OAAO;AACT,CAAC;AAED,SAAS,WAAW,CAAC,EAAS,IAAG,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/lib/ChatCompletionStreamingRunner.d.ts b/node_modules/openai/lib/ChatCompletionStreamingRunner.d.ts new file 
mode 100644 index 0000000..b092b93 --- /dev/null +++ b/node_modules/openai/lib/ChatCompletionStreamingRunner.d.ts @@ -0,0 +1,24 @@ +import { type ChatCompletionChunk, type ChatCompletionCreateParamsStreaming } from "../resources/chat/completions.js"; +import { RunnerOptions, type AbstractChatCompletionRunnerEvents } from "./AbstractChatCompletionRunner.js"; +import { type ReadableStream } from "../_shims/index.js"; +import { RunnableTools, type BaseFunctionsArgs, type RunnableFunctions } from "./RunnableFunction.js"; +import { ChatCompletionSnapshot, ChatCompletionStream } from "./ChatCompletionStream.js"; +import OpenAI from "../index.js"; +import { AutoParseableTool } from "../lib/parser.js"; +export interface ChatCompletionStreamEvents extends AbstractChatCompletionRunnerEvents { + content: (contentDelta: string, contentSnapshot: string) => void; + chunk: (chunk: ChatCompletionChunk, snapshot: ChatCompletionSnapshot) => void; +} +export type ChatCompletionStreamingFunctionRunnerParams = Omit & { + functions: RunnableFunctions; +}; +export type ChatCompletionStreamingToolRunnerParams = Omit & { + tools: RunnableTools | AutoParseableTool[]; +}; +export declare class ChatCompletionStreamingRunner extends ChatCompletionStream implements AsyncIterable { + static fromReadableStream(stream: ReadableStream): ChatCompletionStreamingRunner; + /** @deprecated - please use `runTools` instead. */ + static runFunctions(client: OpenAI, params: ChatCompletionStreamingFunctionRunnerParams, options?: RunnerOptions): ChatCompletionStreamingRunner; + static runTools(client: OpenAI, params: ChatCompletionStreamingToolRunnerParams, options?: RunnerOptions): ChatCompletionStreamingRunner; +} +//# sourceMappingURL=ChatCompletionStreamingRunner.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/lib/ChatCompletionStreamingRunner.d.ts.map b/node_modules/openai/lib/ChatCompletionStreamingRunner.d.ts.map new file mode 100644 index 0000000..2a2df68 --- /dev/null +++ b/node_modules/openai/lib/ChatCompletionStreamingRunner.d.ts.map @@ -0,0 +1 @@ 
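The declaration above adds the streaming tool-runner helper. A minimal usage sketch follows; the deep import path, the model id, and the example tool are illustrative assumptions, not part of the vendored file:

    import OpenAI from 'openai';
    // Deep import path is an assumption about how this file is exposed by the package.
    import { ChatCompletionStreamingRunner } from 'openai/lib/ChatCompletionStreamingRunner';

    const client = new OpenAI();

    // runTools() wires model tool calls to local functions and streams the result.
    const runner = ChatCompletionStreamingRunner.runTools(client, {
      model: 'gpt-4o-mini', // placeholder model id
      stream: true,
      messages: [{ role: 'user', content: 'What time is it?' }],
      tools: [
        {
          type: 'function',
          function: {
            name: 'getCurrentTime',
            description: 'Returns the current time as an ISO-8601 string.',
            parameters: { type: 'object', properties: {} },
            // Without a `parse`, the function receives the raw argument string; none is needed here.
            function: () => new Date().toISOString(),
          },
        },
      ],
    });

    // Events from ChatCompletionStreamEvents declared above.
    runner.on('content', (delta) => process.stdout.write(delta));
    runner.on('chunk', (chunk, snapshot) => { /* inspect raw chunks if needed */ });

    // finalChatCompletion() comes from the shared runner base class.
    const completion = await runner.finalChatCompletion();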
+{"version":3,"file":"ChatCompletionStreamingRunner.d.ts","sourceRoot":"","sources":["../src/lib/ChatCompletionStreamingRunner.ts"],"names":[],"mappings":"AAAA,OAAO,EACL,KAAK,mBAAmB,EACxB,KAAK,mCAAmC,EACzC,MAAM,+BAA+B,CAAC;AACvC,OAAO,EAAE,aAAa,EAAE,KAAK,kCAAkC,EAAE,MAAM,gCAAgC,CAAC;AACxG,OAAO,EAAE,KAAK,cAAc,EAAE,MAAM,iBAAiB,CAAC;AACtD,OAAO,EAAE,aAAa,EAAE,KAAK,iBAAiB,EAAE,KAAK,iBAAiB,EAAE,MAAM,oBAAoB,CAAC;AACnG,OAAO,EAAE,sBAAsB,EAAE,oBAAoB,EAAE,MAAM,wBAAwB,CAAC;AACtF,OAAO,MAAM,MAAM,UAAU,CAAC;AAC9B,OAAO,EAAE,iBAAiB,EAAE,MAAM,eAAe,CAAC;AAElD,MAAM,WAAW,0BAA2B,SAAQ,kCAAkC;IACpF,OAAO,EAAE,CAAC,YAAY,EAAE,MAAM,EAAE,eAAe,EAAE,MAAM,KAAK,IAAI,CAAC;IACjE,KAAK,EAAE,CAAC,KAAK,EAAE,mBAAmB,EAAE,QAAQ,EAAE,sBAAsB,KAAK,IAAI,CAAC;CAC/E;AAED,MAAM,MAAM,2CAA2C,CAAC,aAAa,SAAS,iBAAiB,IAAI,IAAI,CACrG,mCAAmC,EACnC,WAAW,CACZ,GAAG;IACF,SAAS,EAAE,iBAAiB,CAAC,aAAa,CAAC,CAAC;CAC7C,CAAC;AAEF,MAAM,MAAM,uCAAuC,CAAC,aAAa,SAAS,iBAAiB,IAAI,IAAI,CACjG,mCAAmC,EACnC,OAAO,CACR,GAAG;IACF,KAAK,EAAE,aAAa,CAAC,aAAa,CAAC,GAAG,iBAAiB,CAAC,GAAG,EAAE,IAAI,CAAC,EAAE,CAAC;CACtE,CAAC;AAEF,qBAAa,6BAA6B,CAAC,OAAO,GAAG,IAAI,CACvD,SAAQ,oBAAoB,CAAC,OAAO,CACpC,YAAW,aAAa,CAAC,mBAAmB,CAAC;WAE7B,kBAAkB,CAAC,MAAM,EAAE,cAAc,GAAG,6BAA6B,CAAC,IAAI,CAAC;IAM/F,mDAAmD;IACnD,MAAM,CAAC,YAAY,CAAC,CAAC,SAAS,CAAC,MAAM,GAAG,MAAM,CAAC,EAAE,EAC/C,MAAM,EAAE,MAAM,EACd,MAAM,EAAE,2CAA2C,CAAC,CAAC,CAAC,EACtD,OAAO,CAAC,EAAE,aAAa,GACtB,6BAA6B,CAAC,IAAI,CAAC;IAUtC,MAAM,CAAC,QAAQ,CAAC,CAAC,SAAS,CAAC,MAAM,GAAG,MAAM,CAAC,EAAE,EAAE,OAAO,GAAG,IAAI,EAC3D,MAAM,EAAE,MAAM,EACd,MAAM,EAAE,uCAAuC,CAAC,CAAC,CAAC,EAClD,OAAO,CAAC,EAAE,aAAa,GACtB,6BAA6B,CAAC,OAAO,CAAC;CAY1C"} \ No newline at end of file diff --git a/node_modules/openai/lib/ChatCompletionStreamingRunner.js b/node_modules/openai/lib/ChatCompletionStreamingRunner.js new file mode 100644 index 0000000..dd42c88 --- /dev/null +++ b/node_modules/openai/lib/ChatCompletionStreamingRunner.js @@ -0,0 +1,34 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.ChatCompletionStreamingRunner = void 0; +const ChatCompletionStream_1 = require("./ChatCompletionStream.js"); +class ChatCompletionStreamingRunner extends ChatCompletionStream_1.ChatCompletionStream { + static fromReadableStream(stream) { + const runner = new ChatCompletionStreamingRunner(null); + runner._run(() => runner._fromReadableStream(stream)); + return runner; + } + /** @deprecated - please use `runTools` instead. 
*/ + static runFunctions(client, params, options) { + const runner = new ChatCompletionStreamingRunner(null); + const opts = { + ...options, + headers: { ...options?.headers, 'X-Stainless-Helper-Method': 'runFunctions' }, + }; + runner._run(() => runner._runFunctions(client, params, opts)); + return runner; + } + static runTools(client, params, options) { + const runner = new ChatCompletionStreamingRunner( + // @ts-expect-error TODO these types are incompatible + params); + const opts = { + ...options, + headers: { ...options?.headers, 'X-Stainless-Helper-Method': 'runTools' }, + }; + runner._run(() => runner._runTools(client, params, opts)); + return runner; + } +} +exports.ChatCompletionStreamingRunner = ChatCompletionStreamingRunner; +//# sourceMappingURL=ChatCompletionStreamingRunner.js.map \ No newline at end of file diff --git a/node_modules/openai/lib/ChatCompletionStreamingRunner.js.map b/node_modules/openai/lib/ChatCompletionStreamingRunner.js.map new file mode 100644 index 0000000..c1c8508 --- /dev/null +++ b/node_modules/openai/lib/ChatCompletionStreamingRunner.js.map @@ -0,0 +1 @@ +{"version":3,"file":"ChatCompletionStreamingRunner.js","sourceRoot":"","sources":["../src/lib/ChatCompletionStreamingRunner.ts"],"names":[],"mappings":";;;AAOA,oEAAsF;AAuBtF,MAAa,6BACX,SAAQ,2CAA6B;IAGrC,MAAM,CAAU,kBAAkB,CAAC,MAAsB;QACvD,MAAM,MAAM,GAAG,IAAI,6BAA6B,CAAC,IAAI,CAAC,CAAC;QACvD,MAAM,CAAC,IAAI,CAAC,GAAG,EAAE,CAAC,MAAM,CAAC,mBAAmB,CAAC,MAAM,CAAC,CAAC,CAAC;QACtD,OAAO,MAAM,CAAC;IAChB,CAAC;IAED,mDAAmD;IACnD,MAAM,CAAC,YAAY,CACjB,MAAc,EACd,MAAsD,EACtD,OAAuB;QAEvB,MAAM,MAAM,GAAG,IAAI,6BAA6B,CAAC,IAAI,CAAC,CAAC;QACvD,MAAM,IAAI,GAAG;YACX,GAAG,OAAO;YACV,OAAO,EAAE,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE,2BAA2B,EAAE,cAAc,EAAE;SAC9E,CAAC;QACF,MAAM,CAAC,IAAI,CAAC,GAAG,EAAE,CAAC,MAAM,CAAC,aAAa,CAAC,MAAM,EAAE,MAAM,EAAE,IAAI,CAAC,CAAC,CAAC;QAC9D,OAAO,MAAM,CAAC;IAChB,CAAC;IAED,MAAM,CAAC,QAAQ,CACb,MAAc,EACd,MAAkD,EAClD,OAAuB;QAEvB,MAAM,MAAM,GAAG,IAAI,6BAA6B;QAC9C,qDAAqD;QACrD,MAAM,CACP,CAAC;QACF,MAAM,IAAI,GAAG;YACX,GAAG,OAAO;YACV,OAAO,EAAE,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE,2BAA2B,EAAE,UAAU,EAAE;SAC1E,CAAC;QACF,MAAM,CAAC,IAAI,CAAC,GAAG,EAAE,CAAC,MAAM,CAAC,SAAS,CAAC,MAAM,EAAE,MAAM,EAAE,IAAI,CAAC,CAAC,CAAC;QAC1D,OAAO,MAAM,CAAC;IAChB,CAAC;CACF;AAzCD,sEAyCC"} \ No newline at end of file diff --git a/node_modules/openai/lib/ChatCompletionStreamingRunner.mjs b/node_modules/openai/lib/ChatCompletionStreamingRunner.mjs new file mode 100644 index 0000000..361c551 --- /dev/null +++ b/node_modules/openai/lib/ChatCompletionStreamingRunner.mjs @@ -0,0 +1,30 @@ +import { ChatCompletionStream } from "./ChatCompletionStream.mjs"; +export class ChatCompletionStreamingRunner extends ChatCompletionStream { + static fromReadableStream(stream) { + const runner = new ChatCompletionStreamingRunner(null); + runner._run(() => runner._fromReadableStream(stream)); + return runner; + } + /** @deprecated - please use `runTools` instead. 
*/ + static runFunctions(client, params, options) { + const runner = new ChatCompletionStreamingRunner(null); + const opts = { + ...options, + headers: { ...options?.headers, 'X-Stainless-Helper-Method': 'runFunctions' }, + }; + runner._run(() => runner._runFunctions(client, params, opts)); + return runner; + } + static runTools(client, params, options) { + const runner = new ChatCompletionStreamingRunner( + // @ts-expect-error TODO these types are incompatible + params); + const opts = { + ...options, + headers: { ...options?.headers, 'X-Stainless-Helper-Method': 'runTools' }, + }; + runner._run(() => runner._runTools(client, params, opts)); + return runner; + } +} +//# sourceMappingURL=ChatCompletionStreamingRunner.mjs.map \ No newline at end of file diff --git a/node_modules/openai/lib/ChatCompletionStreamingRunner.mjs.map b/node_modules/openai/lib/ChatCompletionStreamingRunner.mjs.map new file mode 100644 index 0000000..cb255d8 --- /dev/null +++ b/node_modules/openai/lib/ChatCompletionStreamingRunner.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"ChatCompletionStreamingRunner.mjs","sourceRoot":"","sources":["../src/lib/ChatCompletionStreamingRunner.ts"],"names":[],"mappings":"OAOO,EAA0B,oBAAoB,EAAE;AAuBvD,MAAM,OAAO,6BACX,SAAQ,oBAA6B;IAGrC,MAAM,CAAU,kBAAkB,CAAC,MAAsB;QACvD,MAAM,MAAM,GAAG,IAAI,6BAA6B,CAAC,IAAI,CAAC,CAAC;QACvD,MAAM,CAAC,IAAI,CAAC,GAAG,EAAE,CAAC,MAAM,CAAC,mBAAmB,CAAC,MAAM,CAAC,CAAC,CAAC;QACtD,OAAO,MAAM,CAAC;IAChB,CAAC;IAED,mDAAmD;IACnD,MAAM,CAAC,YAAY,CACjB,MAAc,EACd,MAAsD,EACtD,OAAuB;QAEvB,MAAM,MAAM,GAAG,IAAI,6BAA6B,CAAC,IAAI,CAAC,CAAC;QACvD,MAAM,IAAI,GAAG;YACX,GAAG,OAAO;YACV,OAAO,EAAE,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE,2BAA2B,EAAE,cAAc,EAAE;SAC9E,CAAC;QACF,MAAM,CAAC,IAAI,CAAC,GAAG,EAAE,CAAC,MAAM,CAAC,aAAa,CAAC,MAAM,EAAE,MAAM,EAAE,IAAI,CAAC,CAAC,CAAC;QAC9D,OAAO,MAAM,CAAC;IAChB,CAAC;IAED,MAAM,CAAC,QAAQ,CACb,MAAc,EACd,MAAkD,EAClD,OAAuB;QAEvB,MAAM,MAAM,GAAG,IAAI,6BAA6B;QAC9C,qDAAqD;QACrD,MAAM,CACP,CAAC;QACF,MAAM,IAAI,GAAG;YACX,GAAG,OAAO;YACV,OAAO,EAAE,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE,2BAA2B,EAAE,UAAU,EAAE;SAC1E,CAAC;QACF,MAAM,CAAC,IAAI,CAAC,GAAG,EAAE,CAAC,MAAM,CAAC,SAAS,CAAC,MAAM,EAAE,MAAM,EAAE,IAAI,CAAC,CAAC,CAAC;QAC1D,OAAO,MAAM,CAAC;IAChB,CAAC;CACF"} \ No newline at end of file diff --git a/node_modules/openai/lib/EventStream.d.ts b/node_modules/openai/lib/EventStream.d.ts new file mode 100644 index 0000000..a37a44a --- /dev/null +++ b/node_modules/openai/lib/EventStream.d.ts @@ -0,0 +1,63 @@ + +import { APIUserAbortError, OpenAIError } from "../error.js"; +export declare class EventStream { + #private; + controller: AbortController; + constructor(); + protected _run(this: EventStream, executor: () => Promise): void; + protected _connected(this: EventStream): void; + get ended(): boolean; + get errored(): boolean; + get aborted(): boolean; + abort(): void; + /** + * Adds the listener function to the end of the listeners array for the event. + * No checks are made to see if the listener has already been added. Multiple calls passing + * the same combination of event and listener will result in the listener being added, and + * called, multiple times. + * @returns this ChatCompletionStream, so that calls can be chained + */ + on(event: Event, listener: EventListener): this; + /** + * Removes the specified listener from the listener array for the event. + * off() will remove, at most, one instance of a listener from the listener array. 
If any single + * listener has been added multiple times to the listener array for the specified event, then + * off() must be called multiple times to remove each instance. + * @returns this ChatCompletionStream, so that calls can be chained + */ + off(event: Event, listener: EventListener): this; + /** + * Adds a one-time listener function for the event. The next time the event is triggered, + * this listener is removed and then invoked. + * @returns this ChatCompletionStream, so that calls can be chained + */ + once(event: Event, listener: EventListener): this; + /** + * This is similar to `.once()`, but returns a Promise that resolves the next time + * the event is triggered, instead of calling a listener callback. + * @returns a Promise that resolves the next time given event is triggered, + * or rejects if an error is emitted. (If you request the 'error' event, + * returns a promise that resolves with the error). + * + * Example: + * + * const message = await stream.emitted('message') // rejects if the stream errors + */ + emitted(event: Event): Promise extends [infer Param] ? Param : EventParameters extends [] ? void : EventParameters>; + done(): Promise; + _emit(event: Event, ...args: EventParameters): void; + _emit(event: Event, ...args: EventParameters): void; + protected _emitFinal(): void; +} +type EventListener = Events[EventType]; +export type EventParameters = { + [Event in EventType]: EventListener extends (...args: infer P) => any ? P : never; +}[EventType]; +export interface BaseEvents { + connect: () => void; + error: (error: OpenAIError) => void; + abort: (error: APIUserAbortError) => void; + end: () => void; +} +export {}; +//# sourceMappingURL=EventStream.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/lib/EventStream.d.ts.map b/node_modules/openai/lib/EventStream.d.ts.map new file mode 100644 index 0000000..77f6291 --- /dev/null +++ b/node_modules/openai/lib/EventStream.d.ts.map @@ -0,0 +1 @@ 
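The EventStream declaration above is the listener surface shared by the stream and runner helpers. A small sketch of how it is consumed; the import path and the BaseEvents type argument are assumptions for illustration:

    import { EventStream, BaseEvents } from 'openai/lib/EventStream'; // path assumed

    async function watch(stream: EventStream<BaseEvents>): Promise<void> {
      stream
        .on('connect', () => console.log('connected'))       // calls chain, as the docs above describe
        .once('abort', (err) => console.warn('aborted:', err));

      // emitted() resolves the next time the named event fires,
      // or rejects if an 'error' event is emitted first.
      await stream.emitted('end');

      // done() resolves once the stream has ended (rejecting on error or abort).
      await stream.done();
    }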
+{"version":3,"file":"EventStream.d.ts","sourceRoot":"","sources":["../src/lib/EventStream.ts"],"names":[],"mappings":";AAAA,OAAO,EAAE,iBAAiB,EAAE,WAAW,EAAE,MAAM,UAAU,CAAC;AAE1D,qBAAa,WAAW,CAAC,UAAU,SAAS,UAAU;;IACpD,UAAU,EAAE,eAAe,CAAyB;;IAsCpD,SAAS,CAAC,IAAI,CAAC,IAAI,EAAE,WAAW,CAAC,UAAU,CAAC,EAAE,QAAQ,EAAE,MAAM,OAAO,CAAC,GAAG,CAAC;IAW1E,SAAS,CAAC,UAAU,CAAC,IAAI,EAAE,WAAW,CAAC,UAAU,CAAC;IAMlD,IAAI,KAAK,IAAI,OAAO,CAEnB;IAED,IAAI,OAAO,IAAI,OAAO,CAErB;IAED,IAAI,OAAO,IAAI,OAAO,CAErB;IAED,KAAK;IAIL;;;;;;OAMG;IACH,EAAE,CAAC,KAAK,SAAS,MAAM,UAAU,EAAE,KAAK,EAAE,KAAK,EAAE,QAAQ,EAAE,aAAa,CAAC,UAAU,EAAE,KAAK,CAAC,GAAG,IAAI;IAOlG;;;;;;OAMG;IACH,GAAG,CAAC,KAAK,SAAS,MAAM,UAAU,EAAE,KAAK,EAAE,KAAK,EAAE,QAAQ,EAAE,aAAa,CAAC,UAAU,EAAE,KAAK,CAAC,GAAG,IAAI;IAQnG;;;;OAIG;IACH,IAAI,CAAC,KAAK,SAAS,MAAM,UAAU,EAAE,KAAK,EAAE,KAAK,EAAE,QAAQ,EAAE,aAAa,CAAC,UAAU,EAAE,KAAK,CAAC,GAAG,IAAI;IAOpG;;;;;;;;;;OAUG;IACH,OAAO,CAAC,KAAK,SAAS,MAAM,UAAU,EACpC,KAAK,EAAE,KAAK,GACX,OAAO,CACR,eAAe,CAAC,UAAU,EAAE,KAAK,CAAC,SAAS,CAAC,MAAM,KAAK,CAAC,GAAG,KAAK,GAC9D,eAAe,CAAC,UAAU,EAAE,KAAK,CAAC,SAAS,EAAE,GAAG,IAAI,GACpD,eAAe,CAAC,UAAU,EAAE,KAAK,CAAC,CACrC;IAQK,IAAI,IAAI,OAAO,CAAC,IAAI,CAAC;IA0B3B,KAAK,CAAC,KAAK,SAAS,MAAM,UAAU,EAAE,KAAK,EAAE,KAAK,EAAE,GAAG,IAAI,EAAE,eAAe,CAAC,UAAU,EAAE,KAAK,CAAC,GAAG,IAAI;IACtG,KAAK,CAAC,KAAK,SAAS,MAAM,UAAU,EAAE,KAAK,EAAE,KAAK,EAAE,GAAG,IAAI,EAAE,eAAe,CAAC,UAAU,EAAE,KAAK,CAAC,GAAG,IAAI;IAoDtG,SAAS,CAAC,UAAU,IAAI,IAAI;CAC7B;AAED,KAAK,aAAa,CAAC,MAAM,EAAE,SAAS,SAAS,MAAM,MAAM,IAAI,MAAM,CAAC,SAAS,CAAC,CAAC;AAO/E,MAAM,MAAM,eAAe,CAAC,MAAM,EAAE,SAAS,SAAS,MAAM,MAAM,IAAI;KACnE,KAAK,IAAI,SAAS,GAAG,aAAa,CAAC,MAAM,EAAE,SAAS,CAAC,SAAS,CAAC,GAAG,IAAI,EAAE,MAAM,CAAC,KAAK,GAAG,GAAG,CAAC,GAAG,KAAK;CACrG,CAAC,SAAS,CAAC,CAAC;AAEb,MAAM,WAAW,UAAU;IACzB,OAAO,EAAE,MAAM,IAAI,CAAC;IACpB,KAAK,EAAE,CAAC,KAAK,EAAE,WAAW,KAAK,IAAI,CAAC;IACpC,KAAK,EAAE,CAAC,KAAK,EAAE,iBAAiB,KAAK,IAAI,CAAC;IAC1C,GAAG,EAAE,MAAM,IAAI,CAAC;CACjB"} \ No newline at end of file diff --git a/node_modules/openai/lib/EventStream.js b/node_modules/openai/lib/EventStream.js new file mode 100644 index 0000000..e469269 --- /dev/null +++ b/node_modules/openai/lib/EventStream.js @@ -0,0 +1,200 @@ +"use strict"; +var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; +}; +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? 
f.value : state.get(receiver); +}; +var _EventStream_instances, _EventStream_connectedPromise, _EventStream_resolveConnectedPromise, _EventStream_rejectConnectedPromise, _EventStream_endPromise, _EventStream_resolveEndPromise, _EventStream_rejectEndPromise, _EventStream_listeners, _EventStream_ended, _EventStream_errored, _EventStream_aborted, _EventStream_catchingPromiseCreated, _EventStream_handleError; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.EventStream = void 0; +const error_1 = require("../error.js"); +class EventStream { + constructor() { + _EventStream_instances.add(this); + this.controller = new AbortController(); + _EventStream_connectedPromise.set(this, void 0); + _EventStream_resolveConnectedPromise.set(this, () => { }); + _EventStream_rejectConnectedPromise.set(this, () => { }); + _EventStream_endPromise.set(this, void 0); + _EventStream_resolveEndPromise.set(this, () => { }); + _EventStream_rejectEndPromise.set(this, () => { }); + _EventStream_listeners.set(this, {}); + _EventStream_ended.set(this, false); + _EventStream_errored.set(this, false); + _EventStream_aborted.set(this, false); + _EventStream_catchingPromiseCreated.set(this, false); + __classPrivateFieldSet(this, _EventStream_connectedPromise, new Promise((resolve, reject) => { + __classPrivateFieldSet(this, _EventStream_resolveConnectedPromise, resolve, "f"); + __classPrivateFieldSet(this, _EventStream_rejectConnectedPromise, reject, "f"); + }), "f"); + __classPrivateFieldSet(this, _EventStream_endPromise, new Promise((resolve, reject) => { + __classPrivateFieldSet(this, _EventStream_resolveEndPromise, resolve, "f"); + __classPrivateFieldSet(this, _EventStream_rejectEndPromise, reject, "f"); + }), "f"); + // Don't let these promises cause unhandled rejection errors. + // we will manually cause an unhandled rejection error later + // if the user hasn't registered any error listener or called + // any promise-returning method. + __classPrivateFieldGet(this, _EventStream_connectedPromise, "f").catch(() => { }); + __classPrivateFieldGet(this, _EventStream_endPromise, "f").catch(() => { }); + } + _run(executor) { + // Unfortunately if we call `executor()` immediately we get runtime errors about + // references to `this` before the `super()` constructor call returns. + setTimeout(() => { + executor().then(() => { + this._emitFinal(); + this._emit('end'); + }, __classPrivateFieldGet(this, _EventStream_instances, "m", _EventStream_handleError).bind(this)); + }, 0); + } + _connected() { + if (this.ended) + return; + __classPrivateFieldGet(this, _EventStream_resolveConnectedPromise, "f").call(this); + this._emit('connect'); + } + get ended() { + return __classPrivateFieldGet(this, _EventStream_ended, "f"); + } + get errored() { + return __classPrivateFieldGet(this, _EventStream_errored, "f"); + } + get aborted() { + return __classPrivateFieldGet(this, _EventStream_aborted, "f"); + } + abort() { + this.controller.abort(); + } + /** + * Adds the listener function to the end of the listeners array for the event. + * No checks are made to see if the listener has already been added. Multiple calls passing + * the same combination of event and listener will result in the listener being added, and + * called, multiple times. 
+ * @returns this ChatCompletionStream, so that calls can be chained + */ + on(event, listener) { + const listeners = __classPrivateFieldGet(this, _EventStream_listeners, "f")[event] || (__classPrivateFieldGet(this, _EventStream_listeners, "f")[event] = []); + listeners.push({ listener }); + return this; + } + /** + * Removes the specified listener from the listener array for the event. + * off() will remove, at most, one instance of a listener from the listener array. If any single + * listener has been added multiple times to the listener array for the specified event, then + * off() must be called multiple times to remove each instance. + * @returns this ChatCompletionStream, so that calls can be chained + */ + off(event, listener) { + const listeners = __classPrivateFieldGet(this, _EventStream_listeners, "f")[event]; + if (!listeners) + return this; + const index = listeners.findIndex((l) => l.listener === listener); + if (index >= 0) + listeners.splice(index, 1); + return this; + } + /** + * Adds a one-time listener function for the event. The next time the event is triggered, + * this listener is removed and then invoked. + * @returns this ChatCompletionStream, so that calls can be chained + */ + once(event, listener) { + const listeners = __classPrivateFieldGet(this, _EventStream_listeners, "f")[event] || (__classPrivateFieldGet(this, _EventStream_listeners, "f")[event] = []); + listeners.push({ listener, once: true }); + return this; + } + /** + * This is similar to `.once()`, but returns a Promise that resolves the next time + * the event is triggered, instead of calling a listener callback. + * @returns a Promise that resolves the next time given event is triggered, + * or rejects if an error is emitted. (If you request the 'error' event, + * returns a promise that resolves with the error). + * + * Example: + * + * const message = await stream.emitted('message') // rejects if the stream errors + */ + emitted(event) { + return new Promise((resolve, reject) => { + __classPrivateFieldSet(this, _EventStream_catchingPromiseCreated, true, "f"); + if (event !== 'error') + this.once('error', reject); + this.once(event, resolve); + }); + } + async done() { + __classPrivateFieldSet(this, _EventStream_catchingPromiseCreated, true, "f"); + await __classPrivateFieldGet(this, _EventStream_endPromise, "f"); + } + _emit(event, ...args) { + // make sure we don't emit any events after end + if (__classPrivateFieldGet(this, _EventStream_ended, "f")) { + return; + } + if (event === 'end') { + __classPrivateFieldSet(this, _EventStream_ended, true, "f"); + __classPrivateFieldGet(this, _EventStream_resolveEndPromise, "f").call(this); + } + const listeners = __classPrivateFieldGet(this, _EventStream_listeners, "f")[event]; + if (listeners) { + __classPrivateFieldGet(this, _EventStream_listeners, "f")[event] = listeners.filter((l) => !l.once); + listeners.forEach(({ listener }) => listener(...args)); + } + if (event === 'abort') { + const error = args[0]; + if (!__classPrivateFieldGet(this, _EventStream_catchingPromiseCreated, "f") && !listeners?.length) { + Promise.reject(error); + } + __classPrivateFieldGet(this, _EventStream_rejectConnectedPromise, "f").call(this, error); + __classPrivateFieldGet(this, _EventStream_rejectEndPromise, "f").call(this, error); + this._emit('end'); + return; + } + if (event === 'error') { + // NOTE: _emit('error', error) should only be called from #handleError(). 
+ const error = args[0]; + if (!__classPrivateFieldGet(this, _EventStream_catchingPromiseCreated, "f") && !listeners?.length) { + // Trigger an unhandled rejection if the user hasn't registered any error handlers. + // If you are seeing stack traces here, make sure to handle errors via either: + // - runner.on('error', () => ...) + // - await runner.done() + // - await runner.finalChatCompletion() + // - etc. + Promise.reject(error); + } + __classPrivateFieldGet(this, _EventStream_rejectConnectedPromise, "f").call(this, error); + __classPrivateFieldGet(this, _EventStream_rejectEndPromise, "f").call(this, error); + this._emit('end'); + } + } + _emitFinal() { } +} +exports.EventStream = EventStream; +_EventStream_connectedPromise = new WeakMap(), _EventStream_resolveConnectedPromise = new WeakMap(), _EventStream_rejectConnectedPromise = new WeakMap(), _EventStream_endPromise = new WeakMap(), _EventStream_resolveEndPromise = new WeakMap(), _EventStream_rejectEndPromise = new WeakMap(), _EventStream_listeners = new WeakMap(), _EventStream_ended = new WeakMap(), _EventStream_errored = new WeakMap(), _EventStream_aborted = new WeakMap(), _EventStream_catchingPromiseCreated = new WeakMap(), _EventStream_instances = new WeakSet(), _EventStream_handleError = function _EventStream_handleError(error) { + __classPrivateFieldSet(this, _EventStream_errored, true, "f"); + if (error instanceof Error && error.name === 'AbortError') { + error = new error_1.APIUserAbortError(); + } + if (error instanceof error_1.APIUserAbortError) { + __classPrivateFieldSet(this, _EventStream_aborted, true, "f"); + return this._emit('abort', error); + } + if (error instanceof error_1.OpenAIError) { + return this._emit('error', error); + } + if (error instanceof Error) { + const openAIError = new error_1.OpenAIError(error.message); + // @ts-ignore + openAIError.cause = error; + return this._emit('error', openAIError); + } + return this._emit('error', new error_1.OpenAIError(String(error))); +}; +//# sourceMappingURL=EventStream.js.map \ No newline at end of file diff --git a/node_modules/openai/lib/EventStream.js.map b/node_modules/openai/lib/EventStream.js.map new file mode 100644 index 0000000..92e5610 --- /dev/null +++ b/node_modules/openai/lib/EventStream.js.map @@ -0,0 +1 @@ 
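The implementation above deliberately surfaces an unhandled promise rejection when an error occurs and the caller has neither registered an 'error' listener nor awaited a promise-returning method. A minimal sketch of both options mentioned in the comments; the `runner` declaration and import path are stand-ins for any concrete stream or runner:

    import { EventStream, BaseEvents } from 'openai/lib/EventStream'; // path assumed

    declare const runner: EventStream<BaseEvents>; // placeholder for a concrete runner instance

    async function main(): Promise<void> {
      // Option 1: registering an 'error' listener marks errors as handled.
      runner.on('error', (err) => console.error('stream failed:', err));

      // Option 2: awaiting a promise-returning method (done(), emitted(), or the
      // runner-level finalChatCompletion()) routes the same error into a catchable rejection.
      try {
        await runner.done();
      } catch (err) {
        console.error('stream failed:', err);
      }
    }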
+{"version":3,"file":"EventStream.js","sourceRoot":"","sources":["../src/lib/EventStream.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;AAAA,uCAA0D;AAE1D,MAAa,WAAW;IAoBtB;;QAnBA,eAAU,GAAoB,IAAI,eAAe,EAAE,CAAC;QAEpD,gDAAiC;QACjC,+CAAuC,GAAG,EAAE,GAAE,CAAC,EAAC;QAChD,8CAAwD,GAAG,EAAE,GAAE,CAAC,EAAC;QAEjE,0CAA2B;QAC3B,yCAAiC,GAAG,EAAE,GAAE,CAAC,EAAC;QAC1C,wCAAkD,GAAG,EAAE,GAAE,CAAC,EAAC;QAE3D,iCAEI,EAAE,EAAC;QAEP,6BAAS,KAAK,EAAC;QACf,+BAAW,KAAK,EAAC;QACjB,+BAAW,KAAK,EAAC;QACjB,8CAA0B,KAAK,EAAC;QAG9B,uBAAA,IAAI,iCAAqB,IAAI,OAAO,CAAO,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;YAC7D,uBAAA,IAAI,wCAA4B,OAAO,MAAA,CAAC;YACxC,uBAAA,IAAI,uCAA2B,MAAM,MAAA,CAAC;QACxC,CAAC,CAAC,MAAA,CAAC;QAEH,uBAAA,IAAI,2BAAe,IAAI,OAAO,CAAO,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;YACvD,uBAAA,IAAI,kCAAsB,OAAO,MAAA,CAAC;YAClC,uBAAA,IAAI,iCAAqB,MAAM,MAAA,CAAC;QAClC,CAAC,CAAC,MAAA,CAAC;QAEH,6DAA6D;QAC7D,4DAA4D;QAC5D,6DAA6D;QAC7D,gCAAgC;QAChC,uBAAA,IAAI,qCAAkB,CAAC,KAAK,CAAC,GAAG,EAAE,GAAE,CAAC,CAAC,CAAC;QACvC,uBAAA,IAAI,+BAAY,CAAC,KAAK,CAAC,GAAG,EAAE,GAAE,CAAC,CAAC,CAAC;IACnC,CAAC;IAES,IAAI,CAAgC,QAA4B;QACxE,gFAAgF;QAChF,sEAAsE;QACtE,UAAU,CAAC,GAAG,EAAE;YACd,QAAQ,EAAE,CAAC,IAAI,CAAC,GAAG,EAAE;gBACnB,IAAI,CAAC,UAAU,EAAE,CAAC;gBAClB,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;YACpB,CAAC,EAAE,uBAAA,IAAI,wDAAa,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC;QACnC,CAAC,EAAE,CAAC,CAAC,CAAC;IACR,CAAC;IAES,UAAU;QAClB,IAAI,IAAI,CAAC,KAAK;YAAE,OAAO;QACvB,uBAAA,IAAI,4CAAyB,MAA7B,IAAI,CAA2B,CAAC;QAChC,IAAI,CAAC,KAAK,CAAC,SAAS,CAAC,CAAC;IACxB,CAAC;IAED,IAAI,KAAK;QACP,OAAO,uBAAA,IAAI,0BAAO,CAAC;IACrB,CAAC;IAED,IAAI,OAAO;QACT,OAAO,uBAAA,IAAI,4BAAS,CAAC;IACvB,CAAC;IAED,IAAI,OAAO;QACT,OAAO,uBAAA,IAAI,4BAAS,CAAC;IACvB,CAAC;IAED,KAAK;QACH,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,CAAC;IAC1B,CAAC;IAED;;;;;;OAMG;IACH,EAAE,CAAiC,KAAY,EAAE,QAA0C;QACzF,MAAM,SAAS,GACb,uBAAA,IAAI,8BAAW,CAAC,KAAK,CAAC,IAAI,CAAC,uBAAA,IAAI,8BAAW,CAAC,KAAK,CAAC,GAAG,EAAE,CAAC,CAAC;QAC1D,SAAS,CAAC,IAAI,CAAC,EAAE,QAAQ,EAAE,CAAC,CAAC;QAC7B,OAAO,IAAI,CAAC;IACd,CAAC;IAED;;;;;;OAMG;IACH,GAAG,CAAiC,KAAY,EAAE,QAA0C;QAC1F,MAAM,SAAS,GAAG,uBAAA,IAAI,8BAAW,CAAC,KAAK,CAAC,CAAC;QACzC,IAAI,CAAC,SAAS;YAAE,OAAO,IAAI,CAAC;QAC5B,MAAM,KAAK,GAAG,SAAS,CAAC,SAAS,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,QAAQ,KAAK,QAAQ,CAAC,CAAC;QAClE,IAAI,KAAK,IAAI,CAAC;YAAE,SAAS,CAAC,MAAM,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC;QAC3C,OAAO,IAAI,CAAC;IACd,CAAC;IAED;;;;OAIG;IACH,IAAI,CAAiC,KAAY,EAAE,QAA0C;QAC3F,MAAM,SAAS,GACb,uBAAA,IAAI,8BAAW,CAAC,KAAK,CAAC,IAAI,CAAC,uBAAA,IAAI,8BAAW,CAAC,KAAK,CAAC,GAAG,EAAE,CAAC,CAAC;QAC1D,SAAS,CAAC,IAAI,CAAC,EAAE,QAAQ,EAAE,IAAI,EAAE,IAAI,EAAE,CAAC,CAAC;QACzC,OAAO,IAAI,CAAC;IACd,CAAC;IAED;;;;;;;;;;OAUG;IACH,OAAO,CACL,KAAY;QAMZ,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;YACrC,uBAAA,IAAI,uCAA2B,IAAI,MAAA,CAAC;YACpC,IAAI,KAAK,KAAK,OAAO;gBAAE,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;YAClD,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE,OAAc,CAAC,CAAC;QACnC,CAAC,CAAC,CAAC;IACL,CAAC;IAED,KAAK,CAAC,IAAI;QACR,uBAAA,IAAI,uCAA2B,IAAI,MAAA,CAAC;QACpC,MAAM,uBAAA,IAAI,+BAAY,CAAC;IACzB,CAAC;IAyBD,KAAK,CAEH,KAAY,EACZ,GAAG,IAAwC;QAE3C,+CAA+C;QAC/C,IAAI,uBAAA,IAAI,0BAAO,EAAE;YACf,OAAO;SACR;QAED,IAAI,KAAK,KAAK,KAAK,EAAE;YACnB,uBAAA,IAAI,sBAAU,IAAI,MAAA,CAAC;YACnB,uBAAA,IAAI,sCAAmB,MAAvB,IAAI,CAAqB,CAAC;SAC3B;QAED,MAAM,SAAS,GAAkD,uBAAA,IAAI,8BAAW,CAAC,KAAK,CAAC,CAAC;QACxF,IAAI,SAAS,EAAE;YACb,uBAAA,IAAI,8BAAW,CAAC,KAAK,CAAC,GAAG,SAAS,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,IAAI,CAAQ,CAAC;YACjE,SAAS,CAAC,OAAO,CAAC,CAAC,EAAE,QAAQ,EAAO,EAAE,EAAE,CAAC,QAAQ,CAAC,GAAI,IAAY,CAAC,CAAC,CAAC;SACtE;QAED,IAAI,KAAK,KAAK,OAAO,EAAE;YACrB,MAAM,KAAK,GAAG,IAAI,CAAC,CAAC,CAAsB,CA
AC;YAC3C,IAAI,CAAC,uBAAA,IAAI,2CAAwB,IAAI,CAAC,SAAS,EAAE,MAAM,EAAE;gBACvD,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;aACvB;YACD,uBAAA,IAAI,2CAAwB,MAA5B,IAAI,EAAyB,KAAK,CAAC,CAAC;YACpC,uBAAA,IAAI,qCAAkB,MAAtB,IAAI,EAAmB,KAAK,CAAC,CAAC;YAC9B,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;YAClB,OAAO;SACR;QAED,IAAI,KAAK,KAAK,OAAO,EAAE;YACrB,yEAAyE;YAEzE,MAAM,KAAK,GAAG,IAAI,CAAC,CAAC,CAAgB,CAAC;YACrC,IAAI,CAAC,uBAAA,IAAI,2CAAwB,IAAI,CAAC,SAAS,EAAE,MAAM,EAAE;gBACvD,mFAAmF;gBACnF,8EAA8E;gBAC9E,kCAAkC;gBAClC,wBAAwB;gBACxB,uCAAuC;gBACvC,SAAS;gBACT,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;aACvB;YACD,uBAAA,IAAI,2CAAwB,MAA5B,IAAI,EAAyB,KAAK,CAAC,CAAC;YACpC,uBAAA,IAAI,qCAAkB,MAAtB,IAAI,EAAmB,KAAK,CAAC,CAAC;YAC9B,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;SACnB;IACH,CAAC;IAES,UAAU,KAAU,CAAC;CAChC;AA1ND,kCA0NC;olBA3E6C,KAAc;IACxD,uBAAA,IAAI,wBAAY,IAAI,MAAA,CAAC;IACrB,IAAI,KAAK,YAAY,KAAK,IAAI,KAAK,CAAC,IAAI,KAAK,YAAY,EAAE;QACzD,KAAK,GAAG,IAAI,yBAAiB,EAAE,CAAC;KACjC;IACD,IAAI,KAAK,YAAY,yBAAiB,EAAE;QACtC,uBAAA,IAAI,wBAAY,IAAI,MAAA,CAAC;QACrB,OAAO,IAAI,CAAC,KAAK,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;KACnC;IACD,IAAI,KAAK,YAAY,mBAAW,EAAE;QAChC,OAAO,IAAI,CAAC,KAAK,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;KACnC;IACD,IAAI,KAAK,YAAY,KAAK,EAAE;QAC1B,MAAM,WAAW,GAAgB,IAAI,mBAAW,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QAChE,aAAa;QACb,WAAW,CAAC,KAAK,GAAG,KAAK,CAAC;QAC1B,OAAO,IAAI,CAAC,KAAK,CAAC,OAAO,EAAE,WAAW,CAAC,CAAC;KACzC;IACD,OAAO,IAAI,CAAC,KAAK,CAAC,OAAO,EAAE,IAAI,mBAAW,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;AAC7D,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/lib/EventStream.mjs b/node_modules/openai/lib/EventStream.mjs new file mode 100644 index 0000000..271c4d0 --- /dev/null +++ b/node_modules/openai/lib/EventStream.mjs @@ -0,0 +1,196 @@ +var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; +}; +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? 
f.value : state.get(receiver); +}; +var _EventStream_instances, _EventStream_connectedPromise, _EventStream_resolveConnectedPromise, _EventStream_rejectConnectedPromise, _EventStream_endPromise, _EventStream_resolveEndPromise, _EventStream_rejectEndPromise, _EventStream_listeners, _EventStream_ended, _EventStream_errored, _EventStream_aborted, _EventStream_catchingPromiseCreated, _EventStream_handleError; +import { APIUserAbortError, OpenAIError } from "../error.mjs"; +export class EventStream { + constructor() { + _EventStream_instances.add(this); + this.controller = new AbortController(); + _EventStream_connectedPromise.set(this, void 0); + _EventStream_resolveConnectedPromise.set(this, () => { }); + _EventStream_rejectConnectedPromise.set(this, () => { }); + _EventStream_endPromise.set(this, void 0); + _EventStream_resolveEndPromise.set(this, () => { }); + _EventStream_rejectEndPromise.set(this, () => { }); + _EventStream_listeners.set(this, {}); + _EventStream_ended.set(this, false); + _EventStream_errored.set(this, false); + _EventStream_aborted.set(this, false); + _EventStream_catchingPromiseCreated.set(this, false); + __classPrivateFieldSet(this, _EventStream_connectedPromise, new Promise((resolve, reject) => { + __classPrivateFieldSet(this, _EventStream_resolveConnectedPromise, resolve, "f"); + __classPrivateFieldSet(this, _EventStream_rejectConnectedPromise, reject, "f"); + }), "f"); + __classPrivateFieldSet(this, _EventStream_endPromise, new Promise((resolve, reject) => { + __classPrivateFieldSet(this, _EventStream_resolveEndPromise, resolve, "f"); + __classPrivateFieldSet(this, _EventStream_rejectEndPromise, reject, "f"); + }), "f"); + // Don't let these promises cause unhandled rejection errors. + // we will manually cause an unhandled rejection error later + // if the user hasn't registered any error listener or called + // any promise-returning method. + __classPrivateFieldGet(this, _EventStream_connectedPromise, "f").catch(() => { }); + __classPrivateFieldGet(this, _EventStream_endPromise, "f").catch(() => { }); + } + _run(executor) { + // Unfortunately if we call `executor()` immediately we get runtime errors about + // references to `this` before the `super()` constructor call returns. + setTimeout(() => { + executor().then(() => { + this._emitFinal(); + this._emit('end'); + }, __classPrivateFieldGet(this, _EventStream_instances, "m", _EventStream_handleError).bind(this)); + }, 0); + } + _connected() { + if (this.ended) + return; + __classPrivateFieldGet(this, _EventStream_resolveConnectedPromise, "f").call(this); + this._emit('connect'); + } + get ended() { + return __classPrivateFieldGet(this, _EventStream_ended, "f"); + } + get errored() { + return __classPrivateFieldGet(this, _EventStream_errored, "f"); + } + get aborted() { + return __classPrivateFieldGet(this, _EventStream_aborted, "f"); + } + abort() { + this.controller.abort(); + } + /** + * Adds the listener function to the end of the listeners array for the event. + * No checks are made to see if the listener has already been added. Multiple calls passing + * the same combination of event and listener will result in the listener being added, and + * called, multiple times. 
+ * @returns this ChatCompletionStream, so that calls can be chained + */ + on(event, listener) { + const listeners = __classPrivateFieldGet(this, _EventStream_listeners, "f")[event] || (__classPrivateFieldGet(this, _EventStream_listeners, "f")[event] = []); + listeners.push({ listener }); + return this; + } + /** + * Removes the specified listener from the listener array for the event. + * off() will remove, at most, one instance of a listener from the listener array. If any single + * listener has been added multiple times to the listener array for the specified event, then + * off() must be called multiple times to remove each instance. + * @returns this ChatCompletionStream, so that calls can be chained + */ + off(event, listener) { + const listeners = __classPrivateFieldGet(this, _EventStream_listeners, "f")[event]; + if (!listeners) + return this; + const index = listeners.findIndex((l) => l.listener === listener); + if (index >= 0) + listeners.splice(index, 1); + return this; + } + /** + * Adds a one-time listener function for the event. The next time the event is triggered, + * this listener is removed and then invoked. + * @returns this ChatCompletionStream, so that calls can be chained + */ + once(event, listener) { + const listeners = __classPrivateFieldGet(this, _EventStream_listeners, "f")[event] || (__classPrivateFieldGet(this, _EventStream_listeners, "f")[event] = []); + listeners.push({ listener, once: true }); + return this; + } + /** + * This is similar to `.once()`, but returns a Promise that resolves the next time + * the event is triggered, instead of calling a listener callback. + * @returns a Promise that resolves the next time given event is triggered, + * or rejects if an error is emitted. (If you request the 'error' event, + * returns a promise that resolves with the error). + * + * Example: + * + * const message = await stream.emitted('message') // rejects if the stream errors + */ + emitted(event) { + return new Promise((resolve, reject) => { + __classPrivateFieldSet(this, _EventStream_catchingPromiseCreated, true, "f"); + if (event !== 'error') + this.once('error', reject); + this.once(event, resolve); + }); + } + async done() { + __classPrivateFieldSet(this, _EventStream_catchingPromiseCreated, true, "f"); + await __classPrivateFieldGet(this, _EventStream_endPromise, "f"); + } + _emit(event, ...args) { + // make sure we don't emit any events after end + if (__classPrivateFieldGet(this, _EventStream_ended, "f")) { + return; + } + if (event === 'end') { + __classPrivateFieldSet(this, _EventStream_ended, true, "f"); + __classPrivateFieldGet(this, _EventStream_resolveEndPromise, "f").call(this); + } + const listeners = __classPrivateFieldGet(this, _EventStream_listeners, "f")[event]; + if (listeners) { + __classPrivateFieldGet(this, _EventStream_listeners, "f")[event] = listeners.filter((l) => !l.once); + listeners.forEach(({ listener }) => listener(...args)); + } + if (event === 'abort') { + const error = args[0]; + if (!__classPrivateFieldGet(this, _EventStream_catchingPromiseCreated, "f") && !listeners?.length) { + Promise.reject(error); + } + __classPrivateFieldGet(this, _EventStream_rejectConnectedPromise, "f").call(this, error); + __classPrivateFieldGet(this, _EventStream_rejectEndPromise, "f").call(this, error); + this._emit('end'); + return; + } + if (event === 'error') { + // NOTE: _emit('error', error) should only be called from #handleError(). 
+ const error = args[0]; + if (!__classPrivateFieldGet(this, _EventStream_catchingPromiseCreated, "f") && !listeners?.length) { + // Trigger an unhandled rejection if the user hasn't registered any error handlers. + // If you are seeing stack traces here, make sure to handle errors via either: + // - runner.on('error', () => ...) + // - await runner.done() + // - await runner.finalChatCompletion() + // - etc. + Promise.reject(error); + } + __classPrivateFieldGet(this, _EventStream_rejectConnectedPromise, "f").call(this, error); + __classPrivateFieldGet(this, _EventStream_rejectEndPromise, "f").call(this, error); + this._emit('end'); + } + } + _emitFinal() { } +} +_EventStream_connectedPromise = new WeakMap(), _EventStream_resolveConnectedPromise = new WeakMap(), _EventStream_rejectConnectedPromise = new WeakMap(), _EventStream_endPromise = new WeakMap(), _EventStream_resolveEndPromise = new WeakMap(), _EventStream_rejectEndPromise = new WeakMap(), _EventStream_listeners = new WeakMap(), _EventStream_ended = new WeakMap(), _EventStream_errored = new WeakMap(), _EventStream_aborted = new WeakMap(), _EventStream_catchingPromiseCreated = new WeakMap(), _EventStream_instances = new WeakSet(), _EventStream_handleError = function _EventStream_handleError(error) { + __classPrivateFieldSet(this, _EventStream_errored, true, "f"); + if (error instanceof Error && error.name === 'AbortError') { + error = new APIUserAbortError(); + } + if (error instanceof APIUserAbortError) { + __classPrivateFieldSet(this, _EventStream_aborted, true, "f"); + return this._emit('abort', error); + } + if (error instanceof OpenAIError) { + return this._emit('error', error); + } + if (error instanceof Error) { + const openAIError = new OpenAIError(error.message); + // @ts-ignore + openAIError.cause = error; + return this._emit('error', openAIError); + } + return this._emit('error', new OpenAIError(String(error))); +}; +//# sourceMappingURL=EventStream.mjs.map \ No newline at end of file diff --git a/node_modules/openai/lib/EventStream.mjs.map b/node_modules/openai/lib/EventStream.mjs.map new file mode 100644 index 0000000..35c0647 --- /dev/null +++ b/node_modules/openai/lib/EventStream.mjs.map @@ -0,0 +1 @@ 
+{"version":3,"file":"EventStream.mjs","sourceRoot":"","sources":["../src/lib/EventStream.ts"],"names":[],"mappings":";;;;;;;;;;;;OAAO,EAAE,iBAAiB,EAAE,WAAW,EAAE;AAEzC,MAAM,OAAO,WAAW;IAoBtB;;QAnBA,eAAU,GAAoB,IAAI,eAAe,EAAE,CAAC;QAEpD,gDAAiC;QACjC,+CAAuC,GAAG,EAAE,GAAE,CAAC,EAAC;QAChD,8CAAwD,GAAG,EAAE,GAAE,CAAC,EAAC;QAEjE,0CAA2B;QAC3B,yCAAiC,GAAG,EAAE,GAAE,CAAC,EAAC;QAC1C,wCAAkD,GAAG,EAAE,GAAE,CAAC,EAAC;QAE3D,iCAEI,EAAE,EAAC;QAEP,6BAAS,KAAK,EAAC;QACf,+BAAW,KAAK,EAAC;QACjB,+BAAW,KAAK,EAAC;QACjB,8CAA0B,KAAK,EAAC;QAG9B,uBAAA,IAAI,iCAAqB,IAAI,OAAO,CAAO,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;YAC7D,uBAAA,IAAI,wCAA4B,OAAO,MAAA,CAAC;YACxC,uBAAA,IAAI,uCAA2B,MAAM,MAAA,CAAC;QACxC,CAAC,CAAC,MAAA,CAAC;QAEH,uBAAA,IAAI,2BAAe,IAAI,OAAO,CAAO,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;YACvD,uBAAA,IAAI,kCAAsB,OAAO,MAAA,CAAC;YAClC,uBAAA,IAAI,iCAAqB,MAAM,MAAA,CAAC;QAClC,CAAC,CAAC,MAAA,CAAC;QAEH,6DAA6D;QAC7D,4DAA4D;QAC5D,6DAA6D;QAC7D,gCAAgC;QAChC,uBAAA,IAAI,qCAAkB,CAAC,KAAK,CAAC,GAAG,EAAE,GAAE,CAAC,CAAC,CAAC;QACvC,uBAAA,IAAI,+BAAY,CAAC,KAAK,CAAC,GAAG,EAAE,GAAE,CAAC,CAAC,CAAC;IACnC,CAAC;IAES,IAAI,CAAgC,QAA4B;QACxE,gFAAgF;QAChF,sEAAsE;QACtE,UAAU,CAAC,GAAG,EAAE;YACd,QAAQ,EAAE,CAAC,IAAI,CAAC,GAAG,EAAE;gBACnB,IAAI,CAAC,UAAU,EAAE,CAAC;gBAClB,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;YACpB,CAAC,EAAE,uBAAA,IAAI,wDAAa,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC;QACnC,CAAC,EAAE,CAAC,CAAC,CAAC;IACR,CAAC;IAES,UAAU;QAClB,IAAI,IAAI,CAAC,KAAK;YAAE,OAAO;QACvB,uBAAA,IAAI,4CAAyB,MAA7B,IAAI,CAA2B,CAAC;QAChC,IAAI,CAAC,KAAK,CAAC,SAAS,CAAC,CAAC;IACxB,CAAC;IAED,IAAI,KAAK;QACP,OAAO,uBAAA,IAAI,0BAAO,CAAC;IACrB,CAAC;IAED,IAAI,OAAO;QACT,OAAO,uBAAA,IAAI,4BAAS,CAAC;IACvB,CAAC;IAED,IAAI,OAAO;QACT,OAAO,uBAAA,IAAI,4BAAS,CAAC;IACvB,CAAC;IAED,KAAK;QACH,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,CAAC;IAC1B,CAAC;IAED;;;;;;OAMG;IACH,EAAE,CAAiC,KAAY,EAAE,QAA0C;QACzF,MAAM,SAAS,GACb,uBAAA,IAAI,8BAAW,CAAC,KAAK,CAAC,IAAI,CAAC,uBAAA,IAAI,8BAAW,CAAC,KAAK,CAAC,GAAG,EAAE,CAAC,CAAC;QAC1D,SAAS,CAAC,IAAI,CAAC,EAAE,QAAQ,EAAE,CAAC,CAAC;QAC7B,OAAO,IAAI,CAAC;IACd,CAAC;IAED;;;;;;OAMG;IACH,GAAG,CAAiC,KAAY,EAAE,QAA0C;QAC1F,MAAM,SAAS,GAAG,uBAAA,IAAI,8BAAW,CAAC,KAAK,CAAC,CAAC;QACzC,IAAI,CAAC,SAAS;YAAE,OAAO,IAAI,CAAC;QAC5B,MAAM,KAAK,GAAG,SAAS,CAAC,SAAS,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,QAAQ,KAAK,QAAQ,CAAC,CAAC;QAClE,IAAI,KAAK,IAAI,CAAC;YAAE,SAAS,CAAC,MAAM,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC;QAC3C,OAAO,IAAI,CAAC;IACd,CAAC;IAED;;;;OAIG;IACH,IAAI,CAAiC,KAAY,EAAE,QAA0C;QAC3F,MAAM,SAAS,GACb,uBAAA,IAAI,8BAAW,CAAC,KAAK,CAAC,IAAI,CAAC,uBAAA,IAAI,8BAAW,CAAC,KAAK,CAAC,GAAG,EAAE,CAAC,CAAC;QAC1D,SAAS,CAAC,IAAI,CAAC,EAAE,QAAQ,EAAE,IAAI,EAAE,IAAI,EAAE,CAAC,CAAC;QACzC,OAAO,IAAI,CAAC;IACd,CAAC;IAED;;;;;;;;;;OAUG;IACH,OAAO,CACL,KAAY;QAMZ,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;YACrC,uBAAA,IAAI,uCAA2B,IAAI,MAAA,CAAC;YACpC,IAAI,KAAK,KAAK,OAAO;gBAAE,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;YAClD,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE,OAAc,CAAC,CAAC;QACnC,CAAC,CAAC,CAAC;IACL,CAAC;IAED,KAAK,CAAC,IAAI;QACR,uBAAA,IAAI,uCAA2B,IAAI,MAAA,CAAC;QACpC,MAAM,uBAAA,IAAI,+BAAY,CAAC;IACzB,CAAC;IAyBD,KAAK,CAEH,KAAY,EACZ,GAAG,IAAwC;QAE3C,+CAA+C;QAC/C,IAAI,uBAAA,IAAI,0BAAO,EAAE;YACf,OAAO;SACR;QAED,IAAI,KAAK,KAAK,KAAK,EAAE;YACnB,uBAAA,IAAI,sBAAU,IAAI,MAAA,CAAC;YACnB,uBAAA,IAAI,sCAAmB,MAAvB,IAAI,CAAqB,CAAC;SAC3B;QAED,MAAM,SAAS,GAAkD,uBAAA,IAAI,8BAAW,CAAC,KAAK,CAAC,CAAC;QACxF,IAAI,SAAS,EAAE;YACb,uBAAA,IAAI,8BAAW,CAAC,KAAK,CAAC,GAAG,SAAS,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,IAAI,CAAQ,CAAC;YACjE,SAAS,CAAC,OAAO,CAAC,CAAC,EAAE,QAAQ,EAAO,EAAE,EAAE,CAAC,QAAQ,CAAC,GAAI,IAAY,CAAC,CAAC,CAAC;SACtE;QAED,IAAI,KAAK,KAAK,OAAO,EAAE;YACrB,MAAM,KAAK,GAAG,
IAAI,CAAC,CAAC,CAAsB,CAAC;YAC3C,IAAI,CAAC,uBAAA,IAAI,2CAAwB,IAAI,CAAC,SAAS,EAAE,MAAM,EAAE;gBACvD,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;aACvB;YACD,uBAAA,IAAI,2CAAwB,MAA5B,IAAI,EAAyB,KAAK,CAAC,CAAC;YACpC,uBAAA,IAAI,qCAAkB,MAAtB,IAAI,EAAmB,KAAK,CAAC,CAAC;YAC9B,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;YAClB,OAAO;SACR;QAED,IAAI,KAAK,KAAK,OAAO,EAAE;YACrB,yEAAyE;YAEzE,MAAM,KAAK,GAAG,IAAI,CAAC,CAAC,CAAgB,CAAC;YACrC,IAAI,CAAC,uBAAA,IAAI,2CAAwB,IAAI,CAAC,SAAS,EAAE,MAAM,EAAE;gBACvD,mFAAmF;gBACnF,8EAA8E;gBAC9E,kCAAkC;gBAClC,wBAAwB;gBACxB,uCAAuC;gBACvC,SAAS;gBACT,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;aACvB;YACD,uBAAA,IAAI,2CAAwB,MAA5B,IAAI,EAAyB,KAAK,CAAC,CAAC;YACpC,uBAAA,IAAI,qCAAkB,MAAtB,IAAI,EAAmB,KAAK,CAAC,CAAC;YAC9B,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;SACnB;IACH,CAAC;IAES,UAAU,KAAU,CAAC;CAChC;olBA3E6C,KAAc;IACxD,uBAAA,IAAI,wBAAY,IAAI,MAAA,CAAC;IACrB,IAAI,KAAK,YAAY,KAAK,IAAI,KAAK,CAAC,IAAI,KAAK,YAAY,EAAE;QACzD,KAAK,GAAG,IAAI,iBAAiB,EAAE,CAAC;KACjC;IACD,IAAI,KAAK,YAAY,iBAAiB,EAAE;QACtC,uBAAA,IAAI,wBAAY,IAAI,MAAA,CAAC;QACrB,OAAO,IAAI,CAAC,KAAK,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;KACnC;IACD,IAAI,KAAK,YAAY,WAAW,EAAE;QAChC,OAAO,IAAI,CAAC,KAAK,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;KACnC;IACD,IAAI,KAAK,YAAY,KAAK,EAAE;QAC1B,MAAM,WAAW,GAAgB,IAAI,WAAW,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QAChE,aAAa;QACb,WAAW,CAAC,KAAK,GAAG,KAAK,CAAC;QAC1B,OAAO,IAAI,CAAC,KAAK,CAAC,OAAO,EAAE,WAAW,CAAC,CAAC;KACzC;IACD,OAAO,IAAI,CAAC,KAAK,CAAC,OAAO,EAAE,IAAI,WAAW,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;AAC7D,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/lib/RunnableFunction.d.ts b/node_modules/openai/lib/RunnableFunction.d.ts new file mode 100644 index 0000000..b5cdc93 --- /dev/null +++ b/node_modules/openai/lib/RunnableFunction.d.ts @@ -0,0 +1,97 @@ +import { type ChatCompletionRunner } from "./ChatCompletionRunner.js"; +import { type ChatCompletionStreamingRunner } from "./ChatCompletionStreamingRunner.js"; +import { JSONSchema } from "./jsonschema.js"; +type PromiseOrValue = T | Promise; +export type RunnableFunctionWithParse = { + /** + * @param args the return value from `parse`. + * @param runner the runner evaluating this callback. + * @returns a string to send back to OpenAI. + */ + function: (args: Args, runner: ChatCompletionRunner | ChatCompletionStreamingRunner) => PromiseOrValue; + /** + * @param input the raw args from the OpenAI function call. + * @returns the parsed arguments to pass to `function` + */ + parse: (input: string) => PromiseOrValue; + /** + * The parameters the function accepts, describes as a JSON Schema object. + */ + parameters: JSONSchema; + /** + * A description of what the function does, used by the model to choose when and how to call the function. + */ + description: string; + /** + * The name of the function to be called. Will default to function.name if omitted. + */ + name?: string | undefined; + strict?: boolean | undefined; +}; +export type RunnableFunctionWithoutParse = { + /** + * @param args the raw args from the OpenAI function call. + * @returns a string to send back to OpenAI + */ + function: (args: string, runner: ChatCompletionRunner | ChatCompletionStreamingRunner) => PromiseOrValue; + /** + * The parameters the function accepts, describes as a JSON Schema object. + */ + parameters: JSONSchema; + /** + * A description of what the function does, used by the model to choose when and how to call the function. + */ + description: string; + /** + * The name of the function to be called. Will default to function.name if omitted. 
+ */ + name?: string | undefined; + strict?: boolean | undefined; +}; +export type RunnableFunction = Args extends string ? RunnableFunctionWithoutParse : Args extends object ? RunnableFunctionWithParse : never; +export type RunnableToolFunction = Args extends string ? RunnableToolFunctionWithoutParse : Args extends object ? RunnableToolFunctionWithParse : never; +export type RunnableToolFunctionWithoutParse = { + type: 'function'; + function: RunnableFunctionWithoutParse; +}; +export type RunnableToolFunctionWithParse = { + type: 'function'; + function: RunnableFunctionWithParse; +}; +export declare function isRunnableFunctionWithParse(fn: any): fn is RunnableFunctionWithParse; +export type BaseFunctionsArgs = readonly (object | string)[]; +export type RunnableFunctions = [ + any[] +] extends [FunctionsArgs] ? readonly RunnableFunction[] : { + [Index in keyof FunctionsArgs]: Index extends number ? RunnableFunction : FunctionsArgs[Index]; +}; +export type RunnableTools = [ + any[] +] extends [FunctionsArgs] ? readonly RunnableToolFunction[] : { + [Index in keyof FunctionsArgs]: Index extends number ? RunnableToolFunction : FunctionsArgs[Index]; +}; +/** + * This is helper class for passing a `function` and `parse` where the `function` + * argument type matches the `parse` return type. + * + * @deprecated - please use ParsingToolFunction instead. + */ +export declare class ParsingFunction { + function: RunnableFunctionWithParse['function']; + parse: RunnableFunctionWithParse['parse']; + parameters: RunnableFunctionWithParse['parameters']; + description: RunnableFunctionWithParse['description']; + name?: RunnableFunctionWithParse['name']; + constructor(input: RunnableFunctionWithParse); +} +/** + * This is helper class for passing a `function` and `parse` where the `function` + * argument type matches the `parse` return type. 
+ */ +export declare class ParsingToolFunction { + type: 'function'; + function: RunnableFunctionWithParse; + constructor(input: RunnableFunctionWithParse); +} +export {}; +//# sourceMappingURL=RunnableFunction.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/lib/RunnableFunction.d.ts.map b/node_modules/openai/lib/RunnableFunction.d.ts.map new file mode 100644 index 0000000..0e5cf65 --- /dev/null +++ b/node_modules/openai/lib/RunnableFunction.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"RunnableFunction.d.ts","sourceRoot":"","sources":["../src/lib/RunnableFunction.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,KAAK,oBAAoB,EAAE,MAAM,wBAAwB,CAAC;AACnE,OAAO,EAAE,KAAK,6BAA6B,EAAE,MAAM,iCAAiC,CAAC;AACrF,OAAO,EAAE,UAAU,EAAE,MAAM,cAAc,CAAC;AAE1C,KAAK,cAAc,CAAC,CAAC,IAAI,CAAC,GAAG,OAAO,CAAC,CAAC,CAAC,CAAC;AAExC,MAAM,MAAM,yBAAyB,CAAC,IAAI,SAAS,MAAM,IAAI;IAC3D;;;;OAIG;IACH,QAAQ,EAAE,CACR,IAAI,EAAE,IAAI,EACV,MAAM,EAAE,oBAAoB,CAAC,OAAO,CAAC,GAAG,6BAA6B,CAAC,OAAO,CAAC,KAC3E,cAAc,CAAC,OAAO,CAAC,CAAC;IAC7B;;;OAGG;IACH,KAAK,EAAE,CAAC,KAAK,EAAE,MAAM,KAAK,cAAc,CAAC,IAAI,CAAC,CAAC;IAC/C;;OAEG;IACH,UAAU,EAAE,UAAU,CAAC;IACvB;;OAEG;IACH,WAAW,EAAE,MAAM,CAAC;IACpB;;OAEG;IACH,IAAI,CAAC,EAAE,MAAM,GAAG,SAAS,CAAC;IAC1B,MAAM,CAAC,EAAE,OAAO,GAAG,SAAS,CAAC;CAC9B,CAAC;AAEF,MAAM,MAAM,4BAA4B,GAAG;IACzC;;;OAGG;IACH,QAAQ,EAAE,CACR,IAAI,EAAE,MAAM,EACZ,MAAM,EAAE,oBAAoB,CAAC,OAAO,CAAC,GAAG,6BAA6B,CAAC,OAAO,CAAC,KAC3E,cAAc,CAAC,OAAO,CAAC,CAAC;IAC7B;;OAEG;IACH,UAAU,EAAE,UAAU,CAAC;IACvB;;OAEG;IACH,WAAW,EAAE,MAAM,CAAC;IACpB;;OAEG;IACH,IAAI,CAAC,EAAE,MAAM,GAAG,SAAS,CAAC;IAC1B,MAAM,CAAC,EAAE,OAAO,GAAG,SAAS,CAAC;CAC9B,CAAC;AAEF,MAAM,MAAM,gBAAgB,CAAC,IAAI,SAAS,MAAM,GAAG,MAAM,IACvD,IAAI,SAAS,MAAM,GAAG,4BAA4B,GAChD,IAAI,SAAS,MAAM,GAAG,yBAAyB,CAAC,IAAI,CAAC,GACrD,KAAK,CAAC;AAEV,MAAM,MAAM,oBAAoB,CAAC,IAAI,SAAS,MAAM,GAAG,MAAM,IAC3D,IAAI,SAAS,MAAM,GAAG,gCAAgC,GACpD,IAAI,SAAS,MAAM,GAAG,6BAA6B,CAAC,IAAI,CAAC,GACzD,KAAK,CAAC;AAEV,MAAM,MAAM,gCAAgC,GAAG;IAC7C,IAAI,EAAE,UAAU,CAAC;IACjB,QAAQ,EAAE,4BAA4B,CAAC;CACxC,CAAC;AACF,MAAM,MAAM,6BAA6B,CAAC,IAAI,SAAS,MAAM,IAAI;IAC/D,IAAI,EAAE,UAAU,CAAC;IACjB,QAAQ,EAAE,yBAAyB,CAAC,IAAI,CAAC,CAAC;CAC3C,CAAC;AAEF,wBAAgB,2BAA2B,CAAC,IAAI,SAAS,MAAM,EAC7D,EAAE,EAAE,GAAG,GACN,EAAE,IAAI,yBAAyB,CAAC,IAAI,CAAC,CAEvC;AAED,MAAM,MAAM,iBAAiB,GAAG,SAAS,CAAC,MAAM,GAAG,MAAM,CAAC,EAAE,CAAC;AAE7D,MAAM,MAAM,iBAAiB,CAAC,aAAa,SAAS,iBAAiB,IACnE;IAAC,GAAG,EAAE;CAAC,SAAS,CAAC,aAAa,CAAC,GAAG,SAAS,gBAAgB,CAAC,GAAG,CAAC,EAAE,GAChE;KACG,KAAK,IAAI,MAAM,aAAa,GAAG,KAAK,SAAS,MAAM,GAAG,gBAAgB,CAAC,aAAa,CAAC,KAAK,CAAC,CAAC,GAC3F,aAAa,CAAC,KAAK,CAAC;CACvB,CAAC;AAEN,MAAM,MAAM,aAAa,CAAC,aAAa,SAAS,iBAAiB,IAC/D;IAAC,GAAG,EAAE;CAAC,SAAS,CAAC,aAAa,CAAC,GAAG,SAAS,oBAAoB,CAAC,GAAG,CAAC,EAAE,GACpE;KACG,KAAK,IAAI,MAAM,aAAa,GAAG,KAAK,SAAS,MAAM,GAAG,oBAAoB,CAAC,aAAa,CAAC,KAAK,CAAC,CAAC,GAC/F,aAAa,CAAC,KAAK,CAAC;CACvB,CAAC;AAEN;;;;;GAKG;AACH,qBAAa,eAAe,CAAC,IAAI,SAAS,MAAM;IAC9C,QAAQ,EAAE,yBAAyB,CAAC,IAAI,CAAC,CAAC,UAAU,CAAC,CAAC;IACtD,KAAK,EAAE,yBAAyB,CAAC,IAAI,CAAC,CAAC,OAAO,CAAC,CAAC;IAChD,UAAU,EAAE,yBAAyB,CAAC,IAAI,CAAC,CAAC,YAAY,CAAC,CAAC;IAC1D,WAAW,EAAE,yBAAyB,CAAC,IAAI,CAAC,CAAC,aAAa,CAAC,CAAC;IAC5D,IAAI,CAAC,EAAE,yBAAyB,CAAC,IAAI,CAAC,CAAC,MAAM,CAAC,CAAC;gBAEnC,KAAK,EAAE,yBAAyB,CAAC,IAAI,CAAC;CAOnD;AAED;;;GAGG;AACH,qBAAa,mBAAmB,CAAC,IAAI,SAAS,MAAM;IAClD,IAAI,EAAE,UAAU,CAAC;IACjB,QAAQ,EAAE,yBAAyB,CAAC,IAAI,CAAC,CAAC;gBAE9B,KAAK,EAAE,yBAAyB,CAAC,IAAI,CAAC;CAInD"} \ No newline at end of file diff --git a/node_modules/openai/lib/RunnableFunction.js b/node_modules/openai/lib/RunnableFunction.js new file mode 100644 index 0000000..a079269 
--- /dev/null +++ b/node_modules/openai/lib/RunnableFunction.js @@ -0,0 +1,35 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.ParsingToolFunction = exports.ParsingFunction = exports.isRunnableFunctionWithParse = void 0; +function isRunnableFunctionWithParse(fn) { + return typeof fn.parse === 'function'; +} +exports.isRunnableFunctionWithParse = isRunnableFunctionWithParse; +/** + * This is helper class for passing a `function` and `parse` where the `function` + * argument type matches the `parse` return type. + * + * @deprecated - please use ParsingToolFunction instead. + */ +class ParsingFunction { + constructor(input) { + this.function = input.function; + this.parse = input.parse; + this.parameters = input.parameters; + this.description = input.description; + this.name = input.name; + } +} +exports.ParsingFunction = ParsingFunction; +/** + * This is helper class for passing a `function` and `parse` where the `function` + * argument type matches the `parse` return type. + */ +class ParsingToolFunction { + constructor(input) { + this.type = 'function'; + this.function = input; + } +} +exports.ParsingToolFunction = ParsingToolFunction; +//# sourceMappingURL=RunnableFunction.js.map \ No newline at end of file diff --git a/node_modules/openai/lib/RunnableFunction.js.map b/node_modules/openai/lib/RunnableFunction.js.map new file mode 100644 index 0000000..11ef4f4 --- /dev/null +++ b/node_modules/openai/lib/RunnableFunction.js.map @@ -0,0 +1 @@ +{"version":3,"file":"RunnableFunction.js","sourceRoot":"","sources":["../src/lib/RunnableFunction.ts"],"names":[],"mappings":";;;AA+EA,SAAgB,2BAA2B,CACzC,EAAO;IAEP,OAAO,OAAQ,EAAU,CAAC,KAAK,KAAK,UAAU,CAAC;AACjD,CAAC;AAJD,kEAIC;AAkBD;;;;;GAKG;AACH,MAAa,eAAe;IAO1B,YAAY,KAAsC;QAChD,IAAI,CAAC,QAAQ,GAAG,KAAK,CAAC,QAAQ,CAAC;QAC/B,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC,KAAK,CAAC;QACzB,IAAI,CAAC,UAAU,GAAG,KAAK,CAAC,UAAU,CAAC;QACnC,IAAI,CAAC,WAAW,GAAG,KAAK,CAAC,WAAW,CAAC;QACrC,IAAI,CAAC,IAAI,GAAG,KAAK,CAAC,IAAI,CAAC;IACzB,CAAC;CACF;AAdD,0CAcC;AAED;;;GAGG;AACH,MAAa,mBAAmB;IAI9B,YAAY,KAAsC;QAChD,IAAI,CAAC,IAAI,GAAG,UAAU,CAAC;QACvB,IAAI,CAAC,QAAQ,GAAG,KAAK,CAAC;IACxB,CAAC;CACF;AARD,kDAQC"} \ No newline at end of file diff --git a/node_modules/openai/lib/RunnableFunction.mjs b/node_modules/openai/lib/RunnableFunction.mjs new file mode 100644 index 0000000..c9ef113 --- /dev/null +++ b/node_modules/openai/lib/RunnableFunction.mjs @@ -0,0 +1,29 @@ +export function isRunnableFunctionWithParse(fn) { + return typeof fn.parse === 'function'; +} +/** + * This is helper class for passing a `function` and `parse` where the `function` + * argument type matches the `parse` return type. + * + * @deprecated - please use ParsingToolFunction instead. + */ +export class ParsingFunction { + constructor(input) { + this.function = input.function; + this.parse = input.parse; + this.parameters = input.parameters; + this.description = input.description; + this.name = input.name; + } +} +/** + * This is helper class for passing a `function` and `parse` where the `function` + * argument type matches the `parse` return type. 
+ */ +export class ParsingToolFunction { + constructor(input) { + this.type = 'function'; + this.function = input; + } +} +//# sourceMappingURL=RunnableFunction.mjs.map \ No newline at end of file diff --git a/node_modules/openai/lib/RunnableFunction.mjs.map b/node_modules/openai/lib/RunnableFunction.mjs.map new file mode 100644 index 0000000..8b7dc7a --- /dev/null +++ b/node_modules/openai/lib/RunnableFunction.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"RunnableFunction.mjs","sourceRoot":"","sources":["../src/lib/RunnableFunction.ts"],"names":[],"mappings":"AA+EA,MAAM,UAAU,2BAA2B,CACzC,EAAO;IAEP,OAAO,OAAQ,EAAU,CAAC,KAAK,KAAK,UAAU,CAAC;AACjD,CAAC;AAkBD;;;;;GAKG;AACH,MAAM,OAAO,eAAe;IAO1B,YAAY,KAAsC;QAChD,IAAI,CAAC,QAAQ,GAAG,KAAK,CAAC,QAAQ,CAAC;QAC/B,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC,KAAK,CAAC;QACzB,IAAI,CAAC,UAAU,GAAG,KAAK,CAAC,UAAU,CAAC;QACnC,IAAI,CAAC,WAAW,GAAG,KAAK,CAAC,WAAW,CAAC;QACrC,IAAI,CAAC,IAAI,GAAG,KAAK,CAAC,IAAI,CAAC;IACzB,CAAC;CACF;AAED;;;GAGG;AACH,MAAM,OAAO,mBAAmB;IAI9B,YAAY,KAAsC;QAChD,IAAI,CAAC,IAAI,GAAG,UAAU,CAAC;QACvB,IAAI,CAAC,QAAQ,GAAG,KAAK,CAAC;IACxB,CAAC;CACF"} \ No newline at end of file diff --git a/node_modules/openai/lib/Util.d.ts b/node_modules/openai/lib/Util.d.ts new file mode 100644 index 0000000..e68f2b6 --- /dev/null +++ b/node_modules/openai/lib/Util.d.ts @@ -0,0 +1,5 @@ +/** + * Like `Promise.allSettled()` but throws an error if any promises are rejected. + */ +export declare const allSettledWithThrow: (promises: Promise[]) => Promise; +//# sourceMappingURL=Util.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/lib/Util.d.ts.map b/node_modules/openai/lib/Util.d.ts.map new file mode 100644 index 0000000..527ed19 --- /dev/null +++ b/node_modules/openai/lib/Util.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"Util.d.ts","sourceRoot":"","sources":["../src/lib/Util.ts"],"names":[],"mappings":"AAAA;;GAEG;AACH,eAAO,MAAM,mBAAmB,6CAmB/B,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/lib/Util.js b/node_modules/openai/lib/Util.js new file mode 100644 index 0000000..cf581f7 --- /dev/null +++ b/node_modules/openai/lib/Util.js @@ -0,0 +1,26 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.allSettledWithThrow = void 0; +/** + * Like `Promise.allSettled()` but throws an error if any promises are rejected. 
+ */ +const allSettledWithThrow = async (promises) => { + const results = await Promise.allSettled(promises); + const rejected = results.filter((result) => result.status === 'rejected'); + if (rejected.length) { + for (const result of rejected) { + console.error(result.reason); + } + throw new Error(`${rejected.length} promise(s) failed - see the above errors`); + } + // Note: TS was complaining about using `.filter().map()` here for some reason + const values = []; + for (const result of results) { + if (result.status === 'fulfilled') { + values.push(result.value); + } + } + return values; +}; +exports.allSettledWithThrow = allSettledWithThrow; +//# sourceMappingURL=Util.js.map \ No newline at end of file diff --git a/node_modules/openai/lib/Util.js.map b/node_modules/openai/lib/Util.js.map new file mode 100644 index 0000000..6be7373 --- /dev/null +++ b/node_modules/openai/lib/Util.js.map @@ -0,0 +1 @@ +{"version":3,"file":"Util.js","sourceRoot":"","sources":["../src/lib/Util.ts"],"names":[],"mappings":";;;AAAA;;GAEG;AACI,MAAM,mBAAmB,GAAG,KAAK,EAAK,QAAsB,EAAgB,EAAE;IACnF,MAAM,OAAO,GAAG,MAAM,OAAO,CAAC,UAAU,CAAC,QAAQ,CAAC,CAAC;IACnD,MAAM,QAAQ,GAAG,OAAO,CAAC,MAAM,CAAC,CAAC,MAAM,EAAmC,EAAE,CAAC,MAAM,CAAC,MAAM,KAAK,UAAU,CAAC,CAAC;IAC3G,IAAI,QAAQ,CAAC,MAAM,EAAE;QACnB,KAAK,MAAM,MAAM,IAAI,QAAQ,EAAE;YAC7B,OAAO,CAAC,KAAK,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC;SAC9B;QAED,MAAM,IAAI,KAAK,CAAC,GAAG,QAAQ,CAAC,MAAM,2CAA2C,CAAC,CAAC;KAChF;IAED,8EAA8E;IAC9E,MAAM,MAAM,GAAQ,EAAE,CAAC;IACvB,KAAK,MAAM,MAAM,IAAI,OAAO,EAAE;QAC5B,IAAI,MAAM,CAAC,MAAM,KAAK,WAAW,EAAE;YACjC,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;SAC3B;KACF;IACD,OAAO,MAAM,CAAC;AAChB,CAAC,CAAC;AAnBW,QAAA,mBAAmB,uBAmB9B"} \ No newline at end of file diff --git a/node_modules/openai/lib/Util.mjs b/node_modules/openai/lib/Util.mjs new file mode 100644 index 0000000..845a138 --- /dev/null +++ b/node_modules/openai/lib/Util.mjs @@ -0,0 +1,22 @@ +/** + * Like `Promise.allSettled()` but throws an error if any promises are rejected. 
+ */ +export const allSettledWithThrow = async (promises) => { + const results = await Promise.allSettled(promises); + const rejected = results.filter((result) => result.status === 'rejected'); + if (rejected.length) { + for (const result of rejected) { + console.error(result.reason); + } + throw new Error(`${rejected.length} promise(s) failed - see the above errors`); + } + // Note: TS was complaining about using `.filter().map()` here for some reason + const values = []; + for (const result of results) { + if (result.status === 'fulfilled') { + values.push(result.value); + } + } + return values; +}; +//# sourceMappingURL=Util.mjs.map \ No newline at end of file diff --git a/node_modules/openai/lib/Util.mjs.map b/node_modules/openai/lib/Util.mjs.map new file mode 100644 index 0000000..ca770c6 --- /dev/null +++ b/node_modules/openai/lib/Util.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"Util.mjs","sourceRoot":"","sources":["../src/lib/Util.ts"],"names":[],"mappings":"AAAA;;GAEG;AACH,MAAM,CAAC,MAAM,mBAAmB,GAAG,KAAK,EAAK,QAAsB,EAAgB,EAAE;IACnF,MAAM,OAAO,GAAG,MAAM,OAAO,CAAC,UAAU,CAAC,QAAQ,CAAC,CAAC;IACnD,MAAM,QAAQ,GAAG,OAAO,CAAC,MAAM,CAAC,CAAC,MAAM,EAAmC,EAAE,CAAC,MAAM,CAAC,MAAM,KAAK,UAAU,CAAC,CAAC;IAC3G,IAAI,QAAQ,CAAC,MAAM,EAAE;QACnB,KAAK,MAAM,MAAM,IAAI,QAAQ,EAAE;YAC7B,OAAO,CAAC,KAAK,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC;SAC9B;QAED,MAAM,IAAI,KAAK,CAAC,GAAG,QAAQ,CAAC,MAAM,2CAA2C,CAAC,CAAC;KAChF;IAED,8EAA8E;IAC9E,MAAM,MAAM,GAAQ,EAAE,CAAC;IACvB,KAAK,MAAM,MAAM,IAAI,OAAO,EAAE;QAC5B,IAAI,MAAM,CAAC,MAAM,KAAK,WAAW,EAAE;YACjC,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;SAC3B;KACF;IACD,OAAO,MAAM,CAAC;AAChB,CAAC,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/lib/chatCompletionUtils.d.ts b/node_modules/openai/lib/chatCompletionUtils.d.ts new file mode 100644 index 0000000..dd86d34 --- /dev/null +++ b/node_modules/openai/lib/chatCompletionUtils.d.ts @@ -0,0 +1,6 @@ +import { type ChatCompletionAssistantMessageParam, type ChatCompletionFunctionMessageParam, type ChatCompletionMessageParam, type ChatCompletionToolMessageParam } from "../resources.js"; +export declare const isAssistantMessage: (message: ChatCompletionMessageParam | null | undefined) => message is ChatCompletionAssistantMessageParam; +export declare const isFunctionMessage: (message: ChatCompletionMessageParam | null | undefined) => message is ChatCompletionFunctionMessageParam; +export declare const isToolMessage: (message: ChatCompletionMessageParam | null | undefined) => message is ChatCompletionToolMessageParam; +export declare function isPresent(obj: T | null | undefined): obj is T; +//# sourceMappingURL=chatCompletionUtils.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/lib/chatCompletionUtils.d.ts.map b/node_modules/openai/lib/chatCompletionUtils.d.ts.map new file mode 100644 index 0000000..b6d46e2 --- /dev/null +++ b/node_modules/openai/lib/chatCompletionUtils.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"chatCompletionUtils.d.ts","sourceRoot":"","sources":["../src/lib/chatCompletionUtils.ts"],"names":[],"mappings":"AAAA,OAAO,EACL,KAAK,mCAAmC,EACxC,KAAK,kCAAkC,EACvC,KAAK,0BAA0B,EAC/B,KAAK,8BAA8B,EACpC,MAAM,cAAc,CAAC;AAEtB,eAAO,MAAM,kBAAkB,YACpB,0BAA0B,GAAG,IAAI,GAAG,SAAS,mDAGvD,CAAC;AAEF,eAAO,MAAM,iBAAiB,YACnB,0BAA0B,GAAG,IAAI,GAAG,SAAS,kDAGvD,CAAC;AAEF,eAAO,MAAM,aAAa,YACf,0BAA0B,GAAG,IAAI,GAAG,SAAS,8CAGvD,CAAC;AAEF,wBAAgB,SAAS,CAAC,CAAC,EAAE,GAAG,EAAE,CAAC,GAAG,IAAI,GAAG,SAAS,GAAG,GAAG,IAAI,CAAC,CAEhE"} \ No newline at end of file diff --git a/node_modules/openai/lib/chatCompletionUtils.js 
b/node_modules/openai/lib/chatCompletionUtils.js new file mode 100644 index 0000000..911545f --- /dev/null +++ b/node_modules/openai/lib/chatCompletionUtils.js @@ -0,0 +1,20 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.isPresent = exports.isToolMessage = exports.isFunctionMessage = exports.isAssistantMessage = void 0; +const isAssistantMessage = (message) => { + return message?.role === 'assistant'; +}; +exports.isAssistantMessage = isAssistantMessage; +const isFunctionMessage = (message) => { + return message?.role === 'function'; +}; +exports.isFunctionMessage = isFunctionMessage; +const isToolMessage = (message) => { + return message?.role === 'tool'; +}; +exports.isToolMessage = isToolMessage; +function isPresent(obj) { + return obj != null; +} +exports.isPresent = isPresent; +//# sourceMappingURL=chatCompletionUtils.js.map \ No newline at end of file diff --git a/node_modules/openai/lib/chatCompletionUtils.js.map b/node_modules/openai/lib/chatCompletionUtils.js.map new file mode 100644 index 0000000..574356f --- /dev/null +++ b/node_modules/openai/lib/chatCompletionUtils.js.map @@ -0,0 +1 @@ +{"version":3,"file":"chatCompletionUtils.js","sourceRoot":"","sources":["../src/lib/chatCompletionUtils.ts"],"names":[],"mappings":";;;AAOO,MAAM,kBAAkB,GAAG,CAChC,OAAsD,EACN,EAAE;IAClD,OAAO,OAAO,EAAE,IAAI,KAAK,WAAW,CAAC;AACvC,CAAC,CAAC;AAJW,QAAA,kBAAkB,sBAI7B;AAEK,MAAM,iBAAiB,GAAG,CAC/B,OAAsD,EACP,EAAE;IACjD,OAAO,OAAO,EAAE,IAAI,KAAK,UAAU,CAAC;AACtC,CAAC,CAAC;AAJW,QAAA,iBAAiB,qBAI5B;AAEK,MAAM,aAAa,GAAG,CAC3B,OAAsD,EACX,EAAE;IAC7C,OAAO,OAAO,EAAE,IAAI,KAAK,MAAM,CAAC;AAClC,CAAC,CAAC;AAJW,QAAA,aAAa,iBAIxB;AAEF,SAAgB,SAAS,CAAI,GAAyB;IACpD,OAAO,GAAG,IAAI,IAAI,CAAC;AACrB,CAAC;AAFD,8BAEC"} \ No newline at end of file diff --git a/node_modules/openai/lib/chatCompletionUtils.mjs b/node_modules/openai/lib/chatCompletionUtils.mjs new file mode 100644 index 0000000..b283429 --- /dev/null +++ b/node_modules/openai/lib/chatCompletionUtils.mjs @@ -0,0 +1,13 @@ +export const isAssistantMessage = (message) => { + return message?.role === 'assistant'; +}; +export const isFunctionMessage = (message) => { + return message?.role === 'function'; +}; +export const isToolMessage = (message) => { + return message?.role === 'tool'; +}; +export function isPresent(obj) { + return obj != null; +} +//# sourceMappingURL=chatCompletionUtils.mjs.map \ No newline at end of file diff --git a/node_modules/openai/lib/chatCompletionUtils.mjs.map b/node_modules/openai/lib/chatCompletionUtils.mjs.map new file mode 100644 index 0000000..00e60f6 --- /dev/null +++ b/node_modules/openai/lib/chatCompletionUtils.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"chatCompletionUtils.mjs","sourceRoot":"","sources":["../src/lib/chatCompletionUtils.ts"],"names":[],"mappings":"AAOA,MAAM,CAAC,MAAM,kBAAkB,GAAG,CAChC,OAAsD,EACN,EAAE;IAClD,OAAO,OAAO,EAAE,IAAI,KAAK,WAAW,CAAC;AACvC,CAAC,CAAC;AAEF,MAAM,CAAC,MAAM,iBAAiB,GAAG,CAC/B,OAAsD,EACP,EAAE;IACjD,OAAO,OAAO,EAAE,IAAI,KAAK,UAAU,CAAC;AACtC,CAAC,CAAC;AAEF,MAAM,CAAC,MAAM,aAAa,GAAG,CAC3B,OAAsD,EACX,EAAE;IAC7C,OAAO,OAAO,EAAE,IAAI,KAAK,MAAM,CAAC;AAClC,CAAC,CAAC;AAEF,MAAM,UAAU,SAAS,CAAI,GAAyB;IACpD,OAAO,GAAG,IAAI,IAAI,CAAC;AACrB,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/lib/jsonschema.d.ts b/node_modules/openai/lib/jsonschema.d.ts new file mode 100644 index 0000000..0a569e9 --- /dev/null +++ b/node_modules/openai/lib/jsonschema.d.ts @@ -0,0 +1,106 @@ +/** + * Primitive type + * @see 
https://tools.ietf.org/html/draft-handrews-json-schema-validation-01#section-6.1.1 + */ +export type JSONSchemaTypeName = ({} & string) | 'string' | 'number' | 'integer' | 'boolean' | 'object' | 'array' | 'null'; +/** + * Primitive type + * @see https://tools.ietf.org/html/draft-handrews-json-schema-validation-01#section-6.1.1 + */ +export type JSONSchemaType = string | number | boolean | JSONSchemaObject | JSONSchemaArray | null; +export interface JSONSchemaObject { + [key: string]: JSONSchemaType; +} +export interface JSONSchemaArray extends Array { +} +/** + * Meta schema + * + * Recommended values: + * - 'http://json-schema.org/schema#' + * - 'http://json-schema.org/hyper-schema#' + * - 'http://json-schema.org/draft-07/schema#' + * - 'http://json-schema.org/draft-07/hyper-schema#' + * + * @see https://tools.ietf.org/html/draft-handrews-json-schema-validation-01#section-5 + */ +export type JSONSchemaVersion = string; +/** + * JSON Schema v7 + * @see https://tools.ietf.org/html/draft-handrews-json-schema-validation-01 + */ +export type JSONSchemaDefinition = JSONSchema | boolean; +export interface JSONSchema { + $id?: string | undefined; + $comment?: string | undefined; + /** + * @see https://tools.ietf.org/html/draft-handrews-json-schema-validation-01#section-6.1 + */ + type?: JSONSchemaTypeName | JSONSchemaTypeName[] | undefined; + enum?: JSONSchemaType[] | undefined; + const?: JSONSchemaType | undefined; + /** + * @see https://tools.ietf.org/html/draft-handrews-json-schema-validation-01#section-6.2 + */ + multipleOf?: number | undefined; + maximum?: number | undefined; + exclusiveMaximum?: number | undefined; + minimum?: number | undefined; + exclusiveMinimum?: number | undefined; + /** + * @see https://tools.ietf.org/html/draft-handrews-json-schema-validation-01#section-6.3 + */ + maxLength?: number | undefined; + minLength?: number | undefined; + pattern?: string | undefined; + /** + * @see https://tools.ietf.org/html/draft-handrews-json-schema-validation-01#section-6.4 + */ + items?: JSONSchemaDefinition | JSONSchemaDefinition[] | undefined; + additionalItems?: JSONSchemaDefinition | undefined; + maxItems?: number | undefined; + minItems?: number | undefined; + uniqueItems?: boolean | undefined; + contains?: JSONSchemaDefinition | undefined; + /** + * @see https://tools.ietf.org/html/draft-handrews-json-schema-validation-01#section-6.5 + */ + maxProperties?: number | undefined; + minProperties?: number | undefined; + required?: string[] | undefined; + properties?: { + [key: string]: JSONSchemaDefinition; + } | undefined; + patternProperties?: { + [key: string]: JSONSchemaDefinition; + } | undefined; + additionalProperties?: JSONSchemaDefinition | undefined; + propertyNames?: JSONSchemaDefinition | undefined; + /** + * @see https://tools.ietf.org/html/draft-handrews-json-schema-validation-01#section-6.6 + */ + if?: JSONSchemaDefinition | undefined; + then?: JSONSchemaDefinition | undefined; + else?: JSONSchemaDefinition | undefined; + /** + * @see https://tools.ietf.org/html/draft-handrews-json-schema-validation-01#section-6.7 + */ + allOf?: JSONSchemaDefinition[] | undefined; + anyOf?: JSONSchemaDefinition[] | undefined; + oneOf?: JSONSchemaDefinition[] | undefined; + not?: JSONSchemaDefinition | undefined; + /** + * @see https://tools.ietf.org/html/draft-handrews-json-schema-validation-01#section-7 + */ + format?: string | undefined; + /** + * @see https://tools.ietf.org/html/draft-handrews-json-schema-validation-01#section-10 + */ + title?: string | undefined; + description?: 
string | undefined; + default?: JSONSchemaType | undefined; + readOnly?: boolean | undefined; + writeOnly?: boolean | undefined; + examples?: JSONSchemaType | undefined; +} +//# sourceMappingURL=jsonschema.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/lib/jsonschema.d.ts.map b/node_modules/openai/lib/jsonschema.d.ts.map new file mode 100644 index 0000000..6e5c623 --- /dev/null +++ b/node_modules/openai/lib/jsonschema.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"jsonschema.d.ts","sourceRoot":"","sources":["../src/lib/jsonschema.ts"],"names":[],"mappings":"AASA;;;GAGG;AACH,MAAM,MAAM,kBAAkB,GAC1B,CAAC,EAAE,GAAG,MAAM,CAAC,GACb,QAAQ,GACR,QAAQ,GACR,SAAS,GACT,SAAS,GACT,QAAQ,GACR,OAAO,GACP,MAAM,CAAC;AAEX;;;GAGG;AACH,MAAM,MAAM,cAAc,GACtB,MAAM,GACN,MAAM,GACN,OAAO,GACP,gBAAgB,GAChB,eAAe,GACf,IAAI,CAAC;AAGT,MAAM,WAAW,gBAAgB;IAC/B,CAAC,GAAG,EAAE,MAAM,GAAG,cAAc,CAAC;CAC/B;AAID,MAAM,WAAW,eAAgB,SAAQ,KAAK,CAAC,cAAc,CAAC;CAAG;AAEjE;;;;;;;;;;GAUG;AACH,MAAM,MAAM,iBAAiB,GAAG,MAAM,CAAC;AAEvC;;;GAGG;AACH,MAAM,MAAM,oBAAoB,GAAG,UAAU,GAAG,OAAO,CAAC;AACxD,MAAM,WAAW,UAAU;IACzB,GAAG,CAAC,EAAE,MAAM,GAAG,SAAS,CAAC;IACzB,QAAQ,CAAC,EAAE,MAAM,GAAG,SAAS,CAAC;IAE9B;;OAEG;IACH,IAAI,CAAC,EAAE,kBAAkB,GAAG,kBAAkB,EAAE,GAAG,SAAS,CAAC;IAC7D,IAAI,CAAC,EAAE,cAAc,EAAE,GAAG,SAAS,CAAC;IACpC,KAAK,CAAC,EAAE,cAAc,GAAG,SAAS,CAAC;IAEnC;;OAEG;IACH,UAAU,CAAC,EAAE,MAAM,GAAG,SAAS,CAAC;IAChC,OAAO,CAAC,EAAE,MAAM,GAAG,SAAS,CAAC;IAC7B,gBAAgB,CAAC,EAAE,MAAM,GAAG,SAAS,CAAC;IACtC,OAAO,CAAC,EAAE,MAAM,GAAG,SAAS,CAAC;IAC7B,gBAAgB,CAAC,EAAE,MAAM,GAAG,SAAS,CAAC;IAEtC;;OAEG;IACH,SAAS,CAAC,EAAE,MAAM,GAAG,SAAS,CAAC;IAC/B,SAAS,CAAC,EAAE,MAAM,GAAG,SAAS,CAAC;IAC/B,OAAO,CAAC,EAAE,MAAM,GAAG,SAAS,CAAC;IAE7B;;OAEG;IACH,KAAK,CAAC,EAAE,oBAAoB,GAAG,oBAAoB,EAAE,GAAG,SAAS,CAAC;IAClE,eAAe,CAAC,EAAE,oBAAoB,GAAG,SAAS,CAAC;IACnD,QAAQ,CAAC,EAAE,MAAM,GAAG,SAAS,CAAC;IAC9B,QAAQ,CAAC,EAAE,MAAM,GAAG,SAAS,CAAC;IAC9B,WAAW,CAAC,EAAE,OAAO,GAAG,SAAS,CAAC;IAClC,QAAQ,CAAC,EAAE,oBAAoB,GAAG,SAAS,CAAC;IAE5C;;OAEG;IACH,aAAa,CAAC,EAAE,MAAM,GAAG,SAAS,CAAC;IACnC,aAAa,CAAC,EAAE,MAAM,GAAG,SAAS,CAAC;IACnC,QAAQ,CAAC,EAAE,MAAM,EAAE,GAAG,SAAS,CAAC;IAChC,UAAU,CAAC,EACP;QACE,CAAC,GAAG,EAAE,MAAM,GAAG,oBAAoB,CAAC;KACrC,GACD,SAAS,CAAC;IACd,iBAAiB,CAAC,EACd;QACE,CAAC,GAAG,EAAE,MAAM,GAAG,oBAAoB,CAAC;KACrC,GACD,SAAS,CAAC;IACd,oBAAoB,CAAC,EAAE,oBAAoB,GAAG,SAAS,CAAC;IACxD,aAAa,CAAC,EAAE,oBAAoB,GAAG,SAAS,CAAC;IAEjD;;OAEG;IACH,EAAE,CAAC,EAAE,oBAAoB,GAAG,SAAS,CAAC;IACtC,IAAI,CAAC,EAAE,oBAAoB,GAAG,SAAS,CAAC;IACxC,IAAI,CAAC,EAAE,oBAAoB,GAAG,SAAS,CAAC;IAExC;;OAEG;IACH,KAAK,CAAC,EAAE,oBAAoB,EAAE,GAAG,SAAS,CAAC;IAC3C,KAAK,CAAC,EAAE,oBAAoB,EAAE,GAAG,SAAS,CAAC;IAC3C,KAAK,CAAC,EAAE,oBAAoB,EAAE,GAAG,SAAS,CAAC;IAC3C,GAAG,CAAC,EAAE,oBAAoB,GAAG,SAAS,CAAC;IAEvC;;OAEG;IACH,MAAM,CAAC,EAAE,MAAM,GAAG,SAAS,CAAC;IAE5B;;OAEG;IACH,KAAK,CAAC,EAAE,MAAM,GAAG,SAAS,CAAC;IAC3B,WAAW,CAAC,EAAE,MAAM,GAAG,SAAS,CAAC;IACjC,OAAO,CAAC,EAAE,cAAc,GAAG,SAAS,CAAC;IACrC,QAAQ,CAAC,EAAE,OAAO,GAAG,SAAS,CAAC;IAC/B,SAAS,CAAC,EAAE,OAAO,GAAG,SAAS,CAAC;IAChC,QAAQ,CAAC,EAAE,cAAc,GAAG,SAAS,CAAC;CACvC"} \ No newline at end of file diff --git a/node_modules/openai/lib/jsonschema.js b/node_modules/openai/lib/jsonschema.js new file mode 100644 index 0000000..bf2a34e --- /dev/null +++ b/node_modules/openai/lib/jsonschema.js @@ -0,0 +1,11 @@ +"use strict"; +// File mostly copied from @types/json-schema, but stripped down a bit for brevity +// https://github.com/DefinitelyTyped/DefinitelyTyped/blob/817274f3280152ba2929a6067c93df8b34c4c9aa/types/json-schema/index.d.ts +// +// 
================================================================================================== +// JSON Schema Draft 07 +// ================================================================================================== +// https://tools.ietf.org/html/draft-handrews-json-schema-validation-01 +// -------------------------------------------------------------------------------------------------- +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=jsonschema.js.map \ No newline at end of file diff --git a/node_modules/openai/lib/jsonschema.js.map b/node_modules/openai/lib/jsonschema.js.map new file mode 100644 index 0000000..30d08a7 --- /dev/null +++ b/node_modules/openai/lib/jsonschema.js.map @@ -0,0 +1 @@ +{"version":3,"file":"jsonschema.js","sourceRoot":"","sources":["../src/lib/jsonschema.ts"],"names":[],"mappings":";AAAA,kFAAkF;AAClF,gIAAgI;AAChI,EAAE;AACF,qGAAqG;AACrG,uBAAuB;AACvB,qGAAqG;AACrG,uEAAuE;AACvE,qGAAqG"} \ No newline at end of file diff --git a/node_modules/openai/lib/jsonschema.mjs b/node_modules/openai/lib/jsonschema.mjs new file mode 100644 index 0000000..047481b --- /dev/null +++ b/node_modules/openai/lib/jsonschema.mjs @@ -0,0 +1,10 @@ +// File mostly copied from @types/json-schema, but stripped down a bit for brevity +// https://github.com/DefinitelyTyped/DefinitelyTyped/blob/817274f3280152ba2929a6067c93df8b34c4c9aa/types/json-schema/index.d.ts +// +// ================================================================================================== +// JSON Schema Draft 07 +// ================================================================================================== +// https://tools.ietf.org/html/draft-handrews-json-schema-validation-01 +// -------------------------------------------------------------------------------------------------- +export {}; +//# sourceMappingURL=jsonschema.mjs.map \ No newline at end of file diff --git a/node_modules/openai/lib/jsonschema.mjs.map b/node_modules/openai/lib/jsonschema.mjs.map new file mode 100644 index 0000000..4603e80 --- /dev/null +++ b/node_modules/openai/lib/jsonschema.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"jsonschema.mjs","sourceRoot":"","sources":["../src/lib/jsonschema.ts"],"names":[],"mappings":"AAAA,kFAAkF;AAClF,gIAAgI;AAChI,EAAE;AACF,qGAAqG;AACrG,uBAAuB;AACvB,qGAAqG;AACrG,uEAAuE;AACvE,qGAAqG"} \ No newline at end of file diff --git a/node_modules/openai/lib/parser.d.ts b/node_modules/openai/lib/parser.d.ts new file mode 100644 index 0000000..15f6753 --- /dev/null +++ b/node_modules/openai/lib/parser.d.ts @@ -0,0 +1,37 @@ +import { ChatCompletion, ChatCompletionCreateParams, ChatCompletionMessageToolCall, ChatCompletionTool } from "../resources/chat/completions.js"; +import { ChatCompletionStreamingToolRunnerParams, ChatCompletionStreamParams, ChatCompletionToolRunnerParams, ParsedChatCompletion } from "../resources/beta/chat/completions.js"; +import { ResponseFormatJSONSchema } from "../resources/shared.js"; +type AnyChatCompletionCreateParams = ChatCompletionCreateParams | ChatCompletionToolRunnerParams | ChatCompletionStreamingToolRunnerParams | ChatCompletionStreamParams; +export type ExtractParsedContentFromParams = Params['response_format'] extends AutoParseableResponseFormat ? 
P : null; +export type AutoParseableResponseFormat = ResponseFormatJSONSchema & { + __output: ParsedT; + $brand: 'auto-parseable-response-format'; + $parseRaw(content: string): ParsedT; +}; +export declare function makeParseableResponseFormat(response_format: ResponseFormatJSONSchema, parser: (content: string) => ParsedT): AutoParseableResponseFormat; +export declare function isAutoParsableResponseFormat(response_format: any): response_format is AutoParseableResponseFormat; +type ToolOptions = { + name: string; + arguments: any; + function?: ((args: any) => any) | undefined; +}; +export type AutoParseableTool = ChatCompletionTool & { + __arguments: OptionsT['arguments']; + __name: OptionsT['name']; + __hasFunction: HasFunction; + $brand: 'auto-parseable-tool'; + $callback: ((args: OptionsT['arguments']) => any) | undefined; + $parseRaw(args: string): OptionsT['arguments']; +}; +export declare function makeParseableTool(tool: ChatCompletionTool, { parser, callback, }: { + parser: (content: string) => OptionsT['arguments']; + callback: ((args: any) => any) | undefined; +}): AutoParseableTool; +export declare function isAutoParsableTool(tool: any): tool is AutoParseableTool; +export declare function maybeParseChatCompletion>>(completion: ChatCompletion, params: Params): ParsedChatCompletion; +export declare function parseChatCompletion>(completion: ChatCompletion, params: Params): ParsedChatCompletion; +export declare function shouldParseToolCall(params: ChatCompletionCreateParams | null | undefined, toolCall: ChatCompletionMessageToolCall): boolean; +export declare function hasAutoParseableInput(params: AnyChatCompletionCreateParams): boolean; +export declare function validateInputTools(tools: ChatCompletionTool[] | undefined): void; +export {}; +//# sourceMappingURL=parser.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/lib/parser.d.ts.map b/node_modules/openai/lib/parser.d.ts.map new file mode 100644 index 0000000..b997b75 --- /dev/null +++ b/node_modules/openai/lib/parser.d.ts.map @@ -0,0 +1 @@ 
+{"version":3,"file":"parser.d.ts","sourceRoot":"","sources":["../src/lib/parser.ts"],"names":[],"mappings":"AAAA,OAAO,EACL,cAAc,EACd,0BAA0B,EAC1B,6BAA6B,EAC7B,kBAAkB,EACnB,MAAM,+BAA+B,CAAC;AACvC,OAAO,EACL,uCAAuC,EACvC,0BAA0B,EAC1B,8BAA8B,EAC9B,oBAAoB,EAGrB,MAAM,oCAAoC,CAAC;AAC5C,OAAO,EAAE,wBAAwB,EAAE,MAAM,qBAAqB,CAAC;AAG/D,KAAK,6BAA6B,GAC9B,0BAA0B,GAC1B,8BAA8B,CAAC,GAAG,CAAC,GACnC,uCAAuC,CAAC,GAAG,CAAC,GAC5C,0BAA0B,CAAC;AAE/B,MAAM,MAAM,8BAA8B,CAAC,MAAM,SAAS,6BAA6B,IACrF,MAAM,CAAC,iBAAiB,CAAC,SAAS,2BAA2B,CAAC,MAAM,CAAC,CAAC,GAAG,CAAC,GAAG,IAAI,CAAC;AAEpF,MAAM,MAAM,2BAA2B,CAAC,OAAO,IAAI,wBAAwB,GAAG;IAC5E,QAAQ,EAAE,OAAO,CAAC;IAElB,MAAM,EAAE,gCAAgC,CAAC;IACzC,SAAS,CAAC,OAAO,EAAE,MAAM,GAAG,OAAO,CAAC;CACrC,CAAC;AAEF,wBAAgB,2BAA2B,CAAC,OAAO,EACjD,eAAe,EAAE,wBAAwB,EACzC,MAAM,EAAE,CAAC,OAAO,EAAE,MAAM,KAAK,OAAO,GACnC,2BAA2B,CAAC,OAAO,CAAC,CAetC;AAED,wBAAgB,4BAA4B,CAAC,OAAO,EAClD,eAAe,EAAE,GAAG,GACnB,eAAe,IAAI,2BAA2B,CAAC,OAAO,CAAC,CAEzD;AAED,KAAK,WAAW,GAAG;IACjB,IAAI,EAAE,MAAM,CAAC;IACb,SAAS,EAAE,GAAG,CAAC;IACf,QAAQ,CAAC,EAAE,CAAC,CAAC,IAAI,EAAE,GAAG,KAAK,GAAG,CAAC,GAAG,SAAS,CAAC;CAC7C,CAAC;AAEF,MAAM,MAAM,iBAAiB,CAC3B,QAAQ,SAAS,WAAW,EAC5B,WAAW,GAAG,QAAQ,CAAC,UAAU,CAAC,SAAS,QAAQ,GAAG,IAAI,GAAG,KAAK,IAChE,kBAAkB,GAAG;IACvB,WAAW,EAAE,QAAQ,CAAC,WAAW,CAAC,CAAC;IACnC,MAAM,EAAE,QAAQ,CAAC,MAAM,CAAC,CAAC;IACzB,aAAa,EAAE,WAAW,CAAC;IAE3B,MAAM,EAAE,qBAAqB,CAAC;IAC9B,SAAS,EAAE,CAAC,CAAC,IAAI,EAAE,QAAQ,CAAC,WAAW,CAAC,KAAK,GAAG,CAAC,GAAG,SAAS,CAAC;IAC9D,SAAS,CAAC,IAAI,EAAE,MAAM,GAAG,QAAQ,CAAC,WAAW,CAAC,CAAC;CAChD,CAAC;AAEF,wBAAgB,iBAAiB,CAAC,QAAQ,SAAS,WAAW,EAC5D,IAAI,EAAE,kBAAkB,EACxB,EACE,MAAM,EACN,QAAQ,GACT,EAAE;IACD,MAAM,EAAE,CAAC,OAAO,EAAE,MAAM,KAAK,QAAQ,CAAC,WAAW,CAAC,CAAC;IACnD,QAAQ,EAAE,CAAC,CAAC,IAAI,EAAE,GAAG,KAAK,GAAG,CAAC,GAAG,SAAS,CAAC;CAC5C,GACA,iBAAiB,CAAC,QAAQ,CAAC,WAAW,CAAC,CAAC,CAmB1C;AAED,wBAAgB,kBAAkB,CAAC,IAAI,EAAE,GAAG,GAAG,IAAI,IAAI,iBAAiB,CAAC,GAAG,CAAC,CAE5E;AAED,wBAAgB,wBAAwB,CACtC,MAAM,SAAS,0BAA0B,GAAG,IAAI,EAChD,OAAO,GAAG,MAAM,SAAS,IAAI,GAAG,IAAI,GAAG,8BAA8B,CAAC,WAAW,CAAC,MAAM,CAAC,CAAC,EAC1F,UAAU,EAAE,cAAc,EAAE,MAAM,EAAE,MAAM,GAAG,oBAAoB,CAAC,OAAO,CAAC,CAY3E;AAED,wBAAgB,mBAAmB,CACjC,MAAM,SAAS,0BAA0B,EACzC,OAAO,GAAG,8BAA8B,CAAC,MAAM,CAAC,EAChD,UAAU,EAAE,cAAc,EAAE,MAAM,EAAE,MAAM,GAAG,oBAAoB,CAAC,OAAO,CAAC,CAwB3E;AAwCD,wBAAgB,mBAAmB,CACjC,MAAM,EAAE,0BAA0B,GAAG,IAAI,GAAG,SAAS,EACrD,QAAQ,EAAE,6BAA6B,GACtC,OAAO,CAOT;AAED,wBAAgB,qBAAqB,CAAC,MAAM,EAAE,6BAA6B,GAAG,OAAO,CAUpF;AAED,wBAAgB,kBAAkB,CAAC,KAAK,EAAE,kBAAkB,EAAE,GAAG,SAAS,QAczE"} \ No newline at end of file diff --git a/node_modules/openai/lib/parser.js b/node_modules/openai/lib/parser.js new file mode 100644 index 0000000..02f3731 --- /dev/null +++ b/node_modules/openai/lib/parser.js @@ -0,0 +1,133 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.validateInputTools = exports.hasAutoParseableInput = exports.shouldParseToolCall = exports.parseChatCompletion = exports.maybeParseChatCompletion = exports.isAutoParsableTool = exports.makeParseableTool = exports.isAutoParsableResponseFormat = exports.makeParseableResponseFormat = void 0; +const error_1 = require("../error.js"); +function makeParseableResponseFormat(response_format, parser) { + const obj = { ...response_format }; + Object.defineProperties(obj, { + $brand: { + value: 'auto-parseable-response-format', + enumerable: false, + }, + $parseRaw: { + value: parser, + enumerable: false, + }, + }); + return obj; +} +exports.makeParseableResponseFormat = makeParseableResponseFormat; +function isAutoParsableResponseFormat(response_format) { + 
return response_format?.['$brand'] === 'auto-parseable-response-format'; +} +exports.isAutoParsableResponseFormat = isAutoParsableResponseFormat; +function makeParseableTool(tool, { parser, callback, }) { + const obj = { ...tool }; + Object.defineProperties(obj, { + $brand: { + value: 'auto-parseable-tool', + enumerable: false, + }, + $parseRaw: { + value: parser, + enumerable: false, + }, + $callback: { + value: callback, + enumerable: false, + }, + }); + return obj; +} +exports.makeParseableTool = makeParseableTool; +function isAutoParsableTool(tool) { + return tool?.['$brand'] === 'auto-parseable-tool'; +} +exports.isAutoParsableTool = isAutoParsableTool; +function maybeParseChatCompletion(completion, params) { + if (!params || !hasAutoParseableInput(params)) { + return { + ...completion, + choices: completion.choices.map((choice) => ({ + ...choice, + message: { ...choice.message, parsed: null, tool_calls: choice.message.tool_calls ?? [] }, + })), + }; + } + return parseChatCompletion(completion, params); +} +exports.maybeParseChatCompletion = maybeParseChatCompletion; +function parseChatCompletion(completion, params) { + const choices = completion.choices.map((choice) => { + if (choice.finish_reason === 'length') { + throw new error_1.LengthFinishReasonError(); + } + if (choice.finish_reason === 'content_filter') { + throw new error_1.ContentFilterFinishReasonError(); + } + return { + ...choice, + message: { + ...choice.message, + tool_calls: choice.message.tool_calls?.map((toolCall) => parseToolCall(params, toolCall)) ?? [], + parsed: choice.message.content && !choice.message.refusal ? + parseResponseFormat(params, choice.message.content) + : null, + }, + }; + }); + return { ...completion, choices }; +} +exports.parseChatCompletion = parseChatCompletion; +function parseResponseFormat(params, content) { + if (params.response_format?.type !== 'json_schema') { + return null; + } + if (params.response_format?.type === 'json_schema') { + if ('$parseRaw' in params.response_format) { + const response_format = params.response_format; + return response_format.$parseRaw(content); + } + return JSON.parse(content); + } + return null; +} +function parseToolCall(params, toolCall) { + const inputTool = params.tools?.find((inputTool) => inputTool.function?.name === toolCall.function.name); + return { + ...toolCall, + function: { + ...toolCall.function, + parsed_arguments: isAutoParsableTool(inputTool) ? inputTool.$parseRaw(toolCall.function.arguments) + : inputTool?.function.strict ? JSON.parse(toolCall.function.arguments) + : null, + }, + }; +} +function shouldParseToolCall(params, toolCall) { + if (!params) { + return false; + } + const inputTool = params.tools?.find((inputTool) => inputTool.function?.name === toolCall.function.name); + return isAutoParsableTool(inputTool) || inputTool?.function.strict || false; +} +exports.shouldParseToolCall = shouldParseToolCall; +function hasAutoParseableInput(params) { + if (isAutoParsableResponseFormat(params.response_format)) { + return true; + } + return (params.tools?.some((t) => isAutoParsableTool(t) || (t.type === 'function' && t.function.strict === true)) ?? false); +} +exports.hasAutoParseableInput = hasAutoParseableInput; +function validateInputTools(tools) { + for (const tool of tools ?? 
[]) { + if (tool.type !== 'function') { + throw new error_1.OpenAIError(`Currently only \`function\` tool types support auto-parsing; Received \`${tool.type}\``); + } + if (tool.function.strict !== true) { + throw new error_1.OpenAIError(`The \`${tool.function.name}\` tool is not marked with \`strict: true\`. Only strict function tools can be auto-parsed`); + } + } +} +exports.validateInputTools = validateInputTools; +//# sourceMappingURL=parser.js.map \ No newline at end of file diff --git a/node_modules/openai/lib/parser.js.map b/node_modules/openai/lib/parser.js.map new file mode 100644 index 0000000..a788977 --- /dev/null +++ b/node_modules/openai/lib/parser.js.map @@ -0,0 +1 @@ +{"version":3,"file":"parser.js","sourceRoot":"","sources":["../src/lib/parser.ts"],"names":[],"mappings":";;;AAeA,uCAAgG;AAkBhG,SAAgB,2BAA2B,CACzC,eAAyC,EACzC,MAAoC;IAEpC,MAAM,GAAG,GAAG,EAAE,GAAG,eAAe,EAAE,CAAC;IAEnC,MAAM,CAAC,gBAAgB,CAAC,GAAG,EAAE;QAC3B,MAAM,EAAE;YACN,KAAK,EAAE,gCAAgC;YACvC,UAAU,EAAE,KAAK;SAClB;QACD,SAAS,EAAE;YACT,KAAK,EAAE,MAAM;YACb,UAAU,EAAE,KAAK;SAClB;KACF,CAAC,CAAC;IAEH,OAAO,GAA2C,CAAC;AACrD,CAAC;AAlBD,kEAkBC;AAED,SAAgB,4BAA4B,CAC1C,eAAoB;IAEpB,OAAO,eAAe,EAAE,CAAC,QAAQ,CAAC,KAAK,gCAAgC,CAAC;AAC1E,CAAC;AAJD,oEAIC;AAqBD,SAAgB,iBAAiB,CAC/B,IAAwB,EACxB,EACE,MAAM,EACN,QAAQ,GAIT;IAED,MAAM,GAAG,GAAG,EAAE,GAAG,IAAI,EAAE,CAAC;IAExB,MAAM,CAAC,gBAAgB,CAAC,GAAG,EAAE;QAC3B,MAAM,EAAE;YACN,KAAK,EAAE,qBAAqB;YAC5B,UAAU,EAAE,KAAK;SAClB;QACD,SAAS,EAAE;YACT,KAAK,EAAE,MAAM;YACb,UAAU,EAAE,KAAK;SAClB;QACD,SAAS,EAAE;YACT,KAAK,EAAE,QAAQ;YACf,UAAU,EAAE,KAAK;SAClB;KACF,CAAC,CAAC;IAEH,OAAO,GAA+C,CAAC;AACzD,CAAC;AA5BD,8CA4BC;AAED,SAAgB,kBAAkB,CAAC,IAAS;IAC1C,OAAO,IAAI,EAAE,CAAC,QAAQ,CAAC,KAAK,qBAAqB,CAAC;AACpD,CAAC;AAFD,gDAEC;AAED,SAAgB,wBAAwB,CAGtC,UAA0B,EAAE,MAAc;IAC1C,IAAI,CAAC,MAAM,IAAI,CAAC,qBAAqB,CAAC,MAAM,CAAC,EAAE;QAC7C,OAAO;YACL,GAAG,UAAU;YACb,OAAO,EAAE,UAAU,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,MAAM,EAAE,EAAE,CAAC,CAAC;gBAC3C,GAAG,MAAM;gBACT,OAAO,EAAE,EAAE,GAAG,MAAM,CAAC,OAAO,EAAE,MAAM,EAAE,IAAI,EAAE,UAAU,EAAE,MAAM,CAAC,OAAO,CAAC,UAAU,IAAI,EAAE,EAAE;aAC1F,CAAC,CAAC;SACJ,CAAC;KACH;IAED,OAAO,mBAAmB,CAAC,UAAU,EAAE,MAAM,CAAC,CAAC;AACjD,CAAC;AAfD,4DAeC;AAED,SAAgB,mBAAmB,CAGjC,UAA0B,EAAE,MAAc;IAC1C,MAAM,OAAO,GAAiC,UAAU,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,MAAM,EAAyB,EAAE;QACrG,IAAI,MAAM,CAAC,aAAa,KAAK,QAAQ,EAAE;YACrC,MAAM,IAAI,+BAAuB,EAAE,CAAC;SACrC;QAED,IAAI,MAAM,CAAC,aAAa,KAAK,gBAAgB,EAAE;YAC7C,MAAM,IAAI,sCAA8B,EAAE,CAAC;SAC5C;QAED,OAAO;YACL,GAAG,MAAM;YACT,OAAO,EAAE;gBACP,GAAG,MAAM,CAAC,OAAO;gBACjB,UAAU,EAAE,MAAM,CAAC,OAAO,CAAC,UAAU,EAAE,GAAG,CAAC,CAAC,QAAQ,EAAE,EAAE,CAAC,aAAa,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC,IAAI,EAAE;gBAC/F,MAAM,EACJ,MAAM,CAAC,OAAO,CAAC,OAAO,IAAI,CAAC,MAAM,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;oBACjD,mBAAmB,CAAC,MAAM,EAAE,MAAM,CAAC,OAAO,CAAC,OAAO,CAAC;oBACrD,CAAC,CAAC,IAAI;aACT;SACF,CAAC;IACJ,CAAC,CAAC,CAAC;IAEH,OAAO,EAAE,GAAG,UAAU,EAAE,OAAO,EAAE,CAAC;AACpC,CAAC;AA3BD,kDA2BC;AAED,SAAS,mBAAmB,CAG1B,MAAc,EAAE,OAAe;IAC/B,IAAI,MAAM,CAAC,eAAe,EAAE,IAAI,KAAK,aAAa,EAAE;QAClD,OAAO,IAAI,CAAC;KACb;IAED,IAAI,MAAM,CAAC,eAAe,EAAE,IAAI,KAAK,aAAa,EAAE;QAClD,IAAI,WAAW,IAAI,MAAM,CAAC,eAAe,EAAE;YACzC,MAAM,eAAe,GAAG,MAAM,CAAC,eAAuD,CAAC;YAEvF,OAAO,eAAe,CAAC,SAAS,CAAC,OAAO,CAAC,CAAC;SAC3C;QAED,OAAO,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;KAC5B;IAED,OAAO,IAAI,CAAC;AACd,CAAC;AAED,SAAS,aAAa,CACpB,MAAc,EACd,QAAuC;IAEvC,MAAM,SAAS,GAAG,MAAM,CAAC,KAAK,EAAE,IAAI,CAAC,CAAC,SAAS,EAAE,EAAE,CAAC,SAAS,CAAC,QAAQ,EAAE,IAAI,KAAK,QAAQ,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;IACzG,OAAO;QACL,GAAG,QAAQ;QACX,QAAQ,EAAE;YACR,GAAG,QAAQ,CAAC,QAAQ;YACpB,gBAAgB,EACd,kBAAkB,CAAC,S
AAS,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC,SAAS,CAAC,QAAQ,CAAC,QAAQ,CAAC,SAAS,CAAC;gBAChF,CAAC,CAAC,SAAS,EAAE,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,QAAQ,CAAC,QAAQ,CAAC,SAAS,CAAC;oBACtE,CAAC,CAAC,IAAI;SACT;KACF,CAAC;AACJ,CAAC;AAED,SAAgB,mBAAmB,CACjC,MAAqD,EACrD,QAAuC;IAEvC,IAAI,CAAC,MAAM,EAAE;QACX,OAAO,KAAK,CAAC;KACd;IAED,MAAM,SAAS,GAAG,MAAM,CAAC,KAAK,EAAE,IAAI,CAAC,CAAC,SAAS,EAAE,EAAE,CAAC,SAAS,CAAC,QAAQ,EAAE,IAAI,KAAK,QAAQ,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;IACzG,OAAO,kBAAkB,CAAC,SAAS,CAAC,IAAI,SAAS,EAAE,QAAQ,CAAC,MAAM,IAAI,KAAK,CAAC;AAC9E,CAAC;AAVD,kDAUC;AAED,SAAgB,qBAAqB,CAAC,MAAqC;IACzE,IAAI,4BAA4B,CAAC,MAAM,CAAC,eAAe,CAAC,EAAE;QACxD,OAAO,IAAI,CAAC;KACb;IAED,OAAO,CACL,MAAM,CAAC,KAAK,EAAE,IAAI,CAChB,CAAC,CAAC,EAAE,EAAE,CAAC,kBAAkB,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,KAAK,UAAU,IAAI,CAAC,CAAC,QAAQ,CAAC,MAAM,KAAK,IAAI,CAAC,CACtF,IAAI,KAAK,CACX,CAAC;AACJ,CAAC;AAVD,sDAUC;AAED,SAAgB,kBAAkB,CAAC,KAAuC;IACxE,KAAK,MAAM,IAAI,IAAI,KAAK,IAAI,EAAE,EAAE;QAC9B,IAAI,IAAI,CAAC,IAAI,KAAK,UAAU,EAAE;YAC5B,MAAM,IAAI,mBAAW,CACnB,2EAA2E,IAAI,CAAC,IAAI,IAAI,CACzF,CAAC;SACH;QAED,IAAI,IAAI,CAAC,QAAQ,CAAC,MAAM,KAAK,IAAI,EAAE;YACjC,MAAM,IAAI,mBAAW,CACnB,SAAS,IAAI,CAAC,QAAQ,CAAC,IAAI,4FAA4F,CACxH,CAAC;SACH;KACF;AACH,CAAC;AAdD,gDAcC"} \ No newline at end of file diff --git a/node_modules/openai/lib/parser.mjs b/node_modules/openai/lib/parser.mjs new file mode 100644 index 0000000..2b23676 --- /dev/null +++ b/node_modules/openai/lib/parser.mjs @@ -0,0 +1,121 @@ +import { ContentFilterFinishReasonError, LengthFinishReasonError, OpenAIError } from "../error.mjs"; +export function makeParseableResponseFormat(response_format, parser) { + const obj = { ...response_format }; + Object.defineProperties(obj, { + $brand: { + value: 'auto-parseable-response-format', + enumerable: false, + }, + $parseRaw: { + value: parser, + enumerable: false, + }, + }); + return obj; +} +export function isAutoParsableResponseFormat(response_format) { + return response_format?.['$brand'] === 'auto-parseable-response-format'; +} +export function makeParseableTool(tool, { parser, callback, }) { + const obj = { ...tool }; + Object.defineProperties(obj, { + $brand: { + value: 'auto-parseable-tool', + enumerable: false, + }, + $parseRaw: { + value: parser, + enumerable: false, + }, + $callback: { + value: callback, + enumerable: false, + }, + }); + return obj; +} +export function isAutoParsableTool(tool) { + return tool?.['$brand'] === 'auto-parseable-tool'; +} +export function maybeParseChatCompletion(completion, params) { + if (!params || !hasAutoParseableInput(params)) { + return { + ...completion, + choices: completion.choices.map((choice) => ({ + ...choice, + message: { ...choice.message, parsed: null, tool_calls: choice.message.tool_calls ?? [] }, + })), + }; + } + return parseChatCompletion(completion, params); +} +export function parseChatCompletion(completion, params) { + const choices = completion.choices.map((choice) => { + if (choice.finish_reason === 'length') { + throw new LengthFinishReasonError(); + } + if (choice.finish_reason === 'content_filter') { + throw new ContentFilterFinishReasonError(); + } + return { + ...choice, + message: { + ...choice.message, + tool_calls: choice.message.tool_calls?.map((toolCall) => parseToolCall(params, toolCall)) ?? [], + parsed: choice.message.content && !choice.message.refusal ? 
+ parseResponseFormat(params, choice.message.content) + : null, + }, + }; + }); + return { ...completion, choices }; +} +function parseResponseFormat(params, content) { + if (params.response_format?.type !== 'json_schema') { + return null; + } + if (params.response_format?.type === 'json_schema') { + if ('$parseRaw' in params.response_format) { + const response_format = params.response_format; + return response_format.$parseRaw(content); + } + return JSON.parse(content); + } + return null; +} +function parseToolCall(params, toolCall) { + const inputTool = params.tools?.find((inputTool) => inputTool.function?.name === toolCall.function.name); + return { + ...toolCall, + function: { + ...toolCall.function, + parsed_arguments: isAutoParsableTool(inputTool) ? inputTool.$parseRaw(toolCall.function.arguments) + : inputTool?.function.strict ? JSON.parse(toolCall.function.arguments) + : null, + }, + }; +} +export function shouldParseToolCall(params, toolCall) { + if (!params) { + return false; + } + const inputTool = params.tools?.find((inputTool) => inputTool.function?.name === toolCall.function.name); + return isAutoParsableTool(inputTool) || inputTool?.function.strict || false; +} +export function hasAutoParseableInput(params) { + if (isAutoParsableResponseFormat(params.response_format)) { + return true; + } + return (params.tools?.some((t) => isAutoParsableTool(t) || (t.type === 'function' && t.function.strict === true)) ?? false); +} +export function validateInputTools(tools) { + for (const tool of tools ?? []) { + if (tool.type !== 'function') { + throw new OpenAIError(`Currently only \`function\` tool types support auto-parsing; Received \`${tool.type}\``); + } + if (tool.function.strict !== true) { + throw new OpenAIError(`The \`${tool.function.name}\` tool is not marked with \`strict: true\`. 
Only strict function tools can be auto-parsed`); + } + } +} +//# sourceMappingURL=parser.mjs.map \ No newline at end of file diff --git a/node_modules/openai/lib/parser.mjs.map b/node_modules/openai/lib/parser.mjs.map new file mode 100644 index 0000000..2e67cf7 --- /dev/null +++ b/node_modules/openai/lib/parser.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"parser.mjs","sourceRoot":"","sources":["../src/lib/parser.ts"],"names":[],"mappings":"OAeO,EAAE,8BAA8B,EAAE,uBAAuB,EAAE,WAAW,EAAE;AAkB/E,MAAM,UAAU,2BAA2B,CACzC,eAAyC,EACzC,MAAoC;IAEpC,MAAM,GAAG,GAAG,EAAE,GAAG,eAAe,EAAE,CAAC;IAEnC,MAAM,CAAC,gBAAgB,CAAC,GAAG,EAAE;QAC3B,MAAM,EAAE;YACN,KAAK,EAAE,gCAAgC;YACvC,UAAU,EAAE,KAAK;SAClB;QACD,SAAS,EAAE;YACT,KAAK,EAAE,MAAM;YACb,UAAU,EAAE,KAAK;SAClB;KACF,CAAC,CAAC;IAEH,OAAO,GAA2C,CAAC;AACrD,CAAC;AAED,MAAM,UAAU,4BAA4B,CAC1C,eAAoB;IAEpB,OAAO,eAAe,EAAE,CAAC,QAAQ,CAAC,KAAK,gCAAgC,CAAC;AAC1E,CAAC;AAqBD,MAAM,UAAU,iBAAiB,CAC/B,IAAwB,EACxB,EACE,MAAM,EACN,QAAQ,GAIT;IAED,MAAM,GAAG,GAAG,EAAE,GAAG,IAAI,EAAE,CAAC;IAExB,MAAM,CAAC,gBAAgB,CAAC,GAAG,EAAE;QAC3B,MAAM,EAAE;YACN,KAAK,EAAE,qBAAqB;YAC5B,UAAU,EAAE,KAAK;SAClB;QACD,SAAS,EAAE;YACT,KAAK,EAAE,MAAM;YACb,UAAU,EAAE,KAAK;SAClB;QACD,SAAS,EAAE;YACT,KAAK,EAAE,QAAQ;YACf,UAAU,EAAE,KAAK;SAClB;KACF,CAAC,CAAC;IAEH,OAAO,GAA+C,CAAC;AACzD,CAAC;AAED,MAAM,UAAU,kBAAkB,CAAC,IAAS;IAC1C,OAAO,IAAI,EAAE,CAAC,QAAQ,CAAC,KAAK,qBAAqB,CAAC;AACpD,CAAC;AAED,MAAM,UAAU,wBAAwB,CAGtC,UAA0B,EAAE,MAAc;IAC1C,IAAI,CAAC,MAAM,IAAI,CAAC,qBAAqB,CAAC,MAAM,CAAC,EAAE;QAC7C,OAAO;YACL,GAAG,UAAU;YACb,OAAO,EAAE,UAAU,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,MAAM,EAAE,EAAE,CAAC,CAAC;gBAC3C,GAAG,MAAM;gBACT,OAAO,EAAE,EAAE,GAAG,MAAM,CAAC,OAAO,EAAE,MAAM,EAAE,IAAI,EAAE,UAAU,EAAE,MAAM,CAAC,OAAO,CAAC,UAAU,IAAI,EAAE,EAAE;aAC1F,CAAC,CAAC;SACJ,CAAC;KACH;IAED,OAAO,mBAAmB,CAAC,UAAU,EAAE,MAAM,CAAC,CAAC;AACjD,CAAC;AAED,MAAM,UAAU,mBAAmB,CAGjC,UAA0B,EAAE,MAAc;IAC1C,MAAM,OAAO,GAAiC,UAAU,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,MAAM,EAAyB,EAAE;QACrG,IAAI,MAAM,CAAC,aAAa,KAAK,QAAQ,EAAE;YACrC,MAAM,IAAI,uBAAuB,EAAE,CAAC;SACrC;QAED,IAAI,MAAM,CAAC,aAAa,KAAK,gBAAgB,EAAE;YAC7C,MAAM,IAAI,8BAA8B,EAAE,CAAC;SAC5C;QAED,OAAO;YACL,GAAG,MAAM;YACT,OAAO,EAAE;gBACP,GAAG,MAAM,CAAC,OAAO;gBACjB,UAAU,EAAE,MAAM,CAAC,OAAO,CAAC,UAAU,EAAE,GAAG,CAAC,CAAC,QAAQ,EAAE,EAAE,CAAC,aAAa,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC,IAAI,EAAE;gBAC/F,MAAM,EACJ,MAAM,CAAC,OAAO,CAAC,OAAO,IAAI,CAAC,MAAM,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;oBACjD,mBAAmB,CAAC,MAAM,EAAE,MAAM,CAAC,OAAO,CAAC,OAAO,CAAC;oBACrD,CAAC,CAAC,IAAI;aACT;SACF,CAAC;IACJ,CAAC,CAAC,CAAC;IAEH,OAAO,EAAE,GAAG,UAAU,EAAE,OAAO,EAAE,CAAC;AACpC,CAAC;AAED,SAAS,mBAAmB,CAG1B,MAAc,EAAE,OAAe;IAC/B,IAAI,MAAM,CAAC,eAAe,EAAE,IAAI,KAAK,aAAa,EAAE;QAClD,OAAO,IAAI,CAAC;KACb;IAED,IAAI,MAAM,CAAC,eAAe,EAAE,IAAI,KAAK,aAAa,EAAE;QAClD,IAAI,WAAW,IAAI,MAAM,CAAC,eAAe,EAAE;YACzC,MAAM,eAAe,GAAG,MAAM,CAAC,eAAuD,CAAC;YAEvF,OAAO,eAAe,CAAC,SAAS,CAAC,OAAO,CAAC,CAAC;SAC3C;QAED,OAAO,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;KAC5B;IAED,OAAO,IAAI,CAAC;AACd,CAAC;AAED,SAAS,aAAa,CACpB,MAAc,EACd,QAAuC;IAEvC,MAAM,SAAS,GAAG,MAAM,CAAC,KAAK,EAAE,IAAI,CAAC,CAAC,SAAS,EAAE,EAAE,CAAC,SAAS,CAAC,QAAQ,EAAE,IAAI,KAAK,QAAQ,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;IACzG,OAAO;QACL,GAAG,QAAQ;QACX,QAAQ,EAAE;YACR,GAAG,QAAQ,CAAC,QAAQ;YACpB,gBAAgB,EACd,kBAAkB,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC,SAAS,CAAC,QAAQ,CAAC,QAAQ,CAAC,SAAS,CAAC;gBAChF,CAAC,CAAC,SAAS,EAAE,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,QAAQ,CAAC,QAAQ,CAAC,SAAS,CAAC;oBACtE,CAAC,CAAC,IAAI;SACT;KACF,CAAC;AACJ,CAAC;AAED,MAAM,UAAU,mBAAmB,CACjC,MAAqD,EACrD,QAAuC;IAEvC,IAAI,CAAC,MAAM,EAAE;QACX,OAAO,KAAK,CAAC;KACd;IAED,MAAM,SAAS,GAAG,MAAM,CAAC,KAAK,EAAE,IAAI,CAAC,
CAAC,SAAS,EAAE,EAAE,CAAC,SAAS,CAAC,QAAQ,EAAE,IAAI,KAAK,QAAQ,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;IACzG,OAAO,kBAAkB,CAAC,SAAS,CAAC,IAAI,SAAS,EAAE,QAAQ,CAAC,MAAM,IAAI,KAAK,CAAC;AAC9E,CAAC;AAED,MAAM,UAAU,qBAAqB,CAAC,MAAqC;IACzE,IAAI,4BAA4B,CAAC,MAAM,CAAC,eAAe,CAAC,EAAE;QACxD,OAAO,IAAI,CAAC;KACb;IAED,OAAO,CACL,MAAM,CAAC,KAAK,EAAE,IAAI,CAChB,CAAC,CAAC,EAAE,EAAE,CAAC,kBAAkB,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,KAAK,UAAU,IAAI,CAAC,CAAC,QAAQ,CAAC,MAAM,KAAK,IAAI,CAAC,CACtF,IAAI,KAAK,CACX,CAAC;AACJ,CAAC;AAED,MAAM,UAAU,kBAAkB,CAAC,KAAuC;IACxE,KAAK,MAAM,IAAI,IAAI,KAAK,IAAI,EAAE,EAAE;QAC9B,IAAI,IAAI,CAAC,IAAI,KAAK,UAAU,EAAE;YAC5B,MAAM,IAAI,WAAW,CACnB,2EAA2E,IAAI,CAAC,IAAI,IAAI,CACzF,CAAC;SACH;QAED,IAAI,IAAI,CAAC,QAAQ,CAAC,MAAM,KAAK,IAAI,EAAE;YACjC,MAAM,IAAI,WAAW,CACnB,SAAS,IAAI,CAAC,QAAQ,CAAC,IAAI,4FAA4F,CACxH,CAAC;SACH;KACF;AACH,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/package.json b/node_modules/openai/package.json new file mode 100644 index 0000000..01fd92c --- /dev/null +++ b/node_modules/openai/package.json @@ -0,0 +1,112 @@ +{ + "name": "openai", + "version": "4.77.0", + "description": "The official TypeScript library for the OpenAI API", + "author": "OpenAI ", + "types": "./index.d.ts", + "main": "./index.js", + "type": "commonjs", + "repository": "github:openai/openai-node", + "license": "Apache-2.0", + "packageManager": "yarn@1.22.22", + "files": [ + "**/*" + ], + "private": false, + "scripts": { + "test": "./scripts/test", + "build": "./scripts/build", + "format": "prettier --write --cache --cache-strategy metadata . !dist", + "tsn": "ts-node -r tsconfig-paths/register", + "lint": "./scripts/lint", + "fix": "./scripts/format" + }, + "dependencies": { + "@types/node": "^18.11.18", + "@types/node-fetch": "^2.6.4", + "abort-controller": "^3.0.0", + "agentkeepalive": "^4.2.1", + "form-data-encoder": "1.7.2", + "formdata-node": "^4.3.2", + "node-fetch": "^2.6.7" + }, + "sideEffects": [ + "./_shims/index.js", + "./_shims/index.mjs", + "./shims/node.js", + "./shims/node.mjs", + "./shims/web.js", + "./shims/web.mjs" + ], + "imports": { + "openai": ".", + "openai/*": "./src/*" + }, + "exports": { + "./_shims/auto/*": { + "deno": { + "types": "./_shims/auto/*.d.ts", + "require": "./_shims/auto/*.js", + "default": "./_shims/auto/*.mjs" + }, + "bun": { + "types": "./_shims/auto/*.d.ts", + "require": "./_shims/auto/*-bun.js", + "default": "./_shims/auto/*-bun.mjs" + }, + "browser": { + "types": "./_shims/auto/*.d.ts", + "require": "./_shims/auto/*.js", + "default": "./_shims/auto/*.mjs" + }, + "worker": { + "types": "./_shims/auto/*.d.ts", + "require": "./_shims/auto/*.js", + "default": "./_shims/auto/*.mjs" + }, + "workerd": { + "types": "./_shims/auto/*.d.ts", + "require": "./_shims/auto/*.js", + "default": "./_shims/auto/*.mjs" + }, + "node": { + "types": "./_shims/auto/*-node.d.ts", + "require": "./_shims/auto/*-node.js", + "default": "./_shims/auto/*-node.mjs" + }, + "types": "./_shims/auto/*.d.ts", + "require": "./_shims/auto/*.js", + "default": "./_shims/auto/*.mjs" + }, + ".": { + "require": { + "types": "./index.d.ts", + "default": "./index.js" + }, + "types": "./index.d.mts", + "default": "./index.mjs" + }, + "./*.mjs": { + "types": "./*.d.ts", + "default": "./*.mjs" + }, + "./*.js": { + "types": "./*.d.ts", + "default": "./*.js" + }, + "./*": { + "types": "./*.d.ts", + "require": "./*.js", + "default": "./*.mjs" + } + }, + "bin": "./bin/cli", + "peerDependencies": { + "zod": "^3.23.8" + }, + "peerDependenciesMeta": { + "zod": { + "optional": true + } + } +} diff --git 
a/node_modules/openai/pagination.d.ts b/node_modules/openai/pagination.d.ts new file mode 100644 index 0000000..b320a14 --- /dev/null +++ b/node_modules/openai/pagination.d.ts @@ -0,0 +1,37 @@ +import { AbstractPage, Response, APIClient, FinalRequestOptions, PageInfo } from "./core.js"; +export interface PageResponse { + data: Array; + object: string; +} +/** + * Note: no pagination actually occurs yet, this is for forwards-compatibility. + */ +export declare class Page extends AbstractPage implements PageResponse { + data: Array; + object: string; + constructor(client: APIClient, response: Response, body: PageResponse, options: FinalRequestOptions); + getPaginatedItems(): Item[]; + /** + * This page represents a response that isn't actually paginated at the API level + * so there will never be any next page params. + */ + nextPageParams(): null; + nextPageInfo(): null; +} +export interface CursorPageResponse { + data: Array; +} +export interface CursorPageParams { + after?: string; + limit?: number; +} +export declare class CursorPage extends AbstractPage implements CursorPageResponse { + data: Array; + constructor(client: APIClient, response: Response, body: CursorPageResponse, options: FinalRequestOptions); + getPaginatedItems(): Item[]; + nextPageParams(): Partial | null; + nextPageInfo(): PageInfo | null; +} +//# sourceMappingURL=pagination.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/pagination.d.ts.map b/node_modules/openai/pagination.d.ts.map new file mode 100644 index 0000000..b7512d1 --- /dev/null +++ b/node_modules/openai/pagination.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"pagination.d.ts","sourceRoot":"","sources":["src/pagination.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,YAAY,EAAE,QAAQ,EAAE,SAAS,EAAE,mBAAmB,EAAE,QAAQ,EAAE,MAAM,QAAQ,CAAC;AAE1F,MAAM,WAAW,YAAY,CAAC,IAAI;IAChC,IAAI,EAAE,KAAK,CAAC,IAAI,CAAC,CAAC;IAElB,MAAM,EAAE,MAAM,CAAC;CAChB;AAED;;GAEG;AACH,qBAAa,IAAI,CAAC,IAAI,CAAE,SAAQ,YAAY,CAAC,IAAI,CAAE,YAAW,YAAY,CAAC,IAAI,CAAC;IAC9E,IAAI,EAAE,KAAK,CAAC,IAAI,CAAC,CAAC;IAElB,MAAM,EAAE,MAAM,CAAC;gBAEH,MAAM,EAAE,SAAS,EAAE,QAAQ,EAAE,QAAQ,EAAE,IAAI,EAAE,YAAY,CAAC,IAAI,CAAC,EAAE,OAAO,EAAE,mBAAmB;IAOzG,iBAAiB,IAAI,IAAI,EAAE;IAK3B;;;OAGG;IACH,cAAc,IAAI,IAAI;IAItB,YAAY,IAAI,IAAI;CAGrB;AAED,MAAM,WAAW,kBAAkB,CAAC,IAAI;IACtC,IAAI,EAAE,KAAK,CAAC,IAAI,CAAC,CAAC;CACnB;AAED,MAAM,WAAW,gBAAgB;IAC/B,KAAK,CAAC,EAAE,MAAM,CAAC;IAEf,KAAK,CAAC,EAAE,MAAM,CAAC;CAChB;AAED,qBAAa,UAAU,CAAC,IAAI,SAAS;IAAE,EAAE,EAAE,MAAM,CAAA;CAAE,CACjD,SAAQ,YAAY,CAAC,IAAI,CACzB,YAAW,kBAAkB,CAAC,IAAI,CAAC;IAEnC,IAAI,EAAE,KAAK,CAAC,IAAI,CAAC,CAAC;gBAGhB,MAAM,EAAE,SAAS,EACjB,QAAQ,EAAE,QAAQ,EAClB,IAAI,EAAE,kBAAkB,CAAC,IAAI,CAAC,EAC9B,OAAO,EAAE,mBAAmB;IAO9B,iBAAiB,IAAI,IAAI,EAAE;IAK3B,cAAc,IAAI,OAAO,CAAC,gBAAgB,CAAC,GAAG,IAAI;IASlD,YAAY,IAAI,QAAQ,GAAG,IAAI;CAahC"} \ No newline at end of file diff --git a/node_modules/openai/pagination.js b/node_modules/openai/pagination.js new file mode 100644 index 0000000..d597879 --- /dev/null +++ b/node_modules/openai/pagination.js @@ -0,0 +1,64 @@ +"use strict"; +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.CursorPage = exports.Page = void 0; +const core_1 = require("./core.js"); +/** + * Note: no pagination actually occurs yet, this is for forwards-compatibility. 
+ */ +class Page extends core_1.AbstractPage { + constructor(client, response, body, options) { + super(client, response, body, options); + this.data = body.data || []; + this.object = body.object; + } + getPaginatedItems() { + return this.data ?? []; + } + // @deprecated Please use `nextPageInfo()` instead + /** + * This page represents a response that isn't actually paginated at the API level + * so there will never be any next page params. + */ + nextPageParams() { + return null; + } + nextPageInfo() { + return null; + } +} +exports.Page = Page; +class CursorPage extends core_1.AbstractPage { + constructor(client, response, body, options) { + super(client, response, body, options); + this.data = body.data || []; + } + getPaginatedItems() { + return this.data ?? []; + } + // @deprecated Please use `nextPageInfo()` instead + nextPageParams() { + const info = this.nextPageInfo(); + if (!info) + return null; + if ('params' in info) + return info.params; + const params = Object.fromEntries(info.url.searchParams); + if (!Object.keys(params).length) + return null; + return params; + } + nextPageInfo() { + const data = this.getPaginatedItems(); + if (!data.length) { + return null; + } + const id = data[data.length - 1]?.id; + if (!id) { + return null; + } + return { params: { after: id } }; + } +} +exports.CursorPage = CursorPage; +//# sourceMappingURL=pagination.js.map \ No newline at end of file diff --git a/node_modules/openai/pagination.js.map b/node_modules/openai/pagination.js.map new file mode 100644 index 0000000..dbc1787 --- /dev/null +++ b/node_modules/openai/pagination.js.map @@ -0,0 +1 @@ +{"version":3,"file":"pagination.js","sourceRoot":"","sources":["src/pagination.ts"],"names":[],"mappings":";AAAA,sFAAsF;;;AAEtF,oCAA0F;AAQ1F;;GAEG;AACH,MAAa,IAAW,SAAQ,mBAAkB;IAKhD,YAAY,MAAiB,EAAE,QAAkB,EAAE,IAAwB,EAAE,OAA4B;QACvG,KAAK,CAAC,MAAM,EAAE,QAAQ,EAAE,IAAI,EAAE,OAAO,CAAC,CAAC;QAEvC,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,IAAI,IAAI,EAAE,CAAC;QAC5B,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC;IAC5B,CAAC;IAED,iBAAiB;QACf,OAAO,IAAI,CAAC,IAAI,IAAI,EAAE,CAAC;IACzB,CAAC;IAED,kDAAkD;IAClD;;;OAGG;IACH,cAAc;QACZ,OAAO,IAAI,CAAC;IACd,CAAC;IAED,YAAY;QACV,OAAO,IAAI,CAAC;IACd,CAAC;CACF;AA5BD,oBA4BC;AAYD,MAAa,UACX,SAAQ,mBAAkB;IAK1B,YACE,MAAiB,EACjB,QAAkB,EAClB,IAA8B,EAC9B,OAA4B;QAE5B,KAAK,CAAC,MAAM,EAAE,QAAQ,EAAE,IAAI,EAAE,OAAO,CAAC,CAAC;QAEvC,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,IAAI,IAAI,EAAE,CAAC;IAC9B,CAAC;IAED,iBAAiB;QACf,OAAO,IAAI,CAAC,IAAI,IAAI,EAAE,CAAC;IACzB,CAAC;IAED,kDAAkD;IAClD,cAAc;QACZ,MAAM,IAAI,GAAG,IAAI,CAAC,YAAY,EAAE,CAAC;QACjC,IAAI,CAAC,IAAI;YAAE,OAAO,IAAI,CAAC;QACvB,IAAI,QAAQ,IAAI,IAAI;YAAE,OAAO,IAAI,CAAC,MAAM,CAAC;QACzC,MAAM,MAAM,GAAG,MAAM,CAAC,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,YAAY,CAAC,CAAC;QACzD,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,MAAM;YAAE,OAAO,IAAI,CAAC;QAC7C,OAAO,MAAM,CAAC;IAChB,CAAC;IAED,YAAY;QACV,MAAM,IAAI,GAAG,IAAI,CAAC,iBAAiB,EAAE,CAAC;QACtC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE;YAChB,OAAO,IAAI,CAAC;SACb;QAED,MAAM,EAAE,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,GAAG,CAAC,CAAC,EAAE,EAAE,CAAC;QACrC,IAAI,CAAC,EAAE,EAAE;YACP,OAAO,IAAI,CAAC;SACb;QAED,OAAO,EAAE,MAAM,EAAE,EAAE,KAAK,EAAE,EAAE,EAAE,EAAE,CAAC;IACnC,CAAC;CACF;AA5CD,gCA4CC"} \ No newline at end of file diff --git a/node_modules/openai/pagination.mjs b/node_modules/openai/pagination.mjs new file mode 100644 index 0000000..bbacf0f --- /dev/null +++ b/node_modules/openai/pagination.mjs @@ -0,0 +1,59 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+import { AbstractPage } from "./core.mjs"; +/** + * Note: no pagination actually occurs yet, this is for forwards-compatibility. + */ +export class Page extends AbstractPage { + constructor(client, response, body, options) { + super(client, response, body, options); + this.data = body.data || []; + this.object = body.object; + } + getPaginatedItems() { + return this.data ?? []; + } + // @deprecated Please use `nextPageInfo()` instead + /** + * This page represents a response that isn't actually paginated at the API level + * so there will never be any next page params. + */ + nextPageParams() { + return null; + } + nextPageInfo() { + return null; + } +} +export class CursorPage extends AbstractPage { + constructor(client, response, body, options) { + super(client, response, body, options); + this.data = body.data || []; + } + getPaginatedItems() { + return this.data ?? []; + } + // @deprecated Please use `nextPageInfo()` instead + nextPageParams() { + const info = this.nextPageInfo(); + if (!info) + return null; + if ('params' in info) + return info.params; + const params = Object.fromEntries(info.url.searchParams); + if (!Object.keys(params).length) + return null; + return params; + } + nextPageInfo() { + const data = this.getPaginatedItems(); + if (!data.length) { + return null; + } + const id = data[data.length - 1]?.id; + if (!id) { + return null; + } + return { params: { after: id } }; + } +} +//# sourceMappingURL=pagination.mjs.map \ No newline at end of file diff --git a/node_modules/openai/pagination.mjs.map b/node_modules/openai/pagination.mjs.map new file mode 100644 index 0000000..352ce1b --- /dev/null +++ b/node_modules/openai/pagination.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"pagination.mjs","sourceRoot":"","sources":["src/pagination.ts"],"names":[],"mappings":"AAAA,sFAAsF;OAE/E,EAAE,YAAY,EAAsD;AAQ3E;;GAEG;AACH,MAAM,OAAO,IAAW,SAAQ,YAAkB;IAKhD,YAAY,MAAiB,EAAE,QAAkB,EAAE,IAAwB,EAAE,OAA4B;QACvG,KAAK,CAAC,MAAM,EAAE,QAAQ,EAAE,IAAI,EAAE,OAAO,CAAC,CAAC;QAEvC,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,IAAI,IAAI,EAAE,CAAC;QAC5B,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC;IAC5B,CAAC;IAED,iBAAiB;QACf,OAAO,IAAI,CAAC,IAAI,IAAI,EAAE,CAAC;IACzB,CAAC;IAED,kDAAkD;IAClD;;;OAGG;IACH,cAAc;QACZ,OAAO,IAAI,CAAC;IACd,CAAC;IAED,YAAY;QACV,OAAO,IAAI,CAAC;IACd,CAAC;CACF;AAYD,MAAM,OAAO,UACX,SAAQ,YAAkB;IAK1B,YACE,MAAiB,EACjB,QAAkB,EAClB,IAA8B,EAC9B,OAA4B;QAE5B,KAAK,CAAC,MAAM,EAAE,QAAQ,EAAE,IAAI,EAAE,OAAO,CAAC,CAAC;QAEvC,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,IAAI,IAAI,EAAE,CAAC;IAC9B,CAAC;IAED,iBAAiB;QACf,OAAO,IAAI,CAAC,IAAI,IAAI,EAAE,CAAC;IACzB,CAAC;IAED,kDAAkD;IAClD,cAAc;QACZ,MAAM,IAAI,GAAG,IAAI,CAAC,YAAY,EAAE,CAAC;QACjC,IAAI,CAAC,IAAI;YAAE,OAAO,IAAI,CAAC;QACvB,IAAI,QAAQ,IAAI,IAAI;YAAE,OAAO,IAAI,CAAC,MAAM,CAAC;QACzC,MAAM,MAAM,GAAG,MAAM,CAAC,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,YAAY,CAAC,CAAC;QACzD,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,MAAM;YAAE,OAAO,IAAI,CAAC;QAC7C,OAAO,MAAM,CAAC;IAChB,CAAC;IAED,YAAY;QACV,MAAM,IAAI,GAAG,IAAI,CAAC,iBAAiB,EAAE,CAAC;QACtC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE;YAChB,OAAO,IAAI,CAAC;SACb;QAED,MAAM,EAAE,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,GAAG,CAAC,CAAC,EAAE,EAAE,CAAC;QACrC,IAAI,CAAC,EAAE,EAAE;YACP,OAAO,IAAI,CAAC;SACb;QAED,OAAO,EAAE,MAAM,EAAE,EAAE,KAAK,EAAE,EAAE,EAAE,EAAE,CAAC;IACnC,CAAC;CACF"} \ No newline at end of file diff --git a/node_modules/openai/resource.d.ts b/node_modules/openai/resource.d.ts new file mode 100644 index 0000000..c20cf10 --- /dev/null +++ b/node_modules/openai/resource.d.ts @@ -0,0 +1,6 @@ +import type { OpenAI } from "./index.js"; +export declare class APIResource { + protected 
_client: OpenAI; + constructor(client: OpenAI); +} +//# sourceMappingURL=resource.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/resource.d.ts.map b/node_modules/openai/resource.d.ts.map new file mode 100644 index 0000000..7441ff7 --- /dev/null +++ b/node_modules/openai/resource.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"resource.d.ts","sourceRoot":"","sources":["src/resource.ts"],"names":[],"mappings":"AAEA,OAAO,KAAK,EAAE,MAAM,EAAE,MAAM,SAAS,CAAC;AAEtC,qBAAa,WAAW;IACtB,SAAS,CAAC,OAAO,EAAE,MAAM,CAAC;gBAEd,MAAM,EAAE,MAAM;CAG3B"} \ No newline at end of file diff --git a/node_modules/openai/resource.js b/node_modules/openai/resource.js new file mode 100644 index 0000000..edccb48 --- /dev/null +++ b/node_modules/openai/resource.js @@ -0,0 +1,11 @@ +"use strict"; +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.APIResource = void 0; +class APIResource { + constructor(client) { + this._client = client; + } +} +exports.APIResource = APIResource; +//# sourceMappingURL=resource.js.map \ No newline at end of file diff --git a/node_modules/openai/resource.js.map b/node_modules/openai/resource.js.map new file mode 100644 index 0000000..537778d --- /dev/null +++ b/node_modules/openai/resource.js.map @@ -0,0 +1 @@ +{"version":3,"file":"resource.js","sourceRoot":"","sources":["src/resource.ts"],"names":[],"mappings":";AAAA,sFAAsF;;;AAItF,MAAa,WAAW;IAGtB,YAAY,MAAc;QACxB,IAAI,CAAC,OAAO,GAAG,MAAM,CAAC;IACxB,CAAC;CACF;AAND,kCAMC"} \ No newline at end of file diff --git a/node_modules/openai/resource.mjs b/node_modules/openai/resource.mjs new file mode 100644 index 0000000..98ee0dc --- /dev/null +++ b/node_modules/openai/resource.mjs @@ -0,0 +1,7 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+export class APIResource { + constructor(client) { + this._client = client; + } +} +//# sourceMappingURL=resource.mjs.map \ No newline at end of file diff --git a/node_modules/openai/resource.mjs.map b/node_modules/openai/resource.mjs.map new file mode 100644 index 0000000..05061fc --- /dev/null +++ b/node_modules/openai/resource.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"resource.mjs","sourceRoot":"","sources":["src/resource.ts"],"names":[],"mappings":"AAAA,sFAAsF;AAItF,MAAM,OAAO,WAAW;IAGtB,YAAY,MAAc;QACxB,IAAI,CAAC,OAAO,GAAG,MAAM,CAAC;IACxB,CAAC;CACF"} \ No newline at end of file diff --git a/node_modules/openai/resources/audio/audio.d.ts b/node_modules/openai/resources/audio/audio.d.ts new file mode 100644 index 0000000..9726830 --- /dev/null +++ b/node_modules/openai/resources/audio/audio.d.ts @@ -0,0 +1,25 @@ +import { APIResource } from "../../resource.js"; +import * as SpeechAPI from "./speech.js"; +import { Speech, SpeechCreateParams, SpeechModel } from "./speech.js"; +import * as TranscriptionsAPI from "./transcriptions.js"; +import { Transcription, TranscriptionCreateParams, TranscriptionCreateResponse, TranscriptionSegment, TranscriptionVerbose, TranscriptionWord, Transcriptions } from "./transcriptions.js"; +import * as TranslationsAPI from "./translations.js"; +import { Translation, TranslationCreateParams, TranslationCreateResponse, TranslationVerbose, Translations } from "./translations.js"; +export declare class Audio extends APIResource { + transcriptions: TranscriptionsAPI.Transcriptions; + translations: TranslationsAPI.Translations; + speech: SpeechAPI.Speech; +} +export type AudioModel = 'whisper-1'; +/** + * The format of the output, in one of these options: `json`, `text`, `srt`, + * `verbose_json`, or `vtt`. + */ +export type AudioResponseFormat = 'json' | 'text' | 'srt' | 'verbose_json' | 'vtt'; +export declare namespace Audio { + export { type AudioModel as AudioModel, type AudioResponseFormat as AudioResponseFormat }; + export { Transcriptions as Transcriptions, type Transcription as Transcription, type TranscriptionSegment as TranscriptionSegment, type TranscriptionVerbose as TranscriptionVerbose, type TranscriptionWord as TranscriptionWord, type TranscriptionCreateResponse as TranscriptionCreateResponse, type TranscriptionCreateParams as TranscriptionCreateParams, }; + export { Translations as Translations, type Translation as Translation, type TranslationVerbose as TranslationVerbose, type TranslationCreateResponse as TranslationCreateResponse, type TranslationCreateParams as TranslationCreateParams, }; + export { Speech as Speech, type SpeechModel as SpeechModel, type SpeechCreateParams as SpeechCreateParams }; +} +//# sourceMappingURL=audio.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/resources/audio/audio.d.ts.map b/node_modules/openai/resources/audio/audio.d.ts.map new file mode 100644 index 0000000..c2cd68a --- /dev/null +++ b/node_modules/openai/resources/audio/audio.d.ts.map @@ -0,0 +1 @@ 
+{"version":3,"file":"audio.d.ts","sourceRoot":"","sources":["../../src/resources/audio/audio.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,WAAW,EAAE,MAAM,gBAAgB,CAAC;AAC7C,OAAO,KAAK,SAAS,MAAM,UAAU,CAAC;AACtC,OAAO,EAAE,MAAM,EAAE,kBAAkB,EAAE,WAAW,EAAE,MAAM,UAAU,CAAC;AACnE,OAAO,KAAK,iBAAiB,MAAM,kBAAkB,CAAC;AACtD,OAAO,EACL,aAAa,EACb,yBAAyB,EACzB,2BAA2B,EAC3B,oBAAoB,EACpB,oBAAoB,EACpB,iBAAiB,EACjB,cAAc,EACf,MAAM,kBAAkB,CAAC;AAC1B,OAAO,KAAK,eAAe,MAAM,gBAAgB,CAAC;AAClD,OAAO,EACL,WAAW,EACX,uBAAuB,EACvB,yBAAyB,EACzB,kBAAkB,EAClB,YAAY,EACb,MAAM,gBAAgB,CAAC;AAExB,qBAAa,KAAM,SAAQ,WAAW;IACpC,cAAc,EAAE,iBAAiB,CAAC,cAAc,CAAsD;IACtG,YAAY,EAAE,eAAe,CAAC,YAAY,CAAkD;IAC5F,MAAM,EAAE,SAAS,CAAC,MAAM,CAAsC;CAC/D;AAED,MAAM,MAAM,UAAU,GAAG,WAAW,CAAC;AAErC;;;GAGG;AACH,MAAM,MAAM,mBAAmB,GAAG,MAAM,GAAG,MAAM,GAAG,KAAK,GAAG,cAAc,GAAG,KAAK,CAAC;AAMnF,MAAM,CAAC,OAAO,WAAW,KAAK,CAAC;IAC7B,OAAO,EAAE,KAAK,UAAU,IAAI,UAAU,EAAE,KAAK,mBAAmB,IAAI,mBAAmB,EAAE,CAAC;IAE1F,OAAO,EACL,cAAc,IAAI,cAAc,EAChC,KAAK,aAAa,IAAI,aAAa,EACnC,KAAK,oBAAoB,IAAI,oBAAoB,EACjD,KAAK,oBAAoB,IAAI,oBAAoB,EACjD,KAAK,iBAAiB,IAAI,iBAAiB,EAC3C,KAAK,2BAA2B,IAAI,2BAA2B,EAC/D,KAAK,yBAAyB,IAAI,yBAAyB,GAC5D,CAAC;IAEF,OAAO,EACL,YAAY,IAAI,YAAY,EAC5B,KAAK,WAAW,IAAI,WAAW,EAC/B,KAAK,kBAAkB,IAAI,kBAAkB,EAC7C,KAAK,yBAAyB,IAAI,yBAAyB,EAC3D,KAAK,uBAAuB,IAAI,uBAAuB,GACxD,CAAC;IAEF,OAAO,EAAE,MAAM,IAAI,MAAM,EAAE,KAAK,WAAW,IAAI,WAAW,EAAE,KAAK,kBAAkB,IAAI,kBAAkB,EAAE,CAAC;CAC7G"} \ No newline at end of file diff --git a/node_modules/openai/resources/audio/audio.js b/node_modules/openai/resources/audio/audio.js new file mode 100644 index 0000000..d2ec090 --- /dev/null +++ b/node_modules/openai/resources/audio/audio.js @@ -0,0 +1,47 @@ +"use strict"; +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Audio = void 0; +const resource_1 = require("../../resource.js"); +const SpeechAPI = __importStar(require("./speech.js")); +const speech_1 = require("./speech.js"); +const TranscriptionsAPI = __importStar(require("./transcriptions.js")); +const transcriptions_1 = require("./transcriptions.js"); +const TranslationsAPI = __importStar(require("./translations.js")); +const translations_1 = require("./translations.js"); +class Audio extends resource_1.APIResource { + constructor() { + super(...arguments); + this.transcriptions = new TranscriptionsAPI.Transcriptions(this._client); + this.translations = new TranslationsAPI.Translations(this._client); + this.speech = new SpeechAPI.Speech(this._client); + } +} +exports.Audio = Audio; +Audio.Transcriptions = transcriptions_1.Transcriptions; +Audio.Translations = translations_1.Translations; +Audio.Speech = speech_1.Speech; +//# sourceMappingURL=audio.js.map \ No newline at end of file diff --git a/node_modules/openai/resources/audio/audio.js.map b/node_modules/openai/resources/audio/audio.js.map new file mode 100644 index 0000000..9323ed7 --- /dev/null +++ b/node_modules/openai/resources/audio/audio.js.map @@ -0,0 +1 @@ +{"version":3,"file":"audio.js","sourceRoot":"","sources":["../../src/resources/audio/audio.ts"],"names":[],"mappings":";AAAA,sFAAsF;;;;;;;;;;;;;;;;;;;;;;;;;;AAEtF,gDAA6C;AAC7C,uDAAsC;AACtC,wCAAmE;AACnE,uEAAsD;AACtD,wDAQ0B;AAC1B,mEAAkD;AAClD,oDAMwB;AAExB,MAAa,KAAM,SAAQ,sBAAW;IAAtC;;QACE,mBAAc,GAAqC,IAAI,iBAAiB,CAAC,cAAc,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;QACtG,iBAAY,GAAiC,IAAI,eAAe,CAAC,YAAY,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;QAC5F,WAAM,GAAqB,IAAI,SAAS,CAAC,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;IAChE,CAAC;CAAA;AAJD,sBAIC;AAUD,KAAK,CAAC,cAAc,GAAG,+BAAc,CAAC;AACtC,KAAK,CAAC,YAAY,GAAG,2BAAY,CAAC;AAClC,KAAK,CAAC,MAAM,GAAG,eAAM,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/resources/audio/audio.mjs b/node_modules/openai/resources/audio/audio.mjs new file mode 100644 index 0000000..dc8b8fc --- /dev/null +++ b/node_modules/openai/resources/audio/audio.mjs @@ -0,0 +1,20 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+import { APIResource } from "../../resource.mjs"; +import * as SpeechAPI from "./speech.mjs"; +import { Speech } from "./speech.mjs"; +import * as TranscriptionsAPI from "./transcriptions.mjs"; +import { Transcriptions, } from "./transcriptions.mjs"; +import * as TranslationsAPI from "./translations.mjs"; +import { Translations, } from "./translations.mjs"; +export class Audio extends APIResource { + constructor() { + super(...arguments); + this.transcriptions = new TranscriptionsAPI.Transcriptions(this._client); + this.translations = new TranslationsAPI.Translations(this._client); + this.speech = new SpeechAPI.Speech(this._client); + } +} +Audio.Transcriptions = Transcriptions; +Audio.Translations = Translations; +Audio.Speech = Speech; +//# sourceMappingURL=audio.mjs.map \ No newline at end of file diff --git a/node_modules/openai/resources/audio/audio.mjs.map b/node_modules/openai/resources/audio/audio.mjs.map new file mode 100644 index 0000000..c0f832c --- /dev/null +++ b/node_modules/openai/resources/audio/audio.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"audio.mjs","sourceRoot":"","sources":["../../src/resources/audio/audio.ts"],"names":[],"mappings":"AAAA,sFAAsF;OAE/E,EAAE,WAAW,EAAE;OACf,KAAK,SAAS;OACd,EAAE,MAAM,EAAmC;OAC3C,KAAK,iBAAiB;OACtB,EAOL,cAAc,GACf;OACM,KAAK,eAAe;OACpB,EAKL,YAAY,GACb;AAED,MAAM,OAAO,KAAM,SAAQ,WAAW;IAAtC;;QACE,mBAAc,GAAqC,IAAI,iBAAiB,CAAC,cAAc,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;QACtG,iBAAY,GAAiC,IAAI,eAAe,CAAC,YAAY,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;QAC5F,WAAM,GAAqB,IAAI,SAAS,CAAC,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;IAChE,CAAC;CAAA;AAUD,KAAK,CAAC,cAAc,GAAG,cAAc,CAAC;AACtC,KAAK,CAAC,YAAY,GAAG,YAAY,CAAC;AAClC,KAAK,CAAC,MAAM,GAAG,MAAM,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/resources/audio/index.d.ts b/node_modules/openai/resources/audio/index.d.ts new file mode 100644 index 0000000..82edc8b --- /dev/null +++ b/node_modules/openai/resources/audio/index.d.ts @@ -0,0 +1,5 @@ +export { Audio, type AudioModel, type AudioResponseFormat } from "./audio.js"; +export { Speech, type SpeechModel, type SpeechCreateParams } from "./speech.js"; +export { Transcriptions, type Transcription, type TranscriptionSegment, type TranscriptionVerbose, type TranscriptionWord, type TranscriptionCreateResponse, type TranscriptionCreateParams, } from "./transcriptions.js"; +export { Translations, type Translation, type TranslationVerbose, type TranslationCreateResponse, type TranslationCreateParams, } from "./translations.js"; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/resources/audio/index.d.ts.map b/node_modules/openai/resources/audio/index.d.ts.map new file mode 100644 index 0000000..480fbc1 --- /dev/null +++ b/node_modules/openai/resources/audio/index.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/resources/audio/index.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,KAAK,EAAE,KAAK,UAAU,EAAE,KAAK,mBAAmB,EAAE,MAAM,SAAS,CAAC;AAC3E,OAAO,EAAE,MAAM,EAAE,KAAK,WAAW,EAAE,KAAK,kBAAkB,EAAE,MAAM,UAAU,CAAC;AAC7E,OAAO,EACL,cAAc,EACd,KAAK,aAAa,EAClB,KAAK,oBAAoB,EACzB,KAAK,oBAAoB,EACzB,KAAK,iBAAiB,EACtB,KAAK,2BAA2B,EAChC,KAAK,yBAAyB,GAC/B,MAAM,kBAAkB,CAAC;AAC1B,OAAO,EACL,YAAY,EACZ,KAAK,WAAW,EAChB,KAAK,kBAAkB,EACvB,KAAK,yBAAyB,EAC9B,KAAK,uBAAuB,GAC7B,MAAM,gBAAgB,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/resources/audio/index.js b/node_modules/openai/resources/audio/index.js new file mode 100644 index 0000000..1e38c2f --- /dev/null +++ 
b/node_modules/openai/resources/audio/index.js @@ -0,0 +1,13 @@ +"use strict"; +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Translations = exports.Transcriptions = exports.Speech = exports.Audio = void 0; +var audio_1 = require("./audio.js"); +Object.defineProperty(exports, "Audio", { enumerable: true, get: function () { return audio_1.Audio; } }); +var speech_1 = require("./speech.js"); +Object.defineProperty(exports, "Speech", { enumerable: true, get: function () { return speech_1.Speech; } }); +var transcriptions_1 = require("./transcriptions.js"); +Object.defineProperty(exports, "Transcriptions", { enumerable: true, get: function () { return transcriptions_1.Transcriptions; } }); +var translations_1 = require("./translations.js"); +Object.defineProperty(exports, "Translations", { enumerable: true, get: function () { return translations_1.Translations; } }); +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/openai/resources/audio/index.js.map b/node_modules/openai/resources/audio/index.js.map new file mode 100644 index 0000000..78ed806 --- /dev/null +++ b/node_modules/openai/resources/audio/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/resources/audio/index.ts"],"names":[],"mappings":";AAAA,sFAAsF;;;AAEtF,oCAA2E;AAAlE,8FAAA,KAAK,OAAA;AACd,sCAA6E;AAApE,gGAAA,MAAM,OAAA;AACf,sDAQ0B;AAPxB,gHAAA,cAAc,OAAA;AAQhB,kDAMwB;AALtB,4GAAA,YAAY,OAAA"} \ No newline at end of file diff --git a/node_modules/openai/resources/audio/index.mjs b/node_modules/openai/resources/audio/index.mjs new file mode 100644 index 0000000..1ecb5e4 --- /dev/null +++ b/node_modules/openai/resources/audio/index.mjs @@ -0,0 +1,6 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. +export { Audio } from "./audio.mjs"; +export { Speech } from "./speech.mjs"; +export { Transcriptions, } from "./transcriptions.mjs"; +export { Translations, } from "./translations.mjs"; +//# sourceMappingURL=index.mjs.map \ No newline at end of file diff --git a/node_modules/openai/resources/audio/index.mjs.map b/node_modules/openai/resources/audio/index.mjs.map new file mode 100644 index 0000000..98cb7f2 --- /dev/null +++ b/node_modules/openai/resources/audio/index.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"index.mjs","sourceRoot":"","sources":["../../src/resources/audio/index.ts"],"names":[],"mappings":"AAAA,sFAAsF;OAE/E,EAAE,KAAK,EAA6C;OACpD,EAAE,MAAM,EAA6C;OACrD,EACL,cAAc,GAOf;OACM,EACL,YAAY,GAKb"} \ No newline at end of file diff --git a/node_modules/openai/resources/audio/speech.d.ts b/node_modules/openai/resources/audio/speech.d.ts new file mode 100644 index 0000000..2a7e364 --- /dev/null +++ b/node_modules/openai/resources/audio/speech.d.ts @@ -0,0 +1,42 @@ +import { APIResource } from "../../resource.js"; +import * as Core from "../../core.js"; +import { type Response } from "../../_shims/index.js"; +export declare class Speech extends APIResource { + /** + * Generates audio from the input text. + */ + create(body: SpeechCreateParams, options?: Core.RequestOptions): Core.APIPromise; +} +export type SpeechModel = 'tts-1' | 'tts-1-hd'; +export interface SpeechCreateParams { + /** + * The text to generate audio for. The maximum length is 4096 characters. 
+ */ + input: string; + /** + * One of the available [TTS models](https://platform.openai.com/docs/models#tts): + * `tts-1` or `tts-1-hd` + */ + model: (string & {}) | SpeechModel; + /** + * The voice to use when generating the audio. Supported voices are `alloy`, + * `echo`, `fable`, `onyx`, `nova`, and `shimmer`. Previews of the voices are + * available in the + * [Text to speech guide](https://platform.openai.com/docs/guides/text-to-speech#voice-options). + */ + voice: 'alloy' | 'echo' | 'fable' | 'onyx' | 'nova' | 'shimmer'; + /** + * The format to audio in. Supported formats are `mp3`, `opus`, `aac`, `flac`, + * `wav`, and `pcm`. + */ + response_format?: 'mp3' | 'opus' | 'aac' | 'flac' | 'wav' | 'pcm'; + /** + * The speed of the generated audio. Select a value from `0.25` to `4.0`. `1.0` is + * the default. + */ + speed?: number; +} +export declare namespace Speech { + export { type SpeechModel as SpeechModel, type SpeechCreateParams as SpeechCreateParams }; +} +//# sourceMappingURL=speech.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/resources/audio/speech.d.ts.map b/node_modules/openai/resources/audio/speech.d.ts.map new file mode 100644 index 0000000..d9a09f4 --- /dev/null +++ b/node_modules/openai/resources/audio/speech.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"speech.d.ts","sourceRoot":"","sources":["../../src/resources/audio/speech.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,WAAW,EAAE,MAAM,gBAAgB,CAAC;AAC7C,OAAO,KAAK,IAAI,MAAM,YAAY,CAAC;AACnC,OAAO,EAAE,KAAK,QAAQ,EAAE,MAAM,oBAAoB,CAAC;AAEnD,qBAAa,MAAO,SAAQ,WAAW;IACrC;;OAEG;IACH,MAAM,CAAC,IAAI,EAAE,kBAAkB,EAAE,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAAG,IAAI,CAAC,UAAU,CAAC,QAAQ,CAAC;CAG3F;AAED,MAAM,MAAM,WAAW,GAAG,OAAO,GAAG,UAAU,CAAC;AAE/C,MAAM,WAAW,kBAAkB;IACjC;;OAEG;IACH,KAAK,EAAE,MAAM,CAAC;IAEd;;;OAGG;IACH,KAAK,EAAE,CAAC,MAAM,GAAG,EAAE,CAAC,GAAG,WAAW,CAAC;IAEnC;;;;;OAKG;IACH,KAAK,EAAE,OAAO,GAAG,MAAM,GAAG,OAAO,GAAG,MAAM,GAAG,MAAM,GAAG,SAAS,CAAC;IAEhE;;;OAGG;IACH,eAAe,CAAC,EAAE,KAAK,GAAG,MAAM,GAAG,KAAK,GAAG,MAAM,GAAG,KAAK,GAAG,KAAK,CAAC;IAElE;;;OAGG;IACH,KAAK,CAAC,EAAE,MAAM,CAAC;CAChB;AAED,MAAM,CAAC,OAAO,WAAW,MAAM,CAAC;IAC9B,OAAO,EAAE,KAAK,WAAW,IAAI,WAAW,EAAE,KAAK,kBAAkB,IAAI,kBAAkB,EAAE,CAAC;CAC3F"} \ No newline at end of file diff --git a/node_modules/openai/resources/audio/speech.js b/node_modules/openai/resources/audio/speech.js new file mode 100644 index 0000000..48cada9 --- /dev/null +++ b/node_modules/openai/resources/audio/speech.js @@ -0,0 +1,15 @@ +"use strict"; +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Speech = void 0; +const resource_1 = require("../../resource.js"); +class Speech extends resource_1.APIResource { + /** + * Generates audio from the input text. 
+ */ + create(body, options) { + return this._client.post('/audio/speech', { body, ...options, __binaryResponse: true }); + } +} +exports.Speech = Speech; +//# sourceMappingURL=speech.js.map \ No newline at end of file diff --git a/node_modules/openai/resources/audio/speech.js.map b/node_modules/openai/resources/audio/speech.js.map new file mode 100644 index 0000000..e775cd4 --- /dev/null +++ b/node_modules/openai/resources/audio/speech.js.map @@ -0,0 +1 @@ +{"version":3,"file":"speech.js","sourceRoot":"","sources":["../../src/resources/audio/speech.ts"],"names":[],"mappings":";AAAA,sFAAsF;;;AAEtF,gDAA6C;AAI7C,MAAa,MAAO,SAAQ,sBAAW;IACrC;;OAEG;IACH,MAAM,CAAC,IAAwB,EAAE,OAA6B;QAC5D,OAAO,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,eAAe,EAAE,EAAE,IAAI,EAAE,GAAG,OAAO,EAAE,gBAAgB,EAAE,IAAI,EAAE,CAAC,CAAC;IAC1F,CAAC;CACF;AAPD,wBAOC"} \ No newline at end of file diff --git a/node_modules/openai/resources/audio/speech.mjs b/node_modules/openai/resources/audio/speech.mjs new file mode 100644 index 0000000..5410bc0 --- /dev/null +++ b/node_modules/openai/resources/audio/speech.mjs @@ -0,0 +1,11 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. +import { APIResource } from "../../resource.mjs"; +export class Speech extends APIResource { + /** + * Generates audio from the input text. + */ + create(body, options) { + return this._client.post('/audio/speech', { body, ...options, __binaryResponse: true }); + } +} +//# sourceMappingURL=speech.mjs.map \ No newline at end of file diff --git a/node_modules/openai/resources/audio/speech.mjs.map b/node_modules/openai/resources/audio/speech.mjs.map new file mode 100644 index 0000000..54841f3 --- /dev/null +++ b/node_modules/openai/resources/audio/speech.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"speech.mjs","sourceRoot":"","sources":["../../src/resources/audio/speech.ts"],"names":[],"mappings":"AAAA,sFAAsF;OAE/E,EAAE,WAAW,EAAE;AAItB,MAAM,OAAO,MAAO,SAAQ,WAAW;IACrC;;OAEG;IACH,MAAM,CAAC,IAAwB,EAAE,OAA6B;QAC5D,OAAO,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,eAAe,EAAE,EAAE,IAAI,EAAE,GAAG,OAAO,EAAE,gBAAgB,EAAE,IAAI,EAAE,CAAC,CAAC;IAC1F,CAAC;CACF"} \ No newline at end of file diff --git a/node_modules/openai/resources/audio/transcriptions.d.ts b/node_modules/openai/resources/audio/transcriptions.d.ts new file mode 100644 index 0000000..73f10e9 --- /dev/null +++ b/node_modules/openai/resources/audio/transcriptions.d.ts @@ -0,0 +1,162 @@ +import { APIResource } from "../../resource.js"; +import * as Core from "../../core.js"; +import * as AudioAPI from "./audio.js"; +export declare class Transcriptions extends APIResource { + /** + * Transcribes audio into the input language. + */ + create(body: TranscriptionCreateParams<'json' | undefined>, options?: Core.RequestOptions): Core.APIPromise; + create(body: TranscriptionCreateParams<'verbose_json'>, options?: Core.RequestOptions): Core.APIPromise; + create(body: TranscriptionCreateParams<'srt' | 'vtt' | 'text'>, options?: Core.RequestOptions): Core.APIPromise; + create(body: TranscriptionCreateParams, options?: Core.RequestOptions): Core.APIPromise; +} +/** + * Represents a transcription response returned by model, based on the provided + * input. + */ +export interface Transcription { + /** + * The transcribed text. + */ + text: string; +} +export interface TranscriptionSegment { + /** + * Unique identifier of the segment. + */ + id: number; + /** + * Average logprob of the segment. If the value is lower than -1, consider the + * logprobs failed. 
+ */ + avg_logprob: number; + /** + * Compression ratio of the segment. If the value is greater than 2.4, consider the + * compression failed. + */ + compression_ratio: number; + /** + * End time of the segment in seconds. + */ + end: number; + /** + * Probability of no speech in the segment. If the value is higher than 1.0 and the + * `avg_logprob` is below -1, consider this segment silent. + */ + no_speech_prob: number; + /** + * Seek offset of the segment. + */ + seek: number; + /** + * Start time of the segment in seconds. + */ + start: number; + /** + * Temperature parameter used for generating the segment. + */ + temperature: number; + /** + * Text content of the segment. + */ + text: string; + /** + * Array of token IDs for the text content. + */ + tokens: Array; +} +/** + * Represents a verbose json transcription response returned by model, based on the + * provided input. + */ +export interface TranscriptionVerbose { + /** + * The duration of the input audio. + */ + duration: string; + /** + * The language of the input audio. + */ + language: string; + /** + * The transcribed text. + */ + text: string; + /** + * Segments of the transcribed text and their corresponding details. + */ + segments?: Array; + /** + * Extracted words and their corresponding timestamps. + */ + words?: Array; +} +export interface TranscriptionWord { + /** + * End time of the word in seconds. + */ + end: number; + /** + * Start time of the word in seconds. + */ + start: number; + /** + * The text content of the word. + */ + word: string; +} +/** + * Represents a transcription response returned by model, based on the provided + * input. + */ +export type TranscriptionCreateResponse = Transcription | TranscriptionVerbose; +export interface TranscriptionCreateParams { + /** + * The audio file object (not file name) to transcribe, in one of these formats: + * flac, mp3, mp4, mpeg, mpga, m4a, ogg, wav, or webm. + */ + file: Core.Uploadable; + /** + * ID of the model to use. Only `whisper-1` (which is powered by our open source + * Whisper V2 model) is currently available. + */ + model: (string & {}) | AudioAPI.AudioModel; + /** + * The language of the input audio. Supplying the input language in + * [ISO-639-1](https://en.wikipedia.org/wiki/List_of_ISO_639-1_codes) format will + * improve accuracy and latency. + */ + language?: string; + /** + * An optional text to guide the model's style or continue a previous audio + * segment. The + * [prompt](https://platform.openai.com/docs/guides/speech-to-text#prompting) + * should match the audio language. + */ + prompt?: string; + /** + * The format of the output, in one of these options: `json`, `text`, `srt`, + * `verbose_json`, or `vtt`. + */ + response_format?: ResponseFormat; + /** + * The sampling temperature, between 0 and 1. Higher values like 0.8 will make the + * output more random, while lower values like 0.2 will make it more focused and + * deterministic. If set to 0, the model will use + * [log probability](https://en.wikipedia.org/wiki/Log_probability) to + * automatically increase the temperature until certain thresholds are hit. + */ + temperature?: number; + /** + * The timestamp granularities to populate for this transcription. + * `response_format` must be set `verbose_json` to use timestamp granularities. + * Either or both of these options are supported: `word`, or `segment`. Note: There + * is no additional latency for segment timestamps, but generating word timestamps + * incurs additional latency. 
+ */ + timestamp_granularities?: Array<'word' | 'segment'>; +} +export declare namespace Transcriptions { + export { type Transcription as Transcription, type TranscriptionSegment as TranscriptionSegment, type TranscriptionVerbose as TranscriptionVerbose, type TranscriptionWord as TranscriptionWord, type TranscriptionCreateResponse as TranscriptionCreateResponse, type TranscriptionCreateParams as TranscriptionCreateParams, }; +} +//# sourceMappingURL=transcriptions.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/resources/audio/transcriptions.d.ts.map b/node_modules/openai/resources/audio/transcriptions.d.ts.map new file mode 100644 index 0000000..530e96c --- /dev/null +++ b/node_modules/openai/resources/audio/transcriptions.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"transcriptions.d.ts","sourceRoot":"","sources":["../../src/resources/audio/transcriptions.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,WAAW,EAAE,MAAM,gBAAgB,CAAC;AAC7C,OAAO,KAAK,IAAI,MAAM,YAAY,CAAC;AACnC,OAAO,KAAK,QAAQ,MAAM,SAAS,CAAC;AAEpC,qBAAa,cAAe,SAAQ,WAAW;IAC7C;;OAEG;IACH,MAAM,CACJ,IAAI,EAAE,yBAAyB,CAAC,MAAM,GAAG,SAAS,CAAC,EACnD,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAC5B,IAAI,CAAC,UAAU,CAAC,aAAa,CAAC;IACjC,MAAM,CACJ,IAAI,EAAE,yBAAyB,CAAC,cAAc,CAAC,EAC/C,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAC5B,IAAI,CAAC,UAAU,CAAC,oBAAoB,CAAC;IACxC,MAAM,CACJ,IAAI,EAAE,yBAAyB,CAAC,KAAK,GAAG,KAAK,GAAG,MAAM,CAAC,EACvD,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAC5B,IAAI,CAAC,UAAU,CAAC,MAAM,CAAC;IAC1B,MAAM,CAAC,IAAI,EAAE,yBAAyB,EAAE,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAAG,IAAI,CAAC,UAAU,CAAC,aAAa,CAAC;CAOvG;AAED;;;GAGG;AACH,MAAM,WAAW,aAAa;IAC5B;;OAEG;IACH,IAAI,EAAE,MAAM,CAAC;CACd;AAED,MAAM,WAAW,oBAAoB;IACnC;;OAEG;IACH,EAAE,EAAE,MAAM,CAAC;IAEX;;;OAGG;IACH,WAAW,EAAE,MAAM,CAAC;IAEpB;;;OAGG;IACH,iBAAiB,EAAE,MAAM,CAAC;IAE1B;;OAEG;IACH,GAAG,EAAE,MAAM,CAAC;IAEZ;;;OAGG;IACH,cAAc,EAAE,MAAM,CAAC;IAEvB;;OAEG;IACH,IAAI,EAAE,MAAM,CAAC;IAEb;;OAEG;IACH,KAAK,EAAE,MAAM,CAAC;IAEd;;OAEG;IACH,WAAW,EAAE,MAAM,CAAC;IAEpB;;OAEG;IACH,IAAI,EAAE,MAAM,CAAC;IAEb;;OAEG;IACH,MAAM,EAAE,KAAK,CAAC,MAAM,CAAC,CAAC;CACvB;AAED;;;GAGG;AACH,MAAM,WAAW,oBAAoB;IACnC;;OAEG;IACH,QAAQ,EAAE,MAAM,CAAC;IAEjB;;OAEG;IACH,QAAQ,EAAE,MAAM,CAAC;IAEjB;;OAEG;IACH,IAAI,EAAE,MAAM,CAAC;IAEb;;OAEG;IACH,QAAQ,CAAC,EAAE,KAAK,CAAC,oBAAoB,CAAC,CAAC;IAEvC;;OAEG;IACH,KAAK,CAAC,EAAE,KAAK,CAAC,iBAAiB,CAAC,CAAC;CAClC;AAED,MAAM,WAAW,iBAAiB;IAChC;;OAEG;IACH,GAAG,EAAE,MAAM,CAAC;IAEZ;;OAEG;IACH,KAAK,EAAE,MAAM,CAAC;IAEd;;OAEG;IACH,IAAI,EAAE,MAAM,CAAC;CACd;AAED;;;GAGG;AACH,MAAM,MAAM,2BAA2B,GAAG,aAAa,GAAG,oBAAoB,CAAC;AAE/E,MAAM,WAAW,yBAAyB,CACxC,cAAc,SAAS,QAAQ,CAAC,mBAAmB,GAAG,SAAS,GAAG,QAAQ,CAAC,mBAAmB,GAAG,SAAS;IAE1G;;;OAGG;IACH,IAAI,EAAE,IAAI,CAAC,UAAU,CAAC;IAEtB;;;OAGG;IACH,KAAK,EAAE,CAAC,MAAM,GAAG,EAAE,CAAC,GAAG,QAAQ,CAAC,UAAU,CAAC;IAE3C;;;;OAIG;IACH,QAAQ,CAAC,EAAE,MAAM,CAAC;IAElB;;;;;OAKG;IACH,MAAM,CAAC,EAAE,MAAM,CAAC;IAEhB;;;OAGG;IACH,eAAe,CAAC,EAAE,cAAc,CAAC;IAEjC;;;;;;OAMG;IACH,WAAW,CAAC,EAAE,MAAM,CAAC;IAErB;;;;;;OAMG;IACH,uBAAuB,CAAC,EAAE,KAAK,CAAC,MAAM,GAAG,SAAS,CAAC,CAAC;CACrD;AAED,MAAM,CAAC,OAAO,WAAW,cAAc,CAAC;IACtC,OAAO,EACL,KAAK,aAAa,IAAI,aAAa,EACnC,KAAK,oBAAoB,IAAI,oBAAoB,EACjD,KAAK,oBAAoB,IAAI,oBAAoB,EACjD,KAAK,iBAAiB,IAAI,iBAAiB,EAC3C,KAAK,2BAA2B,IAAI,2BAA2B,EAC/D,KAAK,yBAAyB,IAAI,yBAAyB,GAC5D,CAAC;CACH"} \ No newline at end of file diff --git a/node_modules/openai/resources/audio/transcriptions.js b/node_modules/openai/resources/audio/transcriptions.js new file mode 100644 index 0000000..e414b78 --- /dev/null +++ b/node_modules/openai/resources/audio/transcriptions.js @@ -0,0 +1,36 @@ +"use strict"; +// File 
generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Transcriptions = void 0; +const resource_1 = require("../../resource.js"); +const Core = __importStar(require("../../core.js")); +class Transcriptions extends resource_1.APIResource { + create(body, options) { + return this._client.post('/audio/transcriptions', Core.multipartFormRequestOptions({ body, ...options })); + } +} +exports.Transcriptions = Transcriptions; +//# sourceMappingURL=transcriptions.js.map \ No newline at end of file diff --git a/node_modules/openai/resources/audio/transcriptions.js.map b/node_modules/openai/resources/audio/transcriptions.js.map new file mode 100644 index 0000000..87405c4 --- /dev/null +++ b/node_modules/openai/resources/audio/transcriptions.js.map @@ -0,0 +1 @@ +{"version":3,"file":"transcriptions.js","sourceRoot":"","sources":["../../src/resources/audio/transcriptions.ts"],"names":[],"mappings":";AAAA,sFAAsF;;;;;;;;;;;;;;;;;;;;;;;;;;AAEtF,gDAA6C;AAC7C,oDAAmC;AAGnC,MAAa,cAAe,SAAQ,sBAAW;IAiB7C,MAAM,CACJ,IAA+B,EAC/B,OAA6B;QAE7B,OAAO,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,uBAAuB,EAAE,IAAI,CAAC,2BAA2B,CAAC,EAAE,IAAI,EAAE,GAAG,OAAO,EAAE,CAAC,CAAC,CAAC;IAC5G,CAAC;CACF;AAvBD,wCAuBC"} \ No newline at end of file diff --git a/node_modules/openai/resources/audio/transcriptions.mjs b/node_modules/openai/resources/audio/transcriptions.mjs new file mode 100644 index 0000000..3e0ac49 --- /dev/null +++ b/node_modules/openai/resources/audio/transcriptions.mjs @@ -0,0 +1,9 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+import { APIResource } from "../../resource.mjs"; +import * as Core from "../../core.mjs"; +export class Transcriptions extends APIResource { + create(body, options) { + return this._client.post('/audio/transcriptions', Core.multipartFormRequestOptions({ body, ...options })); + } +} +//# sourceMappingURL=transcriptions.mjs.map \ No newline at end of file diff --git a/node_modules/openai/resources/audio/transcriptions.mjs.map b/node_modules/openai/resources/audio/transcriptions.mjs.map new file mode 100644 index 0000000..805ec99 --- /dev/null +++ b/node_modules/openai/resources/audio/transcriptions.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"transcriptions.mjs","sourceRoot":"","sources":["../../src/resources/audio/transcriptions.ts"],"names":[],"mappings":"AAAA,sFAAsF;OAE/E,EAAE,WAAW,EAAE;OACf,KAAK,IAAI;AAGhB,MAAM,OAAO,cAAe,SAAQ,WAAW;IAiB7C,MAAM,CACJ,IAA+B,EAC/B,OAA6B;QAE7B,OAAO,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,uBAAuB,EAAE,IAAI,CAAC,2BAA2B,CAAC,EAAE,IAAI,EAAE,GAAG,OAAO,EAAE,CAAC,CAAC,CAAC;IAC5G,CAAC;CACF"} \ No newline at end of file diff --git a/node_modules/openai/resources/audio/translations.d.ts b/node_modules/openai/resources/audio/translations.d.ts new file mode 100644 index 0000000..fa4de35 --- /dev/null +++ b/node_modules/openai/resources/audio/translations.d.ts @@ -0,0 +1,71 @@ +import { APIResource } from "../../resource.js"; +import * as Core from "../../core.js"; +import * as AudioAPI from "./audio.js"; +import * as TranscriptionsAPI from "./transcriptions.js"; +export declare class Translations extends APIResource { + /** + * Translates audio into English. + */ + create(body: TranslationCreateParams<'json' | undefined>, options?: Core.RequestOptions): Core.APIPromise; + create(body: TranslationCreateParams<'verbose_json'>, options?: Core.RequestOptions): Core.APIPromise; + create(body: TranslationCreateParams<'text' | 'srt' | 'vtt'>, options?: Core.RequestOptions): Core.APIPromise; + create(body: TranslationCreateParams, options?: Core.RequestOptions): Core.APIPromise; +} +export interface Translation { + text: string; +} +export interface TranslationVerbose { + /** + * The duration of the input audio. + */ + duration: string; + /** + * The language of the output translation (always `english`). + */ + language: string; + /** + * The translated text. + */ + text: string; + /** + * Segments of the translated text and their corresponding details. + */ + segments?: Array; +} +export type TranslationCreateResponse = Translation | TranslationVerbose; +export interface TranslationCreateParams { + /** + * The audio file object (not file name) translate, in one of these formats: flac, + * mp3, mp4, mpeg, mpga, m4a, ogg, wav, or webm. + */ + file: Core.Uploadable; + /** + * ID of the model to use. Only `whisper-1` (which is powered by our open source + * Whisper V2 model) is currently available. + */ + model: (string & {}) | AudioAPI.AudioModel; + /** + * An optional text to guide the model's style or continue a previous audio + * segment. The + * [prompt](https://platform.openai.com/docs/guides/speech-to-text#prompting) + * should be in English. + */ + prompt?: string; + /** + * The format of the output, in one of these options: `json`, `text`, `srt`, + * `verbose_json`, or `vtt`. + */ + response_format?: ResponseFormat; + /** + * The sampling temperature, between 0 and 1. Higher values like 0.8 will make the + * output more random, while lower values like 0.2 will make it more focused and + * deterministic. 
If set to 0, the model will use + * [log probability](https://en.wikipedia.org/wiki/Log_probability) to + * automatically increase the temperature until certain thresholds are hit. + */ + temperature?: number; +} +export declare namespace Translations { + export { type Translation as Translation, type TranslationVerbose as TranslationVerbose, type TranslationCreateResponse as TranslationCreateResponse, type TranslationCreateParams as TranslationCreateParams, }; +} +//# sourceMappingURL=translations.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/resources/audio/translations.d.ts.map b/node_modules/openai/resources/audio/translations.d.ts.map new file mode 100644 index 0000000..7666736 --- /dev/null +++ b/node_modules/openai/resources/audio/translations.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"translations.d.ts","sourceRoot":"","sources":["../../src/resources/audio/translations.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,WAAW,EAAE,MAAM,gBAAgB,CAAC;AAC7C,OAAO,KAAK,IAAI,MAAM,YAAY,CAAC;AACnC,OAAO,KAAK,QAAQ,MAAM,SAAS,CAAC;AACpC,OAAO,KAAK,iBAAiB,MAAM,kBAAkB,CAAC;AAEtD,qBAAa,YAAa,SAAQ,WAAW;IAC3C;;OAEG;IACH,MAAM,CACJ,IAAI,EAAE,uBAAuB,CAAC,MAAM,GAAG,SAAS,CAAC,EACjD,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAC5B,IAAI,CAAC,UAAU,CAAC,WAAW,CAAC;IAC/B,MAAM,CACJ,IAAI,EAAE,uBAAuB,CAAC,cAAc,CAAC,EAC7C,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAC5B,IAAI,CAAC,UAAU,CAAC,kBAAkB,CAAC;IACtC,MAAM,CACJ,IAAI,EAAE,uBAAuB,CAAC,MAAM,GAAG,KAAK,GAAG,KAAK,CAAC,EACrD,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAC5B,IAAI,CAAC,UAAU,CAAC,MAAM,CAAC;IAC1B,MAAM,CAAC,IAAI,EAAE,uBAAuB,EAAE,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAAG,IAAI,CAAC,UAAU,CAAC,WAAW,CAAC;CAOnG;AAED,MAAM,WAAW,WAAW;IAC1B,IAAI,EAAE,MAAM,CAAC;CACd;AAED,MAAM,WAAW,kBAAkB;IACjC;;OAEG;IACH,QAAQ,EAAE,MAAM,CAAC;IAEjB;;OAEG;IACH,QAAQ,EAAE,MAAM,CAAC;IAEjB;;OAEG;IACH,IAAI,EAAE,MAAM,CAAC;IAEb;;OAEG;IACH,QAAQ,CAAC,EAAE,KAAK,CAAC,iBAAiB,CAAC,oBAAoB,CAAC,CAAC;CAC1D;AAED,MAAM,MAAM,yBAAyB,GAAG,WAAW,GAAG,kBAAkB,CAAC;AAEzE,MAAM,WAAW,uBAAuB,CACtC,cAAc,SAAS,QAAQ,CAAC,mBAAmB,GAAG,SAAS,GAAG,QAAQ,CAAC,mBAAmB,GAAG,SAAS;IAE1G;;;OAGG;IACH,IAAI,EAAE,IAAI,CAAC,UAAU,CAAC;IAEtB;;;OAGG;IACH,KAAK,EAAE,CAAC,MAAM,GAAG,EAAE,CAAC,GAAG,QAAQ,CAAC,UAAU,CAAC;IAE3C;;;;;OAKG;IACH,MAAM,CAAC,EAAE,MAAM,CAAC;IAEhB;;;OAGG;IACH,eAAe,CAAC,EAAE,cAAc,CAAC;IAEjC;;;;;;OAMG;IACH,WAAW,CAAC,EAAE,MAAM,CAAC;CACtB;AAED,MAAM,CAAC,OAAO,WAAW,YAAY,CAAC;IACpC,OAAO,EACL,KAAK,WAAW,IAAI,WAAW,EAC/B,KAAK,kBAAkB,IAAI,kBAAkB,EAC7C,KAAK,yBAAyB,IAAI,yBAAyB,EAC3D,KAAK,uBAAuB,IAAI,uBAAuB,GACxD,CAAC;CACH"} \ No newline at end of file diff --git a/node_modules/openai/resources/audio/translations.js b/node_modules/openai/resources/audio/translations.js new file mode 100644 index 0000000..935e5f7 --- /dev/null +++ b/node_modules/openai/resources/audio/translations.js @@ -0,0 +1,36 @@ +"use strict"; +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Translations = void 0; +const resource_1 = require("../../resource.js"); +const Core = __importStar(require("../../core.js")); +class Translations extends resource_1.APIResource { + create(body, options) { + return this._client.post('/audio/translations', Core.multipartFormRequestOptions({ body, ...options })); + } +} +exports.Translations = Translations; +//# sourceMappingURL=translations.js.map \ No newline at end of file diff --git a/node_modules/openai/resources/audio/translations.js.map b/node_modules/openai/resources/audio/translations.js.map new file mode 100644 index 0000000..49d8734 --- /dev/null +++ b/node_modules/openai/resources/audio/translations.js.map @@ -0,0 +1 @@ +{"version":3,"file":"translations.js","sourceRoot":"","sources":["../../src/resources/audio/translations.ts"],"names":[],"mappings":";AAAA,sFAAsF;;;;;;;;;;;;;;;;;;;;;;;;;;AAEtF,gDAA6C;AAC7C,oDAAmC;AAInC,MAAa,YAAa,SAAQ,sBAAW;IAiB3C,MAAM,CACJ,IAA6B,EAC7B,OAA6B;QAE7B,OAAO,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,qBAAqB,EAAE,IAAI,CAAC,2BAA2B,CAAC,EAAE,IAAI,EAAE,GAAG,OAAO,EAAE,CAAC,CAAC,CAAC;IAC1G,CAAC;CACF;AAvBD,oCAuBC"} \ No newline at end of file diff --git a/node_modules/openai/resources/audio/translations.mjs b/node_modules/openai/resources/audio/translations.mjs new file mode 100644 index 0000000..7c6c6a1 --- /dev/null +++ b/node_modules/openai/resources/audio/translations.mjs @@ -0,0 +1,9 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+import { APIResource } from "../../resource.mjs"; +import * as Core from "../../core.mjs"; +export class Translations extends APIResource { + create(body, options) { + return this._client.post('/audio/translations', Core.multipartFormRequestOptions({ body, ...options })); + } +} +//# sourceMappingURL=translations.mjs.map \ No newline at end of file diff --git a/node_modules/openai/resources/audio/translations.mjs.map b/node_modules/openai/resources/audio/translations.mjs.map new file mode 100644 index 0000000..23430a5 --- /dev/null +++ b/node_modules/openai/resources/audio/translations.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"translations.mjs","sourceRoot":"","sources":["../../src/resources/audio/translations.ts"],"names":[],"mappings":"AAAA,sFAAsF;OAE/E,EAAE,WAAW,EAAE;OACf,KAAK,IAAI;AAIhB,MAAM,OAAO,YAAa,SAAQ,WAAW;IAiB3C,MAAM,CACJ,IAA6B,EAC7B,OAA6B;QAE7B,OAAO,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,qBAAqB,EAAE,IAAI,CAAC,2BAA2B,CAAC,EAAE,IAAI,EAAE,GAAG,OAAO,EAAE,CAAC,CAAC,CAAC;IAC1G,CAAC;CACF"} \ No newline at end of file diff --git a/node_modules/openai/resources/batches.d.ts b/node_modules/openai/resources/batches.d.ts new file mode 100644 index 0000000..15e9803 --- /dev/null +++ b/node_modules/openai/resources/batches.d.ts @@ -0,0 +1,186 @@ +import { APIResource } from "../resource.js"; +import * as Core from "../core.js"; +import * as BatchesAPI from "./batches.js"; +import { CursorPage, type CursorPageParams } from "../pagination.js"; +export declare class Batches extends APIResource { + /** + * Creates and executes a batch from an uploaded file of requests + */ + create(body: BatchCreateParams, options?: Core.RequestOptions): Core.APIPromise; + /** + * Retrieves a batch. + */ + retrieve(batchId: string, options?: Core.RequestOptions): Core.APIPromise; + /** + * List your organization's batches. + */ + list(query?: BatchListParams, options?: Core.RequestOptions): Core.PagePromise; + list(options?: Core.RequestOptions): Core.PagePromise; + /** + * Cancels an in-progress batch. The batch will be in status `cancelling` for up to + * 10 minutes, before changing to `cancelled`, where it will have partial results + * (if any) available in the output file. + */ + cancel(batchId: string, options?: Core.RequestOptions): Core.APIPromise; +} +export declare class BatchesPage extends CursorPage { +} +export interface Batch { + id: string; + /** + * The time frame within which the batch should be processed. + */ + completion_window: string; + /** + * The Unix timestamp (in seconds) for when the batch was created. + */ + created_at: number; + /** + * The OpenAI API endpoint used by the batch. + */ + endpoint: string; + /** + * The ID of the input file for the batch. + */ + input_file_id: string; + /** + * The object type, which is always `batch`. + */ + object: 'batch'; + /** + * The current status of the batch. + */ + status: 'validating' | 'failed' | 'in_progress' | 'finalizing' | 'completed' | 'expired' | 'cancelling' | 'cancelled'; + /** + * The Unix timestamp (in seconds) for when the batch was cancelled. + */ + cancelled_at?: number; + /** + * The Unix timestamp (in seconds) for when the batch started cancelling. + */ + cancelling_at?: number; + /** + * The Unix timestamp (in seconds) for when the batch was completed. + */ + completed_at?: number; + /** + * The ID of the file containing the outputs of requests with errors. + */ + error_file_id?: string; + errors?: Batch.Errors; + /** + * The Unix timestamp (in seconds) for when the batch expired. 
+ */ + expired_at?: number; + /** + * The Unix timestamp (in seconds) for when the batch will expire. + */ + expires_at?: number; + /** + * The Unix timestamp (in seconds) for when the batch failed. + */ + failed_at?: number; + /** + * The Unix timestamp (in seconds) for when the batch started finalizing. + */ + finalizing_at?: number; + /** + * The Unix timestamp (in seconds) for when the batch started processing. + */ + in_progress_at?: number; + /** + * Set of 16 key-value pairs that can be attached to an object. This can be useful + * for storing additional information about the object in a structured format. Keys + * can be a maximum of 64 characters long and values can be a maxium of 512 + * characters long. + */ + metadata?: unknown | null; + /** + * The ID of the file containing the outputs of successfully executed requests. + */ + output_file_id?: string; + /** + * The request counts for different statuses within the batch. + */ + request_counts?: BatchRequestCounts; +} +export declare namespace Batch { + interface Errors { + data?: Array; + /** + * The object type, which is always `list`. + */ + object?: string; + } +} +export interface BatchError { + /** + * An error code identifying the error type. + */ + code?: string; + /** + * The line number of the input file where the error occurred, if applicable. + */ + line?: number | null; + /** + * A human-readable message providing more details about the error. + */ + message?: string; + /** + * The name of the parameter that caused the error, if applicable. + */ + param?: string | null; +} +/** + * The request counts for different statuses within the batch. + */ +export interface BatchRequestCounts { + /** + * Number of requests that have been completed successfully. + */ + completed: number; + /** + * Number of requests that have failed. + */ + failed: number; + /** + * Total number of requests in the batch. + */ + total: number; +} +export interface BatchCreateParams { + /** + * The time frame within which the batch should be processed. Currently only `24h` + * is supported. + */ + completion_window: '24h'; + /** + * The endpoint to be used for all requests in the batch. Currently + * `/v1/chat/completions`, `/v1/embeddings`, and `/v1/completions` are supported. + * Note that `/v1/embeddings` batches are also restricted to a maximum of 50,000 + * embedding inputs across all requests in the batch. + */ + endpoint: '/v1/chat/completions' | '/v1/embeddings' | '/v1/completions'; + /** + * The ID of an uploaded file that contains requests for the new batch. + * + * See [upload file](https://platform.openai.com/docs/api-reference/files/create) + * for how to upload a file. + * + * Your input file must be formatted as a + * [JSONL file](https://platform.openai.com/docs/api-reference/batch/request-input), + * and must be uploaded with the purpose `batch`. The file can contain up to 50,000 + * requests, and can be up to 200 MB in size. + */ + input_file_id: string; + /** + * Optional custom metadata for the batch. 
+ */ + metadata?: Record | null; +} +export interface BatchListParams extends CursorPageParams { +} +export declare namespace Batches { + export { type Batch as Batch, type BatchError as BatchError, type BatchRequestCounts as BatchRequestCounts, BatchesPage as BatchesPage, type BatchCreateParams as BatchCreateParams, type BatchListParams as BatchListParams, }; +} +//# sourceMappingURL=batches.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/resources/batches.d.ts.map b/node_modules/openai/resources/batches.d.ts.map new file mode 100644 index 0000000..ac73206 --- /dev/null +++ b/node_modules/openai/resources/batches.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"batches.d.ts","sourceRoot":"","sources":["../src/resources/batches.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,WAAW,EAAE,MAAM,aAAa,CAAC;AAE1C,OAAO,KAAK,IAAI,MAAM,SAAS,CAAC;AAChC,OAAO,KAAK,UAAU,MAAM,WAAW,CAAC;AACxC,OAAO,EAAE,UAAU,EAAE,KAAK,gBAAgB,EAAE,MAAM,eAAe,CAAC;AAElE,qBAAa,OAAQ,SAAQ,WAAW;IACtC;;OAEG;IACH,MAAM,CAAC,IAAI,EAAE,iBAAiB,EAAE,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAAG,IAAI,CAAC,UAAU,CAAC,KAAK,CAAC;IAItF;;OAEG;IACH,QAAQ,CAAC,OAAO,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAAG,IAAI,CAAC,UAAU,CAAC,KAAK,CAAC;IAIhF;;OAEG;IACH,IAAI,CAAC,KAAK,CAAC,EAAE,eAAe,EAAE,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAAG,IAAI,CAAC,WAAW,CAAC,WAAW,EAAE,KAAK,CAAC;IAClG,IAAI,CAAC,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAAG,IAAI,CAAC,WAAW,CAAC,WAAW,EAAE,KAAK,CAAC;IAWzE;;;;OAIG;IACH,MAAM,CAAC,OAAO,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAAG,IAAI,CAAC,UAAU,CAAC,KAAK,CAAC;CAG/E;AAED,qBAAa,WAAY,SAAQ,UAAU,CAAC,KAAK,CAAC;CAAG;AAErD,MAAM,WAAW,KAAK;IACpB,EAAE,EAAE,MAAM,CAAC;IAEX;;OAEG;IACH,iBAAiB,EAAE,MAAM,CAAC;IAE1B;;OAEG;IACH,UAAU,EAAE,MAAM,CAAC;IAEnB;;OAEG;IACH,QAAQ,EAAE,MAAM,CAAC;IAEjB;;OAEG;IACH,aAAa,EAAE,MAAM,CAAC;IAEtB;;OAEG;IACH,MAAM,EAAE,OAAO,CAAC;IAEhB;;OAEG;IACH,MAAM,EACF,YAAY,GACZ,QAAQ,GACR,aAAa,GACb,YAAY,GACZ,WAAW,GACX,SAAS,GACT,YAAY,GACZ,WAAW,CAAC;IAEhB;;OAEG;IACH,YAAY,CAAC,EAAE,MAAM,CAAC;IAEtB;;OAEG;IACH,aAAa,CAAC,EAAE,MAAM,CAAC;IAEvB;;OAEG;IACH,YAAY,CAAC,EAAE,MAAM,CAAC;IAEtB;;OAEG;IACH,aAAa,CAAC,EAAE,MAAM,CAAC;IAEvB,MAAM,CAAC,EAAE,KAAK,CAAC,MAAM,CAAC;IAEtB;;OAEG;IACH,UAAU,CAAC,EAAE,MAAM,CAAC;IAEpB;;OAEG;IACH,UAAU,CAAC,EAAE,MAAM,CAAC;IAEpB;;OAEG;IACH,SAAS,CAAC,EAAE,MAAM,CAAC;IAEnB;;OAEG;IACH,aAAa,CAAC,EAAE,MAAM,CAAC;IAEvB;;OAEG;IACH,cAAc,CAAC,EAAE,MAAM,CAAC;IAExB;;;;;OAKG;IACH,QAAQ,CAAC,EAAE,OAAO,GAAG,IAAI,CAAC;IAE1B;;OAEG;IACH,cAAc,CAAC,EAAE,MAAM,CAAC;IAExB;;OAEG;IACH,cAAc,CAAC,EAAE,kBAAkB,CAAC;CACrC;AAED,yBAAiB,KAAK,CAAC;IACrB,UAAiB,MAAM;QACrB,IAAI,CAAC,EAAE,KAAK,CAAC,UAAU,CAAC,UAAU,CAAC,CAAC;QAEpC;;WAEG;QACH,MAAM,CAAC,EAAE,MAAM,CAAC;KACjB;CACF;AAED,MAAM,WAAW,UAAU;IACzB;;OAEG;IACH,IAAI,CAAC,EAAE,MAAM,CAAC;IAEd;;OAEG;IACH,IAAI,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IAErB;;OAEG;IACH,OAAO,CAAC,EAAE,MAAM,CAAC;IAEjB;;OAEG;IACH,KAAK,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;CACvB;AAED;;GAEG;AACH,MAAM,WAAW,kBAAkB;IACjC;;OAEG;IACH,SAAS,EAAE,MAAM,CAAC;IAElB;;OAEG;IACH,MAAM,EAAE,MAAM,CAAC;IAEf;;OAEG;IACH,KAAK,EAAE,MAAM,CAAC;CACf;AAED,MAAM,WAAW,iBAAiB;IAChC;;;OAGG;IACH,iBAAiB,EAAE,KAAK,CAAC;IAEzB;;;;;OAKG;IACH,QAAQ,EAAE,sBAAsB,GAAG,gBAAgB,GAAG,iBAAiB,CAAC;IAExE;;;;;;;;;;OAUG;IACH,aAAa,EAAE,MAAM,CAAC;IAEtB;;OAEG;IACH,QAAQ,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,GAAG,IAAI,CAAC;CAC1C;AAED,MAAM,WAAW,eAAgB,SAAQ,gBAAgB;CAAG;AAI5D,MAAM,CAAC,OAAO,WAAW,OAAO,CAAC;IAC/B,OAAO,EACL,KAAK,KAAK,IAAI,KAAK,EACnB,KAAK,UAAU,IAAI,UAAU,EAC7B,KAAK,kBAAkB,IAAI,kBAAkB,EAC7C,WAAW,IAAI,WAAW,EAC1B,KAAK,iBAAiB,IAAI,iBAAiB,EAC3C,KAAK,eAAe,IAAI,eAAe,GACxC,CAAC;CACH"} \ No newline at end of file 
diff --git a/node_modules/openai/resources/batches.js b/node_modules/openai/resources/batches.js new file mode 100644 index 0000000..97cc9cc --- /dev/null +++ b/node_modules/openai/resources/batches.js @@ -0,0 +1,41 @@ +"use strict"; +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.BatchesPage = exports.Batches = void 0; +const resource_1 = require("../resource.js"); +const core_1 = require("../core.js"); +const pagination_1 = require("../pagination.js"); +class Batches extends resource_1.APIResource { + /** + * Creates and executes a batch from an uploaded file of requests + */ + create(body, options) { + return this._client.post('/batches', { body, ...options }); + } + /** + * Retrieves a batch. + */ + retrieve(batchId, options) { + return this._client.get(`/batches/${batchId}`, options); + } + list(query = {}, options) { + if ((0, core_1.isRequestOptions)(query)) { + return this.list({}, query); + } + return this._client.getAPIList('/batches', BatchesPage, { query, ...options }); + } + /** + * Cancels an in-progress batch. The batch will be in status `cancelling` for up to + * 10 minutes, before changing to `cancelled`, where it will have partial results + * (if any) available in the output file. + */ + cancel(batchId, options) { + return this._client.post(`/batches/${batchId}/cancel`, options); + } +} +exports.Batches = Batches; +class BatchesPage extends pagination_1.CursorPage { +} +exports.BatchesPage = BatchesPage; +Batches.BatchesPage = BatchesPage; +//# sourceMappingURL=batches.js.map \ No newline at end of file diff --git a/node_modules/openai/resources/batches.js.map b/node_modules/openai/resources/batches.js.map new file mode 100644 index 0000000..eb76a5d --- /dev/null +++ b/node_modules/openai/resources/batches.js.map @@ -0,0 +1 @@ +{"version":3,"file":"batches.js","sourceRoot":"","sources":["../src/resources/batches.ts"],"names":[],"mappings":";AAAA,sFAAsF;;;AAEtF,6CAA0C;AAC1C,qCAA2C;AAG3C,iDAAkE;AAElE,MAAa,OAAQ,SAAQ,sBAAW;IACtC;;OAEG;IACH,MAAM,CAAC,IAAuB,EAAE,OAA6B;QAC3D,OAAO,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,UAAU,EAAE,EAAE,IAAI,EAAE,GAAG,OAAO,EAAE,CAAC,CAAC;IAC7D,CAAC;IAED;;OAEG;IACH,QAAQ,CAAC,OAAe,EAAE,OAA6B;QACrD,OAAO,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,YAAY,OAAO,EAAE,EAAE,OAAO,CAAC,CAAC;IAC1D,CAAC;IAOD,IAAI,CACF,QAA+C,EAAE,EACjD,OAA6B;QAE7B,IAAI,IAAA,uBAAgB,EAAC,KAAK,CAAC,EAAE;YAC3B,OAAO,IAAI,CAAC,IAAI,CAAC,EAAE,EAAE,KAAK,CAAC,CAAC;SAC7B;QACD,OAAO,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,UAAU,EAAE,WAAW,EAAE,EAAE,KAAK,EAAE,GAAG,OAAO,EAAE,CAAC,CAAC;IACjF,CAAC;IAED;;;;OAIG;IACH,MAAM,CAAC,OAAe,EAAE,OAA6B;QACnD,OAAO,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,YAAY,OAAO,SAAS,EAAE,OAAO,CAAC,CAAC;IAClE,CAAC;CACF;AAtCD,0BAsCC;AAED,MAAa,WAAY,SAAQ,uBAAiB;CAAG;AAArD,kCAAqD;AAsMrD,OAAO,CAAC,WAAW,GAAG,WAAW,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/resources/batches.mjs b/node_modules/openai/resources/batches.mjs new file mode 100644 index 0000000..0e75608 --- /dev/null +++ b/node_modules/openai/resources/batches.mjs @@ -0,0 +1,36 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+import { APIResource } from "../resource.mjs"; +import { isRequestOptions } from "../core.mjs"; +import { CursorPage } from "../pagination.mjs"; +export class Batches extends APIResource { + /** + * Creates and executes a batch from an uploaded file of requests + */ + create(body, options) { + return this._client.post('/batches', { body, ...options }); + } + /** + * Retrieves a batch. + */ + retrieve(batchId, options) { + return this._client.get(`/batches/${batchId}`, options); + } + list(query = {}, options) { + if (isRequestOptions(query)) { + return this.list({}, query); + } + return this._client.getAPIList('/batches', BatchesPage, { query, ...options }); + } + /** + * Cancels an in-progress batch. The batch will be in status `cancelling` for up to + * 10 minutes, before changing to `cancelled`, where it will have partial results + * (if any) available in the output file. + */ + cancel(batchId, options) { + return this._client.post(`/batches/${batchId}/cancel`, options); + } +} +export class BatchesPage extends CursorPage { +} +Batches.BatchesPage = BatchesPage; +//# sourceMappingURL=batches.mjs.map \ No newline at end of file diff --git a/node_modules/openai/resources/batches.mjs.map b/node_modules/openai/resources/batches.mjs.map new file mode 100644 index 0000000..30baac6 --- /dev/null +++ b/node_modules/openai/resources/batches.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"batches.mjs","sourceRoot":"","sources":["../src/resources/batches.ts"],"names":[],"mappings":"AAAA,sFAAsF;OAE/E,EAAE,WAAW,EAAE;OACf,EAAE,gBAAgB,EAAE;OAGpB,EAAE,UAAU,EAAyB;AAE5C,MAAM,OAAO,OAAQ,SAAQ,WAAW;IACtC;;OAEG;IACH,MAAM,CAAC,IAAuB,EAAE,OAA6B;QAC3D,OAAO,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,UAAU,EAAE,EAAE,IAAI,EAAE,GAAG,OAAO,EAAE,CAAC,CAAC;IAC7D,CAAC;IAED;;OAEG;IACH,QAAQ,CAAC,OAAe,EAAE,OAA6B;QACrD,OAAO,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,YAAY,OAAO,EAAE,EAAE,OAAO,CAAC,CAAC;IAC1D,CAAC;IAOD,IAAI,CACF,QAA+C,EAAE,EACjD,OAA6B;QAE7B,IAAI,gBAAgB,CAAC,KAAK,CAAC,EAAE;YAC3B,OAAO,IAAI,CAAC,IAAI,CAAC,EAAE,EAAE,KAAK,CAAC,CAAC;SAC7B;QACD,OAAO,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,UAAU,EAAE,WAAW,EAAE,EAAE,KAAK,EAAE,GAAG,OAAO,EAAE,CAAC,CAAC;IACjF,CAAC;IAED;;;;OAIG;IACH,MAAM,CAAC,OAAe,EAAE,OAA6B;QACnD,OAAO,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,YAAY,OAAO,SAAS,EAAE,OAAO,CAAC,CAAC;IAClE,CAAC;CACF;AAED,MAAM,OAAO,WAAY,SAAQ,UAAiB;CAAG;AAsMrD,OAAO,CAAC,WAAW,GAAG,WAAW,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/resources/beta/assistants.d.ts b/node_modules/openai/resources/beta/assistants.d.ts new file mode 100644 index 0000000..68a04cc --- /dev/null +++ b/node_modules/openai/resources/beta/assistants.d.ts @@ -0,0 +1,1140 @@ +import { APIResource } from "../../resource.js"; +import * as Core from "../../core.js"; +import * as Shared from "../shared.js"; +import * as ChatAPI from "../chat/chat.js"; +import * as MessagesAPI from "./threads/messages.js"; +import * as ThreadsAPI from "./threads/threads.js"; +import * as VectorStoresAPI from "./vector-stores/vector-stores.js"; +import * as RunsAPI from "./threads/runs/runs.js"; +import * as StepsAPI from "./threads/runs/steps.js"; +import { CursorPage, type CursorPageParams } from "../../pagination.js"; +export declare class Assistants extends APIResource { + /** + * Create an assistant with a model and instructions. + */ + create(body: AssistantCreateParams, options?: Core.RequestOptions): Core.APIPromise; + /** + * Retrieves an assistant. + */ + retrieve(assistantId: string, options?: Core.RequestOptions): Core.APIPromise; + /** + * Modifies an assistant. 
+ */ + update(assistantId: string, body: AssistantUpdateParams, options?: Core.RequestOptions): Core.APIPromise; + /** + * Returns a list of assistants. + */ + list(query?: AssistantListParams, options?: Core.RequestOptions): Core.PagePromise; + list(options?: Core.RequestOptions): Core.PagePromise; + /** + * Delete an assistant. + */ + del(assistantId: string, options?: Core.RequestOptions): Core.APIPromise; +} +export declare class AssistantsPage extends CursorPage { +} +/** + * Represents an `assistant` that can call the model and use tools. + */ +export interface Assistant { + /** + * The identifier, which can be referenced in API endpoints. + */ + id: string; + /** + * The Unix timestamp (in seconds) for when the assistant was created. + */ + created_at: number; + /** + * The description of the assistant. The maximum length is 512 characters. + */ + description: string | null; + /** + * The system instructions that the assistant uses. The maximum length is 256,000 + * characters. + */ + instructions: string | null; + /** + * Set of 16 key-value pairs that can be attached to an object. This can be useful + * for storing additional information about the object in a structured format. Keys + * can be a maximum of 64 characters long and values can be a maxium of 512 + * characters long. + */ + metadata: unknown | null; + /** + * ID of the model to use. You can use the + * [List models](https://platform.openai.com/docs/api-reference/models/list) API to + * see all of your available models, or see our + * [Model overview](https://platform.openai.com/docs/models) for descriptions of + * them. + */ + model: string; + /** + * The name of the assistant. The maximum length is 256 characters. + */ + name: string | null; + /** + * The object type, which is always `assistant`. + */ + object: 'assistant'; + /** + * A list of tool enabled on the assistant. There can be a maximum of 128 tools per + * assistant. Tools can be of types `code_interpreter`, `file_search`, or + * `function`. + */ + tools: Array; + /** + * Specifies the format that the model must output. Compatible with + * [GPT-4o](https://platform.openai.com/docs/models#gpt-4o), + * [GPT-4 Turbo](https://platform.openai.com/docs/models#gpt-4-turbo-and-gpt-4), + * and all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. + * + * Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured + * Outputs which ensures the model will match your supplied JSON schema. Learn more + * in the + * [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). + * + * Setting to `{ "type": "json_object" }` enables JSON mode, which ensures the + * message the model generates is valid JSON. + * + * **Important:** when using JSON mode, you **must** also instruct the model to + * produce JSON yourself via a system or user message. Without this, the model may + * generate an unending stream of whitespace until the generation reaches the token + * limit, resulting in a long-running and seemingly "stuck" request. Also note that + * the message content may be partially cut off if `finish_reason="length"`, which + * indicates the generation exceeded `max_tokens` or the conversation exceeded the + * max context length. + */ + response_format?: ThreadsAPI.AssistantResponseFormatOption | null; + /** + * What sampling temperature to use, between 0 and 2. Higher values like 0.8 will + * make the output more random, while lower values like 0.2 will make it more + * focused and deterministic. 
+ */ + temperature?: number | null; + /** + * A set of resources that are used by the assistant's tools. The resources are + * specific to the type of tool. For example, the `code_interpreter` tool requires + * a list of file IDs, while the `file_search` tool requires a list of vector store + * IDs. + */ + tool_resources?: Assistant.ToolResources | null; + /** + * An alternative to sampling with temperature, called nucleus sampling, where the + * model considers the results of the tokens with top_p probability mass. So 0.1 + * means only the tokens comprising the top 10% probability mass are considered. + * + * We generally recommend altering this or temperature but not both. + */ + top_p?: number | null; +} +export declare namespace Assistant { + /** + * A set of resources that are used by the assistant's tools. The resources are + * specific to the type of tool. For example, the `code_interpreter` tool requires + * a list of file IDs, while the `file_search` tool requires a list of vector store + * IDs. + */ + interface ToolResources { + code_interpreter?: ToolResources.CodeInterpreter; + file_search?: ToolResources.FileSearch; + } + namespace ToolResources { + interface CodeInterpreter { + /** + * A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made + * available to the `code_interpreter`` tool. There can be a maximum of 20 files + * associated with the tool. + */ + file_ids?: Array; + } + interface FileSearch { + /** + * The ID of the + * [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + * attached to this assistant. There can be a maximum of 1 vector store attached to + * the assistant. + */ + vector_store_ids?: Array; + } + } +} +export interface AssistantDeleted { + id: string; + deleted: boolean; + object: 'assistant.deleted'; +} +/** + * Represents an event emitted when streaming a Run. + * + * Each event in a server-sent events stream has an `event` and `data` property: + * + * ``` + * event: thread.created + * data: {"id": "thread_123", "object": "thread", ...} + * ``` + * + * We emit events whenever a new object is created, transitions to a new state, or + * is being streamed in parts (deltas). For example, we emit `thread.run.created` + * when a new run is created, `thread.run.completed` when a run completes, and so + * on. When an Assistant chooses to create a message during a run, we emit a + * `thread.message.created event`, a `thread.message.in_progress` event, many + * `thread.message.delta` events, and finally a `thread.message.completed` event. + * + * We may add additional events over time, so we recommend handling unknown events + * gracefully in your code. See the + * [Assistants API quickstart](https://platform.openai.com/docs/assistants/overview) + * to learn how to integrate the Assistants API with streaming. 
+ */ +export type AssistantStreamEvent = AssistantStreamEvent.ThreadCreated | AssistantStreamEvent.ThreadRunCreated | AssistantStreamEvent.ThreadRunQueued | AssistantStreamEvent.ThreadRunInProgress | AssistantStreamEvent.ThreadRunRequiresAction | AssistantStreamEvent.ThreadRunCompleted | AssistantStreamEvent.ThreadRunIncomplete | AssistantStreamEvent.ThreadRunFailed | AssistantStreamEvent.ThreadRunCancelling | AssistantStreamEvent.ThreadRunCancelled | AssistantStreamEvent.ThreadRunExpired | AssistantStreamEvent.ThreadRunStepCreated | AssistantStreamEvent.ThreadRunStepInProgress | AssistantStreamEvent.ThreadRunStepDelta | AssistantStreamEvent.ThreadRunStepCompleted | AssistantStreamEvent.ThreadRunStepFailed | AssistantStreamEvent.ThreadRunStepCancelled | AssistantStreamEvent.ThreadRunStepExpired | AssistantStreamEvent.ThreadMessageCreated | AssistantStreamEvent.ThreadMessageInProgress | AssistantStreamEvent.ThreadMessageDelta | AssistantStreamEvent.ThreadMessageCompleted | AssistantStreamEvent.ThreadMessageIncomplete | AssistantStreamEvent.ErrorEvent; +export declare namespace AssistantStreamEvent { + /** + * Occurs when a new + * [thread](https://platform.openai.com/docs/api-reference/threads/object) is + * created. + */ + interface ThreadCreated { + /** + * Represents a thread that contains + * [messages](https://platform.openai.com/docs/api-reference/messages). + */ + data: ThreadsAPI.Thread; + event: 'thread.created'; + /** + * Whether to enable input audio transcription. + */ + enabled?: boolean; + } + /** + * Occurs when a new + * [run](https://platform.openai.com/docs/api-reference/runs/object) is created. + */ + interface ThreadRunCreated { + /** + * Represents an execution run on a + * [thread](https://platform.openai.com/docs/api-reference/threads). + */ + data: RunsAPI.Run; + event: 'thread.run.created'; + } + /** + * Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) + * moves to a `queued` status. + */ + interface ThreadRunQueued { + /** + * Represents an execution run on a + * [thread](https://platform.openai.com/docs/api-reference/threads). + */ + data: RunsAPI.Run; + event: 'thread.run.queued'; + } + /** + * Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) + * moves to an `in_progress` status. + */ + interface ThreadRunInProgress { + /** + * Represents an execution run on a + * [thread](https://platform.openai.com/docs/api-reference/threads). + */ + data: RunsAPI.Run; + event: 'thread.run.in_progress'; + } + /** + * Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) + * moves to a `requires_action` status. + */ + interface ThreadRunRequiresAction { + /** + * Represents an execution run on a + * [thread](https://platform.openai.com/docs/api-reference/threads). + */ + data: RunsAPI.Run; + event: 'thread.run.requires_action'; + } + /** + * Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) + * is completed. + */ + interface ThreadRunCompleted { + /** + * Represents an execution run on a + * [thread](https://platform.openai.com/docs/api-reference/threads). + */ + data: RunsAPI.Run; + event: 'thread.run.completed'; + } + /** + * Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) + * ends with status `incomplete`. + */ + interface ThreadRunIncomplete { + /** + * Represents an execution run on a + * [thread](https://platform.openai.com/docs/api-reference/threads). 
+ */ + data: RunsAPI.Run; + event: 'thread.run.incomplete'; + } + /** + * Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) + * fails. + */ + interface ThreadRunFailed { + /** + * Represents an execution run on a + * [thread](https://platform.openai.com/docs/api-reference/threads). + */ + data: RunsAPI.Run; + event: 'thread.run.failed'; + } + /** + * Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) + * moves to a `cancelling` status. + */ + interface ThreadRunCancelling { + /** + * Represents an execution run on a + * [thread](https://platform.openai.com/docs/api-reference/threads). + */ + data: RunsAPI.Run; + event: 'thread.run.cancelling'; + } + /** + * Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) + * is cancelled. + */ + interface ThreadRunCancelled { + /** + * Represents an execution run on a + * [thread](https://platform.openai.com/docs/api-reference/threads). + */ + data: RunsAPI.Run; + event: 'thread.run.cancelled'; + } + /** + * Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) + * expires. + */ + interface ThreadRunExpired { + /** + * Represents an execution run on a + * [thread](https://platform.openai.com/docs/api-reference/threads). + */ + data: RunsAPI.Run; + event: 'thread.run.expired'; + } + /** + * Occurs when a + * [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + * is created. + */ + interface ThreadRunStepCreated { + /** + * Represents a step in execution of a run. + */ + data: StepsAPI.RunStep; + event: 'thread.run.step.created'; + } + /** + * Occurs when a + * [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + * moves to an `in_progress` state. + */ + interface ThreadRunStepInProgress { + /** + * Represents a step in execution of a run. + */ + data: StepsAPI.RunStep; + event: 'thread.run.step.in_progress'; + } + /** + * Occurs when parts of a + * [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + * are being streamed. + */ + interface ThreadRunStepDelta { + /** + * Represents a run step delta i.e. any changed fields on a run step during + * streaming. + */ + data: StepsAPI.RunStepDeltaEvent; + event: 'thread.run.step.delta'; + } + /** + * Occurs when a + * [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + * is completed. + */ + interface ThreadRunStepCompleted { + /** + * Represents a step in execution of a run. + */ + data: StepsAPI.RunStep; + event: 'thread.run.step.completed'; + } + /** + * Occurs when a + * [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + * fails. + */ + interface ThreadRunStepFailed { + /** + * Represents a step in execution of a run. + */ + data: StepsAPI.RunStep; + event: 'thread.run.step.failed'; + } + /** + * Occurs when a + * [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + * is cancelled. + */ + interface ThreadRunStepCancelled { + /** + * Represents a step in execution of a run. + */ + data: StepsAPI.RunStep; + event: 'thread.run.step.cancelled'; + } + /** + * Occurs when a + * [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + * expires. + */ + interface ThreadRunStepExpired { + /** + * Represents a step in execution of a run. 
+ */ + data: StepsAPI.RunStep; + event: 'thread.run.step.expired'; + } + /** + * Occurs when a + * [message](https://platform.openai.com/docs/api-reference/messages/object) is + * created. + */ + interface ThreadMessageCreated { + /** + * Represents a message within a + * [thread](https://platform.openai.com/docs/api-reference/threads). + */ + data: MessagesAPI.Message; + event: 'thread.message.created'; + } + /** + * Occurs when a + * [message](https://platform.openai.com/docs/api-reference/messages/object) moves + * to an `in_progress` state. + */ + interface ThreadMessageInProgress { + /** + * Represents a message within a + * [thread](https://platform.openai.com/docs/api-reference/threads). + */ + data: MessagesAPI.Message; + event: 'thread.message.in_progress'; + } + /** + * Occurs when parts of a + * [Message](https://platform.openai.com/docs/api-reference/messages/object) are + * being streamed. + */ + interface ThreadMessageDelta { + /** + * Represents a message delta i.e. any changed fields on a message during + * streaming. + */ + data: MessagesAPI.MessageDeltaEvent; + event: 'thread.message.delta'; + } + /** + * Occurs when a + * [message](https://platform.openai.com/docs/api-reference/messages/object) is + * completed. + */ + interface ThreadMessageCompleted { + /** + * Represents a message within a + * [thread](https://platform.openai.com/docs/api-reference/threads). + */ + data: MessagesAPI.Message; + event: 'thread.message.completed'; + } + /** + * Occurs when a + * [message](https://platform.openai.com/docs/api-reference/messages/object) ends + * before it is completed. + */ + interface ThreadMessageIncomplete { + /** + * Represents a message within a + * [thread](https://platform.openai.com/docs/api-reference/threads). + */ + data: MessagesAPI.Message; + event: 'thread.message.incomplete'; + } + /** + * Occurs when an + * [error](https://platform.openai.com/docs/guides/error-codes#api-errors) occurs. + * This can happen due to an internal server error or a timeout. + */ + interface ErrorEvent { + data: Shared.ErrorObject; + event: 'error'; + } +} +export type AssistantTool = CodeInterpreterTool | FileSearchTool | FunctionTool; +export interface CodeInterpreterTool { + /** + * The type of tool being defined: `code_interpreter` + */ + type: 'code_interpreter'; +} +export interface FileSearchTool { + /** + * The type of tool being defined: `file_search` + */ + type: 'file_search'; + /** + * Overrides for the file search tool. + */ + file_search?: FileSearchTool.FileSearch; +} +export declare namespace FileSearchTool { + /** + * Overrides for the file search tool. + */ + interface FileSearch { + /** + * The maximum number of results the file search tool should output. The default is + * 20 for `gpt-4*` models and 5 for `gpt-3.5-turbo`. This number should be between + * 1 and 50 inclusive. + * + * Note that the file search tool may output fewer than `max_num_results` results. + * See the + * [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) + * for more information. + */ + max_num_results?: number; + /** + * The ranking options for the file search. If not specified, the file search tool + * will use the `auto` ranker and a score_threshold of 0. + * + * See the + * [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) + * for more information. 
+ */ + ranking_options?: FileSearch.RankingOptions; + } + namespace FileSearch { + /** + * The ranking options for the file search. If not specified, the file search tool + * will use the `auto` ranker and a score_threshold of 0. + * + * See the + * [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) + * for more information. + */ + interface RankingOptions { + /** + * The score threshold for the file search. All values must be a floating point + * number between 0 and 1. + */ + score_threshold: number; + /** + * The ranker to use for the file search. If not specified will use the `auto` + * ranker. + */ + ranker?: 'auto' | 'default_2024_08_21'; + } + } +} +export interface FunctionTool { + function: Shared.FunctionDefinition; + /** + * The type of tool being defined: `function` + */ + type: 'function'; +} +/** + * Occurs when a + * [message](https://platform.openai.com/docs/api-reference/messages/object) is + * created. + */ +export type MessageStreamEvent = MessageStreamEvent.ThreadMessageCreated | MessageStreamEvent.ThreadMessageInProgress | MessageStreamEvent.ThreadMessageDelta | MessageStreamEvent.ThreadMessageCompleted | MessageStreamEvent.ThreadMessageIncomplete; +export declare namespace MessageStreamEvent { + /** + * Occurs when a + * [message](https://platform.openai.com/docs/api-reference/messages/object) is + * created. + */ + interface ThreadMessageCreated { + /** + * Represents a message within a + * [thread](https://platform.openai.com/docs/api-reference/threads). + */ + data: MessagesAPI.Message; + event: 'thread.message.created'; + } + /** + * Occurs when a + * [message](https://platform.openai.com/docs/api-reference/messages/object) moves + * to an `in_progress` state. + */ + interface ThreadMessageInProgress { + /** + * Represents a message within a + * [thread](https://platform.openai.com/docs/api-reference/threads). + */ + data: MessagesAPI.Message; + event: 'thread.message.in_progress'; + } + /** + * Occurs when parts of a + * [Message](https://platform.openai.com/docs/api-reference/messages/object) are + * being streamed. + */ + interface ThreadMessageDelta { + /** + * Represents a message delta i.e. any changed fields on a message during + * streaming. + */ + data: MessagesAPI.MessageDeltaEvent; + event: 'thread.message.delta'; + } + /** + * Occurs when a + * [message](https://platform.openai.com/docs/api-reference/messages/object) is + * completed. + */ + interface ThreadMessageCompleted { + /** + * Represents a message within a + * [thread](https://platform.openai.com/docs/api-reference/threads). + */ + data: MessagesAPI.Message; + event: 'thread.message.completed'; + } + /** + * Occurs when a + * [message](https://platform.openai.com/docs/api-reference/messages/object) ends + * before it is completed. + */ + interface ThreadMessageIncomplete { + /** + * Represents a message within a + * [thread](https://platform.openai.com/docs/api-reference/threads). + */ + data: MessagesAPI.Message; + event: 'thread.message.incomplete'; + } +} +/** + * Occurs when a + * [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + * is created. 
+ */ +export type RunStepStreamEvent = RunStepStreamEvent.ThreadRunStepCreated | RunStepStreamEvent.ThreadRunStepInProgress | RunStepStreamEvent.ThreadRunStepDelta | RunStepStreamEvent.ThreadRunStepCompleted | RunStepStreamEvent.ThreadRunStepFailed | RunStepStreamEvent.ThreadRunStepCancelled | RunStepStreamEvent.ThreadRunStepExpired; +export declare namespace RunStepStreamEvent { + /** + * Occurs when a + * [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + * is created. + */ + interface ThreadRunStepCreated { + /** + * Represents a step in execution of a run. + */ + data: StepsAPI.RunStep; + event: 'thread.run.step.created'; + } + /** + * Occurs when a + * [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + * moves to an `in_progress` state. + */ + interface ThreadRunStepInProgress { + /** + * Represents a step in execution of a run. + */ + data: StepsAPI.RunStep; + event: 'thread.run.step.in_progress'; + } + /** + * Occurs when parts of a + * [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + * are being streamed. + */ + interface ThreadRunStepDelta { + /** + * Represents a run step delta i.e. any changed fields on a run step during + * streaming. + */ + data: StepsAPI.RunStepDeltaEvent; + event: 'thread.run.step.delta'; + } + /** + * Occurs when a + * [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + * is completed. + */ + interface ThreadRunStepCompleted { + /** + * Represents a step in execution of a run. + */ + data: StepsAPI.RunStep; + event: 'thread.run.step.completed'; + } + /** + * Occurs when a + * [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + * fails. + */ + interface ThreadRunStepFailed { + /** + * Represents a step in execution of a run. + */ + data: StepsAPI.RunStep; + event: 'thread.run.step.failed'; + } + /** + * Occurs when a + * [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + * is cancelled. + */ + interface ThreadRunStepCancelled { + /** + * Represents a step in execution of a run. + */ + data: StepsAPI.RunStep; + event: 'thread.run.step.cancelled'; + } + /** + * Occurs when a + * [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + * expires. + */ + interface ThreadRunStepExpired { + /** + * Represents a step in execution of a run. + */ + data: StepsAPI.RunStep; + event: 'thread.run.step.expired'; + } +} +/** + * Occurs when a new + * [run](https://platform.openai.com/docs/api-reference/runs/object) is created. + */ +export type RunStreamEvent = RunStreamEvent.ThreadRunCreated | RunStreamEvent.ThreadRunQueued | RunStreamEvent.ThreadRunInProgress | RunStreamEvent.ThreadRunRequiresAction | RunStreamEvent.ThreadRunCompleted | RunStreamEvent.ThreadRunIncomplete | RunStreamEvent.ThreadRunFailed | RunStreamEvent.ThreadRunCancelling | RunStreamEvent.ThreadRunCancelled | RunStreamEvent.ThreadRunExpired; +export declare namespace RunStreamEvent { + /** + * Occurs when a new + * [run](https://platform.openai.com/docs/api-reference/runs/object) is created. + */ + interface ThreadRunCreated { + /** + * Represents an execution run on a + * [thread](https://platform.openai.com/docs/api-reference/threads). + */ + data: RunsAPI.Run; + event: 'thread.run.created'; + } + /** + * Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) + * moves to a `queued` status. 
+ */ + interface ThreadRunQueued { + /** + * Represents an execution run on a + * [thread](https://platform.openai.com/docs/api-reference/threads). + */ + data: RunsAPI.Run; + event: 'thread.run.queued'; + } + /** + * Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) + * moves to an `in_progress` status. + */ + interface ThreadRunInProgress { + /** + * Represents an execution run on a + * [thread](https://platform.openai.com/docs/api-reference/threads). + */ + data: RunsAPI.Run; + event: 'thread.run.in_progress'; + } + /** + * Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) + * moves to a `requires_action` status. + */ + interface ThreadRunRequiresAction { + /** + * Represents an execution run on a + * [thread](https://platform.openai.com/docs/api-reference/threads). + */ + data: RunsAPI.Run; + event: 'thread.run.requires_action'; + } + /** + * Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) + * is completed. + */ + interface ThreadRunCompleted { + /** + * Represents an execution run on a + * [thread](https://platform.openai.com/docs/api-reference/threads). + */ + data: RunsAPI.Run; + event: 'thread.run.completed'; + } + /** + * Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) + * ends with status `incomplete`. + */ + interface ThreadRunIncomplete { + /** + * Represents an execution run on a + * [thread](https://platform.openai.com/docs/api-reference/threads). + */ + data: RunsAPI.Run; + event: 'thread.run.incomplete'; + } + /** + * Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) + * fails. + */ + interface ThreadRunFailed { + /** + * Represents an execution run on a + * [thread](https://platform.openai.com/docs/api-reference/threads). + */ + data: RunsAPI.Run; + event: 'thread.run.failed'; + } + /** + * Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) + * moves to a `cancelling` status. + */ + interface ThreadRunCancelling { + /** + * Represents an execution run on a + * [thread](https://platform.openai.com/docs/api-reference/threads). + */ + data: RunsAPI.Run; + event: 'thread.run.cancelling'; + } + /** + * Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) + * is cancelled. + */ + interface ThreadRunCancelled { + /** + * Represents an execution run on a + * [thread](https://platform.openai.com/docs/api-reference/threads). + */ + data: RunsAPI.Run; + event: 'thread.run.cancelled'; + } + /** + * Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) + * expires. + */ + interface ThreadRunExpired { + /** + * Represents an execution run on a + * [thread](https://platform.openai.com/docs/api-reference/threads). + */ + data: RunsAPI.Run; + event: 'thread.run.expired'; + } +} +/** + * Occurs when a new + * [thread](https://platform.openai.com/docs/api-reference/threads/object) is + * created. + */ +export interface ThreadStreamEvent { + /** + * Represents a thread that contains + * [messages](https://platform.openai.com/docs/api-reference/messages). + */ + data: ThreadsAPI.Thread; + event: 'thread.created'; + /** + * Whether to enable input audio transcription. + */ + enabled?: boolean; +} +export interface AssistantCreateParams { + /** + * ID of the model to use. 
You can use the + * [List models](https://platform.openai.com/docs/api-reference/models/list) API to + * see all of your available models, or see our + * [Model overview](https://platform.openai.com/docs/models) for descriptions of + * them. + */ + model: (string & {}) | ChatAPI.ChatModel; + /** + * The description of the assistant. The maximum length is 512 characters. + */ + description?: string | null; + /** + * The system instructions that the assistant uses. The maximum length is 256,000 + * characters. + */ + instructions?: string | null; + /** + * Set of 16 key-value pairs that can be attached to an object. This can be useful + * for storing additional information about the object in a structured format. Keys + * can be a maximum of 64 characters long and values can be a maxium of 512 + * characters long. + */ + metadata?: unknown | null; + /** + * The name of the assistant. The maximum length is 256 characters. + */ + name?: string | null; + /** + * Specifies the format that the model must output. Compatible with + * [GPT-4o](https://platform.openai.com/docs/models#gpt-4o), + * [GPT-4 Turbo](https://platform.openai.com/docs/models#gpt-4-turbo-and-gpt-4), + * and all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. + * + * Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured + * Outputs which ensures the model will match your supplied JSON schema. Learn more + * in the + * [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). + * + * Setting to `{ "type": "json_object" }` enables JSON mode, which ensures the + * message the model generates is valid JSON. + * + * **Important:** when using JSON mode, you **must** also instruct the model to + * produce JSON yourself via a system or user message. Without this, the model may + * generate an unending stream of whitespace until the generation reaches the token + * limit, resulting in a long-running and seemingly "stuck" request. Also note that + * the message content may be partially cut off if `finish_reason="length"`, which + * indicates the generation exceeded `max_tokens` or the conversation exceeded the + * max context length. + */ + response_format?: ThreadsAPI.AssistantResponseFormatOption | null; + /** + * What sampling temperature to use, between 0 and 2. Higher values like 0.8 will + * make the output more random, while lower values like 0.2 will make it more + * focused and deterministic. + */ + temperature?: number | null; + /** + * A set of resources that are used by the assistant's tools. The resources are + * specific to the type of tool. For example, the `code_interpreter` tool requires + * a list of file IDs, while the `file_search` tool requires a list of vector store + * IDs. + */ + tool_resources?: AssistantCreateParams.ToolResources | null; + /** + * A list of tool enabled on the assistant. There can be a maximum of 128 tools per + * assistant. Tools can be of types `code_interpreter`, `file_search`, or + * `function`. + */ + tools?: Array; + /** + * An alternative to sampling with temperature, called nucleus sampling, where the + * model considers the results of the tokens with top_p probability mass. So 0.1 + * means only the tokens comprising the top 10% probability mass are considered. + * + * We generally recommend altering this or temperature but not both. + */ + top_p?: number | null; +} +export declare namespace AssistantCreateParams { + /** + * A set of resources that are used by the assistant's tools. 
The resources are + * specific to the type of tool. For example, the `code_interpreter` tool requires + * a list of file IDs, while the `file_search` tool requires a list of vector store + * IDs. + */ + interface ToolResources { + code_interpreter?: ToolResources.CodeInterpreter; + file_search?: ToolResources.FileSearch; + } + namespace ToolResources { + interface CodeInterpreter { + /** + * A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made + * available to the `code_interpreter` tool. There can be a maximum of 20 files + * associated with the tool. + */ + file_ids?: Array; + } + interface FileSearch { + /** + * The + * [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + * attached to this assistant. There can be a maximum of 1 vector store attached to + * the assistant. + */ + vector_store_ids?: Array; + /** + * A helper to create a + * [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + * with file_ids and attach it to this assistant. There can be a maximum of 1 + * vector store attached to the assistant. + */ + vector_stores?: Array; + } + namespace FileSearch { + interface VectorStore { + /** + * The chunking strategy used to chunk the file(s). If not set, will use the `auto` + * strategy. Only applicable if `file_ids` is non-empty. + */ + chunking_strategy?: VectorStoresAPI.FileChunkingStrategyParam; + /** + * A list of [file](https://platform.openai.com/docs/api-reference/files) IDs to + * add to the vector store. There can be a maximum of 10000 files in a vector + * store. + */ + file_ids?: Array; + /** + * Set of 16 key-value pairs that can be attached to a vector store. This can be + * useful for storing additional information about the vector store in a structured + * format. Keys can be a maximum of 64 characters long and values can be a maxium + * of 512 characters long. + */ + metadata?: unknown; + } + } + } +} +export interface AssistantUpdateParams { + /** + * The description of the assistant. The maximum length is 512 characters. + */ + description?: string | null; + /** + * The system instructions that the assistant uses. The maximum length is 256,000 + * characters. + */ + instructions?: string | null; + /** + * Set of 16 key-value pairs that can be attached to an object. This can be useful + * for storing additional information about the object in a structured format. Keys + * can be a maximum of 64 characters long and values can be a maxium of 512 + * characters long. + */ + metadata?: unknown | null; + /** + * ID of the model to use. You can use the + * [List models](https://platform.openai.com/docs/api-reference/models/list) API to + * see all of your available models, or see our + * [Model overview](https://platform.openai.com/docs/models) for descriptions of + * them. + */ + model?: string; + /** + * The name of the assistant. The maximum length is 256 characters. + */ + name?: string | null; + /** + * Specifies the format that the model must output. Compatible with + * [GPT-4o](https://platform.openai.com/docs/models#gpt-4o), + * [GPT-4 Turbo](https://platform.openai.com/docs/models#gpt-4-turbo-and-gpt-4), + * and all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. + * + * Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured + * Outputs which ensures the model will match your supplied JSON schema. Learn more + * in the + * [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). 
+ * + * Setting to `{ "type": "json_object" }` enables JSON mode, which ensures the + * message the model generates is valid JSON. + * + * **Important:** when using JSON mode, you **must** also instruct the model to + * produce JSON yourself via a system or user message. Without this, the model may + * generate an unending stream of whitespace until the generation reaches the token + * limit, resulting in a long-running and seemingly "stuck" request. Also note that + * the message content may be partially cut off if `finish_reason="length"`, which + * indicates the generation exceeded `max_tokens` or the conversation exceeded the + * max context length. + */ + response_format?: ThreadsAPI.AssistantResponseFormatOption | null; + /** + * What sampling temperature to use, between 0 and 2. Higher values like 0.8 will + * make the output more random, while lower values like 0.2 will make it more + * focused and deterministic. + */ + temperature?: number | null; + /** + * A set of resources that are used by the assistant's tools. The resources are + * specific to the type of tool. For example, the `code_interpreter` tool requires + * a list of file IDs, while the `file_search` tool requires a list of vector store + * IDs. + */ + tool_resources?: AssistantUpdateParams.ToolResources | null; + /** + * A list of tool enabled on the assistant. There can be a maximum of 128 tools per + * assistant. Tools can be of types `code_interpreter`, `file_search`, or + * `function`. + */ + tools?: Array; + /** + * An alternative to sampling with temperature, called nucleus sampling, where the + * model considers the results of the tokens with top_p probability mass. So 0.1 + * means only the tokens comprising the top 10% probability mass are considered. + * + * We generally recommend altering this or temperature but not both. + */ + top_p?: number | null; +} +export declare namespace AssistantUpdateParams { + /** + * A set of resources that are used by the assistant's tools. The resources are + * specific to the type of tool. For example, the `code_interpreter` tool requires + * a list of file IDs, while the `file_search` tool requires a list of vector store + * IDs. + */ + interface ToolResources { + code_interpreter?: ToolResources.CodeInterpreter; + file_search?: ToolResources.FileSearch; + } + namespace ToolResources { + interface CodeInterpreter { + /** + * Overrides the list of + * [file](https://platform.openai.com/docs/api-reference/files) IDs made available + * to the `code_interpreter` tool. There can be a maximum of 20 files associated + * with the tool. + */ + file_ids?: Array; + } + interface FileSearch { + /** + * Overrides the + * [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + * attached to this assistant. There can be a maximum of 1 vector store attached to + * the assistant. + */ + vector_store_ids?: Array; + } + } +} +export interface AssistantListParams extends CursorPageParams { + /** + * A cursor for use in pagination. `before` is an object ID that defines your place + * in the list. For instance, if you make a list request and receive 100 objects, + * starting with obj_foo, your subsequent call can include before=obj_foo in order + * to fetch the previous page of the list. + */ + before?: string; + /** + * Sort order by the `created_at` timestamp of the objects. `asc` for ascending + * order and `desc` for descending order. 
+ */ + order?: 'asc' | 'desc'; +} +export declare namespace Assistants { + export { type Assistant as Assistant, type AssistantDeleted as AssistantDeleted, type AssistantStreamEvent as AssistantStreamEvent, type AssistantTool as AssistantTool, type CodeInterpreterTool as CodeInterpreterTool, type FileSearchTool as FileSearchTool, type FunctionTool as FunctionTool, type MessageStreamEvent as MessageStreamEvent, type RunStepStreamEvent as RunStepStreamEvent, type RunStreamEvent as RunStreamEvent, type ThreadStreamEvent as ThreadStreamEvent, AssistantsPage as AssistantsPage, type AssistantCreateParams as AssistantCreateParams, type AssistantUpdateParams as AssistantUpdateParams, type AssistantListParams as AssistantListParams, }; +} +//# sourceMappingURL=assistants.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/resources/beta/assistants.d.ts.map b/node_modules/openai/resources/beta/assistants.d.ts.map new file mode 100644 index 0000000..a621725 --- /dev/null +++ b/node_modules/openai/resources/beta/assistants.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"assistants.d.ts","sourceRoot":"","sources":["../../src/resources/beta/assistants.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,WAAW,EAAE,MAAM,gBAAgB,CAAC;AAE7C,OAAO,KAAK,IAAI,MAAM,YAAY,CAAC;AACnC,OAAO,KAAK,MAAM,MAAM,WAAW,CAAC;AACpC,OAAO,KAAK,OAAO,MAAM,cAAc,CAAC;AACxC,OAAO,KAAK,WAAW,MAAM,oBAAoB,CAAC;AAClD,OAAO,KAAK,UAAU,MAAM,mBAAmB,CAAC;AAChD,OAAO,KAAK,eAAe,MAAM,+BAA+B,CAAC;AACjE,OAAO,KAAK,OAAO,MAAM,qBAAqB,CAAC;AAC/C,OAAO,KAAK,QAAQ,MAAM,sBAAsB,CAAC;AACjD,OAAO,EAAE,UAAU,EAAE,KAAK,gBAAgB,EAAE,MAAM,kBAAkB,CAAC;AAErE,qBAAa,UAAW,SAAQ,WAAW;IACzC;;OAEG;IACH,MAAM,CAAC,IAAI,EAAE,qBAAqB,EAAE,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAAG,IAAI,CAAC,UAAU,CAAC,SAAS,CAAC;IAQ9F;;OAEG;IACH,QAAQ,CAAC,WAAW,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAAG,IAAI,CAAC,UAAU,CAAC,SAAS,CAAC;IAOxF;;OAEG;IACH,MAAM,CACJ,WAAW,EAAE,MAAM,EACnB,IAAI,EAAE,qBAAqB,EAC3B,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAC5B,IAAI,CAAC,UAAU,CAAC,SAAS,CAAC;IAQ7B;;OAEG;IACH,IAAI,CACF,KAAK,CAAC,EAAE,mBAAmB,EAC3B,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAC5B,IAAI,CAAC,WAAW,CAAC,cAAc,EAAE,SAAS,CAAC;IAC9C,IAAI,CAAC,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAAG,IAAI,CAAC,WAAW,CAAC,cAAc,EAAE,SAAS,CAAC;IAehF;;OAEG;IACH,GAAG,CAAC,WAAW,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAAG,IAAI,CAAC,UAAU,CAAC,gBAAgB,CAAC;CAM3F;AAED,qBAAa,cAAe,SAAQ,UAAU,CAAC,SAAS,CAAC;CAAG;AAE5D;;GAEG;AACH,MAAM,WAAW,SAAS;IACxB;;OAEG;IACH,EAAE,EAAE,MAAM,CAAC;IAEX;;OAEG;IACH,UAAU,EAAE,MAAM,CAAC;IAEnB;;OAEG;IACH,WAAW,EAAE,MAAM,GAAG,IAAI,CAAC;IAE3B;;;OAGG;IACH,YAAY,EAAE,MAAM,GAAG,IAAI,CAAC;IAE5B;;;;;OAKG;IACH,QAAQ,EAAE,OAAO,GAAG,IAAI,CAAC;IAEzB;;;;;;OAMG;IACH,KAAK,EAAE,MAAM,CAAC;IAEd;;OAEG;IACH,IAAI,EAAE,MAAM,GAAG,IAAI,CAAC;IAEpB;;OAEG;IACH,MAAM,EAAE,WAAW,CAAC;IAEpB;;;;OAIG;IACH,KAAK,EAAE,KAAK,CAAC,aAAa,CAAC,CAAC;IAE5B;;;;;;;;;;;;;;;;;;;;;OAqBG;IACH,eAAe,CAAC,EAAE,UAAU,CAAC,6BAA6B,GAAG,IAAI,CAAC;IAElE;;;;OAIG;IACH,WAAW,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IAE5B;;;;;OAKG;IACH,cAAc,CAAC,EAAE,SAAS,CAAC,aAAa,GAAG,IAAI,CAAC;IAEhD;;;;;;OAMG;IACH,KAAK,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;CACvB;AAED,yBAAiB,SAAS,CAAC;IACzB;;;;;OAKG;IACH,UAAiB,aAAa;QAC5B,gBAAgB,CAAC,EAAE,aAAa,CAAC,eAAe,CAAC;QAEjD,WAAW,CAAC,EAAE,aAAa,CAAC,UAAU,CAAC;KACxC;IAED,UAAiB,aAAa,CAAC;QAC7B,UAAiB,eAAe;YAC9B;;;;eAIG;YACH,QAAQ,CAAC,EAAE,KAAK,CAAC,MAAM,CAAC,CAAC;SAC1B;QAED,UAAiB,UAAU;YACzB;;;;;eAKG;YACH,gBAAgB,CAAC,EAAE,KAAK,CAAC,MAAM,CAAC,CAAC;SAClC;KACF;CACF;AAED,MAAM,WAAW,gBAAgB;IAC/B,EAAE,EAAE,MAAM,CAAC;IAEX,OAAO,EAAE,OAAO,CAAC;IAEjB,MAAM,EAAE,mBAAmB,CAAC;CAC7B;AAED;;;;;;;;;;;;;;;;;;;;;GAqBG;AACH,MA
AM,MAAM,oBAAoB,GAC5B,oBAAoB,CAAC,aAAa,GAClC,oBAAoB,CAAC,gBAAgB,GACrC,oBAAoB,CAAC,eAAe,GACpC,oBAAoB,CAAC,mBAAmB,GACxC,oBAAoB,CAAC,uBAAuB,GAC5C,oBAAoB,CAAC,kBAAkB,GACvC,oBAAoB,CAAC,mBAAmB,GACxC,oBAAoB,CAAC,eAAe,GACpC,oBAAoB,CAAC,mBAAmB,GACxC,oBAAoB,CAAC,kBAAkB,GACvC,oBAAoB,CAAC,gBAAgB,GACrC,oBAAoB,CAAC,oBAAoB,GACzC,oBAAoB,CAAC,uBAAuB,GAC5C,oBAAoB,CAAC,kBAAkB,GACvC,oBAAoB,CAAC,sBAAsB,GAC3C,oBAAoB,CAAC,mBAAmB,GACxC,oBAAoB,CAAC,sBAAsB,GAC3C,oBAAoB,CAAC,oBAAoB,GACzC,oBAAoB,CAAC,oBAAoB,GACzC,oBAAoB,CAAC,uBAAuB,GAC5C,oBAAoB,CAAC,kBAAkB,GACvC,oBAAoB,CAAC,sBAAsB,GAC3C,oBAAoB,CAAC,uBAAuB,GAC5C,oBAAoB,CAAC,UAAU,CAAC;AAEpC,yBAAiB,oBAAoB,CAAC;IACpC;;;;OAIG;IACH,UAAiB,aAAa;QAC5B;;;WAGG;QACH,IAAI,EAAE,UAAU,CAAC,MAAM,CAAC;QAExB,KAAK,EAAE,gBAAgB,CAAC;QAExB;;WAEG;QACH,OAAO,CAAC,EAAE,OAAO,CAAC;KACnB;IAED;;;OAGG;IACH,UAAiB,gBAAgB;QAC/B;;;WAGG;QACH,IAAI,EAAE,OAAO,CAAC,GAAG,CAAC;QAElB,KAAK,EAAE,oBAAoB,CAAC;KAC7B;IAED;;;OAGG;IACH,UAAiB,eAAe;QAC9B;;;WAGG;QACH,IAAI,EAAE,OAAO,CAAC,GAAG,CAAC;QAElB,KAAK,EAAE,mBAAmB,CAAC;KAC5B;IAED;;;OAGG;IACH,UAAiB,mBAAmB;QAClC;;;WAGG;QACH,IAAI,EAAE,OAAO,CAAC,GAAG,CAAC;QAElB,KAAK,EAAE,wBAAwB,CAAC;KACjC;IAED;;;OAGG;IACH,UAAiB,uBAAuB;QACtC;;;WAGG;QACH,IAAI,EAAE,OAAO,CAAC,GAAG,CAAC;QAElB,KAAK,EAAE,4BAA4B,CAAC;KACrC;IAED;;;OAGG;IACH,UAAiB,kBAAkB;QACjC;;;WAGG;QACH,IAAI,EAAE,OAAO,CAAC,GAAG,CAAC;QAElB,KAAK,EAAE,sBAAsB,CAAC;KAC/B;IAED;;;OAGG;IACH,UAAiB,mBAAmB;QAClC;;;WAGG;QACH,IAAI,EAAE,OAAO,CAAC,GAAG,CAAC;QAElB,KAAK,EAAE,uBAAuB,CAAC;KAChC;IAED;;;OAGG;IACH,UAAiB,eAAe;QAC9B;;;WAGG;QACH,IAAI,EAAE,OAAO,CAAC,GAAG,CAAC;QAElB,KAAK,EAAE,mBAAmB,CAAC;KAC5B;IAED;;;OAGG;IACH,UAAiB,mBAAmB;QAClC;;;WAGG;QACH,IAAI,EAAE,OAAO,CAAC,GAAG,CAAC;QAElB,KAAK,EAAE,uBAAuB,CAAC;KAChC;IAED;;;OAGG;IACH,UAAiB,kBAAkB;QACjC;;;WAGG;QACH,IAAI,EAAE,OAAO,CAAC,GAAG,CAAC;QAElB,KAAK,EAAE,sBAAsB,CAAC;KAC/B;IAED;;;OAGG;IACH,UAAiB,gBAAgB;QAC/B;;;WAGG;QACH,IAAI,EAAE,OAAO,CAAC,GAAG,CAAC;QAElB,KAAK,EAAE,oBAAoB,CAAC;KAC7B;IAED;;;;OAIG;IACH,UAAiB,oBAAoB;QACnC;;WAEG;QACH,IAAI,EAAE,QAAQ,CAAC,OAAO,CAAC;QAEvB,KAAK,EAAE,yBAAyB,CAAC;KAClC;IAED;;;;OAIG;IACH,UAAiB,uBAAuB;QACtC;;WAEG;QACH,IAAI,EAAE,QAAQ,CAAC,OAAO,CAAC;QAEvB,KAAK,EAAE,6BAA6B,CAAC;KACtC;IAED;;;;OAIG;IACH,UAAiB,kBAAkB;QACjC;;;WAGG;QACH,IAAI,EAAE,QAAQ,CAAC,iBAAiB,CAAC;QAEjC,KAAK,EAAE,uBAAuB,CAAC;KAChC;IAED;;;;OAIG;IACH,UAAiB,sBAAsB;QACrC;;WAEG;QACH,IAAI,EAAE,QAAQ,CAAC,OAAO,CAAC;QAEvB,KAAK,EAAE,2BAA2B,CAAC;KACpC;IAED;;;;OAIG;IACH,UAAiB,mBAAmB;QAClC;;WAEG;QACH,IAAI,EAAE,QAAQ,CAAC,OAAO,CAAC;QAEvB,KAAK,EAAE,wBAAwB,CAAC;KACjC;IAED;;;;OAIG;IACH,UAAiB,sBAAsB;QACrC;;WAEG;QACH,IAAI,EAAE,QAAQ,CAAC,OAAO,CAAC;QAEvB,KAAK,EAAE,2BAA2B,CAAC;KACpC;IAED;;;;OAIG;IACH,UAAiB,oBAAoB;QACnC;;WAEG;QACH,IAAI,EAAE,QAAQ,CAAC,OAAO,CAAC;QAEvB,KAAK,EAAE,yBAAyB,CAAC;KAClC;IAED;;;;OAIG;IACH,UAAiB,oBAAoB;QACnC;;;WAGG;QACH,IAAI,EAAE,WAAW,CAAC,OAAO,CAAC;QAE1B,KAAK,EAAE,wBAAwB,CAAC;KACjC;IAED;;;;OAIG;IACH,UAAiB,uBAAuB;QACtC;;;WAGG;QACH,IAAI,EAAE,WAAW,CAAC,OAAO,CAAC;QAE1B,KAAK,EAAE,4BAA4B,CAAC;KACrC;IAED;;;;OAIG;IACH,UAAiB,kBAAkB;QACjC;;;WAGG;QACH,IAAI,EAAE,WAAW,CAAC,iBAAiB,CAAC;QAEpC,KAAK,EAAE,sBAAsB,CAAC;KAC/B;IAED;;;;OAIG;IACH,UAAiB,sBAAsB;QACrC;;;WAGG;QACH,IAAI,EAAE,WAAW,CAAC,OAAO,CAAC;QAE1B,KAAK,EAAE,0BAA0B,CAAC;KACnC;IAED;;;;OAIG;IACH,UAAiB,uBAAuB;QACtC;;;WAGG;QACH,IAAI,EAAE,WAAW,CAAC,OAAO,CAAC;QAE1B,KAAK,EAAE,2BAA2B,CAAC;KACpC;IAED;;;;OAIG;IACH,UAAiB,UAAU;QACzB,IAAI,EAAE,MAAM,CAAC,WAAW,CAAC;QAEzB,KAAK,EAAE,OAAO,CAAC;KAChB;CACF;AAED,MAAM,MAAM,aAAa,GAAG,mBAAmB,GAAG,cAAc,GAAG,YAAY,CAAC;AAEhF,MAAM,WAAW,mBAAmB;IAClC;;OAEG;IACH,IAAI,EAAE,kBAAkB,CAAC;CAC1B;AAED,MAAM,WAAW,cAAc;IAC7B;;OAEG;IACH,IAAI,EAAE,aAAa,CAAC;IAEpB;;OAE
G;IACH,WAAW,CAAC,EAAE,cAAc,CAAC,UAAU,CAAC;CACzC;AAED,yBAAiB,cAAc,CAAC;IAC9B;;OAEG;IACH,UAAiB,UAAU;QACzB;;;;;;;;;WASG;QACH,eAAe,CAAC,EAAE,MAAM,CAAC;QAEzB;;;;;;;WAOG;QACH,eAAe,CAAC,EAAE,UAAU,CAAC,cAAc,CAAC;KAC7C;IAED,UAAiB,UAAU,CAAC;QAC1B;;;;;;;WAOG;QACH,UAAiB,cAAc;YAC7B;;;eAGG;YACH,eAAe,EAAE,MAAM,CAAC;YAExB;;;eAGG;YACH,MAAM,CAAC,EAAE,MAAM,GAAG,oBAAoB,CAAC;SACxC;KACF;CACF;AAED,MAAM,WAAW,YAAY;IAC3B,QAAQ,EAAE,MAAM,CAAC,kBAAkB,CAAC;IAEpC;;OAEG;IACH,IAAI,EAAE,UAAU,CAAC;CAClB;AAED;;;;GAIG;AACH,MAAM,MAAM,kBAAkB,GAC1B,kBAAkB,CAAC,oBAAoB,GACvC,kBAAkB,CAAC,uBAAuB,GAC1C,kBAAkB,CAAC,kBAAkB,GACrC,kBAAkB,CAAC,sBAAsB,GACzC,kBAAkB,CAAC,uBAAuB,CAAC;AAE/C,yBAAiB,kBAAkB,CAAC;IAClC;;;;OAIG;IACH,UAAiB,oBAAoB;QACnC;;;WAGG;QACH,IAAI,EAAE,WAAW,CAAC,OAAO,CAAC;QAE1B,KAAK,EAAE,wBAAwB,CAAC;KACjC;IAED;;;;OAIG;IACH,UAAiB,uBAAuB;QACtC;;;WAGG;QACH,IAAI,EAAE,WAAW,CAAC,OAAO,CAAC;QAE1B,KAAK,EAAE,4BAA4B,CAAC;KACrC;IAED;;;;OAIG;IACH,UAAiB,kBAAkB;QACjC;;;WAGG;QACH,IAAI,EAAE,WAAW,CAAC,iBAAiB,CAAC;QAEpC,KAAK,EAAE,sBAAsB,CAAC;KAC/B;IAED;;;;OAIG;IACH,UAAiB,sBAAsB;QACrC;;;WAGG;QACH,IAAI,EAAE,WAAW,CAAC,OAAO,CAAC;QAE1B,KAAK,EAAE,0BAA0B,CAAC;KACnC;IAED;;;;OAIG;IACH,UAAiB,uBAAuB;QACtC;;;WAGG;QACH,IAAI,EAAE,WAAW,CAAC,OAAO,CAAC;QAE1B,KAAK,EAAE,2BAA2B,CAAC;KACpC;CACF;AAED;;;;GAIG;AACH,MAAM,MAAM,kBAAkB,GAC1B,kBAAkB,CAAC,oBAAoB,GACvC,kBAAkB,CAAC,uBAAuB,GAC1C,kBAAkB,CAAC,kBAAkB,GACrC,kBAAkB,CAAC,sBAAsB,GACzC,kBAAkB,CAAC,mBAAmB,GACtC,kBAAkB,CAAC,sBAAsB,GACzC,kBAAkB,CAAC,oBAAoB,CAAC;AAE5C,yBAAiB,kBAAkB,CAAC;IAClC;;;;OAIG;IACH,UAAiB,oBAAoB;QACnC;;WAEG;QACH,IAAI,EAAE,QAAQ,CAAC,OAAO,CAAC;QAEvB,KAAK,EAAE,yBAAyB,CAAC;KAClC;IAED;;;;OAIG;IACH,UAAiB,uBAAuB;QACtC;;WAEG;QACH,IAAI,EAAE,QAAQ,CAAC,OAAO,CAAC;QAEvB,KAAK,EAAE,6BAA6B,CAAC;KACtC;IAED;;;;OAIG;IACH,UAAiB,kBAAkB;QACjC;;;WAGG;QACH,IAAI,EAAE,QAAQ,CAAC,iBAAiB,CAAC;QAEjC,KAAK,EAAE,uBAAuB,CAAC;KAChC;IAED;;;;OAIG;IACH,UAAiB,sBAAsB;QACrC;;WAEG;QACH,IAAI,EAAE,QAAQ,CAAC,OAAO,CAAC;QAEvB,KAAK,EAAE,2BAA2B,CAAC;KACpC;IAED;;;;OAIG;IACH,UAAiB,mBAAmB;QAClC;;WAEG;QACH,IAAI,EAAE,QAAQ,CAAC,OAAO,CAAC;QAEvB,KAAK,EAAE,wBAAwB,CAAC;KACjC;IAED;;;;OAIG;IACH,UAAiB,sBAAsB;QACrC;;WAEG;QACH,IAAI,EAAE,QAAQ,CAAC,OAAO,CAAC;QAEvB,KAAK,EAAE,2BAA2B,CAAC;KACpC;IAED;;;;OAIG;IACH,UAAiB,oBAAoB;QACnC;;WAEG;QACH,IAAI,EAAE,QAAQ,CAAC,OAAO,CAAC;QAEvB,KAAK,EAAE,yBAAyB,CAAC;KAClC;CACF;AAED;;;GAGG;AACH,MAAM,MAAM,cAAc,GACtB,cAAc,CAAC,gBAAgB,GAC/B,cAAc,CAAC,eAAe,GAC9B,cAAc,CAAC,mBAAmB,GAClC,cAAc,CAAC,uBAAuB,GACtC,cAAc,CAAC,kBAAkB,GACjC,cAAc,CAAC,mBAAmB,GAClC,cAAc,CAAC,eAAe,GAC9B,cAAc,CAAC,mBAAmB,GAClC,cAAc,CAAC,kBAAkB,GACjC,cAAc,CAAC,gBAAgB,CAAC;AAEpC,yBAAiB,cAAc,CAAC;IAC9B;;;OAGG;IACH,UAAiB,gBAAgB;QAC/B;;;WAGG;QACH,IAAI,EAAE,OAAO,CAAC,GAAG,CAAC;QAElB,KAAK,EAAE,oBAAoB,CAAC;KAC7B;IAED;;;OAGG;IACH,UAAiB,eAAe;QAC9B;;;WAGG;QACH,IAAI,EAAE,OAAO,CAAC,GAAG,CAAC;QAElB,KAAK,EAAE,mBAAmB,CAAC;KAC5B;IAED;;;OAGG;IACH,UAAiB,mBAAmB;QAClC;;;WAGG;QACH,IAAI,EAAE,OAAO,CAAC,GAAG,CAAC;QAElB,KAAK,EAAE,wBAAwB,CAAC;KACjC;IAED;;;OAGG;IACH,UAAiB,uBAAuB;QACtC;;;WAGG;QACH,IAAI,EAAE,OAAO,CAAC,GAAG,CAAC;QAElB,KAAK,EAAE,4BAA4B,CAAC;KACrC;IAED;;;OAGG;IACH,UAAiB,kBAAkB;QACjC;;;WAGG;QACH,IAAI,EAAE,OAAO,CAAC,GAAG,CAAC;QAElB,KAAK,EAAE,sBAAsB,CAAC;KAC/B;IAED;;;OAGG;IACH,UAAiB,mBAAmB;QAClC;;;WAGG;QACH,IAAI,EAAE,OAAO,CAAC,GAAG,CAAC;QAElB,KAAK,EAAE,uBAAuB,CAAC;KAChC;IAED;;;OAGG;IACH,UAAiB,eAAe;QAC9B;;;WAGG;QACH,IAAI,EAAE,OAAO,CAAC,GAAG,CAAC;QAElB,KAAK,EAAE,mBAAmB,CAAC;KAC5B;IAED;;;OAGG;IACH,UAAiB,mBAAmB;QAClC;;;WAGG;QACH,IAAI,EAAE,OAAO,CAAC,GAAG,CAAC;QAElB,KAAK,EAAE,uBAAuB,CAAC;KAChC;IAED;;;OAGG;IACH,UAAiB,kBAAkB;QACjC;;;WAGG;QACH,IAAI,EAAE,OAAO,CAAC,GAAG,CAAC;QAElB,KAAK,EAAE,sBAAsB,CAAC;
KAC/B;IAED;;;OAGG;IACH,UAAiB,gBAAgB;QAC/B;;;WAGG;QACH,IAAI,EAAE,OAAO,CAAC,GAAG,CAAC;QAElB,KAAK,EAAE,oBAAoB,CAAC;KAC7B;CACF;AAED;;;;GAIG;AACH,MAAM,WAAW,iBAAiB;IAChC;;;OAGG;IACH,IAAI,EAAE,UAAU,CAAC,MAAM,CAAC;IAExB,KAAK,EAAE,gBAAgB,CAAC;IAExB;;OAEG;IACH,OAAO,CAAC,EAAE,OAAO,CAAC;CACnB;AAED,MAAM,WAAW,qBAAqB;IACpC;;;;;;OAMG;IACH,KAAK,EAAE,CAAC,MAAM,GAAG,EAAE,CAAC,GAAG,OAAO,CAAC,SAAS,CAAC;IAEzC;;OAEG;IACH,WAAW,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IAE5B;;;OAGG;IACH,YAAY,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IAE7B;;;;;OAKG;IACH,QAAQ,CAAC,EAAE,OAAO,GAAG,IAAI,CAAC;IAE1B;;OAEG;IACH,IAAI,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IAErB;;;;;;;;;;;;;;;;;;;;;OAqBG;IACH,eAAe,CAAC,EAAE,UAAU,CAAC,6BAA6B,GAAG,IAAI,CAAC;IAElE;;;;OAIG;IACH,WAAW,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IAE5B;;;;;OAKG;IACH,cAAc,CAAC,EAAE,qBAAqB,CAAC,aAAa,GAAG,IAAI,CAAC;IAE5D;;;;OAIG;IACH,KAAK,CAAC,EAAE,KAAK,CAAC,aAAa,CAAC,CAAC;IAE7B;;;;;;OAMG;IACH,KAAK,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;CACvB;AAED,yBAAiB,qBAAqB,CAAC;IACrC;;;;;OAKG;IACH,UAAiB,aAAa;QAC5B,gBAAgB,CAAC,EAAE,aAAa,CAAC,eAAe,CAAC;QAEjD,WAAW,CAAC,EAAE,aAAa,CAAC,UAAU,CAAC;KACxC;IAED,UAAiB,aAAa,CAAC;QAC7B,UAAiB,eAAe;YAC9B;;;;eAIG;YACH,QAAQ,CAAC,EAAE,KAAK,CAAC,MAAM,CAAC,CAAC;SAC1B;QAED,UAAiB,UAAU;YACzB;;;;;eAKG;YACH,gBAAgB,CAAC,EAAE,KAAK,CAAC,MAAM,CAAC,CAAC;YAEjC;;;;;eAKG;YACH,aAAa,CAAC,EAAE,KAAK,CAAC,UAAU,CAAC,WAAW,CAAC,CAAC;SAC/C;QAED,UAAiB,UAAU,CAAC;YAC1B,UAAiB,WAAW;gBAC1B;;;mBAGG;gBACH,iBAAiB,CAAC,EAAE,eAAe,CAAC,yBAAyB,CAAC;gBAE9D;;;;mBAIG;gBACH,QAAQ,CAAC,EAAE,KAAK,CAAC,MAAM,CAAC,CAAC;gBAEzB;;;;;mBAKG;gBACH,QAAQ,CAAC,EAAE,OAAO,CAAC;aACpB;SACF;KACF;CACF;AAED,MAAM,WAAW,qBAAqB;IACpC;;OAEG;IACH,WAAW,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IAE5B;;;OAGG;IACH,YAAY,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IAE7B;;;;;OAKG;IACH,QAAQ,CAAC,EAAE,OAAO,GAAG,IAAI,CAAC;IAE1B;;;;;;OAMG;IACH,KAAK,CAAC,EAAE,MAAM,CAAC;IAEf;;OAEG;IACH,IAAI,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IAErB;;;;;;;;;;;;;;;;;;;;;OAqBG;IACH,eAAe,CAAC,EAAE,UAAU,CAAC,6BAA6B,GAAG,IAAI,CAAC;IAElE;;;;OAIG;IACH,WAAW,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IAE5B;;;;;OAKG;IACH,cAAc,CAAC,EAAE,qBAAqB,CAAC,aAAa,GAAG,IAAI,CAAC;IAE5D;;;;OAIG;IACH,KAAK,CAAC,EAAE,KAAK,CAAC,aAAa,CAAC,CAAC;IAE7B;;;;;;OAMG;IACH,KAAK,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;CACvB;AAED,yBAAiB,qBAAqB,CAAC;IACrC;;;;;OAKG;IACH,UAAiB,aAAa;QAC5B,gBAAgB,CAAC,EAAE,aAAa,CAAC,eAAe,CAAC;QAEjD,WAAW,CAAC,EAAE,aAAa,CAAC,UAAU,CAAC;KACxC;IAED,UAAiB,aAAa,CAAC;QAC7B,UAAiB,eAAe;YAC9B;;;;;eAKG;YACH,QAAQ,CAAC,EAAE,KAAK,CAAC,MAAM,CAAC,CAAC;SAC1B;QAED,UAAiB,UAAU;YACzB;;;;;eAKG;YACH,gBAAgB,CAAC,EAAE,KAAK,CAAC,MAAM,CAAC,CAAC;SAClC;KACF;CACF;AAED,MAAM,WAAW,mBAAoB,SAAQ,gBAAgB;IAC3D;;;;;OAKG;IACH,MAAM,CAAC,EAAE,MAAM,CAAC;IAEhB;;;OAGG;IACH,KAAK,CAAC,EAAE,KAAK,GAAG,MAAM,CAAC;CACxB;AAID,MAAM,CAAC,OAAO,WAAW,UAAU,CAAC;IAClC,OAAO,EACL,KAAK,SAAS,IAAI,SAAS,EAC3B,KAAK,gBAAgB,IAAI,gBAAgB,EACzC,KAAK,oBAAoB,IAAI,oBAAoB,EACjD,KAAK,aAAa,IAAI,aAAa,EACnC,KAAK,mBAAmB,IAAI,mBAAmB,EAC/C,KAAK,cAAc,IAAI,cAAc,EACrC,KAAK,YAAY,IAAI,YAAY,EACjC,KAAK,kBAAkB,IAAI,kBAAkB,EAC7C,KAAK,kBAAkB,IAAI,kBAAkB,EAC7C,KAAK,cAAc,IAAI,cAAc,EACrC,KAAK,iBAAiB,IAAI,iBAAiB,EAC3C,cAAc,IAAI,cAAc,EAChC,KAAK,qBAAqB,IAAI,qBAAqB,EACnD,KAAK,qBAAqB,IAAI,qBAAqB,EACnD,KAAK,mBAAmB,IAAI,mBAAmB,GAChD,CAAC;CACH"} \ No newline at end of file diff --git a/node_modules/openai/resources/beta/assistants.js b/node_modules/openai/resources/beta/assistants.js new file mode 100644 index 0000000..d686811 --- /dev/null +++ b/node_modules/openai/resources/beta/assistants.js @@ -0,0 +1,63 @@ +"use strict"; +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+Object.defineProperty(exports, "__esModule", { value: true }); +exports.AssistantsPage = exports.Assistants = void 0; +const resource_1 = require("../../resource.js"); +const core_1 = require("../../core.js"); +const pagination_1 = require("../../pagination.js"); +class Assistants extends resource_1.APIResource { + /** + * Create an assistant with a model and instructions. + */ + create(body, options) { + return this._client.post('/assistants', { + body, + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + }); + } + /** + * Retrieves an assistant. + */ + retrieve(assistantId, options) { + return this._client.get(`/assistants/${assistantId}`, { + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + }); + } + /** + * Modifies an assistant. + */ + update(assistantId, body, options) { + return this._client.post(`/assistants/${assistantId}`, { + body, + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + }); + } + list(query = {}, options) { + if ((0, core_1.isRequestOptions)(query)) { + return this.list({}, query); + } + return this._client.getAPIList('/assistants', AssistantsPage, { + query, + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + }); + } + /** + * Delete an assistant. + */ + del(assistantId, options) { + return this._client.delete(`/assistants/${assistantId}`, { + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + }); + } +} +exports.Assistants = Assistants; +class AssistantsPage extends pagination_1.CursorPage { +} +exports.AssistantsPage = AssistantsPage; +Assistants.AssistantsPage = AssistantsPage; +//# sourceMappingURL=assistants.js.map \ No newline at end of file diff --git a/node_modules/openai/resources/beta/assistants.js.map b/node_modules/openai/resources/beta/assistants.js.map new file mode 100644 index 0000000..975e794 --- /dev/null +++ b/node_modules/openai/resources/beta/assistants.js.map @@ -0,0 +1 @@ +{"version":3,"file":"assistants.js","sourceRoot":"","sources":["../../src/resources/beta/assistants.ts"],"names":[],"mappings":";AAAA,sFAAsF;;;AAEtF,gDAA6C;AAC7C,wCAA8C;AAS9C,oDAAqE;AAErE,MAAa,UAAW,SAAQ,sBAAW;IACzC;;OAEG;IACH,MAAM,CAAC,IAA2B,EAAE,OAA6B;QAC/D,OAAO,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,aAAa,EAAE;YACtC,IAAI;YACJ,GAAG,OAAO;YACV,OAAO,EAAE,EAAE,aAAa,EAAE,eAAe,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE;SACjE,CAAC,CAAC;IACL,CAAC;IAED;;OAEG;IACH,QAAQ,CAAC,WAAmB,EAAE,OAA6B;QACzD,OAAO,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,eAAe,WAAW,EAAE,EAAE;YACpD,GAAG,OAAO;YACV,OAAO,EAAE,EAAE,aAAa,EAAE,eAAe,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE;SACjE,CAAC,CAAC;IACL,CAAC;IAED;;OAEG;IACH,MAAM,CACJ,WAAmB,EACnB,IAA2B,EAC3B,OAA6B;QAE7B,OAAO,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,eAAe,WAAW,EAAE,EAAE;YACrD,IAAI;YACJ,GAAG,OAAO;YACV,OAAO,EAAE,EAAE,aAAa,EAAE,eAAe,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE;SACjE,CAAC,CAAC;IACL,CAAC;IAUD,IAAI,CACF,QAAmD,EAAE,EACrD,OAA6B;QAE7B,IAAI,IAAA,uBAAgB,EAAC,KAAK,CAAC,EAAE;YAC3B,OAAO,IAAI,CAAC,IAAI,CAAC,EAAE,EAAE,KAAK,CAAC,CAAC;SAC7B;QACD,OAAO,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,aAAa,EAAE,cAAc,EAAE;YAC5D,KAAK;YACL,GAAG,OAAO;YACV,OAAO,EAAE,EAAE,aAAa,EAAE,eAAe,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE;SACjE,CAAC,CAAC;IACL,CAAC;IAED;;OAEG;IACH,GAAG,CAAC,WAAmB,EAAE,OAA6B;QACpD,OAAO,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,eAAe,WAAW,EAAE,EAAE;YACvD,GAAG,OAAO;YACV,OAAO,EAAE,EAAE,aAAa,EAAE,eAAe,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE;SACjE,CAAC,CAAC;IACL,CAAC;CACF;AApED,gCAoEC;AAED,MAAa,cAAe,SAAQ,uBAAqB;CAAG;AAA5D,wCAA4D;AAiyC5D,UAAU,CAAC,cAAc,GAAG,cAAc,CAAC"} \ No newline at end of file diff --git 
a/node_modules/openai/resources/beta/assistants.mjs b/node_modules/openai/resources/beta/assistants.mjs new file mode 100644 index 0000000..371b082 --- /dev/null +++ b/node_modules/openai/resources/beta/assistants.mjs @@ -0,0 +1,58 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. +import { APIResource } from "../../resource.mjs"; +import { isRequestOptions } from "../../core.mjs"; +import { CursorPage } from "../../pagination.mjs"; +export class Assistants extends APIResource { + /** + * Create an assistant with a model and instructions. + */ + create(body, options) { + return this._client.post('/assistants', { + body, + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + }); + } + /** + * Retrieves an assistant. + */ + retrieve(assistantId, options) { + return this._client.get(`/assistants/${assistantId}`, { + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + }); + } + /** + * Modifies an assistant. + */ + update(assistantId, body, options) { + return this._client.post(`/assistants/${assistantId}`, { + body, + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + }); + } + list(query = {}, options) { + if (isRequestOptions(query)) { + return this.list({}, query); + } + return this._client.getAPIList('/assistants', AssistantsPage, { + query, + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + }); + } + /** + * Delete an assistant. + */ + del(assistantId, options) { + return this._client.delete(`/assistants/${assistantId}`, { + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + }); + } +} +export class AssistantsPage extends CursorPage { +} +Assistants.AssistantsPage = AssistantsPage; +//# sourceMappingURL=assistants.mjs.map \ No newline at end of file diff --git a/node_modules/openai/resources/beta/assistants.mjs.map b/node_modules/openai/resources/beta/assistants.mjs.map new file mode 100644 index 0000000..cfdfdb8 --- /dev/null +++ b/node_modules/openai/resources/beta/assistants.mjs.map @@ -0,0 +1 @@ 
+{"version":3,"file":"assistants.mjs","sourceRoot":"","sources":["../../src/resources/beta/assistants.ts"],"names":[],"mappings":"AAAA,sFAAsF;OAE/E,EAAE,WAAW,EAAE;OACf,EAAE,gBAAgB,EAAE;OASpB,EAAE,UAAU,EAAyB;AAE5C,MAAM,OAAO,UAAW,SAAQ,WAAW;IACzC;;OAEG;IACH,MAAM,CAAC,IAA2B,EAAE,OAA6B;QAC/D,OAAO,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,aAAa,EAAE;YACtC,IAAI;YACJ,GAAG,OAAO;YACV,OAAO,EAAE,EAAE,aAAa,EAAE,eAAe,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE;SACjE,CAAC,CAAC;IACL,CAAC;IAED;;OAEG;IACH,QAAQ,CAAC,WAAmB,EAAE,OAA6B;QACzD,OAAO,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,eAAe,WAAW,EAAE,EAAE;YACpD,GAAG,OAAO;YACV,OAAO,EAAE,EAAE,aAAa,EAAE,eAAe,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE;SACjE,CAAC,CAAC;IACL,CAAC;IAED;;OAEG;IACH,MAAM,CACJ,WAAmB,EACnB,IAA2B,EAC3B,OAA6B;QAE7B,OAAO,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,eAAe,WAAW,EAAE,EAAE;YACrD,IAAI;YACJ,GAAG,OAAO;YACV,OAAO,EAAE,EAAE,aAAa,EAAE,eAAe,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE;SACjE,CAAC,CAAC;IACL,CAAC;IAUD,IAAI,CACF,QAAmD,EAAE,EACrD,OAA6B;QAE7B,IAAI,gBAAgB,CAAC,KAAK,CAAC,EAAE;YAC3B,OAAO,IAAI,CAAC,IAAI,CAAC,EAAE,EAAE,KAAK,CAAC,CAAC;SAC7B;QACD,OAAO,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,aAAa,EAAE,cAAc,EAAE;YAC5D,KAAK;YACL,GAAG,OAAO;YACV,OAAO,EAAE,EAAE,aAAa,EAAE,eAAe,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE;SACjE,CAAC,CAAC;IACL,CAAC;IAED;;OAEG;IACH,GAAG,CAAC,WAAmB,EAAE,OAA6B;QACpD,OAAO,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,eAAe,WAAW,EAAE,EAAE;YACvD,GAAG,OAAO;YACV,OAAO,EAAE,EAAE,aAAa,EAAE,eAAe,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE;SACjE,CAAC,CAAC;IACL,CAAC;CACF;AAED,MAAM,OAAO,cAAe,SAAQ,UAAqB;CAAG;AAiyC5D,UAAU,CAAC,cAAc,GAAG,cAAc,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/resources/beta/beta.d.ts b/node_modules/openai/resources/beta/beta.d.ts new file mode 100644 index 0000000..01313f9 --- /dev/null +++ b/node_modules/openai/resources/beta/beta.d.ts @@ -0,0 +1,22 @@ +import { APIResource } from "../../resource.js"; +import * as AssistantsAPI from "./assistants.js"; +import * as ChatAPI from "./chat/chat.js"; +import { Assistant, AssistantCreateParams, AssistantDeleted, AssistantListParams, AssistantStreamEvent, AssistantTool, AssistantUpdateParams, Assistants, AssistantsPage, CodeInterpreterTool, FileSearchTool, FunctionTool, MessageStreamEvent, RunStepStreamEvent, RunStreamEvent, ThreadStreamEvent } from "./assistants.js"; +import * as ThreadsAPI from "./threads/threads.js"; +import { AssistantResponseFormatOption, AssistantToolChoice, AssistantToolChoiceFunction, AssistantToolChoiceOption, Thread, ThreadCreateAndRunParams, ThreadCreateAndRunParamsNonStreaming, ThreadCreateAndRunParamsStreaming, ThreadCreateAndRunPollParams, ThreadCreateAndRunStreamParams, ThreadCreateParams, ThreadDeleted, ThreadUpdateParams, Threads } from "./threads/threads.js"; +import * as VectorStoresAPI from "./vector-stores/vector-stores.js"; +import { AutoFileChunkingStrategyParam, FileChunkingStrategy, FileChunkingStrategyParam, OtherFileChunkingStrategyObject, StaticFileChunkingStrategy, StaticFileChunkingStrategyObject, StaticFileChunkingStrategyParam, VectorStore, VectorStoreCreateParams, VectorStoreDeleted, VectorStoreListParams, VectorStoreUpdateParams, VectorStores, VectorStoresPage } from "./vector-stores/vector-stores.js"; +import { Chat } from "./chat/chat.js"; +export declare class Beta extends APIResource { + vectorStores: VectorStoresAPI.VectorStores; + chat: ChatAPI.Chat; + assistants: AssistantsAPI.Assistants; + threads: ThreadsAPI.Threads; +} +export declare namespace Beta { + export { VectorStores as VectorStores, type AutoFileChunkingStrategyParam as AutoFileChunkingStrategyParam, type FileChunkingStrategy as FileChunkingStrategy, 
type FileChunkingStrategyParam as FileChunkingStrategyParam, type OtherFileChunkingStrategyObject as OtherFileChunkingStrategyObject, type StaticFileChunkingStrategy as StaticFileChunkingStrategy, type StaticFileChunkingStrategyObject as StaticFileChunkingStrategyObject, type StaticFileChunkingStrategyParam as StaticFileChunkingStrategyParam, type VectorStore as VectorStore, type VectorStoreDeleted as VectorStoreDeleted, VectorStoresPage as VectorStoresPage, type VectorStoreCreateParams as VectorStoreCreateParams, type VectorStoreUpdateParams as VectorStoreUpdateParams, type VectorStoreListParams as VectorStoreListParams, }; + export { Chat }; + export { Assistants as Assistants, type Assistant as Assistant, type AssistantDeleted as AssistantDeleted, type AssistantStreamEvent as AssistantStreamEvent, type AssistantTool as AssistantTool, type CodeInterpreterTool as CodeInterpreterTool, type FileSearchTool as FileSearchTool, type FunctionTool as FunctionTool, type MessageStreamEvent as MessageStreamEvent, type RunStepStreamEvent as RunStepStreamEvent, type RunStreamEvent as RunStreamEvent, type ThreadStreamEvent as ThreadStreamEvent, AssistantsPage as AssistantsPage, type AssistantCreateParams as AssistantCreateParams, type AssistantUpdateParams as AssistantUpdateParams, type AssistantListParams as AssistantListParams, }; + export { Threads as Threads, type AssistantResponseFormatOption as AssistantResponseFormatOption, type AssistantToolChoice as AssistantToolChoice, type AssistantToolChoiceFunction as AssistantToolChoiceFunction, type AssistantToolChoiceOption as AssistantToolChoiceOption, type Thread as Thread, type ThreadDeleted as ThreadDeleted, type ThreadCreateParams as ThreadCreateParams, type ThreadUpdateParams as ThreadUpdateParams, type ThreadCreateAndRunParams as ThreadCreateAndRunParams, type ThreadCreateAndRunParamsNonStreaming as ThreadCreateAndRunParamsNonStreaming, type ThreadCreateAndRunParamsStreaming as ThreadCreateAndRunParamsStreaming, type ThreadCreateAndRunPollParams, type ThreadCreateAndRunStreamParams, }; +} +//# sourceMappingURL=beta.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/resources/beta/beta.d.ts.map b/node_modules/openai/resources/beta/beta.d.ts.map new file mode 100644 index 0000000..15daba7 --- /dev/null +++ b/node_modules/openai/resources/beta/beta.d.ts.map @@ -0,0 +1 @@ 
+{"version":3,"file":"beta.d.ts","sourceRoot":"","sources":["../../src/resources/beta/beta.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,WAAW,EAAE,MAAM,gBAAgB,CAAC;AAC7C,OAAO,KAAK,aAAa,MAAM,cAAc,CAAC;AAC9C,OAAO,KAAK,OAAO,MAAM,aAAa,CAAC;AACvC,OAAO,EACL,SAAS,EACT,qBAAqB,EACrB,gBAAgB,EAChB,mBAAmB,EACnB,oBAAoB,EACpB,aAAa,EACb,qBAAqB,EACrB,UAAU,EACV,cAAc,EACd,mBAAmB,EACnB,cAAc,EACd,YAAY,EACZ,kBAAkB,EAClB,kBAAkB,EAClB,cAAc,EACd,iBAAiB,EAClB,MAAM,cAAc,CAAC;AACtB,OAAO,KAAK,UAAU,MAAM,mBAAmB,CAAC;AAChD,OAAO,EACL,6BAA6B,EAC7B,mBAAmB,EACnB,2BAA2B,EAC3B,yBAAyB,EACzB,MAAM,EACN,wBAAwB,EACxB,oCAAoC,EACpC,iCAAiC,EACjC,4BAA4B,EAC5B,8BAA8B,EAC9B,kBAAkB,EAClB,aAAa,EACb,kBAAkB,EAClB,OAAO,EACR,MAAM,mBAAmB,CAAC;AAC3B,OAAO,KAAK,eAAe,MAAM,+BAA+B,CAAC;AACjE,OAAO,EACL,6BAA6B,EAC7B,oBAAoB,EACpB,yBAAyB,EACzB,+BAA+B,EAC/B,0BAA0B,EAC1B,gCAAgC,EAChC,+BAA+B,EAC/B,WAAW,EACX,uBAAuB,EACvB,kBAAkB,EAClB,qBAAqB,EACrB,uBAAuB,EACvB,YAAY,EACZ,gBAAgB,EACjB,MAAM,+BAA+B,CAAC;AACvC,OAAO,EAAE,IAAI,EAAE,MAAM,aAAa,CAAC;AAEnC,qBAAa,IAAK,SAAQ,WAAW;IACnC,YAAY,EAAE,eAAe,CAAC,YAAY,CAAkD;IAC5F,IAAI,EAAE,OAAO,CAAC,IAAI,CAAkC;IACpD,UAAU,EAAE,aAAa,CAAC,UAAU,CAA8C;IAClF,OAAO,EAAE,UAAU,CAAC,OAAO,CAAwC;CACpE;AAQD,MAAM,CAAC,OAAO,WAAW,IAAI,CAAC;IAC5B,OAAO,EACL,YAAY,IAAI,YAAY,EAC5B,KAAK,6BAA6B,IAAI,6BAA6B,EACnE,KAAK,oBAAoB,IAAI,oBAAoB,EACjD,KAAK,yBAAyB,IAAI,yBAAyB,EAC3D,KAAK,+BAA+B,IAAI,+BAA+B,EACvE,KAAK,0BAA0B,IAAI,0BAA0B,EAC7D,KAAK,gCAAgC,IAAI,gCAAgC,EACzE,KAAK,+BAA+B,IAAI,+BAA+B,EACvE,KAAK,WAAW,IAAI,WAAW,EAC/B,KAAK,kBAAkB,IAAI,kBAAkB,EAC7C,gBAAgB,IAAI,gBAAgB,EACpC,KAAK,uBAAuB,IAAI,uBAAuB,EACvD,KAAK,uBAAuB,IAAI,uBAAuB,EACvD,KAAK,qBAAqB,IAAI,qBAAqB,GACpD,CAAC;IAEF,OAAO,EAAE,IAAI,EAAE,CAAC;IAEhB,OAAO,EACL,UAAU,IAAI,UAAU,EACxB,KAAK,SAAS,IAAI,SAAS,EAC3B,KAAK,gBAAgB,IAAI,gBAAgB,EACzC,KAAK,oBAAoB,IAAI,oBAAoB,EACjD,KAAK,aAAa,IAAI,aAAa,EACnC,KAAK,mBAAmB,IAAI,mBAAmB,EAC/C,KAAK,cAAc,IAAI,cAAc,EACrC,KAAK,YAAY,IAAI,YAAY,EACjC,KAAK,kBAAkB,IAAI,kBAAkB,EAC7C,KAAK,kBAAkB,IAAI,kBAAkB,EAC7C,KAAK,cAAc,IAAI,cAAc,EACrC,KAAK,iBAAiB,IAAI,iBAAiB,EAC3C,cAAc,IAAI,cAAc,EAChC,KAAK,qBAAqB,IAAI,qBAAqB,EACnD,KAAK,qBAAqB,IAAI,qBAAqB,EACnD,KAAK,mBAAmB,IAAI,mBAAmB,GAChD,CAAC;IAEF,OAAO,EACL,OAAO,IAAI,OAAO,EAClB,KAAK,6BAA6B,IAAI,6BAA6B,EACnE,KAAK,mBAAmB,IAAI,mBAAmB,EAC/C,KAAK,2BAA2B,IAAI,2BAA2B,EAC/D,KAAK,yBAAyB,IAAI,yBAAyB,EAC3D,KAAK,MAAM,IAAI,MAAM,EACrB,KAAK,aAAa,IAAI,aAAa,EACnC,KAAK,kBAAkB,IAAI,kBAAkB,EAC7C,KAAK,kBAAkB,IAAI,kBAAkB,EAC7C,KAAK,wBAAwB,IAAI,wBAAwB,EACzD,KAAK,oCAAoC,IAAI,oCAAoC,EACjF,KAAK,iCAAiC,IAAI,iCAAiC,EAC3E,KAAK,4BAA4B,EACjC,KAAK,8BAA8B,GACpC,CAAC;CACH"} \ No newline at end of file diff --git a/node_modules/openai/resources/beta/beta.js b/node_modules/openai/resources/beta/beta.js new file mode 100644 index 0000000..38f9297 --- /dev/null +++ b/node_modules/openai/resources/beta/beta.js @@ -0,0 +1,52 @@ +"use strict"; +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Beta = void 0; +const resource_1 = require("../../resource.js"); +const AssistantsAPI = __importStar(require("./assistants.js")); +const ChatAPI = __importStar(require("./chat/chat.js")); +const assistants_1 = require("./assistants.js"); +const ThreadsAPI = __importStar(require("./threads/threads.js")); +const threads_1 = require("./threads/threads.js"); +const VectorStoresAPI = __importStar(require("./vector-stores/vector-stores.js")); +const vector_stores_1 = require("./vector-stores/vector-stores.js"); +const chat_1 = require("./chat/chat.js"); +class Beta extends resource_1.APIResource { + constructor() { + super(...arguments); + this.vectorStores = new VectorStoresAPI.VectorStores(this._client); + this.chat = new ChatAPI.Chat(this._client); + this.assistants = new AssistantsAPI.Assistants(this._client); + this.threads = new ThreadsAPI.Threads(this._client); + } +} +exports.Beta = Beta; +Beta.VectorStores = vector_stores_1.VectorStores; +Beta.VectorStoresPage = vector_stores_1.VectorStoresPage; +Beta.Assistants = assistants_1.Assistants; +Beta.AssistantsPage = assistants_1.AssistantsPage; +Beta.Threads = threads_1.Threads; +//# sourceMappingURL=beta.js.map \ No newline at end of file diff --git a/node_modules/openai/resources/beta/beta.js.map b/node_modules/openai/resources/beta/beta.js.map new file mode 100644 index 0000000..559a5f6 --- /dev/null +++ b/node_modules/openai/resources/beta/beta.js.map @@ -0,0 +1 @@ +{"version":3,"file":"beta.js","sourceRoot":"","sources":["../../src/resources/beta/beta.ts"],"names":[],"mappings":";AAAA,sFAAsF;;;;;;;;;;;;;;;;;;;;;;;;;;AAEtF,gDAA6C;AAC7C,+DAA8C;AAC9C,wDAAuC;AACvC,gDAiBsB;AACtB,iEAAgD;AAChD,kDAe2B;AAC3B,kFAAiE;AACjE,oEAeuC;AACvC,yCAAmC;AAEnC,MAAa,IAAK,SAAQ,sBAAW;IAArC;;QACE,iBAAY,GAAiC,IAAI,eAAe,CAAC,YAAY,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;QAC5F,SAAI,GAAiB,IAAI,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;QACpD,eAAU,GAA6B,IAAI,aAAa,CAAC,UAAU,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;QAClF,YAAO,GAAuB,IAAI,UAAU,CAAC,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;IACrE,CAAC;CAAA;AALD,oBAKC;AAED,IAAI,CAAC,YAAY,GAAG,4BAAY,CAAC;AACjC,IAAI,CAAC,gBAAgB,GAAG,gCAAgB,CAAC;AACzC,IAAI,CAAC,UAAU,GAAG,uBAAU,CAAC;AAC7B,IAAI,CAAC,cAAc,GAAG,2BAAc,CAAC;AACrC,IAAI,CAAC,OAAO,GAAG,iBAAO,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/resources/beta/beta.mjs b/node_modules/openai/resources/beta/beta.mjs new file mode 100644 index 0000000..b6678df --- /dev/null +++ b/node_modules/openai/resources/beta/beta.mjs @@ -0,0 +1,25 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+import { APIResource } from "../../resource.mjs"; +import * as AssistantsAPI from "./assistants.mjs"; +import * as ChatAPI from "./chat/chat.mjs"; +import { Assistants, AssistantsPage, } from "./assistants.mjs"; +import * as ThreadsAPI from "./threads/threads.mjs"; +import { Threads, } from "./threads/threads.mjs"; +import * as VectorStoresAPI from "./vector-stores/vector-stores.mjs"; +import { VectorStores, VectorStoresPage, } from "./vector-stores/vector-stores.mjs"; +import { Chat } from "./chat/chat.mjs"; +export class Beta extends APIResource { + constructor() { + super(...arguments); + this.vectorStores = new VectorStoresAPI.VectorStores(this._client); + this.chat = new ChatAPI.Chat(this._client); + this.assistants = new AssistantsAPI.Assistants(this._client); + this.threads = new ThreadsAPI.Threads(this._client); + } +} +Beta.VectorStores = VectorStores; +Beta.VectorStoresPage = VectorStoresPage; +Beta.Assistants = Assistants; +Beta.AssistantsPage = AssistantsPage; +Beta.Threads = Threads; +//# sourceMappingURL=beta.mjs.map \ No newline at end of file diff --git a/node_modules/openai/resources/beta/beta.mjs.map b/node_modules/openai/resources/beta/beta.mjs.map new file mode 100644 index 0000000..b7f000f --- /dev/null +++ b/node_modules/openai/resources/beta/beta.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"beta.mjs","sourceRoot":"","sources":["../../src/resources/beta/beta.ts"],"names":[],"mappings":"AAAA,sFAAsF;OAE/E,EAAE,WAAW,EAAE;OACf,KAAK,aAAa;OAClB,KAAK,OAAO;OACZ,EAQL,UAAU,EACV,cAAc,GAQf;OACM,KAAK,UAAU;OACf,EAcL,OAAO,GACR;OACM,KAAK,eAAe;OACpB,EAaL,YAAY,EACZ,gBAAgB,GACjB;OACM,EAAE,IAAI,EAAE;AAEf,MAAM,OAAO,IAAK,SAAQ,WAAW;IAArC;;QACE,iBAAY,GAAiC,IAAI,eAAe,CAAC,YAAY,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;QAC5F,SAAI,GAAiB,IAAI,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;QACpD,eAAU,GAA6B,IAAI,aAAa,CAAC,UAAU,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;QAClF,YAAO,GAAuB,IAAI,UAAU,CAAC,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;IACrE,CAAC;CAAA;AAED,IAAI,CAAC,YAAY,GAAG,YAAY,CAAC;AACjC,IAAI,CAAC,gBAAgB,GAAG,gBAAgB,CAAC;AACzC,IAAI,CAAC,UAAU,GAAG,UAAU,CAAC;AAC7B,IAAI,CAAC,cAAc,GAAG,cAAc,CAAC;AACrC,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/resources/beta/chat/chat.d.ts b/node_modules/openai/resources/beta/chat/chat.d.ts new file mode 100644 index 0000000..e0bfdfe --- /dev/null +++ b/node_modules/openai/resources/beta/chat/chat.d.ts @@ -0,0 +1,9 @@ +import { APIResource } from "../../../resource.js"; +import * as CompletionsAPI from "./completions.js"; +export declare class Chat extends APIResource { + completions: CompletionsAPI.Completions; +} +export declare namespace Chat { + export import Completions = CompletionsAPI.Completions; +} +//# sourceMappingURL=chat.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/resources/beta/chat/chat.d.ts.map b/node_modules/openai/resources/beta/chat/chat.d.ts.map new file mode 100644 index 0000000..d2bc1ae --- /dev/null +++ b/node_modules/openai/resources/beta/chat/chat.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"chat.d.ts","sourceRoot":"","sources":["../../../src/resources/beta/chat/chat.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,WAAW,EAAE,MAAM,mBAAmB,CAAC;AAChD,OAAO,KAAK,cAAc,MAAM,eAAe,CAAC;AAEhD,qBAAa,IAAK,SAAQ,WAAW;IACnC,WAAW,EAAE,cAAc,CAAC,WAAW,CAAgD;CACxF;AAED,yBAAiB,IAAI,CAAC;IACpB,MAAM,QAAQ,WAAW,GAAG,cAAc,CAAC,WAAW,CAAC;CACxD"} \ No newline at end of file diff --git a/node_modules/openai/resources/beta/chat/chat.js b/node_modules/openai/resources/beta/chat/chat.js new file mode 100644 index 
0000000..f163c2a --- /dev/null +++ b/node_modules/openai/resources/beta/chat/chat.js @@ -0,0 +1,40 @@ +"use strict"; +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Chat = void 0; +const resource_1 = require("../../../resource.js"); +const CompletionsAPI = __importStar(require("./completions.js")); +class Chat extends resource_1.APIResource { + constructor() { + super(...arguments); + this.completions = new CompletionsAPI.Completions(this._client); + } +} +exports.Chat = Chat; +(function (Chat) { + Chat.Completions = CompletionsAPI.Completions; +})(Chat = exports.Chat || (exports.Chat = {})); +//# sourceMappingURL=chat.js.map \ No newline at end of file diff --git a/node_modules/openai/resources/beta/chat/chat.js.map b/node_modules/openai/resources/beta/chat/chat.js.map new file mode 100644 index 0000000..263039e --- /dev/null +++ b/node_modules/openai/resources/beta/chat/chat.js.map @@ -0,0 +1 @@ +{"version":3,"file":"chat.js","sourceRoot":"","sources":["../../../src/resources/beta/chat/chat.ts"],"names":[],"mappings":";AAAA,sFAAsF;;;;;;;;;;;;;;;;;;;;;;;;;;AAEtF,mDAAgD;AAChD,iEAAgD;AAEhD,MAAa,IAAK,SAAQ,sBAAW;IAArC;;QACE,gBAAW,GAA+B,IAAI,cAAc,CAAC,WAAW,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;IACzF,CAAC;CAAA;AAFD,oBAEC;AAED,WAAiB,IAAI;IACL,gBAAW,GAAG,cAAc,CAAC,WAAW,CAAC;AACzD,CAAC,EAFgB,IAAI,GAAJ,YAAI,KAAJ,YAAI,QAEpB"} \ No newline at end of file diff --git a/node_modules/openai/resources/beta/chat/chat.mjs b/node_modules/openai/resources/beta/chat/chat.mjs new file mode 100644 index 0000000..8394dd6 --- /dev/null +++ b/node_modules/openai/resources/beta/chat/chat.mjs @@ -0,0 +1,13 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+import { APIResource } from "../../../resource.mjs"; +import * as CompletionsAPI from "./completions.mjs"; +export class Chat extends APIResource { + constructor() { + super(...arguments); + this.completions = new CompletionsAPI.Completions(this._client); + } +} +(function (Chat) { + Chat.Completions = CompletionsAPI.Completions; +})(Chat || (Chat = {})); +//# sourceMappingURL=chat.mjs.map \ No newline at end of file diff --git a/node_modules/openai/resources/beta/chat/chat.mjs.map b/node_modules/openai/resources/beta/chat/chat.mjs.map new file mode 100644 index 0000000..760ec6c --- /dev/null +++ b/node_modules/openai/resources/beta/chat/chat.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"chat.mjs","sourceRoot":"","sources":["../../../src/resources/beta/chat/chat.ts"],"names":[],"mappings":"AAAA,sFAAsF;OAE/E,EAAE,WAAW,EAAE;OACf,KAAK,cAAc;AAE1B,MAAM,OAAO,IAAK,SAAQ,WAAW;IAArC;;QACE,gBAAW,GAA+B,IAAI,cAAc,CAAC,WAAW,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;IACzF,CAAC;CAAA;AAED,WAAiB,IAAI;IACL,gBAAW,GAAG,cAAc,CAAC,WAAW,CAAC;AACzD,CAAC,EAFgB,IAAI,KAAJ,IAAI,QAEpB"} \ No newline at end of file diff --git a/node_modules/openai/resources/beta/chat/completions.d.ts b/node_modules/openai/resources/beta/chat/completions.d.ts new file mode 100644 index 0000000..6a57f62 --- /dev/null +++ b/node_modules/openai/resources/beta/chat/completions.d.ts @@ -0,0 +1,58 @@ +import * as Core from "../../../core.js"; +import { APIResource } from "../../../resource.js"; +import { ChatCompletionRunner, ChatCompletionFunctionRunnerParams } from "../../../lib/ChatCompletionRunner.js"; +import { ChatCompletionStreamingRunner, ChatCompletionStreamingFunctionRunnerParams } from "../../../lib/ChatCompletionStreamingRunner.js"; +import { BaseFunctionsArgs } from "../../../lib/RunnableFunction.js"; +import { RunnerOptions } from "../../../lib/AbstractChatCompletionRunner.js"; +import { ChatCompletionToolRunnerParams } from "../../../lib/ChatCompletionRunner.js"; +import { ChatCompletionStreamingToolRunnerParams } from "../../../lib/ChatCompletionStreamingRunner.js"; +import { ChatCompletionStream, type ChatCompletionStreamParams } from "../../../lib/ChatCompletionStream.js"; +import { ChatCompletion, ChatCompletionCreateParamsNonStreaming, ChatCompletionMessage, ChatCompletionMessageToolCall } from "../../chat/completions.js"; +import { ExtractParsedContentFromParams } from "../../../lib/parser.js"; +export { ChatCompletionStreamingRunner, type ChatCompletionStreamingFunctionRunnerParams, } from "../../../lib/ChatCompletionStreamingRunner.js"; +export { type RunnableFunction, type RunnableFunctions, type RunnableFunctionWithParse, type RunnableFunctionWithoutParse, ParsingFunction, ParsingToolFunction, } from "../../../lib/RunnableFunction.js"; +export { type ChatCompletionToolRunnerParams } from "../../../lib/ChatCompletionRunner.js"; +export { type ChatCompletionStreamingToolRunnerParams } from "../../../lib/ChatCompletionStreamingRunner.js"; +export { ChatCompletionStream, type ChatCompletionStreamParams } from "../../../lib/ChatCompletionStream.js"; +export { ChatCompletionRunner, type ChatCompletionFunctionRunnerParams, } from "../../../lib/ChatCompletionRunner.js"; +export interface ParsedFunction extends ChatCompletionMessageToolCall.Function { + parsed_arguments?: unknown; +} +export interface ParsedFunctionToolCall extends ChatCompletionMessageToolCall { + function: ParsedFunction; +} +export interface ParsedChatCompletionMessage extends ChatCompletionMessage { + parsed: ParsedT | null; + tool_calls: Array; +} +export interface 
ParsedChoice extends ChatCompletion.Choice { + message: ParsedChatCompletionMessage; +} +export interface ParsedChatCompletion extends ChatCompletion { + choices: Array>; +} +export type ChatCompletionParseParams = ChatCompletionCreateParamsNonStreaming; +export declare class Completions extends APIResource { + parse>(body: Params, options?: Core.RequestOptions): Core.APIPromise>; + /** + * @deprecated - use `runTools` instead. + */ + runFunctions(body: ChatCompletionFunctionRunnerParams, options?: Core.RequestOptions): ChatCompletionRunner; + runFunctions(body: ChatCompletionStreamingFunctionRunnerParams, options?: Core.RequestOptions): ChatCompletionStreamingRunner; + /** + * A convenience helper for using tool calls with the /chat/completions endpoint + * which automatically calls the JavaScript functions you provide and sends their + * results back to the /chat/completions endpoint, looping as long as the model + * requests function calls. + * + * For more details and examples, see + * [the docs](https://github.com/openai/openai-node#automated-function-calls) + */ + runTools, ParsedT = ExtractParsedContentFromParams>(body: Params, options?: RunnerOptions): ChatCompletionRunner; + runTools, ParsedT = ExtractParsedContentFromParams>(body: Params, options?: RunnerOptions): ChatCompletionStreamingRunner; + /** + * Creates a chat completion stream + */ + stream>(body: Params, options?: Core.RequestOptions): ChatCompletionStream; +} +//# sourceMappingURL=completions.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/resources/beta/chat/completions.d.ts.map b/node_modules/openai/resources/beta/chat/completions.d.ts.map new file mode 100644 index 0000000..7ac11fa --- /dev/null +++ b/node_modules/openai/resources/beta/chat/completions.d.ts.map @@ -0,0 +1 @@ 
+{"version":3,"file":"completions.d.ts","sourceRoot":"","sources":["../../../src/resources/beta/chat/completions.ts"],"names":[],"mappings":"AAEA,OAAO,KAAK,IAAI,MAAM,eAAe,CAAC;AACtC,OAAO,EAAE,WAAW,EAAE,MAAM,mBAAmB,CAAC;AAChD,OAAO,EAAE,oBAAoB,EAAE,kCAAkC,EAAE,MAAM,mCAAmC,CAAC;AAC7G,OAAO,EACL,6BAA6B,EAC7B,2CAA2C,EAC5C,MAAM,4CAA4C,CAAC;AACpD,OAAO,EAAE,iBAAiB,EAAE,MAAM,+BAA+B,CAAC;AAClE,OAAO,EAAE,aAAa,EAAE,MAAM,2CAA2C,CAAC;AAC1E,OAAO,EAAE,8BAA8B,EAAE,MAAM,mCAAmC,CAAC;AACnF,OAAO,EAAE,uCAAuC,EAAE,MAAM,4CAA4C,CAAC;AACrG,OAAO,EAAE,oBAAoB,EAAE,KAAK,0BAA0B,EAAE,MAAM,mCAAmC,CAAC;AAC1G,OAAO,EACL,cAAc,EACd,sCAAsC,EACtC,qBAAqB,EACrB,6BAA6B,EAC9B,MAAM,wBAAwB,CAAC;AAChC,OAAO,EAAE,8BAA8B,EAA2C,MAAM,qBAAqB,CAAC;AAE9G,OAAO,EACL,6BAA6B,EAC7B,KAAK,2CAA2C,GACjD,MAAM,4CAA4C,CAAC;AACpD,OAAO,EACL,KAAK,gBAAgB,EACrB,KAAK,iBAAiB,EACtB,KAAK,yBAAyB,EAC9B,KAAK,4BAA4B,EACjC,eAAe,EACf,mBAAmB,GACpB,MAAM,+BAA+B,CAAC;AACvC,OAAO,EAAE,KAAK,8BAA8B,EAAE,MAAM,mCAAmC,CAAC;AACxF,OAAO,EAAE,KAAK,uCAAuC,EAAE,MAAM,4CAA4C,CAAC;AAC1G,OAAO,EAAE,oBAAoB,EAAE,KAAK,0BAA0B,EAAE,MAAM,mCAAmC,CAAC;AAC1G,OAAO,EACL,oBAAoB,EACpB,KAAK,kCAAkC,GACxC,MAAM,mCAAmC,CAAC;AAE3C,MAAM,WAAW,cAAe,SAAQ,6BAA6B,CAAC,QAAQ;IAC5E,gBAAgB,CAAC,EAAE,OAAO,CAAC;CAC5B;AAED,MAAM,WAAW,sBAAuB,SAAQ,6BAA6B;IAC3E,QAAQ,EAAE,cAAc,CAAC;CAC1B;AAED,MAAM,WAAW,2BAA2B,CAAC,OAAO,CAAE,SAAQ,qBAAqB;IACjF,MAAM,EAAE,OAAO,GAAG,IAAI,CAAC;IACvB,UAAU,EAAE,KAAK,CAAC,sBAAsB,CAAC,CAAC;CAC3C;AAED,MAAM,WAAW,YAAY,CAAC,OAAO,CAAE,SAAQ,cAAc,CAAC,MAAM;IAClE,OAAO,EAAE,2BAA2B,CAAC,OAAO,CAAC,CAAC;CAC/C;AAED,MAAM,WAAW,oBAAoB,CAAC,OAAO,CAAE,SAAQ,cAAc;IACnE,OAAO,EAAE,KAAK,CAAC,YAAY,CAAC,OAAO,CAAC,CAAC,CAAC;CACvC;AAED,MAAM,MAAM,yBAAyB,GAAG,sCAAsC,CAAC;AAE/E,qBAAa,WAAY,SAAQ,WAAW;IAC1C,KAAK,CAAC,MAAM,SAAS,yBAAyB,EAAE,OAAO,GAAG,8BAA8B,CAAC,MAAM,CAAC,EAC9F,IAAI,EAAE,MAAM,EACZ,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAC5B,IAAI,CAAC,UAAU,CAAC,oBAAoB,CAAC,OAAO,CAAC,CAAC;IAcjD;;OAEG;IACH,YAAY,CAAC,aAAa,SAAS,iBAAiB,EAClD,IAAI,EAAE,kCAAkC,CAAC,aAAa,CAAC,EACvD,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAC5B,oBAAoB,CAAC,IAAI,CAAC;IAC7B,YAAY,CAAC,aAAa,SAAS,iBAAiB,EAClD,IAAI,EAAE,2CAA2C,CAAC,aAAa,CAAC,EAChE,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAC5B,6BAA6B,CAAC,IAAI,CAAC;IAqBtC;;;;;;;;OAQG;IACH,QAAQ,CACN,MAAM,SAAS,8BAA8B,CAAC,GAAG,CAAC,EAClD,OAAO,GAAG,8BAA8B,CAAC,MAAM,CAAC,EAChD,IAAI,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE,aAAa,GAAG,oBAAoB,CAAC,OAAO,CAAC;IAEvE,QAAQ,CACN,MAAM,SAAS,uCAAuC,CAAC,GAAG,CAAC,EAC3D,OAAO,GAAG,8BAA8B,CAAC,MAAM,CAAC,EAChD,IAAI,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE,aAAa,GAAG,6BAA6B,CAAC,OAAO,CAAC;IAoBhF;;OAEG;IACH,MAAM,CAAC,MAAM,SAAS,0BAA0B,EAAE,OAAO,GAAG,8BAA8B,CAAC,MAAM,CAAC,EAChG,IAAI,EAAE,MAAM,EACZ,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAC5B,oBAAoB,CAAC,OAAO,CAAC;CAGjC"} \ No newline at end of file diff --git a/node_modules/openai/resources/beta/chat/completions.js b/node_modules/openai/resources/beta/chat/completions.js new file mode 100644 index 0000000..e2a5bb8 --- /dev/null +++ b/node_modules/openai/resources/beta/chat/completions.js @@ -0,0 +1,52 @@ +"use strict"; +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+Object.defineProperty(exports, "__esModule", { value: true }); +exports.Completions = exports.ChatCompletionRunner = exports.ChatCompletionStream = exports.ParsingToolFunction = exports.ParsingFunction = exports.ChatCompletionStreamingRunner = void 0; +const resource_1 = require("../../../resource.js"); +const ChatCompletionRunner_1 = require("../../../lib/ChatCompletionRunner.js"); +const ChatCompletionStreamingRunner_1 = require("../../../lib/ChatCompletionStreamingRunner.js"); +const ChatCompletionStream_1 = require("../../../lib/ChatCompletionStream.js"); +const parser_1 = require("../../../lib/parser.js"); +var ChatCompletionStreamingRunner_2 = require("../../../lib/ChatCompletionStreamingRunner.js"); +Object.defineProperty(exports, "ChatCompletionStreamingRunner", { enumerable: true, get: function () { return ChatCompletionStreamingRunner_2.ChatCompletionStreamingRunner; } }); +var RunnableFunction_1 = require("../../../lib/RunnableFunction.js"); +Object.defineProperty(exports, "ParsingFunction", { enumerable: true, get: function () { return RunnableFunction_1.ParsingFunction; } }); +Object.defineProperty(exports, "ParsingToolFunction", { enumerable: true, get: function () { return RunnableFunction_1.ParsingToolFunction; } }); +var ChatCompletionStream_2 = require("../../../lib/ChatCompletionStream.js"); +Object.defineProperty(exports, "ChatCompletionStream", { enumerable: true, get: function () { return ChatCompletionStream_2.ChatCompletionStream; } }); +var ChatCompletionRunner_2 = require("../../../lib/ChatCompletionRunner.js"); +Object.defineProperty(exports, "ChatCompletionRunner", { enumerable: true, get: function () { return ChatCompletionRunner_2.ChatCompletionRunner; } }); +class Completions extends resource_1.APIResource { + parse(body, options) { + (0, parser_1.validateInputTools)(body.tools); + return this._client.chat.completions + .create(body, { + ...options, + headers: { + ...options?.headers, + 'X-Stainless-Helper-Method': 'beta.chat.completions.parse', + }, + }) + ._thenUnwrap((completion) => (0, parser_1.parseChatCompletion)(completion, body)); + } + runFunctions(body, options) { + if (body.stream) { + return ChatCompletionStreamingRunner_1.ChatCompletionStreamingRunner.runFunctions(this._client, body, options); + } + return ChatCompletionRunner_1.ChatCompletionRunner.runFunctions(this._client, body, options); + } + runTools(body, options) { + if (body.stream) { + return ChatCompletionStreamingRunner_1.ChatCompletionStreamingRunner.runTools(this._client, body, options); + } + return ChatCompletionRunner_1.ChatCompletionRunner.runTools(this._client, body, options); + } + /** + * Creates a chat completion stream + */ + stream(body, options) { + return ChatCompletionStream_1.ChatCompletionStream.createChatCompletion(this._client, body, options); + } +} +exports.Completions = Completions; +//# sourceMappingURL=completions.js.map \ No newline at end of file diff --git a/node_modules/openai/resources/beta/chat/completions.js.map b/node_modules/openai/resources/beta/chat/completions.js.map new file mode 100644 index 0000000..9f03e10 --- /dev/null +++ b/node_modules/openai/resources/beta/chat/completions.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"completions.js","sourceRoot":"","sources":["../../../src/resources/beta/chat/completions.ts"],"names":[],"mappings":";AAAA,sFAAsF;;;AAGtF,mDAAgD;AAChD,+EAA6G;AAC7G,iGAGoD;AAKpD,+EAA0G;AAO1G,mDAA8G;AAE9G,+FAGoD;AAFlD,8IAAA,6BAA6B,OAAA;AAG/B,qEAOuC;AAFrC,mHAAA,eAAe,OAAA;AACf,uHAAA,mBAAmB,OAAA;AAIrB,6EAA0G;AAAjG,4HAAA,oBAAoB,OAAA;AAC7B,6EAG2C;AAFzC,4HAAA,oBAAoB,OAAA;AA2BtB,MAAa,WAAY,SAAQ,sBAAW;IAC1C,KAAK,CACH,IAAY,EACZ,OAA6B;QAE7B,IAAA,2BAAkB,EAAC,IAAI,CAAC,KAAK,CAAC,CAAC;QAE/B,OAAO,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,WAAW;aACjC,MAAM,CAAC,IAAI,EAAE;YACZ,GAAG,OAAO;YACV,OAAO,EAAE;gBACP,GAAG,OAAO,EAAE,OAAO;gBACnB,2BAA2B,EAAE,6BAA6B;aAC3D;SACF,CAAC;aACD,WAAW,CAAC,CAAC,UAAU,EAAE,EAAE,CAAC,IAAA,4BAAmB,EAAC,UAAU,EAAE,IAAI,CAAC,CAAC,CAAC;IACxE,CAAC;IAaD,YAAY,CACV,IAE8D,EAC9D,OAA6B;QAE7B,IAAI,IAAI,CAAC,MAAM,EAAE;YACf,OAAO,6DAA6B,CAAC,YAAY,CAC/C,IAAI,CAAC,OAAO,EACZ,IAAkE,EAClE,OAAO,CACR,CAAC;SACH;QACD,OAAO,2CAAoB,CAAC,YAAY,CACtC,IAAI,CAAC,OAAO,EACZ,IAAyD,EACzD,OAAO,CACR,CAAC;IACJ,CAAC;IAqBD,QAAQ,CAIN,IAAY,EACZ,OAAuB;QAEvB,IAAI,IAAI,CAAC,MAAM,EAAE;YACf,OAAO,6DAA6B,CAAC,QAAQ,CAC3C,IAAI,CAAC,OAAO,EACZ,IAAoD,EACpD,OAAO,CACR,CAAC;SACH;QAED,OAAO,2CAAoB,CAAC,QAAQ,CAAC,IAAI,CAAC,OAAO,EAAE,IAA2C,EAAE,OAAO,CAAC,CAAC;IAC3G,CAAC;IAED;;OAEG;IACH,MAAM,CACJ,IAAY,EACZ,OAA6B;QAE7B,OAAO,2CAAoB,CAAC,oBAAoB,CAAC,IAAI,CAAC,OAAO,EAAE,IAAI,EAAE,OAAO,CAAC,CAAC;IAChF,CAAC;CACF;AA/FD,kCA+FC"} \ No newline at end of file diff --git a/node_modules/openai/resources/beta/chat/completions.mjs b/node_modules/openai/resources/beta/chat/completions.mjs new file mode 100644 index 0000000..ec80e2f --- /dev/null +++ b/node_modules/openai/resources/beta/chat/completions.mjs @@ -0,0 +1,43 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. +import { APIResource } from "../../../resource.mjs"; +import { ChatCompletionRunner } from "../../../lib/ChatCompletionRunner.mjs"; +import { ChatCompletionStreamingRunner, } from "../../../lib/ChatCompletionStreamingRunner.mjs"; +import { ChatCompletionStream } from "../../../lib/ChatCompletionStream.mjs"; +import { parseChatCompletion, validateInputTools } from "../../../lib/parser.mjs"; +export { ChatCompletionStreamingRunner, } from "../../../lib/ChatCompletionStreamingRunner.mjs"; +export { ParsingFunction, ParsingToolFunction, } from "../../../lib/RunnableFunction.mjs"; +export { ChatCompletionStream } from "../../../lib/ChatCompletionStream.mjs"; +export { ChatCompletionRunner, } from "../../../lib/ChatCompletionRunner.mjs"; +export class Completions extends APIResource { + parse(body, options) { + validateInputTools(body.tools); + return this._client.chat.completions + .create(body, { + ...options, + headers: { + ...options?.headers, + 'X-Stainless-Helper-Method': 'beta.chat.completions.parse', + }, + }) + ._thenUnwrap((completion) => parseChatCompletion(completion, body)); + } + runFunctions(body, options) { + if (body.stream) { + return ChatCompletionStreamingRunner.runFunctions(this._client, body, options); + } + return ChatCompletionRunner.runFunctions(this._client, body, options); + } + runTools(body, options) { + if (body.stream) { + return ChatCompletionStreamingRunner.runTools(this._client, body, options); + } + return ChatCompletionRunner.runTools(this._client, body, options); + } + /** + * Creates a chat completion stream + */ + stream(body, options) { + return ChatCompletionStream.createChatCompletion(this._client, body, options); + } +} +//# sourceMappingURL=completions.mjs.map \ No newline at end of file diff --git 
a/node_modules/openai/resources/beta/chat/completions.mjs.map b/node_modules/openai/resources/beta/chat/completions.mjs.map new file mode 100644 index 0000000..cd05327 --- /dev/null +++ b/node_modules/openai/resources/beta/chat/completions.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"completions.mjs","sourceRoot":"","sources":["../../../src/resources/beta/chat/completions.ts"],"names":[],"mappings":"AAAA,sFAAsF;OAG/E,EAAE,WAAW,EAAE;OACf,EAAE,oBAAoB,EAAsC;OAC5D,EACL,6BAA6B,GAE9B;OAKM,EAAE,oBAAoB,EAAmC;OAOzD,EAAkC,mBAAmB,EAAE,kBAAkB,EAAE;OAE3E,EACL,6BAA6B,GAE9B;OACM,EAKL,eAAe,EACf,mBAAmB,GACpB;OAGM,EAAE,oBAAoB,EAAmC;OACzD,EACL,oBAAoB,GAErB;AAyBD,MAAM,OAAO,WAAY,SAAQ,WAAW;IAC1C,KAAK,CACH,IAAY,EACZ,OAA6B;QAE7B,kBAAkB,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;QAE/B,OAAO,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,WAAW;aACjC,MAAM,CAAC,IAAI,EAAE;YACZ,GAAG,OAAO;YACV,OAAO,EAAE;gBACP,GAAG,OAAO,EAAE,OAAO;gBACnB,2BAA2B,EAAE,6BAA6B;aAC3D;SACF,CAAC;aACD,WAAW,CAAC,CAAC,UAAU,EAAE,EAAE,CAAC,mBAAmB,CAAC,UAAU,EAAE,IAAI,CAAC,CAAC,CAAC;IACxE,CAAC;IAaD,YAAY,CACV,IAE8D,EAC9D,OAA6B;QAE7B,IAAI,IAAI,CAAC,MAAM,EAAE;YACf,OAAO,6BAA6B,CAAC,YAAY,CAC/C,IAAI,CAAC,OAAO,EACZ,IAAkE,EAClE,OAAO,CACR,CAAC;SACH;QACD,OAAO,oBAAoB,CAAC,YAAY,CACtC,IAAI,CAAC,OAAO,EACZ,IAAyD,EACzD,OAAO,CACR,CAAC;IACJ,CAAC;IAqBD,QAAQ,CAIN,IAAY,EACZ,OAAuB;QAEvB,IAAI,IAAI,CAAC,MAAM,EAAE;YACf,OAAO,6BAA6B,CAAC,QAAQ,CAC3C,IAAI,CAAC,OAAO,EACZ,IAAoD,EACpD,OAAO,CACR,CAAC;SACH;QAED,OAAO,oBAAoB,CAAC,QAAQ,CAAC,IAAI,CAAC,OAAO,EAAE,IAA2C,EAAE,OAAO,CAAC,CAAC;IAC3G,CAAC;IAED;;OAEG;IACH,MAAM,CACJ,IAAY,EACZ,OAA6B;QAE7B,OAAO,oBAAoB,CAAC,oBAAoB,CAAC,IAAI,CAAC,OAAO,EAAE,IAAI,EAAE,OAAO,CAAC,CAAC;IAChF,CAAC;CACF"} \ No newline at end of file diff --git a/node_modules/openai/resources/beta/chat/index.d.ts b/node_modules/openai/resources/beta/chat/index.d.ts new file mode 100644 index 0000000..3e75426 --- /dev/null +++ b/node_modules/openai/resources/beta/chat/index.d.ts @@ -0,0 +1,3 @@ +export { Chat } from "./chat.js"; +export { Completions } from "./completions.js"; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/resources/beta/chat/index.d.ts.map b/node_modules/openai/resources/beta/chat/index.d.ts.map new file mode 100644 index 0000000..fd2699a --- /dev/null +++ b/node_modules/openai/resources/beta/chat/index.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/resources/beta/chat/index.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,IAAI,EAAE,MAAM,QAAQ,CAAC;AAC9B,OAAO,EAAE,WAAW,EAAE,MAAM,eAAe,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/resources/beta/chat/index.js b/node_modules/openai/resources/beta/chat/index.js new file mode 100644 index 0000000..53c3d16 --- /dev/null +++ b/node_modules/openai/resources/beta/chat/index.js @@ -0,0 +1,9 @@ +"use strict"; +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+Object.defineProperty(exports, "__esModule", { value: true }); +exports.Completions = exports.Chat = void 0; +var chat_1 = require("./chat.js"); +Object.defineProperty(exports, "Chat", { enumerable: true, get: function () { return chat_1.Chat; } }); +var completions_1 = require("./completions.js"); +Object.defineProperty(exports, "Completions", { enumerable: true, get: function () { return completions_1.Completions; } }); +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/openai/resources/beta/chat/index.js.map b/node_modules/openai/resources/beta/chat/index.js.map new file mode 100644 index 0000000..ea2dec0 --- /dev/null +++ b/node_modules/openai/resources/beta/chat/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../../src/resources/beta/chat/index.ts"],"names":[],"mappings":";AAAA,sFAAsF;;;AAEtF,kCAA8B;AAArB,4FAAA,IAAI,OAAA;AACb,gDAA4C;AAAnC,0GAAA,WAAW,OAAA"} \ No newline at end of file diff --git a/node_modules/openai/resources/beta/chat/index.mjs b/node_modules/openai/resources/beta/chat/index.mjs new file mode 100644 index 0000000..b91c862 --- /dev/null +++ b/node_modules/openai/resources/beta/chat/index.mjs @@ -0,0 +1,4 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. +export { Chat } from "./chat.mjs"; +export { Completions } from "./completions.mjs"; +//# sourceMappingURL=index.mjs.map \ No newline at end of file diff --git a/node_modules/openai/resources/beta/chat/index.mjs.map b/node_modules/openai/resources/beta/chat/index.mjs.map new file mode 100644 index 0000000..8357372 --- /dev/null +++ b/node_modules/openai/resources/beta/chat/index.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"index.mjs","sourceRoot":"","sources":["../../../src/resources/beta/chat/index.ts"],"names":[],"mappings":"AAAA,sFAAsF;OAE/E,EAAE,IAAI,EAAE;OACR,EAAE,WAAW,EAAE"} \ No newline at end of file diff --git a/node_modules/openai/resources/beta/index.d.ts b/node_modules/openai/resources/beta/index.d.ts new file mode 100644 index 0000000..31cf05c --- /dev/null +++ b/node_modules/openai/resources/beta/index.d.ts @@ -0,0 +1,6 @@ +export { AssistantsPage, Assistants, type Assistant, type AssistantDeleted, type AssistantStreamEvent, type AssistantTool, type CodeInterpreterTool, type FileSearchTool, type FunctionTool, type MessageStreamEvent, type RunStepStreamEvent, type RunStreamEvent, type ThreadStreamEvent, type AssistantCreateParams, type AssistantUpdateParams, type AssistantListParams, } from "./assistants.js"; +export { Beta } from "./beta.js"; +export { Chat } from "./chat/index.js"; +export { Threads, type AssistantResponseFormatOption, type AssistantToolChoice, type AssistantToolChoiceFunction, type AssistantToolChoiceOption, type Thread, type ThreadDeleted, type ThreadCreateParams, type ThreadUpdateParams, type ThreadCreateAndRunParams, type ThreadCreateAndRunParamsNonStreaming, type ThreadCreateAndRunParamsStreaming, type ThreadCreateAndRunPollParams, type ThreadCreateAndRunStreamParams, } from "./threads/index.js"; +export { VectorStoresPage, VectorStores, type AutoFileChunkingStrategyParam, type FileChunkingStrategy, type FileChunkingStrategyParam, type OtherFileChunkingStrategyObject, type StaticFileChunkingStrategy, type StaticFileChunkingStrategyObject, type StaticFileChunkingStrategyParam, type VectorStore, type VectorStoreDeleted, type VectorStoreCreateParams, type VectorStoreUpdateParams, type VectorStoreListParams, } from "./vector-stores/index.js"; +//# 
sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/resources/beta/index.d.ts.map b/node_modules/openai/resources/beta/index.d.ts.map new file mode 100644 index 0000000..72082f7 --- /dev/null +++ b/node_modules/openai/resources/beta/index.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/resources/beta/index.ts"],"names":[],"mappings":"AAEA,OAAO,EACL,cAAc,EACd,UAAU,EACV,KAAK,SAAS,EACd,KAAK,gBAAgB,EACrB,KAAK,oBAAoB,EACzB,KAAK,aAAa,EAClB,KAAK,mBAAmB,EACxB,KAAK,cAAc,EACnB,KAAK,YAAY,EACjB,KAAK,kBAAkB,EACvB,KAAK,kBAAkB,EACvB,KAAK,cAAc,EACnB,KAAK,iBAAiB,EACtB,KAAK,qBAAqB,EAC1B,KAAK,qBAAqB,EAC1B,KAAK,mBAAmB,GACzB,MAAM,cAAc,CAAC;AACtB,OAAO,EAAE,IAAI,EAAE,MAAM,QAAQ,CAAC;AAC9B,OAAO,EAAE,IAAI,EAAE,MAAM,cAAc,CAAC;AACpC,OAAO,EACL,OAAO,EACP,KAAK,6BAA6B,EAClC,KAAK,mBAAmB,EACxB,KAAK,2BAA2B,EAChC,KAAK,yBAAyB,EAC9B,KAAK,MAAM,EACX,KAAK,aAAa,EAClB,KAAK,kBAAkB,EACvB,KAAK,kBAAkB,EACvB,KAAK,wBAAwB,EAC7B,KAAK,oCAAoC,EACzC,KAAK,iCAAiC,EACtC,KAAK,4BAA4B,EACjC,KAAK,8BAA8B,GACpC,MAAM,iBAAiB,CAAC;AACzB,OAAO,EACL,gBAAgB,EAChB,YAAY,EACZ,KAAK,6BAA6B,EAClC,KAAK,oBAAoB,EACzB,KAAK,yBAAyB,EAC9B,KAAK,+BAA+B,EACpC,KAAK,0BAA0B,EAC/B,KAAK,gCAAgC,EACrC,KAAK,+BAA+B,EACpC,KAAK,WAAW,EAChB,KAAK,kBAAkB,EACvB,KAAK,uBAAuB,EAC5B,KAAK,uBAAuB,EAC5B,KAAK,qBAAqB,GAC3B,MAAM,uBAAuB,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/resources/beta/index.js b/node_modules/openai/resources/beta/index.js new file mode 100644 index 0000000..316bc8f --- /dev/null +++ b/node_modules/openai/resources/beta/index.js @@ -0,0 +1,17 @@ +"use strict"; +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.VectorStores = exports.VectorStoresPage = exports.Threads = exports.Chat = exports.Beta = exports.Assistants = exports.AssistantsPage = void 0; +var assistants_1 = require("./assistants.js"); +Object.defineProperty(exports, "AssistantsPage", { enumerable: true, get: function () { return assistants_1.AssistantsPage; } }); +Object.defineProperty(exports, "Assistants", { enumerable: true, get: function () { return assistants_1.Assistants; } }); +var beta_1 = require("./beta.js"); +Object.defineProperty(exports, "Beta", { enumerable: true, get: function () { return beta_1.Beta; } }); +var index_1 = require("./chat/index.js"); +Object.defineProperty(exports, "Chat", { enumerable: true, get: function () { return index_1.Chat; } }); +var index_2 = require("./threads/index.js"); +Object.defineProperty(exports, "Threads", { enumerable: true, get: function () { return index_2.Threads; } }); +var index_3 = require("./vector-stores/index.js"); +Object.defineProperty(exports, "VectorStoresPage", { enumerable: true, get: function () { return index_3.VectorStoresPage; } }); +Object.defineProperty(exports, "VectorStores", { enumerable: true, get: function () { return index_3.VectorStores; } }); +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/openai/resources/beta/index.js.map b/node_modules/openai/resources/beta/index.js.map new file mode 100644 index 0000000..d7f9ed4 --- /dev/null +++ b/node_modules/openai/resources/beta/index.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/resources/beta/index.ts"],"names":[],"mappings":";AAAA,sFAAsF;;;AAEtF,8CAiBsB;AAhBpB,4GAAA,cAAc,OAAA;AACd,wGAAA,UAAU,OAAA;AAgBZ,kCAA8B;AAArB,4FAAA,IAAI,OAAA;AACb,yCAAoC;AAA3B,6FAAA,IAAI,OAAA;AACb,4CAeyB;AAdvB,gGAAA,OAAO,OAAA;AAeT,kDAe+B;AAd7B,yGAAA,gBAAgB,OAAA;AAChB,qGAAA,YAAY,OAAA"} \ No newline at end of file diff --git a/node_modules/openai/resources/beta/index.mjs b/node_modules/openai/resources/beta/index.mjs new file mode 100644 index 0000000..0f564fe --- /dev/null +++ b/node_modules/openai/resources/beta/index.mjs @@ -0,0 +1,7 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. +export { AssistantsPage, Assistants, } from "./assistants.mjs"; +export { Beta } from "./beta.mjs"; +export { Chat } from "./chat/index.mjs"; +export { Threads, } from "./threads/index.mjs"; +export { VectorStoresPage, VectorStores, } from "./vector-stores/index.mjs"; +//# sourceMappingURL=index.mjs.map \ No newline at end of file diff --git a/node_modules/openai/resources/beta/index.mjs.map b/node_modules/openai/resources/beta/index.mjs.map new file mode 100644 index 0000000..9ab9efc --- /dev/null +++ b/node_modules/openai/resources/beta/index.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"index.mjs","sourceRoot":"","sources":["../../src/resources/beta/index.ts"],"names":[],"mappings":"AAAA,sFAAsF;OAE/E,EACL,cAAc,EACd,UAAU,GAeX;OACM,EAAE,IAAI,EAAE;OACR,EAAE,IAAI,EAAE;OACR,EACL,OAAO,GAcR;OACM,EACL,gBAAgB,EAChB,YAAY,GAab"} \ No newline at end of file diff --git a/node_modules/openai/resources/beta/threads/index.d.ts b/node_modules/openai/resources/beta/threads/index.d.ts new file mode 100644 index 0000000..51114dc --- /dev/null +++ b/node_modules/openai/resources/beta/threads/index.d.ts @@ -0,0 +1,4 @@ +export { MessagesPage, Messages, type Annotation, type AnnotationDelta, type FileCitationAnnotation, type FileCitationDeltaAnnotation, type FilePathAnnotation, type FilePathDeltaAnnotation, type ImageFile, type ImageFileContentBlock, type ImageFileDelta, type ImageFileDeltaBlock, type ImageURL, type ImageURLContentBlock, type ImageURLDelta, type ImageURLDeltaBlock, type Message, type MessageContent, type MessageContentDelta, type MessageContentPartParam, type MessageDeleted, type MessageDelta, type MessageDeltaEvent, type RefusalContentBlock, type RefusalDeltaBlock, type Text, type TextContentBlock, type TextContentBlockParam, type TextDelta, type TextDeltaBlock, type MessageCreateParams, type MessageUpdateParams, type MessageListParams, } from "./messages.js"; +export { RunsPage, Runs, type RequiredActionFunctionToolCall, type Run, type RunStatus, type RunCreateParams, type RunCreateParamsNonStreaming, type RunCreateParamsStreaming, type RunUpdateParams, type RunListParams, type RunSubmitToolOutputsParams, type RunSubmitToolOutputsParamsNonStreaming, type RunSubmitToolOutputsParamsStreaming, type RunCreateAndPollParams, type RunCreateAndStreamParams, type RunStreamParams, type RunSubmitToolOutputsAndPollParams, type RunSubmitToolOutputsStreamParams, } from "./runs/index.js"; +export { Threads, type AssistantResponseFormatOption, type AssistantToolChoice, type AssistantToolChoiceFunction, type AssistantToolChoiceOption, type Thread, type ThreadDeleted, type ThreadCreateParams, type ThreadUpdateParams, type ThreadCreateAndRunParams, type ThreadCreateAndRunParamsNonStreaming, type ThreadCreateAndRunParamsStreaming, type ThreadCreateAndRunPollParams, type ThreadCreateAndRunStreamParams, } from "./threads.js"; 
+//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/resources/beta/threads/index.d.ts.map b/node_modules/openai/resources/beta/threads/index.d.ts.map new file mode 100644 index 0000000..e6e93a5 --- /dev/null +++ b/node_modules/openai/resources/beta/threads/index.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/resources/beta/threads/index.ts"],"names":[],"mappings":"AAEA,OAAO,EACL,YAAY,EACZ,QAAQ,EACR,KAAK,UAAU,EACf,KAAK,eAAe,EACpB,KAAK,sBAAsB,EAC3B,KAAK,2BAA2B,EAChC,KAAK,kBAAkB,EACvB,KAAK,uBAAuB,EAC5B,KAAK,SAAS,EACd,KAAK,qBAAqB,EAC1B,KAAK,cAAc,EACnB,KAAK,mBAAmB,EACxB,KAAK,QAAQ,EACb,KAAK,oBAAoB,EACzB,KAAK,aAAa,EAClB,KAAK,kBAAkB,EACvB,KAAK,OAAO,EACZ,KAAK,cAAc,EACnB,KAAK,mBAAmB,EACxB,KAAK,uBAAuB,EAC5B,KAAK,cAAc,EACnB,KAAK,YAAY,EACjB,KAAK,iBAAiB,EACtB,KAAK,mBAAmB,EACxB,KAAK,iBAAiB,EACtB,KAAK,IAAI,EACT,KAAK,gBAAgB,EACrB,KAAK,qBAAqB,EAC1B,KAAK,SAAS,EACd,KAAK,cAAc,EACnB,KAAK,mBAAmB,EACxB,KAAK,mBAAmB,EACxB,KAAK,iBAAiB,GACvB,MAAM,YAAY,CAAC;AACpB,OAAO,EACL,QAAQ,EACR,IAAI,EACJ,KAAK,8BAA8B,EACnC,KAAK,GAAG,EACR,KAAK,SAAS,EACd,KAAK,eAAe,EACpB,KAAK,2BAA2B,EAChC,KAAK,wBAAwB,EAC7B,KAAK,eAAe,EACpB,KAAK,aAAa,EAClB,KAAK,0BAA0B,EAC/B,KAAK,sCAAsC,EAC3C,KAAK,mCAAmC,EACxC,KAAK,sBAAsB,EAC3B,KAAK,wBAAwB,EAC7B,KAAK,eAAe,EACpB,KAAK,iCAAiC,EACtC,KAAK,gCAAgC,GACtC,MAAM,cAAc,CAAC;AACtB,OAAO,EACL,OAAO,EACP,KAAK,6BAA6B,EAClC,KAAK,mBAAmB,EACxB,KAAK,2BAA2B,EAChC,KAAK,yBAAyB,EAC9B,KAAK,MAAM,EACX,KAAK,aAAa,EAClB,KAAK,kBAAkB,EACvB,KAAK,kBAAkB,EACvB,KAAK,wBAAwB,EAC7B,KAAK,oCAAoC,EACzC,KAAK,iCAAiC,EACtC,KAAK,4BAA4B,EACjC,KAAK,8BAA8B,GACpC,MAAM,WAAW,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/resources/beta/threads/index.js b/node_modules/openai/resources/beta/threads/index.js new file mode 100644 index 0000000..e1c2df7 --- /dev/null +++ b/node_modules/openai/resources/beta/threads/index.js @@ -0,0 +1,13 @@ +"use strict"; +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+Object.defineProperty(exports, "__esModule", { value: true }); +exports.Threads = exports.Runs = exports.RunsPage = exports.Messages = exports.MessagesPage = void 0; +var messages_1 = require("./messages.js"); +Object.defineProperty(exports, "MessagesPage", { enumerable: true, get: function () { return messages_1.MessagesPage; } }); +Object.defineProperty(exports, "Messages", { enumerable: true, get: function () { return messages_1.Messages; } }); +var index_1 = require("./runs/index.js"); +Object.defineProperty(exports, "RunsPage", { enumerable: true, get: function () { return index_1.RunsPage; } }); +Object.defineProperty(exports, "Runs", { enumerable: true, get: function () { return index_1.Runs; } }); +var threads_1 = require("./threads.js"); +Object.defineProperty(exports, "Threads", { enumerable: true, get: function () { return threads_1.Threads; } }); +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/openai/resources/beta/threads/index.js.map b/node_modules/openai/resources/beta/threads/index.js.map new file mode 100644 index 0000000..ed998e9 --- /dev/null +++ b/node_modules/openai/resources/beta/threads/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../../src/resources/beta/threads/index.ts"],"names":[],"mappings":";AAAA,sFAAsF;;;AAEtF,0CAkCoB;AAjClB,wGAAA,YAAY,OAAA;AACZ,oGAAA,QAAQ,OAAA;AAiCV,yCAmBsB;AAlBpB,iGAAA,QAAQ,OAAA;AACR,6FAAA,IAAI,OAAA;AAkBN,wCAemB;AAdjB,kGAAA,OAAO,OAAA"} \ No newline at end of file diff --git a/node_modules/openai/resources/beta/threads/index.mjs b/node_modules/openai/resources/beta/threads/index.mjs new file mode 100644 index 0000000..ce66f45 --- /dev/null +++ b/node_modules/openai/resources/beta/threads/index.mjs @@ -0,0 +1,5 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. +export { MessagesPage, Messages, } from "./messages.mjs"; +export { RunsPage, Runs, } from "./runs/index.mjs"; +export { Threads, } from "./threads.mjs"; +//# sourceMappingURL=index.mjs.map \ No newline at end of file diff --git a/node_modules/openai/resources/beta/threads/index.mjs.map b/node_modules/openai/resources/beta/threads/index.mjs.map new file mode 100644 index 0000000..fad00d4 --- /dev/null +++ b/node_modules/openai/resources/beta/threads/index.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"index.mjs","sourceRoot":"","sources":["../../../src/resources/beta/threads/index.ts"],"names":[],"mappings":"AAAA,sFAAsF;OAE/E,EACL,YAAY,EACZ,QAAQ,GAgCT;OACM,EACL,QAAQ,EACR,IAAI,GAiBL;OACM,EACL,OAAO,GAcR"} \ No newline at end of file diff --git a/node_modules/openai/resources/beta/threads/messages.d.ts b/node_modules/openai/resources/beta/threads/messages.d.ts new file mode 100644 index 0000000..56f7a7f --- /dev/null +++ b/node_modules/openai/resources/beta/threads/messages.d.ts @@ -0,0 +1,558 @@ +import { APIResource } from "../../../resource.js"; +import * as Core from "../../../core.js"; +import * as AssistantsAPI from "../assistants.js"; +import { CursorPage, type CursorPageParams } from "../../../pagination.js"; +export declare class Messages extends APIResource { + /** + * Create a message. + */ + create(threadId: string, body: MessageCreateParams, options?: Core.RequestOptions): Core.APIPromise; + /** + * Retrieve a message. + */ + retrieve(threadId: string, messageId: string, options?: Core.RequestOptions): Core.APIPromise; + /** + * Modifies a message. 
+ */ + update(threadId: string, messageId: string, body: MessageUpdateParams, options?: Core.RequestOptions): Core.APIPromise; + /** + * Returns a list of messages for a given thread. + */ + list(threadId: string, query?: MessageListParams, options?: Core.RequestOptions): Core.PagePromise; + list(threadId: string, options?: Core.RequestOptions): Core.PagePromise; + /** + * Deletes a message. + */ + del(threadId: string, messageId: string, options?: Core.RequestOptions): Core.APIPromise; +} +export declare class MessagesPage extends CursorPage { +} +/** + * A citation within the message that points to a specific quote from a specific + * File associated with the assistant or the message. Generated when the assistant + * uses the "file_search" tool to search files. + */ +export type Annotation = FileCitationAnnotation | FilePathAnnotation; +/** + * A citation within the message that points to a specific quote from a specific + * File associated with the assistant or the message. Generated when the assistant + * uses the "file_search" tool to search files. + */ +export type AnnotationDelta = FileCitationDeltaAnnotation | FilePathDeltaAnnotation; +/** + * A citation within the message that points to a specific quote from a specific + * File associated with the assistant or the message. Generated when the assistant + * uses the "file_search" tool to search files. + */ +export interface FileCitationAnnotation { + end_index: number; + file_citation: FileCitationAnnotation.FileCitation; + start_index: number; + /** + * The text in the message content that needs to be replaced. + */ + text: string; + /** + * Always `file_citation`. + */ + type: 'file_citation'; +} +export declare namespace FileCitationAnnotation { + interface FileCitation { + /** + * The ID of the specific File the citation is from. + */ + file_id: string; + } +} +/** + * A citation within the message that points to a specific quote from a specific + * File associated with the assistant or the message. Generated when the assistant + * uses the "file_search" tool to search files. + */ +export interface FileCitationDeltaAnnotation { + /** + * The index of the annotation in the text content part. + */ + index: number; + /** + * Always `file_citation`. + */ + type: 'file_citation'; + end_index?: number; + file_citation?: FileCitationDeltaAnnotation.FileCitation; + start_index?: number; + /** + * The text in the message content that needs to be replaced. + */ + text?: string; +} +export declare namespace FileCitationDeltaAnnotation { + interface FileCitation { + /** + * The ID of the specific File the citation is from. + */ + file_id?: string; + /** + * The specific quote in the file. + */ + quote?: string; + } +} +/** + * A URL for the file that's generated when the assistant used the + * `code_interpreter` tool to generate a file. + */ +export interface FilePathAnnotation { + end_index: number; + file_path: FilePathAnnotation.FilePath; + start_index: number; + /** + * The text in the message content that needs to be replaced. + */ + text: string; + /** + * Always `file_path`. + */ + type: 'file_path'; +} +export declare namespace FilePathAnnotation { + interface FilePath { + /** + * The ID of the file that was generated. + */ + file_id: string; + } +} +/** + * A URL for the file that's generated when the assistant used the + * `code_interpreter` tool to generate a file. + */ +export interface FilePathDeltaAnnotation { + /** + * The index of the annotation in the text content part. + */ + index: number; + /** + * Always `file_path`. 
+ */ + type: 'file_path'; + end_index?: number; + file_path?: FilePathDeltaAnnotation.FilePath; + start_index?: number; + /** + * The text in the message content that needs to be replaced. + */ + text?: string; +} +export declare namespace FilePathDeltaAnnotation { + interface FilePath { + /** + * The ID of the file that was generated. + */ + file_id?: string; + } +} +export interface ImageFile { + /** + * The [File](https://platform.openai.com/docs/api-reference/files) ID of the image + * in the message content. Set `purpose="vision"` when uploading the File if you + * need to later display the file content. + */ + file_id: string; + /** + * Specifies the detail level of the image if specified by the user. `low` uses + * fewer tokens, you can opt in to high resolution using `high`. + */ + detail?: 'auto' | 'low' | 'high'; +} +/** + * References an image [File](https://platform.openai.com/docs/api-reference/files) + * in the content of a message. + */ +export interface ImageFileContentBlock { + image_file: ImageFile; + /** + * Always `image_file`. + */ + type: 'image_file'; +} +export interface ImageFileDelta { + /** + * Specifies the detail level of the image if specified by the user. `low` uses + * fewer tokens, you can opt in to high resolution using `high`. + */ + detail?: 'auto' | 'low' | 'high'; + /** + * The [File](https://platform.openai.com/docs/api-reference/files) ID of the image + * in the message content. Set `purpose="vision"` when uploading the File if you + * need to later display the file content. + */ + file_id?: string; +} +/** + * References an image [File](https://platform.openai.com/docs/api-reference/files) + * in the content of a message. + */ +export interface ImageFileDeltaBlock { + /** + * The index of the content part in the message. + */ + index: number; + /** + * Always `image_file`. + */ + type: 'image_file'; + image_file?: ImageFileDelta; +} +export interface ImageURL { + /** + * The external URL of the image, must be a supported image types: jpeg, jpg, png, + * gif, webp. + */ + url: string; + /** + * Specifies the detail level of the image. `low` uses fewer tokens, you can opt in + * to high resolution using `high`. Default value is `auto` + */ + detail?: 'auto' | 'low' | 'high'; +} +/** + * References an image URL in the content of a message. + */ +export interface ImageURLContentBlock { + image_url: ImageURL; + /** + * The type of the content part. + */ + type: 'image_url'; +} +export interface ImageURLDelta { + /** + * Specifies the detail level of the image. `low` uses fewer tokens, you can opt in + * to high resolution using `high`. + */ + detail?: 'auto' | 'low' | 'high'; + /** + * The URL of the image, must be a supported image types: jpeg, jpg, png, gif, + * webp. + */ + url?: string; +} +/** + * References an image URL in the content of a message. + */ +export interface ImageURLDeltaBlock { + /** + * The index of the content part in the message. + */ + index: number; + /** + * Always `image_url`. + */ + type: 'image_url'; + image_url?: ImageURLDelta; +} +/** + * Represents a message within a + * [thread](https://platform.openai.com/docs/api-reference/threads). + */ +export interface Message { + /** + * The identifier, which can be referenced in API endpoints. + */ + id: string; + /** + * If applicable, the ID of the + * [assistant](https://platform.openai.com/docs/api-reference/assistants) that + * authored this message. + */ + assistant_id: string | null; + /** + * A list of files attached to the message, and the tools they were added to. 
+ */ + attachments: Array | null; + /** + * The Unix timestamp (in seconds) for when the message was completed. + */ + completed_at: number | null; + /** + * The content of the message in array of text and/or images. + */ + content: Array; + /** + * The Unix timestamp (in seconds) for when the message was created. + */ + created_at: number; + /** + * The Unix timestamp (in seconds) for when the message was marked as incomplete. + */ + incomplete_at: number | null; + /** + * On an incomplete message, details about why the message is incomplete. + */ + incomplete_details: Message.IncompleteDetails | null; + /** + * Set of 16 key-value pairs that can be attached to an object. This can be useful + * for storing additional information about the object in a structured format. Keys + * can be a maximum of 64 characters long and values can be a maxium of 512 + * characters long. + */ + metadata: unknown | null; + /** + * The object type, which is always `thread.message`. + */ + object: 'thread.message'; + /** + * The entity that produced the message. One of `user` or `assistant`. + */ + role: 'user' | 'assistant'; + /** + * The ID of the [run](https://platform.openai.com/docs/api-reference/runs) + * associated with the creation of this message. Value is `null` when messages are + * created manually using the create message or create thread endpoints. + */ + run_id: string | null; + /** + * The status of the message, which can be either `in_progress`, `incomplete`, or + * `completed`. + */ + status: 'in_progress' | 'incomplete' | 'completed'; + /** + * The [thread](https://platform.openai.com/docs/api-reference/threads) ID that + * this message belongs to. + */ + thread_id: string; +} +export declare namespace Message { + interface Attachment { + /** + * The ID of the file to attach to the message. + */ + file_id?: string; + /** + * The tools to add this file to. + */ + tools?: Array; + } + namespace Attachment { + interface AssistantToolsFileSearchTypeOnly { + /** + * The type of tool being defined: `file_search` + */ + type: 'file_search'; + } + } + /** + * On an incomplete message, details about why the message is incomplete. + */ + interface IncompleteDetails { + /** + * The reason the message is incomplete. + */ + reason: 'content_filter' | 'max_tokens' | 'run_cancelled' | 'run_expired' | 'run_failed'; + } +} +/** + * References an image [File](https://platform.openai.com/docs/api-reference/files) + * in the content of a message. + */ +export type MessageContent = ImageFileContentBlock | ImageURLContentBlock | TextContentBlock | RefusalContentBlock; +/** + * References an image [File](https://platform.openai.com/docs/api-reference/files) + * in the content of a message. + */ +export type MessageContentDelta = ImageFileDeltaBlock | TextDeltaBlock | RefusalDeltaBlock | ImageURLDeltaBlock; +/** + * References an image [File](https://platform.openai.com/docs/api-reference/files) + * in the content of a message. + */ +export type MessageContentPartParam = ImageFileContentBlock | ImageURLContentBlock | TextContentBlockParam; +export interface MessageDeleted { + id: string; + deleted: boolean; + object: 'thread.message.deleted'; +} +/** + * The delta containing the fields that have changed on the Message. + */ +export interface MessageDelta { + /** + * The content of the message in array of text and/or images. + */ + content?: Array; + /** + * The entity that produced the message. One of `user` or `assistant`. + */ + role?: 'user' | 'assistant'; +} +/** + * Represents a message delta i.e. 
any changed fields on a message during + * streaming. + */ +export interface MessageDeltaEvent { + /** + * The identifier of the message, which can be referenced in API endpoints. + */ + id: string; + /** + * The delta containing the fields that have changed on the Message. + */ + delta: MessageDelta; + /** + * The object type, which is always `thread.message.delta`. + */ + object: 'thread.message.delta'; +} +/** + * The refusal content generated by the assistant. + */ +export interface RefusalContentBlock { + refusal: string; + /** + * Always `refusal`. + */ + type: 'refusal'; +} +/** + * The refusal content that is part of a message. + */ +export interface RefusalDeltaBlock { + /** + * The index of the refusal part in the message. + */ + index: number; + /** + * Always `refusal`. + */ + type: 'refusal'; + refusal?: string; +} +export interface Text { + annotations: Array; + /** + * The data that makes up the text. + */ + value: string; +} +/** + * The text content that is part of a message. + */ +export interface TextContentBlock { + text: Text; + /** + * Always `text`. + */ + type: 'text'; +} +/** + * The text content that is part of a message. + */ +export interface TextContentBlockParam { + /** + * Text content to be sent to the model + */ + text: string; + /** + * Always `text`. + */ + type: 'text'; +} +export interface TextDelta { + annotations?: Array; + /** + * The data that makes up the text. + */ + value?: string; +} +/** + * The text content that is part of a message. + */ +export interface TextDeltaBlock { + /** + * The index of the content part in the message. + */ + index: number; + /** + * Always `text`. + */ + type: 'text'; + text?: TextDelta; +} +export interface MessageCreateParams { + /** + * The text contents of the message. + */ + content: string | Array; + /** + * The role of the entity that is creating the message. Allowed values include: + * + * - `user`: Indicates the message is sent by an actual user and should be used in + * most cases to represent user-generated messages. + * - `assistant`: Indicates the message is generated by the assistant. Use this + * value to insert messages from the assistant into the conversation. + */ + role: 'user' | 'assistant'; + /** + * A list of files attached to the message, and the tools they should be added to. + */ + attachments?: Array | null; + /** + * Set of 16 key-value pairs that can be attached to an object. This can be useful + * for storing additional information about the object in a structured format. Keys + * can be a maximum of 64 characters long and values can be a maxium of 512 + * characters long. + */ + metadata?: unknown | null; +} +export declare namespace MessageCreateParams { + interface Attachment { + /** + * The ID of the file to attach to the message. + */ + file_id?: string; + /** + * The tools to add this file to. + */ + tools?: Array; + } + namespace Attachment { + interface FileSearch { + /** + * The type of tool being defined: `file_search` + */ + type: 'file_search'; + } + } +} +export interface MessageUpdateParams { + /** + * Set of 16 key-value pairs that can be attached to an object. This can be useful + * for storing additional information about the object in a structured format. Keys + * can be a maximum of 64 characters long and values can be a maxium of 512 + * characters long. + */ + metadata?: unknown | null; +} +export interface MessageListParams extends CursorPageParams { + /** + * A cursor for use in pagination. `before` is an object ID that defines your place + * in the list. 
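A sketch of `MessageCreateParams` with a `file_search` attachment, followed by a list call using the cursor and `order` parameters described above. All IDs and metadata values are placeholders.

// Sketch: a file_search attachment plus the cursor parameters from MessageListParams.
// 'thread_abc' and 'file-abc123' are hypothetical placeholders.
import OpenAI from 'openai';
const openai = new OpenAI();

const created = await openai.beta.threads.messages.create('thread_abc', {
  role: 'user',
  content: 'Summarize the attached report.',
  attachments: [{ file_id: 'file-abc123', tools: [{ type: 'file_search' }] }],
  metadata: { source: 'ai-agent' },
});

// Walk backwards from the new message, newest first.
const page = await openai.beta.threads.messages.list('thread_abc', {
  order: 'desc',
  limit: 20,
  before: created.id,
});
console.log(page.data.length, 'earlier messages');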
For instance, if you make a list request and receive 100 objects, + * starting with obj_foo, your subsequent call can include before=obj_foo in order + * to fetch the previous page of the list. + */ + before?: string; + /** + * Sort order by the `created_at` timestamp of the objects. `asc` for ascending + * order and `desc` for descending order. + */ + order?: 'asc' | 'desc'; + /** + * Filter messages by the run ID that generated them. + */ + run_id?: string; +} +export declare namespace Messages { + export { type Annotation as Annotation, type AnnotationDelta as AnnotationDelta, type FileCitationAnnotation as FileCitationAnnotation, type FileCitationDeltaAnnotation as FileCitationDeltaAnnotation, type FilePathAnnotation as FilePathAnnotation, type FilePathDeltaAnnotation as FilePathDeltaAnnotation, type ImageFile as ImageFile, type ImageFileContentBlock as ImageFileContentBlock, type ImageFileDelta as ImageFileDelta, type ImageFileDeltaBlock as ImageFileDeltaBlock, type ImageURL as ImageURL, type ImageURLContentBlock as ImageURLContentBlock, type ImageURLDelta as ImageURLDelta, type ImageURLDeltaBlock as ImageURLDeltaBlock, type Message as Message, type MessageContent as MessageContent, type MessageContentDelta as MessageContentDelta, type MessageContentPartParam as MessageContentPartParam, type MessageDeleted as MessageDeleted, type MessageDelta as MessageDelta, type MessageDeltaEvent as MessageDeltaEvent, type RefusalContentBlock as RefusalContentBlock, type RefusalDeltaBlock as RefusalDeltaBlock, type Text as Text, type TextContentBlock as TextContentBlock, type TextContentBlockParam as TextContentBlockParam, type TextDelta as TextDelta, type TextDeltaBlock as TextDeltaBlock, MessagesPage as MessagesPage, type MessageCreateParams as MessageCreateParams, type MessageUpdateParams as MessageUpdateParams, type MessageListParams as MessageListParams, }; +} +//# sourceMappingURL=messages.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/resources/beta/threads/messages.d.ts.map b/node_modules/openai/resources/beta/threads/messages.d.ts.map new file mode 100644 index 0000000..6f84aca --- /dev/null +++ b/node_modules/openai/resources/beta/threads/messages.d.ts.map @@ -0,0 +1 @@ 
+{"version":3,"file":"messages.d.ts","sourceRoot":"","sources":["../../../src/resources/beta/threads/messages.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,WAAW,EAAE,MAAM,mBAAmB,CAAC;AAEhD,OAAO,KAAK,IAAI,MAAM,eAAe,CAAC;AACtC,OAAO,KAAK,aAAa,MAAM,eAAe,CAAC;AAC/C,OAAO,EAAE,UAAU,EAAE,KAAK,gBAAgB,EAAE,MAAM,qBAAqB,CAAC;AAExE,qBAAa,QAAS,SAAQ,WAAW;IACvC;;OAEG;IACH,MAAM,CACJ,QAAQ,EAAE,MAAM,EAChB,IAAI,EAAE,mBAAmB,EACzB,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAC5B,IAAI,CAAC,UAAU,CAAC,OAAO,CAAC;IAQ3B;;OAEG;IACH,QAAQ,CAAC,QAAQ,EAAE,MAAM,EAAE,SAAS,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAAG,IAAI,CAAC,UAAU,CAAC,OAAO,CAAC;IAOtG;;OAEG;IACH,MAAM,CACJ,QAAQ,EAAE,MAAM,EAChB,SAAS,EAAE,MAAM,EACjB,IAAI,EAAE,mBAAmB,EACzB,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAC5B,IAAI,CAAC,UAAU,CAAC,OAAO,CAAC;IAQ3B;;OAEG;IACH,IAAI,CACF,QAAQ,EAAE,MAAM,EAChB,KAAK,CAAC,EAAE,iBAAiB,EACzB,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAC5B,IAAI,CAAC,WAAW,CAAC,YAAY,EAAE,OAAO,CAAC;IAC1C,IAAI,CAAC,QAAQ,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAAG,IAAI,CAAC,WAAW,CAAC,YAAY,EAAE,OAAO,CAAC;IAgB9F;;OAEG;IACH,GAAG,CAAC,QAAQ,EAAE,MAAM,EAAE,SAAS,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAAG,IAAI,CAAC,UAAU,CAAC,cAAc,CAAC;CAMzG;AAED,qBAAa,YAAa,SAAQ,UAAU,CAAC,OAAO,CAAC;CAAG;AAExD;;;;GAIG;AACH,MAAM,MAAM,UAAU,GAAG,sBAAsB,GAAG,kBAAkB,CAAC;AAErE;;;;GAIG;AACH,MAAM,MAAM,eAAe,GAAG,2BAA2B,GAAG,uBAAuB,CAAC;AAEpF;;;;GAIG;AACH,MAAM,WAAW,sBAAsB;IACrC,SAAS,EAAE,MAAM,CAAC;IAElB,aAAa,EAAE,sBAAsB,CAAC,YAAY,CAAC;IAEnD,WAAW,EAAE,MAAM,CAAC;IAEpB;;OAEG;IACH,IAAI,EAAE,MAAM,CAAC;IAEb;;OAEG;IACH,IAAI,EAAE,eAAe,CAAC;CACvB;AAED,yBAAiB,sBAAsB,CAAC;IACtC,UAAiB,YAAY;QAC3B;;WAEG;QACH,OAAO,EAAE,MAAM,CAAC;KACjB;CACF;AAED;;;;GAIG;AACH,MAAM,WAAW,2BAA2B;IAC1C;;OAEG;IACH,KAAK,EAAE,MAAM,CAAC;IAEd;;OAEG;IACH,IAAI,EAAE,eAAe,CAAC;IAEtB,SAAS,CAAC,EAAE,MAAM,CAAC;IAEnB,aAAa,CAAC,EAAE,2BAA2B,CAAC,YAAY,CAAC;IAEzD,WAAW,CAAC,EAAE,MAAM,CAAC;IAErB;;OAEG;IACH,IAAI,CAAC,EAAE,MAAM,CAAC;CACf;AAED,yBAAiB,2BAA2B,CAAC;IAC3C,UAAiB,YAAY;QAC3B;;WAEG;QACH,OAAO,CAAC,EAAE,MAAM,CAAC;QAEjB;;WAEG;QACH,KAAK,CAAC,EAAE,MAAM,CAAC;KAChB;CACF;AAED;;;GAGG;AACH,MAAM,WAAW,kBAAkB;IACjC,SAAS,EAAE,MAAM,CAAC;IAElB,SAAS,EAAE,kBAAkB,CAAC,QAAQ,CAAC;IAEvC,WAAW,EAAE,MAAM,CAAC;IAEpB;;OAEG;IACH,IAAI,EAAE,MAAM,CAAC;IAEb;;OAEG;IACH,IAAI,EAAE,WAAW,CAAC;CACnB;AAED,yBAAiB,kBAAkB,CAAC;IAClC,UAAiB,QAAQ;QACvB;;WAEG;QACH,OAAO,EAAE,MAAM,CAAC;KACjB;CACF;AAED;;;GAGG;AACH,MAAM,WAAW,uBAAuB;IACtC;;OAEG;IACH,KAAK,EAAE,MAAM,CAAC;IAEd;;OAEG;IACH,IAAI,EAAE,WAAW,CAAC;IAElB,SAAS,CAAC,EAAE,MAAM,CAAC;IAEnB,SAAS,CAAC,EAAE,uBAAuB,CAAC,QAAQ,CAAC;IAE7C,WAAW,CAAC,EAAE,MAAM,CAAC;IAErB;;OAEG;IACH,IAAI,CAAC,EAAE,MAAM,CAAC;CACf;AAED,yBAAiB,uBAAuB,CAAC;IACvC,UAAiB,QAAQ;QACvB;;WAEG;QACH,OAAO,CAAC,EAAE,MAAM,CAAC;KAClB;CACF;AAED,MAAM,WAAW,SAAS;IACxB;;;;OAIG;IACH,OAAO,EAAE,MAAM,CAAC;IAEhB;;;OAGG;IACH,MAAM,CAAC,EAAE,MAAM,GAAG,KAAK,GAAG,MAAM,CAAC;CAClC;AAED;;;GAGG;AACH,MAAM,WAAW,qBAAqB;IACpC,UAAU,EAAE,SAAS,CAAC;IAEtB;;OAEG;IACH,IAAI,EAAE,YAAY,CAAC;CACpB;AAED,MAAM,WAAW,cAAc;IAC7B;;;OAGG;IACH,MAAM,CAAC,EAAE,MAAM,GAAG,KAAK,GAAG,MAAM,CAAC;IAEjC;;;;OAIG;IACH,OAAO,CAAC,EAAE,MAAM,CAAC;CAClB;AAED;;;GAGG;AACH,MAAM,WAAW,mBAAmB;IAClC;;OAEG;IACH,KAAK,EAAE,MAAM,CAAC;IAEd;;OAEG;IACH,IAAI,EAAE,YAAY,CAAC;IAEnB,UAAU,CAAC,EAAE,cAAc,CAAC;CAC7B;AAED,MAAM,WAAW,QAAQ;IACvB;;;OAGG;IACH,GAAG,EAAE,MAAM,CAAC;IAEZ;;;OAGG;IACH,MAAM,CAAC,EAAE,MAAM,GAAG,KAAK,GAAG,MAAM,CAAC;CAClC;AAED;;GAEG;AACH,MAAM,WAAW,oBAAoB;IACnC,SAAS,EAAE,QAAQ,CAAC;IAEpB;;OAEG;IACH,IAAI,EAAE,WAAW,CAAC;CACnB;AAED,MAAM,WAAW,aAAa;IAC5B;;;OAGG;IACH,MAAM,CAAC,EAAE,MAAM,GAAG,KAAK,GAAG,MAAM,CAAC;IAEjC;;;OAGG;IACH,GAAG,CAAC,EAAE,MAAM,CAAC;CACd;AAED;;GAEG;AA
CH,MAAM,WAAW,kBAAkB;IACjC;;OAEG;IACH,KAAK,EAAE,MAAM,CAAC;IAEd;;OAEG;IACH,IAAI,EAAE,WAAW,CAAC;IAElB,SAAS,CAAC,EAAE,aAAa,CAAC;CAC3B;AAED;;;GAGG;AACH,MAAM,WAAW,OAAO;IACtB;;OAEG;IACH,EAAE,EAAE,MAAM,CAAC;IAEX;;;;OAIG;IACH,YAAY,EAAE,MAAM,GAAG,IAAI,CAAC;IAE5B;;OAEG;IACH,WAAW,EAAE,KAAK,CAAC,OAAO,CAAC,UAAU,CAAC,GAAG,IAAI,CAAC;IAE9C;;OAEG;IACH,YAAY,EAAE,MAAM,GAAG,IAAI,CAAC;IAE5B;;OAEG;IACH,OAAO,EAAE,KAAK,CAAC,cAAc,CAAC,CAAC;IAE/B;;OAEG;IACH,UAAU,EAAE,MAAM,CAAC;IAEnB;;OAEG;IACH,aAAa,EAAE,MAAM,GAAG,IAAI,CAAC;IAE7B;;OAEG;IACH,kBAAkB,EAAE,OAAO,CAAC,iBAAiB,GAAG,IAAI,CAAC;IAErD;;;;;OAKG;IACH,QAAQ,EAAE,OAAO,GAAG,IAAI,CAAC;IAEzB;;OAEG;IACH,MAAM,EAAE,gBAAgB,CAAC;IAEzB;;OAEG;IACH,IAAI,EAAE,MAAM,GAAG,WAAW,CAAC;IAE3B;;;;OAIG;IACH,MAAM,EAAE,MAAM,GAAG,IAAI,CAAC;IAEtB;;;OAGG;IACH,MAAM,EAAE,aAAa,GAAG,YAAY,GAAG,WAAW,CAAC;IAEnD;;;OAGG;IACH,SAAS,EAAE,MAAM,CAAC;CACnB;AAED,yBAAiB,OAAO,CAAC;IACvB,UAAiB,UAAU;QACzB;;WAEG;QACH,OAAO,CAAC,EAAE,MAAM,CAAC;QAEjB;;WAEG;QACH,KAAK,CAAC,EAAE,KAAK,CAAC,aAAa,CAAC,mBAAmB,GAAG,UAAU,CAAC,gCAAgC,CAAC,CAAC;KAChG;IAED,UAAiB,UAAU,CAAC;QAC1B,UAAiB,gCAAgC;YAC/C;;eAEG;YACH,IAAI,EAAE,aAAa,CAAC;SACrB;KACF;IAED;;OAEG;IACH,UAAiB,iBAAiB;QAChC;;WAEG;QACH,MAAM,EAAE,gBAAgB,GAAG,YAAY,GAAG,eAAe,GAAG,aAAa,GAAG,YAAY,CAAC;KAC1F;CACF;AAED;;;GAGG;AACH,MAAM,MAAM,cAAc,GACtB,qBAAqB,GACrB,oBAAoB,GACpB,gBAAgB,GAChB,mBAAmB,CAAC;AAExB;;;GAGG;AACH,MAAM,MAAM,mBAAmB,GAC3B,mBAAmB,GACnB,cAAc,GACd,iBAAiB,GACjB,kBAAkB,CAAC;AAEvB;;;GAGG;AACH,MAAM,MAAM,uBAAuB,GAAG,qBAAqB,GAAG,oBAAoB,GAAG,qBAAqB,CAAC;AAE3G,MAAM,WAAW,cAAc;IAC7B,EAAE,EAAE,MAAM,CAAC;IAEX,OAAO,EAAE,OAAO,CAAC;IAEjB,MAAM,EAAE,wBAAwB,CAAC;CAClC;AAED;;GAEG;AACH,MAAM,WAAW,YAAY;IAC3B;;OAEG;IACH,OAAO,CAAC,EAAE,KAAK,CAAC,mBAAmB,CAAC,CAAC;IAErC;;OAEG;IACH,IAAI,CAAC,EAAE,MAAM,GAAG,WAAW,CAAC;CAC7B;AAED;;;GAGG;AACH,MAAM,WAAW,iBAAiB;IAChC;;OAEG;IACH,EAAE,EAAE,MAAM,CAAC;IAEX;;OAEG;IACH,KAAK,EAAE,YAAY,CAAC;IAEpB;;OAEG;IACH,MAAM,EAAE,sBAAsB,CAAC;CAChC;AAED;;GAEG;AACH,MAAM,WAAW,mBAAmB;IAClC,OAAO,EAAE,MAAM,CAAC;IAEhB;;OAEG;IACH,IAAI,EAAE,SAAS,CAAC;CACjB;AAED;;GAEG;AACH,MAAM,WAAW,iBAAiB;IAChC;;OAEG;IACH,KAAK,EAAE,MAAM,CAAC;IAEd;;OAEG;IACH,IAAI,EAAE,SAAS,CAAC;IAEhB,OAAO,CAAC,EAAE,MAAM,CAAC;CAClB;AAED,MAAM,WAAW,IAAI;IACnB,WAAW,EAAE,KAAK,CAAC,UAAU,CAAC,CAAC;IAE/B;;OAEG;IACH,KAAK,EAAE,MAAM,CAAC;CACf;AAED;;GAEG;AACH,MAAM,WAAW,gBAAgB;IAC/B,IAAI,EAAE,IAAI,CAAC;IAEX;;OAEG;IACH,IAAI,EAAE,MAAM,CAAC;CACd;AAED;;GAEG;AACH,MAAM,WAAW,qBAAqB;IACpC;;OAEG;IACH,IAAI,EAAE,MAAM,CAAC;IAEb;;OAEG;IACH,IAAI,EAAE,MAAM,CAAC;CACd;AAED,MAAM,WAAW,SAAS;IACxB,WAAW,CAAC,EAAE,KAAK,CAAC,eAAe,CAAC,CAAC;IAErC;;OAEG;IACH,KAAK,CAAC,EAAE,MAAM,CAAC;CAChB;AAED;;GAEG;AACH,MAAM,WAAW,cAAc;IAC7B;;OAEG;IACH,KAAK,EAAE,MAAM,CAAC;IAEd;;OAEG;IACH,IAAI,EAAE,MAAM,CAAC;IAEb,IAAI,CAAC,EAAE,SAAS,CAAC;CAClB;AAED,MAAM,WAAW,mBAAmB;IAClC;;OAEG;IACH,OAAO,EAAE,MAAM,GAAG,KAAK,CAAC,uBAAuB,CAAC,CAAC;IAEjD;;;;;;;OAOG;IACH,IAAI,EAAE,MAAM,GAAG,WAAW,CAAC;IAE3B;;OAEG;IACH,WAAW,CAAC,EAAE,KAAK,CAAC,mBAAmB,CAAC,UAAU,CAAC,GAAG,IAAI,CAAC;IAE3D;;;;;OAKG;IACH,QAAQ,CAAC,EAAE,OAAO,GAAG,IAAI,CAAC;CAC3B;AAED,yBAAiB,mBAAmB,CAAC;IACnC,UAAiB,UAAU;QACzB;;WAEG;QACH,OAAO,CAAC,EAAE,MAAM,CAAC;QAEjB;;WAEG;QACH,KAAK,CAAC,EAAE,KAAK,CAAC,aAAa,CAAC,mBAAmB,GAAG,UAAU,CAAC,UAAU,CAAC,CAAC;KAC1E;IAED,UAAiB,UAAU,CAAC;QAC1B,UAAiB,UAAU;YACzB;;eAEG;YACH,IAAI,EAAE,aAAa,CAAC;SACrB;KACF;CACF;AAED,MAAM,WAAW,mBAAmB;IAClC;;;;;OAKG;IACH,QAAQ,CAAC,EAAE,OAAO,GAAG,IAAI,CAAC;CAC3B;AAED,MAAM,WAAW,iBAAkB,SAAQ,gBAAgB;IACzD;;;;;OAKG;IACH,MAAM,CAAC,EAAE,MAAM,CAAC;IAEhB;;;OAGG;IACH,KAAK,CAAC,EAAE,KAAK,GAAG,MAAM,CAAC;IAEvB;;OAEG;IACH,MAAM,CAAC,EAAE,MAAM,CAAC;CACjB;AAID,MAAM,CAAC,OAAO,WAAW,QAAQ,CAAC;IA
ChC,OAAO,EACL,KAAK,UAAU,IAAI,UAAU,EAC7B,KAAK,eAAe,IAAI,eAAe,EACvC,KAAK,sBAAsB,IAAI,sBAAsB,EACrD,KAAK,2BAA2B,IAAI,2BAA2B,EAC/D,KAAK,kBAAkB,IAAI,kBAAkB,EAC7C,KAAK,uBAAuB,IAAI,uBAAuB,EACvD,KAAK,SAAS,IAAI,SAAS,EAC3B,KAAK,qBAAqB,IAAI,qBAAqB,EACnD,KAAK,cAAc,IAAI,cAAc,EACrC,KAAK,mBAAmB,IAAI,mBAAmB,EAC/C,KAAK,QAAQ,IAAI,QAAQ,EACzB,KAAK,oBAAoB,IAAI,oBAAoB,EACjD,KAAK,aAAa,IAAI,aAAa,EACnC,KAAK,kBAAkB,IAAI,kBAAkB,EAC7C,KAAK,OAAO,IAAI,OAAO,EACvB,KAAK,cAAc,IAAI,cAAc,EACrC,KAAK,mBAAmB,IAAI,mBAAmB,EAC/C,KAAK,uBAAuB,IAAI,uBAAuB,EACvD,KAAK,cAAc,IAAI,cAAc,EACrC,KAAK,YAAY,IAAI,YAAY,EACjC,KAAK,iBAAiB,IAAI,iBAAiB,EAC3C,KAAK,mBAAmB,IAAI,mBAAmB,EAC/C,KAAK,iBAAiB,IAAI,iBAAiB,EAC3C,KAAK,IAAI,IAAI,IAAI,EACjB,KAAK,gBAAgB,IAAI,gBAAgB,EACzC,KAAK,qBAAqB,IAAI,qBAAqB,EACnD,KAAK,SAAS,IAAI,SAAS,EAC3B,KAAK,cAAc,IAAI,cAAc,EACrC,YAAY,IAAI,YAAY,EAC5B,KAAK,mBAAmB,IAAI,mBAAmB,EAC/C,KAAK,mBAAmB,IAAI,mBAAmB,EAC/C,KAAK,iBAAiB,IAAI,iBAAiB,GAC5C,CAAC;CACH"} \ No newline at end of file diff --git a/node_modules/openai/resources/beta/threads/messages.js b/node_modules/openai/resources/beta/threads/messages.js new file mode 100644 index 0000000..1ab7c5e --- /dev/null +++ b/node_modules/openai/resources/beta/threads/messages.js @@ -0,0 +1,63 @@ +"use strict"; +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.MessagesPage = exports.Messages = void 0; +const resource_1 = require("../../../resource.js"); +const core_1 = require("../../../core.js"); +const pagination_1 = require("../../../pagination.js"); +class Messages extends resource_1.APIResource { + /** + * Create a message. + */ + create(threadId, body, options) { + return this._client.post(`/threads/${threadId}/messages`, { + body, + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + }); + } + /** + * Retrieve a message. + */ + retrieve(threadId, messageId, options) { + return this._client.get(`/threads/${threadId}/messages/${messageId}`, { + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + }); + } + /** + * Modifies a message. + */ + update(threadId, messageId, body, options) { + return this._client.post(`/threads/${threadId}/messages/${messageId}`, { + body, + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + }); + } + list(threadId, query = {}, options) { + if ((0, core_1.isRequestOptions)(query)) { + return this.list(threadId, {}, query); + } + return this._client.getAPIList(`/threads/${threadId}/messages`, MessagesPage, { + query, + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + }); + } + /** + * Deletes a message. 
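A sketch exercising the resource methods implemented above (`retrieve`, `update`, `list`, `del`). The async iteration over the returned page assumes the SDK's cursor auto-pagination; the IDs are placeholders.

// Sketch of the resource methods implemented above: retrieve, update, list, del.
// 'thread_abc', 'msg_abc' and 'run_abc' are hypothetical placeholders.
import OpenAI from 'openai';
const openai = new OpenAI();

const msg = await openai.beta.threads.messages.retrieve('thread_abc', 'msg_abc');
await openai.beta.threads.messages.update('thread_abc', msg.id, {
  metadata: { reviewed: 'true' },
});

// The returned cursor page supports async iteration across pages.
for await (const m of openai.beta.threads.messages.list('thread_abc', { run_id: 'run_abc' })) {
  console.log(m.id, m.role, m.status);
}

await openai.beta.threads.messages.del('thread_abc', 'msg_abc');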
+ */ + del(threadId, messageId, options) { + return this._client.delete(`/threads/${threadId}/messages/${messageId}`, { + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + }); + } +} +exports.Messages = Messages; +class MessagesPage extends pagination_1.CursorPage { +} +exports.MessagesPage = MessagesPage; +Messages.MessagesPage = MessagesPage; +//# sourceMappingURL=messages.js.map \ No newline at end of file diff --git a/node_modules/openai/resources/beta/threads/messages.js.map b/node_modules/openai/resources/beta/threads/messages.js.map new file mode 100644 index 0000000..6239f25 --- /dev/null +++ b/node_modules/openai/resources/beta/threads/messages.js.map @@ -0,0 +1 @@ +{"version":3,"file":"messages.js","sourceRoot":"","sources":["../../../src/resources/beta/threads/messages.ts"],"names":[],"mappings":";AAAA,sFAAsF;;;AAEtF,mDAAgD;AAChD,2CAAiD;AAGjD,uDAAwE;AAExE,MAAa,QAAS,SAAQ,sBAAW;IACvC;;OAEG;IACH,MAAM,CACJ,QAAgB,EAChB,IAAyB,EACzB,OAA6B;QAE7B,OAAO,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,YAAY,QAAQ,WAAW,EAAE;YACxD,IAAI;YACJ,GAAG,OAAO;YACV,OAAO,EAAE,EAAE,aAAa,EAAE,eAAe,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE;SACjE,CAAC,CAAC;IACL,CAAC;IAED;;OAEG;IACH,QAAQ,CAAC,QAAgB,EAAE,SAAiB,EAAE,OAA6B;QACzE,OAAO,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,YAAY,QAAQ,aAAa,SAAS,EAAE,EAAE;YACpE,GAAG,OAAO;YACV,OAAO,EAAE,EAAE,aAAa,EAAE,eAAe,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE;SACjE,CAAC,CAAC;IACL,CAAC;IAED;;OAEG;IACH,MAAM,CACJ,QAAgB,EAChB,SAAiB,EACjB,IAAyB,EACzB,OAA6B;QAE7B,OAAO,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,YAAY,QAAQ,aAAa,SAAS,EAAE,EAAE;YACrE,IAAI;YACJ,GAAG,OAAO;YACV,OAAO,EAAE,EAAE,aAAa,EAAE,eAAe,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE;SACjE,CAAC,CAAC;IACL,CAAC;IAWD,IAAI,CACF,QAAgB,EAChB,QAAiD,EAAE,EACnD,OAA6B;QAE7B,IAAI,IAAA,uBAAgB,EAAC,KAAK,CAAC,EAAE;YAC3B,OAAO,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE,EAAE,EAAE,KAAK,CAAC,CAAC;SACvC;QACD,OAAO,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,YAAY,QAAQ,WAAW,EAAE,YAAY,EAAE;YAC5E,KAAK;YACL,GAAG,OAAO;YACV,OAAO,EAAE,EAAE,aAAa,EAAE,eAAe,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE;SACjE,CAAC,CAAC;IACL,CAAC;IAED;;OAEG;IACH,GAAG,CAAC,QAAgB,EAAE,SAAiB,EAAE,OAA6B;QACpE,OAAO,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,YAAY,QAAQ,aAAa,SAAS,EAAE,EAAE;YACvE,GAAG,OAAO;YACV,OAAO,EAAE,EAAE,aAAa,EAAE,eAAe,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE;SACjE,CAAC,CAAC;IACL,CAAC;CACF;AA3ED,4BA2EC;AAED,MAAa,YAAa,SAAQ,uBAAmB;CAAG;AAAxD,oCAAwD;AA8nBxD,QAAQ,CAAC,YAAY,GAAG,YAAY,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/resources/beta/threads/messages.mjs b/node_modules/openai/resources/beta/threads/messages.mjs new file mode 100644 index 0000000..1778988 --- /dev/null +++ b/node_modules/openai/resources/beta/threads/messages.mjs @@ -0,0 +1,58 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. +import { APIResource } from "../../../resource.mjs"; +import { isRequestOptions } from "../../../core.mjs"; +import { CursorPage } from "../../../pagination.mjs"; +export class Messages extends APIResource { + /** + * Create a message. + */ + create(threadId, body, options) { + return this._client.post(`/threads/${threadId}/messages`, { + body, + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + }); + } + /** + * Retrieve a message. + */ + retrieve(threadId, messageId, options) { + return this._client.get(`/threads/${threadId}/messages/${messageId}`, { + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + }); + } + /** + * Modifies a message. 
+ */ + update(threadId, messageId, body, options) { + return this._client.post(`/threads/${threadId}/messages/${messageId}`, { + body, + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + }); + } + list(threadId, query = {}, options) { + if (isRequestOptions(query)) { + return this.list(threadId, {}, query); + } + return this._client.getAPIList(`/threads/${threadId}/messages`, MessagesPage, { + query, + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + }); + } + /** + * Deletes a message. + */ + del(threadId, messageId, options) { + return this._client.delete(`/threads/${threadId}/messages/${messageId}`, { + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + }); + } +} +export class MessagesPage extends CursorPage { +} +Messages.MessagesPage = MessagesPage; +//# sourceMappingURL=messages.mjs.map \ No newline at end of file diff --git a/node_modules/openai/resources/beta/threads/messages.mjs.map b/node_modules/openai/resources/beta/threads/messages.mjs.map new file mode 100644 index 0000000..ac9728c --- /dev/null +++ b/node_modules/openai/resources/beta/threads/messages.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"messages.mjs","sourceRoot":"","sources":["../../../src/resources/beta/threads/messages.ts"],"names":[],"mappings":"AAAA,sFAAsF;OAE/E,EAAE,WAAW,EAAE;OACf,EAAE,gBAAgB,EAAE;OAGpB,EAAE,UAAU,EAAyB;AAE5C,MAAM,OAAO,QAAS,SAAQ,WAAW;IACvC;;OAEG;IACH,MAAM,CACJ,QAAgB,EAChB,IAAyB,EACzB,OAA6B;QAE7B,OAAO,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,YAAY,QAAQ,WAAW,EAAE;YACxD,IAAI;YACJ,GAAG,OAAO;YACV,OAAO,EAAE,EAAE,aAAa,EAAE,eAAe,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE;SACjE,CAAC,CAAC;IACL,CAAC;IAED;;OAEG;IACH,QAAQ,CAAC,QAAgB,EAAE,SAAiB,EAAE,OAA6B;QACzE,OAAO,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,YAAY,QAAQ,aAAa,SAAS,EAAE,EAAE;YACpE,GAAG,OAAO;YACV,OAAO,EAAE,EAAE,aAAa,EAAE,eAAe,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE;SACjE,CAAC,CAAC;IACL,CAAC;IAED;;OAEG;IACH,MAAM,CACJ,QAAgB,EAChB,SAAiB,EACjB,IAAyB,EACzB,OAA6B;QAE7B,OAAO,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,YAAY,QAAQ,aAAa,SAAS,EAAE,EAAE;YACrE,IAAI;YACJ,GAAG,OAAO;YACV,OAAO,EAAE,EAAE,aAAa,EAAE,eAAe,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE;SACjE,CAAC,CAAC;IACL,CAAC;IAWD,IAAI,CACF,QAAgB,EAChB,QAAiD,EAAE,EACnD,OAA6B;QAE7B,IAAI,gBAAgB,CAAC,KAAK,CAAC,EAAE;YAC3B,OAAO,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE,EAAE,EAAE,KAAK,CAAC,CAAC;SACvC;QACD,OAAO,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,YAAY,QAAQ,WAAW,EAAE,YAAY,EAAE;YAC5E,KAAK;YACL,GAAG,OAAO;YACV,OAAO,EAAE,EAAE,aAAa,EAAE,eAAe,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE;SACjE,CAAC,CAAC;IACL,CAAC;IAED;;OAEG;IACH,GAAG,CAAC,QAAgB,EAAE,SAAiB,EAAE,OAA6B;QACpE,OAAO,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,YAAY,QAAQ,aAAa,SAAS,EAAE,EAAE;YACvE,GAAG,OAAO;YACV,OAAO,EAAE,EAAE,aAAa,EAAE,eAAe,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE;SACjE,CAAC,CAAC;IACL,CAAC;CACF;AAED,MAAM,OAAO,YAAa,SAAQ,UAAmB;CAAG;AA8nBxD,QAAQ,CAAC,YAAY,GAAG,YAAY,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/resources/beta/threads/runs/index.d.ts b/node_modules/openai/resources/beta/threads/runs/index.d.ts new file mode 100644 index 0000000..30de79d --- /dev/null +++ b/node_modules/openai/resources/beta/threads/runs/index.d.ts @@ -0,0 +1,3 @@ +export { RunStepsPage, Steps, type CodeInterpreterLogs, type CodeInterpreterOutputImage, type CodeInterpreterToolCall, type CodeInterpreterToolCallDelta, type FileSearchToolCall, type FileSearchToolCallDelta, type FunctionToolCall, type FunctionToolCallDelta, type MessageCreationStepDetails, type RunStep, type RunStepDelta, type RunStepDeltaEvent, type RunStepDeltaMessageDelta, type RunStepInclude, type ToolCall, type 
ToolCallDelta, type ToolCallDeltaObject, type ToolCallsStepDetails, type StepRetrieveParams, type StepListParams, } from "./steps.js"; +export { RunsPage, Runs, type RequiredActionFunctionToolCall, type Run, type RunStatus, type RunCreateParams, type RunCreateParamsNonStreaming, type RunCreateParamsStreaming, type RunUpdateParams, type RunListParams, type RunCreateAndPollParams, type RunCreateAndStreamParams, type RunStreamParams, type RunSubmitToolOutputsParams, type RunSubmitToolOutputsParamsNonStreaming, type RunSubmitToolOutputsParamsStreaming, type RunSubmitToolOutputsAndPollParams, type RunSubmitToolOutputsStreamParams, } from "./runs.js"; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/resources/beta/threads/runs/index.d.ts.map b/node_modules/openai/resources/beta/threads/runs/index.d.ts.map new file mode 100644 index 0000000..450bc2f --- /dev/null +++ b/node_modules/openai/resources/beta/threads/runs/index.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../../src/resources/beta/threads/runs/index.ts"],"names":[],"mappings":"AAEA,OAAO,EACL,YAAY,EACZ,KAAK,EACL,KAAK,mBAAmB,EACxB,KAAK,0BAA0B,EAC/B,KAAK,uBAAuB,EAC5B,KAAK,4BAA4B,EACjC,KAAK,kBAAkB,EACvB,KAAK,uBAAuB,EAC5B,KAAK,gBAAgB,EACrB,KAAK,qBAAqB,EAC1B,KAAK,0BAA0B,EAC/B,KAAK,OAAO,EACZ,KAAK,YAAY,EACjB,KAAK,iBAAiB,EACtB,KAAK,wBAAwB,EAC7B,KAAK,cAAc,EACnB,KAAK,QAAQ,EACb,KAAK,aAAa,EAClB,KAAK,mBAAmB,EACxB,KAAK,oBAAoB,EACzB,KAAK,kBAAkB,EACvB,KAAK,cAAc,GACpB,MAAM,SAAS,CAAC;AACjB,OAAO,EACL,QAAQ,EACR,IAAI,EACJ,KAAK,8BAA8B,EACnC,KAAK,GAAG,EACR,KAAK,SAAS,EACd,KAAK,eAAe,EACpB,KAAK,2BAA2B,EAChC,KAAK,wBAAwB,EAC7B,KAAK,eAAe,EACpB,KAAK,aAAa,EAClB,KAAK,sBAAsB,EAC3B,KAAK,wBAAwB,EAC7B,KAAK,eAAe,EACpB,KAAK,0BAA0B,EAC/B,KAAK,sCAAsC,EAC3C,KAAK,mCAAmC,EACxC,KAAK,iCAAiC,EACtC,KAAK,gCAAgC,GACtC,MAAM,QAAQ,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/resources/beta/threads/runs/index.js b/node_modules/openai/resources/beta/threads/runs/index.js new file mode 100644 index 0000000..d4f1789 --- /dev/null +++ b/node_modules/openai/resources/beta/threads/runs/index.js @@ -0,0 +1,11 @@ +"use strict"; +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+Object.defineProperty(exports, "__esModule", { value: true }); +exports.Runs = exports.RunsPage = exports.Steps = exports.RunStepsPage = void 0; +var steps_1 = require("./steps.js"); +Object.defineProperty(exports, "RunStepsPage", { enumerable: true, get: function () { return steps_1.RunStepsPage; } }); +Object.defineProperty(exports, "Steps", { enumerable: true, get: function () { return steps_1.Steps; } }); +var runs_1 = require("./runs.js"); +Object.defineProperty(exports, "RunsPage", { enumerable: true, get: function () { return runs_1.RunsPage; } }); +Object.defineProperty(exports, "Runs", { enumerable: true, get: function () { return runs_1.Runs; } }); +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/openai/resources/beta/threads/runs/index.js.map b/node_modules/openai/resources/beta/threads/runs/index.js.map new file mode 100644 index 0000000..0c4c655 --- /dev/null +++ b/node_modules/openai/resources/beta/threads/runs/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../../../src/resources/beta/threads/runs/index.ts"],"names":[],"mappings":";AAAA,sFAAsF;;;AAEtF,oCAuBiB;AAtBf,qGAAA,YAAY,OAAA;AACZ,8FAAA,KAAK,OAAA;AAsBP,kCAmBgB;AAlBd,gGAAA,QAAQ,OAAA;AACR,4FAAA,IAAI,OAAA"} \ No newline at end of file diff --git a/node_modules/openai/resources/beta/threads/runs/index.mjs b/node_modules/openai/resources/beta/threads/runs/index.mjs new file mode 100644 index 0000000..6d13e81 --- /dev/null +++ b/node_modules/openai/resources/beta/threads/runs/index.mjs @@ -0,0 +1,4 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. +export { RunStepsPage, Steps, } from "./steps.mjs"; +export { RunsPage, Runs, } from "./runs.mjs"; +//# sourceMappingURL=index.mjs.map \ No newline at end of file diff --git a/node_modules/openai/resources/beta/threads/runs/index.mjs.map b/node_modules/openai/resources/beta/threads/runs/index.mjs.map new file mode 100644 index 0000000..27a8b1f --- /dev/null +++ b/node_modules/openai/resources/beta/threads/runs/index.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"index.mjs","sourceRoot":"","sources":["../../../../src/resources/beta/threads/runs/index.ts"],"names":[],"mappings":"AAAA,sFAAsF;OAE/E,EACL,YAAY,EACZ,KAAK,GAqBN;OACM,EACL,QAAQ,EACR,IAAI,GAiBL"} \ No newline at end of file diff --git a/node_modules/openai/resources/beta/threads/runs/runs.d.ts b/node_modules/openai/resources/beta/threads/runs/runs.d.ts new file mode 100644 index 0000000..d7b1eff --- /dev/null +++ b/node_modules/openai/resources/beta/threads/runs/runs.d.ts @@ -0,0 +1,1205 @@ +import { APIResource } from "../../../../resource.js"; +import { APIPromise } from "../../../../core.js"; +import * as Core from "../../../../core.js"; +import { AssistantStream, RunCreateParamsBaseStream } from "../../../../lib/AssistantStream.js"; +import { RunSubmitToolOutputsParamsStream } from "../../../../lib/AssistantStream.js"; +import * as RunsAPI from "./runs.js"; +import * as AssistantsAPI from "../../assistants.js"; +import * as ChatAPI from "../../../chat/chat.js"; +import * as MessagesAPI from "../messages.js"; +import * as ThreadsAPI from "../threads.js"; +import * as StepsAPI from "./steps.js"; +import { CodeInterpreterLogs, CodeInterpreterOutputImage, CodeInterpreterToolCall, CodeInterpreterToolCallDelta, FileSearchToolCall, FileSearchToolCallDelta, FunctionToolCall, FunctionToolCallDelta, MessageCreationStepDetails, RunStep, RunStepDelta, RunStepDeltaEvent, RunStepDeltaMessageDelta, RunStepInclude, 
RunStepsPage, StepListParams, StepRetrieveParams, Steps, ToolCall, ToolCallDelta, ToolCallDeltaObject, ToolCallsStepDetails } from "./steps.js"; +import { CursorPage, type CursorPageParams } from "../../../../pagination.js"; +import { Stream } from "../../../../streaming.js"; +export declare class Runs extends APIResource { + steps: StepsAPI.Steps; + /** + * Create a run. + */ + create(threadId: string, params: RunCreateParamsNonStreaming, options?: Core.RequestOptions): APIPromise; + create(threadId: string, params: RunCreateParamsStreaming, options?: Core.RequestOptions): APIPromise>; + create(threadId: string, params: RunCreateParamsBase, options?: Core.RequestOptions): APIPromise | Run>; + /** + * Retrieves a run. + */ + retrieve(threadId: string, runId: string, options?: Core.RequestOptions): Core.APIPromise; + /** + * Modifies a run. + */ + update(threadId: string, runId: string, body: RunUpdateParams, options?: Core.RequestOptions): Core.APIPromise; + /** + * Returns a list of runs belonging to a thread. + */ + list(threadId: string, query?: RunListParams, options?: Core.RequestOptions): Core.PagePromise; + list(threadId: string, options?: Core.RequestOptions): Core.PagePromise; + /** + * Cancels a run that is `in_progress`. + */ + cancel(threadId: string, runId: string, options?: Core.RequestOptions): Core.APIPromise; + /** + * A helper to create a run an poll for a terminal state. More information on Run + * lifecycles can be found here: + * https://platform.openai.com/docs/assistants/how-it-works/runs-and-run-steps + */ + createAndPoll(threadId: string, body: RunCreateParamsNonStreaming, options?: Core.RequestOptions & { + pollIntervalMs?: number; + }): Promise; + /** + * Create a Run stream + * + * @deprecated use `stream` instead + */ + createAndStream(threadId: string, body: RunCreateParamsBaseStream, options?: Core.RequestOptions): AssistantStream; + /** + * A helper to poll a run status until it reaches a terminal state. More + * information on Run lifecycles can be found here: + * https://platform.openai.com/docs/assistants/how-it-works/runs-and-run-steps + */ + poll(threadId: string, runId: string, options?: Core.RequestOptions & { + pollIntervalMs?: number; + }): Promise; + /** + * Create a Run stream + */ + stream(threadId: string, body: RunCreateParamsBaseStream, options?: Core.RequestOptions): AssistantStream; + /** + * When a run has the `status: "requires_action"` and `required_action.type` is + * `submit_tool_outputs`, this endpoint can be used to submit the outputs from the + * tool calls once they're all completed. All outputs must be submitted in a single + * request. + */ + submitToolOutputs(threadId: string, runId: string, body: RunSubmitToolOutputsParamsNonStreaming, options?: Core.RequestOptions): APIPromise; + submitToolOutputs(threadId: string, runId: string, body: RunSubmitToolOutputsParamsStreaming, options?: Core.RequestOptions): APIPromise>; + submitToolOutputs(threadId: string, runId: string, body: RunSubmitToolOutputsParamsBase, options?: Core.RequestOptions): APIPromise | Run>; + /** + * A helper to submit a tool output to a run and poll for a terminal run state. 
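A sketch of the polling helpers declared above in a tool-call loop: `createAndPoll`, then `submitToolOutputsAndPoll` when the run pauses in `requires_action`. The `tool_outputs` shape is assumed from the SDK's `RunSubmitToolOutputsParams`, which is not shown in this hunk; the IDs are placeholders.

// Sketch: createAndPoll followed by the requires_action / submit_tool_outputs loop.
// The tool_outputs shape is assumed from RunSubmitToolOutputsParams (not in this hunk).
import OpenAI from 'openai';
const openai = new OpenAI();

let run = await openai.beta.threads.runs.createAndPoll('thread_abc', {
  assistant_id: 'asst_abc',
});

if (run.status === 'requires_action' && run.required_action?.type === 'submit_tool_outputs') {
  const tool_outputs = run.required_action.submit_tool_outputs.tool_calls.map((call) => ({
    tool_call_id: call.id,
    output: JSON.stringify({ ok: true }), // stand-in for the real tool result
  }));
  run = await openai.beta.threads.runs.submitToolOutputsAndPoll('thread_abc', run.id, {
    tool_outputs,
  });
}
console.log('final status:', run.status);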
+ * More information on Run lifecycles can be found here: + * https://platform.openai.com/docs/assistants/how-it-works/runs-and-run-steps + */ + submitToolOutputsAndPoll(threadId: string, runId: string, body: RunSubmitToolOutputsParamsNonStreaming, options?: Core.RequestOptions & { + pollIntervalMs?: number; + }): Promise; + /** + * Submit the tool outputs from a previous run and stream the run to a terminal + * state. More information on Run lifecycles can be found here: + * https://platform.openai.com/docs/assistants/how-it-works/runs-and-run-steps + */ + submitToolOutputsStream(threadId: string, runId: string, body: RunSubmitToolOutputsParamsStream, options?: Core.RequestOptions): AssistantStream; +} +export declare class RunsPage extends CursorPage { +} +/** + * Tool call objects + */ +export interface RequiredActionFunctionToolCall { + /** + * The ID of the tool call. This ID must be referenced when you submit the tool + * outputs in using the + * [Submit tool outputs to run](https://platform.openai.com/docs/api-reference/runs/submitToolOutputs) + * endpoint. + */ + id: string; + /** + * The function definition. + */ + function: RequiredActionFunctionToolCall.Function; + /** + * The type of tool call the output is required for. For now, this is always + * `function`. + */ + type: 'function'; +} +export declare namespace RequiredActionFunctionToolCall { + /** + * The function definition. + */ + interface Function { + /** + * The arguments that the model expects you to pass to the function. + */ + arguments: string; + /** + * The name of the function. + */ + name: string; + } +} +/** + * Represents an execution run on a + * [thread](https://platform.openai.com/docs/api-reference/threads). + */ +export interface Run { + /** + * The identifier, which can be referenced in API endpoints. + */ + id: string; + /** + * The ID of the + * [assistant](https://platform.openai.com/docs/api-reference/assistants) used for + * execution of this run. + */ + assistant_id: string; + /** + * The Unix timestamp (in seconds) for when the run was cancelled. + */ + cancelled_at: number | null; + /** + * The Unix timestamp (in seconds) for when the run was completed. + */ + completed_at: number | null; + /** + * The Unix timestamp (in seconds) for when the run was created. + */ + created_at: number; + /** + * The Unix timestamp (in seconds) for when the run will expire. + */ + expires_at: number | null; + /** + * The Unix timestamp (in seconds) for when the run failed. + */ + failed_at: number | null; + /** + * Details on why the run is incomplete. Will be `null` if the run is not + * incomplete. + */ + incomplete_details: Run.IncompleteDetails | null; + /** + * The instructions that the + * [assistant](https://platform.openai.com/docs/api-reference/assistants) used for + * this run. + */ + instructions: string; + /** + * The last error associated with this run. Will be `null` if there are no errors. + */ + last_error: Run.LastError | null; + /** + * The maximum number of completion tokens specified to have been used over the + * course of the run. + */ + max_completion_tokens: number | null; + /** + * The maximum number of prompt tokens specified to have been used over the course + * of the run. + */ + max_prompt_tokens: number | null; + /** + * Set of 16 key-value pairs that can be attached to an object. This can be useful + * for storing additional information about the object in a structured format. Keys + * can be a maximum of 64 characters long and values can be a maxium of 512 + * characters long. 
+ */ + metadata: unknown | null; + /** + * The model that the + * [assistant](https://platform.openai.com/docs/api-reference/assistants) used for + * this run. + */ + model: string; + /** + * The object type, which is always `thread.run`. + */ + object: 'thread.run'; + /** + * Whether to enable + * [parallel function calling](https://platform.openai.com/docs/guides/function-calling#configuring-parallel-function-calling) + * during tool use. + */ + parallel_tool_calls: boolean; + /** + * Details on the action required to continue the run. Will be `null` if no action + * is required. + */ + required_action: Run.RequiredAction | null; + /** + * Specifies the format that the model must output. Compatible with + * [GPT-4o](https://platform.openai.com/docs/models#gpt-4o), + * [GPT-4 Turbo](https://platform.openai.com/docs/models#gpt-4-turbo-and-gpt-4), + * and all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. + * + * Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured + * Outputs which ensures the model will match your supplied JSON schema. Learn more + * in the + * [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). + * + * Setting to `{ "type": "json_object" }` enables JSON mode, which ensures the + * message the model generates is valid JSON. + * + * **Important:** when using JSON mode, you **must** also instruct the model to + * produce JSON yourself via a system or user message. Without this, the model may + * generate an unending stream of whitespace until the generation reaches the token + * limit, resulting in a long-running and seemingly "stuck" request. Also note that + * the message content may be partially cut off if `finish_reason="length"`, which + * indicates the generation exceeded `max_tokens` or the conversation exceeded the + * max context length. + */ + response_format: ThreadsAPI.AssistantResponseFormatOption | null; + /** + * The Unix timestamp (in seconds) for when the run was started. + */ + started_at: number | null; + /** + * The status of the run, which can be either `queued`, `in_progress`, + * `requires_action`, `cancelling`, `cancelled`, `failed`, `completed`, + * `incomplete`, or `expired`. + */ + status: RunStatus; + /** + * The ID of the [thread](https://platform.openai.com/docs/api-reference/threads) + * that was executed on as a part of this run. + */ + thread_id: string; + /** + * Controls which (if any) tool is called by the model. `none` means the model will + * not call any tools and instead generates a message. `auto` is the default value + * and means the model can pick between generating a message or calling one or more + * tools. `required` means the model must call one or more tools before responding + * to the user. Specifying a particular tool like `{"type": "file_search"}` or + * `{"type": "function", "function": {"name": "my_function"}}` forces the model to + * call that tool. + */ + tool_choice: ThreadsAPI.AssistantToolChoiceOption | null; + /** + * The list of tools that the + * [assistant](https://platform.openai.com/docs/api-reference/assistants) used for + * this run. + */ + tools: Array; + /** + * Controls for how a thread will be truncated prior to the run. Use this to + * control the intial context window of the run. + */ + truncation_strategy: Run.TruncationStrategy | null; + /** + * Usage statistics related to the run. This value will be `null` if the run is not + * in a terminal state (i.e. `in_progress`, `queued`, etc.). 
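A sketch of reading the terminal-state fields documented above (`status`, `usage`, `incomplete_details`, `last_error`) after polling a run; the IDs are placeholders.

// Sketch: polling an existing run and reading the terminal-state fields documented above.
// 'thread_abc' and 'run_abc' are hypothetical placeholders.
import OpenAI from 'openai';
const openai = new OpenAI();

const run = await openai.beta.threads.runs.poll('thread_abc', 'run_abc', { pollIntervalMs: 1000 });

if (run.status === 'completed' && run.usage) {
  console.log('total tokens:', run.usage.total_tokens);
} else if (run.status === 'incomplete') {
  console.log('stopped early:', run.incomplete_details?.reason);
} else if (run.last_error) {
  console.error(run.last_error.code, run.last_error.message);
}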
+ */ + usage: Run.Usage | null; + /** + * The sampling temperature used for this run. If not set, defaults to 1. + */ + temperature?: number | null; + /** + * The nucleus sampling value used for this run. If not set, defaults to 1. + */ + top_p?: number | null; +} +export declare namespace Run { + /** + * Details on why the run is incomplete. Will be `null` if the run is not + * incomplete. + */ + interface IncompleteDetails { + /** + * The reason why the run is incomplete. This will point to which specific token + * limit was reached over the course of the run. + */ + reason?: 'max_completion_tokens' | 'max_prompt_tokens'; + } + /** + * The last error associated with this run. Will be `null` if there are no errors. + */ + interface LastError { + /** + * One of `server_error`, `rate_limit_exceeded`, or `invalid_prompt`. + */ + code: 'server_error' | 'rate_limit_exceeded' | 'invalid_prompt'; + /** + * A human-readable description of the error. + */ + message: string; + } + /** + * Details on the action required to continue the run. Will be `null` if no action + * is required. + */ + interface RequiredAction { + /** + * Details on the tool outputs needed for this run to continue. + */ + submit_tool_outputs: RequiredAction.SubmitToolOutputs; + /** + * For now, this is always `submit_tool_outputs`. + */ + type: 'submit_tool_outputs'; + } + namespace RequiredAction { + /** + * Details on the tool outputs needed for this run to continue. + */ + interface SubmitToolOutputs { + /** + * A list of the relevant tool calls. + */ + tool_calls: Array; + } + } + /** + * Controls for how a thread will be truncated prior to the run. Use this to + * control the intial context window of the run. + */ + interface TruncationStrategy { + /** + * The truncation strategy to use for the thread. The default is `auto`. If set to + * `last_messages`, the thread will be truncated to the n most recent messages in + * the thread. When set to `auto`, messages in the middle of the thread will be + * dropped to fit the context length of the model, `max_prompt_tokens`. + */ + type: 'auto' | 'last_messages'; + /** + * The number of most recent messages from the thread when constructing the context + * for the run. + */ + last_messages?: number | null; + } + /** + * Usage statistics related to the run. This value will be `null` if the run is not + * in a terminal state (i.e. `in_progress`, `queued`, etc.). + */ + interface Usage { + /** + * Number of completion tokens used over the course of the run. + */ + completion_tokens: number; + /** + * Number of prompt tokens used over the course of the run. + */ + prompt_tokens: number; + /** + * Total number of tokens used (prompt + completion). + */ + total_tokens: number; + } +} +/** + * The status of the run, which can be either `queued`, `in_progress`, + * `requires_action`, `cancelling`, `cancelled`, `failed`, `completed`, + * `incomplete`, or `expired`. + */ +export type RunStatus = 'queued' | 'in_progress' | 'requires_action' | 'cancelling' | 'cancelled' | 'failed' | 'completed' | 'incomplete' | 'expired'; +export type RunCreateParams = RunCreateParamsNonStreaming | RunCreateParamsStreaming; +export interface RunCreateParamsBase { + /** + * Body param: The ID of the + * [assistant](https://platform.openai.com/docs/api-reference/assistants) to use to + * execute this run. + */ + assistant_id: string; + /** + * Query param: A list of additional fields to include in the response. 
Currently + * the only supported value is + * `step_details.tool_calls[*].file_search.results[*].content` to fetch the file + * search result content. + * + * See the + * [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) + * for more information. + */ + include?: Array; + /** + * Body param: Appends additional instructions at the end of the instructions for + * the run. This is useful for modifying the behavior on a per-run basis without + * overriding other instructions. + */ + additional_instructions?: string | null; + /** + * Body param: Adds additional messages to the thread before creating the run. + */ + additional_messages?: Array | null; + /** + * Body param: Overrides the + * [instructions](https://platform.openai.com/docs/api-reference/assistants/createAssistant) + * of the assistant. This is useful for modifying the behavior on a per-run basis. + */ + instructions?: string | null; + /** + * Body param: The maximum number of completion tokens that may be used over the + * course of the run. The run will make a best effort to use only the number of + * completion tokens specified, across multiple turns of the run. If the run + * exceeds the number of completion tokens specified, the run will end with status + * `incomplete`. See `incomplete_details` for more info. + */ + max_completion_tokens?: number | null; + /** + * Body param: The maximum number of prompt tokens that may be used over the course + * of the run. The run will make a best effort to use only the number of prompt + * tokens specified, across multiple turns of the run. If the run exceeds the + * number of prompt tokens specified, the run will end with status `incomplete`. + * See `incomplete_details` for more info. + */ + max_prompt_tokens?: number | null; + /** + * Body param: Set of 16 key-value pairs that can be attached to an object. This + * can be useful for storing additional information about the object in a + * structured format. Keys can be a maximum of 64 characters long and values can be + * a maxium of 512 characters long. + */ + metadata?: unknown | null; + /** + * Body param: The ID of the + * [Model](https://platform.openai.com/docs/api-reference/models) to be used to + * execute this run. If a value is provided here, it will override the model + * associated with the assistant. If not, the model associated with the assistant + * will be used. + */ + model?: (string & {}) | ChatAPI.ChatModel | null; + /** + * Body param: Whether to enable + * [parallel function calling](https://platform.openai.com/docs/guides/function-calling#configuring-parallel-function-calling) + * during tool use. + */ + parallel_tool_calls?: boolean; + /** + * Body param: Specifies the format that the model must output. Compatible with + * [GPT-4o](https://platform.openai.com/docs/models#gpt-4o), + * [GPT-4 Turbo](https://platform.openai.com/docs/models#gpt-4-turbo-and-gpt-4), + * and all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. + * + * Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured + * Outputs which ensures the model will match your supplied JSON schema. Learn more + * in the + * [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). + * + * Setting to `{ "type": "json_object" }` enables JSON mode, which ensures the + * message the model generates is valid JSON. 
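A sketch of the JSON-mode caveat above: `response_format: { type: 'json_object' }` paired with instructions that explicitly ask for JSON. The IDs are placeholders.

// Sketch: JSON mode per the caveat above; the instructions themselves must ask for JSON.
// 'thread_abc' and 'asst_abc' are hypothetical placeholders.
import OpenAI from 'openai';
const openai = new OpenAI();

const run = await openai.beta.threads.runs.createAndPoll('thread_abc', {
  assistant_id: 'asst_abc',
  instructions: 'Reply with a JSON object with keys "answer" and "confidence".',
  response_format: { type: 'json_object' },
  max_completion_tokens: 500,
});
console.log(run.status, run.incomplete_details?.reason);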
+ * + * **Important:** when using JSON mode, you **must** also instruct the model to + * produce JSON yourself via a system or user message. Without this, the model may + * generate an unending stream of whitespace until the generation reaches the token + * limit, resulting in a long-running and seemingly "stuck" request. Also note that + * the message content may be partially cut off if `finish_reason="length"`, which + * indicates the generation exceeded `max_tokens` or the conversation exceeded the + * max context length. + */ + response_format?: ThreadsAPI.AssistantResponseFormatOption | null; + /** + * Body param: If `true`, returns a stream of events that happen during the Run as + * server-sent events, terminating when the Run enters a terminal state with a + * `data: [DONE]` message. + */ + stream?: boolean | null; + /** + * Body param: What sampling temperature to use, between 0 and 2. Higher values + * like 0.8 will make the output more random, while lower values like 0.2 will make + * it more focused and deterministic. + */ + temperature?: number | null; + /** + * Body param: Controls which (if any) tool is called by the model. `none` means + * the model will not call any tools and instead generates a message. `auto` is the + * default value and means the model can pick between generating a message or + * calling one or more tools. `required` means the model must call one or more + * tools before responding to the user. Specifying a particular tool like + * `{"type": "file_search"}` or + * `{"type": "function", "function": {"name": "my_function"}}` forces the model to + * call that tool. + */ + tool_choice?: ThreadsAPI.AssistantToolChoiceOption | null; + /** + * Body param: Override the tools the assistant can use for this run. This is + * useful for modifying the behavior on a per-run basis. + */ + tools?: Array | null; + /** + * Body param: An alternative to sampling with temperature, called nucleus + * sampling, where the model considers the results of the tokens with top_p + * probability mass. So 0.1 means only the tokens comprising the top 10% + * probability mass are considered. + * + * We generally recommend altering this or temperature but not both. + */ + top_p?: number | null; + /** + * Body param: Controls for how a thread will be truncated prior to the run. Use + * this to control the intial context window of the run. + */ + truncation_strategy?: RunCreateParams.TruncationStrategy | null; +} +export declare namespace RunCreateParams { + interface AdditionalMessage { + /** + * The text contents of the message. + */ + content: string | Array; + /** + * The role of the entity that is creating the message. Allowed values include: + * + * - `user`: Indicates the message is sent by an actual user and should be used in + * most cases to represent user-generated messages. + * - `assistant`: Indicates the message is generated by the assistant. Use this + * value to insert messages from the assistant into the conversation. + */ + role: 'user' | 'assistant'; + /** + * A list of files attached to the message, and the tools they should be added to. + */ + attachments?: Array | null; + /** + * Set of 16 key-value pairs that can be attached to an object. This can be useful + * for storing additional information about the object in a structured format. Keys + * can be a maximum of 64 characters long and values can be a maxium of 512 + * characters long. 
+ */ + metadata?: unknown | null; + } + namespace AdditionalMessage { + interface Attachment { + /** + * The ID of the file to attach to the message. + */ + file_id?: string; + /** + * The tools to add this file to. + */ + tools?: Array; + } + namespace Attachment { + interface FileSearch { + /** + * The type of tool being defined: `file_search` + */ + type: 'file_search'; + } + } + } + /** + * Controls for how a thread will be truncated prior to the run. Use this to + * control the intial context window of the run. + */ + interface TruncationStrategy { + /** + * The truncation strategy to use for the thread. The default is `auto`. If set to + * `last_messages`, the thread will be truncated to the n most recent messages in + * the thread. When set to `auto`, messages in the middle of the thread will be + * dropped to fit the context length of the model, `max_prompt_tokens`. + */ + type: 'auto' | 'last_messages'; + /** + * The number of most recent messages from the thread when constructing the context + * for the run. + */ + last_messages?: number | null; + } + type RunCreateParamsNonStreaming = RunsAPI.RunCreateParamsNonStreaming; + type RunCreateParamsStreaming = RunsAPI.RunCreateParamsStreaming; +} +export interface RunCreateParamsNonStreaming extends RunCreateParamsBase { + /** + * Body param: If `true`, returns a stream of events that happen during the Run as + * server-sent events, terminating when the Run enters a terminal state with a + * `data: [DONE]` message. + */ + stream?: false | null; +} +export interface RunCreateParamsStreaming extends RunCreateParamsBase { + /** + * Body param: If `true`, returns a stream of events that happen during the Run as + * server-sent events, terminating when the Run enters a terminal state with a + * `data: [DONE]` message. + */ + stream: true; +} +export interface RunUpdateParams { + /** + * Set of 16 key-value pairs that can be attached to an object. This can be useful + * for storing additional information about the object in a structured format. Keys + * can be a maximum of 64 characters long and values can be a maxium of 512 + * characters long. + */ + metadata?: unknown | null; +} +export interface RunListParams extends CursorPageParams { + /** + * A cursor for use in pagination. `before` is an object ID that defines your place + * in the list. For instance, if you make a list request and receive 100 objects, + * starting with obj_foo, your subsequent call can include before=obj_foo in order + * to fetch the previous page of the list. + */ + before?: string; + /** + * Sort order by the `created_at` timestamp of the objects. `asc` for ascending + * order and `desc` for descending order. + */ + order?: 'asc' | 'desc'; +} +export interface RunCreateAndPollParams { + /** + * The ID of the + * [assistant](https://platform.openai.com/docs/api-reference/assistants) to use to + * execute this run. + */ + assistant_id: string; + /** + * Appends additional instructions at the end of the instructions for the run. This + * is useful for modifying the behavior on a per-run basis without overriding other + * instructions. + */ + additional_instructions?: string | null; + /** + * Adds additional messages to the thread before creating the run. + */ + additional_messages?: Array | null; + /** + * Overrides the + * [instructions](https://platform.openai.com/docs/api-reference/assistants/createAssistant) + * of the assistant. This is useful for modifying the behavior on a per-run basis. 
+ */ + instructions?: string | null; + /** + * The maximum number of completion tokens that may be used over the course of the + * run. The run will make a best effort to use only the number of completion tokens + * specified, across multiple turns of the run. If the run exceeds the number of + * completion tokens specified, the run will end with status `incomplete`. See + * `incomplete_details` for more info. + */ + max_completion_tokens?: number | null; + /** + * The maximum number of prompt tokens that may be used over the course of the run. + * The run will make a best effort to use only the number of prompt tokens + * specified, across multiple turns of the run. If the run exceeds the number of + * prompt tokens specified, the run will end with status `incomplete`. See + * `incomplete_details` for more info. + */ + max_prompt_tokens?: number | null; + /** + * Set of 16 key-value pairs that can be attached to an object. This can be useful + * for storing additional information about the object in a structured format. Keys + * can be a maximum of 64 characters long and values can be a maxium of 512 + * characters long. + */ + metadata?: unknown | null; + /** + * The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to + * be used to execute this run. If a value is provided here, it will override the + * model associated with the assistant. If not, the model associated with the + * assistant will be used. + */ + model?: (string & {}) | 'gpt-4o' | 'gpt-4o-2024-05-13' | 'gpt-4-turbo' | 'gpt-4-turbo-2024-04-09' | 'gpt-4-0125-preview' | 'gpt-4-turbo-preview' | 'gpt-4-1106-preview' | 'gpt-4-vision-preview' | 'gpt-4' | 'gpt-4-0314' | 'gpt-4-0613' | 'gpt-4-32k' | 'gpt-4-32k-0314' | 'gpt-4-32k-0613' | 'gpt-3.5-turbo' | 'gpt-3.5-turbo-16k' | 'gpt-3.5-turbo-0613' | 'gpt-3.5-turbo-1106' | 'gpt-3.5-turbo-0125' | 'gpt-3.5-turbo-16k-0613' | null; + /** + * Specifies the format that the model must output. Compatible with + * [GPT-4o](https://platform.openai.com/docs/models/gpt-4o), + * [GPT-4 Turbo](https://platform.openai.com/docs/models/gpt-4-turbo-and-gpt-4), + * and all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. + * + * Setting to `{ "type": "json_object" }` enables JSON mode, which guarantees the + * message the model generates is valid JSON. + * + * **Important:** when using JSON mode, you **must** also instruct the model to + * produce JSON yourself via a system or user message. Without this, the model may + * generate an unending stream of whitespace until the generation reaches the token + * limit, resulting in a long-running and seemingly "stuck" request. Also note that + * the message content may be partially cut off if `finish_reason="length"`, which + * indicates the generation exceeded `max_tokens` or the conversation exceeded the + * max context length. + */ + response_format?: ThreadsAPI.AssistantResponseFormatOption | null; + /** + * What sampling temperature to use, between 0 and 2. Higher values like 0.8 will + * make the output more random, while lower values like 0.2 will make it more + * focused and deterministic. + */ + temperature?: number | null; + /** + * Controls which (if any) tool is called by the model. `none` means the model will + * not call any tools and instead generates a message. `auto` is the default value + * and means the model can pick between generating a message or calling one or more + * tools. `required` means the model must call one or more tools before responding + * to the user. 
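A sketch of forcing a particular tool via `tool_choice`, with a matching `tools` override and the `truncation_strategy` shape described above. The function name mirrors the `my_function` example used in the comment; the IDs are placeholders.

// Sketch: forcing a specific tool via tool_choice, with a matching tools override
// and the truncation_strategy shape described above. 'my_function' mirrors the
// example name in the comment; 'thread_abc' and 'asst_abc' are hypothetical.
import OpenAI from 'openai';
const openai = new OpenAI();

const run = await openai.beta.threads.runs.createAndPoll('thread_abc', {
  assistant_id: 'asst_abc',
  tools: [
    { type: 'function', function: { name: 'my_function', parameters: { type: 'object', properties: {} } } },
  ],
  tool_choice: { type: 'function', function: { name: 'my_function' } },
  truncation_strategy: { type: 'last_messages', last_messages: 10 },
});
console.log(run.required_action?.submit_tool_outputs.tool_calls);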
Specifying a particular tool like `{"type": "file_search"}` or + * `{"type": "function", "function": {"name": "my_function"}}` forces the model to + * call that tool. + */ + tool_choice?: ThreadsAPI.AssistantToolChoiceOption | null; + /** + * Override the tools the assistant can use for this run. This is useful for + * modifying the behavior on a per-run basis. + */ + tools?: Array | null; + /** + * An alternative to sampling with temperature, called nucleus sampling, where the + * model considers the results of the tokens with top_p probability mass. So 0.1 + * means only the tokens comprising the top 10% probability mass are considered. + * + * We generally recommend altering this or temperature but not both. + */ + top_p?: number | null; + /** + * Controls for how a thread will be truncated prior to the run. Use this to + * control the intial context window of the run. + */ + truncation_strategy?: RunCreateAndPollParams.TruncationStrategy | null; +} +export declare namespace RunCreateAndPollParams { + interface AdditionalMessage { + /** + * The text contents of the message. + */ + content: string | Array; + /** + * The role of the entity that is creating the message. Allowed values include: + * + * - `user`: Indicates the message is sent by an actual user and should be used in + * most cases to represent user-generated messages. + * - `assistant`: Indicates the message is generated by the assistant. Use this + * value to insert messages from the assistant into the conversation. + */ + role: 'user' | 'assistant'; + /** + * A list of files attached to the message, and the tools they should be added to. + */ + attachments?: Array | null; + /** + * Set of 16 key-value pairs that can be attached to an object. This can be useful + * for storing additional information about the object in a structured format. Keys + * can be a maximum of 64 characters long and values can be a maxium of 512 + * characters long. + */ + metadata?: unknown | null; + } + namespace AdditionalMessage { + interface Attachment { + /** + * The ID of the file to attach to the message. + */ + file_id?: string; + /** + * The tools to add this file to. + */ + tools?: Array; + } + } + /** + * Controls for how a thread will be truncated prior to the run. Use this to + * control the intial context window of the run. + */ + interface TruncationStrategy { + /** + * The truncation strategy to use for the thread. The default is `auto`. If set to + * `last_messages`, the thread will be truncated to the n most recent messages in + * the thread. When set to `auto`, messages in the middle of the thread will be + * dropped to fit the context length of the model, `max_prompt_tokens`. + */ + type: 'auto' | 'last_messages'; + /** + * The number of most recent messages from the thread when constructing the context + * for the run. + */ + last_messages?: number | null; + } +} +export interface RunCreateAndStreamParams { + /** + * The ID of the + * [assistant](https://platform.openai.com/docs/api-reference/assistants) to use to + * execute this run. + */ + assistant_id: string; + /** + * Appends additional instructions at the end of the instructions for the run. This + * is useful for modifying the behavior on a per-run basis without overriding other + * instructions. + */ + additional_instructions?: string | null; + /** + * Adds additional messages to the thread before creating the run. 
+ */ + additional_messages?: Array | null; + /** + * Overrides the + * [instructions](https://platform.openai.com/docs/api-reference/assistants/createAssistant) + * of the assistant. This is useful for modifying the behavior on a per-run basis. + */ + instructions?: string | null; + /** + * The maximum number of completion tokens that may be used over the course of the + * run. The run will make a best effort to use only the number of completion tokens + * specified, across multiple turns of the run. If the run exceeds the number of + * completion tokens specified, the run will end with status `incomplete`. See + * `incomplete_details` for more info. + */ + max_completion_tokens?: number | null; + /** + * The maximum number of prompt tokens that may be used over the course of the run. + * The run will make a best effort to use only the number of prompt tokens + * specified, across multiple turns of the run. If the run exceeds the number of + * prompt tokens specified, the run will end with status `incomplete`. See + * `incomplete_details` for more info. + */ + max_prompt_tokens?: number | null; + /** + * Set of 16 key-value pairs that can be attached to an object. This can be useful + * for storing additional information about the object in a structured format. Keys + * can be a maximum of 64 characters long and values can be a maxium of 512 + * characters long. + */ + metadata?: unknown | null; + /** + * The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to + * be used to execute this run. If a value is provided here, it will override the + * model associated with the assistant. If not, the model associated with the + * assistant will be used. + */ + model?: (string & {}) | 'gpt-4o' | 'gpt-4o-2024-05-13' | 'gpt-4-turbo' | 'gpt-4-turbo-2024-04-09' | 'gpt-4-0125-preview' | 'gpt-4-turbo-preview' | 'gpt-4-1106-preview' | 'gpt-4-vision-preview' | 'gpt-4' | 'gpt-4-0314' | 'gpt-4-0613' | 'gpt-4-32k' | 'gpt-4-32k-0314' | 'gpt-4-32k-0613' | 'gpt-3.5-turbo' | 'gpt-3.5-turbo-16k' | 'gpt-3.5-turbo-0613' | 'gpt-3.5-turbo-1106' | 'gpt-3.5-turbo-0125' | 'gpt-3.5-turbo-16k-0613' | null; + /** + * Specifies the format that the model must output. Compatible with + * [GPT-4o](https://platform.openai.com/docs/models/gpt-4o), + * [GPT-4 Turbo](https://platform.openai.com/docs/models/gpt-4-turbo-and-gpt-4), + * and all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. + * + * Setting to `{ "type": "json_object" }` enables JSON mode, which guarantees the + * message the model generates is valid JSON. + * + * **Important:** when using JSON mode, you **must** also instruct the model to + * produce JSON yourself via a system or user message. Without this, the model may + * generate an unending stream of whitespace until the generation reaches the token + * limit, resulting in a long-running and seemingly "stuck" request. Also note that + * the message content may be partially cut off if `finish_reason="length"`, which + * indicates the generation exceeded `max_tokens` or the conversation exceeded the + * max context length. + */ + response_format?: ThreadsAPI.AssistantResponseFormatOption | null; + /** + * What sampling temperature to use, between 0 and 2. Higher values like 0.8 will + * make the output more random, while lower values like 0.2 will make it more + * focused and deterministic. + */ + temperature?: number | null; + /** + * Controls which (if any) tool is called by the model. `none` means the model will + * not call any tools and instead generates a message. 
`auto` is the default value + * and means the model can pick between generating a message or calling one or more + * tools. `required` means the model must call one or more tools before responding + * to the user. Specifying a particular tool like `{"type": "file_search"}` or + * `{"type": "function", "function": {"name": "my_function"}}` forces the model to + * call that tool. + */ + tool_choice?: ThreadsAPI.AssistantToolChoiceOption | null; + /** + * Override the tools the assistant can use for this run. This is useful for + * modifying the behavior on a per-run basis. + */ + tools?: Array | null; + /** + * An alternative to sampling with temperature, called nucleus sampling, where the + * model considers the results of the tokens with top_p probability mass. So 0.1 + * means only the tokens comprising the top 10% probability mass are considered. + * + * We generally recommend altering this or temperature but not both. + */ + top_p?: number | null; + /** + * Controls for how a thread will be truncated prior to the run. Use this to + * control the intial context window of the run. + */ + truncation_strategy?: RunCreateAndStreamParams.TruncationStrategy | null; +} +export declare namespace RunCreateAndStreamParams { + interface AdditionalMessage { + /** + * The text contents of the message. + */ + content: string | Array; + /** + * The role of the entity that is creating the message. Allowed values include: + * + * - `user`: Indicates the message is sent by an actual user and should be used in + * most cases to represent user-generated messages. + * - `assistant`: Indicates the message is generated by the assistant. Use this + * value to insert messages from the assistant into the conversation. + */ + role: 'user' | 'assistant'; + /** + * A list of files attached to the message, and the tools they should be added to. + */ + attachments?: Array | null; + /** + * Set of 16 key-value pairs that can be attached to an object. This can be useful + * for storing additional information about the object in a structured format. Keys + * can be a maximum of 64 characters long and values can be a maxium of 512 + * characters long. + */ + metadata?: unknown | null; + } + namespace AdditionalMessage { + interface Attachment { + /** + * The ID of the file to attach to the message. + */ + file_id?: string; + /** + * The tools to add this file to. + */ + tools?: Array; + } + } + /** + * Controls for how a thread will be truncated prior to the run. Use this to + * control the intial context window of the run. + */ + interface TruncationStrategy { + /** + * The truncation strategy to use for the thread. The default is `auto`. If set to + * `last_messages`, the thread will be truncated to the n most recent messages in + * the thread. When set to `auto`, messages in the middle of the thread will be + * dropped to fit the context length of the model, `max_prompt_tokens`. + */ + type: 'auto' | 'last_messages'; + /** + * The number of most recent messages from the thread when constructing the context + * for the run. + */ + last_messages?: number | null; + } +} +export interface RunStreamParams { + /** + * The ID of the + * [assistant](https://platform.openai.com/docs/api-reference/assistants) to use to + * execute this run. + */ + assistant_id: string; + /** + * Appends additional instructions at the end of the instructions for the run. This + * is useful for modifying the behavior on a per-run basis without overriding other + * instructions. 
+ */ + additional_instructions?: string | null; + /** + * Adds additional messages to the thread before creating the run. + */ + additional_messages?: Array | null; + /** + * Overrides the + * [instructions](https://platform.openai.com/docs/api-reference/assistants/createAssistant) + * of the assistant. This is useful for modifying the behavior on a per-run basis. + */ + instructions?: string | null; + /** + * The maximum number of completion tokens that may be used over the course of the + * run. The run will make a best effort to use only the number of completion tokens + * specified, across multiple turns of the run. If the run exceeds the number of + * completion tokens specified, the run will end with status `incomplete`. See + * `incomplete_details` for more info. + */ + max_completion_tokens?: number | null; + /** + * The maximum number of prompt tokens that may be used over the course of the run. + * The run will make a best effort to use only the number of prompt tokens + * specified, across multiple turns of the run. If the run exceeds the number of + * prompt tokens specified, the run will end with status `incomplete`. See + * `incomplete_details` for more info. + */ + max_prompt_tokens?: number | null; + /** + * Set of 16 key-value pairs that can be attached to an object. This can be useful + * for storing additional information about the object in a structured format. Keys + * can be a maximum of 64 characters long and values can be a maxium of 512 + * characters long. + */ + metadata?: unknown | null; + /** + * The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to + * be used to execute this run. If a value is provided here, it will override the + * model associated with the assistant. If not, the model associated with the + * assistant will be used. + */ + model?: (string & {}) | 'gpt-4o' | 'gpt-4o-2024-05-13' | 'gpt-4-turbo' | 'gpt-4-turbo-2024-04-09' | 'gpt-4-0125-preview' | 'gpt-4-turbo-preview' | 'gpt-4-1106-preview' | 'gpt-4-vision-preview' | 'gpt-4' | 'gpt-4-0314' | 'gpt-4-0613' | 'gpt-4-32k' | 'gpt-4-32k-0314' | 'gpt-4-32k-0613' | 'gpt-3.5-turbo' | 'gpt-3.5-turbo-16k' | 'gpt-3.5-turbo-0613' | 'gpt-3.5-turbo-1106' | 'gpt-3.5-turbo-0125' | 'gpt-3.5-turbo-16k-0613' | null; + /** + * Specifies the format that the model must output. Compatible with + * [GPT-4o](https://platform.openai.com/docs/models/gpt-4o), + * [GPT-4 Turbo](https://platform.openai.com/docs/models/gpt-4-turbo-and-gpt-4), + * and all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. + * + * Setting to `{ "type": "json_object" }` enables JSON mode, which guarantees the + * message the model generates is valid JSON. + * + * **Important:** when using JSON mode, you **must** also instruct the model to + * produce JSON yourself via a system or user message. Without this, the model may + * generate an unending stream of whitespace until the generation reaches the token + * limit, resulting in a long-running and seemingly "stuck" request. Also note that + * the message content may be partially cut off if `finish_reason="length"`, which + * indicates the generation exceeded `max_tokens` or the conversation exceeded the + * max context length. + */ + response_format?: ThreadsAPI.AssistantResponseFormatOption | null; + /** + * What sampling temperature to use, between 0 and 2. Higher values like 0.8 will + * make the output more random, while lower values like 0.2 will make it more + * focused and deterministic. 
+ */ + temperature?: number | null; + /** + * Controls which (if any) tool is called by the model. `none` means the model will + * not call any tools and instead generates a message. `auto` is the default value + * and means the model can pick between generating a message or calling one or more + * tools. `required` means the model must call one or more tools before responding + * to the user. Specifying a particular tool like `{"type": "file_search"}` or + * `{"type": "function", "function": {"name": "my_function"}}` forces the model to + * call that tool. + */ + tool_choice?: ThreadsAPI.AssistantToolChoiceOption | null; + /** + * Override the tools the assistant can use for this run. This is useful for + * modifying the behavior on a per-run basis. + */ + tools?: Array | null; + /** + * An alternative to sampling with temperature, called nucleus sampling, where the + * model considers the results of the tokens with top_p probability mass. So 0.1 + * means only the tokens comprising the top 10% probability mass are considered. + * + * We generally recommend altering this or temperature but not both. + */ + top_p?: number | null; + /** + * Controls for how a thread will be truncated prior to the run. Use this to + * control the intial context window of the run. + */ + truncation_strategy?: RunStreamParams.TruncationStrategy | null; +} +export declare namespace RunStreamParams { + interface AdditionalMessage { + /** + * The text contents of the message. + */ + content: string | Array; + /** + * The role of the entity that is creating the message. Allowed values include: + * + * - `user`: Indicates the message is sent by an actual user and should be used in + * most cases to represent user-generated messages. + * - `assistant`: Indicates the message is generated by the assistant. Use this + * value to insert messages from the assistant into the conversation. + */ + role: 'user' | 'assistant'; + /** + * A list of files attached to the message, and the tools they should be added to. + */ + attachments?: Array | null; + /** + * Set of 16 key-value pairs that can be attached to an object. This can be useful + * for storing additional information about the object in a structured format. Keys + * can be a maximum of 64 characters long and values can be a maxium of 512 + * characters long. + */ + metadata?: unknown | null; + } + namespace AdditionalMessage { + interface Attachment { + /** + * The ID of the file to attach to the message. + */ + file_id?: string; + /** + * The tools to add this file to. + */ + tools?: Array; + } + } + /** + * Controls for how a thread will be truncated prior to the run. Use this to + * control the intial context window of the run. + */ + interface TruncationStrategy { + /** + * The truncation strategy to use for the thread. The default is `auto`. If set to + * `last_messages`, the thread will be truncated to the n most recent messages in + * the thread. When set to `auto`, messages in the middle of the thread will be + * dropped to fit the context length of the model, `max_prompt_tokens`. + */ + type: 'auto' | 'last_messages'; + /** + * The number of most recent messages from the thread when constructing the context + * for the run. + */ + last_messages?: number | null; + } +} +export type RunSubmitToolOutputsParams = RunSubmitToolOutputsParamsNonStreaming | RunSubmitToolOutputsParamsStreaming; +export interface RunSubmitToolOutputsParamsBase { + /** + * A list of tools for which the outputs are being submitted. 
+ */ + tool_outputs: Array; + /** + * If `true`, returns a stream of events that happen during the Run as server-sent + * events, terminating when the Run enters a terminal state with a `data: [DONE]` + * message. + */ + stream?: boolean | null; +} +export declare namespace RunSubmitToolOutputsParams { + interface ToolOutput { + /** + * The output of the tool call to be submitted to continue the run. + */ + output?: string; + /** + * The ID of the tool call in the `required_action` object within the run object + * the output is being submitted for. + */ + tool_call_id?: string; + } + type RunSubmitToolOutputsParamsNonStreaming = RunsAPI.RunSubmitToolOutputsParamsNonStreaming; + type RunSubmitToolOutputsParamsStreaming = RunsAPI.RunSubmitToolOutputsParamsStreaming; +} +export interface RunSubmitToolOutputsParamsNonStreaming extends RunSubmitToolOutputsParamsBase { + /** + * If `true`, returns a stream of events that happen during the Run as server-sent + * events, terminating when the Run enters a terminal state with a `data: [DONE]` + * message. + */ + stream?: false | null; +} +export interface RunSubmitToolOutputsParamsStreaming extends RunSubmitToolOutputsParamsBase { + /** + * If `true`, returns a stream of events that happen during the Run as server-sent + * events, terminating when the Run enters a terminal state with a `data: [DONE]` + * message. + */ + stream: true; +} +export interface RunSubmitToolOutputsAndPollParams { + /** + * A list of tools for which the outputs are being submitted. + */ + tool_outputs: Array; +} +export declare namespace RunSubmitToolOutputsAndPollParams { + interface ToolOutput { + /** + * The output of the tool call to be submitted to continue the run. + */ + output?: string; + /** + * The ID of the tool call in the `required_action` object within the run object + * the output is being submitted for. + */ + tool_call_id?: string; + } +} +export interface RunSubmitToolOutputsStreamParams { + /** + * A list of tools for which the outputs are being submitted. + */ + tool_outputs: Array; +} +export declare namespace RunSubmitToolOutputsStreamParams { + interface ToolOutput { + /** + * The output of the tool call to be submitted to continue the run. + */ + output?: string; + /** + * The ID of the tool call in the `required_action` object within the run object + * the output is being submitted for. 
+ */ + tool_call_id?: string; + } +} +export declare namespace Runs { + export { type RequiredActionFunctionToolCall as RequiredActionFunctionToolCall, type Run as Run, type RunStatus as RunStatus, RunsPage as RunsPage, type RunCreateParams as RunCreateParams, type RunCreateParamsNonStreaming as RunCreateParamsNonStreaming, type RunCreateParamsStreaming as RunCreateParamsStreaming, type RunUpdateParams as RunUpdateParams, type RunListParams as RunListParams, type RunCreateAndPollParams, type RunCreateAndStreamParams, type RunStreamParams, type RunSubmitToolOutputsParams as RunSubmitToolOutputsParams, type RunSubmitToolOutputsParamsNonStreaming as RunSubmitToolOutputsParamsNonStreaming, type RunSubmitToolOutputsParamsStreaming as RunSubmitToolOutputsParamsStreaming, type RunSubmitToolOutputsAndPollParams, type RunSubmitToolOutputsStreamParams, }; + export { Steps as Steps, type CodeInterpreterLogs as CodeInterpreterLogs, type CodeInterpreterOutputImage as CodeInterpreterOutputImage, type CodeInterpreterToolCall as CodeInterpreterToolCall, type CodeInterpreterToolCallDelta as CodeInterpreterToolCallDelta, type FileSearchToolCall as FileSearchToolCall, type FileSearchToolCallDelta as FileSearchToolCallDelta, type FunctionToolCall as FunctionToolCall, type FunctionToolCallDelta as FunctionToolCallDelta, type MessageCreationStepDetails as MessageCreationStepDetails, type RunStep as RunStep, type RunStepDelta as RunStepDelta, type RunStepDeltaEvent as RunStepDeltaEvent, type RunStepDeltaMessageDelta as RunStepDeltaMessageDelta, type RunStepInclude as RunStepInclude, type ToolCall as ToolCall, type ToolCallDelta as ToolCallDelta, type ToolCallDeltaObject as ToolCallDeltaObject, type ToolCallsStepDetails as ToolCallsStepDetails, RunStepsPage as RunStepsPage, type StepRetrieveParams as StepRetrieveParams, type StepListParams as StepListParams, }; +} +//# sourceMappingURL=runs.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/resources/beta/threads/runs/runs.d.ts.map b/node_modules/openai/resources/beta/threads/runs/runs.d.ts.map new file mode 100644 index 0000000..ccca5e5 --- /dev/null +++ b/node_modules/openai/resources/beta/threads/runs/runs.d.ts.map @@ -0,0 +1 @@ 
+{"version":3,"file":"runs.d.ts","sourceRoot":"","sources":["../../../../src/resources/beta/threads/runs/runs.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,WAAW,EAAE,MAAM,sBAAsB,CAAC;AAEnD,OAAO,EAAE,UAAU,EAAE,MAAM,kBAAkB,CAAC;AAC9C,OAAO,KAAK,IAAI,MAAM,kBAAkB,CAAC;AACzC,OAAO,EAAE,eAAe,EAAE,yBAAyB,EAAE,MAAM,iCAAiC,CAAC;AAE7F,OAAO,EAAE,gCAAgC,EAAE,MAAM,iCAAiC,CAAC;AACnF,OAAO,KAAK,OAAO,MAAM,QAAQ,CAAC;AAClC,OAAO,KAAK,aAAa,MAAM,kBAAkB,CAAC;AAClD,OAAO,KAAK,OAAO,MAAM,oBAAoB,CAAC;AAC9C,OAAO,KAAK,WAAW,MAAM,aAAa,CAAC;AAC3C,OAAO,KAAK,UAAU,MAAM,YAAY,CAAC;AACzC,OAAO,KAAK,QAAQ,MAAM,SAAS,CAAC;AACpC,OAAO,EACL,mBAAmB,EACnB,0BAA0B,EAC1B,uBAAuB,EACvB,4BAA4B,EAC5B,kBAAkB,EAClB,uBAAuB,EACvB,gBAAgB,EAChB,qBAAqB,EACrB,0BAA0B,EAC1B,OAAO,EACP,YAAY,EACZ,iBAAiB,EACjB,wBAAwB,EACxB,cAAc,EACd,YAAY,EACZ,cAAc,EACd,kBAAkB,EAClB,KAAK,EACL,QAAQ,EACR,aAAa,EACb,mBAAmB,EACnB,oBAAoB,EACrB,MAAM,SAAS,CAAC;AACjB,OAAO,EAAE,UAAU,EAAE,KAAK,gBAAgB,EAAE,MAAM,wBAAwB,CAAC;AAC3E,OAAO,EAAE,MAAM,EAAE,MAAM,uBAAuB,CAAC;AAE/C,qBAAa,IAAK,SAAQ,WAAW;IACnC,KAAK,EAAE,QAAQ,CAAC,KAAK,CAAoC;IAEzD;;OAEG;IACH,MAAM,CACJ,QAAQ,EAAE,MAAM,EAChB,MAAM,EAAE,2BAA2B,EACnC,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAC5B,UAAU,CAAC,GAAG,CAAC;IAClB,MAAM,CACJ,QAAQ,EAAE,MAAM,EAChB,MAAM,EAAE,wBAAwB,EAChC,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAC5B,UAAU,CAAC,MAAM,CAAC,aAAa,CAAC,oBAAoB,CAAC,CAAC;IACzD,MAAM,CACJ,QAAQ,EAAE,MAAM,EAChB,MAAM,EAAE,mBAAmB,EAC3B,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAC5B,UAAU,CAAC,MAAM,CAAC,aAAa,CAAC,oBAAoB,CAAC,GAAG,GAAG,CAAC;IAgB/D;;OAEG;IACH,QAAQ,CAAC,QAAQ,EAAE,MAAM,EAAE,KAAK,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAAG,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC;IAO9F;;OAEG;IACH,MAAM,CACJ,QAAQ,EAAE,MAAM,EAChB,KAAK,EAAE,MAAM,EACb,IAAI,EAAE,eAAe,EACrB,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAC5B,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC;IAQvB;;OAEG;IACH,IAAI,CACF,QAAQ,EAAE,MAAM,EAChB,KAAK,CAAC,EAAE,aAAa,EACrB,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAC5B,IAAI,CAAC,WAAW,CAAC,QAAQ,EAAE,GAAG,CAAC;IAClC,IAAI,CAAC,QAAQ,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAAG,IAAI,CAAC,WAAW,CAAC,QAAQ,EAAE,GAAG,CAAC;IAgBtF;;OAEG;IACH,MAAM,CAAC,QAAQ,EAAE,MAAM,EAAE,KAAK,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAAG,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC;IAO5F;;;;OAIG;IACG,aAAa,CACjB,QAAQ,EAAE,MAAM,EAChB,IAAI,EAAE,2BAA2B,EACjC,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAAG;QAAE,cAAc,CAAC,EAAE,MAAM,CAAA;KAAE,GAC1D,OAAO,CAAC,GAAG,CAAC;IAKf;;;;OAIG;IACH,eAAe,CACb,QAAQ,EAAE,MAAM,EAChB,IAAI,EAAE,yBAAyB,EAC/B,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAC5B,eAAe;IAIlB;;;;OAIG;IACG,IAAI,CACR,QAAQ,EAAE,MAAM,EAChB,KAAK,EAAE,MAAM,EACb,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAAG;QAAE,cAAc,CAAC,EAAE,MAAM,CAAA;KAAE,GAC1D,OAAO,CAAC,GAAG,CAAC;IA6Cf;;OAEG;IACH,MAAM,CAAC,QAAQ,EAAE,MAAM,EAAE,IAAI,EAAE,yBAAyB,EAAE,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAAG,eAAe;IAIzG;;;;;OAKG;IACH,iBAAiB,CACf,QAAQ,EAAE,MAAM,EAChB,KAAK,EAAE,MAAM,EACb,IAAI,EAAE,sCAAsC,EAC5C,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAC5B,UAAU,CAAC,GAAG,CAAC;IAClB,iBAAiB,CACf,QAAQ,EAAE,MAAM,EAChB,KAAK,EAAE,MAAM,EACb,IAAI,EAAE,mCAAmC,EACzC,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAC5B,UAAU,CAAC,MAAM,CAAC,aAAa,CAAC,oBAAoB,CAAC,CAAC;IACzD,iBAAiB,CACf,QAAQ,EAAE,MAAM,EAChB,KAAK,EAAE,MAAM,EACb,IAAI,EAAE,8BAA8B,EACpC,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAC5B,UAAU,CAAC,MAAM,CAAC,aAAa,CAAC,oBAAoB,CAAC,GAAG,GAAG,CAAC;IAe/D;;;;OAIG;IACG,wBAAwB,CAC5B,QAAQ,EAAE,MAAM,EAChB,KAAK,EAAE,MAAM,EACb,IAAI,EAAE,sCAAsC,EAC5C,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAAG;QAAE,cAAc,CAAC,EAAE,MAAM,CAAA;KAAE,GAC1D,OAAO,CAAC,GAAG,CAAC;IAKf;;;;OAIG;IACH,uBAAuB,CACrB,QAAQ,EAAE,MAAM,EAChB,KAAK,EAAE,MAAM,EACb,IAAI,EAAE,gCAAgC,EACtC,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAC5B,eAAe;CASnB;AAED,qBAAa,QAAS,SAAQ,UAAU
,CAAC,GAAG,CAAC;CAAG;AAEhD;;GAEG;AACH,MAAM,WAAW,8BAA8B;IAC7C;;;;;OAKG;IACH,EAAE,EAAE,MAAM,CAAC;IAEX;;OAEG;IACH,QAAQ,EAAE,8BAA8B,CAAC,QAAQ,CAAC;IAElD;;;OAGG;IACH,IAAI,EAAE,UAAU,CAAC;CAClB;AAED,yBAAiB,8BAA8B,CAAC;IAC9C;;OAEG;IACH,UAAiB,QAAQ;QACvB;;WAEG;QACH,SAAS,EAAE,MAAM,CAAC;QAElB;;WAEG;QACH,IAAI,EAAE,MAAM,CAAC;KACd;CACF;AAED;;;GAGG;AACH,MAAM,WAAW,GAAG;IAClB;;OAEG;IACH,EAAE,EAAE,MAAM,CAAC;IAEX;;;;OAIG;IACH,YAAY,EAAE,MAAM,CAAC;IAErB;;OAEG;IACH,YAAY,EAAE,MAAM,GAAG,IAAI,CAAC;IAE5B;;OAEG;IACH,YAAY,EAAE,MAAM,GAAG,IAAI,CAAC;IAE5B;;OAEG;IACH,UAAU,EAAE,MAAM,CAAC;IAEnB;;OAEG;IACH,UAAU,EAAE,MAAM,GAAG,IAAI,CAAC;IAE1B;;OAEG;IACH,SAAS,EAAE,MAAM,GAAG,IAAI,CAAC;IAEzB;;;OAGG;IACH,kBAAkB,EAAE,GAAG,CAAC,iBAAiB,GAAG,IAAI,CAAC;IAEjD;;;;OAIG;IACH,YAAY,EAAE,MAAM,CAAC;IAErB;;OAEG;IACH,UAAU,EAAE,GAAG,CAAC,SAAS,GAAG,IAAI,CAAC;IAEjC;;;OAGG;IACH,qBAAqB,EAAE,MAAM,GAAG,IAAI,CAAC;IAErC;;;OAGG;IACH,iBAAiB,EAAE,MAAM,GAAG,IAAI,CAAC;IAEjC;;;;;OAKG;IACH,QAAQ,EAAE,OAAO,GAAG,IAAI,CAAC;IAEzB;;;;OAIG;IACH,KAAK,EAAE,MAAM,CAAC;IAEd;;OAEG;IACH,MAAM,EAAE,YAAY,CAAC;IAErB;;;;OAIG;IACH,mBAAmB,EAAE,OAAO,CAAC;IAE7B;;;OAGG;IACH,eAAe,EAAE,GAAG,CAAC,cAAc,GAAG,IAAI,CAAC;IAE3C;;;;;;;;;;;;;;;;;;;;;OAqBG;IACH,eAAe,EAAE,UAAU,CAAC,6BAA6B,GAAG,IAAI,CAAC;IAEjE;;OAEG;IACH,UAAU,EAAE,MAAM,GAAG,IAAI,CAAC;IAE1B;;;;OAIG;IACH,MAAM,EAAE,SAAS,CAAC;IAElB;;;OAGG;IACH,SAAS,EAAE,MAAM,CAAC;IAElB;;;;;;;;OAQG;IACH,WAAW,EAAE,UAAU,CAAC,yBAAyB,GAAG,IAAI,CAAC;IAEzD;;;;OAIG;IACH,KAAK,EAAE,KAAK,CAAC,aAAa,CAAC,aAAa,CAAC,CAAC;IAE1C;;;OAGG;IACH,mBAAmB,EAAE,GAAG,CAAC,kBAAkB,GAAG,IAAI,CAAC;IAEnD;;;OAGG;IACH,KAAK,EAAE,GAAG,CAAC,KAAK,GAAG,IAAI,CAAC;IAExB;;OAEG;IACH,WAAW,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IAE5B;;OAEG;IACH,KAAK,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;CACvB;AAED,yBAAiB,GAAG,CAAC;IACnB;;;OAGG;IACH,UAAiB,iBAAiB;QAChC;;;WAGG;QACH,MAAM,CAAC,EAAE,uBAAuB,GAAG,mBAAmB,CAAC;KACxD;IAED;;OAEG;IACH,UAAiB,SAAS;QACxB;;WAEG;QACH,IAAI,EAAE,cAAc,GAAG,qBAAqB,GAAG,gBAAgB,CAAC;QAEhE;;WAEG;QACH,OAAO,EAAE,MAAM,CAAC;KACjB;IAED;;;OAGG;IACH,UAAiB,cAAc;QAC7B;;WAEG;QACH,mBAAmB,EAAE,cAAc,CAAC,iBAAiB,CAAC;QAEtD;;WAEG;QACH,IAAI,EAAE,qBAAqB,CAAC;KAC7B;IAED,UAAiB,cAAc,CAAC;QAC9B;;WAEG;QACH,UAAiB,iBAAiB;YAChC;;eAEG;YACH,UAAU,EAAE,KAAK,CAAC,OAAO,CAAC,8BAA8B,CAAC,CAAC;SAC3D;KACF;IAED;;;OAGG;IACH,UAAiB,kBAAkB;QACjC;;;;;WAKG;QACH,IAAI,EAAE,MAAM,GAAG,eAAe,CAAC;QAE/B;;;WAGG;QACH,aAAa,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;KAC/B;IAED;;;OAGG;IACH,UAAiB,KAAK;QACpB;;WAEG;QACH,iBAAiB,EAAE,MAAM,CAAC;QAE1B;;WAEG;QACH,aAAa,EAAE,MAAM,CAAC;QAEtB;;WAEG;QACH,YAAY,EAAE,MAAM,CAAC;KACtB;CACF;AAED;;;;GAIG;AACH,MAAM,MAAM,SAAS,GACjB,QAAQ,GACR,aAAa,GACb,iBAAiB,GACjB,YAAY,GACZ,WAAW,GACX,QAAQ,GACR,WAAW,GACX,YAAY,GACZ,SAAS,CAAC;AAEd,MAAM,MAAM,eAAe,GAAG,2BAA2B,GAAG,wBAAwB,CAAC;AAErF,MAAM,WAAW,mBAAmB;IAClC;;;;OAIG;IACH,YAAY,EAAE,MAAM,CAAC;IAErB;;;;;;;;;OASG;IACH,OAAO,CAAC,EAAE,KAAK,CAAC,QAAQ,CAAC,cAAc,CAAC,CAAC;IAEzC;;;;OAIG;IACH,uBAAuB,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IAExC;;OAEG;IACH,mBAAmB,CAAC,EAAE,KAAK,CAAC,eAAe,CAAC,iBAAiB,CAAC,GAAG,IAAI,CAAC;IAEtE;;;;OAIG;IACH,YAAY,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IAE7B;;;;;;OAMG;IACH,qBAAqB,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IAEtC;;;;;;OAMG;IACH,iBAAiB,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IAElC;;;;;OAKG;IACH,QAAQ,CAAC,EAAE,OAAO,GAAG,IAAI,CAAC;IAE1B;;;;;;OAMG;IACH,KAAK,CAAC,EAAE,CAAC,MAAM,GAAG,EAAE,CAAC,GAAG,OAAO,CAAC,SAAS,GAAG,IAAI,CAAC;IAEjD;;;;OAIG;IACH,mBAAmB,CAAC,EAAE,OAAO,CAAC;IAE9B;;;;;;;;;;;;;;;;;;;;;OAqBG;IACH,eAAe,CAAC,EAAE,UAAU,CAAC,6BAA6B,GAAG,IAAI,CAAC;IAElE;;;;OAIG;IACH,MAAM,CAAC,EAAE,OAAO,GAAG,IAAI,CAAC;IAExB;;;;OAIG;IACH,WAAW,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IAE5B;;;;;;;;;OASG;IACH,WAAW,CAAC,EAAE,UAAU,CAAC,yBAAyB
,GAAG,IAAI,CAAC;IAE1D;;;OAGG;IACH,KAAK,CAAC,EAAE,KAAK,CAAC,aAAa,CAAC,aAAa,CAAC,GAAG,IAAI,CAAC;IAElD;;;;;;;OAOG;IACH,KAAK,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IAEtB;;;OAGG;IACH,mBAAmB,CAAC,EAAE,eAAe,CAAC,kBAAkB,GAAG,IAAI,CAAC;CACjE;AAED,yBAAiB,eAAe,CAAC;IAC/B,UAAiB,iBAAiB;QAChC;;WAEG;QACH,OAAO,EAAE,MAAM,GAAG,KAAK,CAAC,WAAW,CAAC,uBAAuB,CAAC,CAAC;QAE7D;;;;;;;WAOG;QACH,IAAI,EAAE,MAAM,GAAG,WAAW,CAAC;QAE3B;;WAEG;QACH,WAAW,CAAC,EAAE,KAAK,CAAC,iBAAiB,CAAC,UAAU,CAAC,GAAG,IAAI,CAAC;QAEzD;;;;;WAKG;QACH,QAAQ,CAAC,EAAE,OAAO,GAAG,IAAI,CAAC;KAC3B;IAED,UAAiB,iBAAiB,CAAC;QACjC,UAAiB,UAAU;YACzB;;eAEG;YACH,OAAO,CAAC,EAAE,MAAM,CAAC;YAEjB;;eAEG;YACH,KAAK,CAAC,EAAE,KAAK,CAAC,aAAa,CAAC,mBAAmB,GAAG,UAAU,CAAC,UAAU,CAAC,CAAC;SAC1E;QAED,UAAiB,UAAU,CAAC;YAC1B,UAAiB,UAAU;gBACzB;;mBAEG;gBACH,IAAI,EAAE,aAAa,CAAC;aACrB;SACF;KACF;IAED;;;OAGG;IACH,UAAiB,kBAAkB;QACjC;;;;;WAKG;QACH,IAAI,EAAE,MAAM,GAAG,eAAe,CAAC;QAE/B;;;WAGG;QACH,aAAa,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;KAC/B;IAED,KAAY,2BAA2B,GAAG,OAAO,CAAC,2BAA2B,CAAC;IAC9E,KAAY,wBAAwB,GAAG,OAAO,CAAC,wBAAwB,CAAC;CACzE;AAED,MAAM,WAAW,2BAA4B,SAAQ,mBAAmB;IACtE;;;;OAIG;IACH,MAAM,CAAC,EAAE,KAAK,GAAG,IAAI,CAAC;CACvB;AAED,MAAM,WAAW,wBAAyB,SAAQ,mBAAmB;IACnE;;;;OAIG;IACH,MAAM,EAAE,IAAI,CAAC;CACd;AAED,MAAM,WAAW,eAAe;IAC9B;;;;;OAKG;IACH,QAAQ,CAAC,EAAE,OAAO,GAAG,IAAI,CAAC;CAC3B;AAED,MAAM,WAAW,aAAc,SAAQ,gBAAgB;IACrD;;;;;OAKG;IACH,MAAM,CAAC,EAAE,MAAM,CAAC;IAEhB;;;OAGG;IACH,KAAK,CAAC,EAAE,KAAK,GAAG,MAAM,CAAC;CACxB;AAED,MAAM,WAAW,sBAAsB;IACrC;;;;OAIG;IACH,YAAY,EAAE,MAAM,CAAC;IAErB;;;;OAIG;IACH,uBAAuB,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IAExC;;OAEG;IACH,mBAAmB,CAAC,EAAE,KAAK,CAAC,sBAAsB,CAAC,iBAAiB,CAAC,GAAG,IAAI,CAAC;IAE7E;;;;OAIG;IACH,YAAY,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IAE7B;;;;;;OAMG;IACH,qBAAqB,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IAEtC;;;;;;OAMG;IACH,iBAAiB,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IAElC;;;;;OAKG;IACH,QAAQ,CAAC,EAAE,OAAO,GAAG,IAAI,CAAC;IAE1B;;;;;OAKG;IACH,KAAK,CAAC,EACF,CAAC,MAAM,GAAG,EAAE,CAAC,GACb,QAAQ,GACR,mBAAmB,GACnB,aAAa,GACb,wBAAwB,GACxB,oBAAoB,GACpB,qBAAqB,GACrB,oBAAoB,GACpB,sBAAsB,GACtB,OAAO,GACP,YAAY,GACZ,YAAY,GACZ,WAAW,GACX,gBAAgB,GAChB,gBAAgB,GAChB,eAAe,GACf,mBAAmB,GACnB,oBAAoB,GACpB,oBAAoB,GACpB,oBAAoB,GACpB,wBAAwB,GACxB,IAAI,CAAC;IAET;;;;;;;;;;;;;;;;OAgBG;IACH,eAAe,CAAC,EAAE,UAAU,CAAC,6BAA6B,GAAG,IAAI,CAAC;IAElE;;;;OAIG;IACH,WAAW,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IAE5B;;;;;;;;OAQG;IACH,WAAW,CAAC,EAAE,UAAU,CAAC,yBAAyB,GAAG,IAAI,CAAC;IAE1D;;;OAGG;IACH,KAAK,CAAC,EAAE,KAAK,CAAC,aAAa,CAAC,aAAa,CAAC,GAAG,IAAI,CAAC;IAElD;;;;;;OAMG;IACH,KAAK,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IAEtB;;;OAGG;IACH,mBAAmB,CAAC,EAAE,sBAAsB,CAAC,kBAAkB,GAAG,IAAI,CAAC;CACxE;AAED,yBAAiB,sBAAsB,CAAC;IACtC,UAAiB,iBAAiB;QAChC;;WAEG;QACH,OAAO,EAAE,MAAM,GAAG,KAAK,CAAC,WAAW,CAAC,uBAAuB,CAAC,CAAC;QAE7D;;;;;;;WAOG;QACH,IAAI,EAAE,MAAM,GAAG,WAAW,CAAC;QAE3B;;WAEG;QACH,WAAW,CAAC,EAAE,KAAK,CAAC,iBAAiB,CAAC,UAAU,CAAC,GAAG,IAAI,CAAC;QAEzD;;;;;WAKG;QACH,QAAQ,CAAC,EAAE,OAAO,GAAG,IAAI,CAAC;KAC3B;IAED,UAAiB,iBAAiB,CAAC;QACjC,UAAiB,UAAU;YACzB;;eAEG;YACH,OAAO,CAAC,EAAE,MAAM,CAAC;YAEjB;;eAEG;YACH,KAAK,CAAC,EAAE,KAAK,CAAC,aAAa,CAAC,mBAAmB,GAAG,aAAa,CAAC,cAAc,CAAC,CAAC;SACjF;KACF;IAED;;;OAGG;IACH,UAAiB,kBAAkB;QACjC;;;;;WAKG;QACH,IAAI,EAAE,MAAM,GAAG,eAAe,CAAC;QAE/B;;;WAGG;QACH,aAAa,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;KAC/B;CACF;AAED,MAAM,WAAW,wBAAwB;IACvC;;;;OAIG;IACH,YAAY,EAAE,MAAM,CAAC;IAErB;;;;OAIG;IACH,uBAAuB,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IAExC;;OAEG;IACH,mBAAmB,CAAC,EAAE,KAAK,CAAC,wBAAwB,CAAC,iBAAiB,CAAC,GAAG,IAAI,CAAC;IAE/E;;;;OAIG;IACH,YAAY,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IAE7B;;;;;;OAMG;IACH,qBAAqB,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IAEtC;;;;;;OAMG;IACH,iBAAiB,CAAC,
EAAE,MAAM,GAAG,IAAI,CAAC;IAElC;;;;;OAKG;IACH,QAAQ,CAAC,EAAE,OAAO,GAAG,IAAI,CAAC;IAE1B;;;;;OAKG;IACH,KAAK,CAAC,EACF,CAAC,MAAM,GAAG,EAAE,CAAC,GACb,QAAQ,GACR,mBAAmB,GACnB,aAAa,GACb,wBAAwB,GACxB,oBAAoB,GACpB,qBAAqB,GACrB,oBAAoB,GACpB,sBAAsB,GACtB,OAAO,GACP,YAAY,GACZ,YAAY,GACZ,WAAW,GACX,gBAAgB,GAChB,gBAAgB,GAChB,eAAe,GACf,mBAAmB,GACnB,oBAAoB,GACpB,oBAAoB,GACpB,oBAAoB,GACpB,wBAAwB,GACxB,IAAI,CAAC;IAET;;;;;;;;;;;;;;;;OAgBG;IACH,eAAe,CAAC,EAAE,UAAU,CAAC,6BAA6B,GAAG,IAAI,CAAC;IAElE;;;;OAIG;IACH,WAAW,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IAE5B;;;;;;;;OAQG;IACH,WAAW,CAAC,EAAE,UAAU,CAAC,yBAAyB,GAAG,IAAI,CAAC;IAE1D;;;OAGG;IACH,KAAK,CAAC,EAAE,KAAK,CAAC,aAAa,CAAC,aAAa,CAAC,GAAG,IAAI,CAAC;IAElD;;;;;;OAMG;IACH,KAAK,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IAEtB;;;OAGG;IACH,mBAAmB,CAAC,EAAE,wBAAwB,CAAC,kBAAkB,GAAG,IAAI,CAAC;CAC1E;AAED,yBAAiB,wBAAwB,CAAC;IACxC,UAAiB,iBAAiB;QAChC;;WAEG;QACH,OAAO,EAAE,MAAM,GAAG,KAAK,CAAC,WAAW,CAAC,uBAAuB,CAAC,CAAC;QAE7D;;;;;;;WAOG;QACH,IAAI,EAAE,MAAM,GAAG,WAAW,CAAC;QAE3B;;WAEG;QACH,WAAW,CAAC,EAAE,KAAK,CAAC,iBAAiB,CAAC,UAAU,CAAC,GAAG,IAAI,CAAC;QAEzD;;;;;WAKG;QACH,QAAQ,CAAC,EAAE,OAAO,GAAG,IAAI,CAAC;KAC3B;IAED,UAAiB,iBAAiB,CAAC;QACjC,UAAiB,UAAU;YACzB;;eAEG;YACH,OAAO,CAAC,EAAE,MAAM,CAAC;YAEjB;;eAEG;YACH,KAAK,CAAC,EAAE,KAAK,CAAC,aAAa,CAAC,mBAAmB,GAAG,aAAa,CAAC,cAAc,CAAC,CAAC;SACjF;KACF;IAED;;;OAGG;IACH,UAAiB,kBAAkB;QACjC;;;;;WAKG;QACH,IAAI,EAAE,MAAM,GAAG,eAAe,CAAC;QAE/B;;;WAGG;QACH,aAAa,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;KAC/B;CACF;AAED,MAAM,WAAW,eAAe;IAC9B;;;;OAIG;IACH,YAAY,EAAE,MAAM,CAAC;IAErB;;;;OAIG;IACH,uBAAuB,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IAExC;;OAEG;IACH,mBAAmB,CAAC,EAAE,KAAK,CAAC,eAAe,CAAC,iBAAiB,CAAC,GAAG,IAAI,CAAC;IAEtE;;;;OAIG;IACH,YAAY,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IAE7B;;;;;;OAMG;IACH,qBAAqB,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IAEtC;;;;;;OAMG;IACH,iBAAiB,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IAElC;;;;;OAKG;IACH,QAAQ,CAAC,EAAE,OAAO,GAAG,IAAI,CAAC;IAE1B;;;;;OAKG;IACH,KAAK,CAAC,EACF,CAAC,MAAM,GAAG,EAAE,CAAC,GACb,QAAQ,GACR,mBAAmB,GACnB,aAAa,GACb,wBAAwB,GACxB,oBAAoB,GACpB,qBAAqB,GACrB,oBAAoB,GACpB,sBAAsB,GACtB,OAAO,GACP,YAAY,GACZ,YAAY,GACZ,WAAW,GACX,gBAAgB,GAChB,gBAAgB,GAChB,eAAe,GACf,mBAAmB,GACnB,oBAAoB,GACpB,oBAAoB,GACpB,oBAAoB,GACpB,wBAAwB,GACxB,IAAI,CAAC;IAET;;;;;;;;;;;;;;;;OAgBG;IACH,eAAe,CAAC,EAAE,UAAU,CAAC,6BAA6B,GAAG,IAAI,CAAC;IAElE;;;;OAIG;IACH,WAAW,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IAE5B;;;;;;;;OAQG;IACH,WAAW,CAAC,EAAE,UAAU,CAAC,yBAAyB,GAAG,IAAI,CAAC;IAE1D;;;OAGG;IACH,KAAK,CAAC,EAAE,KAAK,CAAC,aAAa,CAAC,aAAa,CAAC,GAAG,IAAI,CAAC;IAElD;;;;;;OAMG;IACH,KAAK,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IAEtB;;;OAGG;IACH,mBAAmB,CAAC,EAAE,eAAe,CAAC,kBAAkB,GAAG,IAAI,CAAC;CACjE;AAED,yBAAiB,eAAe,CAAC;IAC/B,UAAiB,iBAAiB;QAChC;;WAEG;QACH,OAAO,EAAE,MAAM,GAAG,KAAK,CAAC,WAAW,CAAC,uBAAuB,CAAC,CAAC;QAE7D;;;;;;;WAOG;QACH,IAAI,EAAE,MAAM,GAAG,WAAW,CAAC;QAE3B;;WAEG;QACH,WAAW,CAAC,EAAE,KAAK,CAAC,iBAAiB,CAAC,UAAU,CAAC,GAAG,IAAI,CAAC;QAEzD;;;;;WAKG;QACH,QAAQ,CAAC,EAAE,OAAO,GAAG,IAAI,CAAC;KAC3B;IAED,UAAiB,iBAAiB,CAAC;QACjC,UAAiB,UAAU;YACzB;;eAEG;YACH,OAAO,CAAC,EAAE,MAAM,CAAC;YAEjB;;eAEG;YACH,KAAK,CAAC,EAAE,KAAK,CAAC,aAAa,CAAC,mBAAmB,GAAG,aAAa,CAAC,cAAc,CAAC,CAAC;SACjF;KACF;IAED;;;OAGG;IACH,UAAiB,kBAAkB;QACjC;;;;;WAKG;QACH,IAAI,EAAE,MAAM,GAAG,eAAe,CAAC;QAE/B;;;WAGG;QACH,aAAa,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;KAC/B;CACF;AAED,MAAM,MAAM,0BAA0B,GAClC,sCAAsC,GACtC,mCAAmC,CAAC;AAExC,MAAM,WAAW,8BAA8B;IAC7C;;OAEG;IACH,YAAY,EAAE,KAAK,CAAC,0BAA0B,CAAC,UAAU,CAAC,CAAC;IAE3D;;;;OAIG;IACH,MAAM,CAAC,EAAE,OAAO,GAAG,IAAI,CAAC;CACzB;AAED,yBAAiB,0BAA0B,CAAC;IAC1C,UAAiB,UAAU;QACzB;;WAEG;QACH,MAAM,CAAC,EAAE,MAAM,CAAC;QAEhB;;;WAGG;QACH,YAAY,CAAC,EAAE,MAAM,CAAC;KACvB;IAED,K
AAY,sCAAsC,GAAG,OAAO,CAAC,sCAAsC,CAAC;IACpG,KAAY,mCAAmC,GAAG,OAAO,CAAC,mCAAmC,CAAC;CAC/F;AAED,MAAM,WAAW,sCAAuC,SAAQ,8BAA8B;IAC5F;;;;OAIG;IACH,MAAM,CAAC,EAAE,KAAK,GAAG,IAAI,CAAC;CACvB;AAED,MAAM,WAAW,mCAAoC,SAAQ,8BAA8B;IACzF;;;;OAIG;IACH,MAAM,EAAE,IAAI,CAAC;CACd;AAED,MAAM,WAAW,iCAAiC;IAChD;;OAEG;IACH,YAAY,EAAE,KAAK,CAAC,iCAAiC,CAAC,UAAU,CAAC,CAAC;CACnE;AAED,yBAAiB,iCAAiC,CAAC;IACjD,UAAiB,UAAU;QACzB;;WAEG;QACH,MAAM,CAAC,EAAE,MAAM,CAAC;QAEhB;;;WAGG;QACH,YAAY,CAAC,EAAE,MAAM,CAAC;KACvB;CACF;AAED,MAAM,WAAW,gCAAgC;IAC/C;;OAEG;IACH,YAAY,EAAE,KAAK,CAAC,gCAAgC,CAAC,UAAU,CAAC,CAAC;CAClE;AAED,yBAAiB,gCAAgC,CAAC;IAChD,UAAiB,UAAU;QACzB;;WAEG;QACH,MAAM,CAAC,EAAE,MAAM,CAAC;QAEhB;;;WAGG;QACH,YAAY,CAAC,EAAE,MAAM,CAAC;KACvB;CACF;AAMD,MAAM,CAAC,OAAO,WAAW,IAAI,CAAC;IAC5B,OAAO,EACL,KAAK,8BAA8B,IAAI,8BAA8B,EACrE,KAAK,GAAG,IAAI,GAAG,EACf,KAAK,SAAS,IAAI,SAAS,EAC3B,QAAQ,IAAI,QAAQ,EACpB,KAAK,eAAe,IAAI,eAAe,EACvC,KAAK,2BAA2B,IAAI,2BAA2B,EAC/D,KAAK,wBAAwB,IAAI,wBAAwB,EACzD,KAAK,eAAe,IAAI,eAAe,EACvC,KAAK,aAAa,IAAI,aAAa,EACnC,KAAK,sBAAsB,EAC3B,KAAK,wBAAwB,EAC7B,KAAK,eAAe,EACpB,KAAK,0BAA0B,IAAI,0BAA0B,EAC7D,KAAK,sCAAsC,IAAI,sCAAsC,EACrF,KAAK,mCAAmC,IAAI,mCAAmC,EAC/E,KAAK,iCAAiC,EACtC,KAAK,gCAAgC,GACtC,CAAC;IAEF,OAAO,EACL,KAAK,IAAI,KAAK,EACd,KAAK,mBAAmB,IAAI,mBAAmB,EAC/C,KAAK,0BAA0B,IAAI,0BAA0B,EAC7D,KAAK,uBAAuB,IAAI,uBAAuB,EACvD,KAAK,4BAA4B,IAAI,4BAA4B,EACjE,KAAK,kBAAkB,IAAI,kBAAkB,EAC7C,KAAK,uBAAuB,IAAI,uBAAuB,EACvD,KAAK,gBAAgB,IAAI,gBAAgB,EACzC,KAAK,qBAAqB,IAAI,qBAAqB,EACnD,KAAK,0BAA0B,IAAI,0BAA0B,EAC7D,KAAK,OAAO,IAAI,OAAO,EACvB,KAAK,YAAY,IAAI,YAAY,EACjC,KAAK,iBAAiB,IAAI,iBAAiB,EAC3C,KAAK,wBAAwB,IAAI,wBAAwB,EACzD,KAAK,cAAc,IAAI,cAAc,EACrC,KAAK,QAAQ,IAAI,QAAQ,EACzB,KAAK,aAAa,IAAI,aAAa,EACnC,KAAK,mBAAmB,IAAI,mBAAmB,EAC/C,KAAK,oBAAoB,IAAI,oBAAoB,EACjD,YAAY,IAAI,YAAY,EAC5B,KAAK,kBAAkB,IAAI,kBAAkB,EAC7C,KAAK,cAAc,IAAI,cAAc,GACtC,CAAC;CACH"} \ No newline at end of file diff --git a/node_modules/openai/resources/beta/threads/runs/runs.js b/node_modules/openai/resources/beta/threads/runs/runs.js new file mode 100644 index 0000000..673e6c1 --- /dev/null +++ b/node_modules/openai/resources/beta/threads/runs/runs.js @@ -0,0 +1,190 @@ +"use strict"; +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.RunsPage = exports.Runs = void 0; +const resource_1 = require("../../../../resource.js"); +const core_1 = require("../../../../core.js"); +const AssistantStream_1 = require("../../../../lib/AssistantStream.js"); +const core_2 = require("../../../../core.js"); +const StepsAPI = __importStar(require("./steps.js")); +const steps_1 = require("./steps.js"); +const pagination_1 = require("../../../../pagination.js"); +class Runs extends resource_1.APIResource { + constructor() { + super(...arguments); + this.steps = new StepsAPI.Steps(this._client); + } + create(threadId, params, options) { + const { include, ...body } = params; + return this._client.post(`/threads/${threadId}/runs`, { + query: { include }, + body, + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + stream: params.stream ?? false, + }); + } + /** + * Retrieves a run. + */ + retrieve(threadId, runId, options) { + return this._client.get(`/threads/${threadId}/runs/${runId}`, { + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + }); + } + /** + * Modifies a run. + */ + update(threadId, runId, body, options) { + return this._client.post(`/threads/${threadId}/runs/${runId}`, { + body, + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + }); + } + list(threadId, query = {}, options) { + if ((0, core_1.isRequestOptions)(query)) { + return this.list(threadId, {}, query); + } + return this._client.getAPIList(`/threads/${threadId}/runs`, RunsPage, { + query, + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + }); + } + /** + * Cancels a run that is `in_progress`. + */ + cancel(threadId, runId, options) { + return this._client.post(`/threads/${threadId}/runs/${runId}/cancel`, { + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + }); + } + /** + * A helper to create a run an poll for a terminal state. More information on Run + * lifecycles can be found here: + * https://platform.openai.com/docs/assistants/how-it-works/runs-and-run-steps + */ + async createAndPoll(threadId, body, options) { + const run = await this.create(threadId, body, options); + return await this.poll(threadId, run.id, options); + } + /** + * Create a Run stream + * + * @deprecated use `stream` instead + */ + createAndStream(threadId, body, options) { + return AssistantStream_1.AssistantStream.createAssistantStream(threadId, this._client.beta.threads.runs, body, options); + } + /** + * A helper to poll a run status until it reaches a terminal state. 
More + * information on Run lifecycles can be found here: + * https://platform.openai.com/docs/assistants/how-it-works/runs-and-run-steps + */ + async poll(threadId, runId, options) { + const headers = { ...options?.headers, 'X-Stainless-Poll-Helper': 'true' }; + if (options?.pollIntervalMs) { + headers['X-Stainless-Custom-Poll-Interval'] = options.pollIntervalMs.toString(); + } + while (true) { + const { data: run, response } = await this.retrieve(threadId, runId, { + ...options, + headers: { ...options?.headers, ...headers }, + }).withResponse(); + switch (run.status) { + //If we are in any sort of intermediate state we poll + case 'queued': + case 'in_progress': + case 'cancelling': + let sleepInterval = 5000; + if (options?.pollIntervalMs) { + sleepInterval = options.pollIntervalMs; + } + else { + const headerInterval = response.headers.get('openai-poll-after-ms'); + if (headerInterval) { + const headerIntervalMs = parseInt(headerInterval); + if (!isNaN(headerIntervalMs)) { + sleepInterval = headerIntervalMs; + } + } + } + await (0, core_2.sleep)(sleepInterval); + break; + //We return the run in any terminal state. + case 'requires_action': + case 'incomplete': + case 'cancelled': + case 'completed': + case 'failed': + case 'expired': + return run; + } + } + } + /** + * Create a Run stream + */ + stream(threadId, body, options) { + return AssistantStream_1.AssistantStream.createAssistantStream(threadId, this._client.beta.threads.runs, body, options); + } + submitToolOutputs(threadId, runId, body, options) { + return this._client.post(`/threads/${threadId}/runs/${runId}/submit_tool_outputs`, { + body, + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + stream: body.stream ?? false, + }); + } + /** + * A helper to submit a tool output to a run and poll for a terminal run state. + * More information on Run lifecycles can be found here: + * https://platform.openai.com/docs/assistants/how-it-works/runs-and-run-steps + */ + async submitToolOutputsAndPoll(threadId, runId, body, options) { + const run = await this.submitToolOutputs(threadId, runId, body, options); + return await this.poll(threadId, run.id, options); + } + /** + * Submit the tool outputs from a previous run and stream the run to a terminal + * state. 
More information on Run lifecycles can be found here: + * https://platform.openai.com/docs/assistants/how-it-works/runs-and-run-steps + */ + submitToolOutputsStream(threadId, runId, body, options) { + return AssistantStream_1.AssistantStream.createToolAssistantStream(threadId, runId, this._client.beta.threads.runs, body, options); + } +} +exports.Runs = Runs; +class RunsPage extends pagination_1.CursorPage { +} +exports.RunsPage = RunsPage; +Runs.RunsPage = RunsPage; +Runs.Steps = steps_1.Steps; +Runs.RunStepsPage = steps_1.RunStepsPage; +//# sourceMappingURL=runs.js.map \ No newline at end of file diff --git a/node_modules/openai/resources/beta/threads/runs/runs.js.map b/node_modules/openai/resources/beta/threads/runs/runs.js.map new file mode 100644 index 0000000..ba32ea4 --- /dev/null +++ b/node_modules/openai/resources/beta/threads/runs/runs.js.map @@ -0,0 +1 @@ +{"version":3,"file":"runs.js","sourceRoot":"","sources":["../../../../src/resources/beta/threads/runs/runs.ts"],"names":[],"mappings":";AAAA,sFAAsF;;;;;;;;;;;;;;;;;;;;;;;;;;AAEtF,sDAAmD;AACnD,8CAAoD;AAGpD,wEAA6F;AAC7F,8CAAyC;AAOzC,qDAAoC;AACpC,sCAuBiB;AACjB,0DAA2E;AAG3E,MAAa,IAAK,SAAQ,sBAAW;IAArC;;QACE,UAAK,GAAmB,IAAI,QAAQ,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;IA+P3D,CAAC;IA3OC,MAAM,CACJ,QAAgB,EAChB,MAAuB,EACvB,OAA6B;QAE7B,MAAM,EAAE,OAAO,EAAE,GAAG,IAAI,EAAE,GAAG,MAAM,CAAC;QACpC,OAAO,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,YAAY,QAAQ,OAAO,EAAE;YACpD,KAAK,EAAE,EAAE,OAAO,EAAE;YAClB,IAAI;YACJ,GAAG,OAAO;YACV,OAAO,EAAE,EAAE,aAAa,EAAE,eAAe,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE;YAChE,MAAM,EAAE,MAAM,CAAC,MAAM,IAAI,KAAK;SAC/B,CAA6E,CAAC;IACjF,CAAC;IAED;;OAEG;IACH,QAAQ,CAAC,QAAgB,EAAE,KAAa,EAAE,OAA6B;QACrE,OAAO,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,YAAY,QAAQ,SAAS,KAAK,EAAE,EAAE;YAC5D,GAAG,OAAO;YACV,OAAO,EAAE,EAAE,aAAa,EAAE,eAAe,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE;SACjE,CAAC,CAAC;IACL,CAAC;IAED;;OAEG;IACH,MAAM,CACJ,QAAgB,EAChB,KAAa,EACb,IAAqB,EACrB,OAA6B;QAE7B,OAAO,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,YAAY,QAAQ,SAAS,KAAK,EAAE,EAAE;YAC7D,IAAI;YACJ,GAAG,OAAO;YACV,OAAO,EAAE,EAAE,aAAa,EAAE,eAAe,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE;SACjE,CAAC,CAAC;IACL,CAAC;IAWD,IAAI,CACF,QAAgB,EAChB,QAA6C,EAAE,EAC/C,OAA6B;QAE7B,IAAI,IAAA,uBAAgB,EAAC,KAAK,CAAC,EAAE;YAC3B,OAAO,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE,EAAE,EAAE,KAAK,CAAC,CAAC;SACvC;QACD,OAAO,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,YAAY,QAAQ,OAAO,EAAE,QAAQ,EAAE;YACpE,KAAK;YACL,GAAG,OAAO;YACV,OAAO,EAAE,EAAE,aAAa,EAAE,eAAe,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE;SACjE,CAAC,CAAC;IACL,CAAC;IAED;;OAEG;IACH,MAAM,CAAC,QAAgB,EAAE,KAAa,EAAE,OAA6B;QACnE,OAAO,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,YAAY,QAAQ,SAAS,KAAK,SAAS,EAAE;YACpE,GAAG,OAAO;YACV,OAAO,EAAE,EAAE,aAAa,EAAE,eAAe,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE;SACjE,CAAC,CAAC;IACL,CAAC;IAED;;;;OAIG;IACH,KAAK,CAAC,aAAa,CACjB,QAAgB,EAChB,IAAiC,EACjC,OAA2D;QAE3D,MAAM,GAAG,GAAG,MAAM,IAAI,CAAC,MAAM,CAAC,QAAQ,EAAE,IAAI,EAAE,OAAO,CAAC,CAAC;QACvD,OAAO,MAAM,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE,GAAG,CAAC,EAAE,EAAE,OAAO,CAAC,CAAC;IACpD,CAAC;IAED;;;;OAIG;IACH,eAAe,CACb,QAAgB,EAChB,IAA+B,EAC/B,OAA6B;QAE7B,OAAO,iCAAe,CAAC,qBAAqB,CAAC,QAAQ,EAAE,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,IAAI,EAAE,IAAI,EAAE,OAAO,CAAC,CAAC;IACxG,CAAC;IAED;;;;OAIG;IACH,KAAK,CAAC,IAAI,CACR,QAAgB,EAChB,KAAa,EACb,OAA2D;QAE3D,MAAM,OAAO,GAA8B,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE,yBAAyB,EAAE,MAAM,EAAE,CAAC;QAEtG,IAAI,OAAO,EAAE,cAAc,EAAE;YAC3B,OAAO,CAAC,kCAAkC,CAAC,GAAG,OAAO,CAAC,cAAc,CAAC,QAAQ,EAAE,CAAC;SACjF;QAED,OAAO,IAAI,EAAE;YACX,MAAM,EAAE,IAAI,EAAE,GAAG,EAAE,QAAQ,EAAE,GAAG,MAAM,IAAI,CAAC,QAAQ,CAAC,QAAQ,EAAE,KAAK,EAAE;gBACnE,GAAG,OAAO;gBACV,OAAO,EAAE,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE,GAAG,OAAO,EAAE;aAC
7C,CAAC,CAAC,YAAY,EAAE,CAAC;YAElB,QAAQ,GAAG,CAAC,MAAM,EAAE;gBAClB,qDAAqD;gBACrD,KAAK,QAAQ,CAAC;gBACd,KAAK,aAAa,CAAC;gBACnB,KAAK,YAAY;oBACf,IAAI,aAAa,GAAG,IAAI,CAAC;oBAEzB,IAAI,OAAO,EAAE,cAAc,EAAE;wBAC3B,aAAa,GAAG,OAAO,CAAC,cAAc,CAAC;qBACxC;yBAAM;wBACL,MAAM,cAAc,GAAG,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,sBAAsB,CAAC,CAAC;wBACpE,IAAI,cAAc,EAAE;4BAClB,MAAM,gBAAgB,GAAG,QAAQ,CAAC,cAAc,CAAC,CAAC;4BAClD,IAAI,CAAC,KAAK,CAAC,gBAAgB,CAAC,EAAE;gCAC5B,aAAa,GAAG,gBAAgB,CAAC;6BAClC;yBACF;qBACF;oBACD,MAAM,IAAA,YAAK,EAAC,aAAa,CAAC,CAAC;oBAC3B,MAAM;gBACR,0CAA0C;gBAC1C,KAAK,iBAAiB,CAAC;gBACvB,KAAK,YAAY,CAAC;gBAClB,KAAK,WAAW,CAAC;gBACjB,KAAK,WAAW,CAAC;gBACjB,KAAK,QAAQ,CAAC;gBACd,KAAK,SAAS;oBACZ,OAAO,GAAG,CAAC;aACd;SACF;IACH,CAAC;IAED;;OAEG;IACH,MAAM,CAAC,QAAgB,EAAE,IAA+B,EAAE,OAA6B;QACrF,OAAO,iCAAe,CAAC,qBAAqB,CAAC,QAAQ,EAAE,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,IAAI,EAAE,IAAI,EAAE,OAAO,CAAC,CAAC;IACxG,CAAC;IA0BD,iBAAiB,CACf,QAAgB,EAChB,KAAa,EACb,IAAgC,EAChC,OAA6B;QAE7B,OAAO,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,YAAY,QAAQ,SAAS,KAAK,sBAAsB,EAAE;YACjF,IAAI;YACJ,GAAG,OAAO;YACV,OAAO,EAAE,EAAE,aAAa,EAAE,eAAe,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE;YAChE,MAAM,EAAE,IAAI,CAAC,MAAM,IAAI,KAAK;SAC7B,CAA6E,CAAC;IACjF,CAAC;IAED;;;;OAIG;IACH,KAAK,CAAC,wBAAwB,CAC5B,QAAgB,EAChB,KAAa,EACb,IAA4C,EAC5C,OAA2D;QAE3D,MAAM,GAAG,GAAG,MAAM,IAAI,CAAC,iBAAiB,CAAC,QAAQ,EAAE,KAAK,EAAE,IAAI,EAAE,OAAO,CAAC,CAAC;QACzE,OAAO,MAAM,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE,GAAG,CAAC,EAAE,EAAE,OAAO,CAAC,CAAC;IACpD,CAAC;IAED;;;;OAIG;IACH,uBAAuB,CACrB,QAAgB,EAChB,KAAa,EACb,IAAsC,EACtC,OAA6B;QAE7B,OAAO,iCAAe,CAAC,yBAAyB,CAC9C,QAAQ,EACR,KAAK,EACL,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,IAAI,EAC9B,IAAI,EACJ,OAAO,CACR,CAAC;IACJ,CAAC;CACF;AAhQD,oBAgQC;AAED,MAAa,QAAS,SAAQ,uBAAe;CAAG;AAAhD,4BAAgD;AAi0ChD,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;AACzB,IAAI,CAAC,KAAK,GAAG,aAAK,CAAC;AACnB,IAAI,CAAC,YAAY,GAAG,oBAAY,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/resources/beta/threads/runs/runs.mjs b/node_modules/openai/resources/beta/threads/runs/runs.mjs new file mode 100644 index 0000000..dff549f --- /dev/null +++ b/node_modules/openai/resources/beta/threads/runs/runs.mjs @@ -0,0 +1,162 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. +import { APIResource } from "../../../../resource.mjs"; +import { isRequestOptions } from "../../../../core.mjs"; +import { AssistantStream } from "../../../../lib/AssistantStream.mjs"; +import { sleep } from "../../../../core.mjs"; +import * as StepsAPI from "./steps.mjs"; +import { RunStepsPage, Steps, } from "./steps.mjs"; +import { CursorPage } from "../../../../pagination.mjs"; +export class Runs extends APIResource { + constructor() { + super(...arguments); + this.steps = new StepsAPI.Steps(this._client); + } + create(threadId, params, options) { + const { include, ...body } = params; + return this._client.post(`/threads/${threadId}/runs`, { + query: { include }, + body, + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + stream: params.stream ?? false, + }); + } + /** + * Retrieves a run. + */ + retrieve(threadId, runId, options) { + return this._client.get(`/threads/${threadId}/runs/${runId}`, { + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + }); + } + /** + * Modifies a run. 
+ */ + update(threadId, runId, body, options) { + return this._client.post(`/threads/${threadId}/runs/${runId}`, { + body, + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + }); + } + list(threadId, query = {}, options) { + if (isRequestOptions(query)) { + return this.list(threadId, {}, query); + } + return this._client.getAPIList(`/threads/${threadId}/runs`, RunsPage, { + query, + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + }); + } + /** + * Cancels a run that is `in_progress`. + */ + cancel(threadId, runId, options) { + return this._client.post(`/threads/${threadId}/runs/${runId}/cancel`, { + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + }); + } + /** + * A helper to create a run an poll for a terminal state. More information on Run + * lifecycles can be found here: + * https://platform.openai.com/docs/assistants/how-it-works/runs-and-run-steps + */ + async createAndPoll(threadId, body, options) { + const run = await this.create(threadId, body, options); + return await this.poll(threadId, run.id, options); + } + /** + * Create a Run stream + * + * @deprecated use `stream` instead + */ + createAndStream(threadId, body, options) { + return AssistantStream.createAssistantStream(threadId, this._client.beta.threads.runs, body, options); + } + /** + * A helper to poll a run status until it reaches a terminal state. More + * information on Run lifecycles can be found here: + * https://platform.openai.com/docs/assistants/how-it-works/runs-and-run-steps + */ + async poll(threadId, runId, options) { + const headers = { ...options?.headers, 'X-Stainless-Poll-Helper': 'true' }; + if (options?.pollIntervalMs) { + headers['X-Stainless-Custom-Poll-Interval'] = options.pollIntervalMs.toString(); + } + while (true) { + const { data: run, response } = await this.retrieve(threadId, runId, { + ...options, + headers: { ...options?.headers, ...headers }, + }).withResponse(); + switch (run.status) { + //If we are in any sort of intermediate state we poll + case 'queued': + case 'in_progress': + case 'cancelling': + let sleepInterval = 5000; + if (options?.pollIntervalMs) { + sleepInterval = options.pollIntervalMs; + } + else { + const headerInterval = response.headers.get('openai-poll-after-ms'); + if (headerInterval) { + const headerIntervalMs = parseInt(headerInterval); + if (!isNaN(headerIntervalMs)) { + sleepInterval = headerIntervalMs; + } + } + } + await sleep(sleepInterval); + break; + //We return the run in any terminal state. + case 'requires_action': + case 'incomplete': + case 'cancelled': + case 'completed': + case 'failed': + case 'expired': + return run; + } + } + } + /** + * Create a Run stream + */ + stream(threadId, body, options) { + return AssistantStream.createAssistantStream(threadId, this._client.beta.threads.runs, body, options); + } + submitToolOutputs(threadId, runId, body, options) { + return this._client.post(`/threads/${threadId}/runs/${runId}/submit_tool_outputs`, { + body, + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + stream: body.stream ?? false, + }); + } + /** + * A helper to submit a tool output to a run and poll for a terminal run state. 
+ * More information on Run lifecycles can be found here: + * https://platform.openai.com/docs/assistants/how-it-works/runs-and-run-steps + */ + async submitToolOutputsAndPoll(threadId, runId, body, options) { + const run = await this.submitToolOutputs(threadId, runId, body, options); + return await this.poll(threadId, run.id, options); + } + /** + * Submit the tool outputs from a previous run and stream the run to a terminal + * state. More information on Run lifecycles can be found here: + * https://platform.openai.com/docs/assistants/how-it-works/runs-and-run-steps + */ + submitToolOutputsStream(threadId, runId, body, options) { + return AssistantStream.createToolAssistantStream(threadId, runId, this._client.beta.threads.runs, body, options); + } +} +export class RunsPage extends CursorPage { +} +Runs.RunsPage = RunsPage; +Runs.Steps = Steps; +Runs.RunStepsPage = RunStepsPage; +//# sourceMappingURL=runs.mjs.map \ No newline at end of file diff --git a/node_modules/openai/resources/beta/threads/runs/runs.mjs.map b/node_modules/openai/resources/beta/threads/runs/runs.mjs.map new file mode 100644 index 0000000..d9f4319 --- /dev/null +++ b/node_modules/openai/resources/beta/threads/runs/runs.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"runs.mjs","sourceRoot":"","sources":["../../../../src/resources/beta/threads/runs/runs.ts"],"names":[],"mappings":"AAAA,sFAAsF;OAE/E,EAAE,WAAW,EAAE;OACf,EAAE,gBAAgB,EAAE;OAGpB,EAAE,eAAe,EAA6B;OAC9C,EAAE,KAAK,EAAE;OAOT,KAAK,QAAQ;OACb,EAeL,YAAY,EAGZ,KAAK,GAKN;OACM,EAAE,UAAU,EAAyB;AAG5C,MAAM,OAAO,IAAK,SAAQ,WAAW;IAArC;;QACE,UAAK,GAAmB,IAAI,QAAQ,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;IA+P3D,CAAC;IA3OC,MAAM,CACJ,QAAgB,EAChB,MAAuB,EACvB,OAA6B;QAE7B,MAAM,EAAE,OAAO,EAAE,GAAG,IAAI,EAAE,GAAG,MAAM,CAAC;QACpC,OAAO,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,YAAY,QAAQ,OAAO,EAAE;YACpD,KAAK,EAAE,EAAE,OAAO,EAAE;YAClB,IAAI;YACJ,GAAG,OAAO;YACV,OAAO,EAAE,EAAE,aAAa,EAAE,eAAe,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE;YAChE,MAAM,EAAE,MAAM,CAAC,MAAM,IAAI,KAAK;SAC/B,CAA6E,CAAC;IACjF,CAAC;IAED;;OAEG;IACH,QAAQ,CAAC,QAAgB,EAAE,KAAa,EAAE,OAA6B;QACrE,OAAO,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,YAAY,QAAQ,SAAS,KAAK,EAAE,EAAE;YAC5D,GAAG,OAAO;YACV,OAAO,EAAE,EAAE,aAAa,EAAE,eAAe,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE;SACjE,CAAC,CAAC;IACL,CAAC;IAED;;OAEG;IACH,MAAM,CACJ,QAAgB,EAChB,KAAa,EACb,IAAqB,EACrB,OAA6B;QAE7B,OAAO,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,YAAY,QAAQ,SAAS,KAAK,EAAE,EAAE;YAC7D,IAAI;YACJ,GAAG,OAAO;YACV,OAAO,EAAE,EAAE,aAAa,EAAE,eAAe,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE;SACjE,CAAC,CAAC;IACL,CAAC;IAWD,IAAI,CACF,QAAgB,EAChB,QAA6C,EAAE,EAC/C,OAA6B;QAE7B,IAAI,gBAAgB,CAAC,KAAK,CAAC,EAAE;YAC3B,OAAO,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE,EAAE,EAAE,KAAK,CAAC,CAAC;SACvC;QACD,OAAO,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,YAAY,QAAQ,OAAO,EAAE,QAAQ,EAAE;YACpE,KAAK;YACL,GAAG,OAAO;YACV,OAAO,EAAE,EAAE,aAAa,EAAE,eAAe,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE;SACjE,CAAC,CAAC;IACL,CAAC;IAED;;OAEG;IACH,MAAM,CAAC,QAAgB,EAAE,KAAa,EAAE,OAA6B;QACnE,OAAO,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,YAAY,QAAQ,SAAS,KAAK,SAAS,EAAE;YACpE,GAAG,OAAO;YACV,OAAO,EAAE,EAAE,aAAa,EAAE,eAAe,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE;SACjE,CAAC,CAAC;IACL,CAAC;IAED;;;;OAIG;IACH,KAAK,CAAC,aAAa,CACjB,QAAgB,EAChB,IAAiC,EACjC,OAA2D;QAE3D,MAAM,GAAG,GAAG,MAAM,IAAI,CAAC,MAAM,CAAC,QAAQ,EAAE,IAAI,EAAE,OAAO,CAAC,CAAC;QACvD,OAAO,MAAM,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE,GAAG,CAAC,EAAE,EAAE,OAAO,CAAC,CAAC;IACpD,CAAC;IAED;;;;OAIG;IACH,eAAe,CACb,QAAgB,EAChB,IAA+B,EAC/B,OAA6B;QAE7B,OAAO,eAAe,CAAC,qBAAqB,CAAC,QAAQ,EAAE,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,IAAI,EAAE,IAAI,EAAE,OAAO,CAAC,CAAC;IACxG,CAAC;IAED;;;;OAIG;IACH,KAAK,CAAC,IAAI,CACR,QAAgB,EAChB,KAAa,EACb,
OAA2D;QAE3D,MAAM,OAAO,GAA8B,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE,yBAAyB,EAAE,MAAM,EAAE,CAAC;QAEtG,IAAI,OAAO,EAAE,cAAc,EAAE;YAC3B,OAAO,CAAC,kCAAkC,CAAC,GAAG,OAAO,CAAC,cAAc,CAAC,QAAQ,EAAE,CAAC;SACjF;QAED,OAAO,IAAI,EAAE;YACX,MAAM,EAAE,IAAI,EAAE,GAAG,EAAE,QAAQ,EAAE,GAAG,MAAM,IAAI,CAAC,QAAQ,CAAC,QAAQ,EAAE,KAAK,EAAE;gBACnE,GAAG,OAAO;gBACV,OAAO,EAAE,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE,GAAG,OAAO,EAAE;aAC7C,CAAC,CAAC,YAAY,EAAE,CAAC;YAElB,QAAQ,GAAG,CAAC,MAAM,EAAE;gBAClB,qDAAqD;gBACrD,KAAK,QAAQ,CAAC;gBACd,KAAK,aAAa,CAAC;gBACnB,KAAK,YAAY;oBACf,IAAI,aAAa,GAAG,IAAI,CAAC;oBAEzB,IAAI,OAAO,EAAE,cAAc,EAAE;wBAC3B,aAAa,GAAG,OAAO,CAAC,cAAc,CAAC;qBACxC;yBAAM;wBACL,MAAM,cAAc,GAAG,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,sBAAsB,CAAC,CAAC;wBACpE,IAAI,cAAc,EAAE;4BAClB,MAAM,gBAAgB,GAAG,QAAQ,CAAC,cAAc,CAAC,CAAC;4BAClD,IAAI,CAAC,KAAK,CAAC,gBAAgB,CAAC,EAAE;gCAC5B,aAAa,GAAG,gBAAgB,CAAC;6BAClC;yBACF;qBACF;oBACD,MAAM,KAAK,CAAC,aAAa,CAAC,CAAC;oBAC3B,MAAM;gBACR,0CAA0C;gBAC1C,KAAK,iBAAiB,CAAC;gBACvB,KAAK,YAAY,CAAC;gBAClB,KAAK,WAAW,CAAC;gBACjB,KAAK,WAAW,CAAC;gBACjB,KAAK,QAAQ,CAAC;gBACd,KAAK,SAAS;oBACZ,OAAO,GAAG,CAAC;aACd;SACF;IACH,CAAC;IAED;;OAEG;IACH,MAAM,CAAC,QAAgB,EAAE,IAA+B,EAAE,OAA6B;QACrF,OAAO,eAAe,CAAC,qBAAqB,CAAC,QAAQ,EAAE,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,IAAI,EAAE,IAAI,EAAE,OAAO,CAAC,CAAC;IACxG,CAAC;IA0BD,iBAAiB,CACf,QAAgB,EAChB,KAAa,EACb,IAAgC,EAChC,OAA6B;QAE7B,OAAO,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,YAAY,QAAQ,SAAS,KAAK,sBAAsB,EAAE;YACjF,IAAI;YACJ,GAAG,OAAO;YACV,OAAO,EAAE,EAAE,aAAa,EAAE,eAAe,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE;YAChE,MAAM,EAAE,IAAI,CAAC,MAAM,IAAI,KAAK;SAC7B,CAA6E,CAAC;IACjF,CAAC;IAED;;;;OAIG;IACH,KAAK,CAAC,wBAAwB,CAC5B,QAAgB,EAChB,KAAa,EACb,IAA4C,EAC5C,OAA2D;QAE3D,MAAM,GAAG,GAAG,MAAM,IAAI,CAAC,iBAAiB,CAAC,QAAQ,EAAE,KAAK,EAAE,IAAI,EAAE,OAAO,CAAC,CAAC;QACzE,OAAO,MAAM,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE,GAAG,CAAC,EAAE,EAAE,OAAO,CAAC,CAAC;IACpD,CAAC;IAED;;;;OAIG;IACH,uBAAuB,CACrB,QAAgB,EAChB,KAAa,EACb,IAAsC,EACtC,OAA6B;QAE7B,OAAO,eAAe,CAAC,yBAAyB,CAC9C,QAAQ,EACR,KAAK,EACL,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,IAAI,EAC9B,IAAI,EACJ,OAAO,CACR,CAAC;IACJ,CAAC;CACF;AAED,MAAM,OAAO,QAAS,SAAQ,UAAe;CAAG;AAi0ChD,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;AACzB,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC;AACnB,IAAI,CAAC,YAAY,GAAG,YAAY,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/resources/beta/threads/runs/steps.d.ts b/node_modules/openai/resources/beta/threads/runs/steps.d.ts new file mode 100644 index 0000000..ae46f69 --- /dev/null +++ b/node_modules/openai/resources/beta/threads/runs/steps.d.ts @@ -0,0 +1,592 @@ +import { APIResource } from "../../../../resource.js"; +import * as Core from "../../../../core.js"; +import * as StepsAPI from "./steps.js"; +import { CursorPage, type CursorPageParams } from "../../../../pagination.js"; +export declare class Steps extends APIResource { + /** + * Retrieves a run step. + */ + retrieve(threadId: string, runId: string, stepId: string, query?: StepRetrieveParams, options?: Core.RequestOptions): Core.APIPromise; + retrieve(threadId: string, runId: string, stepId: string, options?: Core.RequestOptions): Core.APIPromise; + /** + * Returns a list of run steps belonging to a run. + */ + list(threadId: string, runId: string, query?: StepListParams, options?: Core.RequestOptions): Core.PagePromise; + list(threadId: string, runId: string, options?: Core.RequestOptions): Core.PagePromise; +} +export declare class RunStepsPage extends CursorPage { +} +/** + * Text output from the Code Interpreter tool call as part of a run step. 
+ */ +export interface CodeInterpreterLogs { + /** + * The index of the output in the outputs array. + */ + index: number; + /** + * Always `logs`. + */ + type: 'logs'; + /** + * The text output from the Code Interpreter tool call. + */ + logs?: string; +} +export interface CodeInterpreterOutputImage { + /** + * The index of the output in the outputs array. + */ + index: number; + /** + * Always `image`. + */ + type: 'image'; + image?: CodeInterpreterOutputImage.Image; +} +export declare namespace CodeInterpreterOutputImage { + interface Image { + /** + * The [file](https://platform.openai.com/docs/api-reference/files) ID of the + * image. + */ + file_id?: string; + } +} +/** + * Details of the Code Interpreter tool call the run step was involved in. + */ +export interface CodeInterpreterToolCall { + /** + * The ID of the tool call. + */ + id: string; + /** + * The Code Interpreter tool call definition. + */ + code_interpreter: CodeInterpreterToolCall.CodeInterpreter; + /** + * The type of tool call. This is always going to be `code_interpreter` for this + * type of tool call. + */ + type: 'code_interpreter'; +} +export declare namespace CodeInterpreterToolCall { + /** + * The Code Interpreter tool call definition. + */ + interface CodeInterpreter { + /** + * The input to the Code Interpreter tool call. + */ + input: string; + /** + * The outputs from the Code Interpreter tool call. Code Interpreter can output one + * or more items, including text (`logs`) or images (`image`). Each of these are + * represented by a different object type. + */ + outputs: Array; + } + namespace CodeInterpreter { + /** + * Text output from the Code Interpreter tool call as part of a run step. + */ + interface Logs { + /** + * The text output from the Code Interpreter tool call. + */ + logs: string; + /** + * Always `logs`. + */ + type: 'logs'; + } + interface Image { + image: Image.Image; + /** + * Always `image`. + */ + type: 'image'; + } + namespace Image { + interface Image { + /** + * The [file](https://platform.openai.com/docs/api-reference/files) ID of the + * image. + */ + file_id: string; + } + } + } +} +/** + * Details of the Code Interpreter tool call the run step was involved in. + */ +export interface CodeInterpreterToolCallDelta { + /** + * The index of the tool call in the tool calls array. + */ + index: number; + /** + * The type of tool call. This is always going to be `code_interpreter` for this + * type of tool call. + */ + type: 'code_interpreter'; + /** + * The ID of the tool call. + */ + id?: string; + /** + * The Code Interpreter tool call definition. + */ + code_interpreter?: CodeInterpreterToolCallDelta.CodeInterpreter; +} +export declare namespace CodeInterpreterToolCallDelta { + /** + * The Code Interpreter tool call definition. + */ + interface CodeInterpreter { + /** + * The input to the Code Interpreter tool call. + */ + input?: string; + /** + * The outputs from the Code Interpreter tool call. Code Interpreter can output one + * or more items, including text (`logs`) or images (`image`). Each of these are + * represented by a different object type. + */ + outputs?: Array; + } +} +export interface FileSearchToolCall { + /** + * The ID of the tool call object. + */ + id: string; + /** + * For now, this is always going to be an empty object. + */ + file_search: FileSearchToolCall.FileSearch; + /** + * The type of tool call. This is always going to be `file_search` for this type of + * tool call. 
+ */ + type: 'file_search'; +} +export declare namespace FileSearchToolCall { + /** + * For now, this is always going to be an empty object. + */ + interface FileSearch { + /** + * The ranking options for the file search. + */ + ranking_options?: FileSearch.RankingOptions; + /** + * The results of the file search. + */ + results?: Array; + } + namespace FileSearch { + /** + * The ranking options for the file search. + */ + interface RankingOptions { + /** + * The ranker used for the file search. + */ + ranker: 'default_2024_08_21'; + /** + * The score threshold for the file search. All values must be a floating point + * number between 0 and 1. + */ + score_threshold: number; + } + /** + * A result instance of the file search. + */ + interface Result { + /** + * The ID of the file that result was found in. + */ + file_id: string; + /** + * The name of the file that result was found in. + */ + file_name: string; + /** + * The score of the result. All values must be a floating point number between 0 + * and 1. + */ + score: number; + /** + * The content of the result that was found. The content is only included if + * requested via the include query parameter. + */ + content?: Array; + } + namespace Result { + interface Content { + /** + * The text content of the file. + */ + text?: string; + /** + * The type of the content. + */ + type?: 'text'; + } + } + } +} +export interface FileSearchToolCallDelta { + /** + * For now, this is always going to be an empty object. + */ + file_search: unknown; + /** + * The index of the tool call in the tool calls array. + */ + index: number; + /** + * The type of tool call. This is always going to be `file_search` for this type of + * tool call. + */ + type: 'file_search'; + /** + * The ID of the tool call object. + */ + id?: string; +} +export interface FunctionToolCall { + /** + * The ID of the tool call object. + */ + id: string; + /** + * The definition of the function that was called. + */ + function: FunctionToolCall.Function; + /** + * The type of tool call. This is always going to be `function` for this type of + * tool call. + */ + type: 'function'; +} +export declare namespace FunctionToolCall { + /** + * The definition of the function that was called. + */ + interface Function { + /** + * The arguments passed to the function. + */ + arguments: string; + /** + * The name of the function. + */ + name: string; + /** + * The output of the function. This will be `null` if the outputs have not been + * [submitted](https://platform.openai.com/docs/api-reference/runs/submitToolOutputs) + * yet. + */ + output: string | null; + } +} +export interface FunctionToolCallDelta { + /** + * The index of the tool call in the tool calls array. + */ + index: number; + /** + * The type of tool call. This is always going to be `function` for this type of + * tool call. + */ + type: 'function'; + /** + * The ID of the tool call object. + */ + id?: string; + /** + * The definition of the function that was called. + */ + function?: FunctionToolCallDelta.Function; +} +export declare namespace FunctionToolCallDelta { + /** + * The definition of the function that was called. + */ + interface Function { + /** + * The arguments passed to the function. + */ + arguments?: string; + /** + * The name of the function. + */ + name?: string; + /** + * The output of the function. This will be `null` if the outputs have not been + * [submitted](https://platform.openai.com/docs/api-reference/runs/submitToolOutputs) + * yet. 
+ */ + output?: string | null; + } +} +/** + * Details of the message creation by the run step. + */ +export interface MessageCreationStepDetails { + message_creation: MessageCreationStepDetails.MessageCreation; + /** + * Always `message_creation`. + */ + type: 'message_creation'; +} +export declare namespace MessageCreationStepDetails { + interface MessageCreation { + /** + * The ID of the message that was created by this run step. + */ + message_id: string; + } +} +/** + * Represents a step in execution of a run. + */ +export interface RunStep { + /** + * The identifier of the run step, which can be referenced in API endpoints. + */ + id: string; + /** + * The ID of the + * [assistant](https://platform.openai.com/docs/api-reference/assistants) + * associated with the run step. + */ + assistant_id: string; + /** + * The Unix timestamp (in seconds) for when the run step was cancelled. + */ + cancelled_at: number | null; + /** + * The Unix timestamp (in seconds) for when the run step completed. + */ + completed_at: number | null; + /** + * The Unix timestamp (in seconds) for when the run step was created. + */ + created_at: number; + /** + * The Unix timestamp (in seconds) for when the run step expired. A step is + * considered expired if the parent run is expired. + */ + expired_at: number | null; + /** + * The Unix timestamp (in seconds) for when the run step failed. + */ + failed_at: number | null; + /** + * The last error associated with this run step. Will be `null` if there are no + * errors. + */ + last_error: RunStep.LastError | null; + /** + * Set of 16 key-value pairs that can be attached to an object. This can be useful + * for storing additional information about the object in a structured format. Keys + * can be a maximum of 64 characters long and values can be a maxium of 512 + * characters long. + */ + metadata: unknown | null; + /** + * The object type, which is always `thread.run.step`. + */ + object: 'thread.run.step'; + /** + * The ID of the [run](https://platform.openai.com/docs/api-reference/runs) that + * this run step is a part of. + */ + run_id: string; + /** + * The status of the run step, which can be either `in_progress`, `cancelled`, + * `failed`, `completed`, or `expired`. + */ + status: 'in_progress' | 'cancelled' | 'failed' | 'completed' | 'expired'; + /** + * The details of the run step. + */ + step_details: MessageCreationStepDetails | ToolCallsStepDetails; + /** + * The ID of the [thread](https://platform.openai.com/docs/api-reference/threads) + * that was run. + */ + thread_id: string; + /** + * The type of run step, which can be either `message_creation` or `tool_calls`. + */ + type: 'message_creation' | 'tool_calls'; + /** + * Usage statistics related to the run step. This value will be `null` while the + * run step's status is `in_progress`. + */ + usage: RunStep.Usage | null; +} +export declare namespace RunStep { + /** + * The last error associated with this run step. Will be `null` if there are no + * errors. + */ + interface LastError { + /** + * One of `server_error` or `rate_limit_exceeded`. + */ + code: 'server_error' | 'rate_limit_exceeded'; + /** + * A human-readable description of the error. + */ + message: string; + } + /** + * Usage statistics related to the run step. This value will be `null` while the + * run step's status is `in_progress`. + */ + interface Usage { + /** + * Number of completion tokens used over the course of the run step. 
+ */ + completion_tokens: number; + /** + * Number of prompt tokens used over the course of the run step. + */ + prompt_tokens: number; + /** + * Total number of tokens used (prompt + completion). + */ + total_tokens: number; + } +} +/** + * The delta containing the fields that have changed on the run step. + */ +export interface RunStepDelta { + /** + * The details of the run step. + */ + step_details?: RunStepDeltaMessageDelta | ToolCallDeltaObject; +} +/** + * Represents a run step delta i.e. any changed fields on a run step during + * streaming. + */ +export interface RunStepDeltaEvent { + /** + * The identifier of the run step, which can be referenced in API endpoints. + */ + id: string; + /** + * The delta containing the fields that have changed on the run step. + */ + delta: RunStepDelta; + /** + * The object type, which is always `thread.run.step.delta`. + */ + object: 'thread.run.step.delta'; +} +/** + * Details of the message creation by the run step. + */ +export interface RunStepDeltaMessageDelta { + /** + * Always `message_creation`. + */ + type: 'message_creation'; + message_creation?: RunStepDeltaMessageDelta.MessageCreation; +} +export declare namespace RunStepDeltaMessageDelta { + interface MessageCreation { + /** + * The ID of the message that was created by this run step. + */ + message_id?: string; + } +} +export type RunStepInclude = 'step_details.tool_calls[*].file_search.results[*].content'; +/** + * Details of the Code Interpreter tool call the run step was involved in. + */ +export type ToolCall = CodeInterpreterToolCall | FileSearchToolCall | FunctionToolCall; +/** + * Details of the Code Interpreter tool call the run step was involved in. + */ +export type ToolCallDelta = CodeInterpreterToolCallDelta | FileSearchToolCallDelta | FunctionToolCallDelta; +/** + * Details of the tool call. + */ +export interface ToolCallDeltaObject { + /** + * Always `tool_calls`. + */ + type: 'tool_calls'; + /** + * An array of tool calls the run step was involved in. These can be associated + * with one of three types of tools: `code_interpreter`, `file_search`, or + * `function`. + */ + tool_calls?: Array; +} +/** + * Details of the tool call. + */ +export interface ToolCallsStepDetails { + /** + * An array of tool calls the run step was involved in. These can be associated + * with one of three types of tools: `code_interpreter`, `file_search`, or + * `function`. + */ + tool_calls: Array; + /** + * Always `tool_calls`. + */ + type: 'tool_calls'; +} +export interface StepRetrieveParams { + /** + * A list of additional fields to include in the response. Currently the only + * supported value is `step_details.tool_calls[*].file_search.results[*].content` + * to fetch the file search result content. + * + * See the + * [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) + * for more information. + */ + include?: Array; +} +export interface StepListParams extends CursorPageParams { + /** + * A cursor for use in pagination. `before` is an object ID that defines your place + * in the list. For instance, if you make a list request and receive 100 objects, + * starting with obj_foo, your subsequent call can include before=obj_foo in order + * to fetch the previous page of the list. + */ + before?: string; + /** + * A list of additional fields to include in the response. Currently the only + * supported value is `step_details.tool_calls[*].file_search.results[*].content` + * to fetch the file search result content. 
+ * + * See the + * [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) + * for more information. + */ + include?: Array; + /** + * Sort order by the `created_at` timestamp of the objects. `asc` for ascending + * order and `desc` for descending order. + */ + order?: 'asc' | 'desc'; +} +export declare namespace Steps { + export { type CodeInterpreterLogs as CodeInterpreterLogs, type CodeInterpreterOutputImage as CodeInterpreterOutputImage, type CodeInterpreterToolCall as CodeInterpreterToolCall, type CodeInterpreterToolCallDelta as CodeInterpreterToolCallDelta, type FileSearchToolCall as FileSearchToolCall, type FileSearchToolCallDelta as FileSearchToolCallDelta, type FunctionToolCall as FunctionToolCall, type FunctionToolCallDelta as FunctionToolCallDelta, type MessageCreationStepDetails as MessageCreationStepDetails, type RunStep as RunStep, type RunStepDelta as RunStepDelta, type RunStepDeltaEvent as RunStepDeltaEvent, type RunStepDeltaMessageDelta as RunStepDeltaMessageDelta, type RunStepInclude as RunStepInclude, type ToolCall as ToolCall, type ToolCallDelta as ToolCallDelta, type ToolCallDeltaObject as ToolCallDeltaObject, type ToolCallsStepDetails as ToolCallsStepDetails, RunStepsPage as RunStepsPage, type StepRetrieveParams as StepRetrieveParams, type StepListParams as StepListParams, }; +} +//# sourceMappingURL=steps.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/resources/beta/threads/runs/steps.d.ts.map b/node_modules/openai/resources/beta/threads/runs/steps.d.ts.map new file mode 100644 index 0000000..ca06aa3 --- /dev/null +++ b/node_modules/openai/resources/beta/threads/runs/steps.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"steps.d.ts","sourceRoot":"","sources":["../../../../src/resources/beta/threads/runs/steps.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,WAAW,EAAE,MAAM,sBAAsB,CAAC;AAEnD,OAAO,KAAK,IAAI,MAAM,kBAAkB,CAAC;AACzC,OAAO,KAAK,QAAQ,MAAM,SAAS,CAAC;AACpC,OAAO,EAAE,UAAU,EAAE,KAAK,gBAAgB,EAAE,MAAM,wBAAwB,CAAC;AAE3E,qBAAa,KAAM,SAAQ,WAAW;IACpC;;OAEG;IACH,QAAQ,CACN,QAAQ,EAAE,MAAM,EAChB,KAAK,EAAE,MAAM,EACb,MAAM,EAAE,MAAM,EACd,KAAK,CAAC,EAAE,kBAAkB,EAC1B,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAC5B,IAAI,CAAC,UAAU,CAAC,OAAO,CAAC;IAC3B,QAAQ,CACN,QAAQ,EAAE,MAAM,EAChB,KAAK,EAAE,MAAM,EACb,MAAM,EAAE,MAAM,EACd,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAC5B,IAAI,CAAC,UAAU,CAAC,OAAO,CAAC;IAkB3B;;OAEG;IACH,IAAI,CACF,QAAQ,EAAE,MAAM,EAChB,KAAK,EAAE,MAAM,EACb,KAAK,CAAC,EAAE,cAAc,EACtB,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAC5B,IAAI,CAAC,WAAW,CAAC,YAAY,EAAE,OAAO,CAAC;IAC1C,IAAI,CACF,QAAQ,EAAE,MAAM,EAChB,KAAK,EAAE,MAAM,EACb,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAC5B,IAAI,CAAC,WAAW,CAAC,YAAY,EAAE,OAAO,CAAC;CAgB3C;AAED,qBAAa,YAAa,SAAQ,UAAU,CAAC,OAAO,CAAC;CAAG;AAExD;;GAEG;AACH,MAAM,WAAW,mBAAmB;IAClC;;OAEG;IACH,KAAK,EAAE,MAAM,CAAC;IAEd;;OAEG;IACH,IAAI,EAAE,MAAM,CAAC;IAEb;;OAEG;IACH,IAAI,CAAC,EAAE,MAAM,CAAC;CACf;AAED,MAAM,WAAW,0BAA0B;IACzC;;OAEG;IACH,KAAK,EAAE,MAAM,CAAC;IAEd;;OAEG;IACH,IAAI,EAAE,OAAO,CAAC;IAEd,KAAK,CAAC,EAAE,0BAA0B,CAAC,KAAK,CAAC;CAC1C;AAED,yBAAiB,0BAA0B,CAAC;IAC1C,UAAiB,KAAK;QACpB;;;WAGG;QACH,OAAO,CAAC,EAAE,MAAM,CAAC;KAClB;CACF;AAED;;GAEG;AACH,MAAM,WAAW,uBAAuB;IACtC;;OAEG;IACH,EAAE,EAAE,MAAM,CAAC;IAEX;;OAEG;IACH,gBAAgB,EAAE,uBAAuB,CAAC,eAAe,CAAC;IAE1D;;;OAGG;IACH,IAAI,EAAE,kBAAkB,CAAC;CAC1B;AAED,yBAAiB,uBAAuB,CAAC;IACvC;;OAEG;IACH,UAAiB,eAAe;QAC9B;;WAEG;QACH,KAAK,EAAE,MAAM,CAAC;QAEd;;;;WAIG;QACH,OAAO,EAAE,KAAK,CAAC,eAAe,CAAC,IAAI,GAAG,eAAe,CAAC,KAAK,CAAC,CAAC;KAC9D;IAED,UAAiB,eAAe,CAAC;QAC/B;;WAEG;QACH,UAAiB,IAAI;YAC
nB;;eAEG;YACH,IAAI,EAAE,MAAM,CAAC;YAEb;;eAEG;YACH,IAAI,EAAE,MAAM,CAAC;SACd;QAED,UAAiB,KAAK;YACpB,KAAK,EAAE,KAAK,CAAC,KAAK,CAAC;YAEnB;;eAEG;YACH,IAAI,EAAE,OAAO,CAAC;SACf;QAED,UAAiB,KAAK,CAAC;YACrB,UAAiB,KAAK;gBACpB;;;mBAGG;gBACH,OAAO,EAAE,MAAM,CAAC;aACjB;SACF;KACF;CACF;AAED;;GAEG;AACH,MAAM,WAAW,4BAA4B;IAC3C;;OAEG;IACH,KAAK,EAAE,MAAM,CAAC;IAEd;;;OAGG;IACH,IAAI,EAAE,kBAAkB,CAAC;IAEzB;;OAEG;IACH,EAAE,CAAC,EAAE,MAAM,CAAC;IAEZ;;OAEG;IACH,gBAAgB,CAAC,EAAE,4BAA4B,CAAC,eAAe,CAAC;CACjE;AAED,yBAAiB,4BAA4B,CAAC;IAC5C;;OAEG;IACH,UAAiB,eAAe;QAC9B;;WAEG;QACH,KAAK,CAAC,EAAE,MAAM,CAAC;QAEf;;;;WAIG;QACH,OAAO,CAAC,EAAE,KAAK,CAAC,QAAQ,CAAC,mBAAmB,GAAG,QAAQ,CAAC,0BAA0B,CAAC,CAAC;KACrF;CACF;AAED,MAAM,WAAW,kBAAkB;IACjC;;OAEG;IACH,EAAE,EAAE,MAAM,CAAC;IAEX;;OAEG;IACH,WAAW,EAAE,kBAAkB,CAAC,UAAU,CAAC;IAE3C;;;OAGG;IACH,IAAI,EAAE,aAAa,CAAC;CACrB;AAED,yBAAiB,kBAAkB,CAAC;IAClC;;OAEG;IACH,UAAiB,UAAU;QACzB;;WAEG;QACH,eAAe,CAAC,EAAE,UAAU,CAAC,cAAc,CAAC;QAE5C;;WAEG;QACH,OAAO,CAAC,EAAE,KAAK,CAAC,UAAU,CAAC,MAAM,CAAC,CAAC;KACpC;IAED,UAAiB,UAAU,CAAC;QAC1B;;WAEG;QACH,UAAiB,cAAc;YAC7B;;eAEG;YACH,MAAM,EAAE,oBAAoB,CAAC;YAE7B;;;eAGG;YACH,eAAe,EAAE,MAAM,CAAC;SACzB;QAED;;WAEG;QACH,UAAiB,MAAM;YACrB;;eAEG;YACH,OAAO,EAAE,MAAM,CAAC;YAEhB;;eAEG;YACH,SAAS,EAAE,MAAM,CAAC;YAElB;;;eAGG;YACH,KAAK,EAAE,MAAM,CAAC;YAEd;;;eAGG;YACH,OAAO,CAAC,EAAE,KAAK,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC;SACjC;QAED,UAAiB,MAAM,CAAC;YACtB,UAAiB,OAAO;gBACtB;;mBAEG;gBACH,IAAI,CAAC,EAAE,MAAM,CAAC;gBAEd;;mBAEG;gBACH,IAAI,CAAC,EAAE,MAAM,CAAC;aACf;SACF;KACF;CACF;AAED,MAAM,WAAW,uBAAuB;IACtC;;OAEG;IACH,WAAW,EAAE,OAAO,CAAC;IAErB;;OAEG;IACH,KAAK,EAAE,MAAM,CAAC;IAEd;;;OAGG;IACH,IAAI,EAAE,aAAa,CAAC;IAEpB;;OAEG;IACH,EAAE,CAAC,EAAE,MAAM,CAAC;CACb;AAED,MAAM,WAAW,gBAAgB;IAC/B;;OAEG;IACH,EAAE,EAAE,MAAM,CAAC;IAEX;;OAEG;IACH,QAAQ,EAAE,gBAAgB,CAAC,QAAQ,CAAC;IAEpC;;;OAGG;IACH,IAAI,EAAE,UAAU,CAAC;CAClB;AAED,yBAAiB,gBAAgB,CAAC;IAChC;;OAEG;IACH,UAAiB,QAAQ;QACvB;;WAEG;QACH,SAAS,EAAE,MAAM,CAAC;QAElB;;WAEG;QACH,IAAI,EAAE,MAAM,CAAC;QAEb;;;;WAIG;QACH,MAAM,EAAE,MAAM,GAAG,IAAI,CAAC;KACvB;CACF;AAED,MAAM,WAAW,qBAAqB;IACpC;;OAEG;IACH,KAAK,EAAE,MAAM,CAAC;IAEd;;;OAGG;IACH,IAAI,EAAE,UAAU,CAAC;IAEjB;;OAEG;IACH,EAAE,CAAC,EAAE,MAAM,CAAC;IAEZ;;OAEG;IACH,QAAQ,CAAC,EAAE,qBAAqB,CAAC,QAAQ,CAAC;CAC3C;AAED,yBAAiB,qBAAqB,CAAC;IACrC;;OAEG;IACH,UAAiB,QAAQ;QACvB;;WAEG;QACH,SAAS,CAAC,EAAE,MAAM,CAAC;QAEnB;;WAEG;QACH,IAAI,CAAC,EAAE,MAAM,CAAC;QAEd;;;;WAIG;QACH,MAAM,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;KACxB;CACF;AAED;;GAEG;AACH,MAAM,WAAW,0BAA0B;IACzC,gBAAgB,EAAE,0BAA0B,CAAC,eAAe,CAAC;IAE7D;;OAEG;IACH,IAAI,EAAE,kBAAkB,CAAC;CAC1B;AAED,yBAAiB,0BAA0B,CAAC;IAC1C,UAAiB,eAAe;QAC9B;;WAEG;QACH,UAAU,EAAE,MAAM,CAAC;KACpB;CACF;AAED;;GAEG;AACH,MAAM,WAAW,OAAO;IACtB;;OAEG;IACH,EAAE,EAAE,MAAM,CAAC;IAEX;;;;OAIG;IACH,YAAY,EAAE,MAAM,CAAC;IAErB;;OAEG;IACH,YAAY,EAAE,MAAM,GAAG,IAAI,CAAC;IAE5B;;OAEG;IACH,YAAY,EAAE,MAAM,GAAG,IAAI,CAAC;IAE5B;;OAEG;IACH,UAAU,EAAE,MAAM,CAAC;IAEnB;;;OAGG;IACH,UAAU,EAAE,MAAM,GAAG,IAAI,CAAC;IAE1B;;OAEG;IACH,SAAS,EAAE,MAAM,GAAG,IAAI,CAAC;IAEzB;;;OAGG;IACH,UAAU,EAAE,OAAO,CAAC,SAAS,GAAG,IAAI,CAAC;IAErC;;;;;OAKG;IACH,QAAQ,EAAE,OAAO,GAAG,IAAI,CAAC;IAEzB;;OAEG;IACH,MAAM,EAAE,iBAAiB,CAAC;IAE1B;;;OAGG;IACH,MAAM,EAAE,MAAM,CAAC;IAEf;;;OAGG;IACH,MAAM,EAAE,aAAa,GAAG,WAAW,GAAG,QAAQ,GAAG,WAAW,GAAG,SAAS,CAAC;IAEzE;;OAEG;IACH,YAAY,EAAE,0BAA0B,GAAG,oBAAoB,CAAC;IAEhE;;;OAGG;IACH,SAAS,EAAE,MAAM,CAAC;IAElB;;OAEG;IACH,IAAI,EAAE,kBAAkB,GAAG,YAAY,CAAC;IAExC;;;OAGG;IACH,KAAK,EAAE,OAAO,CAAC,KAAK,GAAG,IAAI,CAAC;CAC7B;AAED,yBAAiB,OAAO,CAAC;IACvB;;;OAGG;IACH,UAAiB,SAAS;QACxB;;WAEG;QACH,IAAI,EAAE,cAAc,GAAG,qBAAqB,CAAC;QAE7C;;WAEG;QACH,OAAO,EAAE,MAAM,CAAC;KACjB
;IAED;;;OAGG;IACH,UAAiB,KAAK;QACpB;;WAEG;QACH,iBAAiB,EAAE,MAAM,CAAC;QAE1B;;WAEG;QACH,aAAa,EAAE,MAAM,CAAC;QAEtB;;WAEG;QACH,YAAY,EAAE,MAAM,CAAC;KACtB;CACF;AAED;;GAEG;AACH,MAAM,WAAW,YAAY;IAC3B;;OAEG;IACH,YAAY,CAAC,EAAE,wBAAwB,GAAG,mBAAmB,CAAC;CAC/D;AAED;;;GAGG;AACH,MAAM,WAAW,iBAAiB;IAChC;;OAEG;IACH,EAAE,EAAE,MAAM,CAAC;IAEX;;OAEG;IACH,KAAK,EAAE,YAAY,CAAC;IAEpB;;OAEG;IACH,MAAM,EAAE,uBAAuB,CAAC;CACjC;AAED;;GAEG;AACH,MAAM,WAAW,wBAAwB;IACvC;;OAEG;IACH,IAAI,EAAE,kBAAkB,CAAC;IAEzB,gBAAgB,CAAC,EAAE,wBAAwB,CAAC,eAAe,CAAC;CAC7D;AAED,yBAAiB,wBAAwB,CAAC;IACxC,UAAiB,eAAe;QAC9B;;WAEG;QACH,UAAU,CAAC,EAAE,MAAM,CAAC;KACrB;CACF;AAED,MAAM,MAAM,cAAc,GAAG,2DAA2D,CAAC;AAEzF;;GAEG;AACH,MAAM,MAAM,QAAQ,GAAG,uBAAuB,GAAG,kBAAkB,GAAG,gBAAgB,CAAC;AAEvF;;GAEG;AACH,MAAM,MAAM,aAAa,GAAG,4BAA4B,GAAG,uBAAuB,GAAG,qBAAqB,CAAC;AAE3G;;GAEG;AACH,MAAM,WAAW,mBAAmB;IAClC;;OAEG;IACH,IAAI,EAAE,YAAY,CAAC;IAEnB;;;;OAIG;IACH,UAAU,CAAC,EAAE,KAAK,CAAC,aAAa,CAAC,CAAC;CACnC;AAED;;GAEG;AACH,MAAM,WAAW,oBAAoB;IACnC;;;;OAIG;IACH,UAAU,EAAE,KAAK,CAAC,QAAQ,CAAC,CAAC;IAE5B;;OAEG;IACH,IAAI,EAAE,YAAY,CAAC;CACpB;AAED,MAAM,WAAW,kBAAkB;IACjC;;;;;;;;OAQG;IACH,OAAO,CAAC,EAAE,KAAK,CAAC,cAAc,CAAC,CAAC;CACjC;AAED,MAAM,WAAW,cAAe,SAAQ,gBAAgB;IACtD;;;;;OAKG;IACH,MAAM,CAAC,EAAE,MAAM,CAAC;IAEhB;;;;;;;;OAQG;IACH,OAAO,CAAC,EAAE,KAAK,CAAC,cAAc,CAAC,CAAC;IAEhC;;;OAGG;IACH,KAAK,CAAC,EAAE,KAAK,GAAG,MAAM,CAAC;CACxB;AAID,MAAM,CAAC,OAAO,WAAW,KAAK,CAAC;IAC7B,OAAO,EACL,KAAK,mBAAmB,IAAI,mBAAmB,EAC/C,KAAK,0BAA0B,IAAI,0BAA0B,EAC7D,KAAK,uBAAuB,IAAI,uBAAuB,EACvD,KAAK,4BAA4B,IAAI,4BAA4B,EACjE,KAAK,kBAAkB,IAAI,kBAAkB,EAC7C,KAAK,uBAAuB,IAAI,uBAAuB,EACvD,KAAK,gBAAgB,IAAI,gBAAgB,EACzC,KAAK,qBAAqB,IAAI,qBAAqB,EACnD,KAAK,0BAA0B,IAAI,0BAA0B,EAC7D,KAAK,OAAO,IAAI,OAAO,EACvB,KAAK,YAAY,IAAI,YAAY,EACjC,KAAK,iBAAiB,IAAI,iBAAiB,EAC3C,KAAK,wBAAwB,IAAI,wBAAwB,EACzD,KAAK,cAAc,IAAI,cAAc,EACrC,KAAK,QAAQ,IAAI,QAAQ,EACzB,KAAK,aAAa,IAAI,aAAa,EACnC,KAAK,mBAAmB,IAAI,mBAAmB,EAC/C,KAAK,oBAAoB,IAAI,oBAAoB,EACjD,YAAY,IAAI,YAAY,EAC5B,KAAK,kBAAkB,IAAI,kBAAkB,EAC7C,KAAK,cAAc,IAAI,cAAc,GACtC,CAAC;CACH"} \ No newline at end of file diff --git a/node_modules/openai/resources/beta/threads/runs/steps.js b/node_modules/openai/resources/beta/threads/runs/steps.js new file mode 100644 index 0000000..8b75911 --- /dev/null +++ b/node_modules/openai/resources/beta/threads/runs/steps.js @@ -0,0 +1,35 @@ +"use strict"; +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+Object.defineProperty(exports, "__esModule", { value: true }); +exports.RunStepsPage = exports.Steps = void 0; +const resource_1 = require("../../../../resource.js"); +const core_1 = require("../../../../core.js"); +const pagination_1 = require("../../../../pagination.js"); +class Steps extends resource_1.APIResource { + retrieve(threadId, runId, stepId, query = {}, options) { + if ((0, core_1.isRequestOptions)(query)) { + return this.retrieve(threadId, runId, stepId, {}, query); + } + return this._client.get(`/threads/${threadId}/runs/${runId}/steps/${stepId}`, { + query, + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + }); + } + list(threadId, runId, query = {}, options) { + if ((0, core_1.isRequestOptions)(query)) { + return this.list(threadId, runId, {}, query); + } + return this._client.getAPIList(`/threads/${threadId}/runs/${runId}/steps`, RunStepsPage, { + query, + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + }); + } +} +exports.Steps = Steps; +class RunStepsPage extends pagination_1.CursorPage { +} +exports.RunStepsPage = RunStepsPage; +Steps.RunStepsPage = RunStepsPage; +//# sourceMappingURL=steps.js.map \ No newline at end of file diff --git a/node_modules/openai/resources/beta/threads/runs/steps.js.map b/node_modules/openai/resources/beta/threads/runs/steps.js.map new file mode 100644 index 0000000..55f415e --- /dev/null +++ b/node_modules/openai/resources/beta/threads/runs/steps.js.map @@ -0,0 +1 @@ +{"version":3,"file":"steps.js","sourceRoot":"","sources":["../../../../src/resources/beta/threads/runs/steps.ts"],"names":[],"mappings":";AAAA,sFAAsF;;;AAEtF,sDAAmD;AACnD,8CAAoD;AAGpD,0DAA2E;AAE3E,MAAa,KAAM,SAAQ,sBAAW;IAiBpC,QAAQ,CACN,QAAgB,EAChB,KAAa,EACb,MAAc,EACd,QAAkD,EAAE,EACpD,OAA6B;QAE7B,IAAI,IAAA,uBAAgB,EAAC,KAAK,CAAC,EAAE;YAC3B,OAAO,IAAI,CAAC,QAAQ,CAAC,QAAQ,EAAE,KAAK,EAAE,MAAM,EAAE,EAAE,EAAE,KAAK,CAAC,CAAC;SAC1D;QACD,OAAO,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,YAAY,QAAQ,SAAS,KAAK,UAAU,MAAM,EAAE,EAAE;YAC5E,KAAK;YACL,GAAG,OAAO;YACV,OAAO,EAAE,EAAE,aAAa,EAAE,eAAe,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE;SACjE,CAAC,CAAC;IACL,CAAC;IAgBD,IAAI,CACF,QAAgB,EAChB,KAAa,EACb,QAA8C,EAAE,EAChD,OAA6B;QAE7B,IAAI,IAAA,uBAAgB,EAAC,KAAK,CAAC,EAAE;YAC3B,OAAO,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE,KAAK,EAAE,EAAE,EAAE,KAAK,CAAC,CAAC;SAC9C;QACD,OAAO,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,YAAY,QAAQ,SAAS,KAAK,QAAQ,EAAE,YAAY,EAAE;YACvF,KAAK;YACL,GAAG,OAAO;YACV,OAAO,EAAE,EAAE,aAAa,EAAE,eAAe,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE;SACjE,CAAC,CAAC;IACL,CAAC;CACF;AA/DD,sBA+DC;AAED,MAAa,YAAa,SAAQ,uBAAmB;CAAG;AAAxD,oCAAwD;AA2pBxD,KAAK,CAAC,YAAY,GAAG,YAAY,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/resources/beta/threads/runs/steps.mjs b/node_modules/openai/resources/beta/threads/runs/steps.mjs new file mode 100644 index 0000000..1312d24 --- /dev/null +++ b/node_modules/openai/resources/beta/threads/runs/steps.mjs @@ -0,0 +1,30 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+import { APIResource } from "../../../../resource.mjs"; +import { isRequestOptions } from "../../../../core.mjs"; +import { CursorPage } from "../../../../pagination.mjs"; +export class Steps extends APIResource { + retrieve(threadId, runId, stepId, query = {}, options) { + if (isRequestOptions(query)) { + return this.retrieve(threadId, runId, stepId, {}, query); + } + return this._client.get(`/threads/${threadId}/runs/${runId}/steps/${stepId}`, { + query, + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + }); + } + list(threadId, runId, query = {}, options) { + if (isRequestOptions(query)) { + return this.list(threadId, runId, {}, query); + } + return this._client.getAPIList(`/threads/${threadId}/runs/${runId}/steps`, RunStepsPage, { + query, + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + }); + } +} +export class RunStepsPage extends CursorPage { +} +Steps.RunStepsPage = RunStepsPage; +//# sourceMappingURL=steps.mjs.map \ No newline at end of file diff --git a/node_modules/openai/resources/beta/threads/runs/steps.mjs.map b/node_modules/openai/resources/beta/threads/runs/steps.mjs.map new file mode 100644 index 0000000..5501f85 --- /dev/null +++ b/node_modules/openai/resources/beta/threads/runs/steps.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"steps.mjs","sourceRoot":"","sources":["../../../../src/resources/beta/threads/runs/steps.ts"],"names":[],"mappings":"AAAA,sFAAsF;OAE/E,EAAE,WAAW,EAAE;OACf,EAAE,gBAAgB,EAAE;OAGpB,EAAE,UAAU,EAAyB;AAE5C,MAAM,OAAO,KAAM,SAAQ,WAAW;IAiBpC,QAAQ,CACN,QAAgB,EAChB,KAAa,EACb,MAAc,EACd,QAAkD,EAAE,EACpD,OAA6B;QAE7B,IAAI,gBAAgB,CAAC,KAAK,CAAC,EAAE;YAC3B,OAAO,IAAI,CAAC,QAAQ,CAAC,QAAQ,EAAE,KAAK,EAAE,MAAM,EAAE,EAAE,EAAE,KAAK,CAAC,CAAC;SAC1D;QACD,OAAO,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,YAAY,QAAQ,SAAS,KAAK,UAAU,MAAM,EAAE,EAAE;YAC5E,KAAK;YACL,GAAG,OAAO;YACV,OAAO,EAAE,EAAE,aAAa,EAAE,eAAe,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE;SACjE,CAAC,CAAC;IACL,CAAC;IAgBD,IAAI,CACF,QAAgB,EAChB,KAAa,EACb,QAA8C,EAAE,EAChD,OAA6B;QAE7B,IAAI,gBAAgB,CAAC,KAAK,CAAC,EAAE;YAC3B,OAAO,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE,KAAK,EAAE,EAAE,EAAE,KAAK,CAAC,CAAC;SAC9C;QACD,OAAO,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,YAAY,QAAQ,SAAS,KAAK,QAAQ,EAAE,YAAY,EAAE;YACvF,KAAK;YACL,GAAG,OAAO;YACV,OAAO,EAAE,EAAE,aAAa,EAAE,eAAe,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE;SACjE,CAAC,CAAC;IACL,CAAC;CACF;AAED,MAAM,OAAO,YAAa,SAAQ,UAAmB;CAAG;AA2pBxD,KAAK,CAAC,YAAY,GAAG,YAAY,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/resources/beta/threads/threads.d.ts b/node_modules/openai/resources/beta/threads/threads.d.ts new file mode 100644 index 0000000..2c916c0 --- /dev/null +++ b/node_modules/openai/resources/beta/threads/threads.d.ts @@ -0,0 +1,1216 @@ +import { APIResource } from "../../../resource.js"; +import { AssistantStream, ThreadCreateAndRunParamsBaseStream } from "../../../lib/AssistantStream.js"; +import { APIPromise } from "../../../core.js"; +import * as Core from "../../../core.js"; +import * as ThreadsAPI from "./threads.js"; +import * as Shared from "../../shared.js"; +import * as AssistantsAPI from "../assistants.js"; +import * as ChatAPI from "../../chat/chat.js"; +import * as MessagesAPI from "./messages.js"; +import { Annotation, AnnotationDelta, FileCitationAnnotation, FileCitationDeltaAnnotation, FilePathAnnotation, FilePathDeltaAnnotation, ImageFile, ImageFileContentBlock, ImageFileDelta, ImageFileDeltaBlock, ImageURL, ImageURLContentBlock, ImageURLDelta, ImageURLDeltaBlock, Message as MessagesAPIMessage, MessageContent, MessageContentDelta, 
MessageContentPartParam, MessageCreateParams, MessageDeleted, MessageDelta, MessageDeltaEvent, MessageListParams, MessageUpdateParams, Messages, MessagesPage, RefusalContentBlock, RefusalDeltaBlock, Text, TextContentBlock, TextContentBlockParam, TextDelta, TextDeltaBlock } from "./messages.js"; +import * as VectorStoresAPI from "../vector-stores/vector-stores.js"; +import * as RunsAPI from "./runs/runs.js"; +import { RequiredActionFunctionToolCall, Run, RunCreateAndPollParams, RunCreateAndStreamParams, RunCreateParams, RunCreateParamsNonStreaming, RunCreateParamsStreaming, RunListParams, RunStatus, RunStreamParams, RunSubmitToolOutputsAndPollParams, RunSubmitToolOutputsParams, RunSubmitToolOutputsParamsNonStreaming, RunSubmitToolOutputsParamsStreaming, RunSubmitToolOutputsStreamParams, RunUpdateParams, Runs, RunsPage } from "./runs/runs.js"; +import { Stream } from "../../../streaming.js"; +export declare class Threads extends APIResource { + runs: RunsAPI.Runs; + messages: MessagesAPI.Messages; + /** + * Create a thread. + */ + create(body?: ThreadCreateParams, options?: Core.RequestOptions): Core.APIPromise; + create(options?: Core.RequestOptions): Core.APIPromise; + /** + * Retrieves a thread. + */ + retrieve(threadId: string, options?: Core.RequestOptions): Core.APIPromise; + /** + * Modifies a thread. + */ + update(threadId: string, body: ThreadUpdateParams, options?: Core.RequestOptions): Core.APIPromise; + /** + * Delete a thread. + */ + del(threadId: string, options?: Core.RequestOptions): Core.APIPromise; + /** + * Create a thread and run it in one request. + */ + createAndRun(body: ThreadCreateAndRunParamsNonStreaming, options?: Core.RequestOptions): APIPromise; + createAndRun(body: ThreadCreateAndRunParamsStreaming, options?: Core.RequestOptions): APIPromise>; + createAndRun(body: ThreadCreateAndRunParamsBase, options?: Core.RequestOptions): APIPromise | RunsAPI.Run>; + /** + * A helper to create a thread, start a run and then poll for a terminal state. + * More information on Run lifecycles can be found here: + * https://platform.openai.com/docs/assistants/how-it-works/runs-and-run-steps + */ + createAndRunPoll(body: ThreadCreateAndRunParamsNonStreaming, options?: Core.RequestOptions & { + pollIntervalMs?: number; + }): Promise; + /** + * Create a thread and stream the run back + */ + createAndRunStream(body: ThreadCreateAndRunParamsBaseStream, options?: Core.RequestOptions): AssistantStream; +} +/** + * Specifies the format that the model must output. Compatible with + * [GPT-4o](https://platform.openai.com/docs/models#gpt-4o), + * [GPT-4 Turbo](https://platform.openai.com/docs/models#gpt-4-turbo-and-gpt-4), + * and all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. + * + * Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured + * Outputs which ensures the model will match your supplied JSON schema. Learn more + * in the + * [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). + * + * Setting to `{ "type": "json_object" }` enables JSON mode, which ensures the + * message the model generates is valid JSON. + * + * **Important:** when using JSON mode, you **must** also instruct the model to + * produce JSON yourself via a system or user message. Without this, the model may + * generate an unending stream of whitespace until the generation reaches the token + * limit, resulting in a long-running and seemingly "stuck" request. 
Also note that + * the message content may be partially cut off if `finish_reason="length"`, which + * indicates the generation exceeded `max_tokens` or the conversation exceeded the + * max context length. + */ +export type AssistantResponseFormatOption = 'auto' | Shared.ResponseFormatText | Shared.ResponseFormatJSONObject | Shared.ResponseFormatJSONSchema; +/** + * Specifies a tool the model should use. Use to force the model to call a specific + * tool. + */ +export interface AssistantToolChoice { + /** + * The type of the tool. If type is `function`, the function name must be set + */ + type: 'function' | 'code_interpreter' | 'file_search'; + function?: AssistantToolChoiceFunction; +} +export interface AssistantToolChoiceFunction { + /** + * The name of the function to call. + */ + name: string; +} +/** + * Controls which (if any) tool is called by the model. `none` means the model will + * not call any tools and instead generates a message. `auto` is the default value + * and means the model can pick between generating a message or calling one or more + * tools. `required` means the model must call one or more tools before responding + * to the user. Specifying a particular tool like `{"type": "file_search"}` or + * `{"type": "function", "function": {"name": "my_function"}}` forces the model to + * call that tool. + */ +export type AssistantToolChoiceOption = 'none' | 'auto' | 'required' | AssistantToolChoice; +/** + * Represents a thread that contains + * [messages](https://platform.openai.com/docs/api-reference/messages). + */ +export interface Thread { + /** + * The identifier, which can be referenced in API endpoints. + */ + id: string; + /** + * The Unix timestamp (in seconds) for when the thread was created. + */ + created_at: number; + /** + * Set of 16 key-value pairs that can be attached to an object. This can be useful + * for storing additional information about the object in a structured format. Keys + * can be a maximum of 64 characters long and values can be a maxium of 512 + * characters long. + */ + metadata: unknown | null; + /** + * The object type, which is always `thread`. + */ + object: 'thread'; + /** + * A set of resources that are made available to the assistant's tools in this + * thread. The resources are specific to the type of tool. For example, the + * `code_interpreter` tool requires a list of file IDs, while the `file_search` + * tool requires a list of vector store IDs. + */ + tool_resources: Thread.ToolResources | null; +} +export declare namespace Thread { + /** + * A set of resources that are made available to the assistant's tools in this + * thread. The resources are specific to the type of tool. For example, the + * `code_interpreter` tool requires a list of file IDs, while the `file_search` + * tool requires a list of vector store IDs. + */ + interface ToolResources { + code_interpreter?: ToolResources.CodeInterpreter; + file_search?: ToolResources.FileSearch; + } + namespace ToolResources { + interface CodeInterpreter { + /** + * A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made + * available to the `code_interpreter` tool. There can be a maximum of 20 files + * associated with the tool. + */ + file_ids?: Array; + } + interface FileSearch { + /** + * The + * [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + * attached to this thread. There can be a maximum of 1 vector store attached to + * the thread. 
+ */ + vector_store_ids?: Array; + } + } +} +export interface ThreadDeleted { + id: string; + deleted: boolean; + object: 'thread.deleted'; +} +export interface ThreadCreateParams { + /** + * A list of [messages](https://platform.openai.com/docs/api-reference/messages) to + * start the thread with. + */ + messages?: Array; + /** + * Set of 16 key-value pairs that can be attached to an object. This can be useful + * for storing additional information about the object in a structured format. Keys + * can be a maximum of 64 characters long and values can be a maxium of 512 + * characters long. + */ + metadata?: unknown | null; + /** + * A set of resources that are made available to the assistant's tools in this + * thread. The resources are specific to the type of tool. For example, the + * `code_interpreter` tool requires a list of file IDs, while the `file_search` + * tool requires a list of vector store IDs. + */ + tool_resources?: ThreadCreateParams.ToolResources | null; +} +export declare namespace ThreadCreateParams { + interface Message { + /** + * The text contents of the message. + */ + content: string | Array; + /** + * The role of the entity that is creating the message. Allowed values include: + * + * - `user`: Indicates the message is sent by an actual user and should be used in + * most cases to represent user-generated messages. + * - `assistant`: Indicates the message is generated by the assistant. Use this + * value to insert messages from the assistant into the conversation. + */ + role: 'user' | 'assistant'; + /** + * A list of files attached to the message, and the tools they should be added to. + */ + attachments?: Array | null; + /** + * Set of 16 key-value pairs that can be attached to an object. This can be useful + * for storing additional information about the object in a structured format. Keys + * can be a maximum of 64 characters long and values can be a maxium of 512 + * characters long. + */ + metadata?: unknown | null; + } + namespace Message { + interface Attachment { + /** + * The ID of the file to attach to the message. + */ + file_id?: string; + /** + * The tools to add this file to. + */ + tools?: Array; + } + namespace Attachment { + interface FileSearch { + /** + * The type of tool being defined: `file_search` + */ + type: 'file_search'; + } + } + } + /** + * A set of resources that are made available to the assistant's tools in this + * thread. The resources are specific to the type of tool. For example, the + * `code_interpreter` tool requires a list of file IDs, while the `file_search` + * tool requires a list of vector store IDs. + */ + interface ToolResources { + code_interpreter?: ToolResources.CodeInterpreter; + file_search?: ToolResources.FileSearch; + } + namespace ToolResources { + interface CodeInterpreter { + /** + * A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made + * available to the `code_interpreter` tool. There can be a maximum of 20 files + * associated with the tool. + */ + file_ids?: Array; + } + interface FileSearch { + /** + * The + * [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + * attached to this thread. There can be a maximum of 1 vector store attached to + * the thread. + */ + vector_store_ids?: Array; + /** + * A helper to create a + * [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + * with file_ids and attach it to this thread. There can be a maximum of 1 vector + * store attached to the thread. 
+ */ + vector_stores?: Array; + } + namespace FileSearch { + interface VectorStore { + /** + * The chunking strategy used to chunk the file(s). If not set, will use the `auto` + * strategy. Only applicable if `file_ids` is non-empty. + */ + chunking_strategy?: VectorStoresAPI.FileChunkingStrategyParam; + /** + * A list of [file](https://platform.openai.com/docs/api-reference/files) IDs to + * add to the vector store. There can be a maximum of 10000 files in a vector + * store. + */ + file_ids?: Array; + /** + * Set of 16 key-value pairs that can be attached to a vector store. This can be + * useful for storing additional information about the vector store in a structured + * format. Keys can be a maximum of 64 characters long and values can be a maxium + * of 512 characters long. + */ + metadata?: unknown; + } + } + } +} +export interface ThreadUpdateParams { + /** + * Set of 16 key-value pairs that can be attached to an object. This can be useful + * for storing additional information about the object in a structured format. Keys + * can be a maximum of 64 characters long and values can be a maxium of 512 + * characters long. + */ + metadata?: unknown | null; + /** + * A set of resources that are made available to the assistant's tools in this + * thread. The resources are specific to the type of tool. For example, the + * `code_interpreter` tool requires a list of file IDs, while the `file_search` + * tool requires a list of vector store IDs. + */ + tool_resources?: ThreadUpdateParams.ToolResources | null; +} +export declare namespace ThreadUpdateParams { + /** + * A set of resources that are made available to the assistant's tools in this + * thread. The resources are specific to the type of tool. For example, the + * `code_interpreter` tool requires a list of file IDs, while the `file_search` + * tool requires a list of vector store IDs. + */ + interface ToolResources { + code_interpreter?: ToolResources.CodeInterpreter; + file_search?: ToolResources.FileSearch; + } + namespace ToolResources { + interface CodeInterpreter { + /** + * A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made + * available to the `code_interpreter` tool. There can be a maximum of 20 files + * associated with the tool. + */ + file_ids?: Array; + } + interface FileSearch { + /** + * The + * [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + * attached to this thread. There can be a maximum of 1 vector store attached to + * the thread. + */ + vector_store_ids?: Array; + } + } +} +export type ThreadCreateAndRunParams = ThreadCreateAndRunParamsNonStreaming | ThreadCreateAndRunParamsStreaming; +export interface ThreadCreateAndRunParamsBase { + /** + * The ID of the + * [assistant](https://platform.openai.com/docs/api-reference/assistants) to use to + * execute this run. + */ + assistant_id: string; + /** + * Override the default system message of the assistant. This is useful for + * modifying the behavior on a per-run basis. + */ + instructions?: string | null; + /** + * The maximum number of completion tokens that may be used over the course of the + * run. The run will make a best effort to use only the number of completion tokens + * specified, across multiple turns of the run. If the run exceeds the number of + * completion tokens specified, the run will end with status `incomplete`. See + * `incomplete_details` for more info. 
+ */ + max_completion_tokens?: number | null; + /** + * The maximum number of prompt tokens that may be used over the course of the run. + * The run will make a best effort to use only the number of prompt tokens + * specified, across multiple turns of the run. If the run exceeds the number of + * prompt tokens specified, the run will end with status `incomplete`. See + * `incomplete_details` for more info. + */ + max_prompt_tokens?: number | null; + /** + * Set of 16 key-value pairs that can be attached to an object. This can be useful + * for storing additional information about the object in a structured format. Keys + * can be a maximum of 64 characters long and values can be a maxium of 512 + * characters long. + */ + metadata?: unknown | null; + /** + * The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to + * be used to execute this run. If a value is provided here, it will override the + * model associated with the assistant. If not, the model associated with the + * assistant will be used. + */ + model?: (string & {}) | ChatAPI.ChatModel | null; + /** + * Whether to enable + * [parallel function calling](https://platform.openai.com/docs/guides/function-calling#configuring-parallel-function-calling) + * during tool use. + */ + parallel_tool_calls?: boolean; + /** + * Specifies the format that the model must output. Compatible with + * [GPT-4o](https://platform.openai.com/docs/models#gpt-4o), + * [GPT-4 Turbo](https://platform.openai.com/docs/models#gpt-4-turbo-and-gpt-4), + * and all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. + * + * Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured + * Outputs which ensures the model will match your supplied JSON schema. Learn more + * in the + * [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). + * + * Setting to `{ "type": "json_object" }` enables JSON mode, which ensures the + * message the model generates is valid JSON. + * + * **Important:** when using JSON mode, you **must** also instruct the model to + * produce JSON yourself via a system or user message. Without this, the model may + * generate an unending stream of whitespace until the generation reaches the token + * limit, resulting in a long-running and seemingly "stuck" request. Also note that + * the message content may be partially cut off if `finish_reason="length"`, which + * indicates the generation exceeded `max_tokens` or the conversation exceeded the + * max context length. + */ + response_format?: AssistantResponseFormatOption | null; + /** + * If `true`, returns a stream of events that happen during the Run as server-sent + * events, terminating when the Run enters a terminal state with a `data: [DONE]` + * message. + */ + stream?: boolean | null; + /** + * What sampling temperature to use, between 0 and 2. Higher values like 0.8 will + * make the output more random, while lower values like 0.2 will make it more + * focused and deterministic. + */ + temperature?: number | null; + /** + * If no thread is provided, an empty thread will be created. + */ + thread?: ThreadCreateAndRunParams.Thread; + /** + * Controls which (if any) tool is called by the model. `none` means the model will + * not call any tools and instead generates a message. `auto` is the default value + * and means the model can pick between generating a message or calling one or more + * tools. `required` means the model must call one or more tools before responding + * to the user. 
Specifying a particular tool like `{"type": "file_search"}` or + * `{"type": "function", "function": {"name": "my_function"}}` forces the model to + * call that tool. + */ + tool_choice?: AssistantToolChoiceOption | null; + /** + * A set of resources that are used by the assistant's tools. The resources are + * specific to the type of tool. For example, the `code_interpreter` tool requires + * a list of file IDs, while the `file_search` tool requires a list of vector store + * IDs. + */ + tool_resources?: ThreadCreateAndRunParams.ToolResources | null; + /** + * Override the tools the assistant can use for this run. This is useful for + * modifying the behavior on a per-run basis. + */ + tools?: Array | null; + /** + * An alternative to sampling with temperature, called nucleus sampling, where the + * model considers the results of the tokens with top_p probability mass. So 0.1 + * means only the tokens comprising the top 10% probability mass are considered. + * + * We generally recommend altering this or temperature but not both. + */ + top_p?: number | null; + /** + * Controls for how a thread will be truncated prior to the run. Use this to + * control the intial context window of the run. + */ + truncation_strategy?: ThreadCreateAndRunParams.TruncationStrategy | null; +} +export declare namespace ThreadCreateAndRunParams { + /** + * If no thread is provided, an empty thread will be created. + */ + interface Thread { + /** + * A list of [messages](https://platform.openai.com/docs/api-reference/messages) to + * start the thread with. + */ + messages?: Array; + /** + * Set of 16 key-value pairs that can be attached to an object. This can be useful + * for storing additional information about the object in a structured format. Keys + * can be a maximum of 64 characters long and values can be a maxium of 512 + * characters long. + */ + metadata?: unknown | null; + /** + * A set of resources that are made available to the assistant's tools in this + * thread. The resources are specific to the type of tool. For example, the + * `code_interpreter` tool requires a list of file IDs, while the `file_search` + * tool requires a list of vector store IDs. + */ + tool_resources?: Thread.ToolResources | null; + } + namespace Thread { + interface Message { + /** + * The text contents of the message. + */ + content: string | Array; + /** + * The role of the entity that is creating the message. Allowed values include: + * + * - `user`: Indicates the message is sent by an actual user and should be used in + * most cases to represent user-generated messages. + * - `assistant`: Indicates the message is generated by the assistant. Use this + * value to insert messages from the assistant into the conversation. + */ + role: 'user' | 'assistant'; + /** + * A list of files attached to the message, and the tools they should be added to. + */ + attachments?: Array | null; + /** + * Set of 16 key-value pairs that can be attached to an object. This can be useful + * for storing additional information about the object in a structured format. Keys + * can be a maximum of 64 characters long and values can be a maxium of 512 + * characters long. + */ + metadata?: unknown | null; + } + namespace Message { + interface Attachment { + /** + * The ID of the file to attach to the message. + */ + file_id?: string; + /** + * The tools to add this file to. 
+ */ + tools?: Array; + } + namespace Attachment { + interface FileSearch { + /** + * The type of tool being defined: `file_search` + */ + type: 'file_search'; + } + } + } + /** + * A set of resources that are made available to the assistant's tools in this + * thread. The resources are specific to the type of tool. For example, the + * `code_interpreter` tool requires a list of file IDs, while the `file_search` + * tool requires a list of vector store IDs. + */ + interface ToolResources { + code_interpreter?: ToolResources.CodeInterpreter; + file_search?: ToolResources.FileSearch; + } + namespace ToolResources { + interface CodeInterpreter { + /** + * A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made + * available to the `code_interpreter` tool. There can be a maximum of 20 files + * associated with the tool. + */ + file_ids?: Array; + } + interface FileSearch { + /** + * The + * [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + * attached to this thread. There can be a maximum of 1 vector store attached to + * the thread. + */ + vector_store_ids?: Array; + /** + * A helper to create a + * [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + * with file_ids and attach it to this thread. There can be a maximum of 1 vector + * store attached to the thread. + */ + vector_stores?: Array; + } + namespace FileSearch { + interface VectorStore { + /** + * The chunking strategy used to chunk the file(s). If not set, will use the `auto` + * strategy. Only applicable if `file_ids` is non-empty. + */ + chunking_strategy?: VectorStoresAPI.FileChunkingStrategyParam; + /** + * A list of [file](https://platform.openai.com/docs/api-reference/files) IDs to + * add to the vector store. There can be a maximum of 10000 files in a vector + * store. + */ + file_ids?: Array; + /** + * Set of 16 key-value pairs that can be attached to a vector store. This can be + * useful for storing additional information about the vector store in a structured + * format. Keys can be a maximum of 64 characters long and values can be a maxium + * of 512 characters long. + */ + metadata?: unknown; + } + } + } + } + /** + * A set of resources that are used by the assistant's tools. The resources are + * specific to the type of tool. For example, the `code_interpreter` tool requires + * a list of file IDs, while the `file_search` tool requires a list of vector store + * IDs. + */ + interface ToolResources { + code_interpreter?: ToolResources.CodeInterpreter; + file_search?: ToolResources.FileSearch; + } + namespace ToolResources { + interface CodeInterpreter { + /** + * A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made + * available to the `code_interpreter` tool. There can be a maximum of 20 files + * associated with the tool. + */ + file_ids?: Array; + } + interface FileSearch { + /** + * The ID of the + * [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + * attached to this assistant. There can be a maximum of 1 vector store attached to + * the assistant. + */ + vector_store_ids?: Array; + } + } + /** + * Controls for how a thread will be truncated prior to the run. Use this to + * control the intial context window of the run. + */ + interface TruncationStrategy { + /** + * The truncation strategy to use for the thread. The default is `auto`. If set to + * `last_messages`, the thread will be truncated to the n most recent messages in + * the thread. 
When set to `auto`, messages in the middle of the thread will be + * dropped to fit the context length of the model, `max_prompt_tokens`. + */ + type: 'auto' | 'last_messages'; + /** + * The number of most recent messages from the thread when constructing the context + * for the run. + */ + last_messages?: number | null; + } + type ThreadCreateAndRunParamsNonStreaming = ThreadsAPI.ThreadCreateAndRunParamsNonStreaming; + type ThreadCreateAndRunParamsStreaming = ThreadsAPI.ThreadCreateAndRunParamsStreaming; +} +export interface ThreadCreateAndRunParamsNonStreaming extends ThreadCreateAndRunParamsBase { + /** + * If `true`, returns a stream of events that happen during the Run as server-sent + * events, terminating when the Run enters a terminal state with a `data: [DONE]` + * message. + */ + stream?: false | null; +} +export interface ThreadCreateAndRunParamsStreaming extends ThreadCreateAndRunParamsBase { + /** + * If `true`, returns a stream of events that happen during the Run as server-sent + * events, terminating when the Run enters a terminal state with a `data: [DONE]` + * message. + */ + stream: true; +} +export interface ThreadCreateAndRunPollParams { + /** + * The ID of the + * [assistant](https://platform.openai.com/docs/api-reference/assistants) to use to + * execute this run. + */ + assistant_id: string; + /** + * Override the default system message of the assistant. This is useful for + * modifying the behavior on a per-run basis. + */ + instructions?: string | null; + /** + * The maximum number of completion tokens that may be used over the course of the + * run. The run will make a best effort to use only the number of completion tokens + * specified, across multiple turns of the run. If the run exceeds the number of + * completion tokens specified, the run will end with status `incomplete`. See + * `incomplete_details` for more info. + */ + max_completion_tokens?: number | null; + /** + * The maximum number of prompt tokens that may be used over the course of the run. + * The run will make a best effort to use only the number of prompt tokens + * specified, across multiple turns of the run. If the run exceeds the number of + * prompt tokens specified, the run will end with status `incomplete`. See + * `incomplete_details` for more info. + */ + max_prompt_tokens?: number | null; + /** + * Set of 16 key-value pairs that can be attached to an object. This can be useful + * for storing additional information about the object in a structured format. Keys + * can be a maximum of 64 characters long and values can be a maxium of 512 + * characters long. + */ + metadata?: unknown | null; + /** + * The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to + * be used to execute this run. If a value is provided here, it will override the + * model associated with the assistant. If not, the model associated with the + * assistant will be used. + */ + model?: (string & {}) | 'gpt-4o' | 'gpt-4o-2024-05-13' | 'gpt-4-turbo' | 'gpt-4-turbo-2024-04-09' | 'gpt-4-0125-preview' | 'gpt-4-turbo-preview' | 'gpt-4-1106-preview' | 'gpt-4-vision-preview' | 'gpt-4' | 'gpt-4-0314' | 'gpt-4-0613' | 'gpt-4-32k' | 'gpt-4-32k-0314' | 'gpt-4-32k-0613' | 'gpt-3.5-turbo' | 'gpt-3.5-turbo-16k' | 'gpt-3.5-turbo-0613' | 'gpt-3.5-turbo-1106' | 'gpt-3.5-turbo-0125' | 'gpt-3.5-turbo-16k-0613' | null; + /** + * Specifies the format that the model must output. 
Compatible with + * [GPT-4o](https://platform.openai.com/docs/models/gpt-4o), + * [GPT-4 Turbo](https://platform.openai.com/docs/models/gpt-4-turbo-and-gpt-4), + * and all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. + * + * Setting to `{ "type": "json_object" }` enables JSON mode, which guarantees the + * message the model generates is valid JSON. + * + * **Important:** when using JSON mode, you **must** also instruct the model to + * produce JSON yourself via a system or user message. Without this, the model may + * generate an unending stream of whitespace until the generation reaches the token + * limit, resulting in a long-running and seemingly "stuck" request. Also note that + * the message content may be partially cut off if `finish_reason="length"`, which + * indicates the generation exceeded `max_tokens` or the conversation exceeded the + * max context length. + */ + response_format?: AssistantResponseFormatOption | null; + /** + * What sampling temperature to use, between 0 and 2. Higher values like 0.8 will + * make the output more random, while lower values like 0.2 will make it more + * focused and deterministic. + */ + temperature?: number | null; + /** + * If no thread is provided, an empty thread will be created. + */ + thread?: ThreadCreateAndRunPollParams.Thread; + /** + * Controls which (if any) tool is called by the model. `none` means the model will + * not call any tools and instead generates a message. `auto` is the default value + * and means the model can pick between generating a message or calling one or more + * tools. `required` means the model must call one or more tools before responding + * to the user. Specifying a particular tool like `{"type": "file_search"}` or + * `{"type": "function", "function": {"name": "my_function"}}` forces the model to + * call that tool. + */ + tool_choice?: AssistantToolChoiceOption | null; + /** + * A set of resources that are used by the assistant's tools. The resources are + * specific to the type of tool. For example, the `code_interpreter` tool requires + * a list of file IDs, while the `file_search` tool requires a list of vector store + * IDs. + */ + tool_resources?: ThreadCreateAndRunPollParams.ToolResources | null; + /** + * Override the tools the assistant can use for this run. This is useful for + * modifying the behavior on a per-run basis. + */ + tools?: Array | null; + /** + * An alternative to sampling with temperature, called nucleus sampling, where the + * model considers the results of the tokens with top_p probability mass. So 0.1 + * means only the tokens comprising the top 10% probability mass are considered. + * + * We generally recommend altering this or temperature but not both. + */ + top_p?: number | null; + /** + * Controls for how a thread will be truncated prior to the run. Use this to + * control the intial context window of the run. + */ + truncation_strategy?: ThreadCreateAndRunPollParams.TruncationStrategy | null; +} +export declare namespace ThreadCreateAndRunPollParams { + /** + * If no thread is provided, an empty thread will be created. + */ + interface Thread { + /** + * A list of [messages](https://platform.openai.com/docs/api-reference/messages) to + * start the thread with. + */ + messages?: Array; + /** + * Set of 16 key-value pairs that can be attached to an object. This can be useful + * for storing additional information about the object in a structured format. Keys + * can be a maximum of 64 characters long and values can be a maxium of 512 + * characters long. 
+ */ + metadata?: unknown | null; + /** + * A set of resources that are made available to the assistant's tools in this + * thread. The resources are specific to the type of tool. For example, the + * `code_interpreter` tool requires a list of file IDs, while the `file_search` + * tool requires a list of vector store IDs. + */ + tool_resources?: Thread.ToolResources | null; + } + namespace Thread { + interface Message { + /** + * The text contents of the message. + */ + content: string | Array; + /** + * The role of the entity that is creating the message. Allowed values include: + * + * - `user`: Indicates the message is sent by an actual user and should be used in + * most cases to represent user-generated messages. + * - `assistant`: Indicates the message is generated by the assistant. Use this + * value to insert messages from the assistant into the conversation. + */ + role: 'user' | 'assistant'; + /** + * A list of files attached to the message, and the tools they should be added to. + */ + attachments?: Array | null; + /** + * Set of 16 key-value pairs that can be attached to an object. This can be useful + * for storing additional information about the object in a structured format. Keys + * can be a maximum of 64 characters long and values can be a maxium of 512 + * characters long. + */ + metadata?: unknown | null; + } + namespace Message { + interface Attachment { + /** + * The ID of the file to attach to the message. + */ + file_id?: string; + /** + * The tools to add this file to. + */ + tools?: Array; + } + } + /** + * A set of resources that are made available to the assistant's tools in this + * thread. The resources are specific to the type of tool. For example, the + * `code_interpreter` tool requires a list of file IDs, while the `file_search` + * tool requires a list of vector store IDs. + */ + interface ToolResources { + code_interpreter?: ToolResources.CodeInterpreter; + file_search?: ToolResources.FileSearch; + } + namespace ToolResources { + interface CodeInterpreter { + /** + * A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made + * available to the `code_interpreter` tool. There can be a maximum of 20 files + * associated with the tool. + */ + file_ids?: Array; + } + interface FileSearch { + /** + * The + * [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + * attached to this thread. There can be a maximum of 1 vector store attached to + * the thread. + */ + vector_store_ids?: Array; + /** + * A helper to create a + * [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + * with file_ids and attach it to this thread. There can be a maximum of 1 vector + * store attached to the thread. + */ + vector_stores?: Array; + } + namespace FileSearch { + interface VectorStore { + /** + * A list of [file](https://platform.openai.com/docs/api-reference/files) IDs to + * add to the vector store. There can be a maximum of 10000 files in a vector + * store. + */ + file_ids?: Array; + /** + * Set of 16 key-value pairs that can be attached to a vector store. This can be + * useful for storing additional information about the vector store in a structured + * format. Keys can be a maximum of 64 characters long and values can be a maxium + * of 512 characters long. + */ + metadata?: unknown; + } + } + } + } + /** + * A set of resources that are used by the assistant's tools. The resources are + * specific to the type of tool. 
For example, the `code_interpreter` tool requires + * a list of file IDs, while the `file_search` tool requires a list of vector store + * IDs. + */ + interface ToolResources { + code_interpreter?: ToolResources.CodeInterpreter; + file_search?: ToolResources.FileSearch; + } + namespace ToolResources { + interface CodeInterpreter { + /** + * A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made + * available to the `code_interpreter` tool. There can be a maximum of 20 files + * associated with the tool. + */ + file_ids?: Array; + } + interface FileSearch { + /** + * The ID of the + * [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + * attached to this assistant. There can be a maximum of 1 vector store attached to + * the assistant. + */ + vector_store_ids?: Array; + } + } + /** + * Controls for how a thread will be truncated prior to the run. Use this to + * control the intial context window of the run. + */ + interface TruncationStrategy { + /** + * The truncation strategy to use for the thread. The default is `auto`. If set to + * `last_messages`, the thread will be truncated to the n most recent messages in + * the thread. When set to `auto`, messages in the middle of the thread will be + * dropped to fit the context length of the model, `max_prompt_tokens`. + */ + type: 'auto' | 'last_messages'; + /** + * The number of most recent messages from the thread when constructing the context + * for the run. + */ + last_messages?: number | null; + } +} +export interface ThreadCreateAndRunStreamParams { + /** + * The ID of the + * [assistant](https://platform.openai.com/docs/api-reference/assistants) to use to + * execute this run. + */ + assistant_id: string; + /** + * Override the default system message of the assistant. This is useful for + * modifying the behavior on a per-run basis. + */ + instructions?: string | null; + /** + * The maximum number of completion tokens that may be used over the course of the + * run. The run will make a best effort to use only the number of completion tokens + * specified, across multiple turns of the run. If the run exceeds the number of + * completion tokens specified, the run will end with status `incomplete`. See + * `incomplete_details` for more info. + */ + max_completion_tokens?: number | null; + /** + * The maximum number of prompt tokens that may be used over the course of the run. + * The run will make a best effort to use only the number of prompt tokens + * specified, across multiple turns of the run. If the run exceeds the number of + * prompt tokens specified, the run will end with status `incomplete`. See + * `incomplete_details` for more info. + */ + max_prompt_tokens?: number | null; + /** + * Set of 16 key-value pairs that can be attached to an object. This can be useful + * for storing additional information about the object in a structured format. Keys + * can be a maximum of 64 characters long and values can be a maxium of 512 + * characters long. + */ + metadata?: unknown | null; + /** + * The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to + * be used to execute this run. If a value is provided here, it will override the + * model associated with the assistant. If not, the model associated with the + * assistant will be used. 
+ */ + model?: (string & {}) | 'gpt-4o' | 'gpt-4o-2024-05-13' | 'gpt-4-turbo' | 'gpt-4-turbo-2024-04-09' | 'gpt-4-0125-preview' | 'gpt-4-turbo-preview' | 'gpt-4-1106-preview' | 'gpt-4-vision-preview' | 'gpt-4' | 'gpt-4-0314' | 'gpt-4-0613' | 'gpt-4-32k' | 'gpt-4-32k-0314' | 'gpt-4-32k-0613' | 'gpt-3.5-turbo' | 'gpt-3.5-turbo-16k' | 'gpt-3.5-turbo-0613' | 'gpt-3.5-turbo-1106' | 'gpt-3.5-turbo-0125' | 'gpt-3.5-turbo-16k-0613' | null; + /** + * Specifies the format that the model must output. Compatible with + * [GPT-4o](https://platform.openai.com/docs/models/gpt-4o), + * [GPT-4 Turbo](https://platform.openai.com/docs/models/gpt-4-turbo-and-gpt-4), + * and all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. + * + * Setting to `{ "type": "json_object" }` enables JSON mode, which guarantees the + * message the model generates is valid JSON. + * + * **Important:** when using JSON mode, you **must** also instruct the model to + * produce JSON yourself via a system or user message. Without this, the model may + * generate an unending stream of whitespace until the generation reaches the token + * limit, resulting in a long-running and seemingly "stuck" request. Also note that + * the message content may be partially cut off if `finish_reason="length"`, which + * indicates the generation exceeded `max_tokens` or the conversation exceeded the + * max context length. + */ + response_format?: AssistantResponseFormatOption | null; + /** + * What sampling temperature to use, between 0 and 2. Higher values like 0.8 will + * make the output more random, while lower values like 0.2 will make it more + * focused and deterministic. + */ + temperature?: number | null; + /** + * If no thread is provided, an empty thread will be created. + */ + thread?: ThreadCreateAndRunStreamParams.Thread; + /** + * Controls which (if any) tool is called by the model. `none` means the model will + * not call any tools and instead generates a message. `auto` is the default value + * and means the model can pick between generating a message or calling one or more + * tools. `required` means the model must call one or more tools before responding + * to the user. Specifying a particular tool like `{"type": "file_search"}` or + * `{"type": "function", "function": {"name": "my_function"}}` forces the model to + * call that tool. + */ + tool_choice?: AssistantToolChoiceOption | null; + /** + * A set of resources that are used by the assistant's tools. The resources are + * specific to the type of tool. For example, the `code_interpreter` tool requires + * a list of file IDs, while the `file_search` tool requires a list of vector store + * IDs. + */ + tool_resources?: ThreadCreateAndRunStreamParams.ToolResources | null; + /** + * Override the tools the assistant can use for this run. This is useful for + * modifying the behavior on a per-run basis. + */ + tools?: Array | null; + /** + * An alternative to sampling with temperature, called nucleus sampling, where the + * model considers the results of the tokens with top_p probability mass. So 0.1 + * means only the tokens comprising the top 10% probability mass are considered. + * + * We generally recommend altering this or temperature but not both. + */ + top_p?: number | null; + /** + * Controls for how a thread will be truncated prior to the run. Use this to + * control the intial context window of the run. 
+ */ + truncation_strategy?: ThreadCreateAndRunStreamParams.TruncationStrategy | null; +} +export declare namespace ThreadCreateAndRunStreamParams { + /** + * If no thread is provided, an empty thread will be created. + */ + interface Thread { + /** + * A list of [messages](https://platform.openai.com/docs/api-reference/messages) to + * start the thread with. + */ + messages?: Array; + /** + * Set of 16 key-value pairs that can be attached to an object. This can be useful + * for storing additional information about the object in a structured format. Keys + * can be a maximum of 64 characters long and values can be a maxium of 512 + * characters long. + */ + metadata?: unknown | null; + /** + * A set of resources that are made available to the assistant's tools in this + * thread. The resources are specific to the type of tool. For example, the + * `code_interpreter` tool requires a list of file IDs, while the `file_search` + * tool requires a list of vector store IDs. + */ + tool_resources?: Thread.ToolResources | null; + } + namespace Thread { + interface Message { + /** + * The text contents of the message. + */ + content: string | Array; + /** + * The role of the entity that is creating the message. Allowed values include: + * + * - `user`: Indicates the message is sent by an actual user and should be used in + * most cases to represent user-generated messages. + * - `assistant`: Indicates the message is generated by the assistant. Use this + * value to insert messages from the assistant into the conversation. + */ + role: 'user' | 'assistant'; + /** + * A list of files attached to the message, and the tools they should be added to. + */ + attachments?: Array | null; + /** + * Set of 16 key-value pairs that can be attached to an object. This can be useful + * for storing additional information about the object in a structured format. Keys + * can be a maximum of 64 characters long and values can be a maxium of 512 + * characters long. + */ + metadata?: unknown | null; + } + namespace Message { + interface Attachment { + /** + * The ID of the file to attach to the message. + */ + file_id?: string; + /** + * The tools to add this file to. + */ + tools?: Array; + } + } + /** + * A set of resources that are made available to the assistant's tools in this + * thread. The resources are specific to the type of tool. For example, the + * `code_interpreter` tool requires a list of file IDs, while the `file_search` + * tool requires a list of vector store IDs. + */ + interface ToolResources { + code_interpreter?: ToolResources.CodeInterpreter; + file_search?: ToolResources.FileSearch; + } + namespace ToolResources { + interface CodeInterpreter { + /** + * A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made + * available to the `code_interpreter` tool. There can be a maximum of 20 files + * associated with the tool. + */ + file_ids?: Array; + } + interface FileSearch { + /** + * The + * [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + * attached to this thread. There can be a maximum of 1 vector store attached to + * the thread. + */ + vector_store_ids?: Array; + /** + * A helper to create a + * [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + * with file_ids and attach it to this thread. There can be a maximum of 1 vector + * store attached to the thread. 
+ */ + vector_stores?: Array; + } + namespace FileSearch { + interface VectorStore { + /** + * A list of [file](https://platform.openai.com/docs/api-reference/files) IDs to + * add to the vector store. There can be a maximum of 10000 files in a vector + * store. + */ + file_ids?: Array; + /** + * Set of 16 key-value pairs that can be attached to a vector store. This can be + * useful for storing additional information about the vector store in a structured + * format. Keys can be a maximum of 64 characters long and values can be a maxium + * of 512 characters long. + */ + metadata?: unknown; + } + } + } + } + /** + * A set of resources that are used by the assistant's tools. The resources are + * specific to the type of tool. For example, the `code_interpreter` tool requires + * a list of file IDs, while the `file_search` tool requires a list of vector store + * IDs. + */ + interface ToolResources { + code_interpreter?: ToolResources.CodeInterpreter; + file_search?: ToolResources.FileSearch; + } + namespace ToolResources { + interface CodeInterpreter { + /** + * A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made + * available to the `code_interpreter` tool. There can be a maximum of 20 files + * associated with the tool. + */ + file_ids?: Array; + } + interface FileSearch { + /** + * The ID of the + * [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + * attached to this assistant. There can be a maximum of 1 vector store attached to + * the assistant. + */ + vector_store_ids?: Array; + } + } + /** + * Controls for how a thread will be truncated prior to the run. Use this to + * control the intial context window of the run. + */ + interface TruncationStrategy { + /** + * The truncation strategy to use for the thread. The default is `auto`. If set to + * `last_messages`, the thread will be truncated to the n most recent messages in + * the thread. When set to `auto`, messages in the middle of the thread will be + * dropped to fit the context length of the model, `max_prompt_tokens`. + */ + type: 'auto' | 'last_messages'; + /** + * The number of most recent messages from the thread when constructing the context + * for the run. 
+ */ + last_messages?: number | null; + } +} +export declare namespace Threads { + export { type AssistantResponseFormatOption as AssistantResponseFormatOption, type AssistantToolChoice as AssistantToolChoice, type AssistantToolChoiceFunction as AssistantToolChoiceFunction, type AssistantToolChoiceOption as AssistantToolChoiceOption, type Thread as Thread, type ThreadDeleted as ThreadDeleted, type ThreadCreateParams as ThreadCreateParams, type ThreadUpdateParams as ThreadUpdateParams, type ThreadCreateAndRunParams as ThreadCreateAndRunParams, type ThreadCreateAndRunParamsNonStreaming as ThreadCreateAndRunParamsNonStreaming, type ThreadCreateAndRunParamsStreaming as ThreadCreateAndRunParamsStreaming, type ThreadCreateAndRunPollParams, type ThreadCreateAndRunStreamParams, }; + export { Runs as Runs, type RequiredActionFunctionToolCall as RequiredActionFunctionToolCall, type Run as Run, type RunStatus as RunStatus, RunsPage as RunsPage, type RunCreateParams as RunCreateParams, type RunCreateParamsNonStreaming as RunCreateParamsNonStreaming, type RunCreateParamsStreaming as RunCreateParamsStreaming, type RunUpdateParams as RunUpdateParams, type RunListParams as RunListParams, type RunCreateAndPollParams, type RunCreateAndStreamParams, type RunStreamParams, type RunSubmitToolOutputsParams as RunSubmitToolOutputsParams, type RunSubmitToolOutputsParamsNonStreaming as RunSubmitToolOutputsParamsNonStreaming, type RunSubmitToolOutputsParamsStreaming as RunSubmitToolOutputsParamsStreaming, type RunSubmitToolOutputsAndPollParams, type RunSubmitToolOutputsStreamParams, }; + export { Messages as Messages, type Annotation as Annotation, type AnnotationDelta as AnnotationDelta, type FileCitationAnnotation as FileCitationAnnotation, type FileCitationDeltaAnnotation as FileCitationDeltaAnnotation, type FilePathAnnotation as FilePathAnnotation, type FilePathDeltaAnnotation as FilePathDeltaAnnotation, type ImageFile as ImageFile, type ImageFileContentBlock as ImageFileContentBlock, type ImageFileDelta as ImageFileDelta, type ImageFileDeltaBlock as ImageFileDeltaBlock, type ImageURL as ImageURL, type ImageURLContentBlock as ImageURLContentBlock, type ImageURLDelta as ImageURLDelta, type ImageURLDeltaBlock as ImageURLDeltaBlock, type MessagesAPIMessage as Message, type MessageContent as MessageContent, type MessageContentDelta as MessageContentDelta, type MessageContentPartParam as MessageContentPartParam, type MessageDeleted as MessageDeleted, type MessageDelta as MessageDelta, type MessageDeltaEvent as MessageDeltaEvent, type RefusalContentBlock as RefusalContentBlock, type RefusalDeltaBlock as RefusalDeltaBlock, type Text as Text, type TextContentBlock as TextContentBlock, type TextContentBlockParam as TextContentBlockParam, type TextDelta as TextDelta, type TextDeltaBlock as TextDeltaBlock, MessagesPage as MessagesPage, type MessageCreateParams as MessageCreateParams, type MessageUpdateParams as MessageUpdateParams, type MessageListParams as MessageListParams, }; +} +//# sourceMappingURL=threads.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/resources/beta/threads/threads.d.ts.map b/node_modules/openai/resources/beta/threads/threads.d.ts.map new file mode 100644 index 0000000..970e15e --- /dev/null +++ b/node_modules/openai/resources/beta/threads/threads.d.ts.map @@ -0,0 +1 @@ 
+{"version":3,"file":"threads.d.ts","sourceRoot":"","sources":["../../../src/resources/beta/threads/threads.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,WAAW,EAAE,MAAM,mBAAmB,CAAC;AAEhD,OAAO,EAAE,eAAe,EAAE,kCAAkC,EAAE,MAAM,8BAA8B,CAAC;AACnG,OAAO,EAAE,UAAU,EAAE,MAAM,eAAe,CAAC;AAC3C,OAAO,KAAK,IAAI,MAAM,eAAe,CAAC;AACtC,OAAO,KAAK,UAAU,MAAM,WAAW,CAAC;AACxC,OAAO,KAAK,MAAM,MAAM,cAAc,CAAC;AACvC,OAAO,KAAK,aAAa,MAAM,eAAe,CAAC;AAC/C,OAAO,KAAK,OAAO,MAAM,iBAAiB,CAAC;AAC3C,OAAO,KAAK,WAAW,MAAM,YAAY,CAAC;AAC1C,OAAO,EACL,UAAU,EACV,eAAe,EACf,sBAAsB,EACtB,2BAA2B,EAC3B,kBAAkB,EAClB,uBAAuB,EACvB,SAAS,EACT,qBAAqB,EACrB,cAAc,EACd,mBAAmB,EACnB,QAAQ,EACR,oBAAoB,EACpB,aAAa,EACb,kBAAkB,EAClB,OAAO,IAAI,kBAAkB,EAC7B,cAAc,EACd,mBAAmB,EACnB,uBAAuB,EACvB,mBAAmB,EACnB,cAAc,EACd,YAAY,EACZ,iBAAiB,EACjB,iBAAiB,EACjB,mBAAmB,EACnB,QAAQ,EACR,YAAY,EACZ,mBAAmB,EACnB,iBAAiB,EACjB,IAAI,EACJ,gBAAgB,EAChB,qBAAqB,EACrB,SAAS,EACT,cAAc,EACf,MAAM,YAAY,CAAC;AACpB,OAAO,KAAK,eAAe,MAAM,gCAAgC,CAAC;AAClE,OAAO,KAAK,OAAO,MAAM,aAAa,CAAC;AACvC,OAAO,EACL,8BAA8B,EAC9B,GAAG,EACH,sBAAsB,EACtB,wBAAwB,EACxB,eAAe,EACf,2BAA2B,EAC3B,wBAAwB,EACxB,aAAa,EACb,SAAS,EACT,eAAe,EACf,iCAAiC,EACjC,0BAA0B,EAC1B,sCAAsC,EACtC,mCAAmC,EACnC,gCAAgC,EAChC,eAAe,EACf,IAAI,EACJ,QAAQ,EACT,MAAM,aAAa,CAAC;AACrB,OAAO,EAAE,MAAM,EAAE,MAAM,oBAAoB,CAAC;AAE5C,qBAAa,OAAQ,SAAQ,WAAW;IACtC,IAAI,EAAE,OAAO,CAAC,IAAI,CAAkC;IACpD,QAAQ,EAAE,WAAW,CAAC,QAAQ,CAA0C;IAExE;;OAEG;IACH,MAAM,CAAC,IAAI,CAAC,EAAE,kBAAkB,EAAE,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAAG,IAAI,CAAC,UAAU,CAAC,MAAM,CAAC;IACzF,MAAM,CAAC,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAAG,IAAI,CAAC,UAAU,CAAC,MAAM,CAAC;IAe9D;;OAEG;IACH,QAAQ,CAAC,QAAQ,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAAG,IAAI,CAAC,UAAU,CAAC,MAAM,CAAC;IAOlF;;OAEG;IACH,MAAM,CAAC,QAAQ,EAAE,MAAM,EAAE,IAAI,EAAE,kBAAkB,EAAE,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAAG,IAAI,CAAC,UAAU,CAAC,MAAM,CAAC;IAQ1G;;OAEG;IACH,GAAG,CAAC,QAAQ,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAAG,IAAI,CAAC,UAAU,CAAC,aAAa,CAAC;IAOpF;;OAEG;IACH,YAAY,CACV,IAAI,EAAE,oCAAoC,EAC1C,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAC5B,UAAU,CAAC,OAAO,CAAC,GAAG,CAAC;IAC1B,YAAY,CACV,IAAI,EAAE,iCAAiC,EACvC,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAC5B,UAAU,CAAC,MAAM,CAAC,aAAa,CAAC,oBAAoB,CAAC,CAAC;IACzD,YAAY,CACV,IAAI,EAAE,4BAA4B,EAClC,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAC5B,UAAU,CAAC,MAAM,CAAC,aAAa,CAAC,oBAAoB,CAAC,GAAG,OAAO,CAAC,GAAG,CAAC;IAavE;;;;OAIG;IACG,gBAAgB,CACpB,IAAI,EAAE,oCAAoC,EAC1C,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAAG;QAAE,cAAc,CAAC,EAAE,MAAM,CAAA;KAAE,GAC1D,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC;IAKvB;;OAEG;IACH,kBAAkB,CAChB,IAAI,EAAE,kCAAkC,EACxC,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAC5B,eAAe;CAGnB;AAED;;;;;;;;;;;;;;;;;;;;;GAqBG;AACH,MAAM,MAAM,6BAA6B,GACrC,MAAM,GACN,MAAM,CAAC,kBAAkB,GACzB,MAAM,CAAC,wBAAwB,GAC/B,MAAM,CAAC,wBAAwB,CAAC;AAEpC;;;GAGG;AACH,MAAM,WAAW,mBAAmB;IAClC;;OAEG;IACH,IAAI,EAAE,UAAU,GAAG,kBAAkB,GAAG,aAAa,CAAC;IAEtD,QAAQ,CAAC,EAAE,2BAA2B,CAAC;CACxC;AAED,MAAM,WAAW,2BAA2B;IAC1C;;OAEG;IACH,IAAI,EAAE,MAAM,CAAC;CACd;AAED;;;;;;;;GAQG;AACH,MAAM,MAAM,yBAAyB,GAAG,MAAM,GAAG,MAAM,GAAG,UAAU,GAAG,mBAAmB,CAAC;AAE3F;;;GAGG;AACH,MAAM,WAAW,MAAM;IACrB;;OAEG;IACH,EAAE,EAAE,MAAM,CAAC;IAEX;;OAEG;IACH,UAAU,EAAE,MAAM,CAAC;IAEnB;;;;;OAKG;IACH,QAAQ,EAAE,OAAO,GAAG,IAAI,CAAC;IAEzB;;OAEG;IACH,MAAM,EAAE,QAAQ,CAAC;IAEjB;;;;;OAKG;IACH,cAAc,EAAE,MAAM,CAAC,aAAa,GAAG,IAAI,CAAC;CAC7C;AAED,yBAAiB,MAAM,CAAC;IACtB;;;;;OAKG;IACH,UAAiB,aAAa;QAC5B,gBAAgB,CAAC,EAAE,aAAa,CAAC,eAAe,CAAC;QAEjD,WAAW,CAAC,EAAE,aAAa,CAAC,UAAU,CAAC;KACxC;IAED,UAAiB,aAAa,CAAC;QAC7B,UAAiB,eAAe;YAC9B;;;;eAIG;YACH,QAAQ,CAAC,EAAE,KAAK,CAAC,MAAM,CAAC,CAAC;SAC1B;QAED,UAAiB,UAAU;YACzB;;;;;eAKG;YACH,gBAAgB,CAAC,EAAE
,KAAK,CAAC,MAAM,CAAC,CAAC;SAClC;KACF;CACF;AAED,MAAM,WAAW,aAAa;IAC5B,EAAE,EAAE,MAAM,CAAC;IAEX,OAAO,EAAE,OAAO,CAAC;IAEjB,MAAM,EAAE,gBAAgB,CAAC;CAC1B;AAED,MAAM,WAAW,kBAAkB;IACjC;;;OAGG;IACH,QAAQ,CAAC,EAAE,KAAK,CAAC,kBAAkB,CAAC,OAAO,CAAC,CAAC;IAE7C;;;;;OAKG;IACH,QAAQ,CAAC,EAAE,OAAO,GAAG,IAAI,CAAC;IAE1B;;;;;OAKG;IACH,cAAc,CAAC,EAAE,kBAAkB,CAAC,aAAa,GAAG,IAAI,CAAC;CAC1D;AAED,yBAAiB,kBAAkB,CAAC;IAClC,UAAiB,OAAO;QACtB;;WAEG;QACH,OAAO,EAAE,MAAM,GAAG,KAAK,CAAC,WAAW,CAAC,uBAAuB,CAAC,CAAC;QAE7D;;;;;;;WAOG;QACH,IAAI,EAAE,MAAM,GAAG,WAAW,CAAC;QAE3B;;WAEG;QACH,WAAW,CAAC,EAAE,KAAK,CAAC,OAAO,CAAC,UAAU,CAAC,GAAG,IAAI,CAAC;QAE/C;;;;;WAKG;QACH,QAAQ,CAAC,EAAE,OAAO,GAAG,IAAI,CAAC;KAC3B;IAED,UAAiB,OAAO,CAAC;QACvB,UAAiB,UAAU;YACzB;;eAEG;YACH,OAAO,CAAC,EAAE,MAAM,CAAC;YAEjB;;eAEG;YACH,KAAK,CAAC,EAAE,KAAK,CAAC,aAAa,CAAC,mBAAmB,GAAG,UAAU,CAAC,UAAU,CAAC,CAAC;SAC1E;QAED,UAAiB,UAAU,CAAC;YAC1B,UAAiB,UAAU;gBACzB;;mBAEG;gBACH,IAAI,EAAE,aAAa,CAAC;aACrB;SACF;KACF;IAED;;;;;OAKG;IACH,UAAiB,aAAa;QAC5B,gBAAgB,CAAC,EAAE,aAAa,CAAC,eAAe,CAAC;QAEjD,WAAW,CAAC,EAAE,aAAa,CAAC,UAAU,CAAC;KACxC;IAED,UAAiB,aAAa,CAAC;QAC7B,UAAiB,eAAe;YAC9B;;;;eAIG;YACH,QAAQ,CAAC,EAAE,KAAK,CAAC,MAAM,CAAC,CAAC;SAC1B;QAED,UAAiB,UAAU;YACzB;;;;;eAKG;YACH,gBAAgB,CAAC,EAAE,KAAK,CAAC,MAAM,CAAC,CAAC;YAEjC;;;;;eAKG;YACH,aAAa,CAAC,EAAE,KAAK,CAAC,UAAU,CAAC,WAAW,CAAC,CAAC;SAC/C;QAED,UAAiB,UAAU,CAAC;YAC1B,UAAiB,WAAW;gBAC1B;;;mBAGG;gBACH,iBAAiB,CAAC,EAAE,eAAe,CAAC,yBAAyB,CAAC;gBAE9D;;;;mBAIG;gBACH,QAAQ,CAAC,EAAE,KAAK,CAAC,MAAM,CAAC,CAAC;gBAEzB;;;;;mBAKG;gBACH,QAAQ,CAAC,EAAE,OAAO,CAAC;aACpB;SACF;KACF;CACF;AAED,MAAM,WAAW,kBAAkB;IACjC;;;;;OAKG;IACH,QAAQ,CAAC,EAAE,OAAO,GAAG,IAAI,CAAC;IAE1B;;;;;OAKG;IACH,cAAc,CAAC,EAAE,kBAAkB,CAAC,aAAa,GAAG,IAAI,CAAC;CAC1D;AAED,yBAAiB,kBAAkB,CAAC;IAClC;;;;;OAKG;IACH,UAAiB,aAAa;QAC5B,gBAAgB,CAAC,EAAE,aAAa,CAAC,eAAe,CAAC;QAEjD,WAAW,CAAC,EAAE,aAAa,CAAC,UAAU,CAAC;KACxC;IAED,UAAiB,aAAa,CAAC;QAC7B,UAAiB,eAAe;YAC9B;;;;eAIG;YACH,QAAQ,CAAC,EAAE,KAAK,CAAC,MAAM,CAAC,CAAC;SAC1B;QAED,UAAiB,UAAU;YACzB;;;;;eAKG;YACH,gBAAgB,CAAC,EAAE,KAAK,CAAC,MAAM,CAAC,CAAC;SAClC;KACF;CACF;AAED,MAAM,MAAM,wBAAwB,GAChC,oCAAoC,GACpC,iCAAiC,CAAC;AAEtC,MAAM,WAAW,4BAA4B;IAC3C;;;;OAIG;IACH,YAAY,EAAE,MAAM,CAAC;IAErB;;;OAGG;IACH,YAAY,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IAE7B;;;;;;OAMG;IACH,qBAAqB,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IAEtC;;;;;;OAMG;IACH,iBAAiB,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IAElC;;;;;OAKG;IACH,QAAQ,CAAC,EAAE,OAAO,GAAG,IAAI,CAAC;IAE1B;;;;;OAKG;IACH,KAAK,CAAC,EAAE,CAAC,MAAM,GAAG,EAAE,CAAC,GAAG,OAAO,CAAC,SAAS,GAAG,IAAI,CAAC;IAEjD;;;;OAIG;IACH,mBAAmB,CAAC,EAAE,OAAO,CAAC;IAE9B;;;;;;;;;;;;;;;;;;;;;OAqBG;IACH,eAAe,CAAC,EAAE,6BAA6B,GAAG,IAAI,CAAC;IAEvD;;;;OAIG;IACH,MAAM,CAAC,EAAE,OAAO,GAAG,IAAI,CAAC;IAExB;;;;OAIG;IACH,WAAW,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IAE5B;;OAEG;IACH,MAAM,CAAC,EAAE,wBAAwB,CAAC,MAAM,CAAC;IAEzC;;;;;;;;OAQG;IACH,WAAW,CAAC,EAAE,yBAAyB,GAAG,IAAI,CAAC;IAE/C;;;;;OAKG;IACH,cAAc,CAAC,EAAE,wBAAwB,CAAC,aAAa,GAAG,IAAI,CAAC;IAE/D;;;OAGG;IACH,KAAK,CAAC,EAAE,KAAK,CACX,aAAa,CAAC,mBAAmB,GAAG,aAAa,CAAC,cAAc,GAAG,aAAa,CAAC,YAAY,CAC9F,GAAG,IAAI,CAAC;IAET;;;;;;OAMG;IACH,KAAK,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IAEtB;;;OAGG;IACH,mBAAmB,CAAC,EAAE,wBAAwB,CAAC,kBAAkB,GAAG,IAAI,CAAC;CAC1E;AAED,yBAAiB,wBAAwB,CAAC;IACxC;;OAEG;IACH,UAAiB,MAAM;QACrB;;;WAGG;QACH,QAAQ,CAAC,EAAE,KAAK,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC;QAEjC;;;;;WAKG;QACH,QAAQ,CAAC,EAAE,OAAO,GAAG,IAAI,CAAC;QAE1B;;;;;WAKG;QACH,cAAc,CAAC,EAAE,MAAM,CAAC,aAAa,GAAG,IAAI,CAAC;KAC9C;IAED,UAAiB,MAAM,CAAC;QACtB,UAAiB,OAAO;YACtB;;eAEG;YACH,OAAO,EAAE,MAAM,GAAG,KAAK,CAAC,WAAW,CAAC,uBAAuB,CAAC,CAAC;YAE7D;;;;;;;eAOG;YACH,IAAI,EAAE,MAAM,GAAG,WAAW,CAAC;YAE3B;;eAEG;
YACH,WAAW,CAAC,EAAE,KAAK,CAAC,OAAO,CAAC,UAAU,CAAC,GAAG,IAAI,CAAC;YAE/C;;;;;eAKG;YACH,QAAQ,CAAC,EAAE,OAAO,GAAG,IAAI,CAAC;SAC3B;QAED,UAAiB,OAAO,CAAC;YACvB,UAAiB,UAAU;gBACzB;;mBAEG;gBACH,OAAO,CAAC,EAAE,MAAM,CAAC;gBAEjB;;mBAEG;gBACH,KAAK,CAAC,EAAE,KAAK,CAAC,aAAa,CAAC,mBAAmB,GAAG,UAAU,CAAC,UAAU,CAAC,CAAC;aAC1E;YAED,UAAiB,UAAU,CAAC;gBAC1B,UAAiB,UAAU;oBACzB;;uBAEG;oBACH,IAAI,EAAE,aAAa,CAAC;iBACrB;aACF;SACF;QAED;;;;;WAKG;QACH,UAAiB,aAAa;YAC5B,gBAAgB,CAAC,EAAE,aAAa,CAAC,eAAe,CAAC;YAEjD,WAAW,CAAC,EAAE,aAAa,CAAC,UAAU,CAAC;SACxC;QAED,UAAiB,aAAa,CAAC;YAC7B,UAAiB,eAAe;gBAC9B;;;;mBAIG;gBACH,QAAQ,CAAC,EAAE,KAAK,CAAC,MAAM,CAAC,CAAC;aAC1B;YAED,UAAiB,UAAU;gBACzB;;;;;mBAKG;gBACH,gBAAgB,CAAC,EAAE,KAAK,CAAC,MAAM,CAAC,CAAC;gBAEjC;;;;;mBAKG;gBACH,aAAa,CAAC,EAAE,KAAK,CAAC,UAAU,CAAC,WAAW,CAAC,CAAC;aAC/C;YAED,UAAiB,UAAU,CAAC;gBAC1B,UAAiB,WAAW;oBAC1B;;;uBAGG;oBACH,iBAAiB,CAAC,EAAE,eAAe,CAAC,yBAAyB,CAAC;oBAE9D;;;;uBAIG;oBACH,QAAQ,CAAC,EAAE,KAAK,CAAC,MAAM,CAAC,CAAC;oBAEzB;;;;;uBAKG;oBACH,QAAQ,CAAC,EAAE,OAAO,CAAC;iBACpB;aACF;SACF;KACF;IAED;;;;;OAKG;IACH,UAAiB,aAAa;QAC5B,gBAAgB,CAAC,EAAE,aAAa,CAAC,eAAe,CAAC;QAEjD,WAAW,CAAC,EAAE,aAAa,CAAC,UAAU,CAAC;KACxC;IAED,UAAiB,aAAa,CAAC;QAC7B,UAAiB,eAAe;YAC9B;;;;eAIG;YACH,QAAQ,CAAC,EAAE,KAAK,CAAC,MAAM,CAAC,CAAC;SAC1B;QAED,UAAiB,UAAU;YACzB;;;;;eAKG;YACH,gBAAgB,CAAC,EAAE,KAAK,CAAC,MAAM,CAAC,CAAC;SAClC;KACF;IAED;;;OAGG;IACH,UAAiB,kBAAkB;QACjC;;;;;WAKG;QACH,IAAI,EAAE,MAAM,GAAG,eAAe,CAAC;QAE/B;;;WAGG;QACH,aAAa,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;KAC/B;IAED,KAAY,oCAAoC,GAAG,UAAU,CAAC,oCAAoC,CAAC;IACnG,KAAY,iCAAiC,GAAG,UAAU,CAAC,iCAAiC,CAAC;CAC9F;AAED,MAAM,WAAW,oCAAqC,SAAQ,4BAA4B;IACxF;;;;OAIG;IACH,MAAM,CAAC,EAAE,KAAK,GAAG,IAAI,CAAC;CACvB;AAED,MAAM,WAAW,iCAAkC,SAAQ,4BAA4B;IACrF;;;;OAIG;IACH,MAAM,EAAE,IAAI,CAAC;CACd;AAED,MAAM,WAAW,4BAA4B;IAC3C;;;;OAIG;IACH,YAAY,EAAE,MAAM,CAAC;IAErB;;;OAGG;IACH,YAAY,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IAE7B;;;;;;OAMG;IACH,qBAAqB,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IAEtC;;;;;;OAMG;IACH,iBAAiB,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IAElC;;;;;OAKG;IACH,QAAQ,CAAC,EAAE,OAAO,GAAG,IAAI,CAAC;IAE1B;;;;;OAKG;IACH,KAAK,CAAC,EACF,CAAC,MAAM,GAAG,EAAE,CAAC,GACb,QAAQ,GACR,mBAAmB,GACnB,aAAa,GACb,wBAAwB,GACxB,oBAAoB,GACpB,qBAAqB,GACrB,oBAAoB,GACpB,sBAAsB,GACtB,OAAO,GACP,YAAY,GACZ,YAAY,GACZ,WAAW,GACX,gBAAgB,GAChB,gBAAgB,GAChB,eAAe,GACf,mBAAmB,GACnB,oBAAoB,GACpB,oBAAoB,GACpB,oBAAoB,GACpB,wBAAwB,GACxB,IAAI,CAAC;IAET;;;;;;;;;;;;;;;;OAgBG;IACH,eAAe,CAAC,EAAE,6BAA6B,GAAG,IAAI,CAAC;IAEvD;;;;OAIG;IACH,WAAW,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IAE5B;;OAEG;IACH,MAAM,CAAC,EAAE,4BAA4B,CAAC,MAAM,CAAC;IAE7C;;;;;;;;OAQG;IACH,WAAW,CAAC,EAAE,yBAAyB,GAAG,IAAI,CAAC;IAE/C;;;;;OAKG;IACH,cAAc,CAAC,EAAE,4BAA4B,CAAC,aAAa,GAAG,IAAI,CAAC;IAEnE;;;OAGG;IACH,KAAK,CAAC,EAAE,KAAK,CACX,aAAa,CAAC,mBAAmB,GAAG,aAAa,CAAC,cAAc,GAAG,aAAa,CAAC,YAAY,CAC9F,GAAG,IAAI,CAAC;IAET;;;;;;OAMG;IACH,KAAK,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IAEtB;;;OAGG;IACH,mBAAmB,CAAC,EAAE,4BAA4B,CAAC,kBAAkB,GAAG,IAAI,CAAC;CAC9E;AAED,yBAAiB,4BAA4B,CAAC;IAC5C;;OAEG;IACH,UAAiB,MAAM;QACrB;;;WAGG;QACH,QAAQ,CAAC,EAAE,KAAK,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC;QAEjC;;;;;WAKG;QACH,QAAQ,CAAC,EAAE,OAAO,GAAG,IAAI,CAAC;QAE1B;;;;;WAKG;QACH,cAAc,CAAC,EAAE,MAAM,CAAC,aAAa,GAAG,IAAI,CAAC;KAC9C;IAED,UAAiB,MAAM,CAAC;QACtB,UAAiB,OAAO;YACtB;;eAEG;YACH,OAAO,EAAE,MAAM,GAAG,KAAK,CAAC,WAAW,CAAC,uBAAuB,CAAC,CAAC;YAE7D;;;;;;;eAOG;YACH,IAAI,EAAE,MAAM,GAAG,WAAW,CAAC;YAE3B;;eAEG;YACH,WAAW,CAAC,EAAE,KAAK,CAAC,OAAO,CAAC,UAAU,CAAC,GAAG,IAAI,CAAC;YAE/C;;;;;eAKG;YACH,QAAQ,CAAC,EAAE,OAAO,GAAG,IAAI,CAAC;SAC3B;QAED,UAAiB,OAAO,CAAC;YACvB,UAAiB,UAAU;gBACzB;;mBAEG;gBACH,OAAO,CAAC,EAAE,MAAM,CAAC;gBAEjB;;mBAEG;gBACH,KAAK,CAAC,EAA
E,KAAK,CAAC,aAAa,CAAC,mBAAmB,GAAG,aAAa,CAAC,cAAc,CAAC,CAAC;aACjF;SACF;QAED;;;;;WAKG;QACH,UAAiB,aAAa;YAC5B,gBAAgB,CAAC,EAAE,aAAa,CAAC,eAAe,CAAC;YAEjD,WAAW,CAAC,EAAE,aAAa,CAAC,UAAU,CAAC;SACxC;QAED,UAAiB,aAAa,CAAC;YAC7B,UAAiB,eAAe;gBAC9B;;;;mBAIG;gBACH,QAAQ,CAAC,EAAE,KAAK,CAAC,MAAM,CAAC,CAAC;aAC1B;YAED,UAAiB,UAAU;gBACzB;;;;;mBAKG;gBACH,gBAAgB,CAAC,EAAE,KAAK,CAAC,MAAM,CAAC,CAAC;gBAEjC;;;;;mBAKG;gBACH,aAAa,CAAC,EAAE,KAAK,CAAC,UAAU,CAAC,WAAW,CAAC,CAAC;aAC/C;YAED,UAAiB,UAAU,CAAC;gBAC1B,UAAiB,WAAW;oBAC1B;;;;uBAIG;oBACH,QAAQ,CAAC,EAAE,KAAK,CAAC,MAAM,CAAC,CAAC;oBAEzB;;;;;uBAKG;oBACH,QAAQ,CAAC,EAAE,OAAO,CAAC;iBACpB;aACF;SACF;KACF;IAED;;;;;OAKG;IACH,UAAiB,aAAa;QAC5B,gBAAgB,CAAC,EAAE,aAAa,CAAC,eAAe,CAAC;QAEjD,WAAW,CAAC,EAAE,aAAa,CAAC,UAAU,CAAC;KACxC;IAED,UAAiB,aAAa,CAAC;QAC7B,UAAiB,eAAe;YAC9B;;;;eAIG;YACH,QAAQ,CAAC,EAAE,KAAK,CAAC,MAAM,CAAC,CAAC;SAC1B;QAED,UAAiB,UAAU;YACzB;;;;;eAKG;YACH,gBAAgB,CAAC,EAAE,KAAK,CAAC,MAAM,CAAC,CAAC;SAClC;KACF;IAED;;;OAGG;IACH,UAAiB,kBAAkB;QACjC;;;;;WAKG;QACH,IAAI,EAAE,MAAM,GAAG,eAAe,CAAC;QAE/B;;;WAGG;QACH,aAAa,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;KAC/B;CACF;AAED,MAAM,WAAW,8BAA8B;IAC7C;;;;OAIG;IACH,YAAY,EAAE,MAAM,CAAC;IAErB;;;OAGG;IACH,YAAY,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IAE7B;;;;;;OAMG;IACH,qBAAqB,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IAEtC;;;;;;OAMG;IACH,iBAAiB,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IAElC;;;;;OAKG;IACH,QAAQ,CAAC,EAAE,OAAO,GAAG,IAAI,CAAC;IAE1B;;;;;OAKG;IACH,KAAK,CAAC,EACF,CAAC,MAAM,GAAG,EAAE,CAAC,GACb,QAAQ,GACR,mBAAmB,GACnB,aAAa,GACb,wBAAwB,GACxB,oBAAoB,GACpB,qBAAqB,GACrB,oBAAoB,GACpB,sBAAsB,GACtB,OAAO,GACP,YAAY,GACZ,YAAY,GACZ,WAAW,GACX,gBAAgB,GAChB,gBAAgB,GAChB,eAAe,GACf,mBAAmB,GACnB,oBAAoB,GACpB,oBAAoB,GACpB,oBAAoB,GACpB,wBAAwB,GACxB,IAAI,CAAC;IAET;;;;;;;;;;;;;;;;OAgBG;IACH,eAAe,CAAC,EAAE,6BAA6B,GAAG,IAAI,CAAC;IAEvD;;;;OAIG;IACH,WAAW,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IAE5B;;OAEG;IACH,MAAM,CAAC,EAAE,8BAA8B,CAAC,MAAM,CAAC;IAE/C;;;;;;;;OAQG;IACH,WAAW,CAAC,EAAE,yBAAyB,GAAG,IAAI,CAAC;IAE/C;;;;;OAKG;IACH,cAAc,CAAC,EAAE,8BAA8B,CAAC,aAAa,GAAG,IAAI,CAAC;IAErE;;;OAGG;IACH,KAAK,CAAC,EAAE,KAAK,CACX,aAAa,CAAC,mBAAmB,GAAG,aAAa,CAAC,cAAc,GAAG,aAAa,CAAC,YAAY,CAC9F,GAAG,IAAI,CAAC;IAET;;;;;;OAMG;IACH,KAAK,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IAEtB;;;OAGG;IACH,mBAAmB,CAAC,EAAE,8BAA8B,CAAC,kBAAkB,GAAG,IAAI,CAAC;CAChF;AAED,yBAAiB,8BAA8B,CAAC;IAC9C;;OAEG;IACH,UAAiB,MAAM;QACrB;;;WAGG;QACH,QAAQ,CAAC,EAAE,KAAK,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC;QAEjC;;;;;WAKG;QACH,QAAQ,CAAC,EAAE,OAAO,GAAG,IAAI,CAAC;QAE1B;;;;;WAKG;QACH,cAAc,CAAC,EAAE,MAAM,CAAC,aAAa,GAAG,IAAI,CAAC;KAC9C;IAED,UAAiB,MAAM,CAAC;QACtB,UAAiB,OAAO;YACtB;;eAEG;YACH,OAAO,EAAE,MAAM,GAAG,KAAK,CAAC,WAAW,CAAC,uBAAuB,CAAC,CAAC;YAE7D;;;;;;;eAOG;YACH,IAAI,EAAE,MAAM,GAAG,WAAW,CAAC;YAE3B;;eAEG;YACH,WAAW,CAAC,EAAE,KAAK,CAAC,OAAO,CAAC,UAAU,CAAC,GAAG,IAAI,CAAC;YAE/C;;;;;eAKG;YACH,QAAQ,CAAC,EAAE,OAAO,GAAG,IAAI,CAAC;SAC3B;QAED,UAAiB,OAAO,CAAC;YACvB,UAAiB,UAAU;gBACzB;;mBAEG;gBACH,OAAO,CAAC,EAAE,MAAM,CAAC;gBAEjB;;mBAEG;gBACH,KAAK,CAAC,EAAE,KAAK,CAAC,aAAa,CAAC,mBAAmB,GAAG,aAAa,CAAC,cAAc,CAAC,CAAC;aACjF;SACF;QAED;;;;;WAKG;QACH,UAAiB,aAAa;YAC5B,gBAAgB,CAAC,EAAE,aAAa,CAAC,eAAe,CAAC;YAEjD,WAAW,CAAC,EAAE,aAAa,CAAC,UAAU,CAAC;SACxC;QAED,UAAiB,aAAa,CAAC;YAC7B,UAAiB,eAAe;gBAC9B;;;;mBAIG;gBACH,QAAQ,CAAC,EAAE,KAAK,CAAC,MAAM,CAAC,CAAC;aAC1B;YAED,UAAiB,UAAU;gBACzB;;;;;mBAKG;gBACH,gBAAgB,CAAC,EAAE,KAAK,CAAC,MAAM,CAAC,CAAC;gBAEjC;;;;;mBAKG;gBACH,aAAa,CAAC,EAAE,KAAK,CAAC,UAAU,CAAC,WAAW,CAAC,CAAC;aAC/C;YAED,UAAiB,UAAU,CAAC;gBAC1B,UAAiB,WAAW;oBAC1B;;;;uBAIG;oBACH,QAAQ,CAAC,EAAE,KAAK,CAAC,MAAM,CAAC,CAAC;oBAEzB;;;;;uBAKG;oBACH,QAAQ,CAAC,EAAE,OAAO,CAAC;iBACpB;aACF;SACF;KACF;IAED;;;;;OAKG;IACH,UAAiB,aAA
a;QAC5B,gBAAgB,CAAC,EAAE,aAAa,CAAC,eAAe,CAAC;QAEjD,WAAW,CAAC,EAAE,aAAa,CAAC,UAAU,CAAC;KACxC;IAED,UAAiB,aAAa,CAAC;QAC7B,UAAiB,eAAe;YAC9B;;;;eAIG;YACH,QAAQ,CAAC,EAAE,KAAK,CAAC,MAAM,CAAC,CAAC;SAC1B;QAED,UAAiB,UAAU;YACzB;;;;;eAKG;YACH,gBAAgB,CAAC,EAAE,KAAK,CAAC,MAAM,CAAC,CAAC;SAClC;KACF;IAED;;;OAGG;IACH,UAAiB,kBAAkB;QACjC;;;;;WAKG;QACH,IAAI,EAAE,MAAM,GAAG,eAAe,CAAC;QAE/B;;;WAGG;QACH,aAAa,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;KAC/B;CACF;AAOD,MAAM,CAAC,OAAO,WAAW,OAAO,CAAC;IAC/B,OAAO,EACL,KAAK,6BAA6B,IAAI,6BAA6B,EACnE,KAAK,mBAAmB,IAAI,mBAAmB,EAC/C,KAAK,2BAA2B,IAAI,2BAA2B,EAC/D,KAAK,yBAAyB,IAAI,yBAAyB,EAC3D,KAAK,MAAM,IAAI,MAAM,EACrB,KAAK,aAAa,IAAI,aAAa,EACnC,KAAK,kBAAkB,IAAI,kBAAkB,EAC7C,KAAK,kBAAkB,IAAI,kBAAkB,EAC7C,KAAK,wBAAwB,IAAI,wBAAwB,EACzD,KAAK,oCAAoC,IAAI,oCAAoC,EACjF,KAAK,iCAAiC,IAAI,iCAAiC,EAC3E,KAAK,4BAA4B,EACjC,KAAK,8BAA8B,GACpC,CAAC;IAEF,OAAO,EACL,IAAI,IAAI,IAAI,EACZ,KAAK,8BAA8B,IAAI,8BAA8B,EACrE,KAAK,GAAG,IAAI,GAAG,EACf,KAAK,SAAS,IAAI,SAAS,EAC3B,QAAQ,IAAI,QAAQ,EACpB,KAAK,eAAe,IAAI,eAAe,EACvC,KAAK,2BAA2B,IAAI,2BAA2B,EAC/D,KAAK,wBAAwB,IAAI,wBAAwB,EACzD,KAAK,eAAe,IAAI,eAAe,EACvC,KAAK,aAAa,IAAI,aAAa,EACnC,KAAK,sBAAsB,EAC3B,KAAK,wBAAwB,EAC7B,KAAK,eAAe,EACpB,KAAK,0BAA0B,IAAI,0BAA0B,EAC7D,KAAK,sCAAsC,IAAI,sCAAsC,EACrF,KAAK,mCAAmC,IAAI,mCAAmC,EAC/E,KAAK,iCAAiC,EACtC,KAAK,gCAAgC,GACtC,CAAC;IAEF,OAAO,EACL,QAAQ,IAAI,QAAQ,EACpB,KAAK,UAAU,IAAI,UAAU,EAC7B,KAAK,eAAe,IAAI,eAAe,EACvC,KAAK,sBAAsB,IAAI,sBAAsB,EACrD,KAAK,2BAA2B,IAAI,2BAA2B,EAC/D,KAAK,kBAAkB,IAAI,kBAAkB,EAC7C,KAAK,uBAAuB,IAAI,uBAAuB,EACvD,KAAK,SAAS,IAAI,SAAS,EAC3B,KAAK,qBAAqB,IAAI,qBAAqB,EACnD,KAAK,cAAc,IAAI,cAAc,EACrC,KAAK,mBAAmB,IAAI,mBAAmB,EAC/C,KAAK,QAAQ,IAAI,QAAQ,EACzB,KAAK,oBAAoB,IAAI,oBAAoB,EACjD,KAAK,aAAa,IAAI,aAAa,EACnC,KAAK,kBAAkB,IAAI,kBAAkB,EAC7C,KAAK,kBAAkB,IAAI,OAAO,EAClC,KAAK,cAAc,IAAI,cAAc,EACrC,KAAK,mBAAmB,IAAI,mBAAmB,EAC/C,KAAK,uBAAuB,IAAI,uBAAuB,EACvD,KAAK,cAAc,IAAI,cAAc,EACrC,KAAK,YAAY,IAAI,YAAY,EACjC,KAAK,iBAAiB,IAAI,iBAAiB,EAC3C,KAAK,mBAAmB,IAAI,mBAAmB,EAC/C,KAAK,iBAAiB,IAAI,iBAAiB,EAC3C,KAAK,IAAI,IAAI,IAAI,EACjB,KAAK,gBAAgB,IAAI,gBAAgB,EACzC,KAAK,qBAAqB,IAAI,qBAAqB,EACnD,KAAK,SAAS,IAAI,SAAS,EAC3B,KAAK,cAAc,IAAI,cAAc,EACrC,YAAY,IAAI,YAAY,EAC5B,KAAK,mBAAmB,IAAI,mBAAmB,EAC/C,KAAK,mBAAmB,IAAI,mBAAmB,EAC/C,KAAK,iBAAiB,IAAI,iBAAiB,GAC5C,CAAC;CACH"} \ No newline at end of file diff --git a/node_modules/openai/resources/beta/threads/threads.js b/node_modules/openai/resources/beta/threads/threads.js new file mode 100644 index 0000000..37b3745 --- /dev/null +++ b/node_modules/openai/resources/beta/threads/threads.js @@ -0,0 +1,108 @@ +"use strict"; +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Threads = void 0; +const resource_1 = require("../../../resource.js"); +const core_1 = require("../../../core.js"); +const AssistantStream_1 = require("../../../lib/AssistantStream.js"); +const MessagesAPI = __importStar(require("./messages.js")); +const messages_1 = require("./messages.js"); +const RunsAPI = __importStar(require("./runs/runs.js")); +const runs_1 = require("./runs/runs.js"); +class Threads extends resource_1.APIResource { + constructor() { + super(...arguments); + this.runs = new RunsAPI.Runs(this._client); + this.messages = new MessagesAPI.Messages(this._client); + } + create(body = {}, options) { + if ((0, core_1.isRequestOptions)(body)) { + return this.create({}, body); + } + return this._client.post('/threads', { + body, + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + }); + } + /** + * Retrieves a thread. + */ + retrieve(threadId, options) { + return this._client.get(`/threads/${threadId}`, { + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + }); + } + /** + * Modifies a thread. + */ + update(threadId, body, options) { + return this._client.post(`/threads/${threadId}`, { + body, + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + }); + } + /** + * Delete a thread. + */ + del(threadId, options) { + return this._client.delete(`/threads/${threadId}`, { + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + }); + } + createAndRun(body, options) { + return this._client.post('/threads/runs', { + body, + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + stream: body.stream ?? false, + }); + } + /** + * A helper to create a thread, start a run and then poll for a terminal state. 
+ * More information on Run lifecycles can be found here: + * https://platform.openai.com/docs/assistants/how-it-works/runs-and-run-steps + */ + async createAndRunPoll(body, options) { + const run = await this.createAndRun(body, options); + return await this.runs.poll(run.thread_id, run.id, options); + } + /** + * Create a thread and stream the run back + */ + createAndRunStream(body, options) { + return AssistantStream_1.AssistantStream.createThreadAssistantStream(body, this._client.beta.threads, options); + } +} +exports.Threads = Threads; +Threads.Runs = runs_1.Runs; +Threads.RunsPage = runs_1.RunsPage; +Threads.Messages = messages_1.Messages; +Threads.MessagesPage = messages_1.MessagesPage; +//# sourceMappingURL=threads.js.map \ No newline at end of file diff --git a/node_modules/openai/resources/beta/threads/threads.js.map b/node_modules/openai/resources/beta/threads/threads.js.map new file mode 100644 index 0000000..673d948 --- /dev/null +++ b/node_modules/openai/resources/beta/threads/threads.js.map @@ -0,0 +1 @@ +{"version":3,"file":"threads.js","sourceRoot":"","sources":["../../../src/resources/beta/threads/threads.ts"],"names":[],"mappings":";AAAA,sFAAsF;;;;;;;;;;;;;;;;;;;;;;;;;;AAEtF,mDAAgD;AAChD,2CAAiD;AACjD,qEAAmG;AAOnG,2DAA0C;AAC1C,4CAkCoB;AAEpB,wDAAuC;AACvC,yCAmBqB;AAGrB,MAAa,OAAQ,SAAQ,sBAAW;IAAxC;;QACE,SAAI,GAAiB,IAAI,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;QACpD,aAAQ,GAAyB,IAAI,WAAW,CAAC,QAAQ,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;IAqG1E,CAAC;IA9FC,MAAM,CACJ,OAAiD,EAAE,EACnD,OAA6B;QAE7B,IAAI,IAAA,uBAAgB,EAAC,IAAI,CAAC,EAAE;YAC1B,OAAO,IAAI,CAAC,MAAM,CAAC,EAAE,EAAE,IAAI,CAAC,CAAC;SAC9B;QACD,OAAO,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,UAAU,EAAE;YACnC,IAAI;YACJ,GAAG,OAAO;YACV,OAAO,EAAE,EAAE,aAAa,EAAE,eAAe,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE;SACjE,CAAC,CAAC;IACL,CAAC;IAED;;OAEG;IACH,QAAQ,CAAC,QAAgB,EAAE,OAA6B;QACtD,OAAO,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,YAAY,QAAQ,EAAE,EAAE;YAC9C,GAAG,OAAO;YACV,OAAO,EAAE,EAAE,aAAa,EAAE,eAAe,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE;SACjE,CAAC,CAAC;IACL,CAAC;IAED;;OAEG;IACH,MAAM,CAAC,QAAgB,EAAE,IAAwB,EAAE,OAA6B;QAC9E,OAAO,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,YAAY,QAAQ,EAAE,EAAE;YAC/C,IAAI;YACJ,GAAG,OAAO;YACV,OAAO,EAAE,EAAE,aAAa,EAAE,eAAe,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE;SACjE,CAAC,CAAC;IACL,CAAC;IAED;;OAEG;IACH,GAAG,CAAC,QAAgB,EAAE,OAA6B;QACjD,OAAO,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,YAAY,QAAQ,EAAE,EAAE;YACjD,GAAG,OAAO;YACV,OAAO,EAAE,EAAE,aAAa,EAAE,eAAe,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE;SACjE,CAAC,CAAC;IACL,CAAC;IAiBD,YAAY,CACV,IAA8B,EAC9B,OAA6B;QAE7B,OAAO,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,eAAe,EAAE;YACxC,IAAI;YACJ,GAAG,OAAO;YACV,OAAO,EAAE,EAAE,aAAa,EAAE,eAAe,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE;YAChE,MAAM,EAAE,IAAI,CAAC,MAAM,IAAI,KAAK;SAC7B,CAAqF,CAAC;IACzF,CAAC;IAED;;;;OAIG;IACH,KAAK,CAAC,gBAAgB,CACpB,IAA0C,EAC1C,OAA2D;QAE3D,MAAM,GAAG,GAAG,MAAM,IAAI,CAAC,YAAY,CAAC,IAAI,EAAE,OAAO,CAAC,CAAC;QACnD,OAAO,MAAM,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,SAAS,EAAE,GAAG,CAAC,EAAE,EAAE,OAAO,CAAC,CAAC;IAC9D,CAAC;IAED;;OAEG;IACH,kBAAkB,CAChB,IAAwC,EACxC,OAA6B;QAE7B,OAAO,iCAAe,CAAC,2BAA2B,CAAC,IAAI,EAAE,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;IAC/F,CAAC;CACF;AAvGD,0BAuGC;AA41CD,OAAO,CAAC,IAAI,GAAG,WAAI,CAAC;AACpB,OAAO,CAAC,QAAQ,GAAG,eAAQ,CAAC;AAC5B,OAAO,CAAC,QAAQ,GAAG,mBAAQ,CAAC;AAC5B,OAAO,CAAC,YAAY,GAAG,uBAAY,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/resources/beta/threads/threads.mjs b/node_modules/openai/resources/beta/threads/threads.mjs new file mode 100644 index 0000000..20a5e23 --- /dev/null +++ b/node_modules/openai/resources/beta/threads/threads.mjs @@ -0,0 +1,81 @@ +// File 
generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. +import { APIResource } from "../../../resource.mjs"; +import { isRequestOptions } from "../../../core.mjs"; +import { AssistantStream } from "../../../lib/AssistantStream.mjs"; +import * as MessagesAPI from "./messages.mjs"; +import { Messages, MessagesPage, } from "./messages.mjs"; +import * as RunsAPI from "./runs/runs.mjs"; +import { Runs, RunsPage, } from "./runs/runs.mjs"; +export class Threads extends APIResource { + constructor() { + super(...arguments); + this.runs = new RunsAPI.Runs(this._client); + this.messages = new MessagesAPI.Messages(this._client); + } + create(body = {}, options) { + if (isRequestOptions(body)) { + return this.create({}, body); + } + return this._client.post('/threads', { + body, + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + }); + } + /** + * Retrieves a thread. + */ + retrieve(threadId, options) { + return this._client.get(`/threads/${threadId}`, { + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + }); + } + /** + * Modifies a thread. + */ + update(threadId, body, options) { + return this._client.post(`/threads/${threadId}`, { + body, + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + }); + } + /** + * Delete a thread. + */ + del(threadId, options) { + return this._client.delete(`/threads/${threadId}`, { + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + }); + } + createAndRun(body, options) { + return this._client.post('/threads/runs', { + body, + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + stream: body.stream ?? false, + }); + } + /** + * A helper to create a thread, start a run and then poll for a terminal state. 
+ * More information on Run lifecycles can be found here: + * https://platform.openai.com/docs/assistants/how-it-works/runs-and-run-steps + */ + async createAndRunPoll(body, options) { + const run = await this.createAndRun(body, options); + return await this.runs.poll(run.thread_id, run.id, options); + } + /** + * Create a thread and stream the run back + */ + createAndRunStream(body, options) { + return AssistantStream.createThreadAssistantStream(body, this._client.beta.threads, options); + } +} +Threads.Runs = Runs; +Threads.RunsPage = RunsPage; +Threads.Messages = Messages; +Threads.MessagesPage = MessagesPage; +//# sourceMappingURL=threads.mjs.map \ No newline at end of file diff --git a/node_modules/openai/resources/beta/threads/threads.mjs.map b/node_modules/openai/resources/beta/threads/threads.mjs.map new file mode 100644 index 0000000..e644651 --- /dev/null +++ b/node_modules/openai/resources/beta/threads/threads.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"threads.mjs","sourceRoot":"","sources":["../../../src/resources/beta/threads/threads.ts"],"names":[],"mappings":"AAAA,sFAAsF;OAE/E,EAAE,WAAW,EAAE;OACf,EAAE,gBAAgB,EAAE;OACpB,EAAE,eAAe,EAAsC;OAOvD,KAAK,WAAW;OAChB,EAyBL,QAAQ,EACR,YAAY,GAQb;OAEM,KAAK,OAAO;OACZ,EAiBL,IAAI,EACJ,QAAQ,GACT;AAGD,MAAM,OAAO,OAAQ,SAAQ,WAAW;IAAxC;;QACE,SAAI,GAAiB,IAAI,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;QACpD,aAAQ,GAAyB,IAAI,WAAW,CAAC,QAAQ,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;IAqG1E,CAAC;IA9FC,MAAM,CACJ,OAAiD,EAAE,EACnD,OAA6B;QAE7B,IAAI,gBAAgB,CAAC,IAAI,CAAC,EAAE;YAC1B,OAAO,IAAI,CAAC,MAAM,CAAC,EAAE,EAAE,IAAI,CAAC,CAAC;SAC9B;QACD,OAAO,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,UAAU,EAAE;YACnC,IAAI;YACJ,GAAG,OAAO;YACV,OAAO,EAAE,EAAE,aAAa,EAAE,eAAe,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE;SACjE,CAAC,CAAC;IACL,CAAC;IAED;;OAEG;IACH,QAAQ,CAAC,QAAgB,EAAE,OAA6B;QACtD,OAAO,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,YAAY,QAAQ,EAAE,EAAE;YAC9C,GAAG,OAAO;YACV,OAAO,EAAE,EAAE,aAAa,EAAE,eAAe,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE;SACjE,CAAC,CAAC;IACL,CAAC;IAED;;OAEG;IACH,MAAM,CAAC,QAAgB,EAAE,IAAwB,EAAE,OAA6B;QAC9E,OAAO,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,YAAY,QAAQ,EAAE,EAAE;YAC/C,IAAI;YACJ,GAAG,OAAO;YACV,OAAO,EAAE,EAAE,aAAa,EAAE,eAAe,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE;SACjE,CAAC,CAAC;IACL,CAAC;IAED;;OAEG;IACH,GAAG,CAAC,QAAgB,EAAE,OAA6B;QACjD,OAAO,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,YAAY,QAAQ,EAAE,EAAE;YACjD,GAAG,OAAO;YACV,OAAO,EAAE,EAAE,aAAa,EAAE,eAAe,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE;SACjE,CAAC,CAAC;IACL,CAAC;IAiBD,YAAY,CACV,IAA8B,EAC9B,OAA6B;QAE7B,OAAO,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,eAAe,EAAE;YACxC,IAAI;YACJ,GAAG,OAAO;YACV,OAAO,EAAE,EAAE,aAAa,EAAE,eAAe,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE;YAChE,MAAM,EAAE,IAAI,CAAC,MAAM,IAAI,KAAK;SAC7B,CAAqF,CAAC;IACzF,CAAC;IAED;;;;OAIG;IACH,KAAK,CAAC,gBAAgB,CACpB,IAA0C,EAC1C,OAA2D;QAE3D,MAAM,GAAG,GAAG,MAAM,IAAI,CAAC,YAAY,CAAC,IAAI,EAAE,OAAO,CAAC,CAAC;QACnD,OAAO,MAAM,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,SAAS,EAAE,GAAG,CAAC,EAAE,EAAE,OAAO,CAAC,CAAC;IAC9D,CAAC;IAED;;OAEG;IACH,kBAAkB,CAChB,IAAwC,EACxC,OAA6B;QAE7B,OAAO,eAAe,CAAC,2BAA2B,CAAC,IAAI,EAAE,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;IAC/F,CAAC;CACF;AA41CD,OAAO,CAAC,IAAI,GAAG,IAAI,CAAC;AACpB,OAAO,CAAC,QAAQ,GAAG,QAAQ,CAAC;AAC5B,OAAO,CAAC,QAAQ,GAAG,QAAQ,CAAC;AAC5B,OAAO,CAAC,YAAY,GAAG,YAAY,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/resources/beta/vector-stores/file-batches.d.ts b/node_modules/openai/resources/beta/vector-stores/file-batches.d.ts new file mode 100644 index 0000000..3eff462 --- /dev/null +++ b/node_modules/openai/resources/beta/vector-stores/file-batches.d.ts @@ -0,0 +1,145 @@ +import { APIResource } 
from "../../../resource.js"; +import { Uploadable } from "../../../core.js"; +import * as Core from "../../../core.js"; +import * as FilesAPI from "./files.js"; +import { VectorStoreFilesPage } from "./files.js"; +import * as VectorStoresAPI from "./vector-stores.js"; +import { type CursorPageParams } from "../../../pagination.js"; +export declare class FileBatches extends APIResource { + /** + * Create a vector store file batch. + */ + create(vectorStoreId: string, body: FileBatchCreateParams, options?: Core.RequestOptions): Core.APIPromise; + /** + * Retrieves a vector store file batch. + */ + retrieve(vectorStoreId: string, batchId: string, options?: Core.RequestOptions): Core.APIPromise; + /** + * Cancel a vector store file batch. This attempts to cancel the processing of + * files in this batch as soon as possible. + */ + cancel(vectorStoreId: string, batchId: string, options?: Core.RequestOptions): Core.APIPromise; + /** + * Create a vector store batch and poll until all files have been processed. + */ + createAndPoll(vectorStoreId: string, body: FileBatchCreateParams, options?: Core.RequestOptions & { + pollIntervalMs?: number; + }): Promise; + /** + * Returns a list of vector store files in a batch. + */ + listFiles(vectorStoreId: string, batchId: string, query?: FileBatchListFilesParams, options?: Core.RequestOptions): Core.PagePromise; + listFiles(vectorStoreId: string, batchId: string, options?: Core.RequestOptions): Core.PagePromise; + /** + * Wait for the given file batch to be processed. + * + * Note: this will return even if one of the files failed to process, you need to + * check batch.file_counts.failed_count to handle this case. + */ + poll(vectorStoreId: string, batchId: string, options?: Core.RequestOptions & { + pollIntervalMs?: number; + }): Promise; + /** + * Uploads the given files concurrently and then creates a vector store file batch. + * + * The concurrency limit is configurable using the `maxConcurrency` parameter. + */ + uploadAndPoll(vectorStoreId: string, { files, fileIds }: { + files: Uploadable[]; + fileIds?: string[]; + }, options?: Core.RequestOptions & { + pollIntervalMs?: number; + maxConcurrency?: number; + }): Promise; +} +/** + * A batch of files attached to a vector store. + */ +export interface VectorStoreFileBatch { + /** + * The identifier, which can be referenced in API endpoints. + */ + id: string; + /** + * The Unix timestamp (in seconds) for when the vector store files batch was + * created. + */ + created_at: number; + file_counts: VectorStoreFileBatch.FileCounts; + /** + * The object type, which is always `vector_store.file_batch`. + */ + object: 'vector_store.files_batch'; + /** + * The status of the vector store files batch, which can be either `in_progress`, + * `completed`, `cancelled` or `failed`. + */ + status: 'in_progress' | 'completed' | 'cancelled' | 'failed'; + /** + * The ID of the + * [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + * that the [File](https://platform.openai.com/docs/api-reference/files) is + * attached to. + */ + vector_store_id: string; +} +export declare namespace VectorStoreFileBatch { + interface FileCounts { + /** + * The number of files that where cancelled. + */ + cancelled: number; + /** + * The number of files that have been processed. + */ + completed: number; + /** + * The number of files that have failed to process. + */ + failed: number; + /** + * The number of files that are currently being processed. 
+ */ + in_progress: number; + /** + * The total number of files. + */ + total: number; + } +} +export interface FileBatchCreateParams { + /** + * A list of [File](https://platform.openai.com/docs/api-reference/files) IDs that + * the vector store should use. Useful for tools like `file_search` that can access + * files. + */ + file_ids: Array; + /** + * The chunking strategy used to chunk the file(s). If not set, will use the `auto` + * strategy. Only applicable if `file_ids` is non-empty. + */ + chunking_strategy?: VectorStoresAPI.FileChunkingStrategyParam; +} +export interface FileBatchListFilesParams extends CursorPageParams { + /** + * A cursor for use in pagination. `before` is an object ID that defines your place + * in the list. For instance, if you make a list request and receive 100 objects, + * starting with obj_foo, your subsequent call can include before=obj_foo in order + * to fetch the previous page of the list. + */ + before?: string; + /** + * Filter by file status. One of `in_progress`, `completed`, `failed`, `cancelled`. + */ + filter?: 'in_progress' | 'completed' | 'failed' | 'cancelled'; + /** + * Sort order by the `created_at` timestamp of the objects. `asc` for ascending + * order and `desc` for descending order. + */ + order?: 'asc' | 'desc'; +} +export declare namespace FileBatches { + export { type VectorStoreFileBatch as VectorStoreFileBatch, type FileBatchCreateParams as FileBatchCreateParams, type FileBatchListFilesParams as FileBatchListFilesParams, }; +} +export { VectorStoreFilesPage }; +//# sourceMappingURL=file-batches.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/resources/beta/vector-stores/file-batches.d.ts.map b/node_modules/openai/resources/beta/vector-stores/file-batches.d.ts.map new file mode 100644 index 0000000..241a775 --- /dev/null +++ b/node_modules/openai/resources/beta/vector-stores/file-batches.d.ts.map @@ -0,0 +1 @@ 
+{"version":3,"file":"file-batches.d.ts","sourceRoot":"","sources":["../../../src/resources/beta/vector-stores/file-batches.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,WAAW,EAAE,MAAM,mBAAmB,CAAC;AAGhD,OAAO,EAAE,UAAU,EAAE,MAAM,eAAe,CAAC;AAE3C,OAAO,KAAK,IAAI,MAAM,eAAe,CAAC;AACtC,OAAO,KAAK,QAAQ,MAAM,SAAS,CAAC;AACpC,OAAO,EAAE,oBAAoB,EAAE,MAAM,SAAS,CAAC;AAC/C,OAAO,KAAK,eAAe,MAAM,iBAAiB,CAAC;AACnD,OAAO,EAAE,KAAK,gBAAgB,EAAE,MAAM,qBAAqB,CAAC;AAE5D,qBAAa,WAAY,SAAQ,WAAW;IAC1C;;OAEG;IACH,MAAM,CACJ,aAAa,EAAE,MAAM,EACrB,IAAI,EAAE,qBAAqB,EAC3B,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAC5B,IAAI,CAAC,UAAU,CAAC,oBAAoB,CAAC;IAQxC;;OAEG;IACH,QAAQ,CACN,aAAa,EAAE,MAAM,EACrB,OAAO,EAAE,MAAM,EACf,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAC5B,IAAI,CAAC,UAAU,CAAC,oBAAoB,CAAC;IAOxC;;;OAGG;IACH,MAAM,CACJ,aAAa,EAAE,MAAM,EACrB,OAAO,EAAE,MAAM,EACf,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAC5B,IAAI,CAAC,UAAU,CAAC,oBAAoB,CAAC;IAOxC;;OAEG;IACG,aAAa,CACjB,aAAa,EAAE,MAAM,EACrB,IAAI,EAAE,qBAAqB,EAC3B,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAAG;QAAE,cAAc,CAAC,EAAE,MAAM,CAAA;KAAE,GAC1D,OAAO,CAAC,oBAAoB,CAAC;IAKhC;;OAEG;IACH,SAAS,CACP,aAAa,EAAE,MAAM,EACrB,OAAO,EAAE,MAAM,EACf,KAAK,CAAC,EAAE,wBAAwB,EAChC,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAC5B,IAAI,CAAC,WAAW,CAAC,oBAAoB,EAAE,QAAQ,CAAC,eAAe,CAAC;IACnE,SAAS,CACP,aAAa,EAAE,MAAM,EACrB,OAAO,EAAE,MAAM,EACf,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAC5B,IAAI,CAAC,WAAW,CAAC,oBAAoB,EAAE,QAAQ,CAAC,eAAe,CAAC;IAiBnE;;;;;OAKG;IACG,IAAI,CACR,aAAa,EAAE,MAAM,EACrB,OAAO,EAAE,MAAM,EACf,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAAG;QAAE,cAAc,CAAC,EAAE,MAAM,CAAA;KAAE,GAC1D,OAAO,CAAC,oBAAoB,CAAC;IAqChC;;;;OAIG;IACG,aAAa,CACjB,aAAa,EAAE,MAAM,EACrB,EAAE,KAAK,EAAE,OAAY,EAAE,EAAE;QAAE,KAAK,EAAE,UAAU,EAAE,CAAC;QAAC,OAAO,CAAC,EAAE,MAAM,EAAE,CAAA;KAAE,EACpE,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAAG;QAAE,cAAc,CAAC,EAAE,MAAM,CAAC;QAAC,cAAc,CAAC,EAAE,MAAM,CAAA;KAAE,GACnF,OAAO,CAAC,oBAAoB,CAAC;CAmCjC;AAED;;GAEG;AACH,MAAM,WAAW,oBAAoB;IACnC;;OAEG;IACH,EAAE,EAAE,MAAM,CAAC;IAEX;;;OAGG;IACH,UAAU,EAAE,MAAM,CAAC;IAEnB,WAAW,EAAE,oBAAoB,CAAC,UAAU,CAAC;IAE7C;;OAEG;IACH,MAAM,EAAE,0BAA0B,CAAC;IAEnC;;;OAGG;IACH,MAAM,EAAE,aAAa,GAAG,WAAW,GAAG,WAAW,GAAG,QAAQ,CAAC;IAE7D;;;;;OAKG;IACH,eAAe,EAAE,MAAM,CAAC;CACzB;AAED,yBAAiB,oBAAoB,CAAC;IACpC,UAAiB,UAAU;QACzB;;WAEG;QACH,SAAS,EAAE,MAAM,CAAC;QAElB;;WAEG;QACH,SAAS,EAAE,MAAM,CAAC;QAElB;;WAEG;QACH,MAAM,EAAE,MAAM,CAAC;QAEf;;WAEG;QACH,WAAW,EAAE,MAAM,CAAC;QAEpB;;WAEG;QACH,KAAK,EAAE,MAAM,CAAC;KACf;CACF;AAED,MAAM,WAAW,qBAAqB;IACpC;;;;OAIG;IACH,QAAQ,EAAE,KAAK,CAAC,MAAM,CAAC,CAAC;IAExB;;;OAGG;IACH,iBAAiB,CAAC,EAAE,eAAe,CAAC,yBAAyB,CAAC;CAC/D;AAED,MAAM,WAAW,wBAAyB,SAAQ,gBAAgB;IAChE;;;;;OAKG;IACH,MAAM,CAAC,EAAE,MAAM,CAAC;IAEhB;;OAEG;IACH,MAAM,CAAC,EAAE,aAAa,GAAG,WAAW,GAAG,QAAQ,GAAG,WAAW,CAAC;IAE9D;;;OAGG;IACH,KAAK,CAAC,EAAE,KAAK,GAAG,MAAM,CAAC;CACxB;AAED,MAAM,CAAC,OAAO,WAAW,WAAW,CAAC;IACnC,OAAO,EACL,KAAK,oBAAoB,IAAI,oBAAoB,EACjD,KAAK,qBAAqB,IAAI,qBAAqB,EACnD,KAAK,wBAAwB,IAAI,wBAAwB,GAC1D,CAAC;CACH;AAED,OAAO,EAAE,oBAAoB,EAAE,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/resources/beta/vector-stores/file-batches.js b/node_modules/openai/resources/beta/vector-stores/file-batches.js new file mode 100644 index 0000000..3ab2102 --- /dev/null +++ b/node_modules/openai/resources/beta/vector-stores/file-batches.js @@ -0,0 +1,127 @@ +"use strict"; +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+Object.defineProperty(exports, "__esModule", { value: true }); +exports.VectorStoreFilesPage = exports.FileBatches = void 0; +const resource_1 = require("../../../resource.js"); +const core_1 = require("../../../core.js"); +const core_2 = require("../../../core.js"); +const Util_1 = require("../../../lib/Util.js"); +const files_1 = require("./files.js"); +Object.defineProperty(exports, "VectorStoreFilesPage", { enumerable: true, get: function () { return files_1.VectorStoreFilesPage; } }); +class FileBatches extends resource_1.APIResource { + /** + * Create a vector store file batch. + */ + create(vectorStoreId, body, options) { + return this._client.post(`/vector_stores/${vectorStoreId}/file_batches`, { + body, + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + }); + } + /** + * Retrieves a vector store file batch. + */ + retrieve(vectorStoreId, batchId, options) { + return this._client.get(`/vector_stores/${vectorStoreId}/file_batches/${batchId}`, { + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + }); + } + /** + * Cancel a vector store file batch. This attempts to cancel the processing of + * files in this batch as soon as possible. + */ + cancel(vectorStoreId, batchId, options) { + return this._client.post(`/vector_stores/${vectorStoreId}/file_batches/${batchId}/cancel`, { + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + }); + } + /** + * Create a vector store batch and poll until all files have been processed. + */ + async createAndPoll(vectorStoreId, body, options) { + const batch = await this.create(vectorStoreId, body); + return await this.poll(vectorStoreId, batch.id, options); + } + listFiles(vectorStoreId, batchId, query = {}, options) { + if ((0, core_1.isRequestOptions)(query)) { + return this.listFiles(vectorStoreId, batchId, {}, query); + } + return this._client.getAPIList(`/vector_stores/${vectorStoreId}/file_batches/${batchId}/files`, files_1.VectorStoreFilesPage, { query, ...options, headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers } }); + } + /** + * Wait for the given file batch to be processed. + * + * Note: this will return even if one of the files failed to process, you need to + * check batch.file_counts.failed_count to handle this case. + */ + async poll(vectorStoreId, batchId, options) { + const headers = { ...options?.headers, 'X-Stainless-Poll-Helper': 'true' }; + if (options?.pollIntervalMs) { + headers['X-Stainless-Custom-Poll-Interval'] = options.pollIntervalMs.toString(); + } + while (true) { + const { data: batch, response } = await this.retrieve(vectorStoreId, batchId, { + ...options, + headers, + }).withResponse(); + switch (batch.status) { + case 'in_progress': + let sleepInterval = 5000; + if (options?.pollIntervalMs) { + sleepInterval = options.pollIntervalMs; + } + else { + const headerInterval = response.headers.get('openai-poll-after-ms'); + if (headerInterval) { + const headerIntervalMs = parseInt(headerInterval); + if (!isNaN(headerIntervalMs)) { + sleepInterval = headerIntervalMs; + } + } + } + await (0, core_2.sleep)(sleepInterval); + break; + case 'failed': + case 'cancelled': + case 'completed': + return batch; + } + } + } + /** + * Uploads the given files concurrently and then creates a vector store file batch. + * + * The concurrency limit is configurable using the `maxConcurrency` parameter. 
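For reference, a minimal sketch of how the uploadAndPoll helper declared here can be called from application code. It is an illustration only, not part of the vendored file: the client wiring (client.beta.vectorStores.fileBatches), the vs_abc123 ID, and the file names are placeholders, and the client is assumed to read OPENAI_API_KEY from the environment.

import fs from 'node:fs';
import OpenAI from 'openai';

const client = new OpenAI(); // assumes OPENAI_API_KEY is set in the environment

// Upload two local files with at most 3 concurrent uploads, create the
// batch, and poll until every file reaches a terminal status.
const batch = await client.beta.vectorStores.fileBatches.uploadAndPoll(
  'vs_abc123', // placeholder vector store ID
  { files: [fs.createReadStream('notes.md'), fs.createReadStream('spec.pdf')] },
  { maxConcurrency: 3, pollIntervalMs: 2_000 },
);
console.log(batch.status, batch.file_counts);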
+ */ + async uploadAndPoll(vectorStoreId, { files, fileIds = [] }, options) { + if (files == null || files.length == 0) { + throw new Error(`No \`files\` provided to process. If you've already uploaded files you should use \`.createAndPoll()\` instead`); + } + const configuredConcurrency = options?.maxConcurrency ?? 5; + // We cap the number of workers at the number of files (so we don't start any unnecessary workers) + const concurrencyLimit = Math.min(configuredConcurrency, files.length); + const client = this._client; + const fileIterator = files.values(); + const allFileIds = [...fileIds]; + // This code is based on this design. The libraries don't accommodate our environment limits. + // https://stackoverflow.com/questions/40639432/what-is-the-best-way-to-limit-concurrency-when-using-es6s-promise-all + async function processFiles(iterator) { + for (let item of iterator) { + const fileObj = await client.files.create({ file: item, purpose: 'assistants' }, options); + allFileIds.push(fileObj.id); + } + } + // Start workers to process results + const workers = Array(concurrencyLimit).fill(fileIterator).map(processFiles); + // Wait for all processing to complete. + await (0, Util_1.allSettledWithThrow)(workers); + return await this.createAndPoll(vectorStoreId, { + file_ids: allFileIds, + }); + } +} +exports.FileBatches = FileBatches; +//# sourceMappingURL=file-batches.js.map \ No newline at end of file diff --git a/node_modules/openai/resources/beta/vector-stores/file-batches.js.map b/node_modules/openai/resources/beta/vector-stores/file-batches.js.map new file mode 100644 index 0000000..fa81468 --- /dev/null +++ b/node_modules/openai/resources/beta/vector-stores/file-batches.js.map @@ -0,0 +1 @@ +{"version":3,"file":"file-batches.js","sourceRoot":"","sources":["../../../src/resources/beta/vector-stores/file-batches.ts"],"names":[],"mappings":";AAAA,sFAAsF;;;AAEtF,mDAAgD;AAChD,2CAAiD;AACjD,2CAAsC;AAEtC,+CAAwD;AAGxD,sCAA+C;AAsStC,qGAtSA,4BAAoB,OAsSA;AAlS7B,MAAa,WAAY,SAAQ,sBAAW;IAC1C;;OAEG;IACH,MAAM,CACJ,aAAqB,EACrB,IAA2B,EAC3B,OAA6B;QAE7B,OAAO,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,kBAAkB,aAAa,eAAe,EAAE;YACvE,IAAI;YACJ,GAAG,OAAO;YACV,OAAO,EAAE,EAAE,aAAa,EAAE,eAAe,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE;SACjE,CAAC,CAAC;IACL,CAAC;IAED;;OAEG;IACH,QAAQ,CACN,aAAqB,EACrB,OAAe,EACf,OAA6B;QAE7B,OAAO,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,kBAAkB,aAAa,iBAAiB,OAAO,EAAE,EAAE;YACjF,GAAG,OAAO;YACV,OAAO,EAAE,EAAE,aAAa,EAAE,eAAe,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE;SACjE,CAAC,CAAC;IACL,CAAC;IAED;;;OAGG;IACH,MAAM,CACJ,aAAqB,EACrB,OAAe,EACf,OAA6B;QAE7B,OAAO,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,kBAAkB,aAAa,iBAAiB,OAAO,SAAS,EAAE;YACzF,GAAG,OAAO;YACV,OAAO,EAAE,EAAE,aAAa,EAAE,eAAe,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE;SACjE,CAAC,CAAC;IACL,CAAC;IAED;;OAEG;IACH,KAAK,CAAC,aAAa,CACjB,aAAqB,EACrB,IAA2B,EAC3B,OAA2D;QAE3D,MAAM,KAAK,GAAG,MAAM,IAAI,CAAC,MAAM,CAAC,aAAa,EAAE,IAAI,CAAC,CAAC;QACrD,OAAO,MAAM,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,KAAK,CAAC,EAAE,EAAE,OAAO,CAAC,CAAC;IAC3D,CAAC;IAgBD,SAAS,CACP,aAAqB,EACrB,OAAe,EACf,QAAwD,EAAE,EAC1D,OAA6B;QAE7B,IAAI,IAAA,uBAAgB,EAAC,KAAK,CAAC,EAAE;YAC3B,OAAO,IAAI,CAAC,SAAS,CAAC,aAAa,EAAE,OAAO,EAAE,EAAE,EAAE,KAAK,CAAC,CAAC;SAC1D;QACD,OAAO,IAAI,CAAC,OAAO,CAAC,UAAU,CAC5B,kBAAkB,aAAa,iBAAiB,OAAO,QAAQ,EAC/D,4BAAoB,EACpB,EAAE,KAAK,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE,EAAE,aAAa,EAAE,eAAe,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE,EAAE,CACxF,CAAC;IACJ,CAAC;IAED;;;;;OAKG;IACH,KAAK,CAAC,IAAI,CACR,aAAqB,EACrB,OAAe,EACf,OAA2D;QAE3D,MAAM,OAAO,GAA8B,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE,yBAAyB,EAAE,MAAM,EAAE,CAAC;QACtG,IAAI,OAAO,EAAE,cAAc,EAAE;YAC3B,OAAO,CAAC,kCAAkC,CAAC,GAAG,OAA
O,CAAC,cAAc,CAAC,QAAQ,EAAE,CAAC;SACjF;QAED,OAAO,IAAI,EAAE;YACX,MAAM,EAAE,IAAI,EAAE,KAAK,EAAE,QAAQ,EAAE,GAAG,MAAM,IAAI,CAAC,QAAQ,CAAC,aAAa,EAAE,OAAO,EAAE;gBAC5E,GAAG,OAAO;gBACV,OAAO;aACR,CAAC,CAAC,YAAY,EAAE,CAAC;YAElB,QAAQ,KAAK,CAAC,MAAM,EAAE;gBACpB,KAAK,aAAa;oBAChB,IAAI,aAAa,GAAG,IAAI,CAAC;oBAEzB,IAAI,OAAO,EAAE,cAAc,EAAE;wBAC3B,aAAa,GAAG,OAAO,CAAC,cAAc,CAAC;qBACxC;yBAAM;wBACL,MAAM,cAAc,GAAG,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,sBAAsB,CAAC,CAAC;wBACpE,IAAI,cAAc,EAAE;4BAClB,MAAM,gBAAgB,GAAG,QAAQ,CAAC,cAAc,CAAC,CAAC;4BAClD,IAAI,CAAC,KAAK,CAAC,gBAAgB,CAAC,EAAE;gCAC5B,aAAa,GAAG,gBAAgB,CAAC;6BAClC;yBACF;qBACF;oBACD,MAAM,IAAA,YAAK,EAAC,aAAa,CAAC,CAAC;oBAC3B,MAAM;gBACR,KAAK,QAAQ,CAAC;gBACd,KAAK,WAAW,CAAC;gBACjB,KAAK,WAAW;oBACd,OAAO,KAAK,CAAC;aAChB;SACF;IACH,CAAC;IAED;;;;OAIG;IACH,KAAK,CAAC,aAAa,CACjB,aAAqB,EACrB,EAAE,KAAK,EAAE,OAAO,GAAG,EAAE,EAA+C,EACpE,OAAoF;QAEpF,IAAI,KAAK,IAAI,IAAI,IAAI,KAAK,CAAC,MAAM,IAAI,CAAC,EAAE;YACtC,MAAM,IAAI,KAAK,CACb,gHAAgH,CACjH,CAAC;SACH;QAED,MAAM,qBAAqB,GAAG,OAAO,EAAE,cAAc,IAAI,CAAC,CAAC;QAE3D,kGAAkG;QAClG,MAAM,gBAAgB,GAAG,IAAI,CAAC,GAAG,CAAC,qBAAqB,EAAE,KAAK,CAAC,MAAM,CAAC,CAAC;QAEvE,MAAM,MAAM,GAAG,IAAI,CAAC,OAAO,CAAC;QAC5B,MAAM,YAAY,GAAG,KAAK,CAAC,MAAM,EAAE,CAAC;QACpC,MAAM,UAAU,GAAa,CAAC,GAAG,OAAO,CAAC,CAAC;QAE1C,6FAA6F;QAC7F,qHAAqH;QACrH,KAAK,UAAU,YAAY,CAAC,QAAsC;YAChE,KAAK,IAAI,IAAI,IAAI,QAAQ,EAAE;gBACzB,MAAM,OAAO,GAAG,MAAM,MAAM,CAAC,KAAK,CAAC,MAAM,CAAC,EAAE,IAAI,EAAE,IAAI,EAAE,OAAO,EAAE,YAAY,EAAE,EAAE,OAAO,CAAC,CAAC;gBAC1F,UAAU,CAAC,IAAI,CAAC,OAAO,CAAC,EAAE,CAAC,CAAC;aAC7B;QACH,CAAC;QAED,mCAAmC;QACnC,MAAM,OAAO,GAAG,KAAK,CAAC,gBAAgB,CAAC,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC,GAAG,CAAC,YAAY,CAAC,CAAC;QAE7E,uCAAuC;QACvC,MAAM,IAAA,0BAAmB,EAAC,OAAO,CAAC,CAAC;QAEnC,OAAO,MAAM,IAAI,CAAC,aAAa,CAAC,aAAa,EAAE;YAC7C,QAAQ,EAAE,UAAU;SACrB,CAAC,CAAC;IACL,CAAC;CACF;AAlLD,kCAkLC"} \ No newline at end of file diff --git a/node_modules/openai/resources/beta/vector-stores/file-batches.mjs b/node_modules/openai/resources/beta/vector-stores/file-batches.mjs new file mode 100644 index 0000000..bc9e91c --- /dev/null +++ b/node_modules/openai/resources/beta/vector-stores/file-batches.mjs @@ -0,0 +1,123 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. +import { APIResource } from "../../../resource.mjs"; +import { isRequestOptions } from "../../../core.mjs"; +import { sleep } from "../../../core.mjs"; +import { allSettledWithThrow } from "../../../lib/Util.mjs"; +import { VectorStoreFilesPage } from "./files.mjs"; +export class FileBatches extends APIResource { + /** + * Create a vector store file batch. + */ + create(vectorStoreId, body, options) { + return this._client.post(`/vector_stores/${vectorStoreId}/file_batches`, { + body, + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + }); + } + /** + * Retrieves a vector store file batch. + */ + retrieve(vectorStoreId, batchId, options) { + return this._client.get(`/vector_stores/${vectorStoreId}/file_batches/${batchId}`, { + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + }); + } + /** + * Cancel a vector store file batch. This attempts to cancel the processing of + * files in this batch as soon as possible. + */ + cancel(vectorStoreId, batchId, options) { + return this._client.post(`/vector_stores/${vectorStoreId}/file_batches/${batchId}/cancel`, { + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + }); + } + /** + * Create a vector store batch and poll until all files have been processed. 
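For files that were already uploaded through the Files API, createAndPoll skips the upload step. A short usage sketch with placeholder IDs, assuming an initialized client; because the polling loop resolves on any terminal status, file_counts is checked explicitly afterwards:

import OpenAI from 'openai';

const client = new OpenAI();

const batch = await client.beta.vectorStores.fileBatches.createAndPoll('vs_abc123', {
  file_ids: ['file-111', 'file-222'], // placeholder IDs from earlier client.files.create calls
});
// poll() also returns when files fail, so failures must be handled by the caller.
if (batch.file_counts.failed > 0) {
  console.warn(`${batch.file_counts.failed} of ${batch.file_counts.total} files failed to process`);
}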
+ */ + async createAndPoll(vectorStoreId, body, options) { + const batch = await this.create(vectorStoreId, body); + return await this.poll(vectorStoreId, batch.id, options); + } + listFiles(vectorStoreId, batchId, query = {}, options) { + if (isRequestOptions(query)) { + return this.listFiles(vectorStoreId, batchId, {}, query); + } + return this._client.getAPIList(`/vector_stores/${vectorStoreId}/file_batches/${batchId}/files`, VectorStoreFilesPage, { query, ...options, headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers } }); + } + /** + * Wait for the given file batch to be processed. + * + * Note: this will return even if one of the files failed to process, you need to + * check batch.file_counts.failed_count to handle this case. + */ + async poll(vectorStoreId, batchId, options) { + const headers = { ...options?.headers, 'X-Stainless-Poll-Helper': 'true' }; + if (options?.pollIntervalMs) { + headers['X-Stainless-Custom-Poll-Interval'] = options.pollIntervalMs.toString(); + } + while (true) { + const { data: batch, response } = await this.retrieve(vectorStoreId, batchId, { + ...options, + headers, + }).withResponse(); + switch (batch.status) { + case 'in_progress': + let sleepInterval = 5000; + if (options?.pollIntervalMs) { + sleepInterval = options.pollIntervalMs; + } + else { + const headerInterval = response.headers.get('openai-poll-after-ms'); + if (headerInterval) { + const headerIntervalMs = parseInt(headerInterval); + if (!isNaN(headerIntervalMs)) { + sleepInterval = headerIntervalMs; + } + } + } + await sleep(sleepInterval); + break; + case 'failed': + case 'cancelled': + case 'completed': + return batch; + } + } + } + /** + * Uploads the given files concurrently and then creates a vector store file batch. + * + * The concurrency limit is configurable using the `maxConcurrency` parameter. + */ + async uploadAndPoll(vectorStoreId, { files, fileIds = [] }, options) { + if (files == null || files.length == 0) { + throw new Error(`No \`files\` provided to process. If you've already uploaded files you should use \`.createAndPoll()\` instead`); + } + const configuredConcurrency = options?.maxConcurrency ?? 5; + // We cap the number of workers at the number of files (so we don't start any unnecessary workers) + const concurrencyLimit = Math.min(configuredConcurrency, files.length); + const client = this._client; + const fileIterator = files.values(); + const allFileIds = [...fileIds]; + // This code is based on this design. The libraries don't accommodate our environment limits. + // https://stackoverflow.com/questions/40639432/what-is-the-best-way-to-limit-concurrency-when-using-es6s-promise-all + async function processFiles(iterator) { + for (let item of iterator) { + const fileObj = await client.files.create({ file: item, purpose: 'assistants' }, options); + allFileIds.push(fileObj.id); + } + } + // Start workers to process results + const workers = Array(concurrencyLimit).fill(fileIterator).map(processFiles); + // Wait for all processing to complete. 
+ await allSettledWithThrow(workers); + return await this.createAndPoll(vectorStoreId, { + file_ids: allFileIds, + }); + } +} +export { VectorStoreFilesPage }; +//# sourceMappingURL=file-batches.mjs.map \ No newline at end of file diff --git a/node_modules/openai/resources/beta/vector-stores/file-batches.mjs.map b/node_modules/openai/resources/beta/vector-stores/file-batches.mjs.map new file mode 100644 index 0000000..d4d8a42 --- /dev/null +++ b/node_modules/openai/resources/beta/vector-stores/file-batches.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"file-batches.mjs","sourceRoot":"","sources":["../../../src/resources/beta/vector-stores/file-batches.ts"],"names":[],"mappings":"AAAA,sFAAsF;OAE/E,EAAE,WAAW,EAAE;OACf,EAAE,gBAAgB,EAAE;OACpB,EAAE,KAAK,EAAE;OAET,EAAE,mBAAmB,EAAE;OAGvB,EAAE,oBAAoB,EAAE;AAI/B,MAAM,OAAO,WAAY,SAAQ,WAAW;IAC1C;;OAEG;IACH,MAAM,CACJ,aAAqB,EACrB,IAA2B,EAC3B,OAA6B;QAE7B,OAAO,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,kBAAkB,aAAa,eAAe,EAAE;YACvE,IAAI;YACJ,GAAG,OAAO;YACV,OAAO,EAAE,EAAE,aAAa,EAAE,eAAe,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE;SACjE,CAAC,CAAC;IACL,CAAC;IAED;;OAEG;IACH,QAAQ,CACN,aAAqB,EACrB,OAAe,EACf,OAA6B;QAE7B,OAAO,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,kBAAkB,aAAa,iBAAiB,OAAO,EAAE,EAAE;YACjF,GAAG,OAAO;YACV,OAAO,EAAE,EAAE,aAAa,EAAE,eAAe,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE;SACjE,CAAC,CAAC;IACL,CAAC;IAED;;;OAGG;IACH,MAAM,CACJ,aAAqB,EACrB,OAAe,EACf,OAA6B;QAE7B,OAAO,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,kBAAkB,aAAa,iBAAiB,OAAO,SAAS,EAAE;YACzF,GAAG,OAAO;YACV,OAAO,EAAE,EAAE,aAAa,EAAE,eAAe,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE;SACjE,CAAC,CAAC;IACL,CAAC;IAED;;OAEG;IACH,KAAK,CAAC,aAAa,CACjB,aAAqB,EACrB,IAA2B,EAC3B,OAA2D;QAE3D,MAAM,KAAK,GAAG,MAAM,IAAI,CAAC,MAAM,CAAC,aAAa,EAAE,IAAI,CAAC,CAAC;QACrD,OAAO,MAAM,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,KAAK,CAAC,EAAE,EAAE,OAAO,CAAC,CAAC;IAC3D,CAAC;IAgBD,SAAS,CACP,aAAqB,EACrB,OAAe,EACf,QAAwD,EAAE,EAC1D,OAA6B;QAE7B,IAAI,gBAAgB,CAAC,KAAK,CAAC,EAAE;YAC3B,OAAO,IAAI,CAAC,SAAS,CAAC,aAAa,EAAE,OAAO,EAAE,EAAE,EAAE,KAAK,CAAC,CAAC;SAC1D;QACD,OAAO,IAAI,CAAC,OAAO,CAAC,UAAU,CAC5B,kBAAkB,aAAa,iBAAiB,OAAO,QAAQ,EAC/D,oBAAoB,EACpB,EAAE,KAAK,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE,EAAE,aAAa,EAAE,eAAe,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE,EAAE,CACxF,CAAC;IACJ,CAAC;IAED;;;;;OAKG;IACH,KAAK,CAAC,IAAI,CACR,aAAqB,EACrB,OAAe,EACf,OAA2D;QAE3D,MAAM,OAAO,GAA8B,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE,yBAAyB,EAAE,MAAM,EAAE,CAAC;QACtG,IAAI,OAAO,EAAE,cAAc,EAAE;YAC3B,OAAO,CAAC,kCAAkC,CAAC,GAAG,OAAO,CAAC,cAAc,CAAC,QAAQ,EAAE,CAAC;SACjF;QAED,OAAO,IAAI,EAAE;YACX,MAAM,EAAE,IAAI,EAAE,KAAK,EAAE,QAAQ,EAAE,GAAG,MAAM,IAAI,CAAC,QAAQ,CAAC,aAAa,EAAE,OAAO,EAAE;gBAC5E,GAAG,OAAO;gBACV,OAAO;aACR,CAAC,CAAC,YAAY,EAAE,CAAC;YAElB,QAAQ,KAAK,CAAC,MAAM,EAAE;gBACpB,KAAK,aAAa;oBAChB,IAAI,aAAa,GAAG,IAAI,CAAC;oBAEzB,IAAI,OAAO,EAAE,cAAc,EAAE;wBAC3B,aAAa,GAAG,OAAO,CAAC,cAAc,CAAC;qBACxC;yBAAM;wBACL,MAAM,cAAc,GAAG,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,sBAAsB,CAAC,CAAC;wBACpE,IAAI,cAAc,EAAE;4BAClB,MAAM,gBAAgB,GAAG,QAAQ,CAAC,cAAc,CAAC,CAAC;4BAClD,IAAI,CAAC,KAAK,CAAC,gBAAgB,CAAC,EAAE;gCAC5B,aAAa,GAAG,gBAAgB,CAAC;6BAClC;yBACF;qBACF;oBACD,MAAM,KAAK,CAAC,aAAa,CAAC,CAAC;oBAC3B,MAAM;gBACR,KAAK,QAAQ,CAAC;gBACd,KAAK,WAAW,CAAC;gBACjB,KAAK,WAAW;oBACd,OAAO,KAAK,CAAC;aAChB;SACF;IACH,CAAC;IAED;;;;OAIG;IACH,KAAK,CAAC,aAAa,CACjB,aAAqB,EACrB,EAAE,KAAK,EAAE,OAAO,GAAG,EAAE,EAA+C,EACpE,OAAoF;QAEpF,IAAI,KAAK,IAAI,IAAI,IAAI,KAAK,CAAC,MAAM,IAAI,CAAC,EAAE;YACtC,MAAM,IAAI,KAAK,CACb,gHAAgH,CACjH,CAAC;SACH;QAED,MAAM,qBAAqB,GAAG,OAAO,EAAE,cAAc,IAAI,CAAC,CAAC;QAE3D,kGAAkG;QAClG,MAAM,gBAAgB,GAAG,IAAI,CAAC,GAAG,CAAC,qBAAqB,EAAE,KAAK,CAAC,MAAM,CAAC,CAAC;QAEvE,MAAM,MAAM,GAAG,IAAI,CAAC,OAAO,CAAC;QAC5B,MAAM,YAAY,GAAG,KAAK,CAAC,
MAAM,EAAE,CAAC;QACpC,MAAM,UAAU,GAAa,CAAC,GAAG,OAAO,CAAC,CAAC;QAE1C,6FAA6F;QAC7F,qHAAqH;QACrH,KAAK,UAAU,YAAY,CAAC,QAAsC;YAChE,KAAK,IAAI,IAAI,IAAI,QAAQ,EAAE;gBACzB,MAAM,OAAO,GAAG,MAAM,MAAM,CAAC,KAAK,CAAC,MAAM,CAAC,EAAE,IAAI,EAAE,IAAI,EAAE,OAAO,EAAE,YAAY,EAAE,EAAE,OAAO,CAAC,CAAC;gBAC1F,UAAU,CAAC,IAAI,CAAC,OAAO,CAAC,EAAE,CAAC,CAAC;aAC7B;QACH,CAAC;QAED,mCAAmC;QACnC,MAAM,OAAO,GAAG,KAAK,CAAC,gBAAgB,CAAC,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC,GAAG,CAAC,YAAY,CAAC,CAAC;QAE7E,uCAAuC;QACvC,MAAM,mBAAmB,CAAC,OAAO,CAAC,CAAC;QAEnC,OAAO,MAAM,IAAI,CAAC,aAAa,CAAC,aAAa,EAAE;YAC7C,QAAQ,EAAE,UAAU;SACrB,CAAC,CAAC;IACL,CAAC;CACF;AAgHD,OAAO,EAAE,oBAAoB,EAAE,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/resources/beta/vector-stores/files.d.ts b/node_modules/openai/resources/beta/vector-stores/files.d.ts new file mode 100644 index 0000000..215c417 --- /dev/null +++ b/node_modules/openai/resources/beta/vector-stores/files.d.ts @@ -0,0 +1,159 @@ +import { APIResource } from "../../../resource.js"; +import { Uploadable } from "../../../core.js"; +import * as Core from "../../../core.js"; +import * as VectorStoresAPI from "./vector-stores.js"; +import { CursorPage, type CursorPageParams } from "../../../pagination.js"; +export declare class Files extends APIResource { + /** + * Create a vector store file by attaching a + * [File](https://platform.openai.com/docs/api-reference/files) to a + * [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object). + */ + create(vectorStoreId: string, body: FileCreateParams, options?: Core.RequestOptions): Core.APIPromise; + /** + * Retrieves a vector store file. + */ + retrieve(vectorStoreId: string, fileId: string, options?: Core.RequestOptions): Core.APIPromise; + /** + * Returns a list of vector store files. + */ + list(vectorStoreId: string, query?: FileListParams, options?: Core.RequestOptions): Core.PagePromise; + list(vectorStoreId: string, options?: Core.RequestOptions): Core.PagePromise; + /** + * Delete a vector store file. This will remove the file from the vector store but + * the file itself will not be deleted. To delete the file, use the + * [delete file](https://platform.openai.com/docs/api-reference/files/delete) + * endpoint. + */ + del(vectorStoreId: string, fileId: string, options?: Core.RequestOptions): Core.APIPromise; + /** + * Attach a file to the given vector store and wait for it to be processed. + */ + createAndPoll(vectorStoreId: string, body: FileCreateParams, options?: Core.RequestOptions & { + pollIntervalMs?: number; + }): Promise; + /** + * Wait for the vector store file to finish processing. + * + * Note: this will return even if the file failed to process, you need to check + * file.last_error and file.status to handle these cases + */ + poll(vectorStoreId: string, fileId: string, options?: Core.RequestOptions & { + pollIntervalMs?: number; + }): Promise; + /** + * Upload a file to the `files` API and then attach it to the given vector store. + * + * Note the file will be asynchronously processed (you can use the alternative + * polling helper method to wait for processing to complete). + */ + upload(vectorStoreId: string, file: Uploadable, options?: Core.RequestOptions): Promise; + /** + * Add a file to a vector store and poll until processing is complete. 
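As with batches, the per-file uploadAndPoll helper declared here combines the upload, the attach call, and the polling loop. A usage sketch only, assuming the same client setup as above and a placeholder path and store ID:

import fs from 'node:fs';
import OpenAI from 'openai';

const client = new OpenAI();

// Upload one file, attach it to the vector store, and wait for processing.
const vsFile = await client.beta.vectorStores.files.uploadAndPoll(
  'vs_abc123',
  fs.createReadStream('handbook.txt'),
);
console.log(vsFile.id, vsFile.status, vsFile.usage_bytes);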
+ */ + uploadAndPoll(vectorStoreId: string, file: Uploadable, options?: Core.RequestOptions & { + pollIntervalMs?: number; + }): Promise; +} +export declare class VectorStoreFilesPage extends CursorPage { +} +/** + * A list of files attached to a vector store. + */ +export interface VectorStoreFile { + /** + * The identifier, which can be referenced in API endpoints. + */ + id: string; + /** + * The Unix timestamp (in seconds) for when the vector store file was created. + */ + created_at: number; + /** + * The last error associated with this vector store file. Will be `null` if there + * are no errors. + */ + last_error: VectorStoreFile.LastError | null; + /** + * The object type, which is always `vector_store.file`. + */ + object: 'vector_store.file'; + /** + * The status of the vector store file, which can be either `in_progress`, + * `completed`, `cancelled`, or `failed`. The status `completed` indicates that the + * vector store file is ready for use. + */ + status: 'in_progress' | 'completed' | 'cancelled' | 'failed'; + /** + * The total vector store usage in bytes. Note that this may be different from the + * original file size. + */ + usage_bytes: number; + /** + * The ID of the + * [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + * that the [File](https://platform.openai.com/docs/api-reference/files) is + * attached to. + */ + vector_store_id: string; + /** + * The strategy used to chunk the file. + */ + chunking_strategy?: VectorStoresAPI.FileChunkingStrategy; +} +export declare namespace VectorStoreFile { + /** + * The last error associated with this vector store file. Will be `null` if there + * are no errors. + */ + interface LastError { + /** + * One of `server_error` or `rate_limit_exceeded`. + */ + code: 'server_error' | 'unsupported_file' | 'invalid_file'; + /** + * A human-readable description of the error. + */ + message: string; + } +} +export interface VectorStoreFileDeleted { + id: string; + deleted: boolean; + object: 'vector_store.file.deleted'; +} +export interface FileCreateParams { + /** + * A [File](https://platform.openai.com/docs/api-reference/files) ID that the + * vector store should use. Useful for tools like `file_search` that can access + * files. + */ + file_id: string; + /** + * The chunking strategy used to chunk the file(s). If not set, will use the `auto` + * strategy. Only applicable if `file_ids` is non-empty. + */ + chunking_strategy?: VectorStoresAPI.FileChunkingStrategyParam; +} +export interface FileListParams extends CursorPageParams { + /** + * A cursor for use in pagination. `before` is an object ID that defines your place + * in the list. For instance, if you make a list request and receive 100 objects, + * starting with obj_foo, your subsequent call can include before=obj_foo in order + * to fetch the previous page of the list. + */ + before?: string; + /** + * Filter by file status. One of `in_progress`, `completed`, `failed`, `cancelled`. + */ + filter?: 'in_progress' | 'completed' | 'failed' | 'cancelled'; + /** + * Sort order by the `created_at` timestamp of the objects. `asc` for ascending + * order and `desc` for descending order. 
+ */ + order?: 'asc' | 'desc'; +} +export declare namespace Files { + export { type VectorStoreFile as VectorStoreFile, type VectorStoreFileDeleted as VectorStoreFileDeleted, VectorStoreFilesPage as VectorStoreFilesPage, type FileCreateParams as FileCreateParams, type FileListParams as FileListParams, }; +} +//# sourceMappingURL=files.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/resources/beta/vector-stores/files.d.ts.map b/node_modules/openai/resources/beta/vector-stores/files.d.ts.map new file mode 100644 index 0000000..a6dc23a --- /dev/null +++ b/node_modules/openai/resources/beta/vector-stores/files.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"files.d.ts","sourceRoot":"","sources":["../../../src/resources/beta/vector-stores/files.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,WAAW,EAAE,MAAM,mBAAmB,CAAC;AAChD,OAAO,EAAS,UAAU,EAAoB,MAAM,eAAe,CAAC;AACpE,OAAO,KAAK,IAAI,MAAM,eAAe,CAAC;AACtC,OAAO,KAAK,eAAe,MAAM,iBAAiB,CAAC;AACnD,OAAO,EAAE,UAAU,EAAE,KAAK,gBAAgB,EAAE,MAAM,qBAAqB,CAAC;AAExE,qBAAa,KAAM,SAAQ,WAAW;IACpC;;;;OAIG;IACH,MAAM,CACJ,aAAa,EAAE,MAAM,EACrB,IAAI,EAAE,gBAAgB,EACtB,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAC5B,IAAI,CAAC,UAAU,CAAC,eAAe,CAAC;IAQnC;;OAEG;IACH,QAAQ,CACN,aAAa,EAAE,MAAM,EACrB,MAAM,EAAE,MAAM,EACd,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAC5B,IAAI,CAAC,UAAU,CAAC,eAAe,CAAC;IAOnC;;OAEG;IACH,IAAI,CACF,aAAa,EAAE,MAAM,EACrB,KAAK,CAAC,EAAE,cAAc,EACtB,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAC5B,IAAI,CAAC,WAAW,CAAC,oBAAoB,EAAE,eAAe,CAAC;IAC1D,IAAI,CACF,aAAa,EAAE,MAAM,EACrB,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAC5B,IAAI,CAAC,WAAW,CAAC,oBAAoB,EAAE,eAAe,CAAC;IAgB1D;;;;;OAKG;IACH,GAAG,CACD,aAAa,EAAE,MAAM,EACrB,MAAM,EAAE,MAAM,EACd,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAC5B,IAAI,CAAC,UAAU,CAAC,sBAAsB,CAAC;IAO1C;;OAEG;IACG,aAAa,CACjB,aAAa,EAAE,MAAM,EACrB,IAAI,EAAE,gBAAgB,EACtB,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAAG;QAAE,cAAc,CAAC,EAAE,MAAM,CAAA;KAAE,GAC1D,OAAO,CAAC,eAAe,CAAC;IAK3B;;;;;OAKG;IACG,IAAI,CACR,aAAa,EAAE,MAAM,EACrB,MAAM,EAAE,MAAM,EACd,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAAG;QAAE,cAAc,CAAC,EAAE,MAAM,CAAA;KAAE,GAC1D,OAAO,CAAC,eAAe,CAAC;IAqC3B;;;;;OAKG;IACG,MAAM,CACV,aAAa,EAAE,MAAM,EACrB,IAAI,EAAE,UAAU,EAChB,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAC5B,OAAO,CAAC,eAAe,CAAC;IAK3B;;OAEG;IACG,aAAa,CACjB,aAAa,EAAE,MAAM,EACrB,IAAI,EAAE,UAAU,EAChB,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAAG;QAAE,cAAc,CAAC,EAAE,MAAM,CAAA;KAAE,GAC1D,OAAO,CAAC,eAAe,CAAC;CAI5B;AAED,qBAAa,oBAAqB,SAAQ,UAAU,CAAC,eAAe,CAAC;CAAG;AAExE;;GAEG;AACH,MAAM,WAAW,eAAe;IAC9B;;OAEG;IACH,EAAE,EAAE,MAAM,CAAC;IAEX;;OAEG;IACH,UAAU,EAAE,MAAM,CAAC;IAEnB;;;OAGG;IACH,UAAU,EAAE,eAAe,CAAC,SAAS,GAAG,IAAI,CAAC;IAE7C;;OAEG;IACH,MAAM,EAAE,mBAAmB,CAAC;IAE5B;;;;OAIG;IACH,MAAM,EAAE,aAAa,GAAG,WAAW,GAAG,WAAW,GAAG,QAAQ,CAAC;IAE7D;;;OAGG;IACH,WAAW,EAAE,MAAM,CAAC;IAEpB;;;;;OAKG;IACH,eAAe,EAAE,MAAM,CAAC;IAExB;;OAEG;IACH,iBAAiB,CAAC,EAAE,eAAe,CAAC,oBAAoB,CAAC;CAC1D;AAED,yBAAiB,eAAe,CAAC;IAC/B;;;OAGG;IACH,UAAiB,SAAS;QACxB;;WAEG;QACH,IAAI,EAAE,cAAc,GAAG,kBAAkB,GAAG,cAAc,CAAC;QAE3D;;WAEG;QACH,OAAO,EAAE,MAAM,CAAC;KACjB;CACF;AAED,MAAM,WAAW,sBAAsB;IACrC,EAAE,EAAE,MAAM,CAAC;IAEX,OAAO,EAAE,OAAO,CAAC;IAEjB,MAAM,EAAE,2BAA2B,CAAC;CACrC;AAED,MAAM,WAAW,gBAAgB;IAC/B;;;;OAIG;IACH,OAAO,EAAE,MAAM,CAAC;IAEhB;;;OAGG;IACH,iBAAiB,CAAC,EAAE,eAAe,CAAC,yBAAyB,CAAC;CAC/D;AAED,MAAM,WAAW,cAAe,SAAQ,gBAAgB;IACtD;;;;;OAKG;IACH,MAAM,CAAC,EAAE,MAAM,CAAC;IAEhB;;OAEG;IACH,MAAM,CAAC,EAAE,aAAa,GAAG,WAAW,GAAG,QAAQ,GAAG,WAAW,CAAC;IAE9D;;;OAGG;IACH,KAAK,CAAC,EAAE,KAAK,GAAG,MAAM,CAAC;CACxB;AAID,MAAM,CAAC,OAAO,WAAW,KAAK,CAAC;IAC7B,OAAO,EACL,KAAK,eAAe,IAAI,eAAe,EACvC,KAAK,sBAAsB,IAAI,sBAAsB,EACrD,oBAAoB,IAAI,oBAAoB,EAC5C,KAAK,gBAA
gB,IAAI,gBAAgB,EACzC,KAAK,cAAc,IAAI,cAAc,GACtC,CAAC;CACH"} \ No newline at end of file diff --git a/node_modules/openai/resources/beta/vector-stores/files.js b/node_modules/openai/resources/beta/vector-stores/files.js new file mode 100644 index 0000000..c36cbd2 --- /dev/null +++ b/node_modules/openai/resources/beta/vector-stores/files.js @@ -0,0 +1,122 @@ +"use strict"; +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.VectorStoreFilesPage = exports.Files = void 0; +const resource_1 = require("../../../resource.js"); +const core_1 = require("../../../core.js"); +const pagination_1 = require("../../../pagination.js"); +class Files extends resource_1.APIResource { + /** + * Create a vector store file by attaching a + * [File](https://platform.openai.com/docs/api-reference/files) to a + * [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object). + */ + create(vectorStoreId, body, options) { + return this._client.post(`/vector_stores/${vectorStoreId}/files`, { + body, + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + }); + } + /** + * Retrieves a vector store file. + */ + retrieve(vectorStoreId, fileId, options) { + return this._client.get(`/vector_stores/${vectorStoreId}/files/${fileId}`, { + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + }); + } + list(vectorStoreId, query = {}, options) { + if ((0, core_1.isRequestOptions)(query)) { + return this.list(vectorStoreId, {}, query); + } + return this._client.getAPIList(`/vector_stores/${vectorStoreId}/files`, VectorStoreFilesPage, { + query, + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + }); + } + /** + * Delete a vector store file. This will remove the file from the vector store but + * the file itself will not be deleted. To delete the file, use the + * [delete file](https://platform.openai.com/docs/api-reference/files/delete) + * endpoint. + */ + del(vectorStoreId, fileId, options) { + return this._client.delete(`/vector_stores/${vectorStoreId}/files/${fileId}`, { + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + }); + } + /** + * Attach a file to the given vector store and wait for it to be processed. + */ + async createAndPoll(vectorStoreId, body, options) { + const file = await this.create(vectorStoreId, body, options); + return await this.poll(vectorStoreId, file.id, options); + } + /** + * Wait for the vector store file to finish processing. 
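Where the file has already been uploaded, createAndPoll attaches it by ID. Since polling also resolves for failed files, status and last_error should be inspected after it returns. A sketch with placeholder IDs, assuming an initialized client:

import OpenAI from 'openai';

const client = new OpenAI();

const vsFile = await client.beta.vectorStores.files.createAndPoll('vs_abc123', {
  file_id: 'file-333', // placeholder ID from a previous client.files.create call
});
if (vsFile.status === 'failed') {
  // last_error carries a machine-readable code and a human-readable message.
  console.error(vsFile.last_error?.code, vsFile.last_error?.message);
}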
+ * + * Note: this will return even if the file failed to process, you need to check + * file.last_error and file.status to handle these cases + */ + async poll(vectorStoreId, fileId, options) { + const headers = { ...options?.headers, 'X-Stainless-Poll-Helper': 'true' }; + if (options?.pollIntervalMs) { + headers['X-Stainless-Custom-Poll-Interval'] = options.pollIntervalMs.toString(); + } + while (true) { + const fileResponse = await this.retrieve(vectorStoreId, fileId, { + ...options, + headers, + }).withResponse(); + const file = fileResponse.data; + switch (file.status) { + case 'in_progress': + let sleepInterval = 5000; + if (options?.pollIntervalMs) { + sleepInterval = options.pollIntervalMs; + } + else { + const headerInterval = fileResponse.response.headers.get('openai-poll-after-ms'); + if (headerInterval) { + const headerIntervalMs = parseInt(headerInterval); + if (!isNaN(headerIntervalMs)) { + sleepInterval = headerIntervalMs; + } + } + } + await (0, core_1.sleep)(sleepInterval); + break; + case 'failed': + case 'completed': + return file; + } + } + } + /** + * Upload a file to the `files` API and then attach it to the given vector store. + * + * Note the file will be asynchronously processed (you can use the alternative + * polling helper method to wait for processing to complete). + */ + async upload(vectorStoreId, file, options) { + const fileInfo = await this._client.files.create({ file: file, purpose: 'assistants' }, options); + return this.create(vectorStoreId, { file_id: fileInfo.id }, options); + } + /** + * Add a file to a vector store and poll until processing is complete. + */ + async uploadAndPoll(vectorStoreId, file, options) { + const fileInfo = await this.upload(vectorStoreId, file, options); + return await this.poll(vectorStoreId, fileInfo.id, options); + } +} +exports.Files = Files; +class VectorStoreFilesPage extends pagination_1.CursorPage { +} +exports.VectorStoreFilesPage = VectorStoreFilesPage; +Files.VectorStoreFilesPage = VectorStoreFilesPage; +//# sourceMappingURL=files.js.map \ No newline at end of file diff --git a/node_modules/openai/resources/beta/vector-stores/files.js.map b/node_modules/openai/resources/beta/vector-stores/files.js.map new file mode 100644 index 0000000..9c85aca --- /dev/null +++ b/node_modules/openai/resources/beta/vector-stores/files.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"files.js","sourceRoot":"","sources":["../../../src/resources/beta/vector-stores/files.ts"],"names":[],"mappings":";AAAA,sFAAsF;;;AAEtF,mDAAgD;AAChD,2CAAoE;AAGpE,uDAAwE;AAExE,MAAa,KAAM,SAAQ,sBAAW;IACpC;;;;OAIG;IACH,MAAM,CACJ,aAAqB,EACrB,IAAsB,EACtB,OAA6B;QAE7B,OAAO,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,kBAAkB,aAAa,QAAQ,EAAE;YAChE,IAAI;YACJ,GAAG,OAAO;YACV,OAAO,EAAE,EAAE,aAAa,EAAE,eAAe,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE;SACjE,CAAC,CAAC;IACL,CAAC;IAED;;OAEG;IACH,QAAQ,CACN,aAAqB,EACrB,MAAc,EACd,OAA6B;QAE7B,OAAO,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,kBAAkB,aAAa,UAAU,MAAM,EAAE,EAAE;YACzE,GAAG,OAAO;YACV,OAAO,EAAE,EAAE,aAAa,EAAE,eAAe,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE;SACjE,CAAC,CAAC;IACL,CAAC;IAcD,IAAI,CACF,aAAqB,EACrB,QAA8C,EAAE,EAChD,OAA6B;QAE7B,IAAI,IAAA,uBAAgB,EAAC,KAAK,CAAC,EAAE;YAC3B,OAAO,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,EAAE,EAAE,KAAK,CAAC,CAAC;SAC5C;QACD,OAAO,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,kBAAkB,aAAa,QAAQ,EAAE,oBAAoB,EAAE;YAC5F,KAAK;YACL,GAAG,OAAO;YACV,OAAO,EAAE,EAAE,aAAa,EAAE,eAAe,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE;SACjE,CAAC,CAAC;IACL,CAAC;IAED;;;;;OAKG;IACH,GAAG,CACD,aAAqB,EACrB,MAAc,EACd,OAA6B;QAE7B,OAAO,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,kBAAkB,aAAa,UAAU,MAAM,EAAE,EAAE;YAC5E,GAAG,OAAO;YACV,OAAO,EAAE,EAAE,aAAa,EAAE,eAAe,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE;SACjE,CAAC,CAAC;IACL,CAAC;IAED;;OAEG;IACH,KAAK,CAAC,aAAa,CACjB,aAAqB,EACrB,IAAsB,EACtB,OAA2D;QAE3D,MAAM,IAAI,GAAG,MAAM,IAAI,CAAC,MAAM,CAAC,aAAa,EAAE,IAAI,EAAE,OAAO,CAAC,CAAC;QAC7D,OAAO,MAAM,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,IAAI,CAAC,EAAE,EAAE,OAAO,CAAC,CAAC;IAC1D,CAAC;IAED;;;;;OAKG;IACH,KAAK,CAAC,IAAI,CACR,aAAqB,EACrB,MAAc,EACd,OAA2D;QAE3D,MAAM,OAAO,GAA8B,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE,yBAAyB,EAAE,MAAM,EAAE,CAAC;QACtG,IAAI,OAAO,EAAE,cAAc,EAAE;YAC3B,OAAO,CAAC,kCAAkC,CAAC,GAAG,OAAO,CAAC,cAAc,CAAC,QAAQ,EAAE,CAAC;SACjF;QACD,OAAO,IAAI,EAAE;YACX,MAAM,YAAY,GAAG,MAAM,IAAI,CAAC,QAAQ,CAAC,aAAa,EAAE,MAAM,EAAE;gBAC9D,GAAG,OAAO;gBACV,OAAO;aACR,CAAC,CAAC,YAAY,EAAE,CAAC;YAElB,MAAM,IAAI,GAAG,YAAY,CAAC,IAAI,CAAC;YAE/B,QAAQ,IAAI,CAAC,MAAM,EAAE;gBACnB,KAAK,aAAa;oBAChB,IAAI,aAAa,GAAG,IAAI,CAAC;oBAEzB,IAAI,OAAO,EAAE,cAAc,EAAE;wBAC3B,aAAa,GAAG,OAAO,CAAC,cAAc,CAAC;qBACxC;yBAAM;wBACL,MAAM,cAAc,GAAG,YAAY,CAAC,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,sBAAsB,CAAC,CAAC;wBACjF,IAAI,cAAc,EAAE;4BAClB,MAAM,gBAAgB,GAAG,QAAQ,CAAC,cAAc,CAAC,CAAC;4BAClD,IAAI,CAAC,KAAK,CAAC,gBAAgB,CAAC,EAAE;gCAC5B,aAAa,GAAG,gBAAgB,CAAC;6BAClC;yBACF;qBACF;oBACD,MAAM,IAAA,YAAK,EAAC,aAAa,CAAC,CAAC;oBAC3B,MAAM;gBACR,KAAK,QAAQ,CAAC;gBACd,KAAK,WAAW;oBACd,OAAO,IAAI,CAAC;aACf;SACF;IACH,CAAC;IAED;;;;;OAKG;IACH,KAAK,CAAC,MAAM,CACV,aAAqB,EACrB,IAAgB,EAChB,OAA6B;QAE7B,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,MAAM,CAAC,EAAE,IAAI,EAAE,IAAI,EAAE,OAAO,EAAE,YAAY,EAAE,EAAE,OAAO,CAAC,CAAC;QACjG,OAAO,IAAI,CAAC,MAAM,CAAC,aAAa,EAAE,EAAE,OAAO,EAAE,QAAQ,CAAC,EAAE,EAAE,EAAE,OAAO,CAAC,CAAC;IACvE,CAAC;IAED;;OAEG;IACH,KAAK,CAAC,aAAa,CACjB,aAAqB,EACrB,IAAgB,EAChB,OAA2D;QAE3D,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,MAAM,CAAC,aAAa,EAAE,IAAI,EAAE,OAAO,CAAC,CAAC;QACjE,OAAO,MAAM,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,QAAQ,CAAC,EAAE,EAAE,OAAO,CAAC,CAAC;IAC9D,CAAC;CACF;AAjKD,sBAiKC;AAED,MAAa,oBAAqB,SAAQ,uBAA2B;CAAG;AAAxE,oDAAwE;AAoHxE,KAAK,CAAC,oBAAoB,GAAG,oBAAoB,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/resources/beta/vector-stores/files.mjs b/node_modules/openai/resources/beta/vector-stores/files.mjs new file mode 100644 index 0000000..d9896dd --- /dev/null +++ b/node_modules/openai/resources/beta/vector-stores/files.mjs @@ -0,0 +1,117 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+import { APIResource } from "../../../resource.mjs"; +import { sleep, isRequestOptions } from "../../../core.mjs"; +import { CursorPage } from "../../../pagination.mjs"; +export class Files extends APIResource { + /** + * Create a vector store file by attaching a + * [File](https://platform.openai.com/docs/api-reference/files) to a + * [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object). + */ + create(vectorStoreId, body, options) { + return this._client.post(`/vector_stores/${vectorStoreId}/files`, { + body, + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + }); + } + /** + * Retrieves a vector store file. + */ + retrieve(vectorStoreId, fileId, options) { + return this._client.get(`/vector_stores/${vectorStoreId}/files/${fileId}`, { + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + }); + } + list(vectorStoreId, query = {}, options) { + if (isRequestOptions(query)) { + return this.list(vectorStoreId, {}, query); + } + return this._client.getAPIList(`/vector_stores/${vectorStoreId}/files`, VectorStoreFilesPage, { + query, + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + }); + } + /** + * Delete a vector store file. This will remove the file from the vector store but + * the file itself will not be deleted. To delete the file, use the + * [delete file](https://platform.openai.com/docs/api-reference/files/delete) + * endpoint. + */ + del(vectorStoreId, fileId, options) { + return this._client.delete(`/vector_stores/${vectorStoreId}/files/${fileId}`, { + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + }); + } + /** + * Attach a file to the given vector store and wait for it to be processed. + */ + async createAndPoll(vectorStoreId, body, options) { + const file = await this.create(vectorStoreId, body, options); + return await this.poll(vectorStoreId, file.id, options); + } + /** + * Wait for the vector store file to finish processing. + * + * Note: this will return even if the file failed to process, you need to check + * file.last_error and file.status to handle these cases + */ + async poll(vectorStoreId, fileId, options) { + const headers = { ...options?.headers, 'X-Stainless-Poll-Helper': 'true' }; + if (options?.pollIntervalMs) { + headers['X-Stainless-Custom-Poll-Interval'] = options.pollIntervalMs.toString(); + } + while (true) { + const fileResponse = await this.retrieve(vectorStoreId, fileId, { + ...options, + headers, + }).withResponse(); + const file = fileResponse.data; + switch (file.status) { + case 'in_progress': + let sleepInterval = 5000; + if (options?.pollIntervalMs) { + sleepInterval = options.pollIntervalMs; + } + else { + const headerInterval = fileResponse.response.headers.get('openai-poll-after-ms'); + if (headerInterval) { + const headerIntervalMs = parseInt(headerInterval); + if (!isNaN(headerIntervalMs)) { + sleepInterval = headerIntervalMs; + } + } + } + await sleep(sleepInterval); + break; + case 'failed': + case 'completed': + return file; + } + } + } + /** + * Upload a file to the `files` API and then attach it to the given vector store. + * + * Note the file will be asynchronously processed (you can use the alternative + * polling helper method to wait for processing to complete). 
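The upload helper differs from uploadAndPoll in that it returns as soon as the file is attached, while indexing continues server-side. A sketch under the same assumptions as the earlier examples (placeholder IDs and file name):

import fs from 'node:fs';
import OpenAI from 'openai';

const client = new OpenAI();

// Attach the file and return immediately; processing continues asynchronously.
const pending = await client.beta.vectorStores.files.upload(
  'vs_abc123',
  fs.createReadStream('changelog.md'),
);
console.log(pending.status); // typically still 'in_progress' at this point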
+ */ + async upload(vectorStoreId, file, options) { + const fileInfo = await this._client.files.create({ file: file, purpose: 'assistants' }, options); + return this.create(vectorStoreId, { file_id: fileInfo.id }, options); + } + /** + * Add a file to a vector store and poll until processing is complete. + */ + async uploadAndPoll(vectorStoreId, file, options) { + const fileInfo = await this.upload(vectorStoreId, file, options); + return await this.poll(vectorStoreId, fileInfo.id, options); + } +} +export class VectorStoreFilesPage extends CursorPage { +} +Files.VectorStoreFilesPage = VectorStoreFilesPage; +//# sourceMappingURL=files.mjs.map \ No newline at end of file diff --git a/node_modules/openai/resources/beta/vector-stores/files.mjs.map b/node_modules/openai/resources/beta/vector-stores/files.mjs.map new file mode 100644 index 0000000..cf62fc6 --- /dev/null +++ b/node_modules/openai/resources/beta/vector-stores/files.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"files.mjs","sourceRoot":"","sources":["../../../src/resources/beta/vector-stores/files.ts"],"names":[],"mappings":"AAAA,sFAAsF;OAE/E,EAAE,WAAW,EAAE;OACf,EAAE,KAAK,EAAc,gBAAgB,EAAE;OAGvC,EAAE,UAAU,EAAyB;AAE5C,MAAM,OAAO,KAAM,SAAQ,WAAW;IACpC;;;;OAIG;IACH,MAAM,CACJ,aAAqB,EACrB,IAAsB,EACtB,OAA6B;QAE7B,OAAO,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,kBAAkB,aAAa,QAAQ,EAAE;YAChE,IAAI;YACJ,GAAG,OAAO;YACV,OAAO,EAAE,EAAE,aAAa,EAAE,eAAe,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE;SACjE,CAAC,CAAC;IACL,CAAC;IAED;;OAEG;IACH,QAAQ,CACN,aAAqB,EACrB,MAAc,EACd,OAA6B;QAE7B,OAAO,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,kBAAkB,aAAa,UAAU,MAAM,EAAE,EAAE;YACzE,GAAG,OAAO;YACV,OAAO,EAAE,EAAE,aAAa,EAAE,eAAe,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE;SACjE,CAAC,CAAC;IACL,CAAC;IAcD,IAAI,CACF,aAAqB,EACrB,QAA8C,EAAE,EAChD,OAA6B;QAE7B,IAAI,gBAAgB,CAAC,KAAK,CAAC,EAAE;YAC3B,OAAO,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,EAAE,EAAE,KAAK,CAAC,CAAC;SAC5C;QACD,OAAO,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,kBAAkB,aAAa,QAAQ,EAAE,oBAAoB,EAAE;YAC5F,KAAK;YACL,GAAG,OAAO;YACV,OAAO,EAAE,EAAE,aAAa,EAAE,eAAe,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE;SACjE,CAAC,CAAC;IACL,CAAC;IAED;;;;;OAKG;IACH,GAAG,CACD,aAAqB,EACrB,MAAc,EACd,OAA6B;QAE7B,OAAO,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,kBAAkB,aAAa,UAAU,MAAM,EAAE,EAAE;YAC5E,GAAG,OAAO;YACV,OAAO,EAAE,EAAE,aAAa,EAAE,eAAe,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE;SACjE,CAAC,CAAC;IACL,CAAC;IAED;;OAEG;IACH,KAAK,CAAC,aAAa,CACjB,aAAqB,EACrB,IAAsB,EACtB,OAA2D;QAE3D,MAAM,IAAI,GAAG,MAAM,IAAI,CAAC,MAAM,CAAC,aAAa,EAAE,IAAI,EAAE,OAAO,CAAC,CAAC;QAC7D,OAAO,MAAM,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,IAAI,CAAC,EAAE,EAAE,OAAO,CAAC,CAAC;IAC1D,CAAC;IAED;;;;;OAKG;IACH,KAAK,CAAC,IAAI,CACR,aAAqB,EACrB,MAAc,EACd,OAA2D;QAE3D,MAAM,OAAO,GAA8B,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE,yBAAyB,EAAE,MAAM,EAAE,CAAC;QACtG,IAAI,OAAO,EAAE,cAAc,EAAE;YAC3B,OAAO,CAAC,kCAAkC,CAAC,GAAG,OAAO,CAAC,cAAc,CAAC,QAAQ,EAAE,CAAC;SACjF;QACD,OAAO,IAAI,EAAE;YACX,MAAM,YAAY,GAAG,MAAM,IAAI,CAAC,QAAQ,CAAC,aAAa,EAAE,MAAM,EAAE;gBAC9D,GAAG,OAAO;gBACV,OAAO;aACR,CAAC,CAAC,YAAY,EAAE,CAAC;YAElB,MAAM,IAAI,GAAG,YAAY,CAAC,IAAI,CAAC;YAE/B,QAAQ,IAAI,CAAC,MAAM,EAAE;gBACnB,KAAK,aAAa;oBAChB,IAAI,aAAa,GAAG,IAAI,CAAC;oBAEzB,IAAI,OAAO,EAAE,cAAc,EAAE;wBAC3B,aAAa,GAAG,OAAO,CAAC,cAAc,CAAC;qBACxC;yBAAM;wBACL,MAAM,cAAc,GAAG,YAAY,CAAC,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,sBAAsB,CAAC,CAAC;wBACjF,IAAI,cAAc,EAAE;4BAClB,MAAM,gBAAgB,GAAG,QAAQ,CAAC,cAAc,CAAC,CAAC;4BAClD,IAAI,CAAC,KAAK,CAAC,gBAAgB,CAAC,EAAE;gCAC5B,aAAa,GAAG,gBAAgB,CAAC;6BAClC;yBACF;qBACF;oBACD,MAAM,KAAK,CAAC,aAAa,CAAC,CAAC;oBAC3B,MAAM;gBACR,KAAK,QAAQ,CAAC;gBACd,KAAK,WAAW;oBACd,OAAO,IAAI,CAAC;aACf;SACF;IACH,CAAC;IAED;;;;;OAKG;IACH,KAAK,CAAC,MAAM,CACV,aAAqB,EACrB,IAAgB,EAChB,OAA6B;QAE7B,
MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,MAAM,CAAC,EAAE,IAAI,EAAE,IAAI,EAAE,OAAO,EAAE,YAAY,EAAE,EAAE,OAAO,CAAC,CAAC;QACjG,OAAO,IAAI,CAAC,MAAM,CAAC,aAAa,EAAE,EAAE,OAAO,EAAE,QAAQ,CAAC,EAAE,EAAE,EAAE,OAAO,CAAC,CAAC;IACvE,CAAC;IAED;;OAEG;IACH,KAAK,CAAC,aAAa,CACjB,aAAqB,EACrB,IAAgB,EAChB,OAA2D;QAE3D,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,MAAM,CAAC,aAAa,EAAE,IAAI,EAAE,OAAO,CAAC,CAAC;QACjE,OAAO,MAAM,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,QAAQ,CAAC,EAAE,EAAE,OAAO,CAAC,CAAC;IAC9D,CAAC;CACF;AAED,MAAM,OAAO,oBAAqB,SAAQ,UAA2B;CAAG;AAoHxE,KAAK,CAAC,oBAAoB,GAAG,oBAAoB,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/resources/beta/vector-stores/index.d.ts b/node_modules/openai/resources/beta/vector-stores/index.d.ts new file mode 100644 index 0000000..ed29e11 --- /dev/null +++ b/node_modules/openai/resources/beta/vector-stores/index.d.ts @@ -0,0 +1,4 @@ +export { FileBatches, type VectorStoreFileBatch, type FileBatchCreateParams, type FileBatchListFilesParams, } from "./file-batches.js"; +export { VectorStoreFilesPage, Files, type VectorStoreFile, type VectorStoreFileDeleted, type FileCreateParams, type FileListParams, } from "./files.js"; +export { VectorStoresPage, VectorStores, type AutoFileChunkingStrategyParam, type FileChunkingStrategy, type FileChunkingStrategyParam, type OtherFileChunkingStrategyObject, type StaticFileChunkingStrategy, type StaticFileChunkingStrategyObject, type StaticFileChunkingStrategyParam, type VectorStore, type VectorStoreDeleted, type VectorStoreCreateParams, type VectorStoreUpdateParams, type VectorStoreListParams, } from "./vector-stores.js"; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/resources/beta/vector-stores/index.d.ts.map b/node_modules/openai/resources/beta/vector-stores/index.d.ts.map new file mode 100644 index 0000000..3eafcf6 --- /dev/null +++ b/node_modules/openai/resources/beta/vector-stores/index.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/resources/beta/vector-stores/index.ts"],"names":[],"mappings":"AAEA,OAAO,EACL,WAAW,EACX,KAAK,oBAAoB,EACzB,KAAK,qBAAqB,EAC1B,KAAK,wBAAwB,GAC9B,MAAM,gBAAgB,CAAC;AACxB,OAAO,EACL,oBAAoB,EACpB,KAAK,EACL,KAAK,eAAe,EACpB,KAAK,sBAAsB,EAC3B,KAAK,gBAAgB,EACrB,KAAK,cAAc,GACpB,MAAM,SAAS,CAAC;AACjB,OAAO,EACL,gBAAgB,EAChB,YAAY,EACZ,KAAK,6BAA6B,EAClC,KAAK,oBAAoB,EACzB,KAAK,yBAAyB,EAC9B,KAAK,+BAA+B,EACpC,KAAK,0BAA0B,EAC/B,KAAK,gCAAgC,EACrC,KAAK,+BAA+B,EACpC,KAAK,WAAW,EAChB,KAAK,kBAAkB,EACvB,KAAK,uBAAuB,EAC5B,KAAK,uBAAuB,EAC5B,KAAK,qBAAqB,GAC3B,MAAM,iBAAiB,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/resources/beta/vector-stores/index.js b/node_modules/openai/resources/beta/vector-stores/index.js new file mode 100644 index 0000000..fd69056 --- /dev/null +++ b/node_modules/openai/resources/beta/vector-stores/index.js @@ -0,0 +1,13 @@ +"use strict"; +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+Object.defineProperty(exports, "__esModule", { value: true }); +exports.VectorStores = exports.VectorStoresPage = exports.Files = exports.VectorStoreFilesPage = exports.FileBatches = void 0; +var file_batches_1 = require("./file-batches.js"); +Object.defineProperty(exports, "FileBatches", { enumerable: true, get: function () { return file_batches_1.FileBatches; } }); +var files_1 = require("./files.js"); +Object.defineProperty(exports, "VectorStoreFilesPage", { enumerable: true, get: function () { return files_1.VectorStoreFilesPage; } }); +Object.defineProperty(exports, "Files", { enumerable: true, get: function () { return files_1.Files; } }); +var vector_stores_1 = require("./vector-stores.js"); +Object.defineProperty(exports, "VectorStoresPage", { enumerable: true, get: function () { return vector_stores_1.VectorStoresPage; } }); +Object.defineProperty(exports, "VectorStores", { enumerable: true, get: function () { return vector_stores_1.VectorStores; } }); +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/openai/resources/beta/vector-stores/index.js.map b/node_modules/openai/resources/beta/vector-stores/index.js.map new file mode 100644 index 0000000..f292572 --- /dev/null +++ b/node_modules/openai/resources/beta/vector-stores/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../../src/resources/beta/vector-stores/index.ts"],"names":[],"mappings":";AAAA,sFAAsF;;;AAEtF,kDAKwB;AAJtB,2GAAA,WAAW,OAAA;AAKb,oCAOiB;AANf,6GAAA,oBAAoB,OAAA;AACpB,8FAAA,KAAK,OAAA;AAMP,oDAeyB;AAdvB,iHAAA,gBAAgB,OAAA;AAChB,6GAAA,YAAY,OAAA"} \ No newline at end of file diff --git a/node_modules/openai/resources/beta/vector-stores/index.mjs b/node_modules/openai/resources/beta/vector-stores/index.mjs new file mode 100644 index 0000000..ab9854d --- /dev/null +++ b/node_modules/openai/resources/beta/vector-stores/index.mjs @@ -0,0 +1,5 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+export { FileBatches, } from "./file-batches.mjs"; +export { VectorStoreFilesPage, Files, } from "./files.mjs"; +export { VectorStoresPage, VectorStores, } from "./vector-stores.mjs"; +//# sourceMappingURL=index.mjs.map \ No newline at end of file diff --git a/node_modules/openai/resources/beta/vector-stores/index.mjs.map b/node_modules/openai/resources/beta/vector-stores/index.mjs.map new file mode 100644 index 0000000..477ab1e --- /dev/null +++ b/node_modules/openai/resources/beta/vector-stores/index.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"index.mjs","sourceRoot":"","sources":["../../../src/resources/beta/vector-stores/index.ts"],"names":[],"mappings":"AAAA,sFAAsF;OAE/E,EACL,WAAW,GAIZ;OACM,EACL,oBAAoB,EACpB,KAAK,GAKN;OACM,EACL,gBAAgB,EAChB,YAAY,GAab"} \ No newline at end of file diff --git a/node_modules/openai/resources/beta/vector-stores/vector-stores.d.ts b/node_modules/openai/resources/beta/vector-stores/vector-stores.d.ts new file mode 100644 index 0000000..1f545e5 --- /dev/null +++ b/node_modules/openai/resources/beta/vector-stores/vector-stores.d.ts @@ -0,0 +1,283 @@ +import { APIResource } from "../../../resource.js"; +import * as Core from "../../../core.js"; +import * as FileBatchesAPI from "./file-batches.js"; +import { FileBatchCreateParams, FileBatchListFilesParams, FileBatches, VectorStoreFileBatch } from "./file-batches.js"; +import * as FilesAPI from "./files.js"; +import { FileCreateParams, FileListParams, Files, VectorStoreFile, VectorStoreFileDeleted, VectorStoreFilesPage } from "./files.js"; +import { CursorPage, type CursorPageParams } from "../../../pagination.js"; +export declare class VectorStores extends APIResource { + files: FilesAPI.Files; + fileBatches: FileBatchesAPI.FileBatches; + /** + * Create a vector store. + */ + create(body: VectorStoreCreateParams, options?: Core.RequestOptions): Core.APIPromise; + /** + * Retrieves a vector store. + */ + retrieve(vectorStoreId: string, options?: Core.RequestOptions): Core.APIPromise; + /** + * Modifies a vector store. + */ + update(vectorStoreId: string, body: VectorStoreUpdateParams, options?: Core.RequestOptions): Core.APIPromise; + /** + * Returns a list of vector stores. + */ + list(query?: VectorStoreListParams, options?: Core.RequestOptions): Core.PagePromise; + list(options?: Core.RequestOptions): Core.PagePromise; + /** + * Delete a vector store. + */ + del(vectorStoreId: string, options?: Core.RequestOptions): Core.APIPromise; +} +export declare class VectorStoresPage extends CursorPage { +} +/** + * The default strategy. This strategy currently uses a `max_chunk_size_tokens` of + * `800` and `chunk_overlap_tokens` of `400`. + */ +export interface AutoFileChunkingStrategyParam { + /** + * Always `auto`. + */ + type: 'auto'; +} +/** + * The strategy used to chunk the file. + */ +export type FileChunkingStrategy = StaticFileChunkingStrategyObject | OtherFileChunkingStrategyObject; +/** + * The chunking strategy used to chunk the file(s). If not set, will use the `auto` + * strategy. Only applicable if `file_ids` is non-empty. + */ +export type FileChunkingStrategyParam = AutoFileChunkingStrategyParam | StaticFileChunkingStrategyParam; +/** + * This is returned when the chunking strategy is unknown. Typically, this is + * because the file was indexed before the `chunking_strategy` concept was + * introduced in the API. + */ +export interface OtherFileChunkingStrategyObject { + /** + * Always `other`. 
+ */ + type: 'other'; +} +export interface StaticFileChunkingStrategy { + /** + * The number of tokens that overlap between chunks. The default value is `400`. + * + * Note that the overlap must not exceed half of `max_chunk_size_tokens`. + */ + chunk_overlap_tokens: number; + /** + * The maximum number of tokens in each chunk. The default value is `800`. The + * minimum value is `100` and the maximum value is `4096`. + */ + max_chunk_size_tokens: number; +} +export interface StaticFileChunkingStrategyObject { + static: StaticFileChunkingStrategy; + /** + * Always `static`. + */ + type: 'static'; +} +export interface StaticFileChunkingStrategyParam { + static: StaticFileChunkingStrategy; + /** + * Always `static`. + */ + type: 'static'; +} +/** + * A vector store is a collection of processed files can be used by the + * `file_search` tool. + */ +export interface VectorStore { + /** + * The identifier, which can be referenced in API endpoints. + */ + id: string; + /** + * The Unix timestamp (in seconds) for when the vector store was created. + */ + created_at: number; + file_counts: VectorStore.FileCounts; + /** + * The Unix timestamp (in seconds) for when the vector store was last active. + */ + last_active_at: number | null; + /** + * Set of 16 key-value pairs that can be attached to an object. This can be useful + * for storing additional information about the object in a structured format. Keys + * can be a maximum of 64 characters long and values can be a maxium of 512 + * characters long. + */ + metadata: unknown | null; + /** + * The name of the vector store. + */ + name: string; + /** + * The object type, which is always `vector_store`. + */ + object: 'vector_store'; + /** + * The status of the vector store, which can be either `expired`, `in_progress`, or + * `completed`. A status of `completed` indicates that the vector store is ready + * for use. + */ + status: 'expired' | 'in_progress' | 'completed'; + /** + * The total number of bytes used by the files in the vector store. + */ + usage_bytes: number; + /** + * The expiration policy for a vector store. + */ + expires_after?: VectorStore.ExpiresAfter; + /** + * The Unix timestamp (in seconds) for when the vector store will expire. + */ + expires_at?: number | null; +} +export declare namespace VectorStore { + interface FileCounts { + /** + * The number of files that were cancelled. + */ + cancelled: number; + /** + * The number of files that have been successfully processed. + */ + completed: number; + /** + * The number of files that have failed to process. + */ + failed: number; + /** + * The number of files that are currently being processed. + */ + in_progress: number; + /** + * The total number of files. + */ + total: number; + } + /** + * The expiration policy for a vector store. + */ + interface ExpiresAfter { + /** + * Anchor timestamp after which the expiration policy applies. Supported anchors: + * `last_active_at`. + */ + anchor: 'last_active_at'; + /** + * The number of days after the anchor time that the vector store will expire. + */ + days: number; + } +} +export interface VectorStoreDeleted { + id: string; + deleted: boolean; + object: 'vector_store.deleted'; +} +export interface VectorStoreCreateParams { + /** + * The chunking strategy used to chunk the file(s). If not set, will use the `auto` + * strategy. Only applicable if `file_ids` is non-empty. + */ + chunking_strategy?: FileChunkingStrategyParam; + /** + * The expiration policy for a vector store. 
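Pulling the declarations above together, creating a store with an explicit static chunking strategy and an expiration policy looks roughly like the following sketch. The values are placeholders chosen to satisfy the documented bounds (max_chunk_size_tokens between 100 and 4096, overlap at most half of it), and the client wiring is assumed as in the earlier examples:

import OpenAI from 'openai';

const client = new OpenAI();

const store = await client.beta.vectorStores.create({
  name: 'support-docs',
  file_ids: ['file-111', 'file-222'], // placeholder IDs
  chunking_strategy: {
    type: 'static',
    // chunk_overlap_tokens must not exceed half of max_chunk_size_tokens
    static: { max_chunk_size_tokens: 1600, chunk_overlap_tokens: 400 },
  },
  expires_after: { anchor: 'last_active_at', days: 7 },
});
console.log(store.id, store.status);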
+ */ + expires_after?: VectorStoreCreateParams.ExpiresAfter; + /** + * A list of [File](https://platform.openai.com/docs/api-reference/files) IDs that + * the vector store should use. Useful for tools like `file_search` that can access + * files. + */ + file_ids?: Array; + /** + * Set of 16 key-value pairs that can be attached to an object. This can be useful + * for storing additional information about the object in a structured format. Keys + * can be a maximum of 64 characters long and values can be a maxium of 512 + * characters long. + */ + metadata?: unknown | null; + /** + * The name of the vector store. + */ + name?: string; +} +export declare namespace VectorStoreCreateParams { + /** + * The expiration policy for a vector store. + */ + interface ExpiresAfter { + /** + * Anchor timestamp after which the expiration policy applies. Supported anchors: + * `last_active_at`. + */ + anchor: 'last_active_at'; + /** + * The number of days after the anchor time that the vector store will expire. + */ + days: number; + } +} +export interface VectorStoreUpdateParams { + /** + * The expiration policy for a vector store. + */ + expires_after?: VectorStoreUpdateParams.ExpiresAfter | null; + /** + * Set of 16 key-value pairs that can be attached to an object. This can be useful + * for storing additional information about the object in a structured format. Keys + * can be a maximum of 64 characters long and values can be a maxium of 512 + * characters long. + */ + metadata?: unknown | null; + /** + * The name of the vector store. + */ + name?: string | null; +} +export declare namespace VectorStoreUpdateParams { + /** + * The expiration policy for a vector store. + */ + interface ExpiresAfter { + /** + * Anchor timestamp after which the expiration policy applies. Supported anchors: + * `last_active_at`. + */ + anchor: 'last_active_at'; + /** + * The number of days after the anchor time that the vector store will expire. + */ + days: number; + } +} +export interface VectorStoreListParams extends CursorPageParams { + /** + * A cursor for use in pagination. `before` is an object ID that defines your place + * in the list. For instance, if you make a list request and receive 100 objects, + * starting with obj_foo, your subsequent call can include before=obj_foo in order + * to fetch the previous page of the list. + */ + before?: string; + /** + * Sort order by the `created_at` timestamp of the objects. `asc` for ascending + * order and `desc` for descending order. 
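Listing follows the cursor-page pattern shared by the other resources; in the v4 SDK the returned page object can, to my understanding, be iterated with for await to auto-paginate. A sketch assuming an initialized client; limit comes from CursorPageParams, order and before from VectorStoreListParams:

import OpenAI from 'openai';

const client = new OpenAI();

// Newest stores first, fetching 20 per page and auto-paginating.
for await (const store of client.beta.vectorStores.list({ order: 'desc', limit: 20 })) {
  console.log(store.id, store.name, store.usage_bytes);
}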
+ */ + order?: 'asc' | 'desc'; +} +export declare namespace VectorStores { + export { type AutoFileChunkingStrategyParam as AutoFileChunkingStrategyParam, type FileChunkingStrategy as FileChunkingStrategy, type FileChunkingStrategyParam as FileChunkingStrategyParam, type OtherFileChunkingStrategyObject as OtherFileChunkingStrategyObject, type StaticFileChunkingStrategy as StaticFileChunkingStrategy, type StaticFileChunkingStrategyObject as StaticFileChunkingStrategyObject, type StaticFileChunkingStrategyParam as StaticFileChunkingStrategyParam, type VectorStore as VectorStore, type VectorStoreDeleted as VectorStoreDeleted, VectorStoresPage as VectorStoresPage, type VectorStoreCreateParams as VectorStoreCreateParams, type VectorStoreUpdateParams as VectorStoreUpdateParams, type VectorStoreListParams as VectorStoreListParams, }; + export { Files as Files, type VectorStoreFile as VectorStoreFile, type VectorStoreFileDeleted as VectorStoreFileDeleted, VectorStoreFilesPage as VectorStoreFilesPage, type FileCreateParams as FileCreateParams, type FileListParams as FileListParams, }; + export { FileBatches as FileBatches, type VectorStoreFileBatch as VectorStoreFileBatch, type FileBatchCreateParams as FileBatchCreateParams, type FileBatchListFilesParams as FileBatchListFilesParams, }; +} +//# sourceMappingURL=vector-stores.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/resources/beta/vector-stores/vector-stores.d.ts.map b/node_modules/openai/resources/beta/vector-stores/vector-stores.d.ts.map new file mode 100644 index 0000000..287fac5 --- /dev/null +++ b/node_modules/openai/resources/beta/vector-stores/vector-stores.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"vector-stores.d.ts","sourceRoot":"","sources":["../../../src/resources/beta/vector-stores/vector-stores.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,WAAW,EAAE,MAAM,mBAAmB,CAAC;AAEhD,OAAO,KAAK,IAAI,MAAM,eAAe,CAAC;AACtC,OAAO,KAAK,cAAc,MAAM,gBAAgB,CAAC;AACjD,OAAO,EACL,qBAAqB,EACrB,wBAAwB,EACxB,WAAW,EACX,oBAAoB,EACrB,MAAM,gBAAgB,CAAC;AACxB,OAAO,KAAK,QAAQ,MAAM,SAAS,CAAC;AACpC,OAAO,EACL,gBAAgB,EAChB,cAAc,EACd,KAAK,EACL,eAAe,EACf,sBAAsB,EACtB,oBAAoB,EACrB,MAAM,SAAS,CAAC;AACjB,OAAO,EAAE,UAAU,EAAE,KAAK,gBAAgB,EAAE,MAAM,qBAAqB,CAAC;AAExE,qBAAa,YAAa,SAAQ,WAAW;IAC3C,KAAK,EAAE,QAAQ,CAAC,KAAK,CAAoC;IACzD,WAAW,EAAE,cAAc,CAAC,WAAW,CAAgD;IAEvF;;OAEG;IACH,MAAM,CAAC,IAAI,EAAE,uBAAuB,EAAE,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAAG,IAAI,CAAC,UAAU,CAAC,WAAW,CAAC;IAQlG;;OAEG;IACH,QAAQ,CAAC,aAAa,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAAG,IAAI,CAAC,UAAU,CAAC,WAAW,CAAC;IAO5F;;OAEG;IACH,MAAM,CACJ,aAAa,EAAE,MAAM,EACrB,IAAI,EAAE,uBAAuB,EAC7B,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAC5B,IAAI,CAAC,UAAU,CAAC,WAAW,CAAC;IAQ/B;;OAEG;IACH,IAAI,CACF,KAAK,CAAC,EAAE,qBAAqB,EAC7B,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAC5B,IAAI,CAAC,WAAW,CAAC,gBAAgB,EAAE,WAAW,CAAC;IAClD,IAAI,CAAC,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAAG,IAAI,CAAC,WAAW,CAAC,gBAAgB,EAAE,WAAW,CAAC;IAepF;;OAEG;IACH,GAAG,CAAC,aAAa,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAAG,IAAI,CAAC,UAAU,CAAC,kBAAkB,CAAC;CAM/F;AAED,qBAAa,gBAAiB,SAAQ,UAAU,CAAC,WAAW,CAAC;CAAG;AAEhE;;;GAGG;AACH,MAAM,WAAW,6BAA6B;IAC5C;;OAEG;IACH,IAAI,EAAE,MAAM,CAAC;CACd;AAED;;GAEG;AACH,MAAM,MAAM,oBAAoB,GAAG,gCAAgC,GAAG,+BAA+B,CAAC;AAEtG;;;GAGG;AACH,MAAM,MAAM,yBAAyB,GAAG,6BAA6B,GAAG,+BAA+B,CAAC;AAExG;;;;GAIG;AACH,MAAM,WAAW,+BAA+B;IAC9C;;OAEG;IACH,IAAI,EAAE,OAAO,CAAC;CACf;AAED,MAAM,WAAW,0BAA0B;IACzC;;;;OAIG;IACH,oBAAoB,EAAE,MAAM,CAAC;IAE7B;;;OAGG;IACH,qBAAqB,EAAE,MAAM,CAAC;CAC/B;AAED,MAAM,WAAW,gCAAgC;IAC/C,MAAM,EAAE,0BAA0B,CAAC;IAEnC;;OAEG;IACH,IAAI,EAAE
,QAAQ,CAAC;CAChB;AAED,MAAM,WAAW,+BAA+B;IAC9C,MAAM,EAAE,0BAA0B,CAAC;IAEnC;;OAEG;IACH,IAAI,EAAE,QAAQ,CAAC;CAChB;AAED;;;GAGG;AACH,MAAM,WAAW,WAAW;IAC1B;;OAEG;IACH,EAAE,EAAE,MAAM,CAAC;IAEX;;OAEG;IACH,UAAU,EAAE,MAAM,CAAC;IAEnB,WAAW,EAAE,WAAW,CAAC,UAAU,CAAC;IAEpC;;OAEG;IACH,cAAc,EAAE,MAAM,GAAG,IAAI,CAAC;IAE9B;;;;;OAKG;IACH,QAAQ,EAAE,OAAO,GAAG,IAAI,CAAC;IAEzB;;OAEG;IACH,IAAI,EAAE,MAAM,CAAC;IAEb;;OAEG;IACH,MAAM,EAAE,cAAc,CAAC;IAEvB;;;;OAIG;IACH,MAAM,EAAE,SAAS,GAAG,aAAa,GAAG,WAAW,CAAC;IAEhD;;OAEG;IACH,WAAW,EAAE,MAAM,CAAC;IAEpB;;OAEG;IACH,aAAa,CAAC,EAAE,WAAW,CAAC,YAAY,CAAC;IAEzC;;OAEG;IACH,UAAU,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;CAC5B;AAED,yBAAiB,WAAW,CAAC;IAC3B,UAAiB,UAAU;QACzB;;WAEG;QACH,SAAS,EAAE,MAAM,CAAC;QAElB;;WAEG;QACH,SAAS,EAAE,MAAM,CAAC;QAElB;;WAEG;QACH,MAAM,EAAE,MAAM,CAAC;QAEf;;WAEG;QACH,WAAW,EAAE,MAAM,CAAC;QAEpB;;WAEG;QACH,KAAK,EAAE,MAAM,CAAC;KACf;IAED;;OAEG;IACH,UAAiB,YAAY;QAC3B;;;WAGG;QACH,MAAM,EAAE,gBAAgB,CAAC;QAEzB;;WAEG;QACH,IAAI,EAAE,MAAM,CAAC;KACd;CACF;AAED,MAAM,WAAW,kBAAkB;IACjC,EAAE,EAAE,MAAM,CAAC;IAEX,OAAO,EAAE,OAAO,CAAC;IAEjB,MAAM,EAAE,sBAAsB,CAAC;CAChC;AAED,MAAM,WAAW,uBAAuB;IACtC;;;OAGG;IACH,iBAAiB,CAAC,EAAE,yBAAyB,CAAC;IAE9C;;OAEG;IACH,aAAa,CAAC,EAAE,uBAAuB,CAAC,YAAY,CAAC;IAErD;;;;OAIG;IACH,QAAQ,CAAC,EAAE,KAAK,CAAC,MAAM,CAAC,CAAC;IAEzB;;;;;OAKG;IACH,QAAQ,CAAC,EAAE,OAAO,GAAG,IAAI,CAAC;IAE1B;;OAEG;IACH,IAAI,CAAC,EAAE,MAAM,CAAC;CACf;AAED,yBAAiB,uBAAuB,CAAC;IACvC;;OAEG;IACH,UAAiB,YAAY;QAC3B;;;WAGG;QACH,MAAM,EAAE,gBAAgB,CAAC;QAEzB;;WAEG;QACH,IAAI,EAAE,MAAM,CAAC;KACd;CACF;AAED,MAAM,WAAW,uBAAuB;IACtC;;OAEG;IACH,aAAa,CAAC,EAAE,uBAAuB,CAAC,YAAY,GAAG,IAAI,CAAC;IAE5D;;;;;OAKG;IACH,QAAQ,CAAC,EAAE,OAAO,GAAG,IAAI,CAAC;IAE1B;;OAEG;IACH,IAAI,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;CACtB;AAED,yBAAiB,uBAAuB,CAAC;IACvC;;OAEG;IACH,UAAiB,YAAY;QAC3B;;;WAGG;QACH,MAAM,EAAE,gBAAgB,CAAC;QAEzB;;WAEG;QACH,IAAI,EAAE,MAAM,CAAC;KACd;CACF;AAED,MAAM,WAAW,qBAAsB,SAAQ,gBAAgB;IAC7D;;;;;OAKG;IACH,MAAM,CAAC,EAAE,MAAM,CAAC;IAEhB;;;OAGG;IACH,KAAK,CAAC,EAAE,KAAK,GAAG,MAAM,CAAC;CACxB;AAOD,MAAM,CAAC,OAAO,WAAW,YAAY,CAAC;IACpC,OAAO,EACL,KAAK,6BAA6B,IAAI,6BAA6B,EACnE,KAAK,oBAAoB,IAAI,oBAAoB,EACjD,KAAK,yBAAyB,IAAI,yBAAyB,EAC3D,KAAK,+BAA+B,IAAI,+BAA+B,EACvE,KAAK,0BAA0B,IAAI,0BAA0B,EAC7D,KAAK,gCAAgC,IAAI,gCAAgC,EACzE,KAAK,+BAA+B,IAAI,+BAA+B,EACvE,KAAK,WAAW,IAAI,WAAW,EAC/B,KAAK,kBAAkB,IAAI,kBAAkB,EAC7C,gBAAgB,IAAI,gBAAgB,EACpC,KAAK,uBAAuB,IAAI,uBAAuB,EACvD,KAAK,uBAAuB,IAAI,uBAAuB,EACvD,KAAK,qBAAqB,IAAI,qBAAqB,GACpD,CAAC;IAEF,OAAO,EACL,KAAK,IAAI,KAAK,EACd,KAAK,eAAe,IAAI,eAAe,EACvC,KAAK,sBAAsB,IAAI,sBAAsB,EACrD,oBAAoB,IAAI,oBAAoB,EAC5C,KAAK,gBAAgB,IAAI,gBAAgB,EACzC,KAAK,cAAc,IAAI,cAAc,GACtC,CAAC;IAEF,OAAO,EACL,WAAW,IAAI,WAAW,EAC1B,KAAK,oBAAoB,IAAI,oBAAoB,EACjD,KAAK,qBAAqB,IAAI,qBAAqB,EACnD,KAAK,wBAAwB,IAAI,wBAAwB,GAC1D,CAAC;CACH"} \ No newline at end of file diff --git a/node_modules/openai/resources/beta/vector-stores/vector-stores.js b/node_modules/openai/resources/beta/vector-stores/vector-stores.js new file mode 100644 index 0000000..22ba0a0 --- /dev/null +++ b/node_modules/openai/resources/beta/vector-stores/vector-stores.js @@ -0,0 +1,98 @@ +"use strict"; +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.VectorStoresPage = exports.VectorStores = void 0; +const resource_1 = require("../../../resource.js"); +const core_1 = require("../../../core.js"); +const FileBatchesAPI = __importStar(require("./file-batches.js")); +const file_batches_1 = require("./file-batches.js"); +const FilesAPI = __importStar(require("./files.js")); +const files_1 = require("./files.js"); +const pagination_1 = require("../../../pagination.js"); +class VectorStores extends resource_1.APIResource { + constructor() { + super(...arguments); + this.files = new FilesAPI.Files(this._client); + this.fileBatches = new FileBatchesAPI.FileBatches(this._client); + } + /** + * Create a vector store. + */ + create(body, options) { + return this._client.post('/vector_stores', { + body, + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + }); + } + /** + * Retrieves a vector store. + */ + retrieve(vectorStoreId, options) { + return this._client.get(`/vector_stores/${vectorStoreId}`, { + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + }); + } + /** + * Modifies a vector store. + */ + update(vectorStoreId, body, options) { + return this._client.post(`/vector_stores/${vectorStoreId}`, { + body, + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + }); + } + list(query = {}, options) { + if ((0, core_1.isRequestOptions)(query)) { + return this.list({}, query); + } + return this._client.getAPIList('/vector_stores', VectorStoresPage, { + query, + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + }); + } + /** + * Delete a vector store. 
+ */ + del(vectorStoreId, options) { + return this._client.delete(`/vector_stores/${vectorStoreId}`, { + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + }); + } +} +exports.VectorStores = VectorStores; +class VectorStoresPage extends pagination_1.CursorPage { +} +exports.VectorStoresPage = VectorStoresPage; +VectorStores.VectorStoresPage = VectorStoresPage; +VectorStores.Files = files_1.Files; +VectorStores.VectorStoreFilesPage = files_1.VectorStoreFilesPage; +VectorStores.FileBatches = file_batches_1.FileBatches; +//# sourceMappingURL=vector-stores.js.map \ No newline at end of file diff --git a/node_modules/openai/resources/beta/vector-stores/vector-stores.js.map b/node_modules/openai/resources/beta/vector-stores/vector-stores.js.map new file mode 100644 index 0000000..5d263e9 --- /dev/null +++ b/node_modules/openai/resources/beta/vector-stores/vector-stores.js.map @@ -0,0 +1 @@ +{"version":3,"file":"vector-stores.js","sourceRoot":"","sources":["../../../src/resources/beta/vector-stores/vector-stores.ts"],"names":[],"mappings":";AAAA,sFAAsF;;;;;;;;;;;;;;;;;;;;;;;;;;AAEtF,mDAAgD;AAChD,2CAAiD;AAEjD,kEAAiD;AACjD,oDAKwB;AACxB,qDAAoC;AACpC,sCAOiB;AACjB,uDAAwE;AAExE,MAAa,YAAa,SAAQ,sBAAW;IAA7C;;QACE,UAAK,GAAmB,IAAI,QAAQ,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;QACzD,gBAAW,GAA+B,IAAI,cAAc,CAAC,WAAW,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;IAqEzF,CAAC;IAnEC;;OAEG;IACH,MAAM,CAAC,IAA6B,EAAE,OAA6B;QACjE,OAAO,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,gBAAgB,EAAE;YACzC,IAAI;YACJ,GAAG,OAAO;YACV,OAAO,EAAE,EAAE,aAAa,EAAE,eAAe,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE;SACjE,CAAC,CAAC;IACL,CAAC;IAED;;OAEG;IACH,QAAQ,CAAC,aAAqB,EAAE,OAA6B;QAC3D,OAAO,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,kBAAkB,aAAa,EAAE,EAAE;YACzD,GAAG,OAAO;YACV,OAAO,EAAE,EAAE,aAAa,EAAE,eAAe,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE;SACjE,CAAC,CAAC;IACL,CAAC;IAED;;OAEG;IACH,MAAM,CACJ,aAAqB,EACrB,IAA6B,EAC7B,OAA6B;QAE7B,OAAO,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,kBAAkB,aAAa,EAAE,EAAE;YAC1D,IAAI;YACJ,GAAG,OAAO;YACV,OAAO,EAAE,EAAE,aAAa,EAAE,eAAe,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE;SACjE,CAAC,CAAC;IACL,CAAC;IAUD,IAAI,CACF,QAAqD,EAAE,EACvD,OAA6B;QAE7B,IAAI,IAAA,uBAAgB,EAAC,KAAK,CAAC,EAAE;YAC3B,OAAO,IAAI,CAAC,IAAI,CAAC,EAAE,EAAE,KAAK,CAAC,CAAC;SAC7B;QACD,OAAO,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,gBAAgB,EAAE,gBAAgB,EAAE;YACjE,KAAK;YACL,GAAG,OAAO;YACV,OAAO,EAAE,EAAE,aAAa,EAAE,eAAe,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE;SACjE,CAAC,CAAC;IACL,CAAC;IAED;;OAEG;IACH,GAAG,CAAC,aAAqB,EAAE,OAA6B;QACtD,OAAO,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,kBAAkB,aAAa,EAAE,EAAE;YAC5D,GAAG,OAAO;YACV,OAAO,EAAE,EAAE,aAAa,EAAE,eAAe,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE;SACjE,CAAC,CAAC;IACL,CAAC;CACF;AAvED,oCAuEC;AAED,MAAa,gBAAiB,SAAQ,uBAAuB;CAAG;AAAhE,4CAAgE;AAkShE,YAAY,CAAC,gBAAgB,GAAG,gBAAgB,CAAC;AACjD,YAAY,CAAC,KAAK,GAAG,aAAK,CAAC;AAC3B,YAAY,CAAC,oBAAoB,GAAG,4BAAoB,CAAC;AACzD,YAAY,CAAC,WAAW,GAAG,0BAAW,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/resources/beta/vector-stores/vector-stores.mjs b/node_modules/openai/resources/beta/vector-stores/vector-stores.mjs new file mode 100644 index 0000000..f80994a --- /dev/null +++ b/node_modules/openai/resources/beta/vector-stores/vector-stores.mjs @@ -0,0 +1,70 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+import { APIResource } from "../../../resource.mjs"; +import { isRequestOptions } from "../../../core.mjs"; +import * as FileBatchesAPI from "./file-batches.mjs"; +import { FileBatches, } from "./file-batches.mjs"; +import * as FilesAPI from "./files.mjs"; +import { Files, VectorStoreFilesPage, } from "./files.mjs"; +import { CursorPage } from "../../../pagination.mjs"; +export class VectorStores extends APIResource { + constructor() { + super(...arguments); + this.files = new FilesAPI.Files(this._client); + this.fileBatches = new FileBatchesAPI.FileBatches(this._client); + } + /** + * Create a vector store. + */ + create(body, options) { + return this._client.post('/vector_stores', { + body, + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + }); + } + /** + * Retrieves a vector store. + */ + retrieve(vectorStoreId, options) { + return this._client.get(`/vector_stores/${vectorStoreId}`, { + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + }); + } + /** + * Modifies a vector store. + */ + update(vectorStoreId, body, options) { + return this._client.post(`/vector_stores/${vectorStoreId}`, { + body, + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + }); + } + list(query = {}, options) { + if (isRequestOptions(query)) { + return this.list({}, query); + } + return this._client.getAPIList('/vector_stores', VectorStoresPage, { + query, + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + }); + } + /** + * Delete a vector store. + */ + del(vectorStoreId, options) { + return this._client.delete(`/vector_stores/${vectorStoreId}`, { + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + }); + } +} +export class VectorStoresPage extends CursorPage { +} +VectorStores.VectorStoresPage = VectorStoresPage; +VectorStores.Files = Files; +VectorStores.VectorStoreFilesPage = VectorStoreFilesPage; +VectorStores.FileBatches = FileBatches; +//# sourceMappingURL=vector-stores.mjs.map \ No newline at end of file diff --git a/node_modules/openai/resources/beta/vector-stores/vector-stores.mjs.map b/node_modules/openai/resources/beta/vector-stores/vector-stores.mjs.map new file mode 100644 index 0000000..227cfb4 --- /dev/null +++ b/node_modules/openai/resources/beta/vector-stores/vector-stores.mjs.map @@ -0,0 +1 @@ 
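
The resource methods implemented above (`create`, `retrieve`, `update`, `list`, `del`) are typically driven like this. A sketch assuming the same `client.beta.vectorStores` accessor, auto-pagination over the returned `VectorStoresPage`, and a placeholder store ID.

import OpenAI from 'openai';

const client = new OpenAI();

async function auditVectorStores() {
  // VectorStoreListParams: newest first; the page is iterated item by item.
  for await (const store of client.beta.vectorStores.list({ order: 'desc' })) {
    console.log(store.id, store.name, store.status, store.usage_bytes);
  }

  // Retrieve a single store and delete it once it is no longer needed.
  const store = await client.beta.vectorStores.retrieve('vs_123'); // placeholder ID
  if (store.status === 'completed') {
    await client.beta.vectorStores.del(store.id);
  }
}
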
+{"version":3,"file":"vector-stores.mjs","sourceRoot":"","sources":["../../../src/resources/beta/vector-stores/vector-stores.ts"],"names":[],"mappings":"AAAA,sFAAsF;OAE/E,EAAE,WAAW,EAAE;OACf,EAAE,gBAAgB,EAAE;OAEpB,KAAK,cAAc;OACnB,EAGL,WAAW,GAEZ;OACM,KAAK,QAAQ;OACb,EAGL,KAAK,EAGL,oBAAoB,GACrB;OACM,EAAE,UAAU,EAAyB;AAE5C,MAAM,OAAO,YAAa,SAAQ,WAAW;IAA7C;;QACE,UAAK,GAAmB,IAAI,QAAQ,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;QACzD,gBAAW,GAA+B,IAAI,cAAc,CAAC,WAAW,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;IAqEzF,CAAC;IAnEC;;OAEG;IACH,MAAM,CAAC,IAA6B,EAAE,OAA6B;QACjE,OAAO,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,gBAAgB,EAAE;YACzC,IAAI;YACJ,GAAG,OAAO;YACV,OAAO,EAAE,EAAE,aAAa,EAAE,eAAe,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE;SACjE,CAAC,CAAC;IACL,CAAC;IAED;;OAEG;IACH,QAAQ,CAAC,aAAqB,EAAE,OAA6B;QAC3D,OAAO,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,kBAAkB,aAAa,EAAE,EAAE;YACzD,GAAG,OAAO;YACV,OAAO,EAAE,EAAE,aAAa,EAAE,eAAe,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE;SACjE,CAAC,CAAC;IACL,CAAC;IAED;;OAEG;IACH,MAAM,CACJ,aAAqB,EACrB,IAA6B,EAC7B,OAA6B;QAE7B,OAAO,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,kBAAkB,aAAa,EAAE,EAAE;YAC1D,IAAI;YACJ,GAAG,OAAO;YACV,OAAO,EAAE,EAAE,aAAa,EAAE,eAAe,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE;SACjE,CAAC,CAAC;IACL,CAAC;IAUD,IAAI,CACF,QAAqD,EAAE,EACvD,OAA6B;QAE7B,IAAI,gBAAgB,CAAC,KAAK,CAAC,EAAE;YAC3B,OAAO,IAAI,CAAC,IAAI,CAAC,EAAE,EAAE,KAAK,CAAC,CAAC;SAC7B;QACD,OAAO,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,gBAAgB,EAAE,gBAAgB,EAAE;YACjE,KAAK;YACL,GAAG,OAAO;YACV,OAAO,EAAE,EAAE,aAAa,EAAE,eAAe,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE;SACjE,CAAC,CAAC;IACL,CAAC;IAED;;OAEG;IACH,GAAG,CAAC,aAAqB,EAAE,OAA6B;QACtD,OAAO,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,kBAAkB,aAAa,EAAE,EAAE;YAC5D,GAAG,OAAO;YACV,OAAO,EAAE,EAAE,aAAa,EAAE,eAAe,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE;SACjE,CAAC,CAAC;IACL,CAAC;CACF;AAED,MAAM,OAAO,gBAAiB,SAAQ,UAAuB;CAAG;AAkShE,YAAY,CAAC,gBAAgB,GAAG,gBAAgB,CAAC;AACjD,YAAY,CAAC,KAAK,GAAG,KAAK,CAAC;AAC3B,YAAY,CAAC,oBAAoB,GAAG,oBAAoB,CAAC;AACzD,YAAY,CAAC,WAAW,GAAG,WAAW,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/resources/chat/chat.d.ts b/node_modules/openai/resources/chat/chat.d.ts new file mode 100644 index 0000000..d8f1b06 --- /dev/null +++ b/node_modules/openai/resources/chat/chat.d.ts @@ -0,0 +1,12 @@ +import { APIResource } from "../../resource.js"; +import * as CompletionsAPI from "./completions.js"; +import { ChatCompletion, ChatCompletionAssistantMessageParam, ChatCompletionAudio, ChatCompletionAudioParam, ChatCompletionChunk, ChatCompletionContentPart, ChatCompletionContentPartImage, ChatCompletionContentPartInputAudio, ChatCompletionContentPartRefusal, ChatCompletionContentPartText, ChatCompletionCreateParams, ChatCompletionCreateParamsNonStreaming, ChatCompletionCreateParamsStreaming, ChatCompletionDeveloperMessageParam, ChatCompletionFunctionCallOption, ChatCompletionFunctionMessageParam, ChatCompletionMessage, ChatCompletionMessageParam, ChatCompletionMessageToolCall, ChatCompletionModality, ChatCompletionNamedToolChoice, ChatCompletionPredictionContent, ChatCompletionReasoningEffort, ChatCompletionRole, ChatCompletionStreamOptions, ChatCompletionSystemMessageParam, ChatCompletionTokenLogprob, ChatCompletionTool, ChatCompletionToolChoiceOption, ChatCompletionToolMessageParam, ChatCompletionUserMessageParam, CompletionCreateParams, CompletionCreateParamsNonStreaming, CompletionCreateParamsStreaming, Completions, CreateChatCompletionRequestMessage } from "./completions.js"; +export declare class Chat extends APIResource { + completions: CompletionsAPI.Completions; +} +export type ChatModel = 'o1' | 'o1-2024-12-17' | 'o1-preview' | 'o1-preview-2024-09-12' | 'o1-mini' | 
'o1-mini-2024-09-12' | 'gpt-4o' | 'gpt-4o-2024-11-20' | 'gpt-4o-2024-08-06' | 'gpt-4o-2024-05-13' | 'gpt-4o-audio-preview' | 'gpt-4o-audio-preview-2024-10-01' | 'gpt-4o-audio-preview-2024-12-17' | 'gpt-4o-mini-audio-preview' | 'gpt-4o-mini-audio-preview-2024-12-17' | 'chatgpt-4o-latest' | 'gpt-4o-mini' | 'gpt-4o-mini-2024-07-18' | 'gpt-4-turbo' | 'gpt-4-turbo-2024-04-09' | 'gpt-4-0125-preview' | 'gpt-4-turbo-preview' | 'gpt-4-1106-preview' | 'gpt-4-vision-preview' | 'gpt-4' | 'gpt-4-0314' | 'gpt-4-0613' | 'gpt-4-32k' | 'gpt-4-32k-0314' | 'gpt-4-32k-0613' | 'gpt-3.5-turbo' | 'gpt-3.5-turbo-16k' | 'gpt-3.5-turbo-0301' | 'gpt-3.5-turbo-0613' | 'gpt-3.5-turbo-1106' | 'gpt-3.5-turbo-0125' | 'gpt-3.5-turbo-16k-0613'; +export declare namespace Chat { + export { type ChatModel as ChatModel }; + export { Completions as Completions, type ChatCompletion as ChatCompletion, type ChatCompletionAssistantMessageParam as ChatCompletionAssistantMessageParam, type ChatCompletionAudio as ChatCompletionAudio, type ChatCompletionAudioParam as ChatCompletionAudioParam, type ChatCompletionChunk as ChatCompletionChunk, type ChatCompletionContentPart as ChatCompletionContentPart, type ChatCompletionContentPartImage as ChatCompletionContentPartImage, type ChatCompletionContentPartInputAudio as ChatCompletionContentPartInputAudio, type ChatCompletionContentPartRefusal as ChatCompletionContentPartRefusal, type ChatCompletionContentPartText as ChatCompletionContentPartText, type ChatCompletionDeveloperMessageParam as ChatCompletionDeveloperMessageParam, type ChatCompletionFunctionCallOption as ChatCompletionFunctionCallOption, type ChatCompletionFunctionMessageParam as ChatCompletionFunctionMessageParam, type ChatCompletionMessage as ChatCompletionMessage, type ChatCompletionMessageParam as ChatCompletionMessageParam, type ChatCompletionMessageToolCall as ChatCompletionMessageToolCall, type ChatCompletionModality as ChatCompletionModality, type ChatCompletionNamedToolChoice as ChatCompletionNamedToolChoice, type ChatCompletionPredictionContent as ChatCompletionPredictionContent, type ChatCompletionReasoningEffort as ChatCompletionReasoningEffort, type ChatCompletionRole as ChatCompletionRole, type ChatCompletionStreamOptions as ChatCompletionStreamOptions, type ChatCompletionSystemMessageParam as ChatCompletionSystemMessageParam, type ChatCompletionTokenLogprob as ChatCompletionTokenLogprob, type ChatCompletionTool as ChatCompletionTool, type ChatCompletionToolChoiceOption as ChatCompletionToolChoiceOption, type ChatCompletionToolMessageParam as ChatCompletionToolMessageParam, type ChatCompletionUserMessageParam as ChatCompletionUserMessageParam, type CreateChatCompletionRequestMessage as CreateChatCompletionRequestMessage, type ChatCompletionCreateParams as ChatCompletionCreateParams, type CompletionCreateParams as CompletionCreateParams, type ChatCompletionCreateParamsNonStreaming as ChatCompletionCreateParamsNonStreaming, type CompletionCreateParamsNonStreaming as CompletionCreateParamsNonStreaming, type ChatCompletionCreateParamsStreaming as ChatCompletionCreateParamsStreaming, type CompletionCreateParamsStreaming as CompletionCreateParamsStreaming, }; +} +//# sourceMappingURL=chat.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/resources/chat/chat.d.ts.map b/node_modules/openai/resources/chat/chat.d.ts.map new file mode 100644 index 0000000..c312226 --- /dev/null +++ b/node_modules/openai/resources/chat/chat.d.ts.map @@ -0,0 +1 @@ 
+{"version":3,"file":"chat.d.ts","sourceRoot":"","sources":["../../src/resources/chat/chat.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,WAAW,EAAE,MAAM,gBAAgB,CAAC;AAC7C,OAAO,KAAK,cAAc,MAAM,eAAe,CAAC;AAChD,OAAO,EACL,cAAc,EACd,mCAAmC,EACnC,mBAAmB,EACnB,wBAAwB,EACxB,mBAAmB,EACnB,yBAAyB,EACzB,8BAA8B,EAC9B,mCAAmC,EACnC,gCAAgC,EAChC,6BAA6B,EAC7B,0BAA0B,EAC1B,sCAAsC,EACtC,mCAAmC,EACnC,mCAAmC,EACnC,gCAAgC,EAChC,kCAAkC,EAClC,qBAAqB,EACrB,0BAA0B,EAC1B,6BAA6B,EAC7B,sBAAsB,EACtB,6BAA6B,EAC7B,+BAA+B,EAC/B,6BAA6B,EAC7B,kBAAkB,EAClB,2BAA2B,EAC3B,gCAAgC,EAChC,0BAA0B,EAC1B,kBAAkB,EAClB,8BAA8B,EAC9B,8BAA8B,EAC9B,8BAA8B,EAC9B,sBAAsB,EACtB,kCAAkC,EAClC,+BAA+B,EAC/B,WAAW,EACX,kCAAkC,EACnC,MAAM,eAAe,CAAC;AAEvB,qBAAa,IAAK,SAAQ,WAAW;IACnC,WAAW,EAAE,cAAc,CAAC,WAAW,CAAgD;CACxF;AAED,MAAM,MAAM,SAAS,GACjB,IAAI,GACJ,eAAe,GACf,YAAY,GACZ,uBAAuB,GACvB,SAAS,GACT,oBAAoB,GACpB,QAAQ,GACR,mBAAmB,GACnB,mBAAmB,GACnB,mBAAmB,GACnB,sBAAsB,GACtB,iCAAiC,GACjC,iCAAiC,GACjC,2BAA2B,GAC3B,sCAAsC,GACtC,mBAAmB,GACnB,aAAa,GACb,wBAAwB,GACxB,aAAa,GACb,wBAAwB,GACxB,oBAAoB,GACpB,qBAAqB,GACrB,oBAAoB,GACpB,sBAAsB,GACtB,OAAO,GACP,YAAY,GACZ,YAAY,GACZ,WAAW,GACX,gBAAgB,GAChB,gBAAgB,GAChB,eAAe,GACf,mBAAmB,GACnB,oBAAoB,GACpB,oBAAoB,GACpB,oBAAoB,GACpB,oBAAoB,GACpB,wBAAwB,CAAC;AAI7B,MAAM,CAAC,OAAO,WAAW,IAAI,CAAC;IAC5B,OAAO,EAAE,KAAK,SAAS,IAAI,SAAS,EAAE,CAAC;IAEvC,OAAO,EACL,WAAW,IAAI,WAAW,EAC1B,KAAK,cAAc,IAAI,cAAc,EACrC,KAAK,mCAAmC,IAAI,mCAAmC,EAC/E,KAAK,mBAAmB,IAAI,mBAAmB,EAC/C,KAAK,wBAAwB,IAAI,wBAAwB,EACzD,KAAK,mBAAmB,IAAI,mBAAmB,EAC/C,KAAK,yBAAyB,IAAI,yBAAyB,EAC3D,KAAK,8BAA8B,IAAI,8BAA8B,EACrE,KAAK,mCAAmC,IAAI,mCAAmC,EAC/E,KAAK,gCAAgC,IAAI,gCAAgC,EACzE,KAAK,6BAA6B,IAAI,6BAA6B,EACnE,KAAK,mCAAmC,IAAI,mCAAmC,EAC/E,KAAK,gCAAgC,IAAI,gCAAgC,EACzE,KAAK,kCAAkC,IAAI,kCAAkC,EAC7E,KAAK,qBAAqB,IAAI,qBAAqB,EACnD,KAAK,0BAA0B,IAAI,0BAA0B,EAC7D,KAAK,6BAA6B,IAAI,6BAA6B,EACnE,KAAK,sBAAsB,IAAI,sBAAsB,EACrD,KAAK,6BAA6B,IAAI,6BAA6B,EACnE,KAAK,+BAA+B,IAAI,+BAA+B,EACvE,KAAK,6BAA6B,IAAI,6BAA6B,EACnE,KAAK,kBAAkB,IAAI,kBAAkB,EAC7C,KAAK,2BAA2B,IAAI,2BAA2B,EAC/D,KAAK,gCAAgC,IAAI,gCAAgC,EACzE,KAAK,0BAA0B,IAAI,0BAA0B,EAC7D,KAAK,kBAAkB,IAAI,kBAAkB,EAC7C,KAAK,8BAA8B,IAAI,8BAA8B,EACrE,KAAK,8BAA8B,IAAI,8BAA8B,EACrE,KAAK,8BAA8B,IAAI,8BAA8B,EACrE,KAAK,kCAAkC,IAAI,kCAAkC,EAC7E,KAAK,0BAA0B,IAAI,0BAA0B,EAC7D,KAAK,sBAAsB,IAAI,sBAAsB,EACrD,KAAK,sCAAsC,IAAI,sCAAsC,EACrF,KAAK,kCAAkC,IAAI,kCAAkC,EAC7E,KAAK,mCAAmC,IAAI,mCAAmC,EAC/E,KAAK,+BAA+B,IAAI,+BAA+B,GACxE,CAAC;CACH"} \ No newline at end of file diff --git a/node_modules/openai/resources/chat/chat.js b/node_modules/openai/resources/chat/chat.js new file mode 100644 index 0000000..682e8da --- /dev/null +++ b/node_modules/openai/resources/chat/chat.js @@ -0,0 +1,39 @@ +"use strict"; +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Chat = void 0; +const resource_1 = require("../../resource.js"); +const CompletionsAPI = __importStar(require("./completions.js")); +const completions_1 = require("./completions.js"); +class Chat extends resource_1.APIResource { + constructor() { + super(...arguments); + this.completions = new CompletionsAPI.Completions(this._client); + } +} +exports.Chat = Chat; +Chat.Completions = completions_1.Completions; +//# sourceMappingURL=chat.js.map \ No newline at end of file diff --git a/node_modules/openai/resources/chat/chat.js.map b/node_modules/openai/resources/chat/chat.js.map new file mode 100644 index 0000000..db53cc5 --- /dev/null +++ b/node_modules/openai/resources/chat/chat.js.map @@ -0,0 +1 @@ +{"version":3,"file":"chat.js","sourceRoot":"","sources":["../../src/resources/chat/chat.ts"],"names":[],"mappings":";AAAA,sFAAsF;;;;;;;;;;;;;;;;;;;;;;;;;;AAEtF,gDAA6C;AAC7C,iEAAgD;AAChD,kDAqCuB;AAEvB,MAAa,IAAK,SAAQ,sBAAW;IAArC;;QACE,gBAAW,GAA+B,IAAI,cAAc,CAAC,WAAW,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;IACzF,CAAC;CAAA;AAFD,oBAEC;AAyCD,IAAI,CAAC,WAAW,GAAG,yBAAW,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/resources/chat/chat.mjs b/node_modules/openai/resources/chat/chat.mjs new file mode 100644 index 0000000..7203398 --- /dev/null +++ b/node_modules/openai/resources/chat/chat.mjs @@ -0,0 +1,12 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+import { APIResource } from "../../resource.mjs"; +import * as CompletionsAPI from "./completions.mjs"; +import { Completions, } from "./completions.mjs"; +export class Chat extends APIResource { + constructor() { + super(...arguments); + this.completions = new CompletionsAPI.Completions(this._client); + } +} +Chat.Completions = Completions; +//# sourceMappingURL=chat.mjs.map \ No newline at end of file diff --git a/node_modules/openai/resources/chat/chat.mjs.map b/node_modules/openai/resources/chat/chat.mjs.map new file mode 100644 index 0000000..b2da107 --- /dev/null +++ b/node_modules/openai/resources/chat/chat.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"chat.mjs","sourceRoot":"","sources":["../../src/resources/chat/chat.ts"],"names":[],"mappings":"AAAA,sFAAsF;OAE/E,EAAE,WAAW,EAAE;OACf,KAAK,cAAc;OACnB,EAmCL,WAAW,GAEZ;AAED,MAAM,OAAO,IAAK,SAAQ,WAAW;IAArC;;QACE,gBAAW,GAA+B,IAAI,cAAc,CAAC,WAAW,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;IACzF,CAAC;CAAA;AAyCD,IAAI,CAAC,WAAW,GAAG,WAAW,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/resources/chat/completions.d.ts b/node_modules/openai/resources/chat/completions.d.ts new file mode 100644 index 0000000..0a20fbd --- /dev/null +++ b/node_modules/openai/resources/chat/completions.d.ts @@ -0,0 +1,1083 @@ +import { APIResource } from "../../resource.js"; +import { APIPromise } from "../../core.js"; +import * as Core from "../../core.js"; +import * as ChatCompletionsAPI from "./completions.js"; +import * as CompletionsAPI from "../completions.js"; +import * as Shared from "../shared.js"; +import * as ChatAPI from "./chat.js"; +import { Stream } from "../../streaming.js"; +export declare class Completions extends APIResource { + /** + * Creates a model response for the given chat conversation. Learn more in the + * [text generation](https://platform.openai.com/docs/guides/text-generation), + * [vision](https://platform.openai.com/docs/guides/vision), and + * [audio](https://platform.openai.com/docs/guides/audio) guides. + * + * Parameter support can differ depending on the model used to generate the + * response, particularly for newer reasoning models. Parameters that are only + * supported for reasoning models are noted below. For the current state of + * unsupported parameters in reasoning models, + * [refer to the reasoning guide](https://platform.openai.com/docs/guides/reasoning). + */ + create(body: ChatCompletionCreateParamsNonStreaming, options?: Core.RequestOptions): APIPromise; + create(body: ChatCompletionCreateParamsStreaming, options?: Core.RequestOptions): APIPromise>; + create(body: ChatCompletionCreateParamsBase, options?: Core.RequestOptions): APIPromise | ChatCompletion>; +} +/** + * Represents a chat completion response returned by model, based on the provided + * input. + */ +export interface ChatCompletion { + /** + * A unique identifier for the chat completion. + */ + id: string; + /** + * A list of chat completion choices. Can be more than one if `n` is greater + * than 1. + */ + choices: Array; + /** + * The Unix timestamp (in seconds) of when the chat completion was created. + */ + created: number; + /** + * The model used for the chat completion. + */ + model: string; + /** + * The object type, which is always `chat.completion`. + */ + object: 'chat.completion'; + /** + * The service tier used for processing the request. This field is only included if + * the `service_tier` parameter is specified in the request. 
+ */ + service_tier?: 'scale' | 'default' | null; + /** + * This fingerprint represents the backend configuration that the model runs with. + * + * Can be used in conjunction with the `seed` request parameter to understand when + * backend changes have been made that might impact determinism. + */ + system_fingerprint?: string; + /** + * Usage statistics for the completion request. + */ + usage?: CompletionsAPI.CompletionUsage; +} +export declare namespace ChatCompletion { + interface Choice { + /** + * The reason the model stopped generating tokens. This will be `stop` if the model + * hit a natural stop point or a provided stop sequence, `length` if the maximum + * number of tokens specified in the request was reached, `content_filter` if + * content was omitted due to a flag from our content filters, `tool_calls` if the + * model called a tool, or `function_call` (deprecated) if the model called a + * function. + */ + finish_reason: 'stop' | 'length' | 'tool_calls' | 'content_filter' | 'function_call'; + /** + * The index of the choice in the list of choices. + */ + index: number; + /** + * Log probability information for the choice. + */ + logprobs: Choice.Logprobs | null; + /** + * A chat completion message generated by the model. + */ + message: ChatCompletionsAPI.ChatCompletionMessage; + } + namespace Choice { + /** + * Log probability information for the choice. + */ + interface Logprobs { + /** + * A list of message content tokens with log probability information. + */ + content: Array | null; + /** + * A list of message refusal tokens with log probability information. + */ + refusal: Array | null; + } + } +} +/** + * Messages sent by the model in response to user messages. + */ +export interface ChatCompletionAssistantMessageParam { + /** + * The role of the messages author, in this case `assistant`. + */ + role: 'assistant'; + /** + * Data about a previous audio response from the model. + * [Learn more](https://platform.openai.com/docs/guides/audio). + */ + audio?: ChatCompletionAssistantMessageParam.Audio | null; + /** + * The contents of the assistant message. Required unless `tool_calls` or + * `function_call` is specified. + */ + content?: string | Array | null; + /** + * @deprecated: Deprecated and replaced by `tool_calls`. The name and arguments of + * a function that should be called, as generated by the model. + */ + function_call?: ChatCompletionAssistantMessageParam.FunctionCall | null; + /** + * An optional name for the participant. Provides the model information to + * differentiate between participants of the same role. + */ + name?: string; + /** + * The refusal message by the assistant. + */ + refusal?: string | null; + /** + * The tool calls generated by the model, such as function calls. + */ + tool_calls?: Array; +} +export declare namespace ChatCompletionAssistantMessageParam { + /** + * Data about a previous audio response from the model. + * [Learn more](https://platform.openai.com/docs/guides/audio). + */ + interface Audio { + /** + * Unique identifier for a previous audio response from the model. + */ + id: string; + } + /** + * @deprecated: Deprecated and replaced by `tool_calls`. The name and arguments of + * a function that should be called, as generated by the model. + */ + interface FunctionCall { + /** + * The arguments to call the function with, as generated by the model in JSON + * format. Note that the model does not always generate valid JSON, and may + * hallucinate parameters not defined by your function schema. 
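
A basic non-streaming call that exercises the `ChatCompletion` shape above. A sketch assuming the usual `client.chat.completions` accessor, with `gpt-4o-mini` picked from the `ChatModel` union earlier in this patch.

import OpenAI from 'openai';

const client = new OpenAI();

async function ask(question: string) {
  // Non-streaming overload: resolves directly to a ChatCompletion.
  const completion = await client.chat.completions.create({
    model: 'gpt-4o-mini',
    messages: [
      { role: 'system', content: 'You are a concise assistant.' },
      { role: 'user', content: question },
    ],
  });

  const choice = completion.choices[0];
  console.log(choice.finish_reason);           // 'stop', 'length', 'tool_calls', ...
  console.log(choice.message.content);         // string | null
  console.log(completion.usage?.total_tokens); // CompletionUsage, when present
  return choice.message.content;
}
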
Validate the + * arguments in your code before calling your function. + */ + arguments: string; + /** + * The name of the function to call. + */ + name: string; + } +} +/** + * If the audio output modality is requested, this object contains data about the + * audio response from the model. + * [Learn more](https://platform.openai.com/docs/guides/audio). + */ +export interface ChatCompletionAudio { + /** + * Unique identifier for this audio response. + */ + id: string; + /** + * Base64 encoded audio bytes generated by the model, in the format specified in + * the request. + */ + data: string; + /** + * The Unix timestamp (in seconds) for when this audio response will no longer be + * accessible on the server for use in multi-turn conversations. + */ + expires_at: number; + /** + * Transcript of the audio generated by the model. + */ + transcript: string; +} +/** + * Parameters for audio output. Required when audio output is requested with + * `modalities: ["audio"]`. + * [Learn more](https://platform.openai.com/docs/guides/audio). + */ +export interface ChatCompletionAudioParam { + /** + * Specifies the output audio format. Must be one of `wav`, `mp3`, `flac`, `opus`, + * or `pcm16`. + */ + format: 'wav' | 'mp3' | 'flac' | 'opus' | 'pcm16'; + /** + * The voice the model uses to respond. Supported voices are `ash`, `ballad`, + * `coral`, `sage`, and `verse` (also supported but not recommended are `alloy`, + * `echo`, and `shimmer`; these voices are less expressive). + */ + voice: 'alloy' | 'ash' | 'ballad' | 'coral' | 'echo' | 'sage' | 'shimmer' | 'verse'; +} +/** + * Represents a streamed chunk of a chat completion response returned by model, + * based on the provided input. + */ +export interface ChatCompletionChunk { + /** + * A unique identifier for the chat completion. Each chunk has the same ID. + */ + id: string; + /** + * A list of chat completion choices. Can contain more than one elements if `n` is + * greater than 1. Can also be empty for the last chunk if you set + * `stream_options: {"include_usage": true}`. + */ + choices: Array; + /** + * The Unix timestamp (in seconds) of when the chat completion was created. Each + * chunk has the same timestamp. + */ + created: number; + /** + * The model to generate the completion. + */ + model: string; + /** + * The object type, which is always `chat.completion.chunk`. + */ + object: 'chat.completion.chunk'; + /** + * The service tier used for processing the request. This field is only included if + * the `service_tier` parameter is specified in the request. + */ + service_tier?: 'scale' | 'default' | null; + /** + * This fingerprint represents the backend configuration that the model runs with. + * Can be used in conjunction with the `seed` request parameter to understand when + * backend changes have been made that might impact determinism. + */ + system_fingerprint?: string; + /** + * An optional field that will only be present when you set + * `stream_options: {"include_usage": true}` in your request. When present, it + * contains a null value except for the last chunk which contains the token usage + * statistics for the entire request. + */ + usage?: CompletionsAPI.CompletionUsage | null; +} +export declare namespace ChatCompletionChunk { + interface Choice { + /** + * A chat completion delta generated by streamed model responses. + */ + delta: Choice.Delta; + /** + * The reason the model stopped generating tokens. 
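
A sketch of requesting audio output, matching `ChatCompletionAudioParam` and `ChatCompletionAudio` above; the `modalities` and `audio` request parameters are documented further down in `ChatCompletionCreateParamsBase`, and the model name comes from the audio-preview entries in the `ChatModel` union.

import OpenAI from 'openai';

const client = new OpenAI();

async function speakHello() {
  const completion = await client.chat.completions.create({
    model: 'gpt-4o-audio-preview',
    modalities: ['text', 'audio'],
    audio: { voice: 'coral', format: 'wav' }, // ChatCompletionAudioParam
    messages: [{ role: 'user', content: 'Say hello in one short sentence.' }],
  });

  // ChatCompletionAudio: base64 audio plus a transcript and an expiry timestamp.
  const audio = completion.choices[0].message.audio;
  if (audio) {
    console.log(audio.transcript, new Date(audio.expires_at * 1000));
    // audio.data holds base64-encoded bytes in the requested format.
  }
}
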
This will be `stop` if the model + * hit a natural stop point or a provided stop sequence, `length` if the maximum + * number of tokens specified in the request was reached, `content_filter` if + * content was omitted due to a flag from our content filters, `tool_calls` if the + * model called a tool, or `function_call` (deprecated) if the model called a + * function. + */ + finish_reason: 'stop' | 'length' | 'tool_calls' | 'content_filter' | 'function_call' | null; + /** + * The index of the choice in the list of choices. + */ + index: number; + /** + * Log probability information for the choice. + */ + logprobs?: Choice.Logprobs | null; + } + namespace Choice { + /** + * A chat completion delta generated by streamed model responses. + */ + interface Delta { + /** + * The contents of the chunk message. + */ + content?: string | null; + /** + * @deprecated: Deprecated and replaced by `tool_calls`. The name and arguments of + * a function that should be called, as generated by the model. + */ + function_call?: Delta.FunctionCall; + /** + * The refusal message generated by the model. + */ + refusal?: string | null; + /** + * The role of the author of this message. + */ + role?: 'system' | 'user' | 'assistant' | 'tool'; + tool_calls?: Array; + } + namespace Delta { + /** + * @deprecated: Deprecated and replaced by `tool_calls`. The name and arguments of + * a function that should be called, as generated by the model. + */ + interface FunctionCall { + /** + * The arguments to call the function with, as generated by the model in JSON + * format. Note that the model does not always generate valid JSON, and may + * hallucinate parameters not defined by your function schema. Validate the + * arguments in your code before calling your function. + */ + arguments?: string; + /** + * The name of the function to call. + */ + name?: string; + } + interface ToolCall { + index: number; + /** + * The ID of the tool call. + */ + id?: string; + function?: ToolCall.Function; + /** + * The type of the tool. Currently, only `function` is supported. + */ + type?: 'function'; + } + namespace ToolCall { + interface Function { + /** + * The arguments to call the function with, as generated by the model in JSON + * format. Note that the model does not always generate valid JSON, and may + * hallucinate parameters not defined by your function schema. Validate the + * arguments in your code before calling your function. + */ + arguments?: string; + /** + * The name of the function to call. + */ + name?: string; + } + } + } + /** + * Log probability information for the choice. + */ + interface Logprobs { + /** + * A list of message content tokens with log probability information. + */ + content: Array | null; + /** + * A list of message refusal tokens with log probability information. + */ + refusal: Array | null; + } + } +} +/** + * Learn about + * [text inputs](https://platform.openai.com/docs/guides/text-generation). + */ +export type ChatCompletionContentPart = ChatCompletionContentPartText | ChatCompletionContentPartImage | ChatCompletionContentPartInputAudio; +/** + * Learn about [image inputs](https://platform.openai.com/docs/guides/vision). + */ +export interface ChatCompletionContentPartImage { + image_url: ChatCompletionContentPartImage.ImageURL; + /** + * The type of the content part. + */ + type: 'image_url'; +} +export declare namespace ChatCompletionContentPartImage { + interface ImageURL { + /** + * Either a URL of the image or the base64 encoded image data. 
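
The streamed counterpart: `ChatCompletionChunk` deltas are consumed roughly as below, with the `usage`-bearing final chunk enabled through `stream_options` (both parameters are described later in this file); same assumed client as in the earlier sketches.

import OpenAI from 'openai';

const client = new OpenAI();

async function streamAnswer() {
  // Streaming overload: resolves to a Stream<ChatCompletionChunk>.
  const stream = await client.chat.completions.create({
    model: 'gpt-4o-mini',
    messages: [{ role: 'user', content: 'Write one sentence about vector stores.' }],
    stream: true,
    stream_options: { include_usage: true },
  });

  for await (const chunk of stream) {
    // With include_usage, the last chunk carries usage and an empty choices array.
    const delta = chunk.choices[0]?.delta?.content;
    if (delta) process.stdout.write(delta);
    if (chunk.usage) console.log('\ntotal tokens:', chunk.usage.total_tokens);
  }
}
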
+ */ + url: string; + /** + * Specifies the detail level of the image. Learn more in the + * [Vision guide](https://platform.openai.com/docs/guides/vision#low-or-high-fidelity-image-understanding). + */ + detail?: 'auto' | 'low' | 'high'; + } +} +/** + * Learn about [audio inputs](https://platform.openai.com/docs/guides/audio). + */ +export interface ChatCompletionContentPartInputAudio { + input_audio: ChatCompletionContentPartInputAudio.InputAudio; + /** + * The type of the content part. Always `input_audio`. + */ + type: 'input_audio'; +} +export declare namespace ChatCompletionContentPartInputAudio { + interface InputAudio { + /** + * Base64 encoded audio data. + */ + data: string; + /** + * The format of the encoded audio data. Currently supports "wav" and "mp3". + */ + format: 'wav' | 'mp3'; + } +} +export interface ChatCompletionContentPartRefusal { + /** + * The refusal message generated by the model. + */ + refusal: string; + /** + * The type of the content part. + */ + type: 'refusal'; +} +/** + * Learn about + * [text inputs](https://platform.openai.com/docs/guides/text-generation). + */ +export interface ChatCompletionContentPartText { + /** + * The text content. + */ + text: string; + /** + * The type of the content part. + */ + type: 'text'; +} +/** + * Developer-provided instructions that the model should follow, regardless of + * messages sent by the user. With o1 models and newer, `developer` messages + * replace the previous `system` messages. + */ +export interface ChatCompletionDeveloperMessageParam { + /** + * The contents of the developer message. + */ + content: string | Array; + /** + * The role of the messages author, in this case `developer`. + */ + role: 'developer'; + /** + * An optional name for the participant. Provides the model information to + * differentiate between participants of the same role. + */ + name?: string; +} +/** + * Specifying a particular function via `{"name": "my_function"}` forces the model + * to call that function. + */ +export interface ChatCompletionFunctionCallOption { + /** + * The name of the function to call. + */ + name: string; +} +/** + * @deprecated + */ +export interface ChatCompletionFunctionMessageParam { + /** + * The contents of the function message. + */ + content: string | null; + /** + * The name of the function to call. + */ + name: string; + /** + * The role of the messages author, in this case `function`. + */ + role: 'function'; +} +/** + * A chat completion message generated by the model. + */ +export interface ChatCompletionMessage { + /** + * The contents of the message. + */ + content: string | null; + /** + * The refusal message generated by the model. + */ + refusal: string | null; + /** + * The role of the author of this message. + */ + role: 'assistant'; + /** + * If the audio output modality is requested, this object contains data about the + * audio response from the model. + * [Learn more](https://platform.openai.com/docs/guides/audio). + */ + audio?: ChatCompletionAudio | null; + /** + * @deprecated: Deprecated and replaced by `tool_calls`. The name and arguments of + * a function that should be called, as generated by the model. + */ + function_call?: ChatCompletionMessage.FunctionCall | null; + /** + * The tool calls generated by the model, such as function calls. + */ + tool_calls?: Array; +} +export declare namespace ChatCompletionMessage { + /** + * @deprecated: Deprecated and replaced by `tool_calls`. The name and arguments of + * a function that should be called, as generated by the model. 
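
The content-part types above compose into a single multimodal user message; a sketch with a text part and an image part (the URL is a placeholder, and the user-message shape itself is declared further down as `ChatCompletionUserMessageParam`).

import OpenAI from 'openai';

const client = new OpenAI();

async function describeImage() {
  const completion = await client.chat.completions.create({
    model: 'gpt-4o',
    messages: [
      {
        role: 'user',
        content: [
          { type: 'text', text: 'What is shown in this image?' }, // ChatCompletionContentPartText
          {
            type: 'image_url', // ChatCompletionContentPartImage
            image_url: { url: 'https://example.com/diagram.png', detail: 'low' },
          },
        ],
      },
    ],
  });
  console.log(completion.choices[0].message.content);
}
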
+ */ + interface FunctionCall { + /** + * The arguments to call the function with, as generated by the model in JSON + * format. Note that the model does not always generate valid JSON, and may + * hallucinate parameters not defined by your function schema. Validate the + * arguments in your code before calling your function. + */ + arguments: string; + /** + * The name of the function to call. + */ + name: string; + } +} +/** + * Developer-provided instructions that the model should follow, regardless of + * messages sent by the user. With o1 models and newer, `developer` messages + * replace the previous `system` messages. + */ +export type ChatCompletionMessageParam = ChatCompletionDeveloperMessageParam | ChatCompletionSystemMessageParam | ChatCompletionUserMessageParam | ChatCompletionAssistantMessageParam | ChatCompletionToolMessageParam | ChatCompletionFunctionMessageParam; +export interface ChatCompletionMessageToolCall { + /** + * The ID of the tool call. + */ + id: string; + /** + * The function that the model called. + */ + function: ChatCompletionMessageToolCall.Function; + /** + * The type of the tool. Currently, only `function` is supported. + */ + type: 'function'; +} +export declare namespace ChatCompletionMessageToolCall { + /** + * The function that the model called. + */ + interface Function { + /** + * The arguments to call the function with, as generated by the model in JSON + * format. Note that the model does not always generate valid JSON, and may + * hallucinate parameters not defined by your function schema. Validate the + * arguments in your code before calling your function. + */ + arguments: string; + /** + * The name of the function to call. + */ + name: string; + } +} +export type ChatCompletionModality = 'text' | 'audio'; +/** + * Specifies a tool the model should use. Use to force the model to call a specific + * function. + */ +export interface ChatCompletionNamedToolChoice { + function: ChatCompletionNamedToolChoice.Function; + /** + * The type of the tool. Currently, only `function` is supported. + */ + type: 'function'; +} +export declare namespace ChatCompletionNamedToolChoice { + interface Function { + /** + * The name of the function to call. + */ + name: string; + } +} +/** + * Static predicted output content, such as the content of a text file that is + * being regenerated. + */ +export interface ChatCompletionPredictionContent { + /** + * The content that should be matched when generating a model response. If + * generated tokens would match this content, the entire model response can be + * returned much more quickly. + */ + content: string | Array; + /** + * The type of the predicted content you want to provide. This type is currently + * always `content`. + */ + type: 'content'; +} +/** + * **o1 models only** + * + * Constrains effort on reasoning for + * [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently + * supported values are `low`, `medium`, and `high`. Reducing reasoning effort can + * result in faster responses and fewer tokens used on reasoning in a response. + */ +export type ChatCompletionReasoningEffort = 'low' | 'medium' | 'high'; +/** + * The role of the author of a message + */ +export type ChatCompletionRole = 'system' | 'user' | 'assistant' | 'tool' | 'function'; +/** + * Options for streaming response. Only set this when you set `stream: true`. + */ +export interface ChatCompletionStreamOptions { + /** + * If set, an additional chunk will be streamed before the `data: [DONE]` message. 
+ * The `usage` field on this chunk shows the token usage statistics for the entire + * request, and the `choices` field will always be an empty array. All other chunks + * will also include a `usage` field, but with a null value. + */ + include_usage?: boolean; +} +/** + * Developer-provided instructions that the model should follow, regardless of + * messages sent by the user. With o1 models and newer, use `developer` messages + * for this purpose instead. + */ +export interface ChatCompletionSystemMessageParam { + /** + * The contents of the system message. + */ + content: string | Array; + /** + * The role of the messages author, in this case `system`. + */ + role: 'system'; + /** + * An optional name for the participant. Provides the model information to + * differentiate between participants of the same role. + */ + name?: string; +} +export interface ChatCompletionTokenLogprob { + /** + * The token. + */ + token: string; + /** + * A list of integers representing the UTF-8 bytes representation of the token. + * Useful in instances where characters are represented by multiple tokens and + * their byte representations must be combined to generate the correct text + * representation. Can be `null` if there is no bytes representation for the token. + */ + bytes: Array | null; + /** + * The log probability of this token, if it is within the top 20 most likely + * tokens. Otherwise, the value `-9999.0` is used to signify that the token is very + * unlikely. + */ + logprob: number; + /** + * List of the most likely tokens and their log probability, at this token + * position. In rare cases, there may be fewer than the number of requested + * `top_logprobs` returned. + */ + top_logprobs: Array; +} +export declare namespace ChatCompletionTokenLogprob { + interface TopLogprob { + /** + * The token. + */ + token: string; + /** + * A list of integers representing the UTF-8 bytes representation of the token. + * Useful in instances where characters are represented by multiple tokens and + * their byte representations must be combined to generate the correct text + * representation. Can be `null` if there is no bytes representation for the token. + */ + bytes: Array | null; + /** + * The log probability of this token, if it is within the top 20 most likely + * tokens. Otherwise, the value `-9999.0` is used to signify that the token is very + * unlikely. + */ + logprob: number; + } +} +export interface ChatCompletionTool { + function: Shared.FunctionDefinition; + /** + * The type of the tool. Currently, only `function` is supported. + */ + type: 'function'; +} +/** + * Controls which (if any) tool is called by the model. `none` means the model will + * not call any tool and instead generates a message. `auto` means the model can + * pick between generating a message or calling one or more tools. `required` means + * the model must call one or more tools. Specifying a particular tool via + * `{"type": "function", "function": {"name": "my_function"}}` forces the model to + * call that tool. + * + * `none` is the default when no tools are present. `auto` is the default if tools + * are present. + */ +export type ChatCompletionToolChoiceOption = 'none' | 'auto' | 'required' | ChatCompletionNamedToolChoice; +export interface ChatCompletionToolMessageParam { + /** + * The contents of the tool message. + */ + content: string | Array; + /** + * The role of the messages author, in this case `tool`. + */ + role: 'tool'; + /** + * Tool call that this message is responding to. 
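
Putting `ChatCompletionTool`, `ChatCompletionMessageToolCall`, and the `tool` role message together, a tool-calling round trip looks roughly like this. The weather function, its JSON-schema parameters, and the stubbed result are purely illustrative, and the `tools`/`tool_choice` request parameters are documented further down.

import OpenAI from 'openai';

const client = new OpenAI();

async function toolRoundTrip() {
  const question = { role: 'user' as const, content: 'What is the weather in Tel Aviv?' };

  const first = await client.chat.completions.create({
    model: 'gpt-4o-mini',
    messages: [question],
    tool_choice: 'auto',
    tools: [
      {
        type: 'function' as const,
        function: {
          name: 'get_weather', // illustrative tool
          description: 'Look up the current weather for a city.',
          parameters: {
            type: 'object',
            properties: { city: { type: 'string' } },
            required: ['city'],
          },
        },
      },
    ],
  });

  const assistant = first.choices[0].message;
  const call = assistant.tool_calls?.[0];
  if (!call) return assistant.content;

  // The arguments are model-generated JSON: validate them before trusting them.
  const args = JSON.parse(call.function.arguments);

  // Answer the tool call in a 'tool' role message keyed by tool_call_id.
  const second = await client.chat.completions.create({
    model: 'gpt-4o-mini',
    messages: [
      question,
      assistant, // the assistant message that issued the tool call
      { role: 'tool', tool_call_id: call.id, content: JSON.stringify({ city: args.city, tempC: 28 }) },
    ],
  });
  return second.choices[0].message.content;
}
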
+ */ + tool_call_id: string; +} +/** + * Messages sent by an end user, containing prompts or additional context + * information. + */ +export interface ChatCompletionUserMessageParam { + /** + * The contents of the user message. + */ + content: string | Array; + /** + * The role of the messages author, in this case `user`. + */ + role: 'user'; + /** + * An optional name for the participant. Provides the model information to + * differentiate between participants of the same role. + */ + name?: string; +} +/** + * @deprecated ChatCompletionMessageParam should be used instead + */ +export type CreateChatCompletionRequestMessage = ChatCompletionMessageParam; +export type ChatCompletionCreateParams = ChatCompletionCreateParamsNonStreaming | ChatCompletionCreateParamsStreaming; +export interface ChatCompletionCreateParamsBase { + /** + * A list of messages comprising the conversation so far. Depending on the + * [model](https://platform.openai.com/docs/models) you use, different message + * types (modalities) are supported, like + * [text](https://platform.openai.com/docs/guides/text-generation), + * [images](https://platform.openai.com/docs/guides/vision), and + * [audio](https://platform.openai.com/docs/guides/audio). + */ + messages: Array; + /** + * ID of the model to use. See the + * [model endpoint compatibility](https://platform.openai.com/docs/models#model-endpoint-compatibility) + * table for details on which models work with the Chat API. + */ + model: (string & {}) | ChatAPI.ChatModel; + /** + * Parameters for audio output. Required when audio output is requested with + * `modalities: ["audio"]`. + * [Learn more](https://platform.openai.com/docs/guides/audio). + */ + audio?: ChatCompletionAudioParam | null; + /** + * Number between -2.0 and 2.0. Positive values penalize new tokens based on their + * existing frequency in the text so far, decreasing the model's likelihood to + * repeat the same line verbatim. + */ + frequency_penalty?: number | null; + /** + * Deprecated in favor of `tool_choice`. + * + * Controls which (if any) function is called by the model. + * + * `none` means the model will not call a function and instead generates a message. + * + * `auto` means the model can pick between generating a message or calling a + * function. + * + * Specifying a particular function via `{"name": "my_function"}` forces the model + * to call that function. + * + * `none` is the default when no functions are present. `auto` is the default if + * functions are present. + */ + function_call?: 'none' | 'auto' | ChatCompletionFunctionCallOption; + /** + * Deprecated in favor of `tools`. + * + * A list of functions the model may generate JSON inputs for. + */ + functions?: Array; + /** + * Modify the likelihood of specified tokens appearing in the completion. + * + * Accepts a JSON object that maps tokens (specified by their token ID in the + * tokenizer) to an associated bias value from -100 to 100. Mathematically, the + * bias is added to the logits generated by the model prior to sampling. The exact + * effect will vary per model, but values between -1 and 1 should decrease or + * increase likelihood of selection; values like -100 or 100 should result in a ban + * or exclusive selection of the relevant token. + */ + logit_bias?: Record | null; + /** + * Whether to return log probabilities of the output tokens or not. If true, + * returns the log probabilities of each output token returned in the `content` of + * `message`. 
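
A sketch of requesting log probabilities, tying the `logprobs` flag here to the `ChatCompletionTokenLogprob` entries above (the companion `top_logprobs` parameter is documented further down); same assumed client.

import OpenAI from 'openai';

const client = new OpenAI();

async function inspectLogprobs() {
  const completion = await client.chat.completions.create({
    model: 'gpt-4o-mini',
    messages: [{ role: 'user', content: 'Answer with a single word: yes or no?' }],
    logprobs: true,
    top_logprobs: 3, // 0-20; requires logprobs: true
  });

  for (const entry of completion.choices[0].logprobs?.content ?? []) {
    // Each entry is a ChatCompletionTokenLogprob with ranked alternatives.
    const alternatives = entry.top_logprobs.map((t) => `${t.token}:${t.logprob.toFixed(2)}`);
    console.log(entry.token, entry.logprob, alternatives.join(' '));
  }
}
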
+ */ + logprobs?: boolean | null; + /** + * An upper bound for the number of tokens that can be generated for a completion, + * including visible output tokens and + * [reasoning tokens](https://platform.openai.com/docs/guides/reasoning). + */ + max_completion_tokens?: number | null; + /** + * The maximum number of [tokens](/tokenizer) that can be generated in the chat + * completion. This value can be used to control + * [costs](https://openai.com/api/pricing/) for text generated via API. + * + * This value is now deprecated in favor of `max_completion_tokens`, and is not + * compatible with + * [o1 series models](https://platform.openai.com/docs/guides/reasoning). + */ + max_tokens?: number | null; + /** + * Developer-defined tags and values used for filtering completions in the + * [dashboard](https://platform.openai.com/chat-completions). + */ + metadata?: Record | null; + /** + * Output types that you would like the model to generate for this request. Most + * models are capable of generating text, which is the default: + * + * `["text"]` + * + * The `gpt-4o-audio-preview` model can also be used to + * [generate audio](https://platform.openai.com/docs/guides/audio). To request that + * this model generate both text and audio responses, you can use: + * + * `["text", "audio"]` + */ + modalities?: Array | null; + /** + * How many chat completion choices to generate for each input message. Note that + * you will be charged based on the number of generated tokens across all of the + * choices. Keep `n` as `1` to minimize costs. + */ + n?: number | null; + /** + * Whether to enable + * [parallel function calling](https://platform.openai.com/docs/guides/function-calling#configuring-parallel-function-calling) + * during tool use. + */ + parallel_tool_calls?: boolean; + /** + * Static predicted output content, such as the content of a text file that is + * being regenerated. + */ + prediction?: ChatCompletionPredictionContent | null; + /** + * Number between -2.0 and 2.0. Positive values penalize new tokens based on + * whether they appear in the text so far, increasing the model's likelihood to + * talk about new topics. + */ + presence_penalty?: number | null; + /** + * **o1 models only** + * + * Constrains effort on reasoning for + * [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently + * supported values are `low`, `medium`, and `high`. Reducing reasoning effort can + * result in faster responses and fewer tokens used on reasoning in a response. + */ + reasoning_effort?: ChatCompletionReasoningEffort; + /** + * An object specifying the format that the model must output. + * + * Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured + * Outputs which ensures the model will match your supplied JSON schema. Learn more + * in the + * [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). + * + * Setting to `{ "type": "json_object" }` enables JSON mode, which ensures the + * message the model generates is valid JSON. + * + * **Important:** when using JSON mode, you **must** also instruct the model to + * produce JSON yourself via a system or user message. Without this, the model may + * generate an unending stream of whitespace until the generation reaches the token + * limit, resulting in a long-running and seemingly "stuck" request. 
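
As the `response_format` note above stresses, JSON mode must be paired with an explicit instruction to produce JSON; a minimal sketch of that pairing (the extraction prompt is illustrative).

import OpenAI from 'openai';

const client = new OpenAI();

async function extractEntities(text: string) {
  const completion = await client.chat.completions.create({
    model: 'gpt-4o-mini',
    // JSON mode guarantees syntactically valid JSON, but the prompt must still
    // ask for JSON, per the warning above.
    response_format: { type: 'json_object' },
    messages: [
      { role: 'system', content: 'Extract the named entities as a JSON object.' },
      { role: 'user', content: text },
    ],
  });
  return JSON.parse(completion.choices[0].message.content ?? '{}');
}
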
Also note that + * the message content may be partially cut off if `finish_reason="length"`, which + * indicates the generation exceeded `max_tokens` or the conversation exceeded the + * max context length. + */ + response_format?: Shared.ResponseFormatText | Shared.ResponseFormatJSONObject | Shared.ResponseFormatJSONSchema; + /** + * This feature is in Beta. If specified, our system will make a best effort to + * sample deterministically, such that repeated requests with the same `seed` and + * parameters should return the same result. Determinism is not guaranteed, and you + * should refer to the `system_fingerprint` response parameter to monitor changes + * in the backend. + */ + seed?: number | null; + /** + * Specifies the latency tier to use for processing the request. This parameter is + * relevant for customers subscribed to the scale tier service: + * + * - If set to 'auto', and the Project is Scale tier enabled, the system will + * utilize scale tier credits until they are exhausted. + * - If set to 'auto', and the Project is not Scale tier enabled, the request will + * be processed using the default service tier with a lower uptime SLA and no + * latency guarentee. + * - If set to 'default', the request will be processed using the default service + * tier with a lower uptime SLA and no latency guarentee. + * - When not set, the default behavior is 'auto'. + * + * When this parameter is set, the response body will include the `service_tier` + * utilized. + */ + service_tier?: 'auto' | 'default' | null; + /** + * Up to 4 sequences where the API will stop generating further tokens. + */ + stop?: string | null | Array; + /** + * Whether or not to store the output of this chat completion request for use in + * our [model distillation](https://platform.openai.com/docs/guides/distillation) + * or [evals](https://platform.openai.com/docs/guides/evals) products. + */ + store?: boolean | null; + /** + * If set, partial message deltas will be sent, like in ChatGPT. Tokens will be + * sent as data-only + * [server-sent events](https://developer.mozilla.org/en-US/docs/Web/API/Server-sent_events/Using_server-sent_events#Event_stream_format) + * as they become available, with the stream terminated by a `data: [DONE]` + * message. + * [Example Python code](https://cookbook.openai.com/examples/how_to_stream_completions). + */ + stream?: boolean | null; + /** + * Options for streaming response. Only set this when you set `stream: true`. + */ + stream_options?: ChatCompletionStreamOptions | null; + /** + * What sampling temperature to use, between 0 and 2. Higher values like 0.8 will + * make the output more random, while lower values like 0.2 will make it more + * focused and deterministic. We generally recommend altering this or `top_p` but + * not both. + */ + temperature?: number | null; + /** + * Controls which (if any) tool is called by the model. `none` means the model will + * not call any tool and instead generates a message. `auto` means the model can + * pick between generating a message or calling one or more tools. `required` means + * the model must call one or more tools. Specifying a particular tool via + * `{"type": "function", "function": {"name": "my_function"}}` forces the model to + * call that tool. + * + * `none` is the default when no tools are present. `auto` is the default if tools + * are present. + */ + tool_choice?: ChatCompletionToolChoiceOption; + /** + * A list of tools the model may call. Currently, only functions are supported as a + * tool. 
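// Illustrative sketch (editorial aside, not part of the vendored file): declaring a function
// tool and forcing it with `tool_choice`, matching the ChatCompletionTool and
// ChatCompletionNamedToolChoice shapes in this file. The tool name, schema, and model are
// placeholders; assumes top-level await.
import OpenAI from 'openai';

const client = new OpenAI();
const response = await client.chat.completions.create({
  model: 'gpt-4o-mini',
  messages: [{ role: 'user', content: 'What is the weather in Paris right now?' }],
  tools: [
    {
      type: 'function',
      function: {
        name: 'get_weather',
        description: 'Look up the current weather for a city.',
        parameters: {
          type: 'object',
          properties: { city: { type: 'string' } },
          required: ['city'],
        },
      },
    },
  ],
  // Forces the named tool; omit to let the model decide (`auto` when tools are present).
  tool_choice: { type: 'function', function: { name: 'get_weather' } },
});
// The model returns arguments as a JSON string; the caller parses and runs the function.
const call = response.choices[0].message.tool_calls?.[0];
if (call) console.log(call.function.name, JSON.parse(call.function.arguments));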
Use this to provide a list of functions the model may generate JSON inputs + * for. A max of 128 functions are supported. + */ + tools?: Array; + /** + * An integer between 0 and 20 specifying the number of most likely tokens to + * return at each token position, each with an associated log probability. + * `logprobs` must be set to `true` if this parameter is used. + */ + top_logprobs?: number | null; + /** + * An alternative to sampling with temperature, called nucleus sampling, where the + * model considers the results of the tokens with top_p probability mass. So 0.1 + * means only the tokens comprising the top 10% probability mass are considered. + * + * We generally recommend altering this or `temperature` but not both. + */ + top_p?: number | null; + /** + * A unique identifier representing your end-user, which can help OpenAI to monitor + * and detect abuse. + * [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). + */ + user?: string; +} +export declare namespace ChatCompletionCreateParams { + /** + * @deprecated + */ + interface Function { + /** + * The name of the function to be called. Must be a-z, A-Z, 0-9, or contain + * underscores and dashes, with a maximum length of 64. + */ + name: string; + /** + * A description of what the function does, used by the model to choose when and + * how to call the function. + */ + description?: string; + /** + * The parameters the functions accepts, described as a JSON Schema object. See the + * [guide](https://platform.openai.com/docs/guides/function-calling) for examples, + * and the + * [JSON Schema reference](https://json-schema.org/understanding-json-schema/) for + * documentation about the format. + * + * Omitting `parameters` defines a function with an empty parameter list. + */ + parameters?: Shared.FunctionParameters; + } + type ChatCompletionCreateParamsNonStreaming = ChatCompletionsAPI.ChatCompletionCreateParamsNonStreaming; + type ChatCompletionCreateParamsStreaming = ChatCompletionsAPI.ChatCompletionCreateParamsStreaming; +} +/** + * @deprecated Use ChatCompletionCreateParams instead + */ +export type CompletionCreateParams = ChatCompletionCreateParams; +export interface ChatCompletionCreateParamsNonStreaming extends ChatCompletionCreateParamsBase { + /** + * If set, partial message deltas will be sent, like in ChatGPT. Tokens will be + * sent as data-only + * [server-sent events](https://developer.mozilla.org/en-US/docs/Web/API/Server-sent_events/Using_server-sent_events#Event_stream_format) + * as they become available, with the stream terminated by a `data: [DONE]` + * message. + * [Example Python code](https://cookbook.openai.com/examples/how_to_stream_completions). + */ + stream?: false | null; +} +/** + * @deprecated Use ChatCompletionCreateParamsNonStreaming instead + */ +export type CompletionCreateParamsNonStreaming = ChatCompletionCreateParamsNonStreaming; +export interface ChatCompletionCreateParamsStreaming extends ChatCompletionCreateParamsBase { + /** + * If set, partial message deltas will be sent, like in ChatGPT. Tokens will be + * sent as data-only + * [server-sent events](https://developer.mozilla.org/en-US/docs/Web/API/Server-sent_events/Using_server-sent_events#Event_stream_format) + * as they become available, with the stream terminated by a `data: [DONE]` + * message. + * [Example Python code](https://cookbook.openai.com/examples/how_to_stream_completions). 
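// Illustrative sketch (editorial aside, not part of the vendored file): `stream: true` selects
// the ChatCompletionCreateParamsStreaming overload below and yields an async-iterable stream
// of ChatCompletionChunk values. Assumes top-level await and a placeholder model name.
import OpenAI from 'openai';

const client = new OpenAI();
const stream = await client.chat.completions.create({
  model: 'gpt-4o-mini',
  messages: [{ role: 'user', content: 'Stream a haiku about code review.' }],
  stream: true,
});
for await (const chunk of stream) {
  // Each chunk carries a delta; content is absent on role-only and finish chunks.
  process.stdout.write(chunk.choices[0]?.delta?.content ?? '');
}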
+ */ + stream: true; +} +/** + * @deprecated Use ChatCompletionCreateParamsStreaming instead + */ +export type CompletionCreateParamsStreaming = ChatCompletionCreateParamsStreaming; +export declare namespace Completions { + export { type ChatCompletion as ChatCompletion, type ChatCompletionAssistantMessageParam as ChatCompletionAssistantMessageParam, type ChatCompletionAudio as ChatCompletionAudio, type ChatCompletionAudioParam as ChatCompletionAudioParam, type ChatCompletionChunk as ChatCompletionChunk, type ChatCompletionContentPart as ChatCompletionContentPart, type ChatCompletionContentPartImage as ChatCompletionContentPartImage, type ChatCompletionContentPartInputAudio as ChatCompletionContentPartInputAudio, type ChatCompletionContentPartRefusal as ChatCompletionContentPartRefusal, type ChatCompletionContentPartText as ChatCompletionContentPartText, type ChatCompletionDeveloperMessageParam as ChatCompletionDeveloperMessageParam, type ChatCompletionFunctionCallOption as ChatCompletionFunctionCallOption, type ChatCompletionFunctionMessageParam as ChatCompletionFunctionMessageParam, type ChatCompletionMessage as ChatCompletionMessage, type ChatCompletionMessageParam as ChatCompletionMessageParam, type ChatCompletionMessageToolCall as ChatCompletionMessageToolCall, type ChatCompletionModality as ChatCompletionModality, type ChatCompletionNamedToolChoice as ChatCompletionNamedToolChoice, type ChatCompletionPredictionContent as ChatCompletionPredictionContent, type ChatCompletionReasoningEffort as ChatCompletionReasoningEffort, type ChatCompletionRole as ChatCompletionRole, type ChatCompletionStreamOptions as ChatCompletionStreamOptions, type ChatCompletionSystemMessageParam as ChatCompletionSystemMessageParam, type ChatCompletionTokenLogprob as ChatCompletionTokenLogprob, type ChatCompletionTool as ChatCompletionTool, type ChatCompletionToolChoiceOption as ChatCompletionToolChoiceOption, type ChatCompletionToolMessageParam as ChatCompletionToolMessageParam, type ChatCompletionUserMessageParam as ChatCompletionUserMessageParam, type CreateChatCompletionRequestMessage as CreateChatCompletionRequestMessage, type ChatCompletionCreateParams as ChatCompletionCreateParams, type CompletionCreateParams as CompletionCreateParams, type ChatCompletionCreateParamsNonStreaming as ChatCompletionCreateParamsNonStreaming, type CompletionCreateParamsNonStreaming as CompletionCreateParamsNonStreaming, type ChatCompletionCreateParamsStreaming as ChatCompletionCreateParamsStreaming, type CompletionCreateParamsStreaming as CompletionCreateParamsStreaming, }; +} +//# sourceMappingURL=completions.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/resources/chat/completions.d.ts.map b/node_modules/openai/resources/chat/completions.d.ts.map new file mode 100644 index 0000000..7e8a5e6 --- /dev/null +++ b/node_modules/openai/resources/chat/completions.d.ts.map @@ -0,0 +1 @@ 
+{"version":3,"file":"completions.d.ts","sourceRoot":"","sources":["../../src/resources/chat/completions.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,WAAW,EAAE,MAAM,gBAAgB,CAAC;AAC7C,OAAO,EAAE,UAAU,EAAE,MAAM,YAAY,CAAC;AACxC,OAAO,KAAK,IAAI,MAAM,YAAY,CAAC;AACnC,OAAO,KAAK,kBAAkB,MAAM,eAAe,CAAC;AACpD,OAAO,KAAK,cAAc,MAAM,gBAAgB,CAAC;AACjD,OAAO,KAAK,MAAM,MAAM,WAAW,CAAC;AACpC,OAAO,KAAK,OAAO,MAAM,QAAQ,CAAC;AAClC,OAAO,EAAE,MAAM,EAAE,MAAM,iBAAiB,CAAC;AAEzC,qBAAa,WAAY,SAAQ,WAAW;IAC1C;;;;;;;;;;;OAWG;IACH,MAAM,CACJ,IAAI,EAAE,sCAAsC,EAC5C,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAC5B,UAAU,CAAC,cAAc,CAAC;IAC7B,MAAM,CACJ,IAAI,EAAE,mCAAmC,EACzC,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAC5B,UAAU,CAAC,MAAM,CAAC,mBAAmB,CAAC,CAAC;IAC1C,MAAM,CACJ,IAAI,EAAE,8BAA8B,EACpC,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAC5B,UAAU,CAAC,MAAM,CAAC,mBAAmB,CAAC,GAAG,cAAc,CAAC;CAS5D;AAED;;;GAGG;AACH,MAAM,WAAW,cAAc;IAC7B;;OAEG;IACH,EAAE,EAAE,MAAM,CAAC;IAEX;;;OAGG;IACH,OAAO,EAAE,KAAK,CAAC,cAAc,CAAC,MAAM,CAAC,CAAC;IAEtC;;OAEG;IACH,OAAO,EAAE,MAAM,CAAC;IAEhB;;OAEG;IACH,KAAK,EAAE,MAAM,CAAC;IAEd;;OAEG;IACH,MAAM,EAAE,iBAAiB,CAAC;IAE1B;;;OAGG;IACH,YAAY,CAAC,EAAE,OAAO,GAAG,SAAS,GAAG,IAAI,CAAC;IAE1C;;;;;OAKG;IACH,kBAAkB,CAAC,EAAE,MAAM,CAAC;IAE5B;;OAEG;IACH,KAAK,CAAC,EAAE,cAAc,CAAC,eAAe,CAAC;CACxC;AAED,yBAAiB,cAAc,CAAC;IAC9B,UAAiB,MAAM;QACrB;;;;;;;WAOG;QACH,aAAa,EAAE,MAAM,GAAG,QAAQ,GAAG,YAAY,GAAG,gBAAgB,GAAG,eAAe,CAAC;QAErF;;WAEG;QACH,KAAK,EAAE,MAAM,CAAC;QAEd;;WAEG;QACH,QAAQ,EAAE,MAAM,CAAC,QAAQ,GAAG,IAAI,CAAC;QAEjC;;WAEG;QACH,OAAO,EAAE,kBAAkB,CAAC,qBAAqB,CAAC;KACnD;IAED,UAAiB,MAAM,CAAC;QACtB;;WAEG;QACH,UAAiB,QAAQ;YACvB;;eAEG;YACH,OAAO,EAAE,KAAK,CAAC,kBAAkB,CAAC,0BAA0B,CAAC,GAAG,IAAI,CAAC;YAErE;;eAEG;YACH,OAAO,EAAE,KAAK,CAAC,kBAAkB,CAAC,0BAA0B,CAAC,GAAG,IAAI,CAAC;SACtE;KACF;CACF;AAED;;GAEG;AACH,MAAM,WAAW,mCAAmC;IAClD;;OAEG;IACH,IAAI,EAAE,WAAW,CAAC;IAElB;;;OAGG;IACH,KAAK,CAAC,EAAE,mCAAmC,CAAC,KAAK,GAAG,IAAI,CAAC;IAEzD;;;OAGG;IACH,OAAO,CAAC,EAAE,MAAM,GAAG,KAAK,CAAC,6BAA6B,GAAG,gCAAgC,CAAC,GAAG,IAAI,CAAC;IAElG;;;OAGG;IACH,aAAa,CAAC,EAAE,mCAAmC,CAAC,YAAY,GAAG,IAAI,CAAC;IAExE;;;OAGG;IACH,IAAI,CAAC,EAAE,MAAM,CAAC;IAEd;;OAEG;IACH,OAAO,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IAExB;;OAEG;IACH,UAAU,CAAC,EAAE,KAAK,CAAC,6BAA6B,CAAC,CAAC;CACnD;AAED,yBAAiB,mCAAmC,CAAC;IACnD;;;OAGG;IACH,UAAiB,KAAK;QACpB;;WAEG;QACH,EAAE,EAAE,MAAM,CAAC;KACZ;IAED;;;OAGG;IACH,UAAiB,YAAY;QAC3B;;;;;WAKG;QACH,SAAS,EAAE,MAAM,CAAC;QAElB;;WAEG;QACH,IAAI,EAAE,MAAM,CAAC;KACd;CACF;AAED;;;;GAIG;AACH,MAAM,WAAW,mBAAmB;IAClC;;OAEG;IACH,EAAE,EAAE,MAAM,CAAC;IAEX;;;OAGG;IACH,IAAI,EAAE,MAAM,CAAC;IAEb;;;OAGG;IACH,UAAU,EAAE,MAAM,CAAC;IAEnB;;OAEG;IACH,UAAU,EAAE,MAAM,CAAC;CACpB;AAED;;;;GAIG;AACH,MAAM,WAAW,wBAAwB;IACvC;;;OAGG;IACH,MAAM,EAAE,KAAK,GAAG,KAAK,GAAG,MAAM,GAAG,MAAM,GAAG,OAAO,CAAC;IAElD;;;;OAIG;IACH,KAAK,EAAE,OAAO,GAAG,KAAK,GAAG,QAAQ,GAAG,OAAO,GAAG,MAAM,GAAG,MAAM,GAAG,SAAS,GAAG,OAAO,CAAC;CACrF;AAED;;;GAGG;AACH,MAAM,WAAW,mBAAmB;IAClC;;OAEG;IACH,EAAE,EAAE,MAAM,CAAC;IAEX;;;;OAIG;IACH,OAAO,EAAE,KAAK,CAAC,mBAAmB,CAAC,MAAM,CAAC,CAAC;IAE3C;;;OAGG;IACH,OAAO,EAAE,MAAM,CAAC;IAEhB;;OAEG;IACH,KAAK,EAAE,MAAM,CAAC;IAEd;;OAEG;IACH,MAAM,EAAE,uBAAuB,CAAC;IAEhC;;;OAGG;IACH,YAAY,CAAC,EAAE,OAAO,GAAG,SAAS,GAAG,IAAI,CAAC;IAE1C;;;;OAIG;IACH,kBAAkB,CAAC,EAAE,MAAM,CAAC;IAE5B;;;;;OAKG;IACH,KAAK,CAAC,EAAE,cAAc,CAAC,eAAe,GAAG,IAAI,CAAC;CAC/C;AAED,yBAAiB,mBAAmB,CAAC;IACnC,UAAiB,MAAM;QACrB;;WAEG;QACH,KAAK,EAAE,MAAM,CAAC,KAAK,CAAC;QAEpB;;;;;;;WAOG;QACH,aAAa,EAAE,MAAM,GAAG,QAAQ,GAAG,YAAY,GAAG,gBAAgB,GAAG,eAAe,GAAG,IAAI,CAAC;QAE5F;;WAEG;QACH,KAAK,EAAE,MAAM,CAAC;QAEd;;WAEG;QACH,QAAQ,CAAC,EAAE,MAAM,CAAC,QAAQ,GAAG,IAAI,CAAC;KACnC;IAED,UAAiB,MAAM,CAAC;QACtB;;WAEG;QAC
H,UAAiB,KAAK;YACpB;;eAEG;YACH,OAAO,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;YAExB;;;eAGG;YACH,aAAa,CAAC,EAAE,KAAK,CAAC,YAAY,CAAC;YAEnC;;eAEG;YACH,OAAO,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;YAExB;;eAEG;YACH,IAAI,CAAC,EAAE,QAAQ,GAAG,MAAM,GAAG,WAAW,GAAG,MAAM,CAAC;YAEhD,UAAU,CAAC,EAAE,KAAK,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC;SACpC;QAED,UAAiB,KAAK,CAAC;YACrB;;;eAGG;YACH,UAAiB,YAAY;gBAC3B;;;;;mBAKG;gBACH,SAAS,CAAC,EAAE,MAAM,CAAC;gBAEnB;;mBAEG;gBACH,IAAI,CAAC,EAAE,MAAM,CAAC;aACf;YAED,UAAiB,QAAQ;gBACvB,KAAK,EAAE,MAAM,CAAC;gBAEd;;mBAEG;gBACH,EAAE,CAAC,EAAE,MAAM,CAAC;gBAEZ,QAAQ,CAAC,EAAE,QAAQ,CAAC,QAAQ,CAAC;gBAE7B;;mBAEG;gBACH,IAAI,CAAC,EAAE,UAAU,CAAC;aACnB;YAED,UAAiB,QAAQ,CAAC;gBACxB,UAAiB,QAAQ;oBACvB;;;;;uBAKG;oBACH,SAAS,CAAC,EAAE,MAAM,CAAC;oBAEnB;;uBAEG;oBACH,IAAI,CAAC,EAAE,MAAM,CAAC;iBACf;aACF;SACF;QAED;;WAEG;QACH,UAAiB,QAAQ;YACvB;;eAEG;YACH,OAAO,EAAE,KAAK,CAAC,kBAAkB,CAAC,0BAA0B,CAAC,GAAG,IAAI,CAAC;YAErE;;eAEG;YACH,OAAO,EAAE,KAAK,CAAC,kBAAkB,CAAC,0BAA0B,CAAC,GAAG,IAAI,CAAC;SACtE;KACF;CACF;AAED;;;GAGG;AACH,MAAM,MAAM,yBAAyB,GACjC,6BAA6B,GAC7B,8BAA8B,GAC9B,mCAAmC,CAAC;AAExC;;GAEG;AACH,MAAM,WAAW,8BAA8B;IAC7C,SAAS,EAAE,8BAA8B,CAAC,QAAQ,CAAC;IAEnD;;OAEG;IACH,IAAI,EAAE,WAAW,CAAC;CACnB;AAED,yBAAiB,8BAA8B,CAAC;IAC9C,UAAiB,QAAQ;QACvB;;WAEG;QACH,GAAG,EAAE,MAAM,CAAC;QAEZ;;;WAGG;QACH,MAAM,CAAC,EAAE,MAAM,GAAG,KAAK,GAAG,MAAM,CAAC;KAClC;CACF;AAED;;GAEG;AACH,MAAM,WAAW,mCAAmC;IAClD,WAAW,EAAE,mCAAmC,CAAC,UAAU,CAAC;IAE5D;;OAEG;IACH,IAAI,EAAE,aAAa,CAAC;CACrB;AAED,yBAAiB,mCAAmC,CAAC;IACnD,UAAiB,UAAU;QACzB;;WAEG;QACH,IAAI,EAAE,MAAM,CAAC;QAEb;;WAEG;QACH,MAAM,EAAE,KAAK,GAAG,KAAK,CAAC;KACvB;CACF;AAED,MAAM,WAAW,gCAAgC;IAC/C;;OAEG;IACH,OAAO,EAAE,MAAM,CAAC;IAEhB;;OAEG;IACH,IAAI,EAAE,SAAS,CAAC;CACjB;AAED;;;GAGG;AACH,MAAM,WAAW,6BAA6B;IAC5C;;OAEG;IACH,IAAI,EAAE,MAAM,CAAC;IAEb;;OAEG;IACH,IAAI,EAAE,MAAM,CAAC;CACd;AAED;;;;GAIG;AACH,MAAM,WAAW,mCAAmC;IAClD;;OAEG;IACH,OAAO,EAAE,MAAM,GAAG,KAAK,CAAC,6BAA6B,CAAC,CAAC;IAEvD;;OAEG;IACH,IAAI,EAAE,WAAW,CAAC;IAElB;;;OAGG;IACH,IAAI,CAAC,EAAE,MAAM,CAAC;CACf;AAED;;;GAGG;AACH,MAAM,WAAW,gCAAgC;IAC/C;;OAEG;IACH,IAAI,EAAE,MAAM,CAAC;CACd;AAED;;GAEG;AACH,MAAM,WAAW,kCAAkC;IACjD;;OAEG;IACH,OAAO,EAAE,MAAM,GAAG,IAAI,CAAC;IAEvB;;OAEG;IACH,IAAI,EAAE,MAAM,CAAC;IAEb;;OAEG;IACH,IAAI,EAAE,UAAU,CAAC;CAClB;AAED;;GAEG;AACH,MAAM,WAAW,qBAAqB;IACpC;;OAEG;IACH,OAAO,EAAE,MAAM,GAAG,IAAI,CAAC;IAEvB;;OAEG;IACH,OAAO,EAAE,MAAM,GAAG,IAAI,CAAC;IAEvB;;OAEG;IACH,IAAI,EAAE,WAAW,CAAC;IAElB;;;;OAIG;IACH,KAAK,CAAC,EAAE,mBAAmB,GAAG,IAAI,CAAC;IAEnC;;;OAGG;IACH,aAAa,CAAC,EAAE,qBAAqB,CAAC,YAAY,GAAG,IAAI,CAAC;IAE1D;;OAEG;IACH,UAAU,CAAC,EAAE,KAAK,CAAC,6BAA6B,CAAC,CAAC;CACnD;AAED,yBAAiB,qBAAqB,CAAC;IACrC;;;OAGG;IACH,UAAiB,YAAY;QAC3B;;;;;WAKG;QACH,SAAS,EAAE,MAAM,CAAC;QAElB;;WAEG;QACH,IAAI,EAAE,MAAM,CAAC;KACd;CACF;AAED;;;;GAIG;AACH,MAAM,MAAM,0BAA0B,GAClC,mCAAmC,GACnC,gCAAgC,GAChC,8BAA8B,GAC9B,mCAAmC,GACnC,8BAA8B,GAC9B,kCAAkC,CAAC;AAEvC,MAAM,WAAW,6BAA6B;IAC5C;;OAEG;IACH,EAAE,EAAE,MAAM,CAAC;IAEX;;OAEG;IACH,QAAQ,EAAE,6BAA6B,CAAC,QAAQ,CAAC;IAEjD;;OAEG;IACH,IAAI,EAAE,UAAU,CAAC;CAClB;AAED,yBAAiB,6BAA6B,CAAC;IAC7C;;OAEG;IACH,UAAiB,QAAQ;QACvB;;;;;WAKG;QACH,SAAS,EAAE,MAAM,CAAC;QAElB;;WAEG;QACH,IAAI,EAAE,MAAM,CAAC;KACd;CACF;AAED,MAAM,MAAM,sBAAsB,GAAG,MAAM,GAAG,OAAO,CAAC;AAEtD;;;GAGG;AACH,MAAM,WAAW,6BAA6B;IAC5C,QAAQ,EAAE,6BAA6B,CAAC,QAAQ,CAAC;IAEjD;;OAEG;IACH,IAAI,EAAE,UAAU,CAAC;CAClB;AAED,yBAAiB,6BAA6B,CAAC;IAC7C,UAAiB,QAAQ;QACvB;;WAEG;QACH,IAAI,EAAE,MAAM,CAAC;KACd;CACF;AAED;;;GAGG;AACH,MAAM,WAAW,+BAA+B;IAC9C;;;;OAIG;IACH,OAAO,EAAE,MAAM,GAAG,KAAK,CAAC,6BAA6B,CAAC,CAAC;IAEvD;;;OAGG;IACH,IAAI,EAAE,SAAS,CAAC;CACjB;AAED;;;;;;;GAOG;AACH,MAAM,MAAM,6BAA6B,GAAG,KAAK,GAAG,QAAQ,GAAG
,MAAM,CAAC;AAEtE;;GAEG;AACH,MAAM,MAAM,kBAAkB,GAAG,QAAQ,GAAG,MAAM,GAAG,WAAW,GAAG,MAAM,GAAG,UAAU,CAAC;AAEvF;;GAEG;AACH,MAAM,WAAW,2BAA2B;IAC1C;;;;;OAKG;IACH,aAAa,CAAC,EAAE,OAAO,CAAC;CACzB;AAED;;;;GAIG;AACH,MAAM,WAAW,gCAAgC;IAC/C;;OAEG;IACH,OAAO,EAAE,MAAM,GAAG,KAAK,CAAC,6BAA6B,CAAC,CAAC;IAEvD;;OAEG;IACH,IAAI,EAAE,QAAQ,CAAC;IAEf;;;OAGG;IACH,IAAI,CAAC,EAAE,MAAM,CAAC;CACf;AAED,MAAM,WAAW,0BAA0B;IACzC;;OAEG;IACH,KAAK,EAAE,MAAM,CAAC;IAEd;;;;;OAKG;IACH,KAAK,EAAE,KAAK,CAAC,MAAM,CAAC,GAAG,IAAI,CAAC;IAE5B;;;;OAIG;IACH,OAAO,EAAE,MAAM,CAAC;IAEhB;;;;OAIG;IACH,YAAY,EAAE,KAAK,CAAC,0BAA0B,CAAC,UAAU,CAAC,CAAC;CAC5D;AAED,yBAAiB,0BAA0B,CAAC;IAC1C,UAAiB,UAAU;QACzB;;WAEG;QACH,KAAK,EAAE,MAAM,CAAC;QAEd;;;;;WAKG;QACH,KAAK,EAAE,KAAK,CAAC,MAAM,CAAC,GAAG,IAAI,CAAC;QAE5B;;;;WAIG;QACH,OAAO,EAAE,MAAM,CAAC;KACjB;CACF;AAED,MAAM,WAAW,kBAAkB;IACjC,QAAQ,EAAE,MAAM,CAAC,kBAAkB,CAAC;IAEpC;;OAEG;IACH,IAAI,EAAE,UAAU,CAAC;CAClB;AAED;;;;;;;;;;GAUG;AACH,MAAM,MAAM,8BAA8B,GAAG,MAAM,GAAG,MAAM,GAAG,UAAU,GAAG,6BAA6B,CAAC;AAE1G,MAAM,WAAW,8BAA8B;IAC7C;;OAEG;IACH,OAAO,EAAE,MAAM,GAAG,KAAK,CAAC,6BAA6B,CAAC,CAAC;IAEvD;;OAEG;IACH,IAAI,EAAE,MAAM,CAAC;IAEb;;OAEG;IACH,YAAY,EAAE,MAAM,CAAC;CACtB;AAED;;;GAGG;AACH,MAAM,WAAW,8BAA8B;IAC7C;;OAEG;IACH,OAAO,EAAE,MAAM,GAAG,KAAK,CAAC,yBAAyB,CAAC,CAAC;IAEnD;;OAEG;IACH,IAAI,EAAE,MAAM,CAAC;IAEb;;;OAGG;IACH,IAAI,CAAC,EAAE,MAAM,CAAC;CACf;AAED;;GAEG;AACH,MAAM,MAAM,kCAAkC,GAAG,0BAA0B,CAAC;AAE5E,MAAM,MAAM,0BAA0B,GAClC,sCAAsC,GACtC,mCAAmC,CAAC;AAExC,MAAM,WAAW,8BAA8B;IAC7C;;;;;;;OAOG;IACH,QAAQ,EAAE,KAAK,CAAC,0BAA0B,CAAC,CAAC;IAE5C;;;;OAIG;IACH,KAAK,EAAE,CAAC,MAAM,GAAG,EAAE,CAAC,GAAG,OAAO,CAAC,SAAS,CAAC;IAEzC;;;;OAIG;IACH,KAAK,CAAC,EAAE,wBAAwB,GAAG,IAAI,CAAC;IAExC;;;;OAIG;IACH,iBAAiB,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IAElC;;;;;;;;;;;;;;;OAeG;IACH,aAAa,CAAC,EAAE,MAAM,GAAG,MAAM,GAAG,gCAAgC,CAAC;IAEnE;;;;OAIG;IACH,SAAS,CAAC,EAAE,KAAK,CAAC,0BAA0B,CAAC,QAAQ,CAAC,CAAC;IAEvD;;;;;;;;;OASG;IACH,UAAU,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,GAAG,IAAI,CAAC;IAE3C;;;;OAIG;IACH,QAAQ,CAAC,EAAE,OAAO,GAAG,IAAI,CAAC;IAE1B;;;;OAIG;IACH,qBAAqB,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IAEtC;;;;;;;;OAQG;IACH,UAAU,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IAE3B;;;OAGG;IACH,QAAQ,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,GAAG,IAAI,CAAC;IAEzC;;;;;;;;;;;OAWG;IACH,UAAU,CAAC,EAAE,KAAK,CAAC,sBAAsB,CAAC,GAAG,IAAI,CAAC;IAElD;;;;OAIG;IACH,CAAC,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IAElB;;;;OAIG;IACH,mBAAmB,CAAC,EAAE,OAAO,CAAC;IAE9B;;;OAGG;IACH,UAAU,CAAC,EAAE,+BAA+B,GAAG,IAAI,CAAC;IAEpD;;;;OAIG;IACH,gBAAgB,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IAEjC;;;;;;;OAOG;IACH,gBAAgB,CAAC,EAAE,6BAA6B,CAAC;IAEjD;;;;;;;;;;;;;;;;;;OAkBG;IACH,eAAe,CAAC,EACZ,MAAM,CAAC,kBAAkB,GACzB,MAAM,CAAC,wBAAwB,GAC/B,MAAM,CAAC,wBAAwB,CAAC;IAEpC;;;;;;OAMG;IACH,IAAI,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IAErB;;;;;;;;;;;;;;;OAeG;IACH,YAAY,CAAC,EAAE,MAAM,GAAG,SAAS,GAAG,IAAI,CAAC;IAEzC;;OAEG;IACH,IAAI,CAAC,EAAE,MAAM,GAAG,IAAI,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC;IAErC;;;;OAIG;IACH,KAAK,CAAC,EAAE,OAAO,GAAG,IAAI,CAAC;IAEvB;;;;;;;OAOG;IACH,MAAM,CAAC,EAAE,OAAO,GAAG,IAAI,CAAC;IAExB;;OAEG;IACH,cAAc,CAAC,EAAE,2BAA2B,GAAG,IAAI,CAAC;IAEpD;;;;;OAKG;IACH,WAAW,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IAE5B;;;;;;;;;;OAUG;IACH,WAAW,CAAC,EAAE,8BAA8B,CAAC;IAE7C;;;;OAIG;IACH,KAAK,CAAC,EAAE,KAAK,CAAC,kBAAkB,CAAC,CAAC;IAElC;;;;OAIG;IACH,YAAY,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IAE7B;;;;;;OAMG;IACH,KAAK,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IAEtB;;;;OAIG;IACH,IAAI,CAAC,EAAE,MAAM,CAAC;CACf;AAED,yBAAiB,0BAA0B,CAAC;IAC1C;;OAEG;IACH,UAAiB,QAAQ;QACvB;;;WAGG;QACH,IAAI,EAAE,MAAM,CAAC;QAEb;;;WAGG;QACH,WAAW,CAAC,EAAE,MAAM,CAAC;QAErB;;;;;;;;WAQG;QACH,UAAU,CAAC,EAAE,MAAM,CAAC,kBAAkB,CAAC;KACxC;IAED,KAAY,sC
AAsC,GAChD,kBAAkB,CAAC,sCAAsC,CAAC;IAC5D,KAAY,mCAAmC,GAAG,kBAAkB,CAAC,mCAAmC,CAAC;CAC1G;AAED;;GAEG;AACH,MAAM,MAAM,sBAAsB,GAAG,0BAA0B,CAAC;AAEhE,MAAM,WAAW,sCAAuC,SAAQ,8BAA8B;IAC5F;;;;;;;OAOG;IACH,MAAM,CAAC,EAAE,KAAK,GAAG,IAAI,CAAC;CACvB;AAED;;GAEG;AACH,MAAM,MAAM,kCAAkC,GAAG,sCAAsC,CAAC;AAExF,MAAM,WAAW,mCAAoC,SAAQ,8BAA8B;IACzF;;;;;;;OAOG;IACH,MAAM,EAAE,IAAI,CAAC;CACd;AAED;;GAEG;AACH,MAAM,MAAM,+BAA+B,GAAG,mCAAmC,CAAC;AAElF,MAAM,CAAC,OAAO,WAAW,WAAW,CAAC;IACnC,OAAO,EACL,KAAK,cAAc,IAAI,cAAc,EACrC,KAAK,mCAAmC,IAAI,mCAAmC,EAC/E,KAAK,mBAAmB,IAAI,mBAAmB,EAC/C,KAAK,wBAAwB,IAAI,wBAAwB,EACzD,KAAK,mBAAmB,IAAI,mBAAmB,EAC/C,KAAK,yBAAyB,IAAI,yBAAyB,EAC3D,KAAK,8BAA8B,IAAI,8BAA8B,EACrE,KAAK,mCAAmC,IAAI,mCAAmC,EAC/E,KAAK,gCAAgC,IAAI,gCAAgC,EACzE,KAAK,6BAA6B,IAAI,6BAA6B,EACnE,KAAK,mCAAmC,IAAI,mCAAmC,EAC/E,KAAK,gCAAgC,IAAI,gCAAgC,EACzE,KAAK,kCAAkC,IAAI,kCAAkC,EAC7E,KAAK,qBAAqB,IAAI,qBAAqB,EACnD,KAAK,0BAA0B,IAAI,0BAA0B,EAC7D,KAAK,6BAA6B,IAAI,6BAA6B,EACnE,KAAK,sBAAsB,IAAI,sBAAsB,EACrD,KAAK,6BAA6B,IAAI,6BAA6B,EACnE,KAAK,+BAA+B,IAAI,+BAA+B,EACvE,KAAK,6BAA6B,IAAI,6BAA6B,EACnE,KAAK,kBAAkB,IAAI,kBAAkB,EAC7C,KAAK,2BAA2B,IAAI,2BAA2B,EAC/D,KAAK,gCAAgC,IAAI,gCAAgC,EACzE,KAAK,0BAA0B,IAAI,0BAA0B,EAC7D,KAAK,kBAAkB,IAAI,kBAAkB,EAC7C,KAAK,8BAA8B,IAAI,8BAA8B,EACrE,KAAK,8BAA8B,IAAI,8BAA8B,EACrE,KAAK,8BAA8B,IAAI,8BAA8B,EACrE,KAAK,kCAAkC,IAAI,kCAAkC,EAC7E,KAAK,0BAA0B,IAAI,0BAA0B,EAC7D,KAAK,sBAAsB,IAAI,sBAAsB,EACrD,KAAK,sCAAsC,IAAI,sCAAsC,EACrF,KAAK,kCAAkC,IAAI,kCAAkC,EAC7E,KAAK,mCAAmC,IAAI,mCAAmC,EAC/E,KAAK,+BAA+B,IAAI,+BAA+B,GACxE,CAAC;CACH"} \ No newline at end of file diff --git a/node_modules/openai/resources/chat/completions.js b/node_modules/openai/resources/chat/completions.js new file mode 100644 index 0000000..2f9bcc9 --- /dev/null +++ b/node_modules/openai/resources/chat/completions.js @@ -0,0 +1,12 @@ +"use strict"; +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Completions = void 0; +const resource_1 = require("../../resource.js"); +class Completions extends resource_1.APIResource { + create(body, options) { + return this._client.post('/chat/completions', { body, ...options, stream: body.stream ?? false }); + } +} +exports.Completions = Completions; +//# sourceMappingURL=completions.js.map \ No newline at end of file diff --git a/node_modules/openai/resources/chat/completions.js.map b/node_modules/openai/resources/chat/completions.js.map new file mode 100644 index 0000000..11e6705 --- /dev/null +++ b/node_modules/openai/resources/chat/completions.js.map @@ -0,0 +1 @@ +{"version":3,"file":"completions.js","sourceRoot":"","sources":["../../src/resources/chat/completions.ts"],"names":[],"mappings":";AAAA,sFAAsF;;;AAEtF,gDAA6C;AAS7C,MAAa,WAAY,SAAQ,sBAAW;IAyB1C,MAAM,CACJ,IAAgC,EAChC,OAA6B;QAE7B,OAAO,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,mBAAmB,EAAE,EAAE,IAAI,EAAE,GAAG,OAAO,EAAE,MAAM,EAAE,IAAI,CAAC,MAAM,IAAI,KAAK,EAAE,CAErD,CAAC;IAC9C,CAAC;CACF;AAjCD,kCAiCC"} \ No newline at end of file diff --git a/node_modules/openai/resources/chat/completions.mjs b/node_modules/openai/resources/chat/completions.mjs new file mode 100644 index 0000000..7a26def --- /dev/null +++ b/node_modules/openai/resources/chat/completions.mjs @@ -0,0 +1,8 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+import { APIResource } from "../../resource.mjs"; +export class Completions extends APIResource { + create(body, options) { + return this._client.post('/chat/completions', { body, ...options, stream: body.stream ?? false }); + } +} +//# sourceMappingURL=completions.mjs.map \ No newline at end of file diff --git a/node_modules/openai/resources/chat/completions.mjs.map b/node_modules/openai/resources/chat/completions.mjs.map new file mode 100644 index 0000000..fd7ea1b --- /dev/null +++ b/node_modules/openai/resources/chat/completions.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"completions.mjs","sourceRoot":"","sources":["../../src/resources/chat/completions.ts"],"names":[],"mappings":"AAAA,sFAAsF;OAE/E,EAAE,WAAW,EAAE;AAStB,MAAM,OAAO,WAAY,SAAQ,WAAW;IAyB1C,MAAM,CACJ,IAAgC,EAChC,OAA6B;QAE7B,OAAO,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,mBAAmB,EAAE,EAAE,IAAI,EAAE,GAAG,OAAO,EAAE,MAAM,EAAE,IAAI,CAAC,MAAM,IAAI,KAAK,EAAE,CAErD,CAAC;IAC9C,CAAC;CACF"} \ No newline at end of file diff --git a/node_modules/openai/resources/chat/index.d.ts b/node_modules/openai/resources/chat/index.d.ts new file mode 100644 index 0000000..749b82b --- /dev/null +++ b/node_modules/openai/resources/chat/index.d.ts @@ -0,0 +1,3 @@ +export { Chat, type ChatModel } from "./chat.js"; +export { Completions, type ChatCompletion, type ChatCompletionAssistantMessageParam, type ChatCompletionAudio, type ChatCompletionAudioParam, type ChatCompletionChunk, type ChatCompletionContentPart, type ChatCompletionContentPartImage, type ChatCompletionContentPartInputAudio, type ChatCompletionContentPartRefusal, type ChatCompletionContentPartText, type ChatCompletionDeveloperMessageParam, type ChatCompletionFunctionCallOption, type ChatCompletionFunctionMessageParam, type ChatCompletionMessage, type ChatCompletionMessageParam, type ChatCompletionMessageToolCall, type ChatCompletionModality, type ChatCompletionNamedToolChoice, type ChatCompletionPredictionContent, type ChatCompletionReasoningEffort, type ChatCompletionRole, type ChatCompletionStreamOptions, type ChatCompletionSystemMessageParam, type ChatCompletionTokenLogprob, type ChatCompletionTool, type ChatCompletionToolChoiceOption, type ChatCompletionToolMessageParam, type ChatCompletionUserMessageParam, type CreateChatCompletionRequestMessage, type ChatCompletionCreateParams, type CompletionCreateParams, type ChatCompletionCreateParamsNonStreaming, type CompletionCreateParamsNonStreaming, type ChatCompletionCreateParamsStreaming, type CompletionCreateParamsStreaming, } from "./completions.js"; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/resources/chat/index.d.ts.map b/node_modules/openai/resources/chat/index.d.ts.map new file mode 100644 index 0000000..62497ef --- /dev/null +++ b/node_modules/openai/resources/chat/index.d.ts.map @@ -0,0 +1 @@ 
+{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/resources/chat/index.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,IAAI,EAAE,KAAK,SAAS,EAAE,MAAM,QAAQ,CAAC;AAC9C,OAAO,EACL,WAAW,EACX,KAAK,cAAc,EACnB,KAAK,mCAAmC,EACxC,KAAK,mBAAmB,EACxB,KAAK,wBAAwB,EAC7B,KAAK,mBAAmB,EACxB,KAAK,yBAAyB,EAC9B,KAAK,8BAA8B,EACnC,KAAK,mCAAmC,EACxC,KAAK,gCAAgC,EACrC,KAAK,6BAA6B,EAClC,KAAK,mCAAmC,EACxC,KAAK,gCAAgC,EACrC,KAAK,kCAAkC,EACvC,KAAK,qBAAqB,EAC1B,KAAK,0BAA0B,EAC/B,KAAK,6BAA6B,EAClC,KAAK,sBAAsB,EAC3B,KAAK,6BAA6B,EAClC,KAAK,+BAA+B,EACpC,KAAK,6BAA6B,EAClC,KAAK,kBAAkB,EACvB,KAAK,2BAA2B,EAChC,KAAK,gCAAgC,EACrC,KAAK,0BAA0B,EAC/B,KAAK,kBAAkB,EACvB,KAAK,8BAA8B,EACnC,KAAK,8BAA8B,EACnC,KAAK,8BAA8B,EACnC,KAAK,kCAAkC,EACvC,KAAK,0BAA0B,EAC/B,KAAK,sBAAsB,EAC3B,KAAK,sCAAsC,EAC3C,KAAK,kCAAkC,EACvC,KAAK,mCAAmC,EACxC,KAAK,+BAA+B,GACrC,MAAM,eAAe,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/resources/chat/index.js b/node_modules/openai/resources/chat/index.js new file mode 100644 index 0000000..53c3d16 --- /dev/null +++ b/node_modules/openai/resources/chat/index.js @@ -0,0 +1,9 @@ +"use strict"; +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Completions = exports.Chat = void 0; +var chat_1 = require("./chat.js"); +Object.defineProperty(exports, "Chat", { enumerable: true, get: function () { return chat_1.Chat; } }); +var completions_1 = require("./completions.js"); +Object.defineProperty(exports, "Completions", { enumerable: true, get: function () { return completions_1.Completions; } }); +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/openai/resources/chat/index.js.map b/node_modules/openai/resources/chat/index.js.map new file mode 100644 index 0000000..09ee9f2 --- /dev/null +++ b/node_modules/openai/resources/chat/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/resources/chat/index.ts"],"names":[],"mappings":";AAAA,sFAAsF;;;AAEtF,kCAA8C;AAArC,4FAAA,IAAI,OAAA;AACb,gDAqCuB;AApCrB,0GAAA,WAAW,OAAA"} \ No newline at end of file diff --git a/node_modules/openai/resources/chat/index.mjs b/node_modules/openai/resources/chat/index.mjs new file mode 100644 index 0000000..f03ef42 --- /dev/null +++ b/node_modules/openai/resources/chat/index.mjs @@ -0,0 +1,4 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+export { Chat } from "./chat.mjs"; +export { Completions, } from "./completions.mjs"; +//# sourceMappingURL=index.mjs.map \ No newline at end of file diff --git a/node_modules/openai/resources/chat/index.mjs.map b/node_modules/openai/resources/chat/index.mjs.map new file mode 100644 index 0000000..1a88057 --- /dev/null +++ b/node_modules/openai/resources/chat/index.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"index.mjs","sourceRoot":"","sources":["../../src/resources/chat/index.ts"],"names":[],"mappings":"AAAA,sFAAsF;OAE/E,EAAE,IAAI,EAAkB;OACxB,EACL,WAAW,GAoCZ"} \ No newline at end of file diff --git a/node_modules/openai/resources/completions.d.ts b/node_modules/openai/resources/completions.d.ts new file mode 100644 index 0000000..a298d95 --- /dev/null +++ b/node_modules/openai/resources/completions.d.ts @@ -0,0 +1,315 @@ +import { APIResource } from "../resource.js"; +import { APIPromise } from "../core.js"; +import * as Core from "../core.js"; +import * as CompletionsAPI from "./completions.js"; +import * as ChatCompletionsAPI from "./chat/completions.js"; +import { Stream } from "../streaming.js"; +export declare class Completions extends APIResource { + /** + * Creates a completion for the provided prompt and parameters. + */ + create(body: CompletionCreateParamsNonStreaming, options?: Core.RequestOptions): APIPromise; + create(body: CompletionCreateParamsStreaming, options?: Core.RequestOptions): APIPromise>; + create(body: CompletionCreateParamsBase, options?: Core.RequestOptions): APIPromise | Completion>; +} +/** + * Represents a completion response from the API. Note: both the streamed and + * non-streamed response objects share the same shape (unlike the chat endpoint). + */ +export interface Completion { + /** + * A unique identifier for the completion. + */ + id: string; + /** + * The list of completion choices the model generated for the input prompt. + */ + choices: Array; + /** + * The Unix timestamp (in seconds) of when the completion was created. + */ + created: number; + /** + * The model used for completion. + */ + model: string; + /** + * The object type, which is always "text_completion" + */ + object: 'text_completion'; + /** + * This fingerprint represents the backend configuration that the model runs with. + * + * Can be used in conjunction with the `seed` request parameter to understand when + * backend changes have been made that might impact determinism. + */ + system_fingerprint?: string; + /** + * Usage statistics for the completion request. + */ + usage?: CompletionUsage; +} +export interface CompletionChoice { + /** + * The reason the model stopped generating tokens. This will be `stop` if the model + * hit a natural stop point or a provided stop sequence, `length` if the maximum + * number of tokens specified in the request was reached, or `content_filter` if + * content was omitted due to a flag from our content filters. + */ + finish_reason: 'stop' | 'length' | 'content_filter'; + index: number; + logprobs: CompletionChoice.Logprobs | null; + text: string; +} +export declare namespace CompletionChoice { + interface Logprobs { + text_offset?: Array; + token_logprobs?: Array; + tokens?: Array; + top_logprobs?: Array>; + } +} +/** + * Usage statistics for the completion request. + */ +export interface CompletionUsage { + /** + * Number of tokens in the generated completion. + */ + completion_tokens: number; + /** + * Number of tokens in the prompt. + */ + prompt_tokens: number; + /** + * Total number of tokens used in the request (prompt + completion). 
+ */ + total_tokens: number; + /** + * Breakdown of tokens used in a completion. + */ + completion_tokens_details?: CompletionUsage.CompletionTokensDetails; + /** + * Breakdown of tokens used in the prompt. + */ + prompt_tokens_details?: CompletionUsage.PromptTokensDetails; +} +export declare namespace CompletionUsage { + /** + * Breakdown of tokens used in a completion. + */ + interface CompletionTokensDetails { + /** + * When using Predicted Outputs, the number of tokens in the prediction that + * appeared in the completion. + */ + accepted_prediction_tokens?: number; + /** + * Audio input tokens generated by the model. + */ + audio_tokens?: number; + /** + * Tokens generated by the model for reasoning. + */ + reasoning_tokens?: number; + /** + * When using Predicted Outputs, the number of tokens in the prediction that did + * not appear in the completion. However, like reasoning tokens, these tokens are + * still counted in the total completion tokens for purposes of billing, output, + * and context window limits. + */ + rejected_prediction_tokens?: number; + } + /** + * Breakdown of tokens used in the prompt. + */ + interface PromptTokensDetails { + /** + * Audio input tokens present in the prompt. + */ + audio_tokens?: number; + /** + * Cached tokens present in the prompt. + */ + cached_tokens?: number; + } +} +export type CompletionCreateParams = CompletionCreateParamsNonStreaming | CompletionCreateParamsStreaming; +export interface CompletionCreateParamsBase { + /** + * ID of the model to use. You can use the + * [List models](https://platform.openai.com/docs/api-reference/models/list) API to + * see all of your available models, or see our + * [Model overview](https://platform.openai.com/docs/models) for descriptions of + * them. + */ + model: (string & {}) | 'gpt-3.5-turbo-instruct' | 'davinci-002' | 'babbage-002'; + /** + * The prompt(s) to generate completions for, encoded as a string, array of + * strings, array of tokens, or array of token arrays. + * + * Note that <|endoftext|> is the document separator that the model sees during + * training, so if a prompt is not specified the model will generate as if from the + * beginning of a new document. + */ + prompt: string | Array | Array | Array> | null; + /** + * Generates `best_of` completions server-side and returns the "best" (the one with + * the highest log probability per token). Results cannot be streamed. + * + * When used with `n`, `best_of` controls the number of candidate completions and + * `n` specifies how many to return – `best_of` must be greater than `n`. + * + * **Note:** Because this parameter generates many completions, it can quickly + * consume your token quota. Use carefully and ensure that you have reasonable + * settings for `max_tokens` and `stop`. + */ + best_of?: number | null; + /** + * Echo back the prompt in addition to the completion + */ + echo?: boolean | null; + /** + * Number between -2.0 and 2.0. Positive values penalize new tokens based on their + * existing frequency in the text so far, decreasing the model's likelihood to + * repeat the same line verbatim. + * + * [See more information about frequency and presence penalties.](https://platform.openai.com/docs/guides/text-generation) + */ + frequency_penalty?: number | null; + /** + * Modify the likelihood of specified tokens appearing in the completion. + * + * Accepts a JSON object that maps tokens (specified by their token ID in the GPT + * tokenizer) to an associated bias value from -100 to 100. 
You can use this + * [tokenizer tool](/tokenizer?view=bpe) to convert text to token IDs. + * Mathematically, the bias is added to the logits generated by the model prior to + * sampling. The exact effect will vary per model, but values between -1 and 1 + * should decrease or increase likelihood of selection; values like -100 or 100 + * should result in a ban or exclusive selection of the relevant token. + * + * As an example, you can pass `{"50256": -100}` to prevent the <|endoftext|> token + * from being generated. + */ + logit_bias?: Record | null; + /** + * Include the log probabilities on the `logprobs` most likely output tokens, as + * well the chosen tokens. For example, if `logprobs` is 5, the API will return a + * list of the 5 most likely tokens. The API will always return the `logprob` of + * the sampled token, so there may be up to `logprobs+1` elements in the response. + * + * The maximum value for `logprobs` is 5. + */ + logprobs?: number | null; + /** + * The maximum number of [tokens](/tokenizer) that can be generated in the + * completion. + * + * The token count of your prompt plus `max_tokens` cannot exceed the model's + * context length. + * [Example Python code](https://cookbook.openai.com/examples/how_to_count_tokens_with_tiktoken) + * for counting tokens. + */ + max_tokens?: number | null; + /** + * How many completions to generate for each prompt. + * + * **Note:** Because this parameter generates many completions, it can quickly + * consume your token quota. Use carefully and ensure that you have reasonable + * settings for `max_tokens` and `stop`. + */ + n?: number | null; + /** + * Number between -2.0 and 2.0. Positive values penalize new tokens based on + * whether they appear in the text so far, increasing the model's likelihood to + * talk about new topics. + * + * [See more information about frequency and presence penalties.](https://platform.openai.com/docs/guides/text-generation) + */ + presence_penalty?: number | null; + /** + * If specified, our system will make a best effort to sample deterministically, + * such that repeated requests with the same `seed` and parameters should return + * the same result. + * + * Determinism is not guaranteed, and you should refer to the `system_fingerprint` + * response parameter to monitor changes in the backend. + */ + seed?: number | null; + /** + * Up to 4 sequences where the API will stop generating further tokens. The + * returned text will not contain the stop sequence. + */ + stop?: string | null | Array; + /** + * Whether to stream back partial progress. If set, tokens will be sent as + * data-only + * [server-sent events](https://developer.mozilla.org/en-US/docs/Web/API/Server-sent_events/Using_server-sent_events#Event_stream_format) + * as they become available, with the stream terminated by a `data: [DONE]` + * message. + * [Example Python code](https://cookbook.openai.com/examples/how_to_stream_completions). + */ + stream?: boolean | null; + /** + * Options for streaming response. Only set this when you set `stream: true`. + */ + stream_options?: ChatCompletionsAPI.ChatCompletionStreamOptions | null; + /** + * The suffix that comes after a completion of inserted text. + * + * This parameter is only supported for `gpt-3.5-turbo-instruct`. + */ + suffix?: string | null; + /** + * What sampling temperature to use, between 0 and 2. Higher values like 0.8 will + * make the output more random, while lower values like 0.2 will make it more + * focused and deterministic. 
+ * + * We generally recommend altering this or `top_p` but not both. + */ + temperature?: number | null; + /** + * An alternative to sampling with temperature, called nucleus sampling, where the + * model considers the results of the tokens with top_p probability mass. So 0.1 + * means only the tokens comprising the top 10% probability mass are considered. + * + * We generally recommend altering this or `temperature` but not both. + */ + top_p?: number | null; + /** + * A unique identifier representing your end-user, which can help OpenAI to monitor + * and detect abuse. + * [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). + */ + user?: string; +} +export declare namespace CompletionCreateParams { + type CompletionCreateParamsNonStreaming = CompletionsAPI.CompletionCreateParamsNonStreaming; + type CompletionCreateParamsStreaming = CompletionsAPI.CompletionCreateParamsStreaming; +} +export interface CompletionCreateParamsNonStreaming extends CompletionCreateParamsBase { + /** + * Whether to stream back partial progress. If set, tokens will be sent as + * data-only + * [server-sent events](https://developer.mozilla.org/en-US/docs/Web/API/Server-sent_events/Using_server-sent_events#Event_stream_format) + * as they become available, with the stream terminated by a `data: [DONE]` + * message. + * [Example Python code](https://cookbook.openai.com/examples/how_to_stream_completions). + */ + stream?: false | null; +} +export interface CompletionCreateParamsStreaming extends CompletionCreateParamsBase { + /** + * Whether to stream back partial progress. If set, tokens will be sent as + * data-only + * [server-sent events](https://developer.mozilla.org/en-US/docs/Web/API/Server-sent_events/Using_server-sent_events#Event_stream_format) + * as they become available, with the stream terminated by a `data: [DONE]` + * message. + * [Example Python code](https://cookbook.openai.com/examples/how_to_stream_completions). 
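// Illustrative sketch (editorial aside, not part of the vendored file): a non-streaming call to
// the legacy /completions endpoint typed by CompletionCreateParamsNonStreaming below.
// The model name and prompt are placeholders; assumes top-level await.
import OpenAI from 'openai';

const client = new OpenAI();
const completion = await client.completions.create({
  model: 'gpt-3.5-turbo-instruct',
  prompt: 'Write a one-line commit message for adding an AI review agent.',
  max_tokens: 32,
  temperature: 0.2,
});
console.log(completion.choices[0].text);
// `usage`, when present, reports prompt_tokens + completion_tokens = total_tokens.
console.log(completion.usage?.total_tokens);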
+ */ + stream: true; +} +export declare namespace Completions { + export { type Completion as Completion, type CompletionChoice as CompletionChoice, type CompletionUsage as CompletionUsage, type CompletionCreateParams as CompletionCreateParams, type CompletionCreateParamsNonStreaming as CompletionCreateParamsNonStreaming, type CompletionCreateParamsStreaming as CompletionCreateParamsStreaming, }; +} +//# sourceMappingURL=completions.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/resources/completions.d.ts.map b/node_modules/openai/resources/completions.d.ts.map new file mode 100644 index 0000000..3b21411 --- /dev/null +++ b/node_modules/openai/resources/completions.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"completions.d.ts","sourceRoot":"","sources":["../src/resources/completions.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,WAAW,EAAE,MAAM,aAAa,CAAC;AAC1C,OAAO,EAAE,UAAU,EAAE,MAAM,SAAS,CAAC;AACrC,OAAO,KAAK,IAAI,MAAM,SAAS,CAAC;AAChC,OAAO,KAAK,cAAc,MAAM,eAAe,CAAC;AAChD,OAAO,KAAK,kBAAkB,MAAM,oBAAoB,CAAC;AACzD,OAAO,EAAE,MAAM,EAAE,MAAM,cAAc,CAAC;AAEtC,qBAAa,WAAY,SAAQ,WAAW;IAC1C;;OAEG;IACH,MAAM,CAAC,IAAI,EAAE,kCAAkC,EAAE,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAAG,UAAU,CAAC,UAAU,CAAC;IACvG,MAAM,CACJ,IAAI,EAAE,+BAA+B,EACrC,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAC5B,UAAU,CAAC,MAAM,CAAC,UAAU,CAAC,CAAC;IACjC,MAAM,CACJ,IAAI,EAAE,0BAA0B,EAChC,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAC5B,UAAU,CAAC,MAAM,CAAC,UAAU,CAAC,GAAG,UAAU,CAAC;CAS/C;AAED;;;GAGG;AACH,MAAM,WAAW,UAAU;IACzB;;OAEG;IACH,EAAE,EAAE,MAAM,CAAC;IAEX;;OAEG;IACH,OAAO,EAAE,KAAK,CAAC,gBAAgB,CAAC,CAAC;IAEjC;;OAEG;IACH,OAAO,EAAE,MAAM,CAAC;IAEhB;;OAEG;IACH,KAAK,EAAE,MAAM,CAAC;IAEd;;OAEG;IACH,MAAM,EAAE,iBAAiB,CAAC;IAE1B;;;;;OAKG;IACH,kBAAkB,CAAC,EAAE,MAAM,CAAC;IAE5B;;OAEG;IACH,KAAK,CAAC,EAAE,eAAe,CAAC;CACzB;AAED,MAAM,WAAW,gBAAgB;IAC/B;;;;;OAKG;IACH,aAAa,EAAE,MAAM,GAAG,QAAQ,GAAG,gBAAgB,CAAC;IAEpD,KAAK,EAAE,MAAM,CAAC;IAEd,QAAQ,EAAE,gBAAgB,CAAC,QAAQ,GAAG,IAAI,CAAC;IAE3C,IAAI,EAAE,MAAM,CAAC;CACd;AAED,yBAAiB,gBAAgB,CAAC;IAChC,UAAiB,QAAQ;QACvB,WAAW,CAAC,EAAE,KAAK,CAAC,MAAM,CAAC,CAAC;QAE5B,cAAc,CAAC,EAAE,KAAK,CAAC,MAAM,CAAC,CAAC;QAE/B,MAAM,CAAC,EAAE,KAAK,CAAC,MAAM,CAAC,CAAC;QAEvB,YAAY,CAAC,EAAE,KAAK,CAAC,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC,CAAC;KAC9C;CACF;AAED;;GAEG;AACH,MAAM,WAAW,eAAe;IAC9B;;OAEG;IACH,iBAAiB,EAAE,MAAM,CAAC;IAE1B;;OAEG;IACH,aAAa,EAAE,MAAM,CAAC;IAEtB;;OAEG;IACH,YAAY,EAAE,MAAM,CAAC;IAErB;;OAEG;IACH,yBAAyB,CAAC,EAAE,eAAe,CAAC,uBAAuB,CAAC;IAEpE;;OAEG;IACH,qBAAqB,CAAC,EAAE,eAAe,CAAC,mBAAmB,CAAC;CAC7D;AAED,yBAAiB,eAAe,CAAC;IAC/B;;OAEG;IACH,UAAiB,uBAAuB;QACtC;;;WAGG;QACH,0BAA0B,CAAC,EAAE,MAAM,CAAC;QAEpC;;WAEG;QACH,YAAY,CAAC,EAAE,MAAM,CAAC;QAEtB;;WAEG;QACH,gBAAgB,CAAC,EAAE,MAAM,CAAC;QAE1B;;;;;WAKG;QACH,0BAA0B,CAAC,EAAE,MAAM,CAAC;KACrC;IAED;;OAEG;IACH,UAAiB,mBAAmB;QAClC;;WAEG;QACH,YAAY,CAAC,EAAE,MAAM,CAAC;QAEtB;;WAEG;QACH,aAAa,CAAC,EAAE,MAAM,CAAC;KACxB;CACF;AAED,MAAM,MAAM,sBAAsB,GAAG,kCAAkC,GAAG,+BAA+B,CAAC;AAE1G,MAAM,WAAW,0BAA0B;IACzC;;;;;;OAMG;IACH,KAAK,EAAE,CAAC,MAAM,GAAG,EAAE,CAAC,GAAG,wBAAwB,GAAG,aAAa,GAAG,aAAa,CAAC;IAEhF;;;;;;;OAOG;IACH,MAAM,EAAE,MAAM,GAAG,KAAK,CAAC,MAAM,CAAC,GAAG,KAAK,CAAC,MAAM,CAAC,GAAG,KAAK,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC,GAAG,IAAI,CAAC;IAE7E;;;;;;;;;;OAUG;IACH,OAAO,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IAExB;;OAEG;IACH,IAAI,CAAC,EAAE,OAAO,GAAG,IAAI,CAAC;IAEtB;;;;;;OAMG;IACH,iBAAiB,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IAElC;;;;;;;;;;;;;OAaG;IACH,UAAU,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,GAAG,IAAI,CAAC;IAE3C;;;;;;;OAOG;IACH,QAAQ,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IAEzB;;;;;;;;OAQG;IACH,UAAU,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IAE3B;;;;;;OAMG;IACH,CAAC,C
AAC,EAAE,MAAM,GAAG,IAAI,CAAC;IAElB;;;;;;OAMG;IACH,gBAAgB,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IAEjC;;;;;;;OAOG;IACH,IAAI,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IAErB;;;OAGG;IACH,IAAI,CAAC,EAAE,MAAM,GAAG,IAAI,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC;IAErC;;;;;;;OAOG;IACH,MAAM,CAAC,EAAE,OAAO,GAAG,IAAI,CAAC;IAExB;;OAEG;IACH,cAAc,CAAC,EAAE,kBAAkB,CAAC,2BAA2B,GAAG,IAAI,CAAC;IAEvE;;;;OAIG;IACH,MAAM,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IAEvB;;;;;;OAMG;IACH,WAAW,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IAE5B;;;;;;OAMG;IACH,KAAK,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IAEtB;;;;OAIG;IACH,IAAI,CAAC,EAAE,MAAM,CAAC;CACf;AAED,yBAAiB,sBAAsB,CAAC;IACtC,KAAY,kCAAkC,GAAG,cAAc,CAAC,kCAAkC,CAAC;IACnG,KAAY,+BAA+B,GAAG,cAAc,CAAC,+BAA+B,CAAC;CAC9F;AAED,MAAM,WAAW,kCAAmC,SAAQ,0BAA0B;IACpF;;;;;;;OAOG;IACH,MAAM,CAAC,EAAE,KAAK,GAAG,IAAI,CAAC;CACvB;AAED,MAAM,WAAW,+BAAgC,SAAQ,0BAA0B;IACjF;;;;;;;OAOG;IACH,MAAM,EAAE,IAAI,CAAC;CACd;AAED,MAAM,CAAC,OAAO,WAAW,WAAW,CAAC;IACnC,OAAO,EACL,KAAK,UAAU,IAAI,UAAU,EAC7B,KAAK,gBAAgB,IAAI,gBAAgB,EACzC,KAAK,eAAe,IAAI,eAAe,EACvC,KAAK,sBAAsB,IAAI,sBAAsB,EACrD,KAAK,kCAAkC,IAAI,kCAAkC,EAC7E,KAAK,+BAA+B,IAAI,+BAA+B,GACxE,CAAC;CACH"} \ No newline at end of file diff --git a/node_modules/openai/resources/completions.js b/node_modules/openai/resources/completions.js new file mode 100644 index 0000000..2389eb9 --- /dev/null +++ b/node_modules/openai/resources/completions.js @@ -0,0 +1,12 @@ +"use strict"; +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Completions = void 0; +const resource_1 = require("../resource.js"); +class Completions extends resource_1.APIResource { + create(body, options) { + return this._client.post('/completions', { body, ...options, stream: body.stream ?? false }); + } +} +exports.Completions = Completions; +//# sourceMappingURL=completions.js.map \ No newline at end of file diff --git a/node_modules/openai/resources/completions.js.map b/node_modules/openai/resources/completions.js.map new file mode 100644 index 0000000..d1726c6 --- /dev/null +++ b/node_modules/openai/resources/completions.js.map @@ -0,0 +1 @@ +{"version":3,"file":"completions.js","sourceRoot":"","sources":["../src/resources/completions.ts"],"names":[],"mappings":";AAAA,sFAAsF;;;AAEtF,6CAA0C;AAO1C,MAAa,WAAY,SAAQ,sBAAW;IAa1C,MAAM,CACJ,IAA4B,EAC5B,OAA6B;QAE7B,OAAO,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,cAAc,EAAE,EAAE,IAAI,EAAE,GAAG,OAAO,EAAE,MAAM,EAAE,IAAI,CAAC,MAAM,IAAI,KAAK,EAAE,CAEzD,CAAC;IACrC,CAAC;CACF;AArBD,kCAqBC"} \ No newline at end of file diff --git a/node_modules/openai/resources/completions.mjs b/node_modules/openai/resources/completions.mjs new file mode 100644 index 0000000..75b8e2d --- /dev/null +++ b/node_modules/openai/resources/completions.mjs @@ -0,0 +1,8 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. +import { APIResource } from "../resource.mjs"; +export class Completions extends APIResource { + create(body, options) { + return this._client.post('/completions', { body, ...options, stream: body.stream ?? 
false }); + } +} +//# sourceMappingURL=completions.mjs.map \ No newline at end of file diff --git a/node_modules/openai/resources/completions.mjs.map b/node_modules/openai/resources/completions.mjs.map new file mode 100644 index 0000000..5e6a2c7 --- /dev/null +++ b/node_modules/openai/resources/completions.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"completions.mjs","sourceRoot":"","sources":["../src/resources/completions.ts"],"names":[],"mappings":"AAAA,sFAAsF;OAE/E,EAAE,WAAW,EAAE;AAOtB,MAAM,OAAO,WAAY,SAAQ,WAAW;IAa1C,MAAM,CACJ,IAA4B,EAC5B,OAA6B;QAE7B,OAAO,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,cAAc,EAAE,EAAE,IAAI,EAAE,GAAG,OAAO,EAAE,MAAM,EAAE,IAAI,CAAC,MAAM,IAAI,KAAK,EAAE,CAEzD,CAAC;IACrC,CAAC;CACF"} \ No newline at end of file diff --git a/node_modules/openai/resources/embeddings.d.ts b/node_modules/openai/resources/embeddings.d.ts new file mode 100644 index 0000000..2452707 --- /dev/null +++ b/node_modules/openai/resources/embeddings.d.ts @@ -0,0 +1,101 @@ +import { APIResource } from "../resource.js"; +import * as Core from "../core.js"; +export declare class Embeddings extends APIResource { + /** + * Creates an embedding vector representing the input text. + */ + create(body: EmbeddingCreateParams, options?: Core.RequestOptions): Core.APIPromise; +} +export interface CreateEmbeddingResponse { + /** + * The list of embeddings generated by the model. + */ + data: Array; + /** + * The name of the model used to generate the embedding. + */ + model: string; + /** + * The object type, which is always "list". + */ + object: 'list'; + /** + * The usage information for the request. + */ + usage: CreateEmbeddingResponse.Usage; +} +export declare namespace CreateEmbeddingResponse { + /** + * The usage information for the request. + */ + interface Usage { + /** + * The number of tokens used by the prompt. + */ + prompt_tokens: number; + /** + * The total number of tokens used by the request. + */ + total_tokens: number; + } +} +/** + * Represents an embedding vector returned by embedding endpoint. + */ +export interface Embedding { + /** + * The embedding vector, which is a list of floats. The length of vector depends on + * the model as listed in the + * [embedding guide](https://platform.openai.com/docs/guides/embeddings). + */ + embedding: Array; + /** + * The index of the embedding in the list of embeddings. + */ + index: number; + /** + * The object type, which is always "embedding". + */ + object: 'embedding'; +} +export type EmbeddingModel = 'text-embedding-ada-002' | 'text-embedding-3-small' | 'text-embedding-3-large'; +export interface EmbeddingCreateParams { + /** + * Input text to embed, encoded as a string or array of tokens. To embed multiple + * inputs in a single request, pass an array of strings or array of token arrays. + * The input must not exceed the max input tokens for the model (8192 tokens for + * `text-embedding-ada-002`), cannot be an empty string, and any array must be 2048 + * dimensions or less. + * [Example Python code](https://cookbook.openai.com/examples/how_to_count_tokens_with_tiktoken) + * for counting tokens. + */ + input: string | Array | Array | Array>; + /** + * ID of the model to use. You can use the + * [List models](https://platform.openai.com/docs/api-reference/models/list) API to + * see all of your available models, or see our + * [Model overview](https://platform.openai.com/docs/models) for descriptions of + * them. + */ + model: (string & {}) | EmbeddingModel; + /** + * The number of dimensions the resulting output embeddings should have. 
Only + * supported in `text-embedding-3` and later models. + */ + dimensions?: number; + /** + * The format to return the embeddings in. Can be either `float` or + * [`base64`](https://pypi.org/project/pybase64/). + */ + encoding_format?: 'float' | 'base64'; + /** + * A unique identifier representing your end-user, which can help OpenAI to monitor + * and detect abuse. + * [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). + */ + user?: string; +} +export declare namespace Embeddings { + export { type CreateEmbeddingResponse as CreateEmbeddingResponse, type Embedding as Embedding, type EmbeddingModel as EmbeddingModel, type EmbeddingCreateParams as EmbeddingCreateParams, }; +} +//# sourceMappingURL=embeddings.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/resources/embeddings.d.ts.map b/node_modules/openai/resources/embeddings.d.ts.map new file mode 100644 index 0000000..f6fc3f1 --- /dev/null +++ b/node_modules/openai/resources/embeddings.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"embeddings.d.ts","sourceRoot":"","sources":["../src/resources/embeddings.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,WAAW,EAAE,MAAM,aAAa,CAAC;AAC1C,OAAO,KAAK,IAAI,MAAM,SAAS,CAAC;AAEhC,qBAAa,UAAW,SAAQ,WAAW;IACzC;;OAEG;IACH,MAAM,CACJ,IAAI,EAAE,qBAAqB,EAC3B,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAC5B,IAAI,CAAC,UAAU,CAAC,uBAAuB,CAAC;CAG5C;AAED,MAAM,WAAW,uBAAuB;IACtC;;OAEG;IACH,IAAI,EAAE,KAAK,CAAC,SAAS,CAAC,CAAC;IAEvB;;OAEG;IACH,KAAK,EAAE,MAAM,CAAC;IAEd;;OAEG;IACH,MAAM,EAAE,MAAM,CAAC;IAEf;;OAEG;IACH,KAAK,EAAE,uBAAuB,CAAC,KAAK,CAAC;CACtC;AAED,yBAAiB,uBAAuB,CAAC;IACvC;;OAEG;IACH,UAAiB,KAAK;QACpB;;WAEG;QACH,aAAa,EAAE,MAAM,CAAC;QAEtB;;WAEG;QACH,YAAY,EAAE,MAAM,CAAC;KACtB;CACF;AAED;;GAEG;AACH,MAAM,WAAW,SAAS;IACxB;;;;OAIG;IACH,SAAS,EAAE,KAAK,CAAC,MAAM,CAAC,CAAC;IAEzB;;OAEG;IACH,KAAK,EAAE,MAAM,CAAC;IAEd;;OAEG;IACH,MAAM,EAAE,WAAW,CAAC;CACrB;AAED,MAAM,MAAM,cAAc,GAAG,wBAAwB,GAAG,wBAAwB,GAAG,wBAAwB,CAAC;AAE5G,MAAM,WAAW,qBAAqB;IACpC;;;;;;;;OAQG;IACH,KAAK,EAAE,MAAM,GAAG,KAAK,CAAC,MAAM,CAAC,GAAG,KAAK,CAAC,MAAM,CAAC,GAAG,KAAK,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC;IAErE;;;;;;OAMG;IACH,KAAK,EAAE,CAAC,MAAM,GAAG,EAAE,CAAC,GAAG,cAAc,CAAC;IAEtC;;;OAGG;IACH,UAAU,CAAC,EAAE,MAAM,CAAC;IAEpB;;;OAGG;IACH,eAAe,CAAC,EAAE,OAAO,GAAG,QAAQ,CAAC;IAErC;;;;OAIG;IACH,IAAI,CAAC,EAAE,MAAM,CAAC;CACf;AAED,MAAM,CAAC,OAAO,WAAW,UAAU,CAAC;IAClC,OAAO,EACL,KAAK,uBAAuB,IAAI,uBAAuB,EACvD,KAAK,SAAS,IAAI,SAAS,EAC3B,KAAK,cAAc,IAAI,cAAc,EACrC,KAAK,qBAAqB,IAAI,qBAAqB,GACpD,CAAC;CACH"} \ No newline at end of file diff --git a/node_modules/openai/resources/embeddings.js b/node_modules/openai/resources/embeddings.js new file mode 100644 index 0000000..e1b9ee1 --- /dev/null +++ b/node_modules/openai/resources/embeddings.js @@ -0,0 +1,15 @@ +"use strict"; +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Embeddings = void 0; +const resource_1 = require("../resource.js"); +class Embeddings extends resource_1.APIResource { + /** + * Creates an embedding vector representing the input text. 
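// Illustrative sketch (editorial aside, not part of the vendored file): creating embeddings for
// a batch of strings with the Embeddings resource defined here. The model name is a
// placeholder; `dimensions` is optional and, per the docs above, only honored by
// text-embedding-3 and later models. Assumes top-level await.
import OpenAI from 'openai';

const client = new OpenAI();
const result = await client.embeddings.create({
  model: 'text-embedding-3-small',
  input: ['first chunk of text', 'second chunk of text'],
});
// One Embedding object per input, in the same order; `embedding` is the float vector.
const vectors = result.data.map((item) => item.embedding);
console.log(vectors.length, vectors[0].length);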
+ */ + create(body, options) { + return this._client.post('/embeddings', { body, ...options }); + } +} +exports.Embeddings = Embeddings; +//# sourceMappingURL=embeddings.js.map \ No newline at end of file diff --git a/node_modules/openai/resources/embeddings.js.map b/node_modules/openai/resources/embeddings.js.map new file mode 100644 index 0000000..6d40196 --- /dev/null +++ b/node_modules/openai/resources/embeddings.js.map @@ -0,0 +1 @@ +{"version":3,"file":"embeddings.js","sourceRoot":"","sources":["../src/resources/embeddings.ts"],"names":[],"mappings":";AAAA,sFAAsF;;;AAEtF,6CAA0C;AAG1C,MAAa,UAAW,SAAQ,sBAAW;IACzC;;OAEG;IACH,MAAM,CACJ,IAA2B,EAC3B,OAA6B;QAE7B,OAAO,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,aAAa,EAAE,EAAE,IAAI,EAAE,GAAG,OAAO,EAAE,CAAC,CAAC;IAChE,CAAC;CACF;AAVD,gCAUC"} \ No newline at end of file diff --git a/node_modules/openai/resources/embeddings.mjs b/node_modules/openai/resources/embeddings.mjs new file mode 100644 index 0000000..1ddff80 --- /dev/null +++ b/node_modules/openai/resources/embeddings.mjs @@ -0,0 +1,11 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. +import { APIResource } from "../resource.mjs"; +export class Embeddings extends APIResource { + /** + * Creates an embedding vector representing the input text. + */ + create(body, options) { + return this._client.post('/embeddings', { body, ...options }); + } +} +//# sourceMappingURL=embeddings.mjs.map \ No newline at end of file diff --git a/node_modules/openai/resources/embeddings.mjs.map b/node_modules/openai/resources/embeddings.mjs.map new file mode 100644 index 0000000..037481d --- /dev/null +++ b/node_modules/openai/resources/embeddings.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"embeddings.mjs","sourceRoot":"","sources":["../src/resources/embeddings.ts"],"names":[],"mappings":"AAAA,sFAAsF;OAE/E,EAAE,WAAW,EAAE;AAGtB,MAAM,OAAO,UAAW,SAAQ,WAAW;IACzC;;OAEG;IACH,MAAM,CACJ,IAA2B,EAC3B,OAA6B;QAE7B,OAAO,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,aAAa,EAAE,EAAE,IAAI,EAAE,GAAG,OAAO,EAAE,CAAC,CAAC;IAChE,CAAC;CACF"} \ No newline at end of file diff --git a/node_modules/openai/resources/files.d.ts b/node_modules/openai/resources/files.d.ts new file mode 100644 index 0000000..120809d --- /dev/null +++ b/node_modules/openai/resources/files.d.ts @@ -0,0 +1,152 @@ +import { APIResource } from "../resource.js"; +import * as Core from "../core.js"; +import { CursorPage, type CursorPageParams } from "../pagination.js"; +import { type Response } from "../_shims/index.js"; +export declare class Files extends APIResource { + /** + * Upload a file that can be used across various endpoints. Individual files can be + * up to 512 MB, and the size of all files uploaded by one organization can be up + * to 100 GB. + * + * The Assistants API supports files up to 2 million tokens and of specific file + * types. See the + * [Assistants Tools guide](https://platform.openai.com/docs/assistants/tools) for + * details. + * + * The Fine-tuning API only supports `.jsonl` files. The input also has certain + * required formats for fine-tuning + * [chat](https://platform.openai.com/docs/api-reference/fine-tuning/chat-input) or + * [completions](https://platform.openai.com/docs/api-reference/fine-tuning/completions-input) + * models. + * + * The Batch API only supports `.jsonl` files up to 200 MB in size. The input also + * has a specific required + * [format](https://platform.openai.com/docs/api-reference/batch/request-input). 
+ * + * Please [contact us](https://help.openai.com/) if you need to increase these + * storage limits. + */ + create(body: FileCreateParams, options?: Core.RequestOptions): Core.APIPromise<FileObject>; + /** + * Returns information about a specific file. + */ + retrieve(fileId: string, options?: Core.RequestOptions): Core.APIPromise<FileObject>; + /** + * Returns a list of files. + */ + list(query?: FileListParams, options?: Core.RequestOptions): Core.PagePromise<FileObjectsPage, FileObject>; + list(options?: Core.RequestOptions): Core.PagePromise<FileObjectsPage, FileObject>; + /** + * Delete a file. + */ + del(fileId: string, options?: Core.RequestOptions): Core.APIPromise<FileDeleted>; + /** + * Returns the contents of the specified file. + */ + content(fileId: string, options?: Core.RequestOptions): Core.APIPromise<Response>; + /** + * Returns the contents of the specified file. + * + * @deprecated The `.content()` method should be used instead + */ + retrieveContent(fileId: string, options?: Core.RequestOptions): Core.APIPromise<string>; + /** + * Waits for the given file to be processed, default timeout is 30 mins. + */ + waitForProcessing(id: string, { pollInterval, maxWait }?: { + pollInterval?: number; + maxWait?: number; + }): Promise<FileObject>; +} +export declare class FileObjectsPage extends CursorPage<FileObject> { +} +export type FileContent = string; +export interface FileDeleted { + id: string; + deleted: boolean; + object: 'file'; +} +/** + * The `File` object represents a document that has been uploaded to OpenAI. + */ +export interface FileObject { + /** + * The file identifier, which can be referenced in the API endpoints. + */ + id: string; + /** + * The size of the file, in bytes. + */ + bytes: number; + /** + * The Unix timestamp (in seconds) for when the file was created. + */ + created_at: number; + /** + * The name of the file. + */ + filename: string; + /** + * The object type, which is always `file`. + */ + object: 'file'; + /** + * The intended purpose of the file. Supported values are `assistants`, + * `assistants_output`, `batch`, `batch_output`, `fine-tune`, `fine-tune-results` + * and `vision`. + */ + purpose: 'assistants' | 'assistants_output' | 'batch' | 'batch_output' | 'fine-tune' | 'fine-tune-results' | 'vision'; + /** + * @deprecated: Deprecated. The current status of the file, which can be either + * `uploaded`, `processed`, or `error`. + */ + status: 'uploaded' | 'processed' | 'error'; + /** + * @deprecated: Deprecated. For details on why a fine-tuning training file failed + * validation, see the `error` field on `fine_tuning.job`. + */ + status_details?: string; +} +/** + * The intended purpose of the uploaded file. + * + * Use "assistants" for + * [Assistants](https://platform.openai.com/docs/api-reference/assistants) and + * [Message](https://platform.openai.com/docs/api-reference/messages) files, + * "vision" for Assistants image file inputs, "batch" for + * [Batch API](https://platform.openai.com/docs/guides/batch), and "fine-tune" for + * [Fine-tuning](https://platform.openai.com/docs/api-reference/fine-tuning). + */ +export type FilePurpose = 'assistants' | 'batch' | 'fine-tune' | 'vision'; +export interface FileCreateParams { + /** + * The File object (not file name) to be uploaded. + */ + file: Core.Uploadable; + /** + * The intended purpose of the uploaded file. 
+ * + * Use "assistants" for + * [Assistants](https://platform.openai.com/docs/api-reference/assistants) and + * [Message](https://platform.openai.com/docs/api-reference/messages) files, + * "vision" for Assistants image file inputs, "batch" for + * [Batch API](https://platform.openai.com/docs/guides/batch), and "fine-tune" for + * [Fine-tuning](https://platform.openai.com/docs/api-reference/fine-tuning). + */ + purpose: FilePurpose; +} +export interface FileListParams extends CursorPageParams { + /** + * Sort order by the `created_at` timestamp of the objects. `asc` for ascending + * order and `desc` for descending order. + */ + order?: 'asc' | 'desc'; + /** + * Only return files with the given purpose. + */ + purpose?: string; +} +export declare namespace Files { + export { type FileContent as FileContent, type FileDeleted as FileDeleted, type FileObject as FileObject, type FilePurpose as FilePurpose, FileObjectsPage as FileObjectsPage, type FileCreateParams as FileCreateParams, type FileListParams as FileListParams, }; +} +//# sourceMappingURL=files.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/resources/files.d.ts.map b/node_modules/openai/resources/files.d.ts.map new file mode 100644 index 0000000..fdc1bad --- /dev/null +++ b/node_modules/openai/resources/files.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"files.d.ts","sourceRoot":"","sources":["../src/resources/files.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,WAAW,EAAE,MAAM,aAAa,CAAC;AAI1C,OAAO,KAAK,IAAI,MAAM,SAAS,CAAC;AAChC,OAAO,EAAE,UAAU,EAAE,KAAK,gBAAgB,EAAE,MAAM,eAAe,CAAC;AAClE,OAAO,EAAE,KAAK,QAAQ,EAAE,MAAM,iBAAiB,CAAC;AAEhD,qBAAa,KAAM,SAAQ,WAAW;IACpC;;;;;;;;;;;;;;;;;;;;;;OAsBG;IACH,MAAM,CAAC,IAAI,EAAE,gBAAgB,EAAE,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAAG,IAAI,CAAC,UAAU,CAAC,UAAU,CAAC;IAI1F;;OAEG;IACH,QAAQ,CAAC,MAAM,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAAG,IAAI,CAAC,UAAU,CAAC,UAAU,CAAC;IAIpF;;OAEG;IACH,IAAI,CAAC,KAAK,CAAC,EAAE,cAAc,EAAE,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAAG,IAAI,CAAC,WAAW,CAAC,eAAe,EAAE,UAAU,CAAC;IAC1G,IAAI,CAAC,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAAG,IAAI,CAAC,WAAW,CAAC,eAAe,EAAE,UAAU,CAAC;IAWlF;;OAEG;IACH,GAAG,CAAC,MAAM,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAAG,IAAI,CAAC,UAAU,CAAC,WAAW,CAAC;IAIhF;;OAEG;IACH,OAAO,CAAC,MAAM,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAAG,IAAI,CAAC,UAAU,CAAC,QAAQ,CAAC;IAIjF;;;;OAIG;IACH,eAAe,CAAC,MAAM,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAAG,IAAI,CAAC,UAAU,CAAC,MAAM,CAAC;IAOvF;;OAEG;IACG,iBAAiB,CACrB,EAAE,EAAE,MAAM,EACV,EAAE,YAAmB,EAAE,OAAwB,EAAE,GAAE;QAAE,YAAY,CAAC,EAAE,MAAM,CAAC;QAAC,OAAO,CAAC,EAAE,MAAM,CAAA;KAAO,GAClG,OAAO,CAAC,UAAU,CAAC;CAmBvB;AAED,qBAAa,eAAgB,SAAQ,UAAU,CAAC,UAAU,CAAC;CAAG;AAE9D,MAAM,MAAM,WAAW,GAAG,MAAM,CAAC;AAEjC,MAAM,WAAW,WAAW;IAC1B,EAAE,EAAE,MAAM,CAAC;IAEX,OAAO,EAAE,OAAO,CAAC;IAEjB,MAAM,EAAE,MAAM,CAAC;CAChB;AAED;;GAEG;AACH,MAAM,WAAW,UAAU;IACzB;;OAEG;IACH,EAAE,EAAE,MAAM,CAAC;IAEX;;OAEG;IACH,KAAK,EAAE,MAAM,CAAC;IAEd;;OAEG;IACH,UAAU,EAAE,MAAM,CAAC;IAEnB;;OAEG;IACH,QAAQ,EAAE,MAAM,CAAC;IAEjB;;OAEG;IACH,MAAM,EAAE,MAAM,CAAC;IAEf;;;;OAIG;IACH,OAAO,EACH,YAAY,GACZ,mBAAmB,GACnB,OAAO,GACP,cAAc,GACd,WAAW,GACX,mBAAmB,GACnB,QAAQ,CAAC;IAEb;;;OAGG;IACH,MAAM,EAAE,UAAU,GAAG,WAAW,GAAG,OAAO,CAAC;IAE3C;;;OAGG;IACH,cAAc,CAAC,EAAE,MAAM,CAAC;CACzB;AAED;;;;;;;;;GASG;AACH,MAAM,MAAM,WAAW,GAAG,YAAY,GAAG,OAAO,GAAG,WAAW,GAAG,QAAQ,CAAC;AAE1E,MAAM,WAAW,gBAAgB;IAC/B;;OAEG;IACH,IAAI,EAAE,IAAI,CAAC,UAAU,CAAC;IAEtB;;;;;;;;;OASG;IACH,OAAO,EAAE,WAAW,CAAC;CACtB;AAED,MAAM,WAAW,cAAe,SAAQ,gBAAgB;IACtD;;;OAGG;IACH,KAAK,CAAC,EAAE,KAAK,GAAG,MAAM,CAAC;IAEvB;;OAEG;IACH,
OAAO,CAAC,EAAE,MAAM,CAAC;CAClB;AAID,MAAM,CAAC,OAAO,WAAW,KAAK,CAAC;IAC7B,OAAO,EACL,KAAK,WAAW,IAAI,WAAW,EAC/B,KAAK,WAAW,IAAI,WAAW,EAC/B,KAAK,UAAU,IAAI,UAAU,EAC7B,KAAK,WAAW,IAAI,WAAW,EAC/B,eAAe,IAAI,eAAe,EAClC,KAAK,gBAAgB,IAAI,gBAAgB,EACzC,KAAK,cAAc,IAAI,cAAc,GACtC,CAAC;CACH"} \ No newline at end of file diff --git a/node_modules/openai/resources/files.js b/node_modules/openai/resources/files.js new file mode 100644 index 0000000..93e04cf --- /dev/null +++ b/node_modules/openai/resources/files.js @@ -0,0 +1,120 @@ +"use strict"; +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.FileObjectsPage = exports.Files = void 0; +const resource_1 = require("../resource.js"); +const core_1 = require("../core.js"); +const core_2 = require("../core.js"); +const error_1 = require("../error.js"); +const Core = __importStar(require("../core.js")); +const pagination_1 = require("../pagination.js"); +class Files extends resource_1.APIResource { + /** + * Upload a file that can be used across various endpoints. Individual files can be + * up to 512 MB, and the size of all files uploaded by one organization can be up + * to 100 GB. + * + * The Assistants API supports files up to 2 million tokens and of specific file + * types. See the + * [Assistants Tools guide](https://platform.openai.com/docs/assistants/tools) for + * details. + * + * The Fine-tuning API only supports `.jsonl` files. The input also has certain + * required formats for fine-tuning + * [chat](https://platform.openai.com/docs/api-reference/fine-tuning/chat-input) or + * [completions](https://platform.openai.com/docs/api-reference/fine-tuning/completions-input) + * models. + * + * The Batch API only supports `.jsonl` files up to 200 MB in size. The input also + * has a specific required + * [format](https://platform.openai.com/docs/api-reference/batch/request-input). + * + * Please [contact us](https://help.openai.com/) if you need to increase these + * storage limits. + */ + create(body, options) { + return this._client.post('/files', Core.multipartFormRequestOptions({ body, ...options })); + } + /** + * Returns information about a specific file. 
+ */ + retrieve(fileId, options) { + return this._client.get(`/files/${fileId}`, options); + } + list(query = {}, options) { + if ((0, core_1.isRequestOptions)(query)) { + return this.list({}, query); + } + return this._client.getAPIList('/files', FileObjectsPage, { query, ...options }); + } + /** + * Delete a file. + */ + del(fileId, options) { + return this._client.delete(`/files/${fileId}`, options); + } + /** + * Returns the contents of the specified file. + */ + content(fileId, options) { + return this._client.get(`/files/${fileId}/content`, { ...options, __binaryResponse: true }); + } + /** + * Returns the contents of the specified file. + * + * @deprecated The `.content()` method should be used instead + */ + retrieveContent(fileId, options) { + return this._client.get(`/files/${fileId}/content`, { + ...options, + headers: { Accept: 'application/json', ...options?.headers }, + }); + } + /** + * Waits for the given file to be processed, default timeout is 30 mins. + */ + async waitForProcessing(id, { pollInterval = 5000, maxWait = 30 * 60 * 1000 } = {}) { + const TERMINAL_STATES = new Set(['processed', 'error', 'deleted']); + const start = Date.now(); + let file = await this.retrieve(id); + while (!file.status || !TERMINAL_STATES.has(file.status)) { + await (0, core_2.sleep)(pollInterval); + file = await this.retrieve(id); + if (Date.now() - start > maxWait) { + throw new error_1.APIConnectionTimeoutError({ + message: `Giving up on waiting for file ${id} to finish processing after ${maxWait} milliseconds.`, + }); + } + } + return file; + } +} +exports.Files = Files; +class FileObjectsPage extends pagination_1.CursorPage { +} +exports.FileObjectsPage = FileObjectsPage; +Files.FileObjectsPage = FileObjectsPage; +//# sourceMappingURL=files.js.map \ No newline at end of file diff --git a/node_modules/openai/resources/files.js.map b/node_modules/openai/resources/files.js.map new file mode 100644 index 0000000..5f24454 --- /dev/null +++ b/node_modules/openai/resources/files.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"files.js","sourceRoot":"","sources":["../src/resources/files.ts"],"names":[],"mappings":";AAAA,sFAAsF;;;;;;;;;;;;;;;;;;;;;;;;;;AAEtF,6CAA0C;AAC1C,qCAA2C;AAC3C,qCAAgC;AAChC,uCAAqD;AACrD,iDAAgC;AAChC,iDAAkE;AAGlE,MAAa,KAAM,SAAQ,sBAAW;IACpC;;;;;;;;;;;;;;;;;;;;;;OAsBG;IACH,MAAM,CAAC,IAAsB,EAAE,OAA6B;QAC1D,OAAO,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,QAAQ,EAAE,IAAI,CAAC,2BAA2B,CAAC,EAAE,IAAI,EAAE,GAAG,OAAO,EAAE,CAAC,CAAC,CAAC;IAC7F,CAAC;IAED;;OAEG;IACH,QAAQ,CAAC,MAAc,EAAE,OAA6B;QACpD,OAAO,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,UAAU,MAAM,EAAE,EAAE,OAAO,CAAC,CAAC;IACvD,CAAC;IAOD,IAAI,CACF,QAA8C,EAAE,EAChD,OAA6B;QAE7B,IAAI,IAAA,uBAAgB,EAAC,KAAK,CAAC,EAAE;YAC3B,OAAO,IAAI,CAAC,IAAI,CAAC,EAAE,EAAE,KAAK,CAAC,CAAC;SAC7B;QACD,OAAO,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,QAAQ,EAAE,eAAe,EAAE,EAAE,KAAK,EAAE,GAAG,OAAO,EAAE,CAAC,CAAC;IACnF,CAAC;IAED;;OAEG;IACH,GAAG,CAAC,MAAc,EAAE,OAA6B;QAC/C,OAAO,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,UAAU,MAAM,EAAE,EAAE,OAAO,CAAC,CAAC;IAC1D,CAAC;IAED;;OAEG;IACH,OAAO,CAAC,MAAc,EAAE,OAA6B;QACnD,OAAO,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,UAAU,MAAM,UAAU,EAAE,EAAE,GAAG,OAAO,EAAE,gBAAgB,EAAE,IAAI,EAAE,CAAC,CAAC;IAC9F,CAAC;IAED;;;;OAIG;IACH,eAAe,CAAC,MAAc,EAAE,OAA6B;QAC3D,OAAO,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,UAAU,MAAM,UAAU,EAAE;YAClD,GAAG,OAAO;YACV,OAAO,EAAE,EAAE,MAAM,EAAE,kBAAkB,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE;SAC7D,CAAC,CAAC;IACL,CAAC;IAED;;OAEG;IACH,KAAK,CAAC,iBAAiB,CACrB,EAAU,EACV,EAAE,YAAY,GAAG,IAAI,EAAE,OAAO,GAAG,EAAE,GAAG,EAAE,GAAG,IAAI,KAAkD,EAAE;QAEnG,MAAM,eAAe,GAAG,IAAI,GAAG,CAAC,CAAC,WAAW,EAAE,OAAO,EAAE,SAAS,CAAC,CAAC,CAAC;QAEnE,MAAM,KAAK,GAAG,IAAI,CAAC,GAAG,EAAE,CAAC;QACzB,IAAI,IAAI,GAAG,MAAM,IAAI,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC;QAEnC,OAAO,CAAC,IAAI,CAAC,MAAM,IAAI,CAAC,eAAe,CAAC,GAAG,CAAC,IAAI,CAAC,MAAM,CAAC,EAAE;YACxD,MAAM,IAAA,YAAK,EAAC,YAAY,CAAC,CAAC;YAE1B,IAAI,GAAG,MAAM,IAAI,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC;YAC/B,IAAI,IAAI,CAAC,GAAG,EAAE,GAAG,KAAK,GAAG,OAAO,EAAE;gBAChC,MAAM,IAAI,iCAAyB,CAAC;oBAClC,OAAO,EAAE,iCAAiC,EAAE,+BAA+B,OAAO,gBAAgB;iBACnG,CAAC,CAAC;aACJ;SACF;QAED,OAAO,IAAI,CAAC;IACd,CAAC;CACF;AArGD,sBAqGC;AAED,MAAa,eAAgB,SAAQ,uBAAsB;CAAG;AAA9D,0CAA8D;AAgH9D,KAAK,CAAC,eAAe,GAAG,eAAe,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/resources/files.mjs b/node_modules/openai/resources/files.mjs new file mode 100644 index 0000000..0eaf129 --- /dev/null +++ b/node_modules/openai/resources/files.mjs @@ -0,0 +1,92 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. +import { APIResource } from "../resource.mjs"; +import { isRequestOptions } from "../core.mjs"; +import { sleep } from "../core.mjs"; +import { APIConnectionTimeoutError } from "../error.mjs"; +import * as Core from "../core.mjs"; +import { CursorPage } from "../pagination.mjs"; +export class Files extends APIResource { + /** + * Upload a file that can be used across various endpoints. Individual files can be + * up to 512 MB, and the size of all files uploaded by one organization can be up + * to 100 GB. + * + * The Assistants API supports files up to 2 million tokens and of specific file + * types. See the + * [Assistants Tools guide](https://platform.openai.com/docs/assistants/tools) for + * details. + * + * The Fine-tuning API only supports `.jsonl` files. The input also has certain + * required formats for fine-tuning + * [chat](https://platform.openai.com/docs/api-reference/fine-tuning/chat-input) or + * [completions](https://platform.openai.com/docs/api-reference/fine-tuning/completions-input) + * models. 
+ * + * The Batch API only supports `.jsonl` files up to 200 MB in size. The input also + * has a specific required + * [format](https://platform.openai.com/docs/api-reference/batch/request-input). + * + * Please [contact us](https://help.openai.com/) if you need to increase these + * storage limits. + */ + create(body, options) { + return this._client.post('/files', Core.multipartFormRequestOptions({ body, ...options })); + } + /** + * Returns information about a specific file. + */ + retrieve(fileId, options) { + return this._client.get(`/files/${fileId}`, options); + } + list(query = {}, options) { + if (isRequestOptions(query)) { + return this.list({}, query); + } + return this._client.getAPIList('/files', FileObjectsPage, { query, ...options }); + } + /** + * Delete a file. + */ + del(fileId, options) { + return this._client.delete(`/files/${fileId}`, options); + } + /** + * Returns the contents of the specified file. + */ + content(fileId, options) { + return this._client.get(`/files/${fileId}/content`, { ...options, __binaryResponse: true }); + } + /** + * Returns the contents of the specified file. + * + * @deprecated The `.content()` method should be used instead + */ + retrieveContent(fileId, options) { + return this._client.get(`/files/${fileId}/content`, { + ...options, + headers: { Accept: 'application/json', ...options?.headers }, + }); + } + /** + * Waits for the given file to be processed, default timeout is 30 mins. + */ + async waitForProcessing(id, { pollInterval = 5000, maxWait = 30 * 60 * 1000 } = {}) { + const TERMINAL_STATES = new Set(['processed', 'error', 'deleted']); + const start = Date.now(); + let file = await this.retrieve(id); + while (!file.status || !TERMINAL_STATES.has(file.status)) { + await sleep(pollInterval); + file = await this.retrieve(id); + if (Date.now() - start > maxWait) { + throw new APIConnectionTimeoutError({ + message: `Giving up on waiting for file ${id} to finish processing after ${maxWait} milliseconds.`, + }); + } + } + return file; + } +} +export class FileObjectsPage extends CursorPage { +} +Files.FileObjectsPage = FileObjectsPage; +//# sourceMappingURL=files.mjs.map \ No newline at end of file diff --git a/node_modules/openai/resources/files.mjs.map b/node_modules/openai/resources/files.mjs.map new file mode 100644 index 0000000..7880854 --- /dev/null +++ b/node_modules/openai/resources/files.mjs.map @@ -0,0 +1 @@ 
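The Files resource shown above (files.d.ts/.js/.mjs) pairs the plain create/retrieve/list/del/content methods with a waitForProcessing helper that polls retrieve until the file reaches a terminal status. A small sketch of uploading a fine-tuning dataset with this vendored client, assuming a local training.jsonl path and the same OPENAI_API_KEY convention as before (both illustrative, not part of this patch):

import fs from 'node:fs';
import OpenAI from 'openai';

const client = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });

async function uploadTrainingFile(path: string) {
  // Files.create sends multipart form data; an fs.ReadStream satisfies Core.Uploadable.
  const file = await client.files.create({
    file: fs.createReadStream(path),
    purpose: 'fine-tune',
  });
  // Polls every 5 s by default and gives up after 30 minutes, per the defaults in files.js.
  return client.files.waitForProcessing(file.id);
}

uploadTrainingFile('training.jsonl').then((file) => console.log(file.id, file.status));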
+{"version":3,"file":"files.mjs","sourceRoot":"","sources":["../src/resources/files.ts"],"names":[],"mappings":"AAAA,sFAAsF;OAE/E,EAAE,WAAW,EAAE;OACf,EAAE,gBAAgB,EAAE;OACpB,EAAE,KAAK,EAAE;OACT,EAAE,yBAAyB,EAAE;OAC7B,KAAK,IAAI;OACT,EAAE,UAAU,EAAyB;AAG5C,MAAM,OAAO,KAAM,SAAQ,WAAW;IACpC;;;;;;;;;;;;;;;;;;;;;;OAsBG;IACH,MAAM,CAAC,IAAsB,EAAE,OAA6B;QAC1D,OAAO,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,QAAQ,EAAE,IAAI,CAAC,2BAA2B,CAAC,EAAE,IAAI,EAAE,GAAG,OAAO,EAAE,CAAC,CAAC,CAAC;IAC7F,CAAC;IAED;;OAEG;IACH,QAAQ,CAAC,MAAc,EAAE,OAA6B;QACpD,OAAO,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,UAAU,MAAM,EAAE,EAAE,OAAO,CAAC,CAAC;IACvD,CAAC;IAOD,IAAI,CACF,QAA8C,EAAE,EAChD,OAA6B;QAE7B,IAAI,gBAAgB,CAAC,KAAK,CAAC,EAAE;YAC3B,OAAO,IAAI,CAAC,IAAI,CAAC,EAAE,EAAE,KAAK,CAAC,CAAC;SAC7B;QACD,OAAO,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,QAAQ,EAAE,eAAe,EAAE,EAAE,KAAK,EAAE,GAAG,OAAO,EAAE,CAAC,CAAC;IACnF,CAAC;IAED;;OAEG;IACH,GAAG,CAAC,MAAc,EAAE,OAA6B;QAC/C,OAAO,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,UAAU,MAAM,EAAE,EAAE,OAAO,CAAC,CAAC;IAC1D,CAAC;IAED;;OAEG;IACH,OAAO,CAAC,MAAc,EAAE,OAA6B;QACnD,OAAO,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,UAAU,MAAM,UAAU,EAAE,EAAE,GAAG,OAAO,EAAE,gBAAgB,EAAE,IAAI,EAAE,CAAC,CAAC;IAC9F,CAAC;IAED;;;;OAIG;IACH,eAAe,CAAC,MAAc,EAAE,OAA6B;QAC3D,OAAO,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,UAAU,MAAM,UAAU,EAAE;YAClD,GAAG,OAAO;YACV,OAAO,EAAE,EAAE,MAAM,EAAE,kBAAkB,EAAE,GAAG,OAAO,EAAE,OAAO,EAAE;SAC7D,CAAC,CAAC;IACL,CAAC;IAED;;OAEG;IACH,KAAK,CAAC,iBAAiB,CACrB,EAAU,EACV,EAAE,YAAY,GAAG,IAAI,EAAE,OAAO,GAAG,EAAE,GAAG,EAAE,GAAG,IAAI,KAAkD,EAAE;QAEnG,MAAM,eAAe,GAAG,IAAI,GAAG,CAAC,CAAC,WAAW,EAAE,OAAO,EAAE,SAAS,CAAC,CAAC,CAAC;QAEnE,MAAM,KAAK,GAAG,IAAI,CAAC,GAAG,EAAE,CAAC;QACzB,IAAI,IAAI,GAAG,MAAM,IAAI,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC;QAEnC,OAAO,CAAC,IAAI,CAAC,MAAM,IAAI,CAAC,eAAe,CAAC,GAAG,CAAC,IAAI,CAAC,MAAM,CAAC,EAAE;YACxD,MAAM,KAAK,CAAC,YAAY,CAAC,CAAC;YAE1B,IAAI,GAAG,MAAM,IAAI,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC;YAC/B,IAAI,IAAI,CAAC,GAAG,EAAE,GAAG,KAAK,GAAG,OAAO,EAAE;gBAChC,MAAM,IAAI,yBAAyB,CAAC;oBAClC,OAAO,EAAE,iCAAiC,EAAE,+BAA+B,OAAO,gBAAgB;iBACnG,CAAC,CAAC;aACJ;SACF;QAED,OAAO,IAAI,CAAC;IACd,CAAC;CACF;AAED,MAAM,OAAO,eAAgB,SAAQ,UAAsB;CAAG;AAgH9D,KAAK,CAAC,eAAe,GAAG,eAAe,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/resources/fine-tuning/fine-tuning.d.ts b/node_modules/openai/resources/fine-tuning/fine-tuning.d.ts new file mode 100644 index 0000000..f3e2637 --- /dev/null +++ b/node_modules/openai/resources/fine-tuning/fine-tuning.d.ts @@ -0,0 +1,10 @@ +import { APIResource } from "../../resource.js"; +import * as JobsAPI from "./jobs/jobs.js"; +import { FineTuningJob, FineTuningJobEvent, FineTuningJobEventsPage, FineTuningJobIntegration, FineTuningJobWandbIntegration, FineTuningJobWandbIntegrationObject, FineTuningJobsPage, JobCreateParams, JobListEventsParams, JobListParams, Jobs } from "./jobs/jobs.js"; +export declare class FineTuning extends APIResource { + jobs: JobsAPI.Jobs; +} +export declare namespace FineTuning { + export { Jobs as Jobs, type FineTuningJob as FineTuningJob, type FineTuningJobEvent as FineTuningJobEvent, type FineTuningJobIntegration as FineTuningJobIntegration, type FineTuningJobWandbIntegration as FineTuningJobWandbIntegration, type FineTuningJobWandbIntegrationObject as FineTuningJobWandbIntegrationObject, FineTuningJobsPage as FineTuningJobsPage, FineTuningJobEventsPage as FineTuningJobEventsPage, type JobCreateParams as JobCreateParams, type JobListParams as JobListParams, type JobListEventsParams as JobListEventsParams, }; +} +//# sourceMappingURL=fine-tuning.d.ts.map \ No newline at end of file diff --git 
a/node_modules/openai/resources/fine-tuning/fine-tuning.d.ts.map b/node_modules/openai/resources/fine-tuning/fine-tuning.d.ts.map new file mode 100644 index 0000000..8f18ddb --- /dev/null +++ b/node_modules/openai/resources/fine-tuning/fine-tuning.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"fine-tuning.d.ts","sourceRoot":"","sources":["../../src/resources/fine-tuning/fine-tuning.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,WAAW,EAAE,MAAM,gBAAgB,CAAC;AAC7C,OAAO,KAAK,OAAO,MAAM,aAAa,CAAC;AACvC,OAAO,EACL,aAAa,EACb,kBAAkB,EAClB,uBAAuB,EACvB,wBAAwB,EACxB,6BAA6B,EAC7B,mCAAmC,EACnC,kBAAkB,EAClB,eAAe,EACf,mBAAmB,EACnB,aAAa,EACb,IAAI,EACL,MAAM,aAAa,CAAC;AAErB,qBAAa,UAAW,SAAQ,WAAW;IACzC,IAAI,EAAE,OAAO,CAAC,IAAI,CAAkC;CACrD;AAMD,MAAM,CAAC,OAAO,WAAW,UAAU,CAAC;IAClC,OAAO,EACL,IAAI,IAAI,IAAI,EACZ,KAAK,aAAa,IAAI,aAAa,EACnC,KAAK,kBAAkB,IAAI,kBAAkB,EAC7C,KAAK,wBAAwB,IAAI,wBAAwB,EACzD,KAAK,6BAA6B,IAAI,6BAA6B,EACnE,KAAK,mCAAmC,IAAI,mCAAmC,EAC/E,kBAAkB,IAAI,kBAAkB,EACxC,uBAAuB,IAAI,uBAAuB,EAClD,KAAK,eAAe,IAAI,eAAe,EACvC,KAAK,aAAa,IAAI,aAAa,EACnC,KAAK,mBAAmB,IAAI,mBAAmB,GAChD,CAAC;CACH"} \ No newline at end of file diff --git a/node_modules/openai/resources/fine-tuning/fine-tuning.js b/node_modules/openai/resources/fine-tuning/fine-tuning.js new file mode 100644 index 0000000..c99fec8 --- /dev/null +++ b/node_modules/openai/resources/fine-tuning/fine-tuning.js @@ -0,0 +1,41 @@ +"use strict"; +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.FineTuning = void 0; +const resource_1 = require("../../resource.js"); +const JobsAPI = __importStar(require("./jobs/jobs.js")); +const jobs_1 = require("./jobs/jobs.js"); +class FineTuning extends resource_1.APIResource { + constructor() { + super(...arguments); + this.jobs = new JobsAPI.Jobs(this._client); + } +} +exports.FineTuning = FineTuning; +FineTuning.Jobs = jobs_1.Jobs; +FineTuning.FineTuningJobsPage = jobs_1.FineTuningJobsPage; +FineTuning.FineTuningJobEventsPage = jobs_1.FineTuningJobEventsPage; +//# sourceMappingURL=fine-tuning.js.map \ No newline at end of file diff --git a/node_modules/openai/resources/fine-tuning/fine-tuning.js.map b/node_modules/openai/resources/fine-tuning/fine-tuning.js.map new file mode 100644 index 0000000..8cd37d5 --- /dev/null +++ b/node_modules/openai/resources/fine-tuning/fine-tuning.js.map @@ -0,0 +1 @@ +{"version":3,"file":"fine-tuning.js","sourceRoot":"","sources":["../../src/resources/fine-tuning/fine-tuning.ts"],"names":[],"mappings":";AAAA,sFAAsF;;;;;;;;;;;;;;;;;;;;;;;;;;AAEtF,gDAA6C;AAC7C,wDAAuC;AACvC,yCAYqB;AAErB,MAAa,UAAW,SAAQ,sBAAW;IAA3C;;QACE,SAAI,GAAiB,IAAI,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;IACtD,CAAC;CAAA;AAFD,gCAEC;AAED,UAAU,CAAC,IAAI,GAAG,WAAI,CAAC;AACvB,UAAU,CAAC,kBAAkB,GAAG,yBAAkB,CAAC;AACnD,UAAU,CAAC,uBAAuB,GAAG,8BAAuB,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/resources/fine-tuning/fine-tuning.mjs b/node_modules/openai/resources/fine-tuning/fine-tuning.mjs new file mode 100644 index 0000000..f600e0e --- /dev/null +++ b/node_modules/openai/resources/fine-tuning/fine-tuning.mjs @@ -0,0 +1,14 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+import { APIResource } from "../../resource.mjs"; +import * as JobsAPI from "./jobs/jobs.mjs"; +import { FineTuningJobEventsPage, FineTuningJobsPage, Jobs, } from "./jobs/jobs.mjs"; +export class FineTuning extends APIResource { + constructor() { + super(...arguments); + this.jobs = new JobsAPI.Jobs(this._client); + } +} +FineTuning.Jobs = Jobs; +FineTuning.FineTuningJobsPage = FineTuningJobsPage; +FineTuning.FineTuningJobEventsPage = FineTuningJobEventsPage; +//# sourceMappingURL=fine-tuning.mjs.map \ No newline at end of file diff --git a/node_modules/openai/resources/fine-tuning/fine-tuning.mjs.map b/node_modules/openai/resources/fine-tuning/fine-tuning.mjs.map new file mode 100644 index 0000000..796f804 --- /dev/null +++ b/node_modules/openai/resources/fine-tuning/fine-tuning.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"fine-tuning.mjs","sourceRoot":"","sources":["../../src/resources/fine-tuning/fine-tuning.ts"],"names":[],"mappings":"AAAA,sFAAsF;OAE/E,EAAE,WAAW,EAAE;OACf,KAAK,OAAO;OACZ,EAGL,uBAAuB,EAIvB,kBAAkB,EAIlB,IAAI,GACL;AAED,MAAM,OAAO,UAAW,SAAQ,WAAW;IAA3C;;QACE,SAAI,GAAiB,IAAI,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;IACtD,CAAC;CAAA;AAED,UAAU,CAAC,IAAI,GAAG,IAAI,CAAC;AACvB,UAAU,CAAC,kBAAkB,GAAG,kBAAkB,CAAC;AACnD,UAAU,CAAC,uBAAuB,GAAG,uBAAuB,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/resources/fine-tuning/index.d.ts b/node_modules/openai/resources/fine-tuning/index.d.ts new file mode 100644 index 0000000..0f05126 --- /dev/null +++ b/node_modules/openai/resources/fine-tuning/index.d.ts @@ -0,0 +1,3 @@ +export { FineTuning } from "./fine-tuning.js"; +export { FineTuningJobsPage, FineTuningJobEventsPage, Jobs, type FineTuningJob, type FineTuningJobEvent, type FineTuningJobIntegration, type FineTuningJobWandbIntegration, type FineTuningJobWandbIntegrationObject, type JobCreateParams, type JobListParams, type JobListEventsParams, } from "./jobs/index.js"; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/resources/fine-tuning/index.d.ts.map b/node_modules/openai/resources/fine-tuning/index.d.ts.map new file mode 100644 index 0000000..bca7b13 --- /dev/null +++ b/node_modules/openai/resources/fine-tuning/index.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/resources/fine-tuning/index.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,UAAU,EAAE,MAAM,eAAe,CAAC;AAC3C,OAAO,EACL,kBAAkB,EAClB,uBAAuB,EACvB,IAAI,EACJ,KAAK,aAAa,EAClB,KAAK,kBAAkB,EACvB,KAAK,wBAAwB,EAC7B,KAAK,6BAA6B,EAClC,KAAK,mCAAmC,EACxC,KAAK,eAAe,EACpB,KAAK,aAAa,EAClB,KAAK,mBAAmB,GACzB,MAAM,cAAc,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/resources/fine-tuning/index.js b/node_modules/openai/resources/fine-tuning/index.js new file mode 100644 index 0000000..7b187f4 --- /dev/null +++ b/node_modules/openai/resources/fine-tuning/index.js @@ -0,0 +1,11 @@ +"use strict"; +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+Object.defineProperty(exports, "__esModule", { value: true }); +exports.Jobs = exports.FineTuningJobEventsPage = exports.FineTuningJobsPage = exports.FineTuning = void 0; +var fine_tuning_1 = require("./fine-tuning.js"); +Object.defineProperty(exports, "FineTuning", { enumerable: true, get: function () { return fine_tuning_1.FineTuning; } }); +var index_1 = require("./jobs/index.js"); +Object.defineProperty(exports, "FineTuningJobsPage", { enumerable: true, get: function () { return index_1.FineTuningJobsPage; } }); +Object.defineProperty(exports, "FineTuningJobEventsPage", { enumerable: true, get: function () { return index_1.FineTuningJobEventsPage; } }); +Object.defineProperty(exports, "Jobs", { enumerable: true, get: function () { return index_1.Jobs; } }); +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/openai/resources/fine-tuning/index.js.map b/node_modules/openai/resources/fine-tuning/index.js.map new file mode 100644 index 0000000..e8bf69e --- /dev/null +++ b/node_modules/openai/resources/fine-tuning/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/resources/fine-tuning/index.ts"],"names":[],"mappings":";AAAA,sFAAsF;;;AAEtF,gDAA2C;AAAlC,yGAAA,UAAU,OAAA;AACnB,yCAYsB;AAXpB,2GAAA,kBAAkB,OAAA;AAClB,gHAAA,uBAAuB,OAAA;AACvB,6FAAA,IAAI,OAAA"} \ No newline at end of file diff --git a/node_modules/openai/resources/fine-tuning/index.mjs b/node_modules/openai/resources/fine-tuning/index.mjs new file mode 100644 index 0000000..1bf1178 --- /dev/null +++ b/node_modules/openai/resources/fine-tuning/index.mjs @@ -0,0 +1,4 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. +export { FineTuning } from "./fine-tuning.mjs"; +export { FineTuningJobsPage, FineTuningJobEventsPage, Jobs, } from "./jobs/index.mjs"; +//# sourceMappingURL=index.mjs.map \ No newline at end of file diff --git a/node_modules/openai/resources/fine-tuning/index.mjs.map b/node_modules/openai/resources/fine-tuning/index.mjs.map new file mode 100644 index 0000000..2600bc8 --- /dev/null +++ b/node_modules/openai/resources/fine-tuning/index.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"index.mjs","sourceRoot":"","sources":["../../src/resources/fine-tuning/index.ts"],"names":[],"mappings":"AAAA,sFAAsF;OAE/E,EAAE,UAAU,EAAE;OACd,EACL,kBAAkB,EAClB,uBAAuB,EACvB,IAAI,GASL"} \ No newline at end of file diff --git a/node_modules/openai/resources/fine-tuning/jobs/checkpoints.d.ts b/node_modules/openai/resources/fine-tuning/jobs/checkpoints.d.ts new file mode 100644 index 0000000..241d2ff --- /dev/null +++ b/node_modules/openai/resources/fine-tuning/jobs/checkpoints.d.ts @@ -0,0 +1,66 @@ +import { APIResource } from "../../../resource.js"; +import * as Core from "../../../core.js"; +import { CursorPage, type CursorPageParams } from "../../../pagination.js"; +export declare class Checkpoints extends APIResource { + /** + * List checkpoints for a fine-tuning job. + */ + list(fineTuningJobId: string, query?: CheckpointListParams, options?: Core.RequestOptions): Core.PagePromise<FineTuningJobCheckpointsPage, FineTuningJobCheckpoint>; + list(fineTuningJobId: string, options?: Core.RequestOptions): Core.PagePromise<FineTuningJobCheckpointsPage, FineTuningJobCheckpoint>; +} +export declare class FineTuningJobCheckpointsPage extends CursorPage<FineTuningJobCheckpoint> { +} +/** + * The `fine_tuning.job.checkpoint` object represents a model checkpoint for a + * fine-tuning job that is ready to use. + */ +export interface FineTuningJobCheckpoint { + /** + * The checkpoint identifier, which can be referenced in the API endpoints. 
+ */ + id: string; + /** + * The Unix timestamp (in seconds) for when the checkpoint was created. + */ + created_at: number; + /** + * The name of the fine-tuned checkpoint model that is created. + */ + fine_tuned_model_checkpoint: string; + /** + * The name of the fine-tuning job that this checkpoint was created from. + */ + fine_tuning_job_id: string; + /** + * Metrics at the step number during the fine-tuning job. + */ + metrics: FineTuningJobCheckpoint.Metrics; + /** + * The object type, which is always "fine_tuning.job.checkpoint". + */ + object: 'fine_tuning.job.checkpoint'; + /** + * The step number that the checkpoint was created at. + */ + step_number: number; +} +export declare namespace FineTuningJobCheckpoint { + /** + * Metrics at the step number during the fine-tuning job. + */ + interface Metrics { + full_valid_loss?: number; + full_valid_mean_token_accuracy?: number; + step?: number; + train_loss?: number; + train_mean_token_accuracy?: number; + valid_loss?: number; + valid_mean_token_accuracy?: number; + } +} +export interface CheckpointListParams extends CursorPageParams { +} +export declare namespace Checkpoints { + export { type FineTuningJobCheckpoint as FineTuningJobCheckpoint, FineTuningJobCheckpointsPage as FineTuningJobCheckpointsPage, type CheckpointListParams as CheckpointListParams, }; +} +//# sourceMappingURL=checkpoints.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/resources/fine-tuning/jobs/checkpoints.d.ts.map b/node_modules/openai/resources/fine-tuning/jobs/checkpoints.d.ts.map new file mode 100644 index 0000000..ac20faf --- /dev/null +++ b/node_modules/openai/resources/fine-tuning/jobs/checkpoints.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"checkpoints.d.ts","sourceRoot":"","sources":["../../../src/resources/fine-tuning/jobs/checkpoints.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,WAAW,EAAE,MAAM,mBAAmB,CAAC;AAEhD,OAAO,KAAK,IAAI,MAAM,eAAe,CAAC;AACtC,OAAO,EAAE,UAAU,EAAE,KAAK,gBAAgB,EAAE,MAAM,qBAAqB,CAAC;AAExE,qBAAa,WAAY,SAAQ,WAAW;IAC1C;;OAEG;IACH,IAAI,CACF,eAAe,EAAE,MAAM,EACvB,KAAK,CAAC,EAAE,oBAAoB,EAC5B,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAC5B,IAAI,CAAC,WAAW,CAAC,4BAA4B,EAAE,uBAAuB,CAAC;IAC1E,IAAI,CACF,eAAe,EAAE,MAAM,EACvB,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAC5B,IAAI,CAAC,WAAW,CAAC,4BAA4B,EAAE,uBAAuB,CAAC;CAe3E;AAED,qBAAa,4BAA6B,SAAQ,UAAU,CAAC,uBAAuB,CAAC;CAAG;AAExF;;;GAGG;AACH,MAAM,WAAW,uBAAuB;IACtC;;OAEG;IACH,EAAE,EAAE,MAAM,CAAC;IAEX;;OAEG;IACH,UAAU,EAAE,MAAM,CAAC;IAEnB;;OAEG;IACH,2BAA2B,EAAE,MAAM,CAAC;IAEpC;;OAEG;IACH,kBAAkB,EAAE,MAAM,CAAC;IAE3B;;OAEG;IACH,OAAO,EAAE,uBAAuB,CAAC,OAAO,CAAC;IAEzC;;OAEG;IACH,MAAM,EAAE,4BAA4B,CAAC;IAErC;;OAEG;IACH,WAAW,EAAE,MAAM,CAAC;CACrB;AAED,yBAAiB,uBAAuB,CAAC;IACvC;;OAEG;IACH,UAAiB,OAAO;QACtB,eAAe,CAAC,EAAE,MAAM,CAAC;QAEzB,8BAA8B,CAAC,EAAE,MAAM,CAAC;QAExC,IAAI,CAAC,EAAE,MAAM,CAAC;QAEd,UAAU,CAAC,EAAE,MAAM,CAAC;QAEpB,yBAAyB,CAAC,EAAE,MAAM,CAAC;QAEnC,UAAU,CAAC,EAAE,MAAM,CAAC;QAEpB,yBAAyB,CAAC,EAAE,MAAM,CAAC;KACpC;CACF;AAED,MAAM,WAAW,oBAAqB,SAAQ,gBAAgB;CAAG;AAIjE,MAAM,CAAC,OAAO,WAAW,WAAW,CAAC;IACnC,OAAO,EACL,KAAK,uBAAuB,IAAI,uBAAuB,EACvD,4BAA4B,IAAI,4BAA4B,EAC5D,KAAK,oBAAoB,IAAI,oBAAoB,GAClD,CAAC;CACH"} \ No newline at end of file diff --git a/node_modules/openai/resources/fine-tuning/jobs/checkpoints.js b/node_modules/openai/resources/fine-tuning/jobs/checkpoints.js new file mode 100644 index 0000000..ede731a --- /dev/null +++ b/node_modules/openai/resources/fine-tuning/jobs/checkpoints.js @@ -0,0 +1,21 @@ +"use strict"; +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+Object.defineProperty(exports, "__esModule", { value: true }); +exports.FineTuningJobCheckpointsPage = exports.Checkpoints = void 0; +const resource_1 = require("../../../resource.js"); +const core_1 = require("../../../core.js"); +const pagination_1 = require("../../../pagination.js"); +class Checkpoints extends resource_1.APIResource { + list(fineTuningJobId, query = {}, options) { + if ((0, core_1.isRequestOptions)(query)) { + return this.list(fineTuningJobId, {}, query); + } + return this._client.getAPIList(`/fine_tuning/jobs/${fineTuningJobId}/checkpoints`, FineTuningJobCheckpointsPage, { query, ...options }); + } +} +exports.Checkpoints = Checkpoints; +class FineTuningJobCheckpointsPage extends pagination_1.CursorPage { +} +exports.FineTuningJobCheckpointsPage = FineTuningJobCheckpointsPage; +Checkpoints.FineTuningJobCheckpointsPage = FineTuningJobCheckpointsPage; +//# sourceMappingURL=checkpoints.js.map \ No newline at end of file diff --git a/node_modules/openai/resources/fine-tuning/jobs/checkpoints.js.map b/node_modules/openai/resources/fine-tuning/jobs/checkpoints.js.map new file mode 100644 index 0000000..8fe7acd --- /dev/null +++ b/node_modules/openai/resources/fine-tuning/jobs/checkpoints.js.map @@ -0,0 +1 @@ +{"version":3,"file":"checkpoints.js","sourceRoot":"","sources":["../../../src/resources/fine-tuning/jobs/checkpoints.ts"],"names":[],"mappings":";AAAA,sFAAsF;;;AAEtF,mDAAgD;AAChD,2CAAiD;AAEjD,uDAAwE;AAExE,MAAa,WAAY,SAAQ,sBAAW;IAa1C,IAAI,CACF,eAAuB,EACvB,QAAoD,EAAE,EACtD,OAA6B;QAE7B,IAAI,IAAA,uBAAgB,EAAC,KAAK,CAAC,EAAE;YAC3B,OAAO,IAAI,CAAC,IAAI,CAAC,eAAe,EAAE,EAAE,EAAE,KAAK,CAAC,CAAC;SAC9C;QACD,OAAO,IAAI,CAAC,OAAO,CAAC,UAAU,CAC5B,qBAAqB,eAAe,cAAc,EAClD,4BAA4B,EAC5B,EAAE,KAAK,EAAE,GAAG,OAAO,EAAE,CACtB,CAAC;IACJ,CAAC;CACF;AA3BD,kCA2BC;AAED,MAAa,4BAA6B,SAAQ,uBAAmC;CAAG;AAAxF,oEAAwF;AAkExF,WAAW,CAAC,4BAA4B,GAAG,4BAA4B,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/resources/fine-tuning/jobs/checkpoints.mjs b/node_modules/openai/resources/fine-tuning/jobs/checkpoints.mjs new file mode 100644 index 0000000..db2eb83 --- /dev/null +++ b/node_modules/openai/resources/fine-tuning/jobs/checkpoints.mjs @@ -0,0 +1,16 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+import { APIResource } from "../../../resource.mjs"; +import { isRequestOptions } from "../../../core.mjs"; +import { CursorPage } from "../../../pagination.mjs"; +export class Checkpoints extends APIResource { + list(fineTuningJobId, query = {}, options) { + if (isRequestOptions(query)) { + return this.list(fineTuningJobId, {}, query); + } + return this._client.getAPIList(`/fine_tuning/jobs/${fineTuningJobId}/checkpoints`, FineTuningJobCheckpointsPage, { query, ...options }); + } +} +export class FineTuningJobCheckpointsPage extends CursorPage { +} +Checkpoints.FineTuningJobCheckpointsPage = FineTuningJobCheckpointsPage; +//# sourceMappingURL=checkpoints.mjs.map \ No newline at end of file diff --git a/node_modules/openai/resources/fine-tuning/jobs/checkpoints.mjs.map b/node_modules/openai/resources/fine-tuning/jobs/checkpoints.mjs.map new file mode 100644 index 0000000..9b0673d --- /dev/null +++ b/node_modules/openai/resources/fine-tuning/jobs/checkpoints.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"checkpoints.mjs","sourceRoot":"","sources":["../../../src/resources/fine-tuning/jobs/checkpoints.ts"],"names":[],"mappings":"AAAA,sFAAsF;OAE/E,EAAE,WAAW,EAAE;OACf,EAAE,gBAAgB,EAAE;OAEpB,EAAE,UAAU,EAAyB;AAE5C,MAAM,OAAO,WAAY,SAAQ,WAAW;IAa1C,IAAI,CACF,eAAuB,EACvB,QAAoD,EAAE,EACtD,OAA6B;QAE7B,IAAI,gBAAgB,CAAC,KAAK,CAAC,EAAE;YAC3B,OAAO,IAAI,CAAC,IAAI,CAAC,eAAe,EAAE,EAAE,EAAE,KAAK,CAAC,CAAC;SAC9C;QACD,OAAO,IAAI,CAAC,OAAO,CAAC,UAAU,CAC5B,qBAAqB,eAAe,cAAc,EAClD,4BAA4B,EAC5B,EAAE,KAAK,EAAE,GAAG,OAAO,EAAE,CACtB,CAAC;IACJ,CAAC;CACF;AAED,MAAM,OAAO,4BAA6B,SAAQ,UAAmC;CAAG;AAkExF,WAAW,CAAC,4BAA4B,GAAG,4BAA4B,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/resources/fine-tuning/jobs/index.d.ts b/node_modules/openai/resources/fine-tuning/jobs/index.d.ts new file mode 100644 index 0000000..c47d0e2 --- /dev/null +++ b/node_modules/openai/resources/fine-tuning/jobs/index.d.ts @@ -0,0 +1,3 @@ +export { FineTuningJobCheckpointsPage, Checkpoints, type FineTuningJobCheckpoint, type CheckpointListParams, } from "./checkpoints.js"; +export { FineTuningJobsPage, FineTuningJobEventsPage, Jobs, type FineTuningJob, type FineTuningJobEvent, type FineTuningJobIntegration, type FineTuningJobWandbIntegration, type FineTuningJobWandbIntegrationObject, type JobCreateParams, type JobListParams, type JobListEventsParams, } from "./jobs.js"; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/resources/fine-tuning/jobs/index.d.ts.map b/node_modules/openai/resources/fine-tuning/jobs/index.d.ts.map new file mode 100644 index 0000000..6682610 --- /dev/null +++ b/node_modules/openai/resources/fine-tuning/jobs/index.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/resources/fine-tuning/jobs/index.ts"],"names":[],"mappings":"AAEA,OAAO,EACL,4BAA4B,EAC5B,WAAW,EACX,KAAK,uBAAuB,EAC5B,KAAK,oBAAoB,GAC1B,MAAM,eAAe,CAAC;AACvB,OAAO,EACL,kBAAkB,EAClB,uBAAuB,EACvB,IAAI,EACJ,KAAK,aAAa,EAClB,KAAK,kBAAkB,EACvB,KAAK,wBAAwB,EAC7B,KAAK,6BAA6B,EAClC,KAAK,mCAAmC,EACxC,KAAK,eAAe,EACpB,KAAK,aAAa,EAClB,KAAK,mBAAmB,GACzB,MAAM,QAAQ,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/resources/fine-tuning/jobs/index.js b/node_modules/openai/resources/fine-tuning/jobs/index.js new file mode 100644 index 0000000..9be1719 --- /dev/null +++ b/node_modules/openai/resources/fine-tuning/jobs/index.js @@ -0,0 +1,12 @@ +"use strict"; +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+Object.defineProperty(exports, "__esModule", { value: true }); +exports.Jobs = exports.FineTuningJobEventsPage = exports.FineTuningJobsPage = exports.Checkpoints = exports.FineTuningJobCheckpointsPage = void 0; +var checkpoints_1 = require("./checkpoints.js"); +Object.defineProperty(exports, "FineTuningJobCheckpointsPage", { enumerable: true, get: function () { return checkpoints_1.FineTuningJobCheckpointsPage; } }); +Object.defineProperty(exports, "Checkpoints", { enumerable: true, get: function () { return checkpoints_1.Checkpoints; } }); +var jobs_1 = require("./jobs.js"); +Object.defineProperty(exports, "FineTuningJobsPage", { enumerable: true, get: function () { return jobs_1.FineTuningJobsPage; } }); +Object.defineProperty(exports, "FineTuningJobEventsPage", { enumerable: true, get: function () { return jobs_1.FineTuningJobEventsPage; } }); +Object.defineProperty(exports, "Jobs", { enumerable: true, get: function () { return jobs_1.Jobs; } }); +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/openai/resources/fine-tuning/jobs/index.js.map b/node_modules/openai/resources/fine-tuning/jobs/index.js.map new file mode 100644 index 0000000..af01e4f --- /dev/null +++ b/node_modules/openai/resources/fine-tuning/jobs/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../../src/resources/fine-tuning/jobs/index.ts"],"names":[],"mappings":";AAAA,sFAAsF;;;AAEtF,gDAKuB;AAJrB,2HAAA,4BAA4B,OAAA;AAC5B,0GAAA,WAAW,OAAA;AAIb,kCAYgB;AAXd,0GAAA,kBAAkB,OAAA;AAClB,+GAAA,uBAAuB,OAAA;AACvB,4FAAA,IAAI,OAAA"} \ No newline at end of file diff --git a/node_modules/openai/resources/fine-tuning/jobs/index.mjs b/node_modules/openai/resources/fine-tuning/jobs/index.mjs new file mode 100644 index 0000000..ffb3001 --- /dev/null +++ b/node_modules/openai/resources/fine-tuning/jobs/index.mjs @@ -0,0 +1,4 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+export { FineTuningJobCheckpointsPage, Checkpoints, } from "./checkpoints.mjs"; +export { FineTuningJobsPage, FineTuningJobEventsPage, Jobs, } from "./jobs.mjs"; +//# sourceMappingURL=index.mjs.map \ No newline at end of file diff --git a/node_modules/openai/resources/fine-tuning/jobs/index.mjs.map b/node_modules/openai/resources/fine-tuning/jobs/index.mjs.map new file mode 100644 index 0000000..9ef02d5 --- /dev/null +++ b/node_modules/openai/resources/fine-tuning/jobs/index.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"index.mjs","sourceRoot":"","sources":["../../../src/resources/fine-tuning/jobs/index.ts"],"names":[],"mappings":"AAAA,sFAAsF;OAE/E,EACL,4BAA4B,EAC5B,WAAW,GAGZ;OACM,EACL,kBAAkB,EAClB,uBAAuB,EACvB,IAAI,GASL"} \ No newline at end of file diff --git a/node_modules/openai/resources/fine-tuning/jobs/jobs.d.ts b/node_modules/openai/resources/fine-tuning/jobs/jobs.d.ts new file mode 100644 index 0000000..69f311e --- /dev/null +++ b/node_modules/openai/resources/fine-tuning/jobs/jobs.d.ts @@ -0,0 +1,569 @@ +import { APIResource } from "../../../resource.js"; +import * as Core from "../../../core.js"; +import * as CheckpointsAPI from "./checkpoints.js"; +import { CheckpointListParams, Checkpoints, FineTuningJobCheckpoint, FineTuningJobCheckpointsPage } from "./checkpoints.js"; +import { CursorPage, type CursorPageParams } from "../../../pagination.js"; +export declare class Jobs extends APIResource { + checkpoints: CheckpointsAPI.Checkpoints; + /** + * Creates a fine-tuning job which begins the process of creating a new model from + * a given dataset. + * + * Response includes details of the enqueued job including job status and the name + * of the fine-tuned models once complete. + * + * [Learn more about fine-tuning](https://platform.openai.com/docs/guides/fine-tuning) + */ + create(body: JobCreateParams, options?: Core.RequestOptions): Core.APIPromise<FineTuningJob>; + /** + * Get info about a fine-tuning job. + * + * [Learn more about fine-tuning](https://platform.openai.com/docs/guides/fine-tuning) + */ + retrieve(fineTuningJobId: string, options?: Core.RequestOptions): Core.APIPromise<FineTuningJob>; + /** + * List your organization's fine-tuning jobs + */ + list(query?: JobListParams, options?: Core.RequestOptions): Core.PagePromise<FineTuningJobsPage, FineTuningJob>; + list(options?: Core.RequestOptions): Core.PagePromise<FineTuningJobsPage, FineTuningJob>; + /** + * Immediately cancel a fine-tune job. + */ + cancel(fineTuningJobId: string, options?: Core.RequestOptions): Core.APIPromise<FineTuningJob>; + /** + * Get status updates for a fine-tuning job. + */ + listEvents(fineTuningJobId: string, query?: JobListEventsParams, options?: Core.RequestOptions): Core.PagePromise<FineTuningJobEventsPage, FineTuningJobEvent>; + listEvents(fineTuningJobId: string, options?: Core.RequestOptions): Core.PagePromise<FineTuningJobEventsPage, FineTuningJobEvent>; +} +export declare class FineTuningJobsPage extends CursorPage<FineTuningJob> { +} +export declare class FineTuningJobEventsPage extends CursorPage<FineTuningJobEvent> { +} +/** + * The `fine_tuning.job` object represents a fine-tuning job that has been created + * through the API. + */ +export interface FineTuningJob { + /** + * The object identifier, which can be referenced in the API endpoints. + */ + id: string; + /** + * The Unix timestamp (in seconds) for when the fine-tuning job was created. + */ + created_at: number; + /** + * For fine-tuning jobs that have `failed`, this will contain more information on + * the cause of the failure. + */ + error: FineTuningJob.Error | null; + /** + * The name of the fine-tuned model that is being created. The value will be null + * if the fine-tuning job is still running. 
+ */ + fine_tuned_model: string | null; + /** + * The Unix timestamp (in seconds) for when the fine-tuning job was finished. The + * value will be null if the fine-tuning job is still running. + */ + finished_at: number | null; + /** + * The hyperparameters used for the fine-tuning job. This value will only be + * returned when running `supervised` jobs. + */ + hyperparameters: FineTuningJob.Hyperparameters; + /** + * The base model that is being fine-tuned. + */ + model: string; + /** + * The object type, which is always "fine_tuning.job". + */ + object: 'fine_tuning.job'; + /** + * The organization that owns the fine-tuning job. + */ + organization_id: string; + /** + * The compiled results file ID(s) for the fine-tuning job. You can retrieve the + * results with the + * [Files API](https://platform.openai.com/docs/api-reference/files/retrieve-contents). + */ + result_files: Array<string>; + /** + * The seed used for the fine-tuning job. + */ + seed: number; + /** + * The current status of the fine-tuning job, which can be either + * `validating_files`, `queued`, `running`, `succeeded`, `failed`, or `cancelled`. + */ + status: 'validating_files' | 'queued' | 'running' | 'succeeded' | 'failed' | 'cancelled'; + /** + * The total number of billable tokens processed by this fine-tuning job. The value + * will be null if the fine-tuning job is still running. + */ + trained_tokens: number | null; + /** + * The file ID used for training. You can retrieve the training data with the + * [Files API](https://platform.openai.com/docs/api-reference/files/retrieve-contents). + */ + training_file: string; + /** + * The file ID used for validation. You can retrieve the validation results with + * the + * [Files API](https://platform.openai.com/docs/api-reference/files/retrieve-contents). + */ + validation_file: string | null; + /** + * The Unix timestamp (in seconds) for when the fine-tuning job is estimated to + * finish. The value will be null if the fine-tuning job is not running. + */ + estimated_finish?: number | null; + /** + * A list of integrations to enable for this fine-tuning job. + */ + integrations?: Array<FineTuningJobWandbIntegrationObject> | null; + /** + * The method used for fine-tuning. + */ + method?: FineTuningJob.Method; +} +export declare namespace FineTuningJob { + /** + * For fine-tuning jobs that have `failed`, this will contain more information on + * the cause of the failure. + */ + interface Error { + /** + * A machine-readable error code. + */ + code: string; + /** + * A human-readable error message. + */ + message: string; + /** + * The parameter that was invalid, usually `training_file` or `validation_file`. + * This field will be null if the failure was not parameter-specific. + */ + param: string | null; + } + /** + * The hyperparameters used for the fine-tuning job. This value will only be + * returned when running `supervised` jobs. + */ + interface Hyperparameters { + /** + * Number of examples in each batch. A larger batch size means that model + * parameters are updated less frequently, but with lower variance. + */ + batch_size?: 'auto' | number; + /** + * Scaling factor for the learning rate. A smaller learning rate may be useful to + * avoid overfitting. + */ + learning_rate_multiplier?: 'auto' | number; + /** + * The number of epochs to train the model for. An epoch refers to one full cycle + * through the training dataset. + */ + n_epochs?: 'auto' | number; + } + /** + * The method used for fine-tuning. + */ + interface Method { + /** + * Configuration for the DPO fine-tuning method. 
+ */ + dpo?: Method.Dpo; + /** + * Configuration for the supervised fine-tuning method. + */ + supervised?: Method.Supervised; + /** + * The type of method. Is either `supervised` or `dpo`. + */ + type?: 'supervised' | 'dpo'; + } + namespace Method { + /** + * Configuration for the DPO fine-tuning method. + */ + interface Dpo { + /** + * The hyperparameters used for the fine-tuning job. + */ + hyperparameters?: Dpo.Hyperparameters; + } + namespace Dpo { + /** + * The hyperparameters used for the fine-tuning job. + */ + interface Hyperparameters { + /** + * Number of examples in each batch. A larger batch size means that model + * parameters are updated less frequently, but with lower variance. + */ + batch_size?: 'auto' | number; + /** + * The beta value for the DPO method. A higher beta value will increase the weight + * of the penalty between the policy and reference model. + */ + beta?: 'auto' | number; + /** + * Scaling factor for the learning rate. A smaller learning rate may be useful to + * avoid overfitting. + */ + learning_rate_multiplier?: 'auto' | number; + /** + * The number of epochs to train the model for. An epoch refers to one full cycle + * through the training dataset. + */ + n_epochs?: 'auto' | number; + } + } + /** + * Configuration for the supervised fine-tuning method. + */ + interface Supervised { + /** + * The hyperparameters used for the fine-tuning job. + */ + hyperparameters?: Supervised.Hyperparameters; + } + namespace Supervised { + /** + * The hyperparameters used for the fine-tuning job. + */ + interface Hyperparameters { + /** + * Number of examples in each batch. A larger batch size means that model + * parameters are updated less frequently, but with lower variance. + */ + batch_size?: 'auto' | number; + /** + * Scaling factor for the learning rate. A smaller learning rate may be useful to + * avoid overfitting. + */ + learning_rate_multiplier?: 'auto' | number; + /** + * The number of epochs to train the model for. An epoch refers to one full cycle + * through the training dataset. + */ + n_epochs?: 'auto' | number; + } + } + } +} +/** + * Fine-tuning job event object + */ +export interface FineTuningJobEvent { + /** + * The object identifier. + */ + id: string; + /** + * The Unix timestamp (in seconds) for when the fine-tuning job was created. + */ + created_at: number; + /** + * The log level of the event. + */ + level: 'info' | 'warn' | 'error'; + /** + * The message of the event. + */ + message: string; + /** + * The object type, which is always "fine_tuning.job.event". + */ + object: 'fine_tuning.job.event'; + /** + * The data associated with the event. + */ + data?: unknown; + /** + * The type of event. + */ + type?: 'message' | 'metrics'; +} +export type FineTuningJobIntegration = FineTuningJobWandbIntegrationObject; +/** + * The settings for your integration with Weights and Biases. This payload + * specifies the project that metrics will be sent to. Optionally, you can set an + * explicit display name for your run, add tags to your run, and set a default + * entity (team, username, etc) to be associated with your run. + */ +export interface FineTuningJobWandbIntegration { + /** + * The name of the project that the new run will be created under. + */ + project: string; + /** + * The entity to use for the run. This allows you to set the team or username of + * the WandB user that you would like associated with the run. If not set, the + * default entity for the registered WandB API key is used. 
+ */ + entity?: string | null; + /** + * A display name to set for the run. If not set, we will use the Job ID as the + * name. + */ + name?: string | null; + /** + * A list of tags to be attached to the newly created run. These tags are passed + * through directly to WandB. Some default tags are generated by OpenAI: + * "openai/finetune", "openai/{base-model}", "openai/{ftjob-abcdef}". + */ + tags?: Array<string>; +} +export interface FineTuningJobWandbIntegrationObject { + /** + * The type of the integration being enabled for the fine-tuning job + */ + type: 'wandb'; + /** + * The settings for your integration with Weights and Biases. This payload + * specifies the project that metrics will be sent to. Optionally, you can set an + * explicit display name for your run, add tags to your run, and set a default + * entity (team, username, etc) to be associated with your run. + */ + wandb: FineTuningJobWandbIntegration; +} +export interface JobCreateParams { + /** + * The name of the model to fine-tune. You can select one of the + * [supported models](https://platform.openai.com/docs/guides/fine-tuning#which-models-can-be-fine-tuned). + */ + model: (string & {}) | 'babbage-002' | 'davinci-002' | 'gpt-3.5-turbo' | 'gpt-4o-mini'; + /** + * The ID of an uploaded file that contains training data. + * + * See [upload file](https://platform.openai.com/docs/api-reference/files/create) + * for how to upload a file. + * + * Your dataset must be formatted as a JSONL file. Additionally, you must upload + * your file with the purpose `fine-tune`. + * + * The contents of the file should differ depending on if the model uses the + * [chat](https://platform.openai.com/docs/api-reference/fine-tuning/chat-input), + * [completions](https://platform.openai.com/docs/api-reference/fine-tuning/completions-input) + * format, or if the fine-tuning method uses the + * [preference](https://platform.openai.com/docs/api-reference/fine-tuning/preference-input) + * format. + * + * See the [fine-tuning guide](https://platform.openai.com/docs/guides/fine-tuning) + * for more details. + */ + training_file: string; + /** + * The hyperparameters used for the fine-tuning job. This value is now deprecated + * in favor of `method`, and should be passed in under the `method` parameter. + */ + hyperparameters?: JobCreateParams.Hyperparameters; + /** + * A list of integrations to enable for your fine-tuning job. + */ + integrations?: Array<JobCreateParams.Integration> | null; + /** + * The method used for fine-tuning. + */ + method?: JobCreateParams.Method; + /** + * The seed controls the reproducibility of the job. Passing in the same seed and + * job parameters should produce the same results, but may differ in rare cases. If + * a seed is not specified, one will be generated for you. + */ + seed?: number | null; + /** + * A string of up to 64 characters that will be added to your fine-tuned model + * name. + * + * For example, a `suffix` of "custom-model-name" would produce a model name like + * `ft:gpt-4o-mini:openai:custom-model-name:7p4lURel`. + */ + suffix?: string | null; + /** + * The ID of an uploaded file that contains validation data. + * + * If you provide this file, the data is used to generate validation metrics + * periodically during fine-tuning. These metrics can be viewed in the fine-tuning + * results file. The same data should not be present in both train and validation + * files. + * + * Your dataset must be formatted as a JSONL file. You must upload your file with + * the purpose `fine-tune`. 
+ * + * See the [fine-tuning guide](https://platform.openai.com/docs/guides/fine-tuning) + * for more details. + */ + validation_file?: string | null; +} +export declare namespace JobCreateParams { + /** + * @deprecated: The hyperparameters used for the fine-tuning job. This value is now + * deprecated in favor of `method`, and should be passed in under the `method` + * parameter. + */ + interface Hyperparameters { + /** + * Number of examples in each batch. A larger batch size means that model + * parameters are updated less frequently, but with lower variance. + */ + batch_size?: 'auto' | number; + /** + * Scaling factor for the learning rate. A smaller learning rate may be useful to + * avoid overfitting. + */ + learning_rate_multiplier?: 'auto' | number; + /** + * The number of epochs to train the model for. An epoch refers to one full cycle + * through the training dataset. + */ + n_epochs?: 'auto' | number; + } + interface Integration { + /** + * The type of integration to enable. Currently, only "wandb" (Weights and Biases) + * is supported. + */ + type: 'wandb'; + /** + * The settings for your integration with Weights and Biases. This payload + * specifies the project that metrics will be sent to. Optionally, you can set an + * explicit display name for your run, add tags to your run, and set a default + * entity (team, username, etc) to be associated with your run. + */ + wandb: Integration.Wandb; + } + namespace Integration { + /** + * The settings for your integration with Weights and Biases. This payload + * specifies the project that metrics will be sent to. Optionally, you can set an + * explicit display name for your run, add tags to your run, and set a default + * entity (team, username, etc) to be associated with your run. + */ + interface Wandb { + /** + * The name of the project that the new run will be created under. + */ + project: string; + /** + * The entity to use for the run. This allows you to set the team or username of + * the WandB user that you would like associated with the run. If not set, the + * default entity for the registered WandB API key is used. + */ + entity?: string | null; + /** + * A display name to set for the run. If not set, we will use the Job ID as the + * name. + */ + name?: string | null; + /** + * A list of tags to be attached to the newly created run. These tags are passed + * through directly to WandB. Some default tags are generated by OpenAI: + * "openai/finetune", "openai/{base-model}", "openai/{ftjob-abcdef}". + */ + tags?: Array<string>; + } + } + /** + * The method used for fine-tuning. + */ + interface Method { + /** + * Configuration for the DPO fine-tuning method. + */ + dpo?: Method.Dpo; + /** + * Configuration for the supervised fine-tuning method. + */ + supervised?: Method.Supervised; + /** + * The type of method. Is either `supervised` or `dpo`. + */ + type?: 'supervised' | 'dpo'; + } + namespace Method { + /** + * Configuration for the DPO fine-tuning method. + */ + interface Dpo { + /** + * The hyperparameters used for the fine-tuning job. + */ + hyperparameters?: Dpo.Hyperparameters; + } + namespace Dpo { + /** + * The hyperparameters used for the fine-tuning job. + */ + interface Hyperparameters { + /** + * Number of examples in each batch. A larger batch size means that model + * parameters are updated less frequently, but with lower variance. + */ + batch_size?: 'auto' | number; + /** + * The beta value for the DPO method. A higher beta value will increase the weight + * of the penalty between the policy and reference model.
+ */ + beta?: 'auto' | number; + /** + * Scaling factor for the learning rate. A smaller learning rate may be useful to + * avoid overfitting. + */ + learning_rate_multiplier?: 'auto' | number; + /** + * The number of epochs to train the model for. An epoch refers to one full cycle + * through the training dataset. + */ + n_epochs?: 'auto' | number; + } + } + /** + * Configuration for the supervised fine-tuning method. + */ + interface Supervised { + /** + * The hyperparameters used for the fine-tuning job. + */ + hyperparameters?: Supervised.Hyperparameters; + } + namespace Supervised { + /** + * The hyperparameters used for the fine-tuning job. + */ + interface Hyperparameters { + /** + * Number of examples in each batch. A larger batch size means that model + * parameters are updated less frequently, but with lower variance. + */ + batch_size?: 'auto' | number; + /** + * Scaling factor for the learning rate. A smaller learning rate may be useful to + * avoid overfitting. + */ + learning_rate_multiplier?: 'auto' | number; + /** + * The number of epochs to train the model for. An epoch refers to one full cycle + * through the training dataset. + */ + n_epochs?: 'auto' | number; + } + } + } +} +export interface JobListParams extends CursorPageParams { +} +export interface JobListEventsParams extends CursorPageParams { +} +export declare namespace Jobs { + export { type FineTuningJob as FineTuningJob, type FineTuningJobEvent as FineTuningJobEvent, type FineTuningJobIntegration as FineTuningJobIntegration, type FineTuningJobWandbIntegration as FineTuningJobWandbIntegration, type FineTuningJobWandbIntegrationObject as FineTuningJobWandbIntegrationObject, FineTuningJobsPage as FineTuningJobsPage, FineTuningJobEventsPage as FineTuningJobEventsPage, type JobCreateParams as JobCreateParams, type JobListParams as JobListParams, type JobListEventsParams as JobListEventsParams, }; + export { Checkpoints as Checkpoints, type FineTuningJobCheckpoint as FineTuningJobCheckpoint, FineTuningJobCheckpointsPage as FineTuningJobCheckpointsPage, type CheckpointListParams as CheckpointListParams, }; +} +//# sourceMappingURL=jobs.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/resources/fine-tuning/jobs/jobs.d.ts.map b/node_modules/openai/resources/fine-tuning/jobs/jobs.d.ts.map new file mode 100644 index 0000000..0ebee71 --- /dev/null +++ b/node_modules/openai/resources/fine-tuning/jobs/jobs.d.ts.map @@ -0,0 +1 @@ 
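The `jobs.d.ts` declarations above cover the fine-tuning Jobs surface this patch vendors, including the newer `method` block that supersedes the deprecated top-level `hyperparameters`. A minimal usage sketch, assuming `OPENAI_API_KEY` is set in the environment and that the JSONL file IDs (`file-abc123`, `file-def456`) are placeholders for files already uploaded with purpose `fine-tune`:

```ts
import OpenAI from 'openai';

const client = new OpenAI(); // picks up OPENAI_API_KEY from the environment

async function main() {
  // JobCreateParams as declared above; the DPO hyperparameters ride under
  // `method` rather than the deprecated top-level `hyperparameters` field.
  const job = await client.fineTuning.jobs.create({
    model: 'gpt-4o-mini',
    training_file: 'file-abc123',   // placeholder ID of an uploaded JSONL file
    validation_file: 'file-def456', // placeholder; optional
    suffix: 'dpo-experiment',
    method: {
      type: 'dpo',
      dpo: {
        hyperparameters: { beta: 0.1, n_epochs: 'auto' },
      },
    },
  });

  console.log(job.id, job.status); // e.g. 'validating_files'
}

main();
```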
+{"version":3,"file":"jobs.d.ts","sourceRoot":"","sources":["../../../src/resources/fine-tuning/jobs/jobs.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,WAAW,EAAE,MAAM,mBAAmB,CAAC;AAEhD,OAAO,KAAK,IAAI,MAAM,eAAe,CAAC;AACtC,OAAO,KAAK,cAAc,MAAM,eAAe,CAAC;AAChD,OAAO,EACL,oBAAoB,EACpB,WAAW,EACX,uBAAuB,EACvB,4BAA4B,EAC7B,MAAM,eAAe,CAAC;AACvB,OAAO,EAAE,UAAU,EAAE,KAAK,gBAAgB,EAAE,MAAM,qBAAqB,CAAC;AAExE,qBAAa,IAAK,SAAQ,WAAW;IACnC,WAAW,EAAE,cAAc,CAAC,WAAW,CAAgD;IAEvF;;;;;;;;OAQG;IACH,MAAM,CAAC,IAAI,EAAE,eAAe,EAAE,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAAG,IAAI,CAAC,UAAU,CAAC,aAAa,CAAC;IAI5F;;;;OAIG;IACH,QAAQ,CAAC,eAAe,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAAG,IAAI,CAAC,UAAU,CAAC,aAAa,CAAC;IAIhG;;OAEG;IACH,IAAI,CACF,KAAK,CAAC,EAAE,aAAa,EACrB,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAC5B,IAAI,CAAC,WAAW,CAAC,kBAAkB,EAAE,aAAa,CAAC;IACtD,IAAI,CAAC,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAAG,IAAI,CAAC,WAAW,CAAC,kBAAkB,EAAE,aAAa,CAAC;IAWxF;;OAEG;IACH,MAAM,CAAC,eAAe,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAAG,IAAI,CAAC,UAAU,CAAC,aAAa,CAAC;IAI9F;;OAEG;IACH,UAAU,CACR,eAAe,EAAE,MAAM,EACvB,KAAK,CAAC,EAAE,mBAAmB,EAC3B,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAC5B,IAAI,CAAC,WAAW,CAAC,uBAAuB,EAAE,kBAAkB,CAAC;IAChE,UAAU,CACR,eAAe,EAAE,MAAM,EACvB,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAC5B,IAAI,CAAC,WAAW,CAAC,uBAAuB,EAAE,kBAAkB,CAAC;CAcjE;AAED,qBAAa,kBAAmB,SAAQ,UAAU,CAAC,aAAa,CAAC;CAAG;AAEpE,qBAAa,uBAAwB,SAAQ,UAAU,CAAC,kBAAkB,CAAC;CAAG;AAE9E;;;GAGG;AACH,MAAM,WAAW,aAAa;IAC5B;;OAEG;IACH,EAAE,EAAE,MAAM,CAAC;IAEX;;OAEG;IACH,UAAU,EAAE,MAAM,CAAC;IAEnB;;;OAGG;IACH,KAAK,EAAE,aAAa,CAAC,KAAK,GAAG,IAAI,CAAC;IAElC;;;OAGG;IACH,gBAAgB,EAAE,MAAM,GAAG,IAAI,CAAC;IAEhC;;;OAGG;IACH,WAAW,EAAE,MAAM,GAAG,IAAI,CAAC;IAE3B;;;OAGG;IACH,eAAe,EAAE,aAAa,CAAC,eAAe,CAAC;IAE/C;;OAEG;IACH,KAAK,EAAE,MAAM,CAAC;IAEd;;OAEG;IACH,MAAM,EAAE,iBAAiB,CAAC;IAE1B;;OAEG;IACH,eAAe,EAAE,MAAM,CAAC;IAExB;;;;OAIG;IACH,YAAY,EAAE,KAAK,CAAC,MAAM,CAAC,CAAC;IAE5B;;OAEG;IACH,IAAI,EAAE,MAAM,CAAC;IAEb;;;OAGG;IACH,MAAM,EAAE,kBAAkB,GAAG,QAAQ,GAAG,SAAS,GAAG,WAAW,GAAG,QAAQ,GAAG,WAAW,CAAC;IAEzF;;;OAGG;IACH,cAAc,EAAE,MAAM,GAAG,IAAI,CAAC;IAE9B;;;OAGG;IACH,aAAa,EAAE,MAAM,CAAC;IAEtB;;;;OAIG;IACH,eAAe,EAAE,MAAM,GAAG,IAAI,CAAC;IAE/B;;;OAGG;IACH,gBAAgB,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IAEjC;;OAEG;IACH,YAAY,CAAC,EAAE,KAAK,CAAC,mCAAmC,CAAC,GAAG,IAAI,CAAC;IAEjE;;OAEG;IACH,MAAM,CAAC,EAAE,aAAa,CAAC,MAAM,CAAC;CAC/B;AAED,yBAAiB,aAAa,CAAC;IAC7B;;;OAGG;IACH,UAAiB,KAAK;QACpB;;WAEG;QACH,IAAI,EAAE,MAAM,CAAC;QAEb;;WAEG;QACH,OAAO,EAAE,MAAM,CAAC;QAEhB;;;WAGG;QACH,KAAK,EAAE,MAAM,GAAG,IAAI,CAAC;KACtB;IAED;;;OAGG;IACH,UAAiB,eAAe;QAC9B;;;WAGG;QACH,UAAU,CAAC,EAAE,MAAM,GAAG,MAAM,CAAC;QAE7B;;;WAGG;QACH,wBAAwB,CAAC,EAAE,MAAM,GAAG,MAAM,CAAC;QAE3C;;;WAGG;QACH,QAAQ,CAAC,EAAE,MAAM,GAAG,MAAM,CAAC;KAC5B;IAED;;OAEG;IACH,UAAiB,MAAM;QACrB;;WAEG;QACH,GAAG,CAAC,EAAE,MAAM,CAAC,GAAG,CAAC;QAEjB;;WAEG;QACH,UAAU,CAAC,EAAE,MAAM,CAAC,UAAU,CAAC;QAE/B;;WAEG;QACH,IAAI,CAAC,EAAE,YAAY,GAAG,KAAK,CAAC;KAC7B;IAED,UAAiB,MAAM,CAAC;QACtB;;WAEG;QACH,UAAiB,GAAG;YAClB;;eAEG;YACH,eAAe,CAAC,EAAE,GAAG,CAAC,eAAe,CAAC;SACvC;QAED,UAAiB,GAAG,CAAC;YACnB;;eAEG;YACH,UAAiB,eAAe;gBAC9B;;;mBAGG;gBACH,UAAU,CAAC,EAAE,MAAM,GAAG,MAAM,CAAC;gBAE7B;;;mBAGG;gBACH,IAAI,CAAC,EAAE,MAAM,GAAG,MAAM,CAAC;gBAEvB;;;mBAGG;gBACH,wBAAwB,CAAC,EAAE,MAAM,GAAG,MAAM,CAAC;gBAE3C;;;mBAGG;gBACH,QAAQ,CAAC,EAAE,MAAM,GAAG,MAAM,CAAC;aAC5B;SACF;QAED;;WAEG;QACH,UAAiB,UAAU;YACzB;;eAEG;YACH,eAAe,CAAC,EAAE,UAAU,CAAC,eAAe,CAAC;SAC9C;QAED,UAAiB,UAAU,CAAC;YAC1B;;eAEG;YACH,UAAiB,eAAe;gBAC9B;;;mBAGG;gBACH,UAAU,CAAC,EAAE,MAAM,GAAG,MAAM,CAAC;gBAE7B;;;mBAGG;gBACH,wBAAwB,CAAC,EAAE,MAAM,GAAG,MAAM,CAAC;gBAE3C;;;mBAGG;gBACH,QAAQ,CAAC,EAAE
,MAAM,GAAG,MAAM,CAAC;aAC5B;SACF;KACF;CACF;AAED;;GAEG;AACH,MAAM,WAAW,kBAAkB;IACjC;;OAEG;IACH,EAAE,EAAE,MAAM,CAAC;IAEX;;OAEG;IACH,UAAU,EAAE,MAAM,CAAC;IAEnB;;OAEG;IACH,KAAK,EAAE,MAAM,GAAG,MAAM,GAAG,OAAO,CAAC;IAEjC;;OAEG;IACH,OAAO,EAAE,MAAM,CAAC;IAEhB;;OAEG;IACH,MAAM,EAAE,uBAAuB,CAAC;IAEhC;;OAEG;IACH,IAAI,CAAC,EAAE,OAAO,CAAC;IAEf;;OAEG;IACH,IAAI,CAAC,EAAE,SAAS,GAAG,SAAS,CAAC;CAC9B;AAED,MAAM,MAAM,wBAAwB,GAAG,mCAAmC,CAAC;AAE3E;;;;;GAKG;AACH,MAAM,WAAW,6BAA6B;IAC5C;;OAEG;IACH,OAAO,EAAE,MAAM,CAAC;IAEhB;;;;OAIG;IACH,MAAM,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IAEvB;;;OAGG;IACH,IAAI,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IAErB;;;;OAIG;IACH,IAAI,CAAC,EAAE,KAAK,CAAC,MAAM,CAAC,CAAC;CACtB;AAED,MAAM,WAAW,mCAAmC;IAClD;;OAEG;IACH,IAAI,EAAE,OAAO,CAAC;IAEd;;;;;OAKG;IACH,KAAK,EAAE,6BAA6B,CAAC;CACtC;AAED,MAAM,WAAW,eAAe;IAC9B;;;OAGG;IACH,KAAK,EAAE,CAAC,MAAM,GAAG,EAAE,CAAC,GAAG,aAAa,GAAG,aAAa,GAAG,eAAe,GAAG,aAAa,CAAC;IAEvF;;;;;;;;;;;;;;;;;;OAkBG;IACH,aAAa,EAAE,MAAM,CAAC;IAEtB;;;OAGG;IACH,eAAe,CAAC,EAAE,eAAe,CAAC,eAAe,CAAC;IAElD;;OAEG;IACH,YAAY,CAAC,EAAE,KAAK,CAAC,eAAe,CAAC,WAAW,CAAC,GAAG,IAAI,CAAC;IAEzD;;OAEG;IACH,MAAM,CAAC,EAAE,eAAe,CAAC,MAAM,CAAC;IAEhC;;;;OAIG;IACH,IAAI,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IAErB;;;;;;OAMG;IACH,MAAM,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IAEvB;;;;;;;;;;;;;OAaG;IACH,eAAe,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;CACjC;AAED,yBAAiB,eAAe,CAAC;IAC/B;;;;OAIG;IACH,UAAiB,eAAe;QAC9B;;;WAGG;QACH,UAAU,CAAC,EAAE,MAAM,GAAG,MAAM,CAAC;QAE7B;;;WAGG;QACH,wBAAwB,CAAC,EAAE,MAAM,GAAG,MAAM,CAAC;QAE3C;;;WAGG;QACH,QAAQ,CAAC,EAAE,MAAM,GAAG,MAAM,CAAC;KAC5B;IAED,UAAiB,WAAW;QAC1B;;;WAGG;QACH,IAAI,EAAE,OAAO,CAAC;QAEd;;;;;WAKG;QACH,KAAK,EAAE,WAAW,CAAC,KAAK,CAAC;KAC1B;IAED,UAAiB,WAAW,CAAC;QAC3B;;;;;WAKG;QACH,UAAiB,KAAK;YACpB;;eAEG;YACH,OAAO,EAAE,MAAM,CAAC;YAEhB;;;;eAIG;YACH,MAAM,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;YAEvB;;;eAGG;YACH,IAAI,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;YAErB;;;;eAIG;YACH,IAAI,CAAC,EAAE,KAAK,CAAC,MAAM,CAAC,CAAC;SACtB;KACF;IAED;;OAEG;IACH,UAAiB,MAAM;QACrB;;WAEG;QACH,GAAG,CAAC,EAAE,MAAM,CAAC,GAAG,CAAC;QAEjB;;WAEG;QACH,UAAU,CAAC,EAAE,MAAM,CAAC,UAAU,CAAC;QAE/B;;WAEG;QACH,IAAI,CAAC,EAAE,YAAY,GAAG,KAAK,CAAC;KAC7B;IAED,UAAiB,MAAM,CAAC;QACtB;;WAEG;QACH,UAAiB,GAAG;YAClB;;eAEG;YACH,eAAe,CAAC,EAAE,GAAG,CAAC,eAAe,CAAC;SACvC;QAED,UAAiB,GAAG,CAAC;YACnB;;eAEG;YACH,UAAiB,eAAe;gBAC9B;;;mBAGG;gBACH,UAAU,CAAC,EAAE,MAAM,GAAG,MAAM,CAAC;gBAE7B;;;mBAGG;gBACH,IAAI,CAAC,EAAE,MAAM,GAAG,MAAM,CAAC;gBAEvB;;;mBAGG;gBACH,wBAAwB,CAAC,EAAE,MAAM,GAAG,MAAM,CAAC;gBAE3C;;;mBAGG;gBACH,QAAQ,CAAC,EAAE,MAAM,GAAG,MAAM,CAAC;aAC5B;SACF;QAED;;WAEG;QACH,UAAiB,UAAU;YACzB;;eAEG;YACH,eAAe,CAAC,EAAE,UAAU,CAAC,eAAe,CAAC;SAC9C;QAED,UAAiB,UAAU,CAAC;YAC1B;;eAEG;YACH,UAAiB,eAAe;gBAC9B;;;mBAGG;gBACH,UAAU,CAAC,EAAE,MAAM,GAAG,MAAM,CAAC;gBAE7B;;;mBAGG;gBACH,wBAAwB,CAAC,EAAE,MAAM,GAAG,MAAM,CAAC;gBAE3C;;;mBAGG;gBACH,QAAQ,CAAC,EAAE,MAAM,GAAG,MAAM,CAAC;aAC5B;SACF;KACF;CACF;AAED,MAAM,WAAW,aAAc,SAAQ,gBAAgB;CAAG;AAE1D,MAAM,WAAW,mBAAoB,SAAQ,gBAAgB;CAAG;AAOhE,MAAM,CAAC,OAAO,WAAW,IAAI,CAAC;IAC5B,OAAO,EACL,KAAK,aAAa,IAAI,aAAa,EACnC,KAAK,kBAAkB,IAAI,kBAAkB,EAC7C,KAAK,wBAAwB,IAAI,wBAAwB,EACzD,KAAK,6BAA6B,IAAI,6BAA6B,EACnE,KAAK,mCAAmC,IAAI,mCAAmC,EAC/E,kBAAkB,IAAI,kBAAkB,EACxC,uBAAuB,IAAI,uBAAuB,EAClD,KAAK,eAAe,IAAI,eAAe,EACvC,KAAK,aAAa,IAAI,aAAa,EACnC,KAAK,mBAAmB,IAAI,mBAAmB,GAChD,CAAC;IAEF,OAAO,EACL,WAAW,IAAI,WAAW,EAC1B,KAAK,uBAAuB,IAAI,uBAAuB,EACvD,4BAA4B,IAAI,4BAA4B,EAC5D,KAAK,oBAAoB,IAAI,oBAAoB,GAClD,CAAC;CACH"} \ No newline at end of file diff --git a/node_modules/openai/resources/fine-tuning/jobs/jobs.js b/node_modules/openai/resources/fine-tuning/jobs/jobs.js new file mode 100644 index 0000000..83a2279 --- 
/dev/null +++ b/node_modules/openai/resources/fine-tuning/jobs/jobs.js @@ -0,0 +1,91 @@ +"use strict"; +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.FineTuningJobEventsPage = exports.FineTuningJobsPage = exports.Jobs = void 0; +const resource_1 = require("../../../resource.js"); +const core_1 = require("../../../core.js"); +const CheckpointsAPI = __importStar(require("./checkpoints.js")); +const checkpoints_1 = require("./checkpoints.js"); +const pagination_1 = require("../../../pagination.js"); +class Jobs extends resource_1.APIResource { + constructor() { + super(...arguments); + this.checkpoints = new CheckpointsAPI.Checkpoints(this._client); + } + /** + * Creates a fine-tuning job which begins the process of creating a new model from + * a given dataset. + * + * Response includes details of the enqueued job including job status and the name + * of the fine-tuned models once complete. + * + * [Learn more about fine-tuning](https://platform.openai.com/docs/guides/fine-tuning) + */ + create(body, options) { + return this._client.post('/fine_tuning/jobs', { body, ...options }); + } + /** + * Get info about a fine-tuning job. + * + * [Learn more about fine-tuning](https://platform.openai.com/docs/guides/fine-tuning) + */ + retrieve(fineTuningJobId, options) { + return this._client.get(`/fine_tuning/jobs/${fineTuningJobId}`, options); + } + list(query = {}, options) { + if ((0, core_1.isRequestOptions)(query)) { + return this.list({}, query); + } + return this._client.getAPIList('/fine_tuning/jobs', FineTuningJobsPage, { query, ...options }); + } + /** + * Immediately cancel a fine-tune job. 
+ */ + cancel(fineTuningJobId, options) { + return this._client.post(`/fine_tuning/jobs/${fineTuningJobId}/cancel`, options); + } + listEvents(fineTuningJobId, query = {}, options) { + if ((0, core_1.isRequestOptions)(query)) { + return this.listEvents(fineTuningJobId, {}, query); + } + return this._client.getAPIList(`/fine_tuning/jobs/${fineTuningJobId}/events`, FineTuningJobEventsPage, { + query, + ...options, + }); + } +} +exports.Jobs = Jobs; +class FineTuningJobsPage extends pagination_1.CursorPage { +} +exports.FineTuningJobsPage = FineTuningJobsPage; +class FineTuningJobEventsPage extends pagination_1.CursorPage { +} +exports.FineTuningJobEventsPage = FineTuningJobEventsPage; +Jobs.FineTuningJobsPage = FineTuningJobsPage; +Jobs.FineTuningJobEventsPage = FineTuningJobEventsPage; +Jobs.Checkpoints = checkpoints_1.Checkpoints; +Jobs.FineTuningJobCheckpointsPage = checkpoints_1.FineTuningJobCheckpointsPage; +//# sourceMappingURL=jobs.js.map \ No newline at end of file diff --git a/node_modules/openai/resources/fine-tuning/jobs/jobs.js.map b/node_modules/openai/resources/fine-tuning/jobs/jobs.js.map new file mode 100644 index 0000000..f3dff12 --- /dev/null +++ b/node_modules/openai/resources/fine-tuning/jobs/jobs.js.map @@ -0,0 +1 @@ +{"version":3,"file":"jobs.js","sourceRoot":"","sources":["../../../src/resources/fine-tuning/jobs/jobs.ts"],"names":[],"mappings":";AAAA,sFAAsF;;;;;;;;;;;;;;;;;;;;;;;;;;AAEtF,mDAAgD;AAChD,2CAAiD;AAEjD,iEAAgD;AAChD,kDAKuB;AACvB,uDAAwE;AAExE,MAAa,IAAK,SAAQ,sBAAW;IAArC;;QACE,gBAAW,GAA+B,IAAI,cAAc,CAAC,WAAW,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;IA0EzF,CAAC;IAxEC;;;;;;;;OAQG;IACH,MAAM,CAAC,IAAqB,EAAE,OAA6B;QACzD,OAAO,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,mBAAmB,EAAE,EAAE,IAAI,EAAE,GAAG,OAAO,EAAE,CAAC,CAAC;IACtE,CAAC;IAED;;;;OAIG;IACH,QAAQ,CAAC,eAAuB,EAAE,OAA6B;QAC7D,OAAO,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,qBAAqB,eAAe,EAAE,EAAE,OAAO,CAAC,CAAC;IAC3E,CAAC;IAUD,IAAI,CACF,QAA6C,EAAE,EAC/C,OAA6B;QAE7B,IAAI,IAAA,uBAAgB,EAAC,KAAK,CAAC,EAAE;YAC3B,OAAO,IAAI,CAAC,IAAI,CAAC,EAAE,EAAE,KAAK,CAAC,CAAC;SAC7B;QACD,OAAO,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,mBAAmB,EAAE,kBAAkB,EAAE,EAAE,KAAK,EAAE,GAAG,OAAO,EAAE,CAAC,CAAC;IACjG,CAAC;IAED;;OAEG;IACH,MAAM,CAAC,eAAuB,EAAE,OAA6B;QAC3D,OAAO,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,qBAAqB,eAAe,SAAS,EAAE,OAAO,CAAC,CAAC;IACnF,CAAC;IAcD,UAAU,CACR,eAAuB,EACvB,QAAmD,EAAE,EACrD,OAA6B;QAE7B,IAAI,IAAA,uBAAgB,EAAC,KAAK,CAAC,EAAE;YAC3B,OAAO,IAAI,CAAC,UAAU,CAAC,eAAe,EAAE,EAAE,EAAE,KAAK,CAAC,CAAC;SACpD;QACD,OAAO,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,qBAAqB,eAAe,SAAS,EAAE,uBAAuB,EAAE;YACrG,KAAK;YACL,GAAG,OAAO;SACX,CAAC,CAAC;IACL,CAAC;CACF;AA3ED,oBA2EC;AAED,MAAa,kBAAmB,SAAQ,uBAAyB;CAAG;AAApE,gDAAoE;AAEpE,MAAa,uBAAwB,SAAQ,uBAA8B;CAAG;AAA9E,0DAA8E;AA2lB9E,IAAI,CAAC,kBAAkB,GAAG,kBAAkB,CAAC;AAC7C,IAAI,CAAC,uBAAuB,GAAG,uBAAuB,CAAC;AACvD,IAAI,CAAC,WAAW,GAAG,yBAAW,CAAC;AAC/B,IAAI,CAAC,4BAA4B,GAAG,0CAA4B,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/resources/fine-tuning/jobs/jobs.mjs b/node_modules/openai/resources/fine-tuning/jobs/jobs.mjs new file mode 100644 index 0000000..2e7bdce --- /dev/null +++ b/node_modules/openai/resources/fine-tuning/jobs/jobs.mjs @@ -0,0 +1,62 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+import { APIResource } from "../../../resource.mjs"; +import { isRequestOptions } from "../../../core.mjs"; +import * as CheckpointsAPI from "./checkpoints.mjs"; +import { Checkpoints, FineTuningJobCheckpointsPage, } from "./checkpoints.mjs"; +import { CursorPage } from "../../../pagination.mjs"; +export class Jobs extends APIResource { + constructor() { + super(...arguments); + this.checkpoints = new CheckpointsAPI.Checkpoints(this._client); + } + /** + * Creates a fine-tuning job which begins the process of creating a new model from + * a given dataset. + * + * Response includes details of the enqueued job including job status and the name + * of the fine-tuned models once complete. + * + * [Learn more about fine-tuning](https://platform.openai.com/docs/guides/fine-tuning) + */ + create(body, options) { + return this._client.post('/fine_tuning/jobs', { body, ...options }); + } + /** + * Get info about a fine-tuning job. + * + * [Learn more about fine-tuning](https://platform.openai.com/docs/guides/fine-tuning) + */ + retrieve(fineTuningJobId, options) { + return this._client.get(`/fine_tuning/jobs/${fineTuningJobId}`, options); + } + list(query = {}, options) { + if (isRequestOptions(query)) { + return this.list({}, query); + } + return this._client.getAPIList('/fine_tuning/jobs', FineTuningJobsPage, { query, ...options }); + } + /** + * Immediately cancel a fine-tune job. + */ + cancel(fineTuningJobId, options) { + return this._client.post(`/fine_tuning/jobs/${fineTuningJobId}/cancel`, options); + } + listEvents(fineTuningJobId, query = {}, options) { + if (isRequestOptions(query)) { + return this.listEvents(fineTuningJobId, {}, query); + } + return this._client.getAPIList(`/fine_tuning/jobs/${fineTuningJobId}/events`, FineTuningJobEventsPage, { + query, + ...options, + }); + } +} +export class FineTuningJobsPage extends CursorPage { +} +export class FineTuningJobEventsPage extends CursorPage { +} +Jobs.FineTuningJobsPage = FineTuningJobsPage; +Jobs.FineTuningJobEventsPage = FineTuningJobEventsPage; +Jobs.Checkpoints = Checkpoints; +Jobs.FineTuningJobCheckpointsPage = FineTuningJobCheckpointsPage; +//# sourceMappingURL=jobs.mjs.map \ No newline at end of file diff --git a/node_modules/openai/resources/fine-tuning/jobs/jobs.mjs.map b/node_modules/openai/resources/fine-tuning/jobs/jobs.mjs.map new file mode 100644 index 0000000..abe28a0 --- /dev/null +++ b/node_modules/openai/resources/fine-tuning/jobs/jobs.mjs.map @@ -0,0 +1 @@ 
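The compiled `Jobs` class above routes `list()` and `listEvents()` through `getAPIList`, so both return cursor pages. A short sketch of consuming them with the SDK's `for await` auto-pagination; the job ID is a placeholder and the `limit` values are arbitrary:

```ts
import OpenAI from 'openai';

const client = new OpenAI();

async function main() {
  // Iterating the returned PagePromise fetches further cursor pages on demand.
  for await (const job of client.fineTuning.jobs.list({ limit: 20 })) {
    console.log(job.id, job.status);
  }

  // Events for a single job are paginated the same way.
  for await (const event of client.fineTuning.jobs.listEvents('ftjob-abc123', { limit: 50 })) {
    console.log(event.level, event.message);
  }
}

main();
```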
+{"version":3,"file":"jobs.mjs","sourceRoot":"","sources":["../../../src/resources/fine-tuning/jobs/jobs.ts"],"names":[],"mappings":"AAAA,sFAAsF;OAE/E,EAAE,WAAW,EAAE;OACf,EAAE,gBAAgB,EAAE;OAEpB,KAAK,cAAc;OACnB,EAEL,WAAW,EAEX,4BAA4B,GAC7B;OACM,EAAE,UAAU,EAAyB;AAE5C,MAAM,OAAO,IAAK,SAAQ,WAAW;IAArC;;QACE,gBAAW,GAA+B,IAAI,cAAc,CAAC,WAAW,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;IA0EzF,CAAC;IAxEC;;;;;;;;OAQG;IACH,MAAM,CAAC,IAAqB,EAAE,OAA6B;QACzD,OAAO,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,mBAAmB,EAAE,EAAE,IAAI,EAAE,GAAG,OAAO,EAAE,CAAC,CAAC;IACtE,CAAC;IAED;;;;OAIG;IACH,QAAQ,CAAC,eAAuB,EAAE,OAA6B;QAC7D,OAAO,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,qBAAqB,eAAe,EAAE,EAAE,OAAO,CAAC,CAAC;IAC3E,CAAC;IAUD,IAAI,CACF,QAA6C,EAAE,EAC/C,OAA6B;QAE7B,IAAI,gBAAgB,CAAC,KAAK,CAAC,EAAE;YAC3B,OAAO,IAAI,CAAC,IAAI,CAAC,EAAE,EAAE,KAAK,CAAC,CAAC;SAC7B;QACD,OAAO,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,mBAAmB,EAAE,kBAAkB,EAAE,EAAE,KAAK,EAAE,GAAG,OAAO,EAAE,CAAC,CAAC;IACjG,CAAC;IAED;;OAEG;IACH,MAAM,CAAC,eAAuB,EAAE,OAA6B;QAC3D,OAAO,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,qBAAqB,eAAe,SAAS,EAAE,OAAO,CAAC,CAAC;IACnF,CAAC;IAcD,UAAU,CACR,eAAuB,EACvB,QAAmD,EAAE,EACrD,OAA6B;QAE7B,IAAI,gBAAgB,CAAC,KAAK,CAAC,EAAE;YAC3B,OAAO,IAAI,CAAC,UAAU,CAAC,eAAe,EAAE,EAAE,EAAE,KAAK,CAAC,CAAC;SACpD;QACD,OAAO,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,qBAAqB,eAAe,SAAS,EAAE,uBAAuB,EAAE;YACrG,KAAK;YACL,GAAG,OAAO;SACX,CAAC,CAAC;IACL,CAAC;CACF;AAED,MAAM,OAAO,kBAAmB,SAAQ,UAAyB;CAAG;AAEpE,MAAM,OAAO,uBAAwB,SAAQ,UAA8B;CAAG;AA2lB9E,IAAI,CAAC,kBAAkB,GAAG,kBAAkB,CAAC;AAC7C,IAAI,CAAC,uBAAuB,GAAG,uBAAuB,CAAC;AACvD,IAAI,CAAC,WAAW,GAAG,WAAW,CAAC;AAC/B,IAAI,CAAC,4BAA4B,GAAG,4BAA4B,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/resources/images.d.ts b/node_modules/openai/resources/images.d.ts new file mode 100644 index 0000000..3258f0b --- /dev/null +++ b/node_modules/openai/resources/images.d.ts @@ -0,0 +1,169 @@ +import { APIResource } from "../resource.js"; +import * as Core from "../core.js"; +export declare class Images extends APIResource { + /** + * Creates a variation of a given image. + */ + createVariation(body: ImageCreateVariationParams, options?: Core.RequestOptions): Core.APIPromise; + /** + * Creates an edited or extended image given an original image and a prompt. + */ + edit(body: ImageEditParams, options?: Core.RequestOptions): Core.APIPromise; + /** + * Creates an image given a prompt. + */ + generate(body: ImageGenerateParams, options?: Core.RequestOptions): Core.APIPromise; +} +/** + * Represents the url or the content of an image generated by the OpenAI API. + */ +export interface Image { + /** + * The base64-encoded JSON of the generated image, if `response_format` is + * `b64_json`. + */ + b64_json?: string; + /** + * The prompt that was used to generate the image, if there was any revision to the + * prompt. + */ + revised_prompt?: string; + /** + * The URL of the generated image, if `response_format` is `url` (default). + */ + url?: string; +} +export type ImageModel = 'dall-e-2' | 'dall-e-3'; +export interface ImagesResponse { + created: number; + data: Array; +} +export interface ImageCreateVariationParams { + /** + * The image to use as the basis for the variation(s). Must be a valid PNG file, + * less than 4MB, and square. + */ + image: Core.Uploadable; + /** + * The model to use for image generation. Only `dall-e-2` is supported at this + * time. + */ + model?: (string & {}) | ImageModel | null; + /** + * The number of images to generate. Must be between 1 and 10. For `dall-e-3`, only + * `n=1` is supported. 
+ */ + n?: number | null; + /** + * The format in which the generated images are returned. Must be one of `url` or + * `b64_json`. URLs are only valid for 60 minutes after the image has been + * generated. + */ + response_format?: 'url' | 'b64_json' | null; + /** + * The size of the generated images. Must be one of `256x256`, `512x512`, or + * `1024x1024`. + */ + size?: '256x256' | '512x512' | '1024x1024' | null; + /** + * A unique identifier representing your end-user, which can help OpenAI to monitor + * and detect abuse. + * [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). + */ + user?: string; +} +export interface ImageEditParams { + /** + * The image to edit. Must be a valid PNG file, less than 4MB, and square. If mask + * is not provided, image must have transparency, which will be used as the mask. + */ + image: Core.Uploadable; + /** + * A text description of the desired image(s). The maximum length is 1000 + * characters. + */ + prompt: string; + /** + * An additional image whose fully transparent areas (e.g. where alpha is zero) + * indicate where `image` should be edited. Must be a valid PNG file, less than + * 4MB, and have the same dimensions as `image`. + */ + mask?: Core.Uploadable; + /** + * The model to use for image generation. Only `dall-e-2` is supported at this + * time. + */ + model?: (string & {}) | ImageModel | null; + /** + * The number of images to generate. Must be between 1 and 10. + */ + n?: number | null; + /** + * The format in which the generated images are returned. Must be one of `url` or + * `b64_json`. URLs are only valid for 60 minutes after the image has been + * generated. + */ + response_format?: 'url' | 'b64_json' | null; + /** + * The size of the generated images. Must be one of `256x256`, `512x512`, or + * `1024x1024`. + */ + size?: '256x256' | '512x512' | '1024x1024' | null; + /** + * A unique identifier representing your end-user, which can help OpenAI to monitor + * and detect abuse. + * [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). + */ + user?: string; +} +export interface ImageGenerateParams { + /** + * A text description of the desired image(s). The maximum length is 1000 + * characters for `dall-e-2` and 4000 characters for `dall-e-3`. + */ + prompt: string; + /** + * The model to use for image generation. + */ + model?: (string & {}) | ImageModel | null; + /** + * The number of images to generate. Must be between 1 and 10. For `dall-e-3`, only + * `n=1` is supported. + */ + n?: number | null; + /** + * The quality of the image that will be generated. `hd` creates images with finer + * details and greater consistency across the image. This param is only supported + * for `dall-e-3`. + */ + quality?: 'standard' | 'hd'; + /** + * The format in which the generated images are returned. Must be one of `url` or + * `b64_json`. URLs are only valid for 60 minutes after the image has been + * generated. + */ + response_format?: 'url' | 'b64_json' | null; + /** + * The size of the generated images. Must be one of `256x256`, `512x512`, or + * `1024x1024` for `dall-e-2`. Must be one of `1024x1024`, `1792x1024`, or + * `1024x1792` for `dall-e-3` models. + */ + size?: '256x256' | '512x512' | '1024x1024' | '1792x1024' | '1024x1792' | null; + /** + * The style of the generated images. Must be one of `vivid` or `natural`. Vivid + * causes the model to lean towards generating hyper-real and dramatic images. 
+ * Natural causes the model to produce more natural, less hyper-real looking + * images. This param is only supported for `dall-e-3`. + */ + style?: 'vivid' | 'natural' | null; + /** + * A unique identifier representing your end-user, which can help OpenAI to monitor + * and detect abuse. + * [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). + */ + user?: string; +} +export declare namespace Images { + export { type Image as Image, type ImageModel as ImageModel, type ImagesResponse as ImagesResponse, type ImageCreateVariationParams as ImageCreateVariationParams, type ImageEditParams as ImageEditParams, type ImageGenerateParams as ImageGenerateParams, }; +} +//# sourceMappingURL=images.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/resources/images.d.ts.map b/node_modules/openai/resources/images.d.ts.map new file mode 100644 index 0000000..1193cf9 --- /dev/null +++ b/node_modules/openai/resources/images.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"images.d.ts","sourceRoot":"","sources":["../src/resources/images.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,WAAW,EAAE,MAAM,aAAa,CAAC;AAC1C,OAAO,KAAK,IAAI,MAAM,SAAS,CAAC;AAEhC,qBAAa,MAAO,SAAQ,WAAW;IACrC;;OAEG;IACH,eAAe,CACb,IAAI,EAAE,0BAA0B,EAChC,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAC5B,IAAI,CAAC,UAAU,CAAC,cAAc,CAAC;IAIlC;;OAEG;IACH,IAAI,CAAC,IAAI,EAAE,eAAe,EAAE,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAAG,IAAI,CAAC,UAAU,CAAC,cAAc,CAAC;IAI3F;;OAEG;IACH,QAAQ,CAAC,IAAI,EAAE,mBAAmB,EAAE,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAAG,IAAI,CAAC,UAAU,CAAC,cAAc,CAAC;CAGpG;AAED;;GAEG;AACH,MAAM,WAAW,KAAK;IACpB;;;OAGG;IACH,QAAQ,CAAC,EAAE,MAAM,CAAC;IAElB;;;OAGG;IACH,cAAc,CAAC,EAAE,MAAM,CAAC;IAExB;;OAEG;IACH,GAAG,CAAC,EAAE,MAAM,CAAC;CACd;AAED,MAAM,MAAM,UAAU,GAAG,UAAU,GAAG,UAAU,CAAC;AAEjD,MAAM,WAAW,cAAc;IAC7B,OAAO,EAAE,MAAM,CAAC;IAEhB,IAAI,EAAE,KAAK,CAAC,KAAK,CAAC,CAAC;CACpB;AAED,MAAM,WAAW,0BAA0B;IACzC;;;OAGG;IACH,KAAK,EAAE,IAAI,CAAC,UAAU,CAAC;IAEvB;;;OAGG;IACH,KAAK,CAAC,EAAE,CAAC,MAAM,GAAG,EAAE,CAAC,GAAG,UAAU,GAAG,IAAI,CAAC;IAE1C;;;OAGG;IACH,CAAC,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IAElB;;;;OAIG;IACH,eAAe,CAAC,EAAE,KAAK,GAAG,UAAU,GAAG,IAAI,CAAC;IAE5C;;;OAGG;IACH,IAAI,CAAC,EAAE,SAAS,GAAG,SAAS,GAAG,WAAW,GAAG,IAAI,CAAC;IAElD;;;;OAIG;IACH,IAAI,CAAC,EAAE,MAAM,CAAC;CACf;AAED,MAAM,WAAW,eAAe;IAC9B;;;OAGG;IACH,KAAK,EAAE,IAAI,CAAC,UAAU,CAAC;IAEvB;;;OAGG;IACH,MAAM,EAAE,MAAM,CAAC;IAEf;;;;OAIG;IACH,IAAI,CAAC,EAAE,IAAI,CAAC,UAAU,CAAC;IAEvB;;;OAGG;IACH,KAAK,CAAC,EAAE,CAAC,MAAM,GAAG,EAAE,CAAC,GAAG,UAAU,GAAG,IAAI,CAAC;IAE1C;;OAEG;IACH,CAAC,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IAElB;;;;OAIG;IACH,eAAe,CAAC,EAAE,KAAK,GAAG,UAAU,GAAG,IAAI,CAAC;IAE5C;;;OAGG;IACH,IAAI,CAAC,EAAE,SAAS,GAAG,SAAS,GAAG,WAAW,GAAG,IAAI,CAAC;IAElD;;;;OAIG;IACH,IAAI,CAAC,EAAE,MAAM,CAAC;CACf;AAED,MAAM,WAAW,mBAAmB;IAClC;;;OAGG;IACH,MAAM,EAAE,MAAM,CAAC;IAEf;;OAEG;IACH,KAAK,CAAC,EAAE,CAAC,MAAM,GAAG,EAAE,CAAC,GAAG,UAAU,GAAG,IAAI,CAAC;IAE1C;;;OAGG;IACH,CAAC,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IAElB;;;;OAIG;IACH,OAAO,CAAC,EAAE,UAAU,GAAG,IAAI,CAAC;IAE5B;;;;OAIG;IACH,eAAe,CAAC,EAAE,KAAK,GAAG,UAAU,GAAG,IAAI,CAAC;IAE5C;;;;OAIG;IACH,IAAI,CAAC,EAAE,SAAS,GAAG,SAAS,GAAG,WAAW,GAAG,WAAW,GAAG,WAAW,GAAG,IAAI,CAAC;IAE9E;;;;;OAKG;IACH,KAAK,CAAC,EAAE,OAAO,GAAG,SAAS,GAAG,IAAI,CAAC;IAEnC;;;;OAIG;IACH,IAAI,CAAC,EAAE,MAAM,CAAC;CACf;AAED,MAAM,CAAC,OAAO,WAAW,MAAM,CAAC;IAC9B,OAAO,EACL,KAAK,KAAK,IAAI,KAAK,EACnB,KAAK,UAAU,IAAI,UAAU,EAC7B,KAAK,cAAc,IAAI,cAAc,EACrC,KAAK,0BAA0B,IAAI,0BAA0B,EAC7D,KAAK,eAAe,IAAI,eAAe,EACvC,KAAK,mBAAmB,IAAI,mBAAmB,GAChD,CAAC;CACH"} \ No newline at end of file diff --git 
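`images.d.ts` above declares the three Images operations (`createVariation`, `edit`, `generate`) and their parameter shapes. A hedged sketch of `generate()` with `dall-e-3`, using only fields declared in `ImageGenerateParams`; the prompt and log output are illustrative:

```ts
import OpenAI from 'openai';

const client = new OpenAI();

async function main() {
  const response = await client.images.generate({
    model: 'dall-e-3',
    prompt: 'A watercolor painting of a lighthouse at dawn',
    n: 1,                   // dall-e-3 only supports n=1
    size: '1024x1024',
    quality: 'hd',
    style: 'natural',
    response_format: 'url', // returned URLs expire after 60 minutes
  });

  // ImagesResponse.data holds Image objects; `url` is optional on each.
  console.log(response.data[0]?.url);
}

main();
```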
a/node_modules/openai/resources/images.js b/node_modules/openai/resources/images.js new file mode 100644 index 0000000..5be326a --- /dev/null +++ b/node_modules/openai/resources/images.js @@ -0,0 +1,51 @@ +"use strict"; +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Images = void 0; +const resource_1 = require("../resource.js"); +const Core = __importStar(require("../core.js")); +class Images extends resource_1.APIResource { + /** + * Creates a variation of a given image. + */ + createVariation(body, options) { + return this._client.post('/images/variations', Core.multipartFormRequestOptions({ body, ...options })); + } + /** + * Creates an edited or extended image given an original image and a prompt. + */ + edit(body, options) { + return this._client.post('/images/edits', Core.multipartFormRequestOptions({ body, ...options })); + } + /** + * Creates an image given a prompt. + */ + generate(body, options) { + return this._client.post('/images/generations', { body, ...options }); + } +} +exports.Images = Images; +//# sourceMappingURL=images.js.map \ No newline at end of file diff --git a/node_modules/openai/resources/images.js.map b/node_modules/openai/resources/images.js.map new file mode 100644 index 0000000..aab6dd0 --- /dev/null +++ b/node_modules/openai/resources/images.js.map @@ -0,0 +1 @@ +{"version":3,"file":"images.js","sourceRoot":"","sources":["../src/resources/images.ts"],"names":[],"mappings":";AAAA,sFAAsF;;;;;;;;;;;;;;;;;;;;;;;;;;AAEtF,6CAA0C;AAC1C,iDAAgC;AAEhC,MAAa,MAAO,SAAQ,sBAAW;IACrC;;OAEG;IACH,eAAe,CACb,IAAgC,EAChC,OAA6B;QAE7B,OAAO,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,oBAAoB,EAAE,IAAI,CAAC,2BAA2B,CAAC,EAAE,IAAI,EAAE,GAAG,OAAO,EAAE,CAAC,CAAC,CAAC;IACzG,CAAC;IAED;;OAEG;IACH,IAAI,CAAC,IAAqB,EAAE,OAA6B;QACvD,OAAO,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,eAAe,EAAE,IAAI,CAAC,2BAA2B,CAAC,EAAE,IAAI,EAAE,GAAG,OAAO,EAAE,CAAC,CAAC,CAAC;IACpG,CAAC;IAED;;OAEG;IACH,QAAQ,CAAC,IAAyB,EAAE,OAA6B;QAC/D,OAAO,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,qBAAqB,EAAE,EAAE,IAAI,EAAE,GAAG,OAAO,EAAE,CAAC,CAAC;IACxE,CAAC;CACF;AAxBD,wBAwBC"} \ No newline at end of file diff --git a/node_modules/openai/resources/images.mjs b/node_modules/openai/resources/images.mjs new file mode 100644 index 0000000..3a42f9e --- /dev/null +++ b/node_modules/openai/resources/images.mjs @@ -0,0 +1,24 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+import { APIResource } from "../resource.mjs"; +import * as Core from "../core.mjs"; +export class Images extends APIResource { + /** + * Creates a variation of a given image. + */ + createVariation(body, options) { + return this._client.post('/images/variations', Core.multipartFormRequestOptions({ body, ...options })); + } + /** + * Creates an edited or extended image given an original image and a prompt. + */ + edit(body, options) { + return this._client.post('/images/edits', Core.multipartFormRequestOptions({ body, ...options })); + } + /** + * Creates an image given a prompt. + */ + generate(body, options) { + return this._client.post('/images/generations', { body, ...options }); + } +} +//# sourceMappingURL=images.mjs.map \ No newline at end of file diff --git a/node_modules/openai/resources/images.mjs.map b/node_modules/openai/resources/images.mjs.map new file mode 100644 index 0000000..dc7aaad --- /dev/null +++ b/node_modules/openai/resources/images.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"images.mjs","sourceRoot":"","sources":["../src/resources/images.ts"],"names":[],"mappings":"AAAA,sFAAsF;OAE/E,EAAE,WAAW,EAAE;OACf,KAAK,IAAI;AAEhB,MAAM,OAAO,MAAO,SAAQ,WAAW;IACrC;;OAEG;IACH,eAAe,CACb,IAAgC,EAChC,OAA6B;QAE7B,OAAO,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,oBAAoB,EAAE,IAAI,CAAC,2BAA2B,CAAC,EAAE,IAAI,EAAE,GAAG,OAAO,EAAE,CAAC,CAAC,CAAC;IACzG,CAAC;IAED;;OAEG;IACH,IAAI,CAAC,IAAqB,EAAE,OAA6B;QACvD,OAAO,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,eAAe,EAAE,IAAI,CAAC,2BAA2B,CAAC,EAAE,IAAI,EAAE,GAAG,OAAO,EAAE,CAAC,CAAC,CAAC;IACpG,CAAC;IAED;;OAEG;IACH,QAAQ,CAAC,IAAyB,EAAE,OAA6B;QAC/D,OAAO,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,qBAAqB,EAAE,EAAE,IAAI,EAAE,GAAG,OAAO,EAAE,CAAC,CAAC;IACxE,CAAC;CACF"} \ No newline at end of file diff --git a/node_modules/openai/resources/index.d.ts b/node_modules/openai/resources/index.d.ts new file mode 100644 index 0000000..e7a9683 --- /dev/null +++ b/node_modules/openai/resources/index.d.ts @@ -0,0 +1,14 @@ +export * from "./chat/index.js"; +export * from "./shared.js"; +export { Audio, type AudioModel, type AudioResponseFormat } from "./audio/audio.js"; +export { BatchesPage, Batches, type Batch, type BatchError, type BatchRequestCounts, type BatchCreateParams, type BatchListParams, } from "./batches.js"; +export { Beta } from "./beta/beta.js"; +export { Completions, type Completion, type CompletionChoice, type CompletionUsage, type CompletionCreateParams, type CompletionCreateParamsNonStreaming, type CompletionCreateParamsStreaming, } from "./completions.js"; +export { Embeddings, type CreateEmbeddingResponse, type Embedding, type EmbeddingModel, type EmbeddingCreateParams, } from "./embeddings.js"; +export { FileObjectsPage, Files, type FileContent, type FileDeleted, type FileObject, type FilePurpose, type FileCreateParams, type FileListParams, } from "./files.js"; +export { FineTuning } from "./fine-tuning/fine-tuning.js"; +export { Images, type Image, type ImageModel, type ImagesResponse, type ImageCreateVariationParams, type ImageEditParams, type ImageGenerateParams, } from "./images.js"; +export { ModelsPage, Models, type Model, type ModelDeleted } from "./models.js"; +export { Moderations, type Moderation, type ModerationImageURLInput, type ModerationModel, type ModerationMultiModalInput, type ModerationTextInput, type ModerationCreateResponse, type ModerationCreateParams, } from "./moderations.js"; +export { Uploads, type Upload, type UploadCreateParams, type UploadCompleteParams } from "./uploads/uploads.js"; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git 
a/node_modules/openai/resources/index.d.ts.map b/node_modules/openai/resources/index.d.ts.map new file mode 100644 index 0000000..9bc11d1 --- /dev/null +++ b/node_modules/openai/resources/index.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/resources/index.ts"],"names":[],"mappings":"AAEA,cAAc,cAAc,CAAC;AAC7B,cAAc,UAAU,CAAC;AACzB,OAAO,EAAE,KAAK,EAAE,KAAK,UAAU,EAAE,KAAK,mBAAmB,EAAE,MAAM,eAAe,CAAC;AACjF,OAAO,EACL,WAAW,EACX,OAAO,EACP,KAAK,KAAK,EACV,KAAK,UAAU,EACf,KAAK,kBAAkB,EACvB,KAAK,iBAAiB,EACtB,KAAK,eAAe,GACrB,MAAM,WAAW,CAAC;AACnB,OAAO,EAAE,IAAI,EAAE,MAAM,aAAa,CAAC;AACnC,OAAO,EACL,WAAW,EACX,KAAK,UAAU,EACf,KAAK,gBAAgB,EACrB,KAAK,eAAe,EACpB,KAAK,sBAAsB,EAC3B,KAAK,kCAAkC,EACvC,KAAK,+BAA+B,GACrC,MAAM,eAAe,CAAC;AACvB,OAAO,EACL,UAAU,EACV,KAAK,uBAAuB,EAC5B,KAAK,SAAS,EACd,KAAK,cAAc,EACnB,KAAK,qBAAqB,GAC3B,MAAM,cAAc,CAAC;AACtB,OAAO,EACL,eAAe,EACf,KAAK,EACL,KAAK,WAAW,EAChB,KAAK,WAAW,EAChB,KAAK,UAAU,EACf,KAAK,WAAW,EAChB,KAAK,gBAAgB,EACrB,KAAK,cAAc,GACpB,MAAM,SAAS,CAAC;AACjB,OAAO,EAAE,UAAU,EAAE,MAAM,2BAA2B,CAAC;AACvD,OAAO,EACL,MAAM,EACN,KAAK,KAAK,EACV,KAAK,UAAU,EACf,KAAK,cAAc,EACnB,KAAK,0BAA0B,EAC/B,KAAK,eAAe,EACpB,KAAK,mBAAmB,GACzB,MAAM,UAAU,CAAC;AAClB,OAAO,EAAE,UAAU,EAAE,MAAM,EAAE,KAAK,KAAK,EAAE,KAAK,YAAY,EAAE,MAAM,UAAU,CAAC;AAC7E,OAAO,EACL,WAAW,EACX,KAAK,UAAU,EACf,KAAK,uBAAuB,EAC5B,KAAK,eAAe,EACpB,KAAK,yBAAyB,EAC9B,KAAK,mBAAmB,EACxB,KAAK,wBAAwB,EAC7B,KAAK,sBAAsB,GAC5B,MAAM,eAAe,CAAC;AACvB,OAAO,EAAE,OAAO,EAAE,KAAK,MAAM,EAAE,KAAK,kBAAkB,EAAE,KAAK,oBAAoB,EAAE,MAAM,mBAAmB,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/resources/index.js b/node_modules/openai/resources/index.js new file mode 100644 index 0000000..91cf2ee --- /dev/null +++ b/node_modules/openai/resources/index.js @@ -0,0 +1,46 @@ +"use strict"; +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Uploads = exports.Moderations = exports.Models = exports.ModelsPage = exports.Images = exports.FineTuning = exports.Files = exports.FileObjectsPage = exports.Embeddings = exports.Completions = exports.Beta = exports.Batches = exports.BatchesPage = exports.Audio = void 0; +__exportStar(require("./chat/index.js"), exports); +__exportStar(require("./shared.js"), exports); +var audio_1 = require("./audio/audio.js"); +Object.defineProperty(exports, "Audio", { enumerable: true, get: function () { return audio_1.Audio; } }); +var batches_1 = require("./batches.js"); +Object.defineProperty(exports, "BatchesPage", { enumerable: true, get: function () { return batches_1.BatchesPage; } }); +Object.defineProperty(exports, "Batches", { enumerable: true, get: function () { return batches_1.Batches; } }); +var beta_1 = require("./beta/beta.js"); +Object.defineProperty(exports, "Beta", { enumerable: true, get: function () { return beta_1.Beta; } }); +var completions_1 = require("./completions.js"); +Object.defineProperty(exports, "Completions", { enumerable: true, get: function () { return completions_1.Completions; } }); +var embeddings_1 = require("./embeddings.js"); +Object.defineProperty(exports, "Embeddings", { enumerable: true, get: function () { return embeddings_1.Embeddings; } }); +var files_1 = require("./files.js"); +Object.defineProperty(exports, "FileObjectsPage", { enumerable: true, get: function () { return files_1.FileObjectsPage; } }); +Object.defineProperty(exports, "Files", { enumerable: true, get: function () { return files_1.Files; } }); +var fine_tuning_1 = require("./fine-tuning/fine-tuning.js"); +Object.defineProperty(exports, "FineTuning", { enumerable: true, get: function () { return fine_tuning_1.FineTuning; } }); +var images_1 = require("./images.js"); +Object.defineProperty(exports, "Images", { enumerable: true, get: function () { return images_1.Images; } }); +var models_1 = require("./models.js"); +Object.defineProperty(exports, "ModelsPage", { enumerable: true, get: function () { return models_1.ModelsPage; } }); +Object.defineProperty(exports, "Models", { enumerable: true, get: function () { return models_1.Models; } }); +var moderations_1 = require("./moderations.js"); +Object.defineProperty(exports, "Moderations", { enumerable: true, get: function () { return moderations_1.Moderations; } }); +var uploads_1 = require("./uploads/uploads.js"); +Object.defineProperty(exports, "Uploads", { enumerable: true, get: function () { return uploads_1.Uploads; } }); +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/openai/resources/index.js.map b/node_modules/openai/resources/index.js.map new file mode 100644 index 0000000..c86c2bb --- /dev/null +++ b/node_modules/openai/resources/index.js.map @@ -0,0 +1 @@ 
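The `index.*` barrel files above only re-export each resource class and its types, so everything stays reachable from the root `openai` entry point; the `Models` resource they re-export is declared in `models.d.ts` just below. A small sketch of listing and retrieving models (which models appear depends on the calling account, and `gpt-4o-mini` is assumed to be available):

```ts
import OpenAI from 'openai';

const client = new OpenAI();

async function main() {
  // models.list() resolves to a plain Page whose `data` holds Model objects.
  const page = await client.models.list();
  for (const model of page.data) {
    console.log(model.id, model.owned_by);
  }

  // Retrieve a single model by ID.
  const model = await client.models.retrieve('gpt-4o-mini');
  console.log(new Date(model.created * 1000).toISOString());
}

main();
```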
+{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/resources/index.ts"],"names":[],"mappings":";AAAA,sFAAsF;;;;;;;;;;;;;;;;;AAEtF,kDAA6B;AAC7B,8CAAyB;AACzB,0CAAiF;AAAxE,8FAAA,KAAK,OAAA;AACd,wCAQmB;AAPjB,sGAAA,WAAW,OAAA;AACX,kGAAA,OAAO,OAAA;AAOT,uCAAmC;AAA1B,4FAAA,IAAI,OAAA;AACb,gDAQuB;AAPrB,0GAAA,WAAW,OAAA;AAQb,8CAMsB;AALpB,wGAAA,UAAU,OAAA;AAMZ,oCASiB;AARf,wGAAA,eAAe,OAAA;AACf,8FAAA,KAAK,OAAA;AAQP,4DAAuD;AAA9C,yGAAA,UAAU,OAAA;AACnB,sCAQkB;AAPhB,gGAAA,MAAM,OAAA;AAQR,sCAA6E;AAApE,oGAAA,UAAU,OAAA;AAAE,gGAAA,MAAM,OAAA;AAC3B,gDASuB;AARrB,0GAAA,WAAW,OAAA;AASb,gDAA6G;AAApG,kGAAA,OAAO,OAAA"} \ No newline at end of file diff --git a/node_modules/openai/resources/index.mjs b/node_modules/openai/resources/index.mjs new file mode 100644 index 0000000..e713664 --- /dev/null +++ b/node_modules/openai/resources/index.mjs @@ -0,0 +1,15 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. +export * from "./chat/index.mjs"; +export * from "./shared.mjs"; +export { Audio } from "./audio/audio.mjs"; +export { BatchesPage, Batches, } from "./batches.mjs"; +export { Beta } from "./beta/beta.mjs"; +export { Completions, } from "./completions.mjs"; +export { Embeddings, } from "./embeddings.mjs"; +export { FileObjectsPage, Files, } from "./files.mjs"; +export { FineTuning } from "./fine-tuning/fine-tuning.mjs"; +export { Images, } from "./images.mjs"; +export { ModelsPage, Models } from "./models.mjs"; +export { Moderations, } from "./moderations.mjs"; +export { Uploads } from "./uploads/uploads.mjs"; +//# sourceMappingURL=index.mjs.map \ No newline at end of file diff --git a/node_modules/openai/resources/index.mjs.map b/node_modules/openai/resources/index.mjs.map new file mode 100644 index 0000000..510bfd5 --- /dev/null +++ b/node_modules/openai/resources/index.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"index.mjs","sourceRoot":"","sources":["../src/resources/index.ts"],"names":[],"mappings":"AAAA,sFAAsF;;;OAI/E,EAAE,KAAK,EAA6C;OACpD,EACL,WAAW,EACX,OAAO,GAMR;OACM,EAAE,IAAI,EAAE;OACR,EACL,WAAW,GAOZ;OACM,EACL,UAAU,GAKX;OACM,EACL,eAAe,EACf,KAAK,GAON;OACM,EAAE,UAAU,EAAE;OACd,EACL,MAAM,GAOP;OACM,EAAE,UAAU,EAAE,MAAM,EAAiC;OACrD,EACL,WAAW,GAQZ;OACM,EAAE,OAAO,EAAmE"} \ No newline at end of file diff --git a/node_modules/openai/resources/models.d.ts b/node_modules/openai/resources/models.d.ts new file mode 100644 index 0000000..4ac2bd0 --- /dev/null +++ b/node_modules/openai/resources/models.d.ts @@ -0,0 +1,55 @@ +import { APIResource } from "../resource.js"; +import * as Core from "../core.js"; +import { Page } from "../pagination.js"; +export declare class Models extends APIResource { + /** + * Retrieves a model instance, providing basic information about the model such as + * the owner and permissioning. + */ + retrieve(model: string, options?: Core.RequestOptions): Core.APIPromise; + /** + * Lists the currently available models, and provides basic information about each + * one such as the owner and availability. + */ + list(options?: Core.RequestOptions): Core.PagePromise; + /** + * Delete a fine-tuned model. You must have the Owner role in your organization to + * delete a model. + */ + del(model: string, options?: Core.RequestOptions): Core.APIPromise; +} +/** + * Note: no pagination actually occurs yet, this is for forwards-compatibility. + */ +export declare class ModelsPage extends Page { +} +/** + * Describes an OpenAI model offering that can be used with the API. 
+ */ +export interface Model { + /** + * The model identifier, which can be referenced in the API endpoints. + */ + id: string; + /** + * The Unix timestamp (in seconds) when the model was created. + */ + created: number; + /** + * The object type, which is always "model". + */ + object: 'model'; + /** + * The organization that owns the model. + */ + owned_by: string; +} +export interface ModelDeleted { + id: string; + deleted: boolean; + object: string; +} +export declare namespace Models { + export { type Model as Model, type ModelDeleted as ModelDeleted, ModelsPage as ModelsPage }; +} +//# sourceMappingURL=models.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/resources/models.d.ts.map b/node_modules/openai/resources/models.d.ts.map new file mode 100644 index 0000000..cea19e8 --- /dev/null +++ b/node_modules/openai/resources/models.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"models.d.ts","sourceRoot":"","sources":["../src/resources/models.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,WAAW,EAAE,MAAM,aAAa,CAAC;AAC1C,OAAO,KAAK,IAAI,MAAM,SAAS,CAAC;AAChC,OAAO,EAAE,IAAI,EAAE,MAAM,eAAe,CAAC;AAErC,qBAAa,MAAO,SAAQ,WAAW;IACrC;;;OAGG;IACH,QAAQ,CAAC,KAAK,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAAG,IAAI,CAAC,UAAU,CAAC,KAAK,CAAC;IAI9E;;;OAGG;IACH,IAAI,CAAC,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAAG,IAAI,CAAC,WAAW,CAAC,UAAU,EAAE,KAAK,CAAC;IAIxE;;;OAGG;IACH,GAAG,CAAC,KAAK,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAAG,IAAI,CAAC,UAAU,CAAC,YAAY,CAAC;CAGjF;AAED;;GAEG;AACH,qBAAa,UAAW,SAAQ,IAAI,CAAC,KAAK,CAAC;CAAG;AAE9C;;GAEG;AACH,MAAM,WAAW,KAAK;IACpB;;OAEG;IACH,EAAE,EAAE,MAAM,CAAC;IAEX;;OAEG;IACH,OAAO,EAAE,MAAM,CAAC;IAEhB;;OAEG;IACH,MAAM,EAAE,OAAO,CAAC;IAEhB;;OAEG;IACH,QAAQ,EAAE,MAAM,CAAC;CAClB;AAED,MAAM,WAAW,YAAY;IAC3B,EAAE,EAAE,MAAM,CAAC;IAEX,OAAO,EAAE,OAAO,CAAC;IAEjB,MAAM,EAAE,MAAM,CAAC;CAChB;AAID,MAAM,CAAC,OAAO,WAAW,MAAM,CAAC;IAC9B,OAAO,EAAE,KAAK,KAAK,IAAI,KAAK,EAAE,KAAK,YAAY,IAAI,YAAY,EAAE,UAAU,IAAI,UAAU,EAAE,CAAC;CAC7F"} \ No newline at end of file diff --git a/node_modules/openai/resources/models.js b/node_modules/openai/resources/models.js new file mode 100644 index 0000000..42851ca --- /dev/null +++ b/node_modules/openai/resources/models.js @@ -0,0 +1,38 @@ +"use strict"; +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.ModelsPage = exports.Models = void 0; +const resource_1 = require("../resource.js"); +const pagination_1 = require("../pagination.js"); +class Models extends resource_1.APIResource { + /** + * Retrieves a model instance, providing basic information about the model such as + * the owner and permissioning. + */ + retrieve(model, options) { + return this._client.get(`/models/${model}`, options); + } + /** + * Lists the currently available models, and provides basic information about each + * one such as the owner and availability. + */ + list(options) { + return this._client.getAPIList('/models', ModelsPage, options); + } + /** + * Delete a fine-tuned model. You must have the Owner role in your organization to + * delete a model. + */ + del(model, options) { + return this._client.delete(`/models/${model}`, options); + } +} +exports.Models = Models; +/** + * Note: no pagination actually occurs yet, this is for forwards-compatibility. 
+ */ +class ModelsPage extends pagination_1.Page { +} +exports.ModelsPage = ModelsPage; +Models.ModelsPage = ModelsPage; +//# sourceMappingURL=models.js.map \ No newline at end of file diff --git a/node_modules/openai/resources/models.js.map b/node_modules/openai/resources/models.js.map new file mode 100644 index 0000000..617389d --- /dev/null +++ b/node_modules/openai/resources/models.js.map @@ -0,0 +1 @@ +{"version":3,"file":"models.js","sourceRoot":"","sources":["../src/resources/models.ts"],"names":[],"mappings":";AAAA,sFAAsF;;;AAEtF,6CAA0C;AAE1C,iDAAqC;AAErC,MAAa,MAAO,SAAQ,sBAAW;IACrC;;;OAGG;IACH,QAAQ,CAAC,KAAa,EAAE,OAA6B;QACnD,OAAO,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,WAAW,KAAK,EAAE,EAAE,OAAO,CAAC,CAAC;IACvD,CAAC;IAED;;;OAGG;IACH,IAAI,CAAC,OAA6B;QAChC,OAAO,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,SAAS,EAAE,UAAU,EAAE,OAAO,CAAC,CAAC;IACjE,CAAC;IAED;;;OAGG;IACH,GAAG,CAAC,KAAa,EAAE,OAA6B;QAC9C,OAAO,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,WAAW,KAAK,EAAE,EAAE,OAAO,CAAC,CAAC;IAC1D,CAAC;CACF;AAxBD,wBAwBC;AAED;;GAEG;AACH,MAAa,UAAW,SAAQ,iBAAW;CAAG;AAA9C,gCAA8C;AAmC9C,MAAM,CAAC,UAAU,GAAG,UAAU,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/resources/models.mjs b/node_modules/openai/resources/models.mjs new file mode 100644 index 0000000..4ee912b --- /dev/null +++ b/node_modules/openai/resources/models.mjs @@ -0,0 +1,33 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. +import { APIResource } from "../resource.mjs"; +import { Page } from "../pagination.mjs"; +export class Models extends APIResource { + /** + * Retrieves a model instance, providing basic information about the model such as + * the owner and permissioning. + */ + retrieve(model, options) { + return this._client.get(`/models/${model}`, options); + } + /** + * Lists the currently available models, and provides basic information about each + * one such as the owner and availability. + */ + list(options) { + return this._client.getAPIList('/models', ModelsPage, options); + } + /** + * Delete a fine-tuned model. You must have the Owner role in your organization to + * delete a model. + */ + del(model, options) { + return this._client.delete(`/models/${model}`, options); + } +} +/** + * Note: no pagination actually occurs yet, this is for forwards-compatibility. 
+ */ +export class ModelsPage extends Page { +} +Models.ModelsPage = ModelsPage; +//# sourceMappingURL=models.mjs.map \ No newline at end of file diff --git a/node_modules/openai/resources/models.mjs.map b/node_modules/openai/resources/models.mjs.map new file mode 100644 index 0000000..e938d84 --- /dev/null +++ b/node_modules/openai/resources/models.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"models.mjs","sourceRoot":"","sources":["../src/resources/models.ts"],"names":[],"mappings":"AAAA,sFAAsF;OAE/E,EAAE,WAAW,EAAE;OAEf,EAAE,IAAI,EAAE;AAEf,MAAM,OAAO,MAAO,SAAQ,WAAW;IACrC;;;OAGG;IACH,QAAQ,CAAC,KAAa,EAAE,OAA6B;QACnD,OAAO,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,WAAW,KAAK,EAAE,EAAE,OAAO,CAAC,CAAC;IACvD,CAAC;IAED;;;OAGG;IACH,IAAI,CAAC,OAA6B;QAChC,OAAO,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,SAAS,EAAE,UAAU,EAAE,OAAO,CAAC,CAAC;IACjE,CAAC;IAED;;;OAGG;IACH,GAAG,CAAC,KAAa,EAAE,OAA6B;QAC9C,OAAO,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,WAAW,KAAK,EAAE,EAAE,OAAO,CAAC,CAAC;IAC1D,CAAC;CACF;AAED;;GAEG;AACH,MAAM,OAAO,UAAW,SAAQ,IAAW;CAAG;AAmC9C,MAAM,CAAC,UAAU,GAAG,UAAU,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/resources/moderations.d.ts b/node_modules/openai/resources/moderations.d.ts new file mode 100644 index 0000000..58fedca --- /dev/null +++ b/node_modules/openai/resources/moderations.d.ts @@ -0,0 +1,294 @@ +import { APIResource } from "../resource.js"; +import * as Core from "../core.js"; +export declare class Moderations extends APIResource { + /** + * Classifies if text and/or image inputs are potentially harmful. Learn more in + * the [moderation guide](https://platform.openai.com/docs/guides/moderation). + */ + create(body: ModerationCreateParams, options?: Core.RequestOptions): Core.APIPromise; +} +export interface Moderation { + /** + * A list of the categories, and whether they are flagged or not. + */ + categories: Moderation.Categories; + /** + * A list of the categories along with the input type(s) that the score applies to. + */ + category_applied_input_types: Moderation.CategoryAppliedInputTypes; + /** + * A list of the categories along with their scores as predicted by model. + */ + category_scores: Moderation.CategoryScores; + /** + * Whether any of the below categories are flagged. + */ + flagged: boolean; +} +export declare namespace Moderation { + /** + * A list of the categories, and whether they are flagged or not. + */ + interface Categories { + /** + * Content that expresses, incites, or promotes harassing language towards any + * target. + */ + harassment: boolean; + /** + * Harassment content that also includes violence or serious harm towards any + * target. + */ + 'harassment/threatening': boolean; + /** + * Content that expresses, incites, or promotes hate based on race, gender, + * ethnicity, religion, nationality, sexual orientation, disability status, or + * caste. Hateful content aimed at non-protected groups (e.g., chess players) is + * harassment. + */ + hate: boolean; + /** + * Hateful content that also includes violence or serious harm towards the targeted + * group based on race, gender, ethnicity, religion, nationality, sexual + * orientation, disability status, or caste. + */ + 'hate/threatening': boolean; + /** + * Content that includes instructions or advice that facilitate the planning or + * execution of wrongdoing, or that gives advice or instruction on how to commit + * illicit acts. For example, "how to shoplift" would fit this category. 
+ */ + illicit: boolean; + /** + * Content that includes instructions or advice that facilitate the planning or + * execution of wrongdoing that also includes violence, or that gives advice or + * instruction on the procurement of any weapon. + */ + 'illicit/violent': boolean; + /** + * Content that promotes, encourages, or depicts acts of self-harm, such as + * suicide, cutting, and eating disorders. + */ + 'self-harm': boolean; + /** + * Content that encourages performing acts of self-harm, such as suicide, cutting, + * and eating disorders, or that gives instructions or advice on how to commit such + * acts. + */ + 'self-harm/instructions': boolean; + /** + * Content where the speaker expresses that they are engaging or intend to engage + * in acts of self-harm, such as suicide, cutting, and eating disorders. + */ + 'self-harm/intent': boolean; + /** + * Content meant to arouse sexual excitement, such as the description of sexual + * activity, or that promotes sexual services (excluding sex education and + * wellness). + */ + sexual: boolean; + /** + * Sexual content that includes an individual who is under 18 years old. + */ + 'sexual/minors': boolean; + /** + * Content that depicts death, violence, or physical injury. + */ + violence: boolean; + /** + * Content that depicts death, violence, or physical injury in graphic detail. + */ + 'violence/graphic': boolean; + } + /** + * A list of the categories along with the input type(s) that the score applies to. + */ + interface CategoryAppliedInputTypes { + /** + * The applied input type(s) for the category 'harassment'. + */ + harassment: Array<'text'>; + /** + * The applied input type(s) for the category 'harassment/threatening'. + */ + 'harassment/threatening': Array<'text'>; + /** + * The applied input type(s) for the category 'hate'. + */ + hate: Array<'text'>; + /** + * The applied input type(s) for the category 'hate/threatening'. + */ + 'hate/threatening': Array<'text'>; + /** + * The applied input type(s) for the category 'illicit'. + */ + illicit: Array<'text'>; + /** + * The applied input type(s) for the category 'illicit/violent'. + */ + 'illicit/violent': Array<'text'>; + /** + * The applied input type(s) for the category 'self-harm'. + */ + 'self-harm': Array<'text' | 'image'>; + /** + * The applied input type(s) for the category 'self-harm/instructions'. + */ + 'self-harm/instructions': Array<'text' | 'image'>; + /** + * The applied input type(s) for the category 'self-harm/intent'. + */ + 'self-harm/intent': Array<'text' | 'image'>; + /** + * The applied input type(s) for the category 'sexual'. + */ + sexual: Array<'text' | 'image'>; + /** + * The applied input type(s) for the category 'sexual/minors'. + */ + 'sexual/minors': Array<'text'>; + /** + * The applied input type(s) for the category 'violence'. + */ + violence: Array<'text' | 'image'>; + /** + * The applied input type(s) for the category 'violence/graphic'. + */ + 'violence/graphic': Array<'text' | 'image'>; + } + /** + * A list of the categories along with their scores as predicted by model. + */ + interface CategoryScores { + /** + * The score for the category 'harassment'. + */ + harassment: number; + /** + * The score for the category 'harassment/threatening'. + */ + 'harassment/threatening': number; + /** + * The score for the category 'hate'. + */ + hate: number; + /** + * The score for the category 'hate/threatening'. + */ + 'hate/threatening': number; + /** + * The score for the category 'illicit'. 
+ */ + illicit: number; + /** + * The score for the category 'illicit/violent'. + */ + 'illicit/violent': number; + /** + * The score for the category 'self-harm'. + */ + 'self-harm': number; + /** + * The score for the category 'self-harm/instructions'. + */ + 'self-harm/instructions': number; + /** + * The score for the category 'self-harm/intent'. + */ + 'self-harm/intent': number; + /** + * The score for the category 'sexual'. + */ + sexual: number; + /** + * The score for the category 'sexual/minors'. + */ + 'sexual/minors': number; + /** + * The score for the category 'violence'. + */ + violence: number; + /** + * The score for the category 'violence/graphic'. + */ + 'violence/graphic': number; + } +} +/** + * An object describing an image to classify. + */ +export interface ModerationImageURLInput { + /** + * Contains either an image URL or a data URL for a base64 encoded image. + */ + image_url: ModerationImageURLInput.ImageURL; + /** + * Always `image_url`. + */ + type: 'image_url'; +} +export declare namespace ModerationImageURLInput { + /** + * Contains either an image URL or a data URL for a base64 encoded image. + */ + interface ImageURL { + /** + * Either a URL of the image or the base64 encoded image data. + */ + url: string; + } +} +export type ModerationModel = 'omni-moderation-latest' | 'omni-moderation-2024-09-26' | 'text-moderation-latest' | 'text-moderation-stable'; +/** + * An object describing an image to classify. + */ +export type ModerationMultiModalInput = ModerationImageURLInput | ModerationTextInput; +/** + * An object describing text to classify. + */ +export interface ModerationTextInput { + /** + * A string of text to classify. + */ + text: string; + /** + * Always `text`. + */ + type: 'text'; +} +/** + * Represents if a given text input is potentially harmful. + */ +export interface ModerationCreateResponse { + /** + * The unique identifier for the moderation request. + */ + id: string; + /** + * The model used to generate the moderation results. + */ + model: string; + /** + * A list of moderation objects. + */ + results: Array<Moderation>; +} +export interface ModerationCreateParams { + /** + * Input (or inputs) to classify. Can be a single string, an array of strings, or + * an array of multi-modal input objects similar to other models. + */ + input: string | Array<string> | Array<ModerationMultiModalInput>; + /** + * The content moderation model you would like to use. Learn more in + * [the moderation guide](https://platform.openai.com/docs/guides/moderation), and + * learn about available models + * [here](https://platform.openai.com/docs/models#moderation).
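A minimal, illustrative call to the `Moderations` resource declared above might look like the following sketch; the input text and the model choice are placeholders, not values taken from this patch:

```ts
// Usage sketch only; the input text and model name are placeholders.
import OpenAI from 'openai';

const client = new OpenAI();

async function moderate(text: string) {
  const response = await client.moderations.create({
    input: text,
    // Optional; the omni models also accept the multi-modal input objects typed above.
    model: 'omni-moderation-latest',
  });

  const [result] = response.results;
  if (result.flagged) {
    // `categories` holds booleans, `category_scores` the per-category scores.
    const flagged = Object.entries(result.categories).filter(([, hit]) => hit);
    console.log('flagged categories:', flagged.map(([name]) => name));
  }
  return result;
}

moderate('some user-provided text').catch(console.error);
```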
+ */ + model?: (string & {}) | ModerationModel; +} +export declare namespace Moderations { + export { type Moderation as Moderation, type ModerationImageURLInput as ModerationImageURLInput, type ModerationModel as ModerationModel, type ModerationMultiModalInput as ModerationMultiModalInput, type ModerationTextInput as ModerationTextInput, type ModerationCreateResponse as ModerationCreateResponse, type ModerationCreateParams as ModerationCreateParams, }; +} +//# sourceMappingURL=moderations.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/resources/moderations.d.ts.map b/node_modules/openai/resources/moderations.d.ts.map new file mode 100644 index 0000000..771f725 --- /dev/null +++ b/node_modules/openai/resources/moderations.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"moderations.d.ts","sourceRoot":"","sources":["../src/resources/moderations.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,WAAW,EAAE,MAAM,aAAa,CAAC;AAC1C,OAAO,KAAK,IAAI,MAAM,SAAS,CAAC;AAEhC,qBAAa,WAAY,SAAQ,WAAW;IAC1C;;;OAGG;IACH,MAAM,CACJ,IAAI,EAAE,sBAAsB,EAC5B,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAC5B,IAAI,CAAC,UAAU,CAAC,wBAAwB,CAAC;CAG7C;AAED,MAAM,WAAW,UAAU;IACzB;;OAEG;IACH,UAAU,EAAE,UAAU,CAAC,UAAU,CAAC;IAElC;;OAEG;IACH,4BAA4B,EAAE,UAAU,CAAC,yBAAyB,CAAC;IAEnE;;OAEG;IACH,eAAe,EAAE,UAAU,CAAC,cAAc,CAAC;IAE3C;;OAEG;IACH,OAAO,EAAE,OAAO,CAAC;CAClB;AAED,yBAAiB,UAAU,CAAC;IAC1B;;OAEG;IACH,UAAiB,UAAU;QACzB;;;WAGG;QACH,UAAU,EAAE,OAAO,CAAC;QAEpB;;;WAGG;QACH,wBAAwB,EAAE,OAAO,CAAC;QAElC;;;;;WAKG;QACH,IAAI,EAAE,OAAO,CAAC;QAEd;;;;WAIG;QACH,kBAAkB,EAAE,OAAO,CAAC;QAE5B;;;;WAIG;QACH,OAAO,EAAE,OAAO,CAAC;QAEjB;;;;WAIG;QACH,iBAAiB,EAAE,OAAO,CAAC;QAE3B;;;WAGG;QACH,WAAW,EAAE,OAAO,CAAC;QAErB;;;;WAIG;QACH,wBAAwB,EAAE,OAAO,CAAC;QAElC;;;WAGG;QACH,kBAAkB,EAAE,OAAO,CAAC;QAE5B;;;;WAIG;QACH,MAAM,EAAE,OAAO,CAAC;QAEhB;;WAEG;QACH,eAAe,EAAE,OAAO,CAAC;QAEzB;;WAEG;QACH,QAAQ,EAAE,OAAO,CAAC;QAElB;;WAEG;QACH,kBAAkB,EAAE,OAAO,CAAC;KAC7B;IAED;;OAEG;IACH,UAAiB,yBAAyB;QACxC;;WAEG;QACH,UAAU,EAAE,KAAK,CAAC,MAAM,CAAC,CAAC;QAE1B;;WAEG;QACH,wBAAwB,EAAE,KAAK,CAAC,MAAM,CAAC,CAAC;QAExC;;WAEG;QACH,IAAI,EAAE,KAAK,CAAC,MAAM,CAAC,CAAC;QAEpB;;WAEG;QACH,kBAAkB,EAAE,KAAK,CAAC,MAAM,CAAC,CAAC;QAElC;;WAEG;QACH,OAAO,EAAE,KAAK,CAAC,MAAM,CAAC,CAAC;QAEvB;;WAEG;QACH,iBAAiB,EAAE,KAAK,CAAC,MAAM,CAAC,CAAC;QAEjC;;WAEG;QACH,WAAW,EAAE,KAAK,CAAC,MAAM,GAAG,OAAO,CAAC,CAAC;QAErC;;WAEG;QACH,wBAAwB,EAAE,KAAK,CAAC,MAAM,GAAG,OAAO,CAAC,CAAC;QAElD;;WAEG;QACH,kBAAkB,EAAE,KAAK,CAAC,MAAM,GAAG,OAAO,CAAC,CAAC;QAE5C;;WAEG;QACH,MAAM,EAAE,KAAK,CAAC,MAAM,GAAG,OAAO,CAAC,CAAC;QAEhC;;WAEG;QACH,eAAe,EAAE,KAAK,CAAC,MAAM,CAAC,CAAC;QAE/B;;WAEG;QACH,QAAQ,EAAE,KAAK,CAAC,MAAM,GAAG,OAAO,CAAC,CAAC;QAElC;;WAEG;QACH,kBAAkB,EAAE,KAAK,CAAC,MAAM,GAAG,OAAO,CAAC,CAAC;KAC7C;IAED;;OAEG;IACH,UAAiB,cAAc;QAC7B;;WAEG;QACH,UAAU,EAAE,MAAM,CAAC;QAEnB;;WAEG;QACH,wBAAwB,EAAE,MAAM,CAAC;QAEjC;;WAEG;QACH,IAAI,EAAE,MAAM,CAAC;QAEb;;WAEG;QACH,kBAAkB,EAAE,MAAM,CAAC;QAE3B;;WAEG;QACH,OAAO,EAAE,MAAM,CAAC;QAEhB;;WAEG;QACH,iBAAiB,EAAE,MAAM,CAAC;QAE1B;;WAEG;QACH,WAAW,EAAE,MAAM,CAAC;QAEpB;;WAEG;QACH,wBAAwB,EAAE,MAAM,CAAC;QAEjC;;WAEG;QACH,kBAAkB,EAAE,MAAM,CAAC;QAE3B;;WAEG;QACH,MAAM,EAAE,MAAM,CAAC;QAEf;;WAEG;QACH,eAAe,EAAE,MAAM,CAAC;QAExB;;WAEG;QACH,QAAQ,EAAE,MAAM,CAAC;QAEjB;;WAEG;QACH,kBAAkB,EAAE,MAAM,CAAC;KAC5B;CACF;AAED;;GAEG;AACH,MAAM,WAAW,uBAAuB;IACtC;;OAEG;IACH,SAAS,EAAE,uBAAuB,CAAC,QAAQ,CAAC;IAE5C;;OAEG;IACH,IAAI,EAAE,WAAW,CAAC;CACnB;AAED,yBAAiB,uBAAuB,CAAC;IACvC;;OAEG;IACH,UAAiB,QAAQ;QACvB;;WAEG;QACH,GAAG,EAAE,MAAM,CAAC;KACb;CACF;AAED,MAAM,MAAM,eAAe,GACvB,wBAAwB,GACxB,4BAA4B,GAC5B,wBAAwB,GACxB,wBAAwB,CAAC;AAE7B;;GAEG;AACH,MAAM,MAAM,yBAAyB,GAAG
,uBAAuB,GAAG,mBAAmB,CAAC;AAEtF;;GAEG;AACH,MAAM,WAAW,mBAAmB;IAClC;;OAEG;IACH,IAAI,EAAE,MAAM,CAAC;IAEb;;OAEG;IACH,IAAI,EAAE,MAAM,CAAC;CACd;AAED;;GAEG;AACH,MAAM,WAAW,wBAAwB;IACvC;;OAEG;IACH,EAAE,EAAE,MAAM,CAAC;IAEX;;OAEG;IACH,KAAK,EAAE,MAAM,CAAC;IAEd;;OAEG;IACH,OAAO,EAAE,KAAK,CAAC,UAAU,CAAC,CAAC;CAC5B;AAED,MAAM,WAAW,sBAAsB;IACrC;;;OAGG;IACH,KAAK,EAAE,MAAM,GAAG,KAAK,CAAC,MAAM,CAAC,GAAG,KAAK,CAAC,yBAAyB,CAAC,CAAC;IAEjE;;;;;OAKG;IACH,KAAK,CAAC,EAAE,CAAC,MAAM,GAAG,EAAE,CAAC,GAAG,eAAe,CAAC;CACzC;AAED,MAAM,CAAC,OAAO,WAAW,WAAW,CAAC;IACnC,OAAO,EACL,KAAK,UAAU,IAAI,UAAU,EAC7B,KAAK,uBAAuB,IAAI,uBAAuB,EACvD,KAAK,eAAe,IAAI,eAAe,EACvC,KAAK,yBAAyB,IAAI,yBAAyB,EAC3D,KAAK,mBAAmB,IAAI,mBAAmB,EAC/C,KAAK,wBAAwB,IAAI,wBAAwB,EACzD,KAAK,sBAAsB,IAAI,sBAAsB,GACtD,CAAC;CACH"} \ No newline at end of file diff --git a/node_modules/openai/resources/moderations.js b/node_modules/openai/resources/moderations.js new file mode 100644 index 0000000..da90150 --- /dev/null +++ b/node_modules/openai/resources/moderations.js @@ -0,0 +1,16 @@ +"use strict"; +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Moderations = void 0; +const resource_1 = require("../resource.js"); +class Moderations extends resource_1.APIResource { + /** + * Classifies if text and/or image inputs are potentially harmful. Learn more in + * the [moderation guide](https://platform.openai.com/docs/guides/moderation). + */ + create(body, options) { + return this._client.post('/moderations', { body, ...options }); + } +} +exports.Moderations = Moderations; +//# sourceMappingURL=moderations.js.map \ No newline at end of file diff --git a/node_modules/openai/resources/moderations.js.map b/node_modules/openai/resources/moderations.js.map new file mode 100644 index 0000000..3fdc44e --- /dev/null +++ b/node_modules/openai/resources/moderations.js.map @@ -0,0 +1 @@ +{"version":3,"file":"moderations.js","sourceRoot":"","sources":["../src/resources/moderations.ts"],"names":[],"mappings":";AAAA,sFAAsF;;;AAEtF,6CAA0C;AAG1C,MAAa,WAAY,SAAQ,sBAAW;IAC1C;;;OAGG;IACH,MAAM,CACJ,IAA4B,EAC5B,OAA6B;QAE7B,OAAO,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,cAAc,EAAE,EAAE,IAAI,EAAE,GAAG,OAAO,EAAE,CAAC,CAAC;IACjE,CAAC;CACF;AAXD,kCAWC"} \ No newline at end of file diff --git a/node_modules/openai/resources/moderations.mjs b/node_modules/openai/resources/moderations.mjs new file mode 100644 index 0000000..9a75d58 --- /dev/null +++ b/node_modules/openai/resources/moderations.mjs @@ -0,0 +1,12 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. +import { APIResource } from "../resource.mjs"; +export class Moderations extends APIResource { + /** + * Classifies if text and/or image inputs are potentially harmful. Learn more in + * the [moderation guide](https://platform.openai.com/docs/guides/moderation). 
+ */ + create(body, options) { + return this._client.post('/moderations', { body, ...options }); + } +} +//# sourceMappingURL=moderations.mjs.map \ No newline at end of file diff --git a/node_modules/openai/resources/moderations.mjs.map b/node_modules/openai/resources/moderations.mjs.map new file mode 100644 index 0000000..b694d3e --- /dev/null +++ b/node_modules/openai/resources/moderations.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"moderations.mjs","sourceRoot":"","sources":["../src/resources/moderations.ts"],"names":[],"mappings":"AAAA,sFAAsF;OAE/E,EAAE,WAAW,EAAE;AAGtB,MAAM,OAAO,WAAY,SAAQ,WAAW;IAC1C;;;OAGG;IACH,MAAM,CACJ,IAA4B,EAC5B,OAA6B;QAE7B,OAAO,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,cAAc,EAAE,EAAE,IAAI,EAAE,GAAG,OAAO,EAAE,CAAC,CAAC;IACjE,CAAC;CACF"} \ No newline at end of file diff --git a/node_modules/openai/resources/shared.d.ts b/node_modules/openai/resources/shared.d.ts new file mode 100644 index 0000000..3fd6dfd --- /dev/null +++ b/node_modules/openai/resources/shared.d.ts @@ -0,0 +1,92 @@ +export interface ErrorObject { + code: string | null; + message: string; + param: string | null; + type: string; +} +export interface FunctionDefinition { + /** + * The name of the function to be called. Must be a-z, A-Z, 0-9, or contain + * underscores and dashes, with a maximum length of 64. + */ + name: string; + /** + * A description of what the function does, used by the model to choose when and + * how to call the function. + */ + description?: string; + /** + * The parameters the functions accepts, described as a JSON Schema object. See the + * [guide](https://platform.openai.com/docs/guides/function-calling) for examples, + * and the + * [JSON Schema reference](https://json-schema.org/understanding-json-schema/) for + * documentation about the format. + * + * Omitting `parameters` defines a function with an empty parameter list. + */ + parameters?: FunctionParameters; + /** + * Whether to enable strict schema adherence when generating the function call. If + * set to true, the model will follow the exact schema defined in the `parameters` + * field. Only a subset of JSON Schema is supported when `strict` is `true`. Learn + * more about Structured Outputs in the + * [function calling guide](docs/guides/function-calling). + */ + strict?: boolean | null; +} +/** + * The parameters the functions accepts, described as a JSON Schema object. See the + * [guide](https://platform.openai.com/docs/guides/function-calling) for examples, + * and the + * [JSON Schema reference](https://json-schema.org/understanding-json-schema/) for + * documentation about the format. + * + * Omitting `parameters` defines a function with an empty parameter list. + */ +export type FunctionParameters = Record; +export interface ResponseFormatJSONObject { + /** + * The type of response format being defined: `json_object` + */ + type: 'json_object'; +} +export interface ResponseFormatJSONSchema { + json_schema: ResponseFormatJSONSchema.JSONSchema; + /** + * The type of response format being defined: `json_schema` + */ + type: 'json_schema'; +} +export declare namespace ResponseFormatJSONSchema { + interface JSONSchema { + /** + * The name of the response format. Must be a-z, A-Z, 0-9, or contain underscores + * and dashes, with a maximum length of 64. + */ + name: string; + /** + * A description of what the response format is for, used by the model to determine + * how to respond in the format. + */ + description?: string; + /** + * The schema for the response format, described as a JSON Schema object. 
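As a sketch of how the `ResponseFormatJSONSchema` shape above is typically filled in (the subpath import and the schema contents are assumptions made for illustration, not part of this patch):

```ts
// Sketch only; the subpath import and the schema contents are illustrative assumptions.
import type { ResponseFormatJSONSchema } from 'openai/resources/shared';

const weatherFormat: ResponseFormatJSONSchema = {
  type: 'json_schema',
  json_schema: {
    name: 'weather_report', // a-z, A-Z, 0-9, underscores and dashes; max length 64
    description: 'A one-city weather summary.',
    schema: {
      type: 'object',
      properties: {
        city: { type: 'string' },
        temperature_c: { type: 'number' },
      },
      required: ['city', 'temperature_c'],
      additionalProperties: false, // strict mode supports only a subset of JSON Schema
    },
    strict: true,
  },
};

export { weatherFormat };
```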
+ */ + schema?: Record; + /** + * Whether to enable strict schema adherence when generating the output. If set to + * true, the model will always follow the exact schema defined in the `schema` + * field. Only a subset of JSON Schema is supported when `strict` is `true`. To + * learn more, read the + * [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). + */ + strict?: boolean | null; + } +} +export interface ResponseFormatText { + /** + * The type of response format being defined: `text` + */ + type: 'text'; +} +//# sourceMappingURL=shared.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/resources/shared.d.ts.map b/node_modules/openai/resources/shared.d.ts.map new file mode 100644 index 0000000..c1e083f --- /dev/null +++ b/node_modules/openai/resources/shared.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"shared.d.ts","sourceRoot":"","sources":["../src/resources/shared.ts"],"names":[],"mappings":"AAEA,MAAM,WAAW,WAAW;IAC1B,IAAI,EAAE,MAAM,GAAG,IAAI,CAAC;IAEpB,OAAO,EAAE,MAAM,CAAC;IAEhB,KAAK,EAAE,MAAM,GAAG,IAAI,CAAC;IAErB,IAAI,EAAE,MAAM,CAAC;CACd;AAED,MAAM,WAAW,kBAAkB;IACjC;;;OAGG;IACH,IAAI,EAAE,MAAM,CAAC;IAEb;;;OAGG;IACH,WAAW,CAAC,EAAE,MAAM,CAAC;IAErB;;;;;;;;OAQG;IACH,UAAU,CAAC,EAAE,kBAAkB,CAAC;IAEhC;;;;;;OAMG;IACH,MAAM,CAAC,EAAE,OAAO,GAAG,IAAI,CAAC;CACzB;AAED;;;;;;;;GAQG;AACH,MAAM,MAAM,kBAAkB,GAAG,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;AAEzD,MAAM,WAAW,wBAAwB;IACvC;;OAEG;IACH,IAAI,EAAE,aAAa,CAAC;CACrB;AAED,MAAM,WAAW,wBAAwB;IACvC,WAAW,EAAE,wBAAwB,CAAC,UAAU,CAAC;IAEjD;;OAEG;IACH,IAAI,EAAE,aAAa,CAAC;CACrB;AAED,yBAAiB,wBAAwB,CAAC;IACxC,UAAiB,UAAU;QACzB;;;WAGG;QACH,IAAI,EAAE,MAAM,CAAC;QAEb;;;WAGG;QACH,WAAW,CAAC,EAAE,MAAM,CAAC;QAErB;;WAEG;QACH,MAAM,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;QAEjC;;;;;;WAMG;QACH,MAAM,CAAC,EAAE,OAAO,GAAG,IAAI,CAAC;KACzB;CACF;AAED,MAAM,WAAW,kBAAkB;IACjC;;OAEG;IACH,IAAI,EAAE,MAAM,CAAC;CACd"} \ No newline at end of file diff --git a/node_modules/openai/resources/shared.js b/node_modules/openai/resources/shared.js new file mode 100644 index 0000000..db0cae1 --- /dev/null +++ b/node_modules/openai/resources/shared.js @@ -0,0 +1,4 @@ +"use strict"; +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=shared.js.map \ No newline at end of file diff --git a/node_modules/openai/resources/shared.js.map b/node_modules/openai/resources/shared.js.map new file mode 100644 index 0000000..e532ab2 --- /dev/null +++ b/node_modules/openai/resources/shared.js.map @@ -0,0 +1 @@ +{"version":3,"file":"shared.js","sourceRoot":"","sources":["../src/resources/shared.ts"],"names":[],"mappings":";AAAA,sFAAsF"} \ No newline at end of file diff --git a/node_modules/openai/resources/shared.mjs b/node_modules/openai/resources/shared.mjs new file mode 100644 index 0000000..a88d4e1 --- /dev/null +++ b/node_modules/openai/resources/shared.mjs @@ -0,0 +1,3 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+export {}; +//# sourceMappingURL=shared.mjs.map \ No newline at end of file diff --git a/node_modules/openai/resources/shared.mjs.map b/node_modules/openai/resources/shared.mjs.map new file mode 100644 index 0000000..ac8fa86 --- /dev/null +++ b/node_modules/openai/resources/shared.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"shared.mjs","sourceRoot":"","sources":["../src/resources/shared.ts"],"names":[],"mappings":"AAAA,sFAAsF"} \ No newline at end of file diff --git a/node_modules/openai/resources/uploads/index.d.ts b/node_modules/openai/resources/uploads/index.d.ts new file mode 100644 index 0000000..745ba63 --- /dev/null +++ b/node_modules/openai/resources/uploads/index.d.ts @@ -0,0 +1,3 @@ +export { Parts, type UploadPart, type PartCreateParams } from "./parts.js"; +export { Uploads, type Upload, type UploadCreateParams, type UploadCompleteParams } from "./uploads.js"; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/resources/uploads/index.d.ts.map b/node_modules/openai/resources/uploads/index.d.ts.map new file mode 100644 index 0000000..6a681d2 --- /dev/null +++ b/node_modules/openai/resources/uploads/index.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/resources/uploads/index.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,KAAK,EAAE,KAAK,UAAU,EAAE,KAAK,gBAAgB,EAAE,MAAM,SAAS,CAAC;AACxE,OAAO,EAAE,OAAO,EAAE,KAAK,MAAM,EAAE,KAAK,kBAAkB,EAAE,KAAK,oBAAoB,EAAE,MAAM,WAAW,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/resources/uploads/index.js b/node_modules/openai/resources/uploads/index.js new file mode 100644 index 0000000..bce24df --- /dev/null +++ b/node_modules/openai/resources/uploads/index.js @@ -0,0 +1,9 @@ +"use strict"; +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Uploads = exports.Parts = void 0; +var parts_1 = require("./parts.js"); +Object.defineProperty(exports, "Parts", { enumerable: true, get: function () { return parts_1.Parts; } }); +var uploads_1 = require("./uploads.js"); +Object.defineProperty(exports, "Uploads", { enumerable: true, get: function () { return uploads_1.Uploads; } }); +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/openai/resources/uploads/index.js.map b/node_modules/openai/resources/uploads/index.js.map new file mode 100644 index 0000000..ea2e541 --- /dev/null +++ b/node_modules/openai/resources/uploads/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/resources/uploads/index.ts"],"names":[],"mappings":";AAAA,sFAAsF;;;AAEtF,oCAAwE;AAA/D,8FAAA,KAAK,OAAA;AACd,wCAAqG;AAA5F,kGAAA,OAAO,OAAA"} \ No newline at end of file diff --git a/node_modules/openai/resources/uploads/index.mjs b/node_modules/openai/resources/uploads/index.mjs new file mode 100644 index 0000000..65b5280 --- /dev/null +++ b/node_modules/openai/resources/uploads/index.mjs @@ -0,0 +1,4 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+export { Parts } from "./parts.mjs"; +export { Uploads } from "./uploads.mjs"; +//# sourceMappingURL=index.mjs.map \ No newline at end of file diff --git a/node_modules/openai/resources/uploads/index.mjs.map b/node_modules/openai/resources/uploads/index.mjs.map new file mode 100644 index 0000000..5d3a737 --- /dev/null +++ b/node_modules/openai/resources/uploads/index.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"index.mjs","sourceRoot":"","sources":["../../src/resources/uploads/index.ts"],"names":[],"mappings":"AAAA,sFAAsF;OAE/E,EAAE,KAAK,EAA0C;OACjD,EAAE,OAAO,EAAmE"} \ No newline at end of file diff --git a/node_modules/openai/resources/uploads/parts.d.ts b/node_modules/openai/resources/uploads/parts.d.ts new file mode 100644 index 0000000..15fabf3 --- /dev/null +++ b/node_modules/openai/resources/uploads/parts.d.ts @@ -0,0 +1,49 @@ +import { APIResource } from "../../resource.js"; +import * as Core from "../../core.js"; +export declare class Parts extends APIResource { + /** + * Adds a + * [Part](https://platform.openai.com/docs/api-reference/uploads/part-object) to an + * [Upload](https://platform.openai.com/docs/api-reference/uploads/object) object. + * A Part represents a chunk of bytes from the file you are trying to upload. + * + * Each Part can be at most 64 MB, and you can add Parts until you hit the Upload + * maximum of 8 GB. + * + * It is possible to add multiple Parts in parallel. You can decide the intended + * order of the Parts when you + * [complete the Upload](https://platform.openai.com/docs/api-reference/uploads/complete). + */ + create(uploadId: string, body: PartCreateParams, options?: Core.RequestOptions): Core.APIPromise; +} +/** + * The upload Part represents a chunk of bytes we can add to an Upload object. + */ +export interface UploadPart { + /** + * The upload Part unique identifier, which can be referenced in API endpoints. + */ + id: string; + /** + * The Unix timestamp (in seconds) for when the Part was created. + */ + created_at: number; + /** + * The object type, which is always `upload.part`. + */ + object: 'upload.part'; + /** + * The ID of the Upload object that this Part was added to. + */ + upload_id: string; +} +export interface PartCreateParams { + /** + * The chunk of bytes for this Part. 
+ */ + data: Core.Uploadable; +} +export declare namespace Parts { + export { type UploadPart as UploadPart, type PartCreateParams as PartCreateParams }; +} +//# sourceMappingURL=parts.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/resources/uploads/parts.d.ts.map b/node_modules/openai/resources/uploads/parts.d.ts.map new file mode 100644 index 0000000..6cd7632 --- /dev/null +++ b/node_modules/openai/resources/uploads/parts.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"parts.d.ts","sourceRoot":"","sources":["../../src/resources/uploads/parts.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,WAAW,EAAE,MAAM,gBAAgB,CAAC;AAC7C,OAAO,KAAK,IAAI,MAAM,YAAY,CAAC;AAEnC,qBAAa,KAAM,SAAQ,WAAW;IACpC;;;;;;;;;;;;OAYG;IACH,MAAM,CACJ,QAAQ,EAAE,MAAM,EAChB,IAAI,EAAE,gBAAgB,EACtB,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAC5B,IAAI,CAAC,UAAU,CAAC,UAAU,CAAC;CAM/B;AAED;;GAEG;AACH,MAAM,WAAW,UAAU;IACzB;;OAEG;IACH,EAAE,EAAE,MAAM,CAAC;IAEX;;OAEG;IACH,UAAU,EAAE,MAAM,CAAC;IAEnB;;OAEG;IACH,MAAM,EAAE,aAAa,CAAC;IAEtB;;OAEG;IACH,SAAS,EAAE,MAAM,CAAC;CACnB;AAED,MAAM,WAAW,gBAAgB;IAC/B;;OAEG;IACH,IAAI,EAAE,IAAI,CAAC,UAAU,CAAC;CACvB;AAED,MAAM,CAAC,OAAO,WAAW,KAAK,CAAC;IAC7B,OAAO,EAAE,KAAK,UAAU,IAAI,UAAU,EAAE,KAAK,gBAAgB,IAAI,gBAAgB,EAAE,CAAC;CACrF"} \ No newline at end of file diff --git a/node_modules/openai/resources/uploads/parts.js b/node_modules/openai/resources/uploads/parts.js new file mode 100644 index 0000000..f34bfcf --- /dev/null +++ b/node_modules/openai/resources/uploads/parts.js @@ -0,0 +1,49 @@ +"use strict"; +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Parts = void 0; +const resource_1 = require("../../resource.js"); +const Core = __importStar(require("../../core.js")); +class Parts extends resource_1.APIResource { + /** + * Adds a + * [Part](https://platform.openai.com/docs/api-reference/uploads/part-object) to an + * [Upload](https://platform.openai.com/docs/api-reference/uploads/object) object. + * A Part represents a chunk of bytes from the file you are trying to upload. + * + * Each Part can be at most 64 MB, and you can add Parts until you hit the Upload + * maximum of 8 GB. + * + * It is possible to add multiple Parts in parallel. You can decide the intended + * order of the Parts when you + * [complete the Upload](https://platform.openai.com/docs/api-reference/uploads/complete). 
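Because Parts are capped at 64 MB each and may be added in parallel, one way to drive `parts.create` is a chunking sketch like the one below; the helper and part names are illustrative, and it assumes the `toFile` helper exported from the `openai` package root:

```ts
// Sketch only; the chunking helper and part names are illustrative.
import OpenAI, { toFile } from 'openai';

const client = new OpenAI();
const PART_SIZE = 64 * 1024 * 1024; // each Part can be at most 64 MB

// Adds every chunk of `data` to an existing Upload in parallel and returns the Part IDs.
async function addPartsInParallel(uploadId: string, data: Buffer): Promise<string[]> {
  const chunks: Buffer[] = [];
  for (let offset = 0; offset < data.length; offset += PART_SIZE) {
    chunks.push(data.subarray(offset, offset + PART_SIZE));
  }

  const parts = await Promise.all(
    chunks.map(async (chunk, i) =>
      client.uploads.parts.create(uploadId, {
        data: await toFile(chunk, `part-${i}.bin`),
      }),
    ),
  );

  // The intended byte order is decided later, when the Upload is completed.
  return parts.map((part) => part.id);
}

export { addPartsInParallel };
```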
+ */ + create(uploadId, body, options) { + return this._client.post(`/uploads/${uploadId}/parts`, Core.multipartFormRequestOptions({ body, ...options })); + } +} +exports.Parts = Parts; +//# sourceMappingURL=parts.js.map \ No newline at end of file diff --git a/node_modules/openai/resources/uploads/parts.js.map b/node_modules/openai/resources/uploads/parts.js.map new file mode 100644 index 0000000..953fc76 --- /dev/null +++ b/node_modules/openai/resources/uploads/parts.js.map @@ -0,0 +1 @@ +{"version":3,"file":"parts.js","sourceRoot":"","sources":["../../src/resources/uploads/parts.ts"],"names":[],"mappings":";AAAA,sFAAsF;;;;;;;;;;;;;;;;;;;;;;;;;;AAEtF,gDAA6C;AAC7C,oDAAmC;AAEnC,MAAa,KAAM,SAAQ,sBAAW;IACpC;;;;;;;;;;;;OAYG;IACH,MAAM,CACJ,QAAgB,EAChB,IAAsB,EACtB,OAA6B;QAE7B,OAAO,IAAI,CAAC,OAAO,CAAC,IAAI,CACtB,YAAY,QAAQ,QAAQ,EAC5B,IAAI,CAAC,2BAA2B,CAAC,EAAE,IAAI,EAAE,GAAG,OAAO,EAAE,CAAC,CACvD,CAAC;IACJ,CAAC;CACF;AAxBD,sBAwBC"} \ No newline at end of file diff --git a/node_modules/openai/resources/uploads/parts.mjs b/node_modules/openai/resources/uploads/parts.mjs new file mode 100644 index 0000000..1235a0b --- /dev/null +++ b/node_modules/openai/resources/uploads/parts.mjs @@ -0,0 +1,22 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. +import { APIResource } from "../../resource.mjs"; +import * as Core from "../../core.mjs"; +export class Parts extends APIResource { + /** + * Adds a + * [Part](https://platform.openai.com/docs/api-reference/uploads/part-object) to an + * [Upload](https://platform.openai.com/docs/api-reference/uploads/object) object. + * A Part represents a chunk of bytes from the file you are trying to upload. + * + * Each Part can be at most 64 MB, and you can add Parts until you hit the Upload + * maximum of 8 GB. + * + * It is possible to add multiple Parts in parallel. You can decide the intended + * order of the Parts when you + * [complete the Upload](https://platform.openai.com/docs/api-reference/uploads/complete). 
+ */ + create(uploadId, body, options) { + return this._client.post(`/uploads/${uploadId}/parts`, Core.multipartFormRequestOptions({ body, ...options })); + } +} +//# sourceMappingURL=parts.mjs.map \ No newline at end of file diff --git a/node_modules/openai/resources/uploads/parts.mjs.map b/node_modules/openai/resources/uploads/parts.mjs.map new file mode 100644 index 0000000..827e4f9 --- /dev/null +++ b/node_modules/openai/resources/uploads/parts.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"parts.mjs","sourceRoot":"","sources":["../../src/resources/uploads/parts.ts"],"names":[],"mappings":"AAAA,sFAAsF;OAE/E,EAAE,WAAW,EAAE;OACf,KAAK,IAAI;AAEhB,MAAM,OAAO,KAAM,SAAQ,WAAW;IACpC;;;;;;;;;;;;OAYG;IACH,MAAM,CACJ,QAAgB,EAChB,IAAsB,EACtB,OAA6B;QAE7B,OAAO,IAAI,CAAC,OAAO,CAAC,IAAI,CACtB,YAAY,QAAQ,QAAQ,EAC5B,IAAI,CAAC,2BAA2B,CAAC,EAAE,IAAI,EAAE,GAAG,OAAO,EAAE,CAAC,CACvD,CAAC;IACJ,CAAC;CACF"} \ No newline at end of file diff --git a/node_modules/openai/resources/uploads/uploads.d.ts b/node_modules/openai/resources/uploads/uploads.d.ts new file mode 100644 index 0000000..0c7967a --- /dev/null +++ b/node_modules/openai/resources/uploads/uploads.d.ts @@ -0,0 +1,134 @@ +import { APIResource } from "../../resource.js"; +import * as Core from "../../core.js"; +import * as FilesAPI from "../files.js"; +import * as PartsAPI from "./parts.js"; +import { PartCreateParams, Parts, UploadPart } from "./parts.js"; +export declare class Uploads extends APIResource { + parts: PartsAPI.Parts; + /** + * Creates an intermediate + * [Upload](https://platform.openai.com/docs/api-reference/uploads/object) object + * that you can add + * [Parts](https://platform.openai.com/docs/api-reference/uploads/part-object) to. + * Currently, an Upload can accept at most 8 GB in total and expires after an hour + * after you create it. + * + * Once you complete the Upload, we will create a + * [File](https://platform.openai.com/docs/api-reference/files/object) object that + * contains all the parts you uploaded. This File is usable in the rest of our + * platform as a regular File object. + * + * For certain `purpose`s, the correct `mime_type` must be specified. Please refer + * to documentation for the supported MIME types for your use case: + * + * - [Assistants](https://platform.openai.com/docs/assistants/tools/file-search#supported-files) + * + * For guidance on the proper filename extensions for each purpose, please follow + * the documentation on + * [creating a File](https://platform.openai.com/docs/api-reference/files/create). + */ + create(body: UploadCreateParams, options?: Core.RequestOptions): Core.APIPromise; + /** + * Cancels the Upload. No Parts may be added after an Upload is cancelled. + */ + cancel(uploadId: string, options?: Core.RequestOptions): Core.APIPromise; + /** + * Completes the + * [Upload](https://platform.openai.com/docs/api-reference/uploads/object). + * + * Within the returned Upload object, there is a nested + * [File](https://platform.openai.com/docs/api-reference/files/object) object that + * is ready to use in the rest of the platform. + * + * You can specify the order of the Parts by passing in an ordered list of the Part + * IDs. + * + * The number of bytes uploaded upon completion must match the number of bytes + * initially specified when creating the Upload object. No Parts may be added after + * an Upload is completed. 
+ */ + complete(uploadId: string, body: UploadCompleteParams, options?: Core.RequestOptions): Core.APIPromise<Upload>; +} +/** + * The Upload object can accept byte chunks in the form of Parts. + */ +export interface Upload { + /** + * The Upload unique identifier, which can be referenced in API endpoints. + */ + id: string; + /** + * The intended number of bytes to be uploaded. + */ + bytes: number; + /** + * The Unix timestamp (in seconds) for when the Upload was created. + */ + created_at: number; + /** + * The Unix timestamp (in seconds) for when the Upload expires. + */ + expires_at: number; + /** + * The name of the file to be uploaded. + */ + filename: string; + /** + * The object type, which is always "upload". + */ + object: 'upload'; + /** + * The intended purpose of the file. + * [Please refer here](https://platform.openai.com/docs/api-reference/files/object#files/object-purpose) + * for acceptable values. + */ + purpose: string; + /** + * The status of the Upload. + */ + status: 'pending' | 'completed' | 'cancelled' | 'expired'; + /** + * The ready File object after the Upload is completed. + */ + file?: FilesAPI.FileObject | null; +} +export interface UploadCreateParams { + /** + * The number of bytes in the file you are uploading. + */ + bytes: number; + /** + * The name of the file to upload. + */ + filename: string; + /** + * The MIME type of the file. + * + * This must fall within the supported MIME types for your file purpose. See the + * supported MIME types for assistants and vision. + */ + mime_type: string; + /** + * The intended purpose of the uploaded file. + * + * See the + * [documentation on File purposes](https://platform.openai.com/docs/api-reference/files/create#files-create-purpose). + */ + purpose: FilesAPI.FilePurpose; +} +export interface UploadCompleteParams { + /** + * The ordered list of Part IDs. + */ + part_ids: Array<string>; + /** + * The optional md5 checksum for the file contents to verify if the bytes uploaded + * matches what you expect.
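Putting `UploadCreateParams` and `UploadCompleteParams` together, the lifecycle is create, then add Parts, then complete. The sketch below is illustrative only: the MIME type, purpose, and md5 handling are assumptions, and Parts are added sequentially here even though the API also allows parallel adds:

```ts
// Sketch only; the MIME type, purpose, and md5 handling are placeholder assumptions.
import { createHash } from 'node:crypto';
import OpenAI, { toFile } from 'openai';

const client = new OpenAI();
const PART_SIZE = 64 * 1024 * 1024; // Parts are capped at 64 MB each

async function uploadViaParts(filename: string, data: Buffer) {
  // 1. Create the Upload with the intended size, MIME type, and purpose.
  const upload = await client.uploads.create({
    bytes: data.length,
    filename,
    mime_type: 'text/jsonl', // must be acceptable for the chosen purpose
    purpose: 'fine-tune',
  });

  // 2. Add the Parts; here their creation order matches the intended byte order.
  const partIds: string[] = [];
  for (let offset = 0; offset < data.length; offset += PART_SIZE) {
    const part = await client.uploads.parts.create(upload.id, {
      data: await toFile(data.subarray(offset, offset + PART_SIZE), filename),
    });
    partIds.push(part.id);
  }

  // 3. Complete the Upload; `part_ids` fixes the order and `md5` lets the API verify the bytes.
  const completed = await client.uploads.complete(upload.id, {
    part_ids: partIds,
    md5: createHash('md5').update(data).digest('hex'),
  });

  return completed.file; // the nested, ready-to-use File object (nullable per the type above)
}

export { uploadViaParts };
```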
+ */ + md5?: string; +} +export declare namespace Uploads { + export { type Upload as Upload, type UploadCreateParams as UploadCreateParams, type UploadCompleteParams as UploadCompleteParams, }; + export { Parts as Parts, type UploadPart as UploadPart, type PartCreateParams as PartCreateParams }; +} +//# sourceMappingURL=uploads.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/resources/uploads/uploads.d.ts.map b/node_modules/openai/resources/uploads/uploads.d.ts.map new file mode 100644 index 0000000..da22d3a --- /dev/null +++ b/node_modules/openai/resources/uploads/uploads.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"uploads.d.ts","sourceRoot":"","sources":["../../src/resources/uploads/uploads.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,WAAW,EAAE,MAAM,gBAAgB,CAAC;AAC7C,OAAO,KAAK,IAAI,MAAM,YAAY,CAAC;AACnC,OAAO,KAAK,QAAQ,MAAM,UAAU,CAAC;AACrC,OAAO,KAAK,QAAQ,MAAM,SAAS,CAAC;AACpC,OAAO,EAAE,gBAAgB,EAAE,KAAK,EAAE,UAAU,EAAE,MAAM,SAAS,CAAC;AAE9D,qBAAa,OAAQ,SAAQ,WAAW;IACtC,KAAK,EAAE,QAAQ,CAAC,KAAK,CAAoC;IAEzD;;;;;;;;;;;;;;;;;;;;;OAqBG;IACH,MAAM,CAAC,IAAI,EAAE,kBAAkB,EAAE,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAAG,IAAI,CAAC,UAAU,CAAC,MAAM,CAAC;IAIxF;;OAEG;IACH,MAAM,CAAC,QAAQ,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAAG,IAAI,CAAC,UAAU,CAAC,MAAM,CAAC;IAIhF;;;;;;;;;;;;;;OAcG;IACH,QAAQ,CACN,QAAQ,EAAE,MAAM,EAChB,IAAI,EAAE,oBAAoB,EAC1B,OAAO,CAAC,EAAE,IAAI,CAAC,cAAc,GAC5B,IAAI,CAAC,UAAU,CAAC,MAAM,CAAC;CAG3B;AAED;;GAEG;AACH,MAAM,WAAW,MAAM;IACrB;;OAEG;IACH,EAAE,EAAE,MAAM,CAAC;IAEX;;OAEG;IACH,KAAK,EAAE,MAAM,CAAC;IAEd;;OAEG;IACH,UAAU,EAAE,MAAM,CAAC;IAEnB;;OAEG;IACH,UAAU,EAAE,MAAM,CAAC;IAEnB;;OAEG;IACH,QAAQ,EAAE,MAAM,CAAC;IAEjB;;OAEG;IACH,MAAM,EAAE,QAAQ,CAAC;IAEjB;;;;OAIG;IACH,OAAO,EAAE,MAAM,CAAC;IAEhB;;OAEG;IACH,MAAM,EAAE,SAAS,GAAG,WAAW,GAAG,WAAW,GAAG,SAAS,CAAC;IAE1D;;OAEG;IACH,IAAI,CAAC,EAAE,QAAQ,CAAC,UAAU,GAAG,IAAI,CAAC;CACnC;AAED,MAAM,WAAW,kBAAkB;IACjC;;OAEG;IACH,KAAK,EAAE,MAAM,CAAC;IAEd;;OAEG;IACH,QAAQ,EAAE,MAAM,CAAC;IAEjB;;;;;OAKG;IACH,SAAS,EAAE,MAAM,CAAC;IAElB;;;;;OAKG;IACH,OAAO,EAAE,QAAQ,CAAC,WAAW,CAAC;CAC/B;AAED,MAAM,WAAW,oBAAoB;IACnC;;OAEG;IACH,QAAQ,EAAE,KAAK,CAAC,MAAM,CAAC,CAAC;IAExB;;;OAGG;IACH,GAAG,CAAC,EAAE,MAAM,CAAC;CACd;AAID,MAAM,CAAC,OAAO,WAAW,OAAO,CAAC;IAC/B,OAAO,EACL,KAAK,MAAM,IAAI,MAAM,EACrB,KAAK,kBAAkB,IAAI,kBAAkB,EAC7C,KAAK,oBAAoB,IAAI,oBAAoB,GAClD,CAAC;IAEF,OAAO,EAAE,KAAK,IAAI,KAAK,EAAE,KAAK,UAAU,IAAI,UAAU,EAAE,KAAK,gBAAgB,IAAI,gBAAgB,EAAE,CAAC;CACrG"} \ No newline at end of file diff --git a/node_modules/openai/resources/uploads/uploads.js b/node_modules/openai/resources/uploads/uploads.js new file mode 100644 index 0000000..be3b605 --- /dev/null +++ b/node_modules/openai/resources/uploads/uploads.js @@ -0,0 +1,88 @@ +"use strict"; +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Uploads = void 0; +const resource_1 = require("../../resource.js"); +const PartsAPI = __importStar(require("./parts.js")); +const parts_1 = require("./parts.js"); +class Uploads extends resource_1.APIResource { + constructor() { + super(...arguments); + this.parts = new PartsAPI.Parts(this._client); + } + /** + * Creates an intermediate + * [Upload](https://platform.openai.com/docs/api-reference/uploads/object) object + * that you can add + * [Parts](https://platform.openai.com/docs/api-reference/uploads/part-object) to. + * Currently, an Upload can accept at most 8 GB in total and expires after an hour + * after you create it. + * + * Once you complete the Upload, we will create a + * [File](https://platform.openai.com/docs/api-reference/files/object) object that + * contains all the parts you uploaded. This File is usable in the rest of our + * platform as a regular File object. + * + * For certain `purpose`s, the correct `mime_type` must be specified. Please refer + * to documentation for the supported MIME types for your use case: + * + * - [Assistants](https://platform.openai.com/docs/assistants/tools/file-search#supported-files) + * + * For guidance on the proper filename extensions for each purpose, please follow + * the documentation on + * [creating a File](https://platform.openai.com/docs/api-reference/files/create). + */ + create(body, options) { + return this._client.post('/uploads', { body, ...options }); + } + /** + * Cancels the Upload. No Parts may be added after an Upload is cancelled. + */ + cancel(uploadId, options) { + return this._client.post(`/uploads/${uploadId}/cancel`, options); + } + /** + * Completes the + * [Upload](https://platform.openai.com/docs/api-reference/uploads/object). + * + * Within the returned Upload object, there is a nested + * [File](https://platform.openai.com/docs/api-reference/files/object) object that + * is ready to use in the rest of the platform. + * + * You can specify the order of the Parts by passing in an ordered list of the Part + * IDs. + * + * The number of bytes uploaded upon completion must match the number of bytes + * initially specified when creating the Upload object. No Parts may be added after + * an Upload is completed. 
+ */ + complete(uploadId, body, options) { + return this._client.post(`/uploads/${uploadId}/complete`, { body, ...options }); + } +} +exports.Uploads = Uploads; +Uploads.Parts = parts_1.Parts; +//# sourceMappingURL=uploads.js.map \ No newline at end of file diff --git a/node_modules/openai/resources/uploads/uploads.js.map b/node_modules/openai/resources/uploads/uploads.js.map new file mode 100644 index 0000000..6094807 --- /dev/null +++ b/node_modules/openai/resources/uploads/uploads.js.map @@ -0,0 +1 @@ +{"version":3,"file":"uploads.js","sourceRoot":"","sources":["../../src/resources/uploads/uploads.ts"],"names":[],"mappings":";AAAA,sFAAsF;;;;;;;;;;;;;;;;;;;;;;;;;;AAEtF,gDAA6C;AAG7C,qDAAoC;AACpC,sCAA8D;AAE9D,MAAa,OAAQ,SAAQ,sBAAW;IAAxC;;QACE,UAAK,GAAmB,IAAI,QAAQ,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;IAyD3D,CAAC;IAvDC;;;;;;;;;;;;;;;;;;;;;OAqBG;IACH,MAAM,CAAC,IAAwB,EAAE,OAA6B;QAC5D,OAAO,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,UAAU,EAAE,EAAE,IAAI,EAAE,GAAG,OAAO,EAAE,CAAC,CAAC;IAC7D,CAAC;IAED;;OAEG;IACH,MAAM,CAAC,QAAgB,EAAE,OAA6B;QACpD,OAAO,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,YAAY,QAAQ,SAAS,EAAE,OAAO,CAAC,CAAC;IACnE,CAAC;IAED;;;;;;;;;;;;;;OAcG;IACH,QAAQ,CACN,QAAgB,EAChB,IAA0B,EAC1B,OAA6B;QAE7B,OAAO,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,YAAY,QAAQ,WAAW,EAAE,EAAE,IAAI,EAAE,GAAG,OAAO,EAAE,CAAC,CAAC;IAClF,CAAC;CACF;AA1DD,0BA0DC;AA+FD,OAAO,CAAC,KAAK,GAAG,aAAK,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/resources/uploads/uploads.mjs b/node_modules/openai/resources/uploads/uploads.mjs new file mode 100644 index 0000000..3a71812 --- /dev/null +++ b/node_modules/openai/resources/uploads/uploads.mjs @@ -0,0 +1,61 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. +import { APIResource } from "../../resource.mjs"; +import * as PartsAPI from "./parts.mjs"; +import { Parts } from "./parts.mjs"; +export class Uploads extends APIResource { + constructor() { + super(...arguments); + this.parts = new PartsAPI.Parts(this._client); + } + /** + * Creates an intermediate + * [Upload](https://platform.openai.com/docs/api-reference/uploads/object) object + * that you can add + * [Parts](https://platform.openai.com/docs/api-reference/uploads/part-object) to. + * Currently, an Upload can accept at most 8 GB in total and expires after an hour + * after you create it. + * + * Once you complete the Upload, we will create a + * [File](https://platform.openai.com/docs/api-reference/files/object) object that + * contains all the parts you uploaded. This File is usable in the rest of our + * platform as a regular File object. + * + * For certain `purpose`s, the correct `mime_type` must be specified. Please refer + * to documentation for the supported MIME types for your use case: + * + * - [Assistants](https://platform.openai.com/docs/assistants/tools/file-search#supported-files) + * + * For guidance on the proper filename extensions for each purpose, please follow + * the documentation on + * [creating a File](https://platform.openai.com/docs/api-reference/files/create). + */ + create(body, options) { + return this._client.post('/uploads', { body, ...options }); + } + /** + * Cancels the Upload. No Parts may be added after an Upload is cancelled. + */ + cancel(uploadId, options) { + return this._client.post(`/uploads/${uploadId}/cancel`, options); + } + /** + * Completes the + * [Upload](https://platform.openai.com/docs/api-reference/uploads/object). 
+ * + * Within the returned Upload object, there is a nested + * [File](https://platform.openai.com/docs/api-reference/files/object) object that + * is ready to use in the rest of the platform. + * + * You can specify the order of the Parts by passing in an ordered list of the Part + * IDs. + * + * The number of bytes uploaded upon completion must match the number of bytes + * initially specified when creating the Upload object. No Parts may be added after + * an Upload is completed. + */ + complete(uploadId, body, options) { + return this._client.post(`/uploads/${uploadId}/complete`, { body, ...options }); + } +} +Uploads.Parts = Parts; +//# sourceMappingURL=uploads.mjs.map \ No newline at end of file diff --git a/node_modules/openai/resources/uploads/uploads.mjs.map b/node_modules/openai/resources/uploads/uploads.mjs.map new file mode 100644 index 0000000..a72141d --- /dev/null +++ b/node_modules/openai/resources/uploads/uploads.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"uploads.mjs","sourceRoot":"","sources":["../../src/resources/uploads/uploads.ts"],"names":[],"mappings":"AAAA,sFAAsF;OAE/E,EAAE,WAAW,EAAE;OAGf,KAAK,QAAQ;OACb,EAAoB,KAAK,EAAc;AAE9C,MAAM,OAAO,OAAQ,SAAQ,WAAW;IAAxC;;QACE,UAAK,GAAmB,IAAI,QAAQ,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;IAyD3D,CAAC;IAvDC;;;;;;;;;;;;;;;;;;;;;OAqBG;IACH,MAAM,CAAC,IAAwB,EAAE,OAA6B;QAC5D,OAAO,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,UAAU,EAAE,EAAE,IAAI,EAAE,GAAG,OAAO,EAAE,CAAC,CAAC;IAC7D,CAAC;IAED;;OAEG;IACH,MAAM,CAAC,QAAgB,EAAE,OAA6B;QACpD,OAAO,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,YAAY,QAAQ,SAAS,EAAE,OAAO,CAAC,CAAC;IACnE,CAAC;IAED;;;;;;;;;;;;;;OAcG;IACH,QAAQ,CACN,QAAgB,EAChB,IAA0B,EAC1B,OAA6B;QAE7B,OAAO,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,YAAY,QAAQ,WAAW,EAAE,EAAE,IAAI,EAAE,GAAG,OAAO,EAAE,CAAC,CAAC;IAClF,CAAC;CACF;AA+FD,OAAO,CAAC,KAAK,GAAG,KAAK,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/shims/node.d.ts b/node_modules/openai/shims/node.d.ts new file mode 100644 index 0000000..86fc310 --- /dev/null +++ b/node_modules/openai/shims/node.d.ts @@ -0,0 +1,30 @@ + + + + +import * as types from "../_shims/node-types.js"; +declare module '../_shims/manual-types' { + namespace manual { + type Agent = types.Agent; + export import fetch = types.fetch; + type Request = types.Request; + type RequestInfo = types.RequestInfo; + type RequestInit = types.RequestInit; + type Response = types.Response; + type ResponseInit = types.ResponseInit; + type ResponseType = types.ResponseType; + type BodyInit = types.BodyInit; + type Headers = types.Headers; + type HeadersInit = types.HeadersInit; + type BlobPropertyBag = types.BlobPropertyBag; + type FilePropertyBag = types.FilePropertyBag; + type FileFromPathOptions = types.FileFromPathOptions; + export import FormData = types.FormData; + export import File = types.File; + export import Blob = types.Blob; + type Readable = types.Readable; + type FsReadStream = types.FsReadStream; + export import ReadableStream = types.ReadableStream; + } +} +//# sourceMappingURL=node.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/shims/node.d.ts.map b/node_modules/openai/shims/node.d.ts.map new file mode 100644 index 0000000..bba8760 --- /dev/null +++ b/node_modules/openai/shims/node.d.ts.map @@ -0,0 +1 @@ 
+{"version":3,"file":"node.d.ts","sourceRoot":"","sources":["../src/shims/node.ts"],"names":[],"mappings":";;;;AACA,OAAO,KAAK,KAAK,MAAM,sBAAsB,CAAC;AAK9C,OAAO,QAAQ,wBAAwB,CAAC;IACtC,UAAiB,MAAM,CAAC;QAEtB,KAAY,KAAK,GAAG,KAAK,CAAC,KAAK,CAAC;QAEhC,MAAM,QAAQ,KAAK,GAAG,KAAK,CAAC,KAAK,CAAC;QAElC,KAAY,OAAO,GAAG,KAAK,CAAC,OAAO,CAAC;QAEpC,KAAY,WAAW,GAAG,KAAK,CAAC,WAAW,CAAC;QAE5C,KAAY,WAAW,GAAG,KAAK,CAAC,WAAW,CAAC;QAE5C,KAAY,QAAQ,GAAG,KAAK,CAAC,QAAQ,CAAC;QAEtC,KAAY,YAAY,GAAG,KAAK,CAAC,YAAY,CAAC;QAE9C,KAAY,YAAY,GAAG,KAAK,CAAC,YAAY,CAAC;QAE9C,KAAY,QAAQ,GAAG,KAAK,CAAC,QAAQ,CAAC;QAEtC,KAAY,OAAO,GAAG,KAAK,CAAC,OAAO,CAAC;QAEpC,KAAY,WAAW,GAAG,KAAK,CAAC,WAAW,CAAC;QAE5C,KAAY,eAAe,GAAG,KAAK,CAAC,eAAe,CAAC;QAEpD,KAAY,eAAe,GAAG,KAAK,CAAC,eAAe,CAAC;QAEpD,KAAY,mBAAmB,GAAG,KAAK,CAAC,mBAAmB,CAAC;QAE5D,MAAM,QAAQ,QAAQ,GAAG,KAAK,CAAC,QAAQ,CAAC;QAExC,MAAM,QAAQ,IAAI,GAAG,KAAK,CAAC,IAAI,CAAC;QAEhC,MAAM,QAAQ,IAAI,GAAG,KAAK,CAAC,IAAI,CAAC;QAEhC,KAAY,QAAQ,GAAG,KAAK,CAAC,QAAQ,CAAC;QAEtC,KAAY,YAAY,GAAG,KAAK,CAAC,YAAY,CAAC;QAE9C,MAAM,QAAQ,cAAc,GAAG,KAAK,CAAC,cAAc,CAAC;KACrD;CACF"} \ No newline at end of file diff --git a/node_modules/openai/shims/node.js b/node_modules/openai/shims/node.js new file mode 100644 index 0000000..9fb2b01 --- /dev/null +++ b/node_modules/openai/shims/node.js @@ -0,0 +1,31 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +// @ts-ignore +const types = __importStar(require("../_shims/node-types.js")); +const registry_1 = require("../_shims/registry.js"); +const node_runtime_1 = require("../_shims/node-runtime.js"); +(0, registry_1.setShims)((0, node_runtime_1.getRuntime)()); +//# sourceMappingURL=node.js.map \ No newline at end of file diff --git a/node_modules/openai/shims/node.js.map b/node_modules/openai/shims/node.js.map new file mode 100644 index 0000000..181c62e --- /dev/null +++ b/node_modules/openai/shims/node.js.map @@ -0,0 +1 @@ +{"version":3,"file":"node.js","sourceRoot":"","sources":["../src/shims/node.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;AAAA,aAAa;AACb,+DAA8C;AAC9C,oDAA8C;AAC9C,4DAAoD;AACpD,IAAA,mBAAQ,EAAC,IAAA,yBAAU,GAAE,CAAC,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/shims/node.mjs b/node_modules/openai/shims/node.mjs new file mode 100644 index 0000000..18b4463 --- /dev/null +++ b/node_modules/openai/shims/node.mjs @@ -0,0 +1,5 @@ +import * as types from "../_shims/node-types.mjs"; +import { setShims } from "../_shims/registry.mjs"; +import { getRuntime } from "../_shims/node-runtime.mjs"; +setShims(getRuntime()); +//# sourceMappingURL=node.mjs.map \ No newline at end of file diff --git a/node_modules/openai/shims/node.mjs.map b/node_modules/openai/shims/node.mjs.map new file mode 100644 index 0000000..3f0a9f1 --- /dev/null +++ b/node_modules/openai/shims/node.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"node.mjs","sourceRoot":"","sources":["../src/shims/node.ts"],"names":[],"mappings":"OACO,KAAK,KAAK;OACV,EAAE,QAAQ,EAAE;OACZ,EAAE,UAAU,EAAE;AACrB,QAAQ,CAAC,UAAU,EAAE,CAAC,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/shims/web.d.ts b/node_modules/openai/shims/web.d.ts new file mode 100644 index 0000000..3248079 --- /dev/null +++ b/node_modules/openai/shims/web.d.ts @@ -0,0 +1,26 @@ +import * as types from "../_shims/web-types.js"; +declare module '../_shims/manual-types' { + namespace manual { + type Agent = types.Agent; + export import fetch = types.fetch; + type Request = types.Request; + type RequestInfo = types.RequestInfo; + type RequestInit = types.RequestInit; + type Response = types.Response; + type ResponseInit = types.ResponseInit; + type ResponseType = types.ResponseType; + type BodyInit = types.BodyInit; + type Headers = types.Headers; + type HeadersInit = types.HeadersInit; + type BlobPropertyBag = types.BlobPropertyBag; + type FilePropertyBag = types.FilePropertyBag; + type FileFromPathOptions = types.FileFromPathOptions; + export import FormData = types.FormData; + export import File = types.File; + export import Blob = types.Blob; + type Readable = types.Readable; + type FsReadStream = types.FsReadStream; + export import ReadableStream = types.ReadableStream; + } +} +//# sourceMappingURL=web.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/shims/web.d.ts.map b/node_modules/openai/shims/web.d.ts.map new file mode 100644 index 0000000..b478965 --- /dev/null +++ 
b/node_modules/openai/shims/web.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"web.d.ts","sourceRoot":"","sources":["../src/shims/web.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,KAAK,MAAM,qBAAqB,CAAC;AAK7C,OAAO,QAAQ,wBAAwB,CAAC;IACtC,UAAiB,MAAM,CAAC;QAEtB,KAAY,KAAK,GAAG,KAAK,CAAC,KAAK,CAAC;QAEhC,MAAM,QAAQ,KAAK,GAAG,KAAK,CAAC,KAAK,CAAC;QAElC,KAAY,OAAO,GAAG,KAAK,CAAC,OAAO,CAAC;QAEpC,KAAY,WAAW,GAAG,KAAK,CAAC,WAAW,CAAC;QAE5C,KAAY,WAAW,GAAG,KAAK,CAAC,WAAW,CAAC;QAE5C,KAAY,QAAQ,GAAG,KAAK,CAAC,QAAQ,CAAC;QAEtC,KAAY,YAAY,GAAG,KAAK,CAAC,YAAY,CAAC;QAE9C,KAAY,YAAY,GAAG,KAAK,CAAC,YAAY,CAAC;QAE9C,KAAY,QAAQ,GAAG,KAAK,CAAC,QAAQ,CAAC;QAEtC,KAAY,OAAO,GAAG,KAAK,CAAC,OAAO,CAAC;QAEpC,KAAY,WAAW,GAAG,KAAK,CAAC,WAAW,CAAC;QAE5C,KAAY,eAAe,GAAG,KAAK,CAAC,eAAe,CAAC;QAEpD,KAAY,eAAe,GAAG,KAAK,CAAC,eAAe,CAAC;QAEpD,KAAY,mBAAmB,GAAG,KAAK,CAAC,mBAAmB,CAAC;QAE5D,MAAM,QAAQ,QAAQ,GAAG,KAAK,CAAC,QAAQ,CAAC;QAExC,MAAM,QAAQ,IAAI,GAAG,KAAK,CAAC,IAAI,CAAC;QAEhC,MAAM,QAAQ,IAAI,GAAG,KAAK,CAAC,IAAI,CAAC;QAEhC,KAAY,QAAQ,GAAG,KAAK,CAAC,QAAQ,CAAC;QAEtC,KAAY,YAAY,GAAG,KAAK,CAAC,YAAY,CAAC;QAE9C,MAAM,QAAQ,cAAc,GAAG,KAAK,CAAC,cAAc,CAAC;KACrD;CACF"} \ No newline at end of file diff --git a/node_modules/openai/shims/web.js b/node_modules/openai/shims/web.js new file mode 100644 index 0000000..2a1d019 --- /dev/null +++ b/node_modules/openai/shims/web.js @@ -0,0 +1,31 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +// @ts-ignore +const types = __importStar(require("../_shims/web-types.js")); +const registry_1 = require("../_shims/registry.js"); +const web_runtime_1 = require("../_shims/web-runtime.js"); +(0, registry_1.setShims)((0, web_runtime_1.getRuntime)({ manuallyImported: true })); +//# sourceMappingURL=web.js.map \ No newline at end of file diff --git a/node_modules/openai/shims/web.js.map b/node_modules/openai/shims/web.js.map new file mode 100644 index 0000000..3eb0f4c --- /dev/null +++ b/node_modules/openai/shims/web.js.map @@ -0,0 +1 @@ +{"version":3,"file":"web.js","sourceRoot":"","sources":["../src/shims/web.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;AAAA,aAAa;AACb,8DAA6C;AAC7C,oDAA8C;AAC9C,0DAAmD;AACnD,IAAA,mBAAQ,EAAC,IAAA,wBAAU,EAAC,EAAE,gBAAgB,EAAE,IAAI,EAAE,CAAC,CAAC,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/shims/web.mjs b/node_modules/openai/shims/web.mjs new file mode 100644 index 0000000..aa1b96f --- /dev/null +++ b/node_modules/openai/shims/web.mjs @@ -0,0 +1,5 @@ +import * as types from "../_shims/web-types.mjs"; +import { setShims } from "../_shims/registry.mjs"; +import { getRuntime } from "../_shims/web-runtime.mjs"; +setShims(getRuntime({ manuallyImported: true })); +//# sourceMappingURL=web.mjs.map \ No newline at end of file diff --git a/node_modules/openai/shims/web.mjs.map b/node_modules/openai/shims/web.mjs.map new file mode 100644 index 0000000..0ccbb72 --- /dev/null +++ b/node_modules/openai/shims/web.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"web.mjs","sourceRoot":"","sources":["../src/shims/web.ts"],"names":[],"mappings":"OACO,KAAK,KAAK;OACV,EAAE,QAAQ,EAAE;OACZ,EAAE,UAAU,EAAE;AACrB,QAAQ,CAAC,UAAU,CAAC,EAAE,gBAAgB,EAAE,IAAI,EAAE,CAAC,CAAC,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/src/_shims/MultipartBody.ts b/node_modules/openai/src/_shims/MultipartBody.ts new file mode 100644 index 0000000..af3b111 --- /dev/null +++ b/node_modules/openai/src/_shims/MultipartBody.ts @@ -0,0 +1,9 @@ +/** + * Disclaimer: modules in _shims aren't intended to be imported by SDK users. + */ +export class MultipartBody { + constructor(public body: any) {} + get [Symbol.toStringTag](): string { + return 'MultipartBody'; + } +} diff --git a/node_modules/openai/src/_shims/README.md b/node_modules/openai/src/_shims/README.md new file mode 100644 index 0000000..b3a349b --- /dev/null +++ b/node_modules/openai/src/_shims/README.md @@ -0,0 +1,46 @@ +# 👋 Wondering what everything in here does? + +`openai` supports a wide variety of runtime environments like Node.js, Deno, Bun, browsers, and various +edge runtimes, as well as both CommonJS (CJS) and EcmaScript Modules (ESM). + +To do this, `openai` provides shims for either using `node-fetch` when in Node (because `fetch` is still experimental there) or the global `fetch` API built into the environment when not in Node. 
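In practice, the manual override this README goes on to describe is a single side-effect import that has to run before anything else is imported from `openai`; a minimal sketch for the Node case:

```ts
// Sketch only; forces the Node shims when conditional exports are not consulted.
import 'openai/shims/node'; // must run before anything else is imported from 'openai'
import OpenAI from 'openai';

export const client = new OpenAI();
```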
+ +It uses [conditional exports](https://nodejs.org/api/packages.html#conditional-exports) to +automatically select the correct shims for each environment. However, conditional exports are a fairly new +feature and not supported everywhere. For instance, the TypeScript `"moduleResolution": "node"` + +setting doesn't consult the `exports` map, compared to `"moduleResolution": "nodeNext"`, which does. +Unfortunately that's still the default setting, and it can result in errors like +getting the wrong raw `Response` type from `.asResponse()`, for example. + +The user can work around these issues by manually importing one of: + +- `import 'openai/shims/node'` +- `import 'openai/shims/web'` + +All of the code here in `_shims` handles selecting the automatic default shims or manual overrides. + +### How it works - Runtime + +Runtime shims get installed by calling `setShims` exported by `openai/_shims/registry`. + +Manually importing `openai/shims/node` or `openai/shims/web`, calls `setShims` with the respective runtime shims. + +All client code imports shims from `openai/_shims/index`, which: + +- checks if shims have been set manually +- if not, calls `setShims` with the shims from `openai/_shims/auto/runtime` +- re-exports the installed shims from `openai/_shims/registry`. + +`openai/_shims/auto/runtime` exports web runtime shims. +If the `node` export condition is set, the export map replaces it with `openai/_shims/auto/runtime-node`. + +### How it works - Type time + +All client code imports shim types from `openai/_shims/index`, which selects the manual types from `openai/_shims/manual-types` if they have been declared, otherwise it exports the auto types from `openai/_shims/auto/types`. + +`openai/_shims/manual-types` exports an empty namespace. +Manually importing `openai/shims/node` or `openai/shims/web` merges declarations into this empty namespace, so they get picked up by `openai/_shims/index`. + +`openai/_shims/auto/types` exports web type definitions. +If the `node` export condition is set, the export map replaces it with `openai/_shims/auto/types-node`, though TS only picks this up if `"moduleResolution": "nodenext"` or `"moduleResolution": "bundler"`. diff --git a/node_modules/openai/src/_shims/auto/runtime-bun.ts b/node_modules/openai/src/_shims/auto/runtime-bun.ts new file mode 100644 index 0000000..e053254 --- /dev/null +++ b/node_modules/openai/src/_shims/auto/runtime-bun.ts @@ -0,0 +1,4 @@ +/** + * Disclaimer: modules in _shims aren't intended to be imported by SDK users. + */ +export * from '../bun-runtime'; diff --git a/node_modules/openai/src/_shims/auto/runtime-node.ts b/node_modules/openai/src/_shims/auto/runtime-node.ts new file mode 100644 index 0000000..0ae2216 --- /dev/null +++ b/node_modules/openai/src/_shims/auto/runtime-node.ts @@ -0,0 +1,4 @@ +/** + * Disclaimer: modules in _shims aren't intended to be imported by SDK users. + */ +export * from '../node-runtime'; diff --git a/node_modules/openai/src/_shims/auto/runtime.ts b/node_modules/openai/src/_shims/auto/runtime.ts new file mode 100644 index 0000000..62b7a39 --- /dev/null +++ b/node_modules/openai/src/_shims/auto/runtime.ts @@ -0,0 +1,4 @@ +/** + * Disclaimer: modules in _shims aren't intended to be imported by SDK users. 
+ */ +export * from '../web-runtime'; diff --git a/node_modules/openai/src/_shims/auto/types-node.ts b/node_modules/openai/src/_shims/auto/types-node.ts new file mode 100644 index 0000000..2625a8b --- /dev/null +++ b/node_modules/openai/src/_shims/auto/types-node.ts @@ -0,0 +1,4 @@ +/** + * Disclaimer: modules in _shims aren't intended to be imported by SDK users. + */ +export * from '../node-types'; diff --git a/node_modules/openai/src/_shims/auto/types.d.ts b/node_modules/openai/src/_shims/auto/types.d.ts new file mode 100644 index 0000000..8fbedac --- /dev/null +++ b/node_modules/openai/src/_shims/auto/types.d.ts @@ -0,0 +1,101 @@ +/** + * Disclaimer: modules in _shims aren't intended to be imported by SDK users. + */ +export type Agent = any; + +// @ts-ignore +declare const _fetch: typeof fetch; +export { _fetch as fetch }; + +// @ts-ignore +type _Request = Request; +export { _Request as Request }; + +// @ts-ignore +type _RequestInfo = RequestInfo; +export { type _RequestInfo as RequestInfo }; + +// @ts-ignore +type _RequestInit = RequestInit; +export { type _RequestInit as RequestInit }; + +// @ts-ignore +type _Response = Response; +export { _Response as Response }; + +// @ts-ignore +type _ResponseInit = ResponseInit; +export { type _ResponseInit as ResponseInit }; + +// @ts-ignore +type _ResponseType = ResponseType; +export { type _ResponseType as ResponseType }; + +// @ts-ignore +type _BodyInit = BodyInit; +export { type _BodyInit as BodyInit }; + +// @ts-ignore +type _Headers = Headers; +export { _Headers as Headers }; + +// @ts-ignore +type _HeadersInit = HeadersInit; +export { type _HeadersInit as HeadersInit }; + +type EndingType = 'native' | 'transparent'; + +export interface BlobPropertyBag { + endings?: EndingType; + type?: string; +} + +export interface FilePropertyBag extends BlobPropertyBag { + lastModified?: number; +} + +export type FileFromPathOptions = Omit; + +// @ts-ignore +type _FormData = FormData; +// @ts-ignore +declare const _FormData: typeof FormData; +export { _FormData as FormData }; + +// @ts-ignore +type _File = File; +// @ts-ignore +declare const _File: typeof File; +export { _File as File }; + +// @ts-ignore +type _Blob = Blob; +// @ts-ignore +declare const _Blob: typeof Blob; +export { _Blob as Blob }; + +export declare class Readable { + readable: boolean; + readonly readableEnded: boolean; + readonly readableFlowing: boolean | null; + readonly readableHighWaterMark: number; + readonly readableLength: number; + readonly readableObjectMode: boolean; + destroyed: boolean; + read(size?: number): any; + pause(): this; + resume(): this; + isPaused(): boolean; + destroy(error?: Error): this; + [Symbol.asyncIterator](): AsyncIterableIterator; +} + +export declare class FsReadStream extends Readable { + path: {}; // node type is string | Buffer +} + +// @ts-ignore +type _ReadableStream = ReadableStream; +// @ts-ignore +declare const _ReadableStream: typeof ReadableStream; +export { _ReadableStream as ReadableStream }; diff --git a/node_modules/openai/src/_shims/auto/types.js b/node_modules/openai/src/_shims/auto/types.js new file mode 100644 index 0000000..ddbdb79 --- /dev/null +++ b/node_modules/openai/src/_shims/auto/types.js @@ -0,0 +1,3 @@ +/** + * Disclaimer: modules in _shims aren't intended to be imported by SDK users. 
+ */ diff --git a/node_modules/openai/src/_shims/auto/types.mjs b/node_modules/openai/src/_shims/auto/types.mjs new file mode 100644 index 0000000..ddbdb79 --- /dev/null +++ b/node_modules/openai/src/_shims/auto/types.mjs @@ -0,0 +1,3 @@ +/** + * Disclaimer: modules in _shims aren't intended to be imported by SDK users. + */ diff --git a/node_modules/openai/src/_shims/bun-runtime.ts b/node_modules/openai/src/_shims/bun-runtime.ts new file mode 100644 index 0000000..8d5aaab --- /dev/null +++ b/node_modules/openai/src/_shims/bun-runtime.ts @@ -0,0 +1,14 @@ +/** + * Disclaimer: modules in _shims aren't intended to be imported by SDK users. + */ +import { type Shims } from './registry'; +import { getRuntime as getWebRuntime } from './web-runtime'; +import { ReadStream as FsReadStream } from 'node:fs'; + +export function getRuntime(): Shims { + const runtime = getWebRuntime(); + function isFsReadStream(value: any): value is FsReadStream { + return value instanceof FsReadStream; + } + return { ...runtime, isFsReadStream }; +} diff --git a/node_modules/openai/src/_shims/index.d.ts b/node_modules/openai/src/_shims/index.d.ts new file mode 100644 index 0000000..646e7fe --- /dev/null +++ b/node_modules/openai/src/_shims/index.d.ts @@ -0,0 +1,81 @@ +/** + * Disclaimer: modules in _shims aren't intended to be imported by SDK users. + */ +import { manual } from './manual-types'; +import * as auto from "./auto/types"; +import { type RequestOptions } from '../core'; + +type SelectType = unknown extends Manual ? Auto : Manual; + +export const kind: string; + +// @ts-ignore +export type Agent = SelectType; + +// @ts-ignore +export const fetch: SelectType; + +// @ts-ignore +export type Request = SelectType; +// @ts-ignore +export type RequestInfo = SelectType; +// @ts-ignore +export type RequestInit = SelectType; + +// @ts-ignore +export type Response = SelectType; +// @ts-ignore +export type ResponseInit = SelectType; +// @ts-ignore +export type ResponseType = SelectType; +// @ts-ignore +export type BodyInit = SelectType; +// @ts-ignore +export type Headers = SelectType; +// @ts-ignore +export const Headers: SelectType; +// @ts-ignore +export type HeadersInit = SelectType; + +// @ts-ignore +export type BlobPropertyBag = SelectType; +// @ts-ignore +export type FilePropertyBag = SelectType; +// @ts-ignore +export type FileFromPathOptions = SelectType; +// @ts-ignore +export type FormData = SelectType; +// @ts-ignore +export const FormData: SelectType; +// @ts-ignore +export type File = SelectType; +// @ts-ignore +export const File: SelectType; +// @ts-ignore +export type Blob = SelectType; +// @ts-ignore +export const Blob: SelectType; + +// @ts-ignore +export type Readable = SelectType; +// @ts-ignore +export type FsReadStream = SelectType; +// @ts-ignore +export type ReadableStream = SelectType; +// @ts-ignore +export const ReadableStream: SelectType; + +export function getMultipartRequestOptions>( + form: FormData, + opts: RequestOptions, +): Promise>; + +export function getDefaultAgent(url: string): any; + +// @ts-ignore +export type FileFromPathOptions = SelectType; + +export function fileFromPath(path: string, options?: FileFromPathOptions): Promise; +export function fileFromPath(path: string, filename?: string, options?: FileFromPathOptions): Promise; + +export function isFsReadStream(value: any): value is FsReadStream; diff --git a/node_modules/openai/src/_shims/index.js b/node_modules/openai/src/_shims/index.js new file mode 100644 index 0000000..b5fc822 --- /dev/null +++ 
b/node_modules/openai/src/_shims/index.js @@ -0,0 +1,13 @@ +/** + * Disclaimer: modules in _shims aren't intended to be imported by SDK users. + */ +const shims = require('./registry'); +const auto = require('openai/_shims/auto/runtime'); +if (!shims.kind) shims.setShims(auto.getRuntime(), { auto: true }); +for (const property of Object.keys(shims)) { + Object.defineProperty(exports, property, { + get() { + return shims[property]; + }, + }); +} diff --git a/node_modules/openai/src/_shims/index.mjs b/node_modules/openai/src/_shims/index.mjs new file mode 100644 index 0000000..8d111aa --- /dev/null +++ b/node_modules/openai/src/_shims/index.mjs @@ -0,0 +1,7 @@ +/** + * Disclaimer: modules in _shims aren't intended to be imported by SDK users. + */ +import * as shims from './registry.mjs'; +import * as auto from "./auto/runtime"; +if (!shims.kind) shims.setShims(auto.getRuntime(), { auto: true }); +export * from './registry.mjs'; diff --git a/node_modules/openai/src/_shims/manual-types.d.ts b/node_modules/openai/src/_shims/manual-types.d.ts new file mode 100644 index 0000000..3d00fc2 --- /dev/null +++ b/node_modules/openai/src/_shims/manual-types.d.ts @@ -0,0 +1,12 @@ +/** + * Disclaimer: modules in _shims aren't intended to be imported by SDK users. + */ +/** + * Types will get added to this namespace when you import one of the following: + * + * import 'openai/shims/node' + * import 'openai/shims/web' + * + * Importing more than one will cause type and runtime errors. + */ +export namespace manual {} diff --git a/node_modules/openai/src/_shims/manual-types.js b/node_modules/openai/src/_shims/manual-types.js new file mode 100644 index 0000000..ddbdb79 --- /dev/null +++ b/node_modules/openai/src/_shims/manual-types.js @@ -0,0 +1,3 @@ +/** + * Disclaimer: modules in _shims aren't intended to be imported by SDK users. + */ diff --git a/node_modules/openai/src/_shims/manual-types.mjs b/node_modules/openai/src/_shims/manual-types.mjs new file mode 100644 index 0000000..ddbdb79 --- /dev/null +++ b/node_modules/openai/src/_shims/manual-types.mjs @@ -0,0 +1,3 @@ +/** + * Disclaimer: modules in _shims aren't intended to be imported by SDK users. + */ diff --git a/node_modules/openai/src/_shims/node-runtime.ts b/node_modules/openai/src/_shims/node-runtime.ts new file mode 100644 index 0000000..ab9f2ab --- /dev/null +++ b/node_modules/openai/src/_shims/node-runtime.ts @@ -0,0 +1,81 @@ +/** + * Disclaimer: modules in _shims aren't intended to be imported by SDK users. 
+ */ +import * as nf from 'node-fetch'; +import * as fd from 'formdata-node'; +import { type File, type FilePropertyBag } from 'formdata-node'; +import KeepAliveAgent from 'agentkeepalive'; +import { AbortController as AbortControllerPolyfill } from 'abort-controller'; +import { ReadStream as FsReadStream } from 'node:fs'; +import { type Agent } from 'node:http'; +import { FormDataEncoder } from 'form-data-encoder'; +import { Readable } from 'node:stream'; +import { type RequestOptions } from '../core'; +import { MultipartBody } from './MultipartBody'; +import { type Shims } from './registry'; +import { ReadableStream } from 'node:stream/web'; + +type FileFromPathOptions = Omit; + +let fileFromPathWarned = false; + +/** + * @deprecated use fs.createReadStream('./my/file.txt') instead + */ +async function fileFromPath(path: string): Promise; +async function fileFromPath(path: string, filename?: string): Promise; +async function fileFromPath(path: string, options?: FileFromPathOptions): Promise; +async function fileFromPath(path: string, filename?: string, options?: FileFromPathOptions): Promise; +async function fileFromPath(path: string, ...args: any[]): Promise { + // this import fails in environments that don't handle export maps correctly, like old versions of Jest + const { fileFromPath: _fileFromPath } = await import('formdata-node/file-from-path'); + + if (!fileFromPathWarned) { + console.warn(`fileFromPath is deprecated; use fs.createReadStream(${JSON.stringify(path)}) instead`); + fileFromPathWarned = true; + } + // @ts-ignore + return await _fileFromPath(path, ...args); +} + +const defaultHttpAgent: Agent = new KeepAliveAgent({ keepAlive: true, timeout: 5 * 60 * 1000 }); +const defaultHttpsAgent: Agent = new KeepAliveAgent.HttpsAgent({ keepAlive: true, timeout: 5 * 60 * 1000 }); + +async function getMultipartRequestOptions>( + form: fd.FormData, + opts: RequestOptions, +): Promise> { + const encoder = new FormDataEncoder(form); + const readable = Readable.from(encoder); + const body = new MultipartBody(readable); + const headers = { + ...opts.headers, + ...encoder.headers, + 'Content-Length': encoder.contentLength, + }; + + return { ...opts, body: body as any, headers }; +} + +export function getRuntime(): Shims { + // Polyfill global object if needed. + if (typeof AbortController === 'undefined') { + // @ts-expect-error (the types are subtly different, but compatible in practice) + globalThis.AbortController = AbortControllerPolyfill; + } + return { + kind: 'node', + fetch: nf.default, + Request: nf.Request, + Response: nf.Response, + Headers: nf.Headers, + FormData: fd.FormData, + Blob: fd.Blob, + File: fd.File, + ReadableStream, + getMultipartRequestOptions, + getDefaultAgent: (url: string): Agent => (url.startsWith('https') ? defaultHttpsAgent : defaultHttpAgent), + fileFromPath, + isFsReadStream: (value: any): value is FsReadStream => value instanceof FsReadStream, + }; +} diff --git a/node_modules/openai/src/_shims/node-types.d.ts b/node_modules/openai/src/_shims/node-types.d.ts new file mode 100644 index 0000000..c159e5f --- /dev/null +++ b/node_modules/openai/src/_shims/node-types.d.ts @@ -0,0 +1,42 @@ +/** + * Disclaimer: modules in _shims aren't intended to be imported by SDK users. 
+ */ +import * as nf from 'node-fetch'; +import * as fd from 'formdata-node'; + +export { type Agent } from 'node:http'; +export { type Readable } from 'node:stream'; +export { type ReadStream as FsReadStream } from 'node:fs'; +export { ReadableStream } from 'node:stream/web'; + +export const fetch: typeof nf.default; + +export type Request = nf.Request; +export type RequestInfo = nf.RequestInfo; +export type RequestInit = nf.RequestInit; + +export type Response = nf.Response; +export type ResponseInit = nf.ResponseInit; +export type ResponseType = nf.ResponseType; +export type BodyInit = nf.BodyInit; +export type Headers = nf.Headers; +export type HeadersInit = nf.HeadersInit; + +type EndingType = 'native' | 'transparent'; +export interface BlobPropertyBag { + endings?: EndingType; + type?: string; +} + +export interface FilePropertyBag extends BlobPropertyBag { + lastModified?: number; +} + +export type FileFromPathOptions = Omit; + +export type FormData = fd.FormData; +export const FormData: typeof fd.FormData; +export type File = fd.File; +export const File: typeof fd.File; +export type Blob = fd.Blob; +export const Blob: typeof fd.Blob; diff --git a/node_modules/openai/src/_shims/node-types.js b/node_modules/openai/src/_shims/node-types.js new file mode 100644 index 0000000..ddbdb79 --- /dev/null +++ b/node_modules/openai/src/_shims/node-types.js @@ -0,0 +1,3 @@ +/** + * Disclaimer: modules in _shims aren't intended to be imported by SDK users. + */ diff --git a/node_modules/openai/src/_shims/node-types.mjs b/node_modules/openai/src/_shims/node-types.mjs new file mode 100644 index 0000000..ddbdb79 --- /dev/null +++ b/node_modules/openai/src/_shims/node-types.mjs @@ -0,0 +1,3 @@ +/** + * Disclaimer: modules in _shims aren't intended to be imported by SDK users. + */ diff --git a/node_modules/openai/src/_shims/registry.ts b/node_modules/openai/src/_shims/registry.ts new file mode 100644 index 0000000..1fa3964 --- /dev/null +++ b/node_modules/openai/src/_shims/registry.ts @@ -0,0 +1,65 @@ +/** + * Disclaimer: modules in _shims aren't intended to be imported by SDK users. 
+ */ +import { type RequestOptions } from '../core'; + +export interface Shims { + kind: string; + fetch: any; + Request: any; + Response: any; + Headers: any; + FormData: any; + Blob: any; + File: any; + ReadableStream: any; + getMultipartRequestOptions: >( + form: Shims['FormData'], + opts: RequestOptions, + ) => Promise>; + getDefaultAgent: (url: string) => any; + fileFromPath: + | ((path: string, filename?: string, options?: {}) => Promise) + | ((path: string, options?: {}) => Promise); + isFsReadStream: (value: any) => boolean; +} + +export let auto = false; +export let kind: Shims['kind'] | undefined = undefined; +export let fetch: Shims['fetch'] | undefined = undefined; +export let Request: Shims['Request'] | undefined = undefined; +export let Response: Shims['Response'] | undefined = undefined; +export let Headers: Shims['Headers'] | undefined = undefined; +export let FormData: Shims['FormData'] | undefined = undefined; +export let Blob: Shims['Blob'] | undefined = undefined; +export let File: Shims['File'] | undefined = undefined; +export let ReadableStream: Shims['ReadableStream'] | undefined = undefined; +export let getMultipartRequestOptions: Shims['getMultipartRequestOptions'] | undefined = undefined; +export let getDefaultAgent: Shims['getDefaultAgent'] | undefined = undefined; +export let fileFromPath: Shims['fileFromPath'] | undefined = undefined; +export let isFsReadStream: Shims['isFsReadStream'] | undefined = undefined; + +export function setShims(shims: Shims, options: { auto: boolean } = { auto: false }) { + if (auto) { + throw new Error( + `you must \`import 'openai/shims/${shims.kind}'\` before importing anything else from openai`, + ); + } + if (kind) { + throw new Error(`can't \`import 'openai/shims/${shims.kind}'\` after \`import 'openai/shims/${kind}'\``); + } + auto = options.auto; + kind = shims.kind; + fetch = shims.fetch; + Request = shims.Request; + Response = shims.Response; + Headers = shims.Headers; + FormData = shims.FormData; + Blob = shims.Blob; + File = shims.File; + ReadableStream = shims.ReadableStream; + getMultipartRequestOptions = shims.getMultipartRequestOptions; + getDefaultAgent = shims.getDefaultAgent; + fileFromPath = shims.fileFromPath; + isFsReadStream = shims.isFsReadStream; +} diff --git a/node_modules/openai/src/_shims/web-runtime.ts b/node_modules/openai/src/_shims/web-runtime.ts new file mode 100644 index 0000000..a5e90bc --- /dev/null +++ b/node_modules/openai/src/_shims/web-runtime.ts @@ -0,0 +1,103 @@ +/** + * Disclaimer: modules in _shims aren't intended to be imported by SDK users. + */ +import { MultipartBody } from './MultipartBody'; +import { type RequestOptions } from '../core'; +import { type Shims } from './registry'; + +export function getRuntime({ manuallyImported }: { manuallyImported?: boolean } = {}): Shims { + const recommendation = + manuallyImported ? + `You may need to use polyfills` + : `Add one of these imports before your first \`import … from 'openai'\`: +- \`import 'openai/shims/node'\` (if you're running on Node) +- \`import 'openai/shims/web'\` (otherwise) +`; + + let _fetch, _Request, _Response, _Headers; + try { + // @ts-ignore + _fetch = fetch; + // @ts-ignore + _Request = Request; + // @ts-ignore + _Response = Response; + // @ts-ignore + _Headers = Headers; + } catch (error) { + throw new Error( + `this environment is missing the following Web Fetch API type: ${ + (error as any).message + }. 
${recommendation}`, + ); + } + + return { + kind: 'web', + fetch: _fetch, + Request: _Request, + Response: _Response, + Headers: _Headers, + FormData: + // @ts-ignore + typeof FormData !== 'undefined' ? FormData : ( + class FormData { + // @ts-ignore + constructor() { + throw new Error( + `file uploads aren't supported in this environment yet as 'FormData' is undefined. ${recommendation}`, + ); + } + } + ), + Blob: + typeof Blob !== 'undefined' ? Blob : ( + class Blob { + constructor() { + throw new Error( + `file uploads aren't supported in this environment yet as 'Blob' is undefined. ${recommendation}`, + ); + } + } + ), + File: + // @ts-ignore + typeof File !== 'undefined' ? File : ( + class File { + // @ts-ignore + constructor() { + throw new Error( + `file uploads aren't supported in this environment yet as 'File' is undefined. ${recommendation}`, + ); + } + } + ), + ReadableStream: + // @ts-ignore + typeof ReadableStream !== 'undefined' ? ReadableStream : ( + class ReadableStream { + // @ts-ignore + constructor() { + throw new Error( + `streaming isn't supported in this environment yet as 'ReadableStream' is undefined. ${recommendation}`, + ); + } + } + ), + getMultipartRequestOptions: async >( + // @ts-ignore + form: FormData, + opts: RequestOptions, + ): Promise> => ({ + ...opts, + body: new MultipartBody(form) as any, + }), + getDefaultAgent: (url: string) => undefined, + fileFromPath: () => { + throw new Error( + 'The `fileFromPath` function is only supported in Node. See the README for more details: https://www.github.com/openai/openai-node#file-uploads', + ); + }, + isFsReadStream: (value: any) => false, + }; +} diff --git a/node_modules/openai/src/_shims/web-types.d.ts b/node_modules/openai/src/_shims/web-types.d.ts new file mode 100644 index 0000000..4ff3513 --- /dev/null +++ b/node_modules/openai/src/_shims/web-types.d.ts @@ -0,0 +1,83 @@ +/** + * Disclaimer: modules in _shims aren't intended to be imported by SDK users. 
+ */ +export type Agent = any; + +declare const _fetch: typeof fetch; +export { _fetch as fetch }; + +type _Request = Request; +export { _Request as Request }; + +type _RequestInfo = RequestInfo; +export { type _RequestInfo as RequestInfo }; + +type _RequestInit = RequestInit; +export { type _RequestInit as RequestInit }; + +type _Response = Response; +export { _Response as Response }; + +type _ResponseInit = ResponseInit; +export { type _ResponseInit as ResponseInit }; + +type _ResponseType = ResponseType; +export { type _ResponseType as ResponseType }; + +type _BodyInit = BodyInit; +export { type _BodyInit as BodyInit }; + +type _Headers = Headers; +export { _Headers as Headers }; + +type _HeadersInit = HeadersInit; +export { type _HeadersInit as HeadersInit }; + +type EndingType = 'native' | 'transparent'; + +export interface BlobPropertyBag { + endings?: EndingType; + type?: string; +} + +export interface FilePropertyBag extends BlobPropertyBag { + lastModified?: number; +} + +export type FileFromPathOptions = Omit; + +type _FormData = FormData; +declare const _FormData: typeof FormData; +export { _FormData as FormData }; + +type _File = File; +declare const _File: typeof File; +export { _File as File }; + +type _Blob = Blob; +declare const _Blob: typeof Blob; +export { _Blob as Blob }; + +export declare class Readable { + readable: boolean; + readonly readableEnded: boolean; + readonly readableFlowing: boolean | null; + readonly readableHighWaterMark: number; + readonly readableLength: number; + readonly readableObjectMode: boolean; + destroyed: boolean; + read(size?: number): any; + pause(): this; + resume(): this; + isPaused(): boolean; + destroy(error?: Error): this; + [Symbol.asyncIterator](): AsyncIterableIterator; +} + +export declare class FsReadStream extends Readable { + path: {}; // node type is string | Buffer +} + +type _ReadableStream = ReadableStream; +declare const _ReadableStream: typeof ReadableStream; +export { _ReadableStream as ReadableStream }; diff --git a/node_modules/openai/src/_shims/web-types.js b/node_modules/openai/src/_shims/web-types.js new file mode 100644 index 0000000..ddbdb79 --- /dev/null +++ b/node_modules/openai/src/_shims/web-types.js @@ -0,0 +1,3 @@ +/** + * Disclaimer: modules in _shims aren't intended to be imported by SDK users. + */ diff --git a/node_modules/openai/src/_shims/web-types.mjs b/node_modules/openai/src/_shims/web-types.mjs new file mode 100644 index 0000000..ddbdb79 --- /dev/null +++ b/node_modules/openai/src/_shims/web-types.mjs @@ -0,0 +1,3 @@ +/** + * Disclaimer: modules in _shims aren't intended to be imported by SDK users. 
+ */ diff --git a/node_modules/openai/src/_vendor/partial-json-parser/README.md b/node_modules/openai/src/_vendor/partial-json-parser/README.md new file mode 100644 index 0000000..d4e1c85 --- /dev/null +++ b/node_modules/openai/src/_vendor/partial-json-parser/README.md @@ -0,0 +1,3 @@ +# Partial JSON Parser + +Vendored from https://www.npmjs.com/package/partial-json with some modifications diff --git a/node_modules/openai/src/_vendor/partial-json-parser/parser.ts b/node_modules/openai/src/_vendor/partial-json-parser/parser.ts new file mode 100644 index 0000000..5ee62b7 --- /dev/null +++ b/node_modules/openai/src/_vendor/partial-json-parser/parser.ts @@ -0,0 +1,247 @@ +const STR = 0b000000001; +const NUM = 0b000000010; +const ARR = 0b000000100; +const OBJ = 0b000001000; +const NULL = 0b000010000; +const BOOL = 0b000100000; +const NAN = 0b001000000; +const INFINITY = 0b010000000; +const MINUS_INFINITY = 0b100000000; + +const INF = INFINITY | MINUS_INFINITY; +const SPECIAL = NULL | BOOL | INF | NAN; +const ATOM = STR | NUM | SPECIAL; +const COLLECTION = ARR | OBJ; +const ALL = ATOM | COLLECTION; + +const Allow = { + STR, + NUM, + ARR, + OBJ, + NULL, + BOOL, + NAN, + INFINITY, + MINUS_INFINITY, + INF, + SPECIAL, + ATOM, + COLLECTION, + ALL, +}; + +// The JSON string segment was unable to be parsed completely +class PartialJSON extends Error {} + +class MalformedJSON extends Error {} + +/** + * Parse incomplete JSON + * @param {string} jsonString Partial JSON to be parsed + * @param {number} allowPartial Specify what types are allowed to be partial, see {@link Allow} for details + * @returns The parsed JSON + * @throws {PartialJSON} If the JSON is incomplete (related to the `allow` parameter) + * @throws {MalformedJSON} If the JSON is malformed + */ +function parseJSON(jsonString: string, allowPartial: number = Allow.ALL): any { + if (typeof jsonString !== 'string') { + throw new TypeError(`expecting str, got ${typeof jsonString}`); + } + if (!jsonString.trim()) { + throw new Error(`${jsonString} is empty`); + } + return _parseJSON(jsonString.trim(), allowPartial); +} + +const _parseJSON = (jsonString: string, allow: number) => { + const length = jsonString.length; + let index = 0; + + const markPartialJSON = (msg: string) => { + throw new PartialJSON(`${msg} at position ${index}`); + }; + + const throwMalformedError = (msg: string) => { + throw new MalformedJSON(`${msg} at position ${index}`); + }; + + const parseAny: () => any = () => { + skipBlank(); + if (index >= length) markPartialJSON('Unexpected end of input'); + if (jsonString[index] === '"') return parseStr(); + if (jsonString[index] === '{') return parseObj(); + if (jsonString[index] === '[') return parseArr(); + if ( + jsonString.substring(index, index + 4) === 'null' || + (Allow.NULL & allow && length - index < 4 && 'null'.startsWith(jsonString.substring(index))) + ) { + index += 4; + return null; + } + if ( + jsonString.substring(index, index + 4) === 'true' || + (Allow.BOOL & allow && length - index < 4 && 'true'.startsWith(jsonString.substring(index))) + ) { + index += 4; + return true; + } + if ( + jsonString.substring(index, index + 5) === 'false' || + (Allow.BOOL & allow && length - index < 5 && 'false'.startsWith(jsonString.substring(index))) + ) { + index += 5; + return false; + } + if ( + jsonString.substring(index, index + 8) === 'Infinity' || + (Allow.INFINITY & allow && length - index < 8 && 'Infinity'.startsWith(jsonString.substring(index))) + ) { + index += 8; + return Infinity; + } + if ( + 
jsonString.substring(index, index + 9) === '-Infinity' || + (Allow.MINUS_INFINITY & allow && + 1 < length - index && + length - index < 9 && + '-Infinity'.startsWith(jsonString.substring(index))) + ) { + index += 9; + return -Infinity; + } + if ( + jsonString.substring(index, index + 3) === 'NaN' || + (Allow.NAN & allow && length - index < 3 && 'NaN'.startsWith(jsonString.substring(index))) + ) { + index += 3; + return NaN; + } + return parseNum(); + }; + + const parseStr: () => string = () => { + const start = index; + let escape = false; + index++; // skip initial quote + while (index < length && (jsonString[index] !== '"' || (escape && jsonString[index - 1] === '\\'))) { + escape = jsonString[index] === '\\' ? !escape : false; + index++; + } + if (jsonString.charAt(index) == '"') { + try { + return JSON.parse(jsonString.substring(start, ++index - Number(escape))); + } catch (e) { + throwMalformedError(String(e)); + } + } else if (Allow.STR & allow) { + try { + return JSON.parse(jsonString.substring(start, index - Number(escape)) + '"'); + } catch (e) { + // SyntaxError: Invalid escape sequence + return JSON.parse(jsonString.substring(start, jsonString.lastIndexOf('\\')) + '"'); + } + } + markPartialJSON('Unterminated string literal'); + }; + + const parseObj = () => { + index++; // skip initial brace + skipBlank(); + const obj: Record = {}; + try { + while (jsonString[index] !== '}') { + skipBlank(); + if (index >= length && Allow.OBJ & allow) return obj; + const key = parseStr(); + skipBlank(); + index++; // skip colon + try { + const value = parseAny(); + Object.defineProperty(obj, key, { value, writable: true, enumerable: true, configurable: true }); + } catch (e) { + if (Allow.OBJ & allow) return obj; + else throw e; + } + skipBlank(); + if (jsonString[index] === ',') index++; // skip comma + } + } catch (e) { + if (Allow.OBJ & allow) return obj; + else markPartialJSON("Expected '}' at end of object"); + } + index++; // skip final brace + return obj; + }; + + const parseArr = () => { + index++; // skip initial bracket + const arr = []; + try { + while (jsonString[index] !== ']') { + arr.push(parseAny()); + skipBlank(); + if (jsonString[index] === ',') { + index++; // skip comma + } + } + } catch (e) { + if (Allow.ARR & allow) { + return arr; + } + markPartialJSON("Expected ']' at end of array"); + } + index++; // skip final bracket + return arr; + }; + + const parseNum = () => { + if (index === 0) { + if (jsonString === '-' && Allow.NUM & allow) markPartialJSON("Not sure what '-' is"); + try { + return JSON.parse(jsonString); + } catch (e) { + if (Allow.NUM & allow) { + try { + if ('.' 
=== jsonString[jsonString.length - 1]) + return JSON.parse(jsonString.substring(0, jsonString.lastIndexOf('.'))); + return JSON.parse(jsonString.substring(0, jsonString.lastIndexOf('e'))); + } catch (e) {} + } + throwMalformedError(String(e)); + } + } + + const start = index; + + if (jsonString[index] === '-') index++; + while (jsonString[index] && !',]}'.includes(jsonString[index]!)) index++; + + if (index == length && !(Allow.NUM & allow)) markPartialJSON('Unterminated number literal'); + + try { + return JSON.parse(jsonString.substring(start, index)); + } catch (e) { + if (jsonString.substring(start, index) === '-' && Allow.NUM & allow) + markPartialJSON("Not sure what '-' is"); + try { + return JSON.parse(jsonString.substring(start, jsonString.lastIndexOf('e'))); + } catch (e) { + throwMalformedError(String(e)); + } + } + }; + + const skipBlank = () => { + while (index < length && ' \n\r\t'.includes(jsonString[index]!)) { + index++; + } + }; + + return parseAny(); +}; + +// using this function with malformed JSON is undefined behavior +const partialParse = (input: string) => parseJSON(input, Allow.ALL ^ Allow.NUM); + +export { partialParse, PartialJSON, MalformedJSON }; diff --git a/node_modules/openai/src/_vendor/zod-to-json-schema/LICENSE b/node_modules/openai/src/_vendor/zod-to-json-schema/LICENSE new file mode 100644 index 0000000..a4690a1 --- /dev/null +++ b/node_modules/openai/src/_vendor/zod-to-json-schema/LICENSE @@ -0,0 +1,15 @@ +ISC License + +Copyright (c) 2020, Stefan Terdell + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
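To make the behaviour of the vendored partial JSON parser above concrete, a short usage sketch follows (the relative import path is an assumption for illustration; inside the SDK this module is consumed internally rather than imported by users):

```ts
// Illustrative only: exercising the partialParse export defined in parser.ts above.
import { partialParse } from './parser';

// A JSON payload cut off mid-stream, e.g. while accumulating tool-call arguments.
const chunk = '{"a": "hello", "b": [1, 2';

// partialParse masks Allow.NUM, so the trailing `2` is treated as an
// unterminated number and dropped (it could still become `25` in a later
// chunk), while every completed atom is kept:
// => { a: 'hello', b: [ 1 ] }
console.log(partialParse(chunk));
```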
diff --git a/node_modules/openai/src/_vendor/zod-to-json-schema/Options.ts b/node_modules/openai/src/_vendor/zod-to-json-schema/Options.ts new file mode 100644 index 0000000..a9abfc0 --- /dev/null +++ b/node_modules/openai/src/_vendor/zod-to-json-schema/Options.ts @@ -0,0 +1,80 @@ +import { ZodSchema, ZodTypeDef } from 'zod'; +import { Refs, Seen } from './Refs'; +import { JsonSchema7Type } from './parseDef'; + +export type Targets = 'jsonSchema7' | 'jsonSchema2019-09' | 'openApi3'; + +export type DateStrategy = 'format:date-time' | 'format:date' | 'string' | 'integer'; + +export const ignoreOverride = Symbol('Let zodToJsonSchema decide on which parser to use'); + +export type Options = { + name: string | undefined; + $refStrategy: 'root' | 'relative' | 'none' | 'seen' | 'extract-to-root'; + basePath: string[]; + effectStrategy: 'input' | 'any'; + pipeStrategy: 'input' | 'output' | 'all'; + dateStrategy: DateStrategy | DateStrategy[]; + mapStrategy: 'entries' | 'record'; + removeAdditionalStrategy: 'passthrough' | 'strict'; + nullableStrategy: 'from-target' | 'property'; + target: Target; + strictUnions: boolean; + definitionPath: string; + definitions: Record; + errorMessages: boolean; + markdownDescription: boolean; + patternStrategy: 'escape' | 'preserve'; + applyRegexFlags: boolean; + emailStrategy: 'format:email' | 'format:idn-email' | 'pattern:zod'; + base64Strategy: 'format:binary' | 'contentEncoding:base64' | 'pattern:zod'; + nameStrategy: 'ref' | 'duplicate-ref' | 'title'; + override?: ( + def: ZodTypeDef, + refs: Refs, + seen: Seen | undefined, + forceResolution?: boolean, + ) => JsonSchema7Type | undefined | typeof ignoreOverride; + openaiStrictMode?: boolean; +}; + +const defaultOptions: Omit = { + name: undefined, + $refStrategy: 'root', + effectStrategy: 'input', + pipeStrategy: 'all', + dateStrategy: 'format:date-time', + mapStrategy: 'entries', + nullableStrategy: 'from-target', + removeAdditionalStrategy: 'passthrough', + definitionPath: 'definitions', + target: 'jsonSchema7', + strictUnions: false, + errorMessages: false, + markdownDescription: false, + patternStrategy: 'escape', + applyRegexFlags: false, + emailStrategy: 'format:email', + base64Strategy: 'contentEncoding:base64', + nameStrategy: 'ref', +}; + +export const getDefaultOptions = ( + options: Partial> | string | undefined, +) => { + // We need to add `definitions` here as we may mutate it + return ( + typeof options === 'string' ? 
+ { + ...defaultOptions, + basePath: ['#'], + definitions: {}, + name: options, + } + : { + ...defaultOptions, + basePath: ['#'], + definitions: {}, + ...options, + }) as Options; +}; diff --git a/node_modules/openai/src/_vendor/zod-to-json-schema/README.md b/node_modules/openai/src/_vendor/zod-to-json-schema/README.md new file mode 100644 index 0000000..ffb3512 --- /dev/null +++ b/node_modules/openai/src/_vendor/zod-to-json-schema/README.md @@ -0,0 +1,3 @@ +# Zod to Json Schema + +Vendored version of https://github.com/StefanTerdell/zod-to-json-schema that has been updated to generate JSON Schemas that are compatible with OpenAI's [strict mode](https://platform.openai.com/docs/guides/structured-outputs/supported-schemas) diff --git a/node_modules/openai/src/_vendor/zod-to-json-schema/Refs.ts b/node_modules/openai/src/_vendor/zod-to-json-schema/Refs.ts new file mode 100644 index 0000000..ea63c07 --- /dev/null +++ b/node_modules/openai/src/_vendor/zod-to-json-schema/Refs.ts @@ -0,0 +1,47 @@ +import type { ZodTypeDef } from 'zod'; +import { getDefaultOptions, Options, Targets } from './Options'; +import { JsonSchema7Type } from './parseDef'; +import { zodDef } from './util'; + +export type Refs = { + seen: Map; + /** + * Set of all the `$ref`s we created, e.g. `Set(['#/$defs/ui'])` + * this notable does not include any `definitions` that were + * explicitly given as an option. + */ + seenRefs: Set; + currentPath: string[]; + propertyPath: string[] | undefined; +} & Options; + +export type Seen = { + def: ZodTypeDef; + path: string[]; + jsonSchema: JsonSchema7Type | undefined; +}; + +export const getRefs = (options?: string | Partial>): Refs => { + const _options = getDefaultOptions(options); + const currentPath = + _options.name !== undefined ? + [..._options.basePath, _options.definitionPath, _options.name] + : _options.basePath; + return { + ..._options, + currentPath: currentPath, + propertyPath: undefined, + seenRefs: new Set(), + seen: new Map( + Object.entries(_options.definitions).map(([name, def]) => [ + zodDef(def), + { + def: zodDef(def), + path: [..._options.basePath, _options.definitionPath, name], + // Resolution of references will be forced even though seen, so it's ok that the schema is undefined here for now. 
+ jsonSchema: undefined, + }, + ]), + ), + }; +}; diff --git a/node_modules/openai/src/_vendor/zod-to-json-schema/errorMessages.ts b/node_modules/openai/src/_vendor/zod-to-json-schema/errorMessages.ts new file mode 100644 index 0000000..ceb0e8b --- /dev/null +++ b/node_modules/openai/src/_vendor/zod-to-json-schema/errorMessages.ts @@ -0,0 +1,31 @@ +import { JsonSchema7TypeUnion } from './parseDef'; +import { Refs } from './Refs'; + +export type ErrorMessages = Partial< + Omit<{ [key in keyof T]: string }, OmitProperties | 'type' | 'errorMessages'> +>; + +export function addErrorMessage }>( + res: T, + key: keyof T, + errorMessage: string | undefined, + refs: Refs, +) { + if (!refs?.errorMessages) return; + if (errorMessage) { + res.errorMessage = { + ...res.errorMessage, + [key]: errorMessage, + }; + } +} + +export function setResponseValueAndErrors< + Json7Type extends JsonSchema7TypeUnion & { + errorMessage?: ErrorMessages; + }, + Key extends keyof Omit, +>(res: Json7Type, key: Key, value: Json7Type[Key], errorMessage: string | undefined, refs: Refs) { + res[key] = value; + addErrorMessage(res, key, errorMessage, refs); +} diff --git a/node_modules/openai/src/_vendor/zod-to-json-schema/index.ts b/node_modules/openai/src/_vendor/zod-to-json-schema/index.ts new file mode 100644 index 0000000..5808bc2 --- /dev/null +++ b/node_modules/openai/src/_vendor/zod-to-json-schema/index.ts @@ -0,0 +1,37 @@ +export * from './Options'; +export * from './Refs'; +export * from './errorMessages'; +export * from './parseDef'; +export * from './parsers/any'; +export * from './parsers/array'; +export * from './parsers/bigint'; +export * from './parsers/boolean'; +export * from './parsers/branded'; +export * from './parsers/catch'; +export * from './parsers/date'; +export * from './parsers/default'; +export * from './parsers/effects'; +export * from './parsers/enum'; +export * from './parsers/intersection'; +export * from './parsers/literal'; +export * from './parsers/map'; +export * from './parsers/nativeEnum'; +export * from './parsers/never'; +export * from './parsers/null'; +export * from './parsers/nullable'; +export * from './parsers/number'; +export * from './parsers/object'; +export * from './parsers/optional'; +export * from './parsers/pipeline'; +export * from './parsers/promise'; +export * from './parsers/readonly'; +export * from './parsers/record'; +export * from './parsers/set'; +export * from './parsers/string'; +export * from './parsers/tuple'; +export * from './parsers/undefined'; +export * from './parsers/union'; +export * from './parsers/unknown'; +export * from './zodToJsonSchema'; +import { zodToJsonSchema } from './zodToJsonSchema'; +export default zodToJsonSchema; diff --git a/node_modules/openai/src/_vendor/zod-to-json-schema/parseDef.ts b/node_modules/openai/src/_vendor/zod-to-json-schema/parseDef.ts new file mode 100644 index 0000000..8af5ce4 --- /dev/null +++ b/node_modules/openai/src/_vendor/zod-to-json-schema/parseDef.ts @@ -0,0 +1,258 @@ +import { ZodFirstPartyTypeKind, ZodTypeDef } from 'zod'; +import { JsonSchema7AnyType, parseAnyDef } from './parsers/any'; +import { JsonSchema7ArrayType, parseArrayDef } from './parsers/array'; +import { JsonSchema7BigintType, parseBigintDef } from './parsers/bigint'; +import { JsonSchema7BooleanType, parseBooleanDef } from './parsers/boolean'; +import { parseBrandedDef } from './parsers/branded'; +import { parseCatchDef } from './parsers/catch'; +import { JsonSchema7DateType, parseDateDef } from './parsers/date'; +import { parseDefaultDef } 
from './parsers/default'; +import { parseEffectsDef } from './parsers/effects'; +import { JsonSchema7EnumType, parseEnumDef } from './parsers/enum'; +import { JsonSchema7AllOfType, parseIntersectionDef } from './parsers/intersection'; +import { JsonSchema7LiteralType, parseLiteralDef } from './parsers/literal'; +import { JsonSchema7MapType, parseMapDef } from './parsers/map'; +import { JsonSchema7NativeEnumType, parseNativeEnumDef } from './parsers/nativeEnum'; +import { JsonSchema7NeverType, parseNeverDef } from './parsers/never'; +import { JsonSchema7NullType, parseNullDef } from './parsers/null'; +import { JsonSchema7NullableType, parseNullableDef } from './parsers/nullable'; +import { JsonSchema7NumberType, parseNumberDef } from './parsers/number'; +import { JsonSchema7ObjectType, parseObjectDef } from './parsers/object'; +import { parseOptionalDef } from './parsers/optional'; +import { parsePipelineDef } from './parsers/pipeline'; +import { parsePromiseDef } from './parsers/promise'; +import { JsonSchema7RecordType, parseRecordDef } from './parsers/record'; +import { JsonSchema7SetType, parseSetDef } from './parsers/set'; +import { JsonSchema7StringType, parseStringDef } from './parsers/string'; +import { JsonSchema7TupleType, parseTupleDef } from './parsers/tuple'; +import { JsonSchema7UndefinedType, parseUndefinedDef } from './parsers/undefined'; +import { JsonSchema7UnionType, parseUnionDef } from './parsers/union'; +import { JsonSchema7UnknownType, parseUnknownDef } from './parsers/unknown'; +import { Refs, Seen } from './Refs'; +import { parseReadonlyDef } from './parsers/readonly'; +import { ignoreOverride } from './Options'; + +type JsonSchema7RefType = { $ref: string }; +type JsonSchema7Meta = { + title?: string; + default?: any; + description?: string; + markdownDescription?: string; +}; + +export type JsonSchema7TypeUnion = + | JsonSchema7StringType + | JsonSchema7ArrayType + | JsonSchema7NumberType + | JsonSchema7BigintType + | JsonSchema7BooleanType + | JsonSchema7DateType + | JsonSchema7EnumType + | JsonSchema7LiteralType + | JsonSchema7NativeEnumType + | JsonSchema7NullType + | JsonSchema7NumberType + | JsonSchema7ObjectType + | JsonSchema7RecordType + | JsonSchema7TupleType + | JsonSchema7UnionType + | JsonSchema7UndefinedType + | JsonSchema7RefType + | JsonSchema7NeverType + | JsonSchema7MapType + | JsonSchema7AnyType + | JsonSchema7NullableType + | JsonSchema7AllOfType + | JsonSchema7UnknownType + | JsonSchema7SetType; + +export type JsonSchema7Type = JsonSchema7TypeUnion & JsonSchema7Meta; + +export function parseDef( + def: ZodTypeDef, + refs: Refs, + forceResolution = false, // Forces a new schema to be instantiated even though its def has been seen. Used for improving refs in definitions. See https://github.com/StefanTerdell/zod-to-json-schema/pull/61. 
+): JsonSchema7Type | undefined { + const seenItem = refs.seen.get(def); + + if (refs.override) { + const overrideResult = refs.override?.(def, refs, seenItem, forceResolution); + + if (overrideResult !== ignoreOverride) { + return overrideResult; + } + } + + if (seenItem && !forceResolution) { + const seenSchema = get$ref(seenItem, refs); + + if (seenSchema !== undefined) { + if ('$ref' in seenSchema) { + refs.seenRefs.add(seenSchema.$ref); + } + + return seenSchema; + } + } + + const newItem: Seen = { def, path: refs.currentPath, jsonSchema: undefined }; + + refs.seen.set(def, newItem); + + const jsonSchema = selectParser(def, (def as any).typeName, refs, forceResolution); + + if (jsonSchema) { + addMeta(def, refs, jsonSchema); + } + + newItem.jsonSchema = jsonSchema; + + return jsonSchema; +} + +const get$ref = ( + item: Seen, + refs: Refs, +): + | { + $ref: string; + } + | {} + | undefined => { + switch (refs.$refStrategy) { + case 'root': + return { $ref: item.path.join('/') }; + // this case is needed as OpenAI strict mode doesn't support top-level `$ref`s, i.e. + // the top-level schema *must* be `{"type": "object", "properties": {...}}` but if we ever + // need to define a `$ref`, relative `$ref`s aren't supported, so we need to extract + // the schema to `#/definitions/` and reference that. + // + // e.g. if we need to reference a schema at + // `["#","definitions","contactPerson","properties","person1","properties","name"]` + // then we'll extract it out to `contactPerson_properties_person1_properties_name` + case 'extract-to-root': + const name = item.path.slice(refs.basePath.length + 1).join('_'); + + // we don't need to extract the root schema in this case, as it's already + // been added to the definitions + if (name !== refs.name && refs.nameStrategy === 'duplicate-ref') { + refs.definitions[name] = item.def; + } + + return { $ref: [...refs.basePath, refs.definitionPath, name].join('/') }; + case 'relative': + return { $ref: getRelativePath(refs.currentPath, item.path) }; + case 'none': + case 'seen': { + if ( + item.path.length < refs.currentPath.length && + item.path.every((value, index) => refs.currentPath[index] === value) + ) { + console.warn(`Recursive reference detected at ${refs.currentPath.join('/')}! Defaulting to any`); + + return {}; + } + + return refs.$refStrategy === 'seen' ? 
{} : undefined; + } + } +}; + +const getRelativePath = (pathA: string[], pathB: string[]) => { + let i = 0; + for (; i < pathA.length && i < pathB.length; i++) { + if (pathA[i] !== pathB[i]) break; + } + return [(pathA.length - i).toString(), ...pathB.slice(i)].join('/'); +}; + +const selectParser = ( + def: any, + typeName: ZodFirstPartyTypeKind, + refs: Refs, + forceResolution: boolean, +): JsonSchema7Type | undefined => { + switch (typeName) { + case ZodFirstPartyTypeKind.ZodString: + return parseStringDef(def, refs); + case ZodFirstPartyTypeKind.ZodNumber: + return parseNumberDef(def, refs); + case ZodFirstPartyTypeKind.ZodObject: + return parseObjectDef(def, refs); + case ZodFirstPartyTypeKind.ZodBigInt: + return parseBigintDef(def, refs); + case ZodFirstPartyTypeKind.ZodBoolean: + return parseBooleanDef(); + case ZodFirstPartyTypeKind.ZodDate: + return parseDateDef(def, refs); + case ZodFirstPartyTypeKind.ZodUndefined: + return parseUndefinedDef(); + case ZodFirstPartyTypeKind.ZodNull: + return parseNullDef(refs); + case ZodFirstPartyTypeKind.ZodArray: + return parseArrayDef(def, refs); + case ZodFirstPartyTypeKind.ZodUnion: + case ZodFirstPartyTypeKind.ZodDiscriminatedUnion: + return parseUnionDef(def, refs); + case ZodFirstPartyTypeKind.ZodIntersection: + return parseIntersectionDef(def, refs); + case ZodFirstPartyTypeKind.ZodTuple: + return parseTupleDef(def, refs); + case ZodFirstPartyTypeKind.ZodRecord: + return parseRecordDef(def, refs); + case ZodFirstPartyTypeKind.ZodLiteral: + return parseLiteralDef(def, refs); + case ZodFirstPartyTypeKind.ZodEnum: + return parseEnumDef(def); + case ZodFirstPartyTypeKind.ZodNativeEnum: + return parseNativeEnumDef(def); + case ZodFirstPartyTypeKind.ZodNullable: + return parseNullableDef(def, refs); + case ZodFirstPartyTypeKind.ZodOptional: + return parseOptionalDef(def, refs); + case ZodFirstPartyTypeKind.ZodMap: + return parseMapDef(def, refs); + case ZodFirstPartyTypeKind.ZodSet: + return parseSetDef(def, refs); + case ZodFirstPartyTypeKind.ZodLazy: + return parseDef(def.getter()._def, refs); + case ZodFirstPartyTypeKind.ZodPromise: + return parsePromiseDef(def, refs); + case ZodFirstPartyTypeKind.ZodNaN: + case ZodFirstPartyTypeKind.ZodNever: + return parseNeverDef(); + case ZodFirstPartyTypeKind.ZodEffects: + return parseEffectsDef(def, refs, forceResolution); + case ZodFirstPartyTypeKind.ZodAny: + return parseAnyDef(); + case ZodFirstPartyTypeKind.ZodUnknown: + return parseUnknownDef(); + case ZodFirstPartyTypeKind.ZodDefault: + return parseDefaultDef(def, refs); + case ZodFirstPartyTypeKind.ZodBranded: + return parseBrandedDef(def, refs); + case ZodFirstPartyTypeKind.ZodReadonly: + return parseReadonlyDef(def, refs); + case ZodFirstPartyTypeKind.ZodCatch: + return parseCatchDef(def, refs); + case ZodFirstPartyTypeKind.ZodPipeline: + return parsePipelineDef(def, refs); + case ZodFirstPartyTypeKind.ZodFunction: + case ZodFirstPartyTypeKind.ZodVoid: + case ZodFirstPartyTypeKind.ZodSymbol: + return undefined; + default: + return ((_: never) => undefined)(typeName); + } +}; + +const addMeta = (def: ZodTypeDef, refs: Refs, jsonSchema: JsonSchema7Type): JsonSchema7Type => { + if (def.description) { + jsonSchema.description = def.description; + + if (refs.markdownDescription) { + jsonSchema.markdownDescription = def.description; + } + } + return jsonSchema; +}; diff --git a/node_modules/openai/src/_vendor/zod-to-json-schema/parsers/any.ts b/node_modules/openai/src/_vendor/zod-to-json-schema/parsers/any.ts new file mode 100644 index 
0000000..68c2921 --- /dev/null +++ b/node_modules/openai/src/_vendor/zod-to-json-schema/parsers/any.ts @@ -0,0 +1,5 @@ +export type JsonSchema7AnyType = {}; + +export function parseAnyDef(): JsonSchema7AnyType { + return {}; +} diff --git a/node_modules/openai/src/_vendor/zod-to-json-schema/parsers/array.ts b/node_modules/openai/src/_vendor/zod-to-json-schema/parsers/array.ts new file mode 100644 index 0000000..3e8578f --- /dev/null +++ b/node_modules/openai/src/_vendor/zod-to-json-schema/parsers/array.ts @@ -0,0 +1,36 @@ +import { ZodArrayDef, ZodFirstPartyTypeKind } from 'zod'; +import { ErrorMessages, setResponseValueAndErrors } from '../errorMessages'; +import { JsonSchema7Type, parseDef } from '../parseDef'; +import { Refs } from '../Refs'; + +export type JsonSchema7ArrayType = { + type: 'array'; + items?: JsonSchema7Type | undefined; + minItems?: number; + maxItems?: number; + errorMessages?: ErrorMessages; +}; + +export function parseArrayDef(def: ZodArrayDef, refs: Refs) { + const res: JsonSchema7ArrayType = { + type: 'array', + }; + if (def.type?._def?.typeName !== ZodFirstPartyTypeKind.ZodAny) { + res.items = parseDef(def.type._def, { + ...refs, + currentPath: [...refs.currentPath, 'items'], + }); + } + + if (def.minLength) { + setResponseValueAndErrors(res, 'minItems', def.minLength.value, def.minLength.message, refs); + } + if (def.maxLength) { + setResponseValueAndErrors(res, 'maxItems', def.maxLength.value, def.maxLength.message, refs); + } + if (def.exactLength) { + setResponseValueAndErrors(res, 'minItems', def.exactLength.value, def.exactLength.message, refs); + setResponseValueAndErrors(res, 'maxItems', def.exactLength.value, def.exactLength.message, refs); + } + return res; +} diff --git a/node_modules/openai/src/_vendor/zod-to-json-schema/parsers/bigint.ts b/node_modules/openai/src/_vendor/zod-to-json-schema/parsers/bigint.ts new file mode 100644 index 0000000..f467841 --- /dev/null +++ b/node_modules/openai/src/_vendor/zod-to-json-schema/parsers/bigint.ts @@ -0,0 +1,60 @@ +import { ZodBigIntDef } from 'zod'; +import { Refs } from '../Refs'; +import { ErrorMessages, setResponseValueAndErrors } from '../errorMessages'; + +export type JsonSchema7BigintType = { + type: 'integer'; + format: 'int64'; + minimum?: BigInt; + exclusiveMinimum?: BigInt; + maximum?: BigInt; + exclusiveMaximum?: BigInt; + multipleOf?: BigInt; + errorMessage?: ErrorMessages; +}; + +export function parseBigintDef(def: ZodBigIntDef, refs: Refs): JsonSchema7BigintType { + const res: JsonSchema7BigintType = { + type: 'integer', + format: 'int64', + }; + + if (!def.checks) return res; + + for (const check of def.checks) { + switch (check.kind) { + case 'min': + if (refs.target === 'jsonSchema7') { + if (check.inclusive) { + setResponseValueAndErrors(res, 'minimum', check.value, check.message, refs); + } else { + setResponseValueAndErrors(res, 'exclusiveMinimum', check.value, check.message, refs); + } + } else { + if (!check.inclusive) { + res.exclusiveMinimum = true as any; + } + setResponseValueAndErrors(res, 'minimum', check.value, check.message, refs); + } + break; + case 'max': + if (refs.target === 'jsonSchema7') { + if (check.inclusive) { + setResponseValueAndErrors(res, 'maximum', check.value, check.message, refs); + } else { + setResponseValueAndErrors(res, 'exclusiveMaximum', check.value, check.message, refs); + } + } else { + if (!check.inclusive) { + res.exclusiveMaximum = true as any; + } + setResponseValueAndErrors(res, 'maximum', check.value, check.message, refs); + } + break; + case 
'multipleOf': + setResponseValueAndErrors(res, 'multipleOf', check.value, check.message, refs); + break; + } + } + return res; +} diff --git a/node_modules/openai/src/_vendor/zod-to-json-schema/parsers/boolean.ts b/node_modules/openai/src/_vendor/zod-to-json-schema/parsers/boolean.ts new file mode 100644 index 0000000..715e41a --- /dev/null +++ b/node_modules/openai/src/_vendor/zod-to-json-schema/parsers/boolean.ts @@ -0,0 +1,9 @@ +export type JsonSchema7BooleanType = { + type: 'boolean'; +}; + +export function parseBooleanDef(): JsonSchema7BooleanType { + return { + type: 'boolean', + }; +} diff --git a/node_modules/openai/src/_vendor/zod-to-json-schema/parsers/branded.ts b/node_modules/openai/src/_vendor/zod-to-json-schema/parsers/branded.ts new file mode 100644 index 0000000..2242580 --- /dev/null +++ b/node_modules/openai/src/_vendor/zod-to-json-schema/parsers/branded.ts @@ -0,0 +1,7 @@ +import { ZodBrandedDef } from 'zod'; +import { parseDef } from '../parseDef'; +import { Refs } from '../Refs'; + +export function parseBrandedDef(_def: ZodBrandedDef, refs: Refs) { + return parseDef(_def.type._def, refs); +} diff --git a/node_modules/openai/src/_vendor/zod-to-json-schema/parsers/catch.ts b/node_modules/openai/src/_vendor/zod-to-json-schema/parsers/catch.ts new file mode 100644 index 0000000..5cce3af --- /dev/null +++ b/node_modules/openai/src/_vendor/zod-to-json-schema/parsers/catch.ts @@ -0,0 +1,7 @@ +import { ZodCatchDef } from 'zod'; +import { parseDef } from '../parseDef'; +import { Refs } from '../Refs'; + +export const parseCatchDef = (def: ZodCatchDef, refs: Refs) => { + return parseDef(def.innerType._def, refs); +}; diff --git a/node_modules/openai/src/_vendor/zod-to-json-schema/parsers/date.ts b/node_modules/openai/src/_vendor/zod-to-json-schema/parsers/date.ts new file mode 100644 index 0000000..4afc4e8 --- /dev/null +++ b/node_modules/openai/src/_vendor/zod-to-json-schema/parsers/date.ts @@ -0,0 +1,83 @@ +import { ZodDateDef } from 'zod'; +import { Refs } from '../Refs'; +import { ErrorMessages, setResponseValueAndErrors } from '../errorMessages'; +import { JsonSchema7NumberType } from './number'; +import { DateStrategy } from '../Options'; + +export type JsonSchema7DateType = + | { + type: 'integer' | 'string'; + format: 'unix-time' | 'date-time' | 'date'; + minimum?: number; + maximum?: number; + errorMessage?: ErrorMessages; + } + | { + anyOf: JsonSchema7DateType[]; + }; + +export function parseDateDef( + def: ZodDateDef, + refs: Refs, + overrideDateStrategy?: DateStrategy, +): JsonSchema7DateType { + const strategy = overrideDateStrategy ?? 
refs.dateStrategy; + + if (Array.isArray(strategy)) { + return { + anyOf: strategy.map((item, i) => parseDateDef(def, refs, item)), + }; + } + + switch (strategy) { + case 'string': + case 'format:date-time': + return { + type: 'string', + format: 'date-time', + }; + case 'format:date': + return { + type: 'string', + format: 'date', + }; + case 'integer': + return integerDateParser(def, refs); + } +} + +const integerDateParser = (def: ZodDateDef, refs: Refs) => { + const res: JsonSchema7DateType = { + type: 'integer', + format: 'unix-time', + }; + + if (refs.target === 'openApi3') { + return res; + } + + for (const check of def.checks) { + switch (check.kind) { + case 'min': + setResponseValueAndErrors( + res, + 'minimum', + check.value, // This is in milliseconds + check.message, + refs, + ); + break; + case 'max': + setResponseValueAndErrors( + res, + 'maximum', + check.value, // This is in milliseconds + check.message, + refs, + ); + break; + } + } + + return res; +}; diff --git a/node_modules/openai/src/_vendor/zod-to-json-schema/parsers/default.ts b/node_modules/openai/src/_vendor/zod-to-json-schema/parsers/default.ts new file mode 100644 index 0000000..f717260 --- /dev/null +++ b/node_modules/openai/src/_vendor/zod-to-json-schema/parsers/default.ts @@ -0,0 +1,10 @@ +import { ZodDefaultDef } from 'zod'; +import { JsonSchema7Type, parseDef } from '../parseDef'; +import { Refs } from '../Refs'; + +export function parseDefaultDef(_def: ZodDefaultDef, refs: Refs): JsonSchema7Type & { default: any } { + return { + ...parseDef(_def.innerType._def, refs), + default: _def.defaultValue(), + }; +} diff --git a/node_modules/openai/src/_vendor/zod-to-json-schema/parsers/effects.ts b/node_modules/openai/src/_vendor/zod-to-json-schema/parsers/effects.ts new file mode 100644 index 0000000..b010d5c --- /dev/null +++ b/node_modules/openai/src/_vendor/zod-to-json-schema/parsers/effects.ts @@ -0,0 +1,11 @@ +import { ZodEffectsDef } from 'zod'; +import { JsonSchema7Type, parseDef } from '../parseDef'; +import { Refs } from '../Refs'; + +export function parseEffectsDef( + _def: ZodEffectsDef, + refs: Refs, + forceResolution: boolean, +): JsonSchema7Type | undefined { + return refs.effectStrategy === 'input' ? 
parseDef(_def.schema._def, refs, forceResolution) : {}; +} diff --git a/node_modules/openai/src/_vendor/zod-to-json-schema/parsers/enum.ts b/node_modules/openai/src/_vendor/zod-to-json-schema/parsers/enum.ts new file mode 100644 index 0000000..d6f5ceb --- /dev/null +++ b/node_modules/openai/src/_vendor/zod-to-json-schema/parsers/enum.ts @@ -0,0 +1,13 @@ +import { ZodEnumDef } from 'zod'; + +export type JsonSchema7EnumType = { + type: 'string'; + enum: string[]; +}; + +export function parseEnumDef(def: ZodEnumDef): JsonSchema7EnumType { + return { + type: 'string', + enum: [...def.values], + }; +} diff --git a/node_modules/openai/src/_vendor/zod-to-json-schema/parsers/intersection.ts b/node_modules/openai/src/_vendor/zod-to-json-schema/parsers/intersection.ts new file mode 100644 index 0000000..af5f042 --- /dev/null +++ b/node_modules/openai/src/_vendor/zod-to-json-schema/parsers/intersection.ts @@ -0,0 +1,64 @@ +import { ZodIntersectionDef } from 'zod'; +import { JsonSchema7Type, parseDef } from '../parseDef'; +import { Refs } from '../Refs'; +import { JsonSchema7StringType } from './string'; + +export type JsonSchema7AllOfType = { + allOf: JsonSchema7Type[]; + unevaluatedProperties?: boolean; +}; + +const isJsonSchema7AllOfType = ( + type: JsonSchema7Type | JsonSchema7StringType, +): type is JsonSchema7AllOfType => { + if ('type' in type && type.type === 'string') return false; + return 'allOf' in type; +}; + +export function parseIntersectionDef( + def: ZodIntersectionDef, + refs: Refs, +): JsonSchema7AllOfType | JsonSchema7Type | undefined { + const allOf = [ + parseDef(def.left._def, { + ...refs, + currentPath: [...refs.currentPath, 'allOf', '0'], + }), + parseDef(def.right._def, { + ...refs, + currentPath: [...refs.currentPath, 'allOf', '1'], + }), + ].filter((x): x is JsonSchema7Type => !!x); + + let unevaluatedProperties: Pick | undefined = + refs.target === 'jsonSchema2019-09' ? { unevaluatedProperties: false } : undefined; + + const mergedAllOf: JsonSchema7Type[] = []; + // If either of the schemas is an allOf, merge them into a single allOf + allOf.forEach((schema) => { + if (isJsonSchema7AllOfType(schema)) { + mergedAllOf.push(...schema.allOf); + if (schema.unevaluatedProperties === undefined) { + // If one of the schemas has no unevaluatedProperties set, + // the merged schema should also have no unevaluatedProperties set + unevaluatedProperties = undefined; + } + } else { + let nestedSchema: JsonSchema7Type = schema; + if ('additionalProperties' in schema && schema.additionalProperties === false) { + const { additionalProperties, ...rest } = schema; + nestedSchema = rest; + } else { + // As soon as one of the schemas has additionalProperties set not to false, we allow unevaluatedProperties + unevaluatedProperties = undefined; + } + mergedAllOf.push(nestedSchema); + } + }); + return mergedAllOf.length ? 
+ { + allOf: mergedAllOf, + ...unevaluatedProperties, + } + : undefined; +} diff --git a/node_modules/openai/src/_vendor/zod-to-json-schema/parsers/literal.ts b/node_modules/openai/src/_vendor/zod-to-json-schema/parsers/literal.ts new file mode 100644 index 0000000..a35625c --- /dev/null +++ b/node_modules/openai/src/_vendor/zod-to-json-schema/parsers/literal.ts @@ -0,0 +1,37 @@ +import { ZodLiteralDef } from 'zod'; +import { Refs } from '../Refs'; + +export type JsonSchema7LiteralType = + | { + type: 'string' | 'number' | 'integer' | 'boolean'; + const: string | number | boolean; + } + | { + type: 'object' | 'array'; + }; + +export function parseLiteralDef(def: ZodLiteralDef, refs: Refs): JsonSchema7LiteralType { + const parsedType = typeof def.value; + if ( + parsedType !== 'bigint' && + parsedType !== 'number' && + parsedType !== 'boolean' && + parsedType !== 'string' + ) { + return { + type: Array.isArray(def.value) ? 'array' : 'object', + }; + } + + if (refs.target === 'openApi3') { + return { + type: parsedType === 'bigint' ? 'integer' : parsedType, + enum: [def.value], + } as any; + } + + return { + type: parsedType === 'bigint' ? 'integer' : parsedType, + const: def.value, + }; +} diff --git a/node_modules/openai/src/_vendor/zod-to-json-schema/parsers/map.ts b/node_modules/openai/src/_vendor/zod-to-json-schema/parsers/map.ts new file mode 100644 index 0000000..5084ccd --- /dev/null +++ b/node_modules/openai/src/_vendor/zod-to-json-schema/parsers/map.ts @@ -0,0 +1,42 @@ +import { ZodMapDef } from 'zod'; +import { JsonSchema7Type, parseDef } from '../parseDef'; +import { Refs } from '../Refs'; +import { JsonSchema7RecordType, parseRecordDef } from './record'; + +export type JsonSchema7MapType = { + type: 'array'; + maxItems: 125; + items: { + type: 'array'; + items: [JsonSchema7Type, JsonSchema7Type]; + minItems: 2; + maxItems: 2; + }; +}; + +export function parseMapDef(def: ZodMapDef, refs: Refs): JsonSchema7MapType | JsonSchema7RecordType { + if (refs.mapStrategy === 'record') { + return parseRecordDef(def, refs); + } + + const keys = + parseDef(def.keyType._def, { + ...refs, + currentPath: [...refs.currentPath, 'items', 'items', '0'], + }) || {}; + const values = + parseDef(def.valueType._def, { + ...refs, + currentPath: [...refs.currentPath, 'items', 'items', '1'], + }) || {}; + return { + type: 'array', + maxItems: 125, + items: { + type: 'array', + items: [keys, values], + minItems: 2, + maxItems: 2, + }, + }; +} diff --git a/node_modules/openai/src/_vendor/zod-to-json-schema/parsers/nativeEnum.ts b/node_modules/openai/src/_vendor/zod-to-json-schema/parsers/nativeEnum.ts new file mode 100644 index 0000000..a2ed901 --- /dev/null +++ b/node_modules/openai/src/_vendor/zod-to-json-schema/parsers/nativeEnum.ts @@ -0,0 +1,27 @@ +import { ZodNativeEnumDef } from 'zod'; + +export type JsonSchema7NativeEnumType = { + type: 'string' | 'number' | ['string', 'number']; + enum: (string | number)[]; +}; + +export function parseNativeEnumDef(def: ZodNativeEnumDef): JsonSchema7NativeEnumType { + const object = def.values; + const actualKeys = Object.keys(def.values).filter((key: string) => { + return typeof object[object[key]!] !== 'number'; + }); + + const actualValues = actualKeys.map((key: string) => object[key]!); + + const parsedTypes = Array.from(new Set(actualValues.map((values: string | number) => typeof values))); + + return { + type: + parsedTypes.length === 1 ? + parsedTypes[0] === 'string' ? 
+ 'string' + : 'number' + : ['string', 'number'], + enum: actualValues, + }; +} diff --git a/node_modules/openai/src/_vendor/zod-to-json-schema/parsers/never.ts b/node_modules/openai/src/_vendor/zod-to-json-schema/parsers/never.ts new file mode 100644 index 0000000..a5c7383 --- /dev/null +++ b/node_modules/openai/src/_vendor/zod-to-json-schema/parsers/never.ts @@ -0,0 +1,9 @@ +export type JsonSchema7NeverType = { + not: {}; +}; + +export function parseNeverDef(): JsonSchema7NeverType { + return { + not: {}, + }; +} diff --git a/node_modules/openai/src/_vendor/zod-to-json-schema/parsers/null.ts b/node_modules/openai/src/_vendor/zod-to-json-schema/parsers/null.ts new file mode 100644 index 0000000..e1fe113 --- /dev/null +++ b/node_modules/openai/src/_vendor/zod-to-json-schema/parsers/null.ts @@ -0,0 +1,16 @@ +import { Refs } from '../Refs'; + +export type JsonSchema7NullType = { + type: 'null'; +}; + +export function parseNullDef(refs: Refs): JsonSchema7NullType { + return refs.target === 'openApi3' ? + ({ + enum: ['null'], + nullable: true, + } as any) + : { + type: 'null', + }; +} diff --git a/node_modules/openai/src/_vendor/zod-to-json-schema/parsers/nullable.ts b/node_modules/openai/src/_vendor/zod-to-json-schema/parsers/nullable.ts new file mode 100644 index 0000000..0d70636 --- /dev/null +++ b/node_modules/openai/src/_vendor/zod-to-json-schema/parsers/nullable.ts @@ -0,0 +1,49 @@ +import { ZodNullableDef } from 'zod'; +import { JsonSchema7Type, parseDef } from '../parseDef'; +import { Refs } from '../Refs'; +import { JsonSchema7NullType } from './null'; +import { primitiveMappings } from './union'; + +export type JsonSchema7NullableType = + | { + anyOf: [JsonSchema7Type, JsonSchema7NullType]; + } + | { + type: [string, 'null']; + }; + +export function parseNullableDef(def: ZodNullableDef, refs: Refs): JsonSchema7NullableType | undefined { + if ( + ['ZodString', 'ZodNumber', 'ZodBigInt', 'ZodBoolean', 'ZodNull'].includes(def.innerType._def.typeName) && + (!def.innerType._def.checks || !def.innerType._def.checks.length) + ) { + if (refs.target === 'openApi3' || refs.nullableStrategy === 'property') { + return { + type: primitiveMappings[def.innerType._def.typeName as keyof typeof primitiveMappings], + nullable: true, + } as any; + } + + return { + type: [primitiveMappings[def.innerType._def.typeName as keyof typeof primitiveMappings], 'null'], + }; + } + + if (refs.target === 'openApi3') { + const base = parseDef(def.innerType._def, { + ...refs, + currentPath: [...refs.currentPath], + }); + + if (base && '$ref' in base) return { allOf: [base], nullable: true } as any; + + return base && ({ ...base, nullable: true } as any); + } + + const base = parseDef(def.innerType._def, { + ...refs, + currentPath: [...refs.currentPath, 'anyOf', '0'], + }); + + return base && { anyOf: [base, { type: 'null' }] }; +} diff --git a/node_modules/openai/src/_vendor/zod-to-json-schema/parsers/number.ts b/node_modules/openai/src/_vendor/zod-to-json-schema/parsers/number.ts new file mode 100644 index 0000000..45a1f3c --- /dev/null +++ b/node_modules/openai/src/_vendor/zod-to-json-schema/parsers/number.ts @@ -0,0 +1,62 @@ +import { ZodNumberDef } from 'zod'; +import { addErrorMessage, ErrorMessages, setResponseValueAndErrors } from '../errorMessages'; +import { Refs } from '../Refs'; + +export type JsonSchema7NumberType = { + type: 'number' | 'integer'; + minimum?: number; + exclusiveMinimum?: number; + maximum?: number; + exclusiveMaximum?: number; + multipleOf?: number; + errorMessage?: ErrorMessages; +}; + 
+export function parseNumberDef(def: ZodNumberDef, refs: Refs): JsonSchema7NumberType { + const res: JsonSchema7NumberType = { + type: 'number', + }; + + if (!def.checks) return res; + + for (const check of def.checks) { + switch (check.kind) { + case 'int': + res.type = 'integer'; + addErrorMessage(res, 'type', check.message, refs); + break; + case 'min': + if (refs.target === 'jsonSchema7') { + if (check.inclusive) { + setResponseValueAndErrors(res, 'minimum', check.value, check.message, refs); + } else { + setResponseValueAndErrors(res, 'exclusiveMinimum', check.value, check.message, refs); + } + } else { + if (!check.inclusive) { + res.exclusiveMinimum = true as any; + } + setResponseValueAndErrors(res, 'minimum', check.value, check.message, refs); + } + break; + case 'max': + if (refs.target === 'jsonSchema7') { + if (check.inclusive) { + setResponseValueAndErrors(res, 'maximum', check.value, check.message, refs); + } else { + setResponseValueAndErrors(res, 'exclusiveMaximum', check.value, check.message, refs); + } + } else { + if (!check.inclusive) { + res.exclusiveMaximum = true as any; + } + setResponseValueAndErrors(res, 'maximum', check.value, check.message, refs); + } + break; + case 'multipleOf': + setResponseValueAndErrors(res, 'multipleOf', check.value, check.message, refs); + break; + } + } + return res; +} diff --git a/node_modules/openai/src/_vendor/zod-to-json-schema/parsers/object.ts b/node_modules/openai/src/_vendor/zod-to-json-schema/parsers/object.ts new file mode 100644 index 0000000..f2120c8 --- /dev/null +++ b/node_modules/openai/src/_vendor/zod-to-json-schema/parsers/object.ts @@ -0,0 +1,63 @@ +import { ZodObjectDef } from 'zod'; +import { JsonSchema7Type, parseDef } from '../parseDef'; +import { Refs } from '../Refs'; + +function decideAdditionalProperties(def: ZodObjectDef, refs: Refs) { + if (refs.removeAdditionalStrategy === 'strict') { + return def.catchall._def.typeName === 'ZodNever' ? + def.unknownKeys !== 'strict' + : parseDef(def.catchall._def, { + ...refs, + currentPath: [...refs.currentPath, 'additionalProperties'], + }) ?? true; + } else { + return def.catchall._def.typeName === 'ZodNever' ? + def.unknownKeys === 'passthrough' + : parseDef(def.catchall._def, { + ...refs, + currentPath: [...refs.currentPath, 'additionalProperties'], + }) ?? true; + } +} + +export type JsonSchema7ObjectType = { + type: 'object'; + properties: Record; + additionalProperties: boolean | JsonSchema7Type; + required?: string[]; +}; + +export function parseObjectDef(def: ZodObjectDef, refs: Refs) { + const result: JsonSchema7ObjectType = { + type: 'object', + ...Object.entries(def.shape()).reduce( + ( + acc: { + properties: Record; + required: string[]; + }, + [propName, propDef], + ) => { + if (propDef === undefined || propDef._def === undefined) return acc; + const parsedDef = parseDef(propDef._def, { + ...refs, + currentPath: [...refs.currentPath, 'properties', propName], + propertyPath: [...refs.currentPath, 'properties', propName], + }); + if (parsedDef === undefined) return acc; + return { + properties: { + ...acc.properties, + [propName]: parsedDef, + }, + required: + propDef.isOptional() && !refs.openaiStrictMode ? 
acc.required : [...acc.required, propName], + }; + }, + { properties: {}, required: [] }, + ), + additionalProperties: decideAdditionalProperties(def, refs), + }; + if (!result.required!.length) delete result.required; + return result; +} diff --git a/node_modules/openai/src/_vendor/zod-to-json-schema/parsers/optional.ts b/node_modules/openai/src/_vendor/zod-to-json-schema/parsers/optional.ts new file mode 100644 index 0000000..9b3e973 --- /dev/null +++ b/node_modules/openai/src/_vendor/zod-to-json-schema/parsers/optional.ts @@ -0,0 +1,25 @@ +import { ZodOptionalDef } from 'zod'; +import { JsonSchema7Type, parseDef } from '../parseDef'; +import { Refs } from '../Refs'; + +export const parseOptionalDef = (def: ZodOptionalDef, refs: Refs): JsonSchema7Type | undefined => { + if (refs.currentPath.toString() === refs.propertyPath?.toString()) { + return parseDef(def.innerType._def, refs); + } + + const innerSchema = parseDef(def.innerType._def, { + ...refs, + currentPath: [...refs.currentPath, 'anyOf', '1'], + }); + + return innerSchema ? + { + anyOf: [ + { + not: {}, + }, + innerSchema, + ], + } + : {}; +}; diff --git a/node_modules/openai/src/_vendor/zod-to-json-schema/parsers/pipeline.ts b/node_modules/openai/src/_vendor/zod-to-json-schema/parsers/pipeline.ts new file mode 100644 index 0000000..7fdcbae --- /dev/null +++ b/node_modules/openai/src/_vendor/zod-to-json-schema/parsers/pipeline.ts @@ -0,0 +1,28 @@ +import { ZodPipelineDef } from 'zod'; +import { JsonSchema7Type, parseDef } from '../parseDef'; +import { Refs } from '../Refs'; +import { JsonSchema7AllOfType } from './intersection'; + +export const parsePipelineDef = ( + def: ZodPipelineDef, + refs: Refs, +): JsonSchema7AllOfType | JsonSchema7Type | undefined => { + if (refs.pipeStrategy === 'input') { + return parseDef(def.in._def, refs); + } else if (refs.pipeStrategy === 'output') { + return parseDef(def.out._def, refs); + } + + const a = parseDef(def.in._def, { + ...refs, + currentPath: [...refs.currentPath, 'allOf', '0'], + }); + const b = parseDef(def.out._def, { + ...refs, + currentPath: [...refs.currentPath, 'allOf', a ? 
'1' : '0'], + }); + + return { + allOf: [a, b].filter((x): x is JsonSchema7Type => x !== undefined), + }; +}; diff --git a/node_modules/openai/src/_vendor/zod-to-json-schema/parsers/promise.ts b/node_modules/openai/src/_vendor/zod-to-json-schema/parsers/promise.ts new file mode 100644 index 0000000..f586d11 --- /dev/null +++ b/node_modules/openai/src/_vendor/zod-to-json-schema/parsers/promise.ts @@ -0,0 +1,7 @@ +import { ZodPromiseDef } from 'zod'; +import { JsonSchema7Type, parseDef } from '../parseDef'; +import { Refs } from '../Refs'; + +export function parsePromiseDef(def: ZodPromiseDef, refs: Refs): JsonSchema7Type | undefined { + return parseDef(def.type._def, refs); +} diff --git a/node_modules/openai/src/_vendor/zod-to-json-schema/parsers/readonly.ts b/node_modules/openai/src/_vendor/zod-to-json-schema/parsers/readonly.ts new file mode 100644 index 0000000..cecb937 --- /dev/null +++ b/node_modules/openai/src/_vendor/zod-to-json-schema/parsers/readonly.ts @@ -0,0 +1,7 @@ +import { ZodReadonlyDef } from 'zod'; +import { parseDef } from '../parseDef'; +import { Refs } from '../Refs'; + +export const parseReadonlyDef = (def: ZodReadonlyDef, refs: Refs) => { + return parseDef(def.innerType._def, refs); +}; diff --git a/node_modules/openai/src/_vendor/zod-to-json-schema/parsers/record.ts b/node_modules/openai/src/_vendor/zod-to-json-schema/parsers/record.ts new file mode 100644 index 0000000..7eff507 --- /dev/null +++ b/node_modules/openai/src/_vendor/zod-to-json-schema/parsers/record.ts @@ -0,0 +1,73 @@ +import { ZodFirstPartyTypeKind, ZodMapDef, ZodRecordDef, ZodTypeAny } from 'zod'; +import { JsonSchema7Type, parseDef } from '../parseDef'; +import { Refs } from '../Refs'; +import { JsonSchema7EnumType } from './enum'; +import { JsonSchema7ObjectType } from './object'; +import { JsonSchema7StringType, parseStringDef } from './string'; + +type JsonSchema7RecordPropertyNamesType = + | Omit + | Omit; + +export type JsonSchema7RecordType = { + type: 'object'; + additionalProperties: JsonSchema7Type; + propertyNames?: JsonSchema7RecordPropertyNamesType; +}; + +export function parseRecordDef( + def: ZodRecordDef | ZodMapDef, + refs: Refs, +): JsonSchema7RecordType { + if (refs.target === 'openApi3' && def.keyType?._def.typeName === ZodFirstPartyTypeKind.ZodEnum) { + return { + type: 'object', + required: def.keyType._def.values, + properties: def.keyType._def.values.reduce( + (acc: Record, key: string) => ({ + ...acc, + [key]: + parseDef(def.valueType._def, { + ...refs, + currentPath: [...refs.currentPath, 'properties', key], + }) ?? {}, + }), + {}, + ), + additionalProperties: false, + } satisfies JsonSchema7ObjectType as any; + } + + const schema: JsonSchema7RecordType = { + type: 'object', + additionalProperties: + parseDef(def.valueType._def, { + ...refs, + currentPath: [...refs.currentPath, 'additionalProperties'], + }) ?? {}, + }; + + if (refs.target === 'openApi3') { + return schema; + } + + if (def.keyType?._def.typeName === ZodFirstPartyTypeKind.ZodString && def.keyType._def.checks?.length) { + const keyType: JsonSchema7RecordPropertyNamesType = Object.entries( + parseStringDef(def.keyType._def, refs), + ).reduce((acc, [key, value]) => (key === 'type' ? 
acc : { ...acc, [key]: value }), {}); + + return { + ...schema, + propertyNames: keyType, + }; + } else if (def.keyType?._def.typeName === ZodFirstPartyTypeKind.ZodEnum) { + return { + ...schema, + propertyNames: { + enum: def.keyType._def.values, + }, + }; + } + + return schema; +} diff --git a/node_modules/openai/src/_vendor/zod-to-json-schema/parsers/set.ts b/node_modules/openai/src/_vendor/zod-to-json-schema/parsers/set.ts new file mode 100644 index 0000000..05fa9ed --- /dev/null +++ b/node_modules/openai/src/_vendor/zod-to-json-schema/parsers/set.ts @@ -0,0 +1,36 @@ +import { ZodSetDef } from 'zod'; +import { ErrorMessages, setResponseValueAndErrors } from '../errorMessages'; +import { JsonSchema7Type, parseDef } from '../parseDef'; +import { Refs } from '../Refs'; + +export type JsonSchema7SetType = { + type: 'array'; + uniqueItems: true; + items?: JsonSchema7Type | undefined; + minItems?: number; + maxItems?: number; + errorMessage?: ErrorMessages; +}; + +export function parseSetDef(def: ZodSetDef, refs: Refs): JsonSchema7SetType { + const items = parseDef(def.valueType._def, { + ...refs, + currentPath: [...refs.currentPath, 'items'], + }); + + const schema: JsonSchema7SetType = { + type: 'array', + uniqueItems: true, + items, + }; + + if (def.minSize) { + setResponseValueAndErrors(schema, 'minItems', def.minSize.value, def.minSize.message, refs); + } + + if (def.maxSize) { + setResponseValueAndErrors(schema, 'maxItems', def.maxSize.value, def.maxSize.message, refs); + } + + return schema; +} diff --git a/node_modules/openai/src/_vendor/zod-to-json-schema/parsers/string.ts b/node_modules/openai/src/_vendor/zod-to-json-schema/parsers/string.ts new file mode 100644 index 0000000..daa1a95 --- /dev/null +++ b/node_modules/openai/src/_vendor/zod-to-json-schema/parsers/string.ts @@ -0,0 +1,400 @@ +// @ts-nocheck +import { ZodStringDef } from 'zod'; +import { ErrorMessages, setResponseValueAndErrors } from '../errorMessages'; +import { Refs } from '../Refs'; + +let emojiRegex: RegExp | undefined; + +/** + * Generated from the regular expressions found here as of 2024-05-22: + * https://github.com/colinhacks/zod/blob/master/src/types.ts. + * + * Expressions with /i flag have been changed accordingly. + */ +export const zodPatterns = { + /** + * `c` was changed to `[cC]` to replicate /i flag + */ + cuid: /^[cC][^\s-]{8,}$/, + cuid2: /^[0-9a-z]+$/, + ulid: /^[0-9A-HJKMNP-TV-Z]{26}$/, + /** + * `a-z` was added to replicate /i flag + */ + email: /^(?!\.)(?!.*\.\.)([a-zA-Z0-9_'+\-\.]*)[a-zA-Z0-9_+-]@([a-zA-Z0-9][a-zA-Z0-9\-]*\.)+[a-zA-Z]{2,}$/, + /** + * Constructed a valid Unicode RegExp + * + * Lazily instantiate since this type of regex isn't supported + * in all envs (e.g. React Native). 
+ * + * See: + * https://github.com/colinhacks/zod/issues/2433 + * Fix in Zod: + * https://github.com/colinhacks/zod/commit/9340fd51e48576a75adc919bff65dbc4a5d4c99b + */ + emoji: () => { + if (emojiRegex === undefined) { + emojiRegex = RegExp('^(\\p{Extended_Pictographic}|\\p{Emoji_Component})+$', 'u'); + } + return emojiRegex; + }, + /** + * Unused + */ + uuid: /^[0-9a-fA-F]{8}\b-[0-9a-fA-F]{4}\b-[0-9a-fA-F]{4}\b-[0-9a-fA-F]{4}\b-[0-9a-fA-F]{12}$/, + /** + * Unused + */ + ipv4: /^(?:(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9][0-9]|[0-9])\.){3}(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9][0-9]|[0-9])$/, + /** + * Unused + */ + ipv6: /^(([a-f0-9]{1,4}:){7}|::([a-f0-9]{1,4}:){0,6}|([a-f0-9]{1,4}:){1}:([a-f0-9]{1,4}:){0,5}|([a-f0-9]{1,4}:){2}:([a-f0-9]{1,4}:){0,4}|([a-f0-9]{1,4}:){3}:([a-f0-9]{1,4}:){0,3}|([a-f0-9]{1,4}:){4}:([a-f0-9]{1,4}:){0,2}|([a-f0-9]{1,4}:){5}:([a-f0-9]{1,4}:){0,1})([a-f0-9]{1,4}|(((25[0-5])|(2[0-4][0-9])|(1[0-9]{2})|([0-9]{1,2}))\.){3}((25[0-5])|(2[0-4][0-9])|(1[0-9]{2})|([0-9]{1,2})))$/, + base64: /^([0-9a-zA-Z+/]{4})*(([0-9a-zA-Z+/]{2}==)|([0-9a-zA-Z+/]{3}=))?$/, + nanoid: /^[a-zA-Z0-9_-]{21}$/, +} as const; + +export type JsonSchema7StringType = { + type: 'string'; + minLength?: number; + maxLength?: number; + format?: + | 'email' + | 'idn-email' + | 'uri' + | 'uuid' + | 'date-time' + | 'ipv4' + | 'ipv6' + | 'date' + | 'time' + | 'duration'; + pattern?: string; + allOf?: { + pattern: string; + errorMessage?: ErrorMessages<{ pattern: string }>; + }[]; + anyOf?: { + format: string; + errorMessage?: ErrorMessages<{ format: string }>; + }[]; + errorMessage?: ErrorMessages; + contentEncoding?: string; +}; + +export function parseStringDef(def: ZodStringDef, refs: Refs): JsonSchema7StringType { + const res: JsonSchema7StringType = { + type: 'string', + }; + + function processPattern(value: string): string { + return refs.patternStrategy === 'escape' ? escapeNonAlphaNumeric(value) : value; + } + + if (def.checks) { + for (const check of def.checks) { + switch (check.kind) { + case 'min': + setResponseValueAndErrors( + res, + 'minLength', + typeof res.minLength === 'number' ? Math.max(res.minLength, check.value) : check.value, + check.message, + refs, + ); + break; + case 'max': + setResponseValueAndErrors( + res, + 'maxLength', + typeof res.maxLength === 'number' ? 
Math.min(res.maxLength, check.value) : check.value, + check.message, + refs, + ); + + break; + case 'email': + switch (refs.emailStrategy) { + case 'format:email': + addFormat(res, 'email', check.message, refs); + break; + case 'format:idn-email': + addFormat(res, 'idn-email', check.message, refs); + break; + case 'pattern:zod': + addPattern(res, zodPatterns.email, check.message, refs); + break; + } + + break; + case 'url': + addFormat(res, 'uri', check.message, refs); + break; + case 'uuid': + addFormat(res, 'uuid', check.message, refs); + break; + case 'regex': + addPattern(res, check.regex, check.message, refs); + break; + case 'cuid': + addPattern(res, zodPatterns.cuid, check.message, refs); + break; + case 'cuid2': + addPattern(res, zodPatterns.cuid2, check.message, refs); + break; + case 'startsWith': + addPattern(res, RegExp(`^${processPattern(check.value)}`), check.message, refs); + break; + case 'endsWith': + addPattern(res, RegExp(`${processPattern(check.value)}$`), check.message, refs); + break; + + case 'datetime': + addFormat(res, 'date-time', check.message, refs); + break; + case 'date': + addFormat(res, 'date', check.message, refs); + break; + case 'time': + addFormat(res, 'time', check.message, refs); + break; + case 'duration': + addFormat(res, 'duration', check.message, refs); + break; + case 'length': + setResponseValueAndErrors( + res, + 'minLength', + typeof res.minLength === 'number' ? Math.max(res.minLength, check.value) : check.value, + check.message, + refs, + ); + setResponseValueAndErrors( + res, + 'maxLength', + typeof res.maxLength === 'number' ? Math.min(res.maxLength, check.value) : check.value, + check.message, + refs, + ); + break; + case 'includes': { + addPattern(res, RegExp(processPattern(check.value)), check.message, refs); + break; + } + case 'ip': { + if (check.version !== 'v6') { + addFormat(res, 'ipv4', check.message, refs); + } + if (check.version !== 'v4') { + addFormat(res, 'ipv6', check.message, refs); + } + break; + } + case 'emoji': + addPattern(res, zodPatterns.emoji, check.message, refs); + break; + case 'ulid': { + addPattern(res, zodPatterns.ulid, check.message, refs); + break; + } + case 'base64': { + switch (refs.base64Strategy) { + case 'format:binary': { + addFormat(res, 'binary' as any, check.message, refs); + break; + } + + case 'contentEncoding:base64': { + setResponseValueAndErrors(res, 'contentEncoding', 'base64', check.message, refs); + break; + } + + case 'pattern:zod': { + addPattern(res, zodPatterns.base64, check.message, refs); + break; + } + } + break; + } + case 'nanoid': { + addPattern(res, zodPatterns.nanoid, check.message, refs); + } + case 'toLowerCase': + case 'toUpperCase': + case 'trim': + break; + default: + ((_: never) => {})(check); + } + } + } + + return res; +} + +const escapeNonAlphaNumeric = (value: string) => + Array.from(value) + .map((c) => (/[a-zA-Z0-9]/.test(c) ? 
c : `\\${c}`)) + .join(''); + +const addFormat = ( + schema: JsonSchema7StringType, + value: Required['format'], + message: string | undefined, + refs: Refs, +) => { + if (schema.format || schema.anyOf?.some((x) => x.format)) { + if (!schema.anyOf) { + schema.anyOf = []; + } + + if (schema.format) { + schema.anyOf!.push({ + format: schema.format, + ...(schema.errorMessage && + refs.errorMessages && { + errorMessage: { format: schema.errorMessage.format }, + }), + }); + delete schema.format; + if (schema.errorMessage) { + delete schema.errorMessage.format; + if (Object.keys(schema.errorMessage).length === 0) { + delete schema.errorMessage; + } + } + } + + schema.anyOf!.push({ + format: value, + ...(message && refs.errorMessages && { errorMessage: { format: message } }), + }); + } else { + setResponseValueAndErrors(schema, 'format', value, message, refs); + } +}; + +const addPattern = ( + schema: JsonSchema7StringType, + regex: RegExp | (() => RegExp), + message: string | undefined, + refs: Refs, +) => { + if (schema.pattern || schema.allOf?.some((x) => x.pattern)) { + if (!schema.allOf) { + schema.allOf = []; + } + + if (schema.pattern) { + schema.allOf!.push({ + pattern: schema.pattern, + ...(schema.errorMessage && + refs.errorMessages && { + errorMessage: { pattern: schema.errorMessage.pattern }, + }), + }); + delete schema.pattern; + if (schema.errorMessage) { + delete schema.errorMessage.pattern; + if (Object.keys(schema.errorMessage).length === 0) { + delete schema.errorMessage; + } + } + } + + schema.allOf!.push({ + pattern: processRegExp(regex, refs), + ...(message && refs.errorMessages && { errorMessage: { pattern: message } }), + }); + } else { + setResponseValueAndErrors(schema, 'pattern', processRegExp(regex, refs), message, refs); + } +}; + +// Mutate z.string.regex() in a best attempt to accommodate for regex flags when applyRegexFlags is true +const processRegExp = (regexOrFunction: RegExp | (() => RegExp), refs: Refs): string => { + const regex = typeof regexOrFunction === 'function' ? regexOrFunction() : regexOrFunction; + if (!refs.applyRegexFlags || !regex.flags) return regex.source; + + // Currently handled flags + const flags = { + i: regex.flags.includes('i'), // Case-insensitive + m: regex.flags.includes('m'), // `^` and `$` matches adjacent to newline characters + s: regex.flags.includes('s'), // `.` matches newlines + }; + + // The general principle here is to step through each character, one at a time, applying mutations as flags require. We keep track when the current character is escaped, and when it's inside a group /like [this]/ or (also) a range like /[a-z]/. The following is fairly brittle imperative code; edit at your peril! + + const source = flags.i ? 
regex.source.toLowerCase() : regex.source; + let pattern = ''; + let isEscaped = false; + let inCharGroup = false; + let inCharRange = false; + + for (let i = 0; i < source.length; i++) { + if (isEscaped) { + pattern += source[i]; + isEscaped = false; + continue; + } + + if (flags.i) { + if (inCharGroup) { + if (source[i].match(/[a-z]/)) { + if (inCharRange) { + pattern += source[i]; + pattern += `${source[i - 2]}-${source[i]}`.toUpperCase(); + inCharRange = false; + } else if (source[i + 1] === '-' && source[i + 2]?.match(/[a-z]/)) { + pattern += source[i]; + inCharRange = true; + } else { + pattern += `${source[i]}${source[i].toUpperCase()}`; + } + continue; + } + } else if (source[i].match(/[a-z]/)) { + pattern += `[${source[i]}${source[i].toUpperCase()}]`; + continue; + } + } + + if (flags.m) { + if (source[i] === '^') { + pattern += `(^|(?<=[\r\n]))`; + continue; + } else if (source[i] === '$') { + pattern += `($|(?=[\r\n]))`; + continue; + } + } + + if (flags.s && source[i] === '.') { + pattern += inCharGroup ? `${source[i]}\r\n` : `[${source[i]}\r\n]`; + continue; + } + + pattern += source[i]; + if (source[i] === '\\') { + isEscaped = true; + } else if (inCharGroup && source[i] === ']') { + inCharGroup = false; + } else if (!inCharGroup && source[i] === '[') { + inCharGroup = true; + } + } + + try { + const regexTest = new RegExp(pattern); + } catch { + console.warn( + `Could not convert regex pattern at ${refs.currentPath.join( + '/', + )} to a flag-independent form! Falling back to the flag-ignorant source`, + ); + return regex.source; + } + + return pattern; +}; diff --git a/node_modules/openai/src/_vendor/zod-to-json-schema/parsers/tuple.ts b/node_modules/openai/src/_vendor/zod-to-json-schema/parsers/tuple.ts new file mode 100644 index 0000000..b2a8240 --- /dev/null +++ b/node_modules/openai/src/_vendor/zod-to-json-schema/parsers/tuple.ts @@ -0,0 +1,54 @@ +import { ZodTupleDef, ZodTupleItems, ZodTypeAny } from 'zod'; +import { JsonSchema7Type, parseDef } from '../parseDef'; +import { Refs } from '../Refs'; + +export type JsonSchema7TupleType = { + type: 'array'; + minItems: number; + items: JsonSchema7Type[]; +} & ( + | { + maxItems: number; + } + | { + additionalItems?: JsonSchema7Type | undefined; + } +); + +export function parseTupleDef( + def: ZodTupleDef, + refs: Refs, +): JsonSchema7TupleType { + if (def.rest) { + return { + type: 'array', + minItems: def.items.length, + items: def.items + .map((x, i) => + parseDef(x._def, { + ...refs, + currentPath: [...refs.currentPath, 'items', `${i}`], + }), + ) + .reduce((acc: JsonSchema7Type[], x) => (x === undefined ? acc : [...acc, x]), []), + additionalItems: parseDef(def.rest._def, { + ...refs, + currentPath: [...refs.currentPath, 'additionalItems'], + }), + }; + } else { + return { + type: 'array', + minItems: def.items.length, + maxItems: def.items.length, + items: def.items + .map((x, i) => + parseDef(x._def, { + ...refs, + currentPath: [...refs.currentPath, 'items', `${i}`], + }), + ) + .reduce((acc: JsonSchema7Type[], x) => (x === undefined ? 
acc : [...acc, x]), []), + }; + } +} diff --git a/node_modules/openai/src/_vendor/zod-to-json-schema/parsers/undefined.ts b/node_modules/openai/src/_vendor/zod-to-json-schema/parsers/undefined.ts new file mode 100644 index 0000000..6864d81 --- /dev/null +++ b/node_modules/openai/src/_vendor/zod-to-json-schema/parsers/undefined.ts @@ -0,0 +1,9 @@ +export type JsonSchema7UndefinedType = { + not: {}; +}; + +export function parseUndefinedDef(): JsonSchema7UndefinedType { + return { + not: {}, + }; +} diff --git a/node_modules/openai/src/_vendor/zod-to-json-schema/parsers/union.ts b/node_modules/openai/src/_vendor/zod-to-json-schema/parsers/union.ts new file mode 100644 index 0000000..1daf149 --- /dev/null +++ b/node_modules/openai/src/_vendor/zod-to-json-schema/parsers/union.ts @@ -0,0 +1,119 @@ +import { ZodDiscriminatedUnionDef, ZodLiteralDef, ZodTypeAny, ZodUnionDef } from 'zod'; +import { JsonSchema7Type, parseDef } from '../parseDef'; +import { Refs } from '../Refs'; + +export const primitiveMappings = { + ZodString: 'string', + ZodNumber: 'number', + ZodBigInt: 'integer', + ZodBoolean: 'boolean', + ZodNull: 'null', +} as const; +type ZodPrimitive = keyof typeof primitiveMappings; +type JsonSchema7Primitive = (typeof primitiveMappings)[keyof typeof primitiveMappings]; + +export type JsonSchema7UnionType = JsonSchema7PrimitiveUnionType | JsonSchema7AnyOfType; + +type JsonSchema7PrimitiveUnionType = + | { + type: JsonSchema7Primitive | JsonSchema7Primitive[]; + } + | { + type: JsonSchema7Primitive | JsonSchema7Primitive[]; + enum: (string | number | bigint | boolean | null)[]; + }; + +type JsonSchema7AnyOfType = { + anyOf: JsonSchema7Type[]; +}; + +export function parseUnionDef( + def: ZodUnionDef | ZodDiscriminatedUnionDef, + refs: Refs, +): JsonSchema7PrimitiveUnionType | JsonSchema7AnyOfType | undefined { + if (refs.target === 'openApi3') return asAnyOf(def, refs); + + const options: readonly ZodTypeAny[] = + def.options instanceof Map ? Array.from(def.options.values()) : def.options; + + // This blocks tries to look ahead a bit to produce nicer looking schemas with type array instead of anyOf. + if ( + options.every((x) => x._def.typeName in primitiveMappings && (!x._def.checks || !x._def.checks.length)) + ) { + // all types in union are primitive and lack checks, so might as well squash into {type: [...]} + + const types = options.reduce((types: JsonSchema7Primitive[], x) => { + const type = primitiveMappings[x._def.typeName as ZodPrimitive]; //Can be safely casted due to row 43 + return type && !types.includes(type) ? [...types, type] : types; + }, []); + + return { + type: types.length > 1 ? types : types[0]!, + }; + } else if (options.every((x) => x._def.typeName === 'ZodLiteral' && !x.description)) { + // all options literals + + const types = options.reduce((acc: JsonSchema7Primitive[], x: { _def: ZodLiteralDef }) => { + const type = typeof x._def.value; + switch (type) { + case 'string': + case 'number': + case 'boolean': + return [...acc, type]; + case 'bigint': + return [...acc, 'integer' as const]; + case 'object': + if (x._def.value === null) return [...acc, 'null' as const]; + case 'symbol': + case 'undefined': + case 'function': + default: + return acc; + } + }, []); + + if (types.length === options.length) { + // all the literals are primitive, as far as null can be considered primitive + + const uniqueTypes = types.filter((x, i, a) => a.indexOf(x) === i); + return { + type: uniqueTypes.length > 1 ? 
uniqueTypes : uniqueTypes[0]!, + enum: options.reduce( + (acc, x) => { + return acc.includes(x._def.value) ? acc : [...acc, x._def.value]; + }, + [] as (string | number | bigint | boolean | null)[], + ), + }; + } + } else if (options.every((x) => x._def.typeName === 'ZodEnum')) { + return { + type: 'string', + enum: options.reduce( + (acc: string[], x) => [...acc, ...x._def.values.filter((x: string) => !acc.includes(x))], + [], + ), + }; + } + + return asAnyOf(def, refs); +} + +const asAnyOf = ( + def: ZodUnionDef | ZodDiscriminatedUnionDef, + refs: Refs, +): JsonSchema7PrimitiveUnionType | JsonSchema7AnyOfType | undefined => { + const anyOf = ((def.options instanceof Map ? Array.from(def.options.values()) : def.options) as any[]) + .map((x, i) => + parseDef(x._def, { + ...refs, + currentPath: [...refs.currentPath, 'anyOf', `${i}`], + }), + ) + .filter( + (x): x is JsonSchema7Type => + !!x && (!refs.strictUnions || (typeof x === 'object' && Object.keys(x).length > 0)), + ); + + return anyOf.length ? { anyOf } : undefined; +}; diff --git a/node_modules/openai/src/_vendor/zod-to-json-schema/parsers/unknown.ts b/node_modules/openai/src/_vendor/zod-to-json-schema/parsers/unknown.ts new file mode 100644 index 0000000..a3c8d1d --- /dev/null +++ b/node_modules/openai/src/_vendor/zod-to-json-schema/parsers/unknown.ts @@ -0,0 +1,5 @@ +export type JsonSchema7UnknownType = {}; + +export function parseUnknownDef(): JsonSchema7UnknownType { + return {}; +} diff --git a/node_modules/openai/src/_vendor/zod-to-json-schema/util.ts b/node_modules/openai/src/_vendor/zod-to-json-schema/util.ts new file mode 100644 index 0000000..870ab47 --- /dev/null +++ b/node_modules/openai/src/_vendor/zod-to-json-schema/util.ts @@ -0,0 +1,11 @@ +import type { ZodSchema, ZodTypeDef } from 'zod'; + +export const zodDef = (zodSchema: ZodSchema | ZodTypeDef): ZodTypeDef => { + return '_def' in zodSchema ? zodSchema._def : zodSchema; +}; + +export function isEmptyObj(obj: Object | null | undefined): boolean { + if (!obj) return true; + for (const _k in obj) return false; + return true; +} diff --git a/node_modules/openai/src/_vendor/zod-to-json-schema/zodToJsonSchema.ts b/node_modules/openai/src/_vendor/zod-to-json-schema/zodToJsonSchema.ts new file mode 100644 index 0000000..e0d63d5 --- /dev/null +++ b/node_modules/openai/src/_vendor/zod-to-json-schema/zodToJsonSchema.ts @@ -0,0 +1,120 @@ +import { ZodSchema } from 'zod'; +import { Options, Targets } from './Options'; +import { JsonSchema7Type, parseDef } from './parseDef'; +import { getRefs } from './Refs'; +import { zodDef, isEmptyObj } from './util'; + +const zodToJsonSchema = ( + schema: ZodSchema, + options?: Partial> | string, +): (Target extends 'jsonSchema7' ? JsonSchema7Type : object) & { + $schema?: string; + definitions?: { + [key: string]: Target extends 'jsonSchema7' ? JsonSchema7Type + : Target extends 'jsonSchema2019-09' ? JsonSchema7Type + : object; + }; +} => { + const refs = getRefs(options); + + const name = + typeof options === 'string' ? options + : options?.nameStrategy === 'title' ? undefined + : options?.name; + + const main = + parseDef( + schema._def, + name === undefined ? refs : ( + { + ...refs, + currentPath: [...refs.basePath, refs.definitionPath, name], + } + ), + false, + ) ?? {}; + + const title = + typeof options === 'object' && options.name !== undefined && options.nameStrategy === 'title' ? 
+ options.name + : undefined; + + if (title !== undefined) { + main.title = title; + } + + const definitions = (() => { + if (isEmptyObj(refs.definitions)) { + return undefined; + } + + const definitions: Record = {}; + const processedDefinitions = new Set(); + + // the call to `parseDef()` here might itself add more entries to `.definitions` + // so we need to continually evaluate definitions until we've resolved all of them + // + // we have a generous iteration limit here to avoid blowing up the stack if there + // are any bugs that would otherwise result in us iterating indefinitely + for (let i = 0; i < 500; i++) { + const newDefinitions = Object.entries(refs.definitions).filter( + ([key]) => !processedDefinitions.has(key), + ); + if (newDefinitions.length === 0) break; + + for (const [key, schema] of newDefinitions) { + definitions[key] = + parseDef( + zodDef(schema), + { ...refs, currentPath: [...refs.basePath, refs.definitionPath, key] }, + true, + ) ?? {}; + processedDefinitions.add(key); + } + } + + return definitions; + })(); + + const combined: ReturnType> = + name === undefined ? + definitions ? + { + ...main, + [refs.definitionPath]: definitions, + } + : main + : refs.nameStrategy === 'duplicate-ref' ? + { + ...main, + ...(definitions || refs.seenRefs.size ? + { + [refs.definitionPath]: { + ...definitions, + // only actually duplicate the schema definition if it was ever referenced + // otherwise the duplication is completely pointless + ...(refs.seenRefs.size ? { [name]: main } : undefined), + }, + } + : undefined), + } + : { + $ref: [...(refs.$refStrategy === 'relative' ? [] : refs.basePath), refs.definitionPath, name].join( + '/', + ), + [refs.definitionPath]: { + ...definitions, + [name]: main, + }, + }; + + if (refs.target === 'jsonSchema7') { + combined.$schema = 'http://json-schema.org/draft-07/schema#'; + } else if (refs.target === 'jsonSchema2019-09') { + combined.$schema = 'https://json-schema.org/draft/2019-09/schema#'; + } + + return combined; +}; + +export { zodToJsonSchema }; diff --git a/node_modules/openai/src/core.ts b/node_modules/openai/src/core.ts new file mode 100644 index 0000000..68f1e67 --- /dev/null +++ b/node_modules/openai/src/core.ts @@ -0,0 +1,1233 @@ +import { VERSION } from './version'; +import { Stream } from './streaming'; +import { + OpenAIError, + APIError, + APIConnectionError, + APIConnectionTimeoutError, + APIUserAbortError, +} from './error'; +import { + kind as shimsKind, + type Readable, + getDefaultAgent, + type Agent, + fetch, + type RequestInfo, + type RequestInit, + type Response, + type HeadersInit, +} from './_shims/index'; +export { type Response }; +import { BlobLike, isBlobLike, isMultipartBody } from './uploads'; +export { + maybeMultipartFormRequestOptions, + multipartFormRequestOptions, + createForm, + type Uploadable, +} from './uploads'; + +export type Fetch = (url: RequestInfo, init?: RequestInit) => Promise; + +type PromiseOrValue = T | Promise; + +type APIResponseProps = { + response: Response; + options: FinalRequestOptions; + controller: AbortController; +}; + +async function defaultParseResponse(props: APIResponseProps): Promise> { + const { response } = props; + if (props.options.stream) { + debug('response', response.status, response.url, response.headers, response.body); + + // Note: there is an invariant here that isn't represented in the type system + // that if you set `stream: true` the response type must also be `Stream` + + if (props.options.__streamClass) { + return 
props.options.__streamClass.fromSSEResponse(response, props.controller) as any; + } + + return Stream.fromSSEResponse(response, props.controller) as any; + } + + // fetch refuses to read the body when the status code is 204. + if (response.status === 204) { + return null as WithRequestID; + } + + if (props.options.__binaryResponse) { + return response as unknown as WithRequestID; + } + + const contentType = response.headers.get('content-type'); + const isJSON = + contentType?.includes('application/json') || contentType?.includes('application/vnd.api+json'); + if (isJSON) { + const json = await response.json(); + + debug('response', response.status, response.url, response.headers, json); + + return _addRequestID(json, response); + } + + const text = await response.text(); + debug('response', response.status, response.url, response.headers, text); + + // TODO handle blob, arraybuffer, other content types, etc. + return text as unknown as WithRequestID; +} + +type WithRequestID = + T extends Array | Response | AbstractPage ? T + : T extends Record ? T & { _request_id?: string | null } + : T; + +function _addRequestID(value: T, response: Response): WithRequestID { + if (!value || typeof value !== 'object' || Array.isArray(value)) { + return value as WithRequestID; + } + + return Object.defineProperty(value, '_request_id', { + value: response.headers.get('x-request-id'), + enumerable: false, + }) as WithRequestID; +} + +/** + * A subclass of `Promise` providing additional helper methods + * for interacting with the SDK. + */ +export class APIPromise extends Promise> { + private parsedPromise: Promise> | undefined; + + constructor( + private responsePromise: Promise, + private parseResponse: ( + props: APIResponseProps, + ) => PromiseOrValue> = defaultParseResponse, + ) { + super((resolve) => { + // this is maybe a bit weird but this has to be a no-op to not implicitly + // parse the response body; instead .then, .catch, .finally are overridden + // to parse the response + resolve(null as any); + }); + } + + _thenUnwrap(transform: (data: T, props: APIResponseProps) => U): APIPromise { + return new APIPromise(this.responsePromise, async (props) => + _addRequestID(transform(await this.parseResponse(props), props), props.response), + ); + } + + /** + * Gets the raw `Response` instance instead of parsing the response + * data. + * + * If you want to parse the response body but still get the `Response` + * instance, you can use {@link withResponse()}. + * + * 👋 Getting the wrong TypeScript type for `Response`? + * Try setting `"moduleResolution": "NodeNext"` if you can, + * or add one of these imports before your first `import … from 'openai'`: + * - `import 'openai/shims/node'` (if you're running on Node) + * - `import 'openai/shims/web'` (otherwise) + */ + asResponse(): Promise { + return this.responsePromise.then((p) => p.response); + } + + /** + * Gets the parsed response data, the raw `Response` instance and the ID of the request, + * returned via the X-Request-ID header which is useful for debugging requests and reporting + * issues to OpenAI. + * + * If you just want to get the raw `Response` instance without parsing it, + * you can use {@link asResponse()}. + * + * + * 👋 Getting the wrong TypeScript type for `Response`? 
+ * Try setting `"moduleResolution": "NodeNext"` if you can, + * or add one of these imports before your first `import … from 'openai'`: + * - `import 'openai/shims/node'` (if you're running on Node) + * - `import 'openai/shims/web'` (otherwise) + */ + async withResponse(): Promise<{ data: T; response: Response; request_id: string | null | undefined }> { + const [data, response] = await Promise.all([this.parse(), this.asResponse()]); + return { data, response, request_id: response.headers.get('x-request-id') }; + } + + private parse(): Promise> { + if (!this.parsedPromise) { + this.parsedPromise = this.responsePromise.then(this.parseResponse) as any as Promise>; + } + return this.parsedPromise; + } + + override then, TResult2 = never>( + onfulfilled?: ((value: WithRequestID) => TResult1 | PromiseLike) | undefined | null, + onrejected?: ((reason: any) => TResult2 | PromiseLike) | undefined | null, + ): Promise { + return this.parse().then(onfulfilled, onrejected); + } + + override catch( + onrejected?: ((reason: any) => TResult | PromiseLike) | undefined | null, + ): Promise | TResult> { + return this.parse().catch(onrejected); + } + + override finally(onfinally?: (() => void) | undefined | null): Promise> { + return this.parse().finally(onfinally); + } +} + +export abstract class APIClient { + baseURL: string; + maxRetries: number; + timeout: number; + httpAgent: Agent | undefined; + + private fetch: Fetch; + protected idempotencyHeader?: string; + + constructor({ + baseURL, + maxRetries = 2, + timeout = 600000, // 10 minutes + httpAgent, + fetch: overriddenFetch, + }: { + baseURL: string; + maxRetries?: number | undefined; + timeout: number | undefined; + httpAgent: Agent | undefined; + fetch: Fetch | undefined; + }) { + this.baseURL = baseURL; + this.maxRetries = validatePositiveInteger('maxRetries', maxRetries); + this.timeout = validatePositiveInteger('timeout', timeout); + this.httpAgent = httpAgent; + + this.fetch = overriddenFetch ?? 
fetch; + } + + protected authHeaders(opts: FinalRequestOptions): Headers { + return {}; + } + + /** + * Override this to add your own default headers, for example: + * + * { + * ...super.defaultHeaders(), + * Authorization: 'Bearer 123', + * } + */ + protected defaultHeaders(opts: FinalRequestOptions): Headers { + return { + Accept: 'application/json', + 'Content-Type': 'application/json', + 'User-Agent': this.getUserAgent(), + ...getPlatformHeaders(), + ...this.authHeaders(opts), + }; + } + + protected abstract defaultQuery(): DefaultQuery | undefined; + + /** + * Override this to add your own headers validation: + */ + protected validateHeaders(headers: Headers, customHeaders: Headers) {} + + protected defaultIdempotencyKey(): string { + return `stainless-node-retry-${uuid4()}`; + } + + get(path: string, opts?: PromiseOrValue>): APIPromise { + return this.methodRequest('get', path, opts); + } + + post(path: string, opts?: PromiseOrValue>): APIPromise { + return this.methodRequest('post', path, opts); + } + + patch(path: string, opts?: PromiseOrValue>): APIPromise { + return this.methodRequest('patch', path, opts); + } + + put(path: string, opts?: PromiseOrValue>): APIPromise { + return this.methodRequest('put', path, opts); + } + + delete(path: string, opts?: PromiseOrValue>): APIPromise { + return this.methodRequest('delete', path, opts); + } + + private methodRequest( + method: HTTPMethod, + path: string, + opts?: PromiseOrValue>, + ): APIPromise { + return this.request( + Promise.resolve(opts).then(async (opts) => { + const body = + opts && isBlobLike(opts?.body) ? new DataView(await opts.body.arrayBuffer()) + : opts?.body instanceof DataView ? opts.body + : opts?.body instanceof ArrayBuffer ? new DataView(opts.body) + : opts && ArrayBuffer.isView(opts?.body) ? new DataView(opts.body.buffer) + : opts?.body; + return { method, path, ...opts, body }; + }), + ); + } + + getAPIList = AbstractPage>( + path: string, + Page: new (...args: any[]) => PageClass, + opts?: RequestOptions, + ): PagePromise { + return this.requestAPIList(Page, { method: 'get', path, ...opts }); + } + + private calculateContentLength(body: unknown): string | null { + if (typeof body === 'string') { + if (typeof Buffer !== 'undefined') { + return Buffer.byteLength(body, 'utf8').toString(); + } + + if (typeof TextEncoder !== 'undefined') { + const encoder = new TextEncoder(); + const encoded = encoder.encode(body); + return encoded.length.toString(); + } + } else if (ArrayBuffer.isView(body)) { + return body.byteLength.toString(); + } + + return null; + } + + buildRequest( + options: FinalRequestOptions, + { retryCount = 0 }: { retryCount?: number } = {}, + ): { req: RequestInit; url: string; timeout: number } { + const { method, path, query, headers: headers = {} } = options; + + const body = + ArrayBuffer.isView(options.body) || (options.__binaryRequest && typeof options.body === 'string') ? + options.body + : isMultipartBody(options.body) ? options.body.body + : options.body ? JSON.stringify(options.body, null, 2) + : null; + const contentLength = this.calculateContentLength(body); + + const url = this.buildURL(path!, query); + if ('timeout' in options) validatePositiveInteger('timeout', options.timeout); + const timeout = options.timeout ?? this.timeout; + const httpAgent = options.httpAgent ?? this.httpAgent ?? getDefaultAgent(url); + const minAgentTimeout = timeout + 1000; + if ( + typeof (httpAgent as any)?.options?.timeout === 'number' && + minAgentTimeout > ((httpAgent as any).options.timeout ?? 
0) + ) { + // Allow any given request to bump our agent active socket timeout. + // This may seem strange, but leaking active sockets should be rare and not particularly problematic, + // and without mutating agent we would need to create more of them. + // This tradeoff optimizes for performance. + (httpAgent as any).options.timeout = minAgentTimeout; + } + + if (this.idempotencyHeader && method !== 'get') { + if (!options.idempotencyKey) options.idempotencyKey = this.defaultIdempotencyKey(); + headers[this.idempotencyHeader] = options.idempotencyKey; + } + + const reqHeaders = this.buildHeaders({ options, headers, contentLength, retryCount }); + + const req: RequestInit = { + method, + ...(body && { body: body as any }), + headers: reqHeaders, + ...(httpAgent && { agent: httpAgent }), + // @ts-ignore node-fetch uses a custom AbortSignal type that is + // not compatible with standard web types + signal: options.signal ?? null, + }; + + return { req, url, timeout }; + } + + private buildHeaders({ + options, + headers, + contentLength, + retryCount, + }: { + options: FinalRequestOptions; + headers: Record; + contentLength: string | null | undefined; + retryCount: number; + }): Record { + const reqHeaders: Record = {}; + if (contentLength) { + reqHeaders['content-length'] = contentLength; + } + + const defaultHeaders = this.defaultHeaders(options); + applyHeadersMut(reqHeaders, defaultHeaders); + applyHeadersMut(reqHeaders, headers); + + // let builtin fetch set the Content-Type for multipart bodies + if (isMultipartBody(options.body) && shimsKind !== 'node') { + delete reqHeaders['content-type']; + } + + // Don't set the retry count header if it was already set or removed through default headers or by the + // caller. We check `defaultHeaders` and `headers`, which can contain nulls, instead of `reqHeaders` to + // account for the removal case. + if ( + getHeader(defaultHeaders, 'x-stainless-retry-count') === undefined && + getHeader(headers, 'x-stainless-retry-count') === undefined + ) { + reqHeaders['x-stainless-retry-count'] = String(retryCount); + } + + this.validateHeaders(reqHeaders, headers); + + return reqHeaders; + } + + /** + * Used as a callback for mutating the given `FinalRequestOptions` object. + */ + protected async prepareOptions(options: FinalRequestOptions): Promise {} + + /** + * Used as a callback for mutating the given `RequestInit` object. + * + * This is useful for cases where you want to add certain headers based off of + * the request properties, e.g. `method` or `url`. + */ + protected async prepareRequest( + request: RequestInit, + { url, options }: { url: string; options: FinalRequestOptions }, + ): Promise {} + + protected parseHeaders(headers: HeadersInit | null | undefined): Record { + return ( + !headers ? {} + : Symbol.iterator in headers ? + Object.fromEntries(Array.from(headers as Iterable).map((header) => [...header])) + : { ...headers } + ); + } + + protected makeStatusError( + status: number | undefined, + error: Object | undefined, + message: string | undefined, + headers: Headers | undefined, + ): APIError { + return APIError.generate(status, error, message, headers); + } + + request( + options: PromiseOrValue>, + remainingRetries: number | null = null, + ): APIPromise { + return new APIPromise(this.makeRequest(options, remainingRetries)); + } + + private async makeRequest( + optionsInput: PromiseOrValue>, + retriesRemaining: number | null, + ): Promise { + const options = await optionsInput; + const maxRetries = options.maxRetries ?? 
this.maxRetries; + if (retriesRemaining == null) { + retriesRemaining = maxRetries; + } + + await this.prepareOptions(options); + + const { req, url, timeout } = this.buildRequest(options, { retryCount: maxRetries - retriesRemaining }); + + await this.prepareRequest(req, { url, options }); + + debug('request', url, options, req.headers); + + if (options.signal?.aborted) { + throw new APIUserAbortError(); + } + + const controller = new AbortController(); + const response = await this.fetchWithTimeout(url, req, timeout, controller).catch(castToError); + + if (response instanceof Error) { + if (options.signal?.aborted) { + throw new APIUserAbortError(); + } + if (retriesRemaining) { + return this.retryRequest(options, retriesRemaining); + } + if (response.name === 'AbortError') { + throw new APIConnectionTimeoutError(); + } + throw new APIConnectionError({ cause: response }); + } + + const responseHeaders = createResponseHeaders(response.headers); + + if (!response.ok) { + if (retriesRemaining && this.shouldRetry(response)) { + const retryMessage = `retrying, ${retriesRemaining} attempts remaining`; + debug(`response (error; ${retryMessage})`, response.status, url, responseHeaders); + return this.retryRequest(options, retriesRemaining, responseHeaders); + } + + const errText = await response.text().catch((e) => castToError(e).message); + const errJSON = safeJSON(errText); + const errMessage = errJSON ? undefined : errText; + const retryMessage = retriesRemaining ? `(error; no more retries left)` : `(error; not retryable)`; + + debug(`response (error; ${retryMessage})`, response.status, url, responseHeaders, errMessage); + + const err = this.makeStatusError(response.status, errJSON, errMessage, responseHeaders); + throw err; + } + + return { response, options, controller }; + } + + requestAPIList = AbstractPage>( + Page: new (...args: ConstructorParameters) => PageClass, + options: FinalRequestOptions, + ): PagePromise { + const request = this.makeRequest(options, null); + return new PagePromise(this, request, Page); + } + + buildURL(path: string, query: Req | null | undefined): string { + const url = + isAbsoluteURL(path) ? + new URL(path) + : new URL(this.baseURL + (this.baseURL.endsWith('/') && path.startsWith('/') ? path.slice(1) : path)); + + const defaultQuery = this.defaultQuery(); + if (!isEmptyObj(defaultQuery)) { + query = { ...defaultQuery, ...query } as Req; + } + + if (typeof query === 'object' && query && !Array.isArray(query)) { + url.search = this.stringifyQuery(query as Record); + } + + return url.toString(); + } + + protected stringifyQuery(query: Record): string { + return Object.entries(query) + .filter(([_, value]) => typeof value !== 'undefined') + .map(([key, value]) => { + if (typeof value === 'string' || typeof value === 'number' || typeof value === 'boolean') { + return `${encodeURIComponent(key)}=${encodeURIComponent(value)}`; + } + if (value === null) { + return `${encodeURIComponent(key)}=`; + } + throw new OpenAIError( + `Cannot stringify type ${typeof value}; Expected string, number, boolean, or null. If you need to pass nested query parameters, you can manually encode them, e.g. 
{ query: { 'foo[key1]': value1, 'foo[key2]': value2 } }, and please open a GitHub issue requesting better support for your use case.`, + ); + }) + .join('&'); + } + + async fetchWithTimeout( + url: RequestInfo, + init: RequestInit | undefined, + ms: number, + controller: AbortController, + ): Promise { + const { signal, ...options } = init || {}; + if (signal) signal.addEventListener('abort', () => controller.abort()); + + const timeout = setTimeout(() => controller.abort(), ms); + + return ( + // use undefined this binding; fetch errors if bound to something else in browser/cloudflare + this.fetch.call(undefined, url, { signal: controller.signal as any, ...options }).finally(() => { + clearTimeout(timeout); + }) + ); + } + + private shouldRetry(response: Response): boolean { + // Note this is not a standard header. + const shouldRetryHeader = response.headers.get('x-should-retry'); + + // If the server explicitly says whether or not to retry, obey. + if (shouldRetryHeader === 'true') return true; + if (shouldRetryHeader === 'false') return false; + + // Retry on request timeouts. + if (response.status === 408) return true; + + // Retry on lock timeouts. + if (response.status === 409) return true; + + // Retry on rate limits. + if (response.status === 429) return true; + + // Retry internal errors. + if (response.status >= 500) return true; + + return false; + } + + private async retryRequest( + options: FinalRequestOptions, + retriesRemaining: number, + responseHeaders?: Headers | undefined, + ): Promise { + let timeoutMillis: number | undefined; + + // Note the `retry-after-ms` header may not be standard, but is a good idea and we'd like proactive support for it. + const retryAfterMillisHeader = responseHeaders?.['retry-after-ms']; + if (retryAfterMillisHeader) { + const timeoutMs = parseFloat(retryAfterMillisHeader); + if (!Number.isNaN(timeoutMs)) { + timeoutMillis = timeoutMs; + } + } + + // About the Retry-After header: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Retry-After + const retryAfterHeader = responseHeaders?.['retry-after']; + if (retryAfterHeader && !timeoutMillis) { + const timeoutSeconds = parseFloat(retryAfterHeader); + if (!Number.isNaN(timeoutSeconds)) { + timeoutMillis = timeoutSeconds * 1000; + } else { + timeoutMillis = Date.parse(retryAfterHeader) - Date.now(); + } + } + + // If the API asks us to wait a certain amount of time (and it's a reasonable amount), + // just do what it says, but otherwise calculate a default + if (!(timeoutMillis && 0 <= timeoutMillis && timeoutMillis < 60 * 1000)) { + const maxRetries = options.maxRetries ?? this.maxRetries; + timeoutMillis = this.calculateDefaultRetryTimeoutMillis(retriesRemaining, maxRetries); + } + await sleep(timeoutMillis); + + return this.makeRequest(options, retriesRemaining - 1); + } + + private calculateDefaultRetryTimeoutMillis(retriesRemaining: number, maxRetries: number): number { + const initialRetryDelay = 0.5; + const maxRetryDelay = 8.0; + + const numRetries = maxRetries - retriesRemaining; + + // Apply exponential backoff, but not more than the max. + const sleepSeconds = Math.min(initialRetryDelay * Math.pow(2, numRetries), maxRetryDelay); + + // Apply some jitter, take up to at most 25 percent of the retry time. 
+ const jitter = 1 - Math.random() * 0.25; + + return sleepSeconds * jitter * 1000; + } + + private getUserAgent(): string { + return `${this.constructor.name}/JS ${VERSION}`; + } +} + +export type PageInfo = { url: URL } | { params: Record | null }; + +export abstract class AbstractPage implements AsyncIterable { + #client: APIClient; + protected options: FinalRequestOptions; + + protected response: Response; + protected body: unknown; + + constructor(client: APIClient, response: Response, body: unknown, options: FinalRequestOptions) { + this.#client = client; + this.options = options; + this.response = response; + this.body = body; + } + + /** + * @deprecated Use nextPageInfo instead + */ + abstract nextPageParams(): Partial> | null; + abstract nextPageInfo(): PageInfo | null; + + abstract getPaginatedItems(): Item[]; + + hasNextPage(): boolean { + const items = this.getPaginatedItems(); + if (!items.length) return false; + return this.nextPageInfo() != null; + } + + async getNextPage(): Promise { + const nextInfo = this.nextPageInfo(); + if (!nextInfo) { + throw new OpenAIError( + 'No next page expected; please check `.hasNextPage()` before calling `.getNextPage()`.', + ); + } + const nextOptions = { ...this.options }; + if ('params' in nextInfo && typeof nextOptions.query === 'object') { + nextOptions.query = { ...nextOptions.query, ...nextInfo.params }; + } else if ('url' in nextInfo) { + const params = [...Object.entries(nextOptions.query || {}), ...nextInfo.url.searchParams.entries()]; + for (const [key, value] of params) { + nextInfo.url.searchParams.set(key, value as any); + } + nextOptions.query = undefined; + nextOptions.path = nextInfo.url.toString(); + } + return await this.#client.requestAPIList(this.constructor as any, nextOptions); + } + + async *iterPages(): AsyncGenerator { + // eslint-disable-next-line @typescript-eslint/no-this-alias + let page: this = this; + yield page; + while (page.hasNextPage()) { + page = await page.getNextPage(); + yield page; + } + } + + async *[Symbol.asyncIterator](): AsyncGenerator { + for await (const page of this.iterPages()) { + for (const item of page.getPaginatedItems()) { + yield item; + } + } + } +} + +/** + * This subclass of Promise will resolve to an instantiated Page once the request completes. 
+ * + * It also implements AsyncIterable to allow auto-paginating iteration on an unawaited list call, eg: + * + * for await (const item of client.items.list()) { + * console.log(item) + * } + */ +export class PagePromise< + PageClass extends AbstractPage, + Item = ReturnType[number], + > + extends APIPromise + implements AsyncIterable +{ + constructor( + client: APIClient, + request: Promise, + Page: new (...args: ConstructorParameters) => PageClass, + ) { + super( + request, + async (props) => + new Page( + client, + props.response, + await defaultParseResponse(props), + props.options, + ) as WithRequestID, + ); + } + + /** + * Allow auto-paginating iteration on an unawaited list call, eg: + * + * for await (const item of client.items.list()) { + * console.log(item) + * } + */ + async *[Symbol.asyncIterator](): AsyncGenerator { + const page = await this; + for await (const item of page) { + yield item; + } + } +} + +export const createResponseHeaders = ( + headers: Awaited>['headers'], +): Record => { + return new Proxy( + Object.fromEntries( + // @ts-ignore + headers.entries(), + ), + { + get(target, name) { + const key = name.toString(); + return target[key.toLowerCase()] || target[key]; + }, + }, + ); +}; + +type HTTPMethod = 'get' | 'post' | 'put' | 'patch' | 'delete'; + +export type RequestClient = { fetch: Fetch }; +export type Headers = Record; +export type DefaultQuery = Record; +export type KeysEnum = { [P in keyof Required]: true }; + +export type RequestOptions< + Req = unknown | Record | Readable | BlobLike | ArrayBufferView | ArrayBuffer, +> = { + method?: HTTPMethod; + path?: string; + query?: Req | undefined; + body?: Req | null | undefined; + headers?: Headers | undefined; + + maxRetries?: number; + stream?: boolean | undefined; + timeout?: number; + httpAgent?: Agent; + signal?: AbortSignal | undefined | null; + idempotencyKey?: string; + + __binaryRequest?: boolean | undefined; + __binaryResponse?: boolean | undefined; + __streamClass?: typeof Stream; +}; + +// This is required so that we can determine if a given object matches the RequestOptions +// type at runtime. While this requires duplication, it is enforced by the TypeScript +// compiler such that any missing / extraneous keys will cause an error. 
+const requestOptionsKeys: KeysEnum = { + method: true, + path: true, + query: true, + body: true, + headers: true, + + maxRetries: true, + stream: true, + timeout: true, + httpAgent: true, + signal: true, + idempotencyKey: true, + + __binaryRequest: true, + __binaryResponse: true, + __streamClass: true, +}; + +export const isRequestOptions = (obj: unknown): obj is RequestOptions => { + return ( + typeof obj === 'object' && + obj !== null && + !isEmptyObj(obj) && + Object.keys(obj).every((k) => hasOwn(requestOptionsKeys, k)) + ); +}; + +export type FinalRequestOptions | Readable | DataView> = + RequestOptions & { + method: HTTPMethod; + path: string; + }; + +declare const Deno: any; +declare const EdgeRuntime: any; +type Arch = 'x32' | 'x64' | 'arm' | 'arm64' | `other:${string}` | 'unknown'; +type PlatformName = + | 'MacOS' + | 'Linux' + | 'Windows' + | 'FreeBSD' + | 'OpenBSD' + | 'iOS' + | 'Android' + | `Other:${string}` + | 'Unknown'; +type Browser = 'ie' | 'edge' | 'chrome' | 'firefox' | 'safari'; +type PlatformProperties = { + 'X-Stainless-Lang': 'js'; + 'X-Stainless-Package-Version': string; + 'X-Stainless-OS': PlatformName; + 'X-Stainless-Arch': Arch; + 'X-Stainless-Runtime': 'node' | 'deno' | 'edge' | `browser:${Browser}` | 'unknown'; + 'X-Stainless-Runtime-Version': string; +}; +const getPlatformProperties = (): PlatformProperties => { + if (typeof Deno !== 'undefined' && Deno.build != null) { + return { + 'X-Stainless-Lang': 'js', + 'X-Stainless-Package-Version': VERSION, + 'X-Stainless-OS': normalizePlatform(Deno.build.os), + 'X-Stainless-Arch': normalizeArch(Deno.build.arch), + 'X-Stainless-Runtime': 'deno', + 'X-Stainless-Runtime-Version': + typeof Deno.version === 'string' ? Deno.version : Deno.version?.deno ?? 'unknown', + }; + } + if (typeof EdgeRuntime !== 'undefined') { + return { + 'X-Stainless-Lang': 'js', + 'X-Stainless-Package-Version': VERSION, + 'X-Stainless-OS': 'Unknown', + 'X-Stainless-Arch': `other:${EdgeRuntime}`, + 'X-Stainless-Runtime': 'edge', + 'X-Stainless-Runtime-Version': process.version, + }; + } + // Check if Node.js + if (Object.prototype.toString.call(typeof process !== 'undefined' ? process : 0) === '[object process]') { + return { + 'X-Stainless-Lang': 'js', + 'X-Stainless-Package-Version': VERSION, + 'X-Stainless-OS': normalizePlatform(process.platform), + 'X-Stainless-Arch': normalizeArch(process.arch), + 'X-Stainless-Runtime': 'node', + 'X-Stainless-Runtime-Version': process.version, + }; + } + + const browserInfo = getBrowserInfo(); + if (browserInfo) { + return { + 'X-Stainless-Lang': 'js', + 'X-Stainless-Package-Version': VERSION, + 'X-Stainless-OS': 'Unknown', + 'X-Stainless-Arch': 'unknown', + 'X-Stainless-Runtime': `browser:${browserInfo.browser}`, + 'X-Stainless-Runtime-Version': browserInfo.version, + }; + } + + // TODO add support for Cloudflare workers, etc. + return { + 'X-Stainless-Lang': 'js', + 'X-Stainless-Package-Version': VERSION, + 'X-Stainless-OS': 'Unknown', + 'X-Stainless-Arch': 'unknown', + 'X-Stainless-Runtime': 'unknown', + 'X-Stainless-Runtime-Version': 'unknown', + }; +}; + +type BrowserInfo = { + browser: Browser; + version: string; +}; + +declare const navigator: { userAgent: string } | undefined; + +// Note: modified from https://github.com/JS-DevTools/host-environment/blob/b1ab79ecde37db5d6e163c050e54fe7d287d7c92/src/isomorphic.browser.ts +function getBrowserInfo(): BrowserInfo | null { + if (typeof navigator === 'undefined' || !navigator) { + return null; + } + + // NOTE: The order matters here! 
+ const browserPatterns = [ + { key: 'edge' as const, pattern: /Edge(?:\W+(\d+)\.(\d+)(?:\.(\d+))?)?/ }, + { key: 'ie' as const, pattern: /MSIE(?:\W+(\d+)\.(\d+)(?:\.(\d+))?)?/ }, + { key: 'ie' as const, pattern: /Trident(?:.*rv\:(\d+)\.(\d+)(?:\.(\d+))?)?/ }, + { key: 'chrome' as const, pattern: /Chrome(?:\W+(\d+)\.(\d+)(?:\.(\d+))?)?/ }, + { key: 'firefox' as const, pattern: /Firefox(?:\W+(\d+)\.(\d+)(?:\.(\d+))?)?/ }, + { key: 'safari' as const, pattern: /(?:Version\W+(\d+)\.(\d+)(?:\.(\d+))?)?(?:\W+Mobile\S*)?\W+Safari/ }, + ]; + + // Find the FIRST matching browser + for (const { key, pattern } of browserPatterns) { + const match = pattern.exec(navigator.userAgent); + if (match) { + const major = match[1] || 0; + const minor = match[2] || 0; + const patch = match[3] || 0; + + return { browser: key, version: `${major}.${minor}.${patch}` }; + } + } + + return null; +} + +const normalizeArch = (arch: string): Arch => { + // Node docs: + // - https://nodejs.org/api/process.html#processarch + // Deno docs: + // - https://doc.deno.land/deno/stable/~/Deno.build + if (arch === 'x32') return 'x32'; + if (arch === 'x86_64' || arch === 'x64') return 'x64'; + if (arch === 'arm') return 'arm'; + if (arch === 'aarch64' || arch === 'arm64') return 'arm64'; + if (arch) return `other:${arch}`; + return 'unknown'; +}; + +const normalizePlatform = (platform: string): PlatformName => { + // Node platforms: + // - https://nodejs.org/api/process.html#processplatform + // Deno platforms: + // - https://doc.deno.land/deno/stable/~/Deno.build + // - https://github.com/denoland/deno/issues/14799 + + platform = platform.toLowerCase(); + + // NOTE: this iOS check is untested and may not work + // Node does not work natively on IOS, there is a fork at + // https://github.com/nodejs-mobile/nodejs-mobile + // however it is unknown at the time of writing how to detect if it is running + if (platform.includes('ios')) return 'iOS'; + if (platform === 'android') return 'Android'; + if (platform === 'darwin') return 'MacOS'; + if (platform === 'win32') return 'Windows'; + if (platform === 'freebsd') return 'FreeBSD'; + if (platform === 'openbsd') return 'OpenBSD'; + if (platform === 'linux') return 'Linux'; + if (platform) return `Other:${platform}`; + return 'Unknown'; +}; + +let _platformHeaders: PlatformProperties; +const getPlatformHeaders = () => { + return (_platformHeaders ??= getPlatformProperties()); +}; + +export const safeJSON = (text: string) => { + try { + return JSON.parse(text); + } catch (err) { + return undefined; + } +}; + +// https://url.spec.whatwg.org/#url-scheme-string +const startsWithSchemeRegexp = /^[a-z][a-z0-9+.-]*:/i; +const isAbsoluteURL = (url: string): boolean => { + return startsWithSchemeRegexp.test(url); +}; + +export const sleep = (ms: number) => new Promise((resolve) => setTimeout(resolve, ms)); + +const validatePositiveInteger = (name: string, n: unknown): number => { + if (typeof n !== 'number' || !Number.isInteger(n)) { + throw new OpenAIError(`${name} must be an integer`); + } + if (n < 0) { + throw new OpenAIError(`${name} must be a positive integer`); + } + return n; +}; + +export const castToError = (err: any): Error => { + if (err instanceof Error) return err; + if (typeof err === 'object' && err !== null) { + try { + return new Error(JSON.stringify(err)); + } catch {} + } + return new Error(err); +}; + +export const ensurePresent = (value: T | null | undefined): T => { + if (value == null) throw new OpenAIError(`Expected a value to be given but received ${value} instead.`); + 
return value; +}; + +/** + * Read an environment variable. + * + * Trims beginning and trailing whitespace. + * + * Will return undefined if the environment variable doesn't exist or cannot be accessed. + */ +export const readEnv = (env: string): string | undefined => { + if (typeof process !== 'undefined') { + return process.env?.[env]?.trim() ?? undefined; + } + if (typeof Deno !== 'undefined') { + return Deno.env?.get?.(env)?.trim(); + } + return undefined; +}; + +export const coerceInteger = (value: unknown): number => { + if (typeof value === 'number') return Math.round(value); + if (typeof value === 'string') return parseInt(value, 10); + + throw new OpenAIError(`Could not coerce ${value} (type: ${typeof value}) into a number`); +}; + +export const coerceFloat = (value: unknown): number => { + if (typeof value === 'number') return value; + if (typeof value === 'string') return parseFloat(value); + + throw new OpenAIError(`Could not coerce ${value} (type: ${typeof value}) into a number`); +}; + +export const coerceBoolean = (value: unknown): boolean => { + if (typeof value === 'boolean') return value; + if (typeof value === 'string') return value === 'true'; + return Boolean(value); +}; + +export const maybeCoerceInteger = (value: unknown): number | undefined => { + if (value === undefined) { + return undefined; + } + return coerceInteger(value); +}; + +export const maybeCoerceFloat = (value: unknown): number | undefined => { + if (value === undefined) { + return undefined; + } + return coerceFloat(value); +}; + +export const maybeCoerceBoolean = (value: unknown): boolean | undefined => { + if (value === undefined) { + return undefined; + } + return coerceBoolean(value); +}; + +// https://stackoverflow.com/a/34491287 +export function isEmptyObj(obj: Object | null | undefined): boolean { + if (!obj) return true; + for (const _k in obj) return false; + return true; +} + +// https://eslint.org/docs/latest/rules/no-prototype-builtins +export function hasOwn(obj: Object, key: string): boolean { + return Object.prototype.hasOwnProperty.call(obj, key); +} + +/** + * Copies headers from "newHeaders" onto "targetHeaders", + * using lower-case for all properties, + * ignoring any keys with undefined values, + * and deleting any keys with null values. + */ +function applyHeadersMut(targetHeaders: Headers, newHeaders: Headers): void { + for (const k in newHeaders) { + if (!hasOwn(newHeaders, k)) continue; + const lowerKey = k.toLowerCase(); + if (!lowerKey) continue; + + const val = newHeaders[k]; + + if (val === null) { + delete targetHeaders[lowerKey]; + } else if (val !== undefined) { + targetHeaders[lowerKey] = val; + } + } +} + +export function debug(action: string, ...args: any[]) { + if (typeof process !== 'undefined' && process?.env?.['DEBUG'] === 'true') { + console.log(`OpenAI:DEBUG:${action}`, ...args); + } +} + +/** + * https://stackoverflow.com/a/2117523 + */ +const uuid4 = () => { + return 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g, (c) => { + const r = (Math.random() * 16) | 0; + const v = c === 'x' ? 
r : (r & 0x3) | 0x8; + return v.toString(16); + }); +}; + +export const isRunningInBrowser = () => { + return ( + // @ts-ignore + typeof window !== 'undefined' && + // @ts-ignore + typeof window.document !== 'undefined' && + // @ts-ignore + typeof navigator !== 'undefined' + ); +}; + +export interface HeadersProtocol { + get: (header: string) => string | null | undefined; +} +export type HeadersLike = Record | HeadersProtocol; + +export const isHeadersProtocol = (headers: any): headers is HeadersProtocol => { + return typeof headers?.get === 'function'; +}; + +export const getRequiredHeader = (headers: HeadersLike | Headers, header: string): string => { + const foundHeader = getHeader(headers, header); + if (foundHeader === undefined) { + throw new Error(`Could not find ${header} header`); + } + return foundHeader; +}; + +export const getHeader = (headers: HeadersLike | Headers, header: string): string | undefined => { + const lowerCasedHeader = header.toLowerCase(); + if (isHeadersProtocol(headers)) { + // to deal with the case where the header looks like Stainless-Event-Id + const intercapsHeader = + header[0]?.toUpperCase() + + header.substring(1).replace(/([^\w])(\w)/g, (_m, g1, g2) => g1 + g2.toUpperCase()); + for (const key of [header, lowerCasedHeader, header.toUpperCase(), intercapsHeader]) { + const value = headers.get(key); + if (value) { + return value; + } + } + } + + for (const [key, value] of Object.entries(headers)) { + if (key.toLowerCase() === lowerCasedHeader) { + if (Array.isArray(value)) { + if (value.length <= 1) return value[0]; + console.warn(`Received ${value.length} entries for the ${header} header, using the first entry.`); + return value[0]; + } + return value; + } + } + + return undefined; +}; + +/** + * Encodes a string to Base64 format. + */ +export const toBase64 = (str: string | null | undefined): string => { + if (!str) return ''; + if (typeof Buffer !== 'undefined') { + return Buffer.from(str).toString('base64'); + } + + if (typeof btoa !== 'undefined') { + return btoa(str); + } + + throw new OpenAIError('Cannot generate b64 string; Expected `Buffer` or `btoa` to be defined'); +}; + +export function isObj(obj: unknown): obj is Record { + return obj != null && typeof obj === 'object' && !Array.isArray(obj); +} diff --git a/node_modules/openai/src/error.ts b/node_modules/openai/src/error.ts new file mode 100644 index 0000000..f3dc576 --- /dev/null +++ b/node_modules/openai/src/error.ts @@ -0,0 +1,154 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +import { castToError, Headers } from './core'; + +export class OpenAIError extends Error {} + +export class APIError< + TStatus extends number | undefined = number | undefined, + THeaders extends Headers | undefined = Headers | undefined, + TError extends Object | undefined = Object | undefined, +> extends OpenAIError { + /** HTTP status for the response that caused the error */ + readonly status: TStatus; + /** HTTP headers for the response that caused the error */ + readonly headers: THeaders; + /** JSON body of the response that caused the error */ + readonly error: TError; + + readonly code: string | null | undefined; + readonly param: string | null | undefined; + readonly type: string | undefined; + + readonly request_id: string | null | undefined; + + constructor(status: TStatus, error: TError, message: string | undefined, headers: THeaders) { + super(`${APIError.makeMessage(status, error, message)}`); + this.status = status; + this.headers = headers; + this.request_id = headers?.['x-request-id']; + this.error = error; + + const data = error as Record; + this.code = data?.['code']; + this.param = data?.['param']; + this.type = data?.['type']; + } + + private static makeMessage(status: number | undefined, error: any, message: string | undefined) { + const msg = + error?.message ? + typeof error.message === 'string' ? + error.message + : JSON.stringify(error.message) + : error ? JSON.stringify(error) + : message; + + if (status && msg) { + return `${status} ${msg}`; + } + if (status) { + return `${status} status code (no body)`; + } + if (msg) { + return msg; + } + return '(no status code or body)'; + } + + static generate( + status: number | undefined, + errorResponse: Object | undefined, + message: string | undefined, + headers: Headers | undefined, + ): APIError { + if (!status || !headers) { + return new APIConnectionError({ message, cause: castToError(errorResponse) }); + } + + const error = (errorResponse as Record)?.['error']; + + if (status === 400) { + return new BadRequestError(status, error, message, headers); + } + + if (status === 401) { + return new AuthenticationError(status, error, message, headers); + } + + if (status === 403) { + return new PermissionDeniedError(status, error, message, headers); + } + + if (status === 404) { + return new NotFoundError(status, error, message, headers); + } + + if (status === 409) { + return new ConflictError(status, error, message, headers); + } + + if (status === 422) { + return new UnprocessableEntityError(status, error, message, headers); + } + + if (status === 429) { + return new RateLimitError(status, error, message, headers); + } + + if (status >= 500) { + return new InternalServerError(status, error, message, headers); + } + + return new APIError(status, error, message, headers); + } +} + +export class APIUserAbortError extends APIError { + constructor({ message }: { message?: string } = {}) { + super(undefined, undefined, message || 'Request was aborted.', undefined); + } +} + +export class APIConnectionError extends APIError { + constructor({ message, cause }: { message?: string | undefined; cause?: Error | undefined }) { + super(undefined, undefined, message || 'Connection error.', undefined); + // in some environments the 'cause' property is already declared + // @ts-ignore + if (cause) this.cause = cause; + } +} + +export class APIConnectionTimeoutError extends APIConnectionError { + constructor({ message }: { message?: string } = {}) { + super({ message: message ?? 'Request timed out.' 
}); + } +} + +export class BadRequestError extends APIError<400, Headers> {} + +export class AuthenticationError extends APIError<401, Headers> {} + +export class PermissionDeniedError extends APIError<403, Headers> {} + +export class NotFoundError extends APIError<404, Headers> {} + +export class ConflictError extends APIError<409, Headers> {} + +export class UnprocessableEntityError extends APIError<422, Headers> {} + +export class RateLimitError extends APIError<429, Headers> {} + +export class InternalServerError extends APIError {} + +export class LengthFinishReasonError extends OpenAIError { + constructor() { + super(`Could not parse response content as the length limit was reached`); + } +} + +export class ContentFilterFinishReasonError extends OpenAIError { + constructor() { + super(`Could not parse response content as the request was rejected by the content filter`); + } +} diff --git a/node_modules/openai/src/helpers/zod.ts b/node_modules/openai/src/helpers/zod.ts new file mode 100644 index 0000000..99b9eb4 --- /dev/null +++ b/node_modules/openai/src/helpers/zod.ts @@ -0,0 +1,108 @@ +import { ResponseFormatJSONSchema } from '../resources/index'; +import type { infer as zodInfer, ZodType } from 'zod'; +import { + AutoParseableResponseFormat, + AutoParseableTool, + makeParseableResponseFormat, + makeParseableTool, +} from '../lib/parser'; +import { zodToJsonSchema as _zodToJsonSchema } from '../_vendor/zod-to-json-schema'; + +function zodToJsonSchema(schema: ZodType, options: { name: string }): Record { + return _zodToJsonSchema(schema, { + openaiStrictMode: true, + name: options.name, + nameStrategy: 'duplicate-ref', + $refStrategy: 'extract-to-root', + nullableStrategy: 'property', + }); +} + +/** + * Creates a chat completion `JSONSchema` response format object from + * the given Zod schema. + * + * If this is passed to the `.parse()`, `.stream()` or `.runTools()` + * chat completion methods then the response message will contain a + * `.parsed` property that is the result of parsing the content with + * the given Zod object. + * + * ```ts + * const completion = await client.beta.chat.completions.parse({ + * model: 'gpt-4o-2024-08-06', + * messages: [ + * { role: 'system', content: 'You are a helpful math tutor.' }, + * { role: 'user', content: 'solve 8x + 31 = 2' }, + * ], + * response_format: zodResponseFormat( + * z.object({ + * steps: z.array(z.object({ + * explanation: z.string(), + * answer: z.string(), + * })), + * final_answer: z.string(), + * }), + * 'math_answer', + * ), + * }); + * const message = completion.choices[0]?.message; + * if (message?.parsed) { + * console.log(message.parsed); + * console.log(message.parsed.final_answer); + * } + * ``` + * + * This can be passed directly to the `.create()` method but will not + * result in any automatic parsing, you'll have to parse the response yourself. + */ +export function zodResponseFormat( + zodObject: ZodInput, + name: string, + props?: Omit, +): AutoParseableResponseFormat> { + return makeParseableResponseFormat( + { + type: 'json_schema', + json_schema: { + ...props, + name, + strict: true, + schema: zodToJsonSchema(zodObject, { name }), + }, + }, + (content) => zodObject.parse(JSON.parse(content)), + ); +} + +/** + * Creates a chat completion `function` tool that can be invoked + * automatically by the chat completion `.runTools()` method or automatically + * parsed by `.parse()` / `.stream()`. 
+ */ +export function zodFunction(options: { + name: string; + parameters: Parameters; + function?: ((args: zodInfer) => unknown | Promise) | undefined; + description?: string | undefined; +}): AutoParseableTool<{ + arguments: Parameters; + name: string; + function: (args: zodInfer) => unknown; +}> { + // @ts-expect-error TODO + return makeParseableTool( + { + type: 'function', + function: { + name: options.name, + parameters: zodToJsonSchema(options.parameters, { name: options.name }), + strict: true, + ...(options.description ? { description: options.description } : undefined), + }, + }, + { + callback: options.function, + parser: (args) => options.parameters.parse(JSON.parse(args)), + }, + ); +} diff --git a/node_modules/openai/src/index.ts b/node_modules/openai/src/index.ts new file mode 100644 index 0000000..2320850 --- /dev/null +++ b/node_modules/openai/src/index.ts @@ -0,0 +1,668 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +import { type Agent, type RequestInit } from './_shims/index'; +import * as qs from './internal/qs'; +import * as Core from './core'; +import * as Errors from './error'; +import * as Pagination from './pagination'; +import { type CursorPageParams, CursorPageResponse, PageResponse } from './pagination'; +import * as Uploads from './uploads'; +import * as API from './resources/index'; +import { + Batch, + BatchCreateParams, + BatchError, + BatchListParams, + BatchRequestCounts, + Batches, + BatchesPage, +} from './resources/batches'; +import { + Completion, + CompletionChoice, + CompletionCreateParams, + CompletionCreateParamsNonStreaming, + CompletionCreateParamsStreaming, + CompletionUsage, + Completions, +} from './resources/completions'; +import { + CreateEmbeddingResponse, + Embedding, + EmbeddingCreateParams, + EmbeddingModel, + Embeddings, +} from './resources/embeddings'; +import { + FileContent, + FileCreateParams, + FileDeleted, + FileListParams, + FileObject, + FileObjectsPage, + FilePurpose, + Files, +} from './resources/files'; +import { + Image, + ImageCreateVariationParams, + ImageEditParams, + ImageGenerateParams, + ImageModel, + Images, + ImagesResponse, +} from './resources/images'; +import { Model, ModelDeleted, Models, ModelsPage } from './resources/models'; +import { + Moderation, + ModerationCreateParams, + ModerationCreateResponse, + ModerationImageURLInput, + ModerationModel, + ModerationMultiModalInput, + ModerationTextInput, + Moderations, +} from './resources/moderations'; +import { Audio, AudioModel, AudioResponseFormat } from './resources/audio/audio'; +import { Beta } from './resources/beta/beta'; +import { Chat, ChatModel } from './resources/chat/chat'; +import { + ChatCompletion, + ChatCompletionAssistantMessageParam, + ChatCompletionAudio, + ChatCompletionAudioParam, + ChatCompletionChunk, + ChatCompletionContentPart, + ChatCompletionContentPartImage, + ChatCompletionContentPartInputAudio, + ChatCompletionContentPartRefusal, + ChatCompletionContentPartText, + ChatCompletionCreateParams, + ChatCompletionCreateParamsNonStreaming, + ChatCompletionCreateParamsStreaming, + ChatCompletionDeveloperMessageParam, + ChatCompletionFunctionCallOption, + ChatCompletionFunctionMessageParam, + ChatCompletionMessage, + ChatCompletionMessageParam, + ChatCompletionMessageToolCall, + ChatCompletionModality, + ChatCompletionNamedToolChoice, + ChatCompletionPredictionContent, + ChatCompletionReasoningEffort, + ChatCompletionRole, + ChatCompletionStreamOptions, + ChatCompletionSystemMessageParam, + 
ChatCompletionTokenLogprob, + ChatCompletionTool, + ChatCompletionToolChoiceOption, + ChatCompletionToolMessageParam, + ChatCompletionUserMessageParam, +} from './resources/chat/completions'; +import { FineTuning } from './resources/fine-tuning/fine-tuning'; +import { + Upload, + UploadCompleteParams, + UploadCreateParams, + Uploads as UploadsAPIUploads, +} from './resources/uploads/uploads'; + +export interface ClientOptions { + /** + * Defaults to process.env['OPENAI_API_KEY']. + */ + apiKey?: string | undefined; + + /** + * Defaults to process.env['OPENAI_ORG_ID']. + */ + organization?: string | null | undefined; + + /** + * Defaults to process.env['OPENAI_PROJECT_ID']. + */ + project?: string | null | undefined; + + /** + * Override the default base URL for the API, e.g., "https://api.example.com/v2/" + * + * Defaults to process.env['OPENAI_BASE_URL']. + */ + baseURL?: string | null | undefined; + + /** + * The maximum amount of time (in milliseconds) that the client should wait for a response + * from the server before timing out a single request. + * + * Note that request timeouts are retried by default, so in a worst-case scenario you may wait + * much longer than this timeout before the promise succeeds or fails. + */ + timeout?: number; + + /** + * An HTTP agent used to manage HTTP(S) connections. + * + * If not provided, an agent will be constructed by default in the Node.js environment, + * otherwise no agent is used. + */ + httpAgent?: Agent; + + /** + * Specify a custom `fetch` function implementation. + * + * If not provided, we use `node-fetch` on Node.js and otherwise expect that `fetch` is + * defined globally. + */ + fetch?: Core.Fetch | undefined; + + /** + * The maximum number of times that the client will retry a request in case of a + * temporary failure, like a network error or a 5XX error from the server. + * + * @default 2 + */ + maxRetries?: number; + + /** + * Default headers to include with every request to the API. + * + * These can be removed in individual requests by explicitly setting the + * header to `undefined` or `null` in request options. + */ + defaultHeaders?: Core.Headers; + + /** + * Default query parameters to include with every request to the API. + * + * These can be removed in individual requests by explicitly setting the + * param to `undefined` in request options. + */ + defaultQuery?: Core.DefaultQuery; + + /** + * By default, client-side use of this library is not allowed, as it risks exposing your secret API credentials to attackers. + * Only set this option to `true` if you understand the risks and have appropriate mitigations in place. + */ + dangerouslyAllowBrowser?: boolean; +} + +/** + * API Client for interfacing with the OpenAI API. + */ +export class OpenAI extends Core.APIClient { + apiKey: string; + organization: string | null; + project: string | null; + + private _options: ClientOptions; + + /** + * API Client for interfacing with the OpenAI API. + * + * @param {string | undefined} [opts.apiKey=process.env['OPENAI_API_KEY'] ?? undefined] + * @param {string | null | undefined} [opts.organization=process.env['OPENAI_ORG_ID'] ?? null] + * @param {string | null | undefined} [opts.project=process.env['OPENAI_PROJECT_ID'] ?? null] + * @param {string} [opts.baseURL=process.env['OPENAI_BASE_URL'] ?? https://api.openai.com/v1] - Override the default base URL for the API. + * @param {number} [opts.timeout=10 minutes] - The maximum amount of time (in milliseconds) the client will wait for a response before timing out. 
+ * @param {number} [opts.httpAgent] - An HTTP agent used to manage HTTP(s) connections. + * @param {Core.Fetch} [opts.fetch] - Specify a custom `fetch` function implementation. + * @param {number} [opts.maxRetries=2] - The maximum number of times the client will retry a request. + * @param {Core.Headers} opts.defaultHeaders - Default headers to include with every request to the API. + * @param {Core.DefaultQuery} opts.defaultQuery - Default query parameters to include with every request to the API. + * @param {boolean} [opts.dangerouslyAllowBrowser=false] - By default, client-side use of this library is not allowed, as it risks exposing your secret API credentials to attackers. + */ + constructor({ + baseURL = Core.readEnv('OPENAI_BASE_URL'), + apiKey = Core.readEnv('OPENAI_API_KEY'), + organization = Core.readEnv('OPENAI_ORG_ID') ?? null, + project = Core.readEnv('OPENAI_PROJECT_ID') ?? null, + ...opts + }: ClientOptions = {}) { + if (apiKey === undefined) { + throw new Errors.OpenAIError( + "The OPENAI_API_KEY environment variable is missing or empty; either provide it, or instantiate the OpenAI client with an apiKey option, like new OpenAI({ apiKey: 'My API Key' }).", + ); + } + + const options: ClientOptions = { + apiKey, + organization, + project, + ...opts, + baseURL: baseURL || `https://api.openai.com/v1`, + }; + + if (!options.dangerouslyAllowBrowser && Core.isRunningInBrowser()) { + throw new Errors.OpenAIError( + "It looks like you're running in a browser-like environment.\n\nThis is disabled by default, as it risks exposing your secret API credentials to attackers.\nIf you understand the risks and have appropriate mitigations in place,\nyou can set the `dangerouslyAllowBrowser` option to `true`, e.g.,\n\nnew OpenAI({ apiKey, dangerouslyAllowBrowser: true });\n\nhttps://help.openai.com/en/articles/5112595-best-practices-for-api-key-safety\n", + ); + } + + super({ + baseURL: options.baseURL!, + timeout: options.timeout ?? 
600000 /* 10 minutes */, + httpAgent: options.httpAgent, + maxRetries: options.maxRetries, + fetch: options.fetch, + }); + + this._options = options; + + this.apiKey = apiKey; + this.organization = organization; + this.project = project; + } + + completions: API.Completions = new API.Completions(this); + chat: API.Chat = new API.Chat(this); + embeddings: API.Embeddings = new API.Embeddings(this); + files: API.Files = new API.Files(this); + images: API.Images = new API.Images(this); + audio: API.Audio = new API.Audio(this); + moderations: API.Moderations = new API.Moderations(this); + models: API.Models = new API.Models(this); + fineTuning: API.FineTuning = new API.FineTuning(this); + beta: API.Beta = new API.Beta(this); + batches: API.Batches = new API.Batches(this); + uploads: API.Uploads = new API.Uploads(this); + + protected override defaultQuery(): Core.DefaultQuery | undefined { + return this._options.defaultQuery; + } + + protected override defaultHeaders(opts: Core.FinalRequestOptions): Core.Headers { + return { + ...super.defaultHeaders(opts), + 'OpenAI-Organization': this.organization, + 'OpenAI-Project': this.project, + ...this._options.defaultHeaders, + }; + } + + protected override authHeaders(opts: Core.FinalRequestOptions): Core.Headers { + return { Authorization: `Bearer ${this.apiKey}` }; + } + + protected override stringifyQuery(query: Record): string { + return qs.stringify(query, { arrayFormat: 'brackets' }); + } + + static OpenAI = this; + static DEFAULT_TIMEOUT = 600000; // 10 minutes + + static OpenAIError = Errors.OpenAIError; + static APIError = Errors.APIError; + static APIConnectionError = Errors.APIConnectionError; + static APIConnectionTimeoutError = Errors.APIConnectionTimeoutError; + static APIUserAbortError = Errors.APIUserAbortError; + static NotFoundError = Errors.NotFoundError; + static ConflictError = Errors.ConflictError; + static RateLimitError = Errors.RateLimitError; + static BadRequestError = Errors.BadRequestError; + static AuthenticationError = Errors.AuthenticationError; + static InternalServerError = Errors.InternalServerError; + static PermissionDeniedError = Errors.PermissionDeniedError; + static UnprocessableEntityError = Errors.UnprocessableEntityError; + + static toFile = Uploads.toFile; + static fileFromPath = Uploads.fileFromPath; +} + +OpenAI.Completions = Completions; +OpenAI.Chat = Chat; +OpenAI.Embeddings = Embeddings; +OpenAI.Files = Files; +OpenAI.FileObjectsPage = FileObjectsPage; +OpenAI.Images = Images; +OpenAI.Audio = Audio; +OpenAI.Moderations = Moderations; +OpenAI.Models = Models; +OpenAI.ModelsPage = ModelsPage; +OpenAI.FineTuning = FineTuning; +OpenAI.Beta = Beta; +OpenAI.Batches = Batches; +OpenAI.BatchesPage = BatchesPage; +OpenAI.Uploads = UploadsAPIUploads; +export declare namespace OpenAI { + export type RequestOptions = Core.RequestOptions; + + export import Page = Pagination.Page; + export { type PageResponse as PageResponse }; + + export import CursorPage = Pagination.CursorPage; + export { type CursorPageParams as CursorPageParams, type CursorPageResponse as CursorPageResponse }; + + export { + Completions as Completions, + type Completion as Completion, + type CompletionChoice as CompletionChoice, + type CompletionUsage as CompletionUsage, + type CompletionCreateParams as CompletionCreateParams, + type CompletionCreateParamsNonStreaming as CompletionCreateParamsNonStreaming, + type CompletionCreateParamsStreaming as CompletionCreateParamsStreaming, + }; + + export { + Chat as Chat, + type ChatModel as ChatModel, + 
type ChatCompletion as ChatCompletion, + type ChatCompletionAssistantMessageParam as ChatCompletionAssistantMessageParam, + type ChatCompletionAudio as ChatCompletionAudio, + type ChatCompletionAudioParam as ChatCompletionAudioParam, + type ChatCompletionChunk as ChatCompletionChunk, + type ChatCompletionContentPart as ChatCompletionContentPart, + type ChatCompletionContentPartImage as ChatCompletionContentPartImage, + type ChatCompletionContentPartInputAudio as ChatCompletionContentPartInputAudio, + type ChatCompletionContentPartRefusal as ChatCompletionContentPartRefusal, + type ChatCompletionContentPartText as ChatCompletionContentPartText, + type ChatCompletionDeveloperMessageParam as ChatCompletionDeveloperMessageParam, + type ChatCompletionFunctionCallOption as ChatCompletionFunctionCallOption, + type ChatCompletionFunctionMessageParam as ChatCompletionFunctionMessageParam, + type ChatCompletionMessage as ChatCompletionMessage, + type ChatCompletionMessageParam as ChatCompletionMessageParam, + type ChatCompletionMessageToolCall as ChatCompletionMessageToolCall, + type ChatCompletionModality as ChatCompletionModality, + type ChatCompletionNamedToolChoice as ChatCompletionNamedToolChoice, + type ChatCompletionPredictionContent as ChatCompletionPredictionContent, + type ChatCompletionReasoningEffort as ChatCompletionReasoningEffort, + type ChatCompletionRole as ChatCompletionRole, + type ChatCompletionStreamOptions as ChatCompletionStreamOptions, + type ChatCompletionSystemMessageParam as ChatCompletionSystemMessageParam, + type ChatCompletionTokenLogprob as ChatCompletionTokenLogprob, + type ChatCompletionTool as ChatCompletionTool, + type ChatCompletionToolChoiceOption as ChatCompletionToolChoiceOption, + type ChatCompletionToolMessageParam as ChatCompletionToolMessageParam, + type ChatCompletionUserMessageParam as ChatCompletionUserMessageParam, + type ChatCompletionCreateParams as ChatCompletionCreateParams, + type ChatCompletionCreateParamsNonStreaming as ChatCompletionCreateParamsNonStreaming, + type ChatCompletionCreateParamsStreaming as ChatCompletionCreateParamsStreaming, + }; + + export { + Embeddings as Embeddings, + type CreateEmbeddingResponse as CreateEmbeddingResponse, + type Embedding as Embedding, + type EmbeddingModel as EmbeddingModel, + type EmbeddingCreateParams as EmbeddingCreateParams, + }; + + export { + Files as Files, + type FileContent as FileContent, + type FileDeleted as FileDeleted, + type FileObject as FileObject, + type FilePurpose as FilePurpose, + FileObjectsPage as FileObjectsPage, + type FileCreateParams as FileCreateParams, + type FileListParams as FileListParams, + }; + + export { + Images as Images, + type Image as Image, + type ImageModel as ImageModel, + type ImagesResponse as ImagesResponse, + type ImageCreateVariationParams as ImageCreateVariationParams, + type ImageEditParams as ImageEditParams, + type ImageGenerateParams as ImageGenerateParams, + }; + + export { Audio as Audio, type AudioModel as AudioModel, type AudioResponseFormat as AudioResponseFormat }; + + export { + Moderations as Moderations, + type Moderation as Moderation, + type ModerationImageURLInput as ModerationImageURLInput, + type ModerationModel as ModerationModel, + type ModerationMultiModalInput as ModerationMultiModalInput, + type ModerationTextInput as ModerationTextInput, + type ModerationCreateResponse as ModerationCreateResponse, + type ModerationCreateParams as ModerationCreateParams, + }; + + export { + Models as Models, + type Model as Model, + type ModelDeleted as 
ModelDeleted, + ModelsPage as ModelsPage, + }; + + export { FineTuning as FineTuning }; + + export { Beta as Beta }; + + export { + Batches as Batches, + type Batch as Batch, + type BatchError as BatchError, + type BatchRequestCounts as BatchRequestCounts, + BatchesPage as BatchesPage, + type BatchCreateParams as BatchCreateParams, + type BatchListParams as BatchListParams, + }; + + export { + UploadsAPIUploads as Uploads, + type Upload as Upload, + type UploadCreateParams as UploadCreateParams, + type UploadCompleteParams as UploadCompleteParams, + }; + + export type ErrorObject = API.ErrorObject; + export type FunctionDefinition = API.FunctionDefinition; + export type FunctionParameters = API.FunctionParameters; + export type ResponseFormatJSONObject = API.ResponseFormatJSONObject; + export type ResponseFormatJSONSchema = API.ResponseFormatJSONSchema; + export type ResponseFormatText = API.ResponseFormatText; +} + +// ---------------------- Azure ---------------------- + +/** API Client for interfacing with the Azure OpenAI API. */ +export interface AzureClientOptions extends ClientOptions { + /** + * Defaults to process.env['OPENAI_API_VERSION']. + */ + apiVersion?: string | undefined; + + /** + * Your Azure endpoint, including the resource, e.g. `https://example-resource.azure.openai.com/` + */ + endpoint?: string | undefined; + + /** + * A model deployment, if given, sets the base client URL to include `/deployments/{deployment}`. + * Note: this means you won't be able to use non-deployment endpoints. Not supported with Assistants APIs. + */ + deployment?: string | undefined; + + /** + * Defaults to process.env['AZURE_OPENAI_API_KEY']. + */ + apiKey?: string | undefined; + + /** + * A function that returns an access token for Microsoft Entra (formerly known as Azure Active Directory), + * which will be invoked on every request. + */ + azureADTokenProvider?: (() => Promise) | undefined; +} + +/** API Client for interfacing with the Azure OpenAI API. */ +export class AzureOpenAI extends OpenAI { + private _azureADTokenProvider: (() => Promise) | undefined; + private _deployment: string | undefined; + apiVersion: string = ''; + /** + * API Client for interfacing with the Azure OpenAI API. + * + * @param {string | undefined} [opts.apiVersion=process.env['OPENAI_API_VERSION'] ?? undefined] + * @param {string | undefined} [opts.endpoint=process.env['AZURE_OPENAI_ENDPOINT'] ?? undefined] - Your Azure endpoint, including the resource, e.g. `https://example-resource.azure.openai.com/` + * @param {string | undefined} [opts.apiKey=process.env['AZURE_OPENAI_API_KEY'] ?? undefined] + * @param {string | undefined} opts.deployment - A model deployment, if given, sets the base client URL to include `/deployments/{deployment}`. + * @param {string | null | undefined} [opts.organization=process.env['OPENAI_ORG_ID'] ?? null] + * @param {string} [opts.baseURL=process.env['OPENAI_BASE_URL']] - Sets the base URL for the API, e.g. `https://example-resource.azure.openai.com/openai/`. + * @param {number} [opts.timeout=10 minutes] - The maximum amount of time (in milliseconds) the client will wait for a response before timing out. + * @param {number} [opts.httpAgent] - An HTTP agent used to manage HTTP(s) connections. + * @param {Core.Fetch} [opts.fetch] - Specify a custom `fetch` function implementation. + * @param {number} [opts.maxRetries=2] - The maximum number of times the client will retry a request. + * @param {Core.Headers} opts.defaultHeaders - Default headers to include with every request to the API. 
+ * @param {Core.DefaultQuery} opts.defaultQuery - Default query parameters to include with every request to the API. + * @param {boolean} [opts.dangerouslyAllowBrowser=false] - By default, client-side use of this library is not allowed, as it risks exposing your secret API credentials to attackers. + */ + constructor({ + baseURL = Core.readEnv('OPENAI_BASE_URL'), + apiKey = Core.readEnv('AZURE_OPENAI_API_KEY'), + apiVersion = Core.readEnv('OPENAI_API_VERSION'), + endpoint, + deployment, + azureADTokenProvider, + dangerouslyAllowBrowser, + ...opts + }: AzureClientOptions = {}) { + if (!apiVersion) { + throw new Errors.OpenAIError( + "The OPENAI_API_VERSION environment variable is missing or empty; either provide it, or instantiate the AzureOpenAI client with an apiVersion option, like new AzureOpenAI({ apiVersion: 'My API Version' }).", + ); + } + + if (typeof azureADTokenProvider === 'function') { + dangerouslyAllowBrowser = true; + } + + if (!azureADTokenProvider && !apiKey) { + throw new Errors.OpenAIError( + 'Missing credentials. Please pass one of `apiKey` and `azureADTokenProvider`, or set the `AZURE_OPENAI_API_KEY` environment variable.', + ); + } + + if (azureADTokenProvider && apiKey) { + throw new Errors.OpenAIError( + 'The `apiKey` and `azureADTokenProvider` arguments are mutually exclusive; only one can be passed at a time.', + ); + } + + // define a sentinel value to avoid any typing issues + apiKey ??= API_KEY_SENTINEL; + + opts.defaultQuery = { ...opts.defaultQuery, 'api-version': apiVersion }; + + if (!baseURL) { + if (!endpoint) { + endpoint = process.env['AZURE_OPENAI_ENDPOINT']; + } + + if (!endpoint) { + throw new Errors.OpenAIError( + 'Must provide one of the `baseURL` or `endpoint` arguments, or the `AZURE_OPENAI_ENDPOINT` environment variable', + ); + } + + baseURL = `${endpoint}/openai`; + } else { + if (endpoint) { + throw new Errors.OpenAIError('baseURL and endpoint are mutually exclusive'); + } + } + + super({ + apiKey, + baseURL, + ...opts, + ...(dangerouslyAllowBrowser !== undefined ? { dangerouslyAllowBrowser } : {}), + }); + + this._azureADTokenProvider = azureADTokenProvider; + this.apiVersion = apiVersion; + this._deployment = deployment; + } + + override buildRequest(options: Core.FinalRequestOptions): { + req: RequestInit; + url: string; + timeout: number; + } { + if (_deployments_endpoints.has(options.path) && options.method === 'post' && options.body !== undefined) { + if (!Core.isObj(options.body)) { + throw new Error('Expected request body to be an object'); + } + const model = this._deployment || options.body['model']; + if (model !== undefined && !this.baseURL.includes('/deployments')) { + options.path = `/deployments/${model}${options.path}`; + } + } + return super.buildRequest(options); + } + + private async _getAzureADToken(): Promise { + if (typeof this._azureADTokenProvider === 'function') { + const token = await this._azureADTokenProvider(); + if (!token || typeof token !== 'string') { + throw new Errors.OpenAIError( + `Expected 'azureADTokenProvider' argument to return a string but it returned ${token}`, + ); + } + return token; + } + return undefined; + } + + protected override authHeaders(opts: Core.FinalRequestOptions): Core.Headers { + return {}; + } + + protected override async prepareOptions(opts: Core.FinalRequestOptions): Promise { + /** + * The user should provide a bearer token provider if they want + * to use Azure AD authentication. 
The user shouldn't set the + * Authorization header manually because the header is overwritten + * with the Azure AD token if a bearer token provider is provided. + */ + if (opts.headers?.['api-key']) { + return super.prepareOptions(opts); + } + const token = await this._getAzureADToken(); + opts.headers ??= {}; + if (token) { + opts.headers['Authorization'] = `Bearer ${token}`; + } else if (this.apiKey !== API_KEY_SENTINEL) { + opts.headers['api-key'] = this.apiKey; + } else { + throw new Errors.OpenAIError('Unable to handle auth'); + } + return super.prepareOptions(opts); + } +} + +const _deployments_endpoints = new Set([ + '/completions', + '/chat/completions', + '/embeddings', + '/audio/transcriptions', + '/audio/translations', + '/audio/speech', + '/images/generations', +]); + +const API_KEY_SENTINEL = ''; + +// ---------------------- End Azure ---------------------- + +export { toFile, fileFromPath } from './uploads'; +export { + OpenAIError, + APIError, + APIConnectionError, + APIConnectionTimeoutError, + APIUserAbortError, + NotFoundError, + ConflictError, + RateLimitError, + BadRequestError, + AuthenticationError, + InternalServerError, + PermissionDeniedError, + UnprocessableEntityError, +} from './error'; + +export default OpenAI; diff --git a/node_modules/openai/src/internal/decoders/line.ts b/node_modules/openai/src/internal/decoders/line.ts new file mode 100644 index 0000000..1e0bbf3 --- /dev/null +++ b/node_modules/openai/src/internal/decoders/line.ts @@ -0,0 +1,114 @@ +import { OpenAIError } from '../../error'; + +type Bytes = string | ArrayBuffer | Uint8Array | Buffer | null | undefined; + +/** + * A re-implementation of httpx's `LineDecoder` in Python that handles incrementally + * reading lines from text. + * + * https://github.com/encode/httpx/blob/920333ea98118e9cf617f246905d7b202510941c/httpx/_decoders.py#L258 + */ +export class LineDecoder { + // prettier-ignore + static NEWLINE_CHARS = new Set(['\n', '\r']); + static NEWLINE_REGEXP = /\r\n|[\n\r]/g; + + buffer: string[]; + trailingCR: boolean; + textDecoder: any; // TextDecoder found in browsers; not typed to avoid pulling in either "dom" or "node" types. 
+ + constructor() { + this.buffer = []; + this.trailingCR = false; + } + + decode(chunk: Bytes): string[] { + let text = this.decodeText(chunk); + + if (this.trailingCR) { + text = '\r' + text; + this.trailingCR = false; + } + if (text.endsWith('\r')) { + this.trailingCR = true; + text = text.slice(0, -1); + } + + if (!text) { + return []; + } + + const trailingNewline = LineDecoder.NEWLINE_CHARS.has(text[text.length - 1] || ''); + let lines = text.split(LineDecoder.NEWLINE_REGEXP); + + // if there is a trailing new line then the last entry will be an empty + // string which we don't care about + if (trailingNewline) { + lines.pop(); + } + + if (lines.length === 1 && !trailingNewline) { + this.buffer.push(lines[0]!); + return []; + } + + if (this.buffer.length > 0) { + lines = [this.buffer.join('') + lines[0], ...lines.slice(1)]; + this.buffer = []; + } + + if (!trailingNewline) { + this.buffer = [lines.pop() || '']; + } + + return lines; + } + + decodeText(bytes: Bytes): string { + if (bytes == null) return ''; + if (typeof bytes === 'string') return bytes; + + // Node: + if (typeof Buffer !== 'undefined') { + if (bytes instanceof Buffer) { + return bytes.toString(); + } + if (bytes instanceof Uint8Array) { + return Buffer.from(bytes).toString(); + } + + throw new OpenAIError( + `Unexpected: received non-Uint8Array (${bytes.constructor.name}) stream chunk in an environment with a global "Buffer" defined, which this library assumes to be Node. Please report this error.`, + ); + } + + // Browser + if (typeof TextDecoder !== 'undefined') { + if (bytes instanceof Uint8Array || bytes instanceof ArrayBuffer) { + this.textDecoder ??= new TextDecoder('utf8'); + return this.textDecoder.decode(bytes); + } + + throw new OpenAIError( + `Unexpected: received non-Uint8Array/ArrayBuffer (${ + (bytes as any).constructor.name + }) in a web platform. Please report this error.`, + ); + } + + throw new OpenAIError( + `Unexpected: neither Buffer nor TextDecoder are available as globals. Please report this error.`, + ); + } + + flush(): string[] { + if (!this.buffer.length && !this.trailingCR) { + return []; + } + + const lines = [this.buffer.join('')]; + this.buffer = []; + this.trailingCR = false; + return lines; + } +} diff --git a/node_modules/openai/src/internal/qs/LICENSE.md b/node_modules/openai/src/internal/qs/LICENSE.md new file mode 100644 index 0000000..3fda157 --- /dev/null +++ b/node_modules/openai/src/internal/qs/LICENSE.md @@ -0,0 +1,13 @@ +BSD 3-Clause License + +Copyright (c) 2014, Nathan LaFreniere and other [contributors](https://github.com/puruvj/neoqs/graphs/contributors) All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. 
+ +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/node_modules/openai/src/internal/qs/README.md b/node_modules/openai/src/internal/qs/README.md new file mode 100644 index 0000000..67ae04e --- /dev/null +++ b/node_modules/openai/src/internal/qs/README.md @@ -0,0 +1,3 @@ +# qs + +This is a vendored version of [neoqs](https://github.com/PuruVJ/neoqs) which is a TypeScript rewrite of [qs](https://github.com/ljharb/qs), a query string library. diff --git a/node_modules/openai/src/internal/qs/formats.ts b/node_modules/openai/src/internal/qs/formats.ts new file mode 100644 index 0000000..1cf9e2c --- /dev/null +++ b/node_modules/openai/src/internal/qs/formats.ts @@ -0,0 +1,9 @@ +import type { Format } from './types'; + +export const default_format: Format = 'RFC3986'; +export const formatters: Record string> = { + RFC1738: (v: PropertyKey) => String(v).replace(/%20/g, '+'), + RFC3986: (v: PropertyKey) => String(v), +}; +export const RFC1738 = 'RFC1738'; +export const RFC3986 = 'RFC3986'; diff --git a/node_modules/openai/src/internal/qs/index.ts b/node_modules/openai/src/internal/qs/index.ts new file mode 100644 index 0000000..c3a3620 --- /dev/null +++ b/node_modules/openai/src/internal/qs/index.ts @@ -0,0 +1,13 @@ +import { default_format, formatters, RFC1738, RFC3986 } from './formats'; + +const formats = { + formatters, + RFC1738, + RFC3986, + default: default_format, +}; + +export { stringify } from './stringify'; +export { formats }; + +export type { DefaultDecoder, DefaultEncoder, Format, ParseOptions, StringifyOptions } from './types'; diff --git a/node_modules/openai/src/internal/qs/stringify.ts b/node_modules/openai/src/internal/qs/stringify.ts new file mode 100644 index 0000000..6749756 --- /dev/null +++ b/node_modules/openai/src/internal/qs/stringify.ts @@ -0,0 +1,388 @@ +import { encode, is_buffer, maybe_map } from './utils'; +import { default_format, formatters } from './formats'; +import type { NonNullableProperties, StringifyOptions } from './types'; + +const has = Object.prototype.hasOwnProperty; + +const array_prefix_generators = { + brackets(prefix: PropertyKey) { + return String(prefix) + '[]'; + }, + comma: 'comma', + indices(prefix: PropertyKey, key: string) { + return String(prefix) + '[' + key + ']'; + }, + repeat(prefix: PropertyKey) { + return String(prefix); + }, +}; + +const is_array = Array.isArray; +const push = Array.prototype.push; +const push_to_array = function (arr: any[], value_or_array: any) { + push.apply(arr, is_array(value_or_array) ? 
value_or_array : [value_or_array]); +}; + +const to_ISO = Date.prototype.toISOString; + +const defaults = { + addQueryPrefix: false, + allowDots: false, + allowEmptyArrays: false, + arrayFormat: 'indices', + charset: 'utf-8', + charsetSentinel: false, + delimiter: '&', + encode: true, + encodeDotInKeys: false, + encoder: encode, + encodeValuesOnly: false, + format: default_format, + formatter: formatters[default_format], + /** @deprecated */ + indices: false, + serializeDate(date) { + return to_ISO.call(date); + }, + skipNulls: false, + strictNullHandling: false, +} as NonNullableProperties; + +function is_non_nullish_primitive(v: unknown): v is string | number | boolean | symbol | bigint { + return ( + typeof v === 'string' || + typeof v === 'number' || + typeof v === 'boolean' || + typeof v === 'symbol' || + typeof v === 'bigint' + ); +} + +const sentinel = {}; + +function inner_stringify( + object: any, + prefix: PropertyKey, + generateArrayPrefix: StringifyOptions['arrayFormat'] | ((prefix: string, key: string) => string), + commaRoundTrip: boolean, + allowEmptyArrays: boolean, + strictNullHandling: boolean, + skipNulls: boolean, + encodeDotInKeys: boolean, + encoder: StringifyOptions['encoder'], + filter: StringifyOptions['filter'], + sort: StringifyOptions['sort'], + allowDots: StringifyOptions['allowDots'], + serializeDate: StringifyOptions['serializeDate'], + format: StringifyOptions['format'], + formatter: StringifyOptions['formatter'], + encodeValuesOnly: boolean, + charset: StringifyOptions['charset'], + sideChannel: WeakMap, +) { + let obj = object; + + let tmp_sc = sideChannel; + let step = 0; + let find_flag = false; + while ((tmp_sc = tmp_sc.get(sentinel)) !== void undefined && !find_flag) { + // Where object last appeared in the ref tree + const pos = tmp_sc.get(object); + step += 1; + if (typeof pos !== 'undefined') { + if (pos === step) { + throw new RangeError('Cyclic object value'); + } else { + find_flag = true; // Break while + } + } + if (typeof tmp_sc.get(sentinel) === 'undefined') { + step = 0; + } + } + + if (typeof filter === 'function') { + obj = filter(prefix, obj); + } else if (obj instanceof Date) { + obj = serializeDate?.(obj); + } else if (generateArrayPrefix === 'comma' && is_array(obj)) { + obj = maybe_map(obj, function (value) { + if (value instanceof Date) { + return serializeDate?.(value); + } + return value; + }); + } + + if (obj === null) { + if (strictNullHandling) { + return encoder && !encodeValuesOnly ? + // @ts-expect-error + encoder(prefix, defaults.encoder, charset, 'key', format) + : prefix; + } + + obj = ''; + } + + if (is_non_nullish_primitive(obj) || is_buffer(obj)) { + if (encoder) { + const key_value = + encodeValuesOnly ? prefix + // @ts-expect-error + : encoder(prefix, defaults.encoder, charset, 'key', format); + return [ + formatter?.(key_value) + + '=' + + // @ts-expect-error + formatter?.(encoder(obj, defaults.encoder, charset, 'value', format)), + ]; + } + return [formatter?.(prefix) + '=' + formatter?.(String(obj))]; + } + + const values: string[] = []; + + if (typeof obj === 'undefined') { + return values; + } + + let obj_keys; + if (generateArrayPrefix === 'comma' && is_array(obj)) { + // we need to join elements in + if (encodeValuesOnly && encoder) { + // @ts-expect-error values only + obj = maybe_map(obj, encoder); + } + obj_keys = [{ value: obj.length > 0 ? obj.join(',') || null : void undefined }]; + } else if (is_array(filter)) { + obj_keys = filter; + } else { + const keys = Object.keys(obj); + obj_keys = sort ? 
keys.sort(sort) : keys; + } + + const encoded_prefix = encodeDotInKeys ? String(prefix).replace(/\./g, '%2E') : String(prefix); + + const adjusted_prefix = + commaRoundTrip && is_array(obj) && obj.length === 1 ? encoded_prefix + '[]' : encoded_prefix; + + if (allowEmptyArrays && is_array(obj) && obj.length === 0) { + return adjusted_prefix + '[]'; + } + + for (let j = 0; j < obj_keys.length; ++j) { + const key = obj_keys[j]; + const value = + // @ts-ignore + typeof key === 'object' && typeof key.value !== 'undefined' ? key.value : obj[key as any]; + + if (skipNulls && value === null) { + continue; + } + + // @ts-ignore + const encoded_key = allowDots && encodeDotInKeys ? (key as any).replace(/\./g, '%2E') : key; + const key_prefix = + is_array(obj) ? + typeof generateArrayPrefix === 'function' ? + generateArrayPrefix(adjusted_prefix, encoded_key) + : adjusted_prefix + : adjusted_prefix + (allowDots ? '.' + encoded_key : '[' + encoded_key + ']'); + + sideChannel.set(object, step); + const valueSideChannel = new WeakMap(); + valueSideChannel.set(sentinel, sideChannel); + push_to_array( + values, + inner_stringify( + value, + key_prefix, + generateArrayPrefix, + commaRoundTrip, + allowEmptyArrays, + strictNullHandling, + skipNulls, + encodeDotInKeys, + // @ts-ignore + generateArrayPrefix === 'comma' && encodeValuesOnly && is_array(obj) ? null : encoder, + filter, + sort, + allowDots, + serializeDate, + format, + formatter, + encodeValuesOnly, + charset, + valueSideChannel, + ), + ); + } + + return values; +} + +function normalize_stringify_options( + opts: StringifyOptions = defaults, +): NonNullableProperties> & { indices?: boolean } { + if (typeof opts.allowEmptyArrays !== 'undefined' && typeof opts.allowEmptyArrays !== 'boolean') { + throw new TypeError('`allowEmptyArrays` option can only be `true` or `false`, when provided'); + } + + if (typeof opts.encodeDotInKeys !== 'undefined' && typeof opts.encodeDotInKeys !== 'boolean') { + throw new TypeError('`encodeDotInKeys` option can only be `true` or `false`, when provided'); + } + + if (opts.encoder !== null && typeof opts.encoder !== 'undefined' && typeof opts.encoder !== 'function') { + throw new TypeError('Encoder has to be a function.'); + } + + const charset = opts.charset || defaults.charset; + if (typeof opts.charset !== 'undefined' && opts.charset !== 'utf-8' && opts.charset !== 'iso-8859-1') { + throw new TypeError('The charset option must be either utf-8, iso-8859-1, or undefined'); + } + + let format = default_format; + if (typeof opts.format !== 'undefined') { + if (!has.call(formatters, opts.format)) { + throw new TypeError('Unknown format option provided.'); + } + format = opts.format; + } + const formatter = formatters[format]; + + let filter = defaults.filter; + if (typeof opts.filter === 'function' || is_array(opts.filter)) { + filter = opts.filter; + } + + let arrayFormat: StringifyOptions['arrayFormat']; + if (opts.arrayFormat && opts.arrayFormat in array_prefix_generators) { + arrayFormat = opts.arrayFormat; + } else if ('indices' in opts) { + arrayFormat = opts.indices ? 'indices' : 'repeat'; + } else { + arrayFormat = defaults.arrayFormat; + } + + if ('commaRoundTrip' in opts && typeof opts.commaRoundTrip !== 'boolean') { + throw new TypeError('`commaRoundTrip` must be a boolean, or absent'); + } + + const allowDots = + typeof opts.allowDots === 'undefined' ? + !!opts.encodeDotInKeys === true ? + true + : defaults.allowDots + : !!opts.allowDots; + + return { + addQueryPrefix: typeof opts.addQueryPrefix === 'boolean' ? 
opts.addQueryPrefix : defaults.addQueryPrefix, + // @ts-ignore + allowDots: allowDots, + allowEmptyArrays: + typeof opts.allowEmptyArrays === 'boolean' ? !!opts.allowEmptyArrays : defaults.allowEmptyArrays, + arrayFormat: arrayFormat, + charset: charset, + charsetSentinel: + typeof opts.charsetSentinel === 'boolean' ? opts.charsetSentinel : defaults.charsetSentinel, + commaRoundTrip: !!opts.commaRoundTrip, + delimiter: typeof opts.delimiter === 'undefined' ? defaults.delimiter : opts.delimiter, + encode: typeof opts.encode === 'boolean' ? opts.encode : defaults.encode, + encodeDotInKeys: + typeof opts.encodeDotInKeys === 'boolean' ? opts.encodeDotInKeys : defaults.encodeDotInKeys, + encoder: typeof opts.encoder === 'function' ? opts.encoder : defaults.encoder, + encodeValuesOnly: + typeof opts.encodeValuesOnly === 'boolean' ? opts.encodeValuesOnly : defaults.encodeValuesOnly, + filter: filter, + format: format, + formatter: formatter, + serializeDate: typeof opts.serializeDate === 'function' ? opts.serializeDate : defaults.serializeDate, + skipNulls: typeof opts.skipNulls === 'boolean' ? opts.skipNulls : defaults.skipNulls, + // @ts-ignore + sort: typeof opts.sort === 'function' ? opts.sort : null, + strictNullHandling: + typeof opts.strictNullHandling === 'boolean' ? opts.strictNullHandling : defaults.strictNullHandling, + }; +} + +export function stringify(object: any, opts: StringifyOptions = {}) { + let obj = object; + const options = normalize_stringify_options(opts); + + let obj_keys: PropertyKey[] | undefined; + let filter; + + if (typeof options.filter === 'function') { + filter = options.filter; + obj = filter('', obj); + } else if (is_array(options.filter)) { + filter = options.filter; + obj_keys = filter; + } + + const keys: string[] = []; + + if (typeof obj !== 'object' || obj === null) { + return ''; + } + + const generateArrayPrefix = array_prefix_generators[options.arrayFormat]; + const commaRoundTrip = generateArrayPrefix === 'comma' && options.commaRoundTrip; + + if (!obj_keys) { + obj_keys = Object.keys(obj); + } + + if (options.sort) { + obj_keys.sort(options.sort); + } + + const sideChannel = new WeakMap(); + for (let i = 0; i < obj_keys.length; ++i) { + const key = obj_keys[i]!; + + if (options.skipNulls && obj[key] === null) { + continue; + } + push_to_array( + keys, + inner_stringify( + obj[key], + key, + // @ts-expect-error + generateArrayPrefix, + commaRoundTrip, + options.allowEmptyArrays, + options.strictNullHandling, + options.skipNulls, + options.encodeDotInKeys, + options.encode ? options.encoder : null, + options.filter, + options.sort, + options.allowDots, + options.serializeDate, + options.format, + options.formatter, + options.encodeValuesOnly, + options.charset, + sideChannel, + ), + ); + } + + const joined = keys.join(options.delimiter); + let prefix = options.addQueryPrefix === true ? '?' : ''; + + if (options.charsetSentinel) { + if (options.charset === 'iso-8859-1') { + // encodeURIComponent('✓'), the "numeric entity" representation of a checkmark + prefix += 'utf8=%26%2310003%3B&'; + } else { + // encodeURIComponent('✓') + prefix += 'utf8=%E2%9C%93&'; + } + } + + return joined.length > 0 ? 
prefix + joined : ''; +} diff --git a/node_modules/openai/src/internal/qs/types.ts b/node_modules/openai/src/internal/qs/types.ts new file mode 100644 index 0000000..7c28dbb --- /dev/null +++ b/node_modules/openai/src/internal/qs/types.ts @@ -0,0 +1,71 @@ +export type Format = 'RFC1738' | 'RFC3986'; + +export type DefaultEncoder = (str: any, defaultEncoder?: any, charset?: string) => string; +export type DefaultDecoder = (str: string, decoder?: any, charset?: string) => string; + +export type BooleanOptional = boolean | undefined; + +export type StringifyBaseOptions = { + delimiter?: string; + allowDots?: boolean; + encodeDotInKeys?: boolean; + strictNullHandling?: boolean; + skipNulls?: boolean; + encode?: boolean; + encoder?: ( + str: any, + defaultEncoder: DefaultEncoder, + charset: string, + type: 'key' | 'value', + format?: Format, + ) => string; + filter?: Array | ((prefix: PropertyKey, value: any) => any); + arrayFormat?: 'indices' | 'brackets' | 'repeat' | 'comma'; + indices?: boolean; + sort?: ((a: PropertyKey, b: PropertyKey) => number) | null; + serializeDate?: (d: Date) => string; + format?: 'RFC1738' | 'RFC3986'; + formatter?: (str: PropertyKey) => string; + encodeValuesOnly?: boolean; + addQueryPrefix?: boolean; + charset?: 'utf-8' | 'iso-8859-1'; + charsetSentinel?: boolean; + allowEmptyArrays?: boolean; + commaRoundTrip?: boolean; +}; + +export type StringifyOptions = StringifyBaseOptions; + +export type ParseBaseOptions = { + comma?: boolean; + delimiter?: string | RegExp; + depth?: number | false; + decoder?: (str: string, defaultDecoder: DefaultDecoder, charset: string, type: 'key' | 'value') => any; + arrayLimit?: number; + parseArrays?: boolean; + plainObjects?: boolean; + allowPrototypes?: boolean; + allowSparse?: boolean; + parameterLimit?: number; + strictDepth?: boolean; + strictNullHandling?: boolean; + ignoreQueryPrefix?: boolean; + charset?: 'utf-8' | 'iso-8859-1'; + charsetSentinel?: boolean; + interpretNumericEntities?: boolean; + allowEmptyArrays?: boolean; + duplicates?: 'combine' | 'first' | 'last'; + allowDots?: boolean; + decodeDotInKeys?: boolean; +}; + +export type ParseOptions = ParseBaseOptions; + +export type ParsedQs = { + [key: string]: undefined | string | string[] | ParsedQs | ParsedQs[]; +}; + +// Type to remove null or undefined union from each property +export type NonNullableProperties = { + [K in keyof T]-?: Exclude; +}; diff --git a/node_modules/openai/src/internal/qs/utils.ts b/node_modules/openai/src/internal/qs/utils.ts new file mode 100644 index 0000000..113b18f --- /dev/null +++ b/node_modules/openai/src/internal/qs/utils.ts @@ -0,0 +1,265 @@ +import { RFC1738 } from './formats'; +import type { DefaultEncoder, Format } from './types'; + +const has = Object.prototype.hasOwnProperty; +const is_array = Array.isArray; + +const hex_table = (() => { + const array = []; + for (let i = 0; i < 256; ++i) { + array.push('%' + ((i < 16 ? 
'0' : '') + i.toString(16)).toUpperCase()); + } + + return array; +})(); + +function compact_queue>(queue: Array<{ obj: T; prop: string }>) { + while (queue.length > 1) { + const item = queue.pop(); + if (!item) continue; + + const obj = item.obj[item.prop]; + + if (is_array(obj)) { + const compacted: unknown[] = []; + + for (let j = 0; j < obj.length; ++j) { + if (typeof obj[j] !== 'undefined') { + compacted.push(obj[j]); + } + } + + // @ts-ignore + item.obj[item.prop] = compacted; + } + } +} + +function array_to_object(source: any[], options: { plainObjects: boolean }) { + const obj = options && options.plainObjects ? Object.create(null) : {}; + for (let i = 0; i < source.length; ++i) { + if (typeof source[i] !== 'undefined') { + obj[i] = source[i]; + } + } + + return obj; +} + +export function merge( + target: any, + source: any, + options: { plainObjects?: boolean; allowPrototypes?: boolean } = {}, +) { + if (!source) { + return target; + } + + if (typeof source !== 'object') { + if (is_array(target)) { + target.push(source); + } else if (target && typeof target === 'object') { + if ( + (options && (options.plainObjects || options.allowPrototypes)) || + !has.call(Object.prototype, source) + ) { + target[source] = true; + } + } else { + return [target, source]; + } + + return target; + } + + if (!target || typeof target !== 'object') { + return [target].concat(source); + } + + let mergeTarget = target; + if (is_array(target) && !is_array(source)) { + // @ts-ignore + mergeTarget = array_to_object(target, options); + } + + if (is_array(target) && is_array(source)) { + source.forEach(function (item, i) { + if (has.call(target, i)) { + const targetItem = target[i]; + if (targetItem && typeof targetItem === 'object' && item && typeof item === 'object') { + target[i] = merge(targetItem, item, options); + } else { + target.push(item); + } + } else { + target[i] = item; + } + }); + return target; + } + + return Object.keys(source).reduce(function (acc, key) { + const value = source[key]; + + if (has.call(acc, key)) { + acc[key] = merge(acc[key], value, options); + } else { + acc[key] = value; + } + return acc; + }, mergeTarget); +} + +export function assign_single_source(target: any, source: any) { + return Object.keys(source).reduce(function (acc, key) { + acc[key] = source[key]; + return acc; + }, target); +} + +export function decode(str: string, _: any, charset: string) { + const strWithoutPlus = str.replace(/\+/g, ' '); + if (charset === 'iso-8859-1') { + // unescape never throws, no try...catch needed: + return strWithoutPlus.replace(/%[0-9a-f]{2}/gi, unescape); + } + // utf-8 + try { + return decodeURIComponent(strWithoutPlus); + } catch (e) { + return strWithoutPlus; + } +} + +const limit = 1024; + +export const encode: ( + str: any, + defaultEncoder: DefaultEncoder, + charset: string, + type: 'key' | 'value', + format: Format, +) => string = (str, _defaultEncoder, charset, _kind, format: Format) => { + // This code was originally written by Brian White for the io.js core querystring library. 
+ // It has been adapted here for stricter adherence to RFC 3986 + if (str.length === 0) { + return str; + } + + let string = str; + if (typeof str === 'symbol') { + string = Symbol.prototype.toString.call(str); + } else if (typeof str !== 'string') { + string = String(str); + } + + if (charset === 'iso-8859-1') { + return escape(string).replace(/%u[0-9a-f]{4}/gi, function ($0) { + return '%26%23' + parseInt($0.slice(2), 16) + '%3B'; + }); + } + + let out = ''; + for (let j = 0; j < string.length; j += limit) { + const segment = string.length >= limit ? string.slice(j, j + limit) : string; + const arr = []; + + for (let i = 0; i < segment.length; ++i) { + let c = segment.charCodeAt(i); + if ( + c === 0x2d || // - + c === 0x2e || // . + c === 0x5f || // _ + c === 0x7e || // ~ + (c >= 0x30 && c <= 0x39) || // 0-9 + (c >= 0x41 && c <= 0x5a) || // a-z + (c >= 0x61 && c <= 0x7a) || // A-Z + (format === RFC1738 && (c === 0x28 || c === 0x29)) // ( ) + ) { + arr[arr.length] = segment.charAt(i); + continue; + } + + if (c < 0x80) { + arr[arr.length] = hex_table[c]; + continue; + } + + if (c < 0x800) { + arr[arr.length] = hex_table[0xc0 | (c >> 6)]! + hex_table[0x80 | (c & 0x3f)]; + continue; + } + + if (c < 0xd800 || c >= 0xe000) { + arr[arr.length] = + hex_table[0xe0 | (c >> 12)]! + hex_table[0x80 | ((c >> 6) & 0x3f)] + hex_table[0x80 | (c & 0x3f)]; + continue; + } + + i += 1; + c = 0x10000 + (((c & 0x3ff) << 10) | (segment.charCodeAt(i) & 0x3ff)); + + arr[arr.length] = + hex_table[0xf0 | (c >> 18)]! + + hex_table[0x80 | ((c >> 12) & 0x3f)] + + hex_table[0x80 | ((c >> 6) & 0x3f)] + + hex_table[0x80 | (c & 0x3f)]; + } + + out += arr.join(''); + } + + return out; +}; + +export function compact(value: any) { + const queue = [{ obj: { o: value }, prop: 'o' }]; + const refs = []; + + for (let i = 0; i < queue.length; ++i) { + const item = queue[i]; + // @ts-ignore + const obj = item.obj[item.prop]; + + const keys = Object.keys(obj); + for (let j = 0; j < keys.length; ++j) { + const key = keys[j]!; + const val = obj[key]; + if (typeof val === 'object' && val !== null && refs.indexOf(val) === -1) { + queue.push({ obj: obj, prop: key }); + refs.push(val); + } + } + } + + compact_queue(queue); + + return value; +} + +export function is_regexp(obj: any) { + return Object.prototype.toString.call(obj) === '[object RegExp]'; +} + +export function is_buffer(obj: any) { + if (!obj || typeof obj !== 'object') { + return false; + } + + return !!(obj.constructor && obj.constructor.isBuffer && obj.constructor.isBuffer(obj)); +} + +export function combine(a: any, b: any) { + return [].concat(a, b); +} + +export function maybe_map(val: T[], fn: (v: T) => T) { + if (is_array(val)) { + const mapped = []; + for (let i = 0; i < val.length; i += 1) { + mapped.push(fn(val[i]!)); + } + return mapped; + } + return fn(val); +} diff --git a/node_modules/openai/src/lib/.keep b/node_modules/openai/src/lib/.keep new file mode 100644 index 0000000..7554f8b --- /dev/null +++ b/node_modules/openai/src/lib/.keep @@ -0,0 +1,4 @@ +File generated from our OpenAPI spec by Stainless. + +This directory can be used to store custom files to expand the SDK. +It is ignored by Stainless code generation and its content (other than this keep file) won't be touched. 
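Editor's note: the files above vendor neoqs (a TypeScript rewrite of qs) as openai/src/internal/qs, and the vendored index only re-exports stringify plus the format constants. A minimal usage sketch follows; the relative import path is illustrative, and the expected outputs are the standard qs encodings for these options rather than values verified against this exact build.

  import { stringify } from './internal/qs';

  // Nested keys use bracket notation; the default RFC 3986 formatter leaves %20
  // as-is, while RFC 1738 swaps %20 for '+':
  stringify({ a: { b: 'c' } });                          // 'a%5Bb%5D=c'  (i.e. a[b]=c)
  stringify({ q: 'a b' });                               // 'q=a%20b'
  stringify({ q: 'a b' }, { format: 'RFC1738' });        // 'q=a+b'

  // arrayFormat picks how array members are keyed:
  stringify({ ids: [1, 2] });                            // 'ids%5B0%5D=1&ids%5B1%5D=2'
  stringify({ ids: [1, 2] }, { arrayFormat: 'comma' });  // 'ids=1%2C2'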
diff --git a/node_modules/openai/src/lib/AbstractChatCompletionRunner.ts b/node_modules/openai/src/lib/AbstractChatCompletionRunner.ts new file mode 100644 index 0000000..406f5a4 --- /dev/null +++ b/node_modules/openai/src/lib/AbstractChatCompletionRunner.ts @@ -0,0 +1,503 @@ +import * as Core from '../core'; +import { type CompletionUsage } from '../resources/completions'; +import { + type ChatCompletion, + type ChatCompletionMessage, + type ChatCompletionMessageParam, + type ChatCompletionCreateParams, + type ChatCompletionTool, +} from '../resources/chat/completions'; +import { OpenAIError } from '../error'; +import { + type RunnableFunction, + isRunnableFunctionWithParse, + type BaseFunctionsArgs, + RunnableToolFunction, +} from './RunnableFunction'; +import { ChatCompletionFunctionRunnerParams, ChatCompletionToolRunnerParams } from './ChatCompletionRunner'; +import { + ChatCompletionStreamingFunctionRunnerParams, + ChatCompletionStreamingToolRunnerParams, +} from './ChatCompletionStreamingRunner'; +import { isAssistantMessage, isFunctionMessage, isToolMessage } from './chatCompletionUtils'; +import { BaseEvents, EventStream } from './EventStream'; +import { ParsedChatCompletion } from '../resources/beta/chat/completions'; +import OpenAI from '../index'; +import { isAutoParsableTool, parseChatCompletion } from '../lib/parser'; + +const DEFAULT_MAX_CHAT_COMPLETIONS = 10; +export interface RunnerOptions extends Core.RequestOptions { + /** How many requests to make before canceling. Default 10. */ + maxChatCompletions?: number; +} + +export class AbstractChatCompletionRunner< + EventTypes extends AbstractChatCompletionRunnerEvents, + ParsedT, +> extends EventStream { + protected _chatCompletions: ParsedChatCompletion[] = []; + messages: ChatCompletionMessageParam[] = []; + + protected _addChatCompletion( + this: AbstractChatCompletionRunner, + chatCompletion: ParsedChatCompletion, + ): ParsedChatCompletion { + this._chatCompletions.push(chatCompletion); + this._emit('chatCompletion', chatCompletion); + const message = chatCompletion.choices[0]?.message; + if (message) this._addMessage(message as ChatCompletionMessageParam); + return chatCompletion; + } + + protected _addMessage( + this: AbstractChatCompletionRunner, + message: ChatCompletionMessageParam, + emit = true, + ) { + if (!('content' in message)) message.content = null; + + this.messages.push(message); + + if (emit) { + this._emit('message', message); + if ((isFunctionMessage(message) || isToolMessage(message)) && message.content) { + // Note, this assumes that {role: 'tool', content: …} is always the result of a call of tool of type=function. + this._emit('functionCallResult', message.content as string); + } else if (isAssistantMessage(message) && message.function_call) { + this._emit('functionCall', message.function_call); + } else if (isAssistantMessage(message) && message.tool_calls) { + for (const tool_call of message.tool_calls) { + if (tool_call.type === 'function') { + this._emit('functionCall', tool_call.function); + } + } + } + } + } + + /** + * @returns a promise that resolves with the final ChatCompletion, or rejects + * if an error occurred or the stream ended prematurely without producing a ChatCompletion. 
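   * Editor's sketch (illustrative, not part of the vendored docs): a runner is
   * normally obtained from the SDK's convenience helpers, and the tool-call loop
   * in `_runTools` below keeps issuing requests until the model stops calling
   * tools, so this promise settles only once that loop has finished. For example,
   * assuming a hypothetical `getCart` callback:
   *
   *   const runner = client.beta.chat.completions.runTools({
   *     model: 'gpt-4o',
   *     messages: [{ role: 'user', content: 'What is in my cart?' }],
   *     tools: [{
   *       type: 'function',
   *       function: {
   *         function: getCart,
   *         parse: JSON.parse,
   *         parameters: { type: 'object', properties: {} },
   *       },
   *     }],
   *   });
   *   const completion = await runner.finalChatCompletion();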
+ */ + async finalChatCompletion(): Promise> { + await this.done(); + const completion = this._chatCompletions[this._chatCompletions.length - 1]; + if (!completion) throw new OpenAIError('stream ended without producing a ChatCompletion'); + return completion; + } + + #getFinalContent(): string | null { + return this.#getFinalMessage().content ?? null; + } + + /** + * @returns a promise that resolves with the content of the final ChatCompletionMessage, or rejects + * if an error occurred or the stream ended prematurely without producing a ChatCompletionMessage. + */ + async finalContent(): Promise { + await this.done(); + return this.#getFinalContent(); + } + + #getFinalMessage(): ChatCompletionMessage { + let i = this.messages.length; + while (i-- > 0) { + const message = this.messages[i]; + if (isAssistantMessage(message)) { + const { function_call, ...rest } = message; + + // TODO: support audio here + const ret: Omit = { + ...rest, + content: (message as ChatCompletionMessage).content ?? null, + refusal: (message as ChatCompletionMessage).refusal ?? null, + }; + if (function_call) { + ret.function_call = function_call; + } + return ret; + } + } + throw new OpenAIError('stream ended without producing a ChatCompletionMessage with role=assistant'); + } + + /** + * @returns a promise that resolves with the the final assistant ChatCompletionMessage response, + * or rejects if an error occurred or the stream ended prematurely without producing a ChatCompletionMessage. + */ + async finalMessage(): Promise { + await this.done(); + return this.#getFinalMessage(); + } + + #getFinalFunctionCall(): ChatCompletionMessage.FunctionCall | undefined { + for (let i = this.messages.length - 1; i >= 0; i--) { + const message = this.messages[i]; + if (isAssistantMessage(message) && message?.function_call) { + return message.function_call; + } + if (isAssistantMessage(message) && message?.tool_calls?.length) { + return message.tool_calls.at(-1)?.function; + } + } + + return; + } + + /** + * @returns a promise that resolves with the content of the final FunctionCall, or rejects + * if an error occurred or the stream ended prematurely without producing a ChatCompletionMessage. 
+ */ + async finalFunctionCall(): Promise { + await this.done(); + return this.#getFinalFunctionCall(); + } + + #getFinalFunctionCallResult(): string | undefined { + for (let i = this.messages.length - 1; i >= 0; i--) { + const message = this.messages[i]; + if (isFunctionMessage(message) && message.content != null) { + return message.content; + } + if ( + isToolMessage(message) && + message.content != null && + typeof message.content === 'string' && + this.messages.some( + (x) => + x.role === 'assistant' && + x.tool_calls?.some((y) => y.type === 'function' && y.id === message.tool_call_id), + ) + ) { + return message.content; + } + } + + return; + } + + async finalFunctionCallResult(): Promise { + await this.done(); + return this.#getFinalFunctionCallResult(); + } + + #calculateTotalUsage(): CompletionUsage { + const total: CompletionUsage = { + completion_tokens: 0, + prompt_tokens: 0, + total_tokens: 0, + }; + for (const { usage } of this._chatCompletions) { + if (usage) { + total.completion_tokens += usage.completion_tokens; + total.prompt_tokens += usage.prompt_tokens; + total.total_tokens += usage.total_tokens; + } + } + return total; + } + + async totalUsage(): Promise { + await this.done(); + return this.#calculateTotalUsage(); + } + + allChatCompletions(): ChatCompletion[] { + return [...this._chatCompletions]; + } + + protected override _emitFinal( + this: AbstractChatCompletionRunner, + ) { + const completion = this._chatCompletions[this._chatCompletions.length - 1]; + if (completion) this._emit('finalChatCompletion', completion); + const finalMessage = this.#getFinalMessage(); + if (finalMessage) this._emit('finalMessage', finalMessage); + const finalContent = this.#getFinalContent(); + if (finalContent) this._emit('finalContent', finalContent); + + const finalFunctionCall = this.#getFinalFunctionCall(); + if (finalFunctionCall) this._emit('finalFunctionCall', finalFunctionCall); + + const finalFunctionCallResult = this.#getFinalFunctionCallResult(); + if (finalFunctionCallResult != null) this._emit('finalFunctionCallResult', finalFunctionCallResult); + + if (this._chatCompletions.some((c) => c.usage)) { + this._emit('totalUsage', this.#calculateTotalUsage()); + } + } + + #validateParams(params: ChatCompletionCreateParams): void { + if (params.n != null && params.n > 1) { + throw new OpenAIError( + 'ChatCompletion convenience helpers only support n=1 at this time. 
To use n>1, please use chat.completions.create() directly.', + ); + } + } + + protected async _createChatCompletion( + client: OpenAI, + params: ChatCompletionCreateParams, + options?: Core.RequestOptions, + ): Promise> { + const signal = options?.signal; + if (signal) { + if (signal.aborted) this.controller.abort(); + signal.addEventListener('abort', () => this.controller.abort()); + } + this.#validateParams(params); + + const chatCompletion = await client.chat.completions.create( + { ...params, stream: false }, + { ...options, signal: this.controller.signal }, + ); + this._connected(); + return this._addChatCompletion(parseChatCompletion(chatCompletion, params)); + } + + protected async _runChatCompletion( + client: OpenAI, + params: ChatCompletionCreateParams, + options?: Core.RequestOptions, + ): Promise { + for (const message of params.messages) { + this._addMessage(message, false); + } + return await this._createChatCompletion(client, params, options); + } + + protected async _runFunctions( + client: OpenAI, + params: + | ChatCompletionFunctionRunnerParams + | ChatCompletionStreamingFunctionRunnerParams, + options?: RunnerOptions, + ) { + const role = 'function' as const; + const { function_call = 'auto', stream, ...restParams } = params; + const singleFunctionToCall = typeof function_call !== 'string' && function_call?.name; + const { maxChatCompletions = DEFAULT_MAX_CHAT_COMPLETIONS } = options || {}; + + const functionsByName: Record> = {}; + for (const f of params.functions) { + functionsByName[f.name || f.function.name] = f; + } + + const functions: ChatCompletionCreateParams.Function[] = params.functions.map( + (f): ChatCompletionCreateParams.Function => ({ + name: f.name || f.function.name, + parameters: f.parameters as Record, + description: f.description, + }), + ); + + for (const message of params.messages) { + this._addMessage(message, false); + } + + for (let i = 0; i < maxChatCompletions; ++i) { + const chatCompletion: ChatCompletion = await this._createChatCompletion( + client, + { + ...restParams, + function_call, + functions, + messages: [...this.messages], + }, + options, + ); + const message = chatCompletion.choices[0]?.message; + if (!message) { + throw new OpenAIError(`missing message in ChatCompletion response`); + } + if (!message.function_call) return; + const { name, arguments: args } = message.function_call; + const fn = functionsByName[name]; + if (!fn) { + const content = `Invalid function_call: ${JSON.stringify(name)}. Available options are: ${functions + .map((f) => JSON.stringify(f.name)) + .join(', ')}. Please try again`; + + this._addMessage({ role, name, content }); + continue; + } else if (singleFunctionToCall && singleFunctionToCall !== name) { + const content = `Invalid function_call: ${JSON.stringify(name)}. ${JSON.stringify( + singleFunctionToCall, + )} requested. Please try again`; + + this._addMessage({ role, name, content }); + continue; + } + + let parsed; + try { + parsed = isRunnableFunctionWithParse(fn) ? await fn.parse(args) : args; + } catch (error) { + this._addMessage({ + role, + name, + content: error instanceof Error ? error.message : String(error), + }); + continue; + } + + // @ts-expect-error it can't rule out `never` type. 
+ const rawContent = await fn.function(parsed, this); + const content = this.#stringifyFunctionCallResult(rawContent); + + this._addMessage({ role, name, content }); + + if (singleFunctionToCall) return; + } + } + + protected async _runTools( + client: OpenAI, + params: + | ChatCompletionToolRunnerParams + | ChatCompletionStreamingToolRunnerParams, + options?: RunnerOptions, + ) { + const role = 'tool' as const; + const { tool_choice = 'auto', stream, ...restParams } = params; + const singleFunctionToCall = typeof tool_choice !== 'string' && tool_choice?.function?.name; + const { maxChatCompletions = DEFAULT_MAX_CHAT_COMPLETIONS } = options || {}; + + // TODO(someday): clean this logic up + const inputTools = params.tools.map((tool): RunnableToolFunction => { + if (isAutoParsableTool(tool)) { + if (!tool.$callback) { + throw new OpenAIError('Tool given to `.runTools()` that does not have an associated function'); + } + + return { + type: 'function', + function: { + function: tool.$callback, + name: tool.function.name, + description: tool.function.description || '', + parameters: tool.function.parameters as any, + parse: tool.$parseRaw, + strict: true, + }, + }; + } + + return tool as any as RunnableToolFunction; + }); + + const functionsByName: Record> = {}; + for (const f of inputTools) { + if (f.type === 'function') { + functionsByName[f.function.name || f.function.function.name] = f.function; + } + } + + const tools: ChatCompletionTool[] = + 'tools' in params ? + inputTools.map((t) => + t.type === 'function' ? + { + type: 'function', + function: { + name: t.function.name || t.function.function.name, + parameters: t.function.parameters as Record, + description: t.function.description, + strict: t.function.strict, + }, + } + : (t as unknown as ChatCompletionTool), + ) + : (undefined as any); + + for (const message of params.messages) { + this._addMessage(message, false); + } + + for (let i = 0; i < maxChatCompletions; ++i) { + const chatCompletion: ChatCompletion = await this._createChatCompletion( + client, + { + ...restParams, + tool_choice, + tools, + messages: [...this.messages], + }, + options, + ); + const message = chatCompletion.choices[0]?.message; + if (!message) { + throw new OpenAIError(`missing message in ChatCompletion response`); + } + if (!message.tool_calls?.length) { + return; + } + + for (const tool_call of message.tool_calls) { + if (tool_call.type !== 'function') continue; + const tool_call_id = tool_call.id; + const { name, arguments: args } = tool_call.function; + const fn = functionsByName[name]; + + if (!fn) { + const content = `Invalid tool_call: ${JSON.stringify(name)}. Available options are: ${Object.keys( + functionsByName, + ) + .map((name) => JSON.stringify(name)) + .join(', ')}. Please try again`; + + this._addMessage({ role, tool_call_id, content }); + continue; + } else if (singleFunctionToCall && singleFunctionToCall !== name) { + const content = `Invalid tool_call: ${JSON.stringify(name)}. ${JSON.stringify( + singleFunctionToCall, + )} requested. Please try again`; + + this._addMessage({ role, tool_call_id, content }); + continue; + } + + let parsed; + try { + parsed = isRunnableFunctionWithParse(fn) ? await fn.parse(args) : args; + } catch (error) { + const content = error instanceof Error ? error.message : String(error); + this._addMessage({ role, tool_call_id, content }); + continue; + } + + // @ts-expect-error it can't rule out `never` type. 
+ const rawContent = await fn.function(parsed, this); + const content = this.#stringifyFunctionCallResult(rawContent); + this._addMessage({ role, tool_call_id, content }); + + if (singleFunctionToCall) { + return; + } + } + } + + return; + } + + #stringifyFunctionCallResult(rawContent: unknown): string { + return ( + typeof rawContent === 'string' ? rawContent + : rawContent === undefined ? 'undefined' + : JSON.stringify(rawContent) + ); + } +} + +export interface AbstractChatCompletionRunnerEvents extends BaseEvents { + functionCall: (functionCall: ChatCompletionMessage.FunctionCall) => void; + message: (message: ChatCompletionMessageParam) => void; + chatCompletion: (completion: ChatCompletion) => void; + finalContent: (contentSnapshot: string) => void; + finalMessage: (message: ChatCompletionMessageParam) => void; + finalChatCompletion: (completion: ChatCompletion) => void; + finalFunctionCall: (functionCall: ChatCompletionMessage.FunctionCall) => void; + functionCallResult: (content: string) => void; + finalFunctionCallResult: (content: string) => void; + totalUsage: (usage: CompletionUsage) => void; +} diff --git a/node_modules/openai/src/lib/AssistantStream.ts b/node_modules/openai/src/lib/AssistantStream.ts new file mode 100644 index 0000000..caf68e7 --- /dev/null +++ b/node_modules/openai/src/lib/AssistantStream.ts @@ -0,0 +1,774 @@ +import { + TextContentBlock, + ImageFileContentBlock, + Message, + MessageContentDelta, + Text, + ImageFile, + TextDelta, + MessageDelta, + MessageContent, +} from '../resources/beta/threads/messages'; +import * as Core from '../core'; +import { RequestOptions } from '../core'; +import { + Run, + RunCreateParamsBase, + RunCreateParamsStreaming, + Runs, + RunSubmitToolOutputsParamsBase, + RunSubmitToolOutputsParamsStreaming, +} from '../resources/beta/threads/runs/runs'; +import { type ReadableStream } from '../_shims/index'; +import { Stream } from '../streaming'; +import { APIUserAbortError, OpenAIError } from '../error'; +import { + AssistantStreamEvent, + MessageStreamEvent, + RunStepStreamEvent, + RunStreamEvent, +} from '../resources/beta/assistants'; +import { RunStep, RunStepDelta, ToolCall, ToolCallDelta } from '../resources/beta/threads/runs/steps'; +import { ThreadCreateAndRunParamsBase, Threads } from '../resources/beta/threads/threads'; +import { BaseEvents, EventStream } from './EventStream'; + +export interface AssistantStreamEvents extends BaseEvents { + run: (run: Run) => void; + + //New event structure + messageCreated: (message: Message) => void; + messageDelta: (message: MessageDelta, snapshot: Message) => void; + messageDone: (message: Message) => void; + + runStepCreated: (runStep: RunStep) => void; + runStepDelta: (delta: RunStepDelta, snapshot: Runs.RunStep) => void; + runStepDone: (runStep: Runs.RunStep, snapshot: Runs.RunStep) => void; + + toolCallCreated: (toolCall: ToolCall) => void; + toolCallDelta: (delta: ToolCallDelta, snapshot: ToolCall) => void; + toolCallDone: (toolCall: ToolCall) => void; + + textCreated: (content: Text) => void; + textDelta: (delta: TextDelta, snapshot: Text) => void; + textDone: (content: Text, snapshot: Message) => void; + + //No created or delta as this is not streamed + imageFileDone: (content: ImageFile, snapshot: Message) => void; + + event: (event: AssistantStreamEvent) => void; +} + +export type ThreadCreateAndRunParamsBaseStream = Omit & { + stream?: true; +}; + +export type RunCreateParamsBaseStream = Omit & { + stream?: true; +}; + +export type RunSubmitToolOutputsParamsStream = Omit & { + 
stream?: true; +}; + +export class AssistantStream + extends EventStream + implements AsyncIterable +{ + //Track all events in a single list for reference + #events: AssistantStreamEvent[] = []; + + //Used to accumulate deltas + //We are accumulating many types so the value here is not strict + #runStepSnapshots: { [id: string]: Runs.RunStep } = {}; + #messageSnapshots: { [id: string]: Message } = {}; + #messageSnapshot: Message | undefined; + #finalRun: Run | undefined; + #currentContentIndex: number | undefined; + #currentContent: MessageContent | undefined; + #currentToolCallIndex: number | undefined; + #currentToolCall: ToolCall | undefined; + + //For current snapshot methods + #currentEvent: AssistantStreamEvent | undefined; + #currentRunSnapshot: Run | undefined; + #currentRunStepSnapshot: Runs.RunStep | undefined; + + [Symbol.asyncIterator](): AsyncIterator { + const pushQueue: AssistantStreamEvent[] = []; + const readQueue: { + resolve: (chunk: AssistantStreamEvent | undefined) => void; + reject: (err: unknown) => void; + }[] = []; + let done = false; + + //Catch all for passing along all events + this.on('event', (event) => { + const reader = readQueue.shift(); + if (reader) { + reader.resolve(event); + } else { + pushQueue.push(event); + } + }); + + this.on('end', () => { + done = true; + for (const reader of readQueue) { + reader.resolve(undefined); + } + readQueue.length = 0; + }); + + this.on('abort', (err) => { + done = true; + for (const reader of readQueue) { + reader.reject(err); + } + readQueue.length = 0; + }); + + this.on('error', (err) => { + done = true; + for (const reader of readQueue) { + reader.reject(err); + } + readQueue.length = 0; + }); + + return { + next: async (): Promise> => { + if (!pushQueue.length) { + if (done) { + return { value: undefined, done: true }; + } + return new Promise((resolve, reject) => + readQueue.push({ resolve, reject }), + ).then((chunk) => (chunk ? 
{ value: chunk, done: false } : { value: undefined, done: true })); + } + const chunk = pushQueue.shift()!; + return { value: chunk, done: false }; + }, + return: async () => { + this.abort(); + return { value: undefined, done: true }; + }, + }; + } + + static fromReadableStream(stream: ReadableStream): AssistantStream { + const runner = new AssistantStream(); + runner._run(() => runner._fromReadableStream(stream)); + return runner; + } + + protected async _fromReadableStream( + readableStream: ReadableStream, + options?: Core.RequestOptions, + ): Promise { + const signal = options?.signal; + if (signal) { + if (signal.aborted) this.controller.abort(); + signal.addEventListener('abort', () => this.controller.abort()); + } + this._connected(); + const stream = Stream.fromReadableStream(readableStream, this.controller); + for await (const event of stream) { + this.#addEvent(event); + } + if (stream.controller.signal?.aborted) { + throw new APIUserAbortError(); + } + return this._addRun(this.#endRequest()); + } + + toReadableStream(): ReadableStream { + const stream = new Stream(this[Symbol.asyncIterator].bind(this), this.controller); + return stream.toReadableStream(); + } + + static createToolAssistantStream( + threadId: string, + runId: string, + runs: Runs, + params: RunSubmitToolOutputsParamsStream, + options: RequestOptions | undefined, + ): AssistantStream { + const runner = new AssistantStream(); + runner._run(() => + runner._runToolAssistantStream(threadId, runId, runs, params, { + ...options, + headers: { ...options?.headers, 'X-Stainless-Helper-Method': 'stream' }, + }), + ); + return runner; + } + + protected async _createToolAssistantStream( + run: Runs, + threadId: string, + runId: string, + params: RunSubmitToolOutputsParamsStream, + options?: Core.RequestOptions, + ): Promise { + const signal = options?.signal; + if (signal) { + if (signal.aborted) this.controller.abort(); + signal.addEventListener('abort', () => this.controller.abort()); + } + + const body: RunSubmitToolOutputsParamsStreaming = { ...params, stream: true }; + const stream = await run.submitToolOutputs(threadId, runId, body, { + ...options, + signal: this.controller.signal, + }); + + this._connected(); + + for await (const event of stream) { + this.#addEvent(event); + } + if (stream.controller.signal?.aborted) { + throw new APIUserAbortError(); + } + + return this._addRun(this.#endRequest()); + } + + static createThreadAssistantStream( + params: ThreadCreateAndRunParamsBaseStream, + thread: Threads, + options?: RequestOptions, + ): AssistantStream { + const runner = new AssistantStream(); + runner._run(() => + runner._threadAssistantStream(params, thread, { + ...options, + headers: { ...options?.headers, 'X-Stainless-Helper-Method': 'stream' }, + }), + ); + return runner; + } + + static createAssistantStream( + threadId: string, + runs: Runs, + params: RunCreateParamsBaseStream, + options?: RequestOptions, + ): AssistantStream { + const runner = new AssistantStream(); + runner._run(() => + runner._runAssistantStream(threadId, runs, params, { + ...options, + headers: { ...options?.headers, 'X-Stainless-Helper-Method': 'stream' }, + }), + ); + return runner; + } + + currentEvent(): AssistantStreamEvent | undefined { + return this.#currentEvent; + } + + currentRun(): Run | undefined { + return this.#currentRunSnapshot; + } + + currentMessageSnapshot(): Message | undefined { + return this.#messageSnapshot; + } + + currentRunStepSnapshot(): Runs.RunStep | undefined { + return this.#currentRunStepSnapshot; + } + + 
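  // Editor's note (not part of the vendored file): a minimal consumption sketch.
  // An AssistantStream is usually obtained from a streaming helper such as
  // `client.beta.threads.runs.stream(threadId, { assistant_id })`; it can then be
  // consumed through the typed events above, or iterated with for-await, before
  // awaiting one of the final* accessors below:
  //
  //   const stream = client.beta.threads.runs.stream(threadId, { assistant_id });
  //   stream.on('textDelta', (delta) => process.stdout.write(delta.value ?? ''));
  //   const run = await stream.finalRun();   // resolves once a terminal run event arrives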
async finalRunSteps(): Promise { + await this.done(); + + return Object.values(this.#runStepSnapshots); + } + + async finalMessages(): Promise { + await this.done(); + + return Object.values(this.#messageSnapshots); + } + + async finalRun(): Promise { + await this.done(); + if (!this.#finalRun) throw Error('Final run was not received.'); + + return this.#finalRun; + } + + protected async _createThreadAssistantStream( + thread: Threads, + params: ThreadCreateAndRunParamsBase, + options?: Core.RequestOptions, + ): Promise { + const signal = options?.signal; + if (signal) { + if (signal.aborted) this.controller.abort(); + signal.addEventListener('abort', () => this.controller.abort()); + } + + const body: RunCreateParamsStreaming = { ...params, stream: true }; + const stream = await thread.createAndRun(body, { ...options, signal: this.controller.signal }); + + this._connected(); + + for await (const event of stream) { + this.#addEvent(event); + } + if (stream.controller.signal?.aborted) { + throw new APIUserAbortError(); + } + + return this._addRun(this.#endRequest()); + } + + protected async _createAssistantStream( + run: Runs, + threadId: string, + params: RunCreateParamsBase, + options?: Core.RequestOptions, + ): Promise { + const signal = options?.signal; + if (signal) { + if (signal.aborted) this.controller.abort(); + signal.addEventListener('abort', () => this.controller.abort()); + } + + const body: RunCreateParamsStreaming = { ...params, stream: true }; + const stream = await run.create(threadId, body, { ...options, signal: this.controller.signal }); + + this._connected(); + + for await (const event of stream) { + this.#addEvent(event); + } + if (stream.controller.signal?.aborted) { + throw new APIUserAbortError(); + } + + return this._addRun(this.#endRequest()); + } + + #addEvent(event: AssistantStreamEvent) { + if (this.ended) return; + + this.#currentEvent = event; + + this.#handleEvent(event); + + switch (event.event) { + case 'thread.created': + //No action on this event. 
+ break; + + case 'thread.run.created': + case 'thread.run.queued': + case 'thread.run.in_progress': + case 'thread.run.requires_action': + case 'thread.run.completed': + case 'thread.run.failed': + case 'thread.run.cancelling': + case 'thread.run.cancelled': + case 'thread.run.expired': + this.#handleRun(event); + break; + + case 'thread.run.step.created': + case 'thread.run.step.in_progress': + case 'thread.run.step.delta': + case 'thread.run.step.completed': + case 'thread.run.step.failed': + case 'thread.run.step.cancelled': + case 'thread.run.step.expired': + this.#handleRunStep(event); + break; + + case 'thread.message.created': + case 'thread.message.in_progress': + case 'thread.message.delta': + case 'thread.message.completed': + case 'thread.message.incomplete': + this.#handleMessage(event); + break; + + case 'error': + //This is included for completeness, but errors are processed in the SSE event processing so this should not occur + throw new Error( + 'Encountered an error event in event processing - errors should be processed earlier', + ); + } + } + + #endRequest(): Run { + if (this.ended) { + throw new OpenAIError(`stream has ended, this shouldn't happen`); + } + + if (!this.#finalRun) throw Error('Final run has not been received'); + + return this.#finalRun; + } + + #handleMessage(this: AssistantStream, event: MessageStreamEvent) { + const [accumulatedMessage, newContent] = this.#accumulateMessage(event, this.#messageSnapshot); + this.#messageSnapshot = accumulatedMessage; + this.#messageSnapshots[accumulatedMessage.id] = accumulatedMessage; + + for (const content of newContent) { + const snapshotContent = accumulatedMessage.content[content.index]; + if (snapshotContent?.type == 'text') { + this._emit('textCreated', snapshotContent.text); + } + } + + switch (event.event) { + case 'thread.message.created': + this._emit('messageCreated', event.data); + break; + + case 'thread.message.in_progress': + break; + + case 'thread.message.delta': + this._emit('messageDelta', event.data.delta, accumulatedMessage); + + if (event.data.delta.content) { + for (const content of event.data.delta.content) { + //If it is text delta, emit a text delta event + if (content.type == 'text' && content.text) { + let textDelta = content.text; + let snapshot = accumulatedMessage.content[content.index]; + if (snapshot && snapshot.type == 'text') { + this._emit('textDelta', textDelta, snapshot.text); + } else { + throw Error('The snapshot associated with this text delta is not text or missing'); + } + } + + if (content.index != this.#currentContentIndex) { + //See if we have in progress content + if (this.#currentContent) { + switch (this.#currentContent.type) { + case 'text': + this._emit('textDone', this.#currentContent.text, this.#messageSnapshot); + break; + case 'image_file': + this._emit('imageFileDone', this.#currentContent.image_file, this.#messageSnapshot); + break; + } + } + + this.#currentContentIndex = content.index; + } + + this.#currentContent = accumulatedMessage.content[content.index]; + } + } + + break; + + case 'thread.message.completed': + case 'thread.message.incomplete': + //We emit the latest content we were working on on completion (including incomplete) + if (this.#currentContentIndex !== undefined) { + const currentContent = event.data.content[this.#currentContentIndex]; + if (currentContent) { + switch (currentContent.type) { + case 'image_file': + this._emit('imageFileDone', currentContent.image_file, this.#messageSnapshot); + break; + case 'text': + this._emit('textDone', 
currentContent.text, this.#messageSnapshot); + break; + } + } + } + + if (this.#messageSnapshot) { + this._emit('messageDone', event.data); + } + + this.#messageSnapshot = undefined; + } + } + + #handleRunStep(this: AssistantStream, event: RunStepStreamEvent) { + const accumulatedRunStep = this.#accumulateRunStep(event); + this.#currentRunStepSnapshot = accumulatedRunStep; + + switch (event.event) { + case 'thread.run.step.created': + this._emit('runStepCreated', event.data); + break; + case 'thread.run.step.delta': + const delta = event.data.delta; + if ( + delta.step_details && + delta.step_details.type == 'tool_calls' && + delta.step_details.tool_calls && + accumulatedRunStep.step_details.type == 'tool_calls' + ) { + for (const toolCall of delta.step_details.tool_calls) { + if (toolCall.index == this.#currentToolCallIndex) { + this._emit( + 'toolCallDelta', + toolCall, + accumulatedRunStep.step_details.tool_calls[toolCall.index] as ToolCall, + ); + } else { + if (this.#currentToolCall) { + this._emit('toolCallDone', this.#currentToolCall); + } + + this.#currentToolCallIndex = toolCall.index; + this.#currentToolCall = accumulatedRunStep.step_details.tool_calls[toolCall.index]; + if (this.#currentToolCall) this._emit('toolCallCreated', this.#currentToolCall); + } + } + } + + this._emit('runStepDelta', event.data.delta, accumulatedRunStep); + break; + case 'thread.run.step.completed': + case 'thread.run.step.failed': + case 'thread.run.step.cancelled': + case 'thread.run.step.expired': + this.#currentRunStepSnapshot = undefined; + const details = event.data.step_details; + if (details.type == 'tool_calls') { + if (this.#currentToolCall) { + this._emit('toolCallDone', this.#currentToolCall as ToolCall); + this.#currentToolCall = undefined; + } + } + this._emit('runStepDone', event.data, accumulatedRunStep); + break; + case 'thread.run.step.in_progress': + break; + } + } + + #handleEvent(this: AssistantStream, event: AssistantStreamEvent) { + this.#events.push(event); + this._emit('event', event); + } + + #accumulateRunStep(event: RunStepStreamEvent): Runs.RunStep { + switch (event.event) { + case 'thread.run.step.created': + this.#runStepSnapshots[event.data.id] = event.data; + return event.data; + + case 'thread.run.step.delta': + let snapshot = this.#runStepSnapshots[event.data.id] as Runs.RunStep; + if (!snapshot) { + throw Error('Received a RunStepDelta before creation of a snapshot'); + } + + let data = event.data; + + if (data.delta) { + const accumulated = AssistantStream.accumulateDelta(snapshot, data.delta) as Runs.RunStep; + this.#runStepSnapshots[event.data.id] = accumulated; + } + + return this.#runStepSnapshots[event.data.id] as Runs.RunStep; + + case 'thread.run.step.completed': + case 'thread.run.step.failed': + case 'thread.run.step.cancelled': + case 'thread.run.step.expired': + case 'thread.run.step.in_progress': + this.#runStepSnapshots[event.data.id] = event.data; + break; + } + + if (this.#runStepSnapshots[event.data.id]) return this.#runStepSnapshots[event.data.id] as Runs.RunStep; + throw new Error('No snapshot available'); + } + + #accumulateMessage( + event: AssistantStreamEvent, + snapshot: Message | undefined, + ): [Message, MessageContentDelta[]] { + let newContent: MessageContentDelta[] = []; + + switch (event.event) { + case 'thread.message.created': + //On creation the snapshot is just the initial message + return [event.data, newContent]; + + case 'thread.message.delta': + if (!snapshot) { + throw Error( + 'Received a delta with no existing snapshot (there 
should be one from message creation)', + ); + } + + let data = event.data; + + //If this delta does not have content, nothing to process + if (data.delta.content) { + for (const contentElement of data.delta.content) { + if (contentElement.index in snapshot.content) { + let currentContent = snapshot.content[contentElement.index]; + snapshot.content[contentElement.index] = this.#accumulateContent( + contentElement, + currentContent, + ); + } else { + snapshot.content[contentElement.index] = contentElement as MessageContent; + // This is a new element + newContent.push(contentElement); + } + } + } + + return [snapshot, newContent]; + + case 'thread.message.in_progress': + case 'thread.message.completed': + case 'thread.message.incomplete': + //No changes on other thread events + if (snapshot) { + return [snapshot, newContent]; + } else { + throw Error('Received thread message event with no existing snapshot'); + } + } + throw Error('Tried to accumulate a non-message event'); + } + + #accumulateContent( + contentElement: MessageContentDelta, + currentContent: MessageContent | undefined, + ): TextContentBlock | ImageFileContentBlock { + return AssistantStream.accumulateDelta(currentContent as unknown as Record, contentElement) as + | TextContentBlock + | ImageFileContentBlock; + } + + static accumulateDelta(acc: Record, delta: Record): Record { + for (const [key, deltaValue] of Object.entries(delta)) { + if (!acc.hasOwnProperty(key)) { + acc[key] = deltaValue; + continue; + } + + let accValue = acc[key]; + if (accValue === null || accValue === undefined) { + acc[key] = deltaValue; + continue; + } + + // We don't accumulate these special properties + if (key === 'index' || key === 'type') { + acc[key] = deltaValue; + continue; + } + + // Type-specific accumulation logic + if (typeof accValue === 'string' && typeof deltaValue === 'string') { + accValue += deltaValue; + } else if (typeof accValue === 'number' && typeof deltaValue === 'number') { + accValue += deltaValue; + } else if (Core.isObj(accValue) && Core.isObj(deltaValue)) { + accValue = this.accumulateDelta(accValue as Record, deltaValue as Record); + } else if (Array.isArray(accValue) && Array.isArray(deltaValue)) { + if (accValue.every((x) => typeof x === 'string' || typeof x === 'number')) { + accValue.push(...deltaValue); // Use spread syntax for efficient addition + continue; + } + + for (const deltaEntry of deltaValue) { + if (!Core.isObj(deltaEntry)) { + throw new Error(`Expected array delta entry to be an object but got: ${deltaEntry}`); + } + + const index = deltaEntry['index']; + if (index == null) { + console.error(deltaEntry); + throw new Error('Expected array delta entry to have an `index` property'); + } + + if (typeof index !== 'number') { + throw new Error(`Expected array delta entry \`index\` property to be a number but got ${index}`); + } + + const accEntry = accValue[index]; + if (accEntry == null) { + accValue.push(deltaEntry); + } else { + accValue[index] = this.accumulateDelta(accEntry, deltaEntry); + } + } + continue; + } else { + throw Error(`Unhandled record type: ${key}, deltaValue: ${deltaValue}, accValue: ${accValue}`); + } + acc[key] = accValue; + } + + return acc; + } + + #handleRun(this: AssistantStream, event: RunStreamEvent) { + this.#currentRunSnapshot = event.data; + switch (event.event) { + case 'thread.run.created': + break; + case 'thread.run.queued': + break; + case 'thread.run.in_progress': + break; + case 'thread.run.requires_action': + case 'thread.run.cancelled': + case 'thread.run.failed': + 
case 'thread.run.completed': + case 'thread.run.expired': + this.#finalRun = event.data; + if (this.#currentToolCall) { + this._emit('toolCallDone', this.#currentToolCall); + this.#currentToolCall = undefined; + } + break; + case 'thread.run.cancelling': + break; + } + } + + protected _addRun(run: Run): Run { + return run; + } + + protected async _threadAssistantStream( + params: ThreadCreateAndRunParamsBase, + thread: Threads, + options?: Core.RequestOptions, + ): Promise { + return await this._createThreadAssistantStream(thread, params, options); + } + + protected async _runAssistantStream( + threadId: string, + runs: Runs, + params: RunCreateParamsBase, + options?: Core.RequestOptions, + ): Promise { + return await this._createAssistantStream(runs, threadId, params, options); + } + + protected async _runToolAssistantStream( + threadId: string, + runId: string, + runs: Runs, + params: RunSubmitToolOutputsParamsStream, + options?: Core.RequestOptions, + ): Promise { + return await this._createToolAssistantStream(runs, threadId, runId, params, options); + } +} diff --git a/node_modules/openai/src/lib/ChatCompletionRunner.ts b/node_modules/openai/src/lib/ChatCompletionRunner.ts new file mode 100644 index 0000000..9e68e66 --- /dev/null +++ b/node_modules/openai/src/lib/ChatCompletionRunner.ts @@ -0,0 +1,76 @@ +import { + type ChatCompletionMessageParam, + type ChatCompletionCreateParamsNonStreaming, +} from '../resources/chat/completions'; +import { type RunnableFunctions, type BaseFunctionsArgs, RunnableTools } from './RunnableFunction'; +import { + AbstractChatCompletionRunner, + AbstractChatCompletionRunnerEvents, + RunnerOptions, +} from './AbstractChatCompletionRunner'; +import { isAssistantMessage } from './chatCompletionUtils'; +import OpenAI from '../index'; +import { AutoParseableTool } from '../lib/parser'; + +export interface ChatCompletionRunnerEvents extends AbstractChatCompletionRunnerEvents { + content: (content: string) => void; +} + +export type ChatCompletionFunctionRunnerParams = Omit< + ChatCompletionCreateParamsNonStreaming, + 'functions' +> & { + functions: RunnableFunctions; +}; + +export type ChatCompletionToolRunnerParams = Omit< + ChatCompletionCreateParamsNonStreaming, + 'tools' +> & { + tools: RunnableTools | AutoParseableTool[]; +}; + +export class ChatCompletionRunner extends AbstractChatCompletionRunner< + ChatCompletionRunnerEvents, + ParsedT +> { + /** @deprecated - please use `runTools` instead. 
*/ + static runFunctions( + client: OpenAI, + params: ChatCompletionFunctionRunnerParams, + options?: RunnerOptions, + ): ChatCompletionRunner { + const runner = new ChatCompletionRunner(); + const opts = { + ...options, + headers: { ...options?.headers, 'X-Stainless-Helper-Method': 'runFunctions' }, + }; + runner._run(() => runner._runFunctions(client, params, opts)); + return runner; + } + + static runTools( + client: OpenAI, + params: ChatCompletionToolRunnerParams, + options?: RunnerOptions, + ): ChatCompletionRunner { + const runner = new ChatCompletionRunner(); + const opts = { + ...options, + headers: { ...options?.headers, 'X-Stainless-Helper-Method': 'runTools' }, + }; + runner._run(() => runner._runTools(client, params, opts)); + return runner; + } + + override _addMessage( + this: ChatCompletionRunner, + message: ChatCompletionMessageParam, + emit: boolean = true, + ) { + super._addMessage(message, emit); + if (isAssistantMessage(message) && message.content) { + this._emit('content', message.content as string); + } + } +} diff --git a/node_modules/openai/src/lib/ChatCompletionStream.ts b/node_modules/openai/src/lib/ChatCompletionStream.ts new file mode 100644 index 0000000..a88f8a2 --- /dev/null +++ b/node_modules/openai/src/lib/ChatCompletionStream.ts @@ -0,0 +1,870 @@ +import * as Core from '../core'; +import { + OpenAIError, + APIUserAbortError, + LengthFinishReasonError, + ContentFilterFinishReasonError, +} from '../error'; +import { + ChatCompletionTokenLogprob, + type ChatCompletion, + type ChatCompletionChunk, + type ChatCompletionCreateParams, + type ChatCompletionCreateParamsStreaming, + type ChatCompletionCreateParamsBase, +} from '../resources/chat/completions'; +import { + AbstractChatCompletionRunner, + type AbstractChatCompletionRunnerEvents, +} from './AbstractChatCompletionRunner'; +import { type ReadableStream } from '../_shims/index'; +import { Stream } from '../streaming'; +import OpenAI from '../index'; +import { ParsedChatCompletion } from '../resources/beta/chat/completions'; +import { + AutoParseableResponseFormat, + hasAutoParseableInput, + isAutoParsableResponseFormat, + isAutoParsableTool, + maybeParseChatCompletion, + shouldParseToolCall, +} from '../lib/parser'; +import { partialParse } from '../_vendor/partial-json-parser/parser'; + +export interface ContentDeltaEvent { + delta: string; + snapshot: string; + parsed: unknown | null; +} + +export interface ContentDoneEvent { + content: string; + parsed: ParsedT | null; +} + +export interface RefusalDeltaEvent { + delta: string; + snapshot: string; +} + +export interface RefusalDoneEvent { + refusal: string; +} + +export interface FunctionToolCallArgumentsDeltaEvent { + name: string; + + index: number; + + arguments: string; + + parsed_arguments: unknown; + + arguments_delta: string; +} + +export interface FunctionToolCallArgumentsDoneEvent { + name: string; + + index: number; + + arguments: string; + + parsed_arguments: unknown; +} + +export interface LogProbsContentDeltaEvent { + content: Array; + snapshot: Array; +} + +export interface LogProbsContentDoneEvent { + content: Array; +} + +export interface LogProbsRefusalDeltaEvent { + refusal: Array; + snapshot: Array; +} + +export interface LogProbsRefusalDoneEvent { + refusal: Array; +} + +export interface ChatCompletionStreamEvents extends AbstractChatCompletionRunnerEvents { + content: (contentDelta: string, contentSnapshot: string) => void; + chunk: (chunk: ChatCompletionChunk, snapshot: ChatCompletionSnapshot) => void; + + 'content.delta': (props: 
ContentDeltaEvent) => void; + 'content.done': (props: ContentDoneEvent) => void; + + 'refusal.delta': (props: RefusalDeltaEvent) => void; + 'refusal.done': (props: RefusalDoneEvent) => void; + + 'tool_calls.function.arguments.delta': (props: FunctionToolCallArgumentsDeltaEvent) => void; + 'tool_calls.function.arguments.done': (props: FunctionToolCallArgumentsDoneEvent) => void; + + 'logprobs.content.delta': (props: LogProbsContentDeltaEvent) => void; + 'logprobs.content.done': (props: LogProbsContentDoneEvent) => void; + + 'logprobs.refusal.delta': (props: LogProbsRefusalDeltaEvent) => void; + 'logprobs.refusal.done': (props: LogProbsRefusalDoneEvent) => void; +} + +export type ChatCompletionStreamParams = Omit & { + stream?: true; +}; + +interface ChoiceEventState { + content_done: boolean; + refusal_done: boolean; + logprobs_content_done: boolean; + logprobs_refusal_done: boolean; + current_tool_call_index: number | null; + done_tool_calls: Set; +} + +export class ChatCompletionStream + extends AbstractChatCompletionRunner, ParsedT> + implements AsyncIterable +{ + #params: ChatCompletionCreateParams | null; + #choiceEventStates: ChoiceEventState[]; + #currentChatCompletionSnapshot: ChatCompletionSnapshot | undefined; + + constructor(params: ChatCompletionCreateParams | null) { + super(); + this.#params = params; + this.#choiceEventStates = []; + } + + get currentChatCompletionSnapshot(): ChatCompletionSnapshot | undefined { + return this.#currentChatCompletionSnapshot; + } + + /** + * Intended for use on the frontend, consuming a stream produced with + * `.toReadableStream()` on the backend. + * + * Note that messages sent to the model do not appear in `.on('message')` + * in this context. + */ + static fromReadableStream(stream: ReadableStream): ChatCompletionStream { + const runner = new ChatCompletionStream(null); + runner._run(() => runner._fromReadableStream(stream)); + return runner; + } + + static createChatCompletion( + client: OpenAI, + params: ChatCompletionStreamParams, + options?: Core.RequestOptions, + ): ChatCompletionStream { + const runner = new ChatCompletionStream(params as ChatCompletionCreateParamsStreaming); + runner._run(() => + runner._runChatCompletion( + client, + { ...params, stream: true }, + { ...options, headers: { ...options?.headers, 'X-Stainless-Helper-Method': 'stream' } }, + ), + ); + return runner; + } + + #beginRequest() { + if (this.ended) return; + this.#currentChatCompletionSnapshot = undefined; + } + + #getChoiceEventState(choice: ChatCompletionSnapshot.Choice): ChoiceEventState { + let state = this.#choiceEventStates[choice.index]; + if (state) { + return state; + } + + state = { + content_done: false, + refusal_done: false, + logprobs_content_done: false, + logprobs_refusal_done: false, + done_tool_calls: new Set(), + current_tool_call_index: null, + }; + this.#choiceEventStates[choice.index] = state; + return state; + } + + #addChunk(this: ChatCompletionStream, chunk: ChatCompletionChunk) { + if (this.ended) return; + + const completion = this.#accumulateChatCompletion(chunk); + this._emit('chunk', chunk, completion); + + for (const choice of chunk.choices) { + const choiceSnapshot = completion.choices[choice.index]!; + + if ( + choice.delta.content != null && + choiceSnapshot.message?.role === 'assistant' && + choiceSnapshot.message?.content + ) { + this._emit('content', choice.delta.content, choiceSnapshot.message.content); + this._emit('content.delta', { + delta: choice.delta.content, + snapshot: choiceSnapshot.message.content, + parsed: 
choiceSnapshot.message.parsed, + }); + } + + if ( + choice.delta.refusal != null && + choiceSnapshot.message?.role === 'assistant' && + choiceSnapshot.message?.refusal + ) { + this._emit('refusal.delta', { + delta: choice.delta.refusal, + snapshot: choiceSnapshot.message.refusal, + }); + } + + if (choice.logprobs?.content != null && choiceSnapshot.message?.role === 'assistant') { + this._emit('logprobs.content.delta', { + content: choice.logprobs?.content, + snapshot: choiceSnapshot.logprobs?.content ?? [], + }); + } + + if (choice.logprobs?.refusal != null && choiceSnapshot.message?.role === 'assistant') { + this._emit('logprobs.refusal.delta', { + refusal: choice.logprobs?.refusal, + snapshot: choiceSnapshot.logprobs?.refusal ?? [], + }); + } + + const state = this.#getChoiceEventState(choiceSnapshot); + + if (choiceSnapshot.finish_reason) { + this.#emitContentDoneEvents(choiceSnapshot); + + if (state.current_tool_call_index != null) { + this.#emitToolCallDoneEvent(choiceSnapshot, state.current_tool_call_index); + } + } + + for (const toolCall of choice.delta.tool_calls ?? []) { + if (state.current_tool_call_index !== toolCall.index) { + this.#emitContentDoneEvents(choiceSnapshot); + + // new tool call started, the previous one is done + if (state.current_tool_call_index != null) { + this.#emitToolCallDoneEvent(choiceSnapshot, state.current_tool_call_index); + } + } + + state.current_tool_call_index = toolCall.index; + } + + for (const toolCallDelta of choice.delta.tool_calls ?? []) { + const toolCallSnapshot = choiceSnapshot.message.tool_calls?.[toolCallDelta.index]; + if (!toolCallSnapshot?.type) { + continue; + } + + if (toolCallSnapshot?.type === 'function') { + this._emit('tool_calls.function.arguments.delta', { + name: toolCallSnapshot.function?.name, + index: toolCallDelta.index, + arguments: toolCallSnapshot.function.arguments, + parsed_arguments: toolCallSnapshot.function.parsed_arguments, + arguments_delta: toolCallDelta.function?.arguments ?? '', + }); + } else { + assertNever(toolCallSnapshot?.type); + } + } + } + } + + #emitToolCallDoneEvent(choiceSnapshot: ChatCompletionSnapshot.Choice, toolCallIndex: number) { + const state = this.#getChoiceEventState(choiceSnapshot); + if (state.done_tool_calls.has(toolCallIndex)) { + // we've already fired the done event + return; + } + + const toolCallSnapshot = choiceSnapshot.message.tool_calls?.[toolCallIndex]; + if (!toolCallSnapshot) { + throw new Error('no tool call snapshot'); + } + if (!toolCallSnapshot.type) { + throw new Error('tool call snapshot missing `type`'); + } + + if (toolCallSnapshot.type === 'function') { + const inputTool = this.#params?.tools?.find( + (tool) => tool.type === 'function' && tool.function.name === toolCallSnapshot.function.name, + ); + + this._emit('tool_calls.function.arguments.done', { + name: toolCallSnapshot.function.name, + index: toolCallIndex, + arguments: toolCallSnapshot.function.arguments, + parsed_arguments: + isAutoParsableTool(inputTool) ? inputTool.$parseRaw(toolCallSnapshot.function.arguments) + : inputTool?.function.strict ? 
JSON.parse(toolCallSnapshot.function.arguments) + : null, + }); + } else { + assertNever(toolCallSnapshot.type); + } + } + + #emitContentDoneEvents(choiceSnapshot: ChatCompletionSnapshot.Choice) { + const state = this.#getChoiceEventState(choiceSnapshot); + + if (choiceSnapshot.message.content && !state.content_done) { + state.content_done = true; + + const responseFormat = this.#getAutoParseableResponseFormat(); + + this._emit('content.done', { + content: choiceSnapshot.message.content, + parsed: responseFormat ? responseFormat.$parseRaw(choiceSnapshot.message.content) : (null as any), + }); + } + + if (choiceSnapshot.message.refusal && !state.refusal_done) { + state.refusal_done = true; + + this._emit('refusal.done', { refusal: choiceSnapshot.message.refusal }); + } + + if (choiceSnapshot.logprobs?.content && !state.logprobs_content_done) { + state.logprobs_content_done = true; + + this._emit('logprobs.content.done', { content: choiceSnapshot.logprobs.content }); + } + + if (choiceSnapshot.logprobs?.refusal && !state.logprobs_refusal_done) { + state.logprobs_refusal_done = true; + + this._emit('logprobs.refusal.done', { refusal: choiceSnapshot.logprobs.refusal }); + } + } + + #endRequest(): ParsedChatCompletion { + if (this.ended) { + throw new OpenAIError(`stream has ended, this shouldn't happen`); + } + const snapshot = this.#currentChatCompletionSnapshot; + if (!snapshot) { + throw new OpenAIError(`request ended without sending any chunks`); + } + this.#currentChatCompletionSnapshot = undefined; + this.#choiceEventStates = []; + return finalizeChatCompletion(snapshot, this.#params); + } + + protected override async _createChatCompletion( + client: OpenAI, + params: ChatCompletionCreateParams, + options?: Core.RequestOptions, + ): Promise> { + super._createChatCompletion; + const signal = options?.signal; + if (signal) { + if (signal.aborted) this.controller.abort(); + signal.addEventListener('abort', () => this.controller.abort()); + } + this.#beginRequest(); + + const stream = await client.chat.completions.create( + { ...params, stream: true }, + { ...options, signal: this.controller.signal }, + ); + this._connected(); + for await (const chunk of stream) { + this.#addChunk(chunk); + } + if (stream.controller.signal?.aborted) { + throw new APIUserAbortError(); + } + return this._addChatCompletion(this.#endRequest()); + } + + protected async _fromReadableStream( + readableStream: ReadableStream, + options?: Core.RequestOptions, + ): Promise { + const signal = options?.signal; + if (signal) { + if (signal.aborted) this.controller.abort(); + signal.addEventListener('abort', () => this.controller.abort()); + } + this.#beginRequest(); + this._connected(); + const stream = Stream.fromReadableStream(readableStream, this.controller); + let chatId; + for await (const chunk of stream) { + if (chatId && chatId !== chunk.id) { + // A new request has been made. 
+ this._addChatCompletion(this.#endRequest()); + } + + this.#addChunk(chunk); + chatId = chunk.id; + } + if (stream.controller.signal?.aborted) { + throw new APIUserAbortError(); + } + return this._addChatCompletion(this.#endRequest()); + } + + #getAutoParseableResponseFormat(): AutoParseableResponseFormat | null { + const responseFormat = this.#params?.response_format; + if (isAutoParsableResponseFormat(responseFormat)) { + return responseFormat; + } + + return null; + } + + #accumulateChatCompletion(chunk: ChatCompletionChunk): ChatCompletionSnapshot { + let snapshot = this.#currentChatCompletionSnapshot; + const { choices, ...rest } = chunk; + if (!snapshot) { + snapshot = this.#currentChatCompletionSnapshot = { + ...rest, + choices: [], + }; + } else { + Object.assign(snapshot, rest); + } + + for (const { delta, finish_reason, index, logprobs = null, ...other } of chunk.choices) { + let choice = snapshot.choices[index]; + if (!choice) { + choice = snapshot.choices[index] = { finish_reason, index, message: {}, logprobs, ...other }; + } + + if (logprobs) { + if (!choice.logprobs) { + choice.logprobs = Object.assign({}, logprobs); + } else { + const { content, refusal, ...rest } = logprobs; + assertIsEmpty(rest); + Object.assign(choice.logprobs, rest); + + if (content) { + choice.logprobs.content ??= []; + choice.logprobs.content.push(...content); + } + + if (refusal) { + choice.logprobs.refusal ??= []; + choice.logprobs.refusal.push(...refusal); + } + } + } + + if (finish_reason) { + choice.finish_reason = finish_reason; + + if (this.#params && hasAutoParseableInput(this.#params)) { + if (finish_reason === 'length') { + throw new LengthFinishReasonError(); + } + + if (finish_reason === 'content_filter') { + throw new ContentFilterFinishReasonError(); + } + } + } + + Object.assign(choice, other); + + if (!delta) continue; // Shouldn't happen; just in case. + + const { content, refusal, function_call, role, tool_calls, ...rest } = delta; + assertIsEmpty(rest); + Object.assign(choice.message, rest); + + if (refusal) { + choice.message.refusal = (choice.message.refusal || '') + refusal; + } + + if (role) choice.message.role = role; + if (function_call) { + if (!choice.message.function_call) { + choice.message.function_call = function_call; + } else { + if (function_call.name) choice.message.function_call.name = function_call.name; + if (function_call.arguments) { + choice.message.function_call.arguments ??= ''; + choice.message.function_call.arguments += function_call.arguments; + } + } + } + if (content) { + choice.message.content = (choice.message.content || '') + content; + + if (!choice.message.refusal && this.#getAutoParseableResponseFormat()) { + choice.message.parsed = partialParse(choice.message.content); + } + } + + if (tool_calls) { + if (!choice.message.tool_calls) choice.message.tool_calls = []; + + for (const { index, id, type, function: fn, ...rest } of tool_calls) { + const tool_call = (choice.message.tool_calls[index] ??= + {} as ChatCompletionSnapshot.Choice.Message.ToolCall); + Object.assign(tool_call, rest); + if (id) tool_call.id = id; + if (type) tool_call.type = type; + if (fn) tool_call.function ??= { name: fn.name ?? 
'', arguments: '' }; + if (fn?.name) tool_call.function!.name = fn.name; + if (fn?.arguments) { + tool_call.function!.arguments += fn.arguments; + + if (shouldParseToolCall(this.#params, tool_call)) { + tool_call.function!.parsed_arguments = partialParse(tool_call.function!.arguments); + } + } + } + } + } + return snapshot; + } + + [Symbol.asyncIterator](this: ChatCompletionStream): AsyncIterator { + const pushQueue: ChatCompletionChunk[] = []; + const readQueue: { + resolve: (chunk: ChatCompletionChunk | undefined) => void; + reject: (err: unknown) => void; + }[] = []; + let done = false; + + this.on('chunk', (chunk) => { + const reader = readQueue.shift(); + if (reader) { + reader.resolve(chunk); + } else { + pushQueue.push(chunk); + } + }); + + this.on('end', () => { + done = true; + for (const reader of readQueue) { + reader.resolve(undefined); + } + readQueue.length = 0; + }); + + this.on('abort', (err) => { + done = true; + for (const reader of readQueue) { + reader.reject(err); + } + readQueue.length = 0; + }); + + this.on('error', (err) => { + done = true; + for (const reader of readQueue) { + reader.reject(err); + } + readQueue.length = 0; + }); + + return { + next: async (): Promise> => { + if (!pushQueue.length) { + if (done) { + return { value: undefined, done: true }; + } + return new Promise((resolve, reject) => + readQueue.push({ resolve, reject }), + ).then((chunk) => (chunk ? { value: chunk, done: false } : { value: undefined, done: true })); + } + const chunk = pushQueue.shift()!; + return { value: chunk, done: false }; + }, + return: async () => { + this.abort(); + return { value: undefined, done: true }; + }, + }; + } + + toReadableStream(): ReadableStream { + const stream = new Stream(this[Symbol.asyncIterator].bind(this), this.controller); + return stream.toReadableStream(); + } +} + +function finalizeChatCompletion( + snapshot: ChatCompletionSnapshot, + params: ChatCompletionCreateParams | null, +): ParsedChatCompletion { + const { id, choices, created, model, system_fingerprint, ...rest } = snapshot; + const completion: ChatCompletion = { + ...rest, + id, + choices: choices.map( + ({ message, finish_reason, index, logprobs, ...choiceRest }): ChatCompletion.Choice => { + if (!finish_reason) { + throw new OpenAIError(`missing finish_reason for choice ${index}`); + } + + const { content = null, function_call, tool_calls, ...messageRest } = message; + const role = message.role as 'assistant'; // this is what we expect; in theory it could be different which would make our types a slight lie but would be fine. + if (!role) { + throw new OpenAIError(`missing role for choice ${index}`); + } + + if (function_call) { + const { arguments: args, name } = function_call; + if (args == null) { + throw new OpenAIError(`missing function_call.arguments for choice ${index}`); + } + + if (!name) { + throw new OpenAIError(`missing function_call.name for choice ${index}`); + } + + return { + ...choiceRest, + message: { + content, + function_call: { arguments: args, name }, + role, + refusal: message.refusal ?? null, + }, + finish_reason, + index, + logprobs, + }; + } + + if (tool_calls) { + return { + ...choiceRest, + index, + finish_reason, + logprobs, + message: { + ...messageRest, + role, + content, + refusal: message.refusal ?? 
null, + tool_calls: tool_calls.map((tool_call, i) => { + const { function: fn, type, id, ...toolRest } = tool_call; + const { arguments: args, name, ...fnRest } = fn || {}; + if (id == null) { + throw new OpenAIError(`missing choices[${index}].tool_calls[${i}].id\n${str(snapshot)}`); + } + if (type == null) { + throw new OpenAIError(`missing choices[${index}].tool_calls[${i}].type\n${str(snapshot)}`); + } + if (name == null) { + throw new OpenAIError( + `missing choices[${index}].tool_calls[${i}].function.name\n${str(snapshot)}`, + ); + } + if (args == null) { + throw new OpenAIError( + `missing choices[${index}].tool_calls[${i}].function.arguments\n${str(snapshot)}`, + ); + } + + return { ...toolRest, id, type, function: { ...fnRest, name, arguments: args } }; + }), + }, + }; + } + return { + ...choiceRest, + message: { ...messageRest, content, role, refusal: message.refusal ?? null }, + finish_reason, + index, + logprobs, + }; + }, + ), + created, + model, + object: 'chat.completion', + ...(system_fingerprint ? { system_fingerprint } : {}), + }; + + return maybeParseChatCompletion(completion, params); +} + +function str(x: unknown) { + return JSON.stringify(x); +} + +/** + * Represents a streamed chunk of a chat completion response returned by model, + * based on the provided input. + */ +export interface ChatCompletionSnapshot { + /** + * A unique identifier for the chat completion. + */ + id: string; + + /** + * A list of chat completion choices. Can be more than one if `n` is greater + * than 1. + */ + choices: Array; + + /** + * The Unix timestamp (in seconds) of when the chat completion was created. + */ + created: number; + + /** + * The model to generate the completion. + */ + model: string; + + // Note we do not include an "object" type on the snapshot, + // because the object is not a valid "chat.completion" until finalized. + // object: 'chat.completion'; + + /** + * This fingerprint represents the backend configuration that the model runs with. + * + * Can be used in conjunction with the `seed` request parameter to understand when + * backend changes have been made that might impact determinism. + */ + system_fingerprint?: string; +} + +export namespace ChatCompletionSnapshot { + export interface Choice { + /** + * A chat completion delta generated by streamed model responses. + */ + message: Choice.Message; + + /** + * The reason the model stopped generating tokens. This will be `stop` if the model + * hit a natural stop point or a provided stop sequence, `length` if the maximum + * number of tokens specified in the request was reached, `content_filter` if + * content was omitted due to a flag from our content filters, or `function_call` + * if the model called a function. + */ + finish_reason: ChatCompletion.Choice['finish_reason'] | null; + + /** + * Log probability information for the choice. + */ + logprobs: ChatCompletion.Choice.Logprobs | null; + + /** + * The index of the choice in the list of choices. + */ + index: number; + } + + export namespace Choice { + /** + * A chat completion delta generated by streamed model responses. + */ + export interface Message { + /** + * The contents of the chunk message. + */ + content?: string | null; + + refusal?: string | null; + + parsed?: unknown | null; + + /** + * The name and arguments of a function that should be called, as generated by the + * model. + */ + function_call?: Message.FunctionCall; + + tool_calls?: Array; + + /** + * The role of the author of this message. 
+ */ + role?: 'system' | 'user' | 'assistant' | 'function' | 'tool'; + } + + export namespace Message { + export interface ToolCall { + /** + * The ID of the tool call. + */ + id: string; + + function: ToolCall.Function; + + /** + * The type of the tool. + */ + type: 'function'; + } + + export namespace ToolCall { + export interface Function { + /** + * The arguments to call the function with, as generated by the model in JSON + * format. Note that the model does not always generate valid JSON, and may + * hallucinate parameters not defined by your function schema. Validate the + * arguments in your code before calling your function. + */ + arguments: string; + + parsed_arguments?: unknown; + + /** + * The name of the function to call. + */ + name: string; + } + } + + /** + * The name and arguments of a function that should be called, as generated by the + * model. + */ + export interface FunctionCall { + /** + * The arguments to call the function with, as generated by the model in JSON + * format. Note that the model does not always generate valid JSON, and may + * hallucinate parameters not defined by your function schema. Validate the + * arguments in your code before calling your function. + */ + arguments?: string; + + /** + * The name of the function to call. + */ + name?: string; + } + } + } +} + +type AssertIsEmpty = keyof T extends never ? T : never; + +/** + * Ensures the given argument is an empty object, useful for + * asserting that all known properties on an object have been + * destructured. + */ +function assertIsEmpty(obj: AssertIsEmpty): asserts obj is AssertIsEmpty { + return; +} + +function assertNever(_x: never) {} diff --git a/node_modules/openai/src/lib/ChatCompletionStreamingRunner.ts b/node_modules/openai/src/lib/ChatCompletionStreamingRunner.ts new file mode 100644 index 0000000..ba0c649 --- /dev/null +++ b/node_modules/openai/src/lib/ChatCompletionStreamingRunner.ts @@ -0,0 +1,72 @@ +import { + type ChatCompletionChunk, + type ChatCompletionCreateParamsStreaming, +} from '../resources/chat/completions'; +import { RunnerOptions, type AbstractChatCompletionRunnerEvents } from './AbstractChatCompletionRunner'; +import { type ReadableStream } from '../_shims/index'; +import { RunnableTools, type BaseFunctionsArgs, type RunnableFunctions } from './RunnableFunction'; +import { ChatCompletionSnapshot, ChatCompletionStream } from './ChatCompletionStream'; +import OpenAI from '../index'; +import { AutoParseableTool } from '../lib/parser'; + +export interface ChatCompletionStreamEvents extends AbstractChatCompletionRunnerEvents { + content: (contentDelta: string, contentSnapshot: string) => void; + chunk: (chunk: ChatCompletionChunk, snapshot: ChatCompletionSnapshot) => void; +} + +export type ChatCompletionStreamingFunctionRunnerParams = Omit< + ChatCompletionCreateParamsStreaming, + 'functions' +> & { + functions: RunnableFunctions; +}; + +export type ChatCompletionStreamingToolRunnerParams = Omit< + ChatCompletionCreateParamsStreaming, + 'tools' +> & { + tools: RunnableTools | AutoParseableTool[]; +}; + +export class ChatCompletionStreamingRunner + extends ChatCompletionStream + implements AsyncIterable +{ + static override fromReadableStream(stream: ReadableStream): ChatCompletionStreamingRunner { + const runner = new ChatCompletionStreamingRunner(null); + runner._run(() => runner._fromReadableStream(stream)); + return runner; + } + + /** @deprecated - please use `runTools` instead. 
*/ + static runFunctions( + client: OpenAI, + params: ChatCompletionStreamingFunctionRunnerParams, + options?: RunnerOptions, + ): ChatCompletionStreamingRunner { + const runner = new ChatCompletionStreamingRunner(null); + const opts = { + ...options, + headers: { ...options?.headers, 'X-Stainless-Helper-Method': 'runFunctions' }, + }; + runner._run(() => runner._runFunctions(client, params, opts)); + return runner; + } + + static runTools( + client: OpenAI, + params: ChatCompletionStreamingToolRunnerParams, + options?: RunnerOptions, + ): ChatCompletionStreamingRunner { + const runner = new ChatCompletionStreamingRunner( + // @ts-expect-error TODO these types are incompatible + params, + ); + const opts = { + ...options, + headers: { ...options?.headers, 'X-Stainless-Helper-Method': 'runTools' }, + }; + runner._run(() => runner._runTools(client, params, opts)); + return runner; + } +} diff --git a/node_modules/openai/src/lib/EventStream.ts b/node_modules/openai/src/lib/EventStream.ts new file mode 100644 index 0000000..d3f485e --- /dev/null +++ b/node_modules/openai/src/lib/EventStream.ts @@ -0,0 +1,239 @@ +import { APIUserAbortError, OpenAIError } from '../error'; + +export class EventStream { + controller: AbortController = new AbortController(); + + #connectedPromise: Promise; + #resolveConnectedPromise: () => void = () => {}; + #rejectConnectedPromise: (error: OpenAIError) => void = () => {}; + + #endPromise: Promise; + #resolveEndPromise: () => void = () => {}; + #rejectEndPromise: (error: OpenAIError) => void = () => {}; + + #listeners: { + [Event in keyof EventTypes]?: EventListeners; + } = {}; + + #ended = false; + #errored = false; + #aborted = false; + #catchingPromiseCreated = false; + + constructor() { + this.#connectedPromise = new Promise((resolve, reject) => { + this.#resolveConnectedPromise = resolve; + this.#rejectConnectedPromise = reject; + }); + + this.#endPromise = new Promise((resolve, reject) => { + this.#resolveEndPromise = resolve; + this.#rejectEndPromise = reject; + }); + + // Don't let these promises cause unhandled rejection errors. + // we will manually cause an unhandled rejection error later + // if the user hasn't registered any error listener or called + // any promise-returning method. + this.#connectedPromise.catch(() => {}); + this.#endPromise.catch(() => {}); + } + + protected _run(this: EventStream, executor: () => Promise) { + // Unfortunately if we call `executor()` immediately we get runtime errors about + // references to `this` before the `super()` constructor call returns. + setTimeout(() => { + executor().then(() => { + this._emitFinal(); + this._emit('end'); + }, this.#handleError.bind(this)); + }, 0); + } + + protected _connected(this: EventStream) { + if (this.ended) return; + this.#resolveConnectedPromise(); + this._emit('connect'); + } + + get ended(): boolean { + return this.#ended; + } + + get errored(): boolean { + return this.#errored; + } + + get aborted(): boolean { + return this.#aborted; + } + + abort() { + this.controller.abort(); + } + + /** + * Adds the listener function to the end of the listeners array for the event. + * No checks are made to see if the listener has already been added. Multiple calls passing + * the same combination of event and listener will result in the listener being added, and + * called, multiple times. 
+ * @returns this ChatCompletionStream, so that calls can be chained + */ + on(event: Event, listener: EventListener): this { + const listeners: EventListeners = + this.#listeners[event] || (this.#listeners[event] = []); + listeners.push({ listener }); + return this; + } + + /** + * Removes the specified listener from the listener array for the event. + * off() will remove, at most, one instance of a listener from the listener array. If any single + * listener has been added multiple times to the listener array for the specified event, then + * off() must be called multiple times to remove each instance. + * @returns this ChatCompletionStream, so that calls can be chained + */ + off(event: Event, listener: EventListener): this { + const listeners = this.#listeners[event]; + if (!listeners) return this; + const index = listeners.findIndex((l) => l.listener === listener); + if (index >= 0) listeners.splice(index, 1); + return this; + } + + /** + * Adds a one-time listener function for the event. The next time the event is triggered, + * this listener is removed and then invoked. + * @returns this ChatCompletionStream, so that calls can be chained + */ + once(event: Event, listener: EventListener): this { + const listeners: EventListeners = + this.#listeners[event] || (this.#listeners[event] = []); + listeners.push({ listener, once: true }); + return this; + } + + /** + * This is similar to `.once()`, but returns a Promise that resolves the next time + * the event is triggered, instead of calling a listener callback. + * @returns a Promise that resolves the next time given event is triggered, + * or rejects if an error is emitted. (If you request the 'error' event, + * returns a promise that resolves with the error). + * + * Example: + * + * const message = await stream.emitted('message') // rejects if the stream errors + */ + emitted( + event: Event, + ): Promise< + EventParameters extends [infer Param] ? Param + : EventParameters extends [] ? 
void + : EventParameters + > { + return new Promise((resolve, reject) => { + this.#catchingPromiseCreated = true; + if (event !== 'error') this.once('error', reject); + this.once(event, resolve as any); + }); + } + + async done(): Promise { + this.#catchingPromiseCreated = true; + await this.#endPromise; + } + + #handleError(this: EventStream, error: unknown) { + this.#errored = true; + if (error instanceof Error && error.name === 'AbortError') { + error = new APIUserAbortError(); + } + if (error instanceof APIUserAbortError) { + this.#aborted = true; + return this._emit('abort', error); + } + if (error instanceof OpenAIError) { + return this._emit('error', error); + } + if (error instanceof Error) { + const openAIError: OpenAIError = new OpenAIError(error.message); + // @ts-ignore + openAIError.cause = error; + return this._emit('error', openAIError); + } + return this._emit('error', new OpenAIError(String(error))); + } + + _emit(event: Event, ...args: EventParameters): void; + _emit(event: Event, ...args: EventParameters): void; + _emit( + this: EventStream, + event: Event, + ...args: EventParameters + ) { + // make sure we don't emit any events after end + if (this.#ended) { + return; + } + + if (event === 'end') { + this.#ended = true; + this.#resolveEndPromise(); + } + + const listeners: EventListeners | undefined = this.#listeners[event]; + if (listeners) { + this.#listeners[event] = listeners.filter((l) => !l.once) as any; + listeners.forEach(({ listener }: any) => listener(...(args as any))); + } + + if (event === 'abort') { + const error = args[0] as APIUserAbortError; + if (!this.#catchingPromiseCreated && !listeners?.length) { + Promise.reject(error); + } + this.#rejectConnectedPromise(error); + this.#rejectEndPromise(error); + this._emit('end'); + return; + } + + if (event === 'error') { + // NOTE: _emit('error', error) should only be called from #handleError(). + + const error = args[0] as OpenAIError; + if (!this.#catchingPromiseCreated && !listeners?.length) { + // Trigger an unhandled rejection if the user hasn't registered any error handlers. + // If you are seeing stack traces here, make sure to handle errors via either: + // - runner.on('error', () => ...) + // - await runner.done() + // - await runner.finalChatCompletion() + // - etc. + Promise.reject(error); + } + this.#rejectConnectedPromise(error); + this.#rejectEndPromise(error); + this._emit('end'); + } + } + + protected _emitFinal(): void {} +} + +type EventListener = Events[EventType]; + +type EventListeners = Array<{ + listener: EventListener; + once?: boolean; +}>; + +export type EventParameters = { + [Event in EventType]: EventListener extends (...args: infer P) => any ? P : never; +}[EventType]; + +export interface BaseEvents { + connect: () => void; + error: (error: OpenAIError) => void; + abort: (error: APIUserAbortError) => void; + end: () => void; +} diff --git a/node_modules/openai/src/lib/RunnableFunction.ts b/node_modules/openai/src/lib/RunnableFunction.ts new file mode 100644 index 0000000..a645f5e --- /dev/null +++ b/node_modules/openai/src/lib/RunnableFunction.ts @@ -0,0 +1,136 @@ +import { type ChatCompletionRunner } from './ChatCompletionRunner'; +import { type ChatCompletionStreamingRunner } from './ChatCompletionStreamingRunner'; +import { JSONSchema } from './jsonschema'; + +type PromiseOrValue = T | Promise; + +export type RunnableFunctionWithParse = { + /** + * @param args the return value from `parse`. + * @param runner the runner evaluating this callback. 
+ * @returns a string to send back to OpenAI. + */ + function: ( + args: Args, + runner: ChatCompletionRunner | ChatCompletionStreamingRunner, + ) => PromiseOrValue; + /** + * @param input the raw args from the OpenAI function call. + * @returns the parsed arguments to pass to `function` + */ + parse: (input: string) => PromiseOrValue; + /** + * The parameters the function accepts, describes as a JSON Schema object. + */ + parameters: JSONSchema; + /** + * A description of what the function does, used by the model to choose when and how to call the function. + */ + description: string; + /** + * The name of the function to be called. Will default to function.name if omitted. + */ + name?: string | undefined; + strict?: boolean | undefined; +}; + +export type RunnableFunctionWithoutParse = { + /** + * @param args the raw args from the OpenAI function call. + * @returns a string to send back to OpenAI + */ + function: ( + args: string, + runner: ChatCompletionRunner | ChatCompletionStreamingRunner, + ) => PromiseOrValue; + /** + * The parameters the function accepts, describes as a JSON Schema object. + */ + parameters: JSONSchema; + /** + * A description of what the function does, used by the model to choose when and how to call the function. + */ + description: string; + /** + * The name of the function to be called. Will default to function.name if omitted. + */ + name?: string | undefined; + strict?: boolean | undefined; +}; + +export type RunnableFunction = + Args extends string ? RunnableFunctionWithoutParse + : Args extends object ? RunnableFunctionWithParse + : never; + +export type RunnableToolFunction = + Args extends string ? RunnableToolFunctionWithoutParse + : Args extends object ? RunnableToolFunctionWithParse + : never; + +export type RunnableToolFunctionWithoutParse = { + type: 'function'; + function: RunnableFunctionWithoutParse; +}; +export type RunnableToolFunctionWithParse = { + type: 'function'; + function: RunnableFunctionWithParse; +}; + +export function isRunnableFunctionWithParse( + fn: any, +): fn is RunnableFunctionWithParse { + return typeof (fn as any).parse === 'function'; +} + +export type BaseFunctionsArgs = readonly (object | string)[]; + +export type RunnableFunctions = + [any[]] extends [FunctionsArgs] ? readonly RunnableFunction[] + : { + [Index in keyof FunctionsArgs]: Index extends number ? RunnableFunction + : FunctionsArgs[Index]; + }; + +export type RunnableTools = + [any[]] extends [FunctionsArgs] ? readonly RunnableToolFunction[] + : { + [Index in keyof FunctionsArgs]: Index extends number ? RunnableToolFunction + : FunctionsArgs[Index]; + }; + +/** + * This is helper class for passing a `function` and `parse` where the `function` + * argument type matches the `parse` return type. + * + * @deprecated - please use ParsingToolFunction instead. + */ +export class ParsingFunction { + function: RunnableFunctionWithParse['function']; + parse: RunnableFunctionWithParse['parse']; + parameters: RunnableFunctionWithParse['parameters']; + description: RunnableFunctionWithParse['description']; + name?: RunnableFunctionWithParse['name']; + + constructor(input: RunnableFunctionWithParse) { + this.function = input.function; + this.parse = input.parse; + this.parameters = input.parameters; + this.description = input.description; + this.name = input.name; + } +} + +/** + * This is helper class for passing a `function` and `parse` where the `function` + * argument type matches the `parse` return type. 
+ */ +export class ParsingToolFunction { + type: 'function'; + function: RunnableFunctionWithParse; + + constructor(input: RunnableFunctionWithParse) { + this.type = 'function'; + this.function = input; + } +} diff --git a/node_modules/openai/src/lib/Util.ts b/node_modules/openai/src/lib/Util.ts new file mode 100644 index 0000000..ae09b8a --- /dev/null +++ b/node_modules/openai/src/lib/Util.ts @@ -0,0 +1,23 @@ +/** + * Like `Promise.allSettled()` but throws an error if any promises are rejected. + */ +export const allSettledWithThrow = async (promises: Promise[]): Promise => { + const results = await Promise.allSettled(promises); + const rejected = results.filter((result): result is PromiseRejectedResult => result.status === 'rejected'); + if (rejected.length) { + for (const result of rejected) { + console.error(result.reason); + } + + throw new Error(`${rejected.length} promise(s) failed - see the above errors`); + } + + // Note: TS was complaining about using `.filter().map()` here for some reason + const values: R[] = []; + for (const result of results) { + if (result.status === 'fulfilled') { + values.push(result.value); + } + } + return values; +}; diff --git a/node_modules/openai/src/lib/chatCompletionUtils.ts b/node_modules/openai/src/lib/chatCompletionUtils.ts new file mode 100644 index 0000000..7e9f8a0 --- /dev/null +++ b/node_modules/openai/src/lib/chatCompletionUtils.ts @@ -0,0 +1,28 @@ +import { + type ChatCompletionAssistantMessageParam, + type ChatCompletionFunctionMessageParam, + type ChatCompletionMessageParam, + type ChatCompletionToolMessageParam, +} from '../resources'; + +export const isAssistantMessage = ( + message: ChatCompletionMessageParam | null | undefined, +): message is ChatCompletionAssistantMessageParam => { + return message?.role === 'assistant'; +}; + +export const isFunctionMessage = ( + message: ChatCompletionMessageParam | null | undefined, +): message is ChatCompletionFunctionMessageParam => { + return message?.role === 'function'; +}; + +export const isToolMessage = ( + message: ChatCompletionMessageParam | null | undefined, +): message is ChatCompletionToolMessageParam => { + return message?.role === 'tool'; +}; + +export function isPresent(obj: T | null | undefined): obj is T { + return obj != null; +} diff --git a/node_modules/openai/src/lib/jsonschema.ts b/node_modules/openai/src/lib/jsonschema.ts new file mode 100644 index 0000000..6362777 --- /dev/null +++ b/node_modules/openai/src/lib/jsonschema.ts @@ -0,0 +1,148 @@ +// File mostly copied from @types/json-schema, but stripped down a bit for brevity +// https://github.com/DefinitelyTyped/DefinitelyTyped/blob/817274f3280152ba2929a6067c93df8b34c4c9aa/types/json-schema/index.d.ts +// +// ================================================================================================== +// JSON Schema Draft 07 +// ================================================================================================== +// https://tools.ietf.org/html/draft-handrews-json-schema-validation-01 +// -------------------------------------------------------------------------------------------------- + +/** + * Primitive type + * @see https://tools.ietf.org/html/draft-handrews-json-schema-validation-01#section-6.1.1 + */ +export type JSONSchemaTypeName = + | ({} & string) + | 'string' + | 'number' + | 'integer' + | 'boolean' + | 'object' + | 'array' + | 'null'; + +/** + * Primitive type + * @see https://tools.ietf.org/html/draft-handrews-json-schema-validation-01#section-6.1.1 + */ +export type JSONSchemaType = + | 
string // + | number + | boolean + | JSONSchemaObject + | JSONSchemaArray + | null; + +// Workaround for infinite type recursion +export interface JSONSchemaObject { + [key: string]: JSONSchemaType; +} + +// Workaround for infinite type recursion +// https://github.com/Microsoft/TypeScript/issues/3496#issuecomment-128553540 +export interface JSONSchemaArray extends Array {} + +/** + * Meta schema + * + * Recommended values: + * - 'http://json-schema.org/schema#' + * - 'http://json-schema.org/hyper-schema#' + * - 'http://json-schema.org/draft-07/schema#' + * - 'http://json-schema.org/draft-07/hyper-schema#' + * + * @see https://tools.ietf.org/html/draft-handrews-json-schema-validation-01#section-5 + */ +export type JSONSchemaVersion = string; + +/** + * JSON Schema v7 + * @see https://tools.ietf.org/html/draft-handrews-json-schema-validation-01 + */ +export type JSONSchemaDefinition = JSONSchema | boolean; +export interface JSONSchema { + $id?: string | undefined; + $comment?: string | undefined; + + /** + * @see https://tools.ietf.org/html/draft-handrews-json-schema-validation-01#section-6.1 + */ + type?: JSONSchemaTypeName | JSONSchemaTypeName[] | undefined; + enum?: JSONSchemaType[] | undefined; + const?: JSONSchemaType | undefined; + + /** + * @see https://tools.ietf.org/html/draft-handrews-json-schema-validation-01#section-6.2 + */ + multipleOf?: number | undefined; + maximum?: number | undefined; + exclusiveMaximum?: number | undefined; + minimum?: number | undefined; + exclusiveMinimum?: number | undefined; + + /** + * @see https://tools.ietf.org/html/draft-handrews-json-schema-validation-01#section-6.3 + */ + maxLength?: number | undefined; + minLength?: number | undefined; + pattern?: string | undefined; + + /** + * @see https://tools.ietf.org/html/draft-handrews-json-schema-validation-01#section-6.4 + */ + items?: JSONSchemaDefinition | JSONSchemaDefinition[] | undefined; + additionalItems?: JSONSchemaDefinition | undefined; + maxItems?: number | undefined; + minItems?: number | undefined; + uniqueItems?: boolean | undefined; + contains?: JSONSchemaDefinition | undefined; + + /** + * @see https://tools.ietf.org/html/draft-handrews-json-schema-validation-01#section-6.5 + */ + maxProperties?: number | undefined; + minProperties?: number | undefined; + required?: string[] | undefined; + properties?: + | { + [key: string]: JSONSchemaDefinition; + } + | undefined; + patternProperties?: + | { + [key: string]: JSONSchemaDefinition; + } + | undefined; + additionalProperties?: JSONSchemaDefinition | undefined; + propertyNames?: JSONSchemaDefinition | undefined; + + /** + * @see https://tools.ietf.org/html/draft-handrews-json-schema-validation-01#section-6.6 + */ + if?: JSONSchemaDefinition | undefined; + then?: JSONSchemaDefinition | undefined; + else?: JSONSchemaDefinition | undefined; + + /** + * @see https://tools.ietf.org/html/draft-handrews-json-schema-validation-01#section-6.7 + */ + allOf?: JSONSchemaDefinition[] | undefined; + anyOf?: JSONSchemaDefinition[] | undefined; + oneOf?: JSONSchemaDefinition[] | undefined; + not?: JSONSchemaDefinition | undefined; + + /** + * @see https://tools.ietf.org/html/draft-handrews-json-schema-validation-01#section-7 + */ + format?: string | undefined; + + /** + * @see https://tools.ietf.org/html/draft-handrews-json-schema-validation-01#section-10 + */ + title?: string | undefined; + description?: string | undefined; + default?: JSONSchemaType | undefined; + readOnly?: boolean | undefined; + writeOnly?: boolean | undefined; + examples?: 
JSONSchemaType | undefined; +} diff --git a/node_modules/openai/src/lib/parser.ts b/node_modules/openai/src/lib/parser.ts new file mode 100644 index 0000000..f2678e3 --- /dev/null +++ b/node_modules/openai/src/lib/parser.ts @@ -0,0 +1,235 @@ +import { + ChatCompletion, + ChatCompletionCreateParams, + ChatCompletionMessageToolCall, + ChatCompletionTool, +} from '../resources/chat/completions'; +import { + ChatCompletionStreamingToolRunnerParams, + ChatCompletionStreamParams, + ChatCompletionToolRunnerParams, + ParsedChatCompletion, + ParsedChoice, + ParsedFunctionToolCall, +} from '../resources/beta/chat/completions'; +import { ResponseFormatJSONSchema } from '../resources/shared'; +import { ContentFilterFinishReasonError, LengthFinishReasonError, OpenAIError } from '../error'; + +type AnyChatCompletionCreateParams = + | ChatCompletionCreateParams + | ChatCompletionToolRunnerParams + | ChatCompletionStreamingToolRunnerParams + | ChatCompletionStreamParams; + +export type ExtractParsedContentFromParams = + Params['response_format'] extends AutoParseableResponseFormat ? P : null; + +export type AutoParseableResponseFormat = ResponseFormatJSONSchema & { + __output: ParsedT; // type-level only + + $brand: 'auto-parseable-response-format'; + $parseRaw(content: string): ParsedT; +}; + +export function makeParseableResponseFormat( + response_format: ResponseFormatJSONSchema, + parser: (content: string) => ParsedT, +): AutoParseableResponseFormat { + const obj = { ...response_format }; + + Object.defineProperties(obj, { + $brand: { + value: 'auto-parseable-response-format', + enumerable: false, + }, + $parseRaw: { + value: parser, + enumerable: false, + }, + }); + + return obj as AutoParseableResponseFormat; +} + +export function isAutoParsableResponseFormat( + response_format: any, +): response_format is AutoParseableResponseFormat { + return response_format?.['$brand'] === 'auto-parseable-response-format'; +} + +type ToolOptions = { + name: string; + arguments: any; + function?: ((args: any) => any) | undefined; +}; + +export type AutoParseableTool< + OptionsT extends ToolOptions, + HasFunction = OptionsT['function'] extends Function ? true : false, +> = ChatCompletionTool & { + __arguments: OptionsT['arguments']; // type-level only + __name: OptionsT['name']; // type-level only + __hasFunction: HasFunction; // type-level only + + $brand: 'auto-parseable-tool'; + $callback: ((args: OptionsT['arguments']) => any) | undefined; + $parseRaw(args: string): OptionsT['arguments']; +}; + +export function makeParseableTool( + tool: ChatCompletionTool, + { + parser, + callback, + }: { + parser: (content: string) => OptionsT['arguments']; + callback: ((args: any) => any) | undefined; + }, +): AutoParseableTool { + const obj = { ...tool }; + + Object.defineProperties(obj, { + $brand: { + value: 'auto-parseable-tool', + enumerable: false, + }, + $parseRaw: { + value: parser, + enumerable: false, + }, + $callback: { + value: callback, + enumerable: false, + }, + }); + + return obj as AutoParseableTool; +} + +export function isAutoParsableTool(tool: any): tool is AutoParseableTool { + return tool?.['$brand'] === 'auto-parseable-tool'; +} + +export function maybeParseChatCompletion< + Params extends ChatCompletionCreateParams | null, + ParsedT = Params extends null ? 
null : ExtractParsedContentFromParams>, +>(completion: ChatCompletion, params: Params): ParsedChatCompletion { + if (!params || !hasAutoParseableInput(params)) { + return { + ...completion, + choices: completion.choices.map((choice) => ({ + ...choice, + message: { ...choice.message, parsed: null, tool_calls: choice.message.tool_calls ?? [] }, + })), + }; + } + + return parseChatCompletion(completion, params); +} + +export function parseChatCompletion< + Params extends ChatCompletionCreateParams, + ParsedT = ExtractParsedContentFromParams, +>(completion: ChatCompletion, params: Params): ParsedChatCompletion { + const choices: Array> = completion.choices.map((choice): ParsedChoice => { + if (choice.finish_reason === 'length') { + throw new LengthFinishReasonError(); + } + + if (choice.finish_reason === 'content_filter') { + throw new ContentFilterFinishReasonError(); + } + + return { + ...choice, + message: { + ...choice.message, + tool_calls: choice.message.tool_calls?.map((toolCall) => parseToolCall(params, toolCall)) ?? [], + parsed: + choice.message.content && !choice.message.refusal ? + parseResponseFormat(params, choice.message.content) + : null, + }, + }; + }); + + return { ...completion, choices }; +} + +function parseResponseFormat< + Params extends ChatCompletionCreateParams, + ParsedT = ExtractParsedContentFromParams, +>(params: Params, content: string): ParsedT | null { + if (params.response_format?.type !== 'json_schema') { + return null; + } + + if (params.response_format?.type === 'json_schema') { + if ('$parseRaw' in params.response_format) { + const response_format = params.response_format as AutoParseableResponseFormat; + + return response_format.$parseRaw(content); + } + + return JSON.parse(content); + } + + return null; +} + +function parseToolCall( + params: Params, + toolCall: ChatCompletionMessageToolCall, +): ParsedFunctionToolCall { + const inputTool = params.tools?.find((inputTool) => inputTool.function?.name === toolCall.function.name); + return { + ...toolCall, + function: { + ...toolCall.function, + parsed_arguments: + isAutoParsableTool(inputTool) ? inputTool.$parseRaw(toolCall.function.arguments) + : inputTool?.function.strict ? JSON.parse(toolCall.function.arguments) + : null, + }, + }; +} + +export function shouldParseToolCall( + params: ChatCompletionCreateParams | null | undefined, + toolCall: ChatCompletionMessageToolCall, +): boolean { + if (!params) { + return false; + } + + const inputTool = params.tools?.find((inputTool) => inputTool.function?.name === toolCall.function.name); + return isAutoParsableTool(inputTool) || inputTool?.function.strict || false; +} + +export function hasAutoParseableInput(params: AnyChatCompletionCreateParams): boolean { + if (isAutoParsableResponseFormat(params.response_format)) { + return true; + } + + return ( + params.tools?.some( + (t) => isAutoParsableTool(t) || (t.type === 'function' && t.function.strict === true), + ) ?? false + ); +} + +export function validateInputTools(tools: ChatCompletionTool[] | undefined) { + for (const tool of tools ?? []) { + if (tool.type !== 'function') { + throw new OpenAIError( + `Currently only \`function\` tool types support auto-parsing; Received \`${tool.type}\``, + ); + } + + if (tool.function.strict !== true) { + throw new OpenAIError( + `The \`${tool.function.name}\` tool is not marked with \`strict: true\`. 
Only strict function tools can be auto-parsed`, + ); + } + } +} diff --git a/node_modules/openai/src/pagination.ts b/node_modules/openai/src/pagination.ts new file mode 100644 index 0000000..63644e3 --- /dev/null +++ b/node_modules/openai/src/pagination.ts @@ -0,0 +1,98 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +import { AbstractPage, Response, APIClient, FinalRequestOptions, PageInfo } from './core'; + +export interface PageResponse { + data: Array; + + object: string; +} + +/** + * Note: no pagination actually occurs yet, this is for forwards-compatibility. + */ +export class Page extends AbstractPage implements PageResponse { + data: Array; + + object: string; + + constructor(client: APIClient, response: Response, body: PageResponse, options: FinalRequestOptions) { + super(client, response, body, options); + + this.data = body.data || []; + this.object = body.object; + } + + getPaginatedItems(): Item[] { + return this.data ?? []; + } + + // @deprecated Please use `nextPageInfo()` instead + /** + * This page represents a response that isn't actually paginated at the API level + * so there will never be any next page params. + */ + nextPageParams(): null { + return null; + } + + nextPageInfo(): null { + return null; + } +} + +export interface CursorPageResponse { + data: Array; +} + +export interface CursorPageParams { + after?: string; + + limit?: number; +} + +export class CursorPage + extends AbstractPage + implements CursorPageResponse +{ + data: Array; + + constructor( + client: APIClient, + response: Response, + body: CursorPageResponse, + options: FinalRequestOptions, + ) { + super(client, response, body, options); + + this.data = body.data || []; + } + + getPaginatedItems(): Item[] { + return this.data ?? []; + } + + // @deprecated Please use `nextPageInfo()` instead + nextPageParams(): Partial | null { + const info = this.nextPageInfo(); + if (!info) return null; + if ('params' in info) return info.params; + const params = Object.fromEntries(info.url.searchParams); + if (!Object.keys(params).length) return null; + return params; + } + + nextPageInfo(): PageInfo | null { + const data = this.getPaginatedItems(); + if (!data.length) { + return null; + } + + const id = data[data.length - 1]?.id; + if (!id) { + return null; + } + + return { params: { after: id } }; + } +} diff --git a/node_modules/openai/src/resource.ts b/node_modules/openai/src/resource.ts new file mode 100644 index 0000000..87847c8 --- /dev/null +++ b/node_modules/openai/src/resource.ts @@ -0,0 +1,11 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +import type { OpenAI } from './index'; + +export class APIResource { + protected _client: OpenAI; + + constructor(client: OpenAI) { + this._client = client; + } +} diff --git a/node_modules/openai/src/resources/audio/audio.ts b/node_modules/openai/src/resources/audio/audio.ts new file mode 100644 index 0000000..b9a7ad4 --- /dev/null +++ b/node_modules/openai/src/resources/audio/audio.ts @@ -0,0 +1,65 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +import { APIResource } from '../../resource'; +import * as SpeechAPI from './speech'; +import { Speech, SpeechCreateParams, SpeechModel } from './speech'; +import * as TranscriptionsAPI from './transcriptions'; +import { + Transcription, + TranscriptionCreateParams, + TranscriptionCreateResponse, + TranscriptionSegment, + TranscriptionVerbose, + TranscriptionWord, + Transcriptions, +} from './transcriptions'; +import * as TranslationsAPI from './translations'; +import { + Translation, + TranslationCreateParams, + TranslationCreateResponse, + TranslationVerbose, + Translations, +} from './translations'; + +export class Audio extends APIResource { + transcriptions: TranscriptionsAPI.Transcriptions = new TranscriptionsAPI.Transcriptions(this._client); + translations: TranslationsAPI.Translations = new TranslationsAPI.Translations(this._client); + speech: SpeechAPI.Speech = new SpeechAPI.Speech(this._client); +} + +export type AudioModel = 'whisper-1'; + +/** + * The format of the output, in one of these options: `json`, `text`, `srt`, + * `verbose_json`, or `vtt`. + */ +export type AudioResponseFormat = 'json' | 'text' | 'srt' | 'verbose_json' | 'vtt'; + +Audio.Transcriptions = Transcriptions; +Audio.Translations = Translations; +Audio.Speech = Speech; + +export declare namespace Audio { + export { type AudioModel as AudioModel, type AudioResponseFormat as AudioResponseFormat }; + + export { + Transcriptions as Transcriptions, + type Transcription as Transcription, + type TranscriptionSegment as TranscriptionSegment, + type TranscriptionVerbose as TranscriptionVerbose, + type TranscriptionWord as TranscriptionWord, + type TranscriptionCreateResponse as TranscriptionCreateResponse, + type TranscriptionCreateParams as TranscriptionCreateParams, + }; + + export { + Translations as Translations, + type Translation as Translation, + type TranslationVerbose as TranslationVerbose, + type TranslationCreateResponse as TranslationCreateResponse, + type TranslationCreateParams as TranslationCreateParams, + }; + + export { Speech as Speech, type SpeechModel as SpeechModel, type SpeechCreateParams as SpeechCreateParams }; +} diff --git a/node_modules/openai/src/resources/audio/index.ts b/node_modules/openai/src/resources/audio/index.ts new file mode 100644 index 0000000..2bbe9e3 --- /dev/null +++ b/node_modules/openai/src/resources/audio/index.ts @@ -0,0 +1,20 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +export { Audio, type AudioModel, type AudioResponseFormat } from './audio'; +export { Speech, type SpeechModel, type SpeechCreateParams } from './speech'; +export { + Transcriptions, + type Transcription, + type TranscriptionSegment, + type TranscriptionVerbose, + type TranscriptionWord, + type TranscriptionCreateResponse, + type TranscriptionCreateParams, +} from './transcriptions'; +export { + Translations, + type Translation, + type TranslationVerbose, + type TranslationCreateResponse, + type TranslationCreateParams, +} from './translations'; diff --git a/node_modules/openai/src/resources/audio/speech.ts b/node_modules/openai/src/resources/audio/speech.ts new file mode 100644 index 0000000..1cda80f --- /dev/null +++ b/node_modules/openai/src/resources/audio/speech.ts @@ -0,0 +1,53 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
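Together with the `SpeechCreateParams` interface defined just below, `Speech.create()` returns the raw binary `Response` (note the `__binaryResponse: true` flag in the request). A minimal sketch of generating an MP3 and writing it to disk, assuming the usual `client.audio.speech` accessor and Node's `fs/promises`:

```ts
import fs from 'node:fs/promises';
import OpenAI from 'openai';

const client = new OpenAI();

async function speak(text: string): Promise<void> {
  // The request is marked __binaryResponse, so we get the underlying fetch Response.
  const response = await client.audio.speech.create({
    model: 'tts-1',
    voice: 'alloy',
    input: text,
    response_format: 'mp3', // the default; shown here for clarity
  });

  const buffer = Buffer.from(await response.arrayBuffer());
  await fs.writeFile('speech.mp3', buffer);
}

speak('Hello from the OpenAI Node SDK').catch(console.error);
```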
+ +import { APIResource } from '../../resource'; +import * as Core from '../../core'; +import { type Response } from '../../_shims/index'; + +export class Speech extends APIResource { + /** + * Generates audio from the input text. + */ + create(body: SpeechCreateParams, options?: Core.RequestOptions): Core.APIPromise { + return this._client.post('/audio/speech', { body, ...options, __binaryResponse: true }); + } +} + +export type SpeechModel = 'tts-1' | 'tts-1-hd'; + +export interface SpeechCreateParams { + /** + * The text to generate audio for. The maximum length is 4096 characters. + */ + input: string; + + /** + * One of the available [TTS models](https://platform.openai.com/docs/models#tts): + * `tts-1` or `tts-1-hd` + */ + model: (string & {}) | SpeechModel; + + /** + * The voice to use when generating the audio. Supported voices are `alloy`, + * `echo`, `fable`, `onyx`, `nova`, and `shimmer`. Previews of the voices are + * available in the + * [Text to speech guide](https://platform.openai.com/docs/guides/text-to-speech#voice-options). + */ + voice: 'alloy' | 'echo' | 'fable' | 'onyx' | 'nova' | 'shimmer'; + + /** + * The format to audio in. Supported formats are `mp3`, `opus`, `aac`, `flac`, + * `wav`, and `pcm`. + */ + response_format?: 'mp3' | 'opus' | 'aac' | 'flac' | 'wav' | 'pcm'; + + /** + * The speed of the generated audio. Select a value from `0.25` to `4.0`. `1.0` is + * the default. + */ + speed?: number; +} + +export declare namespace Speech { + export { type SpeechModel as SpeechModel, type SpeechCreateParams as SpeechCreateParams }; +} diff --git a/node_modules/openai/src/resources/audio/transcriptions.ts b/node_modules/openai/src/resources/audio/transcriptions.ts new file mode 100644 index 0000000..0b6da46 --- /dev/null +++ b/node_modules/openai/src/resources/audio/transcriptions.ts @@ -0,0 +1,216 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +import { APIResource } from '../../resource'; +import * as Core from '../../core'; +import * as AudioAPI from './audio'; + +export class Transcriptions extends APIResource { + /** + * Transcribes audio into the input language. + */ + create( + body: TranscriptionCreateParams<'json' | undefined>, + options?: Core.RequestOptions, + ): Core.APIPromise; + create( + body: TranscriptionCreateParams<'verbose_json'>, + options?: Core.RequestOptions, + ): Core.APIPromise; + create( + body: TranscriptionCreateParams<'srt' | 'vtt' | 'text'>, + options?: Core.RequestOptions, + ): Core.APIPromise; + create(body: TranscriptionCreateParams, options?: Core.RequestOptions): Core.APIPromise; + create( + body: TranscriptionCreateParams, + options?: Core.RequestOptions, + ): Core.APIPromise { + return this._client.post('/audio/transcriptions', Core.multipartFormRequestOptions({ body, ...options })); + } +} + +/** + * Represents a transcription response returned by model, based on the provided + * input. + */ +export interface Transcription { + /** + * The transcribed text. + */ + text: string; +} + +export interface TranscriptionSegment { + /** + * Unique identifier of the segment. + */ + id: number; + + /** + * Average logprob of the segment. If the value is lower than -1, consider the + * logprobs failed. + */ + avg_logprob: number; + + /** + * Compression ratio of the segment. If the value is greater than 2.4, consider the + * compression failed. + */ + compression_ratio: number; + + /** + * End time of the segment in seconds. + */ + end: number; + + /** + * Probability of no speech in the segment. 
If the value is higher than 1.0 and the + * `avg_logprob` is below -1, consider this segment silent. + */ + no_speech_prob: number; + + /** + * Seek offset of the segment. + */ + seek: number; + + /** + * Start time of the segment in seconds. + */ + start: number; + + /** + * Temperature parameter used for generating the segment. + */ + temperature: number; + + /** + * Text content of the segment. + */ + text: string; + + /** + * Array of token IDs for the text content. + */ + tokens: Array; +} + +/** + * Represents a verbose json transcription response returned by model, based on the + * provided input. + */ +export interface TranscriptionVerbose { + /** + * The duration of the input audio. + */ + duration: string; + + /** + * The language of the input audio. + */ + language: string; + + /** + * The transcribed text. + */ + text: string; + + /** + * Segments of the transcribed text and their corresponding details. + */ + segments?: Array; + + /** + * Extracted words and their corresponding timestamps. + */ + words?: Array; +} + +export interface TranscriptionWord { + /** + * End time of the word in seconds. + */ + end: number; + + /** + * Start time of the word in seconds. + */ + start: number; + + /** + * The text content of the word. + */ + word: string; +} + +/** + * Represents a transcription response returned by model, based on the provided + * input. + */ +export type TranscriptionCreateResponse = Transcription | TranscriptionVerbose; + +export interface TranscriptionCreateParams< + ResponseFormat extends AudioAPI.AudioResponseFormat | undefined = AudioAPI.AudioResponseFormat | undefined, +> { + /** + * The audio file object (not file name) to transcribe, in one of these formats: + * flac, mp3, mp4, mpeg, mpga, m4a, ogg, wav, or webm. + */ + file: Core.Uploadable; + + /** + * ID of the model to use. Only `whisper-1` (which is powered by our open source + * Whisper V2 model) is currently available. + */ + model: (string & {}) | AudioAPI.AudioModel; + + /** + * The language of the input audio. Supplying the input language in + * [ISO-639-1](https://en.wikipedia.org/wiki/List_of_ISO_639-1_codes) format will + * improve accuracy and latency. + */ + language?: string; + + /** + * An optional text to guide the model's style or continue a previous audio + * segment. The + * [prompt](https://platform.openai.com/docs/guides/speech-to-text#prompting) + * should match the audio language. + */ + prompt?: string; + + /** + * The format of the output, in one of these options: `json`, `text`, `srt`, + * `verbose_json`, or `vtt`. + */ + response_format?: ResponseFormat; + + /** + * The sampling temperature, between 0 and 1. Higher values like 0.8 will make the + * output more random, while lower values like 0.2 will make it more focused and + * deterministic. If set to 0, the model will use + * [log probability](https://en.wikipedia.org/wiki/Log_probability) to + * automatically increase the temperature until certain thresholds are hit. + */ + temperature?: number; + + /** + * The timestamp granularities to populate for this transcription. + * `response_format` must be set `verbose_json` to use timestamp granularities. + * Either or both of these options are supported: `word`, or `segment`. Note: There + * is no additional latency for segment timestamps, but generating word timestamps + * incurs additional latency. 
+ */ + timestamp_granularities?: Array<'word' | 'segment'>; +} + +export declare namespace Transcriptions { + export { + type Transcription as Transcription, + type TranscriptionSegment as TranscriptionSegment, + type TranscriptionVerbose as TranscriptionVerbose, + type TranscriptionWord as TranscriptionWord, + type TranscriptionCreateResponse as TranscriptionCreateResponse, + type TranscriptionCreateParams as TranscriptionCreateParams, + }; +} diff --git a/node_modules/openai/src/resources/audio/translations.ts b/node_modules/openai/src/resources/audio/translations.ts new file mode 100644 index 0000000..c6bf7c8 --- /dev/null +++ b/node_modules/openai/src/resources/audio/translations.ts @@ -0,0 +1,107 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +import { APIResource } from '../../resource'; +import * as Core from '../../core'; +import * as AudioAPI from './audio'; +import * as TranscriptionsAPI from './transcriptions'; + +export class Translations extends APIResource { + /** + * Translates audio into English. + */ + create( + body: TranslationCreateParams<'json' | undefined>, + options?: Core.RequestOptions, + ): Core.APIPromise; + create( + body: TranslationCreateParams<'verbose_json'>, + options?: Core.RequestOptions, + ): Core.APIPromise; + create( + body: TranslationCreateParams<'text' | 'srt' | 'vtt'>, + options?: Core.RequestOptions, + ): Core.APIPromise; + create(body: TranslationCreateParams, options?: Core.RequestOptions): Core.APIPromise; + create( + body: TranslationCreateParams, + options?: Core.RequestOptions, + ): Core.APIPromise { + return this._client.post('/audio/translations', Core.multipartFormRequestOptions({ body, ...options })); + } +} + +export interface Translation { + text: string; +} + +export interface TranslationVerbose { + /** + * The duration of the input audio. + */ + duration: string; + + /** + * The language of the output translation (always `english`). + */ + language: string; + + /** + * The translated text. + */ + text: string; + + /** + * Segments of the translated text and their corresponding details. + */ + segments?: Array; +} + +export type TranslationCreateResponse = Translation | TranslationVerbose; + +export interface TranslationCreateParams< + ResponseFormat extends AudioAPI.AudioResponseFormat | undefined = AudioAPI.AudioResponseFormat | undefined, +> { + /** + * The audio file object (not file name) translate, in one of these formats: flac, + * mp3, mp4, mpeg, mpga, m4a, ogg, wav, or webm. + */ + file: Core.Uploadable; + + /** + * ID of the model to use. Only `whisper-1` (which is powered by our open source + * Whisper V2 model) is currently available. + */ + model: (string & {}) | AudioAPI.AudioModel; + + /** + * An optional text to guide the model's style or continue a previous audio + * segment. The + * [prompt](https://platform.openai.com/docs/guides/speech-to-text#prompting) + * should be in English. + */ + prompt?: string; + + /** + * The format of the output, in one of these options: `json`, `text`, `srt`, + * `verbose_json`, or `vtt`. + */ + response_format?: ResponseFormat; + + /** + * The sampling temperature, between 0 and 1. Higher values like 0.8 will make the + * output more random, while lower values like 0.2 will make it more focused and + * deterministic. If set to 0, the model will use + * [log probability](https://en.wikipedia.org/wiki/Log_probability) to + * automatically increase the temperature until certain thresholds are hit. 
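The `create()` overloads above tie the return type to `response_format`: `'verbose_json'` resolves to `TranscriptionVerbose`, whose optional `segments` and `words` carry timestamps. A minimal transcription sketch, assuming the usual `client.audio.transcriptions` accessor and that an `fs.createReadStream` satisfies `Core.Uploadable`:

```ts
import fs from 'node:fs';
import OpenAI from 'openai';

const client = new OpenAI();

async function transcribe(path: string) {
  // With response_format 'verbose_json' the overload resolves to TranscriptionVerbose,
  // so `words` and `segments` are typed (both optional).
  const transcription = await client.audio.transcriptions.create({
    file: fs.createReadStream(path),
    model: 'whisper-1',
    response_format: 'verbose_json',
    timestamp_granularities: ['word'],
  });

  console.log(transcription.text);
  for (const word of transcription.words ?? []) {
    console.log(`${word.word}: ${word.start.toFixed(2)}s -> ${word.end.toFixed(2)}s`);
  }
}

transcribe('meeting.mp3').catch(console.error);
```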
+ */ + temperature?: number; +} + +export declare namespace Translations { + export { + type Translation as Translation, + type TranslationVerbose as TranslationVerbose, + type TranslationCreateResponse as TranslationCreateResponse, + type TranslationCreateParams as TranslationCreateParams, + }; +} diff --git a/node_modules/openai/src/resources/batches.ts b/node_modules/openai/src/resources/batches.ts new file mode 100644 index 0000000..ec5ca63 --- /dev/null +++ b/node_modules/openai/src/resources/batches.ts @@ -0,0 +1,258 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +import { APIResource } from '../resource'; +import { isRequestOptions } from '../core'; +import * as Core from '../core'; +import * as BatchesAPI from './batches'; +import { CursorPage, type CursorPageParams } from '../pagination'; + +export class Batches extends APIResource { + /** + * Creates and executes a batch from an uploaded file of requests + */ + create(body: BatchCreateParams, options?: Core.RequestOptions): Core.APIPromise { + return this._client.post('/batches', { body, ...options }); + } + + /** + * Retrieves a batch. + */ + retrieve(batchId: string, options?: Core.RequestOptions): Core.APIPromise { + return this._client.get(`/batches/${batchId}`, options); + } + + /** + * List your organization's batches. + */ + list(query?: BatchListParams, options?: Core.RequestOptions): Core.PagePromise; + list(options?: Core.RequestOptions): Core.PagePromise; + list( + query: BatchListParams | Core.RequestOptions = {}, + options?: Core.RequestOptions, + ): Core.PagePromise { + if (isRequestOptions(query)) { + return this.list({}, query); + } + return this._client.getAPIList('/batches', BatchesPage, { query, ...options }); + } + + /** + * Cancels an in-progress batch. The batch will be in status `cancelling` for up to + * 10 minutes, before changing to `cancelled`, where it will have partial results + * (if any) available in the output file. + */ + cancel(batchId: string, options?: Core.RequestOptions): Core.APIPromise { + return this._client.post(`/batches/${batchId}/cancel`, options); + } +} + +export class BatchesPage extends CursorPage {} + +export interface Batch { + id: string; + + /** + * The time frame within which the batch should be processed. + */ + completion_window: string; + + /** + * The Unix timestamp (in seconds) for when the batch was created. + */ + created_at: number; + + /** + * The OpenAI API endpoint used by the batch. + */ + endpoint: string; + + /** + * The ID of the input file for the batch. + */ + input_file_id: string; + + /** + * The object type, which is always `batch`. + */ + object: 'batch'; + + /** + * The current status of the batch. + */ + status: + | 'validating' + | 'failed' + | 'in_progress' + | 'finalizing' + | 'completed' + | 'expired' + | 'cancelling' + | 'cancelled'; + + /** + * The Unix timestamp (in seconds) for when the batch was cancelled. + */ + cancelled_at?: number; + + /** + * The Unix timestamp (in seconds) for when the batch started cancelling. + */ + cancelling_at?: number; + + /** + * The Unix timestamp (in seconds) for when the batch was completed. + */ + completed_at?: number; + + /** + * The ID of the file containing the outputs of requests with errors. + */ + error_file_id?: string; + + errors?: Batch.Errors; + + /** + * The Unix timestamp (in seconds) for when the batch expired. + */ + expired_at?: number; + + /** + * The Unix timestamp (in seconds) for when the batch will expire. 
+ */ + expires_at?: number; + + /** + * The Unix timestamp (in seconds) for when the batch failed. + */ + failed_at?: number; + + /** + * The Unix timestamp (in seconds) for when the batch started finalizing. + */ + finalizing_at?: number; + + /** + * The Unix timestamp (in seconds) for when the batch started processing. + */ + in_progress_at?: number; + + /** + * Set of 16 key-value pairs that can be attached to an object. This can be useful + * for storing additional information about the object in a structured format. Keys + * can be a maximum of 64 characters long and values can be a maxium of 512 + * characters long. + */ + metadata?: unknown | null; + + /** + * The ID of the file containing the outputs of successfully executed requests. + */ + output_file_id?: string; + + /** + * The request counts for different statuses within the batch. + */ + request_counts?: BatchRequestCounts; +} + +export namespace Batch { + export interface Errors { + data?: Array; + + /** + * The object type, which is always `list`. + */ + object?: string; + } +} + +export interface BatchError { + /** + * An error code identifying the error type. + */ + code?: string; + + /** + * The line number of the input file where the error occurred, if applicable. + */ + line?: number | null; + + /** + * A human-readable message providing more details about the error. + */ + message?: string; + + /** + * The name of the parameter that caused the error, if applicable. + */ + param?: string | null; +} + +/** + * The request counts for different statuses within the batch. + */ +export interface BatchRequestCounts { + /** + * Number of requests that have been completed successfully. + */ + completed: number; + + /** + * Number of requests that have failed. + */ + failed: number; + + /** + * Total number of requests in the batch. + */ + total: number; +} + +export interface BatchCreateParams { + /** + * The time frame within which the batch should be processed. Currently only `24h` + * is supported. + */ + completion_window: '24h'; + + /** + * The endpoint to be used for all requests in the batch. Currently + * `/v1/chat/completions`, `/v1/embeddings`, and `/v1/completions` are supported. + * Note that `/v1/embeddings` batches are also restricted to a maximum of 50,000 + * embedding inputs across all requests in the batch. + */ + endpoint: '/v1/chat/completions' | '/v1/embeddings' | '/v1/completions'; + + /** + * The ID of an uploaded file that contains requests for the new batch. + * + * See [upload file](https://platform.openai.com/docs/api-reference/files/create) + * for how to upload a file. + * + * Your input file must be formatted as a + * [JSONL file](https://platform.openai.com/docs/api-reference/batch/request-input), + * and must be uploaded with the purpose `batch`. The file can contain up to 50,000 + * requests, and can be up to 200 MB in size. + */ + input_file_id: string; + + /** + * Optional custom metadata for the batch. 
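Combining `BatchCreateParams` above with the `Batches` methods defined earlier in this file, the typical flow is: upload a JSONL file with purpose `batch`, create the batch, then poll `retrieve()` until a terminal status. A rough sketch, assuming the `client.files` and `client.batches` accessors from the rest of the SDK; the polling interval is arbitrary:

```ts
import fs from 'node:fs';
import OpenAI from 'openai';

const client = new OpenAI();

async function runBatch(jsonlPath: string) {
  // The input file must be JSONL and uploaded with purpose 'batch'.
  const file = await client.files.create({
    file: fs.createReadStream(jsonlPath),
    purpose: 'batch',
  });

  let batch = await client.batches.create({
    input_file_id: file.id,
    endpoint: '/v1/chat/completions',
    completion_window: '24h', // currently the only supported window
    metadata: { job: 'nightly-eval' }, // illustrative metadata
  });

  // Poll until the batch reaches a terminal status.
  const terminal = new Set(['completed', 'failed', 'expired', 'cancelled']);
  while (!terminal.has(batch.status)) {
    await new Promise((resolve) => setTimeout(resolve, 30_000));
    batch = await client.batches.retrieve(batch.id);
  }

  console.log(batch.status, batch.request_counts, batch.output_file_id);
  return batch;
}
```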
+ */ + metadata?: Record | null; +} + +export interface BatchListParams extends CursorPageParams {} + +Batches.BatchesPage = BatchesPage; + +export declare namespace Batches { + export { + type Batch as Batch, + type BatchError as BatchError, + type BatchRequestCounts as BatchRequestCounts, + BatchesPage as BatchesPage, + type BatchCreateParams as BatchCreateParams, + type BatchListParams as BatchListParams, + }; +} diff --git a/node_modules/openai/src/resources/beta/assistants.ts b/node_modules/openai/src/resources/beta/assistants.ts new file mode 100644 index 0000000..0e657b1 --- /dev/null +++ b/node_modules/openai/src/resources/beta/assistants.ts @@ -0,0 +1,1418 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +import { APIResource } from '../../resource'; +import { isRequestOptions } from '../../core'; +import * as Core from '../../core'; +import * as Shared from '../shared'; +import * as ChatAPI from '../chat/chat'; +import * as MessagesAPI from './threads/messages'; +import * as ThreadsAPI from './threads/threads'; +import * as VectorStoresAPI from './vector-stores/vector-stores'; +import * as RunsAPI from './threads/runs/runs'; +import * as StepsAPI from './threads/runs/steps'; +import { CursorPage, type CursorPageParams } from '../../pagination'; + +export class Assistants extends APIResource { + /** + * Create an assistant with a model and instructions. + */ + create(body: AssistantCreateParams, options?: Core.RequestOptions): Core.APIPromise { + return this._client.post('/assistants', { + body, + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + }); + } + + /** + * Retrieves an assistant. + */ + retrieve(assistantId: string, options?: Core.RequestOptions): Core.APIPromise { + return this._client.get(`/assistants/${assistantId}`, { + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + }); + } + + /** + * Modifies an assistant. + */ + update( + assistantId: string, + body: AssistantUpdateParams, + options?: Core.RequestOptions, + ): Core.APIPromise { + return this._client.post(`/assistants/${assistantId}`, { + body, + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + }); + } + + /** + * Returns a list of assistants. + */ + list( + query?: AssistantListParams, + options?: Core.RequestOptions, + ): Core.PagePromise; + list(options?: Core.RequestOptions): Core.PagePromise; + list( + query: AssistantListParams | Core.RequestOptions = {}, + options?: Core.RequestOptions, + ): Core.PagePromise { + if (isRequestOptions(query)) { + return this.list({}, query); + } + return this._client.getAPIList('/assistants', AssistantsPage, { + query, + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + }); + } + + /** + * Delete an assistant. + */ + del(assistantId: string, options?: Core.RequestOptions): Core.APIPromise { + return this._client.delete(`/assistants/${assistantId}`, { + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + }); + } +} + +export class AssistantsPage extends CursorPage {} + +/** + * Represents an `assistant` that can call the model and use tools. + */ +export interface Assistant { + /** + * The identifier, which can be referenced in API endpoints. + */ + id: string; + + /** + * The Unix timestamp (in seconds) for when the assistant was created. + */ + created_at: number; + + /** + * The description of the assistant. The maximum length is 512 characters. 
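Each `Assistants` method above injects the `OpenAI-Beta: assistants=v2` header, so callers only deal with plain params. A minimal lifecycle sketch, assuming the `client.beta.assistants` accessor wired up in `beta.ts` later in this diff:

```ts
import OpenAI from 'openai';

const client = new OpenAI();

async function assistantLifecycle() {
  // Create a simple code-interpreter assistant.
  const assistant = await client.beta.assistants.create({
    model: 'gpt-4o',
    name: 'data-helper',
    instructions: 'You analyze CSV files the user uploads.',
    tools: [{ type: 'code_interpreter' }],
  });

  // Revise its instructions later on.
  await client.beta.assistants.update(assistant.id, {
    instructions: 'You analyze CSV and JSON files the user uploads.',
  });

  // Newest first; `list()` returns an auto-paginating AssistantsPage.
  for await (const a of client.beta.assistants.list({ order: 'desc', limit: 10 })) {
    console.log(a.id, a.name);
  }

  // Clean up.
  const deleted = await client.beta.assistants.del(assistant.id);
  console.log(deleted.deleted); // true
}
```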
+ */ + description: string | null; + + /** + * The system instructions that the assistant uses. The maximum length is 256,000 + * characters. + */ + instructions: string | null; + + /** + * Set of 16 key-value pairs that can be attached to an object. This can be useful + * for storing additional information about the object in a structured format. Keys + * can be a maximum of 64 characters long and values can be a maxium of 512 + * characters long. + */ + metadata: unknown | null; + + /** + * ID of the model to use. You can use the + * [List models](https://platform.openai.com/docs/api-reference/models/list) API to + * see all of your available models, or see our + * [Model overview](https://platform.openai.com/docs/models) for descriptions of + * them. + */ + model: string; + + /** + * The name of the assistant. The maximum length is 256 characters. + */ + name: string | null; + + /** + * The object type, which is always `assistant`. + */ + object: 'assistant'; + + /** + * A list of tool enabled on the assistant. There can be a maximum of 128 tools per + * assistant. Tools can be of types `code_interpreter`, `file_search`, or + * `function`. + */ + tools: Array; + + /** + * Specifies the format that the model must output. Compatible with + * [GPT-4o](https://platform.openai.com/docs/models#gpt-4o), + * [GPT-4 Turbo](https://platform.openai.com/docs/models#gpt-4-turbo-and-gpt-4), + * and all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. + * + * Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured + * Outputs which ensures the model will match your supplied JSON schema. Learn more + * in the + * [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). + * + * Setting to `{ "type": "json_object" }` enables JSON mode, which ensures the + * message the model generates is valid JSON. + * + * **Important:** when using JSON mode, you **must** also instruct the model to + * produce JSON yourself via a system or user message. Without this, the model may + * generate an unending stream of whitespace until the generation reaches the token + * limit, resulting in a long-running and seemingly "stuck" request. Also note that + * the message content may be partially cut off if `finish_reason="length"`, which + * indicates the generation exceeded `max_tokens` or the conversation exceeded the + * max context length. + */ + response_format?: ThreadsAPI.AssistantResponseFormatOption | null; + + /** + * What sampling temperature to use, between 0 and 2. Higher values like 0.8 will + * make the output more random, while lower values like 0.2 will make it more + * focused and deterministic. + */ + temperature?: number | null; + + /** + * A set of resources that are used by the assistant's tools. The resources are + * specific to the type of tool. For example, the `code_interpreter` tool requires + * a list of file IDs, while the `file_search` tool requires a list of vector store + * IDs. + */ + tool_resources?: Assistant.ToolResources | null; + + /** + * An alternative to sampling with temperature, called nucleus sampling, where the + * model considers the results of the tokens with top_p probability mass. So 0.1 + * means only the tokens comprising the top 10% probability mass are considered. + * + * We generally recommend altering this or temperature but not both. + */ + top_p?: number | null; +} + +export namespace Assistant { + /** + * A set of resources that are used by the assistant's tools. The resources are + * specific to the type of tool. 
For example, the `code_interpreter` tool requires + * a list of file IDs, while the `file_search` tool requires a list of vector store + * IDs. + */ + export interface ToolResources { + code_interpreter?: ToolResources.CodeInterpreter; + + file_search?: ToolResources.FileSearch; + } + + export namespace ToolResources { + export interface CodeInterpreter { + /** + * A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made + * available to the `code_interpreter`` tool. There can be a maximum of 20 files + * associated with the tool. + */ + file_ids?: Array; + } + + export interface FileSearch { + /** + * The ID of the + * [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + * attached to this assistant. There can be a maximum of 1 vector store attached to + * the assistant. + */ + vector_store_ids?: Array; + } + } +} + +export interface AssistantDeleted { + id: string; + + deleted: boolean; + + object: 'assistant.deleted'; +} + +/** + * Represents an event emitted when streaming a Run. + * + * Each event in a server-sent events stream has an `event` and `data` property: + * + * ``` + * event: thread.created + * data: {"id": "thread_123", "object": "thread", ...} + * ``` + * + * We emit events whenever a new object is created, transitions to a new state, or + * is being streamed in parts (deltas). For example, we emit `thread.run.created` + * when a new run is created, `thread.run.completed` when a run completes, and so + * on. When an Assistant chooses to create a message during a run, we emit a + * `thread.message.created event`, a `thread.message.in_progress` event, many + * `thread.message.delta` events, and finally a `thread.message.completed` event. + * + * We may add additional events over time, so we recommend handling unknown events + * gracefully in your code. See the + * [Assistants API quickstart](https://platform.openai.com/docs/assistants/overview) + * to learn how to integrate the Assistants API with streaming. + */ +export type AssistantStreamEvent = + | AssistantStreamEvent.ThreadCreated + | AssistantStreamEvent.ThreadRunCreated + | AssistantStreamEvent.ThreadRunQueued + | AssistantStreamEvent.ThreadRunInProgress + | AssistantStreamEvent.ThreadRunRequiresAction + | AssistantStreamEvent.ThreadRunCompleted + | AssistantStreamEvent.ThreadRunIncomplete + | AssistantStreamEvent.ThreadRunFailed + | AssistantStreamEvent.ThreadRunCancelling + | AssistantStreamEvent.ThreadRunCancelled + | AssistantStreamEvent.ThreadRunExpired + | AssistantStreamEvent.ThreadRunStepCreated + | AssistantStreamEvent.ThreadRunStepInProgress + | AssistantStreamEvent.ThreadRunStepDelta + | AssistantStreamEvent.ThreadRunStepCompleted + | AssistantStreamEvent.ThreadRunStepFailed + | AssistantStreamEvent.ThreadRunStepCancelled + | AssistantStreamEvent.ThreadRunStepExpired + | AssistantStreamEvent.ThreadMessageCreated + | AssistantStreamEvent.ThreadMessageInProgress + | AssistantStreamEvent.ThreadMessageDelta + | AssistantStreamEvent.ThreadMessageCompleted + | AssistantStreamEvent.ThreadMessageIncomplete + | AssistantStreamEvent.ErrorEvent; + +export namespace AssistantStreamEvent { + /** + * Occurs when a new + * [thread](https://platform.openai.com/docs/api-reference/threads/object) is + * created. + */ + export interface ThreadCreated { + /** + * Represents a thread that contains + * [messages](https://platform.openai.com/docs/api-reference/messages). 
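Because every member of `AssistantStreamEvent` carries a literal `event` discriminator, a plain `switch` narrows `data` to the matching payload type (`Run`, `RunStep`, `Message`, a delta, or an error). A small, self-contained handler sketch; obtaining the stream itself (for example via a streaming run request) happens outside this file:

```ts
import type { AssistantStreamEvent } from 'openai/resources/beta/assistants';

// Narrow on the `event` discriminator; TypeScript infers the matching `data` type.
function handleEvent(event: AssistantStreamEvent): void {
  switch (event.event) {
    case 'thread.message.delta':
      // event.data is a MessageDeltaEvent here.
      console.log('message delta', event.data.id);
      break;
    case 'thread.run.requires_action':
      // event.data is a Run waiting on tool outputs.
      console.log('run needs tool outputs', event.data.id);
      break;
    case 'thread.run.completed':
      console.log('run finished', event.data.id);
      break;
    case 'error':
      // event.data is a Shared.ErrorObject.
      console.error('stream error', event.data.message);
      break;
    default:
      // Unknown or unhandled events are ignored, as the docs above recommend.
      break;
  }
}
```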
+ */ + data: ThreadsAPI.Thread; + + event: 'thread.created'; + + /** + * Whether to enable input audio transcription. + */ + enabled?: boolean; + } + + /** + * Occurs when a new + * [run](https://platform.openai.com/docs/api-reference/runs/object) is created. + */ + export interface ThreadRunCreated { + /** + * Represents an execution run on a + * [thread](https://platform.openai.com/docs/api-reference/threads). + */ + data: RunsAPI.Run; + + event: 'thread.run.created'; + } + + /** + * Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) + * moves to a `queued` status. + */ + export interface ThreadRunQueued { + /** + * Represents an execution run on a + * [thread](https://platform.openai.com/docs/api-reference/threads). + */ + data: RunsAPI.Run; + + event: 'thread.run.queued'; + } + + /** + * Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) + * moves to an `in_progress` status. + */ + export interface ThreadRunInProgress { + /** + * Represents an execution run on a + * [thread](https://platform.openai.com/docs/api-reference/threads). + */ + data: RunsAPI.Run; + + event: 'thread.run.in_progress'; + } + + /** + * Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) + * moves to a `requires_action` status. + */ + export interface ThreadRunRequiresAction { + /** + * Represents an execution run on a + * [thread](https://platform.openai.com/docs/api-reference/threads). + */ + data: RunsAPI.Run; + + event: 'thread.run.requires_action'; + } + + /** + * Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) + * is completed. + */ + export interface ThreadRunCompleted { + /** + * Represents an execution run on a + * [thread](https://platform.openai.com/docs/api-reference/threads). + */ + data: RunsAPI.Run; + + event: 'thread.run.completed'; + } + + /** + * Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) + * ends with status `incomplete`. + */ + export interface ThreadRunIncomplete { + /** + * Represents an execution run on a + * [thread](https://platform.openai.com/docs/api-reference/threads). + */ + data: RunsAPI.Run; + + event: 'thread.run.incomplete'; + } + + /** + * Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) + * fails. + */ + export interface ThreadRunFailed { + /** + * Represents an execution run on a + * [thread](https://platform.openai.com/docs/api-reference/threads). + */ + data: RunsAPI.Run; + + event: 'thread.run.failed'; + } + + /** + * Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) + * moves to a `cancelling` status. + */ + export interface ThreadRunCancelling { + /** + * Represents an execution run on a + * [thread](https://platform.openai.com/docs/api-reference/threads). + */ + data: RunsAPI.Run; + + event: 'thread.run.cancelling'; + } + + /** + * Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) + * is cancelled. + */ + export interface ThreadRunCancelled { + /** + * Represents an execution run on a + * [thread](https://platform.openai.com/docs/api-reference/threads). + */ + data: RunsAPI.Run; + + event: 'thread.run.cancelled'; + } + + /** + * Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) + * expires. + */ + export interface ThreadRunExpired { + /** + * Represents an execution run on a + * [thread](https://platform.openai.com/docs/api-reference/threads). 
+ */ + data: RunsAPI.Run; + + event: 'thread.run.expired'; + } + + /** + * Occurs when a + * [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + * is created. + */ + export interface ThreadRunStepCreated { + /** + * Represents a step in execution of a run. + */ + data: StepsAPI.RunStep; + + event: 'thread.run.step.created'; + } + + /** + * Occurs when a + * [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + * moves to an `in_progress` state. + */ + export interface ThreadRunStepInProgress { + /** + * Represents a step in execution of a run. + */ + data: StepsAPI.RunStep; + + event: 'thread.run.step.in_progress'; + } + + /** + * Occurs when parts of a + * [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + * are being streamed. + */ + export interface ThreadRunStepDelta { + /** + * Represents a run step delta i.e. any changed fields on a run step during + * streaming. + */ + data: StepsAPI.RunStepDeltaEvent; + + event: 'thread.run.step.delta'; + } + + /** + * Occurs when a + * [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + * is completed. + */ + export interface ThreadRunStepCompleted { + /** + * Represents a step in execution of a run. + */ + data: StepsAPI.RunStep; + + event: 'thread.run.step.completed'; + } + + /** + * Occurs when a + * [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + * fails. + */ + export interface ThreadRunStepFailed { + /** + * Represents a step in execution of a run. + */ + data: StepsAPI.RunStep; + + event: 'thread.run.step.failed'; + } + + /** + * Occurs when a + * [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + * is cancelled. + */ + export interface ThreadRunStepCancelled { + /** + * Represents a step in execution of a run. + */ + data: StepsAPI.RunStep; + + event: 'thread.run.step.cancelled'; + } + + /** + * Occurs when a + * [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + * expires. + */ + export interface ThreadRunStepExpired { + /** + * Represents a step in execution of a run. + */ + data: StepsAPI.RunStep; + + event: 'thread.run.step.expired'; + } + + /** + * Occurs when a + * [message](https://platform.openai.com/docs/api-reference/messages/object) is + * created. + */ + export interface ThreadMessageCreated { + /** + * Represents a message within a + * [thread](https://platform.openai.com/docs/api-reference/threads). + */ + data: MessagesAPI.Message; + + event: 'thread.message.created'; + } + + /** + * Occurs when a + * [message](https://platform.openai.com/docs/api-reference/messages/object) moves + * to an `in_progress` state. + */ + export interface ThreadMessageInProgress { + /** + * Represents a message within a + * [thread](https://platform.openai.com/docs/api-reference/threads). + */ + data: MessagesAPI.Message; + + event: 'thread.message.in_progress'; + } + + /** + * Occurs when parts of a + * [Message](https://platform.openai.com/docs/api-reference/messages/object) are + * being streamed. + */ + export interface ThreadMessageDelta { + /** + * Represents a message delta i.e. any changed fields on a message during + * streaming. + */ + data: MessagesAPI.MessageDeltaEvent; + + event: 'thread.message.delta'; + } + + /** + * Occurs when a + * [message](https://platform.openai.com/docs/api-reference/messages/object) is + * completed. 
+ */ + export interface ThreadMessageCompleted { + /** + * Represents a message within a + * [thread](https://platform.openai.com/docs/api-reference/threads). + */ + data: MessagesAPI.Message; + + event: 'thread.message.completed'; + } + + /** + * Occurs when a + * [message](https://platform.openai.com/docs/api-reference/messages/object) ends + * before it is completed. + */ + export interface ThreadMessageIncomplete { + /** + * Represents a message within a + * [thread](https://platform.openai.com/docs/api-reference/threads). + */ + data: MessagesAPI.Message; + + event: 'thread.message.incomplete'; + } + + /** + * Occurs when an + * [error](https://platform.openai.com/docs/guides/error-codes#api-errors) occurs. + * This can happen due to an internal server error or a timeout. + */ + export interface ErrorEvent { + data: Shared.ErrorObject; + + event: 'error'; + } +} + +export type AssistantTool = CodeInterpreterTool | FileSearchTool | FunctionTool; + +export interface CodeInterpreterTool { + /** + * The type of tool being defined: `code_interpreter` + */ + type: 'code_interpreter'; +} + +export interface FileSearchTool { + /** + * The type of tool being defined: `file_search` + */ + type: 'file_search'; + + /** + * Overrides for the file search tool. + */ + file_search?: FileSearchTool.FileSearch; +} + +export namespace FileSearchTool { + /** + * Overrides for the file search tool. + */ + export interface FileSearch { + /** + * The maximum number of results the file search tool should output. The default is + * 20 for `gpt-4*` models and 5 for `gpt-3.5-turbo`. This number should be between + * 1 and 50 inclusive. + * + * Note that the file search tool may output fewer than `max_num_results` results. + * See the + * [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) + * for more information. + */ + max_num_results?: number; + + /** + * The ranking options for the file search. If not specified, the file search tool + * will use the `auto` ranker and a score_threshold of 0. + * + * See the + * [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) + * for more information. + */ + ranking_options?: FileSearch.RankingOptions; + } + + export namespace FileSearch { + /** + * The ranking options for the file search. If not specified, the file search tool + * will use the `auto` ranker and a score_threshold of 0. + * + * See the + * [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) + * for more information. + */ + export interface RankingOptions { + /** + * The score threshold for the file search. All values must be a floating point + * number between 0 and 1. + */ + score_threshold: number; + + /** + * The ranker to use for the file search. If not specified will use the `auto` + * ranker. + */ + ranker?: 'auto' | 'default_2024_08_21'; + } + } +} + +export interface FunctionTool { + function: Shared.FunctionDefinition; + + /** + * The type of tool being defined: `function` + */ + type: 'function'; +} + +/** + * Occurs when a + * [message](https://platform.openai.com/docs/api-reference/messages/object) is + * created. 
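The `AssistantTool` union above is what `tools` arrays accept. A small configuration sketch combining a tuned `file_search` tool (using the `RankingOptions` documented above) with a strict `function` tool; the function name and schema are purely illustrative:

```ts
import type { AssistantTool } from 'openai/resources/beta/assistants';

const tools: AssistantTool[] = [
  {
    type: 'file_search',
    file_search: {
      max_num_results: 10, // must be between 1 and 50
      ranking_options: {
        ranker: 'auto',
        score_threshold: 0.5, // drop weakly matching chunks
      },
    },
  },
  {
    type: 'function',
    function: {
      name: 'get_order_status', // hypothetical function, for illustration only
      description: 'Look up the fulfilment status of an order.',
      parameters: {
        type: 'object',
        properties: { order_id: { type: 'string' } },
        required: ['order_id'],
        additionalProperties: false,
      },
      strict: true,
    },
  },
];
```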
+ */ +export type MessageStreamEvent = + | MessageStreamEvent.ThreadMessageCreated + | MessageStreamEvent.ThreadMessageInProgress + | MessageStreamEvent.ThreadMessageDelta + | MessageStreamEvent.ThreadMessageCompleted + | MessageStreamEvent.ThreadMessageIncomplete; + +export namespace MessageStreamEvent { + /** + * Occurs when a + * [message](https://platform.openai.com/docs/api-reference/messages/object) is + * created. + */ + export interface ThreadMessageCreated { + /** + * Represents a message within a + * [thread](https://platform.openai.com/docs/api-reference/threads). + */ + data: MessagesAPI.Message; + + event: 'thread.message.created'; + } + + /** + * Occurs when a + * [message](https://platform.openai.com/docs/api-reference/messages/object) moves + * to an `in_progress` state. + */ + export interface ThreadMessageInProgress { + /** + * Represents a message within a + * [thread](https://platform.openai.com/docs/api-reference/threads). + */ + data: MessagesAPI.Message; + + event: 'thread.message.in_progress'; + } + + /** + * Occurs when parts of a + * [Message](https://platform.openai.com/docs/api-reference/messages/object) are + * being streamed. + */ + export interface ThreadMessageDelta { + /** + * Represents a message delta i.e. any changed fields on a message during + * streaming. + */ + data: MessagesAPI.MessageDeltaEvent; + + event: 'thread.message.delta'; + } + + /** + * Occurs when a + * [message](https://platform.openai.com/docs/api-reference/messages/object) is + * completed. + */ + export interface ThreadMessageCompleted { + /** + * Represents a message within a + * [thread](https://platform.openai.com/docs/api-reference/threads). + */ + data: MessagesAPI.Message; + + event: 'thread.message.completed'; + } + + /** + * Occurs when a + * [message](https://platform.openai.com/docs/api-reference/messages/object) ends + * before it is completed. + */ + export interface ThreadMessageIncomplete { + /** + * Represents a message within a + * [thread](https://platform.openai.com/docs/api-reference/threads). + */ + data: MessagesAPI.Message; + + event: 'thread.message.incomplete'; + } +} + +/** + * Occurs when a + * [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + * is created. + */ +export type RunStepStreamEvent = + | RunStepStreamEvent.ThreadRunStepCreated + | RunStepStreamEvent.ThreadRunStepInProgress + | RunStepStreamEvent.ThreadRunStepDelta + | RunStepStreamEvent.ThreadRunStepCompleted + | RunStepStreamEvent.ThreadRunStepFailed + | RunStepStreamEvent.ThreadRunStepCancelled + | RunStepStreamEvent.ThreadRunStepExpired; + +export namespace RunStepStreamEvent { + /** + * Occurs when a + * [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + * is created. + */ + export interface ThreadRunStepCreated { + /** + * Represents a step in execution of a run. + */ + data: StepsAPI.RunStep; + + event: 'thread.run.step.created'; + } + + /** + * Occurs when a + * [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + * moves to an `in_progress` state. + */ + export interface ThreadRunStepInProgress { + /** + * Represents a step in execution of a run. + */ + data: StepsAPI.RunStep; + + event: 'thread.run.step.in_progress'; + } + + /** + * Occurs when parts of a + * [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + * are being streamed. + */ + export interface ThreadRunStepDelta { + /** + * Represents a run step delta i.e. 
any changed fields on a run step during + * streaming. + */ + data: StepsAPI.RunStepDeltaEvent; + + event: 'thread.run.step.delta'; + } + + /** + * Occurs when a + * [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + * is completed. + */ + export interface ThreadRunStepCompleted { + /** + * Represents a step in execution of a run. + */ + data: StepsAPI.RunStep; + + event: 'thread.run.step.completed'; + } + + /** + * Occurs when a + * [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + * fails. + */ + export interface ThreadRunStepFailed { + /** + * Represents a step in execution of a run. + */ + data: StepsAPI.RunStep; + + event: 'thread.run.step.failed'; + } + + /** + * Occurs when a + * [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + * is cancelled. + */ + export interface ThreadRunStepCancelled { + /** + * Represents a step in execution of a run. + */ + data: StepsAPI.RunStep; + + event: 'thread.run.step.cancelled'; + } + + /** + * Occurs when a + * [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + * expires. + */ + export interface ThreadRunStepExpired { + /** + * Represents a step in execution of a run. + */ + data: StepsAPI.RunStep; + + event: 'thread.run.step.expired'; + } +} + +/** + * Occurs when a new + * [run](https://platform.openai.com/docs/api-reference/runs/object) is created. + */ +export type RunStreamEvent = + | RunStreamEvent.ThreadRunCreated + | RunStreamEvent.ThreadRunQueued + | RunStreamEvent.ThreadRunInProgress + | RunStreamEvent.ThreadRunRequiresAction + | RunStreamEvent.ThreadRunCompleted + | RunStreamEvent.ThreadRunIncomplete + | RunStreamEvent.ThreadRunFailed + | RunStreamEvent.ThreadRunCancelling + | RunStreamEvent.ThreadRunCancelled + | RunStreamEvent.ThreadRunExpired; + +export namespace RunStreamEvent { + /** + * Occurs when a new + * [run](https://platform.openai.com/docs/api-reference/runs/object) is created. + */ + export interface ThreadRunCreated { + /** + * Represents an execution run on a + * [thread](https://platform.openai.com/docs/api-reference/threads). + */ + data: RunsAPI.Run; + + event: 'thread.run.created'; + } + + /** + * Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) + * moves to a `queued` status. + */ + export interface ThreadRunQueued { + /** + * Represents an execution run on a + * [thread](https://platform.openai.com/docs/api-reference/threads). + */ + data: RunsAPI.Run; + + event: 'thread.run.queued'; + } + + /** + * Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) + * moves to an `in_progress` status. + */ + export interface ThreadRunInProgress { + /** + * Represents an execution run on a + * [thread](https://platform.openai.com/docs/api-reference/threads). + */ + data: RunsAPI.Run; + + event: 'thread.run.in_progress'; + } + + /** + * Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) + * moves to a `requires_action` status. + */ + export interface ThreadRunRequiresAction { + /** + * Represents an execution run on a + * [thread](https://platform.openai.com/docs/api-reference/threads). + */ + data: RunsAPI.Run; + + event: 'thread.run.requires_action'; + } + + /** + * Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) + * is completed. 
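`RunStreamEvent` mirrors the run lifecycle, so when consuming a stream it is handy to have a type guard for the terminal transitions. A sketch grounded only in the event names declared in the union above:

```ts
import type { RunStreamEvent } from 'openai/resources/beta/assistants';

// The run lifecycle events that end a run, per the union above.
const TERMINAL_RUN_EVENTS = [
  'thread.run.completed',
  'thread.run.incomplete',
  'thread.run.failed',
  'thread.run.cancelled',
  'thread.run.expired',
] as const;

type TerminalRunEvent = Extract<RunStreamEvent, { event: (typeof TERMINAL_RUN_EVENTS)[number] }>;

function isTerminalRunEvent(event: RunStreamEvent): event is TerminalRunEvent {
  return (TERMINAL_RUN_EVENTS as readonly string[]).includes(event.event);
}
```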
+ */ + export interface ThreadRunCompleted { + /** + * Represents an execution run on a + * [thread](https://platform.openai.com/docs/api-reference/threads). + */ + data: RunsAPI.Run; + + event: 'thread.run.completed'; + } + + /** + * Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) + * ends with status `incomplete`. + */ + export interface ThreadRunIncomplete { + /** + * Represents an execution run on a + * [thread](https://platform.openai.com/docs/api-reference/threads). + */ + data: RunsAPI.Run; + + event: 'thread.run.incomplete'; + } + + /** + * Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) + * fails. + */ + export interface ThreadRunFailed { + /** + * Represents an execution run on a + * [thread](https://platform.openai.com/docs/api-reference/threads). + */ + data: RunsAPI.Run; + + event: 'thread.run.failed'; + } + + /** + * Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) + * moves to a `cancelling` status. + */ + export interface ThreadRunCancelling { + /** + * Represents an execution run on a + * [thread](https://platform.openai.com/docs/api-reference/threads). + */ + data: RunsAPI.Run; + + event: 'thread.run.cancelling'; + } + + /** + * Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) + * is cancelled. + */ + export interface ThreadRunCancelled { + /** + * Represents an execution run on a + * [thread](https://platform.openai.com/docs/api-reference/threads). + */ + data: RunsAPI.Run; + + event: 'thread.run.cancelled'; + } + + /** + * Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) + * expires. + */ + export interface ThreadRunExpired { + /** + * Represents an execution run on a + * [thread](https://platform.openai.com/docs/api-reference/threads). + */ + data: RunsAPI.Run; + + event: 'thread.run.expired'; + } +} + +/** + * Occurs when a new + * [thread](https://platform.openai.com/docs/api-reference/threads/object) is + * created. + */ +export interface ThreadStreamEvent { + /** + * Represents a thread that contains + * [messages](https://platform.openai.com/docs/api-reference/messages). + */ + data: ThreadsAPI.Thread; + + event: 'thread.created'; + + /** + * Whether to enable input audio transcription. + */ + enabled?: boolean; +} + +export interface AssistantCreateParams { + /** + * ID of the model to use. You can use the + * [List models](https://platform.openai.com/docs/api-reference/models/list) API to + * see all of your available models, or see our + * [Model overview](https://platform.openai.com/docs/models) for descriptions of + * them. + */ + model: (string & {}) | ChatAPI.ChatModel; + + /** + * The description of the assistant. The maximum length is 512 characters. + */ + description?: string | null; + + /** + * The system instructions that the assistant uses. The maximum length is 256,000 + * characters. + */ + instructions?: string | null; + + /** + * Set of 16 key-value pairs that can be attached to an object. This can be useful + * for storing additional information about the object in a structured format. Keys + * can be a maximum of 64 characters long and values can be a maxium of 512 + * characters long. + */ + metadata?: unknown | null; + + /** + * The name of the assistant. The maximum length is 256 characters. + */ + name?: string | null; + + /** + * Specifies the format that the model must output. 
Compatible with + * [GPT-4o](https://platform.openai.com/docs/models#gpt-4o), + * [GPT-4 Turbo](https://platform.openai.com/docs/models#gpt-4-turbo-and-gpt-4), + * and all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. + * + * Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured + * Outputs which ensures the model will match your supplied JSON schema. Learn more + * in the + * [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). + * + * Setting to `{ "type": "json_object" }` enables JSON mode, which ensures the + * message the model generates is valid JSON. + * + * **Important:** when using JSON mode, you **must** also instruct the model to + * produce JSON yourself via a system or user message. Without this, the model may + * generate an unending stream of whitespace until the generation reaches the token + * limit, resulting in a long-running and seemingly "stuck" request. Also note that + * the message content may be partially cut off if `finish_reason="length"`, which + * indicates the generation exceeded `max_tokens` or the conversation exceeded the + * max context length. + */ + response_format?: ThreadsAPI.AssistantResponseFormatOption | null; + + /** + * What sampling temperature to use, between 0 and 2. Higher values like 0.8 will + * make the output more random, while lower values like 0.2 will make it more + * focused and deterministic. + */ + temperature?: number | null; + + /** + * A set of resources that are used by the assistant's tools. The resources are + * specific to the type of tool. For example, the `code_interpreter` tool requires + * a list of file IDs, while the `file_search` tool requires a list of vector store + * IDs. + */ + tool_resources?: AssistantCreateParams.ToolResources | null; + + /** + * A list of tool enabled on the assistant. There can be a maximum of 128 tools per + * assistant. Tools can be of types `code_interpreter`, `file_search`, or + * `function`. + */ + tools?: Array; + + /** + * An alternative to sampling with temperature, called nucleus sampling, where the + * model considers the results of the tokens with top_p probability mass. So 0.1 + * means only the tokens comprising the top 10% probability mass are considered. + * + * We generally recommend altering this or temperature but not both. + */ + top_p?: number | null; +} + +export namespace AssistantCreateParams { + /** + * A set of resources that are used by the assistant's tools. The resources are + * specific to the type of tool. For example, the `code_interpreter` tool requires + * a list of file IDs, while the `file_search` tool requires a list of vector store + * IDs. + */ + export interface ToolResources { + code_interpreter?: ToolResources.CodeInterpreter; + + file_search?: ToolResources.FileSearch; + } + + export namespace ToolResources { + export interface CodeInterpreter { + /** + * A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made + * available to the `code_interpreter` tool. There can be a maximum of 20 files + * associated with the tool. + */ + file_ids?: Array; + } + + export interface FileSearch { + /** + * The + * [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + * attached to this assistant. There can be a maximum of 1 vector store attached to + * the assistant. 
+ */ + vector_store_ids?: Array; + + /** + * A helper to create a + * [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + * with file_ids and attach it to this assistant. There can be a maximum of 1 + * vector store attached to the assistant. + */ + vector_stores?: Array; + } + + export namespace FileSearch { + export interface VectorStore { + /** + * The chunking strategy used to chunk the file(s). If not set, will use the `auto` + * strategy. Only applicable if `file_ids` is non-empty. + */ + chunking_strategy?: VectorStoresAPI.FileChunkingStrategyParam; + + /** + * A list of [file](https://platform.openai.com/docs/api-reference/files) IDs to + * add to the vector store. There can be a maximum of 10000 files in a vector + * store. + */ + file_ids?: Array; + + /** + * Set of 16 key-value pairs that can be attached to a vector store. This can be + * useful for storing additional information about the vector store in a structured + * format. Keys can be a maximum of 64 characters long and values can be a maxium + * of 512 characters long. + */ + metadata?: unknown; + } + } + } +} + +export interface AssistantUpdateParams { + /** + * The description of the assistant. The maximum length is 512 characters. + */ + description?: string | null; + + /** + * The system instructions that the assistant uses. The maximum length is 256,000 + * characters. + */ + instructions?: string | null; + + /** + * Set of 16 key-value pairs that can be attached to an object. This can be useful + * for storing additional information about the object in a structured format. Keys + * can be a maximum of 64 characters long and values can be a maxium of 512 + * characters long. + */ + metadata?: unknown | null; + + /** + * ID of the model to use. You can use the + * [List models](https://platform.openai.com/docs/api-reference/models/list) API to + * see all of your available models, or see our + * [Model overview](https://platform.openai.com/docs/models) for descriptions of + * them. + */ + model?: string; + + /** + * The name of the assistant. The maximum length is 256 characters. + */ + name?: string | null; + + /** + * Specifies the format that the model must output. Compatible with + * [GPT-4o](https://platform.openai.com/docs/models#gpt-4o), + * [GPT-4 Turbo](https://platform.openai.com/docs/models#gpt-4-turbo-and-gpt-4), + * and all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. + * + * Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured + * Outputs which ensures the model will match your supplied JSON schema. Learn more + * in the + * [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). + * + * Setting to `{ "type": "json_object" }` enables JSON mode, which ensures the + * message the model generates is valid JSON. + * + * **Important:** when using JSON mode, you **must** also instruct the model to + * produce JSON yourself via a system or user message. Without this, the model may + * generate an unending stream of whitespace until the generation reaches the token + * limit, resulting in a long-running and seemingly "stuck" request. Also note that + * the message content may be partially cut off if `finish_reason="length"`, which + * indicates the generation exceeded `max_tokens` or the conversation exceeded the + * max context length. + */ + response_format?: ThreadsAPI.AssistantResponseFormatOption | null; + + /** + * What sampling temperature to use, between 0 and 2. 
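The `vector_stores` helper above lets `assistants.create` build and attach a vector store in one call instead of creating it separately. A hedged sketch of `AssistantCreateParams` using that helper plus a `code_interpreter` file list; the file IDs are placeholders, and the `auto` chunking strategy shape is assumed from the vector-stores module imported here:

```ts
import type { AssistantCreateParams } from 'openai/resources/beta/assistants';

const params: AssistantCreateParams = {
  model: 'gpt-4o',
  name: 'docs-assistant',
  instructions: 'Answer questions using the attached project documents.',
  tools: [{ type: 'file_search' }, { type: 'code_interpreter' }],
  tool_resources: {
    file_search: {
      // Creates a vector store from these files and attaches it (max 1 per assistant).
      vector_stores: [
        {
          file_ids: ['file-abc123', 'file-def456'], // placeholder IDs
          chunking_strategy: { type: 'auto' }, // assumed AutoFileChunkingStrategyParam shape
          metadata: { project: 'docs' },
        },
      ],
    },
    code_interpreter: {
      file_ids: ['file-abc123'], // placeholder ID
    },
  },
};

// Later: const assistant = await client.beta.assistants.create(params);
```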
Higher values like 0.8 will + * make the output more random, while lower values like 0.2 will make it more + * focused and deterministic. + */ + temperature?: number | null; + + /** + * A set of resources that are used by the assistant's tools. The resources are + * specific to the type of tool. For example, the `code_interpreter` tool requires + * a list of file IDs, while the `file_search` tool requires a list of vector store + * IDs. + */ + tool_resources?: AssistantUpdateParams.ToolResources | null; + + /** + * A list of tool enabled on the assistant. There can be a maximum of 128 tools per + * assistant. Tools can be of types `code_interpreter`, `file_search`, or + * `function`. + */ + tools?: Array; + + /** + * An alternative to sampling with temperature, called nucleus sampling, where the + * model considers the results of the tokens with top_p probability mass. So 0.1 + * means only the tokens comprising the top 10% probability mass are considered. + * + * We generally recommend altering this or temperature but not both. + */ + top_p?: number | null; +} + +export namespace AssistantUpdateParams { + /** + * A set of resources that are used by the assistant's tools. The resources are + * specific to the type of tool. For example, the `code_interpreter` tool requires + * a list of file IDs, while the `file_search` tool requires a list of vector store + * IDs. + */ + export interface ToolResources { + code_interpreter?: ToolResources.CodeInterpreter; + + file_search?: ToolResources.FileSearch; + } + + export namespace ToolResources { + export interface CodeInterpreter { + /** + * Overrides the list of + * [file](https://platform.openai.com/docs/api-reference/files) IDs made available + * to the `code_interpreter` tool. There can be a maximum of 20 files associated + * with the tool. + */ + file_ids?: Array; + } + + export interface FileSearch { + /** + * Overrides the + * [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + * attached to this assistant. There can be a maximum of 1 vector store attached to + * the assistant. + */ + vector_store_ids?: Array; + } + } +} + +export interface AssistantListParams extends CursorPageParams { + /** + * A cursor for use in pagination. `before` is an object ID that defines your place + * in the list. For instance, if you make a list request and receive 100 objects, + * starting with obj_foo, your subsequent call can include before=obj_foo in order + * to fetch the previous page of the list. + */ + before?: string; + + /** + * Sort order by the `created_at` timestamp of the objects. `asc` for ascending + * order and `desc` for descending order. 
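`AssistantUpdateParams` and `AssistantListParams` are consumed by the resource's `update` and `list` methods. A sketch of both; the assistant ID is a placeholder, and the `for await` loop relies on the SDK's cursor pages following the `after`/`before` cursors automatically:

```ts
import OpenAI from 'openai';

const client = new OpenAI();

async function main() {
  // Patch a single field; unspecified fields are left unchanged.
  await client.beta.assistants.update('asst_abc123', {
    instructions: 'Be concise and cite the source file for every answer.',
  });

  // Newest first; auto-pagination walks subsequent pages for you.
  for await (const assistant of client.beta.assistants.list({ order: 'desc', limit: 20 })) {
    console.log(assistant.id, assistant.name);
  }
}

main();
```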
+ */ + order?: 'asc' | 'desc'; +} + +Assistants.AssistantsPage = AssistantsPage; + +export declare namespace Assistants { + export { + type Assistant as Assistant, + type AssistantDeleted as AssistantDeleted, + type AssistantStreamEvent as AssistantStreamEvent, + type AssistantTool as AssistantTool, + type CodeInterpreterTool as CodeInterpreterTool, + type FileSearchTool as FileSearchTool, + type FunctionTool as FunctionTool, + type MessageStreamEvent as MessageStreamEvent, + type RunStepStreamEvent as RunStepStreamEvent, + type RunStreamEvent as RunStreamEvent, + type ThreadStreamEvent as ThreadStreamEvent, + AssistantsPage as AssistantsPage, + type AssistantCreateParams as AssistantCreateParams, + type AssistantUpdateParams as AssistantUpdateParams, + type AssistantListParams as AssistantListParams, + }; +} diff --git a/node_modules/openai/src/resources/beta/beta.ts b/node_modules/openai/src/resources/beta/beta.ts new file mode 100644 index 0000000..b904abe --- /dev/null +++ b/node_modules/openai/src/resources/beta/beta.ts @@ -0,0 +1,128 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +import { APIResource } from '../../resource'; +import * as AssistantsAPI from './assistants'; +import * as ChatAPI from './chat/chat'; +import { + Assistant, + AssistantCreateParams, + AssistantDeleted, + AssistantListParams, + AssistantStreamEvent, + AssistantTool, + AssistantUpdateParams, + Assistants, + AssistantsPage, + CodeInterpreterTool, + FileSearchTool, + FunctionTool, + MessageStreamEvent, + RunStepStreamEvent, + RunStreamEvent, + ThreadStreamEvent, +} from './assistants'; +import * as ThreadsAPI from './threads/threads'; +import { + AssistantResponseFormatOption, + AssistantToolChoice, + AssistantToolChoiceFunction, + AssistantToolChoiceOption, + Thread, + ThreadCreateAndRunParams, + ThreadCreateAndRunParamsNonStreaming, + ThreadCreateAndRunParamsStreaming, + ThreadCreateAndRunPollParams, + ThreadCreateAndRunStreamParams, + ThreadCreateParams, + ThreadDeleted, + ThreadUpdateParams, + Threads, +} from './threads/threads'; +import * as VectorStoresAPI from './vector-stores/vector-stores'; +import { + AutoFileChunkingStrategyParam, + FileChunkingStrategy, + FileChunkingStrategyParam, + OtherFileChunkingStrategyObject, + StaticFileChunkingStrategy, + StaticFileChunkingStrategyObject, + StaticFileChunkingStrategyParam, + VectorStore, + VectorStoreCreateParams, + VectorStoreDeleted, + VectorStoreListParams, + VectorStoreUpdateParams, + VectorStores, + VectorStoresPage, +} from './vector-stores/vector-stores'; +import { Chat } from './chat/chat'; + +export class Beta extends APIResource { + vectorStores: VectorStoresAPI.VectorStores = new VectorStoresAPI.VectorStores(this._client); + chat: ChatAPI.Chat = new ChatAPI.Chat(this._client); + assistants: AssistantsAPI.Assistants = new AssistantsAPI.Assistants(this._client); + threads: ThreadsAPI.Threads = new ThreadsAPI.Threads(this._client); +} + +Beta.VectorStores = VectorStores; +Beta.VectorStoresPage = VectorStoresPage; +Beta.Assistants = Assistants; +Beta.AssistantsPage = AssistantsPage; +Beta.Threads = Threads; + +export declare namespace Beta { + export { + VectorStores as VectorStores, + type AutoFileChunkingStrategyParam as AutoFileChunkingStrategyParam, + type FileChunkingStrategy as FileChunkingStrategy, + type FileChunkingStrategyParam as FileChunkingStrategyParam, + type OtherFileChunkingStrategyObject as OtherFileChunkingStrategyObject, + type StaticFileChunkingStrategy as 
StaticFileChunkingStrategy, + type StaticFileChunkingStrategyObject as StaticFileChunkingStrategyObject, + type StaticFileChunkingStrategyParam as StaticFileChunkingStrategyParam, + type VectorStore as VectorStore, + type VectorStoreDeleted as VectorStoreDeleted, + VectorStoresPage as VectorStoresPage, + type VectorStoreCreateParams as VectorStoreCreateParams, + type VectorStoreUpdateParams as VectorStoreUpdateParams, + type VectorStoreListParams as VectorStoreListParams, + }; + + export { Chat }; + + export { + Assistants as Assistants, + type Assistant as Assistant, + type AssistantDeleted as AssistantDeleted, + type AssistantStreamEvent as AssistantStreamEvent, + type AssistantTool as AssistantTool, + type CodeInterpreterTool as CodeInterpreterTool, + type FileSearchTool as FileSearchTool, + type FunctionTool as FunctionTool, + type MessageStreamEvent as MessageStreamEvent, + type RunStepStreamEvent as RunStepStreamEvent, + type RunStreamEvent as RunStreamEvent, + type ThreadStreamEvent as ThreadStreamEvent, + AssistantsPage as AssistantsPage, + type AssistantCreateParams as AssistantCreateParams, + type AssistantUpdateParams as AssistantUpdateParams, + type AssistantListParams as AssistantListParams, + }; + + export { + Threads as Threads, + type AssistantResponseFormatOption as AssistantResponseFormatOption, + type AssistantToolChoice as AssistantToolChoice, + type AssistantToolChoiceFunction as AssistantToolChoiceFunction, + type AssistantToolChoiceOption as AssistantToolChoiceOption, + type Thread as Thread, + type ThreadDeleted as ThreadDeleted, + type ThreadCreateParams as ThreadCreateParams, + type ThreadUpdateParams as ThreadUpdateParams, + type ThreadCreateAndRunParams as ThreadCreateAndRunParams, + type ThreadCreateAndRunParamsNonStreaming as ThreadCreateAndRunParamsNonStreaming, + type ThreadCreateAndRunParamsStreaming as ThreadCreateAndRunParamsStreaming, + type ThreadCreateAndRunPollParams, + type ThreadCreateAndRunStreamParams, + }; +} diff --git a/node_modules/openai/src/resources/beta/chat/chat.ts b/node_modules/openai/src/resources/beta/chat/chat.ts new file mode 100644 index 0000000..110ae46 --- /dev/null +++ b/node_modules/openai/src/resources/beta/chat/chat.ts @@ -0,0 +1,12 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +import { APIResource } from '../../../resource'; +import * as CompletionsAPI from './completions'; + +export class Chat extends APIResource { + completions: CompletionsAPI.Completions = new CompletionsAPI.Completions(this._client); +} + +export namespace Chat { + export import Completions = CompletionsAPI.Completions; +} diff --git a/node_modules/openai/src/resources/beta/chat/completions.ts b/node_modules/openai/src/resources/beta/chat/completions.ts new file mode 100644 index 0000000..c9360a9 --- /dev/null +++ b/node_modules/openai/src/resources/beta/chat/completions.ts @@ -0,0 +1,161 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
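In practice the `Beta` class above is reached as `client.beta` on the top-level client, with one instance of each sub-resource per client. A short orientation sketch (nothing here touches the network):

```ts
import OpenAI from 'openai';

const client = new OpenAI(); // reads OPENAI_API_KEY from the environment

// Each beta surface is a property on `client.beta`, mirroring the Beta class:
const { assistants, threads, vectorStores, chat } = client.beta;
// assistants       -> assistant CRUD and listing
// threads          -> threads, plus nested messages and runs
// vectorStores     -> file_search storage
// chat.completions -> parse / runTools / stream helpers
void assistants; void threads; void vectorStores; void chat;
```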
+ +import * as Core from '../../../core'; +import { APIResource } from '../../../resource'; +import { ChatCompletionRunner, ChatCompletionFunctionRunnerParams } from '../../../lib/ChatCompletionRunner'; +import { + ChatCompletionStreamingRunner, + ChatCompletionStreamingFunctionRunnerParams, +} from '../../../lib/ChatCompletionStreamingRunner'; +import { BaseFunctionsArgs } from '../../../lib/RunnableFunction'; +import { RunnerOptions } from '../../../lib/AbstractChatCompletionRunner'; +import { ChatCompletionToolRunnerParams } from '../../../lib/ChatCompletionRunner'; +import { ChatCompletionStreamingToolRunnerParams } from '../../../lib/ChatCompletionStreamingRunner'; +import { ChatCompletionStream, type ChatCompletionStreamParams } from '../../../lib/ChatCompletionStream'; +import { + ChatCompletion, + ChatCompletionCreateParamsNonStreaming, + ChatCompletionMessage, + ChatCompletionMessageToolCall, +} from '../../chat/completions'; +import { ExtractParsedContentFromParams, parseChatCompletion, validateInputTools } from '../../../lib/parser'; + +export { + ChatCompletionStreamingRunner, + type ChatCompletionStreamingFunctionRunnerParams, +} from '../../../lib/ChatCompletionStreamingRunner'; +export { + type RunnableFunction, + type RunnableFunctions, + type RunnableFunctionWithParse, + type RunnableFunctionWithoutParse, + ParsingFunction, + ParsingToolFunction, +} from '../../../lib/RunnableFunction'; +export { type ChatCompletionToolRunnerParams } from '../../../lib/ChatCompletionRunner'; +export { type ChatCompletionStreamingToolRunnerParams } from '../../../lib/ChatCompletionStreamingRunner'; +export { ChatCompletionStream, type ChatCompletionStreamParams } from '../../../lib/ChatCompletionStream'; +export { + ChatCompletionRunner, + type ChatCompletionFunctionRunnerParams, +} from '../../../lib/ChatCompletionRunner'; + +export interface ParsedFunction extends ChatCompletionMessageToolCall.Function { + parsed_arguments?: unknown; +} + +export interface ParsedFunctionToolCall extends ChatCompletionMessageToolCall { + function: ParsedFunction; +} + +export interface ParsedChatCompletionMessage extends ChatCompletionMessage { + parsed: ParsedT | null; + tool_calls: Array; +} + +export interface ParsedChoice extends ChatCompletion.Choice { + message: ParsedChatCompletionMessage; +} + +export interface ParsedChatCompletion extends ChatCompletion { + choices: Array>; +} + +export type ChatCompletionParseParams = ChatCompletionCreateParamsNonStreaming; + +export class Completions extends APIResource { + parse>( + body: Params, + options?: Core.RequestOptions, + ): Core.APIPromise> { + validateInputTools(body.tools); + + return this._client.chat.completions + .create(body, { + ...options, + headers: { + ...options?.headers, + 'X-Stainless-Helper-Method': 'beta.chat.completions.parse', + }, + }) + ._thenUnwrap((completion) => parseChatCompletion(completion, body)); + } + + /** + * @deprecated - use `runTools` instead. 
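A sketch of `parse()` with a hand-written `json_schema` response format. The schema, property names, and model are illustrative; `message.parsed` is `null` when the model refuses or the content cannot be parsed, and in recent SDK versions the `zodResponseFormat` helper is the more common way to get a strongly typed `parsed` value:

```ts
import OpenAI from 'openai';

const client = new OpenAI();

async function main() {
  const completion = await client.beta.chat.completions.parse({
    model: 'gpt-4o-2024-08-06',
    messages: [{ role: 'user', content: 'Extract the city and country from: "I flew to Paris, France."' }],
    response_format: {
      type: 'json_schema',
      json_schema: {
        name: 'location',
        strict: true,
        schema: {
          type: 'object',
          properties: { city: { type: 'string' }, country: { type: 'string' } },
          required: ['city', 'country'],
          additionalProperties: false,
        },
      },
    },
  });

  // Populated from the JSON content; null on refusal or parse failure.
  console.log(completion.choices[0]?.message.parsed);
}

main();
```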
+ */ + runFunctions( + body: ChatCompletionFunctionRunnerParams, + options?: Core.RequestOptions, + ): ChatCompletionRunner; + runFunctions( + body: ChatCompletionStreamingFunctionRunnerParams, + options?: Core.RequestOptions, + ): ChatCompletionStreamingRunner; + runFunctions( + body: + | ChatCompletionFunctionRunnerParams + | ChatCompletionStreamingFunctionRunnerParams, + options?: Core.RequestOptions, + ): ChatCompletionRunner | ChatCompletionStreamingRunner { + if (body.stream) { + return ChatCompletionStreamingRunner.runFunctions( + this._client, + body as ChatCompletionStreamingFunctionRunnerParams, + options, + ); + } + return ChatCompletionRunner.runFunctions( + this._client, + body as ChatCompletionFunctionRunnerParams, + options, + ); + } + + /** + * A convenience helper for using tool calls with the /chat/completions endpoint + * which automatically calls the JavaScript functions you provide and sends their + * results back to the /chat/completions endpoint, looping as long as the model + * requests function calls. + * + * For more details and examples, see + * [the docs](https://github.com/openai/openai-node#automated-function-calls) + */ + runTools< + Params extends ChatCompletionToolRunnerParams, + ParsedT = ExtractParsedContentFromParams, + >(body: Params, options?: RunnerOptions): ChatCompletionRunner; + + runTools< + Params extends ChatCompletionStreamingToolRunnerParams, + ParsedT = ExtractParsedContentFromParams, + >(body: Params, options?: RunnerOptions): ChatCompletionStreamingRunner; + + runTools< + Params extends ChatCompletionToolRunnerParams | ChatCompletionStreamingToolRunnerParams, + ParsedT = ExtractParsedContentFromParams, + >( + body: Params, + options?: RunnerOptions, + ): ChatCompletionRunner | ChatCompletionStreamingRunner { + if (body.stream) { + return ChatCompletionStreamingRunner.runTools( + this._client, + body as ChatCompletionStreamingToolRunnerParams, + options, + ); + } + + return ChatCompletionRunner.runTools(this._client, body as ChatCompletionToolRunnerParams, options); + } + + /** + * Creates a chat completion stream + */ + stream>( + body: Params, + options?: Core.RequestOptions, + ): ChatCompletionStream { + return ChatCompletionStream.createChatCompletion(this._client, body, options); + } +} diff --git a/node_modules/openai/src/resources/beta/chat/index.ts b/node_modules/openai/src/resources/beta/chat/index.ts new file mode 100644 index 0000000..23b1b8f --- /dev/null +++ b/node_modules/openai/src/resources/beta/chat/index.ts @@ -0,0 +1,4 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +export { Chat } from './chat'; +export { Completions } from './completions'; diff --git a/node_modules/openai/src/resources/beta/index.ts b/node_modules/openai/src/resources/beta/index.ts new file mode 100644 index 0000000..d711128 --- /dev/null +++ b/node_modules/openai/src/resources/beta/index.ts @@ -0,0 +1,54 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
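The `runTools` loop above is easiest to see end to end. A minimal sketch with one runnable function; the function, its parameters, and the model name are all illustrative:

```ts
import OpenAI from 'openai';

const client = new OpenAI();

// An ordinary async function the runner is allowed to call; result stubbed.
async function getWeather(args: { city: string }): Promise<string> {
  return JSON.stringify({ city: args.city, tempC: 21 });
}

async function main() {
  const runner = client.beta.chat.completions.runTools({
    model: 'gpt-4o',
    messages: [{ role: 'user', content: 'How warm is it in Lisbon right now?' }],
    tools: [
      {
        type: 'function',
        function: {
          function: getWeather,
          parse: JSON.parse, // turn the model's JSON arguments string into `args`
          description: 'Look up the current temperature for a city.',
          parameters: {
            type: 'object',
            properties: { city: { type: 'string' } },
            required: ['city'],
          },
        },
      },
    ],
  });

  // Resolves once the model stops requesting tool calls.
  console.log(await runner.finalContent());
}

main();
```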
+ +export { + AssistantsPage, + Assistants, + type Assistant, + type AssistantDeleted, + type AssistantStreamEvent, + type AssistantTool, + type CodeInterpreterTool, + type FileSearchTool, + type FunctionTool, + type MessageStreamEvent, + type RunStepStreamEvent, + type RunStreamEvent, + type ThreadStreamEvent, + type AssistantCreateParams, + type AssistantUpdateParams, + type AssistantListParams, +} from './assistants'; +export { Beta } from './beta'; +export { Chat } from './chat/index'; +export { + Threads, + type AssistantResponseFormatOption, + type AssistantToolChoice, + type AssistantToolChoiceFunction, + type AssistantToolChoiceOption, + type Thread, + type ThreadDeleted, + type ThreadCreateParams, + type ThreadUpdateParams, + type ThreadCreateAndRunParams, + type ThreadCreateAndRunParamsNonStreaming, + type ThreadCreateAndRunParamsStreaming, + type ThreadCreateAndRunPollParams, + type ThreadCreateAndRunStreamParams, +} from './threads/index'; +export { + VectorStoresPage, + VectorStores, + type AutoFileChunkingStrategyParam, + type FileChunkingStrategy, + type FileChunkingStrategyParam, + type OtherFileChunkingStrategyObject, + type StaticFileChunkingStrategy, + type StaticFileChunkingStrategyObject, + type StaticFileChunkingStrategyParam, + type VectorStore, + type VectorStoreDeleted, + type VectorStoreCreateParams, + type VectorStoreUpdateParams, + type VectorStoreListParams, +} from './vector-stores/index'; diff --git a/node_modules/openai/src/resources/beta/threads/index.ts b/node_modules/openai/src/resources/beta/threads/index.ts new file mode 100644 index 0000000..f67a1ed --- /dev/null +++ b/node_modules/openai/src/resources/beta/threads/index.ts @@ -0,0 +1,73 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +export { + MessagesPage, + Messages, + type Annotation, + type AnnotationDelta, + type FileCitationAnnotation, + type FileCitationDeltaAnnotation, + type FilePathAnnotation, + type FilePathDeltaAnnotation, + type ImageFile, + type ImageFileContentBlock, + type ImageFileDelta, + type ImageFileDeltaBlock, + type ImageURL, + type ImageURLContentBlock, + type ImageURLDelta, + type ImageURLDeltaBlock, + type Message, + type MessageContent, + type MessageContentDelta, + type MessageContentPartParam, + type MessageDeleted, + type MessageDelta, + type MessageDeltaEvent, + type RefusalContentBlock, + type RefusalDeltaBlock, + type Text, + type TextContentBlock, + type TextContentBlockParam, + type TextDelta, + type TextDeltaBlock, + type MessageCreateParams, + type MessageUpdateParams, + type MessageListParams, +} from './messages'; +export { + RunsPage, + Runs, + type RequiredActionFunctionToolCall, + type Run, + type RunStatus, + type RunCreateParams, + type RunCreateParamsNonStreaming, + type RunCreateParamsStreaming, + type RunUpdateParams, + type RunListParams, + type RunSubmitToolOutputsParams, + type RunSubmitToolOutputsParamsNonStreaming, + type RunSubmitToolOutputsParamsStreaming, + type RunCreateAndPollParams, + type RunCreateAndStreamParams, + type RunStreamParams, + type RunSubmitToolOutputsAndPollParams, + type RunSubmitToolOutputsStreamParams, +} from './runs/index'; +export { + Threads, + type AssistantResponseFormatOption, + type AssistantToolChoice, + type AssistantToolChoiceFunction, + type AssistantToolChoiceOption, + type Thread, + type ThreadDeleted, + type ThreadCreateParams, + type ThreadUpdateParams, + type ThreadCreateAndRunParams, + type ThreadCreateAndRunParamsNonStreaming, + type ThreadCreateAndRunParamsStreaming, + type ThreadCreateAndRunPollParams, + type ThreadCreateAndRunStreamParams, +} from './threads'; diff --git a/node_modules/openai/src/resources/beta/threads/messages.ts b/node_modules/openai/src/resources/beta/threads/messages.ts new file mode 100644 index 0000000..8124f56 --- /dev/null +++ b/node_modules/openai/src/resources/beta/threads/messages.ts @@ -0,0 +1,761 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +import { APIResource } from '../../../resource'; +import { isRequestOptions } from '../../../core'; +import * as Core from '../../../core'; +import * as AssistantsAPI from '../assistants'; +import { CursorPage, type CursorPageParams } from '../../../pagination'; + +export class Messages extends APIResource { + /** + * Create a message. + */ + create( + threadId: string, + body: MessageCreateParams, + options?: Core.RequestOptions, + ): Core.APIPromise { + return this._client.post(`/threads/${threadId}/messages`, { + body, + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + }); + } + + /** + * Retrieve a message. + */ + retrieve(threadId: string, messageId: string, options?: Core.RequestOptions): Core.APIPromise { + return this._client.get(`/threads/${threadId}/messages/${messageId}`, { + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + }); + } + + /** + * Modifies a message. 
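A sketch of the message round trip against an existing thread; the thread ID is a placeholder. Note that the `OpenAI-Beta: assistants=v2` header visible in the methods above is added by the SDK, so callers never set it themselves:

```ts
import OpenAI from 'openai';

const client = new OpenAI();

async function main() {
  const threadId = 'thread_abc123'; // placeholder

  await client.beta.threads.messages.create(threadId, {
    role: 'user',
    content: 'Summarise the attached report in three bullet points.',
  });

  // Oldest first; add `run_id` to keep only messages produced by a given run.
  const page = await client.beta.threads.messages.list(threadId, { order: 'asc' });
  for (const message of page.data) {
    console.log(message.role, message.created_at);
  }
}

main();
```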
+ */ + update( + threadId: string, + messageId: string, + body: MessageUpdateParams, + options?: Core.RequestOptions, + ): Core.APIPromise { + return this._client.post(`/threads/${threadId}/messages/${messageId}`, { + body, + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + }); + } + + /** + * Returns a list of messages for a given thread. + */ + list( + threadId: string, + query?: MessageListParams, + options?: Core.RequestOptions, + ): Core.PagePromise; + list(threadId: string, options?: Core.RequestOptions): Core.PagePromise; + list( + threadId: string, + query: MessageListParams | Core.RequestOptions = {}, + options?: Core.RequestOptions, + ): Core.PagePromise { + if (isRequestOptions(query)) { + return this.list(threadId, {}, query); + } + return this._client.getAPIList(`/threads/${threadId}/messages`, MessagesPage, { + query, + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + }); + } + + /** + * Deletes a message. + */ + del(threadId: string, messageId: string, options?: Core.RequestOptions): Core.APIPromise { + return this._client.delete(`/threads/${threadId}/messages/${messageId}`, { + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + }); + } +} + +export class MessagesPage extends CursorPage {} + +/** + * A citation within the message that points to a specific quote from a specific + * File associated with the assistant or the message. Generated when the assistant + * uses the "file_search" tool to search files. + */ +export type Annotation = FileCitationAnnotation | FilePathAnnotation; + +/** + * A citation within the message that points to a specific quote from a specific + * File associated with the assistant or the message. Generated when the assistant + * uses the "file_search" tool to search files. + */ +export type AnnotationDelta = FileCitationDeltaAnnotation | FilePathDeltaAnnotation; + +/** + * A citation within the message that points to a specific quote from a specific + * File associated with the assistant or the message. Generated when the assistant + * uses the "file_search" tool to search files. + */ +export interface FileCitationAnnotation { + end_index: number; + + file_citation: FileCitationAnnotation.FileCitation; + + start_index: number; + + /** + * The text in the message content that needs to be replaced. + */ + text: string; + + /** + * Always `file_citation`. + */ + type: 'file_citation'; +} + +export namespace FileCitationAnnotation { + export interface FileCitation { + /** + * The ID of the specific File the citation is from. + */ + file_id: string; + } +} + +/** + * A citation within the message that points to a specific quote from a specific + * File associated with the assistant or the message. Generated when the assistant + * uses the "file_search" tool to search files. + */ +export interface FileCitationDeltaAnnotation { + /** + * The index of the annotation in the text content part. + */ + index: number; + + /** + * Always `file_citation`. + */ + type: 'file_citation'; + + end_index?: number; + + file_citation?: FileCitationDeltaAnnotation.FileCitation; + + start_index?: number; + + /** + * The text in the message content that needs to be replaced. + */ + text?: string; +} + +export namespace FileCitationDeltaAnnotation { + export interface FileCitation { + /** + * The ID of the specific File the citation is from. + */ + file_id?: string; + + /** + * The specific quote in the file. 
+ */ + quote?: string; + } +} + +/** + * A URL for the file that's generated when the assistant used the + * `code_interpreter` tool to generate a file. + */ +export interface FilePathAnnotation { + end_index: number; + + file_path: FilePathAnnotation.FilePath; + + start_index: number; + + /** + * The text in the message content that needs to be replaced. + */ + text: string; + + /** + * Always `file_path`. + */ + type: 'file_path'; +} + +export namespace FilePathAnnotation { + export interface FilePath { + /** + * The ID of the file that was generated. + */ + file_id: string; + } +} + +/** + * A URL for the file that's generated when the assistant used the + * `code_interpreter` tool to generate a file. + */ +export interface FilePathDeltaAnnotation { + /** + * The index of the annotation in the text content part. + */ + index: number; + + /** + * Always `file_path`. + */ + type: 'file_path'; + + end_index?: number; + + file_path?: FilePathDeltaAnnotation.FilePath; + + start_index?: number; + + /** + * The text in the message content that needs to be replaced. + */ + text?: string; +} + +export namespace FilePathDeltaAnnotation { + export interface FilePath { + /** + * The ID of the file that was generated. + */ + file_id?: string; + } +} + +export interface ImageFile { + /** + * The [File](https://platform.openai.com/docs/api-reference/files) ID of the image + * in the message content. Set `purpose="vision"` when uploading the File if you + * need to later display the file content. + */ + file_id: string; + + /** + * Specifies the detail level of the image if specified by the user. `low` uses + * fewer tokens, you can opt in to high resolution using `high`. + */ + detail?: 'auto' | 'low' | 'high'; +} + +/** + * References an image [File](https://platform.openai.com/docs/api-reference/files) + * in the content of a message. + */ +export interface ImageFileContentBlock { + image_file: ImageFile; + + /** + * Always `image_file`. + */ + type: 'image_file'; +} + +export interface ImageFileDelta { + /** + * Specifies the detail level of the image if specified by the user. `low` uses + * fewer tokens, you can opt in to high resolution using `high`. + */ + detail?: 'auto' | 'low' | 'high'; + + /** + * The [File](https://platform.openai.com/docs/api-reference/files) ID of the image + * in the message content. Set `purpose="vision"` when uploading the File if you + * need to later display the file content. + */ + file_id?: string; +} + +/** + * References an image [File](https://platform.openai.com/docs/api-reference/files) + * in the content of a message. + */ +export interface ImageFileDeltaBlock { + /** + * The index of the content part in the message. + */ + index: number; + + /** + * Always `image_file`. + */ + type: 'image_file'; + + image_file?: ImageFileDelta; +} + +export interface ImageURL { + /** + * The external URL of the image, must be a supported image types: jpeg, jpg, png, + * gif, webp. + */ + url: string; + + /** + * Specifies the detail level of the image. `low` uses fewer tokens, you can opt in + * to high resolution using `high`. Default value is `auto` + */ + detail?: 'auto' | 'low' | 'high'; +} + +/** + * References an image URL in the content of a message. + */ +export interface ImageURLContentBlock { + image_url: ImageURL; + + /** + * The type of the content part. + */ + type: 'image_url'; +} + +export interface ImageURLDelta { + /** + * Specifies the detail level of the image. `low` uses fewer tokens, you can opt in + * to high resolution using `high`. 
+ */ + detail?: 'auto' | 'low' | 'high'; + + /** + * The URL of the image, must be a supported image types: jpeg, jpg, png, gif, + * webp. + */ + url?: string; +} + +/** + * References an image URL in the content of a message. + */ +export interface ImageURLDeltaBlock { + /** + * The index of the content part in the message. + */ + index: number; + + /** + * Always `image_url`. + */ + type: 'image_url'; + + image_url?: ImageURLDelta; +} + +/** + * Represents a message within a + * [thread](https://platform.openai.com/docs/api-reference/threads). + */ +export interface Message { + /** + * The identifier, which can be referenced in API endpoints. + */ + id: string; + + /** + * If applicable, the ID of the + * [assistant](https://platform.openai.com/docs/api-reference/assistants) that + * authored this message. + */ + assistant_id: string | null; + + /** + * A list of files attached to the message, and the tools they were added to. + */ + attachments: Array | null; + + /** + * The Unix timestamp (in seconds) for when the message was completed. + */ + completed_at: number | null; + + /** + * The content of the message in array of text and/or images. + */ + content: Array; + + /** + * The Unix timestamp (in seconds) for when the message was created. + */ + created_at: number; + + /** + * The Unix timestamp (in seconds) for when the message was marked as incomplete. + */ + incomplete_at: number | null; + + /** + * On an incomplete message, details about why the message is incomplete. + */ + incomplete_details: Message.IncompleteDetails | null; + + /** + * Set of 16 key-value pairs that can be attached to an object. This can be useful + * for storing additional information about the object in a structured format. Keys + * can be a maximum of 64 characters long and values can be a maxium of 512 + * characters long. + */ + metadata: unknown | null; + + /** + * The object type, which is always `thread.message`. + */ + object: 'thread.message'; + + /** + * The entity that produced the message. One of `user` or `assistant`. + */ + role: 'user' | 'assistant'; + + /** + * The ID of the [run](https://platform.openai.com/docs/api-reference/runs) + * associated with the creation of this message. Value is `null` when messages are + * created manually using the create message or create thread endpoints. + */ + run_id: string | null; + + /** + * The status of the message, which can be either `in_progress`, `incomplete`, or + * `completed`. + */ + status: 'in_progress' | 'incomplete' | 'completed'; + + /** + * The [thread](https://platform.openai.com/docs/api-reference/threads) ID that + * this message belongs to. + */ + thread_id: string; +} + +export namespace Message { + export interface Attachment { + /** + * The ID of the file to attach to the message. + */ + file_id?: string; + + /** + * The tools to add this file to. + */ + tools?: Array; + } + + export namespace Attachment { + export interface AssistantToolsFileSearchTypeOnly { + /** + * The type of tool being defined: `file_search` + */ + type: 'file_search'; + } + } + + /** + * On an incomplete message, details about why the message is incomplete. + */ + export interface IncompleteDetails { + /** + * The reason the message is incomplete. + */ + reason: 'content_filter' | 'max_tokens' | 'run_cancelled' | 'run_expired' | 'run_failed'; + } +} + +/** + * References an image [File](https://platform.openai.com/docs/api-reference/files) + * in the content of a message. 
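These image blocks are what you send as message content parts. A sketch of a mixed text-and-image user message, assuming an existing thread; the URL and IDs are placeholders:

```ts
import OpenAI from 'openai';

const client = new OpenAI();

async function main() {
  await client.beta.threads.messages.create('thread_abc123', {
    role: 'user',
    content: [
      { type: 'text', text: 'What is shown in this image?' },
      {
        type: 'image_url',
        image_url: { url: 'https://example.com/photo.jpg', detail: 'low' }, // low = fewer tokens
      },
      // For a file uploaded with purpose="vision", use an image_file block instead:
      // { type: 'image_file', image_file: { file_id: 'file-abc123', detail: 'auto' } },
    ],
  });
}

main();
```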
+ */ +export type MessageContent = + | ImageFileContentBlock + | ImageURLContentBlock + | TextContentBlock + | RefusalContentBlock; + +/** + * References an image [File](https://platform.openai.com/docs/api-reference/files) + * in the content of a message. + */ +export type MessageContentDelta = + | ImageFileDeltaBlock + | TextDeltaBlock + | RefusalDeltaBlock + | ImageURLDeltaBlock; + +/** + * References an image [File](https://platform.openai.com/docs/api-reference/files) + * in the content of a message. + */ +export type MessageContentPartParam = ImageFileContentBlock | ImageURLContentBlock | TextContentBlockParam; + +export interface MessageDeleted { + id: string; + + deleted: boolean; + + object: 'thread.message.deleted'; +} + +/** + * The delta containing the fields that have changed on the Message. + */ +export interface MessageDelta { + /** + * The content of the message in array of text and/or images. + */ + content?: Array; + + /** + * The entity that produced the message. One of `user` or `assistant`. + */ + role?: 'user' | 'assistant'; +} + +/** + * Represents a message delta i.e. any changed fields on a message during + * streaming. + */ +export interface MessageDeltaEvent { + /** + * The identifier of the message, which can be referenced in API endpoints. + */ + id: string; + + /** + * The delta containing the fields that have changed on the Message. + */ + delta: MessageDelta; + + /** + * The object type, which is always `thread.message.delta`. + */ + object: 'thread.message.delta'; +} + +/** + * The refusal content generated by the assistant. + */ +export interface RefusalContentBlock { + refusal: string; + + /** + * Always `refusal`. + */ + type: 'refusal'; +} + +/** + * The refusal content that is part of a message. + */ +export interface RefusalDeltaBlock { + /** + * The index of the refusal part in the message. + */ + index: number; + + /** + * Always `refusal`. + */ + type: 'refusal'; + + refusal?: string; +} + +export interface Text { + annotations: Array; + + /** + * The data that makes up the text. + */ + value: string; +} + +/** + * The text content that is part of a message. + */ +export interface TextContentBlock { + text: Text; + + /** + * Always `text`. + */ + type: 'text'; +} + +/** + * The text content that is part of a message. + */ +export interface TextContentBlockParam { + /** + * Text content to be sent to the model + */ + text: string; + + /** + * Always `text`. + */ + type: 'text'; +} + +export interface TextDelta { + annotations?: Array; + + /** + * The data that makes up the text. + */ + value?: string; +} + +/** + * The text content that is part of a message. + */ +export interface TextDeltaBlock { + /** + * The index of the content part in the message. + */ + index: number; + + /** + * Always `text`. + */ + type: 'text'; + + text?: TextDelta; +} + +export interface MessageCreateParams { + /** + * The text contents of the message. + */ + content: string | Array; + + /** + * The role of the entity that is creating the message. Allowed values include: + * + * - `user`: Indicates the message is sent by an actual user and should be used in + * most cases to represent user-generated messages. + * - `assistant`: Indicates the message is generated by the assistant. Use this + * value to insert messages from the assistant into the conversation. + */ + role: 'user' | 'assistant'; + + /** + * A list of files attached to the message, and the tools they should be added to. 
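Because `MessageContent` is a discriminated union on `type`, reading a reply is a narrowing exercise. A sketch that prints the text parts of the most recent message in a thread (thread ID is a placeholder):

```ts
import OpenAI from 'openai';

const client = new OpenAI();

async function main() {
  const page = await client.beta.threads.messages.list('thread_abc123', {
    order: 'desc',
    limit: 1,
  });

  for (const message of page.data) {
    for (const part of message.content) {
      if (part.type === 'text') {
        // `part` narrows to TextContentBlock here.
        console.log(part.text.value);
        console.log('annotations:', part.text.annotations.length);
      } else if (part.type === 'image_file') {
        console.log('image file:', part.image_file.file_id);
      }
    }
  }
}

main();
```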
+ */ + attachments?: Array | null; + + /** + * Set of 16 key-value pairs that can be attached to an object. This can be useful + * for storing additional information about the object in a structured format. Keys + * can be a maximum of 64 characters long and values can be a maxium of 512 + * characters long. + */ + metadata?: unknown | null; +} + +export namespace MessageCreateParams { + export interface Attachment { + /** + * The ID of the file to attach to the message. + */ + file_id?: string; + + /** + * The tools to add this file to. + */ + tools?: Array; + } + + export namespace Attachment { + export interface FileSearch { + /** + * The type of tool being defined: `file_search` + */ + type: 'file_search'; + } + } +} + +export interface MessageUpdateParams { + /** + * Set of 16 key-value pairs that can be attached to an object. This can be useful + * for storing additional information about the object in a structured format. Keys + * can be a maximum of 64 characters long and values can be a maxium of 512 + * characters long. + */ + metadata?: unknown | null; +} + +export interface MessageListParams extends CursorPageParams { + /** + * A cursor for use in pagination. `before` is an object ID that defines your place + * in the list. For instance, if you make a list request and receive 100 objects, + * starting with obj_foo, your subsequent call can include before=obj_foo in order + * to fetch the previous page of the list. + */ + before?: string; + + /** + * Sort order by the `created_at` timestamp of the objects. `asc` for ascending + * order and `desc` for descending order. + */ + order?: 'asc' | 'desc'; + + /** + * Filter messages by the run ID that generated them. + */ + run_id?: string; +} + +Messages.MessagesPage = MessagesPage; + +export declare namespace Messages { + export { + type Annotation as Annotation, + type AnnotationDelta as AnnotationDelta, + type FileCitationAnnotation as FileCitationAnnotation, + type FileCitationDeltaAnnotation as FileCitationDeltaAnnotation, + type FilePathAnnotation as FilePathAnnotation, + type FilePathDeltaAnnotation as FilePathDeltaAnnotation, + type ImageFile as ImageFile, + type ImageFileContentBlock as ImageFileContentBlock, + type ImageFileDelta as ImageFileDelta, + type ImageFileDeltaBlock as ImageFileDeltaBlock, + type ImageURL as ImageURL, + type ImageURLContentBlock as ImageURLContentBlock, + type ImageURLDelta as ImageURLDelta, + type ImageURLDeltaBlock as ImageURLDeltaBlock, + type Message as Message, + type MessageContent as MessageContent, + type MessageContentDelta as MessageContentDelta, + type MessageContentPartParam as MessageContentPartParam, + type MessageDeleted as MessageDeleted, + type MessageDelta as MessageDelta, + type MessageDeltaEvent as MessageDeltaEvent, + type RefusalContentBlock as RefusalContentBlock, + type RefusalDeltaBlock as RefusalDeltaBlock, + type Text as Text, + type TextContentBlock as TextContentBlock, + type TextContentBlockParam as TextContentBlockParam, + type TextDelta as TextDelta, + type TextDeltaBlock as TextDeltaBlock, + MessagesPage as MessagesPage, + type MessageCreateParams as MessageCreateParams, + type MessageUpdateParams as MessageUpdateParams, + type MessageListParams as MessageListParams, + }; +} diff --git a/node_modules/openai/src/resources/beta/threads/runs/index.ts b/node_modules/openai/src/resources/beta/threads/runs/index.ts new file mode 100644 index 0000000..9dbe575 --- /dev/null +++ b/node_modules/openai/src/resources/beta/threads/runs/index.ts @@ -0,0 +1,46 @@ +// File generated 
from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +export { + RunStepsPage, + Steps, + type CodeInterpreterLogs, + type CodeInterpreterOutputImage, + type CodeInterpreterToolCall, + type CodeInterpreterToolCallDelta, + type FileSearchToolCall, + type FileSearchToolCallDelta, + type FunctionToolCall, + type FunctionToolCallDelta, + type MessageCreationStepDetails, + type RunStep, + type RunStepDelta, + type RunStepDeltaEvent, + type RunStepDeltaMessageDelta, + type RunStepInclude, + type ToolCall, + type ToolCallDelta, + type ToolCallDeltaObject, + type ToolCallsStepDetails, + type StepRetrieveParams, + type StepListParams, +} from './steps'; +export { + RunsPage, + Runs, + type RequiredActionFunctionToolCall, + type Run, + type RunStatus, + type RunCreateParams, + type RunCreateParamsNonStreaming, + type RunCreateParamsStreaming, + type RunUpdateParams, + type RunListParams, + type RunCreateAndPollParams, + type RunCreateAndStreamParams, + type RunStreamParams, + type RunSubmitToolOutputsParams, + type RunSubmitToolOutputsParamsNonStreaming, + type RunSubmitToolOutputsParamsStreaming, + type RunSubmitToolOutputsAndPollParams, + type RunSubmitToolOutputsStreamParams, +} from './runs'; diff --git a/node_modules/openai/src/resources/beta/threads/runs/runs.ts b/node_modules/openai/src/resources/beta/threads/runs/runs.ts new file mode 100644 index 0000000..814ad3e --- /dev/null +++ b/node_modules/openai/src/resources/beta/threads/runs/runs.ts @@ -0,0 +1,1695 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +import { APIResource } from '../../../../resource'; +import { isRequestOptions } from '../../../../core'; +import { APIPromise } from '../../../../core'; +import * as Core from '../../../../core'; +import { AssistantStream, RunCreateParamsBaseStream } from '../../../../lib/AssistantStream'; +import { sleep } from '../../../../core'; +import { RunSubmitToolOutputsParamsStream } from '../../../../lib/AssistantStream'; +import * as RunsAPI from './runs'; +import * as AssistantsAPI from '../../assistants'; +import * as ChatAPI from '../../../chat/chat'; +import * as MessagesAPI from '../messages'; +import * as ThreadsAPI from '../threads'; +import * as StepsAPI from './steps'; +import { + CodeInterpreterLogs, + CodeInterpreterOutputImage, + CodeInterpreterToolCall, + CodeInterpreterToolCallDelta, + FileSearchToolCall, + FileSearchToolCallDelta, + FunctionToolCall, + FunctionToolCallDelta, + MessageCreationStepDetails, + RunStep, + RunStepDelta, + RunStepDeltaEvent, + RunStepDeltaMessageDelta, + RunStepInclude, + RunStepsPage, + StepListParams, + StepRetrieveParams, + Steps, + ToolCall, + ToolCallDelta, + ToolCallDeltaObject, + ToolCallsStepDetails, +} from './steps'; +import { CursorPage, type CursorPageParams } from '../../../../pagination'; +import { Stream } from '../../../../streaming'; + +export class Runs extends APIResource { + steps: StepsAPI.Steps = new StepsAPI.Steps(this._client); + + /** + * Create a run. 
+ */ + create( + threadId: string, + params: RunCreateParamsNonStreaming, + options?: Core.RequestOptions, + ): APIPromise; + create( + threadId: string, + params: RunCreateParamsStreaming, + options?: Core.RequestOptions, + ): APIPromise>; + create( + threadId: string, + params: RunCreateParamsBase, + options?: Core.RequestOptions, + ): APIPromise | Run>; + create( + threadId: string, + params: RunCreateParams, + options?: Core.RequestOptions, + ): APIPromise | APIPromise> { + const { include, ...body } = params; + return this._client.post(`/threads/${threadId}/runs`, { + query: { include }, + body, + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + stream: params.stream ?? false, + }) as APIPromise | APIPromise>; + } + + /** + * Retrieves a run. + */ + retrieve(threadId: string, runId: string, options?: Core.RequestOptions): Core.APIPromise { + return this._client.get(`/threads/${threadId}/runs/${runId}`, { + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + }); + } + + /** + * Modifies a run. + */ + update( + threadId: string, + runId: string, + body: RunUpdateParams, + options?: Core.RequestOptions, + ): Core.APIPromise { + return this._client.post(`/threads/${threadId}/runs/${runId}`, { + body, + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + }); + } + + /** + * Returns a list of runs belonging to a thread. + */ + list( + threadId: string, + query?: RunListParams, + options?: Core.RequestOptions, + ): Core.PagePromise; + list(threadId: string, options?: Core.RequestOptions): Core.PagePromise; + list( + threadId: string, + query: RunListParams | Core.RequestOptions = {}, + options?: Core.RequestOptions, + ): Core.PagePromise { + if (isRequestOptions(query)) { + return this.list(threadId, {}, query); + } + return this._client.getAPIList(`/threads/${threadId}/runs`, RunsPage, { + query, + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + }); + } + + /** + * Cancels a run that is `in_progress`. + */ + cancel(threadId: string, runId: string, options?: Core.RequestOptions): Core.APIPromise { + return this._client.post(`/threads/${threadId}/runs/${runId}/cancel`, { + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + }); + } + + /** + * A helper to create a run an poll for a terminal state. More information on Run + * lifecycles can be found here: + * https://platform.openai.com/docs/assistants/how-it-works/runs-and-run-steps + */ + async createAndPoll( + threadId: string, + body: RunCreateParamsNonStreaming, + options?: Core.RequestOptions & { pollIntervalMs?: number }, + ): Promise { + const run = await this.create(threadId, body, options); + return await this.poll(threadId, run.id, options); + } + + /** + * Create a Run stream + * + * @deprecated use `stream` instead + */ + createAndStream( + threadId: string, + body: RunCreateParamsBaseStream, + options?: Core.RequestOptions, + ): AssistantStream { + return AssistantStream.createAssistantStream(threadId, this._client.beta.threads.runs, body, options); + } + + /** + * A helper to poll a run status until it reaches a terminal state. 
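For most non-streaming callers, `createAndPoll` above covers the whole run lifecycle. A sketch; the IDs are placeholders, and when `pollIntervalMs` is omitted the helper honours the server's `openai-poll-after-ms` hint instead:

```ts
import OpenAI from 'openai';

const client = new OpenAI();

async function main() {
  const run = await client.beta.threads.runs.createAndPoll(
    'thread_abc123',
    { assistant_id: 'asst_abc123' },
    { pollIntervalMs: 1000 },
  );

  // `run` is already in a terminal (or requires_action) state here.
  console.log(run.status);
  if (run.status === 'completed') {
    const messages = await client.beta.threads.messages.list(run.thread_id, { run_id: run.id });
    console.log(messages.data.length, 'messages from this run');
  }
}

main();
```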
More + * information on Run lifecycles can be found here: + * https://platform.openai.com/docs/assistants/how-it-works/runs-and-run-steps + */ + async poll( + threadId: string, + runId: string, + options?: Core.RequestOptions & { pollIntervalMs?: number }, + ): Promise { + const headers: { [key: string]: string } = { ...options?.headers, 'X-Stainless-Poll-Helper': 'true' }; + + if (options?.pollIntervalMs) { + headers['X-Stainless-Custom-Poll-Interval'] = options.pollIntervalMs.toString(); + } + + while (true) { + const { data: run, response } = await this.retrieve(threadId, runId, { + ...options, + headers: { ...options?.headers, ...headers }, + }).withResponse(); + + switch (run.status) { + //If we are in any sort of intermediate state we poll + case 'queued': + case 'in_progress': + case 'cancelling': + let sleepInterval = 5000; + + if (options?.pollIntervalMs) { + sleepInterval = options.pollIntervalMs; + } else { + const headerInterval = response.headers.get('openai-poll-after-ms'); + if (headerInterval) { + const headerIntervalMs = parseInt(headerInterval); + if (!isNaN(headerIntervalMs)) { + sleepInterval = headerIntervalMs; + } + } + } + await sleep(sleepInterval); + break; + //We return the run in any terminal state. + case 'requires_action': + case 'incomplete': + case 'cancelled': + case 'completed': + case 'failed': + case 'expired': + return run; + } + } + } + + /** + * Create a Run stream + */ + stream(threadId: string, body: RunCreateParamsBaseStream, options?: Core.RequestOptions): AssistantStream { + return AssistantStream.createAssistantStream(threadId, this._client.beta.threads.runs, body, options); + } + + /** + * When a run has the `status: "requires_action"` and `required_action.type` is + * `submit_tool_outputs`, this endpoint can be used to submit the outputs from the + * tool calls once they're all completed. All outputs must be submitted in a single + * request. + */ + submitToolOutputs( + threadId: string, + runId: string, + body: RunSubmitToolOutputsParamsNonStreaming, + options?: Core.RequestOptions, + ): APIPromise; + submitToolOutputs( + threadId: string, + runId: string, + body: RunSubmitToolOutputsParamsStreaming, + options?: Core.RequestOptions, + ): APIPromise>; + submitToolOutputs( + threadId: string, + runId: string, + body: RunSubmitToolOutputsParamsBase, + options?: Core.RequestOptions, + ): APIPromise | Run>; + submitToolOutputs( + threadId: string, + runId: string, + body: RunSubmitToolOutputsParams, + options?: Core.RequestOptions, + ): APIPromise | APIPromise> { + return this._client.post(`/threads/${threadId}/runs/${runId}/submit_tool_outputs`, { + body, + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + stream: body.stream ?? false, + }) as APIPromise | APIPromise>; + } + + /** + * A helper to submit a tool output to a run and poll for a terminal run state. + * More information on Run lifecycles can be found here: + * https://platform.openai.com/docs/assistants/how-it-works/runs-and-run-steps + */ + async submitToolOutputsAndPoll( + threadId: string, + runId: string, + body: RunSubmitToolOutputsParamsNonStreaming, + options?: Core.RequestOptions & { pollIntervalMs?: number }, + ): Promise { + const run = await this.submitToolOutputs(threadId, runId, body, options); + return await this.poll(threadId, run.id, options); + } + + /** + * Submit the tool outputs from a previous run and stream the run to a terminal + * state. 
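The `stream` helper above returns an `AssistantStream`. A sketch of consuming it with event listeners; the event names (`textCreated`, `textDelta`) and `finalRun()` come from the SDK's AssistantStream helper rather than this file, so treat them as an assumption about a recent 4.x release:

```ts
import OpenAI from 'openai';

const client = new OpenAI();

async function main() {
  const stream = client.beta.threads.runs.stream('thread_abc123', {
    assistant_id: 'asst_abc123',
  });

  stream
    .on('textCreated', () => process.stdout.write('\nassistant > '))
    .on('textDelta', (delta) => process.stdout.write(delta.value ?? ''));

  // Resolves once the run reaches a terminal state.
  const run = await stream.finalRun();
  console.log('\nfinished with status:', run.status);
}

main();
```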
More information on Run lifecycles can be found here: + * https://platform.openai.com/docs/assistants/how-it-works/runs-and-run-steps + */ + submitToolOutputsStream( + threadId: string, + runId: string, + body: RunSubmitToolOutputsParamsStream, + options?: Core.RequestOptions, + ): AssistantStream { + return AssistantStream.createToolAssistantStream( + threadId, + runId, + this._client.beta.threads.runs, + body, + options, + ); + } +} + +export class RunsPage extends CursorPage {} + +/** + * Tool call objects + */ +export interface RequiredActionFunctionToolCall { + /** + * The ID of the tool call. This ID must be referenced when you submit the tool + * outputs in using the + * [Submit tool outputs to run](https://platform.openai.com/docs/api-reference/runs/submitToolOutputs) + * endpoint. + */ + id: string; + + /** + * The function definition. + */ + function: RequiredActionFunctionToolCall.Function; + + /** + * The type of tool call the output is required for. For now, this is always + * `function`. + */ + type: 'function'; +} + +export namespace RequiredActionFunctionToolCall { + /** + * The function definition. + */ + export interface Function { + /** + * The arguments that the model expects you to pass to the function. + */ + arguments: string; + + /** + * The name of the function. + */ + name: string; + } +} + +/** + * Represents an execution run on a + * [thread](https://platform.openai.com/docs/api-reference/threads). + */ +export interface Run { + /** + * The identifier, which can be referenced in API endpoints. + */ + id: string; + + /** + * The ID of the + * [assistant](https://platform.openai.com/docs/api-reference/assistants) used for + * execution of this run. + */ + assistant_id: string; + + /** + * The Unix timestamp (in seconds) for when the run was cancelled. + */ + cancelled_at: number | null; + + /** + * The Unix timestamp (in seconds) for when the run was completed. + */ + completed_at: number | null; + + /** + * The Unix timestamp (in seconds) for when the run was created. + */ + created_at: number; + + /** + * The Unix timestamp (in seconds) for when the run will expire. + */ + expires_at: number | null; + + /** + * The Unix timestamp (in seconds) for when the run failed. + */ + failed_at: number | null; + + /** + * Details on why the run is incomplete. Will be `null` if the run is not + * incomplete. + */ + incomplete_details: Run.IncompleteDetails | null; + + /** + * The instructions that the + * [assistant](https://platform.openai.com/docs/api-reference/assistants) used for + * this run. + */ + instructions: string; + + /** + * The last error associated with this run. Will be `null` if there are no errors. + */ + last_error: Run.LastError | null; + + /** + * The maximum number of completion tokens specified to have been used over the + * course of the run. + */ + max_completion_tokens: number | null; + + /** + * The maximum number of prompt tokens specified to have been used over the course + * of the run. + */ + max_prompt_tokens: number | null; + + /** + * Set of 16 key-value pairs that can be attached to an object. This can be useful + * for storing additional information about the object in a structured format. Keys + * can be a maximum of 64 characters long and values can be a maxium of 512 + * characters long. + */ + metadata: unknown | null; + + /** + * The model that the + * [assistant](https://platform.openai.com/docs/api-reference/assistants) used for + * this run. + */ + model: string; + + /** + * The object type, which is always `thread.run`. 
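Putting `RequiredActionFunctionToolCall` together with `submitToolOutputsAndPoll` from above, the tool-call round trip looks like the sketch below. `lookupOrder` is a hypothetical local helper, the IDs are placeholders, and the `tool_outputs` body shape (an array of `{ tool_call_id, output }`) is the standard Assistants API shape rather than something shown in this excerpt:

```ts
import OpenAI from 'openai';

const client = new OpenAI();

// Hypothetical local tool implementation.
async function lookupOrder(args: { orderId: string }): Promise<string> {
  return JSON.stringify({ orderId: args.orderId, status: 'shipped' });
}

async function main() {
  let run = await client.beta.threads.runs.createAndPoll('thread_abc123', {
    assistant_id: 'asst_abc123',
  });

  if (run.status === 'requires_action' && run.required_action?.type === 'submit_tool_outputs') {
    const toolOutputs = await Promise.all(
      // A real dispatcher would switch on call.function.name; here every call
      // goes to the same hypothetical helper.
      run.required_action.submit_tool_outputs.tool_calls.map(async (call) => ({
        tool_call_id: call.id,
        // `arguments` is a JSON string produced by the model.
        output: await lookupOrder(JSON.parse(call.function.arguments)),
      })),
    );

    run = await client.beta.threads.runs.submitToolOutputsAndPoll('thread_abc123', run.id, {
      tool_outputs: toolOutputs,
    });
  }

  console.log(run.status);
}

main();
```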
+ */ + object: 'thread.run'; + + /** + * Whether to enable + * [parallel function calling](https://platform.openai.com/docs/guides/function-calling#configuring-parallel-function-calling) + * during tool use. + */ + parallel_tool_calls: boolean; + + /** + * Details on the action required to continue the run. Will be `null` if no action + * is required. + */ + required_action: Run.RequiredAction | null; + + /** + * Specifies the format that the model must output. Compatible with + * [GPT-4o](https://platform.openai.com/docs/models#gpt-4o), + * [GPT-4 Turbo](https://platform.openai.com/docs/models#gpt-4-turbo-and-gpt-4), + * and all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. + * + * Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured + * Outputs which ensures the model will match your supplied JSON schema. Learn more + * in the + * [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). + * + * Setting to `{ "type": "json_object" }` enables JSON mode, which ensures the + * message the model generates is valid JSON. + * + * **Important:** when using JSON mode, you **must** also instruct the model to + * produce JSON yourself via a system or user message. Without this, the model may + * generate an unending stream of whitespace until the generation reaches the token + * limit, resulting in a long-running and seemingly "stuck" request. Also note that + * the message content may be partially cut off if `finish_reason="length"`, which + * indicates the generation exceeded `max_tokens` or the conversation exceeded the + * max context length. + */ + response_format: ThreadsAPI.AssistantResponseFormatOption | null; + + /** + * The Unix timestamp (in seconds) for when the run was started. + */ + started_at: number | null; + + /** + * The status of the run, which can be either `queued`, `in_progress`, + * `requires_action`, `cancelling`, `cancelled`, `failed`, `completed`, + * `incomplete`, or `expired`. + */ + status: RunStatus; + + /** + * The ID of the [thread](https://platform.openai.com/docs/api-reference/threads) + * that was executed on as a part of this run. + */ + thread_id: string; + + /** + * Controls which (if any) tool is called by the model. `none` means the model will + * not call any tools and instead generates a message. `auto` is the default value + * and means the model can pick between generating a message or calling one or more + * tools. `required` means the model must call one or more tools before responding + * to the user. Specifying a particular tool like `{"type": "file_search"}` or + * `{"type": "function", "function": {"name": "my_function"}}` forces the model to + * call that tool. + */ + tool_choice: ThreadsAPI.AssistantToolChoiceOption | null; + + /** + * The list of tools that the + * [assistant](https://platform.openai.com/docs/api-reference/assistants) used for + * this run. + */ + tools: Array; + + /** + * Controls for how a thread will be truncated prior to the run. Use this to + * control the intial context window of the run. + */ + truncation_strategy: Run.TruncationStrategy | null; + + /** + * Usage statistics related to the run. This value will be `null` if the run is not + * in a terminal state (i.e. `in_progress`, `queued`, etc.). + */ + usage: Run.Usage | null; + + /** + * The sampling temperature used for this run. If not set, defaults to 1. + */ + temperature?: number | null; + + /** + * The nucleus sampling value used for this run. If not set, defaults to 1. 
+ */ + top_p?: number | null; +} + +export namespace Run { + /** + * Details on why the run is incomplete. Will be `null` if the run is not + * incomplete. + */ + export interface IncompleteDetails { + /** + * The reason why the run is incomplete. This will point to which specific token + * limit was reached over the course of the run. + */ + reason?: 'max_completion_tokens' | 'max_prompt_tokens'; + } + + /** + * The last error associated with this run. Will be `null` if there are no errors. + */ + export interface LastError { + /** + * One of `server_error`, `rate_limit_exceeded`, or `invalid_prompt`. + */ + code: 'server_error' | 'rate_limit_exceeded' | 'invalid_prompt'; + + /** + * A human-readable description of the error. + */ + message: string; + } + + /** + * Details on the action required to continue the run. Will be `null` if no action + * is required. + */ + export interface RequiredAction { + /** + * Details on the tool outputs needed for this run to continue. + */ + submit_tool_outputs: RequiredAction.SubmitToolOutputs; + + /** + * For now, this is always `submit_tool_outputs`. + */ + type: 'submit_tool_outputs'; + } + + export namespace RequiredAction { + /** + * Details on the tool outputs needed for this run to continue. + */ + export interface SubmitToolOutputs { + /** + * A list of the relevant tool calls. + */ + tool_calls: Array; + } + } + + /** + * Controls for how a thread will be truncated prior to the run. Use this to + * control the intial context window of the run. + */ + export interface TruncationStrategy { + /** + * The truncation strategy to use for the thread. The default is `auto`. If set to + * `last_messages`, the thread will be truncated to the n most recent messages in + * the thread. When set to `auto`, messages in the middle of the thread will be + * dropped to fit the context length of the model, `max_prompt_tokens`. + */ + type: 'auto' | 'last_messages'; + + /** + * The number of most recent messages from the thread when constructing the context + * for the run. + */ + last_messages?: number | null; + } + + /** + * Usage statistics related to the run. This value will be `null` if the run is not + * in a terminal state (i.e. `in_progress`, `queued`, etc.). + */ + export interface Usage { + /** + * Number of completion tokens used over the course of the run. + */ + completion_tokens: number; + + /** + * Number of prompt tokens used over the course of the run. + */ + prompt_tokens: number; + + /** + * Total number of tokens used (prompt + completion). + */ + total_tokens: number; + } +} + +/** + * The status of the run, which can be either `queued`, `in_progress`, + * `requires_action`, `cancelling`, `cancelled`, `failed`, `completed`, + * `incomplete`, or `expired`. + */ +export type RunStatus = + | 'queued' + | 'in_progress' + | 'requires_action' + | 'cancelling' + | 'cancelled' + | 'failed' + | 'completed' + | 'incomplete' + | 'expired'; + +export type RunCreateParams = RunCreateParamsNonStreaming | RunCreateParamsStreaming; + +export interface RunCreateParamsBase { + /** + * Body param: The ID of the + * [assistant](https://platform.openai.com/docs/api-reference/assistants) to use to + * execute this run. + */ + assistant_id: string; + + /** + * Query param: A list of additional fields to include in the response. Currently + * the only supported value is + * `step_details.tool_calls[*].file_search.results[*].content` to fetch the file + * search result content. 
+ * + * See the + * [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) + * for more information. + */ + include?: Array; + + /** + * Body param: Appends additional instructions at the end of the instructions for + * the run. This is useful for modifying the behavior on a per-run basis without + * overriding other instructions. + */ + additional_instructions?: string | null; + + /** + * Body param: Adds additional messages to the thread before creating the run. + */ + additional_messages?: Array | null; + + /** + * Body param: Overrides the + * [instructions](https://platform.openai.com/docs/api-reference/assistants/createAssistant) + * of the assistant. This is useful for modifying the behavior on a per-run basis. + */ + instructions?: string | null; + + /** + * Body param: The maximum number of completion tokens that may be used over the + * course of the run. The run will make a best effort to use only the number of + * completion tokens specified, across multiple turns of the run. If the run + * exceeds the number of completion tokens specified, the run will end with status + * `incomplete`. See `incomplete_details` for more info. + */ + max_completion_tokens?: number | null; + + /** + * Body param: The maximum number of prompt tokens that may be used over the course + * of the run. The run will make a best effort to use only the number of prompt + * tokens specified, across multiple turns of the run. If the run exceeds the + * number of prompt tokens specified, the run will end with status `incomplete`. + * See `incomplete_details` for more info. + */ + max_prompt_tokens?: number | null; + + /** + * Body param: Set of 16 key-value pairs that can be attached to an object. This + * can be useful for storing additional information about the object in a + * structured format. Keys can be a maximum of 64 characters long and values can be + * a maxium of 512 characters long. + */ + metadata?: unknown | null; + + /** + * Body param: The ID of the + * [Model](https://platform.openai.com/docs/api-reference/models) to be used to + * execute this run. If a value is provided here, it will override the model + * associated with the assistant. If not, the model associated with the assistant + * will be used. + */ + model?: (string & {}) | ChatAPI.ChatModel | null; + + /** + * Body param: Whether to enable + * [parallel function calling](https://platform.openai.com/docs/guides/function-calling#configuring-parallel-function-calling) + * during tool use. + */ + parallel_tool_calls?: boolean; + + /** + * Body param: Specifies the format that the model must output. Compatible with + * [GPT-4o](https://platform.openai.com/docs/models#gpt-4o), + * [GPT-4 Turbo](https://platform.openai.com/docs/models#gpt-4-turbo-and-gpt-4), + * and all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. + * + * Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured + * Outputs which ensures the model will match your supplied JSON schema. Learn more + * in the + * [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). + * + * Setting to `{ "type": "json_object" }` enables JSON mode, which ensures the + * message the model generates is valid JSON. + * + * **Important:** when using JSON mode, you **must** also instruct the model to + * produce JSON yourself via a system or user message. 
Without this, the model may + * generate an unending stream of whitespace until the generation reaches the token + * limit, resulting in a long-running and seemingly "stuck" request. Also note that + * the message content may be partially cut off if `finish_reason="length"`, which + * indicates the generation exceeded `max_tokens` or the conversation exceeded the + * max context length. + */ + response_format?: ThreadsAPI.AssistantResponseFormatOption | null; + + /** + * Body param: If `true`, returns a stream of events that happen during the Run as + * server-sent events, terminating when the Run enters a terminal state with a + * `data: [DONE]` message. + */ + stream?: boolean | null; + + /** + * Body param: What sampling temperature to use, between 0 and 2. Higher values + * like 0.8 will make the output more random, while lower values like 0.2 will make + * it more focused and deterministic. + */ + temperature?: number | null; + + /** + * Body param: Controls which (if any) tool is called by the model. `none` means + * the model will not call any tools and instead generates a message. `auto` is the + * default value and means the model can pick between generating a message or + * calling one or more tools. `required` means the model must call one or more + * tools before responding to the user. Specifying a particular tool like + * `{"type": "file_search"}` or + * `{"type": "function", "function": {"name": "my_function"}}` forces the model to + * call that tool. + */ + tool_choice?: ThreadsAPI.AssistantToolChoiceOption | null; + + /** + * Body param: Override the tools the assistant can use for this run. This is + * useful for modifying the behavior on a per-run basis. + */ + tools?: Array | null; + + /** + * Body param: An alternative to sampling with temperature, called nucleus + * sampling, where the model considers the results of the tokens with top_p + * probability mass. So 0.1 means only the tokens comprising the top 10% + * probability mass are considered. + * + * We generally recommend altering this or temperature but not both. + */ + top_p?: number | null; + + /** + * Body param: Controls for how a thread will be truncated prior to the run. Use + * this to control the intial context window of the run. + */ + truncation_strategy?: RunCreateParams.TruncationStrategy | null; +} + +export namespace RunCreateParams { + export interface AdditionalMessage { + /** + * The text contents of the message. + */ + content: string | Array; + + /** + * The role of the entity that is creating the message. Allowed values include: + * + * - `user`: Indicates the message is sent by an actual user and should be used in + * most cases to represent user-generated messages. + * - `assistant`: Indicates the message is generated by the assistant. Use this + * value to insert messages from the assistant into the conversation. + */ + role: 'user' | 'assistant'; + + /** + * A list of files attached to the message, and the tools they should be added to. + */ + attachments?: Array | null; + + /** + * Set of 16 key-value pairs that can be attached to an object. This can be useful + * for storing additional information about the object in a structured format. Keys + * can be a maximum of 64 characters long and values can be a maxium of 512 + * characters long. + */ + metadata?: unknown | null; + } + + export namespace AdditionalMessage { + export interface Attachment { + /** + * The ID of the file to attach to the message. + */ + file_id?: string; + + /** + * The tools to add this file to. 
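A sketch of a `runs.create` call exercising the per-run overrides described above; `client`, `threadId`, and `assistantId` are the same placeholders as in the earlier sketch, and the string values are illustrative:

const run = await client.beta.threads.runs.create(threadId, {
  assistant_id: assistantId,
  additional_instructions: 'Answer in one short paragraph.', // appended to the assistant's instructions
  additional_messages: [{ role: 'user', content: 'Summarize the attached report.' }],
  truncation_strategy: { type: 'last_messages', last_messages: 10 },
  metadata: { source: 'docs-example' }, // arbitrary key/value strings
});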
+ */ + tools?: Array; + } + + export namespace Attachment { + export interface FileSearch { + /** + * The type of tool being defined: `file_search` + */ + type: 'file_search'; + } + } + } + + /** + * Controls for how a thread will be truncated prior to the run. Use this to + * control the intial context window of the run. + */ + export interface TruncationStrategy { + /** + * The truncation strategy to use for the thread. The default is `auto`. If set to + * `last_messages`, the thread will be truncated to the n most recent messages in + * the thread. When set to `auto`, messages in the middle of the thread will be + * dropped to fit the context length of the model, `max_prompt_tokens`. + */ + type: 'auto' | 'last_messages'; + + /** + * The number of most recent messages from the thread when constructing the context + * for the run. + */ + last_messages?: number | null; + } + + export type RunCreateParamsNonStreaming = RunsAPI.RunCreateParamsNonStreaming; + export type RunCreateParamsStreaming = RunsAPI.RunCreateParamsStreaming; +} + +export interface RunCreateParamsNonStreaming extends RunCreateParamsBase { + /** + * Body param: If `true`, returns a stream of events that happen during the Run as + * server-sent events, terminating when the Run enters a terminal state with a + * `data: [DONE]` message. + */ + stream?: false | null; +} + +export interface RunCreateParamsStreaming extends RunCreateParamsBase { + /** + * Body param: If `true`, returns a stream of events that happen during the Run as + * server-sent events, terminating when the Run enters a terminal state with a + * `data: [DONE]` message. + */ + stream: true; +} + +export interface RunUpdateParams { + /** + * Set of 16 key-value pairs that can be attached to an object. This can be useful + * for storing additional information about the object in a structured format. Keys + * can be a maximum of 64 characters long and values can be a maxium of 512 + * characters long. + */ + metadata?: unknown | null; +} + +export interface RunListParams extends CursorPageParams { + /** + * A cursor for use in pagination. `before` is an object ID that defines your place + * in the list. For instance, if you make a list request and receive 100 objects, + * starting with obj_foo, your subsequent call can include before=obj_foo in order + * to fetch the previous page of the list. + */ + before?: string; + + /** + * Sort order by the `created_at` timestamp of the objects. `asc` for ascending + * order and `desc` for descending order. + */ + order?: 'asc' | 'desc'; +} + +export interface RunCreateAndPollParams { + /** + * The ID of the + * [assistant](https://platform.openai.com/docs/api-reference/assistants) to use to + * execute this run. + */ + assistant_id: string; + + /** + * Appends additional instructions at the end of the instructions for the run. This + * is useful for modifying the behavior on a per-run basis without overriding other + * instructions. + */ + additional_instructions?: string | null; + + /** + * Adds additional messages to the thread before creating the run. + */ + additional_messages?: Array | null; + + /** + * Overrides the + * [instructions](https://platform.openai.com/docs/api-reference/assistants/createAssistant) + * of the assistant. This is useful for modifying the behavior on a per-run basis. + */ + instructions?: string | null; + + /** + * The maximum number of completion tokens that may be used over the course of the + * run. 
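A sketch of `RunListParams` and `RunUpdateParams` in use; the SDK's cursor pages are async-iterable, so pagination is handled for you (same placeholders as above):

// Newest runs first; `limit` comes from CursorPageParams.
for await (const run of client.beta.threads.runs.list(threadId, { order: 'desc', limit: 20 })) {
  console.log(run.id, run.status);
}

// Attach metadata to a single run (placeholder key/value).
await client.beta.threads.runs.update(threadId, runId, { metadata: { reviewed: 'true' } });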
The run will make a best effort to use only the number of completion tokens + * specified, across multiple turns of the run. If the run exceeds the number of + * completion tokens specified, the run will end with status `incomplete`. See + * `incomplete_details` for more info. + */ + max_completion_tokens?: number | null; + + /** + * The maximum number of prompt tokens that may be used over the course of the run. + * The run will make a best effort to use only the number of prompt tokens + * specified, across multiple turns of the run. If the run exceeds the number of + * prompt tokens specified, the run will end with status `incomplete`. See + * `incomplete_details` for more info. + */ + max_prompt_tokens?: number | null; + + /** + * Set of 16 key-value pairs that can be attached to an object. This can be useful + * for storing additional information about the object in a structured format. Keys + * can be a maximum of 64 characters long and values can be a maxium of 512 + * characters long. + */ + metadata?: unknown | null; + + /** + * The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to + * be used to execute this run. If a value is provided here, it will override the + * model associated with the assistant. If not, the model associated with the + * assistant will be used. + */ + model?: + | (string & {}) + | 'gpt-4o' + | 'gpt-4o-2024-05-13' + | 'gpt-4-turbo' + | 'gpt-4-turbo-2024-04-09' + | 'gpt-4-0125-preview' + | 'gpt-4-turbo-preview' + | 'gpt-4-1106-preview' + | 'gpt-4-vision-preview' + | 'gpt-4' + | 'gpt-4-0314' + | 'gpt-4-0613' + | 'gpt-4-32k' + | 'gpt-4-32k-0314' + | 'gpt-4-32k-0613' + | 'gpt-3.5-turbo' + | 'gpt-3.5-turbo-16k' + | 'gpt-3.5-turbo-0613' + | 'gpt-3.5-turbo-1106' + | 'gpt-3.5-turbo-0125' + | 'gpt-3.5-turbo-16k-0613' + | null; + + /** + * Specifies the format that the model must output. Compatible with + * [GPT-4o](https://platform.openai.com/docs/models/gpt-4o), + * [GPT-4 Turbo](https://platform.openai.com/docs/models/gpt-4-turbo-and-gpt-4), + * and all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. + * + * Setting to `{ "type": "json_object" }` enables JSON mode, which guarantees the + * message the model generates is valid JSON. + * + * **Important:** when using JSON mode, you **must** also instruct the model to + * produce JSON yourself via a system or user message. Without this, the model may + * generate an unending stream of whitespace until the generation reaches the token + * limit, resulting in a long-running and seemingly "stuck" request. Also note that + * the message content may be partially cut off if `finish_reason="length"`, which + * indicates the generation exceeded `max_tokens` or the conversation exceeded the + * max context length. + */ + response_format?: ThreadsAPI.AssistantResponseFormatOption | null; + + /** + * What sampling temperature to use, between 0 and 2. Higher values like 0.8 will + * make the output more random, while lower values like 0.2 will make it more + * focused and deterministic. + */ + temperature?: number | null; + + /** + * Controls which (if any) tool is called by the model. `none` means the model will + * not call any tools and instead generates a message. `auto` is the default value + * and means the model can pick between generating a message or calling one or more + * tools. `required` means the model must call one or more tools before responding + * to the user. 
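A sketch of the create-and-poll helper these params back, which blocks until the run reaches a terminal state; the model override and token cap are illustrative values:

const run = await client.beta.threads.runs.createAndPoll(threadId, {
  assistant_id: assistantId,
  model: 'gpt-4o',              // overrides the assistant's default model for this run only
  max_completion_tokens: 1024,  // run ends as `incomplete` if this budget is exceeded
});
console.log(run.status); // 'completed', 'incomplete', 'failed', ...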
Specifying a particular tool like `{"type": "file_search"}` or + * `{"type": "function", "function": {"name": "my_function"}}` forces the model to + * call that tool. + */ + tool_choice?: ThreadsAPI.AssistantToolChoiceOption | null; + + /** + * Override the tools the assistant can use for this run. This is useful for + * modifying the behavior on a per-run basis. + */ + tools?: Array | null; + + /** + * An alternative to sampling with temperature, called nucleus sampling, where the + * model considers the results of the tokens with top_p probability mass. So 0.1 + * means only the tokens comprising the top 10% probability mass are considered. + * + * We generally recommend altering this or temperature but not both. + */ + top_p?: number | null; + + /** + * Controls for how a thread will be truncated prior to the run. Use this to + * control the intial context window of the run. + */ + truncation_strategy?: RunCreateAndPollParams.TruncationStrategy | null; +} + +export namespace RunCreateAndPollParams { + export interface AdditionalMessage { + /** + * The text contents of the message. + */ + content: string | Array; + + /** + * The role of the entity that is creating the message. Allowed values include: + * + * - `user`: Indicates the message is sent by an actual user and should be used in + * most cases to represent user-generated messages. + * - `assistant`: Indicates the message is generated by the assistant. Use this + * value to insert messages from the assistant into the conversation. + */ + role: 'user' | 'assistant'; + + /** + * A list of files attached to the message, and the tools they should be added to. + */ + attachments?: Array | null; + + /** + * Set of 16 key-value pairs that can be attached to an object. This can be useful + * for storing additional information about the object in a structured format. Keys + * can be a maximum of 64 characters long and values can be a maxium of 512 + * characters long. + */ + metadata?: unknown | null; + } + + export namespace AdditionalMessage { + export interface Attachment { + /** + * The ID of the file to attach to the message. + */ + file_id?: string; + + /** + * The tools to add this file to. + */ + tools?: Array; + } + } + + /** + * Controls for how a thread will be truncated prior to the run. Use this to + * control the intial context window of the run. + */ + export interface TruncationStrategy { + /** + * The truncation strategy to use for the thread. The default is `auto`. If set to + * `last_messages`, the thread will be truncated to the n most recent messages in + * the thread. When set to `auto`, messages in the middle of the thread will be + * dropped to fit the context length of the model, `max_prompt_tokens`. + */ + type: 'auto' | 'last_messages'; + + /** + * The number of most recent messages from the thread when constructing the context + * for the run. + */ + last_messages?: number | null; + } +} + +export interface RunCreateAndStreamParams { + /** + * The ID of the + * [assistant](https://platform.openai.com/docs/api-reference/assistants) to use to + * execute this run. + */ + assistant_id: string; + + /** + * Appends additional instructions at the end of the instructions for the run. This + * is useful for modifying the behavior on a per-run basis without overriding other + * instructions. + */ + additional_instructions?: string | null; + + /** + * Adds additional messages to the thread before creating the run. 
+ */ + additional_messages?: Array | null; + + /** + * Overrides the + * [instructions](https://platform.openai.com/docs/api-reference/assistants/createAssistant) + * of the assistant. This is useful for modifying the behavior on a per-run basis. + */ + instructions?: string | null; + + /** + * The maximum number of completion tokens that may be used over the course of the + * run. The run will make a best effort to use only the number of completion tokens + * specified, across multiple turns of the run. If the run exceeds the number of + * completion tokens specified, the run will end with status `incomplete`. See + * `incomplete_details` for more info. + */ + max_completion_tokens?: number | null; + + /** + * The maximum number of prompt tokens that may be used over the course of the run. + * The run will make a best effort to use only the number of prompt tokens + * specified, across multiple turns of the run. If the run exceeds the number of + * prompt tokens specified, the run will end with status `incomplete`. See + * `incomplete_details` for more info. + */ + max_prompt_tokens?: number | null; + + /** + * Set of 16 key-value pairs that can be attached to an object. This can be useful + * for storing additional information about the object in a structured format. Keys + * can be a maximum of 64 characters long and values can be a maxium of 512 + * characters long. + */ + metadata?: unknown | null; + + /** + * The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to + * be used to execute this run. If a value is provided here, it will override the + * model associated with the assistant. If not, the model associated with the + * assistant will be used. + */ + model?: + | (string & {}) + | 'gpt-4o' + | 'gpt-4o-2024-05-13' + | 'gpt-4-turbo' + | 'gpt-4-turbo-2024-04-09' + | 'gpt-4-0125-preview' + | 'gpt-4-turbo-preview' + | 'gpt-4-1106-preview' + | 'gpt-4-vision-preview' + | 'gpt-4' + | 'gpt-4-0314' + | 'gpt-4-0613' + | 'gpt-4-32k' + | 'gpt-4-32k-0314' + | 'gpt-4-32k-0613' + | 'gpt-3.5-turbo' + | 'gpt-3.5-turbo-16k' + | 'gpt-3.5-turbo-0613' + | 'gpt-3.5-turbo-1106' + | 'gpt-3.5-turbo-0125' + | 'gpt-3.5-turbo-16k-0613' + | null; + + /** + * Specifies the format that the model must output. Compatible with + * [GPT-4o](https://platform.openai.com/docs/models/gpt-4o), + * [GPT-4 Turbo](https://platform.openai.com/docs/models/gpt-4-turbo-and-gpt-4), + * and all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. + * + * Setting to `{ "type": "json_object" }` enables JSON mode, which guarantees the + * message the model generates is valid JSON. + * + * **Important:** when using JSON mode, you **must** also instruct the model to + * produce JSON yourself via a system or user message. Without this, the model may + * generate an unending stream of whitespace until the generation reaches the token + * limit, resulting in a long-running and seemingly "stuck" request. Also note that + * the message content may be partially cut off if `finish_reason="length"`, which + * indicates the generation exceeded `max_tokens` or the conversation exceeded the + * max context length. + */ + response_format?: ThreadsAPI.AssistantResponseFormatOption | null; + + /** + * What sampling temperature to use, between 0 and 2. Higher values like 0.8 will + * make the output more random, while lower values like 0.2 will make it more + * focused and deterministic. + */ + temperature?: number | null; + + /** + * Controls which (if any) tool is called by the model. 
`none` means the model will + * not call any tools and instead generates a message. `auto` is the default value + * and means the model can pick between generating a message or calling one or more + * tools. `required` means the model must call one or more tools before responding + * to the user. Specifying a particular tool like `{"type": "file_search"}` or + * `{"type": "function", "function": {"name": "my_function"}}` forces the model to + * call that tool. + */ + tool_choice?: ThreadsAPI.AssistantToolChoiceOption | null; + + /** + * Override the tools the assistant can use for this run. This is useful for + * modifying the behavior on a per-run basis. + */ + tools?: Array | null; + + /** + * An alternative to sampling with temperature, called nucleus sampling, where the + * model considers the results of the tokens with top_p probability mass. So 0.1 + * means only the tokens comprising the top 10% probability mass are considered. + * + * We generally recommend altering this or temperature but not both. + */ + top_p?: number | null; + + /** + * Controls for how a thread will be truncated prior to the run. Use this to + * control the intial context window of the run. + */ + truncation_strategy?: RunCreateAndStreamParams.TruncationStrategy | null; +} + +export namespace RunCreateAndStreamParams { + export interface AdditionalMessage { + /** + * The text contents of the message. + */ + content: string | Array; + + /** + * The role of the entity that is creating the message. Allowed values include: + * + * - `user`: Indicates the message is sent by an actual user and should be used in + * most cases to represent user-generated messages. + * - `assistant`: Indicates the message is generated by the assistant. Use this + * value to insert messages from the assistant into the conversation. + */ + role: 'user' | 'assistant'; + + /** + * A list of files attached to the message, and the tools they should be added to. + */ + attachments?: Array | null; + + /** + * Set of 16 key-value pairs that can be attached to an object. This can be useful + * for storing additional information about the object in a structured format. Keys + * can be a maximum of 64 characters long and values can be a maxium of 512 + * characters long. + */ + metadata?: unknown | null; + } + + export namespace AdditionalMessage { + export interface Attachment { + /** + * The ID of the file to attach to the message. + */ + file_id?: string; + + /** + * The tools to add this file to. + */ + tools?: Array; + } + } + + /** + * Controls for how a thread will be truncated prior to the run. Use this to + * control the intial context window of the run. + */ + export interface TruncationStrategy { + /** + * The truncation strategy to use for the thread. The default is `auto`. If set to + * `last_messages`, the thread will be truncated to the n most recent messages in + * the thread. When set to `auto`, messages in the middle of the thread will be + * dropped to fit the context length of the model, `max_prompt_tokens`. + */ + type: 'auto' | 'last_messages'; + + /** + * The number of most recent messages from the thread when constructing the context + * for the run. + */ + last_messages?: number | null; + } +} + +export interface RunStreamParams { + /** + * The ID of the + * [assistant](https://platform.openai.com/docs/api-reference/assistants) to use to + * execute this run. + */ + assistant_id: string; + + /** + * Appends additional instructions at the end of the instructions for the run. 
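A sketch of the streaming helper these params correspond to; `stream()` returns an `AssistantStream` with typed event handlers (same placeholders as above):

const stream = client.beta.threads.runs.stream(threadId, { assistant_id: assistantId });

stream
  .on('textDelta', (delta) => process.stdout.write(delta.value ?? ''))    // incremental message text
  .on('runStepDone', (step) => console.log('\nstep finished:', step.type));

const finalRun = await stream.finalRun(); // resolves once the run is terminal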
This + * is useful for modifying the behavior on a per-run basis without overriding other + * instructions. + */ + additional_instructions?: string | null; + + /** + * Adds additional messages to the thread before creating the run. + */ + additional_messages?: Array | null; + + /** + * Overrides the + * [instructions](https://platform.openai.com/docs/api-reference/assistants/createAssistant) + * of the assistant. This is useful for modifying the behavior on a per-run basis. + */ + instructions?: string | null; + + /** + * The maximum number of completion tokens that may be used over the course of the + * run. The run will make a best effort to use only the number of completion tokens + * specified, across multiple turns of the run. If the run exceeds the number of + * completion tokens specified, the run will end with status `incomplete`. See + * `incomplete_details` for more info. + */ + max_completion_tokens?: number | null; + + /** + * The maximum number of prompt tokens that may be used over the course of the run. + * The run will make a best effort to use only the number of prompt tokens + * specified, across multiple turns of the run. If the run exceeds the number of + * prompt tokens specified, the run will end with status `incomplete`. See + * `incomplete_details` for more info. + */ + max_prompt_tokens?: number | null; + + /** + * Set of 16 key-value pairs that can be attached to an object. This can be useful + * for storing additional information about the object in a structured format. Keys + * can be a maximum of 64 characters long and values can be a maxium of 512 + * characters long. + */ + metadata?: unknown | null; + + /** + * The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to + * be used to execute this run. If a value is provided here, it will override the + * model associated with the assistant. If not, the model associated with the + * assistant will be used. + */ + model?: + | (string & {}) + | 'gpt-4o' + | 'gpt-4o-2024-05-13' + | 'gpt-4-turbo' + | 'gpt-4-turbo-2024-04-09' + | 'gpt-4-0125-preview' + | 'gpt-4-turbo-preview' + | 'gpt-4-1106-preview' + | 'gpt-4-vision-preview' + | 'gpt-4' + | 'gpt-4-0314' + | 'gpt-4-0613' + | 'gpt-4-32k' + | 'gpt-4-32k-0314' + | 'gpt-4-32k-0613' + | 'gpt-3.5-turbo' + | 'gpt-3.5-turbo-16k' + | 'gpt-3.5-turbo-0613' + | 'gpt-3.5-turbo-1106' + | 'gpt-3.5-turbo-0125' + | 'gpt-3.5-turbo-16k-0613' + | null; + + /** + * Specifies the format that the model must output. Compatible with + * [GPT-4o](https://platform.openai.com/docs/models/gpt-4o), + * [GPT-4 Turbo](https://platform.openai.com/docs/models/gpt-4-turbo-and-gpt-4), + * and all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. + * + * Setting to `{ "type": "json_object" }` enables JSON mode, which guarantees the + * message the model generates is valid JSON. + * + * **Important:** when using JSON mode, you **must** also instruct the model to + * produce JSON yourself via a system or user message. Without this, the model may + * generate an unending stream of whitespace until the generation reaches the token + * limit, resulting in a long-running and seemingly "stuck" request. Also note that + * the message content may be partially cut off if `finish_reason="length"`, which + * indicates the generation exceeded `max_tokens` or the conversation exceeded the + * max context length. + */ + response_format?: ThreadsAPI.AssistantResponseFormatOption | null; + + /** + * What sampling temperature to use, between 0 and 2. 
Higher values like 0.8 will + * make the output more random, while lower values like 0.2 will make it more + * focused and deterministic. + */ + temperature?: number | null; + + /** + * Controls which (if any) tool is called by the model. `none` means the model will + * not call any tools and instead generates a message. `auto` is the default value + * and means the model can pick between generating a message or calling one or more + * tools. `required` means the model must call one or more tools before responding + * to the user. Specifying a particular tool like `{"type": "file_search"}` or + * `{"type": "function", "function": {"name": "my_function"}}` forces the model to + * call that tool. + */ + tool_choice?: ThreadsAPI.AssistantToolChoiceOption | null; + + /** + * Override the tools the assistant can use for this run. This is useful for + * modifying the behavior on a per-run basis. + */ + tools?: Array | null; + + /** + * An alternative to sampling with temperature, called nucleus sampling, where the + * model considers the results of the tokens with top_p probability mass. So 0.1 + * means only the tokens comprising the top 10% probability mass are considered. + * + * We generally recommend altering this or temperature but not both. + */ + top_p?: number | null; + + /** + * Controls for how a thread will be truncated prior to the run. Use this to + * control the intial context window of the run. + */ + truncation_strategy?: RunStreamParams.TruncationStrategy | null; +} + +export namespace RunStreamParams { + export interface AdditionalMessage { + /** + * The text contents of the message. + */ + content: string | Array; + + /** + * The role of the entity that is creating the message. Allowed values include: + * + * - `user`: Indicates the message is sent by an actual user and should be used in + * most cases to represent user-generated messages. + * - `assistant`: Indicates the message is generated by the assistant. Use this + * value to insert messages from the assistant into the conversation. + */ + role: 'user' | 'assistant'; + + /** + * A list of files attached to the message, and the tools they should be added to. + */ + attachments?: Array | null; + + /** + * Set of 16 key-value pairs that can be attached to an object. This can be useful + * for storing additional information about the object in a structured format. Keys + * can be a maximum of 64 characters long and values can be a maxium of 512 + * characters long. + */ + metadata?: unknown | null; + } + + export namespace AdditionalMessage { + export interface Attachment { + /** + * The ID of the file to attach to the message. + */ + file_id?: string; + + /** + * The tools to add this file to. + */ + tools?: Array; + } + } + + /** + * Controls for how a thread will be truncated prior to the run. Use this to + * control the intial context window of the run. + */ + export interface TruncationStrategy { + /** + * The truncation strategy to use for the thread. The default is `auto`. If set to + * `last_messages`, the thread will be truncated to the n most recent messages in + * the thread. When set to `auto`, messages in the middle of the thread will be + * dropped to fit the context length of the model, `max_prompt_tokens`. + */ + type: 'auto' | 'last_messages'; + + /** + * The number of most recent messages from the thread when constructing the context + * for the run. 
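A sketch of pinning a single tool for one run via `tool_choice`, as described above; `lookup_order` is a hypothetical function tool name:

const run = await client.beta.threads.runs.createAndPoll(threadId, {
  assistant_id: assistantId,
  tool_choice: { type: 'function', function: { name: 'lookup_order' } }, // the model must call this tool
  temperature: 0.2, // keep the eventual answer focused
});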
+ */ + last_messages?: number | null; + } +} + +export type RunSubmitToolOutputsParams = + | RunSubmitToolOutputsParamsNonStreaming + | RunSubmitToolOutputsParamsStreaming; + +export interface RunSubmitToolOutputsParamsBase { + /** + * A list of tools for which the outputs are being submitted. + */ + tool_outputs: Array; + + /** + * If `true`, returns a stream of events that happen during the Run as server-sent + * events, terminating when the Run enters a terminal state with a `data: [DONE]` + * message. + */ + stream?: boolean | null; +} + +export namespace RunSubmitToolOutputsParams { + export interface ToolOutput { + /** + * The output of the tool call to be submitted to continue the run. + */ + output?: string; + + /** + * The ID of the tool call in the `required_action` object within the run object + * the output is being submitted for. + */ + tool_call_id?: string; + } + + export type RunSubmitToolOutputsParamsNonStreaming = RunsAPI.RunSubmitToolOutputsParamsNonStreaming; + export type RunSubmitToolOutputsParamsStreaming = RunsAPI.RunSubmitToolOutputsParamsStreaming; +} + +export interface RunSubmitToolOutputsParamsNonStreaming extends RunSubmitToolOutputsParamsBase { + /** + * If `true`, returns a stream of events that happen during the Run as server-sent + * events, terminating when the Run enters a terminal state with a `data: [DONE]` + * message. + */ + stream?: false | null; +} + +export interface RunSubmitToolOutputsParamsStreaming extends RunSubmitToolOutputsParamsBase { + /** + * If `true`, returns a stream of events that happen during the Run as server-sent + * events, terminating when the Run enters a terminal state with a `data: [DONE]` + * message. + */ + stream: true; +} + +export interface RunSubmitToolOutputsAndPollParams { + /** + * A list of tools for which the outputs are being submitted. + */ + tool_outputs: Array; +} + +export namespace RunSubmitToolOutputsAndPollParams { + export interface ToolOutput { + /** + * The output of the tool call to be submitted to continue the run. + */ + output?: string; + + /** + * The ID of the tool call in the `required_action` object within the run object + * the output is being submitted for. + */ + tool_call_id?: string; + } +} + +export interface RunSubmitToolOutputsStreamParams { + /** + * A list of tools for which the outputs are being submitted. + */ + tool_outputs: Array; +} + +export namespace RunSubmitToolOutputsStreamParams { + export interface ToolOutput { + /** + * The output of the tool call to be submitted to continue the run. + */ + output?: string; + + /** + * The ID of the tool call in the `required_action` object within the run object + * the output is being submitted for. 
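A sketch of the `requires_action` round trip these submit-tool-outputs params describe: read the pending tool calls, compute outputs, and submit them with the polling variant (the `{ ok: true }` payload is a placeholder result):

async function resolveToolCalls(run: OpenAI.Beta.Threads.Run) {
  if (run.status !== 'requires_action' || !run.required_action) return run;

  const tool_outputs = run.required_action.submit_tool_outputs.tool_calls.map((call) => ({
    tool_call_id: call.id,
    output: JSON.stringify({ ok: true }), // real code would dispatch on call.function.name
  }));

  return client.beta.threads.runs.submitToolOutputsAndPoll(run.thread_id, run.id, { tool_outputs });
}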
+ */ + tool_call_id?: string; + } +} + +Runs.RunsPage = RunsPage; +Runs.Steps = Steps; +Runs.RunStepsPage = RunStepsPage; + +export declare namespace Runs { + export { + type RequiredActionFunctionToolCall as RequiredActionFunctionToolCall, + type Run as Run, + type RunStatus as RunStatus, + RunsPage as RunsPage, + type RunCreateParams as RunCreateParams, + type RunCreateParamsNonStreaming as RunCreateParamsNonStreaming, + type RunCreateParamsStreaming as RunCreateParamsStreaming, + type RunUpdateParams as RunUpdateParams, + type RunListParams as RunListParams, + type RunCreateAndPollParams, + type RunCreateAndStreamParams, + type RunStreamParams, + type RunSubmitToolOutputsParams as RunSubmitToolOutputsParams, + type RunSubmitToolOutputsParamsNonStreaming as RunSubmitToolOutputsParamsNonStreaming, + type RunSubmitToolOutputsParamsStreaming as RunSubmitToolOutputsParamsStreaming, + type RunSubmitToolOutputsAndPollParams, + type RunSubmitToolOutputsStreamParams, + }; + + export { + Steps as Steps, + type CodeInterpreterLogs as CodeInterpreterLogs, + type CodeInterpreterOutputImage as CodeInterpreterOutputImage, + type CodeInterpreterToolCall as CodeInterpreterToolCall, + type CodeInterpreterToolCallDelta as CodeInterpreterToolCallDelta, + type FileSearchToolCall as FileSearchToolCall, + type FileSearchToolCallDelta as FileSearchToolCallDelta, + type FunctionToolCall as FunctionToolCall, + type FunctionToolCallDelta as FunctionToolCallDelta, + type MessageCreationStepDetails as MessageCreationStepDetails, + type RunStep as RunStep, + type RunStepDelta as RunStepDelta, + type RunStepDeltaEvent as RunStepDeltaEvent, + type RunStepDeltaMessageDelta as RunStepDeltaMessageDelta, + type RunStepInclude as RunStepInclude, + type ToolCall as ToolCall, + type ToolCallDelta as ToolCallDelta, + type ToolCallDeltaObject as ToolCallDeltaObject, + type ToolCallsStepDetails as ToolCallsStepDetails, + RunStepsPage as RunStepsPage, + type StepRetrieveParams as StepRetrieveParams, + type StepListParams as StepListParams, + }; +} diff --git a/node_modules/openai/src/resources/beta/threads/runs/steps.ts b/node_modules/openai/src/resources/beta/threads/runs/steps.ts new file mode 100644 index 0000000..6c6722b --- /dev/null +++ b/node_modules/openai/src/resources/beta/threads/runs/steps.ts @@ -0,0 +1,767 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +import { APIResource } from '../../../../resource'; +import { isRequestOptions } from '../../../../core'; +import * as Core from '../../../../core'; +import * as StepsAPI from './steps'; +import { CursorPage, type CursorPageParams } from '../../../../pagination'; + +export class Steps extends APIResource { + /** + * Retrieves a run step. + */ + retrieve( + threadId: string, + runId: string, + stepId: string, + query?: StepRetrieveParams, + options?: Core.RequestOptions, + ): Core.APIPromise; + retrieve( + threadId: string, + runId: string, + stepId: string, + options?: Core.RequestOptions, + ): Core.APIPromise; + retrieve( + threadId: string, + runId: string, + stepId: string, + query: StepRetrieveParams | Core.RequestOptions = {}, + options?: Core.RequestOptions, + ): Core.APIPromise { + if (isRequestOptions(query)) { + return this.retrieve(threadId, runId, stepId, {}, query); + } + return this._client.get(`/threads/${threadId}/runs/${runId}/steps/${stepId}`, { + query, + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + }); + } + + /** + * Returns a list of run steps belonging to a run. 
+ */ + list( + threadId: string, + runId: string, + query?: StepListParams, + options?: Core.RequestOptions, + ): Core.PagePromise; + list( + threadId: string, + runId: string, + options?: Core.RequestOptions, + ): Core.PagePromise; + list( + threadId: string, + runId: string, + query: StepListParams | Core.RequestOptions = {}, + options?: Core.RequestOptions, + ): Core.PagePromise { + if (isRequestOptions(query)) { + return this.list(threadId, runId, {}, query); + } + return this._client.getAPIList(`/threads/${threadId}/runs/${runId}/steps`, RunStepsPage, { + query, + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + }); + } +} + +export class RunStepsPage extends CursorPage {} + +/** + * Text output from the Code Interpreter tool call as part of a run step. + */ +export interface CodeInterpreterLogs { + /** + * The index of the output in the outputs array. + */ + index: number; + + /** + * Always `logs`. + */ + type: 'logs'; + + /** + * The text output from the Code Interpreter tool call. + */ + logs?: string; +} + +export interface CodeInterpreterOutputImage { + /** + * The index of the output in the outputs array. + */ + index: number; + + /** + * Always `image`. + */ + type: 'image'; + + image?: CodeInterpreterOutputImage.Image; +} + +export namespace CodeInterpreterOutputImage { + export interface Image { + /** + * The [file](https://platform.openai.com/docs/api-reference/files) ID of the + * image. + */ + file_id?: string; + } +} + +/** + * Details of the Code Interpreter tool call the run step was involved in. + */ +export interface CodeInterpreterToolCall { + /** + * The ID of the tool call. + */ + id: string; + + /** + * The Code Interpreter tool call definition. + */ + code_interpreter: CodeInterpreterToolCall.CodeInterpreter; + + /** + * The type of tool call. This is always going to be `code_interpreter` for this + * type of tool call. + */ + type: 'code_interpreter'; +} + +export namespace CodeInterpreterToolCall { + /** + * The Code Interpreter tool call definition. + */ + export interface CodeInterpreter { + /** + * The input to the Code Interpreter tool call. + */ + input: string; + + /** + * The outputs from the Code Interpreter tool call. Code Interpreter can output one + * or more items, including text (`logs`) or images (`image`). Each of these are + * represented by a different object type. + */ + outputs: Array; + } + + export namespace CodeInterpreter { + /** + * Text output from the Code Interpreter tool call as part of a run step. + */ + export interface Logs { + /** + * The text output from the Code Interpreter tool call. + */ + logs: string; + + /** + * Always `logs`. + */ + type: 'logs'; + } + + export interface Image { + image: Image.Image; + + /** + * Always `image`. + */ + type: 'image'; + } + + export namespace Image { + export interface Image { + /** + * The [file](https://platform.openai.com/docs/api-reference/files) ID of the + * image. + */ + file_id: string; + } + } + } +} + +/** + * Details of the Code Interpreter tool call the run step was involved in. + */ +export interface CodeInterpreterToolCallDelta { + /** + * The index of the tool call in the tool calls array. + */ + index: number; + + /** + * The type of tool call. This is always going to be `code_interpreter` for this + * type of tool call. + */ + type: 'code_interpreter'; + + /** + * The ID of the tool call. + */ + id?: string; + + /** + * The Code Interpreter tool call definition. 
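A sketch of walking a finished run's steps with the `Steps` resource defined here (same placeholders as above):

const steps = await client.beta.threads.runs.steps.list(threadId, runId, { order: 'asc' });

for (const step of steps.data) {
  if (step.step_details.type === 'tool_calls') {
    console.log('tool calls in step:', step.step_details.tool_calls.length);
  } else {
    console.log('created message:', step.step_details.message_creation.message_id);
  }
}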
+ */ + code_interpreter?: CodeInterpreterToolCallDelta.CodeInterpreter; +} + +export namespace CodeInterpreterToolCallDelta { + /** + * The Code Interpreter tool call definition. + */ + export interface CodeInterpreter { + /** + * The input to the Code Interpreter tool call. + */ + input?: string; + + /** + * The outputs from the Code Interpreter tool call. Code Interpreter can output one + * or more items, including text (`logs`) or images (`image`). Each of these are + * represented by a different object type. + */ + outputs?: Array; + } +} + +export interface FileSearchToolCall { + /** + * The ID of the tool call object. + */ + id: string; + + /** + * For now, this is always going to be an empty object. + */ + file_search: FileSearchToolCall.FileSearch; + + /** + * The type of tool call. This is always going to be `file_search` for this type of + * tool call. + */ + type: 'file_search'; +} + +export namespace FileSearchToolCall { + /** + * For now, this is always going to be an empty object. + */ + export interface FileSearch { + /** + * The ranking options for the file search. + */ + ranking_options?: FileSearch.RankingOptions; + + /** + * The results of the file search. + */ + results?: Array; + } + + export namespace FileSearch { + /** + * The ranking options for the file search. + */ + export interface RankingOptions { + /** + * The ranker used for the file search. + */ + ranker: 'default_2024_08_21'; + + /** + * The score threshold for the file search. All values must be a floating point + * number between 0 and 1. + */ + score_threshold: number; + } + + /** + * A result instance of the file search. + */ + export interface Result { + /** + * The ID of the file that result was found in. + */ + file_id: string; + + /** + * The name of the file that result was found in. + */ + file_name: string; + + /** + * The score of the result. All values must be a floating point number between 0 + * and 1. + */ + score: number; + + /** + * The content of the result that was found. The content is only included if + * requested via the include query parameter. + */ + content?: Array; + } + + export namespace Result { + export interface Content { + /** + * The text content of the file. + */ + text?: string; + + /** + * The type of the content. + */ + type?: 'text'; + } + } + } +} + +export interface FileSearchToolCallDelta { + /** + * For now, this is always going to be an empty object. + */ + file_search: unknown; + + /** + * The index of the tool call in the tool calls array. + */ + index: number; + + /** + * The type of tool call. This is always going to be `file_search` for this type of + * tool call. + */ + type: 'file_search'; + + /** + * The ID of the tool call object. + */ + id?: string; +} + +export interface FunctionToolCall { + /** + * The ID of the tool call object. + */ + id: string; + + /** + * The definition of the function that was called. + */ + function: FunctionToolCall.Function; + + /** + * The type of tool call. This is always going to be `function` for this type of + * tool call. + */ + type: 'function'; +} + +export namespace FunctionToolCall { + /** + * The definition of the function that was called. + */ + export interface Function { + /** + * The arguments passed to the function. + */ + arguments: string; + + /** + * The name of the function. + */ + name: string; + + /** + * The output of the function. This will be `null` if the outputs have not been + * [submitted](https://platform.openai.com/docs/api-reference/runs/submitToolOutputs) + * yet. 
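Continuing that sketch, the tool-call unions defined here narrow on their `type` discriminator; `step` is a `RunStep` from the previous loop:

if (step.step_details.type === 'tool_calls') {
  for (const call of step.step_details.tool_calls) {
    if (call.type === 'code_interpreter') {
      for (const out of call.code_interpreter.outputs) {
        if (out.type === 'logs') console.log(out.logs);            // text output
        if (out.type === 'image') console.log(out.image.file_id);  // image file to download
      }
    } else if (call.type === 'file_search') {
      console.log('file_search results:', call.file_search.results?.length ?? 0);
    }
  }
}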
+ */ + output: string | null; + } +} + +export interface FunctionToolCallDelta { + /** + * The index of the tool call in the tool calls array. + */ + index: number; + + /** + * The type of tool call. This is always going to be `function` for this type of + * tool call. + */ + type: 'function'; + + /** + * The ID of the tool call object. + */ + id?: string; + + /** + * The definition of the function that was called. + */ + function?: FunctionToolCallDelta.Function; +} + +export namespace FunctionToolCallDelta { + /** + * The definition of the function that was called. + */ + export interface Function { + /** + * The arguments passed to the function. + */ + arguments?: string; + + /** + * The name of the function. + */ + name?: string; + + /** + * The output of the function. This will be `null` if the outputs have not been + * [submitted](https://platform.openai.com/docs/api-reference/runs/submitToolOutputs) + * yet. + */ + output?: string | null; + } +} + +/** + * Details of the message creation by the run step. + */ +export interface MessageCreationStepDetails { + message_creation: MessageCreationStepDetails.MessageCreation; + + /** + * Always `message_creation`. + */ + type: 'message_creation'; +} + +export namespace MessageCreationStepDetails { + export interface MessageCreation { + /** + * The ID of the message that was created by this run step. + */ + message_id: string; + } +} + +/** + * Represents a step in execution of a run. + */ +export interface RunStep { + /** + * The identifier of the run step, which can be referenced in API endpoints. + */ + id: string; + + /** + * The ID of the + * [assistant](https://platform.openai.com/docs/api-reference/assistants) + * associated with the run step. + */ + assistant_id: string; + + /** + * The Unix timestamp (in seconds) for when the run step was cancelled. + */ + cancelled_at: number | null; + + /** + * The Unix timestamp (in seconds) for when the run step completed. + */ + completed_at: number | null; + + /** + * The Unix timestamp (in seconds) for when the run step was created. + */ + created_at: number; + + /** + * The Unix timestamp (in seconds) for when the run step expired. A step is + * considered expired if the parent run is expired. + */ + expired_at: number | null; + + /** + * The Unix timestamp (in seconds) for when the run step failed. + */ + failed_at: number | null; + + /** + * The last error associated with this run step. Will be `null` if there are no + * errors. + */ + last_error: RunStep.LastError | null; + + /** + * Set of 16 key-value pairs that can be attached to an object. This can be useful + * for storing additional information about the object in a structured format. Keys + * can be a maximum of 64 characters long and values can be a maxium of 512 + * characters long. + */ + metadata: unknown | null; + + /** + * The object type, which is always `thread.run.step`. + */ + object: 'thread.run.step'; + + /** + * The ID of the [run](https://platform.openai.com/docs/api-reference/runs) that + * this run step is a part of. + */ + run_id: string; + + /** + * The status of the run step, which can be either `in_progress`, `cancelled`, + * `failed`, `completed`, or `expired`. + */ + status: 'in_progress' | 'cancelled' | 'failed' | 'completed' | 'expired'; + + /** + * The details of the run step. + */ + step_details: MessageCreationStepDetails | ToolCallsStepDetails; + + /** + * The ID of the [thread](https://platform.openai.com/docs/api-reference/threads) + * that was run. 
+ */ + thread_id: string; + + /** + * The type of run step, which can be either `message_creation` or `tool_calls`. + */ + type: 'message_creation' | 'tool_calls'; + + /** + * Usage statistics related to the run step. This value will be `null` while the + * run step's status is `in_progress`. + */ + usage: RunStep.Usage | null; +} + +export namespace RunStep { + /** + * The last error associated with this run step. Will be `null` if there are no + * errors. + */ + export interface LastError { + /** + * One of `server_error` or `rate_limit_exceeded`. + */ + code: 'server_error' | 'rate_limit_exceeded'; + + /** + * A human-readable description of the error. + */ + message: string; + } + + /** + * Usage statistics related to the run step. This value will be `null` while the + * run step's status is `in_progress`. + */ + export interface Usage { + /** + * Number of completion tokens used over the course of the run step. + */ + completion_tokens: number; + + /** + * Number of prompt tokens used over the course of the run step. + */ + prompt_tokens: number; + + /** + * Total number of tokens used (prompt + completion). + */ + total_tokens: number; + } +} + +/** + * The delta containing the fields that have changed on the run step. + */ +export interface RunStepDelta { + /** + * The details of the run step. + */ + step_details?: RunStepDeltaMessageDelta | ToolCallDeltaObject; +} + +/** + * Represents a run step delta i.e. any changed fields on a run step during + * streaming. + */ +export interface RunStepDeltaEvent { + /** + * The identifier of the run step, which can be referenced in API endpoints. + */ + id: string; + + /** + * The delta containing the fields that have changed on the run step. + */ + delta: RunStepDelta; + + /** + * The object type, which is always `thread.run.step.delta`. + */ + object: 'thread.run.step.delta'; +} + +/** + * Details of the message creation by the run step. + */ +export interface RunStepDeltaMessageDelta { + /** + * Always `message_creation`. + */ + type: 'message_creation'; + + message_creation?: RunStepDeltaMessageDelta.MessageCreation; +} + +export namespace RunStepDeltaMessageDelta { + export interface MessageCreation { + /** + * The ID of the message that was created by this run step. + */ + message_id?: string; + } +} + +export type RunStepInclude = 'step_details.tool_calls[*].file_search.results[*].content'; + +/** + * Details of the Code Interpreter tool call the run step was involved in. + */ +export type ToolCall = CodeInterpreterToolCall | FileSearchToolCall | FunctionToolCall; + +/** + * Details of the Code Interpreter tool call the run step was involved in. + */ +export type ToolCallDelta = CodeInterpreterToolCallDelta | FileSearchToolCallDelta | FunctionToolCallDelta; + +/** + * Details of the tool call. + */ +export interface ToolCallDeltaObject { + /** + * Always `tool_calls`. + */ + type: 'tool_calls'; + + /** + * An array of tool calls the run step was involved in. These can be associated + * with one of three types of tools: `code_interpreter`, `file_search`, or + * `function`. + */ + tool_calls?: Array; +} + +/** + * Details of the tool call. + */ +export interface ToolCallsStepDetails { + /** + * An array of tool calls the run step was involved in. These can be associated + * with one of three types of tools: `code_interpreter`, `file_search`, or + * `function`. + */ + tool_calls: Array; + + /** + * Always `tool_calls`. 
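A sketch of aggregating per-step usage, which stays `null` while a step is still `in_progress` (same placeholders as above):

let promptTokens = 0;
let completionTokens = 0;

for await (const step of client.beta.threads.runs.steps.list(threadId, runId)) {
  if (step.usage) {
    promptTokens += step.usage.prompt_tokens;
    completionTokens += step.usage.completion_tokens;
  }
}
console.log({ promptTokens, completionTokens });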
+ */ + type: 'tool_calls'; +} + +export interface StepRetrieveParams { + /** + * A list of additional fields to include in the response. Currently the only + * supported value is `step_details.tool_calls[*].file_search.results[*].content` + * to fetch the file search result content. + * + * See the + * [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) + * for more information. + */ + include?: Array; +} + +export interface StepListParams extends CursorPageParams { + /** + * A cursor for use in pagination. `before` is an object ID that defines your place + * in the list. For instance, if you make a list request and receive 100 objects, + * starting with obj_foo, your subsequent call can include before=obj_foo in order + * to fetch the previous page of the list. + */ + before?: string; + + /** + * A list of additional fields to include in the response. Currently the only + * supported value is `step_details.tool_calls[*].file_search.results[*].content` + * to fetch the file search result content. + * + * See the + * [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) + * for more information. + */ + include?: Array; + + /** + * Sort order by the `created_at` timestamp of the objects. `asc` for ascending + * order and `desc` for descending order. + */ + order?: 'asc' | 'desc'; +} + +Steps.RunStepsPage = RunStepsPage; + +export declare namespace Steps { + export { + type CodeInterpreterLogs as CodeInterpreterLogs, + type CodeInterpreterOutputImage as CodeInterpreterOutputImage, + type CodeInterpreterToolCall as CodeInterpreterToolCall, + type CodeInterpreterToolCallDelta as CodeInterpreterToolCallDelta, + type FileSearchToolCall as FileSearchToolCall, + type FileSearchToolCallDelta as FileSearchToolCallDelta, + type FunctionToolCall as FunctionToolCall, + type FunctionToolCallDelta as FunctionToolCallDelta, + type MessageCreationStepDetails as MessageCreationStepDetails, + type RunStep as RunStep, + type RunStepDelta as RunStepDelta, + type RunStepDeltaEvent as RunStepDeltaEvent, + type RunStepDeltaMessageDelta as RunStepDeltaMessageDelta, + type RunStepInclude as RunStepInclude, + type ToolCall as ToolCall, + type ToolCallDelta as ToolCallDelta, + type ToolCallDeltaObject as ToolCallDeltaObject, + type ToolCallsStepDetails as ToolCallsStepDetails, + RunStepsPage as RunStepsPage, + type StepRetrieveParams as StepRetrieveParams, + type StepListParams as StepListParams, + }; +} diff --git a/node_modules/openai/src/resources/beta/threads/threads.ts b/node_modules/openai/src/resources/beta/threads/threads.ts new file mode 100644 index 0000000..453d8fa --- /dev/null +++ b/node_modules/openai/src/resources/beta/threads/threads.ts @@ -0,0 +1,1625 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
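A sketch of `steps.retrieve` with the `include` query param defined just above, which pulls file-search result content into the step details; `stepId` is another placeholder:

const step = await client.beta.threads.runs.steps.retrieve(threadId, runId, stepId, {
  include: ['step_details.tool_calls[*].file_search.results[*].content'],
});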
+ +import { APIResource } from '../../../resource'; +import { isRequestOptions } from '../../../core'; +import { AssistantStream, ThreadCreateAndRunParamsBaseStream } from '../../../lib/AssistantStream'; +import { APIPromise } from '../../../core'; +import * as Core from '../../../core'; +import * as ThreadsAPI from './threads'; +import * as Shared from '../../shared'; +import * as AssistantsAPI from '../assistants'; +import * as ChatAPI from '../../chat/chat'; +import * as MessagesAPI from './messages'; +import { + Annotation, + AnnotationDelta, + FileCitationAnnotation, + FileCitationDeltaAnnotation, + FilePathAnnotation, + FilePathDeltaAnnotation, + ImageFile, + ImageFileContentBlock, + ImageFileDelta, + ImageFileDeltaBlock, + ImageURL, + ImageURLContentBlock, + ImageURLDelta, + ImageURLDeltaBlock, + Message as MessagesAPIMessage, + MessageContent, + MessageContentDelta, + MessageContentPartParam, + MessageCreateParams, + MessageDeleted, + MessageDelta, + MessageDeltaEvent, + MessageListParams, + MessageUpdateParams, + Messages, + MessagesPage, + RefusalContentBlock, + RefusalDeltaBlock, + Text, + TextContentBlock, + TextContentBlockParam, + TextDelta, + TextDeltaBlock, +} from './messages'; +import * as VectorStoresAPI from '../vector-stores/vector-stores'; +import * as RunsAPI from './runs/runs'; +import { + RequiredActionFunctionToolCall, + Run, + RunCreateAndPollParams, + RunCreateAndStreamParams, + RunCreateParams, + RunCreateParamsNonStreaming, + RunCreateParamsStreaming, + RunListParams, + RunStatus, + RunStreamParams, + RunSubmitToolOutputsAndPollParams, + RunSubmitToolOutputsParams, + RunSubmitToolOutputsParamsNonStreaming, + RunSubmitToolOutputsParamsStreaming, + RunSubmitToolOutputsStreamParams, + RunUpdateParams, + Runs, + RunsPage, +} from './runs/runs'; +import { Stream } from '../../../streaming'; + +export class Threads extends APIResource { + runs: RunsAPI.Runs = new RunsAPI.Runs(this._client); + messages: MessagesAPI.Messages = new MessagesAPI.Messages(this._client); + + /** + * Create a thread. + */ + create(body?: ThreadCreateParams, options?: Core.RequestOptions): Core.APIPromise; + create(options?: Core.RequestOptions): Core.APIPromise; + create( + body: ThreadCreateParams | Core.RequestOptions = {}, + options?: Core.RequestOptions, + ): Core.APIPromise { + if (isRequestOptions(body)) { + return this.create({}, body); + } + return this._client.post('/threads', { + body, + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + }); + } + + /** + * Retrieves a thread. + */ + retrieve(threadId: string, options?: Core.RequestOptions): Core.APIPromise { + return this._client.get(`/threads/${threadId}`, { + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + }); + } + + /** + * Modifies a thread. + */ + update(threadId: string, body: ThreadUpdateParams, options?: Core.RequestOptions): Core.APIPromise { + return this._client.post(`/threads/${threadId}`, { + body, + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + }); + } + + /** + * Delete a thread. + */ + del(threadId: string, options?: Core.RequestOptions): Core.APIPromise { + return this._client.delete(`/threads/${threadId}`, { + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + }); + } + + /** + * Create a thread and run it in one request. 
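A sketch of the basic `Threads` operations defined above; the SDK adds the `OpenAI-Beta: assistants=v2` header itself, and the message text and metadata key are placeholders:

const thread = await client.beta.threads.create({
  messages: [{ role: 'user', content: 'Hello!' }],
});

await client.beta.threads.update(thread.id, { metadata: { customer: 'acme' } });

// ...later, clean up:
await client.beta.threads.del(thread.id);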
+ */ + createAndRun( + body: ThreadCreateAndRunParamsNonStreaming, + options?: Core.RequestOptions, + ): APIPromise; + createAndRun( + body: ThreadCreateAndRunParamsStreaming, + options?: Core.RequestOptions, + ): APIPromise>; + createAndRun( + body: ThreadCreateAndRunParamsBase, + options?: Core.RequestOptions, + ): APIPromise | RunsAPI.Run>; + createAndRun( + body: ThreadCreateAndRunParams, + options?: Core.RequestOptions, + ): APIPromise | APIPromise> { + return this._client.post('/threads/runs', { + body, + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + stream: body.stream ?? false, + }) as APIPromise | APIPromise>; + } + + /** + * A helper to create a thread, start a run and then poll for a terminal state. + * More information on Run lifecycles can be found here: + * https://platform.openai.com/docs/assistants/how-it-works/runs-and-run-steps + */ + async createAndRunPoll( + body: ThreadCreateAndRunParamsNonStreaming, + options?: Core.RequestOptions & { pollIntervalMs?: number }, + ): Promise { + const run = await this.createAndRun(body, options); + return await this.runs.poll(run.thread_id, run.id, options); + } + + /** + * Create a thread and stream the run back + */ + createAndRunStream( + body: ThreadCreateAndRunParamsBaseStream, + options?: Core.RequestOptions, + ): AssistantStream { + return AssistantStream.createThreadAssistantStream(body, this._client.beta.threads, options); + } +} + +/** + * Specifies the format that the model must output. Compatible with + * [GPT-4o](https://platform.openai.com/docs/models#gpt-4o), + * [GPT-4 Turbo](https://platform.openai.com/docs/models#gpt-4-turbo-and-gpt-4), + * and all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. + * + * Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured + * Outputs which ensures the model will match your supplied JSON schema. Learn more + * in the + * [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). + * + * Setting to `{ "type": "json_object" }` enables JSON mode, which ensures the + * message the model generates is valid JSON. + * + * **Important:** when using JSON mode, you **must** also instruct the model to + * produce JSON yourself via a system or user message. Without this, the model may + * generate an unending stream of whitespace until the generation reaches the token + * limit, resulting in a long-running and seemingly "stuck" request. Also note that + * the message content may be partially cut off if `finish_reason="length"`, which + * indicates the generation exceeded `max_tokens` or the conversation exceeded the + * max context length. + */ +export type AssistantResponseFormatOption = + | 'auto' + | Shared.ResponseFormatText + | Shared.ResponseFormatJSONObject + | Shared.ResponseFormatJSONSchema; + +/** + * Specifies a tool the model should use. Use to force the model to call a specific + * tool. + */ +export interface AssistantToolChoice { + /** + * The type of the tool. If type is `function`, the function name must be set + */ + type: 'function' | 'code_interpreter' | 'file_search'; + + function?: AssistantToolChoiceFunction; +} + +export interface AssistantToolChoiceFunction { + /** + * The name of the function to call. + */ + name: string; +} + +/** + * Controls which (if any) tool is called by the model. `none` means the model will + * not call any tools and instead generates a message. 
`auto` is the default value + * and means the model can pick between generating a message or calling one or more + * tools. `required` means the model must call one or more tools before responding + * to the user. Specifying a particular tool like `{"type": "file_search"}` or + * `{"type": "function", "function": {"name": "my_function"}}` forces the model to + * call that tool. + */ +export type AssistantToolChoiceOption = 'none' | 'auto' | 'required' | AssistantToolChoice; + +/** + * Represents a thread that contains + * [messages](https://platform.openai.com/docs/api-reference/messages). + */ +export interface Thread { + /** + * The identifier, which can be referenced in API endpoints. + */ + id: string; + + /** + * The Unix timestamp (in seconds) for when the thread was created. + */ + created_at: number; + + /** + * Set of 16 key-value pairs that can be attached to an object. This can be useful + * for storing additional information about the object in a structured format. Keys + * can be a maximum of 64 characters long and values can be a maxium of 512 + * characters long. + */ + metadata: unknown | null; + + /** + * The object type, which is always `thread`. + */ + object: 'thread'; + + /** + * A set of resources that are made available to the assistant's tools in this + * thread. The resources are specific to the type of tool. For example, the + * `code_interpreter` tool requires a list of file IDs, while the `file_search` + * tool requires a list of vector store IDs. + */ + tool_resources: Thread.ToolResources | null; +} + +export namespace Thread { + /** + * A set of resources that are made available to the assistant's tools in this + * thread. The resources are specific to the type of tool. For example, the + * `code_interpreter` tool requires a list of file IDs, while the `file_search` + * tool requires a list of vector store IDs. + */ + export interface ToolResources { + code_interpreter?: ToolResources.CodeInterpreter; + + file_search?: ToolResources.FileSearch; + } + + export namespace ToolResources { + export interface CodeInterpreter { + /** + * A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made + * available to the `code_interpreter` tool. There can be a maximum of 20 files + * associated with the tool. + */ + file_ids?: Array; + } + + export interface FileSearch { + /** + * The + * [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + * attached to this thread. There can be a maximum of 1 vector store attached to + * the thread. + */ + vector_store_ids?: Array; + } + } +} + +export interface ThreadDeleted { + id: string; + + deleted: boolean; + + object: 'thread.deleted'; +} + +export interface ThreadCreateParams { + /** + * A list of [messages](https://platform.openai.com/docs/api-reference/messages) to + * start the thread with. + */ + messages?: Array; + + /** + * Set of 16 key-value pairs that can be attached to an object. This can be useful + * for storing additional information about the object in a structured format. Keys + * can be a maximum of 64 characters long and values can be a maxium of 512 + * characters long. + */ + metadata?: unknown | null; + + /** + * A set of resources that are made available to the assistant's tools in this + * thread. The resources are specific to the type of tool. For example, the + * `code_interpreter` tool requires a list of file IDs, while the `file_search` + * tool requires a list of vector store IDs. 
+ */ + tool_resources?: ThreadCreateParams.ToolResources | null; +} + +export namespace ThreadCreateParams { + export interface Message { + /** + * The text contents of the message. + */ + content: string | Array; + + /** + * The role of the entity that is creating the message. Allowed values include: + * + * - `user`: Indicates the message is sent by an actual user and should be used in + * most cases to represent user-generated messages. + * - `assistant`: Indicates the message is generated by the assistant. Use this + * value to insert messages from the assistant into the conversation. + */ + role: 'user' | 'assistant'; + + /** + * A list of files attached to the message, and the tools they should be added to. + */ + attachments?: Array | null; + + /** + * Set of 16 key-value pairs that can be attached to an object. This can be useful + * for storing additional information about the object in a structured format. Keys + * can be a maximum of 64 characters long and values can be a maxium of 512 + * characters long. + */ + metadata?: unknown | null; + } + + export namespace Message { + export interface Attachment { + /** + * The ID of the file to attach to the message. + */ + file_id?: string; + + /** + * The tools to add this file to. + */ + tools?: Array; + } + + export namespace Attachment { + export interface FileSearch { + /** + * The type of tool being defined: `file_search` + */ + type: 'file_search'; + } + } + } + + /** + * A set of resources that are made available to the assistant's tools in this + * thread. The resources are specific to the type of tool. For example, the + * `code_interpreter` tool requires a list of file IDs, while the `file_search` + * tool requires a list of vector store IDs. + */ + export interface ToolResources { + code_interpreter?: ToolResources.CodeInterpreter; + + file_search?: ToolResources.FileSearch; + } + + export namespace ToolResources { + export interface CodeInterpreter { + /** + * A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made + * available to the `code_interpreter` tool. There can be a maximum of 20 files + * associated with the tool. + */ + file_ids?: Array; + } + + export interface FileSearch { + /** + * The + * [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + * attached to this thread. There can be a maximum of 1 vector store attached to + * the thread. + */ + vector_store_ids?: Array; + + /** + * A helper to create a + * [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + * with file_ids and attach it to this thread. There can be a maximum of 1 vector + * store attached to the thread. + */ + vector_stores?: Array; + } + + export namespace FileSearch { + export interface VectorStore { + /** + * The chunking strategy used to chunk the file(s). If not set, will use the `auto` + * strategy. Only applicable if `file_ids` is non-empty. + */ + chunking_strategy?: VectorStoresAPI.FileChunkingStrategyParam; + + /** + * A list of [file](https://platform.openai.com/docs/api-reference/files) IDs to + * add to the vector store. There can be a maximum of 10000 files in a vector + * store. + */ + file_ids?: Array; + + /** + * Set of 16 key-value pairs that can be attached to a vector store. This can be + * useful for storing additional information about the vector store in a structured + * format. Keys can be a maximum of 64 characters long and values can be a maxium + * of 512 characters long. 
+ */ + metadata?: unknown; + } + } + } +} + +export interface ThreadUpdateParams { + /** + * Set of 16 key-value pairs that can be attached to an object. This can be useful + * for storing additional information about the object in a structured format. Keys + * can be a maximum of 64 characters long and values can be a maxium of 512 + * characters long. + */ + metadata?: unknown | null; + + /** + * A set of resources that are made available to the assistant's tools in this + * thread. The resources are specific to the type of tool. For example, the + * `code_interpreter` tool requires a list of file IDs, while the `file_search` + * tool requires a list of vector store IDs. + */ + tool_resources?: ThreadUpdateParams.ToolResources | null; +} + +export namespace ThreadUpdateParams { + /** + * A set of resources that are made available to the assistant's tools in this + * thread. The resources are specific to the type of tool. For example, the + * `code_interpreter` tool requires a list of file IDs, while the `file_search` + * tool requires a list of vector store IDs. + */ + export interface ToolResources { + code_interpreter?: ToolResources.CodeInterpreter; + + file_search?: ToolResources.FileSearch; + } + + export namespace ToolResources { + export interface CodeInterpreter { + /** + * A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made + * available to the `code_interpreter` tool. There can be a maximum of 20 files + * associated with the tool. + */ + file_ids?: Array; + } + + export interface FileSearch { + /** + * The + * [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + * attached to this thread. There can be a maximum of 1 vector store attached to + * the thread. + */ + vector_store_ids?: Array; + } + } +} + +export type ThreadCreateAndRunParams = + | ThreadCreateAndRunParamsNonStreaming + | ThreadCreateAndRunParamsStreaming; + +export interface ThreadCreateAndRunParamsBase { + /** + * The ID of the + * [assistant](https://platform.openai.com/docs/api-reference/assistants) to use to + * execute this run. + */ + assistant_id: string; + + /** + * Override the default system message of the assistant. This is useful for + * modifying the behavior on a per-run basis. + */ + instructions?: string | null; + + /** + * The maximum number of completion tokens that may be used over the course of the + * run. The run will make a best effort to use only the number of completion tokens + * specified, across multiple turns of the run. If the run exceeds the number of + * completion tokens specified, the run will end with status `incomplete`. See + * `incomplete_details` for more info. + */ + max_completion_tokens?: number | null; + + /** + * The maximum number of prompt tokens that may be used over the course of the run. + * The run will make a best effort to use only the number of prompt tokens + * specified, across multiple turns of the run. If the run exceeds the number of + * prompt tokens specified, the run will end with status `incomplete`. See + * `incomplete_details` for more info. + */ + max_prompt_tokens?: number | null; + + /** + * Set of 16 key-value pairs that can be attached to an object. This can be useful + * for storing additional information about the object in a structured format. Keys + * can be a maximum of 64 characters long and values can be a maxium of 512 + * characters long. 
+ */ + metadata?: unknown | null; + + /** + * The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to + * be used to execute this run. If a value is provided here, it will override the + * model associated with the assistant. If not, the model associated with the + * assistant will be used. + */ + model?: (string & {}) | ChatAPI.ChatModel | null; + + /** + * Whether to enable + * [parallel function calling](https://platform.openai.com/docs/guides/function-calling#configuring-parallel-function-calling) + * during tool use. + */ + parallel_tool_calls?: boolean; + + /** + * Specifies the format that the model must output. Compatible with + * [GPT-4o](https://platform.openai.com/docs/models#gpt-4o), + * [GPT-4 Turbo](https://platform.openai.com/docs/models#gpt-4-turbo-and-gpt-4), + * and all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. + * + * Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured + * Outputs which ensures the model will match your supplied JSON schema. Learn more + * in the + * [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). + * + * Setting to `{ "type": "json_object" }` enables JSON mode, which ensures the + * message the model generates is valid JSON. + * + * **Important:** when using JSON mode, you **must** also instruct the model to + * produce JSON yourself via a system or user message. Without this, the model may + * generate an unending stream of whitespace until the generation reaches the token + * limit, resulting in a long-running and seemingly "stuck" request. Also note that + * the message content may be partially cut off if `finish_reason="length"`, which + * indicates the generation exceeded `max_tokens` or the conversation exceeded the + * max context length. + */ + response_format?: AssistantResponseFormatOption | null; + + /** + * If `true`, returns a stream of events that happen during the Run as server-sent + * events, terminating when the Run enters a terminal state with a `data: [DONE]` + * message. + */ + stream?: boolean | null; + + /** + * What sampling temperature to use, between 0 and 2. Higher values like 0.8 will + * make the output more random, while lower values like 0.2 will make it more + * focused and deterministic. + */ + temperature?: number | null; + + /** + * If no thread is provided, an empty thread will be created. + */ + thread?: ThreadCreateAndRunParams.Thread; + + /** + * Controls which (if any) tool is called by the model. `none` means the model will + * not call any tools and instead generates a message. `auto` is the default value + * and means the model can pick between generating a message or calling one or more + * tools. `required` means the model must call one or more tools before responding + * to the user. Specifying a particular tool like `{"type": "file_search"}` or + * `{"type": "function", "function": {"name": "my_function"}}` forces the model to + * call that tool. + */ + tool_choice?: AssistantToolChoiceOption | null; + + /** + * A set of resources that are used by the assistant's tools. The resources are + * specific to the type of tool. For example, the `code_interpreter` tool requires + * a list of file IDs, while the `file_search` tool requires a list of vector store + * IDs. + */ + tool_resources?: ThreadCreateAndRunParams.ToolResources | null; + + /** + * Override the tools the assistant can use for this run. This is useful for + * modifying the behavior on a per-run basis. 
+ */ + tools?: Array< + AssistantsAPI.CodeInterpreterTool | AssistantsAPI.FileSearchTool | AssistantsAPI.FunctionTool + > | null; + + /** + * An alternative to sampling with temperature, called nucleus sampling, where the + * model considers the results of the tokens with top_p probability mass. So 0.1 + * means only the tokens comprising the top 10% probability mass are considered. + * + * We generally recommend altering this or temperature but not both. + */ + top_p?: number | null; + + /** + * Controls for how a thread will be truncated prior to the run. Use this to + * control the intial context window of the run. + */ + truncation_strategy?: ThreadCreateAndRunParams.TruncationStrategy | null; +} + +export namespace ThreadCreateAndRunParams { + /** + * If no thread is provided, an empty thread will be created. + */ + export interface Thread { + /** + * A list of [messages](https://platform.openai.com/docs/api-reference/messages) to + * start the thread with. + */ + messages?: Array; + + /** + * Set of 16 key-value pairs that can be attached to an object. This can be useful + * for storing additional information about the object in a structured format. Keys + * can be a maximum of 64 characters long and values can be a maxium of 512 + * characters long. + */ + metadata?: unknown | null; + + /** + * A set of resources that are made available to the assistant's tools in this + * thread. The resources are specific to the type of tool. For example, the + * `code_interpreter` tool requires a list of file IDs, while the `file_search` + * tool requires a list of vector store IDs. + */ + tool_resources?: Thread.ToolResources | null; + } + + export namespace Thread { + export interface Message { + /** + * The text contents of the message. + */ + content: string | Array; + + /** + * The role of the entity that is creating the message. Allowed values include: + * + * - `user`: Indicates the message is sent by an actual user and should be used in + * most cases to represent user-generated messages. + * - `assistant`: Indicates the message is generated by the assistant. Use this + * value to insert messages from the assistant into the conversation. + */ + role: 'user' | 'assistant'; + + /** + * A list of files attached to the message, and the tools they should be added to. + */ + attachments?: Array | null; + + /** + * Set of 16 key-value pairs that can be attached to an object. This can be useful + * for storing additional information about the object in a structured format. Keys + * can be a maximum of 64 characters long and values can be a maxium of 512 + * characters long. + */ + metadata?: unknown | null; + } + + export namespace Message { + export interface Attachment { + /** + * The ID of the file to attach to the message. + */ + file_id?: string; + + /** + * The tools to add this file to. + */ + tools?: Array; + } + + export namespace Attachment { + export interface FileSearch { + /** + * The type of tool being defined: `file_search` + */ + type: 'file_search'; + } + } + } + + /** + * A set of resources that are made available to the assistant's tools in this + * thread. The resources are specific to the type of tool. For example, the + * `code_interpreter` tool requires a list of file IDs, while the `file_search` + * tool requires a list of vector store IDs. 
+ */ + export interface ToolResources { + code_interpreter?: ToolResources.CodeInterpreter; + + file_search?: ToolResources.FileSearch; + } + + export namespace ToolResources { + export interface CodeInterpreter { + /** + * A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made + * available to the `code_interpreter` tool. There can be a maximum of 20 files + * associated with the tool. + */ + file_ids?: Array; + } + + export interface FileSearch { + /** + * The + * [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + * attached to this thread. There can be a maximum of 1 vector store attached to + * the thread. + */ + vector_store_ids?: Array; + + /** + * A helper to create a + * [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + * with file_ids and attach it to this thread. There can be a maximum of 1 vector + * store attached to the thread. + */ + vector_stores?: Array; + } + + export namespace FileSearch { + export interface VectorStore { + /** + * The chunking strategy used to chunk the file(s). If not set, will use the `auto` + * strategy. Only applicable if `file_ids` is non-empty. + */ + chunking_strategy?: VectorStoresAPI.FileChunkingStrategyParam; + + /** + * A list of [file](https://platform.openai.com/docs/api-reference/files) IDs to + * add to the vector store. There can be a maximum of 10000 files in a vector + * store. + */ + file_ids?: Array; + + /** + * Set of 16 key-value pairs that can be attached to a vector store. This can be + * useful for storing additional information about the vector store in a structured + * format. Keys can be a maximum of 64 characters long and values can be a maxium + * of 512 characters long. + */ + metadata?: unknown; + } + } + } + } + + /** + * A set of resources that are used by the assistant's tools. The resources are + * specific to the type of tool. For example, the `code_interpreter` tool requires + * a list of file IDs, while the `file_search` tool requires a list of vector store + * IDs. + */ + export interface ToolResources { + code_interpreter?: ToolResources.CodeInterpreter; + + file_search?: ToolResources.FileSearch; + } + + export namespace ToolResources { + export interface CodeInterpreter { + /** + * A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made + * available to the `code_interpreter` tool. There can be a maximum of 20 files + * associated with the tool. + */ + file_ids?: Array; + } + + export interface FileSearch { + /** + * The ID of the + * [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + * attached to this assistant. There can be a maximum of 1 vector store attached to + * the assistant. + */ + vector_store_ids?: Array; + } + } + + /** + * Controls for how a thread will be truncated prior to the run. Use this to + * control the intial context window of the run. + */ + export interface TruncationStrategy { + /** + * The truncation strategy to use for the thread. The default is `auto`. If set to + * `last_messages`, the thread will be truncated to the n most recent messages in + * the thread. When set to `auto`, messages in the middle of the thread will be + * dropped to fit the context length of the model, `max_prompt_tokens`. + */ + type: 'auto' | 'last_messages'; + + /** + * The number of most recent messages from the thread when constructing the context + * for the run. 
+ */ + last_messages?: number | null; + } + + export type ThreadCreateAndRunParamsNonStreaming = ThreadsAPI.ThreadCreateAndRunParamsNonStreaming; + export type ThreadCreateAndRunParamsStreaming = ThreadsAPI.ThreadCreateAndRunParamsStreaming; +} + +export interface ThreadCreateAndRunParamsNonStreaming extends ThreadCreateAndRunParamsBase { + /** + * If `true`, returns a stream of events that happen during the Run as server-sent + * events, terminating when the Run enters a terminal state with a `data: [DONE]` + * message. + */ + stream?: false | null; +} + +export interface ThreadCreateAndRunParamsStreaming extends ThreadCreateAndRunParamsBase { + /** + * If `true`, returns a stream of events that happen during the Run as server-sent + * events, terminating when the Run enters a terminal state with a `data: [DONE]` + * message. + */ + stream: true; +} + +export interface ThreadCreateAndRunPollParams { + /** + * The ID of the + * [assistant](https://platform.openai.com/docs/api-reference/assistants) to use to + * execute this run. + */ + assistant_id: string; + + /** + * Override the default system message of the assistant. This is useful for + * modifying the behavior on a per-run basis. + */ + instructions?: string | null; + + /** + * The maximum number of completion tokens that may be used over the course of the + * run. The run will make a best effort to use only the number of completion tokens + * specified, across multiple turns of the run. If the run exceeds the number of + * completion tokens specified, the run will end with status `incomplete`. See + * `incomplete_details` for more info. + */ + max_completion_tokens?: number | null; + + /** + * The maximum number of prompt tokens that may be used over the course of the run. + * The run will make a best effort to use only the number of prompt tokens + * specified, across multiple turns of the run. If the run exceeds the number of + * prompt tokens specified, the run will end with status `incomplete`. See + * `incomplete_details` for more info. + */ + max_prompt_tokens?: number | null; + + /** + * Set of 16 key-value pairs that can be attached to an object. This can be useful + * for storing additional information about the object in a structured format. Keys + * can be a maximum of 64 characters long and values can be a maxium of 512 + * characters long. + */ + metadata?: unknown | null; + + /** + * The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to + * be used to execute this run. If a value is provided here, it will override the + * model associated with the assistant. If not, the model associated with the + * assistant will be used. + */ + model?: + | (string & {}) + | 'gpt-4o' + | 'gpt-4o-2024-05-13' + | 'gpt-4-turbo' + | 'gpt-4-turbo-2024-04-09' + | 'gpt-4-0125-preview' + | 'gpt-4-turbo-preview' + | 'gpt-4-1106-preview' + | 'gpt-4-vision-preview' + | 'gpt-4' + | 'gpt-4-0314' + | 'gpt-4-0613' + | 'gpt-4-32k' + | 'gpt-4-32k-0314' + | 'gpt-4-32k-0613' + | 'gpt-3.5-turbo' + | 'gpt-3.5-turbo-16k' + | 'gpt-3.5-turbo-0613' + | 'gpt-3.5-turbo-1106' + | 'gpt-3.5-turbo-0125' + | 'gpt-3.5-turbo-16k-0613' + | null; + + /** + * Specifies the format that the model must output. Compatible with + * [GPT-4o](https://platform.openai.com/docs/models/gpt-4o), + * [GPT-4 Turbo](https://platform.openai.com/docs/models/gpt-4-turbo-and-gpt-4), + * and all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. 
+ * + * Setting to `{ "type": "json_object" }` enables JSON mode, which guarantees the + * message the model generates is valid JSON. + * + * **Important:** when using JSON mode, you **must** also instruct the model to + * produce JSON yourself via a system or user message. Without this, the model may + * generate an unending stream of whitespace until the generation reaches the token + * limit, resulting in a long-running and seemingly "stuck" request. Also note that + * the message content may be partially cut off if `finish_reason="length"`, which + * indicates the generation exceeded `max_tokens` or the conversation exceeded the + * max context length. + */ + response_format?: AssistantResponseFormatOption | null; + + /** + * What sampling temperature to use, between 0 and 2. Higher values like 0.8 will + * make the output more random, while lower values like 0.2 will make it more + * focused and deterministic. + */ + temperature?: number | null; + + /** + * If no thread is provided, an empty thread will be created. + */ + thread?: ThreadCreateAndRunPollParams.Thread; + + /** + * Controls which (if any) tool is called by the model. `none` means the model will + * not call any tools and instead generates a message. `auto` is the default value + * and means the model can pick between generating a message or calling one or more + * tools. `required` means the model must call one or more tools before responding + * to the user. Specifying a particular tool like `{"type": "file_search"}` or + * `{"type": "function", "function": {"name": "my_function"}}` forces the model to + * call that tool. + */ + tool_choice?: AssistantToolChoiceOption | null; + + /** + * A set of resources that are used by the assistant's tools. The resources are + * specific to the type of tool. For example, the `code_interpreter` tool requires + * a list of file IDs, while the `file_search` tool requires a list of vector store + * IDs. + */ + tool_resources?: ThreadCreateAndRunPollParams.ToolResources | null; + + /** + * Override the tools the assistant can use for this run. This is useful for + * modifying the behavior on a per-run basis. + */ + tools?: Array< + AssistantsAPI.CodeInterpreterTool | AssistantsAPI.FileSearchTool | AssistantsAPI.FunctionTool + > | null; + + /** + * An alternative to sampling with temperature, called nucleus sampling, where the + * model considers the results of the tokens with top_p probability mass. So 0.1 + * means only the tokens comprising the top 10% probability mass are considered. + * + * We generally recommend altering this or temperature but not both. + */ + top_p?: number | null; + + /** + * Controls for how a thread will be truncated prior to the run. Use this to + * control the intial context window of the run. + */ + truncation_strategy?: ThreadCreateAndRunPollParams.TruncationStrategy | null; +} + +export namespace ThreadCreateAndRunPollParams { + /** + * If no thread is provided, an empty thread will be created. + */ + export interface Thread { + /** + * A list of [messages](https://platform.openai.com/docs/api-reference/messages) to + * start the thread with. + */ + messages?: Array; + + /** + * Set of 16 key-value pairs that can be attached to an object. This can be useful + * for storing additional information about the object in a structured format. Keys + * can be a maximum of 64 characters long and values can be a maxium of 512 + * characters long. 
+ */ + metadata?: unknown | null; + + /** + * A set of resources that are made available to the assistant's tools in this + * thread. The resources are specific to the type of tool. For example, the + * `code_interpreter` tool requires a list of file IDs, while the `file_search` + * tool requires a list of vector store IDs. + */ + tool_resources?: Thread.ToolResources | null; + } + + export namespace Thread { + export interface Message { + /** + * The text contents of the message. + */ + content: string | Array; + + /** + * The role of the entity that is creating the message. Allowed values include: + * + * - `user`: Indicates the message is sent by an actual user and should be used in + * most cases to represent user-generated messages. + * - `assistant`: Indicates the message is generated by the assistant. Use this + * value to insert messages from the assistant into the conversation. + */ + role: 'user' | 'assistant'; + + /** + * A list of files attached to the message, and the tools they should be added to. + */ + attachments?: Array | null; + + /** + * Set of 16 key-value pairs that can be attached to an object. This can be useful + * for storing additional information about the object in a structured format. Keys + * can be a maximum of 64 characters long and values can be a maxium of 512 + * characters long. + */ + metadata?: unknown | null; + } + + export namespace Message { + export interface Attachment { + /** + * The ID of the file to attach to the message. + */ + file_id?: string; + + /** + * The tools to add this file to. + */ + tools?: Array; + } + } + + /** + * A set of resources that are made available to the assistant's tools in this + * thread. The resources are specific to the type of tool. For example, the + * `code_interpreter` tool requires a list of file IDs, while the `file_search` + * tool requires a list of vector store IDs. + */ + export interface ToolResources { + code_interpreter?: ToolResources.CodeInterpreter; + + file_search?: ToolResources.FileSearch; + } + + export namespace ToolResources { + export interface CodeInterpreter { + /** + * A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made + * available to the `code_interpreter` tool. There can be a maximum of 20 files + * associated with the tool. + */ + file_ids?: Array; + } + + export interface FileSearch { + /** + * The + * [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + * attached to this thread. There can be a maximum of 1 vector store attached to + * the thread. + */ + vector_store_ids?: Array; + + /** + * A helper to create a + * [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + * with file_ids and attach it to this thread. There can be a maximum of 1 vector + * store attached to the thread. + */ + vector_stores?: Array; + } + + export namespace FileSearch { + export interface VectorStore { + /** + * A list of [file](https://platform.openai.com/docs/api-reference/files) IDs to + * add to the vector store. There can be a maximum of 10000 files in a vector + * store. + */ + file_ids?: Array; + + /** + * Set of 16 key-value pairs that can be attached to a vector store. This can be + * useful for storing additional information about the vector store in a structured + * format. Keys can be a maximum of 64 characters long and values can be a maxium + * of 512 characters long. + */ + metadata?: unknown; + } + } + } + } + + /** + * A set of resources that are used by the assistant's tools. 
The resources are + * specific to the type of tool. For example, the `code_interpreter` tool requires + * a list of file IDs, while the `file_search` tool requires a list of vector store + * IDs. + */ + export interface ToolResources { + code_interpreter?: ToolResources.CodeInterpreter; + + file_search?: ToolResources.FileSearch; + } + + export namespace ToolResources { + export interface CodeInterpreter { + /** + * A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made + * available to the `code_interpreter` tool. There can be a maximum of 20 files + * associated with the tool. + */ + file_ids?: Array; + } + + export interface FileSearch { + /** + * The ID of the + * [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + * attached to this assistant. There can be a maximum of 1 vector store attached to + * the assistant. + */ + vector_store_ids?: Array; + } + } + + /** + * Controls for how a thread will be truncated prior to the run. Use this to + * control the intial context window of the run. + */ + export interface TruncationStrategy { + /** + * The truncation strategy to use for the thread. The default is `auto`. If set to + * `last_messages`, the thread will be truncated to the n most recent messages in + * the thread. When set to `auto`, messages in the middle of the thread will be + * dropped to fit the context length of the model, `max_prompt_tokens`. + */ + type: 'auto' | 'last_messages'; + + /** + * The number of most recent messages from the thread when constructing the context + * for the run. + */ + last_messages?: number | null; + } +} + +export interface ThreadCreateAndRunStreamParams { + /** + * The ID of the + * [assistant](https://platform.openai.com/docs/api-reference/assistants) to use to + * execute this run. + */ + assistant_id: string; + + /** + * Override the default system message of the assistant. This is useful for + * modifying the behavior on a per-run basis. + */ + instructions?: string | null; + + /** + * The maximum number of completion tokens that may be used over the course of the + * run. The run will make a best effort to use only the number of completion tokens + * specified, across multiple turns of the run. If the run exceeds the number of + * completion tokens specified, the run will end with status `incomplete`. See + * `incomplete_details` for more info. + */ + max_completion_tokens?: number | null; + + /** + * The maximum number of prompt tokens that may be used over the course of the run. + * The run will make a best effort to use only the number of prompt tokens + * specified, across multiple turns of the run. If the run exceeds the number of + * prompt tokens specified, the run will end with status `incomplete`. See + * `incomplete_details` for more info. + */ + max_prompt_tokens?: number | null; + + /** + * Set of 16 key-value pairs that can be attached to an object. This can be useful + * for storing additional information about the object in a structured format. Keys + * can be a maximum of 64 characters long and values can be a maxium of 512 + * characters long. + */ + metadata?: unknown | null; + + /** + * The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to + * be used to execute this run. If a value is provided here, it will override the + * model associated with the assistant. If not, the model associated with the + * assistant will be used. 
+ */ + model?: + | (string & {}) + | 'gpt-4o' + | 'gpt-4o-2024-05-13' + | 'gpt-4-turbo' + | 'gpt-4-turbo-2024-04-09' + | 'gpt-4-0125-preview' + | 'gpt-4-turbo-preview' + | 'gpt-4-1106-preview' + | 'gpt-4-vision-preview' + | 'gpt-4' + | 'gpt-4-0314' + | 'gpt-4-0613' + | 'gpt-4-32k' + | 'gpt-4-32k-0314' + | 'gpt-4-32k-0613' + | 'gpt-3.5-turbo' + | 'gpt-3.5-turbo-16k' + | 'gpt-3.5-turbo-0613' + | 'gpt-3.5-turbo-1106' + | 'gpt-3.5-turbo-0125' + | 'gpt-3.5-turbo-16k-0613' + | null; + + /** + * Specifies the format that the model must output. Compatible with + * [GPT-4o](https://platform.openai.com/docs/models/gpt-4o), + * [GPT-4 Turbo](https://platform.openai.com/docs/models/gpt-4-turbo-and-gpt-4), + * and all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. + * + * Setting to `{ "type": "json_object" }` enables JSON mode, which guarantees the + * message the model generates is valid JSON. + * + * **Important:** when using JSON mode, you **must** also instruct the model to + * produce JSON yourself via a system or user message. Without this, the model may + * generate an unending stream of whitespace until the generation reaches the token + * limit, resulting in a long-running and seemingly "stuck" request. Also note that + * the message content may be partially cut off if `finish_reason="length"`, which + * indicates the generation exceeded `max_tokens` or the conversation exceeded the + * max context length. + */ + response_format?: AssistantResponseFormatOption | null; + + /** + * What sampling temperature to use, between 0 and 2. Higher values like 0.8 will + * make the output more random, while lower values like 0.2 will make it more + * focused and deterministic. + */ + temperature?: number | null; + + /** + * If no thread is provided, an empty thread will be created. + */ + thread?: ThreadCreateAndRunStreamParams.Thread; + + /** + * Controls which (if any) tool is called by the model. `none` means the model will + * not call any tools and instead generates a message. `auto` is the default value + * and means the model can pick between generating a message or calling one or more + * tools. `required` means the model must call one or more tools before responding + * to the user. Specifying a particular tool like `{"type": "file_search"}` or + * `{"type": "function", "function": {"name": "my_function"}}` forces the model to + * call that tool. + */ + tool_choice?: AssistantToolChoiceOption | null; + + /** + * A set of resources that are used by the assistant's tools. The resources are + * specific to the type of tool. For example, the `code_interpreter` tool requires + * a list of file IDs, while the `file_search` tool requires a list of vector store + * IDs. + */ + tool_resources?: ThreadCreateAndRunStreamParams.ToolResources | null; + + /** + * Override the tools the assistant can use for this run. This is useful for + * modifying the behavior on a per-run basis. + */ + tools?: Array< + AssistantsAPI.CodeInterpreterTool | AssistantsAPI.FileSearchTool | AssistantsAPI.FunctionTool + > | null; + + /** + * An alternative to sampling with temperature, called nucleus sampling, where the + * model considers the results of the tokens with top_p probability mass. So 0.1 + * means only the tokens comprising the top 10% probability mass are considered. + * + * We generally recommend altering this or temperature but not both. + */ + top_p?: number | null; + + /** + * Controls for how a thread will be truncated prior to the run. Use this to + * control the intial context window of the run. 
+ */ + truncation_strategy?: ThreadCreateAndRunStreamParams.TruncationStrategy | null; +} + +export namespace ThreadCreateAndRunStreamParams { + /** + * If no thread is provided, an empty thread will be created. + */ + export interface Thread { + /** + * A list of [messages](https://platform.openai.com/docs/api-reference/messages) to + * start the thread with. + */ + messages?: Array; + + /** + * Set of 16 key-value pairs that can be attached to an object. This can be useful + * for storing additional information about the object in a structured format. Keys + * can be a maximum of 64 characters long and values can be a maxium of 512 + * characters long. + */ + metadata?: unknown | null; + + /** + * A set of resources that are made available to the assistant's tools in this + * thread. The resources are specific to the type of tool. For example, the + * `code_interpreter` tool requires a list of file IDs, while the `file_search` + * tool requires a list of vector store IDs. + */ + tool_resources?: Thread.ToolResources | null; + } + + export namespace Thread { + export interface Message { + /** + * The text contents of the message. + */ + content: string | Array; + + /** + * The role of the entity that is creating the message. Allowed values include: + * + * - `user`: Indicates the message is sent by an actual user and should be used in + * most cases to represent user-generated messages. + * - `assistant`: Indicates the message is generated by the assistant. Use this + * value to insert messages from the assistant into the conversation. + */ + role: 'user' | 'assistant'; + + /** + * A list of files attached to the message, and the tools they should be added to. + */ + attachments?: Array | null; + + /** + * Set of 16 key-value pairs that can be attached to an object. This can be useful + * for storing additional information about the object in a structured format. Keys + * can be a maximum of 64 characters long and values can be a maxium of 512 + * characters long. + */ + metadata?: unknown | null; + } + + export namespace Message { + export interface Attachment { + /** + * The ID of the file to attach to the message. + */ + file_id?: string; + + /** + * The tools to add this file to. + */ + tools?: Array; + } + } + + /** + * A set of resources that are made available to the assistant's tools in this + * thread. The resources are specific to the type of tool. For example, the + * `code_interpreter` tool requires a list of file IDs, while the `file_search` + * tool requires a list of vector store IDs. + */ + export interface ToolResources { + code_interpreter?: ToolResources.CodeInterpreter; + + file_search?: ToolResources.FileSearch; + } + + export namespace ToolResources { + export interface CodeInterpreter { + /** + * A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made + * available to the `code_interpreter` tool. There can be a maximum of 20 files + * associated with the tool. + */ + file_ids?: Array; + } + + export interface FileSearch { + /** + * The + * [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + * attached to this thread. There can be a maximum of 1 vector store attached to + * the thread. + */ + vector_store_ids?: Array; + + /** + * A helper to create a + * [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + * with file_ids and attach it to this thread. There can be a maximum of 1 vector + * store attached to the thread. 
+ */ + vector_stores?: Array; + } + + export namespace FileSearch { + export interface VectorStore { + /** + * A list of [file](https://platform.openai.com/docs/api-reference/files) IDs to + * add to the vector store. There can be a maximum of 10000 files in a vector + * store. + */ + file_ids?: Array; + + /** + * Set of 16 key-value pairs that can be attached to a vector store. This can be + * useful for storing additional information about the vector store in a structured + * format. Keys can be a maximum of 64 characters long and values can be a maxium + * of 512 characters long. + */ + metadata?: unknown; + } + } + } + } + + /** + * A set of resources that are used by the assistant's tools. The resources are + * specific to the type of tool. For example, the `code_interpreter` tool requires + * a list of file IDs, while the `file_search` tool requires a list of vector store + * IDs. + */ + export interface ToolResources { + code_interpreter?: ToolResources.CodeInterpreter; + + file_search?: ToolResources.FileSearch; + } + + export namespace ToolResources { + export interface CodeInterpreter { + /** + * A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made + * available to the `code_interpreter` tool. There can be a maximum of 20 files + * associated with the tool. + */ + file_ids?: Array; + } + + export interface FileSearch { + /** + * The ID of the + * [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + * attached to this assistant. There can be a maximum of 1 vector store attached to + * the assistant. + */ + vector_store_ids?: Array; + } + } + + /** + * Controls for how a thread will be truncated prior to the run. Use this to + * control the intial context window of the run. + */ + export interface TruncationStrategy { + /** + * The truncation strategy to use for the thread. The default is `auto`. If set to + * `last_messages`, the thread will be truncated to the n most recent messages in + * the thread. When set to `auto`, messages in the middle of the thread will be + * dropped to fit the context length of the model, `max_prompt_tokens`. + */ + type: 'auto' | 'last_messages'; + + /** + * The number of most recent messages from the thread when constructing the context + * for the run. 
+ */ + last_messages?: number | null; + } +} + +Threads.Runs = Runs; +Threads.RunsPage = RunsPage; +Threads.Messages = Messages; +Threads.MessagesPage = MessagesPage; + +export declare namespace Threads { + export { + type AssistantResponseFormatOption as AssistantResponseFormatOption, + type AssistantToolChoice as AssistantToolChoice, + type AssistantToolChoiceFunction as AssistantToolChoiceFunction, + type AssistantToolChoiceOption as AssistantToolChoiceOption, + type Thread as Thread, + type ThreadDeleted as ThreadDeleted, + type ThreadCreateParams as ThreadCreateParams, + type ThreadUpdateParams as ThreadUpdateParams, + type ThreadCreateAndRunParams as ThreadCreateAndRunParams, + type ThreadCreateAndRunParamsNonStreaming as ThreadCreateAndRunParamsNonStreaming, + type ThreadCreateAndRunParamsStreaming as ThreadCreateAndRunParamsStreaming, + type ThreadCreateAndRunPollParams, + type ThreadCreateAndRunStreamParams, + }; + + export { + Runs as Runs, + type RequiredActionFunctionToolCall as RequiredActionFunctionToolCall, + type Run as Run, + type RunStatus as RunStatus, + RunsPage as RunsPage, + type RunCreateParams as RunCreateParams, + type RunCreateParamsNonStreaming as RunCreateParamsNonStreaming, + type RunCreateParamsStreaming as RunCreateParamsStreaming, + type RunUpdateParams as RunUpdateParams, + type RunListParams as RunListParams, + type RunCreateAndPollParams, + type RunCreateAndStreamParams, + type RunStreamParams, + type RunSubmitToolOutputsParams as RunSubmitToolOutputsParams, + type RunSubmitToolOutputsParamsNonStreaming as RunSubmitToolOutputsParamsNonStreaming, + type RunSubmitToolOutputsParamsStreaming as RunSubmitToolOutputsParamsStreaming, + type RunSubmitToolOutputsAndPollParams, + type RunSubmitToolOutputsStreamParams, + }; + + export { + Messages as Messages, + type Annotation as Annotation, + type AnnotationDelta as AnnotationDelta, + type FileCitationAnnotation as FileCitationAnnotation, + type FileCitationDeltaAnnotation as FileCitationDeltaAnnotation, + type FilePathAnnotation as FilePathAnnotation, + type FilePathDeltaAnnotation as FilePathDeltaAnnotation, + type ImageFile as ImageFile, + type ImageFileContentBlock as ImageFileContentBlock, + type ImageFileDelta as ImageFileDelta, + type ImageFileDeltaBlock as ImageFileDeltaBlock, + type ImageURL as ImageURL, + type ImageURLContentBlock as ImageURLContentBlock, + type ImageURLDelta as ImageURLDelta, + type ImageURLDeltaBlock as ImageURLDeltaBlock, + type MessagesAPIMessage as Message, + type MessageContent as MessageContent, + type MessageContentDelta as MessageContentDelta, + type MessageContentPartParam as MessageContentPartParam, + type MessageDeleted as MessageDeleted, + type MessageDelta as MessageDelta, + type MessageDeltaEvent as MessageDeltaEvent, + type RefusalContentBlock as RefusalContentBlock, + type RefusalDeltaBlock as RefusalDeltaBlock, + type Text as Text, + type TextContentBlock as TextContentBlock, + type TextContentBlockParam as TextContentBlockParam, + type TextDelta as TextDelta, + type TextDeltaBlock as TextDeltaBlock, + MessagesPage as MessagesPage, + type MessageCreateParams as MessageCreateParams, + type MessageUpdateParams as MessageUpdateParams, + type MessageListParams as MessageListParams, + }; +} diff --git a/node_modules/openai/src/resources/beta/vector-stores/file-batches.ts b/node_modules/openai/src/resources/beta/vector-stores/file-batches.ts new file mode 100644 index 0000000..2c47cb9 --- /dev/null +++ b/node_modules/openai/src/resources/beta/vector-stores/file-batches.ts @@ 
-0,0 +1,304 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +import { APIResource } from '../../../resource'; +import { isRequestOptions } from '../../../core'; +import { sleep } from '../../../core'; +import { Uploadable } from '../../../core'; +import { allSettledWithThrow } from '../../../lib/Util'; +import * as Core from '../../../core'; +import * as FilesAPI from './files'; +import { VectorStoreFilesPage } from './files'; +import * as VectorStoresAPI from './vector-stores'; +import { type CursorPageParams } from '../../../pagination'; + +export class FileBatches extends APIResource { + /** + * Create a vector store file batch. + */ + create( + vectorStoreId: string, + body: FileBatchCreateParams, + options?: Core.RequestOptions, + ): Core.APIPromise { + return this._client.post(`/vector_stores/${vectorStoreId}/file_batches`, { + body, + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + }); + } + + /** + * Retrieves a vector store file batch. + */ + retrieve( + vectorStoreId: string, + batchId: string, + options?: Core.RequestOptions, + ): Core.APIPromise { + return this._client.get(`/vector_stores/${vectorStoreId}/file_batches/${batchId}`, { + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + }); + } + + /** + * Cancel a vector store file batch. This attempts to cancel the processing of + * files in this batch as soon as possible. + */ + cancel( + vectorStoreId: string, + batchId: string, + options?: Core.RequestOptions, + ): Core.APIPromise { + return this._client.post(`/vector_stores/${vectorStoreId}/file_batches/${batchId}/cancel`, { + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + }); + } + + /** + * Create a vector store batch and poll until all files have been processed. + */ + async createAndPoll( + vectorStoreId: string, + body: FileBatchCreateParams, + options?: Core.RequestOptions & { pollIntervalMs?: number }, + ): Promise { + const batch = await this.create(vectorStoreId, body); + return await this.poll(vectorStoreId, batch.id, options); + } + + /** + * Returns a list of vector store files in a batch. + */ + listFiles( + vectorStoreId: string, + batchId: string, + query?: FileBatchListFilesParams, + options?: Core.RequestOptions, + ): Core.PagePromise; + listFiles( + vectorStoreId: string, + batchId: string, + options?: Core.RequestOptions, + ): Core.PagePromise; + listFiles( + vectorStoreId: string, + batchId: string, + query: FileBatchListFilesParams | Core.RequestOptions = {}, + options?: Core.RequestOptions, + ): Core.PagePromise { + if (isRequestOptions(query)) { + return this.listFiles(vectorStoreId, batchId, {}, query); + } + return this._client.getAPIList( + `/vector_stores/${vectorStoreId}/file_batches/${batchId}/files`, + VectorStoreFilesPage, + { query, ...options, headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers } }, + ); + } + + /** + * Wait for the given file batch to be processed. + * + * Note: this will return even if one of the files failed to process, you need to + * check batch.file_counts.failed_count to handle this case. 
+ */ + async poll( + vectorStoreId: string, + batchId: string, + options?: Core.RequestOptions & { pollIntervalMs?: number }, + ): Promise { + const headers: { [key: string]: string } = { ...options?.headers, 'X-Stainless-Poll-Helper': 'true' }; + if (options?.pollIntervalMs) { + headers['X-Stainless-Custom-Poll-Interval'] = options.pollIntervalMs.toString(); + } + + while (true) { + const { data: batch, response } = await this.retrieve(vectorStoreId, batchId, { + ...options, + headers, + }).withResponse(); + + switch (batch.status) { + case 'in_progress': + let sleepInterval = 5000; + + if (options?.pollIntervalMs) { + sleepInterval = options.pollIntervalMs; + } else { + const headerInterval = response.headers.get('openai-poll-after-ms'); + if (headerInterval) { + const headerIntervalMs = parseInt(headerInterval); + if (!isNaN(headerIntervalMs)) { + sleepInterval = headerIntervalMs; + } + } + } + await sleep(sleepInterval); + break; + case 'failed': + case 'cancelled': + case 'completed': + return batch; + } + } + } + + /** + * Uploads the given files concurrently and then creates a vector store file batch. + * + * The concurrency limit is configurable using the `maxConcurrency` parameter. + */ + async uploadAndPoll( + vectorStoreId: string, + { files, fileIds = [] }: { files: Uploadable[]; fileIds?: string[] }, + options?: Core.RequestOptions & { pollIntervalMs?: number; maxConcurrency?: number }, + ): Promise { + if (files == null || files.length == 0) { + throw new Error( + `No \`files\` provided to process. If you've already uploaded files you should use \`.createAndPoll()\` instead`, + ); + } + + const configuredConcurrency = options?.maxConcurrency ?? 5; + + // We cap the number of workers at the number of files (so we don't start any unnecessary workers) + const concurrencyLimit = Math.min(configuredConcurrency, files.length); + + const client = this._client; + const fileIterator = files.values(); + const allFileIds: string[] = [...fileIds]; + + // This code is based on this design. The libraries don't accommodate our environment limits. + // https://stackoverflow.com/questions/40639432/what-is-the-best-way-to-limit-concurrency-when-using-es6s-promise-all + async function processFiles(iterator: IterableIterator) { + for (let item of iterator) { + const fileObj = await client.files.create({ file: item, purpose: 'assistants' }, options); + allFileIds.push(fileObj.id); + } + } + + // Start workers to process results + const workers = Array(concurrencyLimit).fill(fileIterator).map(processFiles); + + // Wait for all processing to complete. + await allSettledWithThrow(workers); + + return await this.createAndPoll(vectorStoreId, { + file_ids: allFileIds, + }); + } +} + +/** + * A batch of files attached to a vector store. + */ +export interface VectorStoreFileBatch { + /** + * The identifier, which can be referenced in API endpoints. + */ + id: string; + + /** + * The Unix timestamp (in seconds) for when the vector store files batch was + * created. + */ + created_at: number; + + file_counts: VectorStoreFileBatch.FileCounts; + + /** + * The object type, which is always `vector_store.file_batch`. + */ + object: 'vector_store.files_batch'; + + /** + * The status of the vector store files batch, which can be either `in_progress`, + * `completed`, `cancelled` or `failed`. 
+ */ + status: 'in_progress' | 'completed' | 'cancelled' | 'failed'; + + /** + * The ID of the + * [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + * that the [File](https://platform.openai.com/docs/api-reference/files) is + * attached to. + */ + vector_store_id: string; +} + +export namespace VectorStoreFileBatch { + export interface FileCounts { + /** + * The number of files that where cancelled. + */ + cancelled: number; + + /** + * The number of files that have been processed. + */ + completed: number; + + /** + * The number of files that have failed to process. + */ + failed: number; + + /** + * The number of files that are currently being processed. + */ + in_progress: number; + + /** + * The total number of files. + */ + total: number; + } +} + +export interface FileBatchCreateParams { + /** + * A list of [File](https://platform.openai.com/docs/api-reference/files) IDs that + * the vector store should use. Useful for tools like `file_search` that can access + * files. + */ + file_ids: Array; + + /** + * The chunking strategy used to chunk the file(s). If not set, will use the `auto` + * strategy. Only applicable if `file_ids` is non-empty. + */ + chunking_strategy?: VectorStoresAPI.FileChunkingStrategyParam; +} + +export interface FileBatchListFilesParams extends CursorPageParams { + /** + * A cursor for use in pagination. `before` is an object ID that defines your place + * in the list. For instance, if you make a list request and receive 100 objects, + * starting with obj_foo, your subsequent call can include before=obj_foo in order + * to fetch the previous page of the list. + */ + before?: string; + + /** + * Filter by file status. One of `in_progress`, `completed`, `failed`, `cancelled`. + */ + filter?: 'in_progress' | 'completed' | 'failed' | 'cancelled'; + + /** + * Sort order by the `created_at` timestamp of the objects. `asc` for ascending + * order and `desc` for descending order. + */ + order?: 'asc' | 'desc'; +} + +export declare namespace FileBatches { + export { + type VectorStoreFileBatch as VectorStoreFileBatch, + type FileBatchCreateParams as FileBatchCreateParams, + type FileBatchListFilesParams as FileBatchListFilesParams, + }; +} + +export { VectorStoreFilesPage }; diff --git a/node_modules/openai/src/resources/beta/vector-stores/files.ts b/node_modules/openai/src/resources/beta/vector-stores/files.ts new file mode 100644 index 0000000..1fda9a9 --- /dev/null +++ b/node_modules/openai/src/resources/beta/vector-stores/files.ts @@ -0,0 +1,298 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +import { APIResource } from '../../../resource'; +import { sleep, Uploadable, isRequestOptions } from '../../../core'; +import * as Core from '../../../core'; +import * as VectorStoresAPI from './vector-stores'; +import { CursorPage, type CursorPageParams } from '../../../pagination'; + +export class Files extends APIResource { + /** + * Create a vector store file by attaching a + * [File](https://platform.openai.com/docs/api-reference/files) to a + * [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object). + */ + create( + vectorStoreId: string, + body: FileCreateParams, + options?: Core.RequestOptions, + ): Core.APIPromise { + return this._client.post(`/vector_stores/${vectorStoreId}/files`, { + body, + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + }); + } + + /** + * Retrieves a vector store file. 
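+   *
+   * For example (placeholder IDs; `client` is assumed to be an `OpenAI` instance):
+   *
+   * ```ts
+   * const vsFile = await client.beta.vectorStores.files.retrieve('vs_123', 'file-abc');
+   * console.log(vsFile.status, vsFile.last_error);
+   * ```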
+ */ + retrieve( + vectorStoreId: string, + fileId: string, + options?: Core.RequestOptions, + ): Core.APIPromise { + return this._client.get(`/vector_stores/${vectorStoreId}/files/${fileId}`, { + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + }); + } + + /** + * Returns a list of vector store files. + */ + list( + vectorStoreId: string, + query?: FileListParams, + options?: Core.RequestOptions, + ): Core.PagePromise; + list( + vectorStoreId: string, + options?: Core.RequestOptions, + ): Core.PagePromise; + list( + vectorStoreId: string, + query: FileListParams | Core.RequestOptions = {}, + options?: Core.RequestOptions, + ): Core.PagePromise { + if (isRequestOptions(query)) { + return this.list(vectorStoreId, {}, query); + } + return this._client.getAPIList(`/vector_stores/${vectorStoreId}/files`, VectorStoreFilesPage, { + query, + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + }); + } + + /** + * Delete a vector store file. This will remove the file from the vector store but + * the file itself will not be deleted. To delete the file, use the + * [delete file](https://platform.openai.com/docs/api-reference/files/delete) + * endpoint. + */ + del( + vectorStoreId: string, + fileId: string, + options?: Core.RequestOptions, + ): Core.APIPromise { + return this._client.delete(`/vector_stores/${vectorStoreId}/files/${fileId}`, { + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + }); + } + + /** + * Attach a file to the given vector store and wait for it to be processed. + */ + async createAndPoll( + vectorStoreId: string, + body: FileCreateParams, + options?: Core.RequestOptions & { pollIntervalMs?: number }, + ): Promise { + const file = await this.create(vectorStoreId, body, options); + return await this.poll(vectorStoreId, file.id, options); + } + + /** + * Wait for the vector store file to finish processing. + * + * Note: this will return even if the file failed to process, you need to check + * file.last_error and file.status to handle these cases + */ + async poll( + vectorStoreId: string, + fileId: string, + options?: Core.RequestOptions & { pollIntervalMs?: number }, + ): Promise { + const headers: { [key: string]: string } = { ...options?.headers, 'X-Stainless-Poll-Helper': 'true' }; + if (options?.pollIntervalMs) { + headers['X-Stainless-Custom-Poll-Interval'] = options.pollIntervalMs.toString(); + } + while (true) { + const fileResponse = await this.retrieve(vectorStoreId, fileId, { + ...options, + headers, + }).withResponse(); + + const file = fileResponse.data; + + switch (file.status) { + case 'in_progress': + let sleepInterval = 5000; + + if (options?.pollIntervalMs) { + sleepInterval = options.pollIntervalMs; + } else { + const headerInterval = fileResponse.response.headers.get('openai-poll-after-ms'); + if (headerInterval) { + const headerIntervalMs = parseInt(headerInterval); + if (!isNaN(headerIntervalMs)) { + sleepInterval = headerIntervalMs; + } + } + } + await sleep(sleepInterval); + break; + case 'failed': + case 'completed': + return file; + } + } + } + + /** + * Upload a file to the `files` API and then attach it to the given vector store. + * + * Note the file will be asynchronously processed (you can use the alternative + * polling helper method to wait for processing to complete). 
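+   *
+   * A minimal sketch using a Node stream as the `Uploadable` (the path and store
+   * ID are placeholders):
+   *
+   * ```ts
+   * import fs from 'fs';
+   *
+   * const vectorStoreFile = await client.beta.vectorStores.files.upload(
+   *   'vs_123',
+   *   fs.createReadStream('./docs/handbook.pdf'),
+   * );
+   * // or, to wait for processing to finish:
+   * // await client.beta.vectorStores.files.uploadAndPoll('vs_123', fs.createReadStream('./docs/handbook.pdf'));
+   * ```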
+ */ + async upload( + vectorStoreId: string, + file: Uploadable, + options?: Core.RequestOptions, + ): Promise { + const fileInfo = await this._client.files.create({ file: file, purpose: 'assistants' }, options); + return this.create(vectorStoreId, { file_id: fileInfo.id }, options); + } + + /** + * Add a file to a vector store and poll until processing is complete. + */ + async uploadAndPoll( + vectorStoreId: string, + file: Uploadable, + options?: Core.RequestOptions & { pollIntervalMs?: number }, + ): Promise { + const fileInfo = await this.upload(vectorStoreId, file, options); + return await this.poll(vectorStoreId, fileInfo.id, options); + } +} + +export class VectorStoreFilesPage extends CursorPage {} + +/** + * A list of files attached to a vector store. + */ +export interface VectorStoreFile { + /** + * The identifier, which can be referenced in API endpoints. + */ + id: string; + + /** + * The Unix timestamp (in seconds) for when the vector store file was created. + */ + created_at: number; + + /** + * The last error associated with this vector store file. Will be `null` if there + * are no errors. + */ + last_error: VectorStoreFile.LastError | null; + + /** + * The object type, which is always `vector_store.file`. + */ + object: 'vector_store.file'; + + /** + * The status of the vector store file, which can be either `in_progress`, + * `completed`, `cancelled`, or `failed`. The status `completed` indicates that the + * vector store file is ready for use. + */ + status: 'in_progress' | 'completed' | 'cancelled' | 'failed'; + + /** + * The total vector store usage in bytes. Note that this may be different from the + * original file size. + */ + usage_bytes: number; + + /** + * The ID of the + * [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + * that the [File](https://platform.openai.com/docs/api-reference/files) is + * attached to. + */ + vector_store_id: string; + + /** + * The strategy used to chunk the file. + */ + chunking_strategy?: VectorStoresAPI.FileChunkingStrategy; +} + +export namespace VectorStoreFile { + /** + * The last error associated with this vector store file. Will be `null` if there + * are no errors. + */ + export interface LastError { + /** + * One of `server_error` or `rate_limit_exceeded`. + */ + code: 'server_error' | 'unsupported_file' | 'invalid_file'; + + /** + * A human-readable description of the error. + */ + message: string; + } +} + +export interface VectorStoreFileDeleted { + id: string; + + deleted: boolean; + + object: 'vector_store.file.deleted'; +} + +export interface FileCreateParams { + /** + * A [File](https://platform.openai.com/docs/api-reference/files) ID that the + * vector store should use. Useful for tools like `file_search` that can access + * files. + */ + file_id: string; + + /** + * The chunking strategy used to chunk the file(s). If not set, will use the `auto` + * strategy. Only applicable if `file_ids` is non-empty. + */ + chunking_strategy?: VectorStoresAPI.FileChunkingStrategyParam; +} + +export interface FileListParams extends CursorPageParams { + /** + * A cursor for use in pagination. `before` is an object ID that defines your place + * in the list. For instance, if you make a list request and receive 100 objects, + * starting with obj_foo, your subsequent call can include before=obj_foo in order + * to fetch the previous page of the list. + */ + before?: string; + + /** + * Filter by file status. One of `in_progress`, `completed`, `failed`, `cancelled`. 
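+   *
+   * For example, to iterate over only the files that finished processing
+   * (auto-pagination sketch; the store ID is a placeholder):
+   *
+   * ```ts
+   * for await (const file of client.beta.vectorStores.files.list('vs_123', { filter: 'completed' })) {
+   *   console.log(file.id);
+   * }
+   * ```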
+ */ + filter?: 'in_progress' | 'completed' | 'failed' | 'cancelled'; + + /** + * Sort order by the `created_at` timestamp of the objects. `asc` for ascending + * order and `desc` for descending order. + */ + order?: 'asc' | 'desc'; +} + +Files.VectorStoreFilesPage = VectorStoreFilesPage; + +export declare namespace Files { + export { + type VectorStoreFile as VectorStoreFile, + type VectorStoreFileDeleted as VectorStoreFileDeleted, + VectorStoreFilesPage as VectorStoreFilesPage, + type FileCreateParams as FileCreateParams, + type FileListParams as FileListParams, + }; +} diff --git a/node_modules/openai/src/resources/beta/vector-stores/index.ts b/node_modules/openai/src/resources/beta/vector-stores/index.ts new file mode 100644 index 0000000..89fc0cd --- /dev/null +++ b/node_modules/openai/src/resources/beta/vector-stores/index.ts @@ -0,0 +1,32 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +export { + FileBatches, + type VectorStoreFileBatch, + type FileBatchCreateParams, + type FileBatchListFilesParams, +} from './file-batches'; +export { + VectorStoreFilesPage, + Files, + type VectorStoreFile, + type VectorStoreFileDeleted, + type FileCreateParams, + type FileListParams, +} from './files'; +export { + VectorStoresPage, + VectorStores, + type AutoFileChunkingStrategyParam, + type FileChunkingStrategy, + type FileChunkingStrategyParam, + type OtherFileChunkingStrategyObject, + type StaticFileChunkingStrategy, + type StaticFileChunkingStrategyObject, + type StaticFileChunkingStrategyParam, + type VectorStore, + type VectorStoreDeleted, + type VectorStoreCreateParams, + type VectorStoreUpdateParams, + type VectorStoreListParams, +} from './vector-stores'; diff --git a/node_modules/openai/src/resources/beta/vector-stores/vector-stores.ts b/node_modules/openai/src/resources/beta/vector-stores/vector-stores.ts new file mode 100644 index 0000000..35ad8c3 --- /dev/null +++ b/node_modules/openai/src/resources/beta/vector-stores/vector-stores.ts @@ -0,0 +1,424 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +import { APIResource } from '../../../resource'; +import { isRequestOptions } from '../../../core'; +import * as Core from '../../../core'; +import * as FileBatchesAPI from './file-batches'; +import { + FileBatchCreateParams, + FileBatchListFilesParams, + FileBatches, + VectorStoreFileBatch, +} from './file-batches'; +import * as FilesAPI from './files'; +import { + FileCreateParams, + FileListParams, + Files, + VectorStoreFile, + VectorStoreFileDeleted, + VectorStoreFilesPage, +} from './files'; +import { CursorPage, type CursorPageParams } from '../../../pagination'; + +export class VectorStores extends APIResource { + files: FilesAPI.Files = new FilesAPI.Files(this._client); + fileBatches: FileBatchesAPI.FileBatches = new FileBatchesAPI.FileBatches(this._client); + + /** + * Create a vector store. + */ + create(body: VectorStoreCreateParams, options?: Core.RequestOptions): Core.APIPromise { + return this._client.post('/vector_stores', { + body, + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + }); + } + + /** + * Retrieves a vector store. + */ + retrieve(vectorStoreId: string, options?: Core.RequestOptions): Core.APIPromise { + return this._client.get(`/vector_stores/${vectorStoreId}`, { + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + }); + } + + /** + * Modifies a vector store. 
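+   *
+   * For example, renaming a store and giving it an expiration policy (sketch;
+   * the ID is a placeholder):
+   *
+   * ```ts
+   * const store = await client.beta.vectorStores.update('vs_123', {
+   *   name: 'support-docs',
+   *   expires_after: { anchor: 'last_active_at', days: 7 },
+   * });
+   * ```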
+ */ + update( + vectorStoreId: string, + body: VectorStoreUpdateParams, + options?: Core.RequestOptions, + ): Core.APIPromise { + return this._client.post(`/vector_stores/${vectorStoreId}`, { + body, + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + }); + } + + /** + * Returns a list of vector stores. + */ + list( + query?: VectorStoreListParams, + options?: Core.RequestOptions, + ): Core.PagePromise; + list(options?: Core.RequestOptions): Core.PagePromise; + list( + query: VectorStoreListParams | Core.RequestOptions = {}, + options?: Core.RequestOptions, + ): Core.PagePromise { + if (isRequestOptions(query)) { + return this.list({}, query); + } + return this._client.getAPIList('/vector_stores', VectorStoresPage, { + query, + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + }); + } + + /** + * Delete a vector store. + */ + del(vectorStoreId: string, options?: Core.RequestOptions): Core.APIPromise { + return this._client.delete(`/vector_stores/${vectorStoreId}`, { + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + }); + } +} + +export class VectorStoresPage extends CursorPage {} + +/** + * The default strategy. This strategy currently uses a `max_chunk_size_tokens` of + * `800` and `chunk_overlap_tokens` of `400`. + */ +export interface AutoFileChunkingStrategyParam { + /** + * Always `auto`. + */ + type: 'auto'; +} + +/** + * The strategy used to chunk the file. + */ +export type FileChunkingStrategy = StaticFileChunkingStrategyObject | OtherFileChunkingStrategyObject; + +/** + * The chunking strategy used to chunk the file(s). If not set, will use the `auto` + * strategy. Only applicable if `file_ids` is non-empty. + */ +export type FileChunkingStrategyParam = AutoFileChunkingStrategyParam | StaticFileChunkingStrategyParam; + +/** + * This is returned when the chunking strategy is unknown. Typically, this is + * because the file was indexed before the `chunking_strategy` concept was + * introduced in the API. + */ +export interface OtherFileChunkingStrategyObject { + /** + * Always `other`. + */ + type: 'other'; +} + +export interface StaticFileChunkingStrategy { + /** + * The number of tokens that overlap between chunks. The default value is `400`. + * + * Note that the overlap must not exceed half of `max_chunk_size_tokens`. + */ + chunk_overlap_tokens: number; + + /** + * The maximum number of tokens in each chunk. The default value is `800`. The + * minimum value is `100` and the maximum value is `4096`. + */ + max_chunk_size_tokens: number; +} + +export interface StaticFileChunkingStrategyObject { + static: StaticFileChunkingStrategy; + + /** + * Always `static`. + */ + type: 'static'; +} + +export interface StaticFileChunkingStrategyParam { + static: StaticFileChunkingStrategy; + + /** + * Always `static`. + */ + type: 'static'; +} + +/** + * A vector store is a collection of processed files can be used by the + * `file_search` tool. + */ +export interface VectorStore { + /** + * The identifier, which can be referenced in API endpoints. + */ + id: string; + + /** + * The Unix timestamp (in seconds) for when the vector store was created. + */ + created_at: number; + + file_counts: VectorStore.FileCounts; + + /** + * The Unix timestamp (in seconds) for when the vector store was last active. + */ + last_active_at: number | null; + + /** + * Set of 16 key-value pairs that can be attached to an object. 
This can be useful + * for storing additional information about the object in a structured format. Keys + * can be a maximum of 64 characters long and values can be a maxium of 512 + * characters long. + */ + metadata: unknown | null; + + /** + * The name of the vector store. + */ + name: string; + + /** + * The object type, which is always `vector_store`. + */ + object: 'vector_store'; + + /** + * The status of the vector store, which can be either `expired`, `in_progress`, or + * `completed`. A status of `completed` indicates that the vector store is ready + * for use. + */ + status: 'expired' | 'in_progress' | 'completed'; + + /** + * The total number of bytes used by the files in the vector store. + */ + usage_bytes: number; + + /** + * The expiration policy for a vector store. + */ + expires_after?: VectorStore.ExpiresAfter; + + /** + * The Unix timestamp (in seconds) for when the vector store will expire. + */ + expires_at?: number | null; +} + +export namespace VectorStore { + export interface FileCounts { + /** + * The number of files that were cancelled. + */ + cancelled: number; + + /** + * The number of files that have been successfully processed. + */ + completed: number; + + /** + * The number of files that have failed to process. + */ + failed: number; + + /** + * The number of files that are currently being processed. + */ + in_progress: number; + + /** + * The total number of files. + */ + total: number; + } + + /** + * The expiration policy for a vector store. + */ + export interface ExpiresAfter { + /** + * Anchor timestamp after which the expiration policy applies. Supported anchors: + * `last_active_at`. + */ + anchor: 'last_active_at'; + + /** + * The number of days after the anchor time that the vector store will expire. + */ + days: number; + } +} + +export interface VectorStoreDeleted { + id: string; + + deleted: boolean; + + object: 'vector_store.deleted'; +} + +export interface VectorStoreCreateParams { + /** + * The chunking strategy used to chunk the file(s). If not set, will use the `auto` + * strategy. Only applicable if `file_ids` is non-empty. + */ + chunking_strategy?: FileChunkingStrategyParam; + + /** + * The expiration policy for a vector store. + */ + expires_after?: VectorStoreCreateParams.ExpiresAfter; + + /** + * A list of [File](https://platform.openai.com/docs/api-reference/files) IDs that + * the vector store should use. Useful for tools like `file_search` that can access + * files. + */ + file_ids?: Array; + + /** + * Set of 16 key-value pairs that can be attached to an object. This can be useful + * for storing additional information about the object in a structured format. Keys + * can be a maximum of 64 characters long and values can be a maxium of 512 + * characters long. + */ + metadata?: unknown | null; + + /** + * The name of the vector store. + */ + name?: string; +} + +export namespace VectorStoreCreateParams { + /** + * The expiration policy for a vector store. + */ + export interface ExpiresAfter { + /** + * Anchor timestamp after which the expiration policy applies. Supported anchors: + * `last_active_at`. + */ + anchor: 'last_active_at'; + + /** + * The number of days after the anchor time that the vector store will expire. + */ + days: number; + } +} + +export interface VectorStoreUpdateParams { + /** + * The expiration policy for a vector store. + */ + expires_after?: VectorStoreUpdateParams.ExpiresAfter | null; + + /** + * Set of 16 key-value pairs that can be attached to an object. 
This can be useful + * for storing additional information about the object in a structured format. Keys + * can be a maximum of 64 characters long and values can be a maxium of 512 + * characters long. + */ + metadata?: unknown | null; + + /** + * The name of the vector store. + */ + name?: string | null; +} + +export namespace VectorStoreUpdateParams { + /** + * The expiration policy for a vector store. + */ + export interface ExpiresAfter { + /** + * Anchor timestamp after which the expiration policy applies. Supported anchors: + * `last_active_at`. + */ + anchor: 'last_active_at'; + + /** + * The number of days after the anchor time that the vector store will expire. + */ + days: number; + } +} + +export interface VectorStoreListParams extends CursorPageParams { + /** + * A cursor for use in pagination. `before` is an object ID that defines your place + * in the list. For instance, if you make a list request and receive 100 objects, + * starting with obj_foo, your subsequent call can include before=obj_foo in order + * to fetch the previous page of the list. + */ + before?: string; + + /** + * Sort order by the `created_at` timestamp of the objects. `asc` for ascending + * order and `desc` for descending order. + */ + order?: 'asc' | 'desc'; +} + +VectorStores.VectorStoresPage = VectorStoresPage; +VectorStores.Files = Files; +VectorStores.VectorStoreFilesPage = VectorStoreFilesPage; +VectorStores.FileBatches = FileBatches; + +export declare namespace VectorStores { + export { + type AutoFileChunkingStrategyParam as AutoFileChunkingStrategyParam, + type FileChunkingStrategy as FileChunkingStrategy, + type FileChunkingStrategyParam as FileChunkingStrategyParam, + type OtherFileChunkingStrategyObject as OtherFileChunkingStrategyObject, + type StaticFileChunkingStrategy as StaticFileChunkingStrategy, + type StaticFileChunkingStrategyObject as StaticFileChunkingStrategyObject, + type StaticFileChunkingStrategyParam as StaticFileChunkingStrategyParam, + type VectorStore as VectorStore, + type VectorStoreDeleted as VectorStoreDeleted, + VectorStoresPage as VectorStoresPage, + type VectorStoreCreateParams as VectorStoreCreateParams, + type VectorStoreUpdateParams as VectorStoreUpdateParams, + type VectorStoreListParams as VectorStoreListParams, + }; + + export { + Files as Files, + type VectorStoreFile as VectorStoreFile, + type VectorStoreFileDeleted as VectorStoreFileDeleted, + VectorStoreFilesPage as VectorStoreFilesPage, + type FileCreateParams as FileCreateParams, + type FileListParams as FileListParams, + }; + + export { + FileBatches as FileBatches, + type VectorStoreFileBatch as VectorStoreFileBatch, + type FileBatchCreateParams as FileBatchCreateParams, + type FileBatchListFilesParams as FileBatchListFilesParams, + }; +} diff --git a/node_modules/openai/src/resources/chat/chat.ts b/node_modules/openai/src/resources/chat/chat.ts new file mode 100644 index 0000000..2230b19 --- /dev/null +++ b/node_modules/openai/src/resources/chat/chat.ts @@ -0,0 +1,130 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +import { APIResource } from '../../resource'; +import * as CompletionsAPI from './completions'; +import { + ChatCompletion, + ChatCompletionAssistantMessageParam, + ChatCompletionAudio, + ChatCompletionAudioParam, + ChatCompletionChunk, + ChatCompletionContentPart, + ChatCompletionContentPartImage, + ChatCompletionContentPartInputAudio, + ChatCompletionContentPartRefusal, + ChatCompletionContentPartText, + ChatCompletionCreateParams, + ChatCompletionCreateParamsNonStreaming, + ChatCompletionCreateParamsStreaming, + ChatCompletionDeveloperMessageParam, + ChatCompletionFunctionCallOption, + ChatCompletionFunctionMessageParam, + ChatCompletionMessage, + ChatCompletionMessageParam, + ChatCompletionMessageToolCall, + ChatCompletionModality, + ChatCompletionNamedToolChoice, + ChatCompletionPredictionContent, + ChatCompletionReasoningEffort, + ChatCompletionRole, + ChatCompletionStreamOptions, + ChatCompletionSystemMessageParam, + ChatCompletionTokenLogprob, + ChatCompletionTool, + ChatCompletionToolChoiceOption, + ChatCompletionToolMessageParam, + ChatCompletionUserMessageParam, + CompletionCreateParams, + CompletionCreateParamsNonStreaming, + CompletionCreateParamsStreaming, + Completions, + CreateChatCompletionRequestMessage, +} from './completions'; + +export class Chat extends APIResource { + completions: CompletionsAPI.Completions = new CompletionsAPI.Completions(this._client); +} + +export type ChatModel = + | 'o1' + | 'o1-2024-12-17' + | 'o1-preview' + | 'o1-preview-2024-09-12' + | 'o1-mini' + | 'o1-mini-2024-09-12' + | 'gpt-4o' + | 'gpt-4o-2024-11-20' + | 'gpt-4o-2024-08-06' + | 'gpt-4o-2024-05-13' + | 'gpt-4o-audio-preview' + | 'gpt-4o-audio-preview-2024-10-01' + | 'gpt-4o-audio-preview-2024-12-17' + | 'gpt-4o-mini-audio-preview' + | 'gpt-4o-mini-audio-preview-2024-12-17' + | 'chatgpt-4o-latest' + | 'gpt-4o-mini' + | 'gpt-4o-mini-2024-07-18' + | 'gpt-4-turbo' + | 'gpt-4-turbo-2024-04-09' + | 'gpt-4-0125-preview' + | 'gpt-4-turbo-preview' + | 'gpt-4-1106-preview' + | 'gpt-4-vision-preview' + | 'gpt-4' + | 'gpt-4-0314' + | 'gpt-4-0613' + | 'gpt-4-32k' + | 'gpt-4-32k-0314' + | 'gpt-4-32k-0613' + | 'gpt-3.5-turbo' + | 'gpt-3.5-turbo-16k' + | 'gpt-3.5-turbo-0301' + | 'gpt-3.5-turbo-0613' + | 'gpt-3.5-turbo-1106' + | 'gpt-3.5-turbo-0125' + | 'gpt-3.5-turbo-16k-0613'; + +Chat.Completions = Completions; + +export declare namespace Chat { + export { type ChatModel as ChatModel }; + + export { + Completions as Completions, + type ChatCompletion as ChatCompletion, + type ChatCompletionAssistantMessageParam as ChatCompletionAssistantMessageParam, + type ChatCompletionAudio as ChatCompletionAudio, + type ChatCompletionAudioParam as ChatCompletionAudioParam, + type ChatCompletionChunk as ChatCompletionChunk, + type ChatCompletionContentPart as ChatCompletionContentPart, + type ChatCompletionContentPartImage as ChatCompletionContentPartImage, + type ChatCompletionContentPartInputAudio as ChatCompletionContentPartInputAudio, + type ChatCompletionContentPartRefusal as ChatCompletionContentPartRefusal, + type ChatCompletionContentPartText as ChatCompletionContentPartText, + type ChatCompletionDeveloperMessageParam as ChatCompletionDeveloperMessageParam, + type ChatCompletionFunctionCallOption as ChatCompletionFunctionCallOption, + type ChatCompletionFunctionMessageParam as ChatCompletionFunctionMessageParam, + type ChatCompletionMessage as ChatCompletionMessage, + type ChatCompletionMessageParam as ChatCompletionMessageParam, + type ChatCompletionMessageToolCall as ChatCompletionMessageToolCall, + 
type ChatCompletionModality as ChatCompletionModality, + type ChatCompletionNamedToolChoice as ChatCompletionNamedToolChoice, + type ChatCompletionPredictionContent as ChatCompletionPredictionContent, + type ChatCompletionReasoningEffort as ChatCompletionReasoningEffort, + type ChatCompletionRole as ChatCompletionRole, + type ChatCompletionStreamOptions as ChatCompletionStreamOptions, + type ChatCompletionSystemMessageParam as ChatCompletionSystemMessageParam, + type ChatCompletionTokenLogprob as ChatCompletionTokenLogprob, + type ChatCompletionTool as ChatCompletionTool, + type ChatCompletionToolChoiceOption as ChatCompletionToolChoiceOption, + type ChatCompletionToolMessageParam as ChatCompletionToolMessageParam, + type ChatCompletionUserMessageParam as ChatCompletionUserMessageParam, + type CreateChatCompletionRequestMessage as CreateChatCompletionRequestMessage, + type ChatCompletionCreateParams as ChatCompletionCreateParams, + type CompletionCreateParams as CompletionCreateParams, + type ChatCompletionCreateParamsNonStreaming as ChatCompletionCreateParamsNonStreaming, + type CompletionCreateParamsNonStreaming as CompletionCreateParamsNonStreaming, + type ChatCompletionCreateParamsStreaming as ChatCompletionCreateParamsStreaming, + type CompletionCreateParamsStreaming as CompletionCreateParamsStreaming, + }; +} diff --git a/node_modules/openai/src/resources/chat/completions.ts b/node_modules/openai/src/resources/chat/completions.ts new file mode 100644 index 0000000..31f5814 --- /dev/null +++ b/node_modules/openai/src/resources/chat/completions.ts @@ -0,0 +1,1314 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +import { APIResource } from '../../resource'; +import { APIPromise } from '../../core'; +import * as Core from '../../core'; +import * as ChatCompletionsAPI from './completions'; +import * as CompletionsAPI from '../completions'; +import * as Shared from '../shared'; +import * as ChatAPI from './chat'; +import { Stream } from '../../streaming'; + +export class Completions extends APIResource { + /** + * Creates a model response for the given chat conversation. Learn more in the + * [text generation](https://platform.openai.com/docs/guides/text-generation), + * [vision](https://platform.openai.com/docs/guides/vision), and + * [audio](https://platform.openai.com/docs/guides/audio) guides. + * + * Parameter support can differ depending on the model used to generate the + * response, particularly for newer reasoning models. Parameters that are only + * supported for reasoning models are noted below. For the current state of + * unsupported parameters in reasoning models, + * [refer to the reasoning guide](https://platform.openai.com/docs/guides/reasoning). + */ + create( + body: ChatCompletionCreateParamsNonStreaming, + options?: Core.RequestOptions, + ): APIPromise; + create( + body: ChatCompletionCreateParamsStreaming, + options?: Core.RequestOptions, + ): APIPromise>; + create( + body: ChatCompletionCreateParamsBase, + options?: Core.RequestOptions, + ): APIPromise | ChatCompletion>; + create( + body: ChatCompletionCreateParams, + options?: Core.RequestOptions, + ): APIPromise | APIPromise> { + return this._client.post('/chat/completions', { body, ...options, stream: body.stream ?? false }) as + | APIPromise + | APIPromise>; + } +} + +/** + * Represents a chat completion response returned by model, based on the provided + * input. + */ +export interface ChatCompletion { + /** + * A unique identifier for the chat completion. 
+ */ + id: string; + + /** + * A list of chat completion choices. Can be more than one if `n` is greater + * than 1. + */ + choices: Array; + + /** + * The Unix timestamp (in seconds) of when the chat completion was created. + */ + created: number; + + /** + * The model used for the chat completion. + */ + model: string; + + /** + * The object type, which is always `chat.completion`. + */ + object: 'chat.completion'; + + /** + * The service tier used for processing the request. This field is only included if + * the `service_tier` parameter is specified in the request. + */ + service_tier?: 'scale' | 'default' | null; + + /** + * This fingerprint represents the backend configuration that the model runs with. + * + * Can be used in conjunction with the `seed` request parameter to understand when + * backend changes have been made that might impact determinism. + */ + system_fingerprint?: string; + + /** + * Usage statistics for the completion request. + */ + usage?: CompletionsAPI.CompletionUsage; +} + +export namespace ChatCompletion { + export interface Choice { + /** + * The reason the model stopped generating tokens. This will be `stop` if the model + * hit a natural stop point or a provided stop sequence, `length` if the maximum + * number of tokens specified in the request was reached, `content_filter` if + * content was omitted due to a flag from our content filters, `tool_calls` if the + * model called a tool, or `function_call` (deprecated) if the model called a + * function. + */ + finish_reason: 'stop' | 'length' | 'tool_calls' | 'content_filter' | 'function_call'; + + /** + * The index of the choice in the list of choices. + */ + index: number; + + /** + * Log probability information for the choice. + */ + logprobs: Choice.Logprobs | null; + + /** + * A chat completion message generated by the model. + */ + message: ChatCompletionsAPI.ChatCompletionMessage; + } + + export namespace Choice { + /** + * Log probability information for the choice. + */ + export interface Logprobs { + /** + * A list of message content tokens with log probability information. + */ + content: Array | null; + + /** + * A list of message refusal tokens with log probability information. + */ + refusal: Array | null; + } + } +} + +/** + * Messages sent by the model in response to user messages. + */ +export interface ChatCompletionAssistantMessageParam { + /** + * The role of the messages author, in this case `assistant`. + */ + role: 'assistant'; + + /** + * Data about a previous audio response from the model. + * [Learn more](https://platform.openai.com/docs/guides/audio). + */ + audio?: ChatCompletionAssistantMessageParam.Audio | null; + + /** + * The contents of the assistant message. Required unless `tool_calls` or + * `function_call` is specified. + */ + content?: string | Array | null; + + /** + * @deprecated: Deprecated and replaced by `tool_calls`. The name and arguments of + * a function that should be called, as generated by the model. + */ + function_call?: ChatCompletionAssistantMessageParam.FunctionCall | null; + + /** + * An optional name for the participant. Provides the model information to + * differentiate between participants of the same role. + */ + name?: string; + + /** + * The refusal message by the assistant. + */ + refusal?: string | null; + + /** + * The tool calls generated by the model, such as function calls. + */ + tool_calls?: Array; +} + +export namespace ChatCompletionAssistantMessageParam { + /** + * Data about a previous audio response from the model. 
+ * [Learn more](https://platform.openai.com/docs/guides/audio). + */ + export interface Audio { + /** + * Unique identifier for a previous audio response from the model. + */ + id: string; + } + + /** + * @deprecated: Deprecated and replaced by `tool_calls`. The name and arguments of + * a function that should be called, as generated by the model. + */ + export interface FunctionCall { + /** + * The arguments to call the function with, as generated by the model in JSON + * format. Note that the model does not always generate valid JSON, and may + * hallucinate parameters not defined by your function schema. Validate the + * arguments in your code before calling your function. + */ + arguments: string; + + /** + * The name of the function to call. + */ + name: string; + } +} + +/** + * If the audio output modality is requested, this object contains data about the + * audio response from the model. + * [Learn more](https://platform.openai.com/docs/guides/audio). + */ +export interface ChatCompletionAudio { + /** + * Unique identifier for this audio response. + */ + id: string; + + /** + * Base64 encoded audio bytes generated by the model, in the format specified in + * the request. + */ + data: string; + + /** + * The Unix timestamp (in seconds) for when this audio response will no longer be + * accessible on the server for use in multi-turn conversations. + */ + expires_at: number; + + /** + * Transcript of the audio generated by the model. + */ + transcript: string; +} + +/** + * Parameters for audio output. Required when audio output is requested with + * `modalities: ["audio"]`. + * [Learn more](https://platform.openai.com/docs/guides/audio). + */ +export interface ChatCompletionAudioParam { + /** + * Specifies the output audio format. Must be one of `wav`, `mp3`, `flac`, `opus`, + * or `pcm16`. + */ + format: 'wav' | 'mp3' | 'flac' | 'opus' | 'pcm16'; + + /** + * The voice the model uses to respond. Supported voices are `ash`, `ballad`, + * `coral`, `sage`, and `verse` (also supported but not recommended are `alloy`, + * `echo`, and `shimmer`; these voices are less expressive). + */ + voice: 'alloy' | 'ash' | 'ballad' | 'coral' | 'echo' | 'sage' | 'shimmer' | 'verse'; +} + +/** + * Represents a streamed chunk of a chat completion response returned by model, + * based on the provided input. + */ +export interface ChatCompletionChunk { + /** + * A unique identifier for the chat completion. Each chunk has the same ID. + */ + id: string; + + /** + * A list of chat completion choices. Can contain more than one elements if `n` is + * greater than 1. Can also be empty for the last chunk if you set + * `stream_options: {"include_usage": true}`. + */ + choices: Array; + + /** + * The Unix timestamp (in seconds) of when the chat completion was created. Each + * chunk has the same timestamp. + */ + created: number; + + /** + * The model to generate the completion. + */ + model: string; + + /** + * The object type, which is always `chat.completion.chunk`. + */ + object: 'chat.completion.chunk'; + + /** + * The service tier used for processing the request. This field is only included if + * the `service_tier` parameter is specified in the request. + */ + service_tier?: 'scale' | 'default' | null; + + /** + * This fingerprint represents the backend configuration that the model runs with. + * Can be used in conjunction with the `seed` request parameter to understand when + * backend changes have been made that might impact determinism. 
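+   *
+   * Chunks of this shape are produced when `stream: true` is set on the request;
+   * an illustrative way to consume them (the model name is illustrative):
+   *
+   * ```ts
+   * const stream = await client.chat.completions.create({
+   *   model: 'gpt-4o-mini',
+   *   messages: [{ role: 'user', content: 'Say hello' }],
+   *   stream: true,
+   * });
+   * for await (const chunk of stream) {
+   *   process.stdout.write(chunk.choices[0]?.delta?.content ?? '');
+   * }
+   * ```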
+ */ + system_fingerprint?: string; + + /** + * An optional field that will only be present when you set + * `stream_options: {"include_usage": true}` in your request. When present, it + * contains a null value except for the last chunk which contains the token usage + * statistics for the entire request. + */ + usage?: CompletionsAPI.CompletionUsage | null; +} + +export namespace ChatCompletionChunk { + export interface Choice { + /** + * A chat completion delta generated by streamed model responses. + */ + delta: Choice.Delta; + + /** + * The reason the model stopped generating tokens. This will be `stop` if the model + * hit a natural stop point or a provided stop sequence, `length` if the maximum + * number of tokens specified in the request was reached, `content_filter` if + * content was omitted due to a flag from our content filters, `tool_calls` if the + * model called a tool, or `function_call` (deprecated) if the model called a + * function. + */ + finish_reason: 'stop' | 'length' | 'tool_calls' | 'content_filter' | 'function_call' | null; + + /** + * The index of the choice in the list of choices. + */ + index: number; + + /** + * Log probability information for the choice. + */ + logprobs?: Choice.Logprobs | null; + } + + export namespace Choice { + /** + * A chat completion delta generated by streamed model responses. + */ + export interface Delta { + /** + * The contents of the chunk message. + */ + content?: string | null; + + /** + * @deprecated: Deprecated and replaced by `tool_calls`. The name and arguments of + * a function that should be called, as generated by the model. + */ + function_call?: Delta.FunctionCall; + + /** + * The refusal message generated by the model. + */ + refusal?: string | null; + + /** + * The role of the author of this message. + */ + role?: 'system' | 'user' | 'assistant' | 'tool'; + + tool_calls?: Array; + } + + export namespace Delta { + /** + * @deprecated: Deprecated and replaced by `tool_calls`. The name and arguments of + * a function that should be called, as generated by the model. + */ + export interface FunctionCall { + /** + * The arguments to call the function with, as generated by the model in JSON + * format. Note that the model does not always generate valid JSON, and may + * hallucinate parameters not defined by your function schema. Validate the + * arguments in your code before calling your function. + */ + arguments?: string; + + /** + * The name of the function to call. + */ + name?: string; + } + + export interface ToolCall { + index: number; + + /** + * The ID of the tool call. + */ + id?: string; + + function?: ToolCall.Function; + + /** + * The type of the tool. Currently, only `function` is supported. + */ + type?: 'function'; + } + + export namespace ToolCall { + export interface Function { + /** + * The arguments to call the function with, as generated by the model in JSON + * format. Note that the model does not always generate valid JSON, and may + * hallucinate parameters not defined by your function schema. Validate the + * arguments in your code before calling your function. + */ + arguments?: string; + + /** + * The name of the function to call. + */ + name?: string; + } + } + } + + /** + * Log probability information for the choice. + */ + export interface Logprobs { + /** + * A list of message content tokens with log probability information. + */ + content: Array | null; + + /** + * A list of message refusal tokens with log probability information. 
+ */ + refusal: Array | null; + } + } +} + +/** + * Learn about + * [text inputs](https://platform.openai.com/docs/guides/text-generation). + */ +export type ChatCompletionContentPart = + | ChatCompletionContentPartText + | ChatCompletionContentPartImage + | ChatCompletionContentPartInputAudio; + +/** + * Learn about [image inputs](https://platform.openai.com/docs/guides/vision). + */ +export interface ChatCompletionContentPartImage { + image_url: ChatCompletionContentPartImage.ImageURL; + + /** + * The type of the content part. + */ + type: 'image_url'; +} + +export namespace ChatCompletionContentPartImage { + export interface ImageURL { + /** + * Either a URL of the image or the base64 encoded image data. + */ + url: string; + + /** + * Specifies the detail level of the image. Learn more in the + * [Vision guide](https://platform.openai.com/docs/guides/vision#low-or-high-fidelity-image-understanding). + */ + detail?: 'auto' | 'low' | 'high'; + } +} + +/** + * Learn about [audio inputs](https://platform.openai.com/docs/guides/audio). + */ +export interface ChatCompletionContentPartInputAudio { + input_audio: ChatCompletionContentPartInputAudio.InputAudio; + + /** + * The type of the content part. Always `input_audio`. + */ + type: 'input_audio'; +} + +export namespace ChatCompletionContentPartInputAudio { + export interface InputAudio { + /** + * Base64 encoded audio data. + */ + data: string; + + /** + * The format of the encoded audio data. Currently supports "wav" and "mp3". + */ + format: 'wav' | 'mp3'; + } +} + +export interface ChatCompletionContentPartRefusal { + /** + * The refusal message generated by the model. + */ + refusal: string; + + /** + * The type of the content part. + */ + type: 'refusal'; +} + +/** + * Learn about + * [text inputs](https://platform.openai.com/docs/guides/text-generation). + */ +export interface ChatCompletionContentPartText { + /** + * The text content. + */ + text: string; + + /** + * The type of the content part. + */ + type: 'text'; +} + +/** + * Developer-provided instructions that the model should follow, regardless of + * messages sent by the user. With o1 models and newer, `developer` messages + * replace the previous `system` messages. + */ +export interface ChatCompletionDeveloperMessageParam { + /** + * The contents of the developer message. + */ + content: string | Array; + + /** + * The role of the messages author, in this case `developer`. + */ + role: 'developer'; + + /** + * An optional name for the participant. Provides the model information to + * differentiate between participants of the same role. + */ + name?: string; +} + +/** + * Specifying a particular function via `{"name": "my_function"}` forces the model + * to call that function. + */ +export interface ChatCompletionFunctionCallOption { + /** + * The name of the function to call. + */ + name: string; +} + +/** + * @deprecated + */ +export interface ChatCompletionFunctionMessageParam { + /** + * The contents of the function message. + */ + content: string | null; + + /** + * The name of the function to call. + */ + name: string; + + /** + * The role of the messages author, in this case `function`. + */ + role: 'function'; +} + +/** + * A chat completion message generated by the model. + */ +export interface ChatCompletionMessage { + /** + * The contents of the message. + */ + content: string | null; + + /** + * The refusal message generated by the model. + */ + refusal: string | null; + + /** + * The role of the author of this message. 
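+   *
+   * For reference, a common way to read a message of this type from a completion
+   * (sketch; `completion` is assumed to be a `ChatCompletion` returned by
+   * `client.chat.completions.create(...)`):
+   *
+   * ```ts
+   * const msg = completion.choices[0].message;
+   * if (msg.tool_calls?.length) {
+   *   for (const call of msg.tool_calls) {
+   *     // arguments arrive as a JSON string; validate before use
+   *     const args = JSON.parse(call.function.arguments);
+   *     console.log(call.function.name, args);
+   *   }
+   * } else {
+   *   console.log(msg.content);
+   * }
+   * ```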
+ */ + role: 'assistant'; + + /** + * If the audio output modality is requested, this object contains data about the + * audio response from the model. + * [Learn more](https://platform.openai.com/docs/guides/audio). + */ + audio?: ChatCompletionAudio | null; + + /** + * @deprecated: Deprecated and replaced by `tool_calls`. The name and arguments of + * a function that should be called, as generated by the model. + */ + function_call?: ChatCompletionMessage.FunctionCall | null; + + /** + * The tool calls generated by the model, such as function calls. + */ + tool_calls?: Array; +} + +export namespace ChatCompletionMessage { + /** + * @deprecated: Deprecated and replaced by `tool_calls`. The name and arguments of + * a function that should be called, as generated by the model. + */ + export interface FunctionCall { + /** + * The arguments to call the function with, as generated by the model in JSON + * format. Note that the model does not always generate valid JSON, and may + * hallucinate parameters not defined by your function schema. Validate the + * arguments in your code before calling your function. + */ + arguments: string; + + /** + * The name of the function to call. + */ + name: string; + } +} + +/** + * Developer-provided instructions that the model should follow, regardless of + * messages sent by the user. With o1 models and newer, `developer` messages + * replace the previous `system` messages. + */ +export type ChatCompletionMessageParam = + | ChatCompletionDeveloperMessageParam + | ChatCompletionSystemMessageParam + | ChatCompletionUserMessageParam + | ChatCompletionAssistantMessageParam + | ChatCompletionToolMessageParam + | ChatCompletionFunctionMessageParam; + +export interface ChatCompletionMessageToolCall { + /** + * The ID of the tool call. + */ + id: string; + + /** + * The function that the model called. + */ + function: ChatCompletionMessageToolCall.Function; + + /** + * The type of the tool. Currently, only `function` is supported. + */ + type: 'function'; +} + +export namespace ChatCompletionMessageToolCall { + /** + * The function that the model called. + */ + export interface Function { + /** + * The arguments to call the function with, as generated by the model in JSON + * format. Note that the model does not always generate valid JSON, and may + * hallucinate parameters not defined by your function schema. Validate the + * arguments in your code before calling your function. + */ + arguments: string; + + /** + * The name of the function to call. + */ + name: string; + } +} + +export type ChatCompletionModality = 'text' | 'audio'; + +/** + * Specifies a tool the model should use. Use to force the model to call a specific + * function. + */ +export interface ChatCompletionNamedToolChoice { + function: ChatCompletionNamedToolChoice.Function; + + /** + * The type of the tool. Currently, only `function` is supported. + */ + type: 'function'; +} + +export namespace ChatCompletionNamedToolChoice { + export interface Function { + /** + * The name of the function to call. + */ + name: string; + } +} + +/** + * Static predicted output content, such as the content of a text file that is + * being regenerated. + */ +export interface ChatCompletionPredictionContent { + /** + * The content that should be matched when generating a model response. If + * generated tokens would match this content, the entire model response can be + * returned much more quickly. + */ + content: string | Array; + + /** + * The type of the predicted content you want to provide. 
This type is currently + * always `content`. + */ + type: 'content'; +} + +/** + * **o1 models only** + * + * Constrains effort on reasoning for + * [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently + * supported values are `low`, `medium`, and `high`. Reducing reasoning effort can + * result in faster responses and fewer tokens used on reasoning in a response. + */ +export type ChatCompletionReasoningEffort = 'low' | 'medium' | 'high'; + +/** + * The role of the author of a message + */ +export type ChatCompletionRole = 'system' | 'user' | 'assistant' | 'tool' | 'function'; + +/** + * Options for streaming response. Only set this when you set `stream: true`. + */ +export interface ChatCompletionStreamOptions { + /** + * If set, an additional chunk will be streamed before the `data: [DONE]` message. + * The `usage` field on this chunk shows the token usage statistics for the entire + * request, and the `choices` field will always be an empty array. All other chunks + * will also include a `usage` field, but with a null value. + */ + include_usage?: boolean; +} + +/** + * Developer-provided instructions that the model should follow, regardless of + * messages sent by the user. With o1 models and newer, use `developer` messages + * for this purpose instead. + */ +export interface ChatCompletionSystemMessageParam { + /** + * The contents of the system message. + */ + content: string | Array; + + /** + * The role of the messages author, in this case `system`. + */ + role: 'system'; + + /** + * An optional name for the participant. Provides the model information to + * differentiate between participants of the same role. + */ + name?: string; +} + +export interface ChatCompletionTokenLogprob { + /** + * The token. + */ + token: string; + + /** + * A list of integers representing the UTF-8 bytes representation of the token. + * Useful in instances where characters are represented by multiple tokens and + * their byte representations must be combined to generate the correct text + * representation. Can be `null` if there is no bytes representation for the token. + */ + bytes: Array | null; + + /** + * The log probability of this token, if it is within the top 20 most likely + * tokens. Otherwise, the value `-9999.0` is used to signify that the token is very + * unlikely. + */ + logprob: number; + + /** + * List of the most likely tokens and their log probability, at this token + * position. In rare cases, there may be fewer than the number of requested + * `top_logprobs` returned. + */ + top_logprobs: Array; +} + +export namespace ChatCompletionTokenLogprob { + export interface TopLogprob { + /** + * The token. + */ + token: string; + + /** + * A list of integers representing the UTF-8 bytes representation of the token. + * Useful in instances where characters are represented by multiple tokens and + * their byte representations must be combined to generate the correct text + * representation. Can be `null` if there is no bytes representation for the token. + */ + bytes: Array | null; + + /** + * The log probability of this token, if it is within the top 20 most likely + * tokens. Otherwise, the value `-9999.0` is used to signify that the token is very + * unlikely. + */ + logprob: number; + } +} + +export interface ChatCompletionTool { + function: Shared.FunctionDefinition; + + /** + * The type of the tool. Currently, only `function` is supported. + */ + type: 'function'; +} + +/** + * Controls which (if any) tool is called by the model. 
`none` means the model will + * not call any tool and instead generates a message. `auto` means the model can + * pick between generating a message or calling one or more tools. `required` means + * the model must call one or more tools. Specifying a particular tool via + * `{"type": "function", "function": {"name": "my_function"}}` forces the model to + * call that tool. + * + * `none` is the default when no tools are present. `auto` is the default if tools + * are present. + */ +export type ChatCompletionToolChoiceOption = 'none' | 'auto' | 'required' | ChatCompletionNamedToolChoice; + +export interface ChatCompletionToolMessageParam { + /** + * The contents of the tool message. + */ + content: string | Array; + + /** + * The role of the messages author, in this case `tool`. + */ + role: 'tool'; + + /** + * Tool call that this message is responding to. + */ + tool_call_id: string; +} + +/** + * Messages sent by an end user, containing prompts or additional context + * information. + */ +export interface ChatCompletionUserMessageParam { + /** + * The contents of the user message. + */ + content: string | Array; + + /** + * The role of the messages author, in this case `user`. + */ + role: 'user'; + + /** + * An optional name for the participant. Provides the model information to + * differentiate between participants of the same role. + */ + name?: string; +} + +/** + * @deprecated ChatCompletionMessageParam should be used instead + */ +export type CreateChatCompletionRequestMessage = ChatCompletionMessageParam; + +export type ChatCompletionCreateParams = + | ChatCompletionCreateParamsNonStreaming + | ChatCompletionCreateParamsStreaming; + +export interface ChatCompletionCreateParamsBase { + /** + * A list of messages comprising the conversation so far. Depending on the + * [model](https://platform.openai.com/docs/models) you use, different message + * types (modalities) are supported, like + * [text](https://platform.openai.com/docs/guides/text-generation), + * [images](https://platform.openai.com/docs/guides/vision), and + * [audio](https://platform.openai.com/docs/guides/audio). + */ + messages: Array; + + /** + * ID of the model to use. See the + * [model endpoint compatibility](https://platform.openai.com/docs/models#model-endpoint-compatibility) + * table for details on which models work with the Chat API. + */ + model: (string & {}) | ChatAPI.ChatModel; + + /** + * Parameters for audio output. Required when audio output is requested with + * `modalities: ["audio"]`. + * [Learn more](https://platform.openai.com/docs/guides/audio). + */ + audio?: ChatCompletionAudioParam | null; + + /** + * Number between -2.0 and 2.0. Positive values penalize new tokens based on their + * existing frequency in the text so far, decreasing the model's likelihood to + * repeat the same line verbatim. + */ + frequency_penalty?: number | null; + + /** + * Deprecated in favor of `tool_choice`. + * + * Controls which (if any) function is called by the model. + * + * `none` means the model will not call a function and instead generates a message. + * + * `auto` means the model can pick between generating a message or calling a + * function. + * + * Specifying a particular function via `{"name": "my_function"}` forces the model + * to call that function. + * + * `none` is the default when no functions are present. `auto` is the default if + * functions are present. + */ + function_call?: 'none' | 'auto' | ChatCompletionFunctionCallOption; + + /** + * Deprecated in favor of `tools`. 
+ * + * A list of functions the model may generate JSON inputs for. + */ + functions?: Array; + + /** + * Modify the likelihood of specified tokens appearing in the completion. + * + * Accepts a JSON object that maps tokens (specified by their token ID in the + * tokenizer) to an associated bias value from -100 to 100. Mathematically, the + * bias is added to the logits generated by the model prior to sampling. The exact + * effect will vary per model, but values between -1 and 1 should decrease or + * increase likelihood of selection; values like -100 or 100 should result in a ban + * or exclusive selection of the relevant token. + */ + logit_bias?: Record | null; + + /** + * Whether to return log probabilities of the output tokens or not. If true, + * returns the log probabilities of each output token returned in the `content` of + * `message`. + */ + logprobs?: boolean | null; + + /** + * An upper bound for the number of tokens that can be generated for a completion, + * including visible output tokens and + * [reasoning tokens](https://platform.openai.com/docs/guides/reasoning). + */ + max_completion_tokens?: number | null; + + /** + * The maximum number of [tokens](/tokenizer) that can be generated in the chat + * completion. This value can be used to control + * [costs](https://openai.com/api/pricing/) for text generated via API. + * + * This value is now deprecated in favor of `max_completion_tokens`, and is not + * compatible with + * [o1 series models](https://platform.openai.com/docs/guides/reasoning). + */ + max_tokens?: number | null; + + /** + * Developer-defined tags and values used for filtering completions in the + * [dashboard](https://platform.openai.com/chat-completions). + */ + metadata?: Record | null; + + /** + * Output types that you would like the model to generate for this request. Most + * models are capable of generating text, which is the default: + * + * `["text"]` + * + * The `gpt-4o-audio-preview` model can also be used to + * [generate audio](https://platform.openai.com/docs/guides/audio). To request that + * this model generate both text and audio responses, you can use: + * + * `["text", "audio"]` + */ + modalities?: Array | null; + + /** + * How many chat completion choices to generate for each input message. Note that + * you will be charged based on the number of generated tokens across all of the + * choices. Keep `n` as `1` to minimize costs. + */ + n?: number | null; + + /** + * Whether to enable + * [parallel function calling](https://platform.openai.com/docs/guides/function-calling#configuring-parallel-function-calling) + * during tool use. + */ + parallel_tool_calls?: boolean; + + /** + * Static predicted output content, such as the content of a text file that is + * being regenerated. + */ + prediction?: ChatCompletionPredictionContent | null; + + /** + * Number between -2.0 and 2.0. Positive values penalize new tokens based on + * whether they appear in the text so far, increasing the model's likelihood to + * talk about new topics. + */ + presence_penalty?: number | null; + + /** + * **o1 models only** + * + * Constrains effort on reasoning for + * [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently + * supported values are `low`, `medium`, and `high`. Reducing reasoning effort can + * result in faster responses and fewer tokens used on reasoning in a response. + */ + reasoning_effort?: ChatCompletionReasoningEffort; + + /** + * An object specifying the format that the model must output. 
+ * + * Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured + * Outputs which ensures the model will match your supplied JSON schema. Learn more + * in the + * [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). + * + * Setting to `{ "type": "json_object" }` enables JSON mode, which ensures the + * message the model generates is valid JSON. + * + * **Important:** when using JSON mode, you **must** also instruct the model to + * produce JSON yourself via a system or user message. Without this, the model may + * generate an unending stream of whitespace until the generation reaches the token + * limit, resulting in a long-running and seemingly "stuck" request. Also note that + * the message content may be partially cut off if `finish_reason="length"`, which + * indicates the generation exceeded `max_tokens` or the conversation exceeded the + * max context length. + */ + response_format?: + | Shared.ResponseFormatText + | Shared.ResponseFormatJSONObject + | Shared.ResponseFormatJSONSchema; + + /** + * This feature is in Beta. If specified, our system will make a best effort to + * sample deterministically, such that repeated requests with the same `seed` and + * parameters should return the same result. Determinism is not guaranteed, and you + * should refer to the `system_fingerprint` response parameter to monitor changes + * in the backend. + */ + seed?: number | null; + + /** + * Specifies the latency tier to use for processing the request. This parameter is + * relevant for customers subscribed to the scale tier service: + * + * - If set to 'auto', and the Project is Scale tier enabled, the system will + * utilize scale tier credits until they are exhausted. + * - If set to 'auto', and the Project is not Scale tier enabled, the request will + * be processed using the default service tier with a lower uptime SLA and no + * latency guarentee. + * - If set to 'default', the request will be processed using the default service + * tier with a lower uptime SLA and no latency guarentee. + * - When not set, the default behavior is 'auto'. + * + * When this parameter is set, the response body will include the `service_tier` + * utilized. + */ + service_tier?: 'auto' | 'default' | null; + + /** + * Up to 4 sequences where the API will stop generating further tokens. + */ + stop?: string | null | Array; + + /** + * Whether or not to store the output of this chat completion request for use in + * our [model distillation](https://platform.openai.com/docs/guides/distillation) + * or [evals](https://platform.openai.com/docs/guides/evals) products. + */ + store?: boolean | null; + + /** + * If set, partial message deltas will be sent, like in ChatGPT. Tokens will be + * sent as data-only + * [server-sent events](https://developer.mozilla.org/en-US/docs/Web/API/Server-sent_events/Using_server-sent_events#Event_stream_format) + * as they become available, with the stream terminated by a `data: [DONE]` + * message. + * [Example Python code](https://cookbook.openai.com/examples/how_to_stream_completions). + */ + stream?: boolean | null; + + /** + * Options for streaming response. Only set this when you set `stream: true`. + */ + stream_options?: ChatCompletionStreamOptions | null; + + /** + * What sampling temperature to use, between 0 and 2. Higher values like 0.8 will + * make the output more random, while lower values like 0.2 will make it more + * focused and deterministic. We generally recommend altering this or `top_p` but + * not both. 
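The JSON-mode caveat above is easy to trip over, so here is a small sketch (model name assumed) that pairs `response_format: { type: 'json_object' }` with a system message that explicitly asks for JSON, as the comment requires:

import OpenAI from 'openai';

const client = new OpenAI();

async function main() {
  const completion = await client.chat.completions.create({
    model: 'gpt-4o-mini', // assumption
    response_format: { type: 'json_object' },
    messages: [
      // Without an explicit instruction to produce JSON, JSON mode can stall on whitespace.
      { role: 'system', content: 'Reply with a JSON object with keys "city" and "country".' },
      { role: 'user', content: 'Where is the Eiffel Tower?' },
    ],
  });
  const data = JSON.parse(completion.choices[0]?.message.content ?? '{}');
  console.log(data);
}

main();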
+ */ + temperature?: number | null; + + /** + * Controls which (if any) tool is called by the model. `none` means the model will + * not call any tool and instead generates a message. `auto` means the model can + * pick between generating a message or calling one or more tools. `required` means + * the model must call one or more tools. Specifying a particular tool via + * `{"type": "function", "function": {"name": "my_function"}}` forces the model to + * call that tool. + * + * `none` is the default when no tools are present. `auto` is the default if tools + * are present. + */ + tool_choice?: ChatCompletionToolChoiceOption; + + /** + * A list of tools the model may call. Currently, only functions are supported as a + * tool. Use this to provide a list of functions the model may generate JSON inputs + * for. A max of 128 functions are supported. + */ + tools?: Array; + + /** + * An integer between 0 and 20 specifying the number of most likely tokens to + * return at each token position, each with an associated log probability. + * `logprobs` must be set to `true` if this parameter is used. + */ + top_logprobs?: number | null; + + /** + * An alternative to sampling with temperature, called nucleus sampling, where the + * model considers the results of the tokens with top_p probability mass. So 0.1 + * means only the tokens comprising the top 10% probability mass are considered. + * + * We generally recommend altering this or `temperature` but not both. + */ + top_p?: number | null; + + /** + * A unique identifier representing your end-user, which can help OpenAI to monitor + * and detect abuse. + * [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). + */ + user?: string; +} + +export namespace ChatCompletionCreateParams { + /** + * @deprecated + */ + export interface Function { + /** + * The name of the function to be called. Must be a-z, A-Z, 0-9, or contain + * underscores and dashes, with a maximum length of 64. + */ + name: string; + + /** + * A description of what the function does, used by the model to choose when and + * how to call the function. + */ + description?: string; + + /** + * The parameters the functions accepts, described as a JSON Schema object. See the + * [guide](https://platform.openai.com/docs/guides/function-calling) for examples, + * and the + * [JSON Schema reference](https://json-schema.org/understanding-json-schema/) for + * documentation about the format. + * + * Omitting `parameters` defines a function with an empty parameter list. + */ + parameters?: Shared.FunctionParameters; + } + + export type ChatCompletionCreateParamsNonStreaming = + ChatCompletionsAPI.ChatCompletionCreateParamsNonStreaming; + export type ChatCompletionCreateParamsStreaming = ChatCompletionsAPI.ChatCompletionCreateParamsStreaming; +} + +/** + * @deprecated Use ChatCompletionCreateParams instead + */ +export type CompletionCreateParams = ChatCompletionCreateParams; + +export interface ChatCompletionCreateParamsNonStreaming extends ChatCompletionCreateParamsBase { + /** + * If set, partial message deltas will be sent, like in ChatGPT. Tokens will be + * sent as data-only + * [server-sent events](https://developer.mozilla.org/en-US/docs/Web/API/Server-sent_events/Using_server-sent_events#Event_stream_format) + * as they become available, with the stream terminated by a `data: [DONE]` + * message. + * [Example Python code](https://cookbook.openai.com/examples/how_to_stream_completions). 
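To ground `tools`, `tool_choice`, and the streaming/non-streaming split documented here, a sketch that first forces a named tool and then re-issues the request with `stream: true`; the `get_weather` schema and the model name are illustrative assumptions, not values from this patch:

import OpenAI from 'openai';

const client = new OpenAI();

async function main() {
  const tools = [
    {
      type: 'function' as const,
      function: {
        name: 'get_weather', // hypothetical tool, not part of this patch
        description: 'Look up the current weather for a city.',
        parameters: {
          type: 'object',
          properties: { city: { type: 'string' } },
          required: ['city'],
        },
      },
    },
  ];

  const completion = await client.chat.completions.create({
    model: 'gpt-4o-mini', // assumption
    messages: [{ role: 'user', content: 'What is the weather in Paris?' }],
    tools,
    // Force the named tool instead of letting the model decide ('auto').
    tool_choice: { type: 'function', function: { name: 'get_weather' } },
  });

  const call = completion.choices[0]?.message.tool_calls?.[0];
  if (call) {
    const args = JSON.parse(call.function.arguments); // e.g. { "city": "Paris" }
    console.log(args);
  }

  // The streaming variant (stream: true) yields chunks that can be iterated with for-await.
  const stream = await client.chat.completions.create({
    model: 'gpt-4o-mini', // assumption
    messages: [{ role: 'user', content: 'Write one sentence about TypeScript.' }],
    stream: true,
  });
  for await (const chunk of stream) {
    process.stdout.write(chunk.choices[0]?.delta?.content ?? '');
  }
}

main();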
+ */ + stream?: false | null; +} + +/** + * @deprecated Use ChatCompletionCreateParamsNonStreaming instead + */ +export type CompletionCreateParamsNonStreaming = ChatCompletionCreateParamsNonStreaming; + +export interface ChatCompletionCreateParamsStreaming extends ChatCompletionCreateParamsBase { + /** + * If set, partial message deltas will be sent, like in ChatGPT. Tokens will be + * sent as data-only + * [server-sent events](https://developer.mozilla.org/en-US/docs/Web/API/Server-sent_events/Using_server-sent_events#Event_stream_format) + * as they become available, with the stream terminated by a `data: [DONE]` + * message. + * [Example Python code](https://cookbook.openai.com/examples/how_to_stream_completions). + */ + stream: true; +} + +/** + * @deprecated Use ChatCompletionCreateParamsStreaming instead + */ +export type CompletionCreateParamsStreaming = ChatCompletionCreateParamsStreaming; + +export declare namespace Completions { + export { + type ChatCompletion as ChatCompletion, + type ChatCompletionAssistantMessageParam as ChatCompletionAssistantMessageParam, + type ChatCompletionAudio as ChatCompletionAudio, + type ChatCompletionAudioParam as ChatCompletionAudioParam, + type ChatCompletionChunk as ChatCompletionChunk, + type ChatCompletionContentPart as ChatCompletionContentPart, + type ChatCompletionContentPartImage as ChatCompletionContentPartImage, + type ChatCompletionContentPartInputAudio as ChatCompletionContentPartInputAudio, + type ChatCompletionContentPartRefusal as ChatCompletionContentPartRefusal, + type ChatCompletionContentPartText as ChatCompletionContentPartText, + type ChatCompletionDeveloperMessageParam as ChatCompletionDeveloperMessageParam, + type ChatCompletionFunctionCallOption as ChatCompletionFunctionCallOption, + type ChatCompletionFunctionMessageParam as ChatCompletionFunctionMessageParam, + type ChatCompletionMessage as ChatCompletionMessage, + type ChatCompletionMessageParam as ChatCompletionMessageParam, + type ChatCompletionMessageToolCall as ChatCompletionMessageToolCall, + type ChatCompletionModality as ChatCompletionModality, + type ChatCompletionNamedToolChoice as ChatCompletionNamedToolChoice, + type ChatCompletionPredictionContent as ChatCompletionPredictionContent, + type ChatCompletionReasoningEffort as ChatCompletionReasoningEffort, + type ChatCompletionRole as ChatCompletionRole, + type ChatCompletionStreamOptions as ChatCompletionStreamOptions, + type ChatCompletionSystemMessageParam as ChatCompletionSystemMessageParam, + type ChatCompletionTokenLogprob as ChatCompletionTokenLogprob, + type ChatCompletionTool as ChatCompletionTool, + type ChatCompletionToolChoiceOption as ChatCompletionToolChoiceOption, + type ChatCompletionToolMessageParam as ChatCompletionToolMessageParam, + type ChatCompletionUserMessageParam as ChatCompletionUserMessageParam, + type CreateChatCompletionRequestMessage as CreateChatCompletionRequestMessage, + type ChatCompletionCreateParams as ChatCompletionCreateParams, + type CompletionCreateParams as CompletionCreateParams, + type ChatCompletionCreateParamsNonStreaming as ChatCompletionCreateParamsNonStreaming, + type CompletionCreateParamsNonStreaming as CompletionCreateParamsNonStreaming, + type ChatCompletionCreateParamsStreaming as ChatCompletionCreateParamsStreaming, + type CompletionCreateParamsStreaming as CompletionCreateParamsStreaming, + }; +} diff --git a/node_modules/openai/src/resources/chat/index.ts b/node_modules/openai/src/resources/chat/index.ts new file mode 100644 index 0000000..c3be194 --- 
/dev/null +++ b/node_modules/openai/src/resources/chat/index.ts @@ -0,0 +1,41 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +export { Chat, type ChatModel } from './chat'; +export { + Completions, + type ChatCompletion, + type ChatCompletionAssistantMessageParam, + type ChatCompletionAudio, + type ChatCompletionAudioParam, + type ChatCompletionChunk, + type ChatCompletionContentPart, + type ChatCompletionContentPartImage, + type ChatCompletionContentPartInputAudio, + type ChatCompletionContentPartRefusal, + type ChatCompletionContentPartText, + type ChatCompletionDeveloperMessageParam, + type ChatCompletionFunctionCallOption, + type ChatCompletionFunctionMessageParam, + type ChatCompletionMessage, + type ChatCompletionMessageParam, + type ChatCompletionMessageToolCall, + type ChatCompletionModality, + type ChatCompletionNamedToolChoice, + type ChatCompletionPredictionContent, + type ChatCompletionReasoningEffort, + type ChatCompletionRole, + type ChatCompletionStreamOptions, + type ChatCompletionSystemMessageParam, + type ChatCompletionTokenLogprob, + type ChatCompletionTool, + type ChatCompletionToolChoiceOption, + type ChatCompletionToolMessageParam, + type ChatCompletionUserMessageParam, + type CreateChatCompletionRequestMessage, + type ChatCompletionCreateParams, + type CompletionCreateParams, + type ChatCompletionCreateParamsNonStreaming, + type CompletionCreateParamsNonStreaming, + type ChatCompletionCreateParamsStreaming, + type CompletionCreateParamsStreaming, +} from './completions'; diff --git a/node_modules/openai/src/resources/completions.ts b/node_modules/openai/src/resources/completions.ts new file mode 100644 index 0000000..be75a46 --- /dev/null +++ b/node_modules/openai/src/resources/completions.ts @@ -0,0 +1,387 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +import { APIResource } from '../resource'; +import { APIPromise } from '../core'; +import * as Core from '../core'; +import * as CompletionsAPI from './completions'; +import * as ChatCompletionsAPI from './chat/completions'; +import { Stream } from '../streaming'; + +export class Completions extends APIResource { + /** + * Creates a completion for the provided prompt and parameters. + */ + create(body: CompletionCreateParamsNonStreaming, options?: Core.RequestOptions): APIPromise; + create( + body: CompletionCreateParamsStreaming, + options?: Core.RequestOptions, + ): APIPromise>; + create( + body: CompletionCreateParamsBase, + options?: Core.RequestOptions, + ): APIPromise | Completion>; + create( + body: CompletionCreateParams, + options?: Core.RequestOptions, + ): APIPromise | APIPromise> { + return this._client.post('/completions', { body, ...options, stream: body.stream ?? false }) as + | APIPromise + | APIPromise>; + } +} + +/** + * Represents a completion response from the API. Note: both the streamed and + * non-streamed response objects share the same shape (unlike the chat endpoint). + */ +export interface Completion { + /** + * A unique identifier for the completion. + */ + id: string; + + /** + * The list of completion choices the model generated for the input prompt. + */ + choices: Array; + + /** + * The Unix timestamp (in seconds) of when the completion was created. + */ + created: number; + + /** + * The model used for completion. 
+ */ + model: string; + + /** + * The object type, which is always "text_completion" + */ + object: 'text_completion'; + + /** + * This fingerprint represents the backend configuration that the model runs with. + * + * Can be used in conjunction with the `seed` request parameter to understand when + * backend changes have been made that might impact determinism. + */ + system_fingerprint?: string; + + /** + * Usage statistics for the completion request. + */ + usage?: CompletionUsage; +} + +export interface CompletionChoice { + /** + * The reason the model stopped generating tokens. This will be `stop` if the model + * hit a natural stop point or a provided stop sequence, `length` if the maximum + * number of tokens specified in the request was reached, or `content_filter` if + * content was omitted due to a flag from our content filters. + */ + finish_reason: 'stop' | 'length' | 'content_filter'; + + index: number; + + logprobs: CompletionChoice.Logprobs | null; + + text: string; +} + +export namespace CompletionChoice { + export interface Logprobs { + text_offset?: Array; + + token_logprobs?: Array; + + tokens?: Array; + + top_logprobs?: Array>; + } +} + +/** + * Usage statistics for the completion request. + */ +export interface CompletionUsage { + /** + * Number of tokens in the generated completion. + */ + completion_tokens: number; + + /** + * Number of tokens in the prompt. + */ + prompt_tokens: number; + + /** + * Total number of tokens used in the request (prompt + completion). + */ + total_tokens: number; + + /** + * Breakdown of tokens used in a completion. + */ + completion_tokens_details?: CompletionUsage.CompletionTokensDetails; + + /** + * Breakdown of tokens used in the prompt. + */ + prompt_tokens_details?: CompletionUsage.PromptTokensDetails; +} + +export namespace CompletionUsage { + /** + * Breakdown of tokens used in a completion. + */ + export interface CompletionTokensDetails { + /** + * When using Predicted Outputs, the number of tokens in the prediction that + * appeared in the completion. + */ + accepted_prediction_tokens?: number; + + /** + * Audio input tokens generated by the model. + */ + audio_tokens?: number; + + /** + * Tokens generated by the model for reasoning. + */ + reasoning_tokens?: number; + + /** + * When using Predicted Outputs, the number of tokens in the prediction that did + * not appear in the completion. However, like reasoning tokens, these tokens are + * still counted in the total completion tokens for purposes of billing, output, + * and context window limits. + */ + rejected_prediction_tokens?: number; + } + + /** + * Breakdown of tokens used in the prompt. + */ + export interface PromptTokensDetails { + /** + * Audio input tokens present in the prompt. + */ + audio_tokens?: number; + + /** + * Cached tokens present in the prompt. + */ + cached_tokens?: number; + } +} + +export type CompletionCreateParams = CompletionCreateParamsNonStreaming | CompletionCreateParamsStreaming; + +export interface CompletionCreateParamsBase { + /** + * ID of the model to use. You can use the + * [List models](https://platform.openai.com/docs/api-reference/models/list) API to + * see all of your available models, or see our + * [Model overview](https://platform.openai.com/docs/models) for descriptions of + * them. + */ + model: (string & {}) | 'gpt-3.5-turbo-instruct' | 'davinci-002' | 'babbage-002'; + + /** + * The prompt(s) to generate completions for, encoded as a string, array of + * strings, array of tokens, or array of token arrays. 
+ * + * Note that <|endoftext|> is the document separator that the model sees during + * training, so if a prompt is not specified the model will generate as if from the + * beginning of a new document. + */ + prompt: string | Array | Array | Array> | null; + + /** + * Generates `best_of` completions server-side and returns the "best" (the one with + * the highest log probability per token). Results cannot be streamed. + * + * When used with `n`, `best_of` controls the number of candidate completions and + * `n` specifies how many to return – `best_of` must be greater than `n`. + * + * **Note:** Because this parameter generates many completions, it can quickly + * consume your token quota. Use carefully and ensure that you have reasonable + * settings for `max_tokens` and `stop`. + */ + best_of?: number | null; + + /** + * Echo back the prompt in addition to the completion + */ + echo?: boolean | null; + + /** + * Number between -2.0 and 2.0. Positive values penalize new tokens based on their + * existing frequency in the text so far, decreasing the model's likelihood to + * repeat the same line verbatim. + * + * [See more information about frequency and presence penalties.](https://platform.openai.com/docs/guides/text-generation) + */ + frequency_penalty?: number | null; + + /** + * Modify the likelihood of specified tokens appearing in the completion. + * + * Accepts a JSON object that maps tokens (specified by their token ID in the GPT + * tokenizer) to an associated bias value from -100 to 100. You can use this + * [tokenizer tool](/tokenizer?view=bpe) to convert text to token IDs. + * Mathematically, the bias is added to the logits generated by the model prior to + * sampling. The exact effect will vary per model, but values between -1 and 1 + * should decrease or increase likelihood of selection; values like -100 or 100 + * should result in a ban or exclusive selection of the relevant token. + * + * As an example, you can pass `{"50256": -100}` to prevent the <|endoftext|> token + * from being generated. + */ + logit_bias?: Record | null; + + /** + * Include the log probabilities on the `logprobs` most likely output tokens, as + * well the chosen tokens. For example, if `logprobs` is 5, the API will return a + * list of the 5 most likely tokens. The API will always return the `logprob` of + * the sampled token, so there may be up to `logprobs+1` elements in the response. + * + * The maximum value for `logprobs` is 5. + */ + logprobs?: number | null; + + /** + * The maximum number of [tokens](/tokenizer) that can be generated in the + * completion. + * + * The token count of your prompt plus `max_tokens` cannot exceed the model's + * context length. + * [Example Python code](https://cookbook.openai.com/examples/how_to_count_tokens_with_tiktoken) + * for counting tokens. + */ + max_tokens?: number | null; + + /** + * How many completions to generate for each prompt. + * + * **Note:** Because this parameter generates many completions, it can quickly + * consume your token quota. Use carefully and ensure that you have reasonable + * settings for `max_tokens` and `stop`. + */ + n?: number | null; + + /** + * Number between -2.0 and 2.0. Positive values penalize new tokens based on + * whether they appear in the text so far, increasing the model's likelihood to + * talk about new topics. 
+ * + * [See more information about frequency and presence penalties.](https://platform.openai.com/docs/guides/text-generation) + */ + presence_penalty?: number | null; + + /** + * If specified, our system will make a best effort to sample deterministically, + * such that repeated requests with the same `seed` and parameters should return + * the same result. + * + * Determinism is not guaranteed, and you should refer to the `system_fingerprint` + * response parameter to monitor changes in the backend. + */ + seed?: number | null; + + /** + * Up to 4 sequences where the API will stop generating further tokens. The + * returned text will not contain the stop sequence. + */ + stop?: string | null | Array; + + /** + * Whether to stream back partial progress. If set, tokens will be sent as + * data-only + * [server-sent events](https://developer.mozilla.org/en-US/docs/Web/API/Server-sent_events/Using_server-sent_events#Event_stream_format) + * as they become available, with the stream terminated by a `data: [DONE]` + * message. + * [Example Python code](https://cookbook.openai.com/examples/how_to_stream_completions). + */ + stream?: boolean | null; + + /** + * Options for streaming response. Only set this when you set `stream: true`. + */ + stream_options?: ChatCompletionsAPI.ChatCompletionStreamOptions | null; + + /** + * The suffix that comes after a completion of inserted text. + * + * This parameter is only supported for `gpt-3.5-turbo-instruct`. + */ + suffix?: string | null; + + /** + * What sampling temperature to use, between 0 and 2. Higher values like 0.8 will + * make the output more random, while lower values like 0.2 will make it more + * focused and deterministic. + * + * We generally recommend altering this or `top_p` but not both. + */ + temperature?: number | null; + + /** + * An alternative to sampling with temperature, called nucleus sampling, where the + * model considers the results of the tokens with top_p probability mass. So 0.1 + * means only the tokens comprising the top 10% probability mass are considered. + * + * We generally recommend altering this or `temperature` but not both. + */ + top_p?: number | null; + + /** + * A unique identifier representing your end-user, which can help OpenAI to monitor + * and detect abuse. + * [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). + */ + user?: string; +} + +export namespace CompletionCreateParams { + export type CompletionCreateParamsNonStreaming = CompletionsAPI.CompletionCreateParamsNonStreaming; + export type CompletionCreateParamsStreaming = CompletionsAPI.CompletionCreateParamsStreaming; +} + +export interface CompletionCreateParamsNonStreaming extends CompletionCreateParamsBase { + /** + * Whether to stream back partial progress. If set, tokens will be sent as + * data-only + * [server-sent events](https://developer.mozilla.org/en-US/docs/Web/API/Server-sent_events/Using_server-sent_events#Event_stream_format) + * as they become available, with the stream terminated by a `data: [DONE]` + * message. + * [Example Python code](https://cookbook.openai.com/examples/how_to_stream_completions). + */ + stream?: false | null; +} + +export interface CompletionCreateParamsStreaming extends CompletionCreateParamsBase { + /** + * Whether to stream back partial progress. 
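A short sketch of the legacy completions endpoint described above, using the instruct model named in the `model` union; the prompt is a placeholder:

import OpenAI from 'openai';

const client = new OpenAI();

async function main() {
  const completion = await client.completions.create({
    model: 'gpt-3.5-turbo-instruct',
    prompt: 'Write a haiku about type definitions.',
    max_tokens: 64,
    temperature: 0.7,
  });
  console.log(completion.choices[0]?.text);
  console.log(completion.usage?.total_tokens);
}

main();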
If set, tokens will be sent as + * data-only + * [server-sent events](https://developer.mozilla.org/en-US/docs/Web/API/Server-sent_events/Using_server-sent_events#Event_stream_format) + * as they become available, with the stream terminated by a `data: [DONE]` + * message. + * [Example Python code](https://cookbook.openai.com/examples/how_to_stream_completions). + */ + stream: true; +} + +export declare namespace Completions { + export { + type Completion as Completion, + type CompletionChoice as CompletionChoice, + type CompletionUsage as CompletionUsage, + type CompletionCreateParams as CompletionCreateParams, + type CompletionCreateParamsNonStreaming as CompletionCreateParamsNonStreaming, + type CompletionCreateParamsStreaming as CompletionCreateParamsStreaming, + }; +} diff --git a/node_modules/openai/src/resources/embeddings.ts b/node_modules/openai/src/resources/embeddings.ts new file mode 100644 index 0000000..4b1644a --- /dev/null +++ b/node_modules/openai/src/resources/embeddings.ts @@ -0,0 +1,129 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +import { APIResource } from '../resource'; +import * as Core from '../core'; + +export class Embeddings extends APIResource { + /** + * Creates an embedding vector representing the input text. + */ + create( + body: EmbeddingCreateParams, + options?: Core.RequestOptions, + ): Core.APIPromise { + return this._client.post('/embeddings', { body, ...options }); + } +} + +export interface CreateEmbeddingResponse { + /** + * The list of embeddings generated by the model. + */ + data: Array; + + /** + * The name of the model used to generate the embedding. + */ + model: string; + + /** + * The object type, which is always "list". + */ + object: 'list'; + + /** + * The usage information for the request. + */ + usage: CreateEmbeddingResponse.Usage; +} + +export namespace CreateEmbeddingResponse { + /** + * The usage information for the request. + */ + export interface Usage { + /** + * The number of tokens used by the prompt. + */ + prompt_tokens: number; + + /** + * The total number of tokens used by the request. + */ + total_tokens: number; + } +} + +/** + * Represents an embedding vector returned by embedding endpoint. + */ +export interface Embedding { + /** + * The embedding vector, which is a list of floats. The length of vector depends on + * the model as listed in the + * [embedding guide](https://platform.openai.com/docs/guides/embeddings). + */ + embedding: Array; + + /** + * The index of the embedding in the list of embeddings. + */ + index: number; + + /** + * The object type, which is always "embedding". + */ + object: 'embedding'; +} + +export type EmbeddingModel = 'text-embedding-ada-002' | 'text-embedding-3-small' | 'text-embedding-3-large'; + +export interface EmbeddingCreateParams { + /** + * Input text to embed, encoded as a string or array of tokens. To embed multiple + * inputs in a single request, pass an array of strings or array of token arrays. + * The input must not exceed the max input tokens for the model (8192 tokens for + * `text-embedding-ada-002`), cannot be an empty string, and any array must be 2048 + * dimensions or less. + * [Example Python code](https://cookbook.openai.com/examples/how_to_count_tokens_with_tiktoken) + * for counting tokens. + */ + input: string | Array | Array | Array>; + + /** + * ID of the model to use. 
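A minimal sketch of the embeddings surface above; the model is one of the `EmbeddingModel` values and the input text is a placeholder:

import OpenAI from 'openai';

const client = new OpenAI();

async function main() {
  const response = await client.embeddings.create({
    model: 'text-embedding-3-small',
    input: 'The quick brown fox jumped over the lazy dog',
  });
  const vector = response.data[0]?.embedding; // Array<number>
  console.log(vector?.length, response.usage.total_tokens);
}

main();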
You can use the + * [List models](https://platform.openai.com/docs/api-reference/models/list) API to + * see all of your available models, or see our + * [Model overview](https://platform.openai.com/docs/models) for descriptions of + * them. + */ + model: (string & {}) | EmbeddingModel; + + /** + * The number of dimensions the resulting output embeddings should have. Only + * supported in `text-embedding-3` and later models. + */ + dimensions?: number; + + /** + * The format to return the embeddings in. Can be either `float` or + * [`base64`](https://pypi.org/project/pybase64/). + */ + encoding_format?: 'float' | 'base64'; + + /** + * A unique identifier representing your end-user, which can help OpenAI to monitor + * and detect abuse. + * [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). + */ + user?: string; +} + +export declare namespace Embeddings { + export { + type CreateEmbeddingResponse as CreateEmbeddingResponse, + type Embedding as Embedding, + type EmbeddingModel as EmbeddingModel, + type EmbeddingCreateParams as EmbeddingCreateParams, + }; +} diff --git a/node_modules/openai/src/resources/files.ts b/node_modules/openai/src/resources/files.ts new file mode 100644 index 0000000..42a7bdf --- /dev/null +++ b/node_modules/openai/src/resources/files.ts @@ -0,0 +1,238 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +import { APIResource } from '../resource'; +import { isRequestOptions } from '../core'; +import { sleep } from '../core'; +import { APIConnectionTimeoutError } from '../error'; +import * as Core from '../core'; +import { CursorPage, type CursorPageParams } from '../pagination'; +import { type Response } from '../_shims/index'; + +export class Files extends APIResource { + /** + * Upload a file that can be used across various endpoints. Individual files can be + * up to 512 MB, and the size of all files uploaded by one organization can be up + * to 100 GB. + * + * The Assistants API supports files up to 2 million tokens and of specific file + * types. See the + * [Assistants Tools guide](https://platform.openai.com/docs/assistants/tools) for + * details. + * + * The Fine-tuning API only supports `.jsonl` files. The input also has certain + * required formats for fine-tuning + * [chat](https://platform.openai.com/docs/api-reference/fine-tuning/chat-input) or + * [completions](https://platform.openai.com/docs/api-reference/fine-tuning/completions-input) + * models. + * + * The Batch API only supports `.jsonl` files up to 200 MB in size. The input also + * has a specific required + * [format](https://platform.openai.com/docs/api-reference/batch/request-input). + * + * Please [contact us](https://help.openai.com/) if you need to increase these + * storage limits. + */ + create(body: FileCreateParams, options?: Core.RequestOptions): Core.APIPromise { + return this._client.post('/files', Core.multipartFormRequestOptions({ body, ...options })); + } + + /** + * Returns information about a specific file. + */ + retrieve(fileId: string, options?: Core.RequestOptions): Core.APIPromise { + return this._client.get(`/files/${fileId}`, options); + } + + /** + * Returns a list of files. 
+ */ + list(query?: FileListParams, options?: Core.RequestOptions): Core.PagePromise; + list(options?: Core.RequestOptions): Core.PagePromise; + list( + query: FileListParams | Core.RequestOptions = {}, + options?: Core.RequestOptions, + ): Core.PagePromise { + if (isRequestOptions(query)) { + return this.list({}, query); + } + return this._client.getAPIList('/files', FileObjectsPage, { query, ...options }); + } + + /** + * Delete a file. + */ + del(fileId: string, options?: Core.RequestOptions): Core.APIPromise { + return this._client.delete(`/files/${fileId}`, options); + } + + /** + * Returns the contents of the specified file. + */ + content(fileId: string, options?: Core.RequestOptions): Core.APIPromise { + return this._client.get(`/files/${fileId}/content`, { ...options, __binaryResponse: true }); + } + + /** + * Returns the contents of the specified file. + * + * @deprecated The `.content()` method should be used instead + */ + retrieveContent(fileId: string, options?: Core.RequestOptions): Core.APIPromise { + return this._client.get(`/files/${fileId}/content`, { + ...options, + headers: { Accept: 'application/json', ...options?.headers }, + }); + } + + /** + * Waits for the given file to be processed, default timeout is 30 mins. + */ + async waitForProcessing( + id: string, + { pollInterval = 5000, maxWait = 30 * 60 * 1000 }: { pollInterval?: number; maxWait?: number } = {}, + ): Promise { + const TERMINAL_STATES = new Set(['processed', 'error', 'deleted']); + + const start = Date.now(); + let file = await this.retrieve(id); + + while (!file.status || !TERMINAL_STATES.has(file.status)) { + await sleep(pollInterval); + + file = await this.retrieve(id); + if (Date.now() - start > maxWait) { + throw new APIConnectionTimeoutError({ + message: `Giving up on waiting for file ${id} to finish processing after ${maxWait} milliseconds.`, + }); + } + } + + return file; + } +} + +export class FileObjectsPage extends CursorPage {} + +export type FileContent = string; + +export interface FileDeleted { + id: string; + + deleted: boolean; + + object: 'file'; +} + +/** + * The `File` object represents a document that has been uploaded to OpenAI. + */ +export interface FileObject { + /** + * The file identifier, which can be referenced in the API endpoints. + */ + id: string; + + /** + * The size of the file, in bytes. + */ + bytes: number; + + /** + * The Unix timestamp (in seconds) for when the file was created. + */ + created_at: number; + + /** + * The name of the file. + */ + filename: string; + + /** + * The object type, which is always `file`. + */ + object: 'file'; + + /** + * The intended purpose of the file. Supported values are `assistants`, + * `assistants_output`, `batch`, `batch_output`, `fine-tune`, `fine-tune-results` + * and `vision`. + */ + purpose: + | 'assistants' + | 'assistants_output' + | 'batch' + | 'batch_output' + | 'fine-tune' + | 'fine-tune-results' + | 'vision'; + + /** + * @deprecated: Deprecated. The current status of the file, which can be either + * `uploaded`, `processed`, or `error`. + */ + status: 'uploaded' | 'processed' | 'error'; + + /** + * @deprecated: Deprecated. For details on why a fine-tuning training file failed + * validation, see the `error` field on `fine_tuning.job`. + */ + status_details?: string; +} + +/** + * The intended purpose of the uploaded file. 
+ * + * Use "assistants" for + * [Assistants](https://platform.openai.com/docs/api-reference/assistants) and + * [Message](https://platform.openai.com/docs/api-reference/messages) files, + * "vision" for Assistants image file inputs, "batch" for + * [Batch API](https://platform.openai.com/docs/guides/batch), and "fine-tune" for + * [Fine-tuning](https://platform.openai.com/docs/api-reference/fine-tuning). + */ +export type FilePurpose = 'assistants' | 'batch' | 'fine-tune' | 'vision'; + +export interface FileCreateParams { + /** + * The File object (not file name) to be uploaded. + */ + file: Core.Uploadable; + + /** + * The intended purpose of the uploaded file. + * + * Use "assistants" for + * [Assistants](https://platform.openai.com/docs/api-reference/assistants) and + * [Message](https://platform.openai.com/docs/api-reference/messages) files, + * "vision" for Assistants image file inputs, "batch" for + * [Batch API](https://platform.openai.com/docs/guides/batch), and "fine-tune" for + * [Fine-tuning](https://platform.openai.com/docs/api-reference/fine-tuning). + */ + purpose: FilePurpose; +} + +export interface FileListParams extends CursorPageParams { + /** + * Sort order by the `created_at` timestamp of the objects. `asc` for ascending + * order and `desc` for descending order. + */ + order?: 'asc' | 'desc'; + + /** + * Only return files with the given purpose. + */ + purpose?: string; +} + +Files.FileObjectsPage = FileObjectsPage; + +export declare namespace Files { + export { + type FileContent as FileContent, + type FileDeleted as FileDeleted, + type FileObject as FileObject, + type FilePurpose as FilePurpose, + FileObjectsPage as FileObjectsPage, + type FileCreateParams as FileCreateParams, + type FileListParams as FileListParams, + }; +} diff --git a/node_modules/openai/src/resources/fine-tuning/fine-tuning.ts b/node_modules/openai/src/resources/fine-tuning/fine-tuning.ts new file mode 100644 index 0000000..df013c8 --- /dev/null +++ b/node_modules/openai/src/resources/fine-tuning/fine-tuning.ts @@ -0,0 +1,41 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
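Tying the Files resource above together, a sketch that uploads a JSONL file for fine-tuning, waits for processing via the `waitForProcessing` helper, and then iterates the cursor-paginated list; the local path is a placeholder:

import fs from 'fs';
import OpenAI from 'openai';

const client = new OpenAI();

async function main() {
  // Upload; `purpose: 'fine-tune'` is required for fine-tuning data.
  const file = await client.files.create({
    file: fs.createReadStream('training.jsonl'), // assumption: local file path
    purpose: 'fine-tune',
  });

  // Polls every 5s until the file reaches a terminal status (30 min default timeout).
  const processed = await client.files.waitForProcessing(file.id);
  console.log(processed.id, processed.status);

  // CursorPage results are async-iterable, so pagination is handled automatically.
  for await (const f of client.files.list({ purpose: 'fine-tune' })) {
    console.log(f.id, f.filename, f.bytes);
  }
}

main();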
+ +import { APIResource } from '../../resource'; +import * as JobsAPI from './jobs/jobs'; +import { + FineTuningJob, + FineTuningJobEvent, + FineTuningJobEventsPage, + FineTuningJobIntegration, + FineTuningJobWandbIntegration, + FineTuningJobWandbIntegrationObject, + FineTuningJobsPage, + JobCreateParams, + JobListEventsParams, + JobListParams, + Jobs, +} from './jobs/jobs'; + +export class FineTuning extends APIResource { + jobs: JobsAPI.Jobs = new JobsAPI.Jobs(this._client); +} + +FineTuning.Jobs = Jobs; +FineTuning.FineTuningJobsPage = FineTuningJobsPage; +FineTuning.FineTuningJobEventsPage = FineTuningJobEventsPage; + +export declare namespace FineTuning { + export { + Jobs as Jobs, + type FineTuningJob as FineTuningJob, + type FineTuningJobEvent as FineTuningJobEvent, + type FineTuningJobIntegration as FineTuningJobIntegration, + type FineTuningJobWandbIntegration as FineTuningJobWandbIntegration, + type FineTuningJobWandbIntegrationObject as FineTuningJobWandbIntegrationObject, + FineTuningJobsPage as FineTuningJobsPage, + FineTuningJobEventsPage as FineTuningJobEventsPage, + type JobCreateParams as JobCreateParams, + type JobListParams as JobListParams, + type JobListEventsParams as JobListEventsParams, + }; +} diff --git a/node_modules/openai/src/resources/fine-tuning/index.ts b/node_modules/openai/src/resources/fine-tuning/index.ts new file mode 100644 index 0000000..4954406 --- /dev/null +++ b/node_modules/openai/src/resources/fine-tuning/index.ts @@ -0,0 +1,16 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +export { FineTuning } from './fine-tuning'; +export { + FineTuningJobsPage, + FineTuningJobEventsPage, + Jobs, + type FineTuningJob, + type FineTuningJobEvent, + type FineTuningJobIntegration, + type FineTuningJobWandbIntegration, + type FineTuningJobWandbIntegrationObject, + type JobCreateParams, + type JobListParams, + type JobListEventsParams, +} from './jobs/index'; diff --git a/node_modules/openai/src/resources/fine-tuning/jobs/checkpoints.ts b/node_modules/openai/src/resources/fine-tuning/jobs/checkpoints.ts new file mode 100644 index 0000000..b3018ac --- /dev/null +++ b/node_modules/openai/src/resources/fine-tuning/jobs/checkpoints.ts @@ -0,0 +1,111 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +import { APIResource } from '../../../resource'; +import { isRequestOptions } from '../../../core'; +import * as Core from '../../../core'; +import { CursorPage, type CursorPageParams } from '../../../pagination'; + +export class Checkpoints extends APIResource { + /** + * List checkpoints for a fine-tuning job. + */ + list( + fineTuningJobId: string, + query?: CheckpointListParams, + options?: Core.RequestOptions, + ): Core.PagePromise; + list( + fineTuningJobId: string, + options?: Core.RequestOptions, + ): Core.PagePromise; + list( + fineTuningJobId: string, + query: CheckpointListParams | Core.RequestOptions = {}, + options?: Core.RequestOptions, + ): Core.PagePromise { + if (isRequestOptions(query)) { + return this.list(fineTuningJobId, {}, query); + } + return this._client.getAPIList( + `/fine_tuning/jobs/${fineTuningJobId}/checkpoints`, + FineTuningJobCheckpointsPage, + { query, ...options }, + ); + } +} + +export class FineTuningJobCheckpointsPage extends CursorPage {} + +/** + * The `fine_tuning.job.checkpoint` object represents a model checkpoint for a + * fine-tuning job that is ready to use. 
+ */ +export interface FineTuningJobCheckpoint { + /** + * The checkpoint identifier, which can be referenced in the API endpoints. + */ + id: string; + + /** + * The Unix timestamp (in seconds) for when the checkpoint was created. + */ + created_at: number; + + /** + * The name of the fine-tuned checkpoint model that is created. + */ + fine_tuned_model_checkpoint: string; + + /** + * The name of the fine-tuning job that this checkpoint was created from. + */ + fine_tuning_job_id: string; + + /** + * Metrics at the step number during the fine-tuning job. + */ + metrics: FineTuningJobCheckpoint.Metrics; + + /** + * The object type, which is always "fine_tuning.job.checkpoint". + */ + object: 'fine_tuning.job.checkpoint'; + + /** + * The step number that the checkpoint was created at. + */ + step_number: number; +} + +export namespace FineTuningJobCheckpoint { + /** + * Metrics at the step number during the fine-tuning job. + */ + export interface Metrics { + full_valid_loss?: number; + + full_valid_mean_token_accuracy?: number; + + step?: number; + + train_loss?: number; + + train_mean_token_accuracy?: number; + + valid_loss?: number; + + valid_mean_token_accuracy?: number; + } +} + +export interface CheckpointListParams extends CursorPageParams {} + +Checkpoints.FineTuningJobCheckpointsPage = FineTuningJobCheckpointsPage; + +export declare namespace Checkpoints { + export { + type FineTuningJobCheckpoint as FineTuningJobCheckpoint, + FineTuningJobCheckpointsPage as FineTuningJobCheckpointsPage, + type CheckpointListParams as CheckpointListParams, + }; +} diff --git a/node_modules/openai/src/resources/fine-tuning/jobs/index.ts b/node_modules/openai/src/resources/fine-tuning/jobs/index.ts new file mode 100644 index 0000000..7a05b48 --- /dev/null +++ b/node_modules/openai/src/resources/fine-tuning/jobs/index.ts @@ -0,0 +1,21 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +export { + FineTuningJobCheckpointsPage, + Checkpoints, + type FineTuningJobCheckpoint, + type CheckpointListParams, +} from './checkpoints'; +export { + FineTuningJobsPage, + FineTuningJobEventsPage, + Jobs, + type FineTuningJob, + type FineTuningJobEvent, + type FineTuningJobIntegration, + type FineTuningJobWandbIntegration, + type FineTuningJobWandbIntegrationObject, + type JobCreateParams, + type JobListParams, + type JobListEventsParams, +} from './jobs'; diff --git a/node_modules/openai/src/resources/fine-tuning/jobs/jobs.ts b/node_modules/openai/src/resources/fine-tuning/jobs/jobs.ts new file mode 100644 index 0000000..44dd011 --- /dev/null +++ b/node_modules/openai/src/resources/fine-tuning/jobs/jobs.ts @@ -0,0 +1,722 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +import { APIResource } from '../../../resource'; +import { isRequestOptions } from '../../../core'; +import * as Core from '../../../core'; +import * as CheckpointsAPI from './checkpoints'; +import { + CheckpointListParams, + Checkpoints, + FineTuningJobCheckpoint, + FineTuningJobCheckpointsPage, +} from './checkpoints'; +import { CursorPage, type CursorPageParams } from '../../../pagination'; + +export class Jobs extends APIResource { + checkpoints: CheckpointsAPI.Checkpoints = new CheckpointsAPI.Checkpoints(this._client); + + /** + * Creates a fine-tuning job which begins the process of creating a new model from + * a given dataset. + * + * Response includes details of the enqueued job including job status and the name + * of the fine-tuned models once complete. 
+ * + * [Learn more about fine-tuning](https://platform.openai.com/docs/guides/fine-tuning) + */ + create(body: JobCreateParams, options?: Core.RequestOptions): Core.APIPromise { + return this._client.post('/fine_tuning/jobs', { body, ...options }); + } + + /** + * Get info about a fine-tuning job. + * + * [Learn more about fine-tuning](https://platform.openai.com/docs/guides/fine-tuning) + */ + retrieve(fineTuningJobId: string, options?: Core.RequestOptions): Core.APIPromise { + return this._client.get(`/fine_tuning/jobs/${fineTuningJobId}`, options); + } + + /** + * List your organization's fine-tuning jobs + */ + list( + query?: JobListParams, + options?: Core.RequestOptions, + ): Core.PagePromise; + list(options?: Core.RequestOptions): Core.PagePromise; + list( + query: JobListParams | Core.RequestOptions = {}, + options?: Core.RequestOptions, + ): Core.PagePromise { + if (isRequestOptions(query)) { + return this.list({}, query); + } + return this._client.getAPIList('/fine_tuning/jobs', FineTuningJobsPage, { query, ...options }); + } + + /** + * Immediately cancel a fine-tune job. + */ + cancel(fineTuningJobId: string, options?: Core.RequestOptions): Core.APIPromise { + return this._client.post(`/fine_tuning/jobs/${fineTuningJobId}/cancel`, options); + } + + /** + * Get status updates for a fine-tuning job. + */ + listEvents( + fineTuningJobId: string, + query?: JobListEventsParams, + options?: Core.RequestOptions, + ): Core.PagePromise; + listEvents( + fineTuningJobId: string, + options?: Core.RequestOptions, + ): Core.PagePromise; + listEvents( + fineTuningJobId: string, + query: JobListEventsParams | Core.RequestOptions = {}, + options?: Core.RequestOptions, + ): Core.PagePromise { + if (isRequestOptions(query)) { + return this.listEvents(fineTuningJobId, {}, query); + } + return this._client.getAPIList(`/fine_tuning/jobs/${fineTuningJobId}/events`, FineTuningJobEventsPage, { + query, + ...options, + }); + } +} + +export class FineTuningJobsPage extends CursorPage {} + +export class FineTuningJobEventsPage extends CursorPage {} + +/** + * The `fine_tuning.job` object represents a fine-tuning job that has been created + * through the API. + */ +export interface FineTuningJob { + /** + * The object identifier, which can be referenced in the API endpoints. + */ + id: string; + + /** + * The Unix timestamp (in seconds) for when the fine-tuning job was created. + */ + created_at: number; + + /** + * For fine-tuning jobs that have `failed`, this will contain more information on + * the cause of the failure. + */ + error: FineTuningJob.Error | null; + + /** + * The name of the fine-tuned model that is being created. The value will be null + * if the fine-tuning job is still running. + */ + fine_tuned_model: string | null; + + /** + * The Unix timestamp (in seconds) for when the fine-tuning job was finished. The + * value will be null if the fine-tuning job is still running. + */ + finished_at: number | null; + + /** + * The hyperparameters used for the fine-tuning job. This value will only be + * returned when running `supervised` jobs. + */ + hyperparameters: FineTuningJob.Hyperparameters; + + /** + * The base model that is being fine-tuned. + */ + model: string; + + /** + * The object type, which is always "fine_tuning.job". + */ + object: 'fine_tuning.job'; + + /** + * The organization that owns the fine-tuning job. + */ + organization_id: string; + + /** + * The compiled results file ID(s) for the fine-tuning job. 
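A sketch of the Jobs resource above: create a job from an already-processed training file, poll until it leaves the in-progress states, and read events and checkpoints; the file ID and model are placeholders:

import OpenAI from 'openai';

const client = new OpenAI();

async function main() {
  const job = await client.fineTuning.jobs.create({
    model: 'gpt-4o-mini',          // assumption: any fine-tunable base model
    training_file: 'file-abc123',  // assumption: ID of a processed JSONL upload
  });

  // Poll until the job reaches a terminal status (succeeded, failed, or cancelled).
  let current = job;
  while (!['succeeded', 'failed', 'cancelled'].includes(current.status)) {
    await new Promise((resolve) => setTimeout(resolve, 10_000));
    current = await client.fineTuning.jobs.retrieve(job.id);
  }

  // Events and checkpoints are cursor-paginated sub-resources of the job.
  for await (const event of client.fineTuning.jobs.listEvents(job.id)) {
    console.log(event.level, event.message);
  }
  const checkpoints = await client.fineTuning.jobs.checkpoints.list(job.id);
  console.log(checkpoints.data.length);
}

main();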
You can retrieve the + * results with the + * [Files API](https://platform.openai.com/docs/api-reference/files/retrieve-contents). + */ + result_files: Array; + + /** + * The seed used for the fine-tuning job. + */ + seed: number; + + /** + * The current status of the fine-tuning job, which can be either + * `validating_files`, `queued`, `running`, `succeeded`, `failed`, or `cancelled`. + */ + status: 'validating_files' | 'queued' | 'running' | 'succeeded' | 'failed' | 'cancelled'; + + /** + * The total number of billable tokens processed by this fine-tuning job. The value + * will be null if the fine-tuning job is still running. + */ + trained_tokens: number | null; + + /** + * The file ID used for training. You can retrieve the training data with the + * [Files API](https://platform.openai.com/docs/api-reference/files/retrieve-contents). + */ + training_file: string; + + /** + * The file ID used for validation. You can retrieve the validation results with + * the + * [Files API](https://platform.openai.com/docs/api-reference/files/retrieve-contents). + */ + validation_file: string | null; + + /** + * The Unix timestamp (in seconds) for when the fine-tuning job is estimated to + * finish. The value will be null if the fine-tuning job is not running. + */ + estimated_finish?: number | null; + + /** + * A list of integrations to enable for this fine-tuning job. + */ + integrations?: Array | null; + + /** + * The method used for fine-tuning. + */ + method?: FineTuningJob.Method; +} + +export namespace FineTuningJob { + /** + * For fine-tuning jobs that have `failed`, this will contain more information on + * the cause of the failure. + */ + export interface Error { + /** + * A machine-readable error code. + */ + code: string; + + /** + * A human-readable error message. + */ + message: string; + + /** + * The parameter that was invalid, usually `training_file` or `validation_file`. + * This field will be null if the failure was not parameter-specific. + */ + param: string | null; + } + + /** + * The hyperparameters used for the fine-tuning job. This value will only be + * returned when running `supervised` jobs. + */ + export interface Hyperparameters { + /** + * Number of examples in each batch. A larger batch size means that model + * parameters are updated less frequently, but with lower variance. + */ + batch_size?: 'auto' | number; + + /** + * Scaling factor for the learning rate. A smaller learning rate may be useful to + * avoid overfitting. + */ + learning_rate_multiplier?: 'auto' | number; + + /** + * The number of epochs to train the model for. An epoch refers to one full cycle + * through the training dataset. + */ + n_epochs?: 'auto' | number; + } + + /** + * The method used for fine-tuning. + */ + export interface Method { + /** + * Configuration for the DPO fine-tuning method. + */ + dpo?: Method.Dpo; + + /** + * Configuration for the supervised fine-tuning method. + */ + supervised?: Method.Supervised; + + /** + * The type of method. Is either `supervised` or `dpo`. + */ + type?: 'supervised' | 'dpo'; + } + + export namespace Method { + /** + * Configuration for the DPO fine-tuning method. + */ + export interface Dpo { + /** + * The hyperparameters used for the fine-tuning job. + */ + hyperparameters?: Dpo.Hyperparameters; + } + + export namespace Dpo { + /** + * The hyperparameters used for the fine-tuning job. + */ + export interface Hyperparameters { + /** + * Number of examples in each batch. 
A larger batch size means that model + * parameters are updated less frequently, but with lower variance. + */ + batch_size?: 'auto' | number; + + /** + * The beta value for the DPO method. A higher beta value will increase the weight + * of the penalty between the policy and reference model. + */ + beta?: 'auto' | number; + + /** + * Scaling factor for the learning rate. A smaller learning rate may be useful to + * avoid overfitting. + */ + learning_rate_multiplier?: 'auto' | number; + + /** + * The number of epochs to train the model for. An epoch refers to one full cycle + * through the training dataset. + */ + n_epochs?: 'auto' | number; + } + } + + /** + * Configuration for the supervised fine-tuning method. + */ + export interface Supervised { + /** + * The hyperparameters used for the fine-tuning job. + */ + hyperparameters?: Supervised.Hyperparameters; + } + + export namespace Supervised { + /** + * The hyperparameters used for the fine-tuning job. + */ + export interface Hyperparameters { + /** + * Number of examples in each batch. A larger batch size means that model + * parameters are updated less frequently, but with lower variance. + */ + batch_size?: 'auto' | number; + + /** + * Scaling factor for the learning rate. A smaller learning rate may be useful to + * avoid overfitting. + */ + learning_rate_multiplier?: 'auto' | number; + + /** + * The number of epochs to train the model for. An epoch refers to one full cycle + * through the training dataset. + */ + n_epochs?: 'auto' | number; + } + } + } +} + +/** + * Fine-tuning job event object + */ +export interface FineTuningJobEvent { + /** + * The object identifier. + */ + id: string; + + /** + * The Unix timestamp (in seconds) for when the fine-tuning job was created. + */ + created_at: number; + + /** + * The log level of the event. + */ + level: 'info' | 'warn' | 'error'; + + /** + * The message of the event. + */ + message: string; + + /** + * The object type, which is always "fine_tuning.job.event". + */ + object: 'fine_tuning.job.event'; + + /** + * The data associated with the event. + */ + data?: unknown; + + /** + * The type of event. + */ + type?: 'message' | 'metrics'; +} + +export type FineTuningJobIntegration = FineTuningJobWandbIntegrationObject; + +/** + * The settings for your integration with Weights and Biases. This payload + * specifies the project that metrics will be sent to. Optionally, you can set an + * explicit display name for your run, add tags to your run, and set a default + * entity (team, username, etc) to be associated with your run. + */ +export interface FineTuningJobWandbIntegration { + /** + * The name of the project that the new run will be created under. + */ + project: string; + + /** + * The entity to use for the run. This allows you to set the team or username of + * the WandB user that you would like associated with the run. If not set, the + * default entity for the registered WandB API key is used. + */ + entity?: string | null; + + /** + * A display name to set for the run. If not set, we will use the Job ID as the + * name. + */ + name?: string | null; + + /** + * A list of tags to be attached to the newly created run. These tags are passed + * through directly to WandB. Some default tags are generated by OpenAI: + * "openai/finetune", "openai/{base-model}", "openai/{ftjob-abcdef}". 
+ */ + tags?: Array; +} + +export interface FineTuningJobWandbIntegrationObject { + /** + * The type of the integration being enabled for the fine-tuning job + */ + type: 'wandb'; + + /** + * The settings for your integration with Weights and Biases. This payload + * specifies the project that metrics will be sent to. Optionally, you can set an + * explicit display name for your run, add tags to your run, and set a default + * entity (team, username, etc) to be associated with your run. + */ + wandb: FineTuningJobWandbIntegration; +} + +export interface JobCreateParams { + /** + * The name of the model to fine-tune. You can select one of the + * [supported models](https://platform.openai.com/docs/guides/fine-tuning#which-models-can-be-fine-tuned). + */ + model: (string & {}) | 'babbage-002' | 'davinci-002' | 'gpt-3.5-turbo' | 'gpt-4o-mini'; + + /** + * The ID of an uploaded file that contains training data. + * + * See [upload file](https://platform.openai.com/docs/api-reference/files/create) + * for how to upload a file. + * + * Your dataset must be formatted as a JSONL file. Additionally, you must upload + * your file with the purpose `fine-tune`. + * + * The contents of the file should differ depending on if the model uses the + * [chat](https://platform.openai.com/docs/api-reference/fine-tuning/chat-input), + * [completions](https://platform.openai.com/docs/api-reference/fine-tuning/completions-input) + * format, or if the fine-tuning method uses the + * [preference](https://platform.openai.com/docs/api-reference/fine-tuning/preference-input) + * format. + * + * See the [fine-tuning guide](https://platform.openai.com/docs/guides/fine-tuning) + * for more details. + */ + training_file: string; + + /** + * The hyperparameters used for the fine-tuning job. This value is now deprecated + * in favor of `method`, and should be passed in under the `method` parameter. + */ + hyperparameters?: JobCreateParams.Hyperparameters; + + /** + * A list of integrations to enable for your fine-tuning job. + */ + integrations?: Array | null; + + /** + * The method used for fine-tuning. + */ + method?: JobCreateParams.Method; + + /** + * The seed controls the reproducibility of the job. Passing in the same seed and + * job parameters should produce the same results, but may differ in rare cases. If + * a seed is not specified, one will be generated for you. + */ + seed?: number | null; + + /** + * A string of up to 64 characters that will be added to your fine-tuned model + * name. + * + * For example, a `suffix` of "custom-model-name" would produce a model name like + * `ft:gpt-4o-mini:openai:custom-model-name:7p4lURel`. + */ + suffix?: string | null; + + /** + * The ID of an uploaded file that contains validation data. + * + * If you provide this file, the data is used to generate validation metrics + * periodically during fine-tuning. These metrics can be viewed in the fine-tuning + * results file. The same data should not be present in both train and validation + * files. + * + * Your dataset must be formatted as a JSONL file. You must upload your file with + * the purpose `fine-tune`. + * + * See the [fine-tuning guide](https://platform.openai.com/docs/guides/fine-tuning) + * for more details. + */ + validation_file?: string | null; +} + +export namespace JobCreateParams { + /** + * @deprecated: The hyperparameters used for the fine-tuning job. This value is now + * deprecated in favor of `method`, and should be passed in under the `method` + * parameter. 
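Since top-level `hyperparameters` is deprecated in favor of `method`, a sketch of a supervised job that passes hyperparameters through `method` and attaches the Weights and Biases integration shape documented above; the project name and file ID are placeholders:

import OpenAI from 'openai';

const client = new OpenAI();

async function main() {
  const job = await client.fineTuning.jobs.create({
    model: 'gpt-4o-mini',         // assumption
    training_file: 'file-abc123', // assumption
    method: {
      type: 'supervised',
      supervised: {
        hyperparameters: {
          n_epochs: 3,
          batch_size: 'auto',
          learning_rate_multiplier: 'auto',
        },
      },
    },
    integrations: [{ type: 'wandb', wandb: { project: 'sdk-docs-example' } }], // placeholder project
    suffix: 'docs-example',
  });
  console.log(job.id, job.status);
}

main();

A DPO job would instead use `type: 'dpo'` with `dpo.hyperparameters`, which additionally accepts the `beta` weight described in the nested `Dpo.Hyperparameters` interface.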
+ */ + export interface Hyperparameters { + /** + * Number of examples in each batch. A larger batch size means that model + * parameters are updated less frequently, but with lower variance. + */ + batch_size?: 'auto' | number; + + /** + * Scaling factor for the learning rate. A smaller learning rate may be useful to + * avoid overfitting. + */ + learning_rate_multiplier?: 'auto' | number; + + /** + * The number of epochs to train the model for. An epoch refers to one full cycle + * through the training dataset. + */ + n_epochs?: 'auto' | number; + } + + export interface Integration { + /** + * The type of integration to enable. Currently, only "wandb" (Weights and Biases) + * is supported. + */ + type: 'wandb'; + + /** + * The settings for your integration with Weights and Biases. This payload + * specifies the project that metrics will be sent to. Optionally, you can set an + * explicit display name for your run, add tags to your run, and set a default + * entity (team, username, etc) to be associated with your run. + */ + wandb: Integration.Wandb; + } + + export namespace Integration { + /** + * The settings for your integration with Weights and Biases. This payload + * specifies the project that metrics will be sent to. Optionally, you can set an + * explicit display name for your run, add tags to your run, and set a default + * entity (team, username, etc) to be associated with your run. + */ + export interface Wandb { + /** + * The name of the project that the new run will be created under. + */ + project: string; + + /** + * The entity to use for the run. This allows you to set the team or username of + * the WandB user that you would like associated with the run. If not set, the + * default entity for the registered WandB API key is used. + */ + entity?: string | null; + + /** + * A display name to set for the run. If not set, we will use the Job ID as the + * name. + */ + name?: string | null; + + /** + * A list of tags to be attached to the newly created run. These tags are passed + * through directly to WandB. Some default tags are generated by OpenAI: + * "openai/finetune", "openai/{base-model}", "openai/{ftjob-abcdef}". + */ + tags?: Array; + } + } + + /** + * The method used for fine-tuning. + */ + export interface Method { + /** + * Configuration for the DPO fine-tuning method. + */ + dpo?: Method.Dpo; + + /** + * Configuration for the supervised fine-tuning method. + */ + supervised?: Method.Supervised; + + /** + * The type of method. Is either `supervised` or `dpo`. + */ + type?: 'supervised' | 'dpo'; + } + + export namespace Method { + /** + * Configuration for the DPO fine-tuning method. + */ + export interface Dpo { + /** + * The hyperparameters used for the fine-tuning job. + */ + hyperparameters?: Dpo.Hyperparameters; + } + + export namespace Dpo { + /** + * The hyperparameters used for the fine-tuning job. + */ + export interface Hyperparameters { + /** + * Number of examples in each batch. A larger batch size means that model + * parameters are updated less frequently, but with lower variance. + */ + batch_size?: 'auto' | number; + + /** + * The beta value for the DPO method. A higher beta value will increase the weight + * of the penalty between the policy and reference model. + */ + beta?: 'auto' | number; + + /** + * Scaling factor for the learning rate. A smaller learning rate may be useful to + * avoid overfitting. + */ + learning_rate_multiplier?: 'auto' | number; + + /** + * The number of epochs to train the model for. 
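// Editor's note: a hedged usage sketch, not generated code. It builds a JobCreateParams value with
// the `method` block defined above and passes it to the Jobs resource's create method (assumed to
// be `client.fineTuning.jobs.create`, as in the published SDK). The training file id and suffix
// are placeholders.
import OpenAI from 'openai';

const client = new OpenAI();

async function startFineTune(): Promise<void> {
  const job = await client.fineTuning.jobs.create({
    model: 'gpt-4o-mini',
    training_file: 'file-abc123', // an uploaded JSONL file with purpose `fine-tune`
    suffix: 'customer-support',
    method: {
      type: 'supervised',
      supervised: {
        hyperparameters: {
          n_epochs: 3,
          batch_size: 'auto',
          learning_rate_multiplier: 'auto',
        },
      },
    },
  });
  console.log(job.id);
}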
An epoch refers to one full cycle + * through the training dataset. + */ + n_epochs?: 'auto' | number; + } + } + + /** + * Configuration for the supervised fine-tuning method. + */ + export interface Supervised { + /** + * The hyperparameters used for the fine-tuning job. + */ + hyperparameters?: Supervised.Hyperparameters; + } + + export namespace Supervised { + /** + * The hyperparameters used for the fine-tuning job. + */ + export interface Hyperparameters { + /** + * Number of examples in each batch. A larger batch size means that model + * parameters are updated less frequently, but with lower variance. + */ + batch_size?: 'auto' | number; + + /** + * Scaling factor for the learning rate. A smaller learning rate may be useful to + * avoid overfitting. + */ + learning_rate_multiplier?: 'auto' | number; + + /** + * The number of epochs to train the model for. An epoch refers to one full cycle + * through the training dataset. + */ + n_epochs?: 'auto' | number; + } + } + } +} + +export interface JobListParams extends CursorPageParams {} + +export interface JobListEventsParams extends CursorPageParams {} + +Jobs.FineTuningJobsPage = FineTuningJobsPage; +Jobs.FineTuningJobEventsPage = FineTuningJobEventsPage; +Jobs.Checkpoints = Checkpoints; +Jobs.FineTuningJobCheckpointsPage = FineTuningJobCheckpointsPage; + +export declare namespace Jobs { + export { + type FineTuningJob as FineTuningJob, + type FineTuningJobEvent as FineTuningJobEvent, + type FineTuningJobIntegration as FineTuningJobIntegration, + type FineTuningJobWandbIntegration as FineTuningJobWandbIntegration, + type FineTuningJobWandbIntegrationObject as FineTuningJobWandbIntegrationObject, + FineTuningJobsPage as FineTuningJobsPage, + FineTuningJobEventsPage as FineTuningJobEventsPage, + type JobCreateParams as JobCreateParams, + type JobListParams as JobListParams, + type JobListEventsParams as JobListEventsParams, + }; + + export { + Checkpoints as Checkpoints, + type FineTuningJobCheckpoint as FineTuningJobCheckpoint, + FineTuningJobCheckpointsPage as FineTuningJobCheckpointsPage, + type CheckpointListParams as CheckpointListParams, + }; +} diff --git a/node_modules/openai/src/resources/images.ts b/node_modules/openai/src/resources/images.ts new file mode 100644 index 0000000..8e1c6d9 --- /dev/null +++ b/node_modules/openai/src/resources/images.ts @@ -0,0 +1,218 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +import { APIResource } from '../resource'; +import * as Core from '../core'; + +export class Images extends APIResource { + /** + * Creates a variation of a given image. + */ + createVariation( + body: ImageCreateVariationParams, + options?: Core.RequestOptions, + ): Core.APIPromise { + return this._client.post('/images/variations', Core.multipartFormRequestOptions({ body, ...options })); + } + + /** + * Creates an edited or extended image given an original image and a prompt. + */ + edit(body: ImageEditParams, options?: Core.RequestOptions): Core.APIPromise { + return this._client.post('/images/edits', Core.multipartFormRequestOptions({ body, ...options })); + } + + /** + * Creates an image given a prompt. + */ + generate(body: ImageGenerateParams, options?: Core.RequestOptions): Core.APIPromise { + return this._client.post('/images/generations', { body, ...options }); + } +} + +/** + * Represents the url or the content of an image generated by the OpenAI API. 
+ */ +export interface Image { + /** + * The base64-encoded JSON of the generated image, if `response_format` is + * `b64_json`. + */ + b64_json?: string; + + /** + * The prompt that was used to generate the image, if there was any revision to the + * prompt. + */ + revised_prompt?: string; + + /** + * The URL of the generated image, if `response_format` is `url` (default). + */ + url?: string; +} + +export type ImageModel = 'dall-e-2' | 'dall-e-3'; + +export interface ImagesResponse { + created: number; + + data: Array; +} + +export interface ImageCreateVariationParams { + /** + * The image to use as the basis for the variation(s). Must be a valid PNG file, + * less than 4MB, and square. + */ + image: Core.Uploadable; + + /** + * The model to use for image generation. Only `dall-e-2` is supported at this + * time. + */ + model?: (string & {}) | ImageModel | null; + + /** + * The number of images to generate. Must be between 1 and 10. For `dall-e-3`, only + * `n=1` is supported. + */ + n?: number | null; + + /** + * The format in which the generated images are returned. Must be one of `url` or + * `b64_json`. URLs are only valid for 60 minutes after the image has been + * generated. + */ + response_format?: 'url' | 'b64_json' | null; + + /** + * The size of the generated images. Must be one of `256x256`, `512x512`, or + * `1024x1024`. + */ + size?: '256x256' | '512x512' | '1024x1024' | null; + + /** + * A unique identifier representing your end-user, which can help OpenAI to monitor + * and detect abuse. + * [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). + */ + user?: string; +} + +export interface ImageEditParams { + /** + * The image to edit. Must be a valid PNG file, less than 4MB, and square. If mask + * is not provided, image must have transparency, which will be used as the mask. + */ + image: Core.Uploadable; + + /** + * A text description of the desired image(s). The maximum length is 1000 + * characters. + */ + prompt: string; + + /** + * An additional image whose fully transparent areas (e.g. where alpha is zero) + * indicate where `image` should be edited. Must be a valid PNG file, less than + * 4MB, and have the same dimensions as `image`. + */ + mask?: Core.Uploadable; + + /** + * The model to use for image generation. Only `dall-e-2` is supported at this + * time. + */ + model?: (string & {}) | ImageModel | null; + + /** + * The number of images to generate. Must be between 1 and 10. + */ + n?: number | null; + + /** + * The format in which the generated images are returned. Must be one of `url` or + * `b64_json`. URLs are only valid for 60 minutes after the image has been + * generated. + */ + response_format?: 'url' | 'b64_json' | null; + + /** + * The size of the generated images. Must be one of `256x256`, `512x512`, or + * `1024x1024`. + */ + size?: '256x256' | '512x512' | '1024x1024' | null; + + /** + * A unique identifier representing your end-user, which can help OpenAI to monitor + * and detect abuse. + * [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). + */ + user?: string; +} + +export interface ImageGenerateParams { + /** + * A text description of the desired image(s). The maximum length is 1000 + * characters for `dall-e-2` and 4000 characters for `dall-e-3`. + */ + prompt: string; + + /** + * The model to use for image generation. + */ + model?: (string & {}) | ImageModel | null; + + /** + * The number of images to generate. Must be between 1 and 10. 
For `dall-e-3`, only + * `n=1` is supported. + */ + n?: number | null; + + /** + * The quality of the image that will be generated. `hd` creates images with finer + * details and greater consistency across the image. This param is only supported + * for `dall-e-3`. + */ + quality?: 'standard' | 'hd'; + + /** + * The format in which the generated images are returned. Must be one of `url` or + * `b64_json`. URLs are only valid for 60 minutes after the image has been + * generated. + */ + response_format?: 'url' | 'b64_json' | null; + + /** + * The size of the generated images. Must be one of `256x256`, `512x512`, or + * `1024x1024` for `dall-e-2`. Must be one of `1024x1024`, `1792x1024`, or + * `1024x1792` for `dall-e-3` models. + */ + size?: '256x256' | '512x512' | '1024x1024' | '1792x1024' | '1024x1792' | null; + + /** + * The style of the generated images. Must be one of `vivid` or `natural`. Vivid + * causes the model to lean towards generating hyper-real and dramatic images. + * Natural causes the model to produce more natural, less hyper-real looking + * images. This param is only supported for `dall-e-3`. + */ + style?: 'vivid' | 'natural' | null; + + /** + * A unique identifier representing your end-user, which can help OpenAI to monitor + * and detect abuse. + * [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). + */ + user?: string; +} + +export declare namespace Images { + export { + type Image as Image, + type ImageModel as ImageModel, + type ImagesResponse as ImagesResponse, + type ImageCreateVariationParams as ImageCreateVariationParams, + type ImageEditParams as ImageEditParams, + type ImageGenerateParams as ImageGenerateParams, + }; +} diff --git a/node_modules/openai/src/resources/index.ts b/node_modules/openai/src/resources/index.ts new file mode 100644 index 0000000..ad03023 --- /dev/null +++ b/node_modules/openai/src/resources/index.ts @@ -0,0 +1,63 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
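// Editor's note: illustrative usage of the Images resource defined above, not part of the
// generated source. The prompt is a placeholder; `client.images.generate` is the generate()
// method declared on the Images class.
import OpenAI from 'openai';

const client = new OpenAI();

async function makeImage(): Promise<string | undefined> {
  const response = await client.images.generate({
    model: 'dall-e-3',
    prompt: 'A watercolor painting of a lighthouse at dawn',
    n: 1, // dall-e-3 only supports n=1
    size: '1024x1024',
    quality: 'hd',
    response_format: 'url', // URLs expire about 60 minutes after generation
  });
  return response.data[0]?.url;
}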
+ +export * from './chat/index'; +export * from './shared'; +export { Audio, type AudioModel, type AudioResponseFormat } from './audio/audio'; +export { + BatchesPage, + Batches, + type Batch, + type BatchError, + type BatchRequestCounts, + type BatchCreateParams, + type BatchListParams, +} from './batches'; +export { Beta } from './beta/beta'; +export { + Completions, + type Completion, + type CompletionChoice, + type CompletionUsage, + type CompletionCreateParams, + type CompletionCreateParamsNonStreaming, + type CompletionCreateParamsStreaming, +} from './completions'; +export { + Embeddings, + type CreateEmbeddingResponse, + type Embedding, + type EmbeddingModel, + type EmbeddingCreateParams, +} from './embeddings'; +export { + FileObjectsPage, + Files, + type FileContent, + type FileDeleted, + type FileObject, + type FilePurpose, + type FileCreateParams, + type FileListParams, +} from './files'; +export { FineTuning } from './fine-tuning/fine-tuning'; +export { + Images, + type Image, + type ImageModel, + type ImagesResponse, + type ImageCreateVariationParams, + type ImageEditParams, + type ImageGenerateParams, +} from './images'; +export { ModelsPage, Models, type Model, type ModelDeleted } from './models'; +export { + Moderations, + type Moderation, + type ModerationImageURLInput, + type ModerationModel, + type ModerationMultiModalInput, + type ModerationTextInput, + type ModerationCreateResponse, + type ModerationCreateParams, +} from './moderations'; +export { Uploads, type Upload, type UploadCreateParams, type UploadCompleteParams } from './uploads/uploads'; diff --git a/node_modules/openai/src/resources/models.ts b/node_modules/openai/src/resources/models.ts new file mode 100644 index 0000000..6d8cd52 --- /dev/null +++ b/node_modules/openai/src/resources/models.ts @@ -0,0 +1,75 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +import { APIResource } from '../resource'; +import * as Core from '../core'; +import { Page } from '../pagination'; + +export class Models extends APIResource { + /** + * Retrieves a model instance, providing basic information about the model such as + * the owner and permissioning. + */ + retrieve(model: string, options?: Core.RequestOptions): Core.APIPromise { + return this._client.get(`/models/${model}`, options); + } + + /** + * Lists the currently available models, and provides basic information about each + * one such as the owner and availability. + */ + list(options?: Core.RequestOptions): Core.PagePromise { + return this._client.getAPIList('/models', ModelsPage, options); + } + + /** + * Delete a fine-tuned model. You must have the Owner role in your organization to + * delete a model. + */ + del(model: string, options?: Core.RequestOptions): Core.APIPromise { + return this._client.delete(`/models/${model}`, options); + } +} + +/** + * Note: no pagination actually occurs yet, this is for forwards-compatibility. + */ +export class ModelsPage extends Page {} + +/** + * Describes an OpenAI model offering that can be used with the API. + */ +export interface Model { + /** + * The model identifier, which can be referenced in the API endpoints. + */ + id: string; + + /** + * The Unix timestamp (in seconds) when the model was created. + */ + created: number; + + /** + * The object type, which is always "model". + */ + object: 'model'; + + /** + * The organization that owns the model. 
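// Editor's note: sketch of the Models resource defined above, not generated code. It lists the
// available models, retrieves one by id, and deletes a fine-tuned model via `del`; the model ids
// are placeholders.
import OpenAI from 'openai';

const client = new OpenAI();

async function inspectModels(): Promise<void> {
  // ModelsPage is iterable even though the endpoint does not actually paginate yet.
  for await (const model of client.models.list()) {
    console.log(model.id, 'owned by', model.owned_by);
  }

  const model = await client.models.retrieve('gpt-4o-mini');
  console.log(new Date(model.created * 1000).toISOString());

  // Deleting requires the Owner role and only applies to fine-tuned models.
  const deleted = await client.models.del('ft:gpt-4o-mini:my-org:custom-model-name:abc123');
  console.log(deleted.deleted);
}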
+ */ + owned_by: string; +} + +export interface ModelDeleted { + id: string; + + deleted: boolean; + + object: string; +} + +Models.ModelsPage = ModelsPage; + +export declare namespace Models { + export { type Model as Model, type ModelDeleted as ModelDeleted, ModelsPage as ModelsPage }; +} diff --git a/node_modules/openai/src/resources/moderations.ts b/node_modules/openai/src/resources/moderations.ts new file mode 100644 index 0000000..f7b1616 --- /dev/null +++ b/node_modules/openai/src/resources/moderations.ts @@ -0,0 +1,369 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +import { APIResource } from '../resource'; +import * as Core from '../core'; + +export class Moderations extends APIResource { + /** + * Classifies if text and/or image inputs are potentially harmful. Learn more in + * the [moderation guide](https://platform.openai.com/docs/guides/moderation). + */ + create( + body: ModerationCreateParams, + options?: Core.RequestOptions, + ): Core.APIPromise { + return this._client.post('/moderations', { body, ...options }); + } +} + +export interface Moderation { + /** + * A list of the categories, and whether they are flagged or not. + */ + categories: Moderation.Categories; + + /** + * A list of the categories along with the input type(s) that the score applies to. + */ + category_applied_input_types: Moderation.CategoryAppliedInputTypes; + + /** + * A list of the categories along with their scores as predicted by model. + */ + category_scores: Moderation.CategoryScores; + + /** + * Whether any of the below categories are flagged. + */ + flagged: boolean; +} + +export namespace Moderation { + /** + * A list of the categories, and whether they are flagged or not. + */ + export interface Categories { + /** + * Content that expresses, incites, or promotes harassing language towards any + * target. + */ + harassment: boolean; + + /** + * Harassment content that also includes violence or serious harm towards any + * target. + */ + 'harassment/threatening': boolean; + + /** + * Content that expresses, incites, or promotes hate based on race, gender, + * ethnicity, religion, nationality, sexual orientation, disability status, or + * caste. Hateful content aimed at non-protected groups (e.g., chess players) is + * harassment. + */ + hate: boolean; + + /** + * Hateful content that also includes violence or serious harm towards the targeted + * group based on race, gender, ethnicity, religion, nationality, sexual + * orientation, disability status, or caste. + */ + 'hate/threatening': boolean; + + /** + * Content that includes instructions or advice that facilitate the planning or + * execution of wrongdoing, or that gives advice or instruction on how to commit + * illicit acts. For example, "how to shoplift" would fit this category. + */ + illicit: boolean; + + /** + * Content that includes instructions or advice that facilitate the planning or + * execution of wrongdoing that also includes violence, or that gives advice or + * instruction on the procurement of any weapon. + */ + 'illicit/violent': boolean; + + /** + * Content that promotes, encourages, or depicts acts of self-harm, such as + * suicide, cutting, and eating disorders. + */ + 'self-harm': boolean; + + /** + * Content that encourages performing acts of self-harm, such as suicide, cutting, + * and eating disorders, or that gives instructions or advice on how to commit such + * acts. 
+ */ + 'self-harm/instructions': boolean; + + /** + * Content where the speaker expresses that they are engaging or intend to engage + * in acts of self-harm, such as suicide, cutting, and eating disorders. + */ + 'self-harm/intent': boolean; + + /** + * Content meant to arouse sexual excitement, such as the description of sexual + * activity, or that promotes sexual services (excluding sex education and + * wellness). + */ + sexual: boolean; + + /** + * Sexual content that includes an individual who is under 18 years old. + */ + 'sexual/minors': boolean; + + /** + * Content that depicts death, violence, or physical injury. + */ + violence: boolean; + + /** + * Content that depicts death, violence, or physical injury in graphic detail. + */ + 'violence/graphic': boolean; + } + + /** + * A list of the categories along with the input type(s) that the score applies to. + */ + export interface CategoryAppliedInputTypes { + /** + * The applied input type(s) for the category 'harassment'. + */ + harassment: Array<'text'>; + + /** + * The applied input type(s) for the category 'harassment/threatening'. + */ + 'harassment/threatening': Array<'text'>; + + /** + * The applied input type(s) for the category 'hate'. + */ + hate: Array<'text'>; + + /** + * The applied input type(s) for the category 'hate/threatening'. + */ + 'hate/threatening': Array<'text'>; + + /** + * The applied input type(s) for the category 'illicit'. + */ + illicit: Array<'text'>; + + /** + * The applied input type(s) for the category 'illicit/violent'. + */ + 'illicit/violent': Array<'text'>; + + /** + * The applied input type(s) for the category 'self-harm'. + */ + 'self-harm': Array<'text' | 'image'>; + + /** + * The applied input type(s) for the category 'self-harm/instructions'. + */ + 'self-harm/instructions': Array<'text' | 'image'>; + + /** + * The applied input type(s) for the category 'self-harm/intent'. + */ + 'self-harm/intent': Array<'text' | 'image'>; + + /** + * The applied input type(s) for the category 'sexual'. + */ + sexual: Array<'text' | 'image'>; + + /** + * The applied input type(s) for the category 'sexual/minors'. + */ + 'sexual/minors': Array<'text'>; + + /** + * The applied input type(s) for the category 'violence'. + */ + violence: Array<'text' | 'image'>; + + /** + * The applied input type(s) for the category 'violence/graphic'. + */ + 'violence/graphic': Array<'text' | 'image'>; + } + + /** + * A list of the categories along with their scores as predicted by model. + */ + export interface CategoryScores { + /** + * The score for the category 'harassment'. + */ + harassment: number; + + /** + * The score for the category 'harassment/threatening'. + */ + 'harassment/threatening': number; + + /** + * The score for the category 'hate'. + */ + hate: number; + + /** + * The score for the category 'hate/threatening'. + */ + 'hate/threatening': number; + + /** + * The score for the category 'illicit'. + */ + illicit: number; + + /** + * The score for the category 'illicit/violent'. + */ + 'illicit/violent': number; + + /** + * The score for the category 'self-harm'. + */ + 'self-harm': number; + + /** + * The score for the category 'self-harm/instructions'. + */ + 'self-harm/instructions': number; + + /** + * The score for the category 'self-harm/intent'. + */ + 'self-harm/intent': number; + + /** + * The score for the category 'sexual'. + */ + sexual: number; + + /** + * The score for the category 'sexual/minors'. + */ + 'sexual/minors': number; + + /** + * The score for the category 'violence'. 
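// Editor's note: hedged usage sketch for the Moderations resource defined above, not generated
// code. It classifies a text input and prints any flagged categories; the input string is a
// placeholder.
import OpenAI from 'openai';

const client = new OpenAI();

async function moderate(text: string): Promise<void> {
  const response = await client.moderations.create({
    model: 'omni-moderation-latest',
    input: text,
  });

  for (const result of response.results) {
    if (!result.flagged) continue;
    // `categories` maps each category name to a boolean flag.
    const flagged = Object.entries(result.categories)
      .filter(([, isFlagged]) => isFlagged)
      .map(([name]) => name);
    console.log('Flagged categories:', flagged.join(', '));
  }
}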
+ */ + violence: number; + + /** + * The score for the category 'violence/graphic'. + */ + 'violence/graphic': number; + } +} + +/** + * An object describing an image to classify. + */ +export interface ModerationImageURLInput { + /** + * Contains either an image URL or a data URL for a base64 encoded image. + */ + image_url: ModerationImageURLInput.ImageURL; + + /** + * Always `image_url`. + */ + type: 'image_url'; +} + +export namespace ModerationImageURLInput { + /** + * Contains either an image URL or a data URL for a base64 encoded image. + */ + export interface ImageURL { + /** + * Either a URL of the image or the base64 encoded image data. + */ + url: string; + } +} + +export type ModerationModel = + | 'omni-moderation-latest' + | 'omni-moderation-2024-09-26' + | 'text-moderation-latest' + | 'text-moderation-stable'; + +/** + * An object describing an image to classify. + */ +export type ModerationMultiModalInput = ModerationImageURLInput | ModerationTextInput; + +/** + * An object describing text to classify. + */ +export interface ModerationTextInput { + /** + * A string of text to classify. + */ + text: string; + + /** + * Always `text`. + */ + type: 'text'; +} + +/** + * Represents if a given text input is potentially harmful. + */ +export interface ModerationCreateResponse { + /** + * The unique identifier for the moderation request. + */ + id: string; + + /** + * The model used to generate the moderation results. + */ + model: string; + + /** + * A list of moderation objects. + */ + results: Array; +} + +export interface ModerationCreateParams { + /** + * Input (or inputs) to classify. Can be a single string, an array of strings, or + * an array of multi-modal input objects similar to other models. + */ + input: string | Array | Array; + + /** + * The content moderation model you would like to use. Learn more in + * [the moderation guide](https://platform.openai.com/docs/guides/moderation), and + * learn about available models + * [here](https://platform.openai.com/docs/models#moderation). + */ + model?: (string & {}) | ModerationModel; +} + +export declare namespace Moderations { + export { + type Moderation as Moderation, + type ModerationImageURLInput as ModerationImageURLInput, + type ModerationModel as ModerationModel, + type ModerationMultiModalInput as ModerationMultiModalInput, + type ModerationTextInput as ModerationTextInput, + type ModerationCreateResponse as ModerationCreateResponse, + type ModerationCreateParams as ModerationCreateParams, + }; +} diff --git a/node_modules/openai/src/resources/shared.ts b/node_modules/openai/src/resources/shared.ts new file mode 100644 index 0000000..f44fda8 --- /dev/null +++ b/node_modules/openai/src/resources/shared.ts @@ -0,0 +1,109 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +export interface ErrorObject { + code: string | null; + + message: string; + + param: string | null; + + type: string; +} + +export interface FunctionDefinition { + /** + * The name of the function to be called. Must be a-z, A-Z, 0-9, or contain + * underscores and dashes, with a maximum length of 64. + */ + name: string; + + /** + * A description of what the function does, used by the model to choose when and + * how to call the function. + */ + description?: string; + + /** + * The parameters the functions accepts, described as a JSON Schema object. 
See the + * [guide](https://platform.openai.com/docs/guides/function-calling) for examples, + * and the + * [JSON Schema reference](https://json-schema.org/understanding-json-schema/) for + * documentation about the format. + * + * Omitting `parameters` defines a function with an empty parameter list. + */ + parameters?: FunctionParameters; + + /** + * Whether to enable strict schema adherence when generating the function call. If + * set to true, the model will follow the exact schema defined in the `parameters` + * field. Only a subset of JSON Schema is supported when `strict` is `true`. Learn + * more about Structured Outputs in the + * [function calling guide](docs/guides/function-calling). + */ + strict?: boolean | null; +} + +/** + * The parameters the functions accepts, described as a JSON Schema object. See the + * [guide](https://platform.openai.com/docs/guides/function-calling) for examples, + * and the + * [JSON Schema reference](https://json-schema.org/understanding-json-schema/) for + * documentation about the format. + * + * Omitting `parameters` defines a function with an empty parameter list. + */ +export type FunctionParameters = Record; + +export interface ResponseFormatJSONObject { + /** + * The type of response format being defined: `json_object` + */ + type: 'json_object'; +} + +export interface ResponseFormatJSONSchema { + json_schema: ResponseFormatJSONSchema.JSONSchema; + + /** + * The type of response format being defined: `json_schema` + */ + type: 'json_schema'; +} + +export namespace ResponseFormatJSONSchema { + export interface JSONSchema { + /** + * The name of the response format. Must be a-z, A-Z, 0-9, or contain underscores + * and dashes, with a maximum length of 64. + */ + name: string; + + /** + * A description of what the response format is for, used by the model to determine + * how to respond in the format. + */ + description?: string; + + /** + * The schema for the response format, described as a JSON Schema object. + */ + schema?: Record; + + /** + * Whether to enable strict schema adherence when generating the output. If set to + * true, the model will always follow the exact schema defined in the `schema` + * field. Only a subset of JSON Schema is supported when `strict` is `true`. To + * learn more, read the + * [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). + */ + strict?: boolean | null; + } +} + +export interface ResponseFormatText { + /** + * The type of response format being defined: `text` + */ + type: 'text'; +} diff --git a/node_modules/openai/src/resources/uploads/index.ts b/node_modules/openai/src/resources/uploads/index.ts new file mode 100644 index 0000000..200d356 --- /dev/null +++ b/node_modules/openai/src/resources/uploads/index.ts @@ -0,0 +1,4 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +export { Parts, type UploadPart, type PartCreateParams } from './parts'; +export { Uploads, type Upload, type UploadCreateParams, type UploadCompleteParams } from './uploads'; diff --git a/node_modules/openai/src/resources/uploads/parts.ts b/node_modules/openai/src/resources/uploads/parts.ts new file mode 100644 index 0000000..9b54c99 --- /dev/null +++ b/node_modules/openai/src/resources/uploads/parts.ts @@ -0,0 +1,66 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
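// Editor's note: an illustrative FunctionDefinition value for the shared types above, not part of
// the generated source. The weather function and its JSON Schema are made up for the example, and
// the deep import path is assumed to mirror this file's location in the package.
import type { FunctionDefinition } from 'openai/resources/shared';

const getWeather: FunctionDefinition = {
  name: 'get_weather',
  description: 'Look up the current weather for a city.',
  parameters: {
    type: 'object',
    properties: {
      city: { type: 'string', description: 'City name, e.g. "Berlin"' },
      unit: { type: 'string', enum: ['celsius', 'fahrenheit'] },
    },
    required: ['city'],
    additionalProperties: false,
  },
  // With strict mode the model must follow the schema exactly (only a subset of JSON Schema is allowed).
  strict: true,
};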
+ +import { APIResource } from '../../resource'; +import * as Core from '../../core'; + +export class Parts extends APIResource { + /** + * Adds a + * [Part](https://platform.openai.com/docs/api-reference/uploads/part-object) to an + * [Upload](https://platform.openai.com/docs/api-reference/uploads/object) object. + * A Part represents a chunk of bytes from the file you are trying to upload. + * + * Each Part can be at most 64 MB, and you can add Parts until you hit the Upload + * maximum of 8 GB. + * + * It is possible to add multiple Parts in parallel. You can decide the intended + * order of the Parts when you + * [complete the Upload](https://platform.openai.com/docs/api-reference/uploads/complete). + */ + create( + uploadId: string, + body: PartCreateParams, + options?: Core.RequestOptions, + ): Core.APIPromise { + return this._client.post( + `/uploads/${uploadId}/parts`, + Core.multipartFormRequestOptions({ body, ...options }), + ); + } +} + +/** + * The upload Part represents a chunk of bytes we can add to an Upload object. + */ +export interface UploadPart { + /** + * The upload Part unique identifier, which can be referenced in API endpoints. + */ + id: string; + + /** + * The Unix timestamp (in seconds) for when the Part was created. + */ + created_at: number; + + /** + * The object type, which is always `upload.part`. + */ + object: 'upload.part'; + + /** + * The ID of the Upload object that this Part was added to. + */ + upload_id: string; +} + +export interface PartCreateParams { + /** + * The chunk of bytes for this Part. + */ + data: Core.Uploadable; +} + +export declare namespace Parts { + export { type UploadPart as UploadPart, type PartCreateParams as PartCreateParams }; +} diff --git a/node_modules/openai/src/resources/uploads/uploads.ts b/node_modules/openai/src/resources/uploads/uploads.ts new file mode 100644 index 0000000..8491d0f --- /dev/null +++ b/node_modules/openai/src/resources/uploads/uploads.ts @@ -0,0 +1,172 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +import { APIResource } from '../../resource'; +import * as Core from '../../core'; +import * as FilesAPI from '../files'; +import * as PartsAPI from './parts'; +import { PartCreateParams, Parts, UploadPart } from './parts'; + +export class Uploads extends APIResource { + parts: PartsAPI.Parts = new PartsAPI.Parts(this._client); + + /** + * Creates an intermediate + * [Upload](https://platform.openai.com/docs/api-reference/uploads/object) object + * that you can add + * [Parts](https://platform.openai.com/docs/api-reference/uploads/part-object) to. + * Currently, an Upload can accept at most 8 GB in total and expires after an hour + * after you create it. + * + * Once you complete the Upload, we will create a + * [File](https://platform.openai.com/docs/api-reference/files/object) object that + * contains all the parts you uploaded. This File is usable in the rest of our + * platform as a regular File object. + * + * For certain `purpose`s, the correct `mime_type` must be specified. Please refer + * to documentation for the supported MIME types for your use case: + * + * - [Assistants](https://platform.openai.com/docs/assistants/tools/file-search#supported-files) + * + * For guidance on the proper filename extensions for each purpose, please follow + * the documentation on + * [creating a File](https://platform.openai.com/docs/api-reference/files/create). 
+ */ + create(body: UploadCreateParams, options?: Core.RequestOptions): Core.APIPromise { + return this._client.post('/uploads', { body, ...options }); + } + + /** + * Cancels the Upload. No Parts may be added after an Upload is cancelled. + */ + cancel(uploadId: string, options?: Core.RequestOptions): Core.APIPromise { + return this._client.post(`/uploads/${uploadId}/cancel`, options); + } + + /** + * Completes the + * [Upload](https://platform.openai.com/docs/api-reference/uploads/object). + * + * Within the returned Upload object, there is a nested + * [File](https://platform.openai.com/docs/api-reference/files/object) object that + * is ready to use in the rest of the platform. + * + * You can specify the order of the Parts by passing in an ordered list of the Part + * IDs. + * + * The number of bytes uploaded upon completion must match the number of bytes + * initially specified when creating the Upload object. No Parts may be added after + * an Upload is completed. + */ + complete( + uploadId: string, + body: UploadCompleteParams, + options?: Core.RequestOptions, + ): Core.APIPromise { + return this._client.post(`/uploads/${uploadId}/complete`, { body, ...options }); + } +} + +/** + * The Upload object can accept byte chunks in the form of Parts. + */ +export interface Upload { + /** + * The Upload unique identifier, which can be referenced in API endpoints. + */ + id: string; + + /** + * The intended number of bytes to be uploaded. + */ + bytes: number; + + /** + * The Unix timestamp (in seconds) for when the Upload was created. + */ + created_at: number; + + /** + * The Unix timestamp (in seconds) for when the Upload was created. + */ + expires_at: number; + + /** + * The name of the file to be uploaded. + */ + filename: string; + + /** + * The object type, which is always "upload". + */ + object: 'upload'; + + /** + * The intended purpose of the file. + * [Please refer here](https://platform.openai.com/docs/api-reference/files/object#files/object-purpose) + * for acceptable values. + */ + purpose: string; + + /** + * The status of the Upload. + */ + status: 'pending' | 'completed' | 'cancelled' | 'expired'; + + /** + * The ready File object after the Upload is completed. + */ + file?: FilesAPI.FileObject | null; +} + +export interface UploadCreateParams { + /** + * The number of bytes in the file you are uploading. + */ + bytes: number; + + /** + * The name of the file to upload. + */ + filename: string; + + /** + * The MIME type of the file. + * + * This must fall within the supported MIME types for your file purpose. See the + * supported MIME types for assistants and vision. + */ + mime_type: string; + + /** + * The intended purpose of the uploaded file. + * + * See the + * [documentation on File purposes](https://platform.openai.com/docs/api-reference/files/create#files-create-purpose). + */ + purpose: FilesAPI.FilePurpose; +} + +export interface UploadCompleteParams { + /** + * The ordered list of Part IDs. + */ + part_ids: Array; + + /** + * The optional md5 checksum for the file contents to verify if the bytes uploaded + * matches what you expect. 
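// Editor's note: hedged sketch of the multipart Upload flow defined above, not generated code. It
// creates an Upload, adds a single Part, and completes it; the file name, contents, and MIME type
// are placeholders, and error handling is omitted.
import OpenAI, { toFile } from 'openai';

const client = new OpenAI();

async function uploadJsonl(bytes: Buffer): Promise<void> {
  const upload = await client.uploads.create({
    purpose: 'fine-tune',
    filename: 'training.jsonl',
    bytes: bytes.length,
    mime_type: 'text/jsonl', // must be a MIME type supported for the chosen purpose
  });

  // Each Part may be at most 64 MB; larger files would be split into several parts.
  const part = await client.uploads.parts.create(upload.id, {
    data: await toFile(bytes, 'training.jsonl'),
  });

  // Part ids are passed in the order the bytes should be concatenated.
  const completed = await client.uploads.complete(upload.id, { part_ids: [part.id] });
  console.log(completed.status, completed.file?.id);
}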
+ */ + md5?: string; +} + +Uploads.Parts = Parts; + +export declare namespace Uploads { + export { + type Upload as Upload, + type UploadCreateParams as UploadCreateParams, + type UploadCompleteParams as UploadCompleteParams, + }; + + export { Parts as Parts, type UploadPart as UploadPart, type PartCreateParams as PartCreateParams }; +} diff --git a/node_modules/openai/src/shims/node.ts b/node_modules/openai/src/shims/node.ts new file mode 100644 index 0000000..73df560 --- /dev/null +++ b/node_modules/openai/src/shims/node.ts @@ -0,0 +1,50 @@ +// @ts-ignore +import * as types from '../_shims/node-types'; +import { setShims } from '../_shims/registry'; +import { getRuntime } from '../_shims/node-runtime'; +setShims(getRuntime()); + +declare module '../_shims/manual-types' { + export namespace manual { + // @ts-ignore + export type Agent = types.Agent; + // @ts-ignore + export import fetch = types.fetch; + // @ts-ignore + export type Request = types.Request; + // @ts-ignore + export type RequestInfo = types.RequestInfo; + // @ts-ignore + export type RequestInit = types.RequestInit; + // @ts-ignore + export type Response = types.Response; + // @ts-ignore + export type ResponseInit = types.ResponseInit; + // @ts-ignore + export type ResponseType = types.ResponseType; + // @ts-ignore + export type BodyInit = types.BodyInit; + // @ts-ignore + export type Headers = types.Headers; + // @ts-ignore + export type HeadersInit = types.HeadersInit; + // @ts-ignore + export type BlobPropertyBag = types.BlobPropertyBag; + // @ts-ignore + export type FilePropertyBag = types.FilePropertyBag; + // @ts-ignore + export type FileFromPathOptions = types.FileFromPathOptions; + // @ts-ignore + export import FormData = types.FormData; + // @ts-ignore + export import File = types.File; + // @ts-ignore + export import Blob = types.Blob; + // @ts-ignore + export type Readable = types.Readable; + // @ts-ignore + export type FsReadStream = types.FsReadStream; + // @ts-ignore + export import ReadableStream = types.ReadableStream; + } +} diff --git a/node_modules/openai/src/shims/web.ts b/node_modules/openai/src/shims/web.ts new file mode 100644 index 0000000..f72d784 --- /dev/null +++ b/node_modules/openai/src/shims/web.ts @@ -0,0 +1,50 @@ +// @ts-ignore +import * as types from '../_shims/web-types'; +import { setShims } from '../_shims/registry'; +import { getRuntime } from '../_shims/web-runtime'; +setShims(getRuntime({ manuallyImported: true })); + +declare module '../_shims/manual-types' { + export namespace manual { + // @ts-ignore + export type Agent = types.Agent; + // @ts-ignore + export import fetch = types.fetch; + // @ts-ignore + export type Request = types.Request; + // @ts-ignore + export type RequestInfo = types.RequestInfo; + // @ts-ignore + export type RequestInit = types.RequestInit; + // @ts-ignore + export type Response = types.Response; + // @ts-ignore + export type ResponseInit = types.ResponseInit; + // @ts-ignore + export type ResponseType = types.ResponseType; + // @ts-ignore + export type BodyInit = types.BodyInit; + // @ts-ignore + export type Headers = types.Headers; + // @ts-ignore + export type HeadersInit = types.HeadersInit; + // @ts-ignore + export type BlobPropertyBag = types.BlobPropertyBag; + // @ts-ignore + export type FilePropertyBag = types.FilePropertyBag; + // @ts-ignore + export type FileFromPathOptions = types.FileFromPathOptions; + // @ts-ignore + export import FormData = types.FormData; + // @ts-ignore + export import File = types.File; + // @ts-ignore + export import Blob = 
types.Blob; + // @ts-ignore + export type Readable = types.Readable; + // @ts-ignore + export type FsReadStream = types.FsReadStream; + // @ts-ignore + export import ReadableStream = types.ReadableStream; + } +} diff --git a/node_modules/openai/src/streaming.ts b/node_modules/openai/src/streaming.ts new file mode 100644 index 0000000..2891e6a --- /dev/null +++ b/node_modules/openai/src/streaming.ts @@ -0,0 +1,398 @@ +import { ReadableStream, type Response } from './_shims/index'; +import { OpenAIError } from './error'; +import { LineDecoder } from './internal/decoders/line'; + +import { APIError } from './error'; + +type Bytes = string | ArrayBuffer | Uint8Array | Buffer | null | undefined; + +export type ServerSentEvent = { + event: string | null; + data: string; + raw: string[]; +}; + +export class Stream implements AsyncIterable { + controller: AbortController; + + constructor( + private iterator: () => AsyncIterator, + controller: AbortController, + ) { + this.controller = controller; + } + + static fromSSEResponse(response: Response, controller: AbortController): Stream { + let consumed = false; + + async function* iterator(): AsyncIterator { + if (consumed) { + throw new Error('Cannot iterate over a consumed stream, use `.tee()` to split the stream.'); + } + consumed = true; + let done = false; + try { + for await (const sse of _iterSSEMessages(response, controller)) { + if (done) continue; + + if (sse.data.startsWith('[DONE]')) { + done = true; + continue; + } + + if (sse.event === null) { + let data; + + try { + data = JSON.parse(sse.data); + } catch (e) { + console.error(`Could not parse message into JSON:`, sse.data); + console.error(`From chunk:`, sse.raw); + throw e; + } + + if (data && data.error) { + throw new APIError(undefined, data.error, undefined, undefined); + } + + yield data; + } else { + let data; + try { + data = JSON.parse(sse.data); + } catch (e) { + console.error(`Could not parse message into JSON:`, sse.data); + console.error(`From chunk:`, sse.raw); + throw e; + } + // TODO: Is this where the error should be thrown? + if (sse.event == 'error') { + throw new APIError(undefined, data.error, data.message, undefined); + } + yield { event: sse.event, data: data } as any; + } + } + done = true; + } catch (e) { + // If the user calls `stream.controller.abort()`, we should exit without throwing. + if (e instanceof Error && e.name === 'AbortError') return; + throw e; + } finally { + // If the user `break`s, abort the ongoing request. + if (!done) controller.abort(); + } + } + + return new Stream(iterator, controller); + } + + /** + * Generates a Stream from a newline-separated ReadableStream + * where each item is a JSON value. 
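// Editor's note: illustrative consumption of the Stream class above, not generated code. A
// streaming request (here a chat completion, defined elsewhere in the SDK) resolves to a Stream
// that is async-iterable; leaving the loop early aborts the underlying request via the stream's
// AbortController, as the iterator's finally block shows.
import OpenAI from 'openai';

const client = new OpenAI();

async function streamReply(): Promise<void> {
  const stream = await client.chat.completions.create({
    model: 'gpt-4o-mini',
    messages: [{ role: 'user', content: 'Say hello' }],
    stream: true,
  });

  for await (const chunk of stream) {
    process.stdout.write(chunk.choices[0]?.delta?.content ?? '');
    // `break` here would call controller.abort() and cancel the HTTP request.
  }
}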
+ */ + static fromReadableStream(readableStream: ReadableStream, controller: AbortController): Stream { + let consumed = false; + + async function* iterLines(): AsyncGenerator { + const lineDecoder = new LineDecoder(); + + const iter = readableStreamAsyncIterable(readableStream); + for await (const chunk of iter) { + for (const line of lineDecoder.decode(chunk)) { + yield line; + } + } + + for (const line of lineDecoder.flush()) { + yield line; + } + } + + async function* iterator(): AsyncIterator { + if (consumed) { + throw new Error('Cannot iterate over a consumed stream, use `.tee()` to split the stream.'); + } + consumed = true; + let done = false; + try { + for await (const line of iterLines()) { + if (done) continue; + if (line) yield JSON.parse(line); + } + done = true; + } catch (e) { + // If the user calls `stream.controller.abort()`, we should exit without throwing. + if (e instanceof Error && e.name === 'AbortError') return; + throw e; + } finally { + // If the user `break`s, abort the ongoing request. + if (!done) controller.abort(); + } + } + + return new Stream(iterator, controller); + } + + [Symbol.asyncIterator](): AsyncIterator { + return this.iterator(); + } + + /** + * Splits the stream into two streams which can be + * independently read from at different speeds. + */ + tee(): [Stream, Stream] { + const left: Array>> = []; + const right: Array>> = []; + const iterator = this.iterator(); + + const teeIterator = (queue: Array>>): AsyncIterator => { + return { + next: () => { + if (queue.length === 0) { + const result = iterator.next(); + left.push(result); + right.push(result); + } + return queue.shift()!; + }, + }; + }; + + return [ + new Stream(() => teeIterator(left), this.controller), + new Stream(() => teeIterator(right), this.controller), + ]; + } + + /** + * Converts this stream to a newline-separated ReadableStream of + * JSON stringified values in the stream + * which can be turned back into a Stream with `Stream.fromReadableStream()`. + */ + toReadableStream(): ReadableStream { + const self = this; + let iter: AsyncIterator; + const encoder = new TextEncoder(); + + return new ReadableStream({ + async start() { + iter = self[Symbol.asyncIterator](); + }, + async pull(ctrl: any) { + try { + const { value, done } = await iter.next(); + if (done) return ctrl.close(); + + const bytes = encoder.encode(JSON.stringify(value) + '\n'); + + ctrl.enqueue(bytes); + } catch (err) { + ctrl.error(err); + } + }, + async cancel() { + await iter.return?.(); + }, + }); + } +} + +export async function* _iterSSEMessages( + response: Response, + controller: AbortController, +): AsyncGenerator { + if (!response.body) { + controller.abort(); + throw new OpenAIError(`Attempted to iterate over a response with no body`); + } + + const sseDecoder = new SSEDecoder(); + const lineDecoder = new LineDecoder(); + + const iter = readableStreamAsyncIterable(response.body); + for await (const sseChunk of iterSSEChunks(iter)) { + for (const line of lineDecoder.decode(sseChunk)) { + const sse = sseDecoder.decode(line); + if (sse) yield sse; + } + } + + for (const line of lineDecoder.flush()) { + const sse = sseDecoder.decode(line); + if (sse) yield sse; + } +} + +/** + * Given an async iterable iterator, iterates over it and yields full + * SSE chunks, i.e. yields when a double new-line is encountered. 
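// Editor's note: a sketch of `tee()` and the ReadableStream round trip described above, not
// generated code. `stream` stands for any Stream instance, such as the one obtained in the
// previous example; the import path assumes the package exposes this module as `openai/streaming`.
import { Stream } from 'openai/streaming';

async function splitAndCompare<T>(stream: Stream<T>): Promise<void> {
  // The two halves can be read at different speeds; results are buffered for the slower reader.
  const [left, right] = stream.tee();

  const collect = async (s: Stream<T>): Promise<T[]> => {
    const items: T[] = [];
    for await (const item of s) items.push(item);
    return items;
  };

  const [a, b] = await Promise.all([collect(left), collect(right)]);
  console.log(a.length === b.length);

  // Alternatively (instead of tee), a Stream can be serialized to newline-delimited JSON and
  // rebuilt later:
  //   const readable = stream.toReadableStream();
  //   const rebuilt = Stream.fromReadableStream(readable, new AbortController());
}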
+ */ +async function* iterSSEChunks(iterator: AsyncIterableIterator): AsyncGenerator { + let data = new Uint8Array(); + + for await (const chunk of iterator) { + if (chunk == null) { + continue; + } + + const binaryChunk = + chunk instanceof ArrayBuffer ? new Uint8Array(chunk) + : typeof chunk === 'string' ? new TextEncoder().encode(chunk) + : chunk; + + let newData = new Uint8Array(data.length + binaryChunk.length); + newData.set(data); + newData.set(binaryChunk, data.length); + data = newData; + + let patternIndex; + while ((patternIndex = findDoubleNewlineIndex(data)) !== -1) { + yield data.slice(0, patternIndex); + data = data.slice(patternIndex); + } + } + + if (data.length > 0) { + yield data; + } +} + +function findDoubleNewlineIndex(buffer: Uint8Array): number { + // This function searches the buffer for the end patterns (\r\r, \n\n, \r\n\r\n) + // and returns the index right after the first occurrence of any pattern, + // or -1 if none of the patterns are found. + const newline = 0x0a; // \n + const carriage = 0x0d; // \r + + for (let i = 0; i < buffer.length - 2; i++) { + if (buffer[i] === newline && buffer[i + 1] === newline) { + // \n\n + return i + 2; + } + if (buffer[i] === carriage && buffer[i + 1] === carriage) { + // \r\r + return i + 2; + } + if ( + buffer[i] === carriage && + buffer[i + 1] === newline && + i + 3 < buffer.length && + buffer[i + 2] === carriage && + buffer[i + 3] === newline + ) { + // \r\n\r\n + return i + 4; + } + } + + return -1; +} + +class SSEDecoder { + private data: string[]; + private event: string | null; + private chunks: string[]; + + constructor() { + this.event = null; + this.data = []; + this.chunks = []; + } + + decode(line: string) { + if (line.endsWith('\r')) { + line = line.substring(0, line.length - 1); + } + + if (!line) { + // empty line and we didn't previously encounter any messages + if (!this.event && !this.data.length) return null; + + const sse: ServerSentEvent = { + event: this.event, + data: this.data.join('\n'), + raw: this.chunks, + }; + + this.event = null; + this.data = []; + this.chunks = []; + + return sse; + } + + this.chunks.push(line); + + if (line.startsWith(':')) { + return null; + } + + let [fieldname, _, value] = partition(line, ':'); + + if (value.startsWith(' ')) { + value = value.substring(1); + } + + if (fieldname === 'event') { + this.event = value; + } else if (fieldname === 'data') { + this.data.push(value); + } + + return null; + } +} + +/** This is an internal helper function that's just used for testing */ +export function _decodeChunks(chunks: string[]): string[] { + const decoder = new LineDecoder(); + const lines: string[] = []; + for (const chunk of chunks) { + lines.push(...decoder.decode(chunk)); + } + + return lines; +} + +function partition(str: string, delimiter: string): [string, string, string] { + const index = str.indexOf(delimiter); + if (index !== -1) { + return [str.substring(0, index), delimiter, str.substring(index + delimiter.length)]; + } + + return [str, '', '']; +} + +/** + * Most browsers don't yet have async iterable support for ReadableStream, + * and Node has a very different way of reading bytes from its "ReadableStream". 
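// Editor's note: not generated code; a small illustration of what the SSE decoder above produces.
// Given this raw event on the wire (fields split across lines, a blank line terminates the event):
//
//   event: message
//   data: {"answer":
//   data: "42"}
//
// SSEDecoder joins the `data:` lines with '\n' and yields a ServerSentEvent roughly like the
// value below (the type is assumed to be importable from the package's streaming module).
import type { ServerSentEvent } from 'openai/streaming';

const decoded: ServerSentEvent = {
  event: 'message',
  data: '{"answer":\n"42"}',
  raw: ['event: message', 'data: {"answer":', 'data: "42"}'],
};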
+ * + * This polyfill was pulled from https://github.com/MattiasBuelens/web-streams-polyfill/pull/122#issuecomment-1627354490 + */ +export function readableStreamAsyncIterable(stream: any): AsyncIterableIterator { + if (stream[Symbol.asyncIterator]) return stream; + + const reader = stream.getReader(); + return { + async next() { + try { + const result = await reader.read(); + if (result?.done) reader.releaseLock(); // release lock when stream becomes closed + return result; + } catch (e) { + reader.releaseLock(); // release lock when stream becomes errored + throw e; + } + }, + async return() { + const cancelPromise = reader.cancel(); + reader.releaseLock(); + await cancelPromise; + return { done: true, value: undefined }; + }, + [Symbol.asyncIterator]() { + return this; + }, + }; +} diff --git a/node_modules/openai/src/tsconfig.json b/node_modules/openai/src/tsconfig.json new file mode 100644 index 0000000..e9f2d70 --- /dev/null +++ b/node_modules/openai/src/tsconfig.json @@ -0,0 +1,11 @@ +{ + // this config is included in the published src directory to prevent TS errors + // from appearing when users go to source, and VSCode opens the source .ts file + // via declaration maps + "include": ["index.ts"], + "compilerOptions": { + "target": "es2015", + "lib": ["DOM"], + "moduleResolution": "node" + } +} diff --git a/node_modules/openai/src/uploads.ts b/node_modules/openai/src/uploads.ts new file mode 100644 index 0000000..8fd2154 --- /dev/null +++ b/node_modules/openai/src/uploads.ts @@ -0,0 +1,255 @@ +import { type RequestOptions } from './core'; +import { + FormData, + File, + type Blob, + type FilePropertyBag, + getMultipartRequestOptions, + type FsReadStream, + isFsReadStream, +} from './_shims/index'; +import { MultipartBody } from './_shims/MultipartBody'; +export { fileFromPath } from './_shims/index'; + +type BlobLikePart = string | ArrayBuffer | ArrayBufferView | BlobLike | Uint8Array | DataView; +export type BlobPart = string | ArrayBuffer | ArrayBufferView | Blob | Uint8Array | DataView; + +/** + * Typically, this is a native "File" class. + * + * We provide the {@link toFile} utility to convert a variety of objects + * into the File class. + * + * For convenience, you can also pass a fetch Response, or in Node, + * the result of fs.createReadStream(). + */ +export type Uploadable = FileLike | ResponseLike | FsReadStream; + +/** + * Intended to match web.Blob, node.Blob, node-fetch.Blob, etc. + */ +export interface BlobLike { + /** [MDN Reference](https://developer.mozilla.org/docs/Web/API/Blob/size) */ + readonly size: number; + /** [MDN Reference](https://developer.mozilla.org/docs/Web/API/Blob/type) */ + readonly type: string; + /** [MDN Reference](https://developer.mozilla.org/docs/Web/API/Blob/text) */ + text(): Promise; + /** [MDN Reference](https://developer.mozilla.org/docs/Web/API/Blob/slice) */ + slice(start?: number, end?: number): BlobLike; + // unfortunately @types/node-fetch@^2.6.4 doesn't type the arrayBuffer method +} + +/** + * Intended to match web.File, node.File, node-fetch.File, etc. + */ +export interface FileLike extends BlobLike { + /** [MDN Reference](https://developer.mozilla.org/docs/Web/API/File/lastModified) */ + readonly lastModified: number; + /** [MDN Reference](https://developer.mozilla.org/docs/Web/API/File/name) */ + readonly name: string; +} + +/** + * Intended to match web.Response, node.Response, node-fetch.Response, etc. 
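// Editor's note: illustrative only, not generated code. The Uploadable union defined above accepts
// a File/Blob-like value, a fetch Response, or a Node fs.ReadStream. The URL and file path are
// placeholders, and the global `File` constructor assumes a runtime that provides it (browsers,
// or recent Node versions).
import fs from 'fs';

async function uploadableExamples() {
  // 1. A web File object (FileLike):
  const asFile = new File(['{"prompt":"hi"}\n'], 'data.jsonl');

  // 2. A fetch Response (ResponseLike); its body is read and a name is taken from the URL:
  const asResponse = await fetch('https://example.com/data.jsonl');

  // 3. A Node read stream (FsReadStream); a name is derived from the path:
  const asStream = fs.createReadStream('data.jsonl');

  // Any of the three can be passed wherever Core.Uploadable is expected.
  return [asFile, asResponse, asStream];
}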
+ */ +export interface ResponseLike { + url: string; + blob(): Promise; +} + +export const isResponseLike = (value: any): value is ResponseLike => + value != null && + typeof value === 'object' && + typeof value.url === 'string' && + typeof value.blob === 'function'; + +export const isFileLike = (value: any): value is FileLike => + value != null && + typeof value === 'object' && + typeof value.name === 'string' && + typeof value.lastModified === 'number' && + isBlobLike(value); + +/** + * The BlobLike type omits arrayBuffer() because @types/node-fetch@^2.6.4 lacks it; but this check + * adds the arrayBuffer() method type because it is available and used at runtime + */ +export const isBlobLike = (value: any): value is BlobLike & { arrayBuffer(): Promise } => + value != null && + typeof value === 'object' && + typeof value.size === 'number' && + typeof value.type === 'string' && + typeof value.text === 'function' && + typeof value.slice === 'function' && + typeof value.arrayBuffer === 'function'; + +export const isUploadable = (value: any): value is Uploadable => { + return isFileLike(value) || isResponseLike(value) || isFsReadStream(value); +}; + +export type ToFileInput = Uploadable | Exclude | AsyncIterable; + +/** + * Helper for creating a {@link File} to pass to an SDK upload method from a variety of different data formats + * @param value the raw content of the file. Can be an {@link Uploadable}, {@link BlobLikePart}, or {@link AsyncIterable} of {@link BlobLikePart}s + * @param {string=} name the name of the file. If omitted, toFile will try to determine a file name from bits if possible + * @param {Object=} options additional properties + * @param {string=} options.type the MIME type of the content + * @param {number=} options.lastModified the last modified timestamp + * @returns a {@link File} with the given properties + */ +export async function toFile( + value: ToFileInput | PromiseLike, + name?: string | null | undefined, + options?: FilePropertyBag | undefined, +): Promise { + // If it's a promise, resolve it. + value = await value; + + // If we've been given a `File` we don't need to do anything + if (isFileLike(value)) { + return value; + } + + if (isResponseLike(value)) { + const blob = await value.blob(); + name ||= new URL(value.url).pathname.split(/[\\/]/).pop() ?? 'unknown_file'; + + // we need to convert the `Blob` into an array buffer because the `Blob` class + // that `node-fetch` defines is incompatible with the web standard which results + // in `new File` interpreting it as a string instead of binary data. + const data = isBlobLike(blob) ? [(await blob.arrayBuffer()) as any] : [blob]; + + return new File(data, name, options); + } + + const bits = await getBytes(value); + + name ||= getName(value) ?? 'unknown_file'; + + if (!options?.type) { + const type = (bits[0] as any)?.type; + if (typeof type === 'string') { + options = { ...options, type }; + } + } + + return new File(bits, name, options); +} + +async function getBytes(value: ToFileInput): Promise> { + let parts: Array = []; + if ( + typeof value === 'string' || + ArrayBuffer.isView(value) || // includes Uint8Array, Buffer, etc. + value instanceof ArrayBuffer + ) { + parts.push(value); + } else if (isBlobLike(value)) { + parts.push(await value.arrayBuffer()); + } else if ( + isAsyncIterableIterator(value) // includes Readable, ReadableStream, etc. + ) { + for await (const chunk of value) { + parts.push(chunk as BlobPart); // TODO, consider validating? 
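// Editor's note: hedged sketch of `toFile` as declared above, not generated code. It converts raw
// bytes that are not themselves Uploadable (here a Buffer) into a File, optionally naming it and
// setting a MIME type; the Files API call at the end is assumed from the wider SDK, and the
// purpose and file name are placeholders.
import OpenAI, { toFile } from 'openai';

const client = new OpenAI();

async function uploadTrainingData(rows: object[]): Promise<void> {
  const jsonl = rows.map((row) => JSON.stringify(row)).join('\n');

  // Strings, Buffers, ArrayBuffers, Blobs, fetch Responses, and async iterables are all accepted.
  const file = await toFile(Buffer.from(jsonl), 'training.jsonl', { type: 'application/jsonl' });

  const created = await client.files.create({ file, purpose: 'fine-tune' });
  console.log(created.id);
}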
+ } + } else { + throw new Error( + `Unexpected data type: ${typeof value}; constructor: ${value?.constructor + ?.name}; props: ${propsForError(value)}`, + ); + } + + return parts; +} + +function propsForError(value: any): string { + const props = Object.getOwnPropertyNames(value); + return `[${props.map((p) => `"${p}"`).join(', ')}]`; +} + +function getName(value: any): string | undefined { + return ( + getStringFromMaybeBuffer(value.name) || + getStringFromMaybeBuffer(value.filename) || + // For fs.ReadStream + getStringFromMaybeBuffer(value.path)?.split(/[\\/]/).pop() + ); +} + +const getStringFromMaybeBuffer = (x: string | Buffer | unknown): string | undefined => { + if (typeof x === 'string') return x; + if (typeof Buffer !== 'undefined' && x instanceof Buffer) return String(x); + return undefined; +}; + +const isAsyncIterableIterator = (value: any): value is AsyncIterableIterator => + value != null && typeof value === 'object' && typeof value[Symbol.asyncIterator] === 'function'; + +export const isMultipartBody = (body: any): body is MultipartBody => + body && typeof body === 'object' && body.body && body[Symbol.toStringTag] === 'MultipartBody'; + +/** + * Returns a multipart/form-data request if any part of the given request body contains a File / Blob value. + * Otherwise returns the request as is. + */ +export const maybeMultipartFormRequestOptions = async >( + opts: RequestOptions, +): Promise> => { + if (!hasUploadableValue(opts.body)) return opts; + + const form = await createForm(opts.body); + return getMultipartRequestOptions(form, opts); +}; + +export const multipartFormRequestOptions = async >( + opts: RequestOptions, +): Promise> => { + const form = await createForm(opts.body); + return getMultipartRequestOptions(form, opts); +}; + +export const createForm = async >(body: T | undefined): Promise => { + const form = new FormData(); + await Promise.all(Object.entries(body || {}).map(([key, value]) => addFormValue(form, key, value))); + return form; +}; + +const hasUploadableValue = (value: unknown): boolean => { + if (isUploadable(value)) return true; + if (Array.isArray(value)) return value.some(hasUploadableValue); + if (value && typeof value === 'object') { + for (const k in value) { + if (hasUploadableValue((value as any)[k])) return true; + } + } + return false; +}; + +const addFormValue = async (form: FormData, key: string, value: unknown): Promise => { + if (value === undefined) return; + if (value == null) { + throw new TypeError( + `Received null for "${key}"; to pass null in FormData, you must use the string 'null'`, + ); + } + + // TODO: make nested formats configurable + if (typeof value === 'string' || typeof value === 'number' || typeof value === 'boolean') { + form.append(key, String(value)); + } else if (isUploadable(value)) { + const file = await toFile(value); + form.append(key, file as File); + } else if (Array.isArray(value)) { + await Promise.all(value.map((entry) => addFormValue(form, key + '[]', entry))); + } else if (typeof value === 'object') { + await Promise.all( + Object.entries(value).map(([name, prop]) => addFormValue(form, `${key}[${name}]`, prop)), + ); + } else { + throw new TypeError( + `Invalid value given to form, expected a string, number, boolean, object, Array, File or Blob but got ${value} instead`, + ); + } +}; diff --git a/node_modules/openai/src/version.ts b/node_modules/openai/src/version.ts new file mode 100644 index 0000000..fdf4e52 --- /dev/null +++ b/node_modules/openai/src/version.ts @@ -0,0 +1 @@ +export const VERSION = 
'4.77.0'; // x-release-please-version diff --git a/node_modules/openai/streaming.d.ts b/node_modules/openai/streaming.d.ts new file mode 100644 index 0000000..2714946 --- /dev/null +++ b/node_modules/openai/streaming.d.ts @@ -0,0 +1,41 @@ + +import { ReadableStream, type Response } from "./_shims/index.js"; +export type ServerSentEvent = { + event: string | null; + data: string; + raw: string[]; +}; +export declare class Stream implements AsyncIterable { + private iterator; + controller: AbortController; + constructor(iterator: () => AsyncIterator, controller: AbortController); + static fromSSEResponse(response: Response, controller: AbortController): Stream; + /** + * Generates a Stream from a newline-separated ReadableStream + * where each item is a JSON value. + */ + static fromReadableStream(readableStream: ReadableStream, controller: AbortController): Stream; + [Symbol.asyncIterator](): AsyncIterator; + /** + * Splits the stream into two streams which can be + * independently read from at different speeds. + */ + tee(): [Stream, Stream]; + /** + * Converts this stream to a newline-separated ReadableStream of + * JSON stringified values in the stream + * which can be turned back into a Stream with `Stream.fromReadableStream()`. + */ + toReadableStream(): ReadableStream; +} +export declare function _iterSSEMessages(response: Response, controller: AbortController): AsyncGenerator; +/** This is an internal helper function that's just used for testing */ +export declare function _decodeChunks(chunks: string[]): string[]; +/** + * Most browsers don't yet have async iterable support for ReadableStream, + * and Node has a very different way of reading bytes from its "ReadableStream". + * + * This polyfill was pulled from https://github.com/MattiasBuelens/web-streams-polyfill/pull/122#issuecomment-1627354490 + */ +export declare function readableStreamAsyncIterable(stream: any): AsyncIterableIterator; +//# sourceMappingURL=streaming.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/streaming.d.ts.map b/node_modules/openai/streaming.d.ts.map new file mode 100644 index 0000000..63f357f --- /dev/null +++ b/node_modules/openai/streaming.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"streaming.d.ts","sourceRoot":"","sources":["src/streaming.ts"],"names":[],"mappings":";AAAA,OAAO,EAAE,cAAc,EAAE,KAAK,QAAQ,EAAE,MAAM,gBAAgB,CAAC;AAQ/D,MAAM,MAAM,eAAe,GAAG;IAC5B,KAAK,EAAE,MAAM,GAAG,IAAI,CAAC;IACrB,IAAI,EAAE,MAAM,CAAC;IACb,GAAG,EAAE,MAAM,EAAE,CAAC;CACf,CAAC;AAEF,qBAAa,MAAM,CAAC,IAAI,CAAE,YAAW,aAAa,CAAC,IAAI,CAAC;IAIpD,OAAO,CAAC,QAAQ;IAHlB,UAAU,EAAE,eAAe,CAAC;gBAGlB,QAAQ,EAAE,MAAM,aAAa,CAAC,IAAI,CAAC,EAC3C,UAAU,EAAE,eAAe;IAK7B,MAAM,CAAC,eAAe,CAAC,IAAI,EAAE,QAAQ,EAAE,QAAQ,EAAE,UAAU,EAAE,eAAe,GAAG,MAAM,CAAC,IAAI,CAAC;IAgE3F;;;OAGG;IACH,MAAM,CAAC,kBAAkB,CAAC,IAAI,EAAE,cAAc,EAAE,cAAc,EAAE,UAAU,EAAE,eAAe,GAAG,MAAM,CAAC,IAAI,CAAC;IA2C1G,CAAC,MAAM,CAAC,aAAa,CAAC,IAAI,aAAa,CAAC,IAAI,CAAC;IAI7C;;;OAGG;IACH,GAAG,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE,MAAM,CAAC,IAAI,CAAC,CAAC;IAwBnC;;;;OAIG;IACH,gBAAgB,IAAI,cAAc;CA0BnC;AAED,wBAAuB,gBAAgB,CACrC,QAAQ,EAAE,QAAQ,EAClB,UAAU,EAAE,eAAe,GAC1B,cAAc,CAAC,eAAe,EAAE,IAAI,EAAE,OAAO,CAAC,CAqBhD;AA0HD,uEAAuE;AACvE,wBAAgB,aAAa,CAAC,MAAM,EAAE,MAAM,EAAE,GAAG,MAAM,EAAE,CAQxD;AAWD;;;;;GAKG;AACH,wBAAgB,2BAA2B,CAAC,CAAC,EAAE,MAAM,EAAE,GAAG,GAAG,qBAAqB,CAAC,CAAC,CAAC,CAyBpF"} \ No newline at end of file diff --git a/node_modules/openai/streaming.js b/node_modules/openai/streaming.js new file mode 100644 index 0000000..9c56fac --- /dev/null +++ 
b/node_modules/openai/streaming.js @@ -0,0 +1,352 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.readableStreamAsyncIterable = exports._decodeChunks = exports._iterSSEMessages = exports.Stream = void 0; +const index_1 = require("./_shims/index.js"); +const error_1 = require("./error.js"); +const line_1 = require("./internal/decoders/line.js"); +const error_2 = require("./error.js"); +class Stream { + constructor(iterator, controller) { + this.iterator = iterator; + this.controller = controller; + } + static fromSSEResponse(response, controller) { + let consumed = false; + async function* iterator() { + if (consumed) { + throw new Error('Cannot iterate over a consumed stream, use `.tee()` to split the stream.'); + } + consumed = true; + let done = false; + try { + for await (const sse of _iterSSEMessages(response, controller)) { + if (done) + continue; + if (sse.data.startsWith('[DONE]')) { + done = true; + continue; + } + if (sse.event === null) { + let data; + try { + data = JSON.parse(sse.data); + } + catch (e) { + console.error(`Could not parse message into JSON:`, sse.data); + console.error(`From chunk:`, sse.raw); + throw e; + } + if (data && data.error) { + throw new error_2.APIError(undefined, data.error, undefined, undefined); + } + yield data; + } + else { + let data; + try { + data = JSON.parse(sse.data); + } + catch (e) { + console.error(`Could not parse message into JSON:`, sse.data); + console.error(`From chunk:`, sse.raw); + throw e; + } + // TODO: Is this where the error should be thrown? + if (sse.event == 'error') { + throw new error_2.APIError(undefined, data.error, data.message, undefined); + } + yield { event: sse.event, data: data }; + } + } + done = true; + } + catch (e) { + // If the user calls `stream.controller.abort()`, we should exit without throwing. + if (e instanceof Error && e.name === 'AbortError') + return; + throw e; + } + finally { + // If the user `break`s, abort the ongoing request. + if (!done) + controller.abort(); + } + } + return new Stream(iterator, controller); + } + /** + * Generates a Stream from a newline-separated ReadableStream + * where each item is a JSON value. + */ + static fromReadableStream(readableStream, controller) { + let consumed = false; + async function* iterLines() { + const lineDecoder = new line_1.LineDecoder(); + const iter = readableStreamAsyncIterable(readableStream); + for await (const chunk of iter) { + for (const line of lineDecoder.decode(chunk)) { + yield line; + } + } + for (const line of lineDecoder.flush()) { + yield line; + } + } + async function* iterator() { + if (consumed) { + throw new Error('Cannot iterate over a consumed stream, use `.tee()` to split the stream.'); + } + consumed = true; + let done = false; + try { + for await (const line of iterLines()) { + if (done) + continue; + if (line) + yield JSON.parse(line); + } + done = true; + } + catch (e) { + // If the user calls `stream.controller.abort()`, we should exit without throwing. + if (e instanceof Error && e.name === 'AbortError') + return; + throw e; + } + finally { + // If the user `break`s, abort the ongoing request. + if (!done) + controller.abort(); + } + } + return new Stream(iterator, controller); + } + [Symbol.asyncIterator]() { + return this.iterator(); + } + /** + * Splits the stream into two streams which can be + * independently read from at different speeds. 
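+     *
+     * Illustrative usage sketch (not part of the upstream file): `const [left, right] = stream.tee();`
+     * lets one consumer log raw chunks while another accumulates the final result; results are
+     * queued internally so that both copies see every item.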
+ */ + tee() { + const left = []; + const right = []; + const iterator = this.iterator(); + const teeIterator = (queue) => { + return { + next: () => { + if (queue.length === 0) { + const result = iterator.next(); + left.push(result); + right.push(result); + } + return queue.shift(); + }, + }; + }; + return [ + new Stream(() => teeIterator(left), this.controller), + new Stream(() => teeIterator(right), this.controller), + ]; + } + /** + * Converts this stream to a newline-separated ReadableStream of + * JSON stringified values in the stream + * which can be turned back into a Stream with `Stream.fromReadableStream()`. + */ + toReadableStream() { + const self = this; + let iter; + const encoder = new TextEncoder(); + return new index_1.ReadableStream({ + async start() { + iter = self[Symbol.asyncIterator](); + }, + async pull(ctrl) { + try { + const { value, done } = await iter.next(); + if (done) + return ctrl.close(); + const bytes = encoder.encode(JSON.stringify(value) + '\n'); + ctrl.enqueue(bytes); + } + catch (err) { + ctrl.error(err); + } + }, + async cancel() { + await iter.return?.(); + }, + }); + } +} +exports.Stream = Stream; +async function* _iterSSEMessages(response, controller) { + if (!response.body) { + controller.abort(); + throw new error_1.OpenAIError(`Attempted to iterate over a response with no body`); + } + const sseDecoder = new SSEDecoder(); + const lineDecoder = new line_1.LineDecoder(); + const iter = readableStreamAsyncIterable(response.body); + for await (const sseChunk of iterSSEChunks(iter)) { + for (const line of lineDecoder.decode(sseChunk)) { + const sse = sseDecoder.decode(line); + if (sse) + yield sse; + } + } + for (const line of lineDecoder.flush()) { + const sse = sseDecoder.decode(line); + if (sse) + yield sse; + } +} +exports._iterSSEMessages = _iterSSEMessages; +/** + * Given an async iterable iterator, iterates over it and yields full + * SSE chunks, i.e. yields when a double new-line is encountered. + */ +async function* iterSSEChunks(iterator) { + let data = new Uint8Array(); + for await (const chunk of iterator) { + if (chunk == null) { + continue; + } + const binaryChunk = chunk instanceof ArrayBuffer ? new Uint8Array(chunk) + : typeof chunk === 'string' ? new TextEncoder().encode(chunk) + : chunk; + let newData = new Uint8Array(data.length + binaryChunk.length); + newData.set(data); + newData.set(binaryChunk, data.length); + data = newData; + let patternIndex; + while ((patternIndex = findDoubleNewlineIndex(data)) !== -1) { + yield data.slice(0, patternIndex); + data = data.slice(patternIndex); + } + } + if (data.length > 0) { + yield data; + } +} +function findDoubleNewlineIndex(buffer) { + // This function searches the buffer for the end patterns (\r\r, \n\n, \r\n\r\n) + // and returns the index right after the first occurrence of any pattern, + // or -1 if none of the patterns are found. 
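+    // For example: in the UTF-8 bytes of 'data: 1\n\ndata: 2', the index just past the first
+    // '\n\n' is 9, so 9 is returned and the caller slices off one complete SSE chunk ('data: 1\n\n').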
+ const newline = 0x0a; // \n + const carriage = 0x0d; // \r + for (let i = 0; i < buffer.length - 2; i++) { + if (buffer[i] === newline && buffer[i + 1] === newline) { + // \n\n + return i + 2; + } + if (buffer[i] === carriage && buffer[i + 1] === carriage) { + // \r\r + return i + 2; + } + if (buffer[i] === carriage && + buffer[i + 1] === newline && + i + 3 < buffer.length && + buffer[i + 2] === carriage && + buffer[i + 3] === newline) { + // \r\n\r\n + return i + 4; + } + } + return -1; +} +class SSEDecoder { + constructor() { + this.event = null; + this.data = []; + this.chunks = []; + } + decode(line) { + if (line.endsWith('\r')) { + line = line.substring(0, line.length - 1); + } + if (!line) { + // empty line and we didn't previously encounter any messages + if (!this.event && !this.data.length) + return null; + const sse = { + event: this.event, + data: this.data.join('\n'), + raw: this.chunks, + }; + this.event = null; + this.data = []; + this.chunks = []; + return sse; + } + this.chunks.push(line); + if (line.startsWith(':')) { + return null; + } + let [fieldname, _, value] = partition(line, ':'); + if (value.startsWith(' ')) { + value = value.substring(1); + } + if (fieldname === 'event') { + this.event = value; + } + else if (fieldname === 'data') { + this.data.push(value); + } + return null; + } +} +/** This is an internal helper function that's just used for testing */ +function _decodeChunks(chunks) { + const decoder = new line_1.LineDecoder(); + const lines = []; + for (const chunk of chunks) { + lines.push(...decoder.decode(chunk)); + } + return lines; +} +exports._decodeChunks = _decodeChunks; +function partition(str, delimiter) { + const index = str.indexOf(delimiter); + if (index !== -1) { + return [str.substring(0, index), delimiter, str.substring(index + delimiter.length)]; + } + return [str, '', '']; +} +/** + * Most browsers don't yet have async iterable support for ReadableStream, + * and Node has a very different way of reading bytes from its "ReadableStream". 
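+ *
+ * (Descriptive note: a Node `Readable` already exposes `Symbol.asyncIterator`, so it is returned
+ * as-is below; only reader-based web streams are wrapped via `getReader()`.)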
+ * + * This polyfill was pulled from https://github.com/MattiasBuelens/web-streams-polyfill/pull/122#issuecomment-1627354490 + */ +function readableStreamAsyncIterable(stream) { + if (stream[Symbol.asyncIterator]) + return stream; + const reader = stream.getReader(); + return { + async next() { + try { + const result = await reader.read(); + if (result?.done) + reader.releaseLock(); // release lock when stream becomes closed + return result; + } + catch (e) { + reader.releaseLock(); // release lock when stream becomes errored + throw e; + } + }, + async return() { + const cancelPromise = reader.cancel(); + reader.releaseLock(); + await cancelPromise; + return { done: true, value: undefined }; + }, + [Symbol.asyncIterator]() { + return this; + }, + }; +} +exports.readableStreamAsyncIterable = readableStreamAsyncIterable; +//# sourceMappingURL=streaming.js.map \ No newline at end of file diff --git a/node_modules/openai/streaming.js.map b/node_modules/openai/streaming.js.map new file mode 100644 index 0000000..c192732 --- /dev/null +++ b/node_modules/openai/streaming.js.map @@ -0,0 +1 @@ +{"version":3,"file":"streaming.js","sourceRoot":"","sources":["src/streaming.ts"],"names":[],"mappings":";;;AAAA,6CAA+D;AAC/D,sCAAsC;AACtC,sDAAuD;AAEvD,sCAAmC;AAUnC,MAAa,MAAM;IAGjB,YACU,QAAmC,EAC3C,UAA2B;QADnB,aAAQ,GAAR,QAAQ,CAA2B;QAG3C,IAAI,CAAC,UAAU,GAAG,UAAU,CAAC;IAC/B,CAAC;IAED,MAAM,CAAC,eAAe,CAAO,QAAkB,EAAE,UAA2B;QAC1E,IAAI,QAAQ,GAAG,KAAK,CAAC;QAErB,KAAK,SAAS,CAAC,CAAC,QAAQ;YACtB,IAAI,QAAQ,EAAE;gBACZ,MAAM,IAAI,KAAK,CAAC,0EAA0E,CAAC,CAAC;aAC7F;YACD,QAAQ,GAAG,IAAI,CAAC;YAChB,IAAI,IAAI,GAAG,KAAK,CAAC;YACjB,IAAI;gBACF,IAAI,KAAK,EAAE,MAAM,GAAG,IAAI,gBAAgB,CAAC,QAAQ,EAAE,UAAU,CAAC,EAAE;oBAC9D,IAAI,IAAI;wBAAE,SAAS;oBAEnB,IAAI,GAAG,CAAC,IAAI,CAAC,UAAU,CAAC,QAAQ,CAAC,EAAE;wBACjC,IAAI,GAAG,IAAI,CAAC;wBACZ,SAAS;qBACV;oBAED,IAAI,GAAG,CAAC,KAAK,KAAK,IAAI,EAAE;wBACtB,IAAI,IAAI,CAAC;wBAET,IAAI;4BACF,IAAI,GAAG,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC;yBAC7B;wBAAC,OAAO,CAAC,EAAE;4BACV,OAAO,CAAC,KAAK,CAAC,oCAAoC,EAAE,GAAG,CAAC,IAAI,CAAC,CAAC;4BAC9D,OAAO,CAAC,KAAK,CAAC,aAAa,EAAE,GAAG,CAAC,GAAG,CAAC,CAAC;4BACtC,MAAM,CAAC,CAAC;yBACT;wBAED,IAAI,IAAI,IAAI,IAAI,CAAC,KAAK,EAAE;4BACtB,MAAM,IAAI,gBAAQ,CAAC,SAAS,EAAE,IAAI,CAAC,KAAK,EAAE,SAAS,EAAE,SAAS,CAAC,CAAC;yBACjE;wBAED,MAAM,IAAI,CAAC;qBACZ;yBAAM;wBACL,IAAI,IAAI,CAAC;wBACT,IAAI;4BACF,IAAI,GAAG,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC;yBAC7B;wBAAC,OAAO,CAAC,EAAE;4BACV,OAAO,CAAC,KAAK,CAAC,oCAAoC,EAAE,GAAG,CAAC,IAAI,CAAC,CAAC;4BAC9D,OAAO,CAAC,KAAK,CAAC,aAAa,EAAE,GAAG,CAAC,GAAG,CAAC,CAAC;4BACtC,MAAM,CAAC,CAAC;yBACT;wBACD,kDAAkD;wBAClD,IAAI,GAAG,CAAC,KAAK,IAAI,OAAO,EAAE;4BACxB,MAAM,IAAI,gBAAQ,CAAC,SAAS,EAAE,IAAI,CAAC,KAAK,EAAE,IAAI,CAAC,OAAO,EAAE,SAAS,CAAC,CAAC;yBACpE;wBACD,MAAM,EAAE,KAAK,EAAE,GAAG,CAAC,KAAK,EAAE,IAAI,EAAE,IAAI,EAAS,CAAC;qBAC/C;iBACF;gBACD,IAAI,GAAG,IAAI,CAAC;aACb;YAAC,OAAO,CAAC,EAAE;gBACV,kFAAkF;gBAClF,IAAI,CAAC,YAAY,KAAK,IAAI,CAAC,CAAC,IAAI,KAAK,YAAY;oBAAE,OAAO;gBAC1D,MAAM,CAAC,CAAC;aACT;oBAAS;gBACR,mDAAmD;gBACnD,IAAI,CAAC,IAAI;oBAAE,UAAU,CAAC,KAAK,EAAE,CAAC;aAC/B;QACH,CAAC;QAED,OAAO,IAAI,MAAM,CAAC,QAAQ,EAAE,UAAU,CAAC,CAAC;IAC1C,CAAC;IAED;;;OAGG;IACH,MAAM,CAAC,kBAAkB,CAAO,cAA8B,EAAE,UAA2B;QACzF,IAAI,QAAQ,GAAG,KAAK,CAAC;QAErB,KAAK,SAAS,CAAC,CAAC,SAAS;YACvB,MAAM,WAAW,GAAG,IAAI,kBAAW,EAAE,CAAC;YAEtC,MAAM,IAAI,GAAG,2BAA2B,CAAQ,cAAc,CAAC,CAAC;YAChE,IAAI,KAAK,EAAE,MAAM,KAAK,IAAI,IAAI,EAAE;gBAC9B,KAAK,MAAM,IAAI,IAAI,WAAW,CAAC,MAAM,CAAC,KAAK,CAAC,EAAE;oBAC5C,MAAM,IAAI,CAAC;iBACZ;aACF;YAED,KAAK,MAAM,IAAI,IAAI,WAAW,CAAC,KAAK,EAAE,EAAE;gBACtC,MAAM,IAAI,CAAC;aACZ;QA
CH,CAAC;QAED,KAAK,SAAS,CAAC,CAAC,QAAQ;YACtB,IAAI,QAAQ,EAAE;gBACZ,MAAM,IAAI,KAAK,CAAC,0EAA0E,CAAC,CAAC;aAC7F;YACD,QAAQ,GAAG,IAAI,CAAC;YAChB,IAAI,IAAI,GAAG,KAAK,CAAC;YACjB,IAAI;gBACF,IAAI,KAAK,EAAE,MAAM,IAAI,IAAI,SAAS,EAAE,EAAE;oBACpC,IAAI,IAAI;wBAAE,SAAS;oBACnB,IAAI,IAAI;wBAAE,MAAM,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;iBAClC;gBACD,IAAI,GAAG,IAAI,CAAC;aACb;YAAC,OAAO,CAAC,EAAE;gBACV,kFAAkF;gBAClF,IAAI,CAAC,YAAY,KAAK,IAAI,CAAC,CAAC,IAAI,KAAK,YAAY;oBAAE,OAAO;gBAC1D,MAAM,CAAC,CAAC;aACT;oBAAS;gBACR,mDAAmD;gBACnD,IAAI,CAAC,IAAI;oBAAE,UAAU,CAAC,KAAK,EAAE,CAAC;aAC/B;QACH,CAAC;QAED,OAAO,IAAI,MAAM,CAAC,QAAQ,EAAE,UAAU,CAAC,CAAC;IAC1C,CAAC;IAED,CAAC,MAAM,CAAC,aAAa,CAAC;QACpB,OAAO,IAAI,CAAC,QAAQ,EAAE,CAAC;IACzB,CAAC;IAED;;;OAGG;IACH,GAAG;QACD,MAAM,IAAI,GAAyC,EAAE,CAAC;QACtD,MAAM,KAAK,GAAyC,EAAE,CAAC;QACvD,MAAM,QAAQ,GAAG,IAAI,CAAC,QAAQ,EAAE,CAAC;QAEjC,MAAM,WAAW,GAAG,CAAC,KAA2C,EAAuB,EAAE;YACvF,OAAO;gBACL,IAAI,EAAE,GAAG,EAAE;oBACT,IAAI,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE;wBACtB,MAAM,MAAM,GAAG,QAAQ,CAAC,IAAI,EAAE,CAAC;wBAC/B,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;wBAClB,KAAK,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;qBACpB;oBACD,OAAO,KAAK,CAAC,KAAK,EAAG,CAAC;gBACxB,CAAC;aACF,CAAC;QACJ,CAAC,CAAC;QAEF,OAAO;YACL,IAAI,MAAM,CAAC,GAAG,EAAE,CAAC,WAAW,CAAC,IAAI,CAAC,EAAE,IAAI,CAAC,UAAU,CAAC;YACpD,IAAI,MAAM,CAAC,GAAG,EAAE,CAAC,WAAW,CAAC,KAAK,CAAC,EAAE,IAAI,CAAC,UAAU,CAAC;SACtD,CAAC;IACJ,CAAC;IAED;;;;OAIG;IACH,gBAAgB;QACd,MAAM,IAAI,GAAG,IAAI,CAAC;QAClB,IAAI,IAAyB,CAAC;QAC9B,MAAM,OAAO,GAAG,IAAI,WAAW,EAAE,CAAC;QAElC,OAAO,IAAI,sBAAc,CAAC;YACxB,KAAK,CAAC,KAAK;gBACT,IAAI,GAAG,IAAI,CAAC,MAAM,CAAC,aAAa,CAAC,EAAE,CAAC;YACtC,CAAC;YACD,KAAK,CAAC,IAAI,CAAC,IAAS;gBAClB,IAAI;oBACF,MAAM,EAAE,KAAK,EAAE,IAAI,EAAE,GAAG,MAAM,IAAI,CAAC,IAAI,EAAE,CAAC;oBAC1C,IAAI,IAAI;wBAAE,OAAO,IAAI,CAAC,KAAK,EAAE,CAAC;oBAE9B,MAAM,KAAK,GAAG,OAAO,CAAC,MAAM,CAAC,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,GAAG,IAAI,CAAC,CAAC;oBAE3D,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC;iBACrB;gBAAC,OAAO,GAAG,EAAE;oBACZ,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;iBACjB;YACH,CAAC;YACD,KAAK,CAAC,MAAM;gBACV,MAAM,IAAI,CAAC,MAAM,EAAE,EAAE,CAAC;YACxB,CAAC;SACF,CAAC,CAAC;IACL,CAAC;CACF;AAxLD,wBAwLC;AAEM,KAAK,SAAS,CAAC,CAAC,gBAAgB,CACrC,QAAkB,EAClB,UAA2B;IAE3B,IAAI,CAAC,QAAQ,CAAC,IAAI,EAAE;QAClB,UAAU,CAAC,KAAK,EAAE,CAAC;QACnB,MAAM,IAAI,mBAAW,CAAC,mDAAmD,CAAC,CAAC;KAC5E;IAED,MAAM,UAAU,GAAG,IAAI,UAAU,EAAE,CAAC;IACpC,MAAM,WAAW,GAAG,IAAI,kBAAW,EAAE,CAAC;IAEtC,MAAM,IAAI,GAAG,2BAA2B,CAAQ,QAAQ,CAAC,IAAI,CAAC,CAAC;IAC/D,IAAI,KAAK,EAAE,MAAM,QAAQ,IAAI,aAAa,CAAC,IAAI,CAAC,EAAE;QAChD,KAAK,MAAM,IAAI,IAAI,WAAW,CAAC,MAAM,CAAC,QAAQ,CAAC,EAAE;YAC/C,MAAM,GAAG,GAAG,UAAU,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC;YACpC,IAAI,GAAG;gBAAE,MAAM,GAAG,CAAC;SACpB;KACF;IAED,KAAK,MAAM,IAAI,IAAI,WAAW,CAAC,KAAK,EAAE,EAAE;QACtC,MAAM,GAAG,GAAG,UAAU,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC;QACpC,IAAI,GAAG;YAAE,MAAM,GAAG,CAAC;KACpB;AACH,CAAC;AAxBD,4CAwBC;AAED;;;GAGG;AACH,KAAK,SAAS,CAAC,CAAC,aAAa,CAAC,QAAsC;IAClE,IAAI,IAAI,GAAG,IAAI,UAAU,EAAE,CAAC;IAE5B,IAAI,KAAK,EAAE,MAAM,KAAK,IAAI,QAAQ,EAAE;QAClC,IAAI,KAAK,IAAI,IAAI,EAAE;YACjB,SAAS;SACV;QAED,MAAM,WAAW,GACf,KAAK,YAAY,WAAW,CAAC,CAAC,CAAC,IAAI,UAAU,CAAC,KAAK,CAAC;YACpD,CAAC,CAAC,OAAO,KAAK,KAAK,QAAQ,CAAC,CAAC,CAAC,IAAI,WAAW,EAAE,CAAC,MAAM,CAAC,KAAK,CAAC;gBAC7D,CAAC,CAAC,KAAK,CAAC;QAEV,IAAI,OAAO,GAAG,IAAI,UAAU,CAAC,IAAI,CAAC,MAAM,GAAG,WAAW,CAAC,MAAM,CAAC,CAAC;QAC/D,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC;QAClB,OAAO,CAAC,GAAG,CAAC,WAAW,EAAE,IAAI,CAAC,MAAM,CAAC,CAAC;QACtC,IAAI,GAAG,OAAO,CAAC;QAEf,IAAI,YAAY,CAAC;QACjB,OAAO,CAAC,YAAY,GAAG,sBAAsB,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,CAAC,EAAE;YAC3D,MAAM,IAAI,CAAC,KAAK,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;YAClC,IAAI
,GAAG,IAAI,CAAC,KAAK,CAAC,YAAY,CAAC,CAAC;SACjC;KACF;IAED,IAAI,IAAI,CAAC,MAAM,GAAG,CAAC,EAAE;QACnB,MAAM,IAAI,CAAC;KACZ;AACH,CAAC;AAED,SAAS,sBAAsB,CAAC,MAAkB;IAChD,gFAAgF;IAChF,yEAAyE;IACzE,2CAA2C;IAC3C,MAAM,OAAO,GAAG,IAAI,CAAC,CAAC,KAAK;IAC3B,MAAM,QAAQ,GAAG,IAAI,CAAC,CAAC,KAAK;IAE5B,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC,EAAE,EAAE;QAC1C,IAAI,MAAM,CAAC,CAAC,CAAC,KAAK,OAAO,IAAI,MAAM,CAAC,CAAC,GAAG,CAAC,CAAC,KAAK,OAAO,EAAE;YACtD,OAAO;YACP,OAAO,CAAC,GAAG,CAAC,CAAC;SACd;QACD,IAAI,MAAM,CAAC,CAAC,CAAC,KAAK,QAAQ,IAAI,MAAM,CAAC,CAAC,GAAG,CAAC,CAAC,KAAK,QAAQ,EAAE;YACxD,OAAO;YACP,OAAO,CAAC,GAAG,CAAC,CAAC;SACd;QACD,IACE,MAAM,CAAC,CAAC,CAAC,KAAK,QAAQ;YACtB,MAAM,CAAC,CAAC,GAAG,CAAC,CAAC,KAAK,OAAO;YACzB,CAAC,GAAG,CAAC,GAAG,MAAM,CAAC,MAAM;YACrB,MAAM,CAAC,CAAC,GAAG,CAAC,CAAC,KAAK,QAAQ;YAC1B,MAAM,CAAC,CAAC,GAAG,CAAC,CAAC,KAAK,OAAO,EACzB;YACA,WAAW;YACX,OAAO,CAAC,GAAG,CAAC,CAAC;SACd;KACF;IAED,OAAO,CAAC,CAAC,CAAC;AACZ,CAAC;AAED,MAAM,UAAU;IAKd;QACE,IAAI,CAAC,KAAK,GAAG,IAAI,CAAC;QAClB,IAAI,CAAC,IAAI,GAAG,EAAE,CAAC;QACf,IAAI,CAAC,MAAM,GAAG,EAAE,CAAC;IACnB,CAAC;IAED,MAAM,CAAC,IAAY;QACjB,IAAI,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,EAAE;YACvB,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,CAAC,EAAE,IAAI,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC;SAC3C;QAED,IAAI,CAAC,IAAI,EAAE;YACT,6DAA6D;YAC7D,IAAI,CAAC,IAAI,CAAC,KAAK,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM;gBAAE,OAAO,IAAI,CAAC;YAElD,MAAM,GAAG,GAAoB;gBAC3B,KAAK,EAAE,IAAI,CAAC,KAAK;gBACjB,IAAI,EAAE,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC;gBAC1B,GAAG,EAAE,IAAI,CAAC,MAAM;aACjB,CAAC;YAEF,IAAI,CAAC,KAAK,GAAG,IAAI,CAAC;YAClB,IAAI,CAAC,IAAI,GAAG,EAAE,CAAC;YACf,IAAI,CAAC,MAAM,GAAG,EAAE,CAAC;YAEjB,OAAO,GAAG,CAAC;SACZ;QAED,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;QAEvB,IAAI,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,EAAE;YACxB,OAAO,IAAI,CAAC;SACb;QAED,IAAI,CAAC,SAAS,EAAE,CAAC,EAAE,KAAK,CAAC,GAAG,SAAS,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC;QAEjD,IAAI,KAAK,CAAC,UAAU,CAAC,GAAG,CAAC,EAAE;YACzB,KAAK,GAAG,KAAK,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC;SAC5B;QAED,IAAI,SAAS,KAAK,OAAO,EAAE;YACzB,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC;SACpB;aAAM,IAAI,SAAS,KAAK,MAAM,EAAE;YAC/B,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;SACvB;QAED,OAAO,IAAI,CAAC;IACd,CAAC;CACF;AAED,uEAAuE;AACvE,SAAgB,aAAa,CAAC,MAAgB;IAC5C,MAAM,OAAO,GAAG,IAAI,kBAAW,EAAE,CAAC;IAClC,MAAM,KAAK,GAAa,EAAE,CAAC;IAC3B,KAAK,MAAM,KAAK,IAAI,MAAM,EAAE;QAC1B,KAAK,CAAC,IAAI,CAAC,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC;KACtC;IAED,OAAO,KAAK,CAAC;AACf,CAAC;AARD,sCAQC;AAED,SAAS,SAAS,CAAC,GAAW,EAAE,SAAiB;IAC/C,MAAM,KAAK,GAAG,GAAG,CAAC,OAAO,CAAC,SAAS,CAAC,CAAC;IACrC,IAAI,KAAK,KAAK,CAAC,CAAC,EAAE;QAChB,OAAO,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC,EAAE,KAAK,CAAC,EAAE,SAAS,EAAE,GAAG,CAAC,SAAS,CAAC,KAAK,GAAG,SAAS,CAAC,MAAM,CAAC,CAAC,CAAC;KACtF;IAED,OAAO,CAAC,GAAG,EAAE,EAAE,EAAE,EAAE,CAAC,CAAC;AACvB,CAAC;AAED;;;;;GAKG;AACH,SAAgB,2BAA2B,CAAI,MAAW;IACxD,IAAI,MAAM,CAAC,MAAM,CAAC,aAAa,CAAC;QAAE,OAAO,MAAM,CAAC;IAEhD,MAAM,MAAM,GAAG,MAAM,CAAC,SAAS,EAAE,CAAC;IAClC,OAAO;QACL,KAAK,CAAC,IAAI;YACR,IAAI;gBACF,MAAM,MAAM,GAAG,MAAM,MAAM,CAAC,IAAI,EAAE,CAAC;gBACnC,IAAI,MAAM,EAAE,IAAI;oBAAE,MAAM,CAAC,WAAW,EAAE,CAAC,CAAC,0CAA0C;gBAClF,OAAO,MAAM,CAAC;aACf;YAAC,OAAO,CAAC,EAAE;gBACV,MAAM,CAAC,WAAW,EAAE,CAAC,CAAC,2CAA2C;gBACjE,MAAM,CAAC,CAAC;aACT;QACH,CAAC;QACD,KAAK,CAAC,MAAM;YACV,MAAM,aAAa,GAAG,MAAM,CAAC,MAAM,EAAE,CAAC;YACtC,MAAM,CAAC,WAAW,EAAE,CAAC;YACrB,MAAM,aAAa,CAAC;YACpB,OAAO,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,SAAS,EAAE,CAAC;QAC1C,CAAC;QACD,CAAC,MAAM,CAAC,aAAa,CAAC;YACpB,OAAO,IAAI,CAAC;QACd,CAAC;KACF,CAAC;AACJ,CAAC;AAzBD,kEAyBC"} \ No newline at end of file diff --git a/node_modules/openai/streaming.mjs 
b/node_modules/openai/streaming.mjs new file mode 100644 index 0000000..0a25684 --- /dev/null +++ b/node_modules/openai/streaming.mjs @@ -0,0 +1,345 @@ +import { ReadableStream } from "./_shims/index.mjs"; +import { OpenAIError } from "./error.mjs"; +import { LineDecoder } from "./internal/decoders/line.mjs"; +import { APIError } from "./error.mjs"; +export class Stream { + constructor(iterator, controller) { + this.iterator = iterator; + this.controller = controller; + } + static fromSSEResponse(response, controller) { + let consumed = false; + async function* iterator() { + if (consumed) { + throw new Error('Cannot iterate over a consumed stream, use `.tee()` to split the stream.'); + } + consumed = true; + let done = false; + try { + for await (const sse of _iterSSEMessages(response, controller)) { + if (done) + continue; + if (sse.data.startsWith('[DONE]')) { + done = true; + continue; + } + if (sse.event === null) { + let data; + try { + data = JSON.parse(sse.data); + } + catch (e) { + console.error(`Could not parse message into JSON:`, sse.data); + console.error(`From chunk:`, sse.raw); + throw e; + } + if (data && data.error) { + throw new APIError(undefined, data.error, undefined, undefined); + } + yield data; + } + else { + let data; + try { + data = JSON.parse(sse.data); + } + catch (e) { + console.error(`Could not parse message into JSON:`, sse.data); + console.error(`From chunk:`, sse.raw); + throw e; + } + // TODO: Is this where the error should be thrown? + if (sse.event == 'error') { + throw new APIError(undefined, data.error, data.message, undefined); + } + yield { event: sse.event, data: data }; + } + } + done = true; + } + catch (e) { + // If the user calls `stream.controller.abort()`, we should exit without throwing. + if (e instanceof Error && e.name === 'AbortError') + return; + throw e; + } + finally { + // If the user `break`s, abort the ongoing request. + if (!done) + controller.abort(); + } + } + return new Stream(iterator, controller); + } + /** + * Generates a Stream from a newline-separated ReadableStream + * where each item is a JSON value. + */ + static fromReadableStream(readableStream, controller) { + let consumed = false; + async function* iterLines() { + const lineDecoder = new LineDecoder(); + const iter = readableStreamAsyncIterable(readableStream); + for await (const chunk of iter) { + for (const line of lineDecoder.decode(chunk)) { + yield line; + } + } + for (const line of lineDecoder.flush()) { + yield line; + } + } + async function* iterator() { + if (consumed) { + throw new Error('Cannot iterate over a consumed stream, use `.tee()` to split the stream.'); + } + consumed = true; + let done = false; + try { + for await (const line of iterLines()) { + if (done) + continue; + if (line) + yield JSON.parse(line); + } + done = true; + } + catch (e) { + // If the user calls `stream.controller.abort()`, we should exit without throwing. + if (e instanceof Error && e.name === 'AbortError') + return; + throw e; + } + finally { + // If the user `break`s, abort the ongoing request. + if (!done) + controller.abort(); + } + } + return new Stream(iterator, controller); + } + [Symbol.asyncIterator]() { + return this.iterator(); + } + /** + * Splits the stream into two streams which can be + * independently read from at different speeds. 
+ */ + tee() { + const left = []; + const right = []; + const iterator = this.iterator(); + const teeIterator = (queue) => { + return { + next: () => { + if (queue.length === 0) { + const result = iterator.next(); + left.push(result); + right.push(result); + } + return queue.shift(); + }, + }; + }; + return [ + new Stream(() => teeIterator(left), this.controller), + new Stream(() => teeIterator(right), this.controller), + ]; + } + /** + * Converts this stream to a newline-separated ReadableStream of + * JSON stringified values in the stream + * which can be turned back into a Stream with `Stream.fromReadableStream()`. + */ + toReadableStream() { + const self = this; + let iter; + const encoder = new TextEncoder(); + return new ReadableStream({ + async start() { + iter = self[Symbol.asyncIterator](); + }, + async pull(ctrl) { + try { + const { value, done } = await iter.next(); + if (done) + return ctrl.close(); + const bytes = encoder.encode(JSON.stringify(value) + '\n'); + ctrl.enqueue(bytes); + } + catch (err) { + ctrl.error(err); + } + }, + async cancel() { + await iter.return?.(); + }, + }); + } +} +export async function* _iterSSEMessages(response, controller) { + if (!response.body) { + controller.abort(); + throw new OpenAIError(`Attempted to iterate over a response with no body`); + } + const sseDecoder = new SSEDecoder(); + const lineDecoder = new LineDecoder(); + const iter = readableStreamAsyncIterable(response.body); + for await (const sseChunk of iterSSEChunks(iter)) { + for (const line of lineDecoder.decode(sseChunk)) { + const sse = sseDecoder.decode(line); + if (sse) + yield sse; + } + } + for (const line of lineDecoder.flush()) { + const sse = sseDecoder.decode(line); + if (sse) + yield sse; + } +} +/** + * Given an async iterable iterator, iterates over it and yields full + * SSE chunks, i.e. yields when a double new-line is encountered. + */ +async function* iterSSEChunks(iterator) { + let data = new Uint8Array(); + for await (const chunk of iterator) { + if (chunk == null) { + continue; + } + const binaryChunk = chunk instanceof ArrayBuffer ? new Uint8Array(chunk) + : typeof chunk === 'string' ? new TextEncoder().encode(chunk) + : chunk; + let newData = new Uint8Array(data.length + binaryChunk.length); + newData.set(data); + newData.set(binaryChunk, data.length); + data = newData; + let patternIndex; + while ((patternIndex = findDoubleNewlineIndex(data)) !== -1) { + yield data.slice(0, patternIndex); + data = data.slice(patternIndex); + } + } + if (data.length > 0) { + yield data; + } +} +function findDoubleNewlineIndex(buffer) { + // This function searches the buffer for the end patterns (\r\r, \n\n, \r\n\r\n) + // and returns the index right after the first occurrence of any pattern, + // or -1 if none of the patterns are found. 
+ const newline = 0x0a; // \n + const carriage = 0x0d; // \r + for (let i = 0; i < buffer.length - 2; i++) { + if (buffer[i] === newline && buffer[i + 1] === newline) { + // \n\n + return i + 2; + } + if (buffer[i] === carriage && buffer[i + 1] === carriage) { + // \r\r + return i + 2; + } + if (buffer[i] === carriage && + buffer[i + 1] === newline && + i + 3 < buffer.length && + buffer[i + 2] === carriage && + buffer[i + 3] === newline) { + // \r\n\r\n + return i + 4; + } + } + return -1; +} +class SSEDecoder { + constructor() { + this.event = null; + this.data = []; + this.chunks = []; + } + decode(line) { + if (line.endsWith('\r')) { + line = line.substring(0, line.length - 1); + } + if (!line) { + // empty line and we didn't previously encounter any messages + if (!this.event && !this.data.length) + return null; + const sse = { + event: this.event, + data: this.data.join('\n'), + raw: this.chunks, + }; + this.event = null; + this.data = []; + this.chunks = []; + return sse; + } + this.chunks.push(line); + if (line.startsWith(':')) { + return null; + } + let [fieldname, _, value] = partition(line, ':'); + if (value.startsWith(' ')) { + value = value.substring(1); + } + if (fieldname === 'event') { + this.event = value; + } + else if (fieldname === 'data') { + this.data.push(value); + } + return null; + } +} +/** This is an internal helper function that's just used for testing */ +export function _decodeChunks(chunks) { + const decoder = new LineDecoder(); + const lines = []; + for (const chunk of chunks) { + lines.push(...decoder.decode(chunk)); + } + return lines; +} +function partition(str, delimiter) { + const index = str.indexOf(delimiter); + if (index !== -1) { + return [str.substring(0, index), delimiter, str.substring(index + delimiter.length)]; + } + return [str, '', '']; +} +/** + * Most browsers don't yet have async iterable support for ReadableStream, + * and Node has a very different way of reading bytes from its "ReadableStream". 
+ * + * This polyfill was pulled from https://github.com/MattiasBuelens/web-streams-polyfill/pull/122#issuecomment-1627354490 + */ +export function readableStreamAsyncIterable(stream) { + if (stream[Symbol.asyncIterator]) + return stream; + const reader = stream.getReader(); + return { + async next() { + try { + const result = await reader.read(); + if (result?.done) + reader.releaseLock(); // release lock when stream becomes closed + return result; + } + catch (e) { + reader.releaseLock(); // release lock when stream becomes errored + throw e; + } + }, + async return() { + const cancelPromise = reader.cancel(); + reader.releaseLock(); + await cancelPromise; + return { done: true, value: undefined }; + }, + [Symbol.asyncIterator]() { + return this; + }, + }; +} +//# sourceMappingURL=streaming.mjs.map \ No newline at end of file diff --git a/node_modules/openai/streaming.mjs.map b/node_modules/openai/streaming.mjs.map new file mode 100644 index 0000000..b82f54f --- /dev/null +++ b/node_modules/openai/streaming.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"streaming.mjs","sourceRoot":"","sources":["src/streaming.ts"],"names":[],"mappings":"OAAO,EAAE,cAAc,EAAiB;OACjC,EAAE,WAAW,EAAE;OACf,EAAE,WAAW,EAAE;OAEf,EAAE,QAAQ,EAAE;AAUnB,MAAM,OAAO,MAAM;IAGjB,YACU,QAAmC,EAC3C,UAA2B;QADnB,aAAQ,GAAR,QAAQ,CAA2B;QAG3C,IAAI,CAAC,UAAU,GAAG,UAAU,CAAC;IAC/B,CAAC;IAED,MAAM,CAAC,eAAe,CAAO,QAAkB,EAAE,UAA2B;QAC1E,IAAI,QAAQ,GAAG,KAAK,CAAC;QAErB,KAAK,SAAS,CAAC,CAAC,QAAQ;YACtB,IAAI,QAAQ,EAAE;gBACZ,MAAM,IAAI,KAAK,CAAC,0EAA0E,CAAC,CAAC;aAC7F;YACD,QAAQ,GAAG,IAAI,CAAC;YAChB,IAAI,IAAI,GAAG,KAAK,CAAC;YACjB,IAAI;gBACF,IAAI,KAAK,EAAE,MAAM,GAAG,IAAI,gBAAgB,CAAC,QAAQ,EAAE,UAAU,CAAC,EAAE;oBAC9D,IAAI,IAAI;wBAAE,SAAS;oBAEnB,IAAI,GAAG,CAAC,IAAI,CAAC,UAAU,CAAC,QAAQ,CAAC,EAAE;wBACjC,IAAI,GAAG,IAAI,CAAC;wBACZ,SAAS;qBACV;oBAED,IAAI,GAAG,CAAC,KAAK,KAAK,IAAI,EAAE;wBACtB,IAAI,IAAI,CAAC;wBAET,IAAI;4BACF,IAAI,GAAG,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC;yBAC7B;wBAAC,OAAO,CAAC,EAAE;4BACV,OAAO,CAAC,KAAK,CAAC,oCAAoC,EAAE,GAAG,CAAC,IAAI,CAAC,CAAC;4BAC9D,OAAO,CAAC,KAAK,CAAC,aAAa,EAAE,GAAG,CAAC,GAAG,CAAC,CAAC;4BACtC,MAAM,CAAC,CAAC;yBACT;wBAED,IAAI,IAAI,IAAI,IAAI,CAAC,KAAK,EAAE;4BACtB,MAAM,IAAI,QAAQ,CAAC,SAAS,EAAE,IAAI,CAAC,KAAK,EAAE,SAAS,EAAE,SAAS,CAAC,CAAC;yBACjE;wBAED,MAAM,IAAI,CAAC;qBACZ;yBAAM;wBACL,IAAI,IAAI,CAAC;wBACT,IAAI;4BACF,IAAI,GAAG,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC;yBAC7B;wBAAC,OAAO,CAAC,EAAE;4BACV,OAAO,CAAC,KAAK,CAAC,oCAAoC,EAAE,GAAG,CAAC,IAAI,CAAC,CAAC;4BAC9D,OAAO,CAAC,KAAK,CAAC,aAAa,EAAE,GAAG,CAAC,GAAG,CAAC,CAAC;4BACtC,MAAM,CAAC,CAAC;yBACT;wBACD,kDAAkD;wBAClD,IAAI,GAAG,CAAC,KAAK,IAAI,OAAO,EAAE;4BACxB,MAAM,IAAI,QAAQ,CAAC,SAAS,EAAE,IAAI,CAAC,KAAK,EAAE,IAAI,CAAC,OAAO,EAAE,SAAS,CAAC,CAAC;yBACpE;wBACD,MAAM,EAAE,KAAK,EAAE,GAAG,CAAC,KAAK,EAAE,IAAI,EAAE,IAAI,EAAS,CAAC;qBAC/C;iBACF;gBACD,IAAI,GAAG,IAAI,CAAC;aACb;YAAC,OAAO,CAAC,EAAE;gBACV,kFAAkF;gBAClF,IAAI,CAAC,YAAY,KAAK,IAAI,CAAC,CAAC,IAAI,KAAK,YAAY;oBAAE,OAAO;gBAC1D,MAAM,CAAC,CAAC;aACT;oBAAS;gBACR,mDAAmD;gBACnD,IAAI,CAAC,IAAI;oBAAE,UAAU,CAAC,KAAK,EAAE,CAAC;aAC/B;QACH,CAAC;QAED,OAAO,IAAI,MAAM,CAAC,QAAQ,EAAE,UAAU,CAAC,CAAC;IAC1C,CAAC;IAED;;;OAGG;IACH,MAAM,CAAC,kBAAkB,CAAO,cAA8B,EAAE,UAA2B;QACzF,IAAI,QAAQ,GAAG,KAAK,CAAC;QAErB,KAAK,SAAS,CAAC,CAAC,SAAS;YACvB,MAAM,WAAW,GAAG,IAAI,WAAW,EAAE,CAAC;YAEtC,MAAM,IAAI,GAAG,2BAA2B,CAAQ,cAAc,CAAC,CAAC;YAChE,IAAI,KAAK,EAAE,MAAM,KAAK,IAAI,IAAI,EAAE;gBAC9B,KAAK,MAAM,IAAI,IAAI,WAAW,CAAC,MAAM,CAAC,KAAK,CAAC,EAAE;oBAC5C,MAAM,IAAI,CAAC;iBACZ;aACF;YAED,KAAK,MAAM,IAAI,IAAI,WAAW,CAAC,KAAK,EAAE,EAAE;gBACtC,MAAM,IAAI,CAAC;aACZ;QACH,CAAC;QAED,KAAK,SAAS,CAA
C,CAAC,QAAQ;YACtB,IAAI,QAAQ,EAAE;gBACZ,MAAM,IAAI,KAAK,CAAC,0EAA0E,CAAC,CAAC;aAC7F;YACD,QAAQ,GAAG,IAAI,CAAC;YAChB,IAAI,IAAI,GAAG,KAAK,CAAC;YACjB,IAAI;gBACF,IAAI,KAAK,EAAE,MAAM,IAAI,IAAI,SAAS,EAAE,EAAE;oBACpC,IAAI,IAAI;wBAAE,SAAS;oBACnB,IAAI,IAAI;wBAAE,MAAM,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;iBAClC;gBACD,IAAI,GAAG,IAAI,CAAC;aACb;YAAC,OAAO,CAAC,EAAE;gBACV,kFAAkF;gBAClF,IAAI,CAAC,YAAY,KAAK,IAAI,CAAC,CAAC,IAAI,KAAK,YAAY;oBAAE,OAAO;gBAC1D,MAAM,CAAC,CAAC;aACT;oBAAS;gBACR,mDAAmD;gBACnD,IAAI,CAAC,IAAI;oBAAE,UAAU,CAAC,KAAK,EAAE,CAAC;aAC/B;QACH,CAAC;QAED,OAAO,IAAI,MAAM,CAAC,QAAQ,EAAE,UAAU,CAAC,CAAC;IAC1C,CAAC;IAED,CAAC,MAAM,CAAC,aAAa,CAAC;QACpB,OAAO,IAAI,CAAC,QAAQ,EAAE,CAAC;IACzB,CAAC;IAED;;;OAGG;IACH,GAAG;QACD,MAAM,IAAI,GAAyC,EAAE,CAAC;QACtD,MAAM,KAAK,GAAyC,EAAE,CAAC;QACvD,MAAM,QAAQ,GAAG,IAAI,CAAC,QAAQ,EAAE,CAAC;QAEjC,MAAM,WAAW,GAAG,CAAC,KAA2C,EAAuB,EAAE;YACvF,OAAO;gBACL,IAAI,EAAE,GAAG,EAAE;oBACT,IAAI,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE;wBACtB,MAAM,MAAM,GAAG,QAAQ,CAAC,IAAI,EAAE,CAAC;wBAC/B,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;wBAClB,KAAK,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;qBACpB;oBACD,OAAO,KAAK,CAAC,KAAK,EAAG,CAAC;gBACxB,CAAC;aACF,CAAC;QACJ,CAAC,CAAC;QAEF,OAAO;YACL,IAAI,MAAM,CAAC,GAAG,EAAE,CAAC,WAAW,CAAC,IAAI,CAAC,EAAE,IAAI,CAAC,UAAU,CAAC;YACpD,IAAI,MAAM,CAAC,GAAG,EAAE,CAAC,WAAW,CAAC,KAAK,CAAC,EAAE,IAAI,CAAC,UAAU,CAAC;SACtD,CAAC;IACJ,CAAC;IAED;;;;OAIG;IACH,gBAAgB;QACd,MAAM,IAAI,GAAG,IAAI,CAAC;QAClB,IAAI,IAAyB,CAAC;QAC9B,MAAM,OAAO,GAAG,IAAI,WAAW,EAAE,CAAC;QAElC,OAAO,IAAI,cAAc,CAAC;YACxB,KAAK,CAAC,KAAK;gBACT,IAAI,GAAG,IAAI,CAAC,MAAM,CAAC,aAAa,CAAC,EAAE,CAAC;YACtC,CAAC;YACD,KAAK,CAAC,IAAI,CAAC,IAAS;gBAClB,IAAI;oBACF,MAAM,EAAE,KAAK,EAAE,IAAI,EAAE,GAAG,MAAM,IAAI,CAAC,IAAI,EAAE,CAAC;oBAC1C,IAAI,IAAI;wBAAE,OAAO,IAAI,CAAC,KAAK,EAAE,CAAC;oBAE9B,MAAM,KAAK,GAAG,OAAO,CAAC,MAAM,CAAC,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,GAAG,IAAI,CAAC,CAAC;oBAE3D,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC;iBACrB;gBAAC,OAAO,GAAG,EAAE;oBACZ,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;iBACjB;YACH,CAAC;YACD,KAAK,CAAC,MAAM;gBACV,MAAM,IAAI,CAAC,MAAM,EAAE,EAAE,CAAC;YACxB,CAAC;SACF,CAAC,CAAC;IACL,CAAC;CACF;AAED,MAAM,CAAC,KAAK,SAAS,CAAC,CAAC,gBAAgB,CACrC,QAAkB,EAClB,UAA2B;IAE3B,IAAI,CAAC,QAAQ,CAAC,IAAI,EAAE;QAClB,UAAU,CAAC,KAAK,EAAE,CAAC;QACnB,MAAM,IAAI,WAAW,CAAC,mDAAmD,CAAC,CAAC;KAC5E;IAED,MAAM,UAAU,GAAG,IAAI,UAAU,EAAE,CAAC;IACpC,MAAM,WAAW,GAAG,IAAI,WAAW,EAAE,CAAC;IAEtC,MAAM,IAAI,GAAG,2BAA2B,CAAQ,QAAQ,CAAC,IAAI,CAAC,CAAC;IAC/D,IAAI,KAAK,EAAE,MAAM,QAAQ,IAAI,aAAa,CAAC,IAAI,CAAC,EAAE;QAChD,KAAK,MAAM,IAAI,IAAI,WAAW,CAAC,MAAM,CAAC,QAAQ,CAAC,EAAE;YAC/C,MAAM,GAAG,GAAG,UAAU,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC;YACpC,IAAI,GAAG;gBAAE,MAAM,GAAG,CAAC;SACpB;KACF;IAED,KAAK,MAAM,IAAI,IAAI,WAAW,CAAC,KAAK,EAAE,EAAE;QACtC,MAAM,GAAG,GAAG,UAAU,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC;QACpC,IAAI,GAAG;YAAE,MAAM,GAAG,CAAC;KACpB;AACH,CAAC;AAED;;;GAGG;AACH,KAAK,SAAS,CAAC,CAAC,aAAa,CAAC,QAAsC;IAClE,IAAI,IAAI,GAAG,IAAI,UAAU,EAAE,CAAC;IAE5B,IAAI,KAAK,EAAE,MAAM,KAAK,IAAI,QAAQ,EAAE;QAClC,IAAI,KAAK,IAAI,IAAI,EAAE;YACjB,SAAS;SACV;QAED,MAAM,WAAW,GACf,KAAK,YAAY,WAAW,CAAC,CAAC,CAAC,IAAI,UAAU,CAAC,KAAK,CAAC;YACpD,CAAC,CAAC,OAAO,KAAK,KAAK,QAAQ,CAAC,CAAC,CAAC,IAAI,WAAW,EAAE,CAAC,MAAM,CAAC,KAAK,CAAC;gBAC7D,CAAC,CAAC,KAAK,CAAC;QAEV,IAAI,OAAO,GAAG,IAAI,UAAU,CAAC,IAAI,CAAC,MAAM,GAAG,WAAW,CAAC,MAAM,CAAC,CAAC;QAC/D,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC;QAClB,OAAO,CAAC,GAAG,CAAC,WAAW,EAAE,IAAI,CAAC,MAAM,CAAC,CAAC;QACtC,IAAI,GAAG,OAAO,CAAC;QAEf,IAAI,YAAY,CAAC;QACjB,OAAO,CAAC,YAAY,GAAG,sBAAsB,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,CAAC,EAAE;YAC3D,MAAM,IAAI,CAAC,KAAK,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;YAClC,IAAI,GAAG,IAAI,CAAC,KAAK,CAAC,YAAY,CAAC,CAAC;SACj
C;KACF;IAED,IAAI,IAAI,CAAC,MAAM,GAAG,CAAC,EAAE;QACnB,MAAM,IAAI,CAAC;KACZ;AACH,CAAC;AAED,SAAS,sBAAsB,CAAC,MAAkB;IAChD,gFAAgF;IAChF,yEAAyE;IACzE,2CAA2C;IAC3C,MAAM,OAAO,GAAG,IAAI,CAAC,CAAC,KAAK;IAC3B,MAAM,QAAQ,GAAG,IAAI,CAAC,CAAC,KAAK;IAE5B,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC,EAAE,EAAE;QAC1C,IAAI,MAAM,CAAC,CAAC,CAAC,KAAK,OAAO,IAAI,MAAM,CAAC,CAAC,GAAG,CAAC,CAAC,KAAK,OAAO,EAAE;YACtD,OAAO;YACP,OAAO,CAAC,GAAG,CAAC,CAAC;SACd;QACD,IAAI,MAAM,CAAC,CAAC,CAAC,KAAK,QAAQ,IAAI,MAAM,CAAC,CAAC,GAAG,CAAC,CAAC,KAAK,QAAQ,EAAE;YACxD,OAAO;YACP,OAAO,CAAC,GAAG,CAAC,CAAC;SACd;QACD,IACE,MAAM,CAAC,CAAC,CAAC,KAAK,QAAQ;YACtB,MAAM,CAAC,CAAC,GAAG,CAAC,CAAC,KAAK,OAAO;YACzB,CAAC,GAAG,CAAC,GAAG,MAAM,CAAC,MAAM;YACrB,MAAM,CAAC,CAAC,GAAG,CAAC,CAAC,KAAK,QAAQ;YAC1B,MAAM,CAAC,CAAC,GAAG,CAAC,CAAC,KAAK,OAAO,EACzB;YACA,WAAW;YACX,OAAO,CAAC,GAAG,CAAC,CAAC;SACd;KACF;IAED,OAAO,CAAC,CAAC,CAAC;AACZ,CAAC;AAED,MAAM,UAAU;IAKd;QACE,IAAI,CAAC,KAAK,GAAG,IAAI,CAAC;QAClB,IAAI,CAAC,IAAI,GAAG,EAAE,CAAC;QACf,IAAI,CAAC,MAAM,GAAG,EAAE,CAAC;IACnB,CAAC;IAED,MAAM,CAAC,IAAY;QACjB,IAAI,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,EAAE;YACvB,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,CAAC,EAAE,IAAI,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC;SAC3C;QAED,IAAI,CAAC,IAAI,EAAE;YACT,6DAA6D;YAC7D,IAAI,CAAC,IAAI,CAAC,KAAK,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM;gBAAE,OAAO,IAAI,CAAC;YAElD,MAAM,GAAG,GAAoB;gBAC3B,KAAK,EAAE,IAAI,CAAC,KAAK;gBACjB,IAAI,EAAE,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC;gBAC1B,GAAG,EAAE,IAAI,CAAC,MAAM;aACjB,CAAC;YAEF,IAAI,CAAC,KAAK,GAAG,IAAI,CAAC;YAClB,IAAI,CAAC,IAAI,GAAG,EAAE,CAAC;YACf,IAAI,CAAC,MAAM,GAAG,EAAE,CAAC;YAEjB,OAAO,GAAG,CAAC;SACZ;QAED,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;QAEvB,IAAI,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,EAAE;YACxB,OAAO,IAAI,CAAC;SACb;QAED,IAAI,CAAC,SAAS,EAAE,CAAC,EAAE,KAAK,CAAC,GAAG,SAAS,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC;QAEjD,IAAI,KAAK,CAAC,UAAU,CAAC,GAAG,CAAC,EAAE;YACzB,KAAK,GAAG,KAAK,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC;SAC5B;QAED,IAAI,SAAS,KAAK,OAAO,EAAE;YACzB,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC;SACpB;aAAM,IAAI,SAAS,KAAK,MAAM,EAAE;YAC/B,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;SACvB;QAED,OAAO,IAAI,CAAC;IACd,CAAC;CACF;AAED,uEAAuE;AACvE,MAAM,UAAU,aAAa,CAAC,MAAgB;IAC5C,MAAM,OAAO,GAAG,IAAI,WAAW,EAAE,CAAC;IAClC,MAAM,KAAK,GAAa,EAAE,CAAC;IAC3B,KAAK,MAAM,KAAK,IAAI,MAAM,EAAE;QAC1B,KAAK,CAAC,IAAI,CAAC,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC;KACtC;IAED,OAAO,KAAK,CAAC;AACf,CAAC;AAED,SAAS,SAAS,CAAC,GAAW,EAAE,SAAiB;IAC/C,MAAM,KAAK,GAAG,GAAG,CAAC,OAAO,CAAC,SAAS,CAAC,CAAC;IACrC,IAAI,KAAK,KAAK,CAAC,CAAC,EAAE;QAChB,OAAO,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC,EAAE,KAAK,CAAC,EAAE,SAAS,EAAE,GAAG,CAAC,SAAS,CAAC,KAAK,GAAG,SAAS,CAAC,MAAM,CAAC,CAAC,CAAC;KACtF;IAED,OAAO,CAAC,GAAG,EAAE,EAAE,EAAE,EAAE,CAAC,CAAC;AACvB,CAAC;AAED;;;;;GAKG;AACH,MAAM,UAAU,2BAA2B,CAAI,MAAW;IACxD,IAAI,MAAM,CAAC,MAAM,CAAC,aAAa,CAAC;QAAE,OAAO,MAAM,CAAC;IAEhD,MAAM,MAAM,GAAG,MAAM,CAAC,SAAS,EAAE,CAAC;IAClC,OAAO;QACL,KAAK,CAAC,IAAI;YACR,IAAI;gBACF,MAAM,MAAM,GAAG,MAAM,MAAM,CAAC,IAAI,EAAE,CAAC;gBACnC,IAAI,MAAM,EAAE,IAAI;oBAAE,MAAM,CAAC,WAAW,EAAE,CAAC,CAAC,0CAA0C;gBAClF,OAAO,MAAM,CAAC;aACf;YAAC,OAAO,CAAC,EAAE;gBACV,MAAM,CAAC,WAAW,EAAE,CAAC,CAAC,2CAA2C;gBACjE,MAAM,CAAC,CAAC;aACT;QACH,CAAC;QACD,KAAK,CAAC,MAAM;YACV,MAAM,aAAa,GAAG,MAAM,CAAC,MAAM,EAAE,CAAC;YACtC,MAAM,CAAC,WAAW,EAAE,CAAC;YACrB,MAAM,aAAa,CAAC;YACpB,OAAO,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,SAAS,EAAE,CAAC;QAC1C,CAAC;QACD,CAAC,MAAM,CAAC,aAAa,CAAC;YACpB,OAAO,IAAI,CAAC;QACd,CAAC;KACF,CAAC;AACJ,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/uploads.d.ts b/node_modules/openai/uploads.d.ts new file mode 100644 index 
0000000..55faa0c
--- /dev/null
+++ b/node_modules/openai/uploads.d.ts
@@ -0,0 +1,75 @@
+import { type RequestOptions } from "./core.js";
+import { FormData, type Blob, type FilePropertyBag, type FsReadStream } from "./_shims/index.js";
+import { MultipartBody } from "./_shims/MultipartBody.js";
+export { fileFromPath } from "./_shims/index.js";
+type BlobLikePart = string | ArrayBuffer | ArrayBufferView | BlobLike | Uint8Array | DataView;
+export type BlobPart = string | ArrayBuffer | ArrayBufferView | Blob | Uint8Array | DataView;
+/**
+ * Typically, this is a native "File" class.
+ *
+ * We provide the {@link toFile} utility to convert a variety of objects
+ * into the File class.
+ *
+ * For convenience, you can also pass a fetch Response, or in Node,
+ * the result of fs.createReadStream().
+ */
+export type Uploadable = FileLike | ResponseLike | FsReadStream;
+/**
+ * Intended to match web.Blob, node.Blob, node-fetch.Blob, etc.
+ */
+export interface BlobLike {
+    /** [MDN Reference](https://developer.mozilla.org/docs/Web/API/Blob/size) */
+    readonly size: number;
+    /** [MDN Reference](https://developer.mozilla.org/docs/Web/API/Blob/type) */
+    readonly type: string;
+    /** [MDN Reference](https://developer.mozilla.org/docs/Web/API/Blob/text) */
+    text(): Promise<string>;
+    /** [MDN Reference](https://developer.mozilla.org/docs/Web/API/Blob/slice) */
+    slice(start?: number, end?: number): BlobLike;
+}
+/**
+ * Intended to match web.File, node.File, node-fetch.File, etc.
+ */
+export interface FileLike extends BlobLike {
+    /** [MDN Reference](https://developer.mozilla.org/docs/Web/API/File/lastModified) */
+    readonly lastModified: number;
+    /** [MDN Reference](https://developer.mozilla.org/docs/Web/API/File/name) */
+    readonly name: string;
+}
+/**
+ * Intended to match web.Response, node.Response, node-fetch.Response, etc.
+ */
+export interface ResponseLike {
+    url: string;
+    blob(): Promise<Blob>;
+}
+export declare const isResponseLike: (value: any) => value is ResponseLike;
+export declare const isFileLike: (value: any) => value is FileLike;
+/**
+ * The BlobLike type omits arrayBuffer() because @types/node-fetch@^2.6.4 lacks it; but this check
+ * adds the arrayBuffer() method type because it is available and used at runtime
+ */
+export declare const isBlobLike: (value: any) => value is BlobLike & {
+    arrayBuffer(): Promise<ArrayBuffer>;
+};
+export declare const isUploadable: (value: any) => value is Uploadable;
+export type ToFileInput = Uploadable | Exclude<BlobLikePart, string> | AsyncIterable<BlobLikePart>;
+/**
+ * Helper for creating a {@link File} to pass to an SDK upload method from a variety of different data formats
+ * @param value the raw content of the file. Can be an {@link Uploadable}, {@link BlobLikePart}, or {@link AsyncIterable} of {@link BlobLikePart}s
+ * @param {string=} name the name of the file. If omitted, toFile will try to determine a file name from bits if possible
+ * @param {Object=} options additional properties
+ * @param {string=} options.type the MIME type of the content
+ * @param {number=} options.lastModified the last modified timestamp
+ * @returns a {@link File} with the given properties
+ */
+export declare function toFile(value: ToFileInput | PromiseLike<ToFileInput>, name?: string | null | undefined, options?: FilePropertyBag | undefined): Promise<FileLike>;
+export declare const isMultipartBody: (body: any) => body is MultipartBody;
+/**
+ * Returns a multipart/form-data request if any part of the given request body contains a File / Blob value.
+ * Otherwise returns the request as is.
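+ *
+ * Illustrative sketch (added note, not part of the upstream declaration): a hypothetical body such as
+ * `{ file: fs.createReadStream('speech.mp3'), model: 'whisper-1' }` contains an uploadable value and is
+ * converted into a FormData request, whereas a body of plain strings and numbers is returned unchanged.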
+ */ +export declare const maybeMultipartFormRequestOptions: >(opts: RequestOptions) => Promise>; +export declare const multipartFormRequestOptions: >(opts: RequestOptions) => Promise>; +export declare const createForm: >(body: T | undefined) => Promise; +//# sourceMappingURL=uploads.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/uploads.d.ts.map b/node_modules/openai/uploads.d.ts.map new file mode 100644 index 0000000..227dc5f --- /dev/null +++ b/node_modules/openai/uploads.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"uploads.d.ts","sourceRoot":"","sources":["src/uploads.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,KAAK,cAAc,EAAE,MAAM,QAAQ,CAAC;AAC7C,OAAO,EACL,QAAQ,EAER,KAAK,IAAI,EACT,KAAK,eAAe,EAEpB,KAAK,YAAY,EAElB,MAAM,gBAAgB,CAAC;AACxB,OAAO,EAAE,aAAa,EAAE,MAAM,wBAAwB,CAAC;AACvD,OAAO,EAAE,YAAY,EAAE,MAAM,gBAAgB,CAAC;AAE9C,KAAK,YAAY,GAAG,MAAM,GAAG,WAAW,GAAG,eAAe,GAAG,QAAQ,GAAG,UAAU,GAAG,QAAQ,CAAC;AAC9F,MAAM,MAAM,QAAQ,GAAG,MAAM,GAAG,WAAW,GAAG,eAAe,GAAG,IAAI,GAAG,UAAU,GAAG,QAAQ,CAAC;AAE7F;;;;;;;;GAQG;AACH,MAAM,MAAM,UAAU,GAAG,QAAQ,GAAG,YAAY,GAAG,YAAY,CAAC;AAEhE;;GAEG;AACH,MAAM,WAAW,QAAQ;IACvB,4EAA4E;IAC5E,QAAQ,CAAC,IAAI,EAAE,MAAM,CAAC;IACtB,4EAA4E;IAC5E,QAAQ,CAAC,IAAI,EAAE,MAAM,CAAC;IACtB,4EAA4E;IAC5E,IAAI,IAAI,OAAO,CAAC,MAAM,CAAC,CAAC;IACxB,6EAA6E;IAC7E,KAAK,CAAC,KAAK,CAAC,EAAE,MAAM,EAAE,GAAG,CAAC,EAAE,MAAM,GAAG,QAAQ,CAAC;CAE/C;AAED;;GAEG;AACH,MAAM,WAAW,QAAS,SAAQ,QAAQ;IACxC,oFAAoF;IACpF,QAAQ,CAAC,YAAY,EAAE,MAAM,CAAC;IAC9B,4EAA4E;IAC5E,QAAQ,CAAC,IAAI,EAAE,MAAM,CAAC;CACvB;AAED;;GAEG;AACH,MAAM,WAAW,YAAY;IAC3B,GAAG,EAAE,MAAM,CAAC;IACZ,IAAI,IAAI,OAAO,CAAC,QAAQ,CAAC,CAAC;CAC3B;AAED,eAAO,MAAM,cAAc,UAAW,GAAG,0BAIP,CAAC;AAEnC,eAAO,MAAM,UAAU,UAAW,GAAG,sBAKlB,CAAC;AAEpB;;;GAGG;AACH,eAAO,MAAM,UAAU,UAAW,GAAG;mBAAwC,QAAQ,WAAW,CAAC;CAOxD,CAAC;AAE1C,eAAO,MAAM,YAAY,UAAW,GAAG,wBAEtC,CAAC;AAEF,MAAM,MAAM,WAAW,GAAG,UAAU,GAAG,OAAO,CAAC,YAAY,EAAE,MAAM,CAAC,GAAG,aAAa,CAAC,YAAY,CAAC,CAAC;AAEnG;;;;;;;;GAQG;AACH,wBAAsB,MAAM,CAC1B,KAAK,EAAE,WAAW,GAAG,WAAW,CAAC,WAAW,CAAC,EAC7C,IAAI,CAAC,EAAE,MAAM,GAAG,IAAI,GAAG,SAAS,EAChC,OAAO,CAAC,EAAE,eAAe,GAAG,SAAS,GACpC,OAAO,CAAC,QAAQ,CAAC,CAiCnB;AAmDD,eAAO,MAAM,eAAe,SAAU,GAAG,0BACsD,CAAC;AAEhG;;;GAGG;AACH,eAAO,MAAM,gCAAgC,sGAO5C,CAAC;AAEF,eAAO,MAAM,2BAA2B,sGAKvC,CAAC;AAEF,eAAO,MAAM,UAAU,wDAA6D,QAAQ,QAAQ,CAInG,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/uploads.js b/node_modules/openai/uploads.js new file mode 100644 index 0000000..6daae95 --- /dev/null +++ b/node_modules/openai/uploads.js @@ -0,0 +1,171 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.createForm = exports.multipartFormRequestOptions = exports.maybeMultipartFormRequestOptions = exports.isMultipartBody = exports.toFile = exports.isUploadable = exports.isBlobLike = exports.isFileLike = exports.isResponseLike = exports.fileFromPath = void 0; +const index_1 = require("./_shims/index.js"); +var index_2 = require("./_shims/index.js"); +Object.defineProperty(exports, "fileFromPath", { enumerable: true, get: function () { return index_2.fileFromPath; } }); +const isResponseLike = (value) => value != null && + typeof value === 'object' && + typeof value.url === 'string' && + typeof value.blob === 'function'; +exports.isResponseLike = isResponseLike; +const isFileLike = (value) => value != null && + typeof value === 'object' && + typeof value.name === 'string' && + typeof value.lastModified === 'number' && + (0, exports.isBlobLike)(value); +exports.isFileLike = isFileLike; +/** + * The BlobLike type omits arrayBuffer() because 
@types/node-fetch@^2.6.4 lacks it; but this check + * adds the arrayBuffer() method type because it is available and used at runtime + */ +const isBlobLike = (value) => value != null && + typeof value === 'object' && + typeof value.size === 'number' && + typeof value.type === 'string' && + typeof value.text === 'function' && + typeof value.slice === 'function' && + typeof value.arrayBuffer === 'function'; +exports.isBlobLike = isBlobLike; +const isUploadable = (value) => { + return (0, exports.isFileLike)(value) || (0, exports.isResponseLike)(value) || (0, index_1.isFsReadStream)(value); +}; +exports.isUploadable = isUploadable; +/** + * Helper for creating a {@link File} to pass to an SDK upload method from a variety of different data formats + * @param value the raw content of the file. Can be an {@link Uploadable}, {@link BlobLikePart}, or {@link AsyncIterable} of {@link BlobLikePart}s + * @param {string=} name the name of the file. If omitted, toFile will try to determine a file name from bits if possible + * @param {Object=} options additional properties + * @param {string=} options.type the MIME type of the content + * @param {number=} options.lastModified the last modified timestamp + * @returns a {@link File} with the given properties + */ +async function toFile(value, name, options) { + // If it's a promise, resolve it. + value = await value; + // If we've been given a `File` we don't need to do anything + if ((0, exports.isFileLike)(value)) { + return value; + } + if ((0, exports.isResponseLike)(value)) { + const blob = await value.blob(); + name || (name = new URL(value.url).pathname.split(/[\\/]/).pop() ?? 'unknown_file'); + // we need to convert the `Blob` into an array buffer because the `Blob` class + // that `node-fetch` defines is incompatible with the web standard which results + // in `new File` interpreting it as a string instead of binary data. + const data = (0, exports.isBlobLike)(blob) ? [(await blob.arrayBuffer())] : [blob]; + return new index_1.File(data, name, options); + } + const bits = await getBytes(value); + name || (name = getName(value) ?? 'unknown_file'); + if (!options?.type) { + const type = bits[0]?.type; + if (typeof type === 'string') { + options = { ...options, type }; + } + } + return new index_1.File(bits, name, options); +} +exports.toFile = toFile; +async function getBytes(value) { + let parts = []; + if (typeof value === 'string' || + ArrayBuffer.isView(value) || // includes Uint8Array, Buffer, etc. + value instanceof ArrayBuffer) { + parts.push(value); + } + else if ((0, exports.isBlobLike)(value)) { + parts.push(await value.arrayBuffer()); + } + else if (isAsyncIterableIterator(value) // includes Readable, ReadableStream, etc. + ) { + for await (const chunk of value) { + parts.push(chunk); // TODO, consider validating? 
+ } + } + else { + throw new Error(`Unexpected data type: ${typeof value}; constructor: ${value?.constructor + ?.name}; props: ${propsForError(value)}`); + } + return parts; +} +function propsForError(value) { + const props = Object.getOwnPropertyNames(value); + return `[${props.map((p) => `"${p}"`).join(', ')}]`; +} +function getName(value) { + return (getStringFromMaybeBuffer(value.name) || + getStringFromMaybeBuffer(value.filename) || + // For fs.ReadStream + getStringFromMaybeBuffer(value.path)?.split(/[\\/]/).pop()); +} +const getStringFromMaybeBuffer = (x) => { + if (typeof x === 'string') + return x; + if (typeof Buffer !== 'undefined' && x instanceof Buffer) + return String(x); + return undefined; +}; +const isAsyncIterableIterator = (value) => value != null && typeof value === 'object' && typeof value[Symbol.asyncIterator] === 'function'; +const isMultipartBody = (body) => body && typeof body === 'object' && body.body && body[Symbol.toStringTag] === 'MultipartBody'; +exports.isMultipartBody = isMultipartBody; +/** + * Returns a multipart/form-data request if any part of the given request body contains a File / Blob value. + * Otherwise returns the request as is. + */ +const maybeMultipartFormRequestOptions = async (opts) => { + if (!hasUploadableValue(opts.body)) + return opts; + const form = await (0, exports.createForm)(opts.body); + return (0, index_1.getMultipartRequestOptions)(form, opts); +}; +exports.maybeMultipartFormRequestOptions = maybeMultipartFormRequestOptions; +const multipartFormRequestOptions = async (opts) => { + const form = await (0, exports.createForm)(opts.body); + return (0, index_1.getMultipartRequestOptions)(form, opts); +}; +exports.multipartFormRequestOptions = multipartFormRequestOptions; +const createForm = async (body) => { + const form = new index_1.FormData(); + await Promise.all(Object.entries(body || {}).map(([key, value]) => addFormValue(form, key, value))); + return form; +}; +exports.createForm = createForm; +const hasUploadableValue = (value) => { + if ((0, exports.isUploadable)(value)) + return true; + if (Array.isArray(value)) + return value.some(hasUploadableValue); + if (value && typeof value === 'object') { + for (const k in value) { + if (hasUploadableValue(value[k])) + return true; + } + } + return false; +}; +const addFormValue = async (form, key, value) => { + if (value === undefined) + return; + if (value == null) { + throw new TypeError(`Received null for "${key}"; to pass null in FormData, you must use the string 'null'`); + } + // TODO: make nested formats configurable + if (typeof value === 'string' || typeof value === 'number' || typeof value === 'boolean') { + form.append(key, String(value)); + } + else if ((0, exports.isUploadable)(value)) { + const file = await toFile(value); + form.append(key, file); + } + else if (Array.isArray(value)) { + await Promise.all(value.map((entry) => addFormValue(form, key + '[]', entry))); + } + else if (typeof value === 'object') { + await Promise.all(Object.entries(value).map(([name, prop]) => addFormValue(form, `${key}[${name}]`, prop))); + } + else { + throw new TypeError(`Invalid value given to form, expected a string, number, boolean, object, Array, File or Blob but got ${value} instead`); + } +}; +//# sourceMappingURL=uploads.js.map \ No newline at end of file diff --git a/node_modules/openai/uploads.js.map b/node_modules/openai/uploads.js.map new file mode 100644 index 0000000..7d7d7ae --- /dev/null +++ b/node_modules/openai/uploads.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"uploads.js","sourceRoot":"","sources":["src/uploads.ts"],"names":[],"mappings":";;;AACA,6CAQwB;AAExB,2CAA8C;AAArC,qGAAA,YAAY,OAAA;AAiDd,MAAM,cAAc,GAAG,CAAC,KAAU,EAAyB,EAAE,CAClE,KAAK,IAAI,IAAI;IACb,OAAO,KAAK,KAAK,QAAQ;IACzB,OAAO,KAAK,CAAC,GAAG,KAAK,QAAQ;IAC7B,OAAO,KAAK,CAAC,IAAI,KAAK,UAAU,CAAC;AAJtB,QAAA,cAAc,kBAIQ;AAE5B,MAAM,UAAU,GAAG,CAAC,KAAU,EAAqB,EAAE,CAC1D,KAAK,IAAI,IAAI;IACb,OAAO,KAAK,KAAK,QAAQ;IACzB,OAAO,KAAK,CAAC,IAAI,KAAK,QAAQ;IAC9B,OAAO,KAAK,CAAC,YAAY,KAAK,QAAQ;IACtC,IAAA,kBAAU,EAAC,KAAK,CAAC,CAAC;AALP,QAAA,UAAU,cAKH;AAEpB;;;GAGG;AACI,MAAM,UAAU,GAAG,CAAC,KAAU,EAA+D,EAAE,CACpG,KAAK,IAAI,IAAI;IACb,OAAO,KAAK,KAAK,QAAQ;IACzB,OAAO,KAAK,CAAC,IAAI,KAAK,QAAQ;IAC9B,OAAO,KAAK,CAAC,IAAI,KAAK,QAAQ;IAC9B,OAAO,KAAK,CAAC,IAAI,KAAK,UAAU;IAChC,OAAO,KAAK,CAAC,KAAK,KAAK,UAAU;IACjC,OAAO,KAAK,CAAC,WAAW,KAAK,UAAU,CAAC;AAP7B,QAAA,UAAU,cAOmB;AAEnC,MAAM,YAAY,GAAG,CAAC,KAAU,EAAuB,EAAE;IAC9D,OAAO,IAAA,kBAAU,EAAC,KAAK,CAAC,IAAI,IAAA,sBAAc,EAAC,KAAK,CAAC,IAAI,IAAA,sBAAc,EAAC,KAAK,CAAC,CAAC;AAC7E,CAAC,CAAC;AAFW,QAAA,YAAY,gBAEvB;AAIF;;;;;;;;GAQG;AACI,KAAK,UAAU,MAAM,CAC1B,KAA6C,EAC7C,IAAgC,EAChC,OAAqC;IAErC,iCAAiC;IACjC,KAAK,GAAG,MAAM,KAAK,CAAC;IAEpB,4DAA4D;IAC5D,IAAI,IAAA,kBAAU,EAAC,KAAK,CAAC,EAAE;QACrB,OAAO,KAAK,CAAC;KACd;IAED,IAAI,IAAA,sBAAc,EAAC,KAAK,CAAC,EAAE;QACzB,MAAM,IAAI,GAAG,MAAM,KAAK,CAAC,IAAI,EAAE,CAAC;QAChC,IAAI,KAAJ,IAAI,GAAK,IAAI,GAAG,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,GAAG,EAAE,IAAI,cAAc,EAAC;QAE5E,8EAA8E;QAC9E,gFAAgF;QAChF,oEAAoE;QACpE,MAAM,IAAI,GAAG,IAAA,kBAAU,EAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,IAAI,CAAC,WAAW,EAAE,CAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC;QAE7E,OAAO,IAAI,YAAI,CAAC,IAAI,EAAE,IAAI,EAAE,OAAO,CAAC,CAAC;KACtC;IAED,MAAM,IAAI,GAAG,MAAM,QAAQ,CAAC,KAAK,CAAC,CAAC;IAEnC,IAAI,KAAJ,IAAI,GAAK,OAAO,CAAC,KAAK,CAAC,IAAI,cAAc,EAAC;IAE1C,IAAI,CAAC,OAAO,EAAE,IAAI,EAAE;QAClB,MAAM,IAAI,GAAI,IAAI,CAAC,CAAC,CAAS,EAAE,IAAI,CAAC;QACpC,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE;YAC5B,OAAO,GAAG,EAAE,GAAG,OAAO,EAAE,IAAI,EAAE,CAAC;SAChC;KACF;IAED,OAAO,IAAI,YAAI,CAAC,IAAI,EAAE,IAAI,EAAE,OAAO,CAAC,CAAC;AACvC,CAAC;AArCD,wBAqCC;AAED,KAAK,UAAU,QAAQ,CAAC,KAAkB;IACxC,IAAI,KAAK,GAAoB,EAAE,CAAC;IAChC,IACE,OAAO,KAAK,KAAK,QAAQ;QACzB,WAAW,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,oCAAoC;QACjE,KAAK,YAAY,WAAW,EAC5B;QACA,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;KACnB;SAAM,IAAI,IAAA,kBAAU,EAAC,KAAK,CAAC,EAAE;QAC5B,KAAK,CAAC,IAAI,CAAC,MAAM,KAAK,CAAC,WAAW,EAAE,CAAC,CAAC;KACvC;SAAM,IACL,uBAAuB,CAAC,KAAK,CAAC,CAAC,0CAA0C;MACzE;QACA,IAAI,KAAK,EAAE,MAAM,KAAK,IAAI,KAAK,EAAE;YAC/B,KAAK,CAAC,IAAI,CAAC,KAAiB,CAAC,CAAC,CAAC,6BAA6B;SAC7D;KACF;SAAM;QACL,MAAM,IAAI,KAAK,CACb,yBAAyB,OAAO,KAAK,kBAAkB,KAAK,EAAE,WAAW;YACvE,EAAE,IAAI,YAAY,aAAa,CAAC,KAAK,CAAC,EAAE,CAC3C,CAAC;KACH;IAED,OAAO,KAAK,CAAC;AACf,CAAC;AAED,SAAS,aAAa,CAAC,KAAU;IAC/B,MAAM,KAAK,GAAG,MAAM,CAAC,mBAAmB,CAAC,KAAK,CAAC,CAAC;IAChD,OAAO,IAAI,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC;AACtD,CAAC;AAED,SAAS,OAAO,CAAC,KAAU;IACzB,OAAO,CACL,wBAAwB,CAAC,KAAK,CAAC,IAAI,CAAC;QACpC,wBAAwB,CAAC,KAAK,CAAC,QAAQ,CAAC;QACxC,oBAAoB;QACpB,wBAAwB,CAAC,KAAK,CAAC,IAAI,CAAC,EAAE,KAAK,CAAC,OAAO,CAAC,CAAC,GAAG,EAAE,CAC3D,CAAC;AACJ,CAAC;AAED,MAAM,wBAAwB,GAAG,CAAC,CAA4B,EAAsB,EAAE;IACpF,IAAI,OAAO,CAAC,KAAK,QAAQ;QAAE,OAAO,CAAC,CAAC;IACpC,IAAI,OAAO,MAAM,KAAK,WAAW,IAAI,CAAC,YAAY,MAAM;QAAE,OAAO,MAAM,CAAC,CAAC,CAAC,CAAC;IAC3E,OAAO,SAAS,CAAC;AACnB,CAAC,CAAC;AAEF,MAAM,uBAAuB,GAAG,CAAC,KAAU,EAA2C,EAAE,CACtF,KAAK,IAAI,IAAI,IAAI,OAAO,KAAK,KAAK,QAAQ,IAAI,OAAO,KAAK,CAAC,MAAM,CAAC,aAAa,CAAC,KAAK,UAAU,CAAC;AAE3F,MAAM,eAAe,GAAG
,CAAC,IAAS,EAAyB,EAAE,CAClE,IAAI,IAAI,OAAO,IAAI,KAAK,QAAQ,IAAI,IAAI,CAAC,IAAI,IAAI,IAAI,CAAC,MAAM,CAAC,WAAW,CAAC,KAAK,eAAe,CAAC;AADnF,QAAA,eAAe,mBACoE;AAEhG;;;GAGG;AACI,MAAM,gCAAgC,GAAG,KAAK,EACnD,IAAuB,EACqB,EAAE;IAC9C,IAAI,CAAC,kBAAkB,CAAC,IAAI,CAAC,IAAI,CAAC;QAAE,OAAO,IAAI,CAAC;IAEhD,MAAM,IAAI,GAAG,MAAM,IAAA,kBAAU,EAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IACzC,OAAO,IAAA,kCAA0B,EAAC,IAAI,EAAE,IAAI,CAAC,CAAC;AAChD,CAAC,CAAC;AAPW,QAAA,gCAAgC,oCAO3C;AAEK,MAAM,2BAA2B,GAAG,KAAK,EAC9C,IAAuB,EACqB,EAAE;IAC9C,MAAM,IAAI,GAAG,MAAM,IAAA,kBAAU,EAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IACzC,OAAO,IAAA,kCAA0B,EAAC,IAAI,EAAE,IAAI,CAAC,CAAC;AAChD,CAAC,CAAC;AALW,QAAA,2BAA2B,+BAKtC;AAEK,MAAM,UAAU,GAAG,KAAK,EAA+B,IAAmB,EAAqB,EAAE;IACtG,MAAM,IAAI,GAAG,IAAI,gBAAQ,EAAE,CAAC;IAC5B,MAAM,OAAO,CAAC,GAAG,CAAC,MAAM,CAAC,OAAO,CAAC,IAAI,IAAI,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG,EAAE,KAAK,CAAC,EAAE,EAAE,CAAC,YAAY,CAAC,IAAI,EAAE,GAAG,EAAE,KAAK,CAAC,CAAC,CAAC,CAAC;IACpG,OAAO,IAAI,CAAC;AACd,CAAC,CAAC;AAJW,QAAA,UAAU,cAIrB;AAEF,MAAM,kBAAkB,GAAG,CAAC,KAAc,EAAW,EAAE;IACrD,IAAI,IAAA,oBAAY,EAAC,KAAK,CAAC;QAAE,OAAO,IAAI,CAAC;IACrC,IAAI,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC;QAAE,OAAO,KAAK,CAAC,IAAI,CAAC,kBAAkB,CAAC,CAAC;IAChE,IAAI,KAAK,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;QACtC,KAAK,MAAM,CAAC,IAAI,KAAK,EAAE;YACrB,IAAI,kBAAkB,CAAE,KAAa,CAAC,CAAC,CAAC,CAAC;gBAAE,OAAO,IAAI,CAAC;SACxD;KACF;IACD,OAAO,KAAK,CAAC;AACf,CAAC,CAAC;AAEF,MAAM,YAAY,GAAG,KAAK,EAAE,IAAc,EAAE,GAAW,EAAE,KAAc,EAAiB,EAAE;IACxF,IAAI,KAAK,KAAK,SAAS;QAAE,OAAO;IAChC,IAAI,KAAK,IAAI,IAAI,EAAE;QACjB,MAAM,IAAI,SAAS,CACjB,sBAAsB,GAAG,6DAA6D,CACvF,CAAC;KACH;IAED,yCAAyC;IACzC,IAAI,OAAO,KAAK,KAAK,QAAQ,IAAI,OAAO,KAAK,KAAK,QAAQ,IAAI,OAAO,KAAK,KAAK,SAAS,EAAE;QACxF,IAAI,CAAC,MAAM,CAAC,GAAG,EAAE,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC;KACjC;SAAM,IAAI,IAAA,oBAAY,EAAC,KAAK,CAAC,EAAE;QAC9B,MAAM,IAAI,GAAG,MAAM,MAAM,CAAC,KAAK,CAAC,CAAC;QACjC,IAAI,CAAC,MAAM,CAAC,GAAG,EAAE,IAAY,CAAC,CAAC;KAChC;SAAM,IAAI,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC,EAAE;QAC/B,MAAM,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,KAAK,EAAE,EAAE,CAAC,YAAY,CAAC,IAAI,EAAE,GAAG,GAAG,IAAI,EAAE,KAAK,CAAC,CAAC,CAAC,CAAC;KAChF;SAAM,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;QACpC,MAAM,OAAO,CAAC,GAAG,CACf,MAAM,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,IAAI,EAAE,IAAI,CAAC,EAAE,EAAE,CAAC,YAAY,CAAC,IAAI,EAAE,GAAG,GAAG,IAAI,IAAI,GAAG,EAAE,IAAI,CAAC,CAAC,CACzF,CAAC;KACH;SAAM;QACL,MAAM,IAAI,SAAS,CACjB,wGAAwG,KAAK,UAAU,CACxH,CAAC;KACH;AACH,CAAC,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/uploads.mjs b/node_modules/openai/uploads.mjs new file mode 100644 index 0000000..7357361 --- /dev/null +++ b/node_modules/openai/uploads.mjs @@ -0,0 +1,158 @@ +import { FormData, File, getMultipartRequestOptions, isFsReadStream, } from "./_shims/index.mjs"; +export { fileFromPath } from "./_shims/index.mjs"; +export const isResponseLike = (value) => value != null && + typeof value === 'object' && + typeof value.url === 'string' && + typeof value.blob === 'function'; +export const isFileLike = (value) => value != null && + typeof value === 'object' && + typeof value.name === 'string' && + typeof value.lastModified === 'number' && + isBlobLike(value); +/** + * The BlobLike type omits arrayBuffer() because @types/node-fetch@^2.6.4 lacks it; but this check + * adds the arrayBuffer() method type because it is available and used at runtime + */ +export const isBlobLike = (value) => value != null && + typeof value === 'object' && + typeof value.size === 'number' && + typeof value.type === 'string' && + typeof value.text === 'function' && + typeof value.slice === 'function' && + typeof 
value.arrayBuffer === 'function'; +export const isUploadable = (value) => { + return isFileLike(value) || isResponseLike(value) || isFsReadStream(value); +}; +/** + * Helper for creating a {@link File} to pass to an SDK upload method from a variety of different data formats + * @param value the raw content of the file. Can be an {@link Uploadable}, {@link BlobLikePart}, or {@link AsyncIterable} of {@link BlobLikePart}s + * @param {string=} name the name of the file. If omitted, toFile will try to determine a file name from bits if possible + * @param {Object=} options additional properties + * @param {string=} options.type the MIME type of the content + * @param {number=} options.lastModified the last modified timestamp + * @returns a {@link File} with the given properties + */ +export async function toFile(value, name, options) { + // If it's a promise, resolve it. + value = await value; + // If we've been given a `File` we don't need to do anything + if (isFileLike(value)) { + return value; + } + if (isResponseLike(value)) { + const blob = await value.blob(); + name || (name = new URL(value.url).pathname.split(/[\\/]/).pop() ?? 'unknown_file'); + // we need to convert the `Blob` into an array buffer because the `Blob` class + // that `node-fetch` defines is incompatible with the web standard which results + // in `new File` interpreting it as a string instead of binary data. + const data = isBlobLike(blob) ? [(await blob.arrayBuffer())] : [blob]; + return new File(data, name, options); + } + const bits = await getBytes(value); + name || (name = getName(value) ?? 'unknown_file'); + if (!options?.type) { + const type = bits[0]?.type; + if (typeof type === 'string') { + options = { ...options, type }; + } + } + return new File(bits, name, options); +} +async function getBytes(value) { + let parts = []; + if (typeof value === 'string' || + ArrayBuffer.isView(value) || // includes Uint8Array, Buffer, etc. + value instanceof ArrayBuffer) { + parts.push(value); + } + else if (isBlobLike(value)) { + parts.push(await value.arrayBuffer()); + } + else if (isAsyncIterableIterator(value) // includes Readable, ReadableStream, etc. + ) { + for await (const chunk of value) { + parts.push(chunk); // TODO, consider validating? + } + } + else { + throw new Error(`Unexpected data type: ${typeof value}; constructor: ${value?.constructor + ?.name}; props: ${propsForError(value)}`); + } + return parts; +} +function propsForError(value) { + const props = Object.getOwnPropertyNames(value); + return `[${props.map((p) => `"${p}"`).join(', ')}]`; +} +function getName(value) { + return (getStringFromMaybeBuffer(value.name) || + getStringFromMaybeBuffer(value.filename) || + // For fs.ReadStream + getStringFromMaybeBuffer(value.path)?.split(/[\\/]/).pop()); +} +const getStringFromMaybeBuffer = (x) => { + if (typeof x === 'string') + return x; + if (typeof Buffer !== 'undefined' && x instanceof Buffer) + return String(x); + return undefined; +}; +const isAsyncIterableIterator = (value) => value != null && typeof value === 'object' && typeof value[Symbol.asyncIterator] === 'function'; +export const isMultipartBody = (body) => body && typeof body === 'object' && body.body && body[Symbol.toStringTag] === 'MultipartBody'; +/** + * Returns a multipart/form-data request if any part of the given request body contains a File / Blob value. + * Otherwise returns the request as is. 
+ */ +export const maybeMultipartFormRequestOptions = async (opts) => { + if (!hasUploadableValue(opts.body)) + return opts; + const form = await createForm(opts.body); + return getMultipartRequestOptions(form, opts); +}; +export const multipartFormRequestOptions = async (opts) => { + const form = await createForm(opts.body); + return getMultipartRequestOptions(form, opts); +}; +export const createForm = async (body) => { + const form = new FormData(); + await Promise.all(Object.entries(body || {}).map(([key, value]) => addFormValue(form, key, value))); + return form; +}; +const hasUploadableValue = (value) => { + if (isUploadable(value)) + return true; + if (Array.isArray(value)) + return value.some(hasUploadableValue); + if (value && typeof value === 'object') { + for (const k in value) { + if (hasUploadableValue(value[k])) + return true; + } + } + return false; +}; +const addFormValue = async (form, key, value) => { + if (value === undefined) + return; + if (value == null) { + throw new TypeError(`Received null for "${key}"; to pass null in FormData, you must use the string 'null'`); + } + // TODO: make nested formats configurable + if (typeof value === 'string' || typeof value === 'number' || typeof value === 'boolean') { + form.append(key, String(value)); + } + else if (isUploadable(value)) { + const file = await toFile(value); + form.append(key, file); + } + else if (Array.isArray(value)) { + await Promise.all(value.map((entry) => addFormValue(form, key + '[]', entry))); + } + else if (typeof value === 'object') { + await Promise.all(Object.entries(value).map(([name, prop]) => addFormValue(form, `${key}[${name}]`, prop))); + } + else { + throw new TypeError(`Invalid value given to form, expected a string, number, boolean, object, Array, File or Blob but got ${value} instead`); + } +}; +//# sourceMappingURL=uploads.mjs.map \ No newline at end of file diff --git a/node_modules/openai/uploads.mjs.map b/node_modules/openai/uploads.mjs.map new file mode 100644 index 0000000..e5f3219 --- /dev/null +++ b/node_modules/openai/uploads.mjs.map @@ -0,0 +1 @@ 
+{"version":3,"file":"uploads.mjs","sourceRoot":"","sources":["src/uploads.ts"],"names":[],"mappings":"OACO,EACL,QAAQ,EACR,IAAI,EAGJ,0BAA0B,EAE1B,cAAc,GACf;OAEM,EAAE,YAAY,EAAE;AAiDvB,MAAM,CAAC,MAAM,cAAc,GAAG,CAAC,KAAU,EAAyB,EAAE,CAClE,KAAK,IAAI,IAAI;IACb,OAAO,KAAK,KAAK,QAAQ;IACzB,OAAO,KAAK,CAAC,GAAG,KAAK,QAAQ;IAC7B,OAAO,KAAK,CAAC,IAAI,KAAK,UAAU,CAAC;AAEnC,MAAM,CAAC,MAAM,UAAU,GAAG,CAAC,KAAU,EAAqB,EAAE,CAC1D,KAAK,IAAI,IAAI;IACb,OAAO,KAAK,KAAK,QAAQ;IACzB,OAAO,KAAK,CAAC,IAAI,KAAK,QAAQ;IAC9B,OAAO,KAAK,CAAC,YAAY,KAAK,QAAQ;IACtC,UAAU,CAAC,KAAK,CAAC,CAAC;AAEpB;;;GAGG;AACH,MAAM,CAAC,MAAM,UAAU,GAAG,CAAC,KAAU,EAA+D,EAAE,CACpG,KAAK,IAAI,IAAI;IACb,OAAO,KAAK,KAAK,QAAQ;IACzB,OAAO,KAAK,CAAC,IAAI,KAAK,QAAQ;IAC9B,OAAO,KAAK,CAAC,IAAI,KAAK,QAAQ;IAC9B,OAAO,KAAK,CAAC,IAAI,KAAK,UAAU;IAChC,OAAO,KAAK,CAAC,KAAK,KAAK,UAAU;IACjC,OAAO,KAAK,CAAC,WAAW,KAAK,UAAU,CAAC;AAE1C,MAAM,CAAC,MAAM,YAAY,GAAG,CAAC,KAAU,EAAuB,EAAE;IAC9D,OAAO,UAAU,CAAC,KAAK,CAAC,IAAI,cAAc,CAAC,KAAK,CAAC,IAAI,cAAc,CAAC,KAAK,CAAC,CAAC;AAC7E,CAAC,CAAC;AAIF;;;;;;;;GAQG;AACH,MAAM,CAAC,KAAK,UAAU,MAAM,CAC1B,KAA6C,EAC7C,IAAgC,EAChC,OAAqC;IAErC,iCAAiC;IACjC,KAAK,GAAG,MAAM,KAAK,CAAC;IAEpB,4DAA4D;IAC5D,IAAI,UAAU,CAAC,KAAK,CAAC,EAAE;QACrB,OAAO,KAAK,CAAC;KACd;IAED,IAAI,cAAc,CAAC,KAAK,CAAC,EAAE;QACzB,MAAM,IAAI,GAAG,MAAM,KAAK,CAAC,IAAI,EAAE,CAAC;QAChC,IAAI,KAAJ,IAAI,GAAK,IAAI,GAAG,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,GAAG,EAAE,IAAI,cAAc,EAAC;QAE5E,8EAA8E;QAC9E,gFAAgF;QAChF,oEAAoE;QACpE,MAAM,IAAI,GAAG,UAAU,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,IAAI,CAAC,WAAW,EAAE,CAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC;QAE7E,OAAO,IAAI,IAAI,CAAC,IAAI,EAAE,IAAI,EAAE,OAAO,CAAC,CAAC;KACtC;IAED,MAAM,IAAI,GAAG,MAAM,QAAQ,CAAC,KAAK,CAAC,CAAC;IAEnC,IAAI,KAAJ,IAAI,GAAK,OAAO,CAAC,KAAK,CAAC,IAAI,cAAc,EAAC;IAE1C,IAAI,CAAC,OAAO,EAAE,IAAI,EAAE;QAClB,MAAM,IAAI,GAAI,IAAI,CAAC,CAAC,CAAS,EAAE,IAAI,CAAC;QACpC,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE;YAC5B,OAAO,GAAG,EAAE,GAAG,OAAO,EAAE,IAAI,EAAE,CAAC;SAChC;KACF;IAED,OAAO,IAAI,IAAI,CAAC,IAAI,EAAE,IAAI,EAAE,OAAO,CAAC,CAAC;AACvC,CAAC;AAED,KAAK,UAAU,QAAQ,CAAC,KAAkB;IACxC,IAAI,KAAK,GAAoB,EAAE,CAAC;IAChC,IACE,OAAO,KAAK,KAAK,QAAQ;QACzB,WAAW,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,oCAAoC;QACjE,KAAK,YAAY,WAAW,EAC5B;QACA,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;KACnB;SAAM,IAAI,UAAU,CAAC,KAAK,CAAC,EAAE;QAC5B,KAAK,CAAC,IAAI,CAAC,MAAM,KAAK,CAAC,WAAW,EAAE,CAAC,CAAC;KACvC;SAAM,IACL,uBAAuB,CAAC,KAAK,CAAC,CAAC,0CAA0C;MACzE;QACA,IAAI,KAAK,EAAE,MAAM,KAAK,IAAI,KAAK,EAAE;YAC/B,KAAK,CAAC,IAAI,CAAC,KAAiB,CAAC,CAAC,CAAC,6BAA6B;SAC7D;KACF;SAAM;QACL,MAAM,IAAI,KAAK,CACb,yBAAyB,OAAO,KAAK,kBAAkB,KAAK,EAAE,WAAW;YACvE,EAAE,IAAI,YAAY,aAAa,CAAC,KAAK,CAAC,EAAE,CAC3C,CAAC;KACH;IAED,OAAO,KAAK,CAAC;AACf,CAAC;AAED,SAAS,aAAa,CAAC,KAAU;IAC/B,MAAM,KAAK,GAAG,MAAM,CAAC,mBAAmB,CAAC,KAAK,CAAC,CAAC;IAChD,OAAO,IAAI,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC;AACtD,CAAC;AAED,SAAS,OAAO,CAAC,KAAU;IACzB,OAAO,CACL,wBAAwB,CAAC,KAAK,CAAC,IAAI,CAAC;QACpC,wBAAwB,CAAC,KAAK,CAAC,QAAQ,CAAC;QACxC,oBAAoB;QACpB,wBAAwB,CAAC,KAAK,CAAC,IAAI,CAAC,EAAE,KAAK,CAAC,OAAO,CAAC,CAAC,GAAG,EAAE,CAC3D,CAAC;AACJ,CAAC;AAED,MAAM,wBAAwB,GAAG,CAAC,CAA4B,EAAsB,EAAE;IACpF,IAAI,OAAO,CAAC,KAAK,QAAQ;QAAE,OAAO,CAAC,CAAC;IACpC,IAAI,OAAO,MAAM,KAAK,WAAW,IAAI,CAAC,YAAY,MAAM;QAAE,OAAO,MAAM,CAAC,CAAC,CAAC,CAAC;IAC3E,OAAO,SAAS,CAAC;AACnB,CAAC,CAAC;AAEF,MAAM,uBAAuB,GAAG,CAAC,KAAU,EAA2C,EAAE,CACtF,KAAK,IAAI,IAAI,IAAI,OAAO,KAAK,KAAK,QAAQ,IAAI,OAAO,KAAK,CAAC,MAAM,CAAC,aAAa,CAAC,KAAK,UAAU,CAAC;AAElG,MAAM,CAAC,MAAM,eAAe,GAAG,CAAC,IAAS,EAAyB,EAAE,CAClE,IAAI,IAAI,OAAO,IAAI,KAAK,QAAQ,IAAI
,IAAI,CAAC,IAAI,IAAI,IAAI,CAAC,MAAM,CAAC,WAAW,CAAC,KAAK,eAAe,CAAC;AAEhG;;;GAGG;AACH,MAAM,CAAC,MAAM,gCAAgC,GAAG,KAAK,EACnD,IAAuB,EACqB,EAAE;IAC9C,IAAI,CAAC,kBAAkB,CAAC,IAAI,CAAC,IAAI,CAAC;QAAE,OAAO,IAAI,CAAC;IAEhD,MAAM,IAAI,GAAG,MAAM,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IACzC,OAAO,0BAA0B,CAAC,IAAI,EAAE,IAAI,CAAC,CAAC;AAChD,CAAC,CAAC;AAEF,MAAM,CAAC,MAAM,2BAA2B,GAAG,KAAK,EAC9C,IAAuB,EACqB,EAAE;IAC9C,MAAM,IAAI,GAAG,MAAM,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IACzC,OAAO,0BAA0B,CAAC,IAAI,EAAE,IAAI,CAAC,CAAC;AAChD,CAAC,CAAC;AAEF,MAAM,CAAC,MAAM,UAAU,GAAG,KAAK,EAA+B,IAAmB,EAAqB,EAAE;IACtG,MAAM,IAAI,GAAG,IAAI,QAAQ,EAAE,CAAC;IAC5B,MAAM,OAAO,CAAC,GAAG,CAAC,MAAM,CAAC,OAAO,CAAC,IAAI,IAAI,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG,EAAE,KAAK,CAAC,EAAE,EAAE,CAAC,YAAY,CAAC,IAAI,EAAE,GAAG,EAAE,KAAK,CAAC,CAAC,CAAC,CAAC;IACpG,OAAO,IAAI,CAAC;AACd,CAAC,CAAC;AAEF,MAAM,kBAAkB,GAAG,CAAC,KAAc,EAAW,EAAE;IACrD,IAAI,YAAY,CAAC,KAAK,CAAC;QAAE,OAAO,IAAI,CAAC;IACrC,IAAI,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC;QAAE,OAAO,KAAK,CAAC,IAAI,CAAC,kBAAkB,CAAC,CAAC;IAChE,IAAI,KAAK,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;QACtC,KAAK,MAAM,CAAC,IAAI,KAAK,EAAE;YACrB,IAAI,kBAAkB,CAAE,KAAa,CAAC,CAAC,CAAC,CAAC;gBAAE,OAAO,IAAI,CAAC;SACxD;KACF;IACD,OAAO,KAAK,CAAC;AACf,CAAC,CAAC;AAEF,MAAM,YAAY,GAAG,KAAK,EAAE,IAAc,EAAE,GAAW,EAAE,KAAc,EAAiB,EAAE;IACxF,IAAI,KAAK,KAAK,SAAS;QAAE,OAAO;IAChC,IAAI,KAAK,IAAI,IAAI,EAAE;QACjB,MAAM,IAAI,SAAS,CACjB,sBAAsB,GAAG,6DAA6D,CACvF,CAAC;KACH;IAED,yCAAyC;IACzC,IAAI,OAAO,KAAK,KAAK,QAAQ,IAAI,OAAO,KAAK,KAAK,QAAQ,IAAI,OAAO,KAAK,KAAK,SAAS,EAAE;QACxF,IAAI,CAAC,MAAM,CAAC,GAAG,EAAE,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC;KACjC;SAAM,IAAI,YAAY,CAAC,KAAK,CAAC,EAAE;QAC9B,MAAM,IAAI,GAAG,MAAM,MAAM,CAAC,KAAK,CAAC,CAAC;QACjC,IAAI,CAAC,MAAM,CAAC,GAAG,EAAE,IAAY,CAAC,CAAC;KAChC;SAAM,IAAI,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC,EAAE;QAC/B,MAAM,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,KAAK,EAAE,EAAE,CAAC,YAAY,CAAC,IAAI,EAAE,GAAG,GAAG,IAAI,EAAE,KAAK,CAAC,CAAC,CAAC,CAAC;KAChF;SAAM,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;QACpC,MAAM,OAAO,CAAC,GAAG,CACf,MAAM,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,IAAI,EAAE,IAAI,CAAC,EAAE,EAAE,CAAC,YAAY,CAAC,IAAI,EAAE,GAAG,GAAG,IAAI,IAAI,GAAG,EAAE,IAAI,CAAC,CAAC,CACzF,CAAC;KACH;SAAM;QACL,MAAM,IAAI,SAAS,CACjB,wGAAwG,KAAK,UAAU,CACxH,CAAC;KACH;AACH,CAAC,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/version.d.ts b/node_modules/openai/version.d.ts new file mode 100644 index 0000000..0545491 --- /dev/null +++ b/node_modules/openai/version.d.ts @@ -0,0 +1,2 @@ +export declare const VERSION = "4.77.0"; +//# sourceMappingURL=version.d.ts.map \ No newline at end of file diff --git a/node_modules/openai/version.d.ts.map b/node_modules/openai/version.d.ts.map new file mode 100644 index 0000000..e072bdd --- /dev/null +++ b/node_modules/openai/version.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"version.d.ts","sourceRoot":"","sources":["src/version.ts"],"names":[],"mappings":"AAAA,eAAO,MAAM,OAAO,WAAW,CAAC"} \ No newline at end of file diff --git a/node_modules/openai/version.js b/node_modules/openai/version.js new file mode 100644 index 0000000..e7b8af8 --- /dev/null +++ b/node_modules/openai/version.js @@ -0,0 +1,5 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.VERSION = void 0; +exports.VERSION = '4.77.0'; // x-release-please-version +//# sourceMappingURL=version.js.map \ No newline at end of file diff --git a/node_modules/openai/version.js.map b/node_modules/openai/version.js.map new file mode 100644 index 0000000..e9a6b4f --- /dev/null +++ b/node_modules/openai/version.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"version.js","sourceRoot":"","sources":["src/version.ts"],"names":[],"mappings":";;;AAAa,QAAA,OAAO,GAAG,QAAQ,CAAC,CAAC,2BAA2B"} \ No newline at end of file diff --git a/node_modules/openai/version.mjs b/node_modules/openai/version.mjs new file mode 100644 index 0000000..39994dd --- /dev/null +++ b/node_modules/openai/version.mjs @@ -0,0 +1,2 @@ +export const VERSION = '4.77.0'; // x-release-please-version +//# sourceMappingURL=version.mjs.map \ No newline at end of file diff --git a/node_modules/openai/version.mjs.map b/node_modules/openai/version.mjs.map new file mode 100644 index 0000000..0c09c72 --- /dev/null +++ b/node_modules/openai/version.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"version.mjs","sourceRoot":"","sources":["src/version.ts"],"names":[],"mappings":"AAAA,MAAM,CAAC,MAAM,OAAO,GAAG,QAAQ,CAAC,CAAC,2BAA2B"} \ No newline at end of file diff --git a/node_modules/tr46/.npmignore b/node_modules/tr46/.npmignore new file mode 100644 index 0000000..96e9161 --- /dev/null +++ b/node_modules/tr46/.npmignore @@ -0,0 +1,4 @@ +scripts/ +test/ + +!lib/mapping_table.json diff --git a/node_modules/tr46/index.js b/node_modules/tr46/index.js new file mode 100644 index 0000000..9ce12ca --- /dev/null +++ b/node_modules/tr46/index.js @@ -0,0 +1,193 @@ +"use strict"; + +var punycode = require("punycode"); +var mappingTable = require("./lib/mappingTable.json"); + +var PROCESSING_OPTIONS = { + TRANSITIONAL: 0, + NONTRANSITIONAL: 1 +}; + +function normalize(str) { // fix bug in v8 + return str.split('\u0000').map(function (s) { return s.normalize('NFC'); }).join('\u0000'); +} + +function findStatus(val) { + var start = 0; + var end = mappingTable.length - 1; + + while (start <= end) { + var mid = Math.floor((start + end) / 2); + + var target = mappingTable[mid]; + if (target[0][0] <= val && target[0][1] >= val) { + return target; + } else if (target[0][0] > val) { + end = mid - 1; + } else { + start = mid + 1; + } + } + + return null; +} + +var regexAstralSymbols = /[\uD800-\uDBFF][\uDC00-\uDFFF]/g; + +function countSymbols(string) { + return string + // replace every surrogate pair with a BMP symbol + .replace(regexAstralSymbols, '_') + // then get the length + .length; +} + +function mapChars(domain_name, useSTD3, processing_option) { + var hasError = false; + var processed = ""; + + var len = countSymbols(domain_name); + for (var i = 0; i < len; ++i) { + var codePoint = domain_name.codePointAt(i); + var status = findStatus(codePoint); + + switch (status[1]) { + case "disallowed": + hasError = true; + processed += String.fromCodePoint(codePoint); + break; + case "ignored": + break; + case "mapped": + processed += String.fromCodePoint.apply(String, status[2]); + break; + case "deviation": + if (processing_option === PROCESSING_OPTIONS.TRANSITIONAL) { + processed += String.fromCodePoint.apply(String, status[2]); + } else { + processed += String.fromCodePoint(codePoint); + } + break; + case "valid": + processed += String.fromCodePoint(codePoint); + break; + case "disallowed_STD3_mapped": + if (useSTD3) { + hasError = true; + processed += String.fromCodePoint(codePoint); + } else { + processed += String.fromCodePoint.apply(String, status[2]); + } + break; + case "disallowed_STD3_valid": + if (useSTD3) { + hasError = true; + } + + processed += String.fromCodePoint(codePoint); + break; + } + } + + return { + string: processed, + error: hasError + }; +} + +var combiningMarksRegex = 
/[\u0300-\u036F\u0483-\u0489\u0591-\u05BD\u05BF\u05C1\u05C2\u05C4\u05C5\u05C7\u0610-\u061A\u064B-\u065F\u0670\u06D6-\u06DC\u06DF-\u06E4\u06E7\u06E8\u06EA-\u06ED\u0711\u0730-\u074A\u07A6-\u07B0\u07EB-\u07F3\u0816-\u0819\u081B-\u0823\u0825-\u0827\u0829-\u082D\u0859-\u085B\u08E4-\u0903\u093A-\u093C\u093E-\u094F\u0951-\u0957\u0962\u0963\u0981-\u0983\u09BC\u09BE-\u09C4\u09C7\u09C8\u09CB-\u09CD\u09D7\u09E2\u09E3\u0A01-\u0A03\u0A3C\u0A3E-\u0A42\u0A47\u0A48\u0A4B-\u0A4D\u0A51\u0A70\u0A71\u0A75\u0A81-\u0A83\u0ABC\u0ABE-\u0AC5\u0AC7-\u0AC9\u0ACB-\u0ACD\u0AE2\u0AE3\u0B01-\u0B03\u0B3C\u0B3E-\u0B44\u0B47\u0B48\u0B4B-\u0B4D\u0B56\u0B57\u0B62\u0B63\u0B82\u0BBE-\u0BC2\u0BC6-\u0BC8\u0BCA-\u0BCD\u0BD7\u0C00-\u0C03\u0C3E-\u0C44\u0C46-\u0C48\u0C4A-\u0C4D\u0C55\u0C56\u0C62\u0C63\u0C81-\u0C83\u0CBC\u0CBE-\u0CC4\u0CC6-\u0CC8\u0CCA-\u0CCD\u0CD5\u0CD6\u0CE2\u0CE3\u0D01-\u0D03\u0D3E-\u0D44\u0D46-\u0D48\u0D4A-\u0D4D\u0D57\u0D62\u0D63\u0D82\u0D83\u0DCA\u0DCF-\u0DD4\u0DD6\u0DD8-\u0DDF\u0DF2\u0DF3\u0E31\u0E34-\u0E3A\u0E47-\u0E4E\u0EB1\u0EB4-\u0EB9\u0EBB\u0EBC\u0EC8-\u0ECD\u0F18\u0F19\u0F35\u0F37\u0F39\u0F3E\u0F3F\u0F71-\u0F84\u0F86\u0F87\u0F8D-\u0F97\u0F99-\u0FBC\u0FC6\u102B-\u103E\u1056-\u1059\u105E-\u1060\u1062-\u1064\u1067-\u106D\u1071-\u1074\u1082-\u108D\u108F\u109A-\u109D\u135D-\u135F\u1712-\u1714\u1732-\u1734\u1752\u1753\u1772\u1773\u17B4-\u17D3\u17DD\u180B-\u180D\u18A9\u1920-\u192B\u1930-\u193B\u19B0-\u19C0\u19C8\u19C9\u1A17-\u1A1B\u1A55-\u1A5E\u1A60-\u1A7C\u1A7F\u1AB0-\u1ABE\u1B00-\u1B04\u1B34-\u1B44\u1B6B-\u1B73\u1B80-\u1B82\u1BA1-\u1BAD\u1BE6-\u1BF3\u1C24-\u1C37\u1CD0-\u1CD2\u1CD4-\u1CE8\u1CED\u1CF2-\u1CF4\u1CF8\u1CF9\u1DC0-\u1DF5\u1DFC-\u1DFF\u20D0-\u20F0\u2CEF-\u2CF1\u2D7F\u2DE0-\u2DFF\u302A-\u302F\u3099\u309A\uA66F-\uA672\uA674-\uA67D\uA69F\uA6F0\uA6F1\uA802\uA806\uA80B\uA823-\uA827\uA880\uA881\uA8B4-\uA8C4\uA8E0-\uA8F1\uA926-\uA92D\uA947-\uA953\uA980-\uA983\uA9B3-\uA9C0\uA9E5\uAA29-\uAA36\uAA43\uAA4C\uAA4D\uAA7B-\uAA7D\uAAB0\uAAB2-\uAAB4\uAAB7\uAAB8\uAABE\uAABF\uAAC1\uAAEB-\uAAEF\uAAF5\uAAF6\uABE3-\uABEA\uABEC\uABED\uFB1E\uFE00-\uFE0F\uFE20-\uFE2D]|\uD800[\uDDFD\uDEE0\uDF76-\uDF7A]|\uD802[\uDE01-\uDE03\uDE05\uDE06\uDE0C-\uDE0F\uDE38-\uDE3A\uDE3F\uDEE5\uDEE6]|\uD804[\uDC00-\uDC02\uDC38-\uDC46\uDC7F-\uDC82\uDCB0-\uDCBA\uDD00-\uDD02\uDD27-\uDD34\uDD73\uDD80-\uDD82\uDDB3-\uDDC0\uDE2C-\uDE37\uDEDF-\uDEEA\uDF01-\uDF03\uDF3C\uDF3E-\uDF44\uDF47\uDF48\uDF4B-\uDF4D\uDF57\uDF62\uDF63\uDF66-\uDF6C\uDF70-\uDF74]|\uD805[\uDCB0-\uDCC3\uDDAF-\uDDB5\uDDB8-\uDDC0\uDE30-\uDE40\uDEAB-\uDEB7]|\uD81A[\uDEF0-\uDEF4\uDF30-\uDF36]|\uD81B[\uDF51-\uDF7E\uDF8F-\uDF92]|\uD82F[\uDC9D\uDC9E]|\uD834[\uDD65-\uDD69\uDD6D-\uDD72\uDD7B-\uDD82\uDD85-\uDD8B\uDDAA-\uDDAD\uDE42-\uDE44]|\uD83A[\uDCD0-\uDCD6]|\uDB40[\uDD00-\uDDEF]/; + +function validateLabel(label, processing_option) { + if (label.substr(0, 4) === "xn--") { + label = punycode.toUnicode(label); + processing_option = PROCESSING_OPTIONS.NONTRANSITIONAL; + } + + var error = false; + + if (normalize(label) !== label || + (label[3] === "-" && label[4] === "-") || + label[0] === "-" || label[label.length - 1] === "-" || + label.indexOf(".") !== -1 || + label.search(combiningMarksRegex) === 0) { + error = true; + } + + var len = countSymbols(label); + for (var i = 0; i < len; ++i) { + var status = findStatus(label.codePointAt(i)); + if ((processing === PROCESSING_OPTIONS.TRANSITIONAL && status[1] !== "valid") || + (processing === PROCESSING_OPTIONS.NONTRANSITIONAL && + status[1] !== "valid" && status[1] !== "deviation")) { + error = true; + break; + } + } + + return { + label: label, + 
error: error + }; +} + +function processing(domain_name, useSTD3, processing_option) { + var result = mapChars(domain_name, useSTD3, processing_option); + result.string = normalize(result.string); + + var labels = result.string.split("."); + for (var i = 0; i < labels.length; ++i) { + try { + var validation = validateLabel(labels[i]); + labels[i] = validation.label; + result.error = result.error || validation.error; + } catch(e) { + result.error = true; + } + } + + return { + string: labels.join("."), + error: result.error + }; +} + +module.exports.toASCII = function(domain_name, useSTD3, processing_option, verifyDnsLength) { + var result = processing(domain_name, useSTD3, processing_option); + var labels = result.string.split("."); + labels = labels.map(function(l) { + try { + return punycode.toASCII(l); + } catch(e) { + result.error = true; + return l; + } + }); + + if (verifyDnsLength) { + var total = labels.slice(0, labels.length - 1).join(".").length; + if (total.length > 253 || total.length === 0) { + result.error = true; + } + + for (var i=0; i < labels.length; ++i) { + if (labels.length > 63 || labels.length === 0) { + result.error = true; + break; + } + } + } + + if (result.error) return null; + return labels.join("."); +}; + +module.exports.toUnicode = function(domain_name, useSTD3) { + var result = processing(domain_name, useSTD3, PROCESSING_OPTIONS.NONTRANSITIONAL); + + return { + domain: result.string, + error: result.error + }; +}; + +module.exports.PROCESSING_OPTIONS = PROCESSING_OPTIONS; diff --git a/node_modules/tr46/lib/.gitkeep b/node_modules/tr46/lib/.gitkeep new file mode 100644 index 0000000..e69de29 diff --git a/node_modules/tr46/lib/mappingTable.json b/node_modules/tr46/lib/mappingTable.json new file mode 100644 index 0000000..89cf19a --- /dev/null +++ b/node_modules/tr46/lib/mappingTable.json @@ -0,0 +1 @@ 
+[[[0,44],"disallowed_STD3_valid"],[[45,46],"valid"],[[47,47],"disallowed_STD3_valid"],[[48,57],"valid"],[[58,64],"disallowed_STD3_valid"],[[65,65],"mapped",[97]],[[66,66],"mapped",[98]],[[67,67],"mapped",[99]],[[68,68],"mapped",[100]],[[69,69],"mapped",[101]],[[70,70],"mapped",[102]],[[71,71],"mapped",[103]],[[72,72],"mapped",[104]],[[73,73],"mapped",[105]],[[74,74],"mapped",[106]],[[75,75],"mapped",[107]],[[76,76],"mapped",[108]],[[77,77],"mapped",[109]],[[78,78],"mapped",[110]],[[79,79],"mapped",[111]],[[80,80],"mapped",[112]],[[81,81],"mapped",[113]],[[82,82],"mapped",[114]],[[83,83],"mapped",[115]],[[84,84],"mapped",[116]],[[85,85],"mapped",[117]],[[86,86],"mapped",[118]],[[87,87],"mapped",[119]],[[88,88],"mapped",[120]],[[89,89],"mapped",[121]],[[90,90],"mapped",[122]],[[91,96],"disallowed_STD3_valid"],[[97,122],"valid"],[[123,127],"disallowed_STD3_valid"],[[128,159],"disallowed"],[[160,160],"disallowed_STD3_mapped",[32]],[[161,167],"valid",[],"NV8"],[[168,168],"disallowed_STD3_mapped",[32,776]],[[169,169],"valid",[],"NV8"],[[170,170],"mapped",[97]],[[171,172],"valid",[],"NV8"],[[173,173],"ignored"],[[174,174],"valid",[],"NV8"],[[175,175],"disallowed_STD3_mapped",[32,772]],[[176,177],"valid",[],"NV8"],[[178,178],"mapped",[50]],[[179,179],"mapped",[51]],[[180,180],"disallowed_STD3_mapped",[32,769]],[[181,181],"mapped",[956]],[[182,182],"valid",[],"NV8"],[[183,183],"valid"],[[184,184],"disallowed_STD3_mapped",[32,807]],[[185,185],"mapped",[49]],[[186,186],"mapped",[111]],[[187,187],"valid",[],"NV8"],[[188,188],"mapped",[49,8260,52]],[[189,189],"mapped",[49,8260,50]],[[190,190],"mapped",[51,8260,52]],[[191,191],"valid",[],"NV8"],[[192,192],"mapped",[224]],[[193,193],"mapped",[225]],[[194,194],"mapped",[226]],[[195,195],"mapped",[227]],[[196,196],"mapped",[228]],[[197,197],"mapped",[229]],[[198,198],"mapped",[230]],[[199,199],"mapped",[231]],[[200,200],"mapped",[232]],[[201,201],"mapped",[233]],[[202,202],"mapped",[234]],[[203,203],"mapped",[235]],[[204,204],"mapped",[236]],[[205,205],"mapped",[237]],[[206,206],"mapped",[238]],[[207,207],"mapped",[239]],[[208,208],"mapped",[240]],[[209,209],"mapped",[241]],[[210,210],"mapped",[242]],[[211,211],"mapped",[243]],[[212,212],"mapped",[244]],[[213,213],"mapped",[245]],[[214,214],"mapped",[246]],[[215,215],"valid",[],"NV8"],[[216,216],"mapped",[248]],[[217,217],"mapped",[249]],[[218,218],"mapped",[250]],[[219,219],"mapped",[251]],[[220,220],"mapped",[252]],[[221,221],"mapped",[253]],[[222,222],"mapped",[254]],[[223,223],"deviation",[115,115]],[[224,246],"valid"],[[247,247],"valid",[],"NV8"],[[248,255],"valid"],[[256,256],"mapped",[257]],[[257,257],"valid"],[[258,258],"mapped",[259]],[[259,259],"valid"],[[260,260],"mapped",[261]],[[261,261],"valid"],[[262,262],"mapped",[263]],[[263,263],"valid"],[[264,264],"mapped",[265]],[[265,265],"valid"],[[266,266],"mapped",[267]],[[267,267],"valid"],[[268,268],"mapped",[269]],[[269,269],"valid"],[[270,270],"mapped",[271]],[[271,271],"valid"],[[272,272],"mapped",[273]],[[273,273],"valid"],[[274,274],"mapped",[275]],[[275,275],"valid"],[[276,276],"mapped",[277]],[[277,277],"valid"],[[278,278],"mapped",[279]],[[279,279],"valid"],[[280,280],"mapped",[281]],[[281,281],"valid"],[[282,282],"mapped",[283]],[[283,283],"valid"],[[284,284],"mapped",[285]],[[285,285],"valid"],[[286,286],"mapped",[287]],[[287,287],"valid"],[[288,288],"mapped",[289]],[[289,289],"valid"],[[290,290],"mapped",[291]],[[291,291],"valid"],[[292,292],"mapped",[293]],[[293,293],"valid"],[[294,294],"mapped",[295]],[[295,295],"valid"],[[296,296],"m
apped",[297]],[[297,297],"valid"],[[298,298],"mapped",[299]],[[299,299],"valid"],[[300,300],"mapped",[301]],[[301,301],"valid"],[[302,302],"mapped",[303]],[[303,303],"valid"],[[304,304],"mapped",[105,775]],[[305,305],"valid"],[[306,307],"mapped",[105,106]],[[308,308],"mapped",[309]],[[309,309],"valid"],[[310,310],"mapped",[311]],[[311,312],"valid"],[[313,313],"mapped",[314]],[[314,314],"valid"],[[315,315],"mapped",[316]],[[316,316],"valid"],[[317,317],"mapped",[318]],[[318,318],"valid"],[[319,320],"mapped",[108,183]],[[321,321],"mapped",[322]],[[322,322],"valid"],[[323,323],"mapped",[324]],[[324,324],"valid"],[[325,325],"mapped",[326]],[[326,326],"valid"],[[327,327],"mapped",[328]],[[328,328],"valid"],[[329,329],"mapped",[700,110]],[[330,330],"mapped",[331]],[[331,331],"valid"],[[332,332],"mapped",[333]],[[333,333],"valid"],[[334,334],"mapped",[335]],[[335,335],"valid"],[[336,336],"mapped",[337]],[[337,337],"valid"],[[338,338],"mapped",[339]],[[339,339],"valid"],[[340,340],"mapped",[341]],[[341,341],"valid"],[[342,342],"mapped",[343]],[[343,343],"valid"],[[344,344],"mapped",[345]],[[345,345],"valid"],[[346,346],"mapped",[347]],[[347,347],"valid"],[[348,348],"mapped",[349]],[[349,349],"valid"],[[350,350],"mapped",[351]],[[351,351],"valid"],[[352,352],"mapped",[353]],[[353,353],"valid"],[[354,354],"mapped",[355]],[[355,355],"valid"],[[356,356],"mapped",[357]],[[357,357],"valid"],[[358,358],"mapped",[359]],[[359,359],"valid"],[[360,360],"mapped",[361]],[[361,361],"valid"],[[362,362],"mapped",[363]],[[363,363],"valid"],[[364,364],"mapped",[365]],[[365,365],"valid"],[[366,366],"mapped",[367]],[[367,367],"valid"],[[368,368],"mapped",[369]],[[369,369],"valid"],[[370,370],"mapped",[371]],[[371,371],"valid"],[[372,372],"mapped",[373]],[[373,373],"valid"],[[374,374],"mapped",[375]],[[375,375],"valid"],[[376,376],"mapped",[255]],[[377,377],"mapped",[378]],[[378,378],"valid"],[[379,379],"mapped",[380]],[[380,380],"valid"],[[381,381],"mapped",[382]],[[382,382],"valid"],[[383,383],"mapped",[115]],[[384,384],"valid"],[[385,385],"mapped",[595]],[[386,386],"mapped",[387]],[[387,387],"valid"],[[388,388],"mapped",[389]],[[389,389],"valid"],[[390,390],"mapped",[596]],[[391,391],"mapped",[392]],[[392,392],"valid"],[[393,393],"mapped",[598]],[[394,394],"mapped",[599]],[[395,395],"mapped",[396]],[[396,397],"valid"],[[398,398],"mapped",[477]],[[399,399],"mapped",[601]],[[400,400],"mapped",[603]],[[401,401],"mapped",[402]],[[402,402],"valid"],[[403,403],"mapped",[608]],[[404,404],"mapped",[611]],[[405,405],"valid"],[[406,406],"mapped",[617]],[[407,407],"mapped",[616]],[[408,408],"mapped",[409]],[[409,411],"valid"],[[412,412],"mapped",[623]],[[413,413],"mapped",[626]],[[414,414],"valid"],[[415,415],"mapped",[629]],[[416,416],"mapped",[417]],[[417,417],"valid"],[[418,418],"mapped",[419]],[[419,419],"valid"],[[420,420],"mapped",[421]],[[421,421],"valid"],[[422,422],"mapped",[640]],[[423,423],"mapped",[424]],[[424,424],"valid"],[[425,425],"mapped",[643]],[[426,427],"valid"],[[428,428],"mapped",[429]],[[429,429],"valid"],[[430,430],"mapped",[648]],[[431,431],"mapped",[432]],[[432,432],"valid"],[[433,433],"mapped",[650]],[[434,434],"mapped",[651]],[[435,435],"mapped",[436]],[[436,436],"valid"],[[437,437],"mapped",[438]],[[438,438],"valid"],[[439,439],"mapped",[658]],[[440,440],"mapped",[441]],[[441,443],"valid"],[[444,444],"mapped",[445]],[[445,451],"valid"],[[452,454],"mapped",[100,382]],[[455,457],"mapped",[108,106]],[[458,460],"mapped",[110,106]],[[461,461],"mapped",[462]],[[462,462],"valid"],[[463,463],"mapped",[464]
],[[464,464],"valid"],[[465,465],"mapped",[466]],[[466,466],"valid"],[[467,467],"mapped",[468]],[[468,468],"valid"],[[469,469],"mapped",[470]],[[470,470],"valid"],[[471,471],"mapped",[472]],[[472,472],"valid"],[[473,473],"mapped",[474]],[[474,474],"valid"],[[475,475],"mapped",[476]],[[476,477],"valid"],[[478,478],"mapped",[479]],[[479,479],"valid"],[[480,480],"mapped",[481]],[[481,481],"valid"],[[482,482],"mapped",[483]],[[483,483],"valid"],[[484,484],"mapped",[485]],[[485,485],"valid"],[[486,486],"mapped",[487]],[[487,487],"valid"],[[488,488],"mapped",[489]],[[489,489],"valid"],[[490,490],"mapped",[491]],[[491,491],"valid"],[[492,492],"mapped",[493]],[[493,493],"valid"],[[494,494],"mapped",[495]],[[495,496],"valid"],[[497,499],"mapped",[100,122]],[[500,500],"mapped",[501]],[[501,501],"valid"],[[502,502],"mapped",[405]],[[503,503],"mapped",[447]],[[504,504],"mapped",[505]],[[505,505],"valid"],[[506,506],"mapped",[507]],[[507,507],"valid"],[[508,508],"mapped",[509]],[[509,509],"valid"],[[510,510],"mapped",[511]],[[511,511],"valid"],[[512,512],"mapped",[513]],[[513,513],"valid"],[[514,514],"mapped",[515]],[[515,515],"valid"],[[516,516],"mapped",[517]],[[517,517],"valid"],[[518,518],"mapped",[519]],[[519,519],"valid"],[[520,520],"mapped",[521]],[[521,521],"valid"],[[522,522],"mapped",[523]],[[523,523],"valid"],[[524,524],"mapped",[525]],[[525,525],"valid"],[[526,526],"mapped",[527]],[[527,527],"valid"],[[528,528],"mapped",[529]],[[529,529],"valid"],[[530,530],"mapped",[531]],[[531,531],"valid"],[[532,532],"mapped",[533]],[[533,533],"valid"],[[534,534],"mapped",[535]],[[535,535],"valid"],[[536,536],"mapped",[537]],[[537,537],"valid"],[[538,538],"mapped",[539]],[[539,539],"valid"],[[540,540],"mapped",[541]],[[541,541],"valid"],[[542,542],"mapped",[543]],[[543,543],"valid"],[[544,544],"mapped",[414]],[[545,545],"valid"],[[546,546],"mapped",[547]],[[547,547],"valid"],[[548,548],"mapped",[549]],[[549,549],"valid"],[[550,550],"mapped",[551]],[[551,551],"valid"],[[552,552],"mapped",[553]],[[553,553],"valid"],[[554,554],"mapped",[555]],[[555,555],"valid"],[[556,556],"mapped",[557]],[[557,557],"valid"],[[558,558],"mapped",[559]],[[559,559],"valid"],[[560,560],"mapped",[561]],[[561,561],"valid"],[[562,562],"mapped",[563]],[[563,563],"valid"],[[564,566],"valid"],[[567,569],"valid"],[[570,570],"mapped",[11365]],[[571,571],"mapped",[572]],[[572,572],"valid"],[[573,573],"mapped",[410]],[[574,574],"mapped",[11366]],[[575,576],"valid"],[[577,577],"mapped",[578]],[[578,578],"valid"],[[579,579],"mapped",[384]],[[580,580],"mapped",[649]],[[581,581],"mapped",[652]],[[582,582],"mapped",[583]],[[583,583],"valid"],[[584,584],"mapped",[585]],[[585,585],"valid"],[[586,586],"mapped",[587]],[[587,587],"valid"],[[588,588],"mapped",[589]],[[589,589],"valid"],[[590,590],"mapped",[591]],[[591,591],"valid"],[[592,680],"valid"],[[681,685],"valid"],[[686,687],"valid"],[[688,688],"mapped",[104]],[[689,689],"mapped",[614]],[[690,690],"mapped",[106]],[[691,691],"mapped",[114]],[[692,692],"mapped",[633]],[[693,693],"mapped",[635]],[[694,694],"mapped",[641]],[[695,695],"mapped",[119]],[[696,696],"mapped",[121]],[[697,705],"valid"],[[706,709],"valid",[],"NV8"],[[710,721],"valid"],[[722,727],"valid",[],"NV8"],[[728,728],"disallowed_STD3_mapped",[32,774]],[[729,729],"disallowed_STD3_mapped",[32,775]],[[730,730],"disallowed_STD3_mapped",[32,778]],[[731,731],"disallowed_STD3_mapped",[32,808]],[[732,732],"disallowed_STD3_mapped",[32,771]],[[733,733],"disallowed_STD3_mapped",[32,779]],[[734,734],"valid",[],"NV8"],[[735,735],"valid",[],"NV
8"],[[736,736],"mapped",[611]],[[737,737],"mapped",[108]],[[738,738],"mapped",[115]],[[739,739],"mapped",[120]],[[740,740],"mapped",[661]],[[741,745],"valid",[],"NV8"],[[746,747],"valid",[],"NV8"],[[748,748],"valid"],[[749,749],"valid",[],"NV8"],[[750,750],"valid"],[[751,767],"valid",[],"NV8"],[[768,831],"valid"],[[832,832],"mapped",[768]],[[833,833],"mapped",[769]],[[834,834],"valid"],[[835,835],"mapped",[787]],[[836,836],"mapped",[776,769]],[[837,837],"mapped",[953]],[[838,846],"valid"],[[847,847],"ignored"],[[848,855],"valid"],[[856,860],"valid"],[[861,863],"valid"],[[864,865],"valid"],[[866,866],"valid"],[[867,879],"valid"],[[880,880],"mapped",[881]],[[881,881],"valid"],[[882,882],"mapped",[883]],[[883,883],"valid"],[[884,884],"mapped",[697]],[[885,885],"valid"],[[886,886],"mapped",[887]],[[887,887],"valid"],[[888,889],"disallowed"],[[890,890],"disallowed_STD3_mapped",[32,953]],[[891,893],"valid"],[[894,894],"disallowed_STD3_mapped",[59]],[[895,895],"mapped",[1011]],[[896,899],"disallowed"],[[900,900],"disallowed_STD3_mapped",[32,769]],[[901,901],"disallowed_STD3_mapped",[32,776,769]],[[902,902],"mapped",[940]],[[903,903],"mapped",[183]],[[904,904],"mapped",[941]],[[905,905],"mapped",[942]],[[906,906],"mapped",[943]],[[907,907],"disallowed"],[[908,908],"mapped",[972]],[[909,909],"disallowed"],[[910,910],"mapped",[973]],[[911,911],"mapped",[974]],[[912,912],"valid"],[[913,913],"mapped",[945]],[[914,914],"mapped",[946]],[[915,915],"mapped",[947]],[[916,916],"mapped",[948]],[[917,917],"mapped",[949]],[[918,918],"mapped",[950]],[[919,919],"mapped",[951]],[[920,920],"mapped",[952]],[[921,921],"mapped",[953]],[[922,922],"mapped",[954]],[[923,923],"mapped",[955]],[[924,924],"mapped",[956]],[[925,925],"mapped",[957]],[[926,926],"mapped",[958]],[[927,927],"mapped",[959]],[[928,928],"mapped",[960]],[[929,929],"mapped",[961]],[[930,930],"disallowed"],[[931,931],"mapped",[963]],[[932,932],"mapped",[964]],[[933,933],"mapped",[965]],[[934,934],"mapped",[966]],[[935,935],"mapped",[967]],[[936,936],"mapped",[968]],[[937,937],"mapped",[969]],[[938,938],"mapped",[970]],[[939,939],"mapped",[971]],[[940,961],"valid"],[[962,962],"deviation",[963]],[[963,974],"valid"],[[975,975],"mapped",[983]],[[976,976],"mapped",[946]],[[977,977],"mapped",[952]],[[978,978],"mapped",[965]],[[979,979],"mapped",[973]],[[980,980],"mapped",[971]],[[981,981],"mapped",[966]],[[982,982],"mapped",[960]],[[983,983],"valid"],[[984,984],"mapped",[985]],[[985,985],"valid"],[[986,986],"mapped",[987]],[[987,987],"valid"],[[988,988],"mapped",[989]],[[989,989],"valid"],[[990,990],"mapped",[991]],[[991,991],"valid"],[[992,992],"mapped",[993]],[[993,993],"valid"],[[994,994],"mapped",[995]],[[995,995],"valid"],[[996,996],"mapped",[997]],[[997,997],"valid"],[[998,998],"mapped",[999]],[[999,999],"valid"],[[1000,1000],"mapped",[1001]],[[1001,1001],"valid"],[[1002,1002],"mapped",[1003]],[[1003,1003],"valid"],[[1004,1004],"mapped",[1005]],[[1005,1005],"valid"],[[1006,1006],"mapped",[1007]],[[1007,1007],"valid"],[[1008,1008],"mapped",[954]],[[1009,1009],"mapped",[961]],[[1010,1010],"mapped",[963]],[[1011,1011],"valid"],[[1012,1012],"mapped",[952]],[[1013,1013],"mapped",[949]],[[1014,1014],"valid",[],"NV8"],[[1015,1015],"mapped",[1016]],[[1016,1016],"valid"],[[1017,1017],"mapped",[963]],[[1018,1018],"mapped",[1019]],[[1019,1019],"valid"],[[1020,1020],"valid"],[[1021,1021],"mapped",[891]],[[1022,1022],"mapped",[892]],[[1023,1023],"mapped",[893]],[[1024,1024],"mapped",[1104]],[[1025,1025],"mapped",[1105]],[[1026,1026],"mapped",[1106]],[[1027,1027],"ma
pped",[1107]],[[1028,1028],"mapped",[1108]],[[1029,1029],"mapped",[1109]],[[1030,1030],"mapped",[1110]],[[1031,1031],"mapped",[1111]],[[1032,1032],"mapped",[1112]],[[1033,1033],"mapped",[1113]],[[1034,1034],"mapped",[1114]],[[1035,1035],"mapped",[1115]],[[1036,1036],"mapped",[1116]],[[1037,1037],"mapped",[1117]],[[1038,1038],"mapped",[1118]],[[1039,1039],"mapped",[1119]],[[1040,1040],"mapped",[1072]],[[1041,1041],"mapped",[1073]],[[1042,1042],"mapped",[1074]],[[1043,1043],"mapped",[1075]],[[1044,1044],"mapped",[1076]],[[1045,1045],"mapped",[1077]],[[1046,1046],"mapped",[1078]],[[1047,1047],"mapped",[1079]],[[1048,1048],"mapped",[1080]],[[1049,1049],"mapped",[1081]],[[1050,1050],"mapped",[1082]],[[1051,1051],"mapped",[1083]],[[1052,1052],"mapped",[1084]],[[1053,1053],"mapped",[1085]],[[1054,1054],"mapped",[1086]],[[1055,1055],"mapped",[1087]],[[1056,1056],"mapped",[1088]],[[1057,1057],"mapped",[1089]],[[1058,1058],"mapped",[1090]],[[1059,1059],"mapped",[1091]],[[1060,1060],"mapped",[1092]],[[1061,1061],"mapped",[1093]],[[1062,1062],"mapped",[1094]],[[1063,1063],"mapped",[1095]],[[1064,1064],"mapped",[1096]],[[1065,1065],"mapped",[1097]],[[1066,1066],"mapped",[1098]],[[1067,1067],"mapped",[1099]],[[1068,1068],"mapped",[1100]],[[1069,1069],"mapped",[1101]],[[1070,1070],"mapped",[1102]],[[1071,1071],"mapped",[1103]],[[1072,1103],"valid"],[[1104,1104],"valid"],[[1105,1116],"valid"],[[1117,1117],"valid"],[[1118,1119],"valid"],[[1120,1120],"mapped",[1121]],[[1121,1121],"valid"],[[1122,1122],"mapped",[1123]],[[1123,1123],"valid"],[[1124,1124],"mapped",[1125]],[[1125,1125],"valid"],[[1126,1126],"mapped",[1127]],[[1127,1127],"valid"],[[1128,1128],"mapped",[1129]],[[1129,1129],"valid"],[[1130,1130],"mapped",[1131]],[[1131,1131],"valid"],[[1132,1132],"mapped",[1133]],[[1133,1133],"valid"],[[1134,1134],"mapped",[1135]],[[1135,1135],"valid"],[[1136,1136],"mapped",[1137]],[[1137,1137],"valid"],[[1138,1138],"mapped",[1139]],[[1139,1139],"valid"],[[1140,1140],"mapped",[1141]],[[1141,1141],"valid"],[[1142,1142],"mapped",[1143]],[[1143,1143],"valid"],[[1144,1144],"mapped",[1145]],[[1145,1145],"valid"],[[1146,1146],"mapped",[1147]],[[1147,1147],"valid"],[[1148,1148],"mapped",[1149]],[[1149,1149],"valid"],[[1150,1150],"mapped",[1151]],[[1151,1151],"valid"],[[1152,1152],"mapped",[1153]],[[1153,1153],"valid"],[[1154,1154],"valid",[],"NV8"],[[1155,1158],"valid"],[[1159,1159],"valid"],[[1160,1161],"valid",[],"NV8"],[[1162,1162],"mapped",[1163]],[[1163,1163],"valid"],[[1164,1164],"mapped",[1165]],[[1165,1165],"valid"],[[1166,1166],"mapped",[1167]],[[1167,1167],"valid"],[[1168,1168],"mapped",[1169]],[[1169,1169],"valid"],[[1170,1170],"mapped",[1171]],[[1171,1171],"valid"],[[1172,1172],"mapped",[1173]],[[1173,1173],"valid"],[[1174,1174],"mapped",[1175]],[[1175,1175],"valid"],[[1176,1176],"mapped",[1177]],[[1177,1177],"valid"],[[1178,1178],"mapped",[1179]],[[1179,1179],"valid"],[[1180,1180],"mapped",[1181]],[[1181,1181],"valid"],[[1182,1182],"mapped",[1183]],[[1183,1183],"valid"],[[1184,1184],"mapped",[1185]],[[1185,1185],"valid"],[[1186,1186],"mapped",[1187]],[[1187,1187],"valid"],[[1188,1188],"mapped",[1189]],[[1189,1189],"valid"],[[1190,1190],"mapped",[1191]],[[1191,1191],"valid"],[[1192,1192],"mapped",[1193]],[[1193,1193],"valid"],[[1194,1194],"mapped",[1195]],[[1195,1195],"valid"],[[1196,1196],"mapped",[1197]],[[1197,1197],"valid"],[[1198,1198],"mapped",[1199]],[[1199,1199],"valid"],[[1200,1200],"mapped",[1201]],[[1201,1201],"valid"],[[1202,1202],"mapped",[1203]],[[1203,1203],"valid"],[[1204,1204],"mapped",[1205]]
,[[1205,1205],"valid"],[[1206,1206],"mapped",[1207]],[[1207,1207],"valid"],[[1208,1208],"mapped",[1209]],[[1209,1209],"valid"],[[1210,1210],"mapped",[1211]],[[1211,1211],"valid"],[[1212,1212],"mapped",[1213]],[[1213,1213],"valid"],[[1214,1214],"mapped",[1215]],[[1215,1215],"valid"],[[1216,1216],"disallowed"],[[1217,1217],"mapped",[1218]],[[1218,1218],"valid"],[[1219,1219],"mapped",[1220]],[[1220,1220],"valid"],[[1221,1221],"mapped",[1222]],[[1222,1222],"valid"],[[1223,1223],"mapped",[1224]],[[1224,1224],"valid"],[[1225,1225],"mapped",[1226]],[[1226,1226],"valid"],[[1227,1227],"mapped",[1228]],[[1228,1228],"valid"],[[1229,1229],"mapped",[1230]],[[1230,1230],"valid"],[[1231,1231],"valid"],[[1232,1232],"mapped",[1233]],[[1233,1233],"valid"],[[1234,1234],"mapped",[1235]],[[1235,1235],"valid"],[[1236,1236],"mapped",[1237]],[[1237,1237],"valid"],[[1238,1238],"mapped",[1239]],[[1239,1239],"valid"],[[1240,1240],"mapped",[1241]],[[1241,1241],"valid"],[[1242,1242],"mapped",[1243]],[[1243,1243],"valid"],[[1244,1244],"mapped",[1245]],[[1245,1245],"valid"],[[1246,1246],"mapped",[1247]],[[1247,1247],"valid"],[[1248,1248],"mapped",[1249]],[[1249,1249],"valid"],[[1250,1250],"mapped",[1251]],[[1251,1251],"valid"],[[1252,1252],"mapped",[1253]],[[1253,1253],"valid"],[[1254,1254],"mapped",[1255]],[[1255,1255],"valid"],[[1256,1256],"mapped",[1257]],[[1257,1257],"valid"],[[1258,1258],"mapped",[1259]],[[1259,1259],"valid"],[[1260,1260],"mapped",[1261]],[[1261,1261],"valid"],[[1262,1262],"mapped",[1263]],[[1263,1263],"valid"],[[1264,1264],"mapped",[1265]],[[1265,1265],"valid"],[[1266,1266],"mapped",[1267]],[[1267,1267],"valid"],[[1268,1268],"mapped",[1269]],[[1269,1269],"valid"],[[1270,1270],"mapped",[1271]],[[1271,1271],"valid"],[[1272,1272],"mapped",[1273]],[[1273,1273],"valid"],[[1274,1274],"mapped",[1275]],[[1275,1275],"valid"],[[1276,1276],"mapped",[1277]],[[1277,1277],"valid"],[[1278,1278],"mapped",[1279]],[[1279,1279],"valid"],[[1280,1280],"mapped",[1281]],[[1281,1281],"valid"],[[1282,1282],"mapped",[1283]],[[1283,1283],"valid"],[[1284,1284],"mapped",[1285]],[[1285,1285],"valid"],[[1286,1286],"mapped",[1287]],[[1287,1287],"valid"],[[1288,1288],"mapped",[1289]],[[1289,1289],"valid"],[[1290,1290],"mapped",[1291]],[[1291,1291],"valid"],[[1292,1292],"mapped",[1293]],[[1293,1293],"valid"],[[1294,1294],"mapped",[1295]],[[1295,1295],"valid"],[[1296,1296],"mapped",[1297]],[[1297,1297],"valid"],[[1298,1298],"mapped",[1299]],[[1299,1299],"valid"],[[1300,1300],"mapped",[1301]],[[1301,1301],"valid"],[[1302,1302],"mapped",[1303]],[[1303,1303],"valid"],[[1304,1304],"mapped",[1305]],[[1305,1305],"valid"],[[1306,1306],"mapped",[1307]],[[1307,1307],"valid"],[[1308,1308],"mapped",[1309]],[[1309,1309],"valid"],[[1310,1310],"mapped",[1311]],[[1311,1311],"valid"],[[1312,1312],"mapped",[1313]],[[1313,1313],"valid"],[[1314,1314],"mapped",[1315]],[[1315,1315],"valid"],[[1316,1316],"mapped",[1317]],[[1317,1317],"valid"],[[1318,1318],"mapped",[1319]],[[1319,1319],"valid"],[[1320,1320],"mapped",[1321]],[[1321,1321],"valid"],[[1322,1322],"mapped",[1323]],[[1323,1323],"valid"],[[1324,1324],"mapped",[1325]],[[1325,1325],"valid"],[[1326,1326],"mapped",[1327]],[[1327,1327],"valid"],[[1328,1328],"disallowed"],[[1329,1329],"mapped",[1377]],[[1330,1330],"mapped",[1378]],[[1331,1331],"mapped",[1379]],[[1332,1332],"mapped",[1380]],[[1333,1333],"mapped",[1381]],[[1334,1334],"mapped",[1382]],[[1335,1335],"mapped",[1383]],[[1336,1336],"mapped",[1384]],[[1337,1337],"mapped",[1385]],[[1338,1338],"mapped",[1386]],[[1339,1339],"mapped",[1387]],[[1340
,1340],"mapped",[1388]],[[1341,1341],"mapped",[1389]],[[1342,1342],"mapped",[1390]],[[1343,1343],"mapped",[1391]],[[1344,1344],"mapped",[1392]],[[1345,1345],"mapped",[1393]],[[1346,1346],"mapped",[1394]],[[1347,1347],"mapped",[1395]],[[1348,1348],"mapped",[1396]],[[1349,1349],"mapped",[1397]],[[1350,1350],"mapped",[1398]],[[1351,1351],"mapped",[1399]],[[1352,1352],"mapped",[1400]],[[1353,1353],"mapped",[1401]],[[1354,1354],"mapped",[1402]],[[1355,1355],"mapped",[1403]],[[1356,1356],"mapped",[1404]],[[1357,1357],"mapped",[1405]],[[1358,1358],"mapped",[1406]],[[1359,1359],"mapped",[1407]],[[1360,1360],"mapped",[1408]],[[1361,1361],"mapped",[1409]],[[1362,1362],"mapped",[1410]],[[1363,1363],"mapped",[1411]],[[1364,1364],"mapped",[1412]],[[1365,1365],"mapped",[1413]],[[1366,1366],"mapped",[1414]],[[1367,1368],"disallowed"],[[1369,1369],"valid"],[[1370,1375],"valid",[],"NV8"],[[1376,1376],"disallowed"],[[1377,1414],"valid"],[[1415,1415],"mapped",[1381,1410]],[[1416,1416],"disallowed"],[[1417,1417],"valid",[],"NV8"],[[1418,1418],"valid",[],"NV8"],[[1419,1420],"disallowed"],[[1421,1422],"valid",[],"NV8"],[[1423,1423],"valid",[],"NV8"],[[1424,1424],"disallowed"],[[1425,1441],"valid"],[[1442,1442],"valid"],[[1443,1455],"valid"],[[1456,1465],"valid"],[[1466,1466],"valid"],[[1467,1469],"valid"],[[1470,1470],"valid",[],"NV8"],[[1471,1471],"valid"],[[1472,1472],"valid",[],"NV8"],[[1473,1474],"valid"],[[1475,1475],"valid",[],"NV8"],[[1476,1476],"valid"],[[1477,1477],"valid"],[[1478,1478],"valid",[],"NV8"],[[1479,1479],"valid"],[[1480,1487],"disallowed"],[[1488,1514],"valid"],[[1515,1519],"disallowed"],[[1520,1524],"valid"],[[1525,1535],"disallowed"],[[1536,1539],"disallowed"],[[1540,1540],"disallowed"],[[1541,1541],"disallowed"],[[1542,1546],"valid",[],"NV8"],[[1547,1547],"valid",[],"NV8"],[[1548,1548],"valid",[],"NV8"],[[1549,1551],"valid",[],"NV8"],[[1552,1557],"valid"],[[1558,1562],"valid"],[[1563,1563],"valid",[],"NV8"],[[1564,1564],"disallowed"],[[1565,1565],"disallowed"],[[1566,1566],"valid",[],"NV8"],[[1567,1567],"valid",[],"NV8"],[[1568,1568],"valid"],[[1569,1594],"valid"],[[1595,1599],"valid"],[[1600,1600],"valid",[],"NV8"],[[1601,1618],"valid"],[[1619,1621],"valid"],[[1622,1624],"valid"],[[1625,1630],"valid"],[[1631,1631],"valid"],[[1632,1641],"valid"],[[1642,1645],"valid",[],"NV8"],[[1646,1647],"valid"],[[1648,1652],"valid"],[[1653,1653],"mapped",[1575,1652]],[[1654,1654],"mapped",[1608,1652]],[[1655,1655],"mapped",[1735,1652]],[[1656,1656],"mapped",[1610,1652]],[[1657,1719],"valid"],[[1720,1721],"valid"],[[1722,1726],"valid"],[[1727,1727],"valid"],[[1728,1742],"valid"],[[1743,1743],"valid"],[[1744,1747],"valid"],[[1748,1748],"valid",[],"NV8"],[[1749,1756],"valid"],[[1757,1757],"disallowed"],[[1758,1758],"valid",[],"NV8"],[[1759,1768],"valid"],[[1769,1769],"valid",[],"NV8"],[[1770,1773],"valid"],[[1774,1775],"valid"],[[1776,1785],"valid"],[[1786,1790],"valid"],[[1791,1791],"valid"],[[1792,1805],"valid",[],"NV8"],[[1806,1806],"disallowed"],[[1807,1807],"disallowed"],[[1808,1836],"valid"],[[1837,1839],"valid"],[[1840,1866],"valid"],[[1867,1868],"disallowed"],[[1869,1871],"valid"],[[1872,1901],"valid"],[[1902,1919],"valid"],[[1920,1968],"valid"],[[1969,1969],"valid"],[[1970,1983],"disallowed"],[[1984,2037],"valid"],[[2038,2042],"valid",[],"NV8"],[[2043,2047],"disallowed"],[[2048,2093],"valid"],[[2094,2095],"disallowed"],[[2096,2110],"valid",[],"NV8"],[[2111,2111],"disallowed"],[[2112,2139],"valid"],[[2140,2141],"disallowed"],[[2142,2142],"valid",[],"NV8"],[[2143,2207],"disallowed"],[[2208,2208],"
valid"],[[2209,2209],"valid"],[[2210,2220],"valid"],[[2221,2226],"valid"],[[2227,2228],"valid"],[[2229,2274],"disallowed"],[[2275,2275],"valid"],[[2276,2302],"valid"],[[2303,2303],"valid"],[[2304,2304],"valid"],[[2305,2307],"valid"],[[2308,2308],"valid"],[[2309,2361],"valid"],[[2362,2363],"valid"],[[2364,2381],"valid"],[[2382,2382],"valid"],[[2383,2383],"valid"],[[2384,2388],"valid"],[[2389,2389],"valid"],[[2390,2391],"valid"],[[2392,2392],"mapped",[2325,2364]],[[2393,2393],"mapped",[2326,2364]],[[2394,2394],"mapped",[2327,2364]],[[2395,2395],"mapped",[2332,2364]],[[2396,2396],"mapped",[2337,2364]],[[2397,2397],"mapped",[2338,2364]],[[2398,2398],"mapped",[2347,2364]],[[2399,2399],"mapped",[2351,2364]],[[2400,2403],"valid"],[[2404,2405],"valid",[],"NV8"],[[2406,2415],"valid"],[[2416,2416],"valid",[],"NV8"],[[2417,2418],"valid"],[[2419,2423],"valid"],[[2424,2424],"valid"],[[2425,2426],"valid"],[[2427,2428],"valid"],[[2429,2429],"valid"],[[2430,2431],"valid"],[[2432,2432],"valid"],[[2433,2435],"valid"],[[2436,2436],"disallowed"],[[2437,2444],"valid"],[[2445,2446],"disallowed"],[[2447,2448],"valid"],[[2449,2450],"disallowed"],[[2451,2472],"valid"],[[2473,2473],"disallowed"],[[2474,2480],"valid"],[[2481,2481],"disallowed"],[[2482,2482],"valid"],[[2483,2485],"disallowed"],[[2486,2489],"valid"],[[2490,2491],"disallowed"],[[2492,2492],"valid"],[[2493,2493],"valid"],[[2494,2500],"valid"],[[2501,2502],"disallowed"],[[2503,2504],"valid"],[[2505,2506],"disallowed"],[[2507,2509],"valid"],[[2510,2510],"valid"],[[2511,2518],"disallowed"],[[2519,2519],"valid"],[[2520,2523],"disallowed"],[[2524,2524],"mapped",[2465,2492]],[[2525,2525],"mapped",[2466,2492]],[[2526,2526],"disallowed"],[[2527,2527],"mapped",[2479,2492]],[[2528,2531],"valid"],[[2532,2533],"disallowed"],[[2534,2545],"valid"],[[2546,2554],"valid",[],"NV8"],[[2555,2555],"valid",[],"NV8"],[[2556,2560],"disallowed"],[[2561,2561],"valid"],[[2562,2562],"valid"],[[2563,2563],"valid"],[[2564,2564],"disallowed"],[[2565,2570],"valid"],[[2571,2574],"disallowed"],[[2575,2576],"valid"],[[2577,2578],"disallowed"],[[2579,2600],"valid"],[[2601,2601],"disallowed"],[[2602,2608],"valid"],[[2609,2609],"disallowed"],[[2610,2610],"valid"],[[2611,2611],"mapped",[2610,2620]],[[2612,2612],"disallowed"],[[2613,2613],"valid"],[[2614,2614],"mapped",[2616,2620]],[[2615,2615],"disallowed"],[[2616,2617],"valid"],[[2618,2619],"disallowed"],[[2620,2620],"valid"],[[2621,2621],"disallowed"],[[2622,2626],"valid"],[[2627,2630],"disallowed"],[[2631,2632],"valid"],[[2633,2634],"disallowed"],[[2635,2637],"valid"],[[2638,2640],"disallowed"],[[2641,2641],"valid"],[[2642,2648],"disallowed"],[[2649,2649],"mapped",[2582,2620]],[[2650,2650],"mapped",[2583,2620]],[[2651,2651],"mapped",[2588,2620]],[[2652,2652],"valid"],[[2653,2653],"disallowed"],[[2654,2654],"mapped",[2603,2620]],[[2655,2661],"disallowed"],[[2662,2676],"valid"],[[2677,2677],"valid"],[[2678,2688],"disallowed"],[[2689,2691],"valid"],[[2692,2692],"disallowed"],[[2693,2699],"valid"],[[2700,2700],"valid"],[[2701,2701],"valid"],[[2702,2702],"disallowed"],[[2703,2705],"valid"],[[2706,2706],"disallowed"],[[2707,2728],"valid"],[[2729,2729],"disallowed"],[[2730,2736],"valid"],[[2737,2737],"disallowed"],[[2738,2739],"valid"],[[2740,2740],"disallowed"],[[2741,2745],"valid"],[[2746,2747],"disallowed"],[[2748,2757],"valid"],[[2758,2758],"disallowed"],[[2759,2761],"valid"],[[2762,2762],"disallowed"],[[2763,2765],"valid"],[[2766,2767],"disallowed"],[[2768,2768],"valid"],[[2769,2783],"disallowed"],[[2784,2784],"valid"],[[2785,2787],"valid"],
[[2788,2789],"disallowed"],[[2790,2799],"valid"],[[2800,2800],"valid",[],"NV8"],[[2801,2801],"valid",[],"NV8"],[[2802,2808],"disallowed"],[[2809,2809],"valid"],[[2810,2816],"disallowed"],[[2817,2819],"valid"],[[2820,2820],"disallowed"],[[2821,2828],"valid"],[[2829,2830],"disallowed"],[[2831,2832],"valid"],[[2833,2834],"disallowed"],[[2835,2856],"valid"],[[2857,2857],"disallowed"],[[2858,2864],"valid"],[[2865,2865],"disallowed"],[[2866,2867],"valid"],[[2868,2868],"disallowed"],[[2869,2869],"valid"],[[2870,2873],"valid"],[[2874,2875],"disallowed"],[[2876,2883],"valid"],[[2884,2884],"valid"],[[2885,2886],"disallowed"],[[2887,2888],"valid"],[[2889,2890],"disallowed"],[[2891,2893],"valid"],[[2894,2901],"disallowed"],[[2902,2903],"valid"],[[2904,2907],"disallowed"],[[2908,2908],"mapped",[2849,2876]],[[2909,2909],"mapped",[2850,2876]],[[2910,2910],"disallowed"],[[2911,2913],"valid"],[[2914,2915],"valid"],[[2916,2917],"disallowed"],[[2918,2927],"valid"],[[2928,2928],"valid",[],"NV8"],[[2929,2929],"valid"],[[2930,2935],"valid",[],"NV8"],[[2936,2945],"disallowed"],[[2946,2947],"valid"],[[2948,2948],"disallowed"],[[2949,2954],"valid"],[[2955,2957],"disallowed"],[[2958,2960],"valid"],[[2961,2961],"disallowed"],[[2962,2965],"valid"],[[2966,2968],"disallowed"],[[2969,2970],"valid"],[[2971,2971],"disallowed"],[[2972,2972],"valid"],[[2973,2973],"disallowed"],[[2974,2975],"valid"],[[2976,2978],"disallowed"],[[2979,2980],"valid"],[[2981,2983],"disallowed"],[[2984,2986],"valid"],[[2987,2989],"disallowed"],[[2990,2997],"valid"],[[2998,2998],"valid"],[[2999,3001],"valid"],[[3002,3005],"disallowed"],[[3006,3010],"valid"],[[3011,3013],"disallowed"],[[3014,3016],"valid"],[[3017,3017],"disallowed"],[[3018,3021],"valid"],[[3022,3023],"disallowed"],[[3024,3024],"valid"],[[3025,3030],"disallowed"],[[3031,3031],"valid"],[[3032,3045],"disallowed"],[[3046,3046],"valid"],[[3047,3055],"valid"],[[3056,3058],"valid",[],"NV8"],[[3059,3066],"valid",[],"NV8"],[[3067,3071],"disallowed"],[[3072,3072],"valid"],[[3073,3075],"valid"],[[3076,3076],"disallowed"],[[3077,3084],"valid"],[[3085,3085],"disallowed"],[[3086,3088],"valid"],[[3089,3089],"disallowed"],[[3090,3112],"valid"],[[3113,3113],"disallowed"],[[3114,3123],"valid"],[[3124,3124],"valid"],[[3125,3129],"valid"],[[3130,3132],"disallowed"],[[3133,3133],"valid"],[[3134,3140],"valid"],[[3141,3141],"disallowed"],[[3142,3144],"valid"],[[3145,3145],"disallowed"],[[3146,3149],"valid"],[[3150,3156],"disallowed"],[[3157,3158],"valid"],[[3159,3159],"disallowed"],[[3160,3161],"valid"],[[3162,3162],"valid"],[[3163,3167],"disallowed"],[[3168,3169],"valid"],[[3170,3171],"valid"],[[3172,3173],"disallowed"],[[3174,3183],"valid"],[[3184,3191],"disallowed"],[[3192,3199],"valid",[],"NV8"],[[3200,3200],"disallowed"],[[3201,3201],"valid"],[[3202,3203],"valid"],[[3204,3204],"disallowed"],[[3205,3212],"valid"],[[3213,3213],"disallowed"],[[3214,3216],"valid"],[[3217,3217],"disallowed"],[[3218,3240],"valid"],[[3241,3241],"disallowed"],[[3242,3251],"valid"],[[3252,3252],"disallowed"],[[3253,3257],"valid"],[[3258,3259],"disallowed"],[[3260,3261],"valid"],[[3262,3268],"valid"],[[3269,3269],"disallowed"],[[3270,3272],"valid"],[[3273,3273],"disallowed"],[[3274,3277],"valid"],[[3278,3284],"disallowed"],[[3285,3286],"valid"],[[3287,3293],"disallowed"],[[3294,3294],"valid"],[[3295,3295],"disallowed"],[[3296,3297],"valid"],[[3298,3299],"valid"],[[3300,3301],"disallowed"],[[3302,3311],"valid"],[[3312,3312],"disallowed"],[[3313,3314],"valid"],[[3315,3328],"disallowed"],[[3329,3329],"valid"],[[3330,3331],"valid"
],[[3332,3332],"disallowed"],[[3333,3340],"valid"],[[3341,3341],"disallowed"],[[3342,3344],"valid"],[[3345,3345],"disallowed"],[[3346,3368],"valid"],[[3369,3369],"valid"],[[3370,3385],"valid"],[[3386,3386],"valid"],[[3387,3388],"disallowed"],[[3389,3389],"valid"],[[3390,3395],"valid"],[[3396,3396],"valid"],[[3397,3397],"disallowed"],[[3398,3400],"valid"],[[3401,3401],"disallowed"],[[3402,3405],"valid"],[[3406,3406],"valid"],[[3407,3414],"disallowed"],[[3415,3415],"valid"],[[3416,3422],"disallowed"],[[3423,3423],"valid"],[[3424,3425],"valid"],[[3426,3427],"valid"],[[3428,3429],"disallowed"],[[3430,3439],"valid"],[[3440,3445],"valid",[],"NV8"],[[3446,3448],"disallowed"],[[3449,3449],"valid",[],"NV8"],[[3450,3455],"valid"],[[3456,3457],"disallowed"],[[3458,3459],"valid"],[[3460,3460],"disallowed"],[[3461,3478],"valid"],[[3479,3481],"disallowed"],[[3482,3505],"valid"],[[3506,3506],"disallowed"],[[3507,3515],"valid"],[[3516,3516],"disallowed"],[[3517,3517],"valid"],[[3518,3519],"disallowed"],[[3520,3526],"valid"],[[3527,3529],"disallowed"],[[3530,3530],"valid"],[[3531,3534],"disallowed"],[[3535,3540],"valid"],[[3541,3541],"disallowed"],[[3542,3542],"valid"],[[3543,3543],"disallowed"],[[3544,3551],"valid"],[[3552,3557],"disallowed"],[[3558,3567],"valid"],[[3568,3569],"disallowed"],[[3570,3571],"valid"],[[3572,3572],"valid",[],"NV8"],[[3573,3584],"disallowed"],[[3585,3634],"valid"],[[3635,3635],"mapped",[3661,3634]],[[3636,3642],"valid"],[[3643,3646],"disallowed"],[[3647,3647],"valid",[],"NV8"],[[3648,3662],"valid"],[[3663,3663],"valid",[],"NV8"],[[3664,3673],"valid"],[[3674,3675],"valid",[],"NV8"],[[3676,3712],"disallowed"],[[3713,3714],"valid"],[[3715,3715],"disallowed"],[[3716,3716],"valid"],[[3717,3718],"disallowed"],[[3719,3720],"valid"],[[3721,3721],"disallowed"],[[3722,3722],"valid"],[[3723,3724],"disallowed"],[[3725,3725],"valid"],[[3726,3731],"disallowed"],[[3732,3735],"valid"],[[3736,3736],"disallowed"],[[3737,3743],"valid"],[[3744,3744],"disallowed"],[[3745,3747],"valid"],[[3748,3748],"disallowed"],[[3749,3749],"valid"],[[3750,3750],"disallowed"],[[3751,3751],"valid"],[[3752,3753],"disallowed"],[[3754,3755],"valid"],[[3756,3756],"disallowed"],[[3757,3762],"valid"],[[3763,3763],"mapped",[3789,3762]],[[3764,3769],"valid"],[[3770,3770],"disallowed"],[[3771,3773],"valid"],[[3774,3775],"disallowed"],[[3776,3780],"valid"],[[3781,3781],"disallowed"],[[3782,3782],"valid"],[[3783,3783],"disallowed"],[[3784,3789],"valid"],[[3790,3791],"disallowed"],[[3792,3801],"valid"],[[3802,3803],"disallowed"],[[3804,3804],"mapped",[3755,3737]],[[3805,3805],"mapped",[3755,3745]],[[3806,3807],"valid"],[[3808,3839],"disallowed"],[[3840,3840],"valid"],[[3841,3850],"valid",[],"NV8"],[[3851,3851],"valid"],[[3852,3852],"mapped",[3851]],[[3853,3863],"valid",[],"NV8"],[[3864,3865],"valid"],[[3866,3871],"valid",[],"NV8"],[[3872,3881],"valid"],[[3882,3892],"valid",[],"NV8"],[[3893,3893],"valid"],[[3894,3894],"valid",[],"NV8"],[[3895,3895],"valid"],[[3896,3896],"valid",[],"NV8"],[[3897,3897],"valid"],[[3898,3901],"valid",[],"NV8"],[[3902,3906],"valid"],[[3907,3907],"mapped",[3906,4023]],[[3908,3911],"valid"],[[3912,3912],"disallowed"],[[3913,3916],"valid"],[[3917,3917],"mapped",[3916,4023]],[[3918,3921],"valid"],[[3922,3922],"mapped",[3921,4023]],[[3923,3926],"valid"],[[3927,3927],"mapped",[3926,4023]],[[3928,3931],"valid"],[[3932,3932],"mapped",[3931,4023]],[[3933,3944],"valid"],[[3945,3945],"mapped",[3904,4021]],[[3946,3946],"valid"],[[3947,3948],"valid"],[[3949,3952],"disallowed"],[[3953,3954],"valid"],[[3955,3955],"m
apped",[3953,3954]],[[3956,3956],"valid"],[[3957,3957],"mapped",[3953,3956]],[[3958,3958],"mapped",[4018,3968]],[[3959,3959],"mapped",[4018,3953,3968]],[[3960,3960],"mapped",[4019,3968]],[[3961,3961],"mapped",[4019,3953,3968]],[[3962,3968],"valid"],[[3969,3969],"mapped",[3953,3968]],[[3970,3972],"valid"],[[3973,3973],"valid",[],"NV8"],[[3974,3979],"valid"],[[3980,3983],"valid"],[[3984,3986],"valid"],[[3987,3987],"mapped",[3986,4023]],[[3988,3989],"valid"],[[3990,3990],"valid"],[[3991,3991],"valid"],[[3992,3992],"disallowed"],[[3993,3996],"valid"],[[3997,3997],"mapped",[3996,4023]],[[3998,4001],"valid"],[[4002,4002],"mapped",[4001,4023]],[[4003,4006],"valid"],[[4007,4007],"mapped",[4006,4023]],[[4008,4011],"valid"],[[4012,4012],"mapped",[4011,4023]],[[4013,4013],"valid"],[[4014,4016],"valid"],[[4017,4023],"valid"],[[4024,4024],"valid"],[[4025,4025],"mapped",[3984,4021]],[[4026,4028],"valid"],[[4029,4029],"disallowed"],[[4030,4037],"valid",[],"NV8"],[[4038,4038],"valid"],[[4039,4044],"valid",[],"NV8"],[[4045,4045],"disallowed"],[[4046,4046],"valid",[],"NV8"],[[4047,4047],"valid",[],"NV8"],[[4048,4049],"valid",[],"NV8"],[[4050,4052],"valid",[],"NV8"],[[4053,4056],"valid",[],"NV8"],[[4057,4058],"valid",[],"NV8"],[[4059,4095],"disallowed"],[[4096,4129],"valid"],[[4130,4130],"valid"],[[4131,4135],"valid"],[[4136,4136],"valid"],[[4137,4138],"valid"],[[4139,4139],"valid"],[[4140,4146],"valid"],[[4147,4149],"valid"],[[4150,4153],"valid"],[[4154,4159],"valid"],[[4160,4169],"valid"],[[4170,4175],"valid",[],"NV8"],[[4176,4185],"valid"],[[4186,4249],"valid"],[[4250,4253],"valid"],[[4254,4255],"valid",[],"NV8"],[[4256,4293],"disallowed"],[[4294,4294],"disallowed"],[[4295,4295],"mapped",[11559]],[[4296,4300],"disallowed"],[[4301,4301],"mapped",[11565]],[[4302,4303],"disallowed"],[[4304,4342],"valid"],[[4343,4344],"valid"],[[4345,4346],"valid"],[[4347,4347],"valid",[],"NV8"],[[4348,4348],"mapped",[4316]],[[4349,4351],"valid"],[[4352,4441],"valid",[],"NV8"],[[4442,4446],"valid",[],"NV8"],[[4447,4448],"disallowed"],[[4449,4514],"valid",[],"NV8"],[[4515,4519],"valid",[],"NV8"],[[4520,4601],"valid",[],"NV8"],[[4602,4607],"valid",[],"NV8"],[[4608,4614],"valid"],[[4615,4615],"valid"],[[4616,4678],"valid"],[[4679,4679],"valid"],[[4680,4680],"valid"],[[4681,4681],"disallowed"],[[4682,4685],"valid"],[[4686,4687],"disallowed"],[[4688,4694],"valid"],[[4695,4695],"disallowed"],[[4696,4696],"valid"],[[4697,4697],"disallowed"],[[4698,4701],"valid"],[[4702,4703],"disallowed"],[[4704,4742],"valid"],[[4743,4743],"valid"],[[4744,4744],"valid"],[[4745,4745],"disallowed"],[[4746,4749],"valid"],[[4750,4751],"disallowed"],[[4752,4782],"valid"],[[4783,4783],"valid"],[[4784,4784],"valid"],[[4785,4785],"disallowed"],[[4786,4789],"valid"],[[4790,4791],"disallowed"],[[4792,4798],"valid"],[[4799,4799],"disallowed"],[[4800,4800],"valid"],[[4801,4801],"disallowed"],[[4802,4805],"valid"],[[4806,4807],"disallowed"],[[4808,4814],"valid"],[[4815,4815],"valid"],[[4816,4822],"valid"],[[4823,4823],"disallowed"],[[4824,4846],"valid"],[[4847,4847],"valid"],[[4848,4878],"valid"],[[4879,4879],"valid"],[[4880,4880],"valid"],[[4881,4881],"disallowed"],[[4882,4885],"valid"],[[4886,4887],"disallowed"],[[4888,4894],"valid"],[[4895,4895],"valid"],[[4896,4934],"valid"],[[4935,4935],"valid"],[[4936,4954],"valid"],[[4955,4956],"disallowed"],[[4957,4958],"valid"],[[4959,4959],"valid"],[[4960,4960],"valid",[],"NV8"],[[4961,4988],"valid",[],"NV8"],[[4989,4991],"disallowed"],[[4992,5007],"valid"],[[5008,5017],"valid",[],"NV8"],[[5018,5023],"disallowed"],[[502
4,5108],"valid"],[[5109,5109],"valid"],[[5110,5111],"disallowed"],[[5112,5112],"mapped",[5104]],[[5113,5113],"mapped",[5105]],[[5114,5114],"mapped",[5106]],[[5115,5115],"mapped",[5107]],[[5116,5116],"mapped",[5108]],[[5117,5117],"mapped",[5109]],[[5118,5119],"disallowed"],[[5120,5120],"valid",[],"NV8"],[[5121,5740],"valid"],[[5741,5742],"valid",[],"NV8"],[[5743,5750],"valid"],[[5751,5759],"valid"],[[5760,5760],"disallowed"],[[5761,5786],"valid"],[[5787,5788],"valid",[],"NV8"],[[5789,5791],"disallowed"],[[5792,5866],"valid"],[[5867,5872],"valid",[],"NV8"],[[5873,5880],"valid"],[[5881,5887],"disallowed"],[[5888,5900],"valid"],[[5901,5901],"disallowed"],[[5902,5908],"valid"],[[5909,5919],"disallowed"],[[5920,5940],"valid"],[[5941,5942],"valid",[],"NV8"],[[5943,5951],"disallowed"],[[5952,5971],"valid"],[[5972,5983],"disallowed"],[[5984,5996],"valid"],[[5997,5997],"disallowed"],[[5998,6000],"valid"],[[6001,6001],"disallowed"],[[6002,6003],"valid"],[[6004,6015],"disallowed"],[[6016,6067],"valid"],[[6068,6069],"disallowed"],[[6070,6099],"valid"],[[6100,6102],"valid",[],"NV8"],[[6103,6103],"valid"],[[6104,6107],"valid",[],"NV8"],[[6108,6108],"valid"],[[6109,6109],"valid"],[[6110,6111],"disallowed"],[[6112,6121],"valid"],[[6122,6127],"disallowed"],[[6128,6137],"valid",[],"NV8"],[[6138,6143],"disallowed"],[[6144,6149],"valid",[],"NV8"],[[6150,6150],"disallowed"],[[6151,6154],"valid",[],"NV8"],[[6155,6157],"ignored"],[[6158,6158],"disallowed"],[[6159,6159],"disallowed"],[[6160,6169],"valid"],[[6170,6175],"disallowed"],[[6176,6263],"valid"],[[6264,6271],"disallowed"],[[6272,6313],"valid"],[[6314,6314],"valid"],[[6315,6319],"disallowed"],[[6320,6389],"valid"],[[6390,6399],"disallowed"],[[6400,6428],"valid"],[[6429,6430],"valid"],[[6431,6431],"disallowed"],[[6432,6443],"valid"],[[6444,6447],"disallowed"],[[6448,6459],"valid"],[[6460,6463],"disallowed"],[[6464,6464],"valid",[],"NV8"],[[6465,6467],"disallowed"],[[6468,6469],"valid",[],"NV8"],[[6470,6509],"valid"],[[6510,6511],"disallowed"],[[6512,6516],"valid"],[[6517,6527],"disallowed"],[[6528,6569],"valid"],[[6570,6571],"valid"],[[6572,6575],"disallowed"],[[6576,6601],"valid"],[[6602,6607],"disallowed"],[[6608,6617],"valid"],[[6618,6618],"valid",[],"XV8"],[[6619,6621],"disallowed"],[[6622,6623],"valid",[],"NV8"],[[6624,6655],"valid",[],"NV8"],[[6656,6683],"valid"],[[6684,6685],"disallowed"],[[6686,6687],"valid",[],"NV8"],[[6688,6750],"valid"],[[6751,6751],"disallowed"],[[6752,6780],"valid"],[[6781,6782],"disallowed"],[[6783,6793],"valid"],[[6794,6799],"disallowed"],[[6800,6809],"valid"],[[6810,6815],"disallowed"],[[6816,6822],"valid",[],"NV8"],[[6823,6823],"valid"],[[6824,6829],"valid",[],"NV8"],[[6830,6831],"disallowed"],[[6832,6845],"valid"],[[6846,6846],"valid",[],"NV8"],[[6847,6911],"disallowed"],[[6912,6987],"valid"],[[6988,6991],"disallowed"],[[6992,7001],"valid"],[[7002,7018],"valid",[],"NV8"],[[7019,7027],"valid"],[[7028,7036],"valid",[],"NV8"],[[7037,7039],"disallowed"],[[7040,7082],"valid"],[[7083,7085],"valid"],[[7086,7097],"valid"],[[7098,7103],"valid"],[[7104,7155],"valid"],[[7156,7163],"disallowed"],[[7164,7167],"valid",[],"NV8"],[[7168,7223],"valid"],[[7224,7226],"disallowed"],[[7227,7231],"valid",[],"NV8"],[[7232,7241],"valid"],[[7242,7244],"disallowed"],[[7245,7293],"valid"],[[7294,7295],"valid",[],"NV8"],[[7296,7359],"disallowed"],[[7360,7367],"valid",[],"NV8"],[[7368,7375],"disallowed"],[[7376,7378],"valid"],[[7379,7379],"valid",[],"NV8"],[[7380,7410],"valid"],[[7411,7414],"valid"],[[7415,7415],"disallowed"],[[7416,7417],"valid"],[[741
8,7423],"disallowed"],[[7424,7467],"valid"],[[7468,7468],"mapped",[97]],[[7469,7469],"mapped",[230]],[[7470,7470],"mapped",[98]],[[7471,7471],"valid"],[[7472,7472],"mapped",[100]],[[7473,7473],"mapped",[101]],[[7474,7474],"mapped",[477]],[[7475,7475],"mapped",[103]],[[7476,7476],"mapped",[104]],[[7477,7477],"mapped",[105]],[[7478,7478],"mapped",[106]],[[7479,7479],"mapped",[107]],[[7480,7480],"mapped",[108]],[[7481,7481],"mapped",[109]],[[7482,7482],"mapped",[110]],[[7483,7483],"valid"],[[7484,7484],"mapped",[111]],[[7485,7485],"mapped",[547]],[[7486,7486],"mapped",[112]],[[7487,7487],"mapped",[114]],[[7488,7488],"mapped",[116]],[[7489,7489],"mapped",[117]],[[7490,7490],"mapped",[119]],[[7491,7491],"mapped",[97]],[[7492,7492],"mapped",[592]],[[7493,7493],"mapped",[593]],[[7494,7494],"mapped",[7426]],[[7495,7495],"mapped",[98]],[[7496,7496],"mapped",[100]],[[7497,7497],"mapped",[101]],[[7498,7498],"mapped",[601]],[[7499,7499],"mapped",[603]],[[7500,7500],"mapped",[604]],[[7501,7501],"mapped",[103]],[[7502,7502],"valid"],[[7503,7503],"mapped",[107]],[[7504,7504],"mapped",[109]],[[7505,7505],"mapped",[331]],[[7506,7506],"mapped",[111]],[[7507,7507],"mapped",[596]],[[7508,7508],"mapped",[7446]],[[7509,7509],"mapped",[7447]],[[7510,7510],"mapped",[112]],[[7511,7511],"mapped",[116]],[[7512,7512],"mapped",[117]],[[7513,7513],"mapped",[7453]],[[7514,7514],"mapped",[623]],[[7515,7515],"mapped",[118]],[[7516,7516],"mapped",[7461]],[[7517,7517],"mapped",[946]],[[7518,7518],"mapped",[947]],[[7519,7519],"mapped",[948]],[[7520,7520],"mapped",[966]],[[7521,7521],"mapped",[967]],[[7522,7522],"mapped",[105]],[[7523,7523],"mapped",[114]],[[7524,7524],"mapped",[117]],[[7525,7525],"mapped",[118]],[[7526,7526],"mapped",[946]],[[7527,7527],"mapped",[947]],[[7528,7528],"mapped",[961]],[[7529,7529],"mapped",[966]],[[7530,7530],"mapped",[967]],[[7531,7531],"valid"],[[7532,7543],"valid"],[[7544,7544],"mapped",[1085]],[[7545,7578],"valid"],[[7579,7579],"mapped",[594]],[[7580,7580],"mapped",[99]],[[7581,7581],"mapped",[597]],[[7582,7582],"mapped",[240]],[[7583,7583],"mapped",[604]],[[7584,7584],"mapped",[102]],[[7585,7585],"mapped",[607]],[[7586,7586],"mapped",[609]],[[7587,7587],"mapped",[613]],[[7588,7588],"mapped",[616]],[[7589,7589],"mapped",[617]],[[7590,7590],"mapped",[618]],[[7591,7591],"mapped",[7547]],[[7592,7592],"mapped",[669]],[[7593,7593],"mapped",[621]],[[7594,7594],"mapped",[7557]],[[7595,7595],"mapped",[671]],[[7596,7596],"mapped",[625]],[[7597,7597],"mapped",[624]],[[7598,7598],"mapped",[626]],[[7599,7599],"mapped",[627]],[[7600,7600],"mapped",[628]],[[7601,7601],"mapped",[629]],[[7602,7602],"mapped",[632]],[[7603,7603],"mapped",[642]],[[7604,7604],"mapped",[643]],[[7605,7605],"mapped",[427]],[[7606,7606],"mapped",[649]],[[7607,7607],"mapped",[650]],[[7608,7608],"mapped",[7452]],[[7609,7609],"mapped",[651]],[[7610,7610],"mapped",[652]],[[7611,7611],"mapped",[122]],[[7612,7612],"mapped",[656]],[[7613,7613],"mapped",[657]],[[7614,7614],"mapped",[658]],[[7615,7615],"mapped",[952]],[[7616,7619],"valid"],[[7620,7626],"valid"],[[7627,7654],"valid"],[[7655,7669],"valid"],[[7670,7675],"disallowed"],[[7676,7676],"valid"],[[7677,7677],"valid"],[[7678,7679],"valid"],[[7680,7680],"mapped",[7681]],[[7681,7681],"valid"],[[7682,7682],"mapped",[7683]],[[7683,7683],"valid"],[[7684,7684],"mapped",[7685]],[[7685,7685],"valid"],[[7686,7686],"mapped",[7687]],[[7687,7687],"valid"],[[7688,7688],"mapped",[7689]],[[7689,7689],"valid"],[[7690,7690],"mapped",[7691]],[[7691,7691],"valid"],[[7692,7692],"mapped",[7693]],[[7693,769
3],"valid"],[[7694,7694],"mapped",[7695]],[[7695,7695],"valid"],[[7696,7696],"mapped",[7697]],[[7697,7697],"valid"],[[7698,7698],"mapped",[7699]],[[7699,7699],"valid"],[[7700,7700],"mapped",[7701]],[[7701,7701],"valid"],[[7702,7702],"mapped",[7703]],[[7703,7703],"valid"],[[7704,7704],"mapped",[7705]],[[7705,7705],"valid"],[[7706,7706],"mapped",[7707]],[[7707,7707],"valid"],[[7708,7708],"mapped",[7709]],[[7709,7709],"valid"],[[7710,7710],"mapped",[7711]],[[7711,7711],"valid"],[[7712,7712],"mapped",[7713]],[[7713,7713],"valid"],[[7714,7714],"mapped",[7715]],[[7715,7715],"valid"],[[7716,7716],"mapped",[7717]],[[7717,7717],"valid"],[[7718,7718],"mapped",[7719]],[[7719,7719],"valid"],[[7720,7720],"mapped",[7721]],[[7721,7721],"valid"],[[7722,7722],"mapped",[7723]],[[7723,7723],"valid"],[[7724,7724],"mapped",[7725]],[[7725,7725],"valid"],[[7726,7726],"mapped",[7727]],[[7727,7727],"valid"],[[7728,7728],"mapped",[7729]],[[7729,7729],"valid"],[[7730,7730],"mapped",[7731]],[[7731,7731],"valid"],[[7732,7732],"mapped",[7733]],[[7733,7733],"valid"],[[7734,7734],"mapped",[7735]],[[7735,7735],"valid"],[[7736,7736],"mapped",[7737]],[[7737,7737],"valid"],[[7738,7738],"mapped",[7739]],[[7739,7739],"valid"],[[7740,7740],"mapped",[7741]],[[7741,7741],"valid"],[[7742,7742],"mapped",[7743]],[[7743,7743],"valid"],[[7744,7744],"mapped",[7745]],[[7745,7745],"valid"],[[7746,7746],"mapped",[7747]],[[7747,7747],"valid"],[[7748,7748],"mapped",[7749]],[[7749,7749],"valid"],[[7750,7750],"mapped",[7751]],[[7751,7751],"valid"],[[7752,7752],"mapped",[7753]],[[7753,7753],"valid"],[[7754,7754],"mapped",[7755]],[[7755,7755],"valid"],[[7756,7756],"mapped",[7757]],[[7757,7757],"valid"],[[7758,7758],"mapped",[7759]],[[7759,7759],"valid"],[[7760,7760],"mapped",[7761]],[[7761,7761],"valid"],[[7762,7762],"mapped",[7763]],[[7763,7763],"valid"],[[7764,7764],"mapped",[7765]],[[7765,7765],"valid"],[[7766,7766],"mapped",[7767]],[[7767,7767],"valid"],[[7768,7768],"mapped",[7769]],[[7769,7769],"valid"],[[7770,7770],"mapped",[7771]],[[7771,7771],"valid"],[[7772,7772],"mapped",[7773]],[[7773,7773],"valid"],[[7774,7774],"mapped",[7775]],[[7775,7775],"valid"],[[7776,7776],"mapped",[7777]],[[7777,7777],"valid"],[[7778,7778],"mapped",[7779]],[[7779,7779],"valid"],[[7780,7780],"mapped",[7781]],[[7781,7781],"valid"],[[7782,7782],"mapped",[7783]],[[7783,7783],"valid"],[[7784,7784],"mapped",[7785]],[[7785,7785],"valid"],[[7786,7786],"mapped",[7787]],[[7787,7787],"valid"],[[7788,7788],"mapped",[7789]],[[7789,7789],"valid"],[[7790,7790],"mapped",[7791]],[[7791,7791],"valid"],[[7792,7792],"mapped",[7793]],[[7793,7793],"valid"],[[7794,7794],"mapped",[7795]],[[7795,7795],"valid"],[[7796,7796],"mapped",[7797]],[[7797,7797],"valid"],[[7798,7798],"mapped",[7799]],[[7799,7799],"valid"],[[7800,7800],"mapped",[7801]],[[7801,7801],"valid"],[[7802,7802],"mapped",[7803]],[[7803,7803],"valid"],[[7804,7804],"mapped",[7805]],[[7805,7805],"valid"],[[7806,7806],"mapped",[7807]],[[7807,7807],"valid"],[[7808,7808],"mapped",[7809]],[[7809,7809],"valid"],[[7810,7810],"mapped",[7811]],[[7811,7811],"valid"],[[7812,7812],"mapped",[7813]],[[7813,7813],"valid"],[[7814,7814],"mapped",[7815]],[[7815,7815],"valid"],[[7816,7816],"mapped",[7817]],[[7817,7817],"valid"],[[7818,7818],"mapped",[7819]],[[7819,7819],"valid"],[[7820,7820],"mapped",[7821]],[[7821,7821],"valid"],[[7822,7822],"mapped",[7823]],[[7823,7823],"valid"],[[7824,7824],"mapped",[7825]],[[7825,7825],"valid"],[[7826,7826],"mapped",[7827]],[[7827,7827],"valid"],[[7828,7828],"mapped",[7829]],[[7829,7833],"valid"],[[7834,
7834],"mapped",[97,702]],[[7835,7835],"mapped",[7777]],[[7836,7837],"valid"],[[7838,7838],"mapped",[115,115]],[[7839,7839],"valid"],[[7840,7840],"mapped",[7841]],[[7841,7841],"valid"],[[7842,7842],"mapped",[7843]],[[7843,7843],"valid"],[[7844,7844],"mapped",[7845]],[[7845,7845],"valid"],[[7846,7846],"mapped",[7847]],[[7847,7847],"valid"],[[7848,7848],"mapped",[7849]],[[7849,7849],"valid"],[[7850,7850],"mapped",[7851]],[[7851,7851],"valid"],[[7852,7852],"mapped",[7853]],[[7853,7853],"valid"],[[7854,7854],"mapped",[7855]],[[7855,7855],"valid"],[[7856,7856],"mapped",[7857]],[[7857,7857],"valid"],[[7858,7858],"mapped",[7859]],[[7859,7859],"valid"],[[7860,7860],"mapped",[7861]],[[7861,7861],"valid"],[[7862,7862],"mapped",[7863]],[[7863,7863],"valid"],[[7864,7864],"mapped",[7865]],[[7865,7865],"valid"],[[7866,7866],"mapped",[7867]],[[7867,7867],"valid"],[[7868,7868],"mapped",[7869]],[[7869,7869],"valid"],[[7870,7870],"mapped",[7871]],[[7871,7871],"valid"],[[7872,7872],"mapped",[7873]],[[7873,7873],"valid"],[[7874,7874],"mapped",[7875]],[[7875,7875],"valid"],[[7876,7876],"mapped",[7877]],[[7877,7877],"valid"],[[7878,7878],"mapped",[7879]],[[7879,7879],"valid"],[[7880,7880],"mapped",[7881]],[[7881,7881],"valid"],[[7882,7882],"mapped",[7883]],[[7883,7883],"valid"],[[7884,7884],"mapped",[7885]],[[7885,7885],"valid"],[[7886,7886],"mapped",[7887]],[[7887,7887],"valid"],[[7888,7888],"mapped",[7889]],[[7889,7889],"valid"],[[7890,7890],"mapped",[7891]],[[7891,7891],"valid"],[[7892,7892],"mapped",[7893]],[[7893,7893],"valid"],[[7894,7894],"mapped",[7895]],[[7895,7895],"valid"],[[7896,7896],"mapped",[7897]],[[7897,7897],"valid"],[[7898,7898],"mapped",[7899]],[[7899,7899],"valid"],[[7900,7900],"mapped",[7901]],[[7901,7901],"valid"],[[7902,7902],"mapped",[7903]],[[7903,7903],"valid"],[[7904,7904],"mapped",[7905]],[[7905,7905],"valid"],[[7906,7906],"mapped",[7907]],[[7907,7907],"valid"],[[7908,7908],"mapped",[7909]],[[7909,7909],"valid"],[[7910,7910],"mapped",[7911]],[[7911,7911],"valid"],[[7912,7912],"mapped",[7913]],[[7913,7913],"valid"],[[7914,7914],"mapped",[7915]],[[7915,7915],"valid"],[[7916,7916],"mapped",[7917]],[[7917,7917],"valid"],[[7918,7918],"mapped",[7919]],[[7919,7919],"valid"],[[7920,7920],"mapped",[7921]],[[7921,7921],"valid"],[[7922,7922],"mapped",[7923]],[[7923,7923],"valid"],[[7924,7924],"mapped",[7925]],[[7925,7925],"valid"],[[7926,7926],"mapped",[7927]],[[7927,7927],"valid"],[[7928,7928],"mapped",[7929]],[[7929,7929],"valid"],[[7930,7930],"mapped",[7931]],[[7931,7931],"valid"],[[7932,7932],"mapped",[7933]],[[7933,7933],"valid"],[[7934,7934],"mapped",[7935]],[[7935,7935],"valid"],[[7936,7943],"valid"],[[7944,7944],"mapped",[7936]],[[7945,7945],"mapped",[7937]],[[7946,7946],"mapped",[7938]],[[7947,7947],"mapped",[7939]],[[7948,7948],"mapped",[7940]],[[7949,7949],"mapped",[7941]],[[7950,7950],"mapped",[7942]],[[7951,7951],"mapped",[7943]],[[7952,7957],"valid"],[[7958,7959],"disallowed"],[[7960,7960],"mapped",[7952]],[[7961,7961],"mapped",[7953]],[[7962,7962],"mapped",[7954]],[[7963,7963],"mapped",[7955]],[[7964,7964],"mapped",[7956]],[[7965,7965],"mapped",[7957]],[[7966,7967],"disallowed"],[[7968,7975],"valid"],[[7976,7976],"mapped",[7968]],[[7977,7977],"mapped",[7969]],[[7978,7978],"mapped",[7970]],[[7979,7979],"mapped",[7971]],[[7980,7980],"mapped",[7972]],[[7981,7981],"mapped",[7973]],[[7982,7982],"mapped",[7974]],[[7983,7983],"mapped",[7975]],[[7984,7991],"valid"],[[7992,7992],"mapped",[7984]],[[7993,7993],"mapped",[7985]],[[7994,7994],"mapped",[7986]],[[7995,7995],"mapped",[7987]],[[799
6,7996],"mapped",[7988]],[[7997,7997],"mapped",[7989]],[[7998,7998],"mapped",[7990]],[[7999,7999],"mapped",[7991]],[[8000,8005],"valid"],[[8006,8007],"disallowed"],[[8008,8008],"mapped",[8000]],[[8009,8009],"mapped",[8001]],[[8010,8010],"mapped",[8002]],[[8011,8011],"mapped",[8003]],[[8012,8012],"mapped",[8004]],[[8013,8013],"mapped",[8005]],[[8014,8015],"disallowed"],[[8016,8023],"valid"],[[8024,8024],"disallowed"],[[8025,8025],"mapped",[8017]],[[8026,8026],"disallowed"],[[8027,8027],"mapped",[8019]],[[8028,8028],"disallowed"],[[8029,8029],"mapped",[8021]],[[8030,8030],"disallowed"],[[8031,8031],"mapped",[8023]],[[8032,8039],"valid"],[[8040,8040],"mapped",[8032]],[[8041,8041],"mapped",[8033]],[[8042,8042],"mapped",[8034]],[[8043,8043],"mapped",[8035]],[[8044,8044],"mapped",[8036]],[[8045,8045],"mapped",[8037]],[[8046,8046],"mapped",[8038]],[[8047,8047],"mapped",[8039]],[[8048,8048],"valid"],[[8049,8049],"mapped",[940]],[[8050,8050],"valid"],[[8051,8051],"mapped",[941]],[[8052,8052],"valid"],[[8053,8053],"mapped",[942]],[[8054,8054],"valid"],[[8055,8055],"mapped",[943]],[[8056,8056],"valid"],[[8057,8057],"mapped",[972]],[[8058,8058],"valid"],[[8059,8059],"mapped",[973]],[[8060,8060],"valid"],[[8061,8061],"mapped",[974]],[[8062,8063],"disallowed"],[[8064,8064],"mapped",[7936,953]],[[8065,8065],"mapped",[7937,953]],[[8066,8066],"mapped",[7938,953]],[[8067,8067],"mapped",[7939,953]],[[8068,8068],"mapped",[7940,953]],[[8069,8069],"mapped",[7941,953]],[[8070,8070],"mapped",[7942,953]],[[8071,8071],"mapped",[7943,953]],[[8072,8072],"mapped",[7936,953]],[[8073,8073],"mapped",[7937,953]],[[8074,8074],"mapped",[7938,953]],[[8075,8075],"mapped",[7939,953]],[[8076,8076],"mapped",[7940,953]],[[8077,8077],"mapped",[7941,953]],[[8078,8078],"mapped",[7942,953]],[[8079,8079],"mapped",[7943,953]],[[8080,8080],"mapped",[7968,953]],[[8081,8081],"mapped",[7969,953]],[[8082,8082],"mapped",[7970,953]],[[8083,8083],"mapped",[7971,953]],[[8084,8084],"mapped",[7972,953]],[[8085,8085],"mapped",[7973,953]],[[8086,8086],"mapped",[7974,953]],[[8087,8087],"mapped",[7975,953]],[[8088,8088],"mapped",[7968,953]],[[8089,8089],"mapped",[7969,953]],[[8090,8090],"mapped",[7970,953]],[[8091,8091],"mapped",[7971,953]],[[8092,8092],"mapped",[7972,953]],[[8093,8093],"mapped",[7973,953]],[[8094,8094],"mapped",[7974,953]],[[8095,8095],"mapped",[7975,953]],[[8096,8096],"mapped",[8032,953]],[[8097,8097],"mapped",[8033,953]],[[8098,8098],"mapped",[8034,953]],[[8099,8099],"mapped",[8035,953]],[[8100,8100],"mapped",[8036,953]],[[8101,8101],"mapped",[8037,953]],[[8102,8102],"mapped",[8038,953]],[[8103,8103],"mapped",[8039,953]],[[8104,8104],"mapped",[8032,953]],[[8105,8105],"mapped",[8033,953]],[[8106,8106],"mapped",[8034,953]],[[8107,8107],"mapped",[8035,953]],[[8108,8108],"mapped",[8036,953]],[[8109,8109],"mapped",[8037,953]],[[8110,8110],"mapped",[8038,953]],[[8111,8111],"mapped",[8039,953]],[[8112,8113],"valid"],[[8114,8114],"mapped",[8048,953]],[[8115,8115],"mapped",[945,953]],[[8116,8116],"mapped",[940,953]],[[8117,8117],"disallowed"],[[8118,8118],"valid"],[[8119,8119],"mapped",[8118,953]],[[8120,8120],"mapped",[8112]],[[8121,8121],"mapped",[8113]],[[8122,8122],"mapped",[8048]],[[8123,8123],"mapped",[940]],[[8124,8124],"mapped",[945,953]],[[8125,8125],"disallowed_STD3_mapped",[32,787]],[[8126,8126],"mapped",[953]],[[8127,8127],"disallowed_STD3_mapped",[32,787]],[[8128,8128],"disallowed_STD3_mapped",[32,834]],[[8129,8129],"disallowed_STD3_mapped",[32,776,834]],[[8130,8130],"mapped",[8052,953]],[[8131,8131],"mapped",[951,953]],[[8132,
8132],"mapped",[942,953]],[[8133,8133],"disallowed"],[[8134,8134],"valid"],[[8135,8135],"mapped",[8134,953]],[[8136,8136],"mapped",[8050]],[[8137,8137],"mapped",[941]],[[8138,8138],"mapped",[8052]],[[8139,8139],"mapped",[942]],[[8140,8140],"mapped",[951,953]],[[8141,8141],"disallowed_STD3_mapped",[32,787,768]],[[8142,8142],"disallowed_STD3_mapped",[32,787,769]],[[8143,8143],"disallowed_STD3_mapped",[32,787,834]],[[8144,8146],"valid"],[[8147,8147],"mapped",[912]],[[8148,8149],"disallowed"],[[8150,8151],"valid"],[[8152,8152],"mapped",[8144]],[[8153,8153],"mapped",[8145]],[[8154,8154],"mapped",[8054]],[[8155,8155],"mapped",[943]],[[8156,8156],"disallowed"],[[8157,8157],"disallowed_STD3_mapped",[32,788,768]],[[8158,8158],"disallowed_STD3_mapped",[32,788,769]],[[8159,8159],"disallowed_STD3_mapped",[32,788,834]],[[8160,8162],"valid"],[[8163,8163],"mapped",[944]],[[8164,8167],"valid"],[[8168,8168],"mapped",[8160]],[[8169,8169],"mapped",[8161]],[[8170,8170],"mapped",[8058]],[[8171,8171],"mapped",[973]],[[8172,8172],"mapped",[8165]],[[8173,8173],"disallowed_STD3_mapped",[32,776,768]],[[8174,8174],"disallowed_STD3_mapped",[32,776,769]],[[8175,8175],"disallowed_STD3_mapped",[96]],[[8176,8177],"disallowed"],[[8178,8178],"mapped",[8060,953]],[[8179,8179],"mapped",[969,953]],[[8180,8180],"mapped",[974,953]],[[8181,8181],"disallowed"],[[8182,8182],"valid"],[[8183,8183],"mapped",[8182,953]],[[8184,8184],"mapped",[8056]],[[8185,8185],"mapped",[972]],[[8186,8186],"mapped",[8060]],[[8187,8187],"mapped",[974]],[[8188,8188],"mapped",[969,953]],[[8189,8189],"disallowed_STD3_mapped",[32,769]],[[8190,8190],"disallowed_STD3_mapped",[32,788]],[[8191,8191],"disallowed"],[[8192,8202],"disallowed_STD3_mapped",[32]],[[8203,8203],"ignored"],[[8204,8205],"deviation",[]],[[8206,8207],"disallowed"],[[8208,8208],"valid",[],"NV8"],[[8209,8209],"mapped",[8208]],[[8210,8214],"valid",[],"NV8"],[[8215,8215],"disallowed_STD3_mapped",[32,819]],[[8216,8227],"valid",[],"NV8"],[[8228,8230],"disallowed"],[[8231,8231],"valid",[],"NV8"],[[8232,8238],"disallowed"],[[8239,8239],"disallowed_STD3_mapped",[32]],[[8240,8242],"valid",[],"NV8"],[[8243,8243],"mapped",[8242,8242]],[[8244,8244],"mapped",[8242,8242,8242]],[[8245,8245],"valid",[],"NV8"],[[8246,8246],"mapped",[8245,8245]],[[8247,8247],"mapped",[8245,8245,8245]],[[8248,8251],"valid",[],"NV8"],[[8252,8252],"disallowed_STD3_mapped",[33,33]],[[8253,8253],"valid",[],"NV8"],[[8254,8254],"disallowed_STD3_mapped",[32,773]],[[8255,8262],"valid",[],"NV8"],[[8263,8263],"disallowed_STD3_mapped",[63,63]],[[8264,8264],"disallowed_STD3_mapped",[63,33]],[[8265,8265],"disallowed_STD3_mapped",[33,63]],[[8266,8269],"valid",[],"NV8"],[[8270,8274],"valid",[],"NV8"],[[8275,8276],"valid",[],"NV8"],[[8277,8278],"valid",[],"NV8"],[[8279,8279],"mapped",[8242,8242,8242,8242]],[[8280,8286],"valid",[],"NV8"],[[8287,8287],"disallowed_STD3_mapped",[32]],[[8288,8288],"ignored"],[[8289,8291],"disallowed"],[[8292,8292],"ignored"],[[8293,8293],"disallowed"],[[8294,8297],"disallowed"],[[8298,8303],"disallowed"],[[8304,8304],"mapped",[48]],[[8305,8305],"mapped",[105]],[[8306,8307],"disallowed"],[[8308,8308],"mapped",[52]],[[8309,8309],"mapped",[53]],[[8310,8310],"mapped",[54]],[[8311,8311],"mapped",[55]],[[8312,8312],"mapped",[56]],[[8313,8313],"mapped",[57]],[[8314,8314],"disallowed_STD3_mapped",[43]],[[8315,8315],"mapped",[8722]],[[8316,8316],"disallowed_STD3_mapped",[61]],[[8317,8317],"disallowed_STD3_mapped",[40]],[[8318,8318],"disallowed_STD3_mapped",[41]],[[8319,8319],"mapped",[110]],[[8320,8320],"mapped",[48]],[[8
321,8321],"mapped",[49]],[[8322,8322],"mapped",[50]],[[8323,8323],"mapped",[51]],[[8324,8324],"mapped",[52]],[[8325,8325],"mapped",[53]],[[8326,8326],"mapped",[54]],[[8327,8327],"mapped",[55]],[[8328,8328],"mapped",[56]],[[8329,8329],"mapped",[57]],[[8330,8330],"disallowed_STD3_mapped",[43]],[[8331,8331],"mapped",[8722]],[[8332,8332],"disallowed_STD3_mapped",[61]],[[8333,8333],"disallowed_STD3_mapped",[40]],[[8334,8334],"disallowed_STD3_mapped",[41]],[[8335,8335],"disallowed"],[[8336,8336],"mapped",[97]],[[8337,8337],"mapped",[101]],[[8338,8338],"mapped",[111]],[[8339,8339],"mapped",[120]],[[8340,8340],"mapped",[601]],[[8341,8341],"mapped",[104]],[[8342,8342],"mapped",[107]],[[8343,8343],"mapped",[108]],[[8344,8344],"mapped",[109]],[[8345,8345],"mapped",[110]],[[8346,8346],"mapped",[112]],[[8347,8347],"mapped",[115]],[[8348,8348],"mapped",[116]],[[8349,8351],"disallowed"],[[8352,8359],"valid",[],"NV8"],[[8360,8360],"mapped",[114,115]],[[8361,8362],"valid",[],"NV8"],[[8363,8363],"valid",[],"NV8"],[[8364,8364],"valid",[],"NV8"],[[8365,8367],"valid",[],"NV8"],[[8368,8369],"valid",[],"NV8"],[[8370,8373],"valid",[],"NV8"],[[8374,8376],"valid",[],"NV8"],[[8377,8377],"valid",[],"NV8"],[[8378,8378],"valid",[],"NV8"],[[8379,8381],"valid",[],"NV8"],[[8382,8382],"valid",[],"NV8"],[[8383,8399],"disallowed"],[[8400,8417],"valid",[],"NV8"],[[8418,8419],"valid",[],"NV8"],[[8420,8426],"valid",[],"NV8"],[[8427,8427],"valid",[],"NV8"],[[8428,8431],"valid",[],"NV8"],[[8432,8432],"valid",[],"NV8"],[[8433,8447],"disallowed"],[[8448,8448],"disallowed_STD3_mapped",[97,47,99]],[[8449,8449],"disallowed_STD3_mapped",[97,47,115]],[[8450,8450],"mapped",[99]],[[8451,8451],"mapped",[176,99]],[[8452,8452],"valid",[],"NV8"],[[8453,8453],"disallowed_STD3_mapped",[99,47,111]],[[8454,8454],"disallowed_STD3_mapped",[99,47,117]],[[8455,8455],"mapped",[603]],[[8456,8456],"valid",[],"NV8"],[[8457,8457],"mapped",[176,102]],[[8458,8458],"mapped",[103]],[[8459,8462],"mapped",[104]],[[8463,8463],"mapped",[295]],[[8464,8465],"mapped",[105]],[[8466,8467],"mapped",[108]],[[8468,8468],"valid",[],"NV8"],[[8469,8469],"mapped",[110]],[[8470,8470],"mapped",[110,111]],[[8471,8472],"valid",[],"NV8"],[[8473,8473],"mapped",[112]],[[8474,8474],"mapped",[113]],[[8475,8477],"mapped",[114]],[[8478,8479],"valid",[],"NV8"],[[8480,8480],"mapped",[115,109]],[[8481,8481],"mapped",[116,101,108]],[[8482,8482],"mapped",[116,109]],[[8483,8483],"valid",[],"NV8"],[[8484,8484],"mapped",[122]],[[8485,8485],"valid",[],"NV8"],[[8486,8486],"mapped",[969]],[[8487,8487],"valid",[],"NV8"],[[8488,8488],"mapped",[122]],[[8489,8489],"valid",[],"NV8"],[[8490,8490],"mapped",[107]],[[8491,8491],"mapped",[229]],[[8492,8492],"mapped",[98]],[[8493,8493],"mapped",[99]],[[8494,8494],"valid",[],"NV8"],[[8495,8496],"mapped",[101]],[[8497,8497],"mapped",[102]],[[8498,8498],"disallowed"],[[8499,8499],"mapped",[109]],[[8500,8500],"mapped",[111]],[[8501,8501],"mapped",[1488]],[[8502,8502],"mapped",[1489]],[[8503,8503],"mapped",[1490]],[[8504,8504],"mapped",[1491]],[[8505,8505],"mapped",[105]],[[8506,8506],"valid",[],"NV8"],[[8507,8507],"mapped",[102,97,120]],[[8508,8508],"mapped",[960]],[[8509,8510],"mapped",[947]],[[8511,8511],"mapped",[960]],[[8512,8512],"mapped",[8721]],[[8513,8516],"valid",[],"NV8"],[[8517,8518],"mapped",[100]],[[8519,8519],"mapped",[101]],[[8520,8520],"mapped",[105]],[[8521,8521],"mapped",[106]],[[8522,8523],"valid",[],"NV8"],[[8524,8524],"valid",[],"NV8"],[[8525,8525],"valid",[],"NV8"],[[8526,8526],"valid"],[[8527,8527],"valid",[],"NV8"],[[8528,8528],"mapped",[
49,8260,55]],[[8529,8529],"mapped",[49,8260,57]],[[8530,8530],"mapped",[49,8260,49,48]],[[8531,8531],"mapped",[49,8260,51]],[[8532,8532],"mapped",[50,8260,51]],[[8533,8533],"mapped",[49,8260,53]],[[8534,8534],"mapped",[50,8260,53]],[[8535,8535],"mapped",[51,8260,53]],[[8536,8536],"mapped",[52,8260,53]],[[8537,8537],"mapped",[49,8260,54]],[[8538,8538],"mapped",[53,8260,54]],[[8539,8539],"mapped",[49,8260,56]],[[8540,8540],"mapped",[51,8260,56]],[[8541,8541],"mapped",[53,8260,56]],[[8542,8542],"mapped",[55,8260,56]],[[8543,8543],"mapped",[49,8260]],[[8544,8544],"mapped",[105]],[[8545,8545],"mapped",[105,105]],[[8546,8546],"mapped",[105,105,105]],[[8547,8547],"mapped",[105,118]],[[8548,8548],"mapped",[118]],[[8549,8549],"mapped",[118,105]],[[8550,8550],"mapped",[118,105,105]],[[8551,8551],"mapped",[118,105,105,105]],[[8552,8552],"mapped",[105,120]],[[8553,8553],"mapped",[120]],[[8554,8554],"mapped",[120,105]],[[8555,8555],"mapped",[120,105,105]],[[8556,8556],"mapped",[108]],[[8557,8557],"mapped",[99]],[[8558,8558],"mapped",[100]],[[8559,8559],"mapped",[109]],[[8560,8560],"mapped",[105]],[[8561,8561],"mapped",[105,105]],[[8562,8562],"mapped",[105,105,105]],[[8563,8563],"mapped",[105,118]],[[8564,8564],"mapped",[118]],[[8565,8565],"mapped",[118,105]],[[8566,8566],"mapped",[118,105,105]],[[8567,8567],"mapped",[118,105,105,105]],[[8568,8568],"mapped",[105,120]],[[8569,8569],"mapped",[120]],[[8570,8570],"mapped",[120,105]],[[8571,8571],"mapped",[120,105,105]],[[8572,8572],"mapped",[108]],[[8573,8573],"mapped",[99]],[[8574,8574],"mapped",[100]],[[8575,8575],"mapped",[109]],[[8576,8578],"valid",[],"NV8"],[[8579,8579],"disallowed"],[[8580,8580],"valid"],[[8581,8584],"valid",[],"NV8"],[[8585,8585],"mapped",[48,8260,51]],[[8586,8587],"valid",[],"NV8"],[[8588,8591],"disallowed"],[[8592,8682],"valid",[],"NV8"],[[8683,8691],"valid",[],"NV8"],[[8692,8703],"valid",[],"NV8"],[[8704,8747],"valid",[],"NV8"],[[8748,8748],"mapped",[8747,8747]],[[8749,8749],"mapped",[8747,8747,8747]],[[8750,8750],"valid",[],"NV8"],[[8751,8751],"mapped",[8750,8750]],[[8752,8752],"mapped",[8750,8750,8750]],[[8753,8799],"valid",[],"NV8"],[[8800,8800],"disallowed_STD3_valid"],[[8801,8813],"valid",[],"NV8"],[[8814,8815],"disallowed_STD3_valid"],[[8816,8945],"valid",[],"NV8"],[[8946,8959],"valid",[],"NV8"],[[8960,8960],"valid",[],"NV8"],[[8961,8961],"valid",[],"NV8"],[[8962,9000],"valid",[],"NV8"],[[9001,9001],"mapped",[12296]],[[9002,9002],"mapped",[12297]],[[9003,9082],"valid",[],"NV8"],[[9083,9083],"valid",[],"NV8"],[[9084,9084],"valid",[],"NV8"],[[9085,9114],"valid",[],"NV8"],[[9115,9166],"valid",[],"NV8"],[[9167,9168],"valid",[],"NV8"],[[9169,9179],"valid",[],"NV8"],[[9180,9191],"valid",[],"NV8"],[[9192,9192],"valid",[],"NV8"],[[9193,9203],"valid",[],"NV8"],[[9204,9210],"valid",[],"NV8"],[[9211,9215],"disallowed"],[[9216,9252],"valid",[],"NV8"],[[9253,9254],"valid",[],"NV8"],[[9255,9279],"disallowed"],[[9280,9290],"valid",[],"NV8"],[[9291,9311],"disallowed"],[[9312,9312],"mapped",[49]],[[9313,9313],"mapped",[50]],[[9314,9314],"mapped",[51]],[[9315,9315],"mapped",[52]],[[9316,9316],"mapped",[53]],[[9317,9317],"mapped",[54]],[[9318,9318],"mapped",[55]],[[9319,9319],"mapped",[56]],[[9320,9320],"mapped",[57]],[[9321,9321],"mapped",[49,48]],[[9322,9322],"mapped",[49,49]],[[9323,9323],"mapped",[49,50]],[[9324,9324],"mapped",[49,51]],[[9325,9325],"mapped",[49,52]],[[9326,9326],"mapped",[49,53]],[[9327,9327],"mapped",[49,54]],[[9328,9328],"mapped",[49,55]],[[9329,9329],"mapped",[49,56]],[[9330,9330],"mapped",[49,57]],[[9331,9331],"mapped",
[50,48]],[[9332,9332],"disallowed_STD3_mapped",[40,49,41]],[[9333,9333],"disallowed_STD3_mapped",[40,50,41]],[[9334,9334],"disallowed_STD3_mapped",[40,51,41]],[[9335,9335],"disallowed_STD3_mapped",[40,52,41]],[[9336,9336],"disallowed_STD3_mapped",[40,53,41]],[[9337,9337],"disallowed_STD3_mapped",[40,54,41]],[[9338,9338],"disallowed_STD3_mapped",[40,55,41]],[[9339,9339],"disallowed_STD3_mapped",[40,56,41]],[[9340,9340],"disallowed_STD3_mapped",[40,57,41]],[[9341,9341],"disallowed_STD3_mapped",[40,49,48,41]],[[9342,9342],"disallowed_STD3_mapped",[40,49,49,41]],[[9343,9343],"disallowed_STD3_mapped",[40,49,50,41]],[[9344,9344],"disallowed_STD3_mapped",[40,49,51,41]],[[9345,9345],"disallowed_STD3_mapped",[40,49,52,41]],[[9346,9346],"disallowed_STD3_mapped",[40,49,53,41]],[[9347,9347],"disallowed_STD3_mapped",[40,49,54,41]],[[9348,9348],"disallowed_STD3_mapped",[40,49,55,41]],[[9349,9349],"disallowed_STD3_mapped",[40,49,56,41]],[[9350,9350],"disallowed_STD3_mapped",[40,49,57,41]],[[9351,9351],"disallowed_STD3_mapped",[40,50,48,41]],[[9352,9371],"disallowed"],[[9372,9372],"disallowed_STD3_mapped",[40,97,41]],[[9373,9373],"disallowed_STD3_mapped",[40,98,41]],[[9374,9374],"disallowed_STD3_mapped",[40,99,41]],[[9375,9375],"disallowed_STD3_mapped",[40,100,41]],[[9376,9376],"disallowed_STD3_mapped",[40,101,41]],[[9377,9377],"disallowed_STD3_mapped",[40,102,41]],[[9378,9378],"disallowed_STD3_mapped",[40,103,41]],[[9379,9379],"disallowed_STD3_mapped",[40,104,41]],[[9380,9380],"disallowed_STD3_mapped",[40,105,41]],[[9381,9381],"disallowed_STD3_mapped",[40,106,41]],[[9382,9382],"disallowed_STD3_mapped",[40,107,41]],[[9383,9383],"disallowed_STD3_mapped",[40,108,41]],[[9384,9384],"disallowed_STD3_mapped",[40,109,41]],[[9385,9385],"disallowed_STD3_mapped",[40,110,41]],[[9386,9386],"disallowed_STD3_mapped",[40,111,41]],[[9387,9387],"disallowed_STD3_mapped",[40,112,41]],[[9388,9388],"disallowed_STD3_mapped",[40,113,41]],[[9389,9389],"disallowed_STD3_mapped",[40,114,41]],[[9390,9390],"disallowed_STD3_mapped",[40,115,41]],[[9391,9391],"disallowed_STD3_mapped",[40,116,41]],[[9392,9392],"disallowed_STD3_mapped",[40,117,41]],[[9393,9393],"disallowed_STD3_mapped",[40,118,41]],[[9394,9394],"disallowed_STD3_mapped",[40,119,41]],[[9395,9395],"disallowed_STD3_mapped",[40,120,41]],[[9396,9396],"disallowed_STD3_mapped",[40,121,41]],[[9397,9397],"disallowed_STD3_mapped",[40,122,41]],[[9398,9398],"mapped",[97]],[[9399,9399],"mapped",[98]],[[9400,9400],"mapped",[99]],[[9401,9401],"mapped",[100]],[[9402,9402],"mapped",[101]],[[9403,9403],"mapped",[102]],[[9404,9404],"mapped",[103]],[[9405,9405],"mapped",[104]],[[9406,9406],"mapped",[105]],[[9407,9407],"mapped",[106]],[[9408,9408],"mapped",[107]],[[9409,9409],"mapped",[108]],[[9410,9410],"mapped",[109]],[[9411,9411],"mapped",[110]],[[9412,9412],"mapped",[111]],[[9413,9413],"mapped",[112]],[[9414,9414],"mapped",[113]],[[9415,9415],"mapped",[114]],[[9416,9416],"mapped",[115]],[[9417,9417],"mapped",[116]],[[9418,9418],"mapped",[117]],[[9419,9419],"mapped",[118]],[[9420,9420],"mapped",[119]],[[9421,9421],"mapped",[120]],[[9422,9422],"mapped",[121]],[[9423,9423],"mapped",[122]],[[9424,9424],"mapped",[97]],[[9425,9425],"mapped",[98]],[[9426,9426],"mapped",[99]],[[9427,9427],"mapped",[100]],[[9428,9428],"mapped",[101]],[[9429,9429],"mapped",[102]],[[9430,9430],"mapped",[103]],[[9431,9431],"mapped",[104]],[[9432,9432],"mapped",[105]],[[9433,9433],"mapped",[106]],[[9434,9434],"mapped",[107]],[[9435,9435],"mapped",[108]],[[9436,9436],"mapped",[109]],[[9437,9437],"mapped",[110]],[[9438,94
38],"mapped",[111]],[[9439,9439],"mapped",[112]],[[9440,9440],"mapped",[113]],[[9441,9441],"mapped",[114]],[[9442,9442],"mapped",[115]],[[9443,9443],"mapped",[116]],[[9444,9444],"mapped",[117]],[[9445,9445],"mapped",[118]],[[9446,9446],"mapped",[119]],[[9447,9447],"mapped",[120]],[[9448,9448],"mapped",[121]],[[9449,9449],"mapped",[122]],[[9450,9450],"mapped",[48]],[[9451,9470],"valid",[],"NV8"],[[9471,9471],"valid",[],"NV8"],[[9472,9621],"valid",[],"NV8"],[[9622,9631],"valid",[],"NV8"],[[9632,9711],"valid",[],"NV8"],[[9712,9719],"valid",[],"NV8"],[[9720,9727],"valid",[],"NV8"],[[9728,9747],"valid",[],"NV8"],[[9748,9749],"valid",[],"NV8"],[[9750,9751],"valid",[],"NV8"],[[9752,9752],"valid",[],"NV8"],[[9753,9753],"valid",[],"NV8"],[[9754,9839],"valid",[],"NV8"],[[9840,9841],"valid",[],"NV8"],[[9842,9853],"valid",[],"NV8"],[[9854,9855],"valid",[],"NV8"],[[9856,9865],"valid",[],"NV8"],[[9866,9873],"valid",[],"NV8"],[[9874,9884],"valid",[],"NV8"],[[9885,9885],"valid",[],"NV8"],[[9886,9887],"valid",[],"NV8"],[[9888,9889],"valid",[],"NV8"],[[9890,9905],"valid",[],"NV8"],[[9906,9906],"valid",[],"NV8"],[[9907,9916],"valid",[],"NV8"],[[9917,9919],"valid",[],"NV8"],[[9920,9923],"valid",[],"NV8"],[[9924,9933],"valid",[],"NV8"],[[9934,9934],"valid",[],"NV8"],[[9935,9953],"valid",[],"NV8"],[[9954,9954],"valid",[],"NV8"],[[9955,9955],"valid",[],"NV8"],[[9956,9959],"valid",[],"NV8"],[[9960,9983],"valid",[],"NV8"],[[9984,9984],"valid",[],"NV8"],[[9985,9988],"valid",[],"NV8"],[[9989,9989],"valid",[],"NV8"],[[9990,9993],"valid",[],"NV8"],[[9994,9995],"valid",[],"NV8"],[[9996,10023],"valid",[],"NV8"],[[10024,10024],"valid",[],"NV8"],[[10025,10059],"valid",[],"NV8"],[[10060,10060],"valid",[],"NV8"],[[10061,10061],"valid",[],"NV8"],[[10062,10062],"valid",[],"NV8"],[[10063,10066],"valid",[],"NV8"],[[10067,10069],"valid",[],"NV8"],[[10070,10070],"valid",[],"NV8"],[[10071,10071],"valid",[],"NV8"],[[10072,10078],"valid",[],"NV8"],[[10079,10080],"valid",[],"NV8"],[[10081,10087],"valid",[],"NV8"],[[10088,10101],"valid",[],"NV8"],[[10102,10132],"valid",[],"NV8"],[[10133,10135],"valid",[],"NV8"],[[10136,10159],"valid",[],"NV8"],[[10160,10160],"valid",[],"NV8"],[[10161,10174],"valid",[],"NV8"],[[10175,10175],"valid",[],"NV8"],[[10176,10182],"valid",[],"NV8"],[[10183,10186],"valid",[],"NV8"],[[10187,10187],"valid",[],"NV8"],[[10188,10188],"valid",[],"NV8"],[[10189,10189],"valid",[],"NV8"],[[10190,10191],"valid",[],"NV8"],[[10192,10219],"valid",[],"NV8"],[[10220,10223],"valid",[],"NV8"],[[10224,10239],"valid",[],"NV8"],[[10240,10495],"valid",[],"NV8"],[[10496,10763],"valid",[],"NV8"],[[10764,10764],"mapped",[8747,8747,8747,8747]],[[10765,10867],"valid",[],"NV8"],[[10868,10868],"disallowed_STD3_mapped",[58,58,61]],[[10869,10869],"disallowed_STD3_mapped",[61,61]],[[10870,10870],"disallowed_STD3_mapped",[61,61,61]],[[10871,10971],"valid",[],"NV8"],[[10972,10972],"mapped",[10973,824]],[[10973,11007],"valid",[],"NV8"],[[11008,11021],"valid",[],"NV8"],[[11022,11027],"valid",[],"NV8"],[[11028,11034],"valid",[],"NV8"],[[11035,11039],"valid",[],"NV8"],[[11040,11043],"valid",[],"NV8"],[[11044,11084],"valid",[],"NV8"],[[11085,11087],"valid",[],"NV8"],[[11088,11092],"valid",[],"NV8"],[[11093,11097],"valid",[],"NV8"],[[11098,11123],"valid",[],"NV8"],[[11124,11125],"disallowed"],[[11126,11157],"valid",[],"NV8"],[[11158,11159],"disallowed"],[[11160,11193],"valid",[],"NV8"],[[11194,11196],"disallowed"],[[11197,11208],"valid",[],"NV8"],[[11209,11209],"disallowed"],[[11210,11217],"valid",[],"NV8"],[[11218,11243],"disallowed"],[[11244,11247]
,"valid",[],"NV8"],[[11248,11263],"disallowed"],[[11264,11264],"mapped",[11312]],[[11265,11265],"mapped",[11313]],[[11266,11266],"mapped",[11314]],[[11267,11267],"mapped",[11315]],[[11268,11268],"mapped",[11316]],[[11269,11269],"mapped",[11317]],[[11270,11270],"mapped",[11318]],[[11271,11271],"mapped",[11319]],[[11272,11272],"mapped",[11320]],[[11273,11273],"mapped",[11321]],[[11274,11274],"mapped",[11322]],[[11275,11275],"mapped",[11323]],[[11276,11276],"mapped",[11324]],[[11277,11277],"mapped",[11325]],[[11278,11278],"mapped",[11326]],[[11279,11279],"mapped",[11327]],[[11280,11280],"mapped",[11328]],[[11281,11281],"mapped",[11329]],[[11282,11282],"mapped",[11330]],[[11283,11283],"mapped",[11331]],[[11284,11284],"mapped",[11332]],[[11285,11285],"mapped",[11333]],[[11286,11286],"mapped",[11334]],[[11287,11287],"mapped",[11335]],[[11288,11288],"mapped",[11336]],[[11289,11289],"mapped",[11337]],[[11290,11290],"mapped",[11338]],[[11291,11291],"mapped",[11339]],[[11292,11292],"mapped",[11340]],[[11293,11293],"mapped",[11341]],[[11294,11294],"mapped",[11342]],[[11295,11295],"mapped",[11343]],[[11296,11296],"mapped",[11344]],[[11297,11297],"mapped",[11345]],[[11298,11298],"mapped",[11346]],[[11299,11299],"mapped",[11347]],[[11300,11300],"mapped",[11348]],[[11301,11301],"mapped",[11349]],[[11302,11302],"mapped",[11350]],[[11303,11303],"mapped",[11351]],[[11304,11304],"mapped",[11352]],[[11305,11305],"mapped",[11353]],[[11306,11306],"mapped",[11354]],[[11307,11307],"mapped",[11355]],[[11308,11308],"mapped",[11356]],[[11309,11309],"mapped",[11357]],[[11310,11310],"mapped",[11358]],[[11311,11311],"disallowed"],[[11312,11358],"valid"],[[11359,11359],"disallowed"],[[11360,11360],"mapped",[11361]],[[11361,11361],"valid"],[[11362,11362],"mapped",[619]],[[11363,11363],"mapped",[7549]],[[11364,11364],"mapped",[637]],[[11365,11366],"valid"],[[11367,11367],"mapped",[11368]],[[11368,11368],"valid"],[[11369,11369],"mapped",[11370]],[[11370,11370],"valid"],[[11371,11371],"mapped",[11372]],[[11372,11372],"valid"],[[11373,11373],"mapped",[593]],[[11374,11374],"mapped",[625]],[[11375,11375],"mapped",[592]],[[11376,11376],"mapped",[594]],[[11377,11377],"valid"],[[11378,11378],"mapped",[11379]],[[11379,11379],"valid"],[[11380,11380],"valid"],[[11381,11381],"mapped",[11382]],[[11382,11383],"valid"],[[11384,11387],"valid"],[[11388,11388],"mapped",[106]],[[11389,11389],"mapped",[118]],[[11390,11390],"mapped",[575]],[[11391,11391],"mapped",[576]],[[11392,11392],"mapped",[11393]],[[11393,11393],"valid"],[[11394,11394],"mapped",[11395]],[[11395,11395],"valid"],[[11396,11396],"mapped",[11397]],[[11397,11397],"valid"],[[11398,11398],"mapped",[11399]],[[11399,11399],"valid"],[[11400,11400],"mapped",[11401]],[[11401,11401],"valid"],[[11402,11402],"mapped",[11403]],[[11403,11403],"valid"],[[11404,11404],"mapped",[11405]],[[11405,11405],"valid"],[[11406,11406],"mapped",[11407]],[[11407,11407],"valid"],[[11408,11408],"mapped",[11409]],[[11409,11409],"valid"],[[11410,11410],"mapped",[11411]],[[11411,11411],"valid"],[[11412,11412],"mapped",[11413]],[[11413,11413],"valid"],[[11414,11414],"mapped",[11415]],[[11415,11415],"valid"],[[11416,11416],"mapped",[11417]],[[11417,11417],"valid"],[[11418,11418],"mapped",[11419]],[[11419,11419],"valid"],[[11420,11420],"mapped",[11421]],[[11421,11421],"valid"],[[11422,11422],"mapped",[11423]],[[11423,11423],"valid"],[[11424,11424],"mapped",[11425]],[[11425,11425],"valid"],[[11426,11426],"mapped",[11427]],[[11427,11427],"valid"],[[11428,11428],"mapped",[11429]],[[11429,11429],"valid"],[[11430,114
30],"mapped",[11431]],[[11431,11431],"valid"],[[11432,11432],"mapped",[11433]],[[11433,11433],"valid"],[[11434,11434],"mapped",[11435]],[[11435,11435],"valid"],[[11436,11436],"mapped",[11437]],[[11437,11437],"valid"],[[11438,11438],"mapped",[11439]],[[11439,11439],"valid"],[[11440,11440],"mapped",[11441]],[[11441,11441],"valid"],[[11442,11442],"mapped",[11443]],[[11443,11443],"valid"],[[11444,11444],"mapped",[11445]],[[11445,11445],"valid"],[[11446,11446],"mapped",[11447]],[[11447,11447],"valid"],[[11448,11448],"mapped",[11449]],[[11449,11449],"valid"],[[11450,11450],"mapped",[11451]],[[11451,11451],"valid"],[[11452,11452],"mapped",[11453]],[[11453,11453],"valid"],[[11454,11454],"mapped",[11455]],[[11455,11455],"valid"],[[11456,11456],"mapped",[11457]],[[11457,11457],"valid"],[[11458,11458],"mapped",[11459]],[[11459,11459],"valid"],[[11460,11460],"mapped",[11461]],[[11461,11461],"valid"],[[11462,11462],"mapped",[11463]],[[11463,11463],"valid"],[[11464,11464],"mapped",[11465]],[[11465,11465],"valid"],[[11466,11466],"mapped",[11467]],[[11467,11467],"valid"],[[11468,11468],"mapped",[11469]],[[11469,11469],"valid"],[[11470,11470],"mapped",[11471]],[[11471,11471],"valid"],[[11472,11472],"mapped",[11473]],[[11473,11473],"valid"],[[11474,11474],"mapped",[11475]],[[11475,11475],"valid"],[[11476,11476],"mapped",[11477]],[[11477,11477],"valid"],[[11478,11478],"mapped",[11479]],[[11479,11479],"valid"],[[11480,11480],"mapped",[11481]],[[11481,11481],"valid"],[[11482,11482],"mapped",[11483]],[[11483,11483],"valid"],[[11484,11484],"mapped",[11485]],[[11485,11485],"valid"],[[11486,11486],"mapped",[11487]],[[11487,11487],"valid"],[[11488,11488],"mapped",[11489]],[[11489,11489],"valid"],[[11490,11490],"mapped",[11491]],[[11491,11492],"valid"],[[11493,11498],"valid",[],"NV8"],[[11499,11499],"mapped",[11500]],[[11500,11500],"valid"],[[11501,11501],"mapped",[11502]],[[11502,11505],"valid"],[[11506,11506],"mapped",[11507]],[[11507,11507],"valid"],[[11508,11512],"disallowed"],[[11513,11519],"valid",[],"NV8"],[[11520,11557],"valid"],[[11558,11558],"disallowed"],[[11559,11559],"valid"],[[11560,11564],"disallowed"],[[11565,11565],"valid"],[[11566,11567],"disallowed"],[[11568,11621],"valid"],[[11622,11623],"valid"],[[11624,11630],"disallowed"],[[11631,11631],"mapped",[11617]],[[11632,11632],"valid",[],"NV8"],[[11633,11646],"disallowed"],[[11647,11647],"valid"],[[11648,11670],"valid"],[[11671,11679],"disallowed"],[[11680,11686],"valid"],[[11687,11687],"disallowed"],[[11688,11694],"valid"],[[11695,11695],"disallowed"],[[11696,11702],"valid"],[[11703,11703],"disallowed"],[[11704,11710],"valid"],[[11711,11711],"disallowed"],[[11712,11718],"valid"],[[11719,11719],"disallowed"],[[11720,11726],"valid"],[[11727,11727],"disallowed"],[[11728,11734],"valid"],[[11735,11735],"disallowed"],[[11736,11742],"valid"],[[11743,11743],"disallowed"],[[11744,11775],"valid"],[[11776,11799],"valid",[],"NV8"],[[11800,11803],"valid",[],"NV8"],[[11804,11805],"valid",[],"NV8"],[[11806,11822],"valid",[],"NV8"],[[11823,11823],"valid"],[[11824,11824],"valid",[],"NV8"],[[11825,11825],"valid",[],"NV8"],[[11826,11835],"valid",[],"NV8"],[[11836,11842],"valid",[],"NV8"],[[11843,11903],"disallowed"],[[11904,11929],"valid",[],"NV8"],[[11930,11930],"disallowed"],[[11931,11934],"valid",[],"NV8"],[[11935,11935],"mapped",[27597]],[[11936,12018],"valid",[],"NV8"],[[12019,12019],"mapped",[40863]],[[12020,12031],"disallowed"],[[12032,12032],"mapped",[19968]],[[12033,12033],"mapped",[20008]],[[12034,12034],"mapped",[20022]],[[12035,12035],"mapped",[20031]],[[120
36,12036],"mapped",[20057]],[[12037,12037],"mapped",[20101]],[[12038,12038],"mapped",[20108]],[[12039,12039],"mapped",[20128]],[[12040,12040],"mapped",[20154]],[[12041,12041],"mapped",[20799]],[[12042,12042],"mapped",[20837]],[[12043,12043],"mapped",[20843]],[[12044,12044],"mapped",[20866]],[[12045,12045],"mapped",[20886]],[[12046,12046],"mapped",[20907]],[[12047,12047],"mapped",[20960]],[[12048,12048],"mapped",[20981]],[[12049,12049],"mapped",[20992]],[[12050,12050],"mapped",[21147]],[[12051,12051],"mapped",[21241]],[[12052,12052],"mapped",[21269]],[[12053,12053],"mapped",[21274]],[[12054,12054],"mapped",[21304]],[[12055,12055],"mapped",[21313]],[[12056,12056],"mapped",[21340]],[[12057,12057],"mapped",[21353]],[[12058,12058],"mapped",[21378]],[[12059,12059],"mapped",[21430]],[[12060,12060],"mapped",[21448]],[[12061,12061],"mapped",[21475]],[[12062,12062],"mapped",[22231]],[[12063,12063],"mapped",[22303]],[[12064,12064],"mapped",[22763]],[[12065,12065],"mapped",[22786]],[[12066,12066],"mapped",[22794]],[[12067,12067],"mapped",[22805]],[[12068,12068],"mapped",[22823]],[[12069,12069],"mapped",[22899]],[[12070,12070],"mapped",[23376]],[[12071,12071],"mapped",[23424]],[[12072,12072],"mapped",[23544]],[[12073,12073],"mapped",[23567]],[[12074,12074],"mapped",[23586]],[[12075,12075],"mapped",[23608]],[[12076,12076],"mapped",[23662]],[[12077,12077],"mapped",[23665]],[[12078,12078],"mapped",[24027]],[[12079,12079],"mapped",[24037]],[[12080,12080],"mapped",[24049]],[[12081,12081],"mapped",[24062]],[[12082,12082],"mapped",[24178]],[[12083,12083],"mapped",[24186]],[[12084,12084],"mapped",[24191]],[[12085,12085],"mapped",[24308]],[[12086,12086],"mapped",[24318]],[[12087,12087],"mapped",[24331]],[[12088,12088],"mapped",[24339]],[[12089,12089],"mapped",[24400]],[[12090,12090],"mapped",[24417]],[[12091,12091],"mapped",[24435]],[[12092,12092],"mapped",[24515]],[[12093,12093],"mapped",[25096]],[[12094,12094],"mapped",[25142]],[[12095,12095],"mapped",[25163]],[[12096,12096],"mapped",[25903]],[[12097,12097],"mapped",[25908]],[[12098,12098],"mapped",[25991]],[[12099,12099],"mapped",[26007]],[[12100,12100],"mapped",[26020]],[[12101,12101],"mapped",[26041]],[[12102,12102],"mapped",[26080]],[[12103,12103],"mapped",[26085]],[[12104,12104],"mapped",[26352]],[[12105,12105],"mapped",[26376]],[[12106,12106],"mapped",[26408]],[[12107,12107],"mapped",[27424]],[[12108,12108],"mapped",[27490]],[[12109,12109],"mapped",[27513]],[[12110,12110],"mapped",[27571]],[[12111,12111],"mapped",[27595]],[[12112,12112],"mapped",[27604]],[[12113,12113],"mapped",[27611]],[[12114,12114],"mapped",[27663]],[[12115,12115],"mapped",[27668]],[[12116,12116],"mapped",[27700]],[[12117,12117],"mapped",[28779]],[[12118,12118],"mapped",[29226]],[[12119,12119],"mapped",[29238]],[[12120,12120],"mapped",[29243]],[[12121,12121],"mapped",[29247]],[[12122,12122],"mapped",[29255]],[[12123,12123],"mapped",[29273]],[[12124,12124],"mapped",[29275]],[[12125,12125],"mapped",[29356]],[[12126,12126],"mapped",[29572]],[[12127,12127],"mapped",[29577]],[[12128,12128],"mapped",[29916]],[[12129,12129],"mapped",[29926]],[[12130,12130],"mapped",[29976]],[[12131,12131],"mapped",[29983]],[[12132,12132],"mapped",[29992]],[[12133,12133],"mapped",[30000]],[[12134,12134],"mapped",[30091]],[[12135,12135],"mapped",[30098]],[[12136,12136],"mapped",[30326]],[[12137,12137],"mapped",[30333]],[[12138,12138],"mapped",[30382]],[[12139,12139],"mapped",[30399]],[[12140,12140],"mapped",[30446]],[[12141,12141],"mapped",[30683]],[[12142,12142],"mapped",[30690]],[[12143,12143],"mapped",[3070
7]],[[12144,12144],"mapped",[31034]],[[12145,12145],"mapped",[31160]],[[12146,12146],"mapped",[31166]],[[12147,12147],"mapped",[31348]],[[12148,12148],"mapped",[31435]],[[12149,12149],"mapped",[31481]],[[12150,12150],"mapped",[31859]],[[12151,12151],"mapped",[31992]],[[12152,12152],"mapped",[32566]],[[12153,12153],"mapped",[32593]],[[12154,12154],"mapped",[32650]],[[12155,12155],"mapped",[32701]],[[12156,12156],"mapped",[32769]],[[12157,12157],"mapped",[32780]],[[12158,12158],"mapped",[32786]],[[12159,12159],"mapped",[32819]],[[12160,12160],"mapped",[32895]],[[12161,12161],"mapped",[32905]],[[12162,12162],"mapped",[33251]],[[12163,12163],"mapped",[33258]],[[12164,12164],"mapped",[33267]],[[12165,12165],"mapped",[33276]],[[12166,12166],"mapped",[33292]],[[12167,12167],"mapped",[33307]],[[12168,12168],"mapped",[33311]],[[12169,12169],"mapped",[33390]],[[12170,12170],"mapped",[33394]],[[12171,12171],"mapped",[33400]],[[12172,12172],"mapped",[34381]],[[12173,12173],"mapped",[34411]],[[12174,12174],"mapped",[34880]],[[12175,12175],"mapped",[34892]],[[12176,12176],"mapped",[34915]],[[12177,12177],"mapped",[35198]],[[12178,12178],"mapped",[35211]],[[12179,12179],"mapped",[35282]],[[12180,12180],"mapped",[35328]],[[12181,12181],"mapped",[35895]],[[12182,12182],"mapped",[35910]],[[12183,12183],"mapped",[35925]],[[12184,12184],"mapped",[35960]],[[12185,12185],"mapped",[35997]],[[12186,12186],"mapped",[36196]],[[12187,12187],"mapped",[36208]],[[12188,12188],"mapped",[36275]],[[12189,12189],"mapped",[36523]],[[12190,12190],"mapped",[36554]],[[12191,12191],"mapped",[36763]],[[12192,12192],"mapped",[36784]],[[12193,12193],"mapped",[36789]],[[12194,12194],"mapped",[37009]],[[12195,12195],"mapped",[37193]],[[12196,12196],"mapped",[37318]],[[12197,12197],"mapped",[37324]],[[12198,12198],"mapped",[37329]],[[12199,12199],"mapped",[38263]],[[12200,12200],"mapped",[38272]],[[12201,12201],"mapped",[38428]],[[12202,12202],"mapped",[38582]],[[12203,12203],"mapped",[38585]],[[12204,12204],"mapped",[38632]],[[12205,12205],"mapped",[38737]],[[12206,12206],"mapped",[38750]],[[12207,12207],"mapped",[38754]],[[12208,12208],"mapped",[38761]],[[12209,12209],"mapped",[38859]],[[12210,12210],"mapped",[38893]],[[12211,12211],"mapped",[38899]],[[12212,12212],"mapped",[38913]],[[12213,12213],"mapped",[39080]],[[12214,12214],"mapped",[39131]],[[12215,12215],"mapped",[39135]],[[12216,12216],"mapped",[39318]],[[12217,12217],"mapped",[39321]],[[12218,12218],"mapped",[39340]],[[12219,12219],"mapped",[39592]],[[12220,12220],"mapped",[39640]],[[12221,12221],"mapped",[39647]],[[12222,12222],"mapped",[39717]],[[12223,12223],"mapped",[39727]],[[12224,12224],"mapped",[39730]],[[12225,12225],"mapped",[39740]],[[12226,12226],"mapped",[39770]],[[12227,12227],"mapped",[40165]],[[12228,12228],"mapped",[40565]],[[12229,12229],"mapped",[40575]],[[12230,12230],"mapped",[40613]],[[12231,12231],"mapped",[40635]],[[12232,12232],"mapped",[40643]],[[12233,12233],"mapped",[40653]],[[12234,12234],"mapped",[40657]],[[12235,12235],"mapped",[40697]],[[12236,12236],"mapped",[40701]],[[12237,12237],"mapped",[40718]],[[12238,12238],"mapped",[40723]],[[12239,12239],"mapped",[40736]],[[12240,12240],"mapped",[40763]],[[12241,12241],"mapped",[40778]],[[12242,12242],"mapped",[40786]],[[12243,12243],"mapped",[40845]],[[12244,12244],"mapped",[40860]],[[12245,12245],"mapped",[40864]],[[12246,12271],"disallowed"],[[12272,12283],"disallowed"],[[12284,12287],"disallowed"],[[12288,12288],"disallowed_STD3_mapped",[32]],[[12289,12289],"valid",[],"NV8"],[[12290,12290],"map
ped",[46]],[[12291,12292],"valid",[],"NV8"],[[12293,12295],"valid"],[[12296,12329],"valid",[],"NV8"],[[12330,12333],"valid"],[[12334,12341],"valid",[],"NV8"],[[12342,12342],"mapped",[12306]],[[12343,12343],"valid",[],"NV8"],[[12344,12344],"mapped",[21313]],[[12345,12345],"mapped",[21316]],[[12346,12346],"mapped",[21317]],[[12347,12347],"valid",[],"NV8"],[[12348,12348],"valid"],[[12349,12349],"valid",[],"NV8"],[[12350,12350],"valid",[],"NV8"],[[12351,12351],"valid",[],"NV8"],[[12352,12352],"disallowed"],[[12353,12436],"valid"],[[12437,12438],"valid"],[[12439,12440],"disallowed"],[[12441,12442],"valid"],[[12443,12443],"disallowed_STD3_mapped",[32,12441]],[[12444,12444],"disallowed_STD3_mapped",[32,12442]],[[12445,12446],"valid"],[[12447,12447],"mapped",[12424,12426]],[[12448,12448],"valid",[],"NV8"],[[12449,12542],"valid"],[[12543,12543],"mapped",[12467,12488]],[[12544,12548],"disallowed"],[[12549,12588],"valid"],[[12589,12589],"valid"],[[12590,12592],"disallowed"],[[12593,12593],"mapped",[4352]],[[12594,12594],"mapped",[4353]],[[12595,12595],"mapped",[4522]],[[12596,12596],"mapped",[4354]],[[12597,12597],"mapped",[4524]],[[12598,12598],"mapped",[4525]],[[12599,12599],"mapped",[4355]],[[12600,12600],"mapped",[4356]],[[12601,12601],"mapped",[4357]],[[12602,12602],"mapped",[4528]],[[12603,12603],"mapped",[4529]],[[12604,12604],"mapped",[4530]],[[12605,12605],"mapped",[4531]],[[12606,12606],"mapped",[4532]],[[12607,12607],"mapped",[4533]],[[12608,12608],"mapped",[4378]],[[12609,12609],"mapped",[4358]],[[12610,12610],"mapped",[4359]],[[12611,12611],"mapped",[4360]],[[12612,12612],"mapped",[4385]],[[12613,12613],"mapped",[4361]],[[12614,12614],"mapped",[4362]],[[12615,12615],"mapped",[4363]],[[12616,12616],"mapped",[4364]],[[12617,12617],"mapped",[4365]],[[12618,12618],"mapped",[4366]],[[12619,12619],"mapped",[4367]],[[12620,12620],"mapped",[4368]],[[12621,12621],"mapped",[4369]],[[12622,12622],"mapped",[4370]],[[12623,12623],"mapped",[4449]],[[12624,12624],"mapped",[4450]],[[12625,12625],"mapped",[4451]],[[12626,12626],"mapped",[4452]],[[12627,12627],"mapped",[4453]],[[12628,12628],"mapped",[4454]],[[12629,12629],"mapped",[4455]],[[12630,12630],"mapped",[4456]],[[12631,12631],"mapped",[4457]],[[12632,12632],"mapped",[4458]],[[12633,12633],"mapped",[4459]],[[12634,12634],"mapped",[4460]],[[12635,12635],"mapped",[4461]],[[12636,12636],"mapped",[4462]],[[12637,12637],"mapped",[4463]],[[12638,12638],"mapped",[4464]],[[12639,12639],"mapped",[4465]],[[12640,12640],"mapped",[4466]],[[12641,12641],"mapped",[4467]],[[12642,12642],"mapped",[4468]],[[12643,12643],"mapped",[4469]],[[12644,12644],"disallowed"],[[12645,12645],"mapped",[4372]],[[12646,12646],"mapped",[4373]],[[12647,12647],"mapped",[4551]],[[12648,12648],"mapped",[4552]],[[12649,12649],"mapped",[4556]],[[12650,12650],"mapped",[4558]],[[12651,12651],"mapped",[4563]],[[12652,12652],"mapped",[4567]],[[12653,12653],"mapped",[4569]],[[12654,12654],"mapped",[4380]],[[12655,12655],"mapped",[4573]],[[12656,12656],"mapped",[4575]],[[12657,12657],"mapped",[4381]],[[12658,12658],"mapped",[4382]],[[12659,12659],"mapped",[4384]],[[12660,12660],"mapped",[4386]],[[12661,12661],"mapped",[4387]],[[12662,12662],"mapped",[4391]],[[12663,12663],"mapped",[4393]],[[12664,12664],"mapped",[4395]],[[12665,12665],"mapped",[4396]],[[12666,12666],"mapped",[4397]],[[12667,12667],"mapped",[4398]],[[12668,12668],"mapped",[4399]],[[12669,12669],"mapped",[4402]],[[12670,12670],"mapped",[4406]],[[12671,12671],"mapped",[4416]],[[12672,12672],"mapped",[4423]],[[12673,12673],"mapp
ed",[4428]],[[12674,12674],"mapped",[4593]],[[12675,12675],"mapped",[4594]],[[12676,12676],"mapped",[4439]],[[12677,12677],"mapped",[4440]],[[12678,12678],"mapped",[4441]],[[12679,12679],"mapped",[4484]],[[12680,12680],"mapped",[4485]],[[12681,12681],"mapped",[4488]],[[12682,12682],"mapped",[4497]],[[12683,12683],"mapped",[4498]],[[12684,12684],"mapped",[4500]],[[12685,12685],"mapped",[4510]],[[12686,12686],"mapped",[4513]],[[12687,12687],"disallowed"],[[12688,12689],"valid",[],"NV8"],[[12690,12690],"mapped",[19968]],[[12691,12691],"mapped",[20108]],[[12692,12692],"mapped",[19977]],[[12693,12693],"mapped",[22235]],[[12694,12694],"mapped",[19978]],[[12695,12695],"mapped",[20013]],[[12696,12696],"mapped",[19979]],[[12697,12697],"mapped",[30002]],[[12698,12698],"mapped",[20057]],[[12699,12699],"mapped",[19993]],[[12700,12700],"mapped",[19969]],[[12701,12701],"mapped",[22825]],[[12702,12702],"mapped",[22320]],[[12703,12703],"mapped",[20154]],[[12704,12727],"valid"],[[12728,12730],"valid"],[[12731,12735],"disallowed"],[[12736,12751],"valid",[],"NV8"],[[12752,12771],"valid",[],"NV8"],[[12772,12783],"disallowed"],[[12784,12799],"valid"],[[12800,12800],"disallowed_STD3_mapped",[40,4352,41]],[[12801,12801],"disallowed_STD3_mapped",[40,4354,41]],[[12802,12802],"disallowed_STD3_mapped",[40,4355,41]],[[12803,12803],"disallowed_STD3_mapped",[40,4357,41]],[[12804,12804],"disallowed_STD3_mapped",[40,4358,41]],[[12805,12805],"disallowed_STD3_mapped",[40,4359,41]],[[12806,12806],"disallowed_STD3_mapped",[40,4361,41]],[[12807,12807],"disallowed_STD3_mapped",[40,4363,41]],[[12808,12808],"disallowed_STD3_mapped",[40,4364,41]],[[12809,12809],"disallowed_STD3_mapped",[40,4366,41]],[[12810,12810],"disallowed_STD3_mapped",[40,4367,41]],[[12811,12811],"disallowed_STD3_mapped",[40,4368,41]],[[12812,12812],"disallowed_STD3_mapped",[40,4369,41]],[[12813,12813],"disallowed_STD3_mapped",[40,4370,41]],[[12814,12814],"disallowed_STD3_mapped",[40,44032,41]],[[12815,12815],"disallowed_STD3_mapped",[40,45208,41]],[[12816,12816],"disallowed_STD3_mapped",[40,45796,41]],[[12817,12817],"disallowed_STD3_mapped",[40,46972,41]],[[12818,12818],"disallowed_STD3_mapped",[40,47560,41]],[[12819,12819],"disallowed_STD3_mapped",[40,48148,41]],[[12820,12820],"disallowed_STD3_mapped",[40,49324,41]],[[12821,12821],"disallowed_STD3_mapped",[40,50500,41]],[[12822,12822],"disallowed_STD3_mapped",[40,51088,41]],[[12823,12823],"disallowed_STD3_mapped",[40,52264,41]],[[12824,12824],"disallowed_STD3_mapped",[40,52852,41]],[[12825,12825],"disallowed_STD3_mapped",[40,53440,41]],[[12826,12826],"disallowed_STD3_mapped",[40,54028,41]],[[12827,12827],"disallowed_STD3_mapped",[40,54616,41]],[[12828,12828],"disallowed_STD3_mapped",[40,51452,41]],[[12829,12829],"disallowed_STD3_mapped",[40,50724,51204,41]],[[12830,12830],"disallowed_STD3_mapped",[40,50724,54980,41]],[[12831,12831],"disallowed"],[[12832,12832],"disallowed_STD3_mapped",[40,19968,41]],[[12833,12833],"disallowed_STD3_mapped",[40,20108,41]],[[12834,12834],"disallowed_STD3_mapped",[40,19977,41]],[[12835,12835],"disallowed_STD3_mapped",[40,22235,41]],[[12836,12836],"disallowed_STD3_mapped",[40,20116,41]],[[12837,12837],"disallowed_STD3_mapped",[40,20845,41]],[[12838,12838],"disallowed_STD3_mapped",[40,19971,41]],[[12839,12839],"disallowed_STD3_mapped",[40,20843,41]],[[12840,12840],"disallowed_STD3_mapped",[40,20061,41]],[[12841,12841],"disallowed_STD3_mapped",[40,21313,41]],[[12842,12842],"disallowed_STD3_mapped",[40,26376,41]],[[12843,12843],"disallowed_STD3_mapped",[40,28779,41]],[[12844,12844],
"disallowed_STD3_mapped",[40,27700,41]],[[12845,12845],"disallowed_STD3_mapped",[40,26408,41]],[[12846,12846],"disallowed_STD3_mapped",[40,37329,41]],[[12847,12847],"disallowed_STD3_mapped",[40,22303,41]],[[12848,12848],"disallowed_STD3_mapped",[40,26085,41]],[[12849,12849],"disallowed_STD3_mapped",[40,26666,41]],[[12850,12850],"disallowed_STD3_mapped",[40,26377,41]],[[12851,12851],"disallowed_STD3_mapped",[40,31038,41]],[[12852,12852],"disallowed_STD3_mapped",[40,21517,41]],[[12853,12853],"disallowed_STD3_mapped",[40,29305,41]],[[12854,12854],"disallowed_STD3_mapped",[40,36001,41]],[[12855,12855],"disallowed_STD3_mapped",[40,31069,41]],[[12856,12856],"disallowed_STD3_mapped",[40,21172,41]],[[12857,12857],"disallowed_STD3_mapped",[40,20195,41]],[[12858,12858],"disallowed_STD3_mapped",[40,21628,41]],[[12859,12859],"disallowed_STD3_mapped",[40,23398,41]],[[12860,12860],"disallowed_STD3_mapped",[40,30435,41]],[[12861,12861],"disallowed_STD3_mapped",[40,20225,41]],[[12862,12862],"disallowed_STD3_mapped",[40,36039,41]],[[12863,12863],"disallowed_STD3_mapped",[40,21332,41]],[[12864,12864],"disallowed_STD3_mapped",[40,31085,41]],[[12865,12865],"disallowed_STD3_mapped",[40,20241,41]],[[12866,12866],"disallowed_STD3_mapped",[40,33258,41]],[[12867,12867],"disallowed_STD3_mapped",[40,33267,41]],[[12868,12868],"mapped",[21839]],[[12869,12869],"mapped",[24188]],[[12870,12870],"mapped",[25991]],[[12871,12871],"mapped",[31631]],[[12872,12879],"valid",[],"NV8"],[[12880,12880],"mapped",[112,116,101]],[[12881,12881],"mapped",[50,49]],[[12882,12882],"mapped",[50,50]],[[12883,12883],"mapped",[50,51]],[[12884,12884],"mapped",[50,52]],[[12885,12885],"mapped",[50,53]],[[12886,12886],"mapped",[50,54]],[[12887,12887],"mapped",[50,55]],[[12888,12888],"mapped",[50,56]],[[12889,12889],"mapped",[50,57]],[[12890,12890],"mapped",[51,48]],[[12891,12891],"mapped",[51,49]],[[12892,12892],"mapped",[51,50]],[[12893,12893],"mapped",[51,51]],[[12894,12894],"mapped",[51,52]],[[12895,12895],"mapped",[51,53]],[[12896,12896],"mapped",[4352]],[[12897,12897],"mapped",[4354]],[[12898,12898],"mapped",[4355]],[[12899,12899],"mapped",[4357]],[[12900,12900],"mapped",[4358]],[[12901,12901],"mapped",[4359]],[[12902,12902],"mapped",[4361]],[[12903,12903],"mapped",[4363]],[[12904,12904],"mapped",[4364]],[[12905,12905],"mapped",[4366]],[[12906,12906],"mapped",[4367]],[[12907,12907],"mapped",[4368]],[[12908,12908],"mapped",[4369]],[[12909,12909],"mapped",[4370]],[[12910,12910],"mapped",[44032]],[[12911,12911],"mapped",[45208]],[[12912,12912],"mapped",[45796]],[[12913,12913],"mapped",[46972]],[[12914,12914],"mapped",[47560]],[[12915,12915],"mapped",[48148]],[[12916,12916],"mapped",[49324]],[[12917,12917],"mapped",[50500]],[[12918,12918],"mapped",[51088]],[[12919,12919],"mapped",[52264]],[[12920,12920],"mapped",[52852]],[[12921,12921],"mapped",[53440]],[[12922,12922],"mapped",[54028]],[[12923,12923],"mapped",[54616]],[[12924,12924],"mapped",[52280,44256]],[[12925,12925],"mapped",[51452,51032]],[[12926,12926],"mapped",[50864]],[[12927,12927],"valid",[],"NV8"],[[12928,12928],"mapped",[19968]],[[12929,12929],"mapped",[20108]],[[12930,12930],"mapped",[19977]],[[12931,12931],"mapped",[22235]],[[12932,12932],"mapped",[20116]],[[12933,12933],"mapped",[20845]],[[12934,12934],"mapped",[19971]],[[12935,12935],"mapped",[20843]],[[12936,12936],"mapped",[20061]],[[12937,12937],"mapped",[21313]],[[12938,12938],"mapped",[26376]],[[12939,12939],"mapped",[28779]],[[12940,12940],"mapped",[27700]],[[12941,12941],"mapped",[26408]],[[12942,12942],"mapped",[37329]],[[
12943,12943],"mapped",[22303]],[[12944,12944],"mapped",[26085]],[[12945,12945],"mapped",[26666]],[[12946,12946],"mapped",[26377]],[[12947,12947],"mapped",[31038]],[[12948,12948],"mapped",[21517]],[[12949,12949],"mapped",[29305]],[[12950,12950],"mapped",[36001]],[[12951,12951],"mapped",[31069]],[[12952,12952],"mapped",[21172]],[[12953,12953],"mapped",[31192]],[[12954,12954],"mapped",[30007]],[[12955,12955],"mapped",[22899]],[[12956,12956],"mapped",[36969]],[[12957,12957],"mapped",[20778]],[[12958,12958],"mapped",[21360]],[[12959,12959],"mapped",[27880]],[[12960,12960],"mapped",[38917]],[[12961,12961],"mapped",[20241]],[[12962,12962],"mapped",[20889]],[[12963,12963],"mapped",[27491]],[[12964,12964],"mapped",[19978]],[[12965,12965],"mapped",[20013]],[[12966,12966],"mapped",[19979]],[[12967,12967],"mapped",[24038]],[[12968,12968],"mapped",[21491]],[[12969,12969],"mapped",[21307]],[[12970,12970],"mapped",[23447]],[[12971,12971],"mapped",[23398]],[[12972,12972],"mapped",[30435]],[[12973,12973],"mapped",[20225]],[[12974,12974],"mapped",[36039]],[[12975,12975],"mapped",[21332]],[[12976,12976],"mapped",[22812]],[[12977,12977],"mapped",[51,54]],[[12978,12978],"mapped",[51,55]],[[12979,12979],"mapped",[51,56]],[[12980,12980],"mapped",[51,57]],[[12981,12981],"mapped",[52,48]],[[12982,12982],"mapped",[52,49]],[[12983,12983],"mapped",[52,50]],[[12984,12984],"mapped",[52,51]],[[12985,12985],"mapped",[52,52]],[[12986,12986],"mapped",[52,53]],[[12987,12987],"mapped",[52,54]],[[12988,12988],"mapped",[52,55]],[[12989,12989],"mapped",[52,56]],[[12990,12990],"mapped",[52,57]],[[12991,12991],"mapped",[53,48]],[[12992,12992],"mapped",[49,26376]],[[12993,12993],"mapped",[50,26376]],[[12994,12994],"mapped",[51,26376]],[[12995,12995],"mapped",[52,26376]],[[12996,12996],"mapped",[53,26376]],[[12997,12997],"mapped",[54,26376]],[[12998,12998],"mapped",[55,26376]],[[12999,12999],"mapped",[56,26376]],[[13000,13000],"mapped",[57,26376]],[[13001,13001],"mapped",[49,48,26376]],[[13002,13002],"mapped",[49,49,26376]],[[13003,13003],"mapped",[49,50,26376]],[[13004,13004],"mapped",[104,103]],[[13005,13005],"mapped",[101,114,103]],[[13006,13006],"mapped",[101,118]],[[13007,13007],"mapped",[108,116,100]],[[13008,13008],"mapped",[12450]],[[13009,13009],"mapped",[12452]],[[13010,13010],"mapped",[12454]],[[13011,13011],"mapped",[12456]],[[13012,13012],"mapped",[12458]],[[13013,13013],"mapped",[12459]],[[13014,13014],"mapped",[12461]],[[13015,13015],"mapped",[12463]],[[13016,13016],"mapped",[12465]],[[13017,13017],"mapped",[12467]],[[13018,13018],"mapped",[12469]],[[13019,13019],"mapped",[12471]],[[13020,13020],"mapped",[12473]],[[13021,13021],"mapped",[12475]],[[13022,13022],"mapped",[12477]],[[13023,13023],"mapped",[12479]],[[13024,13024],"mapped",[12481]],[[13025,13025],"mapped",[12484]],[[13026,13026],"mapped",[12486]],[[13027,13027],"mapped",[12488]],[[13028,13028],"mapped",[12490]],[[13029,13029],"mapped",[12491]],[[13030,13030],"mapped",[12492]],[[13031,13031],"mapped",[12493]],[[13032,13032],"mapped",[12494]],[[13033,13033],"mapped",[12495]],[[13034,13034],"mapped",[12498]],[[13035,13035],"mapped",[12501]],[[13036,13036],"mapped",[12504]],[[13037,13037],"mapped",[12507]],[[13038,13038],"mapped",[12510]],[[13039,13039],"mapped",[12511]],[[13040,13040],"mapped",[12512]],[[13041,13041],"mapped",[12513]],[[13042,13042],"mapped",[12514]],[[13043,13043],"mapped",[12516]],[[13044,13044],"mapped",[12518]],[[13045,13045],"mapped",[12520]],[[13046,13046],"mapped",[12521]],[[13047,13047],"mapped",[12522]],[[13048,13048],"mapped",[12523]
],[[13049,13049],"mapped",[12524]],[[13050,13050],"mapped",[12525]],[[13051,13051],"mapped",[12527]],[[13052,13052],"mapped",[12528]],[[13053,13053],"mapped",[12529]],[[13054,13054],"mapped",[12530]],[[13055,13055],"disallowed"],[[13056,13056],"mapped",[12450,12497,12540,12488]],[[13057,13057],"mapped",[12450,12523,12501,12449]],[[13058,13058],"mapped",[12450,12531,12506,12450]],[[13059,13059],"mapped",[12450,12540,12523]],[[13060,13060],"mapped",[12452,12491,12531,12464]],[[13061,13061],"mapped",[12452,12531,12481]],[[13062,13062],"mapped",[12454,12457,12531]],[[13063,13063],"mapped",[12456,12473,12463,12540,12489]],[[13064,13064],"mapped",[12456,12540,12459,12540]],[[13065,13065],"mapped",[12458,12531,12473]],[[13066,13066],"mapped",[12458,12540,12512]],[[13067,13067],"mapped",[12459,12452,12522]],[[13068,13068],"mapped",[12459,12521,12483,12488]],[[13069,13069],"mapped",[12459,12525,12522,12540]],[[13070,13070],"mapped",[12460,12525,12531]],[[13071,13071],"mapped",[12460,12531,12510]],[[13072,13072],"mapped",[12462,12460]],[[13073,13073],"mapped",[12462,12491,12540]],[[13074,13074],"mapped",[12461,12517,12522,12540]],[[13075,13075],"mapped",[12462,12523,12480,12540]],[[13076,13076],"mapped",[12461,12525]],[[13077,13077],"mapped",[12461,12525,12464,12521,12512]],[[13078,13078],"mapped",[12461,12525,12513,12540,12488,12523]],[[13079,13079],"mapped",[12461,12525,12527,12483,12488]],[[13080,13080],"mapped",[12464,12521,12512]],[[13081,13081],"mapped",[12464,12521,12512,12488,12531]],[[13082,13082],"mapped",[12463,12523,12476,12452,12525]],[[13083,13083],"mapped",[12463,12525,12540,12493]],[[13084,13084],"mapped",[12465,12540,12473]],[[13085,13085],"mapped",[12467,12523,12490]],[[13086,13086],"mapped",[12467,12540,12509]],[[13087,13087],"mapped",[12469,12452,12463,12523]],[[13088,13088],"mapped",[12469,12531,12481,12540,12512]],[[13089,13089],"mapped",[12471,12522,12531,12464]],[[13090,13090],"mapped",[12475,12531,12481]],[[13091,13091],"mapped",[12475,12531,12488]],[[13092,13092],"mapped",[12480,12540,12473]],[[13093,13093],"mapped",[12487,12471]],[[13094,13094],"mapped",[12489,12523]],[[13095,13095],"mapped",[12488,12531]],[[13096,13096],"mapped",[12490,12494]],[[13097,13097],"mapped",[12494,12483,12488]],[[13098,13098],"mapped",[12495,12452,12484]],[[13099,13099],"mapped",[12497,12540,12475,12531,12488]],[[13100,13100],"mapped",[12497,12540,12484]],[[13101,13101],"mapped",[12496,12540,12524,12523]],[[13102,13102],"mapped",[12500,12450,12473,12488,12523]],[[13103,13103],"mapped",[12500,12463,12523]],[[13104,13104],"mapped",[12500,12467]],[[13105,13105],"mapped",[12499,12523]],[[13106,13106],"mapped",[12501,12449,12521,12483,12489]],[[13107,13107],"mapped",[12501,12451,12540,12488]],[[13108,13108],"mapped",[12502,12483,12471,12455,12523]],[[13109,13109],"mapped",[12501,12521,12531]],[[13110,13110],"mapped",[12504,12463,12479,12540,12523]],[[13111,13111],"mapped",[12506,12477]],[[13112,13112],"mapped",[12506,12491,12498]],[[13113,13113],"mapped",[12504,12523,12484]],[[13114,13114],"mapped",[12506,12531,12473]],[[13115,13115],"mapped",[12506,12540,12472]],[[13116,13116],"mapped",[12505,12540,12479]],[[13117,13117],"mapped",[12509,12452,12531,12488]],[[13118,13118],"mapped",[12508,12523,12488]],[[13119,13119],"mapped",[12507,12531]],[[13120,13120],"mapped",[12509,12531,12489]],[[13121,13121],"mapped",[12507,12540,12523]],[[13122,13122],"mapped",[12507,12540,12531]],[[13123,13123],"mapped",[12510,12452,12463,12525]],[[13124,13124],"mapped",[12510,12452,12523]],[[13125,13125],"mapped",[12510,1248
3,12495]],[[13126,13126],"mapped",[12510,12523,12463]],[[13127,13127],"mapped",[12510,12531,12471,12519,12531]],[[13128,13128],"mapped",[12511,12463,12525,12531]],[[13129,13129],"mapped",[12511,12522]],[[13130,13130],"mapped",[12511,12522,12496,12540,12523]],[[13131,13131],"mapped",[12513,12460]],[[13132,13132],"mapped",[12513,12460,12488,12531]],[[13133,13133],"mapped",[12513,12540,12488,12523]],[[13134,13134],"mapped",[12516,12540,12489]],[[13135,13135],"mapped",[12516,12540,12523]],[[13136,13136],"mapped",[12518,12450,12531]],[[13137,13137],"mapped",[12522,12483,12488,12523]],[[13138,13138],"mapped",[12522,12521]],[[13139,13139],"mapped",[12523,12500,12540]],[[13140,13140],"mapped",[12523,12540,12502,12523]],[[13141,13141],"mapped",[12524,12512]],[[13142,13142],"mapped",[12524,12531,12488,12466,12531]],[[13143,13143],"mapped",[12527,12483,12488]],[[13144,13144],"mapped",[48,28857]],[[13145,13145],"mapped",[49,28857]],[[13146,13146],"mapped",[50,28857]],[[13147,13147],"mapped",[51,28857]],[[13148,13148],"mapped",[52,28857]],[[13149,13149],"mapped",[53,28857]],[[13150,13150],"mapped",[54,28857]],[[13151,13151],"mapped",[55,28857]],[[13152,13152],"mapped",[56,28857]],[[13153,13153],"mapped",[57,28857]],[[13154,13154],"mapped",[49,48,28857]],[[13155,13155],"mapped",[49,49,28857]],[[13156,13156],"mapped",[49,50,28857]],[[13157,13157],"mapped",[49,51,28857]],[[13158,13158],"mapped",[49,52,28857]],[[13159,13159],"mapped",[49,53,28857]],[[13160,13160],"mapped",[49,54,28857]],[[13161,13161],"mapped",[49,55,28857]],[[13162,13162],"mapped",[49,56,28857]],[[13163,13163],"mapped",[49,57,28857]],[[13164,13164],"mapped",[50,48,28857]],[[13165,13165],"mapped",[50,49,28857]],[[13166,13166],"mapped",[50,50,28857]],[[13167,13167],"mapped",[50,51,28857]],[[13168,13168],"mapped",[50,52,28857]],[[13169,13169],"mapped",[104,112,97]],[[13170,13170],"mapped",[100,97]],[[13171,13171],"mapped",[97,117]],[[13172,13172],"mapped",[98,97,114]],[[13173,13173],"mapped",[111,118]],[[13174,13174],"mapped",[112,99]],[[13175,13175],"mapped",[100,109]],[[13176,13176],"mapped",[100,109,50]],[[13177,13177],"mapped",[100,109,51]],[[13178,13178],"mapped",[105,117]],[[13179,13179],"mapped",[24179,25104]],[[13180,13180],"mapped",[26157,21644]],[[13181,13181],"mapped",[22823,27491]],[[13182,13182],"mapped",[26126,27835]],[[13183,13183],"mapped",[26666,24335,20250,31038]],[[13184,13184],"mapped",[112,97]],[[13185,13185],"mapped",[110,97]],[[13186,13186],"mapped",[956,97]],[[13187,13187],"mapped",[109,97]],[[13188,13188],"mapped",[107,97]],[[13189,13189],"mapped",[107,98]],[[13190,13190],"mapped",[109,98]],[[13191,13191],"mapped",[103,98]],[[13192,13192],"mapped",[99,97,108]],[[13193,13193],"mapped",[107,99,97,108]],[[13194,13194],"mapped",[112,102]],[[13195,13195],"mapped",[110,102]],[[13196,13196],"mapped",[956,102]],[[13197,13197],"mapped",[956,103]],[[13198,13198],"mapped",[109,103]],[[13199,13199],"mapped",[107,103]],[[13200,13200],"mapped",[104,122]],[[13201,13201],"mapped",[107,104,122]],[[13202,13202],"mapped",[109,104,122]],[[13203,13203],"mapped",[103,104,122]],[[13204,13204],"mapped",[116,104,122]],[[13205,13205],"mapped",[956,108]],[[13206,13206],"mapped",[109,108]],[[13207,13207],"mapped",[100,108]],[[13208,13208],"mapped",[107,108]],[[13209,13209],"mapped",[102,109]],[[13210,13210],"mapped",[110,109]],[[13211,13211],"mapped",[956,109]],[[13212,13212],"mapped",[109,109]],[[13213,13213],"mapped",[99,109]],[[13214,13214],"mapped",[107,109]],[[13215,13215],"mapped",[109,109,50]],[[13216,13216],"mapped",[99,109,50]],[[13217,
13217],"mapped",[109,50]],[[13218,13218],"mapped",[107,109,50]],[[13219,13219],"mapped",[109,109,51]],[[13220,13220],"mapped",[99,109,51]],[[13221,13221],"mapped",[109,51]],[[13222,13222],"mapped",[107,109,51]],[[13223,13223],"mapped",[109,8725,115]],[[13224,13224],"mapped",[109,8725,115,50]],[[13225,13225],"mapped",[112,97]],[[13226,13226],"mapped",[107,112,97]],[[13227,13227],"mapped",[109,112,97]],[[13228,13228],"mapped",[103,112,97]],[[13229,13229],"mapped",[114,97,100]],[[13230,13230],"mapped",[114,97,100,8725,115]],[[13231,13231],"mapped",[114,97,100,8725,115,50]],[[13232,13232],"mapped",[112,115]],[[13233,13233],"mapped",[110,115]],[[13234,13234],"mapped",[956,115]],[[13235,13235],"mapped",[109,115]],[[13236,13236],"mapped",[112,118]],[[13237,13237],"mapped",[110,118]],[[13238,13238],"mapped",[956,118]],[[13239,13239],"mapped",[109,118]],[[13240,13240],"mapped",[107,118]],[[13241,13241],"mapped",[109,118]],[[13242,13242],"mapped",[112,119]],[[13243,13243],"mapped",[110,119]],[[13244,13244],"mapped",[956,119]],[[13245,13245],"mapped",[109,119]],[[13246,13246],"mapped",[107,119]],[[13247,13247],"mapped",[109,119]],[[13248,13248],"mapped",[107,969]],[[13249,13249],"mapped",[109,969]],[[13250,13250],"disallowed"],[[13251,13251],"mapped",[98,113]],[[13252,13252],"mapped",[99,99]],[[13253,13253],"mapped",[99,100]],[[13254,13254],"mapped",[99,8725,107,103]],[[13255,13255],"disallowed"],[[13256,13256],"mapped",[100,98]],[[13257,13257],"mapped",[103,121]],[[13258,13258],"mapped",[104,97]],[[13259,13259],"mapped",[104,112]],[[13260,13260],"mapped",[105,110]],[[13261,13261],"mapped",[107,107]],[[13262,13262],"mapped",[107,109]],[[13263,13263],"mapped",[107,116]],[[13264,13264],"mapped",[108,109]],[[13265,13265],"mapped",[108,110]],[[13266,13266],"mapped",[108,111,103]],[[13267,13267],"mapped",[108,120]],[[13268,13268],"mapped",[109,98]],[[13269,13269],"mapped",[109,105,108]],[[13270,13270],"mapped",[109,111,108]],[[13271,13271],"mapped",[112,104]],[[13272,13272],"disallowed"],[[13273,13273],"mapped",[112,112,109]],[[13274,13274],"mapped",[112,114]],[[13275,13275],"mapped",[115,114]],[[13276,13276],"mapped",[115,118]],[[13277,13277],"mapped",[119,98]],[[13278,13278],"mapped",[118,8725,109]],[[13279,13279],"mapped",[97,8725,109]],[[13280,13280],"mapped",[49,26085]],[[13281,13281],"mapped",[50,26085]],[[13282,13282],"mapped",[51,26085]],[[13283,13283],"mapped",[52,26085]],[[13284,13284],"mapped",[53,26085]],[[13285,13285],"mapped",[54,26085]],[[13286,13286],"mapped",[55,26085]],[[13287,13287],"mapped",[56,26085]],[[13288,13288],"mapped",[57,26085]],[[13289,13289],"mapped",[49,48,26085]],[[13290,13290],"mapped",[49,49,26085]],[[13291,13291],"mapped",[49,50,26085]],[[13292,13292],"mapped",[49,51,26085]],[[13293,13293],"mapped",[49,52,26085]],[[13294,13294],"mapped",[49,53,26085]],[[13295,13295],"mapped",[49,54,26085]],[[13296,13296],"mapped",[49,55,26085]],[[13297,13297],"mapped",[49,56,26085]],[[13298,13298],"mapped",[49,57,26085]],[[13299,13299],"mapped",[50,48,26085]],[[13300,13300],"mapped",[50,49,26085]],[[13301,13301],"mapped",[50,50,26085]],[[13302,13302],"mapped",[50,51,26085]],[[13303,13303],"mapped",[50,52,26085]],[[13304,13304],"mapped",[50,53,26085]],[[13305,13305],"mapped",[50,54,26085]],[[13306,13306],"mapped",[50,55,26085]],[[13307,13307],"mapped",[50,56,26085]],[[13308,13308],"mapped",[50,57,26085]],[[13309,13309],"mapped",[51,48,26085]],[[13310,13310],"mapped",[51,49,26085]],[[13311,13311],"mapped",[103,97,108]],[[13312,19893],"valid"],[[19894,19903],"disallowed"],[[19904,19967],"va
lid",[],"NV8"],[[19968,40869],"valid"],[[40870,40891],"valid"],[[40892,40899],"valid"],[[40900,40907],"valid"],[[40908,40908],"valid"],[[40909,40917],"valid"],[[40918,40959],"disallowed"],[[40960,42124],"valid"],[[42125,42127],"disallowed"],[[42128,42145],"valid",[],"NV8"],[[42146,42147],"valid",[],"NV8"],[[42148,42163],"valid",[],"NV8"],[[42164,42164],"valid",[],"NV8"],[[42165,42176],"valid",[],"NV8"],[[42177,42177],"valid",[],"NV8"],[[42178,42180],"valid",[],"NV8"],[[42181,42181],"valid",[],"NV8"],[[42182,42182],"valid",[],"NV8"],[[42183,42191],"disallowed"],[[42192,42237],"valid"],[[42238,42239],"valid",[],"NV8"],[[42240,42508],"valid"],[[42509,42511],"valid",[],"NV8"],[[42512,42539],"valid"],[[42540,42559],"disallowed"],[[42560,42560],"mapped",[42561]],[[42561,42561],"valid"],[[42562,42562],"mapped",[42563]],[[42563,42563],"valid"],[[42564,42564],"mapped",[42565]],[[42565,42565],"valid"],[[42566,42566],"mapped",[42567]],[[42567,42567],"valid"],[[42568,42568],"mapped",[42569]],[[42569,42569],"valid"],[[42570,42570],"mapped",[42571]],[[42571,42571],"valid"],[[42572,42572],"mapped",[42573]],[[42573,42573],"valid"],[[42574,42574],"mapped",[42575]],[[42575,42575],"valid"],[[42576,42576],"mapped",[42577]],[[42577,42577],"valid"],[[42578,42578],"mapped",[42579]],[[42579,42579],"valid"],[[42580,42580],"mapped",[42581]],[[42581,42581],"valid"],[[42582,42582],"mapped",[42583]],[[42583,42583],"valid"],[[42584,42584],"mapped",[42585]],[[42585,42585],"valid"],[[42586,42586],"mapped",[42587]],[[42587,42587],"valid"],[[42588,42588],"mapped",[42589]],[[42589,42589],"valid"],[[42590,42590],"mapped",[42591]],[[42591,42591],"valid"],[[42592,42592],"mapped",[42593]],[[42593,42593],"valid"],[[42594,42594],"mapped",[42595]],[[42595,42595],"valid"],[[42596,42596],"mapped",[42597]],[[42597,42597],"valid"],[[42598,42598],"mapped",[42599]],[[42599,42599],"valid"],[[42600,42600],"mapped",[42601]],[[42601,42601],"valid"],[[42602,42602],"mapped",[42603]],[[42603,42603],"valid"],[[42604,42604],"mapped",[42605]],[[42605,42607],"valid"],[[42608,42611],"valid",[],"NV8"],[[42612,42619],"valid"],[[42620,42621],"valid"],[[42622,42622],"valid",[],"NV8"],[[42623,42623],"valid"],[[42624,42624],"mapped",[42625]],[[42625,42625],"valid"],[[42626,42626],"mapped",[42627]],[[42627,42627],"valid"],[[42628,42628],"mapped",[42629]],[[42629,42629],"valid"],[[42630,42630],"mapped",[42631]],[[42631,42631],"valid"],[[42632,42632],"mapped",[42633]],[[42633,42633],"valid"],[[42634,42634],"mapped",[42635]],[[42635,42635],"valid"],[[42636,42636],"mapped",[42637]],[[42637,42637],"valid"],[[42638,42638],"mapped",[42639]],[[42639,42639],"valid"],[[42640,42640],"mapped",[42641]],[[42641,42641],"valid"],[[42642,42642],"mapped",[42643]],[[42643,42643],"valid"],[[42644,42644],"mapped",[42645]],[[42645,42645],"valid"],[[42646,42646],"mapped",[42647]],[[42647,42647],"valid"],[[42648,42648],"mapped",[42649]],[[42649,42649],"valid"],[[42650,42650],"mapped",[42651]],[[42651,42651],"valid"],[[42652,42652],"mapped",[1098]],[[42653,42653],"mapped",[1100]],[[42654,42654],"valid"],[[42655,42655],"valid"],[[42656,42725],"valid"],[[42726,42735],"valid",[],"NV8"],[[42736,42737],"valid"],[[42738,42743],"valid",[],"NV8"],[[42744,42751],"disallowed"],[[42752,42774],"valid",[],"NV8"],[[42775,42778],"valid"],[[42779,42783],"valid"],[[42784,42785],"valid",[],"NV8"],[[42786,42786],"mapped",[42787]],[[42787,42787],"valid"],[[42788,42788],"mapped",[42789]],[[42789,42789],"valid"],[[42790,42790],"mapped",[42791]],[[42791,42791],"valid"],[[42792,42792],"mapped",[42793]],[
[42793,42793],"valid"],[[42794,42794],"mapped",[42795]],[[42795,42795],"valid"],[[42796,42796],"mapped",[42797]],[[42797,42797],"valid"],[[42798,42798],"mapped",[42799]],[[42799,42801],"valid"],[[42802,42802],"mapped",[42803]],[[42803,42803],"valid"],[[42804,42804],"mapped",[42805]],[[42805,42805],"valid"],[[42806,42806],"mapped",[42807]],[[42807,42807],"valid"],[[42808,42808],"mapped",[42809]],[[42809,42809],"valid"],[[42810,42810],"mapped",[42811]],[[42811,42811],"valid"],[[42812,42812],"mapped",[42813]],[[42813,42813],"valid"],[[42814,42814],"mapped",[42815]],[[42815,42815],"valid"],[[42816,42816],"mapped",[42817]],[[42817,42817],"valid"],[[42818,42818],"mapped",[42819]],[[42819,42819],"valid"],[[42820,42820],"mapped",[42821]],[[42821,42821],"valid"],[[42822,42822],"mapped",[42823]],[[42823,42823],"valid"],[[42824,42824],"mapped",[42825]],[[42825,42825],"valid"],[[42826,42826],"mapped",[42827]],[[42827,42827],"valid"],[[42828,42828],"mapped",[42829]],[[42829,42829],"valid"],[[42830,42830],"mapped",[42831]],[[42831,42831],"valid"],[[42832,42832],"mapped",[42833]],[[42833,42833],"valid"],[[42834,42834],"mapped",[42835]],[[42835,42835],"valid"],[[42836,42836],"mapped",[42837]],[[42837,42837],"valid"],[[42838,42838],"mapped",[42839]],[[42839,42839],"valid"],[[42840,42840],"mapped",[42841]],[[42841,42841],"valid"],[[42842,42842],"mapped",[42843]],[[42843,42843],"valid"],[[42844,42844],"mapped",[42845]],[[42845,42845],"valid"],[[42846,42846],"mapped",[42847]],[[42847,42847],"valid"],[[42848,42848],"mapped",[42849]],[[42849,42849],"valid"],[[42850,42850],"mapped",[42851]],[[42851,42851],"valid"],[[42852,42852],"mapped",[42853]],[[42853,42853],"valid"],[[42854,42854],"mapped",[42855]],[[42855,42855],"valid"],[[42856,42856],"mapped",[42857]],[[42857,42857],"valid"],[[42858,42858],"mapped",[42859]],[[42859,42859],"valid"],[[42860,42860],"mapped",[42861]],[[42861,42861],"valid"],[[42862,42862],"mapped",[42863]],[[42863,42863],"valid"],[[42864,42864],"mapped",[42863]],[[42865,42872],"valid"],[[42873,42873],"mapped",[42874]],[[42874,42874],"valid"],[[42875,42875],"mapped",[42876]],[[42876,42876],"valid"],[[42877,42877],"mapped",[7545]],[[42878,42878],"mapped",[42879]],[[42879,42879],"valid"],[[42880,42880],"mapped",[42881]],[[42881,42881],"valid"],[[42882,42882],"mapped",[42883]],[[42883,42883],"valid"],[[42884,42884],"mapped",[42885]],[[42885,42885],"valid"],[[42886,42886],"mapped",[42887]],[[42887,42888],"valid"],[[42889,42890],"valid",[],"NV8"],[[42891,42891],"mapped",[42892]],[[42892,42892],"valid"],[[42893,42893],"mapped",[613]],[[42894,42894],"valid"],[[42895,42895],"valid"],[[42896,42896],"mapped",[42897]],[[42897,42897],"valid"],[[42898,42898],"mapped",[42899]],[[42899,42899],"valid"],[[42900,42901],"valid"],[[42902,42902],"mapped",[42903]],[[42903,42903],"valid"],[[42904,42904],"mapped",[42905]],[[42905,42905],"valid"],[[42906,42906],"mapped",[42907]],[[42907,42907],"valid"],[[42908,42908],"mapped",[42909]],[[42909,42909],"valid"],[[42910,42910],"mapped",[42911]],[[42911,42911],"valid"],[[42912,42912],"mapped",[42913]],[[42913,42913],"valid"],[[42914,42914],"mapped",[42915]],[[42915,42915],"valid"],[[42916,42916],"mapped",[42917]],[[42917,42917],"valid"],[[42918,42918],"mapped",[42919]],[[42919,42919],"valid"],[[42920,42920],"mapped",[42921]],[[42921,42921],"valid"],[[42922,42922],"mapped",[614]],[[42923,42923],"mapped",[604]],[[42924,42924],"mapped",[609]],[[42925,42925],"mapped",[620]],[[42926,42927],"disallowed"],[[42928,42928],"mapped",[670]],[[42929,42929],"mapped",[647]],[[42930,42930]
,"mapped",[669]],[[42931,42931],"mapped",[43859]],[[42932,42932],"mapped",[42933]],[[42933,42933],"valid"],[[42934,42934],"mapped",[42935]],[[42935,42935],"valid"],[[42936,42998],"disallowed"],[[42999,42999],"valid"],[[43000,43000],"mapped",[295]],[[43001,43001],"mapped",[339]],[[43002,43002],"valid"],[[43003,43007],"valid"],[[43008,43047],"valid"],[[43048,43051],"valid",[],"NV8"],[[43052,43055],"disallowed"],[[43056,43065],"valid",[],"NV8"],[[43066,43071],"disallowed"],[[43072,43123],"valid"],[[43124,43127],"valid",[],"NV8"],[[43128,43135],"disallowed"],[[43136,43204],"valid"],[[43205,43213],"disallowed"],[[43214,43215],"valid",[],"NV8"],[[43216,43225],"valid"],[[43226,43231],"disallowed"],[[43232,43255],"valid"],[[43256,43258],"valid",[],"NV8"],[[43259,43259],"valid"],[[43260,43260],"valid",[],"NV8"],[[43261,43261],"valid"],[[43262,43263],"disallowed"],[[43264,43309],"valid"],[[43310,43311],"valid",[],"NV8"],[[43312,43347],"valid"],[[43348,43358],"disallowed"],[[43359,43359],"valid",[],"NV8"],[[43360,43388],"valid",[],"NV8"],[[43389,43391],"disallowed"],[[43392,43456],"valid"],[[43457,43469],"valid",[],"NV8"],[[43470,43470],"disallowed"],[[43471,43481],"valid"],[[43482,43485],"disallowed"],[[43486,43487],"valid",[],"NV8"],[[43488,43518],"valid"],[[43519,43519],"disallowed"],[[43520,43574],"valid"],[[43575,43583],"disallowed"],[[43584,43597],"valid"],[[43598,43599],"disallowed"],[[43600,43609],"valid"],[[43610,43611],"disallowed"],[[43612,43615],"valid",[],"NV8"],[[43616,43638],"valid"],[[43639,43641],"valid",[],"NV8"],[[43642,43643],"valid"],[[43644,43647],"valid"],[[43648,43714],"valid"],[[43715,43738],"disallowed"],[[43739,43741],"valid"],[[43742,43743],"valid",[],"NV8"],[[43744,43759],"valid"],[[43760,43761],"valid",[],"NV8"],[[43762,43766],"valid"],[[43767,43776],"disallowed"],[[43777,43782],"valid"],[[43783,43784],"disallowed"],[[43785,43790],"valid"],[[43791,43792],"disallowed"],[[43793,43798],"valid"],[[43799,43807],"disallowed"],[[43808,43814],"valid"],[[43815,43815],"disallowed"],[[43816,43822],"valid"],[[43823,43823],"disallowed"],[[43824,43866],"valid"],[[43867,43867],"valid",[],"NV8"],[[43868,43868],"mapped",[42791]],[[43869,43869],"mapped",[43831]],[[43870,43870],"mapped",[619]],[[43871,43871],"mapped",[43858]],[[43872,43875],"valid"],[[43876,43877],"valid"],[[43878,43887],"disallowed"],[[43888,43888],"mapped",[5024]],[[43889,43889],"mapped",[5025]],[[43890,43890],"mapped",[5026]],[[43891,43891],"mapped",[5027]],[[43892,43892],"mapped",[5028]],[[43893,43893],"mapped",[5029]],[[43894,43894],"mapped",[5030]],[[43895,43895],"mapped",[5031]],[[43896,43896],"mapped",[5032]],[[43897,43897],"mapped",[5033]],[[43898,43898],"mapped",[5034]],[[43899,43899],"mapped",[5035]],[[43900,43900],"mapped",[5036]],[[43901,43901],"mapped",[5037]],[[43902,43902],"mapped",[5038]],[[43903,43903],"mapped",[5039]],[[43904,43904],"mapped",[5040]],[[43905,43905],"mapped",[5041]],[[43906,43906],"mapped",[5042]],[[43907,43907],"mapped",[5043]],[[43908,43908],"mapped",[5044]],[[43909,43909],"mapped",[5045]],[[43910,43910],"mapped",[5046]],[[43911,43911],"mapped",[5047]],[[43912,43912],"mapped",[5048]],[[43913,43913],"mapped",[5049]],[[43914,43914],"mapped",[5050]],[[43915,43915],"mapped",[5051]],[[43916,43916],"mapped",[5052]],[[43917,43917],"mapped",[5053]],[[43918,43918],"mapped",[5054]],[[43919,43919],"mapped",[5055]],[[43920,43920],"mapped",[5056]],[[43921,43921],"mapped",[5057]],[[43922,43922],"mapped",[5058]],[[43923,43923],"mapped",[5059]],[[43924,43924],"mapped",[5060]],[[43925,43925],"mapped",[506
1]],[[43926,43926],"mapped",[5062]],[[43927,43927],"mapped",[5063]],[[43928,43928],"mapped",[5064]],[[43929,43929],"mapped",[5065]],[[43930,43930],"mapped",[5066]],[[43931,43931],"mapped",[5067]],[[43932,43932],"mapped",[5068]],[[43933,43933],"mapped",[5069]],[[43934,43934],"mapped",[5070]],[[43935,43935],"mapped",[5071]],[[43936,43936],"mapped",[5072]],[[43937,43937],"mapped",[5073]],[[43938,43938],"mapped",[5074]],[[43939,43939],"mapped",[5075]],[[43940,43940],"mapped",[5076]],[[43941,43941],"mapped",[5077]],[[43942,43942],"mapped",[5078]],[[43943,43943],"mapped",[5079]],[[43944,43944],"mapped",[5080]],[[43945,43945],"mapped",[5081]],[[43946,43946],"mapped",[5082]],[[43947,43947],"mapped",[5083]],[[43948,43948],"mapped",[5084]],[[43949,43949],"mapped",[5085]],[[43950,43950],"mapped",[5086]],[[43951,43951],"mapped",[5087]],[[43952,43952],"mapped",[5088]],[[43953,43953],"mapped",[5089]],[[43954,43954],"mapped",[5090]],[[43955,43955],"mapped",[5091]],[[43956,43956],"mapped",[5092]],[[43957,43957],"mapped",[5093]],[[43958,43958],"mapped",[5094]],[[43959,43959],"mapped",[5095]],[[43960,43960],"mapped",[5096]],[[43961,43961],"mapped",[5097]],[[43962,43962],"mapped",[5098]],[[43963,43963],"mapped",[5099]],[[43964,43964],"mapped",[5100]],[[43965,43965],"mapped",[5101]],[[43966,43966],"mapped",[5102]],[[43967,43967],"mapped",[5103]],[[43968,44010],"valid"],[[44011,44011],"valid",[],"NV8"],[[44012,44013],"valid"],[[44014,44015],"disallowed"],[[44016,44025],"valid"],[[44026,44031],"disallowed"],[[44032,55203],"valid"],[[55204,55215],"disallowed"],[[55216,55238],"valid",[],"NV8"],[[55239,55242],"disallowed"],[[55243,55291],"valid",[],"NV8"],[[55292,55295],"disallowed"],[[55296,57343],"disallowed"],[[57344,63743],"disallowed"],[[63744,63744],"mapped",[35912]],[[63745,63745],"mapped",[26356]],[[63746,63746],"mapped",[36554]],[[63747,63747],"mapped",[36040]],[[63748,63748],"mapped",[28369]],[[63749,63749],"mapped",[20018]],[[63750,63750],"mapped",[21477]],[[63751,63752],"mapped",[40860]],[[63753,63753],"mapped",[22865]],[[63754,63754],"mapped",[37329]],[[63755,63755],"mapped",[21895]],[[63756,63756],"mapped",[22856]],[[63757,63757],"mapped",[25078]],[[63758,63758],"mapped",[30313]],[[63759,63759],"mapped",[32645]],[[63760,63760],"mapped",[34367]],[[63761,63761],"mapped",[34746]],[[63762,63762],"mapped",[35064]],[[63763,63763],"mapped",[37007]],[[63764,63764],"mapped",[27138]],[[63765,63765],"mapped",[27931]],[[63766,63766],"mapped",[28889]],[[63767,63767],"mapped",[29662]],[[63768,63768],"mapped",[33853]],[[63769,63769],"mapped",[37226]],[[63770,63770],"mapped",[39409]],[[63771,63771],"mapped",[20098]],[[63772,63772],"mapped",[21365]],[[63773,63773],"mapped",[27396]],[[63774,63774],"mapped",[29211]],[[63775,63775],"mapped",[34349]],[[63776,63776],"mapped",[40478]],[[63777,63777],"mapped",[23888]],[[63778,63778],"mapped",[28651]],[[63779,63779],"mapped",[34253]],[[63780,63780],"mapped",[35172]],[[63781,63781],"mapped",[25289]],[[63782,63782],"mapped",[33240]],[[63783,63783],"mapped",[34847]],[[63784,63784],"mapped",[24266]],[[63785,63785],"mapped",[26391]],[[63786,63786],"mapped",[28010]],[[63787,63787],"mapped",[29436]],[[63788,63788],"mapped",[37070]],[[63789,63789],"mapped",[20358]],[[63790,63790],"mapped",[20919]],[[63791,63791],"mapped",[21214]],[[63792,63792],"mapped",[25796]],[[63793,63793],"mapped",[27347]],[[63794,63794],"mapped",[29200]],[[63795,63795],"mapped",[30439]],[[63796,63796],"mapped",[32769]],[[63797,63797],"mapped",[34310]],[[63798,63798],"mapped",[34396]],[[63799,63799],"mapped",[36
335]],[[63800,63800],"mapped",[38706]],[[63801,63801],"mapped",[39791]],[[63802,63802],"mapped",[40442]],[[63803,63803],"mapped",[30860]],[[63804,63804],"mapped",[31103]],[[63805,63805],"mapped",[32160]],[[63806,63806],"mapped",[33737]],[[63807,63807],"mapped",[37636]],[[63808,63808],"mapped",[40575]],[[63809,63809],"mapped",[35542]],[[63810,63810],"mapped",[22751]],[[63811,63811],"mapped",[24324]],[[63812,63812],"mapped",[31840]],[[63813,63813],"mapped",[32894]],[[63814,63814],"mapped",[29282]],[[63815,63815],"mapped",[30922]],[[63816,63816],"mapped",[36034]],[[63817,63817],"mapped",[38647]],[[63818,63818],"mapped",[22744]],[[63819,63819],"mapped",[23650]],[[63820,63820],"mapped",[27155]],[[63821,63821],"mapped",[28122]],[[63822,63822],"mapped",[28431]],[[63823,63823],"mapped",[32047]],[[63824,63824],"mapped",[32311]],[[63825,63825],"mapped",[38475]],[[63826,63826],"mapped",[21202]],[[63827,63827],"mapped",[32907]],[[63828,63828],"mapped",[20956]],[[63829,63829],"mapped",[20940]],[[63830,63830],"mapped",[31260]],[[63831,63831],"mapped",[32190]],[[63832,63832],"mapped",[33777]],[[63833,63833],"mapped",[38517]],[[63834,63834],"mapped",[35712]],[[63835,63835],"mapped",[25295]],[[63836,63836],"mapped",[27138]],[[63837,63837],"mapped",[35582]],[[63838,63838],"mapped",[20025]],[[63839,63839],"mapped",[23527]],[[63840,63840],"mapped",[24594]],[[63841,63841],"mapped",[29575]],[[63842,63842],"mapped",[30064]],[[63843,63843],"mapped",[21271]],[[63844,63844],"mapped",[30971]],[[63845,63845],"mapped",[20415]],[[63846,63846],"mapped",[24489]],[[63847,63847],"mapped",[19981]],[[63848,63848],"mapped",[27852]],[[63849,63849],"mapped",[25976]],[[63850,63850],"mapped",[32034]],[[63851,63851],"mapped",[21443]],[[63852,63852],"mapped",[22622]],[[63853,63853],"mapped",[30465]],[[63854,63854],"mapped",[33865]],[[63855,63855],"mapped",[35498]],[[63856,63856],"mapped",[27578]],[[63857,63857],"mapped",[36784]],[[63858,63858],"mapped",[27784]],[[63859,63859],"mapped",[25342]],[[63860,63860],"mapped",[33509]],[[63861,63861],"mapped",[25504]],[[63862,63862],"mapped",[30053]],[[63863,63863],"mapped",[20142]],[[63864,63864],"mapped",[20841]],[[63865,63865],"mapped",[20937]],[[63866,63866],"mapped",[26753]],[[63867,63867],"mapped",[31975]],[[63868,63868],"mapped",[33391]],[[63869,63869],"mapped",[35538]],[[63870,63870],"mapped",[37327]],[[63871,63871],"mapped",[21237]],[[63872,63872],"mapped",[21570]],[[63873,63873],"mapped",[22899]],[[63874,63874],"mapped",[24300]],[[63875,63875],"mapped",[26053]],[[63876,63876],"mapped",[28670]],[[63877,63877],"mapped",[31018]],[[63878,63878],"mapped",[38317]],[[63879,63879],"mapped",[39530]],[[63880,63880],"mapped",[40599]],[[63881,63881],"mapped",[40654]],[[63882,63882],"mapped",[21147]],[[63883,63883],"mapped",[26310]],[[63884,63884],"mapped",[27511]],[[63885,63885],"mapped",[36706]],[[63886,63886],"mapped",[24180]],[[63887,63887],"mapped",[24976]],[[63888,63888],"mapped",[25088]],[[63889,63889],"mapped",[25754]],[[63890,63890],"mapped",[28451]],[[63891,63891],"mapped",[29001]],[[63892,63892],"mapped",[29833]],[[63893,63893],"mapped",[31178]],[[63894,63894],"mapped",[32244]],[[63895,63895],"mapped",[32879]],[[63896,63896],"mapped",[36646]],[[63897,63897],"mapped",[34030]],[[63898,63898],"mapped",[36899]],[[63899,63899],"mapped",[37706]],[[63900,63900],"mapped",[21015]],[[63901,63901],"mapped",[21155]],[[63902,63902],"mapped",[21693]],[[63903,63903],"mapped",[28872]],[[63904,63904],"mapped",[35010]],[[63905,63905],"mapped",[35498]],[[63906,63906],"mapped",[24265]],[[63907,63907],"ma
pped",[24565]],[[63908,63908],"mapped",[25467]],[[63909,63909],"mapped",[27566]],[[63910,63910],"mapped",[31806]],[[63911,63911],"mapped",[29557]],[[63912,63912],"mapped",[20196]],[[63913,63913],"mapped",[22265]],[[63914,63914],"mapped",[23527]],[[63915,63915],"mapped",[23994]],[[63916,63916],"mapped",[24604]],[[63917,63917],"mapped",[29618]],[[63918,63918],"mapped",[29801]],[[63919,63919],"mapped",[32666]],[[63920,63920],"mapped",[32838]],[[63921,63921],"mapped",[37428]],[[63922,63922],"mapped",[38646]],[[63923,63923],"mapped",[38728]],[[63924,63924],"mapped",[38936]],[[63925,63925],"mapped",[20363]],[[63926,63926],"mapped",[31150]],[[63927,63927],"mapped",[37300]],[[63928,63928],"mapped",[38584]],[[63929,63929],"mapped",[24801]],[[63930,63930],"mapped",[20102]],[[63931,63931],"mapped",[20698]],[[63932,63932],"mapped",[23534]],[[63933,63933],"mapped",[23615]],[[63934,63934],"mapped",[26009]],[[63935,63935],"mapped",[27138]],[[63936,63936],"mapped",[29134]],[[63937,63937],"mapped",[30274]],[[63938,63938],"mapped",[34044]],[[63939,63939],"mapped",[36988]],[[63940,63940],"mapped",[40845]],[[63941,63941],"mapped",[26248]],[[63942,63942],"mapped",[38446]],[[63943,63943],"mapped",[21129]],[[63944,63944],"mapped",[26491]],[[63945,63945],"mapped",[26611]],[[63946,63946],"mapped",[27969]],[[63947,63947],"mapped",[28316]],[[63948,63948],"mapped",[29705]],[[63949,63949],"mapped",[30041]],[[63950,63950],"mapped",[30827]],[[63951,63951],"mapped",[32016]],[[63952,63952],"mapped",[39006]],[[63953,63953],"mapped",[20845]],[[63954,63954],"mapped",[25134]],[[63955,63955],"mapped",[38520]],[[63956,63956],"mapped",[20523]],[[63957,63957],"mapped",[23833]],[[63958,63958],"mapped",[28138]],[[63959,63959],"mapped",[36650]],[[63960,63960],"mapped",[24459]],[[63961,63961],"mapped",[24900]],[[63962,63962],"mapped",[26647]],[[63963,63963],"mapped",[29575]],[[63964,63964],"mapped",[38534]],[[63965,63965],"mapped",[21033]],[[63966,63966],"mapped",[21519]],[[63967,63967],"mapped",[23653]],[[63968,63968],"mapped",[26131]],[[63969,63969],"mapped",[26446]],[[63970,63970],"mapped",[26792]],[[63971,63971],"mapped",[27877]],[[63972,63972],"mapped",[29702]],[[63973,63973],"mapped",[30178]],[[63974,63974],"mapped",[32633]],[[63975,63975],"mapped",[35023]],[[63976,63976],"mapped",[35041]],[[63977,63977],"mapped",[37324]],[[63978,63978],"mapped",[38626]],[[63979,63979],"mapped",[21311]],[[63980,63980],"mapped",[28346]],[[63981,63981],"mapped",[21533]],[[63982,63982],"mapped",[29136]],[[63983,63983],"mapped",[29848]],[[63984,63984],"mapped",[34298]],[[63985,63985],"mapped",[38563]],[[63986,63986],"mapped",[40023]],[[63987,63987],"mapped",[40607]],[[63988,63988],"mapped",[26519]],[[63989,63989],"mapped",[28107]],[[63990,63990],"mapped",[33256]],[[63991,63991],"mapped",[31435]],[[63992,63992],"mapped",[31520]],[[63993,63993],"mapped",[31890]],[[63994,63994],"mapped",[29376]],[[63995,63995],"mapped",[28825]],[[63996,63996],"mapped",[35672]],[[63997,63997],"mapped",[20160]],[[63998,63998],"mapped",[33590]],[[63999,63999],"mapped",[21050]],[[64000,64000],"mapped",[20999]],[[64001,64001],"mapped",[24230]],[[64002,64002],"mapped",[25299]],[[64003,64003],"mapped",[31958]],[[64004,64004],"mapped",[23429]],[[64005,64005],"mapped",[27934]],[[64006,64006],"mapped",[26292]],[[64007,64007],"mapped",[36667]],[[64008,64008],"mapped",[34892]],[[64009,64009],"mapped",[38477]],[[64010,64010],"mapped",[35211]],[[64011,64011],"mapped",[24275]],[[64012,64012],"mapped",[20800]],[[64013,64013],"mapped",[21952]],[[64014,64015],"valid"],[[64016,64016],"ma
pped",[22618]],[[64017,64017],"valid"],[[64018,64018],"mapped",[26228]],[[64019,64020],"valid"],[[64021,64021],"mapped",[20958]],[[64022,64022],"mapped",[29482]],[[64023,64023],"mapped",[30410]],[[64024,64024],"mapped",[31036]],[[64025,64025],"mapped",[31070]],[[64026,64026],"mapped",[31077]],[[64027,64027],"mapped",[31119]],[[64028,64028],"mapped",[38742]],[[64029,64029],"mapped",[31934]],[[64030,64030],"mapped",[32701]],[[64031,64031],"valid"],[[64032,64032],"mapped",[34322]],[[64033,64033],"valid"],[[64034,64034],"mapped",[35576]],[[64035,64036],"valid"],[[64037,64037],"mapped",[36920]],[[64038,64038],"mapped",[37117]],[[64039,64041],"valid"],[[64042,64042],"mapped",[39151]],[[64043,64043],"mapped",[39164]],[[64044,64044],"mapped",[39208]],[[64045,64045],"mapped",[40372]],[[64046,64046],"mapped",[37086]],[[64047,64047],"mapped",[38583]],[[64048,64048],"mapped",[20398]],[[64049,64049],"mapped",[20711]],[[64050,64050],"mapped",[20813]],[[64051,64051],"mapped",[21193]],[[64052,64052],"mapped",[21220]],[[64053,64053],"mapped",[21329]],[[64054,64054],"mapped",[21917]],[[64055,64055],"mapped",[22022]],[[64056,64056],"mapped",[22120]],[[64057,64057],"mapped",[22592]],[[64058,64058],"mapped",[22696]],[[64059,64059],"mapped",[23652]],[[64060,64060],"mapped",[23662]],[[64061,64061],"mapped",[24724]],[[64062,64062],"mapped",[24936]],[[64063,64063],"mapped",[24974]],[[64064,64064],"mapped",[25074]],[[64065,64065],"mapped",[25935]],[[64066,64066],"mapped",[26082]],[[64067,64067],"mapped",[26257]],[[64068,64068],"mapped",[26757]],[[64069,64069],"mapped",[28023]],[[64070,64070],"mapped",[28186]],[[64071,64071],"mapped",[28450]],[[64072,64072],"mapped",[29038]],[[64073,64073],"mapped",[29227]],[[64074,64074],"mapped",[29730]],[[64075,64075],"mapped",[30865]],[[64076,64076],"mapped",[31038]],[[64077,64077],"mapped",[31049]],[[64078,64078],"mapped",[31048]],[[64079,64079],"mapped",[31056]],[[64080,64080],"mapped",[31062]],[[64081,64081],"mapped",[31069]],[[64082,64082],"mapped",[31117]],[[64083,64083],"mapped",[31118]],[[64084,64084],"mapped",[31296]],[[64085,64085],"mapped",[31361]],[[64086,64086],"mapped",[31680]],[[64087,64087],"mapped",[32244]],[[64088,64088],"mapped",[32265]],[[64089,64089],"mapped",[32321]],[[64090,64090],"mapped",[32626]],[[64091,64091],"mapped",[32773]],[[64092,64092],"mapped",[33261]],[[64093,64094],"mapped",[33401]],[[64095,64095],"mapped",[33879]],[[64096,64096],"mapped",[35088]],[[64097,64097],"mapped",[35222]],[[64098,64098],"mapped",[35585]],[[64099,64099],"mapped",[35641]],[[64100,64100],"mapped",[36051]],[[64101,64101],"mapped",[36104]],[[64102,64102],"mapped",[36790]],[[64103,64103],"mapped",[36920]],[[64104,64104],"mapped",[38627]],[[64105,64105],"mapped",[38911]],[[64106,64106],"mapped",[38971]],[[64107,64107],"mapped",[24693]],[[64108,64108],"mapped",[148206]],[[64109,64109],"mapped",[33304]],[[64110,64111],"disallowed"],[[64112,64112],"mapped",[20006]],[[64113,64113],"mapped",[20917]],[[64114,64114],"mapped",[20840]],[[64115,64115],"mapped",[20352]],[[64116,64116],"mapped",[20805]],[[64117,64117],"mapped",[20864]],[[64118,64118],"mapped",[21191]],[[64119,64119],"mapped",[21242]],[[64120,64120],"mapped",[21917]],[[64121,64121],"mapped",[21845]],[[64122,64122],"mapped",[21913]],[[64123,64123],"mapped",[21986]],[[64124,64124],"mapped",[22618]],[[64125,64125],"mapped",[22707]],[[64126,64126],"mapped",[22852]],[[64127,64127],"mapped",[22868]],[[64128,64128],"mapped",[23138]],[[64129,64129],"mapped",[23336]],[[64130,64130],"mapped",[24274]],[[64131,64131],"mapped",[24281]],
[[64132,64132],"mapped",[24425]],[[64133,64133],"mapped",[24493]],[[64134,64134],"mapped",[24792]],[[64135,64135],"mapped",[24910]],[[64136,64136],"mapped",[24840]],[[64137,64137],"mapped",[24974]],[[64138,64138],"mapped",[24928]],[[64139,64139],"mapped",[25074]],[[64140,64140],"mapped",[25140]],[[64141,64141],"mapped",[25540]],[[64142,64142],"mapped",[25628]],[[64143,64143],"mapped",[25682]],[[64144,64144],"mapped",[25942]],[[64145,64145],"mapped",[26228]],[[64146,64146],"mapped",[26391]],[[64147,64147],"mapped",[26395]],[[64148,64148],"mapped",[26454]],[[64149,64149],"mapped",[27513]],[[64150,64150],"mapped",[27578]],[[64151,64151],"mapped",[27969]],[[64152,64152],"mapped",[28379]],[[64153,64153],"mapped",[28363]],[[64154,64154],"mapped",[28450]],[[64155,64155],"mapped",[28702]],[[64156,64156],"mapped",[29038]],[[64157,64157],"mapped",[30631]],[[64158,64158],"mapped",[29237]],[[64159,64159],"mapped",[29359]],[[64160,64160],"mapped",[29482]],[[64161,64161],"mapped",[29809]],[[64162,64162],"mapped",[29958]],[[64163,64163],"mapped",[30011]],[[64164,64164],"mapped",[30237]],[[64165,64165],"mapped",[30239]],[[64166,64166],"mapped",[30410]],[[64167,64167],"mapped",[30427]],[[64168,64168],"mapped",[30452]],[[64169,64169],"mapped",[30538]],[[64170,64170],"mapped",[30528]],[[64171,64171],"mapped",[30924]],[[64172,64172],"mapped",[31409]],[[64173,64173],"mapped",[31680]],[[64174,64174],"mapped",[31867]],[[64175,64175],"mapped",[32091]],[[64176,64176],"mapped",[32244]],[[64177,64177],"mapped",[32574]],[[64178,64178],"mapped",[32773]],[[64179,64179],"mapped",[33618]],[[64180,64180],"mapped",[33775]],[[64181,64181],"mapped",[34681]],[[64182,64182],"mapped",[35137]],[[64183,64183],"mapped",[35206]],[[64184,64184],"mapped",[35222]],[[64185,64185],"mapped",[35519]],[[64186,64186],"mapped",[35576]],[[64187,64187],"mapped",[35531]],[[64188,64188],"mapped",[35585]],[[64189,64189],"mapped",[35582]],[[64190,64190],"mapped",[35565]],[[64191,64191],"mapped",[35641]],[[64192,64192],"mapped",[35722]],[[64193,64193],"mapped",[36104]],[[64194,64194],"mapped",[36664]],[[64195,64195],"mapped",[36978]],[[64196,64196],"mapped",[37273]],[[64197,64197],"mapped",[37494]],[[64198,64198],"mapped",[38524]],[[64199,64199],"mapped",[38627]],[[64200,64200],"mapped",[38742]],[[64201,64201],"mapped",[38875]],[[64202,64202],"mapped",[38911]],[[64203,64203],"mapped",[38923]],[[64204,64204],"mapped",[38971]],[[64205,64205],"mapped",[39698]],[[64206,64206],"mapped",[40860]],[[64207,64207],"mapped",[141386]],[[64208,64208],"mapped",[141380]],[[64209,64209],"mapped",[144341]],[[64210,64210],"mapped",[15261]],[[64211,64211],"mapped",[16408]],[[64212,64212],"mapped",[16441]],[[64213,64213],"mapped",[152137]],[[64214,64214],"mapped",[154832]],[[64215,64215],"mapped",[163539]],[[64216,64216],"mapped",[40771]],[[64217,64217],"mapped",[40846]],[[64218,64255],"disallowed"],[[64256,64256],"mapped",[102,102]],[[64257,64257],"mapped",[102,105]],[[64258,64258],"mapped",[102,108]],[[64259,64259],"mapped",[102,102,105]],[[64260,64260],"mapped",[102,102,108]],[[64261,64262],"mapped",[115,116]],[[64263,64274],"disallowed"],[[64275,64275],"mapped",[1396,1398]],[[64276,64276],"mapped",[1396,1381]],[[64277,64277],"mapped",[1396,1387]],[[64278,64278],"mapped",[1406,1398]],[[64279,64279],"mapped",[1396,1389]],[[64280,64284],"disallowed"],[[64285,64285],"mapped",[1497,1460]],[[64286,64286],"valid"],[[64287,64287],"mapped",[1522,1463]],[[64288,64288],"mapped",[1506]],[[64289,64289],"mapped",[1488]],[[64290,64290],"mapped",[1491]],[[64291,64291],"mapped",[14
92]],[[64292,64292],"mapped",[1499]],[[64293,64293],"mapped",[1500]],[[64294,64294],"mapped",[1501]],[[64295,64295],"mapped",[1512]],[[64296,64296],"mapped",[1514]],[[64297,64297],"disallowed_STD3_mapped",[43]],[[64298,64298],"mapped",[1513,1473]],[[64299,64299],"mapped",[1513,1474]],[[64300,64300],"mapped",[1513,1468,1473]],[[64301,64301],"mapped",[1513,1468,1474]],[[64302,64302],"mapped",[1488,1463]],[[64303,64303],"mapped",[1488,1464]],[[64304,64304],"mapped",[1488,1468]],[[64305,64305],"mapped",[1489,1468]],[[64306,64306],"mapped",[1490,1468]],[[64307,64307],"mapped",[1491,1468]],[[64308,64308],"mapped",[1492,1468]],[[64309,64309],"mapped",[1493,1468]],[[64310,64310],"mapped",[1494,1468]],[[64311,64311],"disallowed"],[[64312,64312],"mapped",[1496,1468]],[[64313,64313],"mapped",[1497,1468]],[[64314,64314],"mapped",[1498,1468]],[[64315,64315],"mapped",[1499,1468]],[[64316,64316],"mapped",[1500,1468]],[[64317,64317],"disallowed"],[[64318,64318],"mapped",[1502,1468]],[[64319,64319],"disallowed"],[[64320,64320],"mapped",[1504,1468]],[[64321,64321],"mapped",[1505,1468]],[[64322,64322],"disallowed"],[[64323,64323],"mapped",[1507,1468]],[[64324,64324],"mapped",[1508,1468]],[[64325,64325],"disallowed"],[[64326,64326],"mapped",[1510,1468]],[[64327,64327],"mapped",[1511,1468]],[[64328,64328],"mapped",[1512,1468]],[[64329,64329],"mapped",[1513,1468]],[[64330,64330],"mapped",[1514,1468]],[[64331,64331],"mapped",[1493,1465]],[[64332,64332],"mapped",[1489,1471]],[[64333,64333],"mapped",[1499,1471]],[[64334,64334],"mapped",[1508,1471]],[[64335,64335],"mapped",[1488,1500]],[[64336,64337],"mapped",[1649]],[[64338,64341],"mapped",[1659]],[[64342,64345],"mapped",[1662]],[[64346,64349],"mapped",[1664]],[[64350,64353],"mapped",[1658]],[[64354,64357],"mapped",[1663]],[[64358,64361],"mapped",[1657]],[[64362,64365],"mapped",[1700]],[[64366,64369],"mapped",[1702]],[[64370,64373],"mapped",[1668]],[[64374,64377],"mapped",[1667]],[[64378,64381],"mapped",[1670]],[[64382,64385],"mapped",[1671]],[[64386,64387],"mapped",[1677]],[[64388,64389],"mapped",[1676]],[[64390,64391],"mapped",[1678]],[[64392,64393],"mapped",[1672]],[[64394,64395],"mapped",[1688]],[[64396,64397],"mapped",[1681]],[[64398,64401],"mapped",[1705]],[[64402,64405],"mapped",[1711]],[[64406,64409],"mapped",[1715]],[[64410,64413],"mapped",[1713]],[[64414,64415],"mapped",[1722]],[[64416,64419],"mapped",[1723]],[[64420,64421],"mapped",[1728]],[[64422,64425],"mapped",[1729]],[[64426,64429],"mapped",[1726]],[[64430,64431],"mapped",[1746]],[[64432,64433],"mapped",[1747]],[[64434,64449],"valid",[],"NV8"],[[64450,64466],"disallowed"],[[64467,64470],"mapped",[1709]],[[64471,64472],"mapped",[1735]],[[64473,64474],"mapped",[1734]],[[64475,64476],"mapped",[1736]],[[64477,64477],"mapped",[1735,1652]],[[64478,64479],"mapped",[1739]],[[64480,64481],"mapped",[1733]],[[64482,64483],"mapped",[1737]],[[64484,64487],"mapped",[1744]],[[64488,64489],"mapped",[1609]],[[64490,64491],"mapped",[1574,1575]],[[64492,64493],"mapped",[1574,1749]],[[64494,64495],"mapped",[1574,1608]],[[64496,64497],"mapped",[1574,1735]],[[64498,64499],"mapped",[1574,1734]],[[64500,64501],"mapped",[1574,1736]],[[64502,64504],"mapped",[1574,1744]],[[64505,64507],"mapped",[1574,1609]],[[64508,64511],"mapped",[1740]],[[64512,64512],"mapped",[1574,1580]],[[64513,64513],"mapped",[1574,1581]],[[64514,64514],"mapped",[1574,1605]],[[64515,64515],"mapped",[1574,1609]],[[64516,64516],"mapped",[1574,1610]],[[64517,64517],"mapped",[1576,1580]],[[64518,64518],"mapped",[1576,1581]],[[64519,64519],"mapped",[1576,1582
]],[[64520,64520],"mapped",[1576,1605]],[[64521,64521],"mapped",[1576,1609]],[[64522,64522],"mapped",[1576,1610]],[[64523,64523],"mapped",[1578,1580]],[[64524,64524],"mapped",[1578,1581]],[[64525,64525],"mapped",[1578,1582]],[[64526,64526],"mapped",[1578,1605]],[[64527,64527],"mapped",[1578,1609]],[[64528,64528],"mapped",[1578,1610]],[[64529,64529],"mapped",[1579,1580]],[[64530,64530],"mapped",[1579,1605]],[[64531,64531],"mapped",[1579,1609]],[[64532,64532],"mapped",[1579,1610]],[[64533,64533],"mapped",[1580,1581]],[[64534,64534],"mapped",[1580,1605]],[[64535,64535],"mapped",[1581,1580]],[[64536,64536],"mapped",[1581,1605]],[[64537,64537],"mapped",[1582,1580]],[[64538,64538],"mapped",[1582,1581]],[[64539,64539],"mapped",[1582,1605]],[[64540,64540],"mapped",[1587,1580]],[[64541,64541],"mapped",[1587,1581]],[[64542,64542],"mapped",[1587,1582]],[[64543,64543],"mapped",[1587,1605]],[[64544,64544],"mapped",[1589,1581]],[[64545,64545],"mapped",[1589,1605]],[[64546,64546],"mapped",[1590,1580]],[[64547,64547],"mapped",[1590,1581]],[[64548,64548],"mapped",[1590,1582]],[[64549,64549],"mapped",[1590,1605]],[[64550,64550],"mapped",[1591,1581]],[[64551,64551],"mapped",[1591,1605]],[[64552,64552],"mapped",[1592,1605]],[[64553,64553],"mapped",[1593,1580]],[[64554,64554],"mapped",[1593,1605]],[[64555,64555],"mapped",[1594,1580]],[[64556,64556],"mapped",[1594,1605]],[[64557,64557],"mapped",[1601,1580]],[[64558,64558],"mapped",[1601,1581]],[[64559,64559],"mapped",[1601,1582]],[[64560,64560],"mapped",[1601,1605]],[[64561,64561],"mapped",[1601,1609]],[[64562,64562],"mapped",[1601,1610]],[[64563,64563],"mapped",[1602,1581]],[[64564,64564],"mapped",[1602,1605]],[[64565,64565],"mapped",[1602,1609]],[[64566,64566],"mapped",[1602,1610]],[[64567,64567],"mapped",[1603,1575]],[[64568,64568],"mapped",[1603,1580]],[[64569,64569],"mapped",[1603,1581]],[[64570,64570],"mapped",[1603,1582]],[[64571,64571],"mapped",[1603,1604]],[[64572,64572],"mapped",[1603,1605]],[[64573,64573],"mapped",[1603,1609]],[[64574,64574],"mapped",[1603,1610]],[[64575,64575],"mapped",[1604,1580]],[[64576,64576],"mapped",[1604,1581]],[[64577,64577],"mapped",[1604,1582]],[[64578,64578],"mapped",[1604,1605]],[[64579,64579],"mapped",[1604,1609]],[[64580,64580],"mapped",[1604,1610]],[[64581,64581],"mapped",[1605,1580]],[[64582,64582],"mapped",[1605,1581]],[[64583,64583],"mapped",[1605,1582]],[[64584,64584],"mapped",[1605,1605]],[[64585,64585],"mapped",[1605,1609]],[[64586,64586],"mapped",[1605,1610]],[[64587,64587],"mapped",[1606,1580]],[[64588,64588],"mapped",[1606,1581]],[[64589,64589],"mapped",[1606,1582]],[[64590,64590],"mapped",[1606,1605]],[[64591,64591],"mapped",[1606,1609]],[[64592,64592],"mapped",[1606,1610]],[[64593,64593],"mapped",[1607,1580]],[[64594,64594],"mapped",[1607,1605]],[[64595,64595],"mapped",[1607,1609]],[[64596,64596],"mapped",[1607,1610]],[[64597,64597],"mapped",[1610,1580]],[[64598,64598],"mapped",[1610,1581]],[[64599,64599],"mapped",[1610,1582]],[[64600,64600],"mapped",[1610,1605]],[[64601,64601],"mapped",[1610,1609]],[[64602,64602],"mapped",[1610,1610]],[[64603,64603],"mapped",[1584,1648]],[[64604,64604],"mapped",[1585,1648]],[[64605,64605],"mapped",[1609,1648]],[[64606,64606],"disallowed_STD3_mapped",[32,1612,1617]],[[64607,64607],"disallowed_STD3_mapped",[32,1613,1617]],[[64608,64608],"disallowed_STD3_mapped",[32,1614,1617]],[[64609,64609],"disallowed_STD3_mapped",[32,1615,1617]],[[64610,64610],"disallowed_STD3_mapped",[32,1616,1617]],[[64611,64611],"disallowed_STD3_mapped",[32,1617,1648]],[[64612,64612],"mapped",[1574,1585
]],[[64613,64613],"mapped",[1574,1586]],[[64614,64614],"mapped",[1574,1605]],[[64615,64615],"mapped",[1574,1606]],[[64616,64616],"mapped",[1574,1609]],[[64617,64617],"mapped",[1574,1610]],[[64618,64618],"mapped",[1576,1585]],[[64619,64619],"mapped",[1576,1586]],[[64620,64620],"mapped",[1576,1605]],[[64621,64621],"mapped",[1576,1606]],[[64622,64622],"mapped",[1576,1609]],[[64623,64623],"mapped",[1576,1610]],[[64624,64624],"mapped",[1578,1585]],[[64625,64625],"mapped",[1578,1586]],[[64626,64626],"mapped",[1578,1605]],[[64627,64627],"mapped",[1578,1606]],[[64628,64628],"mapped",[1578,1609]],[[64629,64629],"mapped",[1578,1610]],[[64630,64630],"mapped",[1579,1585]],[[64631,64631],"mapped",[1579,1586]],[[64632,64632],"mapped",[1579,1605]],[[64633,64633],"mapped",[1579,1606]],[[64634,64634],"mapped",[1579,1609]],[[64635,64635],"mapped",[1579,1610]],[[64636,64636],"mapped",[1601,1609]],[[64637,64637],"mapped",[1601,1610]],[[64638,64638],"mapped",[1602,1609]],[[64639,64639],"mapped",[1602,1610]],[[64640,64640],"mapped",[1603,1575]],[[64641,64641],"mapped",[1603,1604]],[[64642,64642],"mapped",[1603,1605]],[[64643,64643],"mapped",[1603,1609]],[[64644,64644],"mapped",[1603,1610]],[[64645,64645],"mapped",[1604,1605]],[[64646,64646],"mapped",[1604,1609]],[[64647,64647],"mapped",[1604,1610]],[[64648,64648],"mapped",[1605,1575]],[[64649,64649],"mapped",[1605,1605]],[[64650,64650],"mapped",[1606,1585]],[[64651,64651],"mapped",[1606,1586]],[[64652,64652],"mapped",[1606,1605]],[[64653,64653],"mapped",[1606,1606]],[[64654,64654],"mapped",[1606,1609]],[[64655,64655],"mapped",[1606,1610]],[[64656,64656],"mapped",[1609,1648]],[[64657,64657],"mapped",[1610,1585]],[[64658,64658],"mapped",[1610,1586]],[[64659,64659],"mapped",[1610,1605]],[[64660,64660],"mapped",[1610,1606]],[[64661,64661],"mapped",[1610,1609]],[[64662,64662],"mapped",[1610,1610]],[[64663,64663],"mapped",[1574,1580]],[[64664,64664],"mapped",[1574,1581]],[[64665,64665],"mapped",[1574,1582]],[[64666,64666],"mapped",[1574,1605]],[[64667,64667],"mapped",[1574,1607]],[[64668,64668],"mapped",[1576,1580]],[[64669,64669],"mapped",[1576,1581]],[[64670,64670],"mapped",[1576,1582]],[[64671,64671],"mapped",[1576,1605]],[[64672,64672],"mapped",[1576,1607]],[[64673,64673],"mapped",[1578,1580]],[[64674,64674],"mapped",[1578,1581]],[[64675,64675],"mapped",[1578,1582]],[[64676,64676],"mapped",[1578,1605]],[[64677,64677],"mapped",[1578,1607]],[[64678,64678],"mapped",[1579,1605]],[[64679,64679],"mapped",[1580,1581]],[[64680,64680],"mapped",[1580,1605]],[[64681,64681],"mapped",[1581,1580]],[[64682,64682],"mapped",[1581,1605]],[[64683,64683],"mapped",[1582,1580]],[[64684,64684],"mapped",[1582,1605]],[[64685,64685],"mapped",[1587,1580]],[[64686,64686],"mapped",[1587,1581]],[[64687,64687],"mapped",[1587,1582]],[[64688,64688],"mapped",[1587,1605]],[[64689,64689],"mapped",[1589,1581]],[[64690,64690],"mapped",[1589,1582]],[[64691,64691],"mapped",[1589,1605]],[[64692,64692],"mapped",[1590,1580]],[[64693,64693],"mapped",[1590,1581]],[[64694,64694],"mapped",[1590,1582]],[[64695,64695],"mapped",[1590,1605]],[[64696,64696],"mapped",[1591,1581]],[[64697,64697],"mapped",[1592,1605]],[[64698,64698],"mapped",[1593,1580]],[[64699,64699],"mapped",[1593,1605]],[[64700,64700],"mapped",[1594,1580]],[[64701,64701],"mapped",[1594,1605]],[[64702,64702],"mapped",[1601,1580]],[[64703,64703],"mapped",[1601,1581]],[[64704,64704],"mapped",[1601,1582]],[[64705,64705],"mapped",[1601,1605]],[[64706,64706],"mapped",[1602,1581]],[[64707,64707],"mapped",[1602,1605]],[[64708,64708],"mapped",[1603,1580]],
[[64709,64709],"mapped",[1603,1581]],[[64710,64710],"mapped",[1603,1582]],[[64711,64711],"mapped",[1603,1604]],[[64712,64712],"mapped",[1603,1605]],[[64713,64713],"mapped",[1604,1580]],[[64714,64714],"mapped",[1604,1581]],[[64715,64715],"mapped",[1604,1582]],[[64716,64716],"mapped",[1604,1605]],[[64717,64717],"mapped",[1604,1607]],[[64718,64718],"mapped",[1605,1580]],[[64719,64719],"mapped",[1605,1581]],[[64720,64720],"mapped",[1605,1582]],[[64721,64721],"mapped",[1605,1605]],[[64722,64722],"mapped",[1606,1580]],[[64723,64723],"mapped",[1606,1581]],[[64724,64724],"mapped",[1606,1582]],[[64725,64725],"mapped",[1606,1605]],[[64726,64726],"mapped",[1606,1607]],[[64727,64727],"mapped",[1607,1580]],[[64728,64728],"mapped",[1607,1605]],[[64729,64729],"mapped",[1607,1648]],[[64730,64730],"mapped",[1610,1580]],[[64731,64731],"mapped",[1610,1581]],[[64732,64732],"mapped",[1610,1582]],[[64733,64733],"mapped",[1610,1605]],[[64734,64734],"mapped",[1610,1607]],[[64735,64735],"mapped",[1574,1605]],[[64736,64736],"mapped",[1574,1607]],[[64737,64737],"mapped",[1576,1605]],[[64738,64738],"mapped",[1576,1607]],[[64739,64739],"mapped",[1578,1605]],[[64740,64740],"mapped",[1578,1607]],[[64741,64741],"mapped",[1579,1605]],[[64742,64742],"mapped",[1579,1607]],[[64743,64743],"mapped",[1587,1605]],[[64744,64744],"mapped",[1587,1607]],[[64745,64745],"mapped",[1588,1605]],[[64746,64746],"mapped",[1588,1607]],[[64747,64747],"mapped",[1603,1604]],[[64748,64748],"mapped",[1603,1605]],[[64749,64749],"mapped",[1604,1605]],[[64750,64750],"mapped",[1606,1605]],[[64751,64751],"mapped",[1606,1607]],[[64752,64752],"mapped",[1610,1605]],[[64753,64753],"mapped",[1610,1607]],[[64754,64754],"mapped",[1600,1614,1617]],[[64755,64755],"mapped",[1600,1615,1617]],[[64756,64756],"mapped",[1600,1616,1617]],[[64757,64757],"mapped",[1591,1609]],[[64758,64758],"mapped",[1591,1610]],[[64759,64759],"mapped",[1593,1609]],[[64760,64760],"mapped",[1593,1610]],[[64761,64761],"mapped",[1594,1609]],[[64762,64762],"mapped",[1594,1610]],[[64763,64763],"mapped",[1587,1609]],[[64764,64764],"mapped",[1587,1610]],[[64765,64765],"mapped",[1588,1609]],[[64766,64766],"mapped",[1588,1610]],[[64767,64767],"mapped",[1581,1609]],[[64768,64768],"mapped",[1581,1610]],[[64769,64769],"mapped",[1580,1609]],[[64770,64770],"mapped",[1580,1610]],[[64771,64771],"mapped",[1582,1609]],[[64772,64772],"mapped",[1582,1610]],[[64773,64773],"mapped",[1589,1609]],[[64774,64774],"mapped",[1589,1610]],[[64775,64775],"mapped",[1590,1609]],[[64776,64776],"mapped",[1590,1610]],[[64777,64777],"mapped",[1588,1580]],[[64778,64778],"mapped",[1588,1581]],[[64779,64779],"mapped",[1588,1582]],[[64780,64780],"mapped",[1588,1605]],[[64781,64781],"mapped",[1588,1585]],[[64782,64782],"mapped",[1587,1585]],[[64783,64783],"mapped",[1589,1585]],[[64784,64784],"mapped",[1590,1585]],[[64785,64785],"mapped",[1591,1609]],[[64786,64786],"mapped",[1591,1610]],[[64787,64787],"mapped",[1593,1609]],[[64788,64788],"mapped",[1593,1610]],[[64789,64789],"mapped",[1594,1609]],[[64790,64790],"mapped",[1594,1610]],[[64791,64791],"mapped",[1587,1609]],[[64792,64792],"mapped",[1587,1610]],[[64793,64793],"mapped",[1588,1609]],[[64794,64794],"mapped",[1588,1610]],[[64795,64795],"mapped",[1581,1609]],[[64796,64796],"mapped",[1581,1610]],[[64797,64797],"mapped",[1580,1609]],[[64798,64798],"mapped",[1580,1610]],[[64799,64799],"mapped",[1582,1609]],[[64800,64800],"mapped",[1582,1610]],[[64801,64801],"mapped",[1589,1609]],[[64802,64802],"mapped",[1589,1610]],[[64803,64803],"mapped",[1590,1609]],[[64804,64804],"mapped",[
1590,1610]],[[64805,64805],"mapped",[1588,1580]],[[64806,64806],"mapped",[1588,1581]],[[64807,64807],"mapped",[1588,1582]],[[64808,64808],"mapped",[1588,1605]],[[64809,64809],"mapped",[1588,1585]],[[64810,64810],"mapped",[1587,1585]],[[64811,64811],"mapped",[1589,1585]],[[64812,64812],"mapped",[1590,1585]],[[64813,64813],"mapped",[1588,1580]],[[64814,64814],"mapped",[1588,1581]],[[64815,64815],"mapped",[1588,1582]],[[64816,64816],"mapped",[1588,1605]],[[64817,64817],"mapped",[1587,1607]],[[64818,64818],"mapped",[1588,1607]],[[64819,64819],"mapped",[1591,1605]],[[64820,64820],"mapped",[1587,1580]],[[64821,64821],"mapped",[1587,1581]],[[64822,64822],"mapped",[1587,1582]],[[64823,64823],"mapped",[1588,1580]],[[64824,64824],"mapped",[1588,1581]],[[64825,64825],"mapped",[1588,1582]],[[64826,64826],"mapped",[1591,1605]],[[64827,64827],"mapped",[1592,1605]],[[64828,64829],"mapped",[1575,1611]],[[64830,64831],"valid",[],"NV8"],[[64832,64847],"disallowed"],[[64848,64848],"mapped",[1578,1580,1605]],[[64849,64850],"mapped",[1578,1581,1580]],[[64851,64851],"mapped",[1578,1581,1605]],[[64852,64852],"mapped",[1578,1582,1605]],[[64853,64853],"mapped",[1578,1605,1580]],[[64854,64854],"mapped",[1578,1605,1581]],[[64855,64855],"mapped",[1578,1605,1582]],[[64856,64857],"mapped",[1580,1605,1581]],[[64858,64858],"mapped",[1581,1605,1610]],[[64859,64859],"mapped",[1581,1605,1609]],[[64860,64860],"mapped",[1587,1581,1580]],[[64861,64861],"mapped",[1587,1580,1581]],[[64862,64862],"mapped",[1587,1580,1609]],[[64863,64864],"mapped",[1587,1605,1581]],[[64865,64865],"mapped",[1587,1605,1580]],[[64866,64867],"mapped",[1587,1605,1605]],[[64868,64869],"mapped",[1589,1581,1581]],[[64870,64870],"mapped",[1589,1605,1605]],[[64871,64872],"mapped",[1588,1581,1605]],[[64873,64873],"mapped",[1588,1580,1610]],[[64874,64875],"mapped",[1588,1605,1582]],[[64876,64877],"mapped",[1588,1605,1605]],[[64878,64878],"mapped",[1590,1581,1609]],[[64879,64880],"mapped",[1590,1582,1605]],[[64881,64882],"mapped",[1591,1605,1581]],[[64883,64883],"mapped",[1591,1605,1605]],[[64884,64884],"mapped",[1591,1605,1610]],[[64885,64885],"mapped",[1593,1580,1605]],[[64886,64887],"mapped",[1593,1605,1605]],[[64888,64888],"mapped",[1593,1605,1609]],[[64889,64889],"mapped",[1594,1605,1605]],[[64890,64890],"mapped",[1594,1605,1610]],[[64891,64891],"mapped",[1594,1605,1609]],[[64892,64893],"mapped",[1601,1582,1605]],[[64894,64894],"mapped",[1602,1605,1581]],[[64895,64895],"mapped",[1602,1605,1605]],[[64896,64896],"mapped",[1604,1581,1605]],[[64897,64897],"mapped",[1604,1581,1610]],[[64898,64898],"mapped",[1604,1581,1609]],[[64899,64900],"mapped",[1604,1580,1580]],[[64901,64902],"mapped",[1604,1582,1605]],[[64903,64904],"mapped",[1604,1605,1581]],[[64905,64905],"mapped",[1605,1581,1580]],[[64906,64906],"mapped",[1605,1581,1605]],[[64907,64907],"mapped",[1605,1581,1610]],[[64908,64908],"mapped",[1605,1580,1581]],[[64909,64909],"mapped",[1605,1580,1605]],[[64910,64910],"mapped",[1605,1582,1580]],[[64911,64911],"mapped",[1605,1582,1605]],[[64912,64913],"disallowed"],[[64914,64914],"mapped",[1605,1580,1582]],[[64915,64915],"mapped",[1607,1605,1580]],[[64916,64916],"mapped",[1607,1605,1605]],[[64917,64917],"mapped",[1606,1581,1605]],[[64918,64918],"mapped",[1606,1581,1609]],[[64919,64920],"mapped",[1606,1580,1605]],[[64921,64921],"mapped",[1606,1580,1609]],[[64922,64922],"mapped",[1606,1605,1610]],[[64923,64923],"mapped",[1606,1605,1609]],[[64924,64925],"mapped",[1610,1605,1605]],[[64926,64926],"mapped",[1576,1582,1610]],[[64927,64927],"mapped",[1578,1580,1610]],[[
64928,64928],"mapped",[1578,1580,1609]],[[64929,64929],"mapped",[1578,1582,1610]],[[64930,64930],"mapped",[1578,1582,1609]],[[64931,64931],"mapped",[1578,1605,1610]],[[64932,64932],"mapped",[1578,1605,1609]],[[64933,64933],"mapped",[1580,1605,1610]],[[64934,64934],"mapped",[1580,1581,1609]],[[64935,64935],"mapped",[1580,1605,1609]],[[64936,64936],"mapped",[1587,1582,1609]],[[64937,64937],"mapped",[1589,1581,1610]],[[64938,64938],"mapped",[1588,1581,1610]],[[64939,64939],"mapped",[1590,1581,1610]],[[64940,64940],"mapped",[1604,1580,1610]],[[64941,64941],"mapped",[1604,1605,1610]],[[64942,64942],"mapped",[1610,1581,1610]],[[64943,64943],"mapped",[1610,1580,1610]],[[64944,64944],"mapped",[1610,1605,1610]],[[64945,64945],"mapped",[1605,1605,1610]],[[64946,64946],"mapped",[1602,1605,1610]],[[64947,64947],"mapped",[1606,1581,1610]],[[64948,64948],"mapped",[1602,1605,1581]],[[64949,64949],"mapped",[1604,1581,1605]],[[64950,64950],"mapped",[1593,1605,1610]],[[64951,64951],"mapped",[1603,1605,1610]],[[64952,64952],"mapped",[1606,1580,1581]],[[64953,64953],"mapped",[1605,1582,1610]],[[64954,64954],"mapped",[1604,1580,1605]],[[64955,64955],"mapped",[1603,1605,1605]],[[64956,64956],"mapped",[1604,1580,1605]],[[64957,64957],"mapped",[1606,1580,1581]],[[64958,64958],"mapped",[1580,1581,1610]],[[64959,64959],"mapped",[1581,1580,1610]],[[64960,64960],"mapped",[1605,1580,1610]],[[64961,64961],"mapped",[1601,1605,1610]],[[64962,64962],"mapped",[1576,1581,1610]],[[64963,64963],"mapped",[1603,1605,1605]],[[64964,64964],"mapped",[1593,1580,1605]],[[64965,64965],"mapped",[1589,1605,1605]],[[64966,64966],"mapped",[1587,1582,1610]],[[64967,64967],"mapped",[1606,1580,1610]],[[64968,64975],"disallowed"],[[64976,65007],"disallowed"],[[65008,65008],"mapped",[1589,1604,1746]],[[65009,65009],"mapped",[1602,1604,1746]],[[65010,65010],"mapped",[1575,1604,1604,1607]],[[65011,65011],"mapped",[1575,1603,1576,1585]],[[65012,65012],"mapped",[1605,1581,1605,1583]],[[65013,65013],"mapped",[1589,1604,1593,1605]],[[65014,65014],"mapped",[1585,1587,1608,1604]],[[65015,65015],"mapped",[1593,1604,1610,1607]],[[65016,65016],"mapped",[1608,1587,1604,1605]],[[65017,65017],"mapped",[1589,1604,1609]],[[65018,65018],"disallowed_STD3_mapped",[1589,1604,1609,32,1575,1604,1604,1607,32,1593,1604,1610,1607,32,1608,1587,1604,1605]],[[65019,65019],"disallowed_STD3_mapped",[1580,1604,32,1580,1604,1575,1604,1607]],[[65020,65020],"mapped",[1585,1740,1575,1604]],[[65021,65021],"valid",[],"NV8"],[[65022,65023],"disallowed"],[[65024,65039],"ignored"],[[65040,65040],"disallowed_STD3_mapped",[44]],[[65041,65041],"mapped",[12289]],[[65042,65042],"disallowed"],[[65043,65043],"disallowed_STD3_mapped",[58]],[[65044,65044],"disallowed_STD3_mapped",[59]],[[65045,65045],"disallowed_STD3_mapped",[33]],[[65046,65046],"disallowed_STD3_mapped",[63]],[[65047,65047],"mapped",[12310]],[[65048,65048],"mapped",[12311]],[[65049,65049],"disallowed"],[[65050,65055],"disallowed"],[[65056,65059],"valid"],[[65060,65062],"valid"],[[65063,65069],"valid"],[[65070,65071],"valid"],[[65072,65072],"disallowed"],[[65073,65073],"mapped",[8212]],[[65074,65074],"mapped",[8211]],[[65075,65076],"disallowed_STD3_mapped",[95]],[[65077,65077],"disallowed_STD3_mapped",[40]],[[65078,65078],"disallowed_STD3_mapped",[41]],[[65079,65079],"disallowed_STD3_mapped",[123]],[[65080,65080],"disallowed_STD3_mapped",[125]],[[65081,65081],"mapped",[12308]],[[65082,65082],"mapped",[12309]],[[65083,65083],"mapped",[12304]],[[65084,65084],"mapped",[12305]],[[65085,65085],"mapped",[12298]],[[65086,65086],"map
ped",[12299]],[[65087,65087],"mapped",[12296]],[[65088,65088],"mapped",[12297]],[[65089,65089],"mapped",[12300]],[[65090,65090],"mapped",[12301]],[[65091,65091],"mapped",[12302]],[[65092,65092],"mapped",[12303]],[[65093,65094],"valid",[],"NV8"],[[65095,65095],"disallowed_STD3_mapped",[91]],[[65096,65096],"disallowed_STD3_mapped",[93]],[[65097,65100],"disallowed_STD3_mapped",[32,773]],[[65101,65103],"disallowed_STD3_mapped",[95]],[[65104,65104],"disallowed_STD3_mapped",[44]],[[65105,65105],"mapped",[12289]],[[65106,65106],"disallowed"],[[65107,65107],"disallowed"],[[65108,65108],"disallowed_STD3_mapped",[59]],[[65109,65109],"disallowed_STD3_mapped",[58]],[[65110,65110],"disallowed_STD3_mapped",[63]],[[65111,65111],"disallowed_STD3_mapped",[33]],[[65112,65112],"mapped",[8212]],[[65113,65113],"disallowed_STD3_mapped",[40]],[[65114,65114],"disallowed_STD3_mapped",[41]],[[65115,65115],"disallowed_STD3_mapped",[123]],[[65116,65116],"disallowed_STD3_mapped",[125]],[[65117,65117],"mapped",[12308]],[[65118,65118],"mapped",[12309]],[[65119,65119],"disallowed_STD3_mapped",[35]],[[65120,65120],"disallowed_STD3_mapped",[38]],[[65121,65121],"disallowed_STD3_mapped",[42]],[[65122,65122],"disallowed_STD3_mapped",[43]],[[65123,65123],"mapped",[45]],[[65124,65124],"disallowed_STD3_mapped",[60]],[[65125,65125],"disallowed_STD3_mapped",[62]],[[65126,65126],"disallowed_STD3_mapped",[61]],[[65127,65127],"disallowed"],[[65128,65128],"disallowed_STD3_mapped",[92]],[[65129,65129],"disallowed_STD3_mapped",[36]],[[65130,65130],"disallowed_STD3_mapped",[37]],[[65131,65131],"disallowed_STD3_mapped",[64]],[[65132,65135],"disallowed"],[[65136,65136],"disallowed_STD3_mapped",[32,1611]],[[65137,65137],"mapped",[1600,1611]],[[65138,65138],"disallowed_STD3_mapped",[32,1612]],[[65139,65139],"valid"],[[65140,65140],"disallowed_STD3_mapped",[32,1613]],[[65141,65141],"disallowed"],[[65142,65142],"disallowed_STD3_mapped",[32,1614]],[[65143,65143],"mapped",[1600,1614]],[[65144,65144],"disallowed_STD3_mapped",[32,1615]],[[65145,65145],"mapped",[1600,1615]],[[65146,65146],"disallowed_STD3_mapped",[32,1616]],[[65147,65147],"mapped",[1600,1616]],[[65148,65148],"disallowed_STD3_mapped",[32,1617]],[[65149,65149],"mapped",[1600,1617]],[[65150,65150],"disallowed_STD3_mapped",[32,1618]],[[65151,65151],"mapped",[1600,1618]],[[65152,65152],"mapped",[1569]],[[65153,65154],"mapped",[1570]],[[65155,65156],"mapped",[1571]],[[65157,65158],"mapped",[1572]],[[65159,65160],"mapped",[1573]],[[65161,65164],"mapped",[1574]],[[65165,65166],"mapped",[1575]],[[65167,65170],"mapped",[1576]],[[65171,65172],"mapped",[1577]],[[65173,65176],"mapped",[1578]],[[65177,65180],"mapped",[1579]],[[65181,65184],"mapped",[1580]],[[65185,65188],"mapped",[1581]],[[65189,65192],"mapped",[1582]],[[65193,65194],"mapped",[1583]],[[65195,65196],"mapped",[1584]],[[65197,65198],"mapped",[1585]],[[65199,65200],"mapped",[1586]],[[65201,65204],"mapped",[1587]],[[65205,65208],"mapped",[1588]],[[65209,65212],"mapped",[1589]],[[65213,65216],"mapped",[1590]],[[65217,65220],"mapped",[1591]],[[65221,65224],"mapped",[1592]],[[65225,65228],"mapped",[1593]],[[65229,65232],"mapped",[1594]],[[65233,65236],"mapped",[1601]],[[65237,65240],"mapped",[1602]],[[65241,65244],"mapped",[1603]],[[65245,65248],"mapped",[1604]],[[65249,65252],"mapped",[1605]],[[65253,65256],"mapped",[1606]],[[65257,65260],"mapped",[1607]],[[65261,65262],"mapped",[1608]],[[65263,65264],"mapped",[1609]],[[65265,65268],"mapped",[1610]],[[65269,65270],"mapped",[1604,1570]],[[65271,65272],"mapped",[1604,1571]],[[65273,65274]
,"mapped",[1604,1573]],[[65275,65276],"mapped",[1604,1575]],[[65277,65278],"disallowed"],[[65279,65279],"ignored"],[[65280,65280],"disallowed"],[[65281,65281],"disallowed_STD3_mapped",[33]],[[65282,65282],"disallowed_STD3_mapped",[34]],[[65283,65283],"disallowed_STD3_mapped",[35]],[[65284,65284],"disallowed_STD3_mapped",[36]],[[65285,65285],"disallowed_STD3_mapped",[37]],[[65286,65286],"disallowed_STD3_mapped",[38]],[[65287,65287],"disallowed_STD3_mapped",[39]],[[65288,65288],"disallowed_STD3_mapped",[40]],[[65289,65289],"disallowed_STD3_mapped",[41]],[[65290,65290],"disallowed_STD3_mapped",[42]],[[65291,65291],"disallowed_STD3_mapped",[43]],[[65292,65292],"disallowed_STD3_mapped",[44]],[[65293,65293],"mapped",[45]],[[65294,65294],"mapped",[46]],[[65295,65295],"disallowed_STD3_mapped",[47]],[[65296,65296],"mapped",[48]],[[65297,65297],"mapped",[49]],[[65298,65298],"mapped",[50]],[[65299,65299],"mapped",[51]],[[65300,65300],"mapped",[52]],[[65301,65301],"mapped",[53]],[[65302,65302],"mapped",[54]],[[65303,65303],"mapped",[55]],[[65304,65304],"mapped",[56]],[[65305,65305],"mapped",[57]],[[65306,65306],"disallowed_STD3_mapped",[58]],[[65307,65307],"disallowed_STD3_mapped",[59]],[[65308,65308],"disallowed_STD3_mapped",[60]],[[65309,65309],"disallowed_STD3_mapped",[61]],[[65310,65310],"disallowed_STD3_mapped",[62]],[[65311,65311],"disallowed_STD3_mapped",[63]],[[65312,65312],"disallowed_STD3_mapped",[64]],[[65313,65313],"mapped",[97]],[[65314,65314],"mapped",[98]],[[65315,65315],"mapped",[99]],[[65316,65316],"mapped",[100]],[[65317,65317],"mapped",[101]],[[65318,65318],"mapped",[102]],[[65319,65319],"mapped",[103]],[[65320,65320],"mapped",[104]],[[65321,65321],"mapped",[105]],[[65322,65322],"mapped",[106]],[[65323,65323],"mapped",[107]],[[65324,65324],"mapped",[108]],[[65325,65325],"mapped",[109]],[[65326,65326],"mapped",[110]],[[65327,65327],"mapped",[111]],[[65328,65328],"mapped",[112]],[[65329,65329],"mapped",[113]],[[65330,65330],"mapped",[114]],[[65331,65331],"mapped",[115]],[[65332,65332],"mapped",[116]],[[65333,65333],"mapped",[117]],[[65334,65334],"mapped",[118]],[[65335,65335],"mapped",[119]],[[65336,65336],"mapped",[120]],[[65337,65337],"mapped",[121]],[[65338,65338],"mapped",[122]],[[65339,65339],"disallowed_STD3_mapped",[91]],[[65340,65340],"disallowed_STD3_mapped",[92]],[[65341,65341],"disallowed_STD3_mapped",[93]],[[65342,65342],"disallowed_STD3_mapped",[94]],[[65343,65343],"disallowed_STD3_mapped",[95]],[[65344,65344],"disallowed_STD3_mapped",[96]],[[65345,65345],"mapped",[97]],[[65346,65346],"mapped",[98]],[[65347,65347],"mapped",[99]],[[65348,65348],"mapped",[100]],[[65349,65349],"mapped",[101]],[[65350,65350],"mapped",[102]],[[65351,65351],"mapped",[103]],[[65352,65352],"mapped",[104]],[[65353,65353],"mapped",[105]],[[65354,65354],"mapped",[106]],[[65355,65355],"mapped",[107]],[[65356,65356],"mapped",[108]],[[65357,65357],"mapped",[109]],[[65358,65358],"mapped",[110]],[[65359,65359],"mapped",[111]],[[65360,65360],"mapped",[112]],[[65361,65361],"mapped",[113]],[[65362,65362],"mapped",[114]],[[65363,65363],"mapped",[115]],[[65364,65364],"mapped",[116]],[[65365,65365],"mapped",[117]],[[65366,65366],"mapped",[118]],[[65367,65367],"mapped",[119]],[[65368,65368],"mapped",[120]],[[65369,65369],"mapped",[121]],[[65370,65370],"mapped",[122]],[[65371,65371],"disallowed_STD3_mapped",[123]],[[65372,65372],"disallowed_STD3_mapped",[124]],[[65373,65373],"disallowed_STD3_mapped",[125]],[[65374,65374],"disallowed_STD3_mapped",[126]],[[65375,65375],"mapped",[10629]],[[65376,65376],"mapped",[106
30]],[[65377,65377],"mapped",[46]],[[65378,65378],"mapped",[12300]],[[65379,65379],"mapped",[12301]],[[65380,65380],"mapped",[12289]],[[65381,65381],"mapped",[12539]],[[65382,65382],"mapped",[12530]],[[65383,65383],"mapped",[12449]],[[65384,65384],"mapped",[12451]],[[65385,65385],"mapped",[12453]],[[65386,65386],"mapped",[12455]],[[65387,65387],"mapped",[12457]],[[65388,65388],"mapped",[12515]],[[65389,65389],"mapped",[12517]],[[65390,65390],"mapped",[12519]],[[65391,65391],"mapped",[12483]],[[65392,65392],"mapped",[12540]],[[65393,65393],"mapped",[12450]],[[65394,65394],"mapped",[12452]],[[65395,65395],"mapped",[12454]],[[65396,65396],"mapped",[12456]],[[65397,65397],"mapped",[12458]],[[65398,65398],"mapped",[12459]],[[65399,65399],"mapped",[12461]],[[65400,65400],"mapped",[12463]],[[65401,65401],"mapped",[12465]],[[65402,65402],"mapped",[12467]],[[65403,65403],"mapped",[12469]],[[65404,65404],"mapped",[12471]],[[65405,65405],"mapped",[12473]],[[65406,65406],"mapped",[12475]],[[65407,65407],"mapped",[12477]],[[65408,65408],"mapped",[12479]],[[65409,65409],"mapped",[12481]],[[65410,65410],"mapped",[12484]],[[65411,65411],"mapped",[12486]],[[65412,65412],"mapped",[12488]],[[65413,65413],"mapped",[12490]],[[65414,65414],"mapped",[12491]],[[65415,65415],"mapped",[12492]],[[65416,65416],"mapped",[12493]],[[65417,65417],"mapped",[12494]],[[65418,65418],"mapped",[12495]],[[65419,65419],"mapped",[12498]],[[65420,65420],"mapped",[12501]],[[65421,65421],"mapped",[12504]],[[65422,65422],"mapped",[12507]],[[65423,65423],"mapped",[12510]],[[65424,65424],"mapped",[12511]],[[65425,65425],"mapped",[12512]],[[65426,65426],"mapped",[12513]],[[65427,65427],"mapped",[12514]],[[65428,65428],"mapped",[12516]],[[65429,65429],"mapped",[12518]],[[65430,65430],"mapped",[12520]],[[65431,65431],"mapped",[12521]],[[65432,65432],"mapped",[12522]],[[65433,65433],"mapped",[12523]],[[65434,65434],"mapped",[12524]],[[65435,65435],"mapped",[12525]],[[65436,65436],"mapped",[12527]],[[65437,65437],"mapped",[12531]],[[65438,65438],"mapped",[12441]],[[65439,65439],"mapped",[12442]],[[65440,65440],"disallowed"],[[65441,65441],"mapped",[4352]],[[65442,65442],"mapped",[4353]],[[65443,65443],"mapped",[4522]],[[65444,65444],"mapped",[4354]],[[65445,65445],"mapped",[4524]],[[65446,65446],"mapped",[4525]],[[65447,65447],"mapped",[4355]],[[65448,65448],"mapped",[4356]],[[65449,65449],"mapped",[4357]],[[65450,65450],"mapped",[4528]],[[65451,65451],"mapped",[4529]],[[65452,65452],"mapped",[4530]],[[65453,65453],"mapped",[4531]],[[65454,65454],"mapped",[4532]],[[65455,65455],"mapped",[4533]],[[65456,65456],"mapped",[4378]],[[65457,65457],"mapped",[4358]],[[65458,65458],"mapped",[4359]],[[65459,65459],"mapped",[4360]],[[65460,65460],"mapped",[4385]],[[65461,65461],"mapped",[4361]],[[65462,65462],"mapped",[4362]],[[65463,65463],"mapped",[4363]],[[65464,65464],"mapped",[4364]],[[65465,65465],"mapped",[4365]],[[65466,65466],"mapped",[4366]],[[65467,65467],"mapped",[4367]],[[65468,65468],"mapped",[4368]],[[65469,65469],"mapped",[4369]],[[65470,65470],"mapped",[4370]],[[65471,65473],"disallowed"],[[65474,65474],"mapped",[4449]],[[65475,65475],"mapped",[4450]],[[65476,65476],"mapped",[4451]],[[65477,65477],"mapped",[4452]],[[65478,65478],"mapped",[4453]],[[65479,65479],"mapped",[4454]],[[65480,65481],"disallowed"],[[65482,65482],"mapped",[4455]],[[65483,65483],"mapped",[4456]],[[65484,65484],"mapped",[4457]],[[65485,65485],"mapped",[4458]],[[65486,65486],"mapped",[4459]],[[65487,65487],"mapped",[4460]],[[65488,65489],"disallowed"],[[65490,65490]
,"mapped",[4461]],[[65491,65491],"mapped",[4462]],[[65492,65492],"mapped",[4463]],[[65493,65493],"mapped",[4464]],[[65494,65494],"mapped",[4465]],[[65495,65495],"mapped",[4466]],[[65496,65497],"disallowed"],[[65498,65498],"mapped",[4467]],[[65499,65499],"mapped",[4468]],[[65500,65500],"mapped",[4469]],[[65501,65503],"disallowed"],[[65504,65504],"mapped",[162]],[[65505,65505],"mapped",[163]],[[65506,65506],"mapped",[172]],[[65507,65507],"disallowed_STD3_mapped",[32,772]],[[65508,65508],"mapped",[166]],[[65509,65509],"mapped",[165]],[[65510,65510],"mapped",[8361]],[[65511,65511],"disallowed"],[[65512,65512],"mapped",[9474]],[[65513,65513],"mapped",[8592]],[[65514,65514],"mapped",[8593]],[[65515,65515],"mapped",[8594]],[[65516,65516],"mapped",[8595]],[[65517,65517],"mapped",[9632]],[[65518,65518],"mapped",[9675]],[[65519,65528],"disallowed"],[[65529,65531],"disallowed"],[[65532,65532],"disallowed"],[[65533,65533],"disallowed"],[[65534,65535],"disallowed"],[[65536,65547],"valid"],[[65548,65548],"disallowed"],[[65549,65574],"valid"],[[65575,65575],"disallowed"],[[65576,65594],"valid"],[[65595,65595],"disallowed"],[[65596,65597],"valid"],[[65598,65598],"disallowed"],[[65599,65613],"valid"],[[65614,65615],"disallowed"],[[65616,65629],"valid"],[[65630,65663],"disallowed"],[[65664,65786],"valid"],[[65787,65791],"disallowed"],[[65792,65794],"valid",[],"NV8"],[[65795,65798],"disallowed"],[[65799,65843],"valid",[],"NV8"],[[65844,65846],"disallowed"],[[65847,65855],"valid",[],"NV8"],[[65856,65930],"valid",[],"NV8"],[[65931,65932],"valid",[],"NV8"],[[65933,65935],"disallowed"],[[65936,65947],"valid",[],"NV8"],[[65948,65951],"disallowed"],[[65952,65952],"valid",[],"NV8"],[[65953,65999],"disallowed"],[[66000,66044],"valid",[],"NV8"],[[66045,66045],"valid"],[[66046,66175],"disallowed"],[[66176,66204],"valid"],[[66205,66207],"disallowed"],[[66208,66256],"valid"],[[66257,66271],"disallowed"],[[66272,66272],"valid"],[[66273,66299],"valid",[],"NV8"],[[66300,66303],"disallowed"],[[66304,66334],"valid"],[[66335,66335],"valid"],[[66336,66339],"valid",[],"NV8"],[[66340,66351],"disallowed"],[[66352,66368],"valid"],[[66369,66369],"valid",[],"NV8"],[[66370,66377],"valid"],[[66378,66378],"valid",[],"NV8"],[[66379,66383],"disallowed"],[[66384,66426],"valid"],[[66427,66431],"disallowed"],[[66432,66461],"valid"],[[66462,66462],"disallowed"],[[66463,66463],"valid",[],"NV8"],[[66464,66499],"valid"],[[66500,66503],"disallowed"],[[66504,66511],"valid"],[[66512,66517],"valid",[],"NV8"],[[66518,66559],"disallowed"],[[66560,66560],"mapped",[66600]],[[66561,66561],"mapped",[66601]],[[66562,66562],"mapped",[66602]],[[66563,66563],"mapped",[66603]],[[66564,66564],"mapped",[66604]],[[66565,66565],"mapped",[66605]],[[66566,66566],"mapped",[66606]],[[66567,66567],"mapped",[66607]],[[66568,66568],"mapped",[66608]],[[66569,66569],"mapped",[66609]],[[66570,66570],"mapped",[66610]],[[66571,66571],"mapped",[66611]],[[66572,66572],"mapped",[66612]],[[66573,66573],"mapped",[66613]],[[66574,66574],"mapped",[66614]],[[66575,66575],"mapped",[66615]],[[66576,66576],"mapped",[66616]],[[66577,66577],"mapped",[66617]],[[66578,66578],"mapped",[66618]],[[66579,66579],"mapped",[66619]],[[66580,66580],"mapped",[66620]],[[66581,66581],"mapped",[66621]],[[66582,66582],"mapped",[66622]],[[66583,66583],"mapped",[66623]],[[66584,66584],"mapped",[66624]],[[66585,66585],"mapped",[66625]],[[66586,66586],"mapped",[66626]],[[66587,66587],"mapped",[66627]],[[66588,66588],"mapped",[66628]],[[66589,66589],"mapped",[66629]],[[66590,66590],"mapped",[66630]],[[66591,6
6591],"mapped",[66631]],[[66592,66592],"mapped",[66632]],[[66593,66593],"mapped",[66633]],[[66594,66594],"mapped",[66634]],[[66595,66595],"mapped",[66635]],[[66596,66596],"mapped",[66636]],[[66597,66597],"mapped",[66637]],[[66598,66598],"mapped",[66638]],[[66599,66599],"mapped",[66639]],[[66600,66637],"valid"],[[66638,66717],"valid"],[[66718,66719],"disallowed"],[[66720,66729],"valid"],[[66730,66815],"disallowed"],[[66816,66855],"valid"],[[66856,66863],"disallowed"],[[66864,66915],"valid"],[[66916,66926],"disallowed"],[[66927,66927],"valid",[],"NV8"],[[66928,67071],"disallowed"],[[67072,67382],"valid"],[[67383,67391],"disallowed"],[[67392,67413],"valid"],[[67414,67423],"disallowed"],[[67424,67431],"valid"],[[67432,67583],"disallowed"],[[67584,67589],"valid"],[[67590,67591],"disallowed"],[[67592,67592],"valid"],[[67593,67593],"disallowed"],[[67594,67637],"valid"],[[67638,67638],"disallowed"],[[67639,67640],"valid"],[[67641,67643],"disallowed"],[[67644,67644],"valid"],[[67645,67646],"disallowed"],[[67647,67647],"valid"],[[67648,67669],"valid"],[[67670,67670],"disallowed"],[[67671,67679],"valid",[],"NV8"],[[67680,67702],"valid"],[[67703,67711],"valid",[],"NV8"],[[67712,67742],"valid"],[[67743,67750],"disallowed"],[[67751,67759],"valid",[],"NV8"],[[67760,67807],"disallowed"],[[67808,67826],"valid"],[[67827,67827],"disallowed"],[[67828,67829],"valid"],[[67830,67834],"disallowed"],[[67835,67839],"valid",[],"NV8"],[[67840,67861],"valid"],[[67862,67865],"valid",[],"NV8"],[[67866,67867],"valid",[],"NV8"],[[67868,67870],"disallowed"],[[67871,67871],"valid",[],"NV8"],[[67872,67897],"valid"],[[67898,67902],"disallowed"],[[67903,67903],"valid",[],"NV8"],[[67904,67967],"disallowed"],[[67968,68023],"valid"],[[68024,68027],"disallowed"],[[68028,68029],"valid",[],"NV8"],[[68030,68031],"valid"],[[68032,68047],"valid",[],"NV8"],[[68048,68049],"disallowed"],[[68050,68095],"valid",[],"NV8"],[[68096,68099],"valid"],[[68100,68100],"disallowed"],[[68101,68102],"valid"],[[68103,68107],"disallowed"],[[68108,68115],"valid"],[[68116,68116],"disallowed"],[[68117,68119],"valid"],[[68120,68120],"disallowed"],[[68121,68147],"valid"],[[68148,68151],"disallowed"],[[68152,68154],"valid"],[[68155,68158],"disallowed"],[[68159,68159],"valid"],[[68160,68167],"valid",[],"NV8"],[[68168,68175],"disallowed"],[[68176,68184],"valid",[],"NV8"],[[68185,68191],"disallowed"],[[68192,68220],"valid"],[[68221,68223],"valid",[],"NV8"],[[68224,68252],"valid"],[[68253,68255],"valid",[],"NV8"],[[68256,68287],"disallowed"],[[68288,68295],"valid"],[[68296,68296],"valid",[],"NV8"],[[68297,68326],"valid"],[[68327,68330],"disallowed"],[[68331,68342],"valid",[],"NV8"],[[68343,68351],"disallowed"],[[68352,68405],"valid"],[[68406,68408],"disallowed"],[[68409,68415],"valid",[],"NV8"],[[68416,68437],"valid"],[[68438,68439],"disallowed"],[[68440,68447],"valid",[],"NV8"],[[68448,68466],"valid"],[[68467,68471],"disallowed"],[[68472,68479],"valid",[],"NV8"],[[68480,68497],"valid"],[[68498,68504],"disallowed"],[[68505,68508],"valid",[],"NV8"],[[68509,68520],"disallowed"],[[68521,68527],"valid",[],"NV8"],[[68528,68607],"disallowed"],[[68608,68680],"valid"],[[68681,68735],"disallowed"],[[68736,68736],"mapped",[68800]],[[68737,68737],"mapped",[68801]],[[68738,68738],"mapped",[68802]],[[68739,68739],"mapped",[68803]],[[68740,68740],"mapped",[68804]],[[68741,68741],"mapped",[68805]],[[68742,68742],"mapped",[68806]],[[68743,68743],"mapped",[68807]],[[68744,68744],"mapped",[68808]],[[68745,68745],"mapped",[68809]],[[68746,68746],"mapped",[68810]],[[68747,68747],"mapp
ed",[68811]],[[68748,68748],"mapped",[68812]],[[68749,68749],"mapped",[68813]],[[68750,68750],"mapped",[68814]],[[68751,68751],"mapped",[68815]],[[68752,68752],"mapped",[68816]],[[68753,68753],"mapped",[68817]],[[68754,68754],"mapped",[68818]],[[68755,68755],"mapped",[68819]],[[68756,68756],"mapped",[68820]],[[68757,68757],"mapped",[68821]],[[68758,68758],"mapped",[68822]],[[68759,68759],"mapped",[68823]],[[68760,68760],"mapped",[68824]],[[68761,68761],"mapped",[68825]],[[68762,68762],"mapped",[68826]],[[68763,68763],"mapped",[68827]],[[68764,68764],"mapped",[68828]],[[68765,68765],"mapped",[68829]],[[68766,68766],"mapped",[68830]],[[68767,68767],"mapped",[68831]],[[68768,68768],"mapped",[68832]],[[68769,68769],"mapped",[68833]],[[68770,68770],"mapped",[68834]],[[68771,68771],"mapped",[68835]],[[68772,68772],"mapped",[68836]],[[68773,68773],"mapped",[68837]],[[68774,68774],"mapped",[68838]],[[68775,68775],"mapped",[68839]],[[68776,68776],"mapped",[68840]],[[68777,68777],"mapped",[68841]],[[68778,68778],"mapped",[68842]],[[68779,68779],"mapped",[68843]],[[68780,68780],"mapped",[68844]],[[68781,68781],"mapped",[68845]],[[68782,68782],"mapped",[68846]],[[68783,68783],"mapped",[68847]],[[68784,68784],"mapped",[68848]],[[68785,68785],"mapped",[68849]],[[68786,68786],"mapped",[68850]],[[68787,68799],"disallowed"],[[68800,68850],"valid"],[[68851,68857],"disallowed"],[[68858,68863],"valid",[],"NV8"],[[68864,69215],"disallowed"],[[69216,69246],"valid",[],"NV8"],[[69247,69631],"disallowed"],[[69632,69702],"valid"],[[69703,69709],"valid",[],"NV8"],[[69710,69713],"disallowed"],[[69714,69733],"valid",[],"NV8"],[[69734,69743],"valid"],[[69744,69758],"disallowed"],[[69759,69759],"valid"],[[69760,69818],"valid"],[[69819,69820],"valid",[],"NV8"],[[69821,69821],"disallowed"],[[69822,69825],"valid",[],"NV8"],[[69826,69839],"disallowed"],[[69840,69864],"valid"],[[69865,69871],"disallowed"],[[69872,69881],"valid"],[[69882,69887],"disallowed"],[[69888,69940],"valid"],[[69941,69941],"disallowed"],[[69942,69951],"valid"],[[69952,69955],"valid",[],"NV8"],[[69956,69967],"disallowed"],[[69968,70003],"valid"],[[70004,70005],"valid",[],"NV8"],[[70006,70006],"valid"],[[70007,70015],"disallowed"],[[70016,70084],"valid"],[[70085,70088],"valid",[],"NV8"],[[70089,70089],"valid",[],"NV8"],[[70090,70092],"valid"],[[70093,70093],"valid",[],"NV8"],[[70094,70095],"disallowed"],[[70096,70105],"valid"],[[70106,70106],"valid"],[[70107,70107],"valid",[],"NV8"],[[70108,70108],"valid"],[[70109,70111],"valid",[],"NV8"],[[70112,70112],"disallowed"],[[70113,70132],"valid",[],"NV8"],[[70133,70143],"disallowed"],[[70144,70161],"valid"],[[70162,70162],"disallowed"],[[70163,70199],"valid"],[[70200,70205],"valid",[],"NV8"],[[70206,70271],"disallowed"],[[70272,70278],"valid"],[[70279,70279],"disallowed"],[[70280,70280],"valid"],[[70281,70281],"disallowed"],[[70282,70285],"valid"],[[70286,70286],"disallowed"],[[70287,70301],"valid"],[[70302,70302],"disallowed"],[[70303,70312],"valid"],[[70313,70313],"valid",[],"NV8"],[[70314,70319],"disallowed"],[[70320,70378],"valid"],[[70379,70383],"disallowed"],[[70384,70393],"valid"],[[70394,70399],"disallowed"],[[70400,70400],"valid"],[[70401,70403],"valid"],[[70404,70404],"disallowed"],[[70405,70412],"valid"],[[70413,70414],"disallowed"],[[70415,70416],"valid"],[[70417,70418],"disallowed"],[[70419,70440],"valid"],[[70441,70441],"disallowed"],[[70442,70448],"valid"],[[70449,70449],"disallowed"],[[70450,70451],"valid"],[[70452,70452],"disallowed"],[[70453,70457],"valid"],[[70458,70459],"disallowed"],[[70460
,70468],"valid"],[[70469,70470],"disallowed"],[[70471,70472],"valid"],[[70473,70474],"disallowed"],[[70475,70477],"valid"],[[70478,70479],"disallowed"],[[70480,70480],"valid"],[[70481,70486],"disallowed"],[[70487,70487],"valid"],[[70488,70492],"disallowed"],[[70493,70499],"valid"],[[70500,70501],"disallowed"],[[70502,70508],"valid"],[[70509,70511],"disallowed"],[[70512,70516],"valid"],[[70517,70783],"disallowed"],[[70784,70853],"valid"],[[70854,70854],"valid",[],"NV8"],[[70855,70855],"valid"],[[70856,70863],"disallowed"],[[70864,70873],"valid"],[[70874,71039],"disallowed"],[[71040,71093],"valid"],[[71094,71095],"disallowed"],[[71096,71104],"valid"],[[71105,71113],"valid",[],"NV8"],[[71114,71127],"valid",[],"NV8"],[[71128,71133],"valid"],[[71134,71167],"disallowed"],[[71168,71232],"valid"],[[71233,71235],"valid",[],"NV8"],[[71236,71236],"valid"],[[71237,71247],"disallowed"],[[71248,71257],"valid"],[[71258,71295],"disallowed"],[[71296,71351],"valid"],[[71352,71359],"disallowed"],[[71360,71369],"valid"],[[71370,71423],"disallowed"],[[71424,71449],"valid"],[[71450,71452],"disallowed"],[[71453,71467],"valid"],[[71468,71471],"disallowed"],[[71472,71481],"valid"],[[71482,71487],"valid",[],"NV8"],[[71488,71839],"disallowed"],[[71840,71840],"mapped",[71872]],[[71841,71841],"mapped",[71873]],[[71842,71842],"mapped",[71874]],[[71843,71843],"mapped",[71875]],[[71844,71844],"mapped",[71876]],[[71845,71845],"mapped",[71877]],[[71846,71846],"mapped",[71878]],[[71847,71847],"mapped",[71879]],[[71848,71848],"mapped",[71880]],[[71849,71849],"mapped",[71881]],[[71850,71850],"mapped",[71882]],[[71851,71851],"mapped",[71883]],[[71852,71852],"mapped",[71884]],[[71853,71853],"mapped",[71885]],[[71854,71854],"mapped",[71886]],[[71855,71855],"mapped",[71887]],[[71856,71856],"mapped",[71888]],[[71857,71857],"mapped",[71889]],[[71858,71858],"mapped",[71890]],[[71859,71859],"mapped",[71891]],[[71860,71860],"mapped",[71892]],[[71861,71861],"mapped",[71893]],[[71862,71862],"mapped",[71894]],[[71863,71863],"mapped",[71895]],[[71864,71864],"mapped",[71896]],[[71865,71865],"mapped",[71897]],[[71866,71866],"mapped",[71898]],[[71867,71867],"mapped",[71899]],[[71868,71868],"mapped",[71900]],[[71869,71869],"mapped",[71901]],[[71870,71870],"mapped",[71902]],[[71871,71871],"mapped",[71903]],[[71872,71913],"valid"],[[71914,71922],"valid",[],"NV8"],[[71923,71934],"disallowed"],[[71935,71935],"valid"],[[71936,72383],"disallowed"],[[72384,72440],"valid"],[[72441,73727],"disallowed"],[[73728,74606],"valid"],[[74607,74648],"valid"],[[74649,74649],"valid"],[[74650,74751],"disallowed"],[[74752,74850],"valid",[],"NV8"],[[74851,74862],"valid",[],"NV8"],[[74863,74863],"disallowed"],[[74864,74867],"valid",[],"NV8"],[[74868,74868],"valid",[],"NV8"],[[74869,74879],"disallowed"],[[74880,75075],"valid"],[[75076,77823],"disallowed"],[[77824,78894],"valid"],[[78895,82943],"disallowed"],[[82944,83526],"valid"],[[83527,92159],"disallowed"],[[92160,92728],"valid"],[[92729,92735],"disallowed"],[[92736,92766],"valid"],[[92767,92767],"disallowed"],[[92768,92777],"valid"],[[92778,92781],"disallowed"],[[92782,92783],"valid",[],"NV8"],[[92784,92879],"disallowed"],[[92880,92909],"valid"],[[92910,92911],"disallowed"],[[92912,92916],"valid"],[[92917,92917],"valid",[],"NV8"],[[92918,92927],"disallowed"],[[92928,92982],"valid"],[[92983,92991],"valid",[],"NV8"],[[92992,92995],"valid"],[[92996,92997],"valid",[],"NV8"],[[92998,93007],"disallowed"],[[93008,93017],"valid"],[[93018,93018],"disallowed"],[[93019,93025],"valid",[],"NV8"],[[93026,93026],"disallowed"],[[
93027,93047],"valid"],[[93048,93052],"disallowed"],[[93053,93071],"valid"],[[93072,93951],"disallowed"],[[93952,94020],"valid"],[[94021,94031],"disallowed"],[[94032,94078],"valid"],[[94079,94094],"disallowed"],[[94095,94111],"valid"],[[94112,110591],"disallowed"],[[110592,110593],"valid"],[[110594,113663],"disallowed"],[[113664,113770],"valid"],[[113771,113775],"disallowed"],[[113776,113788],"valid"],[[113789,113791],"disallowed"],[[113792,113800],"valid"],[[113801,113807],"disallowed"],[[113808,113817],"valid"],[[113818,113819],"disallowed"],[[113820,113820],"valid",[],"NV8"],[[113821,113822],"valid"],[[113823,113823],"valid",[],"NV8"],[[113824,113827],"ignored"],[[113828,118783],"disallowed"],[[118784,119029],"valid",[],"NV8"],[[119030,119039],"disallowed"],[[119040,119078],"valid",[],"NV8"],[[119079,119080],"disallowed"],[[119081,119081],"valid",[],"NV8"],[[119082,119133],"valid",[],"NV8"],[[119134,119134],"mapped",[119127,119141]],[[119135,119135],"mapped",[119128,119141]],[[119136,119136],"mapped",[119128,119141,119150]],[[119137,119137],"mapped",[119128,119141,119151]],[[119138,119138],"mapped",[119128,119141,119152]],[[119139,119139],"mapped",[119128,119141,119153]],[[119140,119140],"mapped",[119128,119141,119154]],[[119141,119154],"valid",[],"NV8"],[[119155,119162],"disallowed"],[[119163,119226],"valid",[],"NV8"],[[119227,119227],"mapped",[119225,119141]],[[119228,119228],"mapped",[119226,119141]],[[119229,119229],"mapped",[119225,119141,119150]],[[119230,119230],"mapped",[119226,119141,119150]],[[119231,119231],"mapped",[119225,119141,119151]],[[119232,119232],"mapped",[119226,119141,119151]],[[119233,119261],"valid",[],"NV8"],[[119262,119272],"valid",[],"NV8"],[[119273,119295],"disallowed"],[[119296,119365],"valid",[],"NV8"],[[119366,119551],"disallowed"],[[119552,119638],"valid",[],"NV8"],[[119639,119647],"disallowed"],[[119648,119665],"valid",[],"NV8"],[[119666,119807],"disallowed"],[[119808,119808],"mapped",[97]],[[119809,119809],"mapped",[98]],[[119810,119810],"mapped",[99]],[[119811,119811],"mapped",[100]],[[119812,119812],"mapped",[101]],[[119813,119813],"mapped",[102]],[[119814,119814],"mapped",[103]],[[119815,119815],"mapped",[104]],[[119816,119816],"mapped",[105]],[[119817,119817],"mapped",[106]],[[119818,119818],"mapped",[107]],[[119819,119819],"mapped",[108]],[[119820,119820],"mapped",[109]],[[119821,119821],"mapped",[110]],[[119822,119822],"mapped",[111]],[[119823,119823],"mapped",[112]],[[119824,119824],"mapped",[113]],[[119825,119825],"mapped",[114]],[[119826,119826],"mapped",[115]],[[119827,119827],"mapped",[116]],[[119828,119828],"mapped",[117]],[[119829,119829],"mapped",[118]],[[119830,119830],"mapped",[119]],[[119831,119831],"mapped",[120]],[[119832,119832],"mapped",[121]],[[119833,119833],"mapped",[122]],[[119834,119834],"mapped",[97]],[[119835,119835],"mapped",[98]],[[119836,119836],"mapped",[99]],[[119837,119837],"mapped",[100]],[[119838,119838],"mapped",[101]],[[119839,119839],"mapped",[102]],[[119840,119840],"mapped",[103]],[[119841,119841],"mapped",[104]],[[119842,119842],"mapped",[105]],[[119843,119843],"mapped",[106]],[[119844,119844],"mapped",[107]],[[119845,119845],"mapped",[108]],[[119846,119846],"mapped",[109]],[[119847,119847],"mapped",[110]],[[119848,119848],"mapped",[111]],[[119849,119849],"mapped",[112]],[[119850,119850],"mapped",[113]],[[119851,119851],"mapped",[114]],[[119852,119852],"mapped",[115]],[[119853,119853],"mapped",[116]],[[119854,119854],"mapped",[117]],[[119855,119855],"mapped",[118]],[[119856,119856],"mapped",[119]],[[119857,119857]
,"mapped",[120]],[[119858,119858],"mapped",[121]],[[119859,119859],"mapped",[122]],[[119860,119860],"mapped",[97]],[[119861,119861],"mapped",[98]],[[119862,119862],"mapped",[99]],[[119863,119863],"mapped",[100]],[[119864,119864],"mapped",[101]],[[119865,119865],"mapped",[102]],[[119866,119866],"mapped",[103]],[[119867,119867],"mapped",[104]],[[119868,119868],"mapped",[105]],[[119869,119869],"mapped",[106]],[[119870,119870],"mapped",[107]],[[119871,119871],"mapped",[108]],[[119872,119872],"mapped",[109]],[[119873,119873],"mapped",[110]],[[119874,119874],"mapped",[111]],[[119875,119875],"mapped",[112]],[[119876,119876],"mapped",[113]],[[119877,119877],"mapped",[114]],[[119878,119878],"mapped",[115]],[[119879,119879],"mapped",[116]],[[119880,119880],"mapped",[117]],[[119881,119881],"mapped",[118]],[[119882,119882],"mapped",[119]],[[119883,119883],"mapped",[120]],[[119884,119884],"mapped",[121]],[[119885,119885],"mapped",[122]],[[119886,119886],"mapped",[97]],[[119887,119887],"mapped",[98]],[[119888,119888],"mapped",[99]],[[119889,119889],"mapped",[100]],[[119890,119890],"mapped",[101]],[[119891,119891],"mapped",[102]],[[119892,119892],"mapped",[103]],[[119893,119893],"disallowed"],[[119894,119894],"mapped",[105]],[[119895,119895],"mapped",[106]],[[119896,119896],"mapped",[107]],[[119897,119897],"mapped",[108]],[[119898,119898],"mapped",[109]],[[119899,119899],"mapped",[110]],[[119900,119900],"mapped",[111]],[[119901,119901],"mapped",[112]],[[119902,119902],"mapped",[113]],[[119903,119903],"mapped",[114]],[[119904,119904],"mapped",[115]],[[119905,119905],"mapped",[116]],[[119906,119906],"mapped",[117]],[[119907,119907],"mapped",[118]],[[119908,119908],"mapped",[119]],[[119909,119909],"mapped",[120]],[[119910,119910],"mapped",[121]],[[119911,119911],"mapped",[122]],[[119912,119912],"mapped",[97]],[[119913,119913],"mapped",[98]],[[119914,119914],"mapped",[99]],[[119915,119915],"mapped",[100]],[[119916,119916],"mapped",[101]],[[119917,119917],"mapped",[102]],[[119918,119918],"mapped",[103]],[[119919,119919],"mapped",[104]],[[119920,119920],"mapped",[105]],[[119921,119921],"mapped",[106]],[[119922,119922],"mapped",[107]],[[119923,119923],"mapped",[108]],[[119924,119924],"mapped",[109]],[[119925,119925],"mapped",[110]],[[119926,119926],"mapped",[111]],[[119927,119927],"mapped",[112]],[[119928,119928],"mapped",[113]],[[119929,119929],"mapped",[114]],[[119930,119930],"mapped",[115]],[[119931,119931],"mapped",[116]],[[119932,119932],"mapped",[117]],[[119933,119933],"mapped",[118]],[[119934,119934],"mapped",[119]],[[119935,119935],"mapped",[120]],[[119936,119936],"mapped",[121]],[[119937,119937],"mapped",[122]],[[119938,119938],"mapped",[97]],[[119939,119939],"mapped",[98]],[[119940,119940],"mapped",[99]],[[119941,119941],"mapped",[100]],[[119942,119942],"mapped",[101]],[[119943,119943],"mapped",[102]],[[119944,119944],"mapped",[103]],[[119945,119945],"mapped",[104]],[[119946,119946],"mapped",[105]],[[119947,119947],"mapped",[106]],[[119948,119948],"mapped",[107]],[[119949,119949],"mapped",[108]],[[119950,119950],"mapped",[109]],[[119951,119951],"mapped",[110]],[[119952,119952],"mapped",[111]],[[119953,119953],"mapped",[112]],[[119954,119954],"mapped",[113]],[[119955,119955],"mapped",[114]],[[119956,119956],"mapped",[115]],[[119957,119957],"mapped",[116]],[[119958,119958],"mapped",[117]],[[119959,119959],"mapped",[118]],[[119960,119960],"mapped",[119]],[[119961,119961],"mapped",[120]],[[119962,119962],"mapped",[121]],[[119963,119963],"mapped",[122]],[[119964,119964],"mapped",[97]],[[119965,119965],"disa
llowed"],[[119966,119966],"mapped",[99]],[[119967,119967],"mapped",[100]],[[119968,119969],"disallowed"],[[119970,119970],"mapped",[103]],[[119971,119972],"disallowed"],[[119973,119973],"mapped",[106]],[[119974,119974],"mapped",[107]],[[119975,119976],"disallowed"],[[119977,119977],"mapped",[110]],[[119978,119978],"mapped",[111]],[[119979,119979],"mapped",[112]],[[119980,119980],"mapped",[113]],[[119981,119981],"disallowed"],[[119982,119982],"mapped",[115]],[[119983,119983],"mapped",[116]],[[119984,119984],"mapped",[117]],[[119985,119985],"mapped",[118]],[[119986,119986],"mapped",[119]],[[119987,119987],"mapped",[120]],[[119988,119988],"mapped",[121]],[[119989,119989],"mapped",[122]],[[119990,119990],"mapped",[97]],[[119991,119991],"mapped",[98]],[[119992,119992],"mapped",[99]],[[119993,119993],"mapped",[100]],[[119994,119994],"disallowed"],[[119995,119995],"mapped",[102]],[[119996,119996],"disallowed"],[[119997,119997],"mapped",[104]],[[119998,119998],"mapped",[105]],[[119999,119999],"mapped",[106]],[[120000,120000],"mapped",[107]],[[120001,120001],"mapped",[108]],[[120002,120002],"mapped",[109]],[[120003,120003],"mapped",[110]],[[120004,120004],"disallowed"],[[120005,120005],"mapped",[112]],[[120006,120006],"mapped",[113]],[[120007,120007],"mapped",[114]],[[120008,120008],"mapped",[115]],[[120009,120009],"mapped",[116]],[[120010,120010],"mapped",[117]],[[120011,120011],"mapped",[118]],[[120012,120012],"mapped",[119]],[[120013,120013],"mapped",[120]],[[120014,120014],"mapped",[121]],[[120015,120015],"mapped",[122]],[[120016,120016],"mapped",[97]],[[120017,120017],"mapped",[98]],[[120018,120018],"mapped",[99]],[[120019,120019],"mapped",[100]],[[120020,120020],"mapped",[101]],[[120021,120021],"mapped",[102]],[[120022,120022],"mapped",[103]],[[120023,120023],"mapped",[104]],[[120024,120024],"mapped",[105]],[[120025,120025],"mapped",[106]],[[120026,120026],"mapped",[107]],[[120027,120027],"mapped",[108]],[[120028,120028],"mapped",[109]],[[120029,120029],"mapped",[110]],[[120030,120030],"mapped",[111]],[[120031,120031],"mapped",[112]],[[120032,120032],"mapped",[113]],[[120033,120033],"mapped",[114]],[[120034,120034],"mapped",[115]],[[120035,120035],"mapped",[116]],[[120036,120036],"mapped",[117]],[[120037,120037],"mapped",[118]],[[120038,120038],"mapped",[119]],[[120039,120039],"mapped",[120]],[[120040,120040],"mapped",[121]],[[120041,120041],"mapped",[122]],[[120042,120042],"mapped",[97]],[[120043,120043],"mapped",[98]],[[120044,120044],"mapped",[99]],[[120045,120045],"mapped",[100]],[[120046,120046],"mapped",[101]],[[120047,120047],"mapped",[102]],[[120048,120048],"mapped",[103]],[[120049,120049],"mapped",[104]],[[120050,120050],"mapped",[105]],[[120051,120051],"mapped",[106]],[[120052,120052],"mapped",[107]],[[120053,120053],"mapped",[108]],[[120054,120054],"mapped",[109]],[[120055,120055],"mapped",[110]],[[120056,120056],"mapped",[111]],[[120057,120057],"mapped",[112]],[[120058,120058],"mapped",[113]],[[120059,120059],"mapped",[114]],[[120060,120060],"mapped",[115]],[[120061,120061],"mapped",[116]],[[120062,120062],"mapped",[117]],[[120063,120063],"mapped",[118]],[[120064,120064],"mapped",[119]],[[120065,120065],"mapped",[120]],[[120066,120066],"mapped",[121]],[[120067,120067],"mapped",[122]],[[120068,120068],"mapped",[97]],[[120069,120069],"mapped",[98]],[[120070,120070],"disallowed"],[[120071,120071],"mapped",[100]],[[120072,120072],"mapped",[101]],[[120073,120073],"mapped",[102]],[[120074,120074],"mapped",[103]],[[120075,120076],"disallowed"],[[120077,120077],"mapped",[106]],[[120078,120
078],"mapped",[107]],[[120079,120079],"mapped",[108]],[[120080,120080],"mapped",[109]],[[120081,120081],"mapped",[110]],[[120082,120082],"mapped",[111]],[[120083,120083],"mapped",[112]],[[120084,120084],"mapped",[113]],[[120085,120085],"disallowed"],[[120086,120086],"mapped",[115]],[[120087,120087],"mapped",[116]],[[120088,120088],"mapped",[117]],[[120089,120089],"mapped",[118]],[[120090,120090],"mapped",[119]],[[120091,120091],"mapped",[120]],[[120092,120092],"mapped",[121]],[[120093,120093],"disallowed"],[[120094,120094],"mapped",[97]],[[120095,120095],"mapped",[98]],[[120096,120096],"mapped",[99]],[[120097,120097],"mapped",[100]],[[120098,120098],"mapped",[101]],[[120099,120099],"mapped",[102]],[[120100,120100],"mapped",[103]],[[120101,120101],"mapped",[104]],[[120102,120102],"mapped",[105]],[[120103,120103],"mapped",[106]],[[120104,120104],"mapped",[107]],[[120105,120105],"mapped",[108]],[[120106,120106],"mapped",[109]],[[120107,120107],"mapped",[110]],[[120108,120108],"mapped",[111]],[[120109,120109],"mapped",[112]],[[120110,120110],"mapped",[113]],[[120111,120111],"mapped",[114]],[[120112,120112],"mapped",[115]],[[120113,120113],"mapped",[116]],[[120114,120114],"mapped",[117]],[[120115,120115],"mapped",[118]],[[120116,120116],"mapped",[119]],[[120117,120117],"mapped",[120]],[[120118,120118],"mapped",[121]],[[120119,120119],"mapped",[122]],[[120120,120120],"mapped",[97]],[[120121,120121],"mapped",[98]],[[120122,120122],"disallowed"],[[120123,120123],"mapped",[100]],[[120124,120124],"mapped",[101]],[[120125,120125],"mapped",[102]],[[120126,120126],"mapped",[103]],[[120127,120127],"disallowed"],[[120128,120128],"mapped",[105]],[[120129,120129],"mapped",[106]],[[120130,120130],"mapped",[107]],[[120131,120131],"mapped",[108]],[[120132,120132],"mapped",[109]],[[120133,120133],"disallowed"],[[120134,120134],"mapped",[111]],[[120135,120137],"disallowed"],[[120138,120138],"mapped",[115]],[[120139,120139],"mapped",[116]],[[120140,120140],"mapped",[117]],[[120141,120141],"mapped",[118]],[[120142,120142],"mapped",[119]],[[120143,120143],"mapped",[120]],[[120144,120144],"mapped",[121]],[[120145,120145],"disallowed"],[[120146,120146],"mapped",[97]],[[120147,120147],"mapped",[98]],[[120148,120148],"mapped",[99]],[[120149,120149],"mapped",[100]],[[120150,120150],"mapped",[101]],[[120151,120151],"mapped",[102]],[[120152,120152],"mapped",[103]],[[120153,120153],"mapped",[104]],[[120154,120154],"mapped",[105]],[[120155,120155],"mapped",[106]],[[120156,120156],"mapped",[107]],[[120157,120157],"mapped",[108]],[[120158,120158],"mapped",[109]],[[120159,120159],"mapped",[110]],[[120160,120160],"mapped",[111]],[[120161,120161],"mapped",[112]],[[120162,120162],"mapped",[113]],[[120163,120163],"mapped",[114]],[[120164,120164],"mapped",[115]],[[120165,120165],"mapped",[116]],[[120166,120166],"mapped",[117]],[[120167,120167],"mapped",[118]],[[120168,120168],"mapped",[119]],[[120169,120169],"mapped",[120]],[[120170,120170],"mapped",[121]],[[120171,120171],"mapped",[122]],[[120172,120172],"mapped",[97]],[[120173,120173],"mapped",[98]],[[120174,120174],"mapped",[99]],[[120175,120175],"mapped",[100]],[[120176,120176],"mapped",[101]],[[120177,120177],"mapped",[102]],[[120178,120178],"mapped",[103]],[[120179,120179],"mapped",[104]],[[120180,120180],"mapped",[105]],[[120181,120181],"mapped",[106]],[[120182,120182],"mapped",[107]],[[120183,120183],"mapped",[108]],[[120184,120184],"mapped",[109]],[[120185,120185],"mapped",[110]],[[120186,120186],"mapped",[111]],[[120187,120187],"mapped",[112]],[[120188,120188],"mapped",[1
13]],[[120189,120189],"mapped",[114]],[[120190,120190],"mapped",[115]],[[120191,120191],"mapped",[116]],[[120192,120192],"mapped",[117]],[[120193,120193],"mapped",[118]],[[120194,120194],"mapped",[119]],[[120195,120195],"mapped",[120]],[[120196,120196],"mapped",[121]],[[120197,120197],"mapped",[122]],[[120198,120198],"mapped",[97]],[[120199,120199],"mapped",[98]],[[120200,120200],"mapped",[99]],[[120201,120201],"mapped",[100]],[[120202,120202],"mapped",[101]],[[120203,120203],"mapped",[102]],[[120204,120204],"mapped",[103]],[[120205,120205],"mapped",[104]],[[120206,120206],"mapped",[105]],[[120207,120207],"mapped",[106]],[[120208,120208],"mapped",[107]],[[120209,120209],"mapped",[108]],[[120210,120210],"mapped",[109]],[[120211,120211],"mapped",[110]],[[120212,120212],"mapped",[111]],[[120213,120213],"mapped",[112]],[[120214,120214],"mapped",[113]],[[120215,120215],"mapped",[114]],[[120216,120216],"mapped",[115]],[[120217,120217],"mapped",[116]],[[120218,120218],"mapped",[117]],[[120219,120219],"mapped",[118]],[[120220,120220],"mapped",[119]],[[120221,120221],"mapped",[120]],[[120222,120222],"mapped",[121]],[[120223,120223],"mapped",[122]],[[120224,120224],"mapped",[97]],[[120225,120225],"mapped",[98]],[[120226,120226],"mapped",[99]],[[120227,120227],"mapped",[100]],[[120228,120228],"mapped",[101]],[[120229,120229],"mapped",[102]],[[120230,120230],"mapped",[103]],[[120231,120231],"mapped",[104]],[[120232,120232],"mapped",[105]],[[120233,120233],"mapped",[106]],[[120234,120234],"mapped",[107]],[[120235,120235],"mapped",[108]],[[120236,120236],"mapped",[109]],[[120237,120237],"mapped",[110]],[[120238,120238],"mapped",[111]],[[120239,120239],"mapped",[112]],[[120240,120240],"mapped",[113]],[[120241,120241],"mapped",[114]],[[120242,120242],"mapped",[115]],[[120243,120243],"mapped",[116]],[[120244,120244],"mapped",[117]],[[120245,120245],"mapped",[118]],[[120246,120246],"mapped",[119]],[[120247,120247],"mapped",[120]],[[120248,120248],"mapped",[121]],[[120249,120249],"mapped",[122]],[[120250,120250],"mapped",[97]],[[120251,120251],"mapped",[98]],[[120252,120252],"mapped",[99]],[[120253,120253],"mapped",[100]],[[120254,120254],"mapped",[101]],[[120255,120255],"mapped",[102]],[[120256,120256],"mapped",[103]],[[120257,120257],"mapped",[104]],[[120258,120258],"mapped",[105]],[[120259,120259],"mapped",[106]],[[120260,120260],"mapped",[107]],[[120261,120261],"mapped",[108]],[[120262,120262],"mapped",[109]],[[120263,120263],"mapped",[110]],[[120264,120264],"mapped",[111]],[[120265,120265],"mapped",[112]],[[120266,120266],"mapped",[113]],[[120267,120267],"mapped",[114]],[[120268,120268],"mapped",[115]],[[120269,120269],"mapped",[116]],[[120270,120270],"mapped",[117]],[[120271,120271],"mapped",[118]],[[120272,120272],"mapped",[119]],[[120273,120273],"mapped",[120]],[[120274,120274],"mapped",[121]],[[120275,120275],"mapped",[122]],[[120276,120276],"mapped",[97]],[[120277,120277],"mapped",[98]],[[120278,120278],"mapped",[99]],[[120279,120279],"mapped",[100]],[[120280,120280],"mapped",[101]],[[120281,120281],"mapped",[102]],[[120282,120282],"mapped",[103]],[[120283,120283],"mapped",[104]],[[120284,120284],"mapped",[105]],[[120285,120285],"mapped",[106]],[[120286,120286],"mapped",[107]],[[120287,120287],"mapped",[108]],[[120288,120288],"mapped",[109]],[[120289,120289],"mapped",[110]],[[120290,120290],"mapped",[111]],[[120291,120291],"mapped",[112]],[[120292,120292],"mapped",[113]],[[120293,120293],"mapped",[114]],[[120294,120294],"mapped",[115]],[[120295,120295],"mapped",[116]],[[120296,120296],"mapped",[117]
],[[120297,120297],"mapped",[118]],[[120298,120298],"mapped",[119]],[[120299,120299],"mapped",[120]],[[120300,120300],"mapped",[121]],[[120301,120301],"mapped",[122]],[[120302,120302],"mapped",[97]],[[120303,120303],"mapped",[98]],[[120304,120304],"mapped",[99]],[[120305,120305],"mapped",[100]],[[120306,120306],"mapped",[101]],[[120307,120307],"mapped",[102]],[[120308,120308],"mapped",[103]],[[120309,120309],"mapped",[104]],[[120310,120310],"mapped",[105]],[[120311,120311],"mapped",[106]],[[120312,120312],"mapped",[107]],[[120313,120313],"mapped",[108]],[[120314,120314],"mapped",[109]],[[120315,120315],"mapped",[110]],[[120316,120316],"mapped",[111]],[[120317,120317],"mapped",[112]],[[120318,120318],"mapped",[113]],[[120319,120319],"mapped",[114]],[[120320,120320],"mapped",[115]],[[120321,120321],"mapped",[116]],[[120322,120322],"mapped",[117]],[[120323,120323],"mapped",[118]],[[120324,120324],"mapped",[119]],[[120325,120325],"mapped",[120]],[[120326,120326],"mapped",[121]],[[120327,120327],"mapped",[122]],[[120328,120328],"mapped",[97]],[[120329,120329],"mapped",[98]],[[120330,120330],"mapped",[99]],[[120331,120331],"mapped",[100]],[[120332,120332],"mapped",[101]],[[120333,120333],"mapped",[102]],[[120334,120334],"mapped",[103]],[[120335,120335],"mapped",[104]],[[120336,120336],"mapped",[105]],[[120337,120337],"mapped",[106]],[[120338,120338],"mapped",[107]],[[120339,120339],"mapped",[108]],[[120340,120340],"mapped",[109]],[[120341,120341],"mapped",[110]],[[120342,120342],"mapped",[111]],[[120343,120343],"mapped",[112]],[[120344,120344],"mapped",[113]],[[120345,120345],"mapped",[114]],[[120346,120346],"mapped",[115]],[[120347,120347],"mapped",[116]],[[120348,120348],"mapped",[117]],[[120349,120349],"mapped",[118]],[[120350,120350],"mapped",[119]],[[120351,120351],"mapped",[120]],[[120352,120352],"mapped",[121]],[[120353,120353],"mapped",[122]],[[120354,120354],"mapped",[97]],[[120355,120355],"mapped",[98]],[[120356,120356],"mapped",[99]],[[120357,120357],"mapped",[100]],[[120358,120358],"mapped",[101]],[[120359,120359],"mapped",[102]],[[120360,120360],"mapped",[103]],[[120361,120361],"mapped",[104]],[[120362,120362],"mapped",[105]],[[120363,120363],"mapped",[106]],[[120364,120364],"mapped",[107]],[[120365,120365],"mapped",[108]],[[120366,120366],"mapped",[109]],[[120367,120367],"mapped",[110]],[[120368,120368],"mapped",[111]],[[120369,120369],"mapped",[112]],[[120370,120370],"mapped",[113]],[[120371,120371],"mapped",[114]],[[120372,120372],"mapped",[115]],[[120373,120373],"mapped",[116]],[[120374,120374],"mapped",[117]],[[120375,120375],"mapped",[118]],[[120376,120376],"mapped",[119]],[[120377,120377],"mapped",[120]],[[120378,120378],"mapped",[121]],[[120379,120379],"mapped",[122]],[[120380,120380],"mapped",[97]],[[120381,120381],"mapped",[98]],[[120382,120382],"mapped",[99]],[[120383,120383],"mapped",[100]],[[120384,120384],"mapped",[101]],[[120385,120385],"mapped",[102]],[[120386,120386],"mapped",[103]],[[120387,120387],"mapped",[104]],[[120388,120388],"mapped",[105]],[[120389,120389],"mapped",[106]],[[120390,120390],"mapped",[107]],[[120391,120391],"mapped",[108]],[[120392,120392],"mapped",[109]],[[120393,120393],"mapped",[110]],[[120394,120394],"mapped",[111]],[[120395,120395],"mapped",[112]],[[120396,120396],"mapped",[113]],[[120397,120397],"mapped",[114]],[[120398,120398],"mapped",[115]],[[120399,120399],"mapped",[116]],[[120400,120400],"mapped",[117]],[[120401,120401],"mapped",[118]],[[120402,120402],"mapped",[119]],[[120403,120403],"mapped",[120]],[[120404,120404],"mapped",[121]],[
[120405,120405],"mapped",[122]],[[120406,120406],"mapped",[97]],[[120407,120407],"mapped",[98]],[[120408,120408],"mapped",[99]],[[120409,120409],"mapped",[100]],[[120410,120410],"mapped",[101]],[[120411,120411],"mapped",[102]],[[120412,120412],"mapped",[103]],[[120413,120413],"mapped",[104]],[[120414,120414],"mapped",[105]],[[120415,120415],"mapped",[106]],[[120416,120416],"mapped",[107]],[[120417,120417],"mapped",[108]],[[120418,120418],"mapped",[109]],[[120419,120419],"mapped",[110]],[[120420,120420],"mapped",[111]],[[120421,120421],"mapped",[112]],[[120422,120422],"mapped",[113]],[[120423,120423],"mapped",[114]],[[120424,120424],"mapped",[115]],[[120425,120425],"mapped",[116]],[[120426,120426],"mapped",[117]],[[120427,120427],"mapped",[118]],[[120428,120428],"mapped",[119]],[[120429,120429],"mapped",[120]],[[120430,120430],"mapped",[121]],[[120431,120431],"mapped",[122]],[[120432,120432],"mapped",[97]],[[120433,120433],"mapped",[98]],[[120434,120434],"mapped",[99]],[[120435,120435],"mapped",[100]],[[120436,120436],"mapped",[101]],[[120437,120437],"mapped",[102]],[[120438,120438],"mapped",[103]],[[120439,120439],"mapped",[104]],[[120440,120440],"mapped",[105]],[[120441,120441],"mapped",[106]],[[120442,120442],"mapped",[107]],[[120443,120443],"mapped",[108]],[[120444,120444],"mapped",[109]],[[120445,120445],"mapped",[110]],[[120446,120446],"mapped",[111]],[[120447,120447],"mapped",[112]],[[120448,120448],"mapped",[113]],[[120449,120449],"mapped",[114]],[[120450,120450],"mapped",[115]],[[120451,120451],"mapped",[116]],[[120452,120452],"mapped",[117]],[[120453,120453],"mapped",[118]],[[120454,120454],"mapped",[119]],[[120455,120455],"mapped",[120]],[[120456,120456],"mapped",[121]],[[120457,120457],"mapped",[122]],[[120458,120458],"mapped",[97]],[[120459,120459],"mapped",[98]],[[120460,120460],"mapped",[99]],[[120461,120461],"mapped",[100]],[[120462,120462],"mapped",[101]],[[120463,120463],"mapped",[102]],[[120464,120464],"mapped",[103]],[[120465,120465],"mapped",[104]],[[120466,120466],"mapped",[105]],[[120467,120467],"mapped",[106]],[[120468,120468],"mapped",[107]],[[120469,120469],"mapped",[108]],[[120470,120470],"mapped",[109]],[[120471,120471],"mapped",[110]],[[120472,120472],"mapped",[111]],[[120473,120473],"mapped",[112]],[[120474,120474],"mapped",[113]],[[120475,120475],"mapped",[114]],[[120476,120476],"mapped",[115]],[[120477,120477],"mapped",[116]],[[120478,120478],"mapped",[117]],[[120479,120479],"mapped",[118]],[[120480,120480],"mapped",[119]],[[120481,120481],"mapped",[120]],[[120482,120482],"mapped",[121]],[[120483,120483],"mapped",[122]],[[120484,120484],"mapped",[305]],[[120485,120485],"mapped",[567]],[[120486,120487],"disallowed"],[[120488,120488],"mapped",[945]],[[120489,120489],"mapped",[946]],[[120490,120490],"mapped",[947]],[[120491,120491],"mapped",[948]],[[120492,120492],"mapped",[949]],[[120493,120493],"mapped",[950]],[[120494,120494],"mapped",[951]],[[120495,120495],"mapped",[952]],[[120496,120496],"mapped",[953]],[[120497,120497],"mapped",[954]],[[120498,120498],"mapped",[955]],[[120499,120499],"mapped",[956]],[[120500,120500],"mapped",[957]],[[120501,120501],"mapped",[958]],[[120502,120502],"mapped",[959]],[[120503,120503],"mapped",[960]],[[120504,120504],"mapped",[961]],[[120505,120505],"mapped",[952]],[[120506,120506],"mapped",[963]],[[120507,120507],"mapped",[964]],[[120508,120508],"mapped",[965]],[[120509,120509],"mapped",[966]],[[120510,120510],"mapped",[967]],[[120511,120511],"mapped",[968]],[[120512,120512],"mapped",[969]],[[120513,120513],"mapped",[8711]],[[
120514,120514],"mapped",[945]],[[120515,120515],"mapped",[946]],[[120516,120516],"mapped",[947]],[[120517,120517],"mapped",[948]],[[120518,120518],"mapped",[949]],[[120519,120519],"mapped",[950]],[[120520,120520],"mapped",[951]],[[120521,120521],"mapped",[952]],[[120522,120522],"mapped",[953]],[[120523,120523],"mapped",[954]],[[120524,120524],"mapped",[955]],[[120525,120525],"mapped",[956]],[[120526,120526],"mapped",[957]],[[120527,120527],"mapped",[958]],[[120528,120528],"mapped",[959]],[[120529,120529],"mapped",[960]],[[120530,120530],"mapped",[961]],[[120531,120532],"mapped",[963]],[[120533,120533],"mapped",[964]],[[120534,120534],"mapped",[965]],[[120535,120535],"mapped",[966]],[[120536,120536],"mapped",[967]],[[120537,120537],"mapped",[968]],[[120538,120538],"mapped",[969]],[[120539,120539],"mapped",[8706]],[[120540,120540],"mapped",[949]],[[120541,120541],"mapped",[952]],[[120542,120542],"mapped",[954]],[[120543,120543],"mapped",[966]],[[120544,120544],"mapped",[961]],[[120545,120545],"mapped",[960]],[[120546,120546],"mapped",[945]],[[120547,120547],"mapped",[946]],[[120548,120548],"mapped",[947]],[[120549,120549],"mapped",[948]],[[120550,120550],"mapped",[949]],[[120551,120551],"mapped",[950]],[[120552,120552],"mapped",[951]],[[120553,120553],"mapped",[952]],[[120554,120554],"mapped",[953]],[[120555,120555],"mapped",[954]],[[120556,120556],"mapped",[955]],[[120557,120557],"mapped",[956]],[[120558,120558],"mapped",[957]],[[120559,120559],"mapped",[958]],[[120560,120560],"mapped",[959]],[[120561,120561],"mapped",[960]],[[120562,120562],"mapped",[961]],[[120563,120563],"mapped",[952]],[[120564,120564],"mapped",[963]],[[120565,120565],"mapped",[964]],[[120566,120566],"mapped",[965]],[[120567,120567],"mapped",[966]],[[120568,120568],"mapped",[967]],[[120569,120569],"mapped",[968]],[[120570,120570],"mapped",[969]],[[120571,120571],"mapped",[8711]],[[120572,120572],"mapped",[945]],[[120573,120573],"mapped",[946]],[[120574,120574],"mapped",[947]],[[120575,120575],"mapped",[948]],[[120576,120576],"mapped",[949]],[[120577,120577],"mapped",[950]],[[120578,120578],"mapped",[951]],[[120579,120579],"mapped",[952]],[[120580,120580],"mapped",[953]],[[120581,120581],"mapped",[954]],[[120582,120582],"mapped",[955]],[[120583,120583],"mapped",[956]],[[120584,120584],"mapped",[957]],[[120585,120585],"mapped",[958]],[[120586,120586],"mapped",[959]],[[120587,120587],"mapped",[960]],[[120588,120588],"mapped",[961]],[[120589,120590],"mapped",[963]],[[120591,120591],"mapped",[964]],[[120592,120592],"mapped",[965]],[[120593,120593],"mapped",[966]],[[120594,120594],"mapped",[967]],[[120595,120595],"mapped",[968]],[[120596,120596],"mapped",[969]],[[120597,120597],"mapped",[8706]],[[120598,120598],"mapped",[949]],[[120599,120599],"mapped",[952]],[[120600,120600],"mapped",[954]],[[120601,120601],"mapped",[966]],[[120602,120602],"mapped",[961]],[[120603,120603],"mapped",[960]],[[120604,120604],"mapped",[945]],[[120605,120605],"mapped",[946]],[[120606,120606],"mapped",[947]],[[120607,120607],"mapped",[948]],[[120608,120608],"mapped",[949]],[[120609,120609],"mapped",[950]],[[120610,120610],"mapped",[951]],[[120611,120611],"mapped",[952]],[[120612,120612],"mapped",[953]],[[120613,120613],"mapped",[954]],[[120614,120614],"mapped",[955]],[[120615,120615],"mapped",[956]],[[120616,120616],"mapped",[957]],[[120617,120617],"mapped",[958]],[[120618,120618],"mapped",[959]],[[120619,120619],"mapped",[960]],[[120620,120620],"mapped",[961]],[[120621,120621],"mapped",[952]],[[120622,120622],"mapped",[963]],[[120623,120623],"mappe
d",[964]],[[120624,120624],"mapped",[965]],[[120625,120625],"mapped",[966]],[[120626,120626],"mapped",[967]],[[120627,120627],"mapped",[968]],[[120628,120628],"mapped",[969]],[[120629,120629],"mapped",[8711]],[[120630,120630],"mapped",[945]],[[120631,120631],"mapped",[946]],[[120632,120632],"mapped",[947]],[[120633,120633],"mapped",[948]],[[120634,120634],"mapped",[949]],[[120635,120635],"mapped",[950]],[[120636,120636],"mapped",[951]],[[120637,120637],"mapped",[952]],[[120638,120638],"mapped",[953]],[[120639,120639],"mapped",[954]],[[120640,120640],"mapped",[955]],[[120641,120641],"mapped",[956]],[[120642,120642],"mapped",[957]],[[120643,120643],"mapped",[958]],[[120644,120644],"mapped",[959]],[[120645,120645],"mapped",[960]],[[120646,120646],"mapped",[961]],[[120647,120648],"mapped",[963]],[[120649,120649],"mapped",[964]],[[120650,120650],"mapped",[965]],[[120651,120651],"mapped",[966]],[[120652,120652],"mapped",[967]],[[120653,120653],"mapped",[968]],[[120654,120654],"mapped",[969]],[[120655,120655],"mapped",[8706]],[[120656,120656],"mapped",[949]],[[120657,120657],"mapped",[952]],[[120658,120658],"mapped",[954]],[[120659,120659],"mapped",[966]],[[120660,120660],"mapped",[961]],[[120661,120661],"mapped",[960]],[[120662,120662],"mapped",[945]],[[120663,120663],"mapped",[946]],[[120664,120664],"mapped",[947]],[[120665,120665],"mapped",[948]],[[120666,120666],"mapped",[949]],[[120667,120667],"mapped",[950]],[[120668,120668],"mapped",[951]],[[120669,120669],"mapped",[952]],[[120670,120670],"mapped",[953]],[[120671,120671],"mapped",[954]],[[120672,120672],"mapped",[955]],[[120673,120673],"mapped",[956]],[[120674,120674],"mapped",[957]],[[120675,120675],"mapped",[958]],[[120676,120676],"mapped",[959]],[[120677,120677],"mapped",[960]],[[120678,120678],"mapped",[961]],[[120679,120679],"mapped",[952]],[[120680,120680],"mapped",[963]],[[120681,120681],"mapped",[964]],[[120682,120682],"mapped",[965]],[[120683,120683],"mapped",[966]],[[120684,120684],"mapped",[967]],[[120685,120685],"mapped",[968]],[[120686,120686],"mapped",[969]],[[120687,120687],"mapped",[8711]],[[120688,120688],"mapped",[945]],[[120689,120689],"mapped",[946]],[[120690,120690],"mapped",[947]],[[120691,120691],"mapped",[948]],[[120692,120692],"mapped",[949]],[[120693,120693],"mapped",[950]],[[120694,120694],"mapped",[951]],[[120695,120695],"mapped",[952]],[[120696,120696],"mapped",[953]],[[120697,120697],"mapped",[954]],[[120698,120698],"mapped",[955]],[[120699,120699],"mapped",[956]],[[120700,120700],"mapped",[957]],[[120701,120701],"mapped",[958]],[[120702,120702],"mapped",[959]],[[120703,120703],"mapped",[960]],[[120704,120704],"mapped",[961]],[[120705,120706],"mapped",[963]],[[120707,120707],"mapped",[964]],[[120708,120708],"mapped",[965]],[[120709,120709],"mapped",[966]],[[120710,120710],"mapped",[967]],[[120711,120711],"mapped",[968]],[[120712,120712],"mapped",[969]],[[120713,120713],"mapped",[8706]],[[120714,120714],"mapped",[949]],[[120715,120715],"mapped",[952]],[[120716,120716],"mapped",[954]],[[120717,120717],"mapped",[966]],[[120718,120718],"mapped",[961]],[[120719,120719],"mapped",[960]],[[120720,120720],"mapped",[945]],[[120721,120721],"mapped",[946]],[[120722,120722],"mapped",[947]],[[120723,120723],"mapped",[948]],[[120724,120724],"mapped",[949]],[[120725,120725],"mapped",[950]],[[120726,120726],"mapped",[951]],[[120727,120727],"mapped",[952]],[[120728,120728],"mapped",[953]],[[120729,120729],"mapped",[954]],[[120730,120730],"mapped",[955]],[[120731,120731],"mapped",[956]],[[120732,120732],"mapped",[957]],[[120733,1
20733],"mapped",[958]],[[120734,120734],"mapped",[959]],[[120735,120735],"mapped",[960]],[[120736,120736],"mapped",[961]],[[120737,120737],"mapped",[952]],[[120738,120738],"mapped",[963]],[[120739,120739],"mapped",[964]],[[120740,120740],"mapped",[965]],[[120741,120741],"mapped",[966]],[[120742,120742],"mapped",[967]],[[120743,120743],"mapped",[968]],[[120744,120744],"mapped",[969]],[[120745,120745],"mapped",[8711]],[[120746,120746],"mapped",[945]],[[120747,120747],"mapped",[946]],[[120748,120748],"mapped",[947]],[[120749,120749],"mapped",[948]],[[120750,120750],"mapped",[949]],[[120751,120751],"mapped",[950]],[[120752,120752],"mapped",[951]],[[120753,120753],"mapped",[952]],[[120754,120754],"mapped",[953]],[[120755,120755],"mapped",[954]],[[120756,120756],"mapped",[955]],[[120757,120757],"mapped",[956]],[[120758,120758],"mapped",[957]],[[120759,120759],"mapped",[958]],[[120760,120760],"mapped",[959]],[[120761,120761],"mapped",[960]],[[120762,120762],"mapped",[961]],[[120763,120764],"mapped",[963]],[[120765,120765],"mapped",[964]],[[120766,120766],"mapped",[965]],[[120767,120767],"mapped",[966]],[[120768,120768],"mapped",[967]],[[120769,120769],"mapped",[968]],[[120770,120770],"mapped",[969]],[[120771,120771],"mapped",[8706]],[[120772,120772],"mapped",[949]],[[120773,120773],"mapped",[952]],[[120774,120774],"mapped",[954]],[[120775,120775],"mapped",[966]],[[120776,120776],"mapped",[961]],[[120777,120777],"mapped",[960]],[[120778,120779],"mapped",[989]],[[120780,120781],"disallowed"],[[120782,120782],"mapped",[48]],[[120783,120783],"mapped",[49]],[[120784,120784],"mapped",[50]],[[120785,120785],"mapped",[51]],[[120786,120786],"mapped",[52]],[[120787,120787],"mapped",[53]],[[120788,120788],"mapped",[54]],[[120789,120789],"mapped",[55]],[[120790,120790],"mapped",[56]],[[120791,120791],"mapped",[57]],[[120792,120792],"mapped",[48]],[[120793,120793],"mapped",[49]],[[120794,120794],"mapped",[50]],[[120795,120795],"mapped",[51]],[[120796,120796],"mapped",[52]],[[120797,120797],"mapped",[53]],[[120798,120798],"mapped",[54]],[[120799,120799],"mapped",[55]],[[120800,120800],"mapped",[56]],[[120801,120801],"mapped",[57]],[[120802,120802],"mapped",[48]],[[120803,120803],"mapped",[49]],[[120804,120804],"mapped",[50]],[[120805,120805],"mapped",[51]],[[120806,120806],"mapped",[52]],[[120807,120807],"mapped",[53]],[[120808,120808],"mapped",[54]],[[120809,120809],"mapped",[55]],[[120810,120810],"mapped",[56]],[[120811,120811],"mapped",[57]],[[120812,120812],"mapped",[48]],[[120813,120813],"mapped",[49]],[[120814,120814],"mapped",[50]],[[120815,120815],"mapped",[51]],[[120816,120816],"mapped",[52]],[[120817,120817],"mapped",[53]],[[120818,120818],"mapped",[54]],[[120819,120819],"mapped",[55]],[[120820,120820],"mapped",[56]],[[120821,120821],"mapped",[57]],[[120822,120822],"mapped",[48]],[[120823,120823],"mapped",[49]],[[120824,120824],"mapped",[50]],[[120825,120825],"mapped",[51]],[[120826,120826],"mapped",[52]],[[120827,120827],"mapped",[53]],[[120828,120828],"mapped",[54]],[[120829,120829],"mapped",[55]],[[120830,120830],"mapped",[56]],[[120831,120831],"mapped",[57]],[[120832,121343],"valid",[],"NV8"],[[121344,121398],"valid"],[[121399,121402],"valid",[],"NV8"],[[121403,121452],"valid"],[[121453,121460],"valid",[],"NV8"],[[121461,121461],"valid"],[[121462,121475],"valid",[],"NV8"],[[121476,121476],"valid"],[[121477,121483],"valid",[],"NV8"],[[121484,121498],"disallowed"],[[121499,121503],"valid"],[[121504,121504],"disallowed"],[[121505,121519],"valid"],[[121520,124927],"disallowed"],[[124928,125124],"valid
"],[[125125,125126],"disallowed"],[[125127,125135],"valid",[],"NV8"],[[125136,125142],"valid"],[[125143,126463],"disallowed"],[[126464,126464],"mapped",[1575]],[[126465,126465],"mapped",[1576]],[[126466,126466],"mapped",[1580]],[[126467,126467],"mapped",[1583]],[[126468,126468],"disallowed"],[[126469,126469],"mapped",[1608]],[[126470,126470],"mapped",[1586]],[[126471,126471],"mapped",[1581]],[[126472,126472],"mapped",[1591]],[[126473,126473],"mapped",[1610]],[[126474,126474],"mapped",[1603]],[[126475,126475],"mapped",[1604]],[[126476,126476],"mapped",[1605]],[[126477,126477],"mapped",[1606]],[[126478,126478],"mapped",[1587]],[[126479,126479],"mapped",[1593]],[[126480,126480],"mapped",[1601]],[[126481,126481],"mapped",[1589]],[[126482,126482],"mapped",[1602]],[[126483,126483],"mapped",[1585]],[[126484,126484],"mapped",[1588]],[[126485,126485],"mapped",[1578]],[[126486,126486],"mapped",[1579]],[[126487,126487],"mapped",[1582]],[[126488,126488],"mapped",[1584]],[[126489,126489],"mapped",[1590]],[[126490,126490],"mapped",[1592]],[[126491,126491],"mapped",[1594]],[[126492,126492],"mapped",[1646]],[[126493,126493],"mapped",[1722]],[[126494,126494],"mapped",[1697]],[[126495,126495],"mapped",[1647]],[[126496,126496],"disallowed"],[[126497,126497],"mapped",[1576]],[[126498,126498],"mapped",[1580]],[[126499,126499],"disallowed"],[[126500,126500],"mapped",[1607]],[[126501,126502],"disallowed"],[[126503,126503],"mapped",[1581]],[[126504,126504],"disallowed"],[[126505,126505],"mapped",[1610]],[[126506,126506],"mapped",[1603]],[[126507,126507],"mapped",[1604]],[[126508,126508],"mapped",[1605]],[[126509,126509],"mapped",[1606]],[[126510,126510],"mapped",[1587]],[[126511,126511],"mapped",[1593]],[[126512,126512],"mapped",[1601]],[[126513,126513],"mapped",[1589]],[[126514,126514],"mapped",[1602]],[[126515,126515],"disallowed"],[[126516,126516],"mapped",[1588]],[[126517,126517],"mapped",[1578]],[[126518,126518],"mapped",[1579]],[[126519,126519],"mapped",[1582]],[[126520,126520],"disallowed"],[[126521,126521],"mapped",[1590]],[[126522,126522],"disallowed"],[[126523,126523],"mapped",[1594]],[[126524,126529],"disallowed"],[[126530,126530],"mapped",[1580]],[[126531,126534],"disallowed"],[[126535,126535],"mapped",[1581]],[[126536,126536],"disallowed"],[[126537,126537],"mapped",[1610]],[[126538,126538],"disallowed"],[[126539,126539],"mapped",[1604]],[[126540,126540],"disallowed"],[[126541,126541],"mapped",[1606]],[[126542,126542],"mapped",[1587]],[[126543,126543],"mapped",[1593]],[[126544,126544],"disallowed"],[[126545,126545],"mapped",[1589]],[[126546,126546],"mapped",[1602]],[[126547,126547],"disallowed"],[[126548,126548],"mapped",[1588]],[[126549,126550],"disallowed"],[[126551,126551],"mapped",[1582]],[[126552,126552],"disallowed"],[[126553,126553],"mapped",[1590]],[[126554,126554],"disallowed"],[[126555,126555],"mapped",[1594]],[[126556,126556],"disallowed"],[[126557,126557],"mapped",[1722]],[[126558,126558],"disallowed"],[[126559,126559],"mapped",[1647]],[[126560,126560],"disallowed"],[[126561,126561],"mapped",[1576]],[[126562,126562],"mapped",[1580]],[[126563,126563],"disallowed"],[[126564,126564],"mapped",[1607]],[[126565,126566],"disallowed"],[[126567,126567],"mapped",[1581]],[[126568,126568],"mapped",[1591]],[[126569,126569],"mapped",[1610]],[[126570,126570],"mapped",[1603]],[[126571,126571],"disallowed"],[[126572,126572],"mapped",[1605]],[[126573,126573],"mapped",[1606]],[[126574,126574],"mapped",[1587]],[[126575,126575],"mapped",[1593]],[[126576,126576],"mapped",[1601]],[[126577,126577],"mapped",[1589]]
,[[126578,126578],"mapped",[1602]],[[126579,126579],"disallowed"],[[126580,126580],"mapped",[1588]],[[126581,126581],"mapped",[1578]],[[126582,126582],"mapped",[1579]],[[126583,126583],"mapped",[1582]],[[126584,126584],"disallowed"],[[126585,126585],"mapped",[1590]],[[126586,126586],"mapped",[1592]],[[126587,126587],"mapped",[1594]],[[126588,126588],"mapped",[1646]],[[126589,126589],"disallowed"],[[126590,126590],"mapped",[1697]],[[126591,126591],"disallowed"],[[126592,126592],"mapped",[1575]],[[126593,126593],"mapped",[1576]],[[126594,126594],"mapped",[1580]],[[126595,126595],"mapped",[1583]],[[126596,126596],"mapped",[1607]],[[126597,126597],"mapped",[1608]],[[126598,126598],"mapped",[1586]],[[126599,126599],"mapped",[1581]],[[126600,126600],"mapped",[1591]],[[126601,126601],"mapped",[1610]],[[126602,126602],"disallowed"],[[126603,126603],"mapped",[1604]],[[126604,126604],"mapped",[1605]],[[126605,126605],"mapped",[1606]],[[126606,126606],"mapped",[1587]],[[126607,126607],"mapped",[1593]],[[126608,126608],"mapped",[1601]],[[126609,126609],"mapped",[1589]],[[126610,126610],"mapped",[1602]],[[126611,126611],"mapped",[1585]],[[126612,126612],"mapped",[1588]],[[126613,126613],"mapped",[1578]],[[126614,126614],"mapped",[1579]],[[126615,126615],"mapped",[1582]],[[126616,126616],"mapped",[1584]],[[126617,126617],"mapped",[1590]],[[126618,126618],"mapped",[1592]],[[126619,126619],"mapped",[1594]],[[126620,126624],"disallowed"],[[126625,126625],"mapped",[1576]],[[126626,126626],"mapped",[1580]],[[126627,126627],"mapped",[1583]],[[126628,126628],"disallowed"],[[126629,126629],"mapped",[1608]],[[126630,126630],"mapped",[1586]],[[126631,126631],"mapped",[1581]],[[126632,126632],"mapped",[1591]],[[126633,126633],"mapped",[1610]],[[126634,126634],"disallowed"],[[126635,126635],"mapped",[1604]],[[126636,126636],"mapped",[1605]],[[126637,126637],"mapped",[1606]],[[126638,126638],"mapped",[1587]],[[126639,126639],"mapped",[1593]],[[126640,126640],"mapped",[1601]],[[126641,126641],"mapped",[1589]],[[126642,126642],"mapped",[1602]],[[126643,126643],"mapped",[1585]],[[126644,126644],"mapped",[1588]],[[126645,126645],"mapped",[1578]],[[126646,126646],"mapped",[1579]],[[126647,126647],"mapped",[1582]],[[126648,126648],"mapped",[1584]],[[126649,126649],"mapped",[1590]],[[126650,126650],"mapped",[1592]],[[126651,126651],"mapped",[1594]],[[126652,126703],"disallowed"],[[126704,126705],"valid",[],"NV8"],[[126706,126975],"disallowed"],[[126976,127019],"valid",[],"NV8"],[[127020,127023],"disallowed"],[[127024,127123],"valid",[],"NV8"],[[127124,127135],"disallowed"],[[127136,127150],"valid",[],"NV8"],[[127151,127152],"disallowed"],[[127153,127166],"valid",[],"NV8"],[[127167,127167],"valid",[],"NV8"],[[127168,127168],"disallowed"],[[127169,127183],"valid",[],"NV8"],[[127184,127184],"disallowed"],[[127185,127199],"valid",[],"NV8"],[[127200,127221],"valid",[],"NV8"],[[127222,127231],"disallowed"],[[127232,127232],"disallowed"],[[127233,127233],"disallowed_STD3_mapped",[48,44]],[[127234,127234],"disallowed_STD3_mapped",[49,44]],[[127235,127235],"disallowed_STD3_mapped",[50,44]],[[127236,127236],"disallowed_STD3_mapped",[51,44]],[[127237,127237],"disallowed_STD3_mapped",[52,44]],[[127238,127238],"disallowed_STD3_mapped",[53,44]],[[127239,127239],"disallowed_STD3_mapped",[54,44]],[[127240,127240],"disallowed_STD3_mapped",[55,44]],[[127241,127241],"disallowed_STD3_mapped",[56,44]],[[127242,127242],"disallowed_STD3_mapped",[57,44]],[[127243,127244],"valid",[],"NV8"],[[127245,127247],"disallowed"],[[127248,127248],"disallowed
_STD3_mapped",[40,97,41]],[[127249,127249],"disallowed_STD3_mapped",[40,98,41]],[[127250,127250],"disallowed_STD3_mapped",[40,99,41]],[[127251,127251],"disallowed_STD3_mapped",[40,100,41]],[[127252,127252],"disallowed_STD3_mapped",[40,101,41]],[[127253,127253],"disallowed_STD3_mapped",[40,102,41]],[[127254,127254],"disallowed_STD3_mapped",[40,103,41]],[[127255,127255],"disallowed_STD3_mapped",[40,104,41]],[[127256,127256],"disallowed_STD3_mapped",[40,105,41]],[[127257,127257],"disallowed_STD3_mapped",[40,106,41]],[[127258,127258],"disallowed_STD3_mapped",[40,107,41]],[[127259,127259],"disallowed_STD3_mapped",[40,108,41]],[[127260,127260],"disallowed_STD3_mapped",[40,109,41]],[[127261,127261],"disallowed_STD3_mapped",[40,110,41]],[[127262,127262],"disallowed_STD3_mapped",[40,111,41]],[[127263,127263],"disallowed_STD3_mapped",[40,112,41]],[[127264,127264],"disallowed_STD3_mapped",[40,113,41]],[[127265,127265],"disallowed_STD3_mapped",[40,114,41]],[[127266,127266],"disallowed_STD3_mapped",[40,115,41]],[[127267,127267],"disallowed_STD3_mapped",[40,116,41]],[[127268,127268],"disallowed_STD3_mapped",[40,117,41]],[[127269,127269],"disallowed_STD3_mapped",[40,118,41]],[[127270,127270],"disallowed_STD3_mapped",[40,119,41]],[[127271,127271],"disallowed_STD3_mapped",[40,120,41]],[[127272,127272],"disallowed_STD3_mapped",[40,121,41]],[[127273,127273],"disallowed_STD3_mapped",[40,122,41]],[[127274,127274],"mapped",[12308,115,12309]],[[127275,127275],"mapped",[99]],[[127276,127276],"mapped",[114]],[[127277,127277],"mapped",[99,100]],[[127278,127278],"mapped",[119,122]],[[127279,127279],"disallowed"],[[127280,127280],"mapped",[97]],[[127281,127281],"mapped",[98]],[[127282,127282],"mapped",[99]],[[127283,127283],"mapped",[100]],[[127284,127284],"mapped",[101]],[[127285,127285],"mapped",[102]],[[127286,127286],"mapped",[103]],[[127287,127287],"mapped",[104]],[[127288,127288],"mapped",[105]],[[127289,127289],"mapped",[106]],[[127290,127290],"mapped",[107]],[[127291,127291],"mapped",[108]],[[127292,127292],"mapped",[109]],[[127293,127293],"mapped",[110]],[[127294,127294],"mapped",[111]],[[127295,127295],"mapped",[112]],[[127296,127296],"mapped",[113]],[[127297,127297],"mapped",[114]],[[127298,127298],"mapped",[115]],[[127299,127299],"mapped",[116]],[[127300,127300],"mapped",[117]],[[127301,127301],"mapped",[118]],[[127302,127302],"mapped",[119]],[[127303,127303],"mapped",[120]],[[127304,127304],"mapped",[121]],[[127305,127305],"mapped",[122]],[[127306,127306],"mapped",[104,118]],[[127307,127307],"mapped",[109,118]],[[127308,127308],"mapped",[115,100]],[[127309,127309],"mapped",[115,115]],[[127310,127310],"mapped",[112,112,118]],[[127311,127311],"mapped",[119,99]],[[127312,127318],"valid",[],"NV8"],[[127319,127319],"valid",[],"NV8"],[[127320,127326],"valid",[],"NV8"],[[127327,127327],"valid",[],"NV8"],[[127328,127337],"valid",[],"NV8"],[[127338,127338],"mapped",[109,99]],[[127339,127339],"mapped",[109,100]],[[127340,127343],"disallowed"],[[127344,127352],"valid",[],"NV8"],[[127353,127353],"valid",[],"NV8"],[[127354,127354],"valid",[],"NV8"],[[127355,127356],"valid",[],"NV8"],[[127357,127358],"valid",[],"NV8"],[[127359,127359],"valid",[],"NV8"],[[127360,127369],"valid",[],"NV8"],[[127370,127373],"valid",[],"NV8"],[[127374,127375],"valid",[],"NV8"],[[127376,127376],"mapped",[100,106]],[[127377,127386],"valid",[],"NV8"],[[127387,127461],"disallowed"],[[127462,127487],"valid",[],"NV8"],[[127488,127488],"mapped",[12411,12363]],[[127489,127489],"mapped",[12467,12467]],[[127490,127490],"mapped",[12469]],[[127491,1275
03],"disallowed"],[[127504,127504],"mapped",[25163]],[[127505,127505],"mapped",[23383]],[[127506,127506],"mapped",[21452]],[[127507,127507],"mapped",[12487]],[[127508,127508],"mapped",[20108]],[[127509,127509],"mapped",[22810]],[[127510,127510],"mapped",[35299]],[[127511,127511],"mapped",[22825]],[[127512,127512],"mapped",[20132]],[[127513,127513],"mapped",[26144]],[[127514,127514],"mapped",[28961]],[[127515,127515],"mapped",[26009]],[[127516,127516],"mapped",[21069]],[[127517,127517],"mapped",[24460]],[[127518,127518],"mapped",[20877]],[[127519,127519],"mapped",[26032]],[[127520,127520],"mapped",[21021]],[[127521,127521],"mapped",[32066]],[[127522,127522],"mapped",[29983]],[[127523,127523],"mapped",[36009]],[[127524,127524],"mapped",[22768]],[[127525,127525],"mapped",[21561]],[[127526,127526],"mapped",[28436]],[[127527,127527],"mapped",[25237]],[[127528,127528],"mapped",[25429]],[[127529,127529],"mapped",[19968]],[[127530,127530],"mapped",[19977]],[[127531,127531],"mapped",[36938]],[[127532,127532],"mapped",[24038]],[[127533,127533],"mapped",[20013]],[[127534,127534],"mapped",[21491]],[[127535,127535],"mapped",[25351]],[[127536,127536],"mapped",[36208]],[[127537,127537],"mapped",[25171]],[[127538,127538],"mapped",[31105]],[[127539,127539],"mapped",[31354]],[[127540,127540],"mapped",[21512]],[[127541,127541],"mapped",[28288]],[[127542,127542],"mapped",[26377]],[[127543,127543],"mapped",[26376]],[[127544,127544],"mapped",[30003]],[[127545,127545],"mapped",[21106]],[[127546,127546],"mapped",[21942]],[[127547,127551],"disallowed"],[[127552,127552],"mapped",[12308,26412,12309]],[[127553,127553],"mapped",[12308,19977,12309]],[[127554,127554],"mapped",[12308,20108,12309]],[[127555,127555],"mapped",[12308,23433,12309]],[[127556,127556],"mapped",[12308,28857,12309]],[[127557,127557],"mapped",[12308,25171,12309]],[[127558,127558],"mapped",[12308,30423,12309]],[[127559,127559],"mapped",[12308,21213,12309]],[[127560,127560],"mapped",[12308,25943,12309]],[[127561,127567],"disallowed"],[[127568,127568],"mapped",[24471]],[[127569,127569],"mapped",[21487]],[[127570,127743],"disallowed"],[[127744,127776],"valid",[],"NV8"],[[127777,127788],"valid",[],"NV8"],[[127789,127791],"valid",[],"NV8"],[[127792,127797],"valid",[],"NV8"],[[127798,127798],"valid",[],"NV8"],[[127799,127868],"valid",[],"NV8"],[[127869,127869],"valid",[],"NV8"],[[127870,127871],"valid",[],"NV8"],[[127872,127891],"valid",[],"NV8"],[[127892,127903],"valid",[],"NV8"],[[127904,127940],"valid",[],"NV8"],[[127941,127941],"valid",[],"NV8"],[[127942,127946],"valid",[],"NV8"],[[127947,127950],"valid",[],"NV8"],[[127951,127955],"valid",[],"NV8"],[[127956,127967],"valid",[],"NV8"],[[127968,127984],"valid",[],"NV8"],[[127985,127991],"valid",[],"NV8"],[[127992,127999],"valid",[],"NV8"],[[128000,128062],"valid",[],"NV8"],[[128063,128063],"valid",[],"NV8"],[[128064,128064],"valid",[],"NV8"],[[128065,128065],"valid",[],"NV8"],[[128066,128247],"valid",[],"NV8"],[[128248,128248],"valid",[],"NV8"],[[128249,128252],"valid",[],"NV8"],[[128253,128254],"valid",[],"NV8"],[[128255,128255],"valid",[],"NV8"],[[128256,128317],"valid",[],"NV8"],[[128318,128319],"valid",[],"NV8"],[[128320,128323],"valid",[],"NV8"],[[128324,128330],"valid",[],"NV8"],[[128331,128335],"valid",[],"NV8"],[[128336,128359],"valid",[],"NV8"],[[128360,128377],"valid",[],"NV8"],[[128378,128378],"disallowed"],[[128379,128419],"valid",[],"NV8"],[[128420,128420],"disallowed"],[[128421,128506],"valid",[],"NV8"],[[128507,128511],"valid",[],"NV8"],[[128512,128512],"valid",[],"NV8"],[[128513,128528],"v
alid",[],"NV8"],[[128529,128529],"valid",[],"NV8"],[[128530,128532],"valid",[],"NV8"],[[128533,128533],"valid",[],"NV8"],[[128534,128534],"valid",[],"NV8"],[[128535,128535],"valid",[],"NV8"],[[128536,128536],"valid",[],"NV8"],[[128537,128537],"valid",[],"NV8"],[[128538,128538],"valid",[],"NV8"],[[128539,128539],"valid",[],"NV8"],[[128540,128542],"valid",[],"NV8"],[[128543,128543],"valid",[],"NV8"],[[128544,128549],"valid",[],"NV8"],[[128550,128551],"valid",[],"NV8"],[[128552,128555],"valid",[],"NV8"],[[128556,128556],"valid",[],"NV8"],[[128557,128557],"valid",[],"NV8"],[[128558,128559],"valid",[],"NV8"],[[128560,128563],"valid",[],"NV8"],[[128564,128564],"valid",[],"NV8"],[[128565,128576],"valid",[],"NV8"],[[128577,128578],"valid",[],"NV8"],[[128579,128580],"valid",[],"NV8"],[[128581,128591],"valid",[],"NV8"],[[128592,128639],"valid",[],"NV8"],[[128640,128709],"valid",[],"NV8"],[[128710,128719],"valid",[],"NV8"],[[128720,128720],"valid",[],"NV8"],[[128721,128735],"disallowed"],[[128736,128748],"valid",[],"NV8"],[[128749,128751],"disallowed"],[[128752,128755],"valid",[],"NV8"],[[128756,128767],"disallowed"],[[128768,128883],"valid",[],"NV8"],[[128884,128895],"disallowed"],[[128896,128980],"valid",[],"NV8"],[[128981,129023],"disallowed"],[[129024,129035],"valid",[],"NV8"],[[129036,129039],"disallowed"],[[129040,129095],"valid",[],"NV8"],[[129096,129103],"disallowed"],[[129104,129113],"valid",[],"NV8"],[[129114,129119],"disallowed"],[[129120,129159],"valid",[],"NV8"],[[129160,129167],"disallowed"],[[129168,129197],"valid",[],"NV8"],[[129198,129295],"disallowed"],[[129296,129304],"valid",[],"NV8"],[[129305,129407],"disallowed"],[[129408,129412],"valid",[],"NV8"],[[129413,129471],"disallowed"],[[129472,129472],"valid",[],"NV8"],[[129473,131069],"disallowed"],[[131070,131071],"disallowed"],[[131072,173782],"valid"],[[173783,173823],"disallowed"],[[173824,177972],"valid"],[[177973,177983],"disallowed"],[[177984,178205],"valid"],[[178206,178207],"disallowed"],[[178208,183969],"valid"],[[183970,194559],"disallowed"],[[194560,194560],"mapped",[20029]],[[194561,194561],"mapped",[20024]],[[194562,194562],"mapped",[20033]],[[194563,194563],"mapped",[131362]],[[194564,194564],"mapped",[20320]],[[194565,194565],"mapped",[20398]],[[194566,194566],"mapped",[20411]],[[194567,194567],"mapped",[20482]],[[194568,194568],"mapped",[20602]],[[194569,194569],"mapped",[20633]],[[194570,194570],"mapped",[20711]],[[194571,194571],"mapped",[20687]],[[194572,194572],"mapped",[13470]],[[194573,194573],"mapped",[132666]],[[194574,194574],"mapped",[20813]],[[194575,194575],"mapped",[20820]],[[194576,194576],"mapped",[20836]],[[194577,194577],"mapped",[20855]],[[194578,194578],"mapped",[132380]],[[194579,194579],"mapped",[13497]],[[194580,194580],"mapped",[20839]],[[194581,194581],"mapped",[20877]],[[194582,194582],"mapped",[132427]],[[194583,194583],"mapped",[20887]],[[194584,194584],"mapped",[20900]],[[194585,194585],"mapped",[20172]],[[194586,194586],"mapped",[20908]],[[194587,194587],"mapped",[20917]],[[194588,194588],"mapped",[168415]],[[194589,194589],"mapped",[20981]],[[194590,194590],"mapped",[20995]],[[194591,194591],"mapped",[13535]],[[194592,194592],"mapped",[21051]],[[194593,194593],"mapped",[21062]],[[194594,194594],"mapped",[21106]],[[194595,194595],"mapped",[21111]],[[194596,194596],"mapped",[13589]],[[194597,194597],"mapped",[21191]],[[194598,194598],"mapped",[21193]],[[194599,194599],"mapped",[21220]],[[194600,194600],"mapped",[21242]],[[194601,194601],"mapped",[21253]],[[194602,194602],"mapped",[21254]],[[
194603,194603],"mapped",[21271]],[[194604,194604],"mapped",[21321]],[[194605,194605],"mapped",[21329]],[[194606,194606],"mapped",[21338]],[[194607,194607],"mapped",[21363]],[[194608,194608],"mapped",[21373]],[[194609,194611],"mapped",[21375]],[[194612,194612],"mapped",[133676]],[[194613,194613],"mapped",[28784]],[[194614,194614],"mapped",[21450]],[[194615,194615],"mapped",[21471]],[[194616,194616],"mapped",[133987]],[[194617,194617],"mapped",[21483]],[[194618,194618],"mapped",[21489]],[[194619,194619],"mapped",[21510]],[[194620,194620],"mapped",[21662]],[[194621,194621],"mapped",[21560]],[[194622,194622],"mapped",[21576]],[[194623,194623],"mapped",[21608]],[[194624,194624],"mapped",[21666]],[[194625,194625],"mapped",[21750]],[[194626,194626],"mapped",[21776]],[[194627,194627],"mapped",[21843]],[[194628,194628],"mapped",[21859]],[[194629,194630],"mapped",[21892]],[[194631,194631],"mapped",[21913]],[[194632,194632],"mapped",[21931]],[[194633,194633],"mapped",[21939]],[[194634,194634],"mapped",[21954]],[[194635,194635],"mapped",[22294]],[[194636,194636],"mapped",[22022]],[[194637,194637],"mapped",[22295]],[[194638,194638],"mapped",[22097]],[[194639,194639],"mapped",[22132]],[[194640,194640],"mapped",[20999]],[[194641,194641],"mapped",[22766]],[[194642,194642],"mapped",[22478]],[[194643,194643],"mapped",[22516]],[[194644,194644],"mapped",[22541]],[[194645,194645],"mapped",[22411]],[[194646,194646],"mapped",[22578]],[[194647,194647],"mapped",[22577]],[[194648,194648],"mapped",[22700]],[[194649,194649],"mapped",[136420]],[[194650,194650],"mapped",[22770]],[[194651,194651],"mapped",[22775]],[[194652,194652],"mapped",[22790]],[[194653,194653],"mapped",[22810]],[[194654,194654],"mapped",[22818]],[[194655,194655],"mapped",[22882]],[[194656,194656],"mapped",[136872]],[[194657,194657],"mapped",[136938]],[[194658,194658],"mapped",[23020]],[[194659,194659],"mapped",[23067]],[[194660,194660],"mapped",[23079]],[[194661,194661],"mapped",[23000]],[[194662,194662],"mapped",[23142]],[[194663,194663],"mapped",[14062]],[[194664,194664],"disallowed"],[[194665,194665],"mapped",[23304]],[[194666,194667],"mapped",[23358]],[[194668,194668],"mapped",[137672]],[[194669,194669],"mapped",[23491]],[[194670,194670],"mapped",[23512]],[[194671,194671],"mapped",[23527]],[[194672,194672],"mapped",[23539]],[[194673,194673],"mapped",[138008]],[[194674,194674],"mapped",[23551]],[[194675,194675],"mapped",[23558]],[[194676,194676],"disallowed"],[[194677,194677],"mapped",[23586]],[[194678,194678],"mapped",[14209]],[[194679,194679],"mapped",[23648]],[[194680,194680],"mapped",[23662]],[[194681,194681],"mapped",[23744]],[[194682,194682],"mapped",[23693]],[[194683,194683],"mapped",[138724]],[[194684,194684],"mapped",[23875]],[[194685,194685],"mapped",[138726]],[[194686,194686],"mapped",[23918]],[[194687,194687],"mapped",[23915]],[[194688,194688],"mapped",[23932]],[[194689,194689],"mapped",[24033]],[[194690,194690],"mapped",[24034]],[[194691,194691],"mapped",[14383]],[[194692,194692],"mapped",[24061]],[[194693,194693],"mapped",[24104]],[[194694,194694],"mapped",[24125]],[[194695,194695],"mapped",[24169]],[[194696,194696],"mapped",[14434]],[[194697,194697],"mapped",[139651]],[[194698,194698],"mapped",[14460]],[[194699,194699],"mapped",[24240]],[[194700,194700],"mapped",[24243]],[[194701,194701],"mapped",[24246]],[[194702,194702],"mapped",[24266]],[[194703,194703],"mapped",[172946]],[[194704,194704],"mapped",[24318]],[[194705,194706],"mapped",[140081]],[[194707,194707],"mapped",[33281]],[[194708,194709],"mapped",[24354]],[[194710,194710],"
mapped",[14535]],[[194711,194711],"mapped",[144056]],[[194712,194712],"mapped",[156122]],[[194713,194713],"mapped",[24418]],[[194714,194714],"mapped",[24427]],[[194715,194715],"mapped",[14563]],[[194716,194716],"mapped",[24474]],[[194717,194717],"mapped",[24525]],[[194718,194718],"mapped",[24535]],[[194719,194719],"mapped",[24569]],[[194720,194720],"mapped",[24705]],[[194721,194721],"mapped",[14650]],[[194722,194722],"mapped",[14620]],[[194723,194723],"mapped",[24724]],[[194724,194724],"mapped",[141012]],[[194725,194725],"mapped",[24775]],[[194726,194726],"mapped",[24904]],[[194727,194727],"mapped",[24908]],[[194728,194728],"mapped",[24910]],[[194729,194729],"mapped",[24908]],[[194730,194730],"mapped",[24954]],[[194731,194731],"mapped",[24974]],[[194732,194732],"mapped",[25010]],[[194733,194733],"mapped",[24996]],[[194734,194734],"mapped",[25007]],[[194735,194735],"mapped",[25054]],[[194736,194736],"mapped",[25074]],[[194737,194737],"mapped",[25078]],[[194738,194738],"mapped",[25104]],[[194739,194739],"mapped",[25115]],[[194740,194740],"mapped",[25181]],[[194741,194741],"mapped",[25265]],[[194742,194742],"mapped",[25300]],[[194743,194743],"mapped",[25424]],[[194744,194744],"mapped",[142092]],[[194745,194745],"mapped",[25405]],[[194746,194746],"mapped",[25340]],[[194747,194747],"mapped",[25448]],[[194748,194748],"mapped",[25475]],[[194749,194749],"mapped",[25572]],[[194750,194750],"mapped",[142321]],[[194751,194751],"mapped",[25634]],[[194752,194752],"mapped",[25541]],[[194753,194753],"mapped",[25513]],[[194754,194754],"mapped",[14894]],[[194755,194755],"mapped",[25705]],[[194756,194756],"mapped",[25726]],[[194757,194757],"mapped",[25757]],[[194758,194758],"mapped",[25719]],[[194759,194759],"mapped",[14956]],[[194760,194760],"mapped",[25935]],[[194761,194761],"mapped",[25964]],[[194762,194762],"mapped",[143370]],[[194763,194763],"mapped",[26083]],[[194764,194764],"mapped",[26360]],[[194765,194765],"mapped",[26185]],[[194766,194766],"mapped",[15129]],[[194767,194767],"mapped",[26257]],[[194768,194768],"mapped",[15112]],[[194769,194769],"mapped",[15076]],[[194770,194770],"mapped",[20882]],[[194771,194771],"mapped",[20885]],[[194772,194772],"mapped",[26368]],[[194773,194773],"mapped",[26268]],[[194774,194774],"mapped",[32941]],[[194775,194775],"mapped",[17369]],[[194776,194776],"mapped",[26391]],[[194777,194777],"mapped",[26395]],[[194778,194778],"mapped",[26401]],[[194779,194779],"mapped",[26462]],[[194780,194780],"mapped",[26451]],[[194781,194781],"mapped",[144323]],[[194782,194782],"mapped",[15177]],[[194783,194783],"mapped",[26618]],[[194784,194784],"mapped",[26501]],[[194785,194785],"mapped",[26706]],[[194786,194786],"mapped",[26757]],[[194787,194787],"mapped",[144493]],[[194788,194788],"mapped",[26766]],[[194789,194789],"mapped",[26655]],[[194790,194790],"mapped",[26900]],[[194791,194791],"mapped",[15261]],[[194792,194792],"mapped",[26946]],[[194793,194793],"mapped",[27043]],[[194794,194794],"mapped",[27114]],[[194795,194795],"mapped",[27304]],[[194796,194796],"mapped",[145059]],[[194797,194797],"mapped",[27355]],[[194798,194798],"mapped",[15384]],[[194799,194799],"mapped",[27425]],[[194800,194800],"mapped",[145575]],[[194801,194801],"mapped",[27476]],[[194802,194802],"mapped",[15438]],[[194803,194803],"mapped",[27506]],[[194804,194804],"mapped",[27551]],[[194805,194805],"mapped",[27578]],[[194806,194806],"mapped",[27579]],[[194807,194807],"mapped",[146061]],[[194808,194808],"mapped",[138507]],[[194809,194809],"mapped",[146170]],[[194810,194810],"mapped",[27726]],[[194811,194811],"mapped"
,[146620]],[[194812,194812],"mapped",[27839]],[[194813,194813],"mapped",[27853]],[[194814,194814],"mapped",[27751]],[[194815,194815],"mapped",[27926]],[[194816,194816],"mapped",[27966]],[[194817,194817],"mapped",[28023]],[[194818,194818],"mapped",[27969]],[[194819,194819],"mapped",[28009]],[[194820,194820],"mapped",[28024]],[[194821,194821],"mapped",[28037]],[[194822,194822],"mapped",[146718]],[[194823,194823],"mapped",[27956]],[[194824,194824],"mapped",[28207]],[[194825,194825],"mapped",[28270]],[[194826,194826],"mapped",[15667]],[[194827,194827],"mapped",[28363]],[[194828,194828],"mapped",[28359]],[[194829,194829],"mapped",[147153]],[[194830,194830],"mapped",[28153]],[[194831,194831],"mapped",[28526]],[[194832,194832],"mapped",[147294]],[[194833,194833],"mapped",[147342]],[[194834,194834],"mapped",[28614]],[[194835,194835],"mapped",[28729]],[[194836,194836],"mapped",[28702]],[[194837,194837],"mapped",[28699]],[[194838,194838],"mapped",[15766]],[[194839,194839],"mapped",[28746]],[[194840,194840],"mapped",[28797]],[[194841,194841],"mapped",[28791]],[[194842,194842],"mapped",[28845]],[[194843,194843],"mapped",[132389]],[[194844,194844],"mapped",[28997]],[[194845,194845],"mapped",[148067]],[[194846,194846],"mapped",[29084]],[[194847,194847],"disallowed"],[[194848,194848],"mapped",[29224]],[[194849,194849],"mapped",[29237]],[[194850,194850],"mapped",[29264]],[[194851,194851],"mapped",[149000]],[[194852,194852],"mapped",[29312]],[[194853,194853],"mapped",[29333]],[[194854,194854],"mapped",[149301]],[[194855,194855],"mapped",[149524]],[[194856,194856],"mapped",[29562]],[[194857,194857],"mapped",[29579]],[[194858,194858],"mapped",[16044]],[[194859,194859],"mapped",[29605]],[[194860,194861],"mapped",[16056]],[[194862,194862],"mapped",[29767]],[[194863,194863],"mapped",[29788]],[[194864,194864],"mapped",[29809]],[[194865,194865],"mapped",[29829]],[[194866,194866],"mapped",[29898]],[[194867,194867],"mapped",[16155]],[[194868,194868],"mapped",[29988]],[[194869,194869],"mapped",[150582]],[[194870,194870],"mapped",[30014]],[[194871,194871],"mapped",[150674]],[[194872,194872],"mapped",[30064]],[[194873,194873],"mapped",[139679]],[[194874,194874],"mapped",[30224]],[[194875,194875],"mapped",[151457]],[[194876,194876],"mapped",[151480]],[[194877,194877],"mapped",[151620]],[[194878,194878],"mapped",[16380]],[[194879,194879],"mapped",[16392]],[[194880,194880],"mapped",[30452]],[[194881,194881],"mapped",[151795]],[[194882,194882],"mapped",[151794]],[[194883,194883],"mapped",[151833]],[[194884,194884],"mapped",[151859]],[[194885,194885],"mapped",[30494]],[[194886,194887],"mapped",[30495]],[[194888,194888],"mapped",[30538]],[[194889,194889],"mapped",[16441]],[[194890,194890],"mapped",[30603]],[[194891,194891],"mapped",[16454]],[[194892,194892],"mapped",[16534]],[[194893,194893],"mapped",[152605]],[[194894,194894],"mapped",[30798]],[[194895,194895],"mapped",[30860]],[[194896,194896],"mapped",[30924]],[[194897,194897],"mapped",[16611]],[[194898,194898],"mapped",[153126]],[[194899,194899],"mapped",[31062]],[[194900,194900],"mapped",[153242]],[[194901,194901],"mapped",[153285]],[[194902,194902],"mapped",[31119]],[[194903,194903],"mapped",[31211]],[[194904,194904],"mapped",[16687]],[[194905,194905],"mapped",[31296]],[[194906,194906],"mapped",[31306]],[[194907,194907],"mapped",[31311]],[[194908,194908],"mapped",[153980]],[[194909,194910],"mapped",[154279]],[[194911,194911],"disallowed"],[[194912,194912],"mapped",[16898]],[[194913,194913],"mapped",[154539]],[[194914,194914],"mapped",[31686]],[[194915,194915],"mapped",
[31689]],[[194916,194916],"mapped",[16935]],[[194917,194917],"mapped",[154752]],[[194918,194918],"mapped",[31954]],[[194919,194919],"mapped",[17056]],[[194920,194920],"mapped",[31976]],[[194921,194921],"mapped",[31971]],[[194922,194922],"mapped",[32000]],[[194923,194923],"mapped",[155526]],[[194924,194924],"mapped",[32099]],[[194925,194925],"mapped",[17153]],[[194926,194926],"mapped",[32199]],[[194927,194927],"mapped",[32258]],[[194928,194928],"mapped",[32325]],[[194929,194929],"mapped",[17204]],[[194930,194930],"mapped",[156200]],[[194931,194931],"mapped",[156231]],[[194932,194932],"mapped",[17241]],[[194933,194933],"mapped",[156377]],[[194934,194934],"mapped",[32634]],[[194935,194935],"mapped",[156478]],[[194936,194936],"mapped",[32661]],[[194937,194937],"mapped",[32762]],[[194938,194938],"mapped",[32773]],[[194939,194939],"mapped",[156890]],[[194940,194940],"mapped",[156963]],[[194941,194941],"mapped",[32864]],[[194942,194942],"mapped",[157096]],[[194943,194943],"mapped",[32880]],[[194944,194944],"mapped",[144223]],[[194945,194945],"mapped",[17365]],[[194946,194946],"mapped",[32946]],[[194947,194947],"mapped",[33027]],[[194948,194948],"mapped",[17419]],[[194949,194949],"mapped",[33086]],[[194950,194950],"mapped",[23221]],[[194951,194951],"mapped",[157607]],[[194952,194952],"mapped",[157621]],[[194953,194953],"mapped",[144275]],[[194954,194954],"mapped",[144284]],[[194955,194955],"mapped",[33281]],[[194956,194956],"mapped",[33284]],[[194957,194957],"mapped",[36766]],[[194958,194958],"mapped",[17515]],[[194959,194959],"mapped",[33425]],[[194960,194960],"mapped",[33419]],[[194961,194961],"mapped",[33437]],[[194962,194962],"mapped",[21171]],[[194963,194963],"mapped",[33457]],[[194964,194964],"mapped",[33459]],[[194965,194965],"mapped",[33469]],[[194966,194966],"mapped",[33510]],[[194967,194967],"mapped",[158524]],[[194968,194968],"mapped",[33509]],[[194969,194969],"mapped",[33565]],[[194970,194970],"mapped",[33635]],[[194971,194971],"mapped",[33709]],[[194972,194972],"mapped",[33571]],[[194973,194973],"mapped",[33725]],[[194974,194974],"mapped",[33767]],[[194975,194975],"mapped",[33879]],[[194976,194976],"mapped",[33619]],[[194977,194977],"mapped",[33738]],[[194978,194978],"mapped",[33740]],[[194979,194979],"mapped",[33756]],[[194980,194980],"mapped",[158774]],[[194981,194981],"mapped",[159083]],[[194982,194982],"mapped",[158933]],[[194983,194983],"mapped",[17707]],[[194984,194984],"mapped",[34033]],[[194985,194985],"mapped",[34035]],[[194986,194986],"mapped",[34070]],[[194987,194987],"mapped",[160714]],[[194988,194988],"mapped",[34148]],[[194989,194989],"mapped",[159532]],[[194990,194990],"mapped",[17757]],[[194991,194991],"mapped",[17761]],[[194992,194992],"mapped",[159665]],[[194993,194993],"mapped",[159954]],[[194994,194994],"mapped",[17771]],[[194995,194995],"mapped",[34384]],[[194996,194996],"mapped",[34396]],[[194997,194997],"mapped",[34407]],[[194998,194998],"mapped",[34409]],[[194999,194999],"mapped",[34473]],[[195000,195000],"mapped",[34440]],[[195001,195001],"mapped",[34574]],[[195002,195002],"mapped",[34530]],[[195003,195003],"mapped",[34681]],[[195004,195004],"mapped",[34600]],[[195005,195005],"mapped",[34667]],[[195006,195006],"mapped",[34694]],[[195007,195007],"disallowed"],[[195008,195008],"mapped",[34785]],[[195009,195009],"mapped",[34817]],[[195010,195010],"mapped",[17913]],[[195011,195011],"mapped",[34912]],[[195012,195012],"mapped",[34915]],[[195013,195013],"mapped",[161383]],[[195014,195014],"mapped",[35031]],[[195015,195015],"mapped",[35038]],[[195016,195016],"mapped",[
17973]],[[195017,195017],"mapped",[35066]],[[195018,195018],"mapped",[13499]],[[195019,195019],"mapped",[161966]],[[195020,195020],"mapped",[162150]],[[195021,195021],"mapped",[18110]],[[195022,195022],"mapped",[18119]],[[195023,195023],"mapped",[35488]],[[195024,195024],"mapped",[35565]],[[195025,195025],"mapped",[35722]],[[195026,195026],"mapped",[35925]],[[195027,195027],"mapped",[162984]],[[195028,195028],"mapped",[36011]],[[195029,195029],"mapped",[36033]],[[195030,195030],"mapped",[36123]],[[195031,195031],"mapped",[36215]],[[195032,195032],"mapped",[163631]],[[195033,195033],"mapped",[133124]],[[195034,195034],"mapped",[36299]],[[195035,195035],"mapped",[36284]],[[195036,195036],"mapped",[36336]],[[195037,195037],"mapped",[133342]],[[195038,195038],"mapped",[36564]],[[195039,195039],"mapped",[36664]],[[195040,195040],"mapped",[165330]],[[195041,195041],"mapped",[165357]],[[195042,195042],"mapped",[37012]],[[195043,195043],"mapped",[37105]],[[195044,195044],"mapped",[37137]],[[195045,195045],"mapped",[165678]],[[195046,195046],"mapped",[37147]],[[195047,195047],"mapped",[37432]],[[195048,195048],"mapped",[37591]],[[195049,195049],"mapped",[37592]],[[195050,195050],"mapped",[37500]],[[195051,195051],"mapped",[37881]],[[195052,195052],"mapped",[37909]],[[195053,195053],"mapped",[166906]],[[195054,195054],"mapped",[38283]],[[195055,195055],"mapped",[18837]],[[195056,195056],"mapped",[38327]],[[195057,195057],"mapped",[167287]],[[195058,195058],"mapped",[18918]],[[195059,195059],"mapped",[38595]],[[195060,195060],"mapped",[23986]],[[195061,195061],"mapped",[38691]],[[195062,195062],"mapped",[168261]],[[195063,195063],"mapped",[168474]],[[195064,195064],"mapped",[19054]],[[195065,195065],"mapped",[19062]],[[195066,195066],"mapped",[38880]],[[195067,195067],"mapped",[168970]],[[195068,195068],"mapped",[19122]],[[195069,195069],"mapped",[169110]],[[195070,195071],"mapped",[38923]],[[195072,195072],"mapped",[38953]],[[195073,195073],"mapped",[169398]],[[195074,195074],"mapped",[39138]],[[195075,195075],"mapped",[19251]],[[195076,195076],"mapped",[39209]],[[195077,195077],"mapped",[39335]],[[195078,195078],"mapped",[39362]],[[195079,195079],"mapped",[39422]],[[195080,195080],"mapped",[19406]],[[195081,195081],"mapped",[170800]],[[195082,195082],"mapped",[39698]],[[195083,195083],"mapped",[40000]],[[195084,195084],"mapped",[40189]],[[195085,195085],"mapped",[19662]],[[195086,195086],"mapped",[19693]],[[195087,195087],"mapped",[40295]],[[195088,195088],"mapped",[172238]],[[195089,195089],"mapped",[19704]],[[195090,195090],"mapped",[172293]],[[195091,195091],"mapped",[172558]],[[195092,195092],"mapped",[172689]],[[195093,195093],"mapped",[40635]],[[195094,195094],"mapped",[19798]],[[195095,195095],"mapped",[40697]],[[195096,195096],"mapped",[40702]],[[195097,195097],"mapped",[40709]],[[195098,195098],"mapped",[40719]],[[195099,195099],"mapped",[40726]],[[195100,195100],"mapped",[40763]],[[195101,195101],"mapped",[173568]],[[195102,196605],"disallowed"],[[196606,196607],"disallowed"],[[196608,262141],"disallowed"],[[262142,262143],"disallowed"],[[262144,327677],"disallowed"],[[327678,327679],"disallowed"],[[327680,393213],"disallowed"],[[393214,393215],"disallowed"],[[393216,458749],"disallowed"],[[458750,458751],"disallowed"],[[458752,524285],"disallowed"],[[524286,524287],"disallowed"],[[524288,589821],"disallowed"],[[589822,589823],"disallowed"],[[589824,655357],"disallowed"],[[655358,655359],"disallowed"],[[655360,720893],"disallowed"],[[720894,720895],"disallowed"],[[720896,786429],"disallowe
d"],[[786430,786431],"disallowed"],[[786432,851965],"disallowed"],[[851966,851967],"disallowed"],[[851968,917501],"disallowed"],[[917502,917503],"disallowed"],[[917504,917504],"disallowed"],[[917505,917505],"disallowed"],[[917506,917535],"disallowed"],[[917536,917631],"disallowed"],[[917632,917759],"disallowed"],[[917760,917999],"ignored"],[[918000,983037],"disallowed"],[[983038,983039],"disallowed"],[[983040,1048573],"disallowed"],[[1048574,1048575],"disallowed"],[[1048576,1114109],"disallowed"],[[1114110,1114111],"disallowed"]]
\ No newline at end of file
diff --git a/node_modules/tr46/package.json b/node_modules/tr46/package.json
new file mode 100644
index 0000000..b6826da
--- /dev/null
+++ b/node_modules/tr46/package.json
@@ -0,0 +1,31 @@
+{
+  "name": "tr46",
+  "version": "0.0.3",
+  "description": "An implementation of the Unicode TR46 spec",
+  "main": "index.js",
+  "scripts": {
+    "test": "mocha",
+    "pretest": "node scripts/getLatestUnicodeTests.js",
+    "prepublish": "node scripts/generateMappingTable.js"
+  },
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/Sebmaster/tr46.js.git"
+  },
+  "keywords": [
+    "unicode",
+    "tr46",
+    "url",
+    "whatwg"
+  ],
+  "author": "Sebastian Mayr ",
+  "license": "MIT",
+  "bugs": {
+    "url": "https://github.com/Sebmaster/tr46.js/issues"
+  },
+  "homepage": "https://github.com/Sebmaster/tr46.js#readme",
+  "devDependencies": {
+    "mocha": "^2.2.5",
+    "request": "^2.57.0"
+  }
+}
diff --git a/node_modules/tunnel/.idea/encodings.xml b/node_modules/tunnel/.idea/encodings.xml
new file mode 100644
index 0000000..97626ba
--- /dev/null
+++ b/node_modules/tunnel/.idea/encodings.xml
@@ -0,0 +1,6 @@
+
+
+
+
+
+
\ No newline at end of file
diff --git a/node_modules/tunnel/.idea/modules.xml b/node_modules/tunnel/.idea/modules.xml
new file mode 100644
index 0000000..27bf888
--- /dev/null
+++ b/node_modules/tunnel/.idea/modules.xml
@@ -0,0 +1,8 @@
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/node_modules/tunnel/.idea/node-tunnel.iml b/node_modules/tunnel/.idea/node-tunnel.iml
new file mode 100644
index 0000000..24643cc
--- /dev/null
+++ b/node_modules/tunnel/.idea/node-tunnel.iml
@@ -0,0 +1,12 @@
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/node_modules/tunnel/.idea/vcs.xml b/node_modules/tunnel/.idea/vcs.xml
new file mode 100644
index 0000000..94a25f7
--- /dev/null
+++ b/node_modules/tunnel/.idea/vcs.xml
@@ -0,0 +1,6 @@
+
+
+
+
+
+
\ No newline at end of file
diff --git a/node_modules/tunnel/.idea/workspace.xml b/node_modules/tunnel/.idea/workspace.xml
new file mode 100644
index 0000000..1a318c8
--- /dev/null
+++ b/node_modules/tunnel/.idea/workspace.xml
@@ -0,0 +1,797 @@